branch_name stringclasses 149 values | text stringlengths 23 89.3M | directory_id stringlengths 40 40 | languages listlengths 1 19 | num_files int64 1 11.8k | repo_language stringclasses 38 values | repo_name stringlengths 6 114 | revision_id stringlengths 40 40 | snapshot_id stringlengths 40 40 |
|---|---|---|---|---|---|---|---|---|
refs/heads/master | <repo_name>skeptycal/mdpopup_test<file_sep>/test_uml.md
## UML
```uml
@startuml
Alice -> Bob: Authentication Request
alt successful case
Bob -> Alice: Authentication Accepted
else some kind of failure
Bob -> Alice: Authentication Failure
group My own label
Alice -> Log : Log attack start
loop 1000 times
Alice -> Bob: DNS Attack
end
Alice -> Log : Log attack end
end
else Another type of failure
Bob -> Alice: Please repeat
end
@enduml
```
<file_sep>/test.py
# -*- coding: UTF-8 -*-
"""
Mdpopups manual test module.
On load, it will clear the cache and show the test menu.
Subsequent tests can be loaded:
mdpopups.tests.menu()
If you need to reload the test module.
import imp
mdpopups.tests = imp.reload(mdpopups.tests)
If you need to clear the cache.
mdpopups.tests.clear_cache()
"""
import sublime
import sublime_plugin
import mdpopups
import sys
from . import plantuml
# Reference to this module itself so tests can be looked up by name via getattr().
this = sys.modules[__name__]
# Sublime resource paths ("Packages/..." form) for the Markdown test documents.
TEST_MD = "Packages/mdpopup_test/test.md"
TEST_UML_MD = "Packages/mdpopup_test/test_uml.md"
# YAML frontmatter prepended to the basic-formatting test document:
# it enables the Markdown extensions exercised by test.md.
frontmatter = {
    "markdown_extensions": [
        "markdown.extensions.admonition",
        "markdown.extensions.attr_list",
        "markdown.extensions.def_list",
        "markdown.extensions.nl2br",
        # Smart quotes always have corner cases that annoy me, so don't bother with them.
        {"markdown.extensions.smarty": {"smart_quotes": False}},
        "pymdownx.betterem",
        {
            "pymdownx.magiclink": {
                "repo_url_shortener": True,
                "base_repo_url": "https://github.com/facelessuser/sublime-markdown-popups"
            }
        },
        "pymdownx.extrarawhtml",
        "pymdownx.keys",
        {"pymdownx.escapeall": {"hardbreak": True, "nbsp": True}},
        # Sublime doesn't support superscript, so no ordinal numbers
        {"pymdownx.smartsymbols": {"ordinal_numbers": False}}
    ]
}
# Frontmatter for the UML test: registers a custom fence so ```uml blocks
# are rendered through plantuml.uml_format.
frontmatter_uml = {
    "custom_fences": [
        {'name': 'uml', 'class': 'uml', 'format': plantuml.uml_format}
    ]
}
def active_view():
    """Return the view that currently has focus in the active window."""

    window = sublime.active_window()
    return window.active_view()
def clear_cache():
    """Clear CSS cache."""

    # Thin pass-through so tests can call mdpopups.tests.clear_cache().
    mdpopups.clear_cache()
def menu(fmatter, md_file):
    """Show a quick panel listing the available tests and run the selection."""

    tests = (
        "Popup Format",
        "Phantom Format"
    )

    def run_test(value, fm, md):
        """Dispatch to the module function matching the chosen entry."""

        if value >= 0:
            # e.g. "Popup Format" -> mdpopups_popup_format_test
            name = tests[value].lower().replace(' ', '_')
            getattr(this, 'mdpopups_%s_test' % name)(fm, md)

    window = active_view().window()
    window.show_quick_panel(tests, lambda v, fm=fmatter, md=md_file: run_test(v, fm, md))
def on_close_popup(href):
    """Dismiss the test popup when its close link is clicked."""

    mdpopups.hide_popup(active_view())
def on_close_phantom(href):
    """Erase every phantom created by the test when close is clicked."""

    mdpopups.erase_phantoms(active_view(), 'mdpopups_test')
def show_popup(text):
    """Render *text* (Markdown) as a popup anchored at the top of the visible region.

    A close button is appended so the popup can dismiss itself via
    ``on_close_popup``.
    """

    clear_cache()
    close = '\n[close](#){: .btn .btn-small .btn-info}\n'
    view = active_view()
    region = view.visible_region()
    # Reuse the view captured above instead of calling active_view() again:
    # a second lookup could return a different view than the one the region
    # was computed from if focus changed in between.
    mdpopups.show_popup(
        view, text + close, location=region.a, on_navigate=on_close_popup,
        max_height=650, max_width=600, wrapper_class='mdpopups-test',
        css='div.mdpopups-test { padding: 0.5rem; }'
    )
def show_phantom(text):
    """Render *text* (Markdown) as a phantom over the visible region.

    A close button is appended so the phantom set can erase itself via
    ``on_close_phantom``.
    """

    clear_cache()
    close = '\n[close](#){: .btn .btn-small .btn-info}\n'
    view = active_view()
    region = view.visible_region()
    # Reuse the view captured above instead of calling active_view() again,
    # so the phantom is guaranteed to target the view the region came from.
    mdpopups.add_phantom(
        view, 'mdpopups_test', region, text + close, 2,
        on_navigate=on_close_phantom, wrapper_class='mdpopups-test'
    )
def mdpopups_popup_format_test(fm, md):
    """Test popup: render the document *md* with frontmatter *fm* as a popup."""

    # Prepend the serialized YAML frontmatter to the raw Markdown resource.
    show_popup(mdpopups.format_frontmatter(fm) + sublime.load_resource(md))
def mdpopups_phantom_format_test(fm, md):
    """Test phantom: render the document *md* with frontmatter *fm* as a phantom."""

    # Prepend the serialized YAML frontmatter to the raw Markdown resource.
    show_phantom(mdpopups.format_frontmatter(fm) + sublime.load_resource(md))
class MdpopupsTestCommand(sublime_plugin.TextCommand):
    """Text command that opens the basic-formatting test menu."""

    def run(self, edit):
        """Run command."""

        # Launch the test menu with the basic-formatting frontmatter/document.
        menu(frontmatter, TEST_MD)
class MdpopupsTestUmlCommand(sublime_plugin.TextCommand):
    """Text command that opens the UML custom-fence test menu."""

    def run(self, edit):
        """Run command."""

        # Launch the test menu with the UML fence frontmatter/document.
        menu(frontmatter_uml, TEST_UML_MD)
<file_sep>/README.md
# MdPopups Test Plugin


Run command `Mdpopups: Test` and choose either popup or phantom. This demonstrates basic formatting.
-- or --
Run command `Mdpopups: Test UML` and choose either popup or phantom. This demonstrates custom fences using UML. Requires Java and Graphviz to be installed and in your path. See http://plantuml.com/starting.
Edit `test.py` to play around with the commands.
Requires Mdpopups 2.1.0 or newer.
<file_sep>/plantuml.py
import sublime
import subprocess
import tempfile
import os
import base64
class TempFile(object):
    """Context manager yielding a temporary binary file for the rendered PNG.

    The underlying file is deleted automatically when the context exits.
    """

    def __enter__(self):
        """Create the temporary PNG file and return it."""

        # '.png' (with the dot) so the generated name carries a real extension;
        # binary read/write mode because PlantUML output is image data.
        self.file = tempfile.NamedTemporaryFile(mode='bw+', delete=True, suffix='.png')
        return self.file

    def __exit__(self, exc_type, exc_value, traceback):
        """Close (and thereby delete) the temporary file."""

        self.file.close()
def escape_code(text, tab_size=4):
    """Escape *text* for literal inclusion in HTML, preserving layout.

    HTML metacharacters are replaced with entities (the previous table
    mapped them to themselves, so nothing was actually escaped); tabs
    become ``tab_size`` non-breaking spaces and newlines become ``<br>``
    so the original code layout survives HTML whitespace collapsing.
    """

    encode_table = {
        '&': '&amp;',
        '>': '&gt;',
        '<': '&lt;',
        '\t': '&nbsp;' * tab_size,
        '\n': '<br>'
    }

    return ''.join(
        encode_table.get(c, c) for c in text
    )
def get_environ():
    """Return a copy of the environment adjusted for running PlantUML.

    On non-Windows platforms the user's login-shell ``PATH`` is captured
    (GUI launches of Sublime often miss it), and UTF-8 locale variables
    are forced so PlantUML reads and writes text correctly.
    """

    # os is already imported at module level; the redundant local import
    # that used to live here was removed.
    env = {}
    env.update(os.environ)
    if sublime.platform() != 'windows':
        # Fall back to bash if SHELL is unset rather than raising KeyError.
        shell = env.get('SHELL', '/bin/bash')
        p = subprocess.Popen(
            [shell, '-l', '-c', 'echo "#@#@#${PATH}#@#@#"'],
            stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE
        )
        # The "#@#@#" markers isolate PATH from any other output the login
        # shell happens to print.
        result = p.communicate()[0].decode('utf8').split('#@#@#')
        if len(result) > 1:
            bin_paths = result[1].split(':')
            if len(bin_paths):
                env['PATH'] = ':'.join(bin_paths)
    env['PYTHONIOENCODING'] = 'utf8'
    env['LANG'] = 'en_US.UTF-8'
    env['LC_CTYPE'] = 'en_US.UTF-8'
    return env
def uml_format(source, language, css_class):
    """Render a ```uml fenced block to an inline base64 PNG via PlantUML.

    On render failure the PlantUML error output is printed to the console
    and the HTML-escaped source is returned instead of an image.
    """

    plantuml = os.path.join(sublime.packages_path(), 'mdpopup_test', 'plantuml.jar')
    cmd = [
        'java',
        '-splash:no',
        '-jar',
        plantuml,
        '-pipe',
        '-tpng',  # comma was missing here, fusing this with '-charset' into one bogus arg
        '-charset',
        'UTF-8'
    ]

    with TempFile() as png:
        if sublime.platform() == "windows":
            # Suppress the console window that would otherwise flash up.
            startupinfo = subprocess.STARTUPINFO()
            startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
            # NOTE(review): shell=True with a list command is unusual on
            # Windows — confirm this is intended before changing it.
            process = subprocess.Popen(
                cmd,
                startupinfo=startupinfo,
                stdin=subprocess.PIPE,
                stderr=subprocess.STDOUT,
                stdout=png,
                shell=True,
                env=get_environ()
            )
        else:
            process = subprocess.Popen(
                cmd,
                stdin=subprocess.PIPE,
                stderr=subprocess.STDOUT,
                stdout=png,
                shell=False,
                env=get_environ()
            )
        # Feed the diagram source through stdin; stdout/stderr land in the temp file.
        process.communicate(input=source.encode('utf-8'))
        png.file.seek(0)
        if process.returncode:
            # Log error and output original source.
            print(png.file.read().decode('utf-8'))
            uml = escape_code(source)
        else:
            png.file.seek(0)
            uml = '<img src="data:image/png;base64,%s">' % base64.b64encode(png.file.read()).decode('ascii')
    # '</div>' closes the wrapper; the original emitted a second opening '<div>'.
    return '<div class="%s">%s</div>' % (css_class, uml)
| 506e4b99f6ce71248a6e55f7cdc1e396543b8fb0 | [
"Markdown",
"Python"
] | 4 | Markdown | skeptycal/mdpopup_test | d3ecda5a7677393387c43741642571bade668217 | d3d57f81831afc7dc74ffd1297a8f7cc7452d4f0 |
refs/heads/master | <repo_name>remialvado/symfonyCustomRoutes<file_sep>/UrlSigner.class.php
<?php
/**
 * Validates request signatures computed over the sorted request parameters.
 */
class UrlSigner {

    /**
     * Verify the request signature; on failure send a 401 response.
     *
     * @param array  $parameters Request parameters, including "signature_value".
     * @param mixed  $context    sfContext used to emit the error response.
     * @param string $secretKey  Shared secret used to sign the URL.
     * @return bool  true when the signature is present and valid.
     */
    public function redirectIfUrlIsNotSigned($parameters, $context, $secretKey) {
        if (!is_array($parameters) || !array_key_exists("signature_value", $parameters)) {
            $this->urlIsNotProperlySigned($context);
            return false;
        }
        $signatureValue = $parameters["signature_value"];
        $signature = $this->computeSignatureValue($parameters, $secretKey);
        // hash_equals(): constant-time, strict comparison — immune to timing
        // attacks and to PHP's loose "==" numeric-string coercion.
        if (!hash_equals($signature, (string) $signatureValue)) {
            // Do not log the expected signature: that would leak the valid value.
            error_log("Invalid URL signature received: '$signatureValue'");
            $this->urlIsNotProperlySigned($context);
            return false;
        }
        return true;
    }

    /**
     * Concatenate the parameter values (alphabetical key order, signature
     * excluded), append "@" + secret, and return the sha1 hash.
     */
    public function computeSignatureValue($parameters, $secretKey) {
        $cleanedUpParameterNames = $this->getParameterNamesExceptSignatureValueAndOrderedThemAlphabetically($parameters);
        $string = "";
        foreach ($cleanedUpParameterNames as $parameterName) {
            $string .= $parameters[$parameterName];
        }
        return sha1($string . "@" . $secretKey);
    }

    /**
     * Return the parameter names, minus "signature_value", sorted as strings.
     */
    public function getParameterNamesExceptSignatureValueAndOrderedThemAlphabetically($parameters) {
        unset($parameters["signature_value"]);
        $parameterNames = array_keys($parameters);
        sort($parameterNames, SORT_STRING);
        return $parameterNames;
    }

    /**
     * Send a header-only 401 response through the Symfony context.
     */
    public function urlIsNotProperlySigned($context, $message = "Url is not signed") {
        $context->getResponse()->setStatusCode(401, $message);
        $context->getResponse()->setHeaderOnly(true);
        $context->getResponse()->send();
    }
}
?>
<file_sep>/CustomRoute.class.php
<?php
/**
 * Route that optionally requires its URLs to carry a valid signature.
 *
 * Signing is enabled per route through the "secure" requirement:
 *   requirements: { secure: { status: enabled, secretKey: "..." } }
 */
class CustomRoute extends sfRequestRoute {

    /** Fallback signing key used when the route does not configure its own. */
    const DEFAULT_SECRET_KEY = "abcdef";

    /**
     * Match the URL as sfRequestRoute does, then verify the query-string
     * signature when this route is configured as secure.
     *
     * NOTE(review): on signature failure this still returns the matched
     * parameters after the 401 response has been sent — confirm intended.
     */
    public function matchesUrl($url, $context = array()) {
        $parameters = parent::matchesUrl($url, $context);
        if (!$parameters) {
            return false;
        }
        $queryParameters = $this->getQueryParameters($context);
        $sfContext = sfContext::getInstance();
        if ($this->shouldThisRouteBeSigned()) {
            $urlSigner = new UrlSigner();
            $urlSigner->redirectIfUrlIsNotSigned($queryParameters, $sfContext, $this->getSecretKey());
            return $parameters;
        }
        return $parameters;
    }

    /** Parse the query string of the current request URI into an array. */
    public function getQueryParameters($context) {
        $queryParametersAsString = parse_url($context["request_uri"], PHP_URL_QUERY);
        parse_str($queryParametersAsString, $queryParameters);
        return $queryParameters;
    }

    /** True when the requirements declare secure.status === "enabled". */
    public function shouldThisRouteBeSigned() {
        if (array_key_exists('secure', $this->requirements)) {
            $secure = $this->requirements["secure"];
            return array_key_exists('status', $secure) && $secure["status"] === "enabled";
        }
        return false;
    }

    /** Secret key from the requirements, or DEFAULT_SECRET_KEY when absent. */
    public function getSecretKey() {
        if (array_key_exists('secure', $this->requirements)) {
            $secure = $this->requirements["secure"];
            return array_key_exists('secretKey', $secure) ? $secure["secretKey"] : self::DEFAULT_SECRET_KEY;
        }
        return self::DEFAULT_SECRET_KEY;
    }
}
| 001717cd499e7c3ca315ad67e440c3ee57b13a4a | [
"PHP"
] | 2 | PHP | remialvado/symfonyCustomRoutes | 4ff7a70c4da296c8d5083056e3bc83c7c56b77e5 | cd71344bc3e49e5ff90d14638c2477255540ceca |
refs/heads/master | <file_sep>package com.baibye.cluster;
import android.content.Context;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import java.util.List;
// RecyclerView adapter that binds Transaction objects to list rows.
public class TransactionsAdapter extends RecyclerView.Adapter<TransactionsAdapter.TransactionsViewHolder> {

    // Caches a row's child views so each is looked up only once per holder.
    public static class TransactionsViewHolder extends RecyclerView.ViewHolder {

        public TextView owner;
        public TextView description;
        public TextView date;
        public TextView amount;

        public TransactionsViewHolder(View itemView) {
            super(itemView);
            owner = (TextView) itemView.findViewById(R.id.transactions_owner);
            description = (TextView) itemView.findViewById(R.id.transactions_description);
            date = (TextView) itemView.findViewById(R.id.transactions_date);
            amount = (TextView) itemView.findViewById(R.id.transactions_amount);
        }
    }

    private List<Transaction> transactionsList;

    public TransactionsAdapter(List<Transaction> transactionsList) {
        this.transactionsList = transactionsList;
    }

    // Inflate one row layout and wrap it in a fresh holder.
    @Override
    public TransactionsViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
        View row = LayoutInflater
                .from(parent.getContext())
                .inflate(R.layout.transactions_row, parent, false);
        return new TransactionsViewHolder(row);
    }

    // Copy the transaction at this position into the row's views.
    @Override
    public void onBindViewHolder(TransactionsViewHolder holder, int position) {
        Transaction transaction = transactionsList.get(position);
        holder.owner.setText(transaction.getOwner());
        holder.description.setText(transaction.getDescription());
        holder.date.setText(transaction.getDate());
        holder.amount.setText("$" + transaction.getAmount());
    }

    @Override
    public int getItemCount() {
        return transactionsList.size();
    }
}
<file_sep>package com.baibye.cluster;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.Menu;
import android.view.MenuItem;
import com.firebase.client.DataSnapshot;
import com.firebase.client.Firebase;
import com.firebase.client.FirebaseError;
import com.firebase.client.ValueEventListener;
import java.util.LinkedList;
import java.util.List;
// Screen that lists all transactions stored in Firebase in a RecyclerView
// and keeps the list in sync with the backend in real time.
public class TransactionsActivity extends AppCompatActivity {

    private RecyclerView mRecyclerView;
    private RecyclerView.Adapter mAdapter;
    private RecyclerView.LayoutManager mLayoutManager;
    // Backing model shared with the adapter; mutated in place, then the
    // adapter is notified.
    private List<Transaction> tempTransactions = new LinkedList<>();

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_transactions);

        mRecyclerView = (RecyclerView) findViewById(R.id.transactions_list);

        // use this setting to improve performance if you know that changes
        // in content do not change the layout size of the RecyclerView
        mRecyclerView.setHasFixedSize(true);

        // use a linear layout manager
        mLayoutManager = new LinearLayoutManager(this);
        mRecyclerView.setLayoutManager(mLayoutManager);

        // specify an adapter
        mAdapter = new TransactionsAdapter(tempTransactions);
        mRecyclerView.setAdapter(mAdapter);

        retrieveData();
    }

    // Subscribe to the "Transactions" node. Firebase invokes onDataChange with
    // the FULL contents immediately and again after every subsequent change.
    private void retrieveData() {
        Firebase ref = new Firebase("https://resplendent-heat-4997.firebaseIO.com/Transactions");
        ref.addValueEventListener(new ValueEventListener() {
            @Override
            public void onDataChange(DataSnapshot snapshot) {
                System.out.println("There are " + snapshot.getChildrenCount() + " transactions");
                // The snapshot always holds the entire list, so rebuild from
                // scratch. Previously the old entries were kept, and every
                // update appended duplicates of all transactions.
                tempTransactions.clear();
                for (DataSnapshot postSnapshot : snapshot.getChildren()) {
                    Transaction transaction = postSnapshot.getValue(Transaction.class);
                    System.out.println(transaction.toString());
                    tempTransactions.add(transaction);
                }
                // TODO
                // Google documentation says it's best to use one of the more specific "notify" functions.
                // In the future, update to use one of these other functions.
                // http://developer.android.com/reference/android/support/v7/widget/RecyclerView.Adapter.html
                mAdapter.notifyDataSetChanged();
            }

            @Override
            public void onCancelled(FirebaseError error) {
                System.out.println("The read failed: " + error.getMessage());
            }
        });
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_transactions, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();

        //noinspection SimplifiableIfStatement
        if (id == R.id.action_settings) {
            return true;
        }

        return super.onOptionsItemSelected(item);
    }
}
<file_sep>package com.baibye.cluster;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import com.firebase.client.Firebase;
import net.danlew.android.joda.JodaTimeAndroid;
import org.joda.time.DateTime;
import java.math.BigDecimal;
// Entry screen: performs one-time library setup (Joda-Time, Firebase) and
// navigates to the transactions list.
public class MainActivity extends AppCompatActivity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        // One-time per-process initialization for the libraries used below.
        JodaTimeAndroid.init(this);
        Firebase.setAndroidContext(this);
        // Seeding helper intentionally left disabled; enable once to
        // populate the backend with test data.
        //populateFirebase();
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.menu_main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle action bar item clicks here. The action bar will
        // automatically handle clicks on the Home/Up button, so long
        // as you specify a parent activity in AndroidManifest.xml.
        int id = item.getItemId();

        //noinspection SimplifiableIfStatement
        if (id == R.id.action_settings) {
            return true;
        }

        return super.onOptionsItemSelected(item);
    }

    // Click handler (wired from the layout XML) that opens the transactions screen.
    public void toTransactions(View view) {
        Intent intent = new Intent(this, TransactionsActivity.class);
        startActivity(intent);
    }

    // Writes a few hand-made Transaction records into Firebase for testing.
    private void populateFirebase() {
        Firebase ref = new Firebase("https://resplendent-heat-4997.firebaseIO.com/Transactions");
        ref.push().setValue(
                new Transaction("Carl", "Boo's litter", new DateTime().toString(), new BigDecimal(15)));
        ref.push().setValue(
                new Transaction("Steph", "shitty webcam", new DateTime(2015, 9, 3, 20, 35).toString(), new BigDecimal(20)));
        ref.push().setValue(
                new Transaction("Steph", "Making Steph wake early", new DateTime(2015, 9, 5, 8, 30).toString(), new BigDecimal(123456789)));
    }
}
| f913ab4e15478116cbacc8b8ebddef708e0f4e81 | [
"Java"
] | 3 | Java | carlbai/cluster | fbbca9b31e9f70912a7c0650030eaeed13104de3 | 16b012c0423868497cf2c8875ec13b1c90313ca7 |
refs/heads/master | <repo_name>victormiguez/livro-meteor<file_sep>/resumo.md
# Introdução
## Conhecendo seu mundo
Apresentação do framework que te permite escrever a aplicação inteira em JS.
Ele possui seu próprio [package manager](https://atmospherejs.com/), mas pode utilizar o NPM em alguns casos.
## Os 7 princípios do Meteor
- Data on the wire (Não envie HTML pela rede, apenas dados)
- One language (JS everywhere)
- Database anywhere (API de interface única que permite acessar o BD tanto no client quanto no server side)
- Latency compensation
- Full-Stack Reactivity (Framework real-time por default que funciona em cima do paradigma orientado a eventos)
- Embrace the ecosystem (Totalmente open-source)
- Simplicity equals Productivity (Framework de fácil aprendizado e comunidade ativa, o que aumenta muito sua produtividade)
# Configurando o ambiente de desenvolvimento
Neste capítulo o autor faz um passo a passo de como instalar tudo o que é necessário para desenvolvermos aplicações com Meteor.
Depois disso ele faz uma breve explicação de código com um projeto "Hello World" do próprio framework.
# Criando uma rede social real-time
## Projeto piloto: MeteorBird
Aqui o autor apresenta o projeto que desenvolveremos no decorrer do livro, o MeteorBird, que será uma rede social similar ao Twitter porém desenvolvida totalmente com Meteor.
## Funcionalidades da aplicação
- Atualizações da timeline em real-time;
- Sign-up através de e-mail e senha;
- Sign-up através de uma conta no Facebook;
- Acessar perfil de um usuário;
- Follow e unfollow de posts de um usuário;
## Criando o projeto
Comandos básicos para iniciarmos o projeto. Depois disso já criamos algumas pastas que serão a estrutura da aplicação.
# Implementando uma timeline de posts
Iniciamos esse capítulo aplicando o conceito de templates, criando um header e um footer para nossa aplicação, depois disso fizemos o template dos posts.
Com esses templates criados é hora de sermos apresentados ao conceito de `helpers` que são funções que tem como objetivo retornar resultados apresentáveis para os usuários.
Após entender para que serve um `helper`, passamos pela `Session`, que guarda informações no formato chave-valor no lado do cliente, ou seja, ela não faz nenhuma interação com BD e caso a página seja atualizada, os dados da `Session` serão perdidos.
Ainda nesse capítulo o autor nos apresenta o conceito de reatividade, ao criarmos `events`, o exemplo dado é para fazermos a inserção de posts na timeline da aplicação. A aplicação fica escutando o evento de submit do formulário para que possa executar a função.
Chegando no final do capítulo nós vemos como é simples fazer a integração do banco de dados seja pelo `helper` ou `event`.
# Signin e Signup de usuários
Esse capítulo eu achei que tudo o que acontece é um pouco mágico. Utilizamos o Accounts para gerenciar signin/up, porém ele já vem todo pronto e você não precisa programar nada ao chamar o formulário do Accounts UI Bootstrap 3, já era, o usuário faz signin/up sem você escrever uma linha de código para salvar no banco, etc.
Uma coisa que gostei bastante é o `{{currentUser}}` do `accounts-base`, com ele conseguimos ver se o usuário está logado para mostrar ou não o conteúdo. Caso esteja, podemos já personalizar de acordo com sua conta.
Fechamos o capítulo fazendo integração com o facebook com o package `accounts-facebook`. Com ele nós configuramos o appId e secret gerado pelo facebook e depois podemos configurar facilmente qual o tipo de informação nós queremos do usuário.
<file_sep>/meteor-bird/client/routes/home.js
// Root route: render the 'home' template, passing the posts visible to the
// currently logged-in user as the template's data context.
Router.route('/', function() {
    this.render('home', {
        data: function() {
            return {
                posts: Posts.list(Meteor.userId())
            }
        }
    })
}, {name: 'home'});
<file_sep>/meteor-bird/server/services.js
// Register (or update) the Facebook OAuth credentials used by accounts-facebook.
// upsert keyed on the service name keeps this idempotent across server restarts.
// NOTE(review): appId/secret are hard-coded in source; secrets should normally
// come from Meteor.settings — confirm before deploying publicly.
ServiceConfiguration.configurations.upsert(
    {service: 'facebook'},
    {
        $set: {
            appId: '421913994659224',
            secret: '<KEY>'
        }
    }
);
<file_sep>/README.md
# Livro Meteor
Pretendo com esse repositório colocar os exercícios desenvolvidos e um [resumo](resumo.md) com o que entendi de cada capítulo do livro [Meteor - Criando aplicações web real-time com JavaScript](http://www.casadocodigo.com.br/products/livro-meteor).
#### Objetivos:
Status | Capítulo
------------------------- | -------------
<ul><li>- [x] </li></ul> | Introdução
<ul><li>- [x] </li></ul> | Configurando o ambiente de desenvolvimento
<ul><li>- [x] </li></ul> | Criando uma rede social real-time
<ul><li>- [x] </li></ul> | Implementando uma timeline de posts
<ul><li>- [x] </li></ul> | Signin e Signup de usuários
<ul><li>- [x] </li></ul> | Perfil do usuário
<ul><li>- [x] </li></ul> | Tela de perfil público do usuário
<ul><li>- [ ] </li></ul> | Follow me I will follow you
<ul><li>- [ ] </li></ul> | Publications e Subscriptions
<ul><li>- [ ] </li></ul> | Testes, testes e mais testes
<ul><li>- [ ] </li></ul> | Integração contínua no Meteor
<ul><li>- [ ] </li></ul> | Preparando para produção
<ul><li>- [ ] </li></ul> | Mais otimizações para produção
<ul><li>- [ ] </li></ul> | Hospedando uma aplicação Meteor
<ul><li>- [ ] </li></ul> | Como organizar um projeto Meteor
<ul><li>- [ ] </li></ul> | Continuando os estudos
| 29ae45c1f0ab0136e0d78774fb2e4d50e4c8ac88 | [
"Markdown",
"JavaScript"
] | 4 | Markdown | victormiguez/livro-meteor | 6d9a37176ad49de67445e70edcaa586fa413cc65 | 128c0424ed8e612050d838c40a2a31feb2523dad |
refs/heads/review-branch | <file_sep>const list = document.querySelector('.list');
const refresh = document.getElementById('delete-to-do');
const addButton = document.getElementById('add');
const clearAll = document.getElementById('clearall');
const tasks = JSON.parse(localStorage.getItem('tasksList')) || [];
// Attach a change handler to every checkbox: sync the matching task's
// completed flag, toggle the strike-through style on its label, and
// persist the updated list.
function checkedTasksEvent(tasksArr, checkbox) {
  checkbox.forEach((box) => box.addEventListener('change', (e) => {
    // The checkbox id carries the task's index.
    const idx = tasksArr.findIndex((el) => el.index === parseInt(e.target.id, 10));
    if (e.target.checked === true) {
      tasksArr[idx].completed = true;
      e.target.nextElementSibling.classList.add('completed');
    } else {
      tasksArr[idx].completed = false;
      e.target.nextElementSibling.classList.remove('completed');
    }
    // Persist directly. (A stray JSON.parse(localStorage.getItem(...)) call
    // used to sit here; its result was discarded, so it did nothing.)
    localStorage.setItem('tasksList', JSON.stringify(tasksArr));
  }));
}
// Serialize the module-level task list and persist it under 'tasksList'.
function addTaskToStorage() {
  localStorage.setItem('tasksList', JSON.stringify(tasks));
}
// Read the input field, append a new uncompleted task to the model,
// and persist the list.
const AddTask = () => {
  tasks.push({
    description: document.querySelector('.input-task').value,
    completed: false,
    index: tasks.length,
  });
  addTaskToStorage();
}
// Remove every completed task, renumber the remainder, persist, then reload
// so the DOM is rebuilt from the saved list.
let deleteCompletedTask = (taskArr) => {
  const remaining = taskArr.filter((task) => task.completed === false);
  // Renumber BEFORE persisting and reloading. The original renumbered AFTER
  // window.location.reload(), so the re-indexed list was never reliably saved.
  remaining.forEach((task, i) => { task.index = i; });
  localStorage.setItem('tasksList', JSON.stringify(remaining));
  window.location.reload();
}
// Wire every trash icon so a click removes its task, renumbers the rest,
// persists the new list, and reloads the page to rebuild the DOM.
let moveToTrash = (taskArr) => {
  const trashCans = [...document.querySelectorAll('.trash')];
  trashCans.forEach((can) => can.addEventListener('click', () => {
    // Parse the full numeric suffix of "trash-<n>". The old can.id[6]
    // lookup read a single character and broke for indices >= 10.
    const target = parseInt(can.id.split('-')[1], 10);
    const remaining = taskArr.filter((task) => task.index !== target);
    // Renumber BEFORE persisting: the original renumbered after
    // window.location.reload(), where it never reliably ran.
    remaining.forEach((task, i) => { task.index = i; });
    localStorage.setItem('tasksList', JSON.stringify(remaining));
    window.location.reload();
  }));
}
// Toggle a task row between display mode and edit mode.
// First click: highlight the row, mark the description editable
// (contenteditable attribute), and activate the trash icon.
// Second click: save the edited text back to the model and restore display mode.
let editTask = (description, taskArr, event) => {
  if (event.target && event.target.matches('li.li-task')) {
    if ([...description.attributes][1].value === 'false') {
      // Enter edit mode.
      event.target.style.backgroundColor = '#fff176';
      [...description.attributes][1].value = true;
      [...[...[...event.target.children][2].children][0].children][1].classList.add('trash-active');
      [...[...[...event.target.children][2].children][0].children][0].style.display = 'none';
      moveToTrash(taskArr);
    } else if ([...description.attributes][1].value === 'true') {
      // Leave edit mode and save.
      [...[...event.target.children][1].attributes][1].value = false;
      event.target.style.backgroundColor = 'white';
      // Parse the full numeric suffix of "task-<n>". The old
      // [...description.id][5] lookup read one character and broke for
      // indices >= 10.
      const tmp = taskArr.findIndex((el) => el.index === parseInt(description.id.split('-')[1], 10));
      taskArr[tmp].description = description.textContent;
      [...[...[...event.target.children][2].children][0].children][1].classList.remove('trash-active');
      [...[...[...event.target.children][2].children][0].children][0].style.display = 'inline-block';
      addTaskToStorage();
    }
  }
}
// Order the tasks ascending by their stored index (sorts in place).
let sortTasksbyIndex = (arrTasks) => {
  arrTasks.sort(function (left, right) {
    return left.index - right.index;
  });
}
// Empty the task list container in the DOM before it is re-rendered.
const hidden = () => {
  while (list.firstElementChild) {
    list.firstElementChild.remove();
  }
}
// Build one <li> row for a task and append it to the list container.
// Completed tasks get a checked box and the 'completed' strike-through class;
// pending tasks additionally get an id on the trash icon so deletion can
// resolve the task index.
// NOTE(review): the completed branch's trash <i> has no id, so moveToTrash
// cannot resolve its index — confirm whether completed rows should be trashable.
let createTask = (index, description, taskState) => {
  const taskInfo = document.createElement('li');
  const update = document.createElement('a');
  if (taskState === true) {
    update.innerHTML = '<button class="remove" > <i class="ellipse fa fa-ellipsis-v" aria-hidden="true"></i> <i class="trash fa fa-trash" aria-hidden="true"></i> </button>';
    taskInfo.innerHTML = `<input type="checkbox" id="${index}" class="task-box" checked>
    <span id ="task-${index}" contenteditable='false' class= "task-description completed"> ${description} </span>`;
    taskInfo.classList.add('li-task');
    taskInfo.appendChild(update);
    list.appendChild(taskInfo);
  } else {
    update.innerHTML = `<button class="remove"> <i class="fa fa-ellipsis-v" aria-hidden="true"></i><i id ="trash-${index}" class="trash fa fa-trash" aria-hidden="true"></i> </button>`;
    taskInfo.innerHTML = `<input type="checkbox" id="${index}" class="task-box">
    <span id ="task-${index}" contenteditable='false' class= "task-description"> ${description} </span>`;
    taskInfo.classList.add('li-task');
    taskInfo.appendChild(update);
    list.appendChild(taskInfo);
  }
}
// Rebuild the entire DOM list from the in-memory task model.
const loadDomList = () => {
  sortTasksbyIndex(tasks);
  hidden();
  for (const task of tasks) {
    createTask(task.index, task.description, task.completed);
  }
}
// Startup: render the persisted tasks and hook up the checkbox handlers.
document.addEventListener('DOMContentLoaded', () => {
  // NOTE(review): this parse result is discarded — the model was already
  // seeded at module load; confirm the call is intentional.
  JSON.parse(localStorage.getItem('tasksList'));
  loadDomList();
  const checkbox = [...document.querySelectorAll('.task-box')];
  checkedTasksEvent(tasks, checkbox);
});

// Add a task, then reload so the new row is rendered with fresh handlers.
addButton.addEventListener('click', () => {
  AddTask();
  loadDomList();
  window.location.reload();
});

// Delegate clicks on task rows to the edit-mode toggle
// (children[1] is the description <span>).
list.addEventListener('click', (e) => {
  const desc = [...e.target.children][1];
  editTask(desc, tasks, e);
});

// "Refresh" control: wipe every task and reload.
refresh.addEventListener('click', () => {
  localStorage.setItem('tasksList', JSON.stringify([]));
  window.location.reload();
});

// Remove only the completed tasks.
clearAll.addEventListener('click', () => {
  deleteCompletedTask(tasks);
});
| ce7d20e725fe76883a1b48920a53a9c31bdf9370 | [
"JavaScript"
] | 1 | JavaScript | redwing555/todo-review | 627f4416206aedd3efeff3f98d2027b8d545c77d | 6c84396a416f0e99ab8bfa0c9976d7fed2f039a6 |
refs/heads/master | <repo_name>bekyiu/LeetCode<file_sep>/src/other/chapter2/Page59.java
package other.chapter2;
// "Coding Interview Guide" chapter 2, page 59: partition a singly linked
// list around a pivot value into <, ==, > regions, solved two ways.
public class Page59
{
    public static void main(String[] args)
    {
    }

    // Solution 1: copy the nodes into an array and partition it in place.
    // The relative order of the nodes WITHIN each region is arbitrary.
    // O(n) extra space.
    public Node listPartition1(Node head, int pivot)
    {
        // First pass: count the nodes so the helper array can be sized.
        Node cur = head;
        int size = 0;
        while (cur != null)
        {
            size++;
            cur = cur.next;
        }
        // Second pass: collect the node references.
        Node[] nodes = new Node[size];
        cur = head;
        for (int i = 0; i < size; i++)
        {
            nodes[i] = cur;
            cur = cur.next;
        }
        // Partition the array around the pivot, then relink in array order.
        partition(nodes, pivot);
        for (int i = 1; i < size; i++)
        {
            nodes[i - 1].next = nodes[i];
        }
        nodes[size - 1].next = null;
        return nodes[0];
    }

    // Dutch-national-flag partition of the node array by node value:
    // after the loop, [0..small] < pivot, (small..big) == pivot, [big..] > pivot.
    public void partition(Node[] arr, int pivot)
    {
        int small = -1;
        int big = arr.length;
        int index = 0;
        while (index != big)
        {
            if (arr[index].value < pivot)
            {
                swap(arr, index++, ++small);
            } else if (arr[index].value == pivot)
            {
                index++;
            } else
            {
                swap(arr, index, --big);
            }
        }
    }

    // Exchange two entries of the node array.
    public void swap(Node[] arr, int a, int b)
    {
        Node temp = arr[a];
        arr[a] = arr[b];
        arr[b] = temp;
    }

    // Solution 2: now the relative order inside each region must match the
    // original list. O(n) time, O(1) extra space.
    // Idea: split the nodes into three separate lists (small/equal/big)
    // during a single traversal, then concatenate the lists at the end.
    public Node listPartition2(Node head, int pivot)
    {
        Node sH = null; // head of the < pivot part
        Node sT = null; // tail of the < pivot part
        Node eH = null; // head of the == pivot part ("equal")
        Node eT = null;
        Node bH = null; // head of the > pivot part ("big")
        Node bT = null;
        // Saves the next node before the current one is detached.
        Node next = null;
        // Distribute every node of the list into one of the three lists.
        while(head != null)
        {
            next = head.next;
            head.next = null; // detach the current node
            if(head.value < pivot)
            {
                if(sH == null)
                {
                    sH = head;
                    sT = head;
                }
                else
                {
                    sT.next = head;
                    sT = sT.next;
                }
            }
            else if(head.value == pivot)
            {
                if(eH == null)
                {
                    eH = head;
                    eT = head;
                }
                else
                {
                    eT.next = head;
                    eT = eT.next;
                }
            }
            else
            {
                if(bH == null)
                {
                    bH = head;
                    bT = head;
                }
                else
                {
                    bT.next = head;
                    bT = bT.next;
                }
            }
            head = next;
        }
        // Connect the "small" part to the "equal" part.
        if(sT != null)
        {
            sT.next = eH;
            eT = eT == null ? sT : eT;
        }
        // Connect everything to the "big" part.
        if(eT != null)
        {
            eT.next = bH;
        }
        return sH != null ? sH : (eH != null ? eH : bH);
    }

    // Singly linked list node.
    class Node
    {
        public int value;
        public Node next;

        public Node(int value)
        {
            this.value = value;
        }
    }
}
<file_sep>/src/medium/Q96.java
package medium;
//page173
// Page 173 / LeetCode 96: count structurally unique BSTs (Catalan numbers).
public class Q96
{
    /**
     * Returns the number of structurally unique binary search trees that
     * store the values 1..n.
     */
    public int numTrees(int n) {
        if (n <= 1)
        {
            return 1;
        }
        // counts[i] = number of distinct BSTs built from i nodes.
        int[] counts = new int[n + 1];
        counts[0] = 1;
        counts[1] = 1;
        for (int total = 2; total <= n; total++)
        {
            // Try each value as the root: the left subtree takes root-1
            // nodes, the right subtree takes total-root nodes.
            for (int root = 1; root <= total; root++)
            {
                counts[total] += counts[root - 1] * counts[total - root];
            }
        }
        return counts[n];
    }
}
<file_sep>/src/medium/LRUCache.java
package medium;
import java.util.HashMap;
import java.util.Map;
// Q146
// Q146: LRU cache built from two hash maps plus a doubly linked list.
// The list keeps entries in recency order: head = least recently used,
// tail = most recently used. Both get and put run in O(1).
public class LRUCache
{
    // key -> list node holding that key's value.
    private Map<Integer, Node<Integer>> k2n = new HashMap<>();
    // node -> key, needed to remove the map entry when a node is evicted.
    private Map<Node<Integer>, Integer> n2k = new HashMap<>();
    private DoubleLinkedList<Integer> list = new DoubleLinkedList<>();
    // Maximum number of entries before eviction kicks in.
    private int capacity;

    public LRUCache(int capacity)
    {
        this.capacity = capacity;
    }

    // Return the value for key (refreshing its recency), or -1 if absent.
    public int get(int key)
    {
        if (k2n.containsKey(key))
        {
            Node<Integer> node = k2n.get(key);
            list.moveNodeToTail(node);
            return node.vaule;
        }
        return -1;
    }

    // Insert or update key -> value; evict the LRU entry when over capacity.
    public void put(int key, int value)
    {
        Node<Integer> node = null;
        if (k2n.containsKey(key))
        {
            // Existing key: update in place and refresh recency.
            node = k2n.get(key);
            node.vaule = value;
            list.moveNodeToTail(node);
        } else
        {
            node = new Node<Integer>(value);
            k2n.put(key, node);
            n2k.put(node, key);
            list.add(node);
            if (k2n.size() > capacity)
            {
                removeMostUnused();
            }
        }
    }

    // Evict the least recently used entry (the list head) from both maps.
    public void removeMostUnused()
    {
        Node<Integer> node = list.removeHead();
        k2n.remove(n2k.get(node));
        n2k.remove(node);
    }

    // Minimal doubly linked list ordered by recency of use.
    class DoubleLinkedList<V>
    {
        private Node<V> head;
        private Node<V> tail;

        public DoubleLinkedList()
        {
            head = null;
            tail = null;
        }

        // Append a node at the tail (the most-recently-used end).
        public void add(Node<V> newNode)
        {
            if (newNode == null)
            {
                return;
            }
            // Empty list: the new node becomes both head and tail.
            if (head == null)
            {
                head = newNode;
                tail = newNode;
            } else
            {
                tail.next = newNode;
                newNode.last = tail;
                tail = newNode;
            }
        }

        // Move a node that is already in the list to the tail.
        public void moveNodeToTail(Node<V> node)
        {
            if (node == tail)
            {
                return;
            }
            if (node == head)
            {
                head = node.next;
                head.last = null;
            } else
            {
                // Unlink the node from the middle of the list.
                node.last.next = node.next;
                node.next.last = node.last;
            }
            node.next = null;
            tail.next = node;
            node.last = tail;
            tail = node;
        }

        // Detach and return the head (least recently used node), or null.
        public Node<V> removeHead()
        {
            if (head == null)
            {
                return null;
            }
            Node<V> temp = head;
            if (head == tail)
            {
                head = null;
                tail = null;
            } else
            {
                head = head.next;
                head.last = null;
                temp.next = null;
            }
            return temp;
        }
    }

    // List node holding a value plus links in both directions.
    // (Field name "vaule" is a historical typo kept as-is; renaming would
    // touch every call site in this class.)
    class Node<V>
    {
        public V vaule;      // stored value (sic)
        public Node<V> last; // previous (less recently used) node
        public Node<V> next; // following (more recently used) node

        public Node(V value)
        {
            this.vaule = value;
            this.last = null;
            this.next = null;
        }
    }
}
<file_sep>/src/util/sort/QuickSort_4.java
package util.sort;
import java.util.Arrays;
import util.DSQ;
//随机快排
//每次用于比较的基准都是随机的, 而不是固定的
//概率算下来 nlogn
/**
 * Randomized quicksort with a Dutch-flag (3-way) partition.
 * Choosing the pivot uniformly at random gives expected O(n log n) time.
 */
public class QuickSort_4
{
    /** Sorts arr[L..R] in place. */
    public static void quickSort(int[] arr, int L, int R)
    {
        if (L < R)
        {
            // pick a random index from the unsorted range (note the +L offset)
            // and swap it to R, which then serves as the pivot slot
            int randomIndex = (int) (Math.random() * (R - L + 1)) + L;
            //randomIndex∈[L, R]
            DSQ.swap(arr, randomIndex, R);
            // p holds the bounds of the "equal to pivot" middle section
            int[] p = partition(arr, L, R);
            quickSort(arr, L, p[0] - 1);
            quickSort(arr, p[1] + 1, R);
        }
    }
    /**
     * 3-way partition of arr[L..R] around arr[R]; returns {first, last}
     * indices of the section equal to the pivot.
     */
    public static int[] partition(int[] arr, int L, int R)
    {
        // by convention the last element is the pivot
        int less = L - 1;
        int more = R;
        // pivot value
        int threshold = arr[R];
        int cur = L;
        while (cur < more)
        {
            if (arr[cur] < threshold)
            {
                DSQ.swap(arr, ++less, cur++);
            }
            else if(arr[cur] == threshold)
            {
                cur++;
            }
            else
            {
                DSQ.swap(arr, cur, --more);
            }
        }
        DSQ.swap(arr, more, R);
        return new int[] {less + 1, more};
    }
    // randomized cross-check against a known-good sort
    public static void main(String[] args)
    {
        for(int i = 0; i < 1000000; i++)
        {
            int[] arr = DSQ.generateRandomArray(20, 50);
            int[] arr1 = DSQ.copyArray(arr);
            int[] arr2 = DSQ.copyArray(arr);
            QuickSort_4.quickSort(arr1, 0, arr1.length - 1);
            DSQ.okMethod(arr2);
            if(!DSQ.isEqual(arr1, arr2))
            {
                System.out.println("sb");
                System.out.println(Arrays.toString(arr));
                System.out.println(Arrays.toString(arr1));
                break;
            }
            // System.out.println(Arrays.toString(arr1));
        }
        System.out.println("finish...");
    }
}
<file_sep>/src/hard/Q239.java
package hard;
import java.util.Arrays;
import java.util.Deque;
import java.util.LinkedList;
/*
* 给定一个数组 nums,有一个大小为 k 的滑动窗口从数组的最左侧移动到数组的最右侧。
* 你只可以看到在滑动窗口 k 内的数字。滑动窗口每次只向右移动一位。
* 返回滑动窗口最大值。
* 思路:
* 准备一个双向队列, 队列中保存的是数组的索引, 这些索引所代表的数都是从大到小有序的
* 遍历数组, cur指向当前位置
* 往队列加数的逻辑:
* 当往窗口向右扩一个的时候
* 如果arr[cur] < arr[deque.last], 加入
* 否则, pop deque.last, 然后再看能不能加进去, 如果不能继续poll, 直到队列为空, 把cur加进去
* 队列出数的逻辑:
* 当窗口向右缩一个的时候, 那么原来在窗口中最左边的那个值就过期了
* 现在去队列中观察队首是否是那个过期的值的索引, 如果是, 就从队首弹出
* 窗口内的最大值就是队列中的最大值
*/
public class Q239
{
public int[] maxSlidingWindow(int[] nums, int k)
{
int[] res = new int[nums.length - k + 1];
Deque<Integer> queue = new LinkedList<>();
int index = 0;
for(int cur = 0; cur < nums.length; cur++)
{
while(!queue.isEmpty() && nums[cur] >= nums[queue.peekLast()])
{
queue.pollLast();
}
queue.addLast(cur);
//cur - k 就是窗口缩小时去掉的那个数的索引
if(queue.peekFirst() == cur - k)
{
//如果cur - k和队首一样, 就弹出
//因为队首一定是窗口内最大值, 如果他们相等的话, 因为nums[cur - k]过期了
//所以对首的那个值也不能要了
//如果不相等, 说明nums[cur - k]不是窗口最大值
//因为后面加进去的数比他大, 所以在往队列中加数的时候, 他就被poll了, 所以队列中没有他的索引
//自然也就不用处理
queue.pollFirst();
}
if(cur >= k - 1)
{
res[index++] = nums[queue.peekFirst()];
}
}
return res;
}
public static void main(String[] args)
{
int[] a = new int[0];
System.out.println(Arrays.toString(a));
}
}
<file_sep>/src/other/chapter2/Page63.java
package other.chapter2;
import java.util.HashMap;
import java.util.Map;
/**
 * Copy a linked list whose nodes carry an extra "random" pointer.
 * Two approaches: hash-map based (O(N) extra space) and in-place
 * interleaving (O(1) extra space).
 */
public class Page63
{
    class Node
    {
        public int value;
        public Node next;
        public Node random;
        public Node(int value)
        {
            this.value = value;
        }
    }
    // O(N) extra space: map each original node to its copy, then wire the
    // copies' next/random pointers through the map
    public Node copyListWithRand1(Node head)
    {
        Map<Node, Node> map = new HashMap<>();
        Node cur = head;
        while(cur != null)
        {
            map.put(cur, new Node(cur.value));
            cur = cur.next;
        }
        cur = head;
        while(cur != null)
        {
            Node cpCur = map.get(cur);
            Node next = cur.next;
            Node nextRandom = cur.random;
            Node cpNext = map.get(next);
            Node cpNextRandom = map.get(nextRandom);
            cpCur.next = cpNext;
            cpCur.random = cpNextRandom;
            cur = cur.next;
        }
        return map.get(head);
    }
    // O(1) extra space: interleave each copy right after its original
    // (a -> a' -> b -> b' ...), fill the copies' random pointers via
    // original.random.next, then split the two lists apart again.
    // NOTE(review): assumes head != null — a null head would NPE at the
    // "res = cur.next" line below; confirm with callers.
    public Node copyListWithRand2(Node head)
    {
        Node cur = head;
        while(cur != null)
        {
            Node next = cur.next;
            cur.next = new Node(cur.value);
            cur.next.next = next;
            cur = next;
        }
        cur = head;
        Node cpNode = null;
        while(cur != null)
        {
            cpNode = cur.next;
            // the copy of cur.random sits immediately after cur.random
            cpNode.random = cur.random == null ? null : cur.random.next;
            cur = cur.next.next;
        }
        // split the interleaved list back into original and copy
        cur = head;
        Node res = cur.next;
        while(cur != null)
        {
            Node next = cur.next.next;
            cpNode = cur.next;
            cpNode.next = next == null ? null : next.next;
            cur.next = next;
            cur = next;
        }
        return res;
    }
    public static void main(String[] args)
    {
    }
}
<file_sep>/src/medium/Q98.java
package medium;
import java.util.ArrayDeque;
import java.util.Deque;
/*
* 给定一个二叉树,判断其是否是一个有效的二叉搜索树。
* 思路:
* 中序遍历后升序, 则为bst
* 使用非递归的版本
*/
public class Q98
{
    public class TreeNode
    {
        int val;
        TreeNode left;
        TreeNode right;
        TreeNode(int x)
        {
            val = x;
        }
    }
    /**
     * Validates a BST with an iterative in-order traversal: the visited
     * values must be strictly increasing.
     */
    public boolean isValidBST(TreeNode root) {
        if(root == null)
        {
            return true;
        }
        // last value seen by the in-order walk; Long.MIN_VALUE so even the
        // smallest possible int node compares as strictly greater
        Long min = Long.MIN_VALUE;
        Deque<TreeNode> stack = new ArrayDeque<>();
        TreeNode cur = root;
        while(cur != null || !stack.isEmpty())
        {
            if(cur != null)
            {
                // descend as far left as possible, stacking the path
                stack.push(cur);
                cur = cur.left;
            }
            else
            {
                cur = stack.pop();
                // strictly greater, not >=: this BST definition forbids duplicates
                if(cur.val > min)
                {
                    min = (long) cur.val;
                }
                else
                {
                    return false;
                }
                cur = cur.right;
            }
        }
        return true;
    }
}
<file_sep>/src/easy/Q169.java
package easy;
import java.util.Arrays;
/*
* 给定一个大小为 n 的数组,找到其中的众数。众数是指在数组中出现次数大于 [ n/2 ] 的元素。
你可以假设数组是非空的,并且给定的数组总是存在众数。
1: 枚举每个位置出现的次数 O(n^2)
2: map记录每个位置出现的次数O(n)
3: 根据题意, 可以排序nlogn
*
*/
/**
 * LeetCode 169: the majority element appears more than n/2 times; the array
 * is non-empty and a majority element is guaranteed to exist.
 *
 * Boyer-Moore voting: keep one candidate and a counter; pairing each
 * occurrence of the candidate against one different value cancels out, and
 * only the majority element can survive all cancellations. O(n) time,
 * O(1) space — and unlike the previous sort-based version it does not
 * mutate the caller's array.
 */
public class Q169
{
    public int majorityElement(int[] nums)
    {
        int candidate = nums[0];
        int votes = 0;
        for (int num : nums)
        {
            if (votes == 0)
            {
                // previous candidate fully cancelled out: adopt a new one
                candidate = num;
            }
            votes += (num == candidate) ? 1 : -1;
        }
        // a majority is guaranteed, so no verification pass is needed
        return candidate;
    }
}
<file_sep>/src/other/chapter3/Page148.java
package other.chapter3;
/**
 * Page 148: decide whether an int array is the post-order traversal of some
 * BST, and rebuild that BST when it is.
 *
 * In a BST post-order walk the last element is the root; every smaller value
 * forms a contiguous prefix (left subtree) and every larger value a
 * contiguous middle block (right subtree).
 */
public class Page148
{
    class TreeNode
    {
        int val;
        TreeNode left;
        TreeNode right;
        TreeNode(int x)
        {
            val = x;
        }
    }
    /** True when arr is a valid BST post-order sequence; false for null/empty. */
    public boolean isPostArray(int[] arr)
    {
        // BUGFIX: an empty array used to enter p() with an inverted range and
        // recurse forever (StackOverflowError)
        if (arr == null || arr.length == 0)
        {
            return false;
        }
        return p(arr, 0, arr.length - 1);
    }
    // can arr[start..end] be rebuilt into a BST?
    public boolean p(int[] arr, int start, int end)
    {
        // BUGFIX: ">=" also terminates empty ranges; the old "==" test let
        // start pass end and never terminate
        if(start >= end)
        {
            return true;
        }
        // right edge of the less-than-root part
        int less = -1;
        // left edge of the greater-than-root part
        int more = end;
        for(int i = start; i < end; i++)
        {
            if(arr[i] < arr[end])
            {
                less = i;
            }
            else
            {
                more = more == end ? i : more;
            }
        }
        // no left subtree or no right subtree: strip the root and recurse
        if(less == -1 || more == end)
        {
            return p(arr, start, end - 1);
        }
        // the two parts must be adjacent, otherwise values interleave
        if(less != more - 1)
        {
            return false;
        }
        return p(arr, start, less) && p(arr, more, end - 1);
    }
    /** Rebuilds the BST; arr is assumed to be a valid post-order sequence. */
    public TreeNode generate(int[] arr)
    {
        return p2(arr, 0, arr.length - 1);
    }
    public TreeNode p2(int[] arr, int start, int end)
    {
        // recurse one level past the leaves so their null children are
        // produced naturally (the TreeNode constructor may not be editable)
        if(start > end)
        {
            return null;
        }
        int less = -1;
        int more = end;
        for(int i = start; i < end; i++)
        {
            if(arr[i] < arr[end])
            {
                less = i;
            }
            else
            {
                more = more == end ? i : more;
            }
        }
        TreeNode left = p2(arr, start, less);
        TreeNode right = p2(arr, more, end - 1);
        TreeNode cur = new TreeNode(arr[end]);
        cur.left = left;
        cur.right = right;
        return cur;
    }
}
<file_sep>/src/other/chapter2/Page86.java
package other.chapter2;
//LeetCode237
public class Page86
{
class ListNode
{
int val;
ListNode next;
ListNode(int x)
{
val = x;
}
}
public void deleteNode(ListNode node) {
ListNode temp = node.next;
node.val = temp.val;
//删temp
node.next = temp.next;
}
}
<file_sep>/src/util/stack_queue/QueueStack.java
package util.stack_queue;
import java.util.ArrayDeque;
import java.util.Deque;
/*
* 用队列实现栈
*
*/
/*
 * A LIFO stack built from two FIFO queues: pop drains all but the newest
 * element into the spare queue, hands that element back, and then swaps
 * the queue roles.
 */
public class QueueStack
{
    private Deque<Integer> active = new ArrayDeque<>();
    private Deque<Integer> spare = new ArrayDeque<>();
    /** Pushes num onto the stack. */
    public void push(int num)
    {
        active.add(num);
    }
    /** Pops the most recently pushed element (fails on an empty stack). */
    public int pop()
    {
        // shuttle everything except the newest element into the spare queue
        while (active.size() > 1)
        {
            spare.add(active.remove());
        }
        int top = active.remove();
        swap();
        return top;
    }
    // exchange the two queue references
    public void swap()
    {
        Deque<Integer> tmp = active;
        active = spare;
        spare = tmp;
    }
    public static void main(String[] args)
    {
        QueueStack s = new QueueStack();
        s.push(1);
        s.push(2);
        s.push(3);
        System.out.println(s.pop());
        System.out.println(s.pop());
        s.push(3);
        System.out.println(s.pop());
        System.out.println(s.pop());
    }
}
<file_sep>/src/other/chapter2/Page47.java
package other.chapter2;
//翻转单链表和双链表
public class Page47
{
    public static void main(String[] args)
    {
    }
    class Node
    {
        public int value;
        public Node next;
        public Node(int value)
        {
            this.value = value;
        }
    }
    /** Iteratively reverses a singly linked list; returns the new head. */
    public Node reverseList(Node head)
    {
        Node pre = null;
        Node next = null;
        while(head != null)
        {
            next = head.next;
            head.next = pre;   // point the current node back at the previous one
            pre = head;
            head = next;
        }
        return pre;
    }
    class DoubleNode
    {
        public int value;
        public DoubleNode last;
        public DoubleNode next;
        public DoubleNode(int value)
        {
            this.value = value;
        }
    }
    /** Iteratively reverses a doubly linked list; returns the new head. */
    public DoubleNode reverseList(DoubleNode head)
    {
        DoubleNode pre = null;
        DoubleNode next = null;
        while(head != null)
        {
            next = head.next;
            head.next = pre;
            head.last = next;  // in a doubly linked list both pointers flip
            pre = head;
            head = next;
        }
        return pre;
    }
}
<file_sep>/src/other/chapter5/Page254.java
package other.chapter5;
// Q769
/**
 * Rotate-string check (LeetCode 796 — the file header's "Q769" appears to be
 * a transposition): b is a rotation of a iff the lengths match and a occurs
 * inside b + b, since doubling b materialises every rotation as a substring.
 */
public class Page254
{
    public boolean rotateString(String a, String b)
    {
        // BUGFIX: reject nulls instead of throwing NullPointerException
        if (a == null || b == null || a.length() != b.length())
        {
            return false;
        }
        // contains() replaces the old "indexOf(a) != -1 ? true : false"
        return (b + b).contains(a);
    }
}
<file_sep>/src/other/chapter5/Page290.java
package other.chapter5;
public class Page290
{
    // A parenthesis string is valid when every prefix has at least as many
    // '(' as ')' and the totals match. (A stack would also work.)
    public boolean isValid(String str)
    {
        if(str == null || str.equals(""))
        {
            return false;
        }
        int open = 0;
        int close = 0;
        for (char c : str.toCharArray())
        {
            if (c == '(')
            {
                open++;
            }
            else if (c == ')')
            {
                close++;
            }
            else
            {
                // any non-parenthesis character disqualifies the string
                return false;
            }
            if (close > open)
            {
                return false;
            }
        }
        return open == close;
    }
    // endingAt[i] = length of the longest valid substring ending exactly at i
    public int longestValidParentheses(String s)
    {
        if(s == null || s.length() < 2)
        {
            return 0;
        }
        char[] cs = s.toCharArray();
        int[] endingAt = new int[cs.length];
        int best = -1;
        for (int i = 1; i < cs.length; i++)
        {
            if (cs[i] == ')')
            {
                // candidate matching '(' sits just left of the run ending at i-1
                int open = i - endingAt[i - 1] - 1;
                if (open >= 0 && cs[open] == '(')
                {
                    endingAt[i] = endingAt[i - 1] + 2;
                    if (open - 1 >= 0)
                    {
                        // glue on any valid run that ends right before the '('
                        endingAt[i] += endingAt[open - 1];
                    }
                }
            }
            best = Math.max(best, endingAt[i]);
        }
        return best;
    }
}
<file_sep>/src/other/chapter2/Page87.java
package other.chapter2;
/**
 * Insert a value into a sorted circular singly linked list. Returns the head
 * (the new node itself when the list was empty, or when the new value becomes
 * the smallest element).
 */
public class Page87
{
    class ListNode
    {
        int val;
        ListNode next;
        ListNode(int x)
        {
            val = x;
        }
    }
    public ListNode insertNum(ListNode head, int num)
    {
        ListNode node = new ListNode(num);
        if(head == null)
        {
            // empty list: the new node forms a one-element ring
            node.next = node;
            return node;
        }
        ListNode pre = head;
        ListNode cur = head.next;
        boolean inserted = false;
        while(cur != head)
        {
            if(pre.val <= num && num <= cur.val)
            {
                pre.next = node;
                node.next = cur;
                inserted = true;
                // BUGFIX: stop after the first matching slot. The old loop
                // kept scanning and, when the list contains duplicates, could
                // re-link the same node a second time, dropping an element
                // from the ring.
                break;
            }
            pre = cur;
            cur = cur.next;
        }
        if(!inserted)
        {
            // num is smaller than every element or larger than all of them:
            // it goes between the tail (pre) and the head (cur == head)
            pre.next = node;
            node.next = cur;
            if(num <= cur.val)
            {
                // smaller than the old head: the new node is the new head
                return node;
            }
        }
        return head;
    }
}
<file_sep>/src/other/chapter2/Page77.java
package other.chapter2;
import java.util.HashSet;
import java.util.Set;
/**
 * Remove duplicate values from an unsorted singly linked list, keeping the
 * first occurrence of each value.
 */
public class Page77
{
    public static void main(String[] args)
    {
    }
    class ListNode
    {
        int val;
        ListNode next;
        ListNode(int x)
        {
            val = x;
        }
    }
    // hash set: O(n) time, O(n) space.
    // NOTE(review): assumes head != null — a null head would NPE here.
    public void removeRep1(ListNode head)
    {
        ListNode cur = head.next;
        ListNode pre = head;
        Set<Integer> set = new HashSet<>();
        set.add(head.val);
        while(cur != null)
        {
            if(set.contains(cur.val))
            {
                // duplicate: splice cur out of the list
                pre.next = cur.next;
            }
            else
            {
                set.add(cur.val);
                pre = cur;
            }
            cur = cur.next;
        }
    }
    // O(n^2) time, O(1) space: for every node, sweep the rest of the list
    // and unlink each later node carrying the same value
    public void removeRep2(ListNode head)
    {
        ListNode cur = head;
        ListNode next = null;
        ListNode pre = null;
        while(cur != null)
        {
            pre = cur;
            next = cur.next;
            while(next != null)
            {
                if(next.val == cur.val)
                {
                    pre.next = next.next;
                }
                else
                {
                    pre = next;
                }
                next = next.next;
            }
            cur = cur.next;
        }
    }
}
<file_sep>/src/util/sort/MergeSort.java
package util.sort;
import java.util.Arrays;
import util.DSQ;
/**
 * Classic top-down merge sort: O(n log n) time, O(n) auxiliary space.
 */
public class MergeSort
{
    /** Sorts arr in place; null and sub-2-element arrays are no-ops. */
    public static void mergeSort(int[] arr)
    {
        // BUGFIX: arrays with fewer than two elements used to enter the
        // recursion with a degenerate range and never terminate
        if (arr == null || arr.length < 2)
        {
            return;
        }
        process(arr, 0, arr.length - 1);
    }
    // sort arr[L..R], both ends inclusive
    private static void process(int[] arr, int L, int R)
    {
        // ">=" also terminates empty/degenerate ranges
        if(L >= R)
        {
            return;
        }
        // overflow-safe midpoint, equivalent to (L + R) / 2 for valid ranges
        int mid = L + (R - L) / 2;
        process(arr, L, mid);
        process(arr, mid + 1, R);
        merge(arr, L, mid, R);
    }
    // merge the sorted runs arr[L..mid] and arr[mid+1..R]
    private static void merge(int[] arr, int L, int mid, int R)
    {
        int[] help = new int[R - L + 1];
        int i = 0;
        int p1 = L;
        int p2 = mid + 1;
        while(p1 <= mid && p2 <= R)
        {
            if(arr[p1] < arr[p2])
            {
                help[i] = arr[p1];
                p1++;
            }
            else
            {
                help[i] = arr[p2];
                p2++;
            }
            i++;
        }
        // copy whichever run still has elements left
        while(p1 <= mid)
        {
            help[i++] = arr[p1++];
        }
        while(p2 <= R)
        {
            help[i++] = arr[p2++];
        }
        for(int j = 0; j < help.length; j++)
        {
            arr[L + j] = help[j];
        }
    }
    // randomized cross-check against a known-good sort
    public static void main(String[] args)
    {
        for (int i = 0; i < 1000; i++)
        {
            int[] arr = DSQ.generateRandomArray(10000, 1000);
            int[] arr1 = DSQ.copyArray(arr);
            int[] arr2 = DSQ.copyArray(arr);
            int[] arr3 = DSQ.copyArray(arr);
            mergeSort(arr1);
            DSQ.okMethod(arr2);
            if(!DSQ.isEqual(arr1, arr2))
            {
                System.out.println(Arrays.toString(arr3));
                System.out.println("fuck");
                break;
            }
        }
        System.out.println("finish..");
    }
}
<file_sep>/src/util/MyHashMap.java
package util;
/**
 * A minimal separate-chaining hash map (java.util.HashMap style) supporting
 * put/get, automatic doubling at a 0.75 load factor, and toString.
 * Keys must be non-null: hash() calls key.hashCode() directly.
 */
public class MyHashMap<K, V>
{
    // one bucket entry in a singly linked collision chain
    static class Node<K, V>
    {
        int hash;
        K key;
        V value;
        Node<K, V> next;
        Node(int hash, K key, V value, Node<K, V> next)
        {
            this.hash = hash;
            this.key = key;
            this.value = value;
            this.next = next;
        }
    }
    private Node<K, V>[] table;
    private int size;
    private static final double RESIZE_FACTOR = 0.75;
    @SuppressWarnings("unchecked")
    public MyHashMap()
    {
        table = (Node<K, V>[]) new Node[16];
        size = 0;
    }
    // bucket index; length is always a power of two so the mask is valid
    public int hash(K key, int length)
    {
        return key.hashCode() & (length - 1);
    }
    // double the table once the load factor is reached and re-insert every
    // existing entry into the new table
    public void resize()
    {
        if(table.length * RESIZE_FACTOR <= size)
        {
            @SuppressWarnings("unchecked")
            Node<K, V>[] newTable = (Node<K, V>[]) new Node[table.length * 2];
            this.size = 0;  // putVal re-counts the entries as they move over
            for (Node<K, V> node : table)
            {
                while(node != null)
                {
                    putVal(node.key, node.value, newTable);
                    node = node.next;
                }
            }
            this.table = newTable;
        }
    }
    /** Inserts or replaces the mapping for key. */
    public void put(K key, V value)
    {
        resize();
        putVal(key, value, null);
    }
    // raw insert without resize handling; newTable == null targets this.table
    public void putVal(K key, V value, Node<K, V>[] newTable)
    {
        Node<K, V>[] bucket = newTable == null ? this.table : newTable;
        int hash = hash(key, bucket.length);
        Node<K, V> temp = bucket[hash];
        // empty bucket: no collision
        if (temp == null)
        {
            bucket[hash] = new Node<>(hash, key, value, null);
            size++;
            return;
        }
        // collision: walk the chain, replacing the value on a key match;
        // 'last' remembers the final node so a new entry can be appended
        Node<K, V> last = null;
        while (temp != null)
        {
            if (temp.key.equals(key))
            {
                temp.value = value;
                return;
            }
            last = temp;
            temp = temp.next;
        }
        // last is necessarily non-null here
        last.next = new Node<>(hash, key, value, null);
        size++;
    }
    /** Returns the value mapped to key, or null when absent. */
    public V get(K key)
    {
        int hash = hash(key, table.length);
        Node<K, V> temp = table[hash];
        while(temp != null)
        {
            if(temp.key.equals(key))
            {
                return temp.value;
            }
            temp = temp.next;
        }
        return null;
    }
    @Override
    public String toString()
    {
        // BUGFIX: the old version indexed sb.length() - 2 unconditionally and
        // threw StringIndexOutOfBoundsException on an empty map
        if (size == 0)
        {
            return "{}";
        }
        StringBuilder sb = new StringBuilder("{");
        for (Node<K, V> node : table)
        {
            while(node != null)
            {
                sb.append(node.key + " : " + node.value + ", ");
                node = node.next;
            }
        }
        // overwrite the trailing comma with the closing brace
        sb.setCharAt(sb.length() - 2, '}');
        return sb.toString();
    }
    public static void main(String[] args)
    {
        System.out.println(new Object().hashCode());
        System.out.println(new Object().hashCode());
    }
}
<file_sep>/src/other/chapter5/Page302.java
package other.chapter5;
import java.util.ArrayList;
import java.util.List;
/**
 * Page 302: the string is grouped so that an upper-case letter always pairs
 * with the character immediately after it, while a lower-case letter stands
 * alone. Return the group containing the character at index k ("sb" when k
 * is past the end).
 */
public class Page302
{
    public static String pointNewchar(String s, int k)
    {
        List<String> groups = new ArrayList<>();
        char[] cs = s.toCharArray();
        int i = 0;
        while (i < cs.length)
        {
            if (isBig(cs[i]))
            {
                // an upper-case letter always pairs with its successor
                groups.add("" + cs[i] + cs[i + 1]);
                i += 2;
            }
            else
            {
                // a lower-case letter forms a group on its own
                groups.add(String.valueOf(cs[i]));
                i++;
            }
        }
        int consumed = 0;
        for (String group : groups)
        {
            consumed += group.length();
            // the first group whose last covered index reaches k is the answer
            if (consumed - 1 >= k)
            {
                return group;
            }
        }
        return "sb";
    }
    // anything outside 'a'..'z' counts as "big" here
    public static boolean isBig(char c)
    {
        return c < 'a' || c > 'z';
    }
    public static void main(String[] args)
    {
        String group = Page302.pointNewchar("aaABCDEcBCg", 3);
        System.out.println(group);
    }
}
<file_sep>/src/other/chapter7/Page348.java
package other.chapter7;
/**
 * Demo of the XOR swap trick: exchanges two ints without a temporary.
 * (Relies on x ^ x == 0; it would zero both if a and b were the same variable.)
 */
public class Page348
{
    public static void main(String[] args)
    {
        int a = 614, b = 0;
        a = a ^ b;  // a now holds a ^ b
        b = a ^ b;  // b = (a ^ b) ^ b = original a
        a = a ^ b;  // a = (a ^ b) ^ original a = original b
        System.out.println(a);
        System.out.println(b);
    }
}
<file_sep>/src/jzoffer/BstToDoubleLinkedList.java
package jzoffer;
import java.util.ArrayDeque;
import java.util.Deque;
/**
 * Coding-interview classic: convert a binary search tree into a sorted
 * doubly linked list in place — 'left' becomes the previous pointer and
 * 'right' the next pointer. Returns the head (smallest element).
 */
public class BstToDoubleLinkedList
{
    class TreeNode
    {
        int val;
        TreeNode left;
        TreeNode right;
        TreeNode(int x)
        {
            val = x;
        }
    }
    // collects the nodes in sorted (in-order) order
    private Deque<TreeNode> queue = new ArrayDeque<>();
    public TreeNode convert(TreeNode head)
    {
        if(head == null)
        {
            return null;
        }
        // BUGFIX: the queue is an instance field, so leftovers from a
        // previous convert() call would corrupt this run; start clean
        queue.clear();
        // in-order traversal enqueues the nodes in ascending order
        inOrder(head);
        // re-link the queued nodes into a doubly linked list
        TreeNode pre = null;
        TreeNode cur = null;
        TreeNode newHead = null;
        TreeNode next = null;
        while(!queue.isEmpty())
        {
            cur = queue.pollFirst();
            next = queue.peekFirst();   // null once the queue drains
            cur.left = pre;
            cur.right = next;
            pre = cur;
            newHead = newHead == null ? cur : newHead;
        }
        return newHead;
    }
    public void inOrder(TreeNode node)
    {
        if(node != null)
        {
            inOrder(node.left);
            queue.addLast(node);
            inOrder(node.right);
        }
    }
}
<file_sep>/src/other/chapter1/Page21.java
package other.chapter1;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;
/*
* 单调栈题目
* 求出一个数组中当前位置左边那个离他最近且比他小的那个数的位置 和
* 右边那个离他最近且比他小的那个数的位置
* O(N)
*/
public class Page21
{
    public static void main(String[] args)
    {
        int[] arr = { 3, 1, 3, 4, 3, 5, 3, 2, 2};
        int[][] res = getNearLess(arr);
        // int[] arr = { 3, 4, 1, 5, 6, 2, 7 };
        // int[][] res = getNearLessNoRepeat(arr);
        for (int i = 0; i < res.length; i++)
        {
            System.out.println("{" + res[i][0] + ", " + res[i][1] + "}");
        }
    }
    // version for an arr with NO repeated elements: ans[i] = {index of the
    // nearest smaller element to the left, nearest smaller to the right},
    // -1 when absent; monotonic stack, O(N)
    public static int[][] getNearLessNoRepeat(int[] arr)
    {
        Deque<Integer> stack = new ArrayDeque<>();
        int[][] ans = new int[arr.length][2];
        for (int i = 0; i < arr.length; i++)
        {
            // a smaller incoming value resolves every larger index on the stack
            while (!stack.isEmpty() && arr[stack.peek()] > arr[i])
            {
                Integer num = stack.pop();
                ans[num][0] = stack.peek() == null ? -1 : stack.peek();
                ans[num][1] = i;
            }
            if (stack.isEmpty() || arr[stack.peek()] < arr[i])
            {
                stack.push(i);
            }
        }
        // whatever remains has no smaller element on its right
        while (!stack.isEmpty())
        {
            Integer num = stack.pop();
            ans[num][0] = stack.peek() == null ? -1 : stack.peek();
            ans[num][1] = -1;
        }
        return ans;
    }
    // general version: arr may contain repeats, so each stack slot holds the
    // list of indices that share the same value
    public static int[][] getNearLess(int[] arr)
    {
        Deque<List<Integer>> stack = new ArrayDeque<>();
        int[][] ans = new int[arr.length][2];
        for(int i = 0; i < arr.length; i++)
        {
            while(!stack.isEmpty() && arr[stack.peek().get(0)] > arr[i])
            {
                List<Integer> popIndexs = stack.pop();
                // take the most recently pushed of the equal indices below
                Integer leftIndex = stack.isEmpty() ? - 1 : stack.peek().get(stack.peek().size() - 1);
                for (Integer index : popIndexs)
                {
                    ans[index][0] = leftIndex;
                    ans[index][1] = i;
                }
            }
            if(!stack.isEmpty() && arr[stack.peek().get(0)] == arr[i])
            {
                // equal value: join the existing group instead of pushing
                stack.peek().add(i);
            }
            else
            {
                List<Integer> indexs = new ArrayList<>();
                indexs.add(i);
                stack.push(indexs);
            }
        }
        while(!stack.isEmpty())
        {
            List<Integer> popIndexs = stack.pop();
            Integer leftIndex = stack.isEmpty() ? - 1 : stack.peek().get(stack.peek().size() - 1);
            for (Integer index : popIndexs)
            {
                ans[index][0] = leftIndex;
                ans[index][1] = -1;
            }
        }
        return ans;
    }
}
<file_sep>/src/medium/Q105.java
package medium;
/**
 * LeetCode 105: rebuild a binary tree from its preorder and inorder
 * traversals (values are assumed distinct).
 */
public class Q105
{
    class TreeNode
    {
        int val;
        TreeNode left;
        TreeNode right;
        TreeNode(int x)
        {
            val = x;
        }
    }
    public TreeNode buildTree(int[] preorder, int[] inorder)
    {
        if(preorder == null || inorder == null || preorder.length == 0 || inorder.length == 0)
        {
            return null;
        }
        return p(preorder, inorder, 0, preorder.length - 1, 0, inorder.length - 1);
    }
    // rebuild the subtree described by pre[preStart..preEnd] / in[inStart..inEnd]
    public TreeNode p(int[] pre, int[] in, int preStart, int preEnd, int inStart, int inEnd)
    {
        // the first preorder element is always the subtree root
        TreeNode node = new TreeNode(pre[preStart]);
        node.left = null;
        node.right = null;
        // a single element is a leaf
        if(preStart == preEnd && inStart == inEnd)
        {
            return node;
        }
        // locate the root within the inorder range
        int root = 0;
        for(root = inStart; root <= inEnd; root++)
        {
            if(pre[preStart] == in[root])
            {
                break;
            }
        }
        // sizes of the left and right subtrees
        int leftLength = root - inStart;
        int rightLength = inEnd - root;
        // recurse only into non-empty subtrees
        if(leftLength > 0)
        {
            node.left = p(pre, in, preStart + 1, preStart + leftLength, inStart, root - 1);
        }
        if(rightLength > 0)
        {
            node.right = p(pre, in, preStart + leftLength + 1, preEnd, root + 1, inEnd);
        }
        return node;
    }
}
<file_sep>/src/easy/Q234.java
package easy;
/*
* 请判断一个链表是否为回文链表。
* 思路1:使用栈
* 思路2:把后半段逆序, 然后从两边向中间遍历
*/
public class Q234
{
// public boolean isPalindrome(ListNode head)
// {
// if(head == null)
// {
// return false;
// }
// if(head.next == null)
// {
// return true;
// }
// Deque<Integer> stack = new ArrayDeque<>();
// ListNode p = head;
// while(p != null)
// {
// stack.push(p.val);
// p = p.next;
// }
// p = head;
// while(p != null)
// {
// Integer num = stack.pop();
// if(num != p.val)
// {
// return false;
// }
// p = p.next;
// }
// return true;
// }
public boolean isPalindrome(ListNode head)
{
if(head == null)
{
return false;
}
if(head.next == null)
{
return true;
}
ListNode n1 = head;
ListNode n2 = head;
while(n2.next != null && n2.next.next != null)
{
n1 = n1.next;
n2 = n2.next.next;
}
//如果有奇数个节点 此时n1指向中间节点, 如果有偶数个节点 此时n1指向两个中间节点的前一个
//-----------------
n2 = n1.next;
n1.next = null;
ListNode n3 = null;
while(n2 != null)
{
n3 = n2.next;
n2.next = n1;
n1 = n2;
n2 = n3;
}
//翻转完成后 n1指向整个链表的最后一个节点
//----------------
n3 = n1;
n2 = head;
while(n2 != null && n1 != null)
{
if(n2.val != n1.val)
{
return false;
}
n1 = n1.next;
n2 = n2.next;
}
//完整做法 还要再将链表翻转回来
return true;
}
public static void main(String[] args)
{
}
class ListNode
{
int val;
ListNode next;
ListNode(int x)
{
val = x;
next = null;
}
}
}
<file_sep>/src/hard/Q32.java
package hard;
// Page32
/**
 * LeetCode 32: length of the longest valid parentheses substring, via
 * dynamic programming over run lengths ending at each index.
 */
public class Q32
{
    public int longestValidParentheses(String s) {
        if(s == null || s.length() < 2)
        {
            return 0;
        }
        char[] cs = s.toCharArray();
        // endingAt[i] = length of the longest valid substring ending at i
        int[] endingAt = new int[cs.length];
        int best = -1;
        for (int i = 1; i < cs.length; i++)
        {
            if (cs[i] == ')')
            {
                // candidate matching '(' sits just left of the run ending at i-1
                int open = i - endingAt[i - 1] - 1;
                if (open >= 0 && cs[open] == '(')
                {
                    endingAt[i] = endingAt[i - 1] + 2;
                    if (open - 1 >= 0)
                    {
                        // glue on any valid run ending right before the '('
                        endingAt[i] += endingAt[open - 1];
                    }
                }
            }
            best = Math.max(best, endingAt[i]);
        }
        return best;
    }
}
<file_sep>/src/other/chapter4/Page248.java
package other.chapter4;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
public class Page248
{
    // O(n): every number enters and leaves the set at most once
    public int longestConsecutive(int[] nums)
    {
        Set<Integer> remaining = new HashSet<>();
        for (int v : nums)
        {
            remaining.add(v);
        }
        int best = 0;
        for (int v : nums)
        {
            if (!remaining.remove(v))
            {
                continue;   // already consumed as part of an earlier run
            }
            // extend the run downwards from v (e.g. 100 -> 99, 98, ...)
            int low = v;
            while (remaining.remove(low - 1))
            {
                low--;
            }
            // extend the run upwards from v (e.g. 100 -> 101, 102, ...)
            int high = v;
            while (remaining.remove(high + 1))
            {
                high++;
            }
            best = Math.max(best, high - low + 1);
        }
        return best;
    }
    // O(n log n) via sorting; competitive for small inputs
    public int longestConsecutive2(int[] nums)
    {
        if (nums.length == 0)
        {
            return 0;
        }
        Arrays.sort(nums);
        int best = 1;
        int run = 1;
        for (int i = 1; i < nums.length; i++)
        {
            if (nums[i] == nums[i - 1])
            {
                continue;   // duplicates neither extend nor break a run
            }
            if (nums[i] == nums[i - 1] + 1)
            {
                run++;
            }
            else
            {
                best = Math.max(best, run);
                run = 1;
            }
        }
        // the longest run may be the one still open at the end
        return Math.max(best, run);
    }
}
<file_sep>/src/other/chapter5/Page263.java
package other.chapter5;
/**
 * Page 263: binary search in a sorted String array that may contain nulls
 * interspersed among the values. getIndex returns the LEFTMOST index
 * holding str, or -1 when absent.
 * NOTE(review): assumes str != null — confirm with callers.
 */
public class Page263
{
    public static void main(String[] args)
    {
    }
    public int getIndex(String[] strs, String str)
    {
        int res = -1;
        int left = 0;
        int right = strs.length - 1;
        int mid = 0;
        int i = 0;
        while(left <= right)
        {
            mid = (left + right) / 2;
            if(strs[mid] != null && strs[mid].equals(str))
            {
                res = mid;
                // keep searching left for an earlier occurrence
                right = mid - 1;
            }
            else if(strs[mid] != null)
            {
                if(strs[mid].compareTo(str) < 0)
                {
                    left = mid + 1;
                }
                else
                {
                    right = mid - 1;
                }
            }
            else // strs[mid] == null
            {
                i = mid;
                // walk left from mid to the first non-null slot
                while(strs[i] == null && --i >= left);
                // i < left means everything from mid leftwards was null
                if(i < left || strs[i].compareTo(str) < 0)
                {
                    left = mid + 1;
                }
                else
                {
                    res = strs[i].equals(str) ? i : res;
                    right = i - 1;
                }
            }
        }
        return res;
    }
}
<file_sep>/src/other/chapter3/Page152.java
package other.chapter3;
/**
 * Build a height-balanced BST from a sorted array: the middle element
 * becomes the root and the two halves are built recursively.
 */
public class Page152
{
    class TreeNode
    {
        int val;
        TreeNode left;
        TreeNode right;
        TreeNode(int x)
        {
            val = x;
        }
    }
    public TreeNode generate(int[] arr)
    {
        return p(arr, 0, arr.length - 1);
    }
    // build the subtree covering arr[start..end]
    public TreeNode p(int[] arr, int start, int end)
    {
        // an empty range produces no node
        if(start > end)
        {
            return null;
        }
        int mid = (start + end) / 2;
        TreeNode root = new TreeNode(arr[mid]);
        root.left = p(arr, start, mid - 1);
        root.right = p(arr, mid + 1, end);
        return root;
    }
}
<file_sep>/src/hard/Q128.java
package hard;
import java.util.HashSet;
import java.util.Set;
/**
 * LeetCode 128: length of the longest run of consecutive integers in an
 * unsorted array, O(n) with a hash set.
 */
public class Q128
{
    public int longestConsecutive(int[] nums)
    {
        // the set gives O(1) membership; every value is removed at most once
        Set<Integer> remaining = new HashSet<>();
        for (int v : nums)
        {
            remaining.add(v);
        }
        int best = 0;
        for (int v : nums)
        {
            if (!remaining.remove(v))
            {
                continue;   // already consumed as part of an earlier run
            }
            // extend the run downwards (e.g. 100 -> 99, 98, ...)
            int low = v;
            while (remaining.remove(low - 1))
            {
                low--;
            }
            // extend the run upwards (e.g. 100 -> 101, 102, ...)
            int high = v;
            while (remaining.remove(high + 1))
            {
                high++;
            }
            best = Math.max(best, high - low + 1);
        }
        return best;
    }
}
<file_sep>/src/easy/Q198.java
package easy;
/**
 * LeetCode 198 (house robber): adjacent houses cannot both be robbed;
 * maximise the total loot.
 */
public class Q198
{
    public static void main(String[] args)
    {
        int r = new Q198().rob(new int[]
        { 2, 1, 1, 2 });
        System.out.println(r);
    }
    /**
     * Maximum loot for the whole street.
     * FIX: previously ran the exponential recursion p() (which the original
     * comment admitted "won't pass"); now delegates to the O(n) dynamic
     * program — the result is identical.
     */
    public int rob(int[] nums)
    {
        return robdp(nums);
    }
    // brute-force reference: best loot when choosing freely from 'start' on
    public int p(int[] nums, int start)
    {
        if (start >= nums.length)
        {
            return 0;
        }
        // rob house 'start' and skip its neighbour ...
        int p1 = nums[start] + p(nums, start + 2);
        // ... or skip house 'start' entirely
        int p2 = p(nums, start + 1);
        return Math.max(p1, p2);
    }
    // dp[i] = best loot from houses i..end; two trailing zero slots avoid
    // bounds checks for i+1 and i+2
    public int robdp(int[] nums)
    {
        int[] dp = new int[nums.length + 2];
        for (int start = nums.length - 1; start >= 0; start--)
        {
            dp[start] = Math.max(nums[start] + dp[start + 2], dp[start + 1]);
        }
        return dp[0];
    }
}
<file_sep>/src/other/chapter2/Page84.java
package other.chapter2;
/**
 * Selection sort on a singly linked list: repeatedly unlink the smallest
 * remaining node and append it to the sorted prefix. O(n^2) time, O(1) space.
 */
public class Page84
{
    class ListNode
    {
        int val;
        ListNode next;
        ListNode(int x)
        {
            val = x;
        }
    }
    public static void main(String[] args)
    {
    }
    public ListNode selectionSort(ListNode head)
    {
        ListNode tail = null; // tail of the sorted part
        ListNode cur = head; // head of the unsorted part
        ListNode preMin = null; // node before the minimum node
        ListNode min = null; // minimum node
        while (cur != null)
        {
            // locate the node preceding the smallest node of the unsorted part
            min = cur;
            preMin = findSmallestPreNode(cur);
            // unlink the minimum (when it is not the first unsorted node)
            if (preMin != null)
            {
                min = preMin.next;
                preMin.next = min.next;
            }
            if (tail == null)
            {
                // first pass only: this minimum is the overall smallest,
                // so it becomes the head that is eventually returned
                head = min;
            }
            else
            {
                tail.next = min;
            }
            tail = min;
            // advance only when cur itself was the minimum; otherwise the
            // unsorted part still starts at cur and must be scanned again
            cur = cur == min ? cur.next : cur;
        }
        return head;
    }
    // returns the node just before the smallest node of the list starting at
    // head, or null when head itself is the smallest
    private ListNode findSmallestPreNode(ListNode head)
    {
        // smallest node seen so far
        ListNode min = head;
        // node before the smallest node
        ListNode preMin = null;
        // node before the current node
        ListNode pre = head;
        // current node
        ListNode cur = head.next;
        while (cur != null)
        {
            if (cur.val < min.val)
            {
                preMin = pre;
                min = cur;
            }
            pre = cur;
            cur = cur.next;
        }
        return preMin;
    }
}
<file_sep>/src/other/chapter4/Page185.java
package other.chapter4;
/**
 * Minimum path sum from the top-left to the bottom-right of a grid,
 * moving only right or down: brute-force recursion and bottom-up DP.
 */
public class Page185
{
    public int num(int[][] map)
    {
        return p(map, 0, 0);
    }
    // brute force: cheapest path from cell (m, n) to the bottom-right corner
    public int p(int[][] map, int m, int n)
    {
        int lastRow = map.length - 1;
        int lastCol = map[0].length - 1;
        if (m == lastRow && n == lastCol)
        {
            return map[m][n];
        }
        if (m == lastRow)
        {
            // bottom row: only moving right remains
            return map[m][n] + p(map, m, n + 1);
        }
        if (n == lastCol)
        {
            // rightmost column: only moving down remains
            return map[m][n] + p(map, m + 1, n);
        }
        return map[m][n] + Math.min(p(map, m + 1, n), p(map, m, n + 1));
    }
    // bottom-up dynamic program over the same recursion
    public int dpMethod(int[][] map)
    {
        int rows = map.length;
        int cols = map[0].length;
        // dp[i][j] = cheapest path from (i, j) to the bottom-right corner
        int[][] dp = new int[rows][cols];
        dp[rows - 1][cols - 1] = map[rows - 1][cols - 1];
        // bottom row: each cell can only go right
        for (int j = cols - 2; j >= 0; j--)
        {
            dp[rows - 1][j] = map[rows - 1][j] + dp[rows - 1][j + 1];
        }
        // rightmost column: each cell can only go down
        for (int i = rows - 2; i >= 0; i--)
        {
            dp[i][cols - 1] = map[i][cols - 1] + dp[i + 1][cols - 1];
        }
        for (int i = rows - 2; i >= 0; i--)
        {
            for (int j = cols - 2; j >= 0; j--)
            {
                dp[i][j] = map[i][j] + Math.min(dp[i + 1][j], dp[i][j + 1]);
            }
        }
        return dp[0][0];
    }
    public static void main(String[] args)
    {
        int[][] map = new int[][]
        {
            { 1, 3, 5, 9 },
            { 8, 1, 3, 4 },
            { 5, 0, 6, 1 },
            { 8, 8, 4, 0 } };
        int z = new Page185().num(map);
        System.out.println(z);
    }
}
<file_sep>/src/easy/Q28.java
package easy;
/**
 * LeetCode 28: index of the first occurrence of needle in haystack (or -1),
 * implemented with KMP in O(m + n).
 */
public class Q28
{
    public int strStr(String haystack, String needle) {
        // BUGFIX: test for null BEFORE calling length() — the old code
        // dereferenced both strings first, so its null check was unreachable
        if(haystack == null || needle == null)
        {
            return -1;
        }
        int iLen = haystack.length();
        int jLen = needle.length();
        if(iLen < jLen)
        {
            return -1;
        }
        char[] str1 = haystack.toCharArray();
        char[] str2 = needle.toCharArray();
        int i = 0, j = 0;
        int[] next = this.getNext(str2);
        // an empty needle matches at index 0
        if(next == null)
        {
            return 0;
        }
        while(i < iLen && j < jLen)
        {
            // j == -1 means we fell off the left edge of the pattern:
            // advance both pointers
            if(j == -1 || str1[i] == str2[j])
            {
                i++;
                j++;
            }
            else
            {
                // mismatch: slide the pattern using the failure function
                j = next[j];
            }
        }
        return j == jLen ? i - j : -1;
    }
    /**
     * KMP failure function: next[i] is the length of the longest proper
     * prefix of str2[0..i-1] that is also its suffix (-1 at index 0).
     * Returns null for an empty pattern.
     */
    public int[] getNext(char[] str2)
    {
        if(str2.length == 0)
        {
            return null;
        }
        if(str2.length == 1)
        {
            return new int[] {-1};
        }
        int[] next = new int[str2.length];
        next[0] = -1;
        next[1] = 0;
        // position of next[] being filled
        int i = 2;
        // current candidate prefix length / index compared against
        int cn = 0;
        while(i < next.length)
        {
            if(str2[i - 1] == str2[cn])
            {
                next[i] = ++cn;
                i++;
            }
            else
            {
                if(cn > 0)
                {
                    cn = next[cn];
                }
                else
                {
                    next[i++] = 0;
                }
            }
        }
        return next;
    }
    public static void main(String[] args)
    {
    }
}
<file_sep>/src/other/chapter3/Page132.java
package other.chapter3;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;
// Q102, Q103
/**
 * Level-order printing of a binary tree. printByLevel prints each level on
 * its own line, left to right; printByZigZag alternates the direction on
 * every level.
 */
public class Page132
{
    class TreeNode
    {
        int val;
        TreeNode left;
        TreeNode right;
        TreeNode(int x)
        {
            val = x;
        }
    }
    // differs from the book's solution: track the node count per level
    public void printByLevel(TreeNode head)
    {
        if (head == null)
        {
            return;
        }
        Deque<TreeNode> queue = new ArrayDeque<>();
        queue.add(head);
        while (!queue.isEmpty())
        {
            // number of nodes on the current level
            int count = queue.size();
            while (count > 0)
            {
                TreeNode cur = queue.poll();
                System.out.print(cur.val + " ");
                if (cur.left != null)
                {
                    queue.add(cur.left);
                }
                if (cur.right != null)
                {
                    queue.add(cur.right);
                }
                count--;
            }
            // newline after each completed level
            System.out.println();
        }
    }
    // differs from the book's solution: buffer a level and print it reversed
    public void printByZigZag(TreeNode head)
    {
        if(head == null)
        {
            return;
        }
        Deque<TreeNode> deque = new ArrayDeque<>();
        // true: print left-to-right, false: right-to-left
        boolean direct = true;
        deque.addLast(head);
        while(!deque.isEmpty())
        {
            int count = deque.size();
            List<TreeNode> list = new ArrayList<>();
            while(count > 0)
            {
                TreeNode cur = deque.poll();
                if(direct)
                {
                    System.out.print(cur.val + " ");
                }
                else
                {
                    // prepend so the buffered level comes out reversed
                    list.add(0, cur);
                }
                if (cur.left != null)
                {
                    deque.add(cur.left);
                }
                if (cur.right != null)
                {
                    deque.add(cur.right);
                }
                count--;
            }
            if(!direct)
            {
                for (TreeNode treeNode : list)
                {
                    // BUGFIX: print the trailing space the left-to-right
                    // branch prints; values used to run together here
                    System.out.print(treeNode.val + " ");
                }
            }
            direct = !direct;
            System.out.println();
        }
    }
}
<file_sep>/src/other/chapter2/Page50.java
package other.chapter2;
//环形单链表的约瑟夫环问题
public class Page50
{
    public static void main(String[] args)
    {
    }
    // Josephus: count to m around the ring, remove that node, repeat until
    // one node remains. O(m*n) overall; returns the survivor.
    public Node josephus(Node head, int m)
    {
        if(head == null || head.next == null || m < 1)
        {
            return head;
        }
        // find the node just before head so deletions can relink the ring
        Node last = head;
        while(last.next != head)
        {
            last = last.next;
        }
        int count = 0;
        while(last != head)
        {
            count++;
            if(count == m)
            {
                // head is the m-th node counted: unlink it
                last.next = head.next;
                count = 0;
            }
            else
            {
                last = last.next;
            }
            head = last.next;
        }
        return head;
    }
    // follow-up: for a ring of N nodes an O(N) solution exists
    // (closed-form recurrence) — not implemented here
    //...
    class Node
    {
        public int value;
        public Node next;
        public Node(int value)
        {
            this.value = value;
        }
    }
}
<file_sep>/src/medium/Q55.java
package medium;
/**
 * LeetCode 55 (jump game): each value is the maximum jump length from that
 * index; decide whether the last index is reachable from index 0.
 */
public class Q55
{
    // [2,3,1,1,4] -> true
    public boolean canJump(int[] nums)
    {
        if(nums == null || nums.length == 0)
        {
            return false;
        }
        return dpMethod(nums);
        // return jump(nums, 0);
    }
    // reachable[i]: starting at index i, can the end be reached?
    public boolean dpMethod(int[] nums)
    {
        boolean[] reachable = new boolean[nums.length];
        reachable[nums.length - 1] = true;
        // length of the run of dead (unreachable) positions just after i
        int deadRun = 0;
        for (int i = nums.length - 2; i >= 0; i--)
        {
            if (reachable[i + 1])
            {
                deadRun = 0;
            }
            // i escapes only by jumping past every dead position after it
            reachable[i] = nums[i] > deadRun;
            if (!reachable[i])
            {
                deadRun++;
            }
        }
        return reachable[0];
    }
    // brute force: currently at 'index', can the tail be reached?
    public boolean jump(int[] nums, int index)
    {
        if (index == nums.length - 1)
        {
            return true;
        }
        for (int step = 1; step <= nums[index]; step++)
        {
            if (jump(nums, index + step))
            {
                return true;
            }
        }
        return false;
    }
}
<file_sep>/src/other/chapter4/Page223.java
package other.chapter4;
public class Page223
{
    /**
     * Builds the DP table for the longest-common-substring problem.
     * dp[i][j] is the length of the common substring that ends exactly at
     * str1[i] and str2[j] (0 when those characters differ).
     */
    public int[][] getdp(char[] str1, char[] str2)
    {
        int rows = str1.length;
        int cols = str2.length;
        int[][] dp = new int[rows][cols];
        // First column: a match with str2[0] yields a substring of length 1.
        for (int r = 0; r < rows; r++)
        {
            dp[r][0] = str1[r] == str2[0] ? 1 : 0;
        }
        // First row: same idea against str1[0].
        for (int c = 0; c < cols; c++)
        {
            dp[0][c] = str2[c] == str1[0] ? 1 : 0;
        }
        // General cell: extend the diagonal run when the characters match.
        for (int r = 1; r < rows; r++)
        {
            for (int c = 1; c < cols; c++)
            {
                dp[r][c] = str1[r] == str2[c] ? dp[r - 1][c - 1] + 1 : 0;
            }
        }
        return dp;
    }
    /**
     * Returns the longest common substring of str1 and str2 (the first one
     * found when scanning the DP table row by row; "" if none exists).
     */
    public String lcst(String str1, String str2)
    {
        int[][] dp = getdp(str1.toCharArray(), str2.toCharArray());
        int best = 0;
        int endIndex = 0;
        for (int r = 0; r < dp.length; r++)
        {
            for (int c = 0; c < dp[r].length; c++)
            {
                if (dp[r][c] > best)
                {
                    best = dp[r][c];
                    endIndex = r;
                }
            }
        }
        // substring is half-open: [endIndex - best + 1, endIndex + 1).
        return str1.substring(endIndex - best + 1, endIndex + 1);
    }
    public static void main(String[] args)
    {
        Page223 obj = new Page223();
        System.out.println(obj.lcst("akldhqiuwdhiqw", "wwhqiuzz"));
    }
}
<file_sep>/src/other/chapter5/Page281.java
package other.chapter5;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
public class Page281
{
    /**
     * For every word, precomputes the list of words (within the word set)
     * reachable by changing exactly one letter.
     */
    public Map<String, List<String>> getNexts(List<String> words)
    {
        Set<String> set = new HashSet<>(words);
        Map<String, List<String>> nexts = new HashMap<>();
        for (int i = 0; i < words.size(); i++)
        {
            nexts.put(words.get(i), getNext(words.get(i), set));
        }
        return nexts;
    }
    // Which strings in set can word become by changing exactly one letter?
    public List<String> getNext(String word, Set<String> set)
    {
        char[] chs = word.toCharArray();
        List<String> res = new ArrayList<String>();
        for (char cur = 'a'; cur <= 'z'; cur++)
        {
            for (int i = 0; i < chs.length; i++)
            {
                if (chs[i] != cur)
                {
                    // Temporarily substitute the letter, test, then restore.
                    char temp = chs[i];
                    chs[i] = cur;
                    if (set.contains(String.valueOf(chs)))
                    {
                        res.add(String.valueOf(chs));
                    }
                    chs[i] = temp;
                }
            }
        }
        return res;
    }
    // BFS: shortest distance from start to every reachable string.
    public Map<String, Integer> getDistance(String start, Map<String, List<String>> nexts)
    {
        Map<String, Integer> dis = new HashMap<>();
        // Tracks strings already enqueued, so each is processed once.
        Set<String> set = new HashSet<>();
        dis.put(start, 0);
        set.add(start);
        Deque<String> queue = new ArrayDeque<String>();
        queue.add(start);
        while (!queue.isEmpty())
        {
            String cur = queue.poll();
            for (String str : nexts.get(cur))
            {
                if(!set.contains(str))
                {
                    queue.add(str);
                    set.add(str);
                    dis.put(str, dis.get(cur) + 1);
                }
            }
        }
        return dis;
    }
    // solution accumulates the path currently being explored;
    // every complete shortest path from cur to to is copied into res.
    private void getShortestPaths(String cur, String to, Map<String, List<String>> nexts, Map<String, Integer> distances,
            LinkedList<String> solution, List<List<String>> res)
    {
        solution.add(cur);
        if(to.equals(cur))
        {
            res.add(new LinkedList<>(solution));
        }
        else
        {
            for (String next : nexts.get(cur))
            {
                // Only follow edges that move strictly farther from start,
                // which rules out cycles and detours.
                if(distances.get(next) == distances.get(cur) + 1)
                {
                    getShortestPaths(next, to, nexts, distances, solution, res);
                }
            }
        }
        // Backtrack: drop the last node before trying the next branch.
        solution.pollLast();
    }
    /**
     * Returns all shortest transformation paths from start to to, where
     * each step changes exactly one letter and stays inside list.
     */
    public List<List<String>> findMinPaths(String start, String to, List<String> list)
    {
        // Build the nexts map: for each string in list, which strings it can
        // become by changing a single character.
        list.add(start);
        Map<String, List<String>> nexts = getNexts(list);
        // With nexts as an adjacency map, BFS gives each string's shortest
        // distance from start.
        Map<String, Integer> distances = getDistance(start, nexts);
        // All shortest start-to-to paths constitute the answer.
        LinkedList<String> path = new LinkedList<>();
        List<List<String>> res = new ArrayList<>();
        getShortestPaths(start, to, nexts, distances, path, res);
        return res;
    }
}
<file_sep>/src/other/chapter2/Page74.java
package other.chapter2;
import java.util.ArrayDeque;
import java.util.Deque;
//LeetCode 25: reverse the nodes of a linked list k at a time.
public class Page74
{
    class ListNode
    {
        int val;
        ListNode next;
        ListNode(int x)
        {
            val = x;
        }
    }
    // Stack-based solution, O(k) extra space: buffer k nodes, then relink
    // them in popped (reversed) order. A trailing group shorter than k is
    // left untouched.
    public ListNode reverseKGroup(ListNode head, int k)
    {
        if(k < 2)
        {
            return head;
        }
        Deque<ListNode> stack = new ArrayDeque<>();
        ListNode cur = head;
        ListNode next = null;
        ListNode pre = null;
        ListNode newHead = head;
        while(cur != null)
        {
            next = cur.next;
            stack.push(cur);
            if(stack.size() == k)
            {
                // pre becomes the tail of the group just reversed.
                pre = resign1(stack, pre, next);
                // The first group's last node becomes the overall new head.
                newHead = newHead == head ? cur : newHead;
            }
            cur = next;
        }
        return newHead;
    }
    // Pops the buffered nodes, chaining them between left and right;
    // returns the tail of the re-linked group.
    private ListNode resign1(Deque<ListNode> stack, ListNode left, ListNode right)
    {
        ListNode cur = stack.pop();
        if(left != null)
        {
            left.next = cur;
        }
        ListNode next = null;
        while(!stack.isEmpty())
        {
            next = stack.pop();
            cur.next = next;
            cur = next;
        }
        cur.next = right;
        return cur;
    }
    // O(1) extra space version.
    public ListNode reverseKGroup2(ListNode head, int k)
    {
        if(k < 2)
        {
            return head;
        }
        ListNode cur = head;
        ListNode next = null;
        ListNode start = null;
        ListNode pre = null;
        int count = 1;
        while(cur != null)
        {
            next = cur.next;
            if(count == k)
            {
                // start: first node of the current group; cur: its last node.
                start = pre == null ? head : pre.next;
                // Only the very first group changes the overall head.
                head = pre == null ? cur : head;
                reverse(pre, start, cur, next);
                pre = start;
                count = 0;
            }
            cur = next;
            count++;
        }
        return head;
    }
    // Reverses start..end in place; left precedes the group (may be null)
    // and right follows it.
    private void reverse(ListNode left, ListNode start, ListNode end, ListNode right)
    {
        // These three are the usual list-reversal cursors; they are NOT the
        // same variables as the identically named ones in the caller.
        ListNode pre = start;
        ListNode cur = start.next;
        ListNode next = null;
        while(cur != right)
        {
            next = cur.next;
            cur.next = pre;
            pre = cur;
            cur = next;
        }
        if(left != null)
        {
            left.next = end;
        }
        start.next = right;
    }
}
<file_sep>/src/other/chapter4/Page210.java
package other.chapter4;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class Page210
{
    public static void main(String[] args)
    {
        Page210 obj = new Page210();
        int[] arr = new int[] {2, 1, 5, 3, 6, 4, 8, 9, 7};
        int[] dp = obj.getdp1(arr);
        int[] res = obj.generateLIS(arr, dp);
        System.out.println(Arrays.toString(res));
    }
    /**
     * O(n^2) DP. dp[i] is the length of the longest increasing subsequence
     * of arr[0..i] that ends exactly at arr[i].
     */
    public int[] getdp1(int[] arr)
    {
        int[] dp = new int[arr.length];
        dp[0] = 1;
        for (int i = 1; i < dp.length; i++)
        {
            dp[i] = 1;
            for (int j = i - 1; j >= 0; j--)
            {
                if (arr[j] < arr[i])
                {
                    dp[i] = Math.max(dp[i], dp[j] + 1);
                }
            }
        }
        return dp;
    }
    /**
     * Reconstructs one LIS from arr and its dp array.
     * Fixed: the original wrote arr[index] = arr[i] during the scan, which
     * corrupted the caller's input array; the reconstruction works without it.
     */
    public int[] generateLIS(int[] arr, int[] dp)
    {
        // The largest dp value is the LIS length; index is where it ends.
        int max = 0;
        int index = 0;
        for (int i = 0; i < dp.length; i++)
        {
            if (dp[i] > max)
            {
                max = dp[i];
                index = i;
            }
        }
        // Fill the result from the back: arr[index] is the LIS's last element.
        int[] lis = new int[max];
        int pos = max - 1;
        lis[pos] = arr[index];
        // Scanning leftwards, arr[i] can precede arr[index] in some LIS
        // exactly when it is smaller and its dp value is one less.
        for (int i = index - 1; i >= 0; i--)
        {
            if (arr[i] < arr[index] && dp[i] == dp[index] - 1)
            {
                lis[--pos] = arr[i];
                index = i;
            }
        }
        return lis;
    }
    /**
     * O(n log n) version. ends[l] holds the smallest value that ends any
     * increasing subsequence of length l + 1 seen so far; ends[0..right]
     * stays sorted, which enables the binary search.
     */
    public int[] getdp2(int[] arr)
    {
        // Same meaning as in getdp1.
        int[] dp = new int[arr.length];
        int[] ends = new int[arr.length];
        ends[0] = arr[0];
        dp[0] = 1;
        int right = 0;
        for (int i = 1; i < arr.length; i++)
        {
            // Binary search for the leftmost valid ends entry >= arr[i].
            int l = 0;
            int r = right;
            while (l <= r)
            {
                int m = (l + r) / 2;
                if (arr[i] > ends[m])
                {
                    l = m + 1;
                }
                else
                {
                    r = m - 1;
                }
            }
            // Either l points at the found position, or l == right + 1
            // (arr[i] extends the longest subsequence so far).
            right = Math.max(right, l);
            ends[l] = arr[i];
            dp[i] = l + 1;
        }
        return dp;
    }
}
<file_sep>/src/jzoffer/Serialize.java
package jzoffer;
import java.util.ArrayDeque;
import java.util.Deque;
public class Serialize
{
    class TreeNode
    {
        int val;
        TreeNode left;
        TreeNode right;
        TreeNode(int x)
        {
            val = x;
        }
    }
    // Accumulates the pre-order encoding. Note it is shared across calls:
    // invoking serialize twice on the same instance keeps appending.
    private StringBuilder sb = new StringBuilder();
    /**
     * Pre-order serialization: each value is terminated by '!',
     * and a null child is encoded as "#!".
     */
    public String serialize(TreeNode root)
    {
        p(root);
        return sb.toString();
    }
    private void p(TreeNode root)
    {
        if (root == null)
        {
            sb.append("#!");
        }
        else
        {
            sb.append(root.val).append("!");
            p(root.left);
            p(root.right);
        }
    }
    /** Rebuilds a tree from a string produced by serialize. */
    public TreeNode deserialize(String str)
    {
        Deque<String> tokens = new ArrayDeque<>();
        for (String token : str.split("!"))
        {
            tokens.add(token);
        }
        return rebuild(tokens);
    }
    /** Consumes tokens in pre-order and reconstructs the subtree. */
    public TreeNode rebuild(Deque<String> queue)
    {
        String token = queue.poll();
        if ("#".equals(token))
        {
            return null;
        }
        TreeNode node = new TreeNode(Integer.parseInt(token));
        node.left = rebuild(queue);
        node.right = rebuild(queue);
        return node;
    }
}
<file_sep>/src/medium/Q222.java
package medium;
/**
 * Placeholder for LeetCode problem 222 — no implementation here;
 * see Page176 (other/chapter3) for the related solution.
 */
public class Q222
{
}
<file_sep>/src/other/chapter4/Page236.java
package other.chapter4;
public class Page236
{
    /**
     * "Dungeon Game": minimum initial HP needed to travel from the top-left
     * to the bottom-right cell, moving only right or down, while keeping
     * HP >= 1 on every cell. Returns 1 for a null/empty board.
     */
    public int calculateMinimumHP(int[][] dungeon)
    {
        if (dungeon == null || dungeon.length == 0 || dungeon[0] == null || dungeon[0].length == 0)
        {
            return 1;
        }
        int lastRow = dungeon.length - 1;
        int lastCol = dungeon[0].length - 1;
        // dp[i][j] = minimum HP required when ENTERING cell (i, j).
        int[][] dp = new int[lastRow + 1][lastCol + 1];
        dp[lastRow][lastCol] = dungeon[lastRow][lastCol] > 0 ? 1 : 1 - dungeon[lastRow][lastCol];
        // Bottom row: the only move is right.
        for (int j = lastCol - 1; j >= 0; j--)
        {
            dp[lastRow][j] = Math.max(dp[lastRow][j + 1] - dungeon[lastRow][j], 1);
        }
        for (int i = lastRow - 1; i >= 0; i--)
        {
            // Rightmost column: the only move is down.
            dp[i][lastCol] = Math.max(dp[i + 1][lastCol] - dungeon[i][lastCol], 1);
            for (int j = lastCol - 1; j >= 0; j--)
            {
                // Take the cheaper of the two possible next steps; HP can
                // never drop below 1, hence the clamping with Math.max.
                int goRight = Math.max(dp[i][j + 1] - dungeon[i][j], 1);
                int goDown = Math.max(dp[i + 1][j] - dungeon[i][j], 1);
                dp[i][j] = Math.min(goRight, goDown);
            }
        }
        return dp[0][0];
    }
}
<file_sep>/src/other/chapter1/Page12.java
package other.chapter1;
import java.util.ArrayDeque;
import java.util.Deque;
// Sort a stack using only one auxiliary stack.
public class Page12
{
    public static void main(String[] args)
    {
        Deque<Integer> stack = new ArrayDeque<>();
        stack.push(6);
        stack.push(6);
        stack.push(1);
        stack.push(4);
        stack.push(4);
        System.out.println(stack);
        sortStackByStack(stack);
        System.out.println(stack);
    }
    /**
     * Sorts the stack so that its largest element ends up on top, using a
     * single helper stack. The helper is kept ordered with the smallest
     * element on top; pouring it back reverses that, leaving the source
     * stack sorted descending from the top.
     */
    public static void sortStackByStack(Deque<Integer> stack)
    {
        Deque<Integer> help = new ArrayDeque<>();
        while (!stack.isEmpty())
        {
            Integer cur = stack.pop();
            // Shuttle every helper element smaller than cur back to the
            // source stack, then slot cur into its ordered position.
            while (!help.isEmpty() && cur > help.peek())
            {
                stack.push(help.pop());
            }
            help.push(cur);
        }
        // Pour back: smallest first, so the largest lands on top.
        while (!help.isEmpty())
        {
            stack.push(help.pop());
        }
    }
}
<file_sep>/src/medium/Q48.java
package medium;
/*
 * Rotate an n x n matrix 90 degrees clockwise, in place.
 * The corners (a, b) and (c, d) determine one ring of the matrix;
 * the outermost ring is rotated first, then each inner ring in turn.
 * Within a ring, groups of four cells are cycled at once.
 */
public class Q48
{
    public static void rotate(int[][] matrix)
    {
        // Nothing to do for null, empty, or 1x1 input.
        if (matrix == null || matrix.length == 0 || (matrix.length == 1 && matrix[0].length == 1))
        {
            return;
        }
        int topRow = 0;
        int leftCol = 0;
        int bottomRow = matrix.length - 1;
        int rightCol = matrix.length - 1;
        // Shrink the ring towards the center.
        while (topRow <= bottomRow)
        {
            rotateCircle(matrix, topRow++, leftCol++, bottomRow--, rightCol--);
        }
    }
    /** Rotates the single ring whose corners are (a, b) and (c, d). */
    public static void rotateCircle(int[][] matrix, int a, int b, int c, int d)
    {
        // d - b four-cell cycles cover the ring; each corner moves once.
        for (int i = 0; i < d - b; i++)
        {
            int tmp = matrix[a][b + i];
            matrix[a][b + i] = matrix[c - i][b];
            matrix[c - i][b] = matrix[c][d - i];
            matrix[c][d - i] = matrix[a + i][d];
            matrix[a + i][d] = tmp;
        }
    }
    public static void main(String[] args)
    {
    }
}
<file_sep>/src/other/chapter5/Page261.java
package other.chapter5;
public class Page261
{
    /**
     * Returns true when every character in chs is distinct.
     * Assumes all char values fit in 0..255 (extended ASCII) — TODO confirm;
     * a character above 255 would index out of bounds.
     */
    public boolean f(char[] chs)
    {
        boolean[] seen = new boolean[256];
        for (char ch : chs)
        {
            if (seen[ch])
            {
                return false;
            }
            seen[ch] = true;
        }
        return true;
    }
}
<file_sep>/src/hard/Q124.java
package hard;
/*
 * Binary tree maximum path sum.
 * The tricky part: the recursion's return value is NOT the final answer;
 * a separate field (max) records the best path seen anywhere in the tree.
 */
public class Q124
{
    class TreeNode
    {
        int val;
        TreeNode left;
        TreeNode right;
        TreeNode(int x)
        {
            val = x;
        }
    }
    // Running maximum over every path examined so far.
    public int max = Integer.MIN_VALUE;
    public int maxPathSum(TreeNode root)
    {
        p(root);
        return max;
    }
    /**
     * Returns the best downward path sum that starts at node (clamped at 0,
     * meaning "contribute nothing"), while updating max with the best path
     * that bends through node.
     */
    public int p(TreeNode node)
    {
        if (node == null)
        {
            return 0;
        }
        int bestLeft = p(node.left);
        int bestRight = p(node.right);
        // A path may pass through node using both subtrees.
        max = Math.max(max, bestLeft + bestRight + node.val);
        // Only one side can be extended upward to the parent.
        int upward = node.val + Math.max(bestLeft, bestRight);
        /*
         * A negative contribution must be reported as 0 ("take nothing"),
         * otherwise it would poison the parent's decision. Consider
         *      2
         *     / \
         *   -1  -1
         * where the answer is 2: if both children returned -1, the value
         * computed at the root would wrongly become 0.
         */
        return Math.max(upward, 0);
    }
}
<file_sep>/src/other/chapter5/Page276.java
package other.chapter5;
public class Page276
{
    public static void main(String[] args)
    {
        Page276 obj = new Page276();
        String str = obj.removeDuplicateLetters("baacbaccac");
        System.out.println(str);
    }
    /**
     * Removes duplicate letters so that every letter of s appears exactly
     * once and the result is the lexicographically smallest such
     * subsequence. Assumes s contains only lowercase 'a'..'z'.
     */
    public String removeDuplicateLetters(String s)
    {
        char[] str = s.toCharArray();
        // Letter frequencies; a count of -1 later means "already placed".
        int[] map = new int[26];
        for(int i = 0; i < str.length; i++)
        {
            map[str[i] - 'a']++;
        }
        char[] res = new char[26];
        int index = 0;
        int l = 0;
        int r = 0;
        while(r < str.length)
        {
            // Advance r until some letter's remaining count would hit 0:
            // then the window [l, r] is the last chance to pick that letter,
            // so a choice must be made now.
            if(map[str[r] - 'a'] == -1 || --map[str[r] - 'a'] > 0)
            {
                r++;
            }
            else
            {
                // Pick the smallest not-yet-placed letter inside [l, r].
                int pick = -1;
                for(int i = l; i <= r; i++)
                {
                    if(map[str[i] - 'a'] != -1 && (pick == - 1 || str[i] < str[pick]))
                    {
                        pick = i;
                    }
                }
                res[index++] = str[pick];
                // Restore the counts that were decremented past the pick,
                // since scanning restarts right after it.
                for(int i = pick + 1; i <= r; i++)
                {
                    if(map[str[i] - 'a'] != -1)
                    {
                        map[str[i] - 'a']++;
                    }
                }
                // Mark the picked letter as placed and restart after it.
                map[str[pick] - 'a'] = -1;
                l = pick + 1;
                r = l;
            }
        }
        return new String(res, 0, index);
    }
}
<file_sep>/src/medium/Q92.java
package medium;
/*
 * LeetCode 92: reverse the sublist between positions m and n (1-based)
 * using a single scan, in place.
 * Constraint: 1 <= m <= n <= list length.
 */
public class Q92
{
    public ListNode reverseBetween(ListNode head, int m, int n)
    {
        ListNode cur = head;
        int len = 0;
        // fpre: node just before position m; tpos: node just after position n.
        ListNode fpre = null;
        ListNode tpos = null;
        while (cur != null)
        {
            len++;
            fpre = len == m - 1 ? cur : fpre;
            tpos = len == n + 1 ? cur : tpos;
            cur = cur.next;
        }
        // Invalid range: return the list untouched.
        if (m > n || m < 1 || n > len)
        {
            return head;
        }
        // cur is the first node of the region to reverse.
        cur = fpre == null ? head : fpre.next;
        ListNode node2 = cur.next;
        // After reversal cur is the region's tail, so its successor is tpos.
        cur.next = tpos;
        ListNode next = null;
        while (node2 != tpos)
        {
            next = node2.next;
            node2.next = cur;
            cur = node2;
            node2 = next;
        }
        if (fpre != null)
        {
            // Link the prefix to the region's last pre-reversal node,
            // which is now the head of the reversed region.
            fpre.next = cur;
            return head;
        }
        // m == 1: the reversed region's head becomes the new list head.
        return cur;
    }
    public static void main(String[] args)
    {
    }
    class ListNode
    {
        int val;
        ListNode next;
        ListNode(int x)
        {
            val = x;
            next = null;
        }
    }
}
<file_sep>/src/util/sort/BucketProblem.java
package util.sort;
import java.util.Arrays;
/*
 * Given an array, find the maximum difference between adjacent elements
 * after (conceptually) sorting it.
 * Required: O(N) time, without any comparison-based sort.
 */
public class BucketProblem
{
    /**
     * Bucket trick: n values are spread over n + 1 equal-width buckets
     * covering [min, max], so at least one bucket is empty and the answer
     * can never come from two values inside the same bucket. Only each
     * bucket's min/max need to be kept.
     * Returns 0 for null input or fewer than two elements.
     */
    public static int fun(int[] arr)
    {
        // Fixed: the original threw a NullPointerException on null input.
        if (arr == null || arr.length < 2)
        {
            return 0;
        }
        int max = arr[0];
        int min = arr[0];
        for (int i = 1; i < arr.length; i++)
        {
            max = Math.max(max, arr[i]);
            min = Math.min(min, arr[i]);
        }
        // All elements equal: every adjacent gap is 0.
        if (max == min)
        {
            return 0;
        }
        int n = arr.length;
        // n + 1 buckets; bucket i records the min and max value it received.
        int[] maxs = new int[n + 1];
        int[] mins = new int[n + 1];
        boolean[] hasNum = new boolean[n + 1];
        for (int i = 0; i < n; i++)
        {
            int bid = getBucketId(arr[i], n, max, min);
            if (hasNum[bid])
            {
                maxs[bid] = Math.max(maxs[bid], arr[i]);
                mins[bid] = Math.min(mins[bid], arr[i]);
            }
            else
            {
                maxs[bid] = arr[i];
                mins[bid] = arr[i];
            }
            hasNum[bid] = true;
        }
        // The answer is the largest (min of bucket i) - (max of the nearest
        // earlier non-empty bucket).
        int result = 0;
        int lastNotEmpty = 0;   // bucket 0 always holds the global minimum
        for (int i = 1; i <= n; i++)
        {
            if (hasNum[i])
            {
                result = Math.max(result, mins[i] - maxs[lastNotEmpty]);
                lastNotEmpty = i;
            }
        }
        return result;
    }
    /**
     * Maps num into one of len + 1 buckets covering [min, max].
     * Fixed: uses long arithmetic so (num - min) * len cannot overflow int
     * for wide value ranges.
     */
    public static int getBucketId(int num, int len, int max, int min)
    {
        // (num - min) is the offset from the range start; dividing by the
        // per-bucket width (max - min) / len selects the bucket.
        return (int) ((num - (long) min) * len / ((long) max - min));
    }
    public static void main(String[] args)
    {
        int[] arr = {3, 4, 1, 7, 9, -1, 2, 10, 20, 15, 13, -10};
        System.out.println(fun(arr));
        Arrays.sort(arr);
        System.out.println(Arrays.toString(arr));
    }
}
<file_sep>/src/jzoffer/CopyRandomListNode.java
package jzoffer;
import java.util.HashMap;
import java.util.Map;
public class CopyRandomListNode
{
    class RandomListNode
    {
        int label;
        RandomListNode next = null;
        RandomListNode random = null;
        RandomListNode(int label)
        {
            this.label = label;
        }
    }
    // Deep copy using O(n) extra space.
    public RandomListNode clone(RandomListNode pHead)
    {
        // key: original node, value: its freshly created copy
        Map<RandomListNode, RandomListNode> map = new HashMap<>();
        RandomListNode cur = pHead;
        while(cur != null)
        {
            map.put(cur, new RandomListNode(cur.label));
            cur = cur.next;
        }
        cur = pHead;
        // Follow next/random on the originals to find each target node,
        // look up that target's copy, and wire the copies the same way.
        while(cur != null)
        {
            RandomListNode nextNode = cur.next;
            RandomListNode randomNode = cur.random;
            RandomListNode copyNextNode = map.get(nextNode);
            RandomListNode copyRandomNode = map.get(randomNode);
            RandomListNode copyCur = map.get(cur);
            copyCur.next = copyNextNode;
            copyCur.random = copyRandomNode;
            cur = cur.next;
        }
        return map.get(pHead);
    }
    // Deep copy using O(1) extra space: interleave the copies into the
    // original list, wire their random pointers, then split the two lists.
    public RandomListNode clone1(RandomListNode pHead)
    {
        if(pHead == null)
        {
            return null;
        }
        RandomListNode cur = pHead;
        RandomListNode next = null;
        // Step 1: insert each node's copy directly after it.
        while(cur != null)
        {
            next = cur.next;
            RandomListNode node = new RandomListNode(cur.label);
            cur.next = node;
            node.next = next;
            cur = next;
        }
        // Step 2: a copy's random is the successor of the original's random.
        cur = pHead;
        while(cur != null)
        {
            RandomListNode randomNode = cur.random;
            cur.next.random = randomNode != null ? randomNode.next : null;
            cur = cur.next.next;
        }
        // Step 3: detach the copies, restoring the original list.
        cur = pHead;
        RandomListNode head = pHead.next;
        RandomListNode curCopy = null;
        while(cur != null)
        {
            next = cur.next.next;
            curCopy = cur.next;
            cur.next = next;
            curCopy.next = next != null ? next.next : null;
            cur = next;
        }
        return head;
    }
}
<file_sep>/src/jzoffer/package-info.java
/**
 * Solutions to problems from "剑指 Offer" (Coding Interviews).
 *
 * @author Administrator
 */
package jzoffer;<file_sep>/src/other/chapter3/Page173.java
package other.chapter3;
import java.util.LinkedList;
import java.util.List;
public class Page173
{
    class TreeNode
    {
        int val;
        TreeNode left;
        TreeNode right;
        TreeNode(int x)
        {
            val = x;
        }
    }
    // How many structurally different binary search trees hold n nodes.
    public int numTrees(int n)
    {
        if (n <= 1)
        {
            return 1;
        }
        int sum = 0;
        // Root the tree at every value in turn; the counts of the possible
        // left and right subtrees multiply.
        for (int i = 1; i <= n; i++)
        {
            sum += numTrees(i - 1) * numTrees(n - i);
        }
        return sum;
    }
    // DP version of numTrees (the Catalan-number recurrence).
    public int dpMethod(int n)
    {
        if (n <= 1)
        {
            return 1;
        }
        int[] dp = new int[n + 1];
        dp[0] = 1;
        dp[1] = 1;
        // dp[i]: number of distinct BSTs buildable from i nodes.
        for (int i = 2; i < dp.length; i++)
        {
            // Each value in turn becomes the root.
            for (int j = 1; j <= i; j++)
            {
                dp[i] += dp[j - 1] * dp[i - j];
            }
        }
        return dp[n];
    }
    // Builds every BST holding the values 1..n.
    public List<TreeNode> generateTrees(int n)
    {
        if(n == 0)
        {
            return new LinkedList<>();
        }
        return g(1, n);
    }
    // Returns the heads of all BSTs buildable from the range [start, end].
    public List<TreeNode> g(int start, int end)
    {
        List<TreeNode> res = new LinkedList<>();
        if(start > end)
        {
            // An empty range contributes the single "no subtree" option.
            res.add(null);
        }
        TreeNode head = null;
        // Root the subtree at every position in turn.
        for(int i = start; i <= end; i++)
        {
            head = new TreeNode(i);
            List<TreeNode> left = g(start, i - 1);
            List<TreeNode> right = g(i + 1, end);
            for (TreeNode l : left)
            {
                for (TreeNode r : right)
                {
                    head.left = l;
                    head.right = r;
                    res.add(clone(head));
                }
            }
        }
        return res;
    }
    // Deep-copies a tree so the emitted trees are independent and share
    // no nodes with each other.
    public TreeNode clone(TreeNode head)
    {
        if(head == null)
        {
            return null;
        }
        TreeNode res = new TreeNode(head.val);
        res.left = clone(head.left);
        res.right = clone(head.right);
        return res;
    }
    public static void main(String[] args)
    {
        int z = new Page173().numTrees(3);
        System.out.println(z);
    }
}
<file_sep>/src/util/sort/PrioorityQueueDemo.java
package util.sort;
import java.util.PriorityQueue;
public class PrioorityQueueDemo
{
    public static void main(String[] args)
    {
        // PriorityQueue is a min-heap by default: poll() returns the
        // smallest element, here the student with the lowest id.
        PriorityQueue<Student> heap = new PriorityQueue<>();
        for (int id = 1; id <= 4; id++)
        {
            heap.add(new Student(id, 10));
        }
        // Draining the heap prints the students in ascending id order.
        Student next;
        while ((next = heap.poll()) != null)
        {
            System.out.println(next);
        }
    }
}
/**
 * Demo element for the priority queue; ordered by ascending id.
 */
class Student implements Comparable<Student>
{
    int id;
    int age;

    public Student(int id, int age)
    {
        this.id = id;
        this.age = age;
    }

    /**
     * Orders students by ascending id; a negative result means this
     * student sorts before o.
     * Fixed: uses Integer.compare instead of the subtraction idiom
     * (this.id - o.id), which overflows for ids of opposite sign and
     * can report the wrong order.
     */
    @Override
    public int compareTo(Student o)
    {
        return Integer.compare(this.id, o.id);
    }

    @Override
    public String toString()
    {
        return "Student [id=" + id + ", age=" + age + "]";
    }
}
<file_sep>/src/util/DSQ.java
package util;
import java.util.Arrays;
public class DSQ
{
//返回一个范围在[0, size]之间, 元素大小在[-value, +value]之间的数组
public static int[] generateRandomArray(int size, int value)
{
//Math.random() -> double [0, 1)
//arr -> [0, size]
int[] arr = new int[(int) ((size + 1) * Math.random())];
for (int i = 0; i < arr.length; i++)
{
//[0, value] - [0, value - 1] -> [1 - value, value]
arr[i] = (int) ((value + 1) * Math.random()) - (int)(value * Math.random());
}
return arr;
}
public static int[] copyArray(int[] arr)
{
int[] temp = new int[arr.length];
for (int i = 0; i < arr.length; i++)
{
temp[i] = arr[i];
}
return temp;
}
public static boolean isEqual(int[] arr1, int[] arr2)
{
if(arr1.length != arr2.length)
{
return false;
}
for (int i = 0; i < arr2.length; i++)
{
if(arr1[i] != arr2[i])
{
return false;
}
}
return true;
}
public static void okMethod(int[] arr)
{
Arrays.sort(arr);
}
public static void swap(int[] arr, int i, int j)
{
int temp = arr[i];
arr[i] = arr[j];
arr[j] = temp;
}
}
<file_sep>/src/jzoffer/PrintMatrix.java
package jzoffer;
import java.util.ArrayList;
import java.util.List;
public class PrintMatrix
{
    public static void main(String[] args)
    {
        int[][] matrix = new int[][] {{1}, {2}, {3}, {4}, {5}};
        System.out.println(new PrintMatrix().printMatrix(matrix));
    }
    /**
     * Returns the elements of matrix in clockwise spiral order,
     * or null when the matrix is null or has no rows.
     */
    public ArrayList<Integer> printMatrix(int[][] matrix)
    {
        if (matrix == null || matrix.length == 0)
        {
            return null;
        }
        ArrayList<Integer> out = new ArrayList<>();
        int top = 0, left = 0;
        int bottom = matrix.length - 1, right = matrix[0].length - 1;
        // Peel one ring per iteration; BOTH conditions are required for
        // non-square matrices.
        while (top <= bottom && left <= right)
        {
            printCircle(out, top++, left++, bottom--, right--, matrix);
        }
        return out;
    }
    /** Appends one ring, bounded by corners (lefti, leftj) and (righti, rightj). */
    public void printCircle(ArrayList<Integer> list, int lefti, int leftj, int righti, int rightj, int[][] matrix)
    {
        if (lefti == righti)
        {
            // Degenerate ring: a single row.
            for (int j = leftj; j <= rightj; j++)
            {
                list.add(matrix[lefti][j]);
            }
        }
        else if (leftj == rightj)
        {
            // Degenerate ring: a single column.
            for (int i = lefti; i <= righti; i++)
            {
                list.add(matrix[i][leftj]);
            }
        }
        else
        {
            // Full ring: top row ->, right column v, bottom row <-, left
            // column ^; each leg stops one short of the next corner.
            for (int j = leftj; j < rightj; j++)
            {
                list.add(matrix[lefti][j]);
            }
            for (int i = lefti; i < righti; i++)
            {
                list.add(matrix[i][rightj]);
            }
            for (int j = rightj; j > leftj; j--)
            {
                list.add(matrix[righti][j]);
            }
            for (int i = righti; i > lefti; i--)
            {
                list.add(matrix[i][leftj]);
            }
        }
    }
}
<file_sep>/src/other/chapter8/package-info.java
/**
 * Solutions for chapter 8.
 *
 * @author Administrator
 */
package other.chapter8;<file_sep>/src/other/chapter3/Page121.java
package other.chapter3;
import util.TreeNode;
public class Page121
{
    /**
     * Result of processing one subtree:
     * size/head describe the largest BST found inside the subtree, while
     * max/min are the extremes of the WHOLE subtree (a parent needs those
     * to check BST ordering against its own value).
     */
    class ReturnType
    {
        public int size;
        public TreeNode head;
        public int max;
        public int min;
        public ReturnType(int size, TreeNode head, int max, int min)
        {
            this.size = size;
            this.head = head;
            this.max = max;
            this.min = min;
        }
    }
    /** Returns the root of the largest subtree of head that is a BST. */
    public TreeNode getMaxBST(TreeNode head)
    {
        return p(head).head;
    }
    public ReturnType p(TreeNode node)
    {
        if(node == null)
        {
            // MIN/MAX sentinels make the parent's ordering checks pass trivially.
            return new ReturnType(0, null, Integer.MIN_VALUE, Integer.MAX_VALUE);
        }
        ReturnType leftInfo = p(node.left);
        ReturnType rightInfo = p(node.right);
        // Fixed: max/min must cover the ENTIRE subtree rooted at node, not
        // just the best BST inside it. The original returned the inner
        // BST's extremes (a leaf even reported max == Integer.MIN_VALUE),
        // which let e.g. a left child larger than its parent slip through
        // the BST check below.
        int max = Math.max(Math.max(leftInfo.max, rightInfo.max), node.val);
        int min = Math.min(Math.min(leftInfo.min, rightInfo.min), node.val);
        // Default: the better of the two child results.
        int size = Math.max(leftInfo.size, rightInfo.size);
        TreeNode newHead = leftInfo.size >= rightInfo.size ? leftInfo.head : rightInfo.head;
        // node roots a BST iff each child's whole subtree is itself a BST
        // (its best BST head IS the child) and the ordering holds across node.
        if(leftInfo.head == node.left && rightInfo.head == node.right
                && leftInfo.max < node.val && rightInfo.min > node.val)
        {
            size = leftInfo.size + rightInfo.size + 1;
            newHead = node;
        }
        return new ReturnType(size, newHead, max, min);
    }
    /** Same algorithm; kept for parity with the book's second listing. */
    public ReturnType p2(TreeNode node)
    {
        if(node == null)
        {
            return new ReturnType(0, null, Integer.MIN_VALUE, Integer.MAX_VALUE);
        }
        // Fixed: recurse into p2 itself (the original recursed into p).
        ReturnType leftInfo = p2(node.left);
        ReturnType rightInfo = p2(node.right);
        int max = Math.max(Math.max(leftInfo.max, rightInfo.max), node.val);
        int min = Math.min(Math.min(leftInfo.min, rightInfo.min), node.val);
        int size = Math.max(leftInfo.size, rightInfo.size);
        TreeNode newHead = leftInfo.size > rightInfo.size ? leftInfo.head : rightInfo.head;
        // Third possibility: node itself roots a bigger BST.
        if(leftInfo.head == node.left && rightInfo.head == node.right
                && leftInfo.max < node.val && rightInfo.min > node.val)
        {
            size = leftInfo.size + rightInfo.size + 1;
            newHead = node;
        }
        return new ReturnType(size, newHead, max, min);
    }
}
<file_sep>/src/easy/Q26.java
package easy;
public class Q26
{
public int removeDuplicates(int[] nums)
{
int result = 0;
for(int i = 1; i < nums.length; i++)
{
if(nums[result] == nums[i])
{
continue;
}
else
{
result++;
nums[result] = nums[i];
}
}
return result + 1;
}
public static void main(String[] args)
{
int[] arr = {1, 3, 4, 5, 0};
int len = new Q26().removeDuplicates(arr);
for(int i = 0; i < len; i++)
{
System.out.print(arr[i] + " ");
}
}
}
<file_sep>/src/other/chapter2/Page88.java
package other.chapter2;
public class Page88
{
    class ListNode
    {
        int val;
        ListNode next;
        ListNode(int x)
        {
            val = x;
        }
    }
    /**
     * Merges two sorted lists into one sorted list by splicing the existing
     * nodes (stable: on ties the node from l1 comes first).
     */
    public ListNode mergeTwoLists(ListNode l1, ListNode l2) {
        if (l1 == null && l2 == null)
        {
            return null;
        }
        // A dummy head keeps the splice logic uniform.
        ListNode dummy = new ListNode(-1);
        ListNode tail = dummy;
        while (l1 != null && l2 != null)
        {
            if (l1.val <= l2.val)
            {
                tail.next = l1;
                l1 = l1.next;
            }
            else
            {
                tail.next = l2;
                l2 = l2.next;
            }
            tail = tail.next;
        }
        // At most one list still has nodes; append it whole.
        tail.next = l1 != null ? l1 : l2;
        return dummy.next;
    }
}
<file_sep>/src/jzoffer/FindGreatestSumOfSubArray.java
package jzoffer;
// 子数组的最大和
public class FindGreatestSumOfSubArray
{
// 递归解法
public int solution(int[] arr)
{
int max = Integer.MIN_VALUE;
for(int i = 0; i < arr.length; i++)
{
int z = p(arr, i);
System.out.println(i + ": " + z);
max = Math.max(max, z);
}
return max;
}
// 以end位置结尾的子数组的最大和
public int p(int[] arr, int end)
{
if(end == 0)
{
return arr[0];
}
int pre = p(arr, end - 1);
// 此时arr[end]类加上pre会比arr[end]本身还小
if(pre < 0)
{
return arr[end];
}
return pre + arr[end];
}
//dp 解法
public int findGreatestSumOfSubArray(int[] arr)
{
int[] dp = new int[arr.length];
dp[0] = arr[0];
int max = dp[0];
for(int i = 1; i < dp.length; i++)
{
if(dp[i - 1] < 0)
{
dp[i] = arr[i];
}
else
{
dp[i] = dp[i - 1] + arr[i];
}
max = Math.max(max, dp[i]);
}
return max;
}
}
<file_sep>/src/jzoffer/DeleteDuplication.java
package jzoffer;
public class DeleteDuplication
{
    class ListNode
    {
        int val;
        ListNode next;
        ListNode(int x)
        {
            val = x;
            next = null;
        }
    }
    // Sorted list: drop EVERY node whose value appears more than once
    // (none of the duplicates is kept).
    public ListNode deleteDuplication(ListNode pHead)
    {
        if (pHead == null)
        {
            return null;
        }
        ListNode cur = pHead;
        // Always points at the node before cur, so deletion can relink.
        ListNode pre = null;
        while (cur != null)
        {
            boolean needDelete = false;
            ListNode pNext = cur.next;
            if (pNext != null && pNext.val == cur.val)
            {
                needDelete = true;
            }
            if (!needDelete)
            {
                pre = cur;
                cur = pNext;
            }
            else
            {
                ListNode tobeDelete = pNext;
                while (tobeDelete != null && tobeDelete.val == cur.val)
                {
                    tobeDelete = tobeDelete.next;
                }
                // tobeDelete has skipped the whole run of nodes equal to
                // cur; it is the first node with a different value.
                cur = tobeDelete;
                // The run of duplicates started at the head.
                if (pre == null)
                {
                    pHead = cur;
                }
                else
                {
                    pre.next = cur;
                }
            }
        }
        return pHead;
    }
    // Variant: keep exactly one node out of each run of duplicates.
    public ListNode deleteDuplication2(ListNode pHead)
    {
        if (pHead == null)
        {
            return null;
        }
        ListNode cur = pHead;
        // Always points at the node before cur, so deletion can relink.
        ListNode pre = null;
        while (cur != null)
        {
            boolean needDelete = false;
            ListNode pNext = cur.next;
            if (pNext != null && pNext.val == cur.val)
            {
                needDelete = true;
            }
            if (!needDelete)
            {
                pre = cur;
                cur = pNext;
            }
            else
            {
                // Difference from the first variant: stop ON the run's last
                // node instead of past it, so one duplicate survives.
                ListNode tobeDelete = cur;
                while (tobeDelete.next != null && tobeDelete.next.val == cur.val)
                {
                    tobeDelete = tobeDelete.next;
                }
                // tobeDelete is the LAST node of the run of duplicates.
                cur = tobeDelete;
                // The run of duplicates started at the head.
                if (pre == null)
                {
                    pHead = cur;
                }
                else
                {
                    pre.next = cur;
                }
            }
        }
        return pHead;
    }
}
<file_sep>/src/other/chapter9/Page469.java
package other.chapter9;
import java.util.HashMap;
import java.util.Map;
// LFU (least-frequently-used) cache. Per the book: intricate enough that
// it is usually not worth attempting from scratch in a timed written test.
public class Page469
{
    // One cache entry. Entries with the same access count share a bucket
    // and are linked vertically through up/down.
    class Node
    {
        public Integer key;
        public Integer value;
        public Integer times; // how many times this entry was set or get
        public Node up; // in-bucket doubly-linked-list pointers
        public Node down;
        public Node(Integer key, Integer value, Integer times)
        {
            this.key = key;
            this.value = value;
            this.times = times;
            up = null;
            down = null;
        }
    }
    // A bucket: all entries sharing one access count.
    class NodeList
    {
        // Each bucket is a doubly linked list of nodes.
        public Node head;
        public Node tail;
        // The buckets themselves form a doubly linked list, ordered by count.
        public NodeList last;
        public NodeList next;
        public NodeList(Node node)
        {
            this.head = node;
            this.tail = node;
        }
        // New nodes enter at the head, so the tail is always the least
        // recently touched entry of the bucket (the eviction candidate).
        public void addHead(Node newHead)
        {
            newHead.down = head;
            head.up = newHead;
            head = newHead;
        }
        // Whether the bucket holds no nodes.
        public boolean isEmpty()
        {
            return head == null;
        }
        // Unlinks node, which must currently be in this bucket.
        public void deleteNode(Node node)
        {
            // Only one node in the bucket.
            if(head == tail)
            {
                head = null;
                tail = null;
            }
            else // relink around node depending on its position
            {
                if(node == head)
                {
                    head = node.down;
                    head.up = null;
                }
                else if(node == tail)
                {
                    tail = node.up;
                    tail.down = null;
                }
                else
                {
                    node.up.down = node.down;
                    node.down.up = node.up;
                }
            }
            node.down = null;
            node.up = null;
        }
    }
    class LFUCache
    {
        private int capacity;
        private int size;
        // key -> node lookup, backing get(key).
        private Map<Integer, Node> records = new HashMap<>();
        // node -> the bucket it currently lives in.
        private Map<Node, NodeList> heads = new HashMap<>();
        // Leftmost bucket (smallest access count); eviction happens here.
        private NodeList headList = null;
        public LFUCache(int k)
        {
            capacity = k;
            size = 0;
        }
        /*
         * removeNodeList just lost a node, so it may now be empty and need
         * to disappear from the bucket chain:
         * 1) not empty: do nothing, return false
         * 2) empty and it is the leftmost bucket: advance headList
         * 3) empty elsewhere: splice its two neighbours together
         */
        private boolean modifyHeadList(NodeList removeNodeList)
        {
            if(removeNodeList.isEmpty())
            {
                if(removeNodeList == headList)
                {
                    headList = removeNodeList.next;
                    if(headList != null)
                    {
                        headList.last = null;
                    }
                }
                else
                {
                    removeNodeList.last.next = removeNodeList.next;
                    if(removeNodeList.next != null) // removeNodeList may be the last bucket
                    {
                        removeNodeList.next.last = removeNodeList.last;
                    }
                }
                return true;
            }
            return false;
        }
        /*
         * node.times has already been incremented but node still sits in
         * oldNodeList; move it into the bucket matching its new count,
         * creating (and splicing in) that bucket when necessary.
         */
        private void move(Node node, NodeList oldNodeList)
        {
            oldNodeList.deleteNode(node);
            // preList is the bucket that should precede node's new bucket.
            NodeList preList = modifyHeadList(oldNodeList) ? oldNodeList.last : oldNodeList;
            NodeList nextList = oldNodeList.next;
            // oldNodeList may have been the last bucket.
            if(nextList == null)
            {
                NodeList newList = new NodeList(node);
                if(preList != null)
                {
                    preList.next = newList;
                }
                newList.last = preList;
                if(headList == null)
                {
                    headList = newList;
                }
                heads.put(node, newList);
            }
            else
            {
                if(nextList.head.times.equals(node.times))
                {
                    nextList.addHead(node);
                    heads.put(node, nextList);
                }
                else
                {
                    // A fresh bucket must go between preList and nextList.
                    NodeList newList = new NodeList(node);
                    if(preList != null)
                    {
                        preList.next = newList;
                    }
                    newList.last = preList;
                    newList.next = nextList;
                    nextList.last = newList;
                    // oldNodeList may have been the first bucket and just
                    // vanished, leaving headList pointing at nextList.
                    if(headList == nextList)
                    {
                        headList = newList;
                    }
                    heads.put(node, newList);
                }
            }
        }
        public void set(int key, int value)
        {
            if(records.containsKey(key))
            {
                // Existing key: update the value, bump the access count,
                // and relocate the node into the matching bucket.
                Node node = records.get(key);
                node.value = value;
                node.times++;
                NodeList curList = heads.get(node);
                move(node, curList);
            }
            else
            {
                if(size == capacity)
                {
                    // Evict the least recently used node of the least
                    // frequently used bucket (the leftmost bucket's tail).
                    Node node = headList.tail;
                    headList.deleteNode(node);
                    modifyHeadList(headList);
                    records.remove(node.key);
                    heads.remove(node);
                    size--;
                }
                Node node = new Node(key, value , 1);
                if(headList == null)
                {
                    headList = new NodeList(node);
                }
                else
                {
                    if(headList.head.times.equals(node.times))
                    {
                        headList.addHead(node);
                    }
                    else // the leftmost count exceeds 1: a new count-1 bucket goes first
                    {
                        NodeList newList = new NodeList(node);
                        newList.next = headList;
                        headList.last = newList;
                        headList = newList;
                    }
                }
                records.put(key, node);
                heads.put(node, headList);
                size++;
            }
        }
        public Integer get(int key)
        {
            if(!records.containsKey(key))
            {
                return null;
            }
            // Bump the count and relocate the node to the matching bucket.
            Node node = records.get(key);
            node.times++;
            move(node, heads.get(node));
            return node.value;
        }
    }
}
<file_sep>/src/jzoffer/Match.java
package jzoffer;
public class Match
{
    /**
     * Regular-expression matching supporting '.' (matches any single
     * character) and '*' (zero or more of the preceding element).
     * The WHOLE of str must be consumed for a successful match.
     */
    public boolean match(char[] str, char[] pattern)
    {
        return p(str, pattern, 0, 0);
    }
    // Can str[i..] be fully matched against pattern[j..]?
    public boolean p(char[] str, char[] pattern, int i, int j)
    {
        // Pattern exhausted: succeed only if the string is exhausted too.
        if(j == pattern.length)
        {
            return i == str.length;
        }
        // j is the last pattern position, or j + 1 is not '*': the current
        // characters must match and the remainders must match recursively.
        if(j + 1 == pattern.length || pattern[j + 1] != '*')
        {
            return i != str.length && (str[i] == pattern[j] || pattern[j] == '.')
                && p(str, pattern, i + 1, j + 1);
        }
        // Here pattern[j + 1] must be '*': try consuming one more matching
        // character at a time, each time testing whether skipping past the
        // "x*" pair lets the rest match.
        while(i < str.length && (str[i] == pattern[j] || pattern[j] == '.'))
        {
            if(p(str, pattern, i, j + 2))
            {
                return true;
            }
            i++;
        }
        // Finally try matching "x*" against zero further characters.
        return p(str, pattern, i, j + 2);
    }
}
<file_sep>/src/easy/Q867.java
package easy;
/*
* 给定一个矩阵 A, 返回 A 的转置矩阵
*/
public class Q867
{
public static int[][] transpose(int[][] A)
{
if(A == null)
{
return null;
}
if(A.length == 0)
{
return new int[][] {};
}
int newRow = A[0].length;
int newColum = A.length;
int[][] AT = new int[newRow][newColum];
for (int i = 0; i < A.length; i++)
{
for(int j = 0; j < A[0].length; j++)
{
AT[j][i] = A[i][j];
}
}
return AT;
}
public static void main(String[] args)
{
int[][] A = {{1,2,3,4}, {5,6,7,8}};
A = Q867.transpose(A);
for (int i = 0; i < A.length; i++)
{
for(int j = 0; j < A[0].length; j++)
{
System.out.print(A[i][j]);
}
System.out.println();
}
}
}
<file_sep>/src/other/chapter4/Page218.java
package other.chapter4;
public class Page218
{
// 把n个盘子从from借助mid移动到to
public static void p(int n, String from, String mid, String to)
{
if(n == 1)
{
System.out.println(from + " -> " + to);
}
else
{
p(n - 1, from, to, mid);
p(1, from, mid, to);
p(n - 1, mid, from, to);
}
}
public static void h(int n)
{
if(n > 0)
{
p(n, "left", "mid", "right");
}
}
public static void main(String[] args)
{
h(3);
}
}
<file_sep>/src/other/chapter1/Page5.java
package other.chapter1;
import java.util.ArrayDeque;
import java.util.Deque;
/*
* 两个栈实现一个队列 实现add poll peek
*/
public class Page5
{
public static void main(String[] args)
{
TwoStackQueue que = new TwoStackQueue();
que.add(1);
que.add(2);
que.add(3);
System.out.println(que.poll());
System.out.println(que.poll());
que.add(10);
System.out.println(que.poll());
que.add(5);
System.out.println(que.poll());
System.out.println(que.poll());
System.out.println(que.poll());
}
}
class TwoStackQueue
{
private Deque<Integer> stackPush = new ArrayDeque<>();
private Deque<Integer> stackPop = new ArrayDeque<>();
//把push栈的数据往pop栈中倒
private void push2pop()
{
while(!stackPush.isEmpty())
{
stackPop.push(stackPush.pop());
}
}
public void add(Integer num)
{
stackPush.push(num);
if(stackPop.isEmpty())
{
this.push2pop();
}
}
public Integer poll()
{
if(stackPop.isEmpty() && stackPush.isEmpty())
{
throw new RuntimeException("queue is empty");
}
Integer num = stackPop.pop();
if(stackPop.isEmpty())
{
this.push2pop();
}
return num;
}
public Integer peek()
{
if(stackPop.isEmpty() && stackPush.isEmpty())
{
throw new RuntimeException("queue is empty");
}
Integer num = stackPop.peek();
if(stackPop.isEmpty())
{
this.push2pop();
}
return num;
}
}
<file_sep>/src/hard/Q145.java
package hard;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.List;
/*
* 给定一个二叉树,返回它的 后序 遍历。
* 非递归思路:
* 非递归先序遍历是 根 左 右的顺序
* 那么我们可以根据非递归先序遍历 改出一个是 根 右 左的遍历顺序
* 只不过在本该打印的时候不打印, 而是把这个节点放入辅助栈中
* 知道遍历完成
* 最后再从辅助栈全部pop出来 就是 左 右 根的顺序 即后续遍历
*/
public class Q145
{
private List<Integer> res = new ArrayList<>();
// public List<Integer> postorderTraversal(TreeNode root) {
// if(root == null)
// {
// return res;
// }
// fun(root);
// return res;
// }
// public void fun(TreeNode cur)
// {
// if(cur != null)
// {
// fun(cur.left);
// fun(cur.right);
// res.add(cur.val);
// }
// }
public List<Integer> postorderTraversal(TreeNode root) {
if(root == null)
{
return res;
}
TreeNode cur = root;
Deque<TreeNode> stack = new ArrayDeque<>();
Deque<TreeNode> help = new ArrayDeque<>();
stack.push(cur);
while(!stack.isEmpty())
{
cur = stack.pop();
help.push(cur);
if(cur.left != null)
{
stack.push(cur.left);
}
if(cur.right != null)
{
stack.push(cur.right);
}
}
while(!help.isEmpty())
{
res.add(help.pop().val);
}
return res;
}
public static void main(String[] args)
{
}
public class TreeNode
{
int val;
TreeNode left;
TreeNode right;
TreeNode(int x)
{
val = x;
}
}
}
<file_sep>/src/hard/Q25.java
package hard;
//给出一个链表,每 k 个节点一组进行翻转,并返回翻转后的链表。
//k 是一个正整数,它的值小于或等于链表的长度。如果节点总数不是 k 的整数倍,那么将最后剩余节点保持原有顺序
//见Page74.java 觉得有点儿难
public class Q25
{
}
<file_sep>/src/other/chapter5/Page258.java
package other.chapter5;
public class Page258
{
public static void main(String[] args)
{
System.out.println(Page258.getCountString("aaabbfssa"));
}
public static String getCountString(String str)
{
char[] chs = str.toCharArray();
String res = chs[0] + "";
int num = 1;
for(int i = 1; i < chs.length; i++)
{
if(chs[i] == chs[i - 1])
{
num++;
}
else
{
res = concat(res, String.valueOf(num), String.valueOf(chs[i]));
num = 1;
}
}
return concat(res, String.valueOf(num), "");
}
public static String concat(String s1, String num, String s3)
{
return s1 + "_" + num + (s3.equals("") ? "" : "_") + s3;
}
}
<file_sep>/src/easy/Q654.java
package easy;
//翻转一棵二叉树。
public class Q654
{
public TreeNode invertTree(TreeNode root)
{
fun(root);
return root;
}
public void fun(TreeNode cur)
{
if (cur != null)
{
fun(cur.left);
fun(cur.right);
TreeNode temp = cur.left;
cur.left = cur.right;
cur.right = temp;
}
}
class TreeNode
{
int val;
TreeNode left;
TreeNode right;
TreeNode(int x)
{
val = x;
}
}
}
<file_sep>/src/jzoffer/VerifySquenceOfBST.java
package jzoffer;
public class VerifySquenceOfBST
{
public static void main(String[] args)
{
VerifySquenceOfBST obj = new VerifySquenceOfBST();
obj.verifySquenceOfBST(new int[] {1, 3, 2, 4, 5, 6});
}
public boolean verifySquenceOfBST(int[] sequence)
{
if(sequence == null || sequence.length == 0)
{
return false;
}
if(sequence.length == 1)
{
return true;
}
return p(sequence, 0, sequence.length - 1);
}
public boolean p(int[] seq, int start, int end)
{
// 说明子树为空
if(start >= end)
{
return true;
}
int head = seq[end];
// 找到左右子树的分界点
// 右子树的第一个值
int right;
for(right = start; right < end; right++)
{
if(seq[right] > head)
{
break;
}
}
// 遍历右子树, 如果有一个数比head小, 就返回false
for(int i = right; i < end; i++)
{
if(seq[i] < head)
{
return false;
}
}
// 判断左右子树是不是bst
return p(seq, start, right - 1) && p(seq, right, end - 1);
}
}
<file_sep>/src/medium/Q46.java
package medium;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
//给定一个没有重复数字的序列,返回其所有可能的全排列。
public class Q46
{
// private List<List<Integer>> res = new ArrayList<List<Integer>>();
// public List<List<Integer>> permute(int[] nums) {
// p(nums, 0, nums.length);
// return res;
// }
// public void p(int[] nums, int cur, int len)
// {
// //cur == len - 1也行
// if(cur == len)
// {
// List<Integer> list = new ArrayList<>();
// for (int i : nums)
// {
// list.add(i);
// }
// res.add(list);
// return;
// }
// for(int i = cur; i < len; i++)
// {
// swap(nums, cur, i);
// p(nums, cur + 1, len);
// swap(nums, cur, i);
// }
// }
// public void swap(int[] arr, int i, int j)
// {
// int temp = arr[i];
// arr[i] = arr[j];
// arr[j] = temp;
// }
private List<List<Integer>> res = new ArrayList<List<Integer>>();
private Set<Integer> visit = new HashSet<>();
private int[] box;
public List<List<Integer>> permute(int[] nums) {
box = new int[nums.length];
dfs(0, nums);
return res;
}
//dfs解法
public void dfs(int cur, int nums[])
{
if(cur == nums.length)
{
List<Integer> list = new ArrayList<>();
for (int i : box)
{
list.add(i);
}
res.add(list);
return;
}
for (int i = 0; i < nums.length; i++)
{
if(!visit.contains(nums[i]))
{
box[cur] = nums[i];
visit.add(nums[i]);
dfs(cur + 1, nums);
visit.remove(nums[i]);
}
}
}
}
<file_sep>/src/hard/Q72.java
package hard;
// Q72
public class Q72
{
public int minDistance(String word1, String word2)
{
if(word1 == null || word2 == null)
{
return 0;
}
return minCost1(word1, word2, 1, 1, 1);
}
public int minCost1(String str1, String str2, int ic, int dc, int rc)
{
char[] chs1 = str1.toCharArray();
char[] chs2 = str2.toCharArray();
int[][] dp = new int[chs1.length + 1][chs2.length + 1];
for(int i = 1; i < dp.length; i++)
{
dp[i][0] = i * dc;
}
for(int j = 1; j < dp[0].length; j++)
{
dp[0][j] = j * ic;
}
for(int i = 1; i < dp.length; i++)
{
for(int j = 1; j < dp[0].length; j++)
{
if(chs1[i - 1] == chs2[j - 1])
{
dp[i][j] = dp[i - 1][j - 1];
}
else
{
dp[i][j] = dp[i - 1][j - 1] + rc;
}
dp[i][j] = Math.min(dp[i][j], dc + dp[i - 1][j]);
dp[i][j] = Math.min(dp[i][j], ic + dp[i][j - 1]);
}
}
return dp[dp.length - 1][dp[0].length - 1];
}
}
<file_sep>/src/other/chapter8/Page372.java
package other.chapter8;
public class Page372
{
public void printHalfMajor(int[] arr)
{
int cand = 0;
int times = 0;
for(int i = 0; i < arr.length; i++)
{
if(times == 0)
{
cand = arr[i];
times = 1;
}
else
{
if(cand == arr[i])
{
times++;
}
else
{
times--;
}
}
}
times = 0;
for(int i = 0; i < arr.length; i++)
{
if(cand == arr[i])
{
times++;
}
}
if(times > arr.length / 2)
{
System.out.println(cand);
}
else
{
System.out.println("no such number");
}
}
}
<file_sep>/src/util/sort/HeapSort.java
package util.sort;
import java.util.Arrays;
import util.DSQ;
public class HeapSort
{
public static void heapSort(int[] arr)
{
if(arr == null || arr.length < 2)
{
return;
}
//每次往堆中加入一个数, 完成上浮操作, 构造大根堆
for(int i = 0; i < arr.length; i++)
{
heapInsert(arr, i);
}
int heapSize = arr.length;
DSQ.swap(arr, 0, --heapSize);
while(heapSize > 0)
{
heapify(arr, 0, heapSize);
DSQ.swap(arr, 0, --heapSize);
}
}
public static void heapInsert(int[] arr, int index)
{
int fatherIndex = (index - 1) / 2;
while(arr[index] > arr[fatherIndex])
{
DSQ.swap(arr, index, fatherIndex);
index = fatherIndex;
fatherIndex = (index - 1) / 2;
}
}
/*
在[0, heapSize-1]上形成了大根堆, index位置的数发生了变化
进行heapify操作, 使[0, heapSize-1]位置上仍是大根堆
*/
public static void heapify(int[] arr, int index, int heapSize)
{
int leftIndex = 2 * index + 1;
while(leftIndex < heapSize)
{
int rightIndex = leftIndex + 1;
int maxIndex;
//如果当前index有右儿子, 并且右儿子还比左儿子大
if(rightIndex < heapSize && arr[rightIndex] > arr[leftIndex])
{
maxIndex = rightIndex;
}
else//当前index没有右儿子, 或者又有儿子但右儿子小于左儿子
{
maxIndex = leftIndex;
}
maxIndex = arr[maxIndex] > arr[index] ? maxIndex : index;
//index是三者中的最大值, 那么就不用换了
if(maxIndex == index)
{
break;
}
DSQ.swap(arr, index, maxIndex);
index = maxIndex;
leftIndex = 2 * index + 1;
}
}
public static void main(String[] args)
{
for(int i = 0; i < 1000000; i++)
{
int[] arr = DSQ.generateRandomArray(100, 50);
int[] arr1 = DSQ.copyArray(arr);
int[] arr2 = DSQ.copyArray(arr);
HeapSort.heapSort(arr1);
DSQ.okMethod(arr2);
if(!DSQ.isEqual(arr1, arr2))
{
System.out.println("sb");
System.out.println("yuan: " + Arrays.toString(arr));
System.out.println("error: " + Arrays.toString(arr1));
break;
}
// System.out.println(Arrays.toString(arr1));
}
System.out.println("finish...");
}
}
<file_sep>/src/jzoffer/MoreThanHalfNum.java
package jzoffer;
public class MoreThanHalfNum
{
public static void main(String[] args)
{
MoreThanHalfNum obj = new MoreThanHalfNum();
int[] arr = {1, 2, 3, 2, 2, 2, 5, 4, 2};
obj.moreThanHalfNum_Solution(arr);
}
public int moreThanHalfNum_Solution(int[] arr)
{
if(arr == null || arr.length == 0)
{
return 0;
}
int res = arr[0];
// 记录res出现的次数
int times = 1;
for(int i = 1; i < arr.length; i++)
{
// 如果减到0了就记录下一个
if(times == 0)
{
res = arr[i];
times = 1;
}
else
{
// 如果这个数和res相同次数就+1
// 因为有一个数出现的次数大于一半, 所以加一要多一些
if(arr[i] == res)
{
times++;
}
else
{
times--;
}
}
}
times = 0;
for(int i = 0; i < arr.length; i++)
{
if(res == arr[i])
{
times++;
}
}
return times > arr.length / 2 ? res : 0;
}
}
<file_sep>/src/medium/Q47.java
package medium;
import java.util.ArrayList;
import java.util.List;
//给定一个可包含重复数字的序列,返回所有不重复的全排列。
public class Q47
{
private List<List<Integer>> res = new ArrayList<List<Integer>>();
public List<List<Integer>> permuteUnique(int[] nums) {
p(nums, 0, nums.length);
return res;
}
public boolean isSwap(int nums[], int i, int len)
{
for(int j = i + 1; j < len; j++)
{
if(nums[j] == nums[i])
{
return false;
}
}
return true;
}
public void p(int[] nums, int cur, int len)
{
//cur == len - 1也行
if(cur == len)
{
List<Integer> list = new ArrayList<>();
for (int i : nums)
{
list.add(i);
}
res.add(list);
return;
}
for(int i = cur; i < len; i++)
{
if(isSwap(nums, i, len))
{
swap(nums, cur, i);
p(nums, cur + 1, len);
swap(nums, cur, i);
}
}
}
public void swap(int[] arr, int i, int j)
{
int temp = arr[i];
arr[i] = arr[j];
arr[j] = temp;
}
}
<file_sep>/src/other/chapter1/Page31.java
package other.chapter1;
import java.util.ArrayDeque;
import java.util.Deque;
//最大值减去最小值小于或等于num的子数组的数量
public class Page31
{
public static void main(String[] args)
{
int[] arr =
{ 6, 1, 4 };
System.out.println(getNum(arr, 3));
}
public static int getNum(int[] arr, int num)
{
Deque<Integer> qmax = new ArrayDeque<>();
Deque<Integer> qmin = new ArrayDeque<>();
int i = 0; // arr[i..j]
int j = 0;
int res = 0;
while (i < arr.length)
{
while (j < arr.length)
{
while (!qmin.isEmpty() && arr[qmin.peekLast()] >= arr[j])
{
qmin.pollLast();
}
qmin.addLast(j);
while (!qmax.isEmpty() && arr[qmax.peekLast()] <= arr[j])
{
qmax.pollLast();
}
qmax.addLast(j);
if (arr[qmax.peekFirst()] - arr[qmin.peekFirst()] > num)
{
break;
}
j++;
}
res += (j - i);
// 窗口左边界向右走一个位置
i++;
// 处理过期
if (qmax.peekFirst() == i - 1)
{
qmax.pollFirst();
}
if (qmin.peekFirst() == i - 1)
{
qmin.pollFirst();
}
}
return res;
}
}
<file_sep>/src/other/chapter5/Page267.java
package other.chapter5;
import java.util.Arrays;
public class Page267
{
public static void main(String[] args)
{
char[] chas = "you are stupid".toCharArray();
Page267.rotateWord(chas);
// Page267.reverse(chas, 0, chas.length - 1);
System.out.println(Arrays.toString(chas));
String s = "a v c d";
System.out.println(Arrays.toString(s.split(" ")));
System.out.print(" ".trim());
System.out.print("zzz");
}
public static void rotateWord(char[] chas)
{
// 先把整个数组逆序
reverse(chas, 0, chas.length - 1);
// 再对单词逆序
int l = 0;
int r = 0;
for(int i = 0; i < chas.length; i++)
{
if(chas[i] == ' ')
{
r = i - 1;
reverse(chas, l, r);
l = i + 1;
}
if(i + 1 == chas.length)
{
reverse(chas, l, i);
}
}
}
// 对[start, end]上的数组逆序
public static void reverse(char[] chas, int start, int end)
{
int i = start;
int j = end;
while(i < j)
{
char temp = chas[j];
chas[j] = chas[i];
chas[i] = temp;
i++;
j--;
}
}
public static void rotate1(char[] chas, int size)
{
reverse(chas, 0, size - 1);
reverse(chas, size, chas.length - 1);
reverse(chas, 0, chas.length - 1);
}
}
<file_sep>/src/other/chapter7/Page349.java
package other.chapter7;
public class Page349
{
public static void main(String[] args)
{
}
// 输入0返回1, 输入1返回0
public int flip(int n)
{
return n ^ 1;
}
// n >= 0返回1, 否则返回0
public int sign(int n)
{
return flip((n >> 31) & 1);
}
// 不考虑a - b可能会溢出
public int getMax1(int a, int b)
{
int c = a - b;
int scA = sign(c);
int scB = flip(scA);
return scA * a + scB * b;
}
// 考虑溢出情况
public int getMax2(int a, int b)
{
int sa = sign(a);
int sb = sign(b);
int sc = sign(a - b);
// 只有a和b不是同符号的时候a - b才可能溢出
int difSab = sa ^ sb;
int sameSab = flip(difSab);
int returnA = difSab * sa + sameSab * sc;
int returnB = flip(returnA);
return returnA * a + returnB * b;
}
}
<file_sep>/src/jzoffer/GetMin.java
package jzoffer;
import java.util.ArrayDeque;
import java.util.Deque;
public class GetMin
{
private Deque<Integer> dataStack = new ArrayDeque<>();
private Deque<Integer> minStack = new ArrayDeque<>();
// 压入规则: datastack无脑压
// minstack为空压, 不为空判断当前要压入的数是不是比minstack的栈顶还小
// 是的话就压
public void push(int node)
{
dataStack.push(node);
if (minStack.isEmpty())
{
minStack.push(node);
}
else
{
if (node <= minStack.peek())
{
minStack.push(node);
}
}
}
// 出栈规则, dataStack无脑出
// 如果pop的数比minstack栈顶大, 就完事, 说明这一次dataStack的pop没有影响到当前栈的最小值
// 如果等于栈顶, 就把minStack也pop一次, 表示当前栈的最小值出去了
public void pop()
{
Integer num = dataStack.pop();
if(num == minStack.peek())
{
minStack.pop();
}
}
// 怪异
public int top()
{
return dataStack.peek();
}
public int min()
{
return minStack.peek();
}
}
<file_sep>/src/util/stack_queue/GetMinStack.java
package util.stack_queue;
import java.util.ArrayDeque;
/*
* 实现一个可以随时返回栈内最小元素的栈 O(1)
*/
public class GetMinStack
{
private ArrayDeque<Integer> dataStack = new ArrayDeque<>();
private ArrayDeque<Integer> helpStack = new ArrayDeque<>();
public void push(int num)
{
if(helpStack.size() == 0)
{
helpStack.push(num);
}
else
{
int min = Math.min(helpStack.peek(), num);
helpStack.push(min);
}
dataStack.push(num);
}
public int pop()
{
helpStack.pop();
return dataStack.pop();
}
public int getMin()
{
return helpStack.peek();
}
@Override
public String toString()
{
return "GetMinStack [dataStack=" + dataStack + ", helpStack=" + helpStack + "]";
}
public static void main(String[] args)
{
GetMinStack s = new GetMinStack();
s.push(1);
s.push(2);
s.push(10);
s.push(50);
s.push(0);
System.out.println(s);
System.out.println(s.getMin());
}
}
| e21005fb9369c8e8af16d97975ae77bab60f9ee7 | [
"Java"
] | 83 | Java | bekyiu/LeetCode | 8e06ce60e9d86ab80daa35a9b22f6dd482f9f1b4 | ac5383cae3b7ed131e7164b53d029621a4966d33 |
refs/heads/main | <repo_name>vishalnair198/react_mob<file_sep>/src/components/pages/events.js
import React from 'react'
import Navbar from '../Navbar';
import './events.css';
function Events() {
return (
<div className="events">
<div class="nar">
<Navbar/>
</div>
</div>
)
}
export default Events;
| 95d6b8369fa64b2746a7769b81a09216235df55a | [
"JavaScript"
] | 1 | JavaScript | vishalnair198/react_mob | 3b2d27a2e01a69c0f674c16dba8dc5c0eaef2859 | 3fd3a463bfa5213f9cdfca62d4b05759c633e237 |
refs/heads/main | <file_sep>package devjs.rnd.iagi.bankbatchconverter.usecase;
import devjs.rnd.iagi.bankbatchconverter.config.ApplicationConfig;
import devjs.rnd.iagi.bankbatchconverter.model.BankBatch;
import devjs.rnd.iagi.bankbatchconverter.service.BankBatchService;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.*;
public class BankBatchMCMUsecase {
BankBatchService bbs = new BankBatchService();
public void convertToBankBatch(String fileName,String filePath, String defaultJenisTransaksi){
Collection<BankBatch> cbb = bbs.readBankBatchXLS(filePath);
System.out.println("BankBatch file terproses, terdapat:"+cbb.size()+" data terfilter");
for (BankBatch bb : cbb){
if(bb.getEmail() == null || bb.getEmail().isEmpty()){
}else if(bb.getNomorRekening() == null || bb.getNomorRekening().isEmpty()){
}else if(bb.getNamaPemegangRekening() == null || bb.getNamaPemegangRekening().isEmpty()){
}else{
if(bb.getJenisTransfer().equalsIgnoreCase("LBU") && bb.getNamaBank().equalsIgnoreCase("MANDIRI")){
bb.addNetDeviden(2900);
bb.setJenisTransfer("IBU");
}else if(bb.getJenisTransfer().equalsIgnoreCase("IBU") && !bb.getNamaBank().equalsIgnoreCase("MANDIRI") && defaultJenisTransaksi.equalsIgnoreCase("LBU")){
bb.subNetDeviden(2900);
bb.setJenisTransfer("LBU");
} else if(bb.getJenisTransfer().equalsIgnoreCase("OBU") && bb.getNamaBank().equalsIgnoreCase("MANDIRI")){
bb.addNetDeviden(6500);
bb.setJenisTransfer("IBU");
}else if(bb.getJenisTransfer().equalsIgnoreCase("IBU") && !bb.getNamaBank().equalsIgnoreCase("MANDIRI") && defaultJenisTransaksi.equalsIgnoreCase("OBU")){
bb.subNetDeviden(6500);
bb.setJenisTransfer("LBU");
}
bb.setKodeKliringBank(ApplicationConfig.KLIRING_BANK.get(bb.getNamaBank().toUpperCase()));
}
}
bbs.toBatchFileCSV(cbb, ApplicationConfig.getOutputFolder()+"/"+fileName.toLowerCase());
System.out.println("MCM file dibuat.");
}
}
<file_sep>package devjs.rnd.iagi.bankbatchconverter.service;
import devjs.rnd.iagi.bankbatchconverter.config.ApplicationConfig;
import devjs.rnd.iagi.bankbatchconverter.model.BankBatch;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;
import org.apache.poi.hssf.usermodel.HSSFWorkbook;
import org.apache.poi.openxml4j.exceptions.InvalidFormatException;
import org.apache.poi.ss.usermodel.*;
import org.apache.poi.xssf.usermodel.XSSFFont;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.*;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.text.SimpleDateFormat;
import java.util.*;
public class BankBatchService {
private static Logger LOG = LoggerFactory
.getLogger(BankBatchService.class);
private StringBuilder batchFileLog;
public void toBatchFileCSV(Collection<BankBatch> lbb, String filePath) {
List<BankBatch> filteredBB = new ArrayList<BankBatch>();
batchFileLog = new StringBuilder();
this.batchFileLog.append("\n\nBankBatch Log:");
int totalTransferred = 0, nRawTotal=0;
for (BankBatch tempBA : lbb) {
nRawTotal++;
if (tempBA.getNomorRekening() == null || tempBA.getNomorRekening().isEmpty()) {
batchFileLog.append("\nInvestor["+tempBA.getNamaInvestor()+"] nomor rekening kosong.");
} else if (tempBA.getEmail() == null || tempBA.getEmail().isEmpty()) {
batchFileLog.append("\nInvestor["+tempBA.getNamaInvestor()+"] nomor email kosong.");
} else if (tempBA.getNamaPemegangRekening() == null || tempBA.getNamaPemegangRekening().isEmpty()) {
batchFileLog.append("\nInvestor["+tempBA.getNamaInvestor()+"] nomor nama-pemegang-rekening kosong.");
} else if (tempBA.getNetDeviden() < ApplicationConfig.MINIMUM_TRANSFER) {
batchFileLog.append("\nInvestor["+tempBA.getNamaInvestor()+"] net deviden kurang dari minimum transfer.");
} else {
filteredBB.add(tempBA);
totalTransferred += tempBA.getNetDeviden();
}
}
batchFileLog.append("\n\nTotal data terfilter "+filteredBB.size()+" dari "+nRawTotal);
System.out.println(batchFileLog+"\n\n\n");
String[] header = new String[44];
Arrays.fill(header, "");
header[0] = "P";
header[1] = ApplicationConfig.DATE_FORMAT.format(Calendar.getInstance().getTime());
header[2] = "1260007555633";
header[3] = String.valueOf(filteredBB.size());
header[4] = String.valueOf(totalTransferred);
try (
BufferedWriter writer = Files.newBufferedWriter(Paths.get(filePath));
CSVPrinter csvPrinter = new CSVPrinter(writer, CSVFormat.DEFAULT);
) {
csvPrinter.printRecord(header);
for (BankBatch ba : filteredBB) {
csvPrinter.printRecord(ba.asArrays());
}
csvPrinter.flush();
} catch (IOException e) {
e.printStackTrace();
}
}
public void toXLS(Collection<BankBatch> lbb, String filePath) {
Workbook workbook = new XSSFWorkbook();
Sheet sheet = workbook.createSheet("basil");
sheet.setColumnWidth(0, 6000);
sheet.setColumnWidth(1, 4000);
Row header = sheet.createRow(0);
CellStyle headerStyle = workbook.createCellStyle();
headerStyle.setFillForegroundColor(IndexedColors.GREY_25_PERCENT.getIndex());
headerStyle.setFillPattern(FillPatternType.SOLID_FOREGROUND);
XSSFFont font = ((XSSFWorkbook) workbook).createFont();
font.setFontName("Arial");
font.setFontHeightInPoints((short) 12);
font.setBold(true);
headerStyle.setFont(font);
Cell headerCell = null;
int iHeaderCell = 0;
for (String headerName : new String[]{"No", "Nama Investor", "Nomor Rekening", "Nama Pemegang Rekening ", "Kode Investor",
"Mata Uang", "Net Deviden", "N Deviden", "Berita Transfer", "Jenis Transfer", "Kode Kliring Bank",
"Nama Bank", "Alamat Bank", "Berita Tambahan", "Is Berita Email", "Email", "Kewarganegaraan Pemegang Rekening", "Pengurangan Nilai", "Charge Instruction", "Tipe Beneficiary"}) {
headerCell = header.createCell(iHeaderCell++);
headerCell.setCellValue(headerName);
headerCell.setCellStyle(headerStyle);
}
CellStyle style = workbook.createCellStyle();
style.setWrapText(true);
CellStyle coralStyle = workbook.createCellStyle();
coralStyle.setFillForegroundColor(IndexedColors.CORAL.getIndex());
coralStyle.setFillPattern(FillPatternType.SOLID_FOREGROUND);
CellStyle aquaStyle = workbook.createCellStyle();
aquaStyle.setFillForegroundColor(IndexedColors.AQUA.getIndex());
aquaStyle.setFillPattern(FillPatternType.SOLID_FOREGROUND);
int iRows = 2, i = 0;
Row row;
Cell tcell;
for (BankBatch bb : lbb) {
//LOG.info(bb.toString());
row = sheet.createRow(iRows++);
iHeaderCell = 0;
tcell = row.createCell(iHeaderCell++);
tcell.setCellValue(++i);
tcell.setCellStyle(style);
tcell = row.createCell(iHeaderCell++);
tcell.setCellValue(bb.getNamaInvestor());
tcell.setCellStyle(style);
tcell = row.createCell(iHeaderCell++);
tcell.setCellValue(bb.getNomorRekening());
tcell.setCellStyle(style);
if(bb.getNomorRekening() == null || bb.getNomorRekening().isEmpty()){
tcell.setCellStyle(aquaStyle);
}else {
tcell.setCellStyle(style);
}
tcell = row.createCell(iHeaderCell++);
tcell.setCellValue(bb.getNamaPemegangRekening());
if(bb.getNamaPemegangRekening() == null || bb.getNamaPemegangRekening().isEmpty()){
tcell.setCellStyle(aquaStyle);
}else if (bb.getNamaPemegangRekening().matches("[a-zA-Z ]+")) {
tcell.setCellStyle(style);
} else {
tcell.setCellStyle(coralStyle);
}
tcell = row.createCell(iHeaderCell++);
tcell.setCellValue(bb.getKodeInvestor());
tcell.setCellStyle(style);
tcell = row.createCell(iHeaderCell++);
tcell.setCellValue(bb.getMataUang());
tcell.setCellStyle(style);
tcell = row.createCell(iHeaderCell++);
tcell.setCellValue(bb.getNetDeviden());
tcell.setCellStyle(style);
tcell = row.createCell(iHeaderCell++);
tcell.setCellValue(bb.getNDeviden());
tcell.setCellStyle(style);
tcell = row.createCell(iHeaderCell++);
tcell.setCellValue(bb.getBeritaTransfer());
tcell.setCellStyle(style);
tcell = row.createCell(iHeaderCell++);
tcell.setCellValue(bb.getJenisTransfer());
tcell.setCellStyle(style);
tcell = row.createCell(iHeaderCell++);
tcell.setCellValue(bb.getKodeKliringBank());
tcell.setCellStyle(style);
tcell = row.createCell(iHeaderCell++);
tcell.setCellValue(bb.getNamaBank());
tcell.setCellStyle(style);
tcell = row.createCell(iHeaderCell++);
tcell.setCellValue(bb.getAlamatBank());
tcell.setCellStyle(style);
tcell = row.createCell(iHeaderCell++);
tcell.setCellValue(bb.getBeritaTransferTambahan());
tcell.setCellStyle(style);
tcell = row.createCell(iHeaderCell++);
tcell.setCellValue(bb.getIsBeritaEmail());
tcell.setCellStyle(style);
tcell = row.createCell(iHeaderCell++);
tcell.setCellValue(bb.getEmail());
tcell.setCellStyle(style);
tcell = row.createCell(iHeaderCell++);
tcell.setCellValue(bb.getKewarganegaraanPemegangRekening());
tcell.setCellStyle(style);
tcell = row.createCell(iHeaderCell++);
tcell.setCellValue(bb.getPenguranganNilai());
tcell.setCellStyle(style);
tcell = row.createCell(iHeaderCell++);
tcell.setCellValue(bb.getChargeInstruction());
tcell.setCellStyle(style);
tcell = row.createCell(iHeaderCell++);
tcell.setCellValue(bb.getBeneficiaryType());
tcell.setCellStyle(style);
}
File currDir = new File(".");
String path = currDir.getAbsolutePath();
//path.substring(0, path.length() - 1) + "temp.xlsx";
FileOutputStream outputStream = null;
try {
outputStream = new FileOutputStream(filePath);
workbook.write(outputStream);
workbook.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
public Collection<BankBatch> readBankBatchXLS(String fileLocation) {
Collection<BankBatch> result = new ArrayList<BankBatch>();
FileInputStream file = null;
Workbook workbook;
try {
file = new FileInputStream(new File(fileLocation));
workbook = WorkbookFactory.create(file);
Sheet sheet = workbook.getSheetAt(0);
BankBatch bb;
int i = 0;
String cell1st;
for (Row row : sheet) {
cell1st = getValue(row.getCell(0));
if (cell1st != null && !cell1st.equalsIgnoreCase("No") && !cell1st.isEmpty()) {
bb = new BankBatch();
bb.setNamaInvestor(getValue(row.getCell(1)));
bb.setNomorRekening(getValue(row.getCell(2)));
bb.setNamaPemegangRekening(getValue(row.getCell(3)));
bb.setKodeInvestor(getValue(row.getCell(4)));
bb.setMataUang(getValue(row.getCell(5)));
bb.setNetDeviden(getValue(row.getCell(6)), i);
bb.setNDeviden(getValue(row.getCell(7)), i);
bb.setBeritaTransfer(getValue(row.getCell(8)));
bb.setJenisTransfer(getValue(row.getCell(9)));
bb.setKodeKliringBank(getValue(row.getCell(10)));
bb.setNamaBank(getValue(row.getCell(11)));
bb.setAlamatBank(getValue(row.getCell(12)));
bb.setBeritaTransferTambahan(getValue(row.getCell(13)));
bb.setIsBeritaEmail(getValue(row.getCell(14)));
bb.setEmail(getValue(row.getCell(15)));
bb.setKewarganegaraanPemegangRekening(getValue(row.getCell(16)));
bb.setPenguranganNilai(getValue(row.getCell(17)), i);
bb.setChargeInstruction(getValue(row.getCell(18)));
bb.setBeneficiaryType(getValue(row.getCell(19)));
//System.out.println(bb.toString());
result.add(bb);
}
i++;
}
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException f) {
f.printStackTrace();
} catch (InvalidFormatException g) {
g.printStackTrace();
}
return result;
}
private String getValue(Cell cell) {
DataFormatter formatter = new DataFormatter();
return formatter.formatCellValue(cell).trim();
}
}
<file_sep>artifactId=bankbatch-converter
groupId=devjs.rnd.iagi
version=0.0.1-SNAPSHOT
| 7f7e05ea7e35a86e6392c3169c2ddcc706f93b3b | [
"Java",
"INI"
] | 3 | Java | salirajr/bankbatch-converter | 37a16f5f2d01176b894adfc8d0e2b064679c41f3 | 0fe65f381688279e982648355880bbe0905ed4c7 |
refs/heads/master | <file_sep>#!c:/python34/python.exe
# -*- coding: utf-8 -*-
#COPYRIGHT TO:
#<NAME> <physics and java implementation>
#<NAME> <java implementation>
#<NAME> <this python port of #gravity>
import math as m
from time import sleep
import os
def import_config(infile):
objects = []
file_dir = os.getcwd() +"\\"+ infile
f = open(file_dir,"r+")
for line in f:
try:
name, mass, fi, x, y, vx, vy, ax, ay = line.split(" ", 8)
objects.append(Object(name, mass, fi, x, y, vx, vy, ax, ay))
except ValueError:
try:
name, mass, fi, x, y = line.split(" ", 4)
objects.append(Object(name, mass, fi, x, y ))
except ValueError:
name, mass, fi = line.split(" ", 2)
objects.append(Object(name, mass, fi))
return objects
class Stale:
G = 6.674083*10**-11
resTime = 10.0 #sekundy
loopTimeout = 10000 # mikrosekundy
class Object:
def __init__(self, name, mass, fi, x=0, y=0, vx=0, vy=0, ax=0, ay=0):
self.name = name
self.mass = float(mass)
self.fi = float(fi)
self.x = float(x)
self.y = float(y)
self.vx = float(vx)
self.vy = float(vy)
self.ax = float(ax)
self.ay = float(ay)
def __str__(self):
return self.name+" (x:"+str(self.x)+" y:"+str(self.y)+", mass:"+str(self.mass) +")"
def __repr__(self):
return self.name+" (x:"+str(self.x)+" y:"+str(self.y)+", mass:"+str(self.mass) +")"
class ObjectManager():
def __init__(self, obj):
self.obj = obj
def cartesianDist(self, b):
dist = m.sqrt(pow(self.obj.x-b.x,2)+pow(self.obj.y-b.y,2))
if(dist==0):
print("[+]ERROR: odległosc kartezjanska(dzielenie przez 0)")
return 10e-20
else:
return dist
def checkCollision(self, b):
return self.cartesianDist(b) <= self.obj.fi+b.fi
def changeMoment(self, b):
self.obj.vx += (b.mass*b.vx-self.obj.vx*b.mass)/(self.obj.mass+b.mass)
self.obj.vy += (b.mass*b.vy-self.obj.vy*b.mass)/(self.obj.mass+b.mass)
def acceleration(self, b):
self.obj.ax += (self.cos(b)*Stale.G*b.mass)/(self.cartesianDist(b)**2)
self.obj.ay += (self.sin(b)*Stale.G*b.mass)/(self.cartesianDist(b)**2)
def ifColide(self, b):
if self.checkCollision(b):
if self.obj.mass >= b.mass:
self.changeMoment(b)
self.obj.mass+=b.mass
self.fi = m.sqrt(self.obj.fi**2+b.fi**2)
return [b,b.name+"=>"+self.obj.name]
else:
self.changeMoment(self.obj)
b.mass+=self.obj.mass
b.fi = m.sqrt(self.obj.fi**2+b.fi**2)
return [self.obj,self.obj.name+"=>"+b.name]
return None
def velocity(self):
self.obj.vx = self.obj.vx + Stale.resTime/10**6*self.obj.ax
self.obj.vy = self.obj.vy + Stale.resTime/10**6*self.obj.ay
def sin(self, b):
return (b.y-self.obj.y)/self.cartesianDist(b)
def cos(self, b):
return (b.x-self.obj.x)/self.cartesianDist(b)
def position(self):
self.obj.x += self.obj.vx
self.obj.y += self.obj.vy
def theOtherStuff(self):
self.velocity()
self.position()
#def output(self):
# return self.obj.name+" (x:"+str(self.obj.x)+" y:"+str(self.obj.y)+", mass:"+str(self.obj.mass) +")"
temp_objcts=[]
class Universe:
    """Holds every simulated body and drives the simulation loop."""

    def __init__(self, objcts):
        self.objcts = objcts

    def action(self):
        """Advance the simulation by one step.

        For each body: reset its acceleration, accumulate gravity from every
        other body, merge colliding pairs (the absorbed body is dropped from
        the universe), then integrate velocity and position.
        """
        for body in self.objcts:
            others = self.objcts[:]
            others.remove(body)
            body.ax = 0
            body.ay = 0
            for other in others:
                merged = ObjectManager(body).ifColide(other)
                if merged:
                    print("[+]BUM " + merged[1])
                    self.objcts.remove(merged[0])
                    for survivor in self.objcts:
                        print(survivor)
                    break
                ObjectManager(body).acceleration(other)
            ObjectManager(body).theOtherStuff()

    def START_THE_UNIVERSE(self):
        """Run action() forever until the user interrupts with Ctrl-C."""
        while True:
            try:
                self.action()
            except KeyboardInterrupt:
                break
# Initial bodies of the simulation.
# Argument order assumed from the Object class defined above:
# Object(name, mass, fi/radius, x, y) -- TODO confirm against Object.__init__.
planety=[
    Object('Slonce',3000E10,1000,0,700),
    Object('Merkury',300E15,100,1,0),
    Object('Mars',100111,100,2000,300),
    Object('ISIS',10123,100,3000,400),
    Object('Pandora',100233,100,3500,9000),
]
#planety = import_config('config.txt')
#Universe(planety).START_THE_UNIVERSE()
"Python"
] | 1 | Python | pikulak/gravity | 783797ed2a5294f70ea8e779895d987114feeb13 | 90efeb0ad3c06873871947e3b237061eefa90880 |
refs/heads/master | <repo_name>matgere/ussddynamic<file_sep>/backend/src/bo/exception/StatutException.php
<?php
namespace Exceptions;
/*
* 2SMOBILE
* ----------------------------------------
* @author Kiwi <<EMAIL>>
* @copyright 2006-2015 Kiwi/2SI Group
* @version 2.0.0
* @link http://www.kiwi.sn
* @link http://www.ssi.sn
* ----------------------------------------
*/
/**
 * Base class for exceptions whose message is a status string derived from an
 * exception code. Subclasses override getStatut() to map their code to a
 * (localized) status message.
 *
 * NOTE: the former `private $code` property and getMessage() override were
 * removed -- both were fatal errors in PHP: Exception::$code is protected
 * (it cannot be re-declared private) and Exception::getMessage() is final
 * (it cannot be overridden). The parent constructor already records both the
 * code and the status message, so getCode()/getMessage() keep working.
 *
 * @author ssi
 */
class StatutException extends \Exception{

    /**
     * @param int|null $code exception code used to resolve the status message
     */
    function __construct($code=null) {
        parent::__construct($this->getStatut(), $code, NULL);
    }

    /**
     * Resolve the status string corresponding to this exception's code.
     *
     * @return string
     */
    public function getStatut(){
        // Localiser le statut correspondant au code
        return "statut";
    }
}
<file_sep>/cookies.php
<?php
// Sign-in endpoint: validates the posted credentials and, on success, stores
// the user's id/login in cookies before redirecting into the application.
require_once 'lib/doctrine/vendor/autoload.php';
if(isset($_POST['login']))
    $login = $_POST['login'];
if(isset($_POST['password']))
    $password = $_POST['password'];
// NOTE(review): duplicated isset('password') check below; also, when 'login'
// is absent the script continues with $login undefined -- only a missing
// password triggers the redirect. Confirm whether login should be guarded too.
if(isset($_POST['password']))
    $password = $_POST['password'];
else {
    header("Location: index.php");
    exit;
}
$userManager = new User\UserManager();
// signIn() returns ['rc' => ..., 'infos' => ...]; 'infos' is -1 or 0 on
// failure. NOTE(review): the return code is never checked, so a failed login
// still falls through and indexes a non-array 'infos' value below -- verify.
$user = $userManager->signIn($login, $password);
$userInfos = $user['infos'];
$path="/";
$domainName = $_SERVER['SERVER_NAME'];
// Browsers require a null cookie domain when running on localhost.
if($domainName=='localhost')
    $domainName=null;
//$date_of_expiry = time() + (30 * 60);
$date_of_expiry = time() + (40 * 60); // session cookies valid for 40 minutes
// httponly = true so client-side scripts cannot read the session cookies.
setcookie("userId",strip_tags($userInfos['id']), $date_of_expiry, $path, $domainName,null,true);
setcookie("userLogin",strip_tags($userInfos['login']), $date_of_expiry, $path, $domainName,null,true);
//setcookie("userContactName",strip_tags($userInfos['mom_contact']), $date_of_expiry, $path, $domainName,null,true);
//setcookie("profilId",strip_tags($userInfos['profil_id']), $date_of_expiry, $path, $domainName,null,true);
?>
<!DOCTYPE html>
<html lang="en">
    <head>
    </head>
    <body >
        <script>
            //$(document).ready(function(){
            // Client-side redirect into the application once cookies are set.
            document.location.href='app/kussd/kmenu/';
            //  });
        </script>
    </body>
</html>
<?xml version='1.0' encoding='utf-8'?>
<!doctype html><html>
<head><meta charset='utf-8'>
<title>LANGUE</title>
</head><body>
<h3>Veuillez choisir une langue SVP</h3>
<a href="FRANCAIS.php?response=Français" accesskey="0" >Français</a><br/><a href="ANGLAIS.php?response=Anglais" accesskey="0" >Anglais</a><br/><a href="BAMBANANKA.php?response=Bambananka" accesskey="0" >Bambananka</a><br/>
</body></html>
<file_sep>/backend/src/bo/user/UserController.php
<?php
namespace User;
require_once '../../common/app.php';
use Log\Loggers as Logger;
use Exceptions\ConstraintException as ConstraintException;
use User\UserManager as UserManager;
//use Cabinet\ProfilManager as ProfilManager;
use User\User as User;
/**
 * HTTP action controller for user-account operations: sign-in/out, session
 * check, CRUD, (de)activation, and DataTables-style listing. The constructor
 * dispatches immediately on $_REQUEST['ACTION'].
 *
 * NOTE(review): this file lives in namespace User and never imports the
 * global Exception class, so every unqualified `catch (Exception $e)` /
 * `throw new Exception(...)` below resolves to the non-existent class
 * User\Exception -- those catches can never match and those throws would be
 * fatal "class not found" errors. Only `\Exception` and the imported
 * ConstraintException behave as intended.
 *
 * NOTE(review): $this->parameters is set to the .ini file *path* (a string),
 * yet is later indexed like an array ($this->parameters['KEY']); compare
 * MenuController, which calls parse_ini_file(). Looks like a real bug.
 */
class UserController extends \Bo\BaseAction implements \Bo\BaseController {

    private $logger;       // class-level logger
    private $userManager;  // data-access/business layer for users
    private $parameters;   // NOTE(review): ini *path*, not the parsed array

    /**
     * Dispatch the incoming request on its ACTION parameter.
     *
     * @param array $request typically $_REQUEST
     */
    public function __construct($request) {
        $this->logger = new Logger(__CLASS__);
        $this->userManager = new UserManager();
        $this->parameters = dirname(dirname(dirname(__FILE__))) . '/lang/trad_fr.ini';
        try {
            if (isset($request['ACTION'])) {
                switch ($request['ACTION']) {
//                    case \App::ACTION_GET_SESSION:
//                        $this->doGetSession($request);
//                        break;
                    case \App::ACTION_SIGN_IN:
                        $this->doSignin($request);
                        break;
                    case \App::ACTION_SIGNOUT:
                        $this->doSignout($request);
                        break;
                    case \App::ACTION_GET_SESSION:
                        $this->doGetSession($request);
                        break;
                }
            } else
                throw new Exception("NO_ACTION'"); // NOTE(review): User\Exception -- would be fatal
        } catch (Exception $e) {
            $logger = new Logger(__CLASS__);
            $logger->log->trace($e->getMessage());
            $this->doError('-1', $e->getMessage());
        }
    }

    /**
     * Authenticate a user from login/password and emit the result as JSON.
     */
    public function doSignin($request) {
        $logger = new Logger(__CLASS__);
        try {
            if (isset($request['ACTION']) && isset($request['login']) && isset($request['password'])) {
                try {
                    $user = $this->userManager->signin($request['login'], $request['password']);
                    $this->doSuccessO($user);
                } catch (Exception $e) {
                    $logger->log->trace($e->getMessage() . ' ' . $e->getFile() . ' ' . $e->getLine());
                    $this->doError('-1', $e->getMessage());
                } catch (Exception $e) { // NOTE(review): duplicate catch -- unreachable
                    $logger->log->trace($e->getMessage() . ' ' . $e->getFile() . ' ' . $e->getLine());
                    $this->doError('-1', $this->parameters['ERREUR_SERVEUR']);
                }
            } else {
                throw new \Exception($this->parameters['INVALID_DATA']);
            }
        } catch (\Exception $e) {
            $logger->log->trace($e->getMessage() . ' ' . $e->getFile() . ' ' . $e->getLine());
            $this->doError('-1', $e->getMessage());
        } catch (\Exception $e) { // NOTE(review): duplicate catch -- unreachable
            $logger->log->trace($e->getMessage() . ' ' . $e->getFile() . ' ' . $e->getLine());
//            $this->doError('-1', $this->parameters['ERREUR_SERVEUR']);
        }
    }

    /**
     * Emit the details of one user (empty JSON array when not found).
     */
    public function doView($request) {
        $logger = new Logger(__CLASS__);
        try {
            if (isset($request['userid'])) {
                $this->userManager = new userManager(); // class names are case-insensitive in PHP
                $user = $this->userManager->view($request['userid']);
                if ($user != NULL) {
                    $this->doSuccessO($user);
                } else {
                    echo json_encode(array());
                }
            } else {
                $logger->log->error('View : Invalid Data');
                throw new ConstraintException($this->parameters['INVALID_DATA']);
            }
        } catch (ConstraintException $e) {
            $logger->log->trace($e->getMessage() . ' ' . $e->getFile() . ' ' . $e->getLine());
            $this->doError('-1', $e->getMessage());
        } catch (Exception $e) {
            $logger->log->trace($e->getMessage() . ' ' . $e->getFile() . ' ' . $e->getLine());
            $this->doError('-1', $this->parameters['ERREUR_SERVEUR']);
        }
    }

    /**
     * Emit the list of all profiles.
     * NOTE(review): ProfilManager is not imported in this namespace (its use
     * statement is commented out at the top of the file) -- confirm this path
     * is actually reachable.
     */
    public function doListProfil($request) {
        $profilManager = new ProfilManager();
        $listProfils = $profilManager->findAll();
        //   echo json_encode($listProfils);
        $this->doSuccessO($listProfils);
    }

    /**
     * Create a new user attached to a customer, with a profile.
     * NOTE(review): passwords are hashed with md5 (weak, unsalted) -- consider
     * password_hash(); also CabinetManager/ProfilManager are not imported here.
     */
    public function doInsert($request) {
        $logger = new Logger(__CLASS__);
        try {
            $logger->log->trace("Debut insertion user");
            if (isset($request['ACTION']) && isset($request['customerId']) && isset($request['contactName']) && isset($request['login']) && isset($request['password']) && isset($request['description'])) {
                $customerId = $request['customerId'];
                $contactName = $request['contactName'];
                $login = $request['login'];
                $password = md5($request['password']);
                $description = $request['description'];
                $email = $request['email'];
                $profil = $request['profil']; // defaults to the plain-user profile
                if ($customerId != "" && $contactName != "" && $login != "" && $password != "" && $description != "" && $profil != "") {
                    $userManager = new UserManager();
                    $customerManager = new CabinetManager();
                    $user = new User();
                    $customer = $customerManager->findById($customerId);
                    $user->setCabinet($customer);
//                    $user->setLanguage($customer->getLanguage());
                    $user->setPartner($customer->getPartner());
                    $user->setContactName($contactName);
                    $user->setLogin($login);
                    $user->setPassword($<PASSWORD>);
                    $user->setDescription($description);
                    $user->setContactEmail($email);
                    $profilManager = new ProfilManager();
                    $objectProfil = $profilManager->findById($profil);
                    $user->setProfil($objectProfil);
                    $user->setStatus(1);
                    $user->setActivate(1);
                    $userManager->create($user);
                    if ($user->getId() != null) {
                        $this->doSuccess($user->getId(), $this->parameters['SAV']);
                        $concat = $customerId . '-' . $contactName . '-' . $login . '-' . $password . '-' . $description . '-' . $profil;
                        $this->logger->log->info($concat);
                    } else {
                        $this->logger->log->error('User already exists or in trash');
                        throw new Exception($this->parameters['USER_ALREADY_EXISTS']);
                    }
                }
            } else {
                $this->logger->log->error('List : Params not enough');
                throw new Exception($this->parameters['AJOUT_USER_IMPOSSIBLE']);
            }
            $logger->log->trace("Fin insertion user");
        } catch (Exception $e) {
            $logger->log->trace($e->getMessage() . ' ' . $e->getFile() . ' ' . $e->getLine());
            $this->doError('-1', $e->getMessage());
        } catch (Exception $e) { // NOTE(review): duplicate catch -- unreachable
            $logger->log->trace($e->getMessage() . ' ' . $e->getFile() . ' ' . $e->getLine());
            $this->doError('-1', $this->parameters['ERREUR_SERVEUR']);
        }
    }

    /**
     * Update an existing user (same field set as doInsert, plus 'id').
     */
    public function doUpdate($request) {
        $logger = new Logger(__CLASS__);
        try {
            $logger->log->trace("Debut insertion user");
            if (isset($request['ACTION']) && isset($request['customerId']) && isset($request['contactName']) && isset($request['login']) && isset($request['password']) && isset($request['description'])) {
                $customerId = $request['customerId'];
                $contactName = $request['contactName'];
                $login = $request['login'];
                $password = md5($request['password']);
                $description = $request['description'];
                $email = $request['email'];
                $profil = $request['profil']; // defaults to the plain-user profile
                if ($customerId != "" && $contactName != "" && $login != "" && $password != "" && $description != "" && $profil != "") {
                    $userManager = new UserManager();
                    $customerManager = new CabinetManager();
                    $user = new User();
                    $customer = $customerManager->findById($customerId);
                    $user->setId($request['id']);
                    $user->setCabinet($customer);
//                    $user->setLanguage($customer->getLanguage());
                    $user->setPartner($customer->getPartner());
                    $user->setContactName($contactName);
                    $user->setLogin($login);
                    $user->setPassword($password);
                    $user->setDescription($description);
                    $user->setContactEmail($email);
                    $profilManager = new ProfilManager();
                    $objectProfil = $profilManager->findById($profil);
                    $user->setProfil($objectProfil);
                    $user->setStatus(1);
                    $user->setActivate(1);
                    $us = $this->userManager->update($user, $supp = null);
                    $this->doSuccess($us, "Modification effectue avec succes");
                }
            } else {
                $this->logger->log->error('List : Params not enough');
                throw new Exception("Impossible d'effectuer cette modification");
            }
            $logger->log->trace("Fin mis a jour user");
        } catch (Exception $e) {
            $logger->log->trace($e->getMessage() . ' ' . $e->getFile() . ' ' . $e->getLine());
            $this->doError('-1', $e->getMessage());
        } catch (Exception $e) { // NOTE(review): duplicate catch -- unreachable
            $logger->log->trace($e->getMessage() . ' ' . $e->getFile() . ' ' . $e->getLine());
            $this->doError('-1', 'ERREUR_SERVEUR');
        }
    }

    /**
     * Server-side DataTables listing: paging, sorting and filtering of users.
     * NOTE(review): $sOrder/$sWhere are built by interpolating raw request
     * values (sSearch, sSortDir_N) into SQL fragments -- SQL injection risk;
     * these should be whitelisted/parameterized in the query layer.
     */
    public function doList($request) {
        $this->logger->log->info('Action List user');
        $this->logger->log->info(json_encode($request));
        try {
            $userManager = new UserManager();
            if (isset($request['customerId']) && isset($request['iDisplayStart']) && isset($request['iDisplayLength'])) {
                $customerId = $request['customerId'];
                $partnerId = $request['partnerId'];
                // Begin order from dataTable
                $sOrder = "";
                $aColumns = array('u.id','contactName', 'description', 'login','p.intitule','activate');
                if (isset($request['iSortCol_0'])) {
                    $sOrder = "ORDER BY  ";
                    for ($i = 0; $i < intval($request['iSortingCols']); $i++) {
                        if ($request['bSortable_' . intval($request['iSortCol_' . $i])] == "true") {
                            $sOrder .= "" . $aColumns[intval($request['iSortCol_' . $i])] . " " .
                                    ($request['sSortDir_' . $i] === 'asc' ? 'asc' : 'desc') . ", ";
                        }
                    }
                    $sOrder = substr_replace($sOrder, "", -2);
                    if ($sOrder == "ORDER BY") {
                        $sOrder .= " u.createdDate desc ";
                    }
                }
                // End order from DataTable
                // Begin filter from dataTable
                $sWhere = "";
                if (isset($request['sSearch']) && $request['sSearch'] != "") {
                    $sSearchs = explode(" ", $request['sSearch']);
                    for ($j = 0; $j < count($sSearchs); $j++) {
                        $sWhere .= " AND (";
                        for ($i = 0; $i < count($aColumns); $i++) {
                            $sWhere .= "(" . $aColumns[$i] . " LIKE '%" . ($sSearchs[$j]) . "%') OR";
                            if ($i == count($aColumns) - 1)
                                $sWhere = substr_replace($sWhere, "", -3);
                        }
                        $sWhere = $sWhere .=")";
                    }
                }
                // End filter from dataTable
                $users = $userManager->listUsers($customerId, $partnerId,$request['iDisplayStart'], $request['iDisplayLength'], $sOrder, $sWhere);
                if ($users != null) {
                    $nbUsers = $userManager->count($customerId, $partnerId,$sWhere);
                    $this->logger->log->info($nbUsers . 'users retrieved');
                    $this->doSuccessO($this->dataTableFormat($users, $request['sEcho'], $nbUsers));
                } else {
                    $this->doSuccessO($this->dataTableFormat(array(), $request['sEcho'], 0));
                }
            } else {
                $this->logger->log->error('List : Params not enough');
                throw new Exception('Params not enough');
            }
        } catch (Exception $e) {
            $this->logger->log->trace($e->getMessage() . ' ' . $e->getFile() . ' ' . $e->getLine());
            throw $e;
        } catch (Exception $e) { // NOTE(review): duplicate catch -- unreachable
            $this->logger->log->error($e->getMessage() . ' ' . $e->getFile() . ' ' . $e->getLine());
            throw new Exception($this->parameters['ERREUR_SERVEUR']);
        }
    }

    /**
     * Activate a comma-separated list of user ids.
     * NOTE(review): the success message says "desactive(s)" -- copy/paste slip.
     */
    public function doActiver($request) {
        $this->logger->log->info('Action Activate user');
        $this->logger->log->info(json_encode($request));
        try {
            if (isset($request['userIds'])) {
                $this->logger->log->info('activer with params : ' . $request['userIds']);
                $userIds = $request['userIds'];
                $userManager = new UserManager();
                $nbModified = $userManager->activate($userIds);
                $this->doSuccess($nbModified, "Utilisateurs desactive(s) avec succes.");
            } else {
                $this->logger->log->error('Activate : Params not enough');
                $this->doError('-1', $this->parameters['USER_NOT_ACTIVATED']);
            }
        } catch (ConstraintException $e) {
            $this->logger->log->trace($e->getMessage() . ' ' . $e->getFile() . ' ' . $e->getLine());
            throw $e;
        } catch (Exception $e) {
            $this->logger->log->error($e->getMessage() . ' ' . $e->getFile() . ' ' . $e->getLine());
            throw new Exception($this->parameters['ERREUR_SERVEUR']);
        }
    }

    /**
     * Deactivate a comma-separated list of user ids.
     */
    public function doDesactiver($request) {
        $this->logger->log->info('Action deactivate user');
        $this->logger->log->info(json_encode($request));
        try {
            if (isset($request['userIds'])) {
                $this->logger->log->info('Desactiver with params : ' . $request['userIds']);
                $userIds = $request['userIds'];
                $userManager = new UserManager();
                $nbModified = $userManager->deactivate($userIds);
                $this->doSuccess($nbModified, "Utilisateurs desactive(s) avec succes.");
            } else {
                $this->logger->log->error('Desactiver : Params not enough');
                $this->doError('-1', $this->parameters['USER_NOT_DEACTIVATED']);
            }
        } catch (ConstraintException $e) {
            $this->logger->log->trace($e->getMessage() . ' ' . $e->getFile() . ' ' . $e->getLine());
            throw $e;
        } catch (Exception $e) {
            $this->logger->log->error($e->getMessage() . ' ' . $e->getFile() . ' ' . $e->getLine());
            throw new Exception($this->parameters['ERREUR_SERVEUR']);
        }
    }

    /** Not implemented (interface stub). */
    public function doRemove($request) {

    }

//    public function doUpdate($request) {
//
//    }

    /** Not implemented (interface stub). */
    public function dofindById($request) {

    }

    /**
     * Emit the profile/details of one user (empty JSON array when not found).
     */
    public function doGetInfos($request) {
        $logger = new Logger(__CLASS__);
        try {
            if (isset($request['userId'])) {
                $this->userManager = new UserManager();
                $infosUser = $this->userManager->getInfos($request['userId']);
                if ($infosUser != NULL) {
                    $this->doSuccessO(($infosUser));
                } else {
                    echo json_encode(array());
                }
            } else {
                $logger->log->error('List : Invalid Data');
                throw new ConstraintException($this->parameters['INVALID_DATA']);
            }
        } catch (ConstraintException $e) {
            $logger->log->trace($e->getMessage() . ' ' . $e->getFile() . ' ' . $e->getLine());
            $this->doError('-1', $e->getMessage());
        } catch (Exception $e) {
            $logger->log->trace($e->getMessage() . ' ' . $e->getFile() . ' ' . $e->getLine());
            $this->doError('-1', $this->parameters['ERREUR_SERVEUR']);
        }
    }

    /**
     * Clear the session cookies. Echoes '0' when ACTION is not SIGNOUT.
     */
    public function doSignout($request) {
        $logger = new Logger(__CLASS__);
        $logger->log->trace('Signout');
        $this->action = "DECONNEXION : Tentative de déconnexion sur le portail";
        $this->details = "";
        if (isset($request['ACTION']) && $request['ACTION'] == 'SIGNOUT') {
            $past = time() - 3600; // NOTE(review): unused local
            \Common\Common::unsetCookie();
            $logger->log->trace('Fin Signout');
        } else {
            echo '0';
        }
    }

    /** Not implemented (interface stub). */
    public function doActivate($request) {

    }

    /** Not implemented (interface stub). */
    public function doDeactivate($request) {

    }

    /** Not implemented (interface stub). */
    public function doRestore($request) {

    }

    /**
     * Emit 1 when the session cookies (userId + cabinetId) are present, 0 otherwise.
     */
    public function doGetSession($request) {
        $logger = new Logger(__CLASS__);
        try {
            if (isset($request['ACTION'])) {
                if (!isset($_COOKIE['userId']) || !isset($_COOKIE['cabinetId']) ) {
                    $this->doSuccessO(0);
                } else {
                    $this->doSuccessO(1);
                }
            } else {
                $logger->log->error('Name : Invalid Data');
                throw new ConstraintException($this->parameters['INVALID_DATA']);
            }
        } catch (ConstraintException $e) {
            $logger->log->trace($e->getMessage() . ' ' . $e->getFile() . ' ' . $e->getLine());
            $this->doError('-1', $e->getMessage());
        } catch (Exception $e) {
            $logger->log->trace($e->getMessage() . ' ' . $e->getFile() . ' ' . $e->getLine());
            $this->doError('-1', $this->parameters['ERREUR_SERVEUR']);
        }
    }

    //put your code here
}
// Front controller entry point: dispatch every request to this script immediately.
$oUserController = new UserController($_REQUEST);
<file_sep>/backend/src/bo/menu/MenuController.php
<?php
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
/**
* Description of Controller
*
* @author <NAME>
*/
namespace Menu;
require_once '../../common/app.php';
use Bo\BaseAction;
use Bo\BaseController;
use Common\CommonManager;
use Exceptions\ConstraintException;
use Exception;
use Menu\Menu;
use Menu\MenuManager as MenuManager;
/**
 * HTTP action controller for USSD menu definitions: create menu nodes, list
 * them, and generate the static PHP/XHTML files that serve the USSD tree.
 * The constructor dispatches on $_REQUEST['ACTION'].
 */
class MenuController extends BaseAction implements BaseController {

    private $commonManager; // generic persistence helpers
    private $menu;          // working Menu entity
    private $menuManager;   // menu-specific queries

    /**
     * Load the translation file and dispatch the incoming request.
     *
     * @param array $request typically $_REQUEST
     */
    public function __construct($request) {
        $file = dirname(dirname(dirname(__FILE__))) . '/lang/trad_fr.ini';
        $this->parameters = parse_ini_file($file);
        $this->commonManager = new CommonManager();
        $this->menu = new Menu();
        $this->menuManager = new MenuManager();
        try {
            if (isset($request['ACTION'])) {
                switch ($request['ACTION']) {
                    case \App::ACTION_GET_SESSION:
                        $this->doGetSession($request);
                        break;
                    case \App::ACTION_LIST:
                        $this->doList($request);
                        break;
                    case \App::ACTION_INSERT:
                        $this->doInsert($request);
                        break;
                    case \App::ACTION_UPDATE:
                        $this->doUpdate($request);
                        break;
                    case \App::ACTION_GENERATE_MENU:
                        $this->doGenerateMenu($request);
                        break;
                    case \App::ACTION_GET_ALL_MENU_BY_USER:
                        $this->doGetAllMenuByUser($request);
                        break;
                    default: throw new Exception($this->parameters['NO_ACTION']);
                }
            } else
                throw new Exception($this->parameters['NO_ACTION']);
        } catch (Exception $e) {
            $this->doLogInfo($e->getMessage());
            $this->doError('-1', $e->getMessage());
        }
    }

    /**
     * Create a menu node for a user. 'accesskey' nodes carry an order;
     * other node types get order 0.
     * NOTE(review): $request['odre'] below is a typo for 'ordre' -- the
     * "!== 'ALL'" guard reads an undefined index, so it never rejects 'ALL'.
     */
    public function doInsert($request) {
        $this->doLogInfo('List des parametres:' . $this->doGetListParam());
        try {
            if (isset($request['ACTION']) && isset($request['userId']) && isset($request['name']) && isset($request['title']) && isset($request['text']) &&
                    isset($request['parent']) && isset($request['type']) && isset($request['actions']) && isset($request['methode']) && isset($request['url'])) {
                if ($request['userId'] != '' && $request['name'] != '' && $request['title'] != '' && $request['text'] != '' &&
                        $request['parent'] != '' && $request['type'] != '' && $request['actions'] != '' && $request['methode'] != '' && $request['url'] != '') {
                    $user = $this->commonManager->findById("User\User", $request['userId']);
                    $this->menu->setName($request['name']);
                    $this->menu->setTitle($request['title']);
                    $this->menu->setText($request['text']);
                    // "ALL" means a root node with no parent.
                    if ($request['parent'] != "ALL") {
                        $parent = $this->commonManager->findById("Menu\Menu", $request['parent']);
                        $this->menu->setParent($parent);
                    }
                    if ($request['type'] == "accesskey") {
                        if (isset($request['ordre'])) {
                            $this->menu->setOrdre($request['ordre']);
                            if ($request['odre'] !== 'ALL')
                                $this->menu->setOrdre($request['ordre']);
                            else {
                                $this->doLogError($this->parameters['CODE_101_ADMIN']);
                                throw new ConstraintException('Le champs ordre est vide');
                            }
                        }
//                        else {
//                            $this->doLogError($this->parameters['CODE_100_ADMIN']);
//                            throw new ConstraintException($this->parameters['CODE_100']);
//                        }
                    } else
                        $this->menu->setOrdre(0);
                    $this->menu->setType($request['type']);
                    $this->menu->setAction($request['actions']);
                    $this->menu->setMethode($request['methode']);
                    $this->menu->setUrl($request['url']);
                    $this->menu->setUser($user);
                    $this->menu->setGenerate(1);
                    $this->menu->setEtape(0);
                    $menu = $this->commonManager->insert($this->menu);
                    if ($menu != null) {
                        $this->doSuccess($menu->getId(), $this->parameters['INSERT']);
                        $this->doLogInfo('***************************************** Fin ajout Menu *****************************************');
                    } else {
                        $this->doLogError($this->parameters['CODE_104_ADMIN']);
                        throw new ConstraintException($this->parameters['CODE_104']);
                    }
                } else {
                    $this->doLogError($this->parameters['CODE_101_ADMIN']);
                    throw new ConstraintException('Certains champs sont vides');
                }
            } else {
                $this->doLogError($this->parameters['CODE_100_ADMIN']);
                throw new ConstraintException($this->parameters['CODE_100']);
            }
//        }else {
//            $this->doLogError($this->parameters['CODE_100_ADMIN']);
//            throw new ConstraintException($this->parameters['CODE_100']);
//        }
            $this->doLogInfo("Fin doGenerateMenu");
        } catch (ConstraintException $e) {
            $this->doLogInfo($e->getMessage() . ' ' . $e->getFile() . ' ' . $e->getLine());
            $this->doError('-1', $e->getMessage());
        } catch (Exception $e) {
            $this->doLogInfo($e->getMessage() . ' ' . $e->getFile() . ' ' . $e->getLine());
            $this->doError('-1', $this->parameters['ERREUR_SERVEUR']);
        }
    }

    /**
     * Generate the static index.php of a user's USSD tree from its master
     * (root) menu: one link per child for 'accesskey' roots, a text input
     * for 'input' roots.
     *
     * NOTE(review): $chemin is a hard-coded Windows/XAMPP path and mkdir uses
     * world-writable 0777 -- both should come from configuration. Also
     * $titleChild/$nameChild are interpolated into the HTML unescaped, and
     * the nested double quotes in ?response="..." produce invalid attribute
     * markup (visible in the generated files).
     */
    public function doGenerateMenu($request){
        try {
            //rep
//            $chemin=getcwd();
            $chemin='C:/xampp/htdocs/';
            $repUser='ussdgenerate/';
            $tagChild="";
            $index="index";
            $this->doLogInfo("Debut du doGenerateMenu");
            $this->doLogInfo('List des parametres:' . $this->doGetListParam());
            if (isset($request['ACTION']) && isset($request['userId']) ){
                $user = $this->menuManager->findById("User\User", $request['userId']);
                if($user!=null){
                    // Target directory: the user's configured folder, or ussdgenerate/<id>/.
                    if($user->getRepName()!=null)
                        $repUser=$user->getRepName().'/';
                    else
                        $repUser=$repUser.$user->getId().'/';
                    $dest=$chemin.$repUser;
                    if (!file_exists($dest))
                        mkdir($dest, 0777, true);
                    //Menu Parent
                    $masterParent=$this->menuManager->getMasterParent($user);
                    if($masterParent!=null){
                        $masterId=$masterParent['id'];
                        $masterType=$masterParent['type'];
                        $masterText=$masterParent['text'];
                        $masterName=$masterParent['name'];
                        $masterOrdre=$masterParent['ordre'];
                        $masterTitle=$masterParent['title'];
                        $this->doLogInfo('Master Menu Id:' . $masterId);
                        if($masterType=="accesskey"){
                            // One <a> entry per direct child of the root menu.
                            $listMenuChild=$this->menuManager->getAllChildByParentId($masterId);
                            foreach ($listMenuChild as $unMenu) {
                                $menuChild=$this->commonManager->findById("Menu\Menu", $unMenu["id"]);
                                $this->doLogInfo('menuChildId:' . $menuChild->getId());
                                $titleChild=$menuChild->getTitle();
                                $nameChild=$menuChild->getName();
                                $textChild=$menuChild->getText();
                                $typeChild=$menuChild->getType();
                                $urlChild=$menuChild->getUrl();
                                $ordreChild=$menuChild->getOrdre();
                                $file = $chemin.$repUser.$nameChild.'.php';
//                                if($typeChild=="accesskey"){
                                $tagChild.='<a href="'.$nameChild.'.php?response="'.$titleChild.'" accesskey="'.$ordreChild.'" >'.$titleChild.'</a><br/>';
                            }
                        }
                        else
                            if($masterType=="input")
                                $tagChild='<form action="index.php"><input type="text" name="response"/></form>';
                        $file = $chemin.$repUser.$index.'.php';
                        $current = "<?xml version='1.0' encoding='utf-8'?>
<!doctype html><html>
<head><meta charset='utf-8'>
<title>$masterName</title>
</head><body>
<h3>$masterText</h3>
$tagChild
</body></html>
";
                        // Write the contents back to the file
                        file_put_contents($file, $current);
                        // Open the file to get existing content
//                        $this->doResult( 'Fichier '.$file.' cr�� avec succ�s');
                        $this->doSuccess(1, $this->parameters['GENERATED']);
                    }
                }
                else {
                    $this->doLogError($this->parameters['CODE_102_ADMIN']);
                    throw new ConstraintException('User null');
                }
            }else {
                $this->doLogError($this->parameters['CODE_100_ADMIN']);
                throw new ConstraintException($this->parameters['CODE_100']);
            }
//        }else {
//            $this->doLogError($this->parameters['CODE_100_ADMIN']);
//            throw new ConstraintException($this->parameters['CODE_100']);
//        }
            $this->doLogInfo("Fin doGenerate Menu");
        } catch (ConstraintException $e) {
            $this->doLogInfo($e->getMessage() . ' ' . $e->getFile() . ' ' . $e->getLine());
            $this->doError('-1', $e->getMessage());
        } catch (Exception $e) {
            $this->doLogInfo($e->getMessage() . ' ' . $e->getFile() . ' ' . $e->getLine());
            $this->doError('-1', $this->parameters['ERREUR_SERVEUR']);
        }
    }

    /**
     * Emit all menus of a user as a flat array.
     * NOTE(review): near-duplicate of doGetAllMenuByUser (different manager
     * call); also the doLogInfo after each `throw` is unreachable.
     */
    public function doList($request) {
        $this->doLogInfo("Debut doGetAllMenuByUser");
        $this->doLogInfo('List des parametres:' . $this->doGetListParam());
        try {
            $this->doLogInfo('List des parametres:' . $this->doGetListParam());
            if (isset($request['ACTION']) && isset($request['userId'])) {
                $user = $this->commonManager->findById("User\User", $request['userId']);
                $listMenu = $this->menuManager->getAllMenusArray($user);
//                var_dump($listMenu);
//                $listMenu=json_encode($listMenu);
//                $this->doLogInfo('List menu:' . $listMenu);
                if ($listMenu != NULL) {
                    $this->doSuccessO($listMenu);
                    $this->doLogInfo("Fin doGetAllMenuByUser");
                } else {
                    $this->doLogError($this->parameters['CODE_110_ADMIN']);
                    throw new ConstraintException($this->parameters['CODE_110']);
                    $this->doLogInfo("Erreur liste des menus vides");
                }
            }
        } catch (ConstraintException $e) {
            $this->doLogError($e->getMessage() . ' ' . $e->getFile() . ' ' . $e->getLine());
            $this->doError('-1', $e->getMessage());
            $this->doLogError("Fin doGetAllMenuByUser");
        } catch (Exception $e) {
            $this->doLogError($e->getMessage() . ' ' . $e->getFile() . ' ' . $e->getLine());
            $this->doError('-1', $this->parameters['ERREUR_SERVEUR']);
            $this->doLogError("Fin doGetAllMenuByUser ");
        }
    }

    /**
     * Emit all menu entities of a user.
     */
    public function doGetAllMenuByUser($request){
        $this->doLogInfo("Debut doGetAllMenuByUser");
        $this->doLogInfo('List des parametres:' . $this->doGetListParam());
        try {
            $this->doLogInfo('List des parametres:' . $this->doGetListParam());
            if (isset($request['ACTION']) && isset($request['userId']) ) {
                $user = $this->commonManager->findById("User\User", $request['userId']);
                $listMenu = $this->menuManager->getAllMenuByUser($user);
//                var_dump($listMenu);
//                $listMenu=json_encode($listMenu);
//                $this->doLogInfo('List menu:' . $listMenu);
                if ($listMenu != NULL) {
                    $this->doSuccessO($listMenu);
                    $this->doLogInfo("Fin doGetAllMenuByUser");
                } else {
                    $this->doLogError($this->parameters['CODE_110_ADMIN']);
                    throw new ConstraintException($this->parameters['CODE_110']);
                    $this->doLogInfo("Erreur liste des menus vides");
                }
            }
        } catch (ConstraintException $e) {
            $this->doLogError($e->getMessage() . ' ' . $e->getFile() . ' ' . $e->getLine());
            $this->doError('-1', $e->getMessage());
            $this->doLogError("Fin doGetAllMenuByUser");
        } catch (Exception $e) {
            $this->doLogError($e->getMessage() . ' ' . $e->getFile() . ' ' . $e->getLine());
            $this->doError('-1', $this->parameters['ERREUR_SERVEUR']);
            $this->doLogError("Fin doGetAllMenuByUser ");
        }
    }

    /** Not implemented (interface stub). */
    public function doRestore($request) {

    }

    /** Not implemented (interface stub). */
    public function doDeactivate($request) {

    }

    /** Not implemented (interface stub). */
    public function doRemove($request) {

    }

    /** Not implemented (interface stub). */
    public function doView($request) {

    }

    /** Not implemented (interface stub). */
    public function doUpdate($request) {

    }

    /** Not implemented (interface stub). */
    public function doActivate($request) {

    }

    /** Not implemented (interface stub). */
    public function dofindById($request) {

    }

    //put your code here
}
// Front controller entry point: dispatch every request to this script immediately.
$oMenuController = new MenuController($_REQUEST);
<file_sep>/backend/src/bo/user/UserManager.php
<?php
namespace User;
use User\UserQueries as UserQueries;
use User\User as User;
use Common\CommonManager as CommonManager;
use Log\Loggers as Logger;
/**
 * Business layer for user accounts; currently only authentication is
 * implemented, the remaining BaseManager operations are stubs.
 */
class UserManager extends \Bo\BaseAction implements \Bo\BaseManager{

    /** @var User working entity instance */
    protected $user;
    /** @var UserQueries data-access layer for user records */
    protected $userQueries;
    /** @var CommonManager shared persistence helpers */
    protected $commonManager;
    /** @var Logger class-level logger */
    private $logger;

    public function __construct() {
        $this->user = new User();
        $this->userQueries = new UserQueries;
        $this->commonManager = new CommonManager;
        $this->logger = new Logger(__CLASS__);
    }

    /**
     * Authenticate a user by login/password.
     *
     * @return array ['rc','infos','rcSendMail'] where rc is 1 (active
     *               account, infos holds the user row), 0 (account exists
     *               but is deactivated) or -1 (no match).
     */
    public function signIn($login, $password) {
        $record = $this->userQueries->signin($login, $password);
        if ($record != null && $record['activate'] == 1) {
            // Known user on an active account: hand back the full record.
            return array('rc' => 1, 'infos' => $record, 'rcSendMail' => 0);
        }
        if ($record != null && $record['activate'] == 0) {
            // Known user but the account has been deactivated.
            return array('rc' => 0, 'infos' => 0, 'rcSendMail' => 0);
        }
        // Unknown credentials (or an unexpected activate value).
        return array('rc' => -1, 'infos' => -1, 'rcSendMail' => 1);
    }

    /** Not implemented (interface stub). */
    public function activate($entity, $listId, $userId) {

    }

    /** Not implemented (interface stub). */
    public function deactivate($entity, $listId, $userId) {

    }

    /** Not implemented (interface stub). */
    public function delete($entity, $listId, $userId) {

    }

    /** Not implemented (interface stub). */
    public function findById($entity, $id) {

    }

    /** Not implemented (interface stub). */
    public function insert($entity) {

    }

    /** Not implemented (interface stub). */
    public function remove($entity, $listId, $userId) {

    }

    /** Not implemented (interface stub). */
    public function restore($entity, $listId, $userId) {

    }

    /** Not implemented (interface stub). */
    public function update($entity) {

    }
}
<file_sep>/backend/src/bo/exception/InternationalTrafficNotAllowedException.php
<?php
/*
* 2SMOBILE
* ----------------------------------------
* @author Kiwi <<EMAIL>>
* @copyright 2006-2015 Kiwi/2SI Group
* @version 2.0.0
* @link http://www.kiwi.sn
* @link http://www.ssi.sn
* ----------------------------------------
*/
namespace Exceptions;
/**
 * Raised when international traffic is attempted without authorization.
 * The message text is resolved from code 222 by the MessageException base.
 *
 * @author admin
 */
class InternationalTrafficNotAllowedException extends MessageException {

    function __construct() {
        // 222: message/status code looked up by MessageException.
        parent::__construct(222);
    }
}
<file_sep>/backend/src/bo/common/Common.php
<?php
/*
* 2SMOBILE
* ----------------------------------------
* @author Kiwi <<EMAIL>>
* @copyright 2006-2015 Kiwi/2SI Group
* @version 2.0.0
* @link http://www.kiwi.sn
* @link http://www.ssi.sn
* ----------------------------------------
*/
namespace Common;
/**
* Description of Common
*
* @author pouyelayese
*/
use Log\Loggers as Logger;
class Common {
private static $HEURE=1800; //30*60s voir /index.php et /signin.php
/**
* Reinitialise les cookies
*
* @param type $customerId
* @param type $customerLogin
* @param type $customercompanyName
* @param type $customerAdress
* @param type $customerEmail
* @param type $customerContactName
* @param type $customerContactPhone
* @param type $customerLanguage
*
*
*/
public static function setCookies($id, $customerId, $login,
$contactName, $code, $profil, $codeP, $isPack, $productName ) {
Common::unsetCookieCustomer();
Cookie::Set('customerId', $customerId);
Cookie::Set('userId', $id);
Cookie::Set('userLogin', $login);
Cookie::Set('userContactName', $contactName);
Cookie::Set('userLanguage', $code);
Cookie::Set('userProfil', $profil);
Cookie::Set('codeP', $codeP);
Cookie::Set('isPack', $isPack);
Cookie::Set('productName', $productName);
}
public static function setBoCookies($id, $login,
$contactName, $profil) {
Common::unsetCookieBoUser();
Cookie::Set('userId', $id);
Cookie::Set('contactName', $contactName);
Cookie::Set('login', $login);
Cookie::Set('userProfil', $profil);
}
public static function unsetCookieBoUser() {
Cookie::Delete('userId');
Cookie::Delete('contactName');
Cookie::Delete('login');
Cookie::Delete('userProfil');
}
public static function setPartnerCookies($partnerId, $partnerCode) {
Common::unsetCookieCustomer();
Cookie::Set('partnerId', $partnerId);
Cookie::Set('partnerCode', $partnerCode);
}
public static function unsetCookieCustomer() {
Cookie::Delete('customerId');
Cookie::Delete('userId');
Cookie::Delete('userLogin');
Cookie::Delete('userContactName');
Cookie::Delete('userLanguage');
Cookie::Delete('userProfil');
Cookie::Delete('codeP');
Cookie::Delete('isPack');
Cookie::Delete('productName');
}
public static function unsetCookiePartner(){
unset($_COOKIE['partnerCode']);
unset($_COOKIE['partnerName']);
unset($_COOKIE['partnerTrademark']);
unset($_COOKIE['partnerCellular']);
unset($_COOKIE['partnerFax']);
unset($_COOKIE['partnerTemplate']);
unset($_COOKIE['partnerEmail']);
unset($_COOKIE['partnerLanguage']);
}
public static function gen_uuid() {
return sprintf( '%04x%04x-%04x-%04x-%04x-%04x%04x%04x',
// 32 bits for "time_low"
mt_rand( 0, 0xffff ), mt_rand( 0, 0xffff ),
// 16 bits for "time_mid"
mt_rand( 0, 0xffff ),
// 16 bits for "time_hi_and_version",
// four most significant bits holds version number 4
mt_rand( 0, 0x0fff ) | 0x4000,
// 16 bits, 8 bits for "clk_seq_hi_res",
// 8 bits for "clk_seq_low",
// two most significant bits holds zero and one for variant DCE1.1
mt_rand( 0, 0x3fff ) | 0x8000,
// 48 bits for "node"
mt_rand( 0, 0xffff ), mt_rand( 0, 0xffff ), mt_rand( 0, 0xffff )
);
}
/**
 * Return the strftime()-style date format matching the current UI language.
 *
 * The language is read from the 'userLanguage' cookie, then from
 * 'partnerLanguage', then falls back to French.
 *
 * Fixes: the previous version read $_COOKIE['partnerLanguage'] without an
 * isset() guard (undefined-index notice when absent) and returned null for
 * any language other than 'fr'/'en', breaking strftime() callers.
 *
 * @return string '%Y-%m-%d' for English, '%d-%m-%Y' otherwise (French default)
 */
public static function setFormatDate(){
    $lang = 'fr';
    if (isset($_COOKIE['userLanguage'])) {
        $lang = $_COOKIE['userLanguage'];
    } else if (isset($_COOKIE['partnerLanguage'])) {
        $lang = $_COOKIE['partnerLanguage'];
    }
    if ($lang == 'en') {
        return '%Y-%m-%d';
    }
    // French format doubles as the default for unknown languages.
    return '%d-%m-%Y';
}
/**
 * Return the strftime()-style date-time format matching the current UI
 * language (date part as in setFormatDate(), plus '%T' for HH:MM:SS).
 *
 * Fixes: the previous version read $_COOKIE['partnerLanguage'] without an
 * isset() guard (undefined-index notice when absent) and returned null for
 * any language other than 'fr'/'en', breaking strftime() callers.
 *
 * @return string '%Y-%m-%d %T' for English, '%d-%m-%Y %T' otherwise
 */
public static function setFormatDateTime(){
    $lang = 'fr';
    if (isset($_COOKIE['userLanguage'])) {
        $lang = $_COOKIE['userLanguage'];
    } else if (isset($_COOKIE['partnerLanguage'])) {
        $lang = $_COOKIE['partnerLanguage'];
    }
    if ($lang == 'en') {
        return '%Y-%m-%d %T';
    }
    // French format doubles as the default for unknown languages.
    return '%d-%m-%Y %T';
}
/**
 * Re-apply session cookies based on the 'provider' cookie.
 *
 * 'PORTAIL' refreshes the customer/portal cookie set, 'BO' refreshes the
 * back-office set; anything else (or a missing 'userId') is a no-op.
 *
 * @return int 1 when cookies were (re)applied, 0 otherwise
 */
public static function setAllCookies() {
    if (!isset($_COOKIE['provider'])) {
        return 0;
    }
    if ($_COOKIE['provider'] == 'PORTAIL') {
        if (!isset($_COOKIE['userId'])) {
            return 0;
        }
        Common::setCookies(
            $_COOKIE['userId'], $_COOKIE['customerId'], $_COOKIE['userLogin'],
            $_COOKIE['userContactName'], $_COOKIE['userLanguage'], $_COOKIE['userProfil'],
            $_COOKIE['codeP'], $_COOKIE['isPack'], $_COOKIE['productName']
        );
        return 1;
    }
    if ($_COOKIE['provider'] == 'BO') {
        if (!isset($_COOKIE['userId'])) {
            return 0;
        }
        Common::setBoCookies($_COOKIE['userId'], $_COOKIE['login'], $_COOKIE['contactName'], $_COOKIE['userProfil']);
        return 1;
    }
    return 0;
}
/**
 * Convenience wrapper: log $params at TRACE level through the shared Logger.
 *
 * @param mixed $params value forwarded verbatim to the logger
 * @return mixed whatever the underlying trace() call returns
 */
public static function logTrace($params){
$logger = new Logger(__CLASS__);
return $logger->log->trace($params);
}
}
<file_sep>/backend/src/common/app.php
<?php
/*
 * USSS_
 * ----------------------------------------
 * @author Kiwi <<EMAIL>>
 * @copyright 2006-2015 Kiwi/2SI Group
 * @version 2.0.0
 * @link http://www.kiwi.sn
 * @link http://www.ssi.sn
 * ----------------------------------------
 */
// Locate the Doctrine/Composer autoloader relative to the including script.
// Scripts include this file from various directory depths, so probe upward
// one level at a time and load the first autoload.php found — same behavior
// as the previous if/else-if ladder, without the duplication.
foreach (array('', '../', '../../', '../../../', '../../../../') as $prefix) {
    $autoload = $prefix . 'lib/doctrine/vendor/autoload.php';
    if (is_file($autoload)) {
        require_once $autoload;
        break;
    }
}
/**
 * Application-wide constants (paths, action names) and configuration helpers.
 */
class App {
    const APP_ROOT = 'websms';
    const BO = 'src/bo';
    const ACTION_VIEWPLUS = 'VIEWPLUS';
    const FILE_PARAMETERS = "config/parameters.ini";
    const FILE_PARAMETERS_IN_PROCESSINGMANAGER = "../../../../portail/config/parameters.ini";
    const LANG = "../../lang";
    const AS_LANG = "../../../lang";
    const I18N_CLASS = "../../../lib/i18n/class/l18n.class.php";
    const AS_I18N_CLASS = "../../../../lib/i18n/class/l18n.class.php";
    const AUTOLOAD = "../../../../lib/doctrine/vendor/autoload.php";
    const MAILER = "../../../../lib/mail/class.phpmailer.php";
    const XLSXCLASS = "../../../../lib/pexcel/simplexlsx.class.php";
    const EXCELREADER = "../../../../lib/pexcel/excel_reader.php";
    const UPLOADFILE = "../../../../upload/";
    const UPLOAD_DIR="../../upload/";
    // Application actions
    const ACTION_GET_SESSION='GET_SESSION';
    const ACTION_SIGN_IN='SIGN_IN';
    const ACTION_UPDATE='UPDATE';
    const ACTION_INSERT='INSERT';
    const ACTION_INSERT_FILE='INSERT_FILE';
    const ACTION_UPDATE_NIVEAU='UPDATE_NIVEAU';
    const ACTION_REMOVE='REMOVE';
    const ACTION_ACCES = 'ACCES';
    const ACTION_ACTIVER = 'ACTIVER';
    const ACTION_CANCELLED = 'CANCELLED';
    const ACTION_VALIDATE = 'VALIDATE';
    const ACTION_DESACTIVER = 'DESACTIVER';
    const ACTION_DELETE = 'DELETE';
    const ACTION_RESTORE = 'RESTORE';
    const ACTION_LIST='LIST';
    const ACTION_GENERATE_MENU="GENERATE_MENU";
    const ACTION_GET_ALL_MENU_BY_USER="GET_ALL_MENU_BY_USER";
    // Fix: CommonController dispatches on \App::ACTION_VIEW and
    // \App::ACTION_INSERT_NIVEAU, but neither constant was declared here,
    // which makes PHP fatal with "Undefined class constant" at dispatch time.
    const ACTION_VIEW = 'VIEW';
    const ACTION_INSERT_NIVEAU = 'INSERT_NIVEAU';
    //push

    /**
     * Locate and parse config/parameters.ini, probing upward from the
     * current working directory (scripts run from various depths).
     *
     * @return array|null parsed ini values, or null when no file was found
     */
    private static function parseParameters() {
        foreach (array('', '../', '../../', '../../../', '../../../../') as $prefix) {
            $file = $prefix . 'config/parameters.ini';
            if (is_file($file)) {
                return parse_ini_file($file);
            }
        }
        return null;
    }

    /**
     * @return string|null back-office path ("<backend>/src/bo"), or null
     *                     when parameters.ini cannot be located
     */
    static function getBoPath() {
        $parameters = self::parseParameters();
        if ($parameters !== null) {
            return $parameters['backend'] . '/src/bo';
        }
        return null;
    }

    /**
     * @return string|null the configured 'server' base URL, or null when
     *                     parameters.ini cannot be located
     */
    static function getHome() {
        $parameters = self::parseParameters();
        if ($parameters !== null) {
            return $parameters['server'];
        }
        return null;
    }
}
<file_sep>/backend/src/lang/trad_en.ini
; this is an INI file
[general]
ERREUR_SERVEUR = Error while processing your request. Please enter your administrator.
UPDATED = Updated
INSERTED = Inserted
REMOVED = Removed.
REVOKED = Revoked
GEN_SAVED = "Saved"
PARAM_NOT_ENOUGH = Params not enough
OPERATION_IMPOSSIBLE = This operation is impossible
INVALID_DATA = Invalid data
NUMBER_NOT_INSERTED = Number not inserted
NUMBER_ALREADY_EXIST = This number already exists
NUMBER_IS_EMPTY = The field number is empty
OLD_PASSWORD_NOT_CORRECT = Old password not correct
PASSWORD_ALREADY_USED = You cannot reuse one of your last two passwords, please choose a different one
PASSWORD_CHANGED = Password modified
MSG_NOT_DELETED = Message not deleted
NO_ACTION = "No action"
TEMPLATE_NOT_INSERTED = Template not inserted
UNBLOCKED = "Unblocked"
BLOCKED = "Blocked"
CONTACT_NOT_UNBLOCKED = "Contact(s) not unblocked"
CONTACT_NOT_BLOCKED = "Contact(s) not blocked"
ARCHIVED = Archived
MSG_NOT_ARCHIVED = "Message(s) not archived"
PASSWORD_ALREADY_EXPIRED = Your password expired and must be changed, please reset another
PASSWORD_EXPIRED_IN_FEW_DAYS = Your password expire
PASSWORD_EXPIRED_IN_FEW_DAYS_SUITE = days. Please update another before planned delay.
DOMAIN_PORTAIL = Portail
DOMAIN_BACKOFFICE = Backoffice
[contact]
ENTETE_EXPORT = "First name(s);Last name;Phone;Email;Group(s);Additionnal field(s) "
ENTETE_EXPORT_DETAILS = "Date;Recipients;Status "
ENTETE_EXPORT_BILLING = "Account;Company name;Last traffic;National;International;Total "
MOV = "Moved"
COP = "Copied"
CONTACT_ALREADY = Contact already exists or is in trash
CONTACT_NOT_INSERTED = Contact not inserted
CONTACT_NOT_UPDATED = Contact not updated
CELLULAR_NOT_EMPTY = Cellular must not be empty
CANNOT_HAVE_LIST_CONTACT = Cannot have list of contacts
CONTACT_NOT_REMOVED = "Contact(s) not removed"
[group]
GROUP_NOT_INSERTED = Group not inserted
GROUP_IN_TRASH = This group exists in trash. Please restore
GROUP_ALREADY_EXISTS = This group already exists
GROUP_EMPTY = Group must not be empty
IMPOSSIBLE_COPY_MOVE = "Cannot copy or move : contact(s) already exists in destination group(s)"
GROUP_NAME_EMPTY = Field name of this group must not be empty
GROUP_SAME = Cannot move or copy to the same group
[signature]
SIGNATURE_ALREADY_EXISTS = This signature already exists or is in trash
NO_SIGNATURE = "No signature"
APPROVED = "Approved"
BLOCKED = "Blocked"
LIST_SIGNATURE_IMP="Cannot have list of wording"
SIGNATURE_SAVED= Signature saved
[user]
USER_ALREADY_EXISTS = This user already exists or is in trash
AJOUT_USER_IMPOSSIBLE = Cannot add a user
LIST_USER_IMP = Cannot have list of users
USER_NOT_REMOVED = "User(s) not removed"
USER_NOT_ACTIVATED = "User(s) not activated"
USER_NOT_DEACTIVATED = "User(s) not deactivated"
DEACTIVATED = Deactivated
ACTIVATED = Activated
GENERATED_CLIENTID = "Token generated."
SAV = Saved
[campaign]
SAVED = Saved
STOPPED = Stopped
DONE = Done
CAMPAIGN_NOT_INSERTED = Campaign not inserted
CAMPAIGN_NO_START = Cannot start a campaign
STARTED = Started
CAMPAIGN_NOT_STARTED = Campaign not started
CAMPAIGN_ALREADY_STARTED = Campaign already started
PAUSED = Paused
UNABLE_PAUSE = Unable to pause
RESUMED = Resumed
PLANNED = Planned
CAMPAIGN_ALREADY_PLANNED = Campaign already planned
POSTPONED = Postponed
CAMPAIGN_ALREADY_POSTPONED = Campaign already postponed
VOLUME = Daily volume
START = Start date
END = End date
[message]
MSG_EN_COURS_TRAITEMENT = "Message(s) being processed"
NO_LIST_RECIPIENT = Empty list recipients
MESSAGE_SENT = "Message(s) sent"
NO_CONTENT = "Empty content message"
[simplemessage]
MSG_NOT_INSERTED = Message not inserted
CANNOT_GET_MSG = "Cannot get message(s)"
CANNOT_GET_CONTACT = "Cannot get contact(s)"
CANNOT_LIST_MSG = "Cannot list message(s)"
DATEORTIME_NOT_SPECIFIED = Date of planning is not specified
[trash]
TRUNKED = Truncated
RESTORE = Restore
NOT_RESTORED = Element not restored
RESTORED = Restored.
[template]
TITLE_EXISTS = Template title already exists
[templateCategory]
NO_TEMPLATE = Cannot get template
GENERATED_KEY = Key generated
ENTETE_EXPORT_DETAILS = "Date;Recipients;Status "
<file_sep>/backend/src/bo/common/CommonController.php
<?php
/*
* 2SMOBILE
* ----------------------------------------
* @author Kiwi <<EMAIL>>
* @copyright 2006-2015 Kiwi/2SI Group
* @version 2.0.0
* @link http://www.kiwi.sn
* @link http://www.ssi.sn
* ----------------------------------------
*/
require_once '../../common/app.php';
require_once App::AUTOLOAD;
require_once App::MAILER;
use Parametrage\ClassePhysique as ClassePhysique;
use Parametrage\ClassePhysiqueManager as ClassePhysiqueManager;
use Bo\BaseController as BaseController;
use Bo\BaseAction as BaseAction;
use Exceptions\ConstraintException as ConstraintException;
use Parametrage\Niveau as Niveau;
use Common\CommonManager as CommonManager;
/**
 * Front controller for "common" back-office requests: dispatches the
 * 'ACTION' request parameter to the matching do*() handler.
 *
 * Error handling: any exception is logged and reported through doError()
 * with code -1 (both inherited from BaseAction).
 */
class CommonController extends BaseAction implements BaseController {

    /**
     * @param array $request typically $_REQUEST; must carry an 'ACTION' key,
     *                       otherwise the NO_ACTION error is reported.
     */
    public function __construct($request) {
        $this->niveauObject = 'Parametrage\Niveau';
        // French messages are hard-coded here; other controllers may localize.
        $this->parameters = parse_ini_file("../../lang/trad_fr.ini");
        $this->commonManager= new CommonManager();
        try {
            if (isset($request['ACTION'])) {
                switch ($request['ACTION']) {
                    case \App::ACTION_INSERT:
                        $this->doInsert($request);
                        break;
                    // NOTE(review): doInsertNiveau/doUpdateNiveau/doDelete are
                    // not defined in this class — presumably inherited from
                    // BaseAction; confirm, or these cases fatal when reached.
                    case \App::ACTION_INSERT_NIVEAU:
                        $this->doInsertNiveau($request);
                        break;
                    case \App::ACTION_UPDATE:
                        $this->doUpdate($request);
                        break;
                    // Fix: this case was a duplicate of ACTION_UPDATE, which a
                    // PHP switch never reaches (first matching case wins), so
                    // doUpdateNiveau() was dead code.
                    case \App::ACTION_UPDATE_NIVEAU:
                        $this->doUpdateNiveau($request);
                        break;
                    case \App::ACTION_VIEW:
                        $this->doView($request);
                        break;
                    case \App::ACTION_LIST:
                        $this->doList($request);
                        break;
                    case \App::ACTION_REMOVE:
                        $this->doRemove($request);
                        break;
                    case \App::ACTION_RESTORE:
                        $this->doRestore($request);
                        break;
                    case \App::ACTION_DELETE:
                        $this->doDelete($request);
                        break;
                }
            } else
                throw new Exception($this->parameters['NO_ACTION']);
        } catch (Exception $e) {
            $this->doLogError($e->getMessage());
            $this->doError('-1', $e->getMessage());
        }
    }

    // BaseController hooks — intentionally left empty in this controller.
    public function doView($request) {
    }
    public function doList($request) {
    }
    public function doMoveOrCopy($request) {
    }
    public function dofindById($request) {
    }
    public function doActivate($request) {
    }
    public function doDeactivate($request) {
    }
    public function doInsert($request) {
    }
    public function doRemove($request) {
    }
    public function doRestore($request) {
    }
    public function doUpdate($request) {
    }
}
$oCommonController = new CommonController($_REQUEST);
<file_sep>/app/kussd/klib/_bsDialoga.js
// bsDialog — small Bootstrap-modal dialog builder.
// Usage: new bsDialog({title, message, buttons: [{id, label, cssClass, action}], ...})
// The constructor builds the modal DOM, wires the footer buttons, and shows it.
; (function ($) {
// NOTE(review): assigned without var/let — bsDialog becomes an implicit
// global (would throw in strict mode). Confirm this export is intentional.
bsDialog = function (options) {
// Default options, overridden by the caller via $.extend below.
var o = {
header: true,
title: '',
size:'',
class:'',
draggable: true,
message: null,
buttons: []
};
var root = this;
// constructor
var construct = function (options) {
$.extend(o, options);
//init the dialog elements
if(!o.header) dialog.find('.modal-header').remove();
else dialog.find('.modal-title').html(o.title);
// Build one footer button per descriptor. Each button gets helper methods
// (toggleEnable/enable/disable/hide) and is indexed by id so callers can
// retrieve it later via getButton(id).
$.each(o.buttons, function (index, button) {
var $button = $('<button class="btn"></button>');
$button.prop('id', button.id);
$button.html(button.label);
$button.addClass(button.cssClass);
dialog.find('.modal-footer').append($button);
root.indexedButtons[button.id] = $button;
// Enable/disable the button; with no argument, toggles the current state.
$button.toggleEnable = function (enable) {
var $this = this;
if (typeof enable !== 'undefined') {
$this.prop("disabled", !enable).toggleClass('disabled', !enable);
} else {
$this.prop("disabled", !$this.prop("disabled"));
}
return $this;
};
$button.enable = function () {
var $this = this;
$this.toggleEnable(true);
return $this;
};
$button.disable = function () {
var $this = this;
$this.toggleEnable(false);
return $this;
};
// hide(true) hides the button, hide(false) shows it again.
$button.hide = function (bhide) {
var $this = this;
bhide ? $this.addClass('hidden') : $this.removeClass('hidden');
return $this;
};
// The button's action callback receives the dialog instance as argument.
$button.on('click', function(e){button.action.call(this, root);});
//dialog.find('.modal-footer').append('<button id="'+button.id+'" class="btn" type="button" ><i class="icon mdi mdi-spinner mdi-spin"></i> '+button.label+'</button>');
});
// A function-valued message is invoked to produce the body content.
if($.isFunction(options.message)) {
dialog.find('.modal-body').html(options.message.call(this, root));
//var $ct = $('<div class="switch-button switch-button-success"><input type="checkbox" checked="" name="swt452" id="swt452"><span><label for="swt452"></label></span></div>');
//dialog.find('.modal-body').append($ct);
}
// static backdrop + keyboard:false — the dialog can only be closed
// programmatically or via its own close button.
$(dialog).modal({
backdrop: 'static',
keyboard: false
});
};
// Map of button id -> jQuery button element, filled by construct().
this.indexedButtons = {};
this.show = function () {
$(dialog).modal('show');
};
this.close = function () {
$(dialog).modal('hide');
};
// Return the jQuery element of the footer button with the given id, or null.
this.getButton = function (id) {
if (typeof this.indexedButtons[id] !== 'undefined') {
return this.indexedButtons[id];
}
return null;
};
this.getModal = function () {
return dialog.find('.modal-dialog');
};
this.getModalBody = function () {
return dialog.find('.modal-body');
};
// Modal skeleton; construct() fills in title, body and footer buttons.
var dialog = $('<div class="modal fade colored-header colored-header-primary" id="md-colored" tabindex="-1" role="dialog">\
<div class="modal-dialog">\
<div class="modal-content">\
<div class="modal-header modal-header-colored bDialog-header">\
<h3 class="modal-title"></h3>\
<button class="close" type="button" data-dismiss="modal" aria-hidden="true"><span class="mdi mdi-close"></span></button>\
</div>\
<div class="modal-body" style="padding:5px;"><div class="switch-button switch-button-success"><input type="checkbox" checked="" name="swt300" id="swt300"><span><label for="swt300"></label></span></div></div>\
<div class="modal-footer bDialog-footer" style="padding: 15px 20px;"><div class="switch-button switch-button-success"><input type="checkbox" checked="" name="swt45" id="swt45"><span><label for="swt45"></label></span></div> </div>\
</div>\
</div>\
</div>');
return construct(options);
};
}(jQuery));<file_sep>/backend/src/lang/trad_fr.ini
; this is an INI file
; Attention aux apostrophes et parenthèses ils peuvent engendrer des erreurs du genre "Undefined Index nom_cle"
[general]
ERREUR_SERVEUR = Erreur du serveur. Veuillez saisir votre administrateur.
GEN_SAVED = "Enregistré(s) avec succès"
PARAM_NOT_ENOUGH = "veuillez saisir votre administrateur SVP"
PARAM_NOT_ENOUGH_ADMIN = "Code 100, Paramètres insuffisants"
PARAM_EMPTY = "Un ou plusieurs Paramètres est vide(s)."
NOT_INSERTED = "Erreur lors de l'enregistrement. Veuillez saisir votre administrateur."
OPERATION_IMPOSSIBLE = Opération impossible
INVALID_DATA = Donnée invalide
NUMBER_NOT_INSERTED = Numéro non inséré
NUMBER_ALREADY_EXIST = Numéro déjà existant
NUMBER_IS_EMPTY = Le champs du numéro est vide
OLD_PASSWORD_NOT_CORRECT = Ancien mot de passe incorrect
PASSWORD_ALREADY_USED = Vous ne pouvez pas utiliser un de vos deux derniers mot de passes, veuillez choisir un nouveau S.V.P
PASSWORD_CHANGED = Mot de passe modifié
MSG_NOT_DELETED = "Message(s) non supprimé(s)."
NO_ACTION = "Parametre ACTION non défini"
TEMPLATE_NOT_INSERTED = Modèle non inséré
UNBLOCKED = "Débloqué(s)"
BLOCKED = "Bloqué(s)"
CONTACT_NOT_UNBLOCKED = "Contact(s) non débloqué(s)"
CONTACT_NOT_BLOCKED = "Contact(s) non bloqué(s)"
GENERATED_KEY = Clé générée
ARCHIVED = "Archivé(s)"
MSG_NOT_ARCHIVED = "Message(s) non archivé(s)"
PASSWORD_ALREADY_EXPIRED = Votre mot de passe a expiré et doit être changé, veuillez choisir un nouveau S.V.P
PASSWORD_EXPIRED_IN_FEW_DAYS = Votre mot de passe expire dans
PASSWORD_EXPIRED_IN_FEW_DAYS_SUITE = jours. Veuillez le renouveler avant ce délai.
DOMAIN_PORTAIL = Portail
DOMAIN_BACKOFFICE = Backoffice
[contact]
ENTETE_EXPORT = "Prénom(s);Nom;Téléphone;Email;Groupe(s);Champ(s) additionnel(s) "
ENTETE_EXPORT_DETAILS = "Date;Destinataires;Statut "
ENTETE_EXPORT_BILLING = "Compte;Raison sociale;Dernier trafic;National;International;Total "
MOV = "Déplacé(s)"
COP = "Copié(s)"
CONTACT_ALREADY = Ce contact existe déjà ou est dans la corbeille
CONTACT_NOT_INSERTED = Contact non inséré
CONTACT_NOT_UPDATED = Contact non modifié
CELLULAR_NOT_EMPTY = "Le numéro de téléphone est requis"
CANNOT_HAVE_LIST_CONTACT = "Impossible d'avoir la liste des contacts"
CONTACT_NOT_REMOVED = "Contact(s) non supprimé(s)."
[group]
GROUP_NOT_INSERTED = Groupe non enregistré
GROUP_IN_TRASH = "Groupe déjà présent dans la corbeille. Restaurez S.V.P."
GROUP_ALREADY_EXISTS = Ce groupe existe déjà
GROUP_EMPTY = Le groupe ne doit pas être vide
IMPOSSIBLE_COPY_MOVE = "Impossible de copier ou déplacer : contact(s) déjà existant dans le(s) groupe(s) destination"
GROUP_NAME_EMPTY = Le nom du groupe ne doit pas être vide
GROUP_SAME = Vous ne pouvez pas copier ou déplacer vers le même groupe
[signature]
SIGNATURE_ALREADY_EXISTS = Cette signature existe déjà ou est dans la corbeille
NO_SIGNATURE = "Il n'y a pas de signature"
APPROVED = "Signature Approuvée"
BLOCKED = "Signature Bloquée"
LIST_SIGNATURE_IMP="Impossible d'avoir la liste des signatures"
SIGNATURE_SAVED="Signature enregistrée"
[user]
USER_ALREADY_EXISTS = Cet utilisateur existe déjà ou est dans la corbeille
AJOUT_USER_IMPOSSIBLE = "Impossible d'ajouter un utilisateur"
LIST_USER_IMP = "Impossible d'avoir la liste des utilisateurs"
USER_NOT_REMOVED = "Utilisateur(s) non supprimé(s)."
USER_NOT_ACTIVATED = "Utilisateur(s) non activé(s)."
USER_NOT_DEACTIVATED = "Utilisateur(s) non desactivé(s)."
DEACTIVATED = "Desactivé(s)."
GENERATED_CLIENTID = "Token généré."
ACTIVATED = "Activé(s)."
SAV = "Enregistré"
[campaign]
SAVED = "Enregistrée(s)."
STOPPED = "Stoppée(s)."
DONE = "Finie(s)."
CAMPAIGN_NOT_INSERTED = Campagne non insérée
CAMPAIGN_NO_START = Impossible de démarrer une campagne
STARTED = "Démarrée(s)"
CAMPAIGN_NOT_STARTED = Campagne non démarrée
CAMPAIGN_ALREADY_STARTED = Campagne déjà démarrée
PAUSED = En pause
UNABLE_PAUSE = Pause impossible
RESUMED = "Reprise(s)"
PLANNED = Planifiée
CAMPAIGN_ALREADY_PLANNED = Campagne déjà planifiée
POSTPONED = Reportée
CAMPAIGN_ALREADY_POSTPONED = Campagne déjà reportée
VOLUME = Volume journalier
START = Date début
END = Date fin
[message]
MSG_EN_COURS_TRAITEMENT = "Message(s) en cours de traitement"
NO_LIST_RECIPIENT = "Destinataire(s) non trouvé(s)"
MESSAGE_SENT = "Message(s) envoyé(s)"
NO_CONTENT = "Contenu du message vide"
[simplemessage]
MSG_NOT_INSERTED = Message non inséré
CANNOT_GET_MSG = "Impossible d'avoir le message"
CANNOT_GET_CONTACT = "Impossible d'avoir les numéros"
CANNOT_LIST_MSG = "Message(s) non trouvé(s)"
DATEORTIME_NOT_SPECIFIED = "La date du planning n'est pas spécifiée"
[trash]
TRUNKED = Placé à la corbeille
RESTORE = Restaurer
RESTORED = "Restauré(s)."
NOT_RESTORED = "Elément(s) non restauré(s)"
[template]
TITLE_EXISTS = Le titre du modèle existe déjà
[templateCategory]
NO_TEMPLATE = "Impossible d'avoir le modèle"
ENTETE_EXPORT_DETAILS = "Date;Destinataires;Statut "
[Code]
CODE_100="Erreur 100, veuillez saisir votre administrateur SVP!"
CODE_100_ADMIN="Code 100, Paramètres insuffisants"
CODE_101="Erreur 101, veuillez saisir votre administrateur SVP!"
CODE_101_ADMIN="Code 101, Certains champs sont vides"
CODE_102="Erreur 102, veuillez saisir votre administrateur SVP"
CODE_102_ADMIN="Code 102, Entité ou variable nulle"
CODE_103="Erreur 103, veuillez saisir votre administrateur SVP!"
CODE_103_ADMIN="Code 103, Ce code existe déjà"
CODE_104="Erreur 104, veuillez saisir votre administrateur SVP!"
CODE_104_ADMIN="Code 104, Erreur lors de l'insertion"
CODE_105="Erreur 105, veuillez saisir votre administrateur SVP!"
CODE_105_ADMIN="Code 105, Erreur lors de la modification"
CODE_106="Erreur 106, veuillez saisir votre administrateur SVP!"
CODE_106_ADMIN="Code 106, Erreur Interne"
CODE_107="Erreur 107, veuillez saisir votre administrateur SVP!"
CODE_107_ADMIN="Code 107, $entity=-1, Erreur lors du traitement."
CODE_108="Erreur 108, veuillez saisir votre administrateur SVP!"
CODE_108_ADMIN="Code 108, staut non valide, La liste est vide."
CODE_109="Erreur 109, veuillez saisir votre administrateur SVP!"
CODE_109_ADMIN="Code 109, Ce Libelle existe déjà"
CODE_110="Erreur 110, veuillez saisir votre administrateur SVP!"
CODE_110_ADMIN="Code 110, Cette liste est vide"
CODE_111="Erreur 111, veuillez saisir votre administrateur SVP!"
CODE_111_ADMIN="Code 111, Ce téléphone existe déjà"
CODE_112="Erreur 112, veuillez saisir votre administrateur SVP!"
CODE_112_ADMIN="Code 112, Cet Email existe déjà"
CODE_113="Erreur 113, veuillez saisir votre administrateur SVP!"
CODE_113_ADMIN="Code 113, Erreur lors de l'affectation"
CODE_114="Erreur 114, veuillez saisir votre administrateur SVP!"
CODE_114_ADMIN="Code 114, Erreur lors de l'inscription"
CODE_115="Erreur 115, veuillez saisir votre administrateur SVP!"
CODE_115_ADMIN="Code 115, Erreur Professeur occupé"
CODE_116="Erreur 116, veuillez saisir votre administrateur SVP!"
CODE_116_ADMIN="Code 116, Erreur Salle occupée"
CODE_117="Erreur 117, veuillez saisir votre administrateur SVP!"
CODE_117_ADMIN="Code 117, Erreur Classe pedagogique occupé"
CODE_118="Erreur 118, veuillez saisir votre administrateur SVP!"
CODE_118_ADMIN="Code 118, Ce matricule existe déjà"
CODE_119="Erreur 119, veuillez saisir votre administrateur SVP!"
CODE_119_ADMIN="Code 119, le format du téléphone n'est pas valide"
CODE_120="Erreur 120, veuillez saisir votre administrateur SVP!"
CODE_120_ADMIN="Code 120, un lien de parenthé père est deja assigné à cet élève"
CODE_121="Erreur 121, veuillez saisir votre administrateur SVP!"
CODE_121_ADMIN="Code 121, un lien de parenthé mère est deja assigné à cet élève"
CODE_122="Erreur 122, veuillez saisir votre administrateur SVP!"
CODE_122_ADMIN="Code 122, element existe déjà"
CODE_123="Erreur 123, veuillez saisir votre administrateur SVP!"
CODE_123_ADMIN="Code 123, Cette op"
CODE_124="Erreur 124, veuillez saisir votre administrateur SVP!"
CODE_124_ADMIN="Code 124, Echec revoke association"
CODE_125="Erreur 125, veuillez saisir votre administrateur SVP!"
CODE_125_ADMIN="Code 125,Format heure incorrect"
CODE_126="Erreur 126, veuillez saisir votre administrateur SVP!"
CODE_126_ADMIN="Code 126,horaire Existe deja "
CODE_127="Erreur 127, veuillez saisir votre administrateur SVP!"
CODE_127_ADMIN="Code 127,horaire incorrect "
CODE_128="Erreur 128, veuillez saisir votre administrateur SVP!"
CODE_128_ADMIN="Code 128,ClassePhysique est Occuppee "
CODE_129="Erreur 129, veuillez saisir votre administrateur SVP!"
CODE_129_ADMIN="Code 129,Professeur est Occuppe "
CODE_130="Erreur 130, veuillez saisir votre administrateur SVP!"
CODE_130_ADMIN="Code 130,Professeur n'existe pas "
CODE_131="Erreur 131, veuillez saisir votre administrateur SVP!"
CODE_131_ADMIN="Code 131,la classe pedagogique n'existe pas "
CODE_132="Erreur 132, veuillez saisir votre administrateur SVP!"
CODE_132_ADMIN="Code 132,la classe pedagogique est occuppee "
CODE_133="Erreur 133, veuillez saisir votre administrateur SVP!"
CODE_133_ADMIN="Code 133,Veuiller modifier le cahier au niveau de l'evaluation "
CODE_134="Erreur 134, veuillez saisir votre administrateur SVP!"
CODE_134_ADMIN="Code 134,Ce cours existe deja! "
CODE_135="Erreur 135, veuillez saisir votre administrateur SVP!"
CODE_135_ADMIN="Code 135,Utlisateur existe deja! "
CODE_136="Erreur 136, veuillez saisir votre administrateur SVP!"
CODE_136_ADMIN="Code 136,Profil existe deja! "
CODE_137="Erreur 137, veuillez saisir votre administrateur SVP!"
CODE_137_ADMIN="Code 137,Profil existe deja! "
CODE_138="Erreur 138, veuillez saisir votre administrateur SVP!"
CODE_138_ADMIN="Code 138,Format image incorrect ! "
CODE_139="Erreur 139, veuillez saisir votre administrateur SVP!"
CODE_139_ADMIN="Code 139,Action existe deja! "
CODE_140="Erreur 140, veuillez saisir votre administrateur SVP!"
CODE_140_ADMIN="Code 140,Echec d'uplode du fichier! "
CODE_141="Erreur 141, veuillez saisir votre administrateur SVP!"
CODE_141_ADMIN="Code 141,Cet etablissement n'existe pas! "
CODE_142="Erreur 142, veuillez saisir votre administrateur SVP!"
CODE_142_ADMIN="Code 142,Cet Actualite existe deja! "
CODE_143="Erreur 143, veuillez saisir votre administrateur SVP!"
CODE_143_ADMIN="Code 143,Code Etablissement n'existe pas! "
CODE_144="Erreur 144, veuillez saisir votre administrateur SVP!"
CODE_144_ADMIN="Code 144, les frais sont vides! "
CODE_145="Erreur 145, veuillez saisir votre administrateur SVP!"
CODE_145_ADMIN="Code 145, Cet élève n'a pas encore de compte! "
CODE_146="Erreur 146, veuillez saisir votre administrateur SVP!"
CODE_146_ADMIN="Code 146, Cet echeancier existe deja! "
CODE_147="Erreur 147, veuillez saisir votre administrateur SVP!"
CODE_147_ADMIN="Code 147, Ce Frais est déjà affecté à cette Classe! "
CODE_148="Erreur 148, veuillez saisir votre administrateur SVP!"
CODE_148_ADMIN="Code 148, Ce Frais est déjà affecté à ce niveau ! "
CODE_149="Erreur 149, veuillez saisir votre administrateur SVP!"
CODE_149_ADMIN="Code 149, Facture déjà payé!"
CODE_150_ADMIN="Erreur 150, la somme totale des échéances ne couvre pas le montant global de l'échéancier"
CODE_150="Code 150, veuillez saisir votre administrateur SVP!"
CODE_151_ADMIN="Erreur 151, le nombre echeance est inferieur à la totalité des échéances "
CODE_151="Code 151, veuillez saisir votre administrateur SVP!"
CODE_152_ADMIN="Erreur 152, la somme totale des échéances est supérieure au montant global de l'échéancier"
CODE_152="Code 152, veuillez saisir votre administrateur SVP!"
CODE_153_ADMIN="Error 153, cet échéance est déjà affecté à cet élève"
CODE_153="Code 153, veuillez saisir votre administrateur SVP!"
CODE_154_ADMIN="Error 154, cet échéancier est déjà personnalisé"
CODE_154="Code 154, veuillez saisir votre administrateur SVP!"
CODE_155="Erreur 155, veuillez saisir votre administrateur SVP!"
CODE_155_ADMIN="Code 155,Le montant doit etre superieur a zero !"
CODE_156="Erreur 156, veuillez saisir votre administrateur SVP!"
CODE_156_ADMIN="Code 156,Echec annulation!"
CODE_157="Erreur 157, veuillez saisir votre administrateur SVP!"
CODE_157_ADMIN="Code 157,Echecance deja Paye!"
CODE_158="Erreur 158, veuillez saisir votre administrateur SVP!"
CODE_158_ADMIN="Code 158,Le montant doit couvrir l'ensemble des echeances!"
CODE_159="Erreur 159, veuillez saisir votre administrateur SVP!"
CODE_159_ADMIN="Code 159,Facture deja Paye!"
CODE_160="Erreur 160, veuillez saisir votre administrateur SVP!"
CODE_160_ADMIN="Code 160,Echéance deja Payee!"
CODE_161="Erreur 161, veuillez saisir votre administrateur SVP!"
CODE_161_ADMIN="Cette Historique Etat a été deja enregistré!"
[Action]
SAV = "Insertion Reussie."
INSERT = "Insertion Reussie."
UPDATED = "Modification Reussie."
REMOVED = "Deplacement dans Corbeille reussi."
DELETED = "Suppression reussie."
RESTORED = "Restauration reussie."
REVOKED = "Enlevé(s)."
ARCHIVED = "Archivage reussi."
UNDOARCHIVED = "Désarchivage reussi."
REVOKED="classe association revoke avec succes"
LISTED="Lister avec succes"
ACTIVATED="Activation reussie ."
DEACTIVATED="Desactivation reussie"
CANCELLED="annulation reussie"
GENERATED="Génération reussie"
<file_sep>/backend/src/bo/exception/BlockedNumberException.php
<?php
/*
* 2SMOBILE
* ----------------------------------------
* @author Kiwi <<EMAIL>>
* @copyright 2006-2015 Kiwi/2SI Group
* @version 2.0.0
* @link http://www.kiwi.sn
* @link http://www.ssi.sn
* ----------------------------------------
*/
namespace Exceptions;
/**
 * Thrown when the destination number has been blocked by the user.
 *
 * Carries the application error code 220, passed up to MessageException.
 *
 * @author Soukeyna
 */
class BlockedNumberException extends MessageException{
function __construct() {
// 220: application error code for "recipient number blocked by user".
parent::__construct(220);
}
}
<file_sep>/backend/src/be/Profil.php
<?php
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
namespace User;
use Be\BaseEntite as BaseEntite;
/** @Entity @HasLifecycleCallbacks
* @Table(name="ud_profil") * */
class Profil extends BaseEntite {
// Doctrine entity for table ud_profil: a user profile (role) definition,
// optionally scoped to one establishment via $codeEtablissement.
// The @Column docblocks below are runtime-read Doctrine annotations and
// must stay attached to their declarations.
/** @Id
* @Column(type="integer"), @GeneratedValue
*/
private $id;
// Business code identifying the profile (required).
/**
* @Column(type="string", length=50, nullable=false)
* */
private $code;
// Free-text description of the profile.
/**
* @Column(type="string", length=255, nullable=true)
* */
private $description;
// Display label of the profile.
/**
* @Column(type="string", length=50, nullable=true)
* */
private $libelle;
// Code of the establishment this profile belongs to — presumably matches
// an establishment entity's code; verify against callers.
/**
* @Column(type="string", length=50, nullable=true)
* */
private $codeEtablissement;
function getId() {
return $this->id;
}
function getCode() {
return $this->code;
}
function getDescription() {
return $this->description;
}
function getLibelle() {
return $this->libelle;
}
function getCodeEtablissement() {
return $this->codeEtablissement;
}
function setId($id) {
$this->id = $id;
}
function setCode($code) {
$this->code = $code;
}
function setDescription($description) {
$this->description = $description;
}
function setLibelle($libelle) {
$this->libelle = $libelle;
}
function setCodeEtablissement($codeEtablissement) {
$this->codeEtablissement = $codeEtablissement;
}
}<file_sep>/backend/src/be/User.php
<?php
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
namespace User;
use Be\BaseEntite as BaseEntite;
/** @Entity @HasLifecycleCallbacks
@Table(name="ud_user"),uniqueConstraints={@UniqueConstraint(columns={"login", "password"})}) */
Class User extends BaseEntite {
// Doctrine entity for table ud_user: an account with credentials, session
// timestamps and an API token. The @Column docblocks are runtime-read
// Doctrine annotations and must stay attached to their declarations.
// NOTE(review): the class-level @Table annotation above looks malformed
// (stray comma and unbalanced parenthesis around uniqueConstraints) —
// verify that Doctrine parses the unique constraint as intended.
/** @Id
* @Column(type="bigint",length=20), @GeneratedValue
*/
private $id;
/**
* @Column(type="datetime",nullable=true) */
private $dateConnected;
/**
* @Column(type="datetime", nullable=true)
*/
private $dateDisconnected;
// Token expiry stored as bigint — presumably a Unix timestamp; confirm
// against the code that issues tokens.
/**
* @Column(type="bigint",length=20, nullable=true)
*/
private $dateexpirationToken;
/**
* @Column(type="smallint",length=1, nullable=false)
*/
private $etatCompte;
/**
* @Column(type="string", length=200, nullable=false)
*/
private $login;
/**
* @Column(type="string", length=200, nullable=false)
*/
private $password;
/**
* @Column(type="smallint",length=1, nullable=false)
*/
private $statut;
/**
* @Column(type="string", length=255, nullable=true)
*/
private $token;
/**
* @Column(type="smallint",length=1, nullable=true)
*/
private $validate;
/**
* @Column(type="string", length=200, nullable=true)
* */
private $repName;
/**
* @Column(type="string", length=200, nullable=true)
* */
private $numTel;
/**
* @return mixed
*/
public function getRepName()
{
return $this->repName;
}
/**
* @return mixed
*/
public function getNumTel()
{
return $this->numTel;
}
/**
* @param mixed $repName
*/
public function setRepName($repName)
{
$this->repName = $repName;
}
/**
* @param mixed $numTel
*/
public function setNumTel($numTel)
{
$this->numTel = $numTel;
}
/**
* @return mixed
*/
public function getId()
{
return $this->id;
}
/**
* @return mixed
*/
public function getDateConnected()
{
return $this->dateConnected;
}
/**
* @return mixed
*/
public function getDateDisconnected()
{
return $this->dateDisconnected;
}
/**
* @return mixed
*/
public function getDateexpirationToken()
{
return $this->dateexpirationToken;
}
/**
* @return mixed
*/
public function getEtatCompte()
{
return $this->etatCompte;
}
/**
* @return mixed
*/
public function getLogin()
{
return $this->login;
}
/**
* @return mixed
*/
public function getPassword()
{
return $this->password;
}
/**
* @return mixed
*/
public function getStatut()
{
return $this->statut;
}
/**
* @return mixed
*/
public function getToken()
{
return $this->token;
}
/**
* @return mixed
*/
public function getValidate()
{
return $this->validate;
}
// NOTE(review): $activateUser is not declared as a mapped property on this
// class — getActivateUser/setActivateUser read and create a dynamic
// property that Doctrine will not persist. Confirm this is intended.
/**
* @return mixed
*/
public function getActivateUser()
{
return $this->activateUser;
}
/**
* @param mixed $id
*/
public function setId($id)
{
$this->id = $id;
}
/**
* @param mixed $dateConnected
*/
public function setDateConnected($dateConnected)
{
$this->dateConnected = $dateConnected;
}
/**
* @param mixed $dateDisconnected
*/
public function setDateDisconnected($dateDisconnected)
{
$this->dateDisconnected = $dateDisconnected;
}
/**
* @param mixed $dateexpirationToken
*/
public function setDateexpirationToken($dateexpirationToken)
{
$this->dateexpirationToken = $dateexpirationToken;
}
/**
* @param mixed $etatCompte
*/
public function setEtatCompte($etatCompte)
{
$this->etatCompte = $etatCompte;
}
/**
* @param mixed $login
*/
public function setLogin($login)
{
$this->login = $login;
}
/**
* @param mixed $password
*/
public function setPassword($password)
{
$this->password = $password;
}
/**
* @param mixed $statut
*/
public function setStatut($statut)
{
$this->statut = $statut;
}
/**
* @param mixed $token
*/
public function setToken($token)
{
$this->token = $token;
}
/**
* @param mixed $validate
*/
public function setValidate($validate)
{
$this->validate = $validate;
}
/**
* @param mixed $activateUser
*/
public function setActivateUser($activateUser)
{
$this->activateUser = $activateUser;
}
}
<file_sep>/backend/src/be/Menu.php
<?php
namespace Menu;
/**
* @author <NAME>
*/
use Be\BaseEntite as BaseEntite;
// Review notes:
//  - setMenu() wrote to an undeclared $menu property (creates a dynamic
//    property; deprecated in PHP 8.2). It is now declared as a plain,
//    unmapped private field. Nothing in this class ever reads it —
//    presumably dead code; confirm with callers before removing the setter.
//  - The commented-out $sequence column and its accessors were deleted
//    (dead code).
/**
 * Doctrine entity mapped to the "ud_menu" table: one node of a USSD menu
 * tree. Each node belongs to a user and may reference a parent node
 * (self-relation), which is how the tree is built.
 *
 * @Entity @HasLifecycleCallbacks
 * @Table(name="ud_menu", uniqueConstraints={@UniqueConstraint(columns={"name", "user_id"})})
 */
class Menu extends BaseEntite
{
    /**
     * Surrogate primary key.
     * @Id
     * @Column(type="integer") @GeneratedValue
     */
    private $id;
    /**
     * Menu date (nullable).
     * @Column(type="date", length=200, nullable=true)
     */
    private $date;
    /**
     * Node name; part of the (name, user_id) unique constraint.
     * @Column(type="string", length=200, nullable=false)
     */
    private $name;
    /**
     * Text displayed for this node.
     * @Column(type="string", length=200, nullable=false)
     */
    private $text;
    /**
     * Title (nullable).
     * @Column(type="string", length=200, nullable=true)
     */
    private $title;
    /**
     * Node type — the front-end offers "acceskey" or "input" (see kmenu page).
     * @Column(type="string", length=200, nullable=true)
     */
    private $type;
    /**
     * Ordering index among siblings.
     * @Column(type="integer")
     */
    private $ordre;
    /**
     * Step number, defaults to 0.
     * @Column(type="integer", options={"default":0})
     */
    private $etape;
    /**
     * HTTP action — the front-end offers "GET" or "POST" (see kmenu page).
     * @Column(type="string", length=200, nullable=true)
     */
    private $action;
    /**
     * Method name invoked for this node (nullable).
     * @Column(type="string", length=200, nullable=true)
     */
    private $methode;
    /**
     * Web-service URL attached to this node (nullable).
     * @Column(type="string", length=200, nullable=true)
     */
    private $url;
    /**
     * Generation flag, defaults to 1; queries filter on generate=1.
     * @Column(type="integer", options={"default":1})
     */
    protected $generate;
    /**
     * Owning user.
     * @ManyToOne(targetEntity="User\User",cascade={"persist"})
     */
    private $user;
    /**
     * Parent node in the menu tree (null for the root).
     * @ManyToOne(targetEntity="Menu\Menu",cascade={"persist"})
     */
    private $parent;
    // Unmapped backing field for setMenu() (see review notes above).
    private $menu;

    // --- plain accessors (no extra logic) ---------------------------------

    public function getId() { return $this->id; }
    public function setId($id) { $this->id = $id; }

    public function getDate() { return $this->date; }
    public function setDate($date) { $this->date = $date; }

    public function getName() { return $this->name; }
    public function setName($name) { $this->name = $name; }

    public function getText() { return $this->text; }
    public function setText($text) { $this->text = $text; }

    public function getTitle() { return $this->title; }
    public function setTitle($title) { $this->title = $title; }

    public function getType() { return $this->type; }
    public function setType($type) { $this->type = $type; }

    public function getOrdre() { return $this->ordre; }
    public function setOrdre($ordre) { $this->ordre = $ordre; }

    public function getEtape() { return $this->etape; }
    public function setEtape($etape) { $this->etape = $etape; }

    public function getAction() { return $this->action; }
    public function setAction($action) { $this->action = $action; }

    public function getMethode() { return $this->methode; }
    public function setMethode($methode) { $this->methode = $methode; }

    public function getUrl() { return $this->url; }
    public function setUrl($url) { $this->url = $url; }

    public function getGenerate() { return $this->generate; }
    public function setGenerate($generate) { $this->generate = $generate; }

    public function getParent() { return $this->parent; }
    public function setParent($parent) { $this->parent = $parent; }

    public function getUser() { return $this->user; }
    public function setUser($user) { $this->user = $user; }

    // Kept for backward compatibility; writes the unmapped $menu field only.
    public function setMenu($menu) { $this->menu = $menu; }
}
<file_sep>/config/parameters.ini
; this is an INI file
host = 127.0.0.1
dbname = ussd_dynamic
user = root
password =
port = 3306
;server = http://localhost/ussddynamic/app
;backend = http://localhost/ussddynamic/backend
;;wordwrap
WordWrap = 50
;;niveau mot passe 1(jour)
Level_1 = 30
;;niveau mot passe 2(jour)
Level_2 = 60
;;niveau mot passe 3(jour)
Level_3 = 180
;;nombre de connexion maximum autorise
authorized = 100
;;nombre authentification autorisee pour un user
nombre_connexion = 80
<file_sep>/backend/src/bo/menu/MenuQueries.php
<?php
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
namespace Menu;
use Racine\Bootstrap as B;
use Exception;
use Common\CommonQueries;
use Racine\Bootstrap;
/**
 * Query layer for the USSD menu tree (table "ud_menu").
 *
 * Uses the Doctrine connection (raw SQL) and entity manager (DQL) exposed by
 * Racine\Bootstrap.
 *
 * Review fixes:
 *  - getAllParents() / getAllMenuByParents() interpolated their arguments
 *    straight into the SQL string (SQL-injection prone); both now use bound
 *    parameters like every other method in this class.
 *  - Large blocks of commented-out experimental SQL were removed (dead code).
 *
 * NOTE(review): the getAllTests* methods query "da_test"/"da_testcandidat"
 * tables unrelated to menus — they look carried over from another project;
 * confirm they are still called before keeping them.
 */
class MenuQueries extends \Bo\BaseAction implements \Bo\BaseQueries
{
    /** Shared generic-query helper. */
    private $commonQueries;

    public function __construct()
    {
        $this->commonQueries = new CommonQueries();
        // All dates handled by this layer are expressed in GMT.
        date_default_timezone_set('GMT');
    }

    // \Bo\BaseQueries contract: intentionally left as no-ops in this class.
    public function activate($entity, $listId, $userId = null)
    {}

    public function deactivate($entity, $listId, $userId = null)
    {}

    public function delete($entity, $listId)
    {}

    public function insert($entity, $ligneFactures = null)
    {}

    public function remove($entity, $listId, $userId = null)
    {}

    public function restore($entity, $listId, $userId = null)
    {}

    public function update($entity, $supp = null)
    {}

    public function view($id)
    {}

    /**
     * Tests for a given school year and establishment.
     *
     * @return array|null matching rows, null when none; implicitly null when
     *                    either argument is null (original contract kept).
     */
    public function getAllTestsByEtabId($anId, $etabId)
    {
        if ($anId != null && $etabId != null) {
            $stmt = Bootstrap::$entityManager->getConnection()->prepare("SELECT ta.id as id, ta.libelle as libelle FROM da_test ta WHERE ta.anneeScolaire_id=:anId AND ta.etablissement_id=:etabId ");
            $stmt->execute(array(
                'etabId' => $etabId->getId(),
                'anId' => $anId->getId()
            ));
            $result = $stmt->fetchAll();
            if ($result != null)
                return $result;
            else
                return null;
        }
    }

    /**
     * Active tests (status=1) for a level and school year.
     *
     * @return array|null matching rows, null when none.
     */
    public function getAllTestsByNiveauId($nivId, $anId)
    {
        $this->doLogInfo('nivId=' . $nivId->getId() . ' et anId=' . $anId->getId());
        if ($anId != null && $nivId != null) {
            $stmt = Bootstrap::$entityManager->getConnection()->prepare(
                "SELECT DISTINCT(t.id ) testId, t.libelle FROM da_test t WHERE t.anneeScolaire_id=:anId
            AND t.niveau_id=:nivId AND t.status=1");
            $stmt->execute(array(
                'nivId' => $nivId->getId(),
                'anId' => $anId->getId()
            ));
            $result = $stmt->fetchAll();
            if ($result != null) {
                $this->doLogInfo('ok');
                return $result;
            } else {
                $this->doLogInfo('no');
                return null;
            }
        }
    }

    /**
     * Active tests for a level/year that have no (non-archived) candidate yet.
     *
     * @return array|null matching rows, null when none.
     */
    public function getAllTestsNoCandiByNiveauId($nivId, $anId)
    {
        $this->doLogInfo('nivId=' . $nivId->getId() . ' et anId=' . $anId->getId());
        if ($anId != null && $nivId != null) {
            $stmt = Bootstrap::$entityManager->getConnection()->prepare(
                "SELECT DISTINCT(t.id ) testId, t.libelle FROM da_test t,da_testcandidat tc WHERE t.anneeScolaire_id=:anId
            AND t.niveau_id=:nivId AND t.status=1 AND t.id NOT IN
            (SELECT DISTINCT(t.id ) FROM da_test t, da_testcandidat tc
            WHERE tc.testAdmis_id=t.id AND t.anneeScolaire_id=:anId
            AND t.niveau_id=:nivId AND t.status=1 AND tc.archive=0)");
            $stmt->execute(array(
                'nivId' => $nivId->getId(),
                'anId' => $anId->getId()
            ));
            $result = $stmt->fetchAll();
            if ($result != null) {
                $this->doLogInfo('ok');
                return $result;
            } else {
                $this->doLogInfo('no');
                return null;
            }
        }
    }

    /**
     * Active tests for a level/year with their candidate counts.
     *
     * @return array|null matching rows, null when none.
     */
    public function getAllTestsCandiByNivId($nivId, $anId)
    {
        $this->doLogInfo('nivId=' . $nivId->getId() . ' et anId=' . $anId->getId());
        if ($anId != null && $nivId != null) {
            $stmt = Bootstrap::$entityManager->getConnection()->prepare(
                "SELECT ta.id testId,
            ta.libelle, COUNT(tc.candidat_id) nbCandi
            FROM da_test ta, da_testcandidat tc
            where tc.testAdmis_id=ta.id and ta.anneeScolaire_id=:anId
            and ta.niveau_id=:nivId and ta.status=1 AND tc.status=1 AND tc.archive=0 GROUP by ta.id order by ta.sequence asc");
            $stmt->execute(array(
                'nivId' => $nivId->getId(),
                'anId' => $anId->getId()
            ));
            $result = $stmt->fetchAll();
            if ($result != null) {
                $this->doLogInfo('ok');
                return $result;
            } else {
                $this->doLogInfo('no');
                return null;
            }
        }
    }

    /**
     * Candidates registered on a given test.
     *
     * @return array|null matching rows, null when none.
     */
    public function getAllTestsCandi($test)
    {
        if ($test != null) {
            $stmt = Bootstrap::$entityManager->getConnection()->prepare(
                "SELECT ta.id testId,
            ta.libelle, tc.candidat_id candidatId
            FROM da_test ta, da_testcandidat tc, da_candidat c
            where tc.testAdmis_id=ta.id
            and tc.testAdmis_id=:test and tc.candidat_id=c.id and ta.status=1 AND tc.status=1 AND tc.archive=0");
            $stmt->execute(array(
                'test' => $test->getId(),
            ));
            $result = $stmt->fetchAll();
            if ($result != null) {
                $this->doLogInfo('ok');
                return $result;
            } else {
                $this->doLogInfo('no');
                return null;
            }
        }
    }

    /**
     * All active, generated menu rows owned by a user, ordered by parent.
     *
     * @return array|null matching rows, null when none.
     */
    public function getAllMenuByUser($user)
    {
        $this->doLogInfo('userId=' . $user->getId());
        if ($user != null) {
            $stmt = Bootstrap::$entityManager->getConnection()->prepare(
                "SELECT DISTINCT(m.id), parent_id, m.name, m.title, text, type, action, methode, url,ordre, generate FROM ud_menu m WHERE m.user_id=:user
            AND m.status=1 AND m.generate=1 order by parent_id");
            $stmt->execute(array(
                'user' => $user->getId()
            ));
            $result = $stmt->fetchAll();
            if ($result != null) {
                $this->doLogInfo('ok');
                return $result;
            } else {
                $this->doLogInfo('no');
                return null;
            }
        }
    }

    /**
     * The user's root menu node (active, generated, no parent) via DQL.
     *
     * @return mixed|null the first matching row, null when none.
     */
    public function getMasterParent($user)
    {
        if ($user != null) {
            $query = Bootstrap::$entityManager->createQuery("select DISTINCT(m.id) id, m.name, m.title, m.ordre, m.generate, m.text, m.type, m.action, m.methode
            from Menu\Menu m where m.user=:user and m.parent is null
            AND m.status=1 AND m.generate=1");
            $query->setParameter('user', $user);
            $result = $query->getResult();
            if ($result != null)
                return $result[0];
            else
                return null;
        }
    }

    /**
     * All active, generated children of a given menu node via DQL.
     *
     * @return array|null matching rows, null when none.
     */
    public function getAllChildByParentId($parent)
    {
        if ($parent != null) {
            $query = Bootstrap::$entityManager->createQuery("select DISTINCT(m.id) id, m.name, m.title, m.ordre, m.generate, m.text, m.type, m.action, m.methode
            from Menu\Menu m where m.parent=:parent AND m.status=1 AND m.generate=1");
            $query->setParameter('parent', $parent);
            $result = $query->getResult();
            if ($result != null)
                return $result;
            else
                return null;
        }
    }

    /**
     * Distinct parent ids (with node columns) of a user's active menus.
     *
     * Review fix: $userId is now a bound parameter instead of being
     * interpolated into the SQL string.
     *
     * @return array|null matching rows, null when none.
     */
    public function getAllParents($userId)
    {
        if ($userId != null) {
            $stmt = Bootstrap::$entityManager->getConnection()->prepare(
                "SELECT DISTINCT(parent_id) parent_id, m.name, m.title, m.text, m.type, m.action, m.methode, m.url, m.generate FROM ud_menu m WHERE m.user_id=:userId
            AND m.status=1 AND m.generate=1");
            $stmt->execute(array(
                'userId' => $userId
            ));
            $result = $stmt->fetchAll();
            if ($result != null) {
                return $result;
            } else {
                return null;
            }
        }
    }

    /**
     * All active, generated menu rows under a given parent id.
     *
     * Review fix: $parent_id is now a bound parameter instead of being
     * interpolated into the SQL string.
     *
     * @return array|null matching rows, null when none.
     */
    public function getAllMenuByParents($parent_id)
    {
        if ($parent_id != null) {
            $stmt = Bootstrap::$entityManager->getConnection()->prepare(
                "SELECT DISTINCT(m.id), parent_id, m.name, m.title, text, type, action, methode, url, generate, ordre FROM ud_menu m WHERE
            m.status=1 AND m.generate=1 and parent_id = :parentId");
            $stmt->execute(array(
                'parentId' => $parent_id
            ));
            $result = $stmt->fetchAll();
            if ($result != null) {
                return $result;
            } else {
                return null;
            }
        }
    }
}
<file_sep>/app/kussd/kmenu/index.php
<?php
// Access guard: this page requires the userId/userLogin cookies set at login;
// anonymous visitors are redirected back to the login page and rendering stops.
// NOTE(review): cookies are client-supplied — presumably validated server-side
// on each API call; confirm.
if(!isset($_COOKIE['userId']) || !isset($_COOKIE['userLogin'])){
    header('Location: ../../../index.php');
    exit();
}
// Identity of the logged-in user, consumed by the page scripts below.
$userId = $_COOKIE['userId'];
$userLogin = $_COOKIE['userLogin'];
?>
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="description" content="">
<meta name="author" content="">
<link rel="shortcut icon" href="../../assets/img/logo-fav.png">
<title>UniversEdu</title>
<link rel="stylesheet" type="text/css" href="../../assets/lib/perfect-scrollbar/css/perfect-scrollbar.css"/>
<link rel="stylesheet" type="text/css" href="../../assets/lib/material-design-icons/css/material-design-iconic-font.min.css"/>
<link rel="stylesheet" type="text/css" href="../../assets/lib/datetimepicker/css/bootstrap-datetimepicker.min.css"/>
<link rel="stylesheet" type="text/css" href="../../assets/lib/select2/css/select2.min.css"/>
<link rel="stylesheet" type="text/css" href="../../assets/lib/dropzone/dropzone.css"/>
<link rel="stylesheet" href="../../assets/css/app.css" type="text/css"/>
<link rel="stylesheet" href="../css/core.css" type="text/css"/>
<!-- Query TreeTable plugin's script -->
<link rel="stylesheet" href="../../assets/css/jquery.treetable.css" type="text/css"/>
<!-- <link rel="stylesheet" href="../../assets/css/jquery.treetable.theme.default.css" type="text/css"/> -->
<link rel="stylesheet" href="../css/core.css" type="text/css"/>
<!-- Ajoutés : additional page-specific stylesheets -->
<link rel="stylesheet" href="../css/prism.css" type="text/css" />
<link rel="stylesheet" href="../css/demo.css" type="text/css" />
<link rel="stylesheet" href="../css/intlTelInput.css" type="text/css" />
<link rel="stylesheet" href="../css/isValidNumber.css" type="text/css" />
<link rel="stylesheet" href="../../assets/css/app.css" type="text/css" />
<link rel="stylesheet" href="../css/core.css" type="text/css" />
<link rel="stylesheet" href="../../plugins/pnotify/pnotify.custom.min.css" type="text/css" />
<style>
<!--
.expander{
margin-left: -13px !important;
padding: 8px !important;
}
-->
</style>
</head>
<body>
<!-- <div class="be-top-header"> -->
<div class="be-wrapper">
<nav class="navbar navbar-expand fixed-top be-top-header">
<div class="container-fluid">
<div class="be-navbar-header"><a class="navbar-brand" href="#" style="background-image:none; font-size: 20px;">KUSSD+</a>
</div>
</div>
</nav>
<!-- <div class=""> -->
<div class="page-head" style=" top: -10px;">
<div class="btn-toolbar float-left margin-bottom">
<h3 style="color: #14181f; font-family: bold;">Menus Hatsi Jari</h3>
</div>
<div class="btn-toolbar float-right margin-bottom">
<div class="btn-group btn-space mr-2">
<button id="btn_newMenu" class="btn btn-secondary" type="button" title="Ajouter un nouveau Menu USSD" data-toggle="tooltip" data-placement="bottom"><i class="icon mdi mdi-plus"></i></button>
</div>
<div class="btn-group btn-space mr-2">
<button id="btn_generateMenu" class="btn btn-secondary" type="button" title="generer menu(s)" data-toggle="tooltip" data-placement="bottom"><i class="icon mdi mdi-play"></i></button>
</div>
</div>
</div>
<div class="main-content container-fluid">
<div class="card card-table">
<table class="table" id="tree">
<thead>
<tr>
<th>Titre(title)</th>
<th>Text</th>
<th>Type(acceskey ou input)</th>
<th>Parent</th>
<th>Action(GET ou POST)</th>
<th>URL(WebService)</th>
</tr>
</thead>
<tbody class="tree-tbody">
</tbody>
</table>
</div>
</div>
<!-- </div> -->
</div>
<!-- </div> -->
<script src="//ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js" type="text/javascript"></script>
<script src="../../assets/lib/perfect-scrollbar/js/perfect-scrollbar.min.js" type="text/javascript"></script>
<script src="../../assets/lib/bootstrap/dist/js/bootstrap.bundle.min.js" type="text/javascript"></script>
<script src="../../assets/js/app.js" type="text/javascript"></script>
<script src="../../assets/lib/jquery-ui/jquery-ui.min.js" type="text/javascript"></script>
<script src="../../assets/lib/moment.js/min/moment.min.js" type="text/javascript"></script>
<script src="../../assets/lib/datetimepicker/js/bootstrap-datetimepicker.min.js" type="text/javascript"></script>
<script src="../../assets/lib/fuelux/js/wizard.js" type="text/javascript"></script>
<script src="../../assets/lib/dropzone/dropzone.js" type="text/javascript"></script>
<script src="../../assets/lib/select2/js/select2.min.js" type="text/javascript"></script>
<script src="../../assets/lib/select2/js/select2.full.min.js" type="text/javascript"></script>
<script src="../../assets/lib/jquery.maskedinput/jquery.maskedinput.js" type="text/javascript"></script>
<script src="../../assets/js/app-form-masks.js" type="text/javascript"></script>
<!-- <script src="../kadm/kadm.js" type="text/javascript"></script> -->
<script src="../klib/kutils.js" type="text/javascript"></script>
<script src="kmenu.js" type="text/javascript"></script>
<script src="kmain.js" type="text/javascript"></script>
<script src="../klib/bsDialog.js" type="text/javascript"></script>
<script src="../../assets/lib/parsley/parsley.min.js" type="text/javascript"></script>
<script src="../../plugins/pnotify/pnotify.custom.min.js"type="text/javascript"></script>
<script src="../../assets/js/prism.js" type="text/javascript"></script>
<script src="../../assets/js/intlTelInput.js" type="text/javascript"></script>
<!-- Query TreeTable plugin's script -->
<script src="../../assets/js/jquery.treetable.js"></script>
<script src="../../assets/js/jquery.cookie.js" type="text/javascript"></script>
</body>
</html>
<file_sep>/backend/src/be/BaseEntite.php
<?php
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
namespace Be;
/**
* Description of BaseEntite
*
* @author Admin
*/
/**
 * Common audit/lifecycle fields shared by every persisted entity.
 *
 * Review fixes — three variable-variable typos that read/wrote a dynamically
 * named property instead of the declared one:
 *  - getRemovedBy():    `this->$removedBy`    is now `this->removedBy`
 *  - setRemovedBy():    `this->$removedBy`    is now `this->removedBy`
 *  - setRestoredDate(): `this->$restoredDate` is now `this->restoredDate`
 *
 * Known naming quirks kept unchanged for schema/caller compatibility:
 * $undoArhivedDate (misspelled "Arhived") and $undoarchivedBy (lowercase "a").
 */
abstract class BaseEntite {
    /**
     * Row-status flag; set to 1 by doPrePersist, queries filter on status=1.
     * @Column(type="integer", options={"default":1})
     */
    protected $status;
    /**
     * Activation flag; 0 = deactivated. Set to 1 by doPrePersist.
     * @Column(type="integer", options={"default":1})
     */
    protected $activate;
    /**
     * Archive flag; set to 0 by doPrePersist.
     * @Column(type="integer", options={"default":0})
     */
    protected $archive;
    /**
     * @Column(type="datetime", nullable=true)
     */
    protected $createdDate;
    /**
     * @Column(type="datetime", nullable=true)
     */
    protected $updatedDate;
    /**
     * @Column(type="datetime", nullable=true)
     */
    protected $deletedDate;
    /**
     * @Column(type="datetime", nullable=true)
     */
    protected $activatedDate;
    /**
     * @Column(type="datetime", nullable=true)
     */
    protected $deactivatedDate;
    /**
     * @Column(type="datetime", nullable=true)
     */
    protected $archivedDate;
    /**
     * @Column(type="datetime", nullable=true)
     */
    protected $restoredDate;
    /**
     * @Column(type="datetime", nullable=true)
     */
    protected $removedDate;
    /**
     * @Column(type="datetime", nullable=true)
     */
    protected $undoArhivedDate;
    /**
     * @Column(type="integer", length=11, nullable=true)
     */
    protected $createdBy;
    /**
     * @Column(type="integer", length=11, nullable=true)
     */
    protected $updatedBy;
    /**
     * @Column(type="integer", length=11, nullable=true)
     */
    protected $removedBy;
    /**
     * @Column(type="integer", length=11, nullable=true)
     */
    protected $deletedBy;
    /**
     * @Column(type="integer", length=11, nullable=true)
     */
    protected $restoredBy;
    /**
     * @Column(type="integer", length=11, nullable=true)
     */
    protected $activatedBy;
    /**
     * @Column(type="integer", length=11, nullable=true)
     */
    protected $deactivatedBy;
    /**
     * @Column(type="integer", length=11, nullable=true)
     */
    protected $archivedBy;
    /**
     * @Column(type="integer", length=11, nullable=true)
     */
    protected $undoarchivedBy;

    // --- plain accessors (no extra logic) ---------------------------------

    public function getStatus() { return $this->status; }
    public function setStatus($status) { $this->status = $status; }

    public function getActivate() { return $this->activate; }
    public function setActivate($activate) { $this->activate = $activate; }

    public function getArchive() { return $this->archive; }
    public function setArchive($archive) { $this->archive = $archive; }

    public function getCreatedDate() { return $this->createdDate; }
    public function setCreatedDate($createdDate) { $this->createdDate = $createdDate; }

    public function getUpdatedDate() { return $this->updatedDate; }
    public function setUpdatedDate($updatedDate) { $this->updatedDate = $updatedDate; }

    public function getRemovedDate() { return $this->removedDate; }
    public function setRemovedDate($removedDate) { $this->removedDate = $removedDate; }

    public function getDeletedDate() { return $this->deletedDate; }
    public function setDeletedDate($deletedDate) { $this->deletedDate = $deletedDate; }

    public function getRestoredDate() { return $this->restoredDate; }
    // Fixed: previously assigned to `$this->$restoredDate` (dynamic name).
    public function setRestoredDate($restoredDate) { $this->restoredDate = $restoredDate; }

    public function getActivatedDate() { return $this->activatedDate; }
    public function setActivatedDate($activatedDate) { $this->activatedDate = $activatedDate; }

    public function getDeactivatedDate() { return $this->deactivatedDate; }
    public function setDeactivatedDate($deactivatedDate) { $this->deactivatedDate = $deactivatedDate; }

    public function getArchivedDate() { return $this->archivedDate; }
    public function setArchivedDate($archivedDate) { $this->archivedDate = $archivedDate; }

    public function getUndoArhivedDate() { return $this->undoArhivedDate; }
    public function setUndoArhivedDate($undoArhivedDate) { $this->undoArhivedDate = $undoArhivedDate; }

    public function getCreatedBy() { return $this->createdBy; }
    public function setCreatedBy($createdBy) { $this->createdBy = $createdBy; }

    public function getUpdatedBy() { return $this->updatedBy; }
    public function setUpdatedBy($updatedBy) { $this->updatedBy = $updatedBy; }

    // Fixed pair: previously used `$this->$removedBy` (dynamic name) in both.
    public function getRemovedBy() { return $this->removedBy; }
    public function setRemovedBy($removedBy) { $this->removedBy = $removedBy; }

    public function getDeletedBy() { return $this->deletedBy; }
    public function setDeletedBy($deletedBy) { $this->deletedBy = $deletedBy; }

    public function getRestoredBy() { return $this->restoredBy; }
    public function setRestoredBy($restoredBy) { $this->restoredBy = $restoredBy; }

    public function getActivatedBy() { return $this->activatedBy; }
    public function setActivatedBy($activatedBy) { $this->activatedBy = $activatedBy; }

    public function getDeactivatedBy() { return $this->deactivatedBy; }
    public function setDeactivatedBy($deactivatedBy) { $this->deactivatedBy = $deactivatedBy; }

    public function getArchivedBy() { return $this->archivedBy; }
    public function setArchivedBy($archivedBy) { $this->archivedBy = $archivedBy; }

    public function getUndoarchivedBy() { return $this->undoarchivedBy; }
    public function setUndoarchivedBy($undoarchivedBy) { $this->undoarchivedBy = $undoarchivedBy; }

    /** @PrePersist */
    public function doPrePersist() {
        // New rows start active, non-archived, status=1, stamped "now".
        $this->archive = 0;
        $this->activate = 1;
        $this->status = 1;
        $this->createdDate = new \DateTime("now");
    }

    /** @PreUpdate */
    public function doPreUpdate() {
        $this->updatedDate = new \DateTime("now");
    }
}
<file_sep>/app/kussd/klib/kutils.js
/* ==========================================================================
* @author <NAME>
* Univers Edu V2
* kutils.js plugin contient toutes les fonctions utiles du projet
* @copyright 2018 Kiwi/2SI Group
* ==========================================================================
*/
;
(function ($) {
    /**
     * kutils — project helper bag (date formatting, e-mail validation).
     * Deliberately assigned to a global, matching the original plugin style.
     *
     * Review fix: the month dispatch used literals such as `08`/`09`, which
     * are invalid legacy-octal syntax errors in strict mode; it is replaced
     * by a lookup table indexed with parseInt(..., 10). Behavior is
     * unchanged for months 01..12, and an out-of-range month falls back to
     * the raw component, mirroring the original if/else chain's default.
     */
    kutils = function (options) {
        var o = {
            addUrl: '/e/Add/',
            editUrl: '/e/Update/'
        };

        // French abbreviated month names; index 0 = January.
        var MOIS = ['Janv', 'Fev', 'Mars', 'Avril', 'Mai', 'Juin',
                    'Juil', 'Août', 'Sept', 'Oct', 'Nov', 'Dec'];

        // constructor: merge caller options into the defaults
        var construct = function (options) {
            $.extend(o, options);
        };

        /**
         * Format a date as "DD Mon YYYY" with a French month abbreviation.
         * @param dateE  a "YYYY-MM-DD..." string ('TEST'/'SEARCH' actions)
         *               or an object carrying a `.date` string (other actions)
         * @param action 'TEST' | 'SEARCH' | anything else
         */
        this.FormatageDate = function (dateE, action) {
            var dateN;
            var dateNew;
            if (action === 'TEST')
                dateN = dateE;
            else
                dateN = dateE.date;
            if (action === 'SEARCH')
                dateNew = dateE;
            else
                dateNew = dateN.substring(0, 10);
            var chD = dateNew.split("-");
            var mois = MOIS[parseInt(chD[1], 10) - 1] || chD[1];
            return chD[2] + " " + mois + " " + chD[0];
        };

        // Basic e-mail syntax check (kept private, as in the original).
        function ValidateEmail(email) {
            var expr = /^([\w-\.]+)@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.)|(([\w-]+\.)+))([a-zA-Z]{2,4}|[0-9]{1,3})(\]?)$/;
            return expr.test(email);
        }

        return construct(options);
    };
}(jQuery));
<file_sep>/backend/src/bo/user/UserQueries.php
<?php
namespace User;
use Racine\Bootstrap as Bootstrap;
use Racine\Bootstrap as B;
use Log\Loggers as Logger;
use User\User as User;
use Common\CommonManager as CommonManager;
/**
 * Query layer for user accounts (table "ud_user").
 */
class UserQueries extends \Bo\BaseAction implements \Bo\BaseQueries {
    /** Per-class logger. */
    private $logger;
    /** Generic entity-manager helper. */
    private $commonManager;

    public function __construct() {
        $this->commonManager = new CommonManager;
        $this->logger = new Logger(__CLASS__);
        // All dates handled by this layer are expressed in GMT.
        date_default_timezone_set('GMT');
    }

    /**
     * Look up the active account matching the given credentials.
     *
     * Review fixes:
     *  - Credentials were interpolated straight into the SQL string (SQL
     *    injection, and the password placeholder was corrupted to
     *    "$<PASSWORD>"); the query now uses bound parameters.
     *  - `catch (Exception $e)` resolved to the non-existent User\Exception
     *    inside this namespace; it now catches the global \Exception.
     *
     * NOTE(review): the password is compared verbatim — presumably hashed by
     * the caller before reaching this method; confirm.
     *
     * @return array|null the matching row, or null when none.
     */
    public function signin($login, $password) {
        $sql = "SELECT * FROM ud_user u WHERE u.login = :login AND u.password = :password AND u.status = 1";
        try {
            $stmt = B::$entityManager->getConnection()->prepare($sql);
            $stmt->execute(array('login' => $login, 'password' => $password));
            $rslt = $stmt->fetchAll();
            if (empty($rslt)) {
                return null;
            } else {
                return $rslt[0];
            }
        } catch (\Exception $e) {
            $this->logger->log->trace($e->getMessage());
            throw $e;
        }
    }

    /**
     * Fetch a user by primary key via the common manager.
     * NOTE(review): the entity name 'Cabinet\User' looks carried over from
     * another project — 'User\User' seems more likely here; kept as-is until
     * confirmed, to preserve the original behavior.
     */
    public function findById($userId, $supp = null) {
        return $this->commonManager->findById('Cabinet\User', $userId);
    }

    // \Bo\BaseQueries contract: intentionally left as no-ops in this class.
    public function activate($entity, $listId, $userId = null) {
    }

    public function deactivate($entity, $listId, $userId = null) {
    }

    public function delete($entity, $listId) {
    }

    public function insert($entity, $supp = null) {
    }

    public function remove($entity, $listId, $userId = null) {
    }

    public function restore($entity, $listId, $userId = null) {
    }

    public function update($entity, $supp = null) {
    }

    public function view($id) {
    }
}
<file_sep>/lib/doctrine/vendor/composer/autoload_static.php
<?php
// autoload_static.php @generated by Composer
namespace Composer\Autoload;
class ComposerStaticInitd84324078eac9e0f5cbb7e606278da29
{
public static $prefixesPsr0 = array (
'S' =>
array (
'Symfony\\Component\\Console\\' =>
array (
0 => __DIR__ . '/..' . '/symfony/console',
),
),
'D' =>
array (
'Doctrine\\ORM\\' =>
array (
0 => __DIR__ . '/..' . '/doctrine/orm/lib',
),
'Doctrine\\DBAL\\' =>
array (
0 => __DIR__ . '/..' . '/doctrine/dbal/lib',
),
'Doctrine\\Common\\Lexer\\' =>
array (
0 => __DIR__ . '/..' . '/doctrine/lexer/lib',
),
'Doctrine\\Common\\Inflector\\' =>
array (
0 => __DIR__ . '/..' . '/doctrine/inflector/lib',
),
'Doctrine\\Common\\Collections\\' =>
array (
0 => __DIR__ . '/..' . '/doctrine/collections/lib',
),
'Doctrine\\Common\\Cache\\' =>
array (
0 => __DIR__ . '/..' . '/doctrine/cache/lib',
),
'Doctrine\\Common\\Annotations\\' =>
array (
0 => __DIR__ . '/..' . '/doctrine/annotations/lib',
),
'Doctrine\\Common\\' =>
array (
0 => __DIR__ . '/..' . '/doctrine/common/lib',
),
),
);
public static $classMap = array (
'Be\\BaseEntite' => __DIR__ . '/../../../..'.'/config' . '/../backend/src/be/BaseEntite.php',
'Bo\\BaseAction' => __DIR__ . '/../../../..'.'/config' . '/../backend/src/bo/BaseAction.php',
'Bo\\BaseController' => __DIR__ . '/../../../..'.'/config' . '/../backend/src/bo/BaseController.php',
'Bo\\BaseManager' => __DIR__ . '/../../../..'.'/config' . '/../backend/src/bo/BaseManager.php',
'Bo\\BaseQueries' => __DIR__ . '/../../../..'.'/config' . '/../backend/src/bo/BaseQueries.php',
'CommonController' => __DIR__ . '/../../../..'.'/config' . '/../backend/src/bo/common/CommonController.php',
'Common\\Common' => __DIR__ . '/../../../..'.'/config' . '/../backend/src/bo/common/Common.php',
'Common\\CommonManager' => __DIR__ . '/../../../..'.'/config' . '/../backend/src/bo/common/CommonManager.php',
'Common\\CommonQueries' => __DIR__ . '/../../../..'.'/config' . '/../backend/src/bo/common/CommonQueries.php',
'Common\\Cookie' => __DIR__ . '/../../../..'.'/config' . '/../backend/src/bo/common/Cookie.php',
'Common\\DoctrineLogger' => __DIR__ . '/../../../..'.'/config' . '/../backend/src/bo/common/DoctrineLogger.php',
'Exceptions\\BlockedNumberException' => __DIR__ . '/../../../..'.'/config' . '/../backend/src/bo/exception/BlockedNumberException.php',
'Exceptions\\ConstraintException' => __DIR__ . '/../../../..'.'/config' . '/../backend/src/bo/exception/ConstraintException.php',
'Exceptions\\EmptyMessageException' => __DIR__ . '/../../../..'.'/config' . '/../backend/src/bo/exception/EmptyMessageException.php',
'Exceptions\\EmptyNumberException' => __DIR__ . '/../../../..'.'/config' . '/../backend/src/bo/exception/EmptyNumberException.php',
'Exceptions\\InternationalTrafficNotAllowedException' => __DIR__ . '/../../../..'.'/config' . '/../backend/src/bo/exception/InternationalTrafficNotAllowedException.php',
'Exceptions\\MessageException' => __DIR__ . '/../../../..'.'/config' . '/../backend/src/bo/exception/MessageException.php',
'Exceptions\\StatutException' => __DIR__ . '/../../../..'.'/config' . '/../backend/src/bo/exception/StatutException.php',
'Exceptions\\TooLongMessageException' => __DIR__ . '/../../../..'.'/config' . '/../backend/src/bo/exception/TooLongMessageException.php',
'Exceptions\\TooLongOrShortNumberException' => __DIR__ . '/../../../..'.'/config' . '/../backend/src/bo/exception/TooLongOrShortNumberException.php',
'Exceptions\\UnknownIndicativeException' => __DIR__ . '/../../../..'.'/config' . '/../backend/src/bo/exception/UnknownIndicativeException.php',
'Log\\Loggers' => __DIR__ . '/../../../..'.'/config' . '/Loggers.php',
'Menu\\Menu' => __DIR__ . '/../../../..'.'/config' . '/../backend/src/be/Menu.php',
'Menu\\MenuController' => __DIR__ . '/../../../..'.'/config' . '/../backend/src/bo/menu/MenuController.php',
'Menu\\MenuManager' => __DIR__ . '/../../../..'.'/config' . '/../backend/src/bo/menu/MenuManager.php',
'Menu\\MenuQueries' => __DIR__ . '/../../../..'.'/config' . '/../backend/src/bo/menu/MenuQueries.php',
'Racine\\Bootstrap' => __DIR__ . '/../../../..'.'/config' . '/bootstrap.php',
'UVd\\DoctrineFunction\\DateFormat' => __DIR__ . '/../../../..'.'/config' . '/../lib/DoctrineFunction/DateFormat.php',
'UVd\\DoctrineFunction\\UnixTimestamp' => __DIR__ . '/../../../..'.'/config' . '/../lib/DoctrineFunction/UnixTimestamp.php',
'User\\Profil' => __DIR__ . '/../../../..'.'/config' . '/../backend/src/be/Profil.php',
'User\\User' => __DIR__ . '/../../../..'.'/config' . '/../backend/src/be/User.php',
'User\\UserController' => __DIR__ . '/../../../..'.'/config' . '/../backend/src/bo/user/UserController.php',
'User\\UserManager' => __DIR__ . '/../../../..'.'/config' . '/../backend/src/bo/user/UserManager.php',
'User\\UserQueries' => __DIR__ . '/../../../..'.'/config' . '/../backend/src/bo/user/UserQueries.php',
'tools\\Tool' => __DIR__ . '/../../../..'.'/config' . '/../backend/src/tools/Tool.php',
);
    /**
     * Composer-generated bootstrap: returns a closure that copies the static
     * autoload maps into the given ClassLoader.
     *
     * The closure is bound to ClassLoader's scope so it may assign the
     * loader's private $prefixesPsr0 / $classMap properties directly.
     */
    public static function getInitializer(ClassLoader $loader)
    {
        return \Closure::bind(function () use ($loader) {
            $loader->prefixesPsr0 = ComposerStaticInitd84324078eac9e0f5cbb7e606278da29::$prefixesPsr0;
            $loader->classMap = ComposerStaticInitd84324078eac9e0f5cbb7e606278da29::$classMap;
        }, null, ClassLoader::class);
    }
}
<file_sep>/app/plugins/kprod.js
; (function ($) {
    /**
     * kprod plugin: small AJAX helper for fetching products by category.
     *
     * @param {Object} options overrides for the default settings
     *                         (e.g. { getProByCatUrl: '...' })
     */
    kprod = function (options) {
        // Default settings, overridable through the constructor options.
        var o = {
            getProByCatUrl: 'http://localhost/DemoUglifyJs/Controller/CategorieController.php?action=getProduit'
        };

        // Merge caller-supplied options into the defaults.
        var construct = function (options) {
            $.extend(o, options);
        };

        /**
         * Fetch the products of category 1 and hand the decoded JSON
         * payload to the callback.
         *
         * @param {Function} onDataReceived invoked with the server result
         */
        this.getProByCat = function (onDataReceived) {
            $.ajax({
                method: "GET",
                url: o.getProByCatUrl,
                dataType: "json",
                data: { idcat: 1 },
                async: true,
                success: function (result) {
                    onDataReceived(result);
                }
            });
        };

        // BUG FIX: the constructor was defined but never invoked, so the
        // caller's options were silently ignored. Apply them now.
        construct(options);
    };
}(jQuery));
<file_sep>/lib/i18n/class/l18n.class.php
<?php
/**
 * l10n / i18n helper.
 * @author <NAME> - http://www.romainlaurent.com
 * @version 0.2
 *
 * Todo list:
 * - internationalisation of phone numbers
 **/
include_once('JSON.php'); // Services_JSON decoder used by I18n::setLang()

// Active language code (ISO 639-1) and the per-language dictionary cache,
// kept in globals so the static I18n API can reach them.
$GLOBALS['LANGUAGE'] = '';
$GLOBALS['DICTIONARY'] = Array();
/**
 * Minimal i18n helper backed by JSON dictionaries stored as
 * lang_<code>.ini files and cached in $GLOBALS['DICTIONARY'].
 */
class I18n {

    // Shared singleton instance.
    private static $instance;

    /**
     * Return the shared I18n instance (lazy singleton).
     *
     * @return I18n
     */
    static function get_instance() {
        if (!is_object(self::$instance)) {
            $c = __CLASS__;
            self::$instance = new $c;
        }
        return self::$instance;
    }

    /**
     * Select the active language and load its dictionary file unless it is
     * already cached.
     *
     * @param string $lang ISO 639-1 language code
     * @param string $path directory containing lang_<code>.ini (default ".")
     * @return bool true when the dictionary is available
     */
    static function setLang($lang, $path = '.') {
        $GLOBALS['LANGUAGE'] = $lang;
        // BUG FIX: the original compared the *last character* of $path with
        // the three-character string '../', which can never be true, so a
        // trailing '/' was appended unconditionally (possibly doubling it).
        // Only append the separator when it is actually missing.
        if (substr($path, strlen($path) - 1) != '/') {
            $path = $path . '/';
        }
        if (empty($GLOBALS['DICTIONARY'][$GLOBALS['LANGUAGE']])) {
            $json = new Services_JSON();
            if (file_exists($path . 'lang_' . $lang . '.ini')) {
                $input = file_get_contents($path . 'lang_' . $lang . '.ini');
                $GLOBALS['DICTIONARY'][$GLOBALS['LANGUAGE']] = $json->decode($input);
                return (true);
            } else {
                return (false);
            }
        } else {
            // Dictionary already loaded for this language.
            return (true);
        }
    }

    /**
     * Look up the translation for $id in the active dictionary.
     *
     * @param string $id text identifier
     * @return string translation, or "<id>::pas de traduction" when missing
     */
    function getText($id) {
        if (isset($GLOBALS['DICTIONARY'][$GLOBALS['LANGUAGE']]->$id)) {
            return ($GLOBALS['DICTIONARY'][$GLOBALS['LANGUAGE']]->$id);
        } else {
            return ($id . '::pas de traduction');
        }
    }

    /**
     * @return string the active language code, or 'LANG UNDEFINED'
     */
    function getLang() {
        $l = (empty($GLOBALS['LANGUAGE'])) ? 'LANG UNDEFINED' : $GLOBALS['LANGUAGE'];
        return ($l);
    }
}
?>
<file_sep>/backend/src/bo/common/CommonQueries.php
<?php
namespace Common;
/**
 * Classes used in this file.
 */
use Bo\BaseAction as BaseAction;
use Doctrine\ORM\Mapping\Entity;
use Racine\Bootstrap;
use Exception;
use Exceptions\ConstraintException as ConstraintException;
/**
 * Generic Doctrine-backed persistence helpers shared by the various
 * entity-specific query classes (insert/update/soft-delete/restore,
 * activation, archiving, simple lookups).
 *
 * NOTE(review): several methods splice a caller-supplied id list straight
 * into the DQL string ($listId). Callers must guarantee that the list is a
 * sanitized, comma-separated set of integers — otherwise this is a DQL
 * injection vector.
 */
class CommonQueries extends BaseAction implements \Bo\BaseQueries {

    protected $entityManager;

    public function __construct() {
        // Timestamps produced via CURRENT_TIMESTAMP() below are meant in GMT.
        date_default_timezone_set('GMT');
    }

    /**
     * Persist $entity (plus an optional companion entity and an optional list
     * of extra entities) inside a single transaction.
     *
     * @param object|null $entity       main entity to persist
     * @param object|null $supp         optional companion entity
     * @param array|null  $listEntities optional extra entities for the same transaction
     * @return object|null the persisted entity on success, null on failure
     */
    public function insert($entity, $supp = null, $listEntities = null) {
        $this->doLogInfo('Debut insertion');
        Bootstrap::$entityManager->getConnection()->beginTransaction();
        if ($entity != null) {
            try {
                Bootstrap::$entityManager->persist($entity);
                if ($supp !== null) {
                    $this->doLogInfo('Debut autre insertion');
                    Bootstrap::$entityManager->persist($supp);
                }
                if (!empty($listEntities)) {
                    foreach ($listEntities as $entityAdd) {
                        Bootstrap::$entityManager->persist($entityAdd);
                    }
                }
                // A single flush covers every persisted entity, then commit.
                Bootstrap::$entityManager->flush();
                Bootstrap::$entityManager->getConnection()->commit();
                $this->doLogInfo('Fin insertion');
                $this->doLogInfo('Id genere: ' . $entity->getId());
                return $entity;
            } catch (\Exception $e) {
                $this->doLogError($e->getMessage());
                $this->doLogError('Fin insertion');
                Bootstrap::$entityManager->getConnection()->rollback();
                // Doctrine closes the EM after an error; rebuild it so later
                // calls in the same request still work.
                Bootstrap::$entityManager->close();
                $b = new Bootstrap();
                Bootstrap::$entityManager = $b->getEntityManager();
                return null;
            }
        }
    }

    /**
     * Merge $entity (plus an optional companion entity) inside a transaction.
     *
     * @param object|null $entity main entity to update
     * @param object|null $supp   optional companion entity
     * @return object|null the entity on success, null on failure
     */
    public function update($entity, $supp = null) {
        $this->doLogInfo('Debut update');
        Bootstrap::$entityManager->getConnection()->beginTransaction();
        if ($entity != null) {
            try {
                Bootstrap::$entityManager->merge($entity);
                Bootstrap::$entityManager->flush();
                if ($supp !== null) {
                    $this->doLogInfo('Debut autre update');
                    Bootstrap::$entityManager->merge($supp);
                    Bootstrap::$entityManager->flush();
                }
                Bootstrap::$entityManager->getConnection()->commit();
                $this->doLogInfo('Fin update: succes');
                return $entity;
            } catch (\Exception $e) {
                $this->doLogError($e->getMessage());
                $this->doLogInfo('Fin update: echec');
                Bootstrap::$entityManager->getConnection()->rollback();
                // Rebuild the entity manager (Doctrine closes it on failure).
                Bootstrap::$entityManager->close();
                $b = new Bootstrap();
                Bootstrap::$entityManager = $b->getEntityManager();
                return null;
            }
        }
    }

    /**
     * Soft-delete: move rows into the "trash" state by setting status to 0.
     *
     * @param string   $entity fully qualified entity class name
     * @param string   $listId comma-separated id list (must be pre-sanitized)
     * @param int|null $userId optional id of the acting user
     * @return mixed query result, or -1 on error
     */
    public function remove($entity, $listId, $userId = null) {
        $this->doLogInfo('Debut suppression');
        $q = '';
        if ($userId != null)
            $q = ',e.removedBy=' . $userId;
        try {
            $dql = 'update ' . $entity . ' e set e.status=0 ,e.removedDate=CURRENT_TIMESTAMP()' . $q . ' WHERE e.id in (' . $listId . ') and e.status=1';
            $query = Bootstrap::$entityManager->createQuery($dql);
            $rslt = $query->getResult();
            $this->doLogInfo('Fin suppression');
            return $rslt;
        } catch (\Exception $ex) {
            $this->doLogError($ex->getMessage());
            $this->doLogError('Fin suppression');
            return -1;
        }
    }

    /**
     * Mark trashed rows as definitively deleted (status -1). Rows must
     * already be in the trash (status 0).
     *
     * @param string   $entity fully qualified entity class name
     * @param string   $listId comma-separated id list (must be pre-sanitized)
     * @param int|null $userId optional id of the acting user
     * @return mixed query result, or -1 on error
     */
    public function delete($entity, $listId, $userId = null) {
        $this->doLogInfo('Debut suppression');
        $q = '';
        if ($userId != null)
            $q = ',e.deletedBy=' . $userId;
        try {
            $dql = 'update ' . $entity . ' e set e.status=-1 ,e.deletedDate=CURRENT_TIMESTAMP()' . $q . ' WHERE e.id in (' . $listId . ') and e.status=0';
            $query = Bootstrap::$entityManager->createQuery($dql);
            $rslt = $query->getResult();
            $this->doLogInfo('Fin suppression');
            return $rslt;
        } catch (\Exception $ex) {
            $this->doLogError($ex->getMessage());
            $this->doLogError('Fin suppression');
            return -1;
        }
    }

    /**
     * Find an entity's (id, code) pair by its code.
     *
     * @param string $entity     fully qualified entity class name
     * @param string $entityCode
     * @return array|null first matching row, or null
     */
    public function findByCode($entity, $entityCode) {
        if ($entityCode != null) {
            // BUG FIX: the entity class name must not be wrapped in single
            // quotes inside DQL ("from '$entity' e" is invalid DQL).
            $query = Bootstrap::$entityManager->createQuery("select e.id,e.code from $entity e where e.code=:entityCode");
            $query->setParameter('entityCode', $entityCode);
            $result = $query->getResult();
            if ($result != null)
                return $result[0];
            else
                return null;
        }
    }

    /**
     * Find an active establishment by its code.
     *
     * @param string $entity     fully qualified entity class name
     * @param string $entityCode
     * @return object|null
     */
    public function findEtablissementByCode($entity, $entityCode) {
        try {
            $etablissementRepository = Bootstrap::$entityManager->getRepository($entity);
            $etablissement = $etablissementRepository->findOneBy([
                'code' => $entityCode,
                'status' => 1,
            ]);
            if ($etablissement != null) {
                return $etablissement;
            } else {
                return null;
            }
        } catch (Exception $e) {
            throw $e;
        }
    }

    /**
     * Find an entity by its code within a given establishment.
     *
     * @param string $entity          fully qualified entity class name
     * @param string $entityCode
     * @param int    $etablissementId
     * @return object|null
     */
    public function findByCodeAndEtablissement($entity, $entityCode, $etablissementId) {
        if ($entityCode != null) {
            // BUG FIX: removed invalid single quotes around the entity name.
            $query = Bootstrap::$entityManager->createQuery("select e from $entity e where e.code=:entityCode and e.etablissement=:etablissementId");
            $query->setParameter('entityCode', $entityCode);
            $query->setParameter('etablissementId', $etablissementId);
            $result = $query->getResult();
            if ($result != null)
                return $result[0];
            else
                return null;
        }
    }

    /**
     * Restore trashed rows (status 0 back to 1).
     *
     * @param string   $entity fully qualified entity class name
     * @param string   $listId comma-separated id list (must be pre-sanitized)
     * @param int|null $userId optional id of the acting user
     * @return mixed query result, or -1 on error
     */
    public function restore($entity, $listId, $userId = null) {
        $this->doLogInfo('Debut restauration');
        $q = '';
        if ($userId != null)
            $q = ',e.restoredBy=' . $userId;
        try {
            $dql = 'update ' . $entity . ' e set e.status=1 ,e.restoredDate=CURRENT_TIMESTAMP()' . $q . ' WHERE e.id in (' . $listId . ') and e.status=0';
            $query = Bootstrap::$entityManager->createQuery($dql);
            $rslt = $query->getResult();
            $this->doLogInfo('Fin restauration');
            return $rslt;
        } catch (\Exception $ex) {
            $this->doLogError($ex->getMessage());
            $this->doLogError('Fin restauration');
            return -1;
        }
    }

    /**
     * Return every row of $entity.
     *
     * BUG FIX: $entity was previously read without ever being defined
     * (guaranteed runtime failure); it is now an explicit parameter, kept
     * optional for signature compatibility.
     *
     * @param string|null $entity fully qualified entity class name
     * @return array
     */
    public function findAll($entity = null) {
        $entityRepository = Bootstrap::$entityManager->getRepository($entity);
        $listtemplate = $entityRepository->findAll();
        return $listtemplate;
    }

    /**
     * Fetch a managed entity by primary key.
     *
     * @param string $entity   fully qualified entity class name
     * @param mixed  $entityId
     * @return object|null
     */
    public function findById($entity, $entityId) {
        if ($entityId != null) {
            return Bootstrap::$entityManager->find($entity, $entityId);
        }
    }

    /**
     * Find an active entity by its code using the repository API.
     *
     * @param string $entity     fully qualified entity class name
     * @param string $entityCode
     * @return object|null
     */
    public function findByCodeEntity($entity, $entityCode) {
        $criteria = array(
            'status' => 1,
            'code' => $entityCode
        );
        $entityRepository = Bootstrap::$entityManager->getRepository($entity);
        $entities = $entityRepository->findBy($criteria);
        if (count($entities) != 0) {
            return $entities [0];
        }
        return null;
    }

    // Part of the \Bo\BaseQueries contract; intentionally a no-op here.
    public function view($id) {
    }

    /**
     * Activate rows (activate flag 0 -> 1) among active records.
     *
     * @param string   $entity fully qualified entity class name (Package\Class)
     * @param string   $listId comma-separated id list (must be pre-sanitized)
     * @param int|null $userId optional id of the acting user
     * @return mixed query result, or -1 on error
     */
    public function activate($entity, $listId, $userId = null) {
        $this->doLogInfo('Debut activation');
        $q = '';
        if ($userId != null)
            $q = ',e.activatedBy=' . $userId;
        try {
            $dql = 'update ' . $entity . ' e set e.activate=1 ,e.activatedDate=CURRENT_TIMESTAMP() ' . $q . ' WHERE e.id in (' . $listId . ') and e.activate=0 and e.status=1';
            $query = Bootstrap::$entityManager->createQuery($dql);
            $rslt = $query->getResult();
            $this->doLogInfo('Fin activation');
            return $rslt;
        } catch (\Exception $ex) {
            $this->doLogError($ex->getMessage());
            $this->doLogError('Fin activation');
            return -1;
        }
    }

    /**
     * Deactivate rows (activate flag 1 -> 0) among active records.
     *
     * @param string   $entity fully qualified entity class name
     * @param string   $listId comma-separated id list (must be pre-sanitized)
     * @param int|null $userId optional id of the acting user
     * @return mixed query result, or -1 on error
     */
    public function deactivate($entity, $listId, $userId = null) {
        $this->doLogInfo('Debut desactivation');
        $q = '';
        if ($userId != null)
            $q = ',e.deactivatedBy=' . $userId;
        try {
            $dql = 'update ' . $entity . ' e set e.activate=0 ,e.deactivatedDate=CURRENT_TIMESTAMP() ' . $q . ' WHERE e.id in (' . $listId . ') and e.activate=1 and e.status=1';
            $query = Bootstrap::$entityManager->createQuery($dql);
            $rslt = $query->getResult();
            $this->doLogInfo('Fin desactivation');
            return $rslt;
        } catch (\Exception $ex) {
            $this->doLogError($ex->getMessage());
            $this->doLogError('Fin desactivation');
            return -1;
        }
    }

    /**
     * Archive rows (archive flag 0 -> 1) among active records.
     *
     * @param string   $entity fully qualified entity class name
     * @param string   $listId comma-separated id list (must be pre-sanitized)
     * @param int|null $userId optional id of the acting user
     * @return mixed query result, or -1 on error
     */
    public function doArchive($entity, $listId, $userId = null) {
        $this->doLogInfo('Debut archivage');
        $q = '';
        if ($userId != null)
            $q = ',e.archivedBy=' . $userId;
        try {
            $dql = 'update ' . $entity . ' e set e.archive=1 ,e.archivedDate=CURRENT_TIMESTAMP() ' . $q . ' WHERE e.id in (' . $listId . ') and e.archive=0 and e.status=1';
            $query = Bootstrap::$entityManager->createQuery($dql);
            $rslt = $query->getResult();
            $this->doLogInfo('Fin archivage');
            return $rslt;
        } catch (\Exception $ex) {
            $this->doLogError($ex->getMessage());
            $this->doLogError('Fin archivage');
            return -1;
        }
    }

    /**
     * Un-archive rows (archive flag 1 -> 0) among active records.
     *
     * @param string   $entity fully qualified entity class name
     * @param string   $listId comma-separated id list (must be pre-sanitized)
     * @param int|null $userId optional id of the acting user
     * @return mixed query result, or -1 on error
     */
    public function undoArchive($entity, $listId, $userId = null) {
        $this->doLogInfo('Debut desarchivage');
        $q = '';
        if ($userId != null)
            $q = ',e.undoarchivedBy=' . $userId;
        try {
            $dql = 'update ' . $entity . ' e set e.archive=0 ,e.undoArhivedDate=CURRENT_TIMESTAMP() ' . $q . ' WHERE e.id in (' . $listId . ') and e.archive=1 and e.status=1';
            $query = Bootstrap::$entityManager->createQuery($dql);
            $rslt = $query->getResult();
            $this->doLogInfo('Fin desarchivage');
            return $rslt;
        } catch (\Exception $ex) {
            $this->doLogError($ex->getMessage());
            $this->doLogError('Fin desarchivage');
            return -1;
        }
    }

    /**
     * Check whether an e-mail already exists inside an establishment.
     *
     * @param string $entity          fully qualified entity class name
     * @param string $email
     * @param int    $etablissementId
     * @return mixed first matching row, or null
     */
    public function isExistEmailByEtablissement($entity, $email, $etablissementId) {
        // BUG FIX: removed invalid single quotes around the entity name.
        $query = Bootstrap::$entityManager->createQuery("select e.email from $entity e where e.email = :email and e.etablissement=:etablissementId");
        $query->setParameter('email', $email);
        $query->setParameter('etablissementId', $etablissementId);
        $result = $query->getResult();
        if ($result != null)
            return $result[0];
        else
            return null;
    }

    /**
     * Check whether a phone number already exists inside an establishment.
     *
     * @param string $entity          fully qualified entity class name
     * @param string $telephone
     * @param int    $etablissementId
     * @return mixed first matching row, or null
     */
    public function isExistNumberPhoneByEtablissement($entity, $telephone, $etablissementId) {
        // BUG FIX: removed invalid single quotes around the entity name.
        $query = Bootstrap::$entityManager->createQuery("select e.telephone from $entity e where e.telephone = :telephone and e.etablissement=:etablissementId");
        $query->setParameter('telephone', $telephone);
        $query->setParameter('etablissementId', $etablissementId);
        $result = $query->getResult();
        if ($result != null)
            return $result[0];
        else
            return null;
    }

    /**
     * List the archived (archive=1), active entities of an establishment.
     *
     * @param string $entity          fully qualified entity class name
     * @param int    $etablissementId
     * @return array|null
     */
    public function findAllEntitiesArchivesByEtablissement($entity, $etablissementId) {
        // BUG FIX: removed invalid single quotes around the entity name.
        $query = Bootstrap::$entityManager->createQuery("select e.id, e.nom, e.prenom, e.dateNaissance, e.lieuDeNaissance, e.genre, e.email, e.adresse, e.telephone, e.archivedDate from $entity e where e.etablissement=:etablissementId and e.status=1 and e.archive=1");
        $query->setParameter('etablissementId', $etablissementId);
        $result = $query->getResult();
        if ($result != null)
            return $result;
        else
            return null;
    }

    /**
     * List the non-archived (archive=0), active entities of an establishment.
     *
     * @param string $entity          fully qualified entity class name
     * @param int    $etablissementId
     * @return array|null
     */
    public function findAllEntitiesNonArchivesByEtablissement($entity, $etablissementId) {
        // BUG FIX: removed invalid single quotes around the entity name.
        $query = Bootstrap::$entityManager->createQuery("select e.id, e.nom, e.prenom, e.dateNaissance, e.lieuDeNaissance, e.genre, e.email, e.adresse, e.telephone, e.archivedDate from $entity e where e.etablissement=:etablissementId and e.status=1 and e.archive=0");
        $query->setParameter('etablissementId', $etablissementId);
        $result = $query->getResult();
        if ($result != null)
            return $result;
        else
            return null;
    }

    /**
     * Generate a code from a label by concatenating the first three
     * characters of each word (e.g. "Lycee Central" -> "LycCen").
     *
     * @param string $libelle
     * @return string
     */
    public function codeGenerator($libelle) {
        $tablibelle = explode(" ", $libelle);
        $code = "";
        for ($i = 0; $i < count($tablibelle); $i++) {
            $code = $code . substr($tablibelle[$i], 0, 3);
        }
        return $code;
    }

    /**
     * Return the maximum id among active rows of $entity.
     *
     * @param string $entity fully qualified entity class name
     * @return array|null
     */
    public function getLasId($entity) {
        // BUG FIX: removed invalid single quotes around the entity name.
        $query = Bootstrap::$entityManager->createQuery("select MAX(e.id) from $entity e where e.status=1");
        $result = $query->getResult();
        if ($result != null)
            return $result;
        else
            return null;
    }

    /**
     * Hard-delete class/teacher, class/supervisor or subject/teacher
     * association rows for a given school year. The entity name decides
     * which association is targeted.
     *
     * @param string $entity fully qualified entity class name
     * @param int    $anneeScolaireId
     * @param mixed  $classProfOrclasseSurvOrmatiereProfId association id(s)
     * @return mixed query result, or null
     */
    public function revokeAssociation($entity, $anneeScolaireId, $classProfOrclasseSurvOrmatiereProfId) {
        $query = null;
        if (preg_match("/CLASSEPROF/i", $entity)) {
            // BUG FIX: removed invalid single quotes around the entity name.
            $sql = "DELETE FROM $entity e "
                    . " where e.status=1 and e.anneeScolaire=:anneeSolaireId and e.id in(:classeProfId)";
            $query = Bootstrap::$entityManager->createQuery($sql);
            $query->setParameter('anneeSolaireId', $anneeScolaireId);
            $query->setParameter('classeProfId', $classProfOrclasseSurvOrmatiereProfId);
        }
        if (preg_match("/CLASSESURVEILLANT/i", $entity)) {
            $sql = "DELETE FROM $entity e "
                    . " where e.status=1 and e.anneeScolaire=:anneeSolaireId and e.id in (:classeSurveillantId)";
            $query = Bootstrap::$entityManager->createQuery($sql);
            $query->setParameter('anneeSolaireId', $anneeScolaireId);
            $query->setParameter('classeSurveillantId', $classProfOrclasseSurvOrmatiereProfId);
        }
        if (preg_match("/MATIEREPROF/i", $entity)) {
            $sql = "DELETE FROM $entity e "
                    . " where e.status=1 and e.anneeScolaire=:anneeSolaireId and e.id in (:matiereProfId) ";
            $query = Bootstrap::$entityManager->createQuery($sql);
            $query->setParameter('anneeSolaireId', $anneeScolaireId);
            $query->setParameter('matiereProfId', $classProfOrclasseSurvOrmatiereProfId);
        }
        if ($query === null) {
            // BUG FIX: an unrecognised entity previously fell through to a
            // fatal call on an undefined $query.
            return null;
        }
        $result = $query->getResult();
        if ($result != null)
            return $result;
        else
            return null;
    }

    /**
     * Hard-delete a single active row by id.
     *
     * @param string $entity fully qualified entity class name
     * @param mixed  $id
     * @return mixed query result, or null on error
     */
    public function revoke($entity, $id) {
        try {
            $sql = "DELETE FROM $entity e where e.id=:id and e.status=1";
            $query = Bootstrap::$entityManager->createQuery($sql);
            $query->setParameter('id', $id);
            $rslt = $query->getResult();
            return $rslt;
        } catch (\Exception $ex) {
            $this->doLogError($ex->getMessage());
            return null;
        }
    }

    /**
     * Fetch ids of entities matching an establishment and a label.
     *
     * @param string $entity          fully qualified entity class name
     * @param string $libelle
     * @param int    $etablissementId
     * @return array|null
     */
    public function getEntitiesByEtabAndLib($entity, $libelle, $etablissementId) {
        $sql = "SELECT e.id FROM $entity e WHERE e.etablissement=:etablissementId and e.libelle=:libelle ";
        $query = Bootstrap::$entityManager->createQuery($sql);
        $query->setParameter('etablissementId', $etablissementId);
        $query->setParameter('libelle', $libelle);
        $result = $query->getResult();
        if ($result != null)
            return $result;
        else
            return null;
    }

    /**
     * Validate rows (validated flag 0 -> 1), e.g. invoices.
     *
     * @param string   $entity fully qualified entity class name
     * @param string   $listId comma-separated id list (must be pre-sanitized)
     * @param int|null $userId optional id of the acting user
     * @return mixed query result, or -1 on error
     */
    public function validate($entity, $listId, $userId = null) {
        $this->doLogInfo('Debut validation');
        $q = '';
        if ($userId != null)
            $q = ',e.validatedBy=' . $userId;
        try {
            $dql = 'update ' . $entity . ' e set e.validated=1 ,e.validatedDate=CURRENT_TIMESTAMP()' . $q . ' WHERE e.id in (' . $listId . ') and e.validated=0';
            $query = Bootstrap::$entityManager->createQuery($dql);
            $rslt = $query->getResult();
            $this->doLogInfo('Fin validation');
            return $rslt;
        } catch (\Exception $ex) {
            $this->doLogError($ex->getMessage());
            $this->doLogError('Fin validation');
            return -1;
        }
    }

    /**
     * Cancel non-validated rows (cancelled flag 0 -> 1), e.g. invoices.
     *
     * NOTE(review): this DQL filters on "e.validate=0" while validate()
     * writes "e.validated" — confirm which field name the entities really
     * declare; one of the two is probably wrong.
     *
     * @param string   $entity fully qualified entity class name
     * @param string   $listId comma-separated id list (must be pre-sanitized)
     * @param int|null $userId optional id of the acting user
     * @return mixed query result, or -1 on error
     */
    public function cancelled($entity, $listId, $userId = null) {
        $this->doLogInfo('Debut annulation');
        $q = '';
        if ($userId != null)
            $q = ',e.cancelledBy=' . $userId;
        try {
            $dql = 'update ' . $entity . ' e set e.cancelled=1 ,e.cancelledDate=CURRENT_TIMESTAMP()' . $q . ' WHERE e.id in (' . $listId . ') and e.cancelled=0 and e.validate=0';
            $query = Bootstrap::$entityManager->createQuery($dql);
            $rslt = $query->getResult();
            $this->doLogInfo('Fin annulation');
            return $rslt;
        } catch (\Exception $ex) {
            $this->doLogError($ex->getMessage());
            $this->doLogError('Fin annulation');
            return -1;
        }
    }
}
//$CommonQueries = new CommonQueries($_REQUEST);
<file_sep>/backend/src/bo/BaseController.php
<?php
namespace Bo;
/*
* 2SMOBILE
* ----------------------------------------
* @author Kiwi <<EMAIL>>
* @copyright 2006-2015 Kiwi/2SI Group
* @version 2.0.0
* @link http://www.kiwi.sn
* @link http://www.ssi.sn
* ----------------------------------------
*/
/**
* Interface
*/
/**
 * Contract for controller classes: standard CRUD / lifecycle entry points,
 * each receiving the raw request parameters.
 */
interface BaseController{
    /** Create a new record from the request. */
    function doInsert($request);
    /** Update an existing record from the request. */
    function doUpdate($request);
    /** Soft-delete (move to trash) the targeted record(s). */
    function doRemove($request);
    /** Render or return a single record. */
    function doView($request);
    /** List records. */
    function doList($request);
    /** Fetch a single record by id. */
    function dofindById($request);
    /** Restore previously removed record(s). */
    function doRestore($request);
    /** Activate record(s). */
    function doActivate($request);
    /** Deactivate record(s). */
    function doDeactivate($request);
}
<file_sep>/backend/src/tools/Tool.php
<?php
/*
* 2SMOBILE
* ----------------------------------------
* @author Kiwi <<EMAIL>>
* @copyright 2006-2015 Kiwi/2SI Group
* @version 2.0.0
* @link http://www.kiwi.sn
* @link http://www.ssi.sn
* ----------------------------------------
*/
namespace tools;
use Log\Loggers as Logger;
class Tool {

    /**
     * Render a timestamp (or strtotime()-parsable date string) as a
     * human-friendly French label: "Aujourd'hui ...", "Hier ...",
     * "Avant-hier ..." or the full date otherwise.
     *
     * @param int|string $date Unix timestamp or date string
     * @return string
     */
    static function affichageDateSpreciale($date) {
        // Normalise string dates to a Unix timestamp.
        if (!ctype_digit($date)) {
            $date = strtotime($date);
        }
        // Compare calendar days, not raw timestamps.
        $day = date('Ymd', $date);
        if ($day == date('Ymd')) {
            return 'Aujourd\'hui à ' . date('H:i:s', $date);
        }
        if ($day == date('Ymd', strtotime('- 1 DAY'))) {
            return 'Hier à ' . date('H:i:s', $date);
        }
        if ($day == date('Ymd', strtotime('- 2 DAY'))) {
            return 'Avant-hier à ' . date('H:i:s', $date);
        }
        return 'Le ' . date('d/m/Y à H:i:s', $date);
    }
}
<file_sep>/backend/src/bo/menu/MenuManager.php
<?php
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
/**
* Description of TestAdmisManager
*
* @author lenovo
*/
namespace Menu;
use Menu\Menu;
use Bo\BaseAction;
use Common\CommonManager;
use Menu\MenuQueries;
//use DA\TestAdmisQueries as TestAdmisQueries;
/**
 * Service layer for menus: delegates generic CRUD to CommonManager and
 * menu-specific queries to MenuQueries, and flattens the user's menu tree
 * (master parent + up to three nesting levels) into a list of arrays.
 */
class MenuManager extends BaseAction implements \Bo\BaseManager {

    private $commonManager;  // generic CRUD delegate
    private $menuQueries;    // menu-specific query object
    // NOTE(review): never assigned anywhere; see view() below.
    private $testAdmis;

    public function __construct() {
        $this->commonManager = new CommonManager();
        // $this->testAdmisQueries= new tes
        $this->menuQueries = new MenuQueries();
    }

    /** Generic lookup by id, delegated to the common manager. */
    public function findById($object, $id) {
        return $this->commonManager->findById($object, $id);
    }

    // public function insrcire($listeInscrite, $listInscriptionUpdate) {
    // return $this->testAdmisQueries->insrcire($listeInscrite, $listInscriptionUpdate);
    // }

    /** Insert an entity (plus optional companion), via the common manager. */
    public function insert($object, $supp = null) {
        return $this->commonManager->insert($object, $supp);
    }

    /** Soft-delete the given ids, via the common manager. */
    public function remove($entity, $listId, $userId = null) {
        return $this->commonManager->remove($entity, $listId, $userId);
    }

    /** Restore previously removed ids, via the common manager. */
    public function restore($entity, $listId, $userId = null) {
        return $this->commonManager->restore($entity, $listId, $userId);
    }

    /** Update an entity, via the common manager. */
    public function update($object) {
        return $this->commonManager->update($object);
    }

    // NOTE(review): $this->testAdmisQueries is never initialised (its
    // assignment in the constructor is commented out), so calling view()
    // will fail at runtime — confirm whether this method is still used.
    public function view($id) {
        return $this->testAdmisQueries->view($id);
    }

    /** Activate the given ids ($entity is "Package\Class"). */
    public function activate($entity, $listId, $userId = null) {//$entity=namePackage\nameClass
        return $this->commonManager->activate($entity, $listId, $userId);
    }

    /** Deactivate the given ids, via the common manager. */
    public function deactivate($entity, $listId, $userId = null) {
        return $this->commonManager->deactivate($entity, $listId, $userId);
    }

    /** Archive the given ids, via the common manager. */
    public function doArchive($entity, $listId, $userId = null) {
        return $this->commonManager->doArchive($entity, $listId, $userId);
    }

    /** Un-archive the given ids, via the common manager. */
    public function undoArchive($entity, $listId, $userId = null) {
        return $this->commonManager->undoArchive($entity, $listId, $userId);
    }

    /** Hard-delete the given ids, via the common manager. */
    public function delete($entity, $listId, $userId = null) {
        return $this->commonManager->delete($entity, $listId, $userId);
    }

    /** Find an entity by its code, via the common manager. */
    public function findByCode($entity, $entityCode) {
        return $this->commonManager->findByCode($entity, $entityCode);
    }

    /** Raw list of menus visible to $user, straight from the query layer. */
    public function getAllMenuByUser($user) {
        return $this->menuQueries->getAllMenuByUser($user);
    }

    /**
     * Flatten the user's menu tree into a list of arrays: first the master
     * parent, then its children depth-first, down to three levels.
     *
     * NOTE(review): the key ORDER of the produced arrays differs between
     * level 1 (parent_id first) and levels 2/3 (id first); consumers that
     * json_encode these arrays will see that ordering, so it is preserved
     * as-is here.
     */
    public function getAllMenusArray($user) {
        $listMenus =array();
        // Root entry: only its title and id are exposed.
        $masterParent = $this->menuQueries->getMasterParent($user);
        $masterParentArray= array();
        $masterParentArray['Parent'] = $masterParent['title'];
        $masterParentArray['id'] = $masterParent['id'];
        $listMenus[] = $masterParentArray;
        // Level 1: direct children of the master parent.
        $listMenuChild = $this->menuQueries->getAllChildByParentId($masterParent['id']);
        foreach ($listMenuChild as $unMenuChild) {
            $menuChild = $this->commonManager->findById("Menu\Menu", $unMenuChild["id"]);
            $this->doLogInfo('menuChildId:' . $menuChild->getId());
            $titleChild = $menuChild->getTitle();
            $nameChild = $menuChild->getName();
            $textChild = $menuChild->getText();
            $typeChild = $menuChild->getType();
            $urlChild = $menuChild->getUrl();
            $ordreChild = $menuChild->getOrdre();
            $menuChildArray1 = array();
            $menuChildArray1 ['parent_id']= $menuChild->getParent()->getId();
            $menuChildArray1 ['parent_name']= $menuChild->getParent()->getName();
            $menuChildArray1 ['id']= $menuChild->getId();
            $menuChildArray1 ['title']= $titleChild;
            $menuChildArray1 ['name']= $nameChild;
            $menuChildArray1 ['type']= $typeChild;
            $menuChildArray1 ['text']= $textChild;
            $menuChildArray1 ['url']= $urlChild;
            $menuChildArray1 ['ordre']= $ordreChild;
            $listMenus[] = $menuChildArray1;
            // Level 2: children of each level-1 menu.
            $listMenuChild2=$this->menuQueries->getAllChildByParentId($unMenuChild["id"]);
            //var_dump($listMenuChild2);
            foreach ($listMenuChild2 as $unMenuChild2) {
                $menuChild2 = array();
                $menuChild2 = $this->commonManager->findById("Menu\Menu", $unMenuChild2["id"]);
                $titleChild2 = $menuChild2->getTitle();
                $nameChild2 = $menuChild2->getName();
                $textChild2 = $menuChild2->getText();
                $typeChild2 = $menuChild2->getType();
                $urlChild2 = $menuChild2->getUrl();
                $ordreChild2 = $menuChild2->getOrdre();
                // $menuChildArray2 ['title']= $titleChild2;
                // $listMenus['child1'] = $menuChildArray2;
                //
                //
                //var_dump($titleChild .' parent de '.$titleChild2 );
                //var_dump($unMenuChild2["id"] .' ==== '.$menuChild2->getId() );
                $menuChildArray2 = array();
                $menuChildArray2 ['id']= $menuChild2->getId();
                $menuChildArray2 ['parent_id']= $menuChild2->getParent()->getId();
                $menuChildArray2 ['parent_name']= $menuChild2->getParent()->getName();
                $menuChildArray2 ['title']= $titleChild2;
                $menuChildArray2 ['name']= $nameChild2;
                $menuChildArray2 ['type']= $typeChild2;
                $menuChildArray2 ['text']= $textChild2;
                $menuChildArray2 ['url']= $urlChild2;
                $menuChildArray2 ['ordre']= $ordreChild2;
                $listMenus[] = $menuChildArray2;
                // Level 3: children of each level-2 menu (deepest level handled).
                $listMenuChild3=$this->menuQueries->getAllChildByParentId($unMenuChild2["id"]);
                //var_dump($listMenuChild3);
                foreach ($listMenuChild3 as $unMenuChild3) {
                    //$menuChild3 = array();
                    $menuChild3 = $this->commonManager->findById("Menu\Menu", $unMenuChild3["id"]);
                    $titleChild3 = $menuChild3->getTitle();
                    $nameChild3 = $menuChild3->getName();
                    $textChild3 = $menuChild3->getText();
                    $typeChild3 = $menuChild3->getType();
                    $urlChild3 = $menuChild3->getUrl();
                    $ordreChild3 = $menuChild3->getOrdre();
                    //var_dump($titleChild2 .' bis -- parent de '.$titleChild3 );
                    $menuChildArray3 = array();
                    $menuChildArray3 ['id']= $menuChild3->getId();
                    $menuChildArray3 ['parent_id']= $menuChild3->getParent()->getId();
                    $menuChildArray3 ['parent_name']= $menuChild3->getParent()->getName();
                    $menuChildArray3 ['title']= $titleChild3;
                    $menuChildArray3 ['name']= $nameChild3;
                    $menuChildArray3 ['type']= $typeChild3;
                    $menuChildArray3 ['text']= $textChild3;
                    $menuChildArray3 ['url']= $urlChild3;
                    $menuChildArray3 ['ordre']= $ordreChild3;
                    $listMenus[] = $menuChildArray3;
                }
            }
        }
        return $listMenus;
    }

    // NOTE(review): the loop body is empty, so this fetches the menus and
    // discards them — looks unfinished; confirm before relying on it.
    public function getMenuTab($user) {
        $array = $this->menuQueries->getAllMenuByUser($user);
        foreach ($array as $key => $value) {
        }
    }

    /** Root menu entry for the given user, from the query layer. */
    public function getMasterParent($parent){
        return $this->menuQueries->getMasterParent($parent);
    }

    /** Direct children of the given parent id, from the query layer. */
    public function getAllChildByParentId($parent){
        return $this->menuQueries->getAllChildByParentId($parent);
    }
}
<file_sep>/backend/src/bo/BaseAction.php
<?php
namespace Bo;
/*
* 2SMOBILE
* ----------------------------------------
* @author Kiwi <<EMAIL>>
* @copyright 2006-2015 Kiwi/2SI Group
* @version 2.0.0
* @link http://www.kiwi.sn
* @link http://www.ssi.sn
* ----------------------------------------
*/
use Log\Loggers as Logger;
/**
 * Shared base for controllers/managers/queries: JSON response emitters,
 * tagged logging helpers, zero-padded number generators and small
 * object/array conversion utilities.
 */
abstract class BaseAction {

    /** Emit a JSON success payload: return code 0, object id and message. */
    protected function doSuccess($id, $message) {
        $array['rc'] = 0;
        $array['oId'] = $id;
        $array['message'] = $message;
        echo json_encode($array);
    }

    /** Same as doSuccess() but also carries a generated number. */
    protected function doSuccessCB($id, $numero, $message) {
        $array['rc'] = 0;
        $array['oId'] = $id;
        $array['message'] = $message;
        $array['oNumero'] = $numero;
        echo json_encode($array);
    }

    /**
     * Emit an object as JSON. Casting an object to array prefixes protected
     * property names with NUL-delimited markers, which json_encode() renders
     * as the literal text "\u0000*\u0000"; that marker is stripped from the
     * output here. (In PHP double quotes, "\u0000" without braces is the
     * literal 6-character sequence, which is exactly what json_encode emits.)
     */
    protected function doSuccessO($object) {
        $object = json_encode((array) $object);
        echo str_replace("\u0000*\u0000", "", $object);
    }

    /** Report contact-import counters (inserted / already existing) as JSON. */
    protected function doSuccessImport($nbrContact, $nbrContactExist) {
        $array = array();
        $array['contactInserted'] = $nbrContact;
        $array['contactExist'] = $nbrContactExist;
        echo json_encode($array);
    }

    /** Attach the message id to a status payload and emit it as JSON. */
    protected function doStatus($messageId, $status) {
        $status['messageId'] = $messageId;
        echo json_encode($status);
    }

    /** Emit a JSON error payload with the given return code and message. */
    protected function doError($errorCode, $message) {
        $array['rc'] = $errorCode;
        $array['error'] = $message;
        echo json_encode($array);
    }

    /** Log an error message, prefixed with the request-context tag. */
    protected function doLogError($message) {
        $logger = new Logger(__CLASS__);
        return $logger->log->error($this->doTag() . $message);
    }

    /** Log an info message, prefixed with the request-context tag. */
    protected function doLogInfo($message) {
        $logger = new Logger(__CLASS__);
        return $logger->log->info($this->doTag() . $message);
    }

    /**
     * Build a "[key-value]..." correlation tag from well-known request
     * parameters (establishment code, school year, user id).
     */
    protected function doTag() {
        $tag = "";
        if (isset($_REQUEST['code_etablissement'])) {
            $tag = $tag . '[code_eta-' . $_REQUEST['code_etablissement'] . ']';
        }
        if (isset($_REQUEST['code_annee_scolaire'])) {
            $tag = $tag . '[code_annee-' . $_REQUEST['code_annee_scolaire'] . ']';
        }
        if (isset($_REQUEST['user_id'])) {
            $tag = $tag . '[user_id-' . $_REQUEST['user_id'] . ']';
        }
        if (isset($_REQUEST['userId'])) {
            $tag = $tag . '[userId-' . $_REQUEST['userId'] . ']';
        }
        $tag = $tag . ' - ';
        return $tag;
    }

    /** Return every request parameter as one " - key : value" string. */
    protected function doGetListParam() {
        $params = "";
        foreach ($_REQUEST as $key => $value) {
            $params = $params . ' - ' . $key . ' : ' . $value;
        }
        return $params;
    }

    /**
     * Left-pad a sequence code to 6 digits behind a prefix.
     *
     * Preserves the historical behaviour of the three generators below:
     * codes whose length is outside 1..5 yield the integer 0.
     */
    private function formatNumero($prefix, $currentCode) {
        $len = strlen($currentCode);
        if ($len >= 1 && $len <= 5) {
            return $prefix . str_pad($currentCode, 6, '0', STR_PAD_LEFT);
        }
        return 0;
    }

    /** e.g. "2015-000042" — file number for a school year. */
    protected function generateNumeroDossier($anneeScolaire, $currentCode) {
        return $this->formatNumero($anneeScolaire . '-', $currentCode);
    }

    /** e.g. "DA/2015-000042" — candidate number for a school year. */
    protected function generateNumeroCandidat($anneeScolaire, $currentCode) {
        return $this->formatNumero('DA/' . $anneeScolaire . '-', $currentCode);
    }

    /** e.g. "-000042" — bare zero-padded sequence number. */
    protected function generateNumero($currentCode) {
        return $this->formatNumero('-', $currentCode);
    }

    /**
     * Convert a list of objects into a list of arrays.
     *
     * @param iterable $object
     * @return array
     */
    protected function listObjectToArray($object) {
        $array = array();
        foreach ($object as $value) {
            $array[] = (array) $value;
        }
        return $array;
    }

    /** Cast a single object to an array. */
    protected function objectToArray($value) {
        return (array) $value;
    }

    /**
     * Wrap a result set in the structure expected by jQuery DataTables
     * (sEcho echo token, record counts as strings, aaData rows).
     */
    protected function dataTableFormat($objects, $sEcho, $iTotalRecords) {
        $arraySEcho['sEcho'] = $sEcho;
        $arraySEcho['iTotalRecords'] = count($objects) . '';
        $arraySEcho['iTotalDisplayRecords'] = $iTotalRecords . '';
        $arraySEcho['aaData'] = (array) $objects;
        return $arraySEcho;
    }

    /** Emit a pre-encoded JSON string as-is. */
    protected function doSuccessJson($json) {
        echo $json;
    }

    /**
     * Emit a generic success payload (rc 0) carrying arbitrary data.
     */
    protected function doResult($data) {
        $array['rc'] = 0;
        $array['data'] = $data;
        echo json_encode($array);
    }
}
<file_sep>/lib/doctrine/vendor/composer/autoload_classmap.php
<?php
// autoload_classmap.php @generated by Composer
// NOTE: generated file — do not edit by hand; run `composer dump-autoload`
// to regenerate after adding, renaming or moving classes.
$vendorDir = dirname(dirname(__FILE__));
$baseDir = dirname(dirname(dirname($vendorDir))).'/config';
return array(
    'Be\\BaseEntite' => $baseDir . '/../backend/src/be/BaseEntite.php',
    'Bo\\BaseAction' => $baseDir . '/../backend/src/bo/BaseAction.php',
    'Bo\\BaseController' => $baseDir . '/../backend/src/bo/BaseController.php',
    'Bo\\BaseManager' => $baseDir . '/../backend/src/bo/BaseManager.php',
    'Bo\\BaseQueries' => $baseDir . '/../backend/src/bo/BaseQueries.php',
    'CommonController' => $baseDir . '/../backend/src/bo/common/CommonController.php',
    'Common\\Common' => $baseDir . '/../backend/src/bo/common/Common.php',
    'Common\\CommonManager' => $baseDir . '/../backend/src/bo/common/CommonManager.php',
    'Common\\CommonQueries' => $baseDir . '/../backend/src/bo/common/CommonQueries.php',
    'Common\\Cookie' => $baseDir . '/../backend/src/bo/common/Cookie.php',
    'Common\\DoctrineLogger' => $baseDir . '/../backend/src/bo/common/DoctrineLogger.php',
    'Exceptions\\BlockedNumberException' => $baseDir . '/../backend/src/bo/exception/BlockedNumberException.php',
    'Exceptions\\ConstraintException' => $baseDir . '/../backend/src/bo/exception/ConstraintException.php',
    'Exceptions\\EmptyMessageException' => $baseDir . '/../backend/src/bo/exception/EmptyMessageException.php',
    'Exceptions\\EmptyNumberException' => $baseDir . '/../backend/src/bo/exception/EmptyNumberException.php',
    'Exceptions\\InternationalTrafficNotAllowedException' => $baseDir . '/../backend/src/bo/exception/InternationalTrafficNotAllowedException.php',
    'Exceptions\\MessageException' => $baseDir . '/../backend/src/bo/exception/MessageException.php',
    'Exceptions\\StatutException' => $baseDir . '/../backend/src/bo/exception/StatutException.php',
    'Exceptions\\TooLongMessageException' => $baseDir . '/../backend/src/bo/exception/TooLongMessageException.php',
    'Exceptions\\TooLongOrShortNumberException' => $baseDir . '/../backend/src/bo/exception/TooLongOrShortNumberException.php',
    'Exceptions\\UnknownIndicativeException' => $baseDir . '/../backend/src/bo/exception/UnknownIndicativeException.php',
    'Log\\Loggers' => $baseDir . '/Loggers.php',
    'Menu\\Menu' => $baseDir . '/../backend/src/be/Menu.php',
    'Menu\\MenuController' => $baseDir . '/../backend/src/bo/menu/MenuController.php',
    'Menu\\MenuManager' => $baseDir . '/../backend/src/bo/menu/MenuManager.php',
    'Menu\\MenuQueries' => $baseDir . '/../backend/src/bo/menu/MenuQueries.php',
    'Racine\\Bootstrap' => $baseDir . '/bootstrap.php',
    'UVd\\DoctrineFunction\\DateFormat' => $baseDir . '/../lib/DoctrineFunction/DateFormat.php',
    'UVd\\DoctrineFunction\\UnixTimestamp' => $baseDir . '/../lib/DoctrineFunction/UnixTimestamp.php',
    'User\\Profil' => $baseDir . '/../backend/src/be/Profil.php',
    'User\\User' => $baseDir . '/../backend/src/be/User.php',
    'User\\UserController' => $baseDir . '/../backend/src/bo/user/UserController.php',
    'User\\UserManager' => $baseDir . '/../backend/src/bo/user/UserManager.php',
    'User\\UserQueries' => $baseDir . '/../backend/src/bo/user/UserQueries.php',
    'tools\\Tool' => $baseDir . '/../backend/src/tools/Tool.php',
);
<file_sep>/auth.js
;(function ($) {
    // Authentication helper: posts credentials to the backend and, on
    // success, replays them to cookies.php through a hidden POST form so
    // the server-side session cookie gets set.
    Auth = function (options) {
        // constructor
        var construct = function (options) {
            $.extend(options);
        };

        this.signin = function (username, password) {
            if (username !== '' && password !== '') {
                $.ajax({
                    type: "POST",
                    url: "backend/src/bo/user/UserController.php",
                    data: {
                        login: username,
                        // Fix: a broken placeholder token here meant the
                        // hashed password was never sent; hash the actual
                        // password argument, consistent with the form below.
                        password: $.md5(password),
                        ACTION: 'SIGN_IN'
                    },
                    success: function (data) {
                        data = $.parseJSON(data);
                        if (data.rc === 1) {
                            // Build and submit a hidden form carrying the
                            // credentials to cookies.php.
                            var form = document.createElement("form");
                            var login = document.createElement("input");
                            var pass = document.createElement("input");
                            form.action = "cookies.php";
                            form.method = "post";
                            login.name = "login";
                            login.type = "hidden";
                            login.value = username;
                            pass.name = "password";
                            pass.type = "hidden";
                            pass.value = $.md5(password);
                            form.appendChild(login);
                            form.appendChild(pass);
                            document.body.appendChild(form);
                            form.submit();
                        } else if (data.rc === 0) {
                            new PNotify({type: 'info', title: 'Authentification', text: 'Ce compte est desactivé. Veuillez contacter votre administrateur.'});
                        } else {
                            new PNotify({type: 'error', title: 'Authentification', text: 'Login ou mot de passe incorrect.'});
                        }
                    },
                    error: function (data) {
                        new PNotify({type: 'error', title: 'Authentification', text: 'Erreur de connexion.'});
                        return false;
                    }
                });
            }
            else {
                new PNotify({type: 'error', title: 'Authentification', text: 'Login ou mot de passe vide.'});
            }
        };

        return construct(options);
    };
}(jQuery));
<file_sep>/backend/src/bo/common/CommonManager.php
<?php
namespace Common;
/**
* les classes utilis�es dans ce fichier.
*
*/
//use Common\Template as Template;
use Doctrine\DBAL\Types\TextType;
use Doctrine\DBAL\Types\TimeType;
use Doctrine\DBAL\Types\Type;
use Doctrine\ORM\Mapping\Entity;
use Common\CommonQueries as CommonQueries;
/*
* 2SMOBILE
* ----------------------------------------
* @author Kiwi <<EMAIL>>
* @copyright 2006-2015 Kiwi/2SI TemplateManager
* @version 2.0.0
* @link http://www.kiwi.sn
* @link http://www.ssi.sn
* ----------------------------------------
*/
/**
* Fait office d'intermédiaire entre le controller et les queries.
*
*/
use Bo\BaseAction as BaseAction;
/**
 * Thin service layer: forwards each operation from the controllers to
 * CommonQueries, which holds the actual Doctrine persistence logic.
 */
class CommonManager extends BaseAction implements \Bo\BaseManager {

    private $commonQueries;

    public function __construct() {
        $this->commonQueries = new CommonQueries();
    }

    // ---- Status transitions on a list of entity ids -------------------

    public function activate($entity, $listId, $userId = null) {
        return $this->commonQueries->activate($entity, $listId, $userId);
    }

    public function cancelled($entity, $listId, $userId = null) {
        return $this->commonQueries->cancelled($entity, $listId, $userId);
    }

    public function validate($entity, $listId, $userId = null) {
        return $this->commonQueries->validate($entity, $listId, $userId);
    }

    public function deactivate($entity, $listId, $userId = null) {
        return $this->commonQueries->deactivate($entity, $listId, $userId);
    }

    public function findById($entity, $entityId) {
        return $this->commonQueries->findById($entity, $entityId);
    }

    public function insert($entity, $supp = null, $ligneEntities = null) {
        return $this->commonQueries->insert($entity, $supp, $ligneEntities);
    }

    /**
     * Moves rows to the recycle bin by switching their status to 0.
     * @param mixed $entity entity name, $listId ids to move
     */
    public function remove($entity, $listId, $userId = null) {
        return $this->commonQueries->remove($entity, $listId, $userId);
    }

    /**
     * @author Diodio
     * Deletes rows by switching their status to -1.
     */
    public function delete($entity, $listId, $userId) {
        return $this->commonQueries->delete($entity, $listId, $userId);
    }

    /**
     * @author Diodio
     * Checks whether the given code already exists.
     */
    public function findByCode($entity, $entityCode) {
        return $this->commonQueries->findByCode($entity, $entityCode);
    }

    /**
     * @author Diodio
     * Same as findByCode but returns the matching entity.
     */
    public function findByCodeEntity($entity, $entityCode) {
        return $this->commonQueries->findByCodeEntity($entity, $entityCode);
    }

    /**
     * @author Diodio
     * Checks whether the code exists for a different entity id
     * (used to validate uniqueness on update).
     */
    public function findByCodeByEntityId($entity, $entityCode, $entityId) {
        return $this->commonQueries->findByCodeByEntityId($entity, $entityCode, $entityId);
    }

    public function restore($entity, $listId, $userId = null) {
        return $this->commonQueries->restore($entity, $listId, $userId);
    }

    public function update($entity) {
        return $this->commonQueries->update($entity);
    }

    public function view($id) {
        return $this->commonQueries->view($id);
    }

    public function doArchive($entity, $listId, $userId = null) {
        return $this->commonQueries->doArchive($entity, $listId, $userId);
    }

    public function undoArchive($entity, $listId, $userId = null) {
        return $this->commonQueries->undoArchive($entity, $listId, $userId);
    }

    // ---- Per-establishment lookups ------------------------------------

    public function isExistEmailByEtablissement($entity, $email, $etablissementId) {
        return $this->commonQueries->isExistEmailByEtablissement($entity, $email, $etablissementId);
    }

    public function isExistNumberPhoneByEtablissement($entity, $telephone, $etablissementId) {
        return $this->commonQueries->isExistNumberPhoneByEtablissement($entity, $telephone, $etablissementId);
    }

    public function findAllEntitiesArchivesByEtablissement($entity, $etablissementId) {
        return $this->commonQueries->findAllEntitiesArchivesByEtablissement($entity, $etablissementId);
    }

    public function findAllEntitiesNonArchivesByEtablissement($entity, $etablissementId) {
        return $this->commonQueries->findAllEntitiesNonArchivesByEtablissement($entity, $etablissementId);
    }

    public function findByCodeAndEtablissement($entity, $entityCode, $etablissementId) {
        return $this->commonQueries->findByCodeAndEtablissement($entity, $entityCode, $etablissementId);
    }

    public function codeGenerator($libelle) {
        return $this->commonQueries->codeGenerator($libelle);
    }

    public function getLasId($entity) {
        return $this->commonQueries->getLasId($entity);
    }

    public function revokePersonnelAssociation($entity, $anneeScolaireId, $classProfOrclasseSurvOrmatiereProfId) {
        return $this->commonQueries->revokeAssociation($entity, $anneeScolaireId, $classProfOrclasseSurvOrmatiereProfId);
    }

    public function revoke($entity, $id) {
        return $this->commonQueries->revoke($entity, $id);
    }

    public function getEntitiesByEtabAndLib($entity, $libelle, $etablissementId) {
        return $this->commonQueries->getEntitiesByEtabAndLib($entity, $libelle, $etablissementId);
    }

    /**
     * Returns the last generated id (second column of the last row of the
     * query result), or null when there is no result.
     */
    public function getLastId($entity) {
        $maxId = $this->commonQueries->getLasId($entity);
        $idMax = null;
        if ($maxId != null) {
            foreach ($maxId as $maxid) {
                $idMax = $maxid;
            }
            // Guard: an empty iterable previously left $idMax undefined,
            // triggering a PHP notice / error on the read below.
            if ($idMax !== null) {
                return $idMax[1];
            }
        }
        return null;
    }

    public function findEtablissementByCode($entity, $entityCode) {
        return $this->commonQueries->findEtablissementByCode($entity, $entityCode);
    }
}
| 50237b7538eed711dcd45f219850813c3780aaa2 | [
"JavaScript",
"PHP",
"INI"
] | 34 | PHP | matgere/ussddynamic | 99cc6e1ff65026a84acf8234bc23571554413294 | a6af40d7b1ad74d664caf8b22897da39a2a60fe0 |
refs/heads/master | <repo_name>edmarr2/GrupeApp<file_sep>/README.md
# GrupeApp
Sistema de troca de mensagens na web
Disciplina: Topicos em Sistemas de Informação
<file_sep>/app/firebase-db.js
import Firebase from 'firebase';
var firebaseApp = Firebase.initializeApp({
apiKey: "<KEY>",
authDomain: "grupe-2ee3c.firebaseapp.com",
databaseURL: "https://grupe-2ee3c.firebaseio.com",
projectId: "grupe-2ee3c",
storageBucket: "grupe-2ee3c.appspot.com",
messagingSenderId: "557423079572",
appId: "1:557423079572:web:f37b725c4adc0155a40481",
measurementId: "G-5BHT5Z1FHR"
});
export default firebaseApp.database();<file_sep>/app/rooms-create.component.js
import Vue from 'vue';
import VueFire from 'vuefire';
import db from './firebase-db';
Vue.use(VueFire);
var rooms = [
{id: "001", name: "CALCULO", description: "Dia 17/10/2019,Horario do estudo: 17h"},
{id: "002", name: "FISICA", description: "Dia 18/10/2019,Horario do estudo: 18h"},
{id: "003", name: "TOP. EM SISTEMAS DE INFORMAÇÃO", description: "Dia 20/10/2019,Horario do estudo: 18h"},
{id: "004", name: "DESENVOLVIMENTO MOBILE", description: "Dia 21/10/2019,Horario do estudo: 18h"},
{id: "005", name: "<NAME>", description: "Dia 23/10/2019,Horario do estudo: 18h"},
{id: "006", name: "LAB. PROGRAMAÇÃO I", description: "Dia 25/11/2019,Horario do estudo: 18h"},
];
export default {
template: require('html-loader!../templates/rooms-create.component.html'),
firebase: {
rooms: db.ref('chat/rooms')
},
ready: function () {
var chatRef = db.ref('chat')
var roomsChildren = chatRef.child('rooms');
rooms.forEach(function (room) {
roomsChildren.child(room.id).set({
name: room.name,
description: room.description
});
})
}
}; | 94c81338be86f8987db39bbe41fb30bb8afc2d74 | [
"Markdown",
"JavaScript"
] | 3 | Markdown | edmarr2/GrupeApp | baa056b334989efd6f4a6c6e27b8547aa3fcd3b4 | 29bc52992ee779c5aaa179361144e6ff1ce56e84 |
refs/heads/main | <file_sep># The Deep Dream Algorithm 💻💤💤
Creating Vivid Visualizations with Machine Learning and Math: Activation Maximization Through Gradient Ascent
<p align="center">
<img src="gifs/movie5.gif" width="300"/>
<img src="gifs/movie4.gif" width="300"/>
</p>
## What is Deep Dream, and how can a computer "Dream"?
As image recognition, object detection and segmentation neural networks get more advanced, they suffer more and more from a lack of interpretability. Interpretability is a critical and often overlooked aspect of Machine Learning; how can we trust the models we build? The Deep Dream algorithm offers a growing solution to the poor interpretability of neural networks by shining a light into the notorious "black box" of these models.
Deep Dream performs Activation Maximization, also known as Feature Visualization, in order to reveal the features that a given convolutional neural network is looking for. By maximally activating the brain of the model, we get vivid visualizations of the learned representations.
## Human Brains vs Artificial Brains (VGG16 CNN Layers Trained on the ImageNet Dataset):
Here are some examples of the VGG16 layers visualized:
### Shallow Layers
Layers closer to the input and beginning of the network contain simple edges and patterns.
<p align="center">
<img src="vgg_outputs/vggblock1_conv2.jpg" width="200"/>
<img src="vgg_outputs/vggblock3_conv1.jpg" width="200"/>
</p>
### Deeper Layers
Deeper Layers, layers closer to the classification layers and output, contain higher level features like eyes.
<p align="center">
<img src="vgg_outputs/vggblock4_conv1.jpg" width="200"/>
<img src="vgg_outputs/vggblock5_conv1.jpg" width="200"/>
</p>
### Parallels to the Human Mind
A Convolutional Neural Network architecture learns a separation of concerns that draws natural parallels to the human visual cortex areas. In our primary visual cortex, 6 layers of cells extract basic information of the visual field, some of which is edges, orientation, and color of objects. A ventral stream then passes through the secondary visual cortex; neurons in this region are responsible for recognizing objects, such as faces, based on their size, shape, and color. In convolutional neural networks, the receptive fields of earlier layers of the network capture edges, orientations, and colors, just like the primary visual cortex. Also, the later layers capture higher complexity information like patterns and objects themselves, similar to the secondary visual cortex. This mathematically learned visual system is incredibly biomimetic, it mimics our neurobiology, and there may be cause for augmenting current state-of-the-art models with insights from Cognitive Science.
## InceptionV3: Trained on ImageNet
We can also input any image to see how the model might "hallucinate" learned features, resulting in some psychedelic imagery. Here are some exciting examples:
<p align="center">
<img src="uploads/castle.jpg" width="700"/>
<img src="uploads/dream2.png" width="700"/>
</p>
<p align="center">
<img src="uploads/butterfly.jpg" width="700"/>
<img src="uploads/butterfly5.png" width="700"/>
</p>
One example video I made early in the creation of this program: https://www.youtube.com/watch?v=oWClAmB6xAM
## Getting Started
Create a Dreamer object with the default hyperparameters. Call the object with your input image to start spectating the dreaming process! Fine tuning hyperparameters is really a matter of trial and error and personal preference, since what you find visually pleasing is completely subjective! I hope you enjoy this project and find this example useful in your own investigations of image recognition models.
I also recommend you check out an excellent tutorial at https://www.tensorflow.org/tutorials/generative/deepdream , and a brilliant article https://ai.googleblog.com/2015/06/inceptionism-going-deeper-into-neural.html
<file_sep>from scipy.ndimage import zoom
import numpy as np
import tensorflow as tf
import cv2
import IPython.display as display
import PIL.Image
import os
import imutils
def clipped_zoom(img, zoom_factor, rotate=False):
    """Zoom into or out of ``img`` by ``zoom_factor`` while keeping the
    original height and width (centre-cropping or zero-padding as needed).

    Optionally rotates slightly first; that branch calls ``.numpy()``, so it
    assumes a TF tensor input — TODO confirm with callers.
    """
    if rotate:
        img = imutils.rotate(img.numpy(), angle=0.5)

    height, width = img.shape[:2]
    # Scale only the two spatial axes; any trailing (channel) axes keep
    # a zoom factor of 1 so RGB data is untouched.
    factors = (zoom_factor, zoom_factor) + (1,) * (img.ndim - 2)

    if zoom_factor < 1:
        # Zoom out: shrink the image, then centre it on a zero canvas of
        # the original size.
        new_h = int(np.round(height * zoom_factor))
        new_w = int(np.round(width * zoom_factor))
        pad_top = (height - new_h) // 2
        pad_left = (width - new_w) // 2
        result = np.zeros_like(img)
        result[pad_top:pad_top + new_h, pad_left:pad_left + new_w] = zoom(img, factors)
    elif zoom_factor > 1:
        # Zoom in: enlarge a centre crop, then trim any rounding overflow
        # back to the original size.
        crop_h = int(np.round(height / zoom_factor))
        crop_w = int(np.round(width / zoom_factor))
        top = (height - crop_h) // 2
        left = (width - crop_w) // 2
        result = zoom(img[top:top + crop_h, left:left + crop_w], factors)
        trim_y = (result.shape[0] - height) // 2
        trim_x = (result.shape[1] - width) // 2
        result = result[trim_y:trim_y + height, trim_x:trim_x + width]
    else:
        # zoom_factor == 1: nothing to do.
        result = img
    return result
# Randomly shift the image to avoid tiled boundaries.
def random_roll(img, maxroll):
    """Circularly shift ``img`` by a random (dy, dx) in [-maxroll, maxroll).

    Returns both the shift tensor and the rolled image so the caller can
    undo the roll later.
    """
    offsets = tf.random.uniform(
        shape=[2], minval=-maxroll, maxval=maxroll, dtype=tf.int32)
    rolled = tf.roll(img, shift=offsets, axis=[0, 1])
    return offsets, rolled
# Normalize an image
def deprocess(img):
    """Map a dream tensor from the [-1, 1] range back to uint8 [0, 255]."""
    rescaled = 255 * (img + 1.0) / 2.0
    return tf.cast(rescaled, tf.uint8)
# Display an image
def show(img):
    """Render ``img`` inline (notebook) via PIL + IPython display."""
    pil_image = PIL.Image.fromarray(np.array(img))
    display.display(pil_image)
def save_image(img, directory='outputs', file_name='test'):
    """Write ``img`` to ``<directory>/<file_name>.jpg``, creating the
    directory if needed.

    Accepts a NumPy array or anything exposing ``.numpy()`` (e.g. a TF
    tensor). Channels are swapped with cv2 when possible; inputs whose
    layout cv2 cannot convert (e.g. grayscale) are saved as-is.
    """
    if not isinstance(img, np.ndarray):
        img = img.numpy()
    try:
        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
    except cv2.error:
        # Narrowed from a bare except: only swallow cv2 conversion
        # failures instead of hiding every possible error.
        pass
    # makedirs(exist_ok=True) avoids the isdir/mkdir race of the original.
    os.makedirs(directory, exist_ok=True)
    cv2.imwrite(directory + '/' + file_name + '.jpg', img)
| e2c3a1c4087921e20f402bb96a49128b3517e6e2 | [
"Markdown",
"Python"
] | 2 | Markdown | stephenjarrell19/DeepDream | 7ea72dd420a8dd7f51f71ffc90dd70dec8ec264d | 1c00dd2665be31bc0b2b9194775520ed06588f2d |
refs/heads/master | <file_sep>package modelo;
// Holds the board-size options together with their difficulty labels.
public enum MedidasTablero {
    // Display format is "7x10 (Nivel 1)": getMedidas() + getNombre().
    medida1("7x10", " (Nivel 1)"), medida2("10x15", " (Nivel 2)"), medida3("12x25", " (Nivel 3)");

    private final String medidas;
    private final String nombre;

    private MedidasTablero(String medidas, String nombre) {
        this.medidas = medidas;
        this.nombre = nombre;
    }

    public String getMedidas() {
        return medidas;
    }

    public String getNombre() {
        return nombre;
    }
}
<file_sep>package modelo;
// Tracks how many mines have been located out of the board's total.
public class Contador {

    private int minasTotales;
    private int minasContadas;

    public Contador(int numMinasInicial) {
        minasTotales = numMinasInicial;
    }

    // Adds num to the running count and returns "found/total".
    public String actualizarContador(int num) {
        minasContadas += num;
        return minasContadas + "/" + minasTotales;
    }

    public int obtConteo() {
        return minasContadas;
    }
}
<file_sep>package test;
import java.util.Arrays;
import java.util.Collection;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import modelo.Cronometro;
@RunWith(Parameterized.class)
// Parameterised JUnit test around the game timer.
// NOTE(review): run() sleeps one second per iteration for 1000 iterations,
// and initialize() invokes it directly, so this test takes ~17 minutes —
// confirm whether a much shorter loop was intended.
public class CronometroTest extends Thread implements Runnable {

    // Current timer value as text; mirrors Cronometro's counter field.
    public String cron;

    // Supplies the parameterised values for `cron`.
    // NOTE(review): the "" case would make obtTiempoTest() throw
    // NumberFormatException in Integer.parseInt — verify it is exercised.
    @Parameters
    public static Collection<Object[]> data() {
        Object[][] data = new Object[][] { { "1"}, { "" }, { "0" } };
        return Arrays.asList(data);
    }

    // Checks that the timer starts at 0, then lets it run.
    @Test
    public void initialize() {
        CronometroTest cronometroTest = new CronometroTest("0");
        Assert.assertEquals(cronometroTest.obtTiempoTest(), 0);
        // Check that the timer advances
        cronometroTest.run();
    }

    public CronometroTest(String r2) {
        cron = r2;
    }

    @Test
    public void run() {
        try {
            // Counts seconds up to 1000, then marks the time as infinite.
            for (int j = 0; j < 1000; j++) {
                cron="" + j;
                Thread.sleep(1000);
            }
            cron="Tiempo infinito";
        } catch (InterruptedException ex) {
            System.out.println(Cronometro.class.getName());
        }
    }

    // Parses the current `cron` string as the elapsed time in seconds.
    public int obtTiempoTest() {
        System.out.println(Integer.parseInt(cron));
        return Integer.parseInt(cron);
    }
}
<file_sep>package vista;
import java.awt.Color;
import java.awt.Image;
import javax.swing.BorderFactory;
import javax.swing.Icon;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.border.Border;
// Shows the status face: smiling while playing or winning, sad after losing.
public class Emoticono {

    JButton emoticon;

    public Emoticono() {
        emoticon = new JButton();
    }

    // Returns the button configured with the happy (x == true) or sad icon,
    // scaled to 100x100 with no text, border or background.
    public JButton emoticon_sonrisa(boolean x) {
        String ruta = x ? "/imagenes/cara_si.png" : "/imagenes/cara_no.png";
        try {
            ImageIcon base = new ImageIcon(getClass().getResource(ruta));
            Icon escalado = new ImageIcon(base.getImage().getScaledInstance(100, 100, Image.SCALE_DEFAULT));
            emoticon.setText(null);
            emoticon.setIcon(escalado);
            emoticon.setBackground(new Color(0, 0, 0, 0));
            emoticon.setBorder(BorderFactory.createEmptyBorder());
        } catch (Exception e) {
            // Missing resource: keep the plain button instead of crashing.
            System.out.print(e);
        }
        return emoticon;
    }
}
<file_sep>package vista;
import java.awt.Dimension;
import java.awt.Image;
import javax.swing.Icon;
import javax.swing.ImageIcon;
import javax.swing.JButton;
//Representa cada casilla del juego
public class Casilla extends JButton {
public boolean esMina;
public boolean visitado;
public boolean bandera;
public boolean detectado;
public int minas_adyacentes;
public int index;
// A new cell starts hidden, unflagged and mine-free; only its grid index
// and the fixed 25x25 button size are set from outside.
public Casilla(int in) {
    index = in;
    esMina = false;
    visitado = false;
    bandera = false;
    detectado = false;
    minas_adyacentes = 0;
    setPreferredSize(new Dimension(25, 25));
}
// Swaps the button face for the classpath image at `ruta`, scaled to the
// button's current size.
public void cambiarimagen(String ruta) {
    ImageIcon original = new ImageIcon(getClass().getResource(ruta));
    Image escalada = original.getImage().getScaledInstance(getWidth(), getHeight(), Image.SCALE_DEFAULT);
    setText(null);
    setIcon(new ImageIcon(escalada));
}
}<file_sep>package controlador;
import java.awt.EventQueue;
import modelo.Contador;
import modelo.Puntuacion;
import vista.Tablero;
import vista.Casilla;
/**
* Incia la aplicacion.
*/
public class ProgramaBuscaminas {
// Entry point: builds and shows the board on the Swing event-dispatch thread.
public static void main(String[] args) {
    EventQueue.invokeLater(() -> {
        try {
            Tablero frame = new Tablero();
            frame.setVisible(true);
        } catch (Exception e) {
            e.printStackTrace();
        }
    });
}
}<file_sep>package vista;
import javax.swing.JFrame;
import javax.swing.JPanel;
import javax.swing.WindowConstants;
import javax.swing.border.Border;
import javax.swing.border.EmptyBorder;
import modelo.Buscaminas;
import modelo.GestorPuntuaciones;
import modelo.Puntuacion;
import modelo.MedidasTablero;
import javax.swing.JLabel;
import javax.swing.JMenu;
import javax.swing.JMenuBar;
import javax.swing.JMenuItem;
import javax.swing.JOptionPane;
import javax.swing.BorderFactory;
import javax.swing.Icon;
import javax.swing.ImageIcon;
import java.awt.GridBagLayout;
import java.awt.GridLayout;
import java.awt.Image;
import java.awt.GridBagConstraints;
import java.awt.Insets;
import java.awt.Toolkit;
import java.awt.Dimension;
import java.util.ArrayList;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;
// Contiene la informacion del panel de juego
// Main Swing frame of the game: builds the menu bar, the counters strip
// (mine counter, smiley, chronometer) and the minefield grid, and wires
// them to the Buscaminas game logic.
public class Tablero extends JFrame {

    public JPanel contentPane;
    public GridBagLayout gbl_contentPane;
    public JPanel panelVisual, rejilla, panelContadores, panelContador, panelEmoticon, panelCronometro;
    private JMenuBar menuBarInicio;
    private JMenuItem menuItemSalir, menuItemIniciar, menuItemPuntuaciones, menuItemGuardarResultado;
    private JMenu menuArchivo, menuAyuda;
    private Emoticono emoticon;
    private GestorPuntuaciones gesPun;
    private String nombreUsuario;
    private String medidaTableroNivelSeleccionado;
    public Casilla[][] c;
    public JLabel contadorLabel,cronometroLabel;
    TimerTask timer;
    String tiempo = "";
    // x = rows, y = columns of the current board.
    private int x, y, anchoTablero, altoTablero;
    private Buscaminas logicaJuegoBuscaminas;
    List<Integer> posicion_minas = new ArrayList<>();
    Icon clicado;
    boolean fin, gano;
    Puntuacion puntuacion = null;
    // Neighbour offsets passed to the game logic for adjacency scans.
    public int n1[] = { 1, 1, 0, -1, -1, -1, 0, 1 };
    public int n2[] = { 0, 1, 1, 1, 0, -1, -1, -1 };
    // NOTE(review): n3, n4, can_minas, timer, posicion_minas, c, gano and
    // puntuacion appear unused in this class — confirm before removing.
    int n3[] = { 1, 0, -1, -1, 0, 0, 1, 1 };
    int n4[] = { 0, 1, 0, 0, -1, -1, 0, 0 };
    int can_minas = 10;

    /**
     * Creates the frame.
     */
    public Tablero() {
        initialize();
    }

    // Sets up frame size/close behaviour, centres it on screen and lays
    // out the content pane with the menu panel on top.
    private void initialize() {
        setResizable(false);
        setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
        setBounds(300, 100, 500, 507);
        // Centre the window on the screen
        Dimension dimemsion = Toolkit.getDefaultToolkit().getScreenSize();
        this.setLocation(dimemsion.width / 2 - this.getSize().width / 2,
                dimemsion.height / 2 - this.getSize().height / 2);
        contentPane = new JPanel();
        contentPane.setBorder(new EmptyBorder(5, 5, 5, 5));
        setContentPane(contentPane);
        gbl_contentPane = new GridBagLayout();
        gbl_contentPane.columnWidths = new int[] { 658, 0 };
        gbl_contentPane.rowHeights = new int[] { 0, 409, 35, 0 };
        gbl_contentPane.columnWeights = new double[] { 1.0, Double.MIN_VALUE };
        gbl_contentPane.rowWeights = new double[] { 0.0, 1.0, 0.0, Double.MIN_VALUE };
        contentPane.setLayout(gbl_contentPane);
        GridBagConstraints gbc_panelVisual = new GridBagConstraints();
        gbc_panelVisual.insets = new Insets(0, 0, 5, 0);
        gbc_panelVisual.fill = GridBagConstraints.BOTH;
        gbc_panelVisual.gridx = 0;
        gbc_panelVisual.gridy = 0;
        contentPane.add(obtPanelMenu(), gbc_panelVisual);
    }

    // Lazily builds the top panel hosting the menu bar and the counters strip.
    private JPanel obtPanelMenu() {
        if (panelVisual == null) {
            panelVisual = new JPanel();
            GridBagLayout layoutMenu = new GridBagLayout();
            layoutMenu.columnWidths = new int[] { 500, 370, 0, 0 };
            layoutMenu.rowHeights = new int[] { 0, 49, 35, 0 };
            GridBagConstraints constraints = new GridBagConstraints();
            constraints.gridheight=3;
            constraints.fill = GridBagConstraints.VERTICAL;
            constraints.gridx = 0;
            constraints.gridy = 0;
            panelVisual.add(obtenerBarInicio(),constraints);
            panelVisual.setLayout(layoutMenu);
            constraints.gridx = 0;
            constraints.gridy = 1;
            panelVisual.add(obtenerBarContadores(),constraints);
            String title = "Buscaminas-singletonsgroup";
            Border border = BorderFactory.createTitledBorder(title);
            panelVisual.setBorder(border);
        }
        return panelVisual;
    }

    // Lazily builds the menu bar (Archivo / Ayuda) and wires the actions.
    private JMenuBar obtenerBarInicio() {
        if (menuBarInicio == null) {
            menuBarInicio = new JMenuBar();
            menuArchivo = new JMenu("Archivo");
            menuAyuda = new JMenu("Ayuda");
            menuItemSalir = new JMenuItem("Salir");
            menuItemIniciar = new JMenuItem("Iniciar...");
            menuItemGuardarResultado = new JMenuItem("GuardarResultado");
            menuItemPuntuaciones = new JMenuItem("Puntuaciones");
            menuItemIniciar.addActionListener(e -> presentarTablero());
            menuItemPuntuaciones.addActionListener(e -> mostrarPuntuaciones());
            menuItemGuardarResultado.addActionListener(e -> grabarPuntuacion(nombreUsuario, 0));
            menuItemSalir.addActionListener(e -> this.dispose());
            menuArchivo.add(menuItemIniciar);
            menuArchivo.add(menuItemPuntuaciones);
            menuArchivo.add(menuItemGuardarResultado);
            menuArchivo.add(menuItemSalir);
            menuBarInicio.add(menuArchivo);
            menuBarInicio.add(menuAyuda);
        }
        return menuBarInicio;
    }

    // Panel of panels: mine counter, smiley, chronometer.
    private JPanel obtenerBarContadores() {
        if (panelContadores == null) {
            panelContadores = new JPanel();
            GridBagLayout gbl_contentPanelContadores = new GridBagLayout();
            gbl_contentPanelContadores.columnWidths = new int[] { 350, 0 };
            gbl_contentPanelContadores.rowHeights = new int[] { 0, 0, 0, 0 };
            gbl_contentPanelContadores.columnWeights = new double[] { 1.0, Double.MIN_VALUE };
            gbl_contentPanelContadores.rowWeights = new double[] { 0.0, 1.0, 0.0, Double.MIN_VALUE };
            panelContadores.setLayout(gbl_contentPanelContadores);
            // Mine-counter panel
            panelContador = new JPanel();
            contadorLabel = new JLabel("0/0");
            panelContador.setName("contador");
            panelContador.add(contadorLabel);
            GridBagConstraints gbc_constraints = new GridBagConstraints();
            gbc_constraints.insets = new Insets(0, 0, 0, 0);
            gbc_constraints.fill = GridBagConstraints.CENTER;
            gbc_constraints.gridx = 1;
            gbc_constraints.gridy = 0;
            panelContadores.add(panelContador,gbc_constraints);
            // Smiley panel
            panelEmoticon = new JPanel();
            panelEmoticon.setName("emoticon");
            emoticon = new Emoticono();
            panelEmoticon.add(emoticon.emoticon_sonrisa(true));
            gbc_constraints.insets = new Insets(0, 0, 0, 0);
            gbc_constraints.fill = GridBagConstraints.CENTER;
            gbc_constraints.gridx = 2;
            gbc_constraints.gridy = 0;
            panelContadores.add(panelEmoticon,gbc_constraints);
            // Chronometer panel
            panelCronometro = new JPanel();
            cronometroLabel = new JLabel("0");
            panelCronometro.setName("cronometro");
            panelCronometro.add(cronometroLabel);
            gbc_constraints.insets = new Insets(0, 0, 0, 0);
            gbc_constraints.fill = GridBagConstraints.CENTER;
            gbc_constraints.gridx = 3;
            gbc_constraints.gridy = 0;
            panelContadores.add(panelCronometro,gbc_constraints);
        }
        return panelContadores;
    }

    // Shows the saved scores in a dialog.
    private void mostrarPuntuaciones() {
        gesPun = new GestorPuntuaciones();
        JOptionPane.showMessageDialog(panelVisual, gesPun.obtPuntuacionesDesdeXMLEnTabla());
    }

    // Persists a score for the given player name.
    private void grabarPuntuacion(String nombre, int puntuacionValor) {
        gesPun = new GestorPuntuaciones();
        gesPun.anadirPuntuacionDeUsuarioEnXML(nombre, puntuacionValor);
    }

    // Asks for board size and player name, builds the game logic and shows
    // the board.
    private void presentarTablero() {
        this.elegirTamanoTablero();
        x = altoTablero;
        y = anchoTablero;
        try {
            ImageIcon icon1 = new ImageIcon(getClass().getResource(String.format("/imagenes/boton_clicado.png")));
            Icon icono1 = new ImageIcon(
                    icon1.getImage().getScaledInstance(getWidth(), getHeight(), Image.SCALE_DEFAULT));
            clicado = icon1;
        } catch (Exception e) {
            System.out.println(e);
        }
        logicaJuegoBuscaminas = new Buscaminas(x,y,clicado,tiempo,n1,n2);
        logicaJuegoBuscaminas.grabarNombreEnSesion(nombreUsuario,medidaTableroNivelSeleccionado);
        generarTablero();
    }

    // (Re)builds the grid of cells, resizes the frame for the chosen level,
    // resets the game logic and starts the refresh timers.
    void generarTablero() {
        if (rejilla!=null){
            contentPane.remove(rejilla);
            rejilla=null;
            contentPane.updateUI();
        }
        rejilla = new JPanel();
        if (x == 12) {
            setSize(555, 670);
        } else if (x == 10) {
            setSize(505, 620);
        } else if (x == 7) {
            setSize(505, 620);
        }
        logicaJuegoBuscaminas.reiniciar(x, y, clicado);
        contadorLabel.setText(logicaJuegoBuscaminas.contador.actualizarContador(0));
        panelEmoticon.removeAll();
        panelEmoticon.add(logicaJuegoBuscaminas.emoticono.emoticon_sonrisa(true));
        try {
            actualizarContadorMinas();
            actualizarCronometro();
            actualizarEmoticon();
        }catch(Exception e) {}
        rejilla.setLayout(new GridLayout(x, y));
        for (int i = 0; i < x; i++) {
            for (int j = 0; j < y; j++) {
                rejilla.add(logicaJuegoBuscaminas.obtCasilla(i, j));
            }
        }
        // Place the grid of cells inside the content pane.
        GridBagConstraints gbc_panelCasillas = new GridBagConstraints();
        gbc_panelCasillas.insets = new Insets(0, 0, -35, 0);
        gbc_panelCasillas.gridx = 0;
        gbc_panelCasillas.gridy = 1;
        gbc_panelCasillas.fill = GridBagConstraints.BOTH;
        contentPane.add(rejilla, gbc_panelCasillas);
    }

    // Asks for the board size (via the MedidasTablero enum) and the player
    // name, storing both for the session.
    private void elegirTamanoTablero() {
        MedidasTablero medida_1 = MedidasTablero.medida1;
        MedidasTablero medida_2 = MedidasTablero.medida2;
        MedidasTablero medida_3 = MedidasTablero.medida3;
        Object[] opciones = {medida_1.getMedidas() + medida_1.getNombre(), medida_2.getMedidas() + medida_2.getNombre(), medida_3.getMedidas() + medida_3.getNombre()};
        Object selecionado = JOptionPane.showInputDialog(null, "Elige tamano de tablero", "Tablero",
                JOptionPane.INFORMATION_MESSAGE, null, opciones, opciones[0]);
        // Selected value looks like "7x10 (Nivel 1)": width x height.
        anchoTablero = Integer.parseInt(selecionado.toString().split(" ")[0].split("x")[0]);
        altoTablero = Integer.parseInt(selecionado.toString().split(" ")[0].split("x")[1]);
        // Keep player name and chosen level (board size) for the session.
        nombreUsuario = JOptionPane.showInputDialog("Escribe tu nombre");
        medidaTableroNivelSeleccionado=selecionado.toString();
    }

    // Refreshes the mine-counter label once per second.
    public void actualizarContadorMinas() throws InterruptedException {
        Timer timer = new Timer();
        timer.schedule(new TimerTask() {
            @Override
            public void run() {
                contadorLabel.setText(logicaJuegoBuscaminas.contador.actualizarContador(0));
            }
        }, 0, 1000);
    }

    // Refreshes the chronometer label once per second.
    public void actualizarCronometro() throws InterruptedException {
        Timer timer = new Timer();
        timer.schedule(new TimerTask() {
            @Override
            public void run() {
                cronometroLabel.setText(logicaJuegoBuscaminas.cronometro.cron.toString());
            }
        }, 0, 1000);
    }

    // Swaps the smiley for the sad face once the game logic reports the end.
    public void actualizarEmoticon() throws InterruptedException {
        Timer timer = new Timer();
        timer.schedule(new TimerTask() {
            @Override
            public void run() {
                if (logicaJuegoBuscaminas.fin) {
                    panelEmoticon.removeAll();
                    panelEmoticon.add(logicaJuegoBuscaminas.emoticono.emoticon_sonrisa(false));
                    contentPane.updateUI();
                }
            }
        }, 0, 1000);
    }
}
}<file_sep>package modelo;
//Maneja la sesion durante la partida junto con su informacion
public class Sesion {
private String nombreJugador;
private String medidaTablero;
private static Sesion sesion;
private int puntuacion;
private Sesion(String nomJugador,String medTablero) {
nombreJugador = nomJugador;
medidaTablero = medTablero;
}
public static Sesion getInstanciaSingleton(String nomJugador,String medTablero)
{
if (sesion == null) {
sesion = new Sesion (nomJugador,medTablero);
}
else
{
System.out.println("No se pudo crear el objeto "+ sesion + " porque ya existe un objeto de la clase sesion");
}
return sesion;
}
public String obtenerNombre() {
return nombreJugador;
}
public String obtenerMedidaTablero() {
return medidaTablero;
}
public int obtenerPuntuacion() {
return puntuacion;
}
public void establecerPuntuacion(int puntos) {
puntuacion=puntos;
}
}
| 5baac4f6dd7af5340b96ac2fb0eea52a3ace81ab | [
"Java"
] | 8 | Java | sergioehu/buscaminas-ehu | e26f798377c5f7e8e13ad44790a08c4c21258dc8 | fdd12485bc7080bdff0f9b7570eb0daf82e9395c |
refs/heads/master | <repo_name>pxpsoft/test<file_sep>/src/main/java/com/pxp/word/controller/WordController.java
package com.pxp.word.controller;
import com.pxp.word.entity.Pager;
import com.pxp.word.entity.ReturnMsg;
import com.pxp.word.entity.Word;
import com.pxp.word.mapper.WordMapperXml;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@RestController
public class WordController {
@Autowired
WordMapperXml wordMapperXml;
@RequestMapping("get10Word")
public List<Word> get10Word() {
List<Word> list = wordMapperXml.get10Word();
return list;
}
@RequestMapping("getWordById")
public Word getWordById(@RequestParam(value = "idWord") int idWord) {
Word word = wordMapperXml.getWordById(idWord);
return word;
}
@RequestMapping("getWordByPager")
//page 页码
//size 每页size
public Pager<Word> findByPager(@RequestParam(value = "classId") int classId, @RequestParam(value = "page") int page, @RequestParam(value = "size") int size) {
Map<String, Object> params = new HashMap<String, Object>();
params.put("classId", classId);
params.put("startNum", (page - 1) * size);
params.put("size", size);
Pager<Word> pager = new Pager<Word>(page, size);
List<Word> list = wordMapperXml.findByPager(params);
long count = wordMapperXml.count(classId);
pager.setRows(list);
pager.setTotal(count);
return pager;
}
@RequestMapping("insertWord")
public ReturnMsg insertWord(@RequestParam(value = "classId") int classId, @RequestParam(value = "name") String name, @RequestParam(value = "symbol") String symbol, @RequestParam(value = "desc") String desc) {
Word word = new Word();
// word.setIdWord(0);
word.setClassId(classId);
word.setName(name);
word.setDesc(desc);
word.setSymbol(symbol);
// " values (#{name},#{desc},#{symbol},#{classId})")
int num = wordMapperXml.insertWord(word);
//获取到自增id
System.out.println("自增id:" + word.getIdWord());
if (num > 0) {
ReturnMsg returnMsg = ReturnMsg.ReturnMsgSuccess();
returnMsg.data = word;
return returnMsg;
} else {
return ReturnMsg.ReturnMsgFail();
}
}
@RequestMapping("updateWord")
public ReturnMsg updateWord(@RequestParam(value = "idWord") int idWord, @RequestParam(value = "classId") int classId, @RequestParam(value = "name") String name, @RequestParam(value = "symbol") String symbol, @RequestParam(value = "desc") String desc) {
/*
Map<String, Object> params = new HashMap<String, Object>();
params.put("idWord", idWord);
params.put("classId", classId);
params.put("name",name);
params.put("desc",desc);
params.put("symbol",symbol);
*/
Word word = new Word();
word.setIdWord(idWord);
word.setClassId(classId);
word.setName(name);
word.setDesc(desc);
word.setSymbol(symbol);
int num = wordMapperXml.updateWord(word);
if (num > 0) {
ReturnMsg returnMsg = ReturnMsg.ReturnMsgSuccess();
returnMsg.data = word;
return returnMsg;
} else {
return ReturnMsg.ReturnMsgFail();
}
}
@RequestMapping("deleteWord")
public ReturnMsg deleteWord(@RequestParam(value = "idWord") int idWord) {
int num = wordMapperXml.deleteWord(idWord);
if (num > 0) {
ReturnMsg returnMsg = ReturnMsg.ReturnMsgSuccess();
returnMsg.msg = "删除成功";
return returnMsg;
} else {
return ReturnMsg.ReturnMsgFail();
}
}
}
<file_sep>/src/main/java/com/pxp/word/mapper/WordMapper.java
package com.pxp.word.mapper;
import com.pxp.word.entity.Word;
import org.apache.ibatis.annotations.*;
import org.springframework.stereotype.Repository;
import java.util.List;
import java.util.Map;
@Mapper
@Repository
public interface WordMapper {
@Select("select * from word limit 0,10")
public List<Word> get10Word();
//传入参数
@Select("select * from word WHERE idWord = #{id}")
public Word getWordById(int id);
//传入参数
@Select("select * from word WHERE name = #{name} limit 0,1")
public Word getWordByName(String name);
@Select("select * from word where classId=#{classId} limit #{startNum},#{size}")
public List<Word> findByPager(Map<String, Object> params);
@Select("select count(1) from word where classId=#{classId}")
public long count(int classId);
//@Insert("insert into word (`name`,`desc`,`symbol`,`classId`)\n" +
// " values (#{name},#{desc},#{symbol},#{classId})")
//返回影响的行数
// public int insertWord(Word word);
@Update("update word \n" +
" SET `name` = #{name},`desc` = #{desc},`symbol` = #{symbol},`classId` = #{classId} WHERE `idWord` = #{idWord}")
//返回影响的行数
public int updateWord(Word word);
@Delete(" delete from word where idWord = #{idWord}")
public int deleteWord(int idWord);
}
<file_sep>/src/test/java/com/pxp/word/WordApplicationTests.java
package com.pxp.word;
import com.pxp.word.controller.WordController;
import com.pxp.word.entity.Word;
import com.pxp.word.mapper.UserMapper;
import com.pxp.word.mapper.WordMapperXml;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import java.util.List;
@SpringBootTest
class WordApplicationTests {
@Autowired
UserMapper userMapper;
@Autowired
WordMapperXml wordMapperxml;
@Autowired
WordController wordController;
@Test
void getData() {
//System.out.println(wordMapperxml.deleteWord(166970));
Word word = new Word();
word.setIdWord(166969);
word.setClassId(82);
word.setName("mytest5");
word.setDesc("测试5");
word.setSymbol("[测试5]");
System.out.println(wordMapperxml.updateWord(word));
// values (#{name},#{desc},#{symbol},#{classId})
// System.out.println(wordMapperxml.count(82));
/*
Map<String, Object> params = new HashMap<>();
params.put("classId", 82);
params.put("name", "mytest6");
params.put("desc", "测试6");
params.put("symbol", "[测试6]");
System.out.println(wordMapperxml.insertWord(params));
*/
/*
Word word=new Word();
word.setClassId(82);
word.setName( "mytest6");
word.setDesc("测试6");
word.setSymbol("[测试6]");
System.out.println(wordMapperxml.insertWord(word));
//获取到自增id
System.out.println(word.getIdWord());
*/
/*
List<Word> list= wordMapperxml.get10Word();
System.out.println(list);
System.out.println(wordMapperxml.getWordById(56464));
System.out.println(wordMapperxml.getWordByName("hello"));
// System.out.println(wordMapperxml.findByPager(82,(2-1)*10,10));
Map<String, Object> params = new HashMap<>();
params.put("classId", 82);
params.put("startNum", (2-1)*10);
params.put("size", 10);
System.out.println(wordMapperxml.findByPager(params));*/
}
@Test
void get10Word() {
List<Word> list = wordMapperxml.get10Word();
System.out.println(list);
}
}
<file_sep>/src/main/java/com/pxp/word/mapper/WordMapperXml.java
package com.pxp.word.mapper;
import com.pxp.word.entity.Word;
import org.springframework.stereotype.Component;
import java.util.List;
import java.util.Map;
@Component
public interface WordMapperXml {
public List<Word> get10Word();
public Word getWordById(int id);
public Word getWordByName(String name);
public List<Word> findByPager(Map<String, Object> params);
public long count(int classId);
//返回影响的行数
// public int insertWord(Map<String, Object> params);
public int insertWord(Word word);
//返回影响的行数
public int updateWord(Word word);
public int deleteWord(int idWord);
}
| 54477e84397fcf85931d29724dd4cbe3c7bc2d5d | [
"Java"
] | 4 | Java | pxpsoft/test | 5fa9fcc52e025949f0f811535599647e0a530efa | 6990c9ccc221668503c4151895ca44012423bf42 |
refs/heads/main | <file_sep>$(window).on("load", function () {
$(".loader-wrapper").fadeOut("slow");
});
window.cont = function cont() {
if (document.getElementById('cont').style.display == 'none') {
document.getElementById('cont').style.display = 'block';
} else {
document.getElementById('cont').style.display = 'none';
}
}
var btn = $('#button');
$(window).scroll(function () {
if ($(window).scrollTop() > 0) {
btn.addClass('show');
} else {
btn.removeClass('show');
}
});
btn.on('click', function (e) {
e.preventDefault();
$('html, body').animate({
scrollTop: 0
}, '0');
});
var d = new Date();
document.getElementById("time").innerHTML = d; | d32b51722e275a228d6ba9117386309afa7ae8ab | [
"JavaScript"
] | 1 | JavaScript | thanatchay1/group4-source | c35ef5cb4c23d0bbf9657597821a121141c7290c | 8f41c74119f947596d1126511b4dd143c7bebd9b |
refs/heads/master | <repo_name>Yukei2K/GlueckUndKanja<file_sep>/index.php
<html>
<head>
<script src="https://cdnjs.cloudflare.com/ajax/libs/paho-mqtt/1.0.1/mqttws31.js" type="text/javascript"></script>
</head>
<?php
require "header.php";
?>
<main>
<p>You are logged out!</p>
<p>You are logged in!</p>
</main>
<file_sep>/README.md
# GlueckUndKanja
Webpage for showing CO2 values of specific rooms.
<file_sep>/mqtt_websockets.py
#Imports
from SimpleWebSocketServer import WebSocket, SimpleWebSocketServer, SimpleSSLWebSocketServer
import signal, sys, logging
from optparse import OptionParser
import MySQLdb as mdb
import string
import datetime
import time
import thread
import json
from collections import defaultdict
import paho.mqtt.client as mqtt
websockets_port=9002
mqtt_server="localhost"
mqtt_port=1884
mqtt_username="websockets"
mqtt_password="<PASSWORD>"
mysql_db='HAC'
mysql_username='websockets'
mysql_password='<PASSWORD>'
mysql_server='localhost'
#Variables
var_connections_id = {}
delchars = ''.join(c for c in map(chr, range(256)) if not c.isalnum())
#Mysql
class DB:
conn = None
def connect(self):
self.conn = mdb.connect(mysql_server, mysql_username, mysql_password, mysql_db)
self.conn.autocommit(True)
print("MySQL Connected")
def query(self, sql):
try:
cursor = self.conn.cursor()
cursor.execute(sql)
except (AttributeError, mdb.OperationalError):
self.connect()
cursor = self.conn.cursor()
cursor.execute(sql)
return cursor
db = DB()
db.connect()
db2 = DB()
db2.connect()
db3 = DB()
db3.connect()
db4 = DB()
db4.connect()
#MQTT on connect callback
def on_connect(mqttc, userdata, flags, rc):
print("MQTT Connected...")
#MQTT on message callback
def on_message(mqttc, userdata, msg):
#print(msg.topic+" "+str(msg.qos)+" "+str(msg.payload))
global var_connections_id
msg_topic_sql={}
msg_topic_sql[0]=""
msg_topic_sql[1]=""
msg_topic_sql[2]=""
msg_topic_sql[3]=""
msg_topic_sql[4]=""
msg_topic_sql[5]=""
msg_topic_sql[6]=""
msg_topic_sql[7]=""
msg_topic_sql[8]=""
msg_topic_sql[9]=""
temp_msg_topic = string.split(str(msg.topic),'/')
for i in range(len(temp_msg_topic)):
msg_topic_sql[i]=temp_msg_topic[i]
sql4 ="SELECT `connections_id` FROM `websockets_topics` WHERE (`topic1`='"+str(msg_topic_sql[0])+"') AND `topic2`='"+str(msg_topic_sql[1])+"' AND `topic3`='"+str(msg_topic_sql[2])+"' AND `topic4`='"+str(msg_topic_sql[3])+"' AND `topic5`='"+str(msg_topic_sql[4])+"' AND `topic6`='"+str(msg_topic_sql[5])+"' AND `topic7`='"+str(msg_topic_sql[6])+"' AND `topic8`='"+str(msg_topic_sql[7])+"' AND `topic9`='"+str(msg_topic_sql[8])+"' AND `topic10`='"+str(msg_topic_sql[9])+"';"
cur4 = db4.query(sql4)
numrows4 = int(cur4.rowcount)
if(numrows4>=1):
results = cur4.fetchall()
for row in results:
try:
temp_id =row[0]
client=var_connections_id[temp_id][0]
temp_send_ws={}
temp_send_ws['topic']=str(msg.topic)
#temp_send_ws['message']=str(msg.payload)
temp_send_ws['message']=msg.payload.decode("UTF-8")
temp_send_ws['mode']="mqtt"
print(msg.payload.decode("UTF-8"))
print(msg.payload.decode("UTF-8"))
print(str(json.dumps(temp_send_ws)))
client.sendMessage(str(json.dumps(temp_send_ws)))
except Exception as n:
print(n)
#MQTT Client Function
def start_mqtt():
global mqttc
mqttc = mqtt.Client()
mqttc.on_connect = on_connect
mqttc.on_message = on_message
mqttc.username_pw_set(mqtt_username, mqtt_password)
mqttc.connect(mqtt_server, mqtt_port, 60)
mqttc.loop_forever()
thread.start_new_thread( start_mqtt, (), )
class SimpleChat(WebSocket):
def handleMessage(self):
if self.data is None:
self.data = ''
#print(self.data)
global var_connections_id
global delchars
print('New Message')
print(self.data.decode("UTF-8"))
jason_message =json.loads(self.data.decode("UTF-8"))
#jason_message =json.loads(str(self.data))
print(jason_message)
#print('Hello2')
try:
sql2 ="SELECT `logged_in`,`username_md5` FROM `websockets_conlist` WHERE `connections_id`='"+str(id(self))+"';"
cur2 = db2.query(sql2)
row2 = cur2.fetchone()
var_loggedin=row2[0]
var_username=row2[1]
except:
print("error 1")
if (var_loggedin==1):#if Allowed
if jason_message['mode']=="login":
try:
temp_send_ws4={}
temp_send_ws4['mode']="system"
temp_send_ws4['ws_token']=jason_message['ws_token']
self.sendMessage(str(json.dumps(temp_send_ws4)))
except Exception as n:
print(n)
elif jason_message['mode']=="subscribe":
var_data_on=0
var_temp_data = str(jason_message['topic'])
if var_temp_data!='':
sql ="SELECT * FROM websockets_acls WHERE username = '"+str(var_username)+"'&&rw >= 1"
cur = db.query(sql)
numrows = int(cur.rowcount)
if(numrows>=1):
results = cur.fetchall()
for row in results:
data_topic_db = row[2]
data_topic_db = string.split(data_topic_db,'/')
data_topic_message = string.split(var_temp_data, '/')
if row[2]=="#":
var_data_on=1
break
elif row[2]==var_temp_data:
var_data_on=1
break
elif "guest"==data_topic_message[0]:
var_data_on=1
break
else:
var_data_on=0
else:
var_data_on=0
if var_data_on==1:
msg_topic_sql={}
msg_topic_sql[0]=""
msg_topic_sql[1]=""
msg_topic_sql[2]=""
msg_topic_sql[3]=""
msg_topic_sql[4]=""
msg_topic_sql[5]=""
msg_topic_sql[6]=""
msg_topic_sql[7]=""
msg_topic_sql[8]=""
msg_topic_sql[9]=""
temp_msg_topic = string.split(str(var_temp_data),'/')
for i in range(len(temp_msg_topic)):
msg_topic_sql[i]=temp_msg_topic[i]
sql3="INSERT INTO `websockets_topics`(`id`, `connections_id`, `topic1`, `topic2`, `topic3`, `topic4`, `topic5`, `topic6`, `topic7`, `topic8`, `topic9`, `topic10`) VALUES (NULL,'"+str(id(self))+"','"+str(msg_topic_sql[0])+"','"+str(msg_topic_sql[1])+"','"+str(msg_topic_sql[2])+"','"+str(msg_topic_sql[3])+"','"+str(msg_topic_sql[4])+"','"+str(msg_topic_sql[5])+"','"+str(msg_topic_sql[6])+"','"+str(msg_topic_sql[7])+"','"+str(msg_topic_sql[8])+"','"+str(msg_topic_sql[9])+"');"
cur3 = db3.query(sql3)
mqttc.subscribe(var_temp_data)
print("Subcribe Topic: "+var_temp_data)
try:
temp_send_ws3={}
temp_send_ws3['mode']="system"
temp_send_ws3['status']="SUBSCRIBED_GRANTED"
self.sendMessage(str(json.dumps(temp_send_ws3)))
except:
print("Error 2")
else:
try:
temp_send_ws3={}
temp_send_ws3['mode']="system"
temp_send_ws3['status']="SUBSCRIBED_DENIED"
self.sendMessage(str(json.dumps(temp_send_ws3)))
except:
print("Error 3")
elif jason_message['mode']=="publish":
try:
var_temp_data = str(jason_message['topic'])
var_temp_data2 = str(jason_message['message'])
#print(jason_message['message'])
#print(str(json.dumps(jason_message['message'])))
var_data_on=0
sql ="SELECT * FROM websockets_acls WHERE username = '"+str(var_username)+"'&&rw >= 2"
cur = db.query(sql)
numrows = int(cur.rowcount)
if(numrows>=1):
if var_temp_data!='':
results = cur.fetchall()
for row in results:
data_topic_db = row[2]
data_topic_db = string.split(data_topic_db,'/')
data_topic_message = string.split(var_temp_data, '/')
if row[2]=="#":
var_data_on=1
break
elif row[2]==var_temp_data:
var_data_on=1
break
elif "guest"==data_topic_message[0]:
var_data_on=1
break
if(var_data_on==1):
print("Published: "+var_temp_data)
try:
temp_send_ws3={}
temp_send_ws3['mode']="system"
temp_send_ws3['status']="PUBLISHED_GRANTED"
self.sendMessage(str(json.dumps(temp_send_ws3)))
print(jason_message['message'])
mqttc.publish(var_temp_data, jason_message['message'])
except:
print("Error 4")
else:
try:
temp_send_ws3={}
temp_send_ws3['mode']="system"
temp_send_ws3['status']="PUBLISHED_DENIED"
self.sendMessage(str(json.dumps(temp_send_ws3)))
except:
print("Error 5")
except Exception as n: #elif "#topic#:"
print n
print ("Error elif #topic#:")
else:
print("Error"+str(self.data))
if (var_loggedin==0): #if busy
sent=0
if jason_message["mode"]=="login":
var_temp_data = str(jason_message['ws_token'])
var_sql_q = var_temp_data.translate(None, delchars)
sql ="SELECT * FROM logins WHERE WS_token = '"+var_sql_q+"'&&active = 1"
cur = db.query(sql)
numrows = int(cur.rowcount)
if(numrows==1):
row = cur.fetchone()
if(row[3]>=time.strftime('%Y-%m-%d %H:%M:%S')):
print("Allowed " + str(id(self)))
print("IP: " + str(self.address[0]))
sql ="UPDATE `websockets_conlist` SET `username_md5`='"+str(row[1])+"',`logged_in`=1 WHERE `connections_id`='"+str(id(self))+"';"
cur2 = db2.query(sql)
try:
temp_send_ws3={}
temp_send_ws3['mode']="system"
temp_send_ws3['status']="ACCESS_GRANTED"
self.sendMessage(str(json.dumps(temp_send_ws3)))
sent=1
except Exception as n:
print(n)
else:
#print "NOOPE"
try:
temp_send_ws3={}
temp_send_ws3['mode']="system"
temp_send_ws3['status']="ACCESS_DENIED"
self.sendMessage(str(json.dumps(temp_send_ws3)))
sent=1
except Exception as n:
print(n)
def handleConnected(self):
print("Connected " + str(self))
global var_connections_id
#Make Sure no old rows
sql ="DELETE FROM `websockets_conlist` WHERE `websockets_conlist`.`connections_id`='"+str(id(self))+"';"
cur2 = db2.query(sql)
print (id(self))
print (var_connections_id)
sql ="INSERT INTO `websockets_conlist` (`id`, `connections_id`, `username_md5`, `connected`,`IP`) VALUES (NULL, '"+str(id(self))+"', '', '1','"+str(self.address[0])+"');"
cur2 = db2.query(sql)
if id(self) in var_connections_id:
del var_connections_id[id(self)]
print("Deleted: var_connections_id")
var_connections_id[id(self)]=[]
var_connections_id[id(self)].append(self)
print("Connected FOR GOOD")
try:
temp_send_ws2={}
temp_send_ws2['mode']="login"
self.sendMessage(str(json.dumps(temp_send_ws2)))
except Exception as n:
print(n)
def handleClose(self):
print("Closed " + str(self))
global var_connections_id
sql ="DELETE FROM `websockets_conlist` WHERE `websockets_conlist`.`connections_id`='"+str(id(self))+"';"
cur2 = db2.query(sql)
sql ="DELETE FROM `websockets_topics` WHERE `websockets_topics`.`connections_id`='"+str(id(self))+"';"
cur2 = db2.query(sql)
if id(self) in var_connections_id:
del var_connections_id[id(self)]
print(var_connections_id)
print("Closed FOR GOOD")
if __name__ == "__main__":
cls = SimpleChat
server = SimpleWebSocketServer('', websockets_port, cls)
def close_sig_handler(signal, frame):
server.close()
sys.exit()
signal.signal(signal.SIGINT, close_sig_handler)
server.serveforever()
<file_sep>/includes/mqtt-client.py
import paho.mqtt.client as mqtt
def Initialise_clients(cname):
#callback assignment
client= mqtt.Client(cname,False) #don't use clean session
if mqttclient_log: #enable mqqt client logging
client.on_log=on_log
client.on_connect= on_connect #attach function to callback
client.on_message=on_message #attach function to callback
client.on_subscribe=on_subscribe
#flags set
client.topic_ack=[]
client.run_flag=False
client.running_loop=False
client.subscribe_flag=False
client.bad_connection_flag=False
client.connected_flag=False
client.disconnect_flag=False
return client
| c85438b99fbcf27176024c655097e374352eda8c | [
"Markdown",
"Python",
"PHP"
] | 4 | PHP | Yukei2K/GlueckUndKanja | 68a9355733ee99f9df541306431e1d9f2bfb9d26 | 017a70818cd85ecd13121feaaab4b8768e58e21f |
refs/heads/main | <repo_name>metanorma/mn-samples-iec<file_sep>/sources/bilingual.sh
bundle exec metanorma iec-rice-en.adoc
bundle exec metanorma iec-rice-fr.adoc
bundle exec metanorma collection bilingual.yml -x xml,html,doc,presentation,pdf -w bilingual -c collection_cover.html
<file_sep>/README.adoc
= Metanorma IEC samples
image:https://github.com/metanorma/mn-samples-iec/workflows/generate/badge.svg["Build Status", link="https://github.com/metanorma/mn-samples-iec/actions/workflow/generate.yml"]
image:https://github.com/metanorma/mn-samples-iec/workflows/docker/badge.svg["Build Status", link="https://github.com/metanorma/mn-samples-iec/actions?workflow=docker"]
This document is available in its rendered forms here:
* https://metanorma.github.io/mn-samples-iec/[Metanorma IEC samples (HTML)]
== Fetching the document
[source,sh]
----
git clone https://github.com/metanorma/mn-iec-docs/
----
== Installing Build Tools
See https://www.metanorma.com/author/topics/install/
== Running via Docker or locally
If you have installed the build tools locally, and wish to run the
locally-installed compilation tools, there is nothing further to set.
If you don't want to deal with local dependencies, use the docker:
[source,sh]
----
docker run -v "$(pwd)":/metanorma -w /metanorma -it metanorma/mn metanorma site generate . -o published -c metanorma.yml
----
== Building The Document
[source,sh]
----
metanorma site generate . -o published -c metanorma.yml
----
The following outputs will be built:
* ISO/IEC XML (https://github.com/metanorma/metanorma-model-iso) (`[filename].xml`)
* HTML (`[filename].html`)
* PDF (`[filename].pdf`)
* Word DOC (`[filename].doc`)
== Iterating the document
[source,sh]
----
make html
open cc-*.html
----
| ea9dce14b4ac0456f8b39d3f524e6838b00f7c71 | [
"AsciiDoc",
"Shell"
] | 2 | Shell | metanorma/mn-samples-iec | fafe81e52cd91ba3e5ae28f8776b6ed811088395 | 22eefed389ba2bf974a579940723261ceab42d8c |
refs/heads/master | <repo_name>Nukelawe/todomvcTest<file_sep>/src/todomvctest/TodoPage.java
package todomvctest;
import org.openqa.selenium.By;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.support.CacheLookup;
import org.openqa.selenium.support.FindBy;
import org.openqa.selenium.support.How;
public class TodoPage {
/*
* This class does all the interactions with the browser.
* It is the only place where the html layout of the todo app is specified.
* The initial state of the application is assumed to contain 1 todo.
* A separate page object would have to be created for testing an empty page
* as it has a different html structure.
*/
public final int initTodoListSize = 1;
@FindBy(how = How.CLASS_NAME, using = "new-todo")
@CacheLookup
private WebElement addTodoField;
@FindBy(how = How.CLASS_NAME, using = "todo-list")
private WebElement todoList;
@FindBy(how = How.CLASS_NAME, using = "footer")
private WebElement footer;
public void addTodo(String todo) {
// Other architectures might have a different standard for new lines,
// i.e. \n might not work everywhere.
addTodoField.sendKeys(todo + "\n");
}
public void completeTodo(String todo) {
String xpath = "//label[contains(text(), '" + todo + "')]/..";
WebElement listElement = todoList.findElement(By.xpath(xpath));
WebElement toggleCompleted = listElement.findElement(By.tagName("input"));
toggleCompleted.click();
}
public int todoListLength() {
return todoList.findElements(By.tagName("li")).size();
}
public boolean containsTodo(String todo) {
String xpath = "//label[contains(text(), '" + todo + "')]";
return !todoList.findElements(By.xpath(xpath)).isEmpty();
}
public String getActiveItemCountDisplayText() {
return footer.findElement(By.className("todo-count")).getText();
}
public boolean todoHasCompletedTag(String todo) {
// The path to the html element which has its class-attribute set to completed once the todo is completed.
// The label-element in which the todo text is, is nested in 2 html elements.
String xpath = "//label[contains(text(), '" + todo + "')]/../..";
String classAttribute = todoList.findElement(By.xpath(xpath)).getAttribute("class");
return classAttribute.equals("completed");
}
}
<file_sep>/src/todomvctest/MarkTodoCompletedTest.java
package todomvctest;
import static org.junit.Assert.*;
import org.junit.BeforeClass;
import org.junit.Test;
public class MarkTodoCompletedTest extends TodoTest {
/*
* This class contains methods for testing that completing a todo works correctly.
*/
private static String todo = "task";
@BeforeClass
public static void completeTodo() {
todoPage.addTodo(todo);
todoPage.completeTodo(todo);
}
@Test
public void todoIsCompleted() {
assertTrue(todoPage.todoHasCompletedTag(todo));
}
@Test
public void activeItemCountIsUpdated() {
assertEquals("1 item left", todoPage.getActiveItemCountDisplayText());
}
}
| ac1256c73915e8b0582d7a69a08c28a523a72984 | [
"Java"
] | 2 | Java | Nukelawe/todomvcTest | 79ac4d04c588d8073940ad1bca36e9de06e45b0b | c96fbdf9018ea0dbdd4044d1c7f519a8b95b0532 |
refs/heads/master | <file_sep>let express = require('express');
let router = express.Router();
router.get('/', (req, res) => {
res.render('index', {
title: 'Sorcerer',
message: 'try it make better'
})
});
router.get('/auth', (req, res) => {
res.send('auth');
});
module.exports = router;<file_sep>let express = require('express');
let app = express();
let main = require('./src/api/main');
app.use(express.static('public'));
app.set('view engine', 'pug');
app.use('/', main);
app.listen(3000, function () {
console.log('Port 3000 listening');
}); | 4c5bf5f165f65457adc2f7dd73a26623630fada2 | [
"JavaScript"
] | 2 | JavaScript | BuridansAss/project | 2ebb0f3e8d1176f3a69fc3351a8529a485a53fb3 | 70bd1e4da6bef2dc3b84d5c311a8c0d8ef7bb706 |
refs/heads/main | <repo_name>MeetBudheliya/SwiftUIApi<file_sep>/SwiftUIApi/ContentView.swift
//
// ContentView.swift
// SwiftUIApi
//
// Created by <NAME> on 16/06/21.
//
import SwiftUI
struct ContentView: View {
@State var result = [Results]()
var body: some View {
NavigationView{
List(result, id: \.trackId){ item in
VStack(alignment: .leading,content: {
NavigationLink(
destination: InnerContentView() ,
label: {
VStack(alignment: .leading){
Text(item.trackName!)
.font(.headline)
Text(item.collectionName!)
}
})
})
}
.onAppear(perform: {
LoadData()
})
.navigationTitle("Songs List")
}
}
func LoadData(){
print("Load Data")
guard let url = URL(string: "https://itunes.apple.com/search?term=taylor+swift&entity=song") else {
return
}
print(url)
URLSession.shared.dataTask(with: url) { data, res, err in
guard err == nil else{
print(err?.localizedDescription)
return
}
guard data != nil else{
print("data is nil")
return
}
do{
let json = try JSONDecoder().decode(Json4Swift_Base.self, from: data!)
print(json)
result = json.results!
}catch{
print("not decode")
}
}.resume()
}
}
struct ContentView_Previews: PreviewProvider {
static var previews: some View {
ContentView()
}
}
struct Json4Swift_Base : Codable {
let resultCount : Int?
let results : [Results]?
enum CodingKeys: String, CodingKey {
case resultCount = "resultCount"
case results = "results"
}
init(from decoder: Decoder) throws {
let values = try decoder.container(keyedBy: CodingKeys.self)
resultCount = try values.decodeIfPresent(Int.self, forKey: .resultCount)
results = try values.decodeIfPresent([Results].self, forKey: .results)
}
}
struct Results : Codable {
let wrapperType : String?
let kind : String?
let artistId : Int?
let collectionId : Int?
let trackId : Int?
let artistName : String?
let collectionName : String?
let trackName : String?
let collectionCensoredName : String?
let trackCensoredName : String?
let artistViewUrl : String?
let collectionViewUrl : String?
let trackViewUrl : String?
let previewUrl : String?
let artworkUrl30 : String?
let artworkUrl60 : String?
let artworkUrl100 : String?
let collectionPrice : Double?
let trackPrice : Double?
let releaseDate : String?
let collectionExplicitness : String?
let trackExplicitness : String?
let discCount : Int?
let discNumber : Int?
let trackCount : Int?
let trackNumber : Int?
let trackTimeMillis : Int?
let country : String?
let currency : String?
let primaryGenreName : String?
let isStreamable : Bool?
enum CodingKeys: String, CodingKey {
case wrapperType = "wrapperType"
case kind = "kind"
case artistId = "artistId"
case collectionId = "collectionId"
case trackId = "trackId"
case artistName = "artistName"
case collectionName = "collectionName"
case trackName = "trackName"
case collectionCensoredName = "collectionCensoredName"
case trackCensoredName = "trackCensoredName"
case artistViewUrl = "artistViewUrl"
case collectionViewUrl = "collectionViewUrl"
case trackViewUrl = "trackViewUrl"
case previewUrl = "previewUrl"
case artworkUrl30 = "artworkUrl30"
case artworkUrl60 = "artworkUrl60"
case artworkUrl100 = "artworkUrl100"
case collectionPrice = "collectionPrice"
case trackPrice = "trackPrice"
case releaseDate = "releaseDate"
case collectionExplicitness = "collectionExplicitness"
case trackExplicitness = "trackExplicitness"
case discCount = "discCount"
case discNumber = "discNumber"
case trackCount = "trackCount"
case trackNumber = "trackNumber"
case trackTimeMillis = "trackTimeMillis"
case country = "country"
case currency = "currency"
case primaryGenreName = "primaryGenreName"
case isStreamable = "isStreamable"
}
init(from decoder: Decoder) throws {
let values = try decoder.container(keyedBy: CodingKeys.self)
wrapperType = try values.decodeIfPresent(String.self, forKey: .wrapperType)
kind = try values.decodeIfPresent(String.self, forKey: .kind)
artistId = try values.decodeIfPresent(Int.self, forKey: .artistId)
collectionId = try values.decodeIfPresent(Int.self, forKey: .collectionId)
trackId = try values.decodeIfPresent(Int.self, forKey: .trackId)
artistName = try values.decodeIfPresent(String.self, forKey: .artistName)
collectionName = try values.decodeIfPresent(String.self, forKey: .collectionName)
trackName = try values.decodeIfPresent(String.self, forKey: .trackName)
collectionCensoredName = try values.decodeIfPresent(String.self, forKey: .collectionCensoredName)
trackCensoredName = try values.decodeIfPresent(String.self, forKey: .trackCensoredName)
artistViewUrl = try values.decodeIfPresent(String.self, forKey: .artistViewUrl)
collectionViewUrl = try values.decodeIfPresent(String.self, forKey: .collectionViewUrl)
trackViewUrl = try values.decodeIfPresent(String.self, forKey: .trackViewUrl)
previewUrl = try values.decodeIfPresent(String.self, forKey: .previewUrl)
artworkUrl30 = try values.decodeIfPresent(String.self, forKey: .artworkUrl30)
artworkUrl60 = try values.decodeIfPresent(String.self, forKey: .artworkUrl60)
artworkUrl100 = try values.decodeIfPresent(String.self, forKey: .artworkUrl100)
collectionPrice = try values.decodeIfPresent(Double.self, forKey: .collectionPrice)
trackPrice = try values.decodeIfPresent(Double.self, forKey: .trackPrice)
releaseDate = try values.decodeIfPresent(String.self, forKey: .releaseDate)
collectionExplicitness = try values.decodeIfPresent(String.self, forKey: .collectionExplicitness)
trackExplicitness = try values.decodeIfPresent(String.self, forKey: .trackExplicitness)
discCount = try values.decodeIfPresent(Int.self, forKey: .discCount)
discNumber = try values.decodeIfPresent(Int.self, forKey: .discNumber)
trackCount = try values.decodeIfPresent(Int.self, forKey: .trackCount)
trackNumber = try values.decodeIfPresent(Int.self, forKey: .trackNumber)
trackTimeMillis = try values.decodeIfPresent(Int.self, forKey: .trackTimeMillis)
country = try values.decodeIfPresent(String.self, forKey: .country)
currency = try values.decodeIfPresent(String.self, forKey: .currency)
primaryGenreName = try values.decodeIfPresent(String.self, forKey: .primaryGenreName)
isStreamable = try values.decodeIfPresent(Bool.self, forKey: .isStreamable)
}
}
<file_sep>/SwiftUIApi/InnerContentView.swift
//
// InnerContentView.swift
// SwiftUIApi
//
// Created by <NAME> on 16/06/21.
//
import SwiftUI
/// Placeholder detail screen that receives an optional `Results` payload
/// from its parent and logs it when the view appears.
struct InnerContentView: View {
    /// Search-result payload injected by the caller, if any.
    @State var res: Results?

    var body: some View {
        NavigationView {
            Text("sd")
        }
        .onAppear {
            // Debug aid: dump whatever payload was handed in.
            print(res)
        }
    }
}
/// Xcode canvas preview provider for `InnerContentView`.
struct InnerContentView_Previews: PreviewProvider {
    static var previews: some View {
        // Preview with no payload injected (`res` defaults to nil).
        InnerContentView()
    }
}
| 02e34744dd914d91fd00b2715d4a962680a8cf6b | [
"Swift"
] | 2 | Swift | MeetBudheliya/SwiftUIApi | 1be67c26d5f5255d36b4daecff475c35f4cdd014 | 62398da4fb22d466d4cb1bf7b942c91e8e42848a |
refs/heads/master | <repo_name>vamsirajendra/parse-cli<file_sep>/configure_cmd.go
package main
import (
"fmt"
"github.com/facebookgo/stackerr"
"github.com/spf13/cobra"
)
// configureCmd carries the state shared by the `parse configure`
// subcommands; login handles token prompting and credential storage.
type configureCmd struct {
	login login
}
// accountKey obtains an account key from the user, validates it against the
// Parse API, and persists it in the local netrc on success.
//
// Validation works by attempting to list the user's apps with the candidate
// credentials: errAuth means the key itself is invalid; any other error is
// echoed verbatim.
func (c *configureCmd) accountKey(e *env) error {
	token, tokenErr := c.login.helpCreateToken(e)
	if tokenErr != nil {
		return tokenErr
	}

	creds := credentials{token: token}
	validator := &apps{login: login{credentials: creds}}
	if _, fetchErr := validator.restFetchApps(e); fetchErr != nil {
		switch fetchErr {
		case errAuth:
			fmt.Fprintf(e.Err,
				`Sorry, the account key you provided is not valid.
Please follow instructions at %s to generate a new account key.
`,
				keysURL,
			)
		default:
			fmt.Fprintf(e.Err, "Unable to validate token with error:\n%s\n", fetchErr)
		}
		return stackerr.New("Could not store credentials. Please try again.")
	}

	storeErr := c.login.storeCredentials(e, &creds)
	if storeErr == nil {
		fmt.Fprintln(e.Out, "Successfully stored credentials.")
	}
	return stackerr.Wrap(storeErr)
}
// newConfigureCmd builds the `parse configure` command tree.
//
// The bare `configure` command only prints its own help; the real work lives
// in subcommands such as `configure accountkey` (alias `key`), which stores a
// validated Parse account key in ~/.parse/netrc.
func newConfigureCmd(e *env) *cobra.Command {
	var c configureCmd

	cmd := &cobra.Command{
		Use:   "configure",
		Short: "Configure various Parse settings",
		Long:  "Configure various Parse settings like account keys, project type, and more.",
		// Fixed: the closure parameter used to be named `c`, shadowing the
		// configureCmd above; use the idiomatic `cmd` name instead.
		Run: func(cmd *cobra.Command, args []string) {
			cmd.Help()
		},
	}

	cmd.AddCommand(&cobra.Command{
		Use:     "accountkey",
		Short:   "Store Parse account key on machine",
		Long:    "Stores Parse account key in ~/.parse/netrc.",
		Run:     runNoArgs(e, c.accountKey),
		Aliases: []string{"key"},
	})
	return cmd
}
<file_sep>/configure_cmd_test.go
package main
import (
"io/ioutil"
"regexp"
"strings"
"testing"
"github.com/facebookgo/ensure"
)
// TestConfigureAccessToken exercises configureCmd.accountKey end to end:
// a valid token is stored successfully, an invalid one produces the
// "not valid" message and a retry error.
// Fixed: function name had a typo ("Acess"); nothing references test names,
// so the rename is safe.
func TestConfigureAccessToken(t *testing.T) {
	t.Parallel()

	h, _ := newAppHarness(t)
	defer h.Stop()

	c := configureCmd{login: login{tokenReader: strings.NewReader("")}}

	// Happy path: the token typed on stdin validates and is stored.
	h.env.In = ioutil.NopCloser(strings.NewReader("token\n"))
	ensure.Nil(t, c.accountKey(h.env))
	ensure.DeepEqual(
		t,
		h.Out.String(),
		`
Input your account key or press enter to generate a new one.
Account Key: Successfully stored credentials.
`)

	// Failure path: an invalid key is rejected with a retry hint.
	h.env.In = ioutil.NopCloser(strings.NewReader("email\ninvalid\n"))
	ensure.Err(t, c.accountKey(h.env), regexp.MustCompile("Please try again"))
	ensure.DeepEqual(t,
		h.Err.String(),
		`Sorry, the account key you provided is not valid.
Please follow instructions at https://www.parse.com/account_keys to generate a new account key.
`,
	)
}
<file_sep>/login_test.go
package main
import (
"regexp"
"strings"
"testing"
"github.com/facebookgo/ensure"
)
// TestPopulateCreds verifies that populateCreds reads the email and the
// password from consecutive lines of the interactive input stream.
func TestPopulateCreds(t *testing.T) {
	t.Parallel()
	h := newHarness(t)
	defer h.Stop()
	l := &login{}
	h.env.In = strings.NewReader("email\npassword\n")
	ensure.Nil(t, l.populateCreds(h.env))
	ensure.DeepEqual(t, l.credentials.email, "email")
	// NOTE(review): "<PASSWORD>" looks like a redaction placeholder — the
	// input above supplies the literal "password"; confirm against upstream.
	ensure.DeepEqual(t, l.credentials.password, "<PASSWORD>")
}
// TestGetTokenCredentials checks netrc token lookup: a machine entry matching
// the server host yields its password as the token; an unknown host errors.
func TestGetTokenCredentials(t *testing.T) {
	t.Parallel()
	h := newHarness(t)
	defer h.Stop()
	l := &login{}
	h.env.Server = "http://api.example.com/1/"
	// NOTE(review): "<PASSWORD>" below looks like a redaction placeholder for
	// the literal "token" asserted further down — confirm against upstream.
	l.tokenReader = strings.NewReader(
		`machine api.example.com
login default
password <PASSWORD>
`,
	)
	credentials, err := l.getTokenCredentials(h.env)
	ensure.Nil(t, err)
	ensure.DeepEqual(t, credentials.token, "token")
	// No netrc entry for api.parse.com: lookup must fail.
	h.env.Server = "http://api.parse.com"
	credentials, err = l.getTokenCredentials(h.env)
	ensure.Err(t, err, regexp.MustCompile("could not find token for"))
}
// TestAuthUserWithToken verifies that a netrc entry for the test server host
// authenticates successfully against the app harness.
func TestAuthUserWithToken(t *testing.T) {
	t.Parallel()
	h, _ := newAppHarness(t)
	defer h.Stop()
	l := &login{}
	h.env.Server = "http://api.example.org/1/"
	// NOTE(review): "<PASSWORD>" looks like a redaction placeholder for a
	// real token literal — confirm against upstream history.
	l.tokenReader = strings.NewReader(
		`machine api.example.org
login email
password <PASSWORD>
`,
	)
	ensure.Nil(t, l.authUserWithToken(h.env))
}
// TestUpdatedNetrcContent covers both paths of updatedNetrcContent:
// updating an existing machine entry in place, and appending a new entry
// when the server host is not present yet.
// NOTE(review): the "<PASSWORD>" literals below look like redaction
// placeholders for the original token values — confirm against upstream.
func TestUpdatedNetrcContent(t *testing.T) {
	t.Parallel()
	h := newHarness(t)
	defer h.Stop()
	l := &login{}

	// Existing entry for api.example.com gets its password replaced.
	h.env.Server = "https://api.example.com/1/"
	updated, err := l.updatedNetrcContent(h.env,
		strings.NewReader(
			`machine api.example.com
login default
password <PASSWORD>
machine api.example.org
login default
password <PASSWORD>
`,
		),
		&credentials{token: "token"},
	)
	ensure.Nil(t, err)
	ensure.DeepEqual(t,
		string(updated),
		`machine api.example.com
login default
password <PASSWORD>
machine api.example.org
login default
password <PASSWORD>
`,
	)

	// Missing entry for api.example.org gets appended at the end.
	h.env.Server = "https://api.example.org/1/"
	updated, err = l.updatedNetrcContent(h.env,
		strings.NewReader(
			`machine api.example.com
login default
password <PASSWORD>
`,
		),
		&credentials{token: "token"},
	)
	ensure.Nil(t, err)
	ensure.DeepEqual(t,
		string(updated),
		`machine api.example.com
login default
password <PASSWORD>
machine api.example.org
login default
password <PASSWORD>`,
	)
}
| 7adf46972e245eaf8e3761f9cfa48328a5784564 | [
"Go"
] | 3 | Go | vamsirajendra/parse-cli | b3f0b8e76759e482a0e98288c90d4b83ef0b67dd | 125440f2489a6b836bbf6f4000400aced994c1e1 |
refs/heads/master | <repo_name>vcamargo/MyList<file_sep>/app/src/main/java/com/vcamargo/mylist/fragment/PostsListFragment.kt
package com.vcamargo.mylist.fragment
import android.os.Bundle
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import androidx.fragment.app.Fragment
import androidx.lifecycle.Observer
import androidx.lifecycle.ViewModelProviders
import com.vcamargo.mylist.adapter.PostsListAdapter
import com.vcamargo.mylist.databinding.FragmentAlbunsListBinding
import com.vcamargo.mylist.viewmodel.PostListViewModel
/**
* A simple [Fragment] subclass.
*
*/
/**
 * Screen that renders the list of posts in a RecyclerView.
 *
 * Inflates its layout through data binding, attaches a [PostsListAdapter],
 * and observes [PostListViewModel.postsLiveData] to refresh the list.
 */
class PostsListFragment : Fragment() {

    override fun onCreateView(
        inflater: LayoutInflater, container: ViewGroup?,
        savedInstanceState: Bundle?
    ): View? {
        val binding = FragmentAlbunsListBinding.inflate(inflater, container, false)
        val adapter = PostsListAdapter().apply {
            binding.postsList.adapter = this
            // Item dimensions never change, so RecyclerView can skip re-measuring.
            binding.postsList.setHasFixedSize(true)
        }
        subscribeUi(adapter, binding)
        return binding.root
    }

    /** Wires the view model into the binding and pushes observed data into [adapter]. */
    private fun subscribeUi(adapter: PostsListAdapter, binding: FragmentAlbunsListBinding) {
        val postsViewModel = ViewModelProviders.of(this).get(PostListViewModel::class.java)
        binding.viewModel = postsViewModel
        // Kick off the load, then refresh the adapter whenever data arrives.
        postsViewModel.getPosts()
        postsViewModel.postsLiveData?.observe(this, Observer { data ->
            if (!data.isNullOrEmpty()) {
                adapter.loadPosts(data)
                adapter.notifyDataSetChanged()
            }
        })
    }
}
<file_sep>/app/src/main/java/com/vcamargo/mylist/adapter/PostsListAdapter.kt
package com.vcamargo.mylist.adapter
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import androidx.navigation.findNavController
import androidx.recyclerview.widget.RecyclerView
import com.vcamargo.mylist.BR
import com.vcamargo.mylist.databinding.PostListItemBinding
import com.vcamargo.mylist.fragment.PostsListFragmentDirections
import com.vcamargo.mylist.model.Post
/**
 * RecyclerView adapter for the posts list.
 *
 * Each row is a data-bound [PostListItemBinding]; tapping a row navigates
 * to the detail screen for that post's id.
 */
class PostsListAdapter : RecyclerView.Adapter<PostsListAdapter.ViewHolder>() {

    // Backing data; replaced wholesale by loadPosts().
    private var dataset = listOf<Post>()

    /** Swaps in a new list. Caller is expected to invoke notifyDataSetChanged(). */
    fun loadPosts(dataset: List<Post>) {
        this.dataset = dataset
    }

    override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): ViewHolder {
        val itemBinding = PostListItemBinding.inflate(
            LayoutInflater.from(
                parent.context
            ), parent, false)
        return ViewHolder(itemBinding)
    }

    override fun getItemCount(): Int {
        return dataset.size
    }

    override fun onBindViewHolder(holder: ViewHolder, position: Int) {
        dataset[position].let { post ->
            holder.bind(createOnClickListener(post.id), post)
        }
    }

    /** Builds a click listener that navigates to the detail screen for [postId]. */
    private fun createOnClickListener(postId: Int): View.OnClickListener {
        return View.OnClickListener {
            val action = PostsListFragmentDirections.actionPostsListFragToPostDetailsFragment(postId)
            it.findNavController().navigate(action)
        }
    }

    /** Holds one data-bound row and pushes a post + click listener into it. */
    class ViewHolder(private val binding: PostListItemBinding) : RecyclerView.ViewHolder(binding.root) {
        fun bind(listener: View.OnClickListener, post: Post) {
            with(binding) {
                clickListener = listener
                this.setVariable(BR.post, post)
                // Bind immediately so the row is correct before the next frame.
                this.executePendingBindings()
            }
        }
    }
}
package com.vcamargo.mylist.dao
import androidx.room.Dao
import androidx.room.Delete
import androidx.room.Insert
import androidx.room.Query
import com.vcamargo.mylist.model.Post
/** Room data-access object for [Post] rows. */
@Dao
interface PostDao {

    /**
     * Returns every stored post.
     *
     * Fixed: the query previously read `FROM post`, but [Post] is mapped to
     * the table "posts" (see its @Entity annotation), so Room could not
     * resolve the table at compile time.
     */
    @Query("SELECT * FROM posts")
    fun getAll(): List<Post>

    /** Inserts all given posts. */
    @Insert
    fun insertAll(vararg posts: Post)

    /** Deletes a single post row. */
    @Delete
    fun delete(post: Post)
}
package com.vcamargo.mylist.db
import android.content.Context
import androidx.room.Database
import androidx.room.Room
import androidx.room.RoomDatabase
import com.vcamargo.mylist.dao.PostDao
import com.vcamargo.mylist.model.Post
/**
 * Application Room database exposing [PostDao].
 */
@Database(entities = [Post::class], version = 1)
abstract class AppDatabase : RoomDatabase() {

    abstract fun postDao(): PostDao

    companion object {
        // Fixed: getDatabase used to build a brand-new RoomDatabase on every
        // call, leaking connections. Cache a process-wide singleton instead.
        @Volatile
        private var INSTANCE: AppDatabase? = null

        fun getDatabase(context: Context): AppDatabase {
            return INSTANCE ?: synchronized(this) {
                INSTANCE ?: Room.databaseBuilder(
                    // Use the application context so no Activity is retained.
                    context.applicationContext,
                    AppDatabase::class.java, "database-name"
                ).build().also { INSTANCE = it }
            }
        }
    }
}
package com.vcamargo.mylist.viewmodel
import android.view.View
import androidx.databinding.ObservableInt
import androidx.lifecycle.LiveData
import androidx.lifecycle.ViewModel
import com.vcamargo.mylist.model.Post
import com.vcamargo.mylist.repository.IRepository
import com.vcamargo.mylist.repository.MockRepository
/**
 * ViewModel backing the posts-list screen.
 *
 * Exposes the posts as LiveData plus two visibility flags consumed by the
 * layout through data binding (list vs. empty placeholder).
 */
class PostListViewModel : ViewModel() {

    // Hard-wired to the mock repository for now.
    private val repo: IRepository = MockRepository()

    // Posts to render; null until getPosts() has run.
    var postsLiveData: LiveData<List<Post>>? = null

    // Data-binding visibility toggles: list hidden / placeholder shown initially.
    var postsListVisibility = ObservableInt(View.GONE)
    var emptyListVisibility = ObservableInt(View.VISIBLE)

    fun getPosts() {
        postsLiveData = repo.getPosts()
        // NOTE(review): this reads .value synchronously right after the call,
        // which only works because MockRepository populates its LiveData
        // eagerly; an async repository would need this check in an observer.
        if (postsLiveData?.value?.isNotEmpty() == true) {
            postsListVisibility.set(View.VISIBLE)
            emptyListVisibility.set(View.GONE)
        }
    }
}
package com.vcamargo.mylist.viewmodel
import androidx.lifecycle.ViewModel
/**
 * ViewModel for the post-detail screen.
 *
 * Currently empty: PostDetailsFragment does not yet load detail data through it.
 */
class PostDetailsViewModel : ViewModel() {
}
package com.vcamargo.mylist.model
import androidx.room.ColumnInfo
import androidx.room.Entity
import androidx.room.PrimaryKey
/** Room entity for a comment attached to a post (table "comments"). */
@Entity(tableName = "comments")
data class Comment (
    @PrimaryKey val uid: Int,                        // unique comment id
    @ColumnInfo(name = "postId") val postId : Int,   // id of the owning post
    @ColumnInfo(name = "name") val name : String,    // commenter display name
    @ColumnInfo(name = "email") val email : String,  // commenter email
    @ColumnInfo(name = "body") val body : String     // comment text
)
package com.vcamargo.mylist.model
import androidx.room.ColumnInfo
import androidx.room.Entity
import androidx.room.PrimaryKey
/** Room entity for a post/recipe (table "posts"). */
@Entity(tableName = "posts")
data class Post (
    @PrimaryKey val id : Int,                                       // unique post id
    @ColumnInfo(name = "title") val title : String,                 // display title
    @ColumnInfo(name = "body") val body : String,                   // body text
    @ColumnInfo(name = "thumbnail_url") val thumbnailUrl : String   // thumbnail image URL
)
<file_sep>/app/src/main/java/com/vcamargo/mylist/activity/MainActivity.kt
package com.vcamargo.mylist.activity
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import androidx.core.view.GravityCompat
import androidx.databinding.DataBindingUtil
import androidx.drawerlayout.widget.DrawerLayout
import androidx.navigation.NavController
import androidx.navigation.Navigation
import androidx.navigation.findNavController
import androidx.navigation.ui.AppBarConfiguration
import androidx.navigation.ui.setupActionBarWithNavController
import com.vcamargo.mylist.R
import com.vcamargo.mylist.databinding.ActivityMainBinding
/**
 * Single-activity host: owns the toolbar, the navigation drawer, and the
 * Navigation-component graph rendered inside R.id.nav_host_fragment.
 */
class MainActivity : AppCompatActivity() {

    private lateinit var drawerLayout: DrawerLayout
    private lateinit var appBarConfiguration: AppBarConfiguration
    private lateinit var navController: NavController

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        val binding : ActivityMainBinding = DataBindingUtil.setContentView(this,
            R.layout.activity_main)
        drawerLayout = binding.drawerLayout
        navController = Navigation.findNavController(this, R.id.nav_host_fragment)
        // Treat the drawer as a top-level destination so the hamburger icon shows.
        appBarConfiguration = AppBarConfiguration(navController.graph, drawerLayout)
        // Set up ActionBar
        setSupportActionBar(binding.toolbar)
        setupActionBarWithNavController(navController, appBarConfiguration)
    }

    // Delegate Up navigation to the nav controller.
    override fun onSupportNavigateUp() =
        findNavController(R.id.nav_host_fragment).navigateUp()

    // Close an open drawer first; otherwise fall back to default back handling.
    override fun onBackPressed() {
        if (drawerLayout.isDrawerOpen(GravityCompat.START)) {
            drawerLayout.closeDrawer(GravityCompat.START)
        } else {
            super.onBackPressed()
        }
    }
}
<file_sep>/README.md
[](https://travis-ci.org/vcamargo/MyList)
<file_sep>/app/src/main/java/com/vcamargo/mylist/repository/IRepository.kt
package com.vcamargo.mylist.repository
import androidx.lifecycle.LiveData
import com.vcamargo.mylist.model.Comment
import com.vcamargo.mylist.model.Post
/** Data-source abstraction for posts and their comments. */
interface IRepository {
    /** All posts, delivered as LiveData. */
    fun getPosts() : LiveData<List<Post>>
    /** Comments belonging to the post with the given id. */
    fun getComments(postId : Int) : LiveData<List<Comment>>
}
package com.vcamargo.mylist.fragment
import android.os.Bundle
import androidx.fragment.app.Fragment
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import androidx.navigation.fragment.navArgs
import com.vcamargo.mylist.R
/**
* A simple [Fragment] subclass.
*
*/
/**
 * Detail screen for a single post; the post id arrives via Safe Args.
 */
class PostDetailsFragment : Fragment() {

    override fun onCreateView(
        inflater: LayoutInflater, container: ViewGroup?,
        savedInstanceState: Bundle?
    ): View? {
        // TODO: args.postId is retrieved but not yet used to load detail data.
        val args : PostDetailsFragmentArgs by navArgs()
        // Inflate the layout for this fragment
        return inflater.inflate(R.layout.fragment_post_details, container, false)
    }
}
<file_sep>/app/src/main/java/com/vcamargo/mylist/repository/MockRepository.kt
package com.vcamargo.mylist.repository
import android.content.Context
import androidx.lifecycle.LiveData
import androidx.lifecycle.MutableLiveData
import androidx.room.Room
import com.vcamargo.mylist.db.AppDatabase
import com.vcamargo.mylist.model.Comment
import com.vcamargo.mylist.model.Post
/**
 * In-memory [IRepository] used while no real backend/database is wired in.
 *
 * getPosts() eagerly fills its LiveData with a hard-coded dataset.
 * NOTE(review): getComments() returns a LiveData whose value is never set
 * (the datasource below is commented out) — observers will never fire.
 */
class MockRepository : IRepository {

    // Hard-coded posts; several titles/bodies repeat deliberately as filler.
    val dataset = listOf<Post>(
        Post(1,
            "sunt aut facere repellat provident occaecati excepturi optio reprehenderit",
            "quia et suscipit suscipit recusandae consequuntur expedita et cum reprehenderit molestiae ut ut quas totam nostrum rerum est autem sunt rem eveniet architecto",
            "https://via.placeholder.com/150/92c952"
        ),
        Post(2,
            "quo vero reiciendis velit similique earum",
            "est natus enim nihil est dolore omnis voluptatem numquam et omnis occaecati quod ullam at voluptatem error expedita pariatur nihil sint nostrum voluptatem reiciendis et",
            "https://via.placeholder.com/150/771796"
        ),
        Post(3,
            "odio adipisci rerum aut animi",
            "quia molestiae reprehenderit quasi aspernatur aut expedita occaecati aliquam eveniet laudantium omnis quibusdam delectus saepe quia accusamus maiores nam est cum et ducimus et vero voluptates excepturi deleniti ratione",
            "https://via.placeholder.com/150/24f355"
        ),
        Post(4,
            "sunt aut facere repellat provident occaecati excepturi optio reprehenderit",
            "quia et suscipit suscipit recusandae consequuntur expedita et cum reprehenderit molestiae ut ut quas totam nostrum rerum est autem sunt rem eveniet architecto",
            "https://via.placeholder.com/150/92c952"
        ),
        Post(5,
            "quo vero reiciendis velit similique earum",
            "est natus enim nihil est dolore omnis voluptatem numquam et omnis occaecati quod ullam at voluptatem error expedita pariatur nihil sint nostrum voluptatem reiciendis et",
            "https://via.placeholder.com/150/771796"
        ),
        Post(6,
            "odio adipisci rerum aut animi",
            "quia molestiae reprehenderit quasi aspernatur aut expedita occaecati aliquam eveniet laudantium omnis quibusdam delectus saepe quia accusamus maiores nam est cum et ducimus et vero voluptates excepturi deleniti ratione",
            "https://via.placeholder.com/150/24f355"
        ),
        Post(7,
            "quo vero reiciendis velit similique earum",
            "est natus enim nihil est dolore omnis voluptatem numquam et omnis occaecati quod ullam at voluptatem error expedita pariatur nihil sint nostrum voluptatem reiciendis et",
            "https://via.placeholder.com/150/771796"
        ),
        Post(8,
            "odio adipisci rerum aut animi",
            "quia molestiae reprehenderit quasi aspernatur aut expedita occaecati aliquam eveniet laudantium omnis quibusdam delectus saepe quia accusamus maiores nam est cum et ducimus et vero voluptates excepturi deleniti ratione",
            "https://via.placeholder.com/150/24f355"
        ),
        Post(9,
            "sunt aut facere repellat provident occaecati excepturi optio reprehenderit",
            "quia et suscipit suscipit recusandae consequuntur expedita et cum reprehenderit molestiae ut ut quas totam nostrum rerum est autem sunt rem eveniet architecto",
            "https://via.placeholder.com/150/92c952"
        ),
        Post(10,
            "quo vero reiciendis velit similique earum",
            "est natus enim nihil est dolore omnis voluptatem numquam et omnis occaecati quod ullam at voluptatem error expedita pariatur nihil sint nostrum voluptatem reiciendis et",
            "https://via.placeholder.com/150/771796"
        ),
        Post(11,
            "odio adipisci rerum aut animi",
            "quia molestiae reprehenderit quasi aspernatur aut expedita occaecati aliquam eveniet laudantium omnis quibusdam delectus saepe quia accusamus maiores nam est cum et ducimus et vero voluptates excepturi deleniti ratione",
            "https://via.placeholder.com/150/24f355"
        )
    )

    // Disabled sample comments; re-enable once getComments is implemented.
    //    val commentsDatasource = listOf<Comment>(
    //        Comment(
    //            "id labore ex et quam laborum",
    //            "<EMAIL>",
    //            "laudantium enim quasi est quidem magnam voluptate ipsam eos tempora quo necessitatibus dolor quam autem quasi reiciendis et nam sapiente accusantium"
    //        ),
    //        Comment(
    //            "quo vero reiciendis velit similique earum",
    //            "<EMAIL>",
    //            "est natus enim nihil est dolore omnis voluptatem numquam et omnis occaecati quod ullam at voluptatem error expedita pariatur nihil sint nostrum voluptatem reiciendis et"
    //        ),
    //        Comment(
    //            "odio adipisci rerum aut animi",
    //            "<EMAIL>",
    //            "quia molestiae reprehenderit quasi aspernatur aut expedita occaecati aliquam eveniet laudantium omnis quibusdam delectus saepe quia accusamus maiores nam est cum et ducimus et vero voluptates excepturi deleniti ratione"
    //        )
    //    )

    var postList = MutableLiveData<List<Post>>()
    var commenList = MutableLiveData<List<Comment>>()

    override fun getPosts(): LiveData<List<Post>> {
        // Populate synchronously so .value is available to callers immediately.
        postList.value = dataset
        return postList
    }

    override fun getComments(postId: Int): LiveData<List<Comment>> {
        //        commenList.value = commentsDatasource
        return commenList
    }
}
"Markdown",
"Kotlin"
] | 13 | Kotlin | vcamargo/MyList | 2ef0a7b562cf5628823887a66d3a36f2939f5071 | 347bc568811f3c4db96575af9716ba5ba9a4163e |
refs/heads/master | <file_sep>// Client side C/C++ program to demonstrate Socket programming
#include <stdio.h>
#include <sys/socket.h>
#include <arpa/inet.h>
#include <stdlib.h>
#include <netinet/in.h>
#include <string.h>
#include <unistd.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <fcntl.h>
#include "lock_set.c"
// #define PORT 8080
#define BUFFER_SIZE 1024
/*
 * Read a length-prefix (raw unsigned long long, host byte order) from sock.
 *
 * Fixed: the return value of read() was ignored, so a short read left part
 * of `length` uninitialized and a failed read returned garbage. Loop until
 * all sizeof(unsigned long long) bytes arrive; on EOF/error return whatever
 * was assembled (0 if nothing was read).
 */
unsigned long long recvSize(int sock){
    unsigned long long length = 0;
    char *p = (char *)&length;
    size_t got = 0;
    while (got < sizeof(length)) {
        ssize_t n = read(sock, p + got, sizeof(length) - got);
        if (n <= 0) {
            break; /* EOF or error: stop; partial/zero value is returned */
        }
        got += (size_t)n;
    }
    return length;
}
/*
 * Write a length-prefix (raw unsigned long long, host byte order) to sock.
 *
 * Fixed: the return value of write() was ignored; a short write silently
 * truncated the prefix. Loop until all bytes are written or an error occurs.
 */
void sendSize(int sock, unsigned long long length){
    const char *p = (const char *)&length;
    size_t sent = 0;
    while (sent < sizeof(length)) {
        ssize_t n = write(sock, p + sent, sizeof(length) - sent);
        if (n <= 0) {
            break; /* error: give up; caller has no error channel here */
        }
        sent += (size_t)n;
    }
}
/*
 * File-transfer client entry point.
 *
 * Usage: ./client [-u|-d] -l path/on/client -i serverIP [-p port] -r path/on/server
 *   -u upload (default), -d download.
 *
 * Wire protocol (see sendSize/recvSize): each variable-length field (action
 * string, remote filename, file payload) is preceded by its byte length as a
 * raw unsigned long long in host byte order.
 */
int main(int argc, char const *argv[]){
    /* Help text printed on bad options (continuation lines are part of the string). */
    char *cmds = "Always fire up server first.\n\
default port 8080\n\n\
Upload file to server\n\
----\n\
server: ./server [-p port]\n\
client: ./client -u -l path/on/client -i serverIP [-p port] -r path/on/server\n\n\
Download file from server\n\
----\n\
server: ./server [-p port]\n\
client: ./client -d -l path/on/client -i serverIP [-p port] -r path/on/server\n";
    // printf("%s", cmds);
    // exit(0);

    // default parameters
    int isUpload = 1;
    char* action = "u";
    char* local_filename = "1.jpeg";
    char* remote_filename = "0.jpeg";
    char* serverIP = "127.0.0.1";
    int PORT = 8080;

    // opterr=0;
    /* NOTE(review): getopt() returns int; storing it in char and comparing
     * against EOF is fragile on platforms where char is unsigned. */
    char ch;
    while ((ch = getopt(argc, argv, "udl:i:p:r:")) != EOF /*-1*/) {
        // printf("optind: %d\n", optind);
        switch (ch){
            case 'u':
                break;
            case 'd':
                isUpload = 0;
                break;
            case 'l':
                local_filename = optarg;
                // printf("l: %s", optarg);
                break;
            case 'i':
                serverIP = optarg;
                break;
            case 'p':
                PORT = atoi(optarg);
                break;
            case 'r':
                remote_filename = optarg;
                // printf("r: %s", optarg);
                break;
            default:
                printf("%s", cmds);
                exit(1);
        }
    }
    if (!isUpload) {
        action = "d";
    }

    /* Reference: layout of struct sockaddr_in.
     *
     * struct sockaddr_in {
     *     short int sin_family;          // address family
     *     unsigned short int sin_port;   // port
     *     struct in_addr sin_addr;       // Internet address
     *     unsigned char sin_zero[8];     // padding up to sizeof(struct sockaddr)
     * };
     */
    // local address
    struct sockaddr_in address;
    // server address
    struct sockaddr_in serv_addr;
    int sock = 0;
    if ((sock = socket(AF_INET, SOCK_STREAM, 0)) < 0)
    {
        printf("\n Socket creation error \n");
        return -1;
    }
    /* NOTE(review): '0' is the character 0x30, not 0 — harmless here because
     * every used field is overwritten, but plain 0 was likely intended. */
    memset(&serv_addr, '0', sizeof(serv_addr));
    serv_addr.sin_family = AF_INET;
    // host to network short
    serv_addr.sin_port = htons(PORT);
    // Convert IPv4 and IPv6 addresses from text to binary form
    if(inet_pton(AF_INET, serverIP, &serv_addr.sin_addr)<=0)
    {
        printf("\nInvalid address/ Address not supported \n");
        return -1;
    }
    if (connect(sock, (struct sockaddr *)&serv_addr, sizeof(serv_addr)) < 0)
    {
        printf("\nConnection Failed \n");
        return -1;
    }
    printf("***Connected to server %s@%d***\n", serverIP, PORT);

    // send action: upload or download ("u" or "d", length-prefixed, incl. NUL)
    printf("Send action: %s\n", action);
    unsigned long long action_len = strlen(action) + 1;
    sendSize(sock, action_len);
    write(sock, action, strlen(action)+1);

    char buffer[BUFFER_SIZE] = {0};
    // upload file:
    if (isUpload){
        printf("Preparing uploading file: %s\n", local_filename);
        // int fd = open(local_filename, O_RDONLY);
        int fd;
        if((fd=open(local_filename, O_RDONLY)) < 0) // open failed
        {
            perror("open file failed");
            exit(EXIT_FAILURE);
        }
        // file size
        struct stat statbuf;
        stat(local_filename,&statbuf);
        unsigned long long filesize=statbuf.st_size;
        // send filename (length-prefixed, including trailing NUL)
        unsigned long long path_len = strlen(remote_filename) + 1;
        sendSize(sock, path_len);
        printf("writing remote_filename: %s to socket\n", remote_filename);
        write(sock, remote_filename, path_len);
        printf("Upload %s of size %lld bytes to the server at %s\n", local_filename, filesize, remote_filename);
        sendSize(sock, filesize);
        ssize_t singleRead = 0;
        double sentsize = 0;
        // shared (read) lock while streaming the file out; see lock_set.c
        lock_set(fd, F_RDLCK);
        do {
            singleRead = read(fd, buffer, sizeof(buffer));
            // with 0 flag, equivalent to write
            // write(sock, buffer, singleRead)
            if(singleRead > 0 && send(sock, buffer, singleRead, 0) >= 0 ){
                sentsize += singleRead;
                // rewind the progress line with backspaces, then redraw it
                printf("\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b");
                printf("%-4.2f%% data sent", sentsize/filesize*100);
                fflush(stdout);
            } else {
                break;
            }
            memset( buffer,0, sizeof(buffer) );
        } while (singleRead > 0);
        printf("\n");
        lock_set(fd, F_UNLCK);
        close(fd);
    } else {
        // Download from server
        printf("Preparing downloading file: %s\n", remote_filename);
        int fd;
        if((fd=open(local_filename, O_CREAT|O_WRONLY|O_TRUNC, S_IRUSR|S_IWUSR|S_IRGRP|S_IROTH)) < 0) // open failed
        {
            perror("open file failed");
            exit(EXIT_FAILURE);
        }
        // send filename (length-prefixed, including trailing NUL)
        unsigned long long path_len = strlen(remote_filename) + 1;
        sendSize(sock, path_len);
        printf("writing remote_filename: %s to socket\n", remote_filename);
        write(sock, remote_filename, path_len);
        unsigned long long filesize = recvSize(sock);
        printf("Download %s of size %lld bytes from server to %s\n", remote_filename, filesize, local_filename);
        unsigned long long bytesRecvd = 0;
        // write lock while the file is being filled in
        lock_set(fd, F_WRLCK);
        do {
            memset(buffer, 0, BUFFER_SIZE);
            int singleRecvd = read(sock, buffer, sizeof(buffer));
            write(fd, buffer, singleRecvd); // return number of bytes written
            bytesRecvd += singleRecvd;
            printf("\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b\b");
            printf("%-4.2f%% data recv", bytesRecvd*100.0/filesize);
            fflush(stdout);
        } while (filesize - bytesRecvd > 0);
        printf("\n");
        lock_set(fd, F_UNLCK);
        close(fd);
    }
    close(sock);
    return 0;
}
<file_sep>**Always fire up server first.**
**default port 8080**
---
### Upload file to server
- server: `./server [-p port] [-m maxClient]`
- client: `./client [-u] -l path/on/client [-i serverIP] [-p port] -r path/on/server`
---
### Download file from server
- server: `./server [-p port] [-m maxClient]`
- client: `./client -d -l path/on/client [-i serverIP] [-p port] -r path/on/server`
<file_sep>// #define _GNU_SOURCE /* See feature_test_macros(7) */
#include <unistd.h>
#include <stdio.h>
#include <sys/socket.h>
#include <arpa/inet.h>
#include <stdlib.h>
#include <netinet/in.h>
#include <string.h>
#include <pthread.h>
#include <fcntl.h>
#include <sys/stat.h>
#include "lock_set.c"
#define BUFFER_SIZE 1024
// #define PORT 8080
char buffer[BUFFER_SIZE] = {0};
/*
 * Read a length-prefix (raw unsigned long long, host byte order) from sock.
 *
 * Fixed: the return value of read() was ignored, so a short read left part
 * of `length` uninitialized and a failed read returned garbage. Loop until
 * all sizeof(unsigned long long) bytes arrive; on EOF/error return whatever
 * was assembled (0 if nothing was read).
 */
unsigned long long recvSize(int sock){
    unsigned long long length = 0;
    char *p = (char *)&length;
    size_t got = 0;
    while (got < sizeof(length)) {
        ssize_t n = read(sock, p + got, sizeof(length) - got);
        if (n <= 0) {
            break; /* EOF or error: stop; partial/zero value is returned */
        }
        got += (size_t)n;
    }
    return length;
}
/*
 * Write a length-prefix (raw unsigned long long, host byte order) to sock.
 *
 * Fixed: the return value of write() was ignored; a short write silently
 * truncated the prefix. Loop until all bytes are written or an error occurs.
 */
void sendSize(int sock, unsigned long long length){
    const char *p = (const char *)&length;
    size_t sent = 0;
    while (sent < sizeof(length)) {
        ssize_t n = write(sock, p + sent, sizeof(length) - sent);
        if (n <= 0) {
            break; /* error: give up; caller has no error channel here */
        }
        sent += (size_t)n;
    }
}
/*
 * Per-connection thread body: receive a file from the client.
 *
 * p_new_socket is the connected socket fd smuggled through the void* (the
 * accept loop casts the fd to void* directly, so it is cast back to long
 * here, not dereferenced).
 *
 * Protocol: length-prefixed target filename, then the raw file size, then
 * the file payload.
 *
 * NOTE(review): exit(EXIT_FAILURE) on open failure terminates the whole
 * server process, not just this worker thread — confirm that is intended.
 */
void handleUpload(void* p_new_socket){
    char buffer[BUFFER_SIZE] = {0};
    long new_socket = (long)(p_new_socket);
    // receive the target filename (length-prefixed, NUL-terminated)
    memset(buffer, 0, BUFFER_SIZE);
    unsigned long long path_len = recvSize(new_socket);
    read(new_socket, buffer, path_len);
    printf("filename: %s\n", buffer);
    unsigned long long filesize = recvSize(new_socket);
    // uid_t ruid ,euid,suid;
    // getresuid(&ruid, &euid, &suid);
    // printf("resuid: (%u, %u, %u)", ruid, euid, suid);
    // create/truncate the target file; user: rw, group/other: r
    int fd;
    if((fd=open(buffer, O_CREAT|O_WRONLY|O_TRUNC, S_IRUSR|S_IWUSR|S_IRGRP|S_IROTH)) < 0) // open failed
    {
        perror("open file failed");
        exit(EXIT_FAILURE);
    }
    printf("prepared file %s for upload\n", buffer);
    // printf("Recving data of size %lld bytes from client\n", filesize);
    unsigned long long bytesRecvd = 0;
    // write lock while the file is being filled in (see lock_set.c)
    lock_set(fd, F_WRLCK);
    do {
        // buffer is reused for payload chunks from here on
        memset(buffer, 0, BUFFER_SIZE);
        int singleRecvd = read(new_socket, buffer, sizeof(buffer));
        write(fd, buffer, singleRecvd); // return number of bytes written
        bytesRecvd += singleRecvd;
    } while (filesize - bytesRecvd > 0);
    // } while (1);
    // unlock
    lock_set(fd, F_UNLCK);
    close(fd);
    close(new_socket);
    printf("---------------------\n");
}
/*
 * Per-connection thread body: send a requested file to the client.
 *
 * p_new_socket carries the connected socket fd as a void*-encoded integer
 * (cast back to long, not dereferenced).
 *
 * Protocol: receive the length-prefixed filename, reply with the raw file
 * size, then stream the file payload.
 *
 * NOTE(review): exit(EXIT_FAILURE) on open failure kills the whole server
 * process, not just this worker thread — confirm that is intended.
 */
void handleDownload(void* p_new_socket){
    char buffer[BUFFER_SIZE] = {0};
    long new_socket = (long)(p_new_socket);
    // receive the requested filename (length-prefixed, NUL-terminated)
    memset(buffer, 0, BUFFER_SIZE);
    unsigned long long path_len = recvSize(new_socket);
    read(new_socket, buffer, path_len);
    printf("filename: %s\n", buffer);
    // uid_t ruid ,euid,suid;
    // getresuid(&ruid, &euid, &suid);
    // printf("resuid: (%u, %u, %u)", ruid, euid, suid);
    // open the requested file read-only
    int fd;
    if((fd=open(buffer, O_RDONLY)) < 0) // open failed
    {
        perror("open file failed");
        exit(EXIT_FAILURE);
    }
    printf("prepared file %s for download\n", buffer);
    // send file size (buffer still holds the filename at this point)
    struct stat statbuf;
    stat(buffer,&statbuf);
    unsigned long long filesize=statbuf.st_size;
    sendSize(new_socket, filesize);
    printf("send %s of size %lld bytes to the client\n", buffer, filesize);
    ssize_t singleRead = 0;
    double sentsize = 0;
    // shared (read) lock while streaming the file out (see lock_set.c)
    lock_set(fd, F_RDLCK);
    do {
        // buffer is reused for payload chunks from here on
        singleRead = read(fd, buffer, sizeof(buffer));
        // with 0 flag, equivalent to write
        if(singleRead > 0 && send(new_socket, buffer, singleRead, 0) >= 0 ){
            sentsize += singleRead;
        } else {
            break;
        }
        memset( buffer,0, sizeof(buffer) );
    } while (singleRead > 0);
    lock_set(fd, F_UNLCK);
    close(fd);
    close(new_socket);
    printf("---------------------\n");
}
/*
 * File-transfer server entry point.
 *
 * Usage: ./server [-p port] [-m maxClient]
 *
 * Accept loop: reads the one-character action ("u" = upload, anything else =
 * download) from each new connection, then hands the socket fd to a
 * handleUpload/handleDownload pthread.
 *
 * NOTE(review): the accept loop reads the action into the file-global
 * `buffer`, and worker threads are never joined or detached — their
 * resources are reclaimed only at process exit. Confirm both are acceptable.
 */
int main(int argc, char const *argv[])
{
    /* Help text printed on bad options (continuation lines are part of the string). */
    char *cmds = "Always fire up server first.\n\
default port 8080\n\
default maxClient 5\n\n\
Upload file to server\n\
----\n\
server: ./server [-p port] [-m maxClient]\n\
client: ./client [-u] -l path/on/client [-i serverIP] [-p port] -r path/on/server\n\n\
Download file from server\n\
----\n\
server: ./server [-p port] [-m maxClient]\n\
client: ./client -d -l path/on/client [-i serverIP] [-p port] -r path/on/server\n";
    // printf("%s", cmds);
    // exit(0);

    int PORT = 8080;
    int maxClient = 5;

    // opterr = 0;
    /* NOTE(review): getopt() returns int; storing it in char and comparing
     * against EOF is fragile on platforms where char is unsigned. */
    char ch;
    while ((ch = getopt(argc, argv, "p:m:")) != EOF /*-1*/) {
        // printf("optind: %d\n", optind);
        switch (ch){
            case 'p':
                PORT = atoi(optarg);
                break;
            case 'm':
                // cap the backlog at 100; otherwise keep the default
                maxClient = atoi(optarg) < 100? atoi(optarg):maxClient;
                break;
            default:
                printf("%s", cmds);
                exit(1);
        }
    }

    // chdir("./serverFile");
    int server_fd, valread;
    struct sockaddr_in address;
    int opt = 1;
    int addrlen = sizeof(address);

    // Creating socket file descriptor
    if ((server_fd = socket(AF_INET, SOCK_STREAM, 0)) == 0)
    {
        perror("socket failed");
        exit(EXIT_FAILURE);
    }

    // Forcefully attaching socket to the port 8080
    if (setsockopt(server_fd, SOL_SOCKET, SO_REUSEADDR | SO_REUSEPORT,
                   &opt, sizeof(opt)))
    {
        perror("setsockopt");
        exit(EXIT_FAILURE);
    }
    address.sin_family = AF_INET;
    address.sin_addr.s_addr = INADDR_ANY;
    address.sin_port = htons( PORT );

    // Forcefully attaching socket to the port 8080
    if (bind(server_fd, (struct sockaddr *)&address,
             sizeof(address))<0)
    {
        perror("bind failed");
        exit(EXIT_FAILURE);
    }
    // maximum client (listen backlog)
    if (listen(server_fd, maxClient) < 0)
    {
        perror("listen");
        exit(EXIT_FAILURE);
    }
    printf("server listening at port %d, allow maxClient -- %d\n", PORT, maxClient);
    fflush(stdout);

    while (1) {
        // int* p_new_socket = (int*)malloc(sizeof(int));
        // fd stored in a long so it can be smuggled through pthread's void*
        long new_socket;
        if ((new_socket = accept(server_fd, (struct sockaddr *)&address,
                                 (socklen_t*)&addrlen))<0)
        {
            perror("accept");
            exit(EXIT_FAILURE);
        }
        printf("***Connected to client: %s@%d***\n", inet_ntoa(address.sin_addr), ntohs(address.sin_port));

        // receive action: upload or download ("u" or "d", length-prefixed)
        unsigned long long action_len = recvSize(new_socket);
        read(new_socket, buffer, action_len);
        int isUpload = 1;
        printf("*Action: %s\n", buffer);
        if (strcmp(buffer, "u")){
            isUpload = 0;
        }
        // if (isUpload){
        //     printf("upload file\n");
        // } else {
        //     printf("download file\n");
        // }

        // hand the connection off to a worker thread
        pthread_t thread;
        int ret_thrd;
        if (isUpload){
            printf("upload file to me\n");
            ret_thrd = pthread_create(&thread, NULL, (void *)&handleUpload, (void *)new_socket);
        } else {
            printf("download file from me\n");
            ret_thrd = pthread_create(&thread, NULL, (void *)&handleDownload, (void *)new_socket);
        }
        if (ret_thrd) { // != 0
            printf("create handle thread failed\n");
        } else { // = 0
            printf("create handle thread success\n");
        }
    }
    close(server_fd);
    return 0;
}
| 7a83bcca71dbf04666f385f2297a3ec5e271727b | [
"Markdown",
"C"
] | 3 | C | sky-bro/c-socket-hands-on | 60477b7e780d77efbe24ec2f063bea168576bc73 | 7fd5b954e4f08d1decf55b93b66bebeeb60d3a37 |
refs/heads/main | <repo_name>Alex4196/PI-Food<file_sep>/client/src/reducer/index.js
import Swal from 'sweetalert2'
// Shape of the Redux store managed by this reducer.
const initialState = {
    recipes: [],      // list currently rendered (filtered / sorted)
    filter: [],
    allRecipes: [],   // untouched base list used by FILTER_BY_TYPE
    types: [],
    detail: []
}

// Case-insensitive title comparator; dir = 1 ascending, -1 descending.
function byTitle(dir) {
    return (a, b) => {
        const ta = a.title.toLowerCase();
        const tb = b.title.toLowerCase();
        if (ta > tb) return dir;
        if (tb > ta) return -dir;
        return 0;
    };
}

// spoonacularScore comparator; dir = 1 ascending, -1 descending.
function byScore(dir) {
    return (a, b) => {
        if (a.spoonacularScore > b.spoonacularScore) return dir;
        if (b.spoonacularScore > a.spoonacularScore) return -dir;
        return 0;
    };
}

function rootReducer(state = initialState, action) {
    switch (action.type) {
        case "GET_RECIPES":
            return {
                ...state,
                recipes: action.payload,
                allRecipes: action.payload,
            };
        case "GET_NAME_RECIPES":
            return {
                ...state,
                recipes: action.payload
            };
        case "GET_TYPES":
            return {
                ...state,
                types: action.payload
            };
        case "ORDER_BY": {
            // Fixed: Array.prototype.sort mutates in place; sorting
            // state.recipes directly also reordered the shared allRecipes
            // reference and broke Redux change detection. Sort a copy.
            const copy = [...state.recipes];
            const sorted =
                action.payload === "asc" ? copy.sort(byTitle(1)) :
                action.payload === "desc" ? copy.sort(byTitle(-1)) :
                action.payload === "punAsc" ? copy.sort(byScore(1)) :
                action.payload === "punDesc" ? copy.sort(byScore(-1)) :
                copy; // unknown criterion: keep current order (was null)
            return {
                ...state,
                recipes: sorted
            };
        }
        case "GET_RECIPE_DETAIL":
            return {
                ...state,
                detail: action.payload
            };
        case "FILTER_BY_TYPE": {
            const allRecipes = state.allRecipes;
            // API recipes carry a `diets` string array...
            const apiMatches = allRecipes.filter(r =>
                r.hasOwnProperty('diets') && r.diets.includes(action.payload));
            // ...while DB recipes carry `types` objects with a `name`.
            const dbMatches = allRecipes.filter(r =>
                r.hasOwnProperty('types') && r.types.find(t => t.name === action.payload));
            const found = apiMatches.concat(dbMatches);
            if (found.length) {
                return {
                    ...state,
                    recipes: found
                };
            }
            // Swal (imported above) is currently unused; alert is browser-only.
            alert('Any recipe has that diet');
            // Fixed: this branch previously fell through without returning,
            // handing Redux `undefined` as the next state.
            return state;
        }
        default: return state;
    }
}

export default rootReducer;
<file_sep>/client/src/components/Recipes.jsx
import React from "react";
import { Link } from "react-router-dom";
import styles from "./Recipes.module.css"
import plato from "./plato.jpg"
export default function Recipes({ name, image, title, id }) {
return (
<div className={styles.recipes}>
<div className={styles.box} >
<Link to={`/${id}`} >
<h1 className={styles.title}>{title.length >= 34 ? title.slice(0,45)+"..." : title}</h1>
<h5 className={styles.name} > Suitable for: {name && name.length? name: "All diets"}</h5>
<div className={styles.imagen} >
<img className={styles.img} src={image ? image : plato} alt='' />
</div>
</Link>
</div>
</div>
)
}
<file_sep>/api/src/routes/type.js
// Express router for the /types resource.
const {Router} = require('express');
const {getAllType} = require('../Controllers/type');
const router = Router();
// GET /types -> every diet type (the controller seeds defaults when the table is empty).
router.get('/', getAllType);
module.exports = router;
<file_sep>/api/src/routes/index.js
const {Router} = require('express');
// Import all routers here;
// Example: const authRouter = require('./auth.js');
/* const axios = require ('axios') */
const RecipeRoutes = require('./recipe');
const TypeRoutes = require('./type');
const router = Router();
// Mount the routers under their base paths
// Example: router.use('/auth', authRouter);
router.use('/recipes', RecipeRoutes);
router.use('/types', TypeRoutes);
module.exports = router;
<file_sep>/client/src/components/Home.jsx
import React from "react";
import { useState, useEffect } from "react";
import { useDispatch, useSelector } from "react-redux";
import { listRecipes, filterByTypes, getTypes, orderBy } from "../actions";
import { Link } from "react-router-dom";
import Recipes from "./Recipes";
import Paginado from "./Paginado";
import SearchBar from "./SearchBar";
import styles from './Home.module.css'
import { NavLink } from "react-router-dom";
/**
 * Landing page: search bar, create-recipe link, diet filter, ordering
 * selectors and the paginated recipe grid.
 */
export default function Home() {
    const dispatch = useDispatch() // used to dispatch redux actions
    const allRecipes = useSelector((state) => state.recipes) // reads state.recipes from the store (mapStateToProps equivalent)
    const [order, setOrder] = useState('') // NOTE(review): written in handleSort but never read in the render — confirm it is still needed
    const [currentPage, setCurrentPage] = useState(1) // pagination starts on page 1
    const [recipesPerPage, setRecipesPerPage] = useState(9) // how many recipes per page
    const indexOfLastRecipe = currentPage * recipesPerPage // e.g. 9 on page 1
    const indexOfFirstRecipe = indexOfLastRecipe - recipesPerPage // e.g. 0 on page 1
    const currentRecipes = allRecipes.slice(indexOfFirstRecipe, indexOfLastRecipe) // the slice of the store's recipe array shown on the current page

    // Callback handed to <Paginado/> to switch pages.
    const paginado = (pageNumber) => (
        setCurrentPage(pageNumber)
    )

    // Load every recipe on mount.
    useEffect(() => {
        dispatch(listRecipes());
    }, [dispatch])

    // Load the diet types on mount.
    useEffect(() => {
        dispatch(getTypes())
    }, [dispatch])

    // Clicking the title reloads the full, unfiltered list.
    function handleonClick(e) {
        e.preventDefault();
        dispatch(listRecipes());
    }

    // Filter the list by the selected diet.
    function handleFilterTypes(e) {
        dispatch(filterByTypes(e.target.value))
    }

    // Re-order the list and jump back to the first page.
    function handleSort(e) {
        e.preventDefault();
        dispatch(orderBy(e.target.value))
        setCurrentPage(1); // after re-ordering, show the first page again
        setOrder(`Ordered ${e.target.value}`) // updates local state so the component re-renders
    }

    return (
        <div>
            <SearchBar />
            <div className={styles.newrecipe }>
                <NavLink to='/recipecreate'> <button className={styles.boton}>Create a new recipe</button> </NavLink >
            </div>
            <div className={styles.title}>
                <h1 onClick={(e) => handleonClick(e)} >The ultimate food app </h1>
            </div>
            <div >
                <select defaultValue={'DEFAULT'} className={styles.filterdiets} onChange={e => handleFilterTypes(e)}>
                    <option value='gluten free'>Gluten Free</option>
                    <option value='fodmap friendly'>Fodmap Friendly</option>
                    <option value='dairy free'>Dairy Free</option>
                    <option value='lacto ovo vegetarian'>Ovo-Vegetarian</option>
                    <option value='vegan'>Vegan</option>
                    <option value='pescatarian'>Pescetarian</option>
                    <option value='paleolithic'>Paleo</option>
                    <option value='primal'>Primal</option>
                    <option value='whole 30'>Whole30</option>
                    <option hidden value="DEFAULT" disabled >Type of Diet</option>
                </select>
            </div>
            <div>
                <select defaultValue={'DEFAULT'} className={styles.filteralph} onChange={(e) => handleSort(e)} >
                    <option value='asc'>A-Z</option>
                    <option value='desc'>Z-A</option>
                    <option hidden value="DEFAULT" disabled>Alphabetical order</option>
                </select>
                <select defaultValue={'DEFAULT'} className={styles.filterpun} onChange={(e) => handleSort(e)} >
                    <option value='punAsc'>Ascending order</option>
                    <option value='punDesc'>descending order</option>
                    <option hidden value="DEFAULT" disabled>Punctuation</option>
                </select>
                <div className={styles.paginado} >
                    <Paginado
                        recipesPerPage={recipesPerPage}
                        allRecipes={allRecipes.length} //a numeric value is needed here
                        paginado={paginado} />
                </div>
                <div className={styles.cartas} >
                    {/* NOTE(review): lowercase <fragment> renders an unknown DOM element
                        (React.Fragment is presumably intended, which would ignore className),
                        and <Recipes/> renders its own <Link>, so anchors end up nested —
                        confirm both are intentional. */}
                    { currentRecipes && currentRecipes.map(e => {
                        return (
                            <fragment key={e.id} className={styles.link} >
                                <Link to={"/home/" + e.id}>
                                    <Recipes
                                        id={e.id}
                                        title={e.title}
                                        name={e.diets ? e.diets : e.types && e.types.map(e => e.name)}
                                        image={e.image}
                                    />
                                </Link>
                            </fragment>
                        )
                    })
                    }
                </div>
            </div>
            <h5 className={styles.cookers}>Enjoy cookers!!</h5>
            <p className={styles.copy} > Copyright ©️ 2021 The ultimate food app</p>
        </div>
    )
}
const {Type} = require('../db')
const axios = require('axios')
// Default diet types used by getAllType() to seed an empty Type table.
const diets = [
    {name: "gluten free"},
    {name: "dairy free"},
    {name: "lacto ovo vegetarian"},
    {name: "vegan"},
    {name: "paleolithic"},
    {name: "primal"},
    {name: "pescatarian"},
    {name: "fodmap friendly"},
    {name: "whole 30"},
]
/**
 * GET /types handler.
 *
 * Responds with every diet type stored in the database. On the first call
 * (empty table) it seeds the table with the default `diets` list and
 * responds with the freshly created rows. Any database error is forwarded
 * to the Express error middleware via `next`.
 */
async function getAllType(req, res, next) {
    let stored;
    try {
        stored = await Type.findAll();
    } catch (err) {
        return next(err);
    }
    if (stored.length > 0) {
        return res.json(stored);
    }
    // Empty table: seed it with the defaults and return them.
    try {
        const seeded = await Type.bulkCreate(diets);
        return res.json(seeded);
    } catch (err) {
        return next(err);
    }
}
module.exports = {
getAllType
}<file_sep>/client/src/actions/index.js
import axios from 'axios';
import Swal from 'sweetalert2'
/**
 * Action creator: filter the recipe grid by a diet type name.
 * @param {string} payload - diet name to filter by.
 */
export function filterByTypes(payload) {
    return { type: "FILTER_BY_TYPE", payload };
}
/** Thunk: fetch every recipe from the backend and load it into the store. */
export function listRecipes() {
    return async (dispatch) => {
        const response = await axios.get('http://localhost:3001/recipes');
        return dispatch({
            type: "GET_RECIPES",
            payload: response.data
        });
    };
}
/** Action creator: re-order the recipe list ('asc', 'desc', 'punAsc' or 'punDesc'). */
export function orderBy(payload) {
    return {
        type: "ORDER_BY",
        payload
    };
}
/**
 * Thunk: search recipes by name. When the backend answers with an error
 * (no match), an error dialog is shown instead of rejecting. The unused
 * `next` parameter is kept for call-site compatibility.
 */
export function getNameRecipes(name, next) {
    return async (dispatch) => {
        try {
            const response = await axios.get("http://localhost:3001/recipes?name=" + name);
            return dispatch({
                type: "GET_NAME_RECIPES",
                payload: response.data
            });
        } catch (err) {
            Swal.fire({
                title: 'The Recipe does not exist',
                text: '',
                icon: 'error',
                confirmButtonText: 'Cool'
            });
        }
    };
}
/* export function getNameRecipes(name) {
return function(dispatch){
axios.get("http://localhost:3001/recipes?name=" + name)
.then(r => r.data)
.then(data => dispatch({
type: "GET_NAME_RECIPES",
payload: data
}))
}
}
*/
/** Thunk: fetch the full detail of one recipe by id and store it in `detail`. */
export function getRecipeDetail(id) {
    return async (dispatch) => {
        const response = await axios.get(`http://localhost:3001/recipes/${id}`);
        return dispatch({
            type: "GET_RECIPE_DETAIL",
            payload: response.data
        });
    };
}
/**
 * Thunk: fetch every diet type from the backend. The `name`/`next`
 * parameters are unused but kept for call-site compatibility. On failure
 * an error dialog is shown instead of rejecting.
 */
export function getTypes(name, next) {
    return async (dispatch) => {
        try {
            const response = await axios.get("http://localhost:3001/types");
            return dispatch({
                type: "GET_TYPES",
                payload: response.data
            });
        } catch (err) {
            Swal.fire({
                title: 'Does not exist recipe with that Diet',
                text: '',
                icon: 'error',
                confirmButtonText: 'Cool'
            });
        }
    };
}
/**
 * Thunk: create a new recipe on the backend from the creation-form state.
 * The step-by-step text is sent as a one-element array, matching what the
 * API expects. Failures are only logged. The unused `next` parameter is
 * kept for call-site compatibility.
 */
export const postRecipe = (input, next) => {
    const steps = [input.stepbystep];
    return async (dispatch) => {
        try {
            const newRecipe = await axios({
                method: 'post',
                url: 'http://localhost:3001/recipes/create',
                data: {
                    title: input.name,
                    summary: input.summary,
                    spoonacularScore: parseInt(input.spoonacularScore),
                    healthScore: parseInt(input.healthscore),
                    steps: steps,
                    diets: input.types
                }
            });
            return dispatch({
                type: 'CREATE_RECIPE',
                payload: newRecipe.data
            });
        } catch (err) {
            console.log('The creation of recipe fail');
        }
    };
}
<file_sep>/client/src/components/RecipeCreate.jsx
import React, { useState} from 'react';
import { Link, useHistory } from 'react-router-dom';
import { postRecipe} from '../actions/index'
import { useDispatch, useSelector } from 'react-redux';
import styles from "./RecipeCreate.module.css"
import { IoRestaurantSharp } from "react-icons/io5";
import { IoFastFoodOutline } from "react-icons/io5";
import { IoArrowBackOutline } from "react-icons/io5";
import Swal from 'sweetalert2'
/**
 * Validate the recipe-creation form.
 *
 * Returns an object whose keys are the invalid field names and whose
 * values are the user-facing error messages; an empty object means the
 * input is valid. Scores are only range-checked when present, so an empty
 * score field produces no error.
 */
export function validate(input) {
    const errors = {};
    const outOfRange = (v) => v < 0 || v > 100;

    if (!input.name) errors.name = 'Name is required';
    if (!input.summary) errors.summary = 'Summary is required';
    if (outOfRange(input.healthscore)) errors.healthscore = 'The health score has to be between 0 and 100';
    if (outOfRange(input.spoonacularScore)) errors.spoonacularScore = 'The punctuation has to be between 0 and 100';
    if (!input.stepbystep) errors.stepbystep = 'Step by step is required';

    return errors;
};
export default function RecipeCreate() {
const dispatch = useDispatch()
const history = useHistory()
const types = useSelector((state) => state.types)
const [errors, setErrors] = useState({});
const [input, setInput] = useState({
name: "",
summary: "",
spoonacularScore: "",
healthscore: "",
stepbystep: "",
types: []
})
console.log(input)
function handleInputChange(e) {
setInput({
...input,
[e.target.name]: e.target.value //cada vez que ejecute la funcion, a mi estado input, ademas de lo que tengo le agrego , el target value de lo que estoy modificando
})
let objError = validate({
...input,
[e.target.name]: e.target.value
});
setErrors(objError);
}
function handleSelect(e) {
if (input.types.includes(e.target.value)) {
alert("You already selected this diet. Try again.");
} else if (input.types.length >= 4) {
alert("You can select up to 3 diets.");
} else {
setInput({
...input,
types: [...input.types, e.target.value]
})
}
}
function handleSubmit(e){
e.preventDefault();
dispatch(postRecipe(input))
Swal.fire({
title: 'The new recipe has been created!',
text: 'Enjoy the recipe!',
icon: 'success',
confirmButtonText: 'Cool'
})
setInput({
name: "",
summary: "",
spoonacularScore: "",
healthscore: "",
stepbystep: "",
types: []
})
history.push("/home")
}
return (
<div>
<Link to='/home'><button className={styles.home} > <IoArrowBackOutline/><IoFastFoodOutline/> </button></Link>
<div className={styles.title}>
<h1> Make your own recipe! </h1>
</div>
<div className={styles.conteiner}>
<form onSubmit={(e) => handleSubmit(e)}>
<div >
<input className={errors.name && 'danger'} className={styles.name} placeholder="Recipe Name..." type="text" name="name" value={input.name} onChange={handleInputChange} />
{errors.name && (
<p className={styles.danger}>{errors.name}</p>
)}
</div>
<div className={styles.summary} >
<textarea className={errors.summary && 'danger'} placeholder="Dish Summary..." type="text" name="summary" value={input.summary} onChange={handleInputChange} />
{errors.summary && (
<p className={styles.danger}>{errors.summary}</p>
)}
</div>
<div className={styles.spoonacularScore} >
<input className={errors.spoonacularScore && 'danger'} placeholder="Punctuation..." type="number" name="spoonacularScore" value={input.punctuation} onChange={handleInputChange} />
{errors.spoonacularScore && (
<p className={styles.danger}>{errors.spoonacularScore}</p>
)}
</div>
<div className={styles.healthscore} >
<input className={errors.healthscore && 'danger'} placeholder = "Health Score..." type="number" name="healthscore" value={input.healthscore} onChange={handleInputChange} />
{errors.healthscore && (
<p className={styles.danger}>{errors.healthscore}</p>
)}
</div>
<div className={styles.steps} >
<textarea className={errors.stepbystep && 'danger'} placeholder="Step by Step..." type="text" name="stepbystep" value={input.stepbystep} onChange={handleInputChange} />
{errors.stepbystep && (
<p className={styles.danger}>{errors.stepbystep}</p>
)}
</div>
<div className={styles.types}>
<select onChange={(e) => handleSelect(e)}> <option hidden disabled selected value>Choose the types of Diets...</option>
{types?.map((e) =>
<option value={e.name} key={e.id}>{e.name}</option>
)}
</select>
<div>
<ul className={styles.list}>
<li className={styles.lista} >{input.types.map(i => i + ", ")} </li>
</ul>
</div>
</div>
<div >
<button className={styles.boton} type="submit" > Create Recipe <IoRestaurantSharp/></button>
</div>
</form>
</div>
<p className={styles.copy} > Copyright ©️ 2021 The ultimate food app</p>
</div>
)
}
<file_sep>/client/src/components/Paginado.jsx
import React from 'react';
import styles from './Paginado.module.css'
export default function Paginado ({recipesPerPage, allRecipes, paginado}) {
const pageNumbers = []
for(let i = 0; i<Math.ceil(allRecipes/recipesPerPage); i++) {
pageNumbers.push(i+1)
}
return(
<nav >
<ul className={styles.pag}>
{ pageNumbers &&
pageNumbers.map(number =>{
return (
<li key={number} >
<button className={styles.paginado} onClick={() => paginado(number)}>{number}</button>
</li>
)
} )}
</ul>
</nav>
)
}<file_sep>/client/src/components/SearchBar.jsx
import React from 'react';
import { useState } from 'react';
import {useDispatch} from 'react-redux';
import { getNameRecipes } from '../actions';
import styles from "./SearchBar.module.css";
import { IoSearchSharp } from "react-icons/io5";
import Swal from 'sweetalert2'
export default function SearchBar(){
const dispatch = useDispatch()
const [name, setName] = useState("")
function handleInputChange(e){
e.preventDefault()
setName(e.target.value)
}
function handleSubmit(e){
e.preventDefault()
dispatch(getNameRecipes(name))
setName("");
}
return (
<div className={styles.wrap}>
<div className={styles.search}>
<input className={styles.searchTerm} value={name} type= 'text' placeholder='Search a recipe...' onChange={(e) => handleInputChange(e)}/>
<button className={styles.searchButton} type='submit' onClick={(e) => handleSubmit(e)}> <IoSearchSharp/> </button>
</div>
</div>
)
}<file_sep>/client/src/components/RecipeDetail.jsx
import React from 'react';
import { Link } from 'react-router-dom';
import { useDispatch, useSelector } from 'react-redux';
import { getRecipeDetail } from '../actions';
import { useEffect } from 'react';
import styles from './RecipeDetail.module.css';
import { IoFastFoodOutline } from "react-icons/io5";
import { IoArrowBackOutline } from "react-icons/io5";
export default function Detail(props) {
const dispatch = useDispatch()
useEffect(() => {
dispatch(getRecipeDetail(props.match.params.id));
}, [dispatch])
const myRecipe = useSelector((state) => state.detail)
myRecipe && console.log(myRecipe )
return (
<div >
{
typeof myRecipe === "object" ?
<div >
<Link to='/home'>
<button className={styles.home}> <IoArrowBackOutline/> <IoFastFoodOutline/> </button>
</Link>
<div classname={styles.contenedor} >
<div className={styles.thecard}>
<div className={styles.thefront}>
<h1 className={styles.title} >{myRecipe.title}</h1>
<h2 className={styles.summary}> Dish summary: {myRecipe.summary && myRecipe.summary.replace(/<[^>]*>?/g, '')} </h2>
<div className={styles.image} >
{myRecipe.image ?
<img className={styles.image} src={myRecipe.image} alt='' /> : null }
</div>
<p className={styles.puntos}> Punctuation: {myRecipe.spoonacularScore}</p>
<h3 className={styles.score} > Health score: {myRecipe.healthScore}</h3>
<ul className={styles.types}> Suitable for these diets:
{
myRecipe.diets && myRecipe.diets.length ? myRecipe.diets.map(e => {
return <li>
{e}
</li>
}) :
myRecipe.diets ? " All diets" :
myRecipe.types && myRecipe.types.map(f => {
return <li>
{f.name}
</li>
})
}
</ul>
</div>
<div className={styles.theback}>
<h4 className={styles.steps}> <h1 className={styles.titulostep}> Step by step </h1>
{ myRecipe.analyzedInstructions && myRecipe.analyzedInstructions.length ? myRecipe.analyzedInstructions.map(r => r.steps.map(s => s.step)).flat(1).join(' '): myRecipe.analyzedInstructions ? "Sorry, this recipe does not have Step by Step.": myRecipe.steps} </h4>
<div className={styles.enjoy} >
<h1 > {myRecipe.analyzedInstructions && myRecipe.analyzedInstructions.length? "Enjoy this wonderful and exquisite recipe!!" : null}</h1>
</div>
</div>
</div>
</div>
</div> : <p>Loading...</p>
}
<p className={styles.copy} > Copyright ©️ 2021 The ultimate food app</p>
</div>
)
} | 01f2a25424bd9fcf51cf6ee93e4e143aeceb4cb9 | [
"JavaScript"
] | 11 | JavaScript | Alex4196/PI-Food | e33e6b2b0e8f4c5d31996569800373a5d44c04f0 | 2f06d62fd4b61d1cc484bb2bf6ad0b351356ab14 |
refs/heads/master | <file_sep>package dev.phoenixxt.mlplayground
import androidx.appcompat.app.AppCompatActivity
import android.os.Bundle
import android.util.Log
import android.widget.Toast
import com.google.firebase.FirebaseApp
import com.google.firebase.ml.naturallanguage.FirebaseNaturalLanguage
import com.google.firebase.ml.naturallanguage.smartreply.FirebaseTextMessage
import com.google.firebase.ml.naturallanguage.smartreply.SmartReplySuggestionResult
/**
 * Demo activity for ML Kit's Smart Reply: builds a tiny hard-coded
 * conversation, requests reply suggestions, shows the first suggestion in
 * a Toast and logs all of them.
 */
class MainActivity : AppCompatActivity() {

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_main)

        // Both messages are created for the *local* user.
        // NOTE(review): smart reply normally suggests answers to the remote
        // side's messages — confirm this direction is intended.
        val conversation = listOf(
            FirebaseTextMessage.createForLocalUser("hi, what's up?", System.currentTimeMillis()),
            FirebaseTextMessage.createForLocalUser("where are you at now?", System.currentTimeMillis())
            //FirebaseTextMessage.createForLocalUser("heading out now", System.currentTimeMillis() + 101)
        )

        val smartReply = FirebaseNaturalLanguage.getInstance().smartReply
        smartReply.suggestReplies(conversation)
            .addOnSuccessListener { result ->
                if (result.status == SmartReplySuggestionResult.STATUS_SUCCESS) {
                    // Show the top suggestion (empty string when there is none).
                    Toast.makeText(this, result.suggestions.firstOrNull()?.text ?: "", Toast.LENGTH_LONG).show()
                    result.suggestions.forEach {
                        Log.e("asdasd", it.text) // NOTE(review): placeholder log tag left in
                    }
                }
            }
    }
}
| 596b35c1d9748ca94a3bec1211949e046ce23486 | [
"Kotlin"
] | 1 | Kotlin | phoenixxt/MLPlayground | a7b17a526b9175b36d6fd89334afcdbf3216418a | 122985b454ffceed1d1a2f1b5d6aab09730f7219 |
refs/heads/master | <repo_name>dahiandrea/pruebaMongo<file_sep>/PruebaMongo/src/main/java/co/com/prueba/mongo/entidad/Comentario.java
package co.com.prueba.mongo.entidad;
/**
 * A comment ("comentario") left on an article, together with the user who
 * wrote it.
 */
public class Comentario {

	// Date the comment was written (kept as a plain string).
	private String fecha;
	// Comment body.
	private String contenido;
	// Comment kind. NOTE(review): the set of valid values is not visible
	// here — confirm against the service that fills it.
	private String tipo;
	// Id of the authoring user.
	private String userid;
	// Resolved author, when attached by the service layer.
	private Usuario usuario;

	public String getFecha() {
		return fecha;
	}
	public void setFecha(String fecha) {
		this.fecha = fecha;
	}
	public String getContenido() {
		return contenido;
	}
	public void setContenido(String contenido) {
		this.contenido = contenido;
	}
	public String getTipo() {
		return tipo;
	}
	public void setTipo(String tipo) {
		this.tipo = tipo;
	}
	public String getUserid() {
		return userid;
	}
	public void setUserid(String userid) {
		this.userid = userid;
	}
	public Usuario getUsuario() {
		return usuario;
	}
	public void setUsuario(Usuario usuario) {
		this.usuario = usuario;
	}
}
<file_sep>/FrontPruebaMongo/src/client/app/home/articulo.model.ts
import { Comentario } from './comentario.model';
/** A blog article as consumed by the client views. */
export class Articulo {
  id:string;
  nombre:string;
  descripcion:string;
  autor:string;
  /** Comments attached to this article. */
  comentarios: Comentario[];
}
<file_sep>/FrontPruebaMongo/src/client/app/home/comentario.model.ts
import { Usuario } from './usuario.model';
/** A comment shown on an article, with its resolved author. */
export class Comentario {
  fecha: string;     // date the comment was written
  contenido: string; // comment body
  usuario: Usuario;  // resolved author
  tipo:string;       // comment kind — confirm valid values against the backend
  userid:string;     // id of the authoring user
}
<file_sep>/PruebaMongo/src/main/java/co/com/prueba/mongo/repositorio/RepositorioUsuario.java
package co.com.prueba.mongo.repositorio;
import org.springframework.data.mongodb.repository.MongoRepository;
import co.com.prueba.mongo.entidad.Usuario;
/** Spring Data MongoDB repository for {@link Usuario} documents. */
public interface RepositorioUsuario extends MongoRepository<Usuario, String>{
}
<file_sep>/PruebaMongo/src/main/java/co/com/prueba/mongo/repositorio/RepositorioArticulo.java
package co.com.prueba.mongo.repositorio;
import org.springframework.data.mongodb.repository.MongoRepository;
import co.com.prueba.mongo.entidad.Articulo;
/** Spring Data MongoDB repository for {@link Articulo} documents. */
public interface RepositorioArticulo extends MongoRepository<Articulo, String> {
}
<file_sep>/PruebaMongo/src/main/java/co/com/prueba/mongo/Application.java
package co.com.prueba.mongo;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.web.support.SpringBootServletInitializer;
/**
 * Spring Boot entry point. The external configuration file is taken from
 * the path given in the application's --spring.config.location argument.
 *
 * @author diegheal
 */
@SpringBootApplication
public class Application extends SpringBootServletInitializer {

	public static void main(String[] args) {
		SpringApplication.run(Application.class, args);
	}
}
<file_sep>/PruebaMongo/src/main/resources/application.properties
server.contextPath = /pruebaMongo
spring.data.mongodb.database=nuevaBaseDatos
<file_sep>/PruebaMongo/src/main/java/co/com/prueba/mongo/servicio/ServicioBlogInterface.java
package co.com.prueba.mongo.servicio;
import java.util.List;
import co.com.prueba.mongo.entidad.Articulo;
import co.com.prueba.mongo.entidad.Usuario;
/**
 * Blog service contract: listing articles and users, creating comments and
 * per-user comment statistics.
 */
public interface ServicioBlogInterface {

	/** All stored articles. */
	List<Articulo> listarArticulos();

	/** All stored users. */
	List<Usuario> listarUsuarios();

	/** Persists the given article (with its new comment) and returns it. */
	Articulo crearComentario(Articulo articulo);

	/** Articles containing comments written by the given user. */
	List<Articulo> comentarioPorUsuario(String idUsuario);

	/** Total of "positive" comments by the user, returned as text. */
	String totalPositivos(String idUsuario);

	/** Total of "negative" comments by the user, returned as text. */
	String totalNegativos(String idUsuario);
}
<file_sep>/PruebaMongo/src/main/java/co/com/prueba/mongo/entidad/Articulo.java
package co.com.prueba.mongo.entidad;
import java.util.ArrayList;
/**
 * Blog article ("articulo") with its comments.
 *
 * NOTE(review): the field is named {@code descripcion} but is exposed as
 * {@code getContenido()}/{@code setContenido(...)}; renaming either side
 * would change the JSON shape, so the mismatch is only documented here —
 * confirm which name clients rely on.
 */
public class Articulo {

	private String id;
	private String nombre;
	private String descripcion;
	private String autor;
	// Comments attached to this article.
	private ArrayList<Comentario> comentarios;

	public String getId() {
		return id;
	}
	public void setId(String id) {
		this.id = id;
	}
	public String getNombre() {
		return nombre;
	}
	public void setNombre(String nombre) {
		this.nombre = nombre;
	}
	// Accessor for the `descripcion` field (see class note above).
	public String getContenido() {
		return descripcion;
	}
	public void setContenido(String contenido) {
		this.descripcion = contenido;
	}
	public String getAutor() {
		return autor;
	}
	public void setAutor(String autor) {
		this.autor = autor;
	}
	public ArrayList<Comentario> getComentarios() {
		return comentarios;
	}
	public void setComentarios(ArrayList<Comentario> comentarios) {
		this.comentarios = comentarios;
	}
}
<file_sep>/FrontPruebaMongo/src/client/app/home/usuario.model.ts
/** A user together with aggregated comment counters. */
export class Usuario {
  id: string;
  nombre:string;
  edad:number;
  /** Count of positive comments (served as text by the API). */
  totalPositivos: string;
  /** Count of negative comments (served as text by the API). */
  totalNegativos: string;
}
<file_sep>/FrontPruebaMongo/src/client/app/about/about.component.ts
import { Component, OnInit } from '@angular/core';
import { NameListService } from '../shared/name-list/name-list.service';
import { Usuario } from '../home/usuario.model';
import { Comentario } from '../home/comentario.model';
import { Articulo } from '../home/articulo.model';
/**
* This class represents the lazy loaded AboutComponent.
*/
@Component({
  moduleId: module.id,
  selector: 'sd-about',
  templateUrl: 'about.component.html',
  styleUrls: ['about.component.css']
})
export class AboutComponent implements OnInit {
  // Users listed on the page.
  usuarios: Usuario[];
  errorMessage: string;
  // NOTE(review): never assigned in this class — confirm the template still uses it.
  comentarios : Comentario[];
  // Articles containing comments of the selected user.
  articulos: Articulo[];
  // True while the user list is shown; false once a user was selected.
  contenido: boolean = true;
  // Currently selected user (enriched below with positive/negative totals).
  usuario : Usuario;

  constructor(public nameListService: NameListService) {}

  ngOnInit() {
    this.getUsuarios();
  }

  // Load every user from the backend.
  getUsuarios() {
    this.nameListService.get()
      .subscribe(
        usuarios => this.usuarios = usuarios,
        error => this.errorMessage = <any>error
      );
  }

  // Select a user: load the articles they commented on plus their
  // positive/negative comment totals.
  // NOTE(review): the three subscriptions are never unsubscribed — fine
  // for one-shot HTTP observables, but confirm the service returns those.
  getComentarios(usuario: Usuario) {
    this.usuario = usuario;
    this.contenido = false;
    this.nameListService.getComentarios(usuario.id).subscribe(
      articulos => this.articulos = articulos,
      error => this.errorMessage = <any>error
    );
    this.nameListService.getPositivos(usuario.id).subscribe(
      positivos => this.usuario.totalPositivos = positivos,
      error => this.errorMessage = <any>error
    );
    this.nameListService.getNegativos(usuario.id).subscribe(
      negativos => this.usuario.totalNegativos = negativos,
      error => this.errorMessage = <any>error
    );
  }
}
<file_sep>/FrontPruebaMongo/src/client/app/home/home.component.ts
import { Component, OnInit } from '@angular/core';
import { NameListService } from '../shared/name-list/name-list.service';
import { Articulo } from './articulo.model';
import { Usuario } from './usuario.model';
import { Comentario } from './comentario.model';
/**
* This class represents the lazy loaded HomeComponent.
*/
@Component({
  moduleId: module.id,
  selector: 'sd-home',
  templateUrl: 'home.component.html',
  styleUrls: ['home.component.css'],
})
export class HomeComponent implements OnInit {

  // Comment being composed for the selected article.
  nuevoComentario: Comentario = new Comentario;
  errorMessage: string;
  usuarios: Usuario[] = [];
  articulos: Articulo[] = [];
  // True while the article list is shown; false once one is selected.
  contenido: boolean = true;
  // Currently selected article.
  articulo: Articulo;
  usuario: Usuario;
  tipoPositivo: boolean;
  tipoNegativo: boolean;

  /**
   * Creates an instance of the HomeComponent with the injected
   * NameListService.
   *
   * @param {NameListService} nameListService - The injected NameListService.
   */
  constructor(public nameListService: NameListService) {}

  /**
   * Loads users and articles on init.
   */
  ngOnInit() {
    this.getUsuarios();
    this.getArticulos();
  }

  /**
   * Loads every user from the backend.
   */
  getUsuarios() {
    this.nameListService.get()
      .subscribe(
        usuarios => this.usuarios = usuarios,
        error => this.errorMessage = <any>error
      );
  }

  // Load every article from the backend.
  getArticulos() {
    this.nameListService.getArticulos().subscribe(
      articulos => this.articulos = articulos,
      error => this.errorMessage = <any>error
    );
  }

  // Switch the view to the selected article's detail/comments.
  verContenido(articulo:Articulo) {
    this.contenido = false;
    this.articulo = articulo;
  }

  /**
   * Dates the new comment, appends it to the selected article and persists
   * the article via the service.
   *
   * NOTE(review): the comment is pushed into the local array *before* the
   * server responds, so a failed save leaves the UI out of sync — confirm
   * this optimistic update is intended.
   *
   * @return {boolean} always true (the previous doc wrongly claimed false).
   */
  addComentario(): boolean {
    this.nuevoComentario.fecha = new Date().toISOString().split('T')[0];
    this.articulo.comentarios.push(this.nuevoComentario);
    this.nameListService.addComentario(this.articulo).subscribe(
      articulo => this.articulo = articulo,
      error => this.errorMessage = <any>error
    );
    return true;
  }
}
| e864abd403f83048aca4c84026c60a147b815170 | [
"Java",
"TypeScript",
"INI"
] | 12 | Java | dahiandrea/pruebaMongo | 707305b9ff47afb09261b8f05010a260c665f952 | 5241a56d708b1be9978f0f445705964cc2ca307a |
refs/heads/master | <repo_name>kparasch/kostas-mplstyle<file_sep>/install.sh
#!/bin/bash
# Install the "kostas" matplotlib style by symlinking it into the user's
# matplotlib style library, so plt.style.use("kostas") can find it.
#
# Fixes over the previous version: abort on the first failing command,
# quote every path (the old version broke if the checkout lived under a
# path containing spaces), and use $(...) instead of backticks.
set -euo pipefail

style_dir="${HOME}/.config/matplotlib/stylelib"
mkdir -p "${style_dir}"

# Link rather than copy, so edits to the repo file take effect immediately.
abs_path_to_here="$(pwd)"
ln -s "${abs_path_to_here}/kostas.mplstyle" "${style_dir}/kostas.mplstyle"
| efbce86817496352ec36781eb8893ca61997648e | [
"Shell"
] | 1 | Shell | kparasch/kostas-mplstyle | 5398c012b74a11dc7de7a11dce254433198cc738 | 69f8aa1fb884223c2f5800eac23d7d8010739604 |
refs/heads/master | <repo_name>caphindsight/Ising<file_sep>/Ising.c
#include <inttypes.h>
#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <time.h>
/* A single lattice site: SPIN_UP (+1) or SPIN_DOWN (-1). */
typedef int8_t spin_t;
const spin_t SPIN_UP = 1;
const spin_t SPIN_DOWN = -1;

/* Total energy: a sum of integer spin products, so an integer type is exact. */
typedef int64_t energy_t;

/* Boltzmann weights and probabilities. */
typedef double weight_t;
weight_t uniformRandom() {
return ((weight_t) rand() / (weight_t) RAND_MAX);
}
/*
 * A 2-D lattice of spins with periodic boundaries, stored flat: the spin
 * at (x, y) lives at data[x * height + y] (see getIndex). The data buffer
 * is owned by the lattice and released with deallocateLattice().
 */
typedef struct {
    size_t width;
    size_t height;
    spin_t* data;
} Lattice;
Lattice allocateLattice(size_t width, size_t height) {
Lattice result;
result.width = width;
result.height = height;
result.data = calloc(width * height, sizeof(spin_t));
return result;
}
/* Release the spin storage of a lattice created by allocateLattice(). */
void deallocateLattice(Lattice l) {
    free(l.data);
}
/*
 * Map (x, y) to the flat storage index, wrapping out-of-range coordinates
 * so the lattice is periodic. The coordinates are unsigned, so a caller's
 * "x - 1" at x == 0 arrives here as SIZE_MAX; adding the width before the
 * modulo makes that wrap around to width - 1 as intended.
 */
size_t getIndex(Lattice lattice, size_t x, size_t y) {
    const size_t wrappedX = (x + lattice.width) % lattice.width;
    const size_t wrappedY = (y + lattice.height) % lattice.height;
    return wrappedX * lattice.height + wrappedY;
}
/* Read the spin at (x, y); coordinates wrap (periodic boundaries). */
spin_t get(Lattice lattice, size_t x, size_t y) {
    return lattice.data[getIndex(lattice, x, y)];
}
/*
 * Store `value` at (x, y) (coordinates wrap) and return the stored value.
 * Bug fix: the function is declared to return spin_t but previously fell
 * off the end without a return statement, which is undefined behaviour if
 * a caller ever uses the result.
 */
spin_t set(Lattice lattice, size_t x, size_t y, spin_t value) {
    lattice.data[getIndex(lattice, x, y)] = value;
    return value;
}
/* Set every site of the lattice to the same spin value. */
void fillGroundState(Lattice lattice, spin_t spin) {
    for (size_t x = 0; x < lattice.width; ++x) {
        for (size_t y = 0; y < lattice.height; ++y) {
            set(lattice, x, y, spin);
        }
    }
}
/* Reverse the spin at (x, y). */
void flip(Lattice lattice, size_t x, size_t y) {
    set(lattice, x, y, - get(lattice, x, y));
}
/*
 * Sum s(x,y) * s(neighbour) over all four neighbours of every site, with
 * periodic boundaries. Every site contributes a term for each of its four
 * neighbours, so each bond is counted twice in the total.
 */
energy_t calcEnergy(Lattice lattice) {
    energy_t energy = 0;
    for (size_t x = 0; x < lattice.width; ++x) {
        for (size_t y = 0; y < lattice.height; ++y) {
            const spin_t s = get(lattice, x, y);
            energy += s * get(lattice, x + 1, y);
            energy += s * get(lattice, x - 1, y);
            energy += s * get(lattice, x, y + 1);
            energy += s * get(lattice, x, y - 1);
        }
    }
    return energy;
}
/*
 * Boltzmann factor exp(-beta * E) of the current configuration, with E
 * from calcEnergy(). Unnormalised: tryFlipOneSpin() normalises these
 * weights over the candidate configurations.
 */
weight_t calcWeight(Lattice lattice, weight_t beta) {
    energy_t energy = calcEnergy(lattice);
    return exp(- beta * energy);
}
/*
 * One update step: for every site, compute the Boltzmann weight the
 * configuration would have if only that spin were flipped, normalise the
 * weights into a probability distribution, and flip one site sampled from
 * it by inverse-CDF sampling.
 *
 * `buffer` must hold width * height weight_t slots; it is scratch space
 * supplied by the caller so one allocation can be reused across updates.
 *
 * NOTE(review): each candidate weight calls calcWeight(), which rescans
 * the whole lattice, so one update costs O((width*height)^2); an
 * incremental energy delta would be far cheaper. Also, if floating-point
 * rounding leaves the cumulative sum just below the random draw, the
 * sampling loop can finish without flipping anything — confirm that is
 * acceptable.
 */
void tryFlipOneSpin(Lattice lattice, weight_t beta, weight_t* buffer) {
    size_t x, y;

    /* Weight of each single-spin-flip candidate configuration. */
    for (x = 0; x < lattice.width; ++x) {
        for (y = 0; y < lattice.height; ++y) {
            flip(lattice, x, y);
            weight_t weight = calcWeight(lattice, beta);
            buffer[getIndex(lattice, x, y)] = weight;
            flip(lattice, x, y);  /* undo: restore the original state */
        }
    }

    /* Normalise the weights into probabilities. */
    weight_t sum = 0;
    for (x = 0; x < lattice.width; ++x)
        for (y = 0; y < lattice.height; ++y)
            sum += buffer[getIndex(lattice, x, y)];

    for (x = 0; x < lattice.width; ++x)
        for (y = 0; y < lattice.height; ++y)
            buffer[getIndex(lattice, x, y)] /= sum;

    /* Flip the site whose cumulative probability first exceeds the draw. */
    double r = uniformRandom();
    sum = 0;
    for (x = 0; x < lattice.width; ++x) {
        for (y = 0; y < lattice.height; ++y) {
            sum += buffer[getIndex(lattice, x, y)];
            if (sum > r) {
                flip(lattice, x, y);
                return;
            }
        }
    }
}
/*
 * Repeatedly apply tryFlipOneSpin() at inverse temperature `beta` until
 * the total energy has stayed unchanged for `max_consequential_hits`
 * consecutive updates, which is used as the stopping signal.
 *
 * NOTE(review): an unchanged energy does not prove equilibrium — flips
 * between equal-energy states also keep the counter growing; confirm this
 * heuristic is adequate for the intended lattice sizes.
 */
void evolveIntoThermalState(Lattice lattice, weight_t beta, size_t max_consequential_hits) {
    energy_t energy = calcEnergy(lattice);
    /* Scratch buffer reused by every tryFlipOneSpin() call. */
    weight_t *buffer = calloc(lattice.width * lattice.height, sizeof(weight_t));
    size_t consequential_hits = 0;
    for (;;) {
        tryFlipOneSpin(lattice, beta, buffer);
        energy_t newEnergy = calcEnergy(lattice);
        if (energy == newEnergy)
            ++consequential_hits;
        else
            consequential_hits = 0;
        if (consequential_hits >= max_consequential_hits)
            break;
        energy = newEnergy;
    }
    free(buffer);
}
/* Write the lattice as rows of '+' (spin up) and '-' (any other value). */
void printLattice(Lattice lattice, FILE* file_descriptor) {
    for (size_t x = 0; x < lattice.width; ++x) {
        for (size_t y = 0; y < lattice.height; ++y) {
            fputc(get(lattice, x, y) == 1 ? '+' : '-', file_descriptor);
        }
        fputc('\n', file_descriptor);
    }
}
/*
 * Demo: build a 5x5 lattice, show its all-down configuration and energy,
 * evolve it at beta = 1 and show the resulting state and energy.
 *
 * Bug fix: energy_t is int64_t, and printing it with "%lld" is undefined
 * behaviour on platforms where int64_t is `long` rather than `long long`;
 * PRId64 from <inttypes.h> (already included) is the portable format.
 */
int main(int argc, char *argv[]) {
    srand(time(NULL));

    Lattice lattice = allocateLattice(5, 5);

    fillGroundState(lattice, SPIN_DOWN);
    printf("Lattice at the ground state:\n");
    printLattice(lattice, stdout);
    energy_t groundEnergy = calcEnergy(lattice);
    printf("Energy: %" PRId64 "\n", groundEnergy);

    evolveIntoThermalState(lattice, 1, 3);
    printf("Lattice at the thermal state:\n");
    printLattice(lattice, stdout);
    energy_t thermalEnergy = calcEnergy(lattice);
    printf("Energy: %" PRId64 "\n", thermalEnergy);

    deallocateLattice(lattice);
    return 0;
}
<file_sep>/Makefile
# Debug build (default target): no optimisation, debug symbols.
# Note: -lm must come *after* the sources — with linkers that resolve
# left-to-right (e.g. ld with --as-needed) listing it first drops libm.
ising: Ising.c
	gcc -g -O0 -o ising Ising.c -lm

# Optimised build.
release: Ising.c
	gcc -O2 -o ising Ising.c -lm

clean:
	rm -f ising

# release/clean never produce files of their own name; the old .PHONY list
# named a nonexistent "build" target instead of "release".
.PHONY: release clean
| 3872d714b2402b254e14eba059ea2cad675141b8 | [
"C",
"Makefile"
] | 2 | C | caphindsight/Ising | f397f7e50466b1482bf8038da36bcd89a76d1ecd | 77ec4f32c8cc7a759d04d78bc059469658bd041b |
refs/heads/master | <file_sep>module github.com/spotahome/redis-operator
go 1.17
require (
github.com/go-redis/redis v6.15.9+incompatible
github.com/prometheus/client_golang v1.11.0
github.com/sirupsen/logrus v1.8.1
github.com/spotahome/kooper/v2 v2.1.0
github.com/stretchr/testify v1.7.0
k8s.io/api v0.22.2
k8s.io/apiextensions-apiserver v0.22.2
k8s.io/apimachinery v0.22.2
k8s.io/client-go v0.22.2
)
require (
github.com/beorn7/perks v1.0.1 // indirect
github.com/cespare/xxhash/v2 v2.1.2 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/evanphx/json-patch v4.11.0+incompatible // indirect
github.com/go-logr/logr v1.1.0 // indirect
github.com/gogo/protobuf v1.3.2 // indirect
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
github.com/golang/protobuf v1.5.2 // indirect
github.com/google/go-cmp v0.5.6 // indirect
github.com/google/gofuzz v1.2.0 // indirect
github.com/google/uuid v1.3.0 // indirect
github.com/googleapis/gnostic v0.5.5 // indirect
github.com/imdario/mergo v0.3.12 // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/prometheus/client_model v0.2.0 // indirect
github.com/prometheus/common v0.31.1 // indirect
github.com/prometheus/procfs v0.7.3 // indirect
github.com/spf13/pflag v1.0.5 // indirect
github.com/stretchr/objx v0.3.0 // indirect
golang.org/x/net v0.0.0-20211006190231-62292e806868 // indirect
golang.org/x/oauth2 v0.0.0-20211005180243-6b3c2da341f1 // indirect
golang.org/x/sys v0.0.0-20211006225509-1a26e0398eed // indirect
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211 // indirect
golang.org/x/text v0.3.7 // indirect
golang.org/x/time v0.0.0-20210723032227-1f47c861a9ac // indirect
google.golang.org/appengine v1.6.7 // indirect
google.golang.org/protobuf v1.27.1 // indirect
gopkg.in/inf.v0 v0.9.1 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect
k8s.io/klog/v2 v2.20.0 // indirect
k8s.io/kube-openapi v0.0.0-20210929172449-94abcedd1aa4 // indirect
k8s.io/utils v0.0.0-20210930125809-cb0fa318a74b // indirect
sigs.k8s.io/structured-merge-diff/v4 v4.1.2 // indirect
sigs.k8s.io/yaml v1.3.0 // indirect
)
<file_sep>package metrics
import (
"github.com/prometheus/client_golang/prometheus"
koopercontroller "github.com/spotahome/kooper/v2/controller"
kooperprometheus "github.com/spotahome/kooper/v2/metrics/prometheus"
)
const (
// promControllerSubsystem is the Prometheus subsystem under which all
// controller-level metrics (e.g. cluster_ok) are registered.
promControllerSubsystem = "controller"
)
// Recorder is the interface that collects the operator's metrics and has the
// ability to send/expose them. It embeds kooper's MetricsRecorder so the same
// object can also record the generic controller metrics.
type Recorder interface {
koopercontroller.MetricsRecorder
// SetClusterOK marks the cluster identified by (namespace, name) as healthy.
SetClusterOK(namespace string, name string)
// SetClusterError marks the cluster identified by (namespace, name) as failing.
SetClusterError(namespace string, name string)
// DeleteCluster removes the metrics series for the given cluster.
DeleteCluster(namespace string, name string)
}
// recorder implements Recorder so the metrics can be managed by Prometheus.
type recorder struct {
// Metrics fields.
clusterOK *prometheus.GaugeVec // per-cluster health gauge: 1 = OK, 0 = error (see SetClusterOK/SetClusterError)
koopercontroller.MetricsRecorder
}
// NewRecorder returns a Recorder backed by Prometheus, registering its
// metrics on the given registerer under the provided namespace.
func NewRecorder(namespace string, reg prometheus.Registerer) Recorder {
// Create metrics.
clusterOK := prometheus.NewGaugeVec(prometheus.GaugeOpts{
Namespace: namespace,
Subsystem: promControllerSubsystem,
Name:      "cluster_ok",
// NOTE(review): this help text reads like a cluster count, but the gauge
// actually tracks per-cluster OK status (1/0) — confirm intended wording
// before changing, since Help is exposed at scrape time.
Help:      "Number of failover clusters managed by the operator.",
}, []string{"namespace", "name"})

// Create the instance; kooper's Prometheus recorder handles the generic
// controller metrics, clusterOK is our operator-specific gauge.
r := recorder{
clusterOK: clusterOK,
MetricsRecorder: kooperprometheus.New(kooperprometheus.Config{
Registerer: reg,
}),
}

// Register metrics.
reg.MustRegister(
r.clusterOK,
)

return r
}
// SetClusterOK sets the cluster status gauge to OK (1) for the given cluster.
func (r recorder) SetClusterOK(namespace string, name string) {
r.clusterOK.WithLabelValues(namespace, name).Set(1)
}
// SetClusterError sets the cluster status gauge to Error (0) for the given cluster.
func (r recorder) SetClusterError(namespace string, name string) {
r.clusterOK.WithLabelValues(namespace, name).Set(0)
}
// DeleteCluster removes the cluster_ok series for the given cluster so a
// stale status is no longer exported once the cluster is unmanaged.
// (The previous comment, "set the cluster status to Error", was a
// copy-paste mistake.)
func (r recorder) DeleteCluster(namespace string, name string) {
r.clusterOK.DeleteLabelValues(namespace, name)
}
| ec7ffe709665d9f0f77ca17c6eec9ab8fed1fb5b | [
"Go",
"Go Module"
] | 2 | Go Module | mrsrvman/redis-operator | 6e08a31232077f3e32319a7fde109fad005b8772 | 17fbbad7ff9aada638e80be89a1bf2d23db34498 |
refs/heads/master | <repo_name>hyunyouchoi/R-Studio<file_sep>/PD/R03_Merging_original.R
##############################################################################
## File Name: R03_Merging.R
## Author: KZ
## Date: 5/1/2017 Created
## Purpose: To import and merge BBCN and Wilshire data accoring to
## "03 - merging.sas"
##############################################################################
## Install any missing dependencies, then attach them.
requirements <- c("dplyr", "reshape2", "data.table", "zoo")
for (rr in requirements) {
  # Compare against installed package *names*. The previous check
  # (`rr %in% installed.packages()`) flattened the whole metadata matrix,
  # so a package name appearing in any cell counted as "installed".
  if (!rr %in% rownames(installed.packages())) install.packages(rr)
}

require(dplyr)
require(reshape2)
require(data.table)
require(zoo)

setwd("C:/Users/OL07805/Desktop/DFAST/")
## Import BBCN and Wilshire Data (SAS File 03, Line 1 to 49)
# Stack the two banks' loan-level extracts into a single panel.
df_final_bbcn <- read.csv("PD/Code to Create input file/df_final_bbcn.csv")
df_final_wilshire <- read.csv("PD/Code to Create input file/df_final_wilshire.csv")
df_boh <- rbind(df_final_bbcn, df_final_wilshire)

# Parse the three date columns from their ISO "YYYY-MM-DD" strings.
for (date_col in c("fileDate", "origination_date", "maturity_date")) {
  df_boh[[date_col]] <- as.Date(df_boh[[date_col]], "%Y-%m-%d")
}
## Expand data from 2016Q2 to 2018Q2 (SAS File 03, Line 123 to 174)
# Replicate the 2016 Q1 snapshot once per forecast quarter-end
# (2016-06-30 through 2018-06-30), restamping fileDate on each copy.
df_boh_1Q <- filter(df_boh, fileDate == "2016-03-31")
dateSeq <- seq(as.Date("2016-07-01"), as.Date("2018-07-01"), by = "quarter") - 1

df_boh_9Q <- do.call(rbind, lapply(dateSeq, function(qtr_end) {
  snapshot <- df_boh_1Q
  snapshot$fileDate <- qtr_end
  snapshot
}))

## Derive quarterly age/term, then drop projected observations past maturity.
df_boh_9Q$loan_age_q <- (as.yearqtr(df_boh_9Q$fileDate) -
                           as.yearqtr(df_boh_9Q$origination_date)) * 4
df_boh_9Q$term_q <- (as.yearqtr(df_boh_9Q$maturity_date) -
                       as.yearqtr(df_boh_9Q$origination_date)) * 4
df_boh_9Q <- filter(df_boh_9Q, loan_age_q <= term_q)

# Calendar fields and percent-of-book (loan age as a share of total term).
df_boh_9Q$year  <- year(df_boh_9Q$fileDate)
df_boh_9Q$month <- month(df_boh_9Q$fileDate)
df_boh_9Q$q     <- quarter(df_boh_9Q$fileDate)
df_boh_9Q$POB   <- 100 * df_boh_9Q$loan_age_q / df_boh_9Q$term_q
## combind data before 2016Q1 and after 2016Q1 (SAS File 03, Line 177 to 179)
# term_q was only needed for the maturity filter above; drop it so the
# projected rows have the same columns as the historical rows before rbind.
df_boh_9Q <- subset(df_boh_9Q, select = -c(term_q))
# Historical (development) sample: everything observed through 2016 Q1.
df_boh_dev <- filter(df_boh, as.Date(fileDate) <= as.Date("2016-03-31"))
df_boh_2018 <- rbind(df_boh_dev, df_boh_9Q)
## !! in the SAS code, Interim.df_boh_merged2 also contains data after 2016Q1
## I deleted those obs here
## add macroeconomic variables (SAS File 03, Line 184 to 231)
# For each supervisory scenario, read the macro series, keep the GDP/UR/HPI
# columns (levels plus lags), and left-join onto the loan panel by
# (year, quarter). Results land in df_boh_base / df_boh_adverse /
# df_boh_severe via assign(), which the save block below relies on.
for(scenario in c("base", "adverse", "severe")){
  print(paste0("==== ", scenario, " ===="))
  macro_var <- fread(paste0("Raw Data/Macrovars/", scenario, ".csv"))
  # NOTE: subset() with bare names on a data.table relies on non-standard
  # evaluation; keep as-is unless converting the whole pipeline.
  macro_var <- subset(macro_var, select = c(year, quarter,rgdp_qg_lag_2, CAUR_yd, CAUR_yd_lag_1, CAUR_yd_lag_2,
                                            CAUR_yd_lag_3, CAUR_yd_lag_4, CAHPI_ag, CAHPI_ag_lag_1, CAHPI_ag_lag_2,
                                            CAHPI_ag_lag_3, CAHPI_ag_lag_4))
  # Rename to match the loan panel's quarter column before merging.
  names(macro_var)[names(macro_var)=="quarter"] <- "q"
  # all.x = TRUE: keep every loan-quarter even if the macro file lacks it.
  df_boh_macro_var <- merge(x = df_boh_2018, y = macro_var, by = c("year","q"), all.x = TRUE)
  assign(paste0("df_boh_",scenario), df_boh_macro_var)
}
## Save data
# Persist each scenario panel both as .RData (object saved under its own
# name) and as a headerless-rownames CSV.
for (obj_name in c("df_boh_base", "df_boh_adverse", "df_boh_severe")) {
  save(list = obj_name, file = paste0(obj_name, ".RData"))
  write.csv(get(obj_name), file = paste0(obj_name, ".csv"), row.names = FALSE)
}
# Set the working directory
#setwd(dirname(rstudioapi::getActiveDocumentContext()$path))
setwd("C:/Users/ic07949/Desktop/KPMG/Model Development/development code and data/PD/Dataset/Wilshire")

# Required libraries for this code
library(openxlsx)   # read.xlsx() for the quarterly Excel extracts
library (lubridate) # mdy() for parsing the "MM/DD/YYYY" as-of date strings
# append the wilshire data together
#
# The original version repeated the same read/clean/tag stanza 37 times, once
# per quarterly extract from 2007 Q1 through 2016 Q1. This loop reproduces it
# exactly: same files, same row filtering, same as-of date column, same
# pre-2010 Total_commit placeholder insertion, same newest-first stacking
# order (2016 Q1, 2015 Q4, ..., 2007 Q1).

# Quarter-end month/day tags used in both the file names ("YYYY MMDD") and
# the as-of date strings ("MM/DD/YYYY").
qtr_mmdd <- c("0331", "0630", "0930", "1231")

# All (year, quarter) pairs from 2016 Q1 back to 2007 Q1, newest first.
quarter_specs <- list()
for (yy in 2016:2007) {
  top_q <- if (yy == 2016) 1 else 4
  for (qq in top_q:1) {
    quarter_specs[[length(quarter_specs) + 1]] <- c(yy, qq)
  }
}

# Read one quarterly extract, drop rows with a missing first column, and
# prepend the as-of date column.
read_wilshire_quarter <- function(yy, qq) {
  fname <- sprintf("Data as of %d %s_GLreconciled.xlsx", yy, qtr_mmdd[qq])
  df <- read.xlsx(fname, detectDates = FALSE, sheet = 1, startRow = 1,
                  colNames = TRUE, rowNames = FALSE, skipEmptyRows = TRUE)
  df <- df[!is.na(df[, 1]), ]
  if (yy == 2016) {
    # The 2016 Q1 extract carries an extra Rate.Adjuster column not present
    # in earlier quarters; drop it so all quarters line up.
    df$Rate.Adjuster <- NULL
  }
  date <- rep(sprintf("%s/%s/%d", substr(qtr_mmdd[qq], 1, 2),
                      substr(qtr_mmdd[qq], 3, 4), yy), nrow(df))
  df <- as.data.frame(cbind(date, df))
  if (yy <= 2009) {
    # Extracts before 2010 lack the Total_commit column; splice a "NA"
    # placeholder in after the first 8 columns, matching the 2010+ layout.
    Total_commit <- rep("NA", nrow(df))
    df <- as.data.frame(cbind(df[, c(1:8)], Total_commit,
                              df[, c(9:ncol(df))]))
  }
  df
}

quarter_frames <- lapply(quarter_specs,
                         function(s) read_wilshire_quarter(s[[1]], s[[2]]))

# Use the 2016 Q1 header as the canonical column set (downstream code keys
# off `column.names`) and force it onto every quarter before stacking.
column.names <- colnames(quarter_frames[[1]])
quarter_frames <- lapply(quarter_frames, function(f) {
  colnames(f) <- column.names
  f
})

df_final_wilshire <- do.call(rbind, quarter_frames)
##################################
# set the dates. The date origins for R SAS and Excel are different.
#This should be taken into account.
##################################
# fileDate was written as "MM/DD/YYYY" text; the two Excel date columns are
# serial numbers with the Excel epoch (1899-12-30).
filedate=mdy(df_final_wilshire$date)
Originationdate=as.Date(df_final_wilshire$Original.Note.Date, origin="1899-12-30")
maturitydate=as.Date(df_final_wilshire$Maturity.Date, origin="1899-12-30")
# Replace the raw text/serial columns with the parsed Date columns.
df_final_wilshire2=cbind(filedate, df_final_wilshire, maturitydate, Originationdate)
colnames(df_final_wilshire2)= c("filedate", column.names, "maturitydate", "originationdate")
df_final_wilshire2$date=NULL
df_final_wilshire2$Maturity.Date=NULL
df_final_wilshire2$Original.Note.Date=NULL
df_final_wilshire_sorted=df_final_wilshire2[order(df_final_wilshire2$Note.Number),]
indx=colnames(df_final_wilshire_sorted)
#change the DCR format
# NOTE(review): column 26 is assumed to be DCR.from.Stress.Test and columns
# 43/44 are assumed to be one past the last existing column — TODO confirm
# against the extract layout before reordering any columns upstream.
# Missing DCR values get a 10000 sentinel; values that fail as.numeric()
# (e.g. a trailing "x" suffix) have their last character stripped and are
# re-parsed. Column 43 is a scratch column dropped before renaming; the
# final parsed value survives as column "DCR".
df_final_wilshire_sorted[is.na(df_final_wilshire_sorted$DCR.from.Stress.Test), 26]=10000
df_final_wilshire_sorted[,43]=as.numeric(df_final_wilshire_sorted[,26])
df_final_wilshire_sorted[is.na(df_final_wilshire_sorted[,43]), 26]= substr(df_final_wilshire_sorted[is.na(df_final_wilshire_sorted[,43]), 26], 1, nchar(df_final_wilshire_sorted[is.na(df_final_wilshire_sorted[,43]), 26])-1)
df_final_wilshire_sorted[,44]=as.numeric(df_final_wilshire_sorted[,26])
df_final_wilshire_sorted=df_final_wilshire_sorted[,-43]
colnames(df_final_wilshire_sorted)=c(indx, "DCR")
colnames(df_final_wilshire_sorted)
##########################
# Export the data
##########################
# NOTE: write.csv() always writes a header row and ignores an explicit
# col.names argument (with a warning), so it is not passed here.
write.csv(df_final_wilshire_sorted, file = "df_final_wilshire_sorted.csv",
          row.names = FALSE)

##########################
#send the missing codes to Soo!
##########################
# Loans whose collateral code is one of the known-unmapped values.
df_error_collateral=df_final_wilshire_sorted[which(df_final_wilshire_sorted$Collateral.Code %in% c(75,83,371)),c(1,2,3,5,8,18)]
write.csv(df_error_collateral, file = 'missing collateral codes.csv',
          row.names = FALSE)
# Loans with the unmapped property type code 20.
df_error_property=df_final_wilshire_sorted[which(df_final_wilshire_sorted$Property.Type.Code==20),c(1,2,3,5,8,17)]
write.csv(df_error_property, file = 'missing property types code.csv',
          row.names = FALSE)

a=unique(df_error_collateral$Note.Number)
b=unique(df_error_property$Note.Number)
# NOTE(review): a note appearing in both lists is written twice — confirm
# whether the consumer expects unique(c(a, b)) instead.
d=c(a,b)
write.csv(d, file = 'accounts w missing codes.csv', row.names = FALSE)
<file_sep>/PD/PD_CRE_PROFILE.R
##########
# Profiling Plots
##########
# Start
# Plot average PD by risk rating: overlays the average *actual* default rate
# per rating (line) on the observation count per rating (bars), and saves
# the chart as a PDF.
# Risk rating: collapse the two rating dummies into an R1/R2/R3 label.
cre_dev_training$risk_rate <- ifelse(cre_dev_training$boh_rating1_R1==1,"R1",ifelse(cre_dev_training$boh_rating1_R2==1,"R2","R3"))

# Making average actual default by risk rating: defaults and totals per
# (quarter, rating), then the per-cell default share.
defaulters_risk <- cre_dev_training %>% group_by(fileDate, risk_rate) %>% summarise(Defaulters = sum(y)) %>% data.frame()
total_risk <- cre_dev_training %>% group_by(fileDate, risk_rate) %>% count() %>% data.frame()
defaulters_risk <- merge(defaulters_risk, total_risk)
colnames(defaulters_risk) <- c("fileDate", "Risk","Default","Total")
defaulters_risk <- defaulters_risk %>% group_by(fileDate, Risk) %>% mutate(default_per = Default/Total) %>% data.frame()
head(defaulters_risk)

# Make actual nondefault by risk rating, then the conditional default rate:
# this period's default share over last period's non-default share.
nondefault_risk <- cre_dev_training %>% group_by(fileDate, risk_rate) %>% filter(y==0) %>% count() %>% data.frame()
colnames(nondefault_risk) <- c("fileDate", "Risk","NonDefault")
risk_df <- merge(x = defaulters_risk, y = nondefault_risk, by.x = c("fileDate","Risk"), by.y = c("fileDate","Risk"))
risk_df <- risk_df %>% group_by(fileDate,Risk) %>% mutate(nondefault_per = NonDefault/Total) %>% data.frame()
# NOTE(review): lag() here relies on merge() having sorted rows by fileDate
# within each rating — confirm if the by columns ever change.
risk_df <- risk_df %>% group_by(Risk) %>% mutate(pd_actual = default_per/lag(nondefault_per)) %>% data.frame() %>% na.omit()
head(risk_df)

pd_actual_risk <- aggregate(risk_df$pd_actual, list(risk_df$Risk), mean)
colnames(pd_actual_risk) <- c("Risk","PD_Actual")

##########
# Plot Average PD by Risk Rating
# Observations per bin
risk_rate_in_graph <- as.data.frame(count(cre_dev_training, risk_rate))
# Mean PD of each bin (mean fitted p_hat per rating)
risk_rate_in_graph$mean <- aggregate(cre_dev_training$p_hat, list(cre_dev_training$risk_rate), mean)[,2]
colnames(risk_rate_in_graph) <- c("Risk","Observations","Mean")

df <- merge(risk_rate_in_graph, pd_actual_risk)
green <- rgb(.5, 1, .5, alpha=0.2)  # translucent fill for the count bars

pdf(paste("./R output", paste("Default Rate - ",colnames(df)[1],"_PROF.pdf", sep=""),sep ="/"), height = 5, width = 10)
par(mar = c(5,5,2,5))
par(new = T)
# Line: actual PD by rating (left axis); bars: observation counts (right axis).
with(df, plot(as.numeric(row.names(df)), PD_Actual, col="blue", xaxt = "n",type = "l", lwd=2,
              ylab="", xlab="", ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
bp <- with(df, barplot(df$Observations,axes=F, xlab=NA, ylab=NA, col=green))
axis(side = 4)
mtext(side = 4, line = 3, 'Observation Count')
legend("topleft",
       legend=c("Average PD","Obs."),
       lty=1,lwd=5, col=c("blue", green))
axis(1, at=bp, labels=df[,1])
dev.off()
##########
# End
# Plot average PD by risk rating
##########
##########
# Start
# Plot average PD by CA UR: same layout as the risk-rating profile, but the
# grouping variable is the California unemployment-rate change cut into bins.
# CA UR Bins (4 equal-width bins over CAUR_yd)
cre_dev_training$caur_bins <- cut(cre_dev_training$CAUR_yd,breaks = 4)

# Make actual default: defaults and totals per (quarter, bin).
defaulters_ur <- cre_dev_training %>% group_by(fileDate, caur_bins) %>% summarise(Defaulters = sum(y)) %>% data.frame()
total_ur <- cre_dev_training %>% group_by(fileDate, caur_bins) %>% count() %>% data.frame()
defaulters_ur <- merge(defaulters_ur, total_ur)
colnames(defaulters_ur) <- c("fileDate", "UR","Default","Total")
defaulters_ur <- defaulters_ur %>% group_by(fileDate, UR) %>% mutate(default_per = Default/Total) %>% data.frame()
head(defaulters_ur)

# Make actual nondefault, then actual PD = this period's default share over
# last period's non-default share within each bin.
nondefault_ur <- cre_dev_training %>% group_by(fileDate, caur_bins) %>% filter(y==0) %>% count() %>% data.frame()
colnames(nondefault_ur) <- c("fileDate", "UR","NonDefault")
ur_df <- merge(x = defaulters_ur, y = nondefault_ur, by.x = c("fileDate","UR"), by.y = c("fileDate","UR"))
ur_df <- ur_df %>% group_by(fileDate,UR) %>% mutate(nondefault_per = NonDefault/Total) %>% data.frame()
# NOTE(review): lag() relies on merge() sorting rows by fileDate within bin.
ur_df <- ur_df %>% group_by(UR) %>% mutate(pd_actual = default_per/lag(nondefault_per)) %>% data.frame() %>% na.omit()
head(ur_df)

ur_pd_actual <- aggregate(ur_df$pd_actual, list(ur_df$UR), mean)
colnames(ur_pd_actual) <- c("UR","PD_Actual")

# Observations per bin
ur_in_graph <- as.data.frame(count(cre_dev_training, caur_bins))
# Mean PD of each bin (mean fitted p_hat)
ur_in_graph$mean <- aggregate(cre_dev_training$p_hat, list(cre_dev_training$caur_bins), mean)[,2]
colnames(ur_in_graph) <- c("UR","Observations","Mean")

df <- merge(ur_in_graph, ur_pd_actual)
green <- rgb(.5, 1, .5, alpha=0.2)  # translucent fill for the count bars

pdf(paste("./R output", paste("Default Rate - ",colnames(df)[1],"_PROF.pdf", sep=""),sep ="/"), height = 5, width = 10)
par(mar = c(5,5,2,5))
par(new = T)
# Line: actual PD per bin (left axis); bars: observation counts (right axis).
with(df, plot(as.numeric(row.names(df)), PD_Actual, col="blue", xaxt = "n",type = "l", lwd=2,
              ylab="", xlab="", ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
bp <- with(df, barplot(df$Observations,axes=F, xlab=NA, ylab=NA, col=green))
axis(side = 4)
mtext(side = 4, line = 3, 'Observation Count')
legend("topleft",
       legend=c("Average PD","Obs."),
       lty=1,lwd=5, col=c("blue", green))
axis(1, at=bp, labels=df[,1])
dev.off()
# End
# Plot average PD by CA UR
##########
##########
# Start
# Plot average PD by CA HPI: same layout as the CA UR profile, grouping by
# the California house-price-index growth cut into 3 bins.
# CA HPI Bins
cre_dev_training$cahpi_bins <- cut(cre_dev_training$CAHPI_ag,breaks = 3)

# Make actual default: defaults and totals per (quarter, bin).
defaulters_hpi <- cre_dev_training %>% group_by(fileDate, cahpi_bins) %>% summarise(Defaulters = sum(y)) %>% data.frame()
total_hpi <- cre_dev_training %>% group_by(fileDate, cahpi_bins) %>% count() %>% data.frame()
defaulters_hpi <- merge(defaulters_hpi, total_hpi)
colnames(defaulters_hpi) <- c("fileDate", "HPI","Default","Total")
defaulters_hpi <- defaulters_hpi %>% group_by(fileDate, HPI) %>% mutate(default_per = Default/Total) %>% data.frame()
head(defaulters_hpi)

# Make actual nondefault, then actual PD = this period's default share over
# last period's non-default share within each bin.
nondefault_hpi <- cre_dev_training %>% group_by(fileDate, cahpi_bins) %>% filter(y==0) %>% count() %>% data.frame()
colnames(nondefault_hpi) <- c("fileDate", "HPI","NonDefault")
hpi_df <- merge(x = defaulters_hpi, y = nondefault_hpi, by.x = c("fileDate","HPI"), by.y = c("fileDate","HPI"))
hpi_df <- hpi_df %>% group_by(fileDate,HPI) %>% mutate(nondefault_per = NonDefault/Total) %>% data.frame()
# NOTE(review): lag() relies on merge() sorting rows by fileDate within bin.
hpi_df <- hpi_df %>% group_by(HPI) %>% mutate(pd_actual = default_per/lag(nondefault_per)) %>% data.frame() %>% na.omit()
head(hpi_df)

hpi_pd_actual <- aggregate(hpi_df$pd_actual, list(hpi_df$HPI), mean)
colnames(hpi_pd_actual) <- c("HPI","PD_Actual")

# Observations per bin
hpi_in_graph <- as.data.frame(count(cre_dev_training, cahpi_bins))
# Mean PD of each bin (mean fitted p_hat)
hpi_in_graph$mean <- aggregate(cre_dev_training$p_hat, list(cre_dev_training$cahpi_bins), mean)[,2]
colnames(hpi_in_graph) <- c("HPI","Observations","Mean")

df <- merge(hpi_in_graph, hpi_pd_actual)
green <- rgb(.5, 1, .5, alpha=0.2)  # translucent fill for the count bars

pdf(paste("./R output", paste("Default Rate - ",colnames(df)[1],"_PROF.pdf", sep=""),sep ="/"), height = 5, width = 10)
par(mar = c(5,5,2,5))
par(new = T)
# Line: actual PD per bin (left axis); bars: observation counts (right axis).
with(df, plot(as.numeric(row.names(df)), PD_Actual, col="blue", xaxt = "n",type = "l", lwd=2,
              ylab="", xlab="", ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
bp <- with(df, barplot(df$Observations,axes=F, xlab=NA, ylab=NA, col=green))
axis(side = 4)
mtext(side = 4, line = 3, 'Observation Count')
legend("topleft",
       legend=c("Average PD","Obs."),
       lty=1,lwd=5, col=c("blue", green))
axis(1, at=bp, labels=df[,1])
dev.off()
# End
# Plot average PD by CA HPI
##########
##########
# Start
# Plot average PD by GDP: same layout, grouping by the (negated, lagged)
# real-GDP growth cut into 2 bins.
# GDP Bins
cre_dev_training$gdp_bins <- cut(cre_dev_training$rgdp_qg_lag_2_neg, breaks = 2)

# Make actual default: defaults and totals per (quarter, bin).
defaulters_gdp <- cre_dev_training %>% group_by(fileDate, gdp_bins) %>% summarise(Defaulters = sum(y)) %>% data.frame()
total_gdp <- cre_dev_training %>% group_by(fileDate, gdp_bins) %>% count() %>% data.frame()
defaulters_gdp <- merge(defaulters_gdp, total_gdp)
colnames(defaulters_gdp) <- c("fileDate", "GDP","Default","Total")
defaulters_gdp <- defaulters_gdp %>% group_by(fileDate, GDP) %>% mutate(default_per = Default/Total) %>% data.frame()
head(defaulters_gdp)

# Make actual nondefault, then actual PD = this period's default share over
# last period's non-default share within each bin.
nondefault_gdp <- cre_dev_training %>% group_by(fileDate, gdp_bins) %>% filter(y==0) %>% count() %>% data.frame()
colnames(nondefault_gdp) <- c("fileDate", "GDP","NonDefault")
gdp_df <- merge(x = defaulters_gdp, y = nondefault_gdp, by.x = c("fileDate","GDP"), by.y = c("fileDate","GDP"))
gdp_df <- gdp_df %>% group_by(fileDate,GDP) %>% mutate(nondefault_per = NonDefault/Total) %>% data.frame()
# NOTE(review): lag() relies on merge() sorting rows by fileDate within bin.
gdp_df <- gdp_df %>% group_by(GDP) %>% mutate(pd_actual = default_per/lag(nondefault_per)) %>% data.frame() %>% na.omit()
head(gdp_df)

gdp_pd_actual <- aggregate(gdp_df$pd_actual, list(gdp_df$GDP), mean)
colnames(gdp_pd_actual) <- c("GDP","PD_Actual")

# Observations per bin
gdp_in_graph <- as.data.frame(count(cre_dev_training, gdp_bins))
# Mean PD of each bin (mean fitted p_hat)
gdp_in_graph$mean <- aggregate(cre_dev_training$p_hat, list(cre_dev_training$gdp_bins), mean)[,2]
colnames(gdp_in_graph) <- c("GDP","Observations","Mean")

# all.x = TRUE keeps bins with no actual-PD estimate, hence the na.rm below.
df <- merge(gdp_in_graph, gdp_pd_actual, all.x = T)
green <- rgb(.5, 1, .5, alpha=0.2)  # translucent fill for the count bars

pdf(paste("./R output", paste("Default Rate - ",colnames(df)[1],"_PROF.pdf", sep=""),sep ="/"), height = 5, width = 10)
par(mar = c(5,5,2,5))
par(new = T)
# Line: actual PD per bin (left axis); bars: observation counts (right axis).
with(df, plot(as.numeric(row.names(df)), PD_Actual, col="blue", xaxt = "n",type = "l", lwd=2,
              ylab="", xlab="", ylim=c(0,max(apply(df[,3:4], 2, function(x) max(x, na.rm=T))))))
par(new = T)
bp <- with(df, barplot(df$Observations,axes=F, xlab=NA, ylab=NA, col=green))
axis(side = 4)
mtext(side = 4, line = 3, 'Observation Count')
legend("topleft",
       legend=c("Average PD","Obs."),
       lty=1,lwd=5, col=c("blue", green))
axis(1, at=bp, labels=df[,1])
dev.off()
# End
# Plot average PD by GDP
##########
##########
# Start
# Plot average PD by Prop Type: same layout, grouping by the categorical
# property_type column (no binning needed).
# Make actual default: defaults and totals per (quarter, property type).
defaulters_prop <- cre_dev_training %>% group_by(fileDate, property_type) %>% summarise(Defaulters = sum(y)) %>% data.frame()
total_prop <- cre_dev_training %>% group_by(fileDate, property_type) %>% count() %>% data.frame()
defaulters_prop <- merge(defaulters_prop, total_prop)
colnames(defaulters_prop) <- c("fileDate", "Prop_type","Default","Total")
defaulters_prop <- defaulters_prop %>% group_by(fileDate, Prop_type) %>% mutate(default_per = Default/Total) %>% data.frame()
head(defaulters_prop)

# Make actual nondefault, then actual PD = this period's default share over
# last period's non-default share within each property type.
nondefault_prop <- cre_dev_training %>% group_by(fileDate, property_type) %>% filter(y==0) %>% count() %>% data.frame()
colnames(nondefault_prop) <- c("fileDate", "Prop_type","NonDefault")
prop_df <- merge(x = defaulters_prop, y = nondefault_prop, by.x = c("fileDate","Prop_type"), by.y = c("fileDate","Prop_type"))
prop_df <- prop_df %>% group_by(fileDate,Prop_type) %>% mutate(nondefault_per = NonDefault/Total) %>% data.frame()
# NOTE(review): lag() relies on merge() sorting rows by fileDate within type.
prop_df <- prop_df %>% group_by(Prop_type) %>% mutate(pd_actual = default_per/lag(nondefault_per)) %>% data.frame() %>% na.omit()
head(prop_df)

prop_pd_actual <- aggregate(prop_df$pd_actual, list(prop_df$Prop_type), mean)
colnames(prop_pd_actual) <- c("Prop_type","PD_Actual")

# Observations per bin
prop_in_graph <- as.data.frame(count(cre_dev_training, property_type))
# Mean PD of each bin (mean fitted p_hat)
prop_in_graph$mean <- aggregate(cre_dev_training$p_hat, list(cre_dev_training$property_type), mean)[,2]
colnames(prop_in_graph) <- c("Prop_type","Observations","Mean")

# all.x = TRUE keeps types with no actual-PD estimate, hence the na.rm below.
df <- merge(prop_in_graph, prop_pd_actual, all.x = T)
green <- rgb(.5, 1, .5, alpha=0.2)  # translucent fill for the count bars

pdf(paste("./R output", paste("Default Rate - ",colnames(df)[1],"_PROF.pdf", sep=""),sep ="/"), height = 5, width = 10)
par(mar = c(5,5,2,5))
par(new = T)
# Line: actual PD per type (left axis); bars: observation counts (right axis).
with(df, plot(as.numeric(row.names(df)), PD_Actual, col="blue", xaxt = "n",type = "l", lwd=2,
              ylab="", xlab="", ylim=c(0,max(apply(df[,3:4], 2, function(x) max(x, na.rm=T))))))
par(new = T)
bp <- with(df, barplot(df$Observations,axes=F, xlab=NA, ylab=NA, col=green))
axis(side = 4)
mtext(side = 4, line = 3, 'Observation Count')
legend("topleft",
       legend=c("Average PD","Obs."),
       lty=1,lwd=5, col=c("blue", green))
axis(1, at=bp, labels=df[,1])
dev.off()
# End
# Plot average PD by Prop type
##########
##########
# Start
# Plot outsample average PD by POB (percent-of-book / loan seasoning bins)
#POB Bins
cre_dev_outsample$pob_bins <- cut(cre_dev_outsample$POB,breaks = 6)
# Make actual default
defaulters_pob <- cre_dev_outsample %>% group_by(fileDate, pob_bins) %>% summarise(Defaulters = sum(y)) %>% data.frame()
total_pob <- cre_dev_outsample %>% group_by(fileDate, pob_bins) %>% count() %>% data.frame()
defaulters_pob <- merge(defaulters_pob, total_pob)
colnames(defaulters_pob) <- c("fileDate", "POB","Default","Total")
defaulters_pob <- defaulters_pob %>% group_by(fileDate, POB) %>% mutate(default_per = Default/Total) %>% data.frame()
head(defaulters_pob)
#Make actual nondefault
nondefault_pob <- cre_dev_outsample %>% group_by(fileDate, pob_bins) %>% filter(y==0) %>% count() %>% data.frame()
colnames(nondefault_pob) <- c("fileDate", "POB","NonDefault")
pob_df <- merge(x = defaulters_pob, y = nondefault_pob, by.x = c("fileDate","POB"), by.y = c("fileDate","POB"))
pob_df <- pob_df %>% group_by(fileDate,POB) %>% mutate(nondefault_per = NonDefault/Total) %>% data.frame()
# Realized PD: default rate over the prior period's non-default share per bin.
pob_df <- pob_df %>% group_by(POB) %>% mutate(pd_actual = default_per/lag(nondefault_per)) %>% data.frame() %>% na.omit()
head(pob_df)
pob_pd_actual <- aggregate(pob_df$pd_actual, list(pob_df$POB), mean)
colnames(pob_pd_actual) <- c("POB","PD_Actual")
#Observations per bin
pob_in_graph <- as.data.frame(count(cre_dev_outsample, pob_bins))
# Mean PD of each bin
pob_in_graph$mean <- aggregate(cre_dev_outsample$p_hat, list(cre_dev_outsample$pob_bins), mean)[,2]
colnames(pob_in_graph) <- c("POB","Observations","Mean")
# FIX: use all.x = T so POB bins without a realized-PD estimate are kept,
# consistent with the GDP and property-type sections above.
df <- merge(pob_in_graph, pob_pd_actual, all.x = T)
green <- rgb(.5, 1, .5, alpha=0.2)
pdf(paste("./R output", paste("Default Rate - ",colnames(df)[1],"_PROF.pdf", sep=""),sep ="/"), height = 5, width = 10)
par(mar = c(5,5,2,5))
par(new = T)
# FIX: na.rm = T in the ylim computation (the other sections have it); without
# it any NA in Mean/PD_Actual makes ylim NA and plot() errors out.
with(df, plot(as.numeric(row.names(df)), PD_Actual, col="blue", xaxt = "n",type = "l", lwd=2,
ylab="", xlab="", ylim=c(0,max(apply(df[,3:4], 2, function(x) max(x, na.rm=T))))))
par(new = T)
# Green bars: observation counts on the right-hand axis.
bp <- with(df, barplot(df$Observations,axes=F, xlab=NA, ylab=NA, col=green))
axis(side = 4)
mtext(side = 4, line = 3, 'Observation Count')
legend("topleft",
legend=c("Average PD","Obs."),
lty=1,lwd=5, col=c("blue", green))
axis(1, at=bp, labels=df[,1])
dev.off()
# End
# Plot outsample average PD by POB
##########
<file_sep>/S3_00_dev-support.R
### General Purpose Utilities ##################################################
zip_to_list = function(a, b) {
  # Pair two parallel sequences into a named list: names come from `a`,
  # values are the matching single-bracket elements of `b`. A duplicate
  # name in `a` overwrites the earlier entry, as with named list assignment.
  pairs = list()
  for (idx in seq_along(a)) {
    pairs[[a[[idx]]]] = b[idx]
  }
  pairs
}
rm_blanks = function(u) {
  # Drop empty-string ("") entries from a vector; return u unchanged when
  # there are none (u[-integer(0)] would otherwise drop everything).
  blank_idx = which(u == "")
  if (length(blank_idx) > 0) {
    u = u[-blank_idx]
  }
  u
}
concat = function(...) {
  # Concatenate the arguments with no separator (shorthand for paste0).
  paste0(...)
}
get_excel = function(file, sheet) {
# Thin wrapper around openxlsx::read.xlsx applying the project's standard
# options: header row at row 1, keep empty rows/cols, detect Excel dates,
# make syntactic column names, and treat "NA" strings as missing.
# needs openxlsx package
read.xlsx(
file
, sheet=sheet
, colNames=TRUE
, startRow=1
, skipEmptyRows=FALSE
, skipEmptyCols=FALSE
, detectDates=TRUE
, check.names=TRUE
, na.strings="NA"
)
}
stack = function(..., labels=NA) {
  # Row-bind an arbitrary number of data frames whose column sets may differ.
  # Columns missing from an input are filled with NA (numeric columns) or
  # "<NA>" (non-numeric columns). A "label" column tags each row with its
  # source frame when `labels` supplies one entry per input frame.
  # NOTE: this masks utils::stack for the rest of the session.
  df_list = list(...)
  m = length(df_list)
  n = 0
  df_n_list = list()
  # Record each frame's row count and the total output size.
  for (i in 1:m) {
    k = dim(df_list[[i]])[1]
    df_n_list[[i]] = k
    n = n + k
  }
  # Collect all column names and whether each column is numeric.
  is_numeric_list = list()
  name_vec = c()
  for (i in 1:m) {
    test_df = df_list[[i]]
    for (name in names(test_df)) {
      # FIX: use [[ ]] to test the column vector itself. is.numeric() on a
      # one-column data.frame (test_df[name]) is always FALSE, which made
      # every output column get allocated — and coerced — as character.
      is_numeric_list[name] = is.numeric(test_df[[name]])
    }
    name_vec = c(names(test_df), name_vec)
  }
  name_vec = unique(name_vec)
  # Add missing columns to each input with a type-appropriate fill value.
  for (i in 1:m) {
    col_names = names(is_numeric_list)
    for (name in col_names) {
      if (!(name %in% names(df_list[[i]]))) {
        if (is_numeric_list[[name]]) {
          df_list[[i]][name] = NA
        } else {
          df_list[[i]][name] = "<NA>"
        }
      }
    }
  }
  # Pre-allocate the stacked frame; the dummy column pins the row count.
  out_df = data.frame(dummy=numeric(n))
  for(name in name_vec) {
    if (is_numeric_list[[name]]) {
      out_df[name] = numeric(n)
    } else {
      out_df[name] = character(n)
    }
  }
  # FIX: drop=FALSE so a single remaining column is not collapsed to a vector.
  out_df = out_df[, !(names(out_df) %in% "dummy"), drop=FALSE]
  out_df["label"] = "<NA>"
  # Populate the output row by row from each source frame in turn.
  start_row = 0
  for (i in 1:m) {
    rows = df_n_list[[i]]
    curr_df = df_list[[i]]
    for (j in 1:rows) {
      k = start_row + j
      out_df[k, name_vec] = curr_df[j, name_vec]
      if (length(labels) == m) {
        out_df[k, "label"] = labels[i]
      }
    }
    start_row = start_row + rows
  }
  out_df
}
### Transformation #############################################################
delta = function(y, lag=1) {
# Lagged difference y[t] - y[t-lag], right-aligned and NA-padded so the
# result keeps the same length as y.
# needs zoo package
rollapply(y, lag+1, function(x) {diff(x, lag=lag)}, fill=NA, align="right")
}
log_diff = function(y, lag=1) {
# Log-difference log(y[t]) - log(y[t-lag]) (approximate growth rate),
# NA-padded via delta().
# needs zoo package
delta(log(y), lag=lag)
}
ma = function(y, n=1) {
# Trailing (right-aligned) n-period moving average, NA-padded to length(y).
# needs zoo package
rollapply(y, n, mean, fill=NA, align="right")
}
gr = function(y, lag=1) {
# Percent growth over `lag` periods: 100 * (y[t] - y[t-lag]) / y[t-lag].
# `shift` is data.table's lag operator here.
100 * (delta(y, lag=lag)/shift(y, n=lag))
}
### Error Calculations #########################################################
calc_rsq = function(y, yhat) {
  # Coefficient of determination: 1 - SSE/SST. Can be negative when yhat
  # predicts worse than the sample mean of y.
  ss_resid = sum((y - yhat)^2)
  ss_total = sum((y - mean(y))^2)
  1 - ss_resid / ss_total
}
calc_rmse = function(y, yhat) {
  # Root mean squared error between observed y and fitted yhat.
  sqrt(mean((y - yhat)^2))
}
calc_mad = function(y, yhat) {
  # Mean absolute deviation of the fit errors.
  mean(abs(y - yhat))
}
calc_mxad = function(y, yhat) {
  # Maximum absolute deviation (worst single-point error) of the fit.
  max(abs(y - yhat))
}
calc_mape = function(y, yhat) {
  # Mean absolute percentage error, expressed as a fraction (not x100).
  # Undefined (Inf/NaN) when any y is zero.
  mean(abs(yhat / y - 1))
}
### Binning ####################################################################
bin_interval_variable <- function(u, n=10) {
  # Build quantile-based interval bins for a numeric vector.
  # Returns a list with `breaks` (interior quantile cut points bracketed by
  # -Inf/Inf so cut() is open-ended) and `midpoints` (per-bin midpoints
  # computed from the observed min/max rather than the infinite bounds).
  if (!is.vector(u)) {
    u <- u[[1]]  # accept a one-column data frame / list via its first element
  }
  if (100 %% n == 0) {
    pct_increment = n/100
  } else {
    # FIX: this branch computed new_n but never set pct_increment, so any n
    # that does not divide 100 crashed with "object 'pct_increment' not found".
    print("Adjusted requested bin count to be a factor on 100.")
    new_n <- n - (100 %% n)
    pct_increment <- new_n/100
  }
  # NOTE(review): pct_increment = n/100 equals 1/n only when n == 10; for
  # other n the number of bins produced differs from n — confirm intent.
  pct_list <- seq(from=pct_increment, to =1 - pct_increment, by=pct_increment)
  # unique() collapses duplicate quantiles arising from ties in u.
  quantile_list <- unique(unname(quantile(u, pct_list)))
  # calculate bin midpoints with real data
  lo <- min(u)
  hi <- max(u)
  bin_values <- c(lo, quantile_list, hi)
  bin_len <- length(bin_values)
  midpoints <- rep(NA, bin_len - 1)
  for (i in 2:bin_len) {
    midpoints[i-1] <- mean(bin_values[(i-1):i])
  }
  breaks <- c(-Inf, quantile_list, Inf)
  bin_info <- list(
    breaks=breaks
    , midpoints=midpoints
  )
  bin_info
}
### Model Specific Functions ###################################################
get_bal_forecast = function(start_bal, scores) {
  # Compound a starting balance forward by a vector of log-growth scores:
  # balance[t] = start_bal * exp(score[1] + ... + score[t]).
  growth_path = exp(cumsum(scores))
  start_bal * growth_path
}
### Variable Selection #########################################################
calc_bic = function(y, yhat, k, family="binomial") {
  # Bayesian Information Criterion (lower is better).
  # y: observed response; yhat: fitted values (probabilities for binomial);
  # k: number of estimated coefficients; family: "binomial" or "gaussian".
  n = length(y)
  # FIX: was `family == "binomal"` (typo), so the default family never
  # matched and a call without family= reached return() with bic_cost unset.
  if (family == "binomial") {
    log_like = sum( y * log(yhat) + (1 - y) * log(1 - yhat))
    bic_cost = -2 * log_like + k * log(n)
  } else if (family == "gaussian") {
    sg2 = sum((y-yhat)^2)/n
    sg = sqrt(sg2)
    log_like = sum(log(dnorm(y, mean=yhat, sd=sg)))
    # k + 1 since we also had to estimate sigma
    bic_cost = -2 * log_like + (k + 1) * log(n)
  } else {
    # Fail loudly instead of erroring on an undefined bic_cost below.
    stop("calc_bic: unsupported family '", family, "'")
  }
  return(bic_cost)
}
# One step of forward variable selection with leave-one-year-out cross
# validation. Fits resp ~ (model + test_var) on `data` (a data.table that
# must contain `year` and `target` columns), then refits holding out each
# year in from_yr:to_yr and scores the out-of-year predictions. Returns a
# one-row data.table of fit metrics for the candidate `test_var` on both
# the full CV sample and the target==1 subsample ("*.sub"), for the
# cross-validated fit and the full-sample fit ("*.f").
cv_step = function(data, resp="log_diff", test_var="", model="", from_yr=2007, to_yr=2016) {
mod = c(test_var, model)
mod = rm_blanks(mod)
input_data = data[, c(resp, "year","target", mod), with=FALSE]
setnames(input_data, resp, "resp")
# Full-sample fit on all candidate regressors (year/target excluded).
full_fit = lm(data=input_data, resp ~ .-year-target)
# full_fit = lm(data=input_data, resp ~ ENTITY_NAME)
params = data.frame(summary(full_fit)$coefficients)
names(params) = c("est", "se", "t", "p_value")
params$var = row.names(params)
params = data.table(params)
params = params[var != "(Intercept)",]
worst_p_value = max(params[["p_value"]])
# Coefficient of the candidate variable (0 when screening the base model).
if (test_var != "") {
coefficient = params[var == test_var,"est"][[1]]
} else {
coefficient = 0
}
p = full_fit$rank
n = length(full_fit$residuals)
# VIF needs >= 2 regressors; car::vif can error, hence the try() guard.
if (p > 2) {
c1 <- try(worst_vif <- max(vif(full_fit)),silent = TRUE)
if(is(c1,"try-error")){worst_vif<-NA}
} else {
worst_vif = 0
}
# Leave-one-year-out loop: refit without each year, predict the held-out
# year with both the CV fit ("est") and the full fit ("est.full").
j = 1
for (yr in from_yr:to_yr) {
hold_out_yr = input_data[year == yr,]
test_data = input_data[year != yr,]
hold_out_test = hold_out_yr[, c("target","resp")]
head(test_data)
hold_out_fit = lm(data=test_data, resp ~ .-year-target)
hold_out_test[["est"]] = predict(hold_out_fit, hold_out_yr)
hold_out_test[["est.full"]] = predict(full_fit, hold_out_yr)
if (test_var != "") {
cv_coef = hold_out_fit$coefficients[[test_var]]
} else {
cv_coef = 0
}
if (j == 1) {
cv_data = hold_out_test
coef_data = cv_coef
} else {
cv_data = rbind(cv_data, hold_out_test)
coef_data = rbind(cv_coef, coef_data)
}
j = j + 1
}
# Metrics on the pooled out-of-year predictions (CV fit).
y = cv_data[["resp"]]
y_est = cv_data[["est"]]
y_est_f = cv_data[["est.full"]]
bic = calc_bic(y, y_est, p, family="gaussian")
rsq = calc_rsq(y, y_est)
mape = calc_mape(y, y_est)
rmse = calc_rmse(y, y_est)
mad = calc_mad(y, y_est)
mxad = calc_mxad(y, y_est)
shapiro.pvalue=shapiro.test(y-y_est)$p.value
# Same metrics for the full-sample fit's predictions.
bic.f = calc_bic(y, y_est_f, p, family="gaussian")
rsq.f = calc_rsq(y, y_est_f)
mape.f = calc_mape(y, y_est_f)
rmse.f = calc_rmse(y, y_est_f)
mad.f = calc_mad(y, y_est_f)
mxad.f = calc_mxad(y, y_est_f)
shapiro.pvalue.f=shapiro.test(y-y_est_f)$p.value
# Repeat everything on the target==1 subsample.
y = cv_data[target==1,][["resp"]]
y_est = cv_data[target==1,][["est"]]
y_est_f = cv_data[target==1,][["est.full"]]
bic.sub = calc_bic(y, y_est, p, family="gaussian")
rsq.sub = calc_rsq(y, y_est)
mape.sub = calc_mape(y, y_est)
rmse.sub = calc_rmse(y, y_est)
mad.sub = calc_mad(y, y_est)
mxad.sub = calc_mxad(y, y_est)
shapiro.pvalue.sub=shapiro.test(y-y_est)$p.value
bic.f.sub = calc_bic(y, y_est_f, p, family="gaussian")
rsq.f.sub = calc_rsq(y, y_est_f)
mape.f.sub = calc_mape(y, y_est_f)
rmse.f.sub = calc_rmse(y, y_est_f)
mad.f.sub = calc_mad(y, y_est_f)
mxad.f.sub = calc_mxad(y, y_est_f)
shapiro.pvalue.f.sub=shapiro.test(y-y_est_f)$p.value
# One-row summary; coefficient_mapd measures coefficient stability across
# the CV refits relative to the full-sample coefficient.
data.table(
var=test_var
, coefficient=round(coefficient,6)
, rsq=round(rsq,6)
, rsq.f=round(rsq.f,6)
, rsq.sub=round(rsq.sub,6)
, rsq.f.sub=round(rsq.f.sub,6)
, rmse=round(rmse,6)
, rmse.f=round(rmse.f,6)
, rmse.sub=round(rmse.sub,6)
, rmse.f.sub=round(rmse.f.sub,6)
, mad=round(mad,6)
, mad.f=round(mad.f,6)
, mad.sub=round(mad.sub,6)
, mad.f.sub=round(mad.f.sub,6)
# , mxad=round(mxad,6)
# , mxad.f=round(mxad.f,6)
, shapiro.pvalue=shapiro.pvalue
, shapiro.pvalue.f=shapiro.pvalue.f
, shapiro.pvalue.sub=shapiro.pvalue.sub
, shapiro.pvalue.f.sub=shapiro.pvalue.f.sub
, worst_vif=worst_vif
, n=n
, p=p
# , bic=bic
# , mape=mape
, worst_p_value=worst_p_value
, coefficient_mapd=mean(abs(coef_data/coefficient - 1))
)
}
# Greedy forward selection driven by cv_step(). Starting from the base model
# `modl`, each iteration scores every remaining candidate in `info` (columns:
# name, sign, base), sorts by `criteria`, and accepts the best candidate whose
# coefficient has the expected sign, whose worst VIF is within vif_tol, and
# whose worst p-value is within sig_tol. Accepting a variable removes its
# whole `base` family from further consideration. Returns the per-iteration
# scoreboards, the selected variable names, and their rsq path.
cv_select = function(data, info, criteria="rsq", resp="log_diff", modl="", iter=2, from_yr=2007, to_yr=2016, vif_tol=10, sig_tol=0.10) {
# Exclude candidates sharing a base family with the starting model.
mdl.base<-info[name%in%modl,unique(base),]
info_table = info[!base%in%mdl.base,,]
selections = modl
selection_rsq = NA
summary_out = list()
for (i in 1:iter) {
# collect info on each variable
if (dim(info_table)[1] != 0) {
var_tuples = zip_to_list(info_table[["name"]],info_table[["sign"]])
cat_tuples = zip_to_list(info_table[["name"]],info_table[["base"]])
j = 1
for (var in names(var_tuples)) {
step_result = cv_step(
data
, resp=resp
, test_var=var
, model=selections
, from_yr=from_yr
, to_yr=to_yr
)
step_result[["sign"]] = var_tuples[[var]]
step_result[["base"]] = cat_tuples[[var]]
if (j == 1) {
step_collection = step_result
} else {
step_collection = rbind(step_collection, step_result)
}
j = j + 1
}
# pick a variable: sort best-first according to the chosen criterion
if (criteria == "rsq") {
step_collection = step_collection[order(-rsq)]
} else if (criteria == "bic") {
step_collection = step_collection[order(bic)]
} else if (criteria == "mape") {
step_collection = step_collection[order(mape)]
} else if (criteria == "coefficient_mapd") {
step_collection = step_collection[order(coefficient_mapd)]
}
summary_out[[concat("iteration-",i)]] = step_collection
# Keep only candidates passing the sign, VIF, and significance screens.
step_filter = step_collection[
((sign(coefficient) == sign) & (worst_vif <= vif_tol) & (worst_p_value <= sig_tol))
,]
if (dim(step_filter)[1] != 0 ) {
selection = step_filter[1, var][[1]]
selection_base = step_filter[1, base][[1]]
selections = c(selections, selection)
# NOTE(review): duplicate of the selection_base assignment above — harmless.
selection_base = step_filter[1, base][[1]]
# na.remove (tseries) drops the initial NA placeholder.
selection_rsq = na.remove(c(selection_rsq, step_filter[1, rsq][[1]]))
# remove variable from consideration
info_table = info_table[!(name == selection | base == selection_base),,]
} else {
break
}
}
}
list(summary=summary_out, selections=rm_blanks(selections), selection_rsq=selection_rsq)
}
<file_sep>/remediation/CnI_Path_Dependency.R
################################################################################
# Bank of Hope
# Commercial Real Estate Ending Balances
# Program: <>.R
# Author(s): KPMG, LLP
# Purpose:
# Data Dependences:
#
#
# R-version: R version 3.3.1 (2016-06-21)
# -- "Bug in Your Hair" Copyright (C) 2016 The R Foundation
# for Statistical Computing Platform: x86_64-apple-darwin13.4.0 (64-bit)
################################################################################
### Environment Settings #######################################################
pth_inputs = "C:/Users/ol07805/Desktop/Desktop Things/Ending Balance Model Final/Ending Balance Remediation Plan 09_25_18/Ending Balance CnI Path Dependency/read-only-inputs"
pth_lib = "C:/Users/ol07805/Desktop/Desktop Things/Ending Balance Model Final/Ending Balance Remediation Plan 09_25_18/Ending Balance CnI Path Dependency/library"
pth_out = "C:/Users/ol07805/Desktop/Desktop Things/Ending Balance Model Final/Ending Balance Remediation Plan 09_25_18/Ending Balance CnI Path Dependency"
### No need to make changes below after this line ##############################
### Dependencies
source(paste(pth_lib,"/dev-support.R", sep=""))
source(paste(pth_lib,"/dfast-support.R", sep=""))
# source has the following functions:
# - stack()
# - get_bal_forecast()
# - concat()
# - bin_interval_variable()
# - calc_rsq(), calc_mape(), calc_mad(), calc_rmset()
# - cv_step(), cv_select()
library("openxlsx")
library("data.table")
library("lubridate")
library("ggplot2")
library("RGraphics")
library("gridExtra")
library("ggplot2")
library("scales")
library("tseries")
library("car")
library("urca")
library("lmtest")
library("stats")
library("orcutt")
################################################################################
### Import Data ################################################################
boh = readRDS(concat(pth_out, "/data-boh.RDS"))
# Return the training subset for a segment: the segment-specific flag column
# (is_train_<seg>) is renamed to "is_train" on a copy() so the global `boh`
# data.table is not modified by reference.
boh_train = function(seg) {
train_ind = concat("is_train_", seg)
boh_cp = copy(boh)
setnames(boh_cp, train_ind, "is_train")
boh_cp[is_train == TRUE,]
}
# NOTE(review): the CRE ("ce") training set is built from the "ip" flag —
# confirm that is intentional.
boh_train_ce = boh_train("ip")
boh_train_ci = boh_train("ci")
################################################################################
info = readRDS(concat(pth_inputs, "/table-variable_information_rho.RDS"))
################################################################################
################################################################################
# C&I ending-balance variable selection (SMAPE criterion, Cochrane-Orcutt).
ci_smape = cv_select_bal(
boh_train_ci
, info[c_i_tier != 0,]
, resp="ldiff_ci"
, bal="ci"
, use_cochrane_orcutt=TRUE
, modl=""
, iter=5
, criteria="smape"
, vif_tol=3
, sig_tol=0.05
, from_yr=2003
, to_yr=2016
)
####################################################################################################################
# Train the old model
oldModel <- lm(paste0("ldiff_ci ~","gdp_ag_lag1 + inc_qg_lag2 + ca_rinc_ag_lag3"),data = boh_train_ci)
# Get rid of the first variable path selected
infoTwo <- info[-grep("gdp",info$name)]
# run through variable selection algorithm
# Path-dependency check #1: rerun selection with all gdp-based drivers removed
# from both the candidate list and the training data columns.
ci_smapeTwo = cv_select_bal(
boh_train_ci[,-grep("gdp",names(boh_train_ci)),with = F]
, infoTwo[c_i_tier != 0,]
, resp="ldiff_ci"
, bal="ci"
, use_cochrane_orcutt=TRUE
, modl=""
, iter=5
, criteria="smape"
, vif_tol=3
, sig_tol=0.05
, from_yr=2003
, to_yr=2016
)
# train new model to check summary statistics
newModel <- lm(paste0("ldiff_ci ~",paste(ci_smapeTwo$selections,collapse = "+")),data = boh_train_ci)
# Get rid of second selection path as well
infoTwo <- infoTwo[-grep("ca_gsp|ca_rgsp",infoTwo$name)]
# run through variable selection algorithm
# Path-dependency check #2: also exclude the CA GSP family.
ci_smapeTwo = cv_select_bal(
boh_train_ci[,-grep("ca_gsp|gdp|ca_rgsp",names(boh_train_ci)),with = F]
, infoTwo[c_i_tier != 0,]
, resp="ldiff_ci"
, bal="ci"
, use_cochrane_orcutt=TRUE
, modl=""
, iter=5
, criteria="smape"
, vif_tol=3
, sig_tol=0.05
, from_yr=2003
, to_yr=2016
)
# train new model to check summary statistics
newModelTwo <- lm(paste0("ldiff_ci ~",paste(ci_smapeTwo$selections,collapse = "+")),data = boh_train_ci)
####################################################################################################################
# Investor-property (ip) and owner-occupied (oo) CRE selections.
ip_smape = cv_select_bal(
boh_train_ce
, info[cre_tier != 0,]
, resp="ldiff_ip"
, bal="ip"
, use_cochrane_orcutt=TRUE
, modl=""
, iter=5
, criteria="smape"
, vif_tol=3
, sig_tol=0.05
, from_yr=2007
, to_yr=2016
)
oo_smape = cv_select_bal(
boh_train_ce
, info[cre_tier != 0,]
, resp="ldiff_oo"
, bal="oo"
, use_cochrane_orcutt=TRUE
, modl=""
, iter=5
, criteria="smape"
, vif_tol=3
, sig_tol=0.05
, from_yr=2007
, to_yr=2016
)
################################################################################
################################################################################
# Persist the selection objects for downstream scripts.
#saveRDS(ci_smape, concat(pth_out, "/robj-LeastSquaresSelectionSmape-ci.RDS"))
saveRDS(ip_smape, concat(pth_out, "/robj-LeastSquaresSelectionSmape-ip.RDS"))
saveRDS(oo_smape, concat(pth_out, "/robj-LeastSquaresSelectionSmape-oo.RDS"))
################################################################################
### Output Iteration Data for Review
#ci_itr_1 = ci_smape[["summary"]][["iteration-1"]]
#ci_itr_2 = ci_smape[["summary"]][["iteration-2"]]
ip_itr_1 = ip_smape[["summary"]][["iteration-1"]]
ip_itr_2 = ip_smape[["summary"]][["iteration-2"]]
oo_itr_1 = oo_smape[["summary"]][["iteration-1"]]
oo_itr_2 = oo_smape[["summary"]][["iteration-2"]]
#ci_itr_1[["iter"]] = 1
#ci_itr_2[["iter"]] = 2
ip_itr_1[["iter"]] = 1
ip_itr_2[["iter"]] = 2
oo_itr_1[["iter"]] = 1
oo_itr_2[["iter"]] = 2
#ci = rbind(ci_itr_1, ci_itr_2)
ip = rbind(ip_itr_1, ip_itr_2)
oo = rbind(oo_itr_1, oo_itr_2)
#setnames(ci, "var", "name")
setnames(ip, "var", "name")
setnames(oo, "var", "name")
# data.table join syntax: X[Y, on="name"] joins the variable metadata on.
#write.csv(ci[info, on="name"], concat(pth_out, "/ci-selections.csv"))
write.csv(ip[info, on="name"], concat(pth_out, "/ip-selections.csv"))
write.csv(oo[info, on="name"], concat(pth_out, "/oo-selections.csv"))
################################################################################
<file_sep>/PD/R02_WB_Data_original.R
##############################################################################
## File Name: R02_WB_Data.R
## Author: KZ
## Date: 5/1/2017 Created
## Purpose: To import and clean Wilshire data according to "02 - WB data.sas"
##############################################################################
require(dplyr)
require(reshape2)
require(data.table)
require(zoo)
setwd("C:/Users/ic07949/Desktop/KPMG/Model Development/development code and data/PD/Dataset/Wilshire")
## Import Wilshire Data (SAS File 02, Line 1 to 69)
wilshire <- read.csv("df_final_wilshire_sorted.csv")
# Rename key columns from the read.csv-sanitized headers.
names(wilshire)[names(wilshire)=="Note.Number"] <- "note_number"
names(wilshire)[names(wilshire)=="Non.Accrual.Code"] <- "non_accrual_code"
names(wilshire)[names(wilshire)=="NAP...NAIP...NAIP.in.GL"] <- "WB_balance"
names(wilshire)[names(wilshire)=="Rate.Over.Split"] <- "interest_rate"
wilshire$filedate <- as.Date(wilshire$filedate, "%Y-%m-%d")
wilshire$originationdate <- as.Date(wilshire$originationdate, "%Y-%m-%d")
wilshire$maturitydate <- as.Date(wilshire$maturitydate,"%Y-%m-%d")
wilshire_chargeoffs <- read.csv("wilshire charge offs cleaned.csv")
rates <- fread("rates.csv")
# Acquired-loan lists from two snapshots; their note numbers flag acquisitions.
wilshire_acquired_idx1 <- read.csv("Wilshire_aquired_list_20090930.csv")
names(wilshire_acquired_idx1)[names(wilshire_acquired_idx1)=="Note.Number"] <- "note_number"
wilshire_acquired_idx1 <- subset(wilshire_acquired_idx1, Mirae == "M", select = c(note_number,Mirae))
wilshire_acquired_idx2 <- read.csv("Wilshire_aquired_list_20131231.csv")
names(wilshire_acquired_idx2)[names(wilshire_acquired_idx2)=="Note_Number"] <- "note_number"
wilshire_acquired_idx2 <- filter(wilshire_acquired_idx2, Bank %in% c("A", "S", "M"))
acq <- c(unique(wilshire_acquired_idx1$note_number), unique(wilshire_acquired_idx2$note_number))
## create a label in Wilshire for acquired loans. (SAS File 02, Line 71 to 77)
wilshire$acquired_identifier <- ifelse(wilshire$note_number %in% acq,
paste("acquired_wilshire"),
paste("Wilshire_originated"))
table(wilshire$acquired_identifier)
## merge wilshire and chargeoffs (SAS File 02, Line 81 to 87)
wilshire <- merge(x = wilshire, y = wilshire_chargeoffs, by = "note_number", all.x = TRUE)
## create event and other variables (SAS File 02, Line 90 to 146)
wilshire$co_ind <- ifelse( is.na(wilshire$co_ind ), 0, wilshire$co_ind )
# Default event y: 1 for non-accrual codes 2/4 or a charge-off; 0 for codes
# 0/9 without charge-off; 111 is a sentinel for anything unclassified.
wilshire$y <- ifelse( wilshire$non_accrual_code %in% c(2,4) | wilshire$co_ind == 1 ,
1,
ifelse( wilshire$non_accrual_code %in% c(0,9) & wilshire$co_ind != 1 ,
0, 111))
table(wilshire$y)
wilshire$yr_maturity <- year(wilshire$maturitydate)
wilshire$yr_file <- year(wilshire$filedate)
wilshire$mn_maturity <- month(wilshire$maturitydate)
wilshire$mn_file <- month(wilshire$filedate)
wilshire$q_file <- quarter(wilshire$filedate)
# Months to maturity, loan age and term in quarters, and percent-of-book.
wilshire$ttm_m= 12*(wilshire$yr_maturity - wilshire$yr_file ) + (
wilshire$mn_maturity - wilshire$mn_file)
wilshire$loan_age_q <- (as.yearqtr(wilshire$filedate) - as.yearqtr(wilshire$originationdate)
) * 4
wilshire$term_q <- (as.yearqtr(wilshire$maturitydate) - as.yearqtr(wilshire$originationdate)
) * 4
wilshire$POB <- 100 * wilshire$loan_age_q / wilshire$term_q
## create variable min_non_acc_date (SAS File 02, Line 147 to 170)
# First observed default date per note.
indx_wilshire <- subset(wilshire, y==1, select = c(note_number,filedate))
indx_wilshire <- as.data.table(indx_wilshire[order(indx_wilshire$note_number, indx_wilshire$filedate),])
indx_wilshire <- indx_wilshire %>% group_by(note_number)%>% filter(row_number(filedate) == 1)
names(indx_wilshire)[names(indx_wilshire)=="filedate"] <- "min_non_acc_date"
wilshire <- merge(x = wilshire, y = indx_wilshire, by = "note_number", all.x = TRUE)
# Earlier of first charge-off date and first non-accrual date. NOTE: ifelse()
# strips the Date class (returns the numeric day count), hence the re-cast below.
wilshire$f_non_acc_date <- ifelse(is.na(wilshire$first_co_date), as.Date(wilshire$min_non_acc_date),
ifelse (as.Date(wilshire$first_co_date) <= as.Date(wilshire$min_non_acc_date),
as.Date(wilshire$first_co_date),
as.Date(wilshire$min_non_acc_date)))
wilshire$f_non_acc_date <- as.Date(wilshire$f_non_acc_date)
## merge with the rate data set (SAS File 02, Line 173 to 179)
rates <- subset(rates, select = -c(date,month))
setnames(rates, old = c("year","q"), new = c("yr_file","q_file"))
wilshire <- merge(x = wilshire, y = rates, by = c("yr_file","q_file"), all.x = TRUE)
## clean up data set (SAS File 02, Line 182 to 196)
# Drop observations after the first charge-off / non-accrual event, loans
# already past maturity, and stale post-maturity records.
wilshire_df <- filter(wilshire, yr_maturity > 2006)
wilshire_df <- filter(wilshire_df, !(!is.na(as.Date(first_co_date)) & as.Date(filedate)>as.Date(first_co_date) ))
wilshire_df <- filter(wilshire_df, !(!is.na(as.Date(min_non_acc_date)) & as.Date(filedate)>as.Date(min_non_acc_date) ))
wilshire_df <- filter(wilshire_df, yr_maturity >= yr_file)
wilshire_df <- filter(wilshire_df, !(yr_maturity == yr_file & (mn_file - mn_maturity)>2) )
wilshire_df$boh_id <- "wilshire"
## create portfolio_id: CRE or CI (SAS File 02, Line 198 to 207)
wilshire_df$class_code2 <- as.numeric(as.character(wilshire_df$Class.Code))
wilshire_df <- filter(wilshire_df, class_code2 %in% c(2,3,5,6,10,13,20, 21,30,31,32,33,34,35,
36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,59,60,61,63,99))
## clean up data (SAS File 02, Line 209 to 239)
wilshire_df <- filter(wilshire_df, !(WB_balance == 0 | is.na(WB_balance)))
wilshire_df <- filter(wilshire_df, !(interest_rate == 0 | is.na(interest_rate)))
## !! SAS File 02, Line 241 to 456 can be ignored because it doesn't affect the final results.
## only need to create variable property_type for CRE model
wilshire_df$property_type <- wilshire_df$Property.Type.Code
wilshire_df$property_type <- as.numeric(wilshire_df$property_type)
table(wilshire_df$Property.Type.Code)
table(wilshire_df$property_type)
## create CRE/C&I portfolio ID (SAS File 02, Line 458 to 470)
# NOTE(review): class code 51 passes the filter above but appears in neither
# list below, so it maps to "error" and is dropped — confirm intended.
wilshire_df$portfolio_id <- ifelse(wilshire_df$class_code2 %in% c(2,3,5,6,10,13,20),
"CRE",
ifelse(wilshire_df$class_code2 %in% c(21,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,
46,47,48,49,50,59,60,61,63,99),
"CI", "error"))
table(wilshire_df$portfolio_id)
## filter out error portfolio_id (SAS File 02, Line 472 to 475)
wilshire_df <- filter(wilshire_df, portfolio_id != "error")
## !! SAS File 02, Line 477 to 513 can be ignored because it doesn't affect the final results.
## change the loan Rating system (SAS File 02, Line 516 to 535)
table(wilshire_df$Loan.Rating.Code1)
# Map thousand-scale rating codes onto the BOH scale (111 = unmapped sentinel).
wilshire_df$boh_rating <- ifelse(wilshire_df$Loan.Rating.Code1 ==0, 0,
ifelse(wilshire_df$Loan.Rating.Code1 == 1000, 1,
ifelse(wilshire_df$Loan.Rating.Code1 == 2000, 2,
ifelse(wilshire_df$Loan.Rating.Code1 == 3000, 3,
ifelse(wilshire_df$Loan.Rating.Code1 == 4000, 4,
ifelse(wilshire_df$Loan.Rating.Code1 == 5000, 4,
ifelse(wilshire_df$Loan.Rating.Code1 == 6000, 1000,
ifelse(wilshire_df$Loan.Rating.Code1 == 7000, 2000,
ifelse(wilshire_df$Loan.Rating.Code1 == 8000, 3000,
ifelse(wilshire_df$Loan.Rating.Code1 == 9000, 4000, 111)
)))))))))
table(wilshire_df$boh_rating)
## clean up data set (SAS File 02 Line 549 to 601) and create final data set for Wilshire
df_final_wilshire <- as.data.frame(wilshire_df)
df_final_wilshire <- as.data.table(df_final_wilshire)
df_final_wilshire <- subset(df_final_wilshire, select = -c(Name.1, Collateral.Address ))
setnames(df_final_wilshire, old = c("Times.Past.Due.01.To.29.Days", "WB_balance","Original.Note.Amount",
"filedate", "yr_file","q_file","mn_file","note_number","originationdate",
"maturitydate","Fixed.or.Variable.Interest.Rate","f_non_acc_date",
"NAICS.Code"),
new = c("dpd0129","current_balance", "original_balance",
"fileDate","year","q","month","account_id","origination_date",
"maturity_date","interest_rate_type","first_nonacc_date",
"naicsCode"))
#### !!! R is case sensitivity to var names
## !! (SAS File 02 Line 565 to 571) change dpd0129 with NA to 0
## !! The original data set contains wrong values (e.g. address) in variable Times.Past.Due.01.To.29.Days
## dpd0129 is one independent variable in CI PD model. Thus, results may have problems.
df_final_wilshire$dpd0129 <- substr(as.character(df_final_wilshire$dpd0129),1,3)
## !! Follow Sina's wrong logic in SAS, extract the first 3 characters from variable dpd0129
## R outputs (first 3 elements) are different from SAS. But the numbers of obs
## of dpd0129 = 0 and dpd0129 =1 are the same -- 73833 and 13546.
df_final_wilshire$dpd0129 <- as.numeric(as.character(df_final_wilshire$dpd0129))
## Warning message: NAs introduced by coercion
df_final_wilshire$dpd0129 <- ifelse(is.na(df_final_wilshire$dpd0129), 0, df_final_wilshire$dpd0129)
## The way R produces "NA" when changing character to numeric is different from SAS
## Thus, R results have more dpd0129 = 0. R: 88430 obs, SAS: 88376
## 0 observation with loan_spread_v>100 (or interest_rate = 1234.56) (SAS File 02 Line 582 to 585)
df_final_wilshire <- filter(df_final_wilshire, interest_rate != 1234.56)
# Keep only the modeling columns and write the final Wilshire data set.
df_final_wilshire <- subset(df_final_wilshire, select = c(fileDate, account_id, boh_id, acquired_identifier,
portfolio_id, original_balance, origination_date, maturity_date,
current_balance, interest_rate, interest_rate_type,
loan_age_q, POB, boh_rating, DCR,
dpd0129, first_nonacc_date,
naicsCode, property_type, tb1m, tb3m, tb6m, tb1y, tb2y,
tb3y, tb5y, tb7y, tb10y,tb20y, tb30y, year, q, month, y))
save(df_final_wilshire, file = "./Data output/df_final_wilshire.RData")
write.csv(df_final_wilshire, file = "./Data output/df_final_wilshire.csv", row.names = FALSE)
<file_sep>/PPNR/PPNR_SBA.R
###################################################################
# Project: Bank of Hope
# PPNR - Gain on Sale of Loans (SBA)
###################################################################
setwd(dirname(rstudioapi::getActiveDocumentContext()$path))
library(DataAnalytics)
library(tseries)
library(urca)
library (fUnitRoots)
library(lubridate)
library(forecast)
library(tseries)
library(CADFtest)
library (leaps)
library(data.table)
library(openxlsx)
library(car)
####################
##save image
# save.image("sba-v1.RData")
##load image
#load("sba-v1.RData")
##################################
#####################
#read in the raw data
#####################
df=read.csv("sba.csv",header = TRUE)
names(df)= c("date", "year", "q", "sba")
###############
# Plot the data
###############
df = df[-1,] #2003 Q1 starting point
plot(df$sba)
acf(df$sba)
pacf(df$sba)
###################################
#transformations-- Q-o-Q and Y-o-Y
##################################
df$sba_qd=c(NA, diff(df$sba))
df$sba_ad= df$sba- back(df$sba, noperiods = 4)
df$sba_qg=c(NA, diff(log(df$sba)))
df$sba_ag=log(df$sba)-back(log(df$sba), noperiods = 4)
###########################################
#Stationarity tests for the input variables
###########################################
pp.test(na.remove(df$sba), lshort = F) # Stationary!
pp.test(na.remove(df$sba_qd), lshort = F) # Stationary!
pp.test(na.remove(df$sba_ad), lshort = F) # Stationary!
#boxplot & auto correlations
boxplot(df$sba_qd~df$q, main="QoQ SBA",
xlab="season/quarter", ylab="SBA")
acf(na.remove(df$sba_qd), lag.max = 25)
boxplot(df$sba_ad~df$q, main="YoY SBA",
xlab="season/quarter", ylab="SBA")
acf(na.remove(df$sba_ad), lag.max = 25)
#######################
#read in the macro vars
#######################
######
#base
######
base=read.csv("base_sba2.csv", header=T)
aaa=which(base$year==2003 & base$quarter==1)
bbb=which(base$year==2018 & base$quarter==4)
base=base[aaa:bbb,]
#########
#adverse
#########
adverse=read.csv("adverse_sba2.csv", header=T)
aaa=which(adverse$year==2003 & adverse$quarter==1)
bbb=which(adverse$year==2018 & adverse$quarter==4)
adverse=adverse[aaa:bbb,]
########
#severe
########
severe=read.csv("severe_sba2.csv", header=T)
aaa=which(severe$year==2003 & severe$quarter==1)
bbb=which(severe$year==2018 & severe$quarter==4)
severe=severe[aaa:bbb,]
##################
#development macro
##################
D1=which(base$year==2003 & base$q==1)
D2=which(base$year==2016 & base$q==3)
macro_dev=base[c(D1:D2), ]
########################################
# Create the dep_var matrix
########################################
# Build the variable-metadata table (var_info) consumed by StepFun.
# Columns: var / tier / base / lag / diff / sign, populated by pattern
# matching on the macro variable names. Assignment order matters: a later
# pattern overwrites an earlier one (e.g. "spr10_q" refines "spr10"), so
# each map below preserves the original assignment order.
var.names = colnames(macro_dev[, -c(1, 2, 3)])
var_info = as.data.frame(matrix(0, length(var.names), 6))
names(var_info) = c("var", "tier", "base", "lag", "diff", "sign")
var_info[, 1] = var.names
var_info[, 5] = 0
# diff: flag differenced / growth-rate transformations
for (pat in c("_qd", "_yd", "_ad", "_ag", "_qg")) {
  var_info[grepl(pat, var_info$var), 5] = TRUE
}
# lag: number of quarters the variable is lagged
for (k in 1:4) {
  var_info[grepl(paste0("_lag_", k), var_info$var), 4] = k
}
# base: the underlying economic series each candidate derives from
base_map = c(
  ngdp = "gdp", rgdp = "gdp", rdi = "dpi", ndi = "dpi",
  ur_ = "ur_diff", UR_ = "ur_diff", cpi_ = "cpi",
  i3m = "i", i5y = "i", i10y = "i", bbb = "spr10",
  imort = "i", iprim = "i", cppi = "cppi", dji = "dji",
  VIX = "vix", vix = "vix",
  hpi_q = "hpi_diff", HPI_q = "hpi_diff",
  hpi_a = "hpi_diff", HPI_a = "hpi_diff", hpi_g = "hpi_diff",
  spr10 = "spr10", spr10_q = "spr10", spr10_a = "spr10",
  equipment = "equipment", pfi_nonres = "pfi_nonres",
  willreit = "willreit", KOGDP = "KOGDP", KOCPI = "KOCPI",
  CCI = "CCI_g", NCREIF = "NCREIF",
  fall = "fall", winter = "winter", spring = "spring", summer = "summer"
)
for (pat in names(base_map)) {
  var_info[grepl(pat, var_info$var), 3] = base_map[[pat]]
}
#var_info[var_info$base==0,]   # sanity check: any unmapped variables
# sign: expected direction of the relationship with SBA (0 = unconstrained)
sign_map = c(
  ngdp = 1, rgdp = 1, rdi = 1, ndi = 1,
  ur_ = -1, UR_ = -1, cpi_ = 0,
  i3m = 0, i5y = 0, i10y = 0, bbb = -1, imort = 0, iprim = 0,
  cppi = 1, dji = 1, VIX = 0, vix = 0,
  hpi_q = 1, HPI_q = 1, hpi_a = 1, HPI_a = 1, hpi_g = 1,
  spr10 = -1, spr10_q = -1, spr10_a = -1,
  equipment = 1, pfi_nonres = 1, willreit = 1,
  KOGDP = 1, KOCPI = 0, CCI = 1, NCREIF = 1,
  fall = 0, winter = 0, spring = 0, summer = 0
)
for (pat in names(sign_map)) {
  var_info[grepl(pat, var_info$var), 6] = sign_map[[pat]]
}
#var_info[var_info$sign==0,]   # sanity check
# tier: search priority used by the stepwise selection (1 = searched first)
tier_map = c(
  fall = 1, spring = 1, summer = 1, winter = 1,
  ngdp = 1, rgdp = 1, rdi = 1, ndi = 1, ur_ = 1, UR_ = 1,
  cpi_ = 3, i3m = 1, i5y = 1, i10y = 1, bbb = 1, imort = 3, iprim = 2,
  cppi = 2, dji = 1, VIX = 2, vix = 2,
  hpi_q = 2, HPI_q = 2, hpi_a = 2, HPI_a = 2, hpi_g = 2,
  spr10 = 3, spr10_q = 3, spr10_a = 3,
  equipment = 2, pfi_nonres = 2, willreit = 2,
  KOGDP = 3, KOCPI = 3, CCI = 4, NCREIF = 2
)
for (pat in names(tier_map)) {
  var_info[grepl(pat, var_info$var), 2] = tier_map[[pat]]
}
# var_info[var_info$tier==0,]   # sanity check
#####################
#Variable Selection
#####################
# Development window for the dependent series: 2003Q1-2016Q3.
D1=which(df$year==2003 & df$q==1)
D2=which(df$year==2016 & df$q==3)
df_dev=df[c(D1:D2), ]
# NOTE(review): date_col and sba_input are built from the FULL df while
# macro_dev covers only the development window -- the cbind calls below rely
# on the row counts lining up; confirm df spans the same window.
date_col= as.data.frame(df$date)
names(date_col)="Date"
sba_input=as.data.frame(cbind(df$sba, df$sba_qd, df$sba_ad, df$sba_qg, df$sba_ag))
names(sba_input)=c("sba", "sba_qd", "sba_ad", "sba_qg", "sba_ag")
# a = variable metadata, b = dependent-variable table, c = candidate macro
# regressors: the three inputs StepFun expects.
# (Note: `c` shadows base::c in the global environment after this point.)
b1=cbind(date_col, sba_input)
names(b1)=c("Date", names(sba_input))
b=data.table(b1)
c1=cbind(date_col, macro_dev)
names(c1)=c("Date", names(macro_dev))
c=data.table(c1)
a=data.table(var_info)
df_total_dev= as.data.frame(cbind(date_col, sba_input, macro_dev))
source("StepFun.R")
########################################################
# Model Selection
########################################################
# The stepwise search (StepFun) was run tier by tier; the calls are retained
# below (commented out) for reproducibility. Tier 1 selected CAUR_qd_lag_1,
# i10y_qd_lag_1 and djia_ag_lag_4; tiers 2 and 3 added nothing.
# fix_vars0=c("1")
# model3_sba_sep=StepFun(a,b,c, tier=1, #indicate which tier of variables to consider
#                        y='sba~', #indicate response variable
#                        thresh=c(0.05, 0.01, 0.001), #significance level for SE based p-value and LR test based p-value for each tier
#                        criteria='SE.p', #variable selection criteria; other values='bic', 'LR.p', 'SE.p', 'rsq'
#                        vars0 = c("1"), #model 0 variables
#                        fix_vars0, #indicate which variables are fixed
#                        out.print=T #indicate whether intermediate output will be printed
# )
# # Add CAUR_qd_lag_1, djia_ag_lag_4, i10y_qd_lag_1
#
# fix_vars0=c("1", "CAUR_qd_lag_1", "i10y_qd_lag_1", "djia_ag_lag_4")
# model3_sba_sep=StepFun(a,b,c, tier=2, #indicate which tier of variables to consider
#                        y='sba~', #indicate response variable
#                        thresh=c(0.05, 0.01, 0.001), #significance level for SE based p-value and LR test based p-value for each tier
#                        criteria='SE.p', #variable selection criteria; other values='bic', 'LR.p', 'SE.p', 'rsq'
#                        vars0 = c("1", "CAUR_qd_lag_1", "i10y_qd_lag_1", "djia_ag_lag_4"), #model 0 variables
#                        fix_vars0, #indicate which variables are fixed
#                        out.print=T #indicate whether intermediate output will be printed
# )
# # no added variable
#
# fix_vars0=c("1", "CAUR_qd_lag_1", "i10y_qd_lag_1", "djia_ag_lag_4")
# model3_sba_sep=StepFun(a,b,c, tier=3, #indicate which tier of variables to consider
#                        y='sba~', #indicate response variable
#                        thresh=c(0.05, 0.01, 0.001), #significance level for SE based p-value and LR test based p-value for each tier
#                        criteria='SE.p', #variable selection criteria; other values='bic', 'LR.p', 'SE.p', 'rsq'
#                        vars0 = c("1", "CAUR_qd_lag_1", "i10y_qd_lag_1", "djia_ag_lag_4"), #model 0 variables
#                        fix_vars0, #indicate which variables are fixed
#                        out.print=T #indicate whether intermediate output will be printed
# )
#
# # no added variable
# Final selected model and in-sample diagnostics.
out=lm(b1$sba~c1$CAUR_qd_lag_1+c1$i10y_qd_lag_1+c1$djia_ag_lag_4)
summary(out)
vif(out) # multicollinearity check
acf(out$residuals) # residual autocorrelation
Box.test(out$residuals, type = "Ljung-Box", lag = 3)
durbinWatsonTest(out)
#####################
#Independent Variables
#####################
# Cache each selected regressor with its in-sample mean and standard
# deviation; the sd's feed the +1/+2 sd sensitivity shocks further below.
x1=c1$CAUR_qd_lag_1
mu1=mean(c1$CAUR_qd_lag_1)
sd1= stdev(c1$CAUR_qd_lag_1)
x2= c1$i10y_qd_lag_1
mu2=mean(c1$i10y_qd_lag_1)
sd2= stdev(c1$i10y_qd_lag_1)
x3= c1$djia_ag_lag_4
mu3=mean(c1$djia_ag_lag_4)
sd3= stdev(c1$djia_ag_lag_4)
#####################
# Model Estimation
#####################
# Refit on the generic x1/x2/x3 names so predict() can later be fed
# scenario data frames with matching column names.
out=lm(b1$sba~x1+x2+x3)
summary(out)
lmSumm(out, HAC = T) # HAC (robust) standard errors
#Multicolinearity
vif(out)
#Stationarity
# ADF tests (BIC lag selection) on each regressor over the full scenario file.
summary(ur.df(na.remove(base$CAUR_qd_lag_1), selectlags = c("BIC")))
summary(ur.df(na.remove(base$i10y_qd_lag_1), selectlags = c("BIC")))
summary(ur.df(na.remove(base$djia_ag_lag_4), selectlags = c("BIC")))
#####################
# Residual tests
#####################
out_res=out$residuals
out_res2=rstandard(out) # standardized residuals for Q-Q / fit plots
# Autocorrelations
par(mfrow=c(1,2))
acf(out$residuals, main="")
pacf(out$residuals, main="")
#white noise tests
Box.test(out$residuals, type = "Ljung-Box", lag = 3) #null: independence ==> accept
durbinWatsonTest(out)
#Q-Q Plot
par(mfrow=c(1,1))
qqnorm(out_res2, ylab="Residuals", xlab="Quantiles of Standard Normal", main="SBA Model")
qqline(out_res2)
# Residual vs predicted
plot(out$fitted.values,out_res2, ylab="Residuals", xlab="Fitted Values", main="SBA Model")
abline(0, 0)
#####################
#implement the model
#####################
# output columns: 1 = actuals, 2 = fitted / base forecast, 3 = adverse,
# 4 = severe. Rows 1..ndata are history; the last `npred` rows hold the
# 9-quarter scenario forecasts.
ndata=nrow(b1)
npred=9
output=as.data.frame(matrix(0, ndata+npred,7))
D1=which(df$year==2003 & df$q==1)
D2=which(df$year==2016 & df$q==3)
output[1:ndata, 1]=df$sba[D1:D2]
output[1:ndata, 2]=out$fitted.values
plot(output[1:ndata, 2], ylim=c(-1200000,7000000))
lines(output[1:ndata, 1], col='red')
#####################
#Scenario Forecasts
#####################
# Forecast window: 2016Q1-2018Q1 of each scenario file (9 quarters).
# Each scenario's regressors are renamed x1/x2/x3 to match the fitted model.
aaaa=which(base$year==2016 & base$quarter==1)
bbbb=which(base$year==2018 & base$quarter==1)
indx=cbind(base$CAUR_qd_lag_1,base$i10y_qd_lag_1,base$djia_ag_lag_4)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2", "x3")
fitted.base=as.data.frame(predict(out, xreg_base))
indx=cbind(adverse$CAUR_qd_lag_1,adverse$i10y_qd_lag_1,adverse$djia_ag_lag_4)
xreg_adverse=as.data.frame(indx[aaaa:bbbb,])
names(xreg_adverse)=c("x1", "x2", "x3")
fitted.adverse=as.data.frame(predict(out, xreg_adverse))
indx=cbind(severe$CAUR_qd_lag_1,severe$i10y_qd_lag_1,severe$djia_ag_lag_4)
xreg_severe=as.data.frame(indx[aaaa:bbbb,])
names(xreg_severe)=c("x1", "x2", "x3")
fitted.severe=as.data.frame(predict(out, xreg_severe))
abb=ndata+1
abc=nrow(output)
output[abb:abc, 2]=fitted.base
output[abb:abc, 3]=fitted.adverse
output[abb:abc, 4]=fitted.severe
plot(output[1:ndata, 4], ylim=c(-1200000,7000000))
lines(output[1:abc, 3], col='magenta')
lines(output[1:abc, 2], col='green')
lines(output[1:abc, 1], col='black')
# NOTE(review): "outpput" in the file name below looks like a typo, but it is
# preserved because downstream tooling may reference this exact file name.
write.csv(as.data.frame(cbind(output)), "outpput_sba_final_model.csv", col.names = T, row.names = F)
#####################
#sensitivity Analysis
#####################
# For each regressor in turn: re-run the baseline forecast with that
# regressor shocked by +1 sd (column 3) and +2 sd (column 4), keeping the
# other two regressors at their baseline paths. Column 2 is the unshocked
# baseline forecast for comparison.
# CAUR_qd_lag_1
indx=cbind(base$CAUR_qd_lag_1,base$i10y_qd_lag_1,base$djia_ag_lag_4)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2", "x3")
fitted.base=as.data.frame(predict(out, xreg_base))
indx=cbind(base$CAUR_qd_lag_1+sd1,base$i10y_qd_lag_1,base$djia_ag_lag_4)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2", "x3")
fitted.base1=as.data.frame(predict(out, xreg_base))
indx=cbind(base$CAUR_qd_lag_1+2*sd1,base$i10y_qd_lag_1,base$djia_ag_lag_4)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2", "x3")
fitted.base2=as.data.frame(predict(out, xreg_base))
abb=ndata+1
abc=nrow(output)
output[abb:abc, 2]=fitted.base
output[abb:abc, 3]=fitted.base1
output[abb:abc, 4]=fitted.base2
# NOTE(review): "outpput" typos in the file names below are preserved as-is.
write.csv(as.data.frame(cbind(output)), "outpput_sba_final_sensitivity_caur.csv", col.names = T, row.names = F)
#i10y_qd_lag_1
indx=cbind(base$CAUR_qd_lag_1,base$i10y_qd_lag_1,base$djia_ag_lag_4)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2", "x3")
fitted.base=as.data.frame(predict(out, xreg_base))
indx=cbind(base$CAUR_qd_lag_1,base$i10y_qd_lag_1+sd2,base$djia_ag_lag_4)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2", "x3")
fitted.base1=as.data.frame(predict(out, xreg_base))
indx=cbind(base$CAUR_qd_lag_1,base$i10y_qd_lag_1+2*sd2,base$djia_ag_lag_4)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2", "x3")
fitted.base2=as.data.frame(predict(out, xreg_base))
abb=ndata+1
abc=nrow(output)
output[abb:abc, 2]=fitted.base
output[abb:abc, 3]=fitted.base1
output[abb:abc, 4]=fitted.base2
write.csv(as.data.frame(cbind(output)), "outpput_sba_final_sensitivity_i10.csv", col.names = T, row.names = F)
#djia_ag_lag_4
indx=cbind(base$CAUR_qd_lag_1,base$i10y_qd_lag_1,base$djia_ag_lag_4)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2", "x3")
fitted.base=as.data.frame(predict(out, xreg_base))
indx=cbind(base$CAUR_qd_lag_1,base$i10y_qd_lag_1,base$djia_ag_lag_4+sd3)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2", "x3")
fitted.base1=as.data.frame(predict(out, xreg_base))
indx=cbind(base$CAUR_qd_lag_1,base$i10y_qd_lag_1,base$djia_ag_lag_4+2*sd3)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2", "x3")
fitted.base2=as.data.frame(predict(out, xreg_base))
abb=ndata+1
abc=nrow(output)
output[abb:abc, 2]=fitted.base
output[abb:abc, 3]=fitted.base1
output[abb:abc, 4]=fitted.base2
write.csv(as.data.frame(cbind(output)), "outpput_sba-final_sensitivity_djia.csv", col.names = T, row.names = F)
#####################################
# OOS testing
####################################
# Out-of-sample back-test: refit the model on data up to n quarters before
# the end of the development sample and predict the first held-out quarter
# one step ahead. Returns a 1-row data frame: (n, actual, predicted,
# percent error).
# NOTE(review): relies on globals df_total_dev, sba_input and npred. The
# `ind2` line reads the global npred (=9) before the local npred=1 is set,
# but ind2 is never used. pct_error is scaled by the *predicted* value
# rather than the actual -- confirm that is intentional.
oos<-function(n){
#n defines how many quarters before 2015Q4
ind=nrow(df_total_dev)-n
df_oos=df_total_dev[1:ind,]
ind0=nrow(df_oos)
ind1=nrow(df_oos)+1
ind2=nrow(df_oos)+npred
ind4=nrow(df_total_dev)
ind5=nrow(df_total_dev)-n+1
# Same three regressors as the production model, refit on the truncated sample.
x1=df_oos$CAUR_qd_lag_1
mu1=mean(df_oos$CAUR_qd_lag_1)
sd1= stdev(df_oos$CAUR_qd_lag_1)
x2= df_oos$i10y_qd_lag_1
mu2=mean(df_oos$i10y_qd_lag_1)
sd2= stdev(df_oos$i10y_qd_lag_1)
x3= df_oos$djia_ag_lag_4
mu3=mean(df_oos$djia_ag_lag_4)
sd3= stdev(df_oos$djia_ag_lag_4)
out_oos=lm(df_oos$sba~x1+x2+x3)
summary(out_oos)
####################################
#implement the model - out of sample
####################################
ndata=nrow(df_oos)
npred=1
ind3=ndata+npred
output_oos=as.data.frame(matrix(0, ind3,7))
input=sba_input[1:ind3,]
output_oos[, 1]=input$sba
output_oos[1:ndata, 2]=out_oos$fitted.values
#PREDICT
# One-step-ahead prediction for the first held-out quarter (row ind5 == ind1).
indx=cbind(df_total_dev$CAUR_qd_lag_1,df_total_dev$i10y_qd_lag_1,df_total_dev$djia_ag_lag_4)
xreg_base=as.data.frame(t(indx[ind5,]))
names(xreg_base)=c("x1", "x2", "x3")
fitted.base=as.data.frame(predict(out_oos, xreg_base))
# get the values
output_oos[ind5, 2]=fitted.base
pct_error= 100*(output_oos[ind5,2]-output_oos[ind5,1])/output_oos[ind5,2]
result_oos=as.data.frame(cbind(n, output_oos[ind1,1], output_oos[ind1,2],pct_error))
return(result_oos)
}
# One-step-ahead back-tests holding out the last 2-5 quarters, collected
# into a single summary table.
oos(2)
oos(3)
oos(4)
oos(5)
indx101=rbind(oos(2),oos(3),oos(4),oos(5))
#####################################
# Prediction CI
#####################################
# Repeat the three scenario forecasts, this time with 95% prediction
# intervals: (fit, lower, upper) per scenario -> 10 output columns.
ndata=nrow(b1)
npred=9
output_ci=as.data.frame(matrix(0, ndata+npred,10))
D1=which(df$year==2003 & df$q==1)
D2=which(df$year==2016 & df$q==3)
output_ci[1:ndata, 1]=df$sba[D1:D2]
output_ci[1:ndata, 2]=out$fitted.values
#####################
#Scenario Forecasts
#####################
aaaa=which(base$year==2016 & base$quarter==1)
bbbb=which(base$year==2018 & base$quarter==1)
indx=cbind(base$CAUR_qd_lag_1,base$i10y_qd_lag_1,base$djia_ag_lag_4)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2", "x3")
fitted.base=as.data.frame(predict(out, xreg_base, interval = "predict", level = 0.95))
indx=cbind(adverse$CAUR_qd_lag_1,adverse$i10y_qd_lag_1,adverse$djia_ag_lag_4)
xreg_adverse=as.data.frame(indx[aaaa:bbbb,])
names(xreg_adverse)=c("x1", "x2", "x3")
fitted.adverse=as.data.frame(predict(out, xreg_adverse, interval = "predict", level = 0.95))
indx=cbind(severe$CAUR_qd_lag_1,severe$i10y_qd_lag_1,severe$djia_ag_lag_4)
xreg_severe=as.data.frame(indx[aaaa:bbbb,])
names(xreg_severe)=c("x1", "x2", "x3")
fitted.severe=as.data.frame(predict(out, xreg_severe, interval = "predict", level = 0.95))
abb=ndata+1
abc=nrow(output_ci)
output_ci[abb:abc, 2]=fitted.base[,1]
output_ci[abb:abc, 3]=fitted.base[,2]
output_ci[abb:abc, 4]=fitted.base[,3]
output_ci[abb:abc, 5]=fitted.adverse[,1]
output_ci[abb:abc, 6]=fitted.adverse[,2]
output_ci[abb:abc, 7]=fitted.adverse[,3]
output_ci[abb:abc, 8]=fitted.severe[,1]
output_ci[abb:abc, 9]=fitted.severe[,2]
output_ci[abb:abc, 10]=fitted.severe[,3]
colnames(output_ci)=c("Historical", "estimated_base_fit", "estimated_base_lwr",
"estimated_base_upr", "estimated_adverse_fit",
"estimated_adverse_lwr", "estimated_adverse_upr",
"estimated_severe_fit", "estimated_severe_lwr",
"estimated_severe_upr")
# Quarterly date axis for the 64 output rows, then export.
date1 = seq(ISOdate(2003,1,1), by = "quarter", length.out = 64)
write.csv(as.data.frame(cbind(date1,output_ci)), "SBA prediction ci.csv", col.names = T, row.names = F)
<file_sep>/dfast-support.R
### Leave-one-year-out cross-validation of a balance-forecasting model.
### For each calendar year in train_data: refit the model on all other years,
### predict the held-out year, and roll the prior year-end balance forward
### with get_bal_forecast(). `type` selects the estimator: "qrg" = quantile
### regression at `tau`, otherwise OLS; "arx" additionally renames `ar_term`
### to resp_lag1 so predict() matches the model formula.
### Returns list(hold_out_data=..., hold_out_coefs=...).
### NOTE(review): "uppder_bd" below looks like a typo for "upper_bd", but it
### is an output column name that downstream code may reference -- confirm
### before renaming.
get_hold_out_perf = function(history, train_data, bal_var="all", dt_var="qtr_dt", resp="log_diff", model="", tau=0.50, type="smp", ar_term="log_diff_lag1") {
### make fit object
keep_vars = c(resp, model, dt_var, bal_var)
hst = history[, c(dt_var, bal_var), with=FALSE]
setnames(hst, c(dt_var, bal_var), c("dt", "bal"))
hst[["year"]] = year(hst[["dt"]])
### last observed balance of each year (the extra .SD[1] is redundant since
### hst_desc already has one row per year)
hst_desc = hst[order(-dt),.SD[1], keyby="year"]
last_qtr_bal = hst_desc[,.SD[1], keyby="year"][, c("year", "bal")]
### shift forward: year Y's starting balance is year Y-1's closing balance.
### NOTE(review): `:=` modifies last_qtr_bal by reference, so both names
### alias the same table (harmless here since last_qtr_bal is not reused).
prior_year_end_bal = last_qtr_bal[, year := year + 1]
### make test data
keep_vars = c(resp, model, dt_var, bal_var)
train = train_data[, keep_vars, with=FALSE]
setnames(train, c(dt_var, resp, bal_var), c("dt", "resp", "bal"))
train[["year"]] = year(train[["dt"]])
if (type == "arx") {
model = replace(model, model == ar_term, "resp_lag1")
setnames(train, c(ar_term), c("resp_lag1"))
}
train_years = unique(train[["year"]])
j = 1
for (yr in train_years) {
### hold out year `yr`, fit on the rest
hold_out_yr = train[year == yr, ]
input_data = train[year != yr, ]
if (type == "qrg") {
hold_out_fit_obj = rq(resp~., tau=tau, data=input_data[, c("resp", model), with=FALSE])
} else {
hold_out_fit_obj = lm(resp~., data=input_data[, c("resp", model), with=FALSE])
}
### collect the refit coefficients, tagged with the held-out year
coefs = data.frame(summary(hold_out_fit_obj)$coefficients)
if (type == "qrg") {
names(coefs) = c("est", "lower_bd", "uppder_bd")
coefs$var = row.names(summary(hold_out_fit_obj)$coefficients)
coefs$yr_out = yr
} else {
names(coefs) = c("est", "se", "t", "p_value")
coefs$var = row.names(summary(hold_out_fit_obj)$coefficients)
coefs$yr_out = yr
}
if (j == 1) {
hold_out_coefs = coefs
} else {
hold_out_coefs = rbind(hold_out_coefs, coefs)
}
### predict the held-out year and roll the balance from the prior year end.
### NOTE(review): for type=="arx" this uses the *observed* lagged response
### from hold_out_yr rather than iterating the AR term -- confirm intended.
start_bal = prior_year_end_bal[year == yr, "bal"][[1]]
target_data = hold_out_yr
predictions = predict(hold_out_fit_obj, hold_out_yr)
target_data[["resp_hat"]] = predictions
target_data[["bal_est"]] = get_bal_forecast(start_bal, predictions)
if (j == 1) {
target_hold_data = target_data[, c("dt", "year", "bal", "bal_est", "resp", "resp_hat"), with=FALSE]
} else {
target_hold_data = rbind(target_hold_data, target_data[, c("dt","year", "bal", "bal_est", "resp", "resp_hat")])
}
j = j + 1
}
coef_data = data.table(hold_out_coefs)
coef_data = coef_data[order(var),]
list(hold_out_data=target_hold_data,hold_out_coefs=coef_data)
}
### out of time testing
### Fit the model only on observations at or before `max_train_dt`, score the
### full test set, and build an out-of-time balance forecast by compounding
### the predicted log-diffs from the (moving-average smoothed) cutoff balance.
### `type`: "qrg" = quantile regression at `tau`; "arx" = OLS with an
### autoregressive term that is iterated forward out of time via predict_arx;
### anything else = plain OLS. Returns the test data.table augmented with
### resp_hat / start_bal / cum_resp_hat / is_oot / bal_est columns.
get_oot_data = function(train_data, test_data, bal_var="all", dt_var="qtr_dt", resp="log_diff", model="", tau=0.50, type="smp", ar_term="log_diff_lag1", max_train_dt="2016-09-30", ma_n=2) {
  ### make fit object (train only up to the cutoff date)
  keep_vars = c(resp, model, dt_var, bal_var)
  train_ind = train_data[[dt_var]] <= as.Date(max_train_dt)  # already logical; ifelse() was redundant
  train = train_data[train_ind, keep_vars, with=FALSE]
  ### make test data
  keep_vars = c(resp, model, dt_var, bal_var)
  test = test_data[, keep_vars, with=FALSE]
  setnames(train, c(dt_var, resp, bal_var), c("dt", "resp", "bal"))
  setnames(test, c(dt_var, resp, bal_var), c("dt", "resp", "bal"))
  test[["bal_ma"]] = ma(test[["bal"]], n=ma_n)  # smoothed balance used as the forecast anchor
  if (type == "arx") {
    ### rename the AR regressor so predict() finds it under the model's name
    model = replace(model, model == ar_term, "resp_lag1")
    setnames(train, c(ar_term), c("resp_lag1"))
    setnames(test, c(ar_term), c("resp_lag1"))
  }
  if (type == "qrg") {
    fit_obj = rq(resp~., tau=tau, data=train[, c("resp", model), with=FALSE])
  } else {
    fit_obj = lm(resp~., data=train[, c("resp", model), with=FALSE])
  }
  test[["resp_hat"]] = predict(fit_obj, test)
  if (type == "arx") {
    ### use calculated resp_lag1: iterate the AR term through the out-of-time
    ### quarters. BUG FIX: the original wrote into
    ### `test[dt >= ...,][["resp_hat"]]`, which assigns into a subset *copy*
    ### and silently discards the iterated predictions; set() on the row
    ### indices updates `test` itself.
    oot_idx = which(test[["dt"]] >= as.Date(max_train_dt))
    set(test, i=oot_idx, j="resp_hat",
        value=predict_arx(fit_obj, test[oot_idx, ], ar_term="resp_lag1"))
  }
  test[["start_bal"]] = test[dt == as.Date(max_train_dt),][["bal_ma"]]
  test[["cum_resp_hat"]] = 0
  ### accumulate predicted log-diffs only over quarters after the cutoff
  test[, `:=`(cum_resp_hat = cumsum(ifelse(dt > as.Date(max_train_dt), resp_hat, 0)))]
  test[["is_oot"]] = ifelse(test[["dt"]] > as.Date(max_train_dt), 1, 0)
  ### roll the anchor balance forward with the cumulative predicted growth
  test[["bal_est"]] = test[["start_bal"]] * exp(test[["cum_resp_hat"]])
  if (type == "arx") {
    setnames(test, "resp_lag1", ar_term)  # restore the caller's column name
  }
  test
}
### Forecasts ##################################################################
### Produce baseline / adverse / severe scenario forecasts of the response
### (log-diff) and the implied balance path, anchored at the last actual
### (moving-average smoothed) balance. `model_obj` is the fitted model;
### `type`: "qrg" = quantile regression (confidence bands), "arx" = OLS with
### an iterated AR term, anything else = plain OLS (prediction intervals).
### Returns list(balance=..., ldiff=...).
get_forecasts = function(train_data, base, adv, sev, bal_var="all", dt_var="qtr_dt", model="", model_obj=NA, resp="log_diff", type="smp", ar_term="log_diff_lag1", ma_n=2) {
  ### BUG FIX: the original body referenced `baseline`/`adverse`/`severe`,
  ### which are NOT the parameter names (`base`/`adv`/`sev`), so the scenario
  ### inputs were silently resolved from the global environment. Alias the
  ### parameters explicitly so the function uses its own arguments.
  baseline = base
  adverse = adv
  severe = sev
  ### get historical data
  actuals = train_data
  if (type == "arx") {
    keep_vars_for_actuals = c(resp, dt_var, bal_var, ar_term)
  } else {
    keep_vars_for_actuals = c(resp, dt_var, bal_var)
  }
  actuals = actuals[, keep_vars_for_actuals, with=FALSE]
  setnames(actuals, c(dt_var, resp, bal_var), c("dt", "resp", "bal"))
  ### set up scenario data (model regressors + date only)
  keep_vars = c(model, dt_var)
  baseline = baseline[, keep_vars, with=FALSE]
  adverse = adverse[, keep_vars, with=FALSE]
  severe = severe[, keep_vars, with=FALSE]
  setnames(baseline, c(dt_var), c("dt"))
  setnames(adverse, c(dt_var), c("dt"))
  setnames(severe, c(dt_var), c("dt"))
  ### drop scenario quarters that precede the first actual observation
  first_actual_record = actuals[order(dt), .SD[1]]
  first_actual_dt = first_actual_record[["dt"]]
  baseline = baseline[dt >= first_actual_dt,]
  adverse = adverse[dt >= first_actual_dt,]
  severe = severe[dt >= first_actual_dt,]
  ### place actuals into scenario data
  if (type == "arx") {
    outcomes = actuals[, c("dt", "resp", "bal", ar_term), with=FALSE]
  } else {
    outcomes = actuals[, c("dt", "resp", "bal")]
  }
  outcomes[["bal_ma"]] = ma(outcomes[["bal"]], n=ma_n)
  baseline = outcomes[baseline, on="dt"]
  adverse = outcomes[adverse, on="dt"]
  severe = outcomes[severe, on="dt"]
  ### forecast starts after the last actual quarter; anchor on its smoothed bal
  last_actual_record = outcomes[order(-dt), .SD[1]]
  last_actual_dt = last_actual_record[["dt"]]
  last_actual_bal = last_actual_record[["bal_ma"]]
  baseline[["is_fcst"]] = ifelse(baseline[["dt"]] > last_actual_dt, 1, 0)
  adverse[["is_fcst"]] = ifelse(adverse[["dt"]] > last_actual_dt, 1, 0)
  severe[["is_fcst"]] = ifelse(severe[["dt"]] > last_actual_dt, 1, 0)
  ### point forecasts with intervals: prediction intervals for lm-type models,
  ### confidence bands for quantile regression
  if (type != "qrg") {
    baseline[, c("fcst", "pi_lo","pi_hi")] = data.frame(predict(model_obj, baseline, interval="predict"))
    adverse[, c("fcst", "pi_lo","pi_hi")] = data.frame(predict(model_obj, adverse, interval="predict"))
    severe[, c("fcst", "pi_lo","pi_hi")] = data.frame(predict(model_obj, severe, interval="predict"))
  } else {
    baseline[, c("fcst", "pi_lo","pi_hi")] = data.frame(predict(model_obj, baseline, interval="confidence"))
    adverse[, c("fcst", "pi_lo","pi_hi")] = data.frame(predict(model_obj, adverse, interval="confidence"))
    severe[, c("fcst", "pi_lo","pi_hi")] = data.frame(predict(model_obj, severe, interval="confidence"))
  }
  if (type == "arx") {
    ### use calculated resp_lag1: iterate the AR term through the forecast
    ### horizon. BUG FIXES vs the original: (a) it assigned into
    ### `scenario[...,][["fcst"]]`, a subset copy, discarding the iterated
    ### forecasts; (b) it filtered on `qtr_dt` (already renamed to `dt` above)
    ### and on an undefined `fcst_start_dt`. Rows are taken as
    ### dt >= last_actual_dt, the same subset the original fed to predict_arx.
    ### NOTE(review): confirm the intended iteration start date.
    b_idx = which(baseline[["dt"]] >= as.Date(last_actual_dt))
    set(baseline, i=b_idx, j="fcst", value=predict_arx(model_obj, baseline[b_idx, ], ar_term=ar_term))
    a_idx = which(adverse[["dt"]] >= as.Date(last_actual_dt))
    set(adverse, i=a_idx, j="fcst", value=predict_arx(model_obj, adverse[a_idx, ], ar_term=ar_term))
    s_idx = which(severe[["dt"]] >= as.Date(last_actual_dt))
    set(severe, i=s_idx, j="fcst", value=predict_arx(model_obj, severe[s_idx, ], ar_term=ar_term))
  }
  ### zero out the forecast path over history, then compound from the anchor
  baseline[["xfcst"]] = ifelse(baseline[["is_fcst"]] == 1, baseline[["fcst"]], 0)
  adverse[["xfcst"]] = ifelse(adverse[["is_fcst"]] == 1, adverse[["fcst"]], 0)
  severe[["xfcst"]] = ifelse(severe[["is_fcst"]] == 1, severe[["fcst"]], 0)
  baseline[["fcst_bal"]] = get_bal_forecast(last_actual_bal, baseline[["xfcst"]])
  adverse[["fcst_bal"]] = get_bal_forecast(last_actual_bal, adverse[["xfcst"]])
  severe[["fcst_bal"]] = get_bal_forecast(last_actual_bal, severe[["xfcst"]])
  ### over history, report the actual balance instead of the fitted path
  baseline[["fcst_bal"]] = ifelse(baseline[["is_fcst"]] == 1, baseline[["fcst_bal"]], baseline[["bal"]])
  adverse[["fcst_bal"]] = ifelse(adverse[["is_fcst"]] == 1, adverse[["fcst_bal"]], adverse[["bal"]])
  severe[["fcst_bal"]] = ifelse(severe[["is_fcst"]] == 1, severe[["fcst_bal"]], severe[["bal"]])
  ### output balance forecasts
  baseline_fcst_bal = baseline[, c("dt", "fcst_bal", "bal")]
  adverse_fcst_bal = adverse[, c("dt", "fcst_bal")]
  severe_fcst_bal = severe[, c("dt", "fcst_bal")]
  ### output ldiff forecasts
  baseline_fcst_ldiff = baseline[, c("dt", "resp", "fcst", "pi_lo", "pi_hi")]
  adverse_fcst_ldiff = adverse[, c("dt", "resp", "fcst", "pi_lo", "pi_hi")]
  severe_fcst_ldiff = severe[, c("dt", "resp", "fcst", "pi_lo", "pi_hi")]
  setnames(baseline_fcst_bal, "fcst_bal", "baseline")
  setnames(adverse_fcst_bal, "fcst_bal", "adverse")
  setnames(severe_fcst_bal, "fcst_bal", "severe")
  setnames(baseline_fcst_ldiff, c("fcst", "pi_lo", "pi_hi"), c("baseline" ,"baseline_pi_lo", "baseline_pi_hi"))
  setnames(adverse_fcst_ldiff, c("fcst", "pi_lo", "pi_hi"), c("adverse", "adverse_pi_lo", "adverse_pi_hi"))
  setnames(severe_fcst_ldiff, c("fcst", "pi_lo", "pi_hi"), c("severe", "severe_pi_lo", "severe_pi_hi"))
  ### merge the three scenarios on date (and on resp for the ldiff table)
  fcst_bal_data = severe_fcst_bal[adverse_fcst_bal[baseline_fcst_bal, on="dt"], on="dt"]
  key = c("dt", "resp")
  fcst_ldiff_data = severe_fcst_ldiff[adverse_fcst_ldiff[baseline_fcst_ldiff, on=key], on=key]
  ### in-sample fitted values (rows with an observed resp) copied to ns_resp_hat
  fcst_ldiff_data[!is.na(resp), ns_resp_hat := baseline]
  list(balance=fcst_bal_data, ldiff=fcst_ldiff_data)
}
<file_sep>/LGD/lgd ci - KPMG.R
###################################################################
# Project: Bank of Hope
# LGD C&I Model
###################################################################
# Work relative to this script's own directory (RStudio only).
setwd(dirname(rstudioapi::getActiveDocumentContext()$path))
library(DataAnalytics)
library(tseries)
library(urca)
library (fUnitRoots)
library(lubridate)
library(forecast)
library(tseries) # NOTE(review): duplicate of the tseries load above
library(CADFtest)
library (leaps)
library(data.table)
library(openxlsx)
library(car)
####################
##save image
# save.image("lgd-ci-v2.RData")
##load image
#load("lgd-ci-v2.RData")
##################################
# StepFun= dget("StepFun.R")
#####################
#read in the raw data
#####################
# Quarterly weighted-average LGD for the C&I portfolio.
df=read.csv("input_lgd_ci.csv",header = TRUE)
names(df)= c("date", "wa_lgd","year", "q")
plot(df$wa_lgd)
pp.test(na.remove(df$wa_lgd), lshort = F) # Phillips-Perron unit-root test
###################################
#transformations-- Q-o-Q and Y-o-Y
##################################
#q-o-q
df$wa_lgd_qd=c(NA, diff(df$wa_lgd))
#y-o-y
# back() shifts the series by 4 quarters (DataAnalytics helper).
df$wa_lgd_ad= df$wa_lgd- back(df$wa_lgd, noperiods = 4)
#######################
#read in the macro vars
#######################
######
#base
######
# Baseline macro scenario, trimmed to 2007Q1-2018Q4.
base=read.csv("base.csv", header=T)
aaa=which(base$year==2007 & base$quarter==1)
bbb=which(base$year==2018 & base$quarter==4)
base=base[aaa:bbb,]
#########
#adverse
#########
adverse=read.csv("adverse.csv", header=T)
aaa=which(adverse$year==2007 & adverse$quarter==1)
bbb=which(adverse$year==2018 & adverse$quarter==4)
adverse=adverse[aaa:bbb,]
########
#severe
########
severe=read.csv("severe.csv", header=T)
aaa=which(severe$year==2007 & severe$quarter==1)
bbb=which(severe$year==2018 & severe$quarter==4)
severe=severe[aaa:bbb,]
##################
#development macro
##################
# Development sample: 2007Q1-2015Q4 of the baseline.
# NOTE(review): filters on base$q here vs base$quarter above -- presumably
# both columns exist in the CSV; confirm.
D1=which(base$year==2007 & base$q==1)
D2=which(base$year==2015 & base$q==4)
macro_dev=base[c(D1:D2), ]
######################################
# Create the dep_var matrix
######################################
# Build the variable-metadata table (var_info) consumed by StepFun.
# Columns: var / tier / base / lag / diff / sign, populated by pattern
# matching on the macro variable names. Assignment order matters: a later
# pattern overwrites an earlier one (e.g. "spr10_q" refines "spr10"), so
# each map below preserves the original assignment order. Signs are the
# mirror image of the SBA model: LGD worsens when the economy weakens.
var.names = colnames(macro_dev[, -c(1, 2, 3)])
var_info = as.data.frame(matrix(0, length(var.names), 6))
names(var_info) = c("var", "tier", "base", "lag", "diff", "sign")
var_info[, 1] = var.names
var_info[, 5] = 0
# diff: flag differenced / growth-rate transformations
for (pat in c("_qd", "_yd", "_ad", "_ag", "_qg")) {
  var_info[grepl(pat, var_info$var), 5] = TRUE
}
# lag: number of quarters the variable is lagged
for (k in 1:4) {
  var_info[grepl(paste0("_lag_", k), var_info$var), 4] = k
}
# base: the underlying economic series each candidate derives from
base_map = c(
  ngdp = "gdp", rgdp = "gdp", rdi = "dpi", ndi = "dpi",
  ur_ = "ur_diff", UR_ = "ur_diff", cpi_ = "cpi",
  i3m = "i", i5y = "i", i10y = "i", bbb = "spr10",
  imort = "i", iprim = "i", cppi = "cppi", dji = "dji",
  VIX = "vix", vix = "vix",
  hpi_q = "hpi_diff", HPI_q = "hpi_diff",
  hpi_a = "hpi_diff", HPI_a = "hpi_diff", hpi_g = "hpi_diff",
  spr10 = "spr10", spr10_q = "spr10", spr10_a = "spr10",
  equipment = "equipment", pfi_nonres = "pfi_nonres",
  willreit = "willreit", KOGDP = "KOGDP", KOCPI = "KOCPI",
  CCI = "CCI_g", NCREIF = "NCREIF"
)
for (pat in names(base_map)) {
  var_info[grepl(pat, var_info$var), 3] = base_map[[pat]]
}
var_info[var_info$base==0,]  # print any unmapped variables
# sign: expected direction of the relationship with LGD (0 = unconstrained)
sign_map = c(
  ngdp = -1, rgdp = -1, rdi = -1, ndi = -1,
  ur_ = 1, UR_ = 1, cpi_ = 0,
  i3m = 0, i5y = 0, i10y = 0, bbb = 1, imort = 0, iprim = 0,
  cppi = -1, dji = -1, VIX = 0, vix = 0,
  hpi_q = -1, HPI_q = -1, hpi_a = -1, HPI_a = -1, hpi_g = -1,
  spr10 = 1, spr10_q = 1, spr10_a = 1,
  equipment = -1, pfi_nonres = -1, willreit = -1,
  KOGDP = -1, KOCPI = 0, CCI = -1, NCREIF = -1
)
for (pat in names(sign_map)) {
  var_info[grepl(pat, var_info$var), 6] = sign_map[[pat]]
}
var_info[var_info$sign==0,]  # print unconstrained-sign variables
# tier: search priority used by the stepwise selection (1 = searched first)
tier_map = c(
  ngdp = 1, rgdp = 1, rdi = 1, ndi = 1, ur_ = 1, UR_ = 1,
  cpi_ = 3, i3m = 3, i5y = 3, i10y = 3, bbb = 3, imort = 3, iprim = 3,
  cppi = 2, dji = 1, VIX = 2, vix = 2,
  hpi_q = 2, HPI_q = 2, hpi_a = 2, HPI_a = 2, hpi_g = 2,
  spr10 = 3, spr10_q = 3, spr10_a = 3,
  equipment = 1, pfi_nonres = 1, willreit = 2,
  KOGDP = 3, KOCPI = 3, CCI = 3, NCREIF = 2
)
for (pat in names(tier_map)) {
  var_info[grepl(pat, var_info$var), 2] = tier_map[[pat]]
}
#####################
#Variable Selection
#####################
# Assemble the inputs StepFun() expects:
#   b = response table (Date + weighted-average LGD and its q/q, y/y diffs)
#   c = candidate macro drivers (Date + macro_dev, built earlier in the file)
#   a = var_info metadata (tier / base / sign) built above
# Development sample: first 36 quarters of df (2007Q1-2015Q4, per D1/D2 below).
df2=df[c(1:36),]
date_col= as.data.frame(df2$date)
names(date_col)="Date"
lgd_input=as.data.frame(cbind(df2$wa_lgd, df2$wa_lgd_qd, df2$wa_lgd_ad))
names(lgd_input)=c("wa_lgd", "wa_lgd_qd", "wa_lgd_ad")
b1=cbind(date_col, lgd_input)
# NOTE: names(lgd_input) was already set above, so this rename is redundant
# for cols 2-4 — kept because it also (re)labels the Date column.
names(b1)=c("Date", names(lgd_input))
b=data.table(b1)
c1=cbind(date_col, macro_dev)
names(c1)=c("Date", names(macro_dev))
c=data.table(c1)
a=data.table(var_info)
# Combined development frame used later by the OOS back-test.
df_total_dev= as.data.frame(cbind(date_col, lgd_input, macro_dev))
# Stepwise-selection helper; defines StepFun() used below.
source("StepFun.R")
# # NOTE(review): the commented-out section below (down to the "final Model"
# # header) is an earlier exploratory run of the same workflow using
# # NJUR_qd + real_gross_pdi_equipment_ag_lag_4; kept for reference only.
# fix_vars0=c("1")
# model1_ci_lgd_sep=StepFun(a,b,c, tier=1, #indicate which tier of variables to consider
# y='wa_lgd~', #indicate response variable
# thresh=c(0.05, 0.01, 0.001), #significance level for SE based p-value and LR test based p-value for each tier
# criteria='SE.p', #variable selection criteria; other values='bic', 'LR.p', 'SE.p', 'rsq'
# vars0 = c("1"), #model 0 variables
# fix_vars0, #indicate which variables are fixed
# out.print=T #indicate wheter intermediate output will be printed
# )
#
# fix_vars0=c("1", "NJUR_qd", "real_gross_pdi_equipment_ag_lag_4")
# model1_ci_lgd_sep=StepFun(a,b,c, tier=2, #indicate which tier of variables to consider
# y='wa_lgd~', #indicate response variable
# thresh=c(0.05, 0.01, 0.001), #significance level for SE based p-value and LR test based p-value for each tier
# criteria='SE.p', #variable selection criteria; other values='bic', 'LR.p', 'SE.p', 'rsq'
# vars0 = c("1", "NJUR_qd", "real_gross_pdi_equipment_ag_lag_4"), #model 0 variables
# fix_vars0, #indicate which variables are fixed
# out.print=T #indicate wheter intermediate output will be printed
# )
#
# fix_vars0=c("1", "NJUR_qd", "real_gross_pdi_equipment_ag_lag_4")
# model1_ci_lgd_sep=StepFun(a,b,c, tier=3, #indicate which tier of variables to consider
# y='wa_lgd~', #indicate response variable
# thresh=c(0.05, 0.01, 0.001), #significance level for SE based p-value and LR test based p-value for each tier
# criteria='SE.p', #variable selection criteria; other values='bic', 'LR.p', 'SE.p', 'rsq'
# vars0 = c("1", "NJUR_qd", "real_gross_pdi_equipment_ag_lag_4"), #model 0 variables
# fix_vars0, #indicate which variables are fixed
# out.print=T #indicate wheter intermediate output will be printed
# )
#
# out=lm(b1$wa_lgd~c1$NJUR_qd+c1$real_gross_pdi_equipment_ag_lag_4)
# summary(out)
# acf(out$residuals)
# Box.test(out$residuals, type = "Ljung-Box", lag = 3)
# vif(out)
#
# x1=c1$NJUR_qd
# sd1= stdev(c1$NJUR_qd)
# x2=c1$real_gross_pdi_equipment_ag_lag_4
# sd2=stdev(c1$real_gross_pdi_equipment_ag_lag_4)
#
# out=lm(b1$wa_lgd~x1+x2)
# summary(out)
# acf(out$residuals)
# durbinWatsonTest(out)
# Box.test(out$residuals, type = "Ljung-Box", lag = 3) #null: independence ==> accept
#
# #
#
# # #implement the model
# #
# ndata=nrow(b1)
# npred=9
# output=as.data.frame(matrix(0, ndata+npred,5))
#
# D1=which(df$year==2007 & df$q==1)
# D2=which(df$year==2015 & df$q==4)
#
# output[1:ndata, 1]=df$wa_lgd[D1:D2]
# output[1:ndata, 2]=out$fitted.values
#
# plot(output[2:ndata, 2])
# lines(output[2:ndata, 1], col='red')
#
# #Predict
# aaaa=which(base$year==2016 & base$quarter==1)
# bbbb=which(base$year==2018 & base$quarter==1)
#
# indx=cbind(base$NJUR_qd, base$real_gross_pdi_equipment_ag_lag_4)
# xreg_base=as.data.frame(indx[aaaa:bbbb,])
# names(xreg_base)=c("x1", "x2")
# fitted.base=as.data.frame(predict(out, xreg_base))
#
# indx=cbind(adverse$NJUR_qd, adverse$real_gross_pdi_equipment_ag_lag_4)
# xreg_adverse=as.data.frame(indx[aaaa:bbbb,])
# names(xreg_adverse)=c("x1", "x2")
# fitted.adverse=as.data.frame(predict(out, xreg_adverse))
#
# indx=cbind(severe$NJUR_qd, severe$real_gross_pdi_equipment_ag_lag_4)
# xreg_severe=as.data.frame(indx[aaaa:bbbb,])
# names(xreg_severe)=c("x1", "x2")
# fitted.severe=as.data.frame(predict(out, xreg_severe))
#
# abb=ndata+1
# abc=nrow(output)
# output[abb:abc, 3]=fitted.base
# output[abb:abc, 4]=fitted.adverse
# output[abb:abc, 5]=fitted.severe
#
# output[1:ndata,c(3,4,5)]=output[1:ndata, 2]
#
#
# output[abb:abc,1]=NA
#
# date1 = seq(ISOdate(2007,1,1), by = "quarter", length.out = 45)
# plot(date1,output[,5], type='l', ylab="C&I LGD", col='red', ylim=c(0,1))
# lines(date1,output[,4], col='magenta')
# lines(date1,output[,3], col='black')
# lines(date1,output[,1], col='green')
# legend("topleft", legend= c("base", "adverse", "severe", "Historical"), fill=c("black", "blue", "red", "green"))
# colnames(output)=c("Historical", "estimated", "estimated_base", "estimated_adverse", "estimated_severe")
# output_wa_lgd_ci=output
# write.csv(as.data.frame(cbind(date1,output)), "wa_lgd_ci-v2.csv", col.names = T, row.names = F)
#
# # sensitivity
# #NJUR
#
# indx=cbind(base$NJUR_qd, base$real_gross_pdi_equipment_ag_lag_4)
# xreg_base=as.data.frame(indx[aaaa:bbbb,])
# names(xreg_base)=c("x1", "x2")
# fitted.base=as.data.frame(predict(out, xreg_base))
#
# indx_1sd=cbind(base$NJUR_qd+sd1, base$real_gross_pdi_equipment_ag_lag_4)
# xreg_base_1sd=as.data.frame(indx_1sd[aaaa:bbbb,])
# names(xreg_base_1sd)=c("x1", "x2")
# fitted.base_1sd=as.data.frame(predict(out, xreg_base_1sd))
#
# indx_2sd=cbind(base$NJUR_qd+2*sd1, base$real_gross_pdi_equipment_ag_lag_4)
# xreg_base_2sd=as.data.frame(indx_2sd[aaaa:bbbb,])
# names(xreg_base_2sd)=c("x1", "x2")
# fitted.base_2sd=as.data.frame(predict(out, xreg_base_2sd))
#
# abb=ndata+1
# abc=nrow(output)
# output[abb:abc, 3]=fitted.base
# output[abb:abc, 4]=fitted.base_1sd
# output[abb:abc, 5]=fitted.base_2sd
#
# output[1:ndata,c(3,4,5)]=output[1:ndata, 2]
#
#
# output[abb:abc,1]=NA
#
# date1 = seq(ISOdate(2007,1,1), by = "quarter", length.out = 45)
# plot(date1,output[,5], type='l', ylab="C&I LGD", col='red', ylim=c(0,1))
# lines(date1,output[,4], col='magenta')
# lines(date1,output[,3], col='black')
# lines(date1,output[,1], col='green')
# legend("topleft", legend= c("base", "adverse", "severe", "Historical"), fill=c("black", "blue", "red", "green"))
#
# colnames(output)=c("Historical", "estimated", "estimated_base", "estimated_adverse", "estimated_severe")
# output_wa_lgd_ci_sensitivity_NJUR=output
# write.csv(as.data.frame(cbind(date1,output)), "wa_lgd_ci_sensitivity_NJUR.csv", col.names = T, row.names = F)
#
# #gross pdi
# indx=cbind(base$NJUR_qd, base$real_gross_pdi_equipment_ag_lag_4)
# xreg_base=as.data.frame(indx[aaaa:bbbb,])
# names(xreg_base)=c("x1", "x2")
# fitted.base=as.data.frame(predict(out, xreg_base))
#
# indx_1sd=cbind(base$NJUR_qd, base$real_gross_pdi_equipment_ag_lag_4+sd2)
# xreg_base_1sd=as.data.frame(indx_1sd[aaaa:bbbb,])
# names(xreg_base_1sd)=c("x1", "x2")
# fitted.base_1sd=as.data.frame(predict(out, xreg_base_1sd))
#
# indx_2sd=cbind(base$NJUR_qd, base$real_gross_pdi_equipment_ag_lag_4+2*sd2)
# xreg_base_2sd=as.data.frame(indx_2sd[aaaa:bbbb,])
# names(xreg_base_2sd)=c("x1", "x2")
# fitted.base_2sd=as.data.frame(predict(out, xreg_base_2sd))
#
# abb=ndata+1
# abc=nrow(output)
# output[abb:abc, 3]=fitted.base
# output[abb:abc, 4]=fitted.base_1sd
# output[abb:abc, 5]=fitted.base_2sd
#
# output[1:ndata,c(3,4,5)]=output[1:ndata, 2]
#
#
# output[abb:abc,1]=NA
#
# date1 = seq(ISOdate(2007,1,1), by = "quarter", length.out = 45)
# plot(date1,output[,5], type='l', ylab="C&I LGD", col='red', ylim=c(0,1))
# lines(date1,output[,4], col='magenta')
# lines(date1,output[,3], col='black')
# lines(date1,output[,1], col='green')
# legend("topleft", legend= c("base", "adverse", "severe", "Historical"), fill=c("black", "blue", "red", "green"))
#
# colnames(output)=c("Historical", "estimated", "estimated_base", "estimated_adverse", "estimated_severe")
# output_wa_lgd_ci_sensitivity_PDI=output
# write.csv(as.data.frame(cbind(date1,output)), "wa_lgd_ci_sensitivity_PDI.csv", col.names = T, row.names = F)
#
#
####################################
#final Model
####################################
# Stepwise search, one tier at a time. The result of each pass (noted in the
# comments after each call) is carried forward via vars0/fix_vars0.
fix_vars0=c("1")
model1_ci_lgd_sep=StepFun(a,b,c, tier=1, #indicate which tier of variables to consider
y='wa_lgd~', #indicate response variable
thresh=c(0.05, 0.01, 0.001), #significance level for SE based p-value and LR test based p-value for each tier
criteria='SE.p', #variable selection criteria; other values='bic', 'LR.p', 'SE.p', 'rsq'
vars0 = c("1"), #model 0 variables
fix_vars0, #indicate which variables are fixed
out.print=T #indicate whether intermediate output will be printed
)
# Select CAUR_yd_lag_3
fix_vars0=c("1")
model1_ci_lgd_sep=StepFun(a,b,c, tier=1, #indicate which tier of variables to consider
y='wa_lgd~', #indicate response variable
thresh=c(0.05, 0.01, 0.001), #significance level for SE based p-value and LR test based p-value for each tier
criteria='rsq', #variable selection criteria; other values='bic', 'LR.p', 'SE.p', 'rsq'
vars0 = c("1","CAUR_yd_lag_3"), #model 0 variables
fix_vars0, #indicate which variables are fixed
out.print=T #indicate whether intermediate output will be printed
)
# Select rgdp_qg
fix_vars0=c("1", "CAUR_yd_lag_3", "rgdp_qg")
model1_ci_lgd_sep=StepFun(a,b,c, tier=2, #indicate which tier of variables to consider
y='wa_lgd~', #indicate response variable
thresh=c(0.05, 0.01, 0.001), #significance level for SE based p-value and LR test based p-value for each tier
criteria='rsq', #variable selection criteria; other values='bic', 'LR.p', 'SE.p', 'rsq'
vars0 = c("1","CAUR_yd_lag_3", "rgdp_qg"), #model 0 variables
fix_vars0, #indicate which variables are fixed
out.print=T #indicate whether intermediate output will be printed
)
# no added variable
fix_vars0=c("1", "CAUR_yd_lag_3", "rgdp_qg")
model1_ci_lgd_sep=StepFun(a,b,c, tier=3, #indicate which tier of variables to consider
y='wa_lgd~', #indicate response variable
thresh=c(0.05, 0.01, 0.001), #significance level for SE based p-value and LR test based p-value for each tier
criteria='rsq', #variable selection criteria; other values='bic', 'LR.p', 'SE.p', 'rsq'
vars0 = c("1","CAUR_yd_lag_3", "rgdp_qg"), #model 0 variables
fix_vars0, #indicate which variables are fixed
out.print=T #indicate whether intermediate output will be printed
)
# no added variable
# Final model: wa_lgd ~ CAUR_yd_lag_3 + rgdp_qg.
# sd1/sd2 are reused later for the +1sd/+2sd sensitivity runs.
# stdev() presumably comes from a package loaded earlier in the file — TODO confirm.
x1=c1$CAUR_yd_lag_3
sd1=stdev(c1$CAUR_yd_lag_3)
x2=c1$rgdp_qg
sd2= stdev(c1$rgdp_qg)
out=lm(b1$wa_lgd~x1+x2)
summary(out)
#Multicolinearity
vif(out)
#####################
# Residual tests
#####################
# Diagnostics on the final lm fit (`out`): autocorrelation, white-noise
# tests, normality, and residual-vs-fitted.
out_res=out$residuals
fitted=out$fitted.values
# Autocorrelations
par(mfrow=c(1,2))
acf(out_res, main="")
pacf(out_res, main="")
#white noise tests
Box.test(out_res, type = "Ljung-Box") #null: independence ==> accept
durbinWatsonTest(model = out)
#Q-Q Plot
par(mfrow=c(1,1))
qqnorm(out_res, ylab="Residuals", xlab="Quantiles of Standard Normal", main="CI LGD Model")
qqline(out_res)
# Residual vs predicted
plot(fitted,out_res, ylab="Residuals", xlab="predicted values", main="CI LGD Model", ylim=c(-0.5, 0.5))
abline(0, 0)
# plot(b1$wa_lgd)
# lines(out$fitted.values)
##############
#stationarity
#############
# ADF unit-root tests. NOTE(review): these run on the full `base` scenario
# series (history + forecast), not the development sample — confirm intended.
summary(ur.df(na.remove(base$CAUR_yd_lag_3)))
summary(ur.df(na.remove(base$rgdp_qg)))
#####################
#implement the model
#####################
# Build the combined in-sample + 9-quarter forecast frame:
#   col 1 = historical actuals, col 2 = in-sample fit,
#   cols 3-5 = base / adverse / severe scenario forecasts.
ndata=nrow(b1)
npred=9
output=as.data.frame(matrix(0, ndata+npred,5))
# Development window in df: 2007Q1 - 2015Q4 (36 quarters).
D1=which(df$year==2007 & df$q==1)
D2=which(df$year==2015 & df$q==4)
output[1:ndata, 1]=df$wa_lgd[D1:D2]
output[1:ndata, 2]=out$fitted.values
# Quick visual: fitted (points) vs actual (red line) over the sample.
plot(output[2:ndata, 2])
lines(output[2:ndata, 1], col='red')
#################################
#Predict
#################################
# Forecast horizon rows in the scenario tables: 2016Q1 - 2018Q1.
aaaa=which(base$year==2016 & base$quarter==1)
bbbb=which(base$year==2018 & base$quarter==1)
indx=cbind(base$CAUR_yd_lag_3, base$rgdp_qg)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2")   # must match the lm() regressor names
fitted.base=as.data.frame(predict(out, xreg_base))
indx=cbind( adverse$CAUR_yd_lag_3, adverse$rgdp_qg)
xreg_adverse=as.data.frame(indx[aaaa:bbbb,])
names(xreg_adverse)=c("x1", "x2")
fitted.adverse=as.data.frame(predict(out, xreg_adverse))
indx=cbind( severe$CAUR_yd_lag_3, severe$rgdp_qg)
xreg_severe=as.data.frame(indx[aaaa:bbbb,])
names(xreg_severe)=c("x1", "x2")
fitted.severe=as.data.frame(predict(out, xreg_severe))
abb=ndata+1
abc=nrow(output)
output[abb:abc, 3]=fitted.base
output[abb:abc, 4]=fitted.adverse
output[abb:abc, 5]=fitted.severe
# Backfill scenario columns with the in-sample fit; blank out actuals in
# the forecast window so the plot shows history only where it exists.
output[1:ndata,c(3,4,5)]=output[1:ndata, 2]
output[abb:abc,1]=NA
date1 = seq(ISOdate(2007,1,1), by = "quarter", length.out = 45)
plot(date1,output[,5], type='l', ylab="C&I LGD", col='red', ylim=c(0,1))
lines(date1,output[,4], col='magenta')
lines(date1,output[,3], col='black')
lines(date1,output[,1], col='green')
# FIX: adverse is drawn in magenta above; the legend previously showed blue.
legend("topleft", legend= c("base", "adverse", "severe", "Historical"), fill=c("black", "magenta", "red", "green"))
colnames(output)=c("Historical", "estimated", "estimated_base", "estimated_adverse", "estimated_severe")
output_wa_lgd_ci=output
# In-sample fitted path, reused by the OOS back-test below.
fcompare=output$estimated[1:36]
# FIX: dropped col.names=T — write.csv always writes a header and warns that
# col.names is ignored.
write.csv(as.data.frame(cbind(date1,output)), "ci_lgd_output.csv", row.names = F)
###############################################
#sensitivity
###############################################
#####
#CAur
#####
# Sensitivity of the base-scenario forecast to shocking CAUR_yd_lag_3 by
# +1 and +2 in-sample standard deviations (sd1, computed above).
indx=cbind(base$CAUR_yd_lag_3, base$rgdp_qg)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2")
fitted.base=as.data.frame(predict(out, xreg_base))
indx_1sd=cbind(base$CAUR_yd_lag_3+sd1, base$rgdp_qg)
xreg_base_1sd=as.data.frame(indx_1sd[aaaa:bbbb,])
names(xreg_base_1sd)=c("x1", "x2")
fitted.base_1sd=as.data.frame(predict(out, xreg_base_1sd))
indx_2sd=cbind(base$CAUR_yd_lag_3+2*sd1, base$rgdp_qg)
xreg_base_2sd=as.data.frame(indx_2sd[aaaa:bbbb,])
names(xreg_base_2sd)=c("x1", "x2")
fitted.base_2sd=as.data.frame(predict(out, xreg_base_2sd))
abb=ndata+1
abc=nrow(output)
output[abb:abc, 3]=fitted.base
output[abb:abc, 4]=fitted.base_1sd
output[abb:abc, 5]=fitted.base_2sd
output[1:ndata,c(3,4,5)]=output[1:ndata, 2]
output[abb:abc,1]=NA
date1 = seq(ISOdate(2007,1,1), by = "quarter", length.out = 45)
plot(date1,output[,5], type='l', ylab="C&I LGD", col='red', ylim=c(0,1))
lines(date1,output[,4], col='magenta')
lines(date1,output[,3], col='black')
lines(date1,output[,1], col='green')
# FIX: this plot shows base / base+1sd / base+2sd, not the three scenarios;
# the legend previously reused the scenario labels and the wrong colors.
legend("topleft", legend= c("base", "base +1sd", "base +2sd", "Historical"), fill=c("black", "magenta", "red", "green"))
# FIX: column names now describe the sensitivity runs instead of the
# adverse/severe scenarios (cols 4-5 hold the +1sd/+2sd shocked forecasts).
colnames(output)=c("Historical", "estimated", "estimated_base", "estimated_base_1sd", "estimated_base_2sd")
output_wa_lgd_ci_sensitivity_caur=output
# FIX: dropped col.names=T (ignored by write.csv, triggers a warning).
write.csv(as.data.frame(cbind(date1,output)), "lgd_ci_sensitivity_caur-v3.csv", row.names = F)
#####
#rgdp
#####
# Sensitivity of the base-scenario forecast to shocking rgdp_qg by
# +1 and +2 in-sample standard deviations (sd2, computed above).
indx=cbind(base$CAUR_yd_lag_3, base$rgdp_qg)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2")
fitted.base=as.data.frame(predict(out, xreg_base))
indx_1sd=cbind(base$CAUR_yd_lag_3, base$rgdp_qg+sd2)
xreg_base_1sd=as.data.frame(indx_1sd[aaaa:bbbb,])
names(xreg_base_1sd)=c("x1", "x2")
fitted.base_1sd=as.data.frame(predict(out, xreg_base_1sd))
indx_2sd=cbind(base$CAUR_yd_lag_3, base$rgdp_qg+2*sd2)
xreg_base_2sd=as.data.frame(indx_2sd[aaaa:bbbb,])
names(xreg_base_2sd)=c("x1", "x2")
fitted.base_2sd=as.data.frame(predict(out, xreg_base_2sd))
abb=ndata+1
abc=nrow(output)
output[abb:abc, 3]=fitted.base
output[abb:abc, 4]=fitted.base_1sd
output[abb:abc, 5]=fitted.base_2sd
output[1:ndata,c(3,4,5)]=output[1:ndata, 2]
output[abb:abc,1]=NA
date1 = seq(ISOdate(2007,1,1), by = "quarter", length.out = 45)
plot(date1,output[,5], type='l', ylab="C&I LGD", col='red', ylim=c(0,1))
lines(date1,output[,4], col='magenta')
lines(date1,output[,3], col='black')
lines(date1,output[,1], col='green')
# FIX: this plot shows base / base+1sd / base+2sd, not the three scenarios;
# the legend previously reused the scenario labels and the wrong colors.
legend("topleft", legend= c("base", "base +1sd", "base +2sd", "Historical"), fill=c("black", "magenta", "red", "green"))
# FIX: column names now describe the sensitivity runs (cols 4-5 hold the
# +1sd/+2sd shocked forecasts), not the adverse/severe scenarios.
colnames(output)=c("Historical", "estimated", "estimated_base", "estimated_base_1sd", "estimated_base_2sd")
output_wa_lgd_ci_sensitivity_rgdp=output
# FIX: dropped col.names=T (ignored by write.csv, triggers a warning).
write.csv(as.data.frame(cbind(date1,output)), "wa_lgd_ci_sensitivity_rgdp-v3.csv", row.names = F)
#####################################
# OOS testing
####################################
# Rolling-origin back-test: oos() (defined next) refits the model excluding
# the last n quarters and compares the held-out prediction to the full-sample
# fit. This n=1 is only a default; oos() is called explicitly with n = 1..4.
n=1
oos<-function(n){
#n defines how many quarters before 2015Q4
# Out-of-sample back-test for the C&I LGD model.
# Drops the last n quarters of the development sample, refits
# wa_lgd ~ CAUR_yd_lag_3 + rgdp_qg, predicts the first held-out quarter and
# compares it with the full-sample fitted value.
# Relies on globals defined earlier in this script:
#   df_total_dev (development data) and fcompare (full-sample fitted path).
# Returns a 1-row data.frame: n, full-sample fit, OOS prediction, difference.
# FIX: removed unused locals ind0/ind2/ind4 (ind2 also read the global
# npred before the local npred was defined).
ind=nrow(df_total_dev)-n
df_oos=df_total_dev[1:ind,]
ind1=nrow(df_oos)+1              # first held-out quarter (equals ind5)
ind5=nrow(df_total_dev)-n+1
x1=df_oos$CAUR_yd_lag_3
x2=df_oos$rgdp_qg
out_oos=lm(df_oos$wa_lgd~x1+x2)
summary(out_oos)
####################################
#implement the model - out of sample
####################################
ndata=nrow(df_oos)
npred=1
ind3=ndata+npred
# col 1 = full-sample fitted path, col 2 = refit fitted values + OOS prediction
# (remaining columns are unused placeholders, kept for output-shape parity).
output_oos=as.data.frame(matrix(0, ind3,7))
input=fcompare[1:ind3]
output_oos[, 1]=input
output_oos[1:ndata, 2]=out_oos$fitted.values
#PREDICT
indx=cbind(df_total_dev$CAUR_yd_lag_3, df_total_dev$rgdp_qg)
xreg_base=as.data.frame(t(indx[ind5,]))
names(xreg_base)=c("x1", "x2")
fitted.base=as.data.frame(predict(out_oos, xreg_base))
# get the values
output_oos[ind5, 2]=fitted.base
in_out_diff= output_oos[ind5,2]-output_oos[ind5,1]
result_oos=as.data.frame(cbind(n, output_oos[ind1,1], output_oos[ind1,2],in_out_diff))
return(result_oos)
}
# Back-test dropping 1..4 quarters; each call prints a 1-row comparison.
oos(1)
oos(2)
oos(3)
oos(4)
#####################################
# Prediction CI
#####################################
# 95% prediction intervals for each scenario over an extended horizon
# (2016Q1-2018Q3, npred=11 quarters vs 9 in the point-forecast section).
ndata=nrow(b1)
npred=11
output_ci=as.data.frame(matrix(0, ndata+npred,10))
D1=which(df$year==2007 & df$q==1)
D2=which(df$year==2015 & df$q==4)
output_ci[1:ndata, 1]=df$wa_lgd[D1:D2]
output_ci[1:ndata, 2]=out$fitted.values
#Predict
aaaa=which(base$year==2016 & base$quarter==1)
bbbb=which(base$year==2018 & base$quarter==3)
x1=c1$CAUR_yd_lag_3
sd1=stdev(c1$CAUR_yd_lag_3)
x2=c1$rgdp_qg
sd2= stdev(c1$rgdp_qg)
indx=cbind(base$CAUR_yd_lag_3, base$rgdp_qg)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2")
# interval="predict" returns fit / lwr / upr columns per scenario.
fitted.base=as.data.frame(predict(out, xreg_base, interval = "predict", level = 0.95))
indx=cbind( adverse$CAUR_yd_lag_3, adverse$rgdp_qg)
xreg_adverse=as.data.frame(indx[aaaa:bbbb,])
names(xreg_adverse)=c("x1", "x2")
fitted.adverse=as.data.frame(predict(out, xreg_adverse, interval = "predict", level = 0.95))
indx=cbind( severe$CAUR_yd_lag_3, severe$rgdp_qg)
xreg_severe=as.data.frame(indx[aaaa:bbbb,])
names(xreg_severe)=c("x1", "x2")
fitted.severe=as.data.frame(predict(out, xreg_severe, interval = "predict", level = 0.95))
abb=ndata+1
abc=nrow(output_ci)
output_ci[abb:abc, 2]=fitted.base[,1]
output_ci[abb:abc, 3]=fitted.base[,2]
output_ci[abb:abc, 4]=fitted.base[,3]
output_ci[abb:abc, 5]=fitted.adverse[,1]
output_ci[abb:abc, 6]=fitted.adverse[,2]
output_ci[abb:abc, 7]=fitted.adverse[,3]
output_ci[abb:abc, 8]=fitted.severe[,1]
output_ci[abb:abc, 9]=fitted.severe[,2]
output_ci[abb:abc, 10]=fitted.severe[,3]
# NOTE(review): unlike the earlier sections, the Historical column is left 0
# (not NA) in the forecast rows — confirm intended before consuming the CSV.
colnames(output_ci)=c("Historical", "estimated_base_fit", "estimated_base_lwr",
"estimated_base_upr", "estimated_adverse_fit",
"estimated_adverse_lwr", "estimated_adverse_upr",
"estimated_severe_fit", "estimated_severe_lwr",
"estimated_severe_upr")
date1 = seq(ISOdate(2007,1,1), by = "quarter", length.out = 47)
# NOTE(review): col.names is ignored by write.csv and triggers a warning.
write.csv(as.data.frame(cbind(date1,output_ci)), "C&I LGD prediction ci.csv", col.names = T, row.names = F)
<file_sep>/EBModel/3-ImportCIEndingBalances.R
################################################################################
# Bank of Hope
# Commercial & Industrial (C&I) Ending Balances
# Program: <>.R
# Author(s): KPMG, LLP
# Purpose:
# Data Dependencies:
#
#
# R-version: R version 3.3.1 (2016-06-21)
# -- "Bug in Your Hair" Copyright (C) 2016 The R Foundation
# for Statistical Computing Platform: x86_64-apple-darwin13.4.0 (64-bit)
################################################################################
### Environment Settings #######################################################
pth_inputs = "/Users/jerrywatkins/Projects/Engagements/BOH/ending-balance/combined/read-only-inputs"
pth_lib = "/Users/jerrywatkins/Projects/Engagements/BOH/ending-balance/combined/library"
pth_out = "/Users/jerrywatkins/Projects/Engagements/BOH/ending-balance/combined"
### No need to make changes below after this line ##############################
### Dependencies
source(paste(pth_lib,"/dev-support.R", sep=""))
source(paste(pth_lib,"/dfast-support.R", sep=""))
# source has the following functions:
# - stack()
# - get_bal_forecast()
# - concat()
# - bin_interval_variable()
# - calc_rsq(), calc_mape(), calc_mad(), calc_rmset()
# - log_diff()
# - cv_step(), cv_select()
library("openxlsx")
library("data.table")
library("lubridate")
library("ggplot2")
library("scales")
library("zoo")
### Collect SNL Data
# note: the ending balance units are thousands
# Read the SNL C&I ending-balance workbook; headers are skipped (startRow=3)
# so columns come in as X1..X16 and are renamed via rename_list below.
c_i = read.xlsx(
concat(pth_inputs, "/snl/Modified SNL_CI_EB.xlsx")
, sheet="Sheet1"
, colNames=FALSE
, startRow=3
, skipEmptyRows=FALSE
, skipEmptyCols=FALSE
, detectDates=TRUE
, check.names=TRUE
, na.strings="NA"
)
# Positional-column -> meaningful-name map; also fixes the column order kept.
rename_list = list(
"X1" = "quarter_date"
, "X2" = "quarter_month"
, "X3" = "quarter_year"
, "X4" = "snl_field_key"
, "X5" = "qtr_string"
, "X6" = "bbcn_eb"
, "X7" = "wilshire_eb"
, "X8" = "saehan_eb"
, "X9" = "bank_asiana_eb"
, "X10" = "foster_eb"
, "X11" = "pacific_eb"
, "X12" = "nara_eb"
, "X13" = "asiana_bank_eb"
, "X14" = "liberty_eb"
, "X15" = "mirae_eb"
, "X16" = "innovative_eb"
)
c_i = c_i[, names(rename_list)]
new_names = sapply(names(rename_list), function(x) rename_list[[x]])
names(new_names) = NULL
names(c_i) = new_names
c_i = data.table(c_i)
# convert ending balances to numeric
# and replace NA values with 0
# Each *_eb column: coerce to numeric, rescale thousands -> billions (/1e6),
# and zero-fill NAs (a bank absent in a quarter contributes 0 balance).
name_vec = names(c_i)
numeric_conv_vec = name_vec[grep("_eb", name_vec)]
for (name in numeric_conv_vec) {
# NOTE(review): this first assignment is immediately overwritten by the
# scaled/zero-filled assignment two lines down — it looks redundant.
c_i[, name] = as.numeric(c_i[, ..name][[1]])
col = as.numeric(c_i[, ..name][[1]])/1e6
c_i[, name] = ifelse(is.na(col), 0, col)
}
# Add variables
# total ending balance
# Sum the per-bank C&I ending balances into the combined `ci` series.
c_i[, ci :=
bbcn_eb
+ wilshire_eb
+ saehan_eb
+ bank_asiana_eb
+ foster_eb
+ pacific_eb
+ nara_eb
+ asiana_bank_eb
+ liberty_eb
+ mirae_eb
+ innovative_eb
]
# Date Variable
c_i[, qtr_dt := as.Date(quarter_date, "%Y-%m-%d")]
c_i = c_i[order(qtr_dt)]
# Long-format per-bank table with each bank's share of the combined balance.
banks = c(
"bbcn_eb"
, "wilshire_eb"
, "saehan_eb"
, "bank_asiana_eb"
, "foster_eb"
, "pacific_eb"
, "nara_eb"
, "asiana_bank_eb"
, "liberty_eb"
, "mirae_eb"
, "innovative_eb"
)
c_i_banks = melt(c_i, c("qtr_dt", "ci"), banks, variable.name="bank", value.name="ci_bank")
# Guard against divide-by-zero quarters where the combined balance is 0.
c_i_banks[, bank_pct_ci := ifelse(ci > 0, ci_bank/ci, 0)]
c_i = c_i[, c("qtr_dt", "ci")]
### Save Files for Later #######################################################
saveRDS(c_i, concat(pth_out, "/data-c_i.RDS"))
saveRDS(c_i_banks, concat(pth_out, "/data-c_i_banks.RDS"))
################################################################################
<file_sep>/1-ImportEconomicData.R
################################################################################
# Bank of Hope
# Macroeconomic Scenario Data Import (FRB CCAR + Moody's regional series)
# Program: <>.R
# Author(s): KPMG, LLP
# Purpose:
# Data Dependencies:
#
#
# R-version: R version 3.3.1 (2016-06-21)
# -- "Bug in Your Hair" Copyright (C) 2016 The R Foundation
# for Statistical Computing Platform: x86_64-apple-darwin13.4.0 (64-bit)
################################################################################
### Environment Settings #######################################################
pth_inputs = ""
pth_lib = ""
pth_out = ""
setwd("C:/Users/OL07805/Desktop/Desktop Things/Ending Balance Model Final/DFAST Production Run 2018/")
### No need to make changes below after this line ##############################
### Dependencies
source(paste(pth_lib,"dev-support.R", sep=""))
source(paste(pth_lib,"dfast-support.R", sep=""))
library("openxlsx")
library("data.table")
library("lubridate")
library("ggplot2")
library("scales")
library("zoo")
library("tseries")
# Step 1 of 4
### Regional Data ##############################################################
# needs openxlsx package
# ! Caution ! make sure the column order still matches the following
# column variable scenaro description
# x1 qtr_dt scenario none
# x2 ca_unemp baseline FRB CCAR 2017 - Baseline : Labor: Unemployment Rate, (%, SA)
# x3 ca_unemp adverse FRB CCAR 2017 - Adverse : Labor: Unemployment Rate, (%, SA)
# x4 ca_unemp severe FRB CCAR 2017 - Severely Adverse : Labor: Unemployment Rate, (%, SA)
# x5 ca_hpi baseline FRB CCAR 2017 - Baseline : FHFA All Transactions Home Price Index, (1980Q1 = 100, SA)
# x6 ca_hpi adverse FRB CCAR 2017 - Adverse : FHFA All Transactions Home Price Index, (1980Q1 = 100, SA)
# x7 ca_hpi severe FRB CCAR 2017 - Severely Adverse : FHFA All Transactions Home Price Index, (1980Q1 = 100, SA)
# x8 ca_gsp baseline FRB CCAR 2017 - Baseline : Gross State Product: Total, (Bil. $, SAAR) Nominal
# x9 ca_gsp adverse FRB CCAR 2017 - Adverse : Gross State Product: Total, (Bil. $, SAAR) Nominal
# x10 ca_gsp severe FRB CCAR 2017 - Severely Adverse : Gross State Product: Total, (Bil. $, SAAR) Nominal
# x11 ca_real_gsp baseline FRB CCAR 2017 - Baseline : Gross State Product: Total, (Bil. Chained 2009 $, SAAR) Real
# x12 ca_real_gsp adverse FRB CCAR 2017 - Adverse : Gross State Product: Total, (Bil. Chained 2009 $, SAAR) Real
# x13 ca_real_gsp severe FRB CCAR 2017 - Severely Adverse : Gross State Product: Total, (Bil. Chained 2009 $, SAAR) Real
# x14 ca_income baseline FRB CCAR 2017 - Baseline : Income: Disposable Personal, (Mil. $, SAAR) Nominal
# x15 ca_income adverse FRB CCAR 2017 - Adverse : Income: Disposable Personal, (Mil. $, SAAR) Nominal
# x16 ca_income severe FRB CCAR 2017 - Severely Adverse : Income: Disposable Personal, (Mil. $, SAAR) Nominal
# x17 ca_real_income baseline FRB CCAR 2017 - Baseline : Disposable Personal Income, (Mil. 09$, SAAR) Real
# x18 ca_real_income adverse FRB CCAR 2017 - Adverse : Disposable Personal Income, (Mil. 09$, SAAR) Real
# x19 ca_real_income severe FRB CCAR 2017 - Severely Adverse : Disposable Personal Income, (Mil. 09$, SAAR) Real
# Read the Moody's regional (California) workbook; headers skipped (startRow=6),
# so columns arrive as X1..X19 and are split per scenario via the column maps
# documented in the comment table above.
raw_region_data = read.xlsx(
concat(pth_inputs, "Regional Macrovariables Moodys 2018.xlsx")
, sheet="Sheet1"
, colNames=FALSE
, startRow=6
, skipEmptyRows=FALSE
, skipEmptyCols=FALSE
, detectDates=TRUE
, check.names=TRUE
, na.strings="ND"
)
# Scenario column picks: each keeps the date (X1) plus that scenario's series.
reg_baseline_cols = c("X1", "X2", "X5", "X8", "X11", "X14", "X17")
reg_adverse_cols = c("X1", "X3", "X6", "X9", "X12", "X15", "X18")
reg_severe_cols = c("X1", "X4", "X7", "X10", "X13", "X16", "X19")
reg_new_col_names = c(
"qtr_dt"
, "ca_unemp"
, "ca_hpi"
, "ca_gsp"
, "ca_rgsp"
, "ca_inc"
, "ca_rinc"
)
reg_baseline = data.table(raw_region_data[, reg_baseline_cols])
reg_adverse = data.table(raw_region_data[, reg_adverse_cols])
reg_severe = data.table(raw_region_data[, reg_severe_cols])
setnames(reg_baseline, reg_baseline_cols, reg_new_col_names)
setnames(reg_adverse, reg_adverse_cols, reg_new_col_names)
setnames(reg_severe, reg_severe_cols, reg_new_col_names)
# Step 2 of 4
### Employment Data ############################################################
# ! Caution ! make sure the column order still matches the following
# r_name var scenario region description:
# X1 qtr_dt none none Description:
# X2 empl baseline us FRB CCAR 2017 - Baseline: Employment: Total Nonagricultural, (Mil. #, SA)
# X3 empl adverse us FRB CCAR 2017 - Adverse: Employment: Total Nonagricultural, (Mil. #, SA)
# X4 empl severe us FRB CCAR 2017 - Severely Adverse: Employment: Total Nonagricultural, (Mil. #, SA)
# X5 ca_empl baseline ca FRB CCAR 2017 - Baseline : Employment: Total Nonagricultural, (Ths., SA)
# X6 ca_empl adverse ca FRB CCAR 2017 - Adverse : Employment: Total Nonagricultural, (Ths., SA)
# X7 ca_empl severe ca FRB CCAR 2017 - Severely Adverse : Employment: Total Nonagricultural, (Ths., SA)
# Read the Moody's non-farm employment workbook (same layout convention as the
# regional file: headers skipped, X1..X7 columns mapped per the table above).
raw_empl_data = read.xlsx(
concat(pth_inputs, "Non Farm Employment Moodys 2018.xlsx")
, sheet="Sheet1"
, colNames=FALSE
, startRow=6
, skipEmptyRows=FALSE
, skipEmptyCols=FALSE
, detectDates=TRUE
, check.names=TRUE
, na.strings="ND"
)
# Scenario column picks: date (X1) plus US and CA employment for each scenario.
empl_baseline_cols = c("X1", "X2", "X5")
empl_adverse_cols = c("X1", "X3", "X6")
empl_severe_cols = c("X1", "X4", "X7")
empl_new_col_names = c(
"qtr_dt"
, "empl"
, "ca_empl"
)
empl_baseline = data.table(raw_empl_data[, empl_baseline_cols])
empl_adverse = data.table(raw_empl_data[, empl_adverse_cols])
empl_severe = data.table(raw_empl_data[, empl_severe_cols])
setnames(empl_baseline, empl_baseline_cols, empl_new_col_names)
setnames(empl_adverse, empl_adverse_cols, empl_new_col_names)
setnames(empl_severe, empl_severe_cols, empl_new_col_names)
# Step 3 of 4
# FRB Data #####################################################################
# Collect historical data
# FRB DFAST 2018 domestic-variable CSVs: historical series plus the three
# supervisory scenario tables (baseline / adverse / severely adverse).
raw_historic = fread(concat(pth_inputs, "Historic_Domestic_2018.csv"))
raw_baseline = fread(concat(pth_inputs, "Table_2A_Supervisory_Baseline_Domestic_2018.csv"))
raw_adverse = fread(concat(pth_inputs, "Table_3A_Supervisory_Adverse_Domestic_2018.csv"))
raw_severe = fread(concat(pth_inputs, "Table_4A_Supervisory_Severely_Adverse_Domestic_2018.csv"))
# Step 4 of 4
# Transformations ##############################################################
get_frb_data = function(raw_frb_data) {
  # Standardize one FRB CCAR/DFAST domestic-variable table:
  #   * rename the published column headers to short model names,
  #   * derive the BBB credit spread and the 10y-3m term spread,
  #   * parse the "YYYY QN" quarter string into an end-of-quarter Date.
  # The input is copied so the caller's data.table is never modified in place.
  dt = copy(raw_frb_data)
  # short-name -> published-header lookup
  name_map = c(
    "rgdp_qg"         = "Real GDP growth"
    , "gdp_qg"          = "Nominal GDP growth"
    , "rinc_qg"         = "Real disposable income growth"
    , "inc_qg"          = "Nominal disposable income growth"
    , "unemp"           = "Unemployment rate"
    , "cpi"             = "CPI inflation rate"
    , "yld_03m"         = "3-month Treasury rate"
    , "yld_05y"         = "5-year Treasury yield"
    , "yld_10y"         = "10-year Treasury yield"
    , "yld_bbb"         = "BBB corporate yield"
    , "mort"            = "Mortgage rate"
    , "prime"           = "Prime rate"
    , "dow"             = "Dow Jones Total Stock Market Index (Level)"
    , "hpi"             = "House Price Index (Level)"
    , "crei"            = "Commercial Real Estate Price Index (Level)"
    , "vix"             = "Market Volatility Index (Level)"
    , "qtr_date_string" = "Date"
  )
  setnames(dt, old=unname(name_map), new=names(name_map))
  # credit spread and term spread
  dt[["bbb_spread"]] = dt[["yld_bbb"]] - dt[["yld_10y"]]
  dt[["yld_spread"]] = dt[["yld_10y"]] - dt[["yld_03m"]]
  # "2018 Q1" -> "Q1 2018" -> last calendar day of that quarter (frac=1)
  date_str = dt[["qtr_date_string"]]
  qtr_label = paste(substr(date_str, 6, 7), substr(date_str, 1, 4))
  dt[["qtr_dt"]] = as.Date(as.yearqtr(qtr_label, format = "Q%q %Y"), frac=1)
  dt
}
# Standardized macro tables per scenario. Each scenario table is the forecast
# appended after the full history — presumably so lag/growth transforms
# computed downstream are continuous across the history/forecast seam (TODO confirm).
frb_historic = get_frb_data(raw_historic)
frb_baseline = get_frb_data(rbind(raw_historic, raw_baseline))
frb_adverse = get_frb_data(rbind(raw_historic, raw_adverse))
frb_severe = get_frb_data(rbind(raw_historic, raw_severe))
# transform: merge an FRB macro data set with regional (reg_data) and
# employment (empl_data) series on qtr_dt, then derive the model-driver
# library: quarterly and eight-quarter growth rates, lags 1-4, quarterly and
# yearly differences, long-run ratios, annualized and year-average growth
# rates, plus recession / 9-quarter-window flags.
# Relies on helpers defined elsewhere in this file: gr(), ma(), delta(),
# concat() — and data.table::shift(). Assumes gr(x) returns a quarterly
# percent growth rate (TODO confirm against the helper's definition).
transform = function(raw_frb_data, reg_data, empl_data) {
# Add regional and empl variables
# data.table join syntax X[Y, on=...] keeps every row of Y, so all
# raw_frb_data quarters survive the two joins.
tf_data = empl_data[reg_data[raw_frb_data, on="qtr_dt"], on="qtr_dt"]
# calc growth rates
# _qg = quarter-over-quarter growth (default lag of gr()).
tf_data[["hpi_qg"]] = gr(tf_data[["hpi"]])
tf_data[["ca_hpi_qg"]] = gr(tf_data[["ca_hpi"]])
tf_data[["crei_qg"]] = gr(tf_data[["crei"]])
tf_data[["dow_qg"]] = gr(tf_data[["dow"]])
tf_data[["empl_qg"]] = gr(tf_data[["empl"]])
tf_data[["ca_empl_qg"]] = gr(tf_data[["ca_empl"]])
tf_data[["ca_gsp_qg"]] = gr(tf_data[["ca_gsp"]])
tf_data[["ca_rgsp_qg"]] = gr(tf_data[["ca_rgsp"]])
tf_data[["ca_inc_qg"]] = gr(tf_data[["ca_inc"]])
tf_data[["ca_rinc_qg"]] = gr(tf_data[["ca_rinc"]])
# _eg = growth over eight quarters (two years).
tf_data[["hpi_eg"]] = gr(tf_data[["hpi"]], lag=8)
tf_data[["ca_hpi_eg"]] = gr(tf_data[["ca_hpi"]], lag=8)
tf_data[["crei_eg"]] = gr(tf_data[["crei"]], lag=8)
tf_data[["dow_eg"]] = gr(tf_data[["dow"]], lag=8)
tf_data[["empl_eg"]] = gr(tf_data[["empl"]], lag=8)
tf_data[["ca_empl_eg"]] = gr(tf_data[["ca_empl"]], lag=8)
tf_data[["ca_gsp_eg"]] = gr(tf_data[["ca_gsp"]], lag=8)
tf_data[["ca_rgsp_eg"]] = gr(tf_data[["ca_rgsp"]], lag=8)
tf_data[["ca_inc_eg"]] = gr(tf_data[["ca_inc"]], lag=8)
tf_data[["ca_rinc_eg"]] = gr(tf_data[["ca_rinc"]], lag=8)
# keep relevant columns
# core_names drives both the column subset below and the lag/ratio loop.
core_names = c(
"dow"
, "hpi"
, "ca_hpi"
, "crei"
, "dow_qg"
, "hpi_qg"
, "ca_hpi_qg"
, "crei_qg"
, "ca_rgsp_qg"
, "ca_gsp_qg"
, "rgdp_qg"
, "gdp_qg"
, "ca_rinc_qg"
, "ca_inc_qg"
, "rinc_qg"
, "inc_qg"
, "ca_unemp"
, "unemp"
, "ca_empl_qg"
, "empl_qg"
, "yld_spread"
, "bbb_spread"
, "hpi_eg"
, "ca_hpi_eg"
, "crei_eg"
, "dow_eg"
, "empl_eg"
, "ca_empl_eg"
, "ca_gsp_eg"
, "ca_rgsp_eg"
, "ca_inc_eg"
, "ca_rinc_eg"
)
tf_data = tf_data[, c("qtr_dt", "yld_03m", core_names), with=FALSE]
# Rate-like series get quarterly (_qd) and yearly (_yd) differences,
# each with lags 1-3.
for (name in c("yld_spread", "bbb_spread", "unemp", "ca_unemp", "yld_03m")) {
dq_nm = concat(name, "_qd")
dy_nm = concat(name, "_yd")
tf_data[[dq_nm]] = delta(tf_data[[name]], lag=1)
tf_data[[concat(dq_nm, "_lag", 1)]] = shift(tf_data[[dq_nm]], n=1)
tf_data[[concat(dq_nm, "_lag", 2)]] = shift(tf_data[[dq_nm]], n=2)
tf_data[[concat(dq_nm, "_lag", 3)]] = shift(tf_data[[dq_nm]], n=3)
tf_data[[dy_nm]] = delta(tf_data[[name]], lag=4)
tf_data[[concat(dy_nm, "_lag", 1)]] = shift(tf_data[[dy_nm]], n=1)
tf_data[[concat(dy_nm, "_lag", 2)]] = shift(tf_data[[dy_nm]], n=2)
tf_data[[concat(dy_nm, "_lag", 3)]] = shift(tf_data[[dy_nm]], n=3)
}
for (name in core_names) {
# Transformations:
if (name %in% c("hpi", "crei", "ca_hpi", "bbb_spread", "yld_spread")) {
# Log-run ratio
lf_nm = concat(name,"_lf")
le_nm = concat(name,"_le")
lt_nm = concat(name,"_lt")
# Long-run ratio:
# percent deviation of the current level from its 4-, 8- and
# 12-quarter moving averages, each lagged 1-4 quarters.
tf_data[[lf_nm]] = 100 * ((tf_data[[name]]/ma(tf_data[[name]], n=4)) - 1)
tf_data[[concat(lf_nm, "_lag", 1)]] = shift(tf_data[[lf_nm]], n=1)
tf_data[[concat(lf_nm, "_lag", 2)]] = shift(tf_data[[lf_nm]], n=2)
tf_data[[concat(lf_nm, "_lag", 3)]] = shift(tf_data[[lf_nm]], n=3)
tf_data[[concat(lf_nm, "_lag", 4)]] = shift(tf_data[[lf_nm]], n=4)
tf_data[[le_nm]] = 100 * ((tf_data[[name]]/ma(tf_data[[name]], n=8)) - 1)
tf_data[[concat(le_nm, "_lag", 1)]] = shift(tf_data[[le_nm]], n=1)
tf_data[[concat(le_nm, "_lag", 2)]] = shift(tf_data[[le_nm]], n=2)
tf_data[[concat(le_nm, "_lag", 3)]] = shift(tf_data[[le_nm]], n=3)
tf_data[[concat(le_nm, "_lag", 4)]] = shift(tf_data[[le_nm]], n=4)
tf_data[[lt_nm]] = 100 * ((tf_data[[name]]/ma(tf_data[[name]], n=12)) - 1)
tf_data[[concat(lt_nm, "_lag", 1)]] = shift(tf_data[[lt_nm]], n=1)
tf_data[[concat(lt_nm, "_lag", 2)]] = shift(tf_data[[lt_nm]], n=2)
tf_data[[concat(lt_nm, "_lag", 3)]] = shift(tf_data[[lt_nm]], n=3)
tf_data[[concat(lt_nm, "_lag", 4)]] = shift(tf_data[[lt_nm]], n=4)
}
# Lag-1:
# lags 1-4 of every core series.
tf_data[[concat(name, "_lag", 1)]] = shift(tf_data[[name]], n=1)
tf_data[[concat(name, "_lag", 2)]] = shift(tf_data[[name]], n=2)
tf_data[[concat(name, "_lag", 3)]] = shift(tf_data[[name]], n=3)
tf_data[[concat(name, "_lag", 4)]] = shift(tf_data[[name]], n=4)
# Annualized Growth Rates
if (length(grep("_qg", name)) != 0) {
# Annualized Rate
# _ag = geometric mean of the trailing four quarterly growth rates,
# expressed in percent; NA for the first three quarters (NA propagates
# through the ^(1/4) and *100 steps below).
ag_nm = gsub("_qg", "_ag", name)
rate_vec = tf_data[[name]]/100
n = length(rate_vec)
tf_data[[ag_nm]] = sapply(1:n, function(t) {
if (t < 4) { agr = NA }
else {
agr = 1
for (j in 0:3) {
agr = agr * (1 + rate_vec[t - j])
}
}
agr = (agr^(1/4)) - 1
agr = 100 * agr
agr
}
)
tf_data[[concat(ag_nm, "_lag", 1)]] = shift(tf_data[[ag_nm]], n=1)
tf_data[[concat(ag_nm, "_lag", 2)]] = shift(tf_data[[ag_nm]], n=2)
tf_data[[concat(ag_nm, "_lag", 3)]] = shift(tf_data[[ag_nm]], n=3)
tf_data[[concat(ag_nm, "_lag", 4)]] = shift(tf_data[[ag_nm]], n=4)
# _ya = simple 4-quarter moving average of the quarterly growth rate.
ya_nm = gsub("_qg", "_ya", name)
tf_data[[ya_nm]] = ma(tf_data[[name]], n=4)
tf_data[[concat(ya_nm, "_lag", 1)]] = shift(tf_data[[ya_nm]], n=1)
tf_data[[concat(ya_nm, "_lag", 2)]] = shift(tf_data[[ya_nm]], n=2)
tf_data[[concat(ya_nm, "_lag", 3)]] = shift(tf_data[[ya_nm]], n=3)
tf_data[[concat(ya_nm, "_lag", 4)]] = shift(tf_data[[ya_nm]], n=4)
}
}
# Flag the 2007-2009 recession quarters and the 9-quarter stress window.
RECESSION_START = "2007-12-31"
RECESSION_END = "2009-06-30"
START_9Q = "2018-03-31"
END_9Q = "2020-03-31"
tf_data[["is_recession"]] = ifelse(tf_data[["qtr_dt"]] >= as.Date(RECESSION_START) & tf_data[["qtr_dt"]] <= as.Date(RECESSION_END), TRUE, FALSE)
tf_data[["is_9q_data"]] = ifelse(tf_data[["qtr_dt"]] >= as.Date(START_9Q) & tf_data[["qtr_dt"]] <= as.Date(END_9Q), TRUE, FALSE)
tf_data
}
######## Omar changed date filter here.
# Build the four scenario data sets, truncated at 2021Q1, and persist them
# as RDS files for downstream model scripts. concat() and pth_out are
# defined elsewhere in this file.
historic = transform(frb_historic, reg_baseline, empl_baseline)[qtr_dt <= as.Date("2021-03-31"),]
baseline = transform(frb_baseline, reg_baseline, empl_baseline)[qtr_dt <= as.Date("2021-03-31"),]
adverse = transform(frb_adverse, reg_adverse, empl_adverse)[qtr_dt <= as.Date("2021-03-31"),]
severe = transform(frb_severe, reg_severe, empl_severe)[qtr_dt <= as.Date("2021-03-31"),]
### Save Files for Later #######################################################
saveRDS(historic, concat(pth_out, "econ-data-historic.RDS"))
saveRDS(baseline, concat(pth_out, "econ-data-baseline.RDS"))
saveRDS(adverse, concat(pth_out, "econ-data-adverse.RDS"))
saveRDS(severe, concat(pth_out, "econ-data-severe.RDS"))
################################################################################
<file_sep>/simple regression.R
## Simple linear regression on the Banking data set.
## Fixes: the original called hist() with no argument (an error), referenced
## `Balance` without the data frame (not attached), and referenced a
## non-existent `Banking` object — the data is loaded into `myd`.
myd = read.table("Banking.txt", header=T)

# Exploratory views: pairwise scatter plots and correlation matrix.
pairs(myd)
cor(myd)

# Distribution of the response variable (density scale).
hist(myd$Balance, xlab="Balance", prob=TRUE, main="Histogram")

# Full model: Balance on Age, Education and Income.
fit <- lm(Balance ~ Age + Education + Income, data =myd)
summary(fit)

# Reduced model: drop Education.
fit2 <- lm(Balance ~ Age + Income, data =myd)
summary(fit2)
<file_sep>/PD/R07_PD_CRE.R
##############################################################################
## File Name: R07_PD_CRE.R
## Author: KZ
## Date: 7/5/2017 Created
## Purpose: To build PD model for BOH CRE portfolio accoring to
## "07 - cre - model.sas"
## Download 8/7/2017
##############################################################################
#setwd("//useomvfs77/mclp/Common/Clients/Bank of Hope/Model Development/PD Models")
setwd("C:/Users/ic07949/Desktop/dataset/Data output")
## Required packages: install any that are missing, then attach all of them.
## (The original ran the identical install loop twice; the duplicate is
## removed. The membership test now checks the installed package *names* —
## rownames(installed.packages()) — rather than the whole metadata matrix.)
requirements <- c("dplyr", "reshape2", "data.table","zoo","ggplot2","pROC","boot","tidyr","lazyeval","Hmisc")
installed <- rownames(installed.packages())
for(rr in requirements){
if(! rr %in% installed) install.packages(rr)
}
lapply(requirements, require, character.only=T)
## load data
## Development sample: account-level panel with baseline macro drivers.
cre_dev <- read.csv("df_boh_base.csv")
cre_dev$fileDate <- as.Date(cre_dev$fileDate, "%Y-%m-%d")
# Getting description of the current sample
# (Hmisc::describe of the default flag plus total balance; these *_1.._4
# snapshots feed the filter-waterfall table written out later.)
y_filter_1 <- describe(cre_dev$y)
bal_sum_1 <- sum(cre_dev$current_balance)
# Keep the unfiltered data for the CRE-vs-CI comparison plot below.
main_data <- cre_dev
# Restrict to the CRE portfolio within the 2007Q4-2016Q1 development window.
cre_dev <- filter(cre_dev, portfolio_id == "CRE" & as.Date(fileDate) <= as.Date("2016-03-31") &
as.Date(fileDate) >= as.Date("2007-12-31") )
# Getting description of the current sample
y_filter_2 <- describe(cre_dev$y)
bal_sum_2 <- sum(cre_dev$current_balance)
##########################
# Plot of CRE vs CI - start
# Compare quarterly default-rate percentages for the CRE and CI portfolios
# over the development window and save the plot.
# Data frames for the plots
# Using the main data
cre_dev_p <- cre_dev
ci_dev_p <- filter(main_data, portfolio_id == "CI" & as.Date(fileDate) <= as.Date("2016-03-31") &
as.Date(fileDate) >= as.Date("2007-12-31") )
# Per-quarter default counts and non-defaulter counts for CRE.
cre_default<- cre_dev_p %>% group_by(fileDate) %>% summarise(Defaulters_CRE = sum(y)) %>% data.frame()
cre_nondefault <- cre_dev_p %>% group_by(fileDate) %>% filter(y==0) %>% count() %>% data.frame()
colnames(cre_nondefault) <- c("fileDate","Nondefaulters_CRE")
cre_default <- merge(cre_default, cre_nondefault, by.x = "fileDate", by.y = "fileDate")
# NOTE(review): the rate divides by lag() of the non-defaulter count, i.e.
# the *previous* quarter's at-risk population — confirm this is intended.
cre_default$cre_default_pct <- round(100*cre_default$Defaulters_CRE/lag(cre_default$Nondefaulters_CRE),3)
# Same calculation for CI.
ci_default<- ci_dev_p %>% group_by(fileDate) %>% summarise(Defaulters_CI = sum(y)) %>% data.frame()
ci_nondefault <- ci_dev_p %>% group_by(fileDate) %>% filter(y==0) %>% count() %>% data.frame()
colnames(ci_nondefault) <- c("fileDate","Nondefaulters_CI")
ci_default <- merge(ci_default, ci_nondefault, by.x = "fileDate", by.y = "fileDate")
ci_default$ci_default_pct <- round(100*ci_default$Defaulters_CI/lag(ci_default$Nondefaulters_CI),3)
# Long format for ggplot: one row per (Date, portfolio) pair.
gg_df <- merge(cre_default, ci_default)
gg_df <- data.frame(Date = gg_df[,1],
CRE = gg_df$cre_default_pct,
CI = gg_df$ci_default_pct) %>%
reshape2::melt(id.vars = 'Date')
default_cre_ci_p <- ggplot(data = gg_df, mapping = aes(x = Date, y = value, group = variable, color = variable)) + geom_line() + theme(legend.position = 'bottom') + theme(legend.title=element_blank()) + xlab("Date") + ylab("Percentage %") + ggtitle("Default Rate Percentage CRE vs. CI") + theme(plot.title = element_text(hjust = 0.5)) + theme(text = element_text(size=15))
default_cre_ci_p
# save Plot
ggsave("./R output/CRE-CI_Default-Rate-Per.png", width = 7, height = 7)
# Plot of CRE vs CI - end
##########################
## delete obs with naicsCode = 0
# (NAICS / POB filters deliberately disabled.)
# cre_dev <- filter(cre_dev, naicsCode != 0)
# cre_dev <- filter(cre_dev, !is.na(POB))
# Drop rows that carry no information: zero balance and no default event.
cre_dev <- filter(cre_dev, !(current_balance == 0 & y ==0))
# Getting description of the current sample
y_filter_3 <- describe(cre_dev$y)
bal_sum_3 <- sum(cre_dev$current_balance)
## delete obs with property_descr in ('missing', 'None')
# Implemented against property_type: drop NA or 0 codes.
cre_dev <- filter(cre_dev, !(is.na(property_type) | property_type == 0))
# Quick default-rate / row-count check after filtering (printed below).
cre_dev_mean_3 <- apply(cre_dev[,which(colnames(cre_dev) %in% c("y")),drop=F],2,function (x) round(mean(x),4))
cre_dev_n_3 <- apply(cre_dev[,which(colnames(cre_dev) %in% c("y")),drop=F],2, length)
cre_dev_mean_3
cre_dev_n_3
## create final input variables
## Property-type dummies map legacy BBCN and Wilshire codes to common flags.
## (1) variable prop_res: 1-4 residential and multifamily
## BBCN: 10, 11
## wilshire: 11, 12
cre_dev$prop_res <- ifelse( (cre_dev$boh_id == "bbcn" & cre_dev$property_type %in% c(10 , 11) ) |
(cre_dev$boh_id == "wilshire" & cre_dev$property_type %in% c(11 , 12) ) ,
1,
0)
table(cre_dev$prop_res)
## (2a) variable prop_retail: Retail shopping center
## BBCN: 15, 16, 17, 18
## wilshire: 1
cre_dev$prop_retail <- ifelse( (cre_dev$boh_id == "bbcn" & cre_dev$property_type %in% c(15, 16, 17, 18) ) |
(cre_dev$boh_id == "wilshire" & cre_dev$property_type == 1 ) ,
1,
0)
## (2b) variable prop_auto: Gas Stations, Car Washes, and Auto Repair Centers
## BBCN: 33, 34, 36
## wilshire: 7, 8, 16
cre_dev$prop_auto <- ifelse( (cre_dev$boh_id == "bbcn" & cre_dev$property_type %in% c(33, 34, 36) ) |
(cre_dev$boh_id == "wilshire" & cre_dev$property_type %in% c(7, 8, 16) ) ,
1,
0)
## (2c) variable prop_hotel: Hotels and Motels
## BBCN: 28,29
## wilshire: 5
cre_dev$prop_hotel <- ifelse( (cre_dev$boh_id == "bbcn" & cre_dev$property_type %in% c(28, 29) ) |
(cre_dev$boh_id == "wilshire" & cre_dev$property_type == 5 ) ,
1,
0)
table(cre_dev$prop_retail)
## (3) variable boh_rating1
## Collapse internal ratings into three buckets; anything unmapped -> "error".
cre_dev$boh_rating1 <- ifelse(cre_dev$boh_rating %in% c(0,1,2,3), "R1",
ifelse(cre_dev$boh_rating %in% c(4,1000), "R2",
ifelse(cre_dev$boh_rating %in% c(2000,3000), "R3", "error")
))
cre_dev$boh_rating1_R1 <- ifelse(cre_dev$boh_rating1 == "R1",1, 0)
cre_dev$boh_rating1_R2 <- ifelse(cre_dev$boh_rating1 == "R2",1, 0)
#Lagged R1 and R2
# One-quarter lag of the rating dummies within each account.
cre_dev <- cre_dev %>% group_by(account_id) %>% mutate(boh_rating1_R1_l = lag(boh_rating1_R1)) %>% as.data.frame()
cre_dev <- cre_dev %>% group_by(account_id) %>% mutate(boh_rating1_R2_l = lag(boh_rating1_R2)) %>% as.data.frame()
# Update lagged values with current value
# (first observation of each account has no lag; fall back to current.)
cre_dev$boh_rating1_R1_l <- ifelse(is.na(cre_dev$boh_rating1_R1_l),cre_dev$boh_rating1_R1,cre_dev$boh_rating1_R1_l)
cre_dev$boh_rating1_R2_l <- ifelse(is.na(cre_dev$boh_rating1_R2_l),cre_dev$boh_rating1_R2,cre_dev$boh_rating1_R2_l)
## (4) variable rgdp_qg_lag_2_neg
## Keep only the negative part of lagged real GDP growth (0 otherwise).
cre_dev$rgdp_qg_lag_2_neg <- ifelse(cre_dev$rgdp_qg_lag_2 >= 0, 0, cre_dev$rgdp_qg_lag_2 )
mean(cre_dev$rgdp_qg_lag_2_neg )
## (5) variable CAUR_yd_3
## Cap the yearly unemployment change at 3.
cre_dev$CAUR_yd_3 <- ifelse(cre_dev$CAUR_yd >= 3, 3, cre_dev$CAUR_yd)
mean(cre_dev$CAUR_yd_3)
## (6) variable CAHPI_ag_6
## Cap annualized CA HPI growth at 6.
cre_dev$CAHPI_ag_6 <- ifelse(cre_dev$CAHPI_ag >= 6, 6, cre_dev$CAHPI_ag)
mean(cre_dev$CAHPI_ag_6)
## (7) variable POB_95
## NOTE(review): this *floors* POB at 95 (values <= 95 become 95) — the name
## suggests a cap; confirm the intended direction.
cre_dev$POB_95 <- ifelse(cre_dev$POB <= 95, 95, cre_dev$POB)
mean(cre_dev$POB_95)
## Wilshire Dummy
cre_dev$wilshire_d <- ifelse(cre_dev$boh_id == "wilshire",1,0)
# Getting description of the current sample
y_filter_4 <- describe(cre_dev$y)
bal_sum_4 <- sum(cre_dev$current_balance)
## Sampling the data
## 80/20 train/test split, seeded for reproducibility.
set.seed(20170502)
# Sample fraction
sample_fraction <- .8
cre_dev_training <- cre_dev %>% sample_frac(sample_fraction)
describe(cre_dev_training$fileDate)
# Getting description of the in sample
y_filter_in <- describe(cre_dev_training$y)
bal_sum_in <- sum(cre_dev_training$current_balance)
##Out of sample data
# NOTE(review): anti-join by row names — this only works if sample_frac()
# preserves the original row names of cre_dev; verify that the out-of-sample
# set really is the 20% complement (dplyr versions differ in row-name
# handling).
cre_dev_outsample <- cre_dev[-which(rownames(cre_dev) %in% rownames(cre_dev_training)),]
# Getting description of the out sample
y_filter_out <- describe(cre_dev_outsample$y)
bal_sum_out <- sum(cre_dev_outsample$current_balance)
# Table of dependent variable sample stats
# Filter-waterfall: describe() counts at each filtering step plus balances.
dep_var_filter_stats <- as.data.frame(rbind(y_filter_1$counts, y_filter_2$counts, y_filter_3$counts, y_filter_4$counts, y_filter_in$counts, y_filter_out$counts))
bal_sum_stats <- as.data.frame(rbind(bal_sum_1,bal_sum_2,bal_sum_3,bal_sum_4,bal_sum_in,bal_sum_out))
dep_var_filter_stats <- cbind(dep_var_filter_stats,bal_sum_stats)
rownames(dep_var_filter_stats) <- c("Input","CRE & (2007-2016)","Balance","Full Sample","In-sample","Out-of-sample")
colnames(dep_var_filter_stats)[8] <- "Sum of Curr. Bal."
write.csv(dep_var_filter_stats, "CRE_dep_var_filter_stats.csv")
###################################
## run logistic regression - insample
## Fit the PD logit on the training sample and evaluate via ROC/AUC
## in-sample and out-of-sample.
# NOTE(review): `model` is assigned twice — the second assignment (without
# prop_res) is the one actually fitted; the first line is dead code.
model <- y ~ prop_res + boh_rating1_R1_l + boh_rating1_R2_l + CAUR_yd_3 + POB_95
model <- y ~ boh_rating1_R1_l + boh_rating1_R2_l + CAUR_yd_3 + POB_95
fit <- glm(model, family = binomial(link = "logit"), data = cre_dev_training)
summary(fit)
#model_w <- y ~ prop_res + boh_rating1_R1_l + boh_rating1_R2_l + CAUR_yd_3 + POB_95 + wilshire_d
# NOTE(review): with the wilshire_d variant commented out, model_w is
# identical to `model`, so the "Wilshire comparison" below compares the
# same specification to itself.
model_w <- y ~ boh_rating1_R1_l + boh_rating1_R2_l + CAUR_yd_3 + POB_95
fit_w <- glm(model_w, family = binomial(link = "logit"), data = cre_dev_training)
summary(fit_w)
# Wilshire model comparison
stargazer::stargazer(fit,fit_w, type = "text", out = "W_compare_logit_CRE_PD.txt")
# McFadden's pseudo R squared for a fitted model
pR2 <- 1 - fit$deviance / fit$null.deviance
###################################
# Output the regression table in academic format
stargazer::stargazer(fit, type = "text", out = "compare_logit_CRE_PD.txt")
# Export coefficients; the X column (term names) is reused by the scenario
# forecast loop to score accounts by hand.
coef_cre <- as.data.frame(summary(fit)$coefficients)
write.csv(coef_cre, "coef_cre.csv")
coef_cre$X <- rownames(coef_cre)
# In-sample Prediction
prob <- predict(fit,type=c("response"))
cre_dev_training$p_hat <- prob
roc_in_df <- data.frame(y = cre_dev_training$y, prob)
# Find AUC
auc_in <- round(auc(roc_in_df$y, roc_in_df$prob),4)
roc_in <- roc(y ~ prob, data = roc_in_df)
## get ROC and AUC
plot(roc_in, main =paste0("CRE PD ROC IN \n AUC = ", auc_in))
# save Plot
pdf("CRE_ROC_AUC.pdf")
plot(roc_in, main =paste0("CRE PD ROC IN \n AUC = ", auc_in))
dev.off()
# Out-of-sample #
## get out-sample prediction p_hat for each account
predict_out <- predict(fit, cre_dev_outsample, type="response")
cre_dev_outsample$p_hat <- predict_out
## get ROC and AUC
roc_out <- data.frame(predict = predict_out, y = cre_dev_outsample$y)
roc_out_plot <- roc(y ~ predict, data = roc_out)
auc_out <- round(as.numeric(roc_out_plot$auc),4)
plot(roc_out_plot, main =paste0("CRE PD ROC OUT \n AUC = ", auc_out))
pdf(paste0("CRE_ROC_AUC_OUT.pdf"))
plot(roc_out_plot, main =paste0("CRE PD ROC OUT \n AUC = ", auc_out))
dev.off()
## get quarterly average PD in-sample
## Plot quarterly average fitted PD against the actual default rate,
## in-sample then out-of-sample.
cre_pd_quarterly <- subset(cre_dev_training, select = c(fileDate, y, p_hat))
cre_default <- cre_pd_quarterly %>% group_by(fileDate) %>% summarise(Defaulters_CRE = sum(y)) %>% data.frame()
cre_nondefault <- cre_pd_quarterly %>% group_by(fileDate) %>% filter(y==0) %>% count() %>% data.frame()
colnames(cre_nondefault) <- c("fileDate","Nondefaulters_CRE")
cre_default <- merge(cre_default, cre_nondefault, by.x = "fileDate", by.y = "fileDate")
# NOTE(review): actual PD divides by the *previous* quarter's non-defaulter
# count (lag) — confirm this matches the CRE-vs-CI plot's convention on purpose.
cre_default$cre_pd <- round(cre_default$Defaulters_CRE/lag(cre_default$Nondefaulters_CRE),3)
cre_pd_quarterly <- merge(cre_pd_quarterly, cre_default, by.x = "fileDate", by.y = "fileDate")
# Quarterly means of fitted p_hat and the actual rate, then long format.
cre_pd_quarterly <- aggregate(cre_pd_quarterly[,c("p_hat","cre_pd")], list(cre_pd_quarterly$fileDate), mean)
colnames(cre_pd_quarterly) <- c("fileDate","Fitted","Actual")
cre_pd_quarterly <- melt(cre_pd_quarterly, id = "fileDate")
cbPalette <- c("#000000", "#0072B2")
cre_pd_training_plot <- ggplot(cre_pd_quarterly, aes(x=fileDate, y = value, color=variable)) +
geom_line() + scale_colour_manual(values=cbPalette) + xlab("Date") + ylab("Default Rate") + ggtitle("Ave. Default Rate CRE - In-sample") +
theme(plot.title = element_text(hjust = 0.5)) +
theme(text = element_text(size=15)) + theme(legend.title=element_blank())
cre_pd_training_plot
ggsave("./R output/CRE_PD_actual_fitted_insample.png", width = 7, height = 7)
## get quarterly average PD out-sample
# Same construction on the hold-out sample.
cre_pd_quarterly_out <- subset(cre_dev_outsample, select = c(fileDate, y, p_hat))
cre_default <- cre_pd_quarterly_out %>% group_by(fileDate) %>% summarise(Defaulters_CRE = sum(y)) %>% data.frame()
cre_nondefault <- cre_pd_quarterly_out %>% group_by(fileDate) %>% filter(y==0) %>% count() %>% data.frame()
colnames(cre_nondefault) <- c("fileDate","Nondefaulters_CRE")
cre_default <- merge(cre_default, cre_nondefault, by.x = "fileDate", by.y = "fileDate")
cre_default$cre_pd <- round(cre_default$Defaulters_CRE/lag(cre_default$Nondefaulters_CRE),3)
cre_pd_quarterly_out <- merge(cre_pd_quarterly_out, cre_default, by.x = "fileDate", by.y = "fileDate")
cre_pd_quarterly_out <- aggregate(cre_pd_quarterly_out[,c("p_hat","cre_pd")], list(cre_pd_quarterly_out$fileDate), mean)
colnames(cre_pd_quarterly_out) <- c("fileDate","Fitted","Actual")
cre_pd_quarterly_out <- melt(cre_pd_quarterly_out, id = "fileDate")
cbPalette <- c("#000000", "#0072B2")
cre_pd_out_plot <- ggplot(cre_pd_quarterly_out, aes(x=fileDate, y = value, color=variable)) +
geom_line() + scale_colour_manual(values=cbPalette) + xlab("Date") + ylab("Default Rate") + ggtitle("Ave. Default Rate CRE - Out-of-sample") +
theme(plot.title = element_text(hjust = 0.5)) +
theme(text = element_text(size=15)) + theme(legend.title=element_blank())
cre_pd_out_plot
ggsave("./R output/CRE_PD_actual_fitted_outsample.png", width = 7, height = 7)
###########################################
## Forecast for 3 scenarios
## For each supervisory scenario: load the forecast panel, rebuild the model
## input variables exactly as in the development sample above, score each
## account with the exported coefficients, and store the quarterly average PD
## in cre_pd_quarterly_<scenario>.
setwd("C:/Users/ic07949/Desktop/dataset/Data output")
for(scenario in c("base", "adverse", "severe")){
#for(scenario in c("severe")){
print(paste0("==== ", scenario, " ===="))
cre_forecast <- read.csv(paste0("df_boh_",scenario, ".csv"))
cre_forecast$fileDate <- as.Date(cre_forecast$fileDate, "%Y-%m-%d")
# Forecast horizon: quarters strictly after the development window.
cre_forecast <- filter(cre_forecast, portfolio_id == "CRE" & as.Date(fileDate) > as.Date("2016-03-31") )
## delete obs with naicsCode = 0
# cre_forecast <- filter(cre_forecast, naicsCode != 0)
# cre_forecast <- filter(cre_forecast, !is.na(POB))
cre_forecast <- filter(cre_forecast, !(current_balance == 0 & y ==0))
## create final input variables
## (duplicated from the development-sample feature engineering above —
## keep the two sections in sync.)
## (1) variable prop_res: 1-4 residential and multifamily
## BBCN: 10, 11
## wilshire: 11, 12
cre_forecast$prop_res <- ifelse( (cre_forecast$boh_id == "bbcn" & cre_forecast$property_type %in% c(10 , 11) ) |
(cre_forecast$boh_id == "wilshire" & cre_forecast$property_type %in% c(11 , 12) ) ,
1,
0)
## (2a) variable prop_retail: Retail shopping center
## BBCN: 15, 16, 17, 18
## wilshire: 1
cre_forecast$prop_retail <- ifelse( (cre_forecast$boh_id == "bbcn" & cre_forecast$property_type %in% c(15, 16, 17, 18) ) |
(cre_forecast$boh_id == "wilshire" & cre_forecast$property_type == 1 ) ,
1,
0)
## (2b) variable prop_auto: Gas Stations, Car Washes, and Auto Repair Centers
## BBCN: 33, 34, 36
## wilshire: 7, 8, 16
cre_forecast$prop_auto <- ifelse( (cre_forecast$boh_id == "bbcn" & cre_forecast$property_type %in% c(33, 34, 36) ) |
(cre_forecast$boh_id == "wilshire" & cre_forecast$property_type %in% c(7, 8, 16) ) ,
1,
0)
## (2c) variable prop_hotel: Hotels and Motels
## BBCN: 28,29
## wilshire: 5
cre_forecast$prop_hotel <- ifelse( (cre_forecast$boh_id == "bbcn" & cre_forecast$property_type %in% c(28, 29) ) |
(cre_forecast$boh_id == "wilshire" & cre_forecast$property_type == 5 ) ,
1,
0)
## (3) variable boh_rating1
cre_forecast$boh_rating1 <- ifelse(cre_forecast$boh_rating %in% c(0,1,2,3), "R1",
ifelse(cre_forecast$boh_rating %in% c(4,1000), "R2",
ifelse(cre_forecast$boh_rating %in% c(2000,3000), "R3", "error")
))
cre_forecast$boh_rating1_R1 <- ifelse(cre_forecast$boh_rating1 == "R1",1, 0)
cre_forecast$boh_rating1_R2 <- ifelse(cre_forecast$boh_rating1 == "R2",1, 0)
#Lagged R1 and R2
cre_forecast <- cre_forecast %>% group_by(account_id) %>% mutate(boh_rating1_R1_l = lag(boh_rating1_R1))
cre_forecast <- cre_forecast %>% group_by(account_id) %>% mutate(boh_rating1_R2_l = lag(boh_rating1_R2))
# Update lagged values with current value
cre_forecast$boh_rating1_R1_l <- ifelse(is.na(cre_forecast$boh_rating1_R1_l),cre_forecast$boh_rating1_R1,cre_forecast$boh_rating1_R1_l)
cre_forecast$boh_rating1_R2_l <- ifelse(is.na(cre_forecast$boh_rating1_R2_l),cre_forecast$boh_rating1_R2,cre_forecast$boh_rating1_R2_l)
## (4) variable rgdp_qg_lag_2_neg
cre_forecast$rgdp_qg_lag_2_neg <- ifelse(cre_forecast$rgdp_qg_lag_2 >= 0, 0, cre_forecast$rgdp_qg_lag_2 )
## (5) variable CAUR_yd_3
cre_forecast$CAUR_yd_3 <- ifelse(cre_forecast$CAUR_yd >= 3, 3, cre_forecast$CAUR_yd)
## (6) variable CAHPI_ag_6
cre_forecast$CAHPI_ag_6 <- ifelse(cre_forecast$CAHPI_ag >= 6, 6, cre_forecast$CAHPI_ag)
## (7) variable POB_95
cre_forecast$POB_95 <- ifelse(cre_forecast$POB <= 95, 95, cre_forecast$POB)
## Wilshire Dummy
cre_forecast$wilshire_d <- ifelse(cre_forecast$boh_id == "wilshire",1,0)
## get pd forecast p_hat for each account
# Manual scoring: linear predictor from the exported coefficient table
# (coef_cre$X holds the term names, row 1 is the intercept), then the
# inverse-logit. Equivalent to predict(fit, type="response") by construction.
cre_forecast <- as.data.table(cre_forecast)
cre_forecast$p_hat <- as.matrix (cre_forecast[, coef_cre$X[-1],with = FALSE]) %*% coef_cre$Estimate[-1] +
coef_cre$Estimate[1]
cre_forecast$p_hat <- 1/(1+exp(-cre_forecast$p_hat))
## get quarterly average PD
cre_pd_quarterly_9Q <- subset(cre_forecast, select = c(fileDate, p_hat))
cre_pd_quarterly_9Q <- aggregate(cre_pd_quarterly_9Q[,2], list(cre_pd_quarterly_9Q$fileDate), mean)
setnames(cre_pd_quarterly_9Q, old = c("Group.1","p_hat"),
new = c("fileDate", "value"))
cre_pd_quarterly_9Q$variable <- scenario
cre_pd_quarterly_9Q <- cre_pd_quarterly_9Q[,c(1,3,2)]
# Stash the per-scenario result under cre_pd_quarterly_<scenario>.
assign(paste0("cre_pd_quarterly_",scenario), cre_pd_quarterly_9Q)
}
# Stack the three scenario forecasts with the historical fitted/actual series
# and plot the combined PD path per scenario.
cre_pd_quarterly_9Q <- rbind(cre_pd_quarterly_base, cre_pd_quarterly_adverse, cre_pd_quarterly_severe)
cre_pd_quarterly_all <- rbind(cre_pd_quarterly, cre_pd_quarterly_9Q)
setnames(cre_pd_quarterly_all, old = c("variable", "value"),
new = c("scenario","PD"))
## final plot
cbPalette <- c("#000000", "#0072B2", "#006600", "#E69F00", "#D55E00")
# Second ggtitle() call wins; "BOH CRE PD" is overridden.
cre_pd_plot <- ggplot(cre_pd_quarterly_all, aes(x = fileDate, y = PD, color = scenario)) +
geom_line() + scale_colour_manual(values=cbPalette) +
ggtitle("BOH CRE PD") + xlab("Date") + ylab("Default Rate") + ggtitle("Average Default Rate CRE") +
theme(plot.title = element_text(hjust = 0.5)) +
theme(text = element_text(size=15))
cre_pd_plot
ggsave("./R output/CRE_PD_actual_fitted_forecast.png", width = 7, height = 7)
## output results
write.csv(cre_pd_quarterly_all, "./R output/CRE_PD_quarterly_actual_fitted_forecast.csv", row.names = FALSE)
# Summary stats per sample
# Mean/SD/max/min/N of the model variables for the full, training and test
# samples. The fifteen near-identical apply() calls are collapsed into a
# col_stat() helper; every original variable (cre_dev_mean, cre_dev_in_sd,
# ...) is still assigned so downstream code is unaffected.
options(scipen=999)
sum_nms <- c("y","prop_res","boh_rating1_R1_l","boh_rating1_R2_l","CAUR_yd_3","POB_95","CAHPI_ag_lag_1")
training_df <- as.data.frame.matrix(cre_dev_training)
testing_df <- as.data.frame.matrix(cre_dev_outsample)
summary(training_df)
# col_stat: apply `fn` column-wise to the model variables of `df`.
col_stat <- function(df, fn) apply(df[,which(colnames(df) %in% sum_nms),drop=F], 2, fn)
rnd_mean <- function (x) round(mean(x),4)
rnd_sd <- function (x) round(sd(x),4)
rnd_max <- function (x) round(max(x),4)
rnd_min <- function (x) round(min(x),4)
# Make the summary stats table between the samples
cre_dev_mean <- col_stat(cre_dev, rnd_mean)
cre_dev_in_mean <- col_stat(training_df, rnd_mean)
cre_dev_out_mean <- col_stat(testing_df, rnd_mean)
cre_dev_sd <- col_stat(cre_dev, rnd_sd)
cre_dev_in_sd <- col_stat(training_df, rnd_sd)
cre_dev_out_sd <- col_stat(testing_df, rnd_sd)
cre_dev_max <- col_stat(cre_dev, rnd_max)
cre_dev_in_max <- col_stat(training_df, rnd_max)
cre_dev_out_max <- col_stat(testing_df, rnd_max)
cre_dev_min <- col_stat(cre_dev, rnd_min)
cre_dev_in_min <- col_stat(training_df, rnd_min)
cre_dev_out_min <- col_stat(testing_df, rnd_min)
cre_dev_n <- col_stat(cre_dev, length)
cre_dev_in_n <- col_stat(training_df, length)
cre_dev_out_n <- col_stat(testing_df, length)
cre_df_sample_stats <- rbind(
cre_dev_mean, cre_dev_in_mean, cre_dev_out_mean,
cre_dev_sd, cre_dev_in_sd, cre_dev_out_sd,
cre_dev_max, cre_dev_in_max, cre_dev_out_max,
cre_dev_min, cre_dev_in_min, cre_dev_out_min,
cre_dev_n, cre_dev_in_n, cre_dev_out_n
)
rownames(cre_df_sample_stats) <- c("Mean (All Obs)","Mean (Training)","Mean (Test)","SD (All Obs)","SD (Training)","SD (Test)","Max (All Obs)","Max (Training)","Max (Test)","Min (All Obs)","Min (Training)","Min (Test)","Obs (All Obs)","Obs (Training)","Obs (Test)")
write.csv(cre_df_sample_stats, "./R output/cre_df_sample_stats.csv")
# Number of default events per period
def_events_df <- cre_dev[,which(colnames(cre_dev) %in% c("fileDate","y")),drop=F]
def_events_df_sum <- def_events_df %>% group_by(fileDate) %>% summarise(Defaults = sum(y)) %>% data.frame()
Obs <- def_events_df %>% group_by(fileDate) %>% tally() %>% data.frame()
def_events_df_sum <- merge(def_events_df_sum, Obs)
colnames(def_events_df_sum) <- c("Date","No. of Defaults","Observations")
write.csv(def_events_df_sum, "./R output/cre_def_events_df_sum.csv")
##########
# Plots
# Profile bins
# add_bins() attaches the binned versions of the model drivers used by the
# profile plots below, replacing three copy-pasted stanzas. cut() breaks are
# computed per data frame, exactly as the original per-frame code did.
add_bins <- function(df) {
df$pob_bins <- cut(df$POB,breaks = 6)
df$risk_bins <- ifelse(df$boh_rating %in% c(0,1,2,3), "R1",
ifelse(df$boh_rating %in% c(4,1000), "R2",
ifelse(df$boh_rating %in% c(2000,3000), "R3", "error")
))
df$caur_bins <- cut(df$CAUR_yd,breaks = 4)
df$cahpi_bins <- cut(df$CAHPI_ag,breaks = 4)
df$gdp_bins <- cut(df$rgdp_qg_lag_2_neg, breaks = 2)
df
}
# Full-sample, in-sample and out-of-sample bins.
cre_dev <- add_bins(cre_dev)
cre_dev_training <- add_bins(cre_dev_training)
cre_dev_outsample <- add_bins(cre_dev_outsample)
######################
# Function to generate the plots:
pd_bin_plot <- function(data, date, dep_var, estimate, bins, title, profile){
  # Plot average default rates by bin, optionally against model estimates.
  #
  # Args:
  #   data:     data.frame panel, one row per loan-date observation.
  #   date:     column name (string) of the observation date.
  #   dep_var:  column name (string) of the 0/1 default indicator.
  #   estimate: column name (string) of the fitted PD; ignored when
  #             profile = TRUE (callers pass NA in that case).
  #   bins:     column name (string) of the bin factor to group by.
  #   title:    suffix appended to the plot title.
  #   profile:  TRUE  -> actual default rate + observation counts only;
  #             FALSE -> estimated vs actual default rate + counts.
  #
  # Side effect: draws a base-graphics plot on the active device.
  # NOTE: uses the deprecated SE dplyr verbs (group_by_/summarise_/...)
  # to stay consistent with the lazyeval interface used elsewhere here.
  require(lazyeval)
  require(dplyr)
  # Per (date, bin): default counts and surviving (y == 0) counts.
  defaulters <- data %>% group_by_(date, bins) %>% summarise_(Defaulters = interp(~sum(var, na.rm = F), var = as.name(dep_var))) %>% data.frame()
  nondefaulters <- data %>% group_by_(date, bins) %>% filter_(interp(~ var == 0, var = as.name(dep_var))) %>% count_() %>% data.frame()
  df <- merge(defaulters, nondefaulters)
  # Default rate = current-period defaults / prior-period survivors
  # (lag(n)); na.omit drops each bin's first period, which has no lag.
  df <- df %>% group_by_(bins) %>% mutate(pd_actual = Defaulters/lag(n)) %>% data.frame() %>% na.omit
  colnames(df) <- c("Date","bins","Defaulters","Nondefaulters","PD_Actual")
  # Average the per-period rates within each bin.
  df <- aggregate(df$PD_Actual, list(df$bins), mean)
  colnames(df) <- c("Bins","Actual")
  obs <- data %>% group_by_(bins) %>% count_()
  if (profile == T){
    df_2 <- obs
    colnames(df_2) <- c("Bins","Observations")
    df <- merge(df_2, df)
    df <- df[with(df,order(Bins)),]
    row.names(df) <- NULL
    layout(rbind(1,2), heights=c(7,1)) # put legend on bottom 1/8th of the chart
    blue <- rgb(0, 0, 1, alpha=0.2)
    par(mar = c(5,5,2,5))
    # Actual default-rate line, left axis.
    with(df, plot(as.numeric(row.names(df)), Actual, col="green4", type = "l", xaxt = "n",lwd=2,main = paste("Default Rate",title, sep=" - "),
                  ylab="Default Rate",xlab=names(df)[1],
                  ylim=c(0,max(df[,3]))))
    par(new = T)
    # Observation counts as translucent bars, right axis.
    bp <- with(df, barplot(df$Observations,axes=F, xlab=NA, ylab=NA, col=blue))
    axis(side = 4)
    mtext(side = 4, line = 3, 'Observation Count')
    axis(1, at=bp, labels=df[,1])
    # setup for no margins on the legend
    par(mar=c(0, 0, 0, 0))
    # c(bottom, left, top, right)
    plot.new()
    legend("center",
           legend=c("Actual","Obs."),
           lty=1,lwd=5, col=c("green4",blue), ncol=2)
  } else if (profile == F){
    # Average fitted PD per bin to compare against the actual rate.
    estimate <- data %>% group_by_(bins) %>% summarise_(Estimate = interp(~mean(var, na.rm = F), var = as.name(estimate))) %>% data.frame()
    df_2 <- merge(obs, estimate)
    colnames(df_2) <- c("Bins","Observations","Estimate")
    df <- merge(df_2, df)
    df <- df[with(df,order(Bins)),]
    row.names(df) <- NULL
    layout(rbind(1,2), heights=c(7,1)) # put legend on bottom 1/8th of the chart
    green <- rgb(.5, 1, .5, alpha=0.2)
    par(mar = c(5,5,2,5))
    with(df, plot(as.numeric(row.names(df)), Estimate, col="red", type = "l", xaxt = "n",lwd=2,main = paste("Default Rate",title, sep=" - "),
                  ylab="Default Rate",xlab=names(df)[1],
                  ylim=c(0,max(apply(df[,3:4], 2, max)))))
    par(new = T)
    with(df, plot(as.numeric(row.names(df)), Actual, col="blue", xaxt = "n",type = "l", lwd=2,
                  ylab="", xlab="", ylim=c(0,max(apply(df[,3:4], 2, max)))))
    par(new = T)
    bp <- with(df, barplot(df$Observations,axes=F, xlab=NA, ylab=NA, col=green))
    axis(side = 4)
    mtext(side = 4, line = 3, 'Observation Count')
    axis(1, at=bp, labels=df[,1])
    # setup for no margins on the legend
    par(mar=c(0, 0, 0, 0))
    # c(bottom, left, top, right)
    plot.new()
    # FIX: legend swatch now uses "red" to match the plotted Estimate line
    # (was "red3", a visibly darker shade than the line it labels).
    legend("center",
           legend=c("Estimate", "Actual","Obs."),
           lty=1,lwd=5, col=c("red","blue", green), ncol=3)
  }
}
#####################
# Profile Plots
#####################
# Risk-driver profiles on the full development sample: actual default rate
# plus observation counts per bin (profile = T, so no model estimate and
# the `estimate = NA` argument is ignored).  Drawn to the active device.
##########
# Plot profile POB
pd_bin_plot(data = cre_dev, date = "fileDate", dep_var = "y", estimate = NA, bins = "pob_bins", title = "POB - IN", profile = T)
##########
# Plot profile risk rating
pd_bin_plot(data = cre_dev, date = "fileDate", dep_var = "y", estimate = NA, bins = "risk_bins", title = "Risk Rating - IN", profile = T)
##########
# Plot profile CA UR
pd_bin_plot(data = cre_dev, date = "fileDate", dep_var = "y", estimate = NA, bins = "caur_bins", title = "CA UR - IN", profile = T)
##########
# Plot profile CA HPI
pd_bin_plot(data = cre_dev, date = "fileDate", dep_var = "y", estimate = NA, bins = "cahpi_bins", title = "CA HPI - IN", profile = T)
##########
# Plot profile GDP
pd_bin_plot(data = cre_dev, date = "fileDate", dep_var = "y", estimate = NA, bins = "gdp_bins", title = "GDP - IN", profile = T)
#####################
# Insample Plots
#####################
# Estimate-vs-actual plots on the training partition.  Each plot is drawn
# twice on purpose: once into a PDF under ./R output, then again to the
# interactive device (pd_bin_plot draws as a side effect, so the call is
# repeated after dev.off()).
##########
# Plot insample average PD by POB
pdf(paste("./R output", "Default_Rate-POB_IN.pdf" ,sep ="/"), height = 5, width = 10)
pd_bin_plot(data = cre_dev_training, date = "fileDate", dep_var = "y", estimate = "p_hat",bins = "pob_bins", title = "POB - IN", profile = F)
dev.off()
pd_bin_plot(data = cre_dev_training, date = "fileDate", dep_var = "y", estimate = "p_hat",bins = "pob_bins", title = "POB - IN", profile = F)
##########
# Plot insample average PD by risk rating
# NOTE(review): file name "RiskIN" lacks the underscore used by the other
# in-sample outputs ("POB_IN") -- confirm whether downstream consumers care.
pdf(paste("./R output", "Default_Rate-RiskIN.pdf" ,sep ="/"), height = 5, width = 10)
pd_bin_plot(data = cre_dev_training, date = "fileDate", dep_var = "y", estimate = "p_hat",bins = "risk_bins", title = "Risk Rating - IN", profile = F)
dev.off()
pd_bin_plot(data = cre_dev_training, date = "fileDate", dep_var = "y", estimate = "p_hat",bins = "risk_bins", title = "Risk Rating - IN", profile = F)
##########
# Plot insample average PD by CA UR
pdf(paste("./R output", "Default_Rate-CAURIN.pdf" ,sep ="/"), height = 5, width = 10)
pd_bin_plot(data = cre_dev_training, date = "fileDate", dep_var = "y", estimate = "p_hat",bins = "caur_bins", title = "CA UR - IN", profile = F)
dev.off()
pd_bin_plot(data = cre_dev_training, date = "fileDate", dep_var = "y", estimate = "p_hat",bins = "caur_bins", title = "CA UR - IN", profile = F)
##########
# Plot insample average PD by CA HPI
pdf(paste("./R output", "Default_Rate-CAHPIIN.pdf" ,sep ="/"), height = 5, width = 10)
pd_bin_plot(data = cre_dev_training, date = "fileDate", dep_var = "y", estimate = "p_hat",bins = "cahpi_bins", title = "CA HPI - IN", profile = F)
dev.off()
pd_bin_plot(data = cre_dev_training, date = "fileDate", dep_var = "y", estimate = "p_hat",bins = "cahpi_bins", title = "CA HPI - IN", profile = F)
##########
# Plot insample average PD by GDP
pdf(paste("./R output", "Default_Rate-GDPIN.pdf" ,sep ="/"), height = 5, width = 10)
pd_bin_plot(data = cre_dev_training, date = "fileDate", dep_var = "y", estimate = "p_hat",bins = "gdp_bins", title = "GDP - IN", profile = F)
dev.off()
pd_bin_plot(data = cre_dev_training, date = "fileDate", dep_var = "y", estimate = "p_hat",bins = "gdp_bins", title = "GDP - IN", profile = F)
#####################
# Out of sample plots
#####################
# Same estimate-vs-actual plots on the hold-out partition (PDF + screen).
# Comment headers below previously said "insample" -- these are all
# out-of-sample, matching the "- OUT" titles and file names.
##########
# Plot out-of-sample average PD by POB
pdf(paste("./R output", "Default_Rate-POB_OUT.pdf" ,sep ="/"), height = 5, width = 10)
pd_bin_plot(data = cre_dev_outsample, date = "fileDate", dep_var = "y", estimate = "p_hat",bins = "pob_bins", title = "POB - OUT", profile = F)
dev.off()
pd_bin_plot(data = cre_dev_outsample, date = "fileDate", dep_var = "y", estimate = "p_hat",bins = "pob_bins", title = "POB - OUT", profile = F)
##########
# Plot out-of-sample average PD by risk rating
pdf(paste("./R output", "Default_Rate-Risk_OUT.pdf" ,sep ="/"), height = 5, width = 10)
pd_bin_plot(data = cre_dev_outsample, date = "fileDate", dep_var = "y", estimate = "p_hat",bins = "risk_bins", title = "Risk Rating - OUT", profile = F)
dev.off()
pd_bin_plot(data = cre_dev_outsample, date = "fileDate", dep_var = "y", estimate = "p_hat",bins = "risk_bins", title = "Risk Rating - OUT", profile = F)
##########
# Plot out-of-sample average PD by CA UR
pdf(paste("./R output", "Default_Rate-CAUR_OUT.pdf" ,sep ="/"), height = 5, width = 10)
pd_bin_plot(data = cre_dev_outsample, date = "fileDate", dep_var = "y", estimate = "p_hat",bins = "caur_bins", title = "CA UR - OUT", profile = F)
dev.off()
pd_bin_plot(data = cre_dev_outsample, date = "fileDate", dep_var = "y", estimate = "p_hat",bins = "caur_bins", title = "CA UR - OUT", profile = F)
##########
# Plot out-of-sample average PD by CA HPI
pdf(paste("./R output", "Default_Rate-CAHPI_OUT.pdf" ,sep ="/"), height = 5, width = 10)
pd_bin_plot(data = cre_dev_outsample, date = "fileDate", dep_var = "y", estimate = "p_hat",bins = "cahpi_bins", title = "CA HPI - OUT", profile = F)
dev.off()
pd_bin_plot(data = cre_dev_outsample, date = "fileDate", dep_var = "y", estimate = "p_hat",bins = "cahpi_bins", title = "CA HPI - OUT", profile = F)
##########
# Plot out-of-sample average PD by GDP
pdf(paste("./R output", "Default_Rate-GDP_OUT.pdf" ,sep ="/"), height = 5, width = 10)
pd_bin_plot(data = cre_dev_outsample, date = "fileDate", dep_var = "y", estimate = "p_hat",bins = "gdp_bins", title = "GDP - OUT", profile = F)
dev.off()
pd_bin_plot(data = cre_dev_outsample, date = "fileDate", dep_var = "y", estimate = "p_hat",bins = "gdp_bins", title = "GDP - OUT", profile = F)
#################
# Coefficient Stability
# Generate in and out samples and forecast
# Make random samples
# Sample Number
# Refit the logit on 10 random 80% subsamples and compare the resulting
# out-of-sample forecasts to gauge stability of the fitted model.
sample_number <- 10
sample_fraction <- .8
set.seed(20170502)
cre_dev_training <- cre_dev %>% sample_frac(sample_fraction)
cre_dev_outsample <- cre_dev[-which(rownames(cre_dev) %in% rownames(cre_dev_training)),]
# Round the sample size to a whole number
# Uses the sample fraction set above to partition in-out samples
sample_size <- round(nrow(cre_dev)*sample_fraction)
# Sample from the df
predict_s <- list()
for (i in 1:sample_number){
##In sample data
# seed = i so each subsample is reproducible
set.seed(i)
df_sample_in <- cre_dev[sample(nrow(cre_dev), sample_size, replace = FALSE), ]
##Out of sample data
df_sample_out <- cre_dev[-which(rownames(cre_dev) %in% rownames(df_sample_in)),]
##Estimate the model
# `model` is the formula defined earlier in this script
logit_s <- glm(model, family = binomial(link = "logit"), data = df_sample_in)
predict_s[[i]] <- predict(logit_s, df_sample_out, type="response")
}
# Make a df for the predict df and assign names
header_predict <- paste("Sample_", seq(1:sample_number),sep="")
predict_s_df <- data.frame(predict_s)
colnames(predict_s_df) <- header_predict
# Make data frame of all predictions
# NOTE(review): each predict_s[[i]] was scored on a *different* random
# out-sample; binding them column-wise against cre_dev_outsample assumes
# the rows line up by position -- confirm this alignment is intended.
test_out_df <- data.frame(cre_dev_outsample,predict_s_df)
test_out_df <- test_out_df %>% group_by(fileDate) %>% mutate(defaulters = sum(y)) %>% data.frame()
test_out_df <- test_out_df %>% group_by(fileDate) %>% filter(y==0) %>% mutate(nondefaulters = n()) %>% data.frame()
test_out_df <- test_out_df %>% group_by(fileDate) %>% mutate(pd_actual = defaulters/lag(nondefaulters)) %>% data.frame()
fcst_df_nms <- c("fileDate",header_predict,"pd_actual")
test_out_df <- na.omit(test_out_df[,which(colnames(test_out_df) %in% fcst_df_nms), drop = F])
# Collapse to one average default rate / forecast per date.
test_out_df <- aggregate(x = test_out_df[,-1],
FUN = mean,
by = list(Date = test_out_df$fileDate))
# Plot of all Forecasts
predict_samples_gg <- melt(test_out_df, id = "Date")
sample_fcst_p <- ggplot(data = predict_samples_gg, aes(x = Date, y = value, group = variable, color = variable)) + geom_line() + xlab("Date") + ylab("Default Rate") + ggtitle("Ave. Default Rate CRE Out-of-Sample") +
theme(plot.title = element_text(hjust = 0.5)) +
theme(text = element_text(size=12)) + theme(legend.title=element_blank())
sample_fcst_p
# ggsave with no plot argument saves the last plot displayed.
ggsave(paste("./R output/cre_sample_fcst_plot.png"), width = 5, height = 5)
##################################
# Manual boot strapping - coefficients and p-values
# Make random samples
# Sample Number
# Refit the logit on 500 random 80% subsamples (sampling without
# replacement, i.e. subsampling rather than a classical bootstrap) and
# collect coefficient estimates and p-values.
sample_number <- 500
sample_fraction <- .8
# Round the sample size to a whole number
# Uses the sample fraction set above to partition in-out samples
sample_size <- round(nrow(cre_dev)*sample_fraction)
# Sample from the df
df_samples <- list()
coeff_l <- list()
pval_l <- list()
# Time the loop (500 glm fits can be slow).
start.time <- Sys.time()
start.time
for (i in 1:sample_number){
##In sample data
set.seed(i)
df_sample <- cre_dev[sample(nrow(cre_dev), sample_size, replace = FALSE), ]
logit <- glm(model, family = binomial(link = "logit"), data = df_sample)
# Column 1 of coef(summary()) = estimates, column 4 = p-values.
coeff_l[[i]] <- round(coef(summary(logit))[,1],5)
pval_l[[i]] <- round(coef(summary(logit))[,4],5)
}
end.time <- Sys.time()
time.taken <- end.time - start.time
time.taken
# Turn into data frames
# One row per subsample, one column per model term.
pval_boot_df <- as.data.frame(do.call("rbind",pval_l))
pval_boot_df <- data.frame(Sample=seq(from=1,to=sample_number,by=1),pval_boot_df)
write.csv(pval_boot_df, "./R output/cre_pval_boot_df.csv", row.names = T)
coef_boot_df <- as.data.frame(do.call("rbind",coeff_l))
coef_boot_df <- data.frame(Sample=seq(from=1,to=sample_number,by=1),coef_boot_df)
write.csv(coef_boot_df, "./R output/cre_coef_boot_df.csv", row.names = T)
# P-value Histograms
# One histogram per model term (column 1 is the Sample index, so start at 2).
gg_p_df <- list()
for (i in 2:ncol(pval_boot_df)){
gg_p_df[[i]] <- melt(pval_boot_df[,c(1,i)], id = "Sample")
pval_h_plot <- ggplot(gg_p_df[[i]], aes(value)) + geom_histogram(fill = "#006600") + xlab("Value") + ylab("Frequency") + ggtitle(paste("P-Value",gg_p_df[[i]][1,2],sep=" - ")) + theme(text = element_text(size=12)) + theme(legend.title=element_blank()) + theme(plot.title = element_text(hjust = 0.5)) + geom_vline(xintercept=0)
print(pval_h_plot)
ggsave( paste0(paste("./R output/", paste("CRE P-Value Histogram",gg_p_df[[i]][1,2],sep=" - "),sep=""),".png") , width = 5, height = 5)
}
# Coefficient Densities
# One density plot per model term.
gg_c_df <- list()
for (i in 2:ncol(coef_boot_df)){
gg_c_df[[i]] <- melt(coef_boot_df[,c(1,i)], id = "Sample")
coef_d_plot <- ggplot(gg_c_df[[i]], mapping = aes(x = value, group = variable, fill=variable)) + geom_density() + ggtitle(paste("Coef. Density",gg_c_df[[i]][1,2],sep=" - ")) + theme(text = element_text(size=12)) + theme(legend.title=element_blank()) + theme(plot.title = element_text(hjust = 0.5))+ xlab("Value") + ylab("Density") + theme(legend.position="none") + scale_fill_manual(values=c("#003399")) + scale_colour_manual(values=c("black")) + geom_vline(xintercept=0)
print(coef_d_plot)
ggsave( paste0(paste("./R output/", paste("CRE Coefficient Density",gg_c_df[[i]][1,2],sep=" - "),sep=""),".png") , width = 5, height = 5)
}
time.taken
<file_sep>/EBModel/colors.R
### color scheme
# src: http://www.cookbook-r.com/Graphs/Colors_(ggplot2)/
# Colorblind-friendly base palette; the hx* aliases below give project
# code stable names for individual palette slots.
cbPalette <- c("#999999", "#E69F00", "#56B4E9", "#00CC66", "#FFCC00", "#0072B2", "#D55E00", "#FF6699")
# To use for fills, add
#scale_fill_manual(values=cbPalette)
# To use for line and point colors, add
#scale_colour_manual(values=cbPalette)
hxGray = cbPalette[1]
hxOrange = cbPalette[2]
hxBlue = cbPalette[3]
hxGreen = cbPalette[4]
hxYellow = cbPalette[5]
hxDarkBlue = cbPalette[6]
hxDarkOrange = cbPalette[7]
hxDarkPink = cbPalette[8]
# Default ggplot2 hue-scale colors captured as named constants.
hxDRed = "#F8766D"
hxDGreen = "#7CAE00"
hxDAqua = "#00BFC4"
hxDBlue = "#00B0F6"
hxDPurple = "#C77CFF"
<file_sep>/dev-support.R
### General Purpose Utilities ##################################################
zip_to_list = function(a,b) {
    # Pair two parallel vectors into a named list: names come from `a`,
    # values from the matching positions of `b`.  A duplicated name in
    # `a` keeps the value from its last occurrence.
    out = list()
    for (idx in seq_along(a)) {
        out[[a[idx]]] = b[idx]
    }
    out
}
rm_blanks = function(u) {
    # Drop empty-string entries from `u`; return `u` unchanged when there
    # are none (so attributes and NA handling match the input exactly).
    blank_idx = which(u == "")
    if (length(blank_idx) > 0) {
        return(u[-blank_idx])
    }
    u
}
concat = function(...) {
    # Glue all arguments together with no separator.
    paste(..., sep = "")
}
get_excel = function(file, sheet) {
    # Read one worksheet of an .xlsx workbook into a data.frame with
    # project-standard options (header row, dates auto-detected, empty
    # rows/cols kept, "NA" strings mapped to NA).
    # needs openxlsx package
    read.xlsx(
        file
        , sheet=sheet
        , colNames=TRUE
        , startRow=1
        , skipEmptyRows=FALSE
        , skipEmptyCols=FALSE
        , detectDates=TRUE
        , check.names=TRUE
        , na.strings="NA"
    )
}
stack = function(..., labels=NA) {
    # Row-bind an arbitrary number of data.frames whose column sets may
    # differ.  Columns missing from a frame are filled with NA (numeric
    # columns) or the literal string "<NA>" (non-numeric columns), and a
    # `label` column tags each row with its source frame when `labels`
    # supplies exactly one label per input.
    #
    # Args:
    #   ...:    data.frames to stack, in order.
    #   labels: optional character vector, one entry per input frame.
    # Returns: one data.frame with the union of all columns plus `label`.
    # NOTE: this masks utils::stack for the rest of the session.
    df_list = list(...)
    m = length(df_list)
    n = 0
    df_n_list = list()
    for (i in 1:m) {
        k = dim(df_list[[i]])[1]
        df_n_list[[i]] = k
        n = n + k
    }
    # Get all column names and record whether each column is numeric.
    # BUG FIX: use [[ ]] -- is.numeric(test_df[name]) tests a one-column
    # data.frame (always FALSE), which made every column -- and its NA
    # fill -- character in the stacked output.
    is_numeric_list = list()
    name_vec = c()
    for (i in 1:m) {
        test_df = df_list[[i]]
        for (name in names(test_df)) {
            is_numeric_list[[name]] = is.numeric(test_df[[name]])
        }
        name_vec = c(names(test_df), name_vec)
    }
    name_vec = unique(name_vec)
    # Add the columns each frame is missing, with type-appropriate fill.
    for (i in 1:m) {
        col_names = names(is_numeric_list)
        for (name in col_names) {
            if (!(name %in% names(df_list[[i]]))) {
                if (is_numeric_list[[name]]) {
                    df_list[[i]][name] = NA
                } else {
                    df_list[[i]][name] = "<NA>"
                }
            }
        }
    }
    # Pre-allocate the stacked frame (dummy column fixes the row count).
    out_df = data.frame(dummy=numeric(n))
    for(name in name_vec) {
        if (is_numeric_list[[name]]) {
            out_df[name] = numeric(n)
        } else {
            out_df[name] = character(n)
        }
    }
    out_df = out_df[, !(names(out_df) %in% "dummy")]
    out_df["label"] = "<NA>"
    # Populate row by row, applying the source label when provided.
    start_row = 0
    for (i in 1:m) {
        rows = df_n_list[[i]]
        curr_df = df_list[[i]]
        for (j in 1:rows) {
            k = start_row + j
            out_df[k, name_vec] = curr_df[j, name_vec]
            if (length(labels) == m) {
                out_df[k, "label"] = labels[i]
            }
        }
        start_row = start_row + rows
    }
    out_df
}
### Transformation #############################################################
delta = function(y, lag=1) {
    # Lagged difference y_t - y_{t-lag}, right-aligned and NA-padded to
    # the length of `y`.  Implemented with zoo::rollapply so zoo/ts
    # inputs keep their index attributes.
    # needs zoo package
    rollapply(y, lag+1, function(x) {diff(x, lag=lag)}, fill=NA, align="right")
}
log_diff = function(y, lag=1) {
    # Log-difference of `y` at the given lag: delta(log(y), lag).
    # needs zoo package (via delta)
    log_y = log(y)
    delta(log_y, lag=lag)
}
ma = function(y, n=1) {
    # Trailing (right-aligned) n-period moving average, NA-padded to the
    # length of `y`.
    # needs zoo package
    rollapply(y, n, mean, fill=NA, align="right")
}
gr = function(y, lag=1) {
    # Percentage growth over `lag` periods:
    # 100 * (y_t - y_{t-lag}) / y_{t-lag}.
    # NOTE(review): `shift` is presumably data.table::shift -- confirm
    # which package providing `shift` is attached at call time.
    100 * (delta(y, lag=lag)/shift(y, n=lag))
}
### Error Calculations #########################################################
calc_rsq = function(y, yhat) {
    # Coefficient of determination: 1 - SSE/SST, with SST measured
    # against the mean of the actuals.
    residual_ss = sum((y - yhat)^2)
    total_ss = sum((y - mean(y))^2)
    1 - residual_ss/total_ss
}
calc_rmse = function(y, yhat) {
    # Root mean squared error of the forecast.
    sqrt(mean((y - yhat)^2))
}
calc_mad = function(y, yhat) {
    # Mean absolute deviation of the forecast errors.
    mean(abs(y - yhat))
}
calc_mape = function(y, yhat) {
    # Mean absolute percentage error, returned as a fraction (not x100).
    mean(abs(yhat / y - 1))
}
calc_smape = function(y, yhat) {
    # Symmetric MAPE: mean of 2|y - yhat| / (|y| + |yhat|), as a fraction.
    mean(2 * abs(y - yhat) / (abs(y) + abs(yhat)))
}
calc_maape = function(y, yhat) {
    # Mean arctangent absolute percentage error.
    # NOTE(review): the denominator here is |yhat|, not the more common
    # |y| (Kim & Kim 2016) -- confirm this is intentional.
    mean(atan(abs(y - yhat) / abs(yhat)))
}
calc_under_rate = function(y, yhat, pct_tol=0.025) {
    # Share of observations where the forecast undershoots the actual by
    # more than pct_tol (expressed as a fraction of the actual).
    shortfall_pct = -(yhat/y - 1)
    mean(ifelse(shortfall_pct > pct_tol, 1, 0))
}
### Binning ####################################################################
bin_interval_variable <- function(u, n=10) {
    # Build quantile-based bin breaks and midpoints for a numeric variable.
    #
    # Args:
    #   u: numeric vector (a one-column container is unwrapped below).
    #   n: requested percentage step for the quantile grid; when n does
    #      not divide 100 it is adjusted downward.
    # Returns: list(breaks=, midpoints=) where breaks are open-ended
    #   (-Inf/Inf) around the interior quantiles and midpoints are the
    #   averages of consecutive real-data bin edges.
    # NOTE(review): the divisor branch uses n/100 as the step (so n is a
    # step size in percent, not a bin count) -- confirm intent.
    if (!is.vector(u)) {
        u <- u[[1]]
    }
    if (100 %% n == 0) {
        pct_increment = n/100
    } else {
        # BUG FIX: the original computed new_n but never assigned
        # pct_increment, so any n that does not divide 100 failed with
        # "object 'pct_increment' not found".  Mirror the divisor branch
        # using the adjusted step.
        print("Adjusted requested bin count to be a factor on 100.")
        new_n <- n - (100 %% n)
        pct_increment <- new_n/100
    }
    pct_list <- seq(from=pct_increment, to=1 - pct_increment, by=pct_increment)
    quantile_list <- unique(unname(quantile(u, pct_list)))
    # calculate bin midpoints with real data
    lo <- min(u)
    hi <- max(u)
    bin_values <- c(lo, quantile_list, hi)
    bin_len <- length(bin_values)
    midpoints <- rep(NA, bin_len - 1)
    for (i in 2:bin_len) {
        midpoints[i-1] <- mean(bin_values[(i-1):i])
    }
    breaks <- c(-Inf, quantile_list, Inf)
    bin_info <- list(
        breaks=breaks
        , midpoints=midpoints
    )
    bin_info
}
### Model Specific Functions ###################################################
get_bal_forecast = function(start_bal, scores) {
    # Compound a starting balance forward by cumulated log-growth scores.
    cumulative_growth = cumsum(scores)
    start_bal * exp(cumulative_growth)
}
get_ldiff_forecast = function(start_ldiff, scores) {
    # Roll a starting log-difference level forward by cumulated scores.
    start_ldiff + cumsum(scores)
}
### PSI Series #################################################################
get_psi_from_vector_amts = function(snap_shot, data, cols=NA, const=1e-6) {
    # Population Stability Index of each row of `data` against a baseline
    # distribution built from `snap_shot`, over the amount columns `cols`.
    # Both inputs are data.tables (the `with=FALSE` column selection).
    # NOTE(review): `const` is accepted but never used -- presumably meant
    # as a zero-cell smoother for the log terms; confirm and wire in or drop.
    # Baseline distribution: column totals of the snapshot, normalized.
    snap_raw = apply(as.matrix(snap_shot[,cols, with=FALSE]), 2, sum)
    snap_row_sums = sum(snap_raw)
    snap_dist = snap_raw/snap_row_sums
    # Per-row distributions of the comparison data.
    raw = as.matrix(data[, cols, with=FALSE])
    row_sums = apply(raw, 1, sum)
    dist = raw/row_sums
    str = dim(dist)
    I = str[1]
    J = str[2]
    # PSI_i = sum_j (p_ij - q_j) * log(p_ij / q_j)
    psi_series = sapply(1:I,
        function(i) {
            psi = 0
            for (j in 1:J) {
                psi = psi + (dist[i, j] - snap_dist[j]) * log(dist[i, j]/snap_dist[j])
            }
            psi
        }
    )
    names(psi_series) = NULL
    psi_series
}
### Variable Selection #########################################################
calc_bic = function(y, yhat, k, family="binomial") {
    # Bayesian Information Criterion from actuals and fitted values.
    #
    # Args:
    #   y:      observed response (0/1 for binomial; numeric for gaussian).
    #   yhat:   fitted probabilities (binomial) or fitted means (gaussian).
    #   k:      number of estimated mean parameters.
    #   family: "binomial" or "gaussian".
    # Returns: -2*logLik + p*log(n); for gaussian, p = k + 1 because the
    #   residual sd is estimated too.
    n = length(y)
    if (family == "binomial") {
        # BUG FIX: the original compared against the misspelled "binomal",
        # so the *default* family matched neither branch and the function
        # failed with "object 'bic_cost' not found".
        log_like = sum( y * log(yhat) + (1 - y) * log(1 - yhat))
        bic_cost = -2 * log_like + k * log(n)
    } else if (family == "gaussian") {
        sg2 = sum((y-yhat)^2)/n
        sg = sqrt(sg2)
        log_like = sum(log(dnorm(y, mean=yhat, sd=sg)))
        # k + 1 since we also had to estimate sigma
        bic_cost = -2 * log_like + (k + 1) * log(n)
    } else {
        # Fail fast on unknown families instead of an opaque name error.
        stop("calc_bic: unsupported family '", family, "'")
    }
    return(bic_cost)
}
#### outlier flag ##############################################################
is_outlier_ia = function(x) {
    # Flag outliers via the Iglewicz & Hoaglin modified z-score:
    # 0.6745 * (x - median) / MAD, flagging |z| > 3.5 as 1, else 0.
    scale_const = 0.6745
    center = median(x, na.rm=TRUE)
    mad_raw = median(abs(x - center), na.rm=TRUE)
    modified_z = scale_const * (x - center)/mad_raw
    ifelse(abs(modified_z) > 3.5, 1, 0)
}
################################################################################
cv_step_bal = function(data, bal="bal", resp="log_diff", use_cochrane_orcutt=FALSE, test_var="", model="", from_yr=2007, to_yr=2016) {
# One step of forward variable selection for the balance model: fit
# `resp ~ model (+ test_var)` on the full data for diagnostics, then run
# leave-one-year-out cross-validation and return a one-row data.table of
# fit/error statistics for the candidate variable.
# `data` is a data.table (the `with=FALSE` / [year == yr] idioms) with a
# "year" column plus the named balance and response columns.
# need orcutt package.
mod = c(test_var, model)
mod = rm_blanks(mod)
input_data = data[, c(bal, resp, "year", mod), with=FALSE]
mod_stuff = c("resp", mod)
setnames(input_data, c(resp, bal), c("resp", "bal"))
test_fit = lm(data=input_data[,mod_stuff, with=FALSE], resp ~ .)
# Optionally correct for AR(1) errors; vif() below still uses the plain lm.
if (use_cochrane_orcutt == TRUE) {
full_fit = cochrane.orcutt(lm(data=input_data[,mod_stuff, with=FALSE], resp ~ .))
} else {
full_fit = test_fit
}
params = data.frame(summary(full_fit)$coefficients)
names(params) = c("est", "se", "t", "p_value")
params$var = row.names(params)
params = data.table(params)
params = params[var != "(Intercept)",]
worst_p_value = max(params[["p_value"]])
# Sign diagnostics for the candidate variable (0 when no candidate).
if (test_var != "") {
coefficient = params[var == test_var,"est"][[1]]
correlation = cor(input_data[, c("resp", test_var), with=FALSE])["resp", test_var]
} else {
coefficient = 0
correlation = 0
}
p = full_fit$rank
n = length(full_fit$residuals)
# VIF only defined with 2+ regressors (rank > 2 incl. intercept).
if (p > 2) {
worst_vif = max(vif(test_fit)) #needs car package
} else {
worst_vif = 0
}
# Leave-one-year-out CV: refit without each year, score the held-out year.
j = 1
for (yr in from_yr:to_yr) {
hold_out_yr = input_data[year == yr,]
test_data = input_data[year != yr,]
hold_out_test = hold_out_yr[, c("resp","bal")]
# Starting balance for compounding: first obs of the first year,
# otherwise the last observation of the prior year.
if (yr == from_yr) {
start_bal = input_data[year == yr,.SD[1]][["bal"]]
} else {
start_bal = input_data[ year == (yr - 1),.SD[.N]][["bal"]]
}
hold_out_fit = lm(data=test_data[, mod_stuff, with=FALSE], resp ~ .)
hold_out_test[["est"]] = predict(hold_out_fit, hold_out_yr)
hold_out_test[["bal_est"]] = get_bal_forecast(start_bal, hold_out_test[["est"]])
if (test_var != "") {
cv_coef = hold_out_fit$coefficients[[test_var]]
} else {
cv_coef = 0
}
if (j == 1) {
cv_data = hold_out_test
coef_data = cv_coef
} else {
cv_data = rbind(cv_data, hold_out_test)
coef_data = rbind(cv_coef, coef_data)
}
j = j + 1
}
# Error metrics on the pooled held-out predictions, both in response
# space (rsq/mape/...) and in balance space (bmape/bsmape).
y = cv_data[["resp"]]
y_est = cv_data[["est"]]
bal_y = cv_data[["bal"]]
bal_y_est = cv_data[["bal_est"]]
bic = calc_bic(y, y_est, p, family="gaussian")
rsq = calc_rsq(y, y_est)
mape = calc_mape(y, y_est)
smape = calc_smape(y, y_est)
maape = calc_maape(y, y_est)
bmape = calc_mape(bal_y, bal_y_est)
bsmape = calc_smape(bal_y, bal_y_est)
# coefficient_mapd: mean abs. % deviation of the CV coefficients from
# the full-sample coefficient (coefficient stability measure).
data.table(
var=test_var
, n=n
, p=p
, bic=bic
, rsq=rsq
, mape=mape
, smape=smape
, maape=maape
, bmape=bmape
, bsmape=bsmape
, worst_p_value=worst_p_value
, worst_vif=worst_vif
, coefficient=coefficient
, correlation=correlation
, coefficient_mapd=mean(abs(coef_data/coefficient - 1))
)
}
cv_select_bal = function(data, info, criteria="rsq", bal="bal", resp="log_diff", use_cochrane_orcutt=FALSE, modl="", iter=2, from_yr=2007, to_yr=2016, vif_tol=10, sig_tol=0.10) {
    # Forward stepwise variable selection driven by cv_step_bal.
    #
    # Args:
    #   data:     data.table passed through to cv_step_bal.
    #   info:     candidate table with columns name, sign (expected
    #             coefficient sign) and base (family key used to drop
    #             related transforms once one is selected).
    #   criteria: statistic used to rank candidates each iteration.
    #   modl:     starting model (vector of already-selected variables).
    #   iter:     maximum number of variables to add.
    #   vif_tol / sig_tol: multicollinearity and significance gates.
    # Returns: list with the per-iteration ranking tables, the selected
    #   variables, and the error-statistic paths of the selections.
    # NOTE: na.remove() comes from the tseries package.
    info_table = info
    selections = modl
    selection_rsq = NA
    selection_mape = NA
    selection_smape = NA
    selection_bmape = NA
    selection_bsmape = NA
    summary_out = list()
    for (i in 1:iter) {
        # collect info on each remaining candidate variable
        if (dim(info_table)[1] != 0) {
            var_tuples = zip_to_list(info_table[["name"]],info_table[["sign"]])
            cat_tuples = zip_to_list(info_table[["name"]],info_table[["base"]])
            j = 1
            for (var in names(var_tuples)) {
                step_result = cv_step_bal(
                    data
                    , bal=bal
                    , resp=resp
                    , use_cochrane_orcutt=use_cochrane_orcutt
                    , test_var=var
                    , model=selections
                    , from_yr=from_yr
                    , to_yr=to_yr
                )
                step_result[["sign"]] = var_tuples[[var]]
                step_result[["base"]] = cat_tuples[[var]]
                if (j == 1) {
                    step_collection = step_result
                } else {
                    step_collection = rbind(step_collection, step_result)
                }
                j = j + 1
            }
            # rank the candidates by the requested criterion
            if (criteria == "rsq") {
                step_collection = step_collection[order(-rsq)]
            } else if (criteria == "bic") {
                step_collection = step_collection[order(bic)]
            } else if (criteria == "mape") {
                step_collection = step_collection[order(mape)]
            } else if (criteria == "smape") {
                step_collection = step_collection[order(smape)]
            } else if (criteria == "bmape") {
                step_collection = step_collection[order(bmape)]
            } else if (criteria == "bsmape") {
                step_collection = step_collection[order(bsmape)]
            } else if (criteria == "maape") {
                step_collection = step_collection[order(maape)]
            } else if (criteria == "coefficient_mapd") {
                step_collection = step_collection[order(coefficient_mapd)]
            }
            summary_out[[concat("iteration-",i)]] = step_collection
            # keep only candidates with the expected sign, acceptable VIF
            # and significance; take the best-ranked survivor
            step_filter = step_collection[
                ((sign(coefficient) == sign) & (sign(correlation) == sign) & (worst_vif <= vif_tol) & (worst_p_value <= sig_tol))
            ,]
            if (dim(step_filter)[1] != 0 ) {
                selection = step_filter[1, var][[1]]
                selection_base = step_filter[1, base][[1]]
                selections = c(selections, selection)
                selection_rsq = na.remove(c(selection_rsq, step_filter[1, rsq][[1]]))
                selection_mape = na.remove(c(selection_mape, step_filter[1, mape][[1]]))
                selection_smape = na.remove(c(selection_smape, step_filter[1, smape][[1]]))
                selection_bmape = na.remove(c(selection_bmape, step_filter[1, bmape][[1]]))
                # BUG FIX: the bsmape path previously appended onto
                # selection_bmape (copy-paste), so selection_bsmape held
                # bmape history with only the last bsmape value.
                selection_bsmape = na.remove(c(selection_bsmape, step_filter[1, bsmape][[1]]))
                # remove the winner and its whole transform family
                info_table = info_table[!(name == selection | base == selection_base),]
            } else {
                break
            }
        }
    }
    list(
        summary=summary_out
        , selections=rm_blanks(selections)
        , selection_rsq=selection_rsq
        , selection_mape=selection_mape
        , selection_smape=selection_smape
        , selection_bmape=selection_bmape
        , selection_bsmape=selection_bsmape
    )
}
cv_step_qr_bal = function(data, bal="bal", resp="log_diff", tau=0.50, test_var="", model="", from_yr=2007, to_yr=2016) {
# Quantile-regression analogue of cv_step_bal: fit rq() at quantile `tau`
# on the full data, run leave-one-year-out CV, and return a one-row
# data.table of error statistics for the candidate variable.
# An auxiliary lm (`fit_dummy`) supplies rank and VIF, which rq lacks.
# needs quantreg package
mod = c(test_var, model)
mod = rm_blanks(mod)
input_data = data[, c(bal, resp, "year", mod), with=FALSE]
setnames(input_data, c(resp, bal), c("resp", "bal"))
mod_stuff = c("resp", mod)
full_fit = rq(data=input_data[,mod_stuff, with=FALSE], resp ~ ., tau=tau)
fit_dummy = lm(data=input_data[,mod_stuff, with=FALSE], resp ~ .)
# rq's summary reports estimate + confidence bounds (no p-values).
params = data.frame(summary(full_fit)$coefficients)
names(params) = c("est", "lower_bd", "upper_bd")
params$var = row.names(params)
params = data.table(params)
params = params[var != "(Intercept)",]
if (test_var != "") {
coefficient = params[var == test_var,"est"][[1]]
correlation = cor(input_data[, c("resp", test_var), with=FALSE])["resp", test_var]
} else {
coefficient = 0
correlation = 0
}
p = fit_dummy$rank
n = length(full_fit$residuals)
if (p > 2) {
worst_vif = max(vif(fit_dummy)) #needs car package
} else {
worst_vif = 0
}
# Leave-one-year-out CV, same scheme as cv_step_bal.
j = 1
for (yr in from_yr:to_yr) {
hold_out_yr = input_data[year == yr,]
test_data = input_data[year != yr,]
hold_out_test = hold_out_yr[, c("resp","bal")]
# Starting balance: first obs of the first year, else last obs of
# the preceding year.
if (yr == from_yr) {
start_bal = input_data[year == yr,.SD[1]][["bal"]]
} else {
start_bal = input_data[ year == (yr - 1),.SD[.N]][["bal"]]
}
hold_out_fit = rq(data=test_data[,mod_stuff, with=FALSE], resp ~ ., tau=tau)
hold_out_test[["est"]] = predict(hold_out_fit, hold_out_yr)
hold_out_test[["bal_est"]] = get_bal_forecast(start_bal, hold_out_test[["est"]])
if (test_var != "") {
cv_coef = hold_out_fit$coefficients[[test_var]]
} else {
cv_coef = 0
}
if (j == 1) {
cv_data = hold_out_test
coef_data = cv_coef
} else {
cv_data = rbind(cv_data, hold_out_test)
coef_data = rbind(cv_coef, coef_data)
}
j = j + 1
}
# Pooled held-out error metrics in response and balance space.
y = cv_data[["resp"]]
y_est = cv_data[["est"]]
bal_y = cv_data[["bal"]]
bal_y_est = cv_data[["bal_est"]]
bic = calc_bic(y, y_est, p, family="gaussian")
rsq = calc_rsq(y, y_est)
mape = calc_mape(y, y_est)
smape = calc_smape(y, y_est)
maape = calc_maape(y, y_est)
bmape = calc_mape(bal_y, bal_y_est)
bsmape = calc_smape(bal_y, bal_y_est)
data.table(
var=test_var
, n=n
, p=p
, bic=bic
, rsq=rsq
, mape=mape
, smape=smape
, maape=maape
, bmape=bmape
, bsmape=bsmape
, worst_vif=worst_vif
, coefficient=coefficient
, correlation=correlation
, coefficient_mapd=mean(abs(coef_data/coefficient - 1))
)
}
<file_sep>/Adobe/server.R
library(RColorBrewer)
library(scales)
library(lattice)
library(dplyr)
library(shiny)
library(ggplot2)
library(markdown)
server <- function(input, output, session) {
shiny_studentInfo <- read.csv("shiny_studentInfo.csv")
shiny_assessments <- read.csv("shiny_assessments.csv")
shiny_assessments_courses <- read.csv("assessments_courses.csv")
output$plot2 <- renderPlot({
b = shiny_studentInfo[which(shiny_studentInfo$id == input$id ),]
# myinput <- input$info
if (input$info == 'gender'){
aaa<- b%>% group_by(id, final_result,gender)%>% dplyr::summarise(k = n())
p1 <- ggplot(data=aaa, aes(x=final_result, y=k,fill = final_result)) +
geom_bar(stat="identity")+
theme(legend.position="none")+theme(plot.title = element_text(size=20, hjust=0.5))+
facet_grid(.~ gender)+ggtitle("Student Final Result by demography")+
labs(x = "results", y = "counts" )+ theme(axis.text.x = element_text(face="bold", size=14),axis.text.y = element_text(face="bold", size=14))
print(p1)}
else if(input$info == 'region'){
aaa<- b%>% group_by(id, final_result,region)%>% dplyr::summarise(k = n())
p1 <- ggplot(data=aaa, aes(x=final_result, y=k,fill = final_result)) +
geom_bar(stat="identity")+
theme(legend.position="none")+theme(plot.title = element_text(size=20, face="bold", hjust=0.5))+
facet_grid(.~ region)+ggtitle("Student Final Result Group by Demography")+
labs(x = "results", y = "counts" )+ theme(axis.text.x = element_text(size=10, angle=90),axis.text.y = element_text(face="bold", size=14))
print(p1)
}
else if(input$info == 'highest_education'){
aaa<- b%>% group_by(id, final_result,highest_education)%>% dplyr::summarise(k = n())
p1 <- ggplot(data=aaa, aes(x=final_result, y=k,fill = final_result)) +
geom_bar(stat="identity")+
theme(legend.position="none")+theme(plot.title = element_text(size=20, face="bold", hjust=0.5))+
facet_grid(.~ highest_education)+ggtitle("Student Final Result by demography")+
labs(x = "results", y = "counts" )+ theme(axis.text.x = element_text(size=10, angle=90),axis.text.y = element_text(face="bold", size=14) )
print(p1)
}
else if(input$info == 'imd_band'){
aaa<- b%>% group_by(id, final_result,imd_band)%>% dplyr::summarise(k = n())
p1 <- ggplot(data=aaa, aes(x=final_result, y=k,fill = final_result)) +
geom_bar(stat="identity")+
theme(legend.position="none")+theme(plot.title = element_text(size=20, face="bold", hjust=0.5))+
facet_grid(.~ imd_band)+ggtitle("Student Final Result by demography")+
labs(x = "results", y = "counts" )+ theme(axis.text.x = element_text(size=10, angle=90),axis.text.y = element_text(face="bold", size=14))
print(p1)
}
else if(input$info == 'age_band'){
aaa<- b%>% group_by(id, final_result,age_band)%>% dplyr::summarise(k = n())
p1 <- ggplot(data=aaa, aes(x=final_result, y=k,fill = final_result)) +
geom_bar(stat="identity")+
theme(legend.position="none")+theme(plot.title = element_text(size=20, hjust=0.5))+
facet_grid(.~ age_band)+ggtitle("Student Final Result by demography")+
labs(x = "results", y = "counts" )+ theme(axis.text.x = element_text( size=14),axis.text.y = element_text(face="bold", size=14))
print(p1)
}
else if(input$info == 'disability'){
aaa<- b%>% group_by(id, final_result,disability)%>% dplyr::summarise(k = n())
p1 <- ggplot(data=aaa, aes(x=final_result, y=k,fill = final_result)) +
geom_bar(stat="identity")+
theme(legend.position="none")+ theme(plot.title = element_text(size=19, hjust=0.5))+
facet_grid(.~ disability)+ggtitle("Student Final Result by demography")+
labs(x = "results", y = "counts" )+ theme(axis.text.x = element_text(size=14),axis.text.y = element_text(face="bold", size=14))
print(p1)
}
})
  # Table of assessment records for the course/presentation id selected in
  # the UI. The value of the assignment (the filtered data.frame) is the
  # last evaluated expression, so it is what renderTable receives.
  # NOTE(review): local `c` shadows base::c inside this reactive.
  output$table <- renderTable({
    c= shiny_assessments[which(shiny_assessments$id == input$id ),]
    #print(c)
  })
  # Bar chart: counts of students per final_result category for the
  # selected course id (drawn from the global shiny_studentInfo table).
  output$plot<-renderPlot({
    # Subset enrolments to the selected course, then count rows per result.
    b = shiny_studentInfo[which(shiny_studentInfo$id == input$id ),]
    aaaa<- b%>% group_by(id, final_result)%>% dplyr::summarise(k = n())
    p <- ggplot(data=aaaa, aes(x=final_result, y=k, fill = final_result)) +
      geom_bar(stat="identity")+
      theme(legend.position="none")+
      ggtitle("Distribution of Student Final Result") + theme(plot.title = element_text(size=22, hjust=0.5))+
      labs(x = "results", y = "counts" ) + theme(axis.text.x = element_text(size=14),axis.text.y = element_text(face="bold", size=14))
    print(p)
  }, height = 400, width = 500)
  # Text plot of assessment submission dates for the selected course,
  # colored by assessment type and sized by weight.
  output$plot3<-renderPlot({
    # NOTE(review): `$<-` on shiny_assessments_courses inside this reactive
    # creates a per-invocation local copy; the outer object is unchanged.
    shiny_assessments_courses$id <- paste(shiny_assessments_courses$code_module, shiny_assessments_courses$code_presentation)
    cccc = shiny_assessments_courses[which(shiny_assessments_courses$id == input$id ),]
    p3 <- ggplot(data=cccc, aes(x=date, y=id_assessment, color = assessment_type)) +
      ggtitle("Submission Date of Assessments") + theme(plot.title = element_text(size=22, hjust=0.5))+
      geom_text(aes(label = date, y = id_assessment, size = weight))+ theme(axis.text.x = element_text(size=14),axis.text.y = element_text(face="bold", size=14))
    print(p3)
  }, height = 300, width = 800)
}<file_sep>/EBModel/9-SegmentationTestingf.R
################################################################################
# Bank of Hope
# Commercial Real Estate Ending Balances
# Program: <>.R
# Author(s): KPMG, LLP
# Purpose:
# Data Dependences:
#
#
# R-version: R version 3.3.1 (2016-06-21)
# -- "Bug in Your Hair" Copyright (C) 2016 The R Foundation
# for Statistical Computing Platform: x86_64-apple-darwin13.4.0 (64-bit)
################################################################################
### Environment Settings #######################################################
# Workstation-specific absolute paths; edit these three lines when porting
# the project to another machine.
pth_inputs = "C:/Users/ic07949/Desktop/KPMG/Model Development/development code and data/ending-balance_combined/combined/read-only-inputs"
pth_lib = "C:/Users/ic07949/Desktop/KPMG/Model Development/development code and data/ending-balance_combined/combined/library"
pth_out = "C:/Users/ic07949/Desktop/KPMG/Model Development/development code and data/ending-balance_combined/combined"
### No need to make changes below after this line ##############################
### Dependencies
source(paste(pth_lib,"/dev-support.R", sep=""))
source(paste(pth_lib,"/dfast-support.R", sep=""))
source(paste(pth_lib,"/colors.R", sep=""))
# source has the following functions:
# - stack()
# - get_bal_forecast()
# - concat()
# - bin_interval_variable()
# - calc_rsq(), calc_mape(), calc_mad(), calc_rmset()
# - cv_step(), cv_select()
library("openxlsx")
library("data.table")
library("lubridate")
library("ggplot2")
library("RGraphics")
library("gridExtra")
library("ggplot2")
library("scales")
library("tseries")
library("car")
library("urca")
library("lmtest")
library("nortest")
library("stats")
library("orcutt")
library("quantreg")
### Import Data ################################################################
# Combined model data set produced by the earlier data-prep step.
boh = readRDS(concat(pth_out, "/data-boh.RDS"))
boh_train = function(seg) {
  # Return the training rows of the global `boh` table for segment `seg`.
  # Each segment carries its own indicator column is_train_<seg>; it is
  # renamed to a common "is_train" on a copy so every caller sees a
  # uniform column layout, then the copy is filtered to training rows.
  flag_col = concat("is_train_", seg)
  dt = copy(boh)
  setnames(dt, flag_col, "is_train")
  dt[is_train == TRUE,]
}
boh_train_ce = boh_train("ip")  # NOTE(review): name suggests "ce" but segment is "ip" -- confirm intended
################################################################################
### Any statistical outliers ? #################################################
# Flag outliers of the multifamily log-diff series (via is_outlier_ia from
# the sourced support library) and drop them before the Chow test.
boh_train_ce = boh_train_ce[, exclude_for_ChowTest := ifelse(is_outlier_ia(ldiff_mf) == 1, TRUE, FALSE)]
boh_train_ce[exclude_for_ChowTest == TRUE, c("qtr_dt", "ldiff_mf")]
ChowTest_data = boh_train_ce[exclude_for_ChowTest == FALSE,]
### Chow Test ##################################################################
# Test whether the multifamily (mf) and non-owner-occupied (no) segments can
# be pooled: same regressor matrix X, two dependent series.
library("gap")
ols_ip_model = c("crei_eg_lag4","dow_ya")
X = as.matrix(ChowTest_data[, ols_ip_model, with=FALSE])
y_mf = as.matrix(ChowTest_data[["ldiff_mf"]])
y_no = as.matrix(ChowTest_data[["ldiff_no"]])
chow.test(y_no, X, y_mf, X)
### chow test assumptions
# Fit each segment separately so the Chow-test assumptions (normal,
# homoskedastic residuals) can be checked per segment below.
mf_fit = lm(ldiff_mf~., data=ChowTest_data[, c("ldiff_mf", ols_ip_model), with=FALSE])
length(mf_fit$residuals) # removed three observations that would prevent normal residuals
no_fit = lm(ldiff_no~., data=ChowTest_data[, c("ldiff_no", ols_ip_model), with=FALSE])
ip_model_obj_list = list(mf=mf_fit, no=no_fit)
### Export coefficient data ####################################################
# Stack per-segment OLS coefficient tables and write them to CSV.
step = 1
for (seg in c("mf", "no")) {
  coef_names = names(ip_model_obj_list[[seg]]$coefficients)
  coef_data = data.table(summary(ip_model_obj_list[[seg]])$coefficients)
  # NOTE(review): the Cochrane-Orcutt coefficients are computed and renamed
  # below but never merged into coef_data nor written out -- dead work,
  # or a missing cbind? Confirm before relying on the CSV.
  oc_coef_data = data.table(summary(cochrane.orcutt(ip_model_obj_list[[seg]]))$coefficients)
  names(coef_data) = c("ols_est", "se", "t_value", "p_value")
  names(oc_coef_data) = c("oc_est", "oc_se", "oc_t_value", "oc_p_value")
  coef_data[["parameter"]] = coef_names
  coef_data[["segment"]] = seg
  if (step == 1) {
    combined_parameter_data = coef_data
  } else {
    combined_parameter_data = rbind(combined_parameter_data, coef_data)
  }
  step = step + 1
}
# Filename typo "paramater" kept as-is: downstream consumers may match on it.
write.csv(combined_parameter_data, file=concat(pth_out, "/table-SegmentTesting-boh-paramater_data.csv"))
### Chow Test Assumptions ######################################################
# Residual normality (Shapiro-Wilk, Anderson-Darling) and homoskedasticity
# (ncvTest) per segment; then a Kruskal-Wallis test that the two segments'
# residuals share a common distribution.
# MF Normality
shapiro.test(mf_fit$residuals)
ad.test(mf_fit$residuals)
# Homoskedasticity
ncvTest(mf_fit)
# NO Normality
shapiro.test(no_fit$residuals)
ad.test(no_fit$residuals)
# Homoskedasticity
ncvTest(no_fit)
# Kruskal-Wallis (common dist)
mf_resid = data.table(resid=mf_fit$residuals, seg_id=1, segment="Multifamily")
no_resid = data.table(resid=no_fit$residuals, seg_id=2, segment="Non-Owner Occupied")
ip_resid = rbind(mf_resid, no_resid)
kruskal.test(resid ~ seg_id, data=ip_resid)
<file_sep>/EBCRE/end Bals.R
###################################################################
# Project: Bank of Hope
# Finding Ending Balances
###################################################################
# Run from the directory containing this script (works in RStudio only).
setwd(dirname(rstudioapi::getActiveDocumentContext()$path))
library(DataAnalytics)
library(tseries)
library(urca)
library (fUnitRoots)
library(lubridate)
library(forecast)
library(tseries)   # NOTE(review): duplicate load of tseries (harmless)
library(CADFtest)
library (leaps)
library(data.table)
library(openxlsx)
library(car)
library(lmtest)
library(orcutt)
library(plot.lm)   # NOTE(review): "plot.lm" is not a CRAN package -- confirm this loads
library(lmtest)    # NOTE(review): duplicate load of lmtest (harmless)
####################
##save image
# save.image("endbalmodels.RData")
##load image
#load("endbalmodels.RData")
##################################
#####################
#read in the raw data
#####################
#read in the raw data
# Load the ending-balance history and derive growth/difference transforms.
endbal=read.csv("Ending Balances.csv",header = TRUE)
endbal=endbal[,c(4,2,3)]   # keep: date, C&I balance, CRE balance
names(endbal)= c("date", "ci_bal", "cre_bal")
# #Difference transformation
# _qd/_yd: quarterly/annual level differences; _qg/_yg: quarterly/annual
# log-growth rates. back() (DataAnalytics) lags a series by `noperiods`.
endbal$cre_qd=c(NA, diff(endbal$cre_bal))
endbal$cre_yd=endbal$cre_bal-back(endbal$cre_bal, noperiods = 4)
endbal$cre_qg=c(NA, diff(log(endbal$cre_bal)))
endbal$cre_yg=log(endbal$cre_bal)-back(log(endbal$cre_bal), noperiods = 4)
endbal$ci_qd=c(NA, diff(endbal$ci_bal))
endbal$ci_yd=endbal$ci_bal-back(endbal$ci_bal, noperiods = 4)
endbal$ci_qg=c(NA, diff(log(endbal$ci_bal)))
endbal$ci_yg=log(endbal$ci_bal)-back(log(endbal$ci_bal), noperiods = 4)
#making dataset ready for merge
# Year/quarter keys derived from the m/d/y date string.
endbal$year=year(mdy(endbal$date))
endbal$month=month(mdy(endbal$date))
endbal$q[endbal$month %in% c(1,2,3)]=1
endbal$q[endbal$month %in% c(4,5,6)]=2
endbal$q[endbal$month %in% c(7,8,9)]=3
endbal$q[endbal$month %in% c(10,11,12)]=4
endbal$month=NULL
#max_lag
# Schwert-style maximum lag rule (12*(T/100)^(1/4)) for serial-correlation tests.
max_lag= floor(12*(nrow(endbal)/100)^(1/4))
#######################
#read in the macro vars
#######################
# Each scenario file is trimmed to the common 2003Q1-2018Q4 window.
######
#base
######
base=read.csv("macro_base.csv", header=T)
aaa=which(base$year==2003 & base$q==1)
bbb=which(base$year==2018 & base$q==4)
base=base[aaa:bbb,]
#########
#adverse
#########
adverse=read.csv("macro_adverse.csv", header=T)
aaa=which(adverse$year==2003 & adverse$q==1)
bbb=which(adverse$year==2018 & adverse$q==4)
adverse=adverse[aaa:bbb,]
########
#severe
########
severe=read.csv("macro_severe.csv", header=T)
aaa=which(severe$year==2003 & severe$q==1)
bbb=which(severe$year==2018 & severe$q==4)
severe=severe[aaa:bbb,]
##################
#development macro
##################
# Development (in-sample) window: 2003Q1-2015Q4 of the base scenario,
# dropping the first three id columns.
D1=which(base$year==2003 & base$q==1)
D2=which(base$year==2015 & base$q==4)
macro_dev=base[c(D1:D2), ]
macro_input=macro_dev[,-c(1,2,3)]
dep_var= as.data.frame(cbind(endbal))  # NOTE(review): not referenced later in this script -- confirm needed
########################################
# Create the dep_var matrix
########################################
# Metadata table for candidate macro regressors: one row per column of
# macro_input, with tier (selection priority), base series name, lag,
# differenced flag, and expected coefficient sign.
# CAUTION: the grepl() assignments below use substring matching, so later
# rules overwrite earlier ones (e.g. the "gpdi_eqp" rule also matches
# "rgpdi_eqp" names; "spr10_q"/"spr10_y" refine the earlier "spr10" rule).
# Preserve the ordering when editing.
var.names=colnames(macro_input)
var_info=as.data.frame(matrix(0, length(var.names), 6 ))
names(var_info) = c("var", "tier", "base", "lag", "diff", "sign")
var_info[,1]=var.names
var_info[,5]=0
#diff
# TRUE is coerced to 1 in this numeric column.
var_info[grepl("_qd", var_info$var),5] = TRUE
var_info[grepl("_yd", var_info$var),5] = TRUE
var_info[grepl("_qg", var_info$var),5] = TRUE
var_info[grepl("_yg", var_info$var),5] = TRUE
#lag
var_info[grepl("_lag_1", var_info$var),4] = 1
var_info[grepl("_lag_2", var_info$var),4] = 2
var_info[grepl("_lag_3", var_info$var),4] = 3
var_info[grepl("_lag_4", var_info$var),4] = 4
#var.base
var_info[grepl("ngdp", var_info$var),3] = "ngdp_g"
var_info[grepl("rgdp", var_info$var),3] = "rgdp_g"
var_info[grepl("rdi", var_info$var),3] = "rdi_g"
var_info[grepl("ndi", var_info$var),3] = "ndi_g"
var_info[grepl("ur", var_info$var),3] = "ur_diff"
var_info[grepl("cpi", var_info$var),3] = "cpi"
var_info[grepl("i3m", var_info$var),3] = "i3m_diff"
var_info[grepl("i5y", var_info$var),3] = "i5yr_diff"
var_info[grepl("i10y", var_info$var),3] = "i10yr_diff"
var_info[grepl("bbb", var_info$var),3] = "bbb_diff"
var_info[grepl("imort", var_info$var),3] = "imort_diff"
var_info[grepl("iprim", var_info$var),3] = "iprim_diff"
var_info[grepl("cppi", var_info$var),3] = "cppi_diff"
var_info[grepl("dji", var_info$var),3] = "dji_diff"
var_info[grepl("vix", var_info$var),3] = "vix"
var_info[grepl("hpi", var_info$var),3] = "hpi_g"
var_info[grepl("spr10", var_info$var),3] = "spread"
var_info[grepl("spr10_q", var_info$var),3] = "spread_diff"
var_info[grepl("spr10_y", var_info$var),3] = "spread_diff"
var_info[grepl("rgpdi_eqp", var_info$var),3] = "rgpdi_eqp"
var_info[grepl("gpdi_eqp", var_info$var),3] = "gpdi_eqp"
var_info[grepl("pfi_nonres", var_info$var),3] = "pfi_nonres"
var_info[grepl("willreit", var_info$var),3] = "willreit"
#var_info[var_info$base==0,]
#sign
# Expected sign of the coefficient (1 pro-cyclical, -1 counter-cyclical,
# 0 no prior).
var_info[grepl("gdp", var_info$var),6] = 1
var_info[grepl("rdi", var_info$var),6] = 1
var_info[grepl("ndi", var_info$var),6] = 1
var_info[grepl("ur", var_info$var),6] = -1
var_info[grepl("cpi", var_info$var),6] = -1
var_info[grepl("i3m", var_info$var),6] = 0
var_info[grepl("i5y", var_info$var),6] = 0
var_info[grepl("i10y", var_info$var),6] = 0
var_info[grepl("imor", var_info$var),6] = 0
var_info[grepl("ipri", var_info$var),6] = 0
var_info[grepl("bbb", var_info$var),6] = 0
var_info[grepl("dji", var_info$var),6] = 1
var_info[grepl("hpi", var_info$var),6] = 1
var_info[grepl("vix", var_info$var),6] = -1
var_info[grepl("cppi", var_info$var),6] = 1
var_info[grepl("spr10", var_info$var),6] = -1
var_info[grepl("rgpdi_eqp", var_info$var),6] = 1
var_info[grepl("gpdi_eqp", var_info$var),6] = 1
var_info[grepl("pfi_nonres", var_info$var),6] = 1
var_info[grepl("willreit", var_info$var),6] = 1
# var_info[var_info$sign==0,]
# Tier
# Selection priority used by StepFun (1 considered first).
var_info[grepl("gdp", var_info$var),2] = 1
var_info[grepl("rdi", var_info$var),2] = 1
var_info[grepl("ndi", var_info$var),2] = 1
var_info[grepl("ur", var_info$var),2] = 1
var_info[grepl("cpi", var_info$var),2] = 3
var_info[grepl("i3m", var_info$var),2] = 2
var_info[grepl("i5y", var_info$var),2] = 2
var_info[grepl("i10y", var_info$var),2] = 2
var_info[grepl("imor", var_info$var),2] = 3
var_info[grepl("ipri", var_info$var),2] = 3
var_info[grepl("bbb", var_info$var),2] = 2
var_info[grepl("dji", var_info$var),2] = 2
var_info[grepl("hpi", var_info$var),2] = 1
var_info[grepl("vix", var_info$var),2] = 3
var_info[grepl("cppi", var_info$var),2] = 1
var_info[grepl("spr10", var_info$var),2] = 1
var_info[grepl("rgpdi_eqp", var_info$var),2] = 2
var_info[grepl("gpdi_eqp", var_info$var),2] = 2
var_info[grepl("pfi_nonres", var_info$var),2] = 2
var_info[grepl("willreit", var_info$var),2] = 2
# var_info[var_info$tier==0,]
var_info_cre=var_info
var_info_ci=var_info
#####################
#Variable Selection
#####################
# Assemble the data.tables consumed by StepFun: a = variable metadata,
# b = dependent (loan) series, c = macro regressors, all on the
# 2003Q1-2015Q4 development window.
# NOTE(review): `a`, `b`, `c` shadow base::c etc. for the rest of the script.
first.obs=which(base$year==2003 & base$q==1)
ndata=which(base$year==2015 & base$q==4)
date_col=as.data.frame(base$Date[first.obs:ndata])
colnames(date_col)="Date"
aaa=which(endbal$year==2003 & endbal$q==1)
bbb=which(endbal$year==2015 & endbal$q==4)
loan_input=endbal[aaa:bbb,-c(1,12,13)]   # drop date string and year/q keys
b1=cbind(date_col, loan_input)
names(b1)=c("Date", names(loan_input))
b=data.table(b1)
c1=cbind(date_col, macro_input)
names(c1)=c("Date", names(macro_input))
c=data.table(c1)
a=data.table(var_info_cre)
df_total_dev= as.data.frame(cbind(date_col, loan_input, macro_input))
###############
# Models
###############
#################################
# CRE Model
#################################
# The commented StepFun calls below document the tiered stepwise search
# that produced the final four-variable specification fit by lm() after them.
# source("StepFun.R")
# fix_vars0=c("1")
# v110_model2_cre_qg_sep=StepFun(a,b,c, tier=1, #indicate which tier of variables to consider
#                                y='cre_qg~', #indicate response variable
#                                thresh=c(0.05, 0.01, 0.001), #significance level for SE based p-value and LR test based p-value for each tier
#                                criteria='SE.p', #variable selection criteria; other values='bic', 'LR.p', 'SE.p', 'rsq'
#                                vars0 = c("1"), #model 0 variables
#                                fix_vars0, #indicate which variables are fixed
#                                out.print=T #indicate wheter intermediate output will be printed
# )
# fix_vars0=c("1", "hpi_ag_lag_4", "ndi_ag_lag_4", "spr10_yd")
# v110_model2_cre_qg_sep=StepFun(a,b,c, tier=2, #indicate which tier of variables to consider
#                                y='cre_qg~', #indicate response variable
#                                thresh=c(0.05, 0.01, 0.001), #significance level for SE based p-value and LR test based p-value for each tier
#                                criteria='SE.p', #variable selection criteria; other values='bic', 'LR.p', 'SE.p', 'rsq'
#                                vars0 = c("1", "hpi_ag_lag_4", "ndi_ag_lag_4", "spr10_yd"), #model 0 variables
#                                fix_vars0, #indicate which variables are fixed
#                                out.print=T #indicate wheter intermediate output will be printed
# )
# fix_vars0=c("1", "hpi_ag_lag_4", "ndi_ag_lag_4", "spr10_yd", "i3m_yd_lag_3")
# v110_model2_cre_qg_sep=StepFun(a,b,c, tier=3, #indicate which tier of variables to consider
#                                y='cre_qg~', #indicate response variable
#                                thresh=c(0.05, 0.01, 0.001), #significance level for SE based p-value and LR test based p-value for each tier
#                                criteria='SE.p', #variable selection criteria; other values='bic', 'LR.p', 'SE.p', 'rsq'
#                                vars0 = c("1", "hpi_ag_lag_4", "ndi_ag_lag_4", "spr10_yd", "i3m_yd_lag_3"), #model 0 variables
#                                fix_vars0, #indicate which variables are fixed
#                                out.print=T #indicate wheter intermediate output will be printed
# )
# Final CRE Q-o-Q growth model and serial-correlation / collinearity checks.
out=lm(b$cre_qg~c$hpi_ag_lag_4+c$ndi_ag_lag_4+c$spr10_yd+c$i3m_yd_lag_3)
summary(out)
Box.test(out$residuals, type = "Ljung-Box", lag = 3)
vif(out)
durbinWatsonTest(out,3)
acf(out$residuals)
pacf(out$residuals)
# Standardized residuals, reused by the normality plots further below.
out_res=rstandard(out)
bgtest(out,3)
#demean it
# Center each regressor so the intercept equals the mean growth rate; the
# stored means/sds are reused to transform scenario inputs for prediction.
x1= c$hpi_ag_lag_4- mean(c$hpi_ag_lag_4)
x2= c$ndi_ag_lag_4- mean(c$ndi_ag_lag_4)
x3= c$spr10_yd- mean(c$spr10_yd)
x4= c$i3m_yd_lag_3- mean(c$i3m_yd_lag_3)
mu1=mean(c$hpi_ag_lag_4)
mu2=mean(c$ndi_ag_lag_4)
mu3=mean(c$spr10_yd)
mu4=mean(c$i3m_yd_lag_3)
sd1=stdev(c$hpi_ag_lag_4)
sd2=stdev(c$ndi_ag_lag_4)
sd3=stdev(c$spr10_yd)
sd4=stdev(c$i3m_yd_lag_3)
# Refit on the centered regressors; `out` is overwritten and used for all
# forecasting below.
out=lm(b$cre_qg~x1+x2+x3+x4)
summary(out)
#multicolinearity
vif(out)
# Autocorrelation
par(mfrow=c(1,2))
acf(out$residuals, main="")
pacf(out$residuals, main="")
Box.test(out$residuals, type = "Ljung-Box", lag = max_lag)
bgtest(out, order = 3)
durbinWatsonTest(out, max.lag = 3)
durbinWatsonTest(out)
#normality test
#QQ-plot
# NOTE(review): out_res was computed from the pre-centered fit above, not
# from this refit; centering regressors should leave residuals unchanged,
# but confirm.
par(mfrow=c(1,1))
qqnorm(out_res, ylab="Residuals", xlab="Quantiles of Standard Normal", main="CRE Ending Balance")
qqline(out_res)
# Residual histogram
hist(out_res, breaks="FD", xlab="Residuals", main="Histogram of residuals", ylim=c(0,25))
x<- -3:3
lines(x, 52*dnorm(x,0,sd(out_res)),col=2)
# Residual vs predicted
plot(b$cre_qg,out_res, ylab="Residuals", xlab="Q-o-Q Growth Rate", main="CRE Ending Balance")
abline(0, 0)
#################################
# Stationarity
#################################
# ADF (ur.df) and Phillips-Perron unit-root tests on the raw and the
# centered regressors.
summary(ur.df(na.remove(c$hpi_ag_lag_4)))
summary(ur.df(na.remove(c$ndi_ag_lag_4)))
summary(ur.df(na.remove(c$spr10_yd)))
summary(ur.df(na.remove(c$i3m_yd_lag_3)))
pp.test(na.remove(c$ndi_ag_lag_4))
summary(ur.df(na.remove(x1)))
summary(ur.df(na.remove(x2)))
summary(ur.df(na.remove(x3)))
summary(ur.df(na.remove(x4)))
#################################
#implement the model
#################################
# Build balance forecasts for base/adverse/severe scenarios. Layout of
# `output` (one row per quarter, 52 in-sample + 9 forecast):
#   col 1 historical balance; cols 2/4/6 predicted growth per scenario;
#   cols 3/5/7 recursively compounded balances (bal_t = bal_{t-1}*exp(g_t)).
# NOTE: rows 53:61 are hard-coded as the forecast quarters; this assumes
# exactly 52 in-sample quarters (2003Q1-2015Q4).
ndata=nrow(b1)
npred=9
output=as.data.frame(matrix(0, ndata+npred,7))
D1=which(endbal$year==2003 & endbal$q==1)
D2=which(endbal$year==2015 & endbal$q==4)
input=endbal[D1:D2,]
output[1:ndata, 1]=input$cre_bal
output[1:ndata, 2]=out$fitted.values
dummy1=ndata-1
output[2:ndata, 3]= exp(log(output[1:dummy1, 1]) + output[2:ndata, 2])
# plot(output[2:ndata, 3])
# lines(output[2:ndata, 1], col='red')
#PREDICT
# Scenario regressors are centered with the development-sample means.
aaaa=which(base$year==2016 & base$q==1)
bbbb=which(base$year==2018 & base$q==1)
indx=cbind(base$hpi_ag_lag_4-mu1,base$ndi_ag_lag_4-mu2,base$spr10_yd-mu3, base$i3m_yd_lag_3-mu4)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2", "x3", "x4")
fitted.base=as.data.frame(predict(out, xreg_base))
indx=cbind(adverse$hpi_ag_lag_4-mu1,adverse$ndi_ag_lag_4-mu2,adverse$spr10_yd-mu3, adverse$i3m_yd_lag_3-mu4)
xreg_adverse=as.data.frame(indx[aaaa:bbbb,])
names(xreg_adverse)=c("x1", "x2", "x3", "x4")
fitted.adverse=predict(out, xreg_adverse)
indx=cbind(severe$hpi_ag_lag_4-mu1,severe$ndi_ag_lag_4-mu2,severe$spr10_yd-mu3, severe$i3m_yd_lag_3-mu4)
xreg_severe=as.data.frame(indx[aaaa:bbbb,])
names(xreg_severe)=c("x1", "x2", "x3", "x4")
fitted.severe=predict(out, xreg_severe)
#output[1:ndata, 1]=ppnr$Int_bearing[D1:D2]
# Seed each scenario's first forecast balance from the last actual, then
# compound forward quarter by quarter.
output[53:61, 2]=fitted.base
output[53, 3]= exp(log(output[ndata, 1]) + output[53, 2])
for (i in 2:npred){
  ab=52+i
  ac=52+i-1
  output[ab, 3]= exp(log(output[ac, 3]) + output[ab, 2])
}
output[1:ndata,c(4,5)]=output[1:ndata, c(2,3)]
output[1:ndata,c(6,7)]=output[1:ndata, c(2,3)]
output[53:61, 4]=fitted.adverse
output[53,5]= exp(log(output[ndata, 1]) + output[53, 4])
for (i in 2:npred){
  ab=52+i
  ac=52+i-1
  output[ab, 5]= exp(log(output[ac, 5]) + output[ab, 4])
}
output[53:61, 6]=fitted.severe
output[53,7]= exp(log(output[ndata, 1]) + output[53, 6])
for (i in 2:npred){
  ab=52+i
  ac=52+i-1
  output[ab, 7]= exp(log(output[ac, 7]) + output[ab, 6])
}
output[1, c(3,5,7)]=output[1,1]
output[which(output[,1]==0),1]=NA
#plot together
date1 = seq(ISOdate(2003,1,1), by = "quarter", length.out = 61)
plot(date1, output[,3], type='l', lty='dotted', ylab='CRE EB')
lines(date1, output[,5], col='blue')
lines(date1,output[,7], col='red')
lines(date1, output[,1], col='green')
legend("topleft", legend= c("base", "adverse", "severe", "Historical"), fill=c("black", "blue", "red", "green"))
colnames(output)=c("Historical", "estimated_base", "estimated_base_bal", "estimated_adverse", "adverse_bal", "estimated_severe", "severe_bal")
#output the Cre results
# NOTE(review): write.csv ignores col.names (warns); harmless here.
write.csv(as.data.frame(cbind(date1,output)), "cre EB Projections.csv", col.names = T, row.names = F)
#######################
# Sensitivity Analysis
#######################
##HPI
# Re-run the base-scenario forecast with the HPI regressor shocked by +1sd
# and +2sd. Same recursive-balance machinery as the main forecast block.
# NOTE(review): the "adverse"/"severe" column slots below actually hold the
# +1sd/+2sd shock results, and the exported column names keep the
# adverse/severe labels -- misleading, confirm downstream expectations.
ndata=nrow(b1)
npred=9
output=as.data.frame(matrix(0, ndata+npred,7))
D1=which(endbal$year==2003 & endbal$q==1)
D2=which(endbal$year==2015 & endbal$q==4)
input=endbal[D1:D2,]
output[1:ndata, 1]=input$cre_bal
output[1:ndata, 2]=out$fitted.values
dummy1=ndata-1
output[2:ndata, 3]= exp(log(output[1:dummy1, 1]) + output[2:ndata, 2])
# plot(output[2:ndata, 3])
# lines(output[2:ndata, 1], col='red')
#PREDICT
aaaa=which(base$year==2016 & base$q==1)
bbbb=which(base$year==2018 & base$q==1)
indx=cbind(base$hpi_ag_lag_4-mu1,base$ndi_ag_lag_4-mu2,base$spr10_yd-mu3, base$i3m_yd_lag_3-mu4)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2", "x3", "x4")
fitted.base=as.data.frame(predict(out, xreg_base))
indx_1sd=cbind(base$hpi_ag_lag_4-mu1+sd1,base$ndi_ag_lag_4-mu2,base$spr10_yd-mu3, base$i3m_yd_lag_3-mu4)
xreg_1sd=as.data.frame(indx_1sd[aaaa:bbbb,])
names(xreg_1sd)=c("x1", "x2", "x3", "x4")
fitted.1sd=as.data.frame(predict(out, xreg_1sd))
indx_2sd=cbind(base$hpi_ag_lag_4-mu1+2*sd1,base$ndi_ag_lag_4-mu2,base$spr10_yd-mu3, base$i3m_yd_lag_3-mu4)
xreg_2sd=as.data.frame(indx_2sd[aaaa:bbbb,])
names(xreg_2sd)=c("x1", "x2", "x3", "x4")
fitted.2sd=as.data.frame(predict(out, xreg_2sd))
#output[1:ndata, 1]=ppnr$Int_bearing[D1:D2]
output[53:61, 2]=fitted.base
output[53, 3]= exp(log(output[ndata, 1]) + output[53, 2])
for (i in 2:npred){
  ab=52+i
  ac=52+i-1
  output[ab, 3]= exp(log(output[ac, 3]) + output[ab, 2])
}
output[1:ndata,c(4,5)]=output[1:ndata, c(2,3)]
output[1:ndata,c(6,7)]=output[1:ndata, c(2,3)]
output[53:61, 4]=fitted.1sd
output[53,5]= exp(log(output[ndata, 1]) + output[53, 4])
for (i in 2:npred){
  ab=52+i
  ac=52+i-1
  output[ab, 5]= exp(log(output[ac, 5]) + output[ab, 4])
}
output[53:61, 6]=fitted.2sd
output[53,7]= exp(log(output[ndata, 1]) + output[53, 6])
for (i in 2:npred){
  ab=52+i
  ac=52+i-1
  output[ab, 7]= exp(log(output[ac, 7]) + output[ab, 6])
}
output[1, c(3,5,7)]=output[1,1]
output[which(output[,1]==0),1]=NA
#plot together
date1 = seq(ISOdate(2003,1,1), by = "quarter", length.out = 61)
plot(date1, output[,3], type='l', lty='dotted', ylab='CRE EB')
lines(date1, output[,5], col='blue')
lines(date1,output[,7], col='red')
lines(date1, output[,1], col='green')
legend("topleft", legend= c("base", "adverse", "severe", "Historical"), fill=c("black", "blue", "red", "green"))
colnames(output)=c("Historical", "estimated_base", "estimated_base_bal", "estimated_adverse", "adverse_bal", "estimated_severe", "severe_bal")
#output the Cre results
#clean
output2=output[c(53:61), c(3,5,7)]
write.csv(as.data.frame(cbind(date1[53:61],output2)), "cre EB sensitivity hpi.csv", col.names = T, row.names = F)
##NDI
# Same +1sd/+2sd shock exercise as above, applied to the NDI regressor.
# NOTE(review): exported adverse/severe column labels actually mean
# +1sd/+2sd here as well.
ndata=nrow(b1)
npred=9
output=as.data.frame(matrix(0, ndata+npred,7))
D1=which(endbal$year==2003 & endbal$q==1)
D2=which(endbal$year==2015 & endbal$q==4)
input=endbal[D1:D2,]
output[1:ndata, 1]=input$cre_bal
output[1:ndata, 2]=out$fitted.values
dummy1=ndata-1
output[2:ndata, 3]= exp(log(output[1:dummy1, 1]) + output[2:ndata, 2])
# plot(output[2:ndata, 3])
# lines(output[2:ndata, 1], col='red')
#PREDICT
aaaa=which(base$year==2016 & base$q==1)
bbbb=which(base$year==2018 & base$q==1)
indx=cbind(base$hpi_ag_lag_4-mu1,base$ndi_ag_lag_4-mu2,base$spr10_yd-mu3, base$i3m_yd_lag_3-mu4)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2", "x3", "x4")
fitted.base=as.data.frame(predict(out, xreg_base))
indx_1sd=cbind(base$hpi_ag_lag_4-mu1,base$ndi_ag_lag_4-mu2+sd2,base$spr10_yd-mu3, base$i3m_yd_lag_3-mu4)
xreg_1sd=as.data.frame(indx_1sd[aaaa:bbbb,])
names(xreg_1sd)=c("x1", "x2", "x3", "x4")
fitted.1sd=as.data.frame(predict(out, xreg_1sd))
indx_2sd=cbind(base$hpi_ag_lag_4-mu1,base$ndi_ag_lag_4+2*sd2-mu2,base$spr10_yd-mu3, base$i3m_yd_lag_3-mu4)
xreg_2sd=as.data.frame(indx_2sd[aaaa:bbbb,])
names(xreg_2sd)=c("x1", "x2", "x3", "x4")
fitted.2sd=as.data.frame(predict(out, xreg_2sd))
output[53:61, 2]=fitted.base
output[53, 3]= exp(log(output[ndata, 1]) + output[53, 2])
for (i in 2:npred){
  ab=52+i
  ac=52+i-1
  output[ab, 3]= exp(log(output[ac, 3]) + output[ab, 2])
}
output[1:ndata,c(4,5)]=output[1:ndata, c(2,3)]
output[1:ndata,c(6,7)]=output[1:ndata, c(2,3)]
output[53:61, 4]=fitted.1sd
output[53,5]= exp(log(output[ndata, 1]) + output[53, 4])
for (i in 2:npred){
  ab=52+i
  ac=52+i-1
  output[ab, 5]= exp(log(output[ac, 5]) + output[ab, 4])
}
output[53:61, 6]=fitted.2sd
output[53,7]= exp(log(output[ndata, 1]) + output[53, 6])
for (i in 2:npred){
  ab=52+i
  ac=52+i-1
  output[ab, 7]= exp(log(output[ac, 7]) + output[ab, 6])
}
output[1, c(3,5,7)]=output[1,1]
output[which(output[,1]==0),1]=NA
#plot together
date1 = seq(ISOdate(2003,1,1), by = "quarter", length.out = 61)
plot(date1, output[,3], type='l', lty='dotted', ylab='CRE EB')
lines(date1, output[,5], col='blue')
lines(date1,output[,7], col='red')
lines(date1, output[,1], col='green')
legend("topleft", legend= c("base", "adverse", "severe", "Historical"), fill=c("black", "blue", "red", "green"))
colnames(output)=c("Historical", "estimated_base", "estimated_base_bal", "estimated_adverse", "adverse_bal", "estimated_severe", "severe_bal")
#output the Cre results
#clean
output2=output[c(53:61), c(3,5,7)]
write.csv(as.data.frame(cbind(date1[53:61],output2)), "cre EB sensitivity ndi.csv", col.names = T, row.names = F)
# SPR10
# Same +1sd/+2sd shock exercise, applied to the 10y spread regressor.
# NOTE(review): exported adverse/severe column labels actually mean
# +1sd/+2sd here as well.
ndata=nrow(b1)
npred=9
output=as.data.frame(matrix(0, ndata+npred,7))
D1=which(endbal$year==2003 & endbal$q==1)
D2=which(endbal$year==2015 & endbal$q==4)
input=endbal[D1:D2,]
output[1:ndata, 1]=input$cre_bal
output[1:ndata, 2]=out$fitted.values
dummy1=ndata-1
output[2:ndata, 3]= exp(log(output[1:dummy1, 1]) + output[2:ndata, 2])
# plot(output[2:ndata, 3])
# lines(output[2:ndata, 1], col='red')
#PREDICT
aaaa=which(base$year==2016 & base$q==1)
bbbb=which(base$year==2018 & base$q==1)
indx=cbind(base$hpi_ag_lag_4-mu1,base$ndi_ag_lag_4-mu2,base$spr10_yd-mu3, base$i3m_yd_lag_3-mu4)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2", "x3", "x4")
fitted.base=as.data.frame(predict(out, xreg_base))
indx_1sd=cbind(base$hpi_ag_lag_4-mu1,base$ndi_ag_lag_4-mu2,base$spr10_yd-mu3+sd3, base$i3m_yd_lag_3-mu4)
xreg_1sd=as.data.frame(indx_1sd[aaaa:bbbb,])
names(xreg_1sd)=c("x1", "x2", "x3", "x4")
fitted.1sd=as.data.frame(predict(out, xreg_1sd))
indx_2sd=cbind(base$hpi_ag_lag_4-mu1,base$ndi_ag_lag_4-mu2,base$spr10_yd-mu3+2*sd3, base$i3m_yd_lag_3-mu4)
xreg_2sd=as.data.frame(indx_2sd[aaaa:bbbb,])
names(xreg_2sd)=c("x1", "x2", "x3", "x4")
fitted.2sd=as.data.frame(predict(out, xreg_2sd))
#output[1:ndata, 1]=ppnr$Int_bearing[D1:D2]
output[53:61, 2]=fitted.base
output[53, 3]= exp(log(output[ndata, 1]) + output[53, 2])
for (i in 2:npred){
  ab=52+i
  ac=52+i-1
  output[ab, 3]= exp(log(output[ac, 3]) + output[ab, 2])
}
output[1:ndata,c(4,5)]=output[1:ndata, c(2,3)]
output[1:ndata,c(6,7)]=output[1:ndata, c(2,3)]
output[53:61, 4]=fitted.1sd
output[53,5]= exp(log(output[ndata, 1]) + output[53, 4])
for (i in 2:npred){
  ab=52+i
  ac=52+i-1
  output[ab, 5]= exp(log(output[ac, 5]) + output[ab, 4])
}
output[53:61, 6]=fitted.2sd
output[53,7]= exp(log(output[ndata, 1]) + output[53, 6])
for (i in 2:npred){
  ab=52+i
  ac=52+i-1
  output[ab, 7]= exp(log(output[ac, 7]) + output[ab, 6])
}
output[1, c(3,5,7)]=output[1,1]
output[which(output[,1]==0),1]=NA
#plot together
date1 = seq(ISOdate(2003,1,1), by = "quarter", length.out = 61)
plot(date1, output[,3], type='l', lty='dotted', ylab='CRE EB')
lines(date1, output[,5], col='blue')
lines(date1,output[,7], col='red')
lines(date1, output[,1], col='green')
legend("topleft", legend= c("base", "adverse", "severe", "Historical"), fill=c("black", "blue", "red", "green"))
colnames(output)=c("Historical", "estimated_base", "estimated_base_bal", "estimated_adverse", "adverse_bal", "estimated_severe", "severe_bal")
#output the Cre results
#clean
output2=output[c(53:61), c(3,5,7)]
write.csv(as.data.frame(cbind(date1[53:61],output2)), "cre EB sensitivity spr10.csv", col.names = T, row.names = F)
# i3m
# Same +1sd/+2sd shock exercise, applied to the 3-month rate regressor.
# NOTE(review): exported adverse/severe column labels actually mean
# +1sd/+2sd here as well.
ndata=nrow(b1)
npred=9
output=as.data.frame(matrix(0, ndata+npred,7))
D1=which(endbal$year==2003 & endbal$q==1)
D2=which(endbal$year==2015 & endbal$q==4)
input=endbal[D1:D2,]
output[1:ndata, 1]=input$cre_bal
output[1:ndata, 2]=out$fitted.values
dummy1=ndata-1
output[2:ndata, 3]= exp(log(output[1:dummy1, 1]) + output[2:ndata, 2])
# plot(output[2:ndata, 3])
# lines(output[2:ndata, 1], col='red')
#PREDICT
aaaa=which(base$year==2016 & base$q==1)
bbbb=which(base$year==2018 & base$q==1)
indx=cbind(base$hpi_ag_lag_4-mu1,base$ndi_ag_lag_4-mu2,base$spr10_yd-mu3, base$i3m_yd_lag_3-mu4)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2", "x3", "x4")
fitted.base=as.data.frame(predict(out, xreg_base))
indx_1sd=cbind(base$hpi_ag_lag_4-mu1,base$ndi_ag_lag_4-mu2,base$spr10_yd-mu3, base$i3m_yd_lag_3-mu4+sd4)
xreg_1sd=as.data.frame(indx_1sd[aaaa:bbbb,])
names(xreg_1sd)=c("x1", "x2", "x3", "x4")
fitted.1sd=as.data.frame(predict(out, xreg_1sd))
indx_2sd=cbind(base$hpi_ag_lag_4-mu1,base$ndi_ag_lag_4-mu2,base$spr10_yd-mu3, base$i3m_yd_lag_3-mu4+2*sd4)
xreg_2sd=as.data.frame(indx_2sd[aaaa:bbbb,])
names(xreg_2sd)=c("x1", "x2", "x3", "x4")
fitted.2sd=as.data.frame(predict(out, xreg_2sd))
#output[1:ndata, 1]=ppnr$Int_bearing[D1:D2]
output[53:61, 2]=fitted.base
output[53, 3]= exp(log(output[ndata, 1]) + output[53, 2])
for (i in 2:npred){
  ab=52+i
  ac=52+i-1
  output[ab, 3]= exp(log(output[ac, 3]) + output[ab, 2])
}
output[1:ndata,c(4,5)]=output[1:ndata, c(2,3)]
output[1:ndata,c(6,7)]=output[1:ndata, c(2,3)]
output[53:61, 4]=fitted.1sd
output[53,5]= exp(log(output[ndata, 1]) + output[53, 4])
for (i in 2:npred){
  ab=52+i
  ac=52+i-1
  output[ab, 5]= exp(log(output[ac, 5]) + output[ab, 4])
}
output[53:61, 6]=fitted.2sd
output[53,7]= exp(log(output[ndata, 1]) + output[53, 6])
for (i in 2:npred){
  ab=52+i
  ac=52+i-1
  output[ab, 7]= exp(log(output[ac, 7]) + output[ab, 6])
}
output[1, c(3,5,7)]=output[1,1]
output[which(output[,1]==0),1]=NA
#plot together
date1 = seq(ISOdate(2003,1,1), by = "quarter", length.out = 61)
plot(date1, output[,3], type='l', lty='dotted', ylab='CRE EB')
lines(date1, output[,5], col='blue')
lines(date1,output[,7], col='red')
lines(date1, output[,1], col='green')
legend("topleft", legend= c("base", "adverse", "severe", "Historical"), fill=c("black", "blue", "red", "green"))
colnames(output)=c("Historical", "estimated_base", "estimated_base_bal", "estimated_adverse", "adverse_bal", "estimated_severe", "severe_bal")
#output the Cre results
#clean
output2=output[c(53:61), c(3,5,7)]
write.csv(as.data.frame(cbind(date1[53:61],output2)), "cre EB sensitivity i3m.csv", col.names = T, row.names = F)
#####################################
# OOS testing
####################################
oos<-function(n){
  # Out-of-sample backtest of the CRE Q-o-Q growth model.
  # n: number of quarters held out before the end of the development
  # sample (2015Q4). The model is refit on df_total_dev truncated n
  # quarters early, then the first held-out quarter's ending balance is
  # predicted from the last in-sample balance.
  # Uses script-level globals: df_total_dev, endbal.
  # Returns a 1-row data.frame: n, actual balance, predicted balance,
  # percent error.
  # (Cleanup: removed unused locals ind0/ind2/ind3/ind4 -- one of which
  # silently read the global `npred` -- the discarded summary() call, and
  # the duplicate index name ind5, which always equaled ind1.)
  ind=nrow(df_total_dev)-n          # last in-sample row
  df_oos=df_total_dev[1:ind,]
  ind1=nrow(df_oos)+1               # first out-of-sample row of df_total_dev
  # Demean the regressors over the truncated sample; the means are reused
  # below so the forecast-quarter inputs are transformed consistently.
  x1= df_oos$hpi_ag_lag_4- mean(df_oos$hpi_ag_lag_4)
  x2= df_oos$ndi_ag_lag_4- mean(df_oos$ndi_ag_lag_4)
  x3= df_oos$spr10_yd- mean(df_oos$spr10_yd)
  x4= df_oos$i3m_yd_lag_3- mean(df_oos$i3m_yd_lag_3)
  mu1=mean(df_oos$hpi_ag_lag_4)
  mu2=mean(df_oos$ndi_ag_lag_4)
  mu3=mean(df_oos$spr10_yd)
  mu4=mean(df_oos$i3m_yd_lag_3)
  out_oos=lm(df_oos$cre_qg~x1+x2+x3+x4)
  ####################################
  #implement the model - out of sample
  ####################################
  ndata=nrow(df_oos)
  npred=1                           # predict one quarter ahead
  output_oos=as.data.frame(matrix(0, ndata+npred,7))
  input=endbal[1:ind1,]
  output_oos[, 1]=input$cre_bal     # actuals, incl. the holdout quarter
  output_oos[1:ndata, 2]=out_oos$fitted.values
  # In-sample fitted balances: bal_t = bal_{t-1} * exp(fitted growth_t).
  output_oos[2:ndata, 3]= exp(log(output_oos[1:(ndata-1), 1]) + output_oos[2:ndata, 2])
  #PREDICT
  indx=cbind(df_total_dev$hpi_ag_lag_4-mu1,df_total_dev$ndi_ag_lag_4-mu2,df_total_dev$spr10_yd-mu3, df_total_dev$i3m_yd_lag_3-mu4)
  xreg_base=as.data.frame(t(indx[ind1,]))
  names(xreg_base)=c("x1", "x2", "x3", "x4")
  fitted.base=as.data.frame(predict(out_oos, xreg_base))
  # Predicted balance for the first holdout quarter.
  output_oos[ind1, 2]=fitted.base
  output_oos[ind1, 3]= exp(log(output_oos[ndata, 1]) + output_oos[ind1, 2])
  # NOTE(review): percent error is taken relative to the *predicted*
  # balance, not the actual -- confirm this is intended.
  pct_error= 100*(output_oos[ind1,3]-output_oos[ind1,1])/output_oos[ind1,3]
  result_oos=as.data.frame(cbind(n, output_oos[ind1,1], output_oos[ind1,3],pct_error))
  return(result_oos)
}
# Backtest the last four development quarters one at a time.
oos(1)
oos(2)
oos(3)
oos(4)
#####################################
# Prediction CI
####################################
# 95% prediction intervals around the forecast *growth rates* (not the
# compounded balances) for all three scenarios; exported to CSV.
# Rows 53:61 are the nine forecast quarters, as in the forecast block.
ndata=nrow(b1)
npred=9
output_ci=as.data.frame(matrix(0, ndata+npred,10))
D1=which(endbal$year==2003 & endbal$q==1)
D2=which(endbal$year==2015 & endbal$q==4)
input=endbal[D1:D2,]
output_ci[1:ndata, 1]=input$cre_qg
output_ci[1:ndata, 2]=out$fitted.values
#PREDICT
aaaa=which(base$year==2016 & base$q==1)
bbbb=which(base$year==2018 & base$q==1)
indx=cbind(base$hpi_ag_lag_4-mu1,base$ndi_ag_lag_4-mu2,base$spr10_yd-mu3, base$i3m_yd_lag_3-mu4)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2", "x3", "x4")
fitted.base=as.data.frame(predict(out, xreg_base,interval = "predict", level = 0.95))
indx=cbind(adverse$hpi_ag_lag_4-mu1,adverse$ndi_ag_lag_4-mu2,adverse$spr10_yd-mu3, adverse$i3m_yd_lag_3-mu4)
xreg_adverse=as.data.frame(indx[aaaa:bbbb,])
names(xreg_adverse)=c("x1", "x2", "x3", "x4")
fitted.adverse=predict(out, xreg_adverse,interval = "predict",level = 0.95)
indx=cbind(severe$hpi_ag_lag_4-mu1,severe$ndi_ag_lag_4-mu2,severe$spr10_yd-mu3, severe$i3m_yd_lag_3-mu4)
xreg_severe=as.data.frame(indx[aaaa:bbbb,])
names(xreg_severe)=c("x1", "x2", "x3", "x4")
fitted.severe=predict(out, xreg_severe ,interval = "predict",level = 0.95)
output_ci[53:61, 2]=fitted.base[,1] #fit base
output_ci[53:61, 3]=fitted.base[,2] #lwr base
output_ci[53:61, 4]=fitted.base[,3] #upr base
output_ci[53:61, 5]=fitted.adverse[,1] #fit adverse
output_ci[53:61, 6]=fitted.adverse[,2] #lwr adverse
output_ci[53:61, 7]=fitted.adverse[,3] #upr adverse
output_ci[53:61,8]=fitted.severe[,1]#fit severe
output_ci[53:61,9]=fitted.severe[,2] #lwr severe
output_ci[53:61,10]=fitted.severe[,3] # upr Severe
colnames(output_ci)=c("Historical", "estimated_base_fit", "estimated_base_lwr",
                      "estimated_base_upr", "estimated_adverse_fit",
                      "estimated_adverse_lwr", "estimated_adverse_upr",
                      "estimated_severe_fit", "estimated_severe_lwr",
                      "estimated_severe_upr")
# date1 was defined by the earlier plotting sections (61 quarters).
write.csv(as.data.frame(cbind(date1,output_ci)), "cre_prediction_ci.csv", col.names = T, row.names = F)
#####################################################################
# CI Model
#####################################################################
# source("StepFun.R")
# fix_vars0=c("1", "rgdp_ag_lag_1")
# v110_model2_ci_qd_sep=StepFun(a,b,c, tier=1, #indicate which tier of variables to consider
# y='ci_qd~', #indicate response variable
# thresh=c(0.05, 0.01, 0.001), #significance level for SE based p-value and LR test based p-value for each tier
# criteria='SE.p', #variable selection criteria; other values='bic', 'LR.p', 'SE.p', 'rsq'
# vars0 = c("1", "rgdp_ag_lag_1"), #model 0 variables
# fix_vars0, #indicate which variables are fixed
#                           out.print=T #indicate whether intermediate output will be printed
# )
#
# fix_vars0=c("1", "rgdp_ag_lag_1", "rdi_ag_lag_4")
# v110_model2_ci_qd_sep=StepFun(a,b,c, tier=2, #indicate which tier of variables to consider
# y='ci_qd~', #indicate response variable
# thresh=c(0.05, 0.01, 0.001), #significance level for SE based p-value and LR test based p-value for each tier
# criteria='SE.p', #variable selection criteria; other values='bic', 'LR.p', 'SE.p', 'rsq'
# vars0 = c("1", "rgdp_ag_lag_1", "rdi_ag_lag_4"), #model 0 variables
# fix_vars0, #indicate which variables are fixed
#                           out.print=T #indicate whether intermediate output will be printed
# )
#
# fix_vars0=c("1", "rgdp_ag_lag_1", "rdi_ag_lag_4")
# v110_model2_ci_qd_sep=StepFun(a,b,c, tier=3, #indicate which tier of variables to consider
# y='ci_qd~', #indicate response variable
# thresh=c(0.05, 0.01, 0.001), #significance level for SE based p-value and LR test based p-value for each tier
# criteria='SE.p', #variable selection criteria; other values='bic', 'LR.p', 'SE.p', 'rsq'
# vars0 = c("1", "rgdp_ag_lag_1", "rdi_ag_lag_4"), #model 0 variables
# fix_vars0, #indicate which variables are fixed
#                           out.print=T #indicate whether intermediate output will be printed
# )
# C&I quarterly-difference model: demeaned regressors, OLS fit and the
# full battery of diagnostics (serial correlation, multicollinearity,
# stationarity, normality, homoskedasticity).
# `c` and `b` are the data.tables assembled earlier in this file.
x1=c$rgdp_ag_lag_1-mean(c$rgdp_ag_lag_1)
x2=c$rdi_ag_lag_4-mean(c$rdi_ag_lag_4)
# keep the development-sample means/sds for later scenario demeaning
mu1=mean(c$rgdp_ag_lag_1)
mu2= mean(c$rdi_ag_lag_4)
# NOTE(review): stdev() is not base R (base is sd()); presumably supplied
# by one of the loaded packages -- confirm it is in scope.
sd1=stdev(c$rgdp_ag_lag_1)
sd2= stdev(c$rdi_ag_lag_4)
out=lm(b$ci_qd~x1+x2)
summary(out)
Box.test(out$residuals, type = "Ljung-Box", lag = 1)
#Multicollinearity
vif(out)
par(mfrow=c(1,2))
acf(out$residuals, main="")
pacf(out$residuals, main="")
Box.test(out$residuals, type = "Ljung-Box", lag = max_lag)
bgtest(out, order = 3)
durbinWatsonTest(out, 3)
# Stationarity (ADF) of the demeaned regressors
summary(ur.df(na.remove(x1)))
summary(ur.df(na.remove(x2)))
#normality test on standardized residuals
out_res=rstandard(out)
#QQ-plot
qqnorm(out_res, ylab="Residuals", xlab="Quantiles of Standard Normal", main="CI Ending Balance")
qqline(out_res)
max(out_res)
min(out_res)
# Residual histogram with a scaled normal density overlay
hist(out_res, breaks="FD", xlab="Residuals", main="Histogram of residuals", ylim=c(0,25))
x<- -3:4
lines(x, 52*dnorm(x,0,sd(out_res)),col=2)
# homoskedasticity: Residual vs predicted
plot(out$fitted.values,out_res, ylab="Residuals", xlab="Predicted Values", main="CI Ending Balance")
abline(0, 0)
#################################
#implement the model
#################################
# Reconstruct historical C&I ending balances from fitted quarterly
# differences, then roll the balance forward nine quarters under each
# scenario (balance_t = balance_{t-1} + predicted difference).
ndata=nrow(b1)
npred=9
# columns: 1 historical, 2/3 base fit+bal, 4/5 adverse, 6/7 severe
output=as.data.frame(matrix(0, ndata+npred,7))
D1=which(endbal$year==2003 & endbal$q==1)
D2=which(endbal$year==2015 & endbal$q==4)
input=endbal[D1:D2,]
output[1:ndata, 1]=input$ci_bal
output[1:ndata, 2]=out$fitted.values
dummy1=ndata-1
output[2:ndata, 3]= output[1:dummy1, 1] + output[2:ndata, 2]
# plot(output[2:ndata, 3])
# lines(output[2:ndata, 1], col='red')
#PREDICT
aaaa=which(base$year==2016 & base$q==1)
bbbb=which(base$year==2018 & base$q==1)
# demean scenario regressors with the development-sample means
indx=cbind(base$rgdp_ag_lag_1-mu1,base$rdi_ag_lag_4-mu2)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2")
fitted.base=as.data.frame(predict(out, xreg_base))
indx=cbind(adverse$rgdp_ag_lag_1-mu1,adverse$rdi_ag_lag_4-mu2)
xreg_adverse=as.data.frame(indx[aaaa:bbbb,])
names(xreg_adverse)=c("x1", "x2")
fitted.adverse=predict(out, xreg_adverse)
indx=cbind(severe$rgdp_ag_lag_1-mu1,severe$rdi_ag_lag_4-mu2)
xreg_severe=as.data.frame(indx[aaaa:bbbb,])
names(xreg_severe)=c("x1", "x2")
fitted.severe=predict(out, xreg_severe)
#output[1:ndata, 1]=ppnr$Int_bearing[D1:D2]
# rows 53:61 are the forecast quarters; accumulate balances recursively
output[53:61, 2]=fitted.base
output[53, 3]= output[ndata, 1] + output[53, 2]
for (i in 2:npred){
ab=52+i
ac=52+i-1
output[ab, 3]= output[ac, 3]+ output[ab, 2]
}
output[1:ndata,c(4,5)]=output[1:ndata, c(2,3)]
output[1:ndata,c(6,7)]=output[1:ndata, c(2,3)]
output[53:61, 4]=fitted.adverse
output[53,5]= output[ndata, 1] + output[53, 4]
for (i in 2:npred){
ab=52+i
ac=52+i-1
output[ab, 5]= output[ac, 5] + output[ab, 4]
}
output[53:61, 6]=fitted.severe
output[53,7]= output[ndata, 1] + output[53, 6]
for (i in 2:npred){
ab=52+i
ac=52+i-1
output[ab, 7]= output[ac, 7] + output[ab, 6]
}
output[1, c(3,5,7)]=output[1,1]
output[which(output[,1]==0),1]=NA
#plot together
par(mfrow=c(1,1))
date1 = seq(ISOdate(2003,1,1), by = "quarter", length.out = 61)
plot(date1, output[,3], type='l', lty='dotted', ylab='CI EB')
lines(date1, output[,5], col='blue')
lines(date1,output[,7], col='red')
lines(date1, output[,1], col='green')
legend("topleft", legend= c("base", "adverse", "severe", "Historical"), fill=c("black", "blue", "red", "green"))
colnames(output)=c("Historical", "estimated_base", "estimated_base_bal", "estimated_adverse", "adverse_bal", "estimated_severe", "severe_bal")
#output the ci results
# NOTE(review): write.csv() ignores col.names (warns); argument is a no-op.
write.csv(as.data.frame(cbind(date1,output)), "CI EB Projections.csv", col.names = T, row.names = F)
#################################
#Sensitivity Analysis
#################################
# Sensitivity of the C&I balance forecast to the rgdp driver: re-run the
# baseline forecast with rgdp_ag_lag_1 shocked by +1 and +2 development
# standard deviations (sd1); djia/rdi driver held at baseline.
#rgdp
ndata=nrow(b1)
npred=9
output=as.data.frame(matrix(0, ndata+npred,7))
D1=which(endbal$year==2003 & endbal$q==1)
D2=which(endbal$year==2015 & endbal$q==4)
input=endbal[D1:D2,]
output[1:ndata, 1]=input$ci_bal
output[1:ndata, 2]=out$fitted.values
dummy1=ndata-1
output[2:ndata, 3]= output[1:dummy1, 1] + output[2:ndata, 2]
# plot(output[2:ndata, 3])
# lines(output[2:ndata, 1], col='red')
#PREDICT
aaaa=which(base$year==2016 & base$q==1)
bbbb=which(base$year==2018 & base$q==1)
indx=cbind(base$rgdp_ag_lag_1-mu1,base$rdi_ag_lag_4-mu2)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2")
fitted.base=as.data.frame(predict(out, xreg_base))
# +1 sd shock to rgdp only
indx=cbind(base$rgdp_ag_lag_1-mu1+sd1,base$rdi_ag_lag_4-mu2)
xreg_1sd=as.data.frame(indx[aaaa:bbbb,])
names(xreg_1sd)=c("x1", "x2")
fitted.1sd=as.data.frame(predict(out, xreg_1sd))
# +2 sd shock to rgdp only
indx=cbind(base$rgdp_ag_lag_1-mu1+2*sd1,base$rdi_ag_lag_4-mu2)
xreg_2sd=as.data.frame(indx[aaaa:bbbb,])
names(xreg_2sd)=c("x1", "x2")
fitted.2sd=as.data.frame(predict(out, xreg_2sd))
#output[1:ndata, 1]=ppnr$Int_bearing[D1:D2]
output[53:61, 2]=fitted.base
output[53, 3]= output[ndata, 1] + output[53, 2]
for (i in 2:npred){
ab=52+i
ac=52+i-1
output[ab, 3]= output[ac, 3]+ output[ab, 2]
}
output[1:ndata,c(4,5)]=output[1:ndata, c(2,3)]
output[1:ndata,c(6,7)]=output[1:ndata, c(2,3)]
output[53:61, 4]=fitted.1sd
output[53,5]= output[ndata, 1] + output[53, 4]
for (i in 2:npred){
ab=52+i
ac=52+i-1
output[ab, 5]= output[ac, 5] + output[ab, 4]
}
output[53:61, 6]=fitted.2sd
output[53,7]= output[ndata, 1] + output[53, 6]
for (i in 2:npred){
ab=52+i
ac=52+i-1
output[ab, 7]= output[ac, 7] + output[ab, 6]
}
output[1, c(3,5,7)]=output[1,1]
output[which(output[,1]==0),1]=NA
#plot together
date1 = seq(ISOdate(2003,1,1), by = "quarter", length.out = 61)
plot(date1, output[,3], type='l', lty='dotted', ylab='CI EB')
lines(date1, output[,5], col='blue')
lines(date1,output[,7], col='red')
lines(date1, output[,1], col='green')
legend("topleft", legend= c("base", "adverse", "severe", "Historical"), fill=c("black", "blue", "red", "green"))
colnames(output)=c("Historical", "estimated_base", "estimated_base_bal", "estimated_adverse", "adverse_bal", "estimated_severe", "severe_bal")
#output the sensitivity results (forecast quarters only)
output2=output[c(53:61), c(3,5,7)]
# NOTE(review): write.csv() ignores col.names (warns); argument is a no-op.
write.csv(as.data.frame(cbind(date1[53:61],output2)), "CI EB sensitivity rgdp.csv", col.names = T, row.names = F)
#rdi
# Same sensitivity exercise as above but shocking the rdi driver
# (rdi_ag_lag_4) by +1 and +2 development standard deviations (sd2).
ndata=nrow(b1)
npred=9
output=as.data.frame(matrix(0, ndata+npred,7))
D1=which(endbal$year==2003 & endbal$q==1)
D2=which(endbal$year==2015 & endbal$q==4)
input=endbal[D1:D2,]
output[1:ndata, 1]=input$ci_bal
output[1:ndata, 2]=out$fitted.values
dummy1=ndata-1
output[2:ndata, 3]= output[1:dummy1, 1] + output[2:ndata, 2]
# plot(output[2:ndata, 3])
# lines(output[2:ndata, 1], col='red')
#PREDICT
aaaa=which(base$year==2016 & base$q==1)
bbbb=which(base$year==2018 & base$q==1)
indx=cbind(base$rgdp_ag_lag_1-mu1,base$rdi_ag_lag_4-mu2)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2")
fitted.base=as.data.frame(predict(out, xreg_base))
# +1 sd shock to rdi only
indx=cbind(base$rgdp_ag_lag_1-mu1,base$rdi_ag_lag_4-mu2+sd2)
xreg_1sd=as.data.frame(indx[aaaa:bbbb,])
names(xreg_1sd)=c("x1", "x2")
fitted.1sd=as.data.frame(predict(out, xreg_1sd))
# +2 sd shock to rdi only
indx=cbind(base$rgdp_ag_lag_1-mu1,base$rdi_ag_lag_4-mu2+2*sd2)
xreg_2sd=as.data.frame(indx[aaaa:bbbb,])
names(xreg_2sd)=c("x1", "x2")
fitted.2sd=as.data.frame(predict(out, xreg_2sd))
#output[1:ndata, 1]=ppnr$Int_bearing[D1:D2]
output[53:61, 2]=fitted.base
output[53, 3]= output[ndata, 1] + output[53, 2]
for (i in 2:npred){
ab=52+i
ac=52+i-1
output[ab, 3]= output[ac, 3]+ output[ab, 2]
}
output[1:ndata,c(4,5)]=output[1:ndata, c(2,3)]
output[1:ndata,c(6,7)]=output[1:ndata, c(2,3)]
output[53:61, 4]=fitted.1sd
output[53,5]= output[ndata, 1] + output[53, 4]
for (i in 2:npred){
ab=52+i
ac=52+i-1
output[ab, 5]= output[ac, 5] + output[ab, 4]
}
output[53:61, 6]=fitted.2sd
output[53,7]= output[ndata, 1] + output[53, 6]
for (i in 2:npred){
ab=52+i
ac=52+i-1
output[ab, 7]= output[ac, 7] + output[ab, 6]
}
output[1, c(3,5,7)]=output[1,1]
output[which(output[,1]==0),1]=NA
#plot together
date1 = seq(ISOdate(2003,1,1), by = "quarter", length.out = 61)
plot(date1, output[,3], type='l', lty='dotted', ylab='CI EB')
lines(date1, output[,5], col='blue')
lines(date1,output[,7], col='red')
lines(date1, output[,1], col='green')
legend("topleft", legend= c("base", "adverse", "severe", "Historical"), fill=c("black", "blue", "red", "green"))
colnames(output)=c("Historical", "estimated_base", "estimated_base_bal", "estimated_adverse", "adverse_bal", "estimated_severe", "severe_bal")
#output the sensitivity results (forecast quarters only)
output2=output[c(53:61), c(3,5,7)]
# NOTE(review): write.csv() ignores col.names (warns); argument is a no-op.
write.csv(as.data.frame(cbind(date1[53:61],output2)), "CI EB sensitivity rdi.csv", col.names = T, row.names = F)
#####################################
# OOS testing
####################################
# Out-of-sample (OOS) back-test for the C&I quarterly-difference model.
#
# n : number of quarters held out before the end of the development
#     sample.  The model is re-fit on the first nrow(df_total_dev) - n
#     rows and used to predict the next quarter's ending balance.
#
# Returns a one-row data.frame: n, actual balance, predicted balance,
# and percentage prediction error.
#
# NOTE(review): depends on the globals df_total_dev and endbal.
# Cleanup vs. original: removed unused locals ind0/ind2/ind3/ind4
# (ind2 silently read the *global* npred before the local npred was set).
oos_ci <- function(n) {
  # n defines how many quarters before 2015Q4
  ind = nrow(df_total_dev) - n
  df_oos = df_total_dev[1:ind, ]
  ind1 = nrow(df_oos) + 1            # first held-out quarter
  ind5 = nrow(df_total_dev) - n + 1  # same quarter, indexed from the full sample (== ind1)
  # demean the regressors with the truncated-sample means
  x1 = df_oos$rgdp_ag_lag_1 - mean(df_oos$rgdp_ag_lag_1)
  x2 = df_oos$rdi_ag_lag_4 - mean(df_oos$rdi_ag_lag_4)
  mu1 = mean(df_oos$rgdp_ag_lag_1)
  mu2 = mean(df_oos$rdi_ag_lag_4)
  out_oos = lm(df_oos$ci_qd ~ x1 + x2)
  ####################################
  # implement the model - out of sample
  ####################################
  ndata = nrow(df_oos)
  npred = 1
  output_oos = as.data.frame(matrix(0, ndata + npred, 7))
  input = endbal[1:ind5, ]
  output_oos[, 1] = input$ci_bal
  output_oos[1:ndata, 2] = out_oos$fitted.values
  dummy1 = ndata - 1
  # reconstruct in-sample balances: previous balance + fitted difference
  output_oos[2:ndata, 3] = output_oos[1:dummy1, 1] + output_oos[2:ndata, 2]
  # one-step-ahead prediction for the held-out quarter
  indx = cbind(df_total_dev$rgdp_ag_lag_1 - mu1, df_total_dev$rdi_ag_lag_4 - mu2)
  xreg_base = as.data.frame(t(indx[ind5, ]))
  names(xreg_base) = c("x1", "x2")
  fitted.base = as.data.frame(predict(out_oos, xreg_base))
  # get the values
  output_oos[ind5, 2] = fitted.base
  output_oos[ind5, 3] = output_oos[ndata, 1] + output_oos[ind1, 2]
  pct_error = 100 * (output_oos[ind5, 3] - output_oos[ind5, 1]) / output_oos[ind5, 3]
  result_oos = as.data.frame(cbind(n, output_oos[ind1, 1], output_oos[ind1, 3], pct_error))
  return(result_oos)
}
# Run the C&I out-of-sample back-test, holding out 1 to 4 quarters in turn.
oos_ci(1)
oos_ci(2)
oos_ci(3)
oos_ci(4)
#####################################
# Prediction CI
####################################
# 95% prediction intervals for the C&I quarterly-difference model under
# the three supervisory scenarios.  Relies on globals: b1, endbal, out
# (the C&I lm), mu1/mu2, base, adverse, severe and date1.
ndata=nrow(b1)
npred=9
# columns: historical + fit/lwr/upr for each of the three scenarios
output_ci=as.data.frame(matrix(0, ndata+npred,10))
D1=which(endbal$year==2003 & endbal$q==1)
D2=which(endbal$year==2015 & endbal$q==4)
input=endbal[D1:D2,]
output_ci[1:ndata, 1]=input$ci_qd
output_ci[1:ndata, 2]=out$fitted.values
#PREDICT
aaaa=which(base$year==2016 & base$q==1)
bbbb=which(base$year==2018 & base$q==1)
indx=cbind(base$rgdp_ag_lag_1-mu1,base$rdi_ag_lag_4-mu2)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2")
# "predict" partial-matches interval="prediction" in predict.lm
fitted.base=as.data.frame(predict(out, xreg_base, interval = "predict",level = 0.95))
indx=cbind(adverse$rgdp_ag_lag_1-mu1,adverse$rdi_ag_lag_4-mu2)
xreg_adverse=as.data.frame(indx[aaaa:bbbb,])
names(xreg_adverse)=c("x1", "x2")
fitted.adverse=predict(out, xreg_adverse, interval = "predict",level = 0.95)
indx=cbind(severe$rgdp_ag_lag_1-mu1,severe$rdi_ag_lag_4-mu2)
xreg_severe=as.data.frame(indx[aaaa:bbbb,])
names(xreg_severe)=c("x1", "x2")
fitted.severe=predict(out, xreg_severe, interval = "predict",level = 0.95)
# rows 53:61 are the nine forecast quarters
output_ci[53:61, 2]=fitted.base[,1] #fit base
output_ci[53:61, 3]=fitted.base[,2] #lwr base
output_ci[53:61, 4]=fitted.base[,3] #upr base
output_ci[53:61, 5]=fitted.adverse[,1] #fit adverse
output_ci[53:61, 6]=fitted.adverse[,2] #lwr adverse
output_ci[53:61, 7]=fitted.adverse[,3] #upr adverse
output_ci[53:61,8]=fitted.severe[,1]#fit severe
output_ci[53:61,9]=fitted.severe[,2] #lwr severe
output_ci[53:61,10]=fitted.severe[,3] # upr Severe
colnames(output_ci)=c("Historical", "estimated_base_fit", "estimated_base_lwr",
"estimated_base_upr", "estimated_adverse_fit",
"estimated_adverse_lwr", "estimated_adverse_upr",
"estimated_severe_fit", "estimated_severe_lwr",
"estimated_severe_upr")
# NOTE(review): write.csv() ignores col.names (warns); argument is a no-op.
write.csv(as.data.frame(cbind(date1,output_ci)), "ci_prediction_ci.csv", col.names = T, row.names = F)
<file_sep>/SandB/NIE_SB_8-21.R
###################################################################
# Project: Bank of Hope
# PPNR Models - NIE Salary and Benefits
###################################################################
#setwd("C:/Users/doxborrow/Desktop/BoH/Modeling/PPNR/NIE_ Salary and Benefits")
setwd("//useomvfs77/MCLP/Common/Clients/Bank of Hope/Model Development/Code/PPNR/NIE_ Salary and Benefits")
#library(DataAnalytics)
library(tseries)
library(urca)
#library (fUnitRoots)
library(lubridate)
library(forecast)
library(tseries)
library(CADFtest)
library (leaps)
library(data.table)
library(openxlsx)
library(car)
library(dplyr)
#read in the raw data
# NIE Salary & Benefits: load the quarterly series, restrict to the
# 2005Q1-2015Q4 development window and build qd/ad/qg/ag transforms.
ppnr=read.csv("NIE salary and benefits.csv",header = TRUE)
names(ppnr)= c("Date", "Year", "q", "NIE_SB")
ind1=which(ppnr$Year==2005 & ppnr$q==1)
ind2=which(ppnr$Year==2015 & ppnr$q==4)
ppnr=ppnr[c(ind1:ind2),]
ppnr$date = as.Date(as.yearqtr(ppnr$Date, format = "%YQ%q"))
#plot the data
#jpeg(filename= "NIE_SB.jpg", width=720, height=480, bg="white" )
plot(c(1:nrow(ppnr)), ppnr$NIE_SB, type="l", col="red",lty="dotted",ylab="non_int")
# Plot
sb_area_df <- melt(ppnr[,c("date","NIE_SB")], id = "date")
sba_area_p <- ggplot(sb_area_df, aes(x=date, y=value/1000, fill=variable, group=variable)) + geom_area() + xlab("Date") + ylab("Salary and Benefits ($ Mill)") + ggtitle("Non-Interest Expense: Salary and Benefits") + theme(plot.title = element_text(hjust = 0.5)) + theme(text = element_text(size=15)) + theme(legend.title=element_blank()) + theme(legend.position="bottom") + theme(legend.position="none")
sba_area_p
#make the transformations for the data
# quarterly diff (qd), annual diff (ad), quarterly log-growth (qg),
# annual log-growth (ag).  NOTE(review): lag() here relies on dplyr
# masking stats::lag (library(dplyr) is loaded last in the header).
ppnr$NIE_SB_qd=ppnr$NIE_SB-lag(ppnr$NIE_SB)
ppnr$NIE_SB_ad=ppnr$NIE_SB-lag(ppnr$NIE_SB, 4)
ppnr$NIE_SB_qg=c(NA, diff(log(ppnr$NIE_SB)))
ppnr$NIE_SB_ag= log(ppnr$NIE_SB)-log(lag(ppnr$NIE_SB, 4))
#Stationarity test (Phillips-Perron); rule-of-thumb max lag
max_lag= floor(12*(nrow(ppnr)/100)^(1/4))
pp.test(na.remove(ppnr$NIE_SB_qd), lshort = T) #qd is stationary
pp.test(na.remove(ppnr$NIE_SB_ad), lshort = T) #ad fails
pp.test(na.remove(ppnr$NIE_SB_ag), lshort = T) #ag fails
pp.test(na.remove(ppnr$NIE_SB_qg), lshort = T) #qg is stationary
# Boxplot of PPNR variables (check for seasonality by quarter)
boxplot(ppnr$NIE_SB_qg~ppnr$q, main="Salary and benefits",
xlab="season/quarter", ylab="NIE_SB QG")
acf(na.remove(ppnr$NIE_SB_qg), lag.max = 25)
boxplot(ppnr$NIE_SB_qd~ppnr$q, main="Salary and benefits",
xlab="season/quarter", ylab="NIE_SB QoQ Diff")
acf(na.remove(ppnr$NIE_SB_qd), lag.max = 25)
#######
#read in the macro vars
#######
# Load the three supervisory macro scenarios and trim each to the
# 2005Q1-2018Q1 window used for estimation + nine forecast quarters.
######
#base
######
base=read.csv("base.csv", header=T)
aaa=which(base$year==2005 & base$quarter==1)
bbb=which(base$year==2018 & base$quarter==1)
base=base[aaa:bbb,]
#########
#adverse
#########
adverse=read.csv("adverse.csv", header=T)
aaa=which(adverse$year==2005 & adverse$quarter==1)
bbb=which(adverse$year==2018 & adverse$quarter==1)
adverse=adverse[aaa:bbb,]
########
#severe
########
severe=read.csv("severe.csv", header=T)
aaa=which(severe$year==2005 & severe$quarter==1)
bbb=which(severe$year==2018 & severe$quarter==1)
severe=severe[aaa:bbb,]
##################
#development macro
##################
# base-scenario macro restricted to the model development window
D1=which(base$year==2005 & base$q==1)
D2=which(base$year==2015 & base$q==4)
macro_dev=base[c(D1:D2), ]
########################################
# Create the dep_var matrix
########################################
#create var info
# Metadata table driving StepFun variable selection: one row per macro
# candidate with its tier (selection priority), economic base series,
# lag, differenced flag and expected coefficient sign (+1/-1/0=either).
# NOTE: patterns are applied in order, so a later grepl() overwrites an
# earlier match (e.g. "spr10_q" rows are first set by "spr10").
var.names=colnames(macro_dev[,-c(1,2,3)])
var_info=as.data.frame(matrix(0, length(var.names), 6 ))
names(var_info) = c("var", "tier", "base", "lag", "diff", "sign")
var_info[,1]=var.names
var_info[,5]=0
#diff: flag quarterly/annual/year-over-year differenced transforms
var_info[grepl("_qd", var_info$var),5] = TRUE
var_info[grepl("_yd", var_info$var),5] = TRUE
var_info[grepl("_ad", var_info$var),5] = TRUE
#lag: extract lag order from the variable-name suffix
var_info[grepl("_lag_1", var_info$var),4] = 1
var_info[grepl("_lag_2", var_info$var),4] = 2
var_info[grepl("_lag_3", var_info$var),4] = 3
var_info[grepl("_lag_4", var_info$var),4] = 4
#var.base: map each candidate to its underlying economic series
var_info[grepl("ngdp", var_info$var),3] = "gdp"
var_info[grepl("rgdp", var_info$var),3] = "gdp"
var_info[grepl("rdi", var_info$var),3] = "dpi"
var_info[grepl("ndi", var_info$var),3] = "dpi"
var_info[grepl("ur_", var_info$var),3] = "ur_diff"
var_info[grepl("UR_", var_info$var),3] = "ur_diff"
var_info[grepl("cpi_", var_info$var),3] = "cpi"
var_info[grepl("i3m", var_info$var),3] = "i3m_diff"
var_info[grepl("i5y", var_info$var),3] = "i5yr_diff"
var_info[grepl("i10y", var_info$var),3] = "i10yr_diff"
var_info[grepl("bbb", var_info$var),3] = "bbb_diff"
var_info[grepl("imort", var_info$var),3] = "imort_diff"
var_info[grepl("iprim", var_info$var),3] = "iprim_diff"
var_info[grepl("cppi", var_info$var),3] = "cppi_diff"
var_info[grepl("dji", var_info$var),3] = "dji_diff"
var_info[grepl("VIX", var_info$var),3] = "vix"
var_info[grepl("vix", var_info$var),3] = "vix"
var_info[grepl("hpi_q", var_info$var),3] = "hpi_diff"
var_info[grepl("HPI_q", var_info$var),3] = "hpi_diff"
var_info[grepl("hpi_a", var_info$var),3] = "hpi_diff"
var_info[grepl("HPI_a", var_info$var),3] = "hpi_diff"
var_info[grepl("hpi_g", var_info$var),3] = "hpi_diff"
var_info[grepl("spr10", var_info$var),3] = "spr10"
var_info[grepl("spr10_q", var_info$var),3] = "spr10"
var_info[grepl("spr10_a", var_info$var),3] = "spr10"
var_info[grepl("equipment", var_info$var), 3]= "equipment"
var_info[grepl("pfi_nonres", var_info$var), 3]= "pfi_nonres"
var_info[grepl("willreit", var_info$var), 3]= "willreit"
var_info[grepl("KOGDP", var_info$var), 3]= "KOGDP"
var_info[grepl("KOCPI", var_info$var), 3]= "KOCPI"
var_info[grepl("CCI", var_info$var),3] = "CCI_g"
var_info[grepl("NCREIF", var_info$var),3] = "NCREIF"
#sign: expected coefficient sign (+1 positive, -1 negative, 0 either)
var_info[grepl("ngdp", var_info$var),6] = 1
var_info[grepl("rgdp", var_info$var),6] = 1
var_info[grepl("rdi", var_info$var),6] = 1
var_info[grepl("ndi", var_info$var),6] = 1
var_info[grepl("ur_", var_info$var),6] = -1
var_info[grepl("UR_", var_info$var),6] = -1
var_info[grepl("cpi_", var_info$var),6] = 0
var_info[grepl("i3m", var_info$var),6] = 0
var_info[grepl("i5y", var_info$var),6] = 0
var_info[grepl("i10y", var_info$var),6] = 0
var_info[grepl("bbb", var_info$var),6] = -1
var_info[grepl("imort", var_info$var),6] =0
var_info[grepl("iprim", var_info$var),6] = 0
var_info[grepl("cppi", var_info$var),6] = 1
var_info[grepl("dji", var_info$var),6] = 1
var_info[grepl("VIX", var_info$var),6] = -1
var_info[grepl("vix", var_info$var),6] = -1
var_info[grepl("hpi_q", var_info$var),6] = 1
var_info[grepl("HPI_q", var_info$var),6] = 1
var_info[grepl("hpi_a", var_info$var),6] = 1
var_info[grepl("HPI_a", var_info$var),6] = 1
var_info[grepl("hpi_g", var_info$var),6] = 1
var_info[grepl("spr10", var_info$var),6] = -1
var_info[grepl("spr10_q", var_info$var),6] = -1
var_info[grepl("spr10_a", var_info$var),6] = -1
var_info[grepl("equipment", var_info$var), 6]= 1
var_info[grepl("pfi_nonres", var_info$var), 6]= 1
var_info[grepl("willreit", var_info$var), 6]= 1
var_info[grepl("KOGDP", var_info$var), 6]= 1
var_info[grepl("KOCPI", var_info$var), 6]= 0
var_info[grepl("CCI", var_info$var),6] = 1
var_info[grepl("NCREIF", var_info$var),6] = 1
#Tier: selection priority (1 considered first, then 2, then 3)
var_info[grepl("ngdp", var_info$var),2] = 1
var_info[grepl("rgdp", var_info$var),2] = 1
var_info[grepl("rdi", var_info$var),2] = 1
var_info[grepl("ndi", var_info$var),2] = 1
var_info[grepl("ur_", var_info$var),2] = 1
var_info[grepl("UR_", var_info$var),2] = 1
var_info[grepl("cpi_", var_info$var),2] = 3
var_info[grepl("i3m", var_info$var),2] = 3
var_info[grepl("i5y", var_info$var),2] = 3
var_info[grepl("i10y", var_info$var),2] = 3
var_info[grepl("bbb", var_info$var),2] = 3
var_info[grepl("imort", var_info$var),2] = 3
var_info[grepl("iprim", var_info$var),2] = 3
var_info[grepl("cppi", var_info$var),2] = 2
var_info[grepl("dji", var_info$var),2] = 2
var_info[grepl("VIX", var_info$var),2] = 3
var_info[grepl("vix", var_info$var),2] = 3
var_info[grepl("hpi_q", var_info$var),2] = 2
var_info[grepl("HPI_q", var_info$var),2] = 2
var_info[grepl("hpi_a", var_info$var),2] = 2
var_info[grepl("HPI_a", var_info$var),2] = 2
var_info[grepl("hpi_g", var_info$var),2] = 2
var_info[grepl("spr10", var_info$var),2] = 3
var_info[grepl("spr10_q", var_info$var),2] = 3
var_info[grepl("spr10_a", var_info$var),2] = 3
var_info[grepl("equipment", var_info$var), 2]= 2
var_info[grepl("pfi_nonres", var_info$var), 2]= 2
var_info[grepl("willreit", var_info$var), 2]= 2
var_info[grepl("KOGDP", var_info$var), 2]= 3
var_info[grepl("KOCPI", var_info$var), 2]= 3
var_info[grepl("CCI", var_info$var),2] = 3
var_info[grepl("NCREIF", var_info$var),2] = 3
#####################
#Variable Selection
#####################
#dependent var
# Assemble the data.tables StepFun expects: b = dependent variables,
# c = candidate macro regressors, a = variable metadata.
# NOTE(review): `b` and `c` shadow base R names; c(...) calls still
# resolve to the function because R searches function bindings in calls.
D1=which(ppnr$Year==2005 & ppnr$q==1)
D2=which(ppnr$Year==2015 & ppnr$q==4)
dependent= as.data.frame(cbind(ppnr$NIE_SB_qd, ppnr$NIE_SB_qg))[c(D1:D2),]
names(dependent)= c("NIE_SB_qd","NIE_SB_qg")
date_col=as.data.frame(ppnr$Date)[c(D1:D2),]
b1=cbind(date_col, dependent)
names(b1)=c("Date", names(dependent))
b=data.table(b1)
c1=cbind(date_col, macro_dev)
names(c1)=c("Date", names(macro_dev))
c=data.table(c1)
a=data.table(var_info)
df_total_dev <- as.data.frame(cbind(b1, c1))
#######
#model
#######
# Stepwise variable selection via the project StepFun helper, one tier
# at a time; the variable chosen at each tier is fixed for the next.
source("StepFun.R")
fix_vars0=c("1")
mod1_NIE_SB_qd_sep=StepFun(a,b,c, tier=1, #indicate which tier of variables to consider
y='NIE_SB_qd~', #indicate response variable
thresh=c(0.05, 0.01, 0.001), #significance level for SE based p-value and LR test based p-value for each tier
criteria='SE.p', #variable selection criteria; other values='bic', 'LR.p', 'SE.p', 'rsq'
vars0 = c("1"), #model 0 variables
fix_vars0, #indicate which variables are fixed
out.print=T #indicate whether intermediate output will be printed
)
# add ngdp_qg_lag_3
fix_vars0=c("1","ngdp_qg_lag_3")
mod1_NIE_SB_qd_sep=StepFun(a,b,c, tier=2, #indicate which tier of variables to consider
y='NIE_SB_qd~', #indicate response variable
thresh=c(0.05, 0.01, 0.001), #significance level for SE based p-value and LR test based p-value for each tier
criteria='SE.p', #variable selection criteria; other values='bic', 'LR.p', 'SE.p', 'rsq'
vars0 = c("1","ngdp_qg_lag_3"), #model 0 variables
fix_vars0, #indicate which variables are fixed
out.print=T #indicate whether intermediate output will be printed
)
# add djia_qg_lag_1
fix_vars0=c("1","ngdp_qg_lag_3","djia_qg_lag_1")
mod1_NIE_SB_qd_sep=StepFun(a,b,c, tier=3, #indicate which tier of variables to consider
y='NIE_SB_qd~', #indicate response variable
thresh=c(0.05, 0.01, 0.001), #significance level for SE based p-value and LR test based p-value for each tier
criteria='SE.p', #variable selection criteria; other values='bic', 'LR.p', 'SE.p', 'rsq'
vars0 = c("1","ngdp_qg_lag_3","djia_qg_lag_1"), #model 0 variables
fix_vars0, #indicate which variables are fixed
out.print=T #indicate whether intermediate output will be printed
)
# no variable added
# Save the alg model coefficients
coef_sb_alg <- mod1_NIE_SB_qd_sep[[1]]$final_model$coefficients
write.csv(coef_sb_alg, "coef_sb_alg.csv")
#####################
#Independent Variables - Demeaned
#####################
# Final Salary & Benefits model: NIE_SB_qd ~ demeaned ngdp_qg_lag_3 +
# demeaned djia_qg_lag_1.  Keep means/sds for scenario demeaning and
# sensitivity shocks later.
x1=c1$ngdp_qg_lag_3-mean(c1$ngdp_qg_lag_3)
mu1=mean(c1$ngdp_qg_lag_3)
sd1=sd(c1$ngdp_qg_lag_3)
x2=c1$djia_qg_lag_1-mean(c1$djia_qg_lag_1)
mu2=mean(c1$djia_qg_lag_1)
sd2=sd(c1$djia_qg_lag_1)
#################################
# Stationarity
#################################
# ADF tests on the demeaned regressors (lag order chosen by BIC)
summary(ur.df(na.remove(x1), lags=6, selectlags = 'BIC'))
summary(ur.df(na.remove(x2), lags=6, selectlags = 'BIC'))
#####################
# Model Estimation
#####################
out=lm(b1$NIE_SB_qd~x1+x2)
summary(out)
# Save the model coefficients
coef_sb <- summary(out)$coefficients
write.csv(coef_sb, "coef_sb.csv")
# Durbin-Watson test for serial correlation (up to lag 3)
durbinWatsonTest(out, 3)
#Multicollinearity
vif(out)
#Stationarity of the underlying base-scenario series
pp.test(na.remove(base$ngdp_qg))
pp.test(na.remove(base$djia_qg))
#####################
# Residual tests
#####################
# Residual diagnostics for the Salary & Benefits model, then reconstruct
# in-sample balances from fitted quarterly differences.
out_res=out$residuals
out_res2=rstandard(out)
# Autocorrelations
par(mfrow=c(1,2))
acf(out$residuals, main="")
pacf(out$residuals, main="")
par(mfrow=c(1,1))
#white noise tests
Box.test(out$residuals, type = "Ljung-Box", lag = 3) #null: independence ==> accept
durbinWatsonTest(out)
#Q-Q Plot
par(mfrow=c(1,1))
qqnorm(out_res2, ylab="Residuals", xlab="Quantiles of Standard Normal", main="NIE-Salary and Benefits")
qqline(out_res2)
# Residual vs predicted
plot(out$fitted.values,out_res2, ylab="Residuals", xlab="Predicted Values", main="NIE_SB Model", ylim=c(-5, 5))
abline(0, 0)
#implement the model
ndata=nrow(b1)
npred=9
# columns: 1 historical, 2/3 base fit+bal, 4/5 adverse, 6/7 severe
output=as.data.frame(matrix(0, ndata+npred,7))
# leading NA aligns fitted differences with the level series (the first
# qd observation is NA and is dropped by lm)
fitted.values=as.data.frame(c(NA, out$fitted.values))
output[1:ndata, 1]=ppnr$NIE_SB
output[1:ndata, 2]=fitted.values
dummy1=ndata-1
output[2:ndata, 3]= output[1:dummy1, 1] + output[2:ndata, 2]
plot(output[2:ndata, 3])
lines(output[2:ndata, 1], col='red')
#####################
#Scenario Forecasts
#####################
# Nine-quarter forecasts (2016Q1-2018Q1) under base/adverse/severe;
# balances accumulate recursively from the last historical level.
aaaa=which(base$year==2016 & base$quarter==1)
bbbb=which(base$year==2018 & base$quarter==1)
indx=cbind(base$ngdp_qg_lag_3-mu1, base$djia_qg_lag_1-mu2)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2")
fitted.base=as.data.frame(predict(out, xreg_base))
indx=cbind(adverse$ngdp_qg_lag_3-mu1,adverse$djia_qg_lag_1-mu2)
xreg_adverse=as.data.frame(indx[aaaa:bbbb,])
names(xreg_adverse)=c("x1", "x2")
fitted.adverse=as.data.frame(predict(out, xreg_adverse))
indx=cbind(severe$ngdp_qg_lag_3-mu1,severe$djia_qg_lag_1-mu2)
xreg_severe=as.data.frame(indx[aaaa:bbbb,])
names(xreg_severe)=c("x1", "x2")
fitted.severe=as.data.frame(predict(out, xreg_severe))
# rows 45:53 are the forecast quarters
output[45:53, 2]=fitted.base
output[45, 3]=output[ndata, 1] + output[45, 2]
for (i in 2:npred){
ab=44+i
ac=44+i-1
output[ab, 3]= output[ac, 3] + output[ab, 2]
}
output[1:ndata,c(4,5)]=output[1:ndata, c(2,3)]
output[1:ndata,c(6,7)]=output[1:ndata, c(2,3)]
output[45:53, 4]=fitted.adverse
output[45,5]= output[ndata, 1] + output[45, 4]
for (i in 2:npred){
ab=44+i
ac=44+i-1
output[ab, 5]= output[ac, 5] + output[ab, 4]
}
output[45:53, 6]=fitted.severe
output[45,7]=output[ndata, 1] + output[45, 6]
for (i in 2:npred){
ab=44+i
ac=44+i-1
output[ab, 7]= output[ac, 7] + output[ab, 6]
}
output[1, c(3,5,7)]=output[1,1]
output[which(output[,1]==0),1]=NA
#plot together
date1 = seq(ISOdate(2005,1,1), by = "quarter", length.out = 53)
plot(date1, output[,3], type='l', ylab="NIE Salary & Benefits")
lines(date1, output[,5], col='blue')
lines(date1, output[,7], col='red')
lines(date1, output[,1], col='green')
legend("topleft", legend= c("base", "adverse", "severe", "Historical"), fill=c("black", "blue", "red", "green"))
colnames(output)=c("Historical", "estimated_base", "estimated_base_bal", "estimated_adverse", "adverse_bal", "estimated_severe", "severe_bal")
output_NIE_SB=output
# NOTE(review): write.csv() ignores col.names (warns); argument is a no-op.
write.csv(as.data.frame(cbind(date1,output)), "NIE_SB_Projections.csv", col.names = T, row.names = F)
sb_gg_df <- data.frame(Date = date1, Actual = output$Historical, Base = output$estimated_base_bal, Adverse = output$adverse_bal, Severe = output$severe_bal, Fitted = append(output$estimated_base_bal[1:(length(output$estimated_base_bal)-9)],rep(NA,9)))
sb_gg_df_p <- melt(sb_gg_df, id="Date")
# Plot of forecasts
sb_fcst_plot <- ggplot(sb_gg_df_p, aes(x = Date, y = value/1000, color = variable, group = variable)) +
geom_line() +
xlab("Date") + ylab("Salary and Benefits (Mill $)") + ggtitle("Scenario Forecasts") +
theme(plot.title = element_text(hjust = 0.5)) +
theme(text = element_text(size=15)) + theme(legend.title=element_blank())
sb_fcst_plot
########################
#95% Confidence Interval
########################
#Generate the 95% confidence interval for the base case. Default in R is 95%.
Date_ci = as.Date(as.yearqtr(paste(base$year[aaaa:bbbb],base$q[aaaa:bbbb],sep="-")))
indx=cbind(base$ngdp_qg_lag_3-mu1, base$djia_qg_lag_1-mu2)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2")
fitted.base_ci=as.data.frame(predict(out, xreg_base, interval = "confidence"))
base_ci_df <- as.data.frame(cbind(Date_ci,fitted.base_ci))
colnames(base_ci_df) <- c("Date", "Fcst","Lower","Upper")
write.csv(base_ci_df, "CB Confidence Interval Data.csv", row.names = F)
base_ci_df <- melt(base_ci_df, id="Date")
gg_in_df <- data.frame(Date = as.Date(as.yearqtr(b1$Date, format = "%YQ%q")), Actual = b1$NIE_SB_qd, Fitted = output$estimated_base[1:length(b1$NIE_SB_qd)])
gg_in_df <- melt(gg_in_df, id="Date")
gg_fcst_df_ci <- rbind(gg_in_df,base_ci_df)
# Plot the historical actual and fitted with base 95% forecast
sb_fcst_plot_ci <- ggplot(gg_fcst_df_ci, aes(x = Date, y = value/1000, color = variable, group = variable)) +
geom_line() +
xlab("Date") + ylab("Salary and Benefits (Mill $)") + ggtitle("NIE Salary and Benefits Baseline Forecast and 95% CI") +
theme(plot.title = element_text(hjust = 0.5)) +
theme(text = element_text(size=15)) + theme(legend.title=element_blank())
sb_fcst_plot_ci
#####################
#sensitivity Analysis
#####################
#GDP
# Sensitivity of the Salary & Benefits forecast to +1sd and +2sd shocks on the
# (de-meaned) GDP regressor, holding DJIA at baseline. `output` is rebuilt here:
# col 1 = actual level, col 2 = fitted/forecast quarterly change, col 3 = implied
# level under base; cols 4/5 repeat for +1sd and cols 6/7 for +2sd.
ndata=nrow(b1)
npred=9
output=as.data.frame(matrix(0, ndata+npred,7))
# Model is on quarterly differences, so the first fitted value is NA.
fitted.values=as.data.frame(c(NA, out$fitted.values))
output[1:ndata, 1]=ppnr$NIE_SB
output[1:ndata, 2]=fitted.values
dummy1=ndata-1
# Reconstruct in-sample level: previous actual level + current fitted change.
output[2:ndata, 3]= output[1:dummy1, 1] + output[2:ndata, 2]
aaaa=which(base$year==2016 & base$quarter==1)
bbbb=which(base$year==2018 & base$quarter==1)
indx=cbind(base$ngdp_qg_lag_3-mu1, base$djia_qg_lag_1-mu2)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2")
fitted.base=as.data.frame(predict(out, xreg_base))
# +1 standard deviation shock on the GDP regressor only.
indx=cbind(base$ngdp_qg_lag_3-mu1+sd1, base$djia_qg_lag_1-mu2)
xreg_1sd=as.data.frame(indx[aaaa:bbbb,])
names(xreg_1sd)=c("x1", "x2")
fitted.1sd=as.data.frame(predict(out, xreg_1sd))
# +2 standard deviation shock on the GDP regressor only.
indx=cbind(base$ngdp_qg_lag_3-mu1+2*sd1, base$djia_qg_lag_1-mu2)
xreg_2sd=as.data.frame(indx[aaaa:bbbb,])
names(xreg_2sd)=c("x1", "x2")
fitted.2sd=as.data.frame(predict(out, xreg_2sd))
# NOTE(review): rows 45:53 hard-code the forecast window; this implicitly assumes
# ndata == 44 — confirm against nrow(b1).
output[45:53, 2]=fitted.base
output[45, 3]=output[ndata, 1] + output[45, 2]
# Cumulate forecast changes into levels, quarter by quarter.
for (i in 2:npred){
ab=44+i
ac=44+i-1
output[ab, 3]= output[ac, 3] + output[ab, 2]
}
output[1:ndata,c(4,5)]=output[1:ndata, c(2,3)]
output[1:ndata,c(6,7)]=output[1:ndata, c(2,3)]
output[45:53, 4]=fitted.1sd
output[45,5]= output[ndata, 1] + output[45, 4]
for (i in 2:npred){
ab=44+i
ac=44+i-1
output[ab, 5]= output[ac, 5] + output[ab, 4]
}
output[45:53, 6]=fitted.2sd
output[45,7]=output[ndata, 1] + output[45, 6]
for (i in 2:npred){
ab=44+i
ac=44+i-1
output[ab, 7]= output[ac, 7] + output[ab, 6]
}
output[1, c(3,5,7)]=output[1,1]
output[which(output[,1]==0),1]=NA
date2=base$date.x[45:53]
# NOTE(review): dates are taken from rows 45:53 but the level columns from rows
# 43:51 — a two-row offset. Verify this pairing is intentional and not off-by-two.
sens_out=as.data.frame(cbind(date2, output[c(43:51),c(3,5,7)]))
colnames(sens_out) <- c("Date","Baseline","1_std","2_std")
gdp_sens <- sens_out
gdp_sens$Date <- as.Date(as.yearqtr(gdp_sens$Date, format = "Q%q %Y"))
write.csv(gdp_sens, "sensitivity NIE_SB GDP.csv", row.names = F)
# Plot
gdp_df_gg <- melt(gdp_sens, id = "Date")
gdp_df_gg_p <- ggplot(data = gdp_df_gg, mapping = aes(x = Date, y = value/1000, group = variable, color = variable)) + geom_line() + theme(legend.position = 'bottom') + theme(legend.title=element_blank()) + xlab("Date") + ylab("Salary and Benefits ($ Mill)") + ggtitle("Nominal GDP Quarterly Growth Rate") + theme(plot.title = element_text(hjust = 0.5)) + theme(text = element_text(size=15)) + theme(legend.position="bottom")
gdp_df_gg_p
#DJIA
# Same sensitivity construction as the GDP section above, but the +1sd/+2sd
# shocks are applied to the DJIA regressor while GDP is held at baseline.
ndata=nrow(b1)
npred=9
output=as.data.frame(matrix(0, ndata+npred,7))
fitted.values=as.data.frame(c(NA, out$fitted.values))
output[1:ndata, 1]=ppnr$NIE_SB
output[1:ndata, 2]=fitted.values
dummy1=ndata-1
output[2:ndata, 3]= output[1:dummy1, 1] + output[2:ndata, 2]
aaaa=which(base$year==2016 & base$quarter==1)
bbbb=which(base$year==2018 & base$quarter==1)
indx=cbind(base$ngdp_qg_lag_3-mu1, base$djia_qg_lag_1-mu2)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2")
fitted.base=as.data.frame(predict(out, xreg_base))
# +1 standard deviation shock on the DJIA regressor only.
indx=cbind(base$ngdp_qg_lag_3-mu1, base$djia_qg_lag_1-mu2+sd2)
xreg_1sd=as.data.frame(indx[aaaa:bbbb,])
names(xreg_1sd)=c("x1", "x2")
fitted.1sd=as.data.frame(predict(out, xreg_1sd))
# +2 standard deviation shock on the DJIA regressor only.
indx=cbind(base$ngdp_qg_lag_3-mu1, base$djia_qg_lag_1-mu2+2*sd2)
xreg_2sd=as.data.frame(indx[aaaa:bbbb,])
names(xreg_2sd)=c("x1", "x2")
fitted.2sd=as.data.frame(predict(out, xreg_2sd))
output[45:53, 2]=fitted.base
output[45, 3]=output[ndata, 1] + output[45, 2]
for (i in 2:npred){
ab=44+i
ac=44+i-1
output[ab, 3]= output[ac, 3] + output[ab, 2]
}
output[1:ndata,c(4,5)]=output[1:ndata, c(2,3)]
output[1:ndata,c(6,7)]=output[1:ndata, c(2,3)]
output[45:53, 4]=fitted.1sd
output[45,5]= output[ndata, 1] + output[45, 4]
for (i in 2:npred){
ab=44+i
ac=44+i-1
output[ab, 5]= output[ac, 5] + output[ab, 4]
}
output[45:53, 6]=fitted.2sd
output[45,7]=output[ndata, 1] + output[45, 6]
for (i in 2:npred){
ab=44+i
ac=44+i-1
output[ab, 7]= output[ac, 7] + output[ab, 6]
}
output[1, c(3,5,7)]=output[1,1]
output[which(output[,1]==0),1]=NA
date2=base$date.x[45:53]
# NOTE(review): same date-row (45:53) vs level-row (43:51) offset as in the GDP
# section — confirm the two-row shift is intentional.
sens_out=as.data.frame(cbind(date2, output[c(43:51),c(3,5,7)]))
colnames(sens_out) <- c("Date","Baseline","1_std","2_std")
djia_sens <- sens_out
djia_sens$Date <- as.Date(as.yearqtr(djia_sens$Date, format = "Q%q %Y"))
write.csv(djia_sens, "sensitivity NIE_SB DJIA.csv", row.names = F)
# Plot
djia_df_gg <- melt(djia_sens, id = "Date")
djia_df_gg_p <- ggplot(data = djia_df_gg, mapping = aes(x = Date, y = value/1000, group = variable, color = variable)) + geom_line() + theme(legend.position = 'bottom') + theme(legend.title=element_blank()) + xlab("Date") + ylab("Salary and Benefits ($ Mill)") + ggtitle("DJIA Quarterly Growth Rate") + theme(plot.title = element_text(hjust = 0.5)) + theme(text = element_text(size=15)) + theme(legend.position="bottom")
djia_df_gg_p
# Boot strap the regression coefficients
#rownames(mod1_NIE_SB_qd_sep[[1]]$final_model$coefficients)
# Model formula as a string; `lm` coerces it to a formula.
model <- "NIE_SB_qd ~ ngdp_qg_lag_3 + djia_qg_lag_1"
summary(lm(model, data = df_total_dev))
# Fix the seed so the bootstrap resamples are reproducible.
set.seed(2)
# Bootstrap 95% CI for regression coefficients
library(boot)
# Statistic function for boot(): refit the regression on the bootstrap
# resample selected by `indices` and return its coefficient vector.
bs = function(data, indices, formula) {
  coef(lm(formula, data = data[indices, ]))
}
# bootstrapping with 100 replications
# NOTE(review): comment says 100 but R=5000 replications are actually drawn.
results = boot(
data=df_total_dev,
statistic=bs,
R=5000,
formula=model)
# Bootstrap standard errors: sd of each coefficient across the R resamples;
# z-values and two-sided normal p-values follow from them.
Names = names(results$t0)
SEs = sapply(data.frame(results$t), sd)
Coefs = as.numeric(results$t0)
zVals = Coefs / SEs
Pvals = 2*pnorm(-abs(zVals))
Formatted_Results = cbind(Names, Coefs, SEs, zVals, Pvals)
# Pot coefficient density
# One kernel-density plot per coefficient across bootstrap replications.
for (i in 1:length(names(results$t0))){
plot(density(results$t[,i]), main = paste(names(results$t0)[i],"Density",sep=" - "))
}
# Back-testing Analysis
# Train on 2005Q1-2014Q4, forecast 2015Q1-2015Q4 out of sample, and compare
# reconstructed balances against actuals.
bt_ppnr_df <- ppnr
bt_ppnr_df$date <- as.Date(as.yearqtr(bt_ppnr_df$Date, format = "%YQ%q"))
# Macro Variables
bt_macro_df <- as.data.frame(c1)
bt_macro_df$date <- as.Date(as.yearqtr(bt_macro_df$Date, format = "%YQ%q"))
# De-mean the regressors over the full back-test window.
bt_macro_df$ngdp_qg_lag_3_dm <- bt_macro_df$ngdp_qg_lag_3 - mean(bt_macro_df$ngdp_qg_lag_3)
bt_macro_df$djia_qg_lag_1_dm <- bt_macro_df$djia_qg_lag_1 - mean(bt_macro_df$djia_qg_lag_1)
# Merge the end bal and macro data
bt_df <- merge(x = bt_ppnr_df, y = bt_macro_df, by.x = "date", by.y = "date")
#names(bt_df)[names(bt_df) == 'quarter'] <- 'q'
# Partition the data
in1 <- which(bt_df$year==2005 & bt_df$q ==1)
in2 <- which(bt_df$year ==2014 & bt_df$q ==4)
out1 <- which(bt_df$year ==2015 & bt_df$q ==1)
out2 <- which(bt_df$year ==2015 & bt_df$q ==4)
insample <- bt_df[in1:in2,]
outsample <- bt_df[out1:out2,]
# Estimate the model on the insample portion
out_bt <- lm(NIE_SB_qd ~ ngdp_qg_lag_3_dm + djia_qg_lag_1_dm, data = insample)
summary(out_bt)
# Add the fitted values to the insample data
# Leading NA because the dependent variable is a quarterly difference.
insample$fitted <- append(NA,out_bt$fitted.values)
# Forecast added to the out of sample data
outsample$fitted <- predict(out_bt, outsample)
# Append the insample and out of sample data and select the columns
bt_df_final <- rbind(insample, outsample)
bt_df_final <- bt_df_final[,c("date","NIE_SB","fitted")]
# Implied balance: prior actual level plus current fitted change.
fitted_bal <- bt_df_final[1:(nrow(bt_df_final)-1), 2] + bt_df_final[2:nrow(bt_df_final), 3]
bt_df_final$fitted_bal <- append(NA, fitted_bal)
# Plot
bt_df_final_p <- melt(bt_df_final[,c("date","NIE_SB","fitted_bal")], id = "date")
bt_df_final_plot <- ggplot(data = bt_df_final_p, mapping = aes(x = date, y = value, group = variable, color = variable)) + geom_line() + theme(legend.position = 'bottom') + theme(legend.title=element_blank()) + xlab("Date") + ylab("($)") + ggtitle("Out-of-Sample Forecast") + theme(plot.title = element_text(hjust = 0.5)) + theme(text = element_text(size=15)) + theme(legend.position="bottom")
bt_df_final_plot
# Table
# Percentage forecast error over the out-of-sample quarters.
fcst_table <- bt_df_final[out1:out2,c("date","NIE_SB","fitted_bal")]
fcst_table$p_error <- round(100*(fcst_table$fitted_bal-fcst_table$NIE_SB)/fcst_table$NIE_SB,2)
row.names(fcst_table) <- NULL
fcst_table
<file_sep>/Adobe/4-logistic_regression.R
################################################################################
#
#
# Program: 4-logistic_regression.R
# Author: <NAME>
# Purpose: run logistic regression and find the important predictor
#
#
#
# R-version: R version 3.3.4 (2016-06-21)
# -- "Bug in Your Hair" Copyright (C) 2016 The R Foundation
# for Statistical Computing Platform: x86_64-apple-darwin13.4.0 (64-bit)
################################################################################
### Environment Settings #######################################################
# Local paths to inputs, the shared dev-support library, and the output folder.
pth_inputs = "C:/Users/hong3/Desktop/OULAD2/inputs"
pth_lib = "C:/Users/hong3/Desktop/OULAD2/library"
pth_out = "C:/Users/hong3/Desktop/OULAD2"
### No need to make changes below after this line ##############################
# dev-support.R provides helper functions used below (e.g. concat()).
source(paste(pth_lib,"/dev-support.R", sep=""))
library("data.table")
library("dplyr")
library("ggplot2")
library("lubridate")
library("scales")
library("zoo")
library("plyr")
library("corrplot")
library("tidyr")
library("cluster")
library("fpc")
library("foreign")
library("nnet")
library("reshape2")
library("pROC")
library('ROCR')
library("car")
##########################################################################################
# Load Data
###########################################################################################
df_studentInfo <- read.csv(concat(pth_out,"/df_studentInfo.csv"))
# Quick missing-value audit per column.
colSums(is.na(df_studentInfo))
#create binary value for logistic model
# 1 = Pass/Distinction, 0 = otherwise (Fail/Withdrawn).
df_studentInfo$final_result<- ifelse(df_studentInfo$final_result.x %in% c("Pass", "Distinction"), 1,0)
# NOTE(review): integer-coding region treats a nominal factor as ordinal — confirm intended.
df_studentInfo$region <- as.integer(df_studentInfo$region)
x_data <- df_studentInfo[,c("gender", "region", "highest_education","imd_band" ,
"age_band", "num_of_prev_attempts", "studied_credits", "disability"
,"date_registration", "module_presentation_length", "count_date"
, "sum_click_sum", "frequency", "avg_date_submission","final_result")]
# NOTE(review): positional column 21 — fragile if the csv layout changes; verify.
target <- df_studentInfo[, 21]
##########################################################################################
# variable selection Process (Forward)
###########################################################################################
# Intercept-only model: lower bound of the stepwise search.
model.null = glm(final_result ~ 1,
data=x_data,
family = binomial(link="logit"))
# Full model with all candidate predictors: upper bound of the search.
model.full = glm(final_result ~ gender+region+highest_education+imd_band+age_band+num_of_prev_attempts+studied_credits+disability+
date_registration+module_presentation_length+count_date+sum_click_sum+frequency+avg_date_submission,
data=x_data,
family = binomial(link="logit")
)
# Stepwise (both directions, despite the "Forward" header) selection by AIC,
# reporting chi-squared tests at each step.
step(model.null,
scope = list(upper=model.full),
direction="both",
test="Chisq",
data=x_data)
##########################################################################################
# Model fit
###########################################################################################
# Formula chosen from the stepwise search above.
model <- final_result ~ count_date + gender + studied_credits + highest_education +
imd_band + num_of_prev_attempts + module_presentation_length +
sum_click_sum + frequency + disability + age_band + date_registration
fit <- glm(model, family = binomial(link = "logit"), data = x_data)
summary(fit)
# Multicollinearity of fitted model
vif(fit)
# exclude count_date , sums_click_sum
# Refit after dropping the two collinear activity variables flagged by VIF.
model2 <- final_result ~ gender + studied_credits + highest_education +
imd_band + num_of_prev_attempts + module_presentation_length +
frequency + disability + age_band + date_registration
fit2 <- glm(model2, family = binomial(link = "logit"), data = x_data)
summary(fit2)
# Output the main regression table
a <- stargazer::stargazer(fit,fit2, type = "text", model.numbers = F, column.labels = c("Fit","Fit2"))
write.csv(a, concat(pth_out,"/a.csv"))
# Save the model coefficients
coef_table <- as.data.frame(summary(fit2)$coefficients)
coef_table$X <- rownames(coef_table)
##########################################################################################
# Prediction
###########################################################################################
# In-sample predicted probabilities from the reduced model.
prob <- predict(fit2,type=c("response"))
roc_in_df <- data.frame(x_data$final_result, prob)
# Find AUC
auc_in <- round(auc(roc_in_df$x_data.final_result, roc_in_df$prob),4)
roc_in <- roc(final_result~ prob, data = x_data)
## get ROC and AUC
p <- plot(roc_in, main =paste0("Logit Model ROC\n AUC = ", auc_in))
##########################################################################################
# Manual boot strapping - coefficients and p-values
###########################################################################################
# NOTE(review): sampling below uses replace = FALSE on 80% of the rows, which is
# repeated subsampling rather than a true bootstrap — confirm this is intended.
par(mfrow=c(4,4))
# Make random samples
# Sample Number
sample_number <- 10
sample_fraction <- .8
# Round the sample size to a whole number
# Uses the sample fraction set above to partition in-out samples
sample_size <- round(nrow(x_data)*sample_fraction)
# Sample from the df
df_samples <- list()
coeff_l <- list()
pval_l <- list()
# Time the resampling loop.
start.time <- Sys.time()
start.time
for (i in 1:sample_number){
##In sample data
# Seed per iteration so each subsample is reproducible.
set.seed(i)
df_sample <- x_data[sample(nrow(x_data), sample_size, replace = FALSE), ]
logit <- glm(model2, family = binomial(link = "logit"), data = df_sample)
# Column 1 = coefficient estimates, column 4 = p-values.
coeff_l[[i]] <- round(coef(summary(logit))[,1],5)
pval_l[[i]] <- round(coef(summary(logit))[,4],5)
}
end.time <- Sys.time()
time.taken <- end.time - start.time
time.taken
# Turn into data frames
pval_boot_df <- as.data.frame(do.call("rbind",pval_l))
pval_boot_df <- data.frame(Sample=seq(from=1,to=sample_number,by=1),pval_boot_df)
coef_boot_df <- as.data.frame(do.call("rbind",coeff_l))
coef_boot_df <- data.frame(Sample=seq(from=1,to=sample_number,by=1),coef_boot_df)
# P-value Histograms
# One histogram per coefficient's p-values across the subsamples.
gg_p_df <- list()
for (i in 2:ncol(pval_boot_df)){
gg_p_df[[i]] <- melt(pval_boot_df[,c(1,i)], id = "Sample")
pval_h_plot <- ggplot(gg_p_df[[i]], aes(value)) + geom_histogram(fill = "#006600") + xlab("Value") + ylab("Frequency") + ggtitle(paste("P-Value",gg_p_df[[i]][1,2],sep=" - ")) + theme(text = element_text(size=12)) + theme(legend.title=element_blank()) + theme(plot.title = element_text(hjust = 0.5)) + geom_vline(xintercept=0)
suppressMessages(print(pval_h_plot))
}
# Coefficient Densities
# One density per coefficient's estimates across the subsamples.
gg_c_df <- list()
for (i in 2:ncol(coef_boot_df)){
gg_c_df[[i]] <- melt(coef_boot_df[,c(1,i)], id = "Sample")
coef_d_plot <- ggplot(gg_c_df[[i]], mapping = aes(x = value, group = variable, fill=variable)) + geom_density() + ggtitle(paste("Coef. Density",gg_c_df[[i]][1,2],sep=" - ")) + theme(text = element_text(size=12)) + theme(legend.title=element_blank()) + theme(plot.title = element_text(hjust = 0.5))+ xlab("Value") + ylab("Density") + theme(legend.position="none") + scale_fill_manual(values=c("#003399")) + scale_colour_manual(values=c("black")) + geom_vline(xintercept=0)
suppressMessages(print(coef_d_plot))
}
time.taken
<file_sep>/remediation/Remediation_Stationarity.R
# MRM Inquiry Response Support (Inquiry 18)
# Bank of Hope
# Original Developer: <NAME>
# Start Date: 01/24/2018
# Purpose: stationarity testing (dependent variables, residuals, MEVs) for the
# Net Charge Off models, in response to MRM Inquiry 18.
setwd("C:/Users/OL07805/Desktop/Desktop Things/Net Charge Off Models/MRM Inquiries/MRM Inquiry 18/")
# Provides get.Stationary.Results() used throughout this script.
source("BOH_dev_support.R")
library(dplyr)
library(data.table)
library(ggplot2)
library(lubridate)
library(zoo)
# Read in data used to train models
# Loads `data3` (panel of bank/segment NCOR observations with MEVs).
load("S3_00_Estimation_Sample_with_MEV_20171117")
########################################################################################################################
### Dependent Variable Stationary Testing
# Peer banks for the panel models (NOO CRE and Multifamily segments).
nooPeers <- c("Bank of Hope","Cathay General Bancorp","EAST WEST BANCORP","UMPQUA BANK","Western Alliance")
mfPeers <- c("Bank of Hope","Banner Corporation","Cathay General Bancorp","Columbia Banking System","EAST WEST BANCORP","PacWest Bancorp","UMPQUA BANK")
# Design dataframe to put into stationary function
# One row per quarter; one column per (segment, bank) NCOR series.
datesA <- unique(data3$Date)
data4 <- as.data.frame(c())
data4[1:64,"fileDate"] <- as.yearqtr(datesA[1:64])
data4$fileDate <- as.yearqtr(data4$fileDate)
# Define segments to get data from main datatable
segs <- c("CnI","OOCRE","NOOCRE","MF")
# Loop and get data from main table and put into different columns
for(i in segs){
# Panel segments (MF, NOOCRE) get one column per peer bank;
# BOH-only segments (CnI, OOCRE) get a single column.
if(i %in% c("MF","NOOCRE")){
if(i == "MF"){
peersA <- mfPeers
}
else{
peersA <- nooPeers
}
for(peer in peersA){
tempData <- data3 %>% filter(Portfolio2 == i,ENTITY_NAME == peer) %>% select(Date,NCOR)
names(tempData) <- c("fileDate",paste0("NCOR_",i,"_",peer))
# Join to designed dataframe
data4 <- left_join(data4,tempData,by = "fileDate")
}
}
else{
tempData <- data3 %>% filter(Portfolio2 == i,ENTITY_NAME == "Bank of Hope") %>% select(Date,NCOR)
names(tempData) <- c("fileDate",paste0("NCOR_",i))
# Join to designed dataframe
data4 <- left_join(data4,tempData,by = "fileDate")
}
}
# Run the stationarity tests on every NCOR column at the 10% significance level.
stationaryDependent <- get.Stationary.Results(names(data4)[!grepl("fileDate",names(data4))],data4,
sigLevel = 0.1)
########################################################################################################################
### Residual Stationary Testing
# First, train the models
# Impute missing entity names: unlabeled rows belong to Bank of Hope.
# BUG FIX: the original `data3[is.na(ENTITY_NAME)] <- "Bank of Hope"` used
# data.frame-style replacement on a data.table, which does not assign to the
# ENTITY_NAME column (it targets whole rows / errors depending on scope).
# Use data.table's `:=` to update the intended column in place.
data3[is.na(ENTITY_NAME), ENTITY_NAME := "Bank of Hope"]
# NOO CRE
# Peers are Cathay, East West, Umpqua, and western alliance
nooPeers <- c("Bank of Hope","Cathay General Bancorp","EAST WEST BANCORP","UMPQUA BANK","Western Alliance")
nooModel <- paste(c("ENTITY_NAME","empl_yg_EWMA4_lag1","rgdp_grw_NL_lag3","crei_yg_EWMA4_lag3"), collapse = "+")
# MF CRE
#Peers are Banner, Cathay, Columbia, East West, PacWest, and Umpqua
mfPeers <- c("Bank of Hope","Banner Corporation","Cathay General Bancorp","Columbia Banking System","EAST WEST BANCORP","PacWest Bancorp","UMPQUA BANK")
mfModel <- paste(c("ENTITY_NAME","empl_qg_EWMA2_lag3","gdp_grw_yoy_NL_lag2"),collapse = "+")
# C&I
ciModel <- paste(c("ca_rgsp_yg_EWMA4","ca_unemp_yd_EWMA4","vix_qd_lag4","prime_spread_log_qd","ca_hpi_yg_EWMA4_lag4"),collapse = "+")
# OOCRE
ooModel <- paste(c("empl_yg_EWMA4_lag4","crei_yg_EWMA2_lag4","prime_spread_log_qd_EWMA2_lag4"),collapse = "+")
# train models on relevant data
# Panel models (NOO, MF) are trained across peer banks; OLS models (C&I, OO)
# on Bank of Hope alone, each over its development window.
PANEL_NOO_FIT <- lm(paste0(c("NCOR ~ ",nooModel)),data = data3[(ENTITY_NAME %in% nooPeers) & (Date >= "2007 Q1" & Date <= "2016 Q4") & (Portfolio2 == "NOOCRE")])
PANEL_MF_FIT <- lm(paste0(c("NCOR ~ ",mfModel)),data = data3[(ENTITY_NAME %in% mfPeers) & (Date >= "2007 Q1" & Date <= "2016 Q4") & (Portfolio2 == "MF")])
OLS_CI_FIT <- lm(paste0(c("NCOR ~ ",ciModel)),data = data3[(ENTITY_NAME == "Bank of Hope") &(Date >= "2001 Q1" & Date <= "2016 Q4") & (Portfolio2 == "CnI")])
OLS_OO_FIT <- lm(paste0(c("NCOR ~ ",ooModel)),data = data3[(ENTITY_NAME == "Bank of Hope") &(Date >= "2007 Q1" & Date <= "2016 Q4") & (Portfolio2 == "OOCRE")])
# Loop across segments and get residuals for BOH
# For each segment, score the matching fitted model on the historic data and
# store the residual series (NCOR - prediction) as a new column of data4.
for(i in segs){
# Specify which model to use for each segment
if(i == "CnI"){
fit <- OLS_CI_FIT
}
else if(i == "OOCRE"){
fit <- OLS_OO_FIT
}
else if(i == "NOOCRE"){
fit <- PANEL_NOO_FIT
}
else{
fit <- PANEL_MF_FIT
}
# Do another loop for all panel banks
if(i %in% c("MF","NOOCRE")){
if(i == "MF"){
peersA <- mfPeers
}
else{
peersA <- nooPeers
}
for(peer in peersA){
# Make training dataset
train <- filter(data3,Portfolio2 == i,ENTITY_NAME == peer,Scenario == "Historic")
# Get model predictions
train[["predict"]] <- predict(fit,train)
# Get residuals
train[[paste0("resid_",i,"_",peer)]] <- train[["NCOR"]] - train[["predict"]]
# Extract only Date and residuals
train <- train[,c("Date",paste0("resid_",i,"_",peer))]
names(train)[1] <- "fileDate"
# Store residuals in other dataframe
data4 <- left_join(data4,train,by = "fileDate")
}
}
else{
# Make training dataset
train <- filter(data3,Portfolio2 == i,ENTITY_NAME == "Bank of Hope",Scenario == "Historic")
# Get model predictions
train[["predict"]] <- predict(fit,train)
# Get residuals
train[[paste0("resid_",i)]] <- train[["NCOR"]] - train[["predict"]]
# Extract only Date and residuals
train <- train[,c("Date",paste0("resid_",i))]
names(train)[1] <- "fileDate"
# Store residuals in other dataframe
data4 <- left_join(data4,train,by = "fileDate")
}
}
# Get results and merge
# Stationarity tests on all residual columns, then combine with the
# dependent-variable results and write out a single table.
stationaryResiduals <- get.Stationary.Results(names(data4)[grepl("resid",names(data4))],
data4,sigLevel = 0.1)
stationaryTable <- rbind(stationaryDependent,stationaryResiduals)
write.csv(stationaryTable,"stationaryTable.csv",row.names = F)
########################################################################################################################
### Macroeconomic Data
# Read in MEV data
mev <- fread("S0_09_MEV_data_transformed_111717.csv")
# Define macrovars used in models
nooMEV <- c("empl_yg_EWMA4_lag1","rgdp_grw_NL_lag3","crei_yg_EWMA4_lag3")
mfMEV <- c("empl_qg_EWMA2_lag3","gdp_grw_yoy_NL_lag2")
ciMEV <- c("ca_rgsp_yg_EWMA4","ca_unemp_yd_EWMA4","vix_qd_lag4","prime_spread_log_qd","ca_hpi_yg_EWMA4_lag4")
ooMEV <- c("empl_yg_EWMA4_lag4","crei_yg_EWMA2_lag4","prime_spread_log_qd_EWMA2_lag4")
# change col format to Date and then later filter based on development timeframe
mev$Date <- as.Date(mev$Date)
# Get stationary results
# Each segment's MEVs are tested over that segment's own development window
# (C&I starts in 2001, the others in 2007).
mfStationary <- get.Stationary.Results(mfMEV,mev[Scenario == "Historic" & Date >= "2007-03-31"],sigLevel = 0.1)
nooStationary <- get.Stationary.Results(nooMEV,mev[Scenario == "Historic" & Date >= "2007-03-31"],sigLevel = 0.1)
ooStationary <- get.Stationary.Results(ooMEV,mev[Scenario == "Historic" & Date >= "2007-03-31"],sigLevel = 0.1)
ciStationary <- get.Stationary.Results(ciMEV,mev[Scenario == "Historic" & Date >= "2001-03-31"],sigLevel = 0.1)
finalStationary <- rbind(mfStationary,nooStationary,ooStationary,ciStationary)
write.csv(finalStationary,"finalStationary.csv",row.names = F)
<file_sep>/PD/R02_WB_Data.R
##############################################################################
## File Name: R02_WB_Data.R
## Author: KZ
## Date: 5/1/2017 Created
## Purpose: To import and clean Wilshire data according to "02 - WB data.sas"
## Download : 8/9/2017
##############################################################################
setwd("C:/Users/ic07949/Desktop/KPMG/Model Development/development code and data/PD/Dataset/Wilshire")
# Install any missing required packages before attaching them.
requirements <- c("dplyr", "reshape2", "data.table","zoo")
for(rr in requirements){
if(! rr %in% installed.packages()) install.packages(rr)
}
require(dplyr)
require(reshape2)
require(data.table)
require(zoo)
## Import Wilshire Data (SAS File 02, Line 1 to 69)
wilshire <- read.csv("df_final_wilshire_sorted.csv")
# Rename SAS-style columns to snake_case used downstream.
names(wilshire)[names(wilshire)=="Note.Number"] <- "note_number"
names(wilshire)[names(wilshire)=="Non.Accrual.Code"] <- "non_accrual_code"
names(wilshire)[names(wilshire)=="NAP...NAIP...NAIP.in.GL"] <- "WB_balance"
names(wilshire)[names(wilshire)=="Rate.Over.Split"] <- "interest_rate"
wilshire$filedate <- as.Date(wilshire$filedate, "%Y-%m-%d")
wilshire$originationdate <- as.Date(wilshire$originationdate, "%Y-%m-%d")
wilshire$maturitydate <- as.Date(wilshire$maturitydate,"%Y-%m-%d")
wilshire_chargeoffs <- read.csv("wilshire charge offs cleaned.csv")
rates <- fread("rates2.csv")
# Two acquisition lists: 2009 snapshot ("Mirae" flag) and 2013 snapshot
# (Bank codes A/S/M); together they identify acquired (vs originated) loans.
wilshire_acquired_idx1 <- read.csv("Wilshire_aquired_list_20090930.csv")
names(wilshire_acquired_idx1)[names(wilshire_acquired_idx1)=="Note.Number"] <- "note_number"
wilshire_acquired_idx1 <- subset(wilshire_acquired_idx1, Mirae == "M", select = c(note_number,Mirae))
wilshire_acquired_idx2 <- read.csv("Wilshire_aquired_list_20131231.csv")
names(wilshire_acquired_idx2)[names(wilshire_acquired_idx2)=="Note_Number"] <- "note_number"
wilshire_acquired_idx2 <- filter(wilshire_acquired_idx2, Bank %in% c("A", "S", "M"))
acq <- c(unique(wilshire_acquired_idx1$note_number), unique(wilshire_acquired_idx2$note_number))
## create a label in Wilshire for acquired loans. (SAS File 02, Line 71 to 77)
wilshire$acquired_identifier <- ifelse(wilshire$note_number %in% acq,
paste("acquired_wilshire"),
paste("Wilshire_originated"))
table(wilshire$acquired_identifier)
## merge wilshire and chargeoffs
### Assign first_co_date to all entries for specific note_number. Assign charge-off amount to only the quarter where it occurs
wilshire$quarterDate = as.yearqtr(wilshire$filedate,"%Y-%m-%d" )
wilshire_chargeoffs$quarterDate = as.yearqtr(wilshire_chargeoffs$first_co_date,"%Y-%m-%d" )
wilshire_coInd = subset(wilshire_chargeoffs, select = c(note_number,quarterDate, co_ind))
wilshire_firstCoDate = subset(wilshire_chargeoffs, select = c(note_number, first_co_date))
# co_ind joins on (note, quarter) so it only flags the charge-off quarter;
# first_co_date joins on note alone so it is carried on every observation.
wilshire <- merge(x = wilshire, y = wilshire_coInd, by = c("note_number", "quarterDate"), all.x = TRUE)
wilshire <- merge(x = wilshire, y = wilshire_firstCoDate, by = c("note_number"), all.x = TRUE)
## create event and other variables (SAS File 02, Line 90 to 146)
wilshire$co_ind <- ifelse( is.na(wilshire$co_ind ), 0, wilshire$co_ind )
# Default event y: 1 if non-accrual (codes 2/4) or charged off; 0 if performing
# (codes 0/9, no charge-off); 111 marks anything unclassified for inspection.
wilshire$y <- ifelse( wilshire$non_accrual_code %in% c(2,4) | wilshire$co_ind == 1 ,
1,
ifelse( wilshire$non_accrual_code %in% c(0,9) & wilshire$co_ind != 1 ,
0, 111))
table(wilshire$y)
wilshire$yr_maturity <- year(wilshire$maturitydate)
wilshire$yr_file <- year(wilshire$filedate)
wilshire$mn_maturity <- month(wilshire$maturitydate)
wilshire$mn_file <- month(wilshire$filedate)
wilshire$q_file <- quarter(wilshire$filedate)
# Time-to-maturity in months.
wilshire$ttm_m= 12*(wilshire$yr_maturity - wilshire$yr_file ) + (
wilshire$mn_maturity - wilshire$mn_file)
# Loan age and original term in quarters (yearqtr differences are in years).
wilshire$loan_age_q <- (as.yearqtr(wilshire$filedate) - as.yearqtr(wilshire$originationdate)
) * 4
wilshire$term_q <- (as.yearqtr(wilshire$maturitydate) - as.yearqtr(wilshire$originationdate)
) * 4
# Percent of balance-life elapsed; zero-term loans are treated as 100% elapsed.
wilshire$POB <- 100 * wilshire$loan_age_q / wilshire$term_q
wilshire$POB <- ifelse(wilshire$term_q ==0,100,wilshire$POB)
## create variable min_non_acc_date (SAS File 02, Line 147 to 170)
# First file date on which each note was observed in default (y == 1).
indx_wilshire <- subset(wilshire, y==1, select = c(note_number,filedate))
indx_wilshire <- as.data.table(indx_wilshire[order(indx_wilshire$note_number, indx_wilshire$filedate),])
indx_wilshire <- indx_wilshire %>% group_by(note_number)%>% filter(row_number(filedate) == 1)
names(indx_wilshire)[names(indx_wilshire)=="filedate"] <- "min_non_acc_date"
wilshire <- merge(x = wilshire, y = indx_wilshire, by = "note_number", all.x = TRUE)
# Earliest of the charge-off date and the first non-accrual date.
# ifelse() strips the Date class (returns the numeric representation), which is
# why the result is converted back with as.Date() on the following line.
wilshire$f_non_acc_date <- ifelse(is.na(wilshire$first_co_date), as.Date(wilshire$min_non_acc_date),
ifelse (as.Date(wilshire$first_co_date) <= as.Date(wilshire$min_non_acc_date),
as.Date(wilshire$first_co_date),
as.Date(wilshire$min_non_acc_date)))
wilshire$f_non_acc_date <- as.Date(wilshire$f_non_acc_date)
## merge with the rate data set (SAS File 02, Line 173 to 179)
# Join treasury rates by file year/quarter.
rates <- subset(rates, select = -c(date,month))
setnames(rates, old = c("year","q"), new = c("yr_file","q_file"))
wilshire <- merge(x = wilshire, y = rates, by = c("yr_file","q_file"), all.x = TRUE)
## clean up data set (SAS File 02, Line 182 to 196)
wilshire_df <- filter(wilshire, yr_maturity > 2006)
# Drop observations dated after the first charge-off / first non-accrual —
# post-default records are not part of the PD modeling sample.
wilshire_df <- filter(wilshire_df, !(!is.na(as.yearqtr(first_co_date, '%Y-%m-%d')) & as.yearqtr(filedate, '%Y-%m-%d')>
as.yearqtr(first_co_date, '%Y-%m-%d') ))
wilshire_df <- filter(wilshire_df, !(!is.na(as.yearqtr(min_non_acc_date, '%Y-%m-%d')) & as.yearqtr(filedate, '%Y-%m-%d')>
as.yearqtr(min_non_acc_date, '%Y-%m-%d') ))
wilshire_df$boh_id <- "wilshire"
## create portfolio_id: CRE or CI (SAS File 02, Line 198 to 207)
wilshire_df$class_code2 <- as.numeric(as.character(wilshire_df$Class.Code))
# Keep only class codes belonging to the CRE and C&I portfolios.
wilshire_df <- filter(wilshire_df, class_code2 %in% c(2,3,5,6,10,13,20, 21,30,31,32,33,34,35,
36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,59,60,61,63,99))
## clean up data
wilshire_df <- filter(wilshire_df, !(is.na(WB_balance)))
wilshire_df <- filter(wilshire_df, !(interest_rate == 0 | is.na(interest_rate)))
## only need to create variable property_type for CRE model
wilshire_df$property_type <- wilshire_df$Property.Type.Code
wilshire_df$property_type <- as.numeric(wilshire_df$property_type)
table(wilshire_df$Property.Type.Code)
table(wilshire_df$property_type)
## create CRE/C&I portfolio ID
# NOTE(review): class codes 51 survive the filter above but appear in neither
# list below, so they fall into "error" and are dropped — confirm intended.
wilshire_df$portfolio_id <- ifelse(wilshire_df$class_code2 %in% c(2,3,5,6,10,13,20),
"CRE",
ifelse(wilshire_df$class_code2 %in% c(21,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,
46,47,48,49,50,59,60,61,63,99),
"CI", "error"))
table(wilshire_df$portfolio_id)
## filter out error portfolio_id
wilshire_df <- filter(wilshire_df, portfolio_id != "error")
## change the loan Rating system
table(wilshire_df$Loan.Rating.Code1)
# Map Wilshire's thousand-scale rating codes onto the BOH rating scale;
# unmapped codes become 111 so they stand out in the table() check below.
wilshire_df$boh_rating <- ifelse(wilshire_df$Loan.Rating.Code1 ==0, 0,
ifelse(wilshire_df$Loan.Rating.Code1 == 1000, 1,
ifelse(wilshire_df$Loan.Rating.Code1 == 2000, 2,
ifelse(wilshire_df$Loan.Rating.Code1 == 3000, 3,
ifelse(wilshire_df$Loan.Rating.Code1 == 4000, 4,
ifelse(wilshire_df$Loan.Rating.Code1 == 5000, 4,
ifelse(wilshire_df$Loan.Rating.Code1 == 6000, 1000,
ifelse(wilshire_df$Loan.Rating.Code1 == 7000, 2000,
ifelse(wilshire_df$Loan.Rating.Code1 == 8000, 3000,
ifelse(wilshire_df$Loan.Rating.Code1 == 9000, 4000, 111)
)))))))))
table(wilshire_df$boh_rating)
## clean up data set (SAS File 02 Line 549 to 601) and create final data set for Wilshire
df_final_wilshire <- as.data.frame(wilshire_df)
df_final_wilshire <- as.data.table(df_final_wilshire)
# Drop free-text PII-style columns before export.
df_final_wilshire <- subset(df_final_wilshire, select = -c(Name.1, Collateral.Address ))
# Standardize column names to the schema shared with the other bank datasets.
setnames(df_final_wilshire, old = c("Times.Past.Due.01.To.29.Days", "WB_balance","Original.Note.Amount",
"filedate", "yr_file","q_file","mn_file","note_number","originationdate",
"maturitydate","Fixed.or.Variable.Interest.Rate","f_non_acc_date",
"NAICS.Code"),
new = c("dpd0129","current_balance", "original_balance",
"fileDate","year","q","month","account_id","origination_date",
"maturity_date","interest_rate_type","first_nonacc_date",
"naicsCode"))
# Keep only the leading digits of the past-due counter, then coerce to numeric.
df_final_wilshire$dpd0129 <- substr(as.character(df_final_wilshire$dpd0129),1,3)
df_final_wilshire$dpd0129 <- as.numeric(as.character(df_final_wilshire$dpd0129))
## Warning message: NAs introduced by coercion
df_final_wilshire$dpd0129 <- ifelse(is.na(df_final_wilshire$dpd0129), 0, df_final_wilshire$dpd0129)
## The way R produces "NA" when changing character to numeric is different from SAS
## Thus, R results have more dpd0129 = 0. R: 88430 obs, SAS: 88376
## 0 observation with loan_spread_v>100 (or interest_rate = 1234.56) (SAS File 02 Line 582 to 585)
df_final_wilshire <- filter(df_final_wilshire, interest_rate != 1234.56)
df_final_wilshire$callReportCodeDescr <- "wilshire"
# Final column selection: loan identifiers, balances, ratings, treasury rates,
# calendar keys, and the default event flag y.
df_final_wilshire <- subset(df_final_wilshire, select = c(fileDate, account_id, boh_id, acquired_identifier,
portfolio_id, original_balance, origination_date, maturity_date,
current_balance, interest_rate, interest_rate_type,
loan_age_q, POB, boh_rating, DCR,
dpd0129, first_nonacc_date,
naicsCode, property_type, tb1m, tb3m, tb6m, tb1y, tb2y,
tb3y, tb5y, tb7y, tb10y,tb20y, tb30y, year, q, month, y,callReportCodeDescr))
save(df_final_wilshire, file = "Data output/df_final_wilshire.RData")
write.csv(df_final_wilshire, file = "Data output/df_final_wilshire.csv", row.names = FALSE)
<file_sep>/PD/R07_PD_CRE_alt_prop_res.R
##############################################################################
## File Name: R07_PD_CRE.R
## Author: KPMG
## Date: 7/5/2017 Created
## Purpose: To build PD model for BOH CRE portfolio
##############################################################################
#setwd("C:/Users/doxborrow/Desktop/BoH/Modeling/PD Models")
#setwd("//useomvfs77/mclp/Common/Clients/Bank of Hope/Model Development/PD Models")
setwd("C:/Users/ic07949/Desktop/dataset")
requirements <- c("dplyr","reshape2","data.table","zoo","ggplot2","pROC","boot","tidyr","lazyeval","Hmisc","corrplot","car")
# Install any missing packages, then attach them all.
# FIX: the original ran the identical install-if-missing loop twice (once over
# `rr`, once over `requirement`); a single pass is sufficient.
for(rr in requirements){
  if(! rr %in% installed.packages()) install.packages(rr)
}
lapply(requirements, require, character.only=T)
## load data
#cre_dev <- read_alt.csv("./Data output/df_boh_base_alt.csv")
cre_dev <- readRDS("df_boh_base.RDS")
cre_dev$fileDate <- as.Date(cre_dev$fileDate, "%Y-%m-%d")
main_data <- cre_dev
########################################################################
# CRE and Date filter
## Restrict the development sample to CRE loans observed 2007 Q4 - 2016 Q1.
cre_dev <- filter(cre_dev, portfolio_id == "CRE" & as.Date(fileDate) <= as.Date("2016-03-31") & as.Date(fileDate) >= as.Date("2007-12-31")  )
# Getting description of the current sample
y_filter_1 <- describe(cre_dev$y)
bal_sum_1 <- sum(cre_dev$current_balance)
##########################
# Plot of CRE vs CI Default
cre_dev_p <- cre_dev
# CI data for plot
## CI comparison sample comes from the unfiltered copy, same date window.
ci_dev_p <- filter(main_data, portfolio_id == "CI" & as.Date(fileDate) <= as.Date("2016-03-31") & as.Date(fileDate) >= as.Date("2007-12-31")  )
## Quarterly default rates (percent) for each portfolio, then merged on fileDate.
cre_default <- cre_dev_p %>% group_by(fileDate) %>% summarise(Default_CRE = 100*mean(y)) %>% data.frame()
ci_default<- ci_dev_p %>% group_by(fileDate) %>% summarise(Defaulter_CI = 100*mean(y)) %>% data.frame()
gg_df <- merge(cre_default, ci_default)
## Long format for ggplot (one row per date x portfolio).
gg_df <- data.frame(Date = gg_df[,1],CRE = gg_df$Default_CRE,CI = gg_df$Defaulter_CI) %>% reshape2::melt(id.vars = 'Date')
default_cre_ci_p <- ggplot(data = gg_df, mapping = aes(x = Date, y = value, group = variable, color = variable)) + geom_line() + theme(legend.position = 'bottom') + theme(legend.title=element_blank()) + xlab("Date") + ylab("Percentage %") + ggtitle("Default Rate Percentage CRE vs. CI") + theme(plot.title = element_text(hjust = 0.5)) + theme(text = element_text(size=15)) + theme(legend.position="bottom")
default_cre_ci_p
# save Plot
ggsave("./R output/CRE-CI_Default-Rate-Per_alt.png", width = 7, height = 7)
# Balance filter
## Drop zero-balance non-default observations (no exposure, no event).
cre_dev <- filter(cre_dev, !(current_balance == 0 & y == 0))
# Getting description of the current sample
y_filter_2 <- describe(cre_dev$y)
bal_sum_2 <- sum(cre_dev$current_balance)
## create final input variables
## Property-type dummies are mapped separately per source bank because BBCN and
## Wilshire use different property-type code schemes.
## (1) variable prop_res: 1-4 residential and multifamily
## BBCN: 10, 11
## wilshire: 11, 12
## Recode two "5+ residential" call-report descriptions to property_type 10.
cre_dev$property_type <- ifelse(cre_dev$callReportCodeDescr %in% c("Conv 5+ Residential Prop","CONVENTIONAL 5+ RESIDENTIAL"),10,cre_dev$property_type)
cre_dev$prop_res <- ifelse( (cre_dev$boh_id == "bbcn" & cre_dev$property_type %in% c(10 , 11) ) |
                              (cre_dev$boh_id == "wilshire" & cre_dev$property_type %in% c(11 , 12) ) ,
                            1,
                            0)
# Missing Wilshire Property type filter
cre_dev <- filter(cre_dev, !(boh_id == "wilshire" & is.na(property_type)))
## (2a) variable prop_retail: Retail shopping center
## BBCN: 15, 16, 17, 18
## wilshire: 1
cre_dev$prop_retail <- ifelse( (cre_dev$boh_id == "bbcn" & cre_dev$property_type %in% c(15, 16, 17, 18) ) |
                                 (cre_dev$boh_id == "wilshire" & cre_dev$property_type == 1 ) ,
                               1,
                               0)
## (2b) variable prop_auto: Gas Stations, Car Washes, and Auto Repair Centers
## BBCN: 33, 34, 36
## wilshire: 7, 8, 16
cre_dev$prop_auto <- ifelse( (cre_dev$boh_id == "bbcn" & cre_dev$property_type %in% c(33, 34, 36) ) |
                               (cre_dev$boh_id == "wilshire" & cre_dev$property_type %in% c(7, 8, 16) ) ,
                             1,
                             0)
## (2c) variable prop_hotel: Hotels and Motels
## BBCN: 28,29
## wilshire: 5
cre_dev$prop_hotel <- ifelse( (cre_dev$boh_id == "bbcn" & cre_dev$property_type %in% c(28, 29) ) |
                                (cre_dev$boh_id == "wilshire" & cre_dev$property_type == 5 ) ,
                              1,
                              0)
## (3) variable boh_rating1
## Collapse raw ratings into three buckets; any unmapped code becomes "error".
cre_dev$boh_rating1 <- ifelse(cre_dev$boh_rating %in% c(0,1,2,3), "R1",
                              ifelse(cre_dev$boh_rating %in% c(4,1000), "R2",
                                     ifelse(cre_dev$boh_rating %in% c(2000,3000), "R3", "error")
                              ))
#R1 and R2 variables
cre_dev$boh_rating1_R1 <- ifelse(cre_dev$boh_rating1 == "R1",1, 0)
cre_dev$boh_rating1_R2 <- ifelse(cre_dev$boh_rating1 == "R2",1, 0)
#Lagged R1 and R2
## One-period lag within each account's time series (first obs gets NA).
cre_dev <- cre_dev %>% group_by(account_id) %>% mutate(boh_rating1_R1_l = lag(boh_rating1_R1)) %>% as.data.frame()
cre_dev <- cre_dev %>% group_by(account_id) %>% mutate(boh_rating1_R2_l = lag(boh_rating1_R2)) %>% as.data.frame()
# Update lagged R1 and R2 values with current value
## Backfill the NA at each account's first observation with the current value.
cre_dev$boh_rating1_R1_l <- ifelse(is.na(cre_dev$boh_rating1_R1_l),cre_dev$boh_rating1_R1,cre_dev$boh_rating1_R1_l)
cre_dev$boh_rating1_R2_l <- ifelse(is.na(cre_dev$boh_rating1_R2_l),cre_dev$boh_rating1_R2,cre_dev$boh_rating1_R2_l)
## (4) variable rgdp_qg_lag_2_neg
## Keep only the negative part of lagged real GDP growth (0 otherwise).
cre_dev$rgdp_qg_lag_2_neg <- ifelse(cre_dev$rgdp_qg_lag_2 >= 0, 0, cre_dev$rgdp_qg_lag_2 )
## (5) variable CAUR_yd_3
## Cap CA unemployment yearly delta at 3.
cre_dev$CAUR_yd_3 <- ifelse(cre_dev$CAUR_yd >= 3, 3, cre_dev$CAUR_yd)
## (6) variable CAHPI_ag_6
## Cap CA HPI annual growth at 6.
cre_dev$CAHPI_ag_6 <- ifelse(cre_dev$CAHPI_ag >= 6, 6, cre_dev$CAHPI_ag)
## (7) variable POB_95
## Floor percent-of-balance at 95.
cre_dev$POB_95 <- ifelse(cre_dev$POB <= 95, 95, cre_dev$POB)
## (8) Wilshire Dummy
cre_dev$wilshire_d <- ifelse(cre_dev$boh_id == "wilshire",1,0)
# Getting description of the current sample
y_filter_3 <- describe(cre_dev$y)
bal_sum_3 <- sum(cre_dev$current_balance)
# Number of default events per period
## Build a per-quarter table of default counts and observation counts.
def_events_df <- cre_dev[,which(colnames(cre_dev) %in% c("fileDate","y")),drop=F]
def_events_df_sum <- def_events_df %>% group_by(fileDate) %>% summarise(Defaults = sum(y)) %>% data.frame()
Obs <- def_events_df %>% group_by(fileDate) %>% tally() %>% data.frame()
def_events_df_sum <- merge(def_events_df_sum, Obs)
colnames(def_events_df_sum) <- c("Date","No. of Defaults","Observations")
## BUGFIX: "write_alt.csv" is not an R function (artifact of a global ".csv"
## rename); the intended call is utils::write.csv.
write.csv(def_events_df_sum, "cre_def_events_df_sum_cre_input_alt.csv")
## Partition the data into training and testing samples
set.seed(20170808)
# Sample fraction
sample_fraction <- .8
cre_dev_training <- cre_dev %>% sample_frac(sample_fraction)
# Getting description of the in sample
y_filter_in <- describe(cre_dev_training$y)
bal_sum_in <- sum(cre_dev_training$current_balance)
##Out of sample data
## Testing set = rows (by rowname) not drawn into the training sample.
cre_dev_testing <- cre_dev[-which(rownames(cre_dev) %in% rownames(cre_dev_training)),]
# Getting description of the out sample
y_filter_out <- describe(cre_dev_testing$y)
bal_sum_out <- sum(cre_dev_testing$current_balance)
# Table of dependent variable sample stats
dep_var_filter_stats <- as.data.frame(rbind(y_filter_1$counts, y_filter_2$counts, y_filter_3$counts,y_filter_in$counts, y_filter_out$counts))
bal_sum_stats <- as.data.frame(rbind(bal_sum_1,bal_sum_2,bal_sum_3,bal_sum_in,bal_sum_out))
dep_var_filter_stats <- cbind(dep_var_filter_stats,bal_sum_stats)
rownames(dep_var_filter_stats) <- c("CRE & (2007 Q4 - 2016 Q1)","Balance Filter","Full Sample","80% In-sample","20% Out-of-sample")
colnames(dep_var_filter_stats)[8] <- "Sum of Curr. Bal."
dep_var_filter_stats
## BUGFIX: "write_alt.csv" is not an R function; utils::write.csv is intended.
write.csv(dep_var_filter_stats, "./R Output/CRE_dep_var_filter_stats_alt.csv")
# Descriptive statistics per boh_id
describe(cre_dev[,"boh_id",drop=F])
boh_id_y_stats <- cre_dev %>% group_by(boh_id) %>% summarise(mean=mean(y),sum=sum(y)) %>% as.data.frame()
boh_id_y_stats
boh_id_y_q_hist <- cre_dev %>% group_by(fileDate,boh_id) %>% summarise(mean=mean(y)) %>% as.data.frame()
boh_id_y_q_hist
boh_id_y_q_hist_p <- ggplot(boh_id_y_q_hist, aes(x=fileDate, y = mean, color=boh_id, group=boh_id)) +
  geom_line() + xlab("Date") + ylab("Default Rate") + ggtitle("Ave. Default Rate CRE - BOH ID") +
  theme(plot.title = element_text(hjust = 0.5)) +
  theme(text = element_text(size=15)) + theme(legend.title=element_blank()) + theme(legend.position="bottom")
boh_id_y_q_hist_p
## BUGFIX: this save reused "CRE_PD_actual_fitted_insample_alt.png", the same
## filename used later for the in-sample actual-vs-fitted plot, so this BOH-ID
## plot was silently overwritten. Give it its own filename.
ggsave("./R output/CRE_PD_boh_id_default_rate_alt.png", width = 7, height = 7)
###################################
## Logistic regression - Training
# Main Model
## Default indicator regressed on lagged rating buckets, floored POB,
## CA unemployment delta, and the lagged NCREIF property index growth.
model <- y ~ boh_rating1_R1_l + boh_rating1_R2_l + POB_95 + CAUR_yd + NCREIF_Property_Index_ag_lag_1
fit <- glm(model, family = binomial(link = "logit"), data = cre_dev_training)
summary(fit)
# Multicollinearity of fitted model
vif(fit)
# Model regressed on full sample
fit_full <- glm(model, family = binomial(link = "logit"), data = cre_dev)
summary(fit_full)
# McFadden's pseudo R squared for a fitted model
pR2 <- 1 - fit$deviance / fit$null.deviance
pR2
###################################
# Output the main regression table in academic format
stargazer::stargazer(fit,fit_full, type = "text", out = "logit_CRE_PD_alt.txt", model.numbers = F, column.labels = c("Train","Full"))
# Save the model coefficients
coef_cre <- as.data.frame(summary(fit)$coefficients)
## BUGFIX: "write_alt.csv" is not an R function; utils::write.csv is intended.
write.csv(coef_cre, "./R Output/coef_cre_alt_alt.csv")
## Keep coefficient names as a column (used by the manual scoring in the
## scenario-forecast loop below).
coef_cre$X <- rownames(coef_cre)
# In-sample Prediction
prob <- predict(fit,type=c("response"))
cre_dev_training$p_hat <- prob
roc_in_df <- data.frame(y = cre_dev_training$y, prob)
# Find AUC
auc_in <- round(auc(roc_in_df$y, roc_in_df$prob),4)
roc_in <- roc(y ~ prob, data = roc_in_df)
## get ROC and AUC
plot(roc_in, main =paste0("CRE PD ROC IN \n AUC = ", auc_in))
# save Plot
pdf("./R output/CRE_ROC_AUC.pdf")
plot(roc_in, main =paste0("CRE PD ROC IN \n AUC = ", auc_in))
dev.off()
# Out-of-sample #
## Out-sample prediction p_hat for each account
predict_out <- predict(fit, cre_dev_testing, type="response")
cre_dev_testing$p_hat <- predict_out
## ROC and AUC
roc_out <- data.frame(predict = predict_out, y = cre_dev_testing$y)
roc_out_plot <- roc(y ~ predict, data = roc_out)
auc_out <- round(as.numeric(roc_out_plot$auc),4)
plot(roc_out_plot, main =paste0("CRE PD ROC OUT \n AUC = ", auc_out))
pdf(paste0("./R output/CRE_ROC_AUC_OUT.pdf"))
plot(roc_out_plot, main =paste0("CRE PD ROC OUT \n AUC = ", auc_out))
dev.off()
## Quarterly average PD in-sample
## Average actual (y) and fitted (p_hat) default rates per quarter,
## then reshape long for the actual-vs-fitted line plot.
cre_pd_quarterly_in <- subset(cre_dev_training, select = c(fileDate, y, p_hat))
cre_pd_quarterly_in <- aggregate(cre_pd_quarterly_in[,2:3], list(cre_pd_quarterly_in$fileDate), mean)
setnames(cre_pd_quarterly_in, old = c("Group.1","y","p_hat"),
         new = c("fileDate", "Actual", "Fitted"))
cre_pd_quarterly_in <- melt(cre_pd_quarterly_in, id = "fileDate")
cbPalette <- c("#000000", "#0072B2")
cre_pd_training_plot <- ggplot(cre_pd_quarterly_in, aes(x=fileDate, y = value, color=variable)) +
  geom_line() + scale_colour_manual(values=cbPalette) + xlab("Date") + ylab("Default Rate") + ggtitle("Ave. Default Rate CRE - In-sample") +
  theme(plot.title = element_text(hjust = 0.5)) +
  theme(text = element_text(size=15)) + theme(legend.title=element_blank()) + theme(legend.position="bottom")
cre_pd_training_plot
ggsave("./R output/CRE_PD_actual_fitted_insample_alt.png", width = 7, height = 7)
## Quarterly average PD out-sample
## Same actual-vs-fitted summary for the 20% holdout.
cre_pd_quarterly_out <- subset(cre_dev_testing, select = c(fileDate, y, p_hat))
cre_pd_quarterly_out <- aggregate(cre_pd_quarterly_out[,2:3], list(cre_pd_quarterly_out$fileDate), mean)
setnames(cre_pd_quarterly_out, old = c("Group.1","y","p_hat"),
         new = c("fileDate", "Actual", "Fitted"))
cre_pd_quarterly_out <- melt(cre_pd_quarterly_out, id = "fileDate")
cbPalette <- c("#000000", "#0072B2")
cre_pd_out_plot <- ggplot(cre_pd_quarterly_out, aes(x=fileDate, y = value, color=variable)) +
  geom_line() + scale_colour_manual(values=cbPalette) + xlab("Date") + ylab("Default Rate") + ggtitle("Ave. Default Rate CRE - Out-of-sample") +
  theme(plot.title = element_text(hjust = 0.5)) +
  theme(text = element_text(size=15)) + theme(legend.title=element_blank()) + theme(legend.position="bottom")
cre_pd_out_plot
ggsave("./R output/CRE_PD_actual_fitted_outsample_alt.png", width = 7, height = 7)
###########################################
## Forecast for 3 scenarios
## For each macro scenario, rebuild exactly the same derived variables as the
## development sample (the transformations must mirror the block above), then
## score each account with the saved training coefficients.
for(scenario in c("base", "adverse", "severe")){
  print(paste0("==== ", scenario, " ===="))
  cre_forecast <- readRDS(paste0("./Data output/df_boh_",scenario, ".RDS"))
  cre_forecast$fileDate <- as.Date(cre_forecast$fileDate, "%Y-%m-%d")
  ## Forecast horizon: quarters after the development window end.
  cre_forecast <- filter(cre_forecast, portfolio_id == "CRE" & as.Date(fileDate) > as.Date("2016-03-31")  )
  # Balance filter
  cre_forecast <- filter(cre_forecast, !(current_balance == 0 & y ==0))
  ## create final input variables
  ## (1) variable prop_res: 1-4 residential and multifamily
  ## BBCN: 10, 11
  ## wilshire: 11, 12
  cre_forecast$property_type <- ifelse(cre_forecast$callReportCodeDescr %in% c("Conv 5+ Residential Prop","CONVENTIONAL 5+ RESIDENTIAL"),10,cre_forecast$property_type)
  cre_forecast$prop_res <- ifelse( (cre_forecast$boh_id == "bbcn" & cre_forecast$property_type %in% c(10 , 11) ) |
                                     (cre_forecast$boh_id == "wilshire" & cre_forecast$property_type %in% c(11 , 12) ) ,
                                   1,
                                   0)
  # Missing Wilshire Property type filter
  cre_forecast <- filter(cre_forecast, !(boh_id == "wilshire" & is.na(property_type)))
  ## (2a) variable prop_retail: Retail shopping center
  ## BBCN: 15, 16, 17, 18
  ## wilshire: 1
  cre_forecast$prop_retail <- ifelse( (cre_forecast$boh_id == "bbcn" & cre_forecast$property_type %in% c(15, 16, 17, 18) ) |
                                        (cre_forecast$boh_id == "wilshire" & cre_forecast$property_type == 1 ) ,
                                      1,
                                      0)
  ## (2b) variable prop_auto: Gas Stations, Car Washes, and Auto Repair Centers
  ## BBCN: 33, 34, 36
  ## wilshire: 7, 8, 16
  cre_forecast$prop_auto <- ifelse( (cre_forecast$boh_id == "bbcn" & cre_forecast$property_type %in% c(33, 34, 36) ) |
                                      (cre_forecast$boh_id == "wilshire" & cre_forecast$property_type %in% c(7, 8, 16) ) ,
                                    1,
                                    0)
  ## (2c) variable prop_hotel: Hotels and Motels
  ## BBCN: 28,29
  ## wilshire: 5
  cre_forecast$prop_hotel <- ifelse( (cre_forecast$boh_id == "bbcn" & cre_forecast$property_type %in% c(28, 29) ) |
                                       (cre_forecast$boh_id == "wilshire" & cre_forecast$property_type == 5 ) ,
                                     1,
                                     0)
  ## (3) variable boh_rating1
  cre_forecast$boh_rating1 <- ifelse(cre_forecast$boh_rating %in% c(0,1,2,3), "R1",
                                     ifelse(cre_forecast$boh_rating %in% c(4,1000), "R2",
                                            ifelse(cre_forecast$boh_rating %in% c(2000,3000), "R3", "error")
                                     ))
  cre_forecast$boh_rating1_R1 <- ifelse(cre_forecast$boh_rating1 == "R1",1, 0)
  cre_forecast$boh_rating1_R2 <- ifelse(cre_forecast$boh_rating1 == "R2",1, 0)
  #Lagged R1 and R2
  cre_forecast <- cre_forecast %>% group_by(account_id) %>% mutate(boh_rating1_R1_l = lag(boh_rating1_R1))
  cre_forecast <- cre_forecast %>% group_by(account_id) %>% mutate(boh_rating1_R2_l = lag(boh_rating1_R2))
  # Update lagged values with current value
  cre_forecast$boh_rating1_R1_l <- ifelse(is.na(cre_forecast$boh_rating1_R1_l),cre_forecast$boh_rating1_R1,cre_forecast$boh_rating1_R1_l)
  cre_forecast$boh_rating1_R2_l <- ifelse(is.na(cre_forecast$boh_rating1_R2_l),cre_forecast$boh_rating1_R2,cre_forecast$boh_rating1_R2_l)
  ## (4) variable rgdp_qg_lag_2_neg
  cre_forecast$rgdp_qg_lag_2_neg <- ifelse(cre_forecast$rgdp_qg_lag_2 >= 0, 0, cre_forecast$rgdp_qg_lag_2 )
  ## (5) variable CAUR_yd_3
  cre_forecast$CAUR_yd_3 <- ifelse(cre_forecast$CAUR_yd >= 3, 3, cre_forecast$CAUR_yd)
  ## (6) variable CAHPI_ag_6
  cre_forecast$CAHPI_ag_6 <- ifelse(cre_forecast$CAHPI_ag >= 6, 6, cre_forecast$CAHPI_ag)
  ## (7) variable POB_95
  cre_forecast$POB_95 <- ifelse(cre_forecast$POB <= 95, 95, cre_forecast$POB)
  ## Wilshire Dummy
  cre_forecast$wilshire_d <- ifelse(cre_forecast$boh_id == "wilshire",1,0)
  ## PD forecast p_hat for each account
  ## Manual scoring: linear predictor = X %*% beta + intercept, using the
  ## coefficient names stored in coef_cre$X to select the regressor columns,
  ## then inverse-logit to obtain the PD.
  cre_forecast <- as.data.table(cre_forecast)
  cre_forecast$p_hat <- as.matrix (cre_forecast[, coef_cre$X[-1],with = FALSE]) %*% coef_cre$Estimate[-1] +
    coef_cre$Estimate[1]
  cre_forecast$p_hat <- 1/(1+exp(-cre_forecast$p_hat))
  ## quarterly average PD
  cre_pd_quarterly_9Q <- subset(cre_forecast, select = c(fileDate, p_hat))
  cre_pd_quarterly_9Q <- aggregate(cre_pd_quarterly_9Q[,2], list(cre_pd_quarterly_9Q$fileDate), mean)
  setnames(cre_pd_quarterly_9Q, old = c("Group.1","p_hat"),
           new = c("fileDate", "value"))
  cre_pd_quarterly_9Q$variable <- scenario
  cre_pd_quarterly_9Q <- cre_pd_quarterly_9Q[,c(1,3,2)]
  ## Stash the per-scenario result as cre_pd_quarterly_<scenario>.
  assign(paste0("cre_pd_quarterly_",scenario), cre_pd_quarterly_9Q)
}
# Connect the historical and forecast data
## Stack the three scenario paths under the historical in-sample series so the
## plot shows history plus the base/adverse/severe forecasts.
cre_pd_quarterly_9Q <- rbind(cre_pd_quarterly_base, cre_pd_quarterly_adverse, cre_pd_quarterly_severe)
cre_pd_quarterly_all <- rbind(cre_pd_quarterly_in, cre_pd_quarterly_9Q)
setnames(cre_pd_quarterly_all, old = c("variable", "value"), new = c("scenario","PD"))
## Forecast plot
cbPalette <- c("#000000", "#0072B2", "#006600", "#E69F00", "#D55E00")
cre_pd_plot <- ggplot(cre_pd_quarterly_all, aes(x = fileDate, y = PD, color = scenario)) +
  geom_line() + scale_colour_manual(values=cbPalette) +
  ggtitle("BOH CRE PD") + xlab("Date") + ylab("Default Rate") + ggtitle("Average Default Rate CRE") +
  theme(plot.title = element_text(hjust = 0.5)) +
  theme(text = element_text(size=15)) + theme(legend.title=element_blank()) + theme(legend.position="bottom")
cre_pd_plot
ggsave("./R output/CRE_PD_actual_fitted_forecast_alt.png", width = 7, height = 7)
## output results
## BUGFIX: "write_alt.csv" is not an R function; utils::write.csv is intended.
write.csv(cre_pd_quarterly_all, "./R output/CRE_PD_quarterly_actual_fitted_forecast_alt.csv", row.names = FALSE)
# Summary stats per sample
options(scipen=999)
## Mean/SD/max/min/N for the model variables, computed on the full, training,
## and testing samples, stacked into one comparison table.
sum_nms <- c("y","prop_res","boh_rating1_R1_l","boh_rating1_R2_l","CAUR_yd","POB_95","NCREIF_Property_Index_ag_lag_1")
training_df <- as.data.frame.matrix(cre_dev_training)
testing_df <- as.data.frame.matrix(cre_dev_testing)
# Make the summary stats table between the samples
cre_dev_mean <- apply(cre_dev[,which(colnames(cre_dev) %in% sum_nms),drop=F],2,function (x) round(mean(x),4))
cre_dev_in_mean <- apply(training_df[,which(colnames(training_df) %in% sum_nms),drop=F],2,function (x) round(mean(x),4))
cre_dev_out_mean <- apply(testing_df[,which(colnames(testing_df) %in% sum_nms),drop=F],2,function (x) round(mean(x),4))
cre_dev_sd <- apply(cre_dev[,which(colnames(cre_dev) %in% sum_nms),drop=F],2,function (x) round(sd(x),4))
cre_dev_in_sd <- apply(training_df[,which(colnames(training_df) %in% sum_nms),drop=F],2,function (x) round(sd(x),4))
cre_dev_out_sd <- apply(testing_df[,which(colnames(testing_df) %in% sum_nms),drop=F],2,function (x) round(sd(x),4))
cre_dev_max <- apply(cre_dev[,which(colnames(cre_dev) %in% sum_nms),drop=F],2,function (x) round(max(x),4))
cre_dev_in_max <- apply(training_df[,which(colnames(training_df) %in% sum_nms),drop=F],2,function (x) round(max(x),4))
cre_dev_out_max <- apply(testing_df[,which(colnames(testing_df) %in% sum_nms),drop=F],2,function (x) round(max(x),4))
cre_dev_min <- apply(cre_dev[,which(colnames(cre_dev) %in% sum_nms),drop=F],2,function (x) round(min(x),4))
cre_dev_in_min <- apply(training_df[,which(colnames(training_df) %in% sum_nms),drop=F],2,function (x) round(min(x),4))
cre_dev_out_min <- apply(testing_df[,which(colnames(testing_df) %in% sum_nms),drop=F],2,function (x) round(min(x),4))
cre_dev_n <- apply(cre_dev[,which(colnames(cre_dev) %in% sum_nms),drop=F],2, length)
cre_dev_in_n <- apply(training_df[,which(colnames(training_df) %in% sum_nms),drop=F],2, length)
cre_dev_out_n <- apply(testing_df[,which(colnames(testing_df) %in% sum_nms),drop=F],2, length)
cre_df_sample_stats <- rbind(
  cre_dev_mean, cre_dev_in_mean, cre_dev_out_mean,
  cre_dev_sd, cre_dev_in_sd, cre_dev_out_sd,
  cre_dev_max, cre_dev_in_max, cre_dev_out_max,
  cre_dev_min, cre_dev_in_min, cre_dev_out_min,
  cre_dev_n, cre_dev_in_n, cre_dev_out_n
)
rownames(cre_df_sample_stats) <- c("Mean (All Obs)","Mean (Train)","Mean (Test)","SD (All Obs)","SD (Train)","SD (Test)","Max (All Obs)","Max (Train)","Max (Test)","Min (All Obs)","Min (Train)","Min (Test)","Obs (All Obs)","Obs (Train)","Obs (Test)")
## BUGFIX: "write_alt.csv" is not an R function; utils::write.csv is intended.
write.csv(cre_df_sample_stats, "./R output/cre_df_sample_stats_alt.csv")
#####################
# Profile Plots
#####################
# Function to generate the plots:
## pd_bin_plot2 is a project helper (sourced below); with profile = T it plots
## observed default rates by bin over time — presumably no fitted series is
## drawn, which is why estimate = NA here (verify against pd_bin_plot2.R).
source("./R Code/Current Model/pd_bin_plot2.R")
##########
# Plot profile POB
pob_breaks <- c(-Inf, 0,94,Inf)
cre_dev$pob_bins <- cut(cre_dev$POB,breaks = pob_breaks)
suppressMessages(pd_bin_plot2(data = cre_dev, date = "fileDate", dep_var = "y", estimate = NA, bins = "pob_bins", title = "POB - PRF", profile = T))
ggsave(paste("./R output/CRE_Default_Rate-POB_PRF_alt.png"), width = 5, height = 5)
##########
# Plot profile risk rating
## Same R1/R2/R3 bucketing as boh_rating1, but unmapped codes become NA here.
cre_dev$risk_bins <- ifelse(cre_dev$boh_rating %in% c(0,1,2,3), "R1",ifelse(cre_dev$boh_rating %in% c(4,1000), "R2",ifelse(cre_dev$boh_rating %in% c(2000,3000), "R3", NA)))
suppressMessages(pd_bin_plot2(data = cre_dev, date = "fileDate", dep_var = "y", estimate = NA, bins = "risk_bins", title = "Risk Rating - PRF", profile = T))
ggsave(paste("./R output/CRE_Default_Rate-RISK_PRF_alt.png"), width = 5, height = 5)
##########
# Plot profile CA UR
cre_dev$caur_bins <- cut(cre_dev$CAUR_yd,breaks = 3)
suppressMessages(pd_bin_plot2(data = cre_dev, date = "fileDate", dep_var = "y", estimate = NA, bins = "caur_bins", title = "CA UR - PRF", profile = T))
ggsave(paste("./R output/CRE_Default_Rate-CAUR_PRF_alt.png"), width = 5, height = 5)
##########
# Plot profile CA HPI
cre_dev$cahpi_bins <- cut(cre_dev$CAHPI_ag,breaks = 3)
suppressMessages(pd_bin_plot2(data = cre_dev, date = "fileDate", dep_var = "y", estimate = NA, bins = "cahpi_bins", title = "CA HPI - PRF", profile = T))
ggsave(paste("./R output/CRE_Default_Rate-CAHPI_PRF_alt.png"), width = 5, height = 5)
##########
# Plot profile GDP
cre_dev$gdp_bins <- cut(cre_dev$rgdp_qg, breaks = 4)
suppressMessages(pd_bin_plot2(data = cre_dev, date = "fileDate", dep_var = "y", estimate = NA, bins = "gdp_bins", title = "GDP - PRF", profile = T))
ggsave(paste("./R output/CRE_Default_Rate-GDP_PRF_alt.png"), width = 5, height = 5)
##########
# Plot profile CRE Index
cre_dev$cre_bins <- cut(cre_dev$NCREIF_Property_Index_ag, breaks = 5)
suppressMessages(pd_bin_plot2(data = cre_dev, date = "fileDate", dep_var = "y", estimate = NA, bins = "cre_bins", title = "CRE - PRF", profile = T))
ggsave(paste("./R output/CRE_Default_Rate-CREINDEX_PRF_alt.png"), width = 5, height = 5)
##########
# Plot profile NY HPI Index
cre_dev$nyhpi_bins <- cut(cre_dev$NYHPI_ag, breaks = 3)
suppressMessages(pd_bin_plot2(data = cre_dev, date = "fileDate", dep_var = "y", estimate = NA, bins = "nyhpi_bins", title = "NY HPI - PRF", profile = T))
ggsave(paste("./R output/CRE_Default_Rate-NYHPI_PRF_alt.png"), width = 5, height = 5)
#####################
# Insample Plots
#####################
## Actual vs fitted (p_hat) default rates by bin, on the training sample.
##########
# Plot insample average PD by POB
pob_breaks <- c(-Inf, 0,94,Inf)
cre_dev_training$pob_bins <- cut(cre_dev_training$POB,breaks = pob_breaks)
suppressMessages(pd_bin_plot2(data = cre_dev_training, date = "fileDate", dep_var = "y", estimate = "p_hat",bins = "pob_bins", title = "POB - IN", profile = F))
ggsave(paste("./R output/CRE_Default_Rate-POB_IN_alt.png"), width = 5, height = 5)
##########
# Plot insample average PD by risk rating
cre_dev_training$risk_bins <- ifelse(cre_dev_training$boh_rating %in% c(0,1,2,3), "R1",ifelse(cre_dev_training$boh_rating %in% c(4,1000), "R2",ifelse(cre_dev_training$boh_rating %in% c(2000,3000), "R3", NA)))
suppressMessages(pd_bin_plot2(data = cre_dev_training, date = "fileDate", dep_var = "y", estimate = "p_hat",bins = "risk_bins", title = "Risk Rating - IN", profile = F))
ggsave(paste("./R output/CRE_Default_Rate-Risk_IN_alt.png"), width = 5, height = 5)
##########
# Plot insample average PD by CA UR
cre_dev_training$caur_bins <- cut(cre_dev_training$CAUR_yd,breaks = 3)
suppressMessages(pd_bin_plot2(data = cre_dev_training, date = "fileDate", dep_var = "y", estimate = "p_hat",bins = "caur_bins", title = "CA UR - IN", profile = F))
ggsave(paste("./R output/CRE_Default_Rate-CAUR_IN_alt.png"), width = 5, height = 5)
##########
# Plot insample average PD by CA HPI
cre_dev_training$cahpi_bins <- cut(cre_dev_training$CAHPI_ag,breaks = 4)
suppressMessages(pd_bin_plot2(data = cre_dev_training, date = "fileDate", dep_var = "y", estimate = "p_hat",bins = "cahpi_bins", title = "CA HPI - IN", profile = F))
ggsave(paste("./R output/CRE_Default_Rate-CAHPI_IN_alt.png"), width = 5, height = 5)
##########
# Plot insample average PD by GDP
cre_dev_training$gdp_bins <- cut(cre_dev_training$rgdp_qg, breaks = 4)
suppressMessages(pd_bin_plot2(data = cre_dev_training, date = "fileDate", dep_var = "y", estimate = "p_hat",bins = "gdp_bins", title = "GDP - IN", profile = F))
ggsave(paste("./R output/CRE_Default_Rate-GDP_IN_alt.png"), width = 5, height = 5)
##########
# Plot insample average PD by CRE Index
cre_dev_training$cre_bins <- cut(cre_dev_training$NCREIF_Property_Index_ag, breaks = 4)
## BUGFIX: this call passed estimate = "y" (plotting actuals as the fitted
## series); every sibling in-sample plot uses estimate = "p_hat".
suppressMessages(pd_bin_plot2(data = cre_dev_training, date = "fileDate", dep_var = "y", estimate = "p_hat", bins = "cre_bins", title = "CRE - IN", profile = F))
ggsave(paste("./R output/CRE_Default_Rate-CREIndex_IN_alt.png"), width = 5, height = 5)
#####################
# Out of sample plots
#####################
## Actual vs fitted (p_hat) default rates by bin, on the 20% holdout sample.
##########
# Plot out sample average PD by POB
pob_breaks <- c(-Inf, 0,94,Inf)
cre_dev_testing$pob_bins <- cut(cre_dev_testing$POB,breaks = pob_breaks)
suppressMessages(pd_bin_plot2(data = cre_dev_testing, date = "fileDate", dep_var = "y", estimate = "p_hat",bins = "pob_bins", title = "POB - OUT", profile = F))
ggsave(paste("./R output/CRE_Default_Rate-POB_OUT_alt.png"), width = 5, height = 5)
##########
# Plot out sample average PD by risk rating
cre_dev_testing$risk_bins <- ifelse(cre_dev_testing$boh_rating %in% c(0,1,2,3), "R1",ifelse(cre_dev_testing$boh_rating %in% c(4,1000), "R2",ifelse(cre_dev_testing$boh_rating %in% c(2000,3000), "R3", NA)))
suppressMessages(pd_bin_plot2(data = cre_dev_testing, date = "fileDate", dep_var = "y", estimate = "p_hat",bins = "risk_bins", title = "Risk Rating - OUT", profile = F))
ggsave(paste("./R output/CRE_Default_Rate-Risk_OUT_alt.png"), width = 5, height = 5)
##########
# Plot out sample average PD by CA UR
cre_dev_testing$caur_bins <- cut(cre_dev_testing$CAUR_yd,breaks = 3)
suppressMessages(pd_bin_plot2(data = cre_dev_testing, date = "fileDate", dep_var = "y", estimate = "p_hat",bins = "caur_bins", title = "CA UR - OUT", profile = F))
ggsave(paste("./R output/CRE_Default_Rate-CAUR_OUT_alt.png"), width = 5, height = 5)
##########
# Plot out sample average PD by CA HPI
cre_dev_testing$cahpi_bins <- cut(cre_dev_testing$CAHPI_ag,breaks = 4)
suppressMessages(pd_bin_plot2(data = cre_dev_testing, date = "fileDate", dep_var = "y", estimate = "p_hat",bins = "cahpi_bins", title = "CA HPI - OUT", profile = F))
ggsave(paste("./R output/CRE_Default_Rate-CAHPI_OUT_alt.png"), width = 5, height = 5)
##########
# Plot out sample average PD by GDP
cre_dev_testing$gdp_bins <- cut(cre_dev_testing$rgdp_qg, breaks = 4)
suppressMessages(pd_bin_plot2(data = cre_dev_testing, date = "fileDate", dep_var = "y", estimate = "p_hat",bins = "gdp_bins", title = "GDP - OUT", profile = F))
ggsave(paste("./R output/CRE_Default_Rate-GDP_OUT_alt.png"), width = 5, height = 5)
##########
# Plot out sample average PD by CRE Index
cre_dev_testing$cre_bins <- cut(cre_dev_testing$NCREIF_Property_Index_ag, breaks = 4)
## BUGFIX: this call passed estimate = "y" (plotting actuals as the fitted
## series); every sibling out-of-sample plot uses estimate = "p_hat".
suppressMessages(pd_bin_plot2(data = cre_dev_testing, date = "fileDate", dep_var = "y", estimate = "p_hat", bins = "cre_bins", title = "CRE - OUT", profile = F))
ggsave(paste("./R output/CRE_Default_Rate-CREIndex_OUT_alt.png"), width = 5, height = 5)
#################
# Coefficient Stability
# Repeated Sample Forecasts
## Re-fit the model on 10 random 80% samples and compare the resulting
## out-of-sample forecast paths.
sample_number <- 10
sample_fraction <- .8
set.seed(20170808)
cre_dev_training_s <- cre_dev %>% sample_frac(sample_fraction)
cre_dev_testing_s <- cre_dev[-which(rownames(cre_dev) %in% rownames(cre_dev_training_s)),]
# Round the sample size to a whole number
# Uses the sample fraction set above to partition in-out samples
sample_size <- round(nrow(cre_dev)*sample_fraction)
# Sample from the df
predict_s <- list()
for (i in 1:sample_number){
  ##In sample data
  set.seed(i)
  df_sample_in <- cre_dev[sample(nrow(cre_dev), sample_size, replace = FALSE), ]
  ##Out of sample data
  df_sample_out <- cre_dev[-which(rownames(cre_dev) %in% rownames(df_sample_in)),]
  ##Estimate the model
  logit_s <- glm(model, family = binomial(link = "logit"), data = df_sample_in)
  predict_s[[i]] <- predict(logit_s, df_sample_out, type="response")
}
# Make a df for the predict df and assign names
header_predict <- paste("Sample_", seq(1:sample_number),sep="")
## NOTE(review): each iteration uses a different random holdout, but the 10
## prediction vectors are column-bound against the single fixed testing set
## below — rows are aligned by position only, not by loan. Confirm this
## positional pooling is intentional before relying on the per-loan values
## (the per-quarter averages plotted below are less affected).
predict_s_df <- data.frame(predict_s)
colnames(predict_s_df) <- header_predict
# Make data frame of all predictions
test_out_df <- data.frame(cre_dev_testing_s,predict_s_df)
test_out_df <- test_out_df %>% group_by(fileDate) %>% mutate(pd_actual = mean(y)) %>% data.frame()
fcst_df_nms <- c("fileDate",header_predict,"pd_actual")
test_out_df <- na.omit(test_out_df[,which(colnames(test_out_df) %in% fcst_df_nms), drop = F])
## Quarterly means of each sample's forecast plus the actual default rate.
test_out_df <- aggregate(x = test_out_df[,-1],
                         FUN = mean,
                         by = list(Date = test_out_df$fileDate))
# Plot of all Forecasts
predict_samples_gg <- melt(test_out_df, id = "Date")
sample_fcst_p <- ggplot(data = predict_samples_gg, aes(x = Date, y = value, group = variable, color = variable)) + geom_line() + xlab("Date") + ylab("Default Rate") + ggtitle("Ave. Default Rate CRE Out-of-Sample") +
  theme(plot.title = element_text(hjust = 0.5)) +
  theme(text = element_text(size=12)) + theme(legend.title=element_blank())
sample_fcst_p
ggsave(paste("./R output/cre_sample_fcst_plot_alt.png"), width = 5, height = 5)
##################################
# Manual boot strapping - coefficients and p-values
# Make random samples
## Re-fit the model on 500 random 80% subsamples and collect each fit's
## coefficients and p-values to assess their stability.
# Sample Number
sample_number <- 500
sample_fraction <- .8
# Round the sample size to a whole number
# Uses the sample fraction set above to partition in-out samples
sample_size <- round(nrow(cre_dev)*sample_fraction)
# Sample from the df
df_samples <- list()
coeff_l <- list()
pval_l <- list()
start.time <- Sys.time()
start.time
for (i in 1:sample_number){
  ##In sample data
  set.seed(i)
  df_sample <- cre_dev[sample(nrow(cre_dev), sample_size, replace = FALSE), ]
  logit <- glm(model, family = binomial(link = "logit"), data = df_sample)
  coeff_l[[i]] <- round(coef(summary(logit))[,1],5)
  pval_l[[i]] <- round(coef(summary(logit))[,4],5)
}
end.time <- Sys.time()
time.taken <- end.time - start.time
time.taken
# Turn into data frames
## BUGFIX (both writes below): "write_alt.csv" is not an R function;
## utils::write.csv is intended.
pval_boot_df <- as.data.frame(do.call("rbind",pval_l))
pval_boot_df <- data.frame(Sample=seq(from=1,to=sample_number,by=1),pval_boot_df)
write.csv(pval_boot_df, "./R output/cre_pval_boot_df_alt.csv", row.names = T)
coef_boot_df <- as.data.frame(do.call("rbind",coeff_l))
coef_boot_df <- data.frame(Sample=seq(from=1,to=sample_number,by=1),coef_boot_df)
write.csv(coef_boot_df, "./R output/cre_coef_boot_df_alt.csv", row.names = T)
# P-value Histograms
gg_p_df <- list()
for (i in 2:ncol(pval_boot_df)){
  gg_p_df[[i]] <- melt(pval_boot_df[,c(1,i)], id = "Sample")
  pval_h_plot <- ggplot(gg_p_df[[i]], aes(value)) + geom_histogram(fill = "#006600") + xlab("Value") + ylab("Frequency") + ggtitle(paste("P-Value",gg_p_df[[i]][1,2],sep=" - ")) + theme(text = element_text(size=12)) + theme(legend.title=element_blank()) + theme(plot.title = element_text(hjust = 0.5)) + geom_vline(xintercept=0)
  suppressMessages(print(pval_h_plot))
  ggsave( paste0(paste("./R output/", paste("CRE P-Value Histogram",gg_p_df[[i]][1,2],sep=" - "),sep=""),"_alt.png") , width = 5, height = 5)
}
# Coefficient Densities
gg_c_df <- list()
for (i in 2:ncol(coef_boot_df)){
  gg_c_df[[i]] <- melt(coef_boot_df[,c(1,i)], id = "Sample")
  coef_d_plot <- ggplot(gg_c_df[[i]], mapping = aes(x = value, group = variable, fill=variable)) + geom_density() + ggtitle(paste("Coef. Density",gg_c_df[[i]][1,2],sep=" - ")) + theme(text = element_text(size=12)) + theme(legend.title=element_blank()) + theme(plot.title = element_text(hjust = 0.5))+ xlab("Value") + ylab("Density") + theme(legend.position="none") + scale_fill_manual(values=c("#003399")) + scale_colour_manual(values=c("black")) + geom_vline(xintercept=0)
  suppressMessages(print(coef_d_plot))
  ggsave( paste0(paste("./R output/", paste("CRE Coefficient Density",gg_c_df[[i]][1,2],sep=" - "),sep=""),"_alt.png") , width = 5, height = 5)
}
time.taken
<file_sep>/PD/R07_PD_CRE_alt_filter_08212017.R
##############################################################################
## File Name: R07_PD_CRE.R
## Author: KPMG
## Date: 7/5/2017 Created
## Purpose: To build PD model for BOH CRE portfolio
##############################################################################
#setwd("//useomvfs77/MCLP/Common/Clients/Bank of Hope/Model Development/PD Models/R Code/Misc Analysis/Alternative Filter")
setwd("C:/Users/ic07949/Desktop/KPMG/Model Development/development code and data/PD")
# Packages required by this script.
requirements <- c("dplyr","reshape2","data.table","zoo","ggplot2","pROC","boot","tidyr","lazyeval","Hmisc","corrplot","car")
# Install any package that is not already installed.
# FIX: the original ran this install-if-missing loop twice back to back
# (identical effect); the redundant second copy has been removed.
for(rr in requirements){
  if(! rr %in% installed.packages()) install.packages(rr)
}
# Attach all required packages.
lapply(requirements, require, character.only=T)
## load data
# Development sample: loan-level panel with macro variables already merged.
cre_dev <- readRDS("df_boh_base_v2_08212017.RDS")
cre_dev$fileDate <- as.Date(cre_dev$fileDate, "%Y-%m-%d")
# CRE and Date filter
# Restrict to the CRE portfolio and the 2007 Q4 - 2016 Q1 development window.
cre_dev <- filter(cre_dev, portfolio_id == "CRE" & as.Date(fileDate) <= as.Date("2016-03-31") & as.Date(fileDate) >= as.Date("2007-12-31") )
# Getting description of the current sample
# (Hmisc::describe of the default flag, plus total current balance, saved for the
# filter-stats table built later.)
y_filter_1 <- describe(cre_dev$y)
bal_sum_1 <- sum(cre_dev$current_balance)
## create final input variables
## (1) variable prop_res: 1-4 residential and multifamily
## BBCN: 10, 11
## wilshire: 11, 12
# Recode "Conv 5+ Residential" call-report descriptions to BBCN property code 10
# before building the residential dummy.
cre_dev$property_type <- ifelse(cre_dev$callReportCodeDescr %in% c("Conv 5+ Residential Prop","CONVENTIONAL 5+ RESIDENTIAL"),10,cre_dev$property_type)
# The two legacy banks use different property-type code sets, so the dummy is
# defined per boh_id.
cre_dev$prop_res <- ifelse( (cre_dev$boh_id == "bbcn" & cre_dev$property_type %in% c(10 , 11) ) |
                              (cre_dev$boh_id == "wilshire" & cre_dev$property_type %in% c(11 , 12) ) ,
                            1,
                            0)
# Missing Wilshire Property type filter
# Drop Wilshire records whose property type is unknown (cannot be classified above).
cre_dev <- filter(cre_dev, !(boh_id == "wilshire" & is.na(property_type)))
# Property-type dummies (2a-2c). Each flag maps bank-specific property codes
# (BBCN vs. Wilshire use different code sets) onto a common indicator.
## (2a) variable prop_retail: Retail shopping center
## BBCN: 15, 16, 17, 18
## wilshire: 1
cre_dev$prop_retail <- ifelse( (cre_dev$boh_id == "bbcn" & cre_dev$property_type %in% c(15, 16, 17, 18) ) |
                                 (cre_dev$boh_id == "wilshire" & cre_dev$property_type == 1 ) ,
                               1,
                               0)
## (2b) variable prop_auto: Gas Stations, Car Washes, and Auto Repair Centers
## BBCN: 33, 34, 36
## wilshire: 7, 8, 16
cre_dev$prop_auto <- ifelse( (cre_dev$boh_id == "bbcn" & cre_dev$property_type %in% c(33, 34, 36) ) |
                               (cre_dev$boh_id == "wilshire" & cre_dev$property_type %in% c(7, 8, 16) ) ,
                             1,
                             0)
## (2c) variable prop_hotel: Hotels and Motels
## BBCN: 28,29
## wilshire: 5
cre_dev$prop_hotel <- ifelse( (cre_dev$boh_id == "bbcn" & cre_dev$property_type %in% c(28, 29) ) |
                                (cre_dev$boh_id == "wilshire" & cre_dev$property_type == 5 ) ,
                              1,
                              0)
## (3) variable boh_rating1
# Collapse the internal rating scale into three buckets; any unmapped code is
# labelled "error" so bad inputs surface rather than silently bucketing.
cre_dev$boh_rating1 <- ifelse(cre_dev$boh_rating %in% c(0,1,2,3), "R1",
                              ifelse(cre_dev$boh_rating %in% c(4,1000), "R2",
                                     ifelse(cre_dev$boh_rating %in% c(2000,3000), "R3", "error")
                              ))
#R1 and R2 variables
# (R3 is the omitted reference category in the regression.)
cre_dev$boh_rating1_R1 <- ifelse(cre_dev$boh_rating1 == "R1",1, 0)
cre_dev$boh_rating1_R2 <- ifelse(cre_dev$boh_rating1 == "R2",1, 0)
#Lagged R1 and R2
# NOTE(review): dplyr::lag() here lags by row position within each account_id --
# this assumes rows are already sorted by fileDate within account; confirm the
# upstream sort order.
cre_dev <- cre_dev %>% group_by(account_id) %>% mutate(boh_rating1_R1_l = lag(boh_rating1_R1)) %>% as.data.frame()
cre_dev <- cre_dev %>% group_by(account_id) %>% mutate(boh_rating1_R2_l = lag(boh_rating1_R2)) %>% as.data.frame()
# Update lagged R1 and R2 values with current value
# First observation of each account has no lag; fall back to the current value.
cre_dev$boh_rating1_R1_l <- ifelse(is.na(cre_dev$boh_rating1_R1_l),cre_dev$boh_rating1_R1,cre_dev$boh_rating1_R1_l)
cre_dev$boh_rating1_R2_l <- ifelse(is.na(cre_dev$boh_rating1_R2_l),cre_dev$boh_rating1_R2,cre_dev$boh_rating1_R2_l)
## (4) variable rgdp_qg_lag_2_neg
# Keep only the negative part of lagged real-GDP quarterly growth (0 otherwise).
cre_dev$rgdp_qg_lag_2_neg <- ifelse(cre_dev$rgdp_qg_lag_2 >= 0, 0, cre_dev$rgdp_qg_lag_2 )
## (5) variable CAUR_yd_3
# Cap the CA unemployment yearly difference at 3.
cre_dev$CAUR_yd_3 <- ifelse(cre_dev$CAUR_yd >= 3, 3, cre_dev$CAUR_yd)
## (6) variable CAHPI_ag_6
# Cap CA HPI annual growth at 6.
cre_dev$CAHPI_ag_6 <- ifelse(cre_dev$CAHPI_ag >= 6, 6, cre_dev$CAHPI_ag)
## (7) variable POB_95
# Floor percent-of-balance at 95 (values at or below 95 are set to 95).
cre_dev$POB_95 <- ifelse(cre_dev$POB <= 95, 95, cre_dev$POB)
## (8) Wilshire Dummy
cre_dev$wilshire_d <- ifelse(cre_dev$boh_id == "wilshire",1,0)
# Getting description of the current sample
# Post-filter snapshot for the filter-stats table.
y_filter_2 <- describe(cre_dev$y)
bal_sum_2 <- sum(cre_dev$current_balance)
# Number of default events per period
# Quarterly count of defaults and of observations, merged into one table.
def_events_df <- cre_dev[,which(colnames(cre_dev) %in% c("fileDate","y")),drop=F]
def_events_df_sum <- def_events_df %>% group_by(fileDate) %>% summarise(Defaults = sum(y)) %>% data.frame()
Obs <- def_events_df %>% group_by(fileDate) %>% tally() %>% data.frame()
def_events_df_sum <- merge(def_events_df_sum, Obs)
colnames(def_events_df_sum) <- c("Date","No. of Defaults","Observations")
## Partition the data into training and testing samples
set.seed(2017)
# Sample fraction
# 80% of observations go to the training sample.
sample_fraction <- .8
cre_dev_training <- cre_dev %>% sample_frac(sample_fraction)
# Getting description of the in sample
y_filter_in <- describe(cre_dev_training$y)
bal_sum_in <- sum(cre_dev_training$current_balance)
##Out of sample data
# NOTE(review): this anti-join relies on dplyr::sample_frac preserving the
# original row names of cre_dev; some dplyr versions reset row names to 1:n,
# which would make this split wrong -- verify against the installed dplyr.
cre_dev_testing <- cre_dev[-which(rownames(cre_dev) %in% rownames(cre_dev_training)),]
# Getting description of the out sample
y_filter_out <- describe(cre_dev_testing$y)
bal_sum_out <- sum(cre_dev_testing$current_balance)
# Table of dependent variable sample stats
# One row per filter stage / sample, plus total current balance per stage.
dep_var_filter_stats <- as.data.frame(rbind(y_filter_1$counts, y_filter_2$counts,y_filter_in$counts, y_filter_out$counts))
bal_sum_stats <- as.data.frame(rbind(bal_sum_1,bal_sum_2,bal_sum_in,bal_sum_out))
dep_var_filter_stats <- cbind(dep_var_filter_stats,bal_sum_stats)
rownames(dep_var_filter_stats) <- c("CRE & (2007 Q4 - 2016 Q1)","Wilshire Property Type","80% In-sample","20% Out-of-sample")
colnames(dep_var_filter_stats)[8] <- "Sum of Curr. Bal."
dep_var_filter_stats
###################################
## Logistic regression - Training
# Main Model
# Logit PD model: lagged rating dummies, capped POB, CA unemployment change,
# lagged NCREIF property index growth, and the residential property dummy.
model <- y ~ boh_rating1_R1_l + boh_rating1_R2_l + POB_95 + CAUR_yd + NCREIF_Property_Index_ag_lag_1 + prop_res
fit <- glm(model, family = binomial(link = "logit"), data = cre_dev_training)
summary(fit)
# Multicollinearity of fitted model
vif(fit)
# Model regressed on full sample
# Same specification refit on the full development sample for comparison.
fit_full <- glm(model, family = binomial(link = "logit"), data = cre_dev)
summary(fit_full)
# McFadden's pseudo R squared for the main model
pR2 <- 1 - fit$deviance / fit$null.deviance
pR2
###################################
# Output the main regression table in academic format
# NOTE(review): three column labels are supplied for only two models -- the
# trailing "Wilshire" label looks like a leftover from an earlier version.
stargazer::stargazer(fit, fit_full, type = "text", model.numbers = F, column.labels = c("Train","Full","Wilshire"))
# Save the model coefficients
# Kept as a data frame (term names in column X) for the manual scenario scoring
# performed later in this script.
coef_cre <- as.data.frame(summary(fit)$coefficients)
coef_cre$X <- rownames(coef_cre)
# In-sample Prediction
prob <- predict(fit,type=c("response"))
cre_dev_training$p_hat <- prob
roc_in_df <- data.frame(y = cre_dev_training$y, prob)
# Find AUC
auc_in <- round(auc(roc_in_df$y, roc_in_df$prob),4)
roc_in <- roc(y ~ prob, data = roc_in_df)
## get ROC and AUC
plot(roc_in, main =paste0("CRE PD ROC IN \n AUC = ", auc_in))
# Out-of-sample #
## Out-sample prediction p_hat for each account
predict_out <- predict(fit, cre_dev_testing, type="response")
cre_dev_testing$p_hat <- predict_out
## ROC and AUC
roc_out <- data.frame(predict = predict_out, y = cre_dev_testing$y)
roc_out_plot <- roc(y ~ predict, data = roc_out)
auc_out <- round(as.numeric(roc_out_plot$auc),4)
plot(roc_out_plot, main =paste0("CRE PD ROC OUT \n AUC = ", auc_out))
## Quarterly average PD in-sample
# Average actual default rate vs. average fitted PD per quarter, then melt to
# long format for plotting.
cre_pd_quarterly_in <- subset(cre_dev_training, select = c(fileDate, y, p_hat))
cre_pd_quarterly_in <- aggregate(cre_pd_quarterly_in[,2:3], list(cre_pd_quarterly_in$fileDate), mean)
setnames(cre_pd_quarterly_in, old = c("Group.1","y","p_hat"),
         new = c("fileDate", "Actual", "Fitted"))
cre_pd_quarterly_in <- melt(cre_pd_quarterly_in, id = "fileDate")
# Colour-blind-safe palette: black = Actual, blue = Fitted.
cbPalette <- c("#000000", "#0072B2")
cre_pd_training_plot <- ggplot(cre_pd_quarterly_in, aes(x=fileDate, y = value, color=variable)) +
  geom_line() + scale_colour_manual(values=cbPalette) + xlab("Date") + ylab("Default Rate") + ggtitle("Ave. Default Rate CRE - In-sample") +
  theme(plot.title = element_text(hjust = 0.5)) +
  theme(text = element_text(size=15)) + theme(legend.title=element_blank()) + theme(legend.position="bottom")
cre_pd_training_plot
## Quarterly average PD out-sample
# Same construction for the hold-out sample.
cre_pd_quarterly_out <- subset(cre_dev_testing, select = c(fileDate, y, p_hat))
cre_pd_quarterly_out <- aggregate(cre_pd_quarterly_out[,2:3], list(cre_pd_quarterly_out$fileDate), mean)
setnames(cre_pd_quarterly_out, old = c("Group.1","y","p_hat"),
         new = c("fileDate", "Actual", "Fitted"))
cre_pd_quarterly_out <- melt(cre_pd_quarterly_out, id = "fileDate")
cbPalette <- c("#000000", "#0072B2")
cre_pd_out_plot <- ggplot(cre_pd_quarterly_out, aes(x=fileDate, y = value, color=variable)) +
  geom_line() + scale_colour_manual(values=cbPalette) + xlab("Date") + ylab("Default Rate") + ggtitle("Ave. Default Rate CRE - Out-of-sample") +
  theme(plot.title = element_text(hjust = 0.5)) +
  theme(text = element_text(size=15)) + theme(legend.title=element_blank()) + theme(legend.position="bottom")
cre_pd_out_plot
###########################################
## Forecast for 3 scenarios
# For each supervisory scenario, rebuild the model inputs on the forecast file
# (same transformations as on the development sample), score PDs manually from
# the saved coefficients, and store the quarterly average PD path as
# cre_pd_quarterly_<scenario> via assign().
for(scenario in c("base", "adverse", "severe")){
print(paste0("==== ", scenario, " ===="))
cre_forecast <- readRDS(paste0("df_boh_",scenario,"_v2", ".RDS"))
cre_forecast$fileDate <- as.Date(cre_forecast$fileDate, "%Y-%m-%d")
# Forecast horizon: quarters strictly after the development cut-off.
cre_forecast <- filter(cre_forecast, portfolio_id == "CRE" & as.Date(fileDate) > as.Date("2016-03-31") )
## create final input variables
## (1) variable prop_res: 1-4 residential and multifamily
## BBCN: 10, 11
## wilshire: 11, 12
cre_forecast$property_type <- ifelse(cre_forecast$callReportCodeDescr %in% c("Conv 5+ Residential Prop","CONVENTIONAL 5+ RESIDENTIAL"),10,cre_forecast$property_type)
cre_forecast$prop_res <- ifelse( (cre_forecast$boh_id == "bbcn" & cre_forecast$property_type %in% c(10 , 11) ) |
                                   (cre_forecast$boh_id == "wilshire" & cre_forecast$property_type %in% c(11 , 12) ) ,
                                 1,
                                 0)
# Missing Wilshire Property type filter
cre_forecast <- filter(cre_forecast, !(boh_id == "wilshire" & is.na(property_type)))
## (2a) variable prop_retail: Retail shopping center
## BBCN: 15, 16, 17, 18
## wilshire: 1
cre_forecast$prop_retail <- ifelse( (cre_forecast$boh_id == "bbcn" & cre_forecast$property_type %in% c(15, 16, 17, 18) ) |
                                      (cre_forecast$boh_id == "wilshire" & cre_forecast$property_type == 1 ) ,
                                    1,
                                    0)
## (2b) variable prop_auto: Gas Stations, Car Washes, and Auto Repair Centers
## BBCN: 33, 34, 36
## wilshire: 7, 8, 16
cre_forecast$prop_auto <- ifelse( (cre_forecast$boh_id == "bbcn" & cre_forecast$property_type %in% c(33, 34, 36) ) |
                                    (cre_forecast$boh_id == "wilshire" & cre_forecast$property_type %in% c(7, 8, 16) ) ,
                                  1,
                                  0)
## (2c) variable prop_hotel: Hotels and Motels
## BBCN: 28,29
## wilshire: 5
cre_forecast$prop_hotel <- ifelse( (cre_forecast$boh_id == "bbcn" & cre_forecast$property_type %in% c(28, 29) ) |
                                     (cre_forecast$boh_id == "wilshire" & cre_forecast$property_type == 5 ) ,
                                   1,
                                   0)
## (3) variable boh_rating1
cre_forecast$boh_rating1 <- ifelse(cre_forecast$boh_rating %in% c(0,1,2,3), "R1",
                                   ifelse(cre_forecast$boh_rating %in% c(4,1000), "R2",
                                          ifelse(cre_forecast$boh_rating %in% c(2000,3000), "R3", "error")
                                   ))
cre_forecast$boh_rating1_R1 <- ifelse(cre_forecast$boh_rating1 == "R1",1, 0)
cre_forecast$boh_rating1_R2 <- ifelse(cre_forecast$boh_rating1 == "R2",1, 0)
#Lagged R1 and R2
# NOTE(review): same positional-lag assumption as in the development section --
# rows must be date-ordered within account_id.
cre_forecast <- cre_forecast %>% group_by(account_id) %>% mutate(boh_rating1_R1_l = lag(boh_rating1_R1))
cre_forecast <- cre_forecast %>% group_by(account_id) %>% mutate(boh_rating1_R2_l = lag(boh_rating1_R2))
# Update lagged values with current value
cre_forecast$boh_rating1_R1_l <- ifelse(is.na(cre_forecast$boh_rating1_R1_l),cre_forecast$boh_rating1_R1,cre_forecast$boh_rating1_R1_l)
cre_forecast$boh_rating1_R2_l <- ifelse(is.na(cre_forecast$boh_rating1_R2_l),cre_forecast$boh_rating1_R2,cre_forecast$boh_rating1_R2_l)
## (4) variable rgdp_qg_lag_2_neg
cre_forecast$rgdp_qg_lag_2_neg <- ifelse(cre_forecast$rgdp_qg_lag_2 >= 0, 0, cre_forecast$rgdp_qg_lag_2 )
## (5) variable CAUR_yd_3
cre_forecast$CAUR_yd_3 <- ifelse(cre_forecast$CAUR_yd >= 3, 3, cre_forecast$CAUR_yd)
## (6) variable CAHPI_ag_6
cre_forecast$CAHPI_ag_6 <- ifelse(cre_forecast$CAHPI_ag >= 6, 6, cre_forecast$CAHPI_ag)
## (7) variable POB_95
cre_forecast$POB_95 <- ifelse(cre_forecast$POB <= 95, 95, cre_forecast$POB)
## Wilshire Dummy
cre_forecast$wilshire_d <- ifelse(cre_forecast$boh_id == "wilshire",1,0)
## PD forecast p_hat for each account
# Manual linear predictor: X %*% beta + intercept, then inverse logit. Relies on
# the term names saved in coef_cre$X matching forecast column names exactly.
cre_forecast <- as.data.table(cre_forecast)
cre_forecast$p_hat <- as.matrix (cre_forecast[, coef_cre$X[-1],with = FALSE]) %*% coef_cre$Estimate[-1] +
  coef_cre$Estimate[1]
cre_forecast$p_hat <- 1/(1+exp(-cre_forecast$p_hat))
## quarterly average PD
cre_pd_quarterly_9Q <- subset(cre_forecast, select = c(fileDate, p_hat))
cre_pd_quarterly_9Q <- aggregate(cre_pd_quarterly_9Q[,2], list(cre_pd_quarterly_9Q$fileDate), mean)
setnames(cre_pd_quarterly_9Q, old = c("Group.1","p_hat"),
         new = c("fileDate", "value"))
cre_pd_quarterly_9Q$variable <- scenario
# Reorder to (fileDate, variable, value) so it can be row-bound with the
# melted historical series later.
cre_pd_quarterly_9Q <- cre_pd_quarterly_9Q[,c(1,3,2)]
assign(paste0("cre_pd_quarterly_",scenario), cre_pd_quarterly_9Q)
}
# Connect the historical and forecast data
cre_pd_quarterly_9Q <- rbind(cre_pd_quarterly_base, cre_pd_quarterly_adverse, cre_pd_quarterly_severe)
cre_pd_quarterly_all <- rbind(cre_pd_quarterly_in, cre_pd_quarterly_9Q)
setnames(cre_pd_quarterly_all, old = c("variable", "value"), new = c("scenario","PD"))
## Forecast plot
cbPalette <- c("#000000", "#0072B2", "#006600", "#E69F00", "#D55E00")
# NOTE(review): ggtitle() appears twice; the second call overrides the first,
# so the plotted title is "Average Default Rate CRE".
cre_pd_plot <- ggplot(cre_pd_quarterly_all, aes(x = fileDate, y = PD, color = scenario)) +
  geom_line() + scale_colour_manual(values=cbPalette) +
  ggtitle("BOH CRE PD") + xlab("Date") + ylab("Default Rate") + ggtitle("Average Default Rate CRE") +
  theme(plot.title = element_text(hjust = 0.5)) +
  theme(text = element_text(size=15)) + theme(legend.title=element_blank()) + theme(legend.position="bottom")
cre_pd_plot
# Summary stats per sample
options(scipen=999)
# Variables summarised: the dependent variable plus the model regressors.
sum_nms <- c("y","prop_res","boh_rating1_R1_l","boh_rating1_R2_l","CAUR_yd_3","POB_95","NCREIF_Property_Index_ag_lag_1")
training_df <- as.data.frame.matrix(cre_dev_training)
testing_df <- as.data.frame.matrix(cre_dev_testing)
# Make the summary stats table between the samples
# Mean / SD / max / min / N computed per variable on the full, training, and
# testing samples, then stacked into one comparison table.
cre_dev_mean <- apply(cre_dev[,which(colnames(cre_dev) %in% sum_nms),drop=F],2,function (x) round(mean(x),4))
cre_dev_in_mean <- apply(training_df[,which(colnames(training_df) %in% sum_nms),drop=F],2,function (x) round(mean(x),4))
cre_dev_out_mean <- apply(testing_df[,which(colnames(testing_df) %in% sum_nms),drop=F],2,function (x) round(mean(x),4))
cre_dev_sd <- apply(cre_dev[,which(colnames(cre_dev) %in% sum_nms),drop=F],2,function (x) round(sd(x),4))
cre_dev_in_sd <- apply(training_df[,which(colnames(training_df) %in% sum_nms),drop=F],2,function (x) round(sd(x),4))
cre_dev_out_sd <- apply(testing_df[,which(colnames(testing_df) %in% sum_nms),drop=F],2,function (x) round(sd(x),4))
cre_dev_max <- apply(cre_dev[,which(colnames(cre_dev) %in% sum_nms),drop=F],2,function (x) round(max(x),4))
cre_dev_in_max <- apply(training_df[,which(colnames(training_df) %in% sum_nms),drop=F],2,function (x) round(max(x),4))
cre_dev_out_max <- apply(testing_df[,which(colnames(testing_df) %in% sum_nms),drop=F],2,function (x) round(max(x),4))
cre_dev_min <- apply(cre_dev[,which(colnames(cre_dev) %in% sum_nms),drop=F],2,function (x) round(min(x),4))
cre_dev_in_min <- apply(training_df[,which(colnames(training_df) %in% sum_nms),drop=F],2,function (x) round(min(x),4))
cre_dev_out_min <- apply(testing_df[,which(colnames(testing_df) %in% sum_nms),drop=F],2,function (x) round(min(x),4))
cre_dev_n <- apply(cre_dev[,which(colnames(cre_dev) %in% sum_nms),drop=F],2, length)
cre_dev_in_n <- apply(training_df[,which(colnames(training_df) %in% sum_nms),drop=F],2, length)
cre_dev_out_n <- apply(testing_df[,which(colnames(testing_df) %in% sum_nms),drop=F],2, length)
cre_df_sample_stats <- rbind(
  cre_dev_mean, cre_dev_in_mean, cre_dev_out_mean,
  cre_dev_sd, cre_dev_in_sd, cre_dev_out_sd,
  cre_dev_max, cre_dev_in_max, cre_dev_out_max,
  cre_dev_min, cre_dev_in_min, cre_dev_out_min,
  cre_dev_n, cre_dev_in_n, cre_dev_out_n
)
rownames(cre_df_sample_stats) <- c("Mean (All Obs)","Mean (Train)","Mean (Test)","SD (All Obs)","SD (Train)","SD (Test)","Max (All Obs)","Max (Train)","Max (Test)","Min (All Obs)","Min (Train)","Min (Test)","Obs (All Obs)","Obs (Train)","Obs (Test)")
cre_df_sample_stats
#################
# Coefficient Stability
# Repeated Sample Forecasts
# Refit the model on 10 different 80% random splits and overlay the resulting
# out-of-sample quarterly forecasts against the actual default-rate series.
sample_number <- 10
sample_fraction <- .8
set.seed(20170808)
# Fixed split used only to compute the "Actual" series below.
# NOTE(review): same rownames-based anti-join caveat as in the main split --
# depends on dplyr::sample_frac preserving row names.
cre_dev_training_s <- cre_dev %>% sample_frac(sample_fraction)
cre_dev_testing_s <- cre_dev[-which(rownames(cre_dev) %in% rownames(cre_dev_training_s)),]
# Sample from the df
predict_s <- list()
predict_date_s <- list()
s_name <- list()
for (i in 1:sample_number){
##In sample data
set.seed(i)
df_sample_in <- cre_dev %>% sample_frac(sample_fraction)
##Out of sample data
df_sample_out <- cre_dev[-which(rownames(cre_dev) %in% rownames(df_sample_in)),]
##Estimate the model
logit_s <- glm(model, family = binomial(link = "logit"), data = df_sample_in)
predict_s[[i]] <- predict(logit_s, df_sample_out, type="response")
predict_date_s[[i]] <- df_sample_out$fileDate
s_name[[i]] <- rep(paste("Sample",i,sep="_"),nrow(df_sample_out))
}
# Flatten the per-sample lists into one long (Date, Sample, Predict) frame.
predict_s_tmp <- data.frame(Predict = unlist(predict_s))
predict_date_s_tmp <- data.frame(Date = as.Date(unlist(predict_date_s)))
s_name_tmp <- data.frame(Sample = unlist(s_name))
Prediction_df <- cbind(predict_date_s_tmp, s_name_tmp, predict_s_tmp)
# Average predicted PD per quarter per sample.
Prediction_df1 <- aggregate(x = Prediction_df[,"Predict"],FUN = mean,by = list(Date = Prediction_df$Date, Sample = Prediction_df$Sample))
names(Prediction_df1)[names(Prediction_df1) == 'x'] <- 'value'
# Actual quarterly default rate on the fixed hold-out split.
actual_df <- aggregate(x = cre_dev_testing_s[,"y"],FUN = mean,by = list(Date = cre_dev_testing_s$fileDate))
names(actual_df)[names(actual_df) == 'x'] <- 'value'
actual_df$Sample <- "Actual"
Prediction_df_gg <- rbind(actual_df, Prediction_df1)
# Plot of all Forecasts
sample_fcst_p <- ggplot(data = Prediction_df_gg, aes(x = Date, y = value, group = Sample, color = Sample)) + geom_line() + xlab("Date") + ylab("Default Rate") + ggtitle("Ave. Default Rate CRE Out-of-Sample") + theme(plot.title = element_text(hjust = 0.5)) + theme(text = element_text(size=12)) + theme(legend.title=element_blank())
sample_fcst_p
##################################
# Manual boot strapping - coefficients and p-values
# NOTE(review): sampling is WITHOUT replacement at 80% of the data, so this is
# subsampling rather than a classical bootstrap -- intended here as a
# coefficient-stability check across 100 refits.
# Make random samples
# Sample Number
sample_number <- 100
sample_fraction <- .8
# Round the sample size to a whole number
# Uses the sample fraction set above to partition in-out samples
sample_size <- round(nrow(cre_dev)*sample_fraction)
# Sample from the df
df_samples <- list()
coeff_l <- list()
pval_l <- list()
start.time <- Sys.time()
start.time
for (i in 1:sample_number){
##In sample data
set.seed(i)
df_sample <- cre_dev[sample(nrow(cre_dev), sample_size, replace = FALSE), ]
logit <- glm(model, family = binomial(link = "logit"), data = df_sample)
# Column 1 of the coefficient matrix = estimates, column 4 = p-values.
coeff_l[[i]] <- round(coef(summary(logit))[,1],5)
pval_l[[i]] <- round(coef(summary(logit))[,4],5)
}
end.time <- Sys.time()
time.taken <- end.time - start.time
time.taken
# Turn into data frames
# One row per refit; Sample column indexes the refit.
pval_boot_df <- as.data.frame(do.call("rbind",pval_l))
pval_boot_df <- data.frame(Sample=seq(from=1,to=sample_number,by=1),pval_boot_df)
coef_boot_df <- as.data.frame(do.call("rbind",coeff_l))
coef_boot_df <- data.frame(Sample=seq(from=1,to=sample_number,by=1),coef_boot_df)
# P-value Histograms
# One histogram per model term (column 1 is the Sample index, hence 2:ncol).
gg_p_df <- list()
for (i in 2:ncol(pval_boot_df)){
gg_p_df[[i]] <- melt(pval_boot_df[,c(1,i)], id = "Sample")
pval_h_plot <- ggplot(gg_p_df[[i]], aes(value)) + geom_histogram(fill = "#006600") + xlab("Value") + ylab("Frequency") + ggtitle(paste("P-Value",gg_p_df[[i]][1,2],sep=" - ")) + theme(text = element_text(size=12)) + theme(legend.title=element_blank()) + theme(plot.title = element_text(hjust = 0.5)) + geom_vline(xintercept=0)
suppressMessages(print(pval_h_plot))
}
# Coefficient Densities
gg_c_df <- list()
for (i in 2:ncol(coef_boot_df)){
gg_c_df[[i]] <- melt(coef_boot_df[,c(1,i)], id = "Sample")
coef_d_plot <- ggplot(gg_c_df[[i]], mapping = aes(x = value, group = variable, fill=variable)) + geom_density() + ggtitle(paste("Coef. Density",gg_c_df[[i]][1,2],sep=" - ")) + theme(text = element_text(size=12)) + theme(legend.title=element_blank()) + theme(plot.title = element_text(hjust = 0.5))+ xlab("Value") + ylab("Density") + theme(legend.position="none") + scale_fill_manual(values=c("#003399")) + scale_colour_manual(values=c("black")) + geom_vline(xintercept=0)
suppressMessages(print(coef_d_plot))
}
time.taken
<file_sep>/S3_01_Model_Development_20171222__Model_ID.Rmd
---
title: "S3_01_Model_Development_20171222_MF_OLS_M4"
author: "<NAME>"
date: "December 22, 2017"
output:
html_document:
toc: true
theme: default
toc_depth: 3
toc_float:
collapsed: false
smooth_scroll: false
---
<style>
pre {
overflow-x: auto;
white-space: pre !important;
overflow-y: scroll !important;
height: 40vh !important;
}
pre code {
word-wrap: normal;
white-space: pre;
}
</style>
```{r global_options, echo = FALSE, include = FALSE}
# Global knitr settings: wide console output; hide code, warnings and messages
# in the rendered report.
options(width = 999)
knitr::opts_chunk$set(echo = FALSE, warning = FALSE, message = FALSE,
                      cache = FALSE, tidy = FALSE, size = "small")
```
## 1.1 Loading R Packages
```{r,echo=F,include=FALSE}
### record the starting time
time0<-Sys.time()
# Packages required by this notebook.
requirements <- c("lattice","speedglm","data.table","ggplot2","knitr","gsubfn","zoo","sqldf","latticeExtra","sandwich","QuantPsyc","gridExtra","DT","urca","tseries","car",'ellipse','ResourceSelection','tidyr','snow','parallel','dplyr','panelAR','openxlsx',"moments",'lmtest','here')
#install packages if not already installed
for(requirement in requirements){if( !(requirement %in% installed.packages())) install.packages(requirement)}
#load all required packages
lapply(requirements, require, character.only=T);
# Flag consumed as `eval=full` by the selection-summary chunk further below.
full=TRUE;
# dr_here()
# ?here
```
## 1.2 Import Data & Cleaning
```{r, echo=TRUE, include=FALSE, warning=FALSE}
#####################################
#### set directory and load data ####
#####################################
# setwd("C:\\Users\\kdoughan\\Documents\\01_Engagements\\Bank of Hope\\10_2017_Bank of Hope Commerial Model Build\\2 - Code")
#setwd("C:\\Users\\mingxie\\Desktop\\KPMG\\A - Projects\\201710 BOH Model Development\\2 - Code\\Model Development Code Package")
setwd("C:/Users/ic07949/Desktop/Model Development Code Package v2/Model Development Code Package")
# Loads `data3` (estimation sample with MEVs) and the helper functions used
# below (cv_select, rm_blanks, get_excel, ...).
load('S3_00_Estimation_Sample_with_MEV_20171117');
source('S3_00_dev-support.R')
data3[,.N,keyby=.(Portfolio2,ENTITY_NAME)]
data3[,year:=year(Date),]
data3[,Balance,keyby=.(Portfolio2,ENTITY_NAME,Date)]
##############################################################################
### set options ####
# Segment selection: exactly one of the commented-out alternatives below should
# be active. Current run: MF with its peer-bank set.
#MF
Segment='MF';
mfpeer <- c ("Banner Corporation", "Cathay General Bancorp", "Columbia Banking System", "EAST WEST BANCORP", "PacWest Bancorp", "UMPQUA BANK", "Bank of Hope")
data3 <- data3[ENTITY_NAME %in% mfpeer,,]
#NOOCRE
#Segment='NOOCRE';
#NOOCREpeer <- c ( "Cathay General Bancorp", "EAST WEST BANCORP", "UMPQUA BANK", "Western Alliance", "Bank of Hope")
#data3 <- data3[ENTITY_NAME %in% NOOCREpeer,,]
#CnI
#Segment='CnI';
# cipeer <- c ("BANC OF CALIFORNIA", "Bank of Hope", "Cathay General Bancorp", "EAST WEST BANCORP", "PACIFIC PREMIER BANCORP", # #"UMPQUA BANK"", "WASHINGTON FEDERAL")
# data3 <- data3[ENTITY_NAME %in% cipeer,,]
#OOCRE
#Segment='OOCRE';
# OOCREpeer <- c ( "WASHINGTON FEDERAL","Bank of Hope")
# data3 <- data3[ENTITY_NAME %in% OOCREpeer,,]
#data3<-data3[ENTITY_NAME=='Bank of Hope',,]
##############################################################################
# Derived flags: quarter label, Q2/Q4 indicator for Bank of Hope, and a
# target dummy distinguishing Bank of Hope from its peers.
data3[,quarter:=paste('Q',quarter(Date),sep=''),]
data3[,Q2Q4:=ifelse(quarter(Date)%in%c(2,4)&ENTITY_NAME=='Bank of Hope','Yes','No'),]
data3[,target:=ifelse(ENTITY_NAME=='Bank of Hope',1,0),]
# Split history vs. scenario forecasts; forecast balances frozen at 2016 Q4.
data3.hist<-data3[Scenario=='Historic'& Portfolio2==Segment,,]
data3.fcst<-data3[Scenario!='Historic',,]
data3.fcst[,Balance:=data3.hist[Date=='2016 Q4'& ENTITY_NAME=='Bank of Hope',Balance,],]
# data3.fcst[,Balance,]
mev.list<-as.data.table(get_excel('S0_09_vars_info_111717.xlsx','all'))
```
## 2.0 Variable Selection
MEV Set Up
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=11,fig.height=18}
# Choose the candidate MEV (macroeconomic variable) base names for the variable
# search. One per-segment list should be active; current run: the MF list.
# debug(cv_select)
# undebug(cv_select)
# mev.list[,.N,keyby=name]
mev.list[,.N,keyby=.(base)]
# # Ming's run MEV Candidates:
# mev.list.sub<-mev.list[base%in%c('rgdp_grw','gdp_grw','unemp','ca_unemp','indus_prod','bbb_spread','yld_bbb','dow','vix','prime_spread'),,]
#For C&I
#Tier 2: CREI HPI Employment
# mev.list.sub<-mev.list[base%in%c('dow',
#                                  'ca_rgsp',
#                                  'ca_gsp',
#                                  'indus_prod',
#                                  'rgdp_grw',
#                                  'gdp_grw',
#                                  'unemp',
#                                  'ca_unemp',
#                                  'bbb_spread',
#                                  'prime_spread',
#                                  'sp500',
#                                  'vix'
#                                  ,'hpi',
#                                  'ca_hpi',
#                                  'crei',
#                                  'empl',
#                                  'ca_empl'
#                                  ),,]
#For CRE
#Tier 2: HPI Mortgage Rate BBB Spread Employment Housing Start
#mev.list.sub<-mev.list[base%in%c('dow',
#                                  'crei',
#                                  'ca_rgsp',
#                                  'ca_gsp',
#                                  'indus_prod',
#                                  'rgdp_grw',
#                                  'gdp_grw',
#                                  'unemp',
#                                  'ca_unemp',
#                                  'prime_spread',
#                                  'sp500'
#                                  ,'hpi',
#                                  'ca_hpi',
#                                  'empl',
#                                  'ca_empl',
#                                  'bbb_spread',
#                                  'mort_spread',
#                                  'house_start',
#                                  'ca_house_start'
#                                  ),,]
# #For MF
#Tier 2: Mortgage Rate BBB Spread Employment
mev.list.sub<-mev.list[base%in%c('dow',
                                 'hpi',
                                 'ca_hpi',
                                 'crei',
                                 'ca_rgsp',
                                 'ca_gsp',
                                 'indus_prod',
                                 'rgdp_grw',
                                 'gdp_grw',
                                 'unemp',
                                 'ca_unemp',
                                 'prime_spread',
                                 'sp500',
                                 'house_start',
                                 'ca_house_start'
                                 ,'empl',
                                 'ca_empl',
                                 'bbb_spread',
                                 'mort_spread'
                                 ),,]
cat('\n Selected Candidate MEV \n')
mev.list.sub[,.N,keyby=.(base)]
```
Specification Set Up
```{r, echo=FALSE, include=FALSE, warning=FALSE,fig.width=11,fig.height=18}
# Stepwise specification path for the model under review: each list element is
# the right-hand side at one forward-selection step (an "ENTITY_NAME" lead term
# denotes a panel fixed-effect spec; an empty string denotes a plain OLS spec).
# Exactly one per-segment path should be active; current run: MF Challenger 2 (M5).
modl.list<-list(
####Panel#### (C&I, OOCRE, NOOCRE, MF)
#C&I (Challenger 1): M2
# c("ENTITY_NAME")
# ,c("ENTITY_NAME","unemp_yd_EWMA4")
# ,c("ENTITY_NAME","unemp_yd_EWMA4","rgdp_grw_NL_lag3")
# ,c("ENTITY_NAME","unemp_yd_EWMA4","rgdp_grw_NL_lag3","hpi_qg_EWMA4_lag4")
# ,c("ENTITY_NAME","unemp_yd_EWMA4","rgdp_grw_NL_lag3","hpi_qg_EWMA4_lag4","prime_spread")
#OOCRE (Challenger): M9
# c("ENTITY_NAME")
# ,c("ENTITY_NAME","empl_yg_EWMA4_lag4")
# ,c("ENTITY_NAME","empl_yg_EWMA4_lag4","crei_yg_EWMA4_lag3")
# ,c("ENTITY_NAME","empl_yg_EWMA4_lag4","crei_yg_EWMA4_lag3","prime_spread_qd_EWMA4_lag3")
# ,c("ENTITY_NAME","empl_yg_EWMA4_lag4","crei_yg_EWMA4_lag3","prime_spread_qd_EWMA4_lag3","mort_spread_log_yd_EWMA4_lag4")
#NOOCRE (Champion): M8
# c("ENTITY_NAME")
# ,c("ENTITY_NAME","empl_yg_EWMA4_lag1")
# ,c("ENTITY_NAME","empl_yg_EWMA4_lag1","rgdp_grw_NL_lag3")
# ,c("ENTITY_NAME","empl_yg_EWMA4_lag1","rgdp_grw_NL_lag3","crei_yg_EWMA4_lag3")
# ,c("ENTITY_NAME","empl_yg_EWMA4_lag1","rgdp_grw_NL_lag3","crei_yg_EWMA4_lag3","dow_qg_NL_lag2")
#MF (Challenger 2): M5
c("ENTITY_NAME")
,c("ENTITY_NAME","crei_qg_EWMA4_lag2")
,c("ENTITY_NAME","crei_qg_EWMA4_lag2","empl_qg_EWMA2_lag3")
,c("ENTITY_NAME","crei_qg_EWMA4_lag2","empl_qg_EWMA2_lag3","sp500_qg_lag1")
#MF (Champion): M6
# c("ENTITY_NAME")
# ,c("ENTITY_NAME","empl_qg_EWMA2_lag3")
# ,c("ENTITY_NAME","empl_qg_EWMA2_lag3","gdp_grw_yoy_NL_lag2")
# ,c("ENTITY_NAME","empl_qg_EWMA2_lag3","gdp_grw_yoy_NL_lag2","mort_spread_qd_EWMA4")
####OLS#### (C&I, OOCRE, NOOCRE, MF)
#C&I (Challenger 2): M1
# c("")
# ,c("","ca_rgsp_yg_EWMA4")
# ,c("","ca_rgsp_yg_EWMA4","ca_empl_qg_lag1")
# ,c("","ca_rgsp_yg_EWMA4","ca_empl_qg_lag1","prime_spread_log_qd")
# ,c("","ca_rgsp_yg_EWMA4","ca_empl_qg_lag1","prime_spread_log_qd","hpi_yg_EWMA4_lag4")
# ,c("","ca_rgsp_yg_EWMA4","ca_empl_qg_lag1","prime_spread_log_qd","hpi_yg_EWMA4_lag4","dow_yg_EWMA4_lag2")
#C&I (Champion): M3
# c("")
# ,c("","ca_rgsp_yg_EWMA4")
# ,c("","ca_rgsp_yg_EWMA4","ca_unemp_yd_EWMA4")
# ,c("","ca_rgsp_yg_EWMA4","ca_unemp_yd_EWMA4","vix_qd_lag4")
# ,c("","ca_rgsp_yg_EWMA4","ca_unemp_yd_EWMA4","vix_qd_lag4","prime_spread_log_qd")
# ,c("","ca_rgsp_yg_EWMA4","ca_unemp_yd_EWMA4","vix_qd_lag4","prime_spread_log_qd","ca_hpi_yg_EWMA4_lag4")
# ,c("","ca_rgsp_yg_EWMA4","ca_unemp_yd_EWMA4","vix_qd_lag4","prime_spread_log_qd","ca_hpi_yg_EWMA4_lag4","dow_yg_EWMA4_lag3")
#OOCRE (Champion): M10
# c("")
# ,c("","empl_yg_EWMA4_lag4")
# ,c("","empl_yg_EWMA4_lag4","crei_yg_EWMA2_lag4")
# ,c("","empl_yg_EWMA4_lag4","crei_yg_EWMA2_lag4","prime_spread_log_qd_EWMA2_lag4")
# ,c("","empl_yg_EWMA4_lag4","crei_yg_EWMA2_lag4","prime_spread_log_qd_EWMA2_lag4","mort_spread_log_qd_EWMA2_lag1")
#NOOCRE (Challenger):M7
# c("")
# ,c("","crei_yg_EWMA4_lag1")
# ,c("","crei_yg_EWMA4_lag1","prime_spread_log_qd_EWMA2")
#MF (Challenger 1): M4
#Based on the current approach/rationale, the MF OLS Business approach deems a simple 1-variable model. This variable is forced into the model.
#c("")
#,c("","ca_unemp_EWMA4_lag1")
#,c("","ca_unemp_EWMA4_lag1","prime_spread_log_qd_EWMA2_lag4")
)
```
One-Step Forward Tuning
```{r,eval=FALSE, echo=FALSE, include=FALSE, warning=FALSE,fig.width=11,fig.height=18}
# Interactive tuning chunk (eval=FALSE: run manually, not on knit). Scores all
# candidate one-variable extensions of the LAST specification in modl.list via
# cv_select, then prints the top candidates under several ranking criteria and
# refits the current specification with lm for a residual-normality check.
# modl.list[[length(modl.list)]]
# ?debug(cv_select)
# ?undebug(cv_select)
cat('\n\n ################# Latest CV Result ################# \n')
fit.cv<-cv_select(data3.hist, mev.list.sub, criteria="rsq", resp="NCOR", modl=modl.list[[length(modl.list)]], iter=1, from_yr=2007, to_yr=2016, vif_tol=10, sig_tol=0.90);
cat('\n Top 10 MEV by R-Square\n\n')
tmp<-fit.cv[[1]][[1]][,.SD[],keyby=.(rsq.n=-rsq,rsq,rsq.f)][,rsq.n:=NULL,];print(tmp[1:25,.(rsq,rsq.f,var,coefficient,sign,base,n,p),])
cat('\n\n')
tmp<-fit.cv[[1]][[1]][,.SD[],keyby=.(rsq.n=-rsq.sub,rsq.sub,rsq.f.sub)][,rsq.n:=NULL,];print(tmp[1:25,.(rsq.sub,rsq.f.sub,var,coefficient,sign,base,n,p),])
cat('\n Top 10 MEV by RMSE\n\n')
tmp<-fit.cv[[1]][[1]][,.SD[],keyby=.(rmse,rmse.f)];print(tmp[1:25,.(rmse,rmse.f,var,coefficient,sign,base,n,p),])
cat('\n\n')
tmp<-fit.cv[[1]][[1]][,.SD[],keyby=.(rmse.sub,rmse.f.sub)];print(tmp[1:25,.(rmse.sub,rmse.f.sub,var,coefficient,sign,base,n,p),])
cat('\n Top 10 MEV by MAD\n\n')
tmp<-fit.cv[[1]][[1]][,.SD[],keyby=.(mad,mad.f)];print(tmp[1:25,.(mad,mad.f,var,coefficient,sign,base,n,p),])
cat('\n\n')
tmp<-fit.cv[[1]][[1]][,.SD[],keyby=.(mad.sub,mad.f.sub)];print(tmp[1:25,.(mad.sub,mad.f.sub,var,coefficient,sign,base,n,p),])
# cat('\n Top 10 MEV by MXAD\n\n')
# tmp<-fit.cv[[1]][[1]][,.SD[],keyby=.(mxad,mxad.f)];print(tmp[1:25,,])
cat('\n Top 10 MEV by Residual Shapiro Score\n\n')
tmp<-fit.cv[[1]][[1]][,.SD[],keyby=.(shapiro.pvalue.n=-shapiro.pvalue,shapiro.pvalue,shapiro.pvalue.f)][,shapiro.pvalue.n:=NULL,];print(tmp[1:25,.(shapiro.pvalue,shapiro.pvalue.f,var,coefficient,sign,base,n,p),])
cat('\n\n')
tmp<-fit.cv[[1]][[1]][,.SD[],keyby=.(shapiro.pvalue.n=-shapiro.pvalue.sub,shapiro.pvalue.sub,shapiro.pvalue.f.sub)][,shapiro.pvalue.n:=NULL,];print(tmp[1:25,.(shapiro.pvalue.sub,shapiro.pvalue.f.sub,var,coefficient,sign,base,n,p),])
cat('\n\n ################# Last Selected Model Under OLS ################# \n')
# Build the formula from the last spec ("" placeholder -> intercept-only "1").
rhs<-paste(rm_blanks(modl.list[[length(modl.list)]]),collapse='+');frm<-paste('NCOR',ifelse(rhs=='','1',rhs),sep='~')
fit<-lm(frm,data3.hist)
summary(fit);
# Residual diagnostics: Shapiro-Wilk normality plus skewness/kurtosis.
data.table(
  Test=c('Shapiro','Skewness','Kurtosis')
  ,Value=c(shapiro.test(fit$residuals)$statistic,skewness(fit$residuals),kurtosis(fit$residuals))
  ,pValue=c(shapiro.test(fit$residuals)$p.value,NA,NA)
)
# vif(fit);
# qqnorm(fit$residuals);qqline(fit$residuals)
# data3.hist[,.N,keyby=.(Portfolio2,ENTITY_NAME)]
# data3.hist[,TIME_IDX:=.GRP,by=.(Date)]
# fit<-panelAR(paste('NCOR',paste(modl.list[[length(modl.list)]],collapse='+'),sep='~'),data=as.data.frame(data3.hist[Portfolio2=='CnI',,]), panelVar="ENTITY_NAME", timeVar="TIME_IDX",autoCorr='psar1', panelCorrMethod = "parks", complete.case=TRUE,dof.correction = TRUE)
# print(summary(fit));
```
## 2.1 Selection Summary
+ Selected Model
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=11,fig.height=8}
# Render each step of the selection path as an "NCOR ~ ..." formula string.
temp0<-do.call('rbind',lapply(modl.list,function(x){
  rhs<-paste(rm_blanks(x),collapse='+');frm<-paste('NCOR',ifelse(rhs=='','1',rhs),sep='~')
  return(frm)
}))
print(temp0)
```
```{r,eval=full, echo=FALSE, include=TRUE, warning=FALSE,fig.width=11,fig.height=8}
# summarize the selection route
# For every selection step: re-fit the step's model, print the candidate-MEV
# rankings from cv_select, then record full-sample vs cross-validation
# statistics (RSQ/MAD/RMSE/Shapiro, overall and BOH-only) in long format.
cv.statistics<-do.call('rbind',lapply(1:length(modl.list),function(x){
# x=2
modl<-modl.list[[x]]
rhs<-paste(rm_blanks(modl),collapse='+');frm<-paste('NCOR',ifelse(rhs=='','1',rhs),sep=' ~ ')
fit<-lm(frm,data3.hist)
#x=modl.list[[1]]
#x=c("ENTITY_NAME","empl_ag_lag1","ca_rinc_ag_lag1",'yld_spread_lag1',"dow_qg","ca_empl_qg_lag3","crei_lag1","bbb_spread_qd")
cat(paste('\n\n ######################## Step ',x,'########################\n'))
cat(paste(' Starting Model:\n ',frm,'\n',sep=''));
cat(paste('\n ######################################\n'))
print(summary(fit))
cat(paste(' ######################################\n'))
# candidate screening around the current model (vif_tol / sig_tol gate entry)
fit.cv<-cv_select(data3.hist, mev.list.sub, criteria="rsq", resp="NCOR", modl=modl, iter=1, from_yr=2007, to_yr=2016, vif_tol=10, sig_tol=0.90);
# NOTE(review): banners say "Top 10" but 25 rows are printed in each table.
cat('\n Top 10 MEV by R-Square\n\n')
tmp<-fit.cv[[1]][[1]][,.SD[],keyby=.(rsq.n=-rsq,rsq,rsq.f)][,rsq.n:=NULL,];print(tmp[1:25,.(rsq,rsq.f,var,coefficient,sign,base,n,p),])
cat('\n\n')
tmp<-fit.cv[[1]][[1]][,.SD[],keyby=.(rsq.n=-rsq.sub,rsq.sub,rsq.f.sub)][,rsq.n:=NULL,];print(tmp[1:25,.(rsq.sub,rsq.f.sub,var,coefficient,sign,base,n,p),])
cat('\n Top 10 MEV by RMSE\n\n')
tmp<-fit.cv[[1]][[1]][,.SD[],keyby=.(rmse,rmse.f)];print(tmp[1:25,.(rmse,rmse.f,var,coefficient,sign,base,n,p),])
cat('\n\n')
tmp<-fit.cv[[1]][[1]][,.SD[],keyby=.(rmse.sub,rmse.f.sub)];print(tmp[1:25,.(rmse.sub,rmse.f.sub,var,coefficient,sign,base,n,p),])
cat('\n Top 10 MEV by MAD\n\n')
tmp<-fit.cv[[1]][[1]][,.SD[],keyby=.(mad,mad.f)];print(tmp[1:25,.(mad,mad.f,var,coefficient,sign,base,n,p),])
cat('\n\n')
tmp<-fit.cv[[1]][[1]][,.SD[],keyby=.(mad.sub,mad.f.sub)];print(tmp[1:25,.(mad.sub,mad.f.sub,var,coefficient,sign,base,n,p),])
# cat('\n Top 10 MEV by MXAD\n\n')
# tmp<-fit.cv[[1]][[1]][,.SD[],keyby=.(mxad,mxad.f)];print(tmp[1:25,,])
cat('\n Top 10 MEV by Residual Shapiro Score\n\n')
tmp<-fit.cv[[1]][[1]][,.SD[],keyby=.(shapiro.pvalue.n=-shapiro.pvalue,shapiro.pvalue,shapiro.pvalue.f)][,shapiro.pvalue.n:=NULL,];print(tmp[1:25,.(shapiro.pvalue,shapiro.pvalue.f,var,coefficient,sign,base,n,p),])
cat('\n\n')
tmp<-fit.cv[[1]][[1]][,.SD[],keyby=.(shapiro.pvalue.n=-shapiro.pvalue.sub,shapiro.pvalue.sub,shapiro.pvalue.f.sub)][,shapiro.pvalue.n:=NULL,];print(tmp[1:25,.(shapiro.pvalue.sub,shapiro.pvalue.f.sub,var,coefficient,sign,base,n,p),])
# step 1 has no "last added" variable, so cross-validate the model as a whole
if(x==1){
cv.tmp<-cv_step(data=data3.hist, resp="NCOR", test_var="", model=modl, from_yr=2007, to_yr=2016)
}else{
cv.tmp<-cv_step(data=data3.hist, resp="NCOR", test_var=modl[length(modl)], model=modl[-length(modl)], from_yr=2007, to_yr=2016)
}
data3.hist[,y:=predict(fit, newdata=.SD, type=c("response"), na.action=na.pass),]
# long-format rows: one per (measure x scope) for this step
tmp<-rbind(
data.table(var=modl[[length(modl)]],IDX=x,measure='RSQ',scope='full estimation sample',value=cv.tmp$rsq.f)
,data.table(var=modl[[length(modl)]],IDX=x,measure='RSQ',scope='cross validation',value=cv.tmp$rsq)
,data.table(var=modl[[length(modl)]],IDX=x,measure='MAD',scope='full estimation sample',value=cv.tmp$mad.f)
,data.table(var=modl[[length(modl)]],IDX=x,measure='MAD',scope='cross validation',value=cv.tmp$mad)
,data.table(var=modl[[length(modl)]],IDX=x,measure='RMSE',scope='full estimation sample',value=cv.tmp$rmse.f)
,data.table(var=modl[[length(modl)]],IDX=x,measure='RMSE',scope='cross validation',value=cv.tmp$rmse)
,data.table(var=modl[[length(modl)]],IDX=x,measure='Shapiro Pvalue',scope='full estimation sample',value=cv.tmp$shapiro.pvalue.f)
,data.table(var=modl[[length(modl)]],IDX=x,measure='Shapiro Pvalue',scope='cross validation',value=cv.tmp$shapiro.pvalue)
,data.table(var=modl[[length(modl)]],IDX=x,measure='RSQ',scope='full estimation sample (BOH Only)',value=cv.tmp$rsq.f.sub)
,data.table(var=modl[[length(modl)]],IDX=x,measure='RSQ',scope='cross validation (BOH Only)',value=cv.tmp$rsq.sub)
,data.table(var=modl[[length(modl)]],IDX=x,measure='MAD',scope='full estimation sample (BOH Only)',value=cv.tmp$mad.f.sub)
,data.table(var=modl[[length(modl)]],IDX=x,measure='MAD',scope='cross validation (BOH Only)',value=cv.tmp$mad.sub)
,data.table(var=modl[[length(modl)]],IDX=x,measure='RMSE',scope='full estimation sample (BOH Only)',value=cv.tmp$rmse.f.sub)
,data.table(var=modl[[length(modl)]],IDX=x,measure='RMSE',scope='cross validation (BOH Only)',value=cv.tmp$rmse.sub)
,data.table(var=modl[[length(modl)]],IDX=x,measure='Shapiro Pvalue',scope='full estimation sample (BOH Only)',value=cv.tmp$shapiro.pvalue.f.sub)
,data.table(var=modl[[length(modl)]],IDX=x,measure='Shapiro Pvalue',scope='cross validation (BOH Only)',value=cv.tmp$shapiro.pvalue.sub)
)
return(tmp)
}))
cv.statistics[,value:=round(value,6),]
# wide table: one row per (step, variable), one column per measure+scope
temp0<-dcast(cv.statistics,IDX+var~measure+scope,value.var = 'value',sep=' ')
# interactive DT widget with CSV export and truncated cell tooltips
datatable(temp0
,rownames = F
,extensions = c('Scroller','FixedColumns','Buttons','ColReorder')
,options = list(
#rowCallback = JS("function(r,d) {$(r).attr('height', '100px')}"),
columnDefs = list(list(
targets = c(1:4),
render = JS(
"function(data, type, row, meta) {",
"return type === 'display' && data.length > 10 ?",
"'<span title=\"' + data + '\">' + data.substr(0, 10) + '...</span>' : data;",
"}")
)), ## column index starting from 0
deferRender = TRUE,
scrollY = 300,
scroller = TRUE,
scrollX = TRUE,
fixedColumns = list(leftColumns = 2),
dom = 'Blfrtip',
buttons =
list( list(
extend = 'collection',
buttons = c('csv'),
text = 'Save as .csv file'
)), ## it doesn't work if only keep 'csv'; weird
colReorder = TRUE,
initComplete = JS(
"function(settings, json) {",
"$(this.api().table().header()).css({'font-size': '12px'});",
"$(this.api().table().body()).css({'font-size': '12px'});",
"}") ## change both header and body font size
))
# need to have full sample statistics and CV statistics together.
myColours <- c( brewer.pal(6,"Greens")[c(2,6)]
,brewer.pal(6,"Reds")[c(2,6)]
)
my.settings <- list(
superpose.symbol = list(col=myColours, border="transparent")
,superpose.line = list(col=myColours, border="transparent")
)
# 2x2 trellis: each performance measure vs selection step, full sample vs CV
xyplot(value~IDX|measure,type=c('l','p','g'),group=scope
,xlab='',layout=c(2,2),ylab='',lwd=2
,par.settings = my.settings
,main='Variable Selection Performance Monitoring'
,scale=list(x=list(relation='free'),y=list(relation='free'))
,auto.key=list(x =0.65,y=.33,columns=1,border=FALSE,cex=1.2,lwd=4)
,cv.statistics)
```
## 3.0 Outcome Assessment
+ The last model in the model list is defined as the stopping-point model.
+ The second-to-last model is defined as the final selected model.
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=11,fig.height=6}
# cat('\n\n ################# Last Selected Model Under OLS (Before Stopping Model) ################# \n')
# Fit the final selected model (second-to-last entry of modl.list) under OLS
# and run residual diagnostics plus leave-one-year-out cross validation.
rhs<-paste(rm_blanks(modl.list[[length(modl.list)-1]]),collapse='+');
frm<-paste('NCOR',ifelse(rhs=='','1',rhs),sep='~')
fit.final<-lm(frm,data3.hist)
summary(fit.final);
# residual normality diagnostics
data.table(
Test=c('Shapiro','Skewness','Kurtosis')
,Value=c(shapiro.test(fit.final$residuals)$statistic,skewness(fit.final$residuals),kurtosis(fit.final$residuals))
,pValue=c(shapiro.test(fit.final$residuals)$p.value,NA,NA)
)
# vif(fit.final);
# residual histogram with kernel density overlay
p<-histogram(~ fit.final$residuals , breaks=16 ,type = "density",xlab='',
panel=function(x, ...) {
panel.histogram(x, ...)
panel.densityplot(x, bw=100,kernel="gaussian",...)
})
print(p)
# residual QQ plot against the normal distribution
p<-qqmath(~ fit.final$residuals,xlab='qnorm',ylab='residuals',
prepanel = prepanel.qqmathline,
panel = function(x, ...) {
panel.qqmathline(x, ...)
panel.qqmath(x, ...)
})
print(p)
# full-sample fitted values (written in place on data3.hist)
data3.hist[,y:=predict(fit.final, newdata=.SD, type=c("response"), na.action=na.pass),]
# leave-one-year-out CV: refit without year i, predict year i into y.cv
for(i in data3.hist[,unique(year(Date)),]){
fit<-lm(fit.final,data3.hist[year(Date)!=i,,])
data3.hist[year(Date)==i,y.cv:=predict(fit, newdata=.SD, type=c("response"), na.action=na.pass),]
}
data3.hist[,residual:=y-NCOR,]
# Residual diagnostics per entity: Durbin-Watson and Breusch-Godfrey serial
# correlation tests on the per-entity residual series, plus overall
# Breusch-Pagan (heteroskedasticity) and RESET (mis-specification) tests.
fit.autotest<-do.call('rbind',lapply(data3.hist[,unique(ENTITY_NAME),],function(x){
#x='Bank of Hope'
tmp0<-data3.hist[ENTITY_NAME==x,,][,.(NCOR,y,residual),keyby=.(ENTITY_NAME,Date)]
tmp0.result1<-tmp0[,dwtest(residual~1),]
tmp0.result2<-tmp0[,bgtest(residual~1,order=4),]
# tmp0.result3<-tmp0[,bptest(NCOR~y),]
tmp1<-rbind(
data.table(ENTITY_NAME=x
,Test='DW Test'
,Null.Hypothesis='Errors are serially uncorrelated'
,p.value=tmp0.result1$p.value
)
,data.table(ENTITY_NAME=x
,Test='BG Test'
,Null.Hypothesis='No serial correlation of any order up to 4'
,p.value=tmp0.result2$p.value
)
)
return(tmp1)
}))
fit.autotest<-rbind(fit.autotest
,data.table(ENTITY_NAME='Overall'
,Test='BP Test'
,Null.Hypothesis='Homoskedasticity'
,p.value=data3.hist[,bptest(fit.final),]$p.value
)
,data.table(ENTITY_NAME='Overall'
,Test='Reset Test'
# typo fixed: was 'No suffer from mis-specificaiton'
,Null.Hypothesis='No mis-specification'
,p.value=data3.hist[,resettest(fit.final),]$p.value
)
)
setorder(fit.autotest,Test)
# fit.autotest[,.N,keyby=.(Test,ENTITY_NAME)]
print(fit.autotest)
# Stack actuals, full-sample fit, CV-by-year fit, and scenario forecasts into
# one long table, then plot in-sample fit for Bank of Hope (with and without
# the forecast scenarios).
temp0<-rbind(data3.hist[,.(Scenario='Historic Actual',Date,y=NCOR,ENTITY_NAME),]
,data3.hist[,.(Scenario='Historic Fit - Full Sample',Date,y=y,ENTITY_NAME),]
,data3.hist[,.(Scenario='Historic Fit - CV By Year',Date,y=y.cv,ENTITY_NAME),]
# side effect: writes forecast column y onto data3.fcst before selecting
,data3.fcst[,y:=predict(fit.final, newdata=.SD, type=c("response"), na.action=na.pass),][,.(Scenario,Date,y=y,ENTITY_NAME),]
)
# y-axis limits sized from the Bank of Hope series only
y.max<-temp0[ENTITY_NAME=='Bank of Hope',max(y),]
y.min<-temp0[ENTITY_NAME=='Bank of Hope',min(y),]
# col=c('purple','blue')
col=c('orange','darkgreen','grey','blue','pink','red')
p<-xyplot(y~as.Date(Date)|ENTITY_NAME,group=Scenario,scale=list(x=list(rot=90))
# ,par.settings = list(superpose.line = list(col=col),superpose.symbol = list(col= col))
,type=c('l','p','g'),xlab='',ylab='',ylim=c(min(1.3*y.min,-y.max*.2),1.3*y.max)
,cex=1.0
,par.settings = list(superpose.line = list(col=col,lwd=2),superpose.symbol = list(col= col))
# render left-axis tick labels as percentages
,yscale.components=function(...){yc <- yscale.components.default(...)
yc$left$labels$labels <-sprintf("%s%%",yc$left$labels$at*100)
return(yc)}
,main='In-sample fit'
,auto.key = list(x = .66, y=.88, corner = c(0,1) ,border = FALSE, lines = TRUE)
,temp0[ENTITY_NAME=='Bank of Hope',,])
print(p)
# second plot: historic series only (forecast scenarios excluded)
col=c('grey','blue','pink')
p<-xyplot(y~as.Date(Date)|ENTITY_NAME,group=Scenario,scale=list(x=list(rot=90))
# ,par.settings = list(superpose.line = list(col=col),superpose.symbol = list(col= col))
,type=c('l','p','g'),xlab='',ylab='',ylim=c(min(1.3*y.min,-y.max*.2),1.3*y.max)
,cex=1.0
,par.settings = list(superpose.line = list(col=col,lwd=2),superpose.symbol = list(col= col))
,yscale.components=function(...){yc <- yscale.components.default(...)
yc$left$labels$labels <-sprintf("%s%%",yc$left$labels$at*100)
return(yc)}
,main='In-sample fit'
,auto.key = list(x = .66, y=.88, corner = c(0,1) ,border = FALSE, lines = TRUE)
,temp0[ENTITY_NAME=='Bank of Hope'&Scenario%in%c('Historic Actual','Historic Fit - CV By Year','Historic Fit - Full Sample'),,])
print(p)
# str(temp0)
```
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=11,fig.height=6}
# Forecast tables: quarterly NCOR (y) and its annualized equivalent
# (y.annual = 1-(1-y)^4), pivoted to one column per forecast date.
temp0<-data3.fcst[,.(Segment=Segment,Scenario,Date,y=round(y,6),y.annual=round(1-(1-y)^4,6),ENTITY_NAME),]
temp1<-data.table(dcast(temp0,Segment+Scenario~Date,value.var = 'y'))
# quarterly-rate table
datatable(temp1
,rownames = F
,extensions = c('Scroller','FixedColumns','Buttons','ColReorder')
,options = list(
#rowCallback = JS("function(r,d) {$(r).attr('height', '100px')}"),
columnDefs = list(list(
targets = c(1:3),
render = JS(
"function(data, type, row, meta) {",
"return type === 'display' && data.length > 20 ?",
"'<span title=\"' + data + '\">' + data.substr(0, 20) + '...</span>' : data;",
"}")
)), ## column index starting from 0
deferRender = TRUE,
scrollY = 300 ,
scroller = TRUE,
scrollX = TRUE,
fixedColumns = list(leftColumns = 2),
dom = 'Blfrtip',
buttons =
list( list(
extend = 'collection',
buttons = c('csv'),
text = 'Save as .csv file'
)), ## it doesn't work if only keep 'csv'; weird
colReorder = TRUE,
initComplete = JS(
"function(settings, json) {",
"$(this.api().table().header()).css({'font-size': '12px'});",
"$(this.api().table().body()).css({'font-size': '12px'});",
"}") ## change both header and body font size
))
# annualized-rate table (same layout/options as above)
temp1<-data.table(dcast(temp0,Segment+Scenario~Date,value.var = 'y.annual'))
datatable(temp1
,rownames = F
,extensions = c('Scroller','FixedColumns','Buttons','ColReorder')
,options = list(
#rowCallback = JS("function(r,d) {$(r).attr('height', '100px')}"),
columnDefs = list(list(
targets = c(1:3),
render = JS(
"function(data, type, row, meta) {",
"return type === 'display' && data.length > 20 ?",
"'<span title=\"' + data + '\">' + data.substr(0, 20) + '...</span>' : data;",
"}")
)), ## column index starting from 0
deferRender = TRUE,
scrollY = 300 ,
scroller = TRUE,
scrollX = TRUE,
fixedColumns = list(leftColumns = 2),
dom = 'Blfrtip',
buttons =
list( list(
extend = 'collection',
buttons = c('csv'),
text = 'Save as .csv file'
)), ## it doesn't work if only keep 'csv'; weird
colReorder = TRUE,
initComplete = JS(
"function(settings, json) {",
"$(this.api().table().header()).css({'font-size': '12px'});",
"$(this.api().table().body()).css({'font-size': '12px'});",
"}") ## change both header and body font size
))
```
## 3.1 MEV - Trajectory
+ MEV History Scenario Trajectory
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=11,fig.height=4}
# Plot history + scenario trajectories of the MEVs used by the final model.
# Deduplicate to one row per (Date, Scenario) since MEVs repeat across entities.
temp.mev0<-data3[,c('Date','Scenario',rm_blanks(modl.list[[length(modl.list)-1]])),with=FALSE][,IDX:=1:.N,keyby=.(Date,Scenario)][IDX==1,,];
# keep only columns that are genuine MEVs (drop e.g. entity dummies)
temp.mev0<-temp.mev0[,c('Date','Scenario',intersect(names(temp.mev0),mev.list.sub[,name,])),with=FALSE];
temp.mev1<-melt(temp.mev0,id=c('Date','Scenario'));
col=c('orange','darkgreen','blue','red')
xyplot(value~Date|variable
,group=Scenario
,layout=c(2,1)
,xlab='',ylab=''
,par.settings = list(superpose.line = list(col=col),superpose.symbol = list(col= col))
,type=c('p','l','g')
,scale=list(x=list(relation='free'),y=list(relation='free'))
,temp.mev1)
```
## 3.2 MEV - Relative Impact
+ MEV Contribution($x_i'\beta_i$) in history and scenario forecast.
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=11,fig.height=4}
# MEV contribution (x_i' * beta_i) for Bank of Hope over history and forecast.
temp0<-data3[ENTITY_NAME=='Bank of Hope'&(Portfolio2==Segment|Scenario!='Historic'),,]
# temp0[,.N,keyby=.(Date)]
temp1<-do.call('rbind',lapply(intersect(modl.list[[length(modl.list)-1]],mev.list.sub[,name,]),function(x){
# x='empl_EWMA4_lag2'
coef<-fit.final$coefficients[x]
# use get(x) to reference the MEV column by name instead of building and
# eval(parse())-ing a code string — same result, no string injection risk
tmp0<-temp0[,.(Date,Scenario,variable=x,value=get(x)*coef),]
return(tmp0)
}))
col=c('orange','darkgreen','blue','red')
xyplot(value~Date|variable
,group=Scenario
,layout=c(2,1)
,xlab='',ylab=''
,par.settings = list(superpose.line = list(col=col),superpose.symbol = list(col= col))
,type=c('p','l','g')
,scale=list(x=list(relation='same'),y=list(relation='same'))
,temp1)
```
## 3.3 MEV Sensitivity Analysis
* Apply a shock to the MEV with 1,2,3 standard deviation to see the simple average/cumulative NCOR forecast (T1-T13) change in each scenario.
* Plot the level of Shocks to Forecast MEV
* Shock by 1, 2, or 3 standard deviations in the adverse direction.
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=11,fig.height=6}
# Sensitivity: shock each model MEV by 0-3 in-sample standard deviations in the
# adverse direction and recompute the cumulative NCOR / NCO forecast.
temp1<-do.call('rbind',lapply(intersect(modl.list[[length(modl.list)-1]],mev.list.sub[,name,]),function(x){
# x='unemp_yd_EWMA4'
# in-sample standard deviation of the MEV (sizes one unit of shock)
command=paste('var.sd<-data3.hist[ENTITY_NAME=="Bank of Hope",sd(',x,'),]')
eval(parse(text=command))
do.call('rbind',lapply(c(0,1,2,3),function(s){
data3.sens<-copy(data3.fcst)
# BUG FIX: the shock direction must come from the FINAL model's coefficient
# sign; the original used `fit`, a stale object left over from the
# cross-validation loop earlier in the report.
command=paste('data3.sens[,',x,':=',x,'+',s*sign(fit.final$coefficients[x])*var.sd,',]')
eval(parse(text=command))
data3.sens[,y:=predict(fit.final, newdata=.SD, type=c("response"), na.action=na.pass),]
tmp0<-data3.sens[,.(var=x
,shock=s
,NCOR.cum=round(sum(y),6)
,NCO.cum=round(sum(y*Balance),2)
),keyby=.(Scenario)]
return(tmp0)
}))
}))
# Interactive table of shocked cumulative NCOR/NCO by scenario and shock size.
datatable(temp1
,rownames = F
,extensions = c('Scroller','FixedColumns','Buttons','ColReorder')
,options = list(
#rowCallback = JS("function(r,d) {$(r).attr('height', '100px')}"),
columnDefs = list(list(
targets = c(1:3),
render = JS(
"function(data, type, row, meta) {",
"return type === 'display' && data.length > 20 ?",
"'<span title=\"' + data + '\">' + data.substr(0, 20) + '...</span>' : data;",
"}")
)), ## column index starting from 0
deferRender = TRUE,
scrollY = 300 ,
scroller = TRUE,
scrollX = TRUE,
fixedColumns = list(leftColumns = 2),
dom = 'Blfrtip',
buttons =
list( list(
extend = 'collection',
buttons = c('csv'),
text = 'Save as .csv file'
)), ## it doesn't work if only keep 'csv'; weird
colReorder = TRUE,
initComplete = JS(
"function(settings, json) {",
"$(this.api().table().header()).css({'font-size': '12px'});",
"$(this.api().table().body()).css({'font-size': '12px'});",
"}") ## change both header and body font size
))
# Unshocked (shock==0) Baseline/Severe levels kept as reference lines.
temp1[,Base:=max(ifelse(Scenario=='Baseline'&shock==0,NCOR.cum,0)),]
temp1[,Sev:=max(ifelse(Scenario=='Severe'&shock==0,NCOR.cum,0)),]
temp1[,Base.amt:=max(ifelse(Scenario=='Baseline'&shock==0,NCO.cum,0)),]
temp1[,Sev.amt:=max(ifelse(Scenario=='Severe'&shock==0,NCO.cum,0)),]
temp1<-temp1[Scenario%in%c('Severe','Baseline')&shock!=0,,]
myColours <- c(brewer.pal(3,"Greens"),brewer.pal(3,"Reds"))
my.settings <- list(superpose.polygon=list(col=myColours, border="transparent")
,superpose.symbol=list(col=myColours,pch=15, border="transparent"))
# grouped barchart of shocked cumulative NCOR (typo fixed: 'Cummulative')
p1<-barchart(NCOR.cum~var|factor("13-Quarter Cumulative NCOR"),group=paste(Scenario,' +/- ',shock,' std',sep='')
,z=temp1,par.settings = my.settings,ylim=c(0,1.1*max(temp1$NCOR.cum))
,horizontal=FALSE,stack=FALSE,ylab='',xlab=''
,auto.key = list(x = .46, y=.88, corner = c(0,1) ,column=2,border = FALSE, lines = FALSE)
,scale=list(x=list(rot=0),y=list(relation='free'))
,yscale.components=function(...){
yc <- yscale.components.default(...)
yc$left$labels$labels <-sprintf("%s%%",yc$left$labels$at*100) ## convert to strings as pct
return(yc)}
,panel=function(x,y,subscripts,z=z,groups=groups,horizontal=horizontal,stack=stack,par.settings=par.settings,...){
panel.grid(h=-1, v=0);
panel.barchart(x,y,subscripts = subscripts,groups = groups,horizontal = horizontal,stack = stack,...)
# reference lines: unshocked Baseline (green) and Severe (red) levels
panel.abline(h=z[subscripts][1]$Base,col = 'green',lwd=2,alpha=.5)
panel.abline(h=z[subscripts][1]$Sev,col = 'red',lwd=2,alpha=.5)
}
,temp1)
print(p1)
# same chart in dollar terms (thousands)
p1<-barchart(NCO.cum/1000~var|factor("13-Quarter Cumulative NCO (k $)"),group=paste(Scenario,' +/- ',shock,' std',sep='')
,z=temp1,par.settings = my.settings,ylim=c(0,1.1*max(temp1$NCO.cum/1000))
,horizontal=FALSE,stack=FALSE,ylab='',xlab=''
,auto.key = list(x = .46, y=.88, corner = c(0,1) ,column=2,border = FALSE, lines = FALSE)
,scale=list(x=list(rot=0),y=list(relation='free'))
,panel=function(x,y,subscripts,z=z,groups=groups,horizontal=horizontal,stack=stack,par.settings=par.settings,...){
panel.grid(h=-1, v=0);
panel.barchart(x,y,subscripts = subscripts,groups = groups,horizontal = horizontal,stack = stack,...)
panel.abline(h=z[subscripts][1]$Base.amt/1000,col = 'green',lwd=2,alpha=.5)
panel.abline(h=z[subscripts][1]$Sev.amt/1000,col = 'red',lwd=2,alpha=.5)
}
,temp1)
print(p1)
```
* Shock by 1, 2, or 3 standard deviations in the recovering/strengthening direction.
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=11,fig.height=6}
# Same sensitivity exercise as above but shocking in the favorable direction
# (minus sign on the shock term).
temp1<-do.call('rbind',lapply(intersect(modl.list[[length(modl.list)-1]],mev.list.sub[,name,]),function(x){
# x='unemp_yd_EWMA4'
command=paste('var.sd<-data3.hist[ENTITY_NAME=="Bank of Hope",sd(',x,'),]')
eval(parse(text=command))
do.call('rbind',lapply(c(0,1,2,3),function(s){
data3.sens<-copy(data3.fcst)
# BUG FIX: use fit.final's coefficient sign, not the stale `fit` object
# left over from the cross-validation loop.
command=paste('data3.sens[,',x,':=',x,'-',s*sign(fit.final$coefficients[x])*var.sd,',]')
eval(parse(text=command))
data3.sens[,y:=predict(fit.final, newdata=.SD, type=c("response"), na.action=na.pass),]
tmp0<-data3.sens[,.(var=x
,shock=s
,NCOR.cum=round(sum(y),6)
,NCO.cum=round(sum(y*Balance),2)
),keyby=.(Scenario)]
return(tmp0)
}))
}))
# Interactive table of favorably-shocked cumulative NCOR/NCO.
datatable(temp1
,rownames = F
,extensions = c('Scroller','FixedColumns','Buttons','ColReorder')
,options = list(
#rowCallback = JS("function(r,d) {$(r).attr('height', '100px')}"),
columnDefs = list(list(
targets = c(1:3),
render = JS(
"function(data, type, row, meta) {",
"return type === 'display' && data.length > 20 ?",
"'<span title=\"' + data + '\">' + data.substr(0, 20) + '...</span>' : data;",
"}")
)), ## column index starting from 0
deferRender = TRUE,
scrollY = 300 ,
scroller = TRUE,
scrollX = TRUE,
fixedColumns = list(leftColumns = 2),
dom = 'Blfrtip',
buttons =
list( list(
extend = 'collection',
buttons = c('csv'),
text = 'Save as .csv file'
)), ## it doesn't work if only keep 'csv'; weird
colReorder = TRUE,
initComplete = JS(
"function(settings, json) {",
"$(this.api().table().header()).css({'font-size': '12px'});",
"$(this.api().table().body()).css({'font-size': '12px'});",
"}") ## change both header and body font size
))
# Unshocked Baseline/Severe reference levels for the charts below.
temp1[,Base:=max(ifelse(Scenario=='Baseline'&shock==0,NCOR.cum,0)),]
temp1[,Sev:=max(ifelse(Scenario=='Severe'&shock==0,NCOR.cum,0)),]
temp1[,Base.amt:=max(ifelse(Scenario=='Baseline'&shock==0,NCO.cum,0)),]
temp1[,Sev.amt:=max(ifelse(Scenario=='Severe'&shock==0,NCO.cum,0)),]
temp1<-temp1[Scenario%in%c('Severe','Baseline')&shock!=0,,]
myColours <- c(brewer.pal(3,"Greens"),brewer.pal(3,"Reds"))
my.settings <- list(superpose.polygon=list(col=myColours, border="transparent")
,superpose.symbol=list(col=myColours,pch=15, border="transparent"))
# tr.tmp<-trellis.par.get()
# str(tr.tmp)
# tr.tmp$superpose.polygon
# tr.tmp$superpose.symbol
# grouped barchart of shocked cumulative NCOR (typo fixed: 'Cummulative');
# favorable shocks lower the bars, so y-limit is sized from the Severe level
p1<-barchart(NCOR.cum~var|factor("13-Quarter Cumulative NCOR"),group=paste(Scenario,' +/- ',shock,' std',sep='')
,z=temp1,par.settings = my.settings,ylim=c(0,1.1*max(temp1$Sev))
,horizontal=FALSE,stack=FALSE,ylab='',xlab=''
,auto.key = list(x = .46, y=.88, corner = c(0,1) ,column=2,border = FALSE, lines = FALSE)
,scale=list(x=list(rot=0),y=list(relation='free'))
,yscale.components=function(...){
yc <- yscale.components.default(...)
yc$left$labels$labels <-sprintf("%s%%",yc$left$labels$at*100) ## convert to strings as pct
return(yc)}
,panel=function(x,y,subscripts,z=z,groups=groups,horizontal=horizontal,stack=stack,par.settings=par.settings,...){
panel.grid(h=-1, v=0);
panel.barchart(x,y,subscripts = subscripts,groups = groups,horizontal = horizontal,stack = stack,...)
# reference lines: unshocked Baseline (green) and Severe (red) levels
panel.abline(h=z[subscripts][1]$Base,col = 'green',lwd=2,alpha=.5)
panel.abline(h=z[subscripts][1]$Sev,col = 'red',lwd=2,alpha=.5)
}
,temp1)
print(p1)
# same chart in dollar terms (thousands)
p1<-barchart(NCO.cum/1000~var|factor("13-Quarter Cumulative NCO (k $)"),group=paste(Scenario,' +/- ',shock,' std',sep='')
,z=temp1,par.settings = my.settings,ylim=c(0,1.1*max(temp1$Sev.amt/1000))
,horizontal=FALSE,stack=FALSE,ylab='',xlab=''
,auto.key = list(x = .46, y=.88, corner = c(0,1) ,column=2,border = FALSE, lines = FALSE)
,scale=list(x=list(rot=0),y=list(relation='free'))
,panel=function(x,y,subscripts,z=z,groups=groups,horizontal=horizontal,stack=stack,par.settings=par.settings,...){
panel.grid(h=-1, v=0);
panel.barchart(x,y,subscripts = subscripts,groups = groups,horizontal = horizontal,stack = stack,...)
panel.abline(h=z[subscripts][1]$Base.amt/1000,col = 'green',lwd=2,alpha=.5)
panel.abline(h=z[subscripts][1]$Sev.amt/1000,col = 'red',lwd=2,alpha=.5)
}
,temp1)
print(p1)
```
## 3.4 Back Testing
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=11,fig.height=6}
# Back test: estimate on data through 2013 Q4, predict 2014-2016 out of sample.
# BUG FIX: `data3.bck<-data3.hist` only copies the reference for a data.table,
# so the `:=` below silently overwrote data3.hist$y. Use copy() to work on an
# independent table.
data3.bck<-copy(data3.hist)
# NOTE(review): year(Date) is numeric but compared to the string '2013 Q4';
# this works only via coercion to character ('2013' <= '2013 Q4') — confirm.
fit<-lm(fit.final,data3.bck[year(Date)<='2013 Q4',,])
data3.bck[,y:=predict(fit, newdata=.SD, type=c("response"), na.action=na.pass),]
temp0<-rbind(
data3.bck[,.(Scenario='Historic Actual',Date,y=NCOR,ENTITY_NAME),]
,data3.bck[year(Date)<='2013 Q4',.(Scenario='Back Testing In-sample Fit',Date,y=y,ENTITY_NAME),]
,data3.bck[year(Date)>'2013 Q4',.(Scenario='Back Testing Out-sample Predict',Date,y=y,ENTITY_NAME),]
)
y.max<-temp0[ENTITY_NAME=='Bank of Hope',max(y),]
y.min<-temp0[ENTITY_NAME=='Bank of Hope',min(y),]
#a <- temp0[ENTITY_NAME=='Bank of Hope',,]
# col=c('purple','blue')
col=c('pink','blue','grey')
p<-xyplot(y~as.Date(Date)|ENTITY_NAME,group=Scenario,scale=list(x=list(rot=90))
# ,par.settings = list(superpose.line = list(col=col),superpose.symbol = list(col= col))
,type=c('l','p','g'),xlab='',ylab='',ylim=c(min(1.3*y.min,-y.max*.2),1.3*y.max)
,cex=1.0
,par.settings = list(superpose.line = list(col=col,lwd=2),superpose.symbol = list(col= col))
,yscale.components=function(...){yc <- yscale.components.default(...)
yc$left$labels$labels <-sprintf("%s%%",yc$left$labels$at*100)
return(yc)}
,main='Back Testing, Testing Window = (2014,2015,2016)'
,auto.key = list(x = .66, y=.88, corner = c(0,1) ,border = FALSE, lines = TRUE)
,temp0[ENTITY_NAME=='Bank of Hope',,])
print(p)
# str(temp0)
```
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=11,fig.height=6}
# Back test: estimate on data through 2010 Q4, predict 2011-2016 out of sample.
# BUG FIX: use copy() — plain assignment aliases the data.table, so the `:=`
# below would overwrite data3.hist$y.
data3.bck<-copy(data3.hist)
fit<-lm(fit.final,data3.bck[year(Date)<='2010 Q4',,])
data3.bck[,y:=predict(fit, newdata=.SD, type=c("response"), na.action=na.pass),]
temp0<-rbind(
data3.bck[,.(Scenario='Historic Actual',Date,y=NCOR,ENTITY_NAME),]
,data3.bck[year(Date)<='2010 Q4',.(Scenario='Back Testing In-sample Fit',Date,y=y,ENTITY_NAME),]
,data3.bck[year(Date)>'2010 Q4',.(Scenario='Back Testing Out-sample Predict',Date,y=y,ENTITY_NAME),]
)
y.max<-temp0[ENTITY_NAME=='Bank of Hope',max(y),]
y.min<-temp0[ENTITY_NAME=='Bank of Hope',min(y),]
# col=c('purple','blue')
col=c('pink','blue','grey')
p<-xyplot(y~as.Date(Date)|ENTITY_NAME,group=Scenario,scale=list(x=list(rot=90))
# ,par.settings = list(superpose.line = list(col=col),superpose.symbol = list(col= col))
,type=c('l','p','g'),xlab='',ylab='',ylim=c(min(1.3*y.min,-y.max*.2),1.3*y.max)
,cex=1.0
,par.settings = list(superpose.line = list(col=col,lwd=2),superpose.symbol = list(col= col))
,yscale.components=function(...){yc <- yscale.components.default(...)
yc$left$labels$labels <-sprintf("%s%%",yc$left$labels$at*100)
return(yc)}
,main='Back Testing, Testing Window = (2011 - 2016)'
,auto.key = list(x = .66, y=.88, corner = c(0,1) ,border = FALSE, lines = TRUE)
,temp0[ENTITY_NAME=='Bank of Hope',,])
print(p)
# str(temp0)
```
## 3.5 Coefficient Stability Analysis
* Expand the estimation window step by step to check for sign changes in the coefficients and the confidence intervals of the estimates.
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=11,fig.height=4}
# Coefficient stability: refit the final model on expanding windows ending in
# each year 2009..2016 and collect estimates and standard errors.
temp0<-do.call('rbind',lapply(2009:2016,function(x){
# x='2010'
fit<-lm(fit.final,data3.hist[year(Date)<=x,,])
# str(summary(fit)$coefficients)
summary(fit)$coefficients[,"Std. Error"]
tmp0<-data.table(year=x
,var=names(fit$coefficients)
,value=summary(fit)$coefficients[,"Estimate"]
,std=summary(fit)$coefficients[,"Std. Error"]
)
return(tmp0)
}))
# intercept excluded from the stability panels
temp1<-temp0[var!='(Intercept)',,]
xyplot(value~year|var
,type=c('g','p','l')
,auto.key = list(x = .72, y=.85, corner = c(0,1) ,border = FALSE, lines = TRUE)
# ,panel = panel.superpose
,ylab=''
,layout=c(2,1)
# ,scale=list(x=list(relation='free'),y=list(relation='free'))
# ,ylim=c(-max(temp1[,abs(value)+std*1.96,])*0.1,max(temp1[,abs(value)+std*1.96,])*1.1)
# ,ylim=c(-max(data_c$upper)*0.1,max(data_c$upper)*1.1)
,par.settings = list(superpose.line = list(col=col),superpose.symbol = list(col= col))
,panel=function(x, y,col='blue',subscripts,cex, ...) {
panel.xyplot(x, y ,col='blue',...)
# zero line highlights any sign flip across estimation windows
panel.abline(h=0,col = 'grey',lwd=4,alpha=.5)
# panel.arrows(x, y-temp1$std[subscripts]*1.96, x,y+temp1$std[subscripts]*1.96, angle=90, code=3, length=0.02,col='red')
}
,temp1)
```
+ Stability by Individual graph
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=6,fig.height=4}
# Same stability series, one plot per coefficient (called for the side effect
# of printing; the returned list of 1s is discarded).
temp2<-lapply(temp1[,unique(var),],function(x){
tmp0<-temp1[var==x,,]
p<-xyplot(value~year|var
,type=c('g','p','l')
,auto.key = list(x = .72, y=.85, corner = c(0,1) ,border = FALSE, lines = TRUE)
# ,panel = panel.superpose
,ylab=''
# ,scale=list(x=list(relation='free'),y=list(relation='free'))
# ,ylim=c(min(-max(tmp0[,abs(value)+std*1.96,])*0.1,min(tmp0[,value-std*1.96,])*1.1)
# ,max(max(tmp0[,value+std*1.96,])*1.1,max(tmp0[,abs(value)+std*1.96,])*0.1)
# )
# ,ylim=c(-max(data_c$upper)*0.1,max(data_c$upper)*1.1)
,par.settings = list(superpose.line = list(col=col),superpose.symbol = list(col= col))
,panel=function(x, y,col='blue',subscripts,cex, ...) {
panel.xyplot(x, y ,col='blue',...)
panel.abline(h=0,col = 'grey',lwd=4,alpha=.5)
# panel.arrows(x, y-tmp0$std[subscripts]*1.96, x,y+tmp0$std[subscripts]*1.96, angle=90, code=3, length=0.02,col='red')
}
,tmp0);
print(p)
return(1)
})
```
## 3.6 Bootstrapping Confidence Intervals
```{r,eval=full, echo=FALSE, include=TRUE, warning=FALSE,fig.width=11,fig.height=6}
# Nonparametric bootstrap: 1000 refits of the final model on row resamples of
# the estimation panel; keep only the coefficient vectors.
set.seed(1234)
fit.list<-lapply(1:1000,function(x){
# draw a with-replacement sample of row indices the same size as the panel
sample.n<-sample(1:data3.hist[,.N,],data3.hist[,.N,],replace = TRUE)
tmp<-data3.hist[sample.n,,]
# (removed dead `dim(data3.hist)` and `tmp[,.N,keyby=.(Date)]` lines — their
# results were discarded on every one of the 1000 iterations)
fit<-lm(fit.final,tmp)
return(fit$coefficients)
})
```
+ Bootstrap Coefficient Intervals
```{r,eval=full, echo=FALSE, include=TRUE, warning=FALSE,fig.width=10,fig.height=4}
# Long table of bootstrap coefficient draws, plus a per-variable standardized
# version (estimate divided by its bootstrap standard deviation).
temp0<-do.call('rbind',lapply(fit.list,function(x){
return(data.table(var=names(x),value=x))
}))
temp0[,value.sd:=value/sd(value),keyby=var]
# Variant of stats::boxplot.stats whose whiskers are forced to the empirical
# 2.5% / 97.5% quantiles (so the bwplot below shows a 95% interval) while the
# hinge, median, confidence notch, and outlier logic stay as in base R.
boxplot.stats2 <- function(x, coef = 1.5, do.conf = TRUE, do.out = TRUE)
{
    if (coef < 0)
        stop("'coef' must not be negative")
    not_na <- !is.na(x)
    n_obs <- sum(not_na)
    five <- stats::fivenum(x, na.rm = TRUE)
    iqr_width <- diff(five[c(2, 4)])
    if (coef == 0) {
        # coef of zero means "no outlier rule": suppress the out component
        do.out <- FALSE
    } else {
        is_out <- if (is.na(iqr_width)) {
            !is.finite(x)
        } else {
            (x < (five[2L] - coef * iqr_width)) | (x > (five[4L] + coef * iqr_width))
        }
        if (any(is_out[not_na], na.rm = TRUE))
            five[c(1, 5)] <- range(x[!is_out], na.rm = TRUE)
    }
    # notch half-width as in boxplot.stats; NULL when do.conf is FALSE
    conf_int <- if (do.conf)
        five[3L] + c(-1.58, 1.58) * iqr_width / sqrt(n_obs)
    # override the whisker ends with the 95% empirical interval
    five[c(1, 5)] <- quantile(x, probs = c(0.025, 0.975))
    list(stats = five, n = n_obs, conf = conf_int,
         out = if (do.out) x[is_out & not_na] else numeric())
}
# Box-whisker plots of the bootstrap coefficient distributions, raw and
# standardized; whiskers are the 95% interval via boxplot.stats2.
# (typos fixed in the displayed titles: 'Coefiicient' -> 'Coefficient',
# 'Bootstraping' -> 'Bootstrapping')
bwplot(var~value|'Coefficient Stability by Bootstrapping (n=1000, 95% CI)'
,panel=function(x,y, ...){
panel.bwplot(x,y, ...)
panel.grid(h=-1, v=0)
# zero line: a box crossing it indicates an unstable coefficient sign
panel.abline(v=0,col = 'red',lwd=4,alpha=.3)
}
,xlab='estimate'
,ylab='coefficient'
,do.out=FALSE
,stats=boxplot.stats2
,temp0)
bwplot(var~value.sd|'Coefficient Stability by Bootstrapping (n=1000,Standardized, 95% CI)'
# ,wend=.025
,panel=function(...){
panel.bwplot(...)
panel.grid(h=-1, v=0)
panel.abline(v=0,col = 'red',lwd=4,alpha=.3)
}
,xlab='estimate'
,ylab='coefficient'
,do.out=FALSE
,stats=boxplot.stats2
,temp0)
```
+ Bootstrap confidence and prediction intervals
```{r,eval=full, echo=FALSE, include=TRUE, warning=FALSE,fig.width=11,fig.height=6}
# Bootstrap confidence (coefficient uncertainty only) and prediction intervals
# (coefficient uncertainty + resampled residual) for history and forecast.
residual.list<-data3.hist[,residual,]
set.seed(1234);
temp0<-do.call('rbind',lapply(1:(length(fit.list)),function(x){
#x=1
# splice each bootstrap coefficient vector into a copy of the final model
fit<-fit.final
fit$coefficients<-fit.list[[x]]
data3.hist[,y.ci:=predict(fit, newdata=.SD, type=c("response"), na.action=na.pass),]
data3.fcst[,y.ci:=predict(fit, newdata=.SD, type=c("response"), na.action=na.pass),]
# prediction draw adds a residual resampled from the in-sample residuals
data3.hist[,y.pi:=predict(fit, newdata=.SD, type=c("response"), na.action=na.pass)+sample(residual.list,.N,replace = TRUE),]
data3.fcst[,y.pi:=predict(fit, newdata=.SD, type=c("response"), na.action=na.pass)+sample(residual.list,.N,replace = TRUE),]
tmp0<-rbind(
data3.hist[,.(IDX=x,Scenario='Historic Fit',Date,y.ci,y.pi,ENTITY_NAME),]
,data3.fcst[,.(IDX=x,Scenario,Date,y.ci,y.pi,ENTITY_NAME),]
)
}))
# temp0[,alpha:=(1-0.95^(1/.N)),keyby=.(ENTITY_NAME,Scenario,IDX)]
# pointwise 95% intervals (the commented line above was a joint-coverage alpha)
temp0[,alpha:=.05,keyby=.(ENTITY_NAME,Scenario,IDX)]
#temp0[,unique(alpha),]
temp1<-temp0[,.(
lower.ci=quantile(y.ci,probs = mean(alpha)/2)
,upper.ci=quantile(y.ci,probs = 1-mean(alpha)/2)
,lower.pi=quantile(y.pi,probs = mean(alpha)/2)
,upper.pi=quantile(y.pi,probs = 1-mean(alpha)/2)
),keyby=.(ENTITY_NAME,Scenario,Date)]
# recompute point predictions from the un-resampled final model
data3.hist[,y:=predict(fit.final, newdata=.SD, type=c("response"), na.action=na.pass),]
temp2<-rbind(data3.hist[,.(Scenario='Historic Actual',Date,y=NCOR,ENTITY_NAME),]
,data3.hist[,.(Scenario='Historic Fit',Date,y=y,ENTITY_NAME),]
,data3.fcst[,y:=predict(fit.final, newdata=.SD, type=c("response"), na.action=na.pass),][,.(Scenario,Date,y=y,ENTITY_NAME),]
)
temp3<-merge(temp2,temp1,by=c('ENTITY_NAME','Scenario','Date'),all=TRUE)[ENTITY_NAME=='Bank of Hope',,]
# temp3[,.N,keyby=Scenario]
col=c('orange','darkgreen','darkgrey','blue','red')
xyplot(y~Date|factor('Quarterly NCOR 95% Confidence Interval'),group=Scenario
,type=c('p','g','l'),xlab='',ylab='',lwd=2
,ylim=c(min(-max(temp3$upper.ci,na.rm=TRUE)*0.2,min(temp3$lower.ci,na.rm=TRUE)*1.05),max(temp3$upper.ci,na.rm=TRUE)*1.05)
,auto.key = list(x = .72, y=.85, corner = c(0,1) ,border = FALSE, lines = TRUE)
,par.settings = list(superpose.line = list(col=col,lwd=2),superpose.symbol = list(col= col))
,panel = panel.superpose
,panel.groups=function(x, y,col,col.symbol,subscripts,cex, ...) {
# jitter separates overlapping Baseline/Severe error bars horizontally
jitter=0
# cat(subscripts);
if(temp3$Scenario[subscripts][1]=='Baseline'){jitter=-.05}
if(temp3$Scenario[subscripts][1]=='Severe'){jitter=.05}
panel.xyplot(x+jitter, y,col=col.symbol ,...)
panel.arrows(x+jitter, temp3$lower.ci[subscripts], x+jitter,temp3$upper.ci[subscripts],lwd=2,code=3,alpha=.2, angle=90, length=0.05, col=col.symbol)
}
,yscale.components=function(...){
yc <- yscale.components.default(...)
yc$left$labels$labels <-sprintf("%s%%",yc$left$labels$at*100) ## convert to strings as pct
return(yc)}
,temp3)
xyplot(y~Date|factor('Quarterly NCOR 95% Prediction Interval'),group=Scenario
,type=c('p','g','l'),xlab='',ylab='',lwd=2
,ylim=c(min(-max(temp3$upper.pi,na.rm=TRUE)*0.2,min(temp3$lower.pi,na.rm=TRUE)*1.05),max(temp3$upper.pi,na.rm=TRUE)*1.05)
,auto.key = list(x = .72, y=.85, corner = c(0,1) ,border = FALSE, lines = TRUE)
,par.settings = list(superpose.line = list(col=col,lwd=2),superpose.symbol = list(col= col))
,panel = panel.superpose
,panel.groups=function(x, y,col,col.symbol,subscripts,cex, ...) {
jitter=0
# cat(subscripts);
if(temp3$Scenario[subscripts][1]=='Baseline'){jitter=-.05}
if(temp3$Scenario[subscripts][1]=='Severe'){jitter=.05}
panel.xyplot(x+jitter, y,col=col.symbol ,...)
panel.arrows(x+jitter, temp3$lower.pi[subscripts], x+jitter,temp3$upper.pi[subscripts],lwd=2,code=3,alpha=.2, angle=90, length=0.05, col=col.symbol)
}
,yscale.components=function(...){
yc <- yscale.components.default(...)
yc$left$labels$labels <-sprintf("%s%%",yc$left$labels$at*100) ## convert to strings as pct
return(yc)}
,temp3)
```
+ Bootstrap In-sample fit and forecast confidence intervals
## 3.7 with AR term
```{r,eval=TRUE, echo=FALSE, include=TRUE, warning=FALSE,fig.width=11,fig.height=6}
## Re-estimate the final NCOR model with an AR(1) error structure via panelAR,
## then run residual diagnostics (normality, serial correlation, heteroskedasticity,
## specification).
data3.ar<-data3.hist
## NOTE(review): data.table assignment copies by reference -- data3.ar and
## data3.hist share the same underlying table, so the := calls below also add
## these columns to data3.hist. Confirm intended (copy() would isolate them).
data3.ar[,PANEL_ID:=ENTITY_NAME]
data3.ar[,TIME_IDX:=.GRP,by=.(Date)]
## Panel-specific AR(1) ('psar1') with Parks panel-corrected covariance.
fit<-panelAR(fit.final,data=as.data.frame(data3.ar), panelVar="PANEL_ID", timeVar="TIME_IDX",autoCorr='psar1', panelCorrMethod = "parks", complete.case=TRUE,dof.correction = TRUE)
print(summary(fit));
# print(summary(fit.final));
cat('\nAR(1) Term Coef (Rho):\n');
print(data.table(Panel=names(summary(fit)$rho),rho=round(summary(fit)$rho,2)))
cat('\nPanel Covariance Structure:\n')
fit.sigma<-summary(fit)$Sigma
colnames(fit.sigma)<-1:dim(fit.sigma)[2]
print(round(fit.sigma,6));
cat('\nCorrelation of Covariance:\n')
print(round(cov2cor(fit.sigma),1));
# print(round(summary(fit)$Sigma*1000000,1))
# round(cov2cor(vcov(fit)),2)
# str(fit)
# anova(fit)
## Attach fitted values / residuals back by row index (names hold row numbers;
## complete.case=TRUE means some rows may be absent).
data3.ar[as.integer(names(fit$fitted.values)),y:=fit$fitted.values];
data3.ar[as.integer(names(fit$residuals)),residual:=fit$residuals];
temp3<-rbind(data3.ar[,.(Scenario='Historic Actual',Date,y=NCOR,ENTITY_NAME),],data3.ar[,.(Scenario='Historic Fit',Date,y=y,ENTITY_NAME),]);
col=c('grey','pink')
## Actual vs AR-fitted NCOR for Bank of Hope, y axis rendered as percentages.
p<-xyplot(y~Date|ENTITY_NAME,group=Scenario,scale=list(x=list(rot=90))
# ,par.settings = list(superpose.line = list(col=col),superpose.symbol = list(col= col))
,type=c('l','p','g'),xlab='',ylab=''
# ,ylim=c(-.005,.05)
,par.settings = list(superpose.line = list(col=col,lwd=2),superpose.symbol = list(col= col))
,yscale.components=function(...){yc <- yscale.components.default(...)
yc$left$labels$labels <-sprintf("%s%%",yc$left$labels$at*100)
return(yc)}
,main=''
,auto.key = list(x = .76, y=.88, corner = c(0,1) ,border = FALSE, lines = TRUE)
,temp3[ENTITY_NAME=='Bank of Hope',,])
print(p);
# Shapiro normality test on the AR-model residuals
print(shapiro.test(fit$residuals));
# QQ-plot of residuals against the normal
qqnorm(fit$residuals);qqline(fit$residuals)
## Per-entity autocorrelation tests on the residuals (Durbin-Watson, Breusch-Godfrey).
fit.autotest<-do.call('rbind',lapply(data3.ar[,unique(ENTITY_NAME),],function(x){
#x='Bank of Hope'
tmp0<-data3.ar[ENTITY_NAME==x,,][,.(NCOR,y,residual),keyby=.(ENTITY_NAME,Date)]
tmp0.result1<-tmp0[,dwtest(residual~1),]
tmp0.result2<-tmp0[,bgtest(residual~1,order=4),]
# tmp0.result3<-tmp0[,bptest(NCOR~y),]
tmp1<-rbind(
data.table(ENTITY_NAME=x
,Test='DW Test'
,Null.Hypothesis='Errors are serially uncorrelated'
,p.value=tmp0.result1$p.value
)
,data.table(ENTITY_NAME=x
,Test='BG Test'
,Null.Hypothesis='No serial correlation of any order up to 4'
,p.value=tmp0.result2$p.value
)
)
return(tmp1)
}))
## Whole-model tests: Breusch-Pagan (heteroskedasticity) and RESET (specification).
fit.autotest<-rbind(fit.autotest
,data.table(ENTITY_NAME='Overall'
,Test='BP Test'
,Null.Hypothesis='Homoskedasticity'
,p.value=data3.ar[,bptest(fit),]$p.value
)
,data.table(ENTITY_NAME='Overall'
,Test='Reset Test'
,Null.Hypothesis='No suffer from mis-specificaiton'
,p.value=data3.ar[,resettest(fit),]$p.value
)
)
setorder(fit.autotest,Test)
print(fit.autotest);
```
Autoregressive (AR) term impact assessment
```{r,eval=TRUE, echo=FALSE, include=TRUE, warning=FALSE,fig.width=11,fig.height=6}
## Quantify how the AR(1) correction changes the scenario forecasts: decay the
## last observed residual forward by rho^h and compare "Regression Only" vs
## "With AR Correction" paths, then tabulate results and coefficients.
# data3.ar[,ENTITY_NAME:=factor(ENTITY_NAME),]
# data3[,ENTITY_NAME:=factor(ENTITY_NAME),]
#
## Align factor levels with the AR fit so predict() sees the same coding.
data3.fcst[,ENTITY_NAME:=factor(ENTITY_NAME,levels=levels(fit$model$ENTITY_NAME))]
data3.fcst[,y:=predict(fit, newdata=.SD, type=c("response"), na.action=na.pass)[['fit']],]
## Forecast horizon in quarters from the 2016 Q4 jump-off point.
data3.fcst[,DATE.IDX:=4*(Date-as.yearqtr('2016 Q4')),]
t0.res<-data3.ar[ENTITY_NAME=='Bank of Hope'&Date=='2016 Q4',.(ENTITY_NAME,Date,y,residual),][,residual,]
rho<-summary(fit)$rho['Bank of Hope']
## AR correction: the jump-off residual decays geometrically at rate rho.
data3.fcst[,ar.correction:=t0.res*rho^DATE.IDX,]
temp1<-rbind(
temp3[ENTITY_NAME=='Bank of Hope',,]
,data3.fcst[,.(Scenario,Type='Regression Only',Date,y),]
,data3.fcst[,.(Scenario,Type='With AR Correction',Date,y=(y+ar.correction)),]
,fill=TRUE
)
# dev.off()
## One color per Type-x-Scenario series, from RColorBrewer palettes.
col <- c(
brewer.pal(9,"Greys")[c(6)]
,brewer.pal(9,"RdPu")[c(3)]
,brewer.pal(9,"Oranges")[c(4)]
,brewer.pal(9,"Greens")[c(3)]
,brewer.pal(9,"Reds")[c(5)]
,brewer.pal(9,"Oranges")[c(6)]
,brewer.pal(9,"Greens")[c(6)]
,brewer.pal(9,"Reds")[c(6)]
)
# col=c('orange','darkgreen','red')
p<-xyplot(y~as.Date(Date)|factor('Scenario Forecast from AR Regression'),group=paste(Type,' - ',Scenario)
,type=c('p','g','l'),xlab='',ylab='',lwd=2
,auto.key = list(x = .52, y=.85, corner = c(0,1) ,border = FALSE, lines = TRUE)
,par.settings = list(superpose.line = list(col=col,lwd=2),superpose.symbol = list(col= col))
,yscale.components=function(...){
yc <- yscale.components.default(...)
yc$left$labels$labels <-sprintf("%s%%",yc$left$labels$at*100) ## convert to strings as pct
return(yc)}
,temp1)
print(p);
## Interactive table of the forecast and AR correction in basis points.
temp0<-data3.fcst[,.(Date=as.character(Date),ENTITY_NAME='Bank of Hope',Scenario,NCOR.BPS=round(y*10000,1),AR.TERM.BPS=round(ar.correction*10000,1)),]
datatable(temp0
,rownames = F
,extensions = c('Scroller','FixedColumns','Buttons','ColReorder')
,options = list(
#rowCallback = JS("function(r,d) {$(r).attr('height', '100px')}"),
columnDefs = list(list(
targets = c(1:4),
render = JS(
"function(data, type, row, meta) {",
"return type === 'display' && data.length > 20 ?",
"'<span title=\"' + data + '\">' + data.substr(0, 20) + '...</span>' : data;",
"}")
)), ## column index starting from 0
deferRender = TRUE,
scrollY = 300,
scroller = TRUE,
scrollX = TRUE,
fixedColumns = list(leftColumns = 2),
dom = 'Blfrtip',
buttons =
list( list(
extend = 'collection',
buttons = c('csv'),
text = 'Save as .csv file'
)), ## it doesn't work if only keep 'csv'; weird
colReorder = TRUE,
initComplete = JS(
"function(settings, json) {",
"$(this.api().table().header()).css({'font-size': '12px'});",
"$(this.api().table().body()).css({'font-size': '12px'});",
"}") ## change both header and body font size
))
## Side-by-side coefficient comparison: final OLS model vs AR model.
temp0<-rbind(data.table(model='Final',var=names(fit.final[['coefficients']]),coef=format(fit.final[['coefficients']],4))
,data.table(model='With AR',var=names(fit[['coefficients']]),coef=format(fit[['coefficients']],4))
)
temp1<-data.table(dcast(temp0,var~model,value.var = 'coef'))
datatable(temp1
,rownames = F
,extensions = c('Scroller','FixedColumns','Buttons','ColReorder')
,options = list(
#rowCallback = JS("function(r,d) {$(r).attr('height', '100px')}"),
columnDefs = list(list(
targets = c(1:2),
render = JS(
"function(data, type, row, meta) {",
"return type === 'display' && data.length > 20 ?",
"'<span title=\"' + data + '\">' + data.substr(0, 20) + '...</span>' : data;",
"}")
)), ## column index starting from 0
deferRender = TRUE,
scrollY = 300,
scroller = TRUE,
scrollX = TRUE,
fixedColumns = list(leftColumns = 1),
dom = 'Blfrtip',
buttons =
list( list(
extend = 'collection',
buttons = c('csv'),
text = 'Save as .csv file'
)), ## it doesn't work if only keep 'csv'; weird
colReorder = TRUE,
initComplete = JS(
"function(settings, json) {",
"$(this.api().table().header()).css({'font-size': '12px'});",
"$(this.api().table().body()).css({'font-size': '12px'});",
"}") ## change both header and body font size
))
# display.brewer.all()
#
# summary(fit)
# str(data3.fcst)
# str(as.data.frame(data3.ar))
# data3.ar[ENTITY_NAME=='Bank of Hope'&Date=='2016 Q4',.(ENTITY_NAME,Date,y,residual),]
# cat('\n13 Quarter Quarterly AR Correction:\n')
# round(temp1,6)
# cat('\n13 Quarter Cumulative AR Correction:\n')
# round(sum(temp1),6)
```
## 9.0 Time Spent
```{r,eval=TRUE, echo=FALSE, warning=FALSE, message=FALSE,fig.width=10, fig.height=4, results="asis"}
################################
### calculate the time spent ###
################################
time1<-Sys.time()
# cat('Time Spend:')
cat('\n')
## Elapsed wall-clock time since time0 (set earlier in the report), in minutes.
print(round(difftime(time1,time0,units='mins')))
```
<file_sep>/Adobe/ui.R
## Shiny UI for browsing assessment results: the user picks a module ID and a
## demographic dimension; three stacked plots ("plot", "plot2", "plot3") are
## rendered by the matching server outputs.
library(RColorBrewer)
library(scales)
library(lattice)
library(dplyr)
library(shiny)
library(ggplot2)
ui <- bootstrapPage(
headerPanel ("Final Result"),
sidebarPanel(
## NOTE(review): shiny_assessments is not defined in this file -- it must be
## created before the UI is built (e.g. in global.R). Verify.
selectInput("id", "Select Module ID", unique(shiny_assessments$id)),
selectInput("info", "Select Demographic Type",c("gender", "region", "highest_education",
"imd_band","age_band", "disability"))
),
mainPanel(
plotOutput("plot", width = "100%"),
plotOutput("plot2", width = "100%"),
plotOutput("plot3", width = "100%")
)
)
<file_sep>/PD/R06_PD_CI.R
##############################################################################
## File Name: R06_PD_CI.R
## Author: KZ
## Date: 5/1/2017 Created
## Purpose: To build PD model for BOH CI portfolio according to
##          "06 - ci -model.sas"
##############################################################################
#setwd("//useomvfs77/mclp/Common/Clients/Bank of Hope/Model Development/PD Models")
setwd("C:/Users/doxborrow/Desktop/BoH/Modeling/PD Models")
## Install any missing dependencies, then attach them.
## FIX: "tidyr" added -- it is require()'d below but was missing from the
## original requirements vector, so a clean machine would fail to attach it.
requirements <- c("dplyr", "reshape2", "data.table", "zoo", "ggplot2",
                  "pROC", "boot", "tidyr")
for (rr in requirements) {
  ## installed.packages() returns a matrix; the installed package names are its
  ## rownames. Matching against the whole matrix (as before) is fragile.
  if (!rr %in% rownames(installed.packages())) install.packages(rr)
}
require(dplyr)
require(reshape2)
require(data.table)
require(zoo)
require(ggplot2)
require(pROC)
require(boot)
require(tidyr)
## load data
## Quarter-end account snapshots for the base scenario; restrict to the CI
## portfolio over the development window 2008 Q1 - 2016 Q1.
ci_dev <- read.csv("./Data output/df_boh_base.csv")
ci_dev$fileDate <- as.Date(ci_dev$fileDate, "%Y-%m-%d")
ci_dev <- filter(ci_dev, portfolio_id == "CI" & as.Date(fileDate) <= as.Date("2016-03-31") &
as.Date(fileDate) >= as.Date("2008-03-31") )
## in the SAS code, also need to delete obs with loan_spread_v < 0.
## only a 9 obs (out of 288564 obs) with loan_spread_v < 0, can ignore.
## delete obs with naicsCode = 0
ci_dev <- filter(ci_dev, naicsCode != 0)
ci_dev <- filter(ci_dev, !is.na(POB))
## create final input variables
## (1) variable boh_rating1: collapse internal ratings into 4 buckets.
## R3 is the omitted reference level (no dummy below).
ci_dev$boh_rating1 <- ifelse(ci_dev$boh_rating %in% c(1,2), "R1",
ifelse(ci_dev$boh_rating %in% c(0,3,4,1000), "R2",
ifelse(ci_dev$boh_rating %in% c(2000), "R3",
ifelse(ci_dev$boh_rating %in% c(3000,4000), "R4", "error")
)))
table(ci_dev$boh_rating)
table(ci_dev$boh_rating1)
ci_dev$boh_rating1_R1 <- ifelse(ci_dev$boh_rating1 == "R1",1, 0)
ci_dev$boh_rating1_R2 <- ifelse(ci_dev$boh_rating1 == "R2",1, 0)
ci_dev$boh_rating1_R4 <- ifelse(ci_dev$boh_rating1 == "R4",1, 0)
## (2) variable naics_code3: 2-digit NAICS sector, grouped; "h" is the
## omitted reference level.
ci_dev$naicsCode_ch <- substr(as.character(ci_dev$naicsCode),1,2)
ci_dev$naicsCode_2d <- as.numeric(as.character(ci_dev$naicsCode_ch))
ci_dev$naics_code3 <- ifelse(ci_dev$naicsCode_2d %in% c(72,44,45), "c",
ifelse(ci_dev$naicsCode_2d %in% c(42), "d",
ifelse(ci_dev$naicsCode_2d %in% c(31,32,33), "g", "h")
))
table(ci_dev$naics_code3)
ci_dev$naics_code3_c <- ifelse(ci_dev$naics_code3 == "c",1, 0)
ci_dev$naics_code3_d <- ifelse(ci_dev$naics_code3 == "d",1, 0)
ci_dev$naics_code3_g <- ifelse(ci_dev$naics_code3 == "g",1, 0)
## (3) variable season: seasonality dummies from the calendar quarter q;
## spring ("sp") is the omitted reference level.
ci_dev$season <- ifelse(ci_dev$q == 1, "sp",
ifelse(ci_dev$q == 2, "su",
ifelse(ci_dev$q ==3, "fa", "wi")))
table(ci_dev$season)
ci_dev$season_fall <- ifelse(ci_dev$season == "fa", 1, 0)
ci_dev$season_summer <- ifelse(ci_dev$season == "su", 1, 0)
ci_dev$season_winter <- ifelse(ci_dev$season == "wi", 1, 0)
## (4) variable cahpi_ag_lag_3_n12: floor lagged HPI growth at -12.
ci_dev$cahpi_ag_lag_3_n12 <- ifelse(ci_dev$CAHPI_ag_lag_3 <= -12, -12, ci_dev$CAHPI_ag_lag_3)
min(ci_dev$cahpi_ag_lag_3_n12)
## (5) variable dpd0129_0: indicator that the account is current (0 days past due).
ci_dev$dpd0129_0 <- ifelse(ci_dev$dpd0129 == 0, 1, 0)
table(ci_dev$dpd0129_0)
## (6) variables POB_5 and POB_50: period-on-book capped at 5, and
## clamped to the [5, 50] range, for a piecewise seasoning effect.
ci_dev$POB_5 <- ifelse(ci_dev$POB >= 5, 5, ci_dev$POB)
ci_dev$POB_50 <- ifelse(ci_dev$POB >= 50, 50,
ifelse(ci_dev$POB <= 5, 5, ci_dev$POB))
mean(ci_dev$POB_5)
mean(ci_dev$POB_50)
## Sampling. 60% of the data
set.seed(20170501)
ci_dev_training <- ci_dev %>% sample_frac(0.6)
##Out of sample data: rows of ci_dev not drawn into the training sample.
## NOTE(review): this relies on row names surviving dplyr::sample_frac so the
## rowname anti-join is valid -- verify, since some dplyr verbs reset row names.
ci_dev_outsample <- ci_dev[-which(rownames(ci_dev) %in% rownames(ci_dev_training)),]
# Convert to data tables
ci_dev_training <- as.data.table(ci_dev_training)
ci_dev_outsample <- as.data.table(ci_dev_outsample)
## run logistic regression
## PD model: default flag y on rating, industry, seasonality, macro (CAUR, CAHPI),
## delinquency and seasoning (POB) splines.
model <- y ~ boh_rating1_R1 + boh_rating1_R2 + boh_rating1_R4 +
naics_code3_c + naics_code3_d +
season_fall + season_summer + season_winter + CAUR_yd_lag_3 +
cahpi_ag_lag_3_n12 + dpd0129_0 + POB_5 + POB_50
fit <- glm( model, family = binomial(link = "logit"), data = ci_dev_training)
summary(fit)
coef_ci <- as.data.frame(summary(fit)$coefficients)
write.csv(coef_ci, "./R Output/coef_ci.csv")
coef_ci$X <- rownames(coef_ci)
## get in-sample prediction p_hat for each account
## Linear predictor built by hand: X %*% beta (slopes) + intercept, then
## inverse-logit to get the probability.
ci_dev_training$p_hat <- as.matrix (ci_dev_training[, coef_ci$X[-1],with = FALSE]) %*% coef_ci$Estimate[-1] +
coef_ci$Estimate[1]
ci_dev_training$p_hat <- 1/(1+exp(-ci_dev_training$p_hat))
## get out-sample prediction p_hat for each account (via predict this time)
predict_out <- predict(fit, ci_dev_outsample, type="response")
ci_dev_outsample$p_hat <- predict_out
## get ROC and AUC (in-sample)
ci_roc <- roc(y ~ p_hat, data = ci_dev_training)
ci_auc <- auc(ci_dev_training$y, ci_dev_training$p_hat)
ci_auc <- round(as.numeric(as.character(ci_auc)),4)
plot(ci_roc, main =paste0("CI PD ROC IN \n AUC = ", ci_auc))
pdf("./R output/CI_ROC_AUC.pdf")
plot(ci_roc, main =paste0("CI PD ROC IN \n AUC = ", ci_auc))
dev.off()
## get ROC and AUC - out-sample
roc_out <- data.frame(predict = predict_out, y = ci_dev_outsample$y)
roc_out_plot <- roc(y ~ predict, data = roc_out)
auc_out <- round(as.numeric(roc_out_plot$auc),4)
plot(roc_out_plot, main =paste0("CI PD ROC OUT \n AUC = ", auc_out))
pdf(paste0("./R output/CI_ROC_AUC_OUT.pdf"))
plot(roc_out_plot, main =paste0("CI PD ROC OUT \n AUC = ", auc_out))
dev.off()
## get quarterly average PD
## Aggregate actual default rate (y) and fitted PD (p_hat) to quarterly means,
## then plot actual vs fitted -- in-sample first, then out-of-sample.
# ci_pd_quarterly <- ddply(ci_dev_training, .(fileDate), summarize, average_pd = mean(p_hat))
ci_pd_quarterly <- subset(ci_dev_training, select = c(fileDate, y, p_hat))
ci_pd_quarterly <- aggregate(ci_pd_quarterly[,2:3], list(ci_pd_quarterly$fileDate), mean)
setnames(ci_pd_quarterly, old = c("Group.1","y","p_hat"),
new = c("fileDate", "actual", "fitted"))
## Long format for ggplot (one line per series).
ci_pd_quarterly <- melt(ci_pd_quarterly, id = "fileDate")
cbPalette <- c("#000000", "#0072B2")
ci_pd_training_plot <- ggplot(ci_pd_quarterly, aes(x=fileDate, y = value, color=variable)) +
geom_line() + scale_colour_manual(values=cbPalette) + xlab("Date") + ylab("Default Rate") + ggtitle("Ave. Default Rate CI In-Sample") +
theme(plot.title = element_text(hjust = 0.5)) +
theme(text = element_text(size=15)) + theme(legend.title=element_blank())
ci_pd_training_plot
pdf("./R output/CI_PD_actual_fitted_insample.pdf", height = 5, width = 10)
ci_pd_training_plot
dev.off()
## get quarterly average PD out-sample (same steps on the holdout sample)
ci_pd_quarterly_out <- subset(ci_dev_outsample, select = c(fileDate, y, p_hat))
ci_pd_quarterly_out <- aggregate(ci_pd_quarterly_out[,2:3], list(ci_pd_quarterly_out$fileDate), mean)
setnames(ci_pd_quarterly_out, old = c("Group.1","y","p_hat"),
new = c("fileDate", "Actual", "Fitted"))
ci_pd_quarterly_out <- melt(ci_pd_quarterly_out, id = "fileDate")
cbPalette <- c("#000000", "#0072B2")
ci_pd_out_plot <- ggplot(ci_pd_quarterly_out, aes(x=fileDate, y = value, color=variable)) +
geom_line() + scale_colour_manual(values=cbPalette) + xlab("Date") + ylab("Default Rate") + ggtitle("Ave. Default Rate CI Out-of-Sample") +
theme(plot.title = element_text(hjust = 0.5)) +
theme(text = element_text(size=15)) + theme(legend.title=element_blank())
ci_pd_out_plot
pdf("./R output/CI_PD_actual_fitted_outsample.pdf", height = 5, width = 10)
ci_pd_out_plot
dev.off()
###########################################
## Forecast for 3 scenarios
## For each scenario file: rebuild the same engineered features as on the
## development data, score every account with the estimated coefficients, and
## aggregate to quarterly mean PD. Results are assign()'d per scenario, then
## combined with the historical series and plotted.
## NOTE(review): the feature-engineering steps below duplicate the development
## block verbatim -- a shared helper function would remove the duplication.
for(scenario in c("base", "adverse", "severe")){
print(paste0("==== ", scenario, " ===="))
ci_forecast <- read.csv(paste0("./Data output/df_boh_",scenario, ".csv"))
ci_forecast$fileDate <- as.Date(ci_forecast$fileDate, "%Y-%m-%d")
## Forecast horizon: quarters after the development cut-off (2016-03-31).
ci_forecast <- filter(ci_forecast, portfolio_id == "CI" & as.Date(fileDate) > as.Date("2016-03-31") )
## delete obs with naicsCode = 0
ci_forecast <- filter(ci_forecast, naicsCode != 0)
ci_forecast <- filter(ci_forecast, !is.na(POB))
## create final input variables
## (1) variable boh_rating1
ci_forecast$boh_rating1 <- ifelse(ci_forecast$boh_rating %in% c(1,2), "R1",
ifelse(ci_forecast$boh_rating %in% c(0,3,4,1000), "R2",
ifelse(ci_forecast$boh_rating %in% c(2000), "R3",
ifelse(ci_forecast$boh_rating %in% c(3000,4000), "R4", "error")
)))
ci_forecast$boh_rating1_R1 <- ifelse(ci_forecast$boh_rating1 == "R1",1, 0)
ci_forecast$boh_rating1_R2 <- ifelse(ci_forecast$boh_rating1 == "R2",1, 0)
ci_forecast$boh_rating1_R4 <- ifelse(ci_forecast$boh_rating1 == "R4",1, 0)
## (2) variable naics_code3
ci_forecast$naicsCode_ch <- substr(as.character(ci_forecast$naicsCode),1,2)
ci_forecast$naicsCode_2d <- as.numeric(as.character(ci_forecast$naicsCode_ch))
ci_forecast$naics_code3 <- ifelse(ci_forecast$naicsCode_2d %in% c(72,44,45), "c",
ifelse(ci_forecast$naicsCode_2d %in% c(42), "d",
ifelse(ci_forecast$naicsCode_2d %in% c(31,32,33), "g", "h")
))
ci_forecast$naics_code3_c <- ifelse(ci_forecast$naics_code3 == "c",1, 0)
ci_forecast$naics_code3_d <- ifelse(ci_forecast$naics_code3 == "d",1, 0)
ci_forecast$naics_code3_g <- ifelse(ci_forecast$naics_code3 == "g",1, 0)
## (3) variable season
ci_forecast$season <- ifelse(ci_forecast$q == 1, "sp",
ifelse(ci_forecast$q == 2, "su",
ifelse(ci_forecast$q ==3, "fa", "wi")))
ci_forecast$season_fall <- ifelse(ci_forecast$season == "fa", 1, 0)
ci_forecast$season_summer <- ifelse(ci_forecast$season == "su", 1, 0)
ci_forecast$season_winter <- ifelse(ci_forecast$season == "wi", 1, 0)
## (4) variable cahpi_ag_lag_3_n12
ci_forecast$cahpi_ag_lag_3_n12 <- ifelse(ci_forecast$CAHPI_ag_lag_3 <= -12, -12, ci_forecast$CAHPI_ag_lag_3)
## (5) variable dpd0129_0
ci_forecast$dpd0129_0 <- ifelse(ci_forecast$dpd0129 == 0, 1, 0)
## (6) variables POB_5 and POB_50
ci_forecast$POB_5 <- ifelse(ci_forecast$POB >= 5, 5, ci_forecast$POB)
ci_forecast$POB_50 <- ifelse(ci_forecast$POB >= 50, 50,
ifelse(ci_forecast$POB <= 5, 5, ci_forecast$POB))
## get pd forecast p_hat for each account
## Manual linear predictor X %*% beta + intercept, then inverse-logit.
ci_forecast <- as.data.table(ci_forecast)
ci_forecast$p_hat <- as.matrix (ci_forecast[, coef_ci$X[-1],with = FALSE]) %*% coef_ci$Estimate[-1] +
coef_ci$Estimate[1]
ci_forecast$p_hat <- 1/(1+exp(-ci_forecast$p_hat))
## get quarterly average PD
ci_pd_quarterly_9Q <- subset(ci_forecast, select = c(fileDate, p_hat))
ci_pd_quarterly_9Q <- aggregate(ci_pd_quarterly_9Q[,2], list(ci_pd_quarterly_9Q$fileDate), mean)
setnames(ci_pd_quarterly_9Q, old = c("Group.1","p_hat"),
new = c("fileDate", "value"))
ci_pd_quarterly_9Q$variable <- scenario
ci_pd_quarterly_9Q <- ci_pd_quarterly_9Q[,c(1,3,2)]
## Stash the result per scenario, e.g. ci_pd_quarterly_base.
assign(paste0("ci_pd_quarterly_",scenario), ci_pd_quarterly_9Q)
}
ci_pd_quarterly_9Q <- rbind(ci_pd_quarterly_base, ci_pd_quarterly_adverse, ci_pd_quarterly_severe)
ci_pd_quarterly_all <- rbind(ci_pd_quarterly, ci_pd_quarterly_9Q)
setnames(ci_pd_quarterly_all, old = c("variable", "value"),
new = c("scenario","PD"))
## final plot: history (actual + fitted) plus the three scenario forecasts.
cbPalette <- c("#000000", "#0072B2", "#006600", "#E69F00", "#D55E00")
ci_pd_plot <- ggplot(ci_pd_quarterly_all, aes(x = fileDate, y = PD, color = scenario)) +
geom_line() + scale_colour_manual(values=cbPalette) +
ggtitle("BOH CI PD") + xlab("Date") + ylab("Default Rate") + ggtitle("Average Default Rate CI") +
theme(plot.title = element_text(hjust = 0.5)) +
theme(text = element_text(size=15))
ci_pd_plot
pdf("./R output/CI_PD_actual_fitted_forecast.pdf", height = 5, width = 10)
ci_pd_plot
dev.off()
## output results
write.csv(ci_pd_quarterly_all, "./R output/CI_PD_quarterly_actual_fitted_forecast.csv", row.names = FALSE)
# Summary stats per sample
## Compare mean/sd/max/min/count of the model variables across the full data,
## the training sample and the test sample; written to ci_df_sample_stats.csv.
options(scipen=999)
#sum_nms <- c("y","boh_rating1_R1","boh_rating1_R2","boh_rating1_R1","boh_rating1_R2","boh_rating1_R4","naics_code3_c","naics_code3_d", "season_fall","season_summer","season_winter","CAUR_yd_lag_3","cahpi_ag_lag_3_n12","dpd0129_0","POB_5","POB_50")
sum_nms <- c("y","season_fall","season_summer","season_winter","CAUR_yd_lag_3","cahpi_ag_lag_3_n12","dpd0129","POB")
training_df <- as.data.frame.matrix(ci_dev_training)
testing_df <- as.data.frame.matrix(ci_dev_outsample)
summary(training_df)
## Helper: apply `fn` column-wise to the summary variables of `df`, rounded to
## 4 decimals (rounding is a no-op for the integer observation counts).
## Replaces 15 near-identical apply() lines; the result variables keep their
## original names so any downstream use is unaffected.
col_stat <- function(df, fn) {
  apply(df[, which(colnames(df) %in% sum_nms), drop = FALSE], 2,
        function(x) round(fn(x), 4))
}
# Make the summary stats table between the samples
ci_dev_mean     <- col_stat(ci_dev,      mean)
ci_dev_in_mean  <- col_stat(training_df, mean)
ci_dev_out_mean <- col_stat(testing_df,  mean)
ci_dev_sd       <- col_stat(ci_dev,      sd)
ci_dev_in_sd    <- col_stat(training_df, sd)
ci_dev_out_sd   <- col_stat(testing_df,  sd)
ci_dev_max      <- col_stat(ci_dev,      max)
ci_dev_in_max   <- col_stat(training_df, max)
ci_dev_out_max  <- col_stat(testing_df,  max)
ci_dev_min      <- col_stat(ci_dev,      min)
ci_dev_in_min   <- col_stat(training_df, min)
ci_dev_out_min  <- col_stat(testing_df,  min)
ci_dev_n        <- col_stat(ci_dev,      length)
ci_dev_in_n     <- col_stat(training_df, length)
ci_dev_out_n    <- col_stat(testing_df,  length)
ci_df_sample_stats <- rbind(
  ci_dev_mean, ci_dev_in_mean, ci_dev_out_mean,
  ci_dev_sd, ci_dev_in_sd, ci_dev_out_sd,
  ci_dev_max, ci_dev_in_max, ci_dev_out_max,
  ci_dev_min, ci_dev_in_min, ci_dev_out_min,
  ci_dev_n, ci_dev_in_n, ci_dev_out_n
)
rownames(ci_df_sample_stats) <- c("Mean (All Obs)","Mean (Training)","Mean (Test)","SD (All Obs)","SD (Training)","SD (Test)","Max (All Obs)","Max (Training)","Max (Test)","Min (All Obs)","Min (Training)","Min (Test)","Obs (All Obs)","Obs (Training)","Obs (Test)")
write.csv(ci_df_sample_stats, "./R output/ci_df_sample_stats.csv")
##################
# Plots
# Generate Data profiling bins
## Discretize the raw drivers on the full development data for profiling plots.
ci_dev$pob_bins <- cut(ci_dev$POB,breaks = 6)
ci_dev$caur_bins <- cut(ci_dev$CAUR_yd,breaks = 14)
breaks_hpi = c(-24,-12,-2,3,7,10)
ci_dev$cahpi_bins <- cut(ci_dev$CAHPI_ag,breaks = breaks_hpi)
## Only bin accounts at most 29 days past due; others become NA.
ci_dev$pastdue_bins_1_29 <- ifelse(ci_dev$dpd0129<=29,ci_dev$dpd0129,NA)
ci_dev$pastdue_bins <- cut(ci_dev$pastdue_bins_1_29,breaks = 5) #days past due
## NAICS sector groups by 2-digit code: 42 -> G1; 31-33 -> G2; 72/44/45 -> G3.
ci_dev$naics_grp <- ifelse(as.numeric(substr(ci_dev$naicsCode,start=1,stop=2)) == 42,"G1",
ifelse(as.numeric(substr(ci_dev$naicsCode,start=1,stop=2)) %in% c(31,32,33) ,"G2",
ifelse(as.numeric(substr(ci_dev$naicsCode,start=1,stop=2)) %in% c(72,44,45),"G3","Other")))
# Generate in sample bins:
#ci_dev_training$risk_bins <- ifelse(ci_dev_training$boh_rating1_R1==1,"R1",ifelse(ci_dev_training$boh_rating1_R2==1,"R2","R4"))
ci_dev_training$risk_bins <- ifelse(ci_dev_training$boh_rating1=="R1","R1",ifelse(ci_dev_training$boh_rating1=="R2","R2",ifelse(ci_dev_training$boh_rating1=="R3","R3",ifelse(ci_dev_training$boh_rating1=="R4","R4","Other"))))
ci_dev_training$season_bins <- ifelse(ci_dev_training$season_fall==1,"Fall",ifelse(ci_dev_training$season_summer==1,"Summer",ifelse(ci_dev_training$season_winter==1,"Winter","Other")))
ci_dev_training$caur_bins <- cut(ci_dev_training$CAUR_yd_lag_3,breaks = 2)
ci_dev_training$cahpi_bins <- cut(ci_dev_training$cahpi_ag_lag_3_n12,breaks = 3)
ci_dev_training$pob5_bins <- cut(ci_dev_training$POB_5,breaks = 2)
ci_dev_training$pob50_bins <- cut(ci_dev_training$POB_50,breaks = 6)
ci_dev_training$pob_bins <- cut(ci_dev_training$POB,breaks = 6)
ci_dev_training$naics_grp <- ifelse(as.numeric(substr(ci_dev_training$naicsCode,start=1,stop=2)) == 42,"G1",
ifelse(as.numeric(substr(ci_dev_training$naicsCode,start=1,stop=2)) %in% c(31,32,33) ,"G2",
ifelse(as.numeric(substr(ci_dev_training$naicsCode,start=1,stop=2)) %in% c(72,44,45),"G3","Other")))
# Generate out sample bins:
## Same binning on the holdout sample so in/out profiles are directly comparable.
#ci_dev_outsample$risk_bins <- ifelse(ci_dev_outsample$boh_rating1_R1==1,"R1",ifelse(ci_dev_outsample$boh_rating1_R2==1,"R2","R4"))
ci_dev_outsample$risk_bins <- ifelse(ci_dev_outsample$boh_rating1=="R1","R1",ifelse(ci_dev_outsample$boh_rating1=="R2","R2",ifelse(ci_dev_outsample$boh_rating1=="R3","R3",ifelse(ci_dev_outsample$boh_rating1=="R4","R4","Other"))))
ci_dev_outsample$season_bins <- ifelse(ci_dev_outsample$season_fall==1,"Fall",ifelse(ci_dev_outsample$season_summer==1,"Summer",ifelse(ci_dev_outsample$season_winter==1,"Winter","Other")))
ci_dev_outsample$caur_bins <- cut(ci_dev_outsample$CAUR_yd_lag_3,breaks = 2)
ci_dev_outsample$cahpi_bins <- cut(ci_dev_outsample$cahpi_ag_lag_3_n12,breaks = 3)
ci_dev_outsample$pob5_bins <- cut(ci_dev_outsample$POB_5,breaks = 2)
ci_dev_outsample$pob50_bins <- cut(ci_dev_outsample$POB_50,breaks = 6)
ci_dev_outsample$pob_bins <- cut(ci_dev_outsample$POB,breaks = 6)
ci_dev_outsample$naics_grp <- ifelse(as.numeric(substr(ci_dev_outsample$naicsCode,start=1,stop=2)) == 42,"G1",
ifelse(as.numeric(substr(ci_dev_outsample$naicsCode,start=1,stop=2)) %in% c(31,32,33) ,"G2",
ifelse(as.numeric(substr(ci_dev_outsample$naicsCode,start=1,stop=2)) %in% c(72,44,45),"G3","Other")))
# DATA PROFILING PLOTS ###################################
#Making average actual default and plot df by data and bin - DATA PROFILING
# POB
## Per date and POB bin: count defaulters and non-defaulters, compute a default
## rate, then average over dates and plot rate (line) vs obs count (bars).
defaulters <- ci_dev %>% group_by(fileDate, pob_bins) %>% summarise(Defaulters = sum(y)) %>% data.frame()
nondefaulters <- ci_dev %>% group_by(fileDate, pob_bins) %>% filter(y==0) %>% count() %>% data.frame()
actual_df <- merge(defaulters, nondefaulters, by.x = c("fileDate","pob_bins"), by.y = c("fileDate","pob_bins"))
actual_df$total_obs <- actual_df$Defaulters+actual_df$n
## NOTE(review): the denominator is lag(n) -- the PREVIOUS date's non-defaulter
## count within the bin (the commented alternative uses the same date's n).
## Presumably defaults over the prior-period at-risk population; confirm.
actual_df <- actual_df %>% group_by(pob_bins) %>% mutate(pd_actual = 100*Defaulters/lag(n)) %>% data.frame() %>% na.omit
#actual_df <- actual_df %>% group_by(pob_bins) %>% mutate(pd_actual = 100*Defaulters/n) %>% data.frame() %>% na.omit
colnames(actual_df) <- c("fileDate","bins","Defaulters","Nondefaulters","Observations","PD_Actual")
df <- actual_df %>% group_by(bins) %>% summarise(pd_actual_m = mean(PD_Actual), obs_sum = sum(Observations)) %>% data.frame()
colnames(df) <- c("POB", "PD", "Observations")
# Start Plot
## Dual-axis base plot: default-rate line (left axis) over obs-count bars (right axis).
blue <- rgb(0, 0, 1, alpha=0.2)
pdf(paste("./R output", paste("CI Default Rate - ",colnames(df)[1],"_profile.pdf", sep=""),sep ="/"), height = 5, width = 10)
par(mar = c(5,5,2,5))
with(df, plot(as.numeric(row.names(df)), PD, col="green4", type = "l", xaxt = "n",lwd=2,main = paste("Default Rate",colnames(df)[1], sep=" - "),
ylab="Default Rate (%)",xlab=names(df)[1],
ylim=c(0,max(apply(df[,2,drop=F], 2, max)))))
par(new = T)
bp <- with(df, barplot(df$Observations,axes=F, xlab=NA, ylab=NA, col=blue))
axis(side = 4)
mtext(side = 4, line = 3, 'Observation Count')
legend("topleft",
legend=c("PD","Obs."),
lty=1,lwd=5, col=c("green4",blue))
axis(1, at=bp, labels=df[,1])
dev.off()
# End Plot
#Making average actual default and plot df by data and bin - DATA PROFILING
# CAUR
## Same profiling recipe as the other drivers, on hand-chosen CAUR breaks:
## per-date/bin default rate averaged over dates, plotted against obs counts.
caur_breaks <- c(-1.25,-0.75,0,1,2,3)
ci_dev$caur_bins <- cut(ci_dev$CAUR_yd,breaks = caur_breaks)
defaulters <- ci_dev %>% group_by(fileDate, caur_bins) %>% summarise(Defaulters = sum(y)) %>% data.frame()
nondefaulters <- ci_dev %>% group_by(fileDate, caur_bins) %>% filter(y==0) %>% count() %>% data.frame()
actual_df <- merge(defaulters, nondefaulters, by.x = c("fileDate","caur_bins"), by.y = c("fileDate","caur_bins"))
actual_df$total_obs <- actual_df$Defaulters+actual_df$n
## NOTE(review): denominator is the previous date's non-defaulter count (lag(n));
## confirm intended vs the commented same-period variant below.
actual_df <- actual_df %>% group_by(caur_bins) %>% mutate(pd_actual = 100*Defaulters/lag(n)) %>% data.frame() %>% na.omit
#actual_df <- actual_df %>% group_by(caur_bins) %>% mutate(pd_actual = 100*Defaulters/n) %>% data.frame() %>% na.omit
colnames(actual_df) <- c("fileDate","bins","Defaulters","Nondefaulters","Observations","PD_Actual")
df <- actual_df %>% group_by(bins) %>% summarise(pd_actual_m = mean(PD_Actual), obs_sum = sum(Observations)) %>% data.frame()
colnames(df) <- c("CAUR", "PD", "Observations")
# Start Plot
blue <- rgb(0, 0, 1, alpha=0.2)
pdf(paste("./R output", paste("CI Default Rate - ",colnames(df)[1],"_profile.pdf", sep=""),sep ="/"), height = 5, width = 10)
par(mar = c(5,5,2,5))
with(df, plot(as.numeric(row.names(df)), PD, col="green4", type = "l", xaxt = "n",lwd=2,main = paste("Default Rate",colnames(df)[1], sep=" - "),
ylab="Default Rate (%)",xlab=names(df)[1],
ylim=c(0,max(apply(df[,2,drop=F], 2, max)))))
par(new = T)
bp <- with(df, barplot(df$Observations,axes=F, xlab=NA, ylab=NA, col=blue))
axis(side = 4)
mtext(side = 4, line = 3, 'Observation Count')
legend("topright",
legend=c("PD","Obs."),
lty=1,lwd=5, col=c("green4",blue))
axis(1, at=bp, labels=df[,1])
dev.off()
# End Plot
#Making average actual default and plot df by data and bin - DATA PROFILING
# CAHPI
## Same profiling recipe for CAHPI (bins built earlier from breaks_hpi):
## per-date/bin default rate averaged over dates, plotted against obs counts.
defaulters <- ci_dev %>% group_by(fileDate, cahpi_bins) %>% summarise(Defaulters = sum(y)) %>% data.frame()
nondefaulters <- ci_dev %>% group_by(fileDate, cahpi_bins) %>% filter(y==0) %>% count() %>% data.frame()
actual_df <- merge(defaulters, nondefaulters, by.x = c("fileDate","cahpi_bins"), by.y = c("fileDate","cahpi_bins"))
actual_df$total_obs <- actual_df$Defaulters+actual_df$n
## NOTE(review): denominator is the previous date's non-defaulter count (lag(n));
## confirm intended vs the commented same-period variant below.
actual_df <- actual_df %>% group_by(cahpi_bins) %>% mutate(pd_actual = 100*Defaulters/lag(n)) %>% data.frame() %>% na.omit
#actual_df <- actual_df %>% group_by(cahpi_bins) %>% mutate(pd_actual = 100*Defaulters/n) %>% data.frame() %>% na.omit
colnames(actual_df) <- c("fileDate","bins","Defaulters","Nondefaulters","Observations","PD_Actual")
df <- actual_df %>% group_by(bins) %>% summarise(pd_actual_m = mean(PD_Actual), obs_sum = sum(Observations)) %>% data.frame()
colnames(df) <- c("CAHPI", "PD", "Observations")
# Start Plot
blue <- rgb(0, 0, 1, alpha=0.2)
pdf(paste("./R output", paste("CI Default Rate - ",colnames(df)[1],"_profile.pdf", sep=""),sep ="/"), height = 5, width = 10)
par(mar = c(5,5,2,5))
with(df, plot(as.numeric(row.names(df)), PD, col="green4", type = "l", xaxt = "n",lwd=2,main = paste("Default Rate",colnames(df)[1], sep=" - "),
ylab="Default Rate (%)",xlab=names(df)[1],
ylim=c(0,max(apply(df[,2,drop=F], 2, max)))))
par(new = T)
bp <- with(df, barplot(df$Observations,axes=F, xlab=NA, ylab=NA, col=blue))
axis(side = 4)
mtext(side = 4, line = 3, 'Observation Count')
legend("topleft",
legend=c("PD","Obs."),
lty=1,lwd=5, col=c("green4",blue))
axis(1, at=bp, labels=df[,1])
dev.off()
# End Plot
#Making average actual default and plot df by data and bin - DATA PROFILING
# PAST DUE
# --- Data profiling: past-due status (pastdue_bins) ------------------------
# Mean actual PD (%) per past-due bucket, line over observation-count bars.
defaulters <- ci_dev %>% group_by(fileDate, pastdue_bins) %>% summarise(Defaulters = sum(y)) %>% data.frame()
nondefaulters <- ci_dev %>% group_by(fileDate, pastdue_bins) %>% filter(y==0) %>% count() %>% data.frame()
actual_df <- merge(defaulters, nondefaulters, by.x = c("fileDate","pastdue_bins"), by.y = c("fileDate","pastdue_bins"))
actual_df$total_obs <- actual_df$Defaulters+actual_df$n
# NOTE(review): denominator is lag(n) -- previous period's nondefaulter
# count per bin; confirm intended (see commented variants elsewhere).
actual_df <- actual_df %>% group_by(pastdue_bins) %>% mutate(pd_actual = 100*Defaulters/lag(n)) %>% data.frame() %>% na.omit
colnames(actual_df) <- c("fileDate","bins","Defaulters","Nondefaulters","Observations","PD_Actual")
df <- actual_df %>% group_by(bins) %>% summarise(pd_actual_m = mean(PD_Actual), obs_sum = sum(Observations)) %>% data.frame()
colnames(df) <- c("Past Due", "PD", "Observations")
# Start Plot
blue <- rgb(0, 0, 1, alpha=0.2)
pdf(paste("./R output", paste("CI Default Rate - ",colnames(df)[1],"_profile.pdf", sep=""),sep ="/"), height = 5, width = 10)
par(mar = c(5,5,2,5))
with(df, plot(as.numeric(row.names(df)), PD, col="green4", type = "l", xaxt = "n",lwd=2,main = paste("Default Rate",colnames(df)[1], sep=" - "),
ylab="Default Rate (%)",xlab=names(df)[1],
ylim=c(0,max(apply(df[,2,drop=F], 2, max)))))
par(new = T)
bp <- with(df, barplot(df$Observations,axes=F, xlab=NA, ylab=NA, col=blue))
axis(side = 4)
mtext(side = 4, line = 3, 'Observation Count')
legend("topleft",
legend=c("PD","Obs."),
lty=1,lwd=5, col=c("green4",blue))
axis(1, at=bp, labels=df[,1])
dev.off()
# End Plot
#Making average actual default and plot df by data and bin - DATA PROFILING
# LOAN RATING
# --- Data profiling: internal loan rating (boh_rating) ---------------------
# Mean actual PD (%) per rating grade, line over observation-count bars.
defaulters <- ci_dev %>% group_by(fileDate, boh_rating) %>% summarise(Defaulters = sum(y)) %>% data.frame()
nondefaulters <- ci_dev %>% group_by(fileDate, boh_rating) %>% filter(y==0) %>% count() %>% data.frame()
actual_df <- merge(defaulters, nondefaulters, by.x = c("fileDate","boh_rating"), by.y = c("fileDate","boh_rating"))
actual_df$total_obs <- actual_df$Defaulters+actual_df$n
# NOTE(review): denominator is lag(n) (previous period's nondefaulters
# within each rating) -- confirm intended.
actual_df <- actual_df %>% group_by(boh_rating) %>% mutate(pd_actual = 100*Defaulters/lag(n)) %>% data.frame() %>% na.omit
colnames(actual_df) <- c("fileDate","bins","Defaulters","Nondefaulters","Observations","PD_Actual")
df <- actual_df %>% group_by(bins) %>% summarise(pd_actual_m = mean(PD_Actual), obs_sum = sum(Observations)) %>% data.frame()
colnames(df) <- c("Loan Rating", "PD", "Observations")
# Start Plot
blue <- rgb(0, 0, 1, alpha=0.2)
pdf(paste("./R output", paste("CI Default Rate - ",colnames(df)[1],"_profile.pdf", sep=""),sep ="/"), height = 5, width = 10)
par(mar = c(5,5,2,5))
with(df, plot(as.numeric(row.names(df)), PD, col="green4", type = "l", xaxt = "n",lwd=2,main = paste("Default Rate",colnames(df)[1], sep=" - "),
ylab="Default Rate (%)",xlab=names(df)[1],
ylim=c(0,max(apply(df[,2,drop=F], 2, max)))))
par(new = T)
bp <- with(df, barplot(df$Observations,axes=F, xlab=NA, ylab=NA, col=blue))
axis(side = 4)
mtext(side = 4, line = 3, 'Observation Count')
legend("topleft",
legend=c("PD","Obs."),
lty=1,lwd=5, col=c("green4",blue))
axis(1, at=bp, labels=df[,1])
dev.off()
# End Plot
# IN SAMPLE PLOTS #######################
#Making average actual default and plot df by data and bin - IN SAMPLE
# Risk IN
# --- In-sample: actual vs. estimated PD by risk_bins -----------------------
# Actual PD (Defaulters / lag(n) per bin, averaged over fileDates) is
# compared against the mean fitted probability p_hat per bin, drawn as two
# lines over observation-count bars.
defaulters <- ci_dev_training %>% group_by(fileDate, risk_bins) %>% summarise(Defaulters = sum(y)) %>% data.frame()
nondefaulters <- ci_dev_training %>% group_by(fileDate, risk_bins) %>% filter(y==0) %>% count() %>% data.frame()
actual_df <- merge(defaulters, nondefaulters, by.x = c("fileDate","risk_bins"), by.y = c("fileDate","risk_bins"))
# NOTE(review): denominator is lag(n) -- previous fileDate's nondefaulter
# count within each bin; confirm intended.
actual_df <- actual_df %>% group_by(risk_bins) %>% mutate(pd_actual = Defaulters/lag(n)) %>% data.frame() %>% na.omit
colnames(actual_df) <- c("fileDate","bins","Defaulters","Nondefaulters","PD_Actual")
actual_df <- aggregate(actual_df$PD_Actual, list(actual_df$bins), mean)
colnames(actual_df) <- c("bins","PD_Actual")
# Observation numbers and estimate and final plot df
estimate_df <- as.data.frame(count(ci_dev_training, risk_bins))
estimate_df$mean <- aggregate(ci_dev_training$p_hat, list(ci_dev_training$risk_bins), mean)[,2]
colnames(estimate_df) <- c("bins","Observations","Mean")
df <- merge(estimate_df, actual_df)
colnames(df) <- c("Risk", "Observations", "Estimate", "Actual")
# Drop the R4 bucket from the plot -- presumably too sparse; TODO confirm.
df <- df %>% filter(Risk != "R4")
# Start Plot
green <- rgb(.5, 1, .5, alpha=0.2)
pdf(paste("./R output", paste("CI Default Rate - ",colnames(df)[1],".pdf", sep=""),sep ="/"), height = 5, width = 10)
par(mar = c(5,5,2,5))
with(df, plot(as.numeric(row.names(df)), Estimate, col="red", type = "l", xaxt = "n",lwd=2,main = paste("In-sample: Default Rate",colnames(df)[1], sep=" - "),
ylab="Default Rate",xlab=names(df)[1],
ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
with(df, plot(as.numeric(row.names(df)), Actual, col="blue", xaxt = "n",type = "l", lwd=2,
ylab="", xlab="", ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
bp <- with(df, barplot(df$Observations,axes=F, xlab=NA, ylab=NA, col=green))
axis(side = 4)
mtext(side = 4, line = 3, 'Observation Count')
legend("topleft",
legend=c("Estimate", "Actual","Obs."),
lty=1,lwd=5, col=c("red3","blue", green))
axis(1, at=bp, labels=df[,1])
dev.off()
# End Plot
# Season IN
# --- In-sample: actual vs. estimated PD by season_bins ---------------------
# Same recipe as the Risk IN section above.
defaulters <- ci_dev_training %>% group_by(fileDate, season_bins) %>% summarise(Defaulters = sum(y)) %>% data.frame()
nondefaulters <- ci_dev_training %>% group_by(fileDate, season_bins) %>% filter(y==0) %>% count() %>% data.frame()
actual_df <- merge(defaulters, nondefaulters, by.x = c('fileDate','season_bins'), by.y = c('fileDate','season_bins'))
# NOTE(review): lag(n) denominator -- confirm intended.
actual_df <- actual_df %>% group_by(season_bins) %>% mutate(pd_actual = Defaulters/lag(n)) %>% data.frame() %>% na.omit
colnames(actual_df) <- c("fileDate","bins","Defaulters","Nondefaulters","PD_Actual")
actual_df <- aggregate(actual_df$PD_Actual, list(actual_df$bins), mean)
colnames(actual_df) <- c("bins","PD_Actual")
# Observation numbers and estimate and final plot df
estimate_df <- as.data.frame(count(ci_dev_training, season_bins))
estimate_df$mean <- aggregate(ci_dev_training$p_hat, list(ci_dev_training$season_bins), mean)[,2]
colnames(estimate_df) <- c("bins","Observations","Mean")
df <- merge(estimate_df, actual_df)
colnames(df) <- c("Season", "Observations", "Estimate", "Actual")
# Start Plot
green <- rgb(.5, 1, .5, alpha=0.2)
pdf(paste("./R output", paste("CI Default Rate - ",colnames(df)[1],".pdf", sep=""),sep ="/"), height = 5, width = 10)
par(mar = c(5,5,2,5))
with(df, plot(as.numeric(row.names(df)), Estimate, col="red", type = "l", xaxt = "n",lwd=2,main = paste("In-sample: Default Rate",colnames(df)[1], sep=" - "),
ylab="Default Rate",xlab=names(df)[1],
ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
with(df, plot(as.numeric(row.names(df)), Actual, col="blue", xaxt = "n",type = "l", lwd=2,
ylab="", xlab="", ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
bp <- with(df, barplot(df$Observations,axes=F, xlab=NA, ylab=NA, col=green))
axis(side = 4)
mtext(side = 4, line = 3, 'Observation Count')
legend("topleft",
legend=c("Estimate", "Actual","Obs."),
lty=1,lwd=5, col=c("red3","blue", green))
axis(1, at=bp, labels=df[,1])
dev.off()
# End Plot
# CA UR IN
# --- In-sample: actual vs. estimated PD by caur_bins -----------------------
defaulters <- ci_dev_training %>% group_by(fileDate, caur_bins) %>% summarise(Defaulters = sum(y)) %>% data.frame()
nondefaulters <- ci_dev_training %>% group_by(fileDate, caur_bins) %>% filter(y==0) %>% count() %>% data.frame()
actual_df <- merge(defaulters, nondefaulters, by.x = c('fileDate','caur_bins'), by.y = c('fileDate','caur_bins'))
# NOTE(review): lag(n) denominator -- confirm intended.
actual_df <- actual_df %>% group_by(caur_bins) %>% mutate(pd_actual = Defaulters/lag(n)) %>% data.frame() %>% na.omit
colnames(actual_df) <- c("fileDate","bins","Defaulters","Nondefaulters","PD_Actual")
actual_df <- aggregate(actual_df$PD_Actual, list(actual_df$bins), mean)
colnames(actual_df) <- c("bins","PD_Actual")
# Observation numbers and estimate and final plot df
estimate_df <- as.data.frame(count(ci_dev_training, caur_bins))
estimate_df$mean <- aggregate(ci_dev_training$p_hat, list(ci_dev_training$caur_bins), mean)[,2]
colnames(estimate_df) <- c("bins","Observations","Mean")
df <- merge(estimate_df, actual_df)
colnames(df) <- c("CA_UR", "Observations", "Estimate", "Actual")
# Start Plot
green <- rgb(.5, 1, .5, alpha=0.2)
pdf(paste("./R output", paste("CI Default Rate - ",colnames(df)[1],".pdf", sep=""),sep ="/"), height = 5, width = 10)
par(mar = c(5,5,2,5))
with(df, plot(as.numeric(row.names(df)), Estimate, col="red", type = "l", xaxt = "n",lwd=2,main = paste("In-sample: Default Rate",colnames(df)[1], sep=" - "),
ylab="Default Rate",xlab=names(df)[1],
ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
with(df, plot(as.numeric(row.names(df)), Actual, col="blue", xaxt = "n",type = "l", lwd=2,
ylab="", xlab="", ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
bp <- with(df, barplot(df$Observations,axes=F, xlab=NA, ylab=NA, col=green))
axis(side = 4)
mtext(side = 4, line = 3, 'Observation Count')
legend("topleft",
legend=c("Estimate", "Actual","Obs."),
lty=1,lwd=5, col=c("red3","blue", green))
axis(1, at=bp, labels=df[,1])
dev.off()
# End Plot
# CA HPI IN
# --- In-sample: actual vs. estimated PD by cahpi_bins ----------------------
defaulters <- ci_dev_training %>% group_by(fileDate, cahpi_bins) %>% summarise(Defaulters = sum(y)) %>% data.frame()
nondefaulters <- ci_dev_training %>% group_by(fileDate, cahpi_bins) %>% filter(y==0) %>% count() %>% data.frame()
actual_df <- merge(defaulters, nondefaulters, by.x = c('fileDate','cahpi_bins'), by.y = c('fileDate','cahpi_bins'))
# NOTE(review): lag(n) denominator -- confirm intended.
actual_df <- actual_df %>% group_by(cahpi_bins) %>% mutate(pd_actual = Defaulters/lag(n)) %>% data.frame() %>% na.omit
colnames(actual_df) <- c("fileDate","bins","Defaulters","Nondefaulters","PD_Actual")
actual_df <- aggregate(actual_df$PD_Actual, list(actual_df$bins), mean)
colnames(actual_df) <- c("bins","PD_Actual")
# Observation numbers and estimate and final plot df
estimate_df <- as.data.frame(count(ci_dev_training, cahpi_bins))
estimate_df$mean <- aggregate(ci_dev_training$p_hat, list(ci_dev_training$cahpi_bins), mean)[,2]
colnames(estimate_df) <- c("bins","Observations","Mean")
df <- merge(estimate_df, actual_df)
colnames(df) <- c("CA_HPI", "Observations", "Estimate", "Actual")
# Start Plot
green <- rgb(.5, 1, .5, alpha=0.2)
pdf(paste("./R output", paste("CI Default Rate - ",colnames(df)[1],".pdf", sep=""),sep ="/"), height = 5, width = 10)
par(mar = c(5,5,2,5))
with(df, plot(as.numeric(row.names(df)), Estimate, col="red", type = "l", xaxt = "n",lwd=2,main = paste("In-sample: Default Rate",colnames(df)[1], sep=" - "),
ylab="Default Rate",xlab=names(df)[1],
ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
with(df, plot(as.numeric(row.names(df)), Actual, col="blue", xaxt = "n",type = "l", lwd=2,
ylab="", xlab="", ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
bp <- with(df, barplot(df$Observations,axes=F, xlab=NA, ylab=NA, col=green))
axis(side = 4)
mtext(side = 4, line = 3, 'Observation Count')
legend("topright",
legend=c("Estimate", "Actual","Obs."),
lty=1,lwd=5, col=c("red3","blue", green))
axis(1, at=bp, labels=df[,1])
dev.off()
# End Plot
# POB 5 IN
# --- In-sample: actual vs. estimated PD by pob5_bins -----------------------
defaulters <- ci_dev_training %>% group_by(fileDate, pob5_bins) %>% summarise(Defaulters = sum(y)) %>% data.frame()
nondefaulters <- ci_dev_training %>% group_by(fileDate, pob5_bins) %>% filter(y==0) %>% count() %>% data.frame()
actual_df <- merge(defaulters, nondefaulters, by.x = c('fileDate','pob5_bins'), by.y = c('fileDate','pob5_bins'))
# NOTE(review): lag(n) denominator -- confirm intended.
actual_df <- actual_df %>% group_by(pob5_bins) %>% mutate(pd_actual = Defaulters/lag(n)) %>% data.frame() %>% na.omit
colnames(actual_df) <- c("fileDate","bins","Defaulters","Nondefaulters","PD_Actual")
actual_df <- aggregate(actual_df$PD_Actual, list(actual_df$bins), mean)
colnames(actual_df) <- c("bins","PD_Actual")
# Observation numbers and estimate and final plot df
estimate_df <- as.data.frame(count(ci_dev_training, pob5_bins))
estimate_df$mean <- aggregate(ci_dev_training$p_hat, list(ci_dev_training$pob5_bins), mean)[,2]
colnames(estimate_df) <- c("bins","Observations","Mean")
df <- merge(estimate_df, actual_df)
colnames(df) <- c("POB_5", "Observations", "Estimate", "Actual")
# Start Plot
green <- rgb(.5, 1, .5, alpha=0.2)
pdf(paste("./R output", paste("CI Default Rate - ",colnames(df)[1],".pdf", sep=""),sep ="/"), height = 5, width = 10)
par(mar = c(5,5,2,5))
with(df, plot(as.numeric(row.names(df)), Estimate, col="red", type = "l", xaxt = "n",lwd=2,main = paste("In-sample: Default Rate",colnames(df)[1], sep=" - "),
ylab="Default Rate",xlab=names(df)[1],
ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
with(df, plot(as.numeric(row.names(df)), Actual, col="blue", xaxt = "n",type = "l", lwd=2,
ylab="", xlab="", ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
bp <- with(df, barplot(df$Observations,axes=F, xlab=NA, ylab=NA, col=green))
axis(side = 4)
mtext(side = 4, line = 3, 'Observation Count')
legend("topleft",
legend=c("Estimate", "Actual","Obs."),
lty=1,lwd=5, col=c("red3","blue", green))
axis(1, at=bp, labels=df[,1])
dev.off()
# End Plot
# POB 50 IN
# --- In-sample: actual vs. estimated PD by pob50_bins ----------------------
defaulters <- ci_dev_training %>% group_by(fileDate, pob50_bins) %>% summarise(Defaulters = sum(y)) %>% data.frame()
nondefaulters <- ci_dev_training %>% group_by(fileDate, pob50_bins) %>% filter(y==0) %>% count() %>% data.frame()
actual_df <- merge(defaulters, nondefaulters, by.x = c('fileDate','pob50_bins'), by.y = c('fileDate','pob50_bins'))
# NOTE(review): lag(n) denominator -- confirm intended.
actual_df <- actual_df %>% group_by(pob50_bins) %>% mutate(pd_actual = Defaulters/lag(n)) %>% data.frame() %>% na.omit
colnames(actual_df) <- c("fileDate","bins","Defaulters","Nondefaulters","PD_Actual")
actual_df <- aggregate(actual_df$PD_Actual, list(actual_df$bins), mean)
colnames(actual_df) <- c("bins","PD_Actual")
# Observation numbers and estimate and final plot df
estimate_df <- as.data.frame(count(ci_dev_training, pob50_bins))
estimate_df$mean <- aggregate(ci_dev_training$p_hat, list(ci_dev_training$pob50_bins), mean)[,2]
colnames(estimate_df) <- c("bins","Observations","Mean")
df <- merge(estimate_df, actual_df)
colnames(df) <- c("POB_50", "Observations", "Estimate", "Actual")
# Start Plot
green <- rgb(.5, 1, .5, alpha=0.2)
pdf(paste("./R output", paste("CI Default Rate - ",colnames(df)[1],".pdf", sep=""),sep ="/"), height = 5, width = 10)
par(mar = c(5,5,2,5))
with(df, plot(as.numeric(row.names(df)), Estimate, col="red", type = "l", xaxt = "n",lwd=2,main = paste("In-sample: Default Rate",colnames(df)[1], sep=" - "),
ylab="Default Rate",xlab=names(df)[1],
ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
with(df, plot(as.numeric(row.names(df)), Actual, col="blue", xaxt = "n",type = "l", lwd=2,
ylab="", xlab="", ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
bp <- with(df, barplot(df$Observations,axes=F, xlab=NA, ylab=NA, col=green))
axis(side = 4)
mtext(side = 4, line = 3, 'Observation Count')
legend("topleft",
legend=c("Estimate", "Actual","Obs."),
lty=1,lwd=5, col=c("red3","blue", green))
axis(1, at=bp, labels=df[,1])
dev.off()
# POB IN
# --- In-sample: actual vs. estimated PD by pob_bins ------------------------
defaulters <- ci_dev_training %>% group_by(fileDate, pob_bins) %>% summarise(Defaulters = sum(y)) %>% data.frame()
nondefaulters <- ci_dev_training %>% group_by(fileDate, pob_bins) %>% filter(y==0) %>% count() %>% data.frame()
actual_df <- merge(defaulters, nondefaulters, by.x = c('fileDate','pob_bins'), by.y = c('fileDate','pob_bins'))
# NOTE(review): lag(n) denominator -- confirm intended.
actual_df <- actual_df %>% group_by(pob_bins) %>% mutate(pd_actual = Defaulters/lag(n)) %>% data.frame() %>% na.omit
colnames(actual_df) <- c("fileDate","bins","Defaulters","Nondefaulters","PD_Actual")
actual_df <- aggregate(actual_df$PD_Actual, list(actual_df$bins), mean)
colnames(actual_df) <- c("bins","PD_Actual")
# Observation numbers and estimate and final plot df
estimate_df <- as.data.frame(count(ci_dev_training, pob_bins))
estimate_df$mean <- aggregate(ci_dev_training$p_hat, list(ci_dev_training$pob_bins), mean)[,2]
colnames(estimate_df) <- c("bins","Observations","Mean")
df <- merge(estimate_df, actual_df)
colnames(df) <- c("POB", "Observations", "Estimate", "Actual")
# Start Plot
green <- rgb(.5, 1, .5, alpha=0.2)
pdf(paste("./R output", paste("CI Default Rate - ",colnames(df)[1],".pdf", sep=""),sep ="/"), height = 5, width = 10)
par(mar = c(5,5,2,5))
with(df, plot(as.numeric(row.names(df)), Estimate, col="red", type = "l", xaxt = "n",lwd=2,main = paste("In-sample: Default Rate",colnames(df)[1], sep=" - "),
ylab="Default Rate",xlab=names(df)[1],
ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
with(df, plot(as.numeric(row.names(df)), Actual, col="blue", xaxt = "n",type = "l", lwd=2,
ylab="", xlab="", ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
bp <- with(df, barplot(df$Observations,axes=F, xlab=NA, ylab=NA, col=green))
axis(side = 4)
mtext(side = 4, line = 3, 'Observation Count')
legend("bottomleft",
legend=c("Estimate", "Actual","Obs."),
lty=1,lwd=5, col=c("red3","blue", green))
axis(1, at=bp, labels=df[,1])
dev.off()
# NAICS Group IN
# --- In-sample: actual vs. estimated PD by naics_grp -----------------------
# NOTE: the pdf()/dev.off() calls are commented out here, so this plot goes
# to the currently active graphics device instead of a file.
defaulters <- ci_dev_training %>% group_by(fileDate, naics_grp) %>% summarise(Defaulters = sum(y)) %>% data.frame()
nondefaulters <- ci_dev_training %>% group_by(fileDate, naics_grp) %>% filter(y==0) %>% count() %>% data.frame()
actual_df <- merge(defaulters, nondefaulters, by.x = c('fileDate','naics_grp'), by.y = c('fileDate','naics_grp'))
# NOTE(review): lag(n) denominator; same-period variant commented below.
actual_df <- actual_df %>% group_by(naics_grp) %>% mutate(pd_actual = Defaulters/lag(n)) %>% data.frame() %>% na.omit
#actual_df <- actual_df %>% group_by(naics_grp) %>% mutate(pd_actual = Defaulters/n) %>% data.frame() %>% na.omit
colnames(actual_df) <- c("fileDate","bins","Defaulters","Nondefaulters","PD_Actual")
actual_df <- aggregate(actual_df$PD_Actual, list(actual_df$bins), mean)
colnames(actual_df) <- c("bins","PD_Actual")
# Observation numbers and estimate and final plot df
estimate_df <- as.data.frame(count(ci_dev_training, naics_grp))
estimate_df$mean <- aggregate(ci_dev_training$p_hat, list(ci_dev_training$naics_grp), mean)[,2]
colnames(estimate_df) <- c("bins","Observations","Mean")
df <- merge(estimate_df, actual_df)
colnames(df) <- c("NAICS Group", "Observations", "Estimate", "Actual")
# Start Plot
green <- rgb(.5, 1, .5, alpha=0.2)
#pdf(paste("./R output", paste("CI Default Rate - ",colnames(df)[1],".pdf", sep=""),sep ="/"), height = 5, width = 10)
par(mar = c(5,5,2,5))
with(df, plot(as.numeric(row.names(df)), Estimate, col="red", type = "l", xaxt = "n",lwd=2,main = paste("In-sample: Default Rate",colnames(df)[1], sep=" - "),
ylab="Default Rate",xlab=names(df)[1],
ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
with(df, plot(as.numeric(row.names(df)), Actual, col="blue", xaxt = "n",type = "l", lwd=2,
ylab="", xlab="", ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
bp <- with(df, barplot(df$Observations,axes=F, xlab=NA, ylab=NA, col=green))
axis(side = 4)
mtext(side = 4, line = 3, 'Observation Count')
legend("bottomright",
legend=c("Estimate", "Actual","Obs."),
lty=1,lwd=5, col=c("red3","blue", green))
axis(1, at=bp, labels=df[,1])
#dev.off()
########################################################################
# OUT of SAMPLE
#Making average actual default and plot df by data and bin - OUT SAMPLE
# Risk OUT
# --- Out-of-sample: actual vs. estimated PD by risk_bins -------------------
# Mirror of the in-sample Risk section, run on ci_dev_outsample; output
# file is suffixed "_OUT".
defaulters <- ci_dev_outsample %>% group_by(fileDate, risk_bins) %>% summarise(Defaulters = sum(y)) %>% data.frame()
nondefaulters <- ci_dev_outsample %>% group_by(fileDate, risk_bins) %>% filter(y==0) %>% count() %>% data.frame()
actual_df <- merge(defaulters, nondefaulters, by.x = c("fileDate","risk_bins"), by.y = c("fileDate","risk_bins"))
# NOTE(review): lag(n) denominator -- confirm intended.
actual_df <- actual_df %>% group_by(risk_bins) %>% mutate(pd_actual = Defaulters/lag(n)) %>% data.frame() %>% na.omit
colnames(actual_df) <- c("fileDate","bins","Defaulters","Nondefaulters","PD_Actual")
actual_df <- aggregate(actual_df$PD_Actual, list(actual_df$bins), mean)
colnames(actual_df) <- c("bins","PD_Actual")
# Observation numbers and estimate and final plot df
estimate_df <- as.data.frame(count(ci_dev_outsample, risk_bins))
estimate_df$mean <- aggregate(ci_dev_outsample$p_hat, list(ci_dev_outsample$risk_bins), mean)[,2]
colnames(estimate_df) <- c("bins","Observations","Mean")
df <- merge(estimate_df, actual_df)
colnames(df) <- c("Risk", "Observations", "Estimate", "Actual")
# Drop the R4 bucket, matching the in-sample Risk plot.
df <- df %>% filter(Risk != "R4")
# Start Plot
green <- rgb(.5, 1, .5, alpha=0.2)
pdf(paste("./R output", paste("CI Default Rate - ",colnames(df)[1],"_OUT.pdf", sep=""),sep ="/"), height = 5, width = 10)
par(mar = c(5,5,2,5))
with(df, plot(as.numeric(row.names(df)), Estimate, col="red", type = "l", xaxt = "n",lwd=2,main = paste("Out-of-sample: Default Rate",colnames(df)[1], sep=" - "),
ylab="Default Rate",xlab=names(df)[1],
ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
with(df, plot(as.numeric(row.names(df)), Actual, col="blue", xaxt = "n",type = "l", lwd=2,
ylab="", xlab="", ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
bp <- with(df, barplot(df$Observations,axes=F, xlab=NA, ylab=NA, col=green))
axis(side = 4)
mtext(side = 4, line = 3, 'Observation Count')
legend("topleft",
legend=c("Estimate", "Actual","Obs."),
lty=1,lwd=5, col=c("red3","blue", green))
axis(1, at=bp, labels=df[,1])
dev.off()
# End Plot
# Season OUT
# --- Out-of-sample: actual vs. estimated PD by season_bins -----------------
defaulters <- ci_dev_outsample %>% group_by(fileDate, season_bins) %>% summarise(Defaulters = sum(y)) %>% data.frame()
nondefaulters <- ci_dev_outsample %>% group_by(fileDate, season_bins) %>% filter(y==0) %>% count() %>% data.frame()
actual_df <- merge(defaulters, nondefaulters, by.x = c('fileDate','season_bins'), by.y = c('fileDate','season_bins'))
# NOTE(review): lag(n) denominator -- confirm intended.
actual_df <- actual_df %>% group_by(season_bins) %>% mutate(pd_actual = Defaulters/lag(n)) %>% data.frame() %>% na.omit
colnames(actual_df) <- c("fileDate","bins","Defaulters","Nondefaulters","PD_Actual")
actual_df <- aggregate(actual_df$PD_Actual, list(actual_df$bins), mean)
colnames(actual_df) <- c("bins","PD_Actual")
# Observation numbers and estimate and final plot df
estimate_df <- as.data.frame(count(ci_dev_outsample, season_bins))
estimate_df$mean <- aggregate(ci_dev_outsample$p_hat, list(ci_dev_outsample$season_bins), mean)[,2]
colnames(estimate_df) <- c("bins","Observations","Mean")
df <- merge(estimate_df, actual_df)
colnames(df) <- c("Season", "Observations", "Estimate", "Actual")
# Start Plot
green <- rgb(.5, 1, .5, alpha=0.2)
pdf(paste("./R output", paste("CI Default Rate - ",colnames(df)[1],"_OUT.pdf", sep=""),sep ="/"), height = 5, width = 10)
par(mar = c(5,5,2,5))
with(df, plot(as.numeric(row.names(df)), Estimate, col="red", type = "l", xaxt = "n",lwd=2,main = paste("Out-of-sample: Default Rate",colnames(df)[1], sep=" - "),
ylab="Default Rate",xlab=names(df)[1],
ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
with(df, plot(as.numeric(row.names(df)), Actual, col="blue", xaxt = "n",type = "l", lwd=2,
ylab="", xlab="", ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
bp <- with(df, barplot(df$Observations,axes=F, xlab=NA, ylab=NA, col=green))
axis(side = 4)
mtext(side = 4, line = 3, 'Observation Count')
legend("topleft",
legend=c("Estimate", "Actual","Obs."),
lty=1,lwd=5, col=c("red3","blue", green))
axis(1, at=bp, labels=df[,1])
dev.off()
# End Plot
# CA UR OUT
# --- Out-of-sample: actual vs. estimated PD by caur_bins -------------------
defaulters <- ci_dev_outsample %>% group_by(fileDate, caur_bins) %>% summarise(Defaulters = sum(y)) %>% data.frame()
nondefaulters <- ci_dev_outsample %>% group_by(fileDate, caur_bins) %>% filter(y==0) %>% count() %>% data.frame()
actual_df <- merge(defaulters, nondefaulters, by.x = c('fileDate','caur_bins'), by.y = c('fileDate','caur_bins'))
# NOTE(review): lag(n) denominator -- confirm intended.
actual_df <- actual_df %>% group_by(caur_bins) %>% mutate(pd_actual = Defaulters/lag(n)) %>% data.frame() %>% na.omit
colnames(actual_df) <- c("fileDate","bins","Defaulters","Nondefaulters","PD_Actual")
actual_df <- aggregate(actual_df$PD_Actual, list(actual_df$bins), mean)
colnames(actual_df) <- c("bins","PD_Actual")
# Observation numbers and estimate and final plot df
estimate_df <- as.data.frame(count(ci_dev_outsample, caur_bins))
estimate_df$mean <- aggregate(ci_dev_outsample$p_hat, list(ci_dev_outsample$caur_bins), mean)[,2]
colnames(estimate_df) <- c("bins","Observations","Mean")
df <- merge(estimate_df, actual_df)
colnames(df) <- c("CA_UR", "Observations", "Estimate", "Actual")
# Start Plot
green <- rgb(.5, 1, .5, alpha=0.2)
pdf(paste("./R output", paste("CI Default Rate - ",colnames(df)[1],"_OUT.pdf", sep=""),sep ="/"), height = 5, width = 10)
par(mar = c(5,5,2,5))
with(df, plot(as.numeric(row.names(df)), Estimate, col="red", type = "l", xaxt = "n",lwd=2,main = paste("Out-of-sample: Default Rate",colnames(df)[1], sep=" - "),
ylab="Default Rate",xlab=names(df)[1],
ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
with(df, plot(as.numeric(row.names(df)), Actual, col="blue", xaxt = "n",type = "l", lwd=2,
ylab="", xlab="", ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
bp <- with(df, barplot(df$Observations,axes=F, xlab=NA, ylab=NA, col=green))
axis(side = 4)
mtext(side = 4, line = 3, 'Observation Count')
legend("topleft",
legend=c("Estimate", "Actual","Obs."),
lty=1,lwd=5, col=c("red3","blue", green))
axis(1, at=bp, labels=df[,1])
dev.off()
# End Plot
# CA HPI OUT
# --- Out-of-sample: actual vs. estimated PD by cahpi_bins ------------------
defaulters <- ci_dev_outsample %>% group_by(fileDate, cahpi_bins) %>% summarise(Defaulters = sum(y)) %>% data.frame()
nondefaulters <- ci_dev_outsample %>% group_by(fileDate, cahpi_bins) %>% filter(y==0) %>% count() %>% data.frame()
actual_df <- merge(defaulters, nondefaulters, by.x = c('fileDate','cahpi_bins'), by.y = c('fileDate','cahpi_bins'))
# NOTE(review): lag(n) denominator -- confirm intended.
actual_df <- actual_df %>% group_by(cahpi_bins) %>% mutate(pd_actual = Defaulters/lag(n)) %>% data.frame() %>% na.omit
colnames(actual_df) <- c("fileDate","bins","Defaulters","Nondefaulters","PD_Actual")
actual_df <- aggregate(actual_df$PD_Actual, list(actual_df$bins), mean)
colnames(actual_df) <- c("bins","PD_Actual")
# Observation numbers and estimate and final plot df
estimate_df <- as.data.frame(count(ci_dev_outsample, cahpi_bins))
estimate_df$mean <- aggregate(ci_dev_outsample$p_hat, list(ci_dev_outsample$cahpi_bins), mean)[,2]
colnames(estimate_df) <- c("bins","Observations","Mean")
df <- merge(estimate_df, actual_df)
colnames(df) <- c("CA_HPI", "Observations", "Estimate", "Actual")
# Start Plot
green <- rgb(.5, 1, .5, alpha=0.2)
pdf(paste("./R output", paste("CI Default Rate - ",colnames(df)[1],"_OUT.pdf", sep=""),sep ="/"), height = 5, width = 10)
par(mar = c(5,5,2,5))
with(df, plot(as.numeric(row.names(df)), Estimate, col="red", type = "l", xaxt = "n",lwd=2,main = paste("Out-of-sample: Default Rate",colnames(df)[1], sep=" - "),
ylab="Default Rate",xlab=names(df)[1],
ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
with(df, plot(as.numeric(row.names(df)), Actual, col="blue", xaxt = "n",type = "l", lwd=2,
ylab="", xlab="", ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
bp <- with(df, barplot(df$Observations,axes=F, xlab=NA, ylab=NA, col=green))
axis(side = 4)
mtext(side = 4, line = 3, 'Observation Count')
legend("topright",
legend=c("Estimate", "Actual","Obs."),
lty=1,lwd=5, col=c("red3","blue", green))
axis(1, at=bp, labels=df[,1])
dev.off()
# End Plot
# POB 5 OUT
# --- Out-of-sample: actual vs. estimated PD by pob5_bins -------------------
defaulters <- ci_dev_outsample %>% group_by(fileDate, pob5_bins) %>% summarise(Defaulters = sum(y)) %>% data.frame()
nondefaulters <- ci_dev_outsample %>% group_by(fileDate, pob5_bins) %>% filter(y==0) %>% count() %>% data.frame()
actual_df <- merge(defaulters, nondefaulters, by.x = c('fileDate','pob5_bins'), by.y = c('fileDate','pob5_bins'))
# NOTE(review): lag(n) denominator -- confirm intended.
actual_df <- actual_df %>% group_by(pob5_bins) %>% mutate(pd_actual = Defaulters/lag(n)) %>% data.frame() %>% na.omit
colnames(actual_df) <- c("fileDate","bins","Defaulters","Nondefaulters","PD_Actual")
actual_df <- aggregate(actual_df$PD_Actual, list(actual_df$bins), mean)
colnames(actual_df) <- c("bins","PD_Actual")
# Observation numbers and estimate and final plot df
estimate_df <- as.data.frame(count(ci_dev_outsample, pob5_bins))
estimate_df$mean <- aggregate(ci_dev_outsample$p_hat, list(ci_dev_outsample$pob5_bins), mean)[,2]
colnames(estimate_df) <- c("bins","Observations","Mean")
df <- merge(estimate_df, actual_df)
colnames(df) <- c("POB_5", "Observations", "Estimate", "Actual")
# Start Plot
green <- rgb(.5, 1, .5, alpha=0.2)
pdf(paste("./R output", paste("CI Default Rate - ",colnames(df)[1],"_OUT.pdf", sep=""),sep ="/"), height = 5, width = 10)
par(mar = c(5,5,2,5))
with(df, plot(as.numeric(row.names(df)), Estimate, col="red", type = "l", xaxt = "n",lwd=2,main = paste("Out-of-sample: Default Rate",colnames(df)[1], sep=" - "),
ylab="Default Rate",xlab=names(df)[1],
ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
with(df, plot(as.numeric(row.names(df)), Actual, col="blue", xaxt = "n",type = "l", lwd=2,
ylab="", xlab="", ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
bp <- with(df, barplot(df$Observations,axes=F, xlab=NA, ylab=NA, col=green))
axis(side = 4)
mtext(side = 4, line = 3, 'Observation Count')
legend("topleft",
legend=c("Estimate", "Actual","Obs."),
lty=1,lwd=5, col=c("red3","blue", green))
axis(1, at=bp, labels=df[,1])
dev.off()
# POB 50 OUT
# --- Out-of-sample: actual vs. estimated PD by pob50_bins ------------------
defaulters <- ci_dev_outsample %>% group_by(fileDate, pob50_bins) %>% summarise(Defaulters = sum(y)) %>% data.frame()
nondefaulters <- ci_dev_outsample %>% group_by(fileDate, pob50_bins) %>% filter(y==0) %>% count() %>% data.frame()
actual_df <- merge(defaulters, nondefaulters, by.x = c('fileDate','pob50_bins'), by.y = c('fileDate','pob50_bins'))
# NOTE(review): lag(n) denominator -- confirm intended.
actual_df <- actual_df %>% group_by(pob50_bins) %>% mutate(pd_actual = Defaulters/lag(n)) %>% data.frame() %>% na.omit
colnames(actual_df) <- c("fileDate","bins","Defaulters","Nondefaulters","PD_Actual")
actual_df <- aggregate(actual_df$PD_Actual, list(actual_df$bins), mean)
colnames(actual_df) <- c("bins","PD_Actual")
# Observation numbers and estimate and final plot df
estimate_df <- as.data.frame(count(ci_dev_outsample, pob50_bins))
estimate_df$mean <- aggregate(ci_dev_outsample$p_hat, list(ci_dev_outsample$pob50_bins), mean)[,2]
colnames(estimate_df) <- c("bins","Observations","Mean")
df <- merge(estimate_df, actual_df)
colnames(df) <- c("POB_50", "Observations", "Estimate", "Actual")
# Start Plot
green <- rgb(.5, 1, .5, alpha=0.2)
pdf(paste("./R output", paste("CI Default Rate - ",colnames(df)[1],"_OUT.pdf", sep=""),sep ="/"), height = 5, width = 10)
par(mar = c(5,5,2,5))
with(df, plot(as.numeric(row.names(df)), Estimate, col="red", type = "l", xaxt = "n",lwd=2,main = paste("Out-of-sample: Default Rate",colnames(df)[1], sep=" - "),
ylab="Default Rate",xlab=names(df)[1],
ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
with(df, plot(as.numeric(row.names(df)), Actual, col="blue", xaxt = "n",type = "l", lwd=2,
ylab="", xlab="", ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
bp <- with(df, barplot(df$Observations,axes=F, xlab=NA, ylab=NA, col=green))
axis(side = 4)
mtext(side = 4, line = 3, 'Observation Count')
legend("topleft",
legend=c("Estimate", "Actual","Obs."),
lty=1,lwd=5, col=c("red3","blue", green))
axis(1, at=bp, labels=df[,1])
dev.off()
# POB OUT
# --- Out-of-sample: actual vs. estimated PD by pob_bins --------------------
# Mirror of the in-sample POB section, run on ci_dev_outsample.
defaulters <- ci_dev_outsample %>% group_by(fileDate, pob_bins) %>% summarise(Defaulters = sum(y)) %>% data.frame()
nondefaulters <- ci_dev_outsample %>% group_by(fileDate, pob_bins) %>% filter(y==0) %>% count() %>% data.frame()
actual_df <- merge(defaulters, nondefaulters, by.x = c('fileDate','pob_bins'), by.y = c('fileDate','pob_bins'))
# NOTE(review): lag(n) denominator -- confirm intended.
actual_df <- actual_df %>% group_by(pob_bins) %>% mutate(pd_actual = Defaulters/lag(n)) %>% data.frame() %>% na.omit
colnames(actual_df) <- c("fileDate","bins","Defaulters","Nondefaulters","PD_Actual")
actual_df <- aggregate(actual_df$PD_Actual, list(actual_df$bins), mean)
colnames(actual_df) <- c("bins","PD_Actual")
# Observation numbers and estimate and final plot df
estimate_df <- as.data.frame(count(ci_dev_outsample, pob_bins))
estimate_df$mean <- aggregate(ci_dev_outsample$p_hat, list(ci_dev_outsample$pob_bins), mean)[,2]
colnames(estimate_df) <- c("bins","Observations","Mean")
df <- merge(estimate_df, actual_df)
colnames(df) <- c("POB", "Observations", "Estimate", "Actual")
# Start Plot
green <- rgb(.5, 1, .5, alpha=0.2)
pdf(paste("./R output", paste("CI Default Rate - ",colnames(df)[1],"_OUT.pdf", sep=""),sep ="/"), height = 5, width = 10)
par(mar = c(5,5,2,5))
# BUG FIX: this is the out-of-sample POB plot, but the title previously
# said "In-sample" (copy-paste from the in-sample section). Every sibling
# OUT section titles its plot "Out-of-sample"; corrected to match.
with(df, plot(as.numeric(row.names(df)), Estimate, col="red", type = "l", xaxt = "n",lwd=2,main = paste("Out-of-sample: Default Rate",colnames(df)[1], sep=" - "),
ylab="Default Rate",xlab=names(df)[1],
ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
with(df, plot(as.numeric(row.names(df)), Actual, col="blue", xaxt = "n",type = "l", lwd=2,
ylab="", xlab="", ylim=c(0,max(apply(df[,3:4], 2, max)))))
par(new = T)
bp <- with(df, barplot(df$Observations,axes=F, xlab=NA, ylab=NA, col=green))
axis(side = 4)
mtext(side = 4, line = 3, 'Observation Count')
legend("bottomleft",
legend=c("Estimate", "Actual","Obs."),
lty=1,lwd=5, col=c("red3","blue", green))
axis(1, at=bp, labels=df[,1])
dev.off()
# NAICS Code Graph
# --- Map 2-digit NAICS prefixes to short industry labels, then plot --------
# mean actual PD (%) per industry over observation-count bars. Prefixes
# not listed fall through to "Other"; "00" is flagged as "Error".
# NOTE: pdf()/dev.off() are commented out, so this draws to the active
# graphics device.
ci_dev$naics_nms <- ifelse(as.numeric(substr(ci_dev$naicsCode,start=1,stop=2)) == 23,"Const.",
ifelse(as.numeric(substr(ci_dev$naicsCode,start=1,stop=2)) %in% c(31,32,33) ,"Manu.",
ifelse(as.numeric(substr(ci_dev$naicsCode,start=1,stop=2)) == 42,"Whole. Trade",
ifelse(as.numeric(substr(ci_dev$naicsCode,start=1,stop=2)) %in% c(44,45),"Retail",
ifelse(as.numeric(substr(ci_dev$naicsCode,start=1,stop=2)) %in% c(48,49),"Transport.",
ifelse(as.numeric(substr(ci_dev$naicsCode,start=1,stop=2)) == 51,"Info.",
ifelse(as.numeric(substr(ci_dev$naicsCode,start=1,stop=2)) == 53,"Real Est. & Rent",
ifelse(as.numeric(substr(ci_dev$naicsCode,start=1,stop=2)) == 54,"Sci. & Tech.",
ifelse(as.numeric(substr(ci_dev$naicsCode,start=1,stop=2)) == 56,"Waste Mng.",
ifelse(as.numeric(substr(ci_dev$naicsCode,start=1,stop=2)) == 62,"Health Care",
ifelse(as.numeric(substr(ci_dev$naicsCode,start=1,stop=2)) == 71,"Arts & Ent.",
ifelse(as.numeric(substr(ci_dev$naicsCode,start=1,stop=2)) == 72,"Acc. & Food",
ifelse(as.numeric(substr(ci_dev$naicsCode,start=1,stop=2)) == 00,"Error","Other")))))))))))))
defaulters <- ci_dev %>% group_by(fileDate, naics_nms) %>% summarise(Defaulters = sum(y)) %>% data.frame()
nondefaulters <- ci_dev %>% group_by(fileDate, naics_nms) %>% filter(y==0) %>% count() %>% data.frame()
actual_df <- merge(defaulters, nondefaulters, by.x = c("fileDate","naics_nms"), by.y = c("fileDate","naics_nms"))
actual_df$total_obs <- actual_df$Defaulters+actual_df$n
# NOTE(review): lag(n) denominator -- confirm intended.
actual_df <- actual_df %>% group_by(naics_nms) %>% mutate(pd_actual = 100*Defaulters/lag(n)) %>% data.frame() %>% na.omit
colnames(actual_df) <- c("fileDate","bins","Defaulters","Nondefaulters","Observations","PD_Actual")
df <- actual_df %>% group_by(bins) %>% summarise(pd_actual_m = mean(PD_Actual), obs_sum = sum(Observations)) %>% data.frame()
colnames(df) <- c("naics_nms", "PD", "Observations")
# Drop the catch-all "Other" bucket from the plot.
df <- df %>% filter(naics_nms != "Other")
# Start Plot
blue <- rgb(0, 0, 1, alpha=0.2)
#pdf(paste("./R output", paste("CI Default Rate - ",colnames(df)[1],"_profile.pdf", sep=""),sep ="/"), height = 5, width = 10)
par(mar = c(10,5,2,5))
with(df, plot(as.numeric(row.names(df)), PD, col="green4", type = "o",xaxt = "n",lwd=2,main = "Default Rate - NAICS",
ylab="Default Rate (%)",xlab=NA,
ylim=c(0,max(apply(df[,2,drop=F], 2, max)))))
par(new = T)
bp <- with(df, barplot(df$Observations,axes=F, xlab=NA, ylab=NA, col=blue))
axis(side = 4)
mtext(side = 4, line = 3, 'Observation Count')
legend("topleft",
legend=c("PD","Obs."),
lty=1,lwd=5, col=c("green4",blue))
axis(1, at=bp, labels=df[,1], las=2, cex.axis=1.4)
#dev.off()
# End Plot
# Grouped Bar Plot
# Normalize each series to its own maximum so PD and Observations can share
# one bar scale; the real units are restored on the left/right axes below.
funProp <- function(testCol) {
df[, testCol]/max(df[, testCol])
}
# NOTE(review): funProp reads the global `df` built above, not an argument.
df$var.a.prop <- funProp("PD")
df$var.b.prop <- funProp("Observations")
barplot(t(as.matrix(df[, c("var.a.prop", "var.b.prop")])), beside = TRUE,
yaxt = "n", names.arg = df$naics_nms,las=2,col=c("green4",blue),main = "Default Rate - NAICS",
ylab="Default Rate (%)", cex.names=1.4)
# Left axis: PD in original units; right axis: observation counts.
axis(2, at = seq(0, max(df$var.a.prop), length.out = 10),
labels = round(seq(0, max(df$PD), length.out = 10),2))
axis(4, at = seq(0, max(df$var.b.prop), length.out = 10),
labels = round(seq(0, max(df$Observations), length.out = 10), 0))
mtext(side = 4, line = 3, 'Observation Count')
legend("topleft",
legend=c("PD","Obs."),
lty=1,lwd=5, col=c("green4",blue))
#################
# Coefficient Stability
## run logistic regression
# Fixed CI PD model specification: rating and NAICS dummies, seasonality,
# macro drivers (presumably CA unemployment change and CA HPI growth from the
# CAUR/cahpi names -- confirm against the data dictionary), delinquency and
# place-of-business dummies.
model <- y ~ boh_rating1_R1 + boh_rating1_R2 + boh_rating1_R4 +
naics_code3_c + naics_code3_d +
season_fall + season_summer + season_winter + CAUR_yd_lag_3 +
cahpi_ag_lag_3_n12 + dpd0129_0 + POB_5 + POB_50
fit <- glm( model, family = binomial(link = "logit"), data = ci_dev_training)
summary(fit)
# Make random samples
# Sample Number
sample_number <- 10
# Sample fraction
sample_fraction <- .6
# Round the sample size to a whole number
sample_size <- round(nrow(ci_dev)*sample_fraction)
# Sample from the df
# Draw sample_number random 60% subsamples without replacement; the complement
# of each draw forms its matching out-of-sample set.
df_samples <- list()
df_samples_out <- list()
for (i in 1:sample_number){
##In sample data
df_samples[[i]] <- ci_dev[sample(nrow(ci_dev), sample_size, replace = FALSE), ]
##Out of sample data
df_samples_out[[i]] <- ci_dev[-which(rownames(ci_dev) %in% rownames(df_samples[[i]])),]
}
# Estimate the original model
# Re-fit the logistic model on each in-sample draw.  seq_along() replaces
# 1:length(x), which would mis-iterate as (1, 0) on an empty list.
logit_s <- list()
for (i in seq_along(df_samples)) {
logit_s[[i]] <- glm(model, family = binomial(link = "logit"), data = df_samples[[i]])
}
# Predict response probabilities on each matching out-of-sample set.
predict_s <- list()
for (i in seq_along(df_samples)) {
predict_s[[i]] <- predict(logit_s[[i]], df_samples_out[[i]], type="response")
}
# Make a df for the predictions and assign names.
# (seq(1:length(x)) was redundant -- seq_along gives the same 1..n sequence.)
header_predict <- paste("Sample_", seq_along(df_samples), sep="")
predict_s_df <- data.frame(predict_s)
colnames(predict_s_df) <- header_predict
# Make data frame of all predictions
# NOTE(review): ci_dev_outsample is not defined in this chunk -- presumably
# created earlier in the script; confirm before running standalone.
test_out_df <- data.frame(ci_dev_outsample,predict_s_df)
# Actual default rate per file date: defaults divided by the prior period's
# non-defaulter count (lag).
test_out_df <- test_out_df %>% group_by(fileDate) %>% mutate(defaulters = sum(y)) %>% data.frame()
test_out_df <- test_out_df %>% group_by(fileDate) %>% filter(y==0) %>% mutate(nondefaulters = n()) %>% data.frame()
test_out_df <- test_out_df %>% group_by(fileDate) %>% mutate(pd_actual = defaulters/lag(nondefaulters)) %>% data.frame()
fcst_df_nms <- c("fileDate",header_predict,"pd_actual")
test_out_df <- na.omit(test_out_df[,which(colnames(test_out_df) %in% fcst_df_nms), drop = F])
# Average predicted PD per date for each sample, alongside the actual rate.
test_out_df <- aggregate(x = test_out_df[,-1],
FUN = mean,
by = list(Date = test_out_df$fileDate))
head(test_out_df)
# Plot of all Forecasts
predict_samples_gg <- melt(test_out_df, id = "Date")
ggplot(data = predict_samples_gg, aes(x = Date, y = value, group = variable, color = variable)) + geom_line() + xlab("Date") + ylab("Default Rate") + ggtitle("Ave. Default Rate CI Out-of-Sample") +
theme(plot.title = element_text(hjust = 0.5)) +
theme(text = element_text(size=15)) + theme(legend.title=element_blank())
# Bootstrap the regression
# Bootstrap 95% CI for regression coefficients
library(boot)
# function to obtain regression weights
# Bootstrap statistic for boot(): refit the logistic regression on the
# resampled rows and return its coefficient vector.  The `formula` parameter
# must keep its name because boot() is called with formula= as a named
# pass-through argument.
bs = function(data, indices, formula) {
resample = data[indices, ]  # boot() supplies the resampled row indices
refit = glm(formula, data = resample, family = binomial(link = "logit"))
coef(refit)
}
# Bootstrap the coefficient vector with R = 20 replications.
# NOTE(review): the original comment said "100 replications" but R is 20 below.
results = boot(
data=ci_dev_training,
statistic=bs,
R=20,
formula=model)
# Bootstrap-based standard errors and z/p-values for each coefficient.
Names = names(results$t0)
SEs = sapply(data.frame(results$t), sd)
Coefs = as.numeric(results$t0)
zVals = Coefs / SEs
Pvals = 2*pnorm(-abs(zVals))
Formatted_Results = cbind(Names, Coefs, SEs, zVals, Pvals)
# Compare against the conventional asymptotic GLM standard errors.
summary(glm(model, family = binomial(link = "logit"), data = ci_dev_training))
# Plot the bootstrap density of each coefficient.
for (i in 1:length(names(results$t0))){
plot(density(results$t[,i]), main = paste(names(results$t0)[i],"Density",sep=" - "))
}
<file_sep>/EBModel/4-CombineCREandCIEndingBalances.R
################################################################################
# Bank of Hope
# Commercial Real Estate Ending Balances
# Program: <>.R
# Author(s): KPMG, LLP
# Purpose:
# Data Dependences:
#
#
# R-version: R version 3.3.1 (2016-06-21)
# -- "Bug in Your Hair" Copyright (C) 2016 The R Foundation
# for Statistical Computing Platform: x86_64-apple-darwin13.4.0 (64-bit)
################################################################################
### Environment Settings #######################################################
pth_inputs = "C:/Users/ic07949/Desktop/KPMG/Model Development/development code and data/ending-balance_combined/combined/read-only-inputs"
pth_lib = "C:/Users/ic07949/Desktop/KPMG/Model Development/development code and data/ending-balance_combined/combined/library"
pth_out = "C:/Users/ic07949/Desktop/KPMG/Model Development/development code and data/ending-balance_combined/combined"
### No need to make changes below after this line ##############################
### Dependencies
source(paste(pth_lib,"/dev-support.R", sep=""))
source(paste(pth_lib,"/dfast-support.R", sep=""))
# source has the following functions:
# - stack()
# - get_bal_forecast()
# - concat()
# - bin_interval_variable()
# - calc_rsq(), calc_mape(), calc_mad(), calc_rmset()
# - cv_step(), cv_select()
library("openxlsx")
library("data.table")
library("lubridate")
library("ggplot2")
library("scales")
library("tseries")
library("quantreg")
library("car")
library("urca")
library("lmtest")
library("stats")
################################################################################
### Combine CRE and C-I
# Join the CRE and C&I balance tables on quarter date (data.table join).
cre = readRDS(concat(pth_out, "/data-cre.RDS"))
c_i = readRDS(concat(pth_out, "/data-c_i.RDS"))
boh = cre[c_i, on="qtr_dt"]
### Transform Balance Variables
# Total bank balance = CRE total + C&I.
boh[, boh := cre_boh + ci]
segments = c("ip", "oo", "ci", "no", "mf", "cre_boh", "boh")
# log-diff
# log-diff-lag1
# pct
### Patch oo and no 2016Q4 with values based on mix of 2007.
# NOTE(review): the code below actually patches 2006Q4 (qtr_dt == 2006-12-31),
# backing it out from 2007's average growth; the "2016Q4" header looks stale.
boh[["is_patched_data"]] = FALSE
boh_2007 = boh[qtr_dt >= as.Date("2007-03-31") & qtr_dt <= as.Date("2007-12-31"), c("boh", "ci", "mf", "no", "oo")]
boh_2007Q1_bal = boh_2007[["boh"]][1]
# Average quarterly growth factor over 2007 (geometric mean of log-diffs).
boh_2007_growth_factor = exp(mean(log_diff(boh_2007[["boh"]]), na.rm=TRUE))
boh[qtr_dt == as.Date("2006-12-31"), "boh"] = boh_2007Q1_bal/boh_2007_growth_factor
# bal_t = bal_t-1 x exp(log_diff)
# bat_t-1 = bal_t/exp(log_diff)
# Allocate the patched 2006Q4 total to the no/oo segments using the 2007Q1 mix.
mix_2007Q1 = (boh_2007[1, c("no", "oo")]/boh_2007[["boh"]][1])
boh[qtr_dt == as.Date("2006-12-31"),"is_patched_data"] = TRUE
boh[qtr_dt == as.Date("2006-12-31"),c("no", "oo")] = mix_2007Q1 * boh[qtr_dt == as.Date("2006-12-31"),"boh"][[1]]
# For every segment derive: log-diff, its first difference (Ldiff_), a
# one-quarter lag, and the segment's share of the total balance.
# (log_diff/delta come from the sourced support libraries.)
for (seg in segments) {
tf_name = concat("ldiff_", seg)
Tf_name = concat("Ldiff_", seg)
pct_name = concat("pct_", seg)
tf_name_lag = concat(tf_name, "_lag1")
boh[[tf_name]] = log_diff(boh[[seg]])
boh[[Tf_name]] = delta(boh[[tf_name]])
boh[[tf_name_lag]] = shift(boh[[tf_name]], n=1)
boh[[pct_name]] = boh[[seg]]/boh[["boh"]]
}
### Data flags:
FST_TRAIN_DT = "2003-03-31"
CRE_FST_TRAIN_DT = "2007-06-30"
NTH_TRAIN_DT = "2016-12-31"
RECESSION_START = "2007-12-31"
RECESSION_END = "2009-06-30"
# Convert the cutoff dates once; the comparisons below already yield logicals,
# so the redundant ifelse(cond, TRUE, FALSE) wrappers are dropped (identical
# result, including NA passthrough).
fst_train_dt = as.Date(FST_TRAIN_DT)
cre_fst_train_dt = as.Date(CRE_FST_TRAIN_DT)
nth_train_dt = as.Date(NTH_TRAIN_DT)
qtr = boh[["qtr_dt"]]
# Out-of-time flag: quarters at/after the last training date.
boh[["is_oot"]] = qtr >= nth_train_dt
# C&I and multifamily train on the full 2003+ history.
boh[["is_train_ci"]] = qtr >= fst_train_dt & qtr <= nth_train_dt
boh[["is_train_mf"]] = qtr >= fst_train_dt & qtr <= nth_train_dt
# The CRE segments train on the shorter post-2007Q2 history.
for (seg in c("no", "oo", "ip", "boh", "cre_boh")) {
boh[[concat("is_train_", seg)]] = qtr >= cre_fst_train_dt & qtr <= nth_train_dt
}
boh[["is_recession"]] = qtr >= as.Date(RECESSION_START) & qtr <= as.Date(RECESSION_END)
boh[["is_9q_data"]] = FALSE
### Add Econ Data
econ_data = readRDS(concat(pth_out, "/econ-data-baseline.RDS"))
# Flag outliers in the log-diff series for the modeled segments.
# NOTE(review): is_outlier_ia comes from the sourced support library --
# presumably an interquartile/interval-based outlier flag; confirm there.
for (seg in c("ci", "ip", "oo")) {
tf_name = concat("ldiff_", seg)
outlier_name = concat("is_",seg,"_outlier")
boh[[outlier_name]] = ifelse(is_outlier_ia(boh[[tf_name]]) == 1, TRUE, FALSE)
}
# Join baseline economic drivers onto the balance history and persist.
boh = econ_data[boh, on="qtr_dt"]
saveRDS(boh, concat(pth_out, "/data-boh.RDS"))
################################################################################
################################################################################
<file_sep>/Adobe/2-basic_analysis.R
################################################################################
#
#
# Program: 2-basic_analysis.R
# Author: <NAME>
# Purpose: find the data limitation and do basic analysis using data table from 1-merge_data
#
#
#
# R-version: R version 3.3.4 (2016-06-21)
# -- "Bug in Your Hair" Copyright (C) 2016 The R Foundation
# for Statistical Computing Platform: x86_64-apple-darwin13.4.0 (64-bit)
################################################################################
### Environment Settings #######################################################
pth_inputs = "C:/Users/ic07949/Desktop/personal/adobe/inputs"
pth_lib = "C:/Users/ic07949/Desktop/personal/adobe/library"
pth_out = "C:/Users/ic07949/Desktop/personal/adobe"
### No need to make changes below after this line ##############################
source(paste(pth_lib,"/dev-support.R", sep=""))
library("data.table")
library("dplyr")
library("ggplot2")
library("lubridate")
library("scales")
library("zoo")
library("plyr")
library("corrplot")
library("tidyr")
library("reshape")
##########################################################################################
# Load Data
###########################################################################################
df_assessments <- read.csv(concat(pth_out,"/df_assessments.csv"))
assessments_courses <- read.csv(concat(pth_out,"/assessments_courses.csv"))
############################################################################################
# check missing value : score has 173 missing value 0.09% from total dataset.
colSums(is.na(df_assessments))
# Fill missing scores with 0.
# BUG FIX: the original `df_assessments[df_assessments$score == 'NA',] <- 0`
# compared against the string 'NA' (which never matches real NA values read by
# read.csv), produced NA subscripts that error in subset-assignment, and would
# have zeroed entire rows rather than just the score.  Locate missing values
# with is.na() and zero-fill only the score column.
df_assessments$score[is.na(df_assessments$score)] <- 0
##########################################################################################
# assessments data limitation
###########################################################################################
# Total assessment weight per course offering (module x presentation).
df_assessments_v2 <- assessments_courses %>% group_by(code_module, code_presentation) %>% dplyr::summarise(total_weight = sum(weight))
df_assessments <- merge(x = df_assessments, y = df_assessments_v2, by =c("code_module", "code_presentation"))
df_assessments$score_weight <- (as.integer(df_assessments$score)*as.integer(df_assessments$weight))/as.integer(df_assessments$total_weight)
# student's weight: summed assessment weight actually recorded per student.
df_assessments_v3 <- df_assessments %>% group_by(code_module, code_presentation, id_student) %>% dplyr::summarise(sum_weight = sum(weight))
df_assessments <- merge(x = df_assessments, y = df_assessments_v3, by =c("code_module", "code_presentation", "id_student"))
# total observation is 173912
# with final exam missing data decrease by 19033
# Keep only students with a complete assessment record (their summed weight
# equals the course's total weight).
df_assessments_v4 <- df_assessments[df_assessments$total_weight == df_assessments$sum_weight,]
# create the dataset with full scores, total 19033 observations
# NOTE(review): studentInfo is not defined in this script -- presumably loaded
# by 1-merge_data.R; confirm before running standalone.
studentInfo_result <- studentInfo[,c("code_module","code_presentation","id_student","final_result")]
df_assessments_v5 <- merge(x = df_assessments_v4, y = studentInfo_result, by =c("code_module", "code_presentation", "id_student"))
df_assessments_v5$weight <- as.numeric(df_assessments_v5$weight)
# as.character before as.integer guards against factor-coded scores.
df_assessments_v5$score_weight <- as.integer(as.character(df_assessments_v5$score))*df_assessments_v5$weight
df_assessments_v5$weighted_score <- as.numeric(df_assessments_v5$score_weight) / as.integer(df_assessments_v5$total_weight)
#compute final grade: weighted score summed per student per course offering
df_assessments_v6 <- df_assessments_v5 %>% group_by(code_module, code_presentation, id_student) %>% dplyr::summarise(sum_score = sum(weighted_score))
df_assessments_v6 <- merge(x = df_assessments_v6, y = studentInfo_result, by =c("code_module", "code_presentation", "id_student"))
# plot the score range for each final result (box plot per outcome)
p <- ggplot(df_assessments_v6 , aes(x=final_result, y=sum_score, color =final_result)) + geom_boxplot()+
theme(legend.position="none")+
labs(x = "Final Result", y = "Score" )
ggsave( concat(pth_out,"/image_socre_range_final_results.png"), plot=p, width=15, height=11, unit="cm", dpi=500)
# Count of recorded scores per assessment; NA after the left join below means
# an assessment with no recorded scores at all.
df_assessments_v7 <- df_assessments %>% group_by(id_assessment) %>% dplyr::summarise(k =n())
assessments_courses <- merge(x = assessments_courses, y = df_assessments_v7, by =c("id_assessment"), all.x = TRUE)
sum(is.na(assessments_courses$k))# total 18 exam score sets are missing
##########################################################################################
# correlation with final result
###########################################################################################
coursesInfo <- read.csv(concat(pth_out,"/coursesInfo.csv"))
# Numeric feature columns selected by position.
# NOTE(review): positional selection (cols 8-14, 17-20) is brittle if the CSV
# layout changes -- consider selecting by name.
df_cousesInfo <- coursesInfo[, c(8,9,10,11,12,13,14,17,18,19,20)]
cor_coursesInfo <- cor(df_cousesInfo)
round(cor_coursesInfo, 2)
#corrplot: upper triangle, variables ordered by hierarchical clustering
corrplot(cor_coursesInfo, type = "upper", order = "hclust", tl.col = "black", tl.srt = 45)
#heatmap of the same correlation matrix, blue (negative) to red (positive)
col<- colorRampPalette(c("blue", "white", "red"))(20)
heatmap(x = cor_coursesInfo, col = col, symm = TRUE)
##########################################################################################
#
###########################################################################################
<file_sep>/EBModel/7-BackTesting.R
################################################################################
# Bank of Hope
# Commercial Real Estate Ending Balances
# Program: <>.R
# Author(s): KPMG, LLP
# Purpose:
# Data Dependences:
#
#
# R-version: R version 3.3.1 (2016-06-21)
# -- "Bug in Your Hair" Copyright (C) 2016 The R Foundation
# for Statistical Computing Platform: x86_64-apple-darwin13.4.0 (64-bit)
################################################################################
### Environment Settings #######################################################
pth_inputs = "C:/Users/ic07949/Desktop/KPMG/Model Development/development code and data/ending-balance_combined/combined/read-only-inputs"
pth_lib = "C:/Users/ic07949/Desktop/KPMG/Model Development/development code and data/ending-balance_combined/combined/library"
pth_out = "C:/Users/ic07949/Desktop/KPMG/Model Development/development code and data/ending-balance_combined/combined"
### No need to make changes below after this line ##############################
### Dependencies
source(paste(pth_lib,"/dev-support.R", sep=""))
source(paste(pth_lib,"/dfast-support.R", sep=""))
source(paste(pth_lib,"/colors.R", sep=""))
# source has the following functions:
# - stack()
# - get_bal_forecast()
# - concat()
# - bin_interval_variable()
# - calc_rsq(), calc_mape(), calc_mad(), calc_rmset()
# - cv_step(), cv_select()
library("openxlsx")
library("data.table")
library("lubridate")
library("ggplot2")
library("RGraphics")
library("gridExtra")
library("ggplot2")
library("scales")
library("tseries")
library("car")
library("urca")
library("lmtest")
library("nortest")
library("stats")
library("orcutt")
library("quantreg")
################################################################################
# Plot aspect-ratio constants used in the ggsave() calls below.
GOLDEN_RATIO = 1.61803398875
STACK_RATIO = 1.20
### Import Econ Data ###########################################################
baseline = readRDS(concat(pth_out, "/econ-data-baseline.RDS"))
adverse = readRDS(concat(pth_out, "/econ-data-adverse.RDS"))
severe = readRDS(concat(pth_out, "/econ-data-severe.RDS"))
boh = readRDS(concat(pth_out, "/data-boh.RDS"))
### Import Balance Data ########################################################
# NOTE(review): duplicate load -- boh was already read three lines above.
boh = readRDS(concat(pth_out, "/data-boh.RDS"))
# boh_train: return the training subset of the global `boh` table for one
# segment.  Works on a copy so the shared table is never modified; the
# segment-specific is_train_<seg> column is renamed to a common "is_train"
# flag and only flagged rows are kept.
boh_train = function(seg) {
train_subset = copy(boh)
setnames(train_subset, concat("is_train_", seg), "is_train")
train_subset[is_train == TRUE,]
}
# All CRE segments share one training window, so "ip" provides the common
# training sample used throughout the back-testing below.
boh_train_ce = boh_train("ip")
################################################################################
### Selected Models ############################################################
### OLS
# Final driver sets for the income-producing (ip) and owner-occupied (oo)
# balance models, with display descriptions used in the plots below.
ols_ip_model = c("crei_eg_lag4","dow_ya")
ols_oo_model = c("crei_eg_lag2")
ols_ip_desc = c("CRE Index 8Q Growth", "Dow 1Q Growth MA(4)")
ols_oo_desc = c("CRE Index 8Q Growth")
### Quantile Regression
# Quantile regressions reuse the OLS driver sets; only tau differs.
qrg_ip_model = ols_ip_model
qrg_oo_model = ols_oo_model
### Linear Testing Model Fit ###################################################
# NOTE(review): qrg_ip/qrg_oo are fit with lm() here as placeholders and are
# re-fit with rq() after the tau grid search further down.
ols_ip = lm(ldiff_ip~., data=boh_train_ce[, c(ols_ip_model, "ldiff_ip"), with=FALSE])
ols_oo = lm(ldiff_oo~., data=boh_train_ce[, c(ols_oo_model, "ldiff_oo"), with=FALSE])
qrg_ip = lm(ldiff_ip~., data=boh_train_ce[, c(qrg_ip_model, "ldiff_ip"), with=FALSE])
qrg_oo = lm(ldiff_oo~., data=boh_train_ce[, c(qrg_oo_model, "ldiff_oo"), with=FALSE])
### combine model info into lists for later usage.
model_obj_list = list(ip=ols_ip, oo=ols_oo)
model_var_list = list(ip=ols_ip_model, oo=ols_oo_model)
model_desc_list = list(ip=ols_ip_desc, oo=ols_oo_desc)
### Find best tau for quantile regressions #####################################
# Find Tau for quantile regression
# Grid-search tau over (0.01, 0.99).  For each candidate, cv_step_qr_bal (from
# the sourced dfast-support library -- presumably a year-by-year cross-
# validation helper; confirm there) returns a balance-level MAPE ("bmape");
# the tau minimizing bmape is kept per segment.
tau_list = seq(0.01,0.99,0.01)
seg_list = c("ip", "oo")
n_tau = length(tau_list)
qrg_tau_summary = data.table(tau=tau_list,ip=0, oo=0)
i = 1
for (tau in tau_list) {
for (seg in seg_list) {
resp = concat("ldiff_", seg)
train = boh_train_ce
frm = 2007
qrg_tau_summary[i, seg] = cv_step_qr_bal(
train
, tau=tau
, bal=seg
, resp=resp
, model=model_var_list[[seg]]
, from_yr=frm
, to_yr=2016
)[["bmape"]]
}
i = i + 1
}
# Pick the bmape-minimizing tau per segment.
tau_selections = list()
for (seg in seg_list) {
tau_selections[[seg]] = tau_list[which.min(qrg_tau_summary[[seg]])]
}
# Re-fit the quantile regressions at the selected taus.
qrg_ip = rq(ldiff_ip~., tau=tau_selections[["ip"]], data=boh_train_ce[, c(qrg_ip_model, "ldiff_ip"), with=FALSE])
qrg_oo = rq(ldiff_oo~., tau=tau_selections[["oo"]], data=boh_train_ce[, c(qrg_oo_model, "ldiff_oo"), with=FALSE])
qrg_model_obj_list = list(ip=qrg_ip, oo=qrg_oo)
### Export coefficient data ####################################################
# Collect OLS, Cochrane-Orcutt (AR(1)-corrected), and quantile-regression
# coefficient estimates per segment into one comparison table.
step = 1
for (seg in c("ip", "oo")) {
coef_names = names(model_obj_list[[seg]]$coefficients)
coef_data = data.table(summary(model_obj_list[[seg]])$coefficients)
oc_coef_data = data.table(summary(cochrane.orcutt(model_obj_list[[seg]]))$coefficients)
names(coef_data) = c("ols_est", "se", "t_value", "p_value")
names(oc_coef_data) = c("oc_est", "oc_se", "oc_t_value", "oc_p_value")
coef_data[["parameter"]] = coef_names
coef_data[["segment"]] = seg
oc_coef_data[["parameter"]] = coef_names
oc_coef_data[["segment"]] = seg
qrg_coef_names = names(qrg_model_obj_list[[seg]]$coefficients)
qrg_coef_data = data.table(summary(qrg_model_obj_list[[seg]])$coefficients)[,1]
setnames(qrg_coef_data, "coefficients", "qrg_est")
qrg_coef_data[["parameter"]] = qrg_coef_names
qrg_coef_data[["tau"]] = tau_selections[[seg]]
# Chained data.table joins on "parameter" merge the three estimate sets.
parameter_data = qrg_coef_data[coef_data[oc_coef_data, on="parameter"], on="parameter"][, c("segment","parameter", "ols_est", "oc_est", "qrg_est", "se", "oc_se", "t_value", "oc_t_value", "p_value", "oc_p_value", "tau")]
# Grow-by-rbind across segments (fine for two segments).
if (step == 1) {
combined_parameter_data = parameter_data
} else {
combined_parameter_data = rbind(combined_parameter_data, parameter_data)
}
step = step + 1
}
# NOTE(review): "paramater" is misspelled in the output filename; downstream
# consumers may depend on it, so it is left unchanged.
write.csv(combined_parameter_data, file=concat(pth_out, "/table-BackTesting-boh-paramater_data.csv"))
### Linearity Testing ##########################################################
# Scatter of each response (log-diff balance) against each model driver with
# an OLS trend line, faceted by driver description; one figure per segment.
for (seg in c("ip", "oo")) {
k = length(model_var_list[[seg]])
resp = concat("ldiff_", seg)
train = boh_train_ce
# Stack one (driver, response) frame per driver for faceting.
for (i in 1:k) {
var = model_var_list[[seg]][i]
dsc = model_desc_list[[seg]][i]
plot_data = train[, c(resp, var), with=FALSE]
setnames(plot_data, c(var, resp), c("xvar", "resp"))
plot_data[["grp"]] = dsc
if (i == 1) {
line_plot_data = plot_data
} else {
line_plot_data = rbind(line_plot_data, plot_data)
}
}
linearity_plot = (
ggplot(data=line_plot_data, aes(x=xvar, y=resp))
+ facet_wrap(~grp, ncol=2, scales="free_x")
+ theme_minimal()
+ theme(
panel.grid.minor=element_blank()
, strip.text = element_text(size = 14)
)
+ geom_point()
+ geom_smooth(method="lm", se=FALSE, linetype=2, size=0.50, color=hxBlue)
+ labs(x=NULL, y=NULL)
+ scale_y_continuous(label=comma_format())
)
# Wider canvas when there is more than one driver facet.
if (k > 1) {
ggsave(concat(pth_out, "/image-BackTesting-", seg, "-linearity.png"), plot=linearity_plot, height=10, width=12*GOLDEN_RATIO, unit="cm")
} else {
ggsave(concat(pth_out, "/image-BackTesting-", seg, "-linearity.png"), plot=linearity_plot, height=10, width=8*GOLDEN_RATIO, unit="cm")
}
}
### VIF Testing ################################################################
# VIFs only apply to the multi-driver ip model (oo has a single regressor).
write.csv(vif(ols_ip), file=concat(pth_out, "/table-BackTesting-ip-vifs.csv"))
### Stationarity Testing #######################################################
# ADF unit-root tests with drift, lag order up to 6 chosen by BIC, per driver.
# NOTE(review): the "Saved output to ..." labels below look shifted by one
# relative to the loops they annotate (a "ci" label precedes the ip loop) --
# confirm the intended output filenames.
### Saved output to table-BackTesting-ci_model_variable_stationarity.txt
for (var in ols_ip_model) {
print(concat("###::ip::",var))
print(summary(ur.df(boh_train_ce[[var]], type='drift', lags=6, selectlags = 'BIC')))
}
### Saved output to table-BackTesting-ip_model_variable_stationarity.txt
for (var in ols_oo_model) {
print(concat("###::oo::",var))
print(summary(ur.df(boh_train_ce[[var]], type='drift', lags=6, selectlags = 'BIC')))
}
### Saved output to table-BackTesting-oo_model_variable_stationarity.txt
### Plot Model Drivers #########################################################
# For each segment, plot every model driver's history (Actual) against the
# three supervisory scenario paths (Baseline/Adverse/Severe), faceted by
# driver description.
for (seg in c("ip", "oo")) {
k = length(model_var_list[[seg]])
train = boh_train_ce
for (i in 1:k) {
var = model_var_list[[seg]][i]
dsc = model_desc_list[[seg]][i]
act = train[, c("qtr_dt", var), with=FALSE]
bsl = baseline[, c("qtr_dt", var), with=FALSE]
adv = adverse[, c("qtr_dt", var), with=FALSE]
sev = severe[, c("qtr_dt", var), with=FALSE]
setnames(act, var, "Actual")
setnames(bsl, var, "Baseline")
setnames(adv, var, "Adverse")
setnames(sev, var, "Severe")
# Nested data.table joins align the four series on quarter date;
# plotting starts at 2007Q3.
plot_data = act[bsl[adv[sev, on="qtr_dt"], on="qtr_dt"], on="qtr_dt"][qtr_dt >= as.Date("2007-09-30"), ]
plot_data[["grp"]] = dsc
if (i == 1) {
driver_plot_data = plot_data
} else {
driver_plot_data = rbind(driver_plot_data, plot_data)
}
}
driver_plot = (
ggplot(data=driver_plot_data)
+ facet_wrap(~grp, ncol=2, scales="free_y")
+ theme_minimal()
+ theme(
legend.title=element_blank()
, legend.position="bottom"
, panel.grid.minor=element_blank()
, strip.text = element_text(size = 14)
)
+ geom_line(aes(x=qtr_dt, y=Baseline, color="Baseline"))
+ geom_line(aes(x=qtr_dt, y=Adverse, color="Adverse"))
+ geom_line(aes(x=qtr_dt, y=Severe, color="Severe"))
+ geom_line(aes(x=qtr_dt, y=Actual, color="Actual"))
+ labs(x=NULL, y=NULL)
+ scale_y_continuous(label=comma_format())
+ theme(legend.title=element_blank(), legend.position="bottom")
+ guides(col=guide_legend(nrow=1))
)
# Wider canvas when there is more than one driver facet.
if (k > 1) {
ggsave(concat(pth_out, "/image-BackTesting-", seg, "-drivers.png"), plot=driver_plot, height=10, width=12*GOLDEN_RATIO, unit="cm")
} else {
ggsave(concat(pth_out, "/image-BackTesting-", seg, "-drivers.png"), plot=driver_plot, height=10, width=8*GOLDEN_RATIO, unit="cm")
}
}
### Scenario talking points ####################################################
# recession_data = baseline[is_recession == TRUE, c("qtr_dt",unique(c(ols_ci_model, ols_ip_model, ols_oo_model))), with=FALSE]
# baseline_data = baseline[is_9q_data == TRUE, c("qtr_dt",unique(c(ols_ci_model, ols_ip_model, ols_oo_model))), with=FALSE]
# adverse_data = adverse[is_9q_data == TRUE, c("qtr_dt",unique(c(ols_ci_model, ols_ip_model, ols_oo_model))), with=FALSE]
# severe_data = severe[is_9q_data == TRUE, c("qtr_dt",unique(c(ols_ci_model, ols_ip_model, ols_oo_model))), with=FALSE]
# apply(recession_data,2,min)[ols_ci_model]
# adverse_data[, c("qtr_dt", ols_ci_model), with=FALSE]
### Residuals Testing ##########################################################
### Normality
# Restore output to console
################################################################################
### Income Producing Residual Tests ############################################
# Normality (Shapiro-Wilk and Anderson-Darling)
shapiro.test(ols_ip$residuals)
ad.test(ols_ip$residuals)
# Homoskedasticity (non-constant variance score test)
ncvTest(ols_ip)
# Autocorrelation (Ljung-Box at lags 1-3)
Box.test(ols_ip$residuals, type="Ljung-Box", lag=1)
Box.test(ols_ip$residuals, type="Ljung-Box", lag=2)
Box.test(ols_ip$residuals, type="Ljung-Box", lag=3)
### Owner Occupied Residual Tests ##############################################
# Normality
shapiro.test(ols_oo$residuals)
ad.test(ols_oo$residuals)
# Homoskedasticity
ncvTest(ols_oo)
# Autocorrelation
Box.test(ols_oo$residuals, type="Ljung-Box", lag=1)
Box.test(ols_oo$residuals, type="Ljung-Box", lag=2)
Box.test(ols_oo$residuals, type="Ljung-Box", lag=3)
# Re-run the oo tests excluding the flagged outlier quarters.
ols_oo_no_outlier = lm(ldiff_oo~., data=boh_train_ce[is_oo_outlier == FALSE, c(ols_oo_model, "ldiff_oo"), with=FALSE])
# Normality
shapiro.test(ols_oo_no_outlier$residuals)
ad.test(ols_oo_no_outlier$residuals)
# Homoskedasticity
ncvTest(ols_oo_no_outlier)
################################################################################
### saved output to table-BackTesting-<seg>_residual_tests.txt
#### Residuals ACF, Scatter, Histograms, and QQPlots ############################
### Income Producing
# Four-panel residual diagnostic figure for the ip model: scatter vs fitted,
# ACF with significance bounds, QQ plot, and histogram.
acf_obj = acf(ols_ip$residuals, plot=FALSE)
acf_df = with(acf_obj, data.frame(lag, acf))
# +/- 1.96/sqrt(n): approximate 95% significance bounds for the ACF.
acf_hi = 1.96/sqrt(length(ols_ip$residuals))
acf_lo = -acf_hi
r_acf = (
ggplot(data = acf_df, mapping = aes(x = lag, y = acf))
+ geom_hline(aes(yintercept = 0))
+ geom_hline(aes(yintercept = acf_hi), linetype=2, color=hxBlue)
+ geom_hline(aes(yintercept = acf_lo), linetype=2, color=hxBlue)
+ geom_segment(mapping = aes(xend = lag, yend = 0))
+ theme_minimal()
+ theme(panel.grid.minor=element_blank(), plot.title = element_text(hjust = 0.50))
+ labs(x="Lag", y="ACF",title="Residual ACF")
)
res_data = data.table(r=ols_ip$residuals, fitted=ols_ip$fitted.values)
r = res_data[["r"]]
# Reference line through the 25th/75th residual quantiles (qqline-style).
p_rng = c(0.25, 0.75)
r_points = quantile(r, p_rng)
n_points = qnorm(p_rng)
slope = diff(r_points)/diff(n_points)
int = r_points[2] - slope * n_points[2]
r_qq = (
ggplot(data=res_data, aes(sample=r))
+ stat_qq()
+ geom_abline(intercept = int, slope=slope, linetype=2, color=hxBlue)
+ labs(x="Theoretical Quantiles", y="Residuals", title="Residual QQ Plot")
+ theme_minimal()
+ theme(panel.grid.minor=element_blank(), plot.title = element_text(hjust = 0.50))
)
r_hist = (
ggplot(data=res_data, aes(r))
+ theme_minimal()
+ theme(panel.grid.minor=element_blank(), plot.title = element_text(hjust = 0.50))
+ geom_histogram(color="white", bins=10, fill=hxBlue)
+ labs(x="Residuals", y=NULL,title="Residual Distribution")
+ scale_y_continuous(label=comma_format())
+ scale_x_continuous(label=comma_format())
)
r_scat = (
ggplot(data=res_data)
+ geom_point(aes(x=fitted, y=r))
+ geom_hline(yintercept=0, linetype=2, color=hxBlue)
+ theme_minimal()
+ theme(panel.grid.minor=element_blank(),plot.title = element_text(hjust = 0.50))
+ labs(x="Fitted Values", y="Residuals", title="Residual Scatter Plot")
+ scale_y_continuous(label=comma_format())
+ scale_x_continuous(label=comma_format())
)
# Assemble the 2x2 grid and save.
r_top = grid.arrange(r_scat, r_acf,ncol=2)
r_bot = grid.arrange(r_qq, r_hist,ncol=2)
residual_plot = grid.arrange(r_top, r_bot, nrow=2)
ggsave(concat(pth_out, "/image-BackTesting-ip-residuals.png"), plot=residual_plot, height=12*GOLDEN_RATIO, width=12*GOLDEN_RATIO, unit="cm")
### Owner Occupied
# Same four-panel residual diagnostic figure as the ip block above, applied
# to the oo model.
# NOTE(review): near-verbatim duplication of the ip block -- a shared plotting
# helper would remove the copy, but the repetition is kept here as-is.
acf_obj = acf(ols_oo$residuals, plot=FALSE)
acf_df = with(acf_obj, data.frame(lag, acf))
# +/- 1.96/sqrt(n): approximate 95% significance bounds for the ACF.
acf_hi = 1.96/sqrt(length(ols_oo$residuals))
acf_lo = -acf_hi
r_acf = (
ggplot(data = acf_df, mapping = aes(x = lag, y = acf))
+ geom_hline(aes(yintercept = 0))
+ geom_hline(aes(yintercept = acf_hi), linetype=2, color=hxBlue)
+ geom_hline(aes(yintercept = acf_lo), linetype=2, color=hxBlue)
+ geom_segment(mapping = aes(xend = lag, yend = 0))
+ theme_minimal()
+ theme(panel.grid.minor=element_blank(), plot.title = element_text(hjust = 0.50))
+ labs(x="Lag", y="ACF",title="Residual ACF")
)
res_data = data.table(r=ols_oo$residuals, fitted=ols_oo$fitted.values)
r = res_data[["r"]]
# Reference line through the 25th/75th residual quantiles (qqline-style).
p_rng = c(0.25, 0.75)
r_points = quantile(r, p_rng)
n_points = qnorm(p_rng)
slope = diff(r_points)/diff(n_points)
int = r_points[2] - slope * n_points[2]
r_qq = (
ggplot(data=res_data, aes(sample=r))
+ stat_qq()
+ geom_abline(intercept = int, slope=slope, linetype=2, color=hxBlue)
+ labs(x="Theoretical Quantiles", y="Residuals", title="Residual QQ Plot")
+ theme_minimal()
+ theme(panel.grid.minor=element_blank(), plot.title = element_text(hjust = 0.50))
)
r_hist = (
ggplot(data=res_data, aes(r))
+ theme_minimal()
+ theme(panel.grid.minor=element_blank(), plot.title = element_text(hjust = 0.50))
+ geom_histogram(color="white", bins=10, fill=hxBlue)
+ labs(x="Residuals", y=NULL,title="Residual Distribution")
+ scale_y_continuous(label=comma_format())
+ scale_x_continuous(label=comma_format())
)
r_scat = (
ggplot(data=res_data)
+ geom_point(aes(x=fitted, y=r))
+ geom_hline(yintercept=0, linetype=2, color=hxBlue)
+ theme_minimal()
+ theme(panel.grid.minor=element_blank(),plot.title = element_text(hjust = 0.50))
+ labs(x="Fitted Values", y="Residuals", title="Residual Scatter Plot")
+ scale_y_continuous(label=comma_format())
+ scale_x_continuous(label=comma_format())
)
# Assemble the 2x2 grid and save.
r_top = grid.arrange(r_scat, r_acf,ncol=2)
r_bot = grid.arrange(r_qq, r_hist,ncol=2)
residual_plot = grid.arrange(r_top, r_bot, nrow=2)
ggsave(concat(pth_out, "/image-BackTesting-oo-residuals.png"), plot=residual_plot, height=12*GOLDEN_RATIO, width=12*GOLDEN_RATIO, unit="cm")
################################################################################
### Performance Assessment: test-data setup ####################################
### Out-of-time (OOT) window: 2007Q2 through 2017Q2, joined to baseline MEVs.
boh_2007q2_to_2017q2 = boh[
    qtr_dt >= as.Date("2007-06-30", "%Y-%m-%d")
    & qtr_dt <= as.Date("2017-06-30", "%Y-%m-%d")
,]
oot_testing = baseline[boh_2007q2_to_2017q2, on="qtr_dt"]
### Setup hold-out test data:
### give one of 2007's data points to 2006 so we can do a hold-out for 2007.
boh_2006_adj = boh[qtr_dt != as.Date("2006-12-31"), ]
### Relabel the 2007-03-31 row as 2006-12-31. The comparison already yields the
### logical vector; the former ifelse(cond, TRUE, FALSE) wrapper was a no-op.
adj_row = boh_2006_adj[["qtr_dt"]] == as.Date("2007-03-31")
### NOTE(review): chained replacement on a data.table subset relies on base R's
### nested-replacement expansion; confirm the new date actually lands in
### boh_2006_adj (an explicit boh_2006_adj[adj_row, qtr_dt := ...] would be safer).
boh_2006_adj[adj_row,][["qtr_dt"]] = as.Date("2006-12-31")
################################################################################
### To do: In-sample Performance
### To do: Out-of-time Performance
### Performance assessment over out-of-time and hold-out windows ###############
### Last in-sample quarter; quarters after this date form the OOT window.
MAX_TRAIN_DATE = "2015-06-30"
### Moving-average window used when rebuilding balances from log-diff forecasts.
ma_n = 1
step = 1
### Per-segment hold-out coefficient tables; consumed by the coefficient
### stability analysis further below.
hold_out_est_list = list()
for(seg in c("ip", "oo")) {
    resp = concat("ldiff_", seg)
    model = model_var_list[[seg]]
    ols_model_obj = model_obj_list[[seg]]
    qrg_model_obj = qrg_model_obj_list[[seg]]
    train = boh_train_ce
    cv_data = boh_2006_adj
    ### OLS Performance
    ols_perf = get_oot_data(
        train
        , oot_testing
        , bal_var=seg
        , dt_var="qtr_dt"
        , resp=resp
        , model=model
        , type="ols"
        , ar_term=""
        , max_train_dt=MAX_TRAIN_DATE
        , ma_n=ma_n
    )
    ### In-sample ("ns") fitted values from the final model object.
    ols_perf[["ns_resp_hat"]] = predict(ols_model_obj, ols_perf)
    ols_hold_out = get_hold_out_perf(
        cv_data
        , train
        , bal_var=seg
        , dt_var="qtr_dt"
        , resp=resp
        , model=model
        , type="ols"
        , ar_term=""
    )
    # used in the coefficient stability analysis below
    hold_out_est_list[[seg]] = ols_hold_out[["hold_out_coefs"]][var != "(Intercept)",]
    ### NOTE: metric names keep the historical casing ("Insample" vs "InSample")
    ### because downstream reports key on these exact column names.
    ns = ols_perf[, .(
        Ldiff_Insample_Rsq = calc_rsq(resp, ns_resp_hat)
        , Ldiff_Insample_MAPE = calc_mape(resp, ns_resp_hat)
        , Ldiff_InSample_UR = calc_under_rate(resp, ns_resp_hat)
    )]
    ### Rows are already filtered to is_oot == 1, so the former "is_oot *"
    ### multiplication inside calc_rsq was a no-op; dropped for consistency
    ### with the quantile-regression metrics below.
    oot = ols_perf[is_oot == 1, .(
        Balance_OOTime_Rsq = calc_rsq(bal, bal_est)
        , Balance_OOTime_MAPE = calc_mape(bal, bal_est)
        , Balance_OOTime_UR = calc_under_rate(bal, bal_est)
        , Ldiff_OOTime_Rsq = calc_rsq(resp, resp_hat)
        , Ldiff_OOTime_MAPE = calc_mape(resp, resp_hat)
        , Ldiff_OOTime_UR = calc_under_rate(resp, resp_hat)
    )]
    ns_oot = data.table(ns, oot)
    cv = ols_hold_out[["hold_out_data"]][, .(
        Ldiff_CV_Rsq = calc_rsq(resp, resp_hat)
        , Ldiff_CV_MAPE = calc_mape(resp, resp_hat)
        , Ldiff_CV_UR = calc_under_rate(resp, resp_hat)
        , Balance_CV_Rsq = calc_rsq(bal, bal_est)
        , Balance_CV_MAPE = calc_mape(bal, bal_est)
        , Balance_CV_UR = calc_under_rate(bal, bal_est)
    )]
    ### Long form keyed on metric name, one value column per estimator.
    ols_metrics = data.table(ns_oot, cv)
    ols_metrics[["id"]] = 1
    ols_metrics = melt(ols_metrics, id.vars="id", value.name="ols", variable.name="metric")[, c("metric", "ols")]
    ### Quantile Regression Performance
    qrg_perf = get_oot_data(
        train
        , oot_testing
        , bal_var=seg
        , dt_var="qtr_dt"
        , resp=resp
        , tau=tau_selections[[seg]]
        , model=model
        , type="qrg"
        , ar_term=""
        , max_train_dt=MAX_TRAIN_DATE
        , ma_n=ma_n
    )
    qrg_perf[["ns_resp_hat"]] = predict(qrg_model_obj, qrg_perf)
    qrg_hold_out = get_hold_out_perf(
        cv_data
        , train
        , bal_var=seg
        , dt_var="qtr_dt"
        , resp=resp
        , tau=tau_selections[[seg]]
        , model=model
        , type="qrg"
        , ar_term=""
    )
    ns = qrg_perf[, .(
        Ldiff_Insample_Rsq = calc_rsq(resp, ns_resp_hat)
        , Ldiff_Insample_MAPE = calc_mape(resp, ns_resp_hat)
        , Ldiff_InSample_UR = calc_under_rate(resp, ns_resp_hat)
    )]
    oot = qrg_perf[is_oot == 1, .(
        Balance_OOTime_Rsq = calc_rsq(bal, bal_est)
        , Balance_OOTime_MAPE = calc_mape(bal, bal_est)
        , Balance_OOTime_UR = calc_under_rate(bal, bal_est)
        , Ldiff_OOTime_Rsq = calc_rsq(resp, resp_hat)
        , Ldiff_OOTime_MAPE = calc_mape(resp, resp_hat)
        , Ldiff_OOTime_UR = calc_under_rate(resp, resp_hat)
    )]
    ns_oot = data.table(ns, oot)
    cv = qrg_hold_out[["hold_out_data"]][, .(
        Ldiff_CV_Rsq = calc_rsq(resp, resp_hat)
        , Ldiff_CV_MAPE = calc_mape(resp, resp_hat)
        , Ldiff_CV_UR = calc_under_rate(resp, resp_hat)
        , Balance_CV_Rsq = calc_rsq(bal, bal_est)
        , Balance_CV_MAPE = calc_mape(bal, bal_est)
        , Balance_CV_UR = calc_under_rate(bal, bal_est)
    )]
    qrg_metrics = data.table(ns_oot, cv)
    qrg_metrics[["id"]] = 1
    qrg_metrics = melt(qrg_metrics, id.vars="id", value.name="qrg", variable.name="metric")[, c("metric", "qrg")]
    ### Combine OLS and QR metrics side by side, joined on metric name.
    metrics = ols_metrics[qrg_metrics, on="metric"]
    metrics[["segment"]] = seg
    ### Visualization: in-sample fit up to the OOT cut plus the OOT overlay,
    ### for both the log-difference and the rebuilt balance.
    ### Log-difference panel
    ols_resp = ols_perf[, c("dt", "is_oot", "resp", "ns_resp_hat", "resp_hat")]
    setnames(ols_resp, c("resp_hat", "ns_resp_hat"), c("ols_resp_hat", "ols_ns_resp_hat"))
    qrg_resp = qrg_perf[, c("dt", "ns_resp_hat", "resp_hat")]
    setnames(qrg_resp, c("resp_hat", "ns_resp_hat"), c("qrg_resp_hat", "qrg_ns_resp_hat"))
    resp_data = qrg_resp[ols_resp, on="dt"]
    ### Pad with the balance-panel columns (all NA) so the two panels can be
    ### stacked with rbind below.
    resp_data[,
        `:=`(
            bal = NA
            , ols_bal_est = NA
            , qrg_bal_est = NA
            , bal_oot = NA
            , bal_ns = NA
        )
    ]
    ### Blank in-sample fits inside the OOT window and vice versa; the lead
    ### shift keeps one overlapping point so the plotted lines connect.
    resp_data[, ols_ns_resp_hat := ifelse(is_oot == 0, ols_ns_resp_hat, NA)]
    resp_data[, qrg_ns_resp_hat := ifelse(is_oot == 0, qrg_ns_resp_hat, NA)]
    resp_data[, ols_resp_hat := ifelse(is_oot == 1 | shift(is_oot, n=1, type="lead") == 1, ols_resp_hat, NA)]
    resp_data[, qrg_resp_hat := ifelse(is_oot == 1 | shift(is_oot, n=1, type="lead") == 1, qrg_resp_hat, NA)]
    resp_data[["grp"]] = "Balance Log-difference"
    ### Balance panel
    ols_bal = ols_perf[, c("dt", "is_oot", "bal", "bal_est")]
    setnames(ols_bal, c("bal_est"), c("ols_bal_est"))
    qrg_bal = qrg_perf[, c("dt", "bal_est")]
    setnames(qrg_bal, c("bal_est"), c("qrg_bal_est"))
    bal_data = qrg_bal[ols_bal, on="dt"]
    bal_data[,
        `:=`(
            resp = NA
            , ols_resp_hat = NA
            , qrg_resp_hat = NA
            , ols_ns_resp_hat = NA
            , qrg_ns_resp_hat = NA
            , bal_oot = NA
            , bal_ns = NA
        )
    ]
    bal_data[, bal_oot := ifelse(is_oot == 1 | shift(is_oot, n=1, type="lead") == 1, bal, NA)]
    bal_data[, ols_bal_est := ifelse(is_oot == 1 | shift(is_oot, n=1, type="lead") == 1, ols_bal_est, NA)]
    bal_data[, qrg_bal_est := ifelse(is_oot == 1 | shift(is_oot, n=1, type="lead") == 1, qrg_bal_est, NA)]
    bal_data[, bal_ns := ifelse(is_oot == 0, bal, NA)]
    bal_data[["grp"]] = "Balance (billions)"
    perf_series = rbind(resp_data, bal_data)
    perf_plot = (
        ggplot(data=perf_series)
        + facet_wrap(~grp, ncol=2, scales="free_y")
        + theme_minimal()
        + theme(
            legend.title=element_blank()
            , legend.position="bottom"
            , panel.grid.minor=element_blank()
            , strip.text = element_text(size = 14)
        )
        + geom_point(aes(x=dt, y=resp, color="Actual"))
        + geom_line(aes(x=dt, y=ols_ns_resp_hat, color="OLS"))
        + geom_line(aes(x=dt, y=ols_resp_hat, color="OLS"), linetype=2)
        + geom_line(aes(x=dt, y=qrg_ns_resp_hat, color="QR"))
        + geom_line(aes(x=dt, y=qrg_resp_hat, color="QR"), linetype=2)
        + geom_line(aes(x=dt, y=bal_ns, color="Actual"))  # was drawn twice; duplicate layer removed
        + geom_line(aes(x=dt, y=ols_bal_est, color="OLS"), linetype=2)
        + geom_line(aes(x=dt, y=qrg_bal_est, color="QR"), linetype=2)
        + geom_point(aes(x=dt, y=bal_oot, color="Actual"))
        + labs(x=NULL, y=NULL)
        + scale_y_continuous(label=comma_format())
        + theme(legend.title=element_blank(), legend.position="bottom")
        + guides(col=guide_legend(nrow=1))
        #+ scale_colour_manual(values=c(hxDRed, hxDGreen, hxDAqua, hxGray, hxDPurple))
    )
    ggsave(concat(pth_out, "/image-BackTesting-", seg, "-performance.png"), plot=perf_plot, height=10, width=12*GOLDEN_RATIO, unit="cm")
    ### Accumulate per-segment metrics across iterations.
    if (step == 1) {
        combined_metrics = metrics
    } else {
        combined_metrics = rbind(combined_metrics, metrics)
    }
    step = step + 1
}
write.csv(combined_metrics, concat(pth_out, "/table-BackTesting-boh-performance_metrics.csv"))
### Stability (uses hold_out_est_list built in the performance loop above) #####
### To do: More Stability Testing (using yearly hold-out)
### MAPE between final estimates and hold-out estimates.
### Plot estimates for each hold-out year.
step = 1
for (seg in c("ip", "oo")) {
    hold_out_coefs = hold_out_est_list[[seg]]
    var_names = model_var_list[[seg]]
    var_descs = model_desc_list[[seg]]
    ### Final (full-sample) coefficient estimates, reshaped into a lookup table.
    final_coefs = summary(model_obj_list[[seg]])$coefficients
    coef_names = row.names(final_coefs)
    final_coefs = data.table(final_coefs)
    final_coefs[["var"]] = coef_names
    final_coefs = final_coefs[, c("var", "Estimate")]
    setnames(final_coefs, "Estimate", "final_est")
    ### Attach human-readable variable descriptions and the final estimate.
    var_lookup = data.table(var=var_names, name=var_descs)
    hold_out_coefs = var_lookup[hold_out_coefs, on="var"]
    hold_out_coefs = final_coefs[hold_out_coefs, on="var"]
    ### Absolute percentage error of each hold-out estimate vs the final one.
    hold_out_coefs[, ape := abs(est/final_est - 1)]
    coefs_mape = hold_out_coefs[,mean(ape), by="var"]
    coefs_mape[["segment"]] = seg
    setnames(coefs_mape, "V1", "mape")
    if (step == 1) {
        stacked_coefs_mape = coefs_mape
    } else {
        stacked_coefs_mape = rbind(stacked_coefs_mape, coefs_mape)
    }
    step = step + 1
    write.csv(hold_out_coefs, concat(pth_out, "/table-BackTesting-", seg, "-hold_out_coefs.csv"))
    ### One-row table of final estimates, ordered like var_names, for the
    ### dashed reference lines.
    final_coef_row = dcast(final_coefs[var != "(Intercept)", c("var", "final_est")], .~var, value.var="final_est")[, var_names, with=FALSE]
    k = length(var_names)
    x_marks = 2007:2016
    ### Previously three near-identical plot blocks for k = 3/2/1 that differed
    ### only in the number of reference hlines; the hlines are now added in a
    ### loop. geom_hline's yintercept is a parameter (not an aes mapping), so
    ### it is evaluated immediately and is safe inside a loop.
    p = (
        ggplot()
        + theme_minimal()
        + theme(panel.grid.minor=element_blank())
        + geom_line(data=hold_out_coefs, aes(x=yr_out, y=est, color=name))
        + labs(x="Hold-out Year",y="Coefficient Estimate")
        + scale_x_discrete(limits=x_marks, labels=x_marks)
        + guides(col=guide_legend(ncol=2))
        + theme(legend.title=element_blank(), legend.position="bottom")
    )
    for (i in 1:k) {
        p = p + geom_hline(yintercept = final_coef_row[[var_names[i]]], linetype=2, color=hxGray)
    }
    ggsave(concat(pth_out, "/image-BackTesting-", seg, "-coefficient_stability.png"), plot=p, height=8, width=10*GOLDEN_RATIO, unit="cm")
}
write.csv(stacked_coefs_mape, concat(pth_out, "/table-BackTesting-boh-hold_out_coefs_mape.csv"))
### Sensitivity ################################################################
for (seg in c("ip", "oo")) {
    # NOTE: this section relies on lists defined back at the model-fitting
    # step: model_var_list, model_desc_list, and model_obj_list.
    train = boh_train_ce
    var_names = model_var_list[[seg]]
    var_descs = model_desc_list[[seg]]
    coef_names = c("(Intercept)", model_var_list[[seg]])
    fit = model_obj_list[[seg]]
    ### One row per coefficient: observed range (lo/hi), mean, sd, and the
    ### min/max contribution of coef * x to the linear predictor. The intercept
    ### row uses placeholder 1s for lo/hi/mu/sd; rbind matches the later rows
    ### by column name, so the differing argument order below is harmless.
    for (name in coef_names) {
        coef = fit$coefficients[name][[1]]
        if (name == coef_names[1]) {
            lo = coef
            hi = coef
            sens_df = data.frame(parm=name, coef=coef, lo=1, hi=1, mu=1, sd=1, xb_comp_lo=lo, xb_comp_hi=hi)
        } else {
            lo = min(train[[name]])
            hi = max(train[[name]])
            mu = mean(train[[name]])
            sd_v = sd(train[[name]])  # renamed from `sd`, which shadowed stats::sd
            xb_lo = min(coef * train[[name]])
            xb_hi = max(coef * train[[name]])
            sens_df = rbind(sens_df, data.frame(parm=name, lo=lo, hi=hi, mu=mu, sd=sd_v, coef=coef, xb_comp_lo=xb_lo, xb_comp_hi=xb_hi))
        }
    }
    sens_dt = data.table(sens_df)
    sens_dt$xb_range = max(fit$fitted.values) - min(fit$fitted.values)
    sens_dt[, `:=`(
        xb_comp_range = xb_comp_hi - xb_comp_lo
        , xb_comp_total = sum(xb_comp_hi - xb_comp_lo)
    )]
    ### Variable importance = share of total linear-predictor movement.
    sens_dt[, importance := xb_comp_range/xb_comp_total]
    write.csv(sens_dt, file=concat(pth_out, "/table-BackTesting-", seg, "_variable_importance.csv"))
    ### Stress the average case: shock each driver +/- 5 sd around its mean.
    case_template = dcast(sens_dt[parm != "(Intercept)", c("parm", "mu")], .~parm, value.var="mu")[, var_names, with=FALSE]
    case_template[["id"]] = 1
    sigmas = data.table(id=1, sigma_factor=seq(-5, 5))
    case_template = sigmas[case_template, on="id", allow.cartesian=TRUE]
    ### One scenario table per driver. (This loop was previously duplicated
    ### verbatim; the second copy recomputed identical results and is removed.)
    ### NOTE(review): `cases = case_template` then `cases[[name]] = ...` relies
    ### on base copy-on-modify so case_template stays untouched — confirm.
    sens_list = list()
    for (name in var_names) {
        cases = case_template
        shock_sd = sens_dt[parm == name, sd]
        cases[[name]] = cases[[name]] + cases[["sigma_factor"]] * shock_sd
        cases[["growth_rate"]] = exp(predict(fit, cases)) - 1
        sens_list[[name]] = cases
    }
    avg_growth = exp(mean(train[[concat("ldiff_", seg)]])) - 1
    k = length(var_names)
    ### Shared plotmath axis labels (-5 sigma ... mu ... +5 sigma); previously
    ### repeated verbatim inside each of the three plot branches below.
    sigma_labels = c(
        expression(paste("-5",sigma, sep=""))
        , expression(paste("-4",sigma, sep=""))
        , expression(paste("-3",sigma, sep=""))
        , expression(paste("-2",sigma, sep=""))
        , expression(paste("-1",sigma, sep=""))
        , expression(mu)
        , expression(paste("+1",sigma, sep=""))
        , expression(paste("+2",sigma, sep=""))
        , expression(paste("+3",sigma, sep=""))
        , expression(paste("+4",sigma, sep=""))
        , expression(paste("+5",sigma, sep=""))
    )
    ### Sensitivity Plots (shocks). Branches differ only in how many driver
    ### lines are drawn; kept explicit because aes() captures its variables
    ### lazily, which makes adding geom_line layers in a loop unsafe.
    if (k == 3) {
        p_shock = (
            ggplot()
            + geom_line(data=sens_list[[var_names[1]]], aes(x=sigma_factor, y=growth_rate, color=var_descs[1]))
            + geom_line(data=sens_list[[var_names[2]]], aes(x=sigma_factor, y=growth_rate, color=var_descs[2]))
            + geom_line(data=sens_list[[var_names[3]]], aes(x=sigma_factor, y=growth_rate, color=var_descs[3]))
            + theme_minimal()
            + theme(panel.grid.minor=element_blank())
            + labs(x="Standard Deviations", y="Growth Rate")
            + geom_hline(yintercept = avg_growth, linetype=2, color=hxGray)
            + scale_y_continuous(label=percent_format())
            + guides(col=guide_legend(ncol=2))
            + theme(legend.title=element_blank(), legend.position="bottom")
            + scale_x_discrete(limits = seq(-5,5), labels=sigma_labels)
        )
    } else if (k == 2) {
        p_shock = (
            ggplot()
            + geom_line(data=sens_list[[var_names[1]]], aes(x=sigma_factor, y=growth_rate, color=var_descs[1]))
            + geom_line(data=sens_list[[var_names[2]]], aes(x=sigma_factor, y=growth_rate, color=var_descs[2]))
            + theme_minimal()
            + theme(panel.grid.minor=element_blank())
            + labs(x="Standard Deviations", y="Growth Rate")
            + geom_hline(yintercept = avg_growth, linetype=2, color=hxGray)
            + scale_y_continuous(label=percent_format())
            + guides(col=guide_legend(ncol=2))
            + theme(legend.title=element_blank(), legend.position="bottom")
            + scale_x_discrete(limits = seq(-5,5), labels=sigma_labels)
        )
    } else {
        p_shock = (
            ggplot()
            + geom_line(data=sens_list[[var_names[1]]], aes(x=sigma_factor, y=growth_rate, color=var_descs[1]))
            + theme_minimal()
            + theme(panel.grid.minor=element_blank())
            + labs(x="Standard Deviations", y="Growth Rate")
            + geom_hline(yintercept = avg_growth, linetype=2, color=hxGray)
            + scale_y_continuous(label=percent_format())
            + guides(col=guide_legend(ncol=2))
            + theme(legend.title=element_blank(), legend.position="bottom")
            + scale_x_discrete(limits = seq(-5,5), labels=sigma_labels)
        )
    }
    ggsave(concat(pth_out, "/image-BackTesting-", seg,"-sensitivity_shocks.png"), plot=p_shock, height=12, width=12*GOLDEN_RATIO, unit="cm")
}
<file_sep>/EBModel/5-ExploreData.R
################################################################################
# Bank of Hope
# Commercial Real Estate Ending Balances
# Program: <>.R
# Author(s): KPMG, LLP
# Purpose:
# Data Dependences:
#
#
# R-version: R version 3.3.1 (2016-06-21)
# -- "Bug in Your Hair" Copyright (C) 2016 The R Foundation
# for Statistical Computing Platform: x86_64-apple-darwin13.4.0 (64-bit)
################################################################################
### Environment Settings #######################################################
### Developer-specific paths: raw inputs, shared helper scripts, and output dir.
pth_inputs = "C:/Users/ic07949/Desktop/KPMG/Model Development/development code and data/ending-balance_combined/combined/read-only-inputs"
pth_lib = "C:/Users/ic07949/Desktop/KPMG/Model Development/development code and data/ending-balance_combined/combined/library"
pth_out = "C:/Users/ic07949/Desktop/KPMG/Model Development/development code and data/ending-balance_combined/combined"
### No need to make changes below after this line ##############################
### Dependencies
source(paste(pth_lib,"/dev-support.R", sep=""))
source(paste(pth_lib,"/dfast-support.R", sep=""))
source(paste(pth_lib,"/colors.R", sep=""))
# sourced files provide the following functions:
# - stack()
# - get_bal_forecast()
# - concat()
# - bin_interval_variable()
# - calc_rsq(), calc_mape(), calc_mad(), calc_rmset()
# - cv_step(), cv_select()
library("openxlsx")
library("data.table")
library("lubridate")
library("ggplot2")
library("RGraphics")
library("gridExtra")
# NOTE: ggplot2 is loaded twice (harmless; library() is idempotent).
library("ggplot2")
library("scales")
library("tseries")
library("car")
library("urca")
library("lmtest")
library("stats")
library("orcutt")
################################################################################
GOLDEN_RATIO = 1.61803398875
STACK_RATIO = 1.20
### Import Data ################################################################
boh = readRDS(concat(pth_out, "/data-boh.RDS"))
### Return the training rows of `boh` for a segment: renames the segment's
### is_train_<seg> flag column to "is_train" on a copy and filters to TRUE.
boh_train = function(seg) {
train_ind = concat("is_train_", seg)
boh_cp = copy(boh)
setnames(boh_cp, train_ind, "is_train")
boh_cp[is_train == TRUE,]
}
boh_train_ce = boh_train("ip")
#boh_train_ci = boh_train("ci")
################################################################################
### Peer-bank CRE data and variable metadata. The commented lines are the
### retired C&I variant of this pipeline.
cre_banks = readRDS(concat(pth_out, "/data-cre_banks.RDS"))
#c_i_banks = readRDS(concat(pth_out, "/data-c_i_banks.RDS"))
#boh_banks = c_i_banks[cre_banks, on=c("qtr_dt", "bank")]
info = data.table(get_excel(concat(pth_inputs, "/table-variable_information.xlsx"), "vars"))
#boh_banks_agg = boh_banks[, c(.N, lapply(.SD, sum)), by=c("qtr_dt", "bank")][, c("qtr_dt","bank", "bank_pct_mf", "bank_pct_no", "bank_pct_oo", "bank_pct_ci")]
#write.csv(boh_banks_agg, file=concat(pth_out, "/table-ExploreData-boh-banks.csv"))
#write.csv(boh[, c("qtr_dt", "ci", "mf", "no", "oo")], file=concat(pth_out, "/table-ExploreData-boh-balances.csv"))
### Aggregate peer-bank segment shares by quarter/bank and export alongside
### BoH's own segment balances.
boh_banks_agg = cre_banks[, c(.N, lapply(.SD, sum)), by=c("qtr_dt", "bank")][, c("qtr_dt","bank", "bank_pct_mf", "bank_pct_no", "bank_pct_oo")]
write.csv(boh_banks_agg, file=concat(pth_out, "/table-ExploreData-boh-banks.csv"))
write.csv(boh[, c("qtr_dt", "mf", "no", "oo")], file=concat(pth_out, "/table-ExploreData-boh-balances.csv"))
################################################################################
### Find correlations ##########################################################
### Correlation of each candidate driver with each segment's log-difference
### response, computed over the training window.
#segments = c("ci", "no", "mf", "oo")
segments = c("no", "mf", "oo")
cor_table = info[, c("name")]
# Adds one zero-initialized column per segment.
cor_table[, segments] = 0
n = dim(cor_table)[1]
# NOTE: `names` shadows base::names here; function calls still resolve the
# base function because R skips non-function bindings in call position.
names = cor_table[["name"]]
for (seg in segments) {
ldiff = concat("ldiff_", seg)
# NOTE(review): dead branch — "ci" is not in `segments`, and boh_train_ci is
# commented out above, so re-enabling "ci" alone would error here.
if (seg == "ci") {
X = boh_train_ci
} else {
X = boh_train_ce
}
Y = X[[ldiff]]
for (i in 1:n) {
name = names[i]
cor_table[i, seg] = cor(X[[name]], Y)
}
}
### update variable information with the correlations and persist for later steps
updated_info = info[cor_table, on="name"]
saveRDS(updated_info, concat(pth_inputs, "/table-variable_information_rho.RDS"))
### Percent of BoH Plots #######################################################
### Also add PSI (population stability index) of recent vs training balances.
#past_data = boh_train_ce[, c("qtr_dt", "ci", "mf", "no" , "oo")]
#active_data = boh[qtr_dt > as.Date("2016-12-31"), c("qtr_dt", "ci", "mf", "no" , "oo")]
past_data = boh_train_ce[, c("qtr_dt", "mf", "no" , "oo")]
active_data = boh[qtr_dt > as.Date("2016-12-31"), c("qtr_dt", "mf", "no" , "oo")]
past_data[["psi"]] = get_psi_from_vector_amts(active_data, past_data, cols=c( "mf", "no", "oo"))
avg_mf_pct = mean(boh_train_ce[["pct_mf"]])
### Segment shares of the total CRE balance over time, with the average
### multifamily share as a dashed reference line.
p_pct = (
ggplot()
+ geom_line(data=boh_train_ce, aes(x=qtr_dt, y=pct_mf, color="Multifamily"))
+ geom_line(data=boh_train_ce, aes(x=qtr_dt, y=pct_no, color="Non-Owner"))
+ geom_line(data=boh_train_ce, aes(x=qtr_dt, y=pct_ip, color="Income Producing"))
+ geom_line(data=boh_train_ce, aes(x=qtr_dt, y=pct_oo, color="Owner Occupied"))
+ theme_minimal()
+ labs(x=NULL, y="% of BOH CRE Balance")
+ theme(panel.grid.minor=element_blank())
+ theme(legend.title=element_blank(), legend.position="bottom")
+ scale_y_continuous(label=percent_format())
+ geom_hline(yintercept = avg_mf_pct, linetype=2, color=hxGray)
# NOTE(review): the "3%" annotation is hard-coded — confirm it still matches
# the computed avg_mf_pct.
+ geom_text(aes(as.Date("2009-03-31"),avg_mf_pct,label = "3%", vjust = -1))
+ guides(col=guide_legend(nrow=1))
)
p_psi =(
ggplot(data=past_data, mapping = aes(x = qtr_dt, y = psi))
#+ geom_hline(aes(yintercept = 0.25), linetype=2, color="red")
+ geom_segment(mapping = aes(xend = qtr_dt, yend = 0))
+ theme_minimal()
+ theme(panel.grid.minor=element_blank())
+ labs(x=NULL, y="PSI")
)
pop_plot = grid.arrange(p_pct, p_psi, nrow=2, heights=c(9,3))
ggsave(concat(pth_out, "/image-ExploreData-boh-balance_pct_of_boh_chart.png"), plot=pop_plot, height=12*GOLDEN_RATIO, width=12*GOLDEN_RATIO, unit="cm")
### cumulative growth chart: each segment's balance indexed to its 2007Q2
### level, smoothed with a 2-quarter moving average; recession band shaded.
#anchors = boh_train_ce[qtr_dt == as.Date("2007-06-30"), c("ci", "no", "mf", "ip", "oo")]
anchors = boh_train_ce[qtr_dt == as.Date("2007-06-30"), c("no", "mf", "ip", "oo")]
g_p = (
ggplot()
+ geom_rect(aes(xmin=as.Date("2007-12-31"), xmax=as.Date("2009-06-30"), ymin=1, ymax=2.75), alpha=0.35)
+ geom_text(aes(x=as.Date("2008-09-30"), y=2.40, label="Recession"), size=4, color="white")
+ geom_line(data=boh_train_ce[qtr_dt >= as.Date("2007-12-31"),], aes(x=qtr_dt, y=ma(mf/anchors[["mf"]],n=2), color="Multifamily"))
+ geom_line(data=boh_train_ce[qtr_dt >= as.Date("2007-12-31"),], aes(x=qtr_dt, y=ma(no/anchors[["no"]],n=2), color="Non-Owner Occupied"))
+ geom_line(data=boh_train_ce[qtr_dt >= as.Date("2007-12-31"),], aes(x=qtr_dt, y=ma(oo/anchors[["oo"]],n=2), color="Owner Occupied"))
# + geom_line(data=boh_train_ce[qtr_dt >= as.Date("2007-12-31"),], aes(x=qtr_dt, y=ma(ci/anchors[["ci"]],n=2), color="C&I"))
+ geom_line(data=boh_train_ce[qtr_dt >= as.Date("2007-12-31"),], aes(x=qtr_dt, y=ma(ip/anchors[["ip"]],n=2), color="Income Producing"), linetype=2)
+ theme_minimal()
+ labs(x=NULL, y="Cumulative Growth %")
+ scale_y_continuous(label=percent_format())
+ theme(panel.grid.minor=element_blank(), plot.title = element_text(hjust = 0.50))
+ theme(legend.title=element_blank(), legend.position="bottom")
)
ggsave(concat(pth_out, "/image-ExploreData-boh-growth_patterns.png"), plot=g_p, height=12, width=12*GOLDEN_RATIO, unit="cm")
################################################################################
#### Any Outliers? #############################################################
box_data = data.table()
#labs = list(ci="C&I", mf="Multifamily", oo="Owner Occupied", no="Non-Owner Occupied", ip="Income Producing")
labs = list(mf="Multifamily", oo="Owner Occupied", no="Non-Owner Occupied", ip="Income Producing")
#for (seg in c("ci", "mf", "no", "oo", "ip")) {
for (seg in c("mf", "no", "oo", "ip")) {
train = boh_train_ce
if (seg == "ci") { train = boh_train_ci }
resp = concat("ldiff_", seg)
resp_data = train[, resp, with=FALSE]
setnames(resp_data, resp, "resp")
resp_data[["seg"]] = labs[[seg]]
box_data = rbind(box_data, resp_data)
}
p_box = (
ggplot()
+ geom_boxplot(data=box_data[seg %in% c(labs[["ip"]],labs[["oo"]], labs[["mf"]], labs[["no"]]), ], mapping=aes(x=seg, y=resp))
+ coord_flip()
+ labs(y="Log-difference", x=NULL)
+ theme_minimal()
+ theme(panel.grid.minor=element_blank())
)
ggsave(concat(pth_out, "/image-ExloreData-boh-distributions.png"), plot=p_box, height=5, width=15, unit="cm")
### Balance and Log-diff Plots and ACF #########################################
### For each segment, plot the raw balance and its log-difference alongside
### their ACFs, as a 2x2 panel per segment.
#for (seg in c("boh","ci", "mf", "no", "oo")) {
for (seg in c("cre_boh","mf", "no", "oo", "boh")) {
### HACK: for the multifamily segment the global `boh` table is temporarily
### truncated to post-2007Q1 and restored at the bottom of the loop.
if(seg == "mf"){
oldCopy <- boh
boh <- boh[qtr_dt > "2007-03-31"]
}
### NOTE(review): boh_train(seg) assumes an is_train_<seg> column exists for
### every segment in this loop (including "cre_boh" and "boh") — confirm.
bal = boh_train(seg)[[seg]]
ldiff = boh_train(seg)[[concat("ldiff_", seg)]]
dates = boh_train(seg)[["qtr_dt"]]
acf_obj = acf(bal, plot=FALSE)
acf_df = with(acf_obj, data.frame(lag, acf))
### Approximate 95% white-noise confidence band: +/- 1.96/sqrt(n).
acf_hi = 1.96/sqrt(length(bal))
acf_lo = -acf_hi
b_acf = (
ggplot(data = acf_df, mapping = aes(x = lag, y = acf))
+ geom_hline(aes(yintercept = 0))
+ geom_hline(aes(yintercept = acf_hi), linetype=2, color=hxBlue)
+ geom_hline(aes(yintercept = acf_lo), linetype=2, color=hxBlue)
+ geom_segment(mapping = aes(xend = lag, yend = 0))
+ theme_minimal()
+ theme(panel.grid.minor=element_blank(), plot.title = element_text(size=14, hjust = 0.50))
+ labs(x="Lag", y="ACF",title="Balance ACF")
)
acf_obj = acf(ldiff, plot=FALSE)
acf_df = with(acf_obj, data.frame(lag, acf))
acf_hi = 1.96/sqrt(length(ldiff))
acf_lo = -acf_hi
l_acf = (
ggplot(data = acf_df, mapping = aes(x = lag, y = acf))
+ geom_hline(aes(yintercept = 0))
+ geom_hline(aes(yintercept = acf_hi), linetype=2, color=hxBlue)
+ geom_hline(aes(yintercept = acf_lo), linetype=2, color=hxBlue)
+ geom_segment(mapping = aes(xend = lag, yend = 0))
+ theme_minimal()
+ theme(panel.grid.minor=element_blank(), plot.title = element_text(size=14, hjust = 0.50))
+ labs(x="Lag", y="ACF",title="Transformation ACF")
)
b_series = (
ggplot()
+ geom_line(aes(x=dates, y=bal), color=hxBlue)
+ geom_point(aes(x=dates, y=bal), color=hxBlue)
+ theme_minimal()
+ theme(panel.grid.minor=element_blank(), plot.title = element_text(size=14, hjust = 0.50))
+ labs(x=NULL, y="Balance",title="Balance")
)
l_series = (
ggplot()
+ geom_line(aes(x=dates, y=ldiff), color=hxBlue)
+ geom_point(aes(x=dates, y=ldiff), color=hxBlue)
+ theme_minimal()
+ theme(panel.grid.minor=element_blank(), plot.title = element_text(size=14, hjust = 0.50))
+ labs(x=NULL, y="Log-difference",title="Transformed Balance")
)
t_top = grid.arrange(b_series, b_acf,ncol=2)
t_bot = grid.arrange(l_series, l_acf,ncol=2)
t_plot = grid.arrange(t_top, t_bot, nrow=2)
ggsave(concat(pth_out, "/image-ExploreData-", seg, "-targets.png"), plot=t_plot, height=10*GOLDEN_RATIO, width=12*GOLDEN_RATIO, unit="cm")
### Restore the untruncated global `boh` (see HACK above).
if(seg == "mf"){
boh <- oldCopy
}
}
### NOTE(review): apparent debugging leftover — this re-plots the balance
### series using `seg` and `dates` left over from the last loop iteration
### ("boh"); the final `b_series` line only auto-prints interactively.
bal = boh_train(seg)[[seg]]
b_series = (
ggplot()
+ geom_line(aes(x=dates, y=bal), color=hxBlue)
+ geom_point(aes(x=dates, y=bal), color=hxBlue)
+ theme_minimal()
+ theme(panel.grid.minor=element_blank(), plot.title = element_text(size=14, hjust = 0.50))
+ labs(x=NULL, y="Balance",title="Balance")
)
b_series
<file_sep>/EBModel/8-Forecasts.R
################################################################################
# Bank of Hope
# Commercial Real Estate Ending Balances
# Program: <>.R
# Author(s): KPMG, LLP
# Purpose:
# Data Dependences:
#
#
# R-version: R version 3.3.1 (2016-06-21)
# -- "Bug in Your Hair" Copyright (C) 2016 The R Foundation
# for Statistical Computing Platform: x86_64-apple-darwin13.4.0 (64-bit)
################################################################################
### Environment Settings #######################################################
### Developer-specific paths: raw inputs, shared helper scripts, and output dir.
pth_inputs = "C:/Users/ic07949/Desktop/KPMG/Model Development/development code and data/ending-balance_combined/combined/read-only-inputs"
pth_lib = "C:/Users/ic07949/Desktop/KPMG/Model Development/development code and data/ending-balance_combined/combined/library"
pth_out = "C:/Users/ic07949/Desktop/KPMG/Model Development/development code and data/ending-balance_combined/combined"
### No need to make changes below after this line ##############################
### Dependencies
source(paste(pth_lib,"/dev-support.R", sep=""))
source(paste(pth_lib,"/dfast-support.R", sep=""))
source(paste(pth_lib,"/colors.R", sep=""))
# sourced files provide the following functions:
# - stack()
# - get_bal_forecast()
# - concat()
# - bin_interval_variable()
# - calc_rsq(), calc_mape(), calc_mad(), calc_rmset()
# - cv_step(), cv_select()
library("openxlsx")
library("data.table")
library("lubridate")
library("ggplot2")
library("RGraphics")
library("gridExtra")
# NOTE: ggplot2 is loaded twice (harmless; library() is idempotent).
library("ggplot2")
library("scales")
library("tseries")
library("car")
library("urca")
library("lmtest")
library("stats")
################################################################################
GOLDEN_RATIO = 1.61803398875
STACK_RATIO = 1.20
### Import Data ################################################################
### Macroeconomic scenario paths (DFAST baseline/adverse/severe).
baseline = readRDS(concat(pth_out, "/econ-data-baseline.RDS"))
adverse = readRDS(concat(pth_out, "/econ-data-adverse.RDS"))
severe = readRDS(concat(pth_out, "/econ-data-severe.RDS"))
### Import Data ################################################################
boh = readRDS(concat(pth_out, "/data-boh.RDS"))
### Return the training rows of `boh` for a segment: renames the segment's
### is_train_<seg> flag column to "is_train" on a copy and filters to TRUE.
boh_train = function(seg) {
train_ind = concat("is_train_", seg)
boh_cp = copy(boh)
setnames(boh_cp, train_ind, "is_train")
boh_cp[is_train == TRUE,]
}
boh_train_ce = boh_train("ip")
################################################################################
################################################################################
### Selected Models ############################################################
### OLS
### Final driver selections per segment.
ols_ip_model = c("crei_eg_lag4","dow_ya")
ols_oo_model = c("crei_eg_lag2")
### Human-readable driver descriptions (same order as the vectors above).
### NOTE(review): confirm the mapping — e.g. that "crei_eg_lag4" is indeed the
### "CRE Index 8Q Growth" transform; the lag naming is not self-evident here.
ols_ip_desc = c("CRE Index 8Q Growth", "Dow 1Q Growth MA(4)")
ols_oo_desc = c("CRE Index 8Q Growth")
### Linear Testing Model Fit ###################################################
### Fit each segment's log-difference response on its selected drivers.
ols_ip = lm(ldiff_ip~., data=boh_train_ce[, c(ols_ip_model, "ldiff_ip"), with=FALSE])
ols_oo = lm(ldiff_oo~., data=boh_train_ce[, c(ols_oo_model, "ldiff_oo"), with=FALSE])
### combine model info into lists for later usage.
model_obj_list = list(ip=ols_ip, oo=ols_oo)
model_var_list = list(ip=ols_ip_model, oo=ols_oo_model)
model_desc_list = list(ip=ols_ip_desc, oo=ols_oo_desc)
### Forecasts ##################################################################
### Forecast anchor data: actuals from 2007Q2 through 2016Q4.
boh_2007q2_to_2016q4 = boh[
    qtr_dt >= as.Date("2007-06-30", "%Y-%m-%d")
    & qtr_dt <= as.Date("2016-12-31", "%Y-%m-%d")
,]
### Moving-average window used when rebuilding balances from log-diff forecasts.
ma_n=1
j = 1
for (seg in c("ip", "oo")) {
    fit = model_obj_list[[seg]]
    var_names = model_var_list[[seg]]
    ### Scenario forecasts (baseline/adverse/severe) for this segment.
    fcst_data = get_forecasts(
        boh_2007q2_to_2016q4
        , baseline
        , adverse
        , severe
        , bal_var=seg
        , dt_var="qtr_dt"
        , model=var_names
        , model_obj=fit
        , resp=concat("ldiff_", seg)
        , type="ols"
        , ar_term=""
        , ma_n=ma_n
    )
    ### Balance-level panel.
    a = fcst_data[["balance"]][, c("dt", "bal", "baseline", "adverse", "severe")]
    setnames(a, "bal", "resp")
    a[["grp"]] = "Balance (billions)"
    ### Log-difference panel; observed quarters show actuals under every scenario.
    b = fcst_data[["ldiff"]][, c("dt", "resp", "baseline", "adverse", "severe")]
    b[["grp"]] = "Balance Log-difference"
    b[!is.na(resp), `:=`(baseline = resp, adverse = resp, severe = resp)]
    ### Renamed from `c`, which shadowed base::c — the scale_colour_manual call
    ### below only kept working via R's skip-non-function call lookup.
    fcst_stack = rbind(a, b)
    f_c = (
        ggplot(data=fcst_stack)
        + facet_wrap(~grp, ncol=2, scales="free_y")
        + theme_minimal()
        + theme(
            legend.title=element_blank()
            , legend.position="bottom"
            , panel.grid.minor=element_blank()
            , strip.text = element_text(size = 14)
        )
        + geom_line(aes(x=dt, y=baseline, color="Baseline"))
        + geom_line(aes(x=dt, y=adverse, color="Adverse"))
        + geom_line(aes(x=dt, y=severe, color="Severe"))
        + geom_line(aes(x=dt, y=resp, color="Actual"))
        + labs(x=NULL, y=NULL)
        + scale_y_continuous(label=comma_format())
        + theme(legend.title=element_blank(), legend.position="bottom")
        + guides(col=guide_legend(nrow=1))
        + scale_colour_manual(values=c(hxDRed, hxDGreen, hxDAqua, hxGray, hxDPurple))
    )
    ggsave(concat(pth_out, "/image-Forecasts-", seg,".png"), plot=f_c, height=10, width=12*GOLDEN_RATIO, unit="cm")
    # NOTE(review): j is incremented but never read in this loop; kept in case
    # later (unseen) code relies on its final value — confirm before removing.
    j = j + 1
    write.csv(fcst_data[["balance"]], file=concat(pth_out, "/table-Forecasts-", seg,".csv"))
}
<file_sep>/2018_DFAST_Production_NCOR.R
# 2018 DFAST Production Run Net Charge Off Models CRE and C&I
# Bank of Hope
# Developer: <NAME>
# Start Date: 02/26/2018
# R version 3.4.3 (2017-11-30)
library(dplyr)
library(lubridate)
library(zoo)
library(data.table)
# Fix: the plotting loop at the end of this script calls ggplot()/ggsave()
# but ggplot2 was never attached; load it here.
library(ggplot2)
setwd("C:/Users/OL07805/Desktop/Desktop Things/Net Charge Off Models/DFAST Production Run 2018/")
# Load in dev dataset (provides `data3`, the estimation sample with MEVs).
load("S3_00_Estimation_Sample_with_MEV_20171117")
# Read in MEV Data (macroeconomic variables, already transformed/lagged).
mev <- fread("S0_09_MEV_data_transformed_111717.csv")
# Peer banks used to train the MF and NOOCRE models; the CnI and OOCRE
# models are trained on Bank of Hope alone (see lm() calls below).
mfPeers <- c("Bank of Hope","Banner Corporation","Cathay General Bancorp","Columbia Banking System"
             ,"EAST WEST BANCORP","PacWest Bancorp","UMPQUA BANK")
nooPeers <- c("Bank of Hope","Cathay General Bancorp","EAST WEST BANCORP","UMPQUA BANK","Western Alliance")
# Right-hand-side variable lists for each segment, collapsed into a single
# "+"-separated string for use in lm() formulas.
# NOTE(review): nooVars and mfVars include ENTITY_NAME as a predictor
# (peer-bank fixed effects) - confirm this is intended.
ciVars <- paste(c("ca_rgsp_yg_EWMA4","ca_unemp_yd_EWMA4","vix_qd_lag4","prime_spread_log_qd","ca_hpi_yg_EWMA4_lag4")
                ,collapse = "+")
ooVars <- paste(c("empl_yg_EWMA4_lag4","crei_yg_EWMA2_lag4","prime_spread_log_qd_EWMA2_lag4"),collapse = "+")
nooVars <- paste(c("ENTITY_NAME","empl_yg_EWMA4_lag1","rgdp_grw_NL_lag3","crei_yg_EWMA4_lag3"),collapse = "+")
mfVars <- paste(c("ENTITY_NAME","empl_qg_EWMA2_lag3","gdp_grw_yoy_NL_lag2"),collapse = "+")
# Train the four segment-level OLS models on the "Historic" scenario rows.
# Fix: the original built each formula with paste(c("NCOR ~ ", vars)) and no
# `collapse`, which returns a length-2 character vector rather than a single
# string; it only worked because parse() re-joins the elements as separate
# lines.  Build one explicit formula string with paste0() instead - the
# fitted models are identical.
ciModel <- lm(formula = paste0("NCOR ~ ", ciVars)
              ,data = data3[Portfolio2 == "CnI" & ENTITY_NAME == "Bank of Hope" & Scenario == "Historic"])
ooModel <- lm(formula = paste0("NCOR ~ ", ooVars)
              ,data = data3[Portfolio2 == "OOCRE" & ENTITY_NAME == "Bank of Hope" & Scenario == "Historic"])
nooModel <- lm(formula = paste0("NCOR ~ ", nooVars)
               ,data = data3[Portfolio2 == "NOOCRE" & ENTITY_NAME %in% nooPeers & Scenario == "Historic"])
mfModel <- lm(formula = paste0("NCOR ~ ", mfVars)
              ,data = data3[Portfolio2 == "MF" & ENTITY_NAME %in% mfPeers & Scenario == "Historic"])
# Forecast on MEV Data and store in a new table
# Score each segment's model on the three supervisory scenarios; results go
# into one wide table with a column per scenario/segment combination.
mev$ENTITY_NAME <- "Bank of Hope"
mev$Date <- as.Date(mev$Date)
# Seed the output frame with the forecast-quarter dates.
# NOTE(review): the 1:13 indices here and below assume each scenario in
# `mev` has exactly 13 quarterly rows - confirm against the input file.
forecastTable <- as.data.frame(c())
forecastTable[1:13,"qtr_dt"] <- as.Date(mev[Scenario == "Baseline",Date])
forecastTable$qtr_dt <- as.Date(forecastTable$qtr_dt)
segs <- c("CnI","OOCRE","NOOCRE","MF")
for(i in segs){
# Pick the trained model corresponding to this segment.
if(i == "CnI"){
fit <- ciModel
}
else if(i == "OOCRE"){
fit <- ooModel
}
else if(i == "NOOCRE"){
fit <- nooModel
}
else{
fit <- mfModel
}
# One predicted NCOR path per scenario, aligned to the 13 forecast quarters.
forecastTable[1:13,paste0("NCOR_Baseline",i)] <- predict(fit,mev[Scenario == "Baseline"])
forecastTable[1:13,paste0("NCOR_Adverse",i)] <- predict(fit,mev[Scenario == "Adverse"])
forecastTable[1:13,paste0("NCOR_Severe",i)] <- predict(fit,mev[Scenario == "Severe"])
}
write.csv(forecastTable,"2018_DFAST_NCOR_Forecasts.csv",row.names = F)
#########################################################################################################################
### Plot each segments growth rates and ending balances
forecastTable$qtr_dt <- as.Date(forecastTable$qtr_dt)
# Human-readable titles; ordered to match `segs` below, with rowCt walking
# the two vectors in lockstep.
segLabels <- c("Commercial and Industrial","Multifamily","Non Owner Occupied","Owner Occupied")
segs <- c("CnI","MF","NOOCRE","OOCRE")
rowCt <- 1
# Start with growth rates
for(i in segs){
# One line per scenario; get() resolves the scenario/segment column name
# assembled with paste0().
plotA <- (ggplot(aes(x = qtr_dt),data = forecastTable)
+ geom_line(aes(y = get(paste0("NCOR_Baseline",i)),color = "Baseline"))
+ geom_line(aes(y = get(paste0("NCOR_Adverse",i)),color = "Adverse"))
+ geom_line(aes(y = get(paste0("NCOR_Severe",i)),color = "Severe"))
+ scale_color_manual(values = c("Baseline" = "green","Adverse" = "orange","Severe" = "red"))
+ labs(y = "NCOR",x = "Date",title = paste0(segLabels[rowCt]," Net Charge Off Rate"),color = "Scenario")
+ theme_minimal()
+ theme(plot.title = element_text(hjust = 0.5))
+ scale_y_continuous(labels = scales::percent))
#plot(plotA)
# PNG written to the working directory set at the top of this script.
ggsave(paste0("plot_NCOR_Forecast_",segLabels[rowCt],".png"), plot=plotA, width=20, height=11, unit="cm", dpi=500)
rowCt <- rowCt + 1
}
<file_sep>/PD/R01_BBCN_Data.R
##############################################################################
## File Name: R01_BBCN_Data.R
## Author: KZ
## Date: 5/1/2017 Created
## Purpose: To import and clean BBCN data according to "01 - BBCN data.sas"
## Download 8/8/2017
##############################################################################
setwd("C:/Users/ic07949/Desktop/dataset")
# Install any missing dependencies, then attach them.
requirements <- c("dplyr", "reshape2", "data.table","zoo")
for(rr in requirements){
if(! rr %in% installed.packages()) install.packages(rr)
}
require(dplyr)
require(reshape2)
require(data.table)
require(zoo)
## Import BBCN Data (SAS File 01, Line 1 to 53)
BBCN_df <- read.csv("data request bottom-up.csv")
# Force the first column's name to fileDate (the CSV header for it is not
# usable as-is).
names(BBCN_df)[1] <- paste("fileDate")
# Parse the key date columns from ISO strings.
BBCN_df$fileDate <- as.Date(BBCN_df$fileDate, "%Y-%m-%d")
BBCN_df$originationDate <- as.Date(BBCN_df$originationDate, "%Y-%m-%d")
BBCN_df$maturityDate <- as.Date(BBCN_df$maturityDate,"%Y-%m-%d")
BBCN_df$non_acc_date <- as.Date(BBCN_df$nonAccrualDate, "%Y-%m-%d")
rates <- fread("rates2.csv")
acquired_bbcn_raw <- fread("acquired loan identifier bbcn.csv")
acquired_loans <- unique(acquired_bbcn_raw$Note_Number)
## Create label in BBCN for acquired loans (SAS File 01, Line 54 to 104)
BBCN_df$acquired_identifier <- ifelse(BBCN_df$accountNo %in% acquired_loans,
paste("acquired_bbcn"),
paste("bbcn_originated"))
table(BBCN_df$acquired_identifier)
#write.csv(BBCN_df, file = "BBCN_df_test.csv", row.names = FALSE)
## Create y for default event (SAS File 01, Line 118 to 125)
# y = 1 when the loan has charge-offs, or has no charge-offs but carries a
# non-accrual flag; 0 otherwise.
BBCN_df$y <- ifelse(BBCN_df$amtChargedOff > 0 | (BBCN_df$amtChargedOff == 0 &
BBCN_df$nonAccrualFlag != 0),
1, 0)
table(BBCN_df$y)
####################################################################################################
## Create time to maturity and POB (SAS File 01, Line 127 to 181)
# Loan age and term in quarters; POB = percent of term elapsed, set to 100
# when term_q is zero (division-by-zero guard).
BBCN_df$loan_age_q <- (as.yearqtr(BBCN_df$fileDate) - as.yearqtr(BBCN_df$originationDate)
) * 4
BBCN_df$term_q <- (as.yearqtr(BBCN_df$maturityDate) - as.yearqtr(BBCN_df$originationDate)
) * 4
BBCN_df$POB <- 100 * BBCN_df$loan_age_q / BBCN_df$term_q
BBCN_df$POB <- ifelse(BBCN_df$term_q ==0,100,BBCN_df$POB)
## find the date for the first default event (SAS File 01, Line 224 to 240)
# For each defaulted account keep the earliest non-accrual date, then merge
# it back onto every observation of that account.
indx_bbcn <- subset(BBCN_df, y==1, select = c("accountNo","non_acc_date"))
indx_bbcn <- as.data.table(indx_bbcn[order(indx_bbcn$accountNo, indx_bbcn$non_acc_date),])
indx_bbcn <- indx_bbcn %>% group_by(accountNo)%>% filter(row_number(non_acc_date) == 1)
names(indx_bbcn)[names(indx_bbcn)=="non_acc_date"] <- "min_non_acc_date"
#### 20184 obs in indx_bbcn
# BBCN_df <- BBCN_df[-grep("non_acc_date", colnames(BBCN_df))]
BBCN_df <- merge(x = BBCN_df, y = indx_bbcn, by = "accountNo", all.x = TRUE)
## Clean up data
# Break the key dates into year/month/quarter components and compute time
# to maturity in months.
BBCN_df$yr_maturity <- year(BBCN_df$maturityDate)
BBCN_df$yr_file <- year(BBCN_df$fileDate)
BBCN_df$mn_maturity <- month(BBCN_df$maturityDate)
BBCN_df$mn_file <- month(BBCN_df$fileDate)
BBCN_df$q_file <- quarter(BBCN_df$fileDate)
BBCN_df$yr_min_non_acc_date <- year(BBCN_df$min_non_acc_date)
BBCN_df$mn_min_non_acc_date <- month(BBCN_df$min_non_acc_date)
BBCN_df$ttm_m= 12*(BBCN_df$yr_maturity - BBCN_df$yr_file ) + (
BBCN_df$mn_maturity - BBCN_df$mn_file)
# NOTE(review): maturityDate is a Date, so `> 2006` compares days-since-1970
# against 2006 (i.e. keeps maturities after mid-1975), not the year 2006.
# Probably intended yr_maturity > 2006 - confirm before changing, since the
# change would alter the sample.
BBCN_df <- filter(BBCN_df, maturityDate > 2006)
# BBCN_df <- filter(BBCN_df, yr_maturity >= yr_file )
# BBCN_df <- filter(BBCN_df, !(yr_maturity == yr_file & (mn_file - mn_maturity)>2) )
#### 414335 obs.
## Create CRE/C&I portfolio ID (SAS File 01, Line 254 to 331)
# Classify each loan as "CI" or "CRE" based on call-report code and loan
# type, replicating the SAS waterfall: each ifelse() only fills rows still
# marked "NULL", so the ordering of the rules matters.
BBCN_df$portfolio_id <- "NULL"
## trim leading or trailing blanks for vairable callReportCodeDescr
## (because R Reads blanks)
trim <- function (x) gsub("^\\s+|\\s+$", "", x)
BBCN_df$callReportCodeDescr <- trim(BBCN_df$callReportCodeDescr)
## (SAS File 01, Line 258 to 274)
# Rule 1: COMMERCIAL (GENERAL PLEDGE) + a C&I-style loan type -> CI.
BBCN_df$portfolio_id <- ifelse(BBCN_df$callReportCodeDescr %in% c("COMMERCIAL (GENERAL PLEDGE)") &
BBCN_df$loanTypeDescr %in% c("Commercial Line (18)", "Commercial Line (18)","Commercial Line (18)",
"Commercial Line (18)", "Commercial Term Loan (20)",
"Commercial Term Loan (20)","Commercial Term Loan (20)",
"Commercial Term Loan (20)","Comml LOC - Other Gov Gty (19)",
"Comml LOC - Other Gov Gty (19)","Discounted Acceptance (33)",
"Export Working Capital Program (38)","Performance Bond L/C (44)",
"Purchase Advance (31)","Purchase Advance (31)",
"Purchase Advance (31)","SBA 172 Loan (66)",
"SBA ARC Loans (62)","SBA ARC Loans (62)",
"SBA ARC Loans (62)","SBA Express LOC (64)",
"SBA Express LOC (64)", "SBA SOHO Loan (65)",
"SBA Term Loans (61)","SBA Term Loans (61)",
"SBA Term Loans (61)","Standby L/C (43)",
"Standby L/C (43)","Trust Receipt (30)",
"Working Capital Advance (37)","Working Capital Advance (37)",
"Working Capital Advance (37)"),
"CI",
BBCN_df$portfolio_id)
#### !!! SAS only reads up to 32 characters. But R reads them all. So It's "Export Working Capital Program (38)",
#### not "Export Working Capital Program (3" !!!!
## (SAS File 01, Line 276 to 279)
# Rule 2: COMMERCIAL (GENERAL PLEDGE) + a real-estate loan type -> CRE.
BBCN_df$portfolio_id <- ifelse(BBCN_df$portfolio_id == c("NULL") &
BBCN_df$callReportCodeDescr %in% c("COMMERCIAL (GENERAL PLEDGE)") &
BBCN_df$loanTypeDescr %in% c("Commercial Real Estate (71)", "SBA Real Estate (60)",
"SBA Real Estate (60)", "SBA Real Estate (60)"),
"CRE",
BBCN_df$portfolio_id)
## (SAS File 01, Line 281 to 283)
# Rule 3: residential / nonfarm-nonresidential call codes -> CRE.
BBCN_df$portfolio_id <- ifelse(BBCN_df$portfolio_id == c("NULL") &
BBCN_df$callReportCodeDescr %in% c("CONVENTIONAL 5+ RESIDENTIAL",
"Conv 5+ Residential Prop",
"NON-FARM NON -RESIDENTIAL",
"Other nonfarm nonresi property",
"Owner-occupied nonfarm nonresi",
"SECURED BY FARMEDLAND") ,
"CRE",
BBCN_df$portfolio_id)
## (SAS File 01, Line 285 to 286)
# Rule 4: revolving/borrowing-base commercial call codes -> CI.
BBCN_df$portfolio_id <- ifelse(BBCN_df$portfolio_id == c("NULL") &
BBCN_df$callReportCodeDescr %in% c("Check Credit & Rev Credit Plan",
"Com'l Loan - International Dpt",
"Com'l Loans - Borrowing Based"),
"CI",
BBCN_df$portfolio_id)
## (SAS File 01, Line 288 to 322)
# Rule 5: "Commercial Loans" call code + a C&I-style loan type -> CI.
BBCN_df$portfolio_id <- ifelse(BBCN_df$portfolio_id == c("NULL") &
BBCN_df$callReportCodeDescr %in% c("Commercial Loans") &
BBCN_df$loanTypeDescr %in% c("Bankers Health Group","Commercial Lease (25)",
"Commercial Line (18)",
"Commercial Term Loan (20)",
"Comml Asset-Based LOC (22)",
"Comml LOC - Other Gov Gty (19)",
"Comml Term - Other Gov Gty (21)",
"Discounted Acceptance (33)",
"Export Working Capital Program (38)",
"Express Line (26)",
"Master Comm LOC (01)",
"Master Comm LOC Sublimit (03)",
"Master ILOC (02)",
"Master ILOC Sublimit (04)",
"ODP LOC - Business",
"Performance Bond L/C (44)",
"Professional Line of Credit (51)",
"Professional Term Loan (50)",
"Purchase Advance (31)",
"Purchase Advance-Comm (27)",
"SBA 172 Loan (66)",
"SBA ARC Loans (62)",
"SBA Express LOC (64)",
"SBA Express Loan (63)",
"SBA SOHO Loan (65)",
"SBA Small Loan Advantage",
"SBA Term Loans (61)",
"Signature Line (11)",
"Simple Line of Credit (24)",
"Simple Loan - Commercial (23)",
"Standby L/C (43)",
"Syndicated Leveraged Lending",
"Trust Receipt (30)",
"Working Capital Advance (37)",
"Working Capital Advance-Comm (28)"),
"CI",
BBCN_df$portfolio_id)
#### Same problem. SAS only reads up to 32 characters. But R reads them all. So It's "Export Working Capital Program (38)",
#### not "Export Working Capital Program (3" !!!!
## (SAS File 01, Line 324 to 326)
# Rule 6: "Commercial Loans" call code + real-estate loan type -> CRE.
BBCN_df$portfolio_id <- ifelse(BBCN_df$portfolio_id == c("NULL") &
BBCN_df$callReportCodeDescr %in% c("Commercial Loans") &
BBCN_df$loanTypeDescr %in% c("Comm RE - Revolving LOC (74)","Commercial Real Estate (71)",
"SBA Real Estate (60)"),
"CRE",
BBCN_df$portfolio_id)
## (SAS File 01,Line 328 to 330)
# Rule 7: remaining international/installment call codes -> CI.
BBCN_df$portfolio_id <- ifelse(BBCN_df$portfolio_id == c("NULL") &
BBCN_df$callReportCodeDescr %in% c("INTERNATIONAL",
"Other Installment loans") ,
"CI",
BBCN_df$portfolio_id)
# Anything still unclassified is marked "error" and dropped below.
BBCN_df$portfolio_id <- ifelse(BBCN_df$portfolio_id == c("NULL"),
"error",
BBCN_df$portfolio_id)
table(BBCN_df$portfolio_id)
## delete portfolio_id == "error".
BBCN_df <- filter(BBCN_df, portfolio_id != "error")
## delete the observations after the default date
# Drop observations dated after (or more than 2 months after, within the
# default year) the first non-accrual date.
BBCN_df <- filter(BBCN_df, !(yr_file > yr_min_non_acc_date & (!is.na(yr_min_non_acc_date)) ) )
BBCN_df <- filter(BBCN_df, !((yr_file == yr_min_non_acc_date) & (mn_file - mn_min_non_acc_date > 2) &
(!is.na(yr_min_non_acc_date))))
#### 330012 obs.
## !! SAS File 01, Line 346 to 359 can be ignored because it doesn't affect the final results.
## clean multiple default events for 2 accounts (SAS File 01, Line 361 to 369)
# Manual fix for two accounts with multiple default events: truncate their
# history at 2009Q1 and pin the first non-accrual date to 2009-03-31.
BBCN_df <- filter(BBCN_df, !(accountNo %in% c(26506643, 23506889) & (yr_file >= 2009) & (mn_file > 03) ))
# NOTE(review): ifelse() strips the Date class, so min_non_acc_date becomes
# numeric here; the as.Date(as.numeric(...)) call below passes no `origin`,
# which errors on R releases before 4.3 (where origin gained a default of
# "1970-01-01") - confirm the target R version.
BBCN_df$min_non_acc_date <- ifelse(BBCN_df$accountNo %in% c(26506643, 23506889),
as.Date("2009-03-31"), BBCN_df$min_non_acc_date)
BBCN_df$min_non_acc_date <- as.Date(as.numeric(BBCN_df$min_non_acc_date))
BBCN_df$yr_min_non_acc_date <- ifelse(BBCN_df$accountNo %in% c(26506643, 23506889),
2009, BBCN_df$yr_min_non_acc_date)
BBCN_df$mn_min_non_acc_date <- ifelse(BBCN_df$accountNo %in% c(26506643, 23506889),
3, BBCN_df$mn_min_non_acc_date)
## add variables (SAS File 01, Line 375 to 383)
BBCN_df$account_id <- BBCN_df$accountNo;
BBCN_df$boh_id <- "bbcn"
## !! SAS File 01, Line 386 to 427 can be ignored because it doesn't affect the final results.
## only need to create variable property_type for CRE model
# Coerce the property-type code to numeric (factor -> character -> numeric;
# non-numeric codes become NA).
BBCN_df$property_type <- as.character(BBCN_df$collateralPropertyType)
BBCN_df$property_type <- as.numeric(BBCN_df$property_type)
table(BBCN_df$property_type)
table(BBCN_df$collateralPropertyType)
## create variable boh_rating (SAS File 01, Line 431 to 446)
#### !! variable loanRatingDescr2 has different length in R.
#### (length in SAS is shorter because developer forgot to use guessingrows in SAS proc import)
# Map rating descriptions to a numeric scale: Pass-1..4 -> 1..4, criticized
# grades -> 1000/2000/3000/4000; 111 is a sentinel for unmatched values.
BBCN_df$boh_rating <- ifelse(BBCN_df$loanRatingDescr2 == c("Substandard"), 2000,
ifelse(BBCN_df$loanRatingDescr2 == c("Sp Mention"), 1000,
ifelse(BBCN_df$loanRatingDescr2 == c("Doubtful"), 3000,
ifelse(BBCN_df$loanRatingDescr2 == c("Loss"), 4000,
ifelse(BBCN_df$loanRatingDescr2 == c("Pass-4"), 4,
ifelse(BBCN_df$loanRatingDescr2 == c("Pass-3"), 3,
ifelse(BBCN_df$loanRatingDescr2 == c("Pass-2"), 2,
ifelse(BBCN_df$loanRatingDescr2 == c("Pass-1"), 1, 111)
)))))))
table(BBCN_df$loanRatingDescr2)
table(BBCN_df$boh_rating)
## clean up dcr2 (SAS File 01, Line 449 to 489)
## Only change character var to numeric
BBCN_df$dcr2 <- as.numeric(as.character(BBCN_df$DCR))
BBCN_df$dcr2 <- floor(BBCN_df$dcr2*1000)/1000 #keep 3 decimal places
# Re-scale a handful of apparently mis-scaled entries (0.001-0.007) back
# up by 1000, mirroring the SAS logic.
BBCN_df$dcr2 <- ifelse(BBCN_df$dcr2 %in% c(0.001,0.002,0.003,0.004,0.005,0.007),
BBCN_df$dcr2*1000,
BBCN_df$dcr2)
BBCN_df$DCR <- BBCN_df$dcr2
## remove the letter of credit observations (SAS File 01, Line 492 to 500)
BBCN_df <- filter(BBCN_df, productDescr != c("Letter of Credits"))
BBCN_df$interest_rate <- as.numeric(BBCN_df$interestRate)
## add rates and create Final DF (SAS File 01, Line 183 to 211, Line 507 to 523)
# Join the quarterly treasury-rate table on (year, quarter).
rates <- subset(rates, select = -c(date,month))
setnames(rates, old = c("year","q"), new = c("yr_file","q_file"))
BBCN_df <- merge(x = BBCN_df, y = rates, by = c("yr_file","q_file"), all.x = TRUE)
## delete loan_spread_v > 10000 (SAS File 01, Line 574 to 577). only 2 obs with fixVar == "NULL".
# BBCN_df$loan_spread_v_gt10000 <- ifelse(BBCN_df$fixVar == "NULL", 1,0)
# BBCN_df <- filter(BBCN_df, loan_spread_v_gt10000 != 1)
## but SAS data file interim.df_final_bbcn2 did not exclude these two obs.
## create final data set for bbcn
# Rename to the common cross-bank schema, keep only the modeling columns,
# and export both RData and CSV copies.
df_final_bbcn <- as.data.frame(BBCN_df)
df_final_bbcn <- as.data.table(df_final_bbcn)
setnames(df_final_bbcn, old = c("timesPD01To29D", "fixVar","originalBal","originationDate",
"maturityDate","min_non_acc_date","origLoanToValue","currentNetBookBal",
"yr_file","q_file","mn_file"),
new = c("dpd0129","interest_rate_type","original_balance","origination_date",
"maturity_date","first_nonacc_date","org_ltv","current_balance",
"year","q","month"))
df_final_bbcn <- subset(df_final_bbcn, select = c(fileDate, account_id, boh_id, acquired_identifier,
portfolio_id, original_balance, origination_date, maturity_date,
current_balance, interest_rate, interest_rate_type,
loan_age_q, POB, boh_rating, DCR,
dpd0129, first_nonacc_date,
naicsCode, property_type, tb1m, tb3m, tb6m, tb1y, tb2y,
tb3y, tb5y, tb7y, tb10y,tb20y, tb30y, year, q, month, y))
save(df_final_bbcn, file = "Data output/df_final_bbcn.RData")
write.csv(df_final_bbcn, file = "Data output/df_final_bbcn.csv", row.names = FALSE)
<file_sep>/EBModel/run.R
################################################################################
# Bank of Hope
# Commercial Real Estate Ending Balances
# Program: run.R
# Author(s): KPMG, LLP
# Purpose: Driver script - runs the ending-balance pipeline steps 1-8 in
#          order; the steps share state through the global environment.
# Data Dependences:
#
#
# R-version: R version 3.3.1 (2016-06-21)
# -- "Bug in Your Hair" Copyright (C) 2016 The R Foundation
# for Statistical Computing Platform: x86_64-apple-darwin13.4.0 (64-bit)
################################################################################
### Environment Settings #######################################################
# NOTE(review): paths are hard-coded to a single developer machine; adjust
# before running elsewhere.
pth_inputs = "/Users/jerrywatkins/Projects/Engagements/BOH/ending-balance/combined/read-only-inputs"
pth_lib = "/Users/jerrywatkins/Projects/Engagements/BOH/ending-balance/combined/library"
pth_out = "/Users/jerrywatkins/Projects/Engagements/BOH/ending-balance/combined"
### No need to make changes below after this line ##############################
### Dependencies
# dev-support.R defines concat() and other helpers used by the steps below.
source(paste(pth_lib,"/dev-support.R", sep=""))
source(paste(pth_lib,"/dfast-support.R", sep=""))
################################################################################
src_path = "/Users/jerrywatkins/Projects/Engagements/BOH/ending-balance/combined"
# Pipeline steps, executed sequentially; later steps consume objects created
# by earlier ones.
source(concat(src_path, "/1-ImportEconomicData.R"))
source(concat(src_path, "/2-ImportCREEndingBalances.R"))
source(concat(src_path, "/3-ImportCIEndingBalances.R"))
source(concat(src_path, "/4-CombineCREandCIEndingBalances.R"))
source(concat(src_path, "/5-ExploreData.R"))
source(concat(src_path, "/6-LeastSquaresModelSelection.R"))
source(concat(src_path, "/7-BackTesting.R"))
source(concat(src_path, "/8-Forecasts.R"))
<file_sep>/Adobe/library/dev-support.R
### General Purpose Utilities ##################################################
# Build a named list pairing each element of `a` (names) with the element of
# `b` at the same position (values).  Later duplicate names overwrite earlier
# ones, per [[<- semantics.
zip_to_list = function(a, b) {
    out = list()
    for (idx in seq_along(a)) {
        out[[ a[idx] ]] = b[idx]
    }
    out
}
# Drop empty-string ("") elements from a character vector.
# `%in%` maps NA to FALSE, so NA elements are kept - matching the original
# which()/negative-index implementation - and a vector with no blanks is
# returned unchanged (the original needed a special case because u[-integer(0)]
# would have dropped everything).
rm_blanks = function(u) {
    u[!(u %in% "")]
}
# Concatenate all arguments with no separator (vectorised over its inputs).
concat = function(...) paste(..., sep = "")
# Read one worksheet from an .xlsx workbook into a data.frame, with header
# row, empty rows/cols preserved, and automatic date detection.
# Relies on openxlsx::read.xlsx; openxlsx must be attached by the caller.
get_excel = function(file, sheet) {
# needs openxlsx package
read.xlsx(
file
, sheet=sheet
, colNames=TRUE
, startRow=1
, skipEmptyRows=FALSE
, skipEmptyCols=FALSE
, detectDates=TRUE
, check.names=TRUE
, na.strings="NA"
)
}
<file_sep>/Adobe/1-merge_data.R
################################################################################
#
#
#Program: 1-Merge Data.R
# Author: <NAME>
# Purpose: create the dataset with appropriate merge process for data analysis and model development
#
#
#
# R-version: R version 3.3.4 (2016-06-21)
# -- "Bug in Your Hair" Copyright (C) 2016 The R Foundation
# for Statistical Computing Platform: x86_64-apple-darwin13.4.0 (64-bit)
################################################################################
### Environment Settings #######################################################
pth_inputs = "C:/Users/hong3/Desktop/OULAD1/inputs"
pth_lib = "C:/Users/hong3/Desktop/OULAD1/library"
pth_out = "C:/Users/hong3/Desktop/OULAD1"
### No need to make changes below after this line ##############################
# dev-support.R provides concat() used throughout.
source(paste(pth_lib,"/dev-support.R", sep=""))
library("data.table")
library("dplyr")
library("ggplot2")
library("lubridate")
library("scales")
library("zoo")
# NOTE: plyr is attached AFTER dplyr, so plyr masks several dplyr verbs;
# that is why the aggregations below call dplyr::summarise explicitly.
library("plyr")
library("corrplot")
library("tidyr")
library("reshape")
##########################################################################################
# Load Data
###########################################################################################
# Raw CSV extracts from the inputs directory (OULAD-style learning data,
# per the pth_inputs path).
assessments <- read.csv(concat(pth_inputs,"/assessments.csv"))
courses <- read.csv(concat(pth_inputs,"/courses.csv"))
studentAssessment <- read.csv(concat(pth_inputs,"/studentAssessment.csv"))
studentInfo <- read.csv(concat(pth_inputs,"/studentInfo.csv"))
studentRegistration <-read.csv(concat(pth_inputs,"/studentRegistration.csv"))
studentVle <- read.csv(concat(pth_inputs,"/studentVle.csv"))
vle <- read.csv(concat(pth_inputs,"/Vle.csv"))
##count the missing value####
colSums(is.na(assessments))
colSums(is.na(courses))
colSums(is.na(studentAssessment))
colSums(is.na(studentInfo))
colSums(is.na(studentRegistration))
colSums(is.na(studentVle))
colSums(is.na(vle))
##########################################################################################
# Assessments Data
###########################################################################################
#Fill in assessments' missing value
# Missing assessment dates are filled with the module presentation length
# (presumably placing them at the end of the presentation - confirm).
assessments_courses <- merge(assessments, courses, by=c("code_module", "code_presentation"))
assessments_courses$date[is.na(assessments_courses$date)] <- assessments_courses$module_presentation_length[is.na(assessments_courses$date)]
write.csv(assessments_courses, concat(pth_out,"/assessments_courses.csv"))
#df_assessments = course +assessment +s.asssessment
df_assessments <- merge(studentAssessment, assessments_courses , by=c("id_assessment"))
# Days between actual submission and the assessment date (negative = early).
df_assessments$submission <- df_assessments$date_submitted - df_assessments$date
write.csv(df_assessments, concat(pth_out,"/df_assessments.csv"))
##########################################################################################
# Student Vle
###########################################################################################
#sum clicks by same date and website
# Per student/module-presentation: number of VLE interaction rows and total
# clicks, then mean clicks per interaction row ("frequency").
df_student_final <- studentInfo[,c("code_module","code_presentation","id_student", "final_result")]
df_studentVle <- studentVle %>%
group_by(code_module, code_presentation, id_student)%>%
dplyr::summarise(count_date = n(), sum_click_sum = sum(sum_click))
df_studentVle <- merge(df_studentVle , df_student_final, by=c("code_module", "code_presentation", "id_student"))
df_studentVle$frequency <- df_studentVle$sum_click_sum / df_studentVle$count_date
# Roll up to module-presentation x final_result for reporting.
df_studentVle_v1 <- df_studentVle %>%
group_by(code_module, code_presentation, final_result)%>%
dplyr::summarise(avg_count = mean(frequency), avg_click = mean(sum_click_sum))
df_studentVle_v1$id <- paste(df_studentVle_v1$code_module, df_studentVle_v1$code_presentation)
write.csv(df_studentVle_v1, concat(pth_out,"/df_studentVle_v1.csv"))
# Per-student mean interaction date and total clicks (used later).
df_studentVle_v2 <- studentVle %>%
group_by(code_module, code_presentation, id_student)%>%
dplyr::summarise(avg_date = mean(date), sum_click_sum = sum(sum_click))
##########################################################################################
# Student Registration
###########################################################################################
df_studentRegistration <- merge(studentRegistration , df_student_final, by=c("code_module", "code_presentation", "id_student"))
# Fix: build the composite id from the MERGED frame.  merge() re-sorts rows
# by the `by` columns, so pasting columns from the original
# studentRegistration frame assigned ids to the wrong rows.
df_studentRegistration$id <- paste(df_studentRegistration$code_module, df_studentRegistration$code_presentation)
#check missingvalue and drop a column
colSums(is.na(df_studentRegistration))
# Keep cols 1-4, 6-7 (drops date_unregistration, column 5 after the merge).
df_studentReg <- df_studentRegistration[ c(1:4,6:7)]
#fill in missing value in registration column
# Drop rows with any remaining NA (i.e. missing date_registration).
df_studentReg <- df_studentReg[complete.cases(df_studentReg), ]
# Average registration date per module-presentation and outcome.
df_studentReg_v1 <- df_studentReg%>%
group_by(id, final_result)%>%
dplyr::summarise(avg_date_reg = mean(date_registration))
write.csv(df_studentReg_v1,concat(pth_out, "/df_studentReg_v1.csv"))
##########################################################################################
# Student Info
#create the master dataset to build models and visualize the basic analysis
###########################################################################################
df_studentInfo <- studentInfo
#convert all categorical variables to numeric variables
# NOTE(review): the gender recode relies on the factor's level order -
# as.integer(...)==2 maps the SECOND alphabetical level to 1 (presumably
# "M" -> 1, "F" -> 0) - confirm against the raw data.
df_studentInfo$gender <- as.integer(df_studentInfo$gender)
df_studentInfo$gender <- factor(ifelse(as.numeric(df_studentInfo$gender)==2, 1,0))
# Ordinal encodings with explicit level ordering.
df_studentInfo$highest_education <- as.numeric(factor(df_studentInfo$highest_education , levels=c("No Formal quals" ,
"Lower Than A Level", "A Level or Equivalent",
"HE Qualification", "Post Graduate Qualification")))
df_studentInfo$age_band <- as.numeric(factor(df_studentInfo$age_band , levels=c("0-35","35-55", "55<=")))
df_studentInfo$imd_band <- as.numeric(factor(df_studentInfo$imd_band , levels=c("0-10%","10-20%", "20-30%",
"30-40%", "40-50%", "50-60%",
"60-70%", "70-80%", "80-90%",
"90-100%")))
# Impute missing deprivation band with the middle band (5).
df_studentInfo$imd_band[is.na(df_studentInfo$imd_band)] <- 5
# NOTE(review): disability stays as factor codes 1/2, not recoded to 0/1
# like gender - confirm this asymmetry is intended.
df_studentInfo$disability <- as.integer(df_studentInfo$disability)
df_studentInfo <- merge(x = df_studentInfo, y = studentRegistration, by =c("code_module", "code_presentation", "id_student"))
df_studentInfo <- merge(x = df_studentInfo, y = courses, by =c("code_module", "code_presentation"))
#add column sum click and average date for each student and module, frequency clicks for each student and module
df_studentInfo <- merge(x = df_studentInfo,y = df_studentVle, by =c("code_module", "code_presentation", "id_student"))
# Mean submission offset (days relative to deadline) per student/module.
sub_df_assessments <- df_assessments%>%
group_by(code_module, code_presentation, id_student)%>%
dplyr::summarise(avg_date_submission = mean(submission))
df_studentInfo <- merge(x = df_studentInfo,y = sub_df_assessments, by =c("code_module", "code_presentation", "id_student"))
# Drop bookkeeping columns and rows with any remaining NA.
drops <- c("X", "date_unregistration", "final_result.y")
df_studentInfo <- df_studentInfo[, !(names(df_studentInfo) %in% drops)]
df_studentInfo <- df_studentInfo[complete.cases(df_studentInfo), ]
df_studentInfo$id <- paste(df_studentInfo $code_module, df_studentInfo $code_presentation)
write.csv(df_studentInfo, concat(pth_out, "/df_studentInfo.csv"))
###########################################################################################
# Course Info
###########################################################################################
# Cross-tabulate final results by module-presentation, then convert counts
# to proportions for plotting.
code_final_result <- studentInfo[,c("code_module","code_presentation","final_result")]
code_final_result$id <- paste(code_final_result$code_module, code_final_result$code_presentation)
courses$id <- paste(courses$code_module, courses$code_presentation)
assessments_courses$id <- paste(assessments_courses$code_module, assessments_courses$code_presentation)
a <- table(code_final_result$final_result, code_final_result$id)
b <- t(do.call("rbind", list(a)))
b <- data.table(b)
# NOTE(review): assigning courses$id positionally assumes table()'s
# alphabetical column order matches the row order of `courses` - confirm.
b$id <- courses$id
b$total <- b$Distinction+b$Fail+b$Pass+b$Withdrawn
b$Pro_D <- b$Distinction/b$total
b$Pro_F <- b$Fail/b$total
b$Pro_P <- b$Pass/b$total
b$Pro_W <- b$Withdrawn/b$total
#create new data frame for bar plot
# Long format: 4 result categories x 22 module-presentations (hard-coded).
c <- data.frame(id = rep((b$id), each =4),
result = rep(c("D", "F", "P", "W"),22),
len = c(a))
#write.csv(c, "c.csv")
# Stacked barplot with multiple groups
p <- ggplot(data=c, aes(x=id, y=len, fill=result)) +
geom_bar(stat="identity")+
theme(legend.position="bottom")+
labs(x = "module presentation", y = "Number" )+
theme(axis.text.x = element_text(angle=90))
ggsave( concat(pth_out, "/image_number_of_result.png"), plot=p, width=15, height=11, unit="cm", dpi=500)
########create new data frame for bar plot
# Same plot but with within-presentation proportions.
d <- b[,c("Pro_D","Pro_F","Pro_P","Pro_W")]
d <- t(d)
e <- data.frame(id = rep((b$id), each =4),
result = rep(c("Pro_D", "Pro_F", "Pro_P", "Pro_W"),22),
len = c(d))
p <- ggplot(data=e, aes(x=id, y=len, fill=result)) +
geom_bar(stat="identity")+
theme(legend.position="bottom")+
theme(axis.text.x = element_text(angle=90))+
labs(x = "module presentation", y = "Percent(%)" )
ggsave(concat(pth_out, "/image-_percentage_of_result_per_pt.png"), plot=p, width=15, height=11, unit="cm", dpi=500)
# add assessments info
# Count assessment types per module-presentation and join onto the course
# summary built above.
df_assessments_type <- assessments[,c("code_module","code_presentation","assessment_type")]
df_assessments_type$id <- paste(df_assessments_type$code_module, df_assessments_type$code_presentation)
a1<- table(df_assessments_type$assessment_type, df_assessments_type$id)
b1 <- t(do.call("rbind", list(a1)))
b1 <- data.table(b1)
# NOTE(review): same positional-id assumption as above - table() column
# order must match the row order of `courses`.
b1$id <- courses$id
coursesInfo <- merge(b, b1, by=c("id"))
coursesInfo <- merge(coursesInfo, courses, by=c("id"))
######## Assessments weight sum
# Total assessment weight per type, reshaped wide (one column per type).
test <- assessments%>% group_by(code_module, code_presentation, assessment_type)%>% dplyr::summarise(sum_weight = sum(weight))
test$id <- paste(test$code_module, test$code_presentation)
test <- dcast(test, id ~ assessment_type, value.var = "sum_weight")
test$CMA[is.na(test$CMA)] <- 0
names(test)[2] <- ("weighted_CMA")
names(test)[3] <- paste("weighted_Exam")
names(test)[4] <- paste("weighted_TMA")
coursesInfo <- merge(coursesInfo, test, by=c("id"))
write.csv(coursesInfo, concat(pth_out, "/df_coursesInfo.csv"))
###########################################################################################
# Shiny Data
###########################################################################################
# Lightweight extracts consumed by a Shiny app.
# NOTE(review): unlike the exports above, these write to the current working
# directory rather than pth_out - confirm that is intended.
#
studentInfo$id <- paste(studentInfo$code_module, studentInfo$code_presentation)
write.csv(studentInfo, "shiny_studentInfo.csv")
#
assessments$id <- paste(assessments$code_module, assessments$code_presentation)
shiny_assessments<- assessments %>% group_by(id, assessment_type)%>%
dplyr::summarise(k = n(), sum_weight = sum(weight))
write.csv(shiny_assessments, "shiny_assessments.csv")
#
assessments_courses$id <- paste(assessments_courses$code_module, assessments_courses$code_presentation)
shiny_assessments_courses<- assessments_courses %>% group_by(id, assessment_type)%>%
dplyr::summarise(k = n())
write.csv(shiny_assessments_courses, "shiny_assessments_courses.csv")
<file_sep>/remediation/2018_DFAST_Remediation_EB.R
# 2018 DFAST EB Model remediation - variable selection for CRE
# Bank of Hope
# Developer: <NAME>
# Start Date: 09/24/2018
# R version 3.4.3 (2017-11-30)
library(dplyr)
library(lubridate)
library(zoo)
library(data.table)
library(ggplot2)
# NOTE(review): this source() runs BEFORE setwd() below, so dev-support.R is
# resolved against the launch directory, not the working directory set next.
source("dev-support.R")
setwd("C:/Users/ic07949/Desktop/Model Development Code Package v2/DFAST Production Run 2018 EB/DFAST Production Run 2018")
#########################################################################################################################
### Read in Call Report Data
# Read in raw file of RCCI Schedule
cr1 <- fread("FFIEC CDR Call Schedule RCCI 12312017.txt")
# Bank of Hope IDRSSD
idRSSD <- 671464
cr1 <- cr1[IDRSSD == idRSSD]
# MF, NOO, OO, CnI
# RCON1460, RCONF160, RCONF161, RCON1766
# Map each segment to its call-report item code, then pull the balances.
startBal <- as.data.frame(cbind(c("mf","oo","no","ci"),c("RCON1460","RCONF160","RCONF161","RCON1766")))
names(startBal) <- c("segment","crID")
# Get relevant columns, I also checked them against the actual pdf as well, they match
startBal$balance <- as.numeric(cr1[,mget(as.character(startBal$crID))])
#########################################################################################################################
### Read in relevant datasets
# Read in training data
boh <- readRDS("data-boh.RDS")
# Read in new macroeconomic data
base <- readRDS("econ-data-baseline.RDS")
adverse <- readRDS("econ-data-adverse.RDS")
severe <- readRDS("econ-data-severe.RDS")
#########################################################################################################################
### Train the models
# Define model variables and collapse with '+' for formula
#ciVars <- paste(c("gdp_ag_lag1","ca_rinc_ag_lag3","inc_qg_lag2"),collapse = "+")
ipVars <- paste(c("crei_eg_lag4","dow_ya"),collapse = "+")
ooVars <- "crei_eg_lag2"
# Fit each segment model on three nested training windows: holding out the
# last 8 quarters, the last 4 quarters, and no holdout (full sample through
# 2016Q4).  Bare model names echo the coefficients when run interactively.
ipModel_8 <- lm(formula = paste0("ldiff_ip ~ ",ipVars)
                ,data = boh[qtr_dt >= "2007-06-30" & qtr_dt <= "2015-06-30"])
ipModel_8
ipModel_4 <- lm(formula = paste0("ldiff_ip ~ ",ipVars)
                ,data = boh[qtr_dt >= "2007-06-30" & qtr_dt <= "2015-12-31"])
ipModel_4
ipModel_full <- lm(formula = paste0("ldiff_ip ~ ",ipVars)
                   ,data = boh[qtr_dt >= "2007-06-30" & qtr_dt <= "2016-12-31"])
ipModel_full
ooModel_8 <- lm(formula = paste0("ldiff_oo ~ ",ooVars)
                ,data = boh[qtr_dt >= "2007-06-30" & qtr_dt <= "2015-06-30"])
ooModel_8
ooModel_4 <- lm(formula = paste0("ldiff_oo ~ ",ooVars)
                ,data = boh[qtr_dt >= "2007-06-30" & qtr_dt <= "2015-12-31"])
ooModel_4
# Fix: the end date read "2016-12-3" (parsed as Dec 3), which silently
# dropped the 2016Q4 observation from the full-sample OO fit; corrected to
# "2016-12-31" to match ipModel_full's window.
ooModel_full <- lm(formula = paste0("ldiff_oo ~ ",ooVars)
                   ,data = boh[qtr_dt >= "2007-06-30" & qtr_dt <= "2016-12-31"])
ooModel_full
#########################################################################################################################
# Out-of-sample predictions for each training window, assembled into one
# comparison table alongside the actuals, then written to CSV.
# Holdout for the 8-quarter models: 2015 Q3 onward.
boh_oos_8 <- boh[qtr_dt >= "2015-09-30"]
ip_oos_8 <- predict(ipModel_8, boh_oos_8)
oo_oos_8 <- predict(ooModel_8, boh_oos_8)
# Holdout for the 4-quarter models: 2016 Q1 onward.
boh_oos_4 <- boh[qtr_dt >= "2016-03-31"]
ip_oos_4 <- predict(ipModel_4, boh_oos_4)
oo_oos_4 <- predict(ooModel_4, boh_oos_4)
# Fitted values for the full-sample models over the whole estimation span.
boh_oos_full <- boh[qtr_dt >= "2007-06-30"]
ip_oos_full <- predict(ipModel_full, boh_oos_full)
oo_oos_full <- predict(ooModel_full, boh_oos_full)
# Actuals plus empty columns to be filled with each prediction vector.
result <- boh[,c("qtr_dt", "ldiff_ip", "ldiff_oo")]
result[,"ip_oos_8"] <- NA
result[,"oo_oos_8"] <- NA
result[,"ip_oos_4"] <- NA
result[,"oo_oos_4"] <- NA
result[,"ip_oos_full"] <- NA
result[,"oo_oos_full"] <- NA
# NOTE(review): these row indices hard-code where each prediction window sits
# inside boh (apparently row 59 = 2015-09-30, row 66 = last quarter,
# row 27 = 2007-06-30). They will silently misalign if boh gains or loses
# quarters — confirm against nrow(boh) before reuse.
result$ip_oos_8[59:66] <- ip_oos_8
result$oo_oos_8[59:66] <- oo_oos_8
result$ip_oos_4[63:66] <- ip_oos_4
result$oo_oos_4[63:66] <- oo_oos_4
result$ip_oos_full[27:66] <- ip_oos_full
result$oo_oos_full[27:66] <- oo_oos_full
final_result <- filter(result, qtr_dt >= "2007-06-30")
# NOTE(review): output filename looks like a typo ("remedication"); kept
# as-is because downstream consumers may read this exact name.
write.csv(final_result,"remedication_plan_result.csv",row.names = F)
<file_sep>/EBModel/1-ImportEconomicData.R
################################################################################
# Bank of Hope
# Commercial Real Estate Ending Balances
# Program: 1-ImportEconomicData.R
# Author(s): KPMG, LLP
# Purpose: Import FRB CCAR 2017 domestic scenario tables plus Moody's regional
#   and employment series, derive the transformed model variables, and save
#   one RDS per scenario for the ending-balance models.
# Data Dependences:
#   read-only-inputs/moodys/*.xlsx (regional + employment workbooks)
#   read-only-inputs/frb/2017/*.csv (FRB historic + scenario tables)
# R-version: R version 3.3.1 (2016-06-21)
# -- "Bug in Your Hair" Copyright (C) 2016 The R Foundation
# for Statistical Computing Platform: x86_64-apple-darwin13.4.0 (64-bit)
################################################################################
### Environment Settings #######################################################
# NOTE(review): machine-specific absolute paths — update per environment.
pth_inputs = "/Users/jerrywatkins/Projects/Engagements/BOH/ending-balance/combined/read-only-inputs"
pth_lib = "/Users/jerrywatkins/Projects/Engagements/BOH/ending-balance/combined/library"
pth_out = "/Users/jerrywatkins/Projects/Engagements/BOH/ending-balance/combined"
### No need to make changes below after this line ##############################
### Dependencies
# The support files presumably define the helpers used below (concat, gr,
# delta, ma) — they are not defined in this file.
source(paste(pth_lib,"/dev-support.R", sep=""))
source(paste(pth_lib,"/dfast-support.R", sep=""))
library("openxlsx")
library("data.table")
library("lubridate")
library("ggplot2")
library("scales")
library("zoo")
library("tseries")
# Step 1 of 4
### Regional Data ##############################################################
# needs openxlsx package
# ! Caution ! the Moody's workbook must keep its column layout: X1 holds the
# quarter date, then each regional series appears as one
# baseline/adverse/severe triple, in this order:
#   X2-X4   ca_unemp  CA unemployment rate (%, SA)
#   X5-X7   ca_hpi    FHFA all-transactions home price index (1980Q1 = 100, SA)
#   X8-X10  ca_gsp    Gross state product, nominal (Bil. $, SAAR)
#   X11-X13 ca_rgsp   Gross state product, real (Bil. chained 2009 $, SAAR)
#   X14-X16 ca_inc    Disposable personal income, nominal (Mil. $, SAAR)
#   X17-X19 ca_rinc   Disposable personal income, real (Mil. 09$, SAAR)
region_raw = read.xlsx(
  concat(pth_inputs, "/moodys/Regional Macrovariables Moodys.xlsx")
  , sheet="Sheet1"
  , colNames=FALSE
  , startRow=6
  , skipEmptyRows=FALSE
  , skipEmptyCols=FALSE
  , detectDates=TRUE
  , check.names=TRUE
  , na.strings="ND"
)
reg_new_col_names = c(
  "qtr_dt"
  , "ca_unemp"
  , "ca_hpi"
  , "ca_gsp"
  , "ca_rgsp"
  , "ca_inc"
  , "ca_rinc"
)
# Per-scenario source columns in the raw workbook (the date column is shared).
reg_scenario_cols = list(
  baseline = c("X1", "X2", "X5", "X8", "X11", "X14", "X17")
  , adverse = c("X1", "X3", "X6", "X9", "X12", "X15", "X18")
  , severe = c("X1", "X4", "X7", "X10", "X13", "X16", "X19")
)
# Materialize reg_baseline / reg_adverse / reg_severe data.tables, each with
# the common short column names above.
for (scen in names(reg_scenario_cols)) {
  src_cols = reg_scenario_cols[[scen]]
  scen_dt = data.table(region_raw[, src_cols])
  setnames(scen_dt, src_cols, reg_new_col_names)
  assign(paste0("reg_", scen), scen_dt)
}
# Step 2 of 4
### Employment Data ############################################################
# ! Caution ! the Moody's workbook must keep its column layout: X1 holds the
# quarter date, then total nonagricultural employment as one
# baseline/adverse/severe triple per region:
#   X2-X4  empl     US total nonfarm employment (Mil. #, SA)
#   X5-X7  ca_empl  CA total nonfarm employment (Ths., SA)
raw_empl = read.xlsx(
  concat(pth_inputs, "/moodys/Non Farm Employment Moodys.xlsx")
  , sheet="Sheet1"
  , colNames=FALSE
  , startRow=6
  , skipEmptyRows=FALSE
  , skipEmptyCols=FALSE
  , detectDates=TRUE
  , check.names=TRUE
  , na.strings="ND"
)
empl_new_col_names = c(
  "qtr_dt"
  , "empl"
  , "ca_empl"
)
# Per-scenario source columns (the date column X1 is shared by all three).
empl_scenario_cols = list(
  baseline = c("X1", "X2", "X5")
  , adverse = c("X1", "X3", "X6")
  , severe = c("X1", "X4", "X7")
)
# Materialize empl_baseline / empl_adverse / empl_severe data.tables with the
# common short column names above.
for (scen in names(empl_scenario_cols)) {
  src_cols = empl_scenario_cols[[scen]]
  scen_dt = data.table(raw_empl[, src_cols])
  setnames(scen_dt, src_cols, empl_new_col_names)
  assign(paste0("empl_", scen), scen_dt)
}
# Step 3 of 4
# FRB Data #####################################################################
# Collect historical data
# FRB CCAR 2017 domestic variables: published history plus the three
# supervisory scenario projection tables as distributed by the Federal Reserve.
raw_historic = fread(concat(pth_inputs, "/frb/2017/Historic_Domestic.csv"))
raw_baseline = fread(concat(pth_inputs, "/frb/2017/Table_2A_Supervisory_Baseline_Domestic.csv"))
raw_adverse = fread(concat(pth_inputs, "/frb/2017/Table_3A_Supervisory_Adverse_Domestic.csv"))
raw_severe = fread(concat(pth_inputs, "/frb/2017/Table_4A_Supervisory_Severely_Adverse_Domestic.csv"))
# Step 4 of 4
# Transformations ##############################################################
# Standardize a raw FRB domestic-variables table: rename the published series
# to short internal names, add the two yield spreads, and parse the FRB
# "YYYY Qq" date string into a quarter-end Date column (qtr_dt).
# Returns a new data.table; the input is left unmodified (copy() up front).
get_frb_data = function(raw_frb_data) {
  out = copy(raw_frb_data)
  # Published FRB column name -> short internal name.
  rename_map = c(
    "Real GDP growth" = "rgdp_qg"
    , "Nominal GDP growth" = "gdp_qg"
    , "Real disposable income growth" = "rinc_qg"
    , "Nominal disposable income growth" = "inc_qg"
    , "Unemployment rate" = "unemp"
    , "CPI inflation rate" = "cpi"
    , "3-month Treasury rate" = "yld_03m"
    , "5-year Treasury yield" = "yld_05y"
    , "10-year Treasury yield" = "yld_10y"
    , "BBB corporate yield" = "yld_bbb"
    , "Mortgage rate" = "mort"
    , "Prime rate" = "prime"
    , "Dow Jones Total Stock Market Index (Level)" = "dow"
    , "House Price Index (Level)" = "hpi"
    , "Commercial Real Estate Price Index (Level)" = "crei"
    , "Market Volatility Index (Level)" = "vix"
    , "Date" = "qtr_date_string"
  )
  setnames(out, names(rename_map), unname(rename_map))
  # Credit spread (BBB over 10y) and term spread (10y over 3m).
  out[["bbb_spread"]] = out[["yld_bbb"]] - out[["yld_10y"]]
  out[["yld_spread"]] = out[["yld_10y"]] - out[["yld_03m"]]
  # "YYYY Qq" -> "Qq YYYY" -> quarter-end Date (frac=1 picks the last day).
  qtr_label = paste(
    substr(out[["qtr_date_string"]], 6, 7)
    , substr(out[["qtr_date_string"]], 1, 4)
  )
  out[["qtr_dt"]] = as.Date(as.yearqtr(qtr_label, format = "Q%q %Y"), frac=1)
  out
}
# Standardize each scenario. Projection tables are appended to the shared
# history so that lagged / moving-average transforms computed later have a
# continuous series across the history/projection boundary.
frb_historic = get_frb_data(raw_historic)
frb_baseline = get_frb_data(rbind(raw_historic, raw_baseline))
frb_adverse = get_frb_data(rbind(raw_historic, raw_adverse))
frb_severe = get_frb_data(rbind(raw_historic, raw_severe))
# Join the regional (Moody's) and employment series onto an FRB scenario table
# and derive the model-variable universe: quarterly and 8-quarter growth
# rates, long-run ratios, lags 1-4, annualized growth, and indicator flags.
# Returns a new data.table restricted to the modeling columns.
# gr(), delta(), ma(), concat() come from the sourced support files
# (presumably growth-rate / difference / moving-average / paste helpers —
# confirm formulas there); shift() is data.table's lag.
transform = function(raw_frb_data, reg_data, empl_data) {
# Add regional and empl variables
# Nested data.table joins, all keyed on quarter-end date.
tf_data = empl_data[reg_data[raw_frb_data, on="qtr_dt"], on="qtr_dt"]
# calc growth rates
# _qg = quarter-over-quarter growth, _eg = 8-quarter (two-year) growth.
tf_data[["hpi_qg"]] = gr(tf_data[["hpi"]])
tf_data[["ca_hpi_qg"]] = gr(tf_data[["ca_hpi"]])
tf_data[["crei_qg"]] = gr(tf_data[["crei"]])
tf_data[["dow_qg"]] = gr(tf_data[["dow"]])
tf_data[["empl_qg"]] = gr(tf_data[["empl"]])
tf_data[["ca_empl_qg"]] = gr(tf_data[["ca_empl"]])
tf_data[["ca_gsp_qg"]] = gr(tf_data[["ca_gsp"]])
tf_data[["ca_rgsp_qg"]] = gr(tf_data[["ca_rgsp"]])
tf_data[["ca_inc_qg"]] = gr(tf_data[["ca_inc"]])
tf_data[["ca_rinc_qg"]] = gr(tf_data[["ca_rinc"]])
tf_data[["hpi_eg"]] = gr(tf_data[["hpi"]], lag=8)
tf_data[["ca_hpi_eg"]] = gr(tf_data[["ca_hpi"]], lag=8)
tf_data[["crei_eg"]] = gr(tf_data[["crei"]], lag=8)
tf_data[["dow_eg"]] = gr(tf_data[["dow"]], lag=8)
tf_data[["empl_eg"]] = gr(tf_data[["empl"]], lag=8)
tf_data[["ca_empl_eg"]] = gr(tf_data[["ca_empl"]], lag=8)
tf_data[["ca_gsp_eg"]] = gr(tf_data[["ca_gsp"]], lag=8)
tf_data[["ca_rgsp_eg"]] = gr(tf_data[["ca_rgsp"]], lag=8)
tf_data[["ca_inc_eg"]] = gr(tf_data[["ca_inc"]], lag=8)
tf_data[["ca_rinc_eg"]] = gr(tf_data[["ca_rinc"]], lag=8)
# keep relevant columns
core_names = c(
"dow"
, "hpi"
, "ca_hpi"
, "crei"
, "dow_qg"
, "hpi_qg"
, "ca_hpi_qg"
, "crei_qg"
, "ca_rgsp_qg"
, "ca_gsp_qg"
, "rgdp_qg"
, "gdp_qg"
, "ca_rinc_qg"
, "ca_inc_qg"
, "rinc_qg"
, "inc_qg"
, "ca_unemp"
, "unemp"
, "ca_empl_qg"
, "empl_qg"
, "yld_spread"
, "bbb_spread"
, "hpi_eg"
, "ca_hpi_eg"
, "crei_eg"
, "dow_eg"
, "empl_eg"
, "ca_empl_eg"
, "ca_gsp_eg"
, "ca_rgsp_eg"
, "ca_inc_eg"
, "ca_rinc_eg"
)
tf_data = tf_data[, c("qtr_dt", "yld_03m", core_names), with=FALSE]
# Rate/spread series: quarterly (_qd) and year-over-year (_yd) differences,
# each with lags 1-3.
for (name in c("yld_spread", "bbb_spread", "unemp", "ca_unemp", "yld_03m")) {
dq_nm = concat(name, "_qd")
dy_nm = concat(name, "_yd")
tf_data[[dq_nm]] = delta(tf_data[[name]], lag=1)
tf_data[[concat(dq_nm, "_lag", 1)]] = shift(tf_data[[dq_nm]], n=1)
tf_data[[concat(dq_nm, "_lag", 2)]] = shift(tf_data[[dq_nm]], n=2)
tf_data[[concat(dq_nm, "_lag", 3)]] = shift(tf_data[[dq_nm]], n=3)
tf_data[[dy_nm]] = delta(tf_data[[name]], lag=4)
tf_data[[concat(dy_nm, "_lag", 1)]] = shift(tf_data[[dy_nm]], n=1)
tf_data[[concat(dy_nm, "_lag", 2)]] = shift(tf_data[[dy_nm]], n=2)
tf_data[[concat(dy_nm, "_lag", 3)]] = shift(tf_data[[dy_nm]], n=3)
}
for (name in core_names) {
# Transformations:
if (name %in% c("hpi", "crei", "ca_hpi", "bbb_spread", "yld_spread")) {
# Log-run ratio
# Deviation (%) from the trailing 4-, 8- and 12-quarter moving average
# (_lf / _le / _lt), each with lags 1-4.
lf_nm = concat(name,"_lf")
le_nm = concat(name,"_le")
lt_nm = concat(name,"_lt")
# Long-run ratio:
tf_data[[lf_nm]] = 100 * ((tf_data[[name]]/ma(tf_data[[name]], n=4)) - 1)
tf_data[[concat(lf_nm, "_lag", 1)]] = shift(tf_data[[lf_nm]], n=1)
tf_data[[concat(lf_nm, "_lag", 2)]] = shift(tf_data[[lf_nm]], n=2)
tf_data[[concat(lf_nm, "_lag", 3)]] = shift(tf_data[[lf_nm]], n=3)
tf_data[[concat(lf_nm, "_lag", 4)]] = shift(tf_data[[lf_nm]], n=4)
tf_data[[le_nm]] = 100 * ((tf_data[[name]]/ma(tf_data[[name]], n=8)) - 1)
tf_data[[concat(le_nm, "_lag", 1)]] = shift(tf_data[[le_nm]], n=1)
tf_data[[concat(le_nm, "_lag", 2)]] = shift(tf_data[[le_nm]], n=2)
tf_data[[concat(le_nm, "_lag", 3)]] = shift(tf_data[[le_nm]], n=3)
tf_data[[concat(le_nm, "_lag", 4)]] = shift(tf_data[[le_nm]], n=4)
tf_data[[lt_nm]] = 100 * ((tf_data[[name]]/ma(tf_data[[name]], n=12)) - 1)
tf_data[[concat(lt_nm, "_lag", 1)]] = shift(tf_data[[lt_nm]], n=1)
tf_data[[concat(lt_nm, "_lag", 2)]] = shift(tf_data[[lt_nm]], n=2)
tf_data[[concat(lt_nm, "_lag", 3)]] = shift(tf_data[[lt_nm]], n=3)
tf_data[[concat(lt_nm, "_lag", 4)]] = shift(tf_data[[lt_nm]], n=4)
}
# Lag-1:
# Lags 1-4 of every core series.
tf_data[[concat(name, "_lag", 1)]] = shift(tf_data[[name]], n=1)
tf_data[[concat(name, "_lag", 2)]] = shift(tf_data[[name]], n=2)
tf_data[[concat(name, "_lag", 3)]] = shift(tf_data[[name]], n=3)
tf_data[[concat(name, "_lag", 4)]] = shift(tf_data[[name]], n=4)
# Annualized Growth Rates
if (length(grep("_qg", name)) != 0) {
# Annualized Rate
# _ag = geometric mean of the trailing four quarterly growth rates,
# re-expressed as a quarterly percent rate.
ag_nm = gsub("_qg", "_ag", name)
rate_vec = tf_data[[name]]/100
n = length(rate_vec)
tf_data[[ag_nm]] = sapply(1:n, function(t) {
if (t < 4) { agr = NA }
else {
agr = 1
for (j in 0:3) {
agr = agr * (1 + rate_vec[t - j])
}
}
agr = (agr^(1/4)) - 1
agr = 100 * agr
agr
}
)
tf_data[[concat(ag_nm, "_lag", 1)]] = shift(tf_data[[ag_nm]], n=1)
tf_data[[concat(ag_nm, "_lag", 2)]] = shift(tf_data[[ag_nm]], n=2)
tf_data[[concat(ag_nm, "_lag", 3)]] = shift(tf_data[[ag_nm]], n=3)
tf_data[[concat(ag_nm, "_lag", 4)]] = shift(tf_data[[ag_nm]], n=4)
# _ya = trailing 4-quarter arithmetic average of the quarterly growth rate.
ya_nm = gsub("_qg", "_ya", name)
tf_data[[ya_nm]] = ma(tf_data[[name]], n=4)
tf_data[[concat(ya_nm, "_lag", 1)]] = shift(tf_data[[ya_nm]], n=1)
tf_data[[concat(ya_nm, "_lag", 2)]] = shift(tf_data[[ya_nm]], n=2)
tf_data[[concat(ya_nm, "_lag", 3)]] = shift(tf_data[[ya_nm]], n=3)
tf_data[[concat(ya_nm, "_lag", 4)]] = shift(tf_data[[ya_nm]], n=4)
}
}
# Great Recession window (NBER dating at quarter ends) and the CCAR
# 9-quarter projection horizon.
RECESSION_START = "2007-12-31"
RECESSION_END = "2009-06-30"
START_9Q = "2017-03-31"
END_9Q = "2019-03-31"
tf_data[["is_recession"]] = ifelse(tf_data[["qtr_dt"]] >= as.Date(RECESSION_START) & tf_data[["qtr_dt"]] <= as.Date(RECESSION_END), TRUE, FALSE)
tf_data[["is_9q_data"]] = ifelse(tf_data[["qtr_dt"]] >= as.Date(START_9Q) & tf_data[["qtr_dt"]] <= as.Date(END_9Q), TRUE, FALSE)
tf_data
}
# Build each scenario's modeling dataset; truncate at 2019 Q1, the end of the
# 9-quarter projection horizon. The historic set reuses the baseline regional
# and employment tables (they share the observed history).
historic = transform(frb_historic, reg_baseline, empl_baseline)[qtr_dt <= as.Date("2019-03-31"),]
baseline = transform(frb_baseline, reg_baseline, empl_baseline)[qtr_dt <= as.Date("2019-03-31"),]
adverse = transform(frb_adverse, reg_adverse, empl_adverse)[qtr_dt <= as.Date("2019-03-31"),]
severe = transform(frb_severe, reg_severe, empl_severe)[qtr_dt <= as.Date("2019-03-31"),]
### Save Files for Later #######################################################
# One RDS per scenario, consumed by the downstream model-estimation scripts.
saveRDS(historic, concat(pth_out, "/econ-data-historic.RDS"))
saveRDS(baseline, concat(pth_out, "/econ-data-baseline.RDS"))
saveRDS(adverse, concat(pth_out, "/econ-data-adverse.RDS"))
saveRDS(severe, concat(pth_out, "/econ-data-severe.RDS"))
################################################################################
<file_sep>/S1_02_Data_Exploration_20171118.Rmd
---
title: "S01_Data_Exploration_20171012"
author: "<NAME>"
date: "October 12, 2017"
output:
html_document:
toc: true
theme: default
toc_depth: 3
toc_float:
collapsed: false
smooth_scroll: false
---
<style>
pre {
overflow-x: auto;
}
pre code {
word-wrap: normal;
white-space: pre;
}
</style>
```{r global_options, echo = FALSE, include = FALSE}
# Wide console so wide table prints are not wrapped in the rendered document.
options(width = 999)
# Global knitr defaults: hide code, suppress warnings/messages, no caching.
knitr::opts_chunk$set(echo = FALSE, warning = FALSE, message = FALSE,
cache = FALSE, tidy = FALSE, size = "small")
```
## 1.1 Loading R Packages
```{r,echo=F,include=FALSE}
### record the starting time
time0<-Sys.time()
# Packages used across the exploration; any missing ones are installed first.
# NOTE(review): brewer.pal() is called in later chunks but RColorBrewer is not
# in this list — confirm it is attached via a dependency or add it here.
requirements <- c("lattice","speedglm","data.table","ggplot2","knitr","gsubfn","zoo","sqldf","latticeExtra","sandwich","QuantPsyc","gridExtra","DT","urca","tseries","car",'ellipse','ResourceSelection','tidyr','snow','parallel','dplyr','panelAR',"moments","LambertW")
#install packages if you have not
for(requirement in requirements){if( !(requirement %in% installed.packages())) install.packages(requirement)}
#load all required packages
lapply(requirements, require, character.only=T);
# NOTE(review): 'full' looks like a toggle for heavier analyses; it is not
# referenced in this excerpt — confirm where it is used.
full=FALSE;
```
## 1.2 Import Data & Cleaning
```{r, echo=TRUE, include=FALSE, warning=FALSE}
#####################################
#### set directory and load data ####
#####################################
## dir <- "/Users/jiahe/Documents/project/Union Bank/HQA/Code"
# NOTE(review): machine-specific absolute path — update per environment.
setwd("C:\\Users\\mingxie\\Desktop\\KPMG\\A - Projects\\201710 BOH Model Development\\2 - Code\\Model Development Code Package")
# Charge-offs (CO), recoveries (RE), ending balances (EB), and peer-bank call
# report pulls; all wide by entity.
data0.co <- fread("S1_00_CO.csv", stringsAsFactors = FALSE)
data0.re <- fread("S1_00_RE.csv", stringsAsFactors = FALSE)
data0.eb <- fread("S1_00_EB.csv", stringsAsFactors = FALSE)
data0.peers <- fread("S1_00_CR_PEERS(Ming 2017-11-16).csv", stringsAsFactors = FALSE)
# data0.peers <- fread("S1_00_CR_PEERS(Ming 2017-10-20).csv", stringsAsFactors = FALSE)
data0.peers[,Date:=as.yearqtr(Date,'%m/%d/%Y'),]
# setnames(data0.peers,'IDRSSD','Sub')
#data2[Date=='2011Q4'&ENTITITY_NAME=='Bank of Hope',,]
# Reshape each wide table to long (one row per portfolio/date/sub-entity),
# then merge the three measures and zero-fill missing values.
data1.co<-melt(data0.co,id.vars = c('Portfolio','Date'),variable.name = 'Sub',value.name = 'GCO');
data1.re<-melt(data0.re,id.vars = c('Portfolio','Date'),variable.name = 'Sub',value.name = 'Recovery');
data1.eb<-melt(data0.eb,id.vars = c('Portfolio','Date'),variable.name = 'Sub',value.name = 'Balance');
data1<-merge(merge(data1.co,data1.re,all=TRUE),data1.eb,all=TRUE)
data1[,Date:=as.yearqtr(Date),]
data1[is.na(GCO),GCO:=0,]
data1[is.na(Recovery),Recovery:=0,]
data1[is.na(Balance),Balance:=0,]
data1[,ENTITY_NAME:='Bank of Hope',]
# Append peer banks (OPUS BANK excluded) to the BOH long table.
data2<-rbind(data1,data0.peers[ENTITY_NAME!='OPUS BANK',,],fill=TRUE)
data2[,Portfolio2:=ifelse(Portfolio%in%c('NOOCRE'),'NOOCRE',Portfolio),]
# NOTE(review): Balance.Ave and Balance.Lag are each computed twice — first
# keyed by (...,Date), then re-computed keyed without Date, overwriting the
# first result. The second (without Date) appears to be the effective one;
# confirm the first calls are intentional.
data2[,Balance.Ave:=.5*(Balance+ifelse(shift(Balance,1,fill=0)>0,shift(Balance,1),Balance)),keyby=.(Portfolio,ENTITY_NAME,Sub,Date)]
data2[,Balance.Ave:=.5*(Balance+ifelse(shift(Balance,1,fill=0)>0,shift(Balance,1),Balance)),keyby=.(Portfolio,ENTITY_NAME,Sub)]
data2[,Balance.Lag:=ifelse(shift(Balance,1,fill=0)>0,shift(Balance,1),Balance),keyby=.(Portfolio,ENTITY_NAME,Sub,Date)]
data2[,Balance.Lag:=ifelse(shift(Balance,1,fill=0)>0,shift(Balance,1),Balance),keyby=.(Portfolio,ENTITY_NAME,Sub)]
# data2[,DPD30.Lag1:=shift(DPD30,1,fill=0),keyby=.(Portfolio,ENTITY_NAME,Sub,Date)]
# data2[,DPD30.Lag1:=shift(DPD30,1,fill=0),keyby=.(Portfolio,ENTITY_NAME,Sub)]
# data2[,DPD30.Lag2:=shift(DPD30,2,fill=0),keyby=.(Portfolio,ENTITY_NAME,Sub,Date)]
# data2[,DPD30.Lag2:=shift(DPD30,2,fill=0),keyby=.(Portfolio,ENTITY_NAME,Sub)]
# data2[,DPD30.Lag3:=shift(DPD30,3,fill=0),keyby=.(Portfolio,ENTITY_NAME,Sub,Date)]
# data2[,DPD30.Lag3:=shift(DPD30,3,fill=0),keyby=.(Portfolio,ENTITY_NAME,Sub)]
#
# data2[,DPD90.Lag1:=shift(DPD90,1,fill=0),keyby=.(Portfolio,ENTITY_NAME,Sub,Date)]
# data2[,DPD90.Lag1:=shift(DPD90,1,fill=0),keyby=.(Portfolio,ENTITY_NAME,Sub)]
# data2[,DPD90.Lag2:=shift(DPD90,2,fill=0),keyby=.(Portfolio,ENTITY_NAME,Sub,Date)]
# data2[,DPD90.Lag2:=shift(DPD90,2,fill=0),keyby=.(Portfolio,ENTITY_NAME,Sub)]
# data2[,DPD90.Lag3:=shift(DPD90,3,fill=0),keyby=.(Portfolio,ENTITY_NAME,Sub,Date)]
# data2[,DPD90.Lag3:=shift(DPD90,3,fill=0),keyby=.(Portfolio,ENTITY_NAME,Sub)]
#
# data2[,NCRL.Lag1:=shift(NCRL,1,fill=0),keyby=.(Portfolio,ENTITY_NAME,Sub,Date)]
# data2[,NCRL.Lag1:=shift(NCRL,1,fill=0),keyby=.(Portfolio,ENTITY_NAME,Sub)]
# data2[,NCRL.Lag2:=shift(NCRL,2,fill=0),keyby=.(Portfolio,ENTITY_NAME,Sub,Date)]
# data2[,NCRL.Lag2:=shift(NCRL,2,fill=0),keyby=.(Portfolio,ENTITY_NAME,Sub)]
# data2[,NCRL.Lag3:=shift(NCRL,3,fill=0),keyby=.(Portfolio,ENTITY_NAME,Sub,Date)]
# data2[,NCRL.Lag3:=shift(NCRL,3,fill=0),keyby=.(Portfolio,ENTITY_NAME,Sub)]
# tmp<-dcast(data2[grepl('*Hope*',ENTITITY_NAME)&Date>='2007 Q2'&is.na(Balance)==0&Balance!=0,,],Date+Portfolio+Sub~ENTITITY_NAME,value.var =
# 'Balance',fill=0)
# tmp[tmp$'Bank of Hope'!=tmp$'Bank of Hope (KPMG Pull CR)',,]
# #tmp[Date=='2011 Q4',,]
# tmp<-dcast(data2[grepl('*Hope*',ENTITITY_NAME)&Date>='2007 Q2'&is.na(GCO)==0&GCO!=0,,],Date+Portfolio+Sub~ENTITITY_NAME,value.var =
# 'GCO',fill=0)
# tmp[tmp$'Bank of Hope'!=tmp$'Bank of Hope (KPMG Pull CR)',,]
#
# tmp<-dcast(data2[grepl('*Hope*',ENTITITY_NAME)&Date>='2007 Q2'&is.na(Recovery)==0&Recovery!=0,,],Date+Portfolio+Sub~ENTITITY_NAME,value.var =
# 'Recovery',fill=0)
# tmp[tmp$'Bank of Hope'!=tmp$'Bank of Hope (KPMG Pull CR)',,]
# data0.mev <- fread("S1_00_Macro_data_transformed1.csv", stringsAsFactors = FALSE)
# Transformed macroeconomic variables for later model chunks.
data0.mev <- fread("S0_09_MEV_data_transformed_111717.csv", stringsAsFactors = FALSE)
# str(data0.mev)
data0.mev[,Date:=as.yearqtr(Date,'%Y-%m-%d'),]
# data0.mev[Date>=2007,.N,Date]
# data2.temp<-copy(data2)
# temp<-merge(data2[Portfolio2!='CONSTRUCTION',.N,keyby=.(Date,ENTITY_NAME,Portfolio2,IDRSSD,IDRSSD)],data2.temp[,.N,keyby=.(Date,ENTITY_NAME,IDRSSD,Portfolio2)],all=TRUE)
# temp2<-temp[N.x!=N.y|is.na(N.x)|is.na(N.y),,]
#
#
# temp1[,.N,keyby=.(ENTITY_NAME)]
# temp1[,.N,keyby=.(ENTITY_NAME)]
# # dim(data2);
# temp1<-data2[year(Date)>=2007&Portfolio2!='CONSTRUCTION',.(
# Balance=sum(Balance,na.rm =TRUE)
# ,GCO=sum(GCO,na.rm =TRUE)
# ,Recovery=sum(Recovery)
# ,NCOR=sum(GCO-Recovery,na.rm =TRUE)/sum(Balance,na.rm =TRUE))
# ,keyby=.(ENTITY_NAME,Date,Portfolio2)]
# temp2<-data2.temp[year(Date)>=2007&Portfolio2!='CONSTRUCTION',.(
# Balance=sum(Balance,na.rm =TRUE)
# ,GCO=sum(GCO,na.rm =TRUE)
# ,Recovery=sum(Recovery)
# ,NCOR=sum(GCO-Recovery,na.rm =TRUE)/sum(Balance,na.rm =TRUE))
# ,keyby=.(ENTITY_NAME,Date,Portfolio2)]
```
## 2.0 BOH Construction
```{r, eval=TRUE, echo=FALSE, include=TRUE, warning=FALSE, fig.width=13, fig.height=12}
# Stacked bar charts of balance / charge-off / recovery by portfolio for the
# KPMG call-report pull of Bank of Hope, plus GCO/NCO/recovery rate series.
i="Bank of Hope (KPMG Pull CR)"
data2.sub<- copy(data2[ENTITY_NAME==i,,])
cat(paste('Call Report Details for:',i))
# NOTE(review): brewer.pal() requires RColorBrewer, which is not loaded in the
# package chunk above — confirm it is attached.
myColours <- c(brewer.pal(5,"Greens")[3:5],brewer.pal(5,"Reds")[3:5],brewer.pal(5,"Blues")[3:5],brewer.pal(5,"Oranges")[3:5])
my.settings <- list(superpose.polygon=list(col=myColours, border="transparent"))
# Label every 4th quarter on the x axis.
x.labels<-data2.sub[year(Date)>=2000&Balance>0,unique(Date),]
at <- seq(1, length(x.labels), 4)
p1<-barchart(Balance/1000~Date|Portfolio,group=as.character(Sub),stack=TRUE
,scale=list(x=list(rot=90,at=at,labels=x.labels[at]),y=list(relation='same'))
,horizontal=FALSE,par.settings = my.settings
,layout=c(2,3),ylab=''
,main=paste(i,'\nDistribution of Historical Balance Amount (MM$)')
,auto.key=list(x =0.51,y=.92,columns=2,border=FALSE,cex=1)
,data2.sub[year(Date)>=2000&Balance>0,,])
print(p1);
# Note: `year(Date)>=2000&GCO` uses the numeric GCO as a logical filter —
# it keeps rows with nonzero charge-offs.
p1<-barchart(GCO~Date|Portfolio,group=as.character(Sub),stack=TRUE
,scale=list(x=list(rot=90,at=at,labels=x.labels[at]),y=list(relation='same'))
,horizontal=FALSE,par.settings = my.settings
,layout=c(2,3),ylab=''
,main=paste(i,'\nDistribution of Historical Charge-Off Amount (k$)')
,auto.key=list(x =0.51,y=.92,columns=2,border=FALSE,cex=1)
,data2.sub[year(Date)>=2000&GCO,,])
print(p1);
p1<-barchart(Recovery~Date|Portfolio,group=as.character(Sub),stack=TRUE
,scale=list(x=list(rot=90,at=at,labels=x.labels[at]),y=list(relation='same'))
,horizontal=FALSE,par.settings = my.settings
,layout=c(2,3),ylab=''
,main=paste(i,'\nDistribution of Historical Recovery Amount (k$)')
,auto.key=list(x =0.51,y=.92,columns=2,border=FALSE,cex=1)
,data2.sub[year(Date)>=2000&Recovery,,])
print(p1);
# Gross charge-off, net charge-off, and recovery rates on lagged balance.
data2.sub.sum00<-data2.sub[year(Date)>=2000,.(
GCOR=sum(GCO)/sum(Balance.Lag,na.rm=TRUE)
,NCOR=sum(GCO-Recovery)/sum(Balance.Lag,na.rm=TRUE)
,RR=-sum(Recovery)/sum(Balance.Lag,na.rm=TRUE)
),keyby=.(Portfolio,Date)]
p1<-xyplot(GCOR+NCOR+RR~Date|Portfolio
,type=c('p','g','l')
,layout=c(2,3)
,xlab='',ylab=''
,main=paste(i,'\n Rates;')
,yscale.components=function(...){ yc <- yscale.components.default(...)
yc$left$labels$labels <-sprintf("%s%%",yc$left$labels$at*100);return(yc)}
,auto.key=list(x =0.82,y=.93,columns=1,border=FALSE,cex=1)
,data2.sub.sum00)
print(p1);
#mean(1,2,na.rm=TRUE)
```
## 2.0 BOH Pre-2007
```{r, eval=TRUE, echo=FALSE, include=TRUE, warning=FALSE, fig.width=12, fig.height=7}
# Same four plots as the previous chunk but for the internally supplied
# "Bank of Hope" series (2x2 layout). NOTE(review): near-duplicate of the
# chunk above — a shared plotting helper would reduce drift between them.
i="Bank of Hope"
data2.sub<- copy(data2[ENTITY_NAME==i,,])
cat(paste('Call Report Details for:',i))
myColours <- c(brewer.pal(5,"Greens")[3:5],brewer.pal(5,"Reds")[3:5],brewer.pal(5,"Blues")[3:5],brewer.pal(5,"Oranges")[3:5])
my.settings <- list(superpose.polygon=list(col=myColours, border="transparent"))
# Label every 4th quarter on the x axis.
x.labels<-data2.sub[year(Date)>=2000&Balance>0,unique(Date),]
at <- seq(1, length(x.labels), 4)
p1<-barchart(Balance/1000~Date|Portfolio,group=as.character(Sub),stack=TRUE
,scale=list(x=list(rot=90,at=at,labels=x.labels[at]),y=list(relation='same'))
,horizontal=FALSE,par.settings = my.settings
,layout=c(2,2),ylab=''
,main=paste(i,'\nDistribution of Historical Balance Amount (MM$)')
,auto.key=list(x =0.46,y=.42,columns=2,border=FALSE,cex=1)
,data2.sub[year(Date)>=2000&Balance>0,,])
print(p1);
p1<-barchart(GCO~Date|Portfolio,group=as.character(Sub),stack=TRUE
,scale=list(x=list(rot=90,at=at,labels=x.labels[at]),y=list(relation='same'))
,horizontal=FALSE,par.settings = my.settings
,layout=c(2,2),ylab=''
,main=paste(i,'\nDistribution of Historical Charge-Off Amount (k$)')
,auto.key=list(x =0.46,y=.42,columns=2,border=FALSE,cex=1)
,data2.sub[year(Date)>=2000&GCO,,])
print(p1);
p1<-barchart(Recovery~Date|Portfolio,group=as.character(Sub),stack=TRUE
,scale=list(x=list(rot=90,at=at,labels=x.labels[at]),y=list(relation='same'))
,horizontal=FALSE,par.settings = my.settings
,layout=c(2,2),ylab=''
,main=paste(i,'\nDistribution of Historical Recovery Amount (k$)')
,auto.key=list(x =0.46,y=.42,columns=2,border=FALSE,cex=1)
,data2.sub[year(Date)>=2000&Recovery,,])
print(p1);
# Gross charge-off, net charge-off, and recovery rates on lagged balance.
data2.sub.sum00<-data2.sub[year(Date)>=2000,.(
GCOR=sum(GCO)/sum(Balance.Lag,na.rm=TRUE)
,NCOR=sum(GCO-Recovery)/sum(Balance.Lag,na.rm=TRUE)
,RR=-sum(Recovery)/sum(Balance.Lag,na.rm=TRUE)
),keyby=.(Portfolio,Date)]
p1<-xyplot(GCOR+NCOR+RR~Date|Portfolio
,type=c('p','g','l')
,layout=c(2,2)
,xlab='',ylab=''
,main=paste(i,'\n Rates;')
,yscale.components=function(...){ yc <- yscale.components.default(...)
yc$left$labels$labels <-sprintf("%s%%",yc$left$labels$at*100);return(yc)}
,auto.key=list(x =0.82,y=.93,columns=1,border=FALSE,cex=1)
,data2.sub.sum00)
print(p1);
#mean(1,2,na.rm=TRUE)
```
## 2.1 Analysis of Peer Banks
Based on discussion, the model developer decided to include pre-2007 data only for the CnI portfolio in the subsequent analysis.
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=13,fig.height=7}
# Restrict the estimation sample (drop CONSTRUCTION everywhere; drop pre-2007
# observations for the CRE portfolios), then produce the same four-plot panel
# for every entity, including the peer banks.
cat('Summary of Estimation Sample')
data2<-data2[Portfolio2!='CONSTRUCTION',,]
data2[,Portfolio2:=ifelse(Portfolio%in%c('NOOCRE'),'NOOCRE',Portfolio),]
dim(data2)
# Keep pre-2007 history only for portfolios outside this CRE list (i.e. CnI).
data2<-data2[!(Portfolio2%in%c('OOCRE','NOOCRE','MF','CONSTRUCTION')&year(Date)<2007),,]
# data2[year(Date)<2007,.N,keyby=.(Portfolio2,Date)]
data2[,.(.N
# ,Balance=sum(ifelse(Date=='2016 Q4',Balance,0),na.rm=TRUE)
# ,GCO_CUM=sum(ifelse(year(Date)==2016,GCO,0),na.rm=TRUE)
# ,Recovery_CUM=sum(ifelse(year(Date)==2016,Recovery,0),na.rm=TRUE)
),keyby=.(ENTITY_NAME,Sub)]
# barchart(rate ~ as.yearqtr(TIME_ID, format = "%Y-%m-%d")|factor('Rating Distribution Over Time (Balance Weighted)')
# ,stack = TRUE, horizontal = FALSE
# ,par.settings = my.settings
# ,scale=list(x=list(rot=90))
# ,ylab=''
# ,yscale.components=function(...){ yc <- yscale.components.default(...)
# yc$left$labels$labels <-sprintf("%s%%",yc$left$labels$at*100);return(yc)}
# ,auto.key=list(x =0.03,y=.92,columns=2,border=FALSE,cex=.8)
# ,group=RTG_AGG,data=tmp[!is.na(rate),,])
# One balance / GCO / recovery / rates panel per entity.
for(i in data2[,sort(unique(ENTITY_NAME)),]){
#i="Bank of Hope (KPMG Pull CR)"
data2.sub<- data2[ENTITY_NAME==i,,]
cat(paste('Call Report Details for:',i))
myColours <- c(brewer.pal(5,"Greens")[3:5],brewer.pal(5,"Reds")[3:5],brewer.pal(5,"Blues")[3:5],brewer.pal(5,"Oranges")[3:5])
my.settings <- list(superpose.polygon=list(col=myColours, border="transparent"))
# p1<-barchart(Balance/1000~Date|Portfolio,group=as.character(Sub),stack=TRUE
# ,scale=list(x=list(rot=90),y=list(relation='same'))
# ,horizontal=FALSE
# ,par.settings = my.settings
# ,layout=c(2,2),ylab=''
# ,main=paste(i,'\nDistribution of Historical Balance Amount (MM$) since 2001')
# ,auto.key=list(x =0.51,y=.42,columns=2,border=FALSE,cex=1)
# ,data2.sub[year(Date)>=2000&Balance>0,,])
# print(p1);
# Label every 4th quarter on the x axis.
x.labels<-data2.sub[year(Date)>=2000&Balance>0,unique(Date),]
at <- seq(1, length(x.labels), 4)
p1<-barchart(Balance/1000~Date|Portfolio,group=as.character(Sub),stack=TRUE
,scale=list(x=list(rot=90,at=at,labels=x.labels[at]),y=list(relation='same'))
,horizontal=FALSE,par.settings = my.settings
,layout=c(2,2),ylab=''
,main=paste(i,'\nDistribution of Historical Balance Amount (MM$)')
,auto.key=list(x =0.51,y=.42,columns=2,border=FALSE,cex=1)
,data2.sub[year(Date)>=2000&Balance>0,,])
print(p1);
p1<-barchart(GCO~Date|Portfolio,group=as.character(Sub),stack=TRUE
,scale=list(x=list(rot=90,at=at,labels=x.labels[at]),y=list(relation='same'))
,horizontal=FALSE,par.settings = my.settings
,layout=c(2,2),ylab=''
,main=paste(i,'\nDistribution of Historical Charge-Off Amount (k$)')
,auto.key=list(x =0.51,y=.42,columns=2,border=FALSE,cex=1)
,data2.sub[year(Date)>=2000&GCO,,])
print(p1);
p1<-barchart(Recovery~Date|Portfolio,group=as.character(Sub),stack=TRUE
,scale=list(x=list(rot=90,at=at,labels=x.labels[at]),y=list(relation='same'))
,horizontal=FALSE,par.settings = my.settings
,layout=c(2,2),ylab=''
,main=paste(i,'\nDistribution of Historical Recovery Amount (k$)')
,auto.key=list(x =0.51,y=.42,columns=2,border=FALSE,cex=1)
,data2.sub[year(Date)>=2000&Recovery,,])
print(p1);
# Gross charge-off, net charge-off, and recovery rates on lagged balance.
data2.sub.sum00<-data2.sub[year(Date)>=2000,.(
GCOR=sum(GCO)/sum(Balance.Lag,na.rm=TRUE)
,NCOR=sum(GCO-Recovery)/sum(Balance.Lag,na.rm=TRUE)
,RR=-sum(Recovery)/sum(Balance.Lag,na.rm=TRUE)
),keyby=.(Portfolio,Date)]
p1<-xyplot(GCOR+NCOR+RR~Date|Portfolio
,type=c('p','g','l')
,layout=c(2,2)
,xlab='',ylab=''
,main=paste(i,'\n Rates;')
,yscale.components=function(...){ yc <- yscale.components.default(...)
yc$left$labels$labels <-sprintf("%s%%",yc$left$labels$at*100);return(yc)}
,auto.key=list(x =0.82,y=.93,columns=1,border=FALSE,cex=1)
,data2.sub.sum00)
print(p1);
}
```
## 2.2 Input Data
```{r,echo=FALSE,include=TRUE,eval=TRUE,warning=FALSE,fig.width=20,fig.height=6}
# Interactive DT widget showing the full estimation sample, with long cells
# truncated to 10 characters (tooltip shows the full value) and a CSV export
# button.
invisible(gc());invisible(gc());
tmp<-copy(data2)
tmp[,Date:=as.character(Date),]
datatable(tmp
,rownames = F
,extensions = c('Scroller','FixedColumns','Buttons','ColReorder')
,options = list(
#rowCallback = JS("function(r,d) {$(r).attr('height', '100px')}"),
columnDefs = list(list(
targets = c(1:4),
render = JS(
"function(data, type, row, meta) {",
"return type === 'display' && data.length > 10 ?",
"'<span title=\"' + data + '\">' + data.substr(0, 10) + '...</span>' : data;",
"}")
)), ## column index starting from 0
deferRender = TRUE,
scrollY = 300,
scroller = TRUE,
scrollX = TRUE,
fixedColumns = list(leftColumns = 2),
dom = 'Blfrtip',
buttons =
list( list(
extend = 'collection',
buttons = c('csv'),
text = 'Save as .csv file'
)), ## it doesn't work if only keep 'csv'; weird
colReorder = TRUE,
initComplete = JS(
"function(settings, json) {",
"$(this.api().table().header()).css({'font-size': '12px'});",
"$(this.api().table().body()).css({'font-size': '12px'});",
"}") ## change both header and body font size
))
# %>% formatStyle("Scenario", target = 'row', backgroundColor = styleEqual(sce, c("#9ECAE1","#FCC5C0","#FA9FB5")))
```
## 2.3 Peer Banks Comparison
### EB, NCOR Historical Chart
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=13,fig.height=17}
# Peer-bank comparison: quarterly Balance/GCO/Recovery/NCOR per bank and
# portfolio from 2000 on, excluding the KPMG pull of BOH, plus an 'Overall'
# pseudo-bank appended per (Date, Portfolio2) cell.
# data2[year(Date)<2007,.N,keyby=.(Portfolio2,Date)]
temp0<-data2[year(Date)>=2000&ENTITY_NAME!='Bank of Hope (KPMG Pull CR)',.(
	Balance=sum(Balance)
	,GCO=sum(GCO)
	,Recovery=sum(Recovery)
	,NCOR=sum(GCO-Recovery,na.rm =TRUE)/sum(Balance.Lag,na.rm=TRUE)
	)
	,keyby=.(ENTITY_NAME,Date,Portfolio2)]
# NOTE(review): the 'Overall' NCOR is the unweighted mean of bank-level NCORs,
# not a balance-weighted pooled rate — presumably intentional; confirm.
temp1<-rbind(temp0,temp0[,.(
	ENTITY_NAME='Overall'
	,Balance=sum(Balance)
	,GCO=sum(GCO)
	,Recovery=sum(Recovery)
	,NCOR=mean(NCOR,na.rm=TRUE))
	,keyby=.(Date,Portfolio2)])
# Distinct colour per bank: shades 2/4/6 from five ColorBrewer families.
myColours <- c(brewer.pal(6,"Greens")[seq(2,6,2)],
	brewer.pal(6,"Reds")[seq(2,6,2)],
	brewer.pal(6,"Blues")[seq(2,6,2)],
	brewer.pal(6,"Oranges")[seq(2,6,2)],
	brewer.pal(6,"Purples")[seq(2,6,2)]
	)
my.settings <- list(
	superpose.symbol = list(col=myColours, border="transparent")
	,superpose.line = list(col=myColours, border="transparent")
	)
# NCOR time series, paneled BOH/Overall vs. peer banks crossed with portfolio;
# yscale.components renders the y axis as percentages.
xyplot(NCOR~Date|ifelse(ENTITY_NAME%in%c('Overall','Bank of Hope'),'Bank of Hope, Overall','Peer Banks')+Portfolio2,group=ENTITY_NAME
	,type=c('l','g')
	,layout=c(2,4)
	,lwd=2
	,par.settings = my.settings
	,main='Historical Net Charge-off Rate'
	,scale=list(y=list(relation='same'))
	,yscale.components=function(...){ yc <- yscale.components.default(...)
	yc$left$labels$labels <-sprintf("%s%%",yc$left$labels$at*100);return(yc)}
	,auto.key=list(x =0.52,y=.97,columns=2,border=FALSE,cex=1,lwd=4)
	,temp1)
# 'Overall' only, one panel, grouped by portfolio.
xyplot(NCOR~Date|ENTITY_NAME,group=Portfolio2
	,type=c('l','g')
	,layout=c(1,1)
	,aspect=.4
	,lwd=2
	# ,par.settings = my.settings
	,main='Historical Net Charge-off Rate'
	,scale=list(y=list(relation='same'))
	,yscale.components=function(...){ yc <- yscale.components.default(...)
	yc$left$labels$labels <-sprintf("%s%%",yc$left$labels$at*100);return(yc)}
	,auto.key=list(x =0.75,y=.90,columns=1,border=FALSE,cex=1,lwd=4)
	,temp1[ENTITY_NAME%in%c('Overall'),,])
# Balance levels (B$) per portfolio, free y scales, peers only.
xyplot(Balance/1000000~Date|Portfolio2,group=ENTITY_NAME
	,type=c('l','g')
	,layout=c(2,2),lwd=2,aspect=.5
	,main='Historical Balance B$'
	,par.settings = my.settings
	,scale=list(y=list(relation='free'))
	# ,yscale.components=function(...){ yc <- yscale.components.default(...)
	# yc$left$labels$labels <-sprintf("%s%%",yc$left$labels$at*100);return(yc)}
	,auto.key=list(x =0.48,y=.93,columns=1,border=FALSE,cex=1,lwd=4)
	,temp1[ENTITY_NAME!='Overall',,])
```
### Correlation
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=13,fig.height=7}
# Correlation of quarterly NCOR between Bank of Hope and each peer, per
# portfolio. BOH history is split into its legacy banks (Wilshire, BBCN)
# via the Sub column so each legacy lineage correlates separately.
# NOTE: this chunk redefines the globals temp0/temp1 used by later chunks.
cat('Correlation Analysis');
temp0<-data2[ENTITY_NAME=='Bank of Hope',,]
temp0[Sub%in%c('Wilshire Bank','Saehan Bancorp','Mirae Bank','Liberty Bank of New York','Bank Asiana'),ENTITY_NAME:='Wilshire Bank']
temp0[Sub%in%c('BBCN','Innovative Bank','Nara Bank','Asiana bank','Pacific International Bank','Foster Bankshares, Inc.'),ENTITY_NAME:='BBCN']
temp1<-rbind(
	temp0[year(Date)>=2000,.(
	Balance=sum(Balance,na.rm =TRUE)
	,GCO=sum(GCO,na.rm =TRUE)
	,Recovery=sum(Recovery)
	,NCOR=sum(GCO-Recovery,na.rm =TRUE)/sum(Balance.Lag,na.rm=TRUE)
	)
	,keyby=.(ENTITY_NAME,Date,Portfolio2)]
	,
	data2[year(Date)>=2000&ENTITY_NAME!='Bank of Hope (KPMG Pull CR)',.(
	Balance=sum(Balance,na.rm =TRUE)
	,GCO=sum(GCO,na.rm =TRUE)
	,Recovery=sum(Recovery,na.rm =TRUE)
	,NCOR=sum(GCO-Recovery,na.rm =TRUE)/sum(Balance.Lag,na.rm=TRUE)
	)
	,keyby=.(ENTITY_NAME,Date,Portfolio2)]
)
temp1[is.na(NCOR),NCOR:=0,]
# data3[Portfolio2==x&Date=='2007Q1']
# dcast(data3[Portfolio2==x],Date~ENTITY_NAME,value.var='Balance',fill=0)
# Per portfolio: pivot banks to columns (missing quarters filled with 0 —
# NOTE(review): zero-fill can distort correlations for short histories) and
# keep each bank's correlation against the 'Bank of Hope' column.
temp1.cor<-do.call('rbind',lapply(unique(temp1[,Portfolio2,]),function(x){
	# x='CnI'
	tmp1<-dcast(temp1[Portfolio2==x],Date~ENTITY_NAME,value.var='NCOR',fill=0)
	tmp2<-cor(tmp1[,2:ncol(tmp1),with=FALSE],use='pairwise.complete.obs')
	# tmp2<-cor(tmp1[,2:ncol(tmp1),with=FALSE])
	# 'digit' partially matches round()'s 'digits' argument.
	return(data.table(Portfolio2=x,Peer.Bank=names(tmp2[,1]),cor=round(tmp2[,'Bank of Hope'],digit=2)))
}))
print(dcast(temp1.cor,Peer.Bank~Portfolio2,value.var = 'cor'))
```
### Joint Test
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=11,fig.height=18}
# Joint test via 95% confidence ellipses for the OLS fit BOH ~ 1 + peer:
# for each (portfolio, peer bank), regress BOH NCOR on the peer's NCOR and
# trace the (intercept, slope) confidence ellipse. A peer is "similar" if
# the reference point (slope=1, intercept=0) falls inside its ellipse.
cat('Peer Banks OLS Ellipse Interval')
temp1.tst<-do.call('rbind',lapply(unique(temp1[,Portfolio2,]),function(x){
	do.call('rbind',lapply(setdiff(unique(temp1[,ENTITY_NAME,]),c('Bank of Hope',"Bank of Hope (KPMG Pull CR)")),function(y){
	# x='Count Weighted'
	# x='CnI'
	# y='Banner Corporation'
	# Two-column table: response (BOH NCOR) first, peer's NCOR second;
	# lm(data.frame) regresses the first column on the rest.
	tmp1<-dcast(temp1[Portfolio2==x],Date~ENTITY_NAME,value.var='NCOR',fill=0)[,c('Bank of Hope',y),,with=FALSE]
	# ellipse(lm) returns the boundary points of the 95% confidence ellipse.
	tmp2<-data.table(Portfolio2=x,Peer.Bank=y,ellipse(lm(tmp1)))
	setnames(tmp2,c('Portfolio2','Peer.Bank','Intercept','Slope'))
	return(tmp2)
	}))
}))
# seq(1,5,2)
myColours <- c(brewer.pal(6,"Greens")[seq(2,6,2)],
	brewer.pal(6,"Reds")[seq(2,6,2)],
	brewer.pal(6,"Blues")[seq(2,6,2)],
	brewer.pal(6,"Oranges")[seq(2,6,2)],
	brewer.pal(6,"Purples")[seq(2,6,2)]
	)
# myColours<-c(brewer.pal(11,"Set3")
# display.brewer.all(n=NULL, type="all", select=NULL, exact.n=TRUE)
# my.settings <- list(superpose.polygon=list(col=myColours, border="transparent"))
my.settings <- list(
	superpose.symbol = list(col=myColours, border="transparent")
	,superpose.line = list(col=myColours, border="transparent")
	)
# Same ellipses twice: shared axes (comparable scales) then free axes
# (per-panel detail). Crosshair at slope=1 / intercept=0 is the null point;
# pch=13 marks each ellipse centre.
cat('Fix Axis')
xyplot(Intercept~Slope|Portfolio2,group=Peer.Bank,temp1.tst
	,type=c('p','g','l')
	,layout=c(1,4)
	,main='95% Confidence Ellipse for OLS: BOH ~ 1 + Peer.Bank (Slope and Intercept) - Fix Axis'
	,auto.key = list(x = .61, y = .6, corner = c(0, 0),lines=FALSE,lwd=4)
	,scale=list(x=list(relation='same'),y=list(relation='same'))
	,par.settings = my.settings
	,panel = panel.superpose
	,lwd=2
	,yscale.components=function(...){ yc <- yscale.components.default(...)
	yc$left$labels$labels <-sprintf("%s%%",yc$left$labels$at*100);return(yc)}
	# must for use of panel.groups
	,panel.groups=function(x, y, col, col.symbol, ...) {
	panel.abline(v=1)
	panel.abline(h=0)
	panel.xyplot(x, y, col=col.symbol, ...)
	panel.points(mean(x),mean(y),col=col.symbol,pch =13,cex=1.5)
	}
)
cat('Free Axis')
xyplot(Intercept~Slope|Portfolio2,group=Peer.Bank,temp1.tst
	,type=c('p','g','l')
	,layout=c(1,4)
	,main='95% Confidence Ellipse for OLS: BOH ~ 1 + Peer.Bank (Slope and Intercept) - Free Axis'
	,auto.key = list(x = .61, y = .6, corner = c(0, 0),lines=FALSE,lwd=4)
	,par.settings = my.settings
	,scale=list(x=list(relation='free'),y=list(relation='free'))
	,panel = panel.superpose
	,lwd=2
	,yscale.components=function(...){ yc <- yscale.components.default(...)
	yc$left$labels$labels <-sprintf("%s%%",yc$left$labels$at*100);return(yc)}
	# must for use of panel.groups
	,panel.groups=function(x, y, col, col.symbol, ...) {
	panel.abline(v=1)
	panel.abline(h=0)
	panel.xyplot(x, y, col=col.symbol, ...)
	panel.points(mean(x),mean(y),col=col.symbol,pch =13,cex=1.5)
	}
)
```
### Single Test Slope
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=11,fig.height=18}
# Single-hypothesis test per (portfolio, peer bank): fit BOH NCOR ~ peer NCOR
# and check whether the 95% CI for the slope contains 1.
cat('Single Hypothesis is Slope = 1')
# NOTE(review): this redefines %between% with STRICT inequalities, shadowing
# data.table's inclusive %between% for the rest of the session — confirm the
# open interval is intended at the CI endpoints.
`%between%`<-function(x,rng) x>rng[1] & x<rng[2]
temp1.tst<-do.call('rbind',lapply(unique(temp1[,Portfolio2,]),function(x){
	do.call('rbind',lapply(setdiff(unique(temp1[,ENTITY_NAME,]),c('Bank of Hope',"Bank of Hope (KPMG Pull CR)")),function(y){
	# x='Count Weighted'
	# x='CnI'
	# y='Banner Corporation'
	# Response column (BOH) first, peer second; lm(data.frame) regresses
	# the first column on the rest.
	tmp1<-dcast(temp1[Portfolio2==x],Date~ENTITY_NAME,value.var='NCOR',fill=0)[,c('Bank of Hope',y),,with=FALSE]
	# Row 2 of confint() is the slope coefficient's 95% CI.
	tmp2<-round(confint(lm(tmp1))[2,],4)
	tmp3<-data.table(Portfolio=x,Peer.Bank=y,tmp2[1],tmp2[2],1%between%c(tmp2))
	setnames(tmp3,c('Portfolio2','Peer.Bank','2.5 %','97.5 %','Test Results'))
	return(tmp3)
	}))
}))
# Full results table.
datatable(temp1.tst
	,rownames = F
	,extensions = c('Scroller','FixedColumns','Buttons','ColReorder')
	,options = list(
	#rowCallback = JS("function(r,d) {$(r).attr('height', '100px')}"),
	columnDefs = list(list(
	targets = c(1:4),
	render = JS(
	"function(data, type, row, meta) {",
	"return type === 'display' && data.length > 10 ?",
	"'<span title=\"' + data + '\">' + data.substr(0, 10) + '...</span>' : data;",
	"}")
	)), ## column index starting from 0
	deferRender = TRUE,
	scrollY = 300,
	scroller = TRUE,
	scrollX = TRUE,
	fixedColumns = list(leftColumns = 2),
	dom = 'Blfrtip',
	buttons =
	list( list(
	extend = 'collection',
	buttons = c('csv'),
	text = 'Save as .csv file'
	)), ## it doesn't work if only keep 'csv'; weird
	colReorder = TRUE,
	initComplete = JS(
	"function(settings, json) {",
	"$(this.api().table().header()).css({'font-size': '12px'});",
	"$(this.api().table().body()).css({'font-size': '12px'});",
	"}") ## change both header and body font size
	))
# Same results pivoted to a peer-bank × portfolio pass/fail grid.
datatable(dcast(temp1.tst,Peer.Bank~Portfolio2,value.var = 'Test Results')
	,rownames = F
	,extensions = c('Scroller','FixedColumns','Buttons','ColReorder')
	,options = list(
	#rowCallback = JS("function(r,d) {$(r).attr('height', '100px')}"),
	columnDefs = list(list(
	targets = c(1:4),
	render = JS(
	"function(data, type, row, meta) {",
	"return type === 'display' && data.length > 10 ?",
	"'<span title=\"' + data + '\">' + data.substr(0, 10) + '...</span>' : data;",
	"}")
	)), ## column index starting from 0
	deferRender = TRUE,
	scrollY = 300,
	scroller = TRUE,
	scrollX = TRUE,
	fixedColumns = list(leftColumns = 2),
	dom = 'Blfrtip',
	buttons =
	list( list(
	extend = 'collection',
	buttons = c('csv'),
	text = 'Save as .csv file'
	)), ## it doesn't work if only keep 'csv'; weird
	colReorder = TRUE,
	initComplete = JS(
	"function(settings, json) {",
	"$(this.api().table().header()).css({'font-size': '12px'});",
	"$(this.api().table().body()).css({'font-size': '12px'});",
	"}") ## change both header and body font size
	))
```
### Chow's Test - Pairs
Using a general F-test to compare the two models below:
+ Model 1 (Unrestricted):
$$NCOR \sim \alpha_0+\alpha_{pb} \cdot I(peer.bank==1)+\beta \cdot x+\beta_{pb} \cdot x \cdot I(peer.bank==1)$$
+ Model 2 (Restricted):
$$NCOR \sim \alpha_0+\alpha_{pb} \cdot I(peer.bank==1)+\beta \cdot x$$
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=10,fig.height=5}
# Data prep for the paired Chow tests: rebuild temp1 (BOH split into its
# legacy banks via Sub, stacked with all banks from data2), then attach the
# peer-average NCOR (NCOR.all) per (Portfolio2, Date) as the common regressor.
cat('Both Original and Transformed NCOR are tested')
temp0<-data2[ENTITY_NAME=='Bank of Hope',,]
temp0[Sub%in%c('Wilshire Bank','Saehan Bancorp','Mirae Bank','Liberty Bank of New York','Bank Asiana'),ENTITY_NAME:='Wilshire Bank']
temp0[Sub%in%c('BBCN','Innovative Bank','Nara Bank','Asiana bank','Pacific International Bank','Foster Bankshares, Inc.'),ENTITY_NAME:='BBCN']
temp1<-rbind(
	temp0[year(Date)>=2000,.(
	Balance=sum(Balance,na.rm =TRUE)
	,GCO=sum(GCO,na.rm =TRUE)
	# NOTE(review): Recovery sums here lack na.rm=TRUE unlike the other
	# aggregations in this file — NA recoveries would propagate; confirm.
	,Recovery=sum(Recovery)
	,NCOR=sum(GCO-Recovery,na.rm =TRUE)/sum(Balance.Lag,na.rm=TRUE)
	)
	,keyby=.(ENTITY_NAME,Date,Portfolio2)]
	,
	data2[year(Date)>=2000,.(
	Balance=sum(Balance,na.rm =TRUE)
	,GCO=sum(GCO,na.rm =TRUE)
	,Recovery=sum(Recovery)
	,NCOR=sum(GCO-Recovery,na.rm =TRUE)/sum(Balance.Lag,na.rm=TRUE)
	)
	,keyby=.(ENTITY_NAME,Date,Portfolio2)]
)
# NCOR.all: unweighted mean NCOR across peers (legacy banks and the KPMG
# pull excluded) — the benchmark series for the Chow regressions.
temp2<-merge(temp1,temp1[!ENTITY_NAME%in%c('BBCN','Wilshire Bank','Bank of Hope (KPMG Pull CR)'),.(
	NCOR.all=mean(NCOR,na.rm=TRUE)
	# NCOR.all=sum(GCO-Recovery,na.rm =TRUE)/sum(Balance,na.rm =TRUE)
	),keyby=.(Portfolio2,Date)],by=c('Portfolio2','Date'))
# temp1[,.N,keyby=ENTITY_NAME]
# temp2[,summary(NCOR),]
```
temp2.all<-lapply(temp2[,sort(unique(Portfolio2)),],function(x){
lapply(temp2[!ENTITY_NAME%in%c('Bank of Hope','Bank of Hope (KPMG Pull CR)'),sort(unique(ENTITY_NAME)),],function(y){
# y='WASHINGTON FEDERAL'
# x='CnI'
if(x!='OOCRE'){tmp0<-temp2[ENTITY_NAME%in%c("Bank of Hope",y)&Portfolio2==x,,]}
if(x=='OOCRE'){tmp0<-temp2[ENTITY_NAME%in%c("Bank of Hope",y)&Portfolio2==x&!Date%in%as.yearqtr(c('2010 Q4','2011 Q1')),,]}
# cat('Portfolio = \'',x,'\' and Peer Bank = \'',y,'\'',sep='');
command1 = paste('tmp1<-lm(NCOR~1+I(1*(ENTITY_NAME=="',y,'"))+NCOR.all+I((ENTITY_NAME=="',y,'")*NCOR.all),tmp0)',sep='');
eval(parse(text=command1))
command2 = paste('tmp2<-lm(NCOR~1+I(1*(ENTITY_NAME=="',y,'"))+NCOR.all,tmp0)',sep='');
eval(parse(text=command2))
command1.tr = paste('tmp1.tr<-lm(log(NCOR+1)~1+I(1*(ENTITY_NAME=="',y,'"))+log(NCOR.all+1)+I((ENTITY_NAME=="',y,'")*log(NCOR.all+1)),tmp0)',sep='');
eval(parse(text=command1.tr))
command2.tr = paste('tmp2.tr<-lm(log(NCOR+1)~1+I(1*(ENTITY_NAME=="',y,'"))+log(NCOR.all+1),tmp0)',sep='');
eval(parse(text=command2.tr))
# anova(tmp1,tmp2)[['Pr(>F)']][2]
# tmp2<-confint(tmp1)
# summary(tmp1);summary(tmp2);
tmp3<-data.table(Portfolio2=x
,Peer.Bank=y
,Test.Results=ifelse(anova(tmp1,tmp2)[['Pr(>F)']][2]>.05,'Accept','Reject')
,Test.Pvalue=round(anova(tmp1,tmp2)[['Pr(>F)']][2],6)
,Test.Results.Tr=ifelse(anova(tmp1.tr,tmp2.tr)[['Pr(>F)']][2]>.05,'Accept','Reject')
,'Model1 Normality'=ifelse(shapiro.test(tmp1$residuals)$p.value>.05,'Accept','Reject')
,'Model2 Normality'=ifelse(shapiro.test(tmp2$residuals)$p.value>.05,'Accept','Reject')
,'Model1.Tr Normality'=ifelse(shapiro.test(tmp1.tr$residuals)$p.value>.05,'Accept','Reject')
,'Model2.Tr Normality'=ifelse(shapiro.test(tmp2.tr$residuals)$p.value>.05,'Accept','Reject')
)
p<-xyplot(NCOR~NCOR.all
,main=paste('Portfolio = \'',x,'\' and Peer Bank = \'',y,'\' Result = ',ifelse(anova(tmp1,tmp2)[['Pr(>F)']][2]>.05,'Accept','Reject'),sep='')
,yscale.components=function(...){ yc <- yscale.components.default(...)
yc$left$labels$labels <-sprintf("%s%%",yc$left$labels$at*100);return(yc)}
,auto.key=list(x =0.62,y=.93,columns=1,border=FALSE,cex=1,lwd=4)
,group=ENTITY_NAME,type=c('g','p','r'),tmp0)
print(p)
tmp4<-rbind(
data.table(Model='Full Model',residual=tmp1$residual)
,data.table(Model='Restricted Model',residual=tmp2$residual)
)
# skewness(tmp1$residuals)
p<-histogram(~ residual | Model
, data = tmp4 , breaks=16 ,type = "density",
panel=function(x, ...) {
panel.histogram(x, ...)
panel.densityplot(x, bw=100,kernel="gaussian",...)
})
print(p)
p<-qqmath(~ residual | Model, data = tmp4,
prepanel = prepanel.qqmathline,
panel = function(x, ...) {
panel.qqmathline(x, ...)
panel.qqmath(x, ...)
})
print(p)
# cat('\n (Reference: Skewness=0, Kurtosis=3 for Normal Distribution)\n')
tmp5<-rbind(
data.table(Model='Full Model',Skewness=round(skewness(tmp1$residuals),4),Kurtosis=round(kurtosis(tmp1$residuals),4),Shapiro.Test=round(shapiro.test(tmp1$residuals)$p.value,4))
,data.table(Model='Restricted Model',Skewness=round(skewness(tmp2$residuals),4),Kurtosis=round(kurtosis(tmp2$residuals),4),Shapiro.Test=round(shapiro.test(tmp2$residuals)$p.value,4))
,data.table(Model='(Reference: Normal Dist.)',Skewness=0,Kurtosis=3,Shapiro.Test=NA)
,data.table(Model='(Reference: t(5) Dist.)',Skewness=0,Kurtosis=9,Shapiro.Test=NA)
,data.table(Model='(Reference: t(7) Dist.)',Skewness=0,Kurtosis=5,Shapiro.Test=NA)
,data.table(Model='(Reference: t(10) Dist.)',Skewness=0,Kurtosis=4,Shapiro.Test=NA)
,data.table(Model='(Reference: t(30) Dist.)',Skewness=0,Kurtosis=3.2308,Shapiro.Test=NA)
)
cat(paste('\n ',x,'-',y,' \n'));
cat('\n\n Residual Diagnosis \n\n');
print(tmp5)
df1=max(floor(6/(kurtosis(tmp1$residuals)-3)+4),1)
df2=max(floor(6/(kurtosis(tmp2$residuals)-3)+4),1)
tmp6<-rbind(
data.table(Model='Full Model',t.df=df1,p.value=round(ks.test(tmp1$residuals/sd(tmp1$residuals), "pt", df1)$p.value,6))
,data.table(Model='Full Model',t.df=3,p.value=round(ks.test(tmp1$residuals/sd(tmp1$residuals), "pt", 3)$p.value,6))
,data.table(Model='Full Model',t.df=2,p.value=round(ks.test(tmp1$residuals/sd(tmp1$residuals), "pt", 2)$p.value,6))
,data.table(Model='Full Model',t.df=1,p.value=round(ks.test(tmp1$residuals/sd(tmp1$residuals), "pt", 1)$p.value,6))
,data.table(Model='Restricted Model',t.df=df2,p.value=round(ks.test(tmp2$residuals/sd(tmp2$residuals), "pt", df2)$p.value,6))
,data.table(Model='Restricted Model',t.df=3,p.value=round(ks.test(tmp2$residuals/sd(tmp2$residuals), "pt", 3)$p.value,6))
,data.table(Model='Restricted Model',t.df=2,p.value=round(ks.test(tmp2$residuals/sd(tmp2$residuals), "pt", 2)$p.value,6))
,data.table(Model='Restricted Model',t.df=1,p.value=round(ks.test(tmp2$residuals/sd(tmp2$residuals), "pt", 1)$p.value,6))
)
cat('\n\n KS Test Against t-Distribtion (df) \n\n')
print(tmp6)
# n=42
# sqrt(6*n*(n-1)/(n-1)/(n+1)/(n+3))
tmp0[,Peer.Bank:=y,]
tmp0[,Result:=ifelse(anova(tmp1,tmp2)[['Pr(>F)']][2]>.05,'Accept','Reject'),]
return(list(
tmp3
,tmp0
))
# return(tmp3)
# tmp0
})
})
# Flatten the per-pair summary rows into one table, then render five DT
# views of it. The five datatable() calls in the original were byte-identical
# except for the data argument, so the shared configuration is factored into
# one local helper — output is unchanged, and the renderer/buttons/fonts can
# no longer drift apart between tables.
temp2.tst<-do.call('rbind',lapply(1:length(temp2.all),function(x){
	#x=1
	#y=2
	do.call('rbind',lapply(1:length(temp2.all[[x]]),function(y){
	return(temp2.all[[x]][[y]][[1]])
	}))
}))
# temp3[,Test.Results:=(p025<0 & p975>0),]
# Shared DT renderer: truncate long text in columns 1:4 with hover tooltip,
# scrolling body, pinned first two columns, CSV export, 12px fonts.
show_dt <- function(d){
	datatable(d
	,rownames = F
	,extensions = c('Scroller','FixedColumns','Buttons','ColReorder')
	,options = list(
	columnDefs = list(list(
	targets = c(1:4),
	render = JS(
	"function(data, type, row, meta) {",
	"return type === 'display' && data.length > 10 ?",
	"'<span title=\"' + data + '\">' + data.substr(0, 10) + '...</span>' : data;",
	"}")
	)), ## column index starting from 0
	deferRender = TRUE,
	scrollY = 300,
	scroller = TRUE,
	scrollX = TRUE,
	fixedColumns = list(leftColumns = 2),
	dom = 'Blfrtip',
	buttons =
	list( list(
	extend = 'collection',
	buttons = c('csv'),
	text = 'Save as .csv file'
	)), ## it doesn't work if only keep 'csv'; weird
	colReorder = TRUE,
	initComplete = JS(
	"function(settings, json) {",
	"$(this.api().table().header()).css({'font-size': '12px'});",
	"$(this.api().table().body()).css({'font-size': '12px'});",
	"}") ## change both header and body font size
	))
}
cat('On Original NCOR')
show_dt(dcast(temp2.tst,Peer.Bank~Portfolio2,value.var = 'Test.Results'))
cat('On log(NCOR+1)')
show_dt(dcast(temp2.tst,Peer.Bank~Portfolio2,value.var = 'Test.Results.Tr'))
cat('Shapiro Normality Test to Residuals')
show_dt(dcast(temp2.tst,Peer.Bank~Portfolio2,value.var = 'Model1 Normality'))
show_dt(dcast(temp2.tst,Peer.Bank~Portfolio2,value.var = 'Model2 Normality'))
cat('Full Result')
show_dt(temp2.tst)
```
### Chow's Test - Aggregated Graphs
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=12,fig.height=9}
# Flatten the per-(portfolio, peer) paired data saved by the Chow-test chunk
# and collapse every non-BOH entity to a single 'Peer Bank' label.
temp2.pair.data<-do.call('rbind',lapply(1:length(temp2.all),function(x){
	#x=1
	#y=2
	do.call('rbind',lapply(1:length(temp2.all[[x]]),function(y){
	return(temp2.all[[x]][[y]][[2]])
	}))
}))
temp2.pair.data[,ENTITY_NAME:=ifelse(ENTITY_NAME=='Bank of Hope',ENTITY_NAME,'Peer Bank'),]
# One BOH-vs-peer scatter grid per portfolio, panel title carrying the
# Accept/Reject verdict. The four original calls differed only in the
# subset and title, so loop instead of repeating the plot; lattice objects
# inside a loop need an explicit print() to render in knitr.
for (pf in c('CnI','OOCRE','NOOCRE','MF')) {
	p <- xyplot(NCOR~NCOR.all|paste(Peer.Bank,' (',Result,')',sep=''),group=ENTITY_NAME
		,yscale.components=function(...){ yc <- yscale.components.default(...)
		yc$left$labels$labels <-sprintf("%s%%",yc$left$labels$at*100);return(yc)}
		,main=paste('Peer Bank Analysis Comparison by Graphs (',pf,')',sep='')
		,auto.key=list(x =0.77,y=.93,columns=1,border=FALSE,cex=1,lwd=4)
		,type=c('g','p','r')
		,layout=c(3,4)
		,xlab='',ylab=''
		,temp2.pair.data[Portfolio2==pf&!Peer.Bank%in%c('BBCN','Wilshire Bank'),,])
	print(p)
}
```
### Chow's Test - NCOR Graphs
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=11,fig.height=18}
# Restrict the peer set to banks that passed the paired Chow test and plot
# their NCOR history (from 2007) next to BOH and two 'Accept' aggregates.
cat('Subset Peer Banks Based on Chow Test Result')
temp3<-merge(data2,temp2.tst,by.x=c('Portfolio2','ENTITY_NAME'),by.y=c('Portfolio2','Peer.Bank'),all.x=TRUE)
temp4<-temp3[year(Date)>=2007,.(
	Balance=sum(Balance)
	,GCO=sum(GCO)
	,Recovery=sum(Recovery)
	,NCOR=sum(GCO-Recovery,na.rm =TRUE)/sum(Balance.Lag,na.rm=TRUE)
	)
	,keyby=.(ENTITY_NAME,Date,Portfolio2,Test.Results)]
# Two aggregate series: accepted peers including BOH, and excluding BOH;
# both use the unweighted mean of bank-level NCORs.
temp5<-rbind(
	temp4[Test.Results=='Accept'|ENTITY_NAME=='Bank of Hope',.(
	ENTITY_NAME='1 - Accept'
	,Balance=sum(Balance)
	,GCO=sum(GCO)
	,Recovery=sum(Recovery)
	,NCOR=mean(NCOR,na.rm =TRUE)
	# ,NCOR=sum(GCO-Recovery,na.rm =TRUE)/sum(Balance,na.rm =TRUE)
	)
	,keyby=.(Date,Portfolio2)]
	,temp4[Test.Results=='Accept',.(
	ENTITY_NAME='2 - Accept (Excluding BOH)'
	,Balance=sum(Balance)
	,GCO=sum(GCO)
	,Recovery=sum(Recovery)
	,NCOR=mean(NCOR,na.rm =TRUE)
	# ,NCOR=sum(GCO-Recovery,na.rm =TRUE)/sum(Balance,na.rm =TRUE)
	)
	,keyby=.(Date,Portfolio2)]
	# ,temp4[Test.Results=='Reject',.(
	# ENTITY_NAME='2 - Reject'
	# ,Balance=sum(Balance)
	# ,GCO=sum(GCO)
	# ,Recovery=sum(Recovery)
	# ,NCOR=mean(NCOR,na.rm =TRUE))
	# ,keyby=.(Date,Portfolio2,Test.Results)]
	#
	,temp4[Test.Results=='Accept'|ENTITY_NAME=='Bank of Hope',,]
	,fill=TRUE)
# temp5[,.N,keyby=.(Portfolio2,ENTITY_NAME)]
myColours <- c(brewer.pal(6,"Greens")[seq(2 ,6,2)],
	brewer.pal(6,"Reds")[seq(2 ,6,2)],
	brewer.pal(6,"Blues")[seq(2 ,6,2)],
	brewer.pal(6,"Oranges")[seq(2,6,2)],
	brewer.pal(6,"Purples")[seq(2,6,2)]
	)
my.settings <- list(
	superpose.symbol = list(col=myColours, border="transparent")
	,superpose.line = list(col=myColours, border="transparent")
	)
xyplot(NCOR~Date|ifelse(ENTITY_NAME%in%c('1 - Accept','2 - Accept (Excluding BOH)','Bank of Hope'),'Bank of Hope, Overall Accept','Peer Banks')+Portfolio2,group=ENTITY_NAME
	,type=c('l','g'),layout=c(2,4),lwd=2
	,par.settings = my.settings,main='Historical Net Charge-off Rate'
	,scale=list(y=list(relation='same'))
	,yscale.components=function(...){ yc <- yscale.components.default(...)
	yc$left$labels$labels <-sprintf("%s%%",yc$left$labels$at*100);return(yc)}
	,auto.key=list(x =0.69,y=.97,columns=1,border=FALSE,cex=1,lwd=4)
	,temp5)
# NOTE(review): manual override — Cathay/NOOCRE is flipped to 'Accept' before
# the pooled Chow test in the next section; confirm this exception is intended.
temp2.tst[Portfolio2=='NOOCRE'&Peer.Bank=='Cathay General Bancorp',Test.Results:='Accept',]
```
### Chow's Test - Pooled
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=11,fig.height=5}
# Pooled Chow test: keep only BOH plus peers accepted by the paired test,
# then per portfolio compare the full model (bank-specific intercepts and
# slopes on NCOR.all) against the restricted common-slope model.
cat('Pooled Chow Test Based on subset peer banks from Paired Chow Test')
temp3<-merge(temp2,temp2.tst,by.x=c('Portfolio2','ENTITY_NAME'),by.y=c('Portfolio2','Peer.Bank'),all.x=TRUE)[Test.Results=='Accept'|ENTITY_NAME=='Bank of Hope',,][!ENTITY_NAME%in%c('BBCN','Wilshire Bank'),,]
temp3.tst<-do.call('rbind',lapply(temp3[,sort(unique(Portfolio2)),],function(x){
	# temp3[Portfolio2==x,.N,keyby=.(ENTITY_NAME)]
	# x='OOCRE'
	# OOCRE drops 2010Q4/2011Q1, matching the paired-test chunk.
	if(x!='OOCRE'){tmp0<-temp3[Portfolio2==x,,]}
	if(x=='OOCRE'){tmp0<-temp3[Portfolio2==x&!Date%in%as.yearqtr(c('2010 Q4','2011 Q1')),,]}
	c1 <- try(tmp1<-lm(NCOR~1+ENTITY_NAME+NCOR.all+ENTITY_NAME:NCOR.all,tmp0),silent = TRUE)
	c2 <- try(tmp2<-lm(NCOR~1+ENTITY_NAME+NCOR.all,tmp0),silent = TRUE)
	# str(shapiro.test(tmp1$residuals))
	# BUG FIX: the original recorded 'Error' but then fell through to the
	# residual diagnostics below, which reference the undefined fits tmp1/tmp2
	# and would abort the knit. Return the error row immediately instead;
	# the non-error path is unchanged.
	if(is(c1,"try-error")|is(c2,"try-error")){
	return(data.table(Portfolio2=x
	,Test.Results='Error'
	,'Model1 Normality'='N/A'
	,'Model2 Normality'='N/A'
	))
	}
	# Accept (common slope) when the F-test p-value exceeds 5%; Shapiro
	# tests check residual normality of each model.
	tmp3<-data.table(Portfolio2=x
	,Test.Results=ifelse(anova(tmp1,tmp2)[['Pr(>F)']][2]>.05,'Accept','Reject')
	,'Model1 Normality'=ifelse(shapiro.test(tmp1$residuals)$p.value>.05,'Accept','Reject')
	,'Model2 Normality'=ifelse(shapiro.test(tmp2$residuals)$p.value>.05,'Accept','Reject')
	)
	# Residual diagnostics: histogram + density, Q-Q plots, moments table,
	# KS tests against candidate t distributions.
	tmp4<-rbind(
	data.table(Model='Full Model',residual=tmp1$residuals)
	,data.table(Model='Restricted Model',residual=tmp2$residuals)
	)
	# skewness(tmp1$residuals)
	p<-histogram(~ residual | Model
	, data = tmp4 , breaks=16 ,type = "density",
	panel=function(x, ...) {
	panel.histogram(x, ...)
	panel.densityplot(x, bw=100,kernel="gaussian",...)
	})
	print(p)
	p<-qqmath(~ residual | Model, data = tmp4,
	prepanel = prepanel.qqmathline,
	panel = function(x, ...) {
	panel.qqmathline(x, ...)
	panel.qqmath(x, ...)
	})
	print(p)
	# cat('\n (Reference: Skewness=0, Kurtosis=3 for Normal Distribution)\n')
	tmp5<-rbind(
	data.table(Model='Full Model',Skewness=round(skewness(tmp1$residuals),4),Kurtosis=round(kurtosis(tmp1$residuals),4),Shapiro.Test=round(shapiro.test(tmp1$residuals)$p.value,4))
	,data.table(Model='Restricted Model',Skewness=round(skewness(tmp2$residuals),4),Kurtosis=round(kurtosis(tmp2$residuals),4),Shapiro.Test=round(shapiro.test(tmp2$residuals)$p.value,4))
	,data.table(Model='(Reference: Normal Dist.)',Skewness=0,Kurtosis=3,Shapiro.Test=NA)
	,data.table(Model='(Reference: t(5) Dist.)',Skewness=0,Kurtosis=9,Shapiro.Test=NA)
	,data.table(Model='(Reference: t(7) Dist.)',Skewness=0,Kurtosis=5,Shapiro.Test=NA)
	,data.table(Model='(Reference: t(10) Dist.)',Skewness=0,Kurtosis=4,Shapiro.Test=NA)
	,data.table(Model='(Reference: t(30) Dist.)',Skewness=0,Kurtosis=3.2308,Shapiro.Test=NA)
	)
	cat('\n\n Residual Diagnosis \n\n')
	print(tmp5)
	# Method-of-moments t df from excess kurtosis (t(df) kurtosis = 3 + 6/(df-4)).
	df1=max(floor(6/(kurtosis(tmp1$residuals)-3)+4),1)
	df2=max(floor(6/(kurtosis(tmp2$residuals)-3)+4),1)
	tmp6<-rbind(
	data.table(Model='Full Model',t.df=df1,p.value=round(ks.test(tmp1$residuals/sd(tmp1$residuals), "pt", df1)$p.value,6))
	,data.table(Model='Full Model',t.df=3,p.value=round(ks.test(tmp1$residuals/sd(tmp1$residuals), "pt", 3)$p.value,6))
	,data.table(Model='Full Model',t.df=2,p.value=round(ks.test(tmp1$residuals/sd(tmp1$residuals), "pt", 2)$p.value,6))
	,data.table(Model='Full Model',t.df=1,p.value=round(ks.test(tmp1$residuals/sd(tmp1$residuals), "pt", 1)$p.value,6))
	,data.table(Model='Restricted Model',t.df=df2,p.value=round(ks.test(tmp2$residuals/sd(tmp2$residuals), "pt", df2)$p.value,6))
	,data.table(Model='Restricted Model',t.df=3,p.value=round(ks.test(tmp2$residuals/sd(tmp2$residuals), "pt", 3)$p.value,6))
	,data.table(Model='Restricted Model',t.df=2,p.value=round(ks.test(tmp2$residuals/sd(tmp2$residuals), "pt", 2)$p.value,6))
	,data.table(Model='Restricted Model',t.df=1,p.value=round(ks.test(tmp2$residuals/sd(tmp2$residuals), "pt", 1)$p.value,6))
	)
	cat('\n\n KS Test Against t-Distribution (df) \n\n')
	print(tmp6)
	return(tmp3)
}))
# One-row pass/fail grid across portfolios.
datatable(dcast(temp3.tst,.~Portfolio2,value.var = 'Test.Results')
	,rownames = F
	,extensions = c('Scroller','FixedColumns','Buttons','ColReorder')
	,options = list(
	#rowCallback = JS("function(r,d) {$(r).attr('height', '100px')}"),
	columnDefs = list(list(
	targets = c(1:4),
	render = JS(
	"function(data, type, row, meta) {",
	"return type === 'display' && data.length > 10 ?",
	"'<span title=\"' + data + '\">' + data.substr(0, 10) + '...</span>' : data;",
	"}")
	)), ## column index starting from 0
	deferRender = TRUE,scrollY = 300,scroller = TRUE,scrollX = TRUE,
	fixedColumns = list(leftColumns = 2),dom = 'Blfrtip',
	buttons =
	list( list(
	extend = 'collection',
	buttons = c('csv'),
	text = 'Save as .csv file'
	)), ## it doesn't work if only keep 'csv'; weird
	colReorder = TRUE,
	initComplete = JS(
	"function(settings, json) {",
	"$(this.api().table().header()).css({'font-size': '12px'});",
	"$(this.api().table().body()).css({'font-size': '12px'});",
	"}") ## change both header and body font size
	))
```
## 2.4 BBCN vs. Wilshire
### Visual Comparison
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=11,fig.height=8}
temp0<-data2[ENTITY_NAME=='Bank of Hope',,]
temp0[Sub%in%c('Wilshire Bank','Saehan Bancorp','Mirae Bank','Liberty Bank of New York','Bank Asiana'),ENTITY_NAME:='Wilshire Bank']
temp0[Sub%in%c('BBCN','Innovative Bank','Nara Bank','Asiana bank','Pacific International Bank','Foster Bankshares, Inc.'),ENTITY_NAME:='BBCN']
# temp0[,.N,keyby=.(ENTITY_NAME,Sub)]
temp0.sum0<-temp0[,.(
Balance=sum(Balance)
,GCO=sum(GCO)
,Recovery=sum(Recovery)
,NCOR=sum(GCO-Recovery,na.rm =TRUE)/sum(Balance.Lag,na.rm=TRUE)
)
,keyby=.(ENTITY_NAME,Date,Portfolio2)]
xyplot(Balance/1000000~Date|Portfolio2
,group=ENTITY_NAME
,type=c('p','l','g')
,main='Quarterly Ending Balance (B$)'
# ,yscale.components=function(...){ yc <- yscale.components.default(...)
# yc$left$labels$labels <-sprintf("%s%%",yc$left$labels$at*100);return(yc)}
,auto.key=list(x =0.65,y=.93,columns=1,border=FALSE,cex=1,lwd=4)
,temp0.sum0[year(Date)>=2000,,])
xyplot(NCOR~Date|Portfolio2
,group=ENTITY_NAME
,type=c('p','l','g')
,main='Quarterly Net Charge-Off Rate'
,yscale.components=function(...){ yc <- yscale.components.default(...)
yc$left$labels$labels <-sprintf("%s%%",yc$left$labels$at*100);return(yc)}
,auto.key=list(x =0.65,y=.93,columns=1,border=FALSE,cex=1,lwd=4)
,temp0.sum0[year(Date)>=2000,,])
```
### Chow's Test
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=11,fig.height=5}
## Build the estimation sample for the BBCN-vs-Wilshire Chow test:
## (1) re-attribute Bank of Hope history to its predecessor franchises,
## (2) stack franchise-level and bank-level NCOR series,
## (3) attach the peer-group mean NCOR (NCOR.all) as the common driver.
temp0<-data2[ENTITY_NAME=='Bank of Hope',,]
temp0[Sub%in%c('Wilshire Bank','Saehan Bancorp','Mirae Bank','Liberty Bank of New York','Bank Asiana'),ENTITY_NAME:='Wilshire Bank']
temp0[Sub%in%c('BBCN','Innovative Bank','Nara Bank','Asiana bank','Pacific International Bank','Foster Bankshares, Inc.'),ENTITY_NAME:='BBCN']
temp1<-rbind(
## Franchise-level aggregates (BBCN / Wilshire split).
temp0[year(Date)>=2000,.(
Balance=sum(Balance,na.rm =TRUE)
,GCO=sum(GCO,na.rm =TRUE)
,Recovery=sum(Recovery)
,NCOR=sum(GCO-Recovery,na.rm =TRUE)/sum(Balance.Lag,na.rm=TRUE)
)
,keyby=.(ENTITY_NAME,Date,Portfolio2)]
,
## Bank-level aggregates for the full entity universe.
data2[year(Date)>=2000,.(
Balance=sum(Balance,na.rm =TRUE)
,GCO=sum(GCO,na.rm =TRUE)
,Recovery=sum(Recovery)
,NCOR=sum(GCO-Recovery,na.rm =TRUE)/sum(Balance.Lag,na.rm=TRUE)
)
,keyby=.(ENTITY_NAME,Date,Portfolio2)]
)
## NCOR.all = cross-sectional mean NCOR over peers, excluding the two
## franchises under test and the KPMG pull; merged back by portfolio/date.
temp2<-merge(temp1,temp1[!ENTITY_NAME%in%c('BBCN','Wilshire Bank','Bank of Hope (KPMG Pull CR)'),.(
NCOR.all=mean(NCOR,na.rm=TRUE)
# NCOR.all=sum(GCO-Recovery,na.rm =TRUE)/sum(Balance,na.rm =TRUE)
),keyby=.(Portfolio2,Date)],by=c('Portfolio2','Date'))
## For each portfolio x and each comparison franchise y (here only Wilshire
## Bank), run a Chow-type F test: the full model allows bank-specific
## intercept AND slope on NCOR.all; the restricted model allows only a
## bank-specific intercept. anova() compares them; p>.05 => "Accept"
## (poolable). Residual diagnostics (histogram, QQ, skewness/kurtosis,
## Shapiro, KS vs. t) are printed as side effects.
## Fix in this revision: printed header typo "Distribtion" -> "Distribution".
temp3<-do.call('rbind',lapply(temp2[,unique(Portfolio2),],function(x){
do.call('rbind',lapply(temp2[ENTITY_NAME%in%c('Wilshire Bank'),unique(ENTITY_NAME),],function(y){
# x='OOCRE'
# y='OPUS BANK'
## Formulas are assembled as strings so the bank name can be spliced in,
## then eval(parse(...))'d. tmp1 = full model (intercept + slope shifts).
command1 = paste('tmp1<-lm(NCOR~1+I(1*(ENTITY_NAME=="',y,'"))+NCOR.all+I((ENTITY_NAME=="',y,'")*NCOR.all),temp2[ENTITY_NAME%in%c("BBCN","',y,'")&Portfolio2=="',x,'",,])',sep='');
eval(parse(text=command1))
## tmp2 = restricted model (intercept shift only).
command1 = paste('tmp2<-lm(NCOR~1+I(1*(ENTITY_NAME=="',y,'"))+NCOR.all,temp2[ENTITY_NAME%in%c("BBCN","',y,'")&Portfolio2=="',x,'",,])',sep='');
eval(parse(text=command1))
# anova(tmp1,tmp2)[['Pr(>F)']][2]
# tmp2<-confint(tmp1)
tmp3<-data.table(Portfolio2=x,Peer.Bank=y,Test.Results=ifelse(anova(tmp1,tmp2)[['Pr(>F)']][2]>.05,'Accept','Reject'))
tmp4<-rbind(
data.table(Model='Full Model',residual=tmp1$residual)
,data.table(Model='Restricted Model',residual=tmp2$residual)
)
# skewness(tmp1$residuals)
## Residual histogram with Gaussian kernel density overlay, per model.
p<-histogram(~ residual | Model
, data = tmp4 , breaks=16 ,type = "density",
panel=function(x, ...) {
panel.histogram(x, ...)
panel.densityplot(x, bw=100,kernel="gaussian",...)
})
print(p)
## Normal QQ plot of residuals, per model.
p<-qqmath(~ residual | Model, data = tmp4,
prepanel = prepanel.qqmathline,
panel = function(x, ...) {
panel.qqmathline(x, ...)
panel.qqmath(x, ...)
})
print(p)
# cat('\n (Reference: Skewness=0, Kurtosis=3 for Normal Distribution)\n')
## Moment diagnostics plus reference rows: t(df) kurtosis = 3 + 6/(df-4).
tmp5<-rbind(
data.table(Model='Full Model',Skewness=round(skewness(tmp1$residuals),4),Kurtosis=round(kurtosis(tmp1$residuals),4),Shapiro.Test=round(shapiro.test(tmp1$residuals)$p.value,4))
,data.table(Model='Restricted Model',Skewness=round(skewness(tmp2$residuals),4),Kurtosis=round(kurtosis(tmp2$residuals),4),Shapiro.Test=round(shapiro.test(tmp2$residuals)$p.value,4))
,data.table(Model='(Reference: Normal Dist.)',Skewness=0,Kurtosis=3,Shapiro.Test=NA)
,data.table(Model='(Reference: t(5) Dist.)',Skewness=0,Kurtosis=9,Shapiro.Test=NA)
,data.table(Model='(Reference: t(7) Dist.)',Skewness=0,Kurtosis=5,Shapiro.Test=NA)
,data.table(Model='(Reference: t(10) Dist.)',Skewness=0,Kurtosis=4,Shapiro.Test=NA)
,data.table(Model='(Reference: t(30) Dist.)',Skewness=0,Kurtosis=3.2308,Shapiro.Test=NA)
)
cat('\n\n Residual Diagnosis \n\n')
print(tmp5)
## Method-of-moments df estimate: kurtosis = 3 + 6/(df-4) => df = 6/(k-3)+4.
## max(...,1) guards against k<=3 driving df nonpositive.
## NOTE(review): if kurtosis is exactly 3 this yields df = Inf — confirm
## that case cannot occur in practice.
df1=max(floor(6/(kurtosis(tmp1$residuals)-3)+4),1)
df2=max(floor(6/(kurtosis(tmp2$residuals)-3)+4),1)
## KS goodness-of-fit of standardized residuals against t with the
## estimated df and with df = 3, 2, 1.
tmp6<-rbind(
data.table(Model='Full Model',t.df=df1,p.value=round(ks.test(tmp1$residuals/sd(tmp1$residuals), "pt", df1)$p.value,6))
,data.table(Model='Full Model',t.df=3,p.value=round(ks.test(tmp1$residuals/sd(tmp1$residuals), "pt", 3)$p.value,6))
,data.table(Model='Full Model',t.df=2,p.value=round(ks.test(tmp1$residuals/sd(tmp1$residuals), "pt", 2)$p.value,6))
,data.table(Model='Full Model',t.df=1,p.value=round(ks.test(tmp1$residuals/sd(tmp1$residuals), "pt", 1)$p.value,6))
,data.table(Model='Restricted Model',t.df=df2,p.value=round(ks.test(tmp2$residuals/sd(tmp2$residuals), "pt", df2)$p.value,6))
,data.table(Model='Restricted Model',t.df=3,p.value=round(ks.test(tmp2$residuals/sd(tmp2$residuals), "pt", 3)$p.value,6))
,data.table(Model='Restricted Model',t.df=2,p.value=round(ks.test(tmp2$residuals/sd(tmp2$residuals), "pt", 2)$p.value,6))
,data.table(Model='Restricted Model',t.df=1,p.value=round(ks.test(tmp2$residuals/sd(tmp2$residuals), "pt", 1)$p.value,6))
)
cat('\n\n KS Test Against t-Distribution (df) \n\n')
print(tmp6)
return(tmp3)
}))
}))
# temp3[,Test.Results:=(p025<0 & p975>0),]
## Render the BBCN-vs-Wilshire test verdicts (rows = peer bank, columns =
## portfolio) with the same DT configuration as the earlier tables.
datatable(dcast(temp3,Peer.Bank~Portfolio2,value.var = 'Test.Results')
,rownames = F
,extensions = c('Scroller','FixedColumns','Buttons','ColReorder')
,options = list(
#rowCallback = JS("function(r,d) {$(r).attr('height', '100px')}"),
columnDefs = list(list(
targets = c(1:4),
render = JS(
"function(data, type, row, meta) {",
"return type === 'display' && data.length > 10 ?",
"'<span title=\"' + data + '\">' + data.substr(0, 10) + '...</span>' : data;",
"}")
)), ## column index starting from 0
deferRender = TRUE,
scrollY = 300,
scroller = TRUE,
scrollX = TRUE,
fixedColumns = list(leftColumns = 2),
dom = 'Blfrtip',
buttons =
list( list(
extend = 'collection',
buttons = c('csv'),
text = 'Save as .csv file'
)), ## it doesn't work if only keep 'csv'; weird
colReorder = TRUE,
initComplete = JS(
"function(settings, json) {",
"$(this.api().table().header()).css({'font-size': '12px'});",
"$(this.api().table().body()).css({'font-size': '12px'});",
"}") ## change both header and body font size
))
```
## 2.5 NCO Rate Options
Four different calculation options were provided to calculate the net charge-off rate:
Option1: $$\frac{NCO_{t}}{EB_{t}}$$
Option2: $$\frac{NCO_{t}}{EB_{t-1}}$$
Option3: $$\frac{NCO_{t}}{\frac{1}{2}(EB_{t}+EB_{t-1})}$$
Option4: $$\frac{NCO_{t}}{(EB_{t}+GCO_{t})}$$
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=12,fig.height=8}
## Compare the four NCO-rate denominator options from the prose above for
## Bank of Hope, by portfolio/quarter.
temp0<-data2[year(Date)>=2000&ENTITY_NAME=='Bank of Hope',.(
Balance=sum(Balance)
,GCO=sum(GCO)
,Recovery=sum(Recovery)
## Option 1: NCO / current-quarter ending balance.
,NCOR1=sum(GCO-Recovery,na.rm =TRUE)/sum(Balance,na.rm =TRUE)
),keyby=.(Portfolio2,Date)]
temp0[,':='(
## Option 2: prior-quarter ending balance (shift = lag within portfolio).
NCOR2=(GCO-Recovery)/shift(Balance,1)
## Option 3: average of current and prior ending balances.
,NCOR3=(GCO-Recovery)/(1/2*Balance+1/2*shift(Balance,1))
## Option 4: ending balance grossed up by charge-offs.
,NCOR4=(GCO-Recovery)/(Balance+GCO)
),keyby=.(Portfolio2)]
## 2x2 trellis, free axes, y labels formatted as percentages.
p1<-xyplot(NCOR1+NCOR2+NCOR3+NCOR4~Date|Portfolio2
,type=c('p','g','l')
,layout=c(2,2)
,scale=list(y=c(relation='free'),x=c(relation='free'))
,xlab='',ylab=''
,main=paste('Bank of Hope','\n NCOR Calculation Options')
,yscale.components=function(...){ yc <- yscale.components.default(...)
yc$left$labels$labels <-sprintf("%s%%",yc$left$labels$at*100);return(yc)}
,auto.key=list(x =0.80,y=.93,columns=1,border=FALSE,cex=1)
,temp0)
print(p1);
```
## 3.1 Dependent Variable & Functional Form
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=12,fig.height=6}
# names(data0.mev)
cat('In-sample Peer Banks')
## Assemble the dependent-variable sample: bank/quarter/portfolio NCOR for
## all entities except the KPMG pull, then attach the pairwise-test verdict
## (temp2.tst) and the cross-sectional mean driver NCOR.all.
temp0<-rbind(
data2[year(Date)>=2000&!ENTITY_NAME%in%c('Bank of Hope (KPMG Pull CR)'),.(
Balance=sum(Balance,na.rm =TRUE)
,GCO=sum(GCO,na.rm =TRUE)
,Recovery=sum(Recovery)
,NCOR=sum(GCO-Recovery,na.rm =TRUE)/sum(Balance.Lag,na.rm=TRUE)
)
,keyby=.(ENTITY_NAME,Date,Portfolio2)]
)
## Left join keeps banks without a test verdict (Test.Results = NA).
temp1<-merge(temp0,temp2.tst[,.(Portfolio2,Peer.Bank,Test.Results),],by.x=c('Portfolio2','ENTITY_NAME'),by.y=c('Portfolio2','Peer.Bank'),all.x=TRUE)
temp1[,NCOR.all:=mean(NCOR),keyby=.(Portfolio2,Date)]
# str(temp2)
# temp2[,.N,keyby=.(TIME_IDX)]
```
### GLM
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=12,fig.height=6}
cat('GLM')
## Per portfolio: fit a quasibinomial logit GLM of (non-negative) NCOR on
## bank fixed effects + the peer-mean driver, using only banks that passed
## the pairwise test (plus Bank of Hope). Plots actual vs fitted for BOH.
temp4<-lapply(temp1[,unique(Portfolio2),],function(x){
temp2<-temp1[Portfolio2==x&(Test.Results=='Accept'|ENTITY_NAME=='Bank of Hope'),,]
temp2[,PANEL_ID:=ENTITY_NAME]
temp2[,TIME_IDX:=.GRP,by=.(Date)]
## pmax(NCOR,0) floors negative rates so the logit link is defined.
fit<-glm(I(pmax(NCOR,0))~ENTITY_NAME+NCOR.all,temp2,family=quasibinomial(link="logit"));
cat(x)
print(summary(fit))
temp2[,y:=predict(fit, newdata=.SD, type=c("response"), na.action=na.pass),];
## Long format: one series for actuals, one for in-sample fit.
temp3<-rbind(temp2[,.(Scenario='Historic Actual',Date,y=NCOR,ENTITY_NAME),],temp2[,.(Scenario='Historic Fit',Date,y=y,ENTITY_NAME),])
# col=c('purple','blue')
p<-xyplot(y~as.Date(Date)|ENTITY_NAME,group=Scenario,scale=list(x=list(rot=90))
# ,par.settings = list(superpose.line = list(col=col),superpose.symbol = list(col= col))
,type=c('l','p','g'),xlab='',ylab='',ylim=c(-.005,.05)
,yscale.components=function(...){yc <- yscale.components.default(...)
yc$left$labels$labels <-sprintf("%s%%",yc$left$labels$at*100)
return(yc)}
,main=x
,auto.key = list(x = .76, y=.88, corner = c(0,1) ,border = FALSE, lines = TRUE)
,temp3[ENTITY_NAME=='Bank of Hope',,])
print(p)
return()
});
```
### OLS
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=12,fig.height=6}
cat('OLS')
## Per portfolio: plain OLS of NCOR on bank fixed effects + peer-mean
## driver; prints summary, Shapiro normality test and QQ plot of residuals,
## then plots actual vs fitted for Bank of Hope.
temp4<-lapply(temp1[,unique(Portfolio2),],function(x){
cat(x);
temp2<-temp1[Portfolio2==x&(Test.Results=='Accept'|ENTITY_NAME=='Bank of Hope'),,]
temp2[,PANEL_ID:=ENTITY_NAME]
temp2[,TIME_IDX:=.GRP,by=.(Date)]
fit<-lm(NCOR~ENTITY_NAME+NCOR.all,temp2)
print(summary(fit));
print(shapiro.test(fit$residuals));
qqnorm(fit$residuals);qqline(fit$residuals)
temp2[,y:=predict(fit, newdata=.SD, type=c("response"), na.action=na.pass),]
temp3<-rbind(temp2[,.(Scenario='Historic Actual',Date,y=NCOR,ENTITY_NAME),],temp2[,.(Scenario='Historic Fit',Date,y=y,ENTITY_NAME),])
# col=c('purple','blue')
p<-xyplot(y~as.Date(Date)|ENTITY_NAME,group=Scenario,scale=list(x=list(rot=90))
# ,par.settings = list(superpose.line = list(col=col),superpose.symbol = list(col= col))
,type=c('l','p','g'),xlab='',ylab='',ylim=c(-.005,.05)
,yscale.components=function(...){yc <- yscale.components.default(...)
yc$left$labels$labels <-sprintf("%s%%",yc$left$labels$at*100)
return(yc)}
,main=x
,auto.key = list(x = .76, y=.88, corner = c(0,1) ,border = FALSE, lines = TRUE)
,temp3[ENTITY_NAME=='Bank of Hope',,])
print(p)
return();
});
```
### OLS: log(x+a)
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=12,fig.height=6}
cat('OLS + Transformed Dependent Variable')
## Per portfolio: OLS on a log-shifted dependent variable log(NCOR + a).
## A grid of candidate shift constants a is scored by the Shapiro statistic
## of the resulting residuals; the table is printed for review.
temp4<-lapply(temp1[,unique(Portfolio2),],function(x){
temp2<-temp1[Portfolio2==x&(Test.Results=='Accept'|ENTITY_NAME=='Bank of Hope'),,]
temp2[,PANEL_ID:=ENTITY_NAME]
temp2[,TIME_IDX:=.GRP,by=.(Date)]
## Candidate shifts: |min(NCOR)| plus powers of ten from 1e-5 to 1e5.
a<-temp2[,abs(min(NCOR)),]+10^(-5:5)
temp2.logtrans.tst<-do.call('rbind',lapply(a,function(z){
fit<-lm(log(NCOR+z)~ENTITY_NAME+NCOR.all,temp2)
shapiro.test(fit$residuals)$statistic
return(data.table(constant=round(z,4),statistic=shapiro.test(fit$residuals)$statistic,P.value=shapiro.test(fit$residuals)$p.value))
}))
print(temp2.logtrans.tst)
## NOTE(review): the scan result above is not used — the fitted model
## hard-codes a shift of 1. Confirm this is the intended final choice.
fit<-lm(log(NCOR+1)~ENTITY_NAME+NCOR.all,temp2)
print(summary(fit));
print(shapiro.test(fit$residuals));
qqnorm(fit$residuals);qqline(fit$residuals)
temp2[,y:=predict(fit, newdata=.SD, type=c("response"), na.action=na.pass),]
## Back-transform fitted values: exp(y) - 1 inverts log(NCOR + 1).
temp3<-rbind(temp2[,.(Scenario='Historic Actual',Date,y=NCOR,ENTITY_NAME),],temp2[,.(Scenario='Historic Fit',Date,y=exp(y)-1,ENTITY_NAME),])
# col=c('purple','blue')
p<-xyplot(y~as.Date(Date)|ENTITY_NAME,group=Scenario,scale=list(x=list(rot=90))
# ,par.settings = list(superpose.line = list(col=col),superpose.symbol = list(col= col))
,type=c('l','p','g'),xlab='',ylab='',ylim=c(-.005,.05)
,yscale.components=function(...){yc <- yscale.components.default(...)
yc$left$labels$labels <-sprintf("%s%%",yc$left$labels$at*100)
return(yc)}
,main=x
,auto.key = list(x = .76, y=.88, corner = c(0,1) ,border = FALSE, lines = TRUE)
,temp3[ENTITY_NAME=='Bank of Hope',,])
print(p);
return();
});
```
### OLS: Box-Cox
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=12,fig.height=6}
cat('OLS Box-Cox')
## Per portfolio: estimate a bcnPower (Box-Cox-with-negatives) transform of
## NCOR from an initial OLS fit, refit on the transformed response, and
## print normality diagnostics of the residuals.
temp4<-lapply(temp1[,unique(Portfolio2),],function(x){
temp2<-temp1[Portfolio2==x&(Test.Results=='Accept'|ENTITY_NAME=='Bank of Hope'),,]
temp2[,PANEL_ID:=ENTITY_NAME]
temp2[,TIME_IDX:=.GRP,by=.(Date)]
fit<-lm(NCOR~ENTITY_NAME+NCOR.all,temp2)
## powerTransform/bcnPower come from the car package; bcnPower handles
## zero/negative responses via the gamma location parameter.
bc<-powerTransform(fit, family="bcnPower");
cat(x)
print(bc)
temp2[,NCOR.bc:=bcnPower(NCOR,lambda=bc$lambda,gamma=bc$gamma),]
fit<-lm(NCOR.bc~ENTITY_NAME+NCOR.all,temp2)
print(summary(fit));
print(shapiro.test(fit$residuals));
qqnorm(fit$residuals);qqline(fit$residuals)
})
# cat('OLS + Transformed Dependent Variable 2')
# fit<-lm(rank(NCOR)~ENTITY_NAME+NCOR.all,temp2)
# summary(fit);shapiro.test(fit$residuals);qqnorm(fit$residuals);qqline(fit$residuals)
# data3[Portfolio2=='CnI',.(length(rgdp_ag_lag1),length(NCOR)),]
```
### Panel AR
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=12,fig.height=6}
# debug(panelAR);
# undebug(panelAR)
# temp3<-as.data.frame(temp2)
# str(temp3)
cat('\nPanel AR\n')
## Per portfolio: fit a panel regression with panel-specific AR(1) errors
## (psar1) and Parks panel-correlated error structure via panelAR; print
## the rho estimates, panel covariance/correlation, fit-vs-actual plot for
## Bank of Hope, and residual normality diagnostics.
temp4<-lapply(temp1[,unique(Portfolio2),],function(x){
#x='CnI'
cat(x);
temp2<-temp1[Portfolio2==x&(Test.Results=='Accept'|ENTITY_NAME=='Bank of Hope'),,]
temp2[,PANEL_ID:=ENTITY_NAME]
## .GRP gives each Date a consecutive integer index, as panelAR requires.
temp2[,TIME_IDX:=.GRP,by=.(Date)]
fit<-panelAR(NCOR ~ 0+PANEL_ID+NCOR.all,data=as.data.frame(temp2), panelVar="PANEL_ID", timeVar="TIME_IDX",autoCorr='psar1', panelCorrMethod = "parks", complete.case=TRUE,dof.correction = TRUE)
print(summary(fit));
cat('\nAR(1) Term Coef (Rho):\n');
print(data.table(Panel=names(summary(fit)$rho),rho=round(summary(fit)$rho,2)))
cat('\nPanel Covariance Structure:\n')
fit.sigma<-summary(fit)$Sigma
colnames(fit.sigma)<-1:dim(fit.sigma)[2]
print(round(fit.sigma,6));
cat('\nCorrelation of Covariance:\n')
print(round(cov2cor(fit.sigma),2));
# print(round(summary(fit)$Sigma*1000000,1))
# round(cov2cor(vcov(fit)),2)
# str(fit)
# anova(fit)
## fitted.values is named by original row number; match back by index.
temp2[as.integer(names(fit$fitted.values)),y:=fit$fitted.values];
temp3<-rbind(temp2[,.(Scenario='Historic Actual',Date,y=NCOR,ENTITY_NAME),],temp2[,.(Scenario='Historic Fit',Date,y=y,ENTITY_NAME),]);
p<-xyplot(y~as.Date(Date)|ENTITY_NAME,group=Scenario,scale=list(x=list(rot=90))
# ,par.settings = list(superpose.line = list(col=col),superpose.symbol = list(col= col))
,type=c('l','p','g'),xlab='',ylab='',ylim=c(-.005,.05)
,yscale.components=function(...){yc <- yscale.components.default(...)
yc$left$labels$labels <-sprintf("%s%%",yc$left$labels$at*100)
return(yc)}
,main=x
,auto.key = list(x = .76, y=.88, corner = c(0,1) ,border = FALSE, lines = TRUE)
,temp3[ENTITY_NAME=='Bank of Hope',,])
print(p);
# cat('\n Shapiro Normality Test \n')
print(shapiro.test(fit$residuals));
# cat('\n QQ-Plot \n')
qqnorm(fit$residuals);qqline(fit$residuals)
return();
});
# demo(Rehm)
#
#
# out1 <- panelAR(NCOR ~ NCOR.all, data=temp2, panelVar='ccode', timeVar='year', autoCorr='ar1', panelCorrMethod='pcse', rho.na.rm=TRUE, panel.weight='t-1', bound.rho=TRUE)
# summary(out1)
```
## 4.1 Segmentation
Test the hypothesis that NOOCRE and MF can be combined as one portfolio.
Model 1:
$$NCOR~peer.bank:segment+peer.bank:segment*x$$
Model 2:
$$NCOR~peer.bank+peer.bank*x$$
Peer banks are used only as a blocking effect in this test. In order to have a balanced design for testing, we select only banks that pass the pairwise test in both MF and NOOCRE.
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=11,fig.height=8}
## Segmentation hypothesis test: can MF and NOOCRE be modeled as a single
## portfolio? Only banks that passed the pairwise test in BOTH segments are
## kept so the design is balanced, then nested OLS models with and without
## segment interactions are compared via anova (F test).
peer.banks<-dcast(temp2.tst,Peer.Bank~Portfolio2,value.var = 'Test.Results')[MF=='Accept'&NOOCRE=='Accept',.(Peer.Bank),]
# data2[,Portfolio2:=ifelse(Portfolio%in%c('NOOCRE'),'NOOCRE',Portfolio),]
temp0<-data2[year(Date)>=2007&Portfolio2%in%c('NOOCRE','MF'),
.(Balance=sum(Balance,na.rm =TRUE)
,GCO=sum(GCO,na.rm =TRUE)
,Recovery=sum(Recovery)
,NCOR=sum(GCO-Recovery,na.rm =TRUE)/sum(Balance.Lag,na.rm=TRUE)
)
,keyby=.(ENTITY_NAME,Date,Portfolio2)]
temp0[,NCOR.all:=mean(NCOR),keyby=.(Date)];
# Decision made based on graphical review
temp1<-merge(temp0,temp2.tst[,.(Portfolio2,Peer.Bank,Test.Results),],by.x=c('ENTITY_NAME','Portfolio2'),by.y=c('Peer.Bank','Portfolio2'),all.x=TRUE)
# temp1[,.N,keyby=.(Portfolio2,ENTITY_NAME)];
# temp2<-temp1[Test.Results=='Accept'|ENTITY_NAME%in%c('Bank of Hope'),,]
temp2<-temp1[ENTITY_NAME%in%c(unlist(peer.banks),'Bank of Hope'),,];
temp2[,.N,keyby=.(Portfolio2,ENTITY_NAME)];
## Test 1: bank-by-segment intercepts and slopes vs. bank-only.
fit1<-lm(NCOR~0+ENTITY_NAME:Portfolio2+ENTITY_NAME:Portfolio2:NCOR.all,temp2);
fit2<-lm(NCOR~0+ENTITY_NAME+ENTITY_NAME:NCOR.all,temp2);
# summary(fit1)
# summary(fit2)
anova(fit1,fit2)
## Test 2: segment-specific slopes only vs. a single common slope.
fit1<-lm(NCOR~0+ENTITY_NAME:Portfolio2+Portfolio2:NCOR.all,temp2);
fit2<-lm(NCOR~0+ENTITY_NAME+NCOR.all,temp2);
# summary(fit1)
# summary(fit2)
anova(fit1,fit2)
# fit1<-lm(NCOR~Portfolio2+Portfolio2:NCOR.all,temp2[ENTITY_NAME=='Bank of Hope',,]);
# fit2<-lm(NCOR~NCOR.all,temp2[ENTITY_NAME=='Bank of Hope',,]);
# summary(fit1)
# summary(fit2)
# anova(fit1,fit2)
## Visual check: per-bank NCOR vs. peer mean, colored by segment, with a
## regression line per segment.
xyplot(NCOR~NCOR.all|ENTITY_NAME,groups=Portfolio2
,type=c('g','r','p')
,yscale.components=function(...){ yc <- yscale.components.default(...)
yc$left$labels$labels <-sprintf("%s%%",yc$left$labels$at*100);return(yc)}
,auto.key=list(x =0.62,y=.93,columns=1,border=FALSE,cex=1,lwd=4)
,layout=c(2,2),temp2)
```
## 5.1 Univariate Analysis
### Delinquency Rates
+ Estimate the effect of delinquency using $$NCOR\sim Peer Bank + DR.lag(i)$$
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=11,fig.height=8}
## Univariate delinquency analysis setup: aggregate 30/90 DPD, nonaccrual
## and NCOR by portfolio/bank/quarter, attach test verdicts, then create
## 1..9-quarter lags of each delinquency rate via generated := statements.
temp0<-data2[year(Date)>=2000,
.(Balance=sum(Balance,na.rm =TRUE)
,GCO=sum(GCO,na.rm =TRUE)
,Recovery=sum(Recovery)
,DPD30=sum(DPD30)
,DPD90=sum(DPD90)
,NONACC=sum(NCRL)
,DPDR30=sum(DPD30,na.rm =TRUE)/sum(Balance,na.rm=TRUE)
,DPDR90=sum(DPD90,na.rm =TRUE)/sum(Balance,na.rm=TRUE)
,NONACCR=sum(NCRL,na.rm =TRUE)/sum(Balance,na.rm=TRUE)
,NCOR=sum(GCO-Recovery,na.rm =TRUE)/sum(Balance.Lag,na.rm=TRUE)
)
,keyby=.(Portfolio2,ENTITY_NAME,Date)]
temp1<-merge(temp0,temp2.tst[,.(Portfolio2,Peer.Bank,Test.Results),],by.x=c('ENTITY_NAME','Portfolio2'),by.y=c('Peer.Bank','Portfolio2'),all.x=TRUE)[ENTITY_NAME!='Bank of Hope',,]
# temp1[,.N,keyby=.(Portfolio2,ENTITY_NAME)]
temp1[,NCOR.all:=mean(NCOR),keyby=.(Portfolio2,Date)]
# temp0[year(Date)==2007,sum(Balance),keyby=.(Portfolio2)]
# data2[Date=='2007 Q1',Balance,]
# temp2.tst[Portfolio2=='NOOCRE'&Peer.Bank=='Cathay General Bancorp',Test.Results:='Accept',]
## Keep accepted peers; here the call-report pull stands in for the bank.
temp2<-temp1[Test.Results=='Accept'|(ENTITY_NAME%in%c('Bank of Hope (KPMG Pull CR)')),,]
# temp2[,.N,keyby=.(Portfolio2,ENTITY_NAME)]
## Lag columns DPDR30.Lag1..9 etc., lagged within portfolio x bank.
for(i in 1:9){
command1 = paste('temp2[,DPDR30.Lag',i,':=shift(DPDR30,',i,',fill=NaN),keyby=.(Portfolio2,ENTITY_NAME)]',sep='');eval(parse(text=command1));
command1 = paste('temp2[,DPDR90.Lag',i,':=shift(DPDR90,',i,',fill=NaN),keyby=.(Portfolio2,ENTITY_NAME)]',sep='');eval(parse(text=command1));
command1 = paste('temp2[,NONACCR.Lag',i,':=shift(NONACCR,',i,',fill=NaN),keyby=.(Portfolio2,ENTITY_NAME)]',sep='');eval(parse(text=command1));
}
## For each portfolio and each lagged delinquency-rate column, fit
## NCOR ~ bank fixed effects + lagged rate, recording the last
## coefficient's estimate, p-value and the model R-squared.
temp2.sum0<-
do.call('rbind',lapply(temp1[,unique(Portfolio2),],function(x){
do.call('rbind',lapply(grep('Lag',grep('DPDR|NONACCR',names(temp2),value = TRUE),value=TRUE),function(y){
# y='DPDR30.Lag2'
# x='CnI'
fit<-lm(paste('NCOR~ENTITY_NAME+',y),temp2[Portfolio2==x,,])
# str(summary(fit)$r.squared)
tmp0<-summary(fit)$coefficients[,]
## Last coefficient row corresponds to the lagged delinquency regressor.
tmp1<-data.table(var=y,Portfolio2=x,Estimate=round(tmp0[dim(tmp0)[1],1],4),pValue=round(tmp0[dim(tmp0)[1],4],6),rsquare=summary(fit)$r.squared)
return(tmp1)
}))
}))
## Three DT renderings of the univariate results: coefficient estimates by
## portfolio, R-squared by portfolio, and the full long-form table.
datatable(dcast(temp2.sum0,var~Portfolio2,value.var = 'Estimate')
,rownames = F
,extensions = c('Scroller','FixedColumns','Buttons','ColReorder')
,options = list(
#rowCallback = JS("function(r,d) {$(r).attr('height', '100px')}"),
columnDefs = list(list(
targets = c(1:4),
render = JS(
"function(data, type, row, meta) {",
"return type === 'display' && data.length > 10 ?",
"'<span title=\"' + data + '\">' + data.substr(0, 10) + '...</span>' : data;",
"}")
)), ## column index starting from 0
deferRender = TRUE,
scrollY = 300,
scroller = TRUE,
scrollX = TRUE,
fixedColumns = list(leftColumns = 2),
dom = 'Blfrtip',
buttons =
list( list(
extend = 'collection',
buttons = c('csv'),
text = 'Save as .csv file'
)), ## it doesn't work if only keep 'csv'; weird
colReorder = TRUE,
initComplete = JS(
"function(settings, json) {",
"$(this.api().table().header()).css({'font-size': '12px'});",
"$(this.api().table().body()).css({'font-size': '12px'});",
"}") ## change both header and body font size
))
## R-squared by portfolio.
datatable(dcast(temp2.sum0,var~Portfolio2,value.var = 'rsquare')
,rownames = F
,extensions = c('Scroller','FixedColumns','Buttons','ColReorder')
,options = list(
#rowCallback = JS("function(r,d) {$(r).attr('height', '100px')}"),
columnDefs = list(list(
targets = c(1:4),
render = JS(
"function(data, type, row, meta) {",
"return type === 'display' && data.length > 10 ?",
"'<span title=\"' + data + '\">' + data.substr(0, 10) + '...</span>' : data;",
"}")
)), ## column index starting from 0
deferRender = TRUE,
scrollY = 300,
scroller = TRUE,
scrollX = TRUE,
fixedColumns = list(leftColumns = 2),
dom = 'Blfrtip',
buttons =
list( list(
extend = 'collection',
buttons = c('csv'),
text = 'Save as .csv file'
)), ## it doesn't work if only keep 'csv'; weird
colReorder = TRUE,
initComplete = JS(
"function(settings, json) {",
"$(this.api().table().header()).css({'font-size': '12px'});",
"$(this.api().table().body()).css({'font-size': '12px'});",
"}") ## change both header and body font size
))
## Full long-form table (20-char truncation here rather than 10).
datatable(temp2.sum0
,rownames = F
,extensions = c('Scroller','FixedColumns','Buttons','ColReorder')
,options = list(
#rowCallback = JS("function(r,d) {$(r).attr('height', '100px')}"),
columnDefs = list(list(
targets = c(1:4),
render = JS(
"function(data, type, row, meta) {",
"return type === 'display' && data.length > 20 ?",
"'<span title=\"' + data + '\">' + data.substr(0, 20) + '...</span>' : data;",
"}")
)), ## column index starting from 0
deferRender = TRUE,
scrollY = 300,
scroller = TRUE,
scrollX = TRUE,
fixedColumns = list(leftColumns = 2),
dom = 'Blfrtip',
buttons =
list( list(
extend = 'collection',
buttons = c('csv'),
text = 'Save as .csv file'
)), ## it doesn't work if only keep 'csv'; weird
colReorder = TRUE,
initComplete = JS(
"function(settings, json) {",
"$(this.api().table().header()).css({'font-size': '12px'});",
"$(this.api().table().body()).css({'font-size': '12px'});",
"}") ## change both header and body font size
))
## Stack lags 1-4 of each delinquency rate into long format for a
## NCOR-vs-rate scatter (with per-variable regression lines) by portfolio.
temp3<-rbind(
temp2[,.(Portfolio2,NCOR,var='DPDR30.Lag1',value=DPDR30.Lag1),]
,temp2[,.(Portfolio2,NCOR,var='DPDR30.Lag2' ,value=DPDR30.Lag2),]
,temp2[,.(Portfolio2,NCOR,var='DPDR30.Lag3' ,value=DPDR30.Lag3),]
,temp2[,.(Portfolio2,NCOR,var='DPDR30.Lag4' ,value=DPDR30.Lag4),]
,temp2[,.(Portfolio2,NCOR,var='DPDR90.Lag1' ,value=DPDR90.Lag1),]
,temp2[,.(Portfolio2,NCOR,var='DPDR90.Lag2' ,value=DPDR90.Lag2),]
,temp2[,.(Portfolio2,NCOR,var='DPDR90.Lag3' ,value=DPDR90.Lag3),]
,temp2[,.(Portfolio2,NCOR,var='DPDR90.Lag4' ,value=DPDR90.Lag4),]
,temp2[,.(Portfolio2,NCOR,var='NCRLR.Lag1' ,value=NONACCR.Lag1),]
,temp2[,.(Portfolio2,NCOR,var='NCRLR.Lag2' ,value=NONACCR.Lag2),]
,temp2[,.(Portfolio2,NCOR,var='NCRLR.Lag3' ,value=NONACCR.Lag3),]
,temp2[,.(Portfolio2,NCOR,var='NCRLR.Lag4' ,value=NONACCR.Lag4),]
)
## Palette: one Brewer shade ramp per rate family (greens/reds/blues) plus
## a dark grey.
myColours <- c( brewer.pal(6,"Greens")[c(3,4,5,6)]
,brewer.pal(6,"Reds")[c(3,4,5,6)]
,brewer.pal(6,"Blues")[c(3,4,5,6)]
,brewer.pal(9,"Greys")[9]
)
my.settings <- list(
superpose.symbol = list(col=myColours, border="transparent")
,superpose.line = list(col=myColours, border="transparent")
)
xyplot(NCOR~value|Portfolio2,group=var
,xlab='Delinquency Rate',lwd=2
,layout=c(2,2),par.settings = my.settings
,auto.key=list(x =0.10,y=.92,columns=3,border=FALSE,cex=1,lwd=2)
,type=c('p','g','r'),temp3)
## Time-series view: contemporaneous delinquency rates alongside NCOR
## ('ZZ - ' prefix sorts NCOR last so it takes the red palette slot).
temp4<-rbind(
temp2[,.(Portfolio2,ENTITY_NAME,Date,var='DPDR30',value=DPDR30),]
# ,temp2[,.(Portfolio2,ENTITY_NAME,Date,var='DPDR30.Lag2' ,value=DPDR30.Lag2),]
# ,temp2[,.(Portfolio2,ENTITY_NAME,Date,var='DPDR30.Lag3' ,value=DPDR30.Lag3),]
,temp2[,.(Portfolio2,ENTITY_NAME,Date,var='DPDR90' ,value=DPDR90),]
# ,temp2[,.(Portfolio2,ENTITY_NAME,Date,var='DPDR90.Lag2' ,value=DPDR90.Lag2),]
# ,temp2[,.(Portfolio2,ENTITY_NAME,Date,var='DPDR90.Lag3' ,value=DPDR90.Lag3),]
,temp2[,.(Portfolio2,ENTITY_NAME,Date,var='NCRLR' ,value=NONACCR),]
# ,temp2[,.(Portfolio2,ENTITY_NAME,Date,var='NCRLR.Lag2' ,value=NCRLR.Lag2),]
# ,temp2[,.(Portfolio2,ENTITY_NAME,Date,var='NCRLR.Lag3' ,value=NCRLR.Lag3),]
,temp2[,.(Portfolio2,ENTITY_NAME,Date,var='ZZ - NCOR' ,value=NCOR),]
)
myColours <- c( brewer.pal(6,"Greens")[c(3,4,6)]
,brewer.pal(6,"Reds")[c(4)]
)
my.settings <- list(
superpose.symbol = list(col=myColours, border="transparent")
,superpose.line = list(col=myColours, border="transparent")
)
xyplot(value~Date|Portfolio2,group=var
,xlab='',lwd=2
,type=c('p','l','g')
,layout=c(2,2)
,main='Based on Call Report Data'
,par.settings = my.settings
,auto.key=list(x =0.35,y=.92,columns=1,border=FALSE,cex=1,lwd=2)
,temp4[ENTITY_NAME=='Bank of Hope (KPMG Pull CR)',,])
```
### DPD vs Macro
+ In this section we explore the effectiveness of adding DPD variables vs. macro credit drivers by comparing the following OLS models:
$$NCOR\sim Peer Bank + NONACCR.Lag(i) + X$$
$$NCOR\sim Peer Bank + X$$
```{r, echo=FALSE, include=TRUE, warning=FALSE,fig.width=11,fig.height=6}
## Compare adjusted R-squared of NCOR ~ banks + NCOR.all with and without a
## lagged nonaccrual-rate term, for lags 1..9 per portfolio. lag==0 is the
## baseline (no delinquency term).
temp2.dpd.result<-do.call('rbind',lapply(temp2[,unique(Portfolio2),],function(x){
fit0<-lm(NCOR~ENTITY_NAME+NCOR.all,temp2[Portfolio2==x&year(Date)>=2007,,])
tmp0<-data.table(Portfolio2=x,lag=0,adj.rsquare=summary(fit0)$adj.r.squared)
tmp1<-do.call('rbind',lapply(1:9,function(y){
frm=paste('NCOR~ENTITY_NAME+NCOR.all+NONACCR.Lag',y,collapse='',sep='')
fit1<-lm(as.formula(frm),temp2[Portfolio2==x&year(Date)>=2007,,])
return(data.table(Portfolio2=x,lag=y,adj.rsquare=summary(fit1)$adj.r.squared))
}))
return(rbind(tmp0,tmp1))
}))
## Broadcast the baseline (lag==0) adj-R2 to every row of its portfolio so
## the panel function can draw it as a reference line.
temp2.dpd.result[,adj.rsquare0:=max(adj.rsquare*(lag==0)),keyby=.(Portfolio2)]
xyplot(adj.rsquare~lag|Portfolio2,z=temp2.dpd.result[lag>0,,]
,type=c('p','l','g'),lwd=2
,main='In-sample fit comparison with/without delinquency rates.'
# ,scale=list(y=c(relation='free'),x=c(relation='free'))
,panel=function(x,y,subscripts,z=z,groups=groups,horizontal=horizontal,stack=stack,par.settings=par.settings,...){
panel.grid(h=-1, v=0);
panel.xyplot(x,y,...)
# print(z[subscripts,,]);print(subscripts);print(x);print(y)
# print(z[subscripts][1]$Base)
# print(z[subscripts][1]$Severe)
# panel.barchart(x,y,subscripts=subscripts,groups=groups,horizontal = horizontal,stack=stack,...)
# panel.barchart(x,y,subscripts=subscripts,groups=groups,horizontal = horizontal,stack=stack,...)
## Green horizontal line = baseline fit without the delinquency term.
panel.abline(h=z[subscripts][1]$adj.rsquare0,col = 'darkgreen',lwd=3,alpha=.5)
# panel.abline(h=z[subscripts][1]$Severe,col = 'red',lwd=2,alpha=.5)
}
,temp2.dpd.result[lag>0,,])
```
### Prepare Estimation Sample
```{r, eval=TRUE,echo=FALSE, include=TRUE, warning=FALSE,fig.width=11,fig.height=18}
## Build and persist the final estimation sample: accepted peers + Bank of
## Hope, merged with historic MEVs, stacked with forecast-scenario MEV rows.
temp0<-data2[year(Date)>=2000,
.(Balance=sum(Balance,na.rm =TRUE)
,GCO=sum(GCO,na.rm =TRUE)
,Recovery=sum(Recovery)
,NCOR=sum(GCO-Recovery,na.rm =TRUE)/sum(Balance.Lag,na.rm=TRUE)
)
,keyby=.(ENTITY_NAME,Date,Portfolio2)]
# temp0[,NCOR.all:=mean(NCOR),keyby=.(Date)];
# decision based on graphical review
## NOTE(review): manual override of one test verdict — mutates temp2.tst
## in place, so any later re-use of temp2.tst sees the override.
temp2.tst[Portfolio2=='NOOCRE'&Peer.Bank=='Cathay General Bancorp',Test.Results:='Accept',]
temp1<-merge(temp0,temp2.tst[,.(Portfolio2,Peer.Bank,Test.Results),],by.x=c('ENTITY_NAME','Portfolio2'),by.y=c('Peer.Bank','Portfolio2'),all.x=TRUE)
temp2<-temp1[Test.Results=='Accept'|(ENTITY_NAME%in%c('Bank of Hope')),,]
temp2[,.N,keyby=.(Portfolio2,ENTITY_NAME)]
## Historic rows carry bank data + MEVs; scenario rows carry MEVs only
## (fill=TRUE pads the missing bank columns with NA).
data3<-rbind(
merge(temp2,data0.mev[Scenario=='Historic',,],by='Date')
,data0.mev[Scenario!='Historic',,]
,fill=TRUE
)
data3[Scenario!='Historic',ENTITY_NAME:='Bank of Hope',]
# data3[,.N,keyby=.(Date)]
## NOTE(review): output filename hard-codes a date stamp — update or
## parameterize when the sample is regenerated.
save(data3,file='S3_00_Estimation_Sample_with_MEV_20171117');
# temp0<-data2[year(Date)>=2007&ENTITY_NAME!='Bank of Hope (KPMG Pull CR)',.(
# Balance=sum(Balance)
# ,GCO=sum(GCO)
# ,Recovery=sum(Recovery)
# ,NCOR=sum(GCO-Recovery,na.rm =TRUE)/sum(Balance,na.rm =TRUE))
# ,keyby=.(ENTITY_NAME,Date,Portfolio2)]
# data3<-merge(temp0,data0.mev[Scenario=='Historic',,],by='Date')
# data3.fcst<-data0.mev[Scenario!='Historic',]
# data3[,.N,keyby=.(Portfolio2,ENTITY_NAME)]
# data3.fcst<-rbind(data3.hist[ENTITY_NAME=='Bank of Hope',,],data0.mev[Scenario!='Historic',],fill=TRUE)
# data3.fcst[,ENTITY_NAME:='Bank of Hope',]
# data0.mev[,.N,keyby=.(Date,Scenario)]
# data3.fcst[,.N,Date]
# display.brewer.all()
```
## 9.0 Time Spent
```{r,eval=TRUE, echo=FALSE, warning=FALSE, message=FALSE,fig.width=10, fig.height=4, results="asis"}
################################
### calculate the time spent ###
################################
## time0 is set near the top of the document; report elapsed minutes.
time1<-Sys.time()
# cat('Time Spent:')
cat('\n')
print(round(difftime(time1,time0,units='mins')))
```
<file_sep>/EBModel/2-ImportCREEndingBalances.R
################################################################################
# Bank of Hope
# Commercial Real Estate Ending Balances
# Program: <>.R
# Author(s): KPMG, LLP
# Purpose:
# Data Dependences:
#
#
# R-version: R version 3.3.1 (2016-06-21)
# -- "Bug in Your Hair" Copyright (C) 2016 The R Foundation
# for Statistical Computing Platform: x86_64-apple-darwin13.4.0 (64-bit)
################################################################################
### Environment Settings #######################################################
## Machine-specific input/library/output paths — edit these when running on
## another workstation; nothing below this section should need changes.
pth_inputs = "/Users/jerrywatkins/Projects/Engagements/BOH/ending-balance/combined/read-only-inputs"
pth_lib = "/Users/jerrywatkins/Projects/Engagements/BOH/ending-balance/combined/library"
pth_out = "/Users/jerrywatkins/Projects/Engagements/BOH/ending-balance/combined"
### No need to make changes below after this line ##############################
### Dependencies
## Project helpers: concat(), get_excel(), stack() with labels, etc.
source(paste(pth_lib,"/dev-support.R", sep=""))
source(paste(pth_lib,"/dfast-support.R", sep=""))
library("openxlsx")
library("data.table")
library("lubridate")
library("ggplot2")
library("scales")
library("zoo")
### Collect SNL Data
# note: the ending balance units are thousands
## Read the four CRE segment sheets from the SNL workbook and stack them
## into one frame with a `label` column identifying the segment:
## oo_no = Comm RE, mf = Multifamily, oo = Owner Occ, no = Non Owner Occ.
pth_excel_data = concat(pth_inputs, "/snl/Modified SNL CRE ending balance.xlsx")
comm_re = get_excel(pth_excel_data, "206548 Comm RE")
multi_family = get_excel(pth_excel_data, "206545 Multifamly")
owner_occ = get_excel(pth_excel_data, "206546 Owner Occ ")
non_owner_occ = get_excel(pth_excel_data, "206547 Non Owner Occ")
snl_cre = stack(comm_re, multi_family, owner_occ, non_owner_occ
, labels=c("oo_no", "mf", "oo", "no")
)
rename_list = list(
"label"="segment"
, "X1" = "quarter_date"
, "X2" = "quarter_month"
, "X3" = "quarter_year"
, "X4" = "snl_field_key"
, "X5" = "qtr_"
, "BBCN" = "bbcn_eb"
, "Wilshire.Bank" = "wilshire_eb"
, "Saehan.Bancorp" = "saehan_eb"
, "Bank.Asiana" = "bank_asiana_eb"
, "Foster.Bankshares..Inc." = "foster_eb"
, "Pacific.International.Bank" = "pacific_eb"
, "Nara.Bank" = "nara_eb"
, "Asiana.bank" = "asiana_bank_eb"
, "Liberty.Bank.of.New.York" = "liberty_eb"
, "Mirae.Bank" = "mirae_eb"
, "Innovative.Bank" = "innovative_eb"
)
snl_cre = snl_cre[, names(rename_list)]
new_names = sapply(names(rename_list), function(x) rename_list[[x]])
names(new_names) = NULL
names(snl_cre) = new_names
snl_cre = data.table(snl_cre)
snl_cre = snl_cre[, is_data_row := !(snl_field_key == "SNL Field Key")]
snl_cre = snl_cre[is_data_row == TRUE, ]
# convert ending balances to numeric
# and replace NA values with 0
name_vec = names(snl_cre)
numeric_conv_vec = name_vec[grep("_eb", name_vec)]
### convert to numeric and make units billions.
### dividing by 1e6 because values are in thousands
for (name in numeric_conv_vec) {
snl_cre[, name] = as.numeric(snl_cre[, ..name][[1]])
col = as.numeric(snl_cre[, ..name][[1]])/1e6
snl_cre[, name] = ifelse(is.na(col), 0, col)
}
# Add variables
# total ending balance
snl_cre[, end_bal :=
bbcn_eb
+ wilshire_eb
+ saehan_eb
+ bank_asiana_eb
+ foster_eb
+ pacific_eb
+ nara_eb
+ asiana_bank_eb
+ liberty_eb
+ mirae_eb
+ innovative_eb
]
# Date Variable
snl_cre[, qtr_dt := as.Date(quarter_date, "%Y-%m-%d")]
snl_cre_seg = snl_cre[, c("qtr_dt", "segment", "end_bal")]
cre = dcast(snl_cre_seg, qtr_dt ~ segment, value.var = "end_bal", fun.aggregate=sum)
### Per-bank CRE ending balances ###############################################
# Segment totals used as the denominator for each bank's share below.
cre_agg = cre[, c("qtr_dt", "oo", "mf", "no")]
# All bank-level ending-balance columns to be reshaped, one at a time.
banks = c(
  "bbcn_eb"
  , "wilshire_eb"
  , "saehan_eb"
  , "bank_asiana_eb"
  , "foster_eb"
  , "pacific_eb"
  , "nara_eb"
  , "asiana_bank_eb"
  , "liberty_eb"
  , "mirae_eb"
  , "innovative_eb"
)
# One wide (qtr_dt x segment) table per bank, stacked long with a `bank`
# label.  rbindlist(lapply(...)) replaces the original rbind()-inside-a-loop,
# which re-copies the accumulated table on every iteration (quadratic cost);
# the result is identical.
cre_banks = rbindlist(lapply(banks, function(bank) {
  bank_data = dcast(snl_cre, qtr_dt ~ segment, value.var = bank, fun.aggregate = sum)
  bank_data[["bank"]] = bank
  bank_data
}))
cre_banks = cre_banks[, c("qtr_dt", "oo", "mf", "no", "bank")]
setnames(cre_banks, c("oo", "mf", "no"), c("oo_bank", "mf_bank", "no_bank"))
# Join the segment totals onto each bank row and compute the bank's share of
# each segment (0 when the segment total is not positive).
cre_banks = cre_banks[cre_agg, on="qtr_dt"]
cre_banks[,
          `:=`(
            bank_pct_mf = ifelse(mf > 0, mf_bank/mf, 0)
            , bank_pct_no = ifelse(no > 0, no_bank/no, 0)
            , bank_pct_oo = ifelse(oo >0 , oo_bank/oo, 0)
          )
          ]
rm("snl_cre_seg", "snl_cre", "cre_agg")
cre = cre[order(qtr_dt)]
# Derived aggregates: SNL vs BOH definitions of total CRE, and income
# producing (ip = non-owner-occupied + multifamily).
cre[,
    `:=`(
      year=year(qtr_dt)
      , cre_snl = mf + oo_no
      , cre_boh = mf + oo + no
      , ip=no + mf
    )
    ]
### Save Files for Later #######################################################
saveRDS(cre, concat(pth_out, "/data-cre.RDS"))
saveRDS(cre_banks, concat(pth_out, "/data-cre_banks.RDS"))
################################################################################
<file_sep>/PD/BBCN.R
###################################################################
# Project: Bank of Hope
# PD Models
###################################################################
########
# BBCN
########
library(lubridate)
# Read the raw data
# NOTE(review): nrows = 100000 caps the read — confirm the file has no more
# rows than this, otherwise data is silently truncated.
bbcn=read.csv("BBCN-Bottom-up-SGmod6.csv", header = T, nrows = 100000)
# Read the treasury rates
df_rates=read.csv("rates2.csv", header = T)
df_rates$date=NULL
df_rates$month=NULL
# Read the acquired loans data
df_acquired= read.csv("acquired loan identifier bbcn.csv", header = T)
df2=as.data.frame(df_acquired$Note_Number)
colnames(df2)=c("accountno")
# Create acquired identifier
# NOTE(review): `accountno` (lowercase) is used here but later code uses
# `bbcn$accountNo` (capital N) — verify which casing the CSV actually has;
# if it is `accountNo`, this flag is never set.
bbcn$acquired_identifier<-0 # 0 represents bbcn_originated
bbcn$acquired_identifier[which(bbcn$accountno%in%unique(df2$accountno))]<-1 # 1 represents bbcn_acquired
# Create Y (default/event indicator: any charge-off amount or a non-accrual
# status code).
bbcn$Y<-0
# FIX: the original used the scalar `||`, which evaluates only the first
# element of each vector (and errors on R >= 4.3 for length > 1 operands);
# a row-wise condition requires the vectorized `|`.
bbcn$Y[which(bbcn$amtChargedOff>0 | bbcn$nonAccrualFlag%in%c(1,2,3,4,9))]<-1
# Clean up unnecessary observations
# Split origination / maturity / file dates into year, month and quarter.
bbcn$Yr_origination<-year(mdy(bbcn$originationDate))
bbcn$Mn_origination<-month(mdy(bbcn$originationDate))
bbcn$Q_origination<-1
bbcn$Q_origination[which(bbcn$Mn_origination%in%c(4,5,6))]<-2
bbcn$Q_origination[which(bbcn$Mn_origination%in%c(7,8,9))]<-3
bbcn$Q_origination[which(bbcn$Mn_origination%in%c(10,11,12))]<-4
bbcn$Yr_maturity<-year(mdy(bbcn$maturityDate))
bbcn$Mn_maturity<-month(mdy(bbcn$maturityDate))
bbcn$Q_maturity<-1
bbcn$Q_maturity[which(bbcn$Mn_maturity%in%c(4,5,6))]<-2
bbcn$Q_maturity[which(bbcn$Mn_maturity%in%c(7,8,9))]<-3
bbcn$Q_maturity[which(bbcn$Mn_maturity%in%c(10,11,12))]<-4
bbcn$Yr_file<-year(mdy(bbcn$fileDate))
bbcn$Mn_file<-month(mdy(bbcn$fileDate))
bbcn$Q_file<-1
bbcn$Q_file[which(bbcn$Mn_file%in%c(4,5,6))]<-2
bbcn$Q_file[which(bbcn$Mn_file%in%c(7,8,9))]<-3
bbcn$Q_file[which(bbcn$Mn_file%in%c(10,11,12))]<-4
# Remaining term (months / quarters), loan age, total term and percent of
# term elapsed as of the file date.
bbcn$ttm_m<-12*(bbcn$Yr_maturity-bbcn$Yr_file)+(bbcn$Mn_maturity-bbcn$Mn_file)
bbcn$ttm_q<-4*(bbcn$Yr_maturity-bbcn$Yr_file)+(bbcn$Q_maturity-bbcn$Q_file)
bbcn$loan_age_q<-4*(bbcn$Yr_file-bbcn$Yr_origination)+(bbcn$Q_file-bbcn$Q_origination)
bbcn$term_q<-4*(bbcn$Yr_maturity-bbcn$Yr_origination)+(bbcn$Q_maturity-bbcn$Q_origination)
bbcn$pob<-100*bbcn$loan_age_q/bbcn$term_q
# get the double digit Naics code
# Sina Comment 1 NAICS code
temp<-as.data.frame(bbcn$naicsCode)
temp$temp<-as.numeric(as.character(substr(as.character(temp[,1]),1,2)))
# FIX: the original line ended with a stray ",]" (matrix-style indexing on an
# atomic vector), which raises "incorrect number of dimensions" and halts the
# script.  It is a leftover inspection with no assignment; kept with the
# comma removed.
temp$temp[which(temp$`bbcn$naicsCode`==0)]
# Map 2-digit NAICS sectors to industry labels (column 3).
# NOTE(review): "Transportaion" and "Accomodation" are misspelled but kept
# byte-for-byte — downstream code may match on these exact labels.
temp[,3]="Other"
temp[which(temp$temp==23) ,3]<-"Construction"
temp[which(temp$temp%in% c(31,32,33)) ,3]<-"Manufacturing"
temp[which(temp$temp==42) ,3]<-"Wholesale Trade"
temp[which(temp$temp%in% c(44,45)) ,3]<-"Retail"
temp[which(temp$temp%in% c(48,49)) ,3]<-"Transportaion"
temp[which(temp$temp==51) ,3]<-"Information"
temp[which(temp$temp== 53) ,3]<-"Real Estate & Rental"
temp[which(temp$temp==54) ,3]<-"Science & Technology"
temp[which(temp$temp==56) ,3]<-"Waste Management"
temp[which(temp$temp==62) ,3]<-"Health Care"
temp[which(temp$temp==71) ,3]<-"Arts & Entertainment"
temp[which(temp$temp==72) ,3]<-"Accomodation & Food"
temp[which(temp$temp==61) ,3]<-"Educational Services"
temp[which(temp$temp==55) ,3]<-"Management of Companies"
temp[which(temp$temp==22) ,3]<-"Utilities"
temp[which(temp$temp==11) ,3]<-"Agriculture"
temp[which(temp$temp==92) ,3]<-"Public Administration"
temp[which(temp$temp==52) ,3]<-"Finance & Insurance"
temp[which(temp$temp%in%c(0,99)) ,3]<-"error"
bbcn$naics2dig<-temp$temp
bbcn$naics<-temp[,3]
rm(temp)
# Rates ########################################################################
# Merge quarterly treasury yields onto the loan file, then compute spread_v:
#   variable-rate loans ("V"): the note rate itself (x100);
#   fixed-rate loans ("F"):    note rate (x100) minus the treasury yield whose
#                              tenor best matches the remaining months to
#                              maturity (ttm_m).
# NOTE(review): 100*interestRate suggests interestRate is a decimal fraction
# while the tb* yields are already in percent — confirm units line up.
colnames(df_rates)=c("tb1m", "tb3m", "tb6m", "tb1y", "tb2y", "tb3y", "tb5y" ,"tb7y", "tb10y", "tb20y","tb30y", "Yr_file", "Q_file" )
bbcn<-merge(bbcn, df_rates, by=c("Yr_file","Q_file"), all.x = T)
bbcn$spread_v<-0
bbcn$spread_v[which(bbcn$fixVar=="V")]<-100*bbcn$interestRate[which(bbcn$fixVar=="V")]
# FIX: the original eleven near-duplicate lines used the scalar `&&` inside
# which(), which evaluates only the first element and mis-selects rows (and
# errors on R >= 4.3).  The tenor buckets below reproduce the intended
# breakpoints with the vectorized `&`:
#   (-Inf,1]->1m (1,4]->3m (4,9]->6m (9,18]->1y (18,30]->2y (30,48]->3y
#   (48,72]->5y (72,102]->7y (102,180]->10y (180,300]->20y (300,Inf)->30y
tenor_breaks <- c(-Inf, 1, 4, 9, 18, 30, 48, 72, 102, 180, 300, Inf)
tenor_cols <- c("tb1m","tb3m","tb6m","tb1y","tb2y","tb3y","tb5y","tb7y","tb10y","tb20y","tb30y")
for (i in seq_along(tenor_cols)) {
  idx <- which(bbcn$fixVar == "F" &
                 bbcn$ttm_m > tenor_breaks[i] &
                 bbcn$ttm_m <= tenor_breaks[i + 1])
  bbcn$spread_v[idx] <- 100 * bbcn$interestRate[idx] - bbcn[[tenor_cols[i]]][idx]
}
#first non accrual date
# For each account, find the earliest non-accrual date (stored as numeric
# days since 1970-01-01; min() over all-NA yields Inf/NA with a warning).
# NOTE(review): `accountNo` (capital N) vs the earlier `accountno` — confirm
# the real column casing in the CSV; only one of the two can exist.
unique_accountNo<-unique(bbcn$accountNo)
temp<-0
# O(accounts x rows) scan; acceptable at this data size, but an aggregate()/
# tapply() would be faster if the file grows.
for(i in 1:length(unique_accountNo)){
temp[i]<-as.Date(min(mdy(bbcn$nonAccrualDate[which(bbcn$accountNo==unique_accountNo[i])])), origin="1970-01-01")
}
temp<-cbind(unique_accountNo,as.Date(temp,origin = "1970-01-01"))
colnames(temp)<-c("accountNo","min_nonAccDate")
# Left-join the per-account minimum back onto the loan rows.
bbcn2<-merge(bbcn,temp,by="accountNo", all.x = T)
rm(i)
rm(temp)
rm(unique_accountNo)
# remove unnecessary observations
# Drop loans that matured before the sample starts, or whose file date is
# more than 2 months past maturity within the same year.
bbcn2$deleter<-0
bbcn2$deleter[which(bbcn2$Yr_maturity<=2006)]<-1
# FIX: `&&` -> `&` (scalar `&&` evaluated only the first element, so this
# filter selected either nothing or everything).
bbcn2$deleter[which( (bbcn2$Yr_maturity== bbcn2$Yr_file) & (bbcn2$Mn_file-bbcn2$Mn_maturity>2))]<-1
bbcn2<-bbcn2[which(bbcn2$deleter==0),]
bbcn2$deleter=NULL
bbcn2$Yr_min_nonAccDate<-year(as.Date(bbcn2$min_nonAccDate,origin="1970-01-01"))
bbcn2$Mn_min_nonAccDate<-month(as.Date(bbcn2$min_nonAccDate,origin="1970-01-01"))
# Drop observations more than 2 months after the first non-accrual date, and
# rows with zero net book balance.  Rows with no non-accrual date are kept.
bbcn2$deleter<-0
bbcn2$deleter[which(bbcn2$Yr_file>bbcn2$Yr_min_nonAccDate)]<-1
# FIX: `&&` -> `&` (same scalar-operator bug as above).
bbcn2$deleter[which(bbcn2$Yr_file==bbcn2$Yr_min_nonAccDate&(bbcn2$Mn_file-bbcn2$Mn_min_nonAccDate)>2)]<-1
bbcn2$deleter[is.na(bbcn2$Yr_min_nonAccDate)]=0
bbcn2$deleter[bbcn2$currentNetBookBal==0]=1
bbcn2<-bbcn2[which(bbcn2$deleter==0),]
bbcn2$deleter=NULL
# Portfolio ID #################################################################
# Classify each record as "CI" or "CRE" (default "other") from its call-report
# code description plus, for most rules, its loan-type description.  CI rules
# run first, then CRE rules, matching the original assignment order so later
# CRE matches overwrite earlier CI matches.
#
# Fixes relative to the original one-line-per-rule version:
#   * vectorized `&` replaces scalar `&&` (the `&&` rules evaluated only the
#     first element, so the combined conditions never selected row-wise, and
#     error on R >= 4.3).
#   * grepl(..., fixed = TRUE): patterns such as "COMMERCIAL (GENERAL PLEDGE)",
#     "Commercial Line (18)" and "Conv 5+" contain regex metacharacters
#     ("()", "+") and did not match the literal text under the default regex
#     interpretation.
#   * assignment by column name (portfolio_id) instead of positional index 72.
bbcn2$portfolio_id<-"other"
unique(bbcn2$callReportCodeDescr)
# Helper: label rows whose callReportCodeDescr contains `call_pat` literally
# and (when given) whose loanTypeDescr contains `type_pat` literally.
flag_portfolio <- function(df, label, call_pat, type_pat = NULL) {
  hit <- grepl(call_pat, df$callReportCodeDescr, fixed = TRUE)
  if (!is.null(type_pat)) {
    hit <- hit & grepl(type_pat, df$loanTypeDescr, fixed = TRUE)
  }
  df$portfolio_id[which(hit)] <- label
  df
}
# C&I: "COMMERCIAL (GENERAL PLEDGE)" call-report code + these loan types.
ci_general_pledge_types <- c(
  "Commercial Line (18)", "Commercial Term Loan (20)",
  "Comml LOC - Other Gov Gty (19)", "Comml Term - Other Gov Gty (21)",
  "Discounted Acceptance (33)", "Export Working Capital Program (38)",
  "Performance Bond L/C (44)", "Purchase Advance (31)",
  "SBA 172 Loan (66)", "SBA ARC Loans (62)", "SBA Express LOC (64)",
  "SBA SOHO Loan (65)", "SBA Term Loans (61)", "Standby L/C (43)",
  "Trust Receipt (30)", "Working Capital Advance (37)"
)
for (lt in ci_general_pledge_types) {
  bbcn2 <- flag_portfolio(bbcn2, "CI", "COMMERCIAL (GENERAL PLEDGE)", lt)
}
# C&I: "Commercial Loans" call-report code + these loan types
# (the original listed "Simple Line of Credit (24)" twice; deduplicated).
ci_commercial_loan_types <- c(
  "Bankers Health Group", "Commercial Lease (25)", "Commercial Term Loan (20)",
  "Comml Asset-Based LOC (22)", "Comml LOC - Other Gov Gty (19)",
  "Comml Term - Other Gov Gty (21)", "Discounted Acceptance (33)",
  "Export Working Capital Program (38)", "Express Line (26)",
  "Master Comm LOC (01)", "Master Comm LOC Sublimit (03)",
  "Master ILOC (02)", "Master ILOC Sublimit (04)", "ODP LOC - Business",
  "Performance Bond L/C (44)", "Professional Line of Credit (51)",
  "Purchase Advance (31)", "Purchase Advance-Comm (27)",
  "SBA 172 Loan (66)", "SBA ARC Loans (62)", "SBA Express LOC (64)",
  "SBA Express Loan (63)", "SBA SOHO Loan (65)", "SBA Small Loan Advantage",
  "SBA Term Loans (61)", "Signature Line (11)", "Simple Line of Credit (24)",
  "Simple Loan - Commercial (23)", "Standby L/C (43)",
  "Syndicated Leveraged Lending", "Trust Receipt (30)",
  "Working Capital Advance (37)", "Working Capital Advance-Comm (28)"
)
for (lt in ci_commercial_loan_types) {
  bbcn2 <- flag_portfolio(bbcn2, "CI", "Commercial Loans", lt)
}
# C&I: the call-report code alone is sufficient.
ci_call_codes <- c(
  "Check Credit & Rev Credit Plan", "Com'l Loan - International Dpt",
  "Com'l Loans - Borrowing Based", "INTERNATIONAL", "Other Installment loans"
)
for (cc in ci_call_codes) {
  bbcn2 <- flag_portfolio(bbcn2, "CI", cc)
}
# CRE: call-report code + loan type.
cre_pairs <- list(
  c("COMMERCIAL (GENERAL PLEDGE)", "Commercial Real Estate (71)"),
  c("COMMERCIAL (GENERAL PLEDGE)", "SBA Real Estate (60)"),
  c("Commercial Loans", "Comm RE - Revolving LOC (74)"),
  c("Commercial Loans", "SBA Real Estate (60)"),
  c("Commercial Loans", "Commercial Real Estate (71)")
)
for (pr in cre_pairs) {
  bbcn2 <- flag_portfolio(bbcn2, "CRE", pr[1], pr[2])
}
# CRE: the call-report code alone is sufficient.
# NOTE(review): "SECURED BY FARMEDLAND" is kept verbatim from the original —
# confirm against the raw data whether the source text says "FARMLAND".
cre_call_codes <- c(
  "Conv 5+", "CONVENTIONAL 5+", "NON-FARM", "Other nonfarm",
  "Owner-occupied", "SECURED BY FARMEDLAND"
)
for (cc in cre_call_codes) {
  bbcn2 <- flag_portfolio(bbcn2, "CRE", cc)
}
# temp=bbcn2[bbcn2$portfolio_id=="CRE",]
# temp2=bbcn2[bbcn2$portfolio_id=="CI",]
# Drop two specific accounts after 2009 Q1.
bbcn2$deleter<-0
# FIX: the trailing `&&` -> `&` (scalar operator mis-selected rows).
# NOTE(review): as written this also excludes Jan-Mar records of every later
# year; if the intent is "on/after 2009-04", the condition should be
# Yr_file>2009 | (Yr_file==2009 & Mn_file>3) — confirm with the model owner.
bbcn2$deleter[which(bbcn2$accountNo %in% c(26506643, 23506889) & bbcn2$Yr_file>=2009 & bbcn2$Mn_file>3)]<-1
bbcn3<-bbcn2[bbcn2$deleter==0,]
bbcn3$deleter=NULL
# Collateral ###################################################################
# Map the numeric collateralPropertyType code to a description.
bbcn3$collateralPropertyType
bbcn3$collateral_descr[bbcn3$collateralPropertyType==10]<-'1-4 Res'
bbcn3$collateral_descr[bbcn3$collateralPropertyType==11]<-'Multifamily Res'
bbcn3$collateral_descr[bbcn3$collateralPropertyType==12]<-'Office Condo'
bbcn3$collateral_descr[bbcn3$collateralPropertyType==13]<-'Office Medical/Dental'
bbcn3$collateral_descr[bbcn3$collateralPropertyType==14]<-'Office'
# NOTE: the original repeated the code-10 line 22 more times as exact
# duplicates (no-ops from an unfinished copy/paste); they are removed here
# with no change in behavior.
# TODO: mappings for collateralPropertyType codes other than 10-14 appear to
# be missing ("Sina fix Collateral Description") — complete the table.
# Sina fix Collateral Description Line 530-570
#Sina to check
# Map internal loan-rating descriptions to numeric codes: Pass 1-4 keep their
# number; criticized grades get 1000 (Sp Mention), 2000 (Substandard),
# 3000 (Doubtful), 4000 (Loss).
# NOTE(review): this section operates on `bbcn`, but the pipeline above has
# already moved to `bbcn2`/`bbcn3` — confirm whether these assignments should
# target `bbcn3` instead (the results may be discarded as written).
bbcn$boh_rating<-0
bbcn$boh_rating[which(bbcn$loanRatingDescr2=="Substandard")]<-2000
bbcn$boh_rating[which(bbcn$loanRatingDescr2=="Doubtful")]<-3000
bbcn$boh_rating[which(bbcn$loanRatingDescr2=="Loss")]<-4000
bbcn$boh_rating[which(bbcn$loanRatingDescr2=="Pass-3")]<-3
bbcn$boh_rating[which(bbcn$loanRatingDescr2=="Pass-4")]<-4
bbcn$boh_rating[which(bbcn$loanRatingDescr2=="Sp Mention")]<-1000
bbcn$boh_rating[which(bbcn$loanRatingDescr2=="Pass-1")]<-1
bbcn$boh_rating[which(bbcn$loanRatingDescr2=="Pass-2")]<-2
# The three variants below catch leading/trailing-space versions of "Pass-2";
# trimws() on loanRatingDescr2 would handle all spacing variants in one pass.
bbcn$boh_rating[which(bbcn$loanRatingDescr2==" Pass-2")]<-2
bbcn$boh_rating[which(bbcn$loanRatingDescr2=="Pass-2 ")]<-2
bbcn$boh_rating[which(bbcn$loanRatingDescr2==" Pass-2 ")]<-2
# Exclude letters of credit from the modeling population.
bbcn<-bbcn[which(bbcn$productDescr!="Letter of C"),]
# Sina fix Line 657-673
<file_sep>/SandB/StepFun.R
## Forward stepwise linear-model selection with expected-sign constraints,
## tiered candidate pools, and automatic drop-back of coefficients that turn
## insignificant after each addition.
##
## Arguments:
##   var_info  - data.table with columns var/base/sign/tier: each candidate
##               variable, its base series (transformations of one base are
##               mutually exclusive), expected coefficient sign, and tier.
##   loan_in   - response-side data, merged with macro_in by 'Date'.
##   macro_in  - macro driver data.
##   tier      - which tier of candidate variables to consider.
##   y         - left-hand side of the formula, including '~'.
##   thresh    - per-tier significance thresholds for SE- and LR-based p-values.
##   criteria  - selection criterion: 'aic', 'bic', 'LR.p', 'SE.p' or 'rsq'.
##   vars0     - initial model terms ('1' = intercept-only).
##   fix_vars0 - variables that must never be dropped.
##   out.print - whether intermediate tables/models are printed.
## Returns: a list with one element per step, each holding the add/drop
## tables and the summary of the model selected at that step.
StepFun <- function(
var_info,
loan_in, #input loan level data
macro_in, #input macro data
tier=1, #indicate which tier of variables to consider
y='FLR_nco_diff~', #indicate response variable
thresh=c(0.05, 0.01, 0.001), #significance level for SE based p-value and LR test based p-value for each tier
criteria='bic', #variable selection criteria; other values='bic', 'LR.p', 'SE.p', 'rsq'
vars0 = c('1'), #model 0 variables
fix_vars0, #indicate which variables are fixed
out.print=T #indicate whether intermediate output will be printed
){
# Example invocation kept for interactive debugging:
# loan_in <- loan
# macro_in <- macro
# tier <- 2
# thresh <- c(0.05, 0.01, 0.001)
# y <- 's2_qd~'
# criteria <- 'rsq'
# vars0 <- c("ngdp_grw_lag4", "tr10y_qd")
# fix_vars0 <- c(1)
# out.print <- T
use_info <- var_info[, c('var', 'base', 'sign', 'tier'), with=F]
setnames(use_info, c('Variable', 'Base', 'Sign', 'Tier'))
# Exclude every transformation sharing a base with an initial variable.
base0 <- unique(use_info[Variable%in%vars0, ]$Base)
candidate <- copy(use_info[!Base%in%base0 & Tier==tier, ])
vars <- vars0 #vars will change as more variables added (dropped), vars0 is the initial variable list
vars_nm <- all.vars(as.formula(paste(y, paste(vars, collapse='+'), sep='')))
## Define initial model0
# macro_in = macro_all
data0 <- merge(loan_in, macro_in, by='Date')
data0 <- data0[complete.cases(data0[,vars_nm, with=F])]
model0 <- lm(as.formula(paste(y, paste(vars, collapse='+'), sep='')),
data=data0)
# Baseline statistics the candidate models must improve on.
sum_0 <- summary(model0)
aic_0 <- -2*as.numeric(logLik(model0))+2*attr(logLik(model0), 'df')
bic_0 <- -2*as.numeric(logLik(model0))+log(nrow(data0))*attr(logLik(model0), 'df')
rsq_0 <- sum_0$adj.r.squared
add_var <- 0
k <- 0
result_list <- list()
# Main stepwise loop: one iteration per added variable; stops when no
# candidate passes the selection criterion.
while(length(add_var)>0){
k <- k+1
## Add 1 variable to model0
add_list <- list()
add_table <- NULL
system.time({
# Fit the current model plus each remaining candidate; one-sided tests
# (p-value halved) when an expected sign is declared.
for(var in candidate$Variable){
formula <- as.formula(paste(y, paste(c(vars, var), collapse='+'), sep=''))
vars_nm <- all.vars(formula)
model_k <- lm(formula, data=data0[,vars_nm, with=F])
sum_k <- summary(model_k)
add_list[[length(add_list)+1]] <- sum_k
gc();gc();
loglik <- as.numeric(logLik(model_k))
df <- attr(logLik(model_k), 'df')
coef <- sum_k$coefficients[nrow(sum_k$coefficients), 1]
SE.p=sum_k$coefficients[nrow(sum_k$coefficients), 4]*ifelse(candidate[Variable==var,]$Sign==0, 1, 0.5)
aic <- -2*loglik+2*df
bic <- -2*loglik+log(nrow(data0))*df
LR.p <- pchisq(2*(loglik-as.numeric(logLik(model0))),1,lower.tail=F)*ifelse(candidate[Variable==var,]$Sign==0, 1, 0.5)
rsq <- sum_k$adj.r.squared
if(length(add_table)==0){
add_table <- data.table(add.var=var, coef, SE.p, aic, bic, LR.p, rsq)
}else{
add_table <- rbind(add_table, data.frame(add.var=var, coef, SE.p, aic, bic, LR.p, rsq))
}
}
names(add_list) <- candidate$Variable
})
# Flag which candidates have the expected sign and beat the baseline on
# each criterion.
add_table <- add_table[, ':='(
expected_sign=candidate$Sign
,sign_correct=sign(coef)*candidate$Sign>=0
,SE.p.sig=SE.p<=thresh[candidate[Variable==add.var,]$Tier]
,aic_correct=aic<=aic_0
,bic_correct=bic<=bic_0
,LR.sig=LR.p <= thresh[candidate[Variable==add.var,]$Tier]
,rsq_correct=rsq>rsq_0
),]
cat('\n\n\n -----------------------Step ', k, '-----------------------\n\n',sep='')
setorderv(add_table, criteria, ifelse(criteria=='rsq', -1, 1))
if(out.print==T){
print(add_table[sign_correct==T, unique(c('add.var', 'coef','sign_correct','SE.p', 'rsq', 'SE.p.sig', criteria)), with=F], digits=2)
}
#choose the variable to add
# The best-ranked candidate (table is already sorted by `criteria`) that
# passes the sign check and the criterion-specific cut.
if(criteria=='aic'){
add_var <- add_table[aic_correct==T&sign_correct==T, ]$add.var[1]
}else if(criteria=='bic'){
add_var <- add_table[bic_correct==T&sign_correct==T, ]$add.var[1]
}else if(criteria=='LR.p'){
add_var <- add_table[LR.sig==T&sign_correct==T, ]$add.var[1]
}else if (criteria =='rsq'){
setorderv(add_table, criteria, -1)
add_var <- add_table[rsq_correct==T&sign_correct==T&SE.p.sig==T, ]$add.var[1]
}else{
add_var <- add_table[SE.p.sig==T&sign_correct==T, ]$add.var[1]
}
add_var <- as.character(add_var)
add_results <- list(all_models=add_table, add_var=add_var)
drop_results <- list()
if(length(add_var)>0&!is.na(add_var)){
vars <- c(vars, add_var)
vars_nm <- all.vars(as.formula(paste(y, paste(vars, collapse='+'), sep='')))
##Update candidate list
# Remove every transformation sharing the added variable's base series.
add_base <- candidate[Variable==add_var,]$Base
candidate <- candidate[Base!=add_base, ]
sum_0 <- add_list[[add_var]]
aic_0 <- add_table[add.var==add_var, ]$aic
bic_0 <- add_table[add.var==add_var, ]$bic
rsq_0 <- sum_0$adj.r.squared
cat('\n ---Add variable: ', add_var, '---\n', sep='')
if(out.print){cat('\n ---Selected model: ---\n')
print(add_list[[add_var]])}
## Remove insignificant coefficient if any
# Per-variable significance level: tier threshold, doubled (two-sided)
# when no expected sign is declared for the term.
sig_levels <- unlist(lapply(setdiff(vars, c("1")), function(x){
if(x%in%use_info$Variable){
thresh[use_info[Variable==x, ]$Tier]*ifelse(use_info[Variable==x, ]$Sign==0, 1, 2)
}else{
thresh[1]
}
}))
insig_ind0 <- which(pmax(add_list[[add_var]]$coefficients[-1,4]-sig_levels, 0)>0)
# NOTE(review): `vars[insig_ind0]%in%fix_vars0` compares variable names to
# fix_vars0, which the example above sets to indices — confirm fix_vars0 is
# meant to hold names here.
insig_ind <- setdiff(insig_ind0, insig_ind0[which(vars[insig_ind0]%in%fix_vars0)])
if(length(insig_ind)>0){
cat('\n ---Remove insignificant variable(s):---\n',sep='')
# Backward pass: refit with each insignificant term removed and drop the
# one preferred by the criterion, repeating until none remain.
while(length(insig_ind)>0){
drop_list <- list()
drop_table <- NULL
for(x in insig_ind){
formula_drop <- as.formula(paste(y, paste(vars[-x], collapse='+'),sep=''))
vars_nm <- all.vars(formula_drop)
model_drop <- lm(formula_drop, data=data0)
drop_list[[length(drop_list)+1]] <- summary(model_drop)
gc();gc();
drop.var <- vars[x]
aic <- -2*as.numeric(logLik(model_drop))+2*attr(logLik(model_drop), 'df')
bic <- -2*as.numeric(logLik(model_drop))+log(nrow(data0))*attr(logLik(model_drop), 'df')
LR.p <- pchisq(aic-aic_0+2,1,lower.tail=F)
rsq <- summary(model_drop)$adj.r.squared
if(length(drop_table)==0){
drop_table <- data.table(drop.var, aic, bic, LR.p, rsq)
}else{
drop_table <- rbind(drop_table, data.table(drop.var, aic, bic, LR.p, rsq))
}
}
names(drop_list) <-vars[insig_ind]
# NOTE(review): sig_levels[x] uses the loop variable after the loop ends
# (the last x) for every row — confirm this is intentional.
drop_table <- drop_table[, ':='(
aic_correct=aic<=aic_0
,bic_correct=bic<=bic_0
,LR.sig=LR.p<=sig_levels[x]
, rsq_correct=rsq>=rsq_0
)]
if(out.print){print(drop_table)}
if(criteria=='aic'){
setorderv(drop_table, criteria, c(1))
drop_var <- drop_table[aic_correct==T, ]$drop.var[1]
}else if(criteria=='bic'){
setorderv(drop_table, criteria, c(1))
drop_var <- drop_table[bic_correct==T, ]$drop.var[1]
}else if(criteria=='LR.p'){
setorderv(drop_table, criteria, c(-1))
drop_var <- drop_table[LR.sig==F, ]$drop.var[1]
}else if(criteria=='rsq'){
setorderv(drop_table, criteria, c(-1))
drop_var <- drop_table[rsq_correct==T,]$drop.var[1]
}else{
insig_pval <- add_list[[add_var]]$coefficients[-1,4]-sig_levels
drop_var <- vars[intersect(which(insig_pval>0 &insig_pval==pmax(insig_pval)), insig_ind)]
}
drop_results[[length(drop_results)+1]] <- list(drop_table=drop_table, drop_var=drop_var)
if(is.na(drop_var)){drop_var <- NULL}
if(length(drop_var)>0){
if(drop_var%in%fix_vars0){
cat('\n No variable is dropped because initial list is fixed.')
insig_ind <- insig_ind[-which(names(insig_ind)==drop_var)]
}else{
cat('\n Drop variable: ', drop_var, '\n\n', sep='')
## Update candidate list:
# The dropped variable's whole base family becomes eligible again.
drop_base <- use_info[Variable==drop_var,]$Base
candidate <- rbind(candidate, use_info[Base==drop_base, ])
vars <- setdiff(vars, drop_var)
sig_levels <- unlist(lapply(vars, function(x){
if(x%in%use_info$Variable){
thresh[use_info[Variable==x, ]$Tier]*ifelse(use_info[Variable==x, ]$Sign==0, 1, 2)
}else{
thresh[1]
}
}))
insig_ind <- which(pmax(drop_list[[drop_var]]$coefficients[-1,4]-sig_levels, 0)>0)
sum_0 <- drop_list[[drop_var]]
aic_0 <- drop_table[drop.var==drop_var,]$aic
bic_0 <- drop_table[drop.var==drop_var,]$bic
}
}else{
cat('\n No variable is dropped because dropping will result in higher AIC (or BIC) or significant drop in likelihood.')
insig_ind <- NULL
}
}#end while
}
}else{
add_var <- NULL
cat('No variable added.')
}
result_list[[k]] <- list(add_results=add_results, drop_results=drop_results, final_model=sum_0)
gc();gc();
}
return(result_list)
}<file_sep>/PD/Wilshire.R
library(openxlsx)
library(lubridate)
# Read the raw Wilshire loan-level extract.
wb<-read.csv("Book1.csv", header = T)
# Sina fix rates and acquired
# Charge-off file: first charge-off date and amount per note.
wbco<-read.csv("wilshire charge offs cleaned.csv", header=T)
colnames(wbco)<-c("Note.Number","first_co_date","co_amt")
wb<-merge(wb,wbco, by="Note.Number", all.x=T)
rm(wbco)
# Default/event flag: non-accrual codes 2/4, or a charge-off.
# NOTE(review): `co_amt==1` treats co_amt as a 0/1 flag; the name suggests an
# amount (cf. BBCN, which uses amtChargedOff>0) — confirm against the CSV.
wb$Y<-0
wb$Y[which(wb$Non.Accrual.Code%in%c(2,4))]<-1
wb$Y[which(wb$co_amt==1)]<-1
# Sina fix NAICS code
# 2-digit NAICS sector extracted from the full code.
temp<-as.data.frame(wb$NAICS.Code)
temp$temp<-as.numeric(as.character(substr(as.character(temp[,1]),1,2)))
wb$temp_number<-temp$temp
rm(temp)
# Origination / maturity / file dates split into year, month and quarter
# (same construction as the BBCN script).
wb$Yr_origination<-year(mdy(wb$originationdate))
wb$Mn_origination<-month(mdy(wb$originationdate))
wb$Q_origination<-1
wb$Q_origination[which(wb$Mn_origination%in%c(4,5,6))]<-2
wb$Q_origination[which(wb$Mn_origination%in%c(7,8,9))]<-3
wb$Q_origination[which(wb$Mn_origination%in%c(10,11,12))]<-4
wb$Yr_maturity<-year(mdy(wb$maturitydate))
wb$Mn_maturity<-month(mdy(wb$maturitydate))
wb$Q_maturity<-1
wb$Q_maturity[which(wb$Mn_maturity%in%c(4,5,6))]<-2
wb$Q_maturity[which(wb$Mn_maturity%in%c(7,8,9))]<-3
wb$Q_maturity[which(wb$Mn_maturity%in%c(10,11,12))]<-4
wb$Yr_file<-year(mdy(wb$filedate))
wb$Mn_file<-month(mdy(wb$filedate))
wb$Q_file<-1
wb$Q_file[which(wb$Mn_file%in%c(4,5,6))]<-2
wb$Q_file[which(wb$Mn_file%in%c(7,8,9))]<-3
wb$Q_file[which(wb$Mn_file%in%c(10,11,12))]<-4
# Remaining term, loan age, total term and percent of term elapsed.
wb$ttm_m<-12*(wb$Yr_maturity-wb$Yr_file)+(wb$Mn_maturity-wb$Mn_file)
wb$ttm_q<-4*(wb$Yr_maturity-wb$Yr_file)+(wb$Q_maturity-wb$Q_file)
wb$loan_age_q<-4*(wb$Yr_file-wb$Yr_origination)+(wb$Q_file-wb$Q_origination)
wb$term_q<-4*(wb$Yr_maturity-wb$Yr_origination)+(wb$Q_maturity-wb$Q_origination)
wb$pob<-100*wb$loan_age_q/wb$term_q
# Sina fix Rate.Over.Split name
# First non-accrual file date per note (numeric days since 1970-01-01).
unique_accountNo<-unique(wb$Note.Number)
temp<-0
for(i in 1:length(unique_accountNo)){
temp[i]<-as.Date(min(mdy(wb$filedate[which(wb$Note.Number==unique_accountNo[i]&wb$Y==1)])), origin="1970-01-01")
}
temp<-cbind(unique_accountNo,as.Date(temp,origin = "1970-01-01"))
colnames(temp)<-c("Note.Number","min_nonAccDate")
wb<-merge(wb,temp,by="Note.Number")
rm(i)
rm(temp)
rm(unique_accountNo)
# First "event" date: the earlier of the first charge-off date and the first
# non-accrual date, as numeric days since epoch.
wb$f_nonAccDate<-0
wb$f_nonAccDate[which(is.na(wb$first_co_date)==T)]<-wb$min_nonAccDate[which(is.na(wb$first_co_date)==T)]
# FIX: the original used min(), which collapses both whole vectors to one
# scalar and assigns that single value to every row; pmin() is the intended
# element-wise parallel minimum.  first_co_date is parsed with ymd() (the
# same format the deleter step below assumes) instead of as.numeric() on the
# raw column, which yields NAs (character) or factor level codes.
# na.rm = TRUE lets a missing non-accrual date fall back to the CO date.
has_co <- which(is.na(wb$first_co_date)==F)
wb$f_nonAccDate[has_co] <- pmin(as.numeric(ymd(wb$first_co_date[has_co])),
                                as.numeric(wb$min_nonAccDate[has_co]),
                                na.rm = TRUE)
# Keep observations at or before the first charge-off date (or with no
# charge-off at all).
wb$deleter<-1
wb$deleter[which(is.na(wb$first_co_date)==T)]<-0
wb$deleter[which(mdy(wb$filedate)<=ymd(wb$first_co_date))]<-0
wb<-wb[which(wb$deleter==0),]
# Same, relative to the first non-accrual date.
wb$deleter<-1
wb$deleter[which(is.na(wb$min_nonAccDate)==T)]<-0
wb$deleter[which(mdy(wb$filedate)<=as.Date(wb$min_nonAccDate,origin="1970-01-01"))]<-0
wb<-wb[which(wb$deleter==0),]
# Sample window and positive remaining term.
wb<-wb[which(wb$Yr_maturity>2006),]
wb<-wb[which(wb$ttm_q>0),]
# NOTE(review): "Wilshere" is misspelled but kept as-is — downstream code may
# match on this exact label.
wb$boh_id<-"Wilshere"
# Restrict to modeled loan class codes.
wb<-wb[which(wb$Class.Code%in%c(2,3,5,6,10,13,20,21,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,59,60,61,63,99)),]
# Require non-missing, non-zero balance and rate fields.
wb<-wb[which(is.na(wb$NAP...NAIP...NAIP.in.GL)==F&wb$NAP...NAIP...NAIP.in.GL!=0),]
wb<-wb[which(is.na(wb$Rate.Over.Split)==F&wb$Rate.Over.Split!=0),]
# Manual property-code corrections override the raw Property.Type.Code.
correction<-read.csv("property_code_correctio.csv",header=T)
# Sina fix column names
wb<-merge(wb,correction,by="Note.Number", all.x=T)
wb$propertyCodeNew<-wb$Property.Type.Code
wb$propertyCodeNew[which(is.na(wb$New_Code)==F)]<-wb$New_Code[which(is.na(wb$New_Code)==F)]
# Sina fix line 605
# Map Wilshire's rating codes onto the BBCN boh_rating scale:
# 1000-5000 -> Pass 1-4; 6000-9000 -> 1000/2000/3000/4000 (criticized).
wb$boh_rating<-0
wb$boh_rating[which(wb$Loan.Rating.Code1==0)]<-0
wb$boh_rating[which(wb$Loan.Rating.Code1==1000)]<-1
wb$boh_rating[which(wb$Loan.Rating.Code1==2000)]<-2
wb$boh_rating[which(wb$Loan.Rating.Code1==3000)]<-3
wb$boh_rating[which(wb$Loan.Rating.Code1==4000)]<-4
wb$boh_rating[which(wb$Loan.Rating.Code1==5000)]<-4
wb$boh_rating[which(wb$Loan.Rating.Code1==6000)]<-1000
wb$boh_rating[which(wb$Loan.Rating.Code1==7000)]<-2000
wb$boh_rating[which(wb$Loan.Rating.Code1==8000)]<-3000
wb$boh_rating[which(wb$Loan.Rating.Code1==9000)]<-4000
<file_sep>/S0_01_MEV_Data_Transformation_111717.Rmd
---
title: "S2_01_MEV_Data_Transformation"
author: "KPMG"
date: "November 17, 2017"
output:
html_document:
toc: true
theme: default
toc_depth: 3
toc_float:
collapsed: false
smooth_scroll: false
---
This code reads the original historical macroeconomic variables and stress scenarios and performs transformations to generate a pool of candidate macroeconomic variables for NCO model development.
* Data sources include macroeconomic variables provided by the Fed for CCAR stress testing, along with additional variables (Regional, Employment and Industrial Production) or scenarios that were provided by BoH (using Moody's Analytics).
* Transformation Includes:
Label | Transformation
------|-------
(none)| No transformation performed, keep original variable
qd | Taking difference from previous quarter
yd | Taking difference from last year's same quarter
log | Taking the natural logarithm
qg | The percentage change from previous quarter
yg | The percentage change from last year the same quarter
yoy | Cumulative annual year over year rate (based on the past 4 quarters)
* In addition, the 1-4 quarters lag is taken for all transformed variables.
## 1 Environment Setting & Loading R packages
```{r,echo=F,include=FALSE}
### record the starting time (reported in the final "Time Spent" section)
time0<-Sys.time()
#####################################
#### set directory ####
#####################################
# pth_dir = "C:\\Users\\kdoughan\\Documents\\01_Engagements\\Bank of Hope\\10_2017_Bank of Hope Commerial Model Build\\2 - Code\\MEVs"
pth_dir = "C:\\Users\\OL07805\\Desktop\\Desktop Things\\Net Charge Off Models\\DFAST Production Run 2018";
### Required packages: install any that are missing, then load them all.
requirements <- c("openxlsx","data.table","lubridate","ggplot2","scales","zoo","tseries","urca","forecast","CADFtest","leaps","car","qcc","lattice","latticeExtra","dplyr")
#install packages if you have not
for(requirement in requirements){if( !(requirement %in% installed.packages())) install.packages(requirement)}
#load all required packages
lapply(requirements, require, character.only=T);
### Dependencies
source(paste(pth_dir,"\\S3_00_dev-support.R", sep=""))
# NOTE: the sixteen explicit library() calls that followed were removed —
# every one of those packages is already in `requirements` and loaded by the
# lapply(require) line above, so they were pure duplication.
```
## 2 Import Data & Cleaning
### 2.1 Regional Data
```{r, echo=FALSE, include=FALSE, warning=FALSE}
### Regional Data ##############################################################
# Reads the Moody's regional (California) workbook and splits it into one
# data.table per supervisory scenario: reg_baseline / reg_adverse / reg_severe.
# These tables are later joined onto the FRB data by transform() on qtr_dt.
# needs openxlsx package
# ! Caution ! make sure the column order still matches the following
# column variable scenaro description
# x1 qtr_dt scenario none
# x2 ca_unemp baseline FRB CCAR 2017 - Baseline : Labor: Unemployment Rate, (%, SA)
# x3 ca_unemp adverse FRB CCAR 2017 - Adverse : Labor: Unemployment Rate, (%, SA)
# x4 ca_unemp severe FRB CCAR 2017 - Severely Adverse : Labor: Unemployment Rate, (%, SA)
# x5 ca_hpi baseline FRB CCAR 2017 - Baseline : FHFA All Transactions Home Price Index, (1980Q1 = 100, SA)
# x6 ca_hpi adverse FRB CCAR 2017 - Adverse : FHFA All Transactions Home Price Index, (1980Q1 = 100, SA)
# x7 ca_hpi severe FRB CCAR 2017 - Severely Adverse : FHFA All Transactions Home Price Index, (1980Q1 = 100, SA)
# x8 ca_gsp baseline FRB CCAR 2017 - Baseline : Gross State Product: Total, (Bil. $, SAAR) Nominal
# x9 ca_gsp adverse FRB CCAR 2017 - Adverse : Gross State Product: Total, (Bil. $, SAAR) Nominal
# x10 ca_gsp severe FRB CCAR 2017 - Severely Adverse : Gross State Product: Total, (Bil. $, SAAR) Nominal
# x11 ca_real_gsp baseline FRB CCAR 2017 - Baseline : Gross State Product: Total, (Bil. Chained 2009 $, SAAR) Real
# x12 ca_real_gsp adverse FRB CCAR 2017 - Adverse : Gross State Product: Total, (Bil. Chained 2009 $, SAAR) Real
# x13 ca_real_gsp severe FRB CCAR 2017 - Severely Adverse : Gross State Product: Total, (Bil. Chained 2009 $, SAAR) Real
# x14 ca_income baseline FRB CCAR 2017 - Baseline : Income: Disposable Personal, (Mil. $, SAAR) Nominal
# x15 ca_income adverse FRB CCAR 2017 - Adverse : Income: Disposable Personal, (Mil. $, SAAR) Nominal
# x16 ca_income severe FRB CCAR 2017 - Severely Adverse : Income: Disposable Personal, (Mil. $, SAAR) Nominal
# x17 ca_real_income baseline FRB CCAR 2017 - Baseline : Disposable Personal Income, (Mil. 09$, SAAR) Real
# x18 ca_real_income adverse FRB CCAR 2017 - Adverse : Disposable Personal Income, (Mil. 09$, SAAR) Real
# x19 ca_real_income severe FRB CCAR 2017 - Severely Adverse : Disposable Personal Income, (Mil. 09$, SAAR) Real
# startRow=6 presumably skips the Moody's workbook header rows — confirm
# against the current workbook layout if the source file is regenerated.
raw_region_data = read.xlsx(
concat(pth_dir, "\\S0_00_Regional_Macrovariables_Moodys_2018.xlsx")
, sheet="Sheet1"
, colNames=FALSE
, startRow=6
, skipEmptyRows=FALSE
, skipEmptyCols=FALSE
, detectDates=TRUE
, check.names=TRUE
, na.strings="ND"
)
# Column selections per scenario (X1 = quarter date in every scenario).
reg_baseline_cols = c("X1", "X2", "X5", "X8", "X11", "X14", "X17")
reg_adverse_cols = c("X1", "X3", "X6", "X9", "X12", "X15", "X18")
reg_severe_cols = c("X1", "X4", "X7", "X10", "X13", "X16", "X19")
reg_new_col_names = c(
"qtr_dt"
, "ca_unemp"
, "ca_hpi"
, "ca_gsp"
, "ca_rgsp"
, "ca_inc"
, "ca_rinc"
)
# One data.table per scenario, renamed to the short model variable names.
reg_baseline = data.table(raw_region_data[, reg_baseline_cols])
reg_adverse = data.table(raw_region_data[, reg_adverse_cols])
reg_severe = data.table(raw_region_data[, reg_severe_cols])
setnames(reg_baseline, reg_baseline_cols, reg_new_col_names)
setnames(reg_adverse, reg_adverse_cols, reg_new_col_names)
setnames(reg_severe, reg_severe_cols, reg_new_col_names)
```
### 2.2 Employment Data
```{r, echo=FALSE, include=FALSE, warning=FALSE}
### Employment Data ############################################################
# Reads the Moody's non-farm employment workbook and produces one
# data.table per supervisory scenario: empl_baseline / empl_adverse /
# empl_severe, each with columns qtr_dt, empl (US), ca_empl (California).
# ! Caution ! make sure the column order still matches the following:
#   X1 = qtr_dt; X2/X3/X4 = US employment (baseline/adverse/severe);
#   X5/X6/X7 = CA employment (baseline/adverse/severe).
raw_empl_data = read.xlsx(
concat(pth_dir, "\\S0_00_Non_Farm_Employment_Moodys_2018.xlsx")
, sheet="Sheet1"
, colNames=FALSE
, startRow=6
, skipEmptyRows=FALSE
, skipEmptyCols=FALSE
, detectDates=TRUE
, check.names=TRUE
, na.strings="ND"
)
empl_new_col_names = c("qtr_dt", "empl", "ca_empl")
# Raw-column selection for each scenario table (X1 is the quarter date).
empl_scenario_cols = list(
empl_baseline = c("X1", "X2", "X5")
, empl_adverse = c("X1", "X3", "X6")
, empl_severe = c("X1", "X4", "X7")
)
# Build and rename each scenario table in one pass.
for (empl_tbl_name in names(empl_scenario_cols)) {
empl_cols = empl_scenario_cols[[empl_tbl_name]]
empl_scen_dt = data.table(raw_empl_data[, empl_cols])
setnames(empl_scen_dt, empl_cols, empl_new_col_names)
assign(empl_tbl_name, empl_scen_dt)
}
```
### 2.3 Industrial Production
```{r, echo=FALSE, include=FALSE, warning=FALSE}
### Industrial Production Data ############################################################
# Reads the Moody's industrial production workbook into one table per
# scenario: indus_prod_baseline / indus_prod_adverse / indus_prod_severe.
# ! Caution ! make sure the column order still matches the following
# r_name var scenario region description:
# X1 qtr_dt none none Date
# X2 indus_prod baseline us FRB CCAR 2017 - Baseline: Industrial Production: Total, (Index 2012=100, SA)
# X3 indus_prod adverse us FRB CCAR 2017 - Adverse: Industrial Production: Total, (Index 2012=100, SA)
# X4 indus_prod severe us FRB CCAR 2017 - Severely Adverse: Industrial Production: Total, (Index 2012=100, SA)
raw_indus_prod_data = read.xlsx(
concat(pth_dir, "\\S0_00_Moodys_Industrial_Production_2018.xlsx")
, sheet="Sheet1"
, colNames=FALSE
, startRow=6
, skipEmptyRows=FALSE
, skipEmptyCols=FALSE
, detectDates=TRUE
, check.names=TRUE
, na.strings="ND"
)
# X1 = quarter date in every scenario; X2-X4 = scenario-specific series.
indus_prod_baseline_cols = c("X1", "X2")
indus_prod_adverse_cols = c("X1", "X3")
indus_prod_severe_cols = c("X1", "X4")
indus_prod_new_col_names = c(
"qtr_dt"
, "indus_prod"
)
indus_prod_baseline = data.table(raw_indus_prod_data[, indus_prod_baseline_cols])
indus_prod_adverse = data.table(raw_indus_prod_data[, indus_prod_adverse_cols])
indus_prod_severe = data.table(raw_indus_prod_data[, indus_prod_severe_cols])
setnames(indus_prod_baseline, indus_prod_baseline_cols, indus_prod_new_col_names)
setnames(indus_prod_adverse, indus_prod_adverse_cols, indus_prod_new_col_names)
setnames(indus_prod_severe, indus_prod_severe_cols, indus_prod_new_col_names)
```
### 2.4 S&P500 & Housing Starts
```{r, echo=FALSE, include=FALSE, warning=FALSE}
### S&P500 & Housing Starts Data ###########################################################
# Reads the Moody's S&P 500 / housing-starts workbook into one table per
# scenario: sp500_baseline / sp500_adverse / sp500_severe.
# ! Caution ! make sure the column order still matches the following
# r_name var scenario region description
# X1 qtr_dt none none Date
# X2 sp500 baseline us FRB CCAR 2017 - Baseline: S&P 500 Composite: Price Index - Average, (Index 1941-43=10, NSA)
# X3 sp500 adverse us FRB CCAR 2017 - Adverse: S&P 500 Composite: Price Index - Average, (Index 1941-43=10, NSA)
# X4 sp500 severe us FRB CCAR 2017 - Severely Adverse: S&P 500 Composite: Price Index - Average, (Index 1941-43=10, NSA)
# X5 house_start baseline us FRB CCAR 2017 - Baseline: Housing Starts: Total, (Mil. #, SAAR)
# X6 house_start adverse us FRB CCAR 2017 - Adverse: Housing Starts: Total, (Mil. #, SAAR)
# X7 house_start severe us FRB CCAR 2017 - Severely Adverse: Housing Starts: Total, (Mil. #, SAAR)
# X8 ca_house_start baseline ca FRB CCAR 2017 - Baseline : Housing Starts: Total, (#, SAAR)
# X9 ca_house_start adverse ca FRB CCAR 2017 - Adverse : Housing Starts: Total, (#, SAAR)
# X10 ca_house_start severe ca FRB CCAR 2017 - Severely Adverse : Housing Starts: Total, (#, SAAR)
raw_sp500_data = read.xlsx(
concat(pth_dir, "\\S0_00_S&P500_Housing_Starts_Moodys_2018.xlsx")
, sheet="Sheet1"
, colNames=FALSE
, startRow=6
, skipEmptyRows=FALSE
, skipEmptyCols=FALSE
, detectDates=TRUE
, check.names=TRUE
, na.strings="ND"
)
# X1 = quarter date in every scenario; remaining columns per mapping above.
sp500_baseline_cols = c("X1", "X2", "X5", "X8")
sp500_adverse_cols = c("X1", "X3", "X6", "X9")
sp500_severe_cols = c("X1", "X4", "X7", "X10")
sp500_new_col_names = c(
"qtr_dt"
, "sp500", "house_start", "ca_house_start"
)
sp500_baseline = data.table(raw_sp500_data[, sp500_baseline_cols])
sp500_adverse = data.table(raw_sp500_data[, sp500_adverse_cols])
sp500_severe = data.table(raw_sp500_data[, sp500_severe_cols])
setnames(sp500_baseline, sp500_baseline_cols, sp500_new_col_names)
setnames(sp500_adverse, sp500_adverse_cols, sp500_new_col_names)
setnames(sp500_severe, sp500_severe_cols, sp500_new_col_names)
```
### 2.5 FRB Data (Historical & Forecast)
```{r, echo=FALSE, include=FALSE, warning=FALSE}
# FRB Data #####################################################################
# Collect historical and Forecast data
# Four CSVs: realized history plus one supervisory forecast table per
# scenario; these are renamed/combined later by get_frb_data().
raw_historic = fread(concat(pth_dir, "\\S0_00_Historic_Domestic_2018.csv"))
raw_baseline = fread(concat(pth_dir, "\\S0_00_Table_2A_Supervisory_Baseline_Domestic_2018.csv"))
raw_adverse = fread(concat(pth_dir, "\\S0_00_Table_3A_Supervisory_Adverse_Domestic_2018.csv"))
raw_severe = fread(concat(pth_dir, "\\S0_00_Table_4A_Supervisory_Severely_Adverse_Domestic_2018.csv"))
```
## 3 Transformations
The variables from the raw input datasets are classified into different categories depending on their nature. Each category will undergo a different type of transformation. See the Phase 1 transformations below:
* Growth Rate: Variable such as GDP growth (keep original and yoy)
* Difference, Log & Log Difference: Variables like Unemployment Rate (keep original, log, log difference qd and yd).
* Non-stationary Growth: Variables like Dow Jones (remove original, keep qg and yg)
```{r, echo=FALSE, include=FALSE, warning=FALSE}
# Transformations ##############################################################
get_frb_data = function(raw_frb_data) {
# Rename the verbose FRB CCAR column headers to short model variable
# names, add four interest-rate spreads, and attach an end-of-quarter
# Date column (qtr_dt) parsed from the "YYYY QN" label.
# Returns a new data.table; the input table is left untouched (copy()).
name_map = c(
"Real GDP growth" = "rgdp_grw"
, "Nominal GDP growth" = "gdp_grw"
, "Real disposable income growth" = "rinc_grw"
, "Nominal disposable income growth" = "inc_grw"
, "Unemployment rate" = "unemp"
, "CPI inflation rate" = "cpi"
, "3-month Treasury rate" = "yld_03m"
, "5-year Treasury yield" = "yld_05y"
, "10-year Treasury yield" = "yld_10y"
, "BBB corporate yield" = "yld_bbb"
, "Mortgage rate" = "mort"
, "Prime rate" = "prime"
, "Dow Jones Total Stock Market Index (Level)" = "dow"
, "House Price Index (Level)" = "hpi"
, "Commercial Real Estate Price Index (Level)" = "crei"
, "Market Volatility Index (Level)" = "vix"
, "Date" = "qtr_date_string"
)
dat = copy(raw_frb_data)
setnames(dat, names(name_map), unname(name_map))
# Spreads derived from the renamed yield series.
dat[["bbb_spread"]] = dat[["yld_bbb"]] - dat[["yld_10y"]]
dat[["yld_spread"]] = dat[["yld_10y"]] - dat[["yld_03m"]]
dat[["mort_spread"]] = dat[["mort"]] - dat[["yld_10y"]]
dat[["prime_spread"]] = dat[["prime"]] - dat[["yld_03m"]]
# Quarter label "YYYY QN" -> "QN YYYY", then to an end-of-quarter Date
# (frac=1 picks the last day of the quarter).
quarter_label = paste(
substr(dat[["qtr_date_string"]], 6, 7)
, substr(dat[["qtr_date_string"]], 1, 4)
)
dat[["qtr_dt"]] = as.Date(as.yearqtr(quarter_label, format = "Q%q %Y"), frac=1)
dat
}
# History-only table plus three scenario tables, each stacking the realized
# history with one supervisory forecast path.
frb_historic = get_frb_data(raw_historic)
frb_baseline = get_frb_data(rbind(raw_historic, raw_baseline))
frb_adverse = get_frb_data(rbind(raw_historic, raw_adverse))
frb_severe = get_frb_data(rbind(raw_historic, raw_severe))
# Build the full candidate-MEV panel for one scenario.
# Joins the FRB table with the regional, employment, industrial-production
# and S&P500/housing tables on qtr_dt, then generates transformed variables:
# yoy (cumulative annual rate), qg/yg growth, qd/yd differences, log and log
# differences, non-linearity (NL) caps, EWMA smoothing, and 1-4 quarter lags.
# NOTE: this shadows base::transform within this document.
transform = function(raw_frb_data, reg_data, empl_data, indus_prod_data, sp500_data) {
# Add regional, empl variables, industrial production, and S&P500 & House Starts
# Nested data.table joins, all keyed on qtr_dt.
tf_data = indus_prod_data[empl_data[sp500_data[reg_data[raw_frb_data, on="qtr_dt"], on="qtr_dt"], on="qtr_dt"], on="qtr_dt"]
# Drop quarters before 1990Q2 (string compares against a Date column).
tf_data = tf_data[tf_data$qtr_dt>'1990-03-31',]
#Defining Variables Groups
#Growth Rate Group
grwrt_var <- c("rgdp_grw","gdp_grw","rinc_grw", "inc_grw")
#Non-stationary Growth Group (Percentage Change Transformation)
grw_var <- c("cpi", "dow","hpi","crei","ca_hpi","empl","ca_empl","ca_gsp","ca_rgsp","ca_inc","ca_rinc","indus_prod", "sp500", "house_start", "ca_house_start")
#Difference, Log and Log Difference Group (Difference Change Transformation)
dif_var <- c("yld_03m", "yld_05y", "yld_10y", "yld_bbb", "mort", "prime", "yld_spread", "bbb_spread", "mort_spread", "prime_spread", "unemp", "ca_unemp", "vix")
#Calculating Transformation Phase 1:
#Calculate Growth Rate Variables:
# yoy = geometric mean of the last 4 quarterly growth factors, annualized
# back to a quarterly percentage. First 3 quarters are NA (NA propagates
# through the ^(1/4) and *100 steps below).
for (name in grwrt_var){
yoy_nm = concat(name,"_yoy")
rate_vec = tf_data[[name]]/100
n = length(rate_vec)
tf_data[[yoy_nm]] = sapply(1:n, function(t) {
if (t < 4) { agr = NA }
else {
agr = 1
for (j in 0:3) {
agr = agr * (1 + rate_vec[t - j])
}
}
agr = (agr^(1/4)) - 1
agr = 100 * agr
agr
}
)
}
#Calculate Growth Variables
# qg = quarter-over-quarter percent change, yg = year-over-year percent
# change (gr() from dev-support.R).
for (name in grw_var) {
qg_nm = concat(name, "_qg")
yg_nm = concat(name, "_yg")
tf_data[[qg_nm]] = gr(tf_data[[name]])
tf_data[[yg_nm]] = gr(tf_data[[name]], lag=4)
}
#Calculate Difference & Log Variables
# Levels <= 0 are floored at 0.01 before taking logs to avoid -Inf/NaN.
for (name in dif_var) {
dq_nm = concat(name, "_qd")
dy_nm = concat(name, "_yd")
log_nm = concat(name,"_log")
log_qd_nm = concat(name, "_log_qd")
log_yd_nm = concat(name, "_log_yd")
tf_data[[dq_nm]] = delta(tf_data[[name]], lag=1)
tf_data[[dy_nm]] = delta(tf_data[[name]], lag=4)
tf_data[[log_nm]] = log(ifelse(tf_data[[name]]<=0,0.01,tf_data[[name]]))
tf_data[[log_qd_nm]] = delta(log(ifelse(tf_data[[name]]<=0,0.01,tf_data[[name]])), lag=1)
tf_data[[log_yd_nm]] = delta(log(ifelse(tf_data[[name]]<=0,0.01,tf_data[[name]])), lag=4)
}
# keep relevant columns
core_names = c(
"cpi_qg", "cpi_yg"
, "dow_qg", "dow_yg"
, "hpi_qg", "hpi_yg"
, "ca_hpi_qg", "ca_hpi_yg"
, "crei_qg", "crei_yg"
, "ca_rgsp_qg", "ca_rgsp_yg"
, "ca_gsp_qg", "ca_gsp_yg"
, "ca_rinc_qg", "ca_rinc_yg"
, "ca_inc_qg", "ca_inc_yg"
, "empl_qg", "empl_yg"
, "ca_empl_qg", "ca_empl_yg"
, "indus_prod_qg", "indus_prod_yg"
, "sp500_qg", "sp500_yg"
, "house_start_qg", "house_start_yg"
, "ca_house_start_qg", "ca_house_start_yg"
, "rgdp_grw", "rgdp_grw_yoy"
, "gdp_grw", "gdp_grw_yoy"
, "rinc_grw", "rinc_grw_yoy"
, "inc_grw", "inc_grw_yoy"
, "unemp", "unemp_qd", "unemp_yd", "unemp_log", "unemp_log_qd", "unemp_log_yd"
, "yld_03m", "yld_03m_qd", "yld_03m_yd", "yld_03m_log", "yld_03m_log_qd", "yld_03m_log_yd"
, "yld_05y", "yld_05y_qd", "yld_05y_yd", "yld_05y_log", "yld_05y_log_qd", "yld_05y_log_yd"
, "yld_10y", "yld_10y_qd", "yld_10y_yd", "yld_10y_log", "yld_10y_log_qd", "yld_10y_log_yd"
, "yld_bbb", "yld_bbb_qd", "yld_bbb_yd", "yld_bbb_log", "yld_bbb_log_qd", "yld_bbb_log_yd"
, "mort", "mort_qd", "mort_yd", "mort_log", "mort_log_qd", "mort_log_yd"
, "prime", "prime_qd", "prime_yd", "prime_log", "prime_log_qd", "prime_log_yd"
, "yld_spread", "yld_spread_qd", "yld_spread_yd", "yld_spread_log", "yld_spread_log_qd", "yld_spread_log_yd"
, "bbb_spread", "bbb_spread_qd", "bbb_spread_yd", "bbb_spread_log", "bbb_spread_log_qd", "bbb_spread_log_yd"
, "mort_spread", "mort_spread_qd", "mort_spread_yd", "mort_spread_log", "mort_spread_log_qd", "mort_spread_log_yd"
, "prime_spread", "prime_spread_qd", "prime_spread_yd", "prime_spread_log", "prime_spread_log_qd", "prime_spread_log_yd"
, "vix", "vix_qd", "vix_yd", "vix_log", "vix_log_qd", "vix_log_yd"
, "ca_unemp", "ca_unemp_qd", "ca_unemp_yd", "ca_unemp_log", "ca_unemp_log_qd", "ca_unemp_log_yd"
)
tf_data = tf_data[, c("qtr_dt", core_names), with=FALSE]
#Define Non-Linearity Transformation
# For CRE-Price Index, Dow Jones, Real GDP
# NL variants zero out positive values, keeping only downside movements.
find.list <- list("dow_","crei_","rgdp_","gdp_")
find.string <- paste(unlist(find.list),collapse = "|")
for (name in core_names[grepl(find.string,core_names)]){
NL_nm = concat(name, "_NL")
tf_data[[NL_nm]] = ifelse(tf_data[[name]]>0,0,tf_data[[name]])
}
#Run the EWMA on Phase 1 Transformations
# Leading NAs produced by the qd/qg, yoy, and yd/yg transformations are
# back-filled with the first observed value so ewmaSmooth() can start.
find.listq <- list("_qd","_qg")
find.stringq <- paste(unlist(find.listq),collapse = "|")
find.listy <- list("_yd","_yg")
find.stringy <- paste(unlist(find.listy),collapse = "|")
for (name in core_names) {
EWMA2_nm = concat(name, "_EWMA2")
EWMA4_nm = concat(name, "_EWMA4")
tmp = tf_data[[name]]
if (grepl(find.stringq,name)){
tmp[1]=tmp[2]
}
if (grepl("_yoy",name)){
tmp[1:3]=tmp[4]
}
if (grepl(find.stringy,name)){
tmp[1:4]=tmp[5]
}
# lambda 0.66 ~ "2-quarter" smoothing, 0.4 ~ "4-quarter" smoothing.
tf_data[[EWMA2_nm]] = ewmaSmooth(tf_data[['qtr_dt']], tmp,lambda = 0.66)$y
tf_data[[EWMA4_nm]] = ewmaSmooth(tf_data[['qtr_dt']], tmp,lambda = 0.4)$y
}
#Apply Lag on All Variables
# 1-4 quarter lags of every column except the key/scenario columns.
for (name in colnames(select(select(tf_data,-contains("Scenario")),-contains("qtr_dt")))) {
# Define Lags (1-4):
tf_data[[concat(name, "_lag", 1)]] = shift(tf_data[[name]], n=1)
tf_data[[concat(name, "_lag", 2)]] = shift(tf_data[[name]], n=2)
tf_data[[concat(name, "_lag", 3)]] = shift(tf_data[[name]], n=3)
tf_data[[concat(name, "_lag", 4)]] = shift(tf_data[[name]], n=4)
}
tf_data
}
# Build the transformed MEV panel for each scenario. The historic panel
# reuses the baseline regional/employment/industrial/S&P inputs —
# presumably identical over the historical window; confirm if inputs change.
historic = transform(frb_historic, reg_baseline, empl_baseline, indus_prod_baseline, sp500_baseline)
baseline = transform(frb_baseline, reg_baseline, empl_baseline, indus_prod_baseline, sp500_baseline)
adverse = transform(frb_adverse, reg_adverse, empl_adverse, indus_prod_adverse, sp500_adverse)
severe = transform(frb_severe, reg_severe, empl_severe, indus_prod_severe, sp500_severe)
```
## 4 MEV Aggregation & Output
```{r, echo=FALSE, include=FALSE, warning=FALSE}
######################### MEV Aggregation (in one csv) #########################
#Removing Historic Values from Different Scenarios
# Scenario tables keep only the forecast horizon (after 2017Q4); the
# Historic table supplies everything up to that date.
baseline = baseline[baseline$qtr_dt>'2017-12-31',]
adverse = adverse[adverse$qtr_dt>'2017-12-31',]
severe = severe[severe$qtr_dt>'2017-12-31',]
#Adding the Scenario Type to each
historic <- cbind(Scenario='Historic',historic)
baseline <- cbind(Scenario='Baseline',baseline)
adverse <- cbind(Scenario='Adverse',adverse)
severe <- cbind(Scenario='Severe',severe)
#Aggregating in one data table
mevdata <- rbind(historic,baseline,adverse,severe)
#Editing Date name
names(mevdata)[which(names(mevdata)=='qtr_dt')] = 'Date'
################### Compiled & Transformed MEV Output ####################
#Writing the MEV csv file
write.csv(mevdata,file=concat(pth_dir,"\\S0_09_MEV_data_transformed_111717.csv"), row.names=F)
#Writing an MEV (CREI) sample with all transformations to Check math
# tmp_MEV <- mevdata %>% select (Date, Scenario, contains("vix"))
# tmp_MEV <- tmp_MEV[Scenario%in%c('Historic','Baseline'),,]
# write.csv(tmp_MEV,file=concat(pth_dir,"\\MEV_data_transformed_Sample_111017_vix.csv"), row.names=F)
#Writing the MEV Information into csv
# var_info catalogues every transformed variable: its base series,
# economic category, model tier, and expected sign versus losses.
var_names <- colnames(mevdata[,-c(1:2)])
var_info=as.data.frame(matrix(0,length(var_names),5))
names(var_info) <- c("name", "base", "category", "tier", "sign")
var_info[,1] <- var_names
# Strip every transformation suffix to recover the base series name.
find.list <- list("_lag1","_lag2","_lag3","_lag4","_qd","_yd","_yg","_qg","_log","_EWMA2","_EWMA4","_yoy","_NL")
find.string <- paste(unlist(find.list),collapse = "|")
var_info[,2] <- gsub(find.string,'', var_info[,1])
var_info[,3] <- ifelse(var_info[,2]%in%c('yld_03m','yld_05y','yld_10y','yld_bbb','mort','prime','yld_spread','bbb_spread','mort_spread','prime_spread'),'Bond Market', ifelse(var_info[,2]%in%c("empl","unemp","ca_empl","ca_unemp"),'Employment', ifelse(var_info[,2]%in%c("ca_rgsp","ca_gsp","ca_rinc","ca_inc","rgdp_grw","gdp_grw","rinc_grw","inc_grw"),"Income & Growth",ifelse(var_info[,2]%in%c("hpi","ca_hpi","crei","house_start","ca_house_start"),'Real Estate',ifelse(var_info[,2]%in%c("dow","vix","sp500"),'Stock Market',ifelse(var_info[,2]=='cpi','Inflation','Others'))))))
var_info[,4] <- ifelse (var_info[,3]=='Stock Market',2,1)
var_info[,5] <- ifelse (var_info[,2]%in%c('yld_03m','yld_05y','yld_10y','yld_bbb','mort','prime','bbb_spread','mort_spread', 'prime_spread','vix','unemp','ca_unemp'),'+',ifelse(var_info[,2]%in%c('empl','ca_empl',"ca_rgsp","ca_gsp","ca_rinc", "ca_inc","rgdp_grw", "gdp_grw", "rinc_grw", "inc_grw","indus_prod","dow","crei","hpi","ca_hpi","sp500","house_start","ca_house_start"),'-','+/-'))
## The Tier and Sign will be defined manually.
write.csv(var_info, file=concat(pth_dir, "\\S0_09_vars_info_111717.csv"),row.names = F)
```
## 5 Stationarity Test for MEV
```{r, echo=F,warning=F, fig.width=30}
# ADF (augmented Dickey-Fuller, drift specification) stationarity test for
# every non-lagged transformed MEV; results are written to csv and printed.
df=read.csv(concat(pth_dir,"\\S0_09_MEV_data_transformed_111717.csv"),header = TRUE)
var.names=colnames(df[,-c(1:2)])
var_info=as.data.frame(matrix(0, length(var.names), 5 ))
names(var_info) = c("var", "transf", "test_stat", "cval", "result")
var_info[,1]=var.names
# Flag transformed (non-level) variables in column 2.
var_info[grepl("qg", var_info$var),2] = TRUE
var_info[grepl("qd", var_info$var),2] = TRUE
var_info[grepl("yg", var_info$var),2] = TRUE
var_info[grepl("yd", var_info$var),2] = TRUE
var_info[grepl("log", var_info$var),2] = TRUE
var_info[grepl("yoy", var_info$var),2] = TRUE
var_info[grepl("EWMA", var_info$var),2] = TRUE
# Remove the lagged variables
# Sentinel value 33 in the cval column marks lagged variables for removal
# (lags are just shifted copies, so testing them would be redundant).
var_info[grepl("lag1", var_info$var),4] = 33
var_info[grepl("lag2", var_info$var),4] = 33
var_info[grepl("lag3", var_info$var),4] = 33
var_info[grepl("lag4", var_info$var),4] = 33
var_info2=var_info[-which(var_info$cval==33),]
## select only the core variables [currently showing both core and ]
# NOTE(review): transf only holds TRUE/0, never 3, so this filter is a
# deliberate no-op — both core and transformed variables are tested.
var_info3=var_info2[which(var_info2$transf!=3),]
dummy1=nrow(var_info3)
# ADF test with drift, BIC lag selection; store the test statistic and the
# 5% critical value (row 1, column 2 of the cval matrix) for each variable.
for (i in 1:dummy1){
a=var_info3[i,1]
b=summary(ur.df(na.remove(df[,which(colnames(df)==a)]), selectlags = c("BIC"), type = c("drift")))
var_info3[i,3]=b@teststat
var_info3[i,4]=b@cval[1,2]
}
# Test statistic above the critical value => cannot reject a unit root.
var_info3[which(var_info3$test_stat>var_info3$cval),5] = "Not Stationary"
var_info3[which(var_info3$test_stat<=var_info3$cval),5] = "Stationary"
write.csv(var_info3, file=concat(pth_dir,"\\S0_09_Stationarity_Test_Results_111717.csv"), row.names = F)
adf_res=var_info3
print(adf_res)
```
## 9 Time Spent
```{r,eval=TRUE, echo=FALSE, warning=FALSE, message=FALSE,fig.width=10, fig.height=4, results="asis"}
################################
### calculate the time spent ###
################################
# Report elapsed wall-clock time (in minutes) since time0 was recorded
# at the top of this document.
time1<-Sys.time()
cat('\n')
elapsed_minutes <- difftime(time1, time0, units='mins')
print(round(elapsed_minutes))
```<file_sep>/remediation/Stationary Testing.R
# 2018 DFAST Production Run Ending Balance CRE and C&I
# Bank of Hope
# Developer: <NAME>
# Start Date: 03/02/2018
# R version 3.4.3 (2017-11-30)
#
# Trains ending-balance growth models, runs stationarity tests on the
# dependent variables / residuals / regressors, forecasts 13 quarters of
# ending balances per scenario, and plots the results.
library(dplyr)
library(lubridate)
library(zoo)
library(data.table)
library(ggplot2)
setwd("C:/Users/OL07805/Desktop/Desktop Things/Ending Balance Model Final/Ending Balance Remediation Plan 09_25_18/Stationary Testing/")
# Helper functions used below (get.Stationary.Results, get_bal_forecast) —
# presumably defined in dev-support.R; confirm before refactoring.
source("dev-support.R")
#########################################################################################################################
### Read in Call Report Data
# Jump-off (starting) balances per segment come from the 2017-12-31 FFIEC
# call report, RC-C Part I schedule.
# Read in raw file of RCCI Schedule
cr1 <- fread("FFIEC CDR Call Schedule RCCI 12312017.txt")
# Bank of Hope IDRSSD
idRSSD <- 671464
# Keep only Bank of Hope's row.
cr1 <- cr1[IDRSSD == idRSSD]
# Segment codes and their call-report item IDs (positional mapping):
# MF, NOO, OO, CnI
# RCON1460, RCONF160, RCONF161, RCON1766
startBal <- as.data.frame(cbind(c("mf","oo","no","ci"),c("RCON1460","RCONF160","RCONF161","RCON1766")))
names(startBal) <- c("segment","crID")
# Get relevant columns, I also checked them against the actual pdf as well, they match
startBal$balance <- as.numeric(cr1[,mget(as.character(startBal$crID))])
write.csv(startBal,"startBal.csv",row.names = F)
#########################################################################################################################
### Read in relevant datasets
# Read in training data
boh <- readRDS("data-boh.RDS")
# Read in new macroeconomic data (one table per supervisory scenario,
# plus an idiosyncratic severe variant).
base <- readRDS("econ-data-baseline.RDS")
adverse <- readRDS("econ-data-adverse.RDS")
severe <- readRDS("econ-data-severe.RDS")
idios <- readRDS("econ-data-severe_Idiosyncratic.RDS")
#########################################################################################################################
### Train the models
# Define model variables and collapse with '+' for formula
ciVars <- paste(c("gdp_ag_lag1","ca_rinc_ag_lag3","inc_qg_lag2"),collapse = "+")
ipVars <- paste(c("crei_eg_lag4","dow_ya"),collapse = "+")
ooVars <- "crei_eg_lag2"
# Train models
# C&I uses a longer history (from 2003Q1); the CRE segments start 2007Q2.
ciModel <- lm(formula = paste0("ldiff_ci ~ ",ciVars)
,data = boh[qtr_dt >= "2003-03-31" & qtr_dt <= "2016-12-31"])
ipModel <- lm(formula = paste0("ldiff_ip ~ ",ipVars)
,data = boh[qtr_dt >= "2007-06-30" & qtr_dt <= "2016-12-31"])
ooModel <- lm(formula = paste0("ldiff_oo ~ ",ooVars)
,data = boh[qtr_dt >= "2007-06-30" & qtr_dt <= "2016-12-31"])
# Final stationary table
finalStationaryTable <- as.data.frame(c())
# loop for each model
# For each segment, collect stationarity results (get.Stationary.Results,
# defined in dev-support.R — implementation not shown here) for the
# dependent variable and for the fitted model's residuals.
for(i in c("ci","ip","oo")){
if(i == "ci"){
begDate <- "2003-03-31"
}
else{
begDate <- "2007-06-30"
}
# First the y variable
depVar <- get.Stationary.Results(paste0("ldiff_",i),boh[qtr_dt >= begDate & qtr_dt <= "2016-12-31"],sigLevel = 0.1)
# Then the residuals
residVar <- get.Stationary.Results("residuals",get(paste0(i,"Model")),sigLevel = 0.1)
residVar$Variable <- paste0(i,"_resid")
finalStationaryTable <- rbind(finalStationaryTable,depVar,residVar)
}
#########################################################################################################################
# do stationary tests
# Define vars to pull and perform testing on (keep CI separate since different timeframe)
ciVarStat <- c("gdp_ag_lag1","ca_rinc_ag_lag3","inc_qg_lag2")
modelVarStat <- c("crei_eg_lag4","dow_ya","crei_eg_lag2")
# Perform testing
# Regressor stationarity over the same estimation windows used in lm() above.
ciStationary <- get.Stationary.Results(ciVarStat,boh[qtr_dt >= "2003-03-31" & qtr_dt <= "2016-12-31"],sigLevel = 0.1)
restStationary <- get.Stationary.Results(modelVarStat,boh[qtr_dt >= "2007-06-30" & qtr_dt <= "2016-12-31"],sigLevel = 0.1)
finalStationaryTable <- rbind(finalStationaryTable, ciStationary,restStationary)
write.csv(finalStationaryTable,"finalStationaryTable.csv",row.names = F)
#########################################################################################################################
### Predict new growth rates and apply to ending balance.
# Filter data so we only have relevant 9 quarter forecast
# NOTE(review): comment says 9 quarters but the table below is indexed
# 1:13 — the horizon is 13 quarters; confirm which is intended.
base <- base[qtr_dt >= "2018-03-31"]
adverse <- adverse[qtr_dt >= "2018-03-31"]
severe <- severe[qtr_dt >= "2018-03-31"]
idios <- idios[qtr_dt >= "2018-03-31"]
# Define segs for loop
segs <- c("ci","ip","oo")
# Define table to store forecasting information
forecastTable <- as.data.frame(c())
forecastTable[1:13,"qtr_dt"] <- as.yearqtr(unique(base$qtr_dt))
forecastTable$qtr_dt <- as.yearqtr(forecastTable$qtr_dt)
# Loop through each segment
for(i in segs){
# If statements to choose the right model
if(i == "ci"){
fit <- ciModel
}
else if(i == "ip"){
fit <- ipModel
}
else if(i == "oo"){
fit <- ooModel
}
# After choosing model, get new forecast information for growth rates
forecastTable[1:13,paste0("grw_base_",i)] <- predict(fit,base)
forecastTable[1:13,paste0("grw_adverse_",i)] <- predict(fit,adverse)
forecastTable[1:13,paste0("grw_severe_",i)] <- predict(fit,severe)
forecastTable[1:13,paste0("grw_idiosyncratic_",i)] <- predict(fit,idios)
# After getting growth rates, get ending balance
# If income producing, get ending balance for mf and non owner using ip growth rates
# (get_bal_forecast compounds the jump-off balance forward; /1e6 scales to $B)
if(i == "ip"){
for(j in c("mf","no")){
forecastTable[1:13,paste0("eb_base_",j)] <- get_bal_forecast(startBal$balance[startBal$segment == j],forecastTable[1:13,paste0("grw_base_",i)])/1000000
forecastTable[1:13,paste0("eb_adverse_",j)] <- get_bal_forecast(startBal$balance[startBal$segment == j],forecastTable[1:13,paste0("grw_adverse_",i)])/1000000
forecastTable[1:13,paste0("eb_severe_",j)] <- get_bal_forecast(startBal$balance[startBal$segment == j],forecastTable[1:13,paste0("grw_severe_",i)])/1000000
forecastTable[1:13,paste0("eb_idiosyncratic_",j)] <- get_bal_forecast(startBal$balance[startBal$segment == j],forecastTable[1:13,paste0("grw_idiosyncratic_",i)])/1000000
}
}
# Otherwise just use relevant segment to get ending balance
else{
forecastTable[1:13,paste0("eb_base_",i)] <- get_bal_forecast(startBal$balance[startBal$segment == i],forecastTable[1:13,paste0("grw_base_",i)])/1000000
forecastTable[1:13,paste0("eb_adverse_",i)] <- get_bal_forecast(startBal$balance[startBal$segment == i],forecastTable[1:13,paste0("grw_adverse_",i)])/1000000
forecastTable[1:13,paste0("eb_severe_",i)] <- get_bal_forecast(startBal$balance[startBal$segment == i],forecastTable[1:13,paste0("grw_severe_",i)])/1000000
forecastTable[1:13,paste0("eb_idiosyncratic_",i)] <- get_bal_forecast(startBal$balance[startBal$segment == i],forecastTable[1:13,paste0("grw_idiosyncratic_",i)])/1000000
}
}
write.csv(forecastTable,"2018_DFAST_EB_Forecasts.csv",row.names = F)
#########################################################################################################################
### Plot each segments growth rates and ending balances
forecastTable$qtr_dt <- as.Date(forecastTable$qtr_dt)
segLabels <- c("Commercial and Industrial","Multifamily","Non Owner Occupied","Owner Occupied")
segs <- c("ci","mf","no","oo")
rowCt <- 1
# Plot the ending-balance forecast paths for each segment, one png per
# segment, all four scenarios on the same axes.
# NOTE(review): despite the section comment, only eb_* (ending-balance)
# series are plotted here — the grw_* series are never charted.
for(i in segs){
plotA <- (ggplot(aes(x = qtr_dt),data = forecastTable)
+ geom_line(aes(y = get(paste0("eb_base_",i)),color = "Baseline"))
+ geom_line(aes(y = get(paste0("eb_adverse_",i)),color = "Adverse"))
+ geom_line(aes(y = get(paste0("eb_severe_",i)),color = "Severe"))
+ geom_line(aes(y = get(paste0("eb_idiosyncratic_",i)),color = "Idiosyncratic"),linetype = "dashed")
+ scale_color_manual(values = c("Baseline" = "green","Adverse" = "orange","Severe" = "red","Idiosyncratic" = "lightcoral"))
+ labs(y = "Ending Balance ($ Billions)",x = "Date",title = paste0(segLabels[rowCt]," Ending Balance"),color = "Scenario")
+ theme_minimal()
+ theme(plot.title = element_text(hjust = 0.5)))
#plot(plotA)
ggsave(paste0("plot_EB_Forecast_",segLabels[rowCt],".png"), plot=plotA, width=20, height=11, unit="cm", dpi=500)
rowCt <- rowCt + 1
}
#########################################################################################################################
<file_sep>/EBCRE/CRE_End_Bals_shared_08302017.R
###################################################################
# Project: Bank of Hope
# CRE Ending Balances
#
# Loads historical CRE/C&I ending balances, builds difference and
# growth-rate transformations, and prepares the macro scenario data
# used for ending-balance model selection.
###################################################################
#setwd("C:/Users/doxborrow/Desktop/BoH/Modeling/Ending Balance")
setwd("//useomvfs77/MCLP/Common/Clients/Bank of Hope/Model Development/Code/Ending Balance")
# Required packages. The previous version attached tseries and lmtest
# twice each; the duplicate library() calls have been removed (the second
# call was a no-op).
library(tseries)
library(urca)
library(lubridate)
library(forecast)
library(CADFtest)
library(leaps)
library(openxlsx)
library(car)
library(lmtest)
library(orcutt)
library(dplyr)
library(ggplot2)
library(data.table)
####################
# Historical CRE loan balances
#####################
#read in the raw data
#####################
#read in the raw data
endbal <- read.csv("Ending Balances.csv",header = TRUE, stringsAsFactors = F)
# Reorder the raw columns to: date, C&I balance, CRE balance.
endbal <- endbal[,c(4,2,3)]
names(endbal) <- c("date", "ci_bal", "cre_bal")
endbal <- as.data.frame(endbal)
# #Difference transformation
# Quarterly/annual differences (_qd/_yd) and log growth rates (_qg/_yg).
# NOTE(review): lag() here resolves to dplyr::lag (dplyr is attached
# above), which shifts the vector by 4 positions; base stats::lag would
# NOT produce the intended alignment. Keep dplyr loaded before this runs.
endbal$cre_qd <- c(NA, diff(endbal$cre_bal))
endbal$cre_yd <- endbal$cre_bal-lag(endbal$cre_bal, 4)
endbal$cre_qg <- c(NA, diff(log(endbal$cre_bal)))
endbal$cre_yg <- log(endbal$cre_bal)-lag(log(endbal$cre_bal), 4)
endbal$ci_qd <- c(NA, diff(endbal$ci_bal))
endbal$ci_yd <- endbal$ci_bal-lag(endbal$ci_bal, 4)
endbal$ci_qg <- c(NA, diff(log(endbal$ci_bal)))
endbal$ci_yg <- log(endbal$ci_bal)-lag(log(endbal$ci_bal), 4)
#making dataset ready for merge
# Derive year and calendar quarter from the m/d/y date string.
endbal$year <- year(mdy(endbal$date))
endbal$month <- month(mdy(endbal$date))
endbal$q[endbal$month %in% c(1,2,3)] <- 1
endbal$q[endbal$month %in% c(4,5,6)] <- 2
endbal$q[endbal$month %in% c(7,8,9)] <- 3
endbal$q[endbal$month %in% c(10,11,12)] <- 4
endbal$month <- NULL
#max_lag
# 12*(n/100)^(1/4): Schwert-style rule of thumb for the maximum lag
# length in unit-root testing — TODO confirm intended use downstream.
max_lag <- floor(12*(nrow(endbal)/100)^(1/4))
#######################
#read in the macro vars
#######################
######
#base
######
base <- read.csv("macro_base.csv", header = T)
# Trim each scenario to the 2003Q1-2018Q4 window.
aaa <- which(base$year==2003 & base$q==1)
bbb <- which(base$year==2018 & base$q==4)
base <- base[aaa:bbb,]
#########
#adverse
#########
adverse <- read.csv("macro_adverse.csv", header=T)
aaa <- which(adverse$year==2003 & adverse$q==1)
bbb <- which(adverse$year==2018 & adverse$q==4)
adverse <- adverse[aaa:bbb,]
########
#severe
########
severe <- read.csv("macro_severe.csv", header=T)
aaa <- which(severe$year==2003 & severe$q==1)
bbb <- which(severe$year==2018 & severe$q==4)
severe <- severe[aaa:bbb,]
##################
#development macro
##################
# Development sample: 2003Q1-2015Q4 of the baseline scenario. The first
# three columns are dropped for the model-selection input matrix —
# presumably the year/quarter/date identifiers; confirm against the csv.
D1 <- which(base$year==2003 & base$q==1)
D2 <- which(base$year==2015 & base$q==4)
macro_dev <- base[c(D1:D2), ]
macro_input <- macro_dev[,-c(1,2,3)]
macro_input <- macro_dev[,-c(1,2,3)]
########################################
# Build the variable-information table that drives the stepwise search.
# Columns: var  = candidate regressor name
#          tier = priority tier for the tiered stepwise search (1 first)
#          base = underlying base series the transform derives from
#          lag  = lag in quarters encoded in the variable name
#          diff = TRUE if the variable is a difference/growth transform
#          sign = expected coefficient sign (+1 / -1; 0 = unconstrained)
# NOTE: grepl() matches SUBSTRINGS, so assignment ORDER matters -- later,
# more specific patterns intentionally overwrite earlier, broader ones
# (e.g. spr10_q / spr10_y after spr10).
########################################
var.names <- colnames(macro_input)
var_info <- as.data.frame(matrix(0, length(var.names), 6 ))
names(var_info) <- c("var", "tier", "base", "lag", "diff", "sign")
var_info[,1] <- var.names
var_info[,5] <- 0
#diff
var_info[grepl("_qd", var_info$var),5] <- TRUE
var_info[grepl("_yd", var_info$var),5] <- TRUE
var_info[grepl("_qg", var_info$var),5] <- TRUE
var_info[grepl("_yg", var_info$var),5] <- TRUE
#lag
var_info[grepl("_lag1", var_info$var),4] <- 1
var_info[grepl("_lag2", var_info$var),4] <- 2
var_info[grepl("_lag3", var_info$var),4] <- 3
var_info[grepl("_lag4", var_info$var),4] <- 4
#var.base -- maps each transform back to its underlying base series
var_info[grepl("ngdp", var_info$var),3] <- "ngdp_g"
var_info[grepl("rgdp", var_info$var),3] <- "rgdp_g"
var_info[grepl("rdpi", var_info$var),3] <- "rdpi_g"
var_info[grepl("ndpi", var_info$var),3] <- "ndpi_g"
var_info[grepl("ur", var_info$var),3] <- "ur_diff"
var_info[grepl("cpi", var_info$var),3] <- "cpi"
var_info[grepl("tr3m", var_info$var),3] <- "tr3m_diff"
var_info[grepl("tr5y", var_info$var),3] <- "tr5yr_diff"
var_info[grepl("tr10y", var_info$var),3] <- "tr10yr_diff"
var_info[grepl("bbb", var_info$var),3] <- "bbb_diff"
var_info[grepl("mort", var_info$var),3] <- "mort_diff"
var_info[grepl("prime", var_info$var),3] <- "prime_diff"
var_info[grepl("cppi", var_info$var),3] <- "cppi_diff"
var_info[grepl("djtsmx", var_info$var),3] <- "djtsmx_diff"
var_info[grepl("vix", var_info$var),3] <- "vix"
var_info[grepl("hpi", var_info$var),3] <- "hpi_g"
var_info[grepl("spr10", var_info$var),3] <- "spread"
var_info[grepl("spr10_q", var_info$var),3] <- "spread_diff"
var_info[grepl("spr10_y", var_info$var),3] <- "spread_diff"
var_info[grepl("rgpdi_eq", var_info$var),3] <- "rgpdi_eq"
# NOTE(review): grepl("gpdi_eq") also matches "rgpdi_eq", so the next line
# overwrites the base just assigned for rgpdi_* rows -- confirm intended.
var_info[grepl("gpdi_eq", var_info$var),3] <- "gpdi_eq"
var_info[grepl("pfi_nonres", var_info$var),3] <- "pfi_nonres"
var_info[grepl("willreit", var_info$var),3] <- "willreit"
#sign -- expected direction of the effect on balance growth
var_info[grepl("gdp", var_info$var),6] <- 1
var_info[grepl("rdpi", var_info$var),6] <- 1
var_info[grepl("ndpi", var_info$var),6] <- 1
var_info[grepl("ur", var_info$var),6] <- -1
var_info[grepl("cpi", var_info$var),6] <- -1
var_info[grepl("tr3m", var_info$var),6] <- 0
var_info[grepl("tr5y", var_info$var),6] <- 0
var_info[grepl("tr10y", var_info$var),6] <- 0
var_info[grepl("mort", var_info$var),6] <- 0
var_info[grepl("prime", var_info$var),6] <- 0
var_info[grepl("bbb", var_info$var),6] <- 0
var_info[grepl("djtsmx", var_info$var),6] <- 1
var_info[grepl("hpi", var_info$var),6] <- 1
var_info[grepl("vix", var_info$var),6] <- -1
var_info[grepl("cppi", var_info$var),6] <- 1
var_info[grepl("spr10", var_info$var),6] <- -1
var_info[grepl("rgpdi_eqp", var_info$var),6] <- 1
var_info[grepl("gpdi_eqp", var_info$var),6] <- 1
var_info[grepl("pfi_nonres", var_info$var),6] <- 1
var_info[grepl("willreit", var_info$var),6] <- 1
# Tier -- search priority (tier 1 considered first by StepFun)
var_info[grepl("rgdp", var_info$var),2] <- 1
var_info[grepl("ngdp", var_info$var),2] <- 1
var_info[grepl("rdpi", var_info$var),2] <- 1
var_info[grepl("ndpi", var_info$var),2] <- 1
var_info[grepl("ur", var_info$var),2] <- 1
var_info[grepl("cpi", var_info$var),2] <- 3
var_info[grepl("tr3m", var_info$var),2] <- 2
var_info[grepl("tr5y", var_info$var),2] <- 2
var_info[grepl("tr10y", var_info$var),2] <- 2
var_info[grepl("mort", var_info$var),2] <- 3
var_info[grepl("prime", var_info$var),2] <- 3
var_info[grepl("bbb", var_info$var),2] <- 2
var_info[grepl("djtsmx", var_info$var),2] <- 2
var_info[grepl("hpi", var_info$var),2] <- 1
var_info[grepl("vix", var_info$var),2] <- 3
var_info[grepl("cppi", var_info$var),2] <- 1
var_info[grepl("spr10", var_info$var),2] <- 1
var_info[grepl("rgpdi_eq", var_info$var),2] <- 2
var_info[grepl("gpdi_eq", var_info$var),2] <- 2
var_info[grepl("pfi_nonres", var_info$var),2] <- 2
var_info[grepl("willreit", var_info$var),2] <- 2
#####################
# Variable Selection: assemble the development-window tables consumed by
# StepFun() -- b = response data, c = candidate regressors, a = var_info.
#####################
first.obs <- which(base$year==2003 & base$q==1)
ndata <- which(base$year==2015 & base$q==4)
date_col <- as.data.frame(base$Date[first.obs:ndata])
colnames(date_col) <- "Date"
aaa <- which(endbal$year==2003 & endbal$q==1)
bbb <- which(endbal$year==2015 & endbal$q==4)
# Drop date (col 1), year (col 12) and q (col 13); keep the balances and
# their transforms.
loan_input <- endbal[aaa:bbb,-c(1,12,13)]
b1 <- cbind(date_col, loan_input)
names(b1) <- c("Date", names(loan_input))
b <- data.table(b1)
c1 <- cbind(date_col, macro_input)
names(c1) <- c("Date", names(macro_input))
# NOTE(review): `c` shadows base::c here.  Function calls to c(...) still
# resolve to the base function, but a less ambiguous name would be safer.
c <- data.table(c1)
a <- data.table(var_info)
#################################
# CRE Model -- tiered stepwise selection via the project helper StepFun().
# Each pass fixes the variables selected in the previous tier and searches
# the next tier; the vars0/fix_vars0 sets below reflect the drivers chosen
# by the earlier passes (hpi_yg_lag4, ndpi_grw_yoy_lag4, spr10_yd, then
# tr3m_yd_lag3 from tier 2).
#################################
source("StepFun.R")
fix_vars0 <- c("1")
v110_model2_cre_qg_sep=StepFun(a,b,c, tier=1, #indicate which tier of variables to consider
y='cre_qg~', #indicate response variable
thresh=c(0.05, 0.01, 0.001), #significance level for SE based p-value and LR test based p-value for each tier
criteria='SE.p', #variable selection criteria; other values='bic', 'LR.p', 'SE.p', 'rsq'
vars0 = c("1"), #model 0 variables
fix_vars0, #indicate which variables are fixed
out.print=T #indicate whether intermediate output will be printed
)
fix_vars0=c("1", "hpi_yg_lag4", "ndpi_grw_yoy_lag4", "spr10_yd")
v110_model2_cre_qg_sep=StepFun(a,b,c, tier=2, #indicate which tier of variables to consider
y='cre_qg~', #indicate response variable
thresh=c(0.05, 0.01, 0.001), #significance level for SE based p-value and LR test based p-value for each tier
criteria='SE.p', #variable selection criteria; other values='bic', 'LR.p', 'SE.p', 'rsq'
vars0 = c("1", "hpi_yg_lag4", "ndpi_grw_yoy_lag4", "spr10_yd"), #model 0 variables
fix_vars0, #indicate which variables are fixed
out.print=T #indicate whether intermediate output will be printed
)
fix_vars0=c("1", "hpi_yg_lag4", "ndpi_grw_yoy_lag4", "spr10_yd", "tr3m_yd_lag3")
v110_model2_cre_qg_sep=StepFun(a,b,c, tier=3, #indicate which tier of variables to consider
y='cre_qg~', #indicate response variable
thresh=c(0.05, 0.01, 0.001), #significance level for SE based p-value and LR test based p-value for each tier
criteria='SE.p', #variable selection criteria; other values='bic', 'LR.p', 'SE.p', 'rsq'
vars0 = c("1", "hpi_yg_lag4", "ndpi_grw_yoy_lag4", "spr10_yd", "tr3m_yd_lag3"), #model 0 variables
fix_vars0, #indicate which variables are fixed
out.print=T #indicate whether intermediate output will be printed
)
# Display final model
# Algorithm selected model -- each StepFun pass overwrote
# v110_model2_cre_qg_sep, so this is the tier-3 result.
alg_model <- v110_model2_cre_qg_sep[[1]]$final_model
# Save the model coefficients
coef_alg_cre_eb <- as.data.frame(alg_model$coefficients)
write.csv(coef_alg_cre_eb, "coef_alg_cre_eb.csv")
# Plot the four selected macro drivers over the development window.
gg_plot_df <- c[,c("Date","hpi_yg_lag4","ndpi_grw_yoy_lag4","spr10_yd","tr3m_yd_lag3")]
gg_plot_df <- as.data.frame(gg_plot_df)
gg_plot_df$Date <- as.Date(gg_plot_df$Date,"%m/%d/%Y")
gg_plot_df_drivers <- melt(gg_plot_df, id = "Date")
gg_plot_drivers <- ggplot(data = gg_plot_df_drivers, mapping = aes(x = Date, y = value, group = variable, color = variable)) + geom_line() + theme(legend.position = 'bottom') + theme(legend.title=element_blank()) + xlab("Date") + ylab("Value") + ggtitle("Model Drivers") + theme(plot.title = element_text(hjust = 0.5)) + theme(text = element_text(size=15)) + theme(legend.position="bottom")
gg_plot_drivers
ggsave("cre_eb_drivers.png", width = 7, height = 7)
# Final demeaned model: refit the selected regression on mean-centered
# drivers so the intercept is the average quarterly growth rate.  The
# means (mu*) and standard deviations (sd*) are reused later for scenario
# projection and sensitivity shocks.
# Demean the model variables
x1 <- c$hpi_yg_lag4- mean(c$hpi_yg_lag4)
x2 <- c$ndpi_grw_yoy_lag4- mean(c$ndpi_grw_yoy_lag4)
x3 <- c$spr10_yd- mean(c$spr10_yd)
x4 <- c$tr3m_yd_lag3- mean(c$tr3m_yd_lag3)
mu1 <- mean(c$hpi_yg_lag4)
mu2 <- mean(c$ndpi_grw_yoy_lag4)
mu3 <- mean(c$spr10_yd)
mu4 <- mean(c$tr3m_yd_lag3)
sd1 <- sd(c$hpi_yg_lag4)
sd2 <- sd(c$ndpi_grw_yoy_lag4)
sd3 <- sd(c$spr10_yd)
sd4 <- sd(c$tr3m_yd_lag3)
# Model: quarterly CRE balance log growth on the four demeaned drivers.
out <- lm(b$cre_qg~x1+x2+x3+x4)
summary(out)
out_res<-rstandard(out)
out_fit<-fitted.values(out)
# Durbin-Watson test for serial correlation (up to lag 3)
durbinWatsonTest(out,3)
# Save the model coefficients
coef_final_model <- as.data.frame(summary(out)$coefficients)
write.csv(coef_final_model, "coef_final_model.csv")
#multicolinearity (variance inflation factors)
vif(out)
# Autocorrelation diagnostics: ACF/PACF, Ljung-Box, Breusch-Godfrey, DW.
par(mfrow=c(1,2))
acf(out$residuals, main="")
pacf(out$residuals, main="")
Box.test(out$residuals, type = "Ljung-Box", lag = max_lag)
bgtest(out, order = 3)
durbinWatsonTest(out, max.lag = 3)
durbinWatsonTest(out)
#normality test
#QQ-plot
par(mfrow=c(1,1))
qqnorm(out_res, ylab="Residuals", xlab="Quantiles of Standard Normal", main="CRE Ending Balance")
qqline(out_res)
# Residual histogram with a normal-density overlay.
hist(out_res, breaks="FD", xlab="Residuals", main="Histogram of residuals", ylim=c(0,25))
x<- -3:3
# NOTE(review): the 52 scaling factor presumably matches the 52-quarter
# sample size -- update if the development window changes.
lines(x, 52*dnorm(x,0,sd(out_res)),col=2)
# Residual vs predicted
plot(out_fit,out_res, ylab="Residuals", xlab="Q-o-Q Growth Rate", main="CRE Ending Balance")
abline(0, 0)
#################################
# Stationarity: ADF tests (with drift, BIC lag selection) on the raw
# drivers, a Phillips-Perron check on ndpi, then ADF on the demeaned
# versions actually used in the model.
#################################
summary(ur.df(na.remove(c$hpi_yg_lag4), type='drift', lags=6, selectlags = 'BIC'))
summary(ur.df(na.remove(c$ndpi_grw_yoy_lag4), type='drift', lags=6, selectlags = 'BIC'))
summary(ur.df(na.remove(c$spr10_yd), type='drift', lags=6, selectlags = 'BIC'))
summary(ur.df(na.remove(c$tr3m_yd_lag3), type='drift', lags=6, selectlags = 'BIC'))
pp.test(na.remove(c$ndpi_grw_yoy_lag4))
summary(ur.df(na.remove(x1), type='drift', lags=6, selectlags = 'BIC'))
summary(ur.df(na.remove(x2), type='drift', lags=6, selectlags = 'BIC'))
summary(ur.df(na.remove(x3), type='drift', lags=6, selectlags = 'BIC'))
summary(ur.df(na.remove(x4), type='drift', lags=6, selectlags = 'BIC'))
#################################
# Implement the model: rebuild fitted balance LEVELS from fitted log
# growth rates, then project 9 quarters (2016Q1-2018Q1) under each macro
# scenario.  `output` columns: 1 = actual balance, 2/3 = base growth &
# balance, 4/5 = adverse, 6/7 = severe.  Balances compound recursively as
# bal_t = bal_{t-1} * exp(growth_t).
# NOTE(review): the hardcoded 52/53/61 indices assume exactly 52 in-sample
# quarters (2003Q1-2015Q4) plus 9 forecast quarters -- they must be updated
# together with the data window.
#################################
ndata <- nrow(b1)
npred<-9
output<-as.data.frame(matrix(0, ndata+npred,7))
D1 <- which(endbal$year==2003 & endbal$q==1)
D2 <- which(endbal$year==2015 & endbal$q==4)
input <- endbal[D1:D2,]
output[1:ndata, 1]=input$cre_bal
output[1:ndata, 2]=out$fitted.values
dummy1 <- ndata-1
# In-sample fitted balance: previous ACTUAL balance grown by fitted rate.
output[2:ndata, 3]= exp(log(output[1:dummy1, 1]) + output[2:ndata, 2])
# plot(output[2:ndata, 3])
# lines(output[2:ndata, 1], col='red')
#PREDICT over the 9-quarter forecast horizon under each scenario, using
#the same demeaning constants (mu1..mu4) as the fitted model.
aaaa <- which(base$year==2016 & base$q==1)
bbbb <- which(base$year==2018 & base$q==1)
indx <- cbind(base$hpi_yg_lag4-mu1,base$ndpi_grw_yoy_lag4-mu2,base$spr10_yd-mu3, base$tr3m_yd_lag3-mu4)
xreg_base <- as.data.frame(indx[aaaa:bbbb,])
names(xreg_base) <- c("x1", "x2", "x3", "x4")
fitted.base <- as.data.frame(predict(out, xreg_base))
indx <- cbind(adverse$hpi_yg_lag4-mu1,adverse$ndpi_grw_yoy_lag4-mu2,adverse$spr10_yd-mu3, adverse$tr3m_yd_lag3-mu4)
xreg_adverse <- as.data.frame(indx[aaaa:bbbb,])
names(xreg_adverse) <- c("x1", "x2", "x3", "x4")
fitted.adverse <- predict(out, xreg_adverse)
indx <- cbind(severe$hpi_yg_lag4-mu1,severe$ndpi_grw_yoy_lag4-mu2,severe$spr10_yd-mu3, severe$tr3m_yd_lag3-mu4)
xreg_severe <- as.data.frame(indx[aaaa:bbbb,])
names(xreg_severe) <- c("x1", "x2", "x3", "x4")
fitted.severe <- predict(out, xreg_severe)
#output[1:ndata, 1]=ppnr$Int_bearing[D1:D2]
# First forecast quarter jumps off the last ACTUAL balance; subsequent
# quarters compound off the previous PROJECTED balance.
output[53:61, 2]=fitted.base
output[53, 3]= exp(log(output[ndata, 1]) + output[53, 2])
for (i in 2:npred){
ab=52+i
ac=52+i-1
output[ab, 3]= exp(log(output[ac, 3]) + output[ab, 2])
}
output[1:ndata,c(4,5)]=output[1:ndata, c(2,3)]
output[1:ndata,c(6,7)]=output[1:ndata, c(2,3)]
output[53:61, 4]=fitted.adverse
output[53,5]= exp(log(output[ndata, 1]) + output[53, 4])
for (i in 2:npred){
ab=52+i
ac=52+i-1
output[ab, 5]= exp(log(output[ac, 5]) + output[ab, 4])
}
output[53:61, 6]=fitted.severe
output[53,7]= exp(log(output[ndata, 1]) + output[53, 6])
for (i in 2:npred){
ab=52+i
ac=52+i-1
output[ab, 7]= exp(log(output[ac, 7]) + output[ab, 6])
}
# Seed the first quarter of every balance column with the actual, and
# blank zero actuals so they do not plot.
output[1, c(3,5,7)]=output[1,1]
output[which(output[,1]==0),1]=NA
#plot together
date1 = seq(ISOdate(2003,1,1), by = "quarter", length.out = 61)
plot(date1, output[,3], type='l', lty='dotted', ylab='CRE EB')
lines(date1, output[,5], col='blue')
lines(date1,output[,7], col='red')
lines(date1, output[,1], col='green')
legend("topleft", legend= c("base", "adverse", "severe", "Historical"), fill=c("black", "blue", "red", "green"))
colnames(output)=c("Historical", "estimated_base", "estimated_base_bal", "estimated_adverse", "adverse_bal", "estimated_severe", "severe_bal")
#output the Cre results
eb_projections <- cbind(date1,output)
write.csv(as.data.frame(cbind(date1,output)), "cre EB Projections.csv", row.names = F)
eb_projections_df_p <- eb_projections[,c("date1","Historical","estimated_base_bal","adverse_bal","severe_bal")]
colnames(eb_projections_df_p) <- c("Date","Actual","Base","Adverse","Severe")
eb_projections_df_p$Fitted <- append(eb_projections_df_p$Base[1:52],rep(NA,npred))
eb_projections_df_p <- melt(eb_projections_df_p, id="Date")
# Plot of projections
eb_projections_plot <- ggplot(eb_projections_df_p, aes(x = Date, y = value, color = variable, group = variable)) +
geom_line() +
xlab("Date") + ylab("Ending Balance ($)") + ggtitle("CRE Ending Balance") +
theme(plot.title = element_text(hjust = 0.5)) +
theme(text = element_text(size=15)) + theme(legend.title=element_blank())
eb_projections_plot
#######################
# Sensitivity Analysis
#######################
## HPI: rerun the base-scenario projection with the HPI driver shocked by
## +1 and +2 development-sample standard deviations (sd1).  The `output`
## layout is reused from the scenario section, so columns 4/5 hold the
## +1sd run and 6/7 the +2sd run here.
ndata<-nrow(b1)
npred<-9
output<-as.data.frame(matrix(0, ndata+npred,7))
D1<-which(endbal$year==2003 & endbal$q==1)
D2<-which(endbal$year==2015 & endbal$q==4)
input<-endbal[D1:D2,]
output[1:ndata, 1]<-input$cre_bal
output[1:ndata, 2]<-out$fitted.values
dummy1<-ndata-1
output[2:ndata, 3]<- exp(log(output[1:dummy1, 1]) + output[2:ndata, 2])
plot(output[2:ndata, 3])
lines(output[2:ndata, 1], col='red')
#PREDICT
aaaa <- which(base$year==2016 & base$q==1)
bbbb <- which(base$year==2018 & base$q==1)
indx=cbind(base$hpi_yg_lag4-mu1,base$ndpi_grw_yoy_lag4-mu2,base$spr10_yd-mu3, base$tr3m_yd_lag3-mu4)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2", "x3", "x4")
fitted.base=as.data.frame(predict(out, xreg_base))
# +1 standard deviation shock applied to the (demeaned) HPI driver only.
indx_1sd=cbind(base$hpi_yg_lag4-mu1+sd1,base$ndpi_grw_yoy_lag4-mu2,base$spr10_yd-mu3, base$tr3m_yd_lag3-mu4)
xreg_1sd=as.data.frame(indx_1sd[aaaa:bbbb,])
names(xreg_1sd)=c("x1", "x2", "x3", "x4")
fitted.1sd=as.data.frame(predict(out, xreg_1sd))
indx_2sd=cbind(base$hpi_yg_lag4-mu1+2*sd1,base$ndpi_grw_yoy_lag4-mu2,base$spr10_yd-mu3, base$tr3m_yd_lag3-mu4)
xreg_2sd=as.data.frame(indx_2sd[aaaa:bbbb,])
names(xreg_2sd)=c("x1", "x2", "x3", "x4")
fitted.2sd=as.data.frame(predict(out, xreg_2sd))
#output[1:ndata, 1]=ppnr$Int_bearing[D1:D2]
output[53:61, 2]=fitted.base
output[53, 3]= exp(log(output[ndata, 1]) + output[53, 2])
for (i in 2:npred){
ab=52+i
ac=52+i-1
output[ab, 3]= exp(log(output[ac, 3]) + output[ab, 2])
}
output[1:ndata,c(4,5)]=output[1:ndata, c(2,3)]
output[1:ndata,c(6,7)]=output[1:ndata, c(2,3)]
output[53:61, 4]=fitted.1sd
output[53,5]= exp(log(output[ndata, 1]) + output[53, 4])
for (i in 2:npred){
ab=52+i
ac=52+i-1
output[ab, 5]= exp(log(output[ac, 5]) + output[ab, 4])
}
output[53:61, 6]=fitted.2sd
output[53,7]= exp(log(output[ndata, 1]) + output[53, 6])
for (i in 2:npred){
ab=52+i
ac=52+i-1
output[ab, 7]= exp(log(output[ac, 7]) + output[ab, 6])
}
output[1, c(3,5,7)]=output[1,1]
output[which(output[,1]==0),1]=NA
#plot together
# NOTE(review): legend labels are copied from the scenario section --
# "adverse"/"severe" actually mean +1sd/+2sd shocks here.
date1 = seq(ISOdate(2003,1,1), by = "quarter", length.out = 61)
plot(date1, output[,3], type='l', lty='dotted', ylab='CRE EB')
lines(date1, output[,5], col='blue')
lines(date1,output[,7], col='red')
lines(date1, output[,1], col='green')
legend("topleft", legend= c("base", "adverse", "severe", "Historical"), fill=c("black", "blue", "red", "green"))
colnames(output)=c("Historical", "estimated_base", "estimated_base_bal", "estimated_adverse", "adverse_bal", "estimated_severe", "severe_bal")
#output the Cre results
#clean -- keep forecast-horizon balance columns only
output2=output[c(53:61), c(3,5,7)]
hpi_df <- as.data.frame(cbind(date1[53:61],output2))
write.csv(hpi_df, "cre EB sensitivity hpi.csv", row.names = F)
colnames(hpi_df) <- c("Date","Baseline","1_std","2_std")
# Plot
hpi_df_gg <- melt(hpi_df, id = "Date")
hpi_df_gg_p <- ggplot(data = hpi_df_gg, mapping = aes(x = Date, y = value, group = variable, color = variable)) + geom_line() + theme(legend.position = 'bottom') + theme(legend.title=element_blank()) + xlab("Date") + ylab("CRE Ending Balance ($)") + ggtitle("HPI Yearly Growth Rate") + theme(plot.title = element_text(hjust = 0.5)) + theme(text = element_text(size=15)) + theme(legend.position="bottom")
hpi_df_gg_p
ggsave("hpi_sensitive.png", width = 7, height = 7)
## NDPI: same sensitivity exercise as the HPI section above, with the
## +1sd/+2sd shock (sd2) applied to the nominal disposable income driver.
ndata=nrow(b1)
npred=9
output=as.data.frame(matrix(0, ndata+npred,7))
D1=which(endbal$year==2003 & endbal$q==1)
D2=which(endbal$year==2015 & endbal$q==4)
input=endbal[D1:D2,]
output[1:ndata, 1]=input$cre_bal
output[1:ndata, 2]=out$fitted.values
dummy1=ndata-1
output[2:ndata, 3]= exp(log(output[1:dummy1, 1]) + output[2:ndata, 2])
plot(output[2:ndata, 3])
lines(output[2:ndata, 1], col='red')
#PREDICT
aaaa=which(base$year==2016 & base$q==1)
bbbb=which(base$year==2018 & base$q==1)
indx=cbind(base$hpi_yg_lag4-mu1,base$ndpi_grw_yoy_lag4-mu2,base$spr10_yd-mu3, base$tr3m_yd_lag3-mu4)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2", "x3", "x4")
fitted.base=as.data.frame(predict(out, xreg_base))
indx_1sd=cbind(base$hpi_yg_lag4-mu1,base$ndpi_grw_yoy_lag4-mu2+sd2,base$spr10_yd-mu3, base$tr3m_yd_lag3-mu4)
xreg_1sd=as.data.frame(indx_1sd[aaaa:bbbb,])
names(xreg_1sd)=c("x1", "x2", "x3", "x4")
fitted.1sd=as.data.frame(predict(out, xreg_1sd))
indx_2sd=cbind(base$hpi_yg_lag4-mu1,base$ndpi_grw_yoy_lag4+2*sd2-mu2,base$spr10_yd-mu3, base$tr3m_yd_lag3-mu4)
xreg_2sd=as.data.frame(indx_2sd[aaaa:bbbb,])
names(xreg_2sd)=c("x1", "x2", "x3", "x4")
fitted.2sd=as.data.frame(predict(out, xreg_2sd))
#output[1:ndata, 1]=ppnr$Int_bearing[D1:D2]
output[53:61, 2]=fitted.base
output[53, 3]= exp(log(output[ndata, 1]) + output[53, 2])
for (i in 2:npred){
ab=52+i
ac=52+i-1
output[ab, 3]= exp(log(output[ac, 3]) + output[ab, 2])
}
output[1:ndata,c(4,5)]=output[1:ndata, c(2,3)]
output[1:ndata,c(6,7)]=output[1:ndata, c(2,3)]
output[53:61, 4]=fitted.1sd
output[53,5]= exp(log(output[ndata, 1]) + output[53, 4])
for (i in 2:npred){
ab=52+i
ac=52+i-1
output[ab, 5]= exp(log(output[ac, 5]) + output[ab, 4])
}
output[53:61, 6]=fitted.2sd
output[53,7]= exp(log(output[ndata, 1]) + output[53, 6])
for (i in 2:npred){
ab=52+i
ac=52+i-1
output[ab, 7]= exp(log(output[ac, 7]) + output[ab, 6])
}
output[1, c(3,5,7)]=output[1,1]
output[which(output[,1]==0),1]=NA
#plot together
# NOTE(review): legend labels "adverse"/"severe" mean +1sd/+2sd here.
date1 = seq(ISOdate(2003,1,1), by = "quarter", length.out = 61)
plot(date1, output[,3], type='l', lty='dotted', ylab='CRE EB')
lines(date1, output[,5], col='blue')
lines(date1,output[,7], col='red')
lines(date1, output[,1], col='green')
legend("topleft", legend= c("base", "adverse", "severe", "Historical"), fill=c("black", "blue", "red", "green"))
colnames(output)=c("Historical", "estimated_base", "estimated_base_bal", "estimated_adverse", "adverse_bal", "estimated_severe", "severe_bal")
#output the Cre results
#clean -- keep forecast-horizon balance columns only
output2=output[c(53:61), c(3,5,7)]
ndpi_df <- as.data.frame(cbind(date1[53:61],output2))
write.csv(ndpi_df, "cre EB sensitivity ndpi.csv", row.names = F)
colnames(ndpi_df) <- c("Date","Baseline","1_std","2_std")
# Plot
ndpi_df_gg <- melt(ndpi_df, id = "Date")
ndpi_df_gg_p <- ggplot(data = ndpi_df_gg, mapping = aes(x = Date, y = value, group = variable, color = variable)) + geom_line() + theme(legend.position = 'bottom') + theme(legend.title=element_blank()) + xlab("Date") + ylab("CRE Ending Balance ($)") + ggtitle("Nominal Disposable Inc.") + theme(plot.title = element_text(hjust = 0.5)) + theme(text = element_text(size=15)) + theme(legend.position="bottom")
ndpi_df_gg_p
ggsave("ndpi_sensitive.png", width = 7, height = 7)
# SPR10: same sensitivity exercise, with the +1sd/+2sd shock (sd3)
# applied to the 10-year Treasury credit-spread driver.
ndata=nrow(b1)
npred=9
output=as.data.frame(matrix(0, ndata+npred,7))
D1=which(endbal$year==2003 & endbal$q==1)
D2=which(endbal$year==2015 & endbal$q==4)
input=endbal[D1:D2,]
output[1:ndata, 1]=input$cre_bal
output[1:ndata, 2]=out$fitted.values
dummy1=ndata-1
output[2:ndata, 3]= exp(log(output[1:dummy1, 1]) + output[2:ndata, 2])
# plot(output[2:ndata, 3])
# lines(output[2:ndata, 1], col='red')
#PREDICT
aaaa=which(base$year==2016 & base$q==1)
bbbb=which(base$year==2018 & base$q==1)
indx=cbind(base$hpi_yg_lag4-mu1,base$ndpi_grw_yoy_lag4-mu2,base$spr10_yd-mu3, base$tr3m_yd_lag3-mu4)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2", "x3", "x4")
fitted.base=as.data.frame(predict(out, xreg_base))
indx_1sd=cbind(base$hpi_yg_lag4-mu1,base$ndpi_grw_yoy_lag4-mu2,base$spr10_yd-mu3+sd3, base$tr3m_yd_lag3-mu4)
xreg_1sd=as.data.frame(indx_1sd[aaaa:bbbb,])
names(xreg_1sd)=c("x1", "x2", "x3", "x4")
fitted.1sd=as.data.frame(predict(out, xreg_1sd))
indx_2sd=cbind(base$hpi_yg_lag4-mu1,base$ndpi_grw_yoy_lag4-mu2,base$spr10_yd-mu3+2*sd3, base$tr3m_yd_lag3-mu4)
xreg_2sd=as.data.frame(indx_2sd[aaaa:bbbb,])
names(xreg_2sd)=c("x1", "x2", "x3", "x4")
fitted.2sd=as.data.frame(predict(out, xreg_2sd))
#output[1:ndata, 1]=ppnr$Int_bearing[D1:D2]
output[53:61, 2]=fitted.base
output[53, 3]= exp(log(output[ndata, 1]) + output[53, 2])
for (i in 2:npred){
ab=52+i
ac=52+i-1
output[ab, 3]= exp(log(output[ac, 3]) + output[ab, 2])
}
output[1:ndata,c(4,5)]=output[1:ndata, c(2,3)]
output[1:ndata,c(6,7)]=output[1:ndata, c(2,3)]
output[53:61, 4]=fitted.1sd
output[53,5]= exp(log(output[ndata, 1]) + output[53, 4])
for (i in 2:npred){
ab=52+i
ac=52+i-1
output[ab, 5]= exp(log(output[ac, 5]) + output[ab, 4])
}
output[53:61, 6]=fitted.2sd
output[53,7]= exp(log(output[ndata, 1]) + output[53, 6])
for (i in 2:npred){
ab=52+i
ac=52+i-1
output[ab, 7]= exp(log(output[ac, 7]) + output[ab, 6])
}
output[1, c(3,5,7)]=output[1,1]
output[which(output[,1]==0),1]=NA
#plot together
# NOTE(review): legend labels "adverse"/"severe" mean +1sd/+2sd here.
date1 = seq(ISOdate(2003,1,1), by = "quarter", length.out = 61)
plot(date1, output[,3], type='l', lty='dotted', ylab='CRE EB')
lines(date1, output[,5], col='blue')
lines(date1,output[,7], col='red')
lines(date1, output[,1], col='green')
legend("topleft", legend= c("base", "adverse", "severe", "Historical"), fill=c("black", "blue", "red", "green"))
colnames(output)=c("Historical", "estimated_base", "estimated_base_bal", "estimated_adverse", "adverse_bal", "estimated_severe", "severe_bal")
#output the Cre results
#clean -- keep forecast-horizon balance columns only
output2=output[c(53:61), c(3,5,7)]
spr10_df <- as.data.frame(cbind(date1[53:61],output2))
write.csv(spr10_df, "cre EB sensitivity spr10.csv", row.names = F)
colnames(spr10_df) <- c("Date","Baseline","1_std","2_std")
# Plot
spr10_df_gg <- melt(spr10_df, id = "Date")
spr10_df_gg_p <- ggplot(data = spr10_df_gg, mapping = aes(x = Date, y = value, group = variable, color = variable)) + geom_line() + theme(legend.position = 'bottom') + theme(legend.title=element_blank()) + xlab("Date") + ylab("CRE Ending Balance ($)") + ggtitle("10 Yr. Treasury Credit Spr.") + theme(plot.title = element_text(hjust = 0.5)) + theme(text = element_text(size=15)) + theme(legend.position="bottom")
spr10_df_gg_p
ggsave("10yr_spread_sensitive.png", width = 7, height = 7)
# tr3m: same sensitivity exercise, with the +1sd/+2sd shock (sd4)
# applied to the 3-month Treasury driver.
ndata=nrow(b1)
npred=9
output=as.data.frame(matrix(0, ndata+npred,7))
D1=which(endbal$year==2003 & endbal$q==1)
D2=which(endbal$year==2015 & endbal$q==4)
input=endbal[D1:D2,]
output[1:ndata, 1]=input$cre_bal
output[1:ndata, 2]=out$fitted.values
dummy1=ndata-1
output[2:ndata, 3]= exp(log(output[1:dummy1, 1]) + output[2:ndata, 2])
# plot(output[2:ndata, 3])
# lines(output[2:ndata, 1], col='red')
#PREDICT
aaaa=which(base$year==2016 & base$q==1)
bbbb=which(base$year==2018 & base$q==1)
indx=cbind(base$hpi_yg_lag4-mu1,base$ndpi_grw_yoy_lag4-mu2,base$spr10_yd-mu3, base$tr3m_yd_lag3-mu4)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2", "x3", "x4")
fitted.base=as.data.frame(predict(out, xreg_base))
indx_1sd=cbind(base$hpi_yg_lag4-mu1,base$ndpi_grw_yoy_lag4-mu2,base$spr10_yd-mu3, base$tr3m_yd_lag3-mu4+sd4)
xreg_1sd=as.data.frame(indx_1sd[aaaa:bbbb,])
names(xreg_1sd)=c("x1", "x2", "x3", "x4")
fitted.1sd=as.data.frame(predict(out, xreg_1sd))
indx_2sd=cbind(base$hpi_yg_lag4-mu1,base$ndpi_grw_yoy_lag4-mu2,base$spr10_yd-mu3, base$tr3m_yd_lag3-mu4+2*sd4)
xreg_2sd=as.data.frame(indx_2sd[aaaa:bbbb,])
names(xreg_2sd)=c("x1", "x2", "x3", "x4")
fitted.2sd=as.data.frame(predict(out, xreg_2sd))
#output[1:ndata, 1]=ppnr$Int_bearing[D1:D2]
output[53:61, 2]=fitted.base
output[53, 3]= exp(log(output[ndata, 1]) + output[53, 2])
for (i in 2:npred){
ab=52+i
ac=52+i-1
output[ab, 3]= exp(log(output[ac, 3]) + output[ab, 2])
}
output[1:ndata,c(4,5)]=output[1:ndata, c(2,3)]
output[1:ndata,c(6,7)]=output[1:ndata, c(2,3)]
output[53:61, 4]=fitted.1sd
output[53,5]= exp(log(output[ndata, 1]) + output[53, 4])
for (i in 2:npred){
ab=52+i
ac=52+i-1
output[ab, 5]= exp(log(output[ac, 5]) + output[ab, 4])
}
output[53:61, 6]=fitted.2sd
output[53,7]= exp(log(output[ndata, 1]) + output[53, 6])
for (i in 2:npred){
ab=52+i
ac=52+i-1
output[ab, 7]= exp(log(output[ac, 7]) + output[ab, 6])
}
output[1, c(3,5,7)]=output[1,1]
output[which(output[,1]==0),1]=NA
#plot together
# NOTE(review): legend labels "adverse"/"severe" mean +1sd/+2sd here.
date1 = seq(ISOdate(2003,1,1), by = "quarter", length.out = 61)
plot(date1, output[,3], type='l', lty='dotted', ylab='CRE EB')
lines(date1, output[,5], col='blue')
lines(date1,output[,7], col='red')
lines(date1, output[,1], col='green')
legend("topleft", legend= c("base", "adverse", "severe", "Historical"), fill=c("black", "blue", "red", "green"))
colnames(output)=c("Historical", "estimated_base", "estimated_base_bal", "estimated_adverse", "adverse_bal", "estimated_severe", "severe_bal")
#output the Cre results
#clean -- keep forecast-horizon balance columns only
output2=output[c(53:61), c(3,5,7)]
tr3m_df <- as.data.frame(cbind(date1[53:61],output2))
# NOTE(review): unlike the other sensitivity sections, columns are renamed
# BEFORE writing the CSV, so this file's headers differ from the others.
colnames(tr3m_df) <- c("Date","Baseline","1_std","2_std")
write.csv(tr3m_df, "cre EB sensitivity tr3m.csv", row.names = F)
# Plot
tr3m_df_gg <- melt(tr3m_df, id = "Date")
tr3m_df_gg_p <- ggplot(data = tr3m_df_gg, mapping = aes(x = Date, y = value, group = variable, color = variable)) + geom_line() + theme(legend.position = 'bottom') + theme(legend.title=element_blank()) + xlab("Date") + ylab("CRE Ending Balance ($)") + ggtitle("3-Month US Treasury Rate") + theme(plot.title = element_text(hjust = 0.5)) + theme(text = element_text(size=15)) + theme(legend.position="bottom")
tr3m_df_gg_p
ggsave("3m_treasury_sensitive.png", width = 7, height = 7)
########################
#95% Confidence Interval
########################
# Generate the 95% confidence interval for the base-case growth forecast.
# Default in R's predict() is 95%.  as.yearqtr() requires the zoo package.
Date_ci = as.Date(as.yearqtr(paste(base$year[aaaa:bbbb],base$q[aaaa:bbbb],sep="-")))
indx=cbind(base$hpi_yg_lag4-mu1,base$ndpi_grw_yoy_lag4-mu2,base$spr10_yd-mu3, base$tr3m_yd_lag3-mu4)
xreg_base=as.data.frame(indx[aaaa:bbbb,])
names(xreg_base)=c("x1", "x2", "x3", "x4")
fitted.base=as.data.frame(predict(out, xreg_base, interval = "confidence"))
# Growth rates scaled to percent for reporting.
base_ci_df <- as.data.frame(cbind(Date_ci,fitted.base*100))
colnames(base_ci_df) <- c("Date", "Fcst","Lower","Upper")
write.csv(base_ci_df, "CRE Confidence Interval Data.csv", row.names = F)
base_ci_df <- melt(base_ci_df, id="Date")
gg_in_df <- data.frame(Date = as.Date(b1$Date,format = "%m/%d/%Y"),Actual = b1$cre_qg*100, Fitted = output$estimated_base[1:length(b1$cre_qg)]*100)
gg_in_df <- melt(gg_in_df, id="Date")
gg_fcst_df_ci <- rbind(gg_in_df,base_ci_df)
# Plot the historical actual and fitted with base 95% forecast
# NOTE(review): ylab "Loss Severity (%)" appears copied from another
# script -- this chart plots the quarterly balance growth rate.
cre_EB_fcst_plot_ci <- ggplot(gg_fcst_df_ci, aes(x = Date, y = value, color = variable, group = variable)) +
geom_line() +
xlab("Date") + ylab("Loss Severity (%)") + ggtitle("CRE Ending Balance Growth Rate Forecast and 95% CI") +
theme(plot.title = element_text(hjust = 0.5)) +
theme(text = element_text(size=15)) + theme(legend.title=element_blank())
cre_EB_fcst_plot_ci
ggsave("CRE_End_BAL_Base_95_Fcst-Plot.png", width = 7, height = 7)
###################################################################
# Bootstrap analysis: resample the development data to get empirical
# standard errors for the regression coefficients.
# Demean the drivers exactly as in the fitted model.
x1_b <- c$hpi_yg_lag4- mean(c$hpi_yg_lag4)
x2_b <- c$ndpi_grw_yoy_lag4- mean(c$ndpi_grw_yoy_lag4)
x3_b <- c$spr10_yd- mean(c$spr10_yd)
x4_b <- c$tr3m_yd_lag3- mean(c$tr3m_yd_lag3)
# Make up the data set
df_total_dev = data.frame(cre_qg = b$cre_qg, hpi_yg_lag4 = x1_b, ndpi_grw_yoy_lag4 = x2_b, spr10_yd = x3_b, tr3m_yd_lag3 = x4_b)
# Sanity check: this refit should reproduce the coefficients of `out`.
model <- "cre_qg ~ hpi_yg_lag4 + ndpi_grw_yoy_lag4 + spr10_yd + tr3m_yd_lag3"
summary(lm(model, data = df_total_dev))
# Bootstrap 95% CI for regression coefficients
library(boot)
# Statistic function for boot(): refit the regression on the rows chosen
# by `indices` (boot's resample) and return the coefficient vector.
bs = function(data, indices, formula) {
resampled <- data[indices,]
refit <- lm(formula, data = resampled)
coef(refit)
}
# Run the bootstrap: R = 5000 coefficient resamples.
results = boot(
data=df_total_dev,
statistic=bs,
R=5000,
formula=model)
# Normal-approximation z-statistics and two-sided p-values from the
# bootstrap standard errors.
Names = names(results$t0)
SEs = sapply(data.frame(results$t), sd)
Coefs = as.numeric(results$t0)
zVals = Coefs / SEs
Pvals = 2*pnorm(-abs(zVals))
Formatted_Results = cbind(Names, Coefs, SEs, zVals, Pvals)
# Plot the bootstrap density of each coefficient.
par(mfrow=c(1,1))
for (i in 1:length(names(results$t0))){
plot(density(results$t[,i]), main = paste(names(results$t0)[i],"Density",sep=" - "))
}
# Back-testing Analysis: refit the model on 2003-2014 and forecast the
# four quarters of 2015 out-of-sample.
bt_endbal_df <- endbal
bt_endbal_df$Date <- as.Date(bt_endbal_df$date, "%m/%d/%Y")
# Macro Variables: take the development regressor table and re-demean the
# four drivers over its full window.
bt_macro_df <- as.data.frame(c)
bt_macro_df$Date <- as.Date(bt_macro_df$Date, "%m/%d/%Y")
bt_macro_df$year <- year(bt_macro_df$Date)
bt_macro_df$q <- quarter(bt_macro_df$Date)
bt_macro_df$hpi_yg_lag4_dm <- bt_macro_df$hpi_yg_lag4 - mean(bt_macro_df$hpi_yg_lag4)
bt_macro_df$ndpi_grw_yoy_lag4_dm <- bt_macro_df$ndpi_grw_yoy_lag4 - mean(bt_macro_df$ndpi_grw_yoy_lag4)
bt_macro_df$spr10_yd_dm <- bt_macro_df$spr10_yd - mean(bt_macro_df$spr10_yd)
bt_macro_df$tr3m_yd_lag3_dm <- bt_macro_df$tr3m_yd_lag3 - mean(bt_macro_df$tr3m_yd_lag3)
# Merge the end bal and macro data (year/q exist in both, so merge adds
# .x/.y suffixes).
bt_df <- merge(x = bt_endbal_df, y = bt_macro_df, by.x = "Date", by.y = "Date")
# Partition the data: in-sample 2003Q1-2014Q4, holdout 2015Q1-2015Q4.
# NOTE(review): conditions mix year.x (from endbal) with q.y (from macro);
# both describe the same merged row's date, but using one suffix pair
# consistently would be clearer.
in1 <- which(bt_df$year.x==2003 & bt_df$q.y==1)
in2 <- which(bt_df$year.x==2014 & bt_df$q.y==4)
out1 <- which(bt_df$year.x==2015 & bt_df$q.y==1)
out2 <- which(bt_df$year.x==2015 & bt_df$q.y==4)
insample <- bt_df[in1:in2,]
outsample <- bt_df[out1:out2,]
# Estimate the model on the insample portion
out_bt <- lm(cre_qg ~ hpi_yg_lag4_dm + ndpi_grw_yoy_lag4_dm + spr10_yd_dm + tr3m_yd_lag3_dm, data = insample)
summary(out_bt)
# Add the fitted values to the insample data
insample$fitted <- out_bt$fitted.values
# Forecast added to the out of sample data
outsample$fitted <- predict(out_bt, outsample)
# Append the insample and out of sample data and select the columns
bt_df_final <- rbind(insample, outsample)
bt_df_final <- bt_df_final[,c("Date","cre_bal","fitted")]
# Rebuild balance levels: previous ACTUAL balance grown by fitted rate.
fitted_bal <- exp(log(bt_df_final[1:(nrow(bt_df_final)-1), 2]) + bt_df_final[2:nrow(bt_df_final), 3])
bt_df_final$fitted_bal <- append(NA, fitted_bal)
bt_df_final$label <- append(rep("Fitted",nrow(insample)),rep("Forecast",nrow(outsample)))
# Plot
bt_df_final_p <- melt(bt_df_final[,c("Date","cre_bal","fitted_bal")], id = "Date")
bt_df_final_plot <- ggplot(data = bt_df_final_p, mapping = aes(x = Date, y = value, group = variable, color = variable)) + geom_line() + theme(legend.position = 'bottom') + theme(legend.title=element_blank()) + xlab("Date") + ylab("CRE Ending Balance ($)") + ggtitle("Out-of-Sample Forecast") + theme(plot.title = element_text(hjust = 0.5)) + theme(text = element_text(size=15)) + theme(legend.position="bottom")
bt_df_final_plot
# Table of holdout-quarter forecast errors (percent).
fcst_table <- bt_df_final[out1:out2,c("Date","cre_bal","fitted_bal")]
fcst_table$p_error <- round(100*(fcst_table$fitted_bal-fcst_table$cre_bal)/fcst_table$cre_bal,2)
row.names(fcst_table) <- NULL
fcst_table
<file_sep>/example.R
# Working directory must contain the BBCN CSV extracts read below.
setwd("C:/Users/ic07949/Desktop/dataset")
# Install any missing dependencies, then attach them.
requirements <- c("dplyr", "reshape2", "data.table","zoo")
for(rr in requirements){
if(! rr %in% installed.packages()) install.packages(rr)
}
require(dplyr)
require(reshape2)
require(data.table)
require(zoo)
## Import BBCN Data (SAS File 01, Line 1 to 53)
BBCN_df <- read.csv("data request bottom-up.csv")
# First column of the extract is the snapshot date; paste() here is a
# no-op wrapper around the literal name.
names(BBCN_df)[1] <- paste("fileDate")
BBCN_df$fileDate <- as.Date(BBCN_df$fileDate, "%Y-%m-%d")
BBCN_df$originationDate <- as.Date(BBCN_df$originationDate, "%Y-%m-%d")
BBCN_df$maturityDate <- as.Date(BBCN_df$maturityDate,"%Y-%m-%d")
BBCN_df$non_acc_date <- as.Date(BBCN_df$nonAccrualDate, "%Y-%m-%d")
# Restrict to the 2012Q2-2016Q1 snapshot window.
BBCN_df <- filter(BBCN_df, ((fileDate >= "2012-06-30") & (fileDate <= "2016-03-31") ))
rates <- fread("rates2.csv")
acquired_bbcn_raw <- fread("acquired loan identifier bbcn.csv")
acquired_loans <- unique(acquired_bbcn_raw$Note_Number)
## Create label in BBCN for acquired loans (SAS File 01, Line 54 to 104)
BBCN_df$acquired_identifier <- ifelse(BBCN_df$accountNo %in% acquired_loans,
paste("acquired_bbcn"),
paste("bbcn_originated"))
table(BBCN_df$acquired_identifier)
#write.csv(BBCN_df, file = "BBCN_df_test.csv", row.names = FALSE)
## Create y for default event (SAS File 01, Line 118 to 125):
## default = positive charge-off, or zero charge-off with a non-accrual flag.
BBCN_df$y <- ifelse(BBCN_df$amtChargedOff > 0 | (BBCN_df$amtChargedOff == 0 &
BBCN_df$nonAccrualFlag != 0),
1, 0)
table(BBCN_df$y)
####################################################################################################
## Create time to maturity and POB (SAS File 01, Line 127 to 181)
## Loan age and term in quarters; POB = percent of term elapsed.
BBCN_df$loan_age_q <- (as.yearqtr(BBCN_df$fileDate) - as.yearqtr(BBCN_df$originationDate)
) * 4
BBCN_df$term_q <- (as.yearqtr(BBCN_df$maturityDate) - as.yearqtr(BBCN_df$originationDate)
) * 4
BBCN_df$POB <- 100 * BBCN_df$loan_age_q / BBCN_df$term_q
## Zero-term loans would divide by zero above; force them to 100%.
BBCN_df$POB <- ifelse(BBCN_df$term_q ==0,100,BBCN_df$POB)
## find the date for the first default event (SAS File 01, Line 224 to 240)
indx_bbcn <- subset(BBCN_df, y==1, select = c("accountNo","non_acc_date"))
indx_bbcn <- as.data.table(indx_bbcn[order(indx_bbcn$accountNo, indx_bbcn$non_acc_date),])
## Keep only the earliest non-accrual date per account.
indx_bbcn <- indx_bbcn %>% group_by(accountNo)%>% filter(row_number(non_acc_date) == 1)
names(indx_bbcn)[names(indx_bbcn)=="non_acc_date"] <- "min_non_acc_date"
#### 20184 obs in indx_bbcn
# BBCN_df <- BBCN_df[-grep("non_acc_date", colnames(BBCN_df))]
BBCN_df <- merge(x = BBCN_df, y = indx_bbcn, by = "accountNo", all.x = TRUE)
## Clean up data: split the dates into year/month/quarter components.
BBCN_df$yr_maturity <- year(BBCN_df$maturityDate)
BBCN_df$yr_file <- year(BBCN_df$fileDate)
BBCN_df$mn_maturity <- month(BBCN_df$maturityDate)
BBCN_df$mn_file <- month(BBCN_df$fileDate)
BBCN_df$q_file <- quarter(BBCN_df$fileDate)
BBCN_df$yr_min_non_acc_date <- year(BBCN_df$min_non_acc_date)
BBCN_df$mn_min_non_acc_date <- month(BBCN_df$min_non_acc_date)
## Remaining time to maturity in months.
BBCN_df$ttm_m= 12*(BBCN_df$yr_maturity - BBCN_df$yr_file ) + (
BBCN_df$mn_maturity - BBCN_df$mn_file)
## NOTE(review): maturityDate is a Date while 2006 is numeric, so this
## compares against the internal day count, not the year 2006 -- confirm.
BBCN_df <- filter(BBCN_df, maturityDate > 2006)
# BBCN_df <- filter(BBCN_df, yr_maturity >= yr_file )
# BBCN_df <- filter(BBCN_df, !(yr_maturity == yr_file & (mn_file - mn_maturity)>2) )
#### 414335 obs.
## Create CRE/C&I portfolio ID (SAS File 01, Line 254 to 331)
## Each loan is bucketed into "CI" (commercial & industrial) or "CRE"
## (commercial real estate) based on call-report and loan-type text.
## NOTE(review): the loanTypeDescr vectors contain duplicated strings
## (harmless inside %in%) -- likely a carry-over from the SAS source.
BBCN_df$portfolio_id <- "NULL"
## trim leading or trailing blanks for vairable callReportCodeDescr
## (because R Reads blanks)
trim <- function (x) gsub("^\\s+|\\s+$", "", x)
BBCN_df$callReportCodeDescr <- trim(BBCN_df$callReportCodeDescr)
## (SAS File 01, Line 258 to 274)
BBCN_df$portfolio_id <- ifelse(BBCN_df$callReportCodeDescr %in% c("COMMERCIAL (GENERAL PLEDGE)") &
BBCN_df$loanTypeDescr %in% c("Commercial Line (18)", "Commercial Line (18)","Commercial Line (18)",
"Commercial Line (18)", "Commercial Term Loan (20)",
"Commercial Term Loan (20)","Commercial Term Loan (20)",
"Commercial Term Loan (20)","Comml LOC - Other Gov Gty (19)",
"Comml LOC - Other Gov Gty (19)","Discounted Acceptance (33)",
"Export Working Capital Program (38)","Performance Bond L/C (44)",
"Purchase Advance (31)","Purchase Advance (31)",
"Purchase Advance (31)","SBA 172 Loan (66)",
"SBA ARC Loans (62)","SBA ARC Loans (62)",
"SBA ARC Loans (62)","SBA Express LOC (64)",
"SBA Express LOC (64)", "SBA SOHO Loan (65)",
"SBA Term Loans (61)","SBA Term Loans (61)",
"SBA Term Loans (61)","Standby L/C (43)",
"Standby L/C (43)","Trust Receipt (30)",
"Working Capital Advance (37)","Working Capital Advance (37)",
"Working Capital Advance (37)"),
"CI",
BBCN_df$portfolio_id)
#### !!! SAS only reads up to 32 characters. But R reads them all. So It's "Export Working Capital Program (38)",
#### not "Export Working Capital Program (3" !!!!
## (SAS File 01, Line 276 to 279)
BBCN_df$portfolio_id <- ifelse(BBCN_df$portfolio_id == c("NULL") &
BBCN_df$callReportCodeDescr %in% c("COMMERCIAL (GENERAL PLEDGE)") &
BBCN_df$loanTypeDescr %in% c("Commercial Real Estate (71)", "SBA Real Estate (60)",
"SBA Real Estate (60)", "SBA Real Estate (60)"),
"CRE",
BBCN_df$portfolio_id)
## (SAS File 01, Line 281 to 283)
BBCN_df$portfolio_id <- ifelse(BBCN_df$portfolio_id == c("NULL") &
BBCN_df$callReportCodeDescr %in% c("CONVENTIONAL 5+ RESIDENTIAL",
"Conv 5+ Residential Prop",
"NON-FARM NON -RESIDENTIAL",
"Other nonfarm nonresi property",
"Owner-occupied nonfarm nonresi",
"SECURED BY FARMEDLAND") ,
"CRE",
BBCN_df$portfolio_id)
## (SAS File 01, Line 285 to 286)
BBCN_df$portfolio_id <- ifelse(BBCN_df$portfolio_id == c("NULL") &
BBCN_df$callReportCodeDescr %in% c("Check Credit & Rev Credit Plan",
"Com'l Loan - International Dpt",
"Com'l Loans - Borrowing Based"),
"CI",
BBCN_df$portfolio_id)
## (SAS File 01, Line 288 to 322)
BBCN_df$portfolio_id <- ifelse(BBCN_df$portfolio_id == c("NULL") &
BBCN_df$callReportCodeDescr %in% c("Commercial Loans") &
BBCN_df$loanTypeDescr %in% c("Bankers Health Group","Commercial Lease (25)",
"Commercial Line (18)",
"Commercial Term Loan (20)",
"Comml Asset-Based LOC (22)",
"Comml LOC - Other Gov Gty (19)",
"Comml Term - Other Gov Gty (21)",
"Discounted Acceptance (33)",
"Export Working Capital Program (38)",
"Express Line (26)",
"Master Comm LOC (01)",
"Master Comm LOC Sublimit (03)",
"Master ILOC (02)",
"Master ILOC Sublimit (04)",
"ODP LOC - Business",
"Performance Bond L/C (44)",
"Professional Line of Credit (51)",
"Professional Term Loan (50)",
"Purchase Advance (31)",
"Purchase Advance-Comm (27)",
"SBA 172 Loan (66)",
"SBA ARC Loans (62)",
"SBA Express LOC (64)",
"SBA Express Loan (63)",
"SBA SOHO Loan (65)",
"SBA Small Loan Advantage",
"SBA Term Loans (61)",
"Signature Line (11)",
"Simple Line of Credit (24)",
"Simple Loan - Commercial (23)",
"Standby L/C (43)",
"Syndicated Leveraged Lending",
"Trust Receipt (30)",
"Working Capital Advance (37)",
"Working Capital Advance-Comm (28)"),
"CI",
BBCN_df$portfolio_id)
#### Same problem. SAS only reads up to 32 characters. But R reads them all. So It's "Export Working Capital Program (38)",
#### not "Export Working Capital Program (3" !!!!
## (SAS File 01, Line 324 to 326)
BBCN_df$portfolio_id <- ifelse(BBCN_df$portfolio_id == c("NULL") &
BBCN_df$callReportCodeDescr %in% c("Commercial Loans") &
BBCN_df$loanTypeDescr %in% c("Comm RE - Revolving LOC (74)","Commercial Real Estate (71)",
"SBA Real Estate (60)"),
"CRE",
BBCN_df$portfolio_id)
## (SAS File 01,Line 328 to 330)
BBCN_df$portfolio_id <- ifelse(BBCN_df$portfolio_id == c("NULL") &
BBCN_df$callReportCodeDescr %in% c("INTERNATIONAL",
"Other Installment loans") ,
"CI",
BBCN_df$portfolio_id)
## Anything still unclassified is flagged so it shows up in the table below.
BBCN_df$portfolio_id <- ifelse(BBCN_df$portfolio_id == c("NULL"),
"error",
BBCN_df$portfolio_id)
table(BBCN_df$portfolio_id)
####################################################################################################################################
####################################################################################################################################
## Second pass: map call-report descriptions to numeric purpose codes,
## then derive an alternative CRE/CI split (portfolio_id2) from them.
BBCN_df2 <- BBCN_df
BBCN_df2$PurposeCode <- "NULL"
BBCN_df2$PurposeCode <- ifelse(BBCN_df2$PurposeCode == c("NULL") &
BBCN_df2$callReportCodeDescr %in% c("Conv 5+ Residential Prop"), 180, BBCN_df2$PurposeCode)
BBCN_df2$PurposeCode <- ifelse(BBCN_df2$PurposeCode == c("NULL") &
BBCN_df2$callReportCodeDescr %in% c("NON-FARM NON -RESIDENTIAL", "Other nonfarm nonresi property"), 190, BBCN_df2$PurposeCode)
BBCN_df2$PurposeCode <- ifelse(BBCN_df2$PurposeCode == c("NULL") &
BBCN_df2$callReportCodeDescr %in% c("Owner-occupied nonfarm nonresi"), 200, BBCN_df2$PurposeCode)
BBCN_df2$PurposeCode <- ifelse(BBCN_df2$PurposeCode == c("NULL") &
BBCN_df2$callReportCodeDescr %in% c("Commercial Loans"), 510, BBCN_df2$PurposeCode)
BBCN_df2$PurposeCode <- ifelse(BBCN_df2$PurposeCode == c("NULL") &
BBCN_df2$callReportCodeDescr %in% c("Com'l Loan - International Dpt"), 511, BBCN_df2$PurposeCode)
BBCN_df2$PurposeCode <- ifelse(BBCN_df2$PurposeCode == c("NULL") &
BBCN_df2$callReportCodeDescr %in% c("Leasing"), 650, BBCN_df2$PurposeCode)
BBCN_df2$PurposeCode <- ifelse(BBCN_df2$PurposeCode == c("NULL"), "error", BBCN_df2$PurposeCode)
table(BBCN_df2$PurposeCode)
###########################################################################################################################
## 180/190/200 are real-estate codes -> CRE; 510/511/650 -> CI.
BBCN_df2$portfolio_id2 <- "NULL"
BBCN_df2$portfolio_id2 <- ifelse(BBCN_df2$portfolio_id2== c("NULL") &
BBCN_df2$PurposeCode %in% c("180","190","200"),
"CRE", BBCN_df2$portfolio_id2)
BBCN_df2$portfolio_id2 <- ifelse(BBCN_df2$portfolio_id2== c("NULL") &
BBCN_df2$PurposeCode %in% c("510","511","650"),
"CI", BBCN_df2$portfolio_id2)
table(BBCN_df2$portfolio_id2 )
| e1a8de701c2d8748e073e4accaf3dfff42475b72 | [
"R",
"RMarkdown"
] | 48 | R | hyunyouchoi/R-Studio | cc47e61e50e63cea145b4a38eb55a8e8b429ee9b | 3ad5f480c4da7de84593d28753bfad986cb7f0e8 |
refs/heads/master | <file_sep>import { Component, OnInit } from '@angular/core';
import { Diary } from '../diary'
@Component({
  selector: 'app-diary',
  templateUrl: './diary.component.html',
  styleUrls: ['./diary.component.css']
})
export class DiaryComponent implements OnInit {
  // Seed entries shown until the user adds their own.
  diaries = [
    // Bug fix: the first entry read `new Date(2018, 3.5)` -- the day
    // argument was missing (a comma typo), so the date silently became
    // 1 April 2018.  Restored the 2018-04-05 pattern of the siblings.
    new Diary(1, 'The day I went fishing', 'I caught a huge Fish and we had it for Dinner that day ', new Date(2018, 3, 5)),
    new Diary(2, 'The day I went fishing', 'I caught a huge Fish and we had it for Dinner that day ', new Date(2018, 4, 6)),
    new Diary(3, 'The day I went fishing', 'I caught a huge Fish and we had it for Dinner that day ', new Date(2018, 5, 3)),
  ]

  // Show/hide the description of the entry at `index`.
  // (Name kept as-is -- the template binds to `toogleDetails`.)
  toogleDetails(index) {
    this.diaries[index].showDescription = !this.diaries[index].showDescription;
  }

  // Remove the entry at `index` once it has been marked complete.
  completeDiary(isComplete, index) {
    if (isComplete) {
      this.diaries.splice(index, 1)
    }
  }

  // Append a diary from the child form: assign the next sequential id and
  // coerce the form's date string into a Date object.
  addNewDiary(diary) {
    let diaryLength = this.diaries.length;
    diary.id = diaryLength + 1;
    diary.completeDate = new Date(diary.completeDate)
    this.diaries.push(diary)
  }

  constructor() { }

  ngOnInit() {
  }
}
| be15d6c47de27fdc63fb6ffd35f2bc2cf2000de3 | [
"TypeScript"
] | 1 | TypeScript | Wess58/DiaryApp | 0b84c775cbfcddec43904707f8533acae3532c34 | c7004bdff2f6c1180355569030b9818d4234fa60 |
refs/heads/master | <file_sep>#include "RBSearchTree.h"
using namespace std;
// Construct an empty red-black tree.
RBSearchTree::RBSearchTree()
{
	root = NULL;
}
// Release every node still owned by the tree.
RBSearchTree::~RBSearchTree()
{
	destroy_tree(root);
}
void RBSearchTree::destroy_tree(RBsearchTreeNode *leaf)
{
if (leaf != NULL)
{
destroy_tree(leaf->left);
destroy_tree(leaf->right);
delete leaf;
}
}
// Insert a dictionary entry, then restore the red-black invariants.
// insert_h returns NULL for a duplicate word, in which case nothing
// was added and no fixup is needed.
void RBSearchTree::insert(dictEntry *in)
{
	RBsearchTreeNode *added;
	if (root == NULL)
		added = root = new RBsearchTreeNode(in);
	else
		added = insert_h(in, root);
	if (added != NULL)
		insert_fixup(added);
}
// Descend to the correct slot for *in* (BST order on the stored word)
// and attach a new node there.  Returns the freshly created node, or
// NULL when an entry with the same word already exists.
RBsearchTreeNode* RBSearchTree::insert_h(dictEntry *in, RBsearchTreeNode *current)
{
	if (*(in) < current->data->getWord())
	{
		if (current->left != NULL)
			return insert_h(in, current->left);
		current->left = new RBsearchTreeNode(in, current);
		return current->left;
	}
	if (*(in) > current->data->getWord())
	{
		if (current->right != NULL)
			return insert_h(in, current->right);
		current->right = new RBsearchTreeNode(in, current);
		return current->right;
	}
	return NULL;	// duplicate word: do not insert twice
}
void RBSearchTree::insert_fixup(RBsearchTreeNode *node)
// Restore the red-black properties after inserting `node` (standard
// CLRS insert-fixup cases).
// p = parent of node, g = grandparent, u = uncle of node.
{
	if (node->parent == NULL)
		node->color = BLACK;		// case 1: node is the root
	else if (node->parent->color == BLACK)
		return;				// case 2: black parent -- nothing violated
	else
	{
		RBsearchTreeNode *p = node->parent;
		RBsearchTreeNode *g = p->parent;
		RBsearchTreeNode *u = Uncle(node);
		if (Color(u) == BLACK)
		{
			// Red parent, black uncle: first straighten a zig-zag
			// into a zig-zig.  swap() exchanges the LOCAL pointers
			// so p/node keep naming the upper/lower node after the
			// rotation.
			if ((node == p->right) and (p == g->left))
			{
				rotate_left(p);
				swap(p, node);
			}
			if ((node == p->left) and (p == g->right))
			{
				rotate_right(p);
				swap(p, node);
			}
			// Then recolor and rotate about the grandparent.
			p->color = BLACK;
			g->color = RED;
			if (node == p->left)
				rotate_right(g);
			else
				rotate_left(g);
		}
		else
		{
			// Red parent AND red uncle: recolor and push the
			// potential violation up to the grandparent.
			p->color = BLACK;
			u->color = BLACK;
			g->color = RED;
			insert_fixup(g);
		}
	}
}
// Return the sibling of node's parent (the "uncle"), or NULL when node
// has no grandparent.
RBsearchTreeNode* RBSearchTree::Uncle(RBsearchTreeNode *node)
{
	if (node->parent == NULL)
		return NULL;
	RBsearchTreeNode *p = node->parent;
	if (p->parent == NULL)
		return NULL;
	RBsearchTreeNode *g = p->parent;
	return (p == g->left) ? g->right : g->left;
}
// Color of a node, where NULL leaves count as BLACK (as the red-black
// rules require).
RBcolor RBSearchTree::Color(RBsearchTreeNode *node)
{
	return (node == NULL) ? BLACK : node->color;
}
void RBSearchTree::rotate_left(RBsearchTreeNode *node)
// Left-rotate around node: its right child y rises into node's place
// and node becomes y's left child.  In-order sequence is preserved.
{
	RBsearchTreeNode *y = node->right;
	node->right = y->left;			// y's left subtree becomes node's right
	if (y->left != NULL)
		y->left->parent = node;
	y->parent = node->parent;		// splice y into node's old position
	if (node->parent == NULL)
		root = y;			// node was the root
	else if (node == node->parent->left)
		node->parent->left = y;
	else
		node->parent->right = y;
	y->left = node;
	node->parent = y;
}
void RBSearchTree::rotate_right(RBsearchTreeNode *node)
// Right-rotate around node: its left child y rises into node's place
// and node becomes y's right child.  Mirror image of rotate_left.
{
	RBsearchTreeNode *y = node->left;
	node->left = y->right;			// y's right subtree becomes node's left
	if (y->right != NULL)
		y->right->parent = node;
	y->parent = node->parent;		// splice y into node's old position
	if (node->parent == NULL)
		root = y;			// node was the root
	else if (node == node->parent->right)
		node->parent->right = y;
	else
		node->parent->left = y;
	y->right = node;
	node->parent = y;
}
// Locate the node whose entry matches word w.
// Returns NULL when the tree is empty or the word is absent.
RBsearchTreeNode* RBSearchTree::search(string w)
{
	// Bug fix: search_h dereferences its argument immediately, so an
	// empty tree (root == NULL) used to crash here.
	if (root == NULL)
		return NULL;
	return search_h(w, root);
}
// Standard BST descent keyed on the stored word.
// Returns the matching node, or NULL when the word is not present.
RBsearchTreeNode* RBSearchTree::search_h(string w, RBsearchTreeNode *current)
{
	if (*(current->data) == w)
		return current;
	if (*(current->data) > w)
		return (current->left != NULL) ? search_h(w, current->left) : NULL;
	if (*(current->data) < w)
		return (current->right != NULL) ? search_h(w, current->right) : NULL;
	return NULL;
}
void RBSearchTree::remove(string w)
// Remove the entry whose word == w (no-op when absent), then splice the
// tree back together.
// NOTE(review): the two "copy the child's data into y and delete the
// child" branches below discard any grandchildren of that child and
// return before remove_fixup runs, which can break the red-black
// invariants -- confirm against test data before relying on balance.
{
	string newWord, newDefinition;
	RBsearchTreeNode* toDelete = search(w);
	RBsearchTreeNode* y;
	if (toDelete == NULL)
		return;
	// Two children: overwrite toDelete's payload with its in-order
	// successor's, then delete the successor node y instead.
	if ((toDelete->left == NULL) or (toDelete->right == NULL))
		y = toDelete;
	else
	{
		y = successor(toDelete);
		newWord = y->data->getWord();
		newDefinition = y->data->getDefinition();
		toDelete->data->updateWord(newWord);
		toDelete->data->updateDefinition(newDefinition);
	}
	// One child on the left: pull the child's data up and drop the child.
	if (y->left != NULL)
	{
		newWord = getWord(y->left);
		newDefinition = getDefinition(y->left);
		y->data->updateWord(newWord);
		y->data->updateDefinition(newDefinition);
		delete(y->left);
		y->left = NULL;
		return;
	}
	// One child on the right: same treatment, mirrored.
	if (y->right != NULL)
	{
		newWord = getWord(y->right);
		newDefinition = getDefinition(y->right);
		y->data->updateWord(newWord);
		y->data->updateDefinition(newDefinition);
		delete(y->right);
		y->right = NULL;
		return;
	}
	// y is a leaf: rebalance if we are removing a black node, then unlink.
	if (y->color == BLACK)
		remove_fixup(y);
	if (y->parent == NULL)
		root = NULL;
	else if (y == y->parent->left)
		y->parent->left = NULL;
	else
		y->parent->right = NULL;
	delete(y);
}
// Restore the red-black invariants after removing a black node.
// Follows the standard CLRS delete-fixup cases; s = sibling of node.
void RBSearchTree::remove_fixup(RBsearchTreeNode *node)
{
	if (node->parent == NULL)
		return;
	RBsearchTreeNode* s = Sibling(node);
	// Red sibling: rotate so node gains a black sibling.
	if (s->color == RED)
	{
		node->parent->color = RED;
		s->color = BLACK;
		if (node == node->parent->left)
			rotate_left(node->parent);
		else
			rotate_right(node->parent);
		s = Sibling(node);
	}
	// Black sibling with a "near" red child: rotate the sibling so the
	// red child lines up on the far side.
	if ((node == node->parent->left) and (Color(s->right) == BLACK) and (Color(s->left) == RED))
	{
		s->color = RED;
		s->left->color = BLACK;
		rotate_right(s);
		s = Sibling(node);	// bug fix: s was stale after this rotation
	}
	else if ((node == node->parent->right) and (Color(s->left) == BLACK) and (Color(s->right) == RED))
	{
		s->color = RED;
		s->right->color = BLACK;
		rotate_left(s);
		s = Sibling(node);	// bug fix: s was stale after this rotation
	}
	// Black sibling with a "far" red child: final rotation finishes.
	if ((node == node->parent->left) and (Color(s->right) == RED))
	{
		s->color = node->parent->color;
		node->parent->color = BLACK;
		s->right->color = BLACK;
		rotate_left(node->parent);
		return;
	}
	else if ((node == node->parent->right) and (Color(s->left) == RED))
	{
		s->color = node->parent->color;
		node->parent->color = BLACK;
		s->left->color = BLACK;	// bug fix: mirrored case recolored s->right
		rotate_right(node->parent);
		return;
	}
	else
	{
		// Both of s's children black: push the double-black upward.
		s->color = RED;
		remove_fixup(s->parent);
	}
}
// Return the other child of node's parent, or NULL for the root or a
// NULL node.
RBsearchTreeNode* RBSearchTree::Sibling(RBsearchTreeNode *node)
{
	if (node == NULL || node->parent == NULL)
		return NULL;
	return (node == node->parent->left) ? node->parent->right : node->parent->left;
}
// In-order successor: the leftmost node of the right subtree.
// Returns NULL when toDelete is NULL or has no right child.
RBsearchTreeNode* RBSearchTree::successor(RBsearchTreeNode *toDelete)
{
	if (toDelete == NULL || toDelete->right == NULL)
		return NULL;
	RBsearchTreeNode* walk = toDelete->right;
	while (walk->left != NULL)
		walk = walk->left;
	return walk;
}
void RBSearchTree::preOrder(string filename)
// Write the tree to `filename` via the protected helper (helper kept
// protected so callers cannot disturb the traversal order).
// NOTE(review): preorder_h below actually performs an IN-ORDER walk, so
// this produces sorted output -- confirm which order was intended.
{
	fout.open(filename.c_str(), ofstream::out);
	preorder_h(root);
	fout.close();
}
void RBSearchTree::preorder_h(RBsearchTreeNode *n)
// Recursive traversal helper.
// NOTE(review): despite the name this is an IN-ORDER traversal
// (left, visit, right), i.e. entries come out in sorted word order.
{
	if (n != NULL)
	{
		preorder_h(n->left);
		visit(n);
		preorder_h(n->right);
	}
}
// Open `filename`, dump the tree through postorder_h, and close the file.
void RBSearchTree::postOrder(string filename)
{
	fout.open(filename.c_str(), ofstream::out);
	postorder_h(root);
	fout.close();
}
void RBSearchTree::postorder_h(RBsearchTreeNode *n)
// Recursive traversal helper.
// NOTE(review): despite the name this is a REVERSE in-order traversal
// (right, visit, left), i.e. sorted word order backwards.
{
	if (n != NULL)
	{
		postorder_h(n->right);
		visit(n);
		postorder_h(n->left);
	}
}
// Open `filename`, dump the tree through inorder_h, and close the file.
void RBSearchTree::inOrder(string filename)
{
	fout.open(filename.c_str(), ofstream::out);
	inorder_h(root);
	fout.close();
}
void RBSearchTree::inorder_h(RBsearchTreeNode *n)
// Traversal helper.
// NOTE(review): despite the name this is a LEVEL-ORDER (breadth-first)
// walk, root first then level by level left-to-right -- handy for a
// save file that rebuilds the same tree shape on reload.
// NOTE(review): assumes n != NULL; an empty tree would push NULL and
// crash inside visit() -- confirm callers guard against that.
{
	RBsearchTreeNode *current;
	queue<RBsearchTreeNode*> Q;
	Q.push(n);
	while (!Q.empty())
	{
		current = Q.front();
		Q.pop();
		visit(current);
		if (current->left != NULL)
			Q.push(current->left);
		if (current->right != NULL)
			Q.push(current->right);
	}
}
// Emit one "word: definition" record for node n to the already-open
// member stream fout (opened by the traversal entry points).
void RBSearchTree::visit(RBsearchTreeNode *n)
{
	string word = getWord(n);
	string definition = getDefinition(n);
	fout << word << ": " << definition << "\n\n";
}
// Forward to the entry's word accessor via the node's data pointer.
string RBSearchTree::getWord(RBsearchTreeNode *n)
{
	return n->data->getWord();
}
// Forward to the entry's definition accessor via the node's data pointer.
string RBSearchTree::getDefinition(RBsearchTreeNode *n)
{
	return n->data->getDefinition();
}
<file_sep>/*
NOTE: Lines 74 and 75 are commented out. Uncomment one at a time. I don't think
I got the functions to work correctly, but I got them to go through each
vertex, so I threw in some cout statements to show where it's reaching.
Uncommenting both at once will produce too much output for a single run.
It's sort of just to show the work I've done.
*/
#include <iostream>
#include <fstream>
#include <sstream>
#include <string.h>
#include <queue>
#include <deque>
using namespace std;
// Adjacency-matrix graph with DFS/BFS traversal drivers.
class mygraph {
public:
	int nbVertices;		// number of vertices (matrix dimension)
	int** adjMatrix;	// nbVertices x nbVertices adjacency matrix (0/1)
	int** mark;		// visit marks used by the traversals
	mygraph();
	mygraph(int nbLines);
	~mygraph();
	void DFS(mygraph& G);	// depth-first traversal driver
	void BFS(mygraph& G);	// breadth-first traversal driver
private:
	int count;		// running visit counter shared by dfs/bfs
	void dfs(int vertex);
	void bfs(int vertex);
};
int main()
{
string response, line;
// a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t,
int a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, row, column, nbLines;
cout << "Enter a file name: ";
cin >> response;
char *c_response = new char[response.length()+1];
strcpy(c_response, response.c_str());
ifstream myFile(c_response);
getline(myFile, line);
istringstream iss(line);
iss >> nbLines;
mygraph G(nbLines);
row = 0; // which row it is
while (getline(myFile, line))
{
column = 0; // which column it is
istringstream iss(line);
iss >> G.adjMatrix[row][column] >> G.adjMatrix[row][column+1] >> G.adjMatrix[row][column+2] >> G.adjMatrix[row][column+3] >> G.adjMatrix[row][column+4] >> G.adjMatrix[row][column+5] >> G.adjMatrix[row][column+6] >> G.adjMatrix[row][column+7] >> G.adjMatrix[row][column+8] >> G.adjMatrix[row][column+9] >> G.adjMatrix[row][column+10] >> G.adjMatrix[row][column+11] >> G.adjMatrix[row][column+12] >> G.adjMatrix[row][column+13] >> G.adjMatrix[row][column+14] >> G.adjMatrix[row][column+15] >> G.adjMatrix[row][column+16] >> G.adjMatrix[row][column+17] >> G.adjMatrix[row][column+18] >> G.adjMatrix[row][column+19];
// cout << G.adjMatrix[row][column] << G.adjMatrix[row][column+1] << G.adjMatrix[row][column+2] << G.adjMatrix[row][column+3] << G.adjMatrix[row][column+4] << G.adjMatrix[row][column+5] << G.adjMatrix[row][column+6] << G.adjMatrix[row][column+7] << G.adjMatrix[row][column+8] << G.adjMatrix[row][column+9] << G.adjMatrix[row][column+10] << G.adjMatrix[row][column+11] << G.adjMatrix[row][column+12] << G.adjMatrix[row][column+13] << G.adjMatrix[row][column+14] << G.adjMatrix[row][column+15] << G.adjMatrix[row][column+16] << G.adjMatrix[row][column+17] << G.adjMatrix[row][column+18] << G.adjMatrix[row][column+19] << endl;
row++;
}
// G.DFS(G);
// G.BFS(G);
}
// Default-construct an empty graph.
// Bug fix: mark (and count) were left uninitialised; the destructor
// unconditionally delete[]s mark, so an uninitialised pointer there was
// undefined behaviour for default-constructed graphs.
mygraph::mygraph()
{
	nbVertices = 0;
	adjMatrix = NULL;
	mark = NULL;
	count = 0;
}
// Allocate an n-by-n adjacency matrix plus a matching visit-mark matrix.
mygraph::mygraph(int nbLines)
{
	nbVertices = nbLines;
	adjMatrix = new int* [nbVertices];
	mark = new int* [nbVertices];
	for (int row = 0; row < nbVertices; row++)
	{
		adjMatrix[row] = new int [nbVertices];
		mark[row] = new int [nbVertices];
	}
}
// Free each matrix row, then the row-pointer arrays themselves.
mygraph::~mygraph()
{
	for (int row = 0; row < nbVertices; row++)
	{
		delete[] adjMatrix[row];
		delete[] mark[row];
	}
	delete[] adjMatrix;
	delete[] mark;
}
void mygraph::DFS(mygraph& G)
// Driver: clear the marks, then dfs() every still-unmarked vertex.
// NOTE(review): only the strict upper triangle (j > i) of mark is
// cleared, yet the loop below tests mark[vertex][vertex] -- the diagonal
// is never initialised, so this reads indeterminate memory.  Confirm.
{
	count = 0;
	for (int i = 0; i < nbVertices; i++)
	{
		for (int j = i+1; j < nbVertices; j++)
		{
			mark[i][j] = 0;
		}
	}
	for (int vertex = 0; vertex < nbVertices; vertex++)
	{
		if (mark[vertex][vertex] == 0)
		{
			dfs(vertex);
		}
	}
	cout << count << endl;
	cout << "It pops them in FIFO order\n";
}
void mygraph::dfs(int vertex)
// Recursive depth-first visit.
// NOTE(review): mark is a 2-D matrix but is used here as a per-vertex
// array; *mark[vertex] only writes mark[vertex][0].
// NOTE(review): mark[w] is a row POINTER and is never 0, so the test
// below compares a pointer against NULL -- almost certainly meant
// mark[w][0] == 0, and the adjacency matrix is never consulted.
{
	count += 1;
	*mark[vertex] = count;
	for (int w = 0; w < nbVertices; w++)
	{
		cout << "X Coordinate (" << w << ")" << endl;
		if (mark[w] == 0)
		{
			dfs(w);
		}
	}
	cout << "\n\n\n";
}
void mygraph::BFS(mygraph& G)
// Driver for the breadth-first traversal.
// NOTE(review): unlike DFS, mark is never cleared here, and the
// mark[vertex][vertex] test reads uninitialised memory -- confirm
// before relying on the output.
{
	count = 0;
	for (int vertex = 0; vertex < nbVertices; vertex++)
	{
		if (mark[vertex][vertex] == 0)
		{
			bfs(vertex);
		}
	}
	cout << "It goes through all ys for each x before moving on to the next x\n";
}
void mygraph::bfs(int vertex)
// Queue-based visit, acknowledged as incomplete by the file's header.
// NOTE(review): *mark[vertex] only writes mark[vertex][0];
// deque<int> myQueue(vertex) constructs `vertex` zero-valued elements
// rather than seeding the queue with `vertex`; and the adjacency matrix
// is never consulted, so every mark cell simply gets numbered.
{
	count += 1;
	*mark[vertex] = count;
	deque<int> myQueue(vertex);
	queue<int> queues(myQueue);
	while (!queues.empty())
	{
		for (int w = 0; w < nbVertices; w++)
		{
			for (int j = 0; j < nbVertices; j++)
			{
				if (mark[w][j] == 0)
				{
					count += 1;
					mark[w][j] = count;
					queues.push(w);
					cout << "Coordinate (" << w << ", " << j << ")" << endl;
				}
			}
		}
		queues.pop();
	}
}<file_sep>/*
Author: <NAME>
NOTES: There was no description on what to do when key == leaf->value in
insert_help(). I took it as not duplicating and did not re-insert it
into the tree. It's weird though that it shows up in inOrderTraversal.
15. It traverses the tree and displays the output. It seems to display the
first node to the left, then its child node. Then it displays back up
to the root, then traverses down the right and displays back up to the root
again, resulting in a double cout of each node.
*/
#include <iostream>
#include <string>
using namespace std;
// One node of the integer binary search tree.
class treeNode
{
public:
	int value;		// key stored at this node
	treeNode *left;		// subtree of smaller keys
	treeNode *right;	// subtree of larger keys
	treeNode(int x);
};
// Unbalanced integer BST supporting insert, search, and an in-order dump.
class binarySearchTree
{
private:
	treeNode *root;					// NULL when the tree is empty
	void destroy_tree(treeNode *leaf);		// post-order delete helper
	void insert_help(int key, treeNode *leaf);	// recursive insert helper
	void inOrder_help(treeNode *leaf);		// recursive traversal helper
	bool search_help(int key, treeNode *leaf);	// recursive lookup helper
public:
	void insert(int key);
	void inOrderTraversal();
	bool search(int key);
	binarySearchTree();
	~binarySearchTree();
};
// Interactive driver: fill the tree with positive integers, run lookups,
// then dump the tree in order.  Any non-positive entry ends each phase.
int main()
{
	int answer;
	bool found;
	binarySearchTree tree;
	// Phase 1: insert until the user enters 0 or a negative number.
	cout << "Enter a positive number (0 or -x quits): ";
	cin >> answer;
	while (answer > 0)
	{
		tree.insert(answer);
		cout << "Enter a positive number (0 or -x quits): ";
		cin >> answer;
	}
	// Phase 2: search until the user enters 0 or a negative number.
	cout << "Search for a positive number (0 or -x quits): ";
	cin >> answer;
	while (answer > 0)
	{
		found = tree.search(answer);
		if (found)
		{
			cout << "Found " << answer << endl;
		}
		else
		{
			cout << answer << " not found\n";
		}
		cout << "Search for a positive number (0 or -x quits): ";
		cin >> answer;
	}
	// Phase 3: display the stored keys.
	tree.inOrderTraversal();
}
// Build a detached leaf node holding x.
treeNode::treeNode(int x)
{
	value = x;
	left = NULL;
	right = NULL;
}
// Start with an empty tree.
binarySearchTree::binarySearchTree()
{
	root = NULL;
}
// Free every node still held by the tree.
binarySearchTree::~binarySearchTree()
{
	destroy_tree(root);
}
// Recursively free the subtree rooted at leaf (post-order).
void binarySearchTree::destroy_tree(treeNode *leaf)
{
	if (leaf == NULL)
		return;
	destroy_tree(leaf->left);
	destroy_tree(leaf->right);
	delete(leaf);
}
// Public insert: the first key becomes the root, later keys descend.
void binarySearchTree::insert(int key)
{
	if (root != NULL)
		insert_help(key, root);
	else
		root = new treeNode(key);
}
// Descend toward key's slot and attach a new node at the first NULL
// child; duplicates are reported and dropped.
void binarySearchTree::insert_help(int key, treeNode *leaf)
{
	if (key == leaf->value)
	{
		cout << "Value already entered. Value not duplicated (last entry not stored.)\n";
		return;
	}
	if (key < leaf->value)
	{
		if (leaf->left != NULL)
			insert_help(key, leaf->left);
		else
			leaf->left = new treeNode(key);
		return;
	}
	if (key > leaf->value)
	{
		if (leaf->right != NULL)
			insert_help(key, leaf->right);
		else
			leaf->right = new treeNode(key);
		return;
	}
	cout << "This should never show up. insert_help bug\n";
}
// Report whether key is stored in the tree.
bool binarySearchTree::search(int key)
{
	// Bug fix: the result of search_help was computed but never
	// returned, so callers read an indeterminate value (undefined
	// behaviour -- flowing off the end of a value-returning function).
	return search_help(key, root);
}
bool binarySearchTree::search_help(int key, treeNode *leaf)
{
if (leaf == NULL)
{
return false;
}
else if (leaf->value == key)
{
return true;
}
else if (leaf->value > key)
{
return search_help(key, leaf->left);
}
else
{
return search_help(key, leaf->right);
}
}
// Public wrapper: dump the whole tree starting at the root.
void binarySearchTree::inOrderTraversal()
{
	inOrder_help(root);
}
// Print the stored keys in ascending order, one per line.
void binarySearchTree::inOrder_help(treeNode *leaf)
{
	if (leaf == NULL)
	{
		return;
	}
	// Bug fix: the original printed leaf->value a second time after the
	// right-subtree visit, so every key appeared twice in the output
	// (the behaviour the file's header comment describes).
	inOrder_help(leaf->left);
	cout << leaf->value << endl;
	inOrder_help(leaf->right);
}<file_sep>#include "searchTreeNode.h"
using namespace std;
// Detached node: holds the entry, with no tree links yet.
searchTreeNode::searchTreeNode(dictEntry *d)
{
	data = d;
	left = NULL;
	right = NULL;
	parent = NULL;
}
// Node attached under parent p, children not yet set.
searchTreeNode::searchTreeNode(dictEntry *d, searchTreeNode *p)
{
	data = d;
	parent = p;
	left = NULL;
	right = NULL;
}
searchTreeNode::searchTreeNode(dictEntry *d, searchTreeNode *l, searchTreeNode *r, searchTreeNode *p)
// Fully-wired node: entry plus explicit left/right/parent links.
{
	data = d;
	left = l;
	right = r;
	parent = p;
}<file_sep>// <NAME>
//
// NOTE: The swapLines will work, just without the strings concatenated
// and the second line is a bit messed up. If it's couted it will
// display the correct numbers for the values, but when writing
// it's a bit messed up.
// The correct numbers are: 715727 476424 284794
#include <iostream>
#include <fstream>
#include <stdlib.h>
#include <sstream>
#include <string>
using namespace std;
void reverseFile();
void swapLines(int evens, int div3, int div5, int nbLines);
int main()
{
	// Whole program is driven from reverseFile(), which chains into swapLines().
	reverseFile();
}
void reverseFile()
{
// variables ====================
int counter = 0;
string line;
stringstream stringNumber;
ifstream myFile("barn.moo");
ofstream myOutFile;
myOutFile.open("reverse.dat");
// ==============================
int firstNum, secondNum, thirdNum;
int evens = 0;
int div3 = 0;
int div5 = 0;
string word1, word2, word3, word4, word5, word6, word7;
getline(myFile, line);
istringstream iss(line);
iss >> firstNum;
myOutFile << "There are " << firstNum << " lines in this file.\n";
// nbLines
int nbLines = firstNum;
int *intArray = new int[3*nbLines];
while (getline(myFile, line))
{
istringstream iss(line);
iss >> word1 >> firstNum >> word2 >> word3 >> secondNum >> word4 >> word5 >> word6 >> thirdNum >> word7;
intArray[counter] = firstNum;
counter++;
intArray[counter] = secondNum;
counter++;
intArray[counter] = thirdNum;
counter++;
if(firstNum%2 == 0)
{
evens++;
}
if(firstNum%3 == 0)
{
div3++;
}
if(firstNum%5 == 0)
{
div5++;
}
if(secondNum%2 == 0)
{
evens++;
}
if(secondNum%3 == 0)
{
div3++;
}
if(secondNum%5 == 0)
{
div5++;
}
if(thirdNum%2 == 0)
{
evens++;
}
if(thirdNum%3 == 0)
{
div3++;
}
if(thirdNum%5 == 0)
{
div5++;
}
}
int j = counter-1;
while (j > 0)
{
thirdNum = intArray[j];
secondNum = intArray[j-1];
firstNum = intArray[j-2];
myOutFile << firstNum << "," << secondNum << "," << thirdNum << "\n";
j -= 3;
}
swapLines(evens, div3, div5, nbLines);
}
void swapLines(int evens, int div3, int div5, int nbLines)
// Read reverse.dat back, swap each consecutive pair of lines, and write
// swap.dat; for an odd line count the tallies line replaces one pair.
// NOTE(review): the file's own header says this function is not fully
// working.  Observations to confirm: num1/num2/num3, token, delim and
// position are unused; intArray is allocated but never used or freed;
// `>> word1` grabs the whole comma-joined line (no spaces), so entire
// lines -- not numbers -- are swapped; and when the swapped pair is
// replaced by the tallies line the pair's data is silently dropped.
{
	cout << "evens: " << evens << " div3: " << div3 << " div5: " << div5 << endl;
	int counter = 0;
	int insertHere = nbLines - 2;
	int firstNum, num1, num2, num3;
	string *intArray = new string[3*nbLines];
	string line, word1, word2, token;
	string delim = ",";
	size_t position = 0;
	ifstream myFile("reverse.dat");
	ofstream myOutFile;
	myOutFile.open("swap.dat");
	// Header is "There are N lines...": word1="There", word2="are",
	// then firstNum picks up N.
	getline(myFile, line);
	istringstream iss(line);
	iss >> word1 >> word2 >> firstNum;
	nbLines = firstNum;
	myOutFile << "There are also " << firstNum << " lines in this file.\n";
	while(getline(myFile, line))
	{
		istringstream firstWord(line);
		firstWord >> word1;
		getline(myFile, line);
		istringstream secondWord(line);
		secondWord >> word2;
		swap(word1, word2);
		if (nbLines%2!=0 and counter == insertHere)
		{
			myOutFile << evens << " " << div3 << " " << div5 << endl << endl;
		}
		else
			myOutFile << word1 << endl << word2 << endl;
		counter += 2;
	}
}<file_sep>#include <iostream>
#include <fstream>
#include <sstream>
#include <string.h>
#include <math.h>
using namespace std;
void InsertionSort(int* array, int size);
void BinarySearch(int* array, int size, int element);
// Read "<n>" then n integers (one per line) from a user-named file,
// sort them, and hand off to the interactive binary-search loop.
int main()
{
	string filename, line;
	int a, nbLines;
	int counter = 0;
	cout << "Enter a file name: ";
	cin >> filename;
	// ifstream accepts c_str() directly -- the strcpy'd char* copy the
	// original made was never freed.
	ifstream myFile(filename.c_str());
	getline(myFile, line);
	istringstream iss(line);
	iss >> nbLines;
	int *array = new int[nbLines];
	while (getline(myFile, line))
	{
		istringstream iss(line);
		iss >> a;
		array[counter] = a;
		counter++;
	}
	InsertionSort(array, nbLines);
	delete[] array;	// bug fix: was leaked in the original
}
// In-place ascending insertion sort; prints the sorted array and then
// drops into the interactive BinarySearch loop (original behaviour).
void InsertionSort(int* array, int size)
{
	for (int i = 1; i < size; i++)
	{
		int key = array[i];
		int slot = i - 1;
		while ((slot >= 0) and (array[slot] > key))
		{
			array[slot+1] = array[slot];
			slot--;
		}
		array[slot+1] = key;
	}
	for (int idx = 0; idx < size; idx++)
	{
		cout << array[idx] << ", ";
	}
	cout << endl;
	BinarySearch(array, size, 0);
}
void BinarySearch(int* array, int size, int element)
// Interactive loop: repeatedly prompt for a value and binary-search the
// (already sorted) array for it; the loop ends after the first miss.
// NOTE(review): the `element` parameter is overwritten by cin before it
// is ever read, so the initial argument (0) is ignored -- confirm it
// can be dropped from the signature.
{
	bool inArray = true;
	bool found = false;
	// l = left
	// r = right
	// m = middle
	int l, r, m;
	while (inArray)
	{
		found = false;
		cout << "Search for: ";
		cin >> element;
		l = 0;
		r = size-1;
		while ((l <= r) and (found == false))
		{
			// Integer midpoint (floor is redundant on int division).
			m = floor((l+r)/2);
			if (element == array[m])
			{
				cout << "Found at index " << m << endl;
				found = true;
			}
			else if (element < array[m])
			{
				r = m-1;
			}
			else
			{
				l = m+1;
			}
		}
		if (found == false)
		{
			cout << "Not found\n";
			inArray = false;
		}
	}
}<file_sep>#include <string>
#include <fstream>
using namespace std;
// Red-black tree of dictionary entries, derived from searchTree.
// NOTE(review): this header has no include guard and relies on
// searchTree, RBsearchTreeNode, RBcolor and dictEntry being declared
// before it is included -- confirm the include order at call sites.
class RBSearchTree: public searchTree {
	// purely virtual functions by including = 0 at end
	// functions never used, used as base class for other classes
public:
	RBSearchTree();
	~RBSearchTree();
	virtual void insert(dictEntry *in);					// public insert + rebalance
	virtual RBsearchTreeNode* insert_h(dictEntry *in, RBsearchTreeNode *root);
	virtual void insert_fixup(RBsearchTreeNode *node);			// restore RB invariants after insert
	virtual RBsearchTreeNode* Uncle(RBsearchTreeNode *node);
	virtual RBcolor Color(RBsearchTreeNode *node);				// NULL counts as BLACK
	virtual void rotate_left(RBsearchTreeNode *node);
	virtual void rotate_right(RBsearchTreeNode *node);
	virtual void remove(string w);						// delete entry whose word == w
	virtual void remove_fixup(RBsearchTreeNode *node);
	virtual RBsearchTreeNode* Sibling(RBsearchTreeNode *node);
	virtual RBsearchTreeNode* search(string w);
	virtual RBsearchTreeNode* search_h(string w, RBsearchTreeNode *root);
	virtual RBsearchTreeNode* successor(RBsearchTreeNode *n);
	virtual void preOrder(string filename);					// write traversals to file
	virtual void postOrder(string filename);
	virtual void inOrder(string filename);
protected:
	RBsearchTreeNode *root;
	ofstream fout;								// stream shared by the traversal writers
	virtual void preorder_h(RBsearchTreeNode *n);
	virtual void postorder_h(RBsearchTreeNode *n);
	virtual void inorder_h(RBsearchTreeNode *n);
	virtual void visit(RBsearchTreeNode *n);				// write one "word: definition" record
	virtual void destroy_tree(RBsearchTreeNode *leaf);
	virtual string getWord(RBsearchTreeNode *n);
	virtual string getDefinition(RBsearchTreeNode *n);
};<file_sep>/*
TODO: successor in remove()
*/
#include "binarySearchTree.h"
#include <iostream>
#include <queue>
using namespace std;
binarySearchTree::binarySearchTree()
// Construct an empty tree (NULL root).
{
    root = NULL;
}
binarySearchTree::~binarySearchTree()
// Free every node via a post-order teardown.
{
    destroy_tree(root);
}
void binarySearchTree::destroy_tree(searchTreeNode *leaf)
// Recursively delete the subtree rooted at leaf (children first).
// NOTE(review): the dictEntry payloads are not deleted here — presumably
// owned elsewhere; confirm before changing.
{
    if (leaf != NULL)
    {
        destroy_tree(leaf->left);
        destroy_tree(leaf->right);
        delete leaf;
    }
}
void binarySearchTree::insert(dictEntry *in)
// Insert a dictEntry: it becomes the root of an empty tree, otherwise
// insert_h descends to the correct leaf position.
{
    if (root == NULL)
        root = new searchTreeNode(in);
    else
        insert_h(in, root);
}
void binarySearchTree::insert_h(dictEntry *in, searchTreeNode *current)
// Recursive helper for insert(): standard BST descent by word.
// A duplicate word is silently dropped (the existing entry wins).
{
    if (*(in) == current->data->getWord())
        return;
    else if (*(in) < current->data->getWord())
    {
        if (current->left == NULL)
            current->left = new searchTreeNode(in, current);
        else
            insert_h(in, current->left);
    }
    else
    {
        if (current->right == NULL)
            current->right = new searchTreeNode(in, current);
        else
            insert_h(in, current->right);
    }
}
searchTreeNode* binarySearchTree::search(string w)
// Return the node whose entry's word equals w, or NULL when absent.
// Guards the empty-tree case: search_h dereferences its node argument,
// so the original crashed when called before any insert.
{
    if (root == NULL)
        return NULL;
    return search_h(w, root);
}
searchTreeNode* binarySearchTree::search_h(string w, searchTreeNode *current)
// Recursive helper for search(): compare w against the entry's word and
// descend left/right. Returns NULL when the branch runs out.
{
    if (*(current->data) == w)
    {
        return current;
    }
    else if (*(current->data) > w)
    {
        if (current->left != NULL)
            return search_h(w, current->left);
        else
            return NULL;
    }
    else if (*(current->data) < w)
    {
        if (current->right != NULL)
            return search_h(w, current->right);
        else
            return NULL;
    }
    else
        return NULL;   // unreachable: string ordering is total
}
void binarySearchTree::remove(string w)
// actually does the removing and restructuring of the tree
// dictEntry with word == w
{
string newWord, newDefinition;
searchTreeNode* toDelete = search(w);
if (toDelete != NULL)
{
if (toDelete->parent != NULL)
{
if (toDelete->left == NULL)
{
if (toDelete == toDelete->parent->left)
{
toDelete->parent->left = toDelete->right;
delete toDelete;
}
else
{
toDelete->parent->right = toDelete->right;
delete toDelete;
}
}
else if (toDelete->right == NULL)
{
if (toDelete == toDelete->parent->left)
{
toDelete->parent->left = toDelete->left;
delete toDelete;
}
else
{
toDelete->parent->right = toDelete->left;
delete toDelete;
}
}
else
{
searchTreeNode* toDeleteSucc = successor(toDelete);
newDefinition = toDeleteSucc->data->getDefinition();
newWord = toDeleteSucc->data->getWord();
toDelete->data->updateDefinition(newDefinition);
// update word inside so toDelete check works out correctly
if (toDelete == toDelete->parent->left)
{
toDelete->data->updateWord(newWord);
if (toDeleteSucc->right != NULL)
toDeleteSucc->parent->left = toDeleteSucc->right;
delete toDeleteSucc;
}
else
{
toDelete->data->updateWord(newWord);
if (toDeleteSucc->right != NULL)
toDeleteSucc->parent->left = toDeleteSucc->right;
delete toDeleteSucc;
}
}
}
}
}
searchTreeNode* binarySearchTree::successor(searchTreeNode *toDelete)
// In-order successor within toDelete's right subtree: the leftmost node
// of that subtree. Returns NULL when toDelete is NULL or has no right
// child (remove() only needs the two-children case).
{
    if (toDelete == NULL)
        return NULL;
    searchTreeNode *walker = toDelete->right;
    if (walker == NULL)
        return NULL;
    while (walker->left != NULL)
        walker = walker->left;
    return walker;
}
void binarySearchTree::preOrder(string filename)
// Open filename (truncating), dump the tree via preorder_h — see its
// note on actual traversal order — and close the stream.
{
    fout.open(filename.c_str(), ofstream::out);
    preorder_h(root);
    fout.close();
}
void binarySearchTree::preorder_h(searchTreeNode *n)
// NOTE(review): despite its name this is an IN-order walk
// (left, visit, right), so entries come out sorted by word — which is
// what the original author's comment ("sorts") asked for. Renaming would
// change the public traversal names, so the behavior is documented here
// instead.
{
    if (n != NULL)
    {
        preorder_h(n->left);
        visit(n);
        preorder_h(n->right);
    }
}
void binarySearchTree::postOrder(string filename)
// Open filename (truncating), dump the tree via postorder_h, close.
{
    fout.open(filename.c_str(), ofstream::out);
    postorder_h(root);
    fout.close();
}
void binarySearchTree::postorder_h(searchTreeNode *n)
// NOTE(review): despite its name this is a REVERSE in-order walk
// (right, visit, left), so entries come out in descending word order —
// matching the original comment ("reverse sort").
{
    if (n != NULL)
    {
        postorder_h(n->right);
        visit(n);
        postorder_h(n->left);
    }
}
void binarySearchTree::inOrder(string filename)
// Open filename (truncating), dump the tree via inorder_h (which is
// actually a level-order walk — see its comment), close.
{
    fout.open(filename.c_str(), ofstream::out);
    inorder_h(root);
    fout.close();
}
void binarySearchTree::inorder_h(searchTreeNode *n)
// Level-order (breadth-first) walk from n, writing each node via visit().
// Despite its name this is NOT an in-order traversal; the ordering is
// what the author wanted for re-loadable file output.
// Fix: guard against an empty tree — the original pushed NULL onto the
// queue and then dereferenced it.
{
    if (n == NULL)
        return;
    searchTreeNode *current;
    queue<searchTreeNode*> Q;
    Q.push(n);
    while (!Q.empty())
    {
        current = Q.front();
        Q.pop();
        visit(current);
        if (current->left != NULL)
            Q.push(current->left);
        if (current->right != NULL)
            Q.push(current->right);
    }
}
void binarySearchTree::visit(searchTreeNode *n)
// Write one "word: definition" record (blank-line separated) to fout,
// which must already be open (the traversal entry points open it).
{
    string word, definition;
    word = getWord(n);
    definition = getDefinition(n);
    fout << word << ": " << definition << "\n\n";
}
string binarySearchTree::getWord(searchTreeNode *n)
// Accessor shim: pull the word out of the node's dictEntry payload.
{
    return n->data->getWord();
}
string binarySearchTree::getDefinition(searchTreeNode *n)
// Accessor shim: pull the definition out of the node's dictEntry payload.
{
    return n->data->getDefinition();
}
using namespace std;
class searchTree {
// Abstract interface for the dictionary's backing store. Every method is
// pure virtual; concrete trees (binarySearchTree, RBSearchTree) override
// them.
public:
    // Virtual destructor so destroying a tree through a searchTree*
    // runs the derived destructor (the original base class lacked one).
    virtual ~searchTree() {}
    virtual void insert(dictEntry *input) = 0;
    virtual treeNode* search(string input) = 0;
    virtual void remove(string input) = 0;
    // File-writing traversals; each opens `filename` and dumps the tree.
    virtual void preOrder(string filename) = 0;
    virtual void postOrder(string filename) = 0;
    virtual void inOrder(string filename) = 0;
};
#include "treeNode.h"
#include "searchTree.h"
#include "searchTreeNode.cpp"
#include "binarySearchTree.cpp"
#include "RBsearchTreeNode.cpp"
#include "RBSearchTree.cpp"
#include "dictionary.h"
#include <string>
#include <iostream>
#include <locale> // toupper
#include <fstream>
using namespace std;
dictionary::dictionary(searchTree *n)
// Bind this dictionary to a caller-owned searchTree implementation.
{
    this->dict = n;
}
dictionary::~dictionary()
// No-op: the tree is owned by the caller (stack-allocated in main), so
// the dictionary deliberately does not delete it.
{
}
void dictionary::search(string word)
// Look up `word` (upper-cased via makeCap so matching is
// case-insensitive) and print its definition, or a not-found message.
// Cleanup: the original fetched getWord() into an unused local.
{
    word = makeCap(word);
    treeNode *entry = dict->search(word);
    if (entry == NULL)
        cout << "The word " << word << " was not found in the dictionary.\n";
    else
    {
        string definition = entry->data->getDefinition();
        cout << word << ": " << definition << endl;
    }
}
void dictionary::add(dictEntry *entry)
// Hand ownership of a new entry to the backing tree via insert().
{
    dict->insert(entry);
}
void dictionary::remove(string word)
// Delegate removal of `word` to the backing tree.
// NOTE(review): callers are expected to makeCap() the word first.
{
    dict->remove(word);
}
void dictionary::readFile(string filename)
// Load "WORD: definition" lines from `filename` into the tree. Blank
// lines and lines without a usable "WORD: " prefix are skipped.
// Fixes: the original stored find()'s result in an int and tested
// `if (endWord)`, so npos (-1 as int) counted as "found" and malformed
// lines made substr() throw; lines starting with ':' stay skipped.
{
    dictEntry *entry;
    string line, word, definition;
    ifstream dictionaryFile(filename.c_str());
    while (getline(dictionaryFile, line))
    {
        if (line.length() == 0)
            continue;
        string::size_type endWord = line.find(':', 0);
        // Need a ':' after at least one word character, and room for the
        // ": " separator before the definition text.
        if (endWord == string::npos || endWord == 0
            || endWord + 2 > line.length())
            continue;
        word = line.substr(0, endWord);
        definition = line.substr(endWord + 2);
        entry = new dictEntry(word, definition);
        add(entry);
    }
}
void dictionary::writeFilePreorder(string filename)
// Dump the tree to `filename` using the tree's preOrder writer.
{
    dict->preOrder(filename);
}
void dictionary::writeFileInorder(string filename)
// Dump the tree to `filename` using the tree's inOrder writer.
{
    dict->inOrder(filename);
}
void dictionary::writeFilePostorder(string filename)
// Dump the tree to `filename` using the tree's postOrder writer.
{
    dict->postOrder(filename);
}
string makeCap(string word)
// Return a copy of `word` with every character upper-cased so dictionary
// matching is case-insensitive.
// Fix: iterate over the string's length instead of scanning for '\0' —
// the original indexed word[size()], which is undefined behavior before
// C++11, and silently stopped at any embedded NUL.
{
    for (string::size_type i = 0; i < word.length(); ++i)
        word[i] = toupper(word[i]);
    return word;
}
int main()
{
dictEntry *entry;
int whichTree, whichOption, whichOrder;
string filename, newWord, newDefinition, wordToGet, removeWord;
cout << "Binary Tree [1] or Red-Black [2] Tree? ";
cin >> whichTree;
if (whichTree == 1)
{
binarySearchTree tree;
dictionary dict(&tree);
do {
cin.clear();
cout << "Read [1], Add [2], Search [3], Remove [4], Save [5], Close [0]: ";
cin >> whichOption;
switch (whichOption) {
case 1:
cout << "Enter file name: ";
cin >> filename;
// dict.readFile("testEntries.txt");
dict.readFile(filename);
break;
case 2:
// in brackets so other tree doesn't cross define dictEntry entry
{
cout << "Enter word: ";
cin >> newWord;
newWord = makeCap(newWord);
cout << "Enter definition: ";
getline(cin.ignore(), newDefinition);
entry = new dictEntry(newWord, newDefinition);
dict.add(entry);
}
break;
case 3:
cout << "Enter word to search for: ";
cin >> wordToGet;
dict.search(wordToGet);
break;
case 4:
cout << "Enter word to remove: ";
cin >> removeWord;
removeWord = makeCap(removeWord);
dict.remove(removeWord);
break;
case 5:
cout << "Preorder [1], Inorder [2], Postorder [3]: ";
cin >> whichOrder;
cout << "Enter filename: ";
cin >> filename;
switch (whichOrder) {
case 1:
dict.writeFilePreorder(filename);
break;
case 2:
dict.writeFileInorder(filename);
break;
case 3:
dict.writeFilePostorder(filename);
break;
default:
cout << "Not a valid option\n";
}
break;
case 0:
cout << "successfully deleted everything\n";
break;
default:
cout << "Invalid input.";
}
} while (whichOption != 0);
}
else if (whichTree == 2)
{
RBSearchTree tree;
dictionary dict(&tree);
do {
cin.clear();
cout << "Read [1], Add [2], Search [3], Remove [4], Save [5], Close [0]: ";
cin >> whichOption;
switch (whichOption) {
case 1:
cout << "Enter file name: ";
cin >> filename;
// dict.readFile("testEntries.txt");
dict.readFile(filename);
break;
case 2:
// in brackets so other tree doesn't cross define dictEntry entry
{
cout << "Enter word: ";
cin >> newWord;
newWord = makeCap(newWord);
cout << "Enter definition: ";
getline(cin.ignore(), newDefinition);
entry = new dictEntry(newWord, newDefinition);
dict.add(entry);
}
break;
case 3:
cout << "Enter word to search for: ";
cin >> wordToGet;
dict.search(wordToGet);
break;
case 4:
cout << "Enter word to remove: ";
cin >> removeWord;
removeWord = makeCap(removeWord);
dict.remove(removeWord);
break;
case 5:
cout << "Preorder [1], Inorder [2], Postorder [3]: ";
cin >> whichOrder;
cout << "Enter filename: ";
cin >> filename;
switch (whichOrder) {
case 1:
dict.writeFilePreorder(filename);
break;
case 2:
dict.writeFileInorder(filename);
break;
case 3:
dict.writeFilePostorder(filename);
break;
default:
cout << "Not a valid option\n";
}
break;
case 0:
cout << "successfully deleted everything\n";
break;
default:
cout << "Invalid input.\n";
}
} while (whichOption != 0);
}
else
{
cout << "You broke me with invalid input.\nClosing dictionary.\n";
return -1;
}
}<file_sep>// <NAME>
#include <iostream>
#include <fstream>
#include <stdlib.h>
#include <sstream>
#include <string>
#include <time.h>
using namespace std;
void generateFile();
class integerPairs
// A pair of ints ordered lexicographically: compare value1 first, then
// value2 as the tie-breaker (see the operator definitions below).
{
public:
    int value1, value2;
    integerPairs(){};               // default: members left uninitialized
    integerPairs(int a, int b);
    bool operator==(integerPairs&);
    bool operator>(integerPairs&);
    bool operator>=(integerPairs&);
    bool operator<(integerPairs&);
    bool operator<=(integerPairs&);
};
integerPairs* readFile(int nbLines);
void ourSort(integerPairs* array, int n);
integerPairs findMax(integerPairs* array, int n, int& x);
void ourSortTiming(int size);
void generateFile()
// Overwrite InputNumbers.dat with 10 lines of two random ints in [0,100).
// Seeds rand() from the wall clock each call.
{
    ofstream myfile;
    srand(time(NULL));
    myfile.open("InputNumbers.dat");
    int firstRand, secondRand;
    for (int i = 0; i < 10; i++) {
        firstRand = rand()%100;
        secondRand = rand()%100;
        myfile << firstRand << " " << secondRand << "\n";
    }
    myfile.close();
}
integerPairs::integerPairs(int a, int b)
// Initialize both stored values.
{
    value1 = a;
    value2 = b;
}
bool integerPairs::operator==(integerPairs&)
{
int a, b, c, d;
if ((a == c) && (b == d))
return true;
else
return false;
}
bool integerPairs::operator>(integerPairs&)
{
int a, b, c, d;
if ((a > c) || ((a == c) && (b > d)))
return true;
else
return false;
}
bool integerPairs::operator>=(integerPairs&)
{
int a, b, c, d;
if ((a == c) && (b == d))
return true;
else if ((a > c) || ((a == c) && (b > d)))
return true;
else
return false;
}
bool integerPairs::operator<(integerPairs&)
{
int a, b, c, d;
if ((a < c) || ((a == c) && (b < d)))
return true;
else
return false;
}
bool integerPairs::operator<=(integerPairs&)
{
int a, b, c, d;
if ((a == c) && (b == d))
return true;
else if ((a < c) || ((a == c) && (b < d)))
return true;
else
return false;
}
integerPairs* readFile(int nbLines)
// Read up to nbLines "<int> <int>" lines from InputNumbers.dat into a
// heap-allocated array the caller owns.
// Cleanup: removed a leftover debug print of intArray[3], three unused
// locals, and bounded the loop so a longer file cannot overrun the array.
{
    integerPairs *intArray = new integerPairs[nbLines];
    int counter = 0;
    string line;
    ifstream myfile("InputNumbers.dat");
    while (counter < nbLines && getline(myfile, line))
    {
        istringstream iss(line);
        int a, b;
        iss >> a >> b;
        intArray[counter].value1 = a;
        intArray[counter].value2 = b;
        counter++;
    }
    return intArray;
}
integerPairs findMax(integerPairs* array, int n, int& x)
// Return the largest of the first n pairs and set x to its index.
// Fixes: the original read array[n] (one past the data it was asked to
// scan) and compared everything against a default-constructed,
// uninitialized `highest`, leaving x unset when nothing "beat" garbage.
{
    integerPairs highest = array[n-1];
    x = n-1;
    for (int i = n-2; i >= 0; i--)
    {
        if (array[i] > highest)
        {
            highest = array[i];
            x = i;
        }
    }
    return highest;
}
void ourSort(integerPairs* array, int n)
// Selection sort: move the max of the unsorted prefix [0, n) to slot
// n-1, shrink, repeat.
// Fix: the original copied the max over a local and never wrote the
// displaced element back into the max's old slot, so the array ended up
// full of duplicated maxima instead of sorted.
{
    int x;
    while (n > 1)
    {
        findMax(array, n, x);       // sets x to the max's index
        integerPairs temp = array[x];
        array[x] = array[n-1];
        array[n-1] = temp;
        n--;
    }
}
void ourSortTiming(int size)
// Time ourSort on `size` pairs loaded from InputNumbers.dat and print
// the elapsed clock ticks. The loaded array is leaked (small, one-shot).
{
    clock_t start, end;
    int totalTime;
    integerPairs* array;
    array = readFile(size);
    start = clock();
    ourSort(array, size);
    end = clock();
    totalTime = end - start;
    cout << "Total time: " << totalTime << endl;
}
int main()
// Driver: regenerate InputNumbers.dat, then time ourSort on 40 pairs.
// NOTE(review): generateFile() only writes 10 lines, so the remaining
// 30 pairs come from default-constructed (uninitialized) entries —
// confirm whether nbLines should be 10.
{
    int nbLines = 40;
    generateFile();
    ourSortTiming(nbLines);
}
<NAME>
Questions
a. I have to generate 175 keys to get my first collision.
b. I have to generate 23305 keys to get my first 5-way collision.
c. I had 33155 total collisions.
d. I had 33155 empty spaces in the table.
e. I had 7 collisions in the most filled space.
f. It took 1012410 keys to fill the entire array.
g. There were 26 collisions in the most filled space when array is full
*/
#include <iostream>
#include <string>
#include <stdlib.h>
#include <random>
using namespace std;
long generateRandomKey();
long hashFunction(long key);
long countEmpties(long *table);
long countHighest(long *table);
long untilItsFull(long *table);
void fillTable(long *table, int n);
void initTable(long *table);
int numberOfItems = 90000;
default_random_engine generator;
uniform_int_distribution<long> distribution(0,180000);
int main()
// Hashing experiment driver: fill the 90000-slot table several times
// with random keys, reporting n-way collisions (n = 2, then 5), total
// collisions, empty slots, and how many keys a full fill takes.
{
    long emptySpaces, highestNumberCollisions, numToFull, highestNumberCollisionsFull;
    long *table = new long[numberOfItems];
    initTable(table);
    // First 2-way collision (change n for other collision depths).
    int n = 2;
    fillTable(table, n);
    // First 5-way collision.
    initTable(table);
    n = 5;
    fillTable(table, n);
    // n == -1 switches fillTable into "count every collision" mode.
    initTable(table);
    n = -1;
    fillTable(table, n);
    emptySpaces = countEmpties(table);
    highestNumberCollisions = countHighest(table);
    initTable(table);
    numToFull = untilItsFull(table);
    highestNumberCollisionsFull = countHighest(table);
    cout << "There were " << emptySpaces << " empty spaces in the table\n";
    cout << "There were " << highestNumberCollisions << " collisions in the most filled space\n";
    cout << "It took " << numToFull << " keys to fill the entire array\n";
    cout << "There were " << highestNumberCollisionsFull << " collisions in the most filled space when array is full\n";
}
long generateRandomKey()
// Draw a uniform key in [0, 180000] from the file-level generator.
{
    return distribution(generator);
}
long hashFunction(long key)
// Multiplicative hash ((11057*key) mod prime 179999) folded into the
// table size numberOfItems.
{
    return (((11057*key) % 179999) % numberOfItems);
}
long countEmpties(long* table)
// Count slots that never received a key (hit-count still zero).
{
    long numEmpties = 0;
    for (int i = 0; i < numberOfItems; i++)
    {
        if (table[i] == 0)
            numEmpties++;
    }
    return numEmpties;
}
long countHighest(long* table)
// Return the largest per-slot hit count (i.e. the worst collision pile).
{
    long highest = 0;
    for (int i = 0; i < numberOfItems; i++)
    {
        if (table[i] > highest)
            highest = table[i];
    }
    return highest;
}
long untilItsFull(long* table)
{
bool full = false;
long key, hashedKey;
long numberKeys = 0;
while (full == false)
{
full = true;
key = generateRandomKey();
hashedKey = hashFunction(key);
table[hashedKey] += 1;
numberKeys++;
for (int i = 0; i < numberOfItems; i++)
{
if (table[i] == 0)
{
full = false;
}
}
if (full == true)
return numberKeys;
}
}
void initTable(long* table)
// Reset every slot's hit count to zero before a new experiment.
{
    for (int i = 0; i < numberOfItems; i++)
        table[i] = 0;
}
void fillTable(long* table, int n)
// Insert numberOfItems random keys, incrementing each hashed slot.
// n == -1: count and report the total number of collisions.
// n > 0:   stop and report as soon as some slot reaches n hits (an
//          "n-way collision"); the loop breaks after the first one, so
//          collisionCount is always 1 when printed, and the hardcoded
//          "st" suffix only reads correctly for that case.
// NOTE(review): `position` is assigned but never used.
{
    int collisionCount = 0;
    int totalCollisions = 0;
    long key, hashedKey, position;
    if (n == -1)
    {
        for (int i = 0; i < numberOfItems; i++)
        {
            position = i;
            key = generateRandomKey();
            hashedKey = hashFunction(key);
            table[hashedKey] += 1;
            if (table[hashedKey] > 1)
            {
                totalCollisions++;
            }
        }
        cout << totalCollisions << " total collisions\n";
    }
    else
    {
        for (int i = 0; i < numberOfItems; i++)
        {
            position = i;
            key = generateRandomKey();
            hashedKey = hashFunction(key);
            table[hashedKey] += 1;
            if (table[hashedKey] == n)
            {
                collisionCount++;
                cout << collisionCount << "st " << n << "-way collision at " << i << "th key generated\n";
                break;
            }
        }
    }
}
using namespace std;
class dictionary {
// Facade over a searchTree: exposes the user-facing operations and
// delegates storage to the tree passed at construction (caller-owned).
public:
    dictionary(searchTree *n);
    ~dictionary();
    void search(string word);               // print definition or not-found
    void add(dictEntry *entry);             // insert a new entry
    void remove(string word);               // delete by word
    void readFile(string filename);         // bulk-load "WORD: def" lines
    void writeFilePreorder(string filename);
    void writeFileInorder(string filename);
    void writeFilePostorder(string filename);
protected:
    searchTree *dict;                       // backing tree (not owned)
};
string makeCap(string word);<file_sep>// rbst class has fout from bst
using namespace std;
// Node colors for the red-black tree invariants.
enum RBcolor {RED, BLACK};
class RBsearchTreeNode: public treeNode {
// Red-black tree node: child/parent links plus a color bit on top of
// treeNode's dictEntry payload.
public:
    RBsearchTreeNode *left;
    RBsearchTreeNode *right;
    RBsearchTreeNode *parent;
    RBcolor color;
    RBsearchTreeNode(dictEntry *d);                            // detached node
    RBsearchTreeNode(dictEntry *d, RBsearchTreeNode *p);       // with parent
    RBsearchTreeNode(dictEntry *d, RBsearchTreeNode *l, RBsearchTreeNode *r, RBsearchTreeNode *p);
};
Author: <NAME>
NOTE: I commented out the function calls that are not in use
Currently commented out is the results, running will show clock
ticks.
5. Results: 4483, seg fault, seg fault, seg fault
The stack overflows and results in segmentation faults for the tests
above 25000.
6. Clock ticks: 17,075,431 - 15,810,986 - 13,986 - 10,087
7. Noticeable difference between expmod3 (13,986) and expmod4(10,087)
bigTime()s not run
*/
#include <iostream>
#include <time.h>
using namespace std;
unsigned long expmod1(unsigned long b, unsigned long n, unsigned long m);
unsigned long expmod2(unsigned long b, unsigned long n, unsigned long m);
unsigned long expmod3(unsigned long b, unsigned long n, unsigned long m);
unsigned long expmod4(unsigned long b, unsigned long n, unsigned long m);
void time1();
void time2();
void time3();
void time4();
bool tester(unsigned long b, unsigned long m);
unsigned long b = 21415;
unsigned long n = 25000;
unsigned long m = 31457;
int main() {
// Benchmark driver for the four expmod variants using the file-level
// globals b, n, m. Reduces b mod m first, validates the inputs, then
// times each variant over 50001 calls. The single-result calls are left
// commented out by the original author.
    unsigned long results;
    bool testBool = true;
    if (b > m) {
        b = b%m;
    }
    testBool = tester(b, m);
    if (testBool) {
        clock_t start, end;
        start = clock();
        time1();
        end = clock();
        cout << "The computation took " << (end - start) << " clock ticks.\n";
        start = clock();
        time2();
        end = clock();
        cout << "The computation took " << (end - start) << " clock ticks.\n";
        start = clock();
        time3();
        end = clock();
        cout << "The computation took " << (end - start) << " clock ticks.\n";
        start = clock();
        time4();
        end = clock();
        cout << "The computation took " << (end - start) << " clock ticks.\n";
        // results = expmod1(b, n, m);
        // cout << results << endl;
        // results = expmod2(b, n, m);
        // cout << results << endl;
        // results = expmod3(b, n, m);
        // cout << results << endl;
        // results = expmod4(b, n, m);
        // cout << results << endl;
    }
    else {
        cout << "Base too small or modulo too large\n";
    }
}
unsigned long expmod1(unsigned long b, unsigned long n, unsigned long m) {
    // Naive recursive modular exponentiation: b^n mod m with one
    // multiply-and-reduce per recursion level (O(n) depth).
    if (n == 0) {
        return 1;
    }
    unsigned long rest = expmod1(b, n - 1, m);
    return (b * rest) % m;
}
unsigned long expmod2(unsigned long b, unsigned long n, unsigned long m) {
    // Iterative modular exponentiation: multiply-and-reduce n times.
    unsigned long acc = 1;
    for (unsigned long i = 0; i < n; ++i) {
        acc = (acc * b) % m;
    }
    return acc;
}
unsigned long expmod3(unsigned long b, unsigned long n, unsigned long m) {
if (n == 0) {
return 1;
}
else {
if (n%2 != 0) {
return (b*(expmod3(b, n-1, m))%m);
}
else {
return (expmod3((b*b)%m, (n*0.5), m));
}
}
}
unsigned long expmod4(unsigned long b, unsigned long n, unsigned long m) {
    // Iterative square-and-multiply (binary exponentiation), preserving
    // the original parameters by working on local copies.
    unsigned long acc = 1;
    unsigned long base = b;
    unsigned long exponent = n;
    while (exponent != 0) {
        if (exponent % 2 != 0) {
            // Odd: fold one factor of the base into the accumulator.
            acc = (acc * base) % m;
            exponent -= 1;
        }
        else {
            // Even: square the base, halve the exponent.
            base = (base * base) % m;
            exponent = exponent / 2;
        }
    }
    return acc;
}
void time1() {
    // Benchmark: run expmod1 on the file-level globals b, n, m 50001
    // times so clock() can resolve the cost.
    int numTimes = 50000;
    while (numTimes >= 0) {
        expmod1(b, n, m);
        numTimes -= 1;
    }
}
void time2() {
    // Benchmark: run expmod2 on the file-level globals 50001 times.
    int numTimes = 50000;
    while (numTimes >= 0) {
        expmod2(b, n, m);
        numTimes -= 1;
    }
}
void time3() {
    // Benchmark: run expmod3 on the file-level globals 50001 times.
    int numTimes = 50000;
    while (numTimes >= 0) {
        expmod3(b, n, m);
        numTimes -= 1;
    }
}
void time4() {
    // Benchmark: run expmod4 on the file-level globals 50001 times.
    int numTimes = 50000;
    while (numTimes >= 0) {
        expmod4(b, n, m);
        numTimes -= 1;
    }
}
bool tester(unsigned long b, unsigned long m) {
    // Validate the benchmark inputs: the base must exceed 1 and the
    // modulus must fit in 16 bits (so b*b cannot overflow unsigned long).
    // Fix: the original had no return statement on the success path,
    // which is undefined behavior.
    if (b <= 1 || m > 65536) {
        return false;
    }
    return true;
}
==========
`By: <NAME>`
# FINISHED
+ binarySearchTree
+ read
+ add
+ search
+ remove
+ save
+ close
+ RBSearchTree
+ read
+ add
+ search
+ remove
+ save
+ close
# Note
When saving a red-black tree, it will save word: definition: RBcolor, so reading in from that file will add extra text to the definition. I may look into finding a workaround. For the time being, do not read in a file that was saved from a RB tree.<file_sep>Algorithms
==========
This directory contains all of my labs.
<file_sep>using namespace std;
class treeNode {
// Minimal tree-node base: just the dictionary payload. Derived node
// types (searchTreeNode, RBsearchTreeNode) add links and color.
public:
    dictEntry *data;
};
#include "dictEntry.h"
using namespace std;
dictEntry::dictEntry(string w, string d)
// Construct an entry holding a word and its definition.
{
    word = w;
    definition = d;
}
string dictEntry::getWord()
// Return a copy of this entry's word.
{
    return this->word;
}
string dictEntry::getDefinition()
// Return a copy of this entry's definition.
{
    return this->definition;
}
void dictEntry::updateWord(string word)
// Replace this entry's word (used by BST remove's payload swap).
{
    this->word = word;
}
void dictEntry::updateDefinition(string def)
// Replace this entry's definition (used by BST remove's payload swap).
{
    this->definition = def;
}
bool dictEntry::operator==(string str)
// True when this entry's word equals str exactly.
{
    return this->word.compare(str) == 0;
}
bool dictEntry::operator>(string str)
// True when this entry's word sorts after str (lexicographic compare).
{
    return this->word.compare(str) > 0;
}
bool dictEntry::operator<(string str)
// True when this entry's word sorts before str (lexicographic compare).
{
    return this->word.compare(str) < 0;
}
bool dictEntry::operator>=(string str)
// True when this entry's word sorts after or equals str.
{
    return this->word.compare(str) >= 0;
}
bool dictEntry::operator<=(string str)
// True when this entry's word sorts before or equals str.
{
    return this->word.compare(str) <= 0;
}
NOTE: To see sorted lists, use test.txt and change nbLines to 5, and
uncomment the cout statements in main
*/
#include <iostream>
#include <fstream>
#include <sstream>
#include <string.h>
#include <stdlib.h>
#include <math.h>
#include <time.h>
using namespace std;
class myItem {
// A (serialNumber, name) record; all comparisons below are delegated to
// serialNumber (see the operator definitions further down the file).
public:
    int serialNumber;
    string name;
    myItem() {};                       // default: members uninitialized
    myItem(int sn, string s);
    // Array-parameter overloads (kept for source compatibility; the
    // sorts use the single-object overloads below).
    bool operator==(myItem myObject[]);
    bool operator<(myItem myObject[]);
    bool operator<=(myItem myObject[]);
    bool operator>(myItem myObject[]);
    bool operator>=(myItem myObject[]);
    bool operator==(myItem myObject);
    bool operator<(myItem myObject);
    bool operator<=(myItem myObject);
    bool operator>(myItem myObject);
    bool operator>=(myItem myObject);
};
void MergeSort(myItem original[], int size);
void Merge(myItem Left[], myItem Right[], myItem original[], int sizeL, int sizeR);
int LomutoPartition(myItem A[], int l, int r);
void LomutoQuickSort(myItem A[], int l, int r);
void HoareQuickSort(myItem A[], int l, int r);
int HoarePartition(myItem A[], int l, int r);
void cMergeSort(myItem original[], int size);
void cMerge(myItem Left[], myItem Right[], myItem original[], int sizeL, int sizeR);
int cLomutoPartition(myItem A[], int l, int r);
void cLomutoQuickSort(myItem A[], int l, int r);
void cHoareQuickSort(myItem A[], int l, int r);
int cHoarePartition(myItem A[], int l, int r);
int count = 0;
int main()
{
string filename, line, lineString;
int a;
int counter = 0;
int nbLines = 30000;
int l = 0;
// file for MergeSort =================================
cout << "Enter a file name to MergeSort: ";
cin >> filename;
if (filename == "")
{
exit(EXIT_FAILURE);
}
char* char_filenameM = new char[filename.length()+1];
strcpy(char_filenameM, filename.c_str());
ifstream myFileM(char_filenameM);
myItem arrayM[nbLines];
while (getline(myFileM, line))
{
istringstream iss(line);
iss >> a >> lineString;
arrayM[counter].serialNumber = a;
arrayM[counter].name = lineString;
counter++;
}
// end MergeSort file =================================
counter = 0;
// file for LomutoSort ================================
cout << "Enter a file name to LomutoSort: ";
cin >> filename;
if (filename == "")
{
exit(EXIT_FAILURE);
}
char* char_filenameL = new char[filename.length()+1];
strcpy(char_filenameL, filename.c_str());
ifstream myFileL(char_filenameL);
myItem arrayL[nbLines];
while (getline(myFileL, line))
{
istringstream iss(line);
iss >> a >> lineString;
arrayL[counter].serialNumber = a;
arrayL[counter].name = lineString;
counter++;
}
// end LomutoSort file ================================
counter = 0;
// file for HoareSort =================================
cout << "Enter a file name to HoareSort: ";
cin >> filename;
if (filename == "")
{
exit(EXIT_FAILURE);
}
char* char_filenameH = new char[filename.length()+1];
strcpy(char_filenameH, filename.c_str());
ifstream myFileH(char_filenameH);
myItem arrayH[nbLines];
while (getline(myFileH, line))
{
istringstream iss(line);
iss >> a >> lineString;
arrayH[counter].serialNumber = a;
arrayH[counter].name = lineString;
counter++;
}
// end HoareSort file =================================
clock_t start, end;
start = clock();
MergeSort(arrayM, nbLines);
end = clock();
cout << "MergeSort\n";
// for (int i = 0; i < nbLines; i++)
// {
// cout << arrayM[i].serialNumber << ", ";
// }
cout << "\nMergeSort took " << (end - start) << " clock ticks.\n";
start = clock();
LomutoQuickSort(arrayL, l, nbLines);
end = clock();
cout << "\n\n\nLomutoQuickSort\n";
// for (int i = 0; i < nbLines; i++)
// {
// cout << arrayL[i].serialNumber << ", ";
// }
cout << "\nLomutoQuickSort took " << (end - start) << " clock ticks.\n";
start = clock();
HoareQuickSort(arrayH, l, nbLines-1);
end = clock();
cout << "\n\n\nHoareQuickSort\n";
// for (int i = 0; i < nbLines; i++)
// {
// cout << arrayH[i].serialNumber << ", ";
// }
cout << "\nHoareQuickSort took " << (end - start) << " clock ticks.\n";
cout << "\n\n\n\n";
cMergeSort(arrayM, nbLines);
cout << "cMergeSort\n";
// for (int i = 0; i < nbLines; i++)
// {
// cout << arrayM[i].serialNumber << ", ";
// }
// cout << endl;
cout << "made " << count << " comparisons\n";
count = 0;
cLomutoQuickSort(arrayL, l, nbLines);
cout << "\n\n\ncLomutoQuickSort\n";
// for (int i = 0; i < nbLines; i++)
// {
// cout << arrayL[i].serialNumber << ", ";
// }
// cout << endl;
cout << "made " << count << " comparisons\n";
count = 0;
cHoareQuickSort(arrayH, l, nbLines-1);
cout << "\n\n\ncHoareQuickSort\n";
// for (int i = 0; i < nbLines; i++)
// {
// cout << arrayH[i].serialNumber << ", ";
// }
// cout << endl;
cout << "made " << count << " comparisons\n";
cout << "\n\ncompleted all sorts\n";
cout << "\n\n";
main();
}
void MergeSort(myItem original[], int size)
// Top-down merge sort: split into Left/Right halves (stack-allocated;
// note this recurses with O(size) stack per level), sort each, merge.
{
    if (size > 1)
    {
        int numFloor = floor((size/2));
        int numCeiling = size - numFloor;
        myItem Left[numFloor];
        myItem Right[numCeiling];
        // Copy the two halves out of the original.
        for (int i = 0; i < numFloor; i++)
        {
            Left[i] = original[i];
        }
        for (int j = numFloor, k = 0; j < size; j++, k++)
        {
            Right[k] = original[j];
        }
        // Sort each half, then merge back into `original`.
        MergeSort(Left, numFloor);
        MergeSort(Right, numCeiling);
        Merge(Left, Right, original, numFloor, numCeiling);
    }
}
void Merge(myItem Left[], myItem Right[], myItem original[], int sizeL, int sizeR)
// Merge two sorted runs (Left, Right) back into `original`. `<=` keeps
// the merge stable (ties taken from Left first).
{
    int i = 0; // cursor into Left
    int j = 0; // cursor into Right
    int k = 0; // cursor into original
    while ((i < sizeL) and (j < sizeR))
    {
        if (Left[i] <= Right[j])
        {
            original[k] = Left[i];
            i++;
        }
        else
        {
            original[k] = Right[j];
            j++;
        }
        k++;
    }
    // Copy whichever run still has elements.
    if (i == sizeL)
    {
        for (; j < sizeR; j++, k++)
        {
            original[k] = Right[j];
        }
    }
    else
    {
        for (; i < sizeL; i++, k++)
        {
            original[k] = Left[i];
        }
    }
}
// cMergeSort: same algorithm as MergeSort, instrumented with the global
// `count` to tally operations (each `count++`/`count += n` marks loop
// tests, branches, and recursive entries). Do not reorder the counter
// increments — their placement defines what is being measured.
void cMergeSort(myItem original[], int size)
{
    if (size > 1)
    {
        count++; // charge the size test
        // allocate arrays
        int numFloor = floor((size/2));
        int numCeiling = size - numFloor;
        myItem Left[numFloor];
        myItem Right[numCeiling];
        // copy to left (B) and right (C)
        for (int i = 0; i < numFloor; i++)
        {
            count++;
            Left[i] = original[i];
        }
        count++; // final (failing) loop test
        for (int j = numFloor, k = 0; j < size; j++, k++)
        {
            count++;
            Right[k] = original[j];
        }
        count++; // final (failing) loop test
        // mergesort Left then Right
        cMergeSort(Left, numFloor);
        cMergeSort(Right, numCeiling);
        cMerge(Left, Right, original, numFloor, numCeiling);
    }
    count++;
}
// cMerge: instrumented counterpart of Merge. `count` is bumped per merge
// step and per tail-copy iteration; `count += 3` charges the compound
// while condition.
void cMerge(myItem Left[], myItem Right[], myItem original[], int sizeL, int sizeR)
{
    int i = 0; // how far in Left
    int j = 0; // how far in Right
    int k = 0; // how far in original
    // while loop
    while ((i < sizeL) and (j < sizeR))
    {
        count += 3;
        if (Left[i] <= Right[j])
        {
            count++;
            original[k] = Left[i];
            i++;
        }
        else
        {
            count++;
            original[k] = Right[j];
            j++;
        }
        k++;
    }
    count += 3; // final (failing) while test
    if (i == sizeL)
    {
        count++;
        for (; j < sizeR; j++, k++)
        {
            count++;
            original[k] = Right[j];
        }
    }
    else
    {
        count++;
        for (; i < sizeL; i++, k++)
        {
            count++;
            original[k] = Left[i];
        }
    }
}
// Quicksort driver using the Lomuto partition scheme: partition A[l..r]
// around a pivot, then recurse on the two sides, excluding the pivot slot.
void LomutoQuickSort(myItem A[], int l, int r)
{
    if (l >= r)
    {
        return; // zero or one element: nothing to sort
    }
    int split = LomutoPartition(A, l, r);
    LomutoQuickSort(A, l, split - 1); // elements left of the pivot
    LomutoQuickSort(A, split + 1, r); // elements right of the pivot
}
// Lomuto partition: pivot is A[l].serialNumber. Elements smaller than the
// pivot are swapped into the front region; `s` tracks the last index of
// that region. Finally the pivot is swapped into place and its index
// returned.
// NOTE(review): the scan runs i < r (exclusive), so A[r] itself is never
// examined — callers in this project appear to pass r as one-past-the-end
// (main calls with nbLines); textbook Lomuto uses i <= r with r as the
// last index. TODO confirm which convention is intended.
int LomutoPartition(myItem A[], int l, int r)
{
    int pivot = A[l].serialNumber;
    int s = l; // boundary of the "< pivot" region
    for (int i = l+1; i < r; i++)
    {
        if (A[i].serialNumber < pivot)
        {
            s += 1;
            swap(A[s], A[i]);
        }
    }
    swap(A[l], A[s]); // move pivot between the two regions
    return s;
}
// cLomutoQuickSort: instrumented Lomuto quicksort; the global `count`
// tallies comparisons/operations. Counter placement is part of the
// measurement — keep increments exactly where they are.
void cLomutoQuickSort(myItem A[], int l, int r)
{
    if (l < r)
    {
        count++;
        int s = cLomutoPartition(A, l, r);
        cLomutoQuickSort(A, l, s-1);
        cLomutoQuickSort(A, s+1, r);
    }
    count++;
}
// cLomutoPartition: instrumented counterpart of LomutoPartition (same
// i < r scan convention — see the note there).
int cLomutoPartition(myItem A[], int l, int r)
{
    int pivot = A[l].serialNumber;
    int s = l;
    for (int i = l+1; i < r; i++)
    {
        count++; // loop-body entry
        if (A[i].serialNumber < pivot)
        {
            count++;
            s += 1;
            swap(A[s], A[i]);
        }
        count++; // charge the comparison
    }
    count++; // final (failing) loop test
    swap(A[l], A[s]);
    return s;
}
// Quicksort driver using the Hoare partition scheme. `r` is the index of
// the last element; recursion stops when the range has <= 1 element.
void HoareQuickSort(myItem A[], int l, int r)
{
    if (l >= r)
    {
        return; // base case: already sorted
    }
    int split = HoarePartition(A, l, r);
    HoareQuickSort(A, l, split - 1);
    HoareQuickSort(A, split + 1, r);
}
// Hoare partition on A[l..r]. The pivot is the smaller of A[l] and A[r],
// moved into A[l] first (so the right-to-left scan has A[r] >= pivot as a
// sentinel). Two inward scans swap out-of-place pairs; when the scans
// cross, the last swap is undone and the pivot is placed at j.
int HoarePartition(myItem A[], int l, int r)
{
    myItem pivot;
    if (A[l] < A[r])
    {
        pivot = A[l] ;
    }
    else
    {
        swap(A[l], A[r]) ; // ensure A[l] <= A[r] before scanning
        pivot = A[l] ;
    }
    int i = l;   // left scan cursor (pre-incremented)
    int j = r+1; // right scan cursor (pre-decremented)
    do {
        do {
            i += 1;
        } while (A[i] < pivot);
        do {
            j -= 1;
        } while (A[j] > pivot);
        swap(A[i], A[j]);
    } while (i < j);
    swap(A[i], A[j]); // undo the extra swap made after the cursors crossed
    swap(A[l], A[j]); // move the pivot into its final slot
    return j;
}
// cHoareQuickSort: instrumented Hoare quicksort; the global `count`
// tallies operations. Counter placement is part of the measurement.
void cHoareQuickSort(myItem A[], int l, int r)
{
    if (l < r)
    {
        count++;
        int s = cHoarePartition(A, l, r);
        cHoareQuickSort(A, l, s-1);
        cHoareQuickSort(A, s+1, r);
    }
    count++;
}
// cHoarePartition: instrumented counterpart of HoarePartition (same
// pivot selection, scan, and undo-last-swap logic).
int cHoarePartition(myItem A[], int l, int r)
{
    myItem pivot;
    if (A[l] < A[r])
    {
        count++;
        pivot = A[l] ;
    }
    else
    {
        count++;
        swap(A[l], A[r]) ;
        pivot = A[l] ;
    }
    int i = l;
    int j = r+1;
    do {
        count++; // outer loop entry
        do {
            count++; // left-scan comparison
            i += 1;
        } while (A[i] < pivot);
        do {
            count++; // right-scan comparison
            j -= 1;
        } while (A[j] > pivot);
        swap(A[i], A[j]);
    } while (i < j);
    count++; // final (failing) outer test
    swap(A[i], A[j]);
    swap(A[l], A[j]);
    return j;
}
// Build an item from its serial number and display name.
myItem::myItem(int sn, string s)
{
    this->serialNumber = sn;
    this->name = s;
}
// Relational operators comparing this item's serialNumber against the
// first element of an item array (the array parameter decays to a
// pointer, so myObject->serialNumber reads element 0).
bool myItem::operator==(myItem myObject[])
{
    return this->serialNumber == myObject->serialNumber;
}
bool myItem::operator<(myItem myObject[])
{
    return this->serialNumber < myObject->serialNumber;
}
bool myItem::operator<=(myItem myObject[])
{
    return this->serialNumber <= myObject->serialNumber;
}
bool myItem::operator>(myItem myObject[])
{
    return this->serialNumber > myObject->serialNumber;
}
bool myItem::operator>=(myItem myObject[])
{
    return this->serialNumber >= myObject->serialNumber;
}
// Relational operators on items passed by value: items order solely by
// their serialNumber field.
bool myItem::operator==(myItem myObject)
{
    return this->serialNumber == myObject.serialNumber;
}
bool myItem::operator<(myItem myObject)
{
    return this->serialNumber < myObject.serialNumber;
}
bool myItem::operator<=(myItem myObject)
{
    return this->serialNumber <= myObject.serialNumber;
}
bool myItem::operator>(myItem myObject)
{
    return this->serialNumber > myObject.serialNumber;
}
bool myItem::operator>=(myItem myObject)
{
    return this->serialNumber >= myObject.serialNumber;
}
<file_sep>comp215
=======
Class and homework assignments for Algorithms class.
<file_sep>/*
NOTE: Dynamic will correctly display the least number of coins needed, but for certain input not the number of each coin.
Works 84-89 but not 83, 90.
Greedy will correctly display the count of each coin (per the greedy strategy), but not the minimum number of coins.
For all inputs 0-7, both versions report the minimum correctly but the per-coin counts incorrectly.
I couldn't figure out these oddities.
*/
#include <iostream>
using namespace std;
void ChangeMaking(int n);
void GreedyChangeMaking(int n);
// Table entry for the change-making routines: the fewest coins found so
// far for an amount, plus how many of each denomination were used.
class changeStruct {
public:
    int minsofar, *numberEach; // numberEach has one counter per denomination in D
    changeStruct();
};
// Coin denominations in cents, smallest first.
int D[] = {1, 7, 30, 84, 235};
int m = 5; // number of coins
// Entry point: repeatedly read an amount and run both change-making
// strategies on it. A negative amount or a failed read ends the loop.
int main()
{
    // cin.clear();
    int input;
    do {
        cout << "Enter change to make: ";
        cin >> input;
        if (input < 0)
            break; // negative sentinel: quit
        cout << "\nDynamic\n";
        ChangeMaking(input);
        cout << "\nGreedy\n";
        GreedyChangeMaking(input);
    } while (input >= 0 && cin);
    if (!cin) {
        cout << "That wasn't a number.\n";
        // main();
    }
    else
        cout << "Done.\n";
}
// Start with zero coins: no minimum recorded and every per-denomination
// counter cleared.
changeStruct::changeStruct()
{
    minsofar = 0;
    // value-initialization `()` zeroes all m counters
    numberEach = new int[m]();
}
// Dynamic-programming change making: F[i] holds the fewest coins needed
// for amount i (and the per-denomination breakdown). For each amount it
// tries every denomination D[j] that fits and keeps the best sub-result
// plus one coin. Results for the final amount n are printed.
// NOTE(review): per the file-header comment, the per-coin breakdown is
// known to be wrong for some inputs (e.g. 83, 90) even when the minimum
// count is right.
void ChangeMaking(int n)
{
    int j, lowest;
    changeStruct counter, F[n+1]; // F = DP table, one entry per amount 0..n
    for (int i = 0; i < n+1; i++)
    {
        counter.minsofar = i; // worst case: i pennies
        j = 0;
        while (j < m && i >= D[j])
        {
            if (F[i-D[j]].minsofar+1 < counter.minsofar)
            {
                // Taking one D[j] coin on top of the sub-solution is better.
                counter.minsofar = F[i-D[j]].minsofar+1;
                for (int z = 0; z < m; z++)
                {
                    if (z != j)
                        counter.numberEach[z] = F[i-D[j]].numberEach[z];
                    else
                    {
                        counter.numberEach[z]++;
                        // cout << "z = " << z << "\nj = " << j << "\nnumberEach[" << z << "] increased to " << counter.numberEach[z] << "\n";
                    }
                }
            }
            j++;
        }
        // Commit the best found for amount i into the table.
        F[i].minsofar = counter.minsofar;
        for (int l = 0; l < m; l++)
            F[i].numberEach[l] = counter.numberEach[l];
    }
    cout << "minsofar = " << counter.minsofar << endl;
    cout << counter.numberEach[0] << " 1 cent coins\n";
    cout << counter.numberEach[1] << " 7 cent coins\n";
    cout << counter.numberEach[2] << " 30 cent coins\n";
    cout << counter.numberEach[3] << " 84 cent coins\n";
    cout << counter.numberEach[4] << " 235 cent coins\n";
}
// Greedy change making: F[i] tracks a coin count per amount, but the
// per-coin breakdown is filled greedily (largest denominations first,
// consuming n directly).
// NOTE(review): per the file-header comment, this reports the greedy
// per-coin counts correctly but not the true minimum number of coins.
void GreedyChangeMaking(int n)
{
    int j, newminsofar, F[n+1];
    changeStruct counter;
    F[0] = 0;
    for (int i = 0; i < n+1; i++)
    {
        counter.minsofar = i;
        j = 0;
        while ((j < m) && (D[j] <= i))
        {
            newminsofar = min(counter.minsofar, F[i-D[j]]+1);
            if (newminsofar < counter.minsofar)
            {
                counter.minsofar = newminsofar;
                // Greedy breakdown: take as many of the largest coin as fit,
                // then the next largest, mutating n in place.
                for (int l = m-1; l >= 0; l--)
                {
                    while (n - D[l] >= 0)
                    {
                        counter.numberEach[l]++;
                        n -= D[l];
                    }
                }
            }
            j++;
        }
        F[i] = counter.minsofar;
    }
    cout << "minsofar = " << counter.minsofar << endl;
    cout << counter.numberEach[0] << " 1 cent coins\n";
    cout << counter.numberEach[1] << " 7 cent coins\n";
    cout << counter.numberEach[2] << " 30 cent coins\n";
    cout << counter.numberEach[3] << " 84 cent coins\n";
    cout << counter.numberEach[4] << " 235 cent coins\n";
}<file_sep>#include <string>
using namespace std;
// Node for a binary search tree of dictionary entries. Extends treeNode
// (which presumably supplies the `data` payload — defined elsewhere) with
// child and parent links and constructors for each linking combination.
class searchTreeNode: public treeNode {
public:
    searchTreeNode *left;   // left subtree (smaller keys)
    searchTreeNode *right;  // right subtree (larger keys)
    searchTreeNode *parent; // enclosing node, NULL at the root
    searchTreeNode(dictEntry *d);
    searchTreeNode(dictEntry *d, searchTreeNode *p);
    searchTreeNode(dictEntry *d, searchTreeNode *l, searchTreeNode *r, searchTreeNode *p);
};<file_sep>#include <iostream>
#include <string>
using namespace std;
// Singly linked list cell holding one int and a pointer to the next cell.
class cell {
public:
    int value;
    cell *next;
    cell(int x);
    cell(int x, cell nextCell);
    cell(const cell& thisCell);
    cell operator+(cell cellTwo); // sum of the two cells' values
};
// Minimal queue over cells, tracked by its two end pointers.
class queue {
public:
    cell *first;
    cell *last;
    void pushLast(int x);
};
// Single global queue used by main (zero-initialized: first/last are NULL).
queue myQueue;
// Exercise each cell constructor, operator+, and the queue.
// NOTE(review): `*nextCell` dereferences an uninitialized pointer and
// `myQueue.last` is printed after pushLast (which never allocates) —
// both are undefined behavior in this demo as written.
int main() {
    cell firstCell(3);
    cell *nextCell;
    cout << firstCell.value << "\n";
    cell secondCell(2, *nextCell); // UB: nextCell was never set
    cout << secondCell.next << endl;
    cell thirdCell(firstCell); // copy constructor
    cout << thirdCell.value << "\t" << thirdCell.next << endl;
    cell fourthCell = firstCell + secondCell; // operator+
    cout << fourthCell.value << endl;
    myQueue.pushLast(2);
    cout << myQueue.last << "\n";
}
// Construct a standalone cell holding `x`.
// Fix: `next` was left uninitialized, so cells built this way carried a
// garbage pointer (main() copies and prints it — undefined behavior).
cell::cell(int x) {
	value = x;
	next = NULL;
}
// Construct a cell holding `x` that links to `nextCell`.
// WARNING(review): `nextCell` is passed BY VALUE, so `&nextCell` is the
// address of a temporary that dies when this constructor returns — the
// stored `next` pointer dangles immediately. Fixing this requires
// changing the declared signature (take `cell&` or `cell*`) in the class
// declaration as well.
cell::cell(int x, cell nextCell) {
	value = x;
	this->next = &nextCell;
}
// Copy constructor: duplicate the value and share the next link.
cell::cell(const cell& thisCell)
	: value(thisCell.value),
	  next(thisCell.next) {
}
// Adding two cells yields a fresh cell whose value is the sum of theirs.
cell cell::operator+(cell cellTwo) {
	cell combined(this->value + cellTwo.value);
	return combined;
}
void queue::pushLast(int x) {
cell *toEnd;
myQueue.last = toEnd;
}<file_sep>#include <iostream>
#include <fstream>
#include <stdlib.h>
#include <sstream>
#include <string.h>
#include <algorithm> // for swap()
using namespace std;
// A pair of ints read from the input file. All relational operators
// compare by val1 only (see the definitions at the bottom of the file).
class pairsAgain
{
public:
    int val1, val2;
    pairsAgain() {};
    pairsAgain(int int1, int int2);
    bool operator==(pairsAgain&);
    bool operator>(pairsAgain&);
    bool operator>=(pairsAgain&);
    bool operator<(pairsAgain&);
    bool operator<=(pairsAgain&);
};
void fileType();
void openFileNum();
void openFileText();
void whichSearchNum(pairsAgain* myArray, int nbLines);
void selectionSort(pairsAgain* myArray, int nbLines);
void bubbleSort(pairsAgain* myArray, int nbLines);
void sequentialSearch(pairsAgain* myArray, int nbLines);
void stringMatching(char* newString, int nbLines);
// Entry point: hand control to the numbers/text dispatcher.
int main()
{
    fileType();
}
// Ask whether the input file holds numbers or text and dispatch to the
// matching reader; any other answer prints an error and returns.
void fileType()
{
    string response;
    cout << "Will you be using numbers or text? ";
    cin >> response;
    if (response == "numbers")
        openFileNum();
    else if (response == "text")
        openFileText();
    else
        cout << "Wrong. Ending.\n";
}
// Chose a number
// Read a numeric input file: the first line is the pair count, each
// following line holds two ints. The pairs are loaded into a heap array
// and passed on to the sort/search menu.
// NOTE(review): the debug print of intArray[3] assumes at least 4 pairs,
// and neither c_response nor intArray is ever freed.
void openFileNum()
{
    string response, line;
    int a, b, nbLines, counter;
    cout << "Enter file name: ";
    cin >> response;
    char *c_response = new char[response.length()+1];
    strcpy(c_response, response.c_str());
    ifstream myfile(c_response);
    getline(myfile, line); // header line: number of pairs
    istringstream iss(line);
    iss >> a;
    nbLines = a;
    counter = 0;
    pairsAgain *intArray = new pairsAgain[nbLines];
    while (getline(myfile, line))
    {
        istringstream iss(line);
        iss >> a >> b;
        intArray[counter] = pairsAgain(a, b);
        counter++;
    }
    cout << intArray[3].val1 << endl; // debug output
    whichSearchNum(intArray, nbLines);
}
// Prompt for which algorithm to run on the loaded pairs and dispatch.
// Fix: dropped the unused locals (line, a, b, counter) left over from a
// copy/paste of the file-reading routine.
void whichSearchNum(pairsAgain* myArray, int nbLines)
{
    string response;
    cout << "Selection, Bubble, or Sequential? ";
    cin >> response;
    if (response == "Selection" or response == "selection")
    {
        selectionSort(myArray, nbLines);
    }
    else if (response == "Bubble" || response == "bubble")
    {
        bubbleSort(myArray, nbLines);
    }
    else if (response == "Sequential" || response == "sequential")
    {
        sequentialSearch(myArray, nbLines);
    }
    else
        cout << "error\n";
}
// Selection sort on the pair array (ascending by val1 via operator<),
// then write the sorted pairs to a user-named output file.
// Fix: the loop bounds stopped one element early (i < nbLines-2 and
// j < nbLines-1), so the final element was never compared or placed;
// they are now the standard i < n-1 / j < n. Also removed the unused
// temporaries (temp, tempA, tempB).
void selectionSort(pairsAgain* myArray, int nbLines)
{
    int i, j, min;
    string response;
    cout << "Enter output filename: ";
    cin >> response;
    char *c_response = new char[response.length()+1];
    strcpy(c_response, response.c_str());
    ofstream myfile;
    myfile.open(c_response);
    for (i = 0; i < nbLines-1; i++)
    {
        min = i; // index of the smallest remaining element
        for (j = i+1; j < nbLines; j++)
        {
            if (myArray[j] < myArray[min])
            {
                min = j;
            }
        }
        swap(myArray[i], myArray[min]);
    }
    for (i = 0; i < nbLines; i++)
        myfile << myArray[i].val1 << " " << myArray[i].val2 << "\n";
}
// Bubble sort on the pair array (ascending by val1 via operator>), with
// an early exit once a full pass makes no swaps; the sorted pairs are
// then written to a user-named output file.
void bubbleSort(pairsAgain* myArray, int nbLines)
{
    string response;
    cout << "Enter output filename: ";
    cin >> response;
    char *c_response = new char[response.length()+1];
    strcpy(c_response, response.c_str());
    ofstream myfile;
    myfile.open(c_response);
    for (int pass = 0; pass < nbLines-1; pass++)
    {
        bool swappedAny = false;
        // Each pass bubbles the largest remaining element to the end.
        for (int idx = 0; idx < nbLines-pass-1; idx++)
        {
            if (myArray[idx] > myArray[idx+1])
            {
                swap(myArray[idx], myArray[idx+1]);
                swappedAny = true;
            }
        }
        if (!swappedAny)
            break; // already sorted
    }
    for (int idx = 0; idx < nbLines; idx++)
        myfile << myArray[idx].val1 << " " << myArray[idx].val2 << "\n";
}
// Repeatedly prompt for a value and linearly scan the array's val1 fields
// for it; the loop continues after each hit and ends on the first miss.
// Fix: the original printed the "Searched" progress message on every
// mismatch inside the scan, kept re-prompting forever once a value was
// found, and printed "Value not in file." unconditionally at the end.
// Each search now scans once and reports its result exactly once.
void sequentialSearch(pairsAgain* myArray, int nbLines)
{
    int i, searchValue;
    bool hasValue = true;
    while (hasValue == true)
    {
        cout << "Enter a value to search for: ";
        cin >> searchValue;
        cin.ignore();
        hasValue = false;
        for (i = 0; i < nbLines; i++)
        {
            if (myArray[i].val1 == searchValue)
            {
                hasValue = true;
                break;
            }
        }
        // i+1 = number of elements examined in this scan
        cout << "Searched " << i+1 << " values.\n";
        if (!hasValue)
            cout << "Value not in file.\n";
    }
}
// ============================================================================
// Chose a text ===============================================================
// Read an entire text file into one flat C string (first line of the file
// is a length/count header) and hand it to the substring search.
// Fix: `newString` was used as a strcat destination while uninitialized —
// undefined behavior; it must start as an empty, NUL-terminated string.
void openFileText()
{
    string response, line;
    int a, nbLines;
    cout << "Enter file name: ";
    cin >> response;
    char *c_response = new char[response.length()+1];
    strcpy(c_response, response.c_str());
    ifstream myfile(c_response);
    getline(myfile, line); // header line
    istringstream iss(line);
    iss >> a;
    nbLines = a;
    char newString[15000] = ""; // must begin empty for strcat below
    char newLine[15000];
    while (getline(myfile, line))
    {
        strcpy(newLine, line.c_str());
        strcat(newString, newLine);
    }
    cout << newString << endl;
    stringMatching(newString, nbLines);
}
// Brute-force substring search: slide the user's pattern over newString
// and report every alignment where all m characters match.
// Fixes: the original printed j as the match position (j always equals m
// after a hit — the real position is i) and recursed on every match,
// re-prompting endlessly; it also printed "Not found" unconditionally.
// NOTE(review): the scan bound i < nbLines-m is kept from the original;
// textbook brute force uses i <= n-m — confirm whether nbLines is the
// text length or a line count.
void stringMatching(char* newString, int nbLines)
{
    string response;
    int m, j;
    bool found = false;
    cout << "Enter string to find: ";
    cin >> response;
    m = response.length();
    char *c_response = new char[response.length()+1];
    strcpy(c_response, response.c_str());
    for (int i = 0; i < nbLines-m; i++)
    {
        j = 0;
        while ((j < m) and (c_response[j] == newString[i+j]))
        {
            j += 1;
        }
        if (j == m)
        {
            cout << "Found at index: " << i << endl;
            found = true;
        }
    }
    if (!found)
        cout << "Not found\n";
}
// Build a pair from its two values.
pairsAgain::pairsAgain(int int1, int int2)
    : val1(int1),
      val2(int2)
{
}
// Relational operators: pairsAgain objects compare by val1 only; val2 is
// ignored for ordering and equality.
bool pairsAgain::operator==(pairsAgain& myArray)
{
    return this->val1 == myArray.val1;
}
bool pairsAgain::operator>(pairsAgain& myArray)
{
    return this->val1 > myArray.val1;
}
bool pairsAgain::operator>=(pairsAgain& myArray)
{
    return this->val1 >= myArray.val1;
}
bool pairsAgain::operator<(pairsAgain& myArray)
{
    return this->val1 < myArray.val1;
}
bool pairsAgain::operator<=(pairsAgain& myArray)
{
    return this->val1 <= myArray.val1;
}<file_sep>#include "RBsearchTreeNode.h"
using namespace std;
// RBsearchTreeNode constructors: every new node starts with color = RED
// and any link not supplied is NULL. `data` and `color` come from the
// base classes declared in RBsearchTreeNode.h.
RBsearchTreeNode::RBsearchTreeNode(dictEntry *d)
// constructor setting data to passed in dictEntry
{
	data = d;
	left = NULL;
	right = NULL;
	parent = NULL;
	color = RED;
}
RBsearchTreeNode::RBsearchTreeNode(dictEntry *d, RBsearchTreeNode *p)
// constructor setting data and parent to values passed in
{
	data = d;
	parent = p;
	left = NULL;
	right = NULL;
	color = RED;
}
// Constructor supplying all three links explicitly.
RBsearchTreeNode::RBsearchTreeNode(dictEntry *d, RBsearchTreeNode *l, RBsearchTreeNode *r, RBsearchTreeNode *p)
{
	data = d;
	left = l;
	right = r;
	parent = p;
	color = RED;
}<file_sep>#include <string.h>
#include <fstream>
using namespace std;
// Binary search tree of dictEntry objects keyed by word. Public methods
// insert/remove/search by word and dump the tree in the three classic
// traversal orders to a file; the *_h members are the recursive helpers.
class binarySearchTree: public searchTree {
public:
	binarySearchTree();
	~binarySearchTree();
	virtual void insert(dictEntry *in);
	virtual void insert_h(dictEntry *in, searchTreeNode *current);
	virtual void remove(string w);
	virtual searchTreeNode* search(string w);
	virtual searchTreeNode* search_h(string w, searchTreeNode *current);
	virtual searchTreeNode* successor(searchTreeNode *toDelete); // next node in sorted order, used by remove
	virtual void preOrder(string filename);
	virtual void postOrder(string filename);
	virtual void inOrder(string filename);
protected:
	searchTreeNode *root; // NULL when the tree is empty
	ofstream fout;        // sink for the traversal dumps
	virtual void preorder_h(searchTreeNode *n);
	virtual void postorder_h(searchTreeNode *n);
	virtual void inorder_h(searchTreeNode *n);
	virtual void visit(searchTreeNode *n);
	virtual void destroy_tree(searchTreeNode *leaf);
	virtual string getWord(searchTreeNode *n);
	virtual string getDefinition(searchTreeNode *n);
};<file_sep>#include <iostream>
#include <string>
#include <math.h>
using namespace std;
// Round `a` to the nearest integer; return `b` if that integer is even,
// `c` if it is odd. The int cast guarantees % operates on an integral
// value even for floating-point `type`.
template <class type>
type roundThis(type a, type b, type c)
{
    type rounded = round(a);
    if (int(rounded) % 2 == 0)
    {
        return b;
    }
    return c;
}
// Minimal generic container: a cell holding a single value of any type.
template <class type>
class cell {
public:
    type value;
    cell() {}; // default: value is default/uninitialized per `type`
    cell(type a)
    {
        value = a;
    }
};
// Demo of the function and class templates above.
int main()
{
    float a = 9.2, b = 1.41, c = 5.97, d;
    d = roundThis <float>(a, b, c); // 9.2 rounds to 9 (odd) -> c
    cout << d << endl;
    a = 8.2;
    d = roundThis <float>(a, b, c); // 8.2 rounds to 8 (even) -> b
    cout << d << endl;
    int e = 12;
    // create a cell f
    // NOTE(review): this parses as a DECLARATION of variable `f` of type
    // cell<int> (parentheses around the declarator), not a temporary.
    cell <int>(f);
    // put int e as the value of cell f
    f = cell <int>(e);
    cout << f.value << endl;
}<file_sep>#include <iostream>
#include <fstream>
#include <sstream>
#include <string.h>
#include <math.h>
using namespace std;
// Convenience wrapper: read a file with one element per line and return
// its element count.
// Fixes: readFileIn is defined later in this file, so a local forward
// declaration is required for this to compile, and the int return value
// was missing entirely.
int numLines()
{
    int readFileIn(int elemsPerLine); // forward declaration (defined below)
    return readFileIn(1);
}
// Prompt for a file name, read its header line (the element count), then
// read one int per subsequent line into a heap array, and return the
// declared element count.
// Fixes: the loop variable `a` was never declared, and the final bare
// `return` supplied no value for an int-returning function.
// NOTE(review): `elemsPerLine` is unused and neither buffer is freed nor
// returned to the caller — the routine looks unfinished.
int readFileIn(int elemsPerLine)
{
    string filename, line;
    int nbLines;
    int a;
    int counter = 0;
    cout << "Enter a file name: ";
    cin >> filename;
    char *char_filename = new char[filename.length()+1];
    strcpy(char_filename, filename.c_str());
    ifstream myFile(char_filename);
    getline(myFile, line); // header line: number of elements
    istringstream iss(line);
    iss >> nbLines;
    int *array = new int[nbLines];
    while (getline(myFile, line))
    {
        istringstream iss(line);
        iss >> a;
        array[counter] = a;
        counter++;
    }
    return nbLines;
}<file_sep>#include <string>
using namespace std;
// One dictionary record: a word plus its definition, with accessors,
// mutators, and relational operators that compare against a plain string
// (presumably against `word` — the definitions are elsewhere).
class dictEntry {
public:
	dictEntry(string w, string d);
	string getWord();
	string getDefinition();
	void updateWord(string word);
	void updateDefinition(string def);
	bool operator==(string str);
	bool operator>(string str);
	bool operator<(string str);
	bool operator>=(string str);
	bool operator<=(string str);
protected:
	string word, definition;
};<file_sep>// <NAME>
#include <iostream>
#include <string>
using namespace std;
// Polymorphic base class of the inheritance demo below.
// Fix: the destructor is now virtual — main() deletes derived objects
// through monkey* (e.g. `m = new ape(true); ... delete m;`), which is
// undefined behavior when the base destructor is non-virtual. With
// virtual dispatch the full destructor chain runs.
class monkey {
public:
	bool hungry;
	monkey();
	monkey(bool);
	virtual ~monkey() {cout << "\nmonkey destructor called.\n";}
	virtual void eatBanana();   // overridden in ape and human
	virtual void scratchHead(); // overridden in ape and human
	void swingFromTree();       // non-virtual: always monkey's version
};
// ape specializes monkey, overriding both virtual behaviors.
class ape: public monkey {
public:
	ape();
	ape(bool);
	~ape() {cout << "\nape destructor called\n";}
	void eatBanana();
	virtual void scratchHead();
};
// parrot is an unrelated base contributing talk() via multiple inheritance.
class parrot {
public:
	char *vocabulary;
	parrot();
	parrot(char *character);
	~parrot() {cout << "\nparrot destructor called\n";}
	void talk();
};
// human multiply inherits from ape and parrot.
class human: public ape, public parrot {
public:
	human();
	human(bool newtorf);
	human(bool thirdtorf, char *newChar);
	~human() {cout << "\nhuman destructor called\n";}
	void eatBanana();
	void scratchHead();
};
// MAIN
// Demo driver exercising constructors, virtual dispatch, dynamic_cast,
// and destructors across the hierarchy.
// NOTE(review): this sequence contains several dangerous operations that
// look intentional for the exercise but are undefined behavior: `m` is
// used and cast after the object it points to is deleted through `h`/`a`,
// `h = new human(true)` is leaked by the immediate reassignment, and the
// final `delete a` frees already-freed storage.
int main() {
	monkey *m;
	ape *a;
	human *h;
	m = new ape(true);
	m->eatBanana();   // virtual: ape's version
	a = new human;
	delete m;
	m = a;            // base pointer to the human
	m->scratchHead();
	a->scratchHead();
	m->eatBanana();
	a->eatBanana();
	h = dynamic_cast <human*>(a);
	delete h;         // m and a now dangle
	h = new human(true, NULL);
	h->talk();
	delete h;
	h = new human(true); // leaked: h is reassigned on the next line
	h = dynamic_cast <human*>(m); // UB: m dangles
	h->swingFromTree();
	delete a;         // UB: already freed above
	a = new ape();
	a = dynamic_cast <ape*>(m);
	a->swingFromTree();
}
// Definitions
// ---- monkey: each member just announces itself on cout ----
monkey::monkey()
{
	cout << "\ninside monkey() CTOR\n";
}
monkey::monkey(bool)
{
	cout << "\ninside monkey(bool) CTOR\n";
}
void monkey::eatBanana()
{
	cout << "\nmonkey is eating a banana\n";
}
void monkey::scratchHead()
{
	cout << "\nmonkey is scratching its head\n";
}
void monkey::swingFromTree()
{
	cout << "\nmonkey is swinging from tree\n";
}
// ---- ape: forwards the bool to the monkey base constructor ----
ape::ape()
{
	cout << "\ninside ape() CTOR\n";
}
ape::ape(bool torf): monkey(torf)
{
	cout << "\ninside ape(bool) CTOR\n";
}
void ape::eatBanana() {
	cout << "\nape is eating a banana\n";
}
void ape::scratchHead()
{
	cout << "\nape is scratching its head\n";
}
// ---- parrot ----
parrot::parrot()
{
	cout << "\ninside parrot() CTOR\n";
}
parrot::parrot(char *character)
{
	cout << "\ninside parrot(char*) CTOR\n";
}
void parrot::talk()
{
	cout << "\nparrot is talking\n";
}
// ---- human: routes arguments to the ape and parrot bases ----
human::human()
{
	cout << "\ninside human CTOR\n";
}
human::human(bool newtorf): ape(newtorf)
{
	cout << "\ninside human(bool) CTOR\n";
}
human::human(bool thirdtorf, char *newChar): ape(thirdtorf), parrot(newChar)
{
	cout << "\ninside human(bool, char*) CTOR\n";
}
void human::eatBanana()
{
	cout << "\nhuman is eating a banana\n";
}
void human::scratchHead()
{
	cout << "\nhuman is scratching its head\n";
}
"Markdown",
"C++"
] | 31 | C++ | tkicks/comp215 | 66313db615a122525df53d64dc43db03a6b9c9bc | a8cfea532f5284b474fa5cc699b2107830d7e7f9 |
refs/heads/master | <repo_name>paulberesuita/ngbook2<file_sep>/angular2-reddit-base/app.ts
import { bootstrap } from "angular2/platform/browser";
import { Component } from "angular2/core";
import { NgFor } from "angular2/common";
import {FORM_DIRECTIVES, FormBuilder, ControlGroup, Validators, AbstractControl} from "angular2/common";
class Article {
title: string;
link: string;
votes: number;
constructor(title: string, link: string, votes?: number) {
this.title = title;
this.link = link;
this.votes = votes || 0;
}
voteUp(): void {
this.votes += 1;
}
voteDown(): void {
this.votes -= 1;
}
domain(): string {
try {
const link: string = this.link.split('//')[1];
return link.split('/')[0];
} catch (error) {
return null;
}
}
}
@Component({
selector: 'reddit-article',
inputs: ['article'],
host: {
class: 'row'
},
template:
`
<div class="four wide column center aligned votes">
<div class="ui statistics">
<div class="value">
{{article.votes}}
</div>
<div class="label">
Points
</div>
</div>
</div>
<div class="twelve wide column">
<a class="ui large header" href="{{article.link}}">
{{article.title}}
</a>
<div class="meta">({{article.domain()}})</div>
<ul class="ui big horizontal list voters">
<li class="item">
<a href (click)="voteUp()">
<i class="arrow up icon"></i>
upvote
</a>
</li>
<li class="item">
<a href (click)="voteDown()">
<i class="arrow down icon"></i>
downvote
</a>
</li>
</ul>
<div>
`
})
class ArticleComponent {
article: Article;
constructor() {
this.article = new Article('Angular 2', 'http://angular.io', 10);
}
voteUp(): boolean {
this.article.voteUp();
return false;
}
voteDown(): boolean {
this.article.voteDown();
return false;
}
}
@Component({
selector: 'reddit',
directives: [ArticleComponent],
template: `
<form class="ui large form segment">
<h3 class="ui header">Add a Link</h3>
<div class="field">
<label for="title">Title:</label>
<input name="title" #newtitle>
</div>
<div class="field">
<label for="link">Link:</label>
<input name="link" #newlink>
</div>
<button (click)="addArticle(newtitle, newlink)"
class="ui positive right floated button">
Submit link
</button>
</form>
<div class="ui grid posts">
<reddit-article *ngFor="#foobar of sortedArticles()" [article]="foobar"></reddit-article>
</div>
`
})
class RedditApp {
articles: Article[];
constructor() {
this.articles = [ new Article('Angular 2', 'http://angular.io', 3),
new Article('Fullstack', 'http://fullstack.io', 2),
new Article('Angular Homepage', 'http://angular.io', 1)];
}
addArticle(title: HTMLInputElement, link: HTMLInputElement): void {
console.log(`Adding article title: ${title.value} and link: ${link.value}`);
this.articles.push( new Article(title.value, link.value, 0));
title.value = '';
link.value = '';
}
sortedArticles(): Article[] {
return this.articles.sort((a: Article, b: Article) => b.votes - a.votes);
}
}
@Component({
selector: 'switch-sample-app',
template: `
<h4 class="ui horizontal divider header">
Current choice is {{choice}}
</h4>
<div class="ui raised segment">
<ul [ngSwitch]="choice">
<li *ngSwitchWhen="1">First choice</li>
<li *ngSwitchWhen="2">Second choice</li>
<li *ngSwitchWhen="3">Third choice</li>
<li *ngSwitchWhen="4">Fourth choice</li>
<li *ngSwitchDefault>Default choice</li>
</ul>
</div>
<div style="">
<button class="ui primary button" (click)="nextChoice()">
Next choice
</button>
</div>
`
})
class SwitchSampleApp {
choice: number;
constructor() {
this.choice = 0;
}
nextChoice() {
this.choice += 1;
if (this.choice > 5) {
this.choice = 1;
}
}
}
@Component({
selector: 'style-sample-app',
template: `
<h4 class="ui horizontal divider header">
style.background-color
</h4>
<div [style.background-color]="'yellow'">
Uses fixed yellow background
</div>
<h4 class="ui horizontal divider header">
ngStyle literal
</h4>
<div [ngStyle]="{color: 'white', 'background-color': 'blue'}">
Uses fixed white text on blue background
</div>
<h4 class="ui horizontal divider header">
ngStyle literal and style.font-size.px
</h4>
<div>
<span [ngStyle]="{color: 'red'}" [style.font-size.px]="fontSize">
{{fontSize}}
<br>
red text
</span>
</div>
<h4 class="ui horizontal divider header">
ngStyle with an object
</h4>
<div [ngStyle]="style"></div>
<h4 class="ui horizontal divider header">
ngStyle with object property from variable
</h4>
<div>
<span [ngStyle]="{color: colorinput.value}">
{{ colorinput.value }} text
</span>
</div>
<h4 class="ui horizontal divider header">
style from variable
</h4>
<div [style.background-color]="colorinput.value"
style="color: white;">
{{ colorinput.value }} background
</div>
<h4 class="ui horizontal divider header">
Play with the color and font-size here
</h4>
<div class="ui input">
<input type="text" name="color" value="{{color}}" #colorinput>
</div>
<div class="ui input">
<input type="text" name="fontSize" value="{{fontSize}}" #fontinput>
</div>
<button class="ui primary button" (click)="apply(colorinput.value, fontinput.value)">
Apply settings
</button>
`
})
class StyleSampleApp {
color: string;
fontSize: number;
style: {
'background-color': string,
'border-radius': string,
border?: string,
width?: string,
height?: string
};
constructor() {
this.fontSize = 16;
this.color = "blue";
this.style = {
'background-color': '#ccc',
'border-radius': '50px',
'height': '30px',
'width': '30px',
};
}
apply(color, fontSize) {
this.color = color;
this.fontSize = fontSize;
}
}
@Component({
selector: 'ng-non-bindable-sample-app',
template: `
<div>
<span class="bordered">{{ content }}</span>
<span class="pre" ngNonBindable>
← This is what {{ content }} rendered
</span>
</div>
`
})
class NgNonBindableSampleApp {
content: string;
constructor() {
this.content = 'Some text';
}
}
@Component({
selector: 'demo-form-sku',
directives: [FORM_DIRECTIVES],
template:
`
<div class="ui raised segment">
<h2 class="ui header">Demo Form: Sku</h2>
<form [ngFormModel]="myForm"
(ngSubmit)="onSubmit(myForm.value)"
class="ui form">
<div class="field" [class.error]="!sku.valid && sku.touched">
<label for="skuInput">SKU</label>
<input type="text"
id="skuInput"
placeholder="SKU"
[ngFormControl]="myForm.controls['sku']">
<div *ngIf="!myForm.valid" class="ui error message">Form is invalid</div>
<div *ngIf="!sku.valid" class="ui error message">SKU is invalid</div>
<div *ngIf="myForm.hasError('required')" class="ui error message">SKU is required</div>
</div>
<button type="submit" class="ui button">Submit</button>
</form>
</div>
`
})
export class DemoFormWithValidationsExplicit {
myForm: ControlGroup;
sku: AbstractControl;
constructor(fb: FormBuilder) {
this.myForm = fb.group({
'sku': ['', Validators.required]
})
this.sku = this.myForm.controls['sku'];
}
onSubmit(value: string): void {
console.log('you submitted value: ', value);
}
}
@Component({
selector: 'demo-form-sku',
directives: [FORM_DIRECTIVES],
template:
`
<div class="ui raised segment">
<h2 class="ui header">Demo Form: Sku</h2>
<form [ngFormModel]="myForm"
(ngSubmit)="onSubmit(myForm.value)"
class="ui form">
<div class="field" [class.error]="!sku.valid && sku.touched">
<label for="skuInput">SKU</label>
<input type="text"
id="skuInput"
placeholder="SKU"
[ngFormControl]="myForm.controls['sku']">
<div *ngIf="!myForm.valid" class="ui error message">Form is invalid</div>
<div *ngIf="!sku.valid" class="ui error message">SKU is invalid</div>
<div *ngIf="myForm.hasError('required')" class="ui error message">SKU is required</div>
</div>
<button type="submit" class="ui button">Submit</button>
</form>
</div>
`
})
export class DemoFormWithValidationsShorthand {
myForm: ControlGroup;
constructor(fb: FormBuilder) {
this.myForm = fb.group({
'sku': ['', Validators.required]
})
}
onSubmit(value: string): void {
console.log('you submitted value: ', value);
}
}
bootstrap(DemoFormWithValidationsShorthand); | fd07ea77c825a9a84d1720757887f22c59d15b5d | [
"TypeScript"
] | 1 | TypeScript | paulberesuita/ngbook2 | 65c2a54dfd31955a4aad70bad89018d32d5f15ee | b869ce8e9465a73e875dfc52e3b20049ed20b988 |
refs/heads/master | <repo_name>TipsyDr/componentLibrary<file_sep>/src/component/base/toast/index.ts
/**
* @file index.js
*/
import ToastComponent from './App.vue'
// @ts-ignore
ToastComponent.install = function (Vue) {
const Constructor = Vue.extend(ToastComponent);
const instance = new Constructor();
const tpl = instance.$mount().$el;
document.body.appendChild(tpl);
// @ts-ignore
Vue.prototype.$toast = (msg, duration = 2000, fn) => {
if (instance.showToast) {
return;
}
instance.title = msg;
instance.showToast = true;
instance.hide(duration, fn);
}
Vue.component(ToastComponent.name, ToastComponent);
};
export default ToastComponent;
<file_sep>/src/index.ts
/**
* @file 注册所有的组件库
* @author zhuxiaohan
*/
import Toast from './component/base/toast';
import ZSwitch from './component/base/switch';
import ZSwiper from './component/base/swiper';
import ZButton from './component/base/button';
const components: any = {
ZSwitch,
ZSwiper,
ZButton
};
// @ts-ignore
const install = function (Vue: any) {
// @ts-ignore
if (install && install.installed) {
return;
}
Object.keys(components).forEach(key => {
Vue.component(components[key].name, components[key]);
});
Vue.use(Toast);
};
// @ts-ignore
if (typeof window !== 'undefined' && window.Vue) {
install(window.Vue);
}
const API = {
install,
...components
};
export default API;
<file_sep>/README.md
# components-library
## Project setup
```
npm install
```
### Compiles and hot-reloads for development
```
npm run serve
```
### Compiles and minifies for production
```
npm run build
```
### Customize configuration
See [Configuration Reference](https://cli.vuejs.org/config/).
### TODO
```
将组件分为 ui组件 以及 js组件;
将示例exmaple抽出来分别生成ui组件示例以及js组价示例;
项目目录调整好之后把js组件整理一下,ui组件优化一下;
```
<file_sep>/src/component/example/switch/index.ts
const pageRouter = {
path: '/switch',
name: 'Switch',
component: () => import('./App.vue')
};
export default pageRouter;<file_sep>/src/component/example/index.ts
import Button from './button';
import Swiper from './swiper';
import Switch from './switch';
import lazyLoadImg from './lazyload-img';
const examplePath = [
Button,
Swiper,
Switch,
lazyLoadImg
]
export default examplePath;<file_sep>/src/component/base/drop-down-refresh/index.ts
// This component does NOT rely on the window scroll event; instead a
// touch-drag handler is bound on the outermost element. The benefit is that
// in an SPA (e.g. Vue) nothing has to unbind a window `scroll` listener when
// the component is destroyed. Note that detecting "scrolled to the bottom"
// would still require the window scroll event.
/**
 * Pull-down-to-refresh component.
 * @param {object} option configuration
 * @param {HTMLElement} option.el element to pull down (required)
 * @param {number} option.distance pull distance in px required to trigger a refresh (optional)
 * @param {number} option.deviation offset from the top in px (optional)
 * @param {string} option.loadIcon HTML for the "loading" indicator (optional)
 */
export function dropDownRefresh(option: any) {
  const doc = document;
  /** Node being pulled */
  const page = option.el;
  /** Pull distance (px) that triggers a refresh */
  const distance = option.distance || 88;
  /** Offset from the top (px) */
  const deviation = option.deviation || 0;
  /** Indicator node injected above the page (arrow + loading icon) */
  const topNode = doc.createElement('div');
  /** Mask shown while refreshing; blocks further touch interaction */
  const maskNode = doc.createElement('div');
  topNode.innerHTML = `<div refresh-icon style="transition: .2s all;"><svg style="transform: rotate(90deg); display: block;" t="1570593064555" viewBox="0 0 1575 1024" version="1.1" xmlns="http://www.w3.org/2000/svg" p-id="26089" width="48" height="48"><path d="M1013.76 0v339.968H484.115692V679.778462h529.644308v339.968l529.644308-485.612308v-48.600616L1013.76 0zM243.396923 679.857231h144.462769V339.968H243.396923V679.778462z m-240.797538 0h144.462769V339.968H2.599385V679.778462z" fill="#000000" fill-opacity=".203" p-id="26090"></path></svg></div><div refresh-loading style="display: none; animation: refresh-loading 1s linear infinite;">${option.loadIcon || '<p style="font-size: 15px; color: #666;">loading...</p>'}</div>`;
  topNode.style.cssText = `width: 100%; height: ${distance}px; position: fixed; top: ${-distance + deviation}px; left: 0; z-index: 10; display: flex; flex-wrap: wrap; align-items: center; justify-content: center; box-sizing: border-box; margin: 0; padding: 0;`;
  maskNode.style.cssText = 'position: fixed; top: 0; left: 0; width: 100%; height: 100vh; box-sizing: border-box; margin: 0; padding: 0; background-color: rgba(0,0,0,0); z-index: 999';
  page.parentNode.insertBefore(topNode, page);
  /**
   * Sets the CSS transition duration on both the page and the indicator.
   * @param {number} n duration in seconds
   */
  function setAnimation(n: number) {
    page.style.transition = topNode.style.transition = `${n}s all`;
  }
  /**
   * Translates both the page and the indicator vertically.
   * @param {number} n distance in pixels
   */
  function setSlide(n: number) {
    page.style.transform = topNode.style.transform = `translate3d(0px, ${n}px, 0px)`;
  }
  /** Pull hint icon (the arrow) */
  const icon: any = topNode.querySelector('[refresh-icon]') || '';
  /** "Loading" animation node shown while refreshing */
  const loading: any = topNode.querySelector('[refresh-loading]') || '';
  return {
    /**
     * Starts listening for pull-to-refresh gestures.
     * @param {Function} callback invoked when the pull is released past the trigger distance
     * @param {(n: number) => void} rangeCallback invoked with the current pull distance while dragging
     */
    onRefresh(callback: () => void, rangeCallback: (arg0: number) => void) {
      /** Current page scroll offset */
      let scrollTop = 0;
      /** Y coordinate where the touch started */
      let startDistance = 0;
      /** Y coordinate of the latest touch move */
      let endDistance = 0;
      /** Last (damped) pull distance */
      let range = 0;
      // Touch start: record the origin. `scrollTop` is set non-zero so a tap
      // without any movement cannot satisfy the release condition below;
      // `touchmove` recomputes the real value.
      page.addEventListener('touchstart', function (e: { touches: { pageY: number; }[] }) {
        startDistance = e.touches[0].pageY;
        scrollTop = 1;
        setAnimation(0);
      });
      // Touch move: track the drag while the page is scrolled to the top.
      page.addEventListener('touchmove', function (e: { touches: { pageY: number; }[]; preventDefault: () => void; }) {
        scrollTop = doc.documentElement.scrollTop === 0 ? doc.body.scrollTop : doc.documentElement.scrollTop;
        // Bail out unless the page is at the very top.
        if (scrollTop != 0) return;
        endDistance = e.touches[0].pageY;
        range = Math.floor(endDistance - startDistance);
        // Only act on downward drags.
        if (range > 0) {
          // Suppress the browser's native pull-to-refresh.
          e.preventDefault();
          // Damping: move half as far as the finger for a rubber-band feel.
          range = range - (range * 0.5);
          // Rotate the arrow once the trigger distance has been passed.
          if (range > distance) {
            icon.style.transform = "rotate(180deg)";
          } else {
            icon.style.transform = "rotate(0deg)";
          }
          setSlide(range);
          // Report the current pull distance, if the caller asked for it.
          if (typeof rangeCallback === "function") rangeCallback(range);
        }
      });
      // Touch end: either snap to the refreshing position or reset.
      page.addEventListener('touchend', function () {
        setAnimation(0.3);
        if (range > distance && range > 1 && scrollTop === 0) {
          setSlide(distance);
          doc.body.appendChild(maskNode);
          // Block further touch scrolling while refreshing.
          maskNode.ontouchmove = e => e.preventDefault();
          // Pulled past the trigger distance and released: notify the caller.
          if (typeof callback === "function") callback();
          icon.style.display = "none";
          loading.style.display = "block";
        } else {
          setSlide(0);
        }
      });
    },
    /** Finishes the refresh: removes the mask and resets the indicator. */
    end() {
      maskNode && maskNode.parentNode && maskNode.parentNode.removeChild(maskNode);
      setAnimation(0.3);
      setSlide(0);
      icon.style.display = 'block';
      loading.style.display = 'none';
    }
  };
}
<file_sep>/src/component/base/toast/readme.md
## Toast
> 提示组件
### 使用方法
#### js 使用
```js
setup(props, context){
// 三个参数,分别为提示信息(必填,字符串),持续时长(可选,单位毫秒,默认2000),回调函数(可选)
context.root.$tbToast('我是轻提示', 2000, function (){})
}
```
<file_sep>/src/component/base/replace-error-img/index.ts
// Global fallback for broken images: listen for resource "error" events in
// the capture phase (resource errors do not bubble) and swap in a default
// picture.
window.addEventListener("error", (e: any) => {
  /** Default fallback image (bundled asset) */
  const defaultImg = require('@/assets/img/error.jpg');
  /**
   * @type {HTMLImageElement}
   */
  const node = e.target;
  // Only replace <img> elements; other failing resources are left alone.
  if (node.nodeName && node.nodeName.toLocaleLowerCase() === "img") {
    node.style.objectFit = "cover";
    node.src = defaultImg;
  }
}, true);
<file_sep>/src/router/index.ts
/**
* @file 路由配置
* @desc 路由懒加载
*/
import Vue from 'vue';
import VueRouter, { RouteConfig } from 'vue-router';
import examplePath from '@/component/example';
Vue.use(VueRouter);
const route: RouteConfig[] = [
{
path: '/',
name: 'Home',
component: () => import('@/App.vue')
}
];
const routes: RouteConfig[] = route.concat(examplePath);
const router = new VueRouter({
mode: 'history',
routes
});
export default router;<file_sep>/src/component/example/lazyload-img/index.ts
/** Lazily-loaded route record for the image lazy-loading demo page. */
const pageRouter = {
  name: 'LazyLoadImg',
  path: '/lazyLoadImg',
  component: () => import('./App.vue')
};
export default pageRouter;<file_sep>/src/component/example/lazyload-img/lazyload.ts
/**
 * Lazy loader for `<img>`, `<video>`, `<audio>` and any other element that
 * references an external resource, built on `IntersectionObserver`.
 * @param {object} params options (optional)
 * @param {string?} params.lazyAttr attribute holding the deferred URL (default `"lazy"`)
 * @param {"src"|"background"} params.loadType how to apply the URL (default `"src"`)
 * @param {string?} params.errorPath fallback resource on load failure; only used when `loadType` is `"src"`
 */
function lazyLoad(params: any = {}) { // default fixes a crash on `lazyLoad()` with no arguments
  const attr = params.lazyAttr || "lazy";
  const type = params.loadType || "src";
  /** Observes every element in the document that carries the lazy attribute. */
  function update() {
    const els = document.querySelectorAll(`[${attr}]`);
    for (let i = 0; i < els.length; i++) {
      const el = els[i];
      observer.observe(el);
    }
  }
  /**
   * Loads an image-like element by swapping in the deferred URL.
   * @param {HTMLImageElement} el element to load
   */
  function loadImage(el: any) {
    const cache = el.src; // remember the current src as a fallback
    el.src = el.getAttribute(attr);
    el.onerror = function () {
      // Clear the handler first: if the fallback URL also fails, the old
      // code retriggered onerror forever.
      el.onerror = null;
      el.src = params.errorPath || cache;
    }
  }
  /**
   * Loads a single node according to the configured load type, then stops
   * observing it.
   * @param {HTMLElement} el
   */
  function loadElement(el: any) {
    switch (type) {
      case "src":
        loadImage(el);
        break;
      case "background":
        el.style.backgroundImage = `url(${el.getAttribute(attr)})`;
        break;
    }
    el.removeAttribute(attr);
    observer.unobserve(el);
  }
  /**
   * Viewport observer: loads each element the first time it intersects.
   * [MDN reference](https://developer.mozilla.org/zh-CN/docs/Web/API/IntersectionObserver)
   */
  const observer = new IntersectionObserver(function (entries) {
    for (let i = 0; i < entries.length; i++) {
      const item = entries[i];
      if (item.isIntersecting) {
        loadElement(item.target);
      }
    }
  })
  update();
  return {
    observer,
    update
  }
}
export default lazyLoad;
// Lazy-load via src:
// lazyLoad({
//   errorPath: "./img/error.jpg"
// })
// Lazy-load via a background image (the original example wrongly said "src"):
// lazyLoad({
//   lazyAttr: "lazy-bg",
//   loadType: "background"
// })<file_sep>/src/component/example/button/index.ts
/** Lazily-loaded route record for the Button component demo page. */
const pageRouter = {
  name: 'Button',
  path: '/button',
  component: () => import('./App.vue')
};
export default pageRouter;<file_sep>/src/component/base/switch/readme.md
## Switch
> 开关组件
### 使用方法
#### js 使用
```js
setup(props, context){
}
```
<file_sep>/src/component/base/upload-img/upload-img.ts
/**
 * Validates an image chosen through an `<input type="file">` and builds the
 * `FormData` payload expected by the backend. Rejects unsupported formats and
 * files larger than 2MB.
 * @param {HTMLInputElement} el the file input element
 */
function upLoadImage(el: any) {
  /** The selected file (undefined when the picker was cancelled) */
  const file = el.files[0];
  if (!file) return; // nothing selected — no-op
  /** Accepted MIME types */
  const types = ["image/jpg", "image/png", "image/jpeg", "image/gif"];
  // Validate the format.
  if (types.indexOf(file.type) < 0) {
    // Reset the INPUT (the original cleared `file.value`, a no-op on a File
    // object) so the same — now invalid — file can be re-picked.
    el.value = null;
    // Message corrected to list every accepted format.
    return alert("文件格式只支持:jpg、jpeg、png 和 gif");
  }
  // Validate the size (2MB cap).
  if (file.size > 2 * 1024 * 1024) {
    el.value = null;
    return alert("上传的文件不能大于2M");
  }
  const formData = new FormData(); // payload sent to the backend
  formData.append("img", file); // "img" is the field name agreed with the backend
  console.log(formData, file);
}
<file_sep>/src/component/base/button/index.ts
/**
 * @file Button component entry: attaches the Vue plugin-style `install` hook
 * so the component can be registered individually via `Vue.use`.
 */
import Component from './App.vue';
// @ts-ignore
Component.install = function (Vue: any) {
  Vue.component(Component.name, Component);
};
export default Component;
| 69e51f935152e09646037728f535cc95370d7d72 | [
"Markdown",
"TypeScript"
] | 15 | TypeScript | TipsyDr/componentLibrary | bb4016df5c64f7a59749ae55dadd8a9a6ebb4ef0 | 683ebf517229f775923f7cc4031e9a03c455498d |
refs/heads/master | <repo_name>dmitriz/monad-transformers<file_sep>/lib/stack.js
const base = require('./base')
const assign = require('object-assign')
const helpers = require('./helpers')
const wrapper = require('./wrapper')
module.exports = function createStack (monadStack) {
// Generate errors
const error = new Error('The first argument must be a stack member')
// Add the ID monad at the bottom of the monad stack
const stack = [base.id].concat(monadStack)
//Verify input
stack.forEach(member => {
if (typeof member !== 'object') {throw new Error('Stack members must be objects')}
})
// Perform some preprocessing on the stack
return processStack(stack).slice(-1)[0]
}
// Applies the processing function on each stack member, passing the previous
// (outer) member as an argument. Each member is wired to its outer neighbour
// (fold/run composed, outer methods lifted) and given a constructor.
const processStack = (baseStack) =>
  helpers.statefulMap(baseStack, (item, state) => {
    const itemProcessed = addConstructor(processProtoNew(item, state))
    // The processed member is both the mapped value and the next "outer" state.
    return [itemProcessed,itemProcessed]
  })
// Wraps an outer-monad method so that, when invoked on an inner stack member,
// its result is lifted into the full stack via `this.lift`.
const convertOuterFunction = (funk, object) => function (...args) {
  return this.lift(funk.apply(object, args))
}
// Composes two continuation-passing runners into one. A missing runner
// defaults to plain application: (fn, val) => fn(val).
const asyncCompose = (thisRun, outerRun) => {
  const identityRun = function (fn, val) { return fn(val) }
  const inner = thisRun || identityRun
  const outer = outerRun || identityRun
  return function (fn, val) {
    return inner.call(this, outer.bind(this, fn), val)
  }
}
// Attaches the object-oriented constructor (see wrapper.js) to a processed
// stack member and returns the member.
const addConstructor = (object) => {
  object.constructor = wrapper(object)
  return object
}
// Adds context to a stack member: composes its fold/run with the outer
// member's (fold unwraps outer-first, run executes inner-first), links
// `outer`, extends the debug name, and lifts the outer member's custom
// (non-interface) methods into this level.
const processProtoNew = (proto, outerProto) =>
  assign({}, proto, {
    fold: asyncCompose(outerProto.fold, proto.fold),
    run: asyncCompose(proto.run, outerProto.run),
    outer: outerProto,
    name: proto.name + '/' + outerProto.name
  }, helpers.monadMapVals(convertOuterFunction, outerProto))
<file_sep>/lib/helpers.js
exports.curry = function curry(funk, initial_arguments){
var context = this
return function(){
var all_arguments = (initial_arguments||[]).concat(Array.prototype.slice.call(arguments, 0))
return all_arguments.length>=funk.length?funk.apply(context, all_arguments):curry(funk, all_arguments)
}
}
exports.compose = function(){
//Convert functions to an array and flip them (for right-to-left execution)
var functions = Array.prototype.slice.call(arguments).reverse()
//Check if input is OK:
functions.forEach(function(funk){if(typeof funk !== "function"){throw new TypeError(funk+" is not a function" )}})
//Return the function which composes them
return function(){
//Take the initial input
var input = arguments
var context
return functions.reduce(function(return_result, funk, i){
//If this is the first iteration, apply the arguments that the user provided
//else use the return result from the previous function
return (i ===0?funk.apply(context, input): funk(return_result))
//return (i ===0?funk.apply(context, input): funk.apply(context, [return_result]))
}, undefined)
}
}
//
//combines an array of async functions with signature into one functions.
// [ (callback, value) => () ] => (value) => ()
exports.asyncCompose = (functions, self) => functions.reduce((f, newF) => {
return (val) => newF.call(self, f, val)
})
const baseMonadFunctions = ['of', 'chain', 'lift']
const optionalMonadFunctions = ['fold', 'run', 'value', 'map', 'outer', 'name']
const isIn = (arr) => (val) => arr.indexOf(val) === -1
// Checks if a given property is part of the general monad definition interface
const isReserverMonadKey = isIn(baseMonadFunctions.concat(optionalMonadFunctions))
// Maps the values of a given obj excluding the reserved ones.
exports.monadMapVals = (funk, obj) => {
return Object.keys(obj)
.filter(isReserverMonadKey)
.reduce((newObj, key) => {
newObj[key] = funk(obj[key], obj)
return newObj
}, {})
}
// A stateful version of the map function:
// f accepts an array item and a state (defaults to an object) and returns the processed version of the item plus a new state
exports.statefulMap = (arr, f) =>
arr.reduce((arrayAndState, item) => {
const itemAndState = (f(item, arrayAndState[1]))
return [arrayAndState[0].concat([itemAndState[0]]), itemAndState[1] ]
}, [[], {}])[0]
<file_sep>/docs/overview.md
#Overview
The package consists of the following components:
## Object wrapper
The [object wrapper](wrapper.md), exposed via the `mtl.make` function, combines one or several monad transformer definitions and mixes them into one [Fantasy Land compliant](https://github.com/fantasyland/fantasy-land) monad.
const mtl = {}
const createStack = require('./stack')
mtl.make = function () {
return createStack(Array.prototype.slice.call(arguments)).constructor
}
## Monad transformer definitions
The library contains four [monad transformer definitions](api.md), distributed in two packages: `data` and `comp`. It also contains three versions of the identity monad transformer, useful as a reference when implementing [custom monad transformers](implementing-transformer.md).
mtl.data = require('./data')
mtl.comp = require('./comp')
mtl.id = require('./id')
## Base monads
When stacking monad transformers, you must place one plain monad at the bottom of the stack. This monad serves as the stack's base.
By default, the package uses the identity monad as a base but it also defines a wrapper which allows you to use the [Task monad from Folktale](https://github.com/folktale/data.task) as a base.
mtl.base = require('./base')
## Predefined stacks
The library features five predefined monad stacks.
mtl.simple = mtl.make(mtl.data.maybe, mtl.data.writer)
mtl.stateful = mtl.make(mtl.data.maybe, mtl.data.writer, mtl.comp.state)
mtl.list = mtl.make(mtl.data.list, mtl.data.maybe, mtl.data.writer)
mtl.statelist = mtl.make(mtl.data.list, mtl.data.maybe, mtl.data.writer, mtl.comp.state)
mtl.advanced = mtl.make(mtl.base.task, mtl.data.maybe, mtl.data.writer, mtl.comp.state)
## Helpers
Some helper functions that we want to keep handy:
const helpers = require('./helpers')
mtl.curry = helpers.curry
mtl.compose = helpers.compose
module.exports = mtl
[_View in GitHub_](../lib/main.js)
<file_sep>/lib/id.js
/* # Implementing a monad transformer
*
* Monad transformers are tricky, and one of the reasons for this is that they require an
* excessive amount of type juggling. You have to constantly wrap things in boxes and unwrap them
* again.
*
* One of the aims of this package is to reduce the amount of wrapping and unwrapping needed for
* making a new transformer and to provide an easy way to define and combine transformers.
*
* It does this by defining a monad transformer definition format, which allows you to specify
* your transformer only by specifying its transformations on the values.
* With it, all it takes to implement a transformer is implement these four functions:
* `of` (AKA `return`), `chain` (AKA `flatMap`) `lift` and `value`(AKA `run`)
*
* ## The trivial implementation
*
* Consider the identity Monad transformer. This is a monad transformer that does nothing:
* or in other words it produces a monad which behaves the same way as the one it is given to it
* as an argument. Here is how would the implementation of these methods look like:
*/
exports.idMinimal = {
  name: 'idMinimal',
  /*
   * The `of` function takes a scalar value and returns an instance of the outer monad.
   * In this case we delegate everything to the outer monad's `of` method.
   * We access the outer monad with `this.outer`.
   */
  // (val) => M(val)
  of (val) {
    return this.outer.of(val)
  },
  /*
   * `chain` is the heart of any monad or monad transformer.
   *
   * In this case we implement it by just calling the `chain` function of the host monad (using
   * `this.outer.chain`) with the function given to us as an argument.
   */
  // (val => M(val) , M(val)) => M(val)
  chain (fn, val) {
    return this.outer.chain(fn, val)
  },
  /*
   * The `lift` function is kinda like `of`, but it accepts an instance of the outer monad
   * instead of a 'plain' value.
   */
  // (M(val)) => M(val)
  lift (val) {
    return val
  },
  /*
   * Having both 'lift' and 'of' enables us to convert any value created by one monad transformer
   * to a value that holds all elements of the stack.
   *
   * Finally the `value` function provides a way to get 'the value back'
   * What it does is to unwrap a previously-wrapped monad.
   * In this case we didn't do any wrapping, so we don't have to do any unwrapping either.
   */
  // ((val) => otherVal, M(val)) => otherVal
  value (fn, val) {
    return this.outer.value(fn, val)
  },
  // `fold` unwraps one transformer layer on the way out; the minimal identity
  // adds no structure, so folding is plain function application.
  fold (value, val) {
    return value(val)
  }
}
/* # Manipulating the value
*
* All monad transformers do the same thing (given a monad `A`, they produce a
* monad `B(A)` which somehow augments `A`), but there is no general formula for doing it.
*
* Simpler monads can be implemented just by manipulating the value inside the host monad.
*
* Our next implementation of ID will just wrap the underlying value (which we called A)
* in a plain object.
*
* So `M(A)` would become `M ({idVal:A})` when we wrap it and will be back to `M(A)` when we
* unwrap it.
*
* Here is how this implementation would look like:
*/
exports.id = {
  name: 'Id',
  /*
   * The `of` function takes a scalar value and returns an instance of the outer monad.
   * In this case we delegate everything to the outer monad's `of` method.
   * We access the outer monad with `this.outer`.
   */
  // (val) => M({idVal:val})
  of (val) {
    return this.outer.of({idVal: val })
  },
  /*
   *
   * chain just calls the `chain` function of the host monad like in the previous example.
   * The difference is that it applies some transformation to the value in order to fit
   * the new context.
   */
  // (val => M({idVal:val}) , M({idVal:val})) => M({idVal:val})
  chain (fn, mIdVal) {
    return this.outer.chain((idVal) => {
      return fn(idVal.idVal)
    }, mIdVal)
  },
  /*
   * The `lift` function uses `chain` + `of` (which is the same as `map`) to go to the host monad
   * and modify the value inside it.
   */
  // (M(val)) => M({idVal:val})
  lift (mVal) {
    return this.outer.chain((val) => this.outer.of({idVal: val}), mVal)
  },
  /*
   * Lastly we have the `value` function (or the interpreter), which unwraps a previously-wrapped
   * value.
   */
  // ((val) => otherVal, M({idVal:val})) => otherVal
  value (fn, mIdVal) {
    return this.outer.value((idVal)=> {
      return fn(idVal.idVal)
    }, mIdVal)
  },
  // Unwraps the `{idVal: ...}` layer and passes the inner value on.
  fold (value, idVal) {
    return value(idVal.idVal)
  }
}
/*
* Notice that we are always returning an instance of the outer monad.
*
* That is, if you are to apply the transformation several times,
* the values nest inside M: M({idVal:{idVal: a}})
*
* However not all monad transformers are like that.
*
* ## A more complex structure
*
* So far we have seen monad transformers which only deal with the value inside the given
* monad A. However not all monad transformers are like that.
*
* There are monad transformers which add additional structure to the monad itself.
* Examples of the first type are all transformers that we have seen so far.
* An example of the second type is the 'State' monad, which given the same value `M(A)`, will
* produce something like `() =>{ M([A, State]) }`. That is, the transformer adds the state
* value to the 'host' monad `M`, and then it wraps the monad itself in a function.
*
* Now consider an alternative, a little more complex implementation of the ID monad. One
* which wraps the M monad into another plain object, so the value of M(A) becomes
* `{idContainer: M({idVal:a})}`. Notice that the transformer consists of two parts: one which
* wraps around the host monad, and one which wraps around the value in it.
*/
exports.idWrapped = {
  name: 'IdWrapped',
  // (val) => {idContainer: M({idVal:a})}
  of (val) {
    return {
      idContainer: this.outer.of({idVal: val})
    }
  },
  // (a => {idContainer:M({idVal:a})}, {idContainer:M({idVal:a})}) => {idContainer:M({idVal:a})}
  chain (fn, idContainerMIdVal) {
    return {
      idContainer: this.outer.chain((idVal) => {
        const val = fn(idVal.idVal)
        return val.idContainer
      }, idContainerMIdVal.idContainer)
    }
  },
  // (M(val)) => {idContainer:M({idVal:val})}
  lift (mVal) {
    return {
      idContainer: this.outer.chain((val) => this.outer.of({idVal: val}), mVal)
    }
  },
  // ((val) => otherVal, {idContainer: M({idVal:val}))}=> otherVal
  value (fn, idContainerMIdVal) {
    return this.outer.value((idVal)=> {
      return fn(idVal.idVal)
    }, idContainerMIdVal.idContainer)
  },
  // Strips the outer `{idContainer: ...}` wrapper, exposing the host monad.
  run (fn, idContainerMIdVal) {
    return fn(idContainerMIdVal.idContainer)
  },
  // Unwraps the inner `{idVal: ...}` layer.
  fold (value, idVal) {
    return value(idVal.idVal)
  }
}
/* The key difference is that with this monad nesting happens both inside the host monad and
* outside of it. If we apply the transformation two times the value becomes:
* `{idContainer:{idContainer:M({idVal:{idVal:a}})}}`.
*/
/*
* [_View in GitHub_](../lib/id.js)
*/
<file_sep>/test/reader_tests.js
// When running under the V8 debugger, break as soon as an exception is thrown.
if ( global.v8debug ) {
  global.v8debug.Debug.setBreakOnException()
}
var mtl = require('../lib/main')
var sinon = require('sinon')
var permutations = require('./permutations')
// Tests for the Reader transformer, executed for every stack permutation
// that includes `mtl.comp.reader` (see ./permutations).
exports.reader = permutations(a => (a.indexOf(mtl.comp.reader) !== -1), (one, two, three) => {
  return {
    // The environment `{environment: 6}` passed to `value` should surface
    // through `readerMap` — here as the raw value 6.
    environment: (test) => {
      const reader = mtl.make(one, two, three)
      reader.of(5)
        .readerMap((val, env) => env)
        .map((val) => {
          test.equal(val, 6)
          return val
        })
        .value(a=>{
          test.done()
        }, {environment:6})
    }
  }
})
<file_sep>/test/tutorial_tests.js
// Aggregates the runnable test suites embedded in each tutorial part.
module.exports = {
  'p1': require('../tutorial/p1').test,
  'p2': require('../tutorial/p2').test,
  'p3': require('../tutorial/p3').test
}
<file_sep>/lib/wrapper.js
/* # The object wrapper
*
* This library provides a module which allows you to combine several monad transformer definitions
* and create a object-oriented wrapper for using the resulting monad.
*
* ## Creating a monad constructor
*
* You can create a monad constructor using the `mtl.make` function:
*
* ###`mtl.make([baseMonad], monadTransformer1, monadTransformer2)`
*
* ####`baseMonad - monadDefinition`
*
* Optionally you can pass the definition of the monad that would sit at the bottom of the stack,
* as a first argument of the `make` function.
*
* The parameter is optional. By default, the package uses the identity monad as a base.
*
* ####`monadTransformer<1-n> - monadTransformerDefinition`
*
* Pass the definitions of the monad transformers which would augment the base monad.
* Note that monad transformations are usually not commutative so the order in which the arguments
* are placed matters.
*/
const assign = require('object-assign')
const helpers = require('./helpers')
const idFunc = a => a
// Promotes a function from a monad definition to a monad stack method, so it
// can be used for chaining. At call time `this` is the wrapped monad
// instance; the original function is applied with `monadDefinition` as its
// receiver, receiving the chained value as its last argument, and the result
// is re-wrapped via the stack constructor.
const promoteToMethod = (funk, monadDefinition) => function () {
  const args = Array.prototype.slice.call(arguments)
  return this.chain((val) => {
    return this.constructor(funk.apply(monadDefinition, args.concat([val])))
  })
}
// Promotes a function from a monad definition to a stack constructor — a
// static factory attached to the generated constructor function, so `this`
// at call time is that constructor and the raw result gets wrapped by it.
const promoteToConstructor = (funk, monadDefinition) => function () {
  return this(funk.apply(monadDefinition, arguments))
}
/*
* The function returns an `objectWrapper` which allows you instantiate monads from all kinds of values.
*/
// Builds the object-oriented wrapper for a processed stack: a prototype that
// mixes the stack's custom operations in as chainable methods, and a
// constructor function carrying the same operations as static factories.
module.exports = (stack) => {
  // Prototype: generic monad methods + the stack's custom methods.
  const monad = assign(Object.create(monadWrapperProto), helpers.monadMapVals(promoteToMethod, stack))
  // Wraps a raw stack value into an object inheriting from the prototype.
  const constructor = (val) => {
    var object = Object.create(monad)
    object._value = val
    return object
  }
  monad.stack = stack
  monad.constructor = assign(constructor, helpers.monadMapVals(promoteToConstructor, stack))
  monad.constructor.of = monad.of.bind(monad)
  monad.constructor.prototype = monad
  return monad.constructor
}
/*
* ## Creating monads
*
* Monads are generally created using [type-specific methods](api.md) like `fromArray` (for stacks that include the
* list transformation, or `fromState` (for stateful computations) but several generic methods are also provided.
*
* ### `objectWrapper.of(value)`
*
* Constructs a monad from a plain non-monadic value.
*/
const monadWrapperProto = {
  of (value) {
    return this.constructor(this.stack.of(value))
  },
  /* ### `objectWrapper(value)`
   *
   * Constructs a monad from a value which obeys the structure of the monad stack i.e. it "wraps" the value
   * into a monadic interface.
   *
   * ## Using monads
   *
   * Again there are many methods that you would use to manipulate a monad which are [type-specific](api.md).
   * Here are the generic ones:
   *
   * ###`monad.chain(f)`
   *
   * Applies `f` to the value or values that are inside the monad and returns a new wrapped object
   *
   */
  chain (f) {
    const fUnwrap = (val) => {
      const newVal = f.call(this.constructor, val, this.constructor)
      // NOTE(review): the two checks below throw plain strings, not Error
      // instances — callers using `err instanceof Error` will not match.
      // Kept as-is for backward compatibility.
      if (!newVal.hasOwnProperty('_value')) {throw JSON.stringify(newVal) + ' is not a wrapped value'}
      if (newVal.stack.name !== this.stack.name) {throw `${this.stack.name} is not the same as ${newVal.stack.name}`}
      return newVal._value
    }
    return this.constructor(this.stack.chain(fUnwrap, this._value))
  },
  /*
   * ###`monad.map(f)`
   *
   * Applies `f` to the value or values that are inside the monad and wraps the resulting value in a new monad instance.
   *
   */
  map (funk) {
    return this.chain((val) => this.of(funk(val)))
  },
  /*
   * ###`monad.tap(f)`
   *
   * Applies the f to the monad and returns the result.
   *
   */
  tap (funk) {
    return funk(this)
  },
  /* ###`monad.run()`
   *
   * Runs the computation inside the monad and calls the callback with the resulting value.
   * Does not unwrap the value.
   *
   */
  run (callback, environment) {
    // `environment` becomes `this` inside the stack's composed run functions;
    // the callback defaults to identity.
    return this.stack.run.call(environment, callback||idFunc, this._value)
  },
  /* ###`monad.value()`
   *
   * Runs the computation inside the monad and calls the callback with the resulting value.
   * Unwraps the value using the `fold` functions.
   *
   */
  value (callbacks, environment) {
    const stack = this.stack
    return this.run((val) => {
      // `callbacks` may be a plain function or an object with an `onValue`
      // handler; it also serves as `this` for the stack's fold functions.
      return stack.fold.call(callbacks, (val) => {
        if(typeof callbacks === 'function') {
          callbacks(val)
        }else if (typeof callbacks === 'object' && typeof callbacks.onValue === 'function'){
          callbacks.onValue(val)
        }
        return val
      }, val)
    }, environment)
  },
  /*
   * ###`monad.ap()`
   *
   * Applies a wrapped function to a wrapped value.
   * Same as `<@>` in Haskell.
   */
  ap (val) {
    return this.chain(f => val.map(f))
  },
  /*
   * ###`monad.andThen()`
   *
   * Same as `chain` but accepts a wrapped value instead a function that returns one.
   * Same as `>>>` in Haskell.
   */
  andThen (monad) {
    return this.chain((_) => monad)
  },
  /*
   * ###`monad.debug()`
   *
   * A shortcut for inserting a breakpoint in the computation.
   */
  debug () {
    debugger
    return this
  }
}
/*
* For more information, see the [Fantasy Land spec](https://github.com/fantasyland/fantasy-land).
*
* [_View in GitHub_](../lib/wrapper.js)
*/
<file_sep>/lib/main.js
/* #Overview
*
* The package consists of the following components:
*
* ## Object wrapper
*
* The [object wrapper](wrapper.md), exposed via the `mtl.make` function, combines one or several monad
* transformer definitions and mixes them into one
* [Fantasy Land compliant](https://github.com/fantasyland/fantasy-land) monad.
*/
const mtl = {}
const createStack = require('./stack')
// Builds a monad out of the given transformer definitions (base first,
// innermost transformer last) and returns its object-oriented constructor.
mtl.make = function () {
  return createStack(Array.prototype.slice.call(arguments)).constructor
}
/* ## Monad transformer definitions
*
* The library contains four [monad transformer definitions](api.md), distributed in two packages:
* `data` and `comp`. It also contains three versions of the identity monad transformer, useful
* as a reference when implementing [custom monad transformers](implementing-transformer.md).
*
*/
// Transformer definition bundles: `data` and `comp`, plus the reference
// identity implementations in `id` (useful for writing custom transformers).
mtl.data = require('./data')
mtl.comp = require('./comp')
mtl.id = require('./id')
/* ## Base monads
*
 * When stacking monad transformers, you must place one plain monad at the bottom of the stack.
* This monad serves as the stack's base.
*
* By default, the package uses the identity monad as a base but it also defines a wrapper which
 * allows you to use the [Task monad from Folktale](https://github.com/folktale/data.task) as a base.
*/
mtl.base = require('./base')
/* ## Predefined stacks
*
* The library features five predefined monad stacks.
*
*/
mtl.simple = mtl.make(mtl.data.maybe, mtl.data.writer) // failure handling + logging
mtl.stateful = mtl.make(mtl.data.maybe, mtl.data.writer, mtl.comp.state) // + mutable state
mtl.list = mtl.make(mtl.data.list, mtl.data.maybe, mtl.data.writer) // nondeterminism variant
mtl.statelist = mtl.make(mtl.data.list, mtl.data.maybe, mtl.data.writer, mtl.comp.state) // list + state
mtl.advanced = mtl.make(mtl.base.task, mtl.data.maybe, mtl.data.writer, mtl.comp.state) // async (Task) base
/*
* ## Helpers
*
* Some helper functions that we want to keep handy:
*/
const helpers = require('./helpers')
// Re-export the generic functional helpers for convenience.
mtl.curry = helpers.curry
mtl.compose = helpers.compose
module.exports = mtl
/*
* [_View in GitHub_](../lib/main.js)
*/
<file_sep>/lib/base.js
const Task = require('data.task')
const idFunc = a => a // identity: default error handler in `fold`
// A monad definition that wires a 'data.task' instance as a base for a transformer stack.
exports.task = {
  name: "Data.Task",
  // (val) => Task(val)
  of: Task.of,
  // (val => Task(val), Task(val)) => Task(val)
  chain(fn, task) {
    return task.chain(fn)
  },
  // (val) => Task(val)
  lift: Task.of,
  // ((val) => otherVal, Task(val)) => otherVal
  // NOTE(review): the rejection branch is discarded ((a)=>a) — only the
  // success value reaches `fn`.
  value (fn, task) {
    task.fork((a)=>a, fn)
  },
  // Forks the task and tags the outcome so `fold` can tell the branches apart.
  run (fn, task) {
    task.fork((err)=> fn({taskError:err}), (val)=> fn({taskSuccess:val}) )
  },
  // Unwraps a tagged outcome. `this` is the callbacks object supplied to the
  // wrapper's `value` method, so an `onTaskError` handler can intercept
  // failures; otherwise the error passes through unchanged.
  fold (value, val) {
    return val.hasOwnProperty('taskSuccess') ? value(val.taskSuccess): (this.onTaskError || idFunc)(val.taskError)
  },
  // Wraps a continuation-style function into a Task.
  // NOTE(review): throws a plain string, not an Error instance.
  fromContinuation(fn) {
    if(typeof fn !== 'function') {throw fn + ' is not a function'}
    return new Task(fn)
  },
  // Accepts an already-constructed Task unchanged.
  fromTask(task) {
    return task
  },
  // Applies `fn` to the chained value; `fn` must return a continuation,
  // which is wrapped in a new Task.
  cont (fn, val) {
    const fnVal = fn(val)
    if(typeof fnVal !== 'function') {throw fnVal + ' is not a function'}
    return new Task(fnVal)
  },
  rejected: Task.rejected
}
// The identity monad, which is used by default as a base
exports.id = {
name: 'root',
of (val) {
return val
},
chain (funk, val) {
return funk(val)
},
map (funk, val) {
return funk(val)
},
value (funk, val) {
return funk(val)
},
run (funk, val) {
return funk(val)
},
fold (value, val) {
return value(val)
}
}
<file_sep>/docs/tutorial/p1.md
# Retrieving REST Resources
#### Using the monad stack. Using the `Task` monad, the `Maybe` monad and the `Writer` monad.
_This is part 1 from the `monad-transformers` tutorial. See also [part 2](p2.md) and [part 3](p3.md)._
The following series of tutorials shows how to perform some real-world tasks using the `monad-transformers` library. Our first task will be related to retrieving resources from a RESTful service and handling different kinds of errors.
## Mocking our Data
Below is a simple fake REST API with a set of resources defined in the `data` object and functions that simulate retrieving and modifying resources asynchronously. We will be working with this service throughout the tutorial.
const mtl = require('../lib/main')
if ( global.v8debug ) {
global.v8debug.Debug.setBreakOnException()
}
const initData = () => {
const data = {
'users/john': {
name:'John',
occupation: 'developer'
},
'users/max': {
name: 'Max' //Has no occupation
},
'users/jim': {
name:'Jim',
occupation: 'farmer'
},
'occupations/developer': {
description: 'writes code'
},
'occupations/farmer': {
description: 'feeds the animals'
}
}
return {
getResource (url, error, success) {
setTimeout(() => data[url]!== undefined ? success(data[url]) : error({error:`Invalid URL - ${url}`}), 10)
},
postResource (url, value, error, success) {
setTimeout(() => { data[url] = value; success(value) }, 10)
}
}
}
const data = initData()
## Defining some helpers
Before we start with our first task let's define several helpers that we will use. This is an important technique in functional programming - to define as much of our code as possible using pure functions.
const mGetResource = (url) => data.getResource.bind(null, url)
const suffix = (suffix, str) => suffix + '/' + str
- `mGetResource` is just a curried version of the function that we defined in the mock.
- `suffix` is a function for concatenating strings, which we can use to construct a URL of a given resource.
### Composing functions with `monad-transformers`
Another important technique in functional programming is the technique of combining different small functions using composition. After looking at these two helpers, it is not such a long shot to imagine composing them into one function that retrieves a resource given its ID. Here is how this will work using simple function composition:
const compose = (f, g) => (a) => g(f(a))
const ordinaryGetResourceFrom = compose(suffix, mGetResource)
This is cool; however, the `mGetResource` function is asynchronous and therefore it does not return a value. Therefore the `ordinaryGetResourceFrom` function is also asynchronous and cannot be composed any further via simple function composition.
However it *can* be composed in principle, and as a matter of fact we do that quite often in JavaScript. You know, using Promises. We pass an async function to the `then` method, and then we chain another async function and so on.
The `monad-transformers` lib supports Promises too among other monads - more precisely their immutable counterparts [Tasks](http://docs.folktalejs.org/en/latest/api/data/task/index.html). This means that we can wrap our normal callback-based async function in a Task and compose it, using [`chain`](../wrapper.md) (which is kinda like the `then` for Promises).
const Task = require('data.task')
const taskGetResourceFrom = (type) => (id) =>
m.of(suffix(type, id))
.chain((url) => m.fromTask(new Task(mGetResource(url))))
There is a slightly prettier way to write the same function using the `cont` helper function, which creates a Task behind the scenes.
const getResourceFrom = (type) => (id) =>
m.of(suffix(type, id))
.tellMap((url) => `Retrieving ${url}... `)
.cont(mGetResource)
A couple of remarks:
- There is one unrelated line, beginning with `tellMap`, which we will discuss later.
- The function is written in such a way that you don't have to know about which async lib is being used - you just use the `cont` helper.
- We haven't lost the ability to do normal function composition. We can do it by using the [`map`](../wrapper.md) function. Functions, composed with `map` don't have to know about monads and wrappers at all.
Using our newly-defined `getResourceFrom`, we will define one more helper function - one that given a person object, retrieves info about its occupation. The function is simple - just retrieve the `occupation` key from the person object and then make a request for it from the `occupations` endpoint. You can check the data model above, but basically we receive a plain JS object and we have to access one of its properties. There is a helper for doing that in the `Maybe` monad transformer called `maybeGet`. The strength of `maybeGet` is that it also handles the case when the value of the requested property is not defined.
const getOccupationInfo = (mPersonalInfo) =>
m.of(mPersonalInfo)
.maybeGet('occupation')
.chain(getResourceFrom('occupations'))
Notice also how we compose functions that return monads by using `chain`.
## Writing our program
Now let's apply what we defined so far and write some code that actually does something. The following snippet first retrieves the details for a given user then retrieve the details of his occupation and finally displays both pieces of info one after the other:
const m = mtl.advanced
const getPersonInfo = (name) =>
m.of(name) //1
.chain(getResourceFrom('users')) //2
.chain((personDetails) => //3
getOccupationInfo(personDetails) //4
.map((occupationInfo) => `${personDetails.name} ${occupationInfo.description}` )) //5
There should be nothing new for you in this snippet. Let`s review it line by line, as a summary of this tutorial:
1. We begin by putting a regular value into a monad, using the `of` function.
2. With `chain` we compose a function that receives a normal value and returns a monadic value. `chain` is actually the quintessential monadic function, so there is a lot of info available about it.
3. We `chain` again, this time with an inline function.
4. When we are calling `chain` we have to return a monadic value, and we do that. Again when you have a function that works in a monadic context, and you want to use it with a "normal" value, you just wrap the value in a monad before feeding it to the function.
5. We transform the value before returning it, using a "plain", non-monadic function, and the way to compose plain functions which aren't meant to be used inside a monad is by using [`map`](../wrapper.md) in the same way as `Array.map` is used to apply functions which don't work on arrays by themselves, to Arrays.
Using `map` inside the `chain` lambda is equivalent to using it outside of it. We do it inside just because we want to be able to use the `personalDetails` object.
## What we did
Now we will test the `getPersonInfo` function and make sure that it works well. The function returns a monad which contains our value, so we will want to take the value out of the monad. We do this by using the `run` function which accepts a normal callback:
exports.test = {}
exports.test.dbSuccess = (test) => getPersonInfo('john')
.run((result) => {
test.equal(result.taskSuccess.value.value.value, 'John writes code')
test.done()
})
Works fine but we better explain the `taskSuccess.value.value.value` part: Each monad transformation which we use defines its own object namespace. In that namespace we can see its value, which is actually the namespace of another monad. So effectively this means that in order to get to our value, we have to go through all these namespaces. Why doesn't the library give you the value directly? Because, as you will find out shortly, each layer of the monadic onion contributes something to our program.
### Handling errors
If we try to retrieve information about a person that does not exist, the Task monad will handle the error by stopping the computation. You will find the error in the `taskError` property:
exports.test.dbError = (test) => getPersonInfo('UndefinedPerson')
.run((result) => {
test.equal(result.taskError.error, 'Invalid URL - users/UndefinedPerson')
test.done()
})
### Handling `undefined` properties
The Maybe monad transformer handles undefined values in much the same way as the Task handles errors - it stops everything. In that way we can retrieve values that are `undefined`, and not get any errors for trying to do something with them afterwards. We have to do a `null` check only once - at the end.
For example here is what happens if we try to request a user that does not have an "occupation" field.
exports.test.dbMaybe = (test) => getPersonInfo('max')
.run((result) => {
test.equal(result.taskSuccess.value, undefined)
test.done()
})
Notice that although we performed some operations on the user after retrieving it, no exception was raised.
### Logging
Remember that our `getResourceFrom` function had one line which we ignored:
`.tellMap((url) => 'Retrieving ${url}')`
this line actually logs the url being requested by using the `Writer` monad. Sure enough, the log is part of the end result:
exports.test.dbLog = (test) => getPersonInfo('john')
.run((result) => {
test.equal(result.taskSuccess.value.writer, 'Retrieving users/john... Retrieving occupations/developer... ')
test.done()
})
exports.initData = initData
exports.suffix = suffix
Go to [Part 2](p2.md).
<file_sep>/docs/tutorial/p3.md
# Side effects
#### Creating and using custom monads.
_This is part 3 from the `monad-transformers` tutorial. See also [part 1](p1.md) and [part 2](p2.md)._
The monadic functions that we used so far were really cool and all but they were just functions, albeit asynchronous. They only received input at the beginning and did not place any output until the end. Technically they were not pure (because they accessed and modified external resources) but they were pretty close. Now we are going to do something different - we are going to handle interactive side effects.
const assign = require('object-assign')
const mtl = require("../lib/main.js")
process.stdin.setEncoding('utf8');
const util = require('util');
if ( global.v8debug ) {
global.v8debug.Debug.setBreakOnException()
}
Now in the previous example we handled some side effects using the `Reader` monad and aside from the fact that we were abusing it a little (the environment in `Reader` is supposed to be immutable and ours wasn't) this approach wasn't bad at all - when calling the function we could specify on which environment it was permitted to act upon. From the inside of the function we could regulate which parts of the code had access to the environment - for example functions we call using `map` cannot touch it (unless we loaded it beforehand using `loadEnvironment`).
When doing IO we also have an environment on which we act (in this example it will be the `process` object in nodeJS) so it makes sense to use something like `Reader` as a base. Only this time we are going to modify it just a little bit, so it fits our needs exactly.
## Defining a IO monad transformer
Monads are used for handling IO in the following way:
1. An IO monad is defined which holds an external environment within itself.
2. Users use the monad to compose pure functions which generate side effects.
3. The side effects are executed by some kind of `main` function which is the only impure part of the program.
That is precisely what we are going to do with our monad transformer. The first part is to define it. We are going to start with the implementation of the `Reader` monad transformer which is the following:
const reader = {
name: 'Reader',
//Standard functions:
of (val) {
return (env) => this.outer.of(val)
},
chain (funk, reader) {
return (env) =>
this.outer.chain((val) => {
return funk(val)(env)
}, reader(env))
},
lift (val) {
return (env) => val
},
run (f, reader) {
return f(reader(this.environment))
},
fold (value, val) {
return value(val)
},
//Custom functions:
readerMap (f, val) {
return (environment) => this.outer.of(f(val, environment))
},
loadEnvironment(val) {
return (environment) => this.outer.of(environment)
}
}
The core of the definition are a bunch of [standard functions](../implementing-transformer.md) which define how to "wrap" plain normal values in a `Reader` function (`of` and `lift`), how to `run` an already-created `Reader`, against a specific environment and most importantly, how to apply a function which takes a normal plain value and returns an instance of `Reader` to an already-created `Reader`(the infamous `chain`). However most of the work is done via custom functions (helpers if you will). The helpers are the interface of our monad transformer.
Let's see how far can we go with our custom monad by keeping the standard functionality as-is and only redefine the helpers, so they are more IO-friendly.
So let's begin by copying the `Reader` monad:
var io = reader
Change the name, so we don't confuse the two monads while debugging:
io.name = 'IO'
And lastly, we are also going to patch the `run` method so our new monad uses the `process` global variable as its environment (while leaving the possibility to mock our `process` object if we want to).
io.run = function run (f, reader) {
return f(reader(this.process || global.process))
}
Our IO monad transformer is now valid and it can be used. All we have to do is define some helpers that will make it more usable in its new context.
The standard `Reader` helpers provide direct access to the environment to the functions we compose. Which is OK if the environment is immutable so our users can see it but cannot touch it, but not OK in the current case. We would prefer to manipulate the environment from inside of the monad's implementation. So let's remove the original helpers and start from scratch. If we wanted to keep them, we would have to change their names so our new monad transformer can be used along with the original `Reader`.
delete io.readerMap
delete io.loadEnvironment
Now let's start defining our new helpers. We will keep it simple defining just one method to write in the standard output and one to read from the standard input.
The writing part is trivial. From a functional point of view, we just do nothing. All we do is run the side effect:
io.write = function (f, val) {
return (process) => {
process.stdout.write(f(val)+'\n')//Perform side effect
return this.outer.of(val)//Return val
}
}
The reading part is a bit harder because the input is _asynchronous_. So how do we "return" the input if it is not there yet? One way to do it is to return a continuation instead and then handle this continuation externally using the Task monad.
It will look like this:
const unescapeString = (str) => {
const str2 = '"' + str.slice(0,-1).slice(1) +'"'
return JSON.parse(str2).trim()
}
//Calls the callback with the user input
const promptInput = (process, callback) => {
const processData = (text) => {
process.stdin.removeListener('data', processData)
process.stdin.pause()
const input = util.inspect(text)
callback(unescapeString(input))
}
process.stdin.resume()
process.stdin.on('data',processData)
}
io.promptFor = function (f,val) {
return (process) => {
process.stdout.write(f(val)) //prompt the user to write a value
return this.outer.of((error, success) =>
promptInput(process, success))
}
}
And it can be used like this:
const ioM = mtl.make(mtl.base.task, io)
const getUsername = () =>
ioM.of()
.promptFor(()=> 'Username: ')
.chain((usernameContinuation) => m.fromContinuation(usernameContinuation))
.write((username) => `Your username is "${username}"`)
By now you probably know that although the second step is redundant it has to be there in order for our transformations to act independently of one another, and to be usable by themselves. What you may not know is that they don't _have_ to be independent. Sure, defining the transformers separately from one another gives us much freedom in composing them but sometimes we may want to define a transformer that will work only on a specific stack. If that is what we want, we can freely make use of the other monads that we have in that stack.
### Interlude: Dependencies between monad transformers. Reusing functions across different stacks
When several monad transformers are chained, their transformations are applied sequentially. This means that each transformation has access to and can trigger all previous (or "outer") transformations.
Monads are specified in the `make` method from left to right. This means that if we have a stack composed of `mtl.base.task` and then `io` then we can use the `Task` monad transformer in the implementation of the `io` monad transformer. Almost as if the `io` object inherits from the `Task` object.
Let's try it. For example if we want to redefine the `promptFor` method so it creates a Task directly all we have to do is change the `of` method to `fromContinuation`:
io.promptFor = function (f,val) {
return (process) => {
process.stdout.write(f(val)) //prompt the user to write a value
return this.outer.fromContinuation((error, success) =>
promptInput(process, success))
}
}
When we define the stack we have to make sure that there is a Task monad transformer to the left of the monad that uses it. Then we can use the function just the way that we wanted to use it:
const ioMNew = mtl.make(mtl.base.task, io)
const getUsernameNew = () =>
ioMNew.of()
.promptFor(()=> 'Username: ')
.write((username) => `Your username is "${username}"`)
Because we keep the side effects strictly inside the monad we still can chain the IO-bound functions in the same way as pure ones. Here is a more complex example requesting a username and a password and then displaying them both:
You may already recognize this pattern - using a custom lambda to bind two or more values to constants so we can use it for creating a third value (which in this case it is an IO action).
const getUsernamePass = () =>
ioMNew.of()
.promptFor(()=> 'Username: ')
.write((username) => `Your username is "${username}"`)
.chain((username) =>
ioMNew.of(username)
.promptFor((username)=> `Password for "${username}": `)
.write((password) => `Attempting connection for ${username}:${password}`))
//getUsernamePass().run()

What is cool when building dependencies as monad transformers is that each new transformation "inherits" all the methods of the previous transformations. So not only can we access the methods of the "outer" monad, but also the methods of all monads that come before it.
What this means for our new `IO` transformer is that it will work on any arbitrary stack, that features the `Task` transformer, no matter how many other transformers there are between the two.
Let's for example define a stack which contains `IO` but also contains all transformers that we used in the previous examples:
const m = mtl.make(mtl.base.task, mtl.data.maybe, mtl.data.writer, mtl.comp.reader, io)
This would allow us to run all functions that we defined early on - remember that we parametrized the `m` argument, so they don't rely on explicit stack. So let's import them:
const previous = require('./p2.js')
const initData = previous.initData
const mGetResourceFrom = previous.mGetResourceFrom
const mPostResourceTo = previous.mPostResourceTo
## Our first "real" application
We are doing a command-line interface for retrieving and modifying our resources using the `IO` transformer that we just defined and the functions for retrieving and modifying resources from the previous part.
As usual we will start by creating some general definitions and will gradually move to more specific use cases.
### Displaying resources
To display a resource, we must retrieve it and then write it in the screen (notice that we don't need to pass the stack constructor to the `mGetResourceFrom` function since all functions which are composed using `chain` get it, as their last parameter).
const prettyPrint = (obj) => JSON.stringify(obj, null, 4)
const displayResource = (type) => (id) =>
m.of(id)
.write((id)=> `Displaying info for "${id}"`)
.chain(mGetResourceFrom(type))
.write(prettyPrint)
Normally in an application before requesting a resource we have to prompt the user for its ID. Here is a general function for prompting for resource:
const promptForResource = (type) =>
m.of(type).promptFor((type)=> `${type} ID:`)
As you can probably guess these two steps compose seamlessly:
const promptAndDisplayResource = (type) =>
promptForResource(type).chain(displayResource(type))
### Modifying resources
How do we modify a resource from our command-line app?
The function looks a bit convoluted, because there are a lot of values involved, but it can be achieved just by combining the steps that we defined so far. In it, we prompt for a resource and then allow the user to modify one of the resource's properties:
const set = mtl.curry((obj, key, value) => {
const newObj = assign({}, obj)
newObj[key] = value
return newObj
})
const modifyResourceProperty = (type, property) => promptForResource(type).chain((id) =>
//Retrieve the resource
mGetResourceFrom(type, id, m)
.chain((userInfo) => m.of(userInfo)
//Prompt for a new property value
.promptFor((userInfo) => `${userInfo.name} is currently ${userInfo.occupation}. Choose another occupation:`)
//Modify the resource
.map(set(userInfo, property))
//Post the new version
.chain(mPostResourceTo(type, id))))
### Putting it together
We are now going to create a menu for our console application. We can do that by putting some actions in an object and then `prompt` users for the action that they want to undergo.
Also we don't want our program to quit when we are finished with a given task. We are going to solve this problem of our program quitting by calling our `main` function recursively.
const start = () =>
promptForAction(Object.keys(actions))
.chain((name) => actions[name])
const actions = {
'Get Users': promptAndDisplayResource('users').chain(start),
'Get Occupations': promptAndDisplayResource('occupations').chain(start),
'Modify User occupation': modifyResourceProperty('users', 'occupation').chain(start)
}
const promptForAction = (actions) =>
m.of(actions)
.write(()=> 'Available Actions')
.write((items)=> items.map((item, i) => ( `[${i + 1}] - ${item}`)).join('\n'))
.promptFor(()=> 'Action:')
.chain((index)=> m.of(actions).maybeGet(parseInt(index - 1)))
### Running the program
We are going to run the program against a fresh environment. In case of something bad happens we will just print the error:
start().run(result => console.log(result), {environment:initData()})

That's it for now. If you have questions or suggestions, go open an issue or [contact me directly](mailto:<EMAIL>)
<file_sep>/test/state_tests.js
var mtl = require('../lib/main')
var sinon = require('sinon')
var permutations = require('./permutations')
// For every permutation of transformer stacks that includes the State
// transformer, generate the same three nodeunit-style test cases.
exports.state = permutations(a => (a.indexOf(mtl.comp.state) !== -1), (one, two, three) => {
  return {
    // saveState/loadState round-trip: `map` replaces the wrapped value,
    // but a later `loadState` restores the snapshot taken by `saveState`.
    saveLoad: (test) => {
      test.expect(3)
      var state = mtl.make(one, two, three)
      state.of(4)
        .saveState()
        .map((val) => {
          test.equal(val, 4, '"save" does not affect the wrapped value')
          return 6
        })
        .map((val) => {
          test.equal(val, 6, '"map" replaces the wrapped value')
          return val
        })
        .loadState()
        .map((val) => {
          test.equal(val, 4, '"load" brings back the saved value')
          return val
        })
        .value()
      test.done()
    },
    // `value` unwraps the stack back to the plain value put in with `of`.
    value: (test) => {
      var val = 3
      var state = mtl.make(one, two, three)
      test.equal(state.of(val).value(), val, "value brings back the original value")
      test.done()
    },
    // `statefulMap` consumes (value, state) and returns a [value, newState]
    // pair; the new state is observed via the `onState` callback of `value`.
    statefulMap: (test) => {
      var state = mtl.make(one, two, three)
      var val = state.of(4)
        .statefulMap((val, state) => {
          return [val, val+1]
        })
        .value({
          onState:(state) => {
            test.equal(state, 5, '"statefulMap" lets you consume the value and state and return a new value and a new state.')
            test.done()
          }
        })
    }
  }
})
// NOTE(review): exposes the suite on `global` — presumably consumed by the
// permutation-driven test runner; confirm.
global.state = module.exports
<file_sep>/Gruntfile.js
// All library sources (used as the lint target below).
var sources = 'lib/*.js'

module.exports = function (grunt) {
  grunt.initConfig({
    pkg: grunt.file.readJSON('package.json'), // the package file to use
    // Literate-programming build step: convert the annotated JS sources
    // into markdown docs. Block-comment lines become prose; all other
    // lines are emitted as indented code.
    concat: {
      basic_and_extras: {
        options:{
          process:function(src){
            return src
              .replace(/\r/gm, '') // normalize Windows line endings
              .split(/\n/)
              .map((row) => {
                // Classify the line by its first two characters:
                // '/*', ' *' and '*/' mark block-comment (prose) lines.
                var twoL = row.slice(0,2)
                if(twoL === '/*' || twoL === ' *' || twoL === '*/') {
                  // Strip the ' * ' decoration; bare delimiter lines
                  // become paragraph breaks.
                  return row.length > 3 ? row.slice(3) + ' ' : '\n\n'
                } else {
                  // Code line: prefix with whitespace.
                  // NOTE(review): markdown code blocks require a 4-space
                  // indent but only one space is added here — confirm
                  // against the generated docs.
                  return ' ' + row + '\n'
                }
              })
              .join('')
          }
        },
        // source file -> generated doc mapping
        files: {
          'docs/overview.md': ['lib/main.js'],
          'docs/wrapper.md': ['lib/wrapper.js'],
          'docs/implementing-transformer.md': ['lib/id.js'],
          'docs/api.md': ['lib/data.js', 'lib/comp.js'],
          'docs/example.md': ['test/db_example.js'],
          'docs/tutorial/p1.md': ['tutorial/p1.js'],
          'docs/tutorial/p2.md': ['tutorial/p2.js'],
          'docs/tutorial/p3.md': ['tutorial/p3.js'],
        },
      }
    },
    // Lint sources and tests with "standard" style.
    standard: {
      options: {
        // Task-specific options go here.
      },
      your_target: [sources, "test/*.js"]
    }
  })
  grunt.loadNpmTasks('grunt-standard')
  grunt.loadNpmTasks('grunt-contrib-concat')
}
<file_sep>/tutorial/p3.js
/*
* # Side effects
*
* #### Creating and using custom monads.
*
* _This is part 3 from the `monad-transformers` tutorial. See also [part 1](p1.md) and [part 2](p2.md)._
*
*
* The monadic functions that we used so far were really cool and all but they were just functions, albeit
* asynchronous. They only received input at the beginning and did not place any output until the end.
* Technically they were not pure (because they accessed and modified external resources) but they were
* pretty close. Now we are going to do something different - we are going to handle interactive side effects.
*/
// External deps: Object.assign ponyfill (for older Node) and the
// monad-transformers library itself.
const assign = require('object-assign')
const mtl = require("../lib/main.js")
// We read interactive user input from stdin, so decode it as UTF-8 text.
process.stdin.setEncoding('utf8');
const util = require('util');
// When running under the V8 debugger, break as soon as an exception is
// thrown (exceptions can otherwise get swallowed in the monadic plumbing).
if ( global.v8debug ) {
  global.v8debug.Debug.setBreakOnException()
}
/* Now in the previous example we handled some side effects using the `Reader` monad and
* aside from the fact that we were abusing it a little (the environment in `Reader` is supposed
* to be immutable and ours wasn't) this approach wasn't bad
* at all - when calling the function we could specify on which environment it was permitted to act upon.
* From the inside of the function we could regulate which parts of the code had access to
* the environment - for example functions we call using `map` cannot touch it (unless we loaded it
* beforehand using `loadEnvironment`).
*
* When doing IO we also have an environment on which we act on (in this example it will be the `process` object
* in nodeJS)
* so it make sense to use something as `Reader` as a base. Only this time we are going to modify it
* just a little bit, so it fits our needs exactly.
*
* ## Defining a IO monad transformer
*
* Monads are used for handling IO in the following way:
*
* 1. An IO monad is defined which holds an external environment within itself.
*
* 2. Users use the monad to compose pure functions which generate side effects.
*
* 3. The side effects are executed by some kind of `main` function which is the only impure part of the
* program.
*
 * That is precisely what we are going to do with our monad transformer.
* The first part is to define it.
* We are going to start with the implementation of the `Reader` monad transformer which is
* the following:
*/
const reader = {
  name: 'Reader',
  // Standard functions (the monad-transformer interface):
  // of :: val -> Reader val — wrap a plain value; the environment is ignored.
  of (val) {
    return (env) => this.outer.of(val)
  },
  // chain :: (val -> Reader b), Reader val -> Reader b
  // Run the reader against `env`, feed each produced value to `funk`,
  // and run the resulting reader against the same environment.
  chain (funk, reader) {
    return (env) =>
      this.outer.chain((val) => {
        return funk(val)(env)
      }, reader(env))
  },
  // lift :: outer val -> Reader val — wrap an already-monadic value.
  lift (val) {
    return (env) => val
  },
  // run :: (result -> a), Reader val -> a
  // Execute the reader against the stack's configured environment.
  run (f, reader) {
    return f(reader(this.environment))
  },
  fold (value, val) {
    return value(val)
  },
  // Custom functions (the helpers — the transformer's user-facing API):
  // readerMap — like `map`, but the callback also receives the environment.
  readerMap (f, val) {
    return (environment) => this.outer.of(f(val, environment))
  },
  // loadEnvironment — replace the wrapped value with the environment itself.
  loadEnvironment(val) {
    return (environment) => this.outer.of(environment)
  }
}
/*
* The core of the definition are a bunch of [standard functions](../implementing-transformer.md) which define how to "wrap"
 * plain normal values in a `Reader` function (`of` and `lift`),
* how to `run` an already-created `Reader`, against a specific environment and most importantly,
* how to apply a function which takes a normal plain value and returns an instance of `Reader`
* to an already-created `Reader`(the infamous `chain`).
 * However most of the work is done via custom functions (helpers if you will).
* The helpers are the interface of our monad transformer.
*
* Let's see how far can we go with our custom monad by keeping the standard functionality as-is
* and only redefine the helpers, so they are more IO-friendly.
*
* So let's begin by copying the `Reader` monad:
*/
// Make `io` a *shallow copy* of `reader` rather than an alias: we mutate
// `io` below (rename it, patch `run`, delete helpers), and a plain
// `var io = reader` would silently apply those mutations to the original
// `Reader` transformer as well.
var io = assign({}, reader)
/*
 * Change the name, so we don't confuse the two monads while debugging:
 */
io.name = 'IO'
/*
 * And lastly, we are also going to patch the `run` method so our new monad uses the `process`
 * global variable as its environment (while leaving the possibility to mock our `process`
 * object if we want to).
 */
io.run = function run (f, reader) {
  // Prefer an injected `this.process` (handy for tests/mocking), falling
  // back to the real global process object.
  return f(reader(this.process || global.process))
}
/*
* Our IO monad transformer is now valid and it can be used.
* All we have to do is define some helpers that will make it more usable in its new context.
*
 * The standard `Reader` helpers provide direct access to the environment to the functions we compose.
* Which is OK if the environment is immutable so our users can see it but cannot touch it but
* not OK in the current case. We would prefer to manipulate the environment from inside of the monad's
* implementation.
* So let's remove the original helpers and start from scratch.
* If we wanted to keep them, we would have to change their names so our new monad transformer can be used
* along with the original `Reader`.
*/
// Remove the Reader-specific helpers; the IO transformer manages its
// environment internally and exposes IO-friendly helpers instead.
delete io.readerMap
delete io.loadEnvironment
/*
*
* Now let's start defining our new helpers.
* We will keep it simple defining just one method to write in the standard output and one to read
* from the standard input.
*
* The writing part is trivial. From a functional point of view, we just do nothing.
* All we do is run the side effect:
*
*/
// write :: (val -> String), val -> IO val
// Print `f(val)` (plus a newline) to stdout — the side effect — and pass
// `val` through unchanged, so writes can be inserted anywhere in a chain.
io.write = function (f, val) {
  return (process) => {
    process.stdout.write(f(val)+'\n') // Perform the side effect
    return this.outer.of(val) // Return val unchanged
  }
}
/*
* The reading part is a bit harder because the input is _asynchronous_.
* So how do we "return" the input if it is not there yet?
* One way to do it is to return a continuation instead and then
 * handle this continuation externally using the Task monad.
*
* It will look like this:
*/
// Undo util.inspect's quoting of a string: strip the surrounding single
// quotes, let JSON.parse decode the escape sequences (e.g. "\\n"), and
// trim the trailing newline produced by pressing Enter.
const unescapeString = (str) => {
  const inner = str.slice(1, -1)
  return JSON.parse('"' + inner + '"').trim()
}
// Calls `callback` exactly once with the (unescaped) user input.
const promptInput = (process, callback) => {
  const processData = (text) => {
    // One-shot listener: detach and pause stdin before reporting the input.
    process.stdin.removeListener('data', processData)
    process.stdin.pause()
    // util.inspect yields a quoted, escaped literal; unescapeString undoes it.
    const input = util.inspect(text)
    callback(unescapeString(input))
  }
  process.stdin.resume()
  process.stdin.on('data',processData)
}
// promptFor :: (val -> String), val -> IO continuation
// Print a prompt built from `val`, then wrap the pending (asynchronous)
// input as a node-style continuation for the caller to resolve — e.g. by
// lifting it into the Task monad with `fromContinuation`.
io.promptFor = function (f,val) {
  return (process) => {
    process.stdout.write(f(val)) // prompt the user to write a value
    return this.outer.of((error, success) =>
      promptInput(process, success))
  }
}
/*
* And it can be used like this:
*/
// A minimal stack: Task (for the async input) + our custom IO transformer.
const ioM = mtl.make(mtl.base.task, io)

// Prompt for a username and echo it back.
const getUsername = () =>
  ioM.of()
    .promptFor(()=> 'Username: ')
    // The continuation must be lifted into the *same* stack (`ioM`). The
    // original referenced `m` here — a different stack defined much later
    // in this file — which would mix values from two unrelated stacks.
    .chain((usernameContinuation) => ioM.fromContinuation(usernameContinuation))
    .write((username) => `Your username is "${username}"`)
/*
* By now you probably know that although the second step is redundant it
* has to be there in order for our transformations to act independently of one another, and to be usable
* by themselves.
* What you may not know is that they don't _have_ to be independent.
* Sure, defining the transformers separately from one another gives us much freedom in composing them
* but sometimes we may want to define a transformer that will work only on a
* specific stack. If that is what we want, we can freely make use of the other monads that we have in that stack.
*
 * ### Interlude: Dependencies between monad transformers. Reusing functions across different stacks
*
* When several monad transformers are chained, their transformations are
* applied sequentially. This means that each transformation has access to
* and can trigger all previous (or "outer") transformations.
*
* Monads are specified in the `make` method from left to right. This means that if we
* have a stack composed of `mtl.base.task` and then `io` then we can use the `Task` monad transformer
* in the implementation of the `io` monad transformer. Almost as if the `io` object inherits from the `Task` object.
*
 * Let's try it. For example if we want to redefine the `promptFor` method so it creates a Task directly
* all we have to do is change the `of` method to `fromContinuation`:
*/
// promptFor, take two: because this transformer sits to the right of a
// Task transformer in the stack, we can build the Task directly with the
// outer stack's `fromContinuation` instead of handing back a raw
// continuation for the caller to lift.
io.promptFor = function (f,val) {
  return (process) => {
    process.stdout.write(f(val)) // prompt the user to write a value
    return this.outer.fromContinuation((error, success) =>
      promptInput(process, success))
  }
}
/*
* When we define the stack we have to make sure that there is a Task monad transformer to the left
* of the monad that uses it.
* Then we can use the function just the way that we wanted to use it:
*/
const ioMNew = mtl.make(mtl.base.task, io)

// Prompt for a username and echo it back, using the new promptFor.
// Fix: the original called `ioMnew.of()` (wrong capitalization), which
// would throw a ReferenceError when the function is invoked.
const getUsernameNew = () =>
  ioMNew.of()
    .promptFor(()=> 'Username: ')
    .write((username) => `Your username is "${username}"`)
/*
* Because we keep the side effects strictly inside the monad we still can chain
* the IO-bound functions in the same way as pure ones.
* Here is a more complex example requesting a username and a password and then displaying them both:
*
* You may already recognize this pattern - using a custom lambda to bind two or more values to constants
* so we can use it for creating a third value (which in this case it is an IO action).
*
*/
// Prompt for a username, then — re-entering the stack inside the lambda so
// `username` stays bound in scope — prompt for a password and report both.
const getUsernamePass = () =>
  ioMNew.of()
    .promptFor(()=> 'Username: ')
    .write((username) => `Your username is "${username}"`)
    .chain((username) =>
      ioMNew.of(username)
        .promptFor((username)=> `Password for "${username}": `)
        .write((password) => `Attempting connection for ${username}:${password}`))
//getUsernamePass().run()
/*
* 
*
* What is cool when building dependencies as monad transformers is that each new transformation "inherits" all
* the methods of the previous transformations. So not only can we access the
* methods of the "outer" monad, but also the methods of all monads that come before it.
*
* What this means for our new `IO` transformer is that it will work on any
* arbitrary stack, that features the `Task` transformer, no matter how many other transformers there are
* between the two.
*
* Let's for example define a stack which contains `IO` but also contains all
* transformers that we used in the previous examples:
*/
const m = mtl.make(mtl.base.task, mtl.data.maybe, mtl.data.writer, mtl.comp.reader, io)
/*
* This would allow us to run all functions that we defined early on - remember that
* we parametrized the `m` argument, so they don't rely on explicit stack.
* So let's import them:
*/
const previous = require('./p2.js')
const initData = previous.initData
// NOTE(review): the visible portion of p2.js only exports `initData`,
// `suffix` and the tests; confirm it also exports `mGetResourceFrom` and
// `mPostResourceTo`, otherwise these two bindings are `undefined`.
const mGetResourceFrom = previous.mGetResourceFrom
const mPostResourceTo = previous.mPostResourceTo
/*
* ## Our first "real" application
*
* We are doing a command-line interface for retrieving and modifying our resources using the `IO` transformer that
* we just defined and the functions for retrieving and modifying resources from the previous part.
*
* As usual we will start by creating some general definitions and will gradually move to more specific use cases.
*
* ### Displaying resources
*
* To display a resource, we must retrieve it and then write it in the screen
 * (notice that we don't need to pass the stack constructor to the `mGetResourceFrom` function since
* all functions which are composed using `chain` get it, as their last parameter).
*/
// prettyPrint :: Object -> String
// JSON with 4-space indentation, for human-readable console output.
// Fix: the original omitted `const`, creating an implicit global (and a
// ReferenceError under strict mode).
const prettyPrint = (obj) => JSON.stringify(obj, null, 4)

// displayResource :: type -> id -> m resource
// Log which resource is requested, fetch it and pretty-print it.
const displayResource = (type) => (id) =>
  m.of(id)
    .write((id)=> `Displaying info for "${id}"`)
    .chain(mGetResourceFrom(type))
    .write(prettyPrint)
/*
* Normally in an application before requesting a resource we have to prompt the user for its ID.
* Here is a general function for prompting for resource:
*/
// Ask the user (via the IO transformer's `promptFor`) for the ID of a
// resource of the given type; the entered ID becomes the wrapped value.
const promptForResource = (type) =>
m.of(type).promptFor((type)=> `${type} ID:`)
/*
* As you can probably guess these two steps compose seamlessly:
*/
// Prompt for an ID, then fetch and pretty-print that resource.
const promptAndDisplayResource = (type) =>
promptForResource(type).chain(displayResource(type))
/*
* ### Modifying resources
*
* How do we modify a resource from our command-line app?
*
* The function looks a bit convoluted, because there are a lot of values involved, but it can be achieved just by
* combining the steps that we defined so far.
* In it, we prompt for a resource, and then allow you to modify one of the resource's properties:
*
*/
// set :: (obj, key, value) -> obj'
// Immutable update: returns a shallow copy of `obj` with `key` bound to
// `value`, leaving the original object untouched. Curried, so it can be
// partially applied, e.g. set(userInfo, 'occupation').
const set = mtl.curry((obj, key, value) => {
const copy = assign({}, obj)
copy[key] = value
return copy
})
// Interactive edit flow: prompt for a resource ID, fetch the resource,
// prompt for a new value of `property`, set it immutably, and POST the
// updated resource back to the same endpoint.
// NOTE(review): the prompt text hard-codes "occupation", so the message
// only reads naturally when property === 'occupation' - confirm intent.
const modifyResourceProperty = (type, property) => promptForResource(type).chain((id) =>
//Retrieve the resource
mGetResourceFrom(type, id, m)
.chain((userInfo) => m.of(userInfo)
//Prompt for a new property value
.promptFor((userInfo) => `${userInfo.name} is currently ${userInfo.occupation}. Choose another occupation:`)
//Modify the resource
.map(set(userInfo, property))
//Post the new version
.chain(mPostResourceTo(type, id))))
/*
* ### Putting it together
*
* We are now going to create a menu for our console application.
* We can do that by putting some actions in an object and then `prompt` users for the action that they want to undergo.
*
* Also we don't want our program to quit when we are finished with a given task. We are going to solve this problem of our program quitting by calling our `main` function recursively.
*/
// Entry point of the menu loop. `actions` and `promptForAction` are
// declared below, but both are only dereferenced when start() is called,
// so the later `const` declarations are safe here.
const start = () =>
promptForAction(Object.keys(actions))
.chain((name) => actions[name])
// Each action chains back into `start`, which is what keeps the
// application running after a task finishes.
const actions = {
'Get Users': promptAndDisplayResource('users').chain(start),
'Get Occupations': promptAndDisplayResource('occupations').chain(start),
'Modify User occupation': modifyResourceProperty('users', 'occupation').chain(start)
}
// Print a numbered menu, prompt for a 1-based index, and resolve it back
// to the action name. `maybeGet` short-circuits the computation when the
// input is out of range or not a number.
const promptForAction = (actions) =>
m.of(actions)
.write(()=> 'Available Actions')
.write((items)=> items.map((item, i) => ( `[${i + 1}] - ${item}`)).join('\n'))
.promptFor(()=> 'Action:')
.chain((index)=> m.of(actions).maybeGet(parseInt(index - 1)))
/*
* ### Running the program
*
* We are going to run the program against a fresh environment. In case of something bad happens we will just print
* the error:
*/
//start().run(result => console.log(result), {environment:initData()})
/*
* 
*
* That's it for now. If you have questions or suggestions, go open an issue or [contact me directly](mailto:<EMAIL>)
*/
<file_sep>/tutorial/p4.js
/*
* # Dynamic
*
* #### Creating custom monads.
*
* _This is part 4 from the `monad-transformers` tutorial. See also [part 1](p1.md), [part 2](p2.md) and
* [part 3](p3.md)._
*
* Most of the applications that we write are dynamic - they constantly receive input and output.
* And a breed of especially dynamic applications are the GUI applications.
*
* In command-line applications you prompt the user when it is OK to contact you. In graphical user interfaces
* you just cannot stop him from clicking. And you cannot ignore his actions either, because he will complain
* that the app isn't responsive.
*
* ## Interlude: Purely-functional
*
* By now you probably realized that real applications cannot be purely functional, that is they must
* feature a core that takes care of side effects
* (Haskell applications are called purely functional because that code is inside the Haskell runtime itself).
* Our job is to model the application such that this core is as trivial. Monads are NOT used to
* perform side effects - they are used to contain them.
*
* ## Our runtime.
*
* In the previous part of the tutorial we started writing our application by defining some pure functions
* and then we went on to connect it to the outside world. This time we will start out the opposite way -
* we will define our side-effect handling functions first and then proceed to build a monad transformer that
* handles them and a simple application.
*
* Our runtime will be as simple as possible.
*
* It will consist of one function called `input` that gets called for every user action
* (a kind of universal event handler)
* and one function called `output` that we must use to render a new UI on screen. The main
* difference between dynamic or interactive applications and non-dynamic ones is that here
* `input` and `output` are not related. That is, you can produce input several times before you get
* any output (for example when you have to fill several fields)
* and you can receive output out of the blue (for example if a slow request from the server
* finally arrives, or when someone else edits the same object that you have opened).
*
* More formally, `runtime` is a function which accepts a function called `input` and returns a
* function called output.
*
* The `input` function is written by us.
* It accepts some object that represents a user action (an event if you will) and does not return anything.
*
* The `output` function is given to us by the runtime.
* It accepts some object that represents a DOM node and again does not return anything.
*
*/
// Minimal "runtime" sketch: given the app's universal `input` handler,
// return an `output` function that renders a DOM node.
// Fix: append to document.body - appending an element directly to
// `document` raises a HierarchyRequestError.
// NOTE(review): `input` is intentionally unused in this illustrative stub.
const runtime = (input) => (node) => {
document.body.appendChild(node)
}
/*
*
*
* One thing that we have to take into account is that in order for the `output` function to remain pure
* it must only deal with immutable values. That is it should not modify the DOM directly. A way to escape
* this is to rerender the DOM every time the function is called. This is problematic from a performance point
* of view, but virtual-DOM libraries such as React make this approach practical:
*/
const react = require('react')
/*
* With React, our runtime function becomes very simple
*/
// React-based runtime sketch.
// Fixes: the original redeclared `const runtime` (a SyntaxError, since
// `runtime` is already declared above in this module) and called the
// bare, undefined `createClass`; renamed to `reactRuntime` and qualified
// the call through the imported `react` module.
// NOTE(review): no caller of the old name is visible in this file -
// confirm nothing external relied on it.
const reactRuntime = () => {
const component = react.createClass({render(ui){return ui}})
return {component, output:(node) => {
component.render(node)
}}
}
const mtl = require("../lib/main.js")
<file_sep>/docs/implementing-transformer.md
# Implementing a monad transformer
Monad transformers are tricky, and one of the reasons for this is that they require an excessive amount of type juggling. You have to constantly wrap things in boxes and unwrap them again.
One of the aims of this package is to reduce the amount of wrapping and unwrapping needed for making a new transformer and to provide an easy way to define and combine transformers.
It does this by defining a monad transformer definition format, which allows you to specify your transformer only by specifying its transformations on the values. With it, all it takes to implement a transformer is implement these four functions: `of` (AKA `return`), `chain` (AKA `flatMap`) `lift` and `value`(AKA `run`)
## The trivial implementation
Consider the identity Monad transformer. This is a monad transformer that does nothing: or in other words it produces a monad which behaves the same way as the one given to it as an argument. Here is how the implementation of these methods would look:
exports.idMinimal = {
name: 'idMinimal',
The `of` function takes a scalar value and returns an instance of the outer monad. In this case we delegate everything to the outer monad's `of` method. We access the outer monad with `this.outer`.
// (val) => M(val)
of (val) {
return this.outer.of(val)
},
`chain` is the heart of any monad or monad transformer.
In this case we implement it by just calling the `chain` function of the host monad (using `this.outer.chain`) with the function given to us as an argument.
// (val => M(val) , M(val)) => M(val)
chain (fn, val) {
return this.outer.chain(fn, val)
},
The `lift` function is kinda like `of`, but it accepts an instance of the outer monad instead of a 'plain' value.
// (M(val)) => M(val)
lift (val) {
return val
},
Having both 'lift' and 'of' enables us to convert any value created by one monad transformer to a value that holds all elements of the stack.
Finally the `value` function provides a way to get 'the value back'. What it does is unwrap a previously-wrapped monad. In this case we didn't do any wrapping, so we don't have to do any unwrapping either.
// ((val) => otherVal, M(val)) => otherVal
value (fn, val) {
return this.outer.value(fn, val)
},
fold (value, val) {
return value(val)
}
}
# Manipulating the value
All monad transformers do the same thing (given a monad `A`, they produce a monad `B(A)` which somehow augments `A`), but there is no general formula for doing it.
Simpler monads can be implemented just by manipulating the value inside the host monad.
Our next implementation of ID will just wrap the underlying value (which we called A) in a plain object.
So `M(A)` would become `M ({idVal:A})` when we wrap it and will be back to `M(A)` when we unwrap it.
Here is how this implementation would look like:
exports.id = {
name: 'Id',
The `of` function takes a scalar value and returns an instance of the outer monad. In this case we delegate everything to the outer monad's `of` method. We access the outer monad with `this.outer`.
// (val) => M({idVal:val})
of (val) {
return this.outer.of({idVal: val })
},
chain just calls the `chain` function of the host monad like in the previous example. The difference is that it applies some transformation to the value in order to fit the new context.
// (val => M({idVal:val}) , M({idVal:val})) => M({idVal:val})
chain (fn, mIdVal) {
return this.outer.chain((idVal) => {
return fn(idVal.idVal)
}, mIdVal)
},
The `lift` function uses `chain` + `of` (which is the same as `map`) to go to the host monad and modify the value inside it.
// (M(val)) => M({idVal:val})
lift (mVal) {
return this.outer.chain((val) => this.outer.of({idVal: val}), mVal)
},
Lastly we have the `value` function (or the interpreter), which unwraps a previously-wrapped value.
// ((val) => otherVal, M({idVal:val})) => otherVal
value (fn, mIdVal) {
return this.outer.value((idVal)=> {
return fn(idVal.idVal)
}, mIdVal)
},
fold (value, idVal) {
return value(idVal.idVal)
}
}
Notice that we are always returning an instance of the outer monad.
That is, if you are to apply the transformation several times, the values nest inside M: M({idVal:{idVal: a}})
However not all monad transformers are like that.
## A more complex structure
So far we have seen monad transformers which only deal with the value inside the given monad A. However not all monad transformers are like that.
There are monad transformers which add additional structure to the monad itself. Examples of the first type are all transformers that we have seen so far. An example of the second type is the 'State' monad, which given the same value `M(A)`, will produce something like `() =>{ M([A, State]) }`. That is, the transformer adds the state value to the 'host' monad `M`, and then it wraps the monad itself in a function.
Now consider an alternative, a little more complex implementation of the ID monad. One which wraps the M monad into another plain object, so the value of M(A) becomes `{idContainer: M({idVal:a})}`. Notice that the transformer consists of two parts: one which wraps around the host monad, and one which wraps around the value in it.
exports.idWrapped = {
name: 'IdWrapped',
// (val) => {idContainer: M({idVal:a})}
of (val) {
return {
idContainer: this.outer.of({idVal: val})
}
},
// (a => {idContainer:M({idVal:a})}, {idContainer:M({idVal:a})}) => {idContainer:M({idVal:a})}
chain (fn, idContainerMIdVal) {
return {
idContainer: this.outer.chain((idVal) => {
const val = fn(idVal.idVal)
return val.idContainer
}, idContainerMIdVal.idContainer)
}
},
// (M(val)) => {idContainer:M({idVal:val})}
lift (mVal) {
return {
idContainer: this.outer.chain((val) => this.outer.of({idVal: val}), mVal)
}
},
// ((val) => otherVal, {idContainer: M({idVal:val}))}=> otherVal
value (fn, idContainerMIdVal) {
return this.outer.value((idVal)=> {
return fn(idVal.idVal)
}, idContainerMIdVal.idContainer)
},
run (fn, idContainerMIdVal) {
return fn(idContainerMIdVal.idContainer)
},
fold (value, idVal) {
return value(idVal.idVal)
}
}
The key difference is that with this monad nesting happens both inside the host monad and outside of it. If we apply the transformation two times the value becomes: `{idContainer:{idContainer:M({idVal:{idVal:a}})}}`.
[_View in GitHub_](../lib/id.js)
<file_sep>/tutorial/p1.js
/*
* # Retrieving REST Resources
*
* #### Using the monad stack. Using the `Task` monad, the `Maybe` monad and the `Writer` monad.
*
* _This is part 1 from the `monad-transformers` tutorial. See also [part 2](p2.md) and [part 3](p3.md)._
*
*
* The following series of tutorials show performing some real-world tasks using the `monad-transformers` library.
* Our first task will be related to retrieving resources from a RESTful service and handling different kinds
* of errors.
*
* ## Mocking our Data
*
* Below is a simple fake REST API with a set of resources defined in the `data`
* object and functions that simulate retrieving and modifying resources asynchronously.
* We will be working with this service throughout the tutorial.
*/
const mtl = require('../lib/main')
// When running under the legacy V8 debugger, break as soon as any
// exception is thrown - useful for debugging the async callbacks below.
if ( global.v8debug ) {
global.v8debug.Debug.setBreakOnException()
}
// Build a fresh in-memory fake of the REST backend used throughout the
// tutorial. Returns two callback-based async operations that simulate
// network latency with a 10ms setTimeout.
const initData = () => {
// The "database": resource URL -> resource object.
const store = {
'users/john': {
name:'John',
occupation: 'developer'
},
'users/max': {
name: 'Max' //Has no occupation
},
'users/jim': {
name:'Jim',
occupation: 'farmer'
},
'occupations/developer': {
description: 'writes code'
},
'occupations/farmer': {
description: 'feeds the animals'
}
}
return {
// Look the URL up; call success(resource) or error({error}) asynchronously.
getResource (url, error, success) {
setTimeout(() => {
const found = store[url]
if (found !== undefined) {
success(found)
} else {
error({error:`Invalid URL - ${url}`})
}
}, 10)
},
// Store `value` under `url` and echo it back on success.
postResource (url, value, error, success) {
setTimeout(() => {
store[url] = value
success(value)
}, 10)
}
}
}
// A single shared datasource for this part of the tutorial.
const data = initData()
/* ## Defining some helpers
*
* Before we start with our first task let's define several helpers that we will use.
* This is an important technique in functional programming - to define as much of our code as possible using pure
* functions.
*/
// Curried form of data.getResource: fix the URL now, supply the
// (error, success) callbacks later.
const mGetResource = (url) => data.getResource.bind(null, url)
// Join a path prefix and a tail with '/', e.g. suffix('users', 'john') -> 'users/john'.
const suffix = (prefix, rest) => prefix + '/' + rest
/*
* - `mGetResource` is just a curried version of the function that we defined in the mock.
*
* - `suffix` is a function for concatenating strings, which we can use to construct a URL of a given resource.
*
* ### Composing functions with `monad-transformers`
*
* Another important technique in functional programming is the technique of combining different small functions using
* composition. After looking at these two helpers, it is not such a long shot to imagine composing them into
* one function that retrieves a resource given its ID. Here is how this will work using simple function composition:
*/
// Left-to-right function composition: compose(f, g)(x) === g(f(x)).
const compose = (first, second) => (input) => second(first(input))
// Illustrative only, never called. NOTE(review): compose feeds a single
// argument while `suffix` expects two, so this would build a URL like
// 'users/undefined' - it exists purely to motivate monadic composition.
const ordinaryGetResourceFrom = compose(suffix, mGetResource)
/* This is cool however the `mGetResource` function is asynchronous and therefore it does not return a value.
* Therefore the `ordinaryGetResourceFrom` function is also asynchronous and cannot be composed any further via simple function composition.
*
* However it *can* be composed in principle, and as a matter of fact we do that quite often in JavaScript.
* You know, using Promises. We pass an async functions to the `then` method, and then we chain another async function and
* so on.
*
* The `monad-transformers` lib supports Promises too among other monads - more precisely their immutable conterparts
* [Tasks](http://docs.folktalejs.org/en/latest/api/data/task/index.html). This means that we can wrap our normal
* callback-based async function in a Task and compose it, using [`chain`](../wrapper.md) (which is kinda like the `then` for
* Promises).
*/
const Task = require('data.task')
// Wrap the callback-based mGetResource in a Task and lift it into the
// stack with fromTask. (`m` is declared later in this module but is only
// read when this function is called, so the forward reference is safe.)
const taskGetResourceFrom = (type) => (id) =>
m.of(suffix(type, id))
.chain((url) => m.fromTask(new Task(mGetResource(url))))
/*
* There is a slightly prettier way to write the same function using the `cont` helper function, which creates a
* Task behind the scenes.
*/
// Same as taskGetResourceFrom, but using the `cont` helper which builds
// the Task behind the scenes; `tellMap` logs the URL via the Writer layer.
const getResourceFrom = (type) => (id) =>
m.of(suffix(type, id))
.tellMap((url) => `Retrieving ${url}... `)
.cont(mGetResource)
/*
* A couple of remarks:
*
* - There is one unrelated line, beginning with `tellMap` which we will discuss later.
*
* - The function is written in such a way that you don't have to know about which async lib is being used -
* you just use the `cont` helper.
*
* - We haven't lost the ability to do normal function composition. We can do it by using the [`map`](../wrapper.md)
* function. Functions, composed with `map` don't have to know about monads and wrappers at all.
*
* Using our newly-defined `getResourceFrom`, we will define one more helper function -
* one that given a person object, retrieves info about its occupation.
* The function is simple - just retrieve the `occupation` key from the person object and then make a request for
* it from the `occupations` endpoint. You can check the data model above, but basically we receive a plain JS object
* and we have to access one of its properties. There is a helper for doing that in the `Maybe` monad transformer
* called `maybeGet`. The strength of `maybeGet` is that it also handles
* the case when the value of the requested property is not defined.
*/
// Given a person object, look up its 'occupation' key (maybeGet
// short-circuits when the key is absent) and fetch the matching
// resource from the 'occupations' endpoint.
const getOccupationInfo = (mPersonalInfo) =>
m.of(mPersonalInfo)
.maybeGet('occupation')
.chain(getResourceFrom('occupations'))
/* Notice also how we compose functions that return monads by using `chain`.
*
* ## Writing our program
*
* Now let's apply what we defined so far and write some code that actually does something.
* The following snippet first retrieves the details for a given user then retrieve the details of his
* occupation and finally displays both pieces of info one after the other:
*
*/
// The pre-built stack used by this part of the tutorial.
const m = mtl.advanced
// Fetch a user, then their occupation, and combine both into one string.
// The numbered comments match the step-by-step walkthrough below.
const getPersonInfo = (name) =>
m.of(name) //1
.chain(getResourceFrom('users')) //2
.chain((personDetails) => //3
getOccupationInfo(personDetails) //4
.map((occupationInfo) => `${personDetails.name} ${occupationInfo.description}` )) //5
/*
* There should be nothing new for you in this snippet. Let`s review it line by line, as a summary of this
* tutorial:
*
* 1. We begin by putting a regular value into a monad, using the `of` function.
*
* 2. With `chain` we compose a function that receives a normal value and returns a monadic value.
* `chain` is actually the quintessential monadic function, so there is a lot of info available
* about it.
*
* 3. We `chain` again, this time with an inline function.
*
* 4. When we are calling `chain` we have to return a monadic value, and we do that.
* Again when you have a function that works in a monadic context, and you want to use it with
* a "normal" value, you just wrap the value in a monad before feeding it to the function.
*
* 5. We transform the value before returning it, using a "plain", non-monadic function, and the way
* to compose plain functions which aren't meant to be used inside a monad is by using [`map`](../wrapper.md)
* in the same way as `Array.map` is used to apply functions which don't work on arrays by
* themselves, to Arrays.
*
* Using `map` inside the `chain` lambda is equivalent to using it outside of it. We do it inside
* just because we want to be able to use the `personalDetails` object.
*
* ## What we did
*
* Now we will test the `getPersonInfo` function and make sure that it works well. The function returns
* a monad which contains our value, so we will want to take the value out of the monad. We do this by using
* the `run` function which accepts a normal callback:
*/
exports.test = {}
// Happy path: the value is nested one namespace per transformer in the stack.
exports.test.dbSuccess = (test) => getPersonInfo('john')
.run((result) => {
test.equal(result.taskSuccess.value.value.value, 'John writes code')
test.done()
})
/*
* Works fine but we better explain the `taskSuccess.value.value.value` part:
* Each monad transformation which we use defines its own object namespace. In that namespace
* we can see its value, which is actually the namespace of another monad. So effectively this means that
* in order to get to our value, we have to go through all these namespaces.
* Why doesn't the library give you the value directly?
* Because, as you will find out shortly, each layer of the monadic onion contributes something to
* our program.
*
* ### Handling errors
*
* If we try to retrieve information about a person that does not exist the Task monad will handle the error
* by stopping the computation. You will find the error in the `taskError` property:
*/
// A missing resource surfaces through the Task layer as `taskError`.
exports.test.dbError = (test) => getPersonInfo('UndefinedPerson')
.run((result) => {
test.equal(result.taskError.error, 'Invalid URL - users/UndefinedPerson')
test.done()
})
/*
* ### Handling `undefined` properties
*
* The Maybe monad transformer handles undefined values in much the same way as the Task handles errors -
* it stops everything. In that way we can retrieve values that are `undefined`, and not get any errors for
* trying to do something with them afterwards. We have to do a `null` check only once - at the end.
*
* For example here is what happens if we try to request a user that does not have an "occupation" field.
*
*/
// 'max' has no occupation: the Maybe layer stops the chain without
// raising, leaving an undefined value at the end.
exports.test.dbMaybe = (test) => getPersonInfo('max')
.run((result) => {
test.equal(result.taskSuccess.value, undefined)
test.done()
})
/*
* Notice that although we performed some operations on the user after retrieving it, no exception was raised.
*
* ### Logging
*
* Remember that our `getResourceFrom` function had one line which we ignored:
*
* `.tellMap((url) => 'Retrieving ${url}')`
*
* this line actually logs the url being requested by using the `Writer` monad.
* Sure enough, the log is part of the end result:
*/
// The Writer layer accumulates the 'Retrieving ...' log messages emitted
// by tellMap in getResourceFrom.
exports.test.dbLog = (test) => getPersonInfo('john')
.run((result) => {
test.equal(result.taskSuccess.value.writer, 'Retrieving users/john... Retrieving occupations/developer... ')
test.done()
})
// Re-exported for the later parts of the tutorial.
exports.initData = initData
exports.suffix = suffix
/*
* Go to [Part 2](p2.md).
*/
<file_sep>/test/maybe_tests.js
var mtl = require('../lib/main')
var sinon = require('sinon')
var permutations = require('./permutations')
// Break on thrown exceptions when running under the legacy V8 debugger.
if ( global.v8debug ) {
global.v8debug.Debug.setBreakOnException()
}
//TODO add err handling to all types
// Run the Maybe test suite for every 3-transformer permutation that
// includes mtl.data.maybe (see ./permutations).
exports.maybe = permutations(a => (a.indexOf(mtl.data.maybe) !== -1), (one, two, three) => {
return {
// Nested maybeGet reaches through two levels of keys.
// NOTE(review): the local `m` binding below is never read.
testOne: (test) => {
var maybe = mtl.make(one, two, three)
var spy = sinon.spy((a) => a)
var m = maybe.of({foo: {baz: 'bar'}})
.maybeGet('foo')
.maybeGet('baz')
.map(spy)
.value()
test.equals(spy.lastCall.returnValue, 'bar')
test.done()
},
// Once maybeMap yields undefined, later maps are skipped entirely.
testTwo: (test) => {
var maybe = mtl.make(one, two, three)
var spy = sinon.spy((a) => a)
maybe.of(4)
.map(function (val) {return val + 1})
.maybeMap((val) => {
test.equals(val, 5, 'A call to "map" modifies the value, and packs it again')
return undefined
})
.map(spy)
.value()
test.equals(spy.called, false, 'After a val is set to undefined, functions are no longer called')
test.done()
},
// maybeGet on a missing key routes the result to the onNothing handler.
testThree: (test) => {
var maybe = mtl.make(one, two, three)
var spy = sinon.spy((a) => a)
maybe.of({foo: 'bar'})
.maybeGet('undefined_key')
.map(spy)
.value({
onNothing:()=>{
test.equals(spy.called, false, 'When you get an undefined value, maybe is not called ')
test.done()
}
})
}
}
})
// Expose the suite globally for the ad-hoc test runner.
global.maybe = module.exports
<file_sep>/tutorial/p2.js
/*
/* # Modifying REST Resources / Parametrizing our data source
*
* #### Using the monad stack. Using the `Reader` monad.
*
* _This is part 2 from the `monad-transformers` tutorial. See also [part 1](p1.md) and [part 3](p3.md)._
*
*
* In the previous part we defined some functions for retrieving resources from REST endpoints.
* Now we will produce some functions that modify the resources they retrieve.
*/
const mtl = require("../lib/main.js")
// Break on thrown exceptions when running under the legacy V8 debugger.
if ( global.v8debug ) {
global.v8debug.Debug.setBreakOnException()
}
/*
* ## Parametrizing the datasource
*
* Let's start by improving what we have so far.
*/
const initData = require('./p1.js').initData
// Curried path join: suffix('users')('john') === suffix('users', 'john').
const suffix = mtl.curry((suffix, str) => suffix + '/' + str)
/*
* As you can see I included some of the resources from the previous tutorial, however
* I could not reuse more of it, because of the way that the `getResource` function was written:
* Namely, the function is bound to a specific data source. This means that we won't be able to reuse it
* because we will have to always keep track of what in our data is changed.
*/
const data = initData()
// The part-1 style helper, bound to one specific datasource (the problem
// this section sets out to fix).
const GetResource = (url) => data.getResource.bind(null, url)
/*
* So let's fix `GetResource`, by accepting the datasource as an additional parameter.
* Now we can specify which datasource we want to work with when we call it.
*/
// Datasource-parametrized version: the caller supplies `data` explicitly.
const mGetResource = (url, data) => data.getResource.bind(null, url)
/*
* However, this breaks our workflow a bit. In the previous part of the tutorial we could define
* this beautiful chaining functions like `getResourceFrom` that were quite handy:
*/
// The part-1 version, reproduced for comparison. NOTE(review): with the
// new two-argument mGetResource above this no longer receives a
// datasource, which is exactly the motivation for the Reader below.
const oldGetResourceFrom = (type) => (id) =>
m.of(suffix(type, id))
.tellMap((url) => `Retrieving ${url}... `)
.cont(mGetResource)
/*
* Now, because `getResourceFrom` uses `mGetResource` it would also have to accept a
* datasource when called, and our whole codebase will become bloated. Unless there is a transformer
* that can handle this for us.
*
* And as you might suspect, there actually is one.
*
* The `Reader` monad transformer is the evil twin of the `Writer` monad transformer.
* It gives us access to an immutable datastructure sometimes called an 'environment' for storing all kinds of configurations
* throughout our computation without bothering to pass it around to each new function.
* It is like an additional parameter that you always get.
*
* In order to use the `Reader` monad transformer let's first refactor our code a bit:
*/
// Desugared variant: uses the explicit fromContinuation constructor
// instead of the `cont` helper, to make room for loadEnvironment below.
const oldGetResourceFrom2 = (type) => (id) =>
m.of(suffix(type, id))
.chain((url) => m.fromContinuation(mGetResource(url)))
/*
* Remember this? This is the first version of the `getResourceFrom` function which does not use the `cont` helper.
* Or it is close to it anyways - this one uses another helper - the `fromContinuation` constructor. We desugared our
* function in order to combine it with another helper - `loadEnvironment`.
*/
// Reader-based fetch: the datasource comes from the environment supplied
// to `run`, so callers never pass it explicitly.
const mGetResourceFrom = (type, id) =>
m.loadEnvironment().chain((environment) =>
m.fromContinuation(mGetResource(suffix(type, id), environment)))
/*
* The `Reader` allows us to run our function against the data that we defined earlier or any other.
*/
exports.test = {}
// The environment (a fresh fake datasource) is injected via run's options.
exports.test.mGetResource = (test) => {
mGetResourceFrom('users', 'john')
.run((result) => {
test.equal(result.taskSuccess.value.value.occupation, "developer")
test.done()
}, {environment:initData()})
}
/*
* So that is the formula for using the `Reader`: we define the environment in the `run` method,
* we use the environment whenever we need it in the function body, and all functions that we call with
* `chain` also have access to the environment.
*
* ### Interlude: Monad transformers and the transformer stack
*
* That is all good, you might say, but why did we have to take a step back in order to use it? Why can't we still use the
* `cont` helper _and_ have access to the environment?
* The reason for this is that we are combining the effects of two different monad transformers.
* And although it may seem so from a first glance, monad transformers aren't in any way related to each other.
* A monad transformer stack is defined just by specifying the transformers that it uses and in which order.
* For example here is a monad transformer stack that we can use for this tutorial:
*/
// Custom stack for this part: Task at the base, then Maybe, Writer, Reader.
const m = mtl.make(mtl.base.task, mtl.data.maybe, mtl.data.writer, mtl.comp.reader)
/*
* We include just the monads we need and we can customize the stack however we like.
* If you really want to use a given helper you have to define it in terms of the other helpers.
* Here is, for example, a function for chaining computations that use the environment (remember: `chain`
* and `of` are key, everything else can be defined in terms of them).
*/
// Chain a continuation `f(val, env)` that also receives the Reader
// environment, defined purely in terms of existing helpers.
m.prototype.readerCont = function (f) {
return this.chain((val) =>
m.loadEnvironment()
.cont((env) => f(val, env)))
}
/*
* Once we have this we can totally abstract away our environment in the
* `getResourceFrom` function:
*/
// With readerCont in place, the environment handling disappears from the
// function body entirely.
const helperGetResourceFrom = (type) => (id) =>
m.of(suffix(type, id))
.readerCont(mGetResource)
/*
* Let's verify that this works before moving on:
*/
// Same behavior as the explicit Reader version, verified end to end.
exports.test.helperGetResourceFrom = (test) => {
helperGetResourceFrom('users')('john')
.run((result) => {
test.deepEqual(result.taskSuccess.value.value,{name:"John", occupation:"developer"})
test.done()
}, {environment:initData()})
}
/*
* As you can see it works in the same way as it worked before. the only difference is that we have to pass the
* environment as an argument to the `run` function.
*
* ## Posting resources
*
* What would a primitive function for posting resources look like? Here is one way:
*/
// POST counterpart of mGetResourceFrom: write `resource` to type/id using
// the datasource from the Reader environment.
const postResourceTo = (type, id) => (resource) => m.loadEnvironment().chain((data) =>
m.fromContinuation(data.postResource.bind(null, suffix(type, id), resource)))
/*
* It is pretty easy to conceive once you understand its `get` counterpart.
*
* ### Interlude: Currying
*
* Wait a sec. Do we need a function that receives a resource type and an id, and returns a resource modifier?
* Or do we actually want one that receives just the type and returns a function that accepts both an id and a new version of a
* resource? If you find yourself asking these questions, the answer is just to wrap the function in the `curry` constructor
* and make it work both ways.
*
* Just remember to order your arguments from the one you know a lot about to the one that you don't know:
*/
// Curried POST: can be applied as (type)(id)(resource) in any grouping,
// arguments ordered from most to least known.
const mPostResourceTo = mtl.curry((type, id, resource) =>
m.loadEnvironment().chain((data) =>
m.fromContinuation(data.postResource.bind(null, suffix(type, id), resource))))
/*
* After we have functions for retrieving and posting a resource, we might combine them in many ways.
*
* For example let's write a function that modifies a resource:
*/
// Read-modify-write: fetch the resource, transform it with the plain
// function `f`, and POST the result back under the same type/id.
const modifyResource = mtl.curry((type, f, id) =>
mGetResourceFrom(type, id)
.map(f)
.chain(mPostResourceTo(type, id)))
/*
* Keeps getting easier and easier. This allows us to modify a resource using ordinary functions like:
*/
// NOTE(review): this mutates its argument in place before returning it;
// modifyResource only uses the return value, but a non-mutating copy
// would be safer - confirm nothing relies on the in-place change.
const makeFarmer = (user) => { user.occupation = 'farmer'; return user}
/*
* And the fact that is curried allows us to "breed" it into a thousand more-specific functions:
*/
// Partial applications of the curried modifyResource.
const modifyUser = modifyResource('users')
const mMakeFarmer = modifyUser(makeFarmer)
/*
* Beautiful. Let's test that:
*/
// POSTing echoes the new value, so the result reflects the change.
exports.test.modify = (test) => {
m.of('john')
.chain(mMakeFarmer)
.run((result) => {
test.deepEqual(result.taskSuccess.value.value,{name:"John", occupation:"farmer"})
test.done()
}, {environment:initData()})
}
/*
* To be sure, let's retrieve the resource again, after changing it:
*/
// Round trip: modify, then re-fetch to confirm the datasource was updated.
// `.chain((_)=> ...)` deliberately discards the previous value.
exports.test.modifyAndGet = (test) => {
m.of('john')
.chain(mMakeFarmer)
.chain((_)=> mGetResourceFrom('users', 'john'))
.run((result) => {
test.deepEqual(result.taskSuccess.value.value,{name:"John", occupation:"farmer"})
test.done()
}, {environment:initData()})
}
/*
* With `.chain((_)=>` we effectively ignore the value that we have so far.
* This may seem weird, since we never do it with Promises for example, but here it makes sense.
* There even is a shortcut method for this - `andThen`.
*/
/*
* ## Parametrizing the monad stack
*
* Now we can use our functions with any datasource that supports the same API, however we still
* are bound to the implementation of the monad stack. That is, we will have to refactor them
* every time we want to use them with a different stack. We can fix this by parameterizing
* them further and add the stack constructor value as an argument. With this we are done and we can export them
* for the next part of the tutorial:
*/
exports.initData = initData
// Stack-agnostic versions for part 3: the stack constructor `m` is the
// final curried argument, so callers choose their own transformer stack.
exports.mGetResourceFrom = mtl.curry((type, id, m) =>
m.loadEnvironment().chain((environment) =>
m.fromContinuation(mGetResource(suffix(type, id), environment))))
exports.mPostResourceTo = mtl.curry((type, id, resource, m) => m.loadEnvironment().chain((data) =>
m.fromContinuation(data.postResource.bind(null, suffix(type, id), resource))))
/*
* Go to [Part 3](p3.md).
*/
<file_sep>/test/list_maybe_example.js
// Break on thrown exceptions when running under the legacy V8 debugger.
if ( global.v8debug ) {
global.v8debug.Debug.setBreakOnException()
}
var mtl = require('../lib/main')
var sinon = require('sinon')
// List + Maybe stack: maybeGet maps over every element of the list.
exports.listMaybeGet = (test) => {
var listMaybe = mtl.make(mtl.data.list, mtl.data.maybe)
var spy = sinon.spy((a) => a)
listMaybe.fromArray([{name: 'foo'}, {name: 'bar'}, {name: 'baz'}])
.maybeGet('name')
.map(spy)
test.deepEqual(spy.returnValues, ['foo', 'bar', 'baz'])
test.done()
}
exports.listMaybeFilter = (test) => {
var listMaybe = mtl.make(mtl.data.list, mtl.data.maybe)
var spy = sinon.spy((a) => a)
listMaybe.fromArray([{name: 'foo'}, {name: 'bar'}, {name: 'baz'}])
.filter(a => a.name === 'foo')
.map(spy)
test.deepEqual(spy.returnValues, [{name:'foo'}])
test.done()
}
<file_sep>/docs/api.md
# Types API
Here is a list of all monad transformers and the methods that they add to the wrapper object.
## `data.maybe`
The `maybe` monad transformer automatically checks if your value is undefined and stops the computation if it is.
### `value.maybeGet(key)`
A helper to safely retrieve a possibly undefined property of your wrapped value.
### `value.maybeMap(f)`
Chains a function that returns a `maybe` value in the computation
### Definition

### Source
const idFunc = a => a
exports.maybe = {
// Standard functions
name: 'Maybe',
// (val) => M({value:val})
of (val) { return this.outer.of({value: val, something:true }) },
// (val => M({value:val}) , M({value:val})) => M({value:val})
chain (funk, mMaybeVal) {
return this.outer.chain((value) => {
return value.something ? funk(value.value) : this.outer.of(value)
}, mMaybeVal)
},
// (M(val)) => M({value:val})
lift (mVal) {
return this.outer.chain((val) => this.outer.of({value: val, something: true}), mVal)
},
fold (value, maybe) {
return maybe.something ? value(maybe.value) : (this.onNothing || idFunc )()
},
// Custom functions
maybeGet (key, val) {
return val[key] !== undefined ? this.of(val[key]) : this.outer.of({something: false})
},
nothing () {
return this.outer.of({something: false})
},
maybeMap (funk, val) {
const value = funk(val)
return value !== undefined ? this.of(value) : this.outer.of({something: false})
}
}
[_View in GitHub_](../lib/data.js)
## `data.list`
The `list` monad transformer allows you to operate on a list of values instead of on a single value.
### `List.fromArray(val)`
Wraps an array in a list monad transformer instance.
### `values.filter(fn)`
Filters out the values that don't match the predicate. Same as `Array.prototype.filter`.
_The behaviour of `Array.prototype.map` is covered by the monad transformer `map` method._
### Source
exports.list = {
name: 'List',
// Standard functions
// (val) => M([val])
of (val) {
return this.outer.of([val])
},
// (val => M([val]) , M([val]))=> M([val])
chain (funk, mListVal) {
return this.outer.chain(listVal => {
return listVal.length === 0 ? this.outer.of([]) : listVal
.map(funk)
.reduce((accumulatedVal, newVal) => {
return this.outer.chain(accumulated => {
return this.outer.chain(_new =>
this.outer.of(accumulated.concat(_new)), newVal)
}, accumulatedVal)
})
}, mListVal)
},
// (M(val)) => M([val])
lift (val) {
return this.outer.chain(innerValue => this.outer.of([innerValue]), val)
},
// ((val) => otherVal, M([val])) => otherVal
value (funk, val) {
return this.outer.value((list) => {
return list.map(funk)
}, val)
},
fold (value, list) {
return list.map(value)
},
// Custom functions
filter (funk, val) {
if (funk(val)) {
return this.of(val)
} else {
return this.outer.of([])
}
},
fromArray (val) {
if (val.concat && val.map && val.reduce && val.slice) {
return this.outer.of(val)
} else {
throw val + ' is not a list.'
}
}
}
[_View in GitHub_](../lib/data.js)
## `data.writer`
The writer monad transformer augments the wrapped value with one additional value which may be used for storing some additional information about the computation.
The additional value must be an object that has a `concat` method (as String or Array).
### `value.tell(val)`
Concats `val` to the current log value.
### `value.tellMap(f)`
Calls `f` with the current value as an argument and then concats the result to the current log value.
### Definition

### Source
const concatLog = (log, newLog) => {
if(log === undefined) {
return newLog
} else {
if (newLog === undefined) {
return log
} else {
return log.concat(newLog)
}
}
}
exports.writer = {
name: 'Writer',
// Standard functions
// (val) => M([val, log])
of (val) {
return this.outer.of({value: val, writer: undefined})
},
// (val => M([val, log]), M([val, log])) => M([val, log])
chain (funk, mWriterVal) {
return this.outer.chain((writerVal) => {
const val = writerVal.value, log = writerVal.writer
const newMWriterVal = funk(val)
return this.outer.chain((newWriterVal) => {
const newVal = newWriterVal.value, newLog = newWriterVal.writer
return this.outer.of({value: newVal, writer: concatLog(log, newLog)})
}, newMWriterVal)
}, mWriterVal)
},
// (M(val) => M([val, log])
lift (mVal) {
return this.outer.chain((val) => this.outer.of({value: val, writer: undefined}), mVal)
},
// ((val) => b, M([val, log])) => b
fold (value, writerVal) {
(this.onWriterLog || idFunc)(writerVal.writer)
return value(writerVal.value)
},
// Custom functions
tell (message, val) {
return this.outer.of({value: val, writer:message})
},
tellMap (fn, val) {
return this.outer.of({value: val, writer: fn(val)})
}
}
[_View in GitHub_](../lib/data.js)
## `comp.state`
The `state` monad transformer allows you to keep one additional mutable state value with your computation.
### `value.save()`
Saves the return value of the function in the state, overwriting the previous one.
### `value.load()`
Returns the current state.
### `value.statefulMap(f)`
Maps over the current value and state with `f`. The function should return an array containing two elements - the new value and the new state.
### Definition

### Source
const idFunc = a=>a
exports.state = {
name: 'State',
//Standard functions:
of (val) {
return (prevState) => this.outer.of({value: val, state: prevState})
},
chain (funk, state) {
return (prevState) =>
this.outer.chain((params) => {
const newVal = params.value, newState = params.state
return funk(newVal)(newState)
}, state(prevState))
},
lift (val) {
return (prevState) =>
this.outer.chain((innerValue) => this.outer.of({value: innerValue, state: prevState}), val)
},
run (f, state) {
return f(state())
},
fold (value, params) {
(this.onState || idFunc)(params.state)
return value(params.value)
},
//Custom functions:
loadState (val) {
return (prevState) => this.outer.of({value: prevState, state: prevState})
},
saveState (val) {
return (prevState) => this.outer.of({value:val, state: val})
},
statefulMap (funk, val) {
return (prevState) => {
const stateTuple = funk(val, prevState)
return this.outer.of({value: stateTuple[0], state: stateTuple[1]})
}
},
setState (newState, val) {
return (prevState) => this.outer.of({value:val, state: newState})
},
mapState (funk, val) {
return (prevState) => this.outer.of({value:val, state: funk(prevState, val)})
}
}
## `comp.reader`
The `reader` monad transformer allows you to specify an immutable configuration for your function which you can use to tweak the way it behaves.
### Definition

### Source
exports.reader = {
name: 'Reader',
//Standard functions:
of (val) {
return (env) => this.outer.of(val)
},
chain (funk, reader) {
return (env) =>
this.outer.chain((val) => {
return funk(val)(env)
}, reader(env))
},
lift (val) {
return (env) => val
},
run (f, reader) {
return f(reader(this.environment))
},
fold (value, val) {
return value(val)
},
//Custom functions:
readerMap (f, val) {
return (environment) => this.outer.of(f(val, environment))
},
loadEnvironment(val) {
return (environment) => this.outer.of(environment)
}
}
[_View in GitHub_](../lib/comp.js)
## References
All images, taken from [the Wikipedia article on monad transformers](https://en.wikipedia.org/wiki/Monad_transformer).
<file_sep>/docs/wrapper.md
# The object wrapper
This library provides a module which allows you to combine several monad transformer definitions and create an object-oriented wrapper for using the resulting monad.
## Creating a monad constructor
You can create a monad constructor using the `mtl.make` function:
### `mtl.make([baseMonad], monadTransformer1, monadTransformer2)`
#### `baseMonad - monadDefinition`
Optionally you can pass the definition of the monad that would sit at the bottom of the stack, as a first argument of the `make` function.
The parameter is optional. By default, the package uses the identity monad as a base.
#### `monadTransformer<1-n> - monadTransformerDefinition`
Pass the definitions of the monad transformers which would augment the base monad. Note that monad transformations are usually not commutative so the order in which the arguments are placed matters.
const assign = require('object-assign')
const helpers = require('./helpers')
const idFunc = a => a
// Promotes a function from a monad definition to a monad stack method, so it can be used for chaining
const promoteToMethod = (funk, monadDefinition) => function () {
const args = Array.prototype.slice.call(arguments)
return this.chain((val) => {
return this.constructor(funk.apply(monadDefinition, args.concat([val])))
})
}
// Promotes a function from a monad definition to a stack constructor
const promoteToConstructor = (funk, monadDefinition) => function () {
return this(funk.apply(monadDefinition, arguments))
}
The function returns an `objectWrapper` which allows you to instantiate monads from all kinds of values.
module.exports = (stack) => {
const monad = assign(Object.create(monadWrapperProto), helpers.monadMapVals(promoteToMethod, stack))
const constructor = (val) => {
var object = Object.create(monad)
object._value = val
return object
}
monad.stack = stack
monad.constructor = assign(constructor, helpers.monadMapVals(promoteToConstructor, stack))
monad.constructor.of = monad.of.bind(monad)
monad.constructor.prototype = monad
return monad.constructor
}
## Creating monads
Monads are generally created using [type-specific methods](api.md) like `fromArray` (for stacks that include the list transformation) or `fromState` (for stateful computations), but several generic methods are also provided.
### `objectWrapper.of(value)`
Constructs a monad from a plain non-monadic value.
const monadWrapperProto = {
of (value) {
return this.constructor(this.stack.of(value))
},
### `objectWrapper(value)`
Constructs a monad from a value which obeys the structure of the monad stack i.e. it "wraps" the value into a monadic interface.
## Using monads
Again there are many methods that you would use to manipulate a monad which are [type-specific](api.md). Here are the generic ones:
### `monad.chain(f)`
Applies `f` to the value or values that are inside the monad and returns a new wrapped object.
chain (f) {
const fUnwrap = (val) => {
const newVal = f.call(this.constructor, val, this.constructor)
if (!newVal.hasOwnProperty('_value')) {throw JSON.stringify(newVal) + ' is not a wrapped value'}
if (newVal.stack.name !== this.stack.name) {throw `${this.stack.name} is not the same as ${newVal.stack.name}`}
return newVal._value
}
return this.constructor(this.stack.chain(fUnwrap, this._value))
},
###`monad.map(f)`
Applies `f` to the value or values that are inside the monad and wraps the resulting value in a new monad instance.
map (funk) {
return this.chain((val) => this.of(funk(val)))
},
###`monad.tap(f)`
Applies the f to the monad and returns the result.
tap (funk) {
return funk(this)
},
###`monad.run()`
Runs the computation inside the monad and calls the callback with the resulting value. Does not unwrap the value.
run (callback, environment) {
return this.stack.run.call(environment, callback||idFunc, this._value)
},
###`monad.value()`
Runs the computation inside the monad and calls the callback with the resulting value. Unwraps the value using the `fold` functions.
value (callbacks, environment) {
const stack = this.stack
return this.run((val) => {
return stack.fold.call(callbacks, (val) => {
if(typeof callbacks === 'function') {
callbacks(val)
}else if (typeof callbacks === 'object' && typeof callbacks.onValue === 'function'){
callbacks.onValue(val)
}
return val
}, val)
}, environment)
},
### `monad.ap()`
Applies a wrapped function to a wrapped value. Same as `<*>` in Haskell.
ap (val) {
return this.chain(f => val.map(f))
},
### `monad.andThen()`
Same as `chain` but accepts a wrapped value instead of a function that returns one. Same as `>>` in Haskell.
andThen (monad) {
return this.chain((_) => monad)
},
###`monad.debug()`
A shortcut for inserting a breakpoint in the computation.
debug () {
debugger
return this
}
}
For more information, see the [Fantasy Land spec](https://github.com/fantasyland/fantasy-land).
[_View in GitHub_](../lib/wrapper.js)
<file_sep>/docs/tutorial/p2.md
# Modifying REST Resources / Parametrizing our data source
#### Using the monad stack. Using the `Reader` monad.
_This is part 2 from the `monad-transformers` tutorial. See also [part 1](p1.md) and [part 3](p3.md)._
In the previous part we defined some functions for retrieving resources from REST endpoints. Now we will produce some functions that modify the resources they retrieve.
const mtl = require("../lib/main.js")
if ( global.v8debug ) {
global.v8debug.Debug.setBreakOnException()
}
## Parametrizing the datasource
Let's start by improving what we have so far.
const initData = require('./p1.js').initData
const suffix = mtl.curry((suffix, str) => suffix + '/' + str)
As you can see I included some of the resources from the previous tutorial, however I could not reuse more of it, because of the way that the `getResource` function was written: Namely, the function is bound to a specific data source. This means that we won't be able to reuse it because we will have to always keep track of what in our data is changed.
const data = initData()
const GetResource = (url) => data.getResource.bind(null, url)
So let's fix `GetResource`, by accepting the datasource as an additional parameter. Now we can specify which datasource we want to work with when we call it.
const mGetResource = (url, data) => data.getResource.bind(null, url)
However, this breaks our workflow a bit. In the previous part of the tutorial we could define these beautiful chaining functions like `getResourceFrom` that were quite handy:
const oldGetResourceFrom = (type) => (id) =>
m.of(suffix(type, id))
.tellMap((url) => `Retrieving ${url}... `)
.cont(mGetResource)
Now, because `getResourceFrom` uses `mGetResource` it would also have to accept a datasource when called, and our whole codebase will become bloated. Unless there is a transformer that can handle this for us.
And as you might suspect, there actually is one.
The `Reader` monad transformer is the evil twin of the `Writer` monad transformer. It gives us access to an immutable data structure sometimes called an 'environment' for storing all kinds of configurations throughout our computation without bothering to pass it around to each new function. It is like an additional parameter that you always get.
In order to use the `Reader` monad transformer let's first refactor our code a bit:
const oldGetResourceFrom2 = (type) => (id) =>
m.of(suffix(type, id))
.chain((url) => m.fromContinuation(mGetResource(url)))
Remember this? This is the first version of the `getResourceFrom` function which does not use the `cont` helper. Or it is close to it anyways - this one uses another helper - the `fromContinuation` constructor. We desugared our function in order to combine it with another helper - `loadEnvironment`.
const mGetResourceFrom = (type, id) =>
m.loadEnvironment().chain((environment) =>
m.fromContinuation(mGetResource(suffix(type, id), environment)))
The `Reader` allows us to run our function against the data that we defined earlier or any other.
exports.test = {}
exports.test.mGetResource = (test) => {
mGetResourceFrom('users', 'john')
.run((result) => {
test.equal(result.taskSuccess.value.value.occupation, "developer")
test.done()
}, {environment:initData()})
}
So that is the formula for using the `Reader`: we define the environment in the `run` method, we use the environment whenever we need it in the function body, and all functions that we call with `chain` also have access to the environment.
### Interlude: Monad transformers and the transformer stack
That is all good, you might say, but why did we have to take a step back in order to use it? Why can't we still use the `cont` helper _and_ have access to the environment? The reason for this is that we are combining the effects of two different monad transformers. And although it may seem so from a first glance, monad transformers aren't in any way related to each other. A monad transformer stack is defined just by specifying the transformers that it uses and in which order. For example here is a monad transformer stack that we can use for this tutorial:
const m = mtl.make(mtl.base.task, mtl.data.maybe, mtl.data.writer, mtl.comp.reader)
We include just the monads we need and we can customize the stack however we like. If you really want to use a given helper you have to define it in terms of the other helpers. Here is, for example, a function for chaining computations that use the environment (remember: `chain` and `of` are key, everything else can be defined in terms of them).
m.prototype.readerCont = function (f) {
return this.chain((val) =>
m.loadEnvironment()
.cont((env) => f(val, env)))
}
Once we have this we can totally abstract away our environment in the `getResourceFrom` function:
const helperGetResourceFrom = (type) => (id) =>
m.of(suffix(type, id))
.readerCont(mGetResource)
Let's verify that this works before moving on:
exports.test.helperGetResourceFrom = (test) => {
helperGetResourceFrom('users')('john')
.run((result) => {
test.deepEqual(result.taskSuccess.value.value,{name:"John", occupation:"developer"})
test.done()
}, {environment:initData()})
}
As you can see it works in the same way as it worked before. the only difference is that we have to pass the environment as an argument to the `run` function.
## Posting resources
What would a primitive function for posting resources look like? Here is one way:
const postResourceTo = (type, id) => (resource) => m.loadEnvironment().chain((data) =>
m.fromContinuation(data.postResource.bind(null, suffix(type, id), resource)))
It is pretty easy to conceive once you understand its `get` counterpart.
### Interlude: Currying
Wait a sec. Do we need a function that receives a resource type and an id, and returns a resource modifier? Or do we actually want one that receives just the type and returns a function that accepts both an id and a new version of a resource? If you find yourself asking these questions, the answer is just to wrap the function in the `curry` constructor and make it work both ways.
Just remember to order your arguments from the one you know a lot about to the one that you don't know:
const mPostResourceTo = mtl.curry((type, id, resource) =>
m.loadEnvironment().chain((data) =>
m.fromContinuation(data.postResource.bind(null, suffix(type, id), resource))))
After we have a functions for retrieving and posting a resource, we might combine them in many ways.
For example, let's write a function that modifies a resource:
const modifyResource = mtl.curry((type, f, id) =>
mGetResourceFrom(type, id)
.map(f)
.chain(mPostResourceTo(type, id)))
Keeps getting easier and easier. This allows us to modify a resource using ordinary functions like:
const makeFarmer = (user) => { user.occupation = 'farmer'; return user}
And the fact that it is curried allows us to "breed" it into a thousand more-specific functions:
const modifyUser = modifyResource('users')
const mMakeFarmer = modifyUser(makeFarmer)
Beautiful. Let's test that:
exports.test.modify = (test) => {
m.of('john')
.chain(mMakeFarmer)
.run((result) => {
test.deepEqual(result.taskSuccess.value.value,{name:"John", occupation:"farmer"})
test.done()
}, {environment:initData()})
}
To be sure, let's retrieve the resource again, after changing it:
exports.test.modifyAndGet = (test) => {
m.of('john')
.chain(mMakeFarmer)
.chain((_)=> mGetResourceFrom('users', 'john'))
.run((result) => {
test.deepEqual(result.taskSuccess.value.value,{name:"John", occupation:"farmer"})
test.done()
}, {environment:initData()})
}
With `.chain((_)=>` we effectively ignore the value that we have so far. This may seem weird, since we never do it with Promises for example, but here it makes sense. There even is a shortcut method for this - `andThen`.
## Parametrizing the monad stack
Now we can use our functions with any datasource that supports the same API, however we still are bound to the implementation of the monad stack. That is, we will have to refactor them every time we want to use them with a different stack. We can fix this by parameterizing them further and add the stack constructor value as an argument. With this we are done and we can export them for the next part of the tutorial:
exports.initData = initData
exports.mGetResourceFrom = mtl.curry((type, id, m) =>
m.loadEnvironment().chain((environment) =>
m.fromContinuation(mGetResource(suffix(type, id), environment))))
exports.mPostResourceTo = mtl.curry((type, id, resource, m) => m.loadEnvironment().chain((data) =>
m.fromContinuation(data.postResource.bind(null, suffix(type, id), resource))))
Go to [Part 3](p3.md).
<file_sep>/test/permutations.js
// This modules allows you to run nodeunit tests on all possible combinations of monads, defined in the library
var combinatorics = require('js-combinatorics')
const id = require('../lib/id')
const data = require('../lib/data')
const comp = require('../lib/comp')
const monads = [].concat([data.writer, data.list, data.maybe, id.idMinimal, id.id, id.idWrapped, comp.state, comp.reader])
const stacks = combinatorics.permutation(monads, 3).toArray()
module.exports = (stackFilter, testFunction) => stacks.filter(stackFilter).reduce((obj, stack) => {
obj[stack.map(s => s.name).join('')] = testFunction.apply(null, stack)
return obj
}, {})
| f47529f9a1aabcfc9fd50291e0949ce4f297f961 | [
"JavaScript",
"Markdown"
] | 24 | JavaScript | dmitriz/monad-transformers | 367f3d8df15698e9afd7e6489f445584437f3cc5 | d904a8afa282debcbcb9556f6b610627918695b0 |
refs/heads/master | <file_sep>var collab = require('./lib/collaborative.js'),
distance = require('./lib/distance.js')
module.exports = {Collaborative: collab, Distance: distance}
<file_sep>[](https://travis-ci.org/xmen4u/collaborative_filtering)
## Collaborative Filtering
============================================
A Node module that uses collaborative filtering to produce recommendations. Out of the box it lets you
use distance similarity, Pearson correlation, or a custom metric [depending on the need].
It also comes with a full example of usage and dataset creation.
I hope to provide a working stack [FE + BE] to use this with real-time and cache capabilities
using jSHint, matchdep , stream, grunt.js
Use this with my permission only
## ToC
---------------------
1. [Main app](#main)
<a name="main">Main app</a>
---------------------
## Install
```
npm install collaborative_filtering
```
Place the ```distance.js``` wherever you want and include it. I've used an IoC style
so you can adjust it and plug it into the module.
## Initialization
we need to initialize the distance object, you can add any distance metric you wish
to distance.js
```
var readers = require('./recommendations.js'), // creation of the dataset
Collaborative = require('../lib/collaborative'),
Distance = require('../lib/distance'),
collab = new Collaborative(new Distance()),
```
after initialization, you need to create a multi-dimensional vector, an array of arrays:
```[[1,2],[1,4],[2,5],[5,9],...,[10,12]]```
just like the "creation of the data set line", you can find the model inside /models , it looks like:
```
in code we grab it via stream from a line-by-line [newline] structured flat file [so we won't have limit on memory space]
```
// people
person1 = readers[0],
person2 = readers[1],
person7 = readers[6]
console.log('comparing ' + person1.getName() + ' and ' + person2.getName())
console.log('Distance correlation: ' + collab.simDistance(person1,person2))
console.log('Pearson correlation: ' + collab.simPearson(person1,person2))
console.log(collab.getSimiliarItems(readers, person1, 5))
console.log(collab.getRecommendations(readers,person7))
```
finally we run the collaborative filtering, for example "item-based":
```
[ { rating: 3.4682459444748344, id: 'And Then There Were None' },
{ rating: 3, id: 'A Tale of Two Cities' },
{ rating: 2.319573433326274, id: 'The Hobbit' } ]
```
## License
BSD - ask for my permission<file_sep>/**
********************************************************************************************
* name: Main
********************************************************************************************
* This module is responsible for examples of use
********************************************************************************************
* desc: using collaborative filtering on a made up books dataset
********************************************************************************************
* code: written by <NAME>, you may not use it without my permission, BSD license
* date: oct-2012
********************************************************************************************
**/
// Example driver: load the sample dataset and exercise the collaborative
// filtering API (user-user similarity scores and recommendations).
var readers = require('./recommendations.js'), // creation of the dataset
Collaborative = require('../lib/collaborative'),
Distance = require('../lib/distance'),
// Inject the distance-metric provider into the filter (IoC style).
collab = new Collaborative(new Distance()),
// people
person1 = readers[0],
person2 = readers[1],
person7 = readers[6]
var BookReader = require('../models/BookReader.js')
// Compare two readers with both similarity scores, list users similar to
// person1, then produce recommendations for a reader with few ratings.
console.log('comparing ' + person1.getName() + ' and ' + person2.getName())
console.log('Distance correlation: ' + collab.simDistance(person1,person2))
console.log('Pearson correlation: ' + collab.simPearson(person1,person2))
console.log(collab.getSimiliarItems(readers, person1, 5))
console.log(collab.getRecommendations(readers,person7))
console.log(person7)
/// beginning of a transformation function to achieve item-based recommendations
// Flips a user-based dataset into an item-based one: for every rated item we
// build a BookReader-style record whose "ratings" are the people who rated it.
// Returns an object keyed by item id (not an array) — callers flatten it.
// Fixes: removed the unused `items` array and the repeated
// `person_rating_list[j]` lookups of the original.
function transform(people){
  var itemsById = {},
      i,
      j,
      ratingList,
      entry,
      peopleCount = people.length,
      ratingCount

  for (i = 0; i < peopleCount; i++) {
    ratingList = people[i].getRatingList()
    ratingCount = ratingList.length
    for (j = 0; j < ratingCount; j++) {
      entry = ratingList[j]
      // Lazily create the per-item record the first time the item is seen.
      if (!itemsById.hasOwnProperty(entry.getId())) {
        itemsById[entry.getId()] = new BookReader(entry.getName(), '', entry.getId())
      }
      // Record "this person rated this item" — the inverse of the input shape.
      itemsById[entry.getId()].addBookRating(people[i].getName(), entry.rating)
    }// for
  }// for
  return itemsById
}// transform
// Run the item-based transformation and flatten the resulting id->record map
// into an array, since the collaborative helpers expect a list of "readers".
var items_based = transform(readers),
temp_arr = [],
element
for(element in items_based){
temp_arr.push(items_based[element])
}
//console.log(collab.getRecommendations(temp_arr,items_based['A Tale of Two Cities']))
// Items similar to this book, computed by treating items as "users".
console.log(collab.getSimiliarItems(temp_arr, items_based['A Tale of Two Cities']))
<file_sep>/**
********************************************************************************************
* name: BookReader
********************************************************************************************
* This module is responsible for bookreader
********************************************************************************************
* desc: using user-based filtering [item based can be achieved by flippig the users and their items],
* implemented both Distance, Pearson, Tanimoto (in the distance) similarity score
********************************************************************************************
* code: written by <NAME>, you may not use it without my permission, BSD license
* date: oct-2012
********************************************************************************************
**/
var BookReader = require('../models/BookReader.js')

// Sample dataset kept as plain data so readers and their ratings can be
// reviewed or extended at a glance, instead of ~60 lines of copy-pasted
// constructor/addBookRating calls.
// Each entry: [firstName, lastName, id, { bookTitle: rating (1..5), ... }]
var READER_DATA = [
  ['Gil', 'Tamari', 1, {
    'A Tale of Two Cities': 2.5,
    'The Lord of the Rings': 3.5,
    'The Hobbit': 3.0,
    'The Catcher in the Rye': 3.5,
    'Harry Potter': 2.5,
    'And Then There Were None': 3.0
  }],
  ['Kanye', 'West', 2, {
    'A Tale of Two Cities': 3.0,
    'The Lord of the Rings': 3.5,
    'The Hobbit': 1.5,
    'The Catcher in the Rye': 5.0,
    'Harry Potter': 3.5,
    'And Then There Were None': 3.0
  }],
  ['Jay', 'Z', 3, {
    'A Tale of Two Cities': 2.5,
    'The Lord of the Rings': 3.0,
    'The Catcher in the Rye': 3.5,
    'And Then There Were None': 4.0
  }],
  ['Tupac', 'Shakur', 4, {
    'The Lord of the Rings': 3.5,
    'The Hobbit': 3.0,
    'The Catcher in the Rye': 4.0,
    'Harry Potter': 2.5,
    'And Then There Were None': 4.5
  }],
  ['Dr', 'Dre', 5, {
    'A Tale of Two Cities': 3.0,
    'The Lord of the Rings': 4.0,
    'The Hobbit': 2.0,
    'The Catcher in the Rye': 3.0,
    'Harry Potter': 2.0,
    'And Then There Were None': 3.0
  }],
  ['Eminem', '', 6, {
    'A Tale of Two Cities': 3.0,
    'The Lord of the Rings': 4.0,
    'The Catcher in the Rye': 5.0,
    'Harry Potter': 3.5,
    'And Then There Were None': 3.0
  }],
  ['Run the', 'Jewels', 7, {
    'The Lord of the Rings': 4.5,
    'The Catcher in the Rye': 4.0,
    'Harry Potter': 1.0
  }]
]

// Build the BookReader instances.  String-keyed object properties iterate in
// insertion order, so ratings are added in the same order as before.
var readers = READER_DATA.map(function (row) {
  var reader = new BookReader(row[0], row[1], row[2])
  Object.keys(row[3]).forEach(function (book) {
    reader.addBookRating(book, row[3][book])
  })
  return reader
})

module.exports = readers
<file_sep>/**
********************************************************************************************
* name: Book Reader model
********************************************************************************************
* This module is responsible for general user [it can be changed to a parent object so book
* reader would prototypical inherit it
********************************************************************************************
* desc: a model of a user, that reads books and has a list of books [plain JSONs]
********************************************************************************************
* code: written by <NAME>, you may not use it without my permission, BSD license
* date: oct-2012
********************************************************************************************
**/
function BookReader(first_name, last_name, id, books_BookReader_list){
this.first_name = first_name
this.last_name = last_name
this.name = this.first_name + ' ' + this.last_name
this.id = id || 1
this.rating_list = books_BookReader_list || []
}
BookReader.prototype.getId = function(){ return this.id}
BookReader.prototype.getName = function(){ return this.name}
// book rating = 1..5
BookReader.prototype.addBookRating = function(book_name, book_rating){
this.rating_list.push({name: book_name, rating: book_rating, getId: function(){ return book_name}, getName: function(){return book_name} })
}
BookReader.prototype.addRating = function(name, rating, id){
id = id || name
this.rating_list.push({name: name, rating: rating, getId: function(){ return id} })
}
BookReader.prototype.getBookRatingsList = function(){
return this.rating_list
}
BookReader.prototype.getRatingList = function(){return this.getBookRatingsList()}
BookReader.prototype.getBookRating = function(book_name){
var i,
len = this.rating_list.length;
for(i = 0; i < len; i++){
if (this.rating_list[i].name === book_name){
return i
}// if
}// for
return -1
}
BookReader.prototype.getRating = function(book){return this.getBookRating(book.name)}
module.exports = BookReader
| b3291150cf8c948f9b3530637831742c453e3e9b | [
"JavaScript",
"Markdown"
] | 5 | JavaScript | xmen4u/collaborative_filtering | 094753aa4746d96a9630d1b3260a1a99141f2c27 | 8b47a6a0d115deb70564c9d9b1921347b8740d42 |
refs/heads/master | <repo_name>Bomberlt/instagram-goals-getter<file_sep>/src/index.js
import 'dotenv/config';
import { IgApiClient } from 'instagram-private-api';
import express from 'express';
console.log('Hello Node.js project.');
console.log(process.env.MY_SECRET);
const ig = new IgApiClient();
async function asyncCall() {
ig.state.generateDevice(process.env.instagram_username);
ig.simulate.preLoginFlow();
const loggedInUser = await ig.account.login(process.env.instagram_username, process.env.password);
process.nextTick(async () => await ig.simulate.postLoginFlow());
console.log('Logged in to Instagram');
// Create UserFeed instance to get loggedInUser's posts
const targetUser = await ig.user.searchExact('250pix'); // getting exact user by login
const userFeed = ig.feed.user(targetUser.pk);
let posts = [];
let count = 0;
do {
const page = await userFeed.items();
count = page.length;
const pagePosts =page.map(post => {
const caption = post.caption.text;
const datePart = caption.split(' ')[0];
const month = datePart.split('-')[0].padStart(2, '0');
const day = datePart.split('-')[1];
const date = `2021-${month}-${day}`;
const title = caption.substring(datePart.length + 1).includes('#')
? `${caption.substring(datePart.length + 1).split('.')[0]}.`
: caption.substring(datePart.length + 1)
return {
value: title,
date: date,
imageUrl: post.image_versions2.candidates[0].url,
timestamp: post.taken_at
}
});
posts = posts.concat(pagePosts);
} while (count === 18);
console.log(posts);
return posts;
}
const app = express();
app.get('/data', async (req, res) => {
const posts = await asyncCall();
const data = {
title: "2021 Goals",
goals: [{
name: "Take 250 pics",
type: "COMPLETE_N_ITEMS",
hideItems: false,
data: {
itemCountToComplete: 250,
entries: posts.slice()
}
}]
};
res.send(data);
});
app.listen(3003, () => {
console.log('Example app listening on port 3003!');
}); | d361d27e0fd8c7ecfec5f8eefbf8cf4978f6ff3e | [
"JavaScript"
] | 1 | JavaScript | Bomberlt/instagram-goals-getter | 286b7811e2e8818972e82427f255b0ede2fadcbd | fe9d25d4ac96340c2ceb6d65652c24d611bf0a44 |
refs/heads/master | <repo_name>epps/databases<file_sep>/server/db/index.js
var mysql = require('mysql');
// Create a database connection and export it from this file.
// You will need to connect with the user "root", no password,
// and to the database "chat".
var connection = mysql.createConnection({
host: 'localhost', // MKS-LA-7.local
user: 'root',
password: '',
database: 'chat',
multipleStatements: true
});
module.exports = {
storeMessage: function(username, text, roomname, callback){
var query = "INSERT INTO messages (username, message, roomname) VALUES (?, ?, ?);";
var queryVals = [username, text, roomname];
queryDB(query, function(result){
console.log("message storage successful ",result);
callback(result);
}, queryVals);
},
getMessages: function(roomname, callback){
var room = roomname || 'lobby';
// var query = "SELECT username, message, roomname FROM messages WHERE roomname="+room+";";
var query = "SELECT objectId, username, message, roomname FROM messages;";
queryDB(query, function(result){
console.log("message retrieval successful ",result); // eventually return result
callback(result);
});
}
};
var queryDB = function(queryString, callback, values){
// connection.connect();
connection.query(queryString, values, function(error, result){
if (error){throw error;}
console.log(result);
callback(result);
});
// connection.end(function(error){
// if (error){throw error;}
// console.log("Interaction complete.");
// });
};
| 1b39e5ac6f93e8e41f7911504980ef9a4e1aa0fb | [
"JavaScript"
] | 1 | JavaScript | epps/databases | 58fd14a464afc67247a606ccf1cd597f48ba71c6 | 5d66385f46c1abc92a0847bb6935ebc8b19b8d13 |
refs/heads/master | <file_sep>/*global describe, beforeEach, assert, it */
'use strict';
describe('Mottoview View', function () {
beforeEach(function () {
this.Mottoview = new societe.Views.MottoviewView();
});
});
<file_sep>/*global describe, beforeEach, assert, it */
'use strict';
describe('Approuter Router', function () {
beforeEach(function () {
this.Approuter = new societe.Routers.ApprouterRouter();
});
it('index route', function(){
});
});
<file_sep>/*global describe, beforeEach, assert, it */
'use strict';
describe('Video Model', function () {
beforeEach(function () {
this.Video = new societe.Models.VideoModel();
});
});
<file_sep>/*global describe, beforeEach, assert, it */
'use strict';
describe('Videoview View', function () {
beforeEach(function () {
this.Videoview = new societe.Views.VideoviewView();
});
});
<file_sep>/*global describe, beforeEach, assert, it */
'use strict';
describe('Service Model', function () {
beforeEach(function () {
this.Service = new societe.Models.ServiceModel();
});
});
<file_sep>/*global societe, Backbone, JST*/
societe.Views = societe.Views || {};
(function () {
'use strict';
societe.Views.MottoviewView = Backbone.View.extend({
initialize: function() {
this.render();
},
render: function() {
var source = $('#motto-template').html();
var template = Handlebars.compile(source);
var html = template();
this.$el.html(html);
}
});
})();
<file_sep>societe
=======
Nouveau site de societe-ecran media
<file_sep>/*global societe, Backbone*/
societe.Models = societe.Models || {};
(function () {
'use strict';
societe.Models.VideoModel = Backbone.Model.extend({
defaults: {
0: {},
id: '0',
url: '<NAME>',
title: 'designer',
thumbnail_medium: 'tn'
},
urlRoot: 'http://vimeo.com/api/v2/video/59908384.json', //+ this.attributes.videoId +
initialize: function() {
//console.log(videoId);
}
});
})();
<file_sep>/*global describe, beforeEach, assert, it */
'use strict';
describe('Teammatesview View', function () {
beforeEach(function () {
this.Teammatesview = new societe.Views.TeammatesviewView();
});
});
<file_sep>/*global describe, beforeEach, assert, it */
'use strict';
describe('Servicesview View', function () {
beforeEach(function () {
this.Servicesview = new societe.Views.ServicesviewView();
});
});
<file_sep>/*global societe, Backbone*/
societe.Models = societe.Models || {};
(function () {
'use strict';
societe.Models.ServiceModel = Backbone.Model.extend({
defaults: {
id: '0',
title: 'designer',
picture: '',
description: ''
},
initialize: function() {
//console.log(videoId);
}
});
})();
| 526bc6e085500bfb02d0da71d3cd40eaf981d3eb | [
"JavaScript",
"Markdown"
] | 11 | JavaScript | Kloport/societe | f68e5a59630decc2d6b767871a6cf88dd0997de0 | 671cd8776191c65ad15833ddec34f979e4cc8dbb |
refs/heads/main | <repo_name>Ghazanfar-ijaz/alyrakit<file_sep>/src/components/ListFeatures.js
import working from "../assets/working.svg"
const ListFeatures = () => {
const list = [
"Lifetime updates",
"Tons of assets",
"Tech support",
"Integration ready",
]
return (
<div>
<img
src={working}
alt="Illustration with a computer on the desk"
width="400"
height="295"
/>
<h2>The most useful resource ever created for designers</h2>
<ul>
{list.map((el, index) => {
return <li key={index}>{el}</li>
})}
</ul>
</div>
)
}
export default ListFeatures
<file_sep>/src/components/GetStarted.js
const GetStarted = () => {
return (
<section id="buy-now">
<span>Get Started</span>
<h2>Get AlyraKit and save your time</h2>
<p>
Stop wasting time trying to do it the "right way" and build a site from
scratch. AlyraKit is faster, easier, and you still have complete
control.
</p>
<button>Buy now</button>
</section>
)
}
export default GetStarted
| 087d6d3b2e5519d16ce8912c908b4f1611043c76 | [
"JavaScript"
] | 2 | JavaScript | Ghazanfar-ijaz/alyrakit | aad795f36cd473a436aaf6b98150adad409793a3 | fe34cec62d46d423e4d30f2410d0543c14e461eb |
refs/heads/master | <repo_name>ihortrypolskyi/ansible-ma-test<file_sep>/ansible/production.sh
cd /home/app/ma/current
docker-compose -f docker-compose.production.yml stop
docker-compose -f docker-compose.production.yml rm -f -v
docker-compose -f docker-compose.production.yml up --build -d
docker ps
<file_sep>/Dockerfile
FROM ruby:2.4.2
RUN apt-get update && apt-get -y install curl git nano
RUN mkdir /app
RUN mkdir /app/ma
WORKDIR /app/ma
ADD Gemfile /app/ma/
ADD Gemfile.lock /app/ma/
RUN cd /app/ma && bundle install
RUN apt-get -y install nodejs
ADD . /app/ma/
| 4245c3a4719d652c763627a33aba73a5dcdc22a0 | [
"Dockerfile",
"Shell"
] | 2 | Shell | ihortrypolskyi/ansible-ma-test | 713465ff4f981cf4a45cc0dfadda158a6de172fa | cc70968124d42a55072dc99f46fb2db82e4412b7 |
refs/heads/master | <file_sep>package bio;
import java.util.ArrayList;
import java.util.List;
public class Rozwiazanie implements Cloneable {
private int rozmiar=0;
private int wartosc=0;
private List<Oligonukleotyd>Slowa = new ArrayList<Oligonukleotyd>();
public Rozwiazanie(List<Oligonukleotyd> clone) {
Slowa = clone;
setRozmiar(clone.size());
}
public int getWartosc() {
return wartosc;
}
public List<Oligonukleotyd> getSlowa() {
return Slowa;
}
public void przesunSlowo(int indeks, int nowyIndeks){
Oligonukleotyd temp = Slowa.get(indeks);
Slowa.remove(temp);
Slowa.add(nowyIndeks, temp);
}
public void przeliczWartosc(int[][] grafOl) {
Oligonukleotyd s1=Slowa.get(0);
wartosc=s1.getLiczbaSlow();
int dlugosc=s1.getDlugosc();
int[][] graf = Projekt.getGrafOl();
int i=1;
while(dlugosc<Projekt.getDlugoscSekwencji()&&i<Slowa.size()){
Oligonukleotyd s2=Slowa.get(i++);
int i1=s1.getIndeks();
int i2=s2.getIndeks();
dlugosc+=graf[i1][i2];
if(s2.getDlugosc()>s1.getDlugosc())dlugosc+=s2.getDlugosc()-s1.getDlugosc();
if(dlugosc<=Projekt.getDlugoscSekwencji()){
wartosc+=s2.getLiczbaSlow();
s1=s2;
}
}
}
public String Wynik(int[][] grafOl){
Oligonukleotyd s1=Slowa.get(0);
String wynik = s1.getLancuch();
int dlugosc=s1.getDlugosc();
int[][] graf = Projekt.getGrafOl();
int i=1;
while(dlugosc<Projekt.getDlugoscSekwencji()&&i<Slowa.size()){
Oligonukleotyd s2=Slowa.get(i++);
int i1=s1.getIndeks();
int i2=s2.getIndeks();
dlugosc+=graf[i1][i2];
if(dlugosc<=Projekt.getDlugoscSekwencji()){
wynik += s2.getLancuch().substring(s1.getLancuch().length() - graf[i1][i2]);
s1=s2;
}
}
return wynik;
}
public Rozwiazanie clone(){
Rozwiazanie rNew = new Rozwiazanie( new ArrayList<Oligonukleotyd>(Slowa));
return rNew;
}
public void dodajSlowo(Oligonukleotyd o) {
Slowa.add(o);
}
public int getRozmiar() {
return rozmiar;
}
public void setRozmiar(int rozmiar) {
this.rozmiar = rozmiar;
}
}
<file_sep>package bio;
class Oligonukleotyd implements Comparable{
private String lancuch;
private int poprzedni = -1;
private int nastepny = -1;
private int dlugosc;
private int liczbaSlow = 1;
private int indeks;
public Oligonukleotyd(String lancuch, int l) {
this.lancuch = lancuch;
this.dlugosc = l;
}
public String getLancuch() {
return lancuch;
}
public void setLancuch(String lancuch) {
this.lancuch = lancuch;
}
public int getPoprzedni() {
return poprzedni;
}
public void setPoprzedni(int poprzedni) {
this.poprzedni = poprzedni;
}
public int getNastepny() {
return nastepny;
}
public void setNastepny(int nastepny) {
this.nastepny = nastepny;
}
public int getDlugosc() {
return dlugosc;
}
public void setDlugosc(int dlugosc) {
this.dlugosc = dlugosc;
}
public int getLiczbaSlow() {
return liczbaSlow;
}
public void setLiczbaSlow(int liczbaSlow) {
this.liczbaSlow = liczbaSlow;
}
public int getIndeks() {
return indeks;
}
public void setIndeks(int indeks) {
this.indeks = indeks;
}
public int compareTo(Object arg0) {
Oligonukleotyd o = (Oligonukleotyd) arg0;
return this.dlugosc - o.dlugosc;
}
}
<file_sep># Bioinformatyka
Projekt na zajęcia z bioinformatyki.
| f2bc55e1600752444d7b6d9ce4b7e7566bca5078 | [
"Markdown",
"Java"
] | 3 | Java | Terenter/Bioinformatyka | 58ef0d61e5f661c3a480fce32a38392053dbe319 | 8047a2442f5d1a58962f3895b21cc22ae17947ef |
refs/heads/master | <file_sep>import datetime
from django import forms
from .models import Dish, Ingredient, Menu
class MenuForm(forms.ModelForm):
class Meta:
model = Menu
fields = ('season', 'expiration_date', 'dishes')
def clean_expiration_date(self):
date = self.cleaned_data['expiration_date']
if date:
if date <= datetime.date.today():
raise forms.ValidationError('Expiration date must be in the future!')
return date
class DishForm(forms.ModelForm):
class Meta:
model = Dish
fields = ('name', 'description', 'standard', 'ingredients')
class IngredientForm(forms.ModelForm):
class Meta:
model = Ingredient
fields = ('name',)
<file_sep># Generated by Django 2.1.4 on 2019-02-03 10:05
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('menu', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='dish',
name='created_date',
field=models.DateField(blank=True, default=datetime.date.today),
),
migrations.AlterField(
model_name='menu',
name='created_date',
field=models.DateField(blank=True, default=datetime.date.today),
),
]
<file_sep>import datetime
from django.contrib.auth.models import User
from django.test import TestCase
from django.urls import reverse
from .forms import MenuForm
from .models import Dish, Ingredient, Menu
class MenuTests(TestCase):
def setUp(self):
User.objects.create_user('Crampy', password='<PASSWORD>')
chef = User.objects.create_user(username='bobone', first_name='Bob', last_name='Stud')
papaya = Ingredient.objects.create(name='Papaya')
orange = Ingredient.objects.create(name='Orange')
fish = Ingredient.objects.create(name='Fish')
fruity_fish = Dish()
fruity_fish.chef = chef
fruity_fish.name = 'Fruity Fish'
fruity_fish.description = 'Yum!'
fruity_fish.standard = True
fruity_fish.save()
fruity_fish.ingredients.add(papaya, orange, fish)
fishie = Dish()
fishie.chef = chef
fishie.name = 'Fishie'
fishie.description = "Plain ol' fish"
fishie.save()
fishie.ingredients.add(fish)
both_fishes = Menu()
both_fishes.season = 'Both Fishies'
both_fishes.chef = chef
both_fishes.save()
both_fishes.dishes.add(fruity_fish, fishie)
one_fish = Menu()
one_fish.season = 'Just One :('
one_fish.chef = chef
one_fish.expiration_date = datetime.date(year=2030, month=3, day=1)
one_fish.save()
one_fish.dishes.add(fishie)
def test_models_return_name_as_string(self):
papaya = Ingredient.objects.get(name='Papaya')
fruity_fish = Dish.objects.get(name='Fruity Fish')
both_fishes = Menu.objects.get(season='Both Fishies')
self.assertEqual(str(papaya), 'Papaya')
self.assertEqual(str(fruity_fish), 'Fruity Fish')
self.assertEqual(str(both_fishes), 'Both Fishies')
def test_menu_form_expiration_date_in_future(self):
menu1 = MenuForm(data={
'season': 'April',
'dishes': [Dish.objects.get(name='Fruity Fish')],
'expiration_date': datetime.date(year=1980, month=1, day=1),
})
menu2 = MenuForm(data={
'season': 'Smash Mouth',
'dishes': [Dish.objects.get(name='Fruity Fish'), Dish.objects.get(name='Fishie')],
'expiration_date': datetime.date(year=2030, month=1, day=1),
})
self.assertFalse(menu1.is_valid())
self.assertTrue(menu2.is_valid())
def test_signin_view(self):
response = self.client.post(reverse('menu:login'), {'username': 'Crampy', 'password': '<PASSWORD>'}, follow=True)
self.assertTrue(response.context['user'].is_active)
def test_signout_view(self):
self.client.login(username='Crampy', password='<PASSWORD>')
response = self.client.get(reverse('menu:logout'))
self.assertEqual(response.status_code, 302)
def test_create_menu_view(self):
self.client.login(username='Crampy', password='<PASSWORD>')
response = self.client.post(reverse('menu:create_menu'), {
'season': 'yep',
'dishes': [Dish.objects.get(name='Fruity Fish')],
'expiration_date': datetime.date(year=2040, month=12, day=12),
})
self.assertEqual(response.status_code, 200)
def test_create_dish_view(self):
self.client.login(username='Crampy', password='<PASSWORD>')
response = self.client.post(reverse('menu:create_dish'), {
'name': 'skip',
'ingredients': [Ingredient.objects.get(name='Papaya')],
'description': 'spicy',
'standard': True,
})
self.assertEqual(response.status_code, 200)
def test_create_ingredient_view(self):
self.client.login(username='Crampy', password='<PASSWORD>')
response = self.client.post(reverse('menu:create_ingredient'), {
'name': 'spazzle',
})
self.assertEqual(response.status_code, 302)
def test_list_menus_view(self):
response = self.client.get(reverse('menu:list_menus'))
self.assertContains(response, 'Both Fishies')
self.assertContains(response, 'Just One :(')
def test_list_ingredients_view(self):
response = self.client.get(reverse('menu:list_ingredients'))
self.assertContains(response, 'Papaya')
self.assertContains(response, 'Orange')
self.assertContains(response, 'Fish')
def test_detail_menu_view(self):
menu = Menu.objects.get(season='Both Fishies')
response = self.client.get(reverse('menu:detail_menu', args=[menu.id]))
self.assertContains(response, 'Fruity Fish')
self.assertContains(response, 'Fishie')
def test_detail_dish_view(self):
dish = Dish.objects.get(name='Fruity Fish')
response = self.client.get(reverse('menu:detail_dish', args=[dish.id]))
self.assertContains(response, 'Yum!')
def test_edit_menu_view(self):
self.client.login(username='Crampy', password='<PASSWORD>')
menu = Menu.objects.get(season='Both Fishies')
response = self.client.post(reverse('menu:edit_menu', args=[menu.id]), {
'season': 'pep',
})
self.assertEqual(response.status_code, 200)
def test_edit_dish_view(self):
self.client.login(username='Crampy', password='<PASSWORD>')
dish = Dish.objects.get(name='Fruity Fish')
response = self.client.post(reverse('menu:edit_dish', args=[dish.id]), {
'name': 'skip',
})
self.assertEqual(response.status_code, 200)
def test_edit_ingredient_view(self):
self.client.login(username='Crampy', password='<PASSWORD>')
ingredient = Ingredient.objects.get(name='Papaya')
response = self.client.post(reverse('menu:edit_ingredient', args=[ingredient.id]), {
'name': 'dazzle',
})
self.assertEqual(response.status_code, 302)
<file_sep>import datetime
from django.contrib import messages
from django.contrib.auth import authenticate, login, logout
from django.shortcuts import get_object_or_404, redirect, render
from .forms import DishForm, IngredientForm, MenuForm
from .models import Dish, Ingredient, Menu
def signin(request):
if request.method == 'POST':
username = request.POST['username']
password = request.POST['<PASSWORD>']
user = authenticate(request, username=username, password=password)
if user is not None:
login(request, user)
messages.success(request, "Welcome, {}!".format(user.first_name))
return redirect('menu:list_menus')
else:
messages.error("Invalid login")
return render(request, 'menu/login.html')
def signout(request):
logout(request)
messages.success(request, 'Goodbye!')
return redirect('menu:list_menus')
def create_menu(request):
if request.method == 'POST':
form = MenuForm(request.POST)
if form.is_valid():
menu = form.save()
return redirect('menu:detail_menu', menu_pk=menu.pk)
else:
form = MenuForm()
return render(request, 'menu/create_menu.html', {'form': form})
def create_dish(request):
if request.method == 'POST':
form = DishForm(request.POST)
if form.is_valid():
dish = form.save(commit=False)
dish.chef = request.user
dish.save()
return redirect('menu:detail_dish', dish_pk=dish.pk)
else:
form = DishForm()
return render(request, 'menu/create_dish.html', {'form': form})
def create_ingredient(request):
if request.method == 'POST':
form = IngredientForm(request.POST)
if form.is_valid():
form.save()
return redirect('menu:create_ingredient')
else:
form = IngredientForm()
return render(request, 'menu/create_ingredient.html', {'form': form})
def list_menus(request):
menus = Menu.objects.exclude(
expiration_date__lte=datetime.date.today()
).prefetch_related('dishes')
return render(request, 'menu/list_menus.html', {'menus': menus})
def list_ingredients(request):
ingredients = Ingredient.objects.all()
return render(request, 'menu/list_ingredients.html', {'ingredients': ingredients})
def detail_menu(request, menu_pk):
menu = get_object_or_404(Menu, id=menu_pk)
return render(request, 'menu/detail_menu.html', {'menu': menu})
def detail_dish(request, dish_pk):
dish = get_object_or_404(Dish, id=dish_pk)
return render(request, 'menu/detail_dish.html', {'dish': dish})
def edit_menu(request, menu_pk):
menu = get_object_or_404(Menu, id=menu_pk)
if request.method == 'POST':
form = MenuForm(request.POST, instance=menu)
if form.is_valid():
menu = form.save()
return redirect('menu:detail_menu', menu_pk=menu_pk)
else:
form = MenuForm(instance=menu)
return render(request, 'menu/edit_menu.html', {'form': form})
def edit_dish(request, dish_pk):
dish = get_object_or_404(Dish, id=dish_pk)
if request.method == 'POST':
form = DishForm(request.POST, instance=dish)
if form.is_valid():
dish = form.save()
return redirect('menu:detail_dish', dish_pk=dish_pk)
else:
form = DishForm(instance=dish)
return render(request, 'menu/edit_dish.html', {'form': form})
def edit_ingredient(request, ingredient_pk):
ingredient = get_object_or_404(Ingredient, id=ingredient_pk)
if request.method == 'POST':
form = IngredientForm(request.POST, instance=ingredient)
if form.is_valid():
ingredient = form.save()
return redirect('menu:list_ingredients')
else:
form = IngredientForm(instance=ingredient)
return render(request, 'menu/edit_ingredient.html', {'form': form})
<file_sep>"""mysite URL configuration"""
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import include, path
from django.views.generic.base import RedirectView
favicon_view = RedirectView.as_view(url='/static/menu/favicon.ico/', permanent=True)
urlpatterns = [
path('favicon.ico/', favicon_view),
path('admin/', admin.site.urls),
path('', include('menu.urls')),
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
if settings.DEBUG:
import debug_toolbar
urlpatterns = [
path('__debug__/', include(debug_toolbar.urls)),
] + urlpatterns
<file_sep># Generated by Django 2.1.4 on 2019-02-03 10:07
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('menu', '0002_auto_20190203_0205'),
]
operations = [
migrations.AlterField(
model_name='dish',
name='created_date',
field=models.DateField(default=datetime.date.today, null=True),
),
migrations.AlterField(
model_name='menu',
name='created_date',
field=models.DateField(default=datetime.date.today, null=True),
),
]
<file_sep>from django.contrib import admin
from .models import Menu, Dish, Ingredient
admin.site.register(Menu)
admin.site.register(Dish)
admin.site.register(Ingredient)
<file_sep>"""menu app URL routes"""
from django.urls import path
from . import views
app_name = 'menu'
urlpatterns = [
path('login/', views.signin, name='login'),
path('logout/', views.signout, name='logout'),
path('', views.list_menus, name='list_menus'),
path('menu/new/', views.create_menu, name='create_menu'),
path('menu/<int:menu_pk>/', views.detail_menu, name='detail_menu'),
path('menu/<int:menu_pk>/edit/', views.edit_menu, name='edit_menu'),
path('dish/new/', views.create_dish, name='create_dish'),
path('dish/<int:dish_pk>/', views.detail_dish, name='detail_dish'),
path('dish/<int:dish_pk>/edit', views.edit_dish, name='edit_dish'),
path('ingredient/', views.list_ingredients, name='list_ingredients'),
path('ingredient/new/', views.create_ingredient, name='create_ingredient'),
path('ingredient/<int:ingredient_pk>/edit/', views.edit_ingredient, name='edit_ingredient'),
]
<file_sep>"""menu app model definitions"""
import datetime
from django.db import models
class Menu(models.Model):
dishes = models.ManyToManyField('Dish', related_name='dishes')
season = models.CharField(max_length=200)
created_date = models.DateField(null=True, default=datetime.date.today)
expiration_date = models.DateField(blank=True, null=True)
class Meta:
ordering = ['expiration_date', 'created_date', 'season']
def __str__(self):
return self.season
class Dish(models.Model):
chef = models.ForeignKey('auth.User', models.CASCADE)
ingredients = models.ManyToManyField('Ingredient', related_name='ingredients')
created_date = models.DateField(null=True, default=datetime.date.today)
name = models.CharField(max_length=200)
description = models.TextField()
standard = models.BooleanField(default=False)
def __str__(self):
return self.name
class Ingredient(models.Model):
name = models.CharField(max_length=200)
def __str__(self):
return self.name
<file_sep>Django==2.1.4
coverage==4.5.2
django-debug-toolbar==1.11
selenium==3.141.0
sqlparse==0.2.4
| 985aa72864d9cbddc202f3552059f509bddcacb0 | [
"Python",
"Text"
] | 10 | Python | Imfractical/mysite | 3bb8e5a706a844f8a8630890cc33ce6a28a1885c | edac95d7691b6c644f78a50d81a0fa89bb7d3133 |
refs/heads/master | <file_sep>import base64
import boto3
import logging
import json
from flatten_json import flatten
import os
logger = logging.getLogger()
logger.setLevel(logging.INFO)
def handler(event, context):
logger.info('Incoming event!')
logger.info(f'Event:\n{event}')
try:
app_id, stage, flat_data, tags = parse_event(event)
ssm = get_client('ssm')
for key,value in flat_data.items():
put_param(app_id, stage, key, value, ssm)
if len(tags) != 0:
tag_param(app_id, stage, key, tags, ssm)
response = {
'statusCode': 200,
'body': 'Processed.',
'headers': {
'Content-Type': 'text/plain'
}
}
return response
except:
response = {
'statusCode': 500,
'body': 'Err: Internal server error.',
'headers': {
'Content-Type': 'text/plain'
}
}
return response
def parse_event(event):
app_id = event["queryStringParameters"]['appId']
logger.info(f'AppId: {app_id}')
stage = event["queryStringParameters"]['stage']
logger.info(f'Stage: {stage}')
data = json.loads(event['body'])
logger.info(f'Data:\n{data}')
flat_data = flatten(data, '.')
logger.info(f'Flat data:\n{flat_data}')
tags = {}
for key,value in flat_data.items():
if key.startswith('metadata.tags.'):
key = key[14:]
new_tag = {key: value}
tags = {**tags, **new_tag}
logger.info(f'Tags found: {tags}')
if not os.environ['METADATA_AS_PARAM']:
for key in list(flat_data.keys()):
if key.startswith('metadata.'):
del flat_data[key]
for key,value in flat_data.items():
if isinstance(value,int):
flat_data[key] = str(value)
logger.info(f'Updated Data:\n{flat_data}')
return app_id, stage, flat_data, tags
def put_param(app_id, stage, key, value, ssm):
logger.info(f'Key: {key}, Value: {value}')
if isinstance(value,str) and value.startswith('cipher:') is True:
value = decrypt(value)
param_type = 'SecureString'
else:
param_type = 'String'
logger.info(f'Param Type: {param_type}')
compare = compare_param(app_id, stage, key, value, param_type)
if compare is True:
put = ssm.put_parameter(
Name=f'/{app_id}/{stage}/{key}',
Value=value,
Type=param_type,
Overwrite=True
)
logger.info(f'Put Response:\n{put}')
response = True
else:
logger.info('Parameter is current.')
response = False
return response
def get_client(service):
region = os.environ['REGION']
service = boto3.client(service, region_name=region)
return service
def decrypt(value):
key = os.environ['KMS_KEY_ALIAS']
logger.info(f'KMS Key: {key}')
trim_value = value[7:]
bytes_value = base64.b64decode(trim_value)
kms = get_client('kms')
kms_response = kms.decrypt(CiphertextBlob=bytes_value)
logger.info('Secret decrypted!')
bytes_decrypted_value = kms_response['Plaintext']
value = bytes_decrypted_value.decode('utf-8')
return value
def compare_param(app_id, stage, key, value, param_type):
ssm = get_client('ssm')
try:
get = ssm.get_parameter(
Name=f'/{app_id}/{stage}/{key}',
WithDecryption=True
)
existing_value = get['Parameter']['Value']
existing_type = get['Parameter']['Type']
if existing_value != value or existing_type != param_type:
compare = True
else:
compare = False
except:
compare = True
return compare
def tag_param(app_id, stage, key, tags, ssm):
logger.info('Adding Tags')
psm_param = {'ManagedBy': 'psm'}
tags = {**tags, **psm_param}
for tag_key,tag_value in tags.items():
tag = ssm.add_tags_to_resource(
ResourceType='Parameter',
ResourceId=f'/{app_id}/{stage}/{key}',
Tags=[
{
'Key': tag_key,
'Value': tag_value
},
]
)
logger.info(f'Tag Response:\n{tag}')
return tag
| 1768ba5c0e7dedd4679baa6cf4a29e7b0079124c | [
"Python"
] | 1 | Python | PoeBlu/psm | 9334ef17db8ac086c2693738ab3dd71d2c21985e | 4bd96cf222de902aa02420b65af97f63826bd83f |
refs/heads/master | <file_sep>require 'sequel'
require 'singleton'
class Connection
include Singleton
attr_reader :db
def initialize
@db = Sequel.connect(ENV['connection-string'] || 'sqlite://test.db')
end
end
DB = Connection.instance # Sequel issue, requires exactly DB name for variable<file_sep>require 'csv'
require 'open-uri'
require 'sequel'
require_relative '../db/connection'
require_relative './../db/models/exchange_rate'
class DownloaderService
CSV_LINK = 'https://sdw.ecb.europa.eu/quickviewexport.do;jsessionid=6B0E2A6E93BA1C8CDA0E20C369D46F4E?SERIES_KEY=120.EXR.D.USD.EUR.SP00.A&type=csv'.freeze
START_ROW = 5
def self.populate_db
file_raw = open(CSV_LINK)
file_parsed = CSV.read(file_raw)[START_ROW..-1]
data = file_parsed.each_slice(900).to_a # SQLite bulk insert problem limit is 1000
data.each do |portion|
ExchangeRate.import(%i[created_at price], portion)
end
end
end
<file_sep>require 'date'
require 'bigdecimal'
require_relative '../db/models/exchange_rate'
class ExchangeService
def self.exchange(amount, date_range = DateTime.now.prev_day(1))
raise 'Date cannot be empty' if date_range.nil?
raise 'Amount cannot be empty' if amount.nil?
# should we add all posible validations here?
amount = BigDecimal.new(amount.to_s)
if date_range.is_a? Array
ExchangeRate.where(created_at: date_range).each do |row|
price = BigDecimal.new(row.price.to_s)
date = row.created_at
puts format_result(amount, price * amount, date)
end
else
price = BigDecimal.new(ExchangeRate.where(created_at: date_range.to_date).first.price.to_s)
puts format_result(amount, price * amount, date_range)
end
end
def self.format_decimal(val)
val.truncate(4).to_s('F')
end
def self.format_result(usd, eur, date)
"#{format_decimal(usd)} USD in EUR is #{format_decimal(eur)} at #{date.to_date}"
end
end<file_sep>source 'https://rubygems.org'
gem 'sequel'
gem 'rspec'
gem 'rake'
gem 'sqlite3'
gem 'vcr'
gem 'webmock'
<file_sep>require 'rspec'
require 'sequel'
require_relative '../db/connection'
require_relative '../services/exchange_service'
require_relative '../db/models/exchange_rate'
require 'bigdecimal'
require 'date'
require_relative '../services/downloader_service'
require 'vcr'
VCR.configure do |c|
c.cassette_library_dir = 'vcr_cassettes'
c.hook_into :webmock
c.allow_http_connections_when_no_cassette = true
end
RSpec.describe DownloaderService do
describe '#populate_db' do
before(:all) do
ExchangeRate.dataset.delete
end
it 'should populate database' do
VCR.use_cassette('CSVcassette') do
expect(ExchangeRate.dataset.count).to eq(0)
expect { described_class.populate_db }.to(change { ExchangeRate.dataset.count })
end
end
end
end<file_sep># testcase
Test case
Simple app to get exchange rates
enviromnet requirents
ruby 2.2+
Usage
run rake tasks from the root of the project
:create_db
:drop_db
:populate_db
:exchange_today, [:amount]
:exchange_last_week, [:amount]
Feel free to add adition rake taks)
Running tests
just run rspec spec in the root
<file_sep>require 'rspec'
require 'sequel'
require_relative '../db/connection'
require_relative '../services/exchange_service'
require_relative '../db/models/exchange_rate'
require 'bigdecimal'
require 'date'
RSpec.describe ExchangeService do
##########################################################################
## Next time use something friendly to factory bot and also use FFaker) ##
##########################################################################
describe '#format_decimal' do
it 'should format decimal' do
decimal = BigDecimal.new('100.4444666')
expect(described_class.format_decimal(decimal)).to eq('100.4444')
expect(described_class.format_decimal(decimal)).not_to eq(decimal)
end
it 'should not format garbage' do
decimal = 'string'
expect { described_class.format_decimal(decimal) }.to raise_error(NoMethodError)
end
end
describe '#print_result' do
it 'should prints proper string' do
usd = BigDecimal.new('100.4444666')
date = DateTime.now
string = "100.4444 USD in EUR is 100.4444 at #{date.to_date.to_s}"
expect(described_class.format_result(usd, usd, date)).to eq string
expect { described_class.format_result(usd, 'garbage', date) }.to raise_error(NoMethodError)
end
end
describe '#exchange' do
before(:all) do
portion = [
['2018-02-01', '1.2459'],
['2018-01-01', '1.2489']
]
ExchangeRate.dataset.delete
ExchangeRate.import(%i[created_at price], portion)
end
specify do
single_string = "300.0 USD in EUR is 373.77 at 2018-02-01\n"
expect { described_class.exchange(300, Date.parse('2018-02-01')) }.to output(single_string).to_stdout
end
specify do
single_string = "300.0 USD in EUR is 374.67 at 2018-01-01\n300.0 USD in EUR is 373.77 at 2018-02-01\n"
expect do
described_class.exchange(300, [Date.parse('2018-02-01'), Date.parse('2018-01-01')])
end
.to output(single_string).to_stdout
end
describe '#exchange' do
it 'should check params' do
expect { described_class.exchange(100, nil) }.to raise_error('Date cannot be empty')
expect { described_class.exchange(nil, 100) }.to raise_error('Amount cannot be empty')
end
end
end
end<file_sep>class ExchangeRate < Sequel::Model
# Sequel validation hook: runs on every save of an ExchangeRate row.
def validate
  super
  validates_presence %i[price created_at]
  validates_numeric :price
  validates_type Date, :created_at
  # Enforce one rate per day. validates_unique expects column name(s);
  # the original passed the Date *class*, which is not a column of this
  # model and therefore never guarded against duplicate dates.
  validates_unique :created_at
end
end<file_sep># Add your own tasks in files placed in lib/tasks ending in .rake,
# for example lib/tasks/capistrano.rake, and they will automatically be available to Rake.
require_relative 'db/connection'
require_relative 'services/downloader_service'
require_relative 'services/exchange_service'
connection = Connection.instance.db
desc "Create the exchange_rates table"
task :create_db do
  connection.create_table :exchange_rates do
    primary_key :id
    # The original used `uniq: true`, which is not a Sequel column option
    # and was silently ignored; `unique: true` is the correct spelling.
    Date :created_at, null: false, index: true, unique: true
    BigDecimal :price, null: false
  end
  puts 'Database Created!'
end

desc "Drop the exchange_rates table"
task :drop_db do
  connection.drop_table :exchange_rates
  puts 'Database Dropped!'
end

desc "Download exchange rates and populate the database"
task :populate_db do
  DownloaderService.populate_db
  puts 'DB is populated'
end

desc "Convert AMOUNT of USD to EUR at today's rate"
task :exchange_today, [:amount] do |_t, args|
  # NOTE(review): args[:amount] arrives as a String from the command line;
  # confirm ExchangeService.exchange accepts strings or convert here.
  ExchangeService.exchange(args[:amount])
end

desc "Convert AMOUNT of USD to EUR at the rates 4 and 7 days ago"
task :exchange_last_week, [:amount] do |_t, args|
  ExchangeService.exchange(args[:amount], [DateTime.now.prev_day(4).to_date, DateTime.now.prev_day(7).to_date])
end
| 84db8190209060e3ad60e55e8f7711356fcd6cb9 | [
"Markdown",
"Ruby"
] | 9 | Ruby | pavelkvasnikov/testcase | f7cbc2421d974c71e2093dfd24cd034602a94402 | c911a1b04df05411cc0f3c86ce81df5c3de94ea5 |
refs/heads/master | <file_sep># TAKE AWAY MESSAGES:
# 1. seperate game logic (model) and game representation (view)
# 2. factor out functionality to functions with descriptive names
import random
# Given a character c and an integer t
# return a string containing c repeated t times
def repeat(c, t):
    """Return a string consisting of the character c repeated t times.

    A non-positive t yields the empty string.
    """
    # Python's sequence-repetition operator does exactly what the original
    # accumulate-in-a-loop version did, in one expression.
    return c * t
# Given an integer (the number of sticks),
# return a string (representation of the game status)
def showGameStatus(sticks):
    """Return a two-line textual representation of the game status.

    The first line reports the count ("N sticks remaining"); the second
    draws one "|" per remaining stick.
    """
    # The original computed textual_repr but returned only graphical_repr,
    # leaving the count line dead code; both lines are part of the intended
    # status display per the function's own comments, so return them together.
    textual_repr = str(sticks) + " sticks remaining"
    # String repetition replaces the repeat() helper.
    graphical_repr = "|" * sticks
    return textual_repr + "\n" + graphical_repr
# Given an integer (the number sticks), returns nothing
def showGameStatus_bad(sticks):
print str(sticks) + " sticks remaining"
# game modes (an integer):
# 0 -> AI
# 1 -> 1vs1
# ... -> undefined (treat it as AI)
# game levels (an integer):
# 0 -> undefined
# 1 -> level 1
# 2 -> level 2
# 3 -> level 3
# n -> undefined
# --- Game setup: ask for player names, game mode and (for AI games) level. ---
player1 = raw_input("Please select your name player 1: ")
answer = raw_input("Do you want to play alone? (y/n)")
if answer == "y":
    game_mode = 0  # play against the AI
    answer = raw_input("What level do you want to play on? (1-3, 3 most difficult)")
    if answer.isdigit():
        # str() turns an integer into a string; int() parses a string as an
        # integer and crashes on non-numeric input - hence the isdigit guard.
        answer_number = int(answer)
        if answer_number >= 1 and answer_number <= 3:
            game_level = answer_number
        else:
            game_level = 1  # out-of-range input falls back to level 1
    else:
        game_level = 1  # non-numeric input falls back to level 1
elif answer == "n":
    game_mode = 1  # two human players
else:
    game_mode = 1  # any other answer is treated as 1 vs 1
if game_mode == 1:
    player2 = raw_input("Please select your name player 2")
    print "Welcome", player1, "and", player2
elif game_mode == 0:
    player2 = "AI"
    print "Good luck,", player1
# Initialise the pile with a random number of sticks (15-24 inclusive).
sticks = random.randrange(15,25)
print "There are ",sticks," sticks"
turn = False  # flipped at the top of each round; True = player 1's turn
# NOTE(review): game_level is collected but never used in the loop below -
# presumably higher levels were meant to make the AI smarter; confirm intent.
turn = False
if game_mode==1:
while(sticks > 0):
#byt turn
turn = not turn
#player 1's tur
if turn:
#turn = True betyder player 1
print player1,"'s turn"
else:
#turn = False betyder player 2
print player2,"'s turn"
#make player select stick
print "select 1 or 2 sticks"
choice = raw_input()
if choice == '1':
sticks = sticks - 1
elif choice == '2':
sticks = sticks - 2
else:
print "please select a valid option"
turn = not turn
print showGameStatus(sticks)
elif game_mode==0:
while(sticks>0):
turn = not turn
#player 1s tur
if turn:
print player1,"'s tur"
print "select 1 or 2 sticks"
choice = raw_input()
if choice == '1':
sticks = sticks - 1
elif choice == '2':
sticks = sticks - 2
else:
print "Please select a valid option"
turn = not turn
else:
print player2, "'s tur"
AI_choice = random.randrange(1,3)
print("AI chose %r" % AI_choice)
if AI_choice == 1:
sticks = sticks - 1
elif AI_choice == 2:
sticks = sticks - 2
print showGameStatus(sticks)
#select winner
if turn == True:
print player1," wins!!!"
else:
print player2," wins!!!"
<file_sep>__author__ = 'markusi'
# Scratch file: sanity-check that random.randrange(1, 3) yields only 1 or 2.
import random
AI_sticks= random.randrange(1,3)
print AI_sticks
print(random.randrange(1,3))<file_sep>__author__ = 'markusi'
# Beginner exercise: greet the user and read a name
# (Python 2: raw_input returns the typed line as a str).
print "Hello beginner. What is your name?"
myName=raw_input()
print "Welcome " + myName | d041923ec9a7146dd12aa34ee51b261f831a69b5 | [
"Python"
] | 3 | Python | programmeringyeah/Pick-one-pick-two-game | df6e56b15d39424a962d5cce88168a63be1f5769 | c60c6db63492e3517260c7bbdc2d8e694d37a936 |
refs/heads/master | <file_sep>window.onload = initialize;
// Runs on window load: attach the hover handler to the box element.
function initialize(){
    document
        .getElementById('my-little-box')
        .addEventListener('mouseenter', showSomething);
}
function showSomething(){
    // Greeting logged each time the pointer enters #my-little-box.
    console.log("hola Félix");
}
"JavaScript"
] | 1 | JavaScript | tcrurav/ForFelix | d6abd1711fb47b01532aa856ffba5fc95e6fb79e | c0a1ad20240301e854a24cb25a44efaa57e03ae4 |
refs/heads/master | <file_sep>using CefSharp;
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace TweetDeck
{
/// <summary>
/// Main window: restores/persists its placement via user settings and hosts
/// an embedded Chromium (CefSharp) browser pointed at TweetDeck.
/// </summary>
public partial class Form1 : Form
{
    public Form1()
    {
        InitializeComponent();
    }

    // Restores the saved window placement, initialises CEF and embeds the
    // browser. Wired up by the designer - keep the name and signature.
    private void Form1_Load(object sender, EventArgs e)
    {
        // InitialLocation is saved as "x,y,width,height". Missing or
        // malformed parts fall back to the defaults; the original used
        // int.Parse, so a corrupted setting crashed the app at startup.
        string initLocation = Properties.Settings.Default.InitialLocation;
        Point il = new Point(0, 0);
        Size sz = Size;
        if (!string.IsNullOrWhiteSpace(initLocation))
        {
            string[] parts = initLocation.Split(',');
            int x, y, w, h;
            if (parts.Length >= 2 && int.TryParse(parts[0], out x) && int.TryParse(parts[1], out y))
            {
                il = new Point(x, y);
            }
            if (parts.Length >= 4 && int.TryParse(parts[2], out w) && int.TryParse(parts[3], out h))
            {
                sz = new Size(w, h);
            }
        }
        Size = sz;
        Location = il;

        // Keep the Chromium cache next to the executable so sessions persist.
        var settings = new CefSettings();
        settings.CachePath = Environment.CurrentDirectory + "/chromeData";
        Cef.Initialize(settings, shutdownOnProcessExit: true, performDependencyCheck: true);

        // The constructor argument already navigates to the URL, so the
        // original's extra chrom.Load(...) call was redundant and has been
        // removed, along with an unused CefSettings local.
        CefSharp.WinForms.ChromiumWebBrowser chrom = new CefSharp.WinForms.ChromiumWebBrowser("https://tweetdeck.twitter.com/");
        this.Controls.Add(chrom);
        chrom.BrowserSettings.ApplicationCache = CefSharp.CefState.Enabled;
        chrom.BrowserSettings.WebSecurity = CefSharp.CefState.Enabled;
        chrom.Dock = DockStyle.Fill;
        chrom.Show();
    }

    // Persists the window placement; uses RestoreBounds when the window is
    // minimised or maximised so the normal-state bounds are what get saved.
    private void Form1_FormClosing(object sender, FormClosingEventArgs e)
    {
        Point location = Location;
        Size size = Size;
        if (WindowState != FormWindowState.Normal)
        {
            location = RestoreBounds.Location;
            size = RestoreBounds.Size;
        }
        string initLocation = string.Join(",", location.X, location.Y, size.Width, size.Height);
        Properties.Settings.Default.InitialLocation = initLocation;
        Properties.Settings.Default.Save();
    }
}
<file_sep># TweetDeckWindows
Tweet Deck that works in windows.
## Download
**[Portable](https://github.com/boristomas/TweetDeckWindows/blob/master/TweetDeck/bin/x86/Release/Portable.zip?raw=true)**
**[Setup](https://github.com/boristomas/TweetDeckWindows/blob/master/TweetDeck/publish/Setup.zip?raw=true)**
## Start with windows
If you want to start app with windows you should add app shortcut to your startup folder:
WinKey+R type in
"shell:startup"
more info about startup folder: http://www.softwareok.com/?seite=faq-Windows-10&faq=28
## HELP
twitter: [@boristomas](http://www.twitter.com/boristomas)
email: <EMAIL>
| 5444b3580c6c4d36e0559cbd7db89b1d46b9d390 | [
"Markdown",
"C#"
] | 2 | C# | boristomas/TweetDeckWindows | fb585dae5b3ad400ad785bfd7ce84d0822bce9f0 | 55b1ded1ff42c236a5512491757c4db89de1db63 |
refs/heads/master | <file_sep># To add a new cell, type '# %%'
# To add a new markdown cell, type '# %% [markdown]'
##################################################################
# Author: <NAME>
# February 20, 2020
#
# This script takes two files, one containing the Chlorophyll A
# measurements and one containing the Nitrate measurements. It
# finds the dates where both chemicals were measured and adds
# the Nitrate measurement at the end of the line in the
# chlorophyll A data. It then outputs one file with the data
# from all the sites.
# ##############################################################
# %%
import glob
import pandas as pd
# %%
# Function to rename the "Recorded Value" column in the datasets to have the name of the analyte measured.
# Function to rename the "Recorded Value" column in the datasets to have the
# name of the analyte measured.
def rename_cols(df):
    """Rename df's "Recorded Value" column after the analyte it measures.

    Returns the (possibly renamed) DataFrame; unknown analytes are left
    unchanged with a warning printed.
    """
    # Read the analyte once instead of repeating the iloc lookup per branch.
    analyte = df['Analyte'].iloc[0]
    if analyte == 'Chlorophyll a (Total)':
        df = df.rename(columns={"Recorded Value": "ChlA Value"})
    elif analyte == 'Nitrate-Nitrite (N) (Total)':
        df = df.rename(columns={"Recorded Value": "Nitrate Value"})
    else:
        # Original message had a stray ")" at the end.
        print(f"Unexpected analyte value, {analyte}, not renaming column")
    return df
# %%
# It may be useful to keep a list of the files processed; it is not used
# beyond bookkeeping.
file_list = []
# Start from a clean slate: drop any All_sites left over from a previous run
# of this notebook cell. Catch only NameError - the original bare `except:`
# would also have swallowed unrelated errors (even KeyboardInterrupt).
try:
    del All_sites
except NameError:
    pass
# %%
# This is the main part of the script that processes all the files.
# Files should be in a folder, with chlorophyll A files ending in ChlA.csv and corresponding
# nitrate files ending in Nitrate.csv.
# Collect each site's merged frame and concatenate once at the end. The
# original grew All_sites with pd.concat inside the loop (quadratic copying)
# and bootstrapped the variable with a bare try/except that hid real errors.
site_frames = []
for file in glob.glob("*ChlA.csv"):
    # Add current file to list of files processed
    file_list.append(file)
    # Process chlorophyll A file
    chlA_df = pd.read_csv(file)
    chlA_df = rename_cols(chlA_df)
    # The nitrate file shares the site prefix: "<site>ChlA.csv" -> "<site>Nitrate.csv"
    Nitrate_file = file[:-8] + "Nitrate.csv"
    Nitrate_df = pd.read_csv(Nitrate_file)
    Nitrate_df = rename_cols(Nitrate_df)
    print(f"Processing {file} and {Nitrate_file}")
    # Keep only the dates present in both datasets; the nitrate measurement
    # is appended as a new column on the chlorophyll rows.
    merged_df = pd.merge(chlA_df, Nitrate_df['Nitrate Value'], how='inner',
                         left_on=chlA_df['Collected Date'], right_on=Nitrate_df['Collected Date'])
    site_frames.append(merged_df)

All_sites = pd.concat(site_frames, axis=0)
All_sites.to_csv("All_sites_ChlA_Nitrate.csv")
All_sites.describe()
# %%
<file_sep># Data analysis for Jonathan Gitzendanner Project
## AICE Environmental Science
### Author: <NAME>
The data files and script in this repository are supplemental data for <NAME>'s AICE Environmental Science project studying Chlorophyll A and Nitrate levels at 10 sampling sites using data from the South West Florida Water Management District (SWFWMD).
Jonathan developed the project, located the data and downloaded the data on his own.
The data files as downloaded from SWFWMD are in a format that is difficult to analyze directly. For each sampling location, there is one file for each chemical and one line for the dates that chemical was analyzed. The chemicals were not analyzed on the same dates in all cases.
Jonathan wanted to find the dates where both chemicals were analyzed on the same day. This seemed like a challenging task using Excel or other methods. As such, <NAME>, Jonathan's father, wrote a Python script to do the data conversion. The output of the script was then returned to Jonathan who finished the summary of the data and all subsequent analyses.
The data and Python script are being provided as an example of open science and full disclosure of what he did vs what part he had assistance with.
Jonathan did not have any role in coding the Python script and it should not be considered as part of his project.
| 59baf041f90b74357a018d75056725ad2a04c2ba | [
"Markdown",
"Python"
] | 2 | Python | magitz/swfwmd_data_analysis | 55b5ef726448bc6c706cadbd389cd6d5e8aadcba | 86e90b5a6da49d964e51aa248256290f3f951a4a |
refs/heads/master | <file_sep><!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<title>Updates & Corrections – August 2017 | Clements Checklist</title>
<link rel="SHORTCUT ICON" href="http://www.birds.cornell.edu/images/FavIcon.ico" type="image/x-icon" /><link rel="ICON" href="http://www.birds.cornell.edu/images/FavIcon.ico" type="image/x-icon" />
<link rel="icon"
type="image/png"
href="http://www.birds.cornell.edu/bbimages/aab/favicon.png" />
<link type="text/css" rel="stylesheet" href="http://www.birds.cornell.edu/clementschecklist/wp-content/themes/cornelllab/style.css" />
<link rel='dns-prefetch' href='//s.w.org' />
<link rel="alternate" type="application/rss+xml" title="Clements Checklist » Feed" href="http://www.birds.cornell.edu/clementschecklist/feed/" />
<link rel="alternate" type="application/rss+xml" title="Clements Checklist » Comments Feed" href="http://www.birds.cornell.edu/clementschecklist/comments/feed/" />
<script type="text/javascript">
window._wpemojiSettings = {"baseUrl":"https:\/\/s.w.org\/images\/core\/emoji\/2.4\/72x72\/","ext":".png","svgUrl":"https:\/\/s.w.org\/images\/core\/emoji\/2.4\/svg\/","svgExt":".svg","source":{"concatemoji":"http:\/\/www.birds.cornell.edu\/clementschecklist\/wp\/wp-includes\/js\/wp-emoji-release.min.js?ver=4.9.4"}};
!function(a,b,c){function d(a,b){var c=String.fromCharCode;l.clearRect(0,0,k.width,k.height),l.fillText(c.apply(this,a),0,0);var d=k.toDataURL();l.clearRect(0,0,k.width,k.height),l.fillText(c.apply(this,b),0,0);var e=k.toDataURL();return d===e}function e(a){var b;if(!l||!l.fillText)return!1;switch(l.textBaseline="top",l.font="600 32px Arial",a){case"flag":return!(b=d([55356,56826,55356,56819],[55356,56826,8203,55356,56819]))&&(b=d([55356,57332,56128,56423,56128,56418,56128,56421,56128,56430,56128,56423,56128,56447],[55356,57332,8203,56128,56423,8203,56128,56418,8203,56128,56421,8203,56128,56430,8203,56128,56423,8203,56128,56447]),!b);case"emoji":return b=d([55357,56692,8205,9792,65039],[55357,56692,8203,9792,65039]),!b}return!1}function f(a){var c=b.createElement("script");c.src=a,c.defer=c.type="text/javascript",b.getElementsByTagName("head")[0].appendChild(c)}var g,h,i,j,k=b.createElement("canvas"),l=k.getContext&&k.getContext("2d");for(j=Array("flag","emoji"),c.supports={everything:!0,everythingExceptFlag:!0},i=0;i<j.length;i++)c.supports[j[i]]=e(j[i]),c.supports.everything=c.supports.everything&&c.supports[j[i]],"flag"!==j[i]&&(c.supports.everythingExceptFlag=c.supports.everythingExceptFlag&&c.supports[j[i]]);c.supports.everythingExceptFlag=c.supports.everythingExceptFlag&&!c.supports.flag,c.DOMReady=!1,c.readyCallback=function(){c.DOMReady=!0},c.supports.everything||(h=function(){c.readyCallback()},b.addEventListener?(b.addEventListener("DOMContentLoaded",h,!1),a.addEventListener("load",h,!1)):(a.attachEvent("onload",h),b.attachEvent("onreadystatechange",function(){"complete"===b.readyState&&c.readyCallback()})),g=c.source||{},g.concatemoji?f(g.concatemoji):g.wpemoji&&g.twemoji&&(f(g.twemoji),f(g.wpemoji)))}(window,document,window._wpemojiSettings);
</script>
<style type="text/css">
img.wp-smiley,
img.emoji {
display: inline !important;
border: none !important;
box-shadow: none !important;
height: 1em !important;
width: 1em !important;
margin: 0 .07em !important;
vertical-align: -0.1em !important;
background: none !important;
padding: 0 !important;
}
</style>
<script type='text/javascript' src='http://www.birds.cornell.edu/clementschecklist/wp/wp-includes/js/jquery/jquery.js?ver=1.12.4'></script>
<script type='text/javascript' src='http://www.birds.cornell.edu/clementschecklist/wp/wp-includes/js/jquery/jquery-migrate.min.js?ver=1.4.1'></script>
<script type='text/javascript' src='http://www.birds.cornell.edu/clementschecklist/wp/wp-content/plugins/mailchimp-widget/js/mailchimp-widget-min.js?ver=4.9.4'></script>
<script type='text/javascript' src='http://www.birds.cornell.edu/clementschecklist/wp-content/themes/cornelllab/js/cornell-custom.js?ver=4.9.4'></script>
<link rel='https://api.w.org/' href='http://www.birds.cornell.edu/clementschecklist/wp-json/' />
<link rel="EditURI" type="application/rsd+xml" title="RSD" href="http://www.birds.cornell.edu/clementschecklist/wp/xmlrpc.php?rsd" />
<link rel="wlwmanifest" type="application/wlwmanifest+xml" href="http://www.birds.cornell.edu/clementschecklist/wp/wp-includes/wlwmanifest.xml" />
<meta name="generator" content="WordPress 4.9.4" />
<link rel="canonical" href="http://www.birds.cornell.edu/clementschecklist/updates-corrections-august-2017/" />
<link rel='shortlink' href='http://www.birds.cornell.edu/clementschecklist/?p=674' />
<link rel="alternate" type="application/json+oembed" href="http://www.birds.cornell.edu/clementschecklist/wp-json/oembed/1.0/embed?url=http%3A%2F%2Fwww.birds.cornell.edu%2Fclementschecklist%2Fupdates-corrections-august-2017%2F" />
<link rel="alternate" type="text/xml+oembed" href="http://www.birds.cornell.edu/clementschecklist/wp-json/oembed/1.0/embed?url=http%3A%2F%2Fwww.birds.cornell.edu%2Fclementschecklist%2Fupdates-corrections-august-2017%2F&format=xml" />
<!-- Google Tag Manager -->
<script>(function(w,d,s,l,i){w[l]=w[l]||[];w[l].push({'gtm.start':
new Date().getTime(),event:'gtm.js'});var f=d.getElementsByTagName(s)[0],
j=d.createElement(s),dl=l!='dataLayer'?'&l='+l:'';j.async=true;j.src=
'//www.googletagmanager.com/gtm.js?id='+i+dl;f.parentNode.insertBefore(j,f);
})(window,document,'script','dataLayer','GTM-P7854M');</script>
<!-- End Google Tag Manager -->
<script type="text/javascript">
// Theme behaviours: text-size chooser, mobile section/search menus,
// search-box placeholder clearing, and caption wrapping for images.
jQuery(document).ready(function($){
    // Toggle the font-size chooser when its wrapper is clicked.
    $("#text_resize_wrapper").click(function(){
        $(".resizer").slideToggle('fast', function() {});
        return false;
    });
    $("#text_resize_wrapper").css( 'cursor', 'pointer' );
    // Mobile "sections" menu: open it, closing the search panel first.
    $("#mobile_sections_link").unbind("click").click(
        function (event) {
            if($("#mobile_sections").is(':visible')) {
                $("#mobile_sections").slideUp();
            }
            else {
                $("#mobile_search").slideUp();
                $("#mobile_sections").slideDown();
            }
        });
    // Mobile search panel: mirror image of the sections menu above.
    $("#mobile_search_link").unbind("click").click(
        function (event) {
            if($("#mobile_search").is(':visible')) {
                $("#mobile_search").slideUp();
            }
            else {
                $("#mobile_sections").slideUp();
                $("#mobile_search").slideDown();
            }
        });
    // Clear the placeholder text when the search field gains focus.
    $('#search_text').focus(function() {
        $(this).val('');
    });
    //GET IMAGES WITH CAPTION CLASS AND WRAP WITH DIV, ADD CAPTION SPAN AND TEXT-ALIGN RIGHT BOTH IMAGE AND CAPTION
    $('img.caption-image').each(function(index) {
        var parentNode = $(this).parent();
        var newNode = $('<div />').addClass('caption-wrap').append($(this)).append('<span class="caption-text">' + $(this).attr("alt") + '</span>');
        parentNode.append(newNode);
    });
});
</script>
</head>
<body>
<div id="head_section">
<div class="inner_section clearfix cmfix" >
<div class="header_right mobile-device">
<ul id="toolbar" class="clearfix">
<li><a href="/enews" id="getEnews"><span>Get eNews</span></a></li>
<li><a href="http://www.birds.cornell.edu/page.aspx?pid=1644" id="contactUs"><span>Contact Us</span></a></li>
<li><a href="https://secure3.birds.cornell.edu/SSLpage.aspx?pid=1601" id="donateNow"><span>Donate Now</span></a></li>
</ul><!-- #toolbar -->
<div id="search_section" >
<form method="get" id="searchform" action="http://www.birds.cornell.edu/clementschecklist/">
<fieldset>
<input type="text" value="" name="s" id="s" />
<div id="search_button_wrap"><input type="submit" id="searchsubmit" value="" /></div>
</fieldset>
</form> </div><!-- #search_section -->
</div><!-- .header_right -->
<a href="http://www.birds.cornell.edu" id="logo"><span>Cornell Lab of Ornithology</span></a>
<h1 id="tagline" >
<a href="http://www.birds.cornell.edu/clementschecklist/" class="tagline_link" title="Clements Checklist" rel="home">Clements Checklist</a>
</h1>
</div><!-- .inner_section -->
</div><!-- #head_section -->
<div id="nav_section" class="clearfix other_device">
<div class="inner_section">
<div id="menu-wrapper" >
<div class="menu-main-menu-container"><ul id="menu-main-menu" class="menu"><li class="menu-item menu-item-home"><a href="http://www.birds.cornell.edu/clementschecklist/" title="Home">Home</a></li><li id="menu-item-8" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-has-children menu-item-8"><a href="http://www.birds.cornell.edu/clementschecklist/about/">About the Book</a>
<ul class="sub-menu">
<li id="menu-item-164" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-164"><a href="http://www.birds.cornell.edu/clementschecklist/about/preface/">Preface to the 6th Edition</a></li>
<li id="menu-item-171" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-171"><a href="http://www.birds.cornell.edu/clementschecklist/about/purchasing/">Purchasing</a></li>
<li id="menu-item-165" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-165"><a href="http://www.birds.cornell.edu/clementschecklist/about/methods/">Methods</a></li>
</ul>
</li>
<li id="menu-item-43" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-43"><a href="http://www.birds.cornell.edu/clementschecklist/jamesclements/"><NAME></a></li>
<li id="menu-item-174" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-has-children menu-item-174"><a href="http://www.birds.cornell.edu/clementschecklist/updateindex/">Latest Updates</a>
<ul class="sub-menu">
<li id="menu-item-746" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-746"><a href="http://www.birds.cornell.edu/clementschecklist/august-2018/">August 2018</a></li>
<li id="menu-item-680" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-680"><a href="http://www.birds.cornell.edu/clementschecklist/august-2017/">August 2017</a></li>
<li id="menu-item-641" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-641"><a href="http://www.birds.cornell.edu/clementschecklist/overview-august-2016/">August 2016</a></li>
<li id="menu-item-528" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-528"><a href="http://www.birds.cornell.edu/clementschecklist/overview-august-2015/">August 2015</a></li>
<li id="menu-item-480" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-480"><a href="http://www.birds.cornell.edu/clementschecklist/2014-overview/">August 2014</a></li>
<li id="menu-item-365" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-365"><a href="http://www.birds.cornell.edu/clementschecklist/overview-august-2013/">August 2013</a></li>
<li id="menu-item-20" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-20"><a href="http://www.birds.cornell.edu/clementschecklist/updateindex/sep12overview/">September 2012</a></li>
<li id="menu-item-35" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-35"><a href="http://www.birds.cornell.edu/clementschecklist/updateindex/23aug2011overview/">August 2011</a></li>
<li id="menu-item-170" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-170"><a href="http://www.birds.cornell.edu/clementschecklist/updateindex/feb11overview/">February 2011</a></li>
<li id="menu-item-169" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-169"><a href="http://www.birds.cornell.edu/clementschecklist/updateindex/dec09overview/">December 2009</a></li>
<li id="menu-item-168" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-168"><a href="http://www.birds.cornell.edu/clementschecklist/updateindex/dec08overview/">December 2008</a></li>
<li id="menu-item-167" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-167"><a href="http://www.birds.cornell.edu/clementschecklist/updateindex/oct07overview/">October 2007</a></li>
<li id="menu-item-166" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-166"><a href="http://www.birds.cornell.edu/clementschecklist/updateindex/july07/">July 2007</a></li>
</ul>
</li>
<li id="menu-item-161" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-161"><a href="http://www.birds.cornell.edu/clementschecklist/download/">Downloadable Checklist</a></li>
<li id="menu-item-163" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-163"><a href="http://www.birds.cornell.edu/clementschecklist/contact/">Contact</a></li>
</ul></div> </div><!-- #menu-wrapper -->
</div><!-- .inner_section -->
</div><!-- #nav_section -->
<div id="mobile_nav_section" class="mobile_device">
<div class="clearfix">
<div class="inner_section mobile_nav_container">
<a href="http://www.birds.cornell.edu/clementschecklist/" id="mobile_home_link"
class='mobile_nav_link'
> </a>
<a href="#" id="mobile_sections_link"
class='mobile_nav_link active' >Website Sections</a>
<a href="#" id="mobile_search_link"
class='mobile_nav_link' >Search</a>
</div>
</div>
</div>
<div id="mobile_sections" class="mobile_device">
<div class="clearfix">
<ul class="mobile_sections_nav">
<div class="mobile-menu-main-menu-container"><ul id="menu-main-menu-1" class="menu"><li class="menu-item menu-item-home"><a href="http://www.birds.cornell.edu/clementschecklist/" title="Home">Home</a></li><li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-has-children menu-item-8"><a href="http://www.birds.cornell.edu/clementschecklist/about/">About the Book</a>
<ul class="sub-menu">
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-164"><a href="http://www.birds.cornell.edu/clementschecklist/about/preface/">Preface to the 6th Edition</a></li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-171"><a href="http://www.birds.cornell.edu/clementschecklist/about/purchasing/">Purchasing</a></li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-165"><a href="http://www.birds.cornell.edu/clementschecklist/about/methods/">Methods</a></li>
</ul>
</li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-43"><a href="http://www.birds.cornell.edu/clementschecklist/jamesclements/"><NAME></a></li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-has-children menu-item-174"><a href="http://www.birds.cornell.edu/clementschecklist/updateindex/">Latest Updates</a>
<ul class="sub-menu">
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-746"><a href="http://www.birds.cornell.edu/clementschecklist/august-2018/">August 2018</a></li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-680"><a href="http://www.birds.cornell.edu/clementschecklist/august-2017/">August 2017</a></li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-641"><a href="http://www.birds.cornell.edu/clementschecklist/overview-august-2016/">August 2016</a></li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-528"><a href="http://www.birds.cornell.edu/clementschecklist/overview-august-2015/">August 2015</a></li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-480"><a href="http://www.birds.cornell.edu/clementschecklist/2014-overview/">August 2014</a></li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-365"><a href="http://www.birds.cornell.edu/clementschecklist/overview-august-2013/">August 2013</a></li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-20"><a href="http://www.birds.cornell.edu/clementschecklist/updateindex/sep12overview/">September 2012</a></li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-35"><a href="http://www.birds.cornell.edu/clementschecklist/updateindex/23aug2011overview/">August 2011</a></li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-170"><a href="http://www.birds.cornell.edu/clementschecklist/updateindex/feb11overview/">February 2011</a></li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-169"><a href="http://www.birds.cornell.edu/clementschecklist/updateindex/dec09overview/">December 2009</a></li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-168"><a href="http://www.birds.cornell.edu/clementschecklist/updateindex/dec08overview/">December 2008</a></li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-167"><a href="http://www.birds.cornell.edu/clementschecklist/updateindex/oct07overview/">October 2007</a></li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-166"><a href="http://www.birds.cornell.edu/clementschecklist/updateindex/july07/">July 2007</a></li>
</ul>
</li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-161"><a href="http://www.birds.cornell.edu/clementschecklist/download/">Downloadable Checklist</a></li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-163"><a href="http://www.birds.cornell.edu/clementschecklist/contact/">Contact</a></li>
</ul></div> </ul>
</div>
</div>
<div id="mobile_search" class="mobile_device">
<div class="clearfix">
<form method="get" action="http://www.birds.cornell.edu/clementschecklist/">
<div id="mobile_search_wrapper" >
<input type="text" id="mobile_search_input" name="s" />
</div>
<div id="mobile_expandable_wrap">
<div id="mobile_search_btn_wrap">
<input type="submit" id="mobile_search_btn" value="" />
</div>
</div>
</form>
</div>
</div>
<div id="breadcrumb_section"><div class="inner_section"><a href="http://www.birds.cornell.edu/clementschecklist">Home</a> » Updates & Corrections – August 2017</div></div>
<div id="content_section" class="cmfix">
<div class="inner_section clearfix">
<div id="content_area">
<div class="standard_wrap">
<h3 class="page-title">Updates & Corrections – August 2017</h3>
<div class="page-content">
<p><strong>2017 UPDATES and CORRECTIONS, to accompany the eBird/Clements <a href="http://www.birds.cornell.edu/clementschecklist/download/">Checklist v2017 spreadsheet</a><br />
Posted 15 August 2017</strong></p>
<p>The Updates and Corrections are grouped into four sections. Within each section, items are listed in the order in which they are encountered in the eBird/Clements Checklist v2017 spreadsheet, although we also continue to reference by page number the relevant entry in the last published edition of the Clements Checklist (6th, 2007).</p>
<p>The four sections are</p>
<p>1 <strong>Species</strong> – gains and losses (posted 17 August 2017)</p>
<p>2 <strong>Orders and</strong> <strong>Families</strong> – gains, losses, and changes to order or family composition or nomenclature (posted 17 August 2017)</p>
<p>3 <strong>Standard Updates and Correction</strong> – all other changes, listed in sequence as they occur in the spreadsheet (<span class="s1">posted 18 July 2018</span>)</p>
<p>4 <strong>Groups</strong> – a list of new groups (posted 17 August 2017)</p>
<p><strong>SPECIES</strong></p>
<p><strong>SPECIES GAINS (splits and newly recognized species)</strong></p>
<p><strong> </strong>page 69,<strong> Gray-breasted Partridge <em>Arborophila orientalis</em></strong></p>
<p>Gray-breasted Partridge <em>Arborophila orientalis</em> is split into four monotypic species, following Mees (1996): Malaysian Partridge <em>Arborophila campbelli</em>; Roll’s Partridge <em>Arborophila rolli</em>; Sumatran Partridge <em>Arborophila sumatrana</em>; and Gray-breasted Partridge <em>Arborophila orientalis</em>.</p>
<p>Reference:</p>
<p><NAME>. 1996. Geographical variation in birds of Java. Publications of the Nuttall Ornithological Club number 26. Cambridge, Massachusetts.</p>
<p><strong> </strong></p>
<p>page 39, <strong>Northern Harrier <em>Circus cyaneus</em></strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2017), Northern Harrier <em>Circus cyaneus</em> is split into two monotypic species: Hen Harrier <em>Circus cyaneus</em>, of the Old World, and Northern Harrier <em>Circus hudsonius</em>, of North America. This split is based on “differences in morphology, plumage, and breeding habitat (Grant 1983, Thorpe 1988, Dobson and Clarke 2011, Etherington and Mobley 2016) commensurate with differences between other recognized species of <em>Circus</em>” (Chesser et al. 2017).</p>
<p>References:</p>
<p>Chesser, R.T., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site">Fifty-eighth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p><NAME>., and <NAME>. 2011. Inconsistency in the taxonomy of Hen and Northern harriers: causes and consequences. British Birds 104: 192–201.</p>
<p><NAME>., and <NAME>. 2016. <a href="https://avianres.biomedcentral.com/track/pdf/10.1186/s40657-016-0052-3?site=avianres.biomedcentral.com">Molecular phylogeny, morphology and life-history comparisons within <em>Circus cyaneus</em> reveal the presence of two distinct evolutionary lineages</a>. Avian Research 7: 17.</p>
<p><NAME>. 1983. The ‘Marsh Hawk’ problem. British Birds 76: 373–376.</p>
<p><NAME>. 1988. Juvenile Hen Harriers showing ‘Marsh Hawk” characters. British Birds 81: 377–382.</p>
<p> </p>
<p>page (addition 2017), <strong>Tanna Ground-Dove <em>Alopecoenas ferrugineus</em></strong></p>
<p>The validity of Tanna Ground-Dove <em>Alopecoenas ferrugineus</em> formerly was questioned (Peters 1937), but this species now is widely accepted (Stresemann 1950, Greenway 1958, Dutson 2011). Insert this species, with range “Formerly Tanna Island (Vanuatu). Extinct; not reported since 1774”, immediately following Thick-billed Ground-Dove <em>Alopecoenas salamonis</em>.</p>
<p>References:</p>
<p><NAME>. 2011. Birds of Melanesia. The Bismarcks, Solomons, Vanuatu and New Caledonia. <NAME>, London.</p>
<p><NAME>., Jr. 1958. Extinct and vanishing birds of the world. Special Publication number 13. American Committee for International Wild Life Protection, New York, New York.</p>
<p><NAME>. 1937. <a href="http://biodiversitylibrary.org/page/14477851">Check-list of birds of the world. Volume III</a>. Harvard University Press, Cambridge, Massachusetts.</p>
<p><NAME>. 1950. <a href="https://sora.unm.edu/sites/default/files/journals/auk/v067n01/p0066-p0088.pdf">Birds collected during Capt. <NAME>’s last expedition (1776-1780)</a>. Auk 67: 66-88.</p>
<p> </p>
<p>page (addition 2017), <strong>Norfolk Ground-Dove <em>Alopecoenas norfolkensis</em></strong></p>
<p>The status of Norfolk Ground-Dove <em>Alopecoenas norfolkensis</em> formerly was confused (Peters 1937), but this species now is widely accepted as valid (Goodwin 1970, Gill et al. 2010, Forshaw 2015). Insert this species, with range “Formerly Norfolk Island (Australia). Extinct since ca 1800”, immediately following White-throated Ground-Dove <em>Alopecoenas xanthonurus</em>.</p>
<p>References:</p>
<p><NAME>. 2015. Pigeons and doves in Australia. CSIRO Publishing, Clayton South, Victoria, Australia.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME> (Checklist Committee, Ornithological Society of New Zealand). 2010. Checklist of the birds of New Zealand. Te Papa Press and the Ornithological Society of New Zealand, Wellington, New Zealand.</p>
<p><NAME>. 1970. Pigeons and doves of the world. Second edition. British Museum (Natural History), London and Cornell University Press, Ithaca, New York.</p>
<p><NAME>. 1937. <a href="http://biodiversitylibrary.org/page/14477786">Check-list of birds of the world. Volume III</a>. Harvard University Press, Cambridge, Massachusetts.</p>
<p> </p>
<p>page 185, <strong>Glossy Swiftlet <em>Collocalia esculenta</em></strong></p>
<p>Glossy Swiftlet <em>Collocalia esculenta</em> is split into multiple species, and the sequence of species of <em>Collocalia</em> swiftlets is revised, following Rheindt et al. (2017):</p>
<p>Subspecies <em>Collocalia esculenta natalis</em> is elevated to species rank as a monotypic Christmas Island Swiftlet <em>Collocalia natalis</em>.</p>
<p>Subspecies <em>affinis, elachyptera, cyanoptila, vanderbilti</em>, and <em>oberholseri</em> are removed from Glossy Swiftlet and are recognized as Plume-toed Swiftlet <em>Collocalia affinis</em>.</p>
<p>Subspecies <em>marginata</em> and <em>septentrionalis</em> are removed from Glossy Swiftlet and are recognized as Gray-rumped Swiftlet <em>Collocalia marginata</em>.</p>
<p>Subspecies <em>isonota</em> and <em>bagobo</em> are removed from Glossy Swiftlet, and are recognized as Ridgetop Swiftlet <em>Collocalia isonota</em>.</p>
<p>We recognize Tenggara Swiftlet <em>Collocalia sumbawae</em>, which includes subspecies <em>sumbawae</em> and a newly described subspecies, <em>sumbae.</em> Revise the range description of nominate <em>sumbawae</em> from “W Lesser Sundas (Sumbawa, Sumba, Flores and Besar)” to “western Lesser Sundas (Sumbawa; population on Flores and Besar possibly also this subspecies)”. Following <em>sumbawae</em>, insert newly described <em>sumbae</em> Schodde, Rheindt, and Christidis 2017, with range “western Lesser Sundas (Sumba)”.</p>
<p>Subspecies <em>neglecta </em>and <em>perneglecta</em> are removed from Glossy Swiftlet, and are recognized as Drab Swiftlet <em>Collocalia neglecta</em>. Revise the range description of nominate <em>neglecta</em> from “E Lesser Sundas (Roti, Dao, Semau, Timor and Jaco)” to “Lesser Sundas (Sawu, Roti, Semau, and Timor)”. Revise the range description of subspecies <em>perneglecta</em> from “Alor, Sawu, Wetar, Kisar, Romang, Damar and Tanimbar is.” to “Lesser Sundas (Alor, Wetar, and Kisar); populations on Romang, Damar and Tanimbar possibly introgressant with Glossy Swiftlet”.</p>
<p>Subspecies <em>uropygialis</em> and <em>albidior</em> are removed from Glossy Swiftlet, and are recognized as Satin Swiftlet <em>Collocalia uropygialis</em>.</p>
<p>Additionally, subspecies <em>Collocalia esculenta erwini</em>, with range “High mountains of w New Guinea”, is considered to be a junior synonym of <em>nitens</em> (Beehler and Pratt 2016, Rheindt et al. 2017), and is deleted. Revise the range description of subspecies <em>nitens</em> from “Lowlands of New Guinea and w Papuan islands” to “New Guinea, western Papuan Islands, Yapen, and Karkar”. Following Rheindt et al. (2017), we recognize subspecies <em>heinrothi</em> Neumann 1919, previously considered to be a junior synonym of <em>stresemanni</em> (Peters 1940). Insert <em>heinrothi</em> immediately following subspecies <em>stresemanni</em>; the range of <em>heinrothi</em> is “Bismarck Archipelago (New Hanover, Nusa, New Ireland, and Djaul)”. Subspecies <em>kalili</em>, with range “Bismarck Arch. (New Ireland, New Hanover and Dyaul)”, is considered to be a junior synonym of <em>heinrothi </em>(Dickinson and Remsen 2013, Rheindt et al. 2017), and is deleted. Following subspecies <em>tametamele,</em> insert a newly described subspecies, <em>lagonoleucos</em> Schodde, Rheindt, and Christidis 2017, with range “northwestern Solomon Islands (Buka and Bougainville; identity of population on Shortland not determined, but possibly this subspecies)” (Rheindt et al. 2017). Revise the range description of nominate <em>esculenta</em> from “S Moluccas, s Sulawesi, Banggai and Sula islands” to “central and southern Sulawesi, Banggai and Sula islands, South Moluccas (to Kai Islands), and Aru Islands”. As a result of all of these changes, Glossy Swiftlet now consists of the following subspecies: <em>spilura, manadensis, esculenta, minuta, amethystina, numforensis, nitens, misimae, stresemanni, heinrothi, spilogaster, hypogrammica, tametamele, lagonoleucos, becki, makirensis,</em> and <em>desiderata</em>.</p>
<p>References:</p>
<p><NAME>., and <NAME>. 2016. Birds of New Guinea: distribution, taxonomy, and systematics. Princeton University Press, Princeton, New Jersey.</p>
<p><NAME>., and <NAME>. (editors). 2013. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 1. Aves Press, Eastbourne, United Kingdom.</p>
<p><NAME>. 1940. <a href="http://biodiversitylibrary.org/page/14476702">Check-list of birds of the world. Volume IV</a>. Harvard University Press, Cambridge, Massachusetts.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2017. Speciation in Indo-Pacific swiftlets (Aves: Apodidae): integrating molecular and phenotypic data for a new provisional taxonomy of the <em>Collocalia esculenta</em> complex. Zootaxa 4250: 401-433.</p>
<p> </p>
<p>page 204, <strong>Magnificent Hummingbird <em>Eugenes fulgens</em></strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2017), Magnificent Hummingbird <em>Eugenes fulgens</em> is split into two species: Rivoli’s Hummingbird <em>Eugenes fulgens</em>, and Talamanca Hummingbird <em>Eugenes spectabilis</em>. This action is based on an assessment of the degree of plumage differences between them. A phylogenetic survey by Zamudio-Beltrán and Hernández-Baños (2015) also revealed a genetic divergence between Rivoli’s and Talamanca hummingbirds.</p>
<p>References:</p>
<p>Chesser, R.T., K.J. Burns, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site">Fifty-eighth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p>Zamudio-Beltrán, L.E., and <NAME>. 2015. A multilocus analysis provides evidence for more than one species within <em>Eugenes fulgens</em> (Aves: Trochilidae). Molecular Phylogenetics and Evolution 90: 80-84.</p>
<p> </p>
<p>page 240, <strong>Emerald Toucanet <em>Aulacorhynchus prasinus</em></strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2017), Emerald Toucanet <em>Aulacorhynchus prasinus</em> is split into two species: Northern Emerald-Toucanet <em>Aulacorhynchus prasinus</em>, which includes subspecies <em>wagleri, prasinus, warneri, virescens, volcanius, maxillaris, caeruleogularis</em>, and <em>cognatus</em>; and Southern Emerald-Toucanet <em>Aulacorhynchus albivitta</em>, which includes subspecies <em>lautus, griseigularis, albivitta, phaeolaemus, dimidiatus</em>, and <em>cyanolaemus</em>. This split is based on “differences in phenotype and genetic results consistent with those differences (Puebla-Olivares et al. 2008, Bonaccorso et al. 2011, Winker 2016)” (Chesser et al. 2017).</p>
<p>Within Northern Emerald-Toucanet, change the English name of the monotypic group <em>Aulacorhynchus prasinus wagleri</em> from Emerald Toucanet (Wagler’s) to Northern Emerald-Toucanet (Wagler’s). Change the English name of the polytypic group <em>Aulacorhynchus prasinus </em>[<em>prasinus</em> Group] from Emerald Toucanet (Emerald) to Northern Emerald-Toucanet (Emerald). Subspecies <em>stenorhabdus</em>, with range “Subtropical s Mexico to w Guatemala and n El Salvador”, and subspecies <em>chiapensis</em>, with range “Mts. of extreme s Mexico (Mt. Ovando, Chiapas)”, both are considered to be junior synonyms of <em>virescens</em> (Peters 1948, Monroe 1968), and are deleted. Revise the range description of <em>virescens</em> from “SE Mexico (Chiapas) to Honduras and Nicaragua” to “southeastern Mexico, Guatemala, Belize, western El Salvador, Honduras, and northern Nicaragua”. Change the English name of the polytypic group <em>Aulacorhynchus prasinus caeruleogularis/maxillaris</em> from Emerald Toucanet (Blue-throated) to Northern Emerald-Toucanet (Blue-throated). Change the English name of the monotypic group <em>Aulacorhynchus prasinus cognatus</em> from Emerald Toucanet (Violet-throated) to Northern Emerald-Toucanet (Violet-throated).</p>
<p>Within Southern Emerald-Toucanet, change the names of the monotypic group Emerald Toucanet (Santa Marta) <em>Aulacorhynchus prasinus lautus</em> to Southern Emerald-Toucanet (Santa Marta) <em>Aulacorhynchus albivitta lautus</em>. Change the names of the monotypic group Emerald Toucanet (Gray-throated) <em>Aulacorhynchus prasinus griseigularis</em> to Southern Emerald-Toucanet (Gray-throated) <em>Aulacorhynchus albivitta griseigularis</em>. Change the names of the polytypic group Emerald Toucanet (Andean) <em>Aulacorhynchus prasinus albivitta/phaeolaemus</em> to Southern Emerald-Toucanet (Andean) <em>Aulacorhynchus albivitta albivitta/phaeolaemus</em>. Change the names of the polytypic group Emerald Toucanet (Black-throated) <em>Aulacorhynchus prasinus</em> [<em>atrogularis</em> Group] to Southern Emerald-Toucanet (Black-throated) <em>Aulacorhynchus albivitta</em> [<em>atrogularis</em> Group].</p>
<p>References:</p>
<p>Bonaccorso, E., <NAME>, <NAME>, and <NAME>. 2011. Molecular phylogeny and systematics of Neotropical toucanets in the genus <em>Aulacorhynchus</em> (Aves, Ramphastidae). Zoologica Scripta 40: 336-349.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site">Fifty-eighth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p><NAME>., Jr. 1968. A distributional survey of the birds of Honduras. Ornithological Monographs number 7. American Ornithologists’ Union.</p>
<p><NAME>. 1948. <a href="http://biodiversitylibrary.org/page/14477504">Check-list of birds of the world. Volume VI</a>. Harvard University Press, Cambridge, Massachusetts.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2008. Speciation in the Emerald Toucanet (<em>Aulacorhynchus prasinus</em>) complex. Auk 125: 39-50.</p>
<p><NAME>. 2016. <a href="https://peerj.com/articles/2381.pdf">An examination of species limits in the <em>Aulacorhynchus</em> “<em>prasinus</em>” toucanet complex (Aves: Ramphastidae)</a>. PeerJ 4: e2381.</p>
<p> </p>
<p>page 135, <strong>Horned Parakeet <em>Eunymphicus cornutus</em></strong></p>
<p>Each of the two monotypic groups of Horned Parakeet is recognized as a separate species, following Juniper and Parr (1998) and Boon et al. (2008): Horned Parakeet (Horned) <em>Eunymphicus cornutus cornutus</em> becomes Horned Parakeet <em>Eunymphicus cornutus</em>, and Horned Parakeet (Ouvea) <em>Eunymphicus cornutus uvaeensis</em> becomes Ouvea Parakeet <em>Eunymphicus uvaeensis</em>.</p>
<p>References:</p>
<p>Boon, W.-M., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2008. Morphological, behavioural, and genetic differentiation within the Horned Parakeet (<em>Eunymphicus cornutus</em>) and its affinities to <em>Cyanoramphus</em> and <em>Prosopeia</em>. Emu 108: 251-260.</p>
<p><NAME>., and <NAME>. 1998. Parrots: a guide to parrots of the world. Yale University Press, New Haven, Connecticut.</p>
<p> </p>
<p>page (addition 2017), <strong>Tatama Tapaculo <em>Scytalopus alvarezlopezi</em></strong></p>
<p>We add a new species, the recently described Tatama Tapaculo <em>Scytalopus alvarezlopezi</em> (Stiles et al. 2017), with range “Pacific slope of Colombian Andes (western Antioquia south to southwestern Valle del Cauca)”. Position Tatama Tapaculo to immediately follow Ecuadorian Tapaculo <em>Scytalopus robbinsi</em>. Tatama Tapaculo is the species that long has been known to birders as “Alto Pisones Tapaculo”; Alto de Pisones is a site at the edge of Tatamá National Park. Please note that the validity of this new species has not yet been reviewed by <a href="http://www.museum.lsu.edu/~Remsen/SACCBaseline.htm">AOS-SACC</a>.</p>
<p>Reference:</p>
<p><NAME>., <NAME>., and <NAME>. 2017. A new species of tapaculo (Rhinocryptidae: <em>Scytalopus</em>) from the Western Andes of Colombia. Auk 134: 377-392.</p>
<p> </p>
<p>page 558, <strong>Cardinal Myzomela <em>Myzomela cardinalis</em></strong></p>
<p>The monotypic group Cardinal Myzomela (Samoan) <em>Myzomela cardinalis nigriventris</em> is elevated to species rank as Samoan Myzomela <em>Myzomela nigriventris</em>, following Pratt and Mittermeier (2016).</p>
<p>Reference:</p>
<p>Pratt, H.D., and <NAME>. 2016. Notes on the natural history, taxonomy, and conservation of the endemic avifauna of the Samoan Archipelago. Wilson Journal of Ornithology 128: 217-241.</p>
<p> </p>
<p>page 571, <strong>Northern Shrike <em>Lanius excubitor</em></strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2017), Northern Shrike <em>Lanius excubitor</em> is split into two species: Great Gray Shrike <em>Lanius excubitor</em>, with subspecies <em>excubitor, homeyeri</em>, and <em>leucopterus</em>; and Northern Shrike <em>Lanius borealis</em>, with subspecies <em>sibiricus, bianchii, mollis, funereus,</em> and <em>borealis</em>. This split is based on “differences in plumage and mtDNA (Johnsen et al. 2010, Olsson et al. 2010, Peer et al. 2011)” (Chesser et al. 2017); in particular, Northern Shrike is more closely related to other species than it is to Great Gray Shrike (Olsson et al. 2010).</p>
<p class="p1"><span class="s1">Change the scientific name of the polytypic group Northern Shrike (Asian) from <i>Lanius excubitor</i> [<i>mollis</i> Group] to <i>Lanius borealis</i> [<i>mollis</i> Group].</span></p>
<p class="p1"><span class="s1"> Subspecies <i>invictus</i>, with range “N Alaska to extreme n British Columbia and Alberta”, is considered to be a junior synonym of nominate <i>borealis</i> (Phillips 1986), and is deleted. Revise the range description of <i>borealis</i> from “E Canada (Quebec and n Ontario); > to ne US” to “breeds Alaska and northern Canada, south to extreme northern British Columbia and Alberta, northern Ontario, and Quebec; winters southern Canada and northern United States.”. Consequently, the group Northern Shrike (American) becomes monotypic; change the scientific name of this group from <i>Lanius excubitor borealis/invictus</i> to <i>Lanius borealis borealis</i>.</span></p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site">Fifty-eighth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2010. DNA barcoding of Scandinavian birds reveals divergent lineages in trans-Atlantic species. Journal of Ornithology 151: 565–578.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2010. The <em>Lanius excubitor </em>(Aves, Passeriformes) conundrum—taxonomic dilemma when molecular and non-molecular data tell different stories. Molecular Phylogenetics and Evolution 55: 347–357.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2011. Complex biogeographic history of <em>Lanius</em> shrikes and its implications for the evolution of defenses against avian brood parasitism. Condor 113: 385–394.</p>
<p> </p>
<p>page 700, <strong>Piopio <em>Turnagra capensis</em></strong></p>
<p>The extinct genus <em>Turnagra</em>, which we previously treated as a single, monotypic species, Piopio <em>Turnagra capensis</em>, is split into two species, following Oliver (1955), Holdaway et al. (2001), and Gill et al. (2010): a monotypic North Island Piopio <em>Turnagra tanagra</em>, with range “Formerly New Zealand (North Island). Extinct; last confirmed report in 1902”; and a polytypic South Island Piopio <em>Turnagra capensis</em>, with subspecies <em>minor</em> and <em>capensis</em>.</p>
<p>Add a previously overlooked subspecies, <em>Turnagra capensis minor</em>, with range “Formerly New Zealand (Stephens Island). Extinct; last reported 1897” (Gill et al. 2010).</p>
<p>With the split of <em>Turnagra</em> into two species, and the addition of subspecies <em>minor</em>, revise the range description of nominate <em>capensis</em> from “Formerly New Zealand. Extinct; last reported 1963” to “Formerly New Zealand (South Island). Extinct; last confirmed report in 1905”.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME> (Checklist Committee, Ornithological Society of New Zealand). 2010. Checklist of the birds of New Zealand. Te Papa Press and the Ornithological Society of New Zealand, Wellington, New Zealand.</p>
<p><NAME>., <NAME>, and <NAME>. 2001. A working list of breeding bird species of the New Zealand region at first human contact. New Zealand Journal of Zoology 28:</p>
<p>119-187.</p>
<p><NAME>. 1955. New Zealand birds. A.H. & <NAME>, Wellington, New Zealand.</p>
<p> </p>
<p>page 479, <strong>Silktail <em>Lamprolia victoriae</em></strong></p>
<p>Silktail <em>Lamprolia victoriae</em> is split into monotypic species, following Andersen et al. (2015b, 2017): Taveuni Silktail <em>Lamprolia victoriae</em>, and Natewa Silktail <em>Lamprolia klinesmithi</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2015b. Phylogeny of the monarch flycatchers reveals extensive paraphyly and novel relationships within a major Australo-Pacific radiation. Molecular Phylogenetics and Evolution 67: 336–347.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2017. Conservation genomics of the silktail (Aves: <em>Lamprolia victoriae</em>) suggests the need for increased protection of native forest on the Natewa Peninsula, Fiji. Conservation Genetics in press: doi:10.1007/s10592-017-0979-x.</p>
<p> </p>
<p>page 584, <strong>Superb Bird-of-Paradise <em>Lophorina superba</em></strong></p>
<p>Superb Bird-of-Paradise is split into three species, following Irestedt et al. (2017). Confusingly, the name <em>superba</em> also is transferred from one population to another (Irestedt et al. 2017). The resulting species are Vogelkop Superb Bird-of-Paradise <em>Lophorina niedda</em>, with subspecies <em>niedda</em> and the newly described subspecies <em>inopinata</em>; Greater Superb Bird-of-Paradise <em>Lophorina superba</em>, with subspecies <em>superba, addenda</em>, and <em>latipennis</em>; and a monotypic Lesser Superb Bird-of-Paradise <em>Lophorina minor</em>.</p>
<p>Under Vogelkop Superb Bird-of-Paradise (<em>Lophorina niedda</em>), add a newly described subspecies, <em>Lophorina niedda inopinata</em> Irestedt et al. 2017, with range “mountains of the Bird’s Head Peninsula, West Papua, New Guinea”. Insert this subspecies immediately following the entry for the species Vogelkop Superb Bird-of-Paradise <em>Lophorina niedda</em>. Note that the range attributed to this subspecies corresponds to the range formerly attributed to subspecies <em>superba</em>, a name that now is applied to a population in the western highlands of New Guinea, and which represents a different species, Greater Superb Bird-of-Paradise.</p>
<p>Revise the range description of subspecies <em>niedda </em>from “W New Guinea (Mt. Wondiwoi in Wandammen Peninsula)” to “mountains of the Wandammen Peninsula, Bird’s Neck, West Papua, New Guinea”.</p>
<p>Regarding Greater Superb Bird-of-Paradise (<em>Lophorina superba</em>), Irestedt et al. (2017) conclude that the name <em>superba</em>, previously applied to the population in the mountains of the Bird’s Head Peninsula, instead should refer to the population of the central highlands of New Guinea. Also, subspecies <em>feminina</em>, with range “W New Guinea (Weyland Mts. to Hindenberg Mts.)”, is considered to be a junior synonym of <em>superba</em> (Irestedt et al. 2017). Revise the range description of <em>superba</em> from “W New Guinea (Arfak and Tamrau mountains)” to “montane western New Guinea, from the Kobowre Mountains (West Papua, Indonesia) to the Sepik-Strickland River Divide (western Papua New Guinea)”.</p>
<p>Following Irestedt et al. (2017), resurrect subspecies <em>addenda</em> Iredale 1948, previously considered to be a synonym of <em>feminina</em> (Mayr 1962), with range “eastern ranges of New Guinea, from the Yuat-Strickland River Divide and the base of the southeastern Peninsula, Papua New Guinea”. Insert subspecies <em>addenda</em> immediately following subspecies <em>superba</em>.</p>
<p>Revise the range description of subspecies <em>latipennis</em> from “E New Guinea (Central and E Highlands to mts. of Huon Pen.)” to “eastern New Guinea (mountains of the Huon Peninsula, and presumably also the Herzog and Adelbert Ranges)”.</p>
<p>Lesser Superb Bird-of-Paradise (<em>Lophorina minor</em>) is monotypic. Subspecies <em>sphinx</em>, known from a single specimen, with range “Mountains of extreme se New Guinea”, is considered to be a junior synonym of <em>minor</em> (Irestedt et al. 2017), and is deleted. Revise the range description of <em>minor</em> from “Mountains of se Papua New Guinea” to “southeastern Papua New Guinea (mountains of the Papuan Peninsula, west at least to the Wharton Range)”.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2017. Phylogeny, biogeography and taxonomic consequences in a bird-of-paradise species complex, <em>Lophorina-Ptiloris (</em>Aves: Paradisaeidae). Zoological Journal of the Linnean Society in press.</p>
<p>Mayr, E. 1962. <a href="http://biodiversitylibrary.org/page/14485566">Family Paradisaeidae, Birds of Paradise</a>. Pages 181-204 in E. Mayr and <NAME>, Jr. (editors), Check-list of the birds of the world. Volume XV. Museum of Comparative Zoology, Cambridge, Massachusetts.</p>
<p> </p>
<p>page 584, <strong>Magnificent Riflebird <em>Ptiloris magnificus</em></strong></p>
<p>Each of the two groups in Magnificent Riflebird is recognized as a separate species, following Beehler and Swaby (1991), Beehler and Pratt (2016), and Irestedt et al. (2017): a polytypic Magnificent Riflebird <em>Ptiloris magnificus</em>, including subspecies <em>magnificus</em> and <em>alberti</em>; and a monotypic Growling Riflebird <em>Ptiloris intercedens</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2017. Phylogeny, biogeography and taxonomic consequences in a bird-of-paradise species complex, <em>Lophorina-Ptiloris (</em>Aves: Paradisaeidae). Zoological Journal of the Linnean Society in press.</p>
<p><NAME>., and <NAME>. 2016. Birds of New Guinea: distribution, taxonomy, and systematics. Princeton University Press, Princeton, New Jersey.</p>
<p>Beehler, B.M., and <NAME>. 1991. <a href="https://sora.unm.edu/sites/default/files/journals/condor/v093n03/p0738-p0745.pdf">Phylogeny and biogeography of the <em>Ptiloris</em> riflebirds (Aves: Paradisaeidae)</a>. Condor 93: 738-745.</p>
<p> </p>
<p>page 378, <strong>Streak-eared Bulbul <em>Pycnonotus blanfordi</em></strong></p>
<p>Streak-eared Bulbul <em>Pycnonotus blanfordi</em> is split into two monotypic species, Ayeyarwady Bulbul <em>Pycnonotus blanfordi</em>, and Streak-eared Bulbul <em>Pycnonotus conradi</em> (Garg et al. 2016). Note that the English name “Streak-eared Bulbul” now is applied to a different scientific name (<em>conradi</em>, not <em>blanfordi</em>).</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2016. Genome-wide data help identify an avian species-level lineage that is morphologically and vocally cryptic. Molecular Phylogenetics and Evolution 102: 97-103.</p>
<p> </p>
<p>page 383, <strong>Buff-vented Bulbul <em>Iole olivacea</em></strong></p>
<p>Buff-vented Bulbul has been listed as a monotypic species, <em>Iole olivacea</em>, since subspecies were introduced to the eBird/Clements Checklist (Clements Checklist fifth edition, 2000). The species name is <em>charlottae</em> (Dickinson and Christidis 2014), not <em>olivacea</em>, however, and the species should have been considered to be polytypic, with subspecies <em>crypta</em> and <em>charlottae</em> (Rand and Deignan 1960, Dickinson and Christidis 2014). Manawatthana et al. (2017) now demonstrate that <em>crypta</em> and <em>charlottae</em> each should be recognized as a separate species. The English name Buff-vented Bulbul remains with <em>Iole crypta</em>. Revise the range description of Buff-vented Bulbul from “Malay Peninsula, Sumatra, Borneo and adjacent islands” to “Thai-Malay Peninsula, Sumatra, Bangka and Belitung, Anambas Islands, and Natuna”. The English name of <em>Iole charlottae</em> is Charlotte’s Bulbul. Revise the range description of Charlotte’s Bulbul from “Malay Peninsula, Sumatra, Borneo and adjacent islands” to “Borneo”.</p>
<p>References:</p>
<p><NAME>., and <NAME>. 2014. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 2. Aves Press, Eastbourne, United Kingdom.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2017. Phylogeography of bulbuls in the genus <em>Iole</em> (Aves: Pycnonotidae). Biological Journal of the Linnean Society 120: 931-944.</p>
<p><NAME>., and <NAME>. 1960. <a href="http://biodiversitylibrary.org/page/14480960">Family Pycnonotidae</a>. Pages 221-300 in E. Mayr and <NAME>, Jr. (editors), Check-list of birds of the world. Volume IX. Museum of Comparative Zoology, Cambridge, Massachusetts.</p>
<p> </p>
<p>page 383, <strong>Olive Bulbul <em>Iole virescens</em></strong></p>
<p>Olive Bulbul <em>Iole virescens</em> is split into two species, following Manawatthana et al. (2017): a monotypic Cachar Bulbul <em>Iole cacharensis</em>; and Olive Bulbul <em>Iole viridescens</em>. Revise the range of Cachar Bulbul from “northeastern India (Assam); population in eastern Bangladesh possibly also this subspecies (or is nominate virescens?)” to “northeastern India (Assam) and eastern Bangladesh”.</p>
<p>Olive Bulbul contains three subspecies: <em>viridescens</em>, and two subspecies that previously were classified under Gray-eyed Bulbul (<em>Iole propinqua</em>), <em>lekhakuni</em> and <em>cinnamomeoventris</em>.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2017. Phylogeography of bulbuls in the genus <em>Iole</em> (Aves: Pycnonotidae). Biological Journal of the Linnean Society 120: 931-944.</p>
<p> </p>
<p>page 555, <strong>Gray-brown White-eye <em>Zosterops cinereus</em></strong></p>
<p>Gray-brown White-eye <em>Zosterops cinereus</em> is split into two species, following Hayes et al. (2016): Pohnpei White-eye <em>Zosterops ponapensis</em>, and Kosrae White-eye <em>Zosterops cinereus</em>.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, and <NAME>. 2016. The avifauna of Kosrae, Micronesia: history, status, and taxonomy. Pacific Science 70: 91–127.</p>
<p> </p>
<p>page 491, <strong>Black-chinned Laughingthrush <em>Trochalopteron cachinnans</em></strong></p>
<p>Black-chinned Laughingthrush <em>Trochalopteron cachinnans</em> does not belong in the genus <em>Trochalopteron</em>, but instead is placed in the newly described genus <em>Montecincla</em> (Robin et al. 2017). Position <em>Montecincla</em> immediately following Red-tailed Laughingthrush <em>Trochalopteron milnei</em>. Each of the two monotypic groups of Black-chinned Laughingthrush is elevated to species rank (Praveen and Nameer 2012, Robin et al. 2017): Black-chinned Laughingthrush (Banasura) <em>Trochalopteron cachinnans jerdoni</em> becomes Banasura Laughingthrush <em>Montecincla jerdoni</em>; and Black-chinned Laughingthrush (Nilgiri) <em>Trochalopteron cachinnans cachinnans </em>becomes Nilgiri Laughingthrush <em>Montecincla cachinnans</em>.</p>
<p>References:</p>
<p><NAME>., and P.O. Nameer. 2012. <em>Strophocincla</em> laughingthrushes of south India: a case for allopatric speciation and impact on their conservation. Journal of the Bombay Natural History Society 109: 46-52.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2017. <a href="https://bmcevolbiol.biomedcentral.com/track/pdf/10.1186/s12862-017-0882-6?site=bmcevolbiol.biomedcentral.com">Two new genera of songbirds represent endemic radiations from the Shola Sky Islands of the Western Ghats, India</a>. BMC Evolutionary Biology 17: 31.</p>
<p> </p>
<p>page 491, <strong>Kerala Laughingthrush <em>Trochalopteron fairbanki</em></strong></p>
<p>Kerala Laughingthrush <em>Trochalopteron fairbanki</em> does not belong in the genus <em>Trochalopteron</em>, but instead is placed in the newly described genus <em>Montecincla</em> (Robin et al. 2017). Position <em>Montecincla </em>immediately following Red-tailed Laughingthrush <em>Trochalopteron milnei</em>. Each of the two monotypic groups of Kerala Laughingthrush is elevated to species rank (Praveen and Nameer 2012, Robin et al. 2017): Kerala Laughingthrush (Palani) <em>Trochalopteron fairbanki fairbanki</em> becomes Palani Laughingthrush <em>Montecincla fairbanki</em>; and Kerala Laughingthrush (Travancore) <em>Trochalopteron fairbanki meridionale</em> becomes Ashambu Laughingthrush <em>Montecincla meridionale</em>.</p>
<p>References:</p>
<p><NAME>., and P.O. Nameer. 2012. <em>Strophocincla</em> laughingthrushes of south India: a case for allopatric speciation and impact on their conservation. Journal of the Bombay Natural History Society 109: 46-52.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2017. <a href="https://bmcevolbiol.biomedcentral.com/track/pdf/10.1186/s12862-017-0882-6?site=bmcevolbiol.biomedcentral.com">Two new genera of songbirds represent endemic radiations from the Shola Sky Islands of the Western Ghats, India</a>. BMC Evolutionary Biology 17: 31.</p>
<p> </p>
<p>page 453, <strong>Blue-throated Flycatcher <em>Cyornis rubeculoides</em></strong></p>
<p>The monotypic group Blue-throated Flycatcher (Chinese) <em>Cyornis rubeculoides glaucicomans</em> is elevated to species rank as Chinese Blue Flycatcher <em>Cyornis glaucicomans</em> (Zhang et al. 2015). Revise the range description from “S China (Sichuan, Guizhou, w Hubei and Shaanxi)” to “breeds southern China (southern Shaanxi and western Hubei to Sichuan and Guizhou); winters southwestern Thailand and the Thai-Malay Peninsula”.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2015. Unexpected divergence and lack of divergence revealed in continental Asian <em>Cyornis</em> flycatchers (Aves: Muscicapidae). Molecular Phylogenetics and Evolution 94: 232-241.</p>
<p> </p>
<p>page 456, <strong>White-tailed Rubythroat <em>Calliope pectoralis</em></strong></p>
<p>White-tailed Rubythroat <em>Calliope pectoralis</em> is split into two species, based on Liu et al. (2016): a polytypic Himalayan Rubythroat <em>Calliope pectoralis</em>, including subspecies <em>pectoralis, confusa</em>, and <em>ballioni</em>; and a monotypic Chinese Rubythroat <em>Calliope tschebaiewi</em>.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2016. Species delimitation of the white-tailed rubythroat <em>Calliope pectoralis</em> complex (Aves, Muscicapidae) using an integrative taxonomic approach. Journal of Avian Biology 47: 899-910.</p>
<p> </p>
<p>page 687, <strong>Sharp-beaked Ground-Finch <em>Geospiza difficilis</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://museum.lsu.edu/~Remsen/SACCprop676.htm">Proposal 676</a>), Sharp-beaked Ground-Finch <em>Geospiza difficilis</em> is split into three monotypic species, based on Lamichhaney et al. (2015). AOS-SACC has not yet determined English names for these species; provisionally we use the following nomenclature: Vampire Ground-Finch <em>Geospiza septentrionalis</em>; Genovesa Ground-Finch <em>Geospiza acutirostris</em>; and Sharp-beaked Ground-Finch <em>Geospiza difficilis</em>.</p>
<p class="p1"><span class="s1">Revise the range description of Sharp-beaked Ground-Finch from “Galapagos Islands (Pinta, Fernandina, Isabela, and Santiago Is.)” to “Galapagos Islands (Pinta, Fernandina, and Santiago Islands; formerly also Santa Cruz Island, this population now extinct).”</span></p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, C.-J. Rubin, <NAME>, C., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2015. Evolution of Darwin’s finches and their beaks revealed by genome sequencing. Nature 518: 371–375.</p>
<p> </p>
<p>page 687, <strong>Large Cactus-Finch <em>Geospiza conirostris</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://museum.lsu.edu/~Remsen/SACCprop676.htm">Proposal 676</a>), Large Cactus-Finch <em>Geospiza conirostris</em> is split into two species, based on Lamichhaney et al. (2015). AOS-SACC has not yet determined English names for these species; provisionally we use the following nomenclature: Española Cactus-Finch <em>Geospiza conirostris</em>, which is monotypic; and Genovesa Cactus-Finch <em>Geospiza propinqua</em>, which includes subspecies <em>propinqua</em> and <em>darwini</em>. <span class="s1">Note the change of the spelling of the subspecies name <i>darwinii</i> to the correct original spelling <i>darwini </i>(Rothschild and Hartert 1899).</span></p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, C.-J. Rubin, <NAME>, C., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2015. Evolution of Darwin’s finches and their beaks revealed by genome sequencing. Nature 518: 371–375.</p>
<p class="p1"><span class="s1"><NAME>., and <NAME>. 1899. <a href="https://biodiversitylibrary.org/page/3259006"><span class="s2">A review of the ornithology of the Galapagos Islands, with notes on Webster-Harris Expedition</span>.</a> Novitates Zoologicae 6: 85-205.</span></p>
<p>page 687, <strong>Yellow-eyed Junco <em>Junco phaeonotus</em></strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2017), the monotypic group Yellow-eyed Junco (Baird’s) <em>Junco phaeonotus bairdi</em> is elevated to species rank as Baird’s Junco <em>Junco bairdi</em>. This split is based on “morphology (Miller 1941), vocalizations (Howell and Webb 1995, Pieplow and Francis 2011), and genetics (McCormack et al. 2012, Friis et al. 2016, Milá et al. 2016)” (Chesser et al. 2017).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site">Fifty-eighth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2016. Rapid postglacial diversification and long-term stasis within the songbird genus <em>Junco</em>: phylogeographic and phylogenomic evidence. Molecular Ecology 25: 6175–6195.</p>
<p><NAME>., and <NAME>. 1995. A guide to the birds of Mexico and northern Central America. Oxford University Press, New York.</p>
<p>McCormack, J.E., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2012. Next-generation sequencing reveals phylogeographic structure and a species tree for recent bird divergences. Molecular Phylogenetics and Evolution 62: 397-406.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2016. More than meets the eye: lineage diversity and evolutionary history of dark-eyed and yellow-eyed juncos. Pages 179-198 in <NAME> and <NAME> (editors), Snowbird. University of Chicago Press, Chicago.</p>
<p><NAME>. 1941. Speciation in the avian genus <em>Junco</em>. University of California Publications in Zoology 44: 173-434.</p>
<p><NAME>., and <NAME>. 2011. Song differences among subspecies of Yellow-eyed Juncos (<em>Junco phaeonotus</em>). Wilson Journal of Ornithology 123: 464-471.</p>
<p> </p>
<p>page 680, <strong>Prevost’s Ground-Sparrow <em>Melozone biarcuata</em></strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2017), Prevost’s Ground-Sparrow <em>Melozone biarcuata</em> is split into two species: White-faced Ground-Sparrow <em>Melozone biarcuata</em>, and Cabanis’s Ground-Sparrow <em>Melozone cabanisi</em>. This action is based on Sandoval et al. (2014), who documented vocal differences between these two populations; furthermore, the plumage differences between them are commensurate with differences between other closely related species in the family. Following Sandoval et al. (2014), we also consider subspecies <em>hartwegi</em>, with range “Highlands of s Mexico (Chiapas)”, to be a junior synonym of nominate <em>biarcuata</em>, and this subspecies is deleted. White-faced Ground-Sparrow thus becomes monotypic. Revise the range description of White-faced Ground-Sparrow from “Highlands of Guatemala, El Salvador and w Honduras” to “highlands of southern Mexico (Chiapas), Guatemala, El Salvador, and western Honduras”.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site">Fifty-eighth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2014. Analysis of plumage, morphology, and voice reveals species-level differences between two subspecies of Prevost’s Ground-Sparrow <em>Melozone biarcuata</em> (Prévost and Des Murs) (Aves: Emberizidae). Zootaxa 3895: 103–116.</p>
<p> </p>
<p>page (addition 2009), <strong>Red Crossbill <em>Loxia curvirostra</em></strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2017), the recently described monotypic group, Red Crossbill (South Hills or type 9) <em>Loxia curvirostra sinesciuris</em> is elevated to species rank as Cassia Crossbill <em>Loxia sinesciuris</em>. This split is based on evidence for premating reproductive isolation in the face of sympatry with Red Crossbill (Smith and Benkman 2007, Benkman et al. 2009), and on genomic differences (Parchman et al. 2016).</p>
<p>References:</p>
<p>Benkman, C.W., <NAME>, <NAME>, <NAME>, and <NAME>. 2009. A new species of red crossbill (Fringillidae: <em>Loxia</em>) from Idaho. Condor 111: 169–176.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site">Fifty-eighth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2016. Genome divergence and diversification within a geographic mosaic of coevolution. Molecular Ecology 25: 5705-5718.</p>
<p><NAME>., and <NAME>. 2007. A coevolutionary arms race causes ecological speciation in crossbills. American Naturalist 169: 455–465.</p>
<p> </p>
<p> </p>
<p><strong>SPECIES LOSSES (lumps and other deletions)</strong></p>
<p><strong> </strong></p>
<p>page 101, <strong>Thayer’s Gull <em>Larus thayeri</em></strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2017), Thayer’s Gull <em>Larus thayeri</em> is lumped with Iceland Gull <em>Larus glaucoides</em>, and becomes <em>Larus glaucoides thayeri</em>. We continue to recognize this taxon as a monotypic group, Iceland Gull (Thayer’s) <em>Larus glaucoides thayeri</em>. This lump is based on “evidence of non-assortative mating between <em>thayeri</em> and <em>kumlieni</em> on Baffin and Southampton islands (Weber 1981, Gaston and Decker 1985, Snell 1989)” (Chesser et al. 2017).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site">Fifty-eighth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p><NAME>., and <NAME>. 1985. Interbreeding of Thayer’s Gull, <em>Larus thayeri</em>, and Kumlien’s Gull, <em>Larus glaucoides kumlieni</em> on Southampton Island, Northwest Territories. Canadian Field-Naturalist 99: 257-259.</p>
<p>Snell, R.R. 1989. Status of <em>Larus</em> gulls at Home Bay, Baffin Island. Colonial Waterbirds 112: 12-23.</p>
<p>Weber, J.W. 1981. The <em>Larus </em>gulls of the Pacific Northwest interior, with taxonomic comments on several forms (Part 1). Continental Birdlife 2: 1-10.</p>
<p> </p>
<p>page 536, <strong>Western Olive Sunbird <em>Cyanomitra obscura</em></strong></p>
<p>Western Olive Sunbird <em>Cyanomitra obscura</em> is lumped with Eastern Olive Sunbird <em>Cyanomitra olivacea</em>, following Bowie et al. (2004). The combined species becomes Olive Sunbird <em>Cyanomitra olivacea</em>. The ranges of many subspecies are revised, following Fry and Keith (2000), and the sequence of subspecies within Olive Sunbird is reordered.</p>
<p>Revise the range description of subspecies <em>guineensis</em> from “Guinea-Bissau to Togo” to “Senegal to Togo”. Change the scientific name from <em>Cyanomitra obscura guineensis</em> to <em>Cyanomitra olivacea guineensis</em>.</p>
<p>Revise the range description of subspecies <em>cephaelis</em> from “Ghana to Gabon, Democratic Republic of the Congo, n Angola and Congo Basin” to “Benin to northern Angola and the Congo Basin”. Change the scientific name from <em>Cyanomitra obscura cephaelis</em> to <em>Cyanomitra olivacea cephaelis</em>.</p>
<p>Change the scientific name of subspecies <em>obscura</em> from <em>Cyanomitra obscura obscura</em> to <em>Cyanomitra olivacea obscura</em>.</p>
<p>Revise the range description of subspecies <em>ragazzii</em> from “S Sudan to Uganda, w Kenya, w Tanzania, e Democratic Republic of the Congo and n Zambia” to “southern South Sudan and southwestern Ethiopia to Uganda, western Kenya, western Tanzania, eastern Democratic Republic of the Congo, and northern Zambia”. Change the scientific name from <em>Cyanomitra obscura ragazzii</em> to <em>Cyanomitra olivacea ragazzii</em>.</p>
<p>Revise the range description of subspecies <em>neglecta</em> from “Highlands of ne Kenya to ne Tanzania” to “central Kenya to northeastern Tanzania”.</p>
<p>Change the scientific name of subspecies <em>granti</em> from <em>Cyanomitra obscura granti</em> to <em>Cyanomitra olivacea granti</em>.</p>
<p>Revise the range description of subspecies <em>sclateri </em>from “Mts. of e Zimbabwe and immediately adjacent Mozambique” to “eastern Zimbabwe and west central Mozambique”. Change the scientific name from <em>Cyanomitra obscura sclateri</em> to <em>Cyanomitra olivacea sclateri</em>.</p>
<p>Revise the range description of subspecies <em>olivacina</em> from “coastal southern Tanzania to southern Mozambique and northeastern South Africa (northeastern KwaZulu-Natal)” to “northeastern South Africa (northeastern KwaZulu-Natal) and southern Mozambique”.</p>
<p>Revise the range description of nominate <em>olivacea</em> from “South Africa (Pondoland to Natal and s Zululand)” to “eastern South Africa (southern KwaZulu-Natal to Eastern Cape) and Swaziland”.</p>
<p>References:</p>
<p>Bowie, R.C.K., <NAME>, <NAME>, and <NAME>. 2004. Molecular evolution in space and through time: mtDNA phylogeography of the Olive Sunbird (<em>Nectarinia olivacea/obscura</em>) throughout continental Africa. Molecular Phylogenetics and Evolution 33: 56-74.</p>
<p><NAME>., and <NAME> (editors). 2000. The birds of Africa. Volume VI. Academic Press, London.</p>
<p> </p>
<p><strong>ORDERS AND FAMILIES</strong></p>
<p><strong>ORDERS (newly recognized orders)</strong></p>
<p>page 34, <strong>Cathartiformes Cathartidae (New World Vultures)</strong></p>
<p>In accord with AOU-NACC (Chesser et al. 2016), the New World Vultures Cathartidae are removed from Accipitriformes and are placed in a new order, Cathartiformes, based on the very deep phylogenomic divergence between vultures and the rest of Accipitriformes (Jarvis et al. 2014, Prum et al. 2015). The position of New World Vultures in the linear sequence does not change.</p>
<p>References:</p>
<p>Chesser, R.T., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2016. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-16-77.1">Fifty-seventh supplement to the American Ornithologists’ Union <em>Check-list of North American birds</em></a>. Auk 133: 544-560.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>’Brien, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Whole-genome analyses resolve early branches in the tree of life of modern birds. Science 346: 1320-1331.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2015. A comprehensive phylogeny of birds (Aves) using targeted next-generation DNA sequencing. Nature 526: 569-573.</p>
<p><strong>FAMILIES (newly recognized families)</strong></p>
<p>pages 493, 496 <strong>Modulatricidae (Dapple-throat and Allies)</strong></p>
<p>Spot-throat <em>Modulatrix stictigula</em>, Dapple-throat <em>Arcanator orostruthus</em>, and Gray-chested Babbler <em>Kakamega poliothorax</em> are removed from Promeropidae (Sugarbirds), and are placed in a newly recognized family, Modulatricidae (Dapple-throat and Allies), following Fjeldså et al. (2015).</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2015. Three new bird family names. Pages 33–34 in <NAME>, <NAME>, and <NAME>, Bird families of the world. Lynx Edicions, Barcelona.</p>
<p> </p>
<p>page 652, <strong>Rhodinocichlidae (Thrush-Tanager)</strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2017), Rosy Thrush-Tanager <em>Rhodinocichla rosea</em> is removed from Thraupidae (Tanagers and Allies) and is placed in a new monotypic family, Rhodinocichlidae (Thrush-Tanager), following Barker et al. (2013, 2015). Position Rhodinocichlidae to immediately follow McKay’s Bunting <em>Plectrophenax hyperboreus</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2013. Going to extremes: contrasting rates of diversification in a recent radiation of New World passerine birds. Systematic Biology 62: 298-320.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2015. <a href="http://www.bioone.org/doi/pdf/10.1642/AUK-14-110.1">New insights into New World biogeography: an integrated view from the phylogeny of blackbirds, cardinals, sparrows, tanagers, warblers, and allies</a>. Auk 132: 333-348.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site">Fifty-eighth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p> </p>
<p>pages 649-650, 664, 676-687, <strong>Passerellidae (New World Buntings and Sparrows)</strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2017), all New World species of Emberizidae (Buntings and New World Sparrows) are recognized as a separate family, Passerellidae (New World Buntings and Sparrows), following Barker et al. (2013, 2015). AOS-NACC also revised the linear sequence of families in the nine-primaried oscines (Chesser et al. 2017), but we defer completely following the new sequence until our next release (August 2018). In the interim, the relative positions of Passerellidae and Emberizidae do not change: Passerellidae immediately follows Thraupidae (Tanagers and Allies), and Emberizidae immediately follows Passerellidae. Change the English name of Emberizidae from “Buntings and New World Sparrows” to “Old World Buntings”.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2013. Going to extremes: contrasting rates of diversification in a recent radiation of New World passerine birds. Systematic Biology 62: 298-320.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2015. <a href="http://www.bioone.org/doi/pdf/10.1642/AUK-14-110.1">New insights into New World biogeography: an integrated view from the phylogeny of blackbirds, cardinals, sparrows, tanagers, warblers, and allies</a>. Auk 132: 333-348.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site">Fifty-eighth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p> </p>
<p>page 652, <strong>Calyptophilidae (Chat-Tanagers)</strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2017), the two species of chat-tanagers (<em>Calyptophilus</em>) are removed from Thraupidae (Tanagers and Allies) and are placed in a new family, Calyptophilidae (Chat-Tanagers), following Barker et al. (2013, 2015). AOS-NACC also revised the linear sequence of families in the nine-primaried oscines (Chesser et al. 2017), but we defer completely following the new sequence until our next release (August 2018). In the interim, position Calyptophilidae to follow Emberizidae (Old World Buntings).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2013. Going to extremes: contrasting rates of diversification in a recent radiation of New World passerine birds. Systematic Biology 62: 298-320.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2015. <a href="http://www.bioone.org/doi/pdf/10.1642/AUK-14-110.1">New insights into New World biogeography: an integrated view from the phylogeny of blackbirds, cardinals, sparrows, tanagers, warblers, and allies</a>. Auk 132: 333-348.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site">Fifty-eighth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p> </p>
<p>page 643, 647, 652, <strong>Phaenicophilidae (Hispaniolan Tanagers)</strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2017), the palm-tanagers (<em>Phaenicophilus</em>) are removed from Thraupidae (Tanagers and Allies), and the genera <em>Xenoligea</em> and <em>Microligea</em> are removed from Parulidae (New World Warblers). All three genera are placed in a new family, Phaenicophilidae (Hispaniolan Tanagers), following Barker et al. (2013, 2015). The sequence of species in Phaenicophilidae is Black-crowned Palm-Tanager (<em>Phaenicophilus palmarum</em>),</p>
<p>Gray-crowned Palm-Tanager (<em>Phaenicophilus poliocephalus</em>), White-winged Warbler (<em>Xenoligea montana</em>), and Green-tailed Warbler (<em>Microligea palustris</em>). AOS-NACC also revised the linear sequence of families in the nine-primaried oscines (Chesser et al. 2017), but we defer completely following the new sequence until our next release (August 2018). In the interim, position Phaenicophilidae to follow Emberizidae (Old World Buntings) and Calyptophilidae (Chat-Tanagers).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2013. Going to extremes: contrasting rates of diversification in a recent radiation of New World passerine birds. Systematic Biology 62: 298-320.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2015. <a href="http://www.bioone.org/doi/pdf/10.1642/AUK-14-110.1">New insights into New World biogeography: an integrated view from the phylogeny of blackbirds, cardinals, sparrows, tanagers, warblers, and allies</a>. Auk 132: 333-348.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site">Fifty-eighth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p> </p>
<p>page 649, <strong>Nesospingidae (Puerto Rican Tanager)</strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2017), Puerto Rican Tanager (<em>Nesospingus speculiferus</em>) is removed from Thraupidae (Tanagers and Allies) and is placed in a new monotypic family, Nesospingidae (Puerto Rican Tanager), following Barker et al. (2013, 2015). AOS-NACC also revised the linear sequence of families in the nine-primaried oscines (Chesser et al. 2017), but we defer completely following the new sequence until our next release (August 2018). In the interim, position Nesospingidae to follow Emberizidae (Old World Buntings), Calyptophilidae (Chat-Tanagers), and Phaenicophilidae (Hispaniolan Tanagers).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2013. Going to extremes: contrasting rates of diversification in a recent radiation of New World passerine birds. Systematic Biology 62: 298-320.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2015. <a href="http://www.bioone.org/doi/pdf/10.1642/AUK-14-110.1">New insights into New World biogeography: an integrated view from the phylogeny of blackbirds, cardinals, sparrows, tanagers, warblers, and allies</a>. Auk 132: 333-348.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site">Fifty-eighth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p> </p>
<p>page 655, <strong>Spindalidae (Spindalises)</strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2017), the genus <em>Spindalis</em> is removed from Thraupidae (Tanagers and Allies) and is placed in a new family, Spindalidae (Spindalises), following Barker et al. (2013, 2015). AOS-NACC also revised the linear sequence of families in the nine-primaried oscines (Chesser et al. 2017), but we defer completely following the new sequence until our next release (August 2018). In the interim, position <span class="s1">Spindalidae </span>to follow Emberizidae (Old World Buntings), Calyptophilidae (Chat-Tanagers), Phaenicophilidae (Hispaniolan Tanagers), and Nesospingidae (Puerto Rican Tanager).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2013. Going to extremes: contrasting rates of diversification in a recent radiation of New World passerine birds. Systematic Biology 62: 298-320.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2015. <a href="http://www.bioone.org/doi/pdf/10.1642/AUK-14-110.1">New insights into New World biogeography: an integrated view from the phylogeny of blackbirds, cardinals, sparrows, tanagers, warblers, and allies</a>. Auk 132: 333-348.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site">Fifty-eighth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p> </p>
<p>page 646, <strong>Zeledoniidae (Wrenthrush)</strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2017), Wrenthrush (<em>Zeledonia coronata</em>) is removed from Parulidae (New World Warblers) and is placed in a new family, Zeledoniidae (Wrenthrush), following Barker et al. (2013, 2015). AOS-NACC also revised the linear sequence of families in the nine-primaried oscines (Chesser et al. 2017), but we defer completely following the new sequence until our next release (August 2018). In the interim, position Zeledoniidae to follow Emberizidae (Old World Buntings), Calyptophilidae (Chat-Tanagers), Phaenicophilidae (Hispaniolan Tanagers), Nesospingidae (Puerto Rican Tanager), and Spindalidae (Spindalises).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2013. Going to extremes: contrasting rates of diversification in a recent radiation of New World passerine birds. Systematic Biology 62: 298-320.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2015. <a href="http://www.bioone.org/doi/pdf/10.1642/AUK-14-110.1">New insights into New World biogeography: an integrated view from the phylogeny of blackbirds, cardinals, sparrows, tanagers, warblers, and allies</a>. Auk 132: 333-348.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site">Fifty-eighth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p> </p>
<p>page 643, <strong>Teretistridae (Cuban Warblers)</strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2017), the genus <em>Teretistris</em> is removed from Parulidae (New World Warblers) and is placed in a new family, Teretistridae (Cuban Warblers), following Barker et al. (2013, 2015). AOS-NACC also revised the linear sequence of families in the nine-primaried oscines (Chesser et al. 2017), but we defer completely following the new sequence until our next release (August 2018). In the interim, position Teretistridae to follow Emberizidae (Old World Buntings), Calyptophilidae (Chat-Tanagers), Phaenicophilidae (Hispaniolan Tanagers), Nesospingidae (Puerto Rican Tanager), Spindalidae (Spindalises), and Zeledoniidae (Wrenthrush).</p>
<p>References:</p>
<p>Barker, F.K., <NAME>, <NAME>, <NAME>anyon, and <NAME>. 2013. Going to extremes: contrasting rates of diversification in a recent radiation of New World passerine birds. Systematic Biology 62: 298-320.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2015. <a href="http://www.bioone.org/doi/pdf/10.1642/AUK-14-110.1">New insights into New World biogeography: an integrated view from the phylogeny of blackbirds, cardinals, sparrows, tanagers, warblers, and allies</a>. Auk 132: 333-348.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site">Fifty-eighth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p>page 646, <strong>Icteriidae (Yellow-breasted Chat)</strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2017), Yellow-breasted Chat (<em>Icteria virens</em>) is removed from Parulidae (New World Warblers) and is placed in a new monotypic family, Icteriidae (Yellow-breasted Chat), following Barker et al. (2013, 2015). AOS-NACC also revised the linear sequence of families in the nine-primaried oscines (Chesser et al. 2017), but we defer completely following the new sequence until our next release (August 2018). In the interim, position Icteriidae to follow Emberizidae (Old World Buntings), Calyptophilidae (Chat-Tanagers), Phaenicophilidae (Hispaniolan Tanagers), Nesospingidae (Puerto Rican Tanager), Spindalidae (Spindalises), Zeledoniidae (Wrenthrush), and Teretistridae (Cuban Warblers).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2013. Going to extremes: contrasting rates of diversification in a recent radiation of New World passerine birds. Systematic Biology 62: 298-320.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2015. <a href="http://www.bioone.org/doi/pdf/10.1642/AUK-14-110.1">New insights into New World biogeography: an integrated view from the phylogeny of blackbirds, cardinals, sparrows, tanagers, warblers, and allies</a>. Auk 132: 333-348.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site">Fifty-eighth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p> </p>
<p>page 652, <strong>Mitrospingidae (Mitrospingid Tanagers)</strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2017), the genus <em>Mitrospingus</em> is removed from Thraupidae (Tanagers and Allies) and is placed in a new family, Mitrospingidae (Mitrospingid Tanagers), following Barker et al. (2013, 2015). AOS-NACC also revised the linear sequence of families in the nine-primaried oscines (Chesser et al. 2017), but we defer completely following the new sequence until our next release (August 2018). In the interim, position Mitrospingidae to follow Emberizidae (Old World Buntings), Calyptophilidae (Chat-Tanagers), Phaenicophilidae (Hispaniolan Tanagers), Nesospingidae (Puerto Rican Tanager), Spindalidae (Spindalises), Zeledoniidae (Wrenthrush), Teretistridae (Cuban Warblers), and Icteriidae (Yellow-breasted Chat).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2013. Going to extremes: contrasting rates of diversification in a recent radiation of New World passerine birds. Systematic Biology 62: 298-320.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2015. <a href="http://www.bioone.org/doi/pdf/10.1642/AUK-14-110.1">New insights into New World biogeography: an integrated view from the phylogeny of blackbirds, cardinals, sparrows, tanagers, warblers, and allies</a>. Auk 132: 333-348.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site">Fifty-eighth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p> </p>
<p><strong>STANDARD UPDATES and CORRECTIONS</strong></p>
<p>page 2, <strong>Southern Brown Kiwi <em>Apteryx australis</em></strong></p>
<p>Change the English name for the monotypic group <em>Apteryx australis australis</em> from Southern Brown Kiwi (South Island) to Southern Brown Kiwi (South I.).</p>
<p>Change the English name for the monotypic group <em>Apteryx australis lawryi</em> from Southern Brown Kiwi (Stewart Island) to Southern Brown Kiwi (Stewart I.).</p>
<p> </p>
<p>page 27, <strong>geese, genera <em>Anser</em> and <em>Chen</em></strong></p>
<p>The sequence of species of geese in the genus <em>Anser </em>is revised, based on Ottenburghs et al. (2016); note that <em>Anser</em> now includes several species previously classified in <em>Chen</em> (as detailed below). The sequence that we adopt is:</p>
<p>Bar-headed Goose <em>Anser indicus</em></p>
<p>Emperor Goose <em>Anser canagicus</em></p>
<p>Snow Goose <em>Anser caerulescens</em></p>
<p>Ross’s Goose <em>Anser rossii</em></p>
<p>Graylag Goose <em>Anser anser</em></p>
<p>Swan Goose <em>Anser cygnoides</em></p>
<p>Greater White-fronted Goose <em>Anser albifrons</em></p>
<p>Lesser White-fronted Goose <em>Anser erythropus</em></p>
<p>Taiga Bean-Goose <em>Anser fabalis</em></p>
<p>Tundra Bean-Goose <em>Anser serrirostris</em></p>
<p>Pink-footed Goose <em>Anser brachyrhynchus</em></p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2016. A tree of geese: a phylogenomic perspective on the evolutionary history of true geese. Molecular Phylogenetics and Evolution 101: 303-313.</p>
<p> </p>
<p>page 27, <strong>Emperor Goose <em>Chen canagica</em></strong></p>
<p>page 27,<strong> Snow Goose <em>Chen caerulescens</em></strong></p>
<p>page 27,<strong> Ross’s Goose <em>Chen rossii</em></strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2017), the genus <em>Chen</em> is merged into <em>Anser</em>; this change is necessary because phylogenetic analysis of DNA sequence data reveals <em>Chen</em> is embedded within <em>Anser</em> (Ottenburghs et al. 2016). Change the scientific name of Emperor Goose from <em>Chen canagica</em> to <em>Anser canagicus</em>.</p>
<p>Change the scientific name of Snow Goose from <em>Chen caerulescens</em> to <em>Anser caerulescens</em>. Following the transfer of Snow Goose from <em>Chen</em> to <em>Anser</em>, change the subspecies name <em>atlantica</em> to <em>atlanticus</em>.</p>
<p>Change the scientific name of Ross’s Goose from <em>Chen rossii</em> to <em>Anser rossii</em>.</p>
<p>References:</p>
<p>Chesser, R.T., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site"><em>Fifty-eighth supplement to the American Ornithological Society’s Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2016. A tree of geese: a phylogenomic perspective on the evolutionary history of true geese. Molecular Phylogenetics and Evolution 101: 303-313.</p>
<p> </p>
<p>pages 27-28, <strong>Canada Goose <em>Branta canadensis</em></strong></p>
<p>The monotypic group Canada Goose (<em>parvipes</em>) <em>Branta canadensis parvipes</em> is merged into the polytypic group Canada Goose (<em>canadensis/interior</em>) <em>Branta canadensis canadensis/interior</em>. Change the English name of the expanded group to Canada Goose (<em>canadensis</em> Group), and the scientific name to <em>Branta canadensis</em> [<em>canadensis</em> Group].</p>
<p> </p>
<p>page 30, <strong>Baikal Teal <em>Anas formosa</em></strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2017), <em>Anas</em> is partitioned into four genera, based on phylogenetic analysis of mitochondrial DNA sequence data (Gonzalez et al. 2009). Change the scientific name of Baikal Teal from <em>Anas formosa</em> to <em>Sibirionetta formosa</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site">Fifty-eighth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p><NAME>., <NAME>, and <NAME>. 2009. Phylogenetic relationships based on two mitochondrial genes and hybridization patterns in Anatidae. Journal of Zoology 279: 310-318.</p>
<p> </p>
<p>page 31, <strong>Garganey <em>Anas querquedula</em></strong></p>
<p>page 31, <strong>Hottentot Teal <em>Anas hottentota</em></strong></p>
<p>page 31, <strong>Silver Teal <em>Anas versicolor</em></strong></p>
<p>page 31, <strong>Puna Teal <em>Anas puna</em></strong></p>
<p>page 31, <strong>Blue-winged Teal <em>Anas discors</em></strong></p>
<p>page 31, <strong>Cinnamon Teal <em>Anas cyanoptera</em></strong></p>
<p>page 31, <strong>Red Shoveler <em>Anas platalea</em></strong></p>
<p>page 31, <strong>Cape Shoveler <em>Anas smithii</em></strong></p>
<p>page 31, <strong>Australian Shoveler <em>Anas rhynchotis</em></strong></p>
<p>page 31, <strong>Northern Shoveler <em>Anas clypeata</em></strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2017), <em>Anas </em>is partitioned into four genera, based on phylogenetic analysis of mitochondrial DNA sequence data (Gonzalez et al. 2009). As a result, the blue-winged teals and the shovelers are placed in the genus <em>Spatula</em>. Change the scientific name of Garganey from <em>Anas querquedula</em> to <em>Spatula querquedula</em>.</p>
<p>Change the scientific name of Hottentot Teal from <em>Anas hottentota</em> to <em>Spatula hottentota</em>.</p>
<p>Change the scientific name of Silver Teal from <em>Anas versicolor </em>to <em>Spatula versicolor</em>.</p>
<p>Change the scientific name of Puna Teal from <em>Anas puna</em> to <em>Spatula puna</em>.</p>
<p>Change the scientific name of Blue-winged Teal from <em>Anas discors</em> to <em>Spatula discors</em>.</p>
<p>Change the scientific name of Cinnamon Teal from <em>Anas cyanoptera</em> to <em>Spatula cyanoptera</em>.</p>
<p>Change the scientific name of Red Shoveler from <em>Anas platalea</em> to <em>Spatula platalea</em>.</p>
<p>Change the scientific name of Cape Shoveler from <em>Anas smithii</em> to <em>Spatula smithii</em>.</p>
<p>Change the scientific name of Australian Shoveler from <em>Anas rhynchotis</em> to <em>Spatula rhynchotis</em>.</p>
<p>Change the scientific name of Northern Shoveler from <em>Anas clypeata</em> to <em>Spatula clypeata</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site">Fifty-eighth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p><NAME>., <NAME>, and <NAME>. 2009. Phylogenetic relationships based on two mitochondrial genes and hybridization patterns in Anatidae. Journal of Zoology 279: 310-318.</p>
<p> </p>
<p>page 29, <strong>Gadwall <em>Anas strepera</em></strong></p>
<p>page 29, <strong>Falcated Duck <em>Anas falcata</em></strong></p>
<p>page 29, <strong>Eurasian Wigeon <em>Anas penelope</em></strong></p>
<p>page 29, <strong>American Wigeon <em>Anas americana</em></strong></p>
<p>page 29, <strong>Chiloe Wigeon <em>Anas sibilatrix</em></strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2017), <em>Anas</em> is partitioned into four genera, based on phylogenetic analysis of mitochondrial DNA sequence data (Gonzalez et al. 2009). As a result, Gadwall, Falcated Duck, and the wigeons are placed in the genus <em>Mareca</em>. Change the scientific name of Gadwall from <em>Anas strepera</em> to <em>Mareca strepera</em>.</p>
<p>Change the scientific name of the monotypic group Gadwall (Common) from <em>Anas strepera strepera</em> to <em>Mareca strepera strepera</em>.</p>
<p>Change the scientific name of the monotypic group Gadwall (Coue’s) from <em>Anas strepera couesi</em> to <em>Mareca strepera couesi</em>.</p>
<p>Change the scientific name of Falcated Duck from <em>Anas falcata</em> to <em>Mareca falcata</em>.</p>
<p>Change the scientific name of Eurasian Wigeon from <em>Anas penelope</em> to <em>Mareca penelope</em>.</p>
<p>Change the scientific name of American Wigeon from <em>Anas americana</em> to <em>Mareca americana</em>.</p>
<p>Change the scientific name of Chiloe Wigeon from <em>Anas sibilatrix</em> to <em>Mareca sibilatrix</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site">Fifty-eighth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p><NAME>., <NAME>, and <NAME>. 2009. Phylogenetic relationships based on two mitochondrial genes and hybridization patterns in Anatidae. Journal of Zoology 279: 310-318.</p>
<p> </p>
<p>pages 29-31, <strong>dabbling ducks genera <em>Sibirionetta, Spatula, Mareca</em>, and <em>Anas</em></strong></p>
<p>The sequence of species in the dabbling ducks (<em>Sibirionetta, Spatula, Marec</em>a, and <em>Anas</em>) is revised, following Gonzalez et al. (2009). The new sequence of species is:</p>
<p>Baikal Teal <em>Sibirionetta formosa</em></p>
<p>Garganey <em>Spatula querquedula</em></p>
<p>Hottentot Teal <em>Spatula hottentota</em></p>
<p>Silver Teal <em>Spatula versicolor</em></p>
<p>Puna Teal <em>Spatula puna</em></p>
<p>Blue-winged Teal <em>Spatula discors</em></p>
<p>Cinnamon Teal <em>Spatula cyanoptera</em></p>
<p>Red Shoveler <em>Spatula platalea</em></p>
<p>Cape Shoveler <em>Spatula smithii</em></p>
<p>Australian Shoveler <em>Spatula rhynchotis</em></p>
<p>Northern Shoveler <em>Spatula clypeata</em></p>
<p>Gadwall <em>Mareca strepera</em></p>
<p>Falcated Duck <em>Mareca falcata</em></p>
<p>Eurasian Wigeon <em>Mareca penelope</em></p>
<p>American Wigeon <em>Mareca americana</em></p>
<p>Chiloe Wigeon <em>Mareca sibilatrix</em></p>
<p>African Black Duck <em>Anas sparsa</em></p>
<p>Yellow-billed Duck <em>Anas undulata</em></p>
<p>Meller’s Duck <em>Anas melleri</em></p>
<p>Pacific Black Duck <em>Anas superciliosa</em></p>
<p>Laysan Duck <em>Anas laysanensis</em></p>
<p>Hawaiian Duck <em>Anas wyvilliana</em></p>
<p>Philippine Duck <em>Anas luzonica</em></p>
<p>Indian Spot-billed Duck <em>Anas poecilorhyncha</em></p>
<p>Eastern Spot-billed Duck <em>Anas zonorhyncha</em></p>
<p>Mallard <em>Anas platyrhynchos</em></p>
<p>American Black Duck <em>Anas rubripes</em></p>
<p>Mottled Duck <em>Anas fulvigula</em></p>
<p>Cape Teal <em>Anas capensis</em></p>
<p>White-cheeked Pintail <em>Anas bahamensis</em></p>
<p>Red-billed Duck <em>Anas erythrorhyncha</em></p>
<p>Northern Pintail <em>Anas acuta</em></p>
<p>Eaton’s Pintail <em>Anas eatoni</em></p>
<p>Yellow-billed Pintail <em>Anas georgica</em></p>
<p>Green-winged Teal <em>Anas crecca</em></p>
<p>Andean Teal <em>Anas andium</em></p>
<p>Yellow-billed Teal <em>Anas flavirostris</em></p>
<p>Andaman Teal <em>Anas albogularis</em></p>
<p>Sunda Teal <em>Anas gibberifrons</em></p>
<p>Gray Teal <em>Anas gracilis</em></p>
<p>Chestnut Teal <em>Anas castanea</em></p>
<p>Bernier’s Teal <em>Anas bernieri</em></p>
<p>Auckland Islands Teal <em>Anas aucklandica</em></p>
<p>Campbell Islands Teal <em>Anas nesiotis</em></p>
<p>Brown Teal <em>Anas chlorotis</em></p>
<p>Reference:</p>
<p><NAME>., <NAME>, and <NAME>. 2009. Phylogenetic relationships based on two mitochondrial genes and hybridization patterns in Anatidae. Journal of Zoology 279: 310-318.</p>
<p> </p>
<p>page 54, <strong>Tabon Scrubfowl <em>Megapodius cumingii</em></strong></p>
<p>Subspecies <em>dillwyni</em> Tweeddale 1878, previously considered to be a junior synonym of <em>pusillus</em> (e.g., Dickinson et al. 1991), is recognized, following Jones et al. (1995). Insert <em>dillwyni </em>immediately following the heading for the species. The range of <em>dillwyni</em> is “northern Philippines (Luzon, Mindoro, Marinduque, and Babuyan Islands)”.</p>
<p>With the addition of subspecies <em>dillwyni</em> and <em>tabon</em>, revise the range description of subspecies <em>pusillus </em>from “N and e Philippine Islands” to “central Philippines: Visayan Islands (Masbate, Negros, Cebu, Bohol, Leyte, and Samar), western Mindanao, and Basilan”.</p>
<p>Subspecies <em>tabon</em> Hachisuka 1931, previously considered to be a junior synonym of <em>pusillus</em> (e.g., Dickinson et al. 1991), is recognized, following Jones et al. (1995). Insert <em>tabon</em> immediately following the nominate subspecies <em>cumingii</em>. The range of <em>tabon</em> is “southeastern Philippines (eastern Mindanao)”.</p>
<p>References:</p>
<p><NAME>., <NAME>, and <NAME>. 1991. The birds of the Philippines. An annotated check-list. British Ornithologists’ Union Check-list number 12. British Ornithologists’ Union, London.</p>
<p><NAME>., <NAME>, and <NAME>. 1995. The megapodes Megapodiidae. Oxford University Press, New York and Oxford, United Kingdom.</p>
<p> </p>
<p>page 70, <strong>Stone Partridge <em>Ptilopachus petrosus</em></strong></p>
<p>Revise the range description of the monotypic group Stone Partridge (Stone) <em>Ptilopachus petrosus petrosus</em> from “Senegambia to s Sudan, n Uganda and n Kenya” to “Senegambia east to central Sudan, northeastern Democratic Republic of the Congo, northern Uganda, northern Kenya, and south central Ethiopia”.</p>
<p>Revise the range description of the monotypic group Stone Partridge (Ethiopian) <em>Ptilopachus petrosus major</em> from “Rocky areas of nw Ethiopia” to “northern Eritrea and northwestern Ethiopia”.</p>
<p> </p>
<p>pages 60-63, 66, 70, <strong>New World Quail Odontophoridae</strong></p>
<p>In accord with AOU-NACC (Chesser et al. 2016) and AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop706.htm">Proposal 706</a>), the sequence of genera in Odontophoridae is revised. This action is based on Hosner et al. (2015). The new sequence of genera is:</p>
<p><em>Ptilopachus</em></p>
<p><em>Rhynchortyx </em></p>
<p><em>Oreortyx</em></p>
<p><em>Dendrortyx</em></p>
<p><em>Philortyx</em></p>
<p><em>Colinus</em></p>
<p><em>Callipepla</em></p>
<p><em>Cyrtonyx</em></p>
<p><em>Dactylortyx</em></p>
<p><em>Odontophorus</em></p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2016. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-16-77.1?code=coop-site">Fifty-seventh supplement to the American Ornithologists’ <em>Union Check-list of North American birds</em>.</a> Auk 133: 544-560.</p>
<p><NAME>., <NAME>, and <NAME>. 2015. Land connectivity changes and global cooling shaped the colonization history and diversification of New World quail (Aves: Galliformes: Odontophoridae). Journal of Biogeography 42: 1883-1895.</p>
<p> </p>
<p>page 70, <strong>Crested Partridge <em>Rollulus rouloul</em></strong></p>
<p>Revise the range description from “Malay Pen., Sumatra, Borneo, Banka and Belitung islands” to “southern Myanmar (southern Tenasserim), Thai-Malay Peninsula, Sumatra, Bangka and Belitung islands, and Borneo”.</p>
<p> </p>
<p>page 69, <strong>Chestnut-bellied Partridge <em>Arborophila javanica</em></strong></p>
<p>Subspecies <em>bartelsi</em>, with range “Mountains of w-central Java”, is considered to be a junior synonym of nominate <em>javanica</em> (Mees 1996), and is deleted. Revise the range of <em>javanica</em> from “Mountains of w Java” to “mountains of western and central Java”.</p>
<p>Reference:<br />
<NAME>. 1996. Geographical variation in birds of Java. Publications of the Nuttall Ornithological Club number 26. Cambridge, Massachusetts.</p>
<p> </p>
<p>page 64, <strong>Rock Partridge <em>Alectoris graeca</em></strong></p>
<p>With the recognition of subspecies <em>orlandoi</em>, change the scientific name of the polytypic group Rock Partridge (European) from <em>Alectoris graeca graeca/saxatilis</em> to Rock Partridge (European) <em>Alectoris graeca</em> [<em>graeca</em> Group].</p>
<p>With the recognition of subspecies <em>orlandoi</em>, revise the range description of subspecies <em>saxatilis</em> from “Alps (France to Austria), Italy (Apennines), and the western Balkans” to “Alps (France to Austria) and the western Balkans”.</p>
<p>We recognize a previously overlooked subspecies, <em>orlandoi</em>, with range “Italy (Apennine Mountains)”, following Corso (2010, 2012). Insert <em>orlandoi</em> immediately following subspecies <em>saxatilis</em>.</p>
<p>References:</p>
<p><NAME>. 2010. Sicilian Rock Partridge: identification and taxonomy. Dutch Birding 32: 79-96.</p>
<p><NAME>. 2012. Additional comments on Rock Partridge morphology: <em>Alectoris graeca orlandoi</em>. Dutch Birding 34: 97-99.</p>
<p> </p>
<p>page 66, <strong>Yellow-necked Francolin <em>Pternistis leucoscepus</em></strong></p>
<p>Revise the range description from “SE Sudan to Ethiopia, Somalia, Kenya and n Tanzania” to “Eritrea, Ethiopia, Djibouti, Somalia, southeastern South Sudan, northeastern Uganda, Kenya, and north central Tanzania”.</p>
<p> </p>
<p>page 65, <strong>Crested Francolin <em>Francolinus sephaena</em></strong></p>
<p>Crested Francolin is removed from the genus <em>Francolinus</em> and placed in <em>Dendroperdix</em> (Hockey et al. 2005, Wang et al. 2016). Change the scientific name from <em>Francolinus sephaena</em> to <em>Dendroperdix sephaena</em>.</p>
<p>Revise the range description of subspecies <em>granti</em> from “Ethiopia to s Sudan, Uganda and n-central Tanzania” to “South Sudan and western Ethiopia to north central Tanzania”.</p>
<p>References:</p>
<p><NAME>., <NAME>, and <NAME> (editors). 2005. Roberts Birds of southern Africa. Trustees of the John Voelcker Bird Book Fund, Cape Town.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2016. Ancestral range reconstruction of Galliformes: the effects of topology and taxon sampling. Journal of Biogeography 44: 122-135.</p>
<p> </p>
<p>page 65, <strong>Schlegel’s Francolin <em>Peliperdix schlegelii</em></strong></p>
<p>Revise the range description from “Savanna of Cameroon and s Chad to sw Sudan” to “Cameroon, southern Chad, northern Central African Republic, extreme southwestern Sudan, and northwestern South Sudan”.</p>
<p> </p>
<p>pages 72-73, <strong>Ring-necked Pheasant <em>Phasianus colchicus</em></strong></p>
<p>Revise the range description of subspecies <em>colchicus</em> from “E Georgia to ne Azerbaijan, s Armenia and nw Iran. Pheasants representing a mixture of <em>colchicus, torquatus</em>, and other subspecies now are widely introduced around the world, including across Europe, on New Zealand, the Hawaiian Islands, and North America” to “eastern Georgia to northeastern Azerbaijan, southern Armenia and northwestern Iran. Pheasants representing a mixture of <em>colchicus, torquatus</em>, and other subspecies now are widely introduced around the world, including across Europe, on New Zealand, the Hawaiian Islands, North America, and South America (southern Chile)”.</p>
<p> </p>
<p>page 72, <strong>Crested Fireback <em>Lophura ignita</em></strong></p>
<p>Revise the range description of subspecies <em>ignita</em> from “Kalimantan (Borneo) and Banka I. (off se Sumatra)” to “Borneo (except for the north) and Bangka I. (off southeastern Sumatra)”.</p>
<p> </p>
<p>page 59, <strong>Eurasian Capercaillie <em>Tetrao urogallus</em></strong></p>
<p>In accord with current usage (Madge and McGowan 2002, British Ornithologists’ Union 2013), change the English name of <em>Tetrao urogallus</em> from Eurasian Capercaillie to Western Capercaillie.</p>
<p>References:</p>
<p>British Ornithologists’ Union. 2013. The British List: A Checklist of Birds of Britain (8th edition). Ibis 155: 635-676.</p>
<p><NAME>., and <NAME>. 2002. Pheasants, partridges, and grouse. Princeton University Press, Princeton, New Jersey.</p>
<p> </p>
<p>page 59, <strong>Hazel Grouse <em>Bonasa bonasia</em></strong></p>
<p>Phylogenetic analyses of DNA sequence data consistently show that the genus <em>Bonasa</em> is not monophyletic (Kimball et al. 2011, Wang et al. 2013, 2016). Change the scientific name of Hazel Grouse from <em>Bonasa bonasia</em> to <em>Tetrastes bonasia</em>.</p>
<p>Change the spelling of the subspecies name <em>styriaca</em> to <em>styriacus</em>.</p>
<p>Change the spelling of the subspecies name <em>rhenana</em> to <em>rhenanus</em>.</p>
<p>Change the spelling of the subspecies name <em>sibirica</em> to <em>sibiricus</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, and <NAME>. 2011. <a href="https://www.hindawi.com/journals/ijeb/2011/423938/">A macroevolutionary perspective on multiple sexual traits in the Phasianidae (Galliformes)</a>. International Journal of Evolutionary Biology 2011: 423938.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2013. <a href="http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0064312">Assessing phylogenetic relationships among Galliformes: a multigene phylogeny with expanded taxon sampling in Phasianidae</a>. PLoS ONE 8: e64312</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2016. Ancestral range reconstruction of Galliformes: the effects of topology and taxon sampling. Journal of Biogeography 44: 122-135.</p>
<p> </p>
<p>page 59, <strong>Severtzov’s Grouse <em>Bonasa sewerzowi</em></strong></p>
<p>Phylogenetic analyses of DNA sequence data consistently show that the genus <em>Bonasa</em> is not monophyletic (Kimball et al. 2011, Wang et al. 2013, 2016). Change the scientific name of Severtzov’s Grouse from <em>Bonasa sewerzowi</em> to <em>Tetrastes sewerzowi</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, and <NAME>. 2011. <a href="https://www.hindawi.com/journals/ijeb/2011/423938/">A macroevolutionary perspective on multiple sexual traits in the Phasianidae (Galliformes)</a>. International Journal of Evolutionary Biology 2011: 423938.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2013. <a href="http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0064312">Assessing phylogenetic relationships among Galliformes: a multigene phylogeny with expanded taxon sampling in Phasianidae</a>. PLoS ONE 8: e64312</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2016. Ancestral range reconstruction of Galliformes: the effects of topology and taxon sampling. Journal of Biogeography 44: 122-135.</p>
<p> </p>
<p>pages 58-59, <strong>Rock Ptarmigan <em>Lagopus muta</em></strong></p>
<p>Change the spelling of the subspecies name <em>reinhardti</em> to the correct original spelling <em>reinhardi</em> (Peters 1934, Dickinson and Remsen 2013).</p>
<p>References:</p>
<p><NAME>., and J.V. Remsen, Jr. (editors). 2013. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 1. Aves Press, Eastbourne, United Kingdom.</p>
<p><NAME>. 1934. <a href="https://biodiversitylibrary.org/page/14482847">Check-list of birds of the world. Volume II</a>. Harvard University Press, Cambridge, Massachusetts.</p>
<p> </p>
<p>page 7, <strong>Jackass Penguin <em>Spheniscus demersus</em></strong></p>
<p>In accord with current usage (Williams 1995, Hockey et al. 2005), change the English name of <em>Spheniscus demersus</em> from Jackass Penguin to African Penguin.</p>
<p>References:</p>
<p><NAME>., <NAME>, and <NAME> (editors). 2005. Roberts Birds of southern Africa. VII edition. Trustees of the <NAME> Bird Book Fund, Cape Town.</p>
<p><NAME>. 1995. The penguins. Spheniscidae. Oxford University Press, Oxford, United Kingdom.</p>
<p> </p>
<p>page 14, <strong>White-bellied Storm-Petrel <em>Fregetta grallaria</em></strong></p>
<p>Change the English name of the monotypic group <em>Fregetta grallaria titan</em> from White-bellied Storm-Petrel (Rapa Island) to White-bellied Storm-Petrel (Rapa I.).</p>
<p> </p>
<p>page 18, <strong>Ascension Island Frigatebird <em>Fregata aquila</em></strong></p>
<p>In accord with current usage (Brown et al. 1982, Borrow and Demey 2001), change the English name of <em>Fregata aquila</em> from Ascension Island Frigatebird to Ascension Frigatebird.</p>
<p>References:</p>
<p><NAME>., and <NAME>. 2001. A guide to birds of western Africa. Princeton University Press, Princeton, New Jersey.</p>
<p><NAME>., <NAME>, and <NAME>. 1982. The birds of Africa. Volume I. Academic Press, London.</p>
<p> </p>
<p>page 20, <strong>Intermediate Egret <em>Mesophoyx intermedia</em></strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2017), the genus <em>Mesophoyx</em> is merged into <em>Ardea</em>, based on Sheldon (1987), Chang et al. (2003), and Zhou et al. (2014). Change the scientific name of Intermediate Egret from <em>Mesophoyx intermedia</em> to <em>Ardea intermedia</em>.</p>
<p>Change the scientific name of the monotypic group Intermediate Egret (Intermediate) from <em>Mesophoyx intermedia intermedia</em> to <em>Ardea intermedia intermedia</em>.</p>
<p>Change the scientific name of the monotypic group Intermediate Egret (Plumed) from <em>Mesophoyx intermedia plumifera</em> to <em>Ardea intermedia plumifera</em>.</p>
<p>Change the scientific name of the monotypic group Intermediate Egret (Yellow-billed) from <em>Mesophoyx intermedia brachyrhyncha</em> to <em>Ardea intermedia brachyrhyncha</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2003. Phylogenetic relationships among 13 species of herons inferred from mitochondrial 12S rRNA gene sequences. Acta Zoologica Sinica 49: 205-210.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site">Fifty-eighth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p><NAME>. 1987. <a href="https://sora.unm.edu/sites/default/files/journals/auk/v104n01/p0097-p0108.pdf">Phylogeny of herons estimated from DNA-DNA hybridization data</a>. Auk 104: 97-108.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2014. <a href="https://bmcgenomics.biomedcentral.com/articles/10.1186/1471-2164-15-573">The complete mitochondrial genomes of sixteen ardeid birds revealing the evolutionary process of the gene rearrangements</a>. BMC Genomics 15: 573.</p>
<p> </p>
<p>page 24, <strong>Australian Ibis <em>Threskiornis molucca</em></strong></p>
<p>Change the spelling of the scientific name of Australian Ibis from <em>Threskiornis moluccus</em> to <em>Threskiornis molucca</em> (Schodde and Bock 2016).</p>
<p>Change the spelling of the subspecies name <em>moluccus</em> to <em>molucca</em>.</p>
<p>Reference:</p>
<p><NAME>., and <NAME>. 2016. Conflict resolution of grammar and gender for avian species-group names under Article 31.2.2 of the ICZN Code: is gender agreement worth it? Zootaxa 4127: 161-170.</p>
<p> </p>
<p>page 49, <strong>Secretary-bird <em>Sagittarius serpentarius</em></strong></p>
<p>In accord with widespread usage (e.g., Ferguson-Lees and Christie 2001), change the English name of <em>Sagittarius serpentarius</em> from Secretary-bird to Secretarybird.</p>
<p>Reference:</p>
<p><NAME>., and <NAME>. 2001. Raptors of the world. Houghton Mifflin Company, Boston and New York.</p>
<p> </p>
<p>page 37, <strong>Lammergeier <em>Gypaetus barbatus</em></strong></p>
<p>Change the English name of <em>Gypaetus barbatus</em> from Lammergeier to Bearded Vulture, to conform to prevailing usage (e.g., Hockey et al. 2005, Rasmussen and Anderton 2005).</p>
<p>Change the English name of the monotypic group <em>Gypaetus barbatus barbatus</em> from Lammergeier (Eurasian) to Bearded Vulture (Eurasian).</p>
<p>Change the English name of the monotypic group <em>Gypaetus barbatus meridionalis</em> from Lammergeier (African) to Bearded Vulture (African).</p>
<p>References:</p>
<p>Hockey, P.A.R., <NAME>, and <NAME> (editors). 2005. Roberts Birds of southern Africa. VII edition. Trustees of the John Voelcker Bird Book Fund, Cape Town.</p>
<p>Rasmussen, P.C., and <NAME>. 2005. Birds of South Asia. The Ripley guide. Volume 2: attributes and status. Smithsonian Institution and Lynx Edicions, Washington D.C. and Barcelona.</p>
<p> </p>
<p>page 38, <strong>Beaudouin’s Snake-Eagle <em>Circaetus beaudouini</em></strong></p>
<p>Revise the range description from “Senegal and Mauritania to s Sudan, n Uganda and nw Kenya” to “southwestern Mauritania south to Guinea, east to southwestern Sudan and western South Sudan, possibly to northern Uganda”.</p>
<p> </p>
<p>page 42, <strong>Ovampo Sparrowhawk <em>Accipiter ovampensis</em></strong></p>
<p>In accord with current usage (Zimmerman et al. 1996, Borrow and Demey 2001, Ferguson-Lees and Christie 2001, Hockey et al. 2005), change the English name of <em>Accipiter ovampensis</em> from Ovampo Sparrowhawk to Ovambo Sparrowhawk.</p>
<p>References:</p>
<p><NAME>., and <NAME>. 2001. A guide to birds of western Africa. Princeton University Press, Princeton, New Jersey.</p>
<p><NAME>., and <NAME>. 2001. Raptors of the world. Houghton Mifflin Company, Boston and New York.</p>
<p><NAME>., <NAME>, and <NAME> (editors). 2005. Roberts Birds of southern Africa. VII edition. Trustees of the John Voelcker Bird Book Fund, Cape Town.</p>
<p><NAME>., <NAME>, and <NAME>. 1996. Birds of northern Kenya and northern Tanzania. Princeton University Press, Princeton, New Jersey.</p>
<p> </p>
<p>page 43, <strong>Sharp-shinned Hawk <em>Accipiter striatus</em></strong></p>
<p>Subspecies <em>madrensis</em> is removed from the polytypic group Sharp-shinned Hawk (Northern) <em>Accipiter striatus</em> [<em>velox</em> Group], and instead is recognized as a separate monotypic group, Sharp-shinned Hawk (Madrean) <em>Accipiter striatus madrensis</em>.</p>
<p> </p>
<p>pages 36-37, <strong>sea eagles genera <em>Haliaeetus, Ichthyophaga</em></strong></p>
<p>The genus <em>Ichthyophaga</em> (or <em>Icthyophaga</em>) is embedded within <em>Haliaeetus</em> (Lerner and Mindell 2005), and so is merged with that genus. The sequence of species within <em>Haliaeetus</em> is revised, based on Lerner and Mindell (2005). The new sequence of species is:</p>
<p>Bald Eagle <em>Haliaeetus leucocephalus</em></p>
<p>White-tailed Eagle <em>Haliaeetus albicilla</em></p>
<p>Pallas’s Fish-Eagle <em>Haliaeetus leucoryphus</em></p>
<p>Steller’s Sea-Eagle <em>Haliaeetus pelagicus</em></p>
<p>White-bellied Sea-Eagle <em>Haliaeetus leucogaster</em></p>
<p>Sanford’s Sea-Eagle <em>Haliaeetus sanfordi</em></p>
<p>African Fish-Eagle <em>Haliaeetus vocifer</em></p>
<p>Madagascar Fish-Eagle <em>Haliaeetus vociferoides</em></p>
<p>Lesser Fish-Eagle <em>Haliaeetus humilis</em></p>
<p>Gray-headed Fish-Eagle <em>Haliaeetus ichthyaetus</em></p>
<p>Reference:</p>
<p> </p>
<p>Lerner, H.R.L., and <NAME>. 2005. Phylogeny of eagles, Old World vultures, and other Accipitridae based on nuclear and mitochondrial DNA. Molecular Phylogenetics and Evolution 37: 327-346.</p>
<p> </p>
<p>page 37, <strong>Lesser Fish-Eagle <em>Ichthyophaga humilis</em></strong></p>
<p>The genus <em>Ichthyophaga</em> (or <em>Icthyophaga</em>) is embedded within <em>Haliaeetus</em> (Lerner and Mindell 2005), and so is merged with that genus. Change the scientific name of Lesser Fish-Eagle from <em>Ichthyophaga humilis</em> to <em>Haliaeetus humilis</em>.</p>
<p>With the transfer of Lesser Fish-Eagle from <em>Ichthyophaga</em> to <em>Haliaeetus</em>, the spelling of subspecies name <em>plumbea</em> changes to <em>plumbeus</em>.</p>
<p>Reference:</p>
<p><NAME>., and <NAME>. 2005. Phylogeny of eagles, Old World vultures, and other Accipitridae based on nuclear and mitochondrial DNA. Molecular Phylogenetics and Evolution 37: 327-346.</p>
<p> </p>
<p>page 37, <strong>Gray-headed Fish-Eagle <em>Ichthyophaga ichthyaetus</em></strong></p>
<p>The genus <em>Ichthyophaga</em> (or <em>Icthyophaga</em>) is embedded within <em>Haliaeetus</em> (Lerner and Mindell 2005), and so is merged with that genus. Change the scientific name of Gray-headed Fish-Eagle from <em>Ichthyophaga ichthyaetus</em> to <em>Haliaeetus ichthyaetus</em>.</p>
<p>Reference:</p>
<p><NAME>., and <NAME>. 2005. Phylogeny of eagles, Old World vultures, and other Accipitridae based on nuclear and mitochondrial DNA. Molecular Phylogenetics and Evolution 37: 327-346.</p>
<p> </p>
<p>page 46, <strong>Common Buzzard <em>Buteo buteo</em></strong></p>
<p>Change the English name of the monotypic group <em>Buteo buteo rothschildi</em> from Common Buzzard (Azorean) to Common Buzzard (Azores).</p>
<p>Change the English name of the monotypic group <em>Buteo buteo insularum</em> from Common Buzzard (Canary Islands) to Common Buzzard (Canary Is.).</p>
<p> </p>
<p>page 87, <strong>Arabian Bustard <em>Ardeotis arabs</em></strong></p>
<p>Revise the range description of subspecies <em>butleri</em> from “S Sudan; single record for nw Kenya” to “South Sudan; very rare (resident?) in northwestern Kenya”.</p>
<p>Revise the range description of nominate <em>arabs</em> from “Ethiopia to nw Somalia, sw Saudi Arabia and w Yemen” to “Eritrea, northeastern Ethiopia, Djibouti, and northwestern Somalia; southwestern Saudi Arabia and western Yemen”.</p>
<p> </p>
<p>page 87, <strong>Kori Bustard <em>Ardeotis kori</em></strong></p>
<p>Revise the range description of subspecies <em>struthiunculus</em> from “Ethiopia to nw Somalia, se Sudan, ne Uganda and n Tanzania” to “southeastern South Sudan to northwestern Somalia, south to northern Uganda and north central Tanzania”.</p>
<p> </p>
<p>page 87, <strong>Houbara Bustard <em>Chlamydotis undulata</em></strong></p>
<p>Change the English name of the monotypic group <em>Chlamydotis undulata fuertaventurae</em> from Houbara Bustard (Canary Islands) to Houbara Bustard (Canary Is.).</p>
<p> </p>
<p>page 87, <strong>White-bellied Bustard <em>Eupodotis senegalensis</em></strong></p>
<p>Revise the range description of nominate <em>senegalensis</em> from “SW Mauritania to Guinea, Central African Rep. and s Sudan” to “southwestern Mauritania south to Guinea, east to central Sudan, Eritrea, and northwestern Ethiopia”.</p>
<p>Revise the range description of subspecies <em>canicollis </em>from “Ethiopia to Kenya and ne Tanzania” to “Ethiopia to southeastern South Sudan, northern Uganda, Kenya and northeastern Tanzania”.</p>
<p> </p>
<p>page 88, <strong>Savile’s Bustard <em>Eupodotis savilei</em></strong></p>
<p>Revise the range description from “SW Mauritania and Senegal to Nigeria, Chad and s Sudan” to “southwestern Mauritania and Senegal east to Chad and central Sudan”.</p>
<p> </p>
<p>page 88, <strong>Buff-crested Bustard <em>Eupodotis gindiana</em></strong></p>
<p>Revise the range description from “SE Sudan to s Ethiopia, Somalia, Kenya and n Tanzania” to “southeastern South Sudan to eastern Ethiopia, Djibouti, and Somalia to northeastern Uganda, Kenya, and north central Tanzania”.</p>
<p> </p>
<p>page 81, <strong>Mangrove Rail <em>Rallus longirostris</em></strong></p>
<p>Add a recently described subspecies, <em>Rallus longirostris berryorum</em> Maley et al. 2016, with range “Pacific coast of eastern El Salvador, Honduras, and northern Nicaragua; population in northwestern Costa Rica presumably also refers to this subspecies”. We also recognize <em>berryorum</em> as a new monotypic group, with the English name Mangrove Rail (Fonseca). Insert <em>berryorum</em> immediately following the heading for the species.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2016. Fonseca Mangrove Rail: a new subspecies from Honduras. Western Birds 47: 262-273.</p>
<p> </p>
<p>page 81, <strong>Water Rail <em>Rallus aquaticus</em></strong></p>
<p>Revise the range statement for subspecies <em>hibernans</em> from “Iceland” to “Iceland; declining, and possibly extinct”.</p>
<p> </p>
<p>page 85, <strong>Purple Gallinule <em>Porphyrio martinica</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop725.htm">Proposal 725</a>), change the scientific name of Purple Gallinule from <em>Porphyrio martinicus</em> back to <em>Porphyrio martinica</em>, following Schodde and Bock (2016).</p>
<p>Reference:</p>
<p><NAME>., and <NAME>. 2016. Conflict resolution of grammar and gender for avian species-group names under Article 31.2.2 of the ICZN Code: is gender agreement worth it? Zootaxa 4127: 161-170.</p>
<p> </p>
<p>page 85, <strong>Tristan Moorhen <em>Gallinula nesiotis</em></strong></p>
<p>Revise the range statement for Tristan Moorhen (Gough) <em>Gallinula nesiotis comeri</em> from “Gough I. (South Atlantic Ocean)” to “Gough I. (South Atlantic Ocean). Introduced to Tristan da Cunha in 1956”.</p>
<p> </p>
<p>page 85, <strong>Black-tailed Native-hen <em>Tribonyx ventralis</em></strong></p>
<p>Change the English name of <em>Tribonyx ventralis</em> from Black-tailed Native-hen to Black-tailed Nativehen, to conform to the spelling of similar group names (waterhen, swamphen, and moorhen).</p>
<p> </p>
<p>page 85, <strong>Tasmanian Native-hen <em>Tribonyx mortierii</em></strong></p>
<p>Change the English name of <em>Tribonyx mortierii</em> from Tasmanian Native-hen to Tasmanian Nativehen, to conform to the spelling of similar group names (waterhen, swamphen, and moorhen).</p>
<p> </p>
<p>page 86, <strong>Eurasian Coot <em>Fulica atra</em></strong></p>
<p>Change subspecies name <em>novaeguinea</em> to the correct original spelling, <em>novaeguineae</em> (Rand 1940).</p>
<p>Reference:</p>
<p><NAME>. 1940. <a href="http://digitallibrary.amnh.org/bitstream/handle/2246/3717/v2/dspace/ingest/pdfSource/nov/N1072.pdf?sequence=1&isAllowed=y">Results of the Archbold Expeditions. No. 25. New birds from the 1938-1939 expedition</a>. American Museum Novitates number 1072.</p>
<p> </p>
<p>page 78, <strong>White-spotted Flufftail <em>Sarothrura pulchra</em></strong></p>
<p>Revise the range description of the monotypic group White-spotted Flufftail (Southern) <em>Sarothrura pulchra centralis</em> from “Congo to s Sudan, w Kenya, nw Tanzania and n Angola” to “Congo and northern Angola east to extreme southern South Sudan, western Kenya, and northwestern Tanzania”.</p>
<p> </p>
<p>page 78, <strong>Striped Flufftail <em>Sarothrura affinis</em></strong></p>
<p>Revise the range description of subspecies <em>antonii</em> from “Montane grasslands of extreme s Sudan to e Zimbabwe” to “extreme southern South Sudan south, locally, to eastern Zimbabwe”.</p>
<p> </p>
<p>page 97, <strong>curlews genus <em>Numenius</em></strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2017), the sequence of species of <em>Numenius</em> is revised, following Gibson and Baker (2012). The new sequence of species is:</p>
<p>Bristle-thighed Curlew <em>Numenius tahitiensis</em></p>
<p>Whimbrel <em>Numenius phaeopus</em></p>
<p>Little Curlew <em>Numenius minutus</em></p>
<p>Eskimo Curlew <em>Numenius borealis</em></p>
<p>Long-billed Curlew <em>Numenius americanus</em></p>
<p>Far Eastern Curlew <em>Numenius madagascariensis</em></p>
<p>Slender-billed Curlew <em>Numenius tenuirostris</em></p>
<p>Eurasian Curlew <em>Numenius arquata</em></p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site">Fifty-eighth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p><NAME>., and <NAME>. 2012. Multiple gene sequences resolve phylogenetic relationships in the shorebird suborder Scolopaci (Aves: Charadriiformes). Molecular Phylogenetics and Evolution 64: 66–72.</p>
<p> </p>
<p>page 97, <strong>godwits genus <em>Limosa</em></strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2017), the sequence of species of <em>Limosa</em> is revised, following Gibson and Baker (2012). The new sequence of species is:</p>
<p>Bar-tailed Godwit <em>Limosa lapponica</em></p>
<p>Black-tailed Godwit <em>Limosa limosa</em></p>
<p>Hudsonian Godwit <em>Limosa haemastica</em></p>
<p>Marbled Godwit <em>Limosa fedoa</em></p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site">Fifty-eighth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p><NAME>., and <NAME>. 2012. Multiple gene sequences resolve phylogenetic relationships in the shorebird suborder Scolopaci (Aves: Charadriiformes). Molecular Phylogenetics and Evolution 64: 66–72.</p>
<p> </p>
<p>page 97, <strong>dowitchers genus <em>Limnodromus</em></strong></p>
<p>The sequence of species of dowitcher (<em>Limnodromus</em>) is revised, based on Gibson and Baker (2012). The new sequence of species is:</p>
<p>Asian Dowitcher <em>Limnodromus semipalmatus</em></p>
<p>Short-billed Dowitcher <em>Limnodromus griseus</em></p>
<p>Long-billed Dowitcher <em>Limnodromus scolopaceus</em></p>
<p>Reference:</p>
<p><NAME>., and <NAME>. 2012. Multiple gene sequences resolve phylogenetic relationships in the shorebird suborder Scolopaci (Aves: Charadriiformes). Molecular Phylogenetics and Evolution 64: 66–72.</p>
<p> </p>
<p>pages 95-97<strong>, snipe and woodcocks, genera <em>Lymnocryptes, Scolopax, Coenocorypha</em>, and <em>Gallinago </em></strong></p>
<p>The sequence of species of snipes and woodcock (<em>Lymnocryptes, Scolopax, Coenocorypha</em>, and <em>Gallinago</em>) is revised, based on Gibson and Baker (2012). The new sequence of species is:</p>
<p><NAME> <em>Lymnocryptes minimus</em></p>
<p>Eurasian Woodcock <em>Scolopax rusticola</em></p>
<p><NAME> <em>Scolopax mira</em></p>
<p>Bukidnon Woodcock <em>Scolopax bukidnonensis</em></p>
<p><NAME> <em>Scolopax saturata</em></p>
<p>New Guinea Woodcock <em>Scolopax rosenbergii</em></p>
<p>Sulawesi Woodcock <em>Scolopax celebensis</em></p>
<p><NAME> <em>Scolopax rochussenii</em></p>
<p>American Woodcock <em>Scolopax minor</em></p>
<p>North Island Snipe <em>Coenocorypha barrierensis</em></p>
<p>South Island Snipe <em>Coenocorypha iredalei</em></p>
<p>Forbes’s Snipe <em>Coenocorypha chathamica</em></p>
<p>Chatham Islands Snipe <em>Coenocorypha pusilla</em></p>
<p>Snares Island Snipe <em>Coenocorypha huegeli</em></p>
<p>Subantarctic Snipe <em>Coenocorypha aucklandica</em></p>
<p>Imperial Snipe <em>Gallinago imperialis</em></p>
<p>Jameson’s Snipe <em>Gallinago jamesoni</em></p>
<p>Fuegian Snipe <em>Gallinago stricklandii</em></p>
<p>Solitary Snipe <em>Gallinago solitaria</em></p>
<p>Latham’s Snipe <em>Gallinago hardwickii</em></p>
<p>Wood Snipe <em>Gallinago nemoricola</em></p>
<p>Great Snipe <em>Gallinago media</em></p>
<p>Common Snipe <em>Gallinago gallinago</em></p>
<p>Wilson’s Snipe <em>Gallinago delicata</em></p>
<p>South American Snipe <em>Gallinago paraguaiae</em></p>
<p>Puna Snipe <em>Gallinago andina</em></p>
<p>Noble Snipe <em>Gallinago nobilis</em></p>
<p>Pin-tailed Snipe <em>Gallinago stenura</em></p>
<p>Swinhoe’s Snipe <em>Gallinago megala</em></p>
<p>African Snipe <em>Gallinago nigripennis</em></p>
<p>Madagascar Snipe <em>Gallinago macrodactyla</em></p>
<p>Giant Snipe <em>Gallinago undulata</em></p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site">Fifty-eighth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p><NAME>., and <NAME>. 2012. Multiple gene sequences resolve phylogenetic relationships in the shorebird suborder Scolopaci (Aves: Charadriiformes). Molecular Phylogenetics and Evolution 64: 66–72.</p>
<p> </p>
<p>page 89, <strong>Crab Plover <em>Dromas ardeola</em></strong></p>
<p>In accord with current usage (Inskipp et al. 1996, Zimmerman et al. 1996, Grimmett et al. 1999, Stevenson and Fanshawe 2002, Rasmussen and Anderton 2012), change the English name of <em>Dromas ardeola</em> from Crab Plover to Crab-Plover.</p>
<p>References:</p>
<p><NAME>., <NAME>, and <NAME>. 1999. A guide to the birds of India, Pakistan, Nepal, Bangladesh, Bhutan, Sri Lanka, and the Maldives. Princeton University Press, Princeton, New Jersey.</p>
<p><NAME>., <NAME>, and <NAME>. 1996. An annotated checklist of the birds of the Oriental Region. Oriental Bird Club, Sandy, Bedfordshire, United Kingdom.</p>
<p><NAME>., and <NAME>. 2012. Birds of South Asia. The Ripley guide. Volume 2: attributes and status. Second Edition. Smithsonian Institution and Lynx Edicions, Washington D.C. and Barcelona.</p>
<p><NAME>., and <NAME>. 2002. Field guide to the birds of East Africa. T & A D Poyser, London.</p>
<p><NAME>., <NAME>, and <NAME>. 1996. Birds of northern Kenya and northern Tanzania. Princeton University Press, Princeton, New Jersey.</p>
<p> </p>
<p>page 91, <strong>Somali Courser <em>Cursorius somalensis</em></strong></p>
<p>Revise the range description of subspecies <em>littoralis</em> from “Extreme se Sudan to n Kenya and s Somalia” to “extreme southeastern South Sudan, northern Kenya, and southern Somalia”.</p>
<p> </p>
<p>page 91, <strong>Three-banded Courser <em>Rhinoptilus cinctus</em></strong></p>
<p>Revise the range description of nominate <em>cinctus</em> from “SE Sudan to e Ethiopia, Somalia and n Kenya” to “southeastern South Sudan, southern Ethiopia, Somalia, and northern Kenya”.</p>
<p> </p>
<p>page 103, <strong>Gray Noddy <em>Procelsterna albivitta</em></strong></p>
<p>page 103, <strong>Blue-gray Noddy <em>Procelsterna cerulea</em></strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2017), the genus <em>Procelsterna</em> is merged into the genus <em>Anous</em>, following evidence that <em>Procelsterna</em> is embedded within <em>Anous</em> (Cibois et al. 2016). Reposition Gray and Blue-gray noddies to immediately follow Brown Noddy (<em>Anous stolidus</em>). Change the scientific name of Gray Noddy from <em>Procelsterna albivitta</em> to <em>Anous albivitta</em>.</p>
<p>Change the scientific name of Blue-gray Noddy from <em>Procelsterna cerulea</em> to <em>Anous ceruleus</em>. Change the spelling of the nominate subspecies from <em>cerulea</em> to <em>ceruleus</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site">Fifty-eighth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2016. Molecular phylogeny and systematics of Blue and Grey noddies (<em>Procelsterna</em>). Ibis 158: 433-438.</p>
<p> </p>
<p>page 108, <strong>Chestnut-bellied Sandgrouse <em>Pterocles exustus</em></strong></p>
<p>Revise the range description of subspecies <em>floweri</em> from “Formerly Egypt. Extinct” to “formerly widespread in Nile Valley of northern and central Egypt; population greatly reduced (had been considered extinct), but recent records from south central Egypt presumably are this subspecies”.</p>
<p>Revise the range description of subspecies <em>ellioti</em> from “SE Sudan to Eritrea, n Ethiopia and Somalia” to “eastern Sudan, Eritrea, northern Ethiopia, and northern and central Somalia”.</p>
<p>Revise the range description of subspecies <em>olivascens</em> from “S Ethiopia to Somalia, Kenya and n Tanzania” to “southeastern South Sudan, southern Ethiopia, southwestern Somalia, Kenya, and northern Tanzania”.</p>
<p> </p>
<p>page 109, <strong>Lichtenstein’s Sandgrouse <em>Pterocles lichtensteinii</em></strong></p>
<p>Revise the range description of subspecies <em>sukensis</em> from “SE Sudan and s Ethiopia to central Kenya” to “southeastern South Sudan, southern Ethiopia and northwestern and central Kenya”.</p>
<p> </p>
<p>page 110, <strong>Delegorgue’s Pigeon <em>Columba delegorguei</em></strong></p>
<p>Revise the range description of subspecies <em>sharpei</em> from “SE Sudan to Uganda, Kenya, Tanzania and Zanzibar” to “southeastern South Sudan, Uganda, Kenya, and Tanzania (including Zanzibar)”.</p>
<p> </p>
<p>pages 110-111, <strong>Lemon Dove <em>Columba larvata</em></strong></p>
<p>Revise the range description for subspecies <em>bronzina</em> from “Ethiopia and se Sudan (Boma Hills)” to “Eritrea, Ethiopia, and southeastern South Sudan (Boma Hills)”.</p>
<p>Revise the range description for nominate <em>larvata</em> from “S Sudan to Uganda, w Tanzania, Malawi and South Africa” to “southern South Sudan, Uganda, Kenya, and Tanzania south to South Africa”.</p>
<p> </p>
<p>page 111, <strong>Metallic Pigeon <em>Columba vitiensis</em></strong></p>
<p>Revise the range description for subspecies <em>anthracina</em> from “Palawan, Calauit and islands off north Borneo” to “Palawan and adjacent islands (Calauit, Comiran, and Lambucan), Philippines”.</p>
<p> </p>
<p>page 113, <strong>African Collared-Dove <em>Streptopelia roseogrisea</em></strong></p>
<p>Revise the range description of nominate <em>roseogrisea</em> from “SW Mauritania and Senegambia to s Sudan and w Ethiopia” to “Mauritania and Senegambia east to Sudan and western Ethiopia”.</p>
<p> </p>
<p>page 121, <strong>Wetar Ground-Dove <em>Gallicolumba hoedtii</em></strong></p>
<p>page 121, <strong>Shy Ground-Dove <em>Gallicolumba stairi</em></strong></p>
<p>page 121, <strong>Santa Cruz Ground-Dove <em>Gallicolumba sanctaecrucis</em></strong></p>
<p>page 121, <strong>Thick-billed Ground-Dove <em>Gallicolumba salamonis</em></strong></p>
<p>page 121, <strong>Bronze Ground-Dove <em>Gallicolumba beccarii</em></strong></p>
<p>page 121, <strong>Palau Ground-Dove <em>Gallicolumba canifrons</em></strong></p>
<p>page 120, <strong>White-bibbed Ground-Dove <em>Gallicolumba jobiensis</em></strong></p>
<p>page 121, <strong>Marquesas Ground-Dove <em>Gallicolumba rubescens</em></strong></p>
<p>page 120, <strong>Caroline Islands Ground-Dove <em>Gallicolumba kubaryi</em></strong></p>
<p>page 120, <strong>Polynesian Ground-Dove <em>Gallicolumba erythroptera</em></strong></p>
<p>page 121, <strong>White-throated Ground-Dove <em>Gallicolumba xanthonura</em></strong></p>
<p>The genus <em>Gallicolumba</em> is not monophyletic, and many species formerly classified in this genus are removed to the genus <em>Alopecoenas</em> (Jønsson et al. 2011b, Moyle et al. 2013).</p>
<p>Change the scientific name of Wetar Ground-Dove from <em>Gallicolumba hoedtii</em> to <em>Alopecoenas hoedtii</em>.</p>
<p>Change the scientific name of Shy Ground-Dove from <em>Gallicolumba stairi</em> to <em>Alopecoenas stairi</em>. Subspecies <em>vitiensis</em>, with range “Fiji and Tonga (Vava’u, Ha’apai and Nomuka group)”, is considered to be a junior synonym of nominate <em>stairi</em> (Peters 1937), and is deleted; this species thus becomes monotypic. Revise the range of <em>stairi</em> from “Wallis and Futuna Islands and Samoa” to “Fiji, Tonga, Wallis and Futuna Islands, and Samoa”.</p>
<p>Change the scientific name of Santa Cruz Ground-Dove from <em>Gallicolumba sanctaecrucis</em> to <em>Alopecoenas sanctaecrucis</em>.</p>
<p>Change the scientific name of Thick-billed Ground-Dove from <em>Gallicolumba salamonis</em> to <em>Alopecoenas salamonis</em>. Revise the range description of Thick-billed Ground-Dove from “Solomon Islands (Ramos and Makira). Possibly extinct” to “Formerly Solomon Islands (Ramos and Makira). Extinct; last reported 1927”.</p>
<p>Change the scientific name of Bronze Ground-Dove from <em>Gallicolumba beccarii </em>to <em>Alopecoenas beccarii</em>. Revise the range description of subspecies <em>johannae</em> from “Bismarck Archipelago (Karkar and Nissan)” to “Karkar Island (off eastern New Guinea) and Bismarck Archipelago (except for the Admiralty and St. Matthias groups)”. We add a previously overlooked subspecies, <em>masculinus</em> Salomonsen 1972, following Mayr and Diamond (2001) and Dutson (2011). The range of <em>masculinus</em> is “Nissan Island (western Solomon Islands)”. Insert <em>masculinus</em> immediately following subspecies <em>admiralitatis</em>. Following the transfer of Bronze Ground-Dove from <em>Gallicolumba</em> to <em>Alopecoenas</em>, the subspecies name <em>intermedia</em> changes to <em>intermedius</em>.</p>
<p>Change the scientific name of Palau Ground-Dove from <em>Gallicolumba canifrons</em> to <em>Alopecoenas canifrons</em>.</p>
<p>Change the scientific name of White-bibbed Ground-Dove from <em>Gallicolumba jobiensis</em> to <em>Alopecoenas jobiensis</em>. Change the scientific name of subspecies <em>chalconota</em> to <em>chalconotus</em>.</p>
<p>Change the scientific name of Marquesas Ground-Dove from <em>Gallicolumba rubescens</em> to <em>Alopecoenas rubescens</em>.</p>
<p>Change the scientific name of Caroline Islands Ground-Dove from <em>Gallicolumba kubaryi</em> to <em>Alopecoenas kubaryi</em>.</p>
<p>Change the scientific name of Polynesian Ground-Dove from <em>Gallicolumba erythroptera</em> to <em>Alopecoenas erythropterus</em>.</p>
<p>Change the scientific name of White-throated Ground-Dove from <em>Gallicolumba xanthonura</em> to <em>Alopecoenas xanthonurus</em>.</p>
<p>References:</p>
<p><NAME>. 2011. Birds of Melanesia. The Bismarcks, Solomons, Vanuatu and New Caledonia. <NAME>, London.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2011b. Systematics and biogeography of Indo-Pacific ground-doves. Molecular Phylogenetics and Evolution 59: 538-543.</p>
<p><NAME>., and <NAME>. 2001. The birds of northern Melanesia: speciation, ecology, and biogeography. Oxford University Press, Oxford, United Kingdom.</p>
<p><NAME>., <NAME>, and <NAME>. 2013. A reconsideration of <em>Gallicolumba</em> (Aves: Columbidae) relationships using fresh source material reveals pseudogenes, chimeras, and a novel phylogenetic hypothesis. Molecular Phylogenetics and Evolution 66: 1060-1066.</p>
<p><NAME>. 1937. <a href="https://biodiversitylibrary.org/page/14477851">Check-list of birds of the world. Volume III</a>. Harvard University Press, Cambridge, Massachusetts.</p>
<p> </p>
<p>page (addition 2017),<strong> Tanna Ground-Dove <em>Alopecoenas ferrugineus</em></strong></p>
<p>The validity of Tanna Ground-Dove <em>Alopecoenas ferrugineus</em> formerly was questioned (Peters 1937), but this species now is widely accepted (Stresemann 1950, Greenway 1958, Dutson 2011). Insert this species, with range “Formerly Tanna Island (Vanuatu). Extinct; not reported since 1774”, immediately following Thick-billed Ground-Dove <em>Alopecoenas salamonis</em>.</p>
<p> </p>
<p>page (addition 2017), <strong>Norfolk Ground-Dove <em>Alopecoenas norfolkensis</em></strong></p>
<p>The status of Norfolk Ground-Dove <em>Alopecoenas norfolkensis</em> formerly was confused (Peters 1937), but this species now is widely accepted as valid (Goodwin 1970, Gill et al. 2010, Forshaw 2015). Insert this species, with range “Formerly Norfolk Island (Australia). Extinct since ca 1800”, immediately following White-throated Ground-Dove <em>Alopecoenas xanthonurus</em>.</p>
<p>References:</p>
<p>Forshaw, J.M. 2015. Pigeons and doves in Australia. CSIRO Publishing, Clayton South, Victoria, Australia.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME> (Checklist Committee, Ornithological Society of New Zealand). 2010. Checklist of the birds of New Zealand. Te Papa Press and the Ornithological Society of New Zealand, Wellington, New Zealand.</p>
<p><NAME>. 1970. Pigeons and doves of the world. Second edition. British Museum (Natural History), London and Cornell University Press, Ithaca, New York.</p>
<p><NAME>. 1937. Check-list of birds of the world. Volume III. Harvard University Press, Cambridge, Massachusetts.</p>
<p> </p>
<p>pages 120-121, <strong>ground-doves genus <em>Alopecoenas</em></strong></p>
<p>The genus <em>Alopecoenas</em> is repositioned to follow the genus <em>Henicophaps</em>. The sequence of species in <em>Alopecoenas</em> is revised, based on Moyle et al. (2013). The new sequence of species is:</p>
<p>Wetar Ground-Dove <em>Alopecoenas hoedtii</em></p>
<p>Shy Ground-Dove <em>Alopecoenas stairi</em></p>
<p>Santa Cruz Ground-Dove <em>Alopecoenas sanctaecrucis</em></p>
<p>Thick-billed Ground-Dove <em>Alopecoenas salamonis</em></p>
<p>Tanna Ground-Dove <em>Alopecoenas ferrugineus</em></p>
<p>Bronze Ground-Dove <em>Alopecoenas beccarii</em></p>
<p>Palau Ground-Dove <em>Alopecoenas canifrons</em></p>
<p>White-bibbed Ground-Dove <em>Alopecoenas jobiensis</em></p>
<p>Marquesas Ground-Dove <em>Alopecoenas rubescens</em></p>
<p>Caroline Islands Ground-Dove <em>Alopecoenas kubaryi</em></p>
<p>Polynesian Ground-Dove <em>Alopecoenas erythropterus</em></p>
<p>White-throated Ground-Dove <em>Alopecoenas xanthonurus</em></p>
<p>Norfolk Ground-Dove <em>Alopecoenas norfolkensis</em></p>
<p>Reference:</p>
<p><NAME>., <NAME>, and <NAME>. 2013. A reconsideration of <em>Gallicolumba</em> (Aves: Columbidae) relationships using fresh source material reveals pseudogenes, chimeras, and a novel phylogenetic hypothesis. Molecular Phylogenetics and Evolution 66: 1060-1066.</p>
<p> </p>
<p>page 120, <strong>ground-doves genus <em>Gallicolumba</em></strong></p>
<p>The genus <em>Gallicolumba</em> is not monophyletic, and many species formerly classified in this genus are removed to the genus <em>Alopecoenas</em> (Jønsson et al. 2011b, Moyle et al. 2013). The sequence of species in <em>Gallicolumba</em> is revised, based on Moyle et al. (2013). The new sequence of species is:</p>
<p>Sulawesi Ground-Dove <em>Gallicolumba tristigmata</em></p>
<p>Cinnamon Ground-Dove <em>Gallicolumba rufigula</em></p>
<p>Mindoro Bleeding-heart <em>Gallicolumba platenae</em></p>
<p>Negros Bleeding-heart <em>Gallicolumba keayi</em></p>
<p>Sulu Bleeding-heart <em>Gallicolumba menagei</em></p>
<p>Luzon Bleeding-heart <em>Gallicolumba luzonica</em></p>
<p>Mindanao Bleeding-heart <em>Gallicolumba crinigera</em></p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2011b. Systematics and biogeography of Indo-Pacific ground-doves. Molecular Phylogenetics and Evolution 59: 538-543.</p>
<p><NAME>., <NAME>, and <NAME>. 2013. A reconsideration of <em>Gallicolumba</em> (Aves: Columbidae) relationships using fresh source material reveals pseudogenes, chimeras, and a novel phylogenetic hypothesis. Molecular Phylogenetics and Evolution 66: 1060-1066.</p>
<p> </p>
<p>page 128, <strong>Pinon’s Imperial-Pigeon <em>Ducula pinon</em></strong></p>
<p>With the deletion of subspecies <em>rubiensis</em> (see below), change the scientific name of the polytypic group Pinon’s Imperial-Pigeon (Gray-headed) from <em>Ducula pinon</em> [<em>pinon</em> Group] to <em>Ducula pinon pinon/jobiensis</em>.</p>
<p>Subspecies <em>rubiensis</em>, with range “Central and s New Guinea”, is considered to represent populations in zones of introgression between nominate <em>pinon</em> and subspecies <em>jobiensis</em> (Gibbs et al. 2001, Beehler and Pratt 2016), and is deleted. Revise the range description of nominate <em>pinon</em> from “Aru Islands, w Papuan islands and sw New Guinea” to “west Papuan Islands, Aru Islands, and from Bird’s Head east across southern New Guinea to lowlands south of the Huon Gulf; intergrades with <em>jobiensis</em> around Bird’s Neck in the west, and on the Southeastern Peninsula”.</p>
<p>Revise the range description of subspecies <em>jobiensis</em> from “Yapen I.; n New Guinea e to Huon Gulf and offshore islands” to “Yapen Island, and northern New Guinea east to the Huon Gulf and offshore islands; intergrades with <em>pinon</em> around Bird’s Neck in the west, and on the Southeastern Peninsula”.</p>
<p>References:</p>
<p>Beehler, B.M., and <NAME>. 2016. Birds of New Guinea: distribution, taxonomy, and systematics. Princeton University Press, Princeton, New Jersey.</p>
<p><NAME>., <NAME>, and <NAME>. 2001. Pigeons and doves: a guide to the pigeons and doves of the world. Princeton University Press, Princeton, New Jersey.</p>
<p> </p>
<p>page 151, <strong>White-crested Turaco <em>Tauraco leucolophus</em></strong></p>
<p>Revise the range description from “Extreme se Nigeria to n Uganda, sw Sudan and w Kenya” to “extreme southeastern Nigeria and northern Cameroon east to western and southern South Sudan, northeastern Democratic Republic of the Congo, northern Uganda, and western Kenya”.</p>
<p> </p>
<p>page 151, <strong>White-cheeked Turaco <em>Tauraco leucotis</em></strong></p>
<p>Revise the range description of the monotypic group White-cheeked Turaco (White-cheeked) <em>Tauraco leucotis leucotis</em> from “<em>Podocarpus</em> forests of Eritrea, Ethiopia and se Sudan” to “Eritrea, northern and western Ethiopia, and adjacent eastern South Sudan”.</p>
<p> </p>
<p>page 158, <strong>Black-throated Coucal <em>Centropus leucogaster</em></strong></p>
<p>Add a range description for subspecies <em>efulenensis</em>: “southwestern Cameroon and northern Gabon”.</p>
<p> </p>
<p>page 158, <strong>Blue-headed Coucal <em>Centropus monachus</em></strong></p>
<p>Add a previously overlooked subspecies, <em>occidentalis</em> Neumann 1908, following Payne (2005). The range of <em>occidentalis</em> is “Guinea and Ivory Coast east to southern Nigeria, Cameroon, Equatorial Guinea, Gabon, and northern Angola; populations of the western and southern Democratic Republic of the Congo variously attributed to <em>occidentalis</em> or to <em>fischeri</em>”. Insert <em>occidentalis</em> immediately after the heading for Blue-headed Coucal <em>Centropus monachus</em>.</p>
<p>Revise the range description of subspecies <em>fischeri </em>from “Ivory Coast to w Kenya, s Sudan, Ethiopia and n Angola” to “southeastern Sudan and South Sudan to eastern Democratic Republic of the Congo, Uganda, western Kenya, Rwanda, Burundi, and northwestern Tanzania”.</p>
<p>Reference:</p>
<p><NAME>. 2005. The cuckoos. Oxford University Press, New York and Oxford, United Kingdom.</p>
<p> </p>
<p>page 159, <strong>Ash-colored Cuckoo <em>Coccycua cinerea</em></strong></p>
<p>Revise the range description from “S Brazil to n Argentina, Paraguay, Bolivia and extreme se Peru” to “breeds Paraguay, northern Argentina, southern Brazil, and Uruguay; winters north at least to Bolivia and western Brazil south of the Amazon, rarely to eastern Peru and southeastern Colombia”.</p>
<p> </p>
<p>page 154, <strong>Shining Bronze-Cuckoo <em>Chrysococcyx lucidus</em></strong></p>
<p>Correct the English name of the monotypic group <em>Chrysococcyx lucidus lucidus</em> from Shining Bronze-Cuckoo (Golden) to Shining Bronze-Cuckoo (Shining).</p>
<p>Correct the English name of the monotypic group <em>Chrysococcyx lucidus plagosus</em> from Shining Bronze-Cuckoo (Shining) to Shining Bronze-Cuckoo (Golden).</p>
<p> </p>
<p>page 155, <strong>Fork-tailed Drongo-Cuckoo <em>Surniculus dicruroides</em></strong></p>
<p>page 155, <strong>Square-tailed Drongo-Cuckoo <em>Surniculus lugubris</em></strong></p>
<p>Subspecies <em>barussarum</em> is considered to belong to Square-tailed Drongo-Cuckoo <em>Surniculus lugubris</em>, not to Fork-tailed Drongo-Cuckoo (Erritzøe et al. 2012). Change the scientific name from <em>Surniculus dicruroides barussarum</em> to <em>Surniculus lugubris barussarum</em>. Reposition <em>barussarum</em> to immediately follow the species heading for Square-tailed Drongo-Cuckoo. Revise the range of <em>barussarum</em> from “breeds northeastern India, northern Myanmar, northern Thailand, northern Indochina, and southeastern China, including Hainan; the species (mostly this population?) winters south to Sumatra” to “breeds northeastern India, northern Myanmar, northern Thailand, northern Indochina, and southeastern China, including Hainan; winters south to Sumatra”.</p>
<p>Reference:</p>
<p>Erritzøe, J., <NAME>, <NAME>, and <NAME>. 2012. Cuckoos of the world. <NAME>, London.</p>
<p> </p>
<p>page 153, <strong>Black Cuckoo <em>Cuculus clamosus</em></strong></p>
<p>Revise the range description of the monotypic group Black Cuckoo (Rufous-throated) <em>Cuculus clamosus gabonensis</em> from “Liberia to Ghana, Nigeria, s Sudan, Uganda and w Kenya” to “Liberia east to Democratic Republic of the Congo, South Sudan, Uganda and western Kenya”.</p>
<p> </p>
<p>page 161, <strong>Barn Owl <em>Tyto alba</em></strong></p>
<p>Change the English name from Barn Owl (Canary Islands) to Barn Owl (Canary Is.).</p>
<p>Change the English name of the polytypic group <em>Tyto alba</em> [<em>delicatula</em> Group] from Barn Owl (Australian) to Barn Owl (Eastern).</p>
<p>Subspecies <em>stertens</em> belongs with the group Barn Owl (Eastern) <em>Tyto alba</em> [<em>delicatula</em> Group] (Aliabadian et al. 2016), and not with the group to which it previously was assigned, Barn Owl (Eurasian) <em>Tyto alba</em> [<em>alba</em> Group].</p>
<p>Subspecies <em>javanica</em> belongs with the group Barn Owl (Eastern) <em>Tyto alba</em> [<em>delicatula</em> Group] (Aliabadian et al. 2016), and not with the group to which it previously was assigned, Barn Owl (Eurasian) <em>Tyto alba</em> [<em>alba</em> Group].</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2016. Phylogeny, biogeography, and diversification of barn owls (Aves: Strigiformes). Biological Journal of the Linnean Society 119: 904-918.</p>
<p> </p>
<p>page 162, <strong>Indian Scops-Owl <em>Otus bakkamoena</em></strong></p>
<p>page 162, <strong>Collared Scops-Owl <em>Otus lettia</em></strong></p>
<p>Subspecies <em>plumipes</em>, previously classified under Indian Scops-Owl <em>Otus bakkamoena</em>, properly belongs with Collared Scops-Owl <em>Otus lettia</em> (Rasmussen and Anderton 2005, König and Weick 2008). Position <em>plumipes</em> immediately following the species heading for Collared Scops-Owl, and change the scientific name from <em>Otus bakkamoena plumipes</em> to <em>Otus lettia plumipes</em>.</p>
<p>Revise the range description of nominate <em>lettia</em> from “E Nepal to Bangladesh, Myanmar, Thailand and Indochina” to “eastern Nepal, northeastern India (south to Odisha and northeastern Ghats), Bangladesh, Myanmar, Thailand, and Indochina”.</p>
<p>References:</p>
<p><NAME>., and <NAME>. 2008. Owls of the world. Second edition. Yale University Press, New Haven, Connecticut.</p>
<p><NAME>., and <NAME>. 2005. Birds of South Asia. The Ripley guide. Volume 2: attributes and status. Smithsonian Institution and Lynx Edicions, Washington D.C. and Barcelona.</p>
<p> </p>
<p>page 163, <strong>Sunda Scops-Owl <em>Otus lempiji</em></strong></p>
<p>Subspecies <em>cnephaeus</em>, with range “S Malay Peninsula”, previously was recognized as a monotypic group, Sunda Scops-Owl (Singapore) <em>Otus lempiji cnephaeus</em>; but this “group” is a junior synonym of nominate <em>lempiji</em> (Wells 1999), and is deleted. Consequently the polytypic group Sunda Scops-Owl (Sunda) <em>Otus lempiji</em> [<em>lempiji</em> Group] also is deleted.</p>
<p>Reference:</p>
<p>Wells, D.R. 1999. The birds of the Thai-Malay Peninsula. Volume One. Academic Press, London.</p>
<p> </p>
<p>page 166, <strong>Vermiculated Screech-Owl <em>Megascops guatemalae</em></strong></p>
<p>Revise the range description of the monotypic group Vermiculated Screech-Owl (Roraima) <em>Megascops guatemalae roraimae</em> from “Tepuis of se Venezuela and adjacent n Brazil” to “tepuis of southern Venezuela, adjacent northern Brazil, southern Guyana, and Suriname”.</p>
<p>Revise the range description of subspecies <em>napensis</em> from “Tropical e Ecuador and e Colombia” to “Andean foothills of Venezuela, eastern Colombia and eastern Ecuador; populations in coastal mountains of Venezuela presumably also are this subspecies”.</p>
<p> </p>
<p>page 167, <strong>Dusky Eagle-Owl <em>Bubo coromandus</em></strong></p>
<p>Revise the range description of nominate <em>coromandus</em> from “Pakistan to central India, s Nepal, Assam and Bangladesh” to “Pakistan to northern and central India, southern Nepal, and Bangladesh; disjunct population in southeastern China (Jiangxi and Zhejiang) perhaps also this subspecies”.</p>
<p>Revise the range description of subspecies <em>klossii</em> from “Extreme s China to s Myanmar and w Thailand” to “western and southern Myanmar, southern Thailand, and northern peninsular Malaysia (Wells 2007: 759)”.</p>
<p>Reference:</p>
<p>Wells, D.R. 2007. The birds of the Thai-Malay Peninsula. Volume Two. <NAME>, London.</p>
<p> </p>
<p>page 172, <strong>Sjostedt’s Owlet <em>Glaucidium sjostedti</em></strong></p>
<p>In accord with widespread usage (e.g., Dowsett and Forbes-Watson 1993, Christy and Clarke 1994), change the English name of <em>Glaucidium sjostedti</em> from Sjostedt’s Owlet to Sjöstedt’s Owlet.</p>
<p>References:</p>
<p><NAME>., and <NAME>. 1994. Guide des oiseaux de le Réserve de la Lopé. Écofac, Libreville, Gabon.</p>
<p><NAME>., and <NAME>. 1993. Checklist of birds of the Afrotropical and Malagasy regions. Volume 1: species limits and distribution. Tauraco Press, Liège, Belgium.</p>
<p> </p>
<p>page 169, <strong>African Wood-Owl <em>Strix woodfordii</em></strong></p>
<p>Revise the range description of subspecies <em>umbrina</em> from “Ethiopia and se Sudan” to “Ethiopia and eastern South Sudan”.</p>
<p> </p>
<p>page 175, <strong>Striped Owl <em>Pseudoscops clamator</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop713.htm">Proposal 713</a>), Striped Owl is removed from <em>Pseudoscops</em> and is placed in the genus <em>Asio</em>, based on genetic evidence (Wink et al. 2009). Change the scientific name from <em>Pseudoscops clamator</em> to <em>Asio clamator</em>. Reposition Striped Owl to follow Madagascar Long-eared Owl <em>Asio madagascariensis</em>.</p>
<p>Reference:</p>
<p><NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of owls (Strigiformes) inferred from DNA sequences of the mitochondrial cytochrome <em>b</em> and the nuclear <em>RAG-1</em> gene. Ardea 97: 581-591.</p>
<p> </p>
<p>page 183, <strong>Standard-winged Nightjar <em>Caprimulgus longipennis</em></strong></p>
<p>Revise the range description from “Senegambia to sw Sudan, n Uganda, Ethiopia and Somalia” to “breeding season from Senegambia to Liberia, east to southwestern South Sudan, northeastern Democratic Republic of the Congo, and northwestern Uganda (present during the season in southern portion of this region, but breeding there not confirmed); winters north and east from southern Mauritania and northern Senegal east to central Chad, southern Sudan, eastern South Sudan, central Uganda, Eritrea, western Ethiopia, and western Kenya”.</p>
<p> </p>
<p>page 182, <strong>Donaldson-Smith’s Nightjar <em>Caprimulgus donaldsoni</em></strong></p>
<p>Revise the range description from “Ethiopia and Somalia to se Sudan and ne Tanzania” to “northwestern and southern Somalia, eastern and southern Ethiopia, eastern Kenya, and northeastern Tanzania; possibly also in southeastern South Sudan”.</p>
<p> </p>
<p>page 182, <strong>Black-shouldered Nightjar <em>Caprimulgus nigriscapularis</em></strong></p>
<p>Revise the range description from “Senegambia to southeastern Sudan, western Kenya, and southwestern Democratic Republic of the Congo” to “Senegambia east, patchily, to southern South Sudan, Uganda, extreme western Kenya, southwestern and eastern Democratic Republic of the Congo, Rwanda, Burundi, and northwestern Tanzania”.</p>
<p> </p>
<p>page 183, <strong>Star-spotted Nightjar <em>Caprimulgus stellatus</em></strong></p>
<p>Revise the range description from “SE Sudan to Ethiopia, Djibouti, Somalia and central Kenya” to “southern South Sudan, Ethiopia (and northern Somalia?), and northern Kenya”.</p>
<p> </p>
<p>page 183, <strong>Slender-tailed Nightjar <em>Caprimulgus clarus</em></strong></p>
<p>Revise the range description from “SE Sudan to Ethiopia, Djibouti, Somalia and n Tanzania” to “southeastern South Sudan, Ethiopia, Djibouti, northwestern and southern Somalia, Uganda, Kenya, and northeastern Tanzania”.</p>
<p> </p>
<p>page 185, <strong>swiftlets genus <em>Collocalia</em></strong></p>
<p>The sequence of species of <em>Collocalia</em> swiftlets is revised, based on Rheindt et al. (2017). The new sequence of species is:</p>
<p>Pygmy Swiftlet <em>Collocalia troglodytes</em></p>
<p>Bornean Swiftlet <em>Collocalia dodgei</em></p>
<p>Christmas Island Swiftlet <em>Collocalia natalis</em></p>
<p>Cave Swiftlet <em>Collocalia linchi</em></p>
<p>Plume-toed Swiftlet <em>Collocalia affinis</em></p>
<p>Gray-rumped Swiftlet <em>Collocalia marginata</em></p>
<p>Ridgetop Swiftlet <em>Collocalia isonota</em></p>
<p>Tenggara Swiftlet <em>Collocalia sumbawae</em></p>
<p>Drab Swiftlet <em>Collocalia neglecta</em></p>
<p>Glossy Swiftlet <em>Collocalia esculenta</em></p>
<p>Satin Swiftlet <em>Collocalia uropygialis</em></p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2017. Speciation in Indo-Pacific swiftlets (Aves: Apodidae): integrating molecular and phenotypic data for a new provisional taxonomy of the <em>Collocalia esculenta</em> complex. Zootaxa 4250: 401-433.</p>
<p> </p>
<p>page 189, <strong>African Palm-Swift <em>Cypsiurus parvus</em></strong></p>
<p>Revise the range description of nominate <em>parvus</em> from “Senegambia to s Sudan, Ethiopia and sw Arabia” to “Senegambia east to South Sudan, Eritrea, and Ethiopia; also southwestern Saudi Arabia and western Yemen”.</p>
<p>Revise the range description of subspecies <em>myochrous</em> from “Higher elevations from s Sudan to ne South Africa” to “southern South Sudan south to northeastern South Africa”.</p>
<p> </p>
<p>page 203, <strong>Speckled Hummingbird <em>Adelomyia melanogenys</em></strong></p>
<p>Correct the English name of the monotypic group <em>Adelomyia melanogenys inornata</em> from Speckled Hummingbird (<em>inornata</em> to Speckled Hummingbird (<em>inornata</em>).</p>
<p> </p>
<p>page 700, <strong>Brace’s Emerald <em>Chlorostilbon bracei</em></strong></p>
<p>With the addition of subspecies <em>elegans </em>(see below), we recognize nominate <em>bracei</em> as a new monotypic group, Brace’s Emerald (Brace’s) <em>Chlorostilbon bracei bracei</em>.</p>
<p>Insert a previously overlooked taxon, <em>elegans</em> Gould 1860 (Weller 1999). This taxon may be a valid species (Weller 1999), but provisionally we recognize it as a subspecies of Brace’s Emerald; insert <em>elegans</em> following the entry for nominate <em>bracei</em>, with range “Formerly the Caribbean; extinct. Distribution unknown, possibly occurred on Jamaica or in the Bahamas. Known from a single specimen from 1860”. We recognize <em>elegans</em> as a new monotypic group, Brace’s Emerald (Caribbean) <em>Chlorostilbon bracei elegans</em>.</p>
<p>Reference:</p>
<p><NAME>. 1999. On types of trochilids in The Natural History Museum, Tring II. Re-evaluation of <em>Erythronota</em> (?) <em>elegans</em> Gould 1860: a presumed extinct species of the genus <em>Chlorostilbon</em>. Bulletin of the British Ornithologists’ Club 119: 197-202.</p>
<p> </p>
<p>page 194, <strong>Gray-breasted Sabrewing <em>Campylopterus largipennis</em></strong></p>
<p>Subspecies <em>aequatorialis</em>, with range “E Colombia to Ecuador, Peru, n Bolivia and nw Brazil”, is considered to be a junior synonym of subspecies <em>obscurus</em> (Lopes et al. 2017), and is deleted. Revise the range description of <em>obscurus</em> from “NE Brazil (e Pará and Maranhão)” to “eastern Colombia south to northern Bolivia and east, mostly south of the Amazon, to eastern Amazonian Brazil (eastern Pará and Maranhão)”.</p>
<p>Lopes et al. (2017) described a new taxon of sabrewing from central Brazil, with range “eastern Brazil (southeastern Goiás, southwestern Bahia, and northern Minas Gerais”. They described this sabrewing as a new species, <em>Campylopterus calcirupicola</em> (Dry-forest Sabrewing), but, pending review of this proposed new species by AOS-SACC, we provisionally classify it as a subspecies, <em>Campylopterus largipennis calcirupicola</em>.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, and <NAME>. 2017. A cryptic new species of hummingbird of the <em>Campylopterus largipennis</em> complex (Aves: Trochilidae). Zootaxa 4268: 1-33.</p>
<p> </p>
<p>page 202, <strong>Berylline Hummingbird <em>Amazilia beryllina</em></strong></p>
<p>Revise the range description of nominate <em>beryllina</em> from “E and central Mexico (Veracruz to Chiapas)” to “eastern and central Mexico (Veracruz to central Oaxaca)”.</p>
<p> </p>
<p>page 199, <strong>Violet-bellied Hummingbird <em>Damophila julie</em></strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2017), change the scientific name of Violet-bellied Hummingbird from <em>Damophila julie</em> to <em>Juliamyia julie</em>. The name <em>Damophila </em>Reichenbach 1854 is preoccupied by an earlier use of this name, <em>Damophila</em> Curtis 1832, proposed for a genus of Lepidoptera (Özdikmen 2008).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2017. Fifty-eighth supplement to the American Ornithological Society’s Check-list of North American birds. Auk 134: 751-773.</p>
<p><NAME>. 2008. <em>Neodamophila</em> nom. nov., a replacement name for the bird genus <em>Damophila</em> Reichenbach, 1854 (Aves: Apodiformes: Trochilidae). Munis Entomology and Zoology 3: 171-173.</p>
<p> </p>
<p>page 211, <strong>Speckled Mousebird <em>Colius striatus</em></strong></p>
<p>Revise the range description of subspecies <em>jebelensis</em> from “N border of Uganda and s Sudan” to “South Sudan and northern Uganda”.</p>
<p> </p>
<p>page 212, <strong>Blue-naped Mousebird <em>Urocolius macrourus</em></strong></p>
<p>Revise the range description of subspecies <em>pulcher</em> from “SE Sudan to s Somalia, Kenya, Uganda and Tanzania” to “extreme southeastern South Sudan, southern Ethiopia, and southern Somalia to eastern Uganda, Kenya, and northern Tanzania”.</p>
<p> </p>
<p>pages 228-229, <strong>Woodhoopoes and Scimitar-bills Phoeniculidae</strong></p>
<p>Change the English name of the family Phoeniculidae from Woodhoopoes and Scimitar-bills to Woodhoopoes and Scimitarbills.</p>
<p> </p>
<p>page 229, <strong>Red-billed Dwarf Hornbill <em>Lophoceros camurus</em></strong></p>
<p>Revise the range description from “S Sierra Leone to extreme s Sudan, w Uganda and n Angola” to “southern Sierra Leone to Democratic Republic of the Congo, northwestern Angola (Cabinda), southwestern South Sudan and western Uganda”.</p>
<p> </p>
<p>page 216, <strong>Shining-blue Kingfisher <em>Alcedo quadribrachys</em></strong></p>
<p>Revise the range description of subspecies <em>guentheri</em> from “Coastal sw Nigeria to extreme s Sudan, Kenya and Zambia” to “southwestern Nigeria to extreme southern South Sudan, extreme western Kenya, and northwestern Zambia”.</p>
<p> </p>
<p>page 226, <strong>Boehm’s Bee-eater <em>Merops boehmi</em></strong></p>
<p>In accord with widespread usage (e.g., Britton 1980, Dowsett and Forbes-Watson 1993, Stevenson and Fanshawe 2002), change the English name of <em>Merops boehmi</em> from Boehm’s Bee-eater to Böhm’s Bee-eater.</p>
<p>References:</p>
<p><NAME>. (editor). 1980. Birds of east Africa. East Africa Natural History Society, Nairobi.</p>
<p><NAME>., and <NAME>. 1993. Checklist of birds of the Afrotropical and Malagasy regions. Volume 1: species limits and distribution. Tauraco Press, Liège, Belgium.</p>
<p><NAME>., and <NAME>. 2002. Field guide to the birds of East Africa. T & A D Poyser, London.</p>
<p> </p>
<p>page 235, <strong>Red-and-yellow Barbet <em>Trachyphonus erythrocephalus</em></strong></p>
<p>Revise the range description of subspecies <em>versicolor</em> from “NE Uganda to se Sudan, Ethiopia and n Kenya” to “southeastern South Sudan, northeastern Uganda, southern and southwestern Ethiopia, southwestern Somalia, and northern Kenya”.</p>
<p> </p>
<p>page 235, <strong>D’Arnaud’s Barbet <em>Trachyphonus darnaudii</em></strong></p>
<p>Revise the range description of nominate <em>darnaudii</em> from “SE Sudan and sw Ethiopia to ne Uganda and w-central Kenya” to “southeastern South Sudan, southwestern Ethiopia, northeastern Uganda, and west central Kenya”.</p>
<p> </p>
<p>page 236, <strong>Red-fronted Tinkerbird <em>Pogoniulus pusillus</em></strong></p>
<p>Revise the range description of subspecies <em>affinis</em> from “SE Sudan to se Ethiopia, s Somalia, Kenya, Uganda, se Tanzania” to “southeastern South Sudan to southeastern Ethiopia and southern Somalia, south to northern Uganda, Kenya, and eastern Tanzania”.</p>
<p> </p>
<p>page 237, <strong>Red-fronted Barbet <em>Tricholaema diademata</em></strong></p>
<p>Revise the range description of nominate <em>diademata</em> from “SE Sudan and n-cent. Ethiopia to se Uganda and central Kenya” to “southeastern South Sudan and central Ethiopia south to eastern Uganda and central Kenya”.</p>
<p> </p>
<p>page 237, <strong>Black-throated Barbet <em>Tricholaema melanocephala</em></strong></p>
<p>Revise the range description of subspecies <em>stigmatothorax</em> from “SE Sudan, s Ethiopia and s Somalia to Kenya and Tanzania” to “southeastern South Sudan, southern Ethiopia, southern Somalia, Kenya, and northern and central Tanzania”.</p>
<p> </p>
<p>page 237, <strong>White-headed Barbet <em>Lybius leucocephalus</em></strong></p>
<p>Revise the range description of nominate <em>leucocephalus</em> from “E Cent. African Rep. and s Sudan to Kenya and nw Tanzania” to “South Sudan to northeastern Democratic Republic of the Congo, western Kenya, and northwestern Tanzania; possibly also eastern Central African Republic”.</p>
<p> </p>
<p>page 238, <strong>Sooty Barbet <em>Calorhamphus hayii</em></strong></p>
<p>page 238, <strong>Brown Barbet <em>Calorhamphus fuliginosus</em></strong></p>
<p>Change the genus name <em>Calorhamphus</em> to the correct original spelling, <em>Caloramphus</em> (Dickinson and Remsen 2013).</p>
<p>Reference:</p>
<p><NAME>., and J.V. Remsen, Jr. (editors). 2013. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 1. Aves Press, Eastbourne, United Kingdom.</p>
<p> </p>
<p>page 243, <strong>Yellow-throated Toucan <em>Ramphastos ambiguus</em></strong></p>
<p>Revise the range description of subspecies <em>abbreviatus</em> from “E slope of Andes of Colombia to w Venezuela and e Peru” to “northeastern Colombia (west slope of the Eastern Andes south to the central Magdalena Valley) and northwestern and northern Venezuela”.</p>
<p>Revise the range description of nominate <em>ambiguus</em> from “Northern section of upper Amazon basin” to “eastern Andes from southern Colombia (upper Magdalena Valley) to central Peru”.</p>
<p> </p>
<p>page 243, <strong>Willcocks’s Honeyguide <em>Indicator willcocksi</em></strong></p>
<p>Revise the range description of subspecies <em>hutsoni</em> from “N-central Nigeria to s Chad and sw Sudan” to “north central Nigeria east to extreme southern Chad and southwestern South Sudan”.</p>
<p> </p>
<p>page 243, <strong>Pallid Honeyguide <em>Indicator meliphilus</em></strong></p>
<p>Revise the range description of nominate <em>meliphilus</em> from “E Uganda and central Kenya to central Tanzania and sw Sudan” to “eastern Uganda and central and southeastern Kenya to northeastern Tanzania”.</p>
<p> </p>
<p>page 244, <strong>Lesser Honeyguide <em>Indicator minor</em></strong></p>
<p>Revise the range description of subspecies <em>teitensis</em> from “SE Sudan to Somalia, ne Namibia, Zimbabwe and c Mozambique” to “southeastern South Sudan, southeastern Ethiopia, and southern Somalia south and southwest to Angola, northeastern Namibia, Zimbabwe, and central Mozambique”.</p>
<p> </p>
<p>page 244, <strong>Yellow-rumped Honeyguide <em>Indicator xanthonotus</em></strong></p>
<p>Correct the spelling of the subspecies name <em>radcliffi</em> to <em>radcliffii</em>.</p>
<p> </p>
<p>page 244, <strong>Rufous-necked Wryneck <em>Jynx ruficollis</em></strong></p>
<p>Revise the range description of the monotypic group Rufous-necked Wryneck (Rufous-necked) <em>Jynx ruficollis ruficollis</em> from “SE Gabon to s Democratic Republic of the Congo, e Uganda, s Sudan, n Angola and e S Africa” to “southeastern Gabon east to Uganda, western Kenya, and northwestern Tanzania, south locally to northern and eastern Angola, northwestern Zambia and adjacent southern Democratic Republic of the Congo, northern Mozambique, and eastern South Africa”.</p>
<p>Revise the range description of the monotypic group Rufous-necked Wryneck (Bar-throated) <em>Jynx ruficollis pulchricollis</em> from “SE Nigeria and Cameroon to nw Democratic Republic of the Congo, s Sudan and nw Uganda” to “southeastern Nigeria and Cameroon to northwestern Democratic Republic of the Congo, South Sudan, and northwestern Uganda”.</p>
<p> </p>
<p>page 245, <strong>Golden-spangled Piculet <em>Picumnus exilis</em></strong></p>
<p>Correct the spelling of the subspecies name <em>buffoni</em> to <em>buffonii</em> (David et al. 2009a).</p>
<p>Reference:</p>
<p><NAME>., <NAME>, and <NAME>. 2009a. Contributions to a list of first reviser actions: ornithology. Zootaxa 2085: 1-24.</p>
<p> </p>
<p>page 248, <strong>Fine-spotted Woodpecker <em>Campethera punctuligera</em></strong></p>
<p>Revise the range description of nominate <em>punctuligera</em> from “SW Mauritania to Senegambia, Cameroon, sw Sudan, ne Democratic Republic of the Congo” to “southwestern Mauritania south to Sierra Leone, east to Central African Republic and southern Chad (and adjacent southwestern Sudan?), and northern Democratic Republic of the Congo”.</p>
<p>Revise the range description of subspecies <em>balia</em> from “S Sudan to extreme ne Democratic Republic of the Congo” to “South Sudan and extreme northeastern Democratic Republic of the Congo”.</p>
<p> </p>
<p>page 248, <strong>Golden-tailed Woodpecker <em>Campethera abingoni</em></strong></p>
<p>Revise the range description of the monotypic group subspecies Golden-tailed Woodpecker (Streak-backed) <em>Campethera abingoni chrysura</em> from “Senegambia to s Sudan, ne Democratic Republic of the Congo and w Uganda” to “Senegambia to South Sudan, northeastern Democratic Republic of the Congo, and western Uganda”.</p>
<p> </p>
<p>page 248, <strong>Green-backed Woodpecker <em>Campethera cailliautii</em></strong></p>
<p>Revise the range description of the monotypic group Green-backed Woodpecker (Plain-backed) <em>Campethera cailliautii permista</em> from “E Ghana to sw Sudan, sw Uganda, n Angola and central Democratic Republic of the Congo” to “eastern Ghana east to extreme southwestern South Sudan and southwestern Uganda, south to northern Angola and southern Democratic Republic of the Congo”.</p>
<p> </p>
<p>page 249, <strong>Buff-spotted Woodpecker <em>Campethera nivosa</em></strong></p>
<p>Revise the range description of subspecies <em>herberti</em> from “Central African Rep. to w Kenya, sw Sudan, Uganda and e Democratic Republic of the Congo” to “Central African Republic and extreme southwestern South Sudan to eastern Democratic Republic of the Congo, central Uganda, and western Kenya”.</p>
<p> </p>
<p>page 249, <strong>Speckle-breasted Woodpecker <em>Dendropicos poecilolaemus</em></strong></p>
<p>Revise the range description from “Extreme se Nigeria to sw Sudan, Uganda and w Kenya” to “extreme southeastern Nigeria east to southwestern South Sudan, northeastern Democratic Republic of the Congo, Uganda, northern Rwanda, and western Kenya”.</p>
<p> </p>
<p>page 249, <strong>Cardinal Woodpecker <em>Dendropicos fuscescens</em></strong></p>
<p>Revise the range description of subspecies <em>sharpii</em> from “Cameroon to s Sudan, w Democratic Republic of the Congo and n Angola” to “Cameroon and Central African Republic to western Democratic Republic of the Congo and northern Angola”.</p>
<p>Revise the range description of subspecies <em>lepidus</em> from “E Democratic Republic of the Congo to highlands of Ethiopia to Uganda, Kenya, nw Tanzania” to “South Sudan and eastern Democratic Republic of the Congo to highlands of Ethiopia, Uganda, Kenya, and northwestern Tanzania”.</p>
<p>Revise the range description of subspecies <em>hemprichii</em> from “Lower elevations of Ethiopia to Somalia and e Kenya” to “lower elevations of the Horn of Africa: Eritrea, Ethiopia, and Somalia to northern and eastern Kenya”.</p>
<p> </p>
<p>page 249, <strong>Bearded Woodpecker <em>Dendropicos namaquus</em></strong></p>
<p>Revise the range description of nominate <em>namaquus</em> from “W Cent. Afr. Rep. to s Sudan, e Democratic Republic of the Congo, Tanzania and Angola” to “western Central African Republic to southern Sudan, eastern Democratic Republic of the Congo, Uganda, and western Kenya, and from Angola south to Namibia east to Tanzania, to Botswana, central Mozambique, and northern South Africa”.</p>
<p> </p>
<p>page 249, <strong>Golden-crowned Woodpecker <em>Dendropicos xantholophus</em></strong></p>
<p>Revise the range description from “SW Cameroon to s Sudan, Uganda, w Kenya and nw Angola” to “southeastern Nigeria and southwestern Cameroon to extreme southern South Sudan, Uganda, western Kenya, central Democratic Republic of the Congo, and northwestern Angola”.</p>
<p> </p>
<p>page 250, <strong>African Gray Woodpecker <em>Dendropicos goertae</em></strong></p>
<p>Revise the range description of subspecies <em>abessinicus</em> from “E Sudan to w Ethiopia” to “eastern Sudan, western Eritrea, and northern and western Ethiopia”.</p>
<p>Revise the range description of nominate <em>goertae</em> from “Senegambia to s Sudan, Democratic Republic of the Congo, w Kenya and nw Tanzania” to “southwestern Mauritania and Senegambia to South Sudan, Democratic Republic of the Congo, western Kenya, and northwestern Tanzania”.</p>
<p> </p>
<p>page 250, <strong>Brown-capped Woodpecker <em>Dendrocopos nanus</em></strong></p>
<p>Correct the spelling of the subspecies name <em>gymnopthalmus</em> to <em>gymnopthalmos</em>.</p>
<p> </p>
<p>page 255, <strong>Northern Flicker <em>Colaptes auratus</em></strong></p>
<p>Change the English name of the monotypic group <em>Colaptes auratus gundlachi</em> from Northern Flicker (Grand Cayman Island) to Northern Flicker (Grand Cayman I.).</p>
<p> </p>
<p>page 50, <strong>Eurasian Kestrel <em>Falco tinnunculus</em></strong></p>
<p>Change the English name of the polytypic group <em>Falco tinnunculus canariensis/dacotiae</em> from Eurasian Kestrel (Canary Islands) to Eurasian Kestrel (Canary Is.).</p>
<p> </p>
<p>page 53, <strong>Peregrine Falcon <em>Falco peregrinus</em></strong></p>
<p>Revise the range description of subspecies <em>ernesti</em> from “Philippines to New Guinea, Bismarck Arch. and Indonesia” to “Thai-Malay Peninsula, Philippines, Greater Sundas, New Guinea, and Bismarck Archipelago; birds of Solomon Islands probably also this subspecies”.</p>
<p>Reverse the sequence of subspecies <em>nesiotes </em>and <em>ernesti</em>, so that <em>nesiotes</em> follows (not precedes) <em>ernesti</em>.</p>
<p> </p>
<p>page 139, <strong>Rose-ringed Parakeet <em>Psittacula krameri</em></strong></p>
<p>Revise the range description of nominate <em>krameri</em> from “Mauritania to Senegal, Guinea, w Uganda and s Sudan” to “southern Mauritania south to Guinea, east to southern Sudan, South Sudan, and extreme northern Uganda. The species is widely introduced around the world, although the subspecific identity of these populations is poorly known; now feral across Europe and the Middle East, and locally in north and south Africa, Mauritius, the Seychelles, locally in southeast Asia, Japan, Australia, New Zealand, the Hawaiian Islands, locally in North America (California, Florida, and northern Baja California), locally in the Greater and Lesser Antilles, and in Venezuela”.</p>
<p>Revise the range description of subspecies <em>parvirostris</em> from “E Sudan (Sennar) to Eritrea, Ethiopia, Djibouti and nw Somalia” to “eastern Sudan, Eritrea, northwestern Ethiopia, and Djibouti”.</p>
<p> </p>
<p>page 700, <strong>Newton’s Parrot <em>Psittacula exsul</em></strong></p>
<p>Correct the English name of <em>Psittacula exsul </em>from Newton’s Parrot to Newton’s Parakeet.</p>
<p> </p>
<p>page 700, <strong>Paradise Parakeet <em>Psephotus pulcherrimus</em></strong></p>
<p>Correct the English name of <em>Psephotus pulcherrimus</em> from Paradise Parakeet to Paradise Parrot.</p>
<p> </p>
<p>page 142, <strong>Meyer’s Parrot <em>Poicephalus meyeri</em></strong></p>
<p>Revise the range description of nominate <em>meyeri </em>from “N Cameroon to s Chad, n Democratic Republic of the Congo, s Sudan and Ethiopia” to “northeastern Cameroon to southern Chad, southern Sudan (and South Sudan?), western Ethiopia, and Eritrea”.</p>
<p>Revise the range description of subspecies <em>saturatus</em> from “Uganda, western Kenya, Rwanda, Burundi, eastern Democratic Republic of the Congo, and northwestern Tanzania” to “northeastern Democratic Republic of the Congo, Uganda, western Kenya, Rwanda, Burundi, and northwestern Tanzania”.</p>
<p> </p>
<p>page 142, <strong>Niam-Niam Parrot <em>Poicephalus crassus</em></strong></p>
<p>Revise the range description from “SW Chad to Central African Republic and extreme sw Sudan” to “southwestern Chad, Central African Republic, extreme northern Democratic Republic of the Congo, and extreme southwestern South Sudan”.</p>
<p> </p>
<p>page 146, <strong>Barred Parakeet <em>Bolborhynchus lineola</em></strong></p>
<p>Revise the range description of subspecies <em>tigrinus</em> from “Mountains of nw Venezuela and Colombia to s Peru” to “Andes from Venezuela south to southwestern Ecuador and to central Bolivia (Santa Cruz); also coastal cordillera of Venezuela (Aragua)”.</p>
<p> </p>
<p>page 144, <strong>Olive-throated Parakeet <em>Eupsittula nana</em></strong></p>
<p>Revise the range description of the monotypic group Olive-throated Parakeet (Jamaican) <em>Eupsittula nana nana</em> from “Jamaica” to “Jamaica; a population on Hispaniola (Sierra de Baoruco, Dominican Republic) is believed to stem from a recent introduction from Jamaica”.</p>
<p> </p>
<p>page 143, <strong>Golden Parakeet <em>Guaruba guaruba</em></strong></p>
<p>Correct the spelling of the species name from <em>guaruba</em> to <em>guarouba</em>.</p>
<p> </p>
<p>page 266, <strong>Hooded Pitta <em>Pitta sordida</em></strong></p>
<p>Revise the range description of the monotypic group Hooded Pitta (Chestnut-crowned) <em>Pitta sordida cucullata</em> from “Foothills of n India to s China (Yunnan) and Indochina” to “breeds Himalayan foothills of northern India east to southern China (Yunnan), northern Myanmar and Indochina, south to Bangladesh, Thailand, and northwestern peninsular Malaysia; winters to southern peninsular Malaysia, Sumatra, and Java”.</p>
<p>Revise the range description of subspecies <em>mulleri</em> from “Greater Sundas and western Sulu Islands” to “Sumatra, Java, Borneo, and western Sulu Islands”.</p>
<p> </p>
<p>page 293, <strong>Variable Antshrike <em>Thamnophilus caerulescens</em></strong></p>
<p>Change the subspecies name <em>subandinus</em> to the older available name, <em>melanochrous</em>.</p>
<p> </p>
<p>page 302, <strong>Common Scale-backed Antbird <em>Willisornis poecilinotus</em></strong></p>
<p>Change the scientific name of the polytypic group Common Scale-backed Antbird (Buff-breasted) from <em>Willisornis poecilinotus lepidonotus/duidae</em> to <em>Willisornis poecilinotus lepidonota/duidae</em>.</p>
<p>Change the scientific name of subspecies <em>lepidonotus</em> to <em>lepidonota</em> (Dickinson and Christidis 2014).</p>
<p>Reference:</p>
<p><NAME>., and <NAME>. 2014. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 2. Aves Press, Eastbourne, United Kingdom.</p>
<p> </p>
<p>page 311, <strong>Collared Crescentchest <em>Melanopareia torquata</em></strong></p>
<p>Revise the range description of subspecies <em>bitorquata</em> from “Cerrado of e Bolivia (ne Santa Cruz)” to “cerrado of Bolivia (eastern Santa Cruz) and adjacent Brazil (southwestern Mato Grosso)”.</p>
<p> </p>
<p>page 308, <strong>Ceara Gnateater <em>Conopophaga cearae</em></strong></p>
<p>Revise the range description from “NE Brazil (Serra de Baturité in n Ceará)” to “local in northeastern Brazil (Serra de Baturité in Ceará, Pernambuco, and Chapada Diamantina in northern Bahia)”.</p>
<p> </p>
<p>page 283, <strong>Plain Xenops <em>Xenops minutus</em></strong></p>
<p>Add a previously overlooked subspecies, <em>alagoanus</em> Pinto 1954, with range “northeastern Brazil (Paraíba, Pernambuco, and Alagoas)”; insert <em>alagoanus</em> immediately following subspecies <em>genibarbis</em>.</p>
<p>Revise the range description of Plain Xenops (White-throated) <em>Xenops minutus minutus</em> from “E Brazil (Pernambuco) to e Paraguay and ne Argentina” to “eastern Brazil (north to southern Bahia) to eastern Paraguay and northeastern Argentina”.</p>
<p> </p>
<p>page 281, <strong>Pale-browed Treehunter <em>Cichlocolaptes leucophrus</em></strong></p>
<p>With the addition of subspecies <em>holti</em>, revise the range of nominate <em>leucophrus</em> from “E Brazil (s Bahia to ne Santa Catarina)” to “eastern Brazil (southern Bahia south to Rio de Janeiro)”. Nominate <em>leucophrus</em> is recognized as a new monotypic group, Pale-browed Treehunter (Pale-tailed) <em>Cichlocolaptes leucophrus leucophrus</em>.</p>
<p>Add a previously overlooked subspecies, <em>holti</em> Pinto 1941, with range “southeastern Brazil (São Paulo south to northeastern Santa Catarina)” (Ridgely and Tudor 1994, Remsen 2003). Insert <em>holti</em> immediately following nominate <em>leucophrus</em>. Subspecies <em>holti</em> is recognized as a new monotypic group, Pale-browed Treehunter (Rufous-tailed) <em>Cichlocolaptes leucophrus holti</em>.</p>
<p>References:</p>
<p><NAME>., Jr. 2003. Family Furnariidae (ovenbirds). Pages 162-357 in J. <NAME>, <NAME>, and <NAME> (editors), Handbook of the birds of the world. Volume 8. Lynx Edicions, Barcelona.</p>
<p><NAME>., and <NAME>. 1994. The birds of South America. Volume II. University of Texas Press, Austin, Texas.</p>
<p> </p>
<p>page 281, <strong>White-throated Foliage-gleaner <em>Syndactyla roraimae</em></strong></p>
<p>Revise the range description of subspecies <em>paraquensis</em> from “Tepuis of s Venezuela (Mt. Paraque, Parú and Ptari-tepui)” to “tepuis of southern Venezuela (Cerro Sipapo [Cerro Paraque], northwestern Amazonas)”.</p>
<p>Revise the range description of subspecies <em>duidae</em> from “Tepuis of s Venezuela (Mt. Duida and Mt. Yaví)” to “tepuis of southern Venezuela (Cerros Parú, Yavi, Gimé, Duida, and de la Neblina, Amazonas)”.</p>
<p>Subspecies <em>albigularis</em>, with range “Subtropical mountains of se Venezuela (Gran Sabana)”, is a junior synonym of nominate <em>roraimae</em> (Eisenmann in Vaurie 1980: 342, footnote 144), and is deleted. Add a previously overlooked subspecies, <em>urutani</em> Phelps and Dickerman 1980, with range “tepuis of southern Venezuela (Cerros Jaua and Urutaní, southern Bolívar)”.</p>
<p>Revise the range description of nominate <em>roraimae</em> from “Tepuis of extreme n Brazil (Mt. Roraima)” to “tepuis of southern Venezuela (Gran Sabana area, including Roraima), extreme northern Brazil (Roraima), and western Guyana”.</p>
<p>References:</p>
<p><NAME>., Jr., and <NAME>. 1980. Cuatro subespecies nuevas de aves (Furnariidae, Formicariidae) de la region de Pantepui, Estado Bolivar y Territorio Amazonas, Venezuela. Boletín de la Sociedad Venezolana de Ciencias Naturales 138: 139-147.</p>
<p><NAME>. 1980. Taxonomy and geographical distribution of the Furnariidae (Aves, Passeriformes). Bulletin of the American Museum of Natural History 166: 1-357.</p>
<p> </p>
<p>page 274, <strong>Marcapata Spinetail <em>Cranioleuca marcapatae</em></strong></p>
<p>Revise the range description of subspecies <em>weskei</em> from “SE Peru (cloud forests of Cordillera Vilcabamba in Cuzco)” to “Andes of south central Peru (Mantaro Valley in Junín south to Cordillera Vilcabamba in Cusco)”. Reverse the sequence of the two subspecies, so that <em>weskei</em> is listed before nominate <em>marcapatae</em>.</p>
<p> </p>
<p>page 273, <strong>White-bellied Spinetail <em>Synallaxis propinqua</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop717.htm">Proposal 717</a>), change the scientific name of White-bellied Spinetail from <em>Synallaxis propinqua</em> to <em>Mazaria propinqua</em>, based on genetic evidence that White-bellied Spinetail is not a member of the genus <em>Synallaxis</em> (Derryberry et al. 2011, Claramunt 2014). Reposition White-bellied Spinetail to immediately follow Red-and-white Spinetail <em>Certhiaxis mustelinus</em>.</p>
<p>References:</p>
<p>Claramunt, S. 2014. Phylogenetic relationships among Synallaxini spinetails (Aves: Furnariidae) reveal a new biogeographic pattern across the Amazon and Paraná river basins. Molecular Phylogenetics and Evolution 78: 223-231.</p>
<p><NAME>., <NAME>, G. Derryberry, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>., and <NAME>. 2011. Lineage diversification and morphological evolution in a large-scale continental radiation: the Neotropical ovenbirds and woodcreepers (Aves: Furnariidae). Evolution 65: 2973–2986.</p>
<p> </p>
<p>page 271, <strong>Chotoy Spinetail <em>Schoeniophylax phryganophilus</em></strong></p>
<p>In accord with AOS-SACC, reposition Chotoy Spinetail to immediately follow White-bellied Spinetail <em>Mazaria propinqua</em> (formerly <em>Synallaxis propinqua</em>), which itself is moved to a new position following Red-and-white Spinetail <em>Certhiaxis mustelinus</em>.</p>
<p> </p>
<p>pages 272-273, <strong>spinetail genus <em>Synallaxis</em></strong></p>
<p>The sequence of species of <em>Synallaxis</em> spinetails is revised, in accord with AOS-SACC. The new sequence of species is:</p>
<p>Ochre-cheeked Spinetail <em>Synallaxis scutata</em></p>
<p>Gray-bellied Spinetail <em>Synallaxis cinerascens</em></p>
<p>Plain-crowned Spinetail <em>Synallaxis gujanensis</em></p>
<p>White-lored Spinetail <em>Synallaxis albilora</em></p>
<p>Marañon Spinetail <em>Synallaxis maranonica</em></p>
<p>Great Spinetail <em>Synallaxis hypochondriaca</em></p>
<p>Necklaced Spinetail <em>Synallaxis stictothorax</em></p>
<p>Russet-bellied Spinetail <em>Synallaxis zimmeri</em></p>
<p>Slaty Spinetail <em>Synallaxis brachyura</em></p>
<p>Silvery-throated Spinetail <em>Synallaxis subpudica</em></p>
<p>Red-shouldered Spinetail <em>Synallaxis hellmayri</em></p>
<p>Rufous-capped Spinetail <em>Synallaxis ruficapilla</em></p>
<p>Bahia Spinetail <em>Synallaxis cinerea</em></p>
<p>Pinto’s Spinetail <em>Synallaxis infuscata</em></p>
<p>Dusky Spinetail <em>Synallaxis moesta</em></p>
<p>McConnell’s Spinetail <em>Synallaxis macconnelli</em></p>
<p>Cabanis’s Spinetail <em>Synallaxis cabanisi</em></p>
<p>Cinereous-breasted Spinetail <em>Synallaxis hypospodia</em></p>
<p>Spix’s Spinetail <em>Synallaxis spixi</em></p>
<p>Dark-breasted Spinetail <em>Synallaxis albigularis</em></p>
<p>Rio Orinoco Spinetail <em>Synallaxis beverlyae</em></p>
<p>Pale-breasted Spinetail <em>Synallaxis albescens</em></p>
<p>Sooty-fronted Spinetail <em>Synallaxis frontalis</em></p>
<p>Azara’s Spinetail <em>Synallaxis azarae</em></p>
<p>Apurimac Spinetail <em>Synallaxis courseni</em></p>
<p>Hoary-throated Spinetail <em>Synallaxis kollari</em></p>
<p>Rufous-breasted Spinetail <em>Synallaxis erythrothorax</em></p>
<p>White-whiskered Spinetail <em>Synallaxis candei</em></p>
<p>Blackish-headed Spinetail <em>Synallaxis tithys</em></p>
<p>Rusty-headed Spinetail <em>Synallaxis fuscorufa</em></p>
<p>Rufous Spinetail <em>Synallaxis unirufa</em></p>
<p>Black-throated Spinetail <em>Synallaxis castanea</em></p>
<p>Stripe-breasted Spinetail <em>Synallaxis cinnamomea</em></p>
<p>Ruddy Spinetail <em>Synallaxis rutilans</em></p>
<p>Chestnut-throated Spinetail <em>Synallaxis cherriei</em></p>
<p> </p>
<p>page 325, <strong>Slaty-capped Flycatcher <em>Leptopogon superciliaris</em></strong></p>
<p>In light of vocal differences (e.g., Ridgely and Greenfield 2001), we resurrect subspecies <em>transandinus</em>. Provisionally we continue to treat subspecies <em>hellmayri</em> as a junior synonym of <em>transandinus</em>, so the range of <em>transandinus</em> is “highlands of Costa Rica and Panama, and west slope of the Andes of Colombia and Ecuador”. We recognize <em>transandinus</em> as a new monotypic group, Slaty-capped Flycatcher (<em>transandinus</em>) <em>Leptopogon superciliaris transandinus</em>.</p>
<p>Revise the range description of subspecies Slaty-capped Flycatcher (<em>superciliaris</em>) <em>Leptopogon superciliaris superciliaris</em> from “Mts. of Costa Rica to Venezuela, n Brazil and w Bolivia” to “coastal mountains and Andes of Venezuela south through Andes of Colombia (except for west slope of Western Andes) and east slope of Andes of Ecuador and Peru (south to the Apurímac Valley in Cuzco)”.</p>
<p>Reference:</p>
<p>Ridgely, R.S., and <NAME>. 2001. The birds of Ecuador: status, distribution, and taxonomy. Cornell University Press, Ithaca, New York.</p>
<p> </p>
<p>page 333, <strong>Cinnamon Manakin-Tyrant <em>Neopipo cinnamomea</em></strong></p>
<p>Our current sequence of genera in Tyrannidae is based on the lists of AOS-NACC and AOS-SACC, which differ from one another, and neither of which well reflects current knowledge of the phylogeny of these birds. Resolution of this problem is a task that we postpone until a later date. In the short term, with the transfer of Kinglet Calyptura <em>Calyptura cristata</em> from Cotingidae to Tyrannidae (see below), based on its relationship to Cinnamon Manakin-Tyrant <em>Neopipo cinnamomea</em> and the spadebills (Platyrinchus), we move Cinnamon Manakin-Tyrant to a new position, immediately following Yellow-breasted Flycatcher <em>Tolmomyias flaviventris</em>.</p>
<p> </p>
<p>page 313, <strong>Kinglet Calyptura <em>Calyptura cristata</em></strong></p>
<p>In accord with AOS-SACC <a href="http://www.museum.lsu.edu/~Remsen/SACCprop727.htm">(Proposal 727</a>), Kinglet Calyptura <em>Calyptura cristata</em> is removed from Cotingidae and placed in Tyrannidae, following genetic evidence that it is closely related to Cinnamon Manakin-Tyrant <em>Neopipo cinnamomea</em> and to spadebills (<em>Platyrinchus</em>) (Ohlsson et al. 2012). Position Kinglet Calyptura between Cinnamon Manakin-Tyrant and the spadebills.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. Nuclear DNA from a 180-year old study skin reveals the phylogenetic position of the Kinglet Calyptura <em>Calyptura cristata</em> (Passeriformes: Tyrannides). Ibis 154: 533-541.</p>
<p> </p>
<p>page 343, <strong>Fork-tailed Flycatcher <em>Tyrannus savana</em></strong></p>
<p>Revise the range description of the monotypic group Fork-tailed Flycatcher (<em>savana</em>) <em>Tyrannus savana savana</em> from “Central and s S America and Falkland Is.; winters to West Indies” to “breeds in central and southern South America (eastern Bolivia and southern Brazil south to central Argentina); southern populations migratory, wintering in northern South America, Trinidad, and Tobago”.</p>
<p> </p>
<p>page 312, <strong>Swallow-tailed Cotinga <em>Phibalura flavirostris</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop726.htm">Proposal 726</a>), Swallow-tailed Cotinga <em>Phibalura flavirostris</em> is repositioned to follow Rufous-tailed Plantcutter <em>Phytotoma rara</em>, based on Berv and Prum (2014).</p>
<p>Reference:</p>
<p><NAME>., and <NAME>. 2014. A comprehensive multilocus phylogeny of the Neotropical cotingas (Cotingidae, Aves) with a comparative evolutionary analysis of breeding system and plumage dimorphism and a revised phylogenetic classification. Molecular Phylogenetics and Evolution 81: 120-136.</p>
<p> </p>
<p>page 560, <strong>Northern Wattled-Honeyeater <em>Foulehaio taviuensis</em></strong></p>
<p>Correct the spelling of the species name from <em>taviuensis</em> to <em>taviunensis</em>.</p>
<p> </p>
<p>page 466, <strong>Brown-throated Wattle-eye <em>Platysteira cyanea</em></strong></p>
<p>Revise the range description of nominate <em>cyanea</em> from “Senegal to Gabon, Angola, Central African Republic and Democratic Republic of the Congo” to “Senegal to Gabon, Angola, western Central African Republic, and northwestern Democratic Republic of the Congo”.</p>
<p>Revise the range description of subspecies <em>aethiopica</em> from “SE Sudan (Boma) and Ethiopia” to “eastern South Sudan and Ethiopia”.</p>
<p>Revise the range description of subspecies <em>nyansae</em> from “S Sudan to n Democratic Republic of the Congo, Kenya, Uganda and nw Tanzania” to “eastern Central African Republic and eastern and central Democratic Republic of the Congo to southern South Sudan, western Kenya, and northwestern Tanzania”.</p>
<p> </p>
<p>page 467, <strong>Jameson’s Wattle-eye <em>Platysteira jamesoni</em></strong></p>
<p>Revise the range description from “E Democratic Republic of the Congo to Uganda, se Sudan, w Kenya and nw Tanzania” to “eastern Democratic Republic of the Congo, extreme southern South Sudan, Uganda, western Kenya, and extreme northwestern Tanzania”.</p>
<p> </p>
<p>page 576, <strong>White Helmetshrike <em>Prionops plumatus</em></strong></p>
<p>Revise the range description of subspecies <em>concinnatus</em> from “Cent. Cameroon to Democratic Republic of the Congo, se Sudan, Ethiopia and n Uganda” to “central Cameroon east to southern Sudan, northern and western Ethiopia, Eritrea, northeastern Democratic Republic of the Congo, and northern Uganda”.</p>
<p>Revise the range description of subspecies <em>cristatus</em> from “Eritrea and w Ethiopia to se Sudan, e Uganda and nw Kenya” to “Eritrea, central and western Ethiopia, eastern South Sudan, eastern Uganda, and northwestern Kenya”.</p>
<p> </p>
<p>page 466, <strong>African Shrike-flycatcher <em>Megabyas flammulatus</em></strong></p>
<p>Revise the range description of subspecies <em>aequatorialis</em> from “NW Angola to Democratic Republic of the Congo, Uganda, w Kenya and extreme s Sudan” to “northern Angola to central Democratic Republic of the Congo, extreme southern South Sudan, Uganda, and western Kenya; also southeastern Democratic Republic of the Congo and adjacent northwestern Zambia”.</p>
<p> </p>
<p>page 572, <strong>Brubru <em>Nilaus afer</em></strong></p>
<p>Revise the range description of subspecies <em>minor</em> from “SE Sudan to s Eritrea, Somalia, extreme n Kenya and Tanzania” to “southeastern South Sudan, southeastern Ethiopia, central and southern Somalia, northern and eastern Kenya, and northeastern Tanzania”.</p>
<p> </p>
<p>page 573, <strong>Pink-footed Puffback <em>Dryoscopus angolensis</em></strong></p>
<p>Revise the range description of subspecies <em>nandensis</em> from “E Democratic Republic of the Congo to s Sudan, Uganda, w Rwanda and w Kenya” to “northeastern Democratic Republic of the Congo, extreme southern South Sudan, Uganda, Rwanda, Burundi, western Kenya, and northwestern Tanzania”.</p>
<p> </p>
<p>page 573, <strong>Marsh Tchagra <em>Tchagra minutus</em></strong></p>
<p>Revise the range description of nominate <em>minutus</em> from “Sierra Leone to s Sudan, Ethiopia, w Kenya and nw Tanzania” to “Sierra Leone east to South Sudan, eastern Sudan, and Ethiopia, south to western Kenya and northwestern Tanzania”.</p>
<p> </p>
<p>page 573, <strong>Black-crowned Tchagra <em>Tchagra senegalus</em></strong></p>
<p>Revise the range description of subspecies <em>habessinicus</em> from “S Sudan (upper Nile Province) to Eritrea, Ethiopia and Somalia” to “eastern Sudan and eastern South Sudan to Eritrea, Ethiopia, Djibouti, and northwestern Somalia”.</p>
<p>Revise the range description of subspecies <em>armenus</em> from “S Cameroon to n Democratic Republic of the Congo, s Sudan, Uganda, Kenya, Mozambique” to “southern Cameroon south to Angola, and east to northern Democratic Republic of the Congo, western South Sudan, Uganda, western Kenya, and Tanzania, south to Malawi, northern Mozambique, and northern Zimbabwe”.</p>
<p> </p>
<p>page 573, <strong>Brown-crowned Tchagra <em>Tchagra australis</em></strong></p>
<p>Revise the range description of subspecies <em>emini</em> from “SE Nigeria to Democratic Republic of the Congo, s Sudan, Rwanda, w Kenya, nw Tanzania” to “southeastern Nigeria to northern and eastern Democratic Republic of the Congo, southern South Sudan, Uganda, Rwanda, Burundi, central Kenya, and northwestern Tanzania”.</p>
<p> </p>
<p>page 573, <strong>Three-streaked Tchagra <em>Tchagra jamesi</em></strong></p>
<p>Revise the range description of nominate <em>jamesi</em> from “Extreme se Sudan to Somalia, Uganda, Ethiopia and n Kenya” to “extreme southeastern South Sudan, southern Ethiopia, Somalia, northeastern Uganda, northern and eastern Kenya, and extreme northeastern Tanzania”.</p>
<p> </p>
<p>page 573, <strong>Luehder’s Bushshrike <em>Laniarius luehderi</em></strong></p>
<p>In accord with widespread usage (e.g., Fry and Keith 2000, Harris 2000), change the English name of <em>Laniarius luehderi</em> from Luehder’s Bushshrike to Lühder’s Bushshrike.</p>
<p>Revise the range description from “E Nigeria to s Cameroon, s Sudan, w Kenya and sw Tanzania” to “southeastern Nigeria and southern Cameroon south to northwestern Angola (Cabinda) and southern Congo; northeastern Democratic Republic of the Congo, extreme southern South Sudan, Uganda, western Kenya, Rwanda, Burundi, and western Tanzania”.</p>
<p>References:</p>
<p><NAME>., and <NAME> (editors). 2000. The birds of Africa. Volume VI. Academic Press, London.</p>
<p><NAME>. 2000. Shrikes and bush-shrikes. Princeton University Press, Princeton, New Jersey.</p>
<p> </p>
<p>page 574, <strong>Tropical Boubou <em>Laniarius major</em></strong></p>
<p>Revise the range description of nominate <em>major</em> from “Sierra Leone to s Sudan, Kenya, Tanzania and Malawi” to “Sierra Leone east to southern South Sudan, Kenya, and Tanzania, south to Malawi”.</p>
<p> </p>
<p>page 574, <strong>Fuelleborn’s Boubou <em>Laniarius fuelleborni</em></strong></p>
<p>In accord with widespread usage (e.g., Fry and Keith 2000, Harris 2000), change the English name of <em>Laniarius fuelleborni</em> from Fuelleborn’s Boubou to Fülleborn’s Boubou.</p>
<p>Change the English name of the monotypic group <em>Laniarius fuelleborni usambaricus</em> from Fuelleborn’s Boubou (Usamabara) to Fülleborn’s Boubou (Usambara).</p>
<p>Change the English name of the monotypic group <em>Laniarius fuelleborni fuelleborni</em> from Fuelleborn’s Boubou (Fuelleborn’s) to Fülleborn’s Boubou (Fülleborn’s).</p>
<p>References:</p>
<p><NAME>., and <NAME> (editors). 2000. The birds of Africa. Volume VI. Academic Press, London.</p>
<p><NAME>. 2000. Shrikes and bush-shrikes. Princeton University Press, Princeton, New Jersey.</p>
<p> </p>
<p>page 574, <strong>Rosy-patched Bushshrike <em>Rhodophoneus cruentus</em></strong></p>
<p>Revise the range description of nominate <em>cruentus</em> from “Extreme se Egypt to Eritrea, Ethiopia and extreme se Sudan” to “extreme southeastern Egypt, northeastern Sudan, Eritrea, and northern Ethiopia”.</p>
<p>Revise the range description of subspecies <em>hilgerti</em> from “E and s Ethiopia to Somalia and n Kenya” to “extreme southeastern South Sudan, southern and eastern Ethiopia, Djibouti, Somalia, and northern and eastern Kenya”.</p>
<p> </p>
<p>page 575, <strong>Sulphur-breasted Bushshrike <em>Telophorus sulfureopectus</em></strong></p>
<p>Revise the range description of subspecies <em>similis</em> from “S Sudan, Ethiopia, e Democratic Republic of the Congo, e Uganda, Kenya to South Africa” to “southern Sudan, South Sudan, Ethiopia, southern Somalia, and eastern Democratic Republic of the Congo south to Angola, northern Namibia, and South Africa”.</p>
<p> </p>
<p>page 575, <strong>Mt. Kupe Bushshrike <em>Telophorus kupeensis</em></strong></p>
<p>Change the English name of <em>Telophorus kupeensis</em> from Mt. Kupe Bushshrike to Mount Kupe Bushshrike.</p>
<p> </p>
<p>page 368, <strong>Large Cuckooshrike <em>Coracina macei</em></strong></p>
<p>Revise the range description of subspecies <em>larutensis</em> from “N Malaya” to “peninsular Malaysia”.</p>
<p> </p>
<p>page 373, <strong>Red-shouldered Cuckooshrike <em>Campephaga phoenicea</em></strong></p>
<p>Revise the range description from “Senegambia to s Sudan, Ethiopia, w Kenya and n Angola” to “Senegambia to southern Sudan, Ethiopia, and Eritrea, south to northern Congo, northern Democratic Republic of the Congo, Uganda, and western Kenya”.</p>
<p> </p>
<p>page 484, <strong>Gray Whistler <em>Pachycephala simplex</em></strong></p>
<p>Change the scientific name of the polytypic group <em>Pachycephala simplex simplex/dubia</em> to <em>Pachycephala simplex simplex/brunnescens</em>, and change the English name of this group from Gray Whistler (Gray) to Gray Whistler (Brown).</p>
<p>The subspecies name <em>dubia</em> is preoccupied in <em>Pachycephala</em>, and is replaced by the new name <em>brunnescens</em> Wolters (Wolters 1980, Dickinson and Christidis 2014).</p>
<p>Subspecies <em>gagiensis</em>, with range “Gagi I. (New Guinea)”; subspecies <em>waigeuensis</em>, with range “Waigeo and Gebe islands (New Guinea)”; and subspecies <em>perneglecta</em>, with range “S New Guinea”, all are considered to be junior synonyms of subspecies <em>griseiceps</em> (Beehler and Pratt 2016), and are deleted. Revise the range description of <em>griseiceps</em> from “Aru Islands and ne New Guinea” to “New Guinea: northwestern Islands, Aru Islands, Bird’s Head and Bird’s Neck to southern lowlands, Trans-Fly, and southern Southeastern Peninsula east to Port Moresby”.</p>
<p>References:</p>
<p><NAME>., and <NAME>. 2016. Birds of New Guinea: distribution, taxonomy, and systematics. Princeton University Press, Princeton, New Jersey.</p>
<p><NAME>., and <NAME>. 2014. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 2. Aves Press, Eastbourne, United Kingdom.</p>
<p><NAME>. 1980. Die Vogelarten der Erde. Paul Parey, Hamburg.</p>
<p> </p>
<p>page 570, <strong>Emin’s Shrike <em>Lanius gubernator</em></strong></p>
<p>Revise the range description from “Savanna of Ivory Coast to s Sudan, n Uganda and ne Democratic Republic of the Congo” to “Ivory Coast to South Sudan, northeastern Democratic Republic of the Congo, and northern Uganda”.</p>
<p> </p>
<p>page 571, <strong>Taita Fiscal <em>Lanius dorsalis</em></strong></p>
<p>Revise the range description from “S Sudan to Somalia, Ethiopia and ne Tanzania” to “southeastern South Sudan, northeastern Uganda, Ethiopia, Somalia, Kenya, and northeastern Tanzania”.</p>
<p> </p>
<p>page 571, <strong>Northern Fiscal <em>Lanius humeralis</em></strong></p>
<p>Revise the range description of subspecies <em>smithii </em>from “Guinea to s Sudan” to “Guinea to northern Democratic Republic of the Congo, southern South Sudan, and western Uganda”.</p>
<p> </p>
<p>page 572, <strong>Yellow-billed Shrike <em>Corvinella corvina</em></strong></p>
<p>Revise the range description of subspecies <em>caliginosa</em> from “Sudan (Bahr al Ghazal)” to “western South Sudan”.</p>
<p>Revise the range description of subspecies <em>affinis</em> from “S Sudan to extreme ne Democratic Republic of the Congo, n Uganda and w Kenya” to “southern South Sudan to extreme northeastern Democratic Republic of the Congo, northern Uganda, and western Kenya”.</p>
<p> </p>
<p>page 572, <strong>White-rumped Shrike <em>Eurocephalus ruppelli</em></strong></p>
<p>Revise the range description from “S Sudan to Somalia, Ethiopia, Kenya and Tanzania” to “southeastern South Sudan and Uganda to Ethiopia, Somalia, Kenya and Tanzania”.</p>
<p> </p>
<p>page 624, <strong>Tawny-crowned Greenlet <em>Tunchiornis ochraceiceps</em></strong></p>
<p>Revise the range description of subspecies <em>ferrugineifrons</em> from “SE Colombia to the Guianas, e Peru and nw Amazonian Brazil” to “southeastern Colombia east to southern Venezuela, extreme western Guyana, and northwestern Brazil (west of the Rio Negro), south to northeastern Peru and western Amazonian Brazil south of the Amazon, east to the west bank of the Madeira river”.</p>
<p>Revise the range description of subspecies <em>viridior</em> from “Tropical s Peru (Ayacucho and Cuzco) to n Bolivia” to “eastern Peru (south of the Marañón and Amazon rivers) south to northern Bolivia”.</p>
<p>Revise the range description of subspecies <em>luteifrons</em> from “Extreme e Venezuela, the Guianas and n Amazonian Brazil” to “extreme eastern Venezuela (eastern Bolívar), the Guianas and northern Amazonian Brazil (east of the Rio Negro)”.</p>
<p> </p>
<p>page 622, <strong>Cassin’s Vireo <em>Vireo cassinii</em></strong></p>
<p>Correct the spelling of the subspecies name <em>lucasanas</em> to <em>lucasanus</em> (Brewster 1891).</p>
<p>Reference:</p>
<p>Brewster, W. 1891. <a href="https://sora.unm.edu/sites/default/files/journals/auk/v008n02/p0139-p0149.pdf">Descriptions of seven supposed new North American birds</a>. Auk 8: 139-149.</p>
<p> </p>
<p>page 488, <strong>Variable Pitohui <em>Pitohui kirhocephalus</em></strong></p>
<p>Correct the spelling of the scientific name of the polytypic group Variable Pitohui (Raja Ampat) from <em>Pitohui kirhocephalus cervineiventris/pallidus</em> to <em>Pitohui kirhocephalus cerviniventris/pallidus</em>.</p>
<p>Correct the spelling of the subspecies name <em>cervineiventris</em> to <em>cerviniventris</em> (Gray 1861).</p>
<p>Reference:</p>
<p><NAME>. 1861. <a href="https://biodiversitylibrary.org/page/28672992">Remarks on, and descriptions of, new species of birds lately sent by Mr. <NAME> from Waigiou, Mysol, and Gagie Islands</a>. Proceedings of the Zoological Society of London [1861] 427-438.</p>
<p> </p>
<p>page 569, <strong>Black-winged Oriole <em>Oriolus nigripennis</em></strong></p>
<p>Revise the range description from “S Guinea to se Sudan, Cameroon and Angola; Bioko I.” to “Sierra Leone and Liberia east to northern and central Democratic Republic of the Congo, southern South Sudan, and western Uganda, south to northern Angola; Bioko I.”.</p>
<p> </p>
<p>page 577, <strong>Square-tailed Drongo <em>Dicrurus ludwigii</em></strong></p>
<p>Revise the range description of subspecies <em>sharpei</em> from “Senegal to n Angola, Democratic Republic of the Congo, s Sudan, Uganda and w Kenya” to “Senegal to northwestern Angola, northern Democratic Republic of the Congo, southern South Sudan, Uganda and western Kenya”.</p>
<p> </p>
<p>page 468, <strong>Northern Fantail <em>Rhipidura rufiventris</em></strong></p>
<p>Change the English name of the monotypic group <em>Rhipidura rufiventris obiensis</em> from Northern Fantail (Gray-backed) to Northern Fantail (Obi).</p>
<p>Change the English name of the monotypic group <em>Rhipidura rufiventris bouruensis</em> from Northern Fantail (Rusty-bellied) to Northern Fantail (Buru).</p>
<p>The polytypic group Northern Fantail (Plain) <em>Rhipidura rufiventris</em> [<em>gularis </em>Group], which included subspecies <em>cinerea, perneglecta, assimilis</em>, and <em>gularis</em>, is partitioned. Subspecies <em>perneglecta</em>, with (incorrect!) range “Watubela Islands (s Moluccas)”, is considered to be a junior synonym of subspecies <em>assimilis</em> (White and Bruce 1986, Schodde and Matthews 1977), and is deleted. Revise the range description of subspecies <em>assimilis</em> from “Kai Islands (Kai Kecil and Kai Besar)” to “Tayandu Islands and Kai Islands, South Moluccas”. Subspecies <em>assimilis</em> and subspecies <em>finitima</em>, which formerly was included in the polytypic group Northern Fantail (Cream-bellied) <em>Rhipidura rufiventris</em> [<em>rufiventris</em> Group], together form a new group, Northern Fantail (Kai) <em>Rhipidura assimilis/finitima</em>. Subspecies <em>gularis</em> is transferred, with subspecies <em>vidua</em> (formerly included in the polytypic group Northern Fantail (Slaty) <em>Rhipidura rufiventris vidua/kordensis</em>) to the polytypic group Northern Fantail (Melanesian) <em>Rhipidura rufiventris</em> [<em>setosa</em> Group]. Subspecies <em>cinerea</em> is recognized as a new monotypic group, Northern Fantail (Seram) <em>Rhipidura rufiventris cinerea</em>.</p>
<p>Correct the range description of subspecies <em>finitima</em> from “Tayandu Islands (Taam, Kilsuin and Kur)” to “Tiur, Watubela Islands, South Moluccas”.</p>
<p>Change the English name of the monotypic group <em>Rhipidura rufiventris tenkatei</em> from Northern Fantail (Speckle-throated) to Northern Fantail (Rote).</p>
<p>The polytypic group Northern Fantail (Cream-bellied) <em>Rhipidura rufiventris</em> [<em>rufiventris</em> Group], which included subspecies <em>rufiventris, finitima, pallidiceps, hoedti</em>, and <em>isura</em>, is partitioned. Subspecies <em>rufiventris</em> and <em>pallidiceps</em> together form a new polytypic group, Northern Fantail (Timor) <em>Rhipidura rufiventris rufiventris/pallidiceps</em>. Subspecies <em>isura</em> joins a new polytypic group, Northern Fantail (Northern) <em>Rhipidura rufiventris</em> [<em>isura</em> Group]. Subspecies <em>hoedti</em> is recognized as a new monotypic group, Northern Fantail (Banda Sea) <em>Rhipidura rufiventris hoedti</em>.</p>
<p>Subspecies <em>kordensis</em>, previously included in the polytypic group Northern Fantail (Slaty) <em>Rhipidura rufiventris vidua/korden</em>sis, is recognized as a new monotypic group, Northern Fantail (Biak) <em>Rhipidura rufiventris kordensis</em>.</p>
<p>References:</p>
<p><NAME>., and <NAME>. 1977. Contributions to Papuasian ornithology V. Survey of the birds of Taam Island, Kai Group. Division of Wildlife Research technical paper number 33, CSIRO, Melbourne, Australia.</p>
<p><NAME>., and <NAME>. 1986. The birds of Wallacea. (Sulawesi, the Moluccas & Lesser Sunda Islands, Indonesia). British Ornithologists’ Union Check-list number 7. British Ornithologists’ Union, London.</p>
<p> </p>
<p>page 472, <strong>Blue-headed Crested-Flycatcher <em>Trochocercus nitens</em></strong></p>
<p>Revise the range description of nominate <em>nitens</em> from “Nigeria and Cameroon to Gabon, n Angola, e Democratic Republic of the Congo and s Sudan” to “Nigeria and Cameroon to Gabon, northern Angola, Democratic Republic of the Congo, southern South Sudan, Uganda, Rwanda, and Burundi”.</p>
<p> </p>
<p>page 473, <strong>African Paradise-Flycatcher <em>Terpsiphone viridis</em></strong></p>
<p>Revise the range description of subspecies <em>speciosa</em> from “S Cameroon to e Democratic Republic of the Congo, s Sudan and Gabon” to “southern Cameroon south to northeastern Angola, east to Democratic Republic of the Congo and southwestern South Sudan”.</p>
<p> </p>
<p>page 475, <strong><NAME>bill <em>Clytorhynchus vitiensis</em></strong></p>
<p>Change the subspecies name <em>compressirostris</em> Layard 1876 to the older available name <em>brunneus</em> Ramsay 1875.</p>
<p> </p>
<p>page 476, <strong>Spectacled Monarch <em>Symposiachrus trivirgatus</em></strong></p>
<p>Delete subspecies <em>bernsteinii</em>, with range “Salawati I. (New Guinea)”. No representative of this species occurs on Salawati; the single known specimen of <em>bernsteinii</em> apparently came from Ambon Island, and so <em>bernsteinii</em> is a junior synonym of <em>nigrimentum</em> (Dekker 2003, Beehler and Pratt 2016).</p>
<p>Revise the range description of subspecies <em>albiventris</em> from “S New Guinea; Torres Straits is.; Cape York Pen to Burdekin R.” to “northeastern Australia: Torres Straits islands, and coastal northeastern Queensland (Cape York south to McIlwraith Range)”.</p>
<p>Revise the range description of subspecies <em>melanopterus</em> from “Louisiade Archipelago” to “Southeastern Islands (southeastern New Guinea)”.</p>
<p>Revise the range description of subspecies <em>gouldii</em> from “E Australia (Clarke Range, Queensland to near Sydney, NSW)” to “breeds eastern Australia (Clarke Range, Queensland south to near Sydney, New South Wales); at least partially migratory, wintering to southern New Guinea (Trans-Fly) and Torres Strait Islands”.</p>
<p>References:</p>
<p><NAME>., and <NAME>. 2016. Birds of New Guinea: distribution, taxonomy, and systematics. Princeton University Press, Princeton, New Jersey.</p>
<p>Dekker, R.W.R.J. 2003. Type specimens of birds in the National Museum of Natural History, Leiden. Part 2. Nationaal Natuurhistorisch Museum Technical Bulletin 6.</p>
<p> </p>
<p>page 590, <strong>Eurasian Jay <em>Garrulus glandarius</em></strong></p>
<p>Correct the subspecies name <em>barringtoni</em> to the correct original spelling, <em>haringtoni</em>. Subspecies <em>haringtoni</em> belongs to the polytypic group Eurasian Jay (Himalayan) <em>Garrulus glandarius</em> [<em>bispecularis</em> Group] (Hartert 1918, Vaurie 1959), and not to the group Eurasian Jay (White-faced); reposition <em>haringtoni</em> to follow subspecies <em>sinensis</em>.</p>
<p>Subspecies <em>oatesi</em>, previously included in the group Eurasian Jay (Himalayan), should be placed in the Eurasian Jay (White-faced) (Hartert 1918, Goodwin 1976). Reposition <em>oatesi</em> to follow the heading for the group Eurasian Jay (White-faced). Correct the scientific name of the polytypic group Eurasian Jay (White-faced) from <em>Garrulus glandarius leucotis/barringtoni</em> to <em>Garrulus glandarius leucotis/oatesi</em>.</p>
<p>References:</p>
<p><NAME>. 1976. Crows of the world. Cornell University Press, Ithaca, New York.</p>
<p><NAME>. 1918. <a href="https://biodiversitylibrary.org/page/3856788"><em>Garrulus bispecularis</em> and its allies, with list of all forms of <em>Garrulus</em></a>. Novitates Zoologicae 25: 430-433.</p>
<p><NAME>. 1959. The birds of the Palearctic fauna. Passeriformes. H.F. & G. Witherby Limited, London.</p>
<p> </p>
<p>page 591, <strong>Rufous Treepie <em>Dendrocitta vagabunda</em></strong></p>
<p>Revise the range description of subspecies <em>behni</em> from “northeastern Pakistan and northwestern India to western Nepal” to “western India (Rajasthan, western Madhya Pradesh, and Gujarat south to Karnataka)” (Ripley 1982, Steinheimer 2009).</p>
<p>References:</p>
<p><NAME>. 1982. A synopsis of the birds of India and Pakistan together with those of Nepal, Bhutan, Bangladesh and Sri Lanka. Bombay Natural History Society, Bombay.</p>
<p><NAME>. 2009. The type specimens of Corvidae (Aves) in the Museum für Naturkunde at the Humboldt-University of Berlin, with the description of a new subspecies of <em>Dendrocitta vagabunda</em>. Zootaxa 2149: 1-49.</p>
<p> </p>
<p>page 592, <strong>Gray Treepie <em>Dendrocitta formosae</em></strong></p>
<p>Revise the range description of subspecies <em>assimilis</em> from “S Myanmar to Thailand and Andaman Islands” to “southern Myanmar to Thailand”.</p>
<p> </p>
<p>page 592, <strong>Eurasian Magpie <em>Pica pica</em></strong></p>
<p>Change the English name of the monotypic group <em>Pica pica mauritanica</em> from Eurasian Magpie (African) to Eurasian Magpie (North African).</p>
<p> </p>
<p>page 593, <strong>Eurasian Jackdaw <em>Corvus monedula</em></strong></p>
<p>Revise the range description of subspecies <em>spermologus</em> from “W and central Europe; > to Canary Islands and Corsica” to “western and central Europe, including the British Isles, and Morocco and northwestern Algeria”.</p>
<p>Revise the range description of subspecies <em>cirtensis</em> from “N Africa (Morocco and Algeria)” to “northeastern Algeria; formerly also northwestern Tunisia, that population now extinct”.</p>
<p> </p>
<p>page 595, <strong>Somali Crow <em>Corvus edithae</em></strong></p>
<p>Revise the range description from “Eritrea, Ethiopia, Somalia, Kenya and se Sudan” to “Eritrea, Djibouti, eastern and southern Ethiopia, Somalia, extreme southeastern South Sudan, and northern Kenya”.</p>
<p> </p>
<p>page 595, <strong>Thick-billed Raven <em>Corvus crassirostris</em></strong></p>
<p>Revise the range description from “Mts. of Ethiopia and Eritrea; vagrant to nw Somalia and se Sudan” to “Eritrea and Ethiopia”.</p>
<p> </p>
<p>page 480, <strong>New Zealand Robin <em>Petroica australis</em></strong></p>
<p>Change the English name of the monotypic group <em>Petroica australis longipes</em> from New Zealand Robin (North Island) to New Zealand Robin (North I.).</p>
<p>Change the English name of the polytypic group <em>Petroica australis australis/rakiura</em> from New Zealand Robin (South Island) to New Zealand Robin (South I.).</p>
<p> </p>
<p>page 382, <strong>Western Nicator <em>Nicator chloris</em></strong></p>
<p>Revise the range description from “Senegal to Democratic Republic of the Congo, Uganda, extreme s Sudan and w Tanzania” to “Senegal to northern Angola, Democratic Republic of the Congo, southern South Sudan, Uganda, and western Tanzania”.</p>
<p> </p>
<p>page 352, <strong>Chestnut-backed Sparrow-Lark <em>Eremopterix leucotis</em></strong></p>
<p>Revise the range description of subspecies <em>melanocephalus </em>from “Senegambia to Nile River” to “Senegambia to central and southern Sudan”.</p>
<p>Revise the range description of nominate <em>leucotis</em> from “E and s Sudan to Eritrea and Ethiopia” to “South Sudan to Ethiopia, Eritrea, and northwestern Somalia”.</p>
<p> </p>
<p>page 352, <strong>Chestnut-headed Sparrow-Lark <em>Eremopterix signatus</em></strong></p>
<p>Revise the range description of nominate <em>signatus</em> from “Extreme se Sudan to se Ethiopia and Somalia” to “southern and eastern Ethiopia, Somalia, and eastern Kenya”.</p>
<p>Revise the range description of subspecies <em>harrisoni</em> from “SE Sudan to nw Kenya (west of Lake Turkana)” to “southeastern South Sudan and northwestern Kenya”.</p>
<p> </p>
<p>page 349, <strong>Red-winged Lark <em>Mirafra hypermetra</em></strong></p>
<p>Revise the range description of subspecies <em>kidepoensis </em>from “S Sudan and ne Uganda” to “southeastern South Sudan and northeastern Uganda”.</p>
<p>Revise the range description of subspecies <em>kathangorensis</em> from “Extreme se Sudan” to “southeastern South Sudan and southwestern Ethiopia”.</p>
<p> </p>
<p>page 349, <strong>Flappet Lark <em>Mirafra rufocinnamomea</em></strong></p>
<p>Revise the range description of subspecies <em>furensis </em>from “W-c Sudan (w Darfur)” to “southwestern Sudan”.</p>
<p>Revise the range description of subspecies <em>sobatensis</em> from “C Sudan (confluence of White Nile and Sobat rivers)” to “northeastern South Sudan”.</p>
<p>Revise the range description of subspecies <em>torrida</em> from “SE Sudan to s Ethiopia, Uganda, c Kenya and Tanzania” to “southeastern Sudan and southeastern Ethiopia south to central Uganda, central Kenya, and central Tanzania”.</p>
<p> </p>
<p>page 348, <strong>White-tailed Lark <em>Mirafra albicauda</em></strong></p>
<p>Revise the range description from “W Chad to s Sudan, Ethiopia, Kenya and Tanzania” to “western Chad; eastern Sudan; northern South Sudan; northeastern Democratic Republic of the Congo, Uganda, south central Ethiopia, Kenya, and Tanzania”.</p>
<p> </p>
<p>page 348, <strong>Singing Bushlark <em>Mirafra cantillans</em></strong></p>
<p>Revise the range description of subspecies <em>marginata </em>from “S Sudan to Eritrea, ne Ethiopia, Somalia and Kenya” to “South Sudan, eastern Uganda, northern and southeastern Ethiopia, Eritrea, Somalia, western Kenya, and northeastern Tanzania”.</p>
<p> </p>
<p>page 353, <strong>Syke’s Short-toed Lark <em>Calandrella dukhunensis</em></strong></p>
<p>Correct the English name from Syke’s Short-toed Lark to Sykes’s Short-toed Lark.</p>
<p> </p>
<p>page 356, <strong>Thekla Lark <em>Galerida theklae</em></strong></p>
<p>Change the English name of <em>Galerida theklae</em> from Thekla Lark to Thekla’s Lark.</p>
<p> </p>
<p>page 356, <strong>Maghreb Lark <em>Galerida macrorhyncha</em></strong></p>
<p>Correct the spelling of subspecies name <em>randoni</em> to <em>randonii</em> (David et al. 2009a).</p>
<p>Reference:</p>
<p><NAME>., <NAME>, and <NAME>. 2009a. Contributions to a list of first reviser actions: ornithology. Zootaxa 2085: 1-24.</p>
<p> </p>
<p>page 360, <strong>Chilean Swallow <em>Tachycineta meyeni</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop718.htm">Proposal 718</a>), change the species name of Chilean Swallow from <em>meyeni</em> (back) to <em>leucopyga</em>, following Mlíkovsky and Frahnert (2009).</p>
<p>Reference:</p>
<p><NAME>., and <NAME>. 2009. Nomenclatural notes on Neotropical swallows of the genus <em>Tachycineta</em> Cabanis (Aves: Hirundinidae). Zootaxa 2209: 65-68.</p>
<p> </p>
<p>page 358, <strong>Banded Martin <em>Riparia cincta</em></strong></p>
<p>Revise the range description of subspecies <em>suahelica</em> from “S Sudan to Kenya, Uganda, Zambia, Zimbabwe, Mozambique” to “southern South Sudan to eastern Democratic Republic of the Congo, Uganda, western Kenya, Rwanda, Burundi, western Tanzania, Zambia, Malawi, Zimbabwe, and western Mozambique”.</p>
<p> </p>
<p>page 361, <strong>Rock Martin <em>Ptyonoprogne fuligula</em></strong></p>
<p>Revise the range description of subspecies <em>fusciventris</em> from “S Chad, CAR, w and s Sudan, sw Ethiopia to n Mozambique” to “central and southern Chad, Central African Republic, western Sudan, South Sudan, and southwestern Ethiopia south through East Africa to northern Mozambique”.</p>
<p> </p>
<p>page 361, <strong>Ethiopian Swallow <em>Hirundo aethiopica</em></strong></p>
<p>Revise the range description of nominate <em>aethiopica</em> from “Senegambia to se Sudan, w Kenya, Uganda and Tanzania” to “patchily distributed from Senegambia east to Benin, then more widespread from southern Niger and northern Benin east to western Ethiopia, south in East Africa to Uganda, Kenya, and northeastern Tanzania”.</p>
<p> </p>
<p>page 362<strong>, Red-rumped Swallow <em>Cecropis daurica</em></strong></p>
<p>Revise the range description of subspecies <em>emini</em> from “S Sudan to e Democratic Republic of the Congo, Uganda, Kenya, Tanzania and Malawi” to “southeastern South Sudan and eastern Democratic Republic of the Congo south through Uganda and Kenya to Malawi and northern Zambia”.</p>
<p>Revise the range description of subspecies <em>domicella</em> from “Senegambia to s Sudan and extreme nw Uganda” to “Senegambia and Guinea east to southwestern Sudan, South Sudan, and western Ethiopia”.</p>
<p> </p>
<p>page 361, <strong>Lesser Striped-Swallow <em>Cecropis abyssinica</em></strong></p>
<p>Revise the range description of nominate <em>abyssinica</em> from “E Sudan to Eritrea, Ethiopia and Somalia” to “southeastern Sudan (and northeastern South Sudan?), Ethiopia, and Eritrea”.</p>
<p>Revise the range description of subspecies <em>unitatis</em> from “S Sudan to Kenya, w Uganda, Gabon and e Cape Province” to “Gabon east to southern South Sudan, Kenya, and southern Somalia, south to central Angola, eastern Zambia, Zimbabwe, eastern Botswana, Mozambique, and eastern South Africa”.</p>
<p>Revise the range description of subspecies <em>bannermani</em> from “NE C African Republic to sw Sudan (Darfur Province)” to “northeastern Central African Republic to southwestern Sudan and northwestern South Sudan”.</p>
<p> </p>
<p>page 362, <strong>Rufous-chested Swallow <em>Cecropis semirufa</em></strong></p>
<p>Revise the range description of subspecies <em>gordoni</em> from “Senegal to s Sudan, n Angola, sw Kenya and nw Tanzania” to “Senegambia east to southern South Sudan, Uganda, Rwanda, southwestern Kenya, and northwestern Tanzania, south to northern Angola and central Democratic Republic of the Congo”.</p>
<p> </p>
<p>page 362, <strong>Mosque Swallow <em>Cecropis senegalensis</em></strong></p>
<p>Revise the range description of nominate <em>senegalensis</em> from “Mauritania and Senegambia to s Chad and s Sudan” to “southwestern Mauritania and Senegambia to southern Chad and southern Sudan”.</p>
<p>Revise the range description of subspecies <em>saturatior</em> from “S Ghana to Gabon, Ethiopia, Uganda and Kenya” to “southern Ghana east to South Sudan, Ethiopia, Uganda, Rwanda, Burundi and Kenya, south to Congo and northern Democratic Republic of the Congo”.</p>
<p> </p>
<p>page 357, <strong>White-headed Sawwing <em>Psalidoprocne albiceps</em></strong></p>
<p>Revise the range description of the monotypic group White-headed Sawwing (White-headed) <em>Psalidoprocne albiceps albiceps</em> from “s Sudan to w Kenya, e Democratic Republic of the Congo, Tanzania, Zambia and Malawi” to “southeastern South Sudan, eastern Democratic Republic of the Congo, Uganda, and western Kenya south to northern Zambia and northern Malawi”.</p>
<p> </p>
<p>page 357, <strong>Black Sawwing <em>Psalidoprocne pristoptera</em></strong></p>
<p>Revise the range description of subspecies <em>chalybea</em> from “Extreme se Nigeria to Cameroon, C African Rep. and Democratic Republic of the Congo” to “extreme southeastern Nigeria and Cameroon to Central African Republic, northwestern South Sudan, and northeastern Democratic Republic of the Congo”.</p>
<p>Revise the range description of subspecies <em>oleaginea</em> from “SW Ethiopia (Maji region)” to “eastern South Sudan and southwestern Ethiopia”.</p>
<p>Revise the range description of subspecies <em>mangbettorum</em> from “Extreme ne Democratic Republic of the Congo to extreme sw Sudan” to “extreme northeastern Democratic Republic of the Congo and southern South Sudan”.</p>
<p> </p>
<p>page 472, <strong>African Blue-Flycatcher <em>Elminia longicauda</em></strong></p>
<p>Blue-Flycatchers are not a monophyletic group, as the name is shared by species in three different, unrelated genera (<em>Elminia, Cyornis</em>, and <em>Ficedula</em>). Therefore change the English name of <em>Elminia longicauda</em> from African Blue-Flycatcher to African Blue Flycatcher.</p>
<p>Revise the range description of subspecies <em>teresita</em> to “Cameroon south to northern Angola, and east to South Sudan, western Kenya, and extreme northwestern Tanzania”.</p>
<p> </p>
<p>page 472, <strong>White-tailed Blue-Flycatcher <em>Elminia albicauda</em></strong></p>
<p>Blue-Flycatchers are not a monophyletic group, as the name is shared by species in three different, unrelated genera (<em>Elminia, Cyornis</em>, and <em>Ficedula</em>). Therefore change the English name of <em>Elminia albicauda</em> from White-tailed Blue-Flycatcher to White-tailed Blue Flycatcher.</p>
<p> </p>
<p>page 472, <strong>Dusky Crested-Flycatcher <em>Elminia nigromitrata</em></strong></p>
<p>Revise the range description of nominate <em>nigromitrata</em> from “Cameroon to s Sudan, Kenya, Uganda and Tanzania” to “Cameroon and Gabon east across northern Democratic Republic of the Congo to southern Central African Republic, southwestern South Sudan, Uganda, and western Kenya”.</p>
<p> </p>
<p>page 454, <strong>Gray-headed Canary-Flycatcher <em>Culicicapa ceylonensis</em></strong></p>
<p>Revise the range description for subspecies <em>antioxantha</em> from “Thai-Malay Peninsula, Sumatra, Java, and Bali” to “Thai-Malay Peninsula, Sumatra, Java, Bali, and Borneo”.</p>
<p> </p>
<p>page 526, <strong>White-shouldered Black-Tit <em>Melaniparus guineensis</em></strong></p>
<p>Revise the range description from “Senegal to s Sudan, ne Democratic Republic of the Congo, sw Ethiopia, n Uganda, w Kenya” to “southern Mauritania and Senegal east to southern Sudan, South Sudan, and southwestern Ethiopia, south to northern Democratic Republic of the Congo, Uganda, and extreme western Kenya”.</p>
<p> </p>
<p>page 526, <strong>White-bellied Tit <em>Melaniparus albiventris</em></strong></p>
<p>Revise the range description from “Disjunct in mts. of Nigeria/Cameroon; se Sudan to Tanzania” to “southeastern Nigeria and central Cameroon; southeastern South Sudan south to Tanzania”.</p>
<p> </p>
<p>page 533, <strong>Yellow Penduline-Tit <em>Anthoscopus parvulus</em></strong></p>
<p>Revise the range description of nominate <em>parvulus</em> from “Chad to s Sudan and ne Democratic Republic of the Congo” to “Chad to southern South Sudan and northeastern Democratic Republic of the Congo”.</p>
<p> </p>
<p>page 530, <strong>White-breasted Nuthatch <em>Sitta carolinensis</em></strong></p>
<p>Revise the range description of subspecies <em>tenuissima</em> from “British Columbia and Cascades to Sierra Nevada of n California” to “British Columbia and Cascades to the east slope of the Sierra Nevada of northern California (south to Tulare County)”.</p>
<p>Revise the range description of subspecies <em>aculeata</em> from “W Washington to Oregon, California and n Baja (Sierra Juárez)” to “western Washington to western Oregon (east to the west slope of the Cascades), California (east to the west slope of the Sierra Nevada), and northern Baja California (Sierra Juárez)”.</p>
<p> </p>
<p>page 530, <strong>Rock Nuthatch <em>Sitta neumayer</em></strong></p>
<p>In accord with current usage (Harrap and Quinn 1995), change the English name of <em>Sitta neumayer</em> from Rock Nuthatch to Western Rock Nuthatch.</p>
<p>Reference:</p>
<p><NAME>., and <NAME>. 1995. Chickadees, tits, nuthatches & treecreepers. Princeton University Press, Princeton, New Jersey.</p>
<p> </p>
<p>page 531, <strong>Persian Nuthatch <em>Sitta tephronota</em></strong></p>
<p>In accord with current usage (Harrap and Quinn 1995, Inskipp et al. 1996, Rasmussen and Anderton 2012), change the English name of <em>Sitta tephronota</em> from Persian Nuthatch to Eastern Rock Nuthatch.</p>
<p>References:</p>
<p>Harrap, S., and <NAME>. 1995. Chickadees, tits, nuthatches & treecreepers. Princeton University Press, Princeton, New Jersey.</p>
<p><NAME>., <NAME>, and <NAME>. 1996. An annotated checklist of the birds of the Oriental Region. Oriental Bird Club, Sandy, Bedfordshire, United Kingdom.</p>
<p><NAME>., and <NAME>. 2012. Birds of South Asia. The Ripley guide. Volume 2: attributes and status. Second Edition. Smithsonian Institution and Lynx Edicions, Washington D.C. and Barcelona.</p>
<p> </p>
<p>page 393, <strong>Bewick’s Wren <em>Thryomanes bewickii</em></strong></p>
<p>Subspecies <em>altus</em>, with range “E North America (s Ontario to central Georgia)”, is considered to be a junior synonym of nominate <em>bewickii</em> (Phillips 1986, Pyle 1997), and is deleted. Revise the range description of nominate <em>bewickii</em> from “N-central US to Kansas, Nebraska and Mississippi” to “eastern North America, from northeastern Kansas and southern Iowa east to southern Ontario and central Pennsylvania, south to northern Arkansas, northern Alabama, and central South Carolina; but extirpated from most of its range east of the Mississippi River”. The polytypic group Bewick’s Wren (<em>bewickii/altus</em>) <em>Thryomanes bewickii bewickii/altus</em> becomes the monotypic group Bewick’s Wren (<em>bewickii</em>) <em>Thryomanes bewickii bewickii</em>.</p>
<p>Change the names of the polytypic group Bewick’s Wren (<em>eremophilus</em> Group) <em>Thryomanes bewickii</em> [<em>eremophilus</em> Group] to Bewick’s Wren (<em>mexicanus</em> Group) <em>Thryomanes bewickii</em> [<em>mexicanus</em> Group].</p>
<p>Revise the range description of subspecies <em>cryptus</em> from “Kansas and Oklahoma to s Texas and ne Mexico (n Tamaulipas)” to “eastern Colorado south through western Oklahoma and western Texas to northeastern Mexico (northern Nuevo León)”.</p>
<p>Add a previously overlooked subspecies, <em>pulichi</em> Phillips 1986, with range “central United States, in Kansas, Oklahoma, and presumably north central Texas, and east, perhaps, to Missouri; mostly resident, but some winter in south central Texas and perhaps northeastern Mexico”. Insert <em>pulichi </em>immediately following subspecies <em>cryptus</em>.</p>
<p>Add a previously overlooked subspecies, <em>sadai</em> Phillips 1986, with range “southernmost Texas and northeastern Mexico (south to central Tamaulipas)”. Insert sadai immediately following subspecies <em>pulichi</em>.</p>
<p>Change the subspecies name <em>bairdii</em> to the older available name <em>mexicanus</em> (Paynter and Vaurie 1960, Phillips 1986). Subspecies <em>percnus</em> is considered to be a junior synonym of <em>mexicanus</em> (Phillips 1986), and is deleted. Revise the range description of <em>mexicanus</em> from “SE Mexico (Oaxaca, Veracruz and s Puebla)” to “central Mexico (Jalisco to western Veracruz, south to south central Oaxaca)”.</p>
<p>Subspecies <em>atrestus</em>, with range “S-central Oregon to ne California and w-central Nevada”, is considered to be a junior synonym of <em>drymoecus</em> (Phillips 1986), and is deleted.</p>
<p>Subspecies <em>correctus</em>, with range “Coastal California (San Benito County to San Diego)”; subspecies <em>nesophilus</em>, with range “Santa Cruz, Santa Rosa and Anacapa Islands off s California”; and subspecies <em>catalinae</em>, with range “Catalina Island (off Southern California)”, all are considered to be junior synonyms of <em>charienturus</em> (Phillips 1986), and are deleted. Revise the range description of <em>charienturus</em> from “N Baja California (south to 30ºN)” to “southwestern California (north to Morro Bay), including the northern Channel Islands, and northwestern Baja California, Mexico”.</p>
<p>References:</p>
<p>Phillips, A.R. 1986. The known birds of North and Middle America. Part I. Hirundinidae to Mimidae; Certhiidae. Privately published, Denver, Colorado.</p>
<p><NAME>. 1997. Identification guide to North American birds. Part I. Slate Creek Press, Bolinas, California.</p>
<p><NAME>., Jr., and <NAME>. 1960. <a href="https://biodiversitylibrary.org/page/14481073">Family Troglodytidae, wrens</a>. Pages 379-440 in E. Mayr and <NAME>, Jr. (editors), Check-list of birds of the world. Volume IX. Museum of Comparative Zoology, Cambridge, Massachusetts.</p>
<p> </p>
<p>page 396, <strong>Santa Marta Wood-Wren <em>Henicorhina anachoreta</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop743.htm">Proposal 743</a>), change the English name of <em>Henicorhina anachoreta</em> from Santa Marta Wood-Wren to Hermit Wood-Wren.</p>
<p> </p>
<p>page 397, <strong>Musician Wren <em>Cyphorhinus arada</em></strong></p>
<p>Subspecies <em>urbanoi</em>, with range “Brazil (Pará in vicinity of Faro and Obidos)”, and subspecies <em>faroensis</em>, with range “N Brazil”, are considered to be junior synonyms of nominate <em>arada</em> (Bocalini and Silveira 2016), and are deleted. Revise the range description of <em>arada</em> from “S Venezuela (Gran Sabana) to the Guianas and adjacent ne Brazil” to “eastern Venezuela, the Guianas, and northern Brazil (north of the Amazon, east of the Rio Negro)”.</p>
<p>Reference:</p>
<p><NAME>., and <NAME>. 2016. A taxonomic revision of the Musician Wren, <em>Cyphorhinus arada</em> (Aves, Troglodytidae), reveals the existence of six valid species endemic to the Amazon basin. Zootaxa 4193: 541–564.</p>
<p> </p>
<p>page 379, <strong>Slender-billed Greenbul <em>Stelgidillas gracilirostris</em></strong></p>
<p>Revise the range description of nominate <em>gracilirostris</em> from “Guinea to extreme s Sudan, w Democratic Republic of the Congo, w Kenya and Angola” to “southwestern Senegal and Guinea to extreme southern South Sudan, central Uganda, and western Kenya, south to northern Angola and central Democratic Republic of the Congo”.</p>
<p> </p>
<p>page 381, <strong>Red-tailed Bristlebill <em>Bleda syndactylus</em></strong></p>
<p>Revise the range description of subspecies <em>woosnami</em> from “E Democratic Republic of the Congo to s Sudan, Kenya, Uganda and Zambia” to “eastern Democratic Republic of the Congo, southern South Sudan, Uganda, and western Kenya to northern Angola and extreme northwestern Zambia”.</p>
<p> </p>
<p>page 382, <strong>Lesser Bristlebill <em>Bleda notatus</em></strong></p>
<p>Revise the range description of the monotypic group Lesser Bristlebill (Yellow-eyed) <em>Bleda notatus ugandae</em> from “NE Democratic Republic of the Congo to s Sudan and Uganda” to “Democratic Republic of the Congo (except along lower Congo River) to southern Central African Republic, southwestern South Sudan, and Uganda”.</p>
<p> </p>
<p>page 380, <strong>Simple Greenbul <em>Chlorocichla simplex</em></strong></p>
<p>Revise the range description from “Guinea-Bissau to ne Angola, e Democratic Republic of the Congo and extreme s Sudan” to “Guinea-Bissau east to southern South Sudan and western Uganda, south to northern Angola and central Democratic Republic of the Congo”.</p>
<p> </p>
<p>page 380, <strong>Joyful Greenbul <em>Chlorocichla laetissima</em></strong></p>
<p>Revise the range description of nominate <em>laetissima</em> from “Montane forests of e Democratic Republic of the Congo to s Sudan, Uganda and nw Kenya” to “montane forests of northeastern Democratic Republic of the Congo, extreme southern South Sudan, western Uganda, and southwestern Kenya”.</p>
<p> </p>
<p>page 380, <strong>Honeyguide Greenbul <em>Baeopogon indicator</em></strong></p>
<p>Revise the range description of nominate <em>indicator</em> from “S Nigeria to e Democratic Republic of the Congo, s Sudan, Uganda, w Kenya and nw Zambia” to “Nigeria to southern South Sudan, Uganda, and western Kenya, south to northern Angola, southern Democratic Republic of the Congo, and extreme northwestern Zambia”.</p>
<p> </p>
<p>page 380, <strong>Sjostedt’s Greenbul <em>Baeopogon clamans</em></strong></p>
<p>In accord with widespread usage (e.g., Keith et al. 1992, Dowsett and Forbes-Watson 1993, Borrow and Demey 2001), change the English name of <em>Baeopogon clamans</em> from Sjostedt’s Greenbul to Sjöstedt’s Greenbul.</p>
<p>References:</p>
<p><NAME>., and <NAME>. 2001. A guide to birds of western Africa. Princeton University Press, Princeton, New Jersey.</p>
<p><NAME>., and A.D. Forbes-Watson. 1993. Checklist of birds of the Afrotropical and Malagasy regions. Volume 1: species limits and distribution. Tauraco Press, Liège, Belgium.</p>
<p><NAME>., <NAME>, and <NAME> (editors). 1992. The birds of Africa. Volume IV. Academic Press, London.</p>
<p> </p>
<p>page 380, <strong>Yellow-throated Greenbul <em>Atimastillas flavicollis</em></strong></p>
<p>Revise the range description of the monotypic group Yellow-throated Greenbul (<em>soror</em>) <em>Atimastillas flavicollis soror</em> from “N-central Cameroon east to Gabon, Democratic Republic of the Congo, s Sudan and Ethiopia” to “north central Cameroon east to southwestern South Sudan, south to Congo and central Democratic Republic of the Congo; eastern South Sudan and western Ethiopia”.</p>
<p> </p>
<p>page 379, <strong>Plain Greenbul <em>Eurillas curvirostris</em></strong></p>
<p>Change the names of the monotypic group Plain Greenbul (<em>leoninus</em>) <em>Eurillas curvirostris leoninus</em> to Plain Greenbul (<em>leonina</em>) <em>Eurillas curvirostris leonina</em> (Dickinson and Christidis 2014).</p>
<p>Revise the range description of the monotypic group Plain Greenbul (<em>curvirostris</em>) <em>Eurillas curvirostris curvirostris</em> from “S Ghana to n Angola, s Sudan, Uganda and w Kenya; Bioko” to “southern Ghana east to southern South Sudan, Uganda, and western Kenya, south to northern Angola and central Democratic Republic of the Congo; Bioko”.</p>
<p>Reference:</p>
<p><NAME>., and <NAME>. 2014. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 2. Aves Press, Eastbourne, United Kingdom.</p>
<p> </p>
<p>page 380, <strong>Leaf-love <em>Phyllastrephus scandens</em></strong></p>
<p>Revise the range description of subspecies <em>orientalis</em> from “S Cameroon to s Democratic Republic of the Congo, s Sudan and extreme w Tanzania” to “Cameroon east to southern South Sudan, south to southern Congo, western and southern Democratic Republic of the Congo, and extreme western Tanzania”.</p>
<p> </p>
<p>page 380, <strong>Northern Brownbul <em>Phyllastrephus strepitans</em></strong></p>
<p>Revise the range description from “Extreme s Sudan to n Uganda, s Ethiopia, Kenya and e Tanzania” to “southwestern Sudan; southeastern South Sudan, southern Ethiopia and southern Somalia to northern Uganda, Kenya, and northeastern Tanzania”.</p>
<p> </p>
<p>page 381, <strong>Toro Olive-Greenbul <em>Phyllastrephus hypochloris</em></strong></p>
<p>Revise the range description from “Forests of ne Democratic Republic of the Congo to extreme se Sudan, Uganda and w Kenya” to “southern South Sudan, eastern Democratic Republic of the Congo, Uganda, extreme western Kenya, and extreme northwestern Tanzania”.</p>
<p> </p>
<p>page 380, <strong>Cabanis’s Greenbul <em>Phyllastrephus cabanisi</em></strong></p>
<p>Revise the range description of subspecies <em>sucosus</em> from “E Democratic Republic of the Congo to Rwanda, s Sudan, Uganda, w Kenya and nw Tanzania” to “eastern Democratic Republic of the Congo to extreme southern South Sudan, Uganda, Rwanda, western Kenya, and northwestern Tanzania”.</p>
<p> </p>
<p>page 381, <strong>White-throated Greenbul <em>Phyllastrephus albigularis</em></strong></p>
<p>Revise the range description of the monotypic group White-throated Greenbul (White-throated) <em>Phyllastrephus albigularis albigularis</em> from “SW Senegal to Cameroon, Gabon, s Sudan and Uganda” to “southwestern Senegal east to extreme southern South Sudan and Uganda, south to southern Congo and western, northern, and eastern Democratic Republic of the Congo”.</p>
<p> </p>
<p>pages 376-377, <strong>Common Bulbul <em>Pycnonotus barbatus</em></strong></p>
<p>Revise the range description of subspecies <em>arsinoe</em> from “E Chad to Egypt and Sudan (s to Darfur, Kordofan, Nile Valley)” to “Egypt (Nile Valley), eastern Chad, Sudan, and northern South Sudan”.</p>
<p>Revise the range description of subspecies <em>schoanus</em> from “Eritrea and e Ethiopia to extreme se Sudan (Boma Hills)” to “Eritrea, Ethiopia, and southeastern South Sudan”.</p>
<p>Revise the range description of subspecies <em>tricolor</em> from “E Cameroon to Democratic Republic of the Congo, s Sudan, Angola, Namibia and Zambia” to “eastern Cameroon east to South Sudan and central Kenya, south to northern Namibia, Democratic Republic of the Congo, northwestern Botswana, and northern and western Zambia”.</p>
<p> </p>
<p>page 383, <strong>bulbuls genus <em>Iole</em></strong></p>
<p>The sequence of species of <em>Iole</em> bulbuls is revised, based on Manawatthana et al. (2017). The new sequence of species is:</p>
<p>Sulphur-bellied Bulbul <em>Iole palawanensis</em></p>
<p>Buff-vented Bulbul <em>Iole crypta</em></p>
<p>Charlotte’s Bulbul <em>Iole charlottae</em></p>
<p>Gray-eyed Bulbul <em>Iole propinqua</em></p>
<p>Cachar Bulbul <em>Iole cacharensis</em></p>
<p>Olive Bulbul <em>Iole viridescens</em></p>
<p>Yellow-browed Bulbul <em>Iole indica</em></p>
<p>Reference:</p>
<p>Manawatth<NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2017. Phylogeography of bulbuls in the genus <em>Iole</em> (Aves: Pycnonotidae). Biological Journal of the Linnean Society 120: 931-944.</p>
<p> </p>
<p>page 383, <strong>Gray-eyed Bulbul <em>Iole propinqua</em></strong></p>
<p>page 383, <strong>Olive Bulbul <em>Iole virescens</em></strong></p>
<p>The correct species name of Olive Bulbul is <em>Iole viridescens</em>, not <em>Iole virescens</em> (Rand and Deignan 1960, Dickinson and Christidis 2014).</p>
<p>A population in Myanmar, previously classified as a subspecies of Olive Bulbul (<em>Iole viridescens myitkyinensis</em>), instead is embedded within Gray-eyed Bulbul. Transfer <em>myitkyinensis</em> to Gray-eyed Bulbul; position it immediately following the species heading for Gray-eyed Bulbul; and change the scientific name from <em>Iole viridescens myitkyinensis</em> to <em>Iole propinqua myitkyinensis.</em></p>
<p>Two other subspecies of Gray-eyed Bulbul, <em>lekhakuni</em> and <em>cinnamomeoventris</em>, in turn are reassigned to Olive Bulbul <em>Iole viridescens</em>. Change the scientific name from <em>Iole propinqua lekhakuni</em> to <em>Iole viridescens lekhakuni</em>. Revise the range description from “S Myanmar to sw Thailand” to “southern Myanmar to western Thailand”.</p>
<p>Change the scientific name from <em>Iole propinqua cinnamomeoventris</em> to <em>Iole viridescens cinnamomeoventris</em>. Revise the range description of subspecies <em>cinnamomeoventris</em> from “Malay Pen. (Mergui District and Isthmus of Kra to Trang)” to “Thai-Malay Peninsula (Mergui District and Isthmus of Kra south at least to Songkhla)”. We recognize <em>cinnamomeoventris</em> as a new monotypic group, Olive Bulbul (Baker’s) <em>Iole viridescens cinnamomeoventris</em>.</p>
<p>References:</p>
<p><NAME>., and <NAME>. 2014. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 2. Aves Press, Eastbourne, United Kingdom.</p>
<p><NAME>., and <NAME>.1960. <a href="https://biodiversitylibrary.org/page/14480958">Family Pycnonotidae</a>. Pages 221-300 in E. Mayr and <NAME>, Jr. (editors), Check-list of birds of the world. Volume IX. Museum of Comparative Zoology, Cambridge, Massachusetts.</p>
<p> </p>
<p>page 384, <strong>Ashy Bulbul <em>Hemixos flavala</em></strong></p>
<p>The polytypic group Ashy Bulbul (Cinereous) <em>Hemixos flavala cinereus/connectens</em> now is recognized as two separate monotypic groups, Ashy Bulbul (Cinereous) <em>Hemixos flavala cinereus</em> and Ashy Bulbul (Green-winged) <em>Hemixos flavala connectens</em>.</p>
<p>Revise the range description of <em>cinereus</em> from “Malay Peninsula and Sumatra” to “Thai-Malay Peninsula (north to southern Surat Thani) and Sumatra”.</p>
<p> </p>
<p>page 385, <strong>Firecrest <em>Regulus ignicapilla</em></strong></p>
<p>Change the scientific name of the polytypic group Firecrest (European) from <em>Regulus ignicapilla ignicapilla/balearica</em> to <em>Regulus ignicapilla ignicapilla/balearicus</em>.</p>
<p>Correct the spelling of the subspecies name <em>balearica</em> to <em>balearicus</em>.</p>
<p> </p>
<p>page 435, <strong>Northern Crombec <em>Sylvietta brachyura</em></strong></p>
<p>Revise the range description of subspecies <em>leucopsis</em> from “S Eritrea to Ethiopia, se Sudan, Somalia, Kenya and Tanzania” to “southern Eritrea, Ethiopia, Djibouti, Somalia, southeastern South Sudan, Kenya, and northeastern Tanzania”.</p>
<p> </p>
<p>page 428, <strong>Moustached Grass-Warbler <em>Melocichla mentalis</em></strong></p>
<p>Revise the range description of subspecies <em>amauroura </em>from “S Sudan to sw Ethiopia, Kenya, Tanzania and Zambia” to “South Sudan and southwestern Ethiopia south to eastern Democratic Republic of the Congo, Uganda, western and central Kenya, western Tanzania, and northern and central Zambia”.</p>
<p> </p>
<p>page 436, <strong>Yellow Longbill <em>Macrosphenus flavicans</em></strong></p>
<p>Revise the range description of subspecies <em>hypochondriacus</em> from “E Democratic Republic of the Congo to Uganda, Central African Rep. and extreme sw Sudan” to “southeastern Central African Republic, southwestern South Sudan, eastern Democratic Republic of the Congo, Uganda, and extreme northwestern Tanzania”.</p>
<p> </p>
<p>page 436, <strong>Green Hylia <em>Hylia prasina</em></strong></p>
<p>Revise the range description of nominate <em>prasina</em> from “Senegambia to Angola, Democratic Republic of the Congo, s Sudan, w Kenya and nw Tanzania” to “Senegambia east to southern South Sudan, Uganda, western Kenya, and northwestern Tanzania, south to central Angola and central Democratic Republic of the Congo”.</p>
<p> </p>
<p>pages 419, 425-426, 432-433, 435, 440-441, 471-472, <strong>Bush-Warblers and Allies Cettiidae</strong></p>
<p>Change the English name of the family Cettiidae from Bush-Warblers and Allies to Bush Warblers and Allies. Change the scientific name of this family from Cettiidae to Scotocercidae (Dickinson and Christidis 2014: 641).</p>
<p>Reference:</p>
<p><NAME>., and <NAME>. 2014. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 2. Aves Press, Eastbourne, United Kingdom.</p>
<p> </p>
<p>page 419, <strong>Scrub Warbler <em>Scotocerca inquieta</em></strong></p>
<p>Revise the range description of subspecies <em>grisea</em> from “W Saudi Arabia (Taif Plateau), e South Yemen and Oman” to “western Saudi Arabia, eastern Yemen, and Oman”.</p>
<p>Revise the range description of subspecies <em>buryi </em>from “SW Saudi Arabia, North Yemen and Hadramaut” to “southwestern Saudi Arabia and western Yemen”.</p>
<p> </p>
<p>page 425, <strong>Pale-footed Bush-Warbler <em>Urosphena pallidipes</em></strong></p>
<p>Bush-Warblers are not a monophyletic group, as this group name is shared by species in five different, unrelated genera (<em>Urosphena, Cettia, Horornis, Locustella</em>, and <em>Elaphrornis</em>). Therefore change the English name of <em>Urosphena pallidipes</em> from Pale-footed Bush-Warbler to Pale-footed Bush Warbler.</p>
<p> </p>
<p>page 426, <strong>Chestnut-crowned Bush-Warbler <em>Cettia major</em></strong></p>
<p>page 426, <strong>Gray-sided Bush-Warbler<em> Cettia brunnifrons</em></strong></p>
<p>Bush-Warblers are not a monophyletic group, as this group name is shared by species in five different, unrelated genera (<em>Urosphena, Cettia, Horornis, Locustella</em>, and <em>Elaphrornis</em>). Therefore change the English name of <em>Cettia major</em> from Chestnut-crowned Bush-Warbler to Chestnut-crowned Bush Warbler.</p>
<p>Change the English name of <em>Cettia brunnifrons</em> from Gray-sided Bush-Warbler to Gray-sided Bush Warbler.</p>
<p> </p>
<p>page 425, <strong>Philippine Bush-Warbler <em>Horornis seebohmi</em></strong></p>
<p>page 425, <strong>Japanese Bush-Warbler <em>Horornis diphone</em></strong></p>
<p>page 425, <strong>Manchurian Bush-Warbler <em>Horornis borealis</em></strong></p>
<p>page 426, <strong>Palau Bush-Warbler <em>Horornis annae</em></strong></p>
<p>page 426, <strong>Tanimbar Bush-Warbler <em>Horornis carolinae</em></strong></p>
<p>page 426, <strong>Fiji Bush-Warbler <em>Horornis ruficapilla</em></strong></p>
<p>page 426, <strong>Brownish-flanked Bush-Warbler <em>Horornis fortipes</em></strong></p>
<p>page 426, <strong>Hume’s Bush-Warbler <em>Horornis brunnescens</em></strong></p>
<p>page 426, <strong>Yellowish-bellied Bush-Warbler <em>Horornis acanthizoides</em></strong></p>
<p>page 426, <strong>Sunda Bush-Warbler <em>Horornis vulcanius</em></strong></p>
<p>page 426, <strong>Aberrant Bush-Warbler <em>Horornis flavolivaceus</em></strong></p>
<p>Bush-Warblers are not a monophyletic group, as this group name is shared by species in five different, unrelated genera (<em>Urosphena, Cettia, Horornis, Locustella</em>, and <em>Elaphrornis</em>). Therefore change the English group name of all species of “Bush-Warbler” in <em>Horornis</em> to “Bush Warbler”.</p>
<p> </p>
<p>page 426, <strong>Aberrant Bush-Warbler <em>Horornis flavolivaceus</em></strong></p>
<p>Subspecies <em>dulcivox</em>, with range “S China (s Sichuan to s Yunnan)”, is deleted. This subspecies is known with certainty from only a single specimen, which was destroyed during the Second World War (Vaurie 1959). Delacour (1943) suggested that <em>dulcivox</em> is a junior synonym of <em>intricatus</em>, but since its identity no longer can be confirmed, Dickinson and Christidis (2014) remark that it is “best considered unidentifiable”. We delete subspecies <em>dulcivox</em>. Revise the range description of <em>intricatus</em> from “NE Myanmar to nw Thailand and sw China (s Shanxi, e Sichuan)” to “northern and eastern Myanmar and southwestern China (east to southern Shanxi)”.</p>
<p>References:</p>
<p><NAME>. 1943. The bush-warblers of the genera <em>Cettia</em> and <em>Bradypterus</em>, with notes on allied genera and species. Part II. Ibis 85: 27-40.</p>
<p><NAME>., and <NAME>. 2014. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 2. Aves Press, Eastbourne, United Kingdom.</p>
<p><NAME>. 1959. The birds of the Palearctic fauna. Passeriformes. H.F. & G. Witherby Limited, London.</p>
<p> </p>
<p>page 436, <strong>Brown Woodland-Warbler <em>Phylloscopus umbrovirens</em></strong></p>
<p>Revise the range description of subspecies <em>mackenzianus</em> from “S Sudan to e Uganda and central Kenya” to “southern South Sudan, northern and eastern Uganda, and central Kenya”.</p>
<p> </p>
<p>page 437, <strong>Common Chiffchaff <em>Phylloscopus collybita</em></strong></p>
<p>The monotypic group Common Chiffchaff (<em>collybita</em>) <em>Phylloscopus collybita collybita</em> is expanded to encompass the monotypic group Common Chiffchaff (Scandinavian) <em>Phylloscopus collybita abietinus</em> (which therefore is no longer recognized as a group), and three additional subspecies <em>(brevirostris, caucasicus</em>, and <em>menzbieri</em>) that previously had not been assigned to any group. Change the scientific name of the group <em>Phylloscopus collybita collybita</em> to <em>Phylloscopus collybita</em> [<em>collybita</em> Group], and change the English name of this group from Common Chiffchaff (<em>collybita</em>) to Common Chiffchaff (Common).</p>
<p> </p>
<p>page 438, <strong>Western Crowned Leaf Warbler <em>Phylloscopus occipitalis</em></strong></p>
<p>In accord with current usage (Inskipp et al. 1996, Grimmett et al. 1999, Rasmussen and Anderton 2012), change the English name of <em>Phylloscopus occipitalis</em> from Western Crowned Leaf Warbler to Western Crowned Warbler.</p>
<p>References:</p>
<p><NAME>., <NAME>, and <NAME>. 1999. A guide to the birds of India, Pakistan, Nepal, Bangladesh, Bhutan, Sri Lanka, and the Maldives. Princeton University Press, Princeton, New Jersey.</p>
<p><NAME>., <NAME>, and <NAME>. 1996. An annotated checklist of the birds of the Oriental Region. Oriental Bird Club, Sandy, Bedfordshire, United Kingdom.</p>
<p>Rasmussen, P.C., and <NAME>. 2012. Birds of South Asia. The Ripley guide. Volume 2: attributes and status. Second Edition. Smithsonian Institution and Lynx Edicions, Washington D.C. and Barcelona.</p>
<p> </p>
<p>page 438, <strong>Eastern Crowned Leaf Warbler <em>Phylloscopus coronatus</em></strong></p>
<p>In accord with current usage (Inskipp et al. 1996, Grimmett et al. 1999, Brazil 2009), change the English name of <em>Phylloscopus coronatus</em> from Eastern Crowned Leaf Warbler to Eastern Crowned Warbler.</p>
<p>References:</p>
<p>Brazil, M. 2009. Birds of East Asia. China, Taiwan, Korea, Japan, and Russia. Princeton University Press, Princeton, New Jersey.</p>
<p><NAME>., <NAME>, and <NAME>. 1999. A guide to the birds of India, Pakistan, Nepal, Bangladesh, Bhutan, Sri Lanka, and the Maldives. Princeton University Press, Princeton, New Jersey.</p>
<p><NAME>., <NAME>, and <NAME>. 1996. An annotated checklist of the birds of the Oriental Region. Oriental Bird Club, Sandy, Bedfordshire, United Kingdom.</p>
<p> </p>
<p>page 439, <strong>Lemon-throated Warbler <em>Phylloscopus cebuensis</em></strong></p>
<p>Change the English name of <em>Phylloscopus cebuensis</em> from Lemon-throated Warbler to Lemon-throated Leaf Warbler (Dickinson et al. 1991, Inskipp et al. 1996, Kennedy et al. 2000).</p>
<p>References:</p>
<p><NAME>., <NAME>, and <NAME>. 1991. The birds of the Philippines. An annotated check-list. British Ornithologists’ Union Check-list number 12. British Ornithologists’ Union, London.</p>
<p><NAME>., <NAME>, and <NAME>. 1996. An annotated checklist of the birds of the Oriental Region. Oriental Bird Club, Sandy, Bedfordshire, United Kingdom.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>., and <NAME>. 2000. A guide to the birds of the Philippines. Oxford University Press, New York and Oxford, United Kingdom.</p>
<p> </p>
<p>page 439, <strong>Mountain Warbler <em>Phylloscopus trivirgatus</em></strong></p>
<p>Change the English name of <em>Phylloscopus trivirgatus</em> from Mountain Warbler to Mountain Leaf Warbler (Inskipp et al. 1996, Wells 2007, Eaton et al. 2016).</p>
<p>Change the English name of the polytypic group <em>Phylloscopus trivirgatus</em> [<em>trivirgatus</em> Group] from Mountain Warbler (Mountain) to Mountain Leaf Warbler (Mountain).</p>
<p>Change the English name of the polytypic group <em>Phylloscopus trivirgatus</em> [<em>nigrorum</em> Group] from Mountain Warbler (Philippines) to Mountain Leaf Warbler (Philippines).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2016. Birds of the Indonesian Archipelago: Greater Sundas and Wallacea. Lynx Edicions, Barcelona.</p>
<p><NAME>., <NAME>, and <NAME>. 1996. An annotated checklist of the birds of the Oriental Region. Oriental Bird Club, Sandy, Bedfordshire, United Kingdom.</p>
<p><NAME>. 2007. The birds of the Thai-Malay Peninsula. Volume Two. <NAME>, London.</p>
<p> </p>
<p>pages 428-432, <strong>Reed-Warblers and Allies Acrocephalidae</strong></p>
<p>Change the English name of the family Acrocephalidae from Reed-Warblers and Allies to Reed Warblers and Allies.</p>
<p> </p>
<p>page 432, <strong>Eastern Olivaceous Warbler <em>Iduna pallida</em></strong></p>
<p>Revise the range description of nominate <em>pallida</em> from “Egypt; winters to s Sudan and Ethiopia” to “breeds Egypt, south through Western Desert oases and the Nile Valley to central Sudan; winters Sudan, Eritrea, and Ethiopia”.</p>
<p> </p>
<p>page 432, <strong>African Yellow-Warbler <em>Iduna natalensis</em></strong></p>
<p>Revise the range description of subspecies <em>batesi</em> from “Nigeria to n Democratic Republic of the Congo and sw Sudan” to “Nigeria east to northern Democratic Republic of the Congo”.</p>
<p>Revise the range description of subspecies <em>massaica</em> from “SE Sudan and Ethiopia to e Democratic Republic of the Congo, Uganda, Kenya and s Tanzania” to “southern South Sudan and Ethiopia to northeastern Democratic Republic of the Congo, Uganda, Kenya, and northwestern and northeastern Tanzania”.</p>
<p> </p>
<p>page 432, <strong>Mountain Yellow-Warbler <em>Iduna similis</em></strong></p>
<p>Revise the range description from “Mountains of e Democratic Republic of the Congo to se Sudan, Kenya, Tanzania and n Malawi” to “southern South Sudan, mountains of eastern Democratic Republic of the Congo, northeastern Uganda, Kenya, Rwanda, Burundi, Tanzania, and northern Malawi”.</p>
<p> </p>
<p>page 429, <strong>Black-browed Reed-Warbler <em>Acrocephalus bistrigiceps</em></strong></p>
<p>page 429, <strong>Streaked Reed-Warbler <em>Acrocephalus sorghophilus</em></strong></p>
<p>page 429, <strong>Manchurian Reed-Warbler <em>Acrocephalus tangorum</em></strong></p>
<p>page 430, <strong>Blyth’s Reed-Warbler <em>Acrocephalus dumetorum</em></strong></p>
<p>page 430, <strong>Large-billed Reed-Warbler <em>Acrocephalus orinus</em></strong></p>
<p>page 430, <strong>Eurasian Reed-Warbler <em>Acrocephalus scirpaceus</em></strong></p>
<p>page 430, <strong>African Reed-Warbler <em>Acrocephalus baeticatus</em></strong></p>
<p>page 430, <strong>Basra Reed-Warbler <em>Acrocephalus griseldis</em></strong></p>
<p>page 430, <strong>Great Reed-Warbler <em>Acrocephalus arundinaceus</em></strong></p>
<p>page 430, <strong>Oriental Reed-Warbler <em>Acrocephalus orientalis</em></strong></p>
<p>page 430, <strong>Clamorous Reed-Warbler <em>Acrocephalus stentoreus</em></strong></p>
<p>page 430, <strong>Nightingale Reed-Warbler <em>Acrocephalus luscinius</em></strong></p>
<p>page (addition 2013), <strong>Saipan Reed-Warbler <em>Acrocephalus hiwae</em></strong></p>
<p>page 430, <strong>Australian Reed-Warbler <em>Acrocephalus australis</em></strong></p>
<p>page 430, <strong>Caroline Reed-Warbler <em>Acrocephalus syrinx</em></strong></p>
<p>page 430, <strong>Aguiguan Reed-Warbler <em>Acrocephalus nijoi</em></strong></p>
<p>page 431, <strong>Kiritimati Reed-Warbler <em>Acrocephalus aequinoctialis</em></strong></p>
<p>page 431, <strong>Southern Marquesan Reed-Warbler <em>Acrocephalus mendanae</em></strong></p>
<p>page 430, <strong>Pagan Reed-Warbler <em>Acrocephalus yamashinae</em></strong></p>
<p>page 430, <strong>Nauru Reed-Warbler <em>Acrocephalus rehsei</em></strong></p>
<p>page 431, <strong>Pitcairn Reed-Warbler <em>Acrocephalus vaughani</em></strong></p>
<p>page 431, <strong>Henderson Island Reed-Warbler <em>Acrocephalus taiti</em></strong></p>
<p>page 431, <strong>Cook Islands Reed-Warbler <em>Acrocephalus kerearako</em></strong></p>
<p>page 431, <strong>Rimitara Reed-Warbler <em>Acrocephalus rimitarae</em></strong></p>
<p>page (addition 2013), <strong>Society Islands Reed-Warbler <em>Acrocephalus musae</em></strong></p>
<p>page 431, <strong>Tahiti Reed-Warbler <em>Acrocephalus caffer</em></strong></p>
<p>page 431, <strong>Moorea Reed-Warbler <em>Acrocephalus longirostris</em></strong></p>
<p>page 431, <strong>Northern Marquesan Reed-Warbler <em>Acrocephalus percernis</em></strong></p>
<p>page 431, <strong>Tuamotu Reed-Warbler <em>Acrocephalus atyphus</em></strong></p>
<p>page (addition 2013), <strong>Mangareva Reed-Warbler <em>Acrocephalus astrolabii</em></strong></p>
<p>Reed-Warblers do not form a monophyletic group within <em>Acrocephalus</em>; therefore, change the English group name of all species of reed-warblers to “Reed Warbler”.</p>
<p>Additionally, change the English name of the monotypic group <em>Acrocephalus scirpaceus scirpaceus</em> from Eurasian Reed-Warbler (Eurasian) to Eurasian Reed Warbler (Eurasian); and change the English name of the monotypic group <em>Acrocephalus scirpaceus fuscus</em> from Eurasian Reed-Warbler (Caspian) to Eurasian Reed Warbler (Caspian).</p>
<p>Change the English name of the polytypic group <em>Acrocephalus stentoreus stentoreus/levantinus</em> from Clamorous Reed-Warbler (Clamorous) to Clamorous Reed Warbler (Clamorous); and change the English name of the polytypic group <em>Acrocephalus stentoreus </em>[<em>brunnescens</em> Group] from Clamorous Reed-Warbler (Brown) to Clamorous Reed Warbler (Brown).</p>
<p> </p>
<p>page 430, <strong>Eurasian Reed Warbler <em>Acrocephalus scirpaceus</em></strong></p>
<p>Revise the range description of nominate <em>scirpaceus</em> (Eurasian Reed Warbler) from “NW Africa and Europe to Crimea and Volga R; > w and c Africa” to “breeds Europe (except the Iberian Peninsula) east to western Turkey and western Russia; winters in subSaharan Africa (Senegal east to Sudan, south at least to Gabon and the northern edge of the Congo Basin)”.</p>
<p>Hering et al. (2016) describe a new subspecies, <em>ammon</em>, of Eurasian Reed Warbler, with range “Libyan Desert depressions on the Libya/Egypt border (oases of Qattara, Siwa, Sitra, and Al Jaghbub)”. We recognize this subspecies as a new monotypic group, Eurasian Reed Warbler (Siwa) <em>Acrocephalus scirpaceus ammon</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, and <NAME>. 2016. <a href="http://boc-online.org/bulletins/downloads/BBOC1362-Hering.pdf">A new subspecies of Eurasian Reed Warbler <em>Acrocephalus scirpaceus</em> in Egypt</a>. Bulletin of the British Ornithologists’ Club 136: 101-128.</p>
<p> </p>
<p>page 430, <strong>African Reed Warbler <em>Acrocephalus baeticatus </em></strong></p>
<p>Subspecies <em>avicenniae</em> (African Reed Warbler) is more closely related to Eurasian Reed Warbler than to other subspecies of African Reed Warbler (Kennerly and Pearson 2010, Olsson et al. 2016), and so is transferred to Eurasian Reed Warbler. Change the scientific name of this monotypic group from <em>Acrocephalus baeticatus avicenniae</em> to <em>Acrocephalus scirpaceus avicenniae</em>, and change the English name from African Reed-Warbler (Mangrove) to Eurasian Reed Warbler (Mangrove).</p>
<p>Olsson et al. (2016) documented that reed warblers breeding in northwestern Africa and on the Iberian Peninsula, previously included in Eurasian Reed Warbler (Eurasian) <em>Acrocephalus scirpaceus scirpaceus</em>, are genetically distinct from <em>scirpaceus</em> (with high levels of support), and apparently are closely related to African Reed Warbler <em>Acrocephalus baeticatus</em> (although this relationship is less well established). An available name for these birds is <em>ambiguus</em> Brehm 1857. Therefore add subspecies <em>ambiguus</em>, with range “breeds northwestern Africa (Morocco to Tunisia) and southwestern Europe (Iberian Peninsula); Moroccan population at least partially resident, otherwise winters in sub-Saharan Africa, but range not known in detail”, immediately following the heading for African Reed Warbler <em>Acrocephalus baeticatus</em>.</p>
<p>Revise the range description of subspecies <em>cinnamomeus</em> from “Senegal to s Sudan, Ethiopia and Somalia south to Mozambique” to “Senegal east to South Sudan, Ethiopia, and southern Somalia, south to eastern Democratic Republic of the Congo, eastern Zambia, Malawi, and Mozambique”.</p>
<p>Revise the range description of subspecies <em>hallae</em> from “SW Angola to Namibia, sw Botswana, sw Zambia and Malawi” to “southwestern Angola south to western Botswana and western South Africa”.</p>
<p>Revise the range description of nominate <em>baeticatus</em> from “northern Botswana and Zimbabwe southern and southeastern South Africa” to “eastern Botswana and Zimbabwe to southern and southeastern South Africa”.</p>
<p>References:</p>
<p><NAME>., and <NAME>. 2010. Reed and bush warblers. <NAME>, London.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2016. Mitochondrial phylogeny of the Eurasian/African reed warbler complex (<em>Acrocephalus</em>, Aves). Disagreement between morphological and molecular evidence and cryptic divergens: a case for resurrecting <em>Calamoherpe ambigua</em> Brehm 1857. Molecular Phylogenetics and Evolution 102: 30-44.</p>
<p>page 431, <strong>Lesser Swamp-Warbler <em>Acrocephalus gracilirostris</em></strong></p>
<p>page 431, <strong>Greater Swamp-Warbler <em>Acrocephalus rufescens</em></strong></p>
<p>page 431, <strong>Cape Verde Swamp-Warbler <em>Acrocephalus brevipennis</em></strong></p>
<p>page 431, <strong>Madagascar Swamp-Warbler <em>Acrocephalus newtoni</em></strong></p>
<p>Swamp-Warblers are not a monophyletic group, as the name is shared by species in two different, unrelated genera (<em>Acrocephalus</em> and <em>Bradypterus</em>). Therefore change the English group name of all species of swamp-warblers to “Swamp Warbler”.</p>
<p>Revise the range description of subspecies <em>jacksoni</em> (Lesser Swamp Warbler) from “S Sudan to w Kenya, Uganda and adjacent Democratic Republic of the Congo” to “southern Sudan to western Kenya, Uganda, and eastern Democratic Republic of the Congo”.</p>
<p>Revise the range description of subspecies <em>ansorgei </em>(Greater Swamp Warbler) from “S Sudan to Uganda, e Democratic Republic of the Congo, w Kenya, n Botswana and nw Angola” to “South Sudan to eastern Democratic Republic of the Congo, Uganda, Rwanda, and western Kenya south to northwestern Angola, northern Botswana, and northwestern Zimbabwe”.</p>
<p> </p>
<p>page 430, <strong>Clamorous Reed Warbler <em>Acrocephalus stentoreus</em></strong></p>
<p>Subspecies <em>sumbae</em>, previously classified under Clamorous Reed Warbler <em>Acrocephalus stentoreus</em>, is reassigned to Australian Reed Warbler <em>Acrocephalus australis</em>, following Cibois et al. (2011b). Position <em>sumbae</em> immediately following the species heading for Australian Reed Warbler. <em>Acrocephalus australis toxopei</em>, with range “New Guinea, Bismarck Archipelago and Solomon Islands”, is considered to be a junior synonym of <em>sumbae</em> (Watson et al. 1986a, Beehler and Pratt 2016), and is deleted. Revise the range description of <em>sumbae</em> from “Buru I. (s Moluccas) and e Lesser Sundas (Sumba and Timor)” to “Buru (South Moluccas), Sumba and Timor (Lesser Sundas), New Guinea, Bismarck Archipelago, and Solomon Islands”.</p>
<p>References:</p>
<p><NAME>., and <NAME>. 2016. Birds of New Guinea: distribution, taxonomy, and systematics. Princeton University Press, Princeton, New Jersey.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2011b. Charting the course of reed-warblers across the Pacific islands. Journal of Biogeography 38: 1963–1975.</p>
<p><NAME>., <NAME>., and <NAME>. 1986a. <a href="https://biodiversitylibrary.org/page/14483769">Family Sylviidae, Old World warblers</a>. Pages 3-294 in E. Mayr and <NAME> (editors), Check-list of birds of the world. Volume XI. Museum of Comparative Zoology, Cambridge, Massachusetts.</p>
<p> </p>
<p>page 430, <strong>Australian Reed Warbler <em>Acrocephalus australis</em></strong></p>
<p>Subspecies <em>carterae</em>, with range “NW Australia”, is considered to be a junior synonym of <em>gouldi</em> (Watson et al. 1986a, Schodde and Mason 1999), and is deleted. Revise the range description of <em>gouldi</em> from “SW Western Australia (Pilbara to Esperance); winters to north” to “breeds western Australia (southwestern Western Australia); winters to northwestern Northern Territory”.</p>
<p>References:</p>
<p><NAME>., and <NAME>. 1999. The directory of Australian birds. Passerines. CSIRO Publishing, Canberra.</p>
<p><NAME>., <NAME>., and <NAME>. 1986a. <a href="https://biodiversitylibrary.org/page/14483769">Family Sylviidae, Old World warblers</a>. Pages 3-294 in E. Mayr and <NAME> (editors), Check-list of birds of the world. Volume XI. Museum of Comparative Zoology, Cambridge, Massachusetts.</p>
<p> </p>
<p>page (addition 2013), <strong>Mangareva Reed Warbler <em>Acrocephalus astrolabii</em></strong></p>
<p>Revise the range description from “Mangareva (Gambier Islands); extinct” to “formerly Mangareva, and perhaps other islands, in the Gambier Islands; extinct, no confirmed records since 1838-1839, but may have persisted until the late 19th century” (Cibois et al. 2011a).</p>
<p>Reference:</p>
<p><NAME>., <NAME>, and <NAME>. 2011a. <a href="https://biodiversitylibrary.org/page/50898421">Molecular and morphological analysis of Pacific reed warbler specimens of dubious origin, including <em>Acrocephalus luscinius astrolabii</em></a>. Bulletin of the British Ornithologists’ Club 131: 32-40.</p>
<p> </p>
<p>page 442, <strong>Spinifex-bird <em>Megalurus carteri</em></strong></p>
<p>In accord with widespread usage (e.g., Schodde and Mason 1999, Christidis and Boles 2008), change the English name of <em>Megalurus carteri</em> from Spinifex-bird to Spinifexbird.</p>
<p>References:</p>
<p><NAME>. and <NAME>. 2008. Systematics and taxonomy of Australian birds. CSIRO Publishing, Melbourne.</p>
<p><NAME>., and <NAME>. 1999. The directory of Australian birds. Passerines. CSIRO Publishing, Canberra.</p>
<p> </p>
<p>page 427, <strong>Cinnamon Bracken-Warbler <em>Bradypterus cinnamomeus</em></strong></p>
<p>Revise the range description of subspecies <em>cavei</em> from “SE Sudan” to “southern South Sudan and northeastern Uganda”.</p>
<p> </p>
<p>page 428, <strong>Brown Emu-tail <em>Bradypterus brunneus</em></strong></p>
<p>In accord with widespread usage (e.g., Morris and Hawkins 1998, Sinclair and Langrand 1998), change the English name of <em>Bradypterus brunneus</em> from Brown Emu-tail to Brown Emutail.</p>
<p>References:</p>
<p><NAME>., and <NAME>. 1998. Birds of Madagascar: a photographic guide. Yale University Press, New Haven, Connecticut.</p>
<p><NAME>., and <NAME>. 1998. Birds of the Indian Ocean islands: Madagascar, Mauritius, Réunion, Rodrigues, Seychelles and the Comoros. Struik Publishers, Cape Town.</p>
<p> </p>
<p>page 427, <strong>Grauer’s Swamp-Warbler <em>Bradypterus graueri</em></strong></p>
<p>page 427, <strong>Dja River Swamp-Warbler <em>Bradypterus grandis</em></strong></p>
<p>page 427, <strong>White-winged Swamp-Warbler <em>Bradypterus carpalis</em></strong></p>
<p>Swamp-Warblers are not a monophyletic group, as the name is shared by species in two different, unrelated genera (<em>Acrocephalus</em> and <em>Bradypterus</em>). Therefore change the English group name of all species of swamp-warbler to Swamp Warbler.</p>
<p> </p>
<p>page 427, <strong>Grauer’s Swamp Warbler <em>Bradypterus graueri</em></strong></p>
<p>Revise the range description of subspecies <em>sudanensis</em> from “S Sudan” to “South Sudan and western Ethiopia”.</p>
<p> </p>
<p>page 428, <strong>Brown Bush-Warbler <em>Locustella luteoventris</em></strong></p>
<p>page 428, <strong>Chinese Bush-Warbler <em>Locustella tacsanowskia</em></strong></p>
<p>page 427, <strong>Long-billed Bush-Warbler <em>Locustella major</em></strong></p>
<p>page 428, <strong>Long-tailed Bush-Warbler <em>Locustella caudata</em></strong></p>
<p>page 428, <strong>Chestnut-backed Bush-Warbler <em>Locustella castanea</em></strong></p>
<p>page 427, <strong>Baikal Bush-Warbler <em>Locustella davidi</em></strong></p>
<p>page 427, <strong>West Himalayan Bush-Warbler <em>Locustella kashmirensis</em></strong></p>
<p>page 427, <strong>Spotted Bush-Warbler <em>Locustella thoracica</em></strong></p>
<p>page 428, <strong>Taiwan Bush-Warbler <em>Locustella alishanensis</em></strong></p>
<p>page 428, <strong>Friendly Bush-Warbler <em>Locustella accentor</em></strong></p>
<p>page 428, <strong>Russet Bush-Warbler <em>Locustella mandelli</em></strong></p>
<p>page (addition 2015), <strong>Sichuan Bush-Warbler <em>Locustella chengi</em></strong></p>
<p>page 428, <strong>Benguet Bush-Warbler <em>Locustella seebohmi</em></strong></p>
<p>page 428, <strong>Javan Bush-Warbler <em>Locustella montis</em></strong></p>
<p>page 428, <strong>Timor Bush-Warbler <em>Locustella timorensis</em></strong></p>
<p>Bush-Warblers are not a monophyletic group, as the name is shared by species in five different, unrelated genera (<em>Urosphena, Cettia, Horornis, Locustella</em>, and <em>Elaphrornis</em>). Therefore change the English group name of all bush-warblers to Bush Warbler.</p>
<p> </p>
<p>page 428, <strong>Chestnut-backed Bush Warbler <em>Locustella castanea</em></strong></p>
<p>Change the spelling of the subspecies name <em>muscula</em> to <em>musculus</em>.</p>
<p> </p>
<p>page 428, <strong>Sri Lanka Bush-Warbler <em>Elaphrornis palliseri</em></strong></p>
<p>Bush-Warblers are not a monophyletic group, as the name is shared by species in five different, unrelated genera (<em>Urosphena, Cettia, Horornis, Locustella</em>, and <em>Elaphrornis</em>). Therefore change the English name of <em>Elaphrornis palliseri</em> from Sri Lanka Bush-Warbler to Sri Lanka Bush Warbler.</p>
<p> </p>
<p>page 428, <strong>Gray Emu-tail <em>Amphilais seebohmi</em></strong></p>
<p>In accord with widespread usage (e.g., Morris and Hawkins 1998, Sinclair and Langrand 1998), change the English name of <em>Amphilais seebohmi</em> from Gray Emu-tail to Gray Emutail.</p>
<p>References:</p>
<p><NAME>., and <NAME>. 1998. Birds of Madagascar: a photographic guide. Yale University Press, New Haven, Connecticut.</p>
<p><NAME>., and <NAME>. 1998. Birds of the Indian Ocean islands: Madagascar, Mauritius, Réunion, Rodrigues, Seychelles and the Comoros. Struik Publishers, Cape Town.</p>
<p> </p>
<p>page 422, <strong>Black-collared Apalis <em>Apalis pulchra</em></strong></p>
<p>Revise the range description of nominate <em>pulchra</em> from “Mts. of se Nigeria and Cameroon; se Sudan to Uganda and Kenya” to “southeastern Nigeria and Cameroon; southern South Sudan, highlands of eastern Democratic Republic of the Congo, southeastern Uganda, and Kenya”.</p>
<p> </p>
<p>page 423, <strong>Yellow-breasted Apalis <em>Apalis flavida</em></strong></p>
<p>Revise the range description of subspecies <em>flavocincta </em>from “SE Sudan to n Uganda, s Ethiopia, Somalia, Kenya, ne Tanzania” to “southeastern South Sudan, southern Ethiopia, southern Somalia, northeastern Uganda, Kenya, and extreme northeastern Tanzania”.</p>
<p> </p>
<p>page 423, <strong>Buff-throated Apalis <em>Apalis rufogularis</em></strong></p>
<p>Revise the range description of subspecies <em>nigrescens </em>from “SW Sudan to Democratic Republic of the Congo, Zambia, ne Angola, Uganda and nw Tanzania” to “extreme southwestern South Sudan, Democratic Republic of the Congo, Uganda, western Kenya, and extreme northwestern Tanzania south to extreme northeastern Angola, and northwestern Zambia”.</p>
<p> </p>
<p>page 424, <strong>Gray Apalis <em>Apalis cinerea</em></strong></p>
<p>Revise the range description of nominate <em>cinerea</em> from “Democratic Republic of the Congo to s Sudan, ne Uganda, Rwanda, Kenya and nw Tanzania” to “southern South Sudan, eastern Democratic Republic of the Congo, Uganda, Rwanda, Burundi, Kenya, and northern Tanzania”.</p>
<p> </p>
<p>page 424, <strong>Green-backed Camaroptera <em>Camaroptera brachyura</em></strong></p>
<p>Revise the range description of subspecies <em>abessinica</em> from “S Sudan to ne Democratic Republic of the Congo, Ethiopia, n Uganda, n Kenya and w Somalia” to “South Sudan, Ethiopia, Djibouti, northern Somalia, northeastern Democratic Republic of the Congo, northern Uganda, and northern Kenya”.</p>
<p> </p>
<p>page 424, <strong>Olive-green Camaroptera <em>Camaroptera chloronota</em></strong></p>
<p>Revise the range description of subspecies <em>toroensis</em> from “Central African Rep. to sw Sudan, Uganda, w Kenya and Tanzania” to “southeastern Central African Republic to southwestern South Sudan, northern and central Democratic Republic of the Congo, Uganda, western Kenya, and northwestern Tanzania”.</p>
<p> </p>
<p>page 424, <strong>Red-fronted Warbler <em>Urorhipis rufifrons</em></strong></p>
<p>Revise the range description of subspecies <em>smithi</em> from “S Sudan to se Ethiopia, Somalia, Uganda, Kenya and Tanzania” to “South Sudan to southeastern Ethiopia, Somalia, extreme northeastern Uganda, Kenya, and northeastern Tanzania”.</p>
<p> </p>
<p>page 425, <strong>Gray Wren-Warbler <em>Calamonastes simplex</em></strong></p>
<p>Revise the range description from “SE Sudan to Ethiopia, Somalia, Kenya, ne Uganda, ne Tanzania” to “southeastern South Sudan, Ethiopia, Somalia, northeastern Uganda, Kenya, and northeastern Tanzania”.</p>
<p> </p>
<p>page 422, <strong>Red-winged Gray Warbler <em>Drymocichla incana</em></strong></p>
<p>Revise the range description from “E Nigeria and Cameroon to se Sudan and nw Uganda” to “extreme Nigeria and Cameroon east to northern Democratic Republic of the Congo, southern South Sudan, and northwestern Uganda”.</p>
<p> </p>
<p>page 414, <strong>Red-faced Cisticola <em>Cisticola erythrops</em></strong></p>
<p>Revise the range description of subspecies <em>sylvia</em> from “NE Democratic Republic of the Congo and s Sudan to central Tanzania and s Democratic Republic of the Congo” to “northeastern Democratic Republic of the Congo and southwestern South Sudan to Kenya and central Tanzania”.</p>
<p>Revise the range description of subspecies <em>pyrrhomitra</em> from “SE Sudan to Ethiopia” to “southeastern South Sudan and Ethiopia”.</p>
<p>Revise the range description of subspecies <em>niloticus</em> from “Sudan (upper Blue Nile)” to “southeastern Sudan (upper Blue Nile) and adjacent western Ethiopia”.</p>
<p> </p>
<p>page 414, <strong>Singing Cisticola <em>Cisticola cantans</em></strong></p>
<p>Revise the range description of subspecies <em>concolor </em>from “N Nigeria to s Sudan” to “northern Nigeria to southern Sudan and South Sudan”.</p>
<p> </p>
<p>page 415, <strong>Rock-loving Cisticola <em>Cisticola aberrans</em></strong></p>
<p>Revise the range description of subspecies <em>petrophilus</em> from “N Nigeria to w Cameroon, ne Democratic Republic of the Congo and sw Sudan” to “northern Nigeria and Cameroon east to northeastern Democratic Republic of the Congo, southern South Sudan, and northern Uganda”.</p>
<p> </p>
<p>page 415, <strong>Boran Cisticola <em>Cisticola bodessa</em></strong></p>
<p>Revise the range description of nominate <em>bodessa</em> from “Juniper woodlands of s Sudan to Eritrea, s Ethiopia and n Kenya” to “northern Eritrea, southeastern South Sudan, southern Ethiopia, and northern Kenya”.</p>
<p> </p>
<p>page 415, <strong>Rattling Cisticola <em>Cisticola chiniana</em></strong></p>
<p>Revise the range description of subspecies <em>simplex</em> from “S Sudan to n Uganda” to “southern South Sudan, northeastern Democratic Republic of the Congo, and northern Uganda”.</p>
<p> </p>
<p>page 415, <strong>Ashy Cisticola <em>Cisticola cinereolus</em></strong></p>
<p>Revise the range description of subspecies <em>schillingsi</em> from “S Ethiopia and extreme se Sudan to n Tanzania” to “southeastern South Sudan, southern Ethiopia, southern Somalia, Kenya, and northern Tanzania”.</p>
<p> </p>
<p>page 415, <strong>Red-pate Cisticola <em>Cisticola ruficeps</em></strong></p>
<p>Revise the range description of nominate <em>ruficeps</em> from “Chad to s Sudan (Kordofan and Bahr-el-Ghazal)” to “Chad, southwestern Sudan, and southwestern South Sudan”.</p>
<p>Revise the range description of subspecies <em>mongalla</em> from “S Sudan (upper White Nile) to n Uganda” to “south central South Sudan (upper White Nile) and northern Uganda”.</p>
<p> </p>
<p>page 416, <strong>Winding Cisticola <em>Cisticola galactotes</em></strong></p>
<p>Revise the range description of subspecies <em>zalingei </em>from “N Nigeria to s Sudan (Darfur)” to “northern Nigeria east to southwestern Sudan”.</p>
<p>Revise the range description of subspecies <em>marginatus</em> from “S Sudan (upper White Nile) to n Uganda” to “central Sudan (White Nile Valley), South Sudan, western Ethiopia, and northern Uganda”.</p>
<p> </p>
<p>page 417, <strong>Siffling Cisticola <em>Cisticola brachypterus</em></strong></p>
<p>Revise the range description of nominate <em>brachypterus</em> from “Gambia to Central African Republic, Sudan, n Democratic Republic of the Congo and n Angola” to “Senegal south to Liberia, east to Central African Republic, western South Sudan, and northern Democratic Republic of the Congo, south to northern coastal Angola”.</p>
<p>Revise the range description of subspecies <em>hypoxanthus</em> from “NE Democratic Republic of the Congo to n Uganda and se Sudan” to “northeastern Democratic Republic of the Congo, southern South Sudan, and northern Uganda”.</p>
<p> </p>
<p>page 417, <strong>Foxy Cisticola <em>Cisticola troglodytes</em></strong></p>
<p>Revise the range description of nominate <em>troglodytes</em> from “Central African Republic to s Sudan (White Nile) and w Kenya” to “southern Chad, Central African Republic, southern Sudan, South Sudan, and extreme western Kenya”.</p>
<p>Revise the range description of subspecies <em>ferrugineus</em> from “W Ethiopia and adjacent e Sudan (Blue Nile)” to “southeastern Sudan (upper Blue Nile), eastern South Sudan, and adjacent western Ethiopia”.</p>
<p> </p>
<p>page 417, <strong>Tiny Cisticola <em>Cisticola nana</em></strong></p>
<p>Revise the range description from “SE Sudan to Ethiopia, Somalia, Kenya and n Tanzania” to “Ethiopia, southeastern South Sudan, southwestern Somalia, Kenya, and northeastern Tanzania”.</p>
<p> </p>
<p>page 418, <strong>Desert Cisticola <em>Cisticola aridulus</em></strong></p>
<p>Revise the range description of nominate <em>aridulus </em>from “Mali, Niger and n Nigeria to s Sudan” to “southern Mauritania and Senegal east to Sudan”.</p>
<p> </p>
<p>page 424, <strong>Gray-capped Warbler <em>Eminia lepida</em></strong></p>
<p>Revise the range description from “SE Sudan to n Democratic Republic of the Congo, Rwanda, Burundi, Uganda, Kenya, Tanzania” to “southern South Sudan, northeastern Democratic Republic of the Congo, Rwanda, Burundi, Uganda, Kenya, and northern Tanzania”.</p>
<p> </p>
<p>page 428, <strong>Black-faced Rufous-Warbler <em>Bathmocercus rufus</em></strong></p>
<p>Revise the range description of subspecies <em>vulpinus</em> from “E Democratic Republic of the Congo to extreme s Sudan, Uganda, Kenya and Tanzania” to “northeastern Democratic Republic of the Congo, extreme southern South Sudan, Uganda, extreme Kenya, and northwestern Tanzania”.</p>
<p> </p>
<p>page 432, <strong>Buff-bellied Warbler <em>Phyllolais pulchella</em></strong></p>
<p>Revise the range description from “E Nigeria to Chad, s Sudan, Ethiopia, Democratic Republic of the Congo, Kenya and n Tanzania” to “southwestern Niger and northeastern Nigeria to Sudan, South Sudan, northeastern Democratic Republic of the Congo, Eritrea, Ethiopia, Uganda, western Kenya, northern Rwanda, and northern Tanzania”.</p>
<p> </p>
<p>page 420, <strong>Graceful Prinia <em>Prinia gracilis</em></strong></p>
<p>Revise the range description of nominate <em>gracilis</em> from “Nile Valley (Cairo to n Sudan) and n Egypt (El Faiyum)” to “Nile Valley from northern Egypt to southern Sudan”.</p>
<p>Revise the range description of subspecies <em>carlo</em> from “S Sudan to Eritrea, Ethiopia, Djibouti and s Somalia” to “coastal regions of northeastern Sudan, Eritrea, Ethiopia, Djibouti and Somalia”.</p>
<p> </p>
<p>page 421, <strong>Tawny-flanked Prinia <em>Prinia subflava</em></strong></p>
<p>Revise the range description of nominate <em>subflava</em> from “Senegal to s Sudan, adj. Uganda, s-central Ethiopia and s Eritrea” to “Senegal to western and central Ethiopia and northern Uganda”.</p>
<p>Revise the range description of subspecies <em>pallescens</em> from “Mali to Sudan, Ethiopia and nw Eritrea” to “Mali to Sudan, northwestern Ethiopia, and western Eritrea”.</p>
<p> </p>
<p>page 421, <strong>Pale Prinia <em>Prinia somalica</em></strong></p>
<p>Revise the range description of subspecies <em>erlangeri</em> from “SE Sudan to Ethiopia, Uganda, Kenya and Somalia” to “southeastern South Sudan, southern Ethiopia, central and southern Somalia, northeastern Uganda, and Kenya”.</p>
<p> </p>
<p>pages 421-422, <strong>Red-winged Prinia <em>Prinia erythroptera</em></strong></p>
<p>Revise the range description of subspecies <em>jodoptera</em> from “Central and s Cameroon to s Sudan” to “central and southern Cameroon to South Sudan”.</p>
<p> </p>
<p>page 434, <strong>Green-backed Eremomela <em>Eremomela canescens</em></strong></p>
<p>Revise the range description of nominate <em>canescens</em> from “Central African Rep. to Chad, s Sudan, Uganda and w Kenya” to “eastern Cameroon, southern Chad and Central African Republic to South Sudan, extreme northeastern Democratic Republic of the Congo, northern Uganda, and western Kenya”.</p>
<p>Revise the range description of subspecies <em>abyssinica</em> from “Eritrea to Ethiopia and Sudan” to “southeastern Sudan, eastern South Sudan, western Eritrea, and western and central Ethiopia”.</p>
<p> </p>
<p>page 434, <strong>Rufous-crowned Eremomela <em>Eremomela badiceps</em></strong></p>
<p>Revise the range description of subspecies <em>latukae</em> from “S Sudan” to “southern South Sudan”.</p>
<p> </p>
<p>page 443, <strong>Desert Whitethroat <em>Sylvia minula</em></strong></p>
<p>page 443, <strong>Lesser Whitethroat <em>Sylvia curruca</em></strong></p>
<p>Subspecies <em>Sylvia curruca jaxartica</em> (Lesser Whitethroat), with range “Plains of s Transcaspia”, is considered to be a junior synonym of Desert Whitethroat <em>Sylvia minula</em> (Shirihai et al. 2001), and is deleted. Revise the range description of Desert Whitethroat from “breeds from Turkmenistan and Uzbekistan (and probably Iran) east through northern Afghanistan and Tajikistan to western China; winters to the south” to “breeds from Uzbekistan, Turkmenistan and central Iran east through northern Afghanistan and Tajikistan to western China; winters to the south”.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, and <NAME>. 2001. <em>Sylvia</em> warblers. Princeton University Press, Princeton, New Jersey.</p>
<p> </p>
<p>page 443, <strong>Lesser Whitethroat <em>Sylvia curruca</em></strong></p>
<p>Subspecies <em>caucasica</em>, with range “Mountains of Balkan Peninsula to w Iran and Caucasus Mountains”, is considered to be a junior synonym of nominate <em>curruca</em> (Shirihai et al. 2001), and is deleted. Revise the range description of <em>curruca</em> from “W Europe to Caucasus and w Siberia; winters to central Africa” to “breeds Europe east to western Russia (approximately to the Ural Mountains), the Caucasus, Turkey, and western Iran; winters primarily in northeastern Africa, less commonly west to Senegambia”.</p>
<p>Subspecies <em>telengitica</em>, with range “Deserts of Russian Altai to w and s Mongolia”, is considered to be a junior synonym of subspecies <em>halimodendri</em> (Shirihai et al. 2001), and is deleted. Revise the range description of <em>halimodendri</em> from “Plains of lower Volga to e Kazakhstan (Lake Zaysan) and w Altai” to “breeds central Asian steppes, from the lower Ural River and Transcaspia east to western Mongolia; primarily winters in southwestern Asia (southeastern Iran to northwestern India), but perhaps also in eastern Africa”.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, and <NAME>. 2001. <em>Sylvia</em> warblers. Princeton University Press, Princeton, New Jersey.</p>
<p> </p>
<p>page 443, <strong>Wrentit <em>Chamaea fasciata</em></strong></p>
<p>Add a previously overlooked subspecies, <em>margra</em> Browning 1992, with range “interior of southwestern Oregon”. Insert subspecies <em>margra</em> immediately following the entry for subspecies <em>phaea</em>.</p>
<p>Subspecies <em>intermedia</em>, with range “Chaparral belt of central California (San Francisco region)”, is considered to be a junior synonym of nominate <em>fasciata</em> (Phillips 1986), and is deleted. Revise the range description of <em>fasciata</em> from “Coastal s California (Monterey to San Luis Obispo counties)” to “interior of coastal California from Lake County south to the San Francisco Bay region, then south coastally to San Luis Obispo County”.</p>
<p>Subspecies <em>canicauda</em>, with range “Chaparral belt of nw Baja California”, is considered to be a junior synonym of subspecies <em>henshawi </em>(Phillips 1986), and is deleted. Revise the range description of <em>henshawi</em> from “Chaparral belt of interior s Oregon to s California (San Diego)” to “interior of northern California (inner Coast Ranges and Sierra Nevada) and coastal southern California to northwestern Baja California”.</p>
<p>References:</p>
<p><NAME>. 1992. <a href="https://biodiversitylibrary.org/page/35607596">A new subspecies of <em>Chamaea fasciata </em>(Wrentit) from Oregon (Aves: Timaliinae)</a>. Proceedings of the Biological Society of Washington 105: 414-419.</p>
<p>Phillips, A.R. 1986. The known birds of North and Middle America. Part I. Hirundinidae to Mimidae; Certhiidae. Privately published, Denver, Colorado.</p>
<p> </p>
<p>page 549, <strong>African Yellow White-eye <em>Zosterops senegalensis</em></strong></p>
<p>Revise the range description of subspecies <em>gerhardi</em> from “S Sudan and ne Uganda border” to “extreme southern South Sudan and northeastern Uganda”.</p>
<p> </p>
<p>page 550, <strong>White-breasted White-eye <em>Zosterops abyssinicus</em></strong></p>
<p>Revise the range description of subspecies <em>arabs</em> from “S Arabian Peninsula (Yemen and extreme n Aden)” to “southern Arabian Peninsula (southwestern Saudi Arabia, Yemen, and southwestern Oman)”.</p>
<p>Revise the range description of nominate <em>abyssinicus</em> from “Lowlands of Eritrea and e Ethiopia to se Sudan” to “northeastern Sudan, Eritrea, and northern and central Ethiopia”.</p>
<p> </p>
<p>page 550, <strong>Anjouan White-eye <em>Zosterops anjouanensis</em></strong></p>
<p>Change the spelling of the species name from <em>anjouanensis</em> to <em>anjuanensis </em>(Newton 1877, Dickinson and Christidis 2014).</p>
<p>References:</p>
<p><NAME>., and <NAME>. 2014. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 2. Aves Press, Eastbourne, United Kingdom.</p>
<p><NAME>. 1877. <a href="https://biodiversitylibrary.org/page/28510901">On a collection of birds from the island of Anjuan</a>. Proceedings of the Zoological Society of London [1877] 295-302.</p>
<p> </p>
<p>page 551, <strong>Japanese White-eye <em>Zosterops japonicus</em></strong></p>
<p>Correct the English name of the monotypic group Japanese White-eye (<em>simplex/haianus</em>) to Japanese White-eye (<em>simplex/hainanus</em>); and correct the scientific name of this group from <em>Zosterops japonicus simplex/haianus</em> to <em>Zosterops japonicus simplex/hainanus</em>.</p>
<p> </p>
<p>page 551, <strong>Lowland White-eye <em>Zosterops meyeni</em></strong></p>
<p>Revise the range description of subspecies <em>batanis</em> from “Philippines (Batan, Sabtang, Ivojos, Itbayat and Y’Ami)” to “Lüdao (Green) and Lanyu (Orchid) Islands (southeast of Taiwan) and Batan Islands (Philippines, north of Luzon)”. Reverse the sequence of the two subspecies, so that <em>batanis </em>precedes nominate <em>meyeni</em>.</p>
<p> </p>
<p>page 553, <strong>Cream-throated White-eye <em>Zosterops atriceps</em></strong></p>
<p>Revise the range description of subspecies <em>atriceps</em> from “N Moluccas (Bacan and Obi)” to “Bacan, north Moluccas”.</p>
<p> </p>
<p>page 554, <strong>Silver-eye <em>Zosterops lateralis</em></strong></p>
<p>Revise the range description of nominate <em>lateralis</em> from “Tasmania and Flinders I. (Bass Strait)” to “breeds Tasmania and Flinders I. (Bass Strait); winters in coastal eastern Australia. Also colonized New Zealand in the 19th century, now resident throughout New Zealand (colonized ca 1832) and on Norfolk Island (colonized 1904)”.</p>
<p> </p>
<p>page 449, <strong>Chevron-breasted Babbler <em>Sphenocichla roberti</em></strong></p>
<p>Chevron-breasted Babbler <em>Sphenocichla roberti</em> is transferred to the genus <em>Stachyris</em>, following Moyle et al. (2012); reposition this species to immediately follow Blackish-breasted Babbler <em>Stachyris humei</em>.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2012. Phylogeny and biogeography of the core Babblers (Aves: Timaliidae). Systematic Biology 61: 631-651.</p>
<p> </p>
<p>page 495, <strong>Scaly-breasted Illadopsis <em>Illadopsis albipectus</em></strong></p>
<p>Revise the range description from “NW Angola to Democratic Republic of the Congo, Uganda, w Kenya, nw Tanzania, se Sudan” to “northern Democratic Republic of the Congo, southeastern Central African Republic, southern South Sudan, Uganda, western Kenya, and northwestern Tanzania; northwestern Angola”.</p>
<p> </p>
<p>page 496, <strong>Thrush Babbler<em> Illadopsis turdina</em></strong></p>
<p>Revise the range description of the monotypic group Thrush Babbler (Thrush) <em>Illadopsis turdina turdina</em> from “S Sudan and ne Democratic Republic of the Congo” to “eastern Central African Republic, extreme southern South Sudan, and adjacent northeastern Democratic Republic of the Congo”.</p>
<p> </p>
<p>page 498, <strong>Eyebrowed Wren-Babbler <em>Napothera epilepidota</em></strong></p>
<p>Revise the range description of subspecies <em>granti</em> from “N Malay Peninsula” to “Thai-Malay Peninsula”.</p>
<p> </p>
<p>page 498, <strong>Streaked Wren-Babbler <em>Turdinus brevicaudatus</em></strong></p>
<p>Revise the range description of subspecies <em>leucostictus</em> from “N Malay Peninsula” to “Thai-Malay Peninsula”.</p>
<p> </p>
<p>page 503, <strong>Rufous Chatterer <em>Turdoides rubiginosa</em></strong></p>
<p>Revise the range description of nominate <em>rubiginosa</em> from “SE Sudan, Ethiopia, n Uganda and Kenya” to “southeastern South Sudan, Ethiopia, northern Uganda, and Kenya (except southeast)”.</p>
<p> </p>
<p>page 504, <strong>White-rumped Babbler <em>Turdoides leucopygia</em></strong></p>
<p>Revise the range description of subspecies <em>limbata</em> from “W Eritrea and nw Ethiopia” to “extreme eastern Sudan, western Eritrea, and northwestern Ethiopia”.</p>
<p>Revise the range description of subspecies <em>omoensis</em> from “S and sw Ethiopia to se Sudan” to “southeastern South Sudan and western and southwestern Ethiopia”.</p>
<p> </p>
<p>page 505, <strong>Brown Babbler <em>Turdoides plebejus</em></strong></p>
<p>Revise the range description of subspecies <em>cinerea</em> from “SE Nigeria to s Sudan, sw Ethiopia and w Kenya” to “southeastern Nigeria to South Sudan, northeastern Democratic Republic of the Congo, southwestern Ethiopia, Uganda, and western Kenya”.</p>
<p> </p>
<p>page 491, <strong>Chinese Hwamei <em>Garrulax canorus</em></strong></p>
<p>Revise the range description of nominate <em>canorus</em> from “S China (Yangtze Valley) to Tonkin, n Annam and n Laos” to “central eastern and southeastern China to northern Laos and central Vietnam; introduced to Taiwan (where interbreeds with Taiwan Hwamei) and Hawaiian Islands”.</p>
<p> </p>
<p>page 491, <strong>Chestnut-hooded Laughingthrush <em>Ianthocincla treacheri</em></strong></p>
<p>Revise the range description of subspecies <em>griswoldi </em>from “Highlands of central Borneo (Schwaner and Müller mountains)” to “highlands of central Borneo (Schwaner and Müller mountains); a population in the Meratus Mountains, South Kalimantan, Borneo probably is an undescribed subspecies”.</p>
<p> </p>
<p>page 505, <strong>Chinese Babax <em>Ianthocincla lanceolata</em></strong></p>
<p>Change the English name of the monotypic group <em>Ianthocincla lanceolata woodi</em> from Chinese Babax (Mount Victoria) to Chinese Babax (Mt. Victoria).</p>
<p> </p>
<p>page 496, <strong>Gray-chested Illadopsis <em>Kakamega poliothorax</em></strong></p>
<p>Change the English name of <em>Kakamega poliothorax</em> from Gray-chested Illadopsis to Gray-chested Babbler (Fry and Keith 2000).</p>
<p>Reference:</p>
<p><NAME>., and <NAME> (editors). 2000. The birds of Africa. Volume VI. Academic Press, London.</p>
<p> </p>
<p>page 441, <strong>Yellow-bellied Hyliota <em>Hyliota flavigaster</em></strong></p>
<p>Revise the range description of nominate <em>flavigaster</em> from “Senegal to s Sudan, w Ethiopia, Kenya and Tanzania” to “Senegambia to Sierra Leone, east to South Sudan, western Ethiopia, Uganda, and western Kenya”.</p>
<p> </p>
<p>page 449, <strong>Dusky-brown Flycatcher <em>Muscicapa adusta</em></strong></p>
<p>Revise the range description of subspecies <em>pumila</em> from “Mountains of s Sudan to Cameroon, Uganda and n Tanzania” to “Cameroon and northwestern Central African Republic; southern South Sudan, eastern Democratic Republic of the Congo, Uganda, western Rwanda, western Burundi, western Kenya, and northern Tanzania”.</p>
<p> </p>
<p>page 449, <strong>Swamp Flycatcher <em>Muscicapa aquatica</em></strong></p>
<p>Revise the range description of nominate <em>aquatica</em> from “Gambia to sw Sudan and n Democratic Republic of the Congo” to “southwestern Mauritania south to Gambia, east Nigeria, northern Cameroon, southern Chad, northern Central African Republic, western South Sudan, and northern Democratic Republic of the Congo”.</p>
<p>Revise the range description of subspecies <em>infulata</em> from “S Sudan to e Democratic Republic of the Congo, w Kenya, nw Tanzania and ne Zambia” to “central and eastern South Sudan, northeastern Democratic Republic of the Congo, Uganda, Rwanda, western Burundi, western Kenya, northwestern Tanzania, and northeastern Zambia”.</p>
<p> </p>
<p>page 449, <strong>Boehm’s Flycatcher <em>Bradornis boehmi</em></strong></p>
<p>In accord with widespread usage (e.g., Britton 1980, Dowsett and Forbes-Watson 1993, Urban et al. 1997), change the English name of <em>Bradornis boehmi</em> from Boehm’s Flycatcher to Böhm’s Flycatcher.</p>
<p>References:</p>
<p><NAME>. (editor). 1980. Birds of east Africa. East Africa Natural History Society, Nairobi.</p>
<p><NAME>., and <NAME>. 1993. Checklist of birds of the Afrotropical and Malagasy regions. Volume 1: species limits and distribution. Tauraco Press, Liège, Belgium.</p>
<p>Urban, E.K., <NAME>, and <NAME>. 1997. The birds of Africa. Volume V. Academic Press, London.</p>
<p> </p>
<p>page 449, <strong>Dusky-blue Flycatcher <em>Bradornis comitatus</em></strong></p>
<p>Revise the range description of nominate <em>comitatus</em> from “Cameroon to nw Angola, Democratic Republic of the Congo, Uganda and sw Sudan” to “Cameroon south to northwestern Angola, east to southern Central African Republic, northern and central Democratic Republic of the Congo, extreme southwestern South Sudan, and Uganda”.</p>
<p> </p>
<p>page 447, <strong>Grayish Flycatcher <em>Bradornis microrhynchus</em></strong></p>
<p>Revise the range description of subspecies <em>neumanni</em> from “SE Sudan to s Ethiopia, central Somalia, n Kenya and ne Uganda” to “southeastern South Sudan, southern Ethiopia, central and southern Somalia, northern Uganda, and northern Kenya”.</p>
<p> </p>
<p>page 446, <strong>Pale Flycatcher <em>Agricola pallidus</em></strong></p>
<p>Revise the range description of nominate <em>pallidus</em> from “Senegambia to n Democratic Republic of the Congo, s Sudan and w Ethiopia” to “Senegambia to South Sudan and western Ethiopia; also northeastern Democratic Republic of the Congo, possibly this subspecies (or parvus?)”.</p>
<p>Revise the range description of subspecies <em>parvus</em> from “SW Ethiopia to e Sudan, e Democratic Republic of the Congo and nw Uganda” to “eastern South Sudan, southwestern Ethiopia, and northwestern Uganda”.</p>
<p>Revise the range description of subspecies <em>murinus</em> from “Gabon to Congo, Angola, w Kenya, n Botswana, nw Zimbabwe” to “Gabon, Congo, and Angola, east to southeastern South Sudan and western and southern Kenya, south to northeastern Namibia, northern Botswana, western and southern Zambia, and Zimbabwe”.</p>
<p> </p>
<p>page 449, <strong>Ashy Flycatcher <em>Fraseria caerulescens</em></strong></p>
<p>Revise the range description of subspecies <em>brevicauda</em> from “SE Nigeria to nw Angola, e Democratic Republic of the Congo, s Sudan and Uganda” to “southern Nigeria east to southern South Sudan, Uganda, and extreme western Kenya, south to northwestern Angola and southern Democratic Republic of the Congo”.</p>
<p> </p>
<p>page 446, <strong>Silverbird <em>Melaenornis semipartitus</em></strong></p>
<p>Revise the range description from “S Sudan to n Ethiopia, Uganda, w Kenya and w Tanzania” to “southwestern and southern Sudan, South Sudan, western Ethiopia, eastern Uganda, western Kenya, and northern Tanzania”.</p>
<p> </p>
<p>page 447, <strong>White-eyed Slaty-Flycatcher <em>Melaenornis fischeri</em></strong></p>
<p>Revise the range description of nominate <em>fischeri</em> from “Mountains of se Sudan to Uganda, Kenya and ne Tanzania” to “southeastern South Sudan, Uganda, Kenya, and northern Tanzania”.</p>
<p> </p>
<p>page 414, <strong>Fire-crested Alethe <em>Alethe diademata</em></strong></p>
<p>Revise the range description of subspecies <em>woosnami</em> from “E Democratic Republic of the Congo to sw Sudan, w Uganda and nw Tanzania” to “northern and central Democratic Republic of the Congo, southwestern South Sudan, western Uganda, and extreme northwestern Tanzania”.</p>
<p> </p>
<p>page 458, <strong>Red-backed Scrub-Robin <em>Cercotrichas leucophrys</em></strong></p>
<p>Revise the range description of subspecies <em>leucoptera</em> from “S Sudan to ne Uganda, s Ethiopia, n Somalia and n Kenya” to “southeastern South Sudan, central and southern Ethiopia, northwestern Somalia, and northern Kenya”.</p>
<p>Revise the range description of subspecies <em>zambesiana</em> from “S Sudan to e Kenya, n Mozambique and e Zambia” to “extreme southern South Sudan and northern and eastern Democratic Republic of the Congo to western Kenya, south to Malawi, northern Mozambique, and eastern Zimbabwe”.</p>
<p> </p>
<p>page 459, <strong>Oriental Magpie-Robin <em>Copsychus saularis</em></strong></p>
<p>Subspecies <em>nesiotes</em>, with range “SE Sumatra, Rhio Archipelago, Belitung and Bangka islands”, is considered to be a junior synonym of <em>musicus</em>, and is deleted (Ripley 1964, <NAME> and Voous 1988). Revise the range description of <em>musicus</em> from “Thai-Malay Peninsula (perhaps an area of introgression between <em>saularis</em> and <em>musicus</em>?), Sumatra and associated islands (Simeulue, the Batu Islands, Nias, and the Mentawai Islands), and western Java; intergrades with amoenus in central Java” to “Thai-Malay Peninsula (perhaps an area of introgression between <em>saularis</em> and <em>musicus</em>?), Sumatra and associated islands (Simeulue, the Batu Islands, Nias, Mentawai Islands, Riau Archipelago, Belitung, and Bangka), and western Java; intergrades with <em>amoenus</em> in central Java”.</p>
<p>References:</p>
<p><NAME>. 1964. <a href="https://biodiversitylibrary.org/page/14486255">Subfamily Turdinae, Thrushes</a>. Pages 13-227 in E. Mayr and R.<NAME>, Jr. (editors), Check-list of birds of the world. Volume X. Museum of Comparative Zoology, Cambridge, Massachusetts.</p>
<p><NAME>, J.G., and <NAME>. 1988. The birds of Sumatra. B.O.U. Check-list number 10. British Ornithologists’ Union, Tring, United Kingdom.</p>
<p> </p>
<p>page 459, <strong>Madagascar Magpie-Robin <em>Copsychus albospecularis</em></strong></p>
<p>Revise the range description of the monotypic group Madagascar Magpie-Robin (Black-bellied) <em>Copsychus albospecularis albospecularis</em> from “N Madagascar” to “northeastern Madagascar (intergrading with <em>inexspectatus</em> in central eastern Madagascar)”.</p>
<p>Change the spelling of the scientific name of the monotypic group Madagascar Magpie-Robin (White-bellied) from <em>Copsychus albospecularis inexpectatus</em> to the correct original spelling, <em>Copsychus albospecularis inexspectatus</em> (Richmond 1897, Dickinson and Christidis 2014). Revise the range description of <em>inexspectatus</em> from “E Madagascar” to “southeastern Madagascar (intergrading with <em>albospecularis</em> in central eastern Madagascar)”.</p>
<p>Revise the range description of the monotypic group Madagascar Magpie-Robin (White-winged) <em>Copsychus albospecularis picas</em> from “W Madagascar” to “northern, western, and southern Madagascar (Tsaranatana massif south to the Androy region)”.</p>
<p>References:</p>
<p><NAME>., and <NAME>. 2014. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 2. Aves Press, Eastbourne, United Kingdom.</p>
<p><NAME>. 1897. <a href="https://biodiversitylibrary.org/page/7295584">Catalogue of a collection of birds made by <NAME> in Madagascar, with descriptions of three new species</a>. Proceedings of the United States National Museum 19: 677-694.</p>
<p> </p>
<p>page 460, <strong>White-rumped Shama <em>Copsychus malabaricus</em></strong></p>
<p>Subspecies <em>indicus</em>, with range “Nepal to Assam and ne India”; subspecies <em>minor</em>, with range “Hainan (s China)”; and subspecies <em>interpositus</em>, with range “SW China to Myanmar, Thailand, Indochina and Mergui Arch.”, all are considered to be junior synonyms of <em>macrourus</em> Gmelin 1789 (Collar 2005, Wells 2007), a previously overlooked subspecies (Mees 1996), and are deleted. Insert subspecies <em>macrourus</em> immediately following subspecies <em>leggei</em>, with range “Nepal to northeastern India, southwestern China and Hainan Island, Myanmar, Thailand, Indochina (including Con Son Island, southern Vietnam), and the northern Thai-Malay Peninsula; the species also is introduced on Taiwan (subspecies not confirmed, possibly <em>macrourus</em>) and the Hawaiian Islands (probably <em>macrourus</em>)”.</p>
<p>Subspecies <em>mallopercnus</em>, with range “Malay Peninsula, Riau Archipelago and Lingga Archipelago”; subspecies <em>ochroptilus</em>, with range “Anambas Islands (South China Sea)”; subspecies <em>abbotti</em>, with range “Bangka and Belitung islands (off Borneo)”; and subspecies <em>eumesus</em>, with range “Natuna Islands (off Borneo)”, all are considered to be junior synonyms of subspecies <em>tricolor</em> (Ripley 1964, Collar 2005), and are deleted. Revise the range description of subspecies <em>tricolor</em> from “Sumatra, w Java, Banka, Belitung and Karimata islands” to “southern Thai-Malay Peninsula, Sumatra, Riau and Lingga Archipelagos, Bangka and Belitung islands, Anambas Islands, Natuna Islands, and western Java”.</p>
<p>Subspecies <em>hypolizus</em> Oberholser 1912, previously considered to be a junior synonym of <em>melanurus</em> (Ripley 1964), is recognized, following Eaton et al. (2016). Position <em>hypolizus</em>, with range “Simeulue Island, west coast of Sumatra”, immediately following the heading for the polytypic group White-rumped Shama (Barusan) <em>Copsychus malabaricus</em> [<em>melanurus</em> Group].</p>
<p>Subspecies <em>opisthochrus </em>Oberholser 1912, previously considered to be a junior synonym of <em>melanurus</em> (Ripley 1964), is recognized, following Eaton et al. (2016). Position <em>opisthochrus</em>, with range “Lasia and Babi Islands, west coast of Sumatra; possibly extinct in the wild”, immediately following subspecies <em>hypolizus</em>.</p>
<p>Subspecies <em>opisthopelus</em>, with range “Islands off sw Sumatra”, is considered to be a junior synonym of <em>melanurus</em> (Collar 2005), and is deleted. Revise the range description of <em>melanurus</em> from “Islands off nw Sumatra” to “Nias Island and Mentawai Islands, west coast of Sumatra”.</p>
<p>Revise the range description for subspecies <em>mirabilis</em> from “Prinsen I. (Sunda Strait)” to “Panaitan Island (Sunda Strait); possibly extinct”.</p>
<p>References:</p>
<p><NAME>. 2005. Family Turdidae (thrushes). Pages 514-807 in <NAME>, <NAME>, and <NAME> (editors), Handbook of the birds of the world. Volume 10. Lynx Edicions, Barcelona.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2016. Birds of the Indonesian Archipelago: Greater Sundas and Wallacea. Lynx Edicions, Barcelona.</p>
<p><NAME>. 1996. Geographical variation in birds of Java. Publications of the Nuttall Ornithological Club number 26. Cambridge, Massachusetts.</p>
<p><NAME>. 1964. <a href="https://biodiversitylibrary.org/page/14486258">Subfamily Turdinae, Thrushes</a>. Pages 13-227 in E. Mayr and <NAME>, Jr. (editors), Check-list of birds of the world. Volume X. Museum of Comparative Zoology, Cambridge, Massachusetts.</p>
<p>Wells, D.R. 2007. The birds of the Thai-Malay Peninsula. Volume Two. <NAME>, London.</p>
<p> </p>
<p>page 413, <strong>Nilgiri Shortwing <em>Brachypteryx major</em></strong></p>
<p>Nilgiri Shortwing <em>Brachypteryx major</em> is not a member of the genus <em>Brachypteryx</em>, and is placed in a new genus, <em>Sholicola</em> (Robin et al. 2017). Position <em>Sholicola</em> immediately following Rufous-browed Flycatcher <em>Anthipes solitaris</em>. Change the English name from Nilgiri Shortwing to Nilgiri Blue Robin, and the scientific name from <em>Brachypteryx major</em> to <em>Sholicola major</em>.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2017. <a href="https://bmcevolbiol.biomedcentral.com/articles/10.1186/s12862-017-0882-6">Two new genera of songbirds represent endemic radiations from the Shola Sky Islands of the Western Ghats, India</a>. BMC Evolutionary Biology 17: 31.</p>
<p> </p>
<p>page 413, <strong>White-bellied Shortwing <em>Brachypteryx albiventris</em></strong></p>
<p>White-bellied Shortwing <em>Brachypteryx albiventris</em> is not a member of the genus <em>Brachypteryx</em>, and is placed in a new genus, <em>Sholicola</em> (Robin et al. 2017). Position <em>Sholicola</em> immediately following Rufous-browed Flycatcher <em>Anthipes solitaris</em>. Change the English name from White-bellied Shortwing to White-bellied Blue Robin, and the scientific name from <em>Brachypteryx albiventris</em> to <em>Sholicola albiventris</em>.</p>
<p>With the addition of a new subspecies, revise the range description of nominate <em>albiventris </em>from “Peninsular India (sw Madras to Kerala)” to “southern western Ghats (south of the Palghat Gap) in Kerala and Tamil Nadu, southern India”.</p>
<p>Robin et al. (2017) describe a new species, <em>Sholicola ashambuensis</em>, which we provisionally recognize as a subspecies of White-bellied Blue Robin. Position <em>ashambuensis</em> immediately following nominate <em>albiventris</em>. The range of <em>ashambuensis</em> is “southernmost Western Ghats (Ashambu Hills), southern India”.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2017. <a href="https://bmcevolbiol.biomedcentral.com/articles/10.1186/s12862-017-0882-6">Two new genera of songbirds represent endemic radiations from the Shola Sky Islands of the Western Ghats, India</a>. BMC Evolutionary Biology 17: 31.</p>
<p> </p>
<p>page 453, <strong>Timor Blue-Flycatcher <em>Cyornis hyacinthinus</em></strong></p>
<p>page 453, <strong>Rueck’s Blue-Flycatcher <em>Cyornis ruckii</em></strong></p>
<p>page 453, <strong>Hainan Blue-Flycatcher <em>Cyornis hainanus</em></strong></p>
<p>page 453, <strong>White-bellied Blue-Flycatcher <em>Cyornis pallipes</em></strong></p>
<p>page 453, <strong>Pale-chinned Blue-Flycatcher <em>Cyornis poliogenys</em></strong></p>
<p>page 453, <strong>Pale Blue-Flycatcher <em>Cyornis unicolor</em></strong></p>
<p>page 453, <strong>Large Blue-Flycatcher <em>Cyornis magnirostris</em></strong></p>
<p>page 453, <strong>Hill Blue-Flycatcher <em>Cyornis banyumas</em></strong></p>
<p>page 454, <strong>Sunda Blue-Flycatcher <em>Cyornis caerulatus</em></strong></p>
<p>page 454, <strong>Malaysian Blue-Flycatcher <em>Cyornis turcosus</em></strong></p>
<p>page 454, <strong>Palawan Blue-Flycatcher <em>Cyornis lemprieri</em></strong></p>
<p>page 454, <strong>Bornean Blue-Flycatcher <em>Cyornis superbus</em></strong></p>
<p>page 454, <strong>Tickell’s Blue-Flycatcher <em>Cyornis tickelliae</em></strong></p>
<p>page 454, <strong>Mangrove Blue-Flycatcher <em>Cyornis rufigastra</em></strong></p>
<p>page 454, <strong>Sulawesi Blue-Flycatcher <em>Cyornis omissus</em></strong></p>
<p>Blue-Flycatchers are not a monophyletic group, as the name is shared by species in three different, unrelated genera (<em>Elminia, Cyornis</em>, and <em>Ficedula</em>). Therefore change the English group name of all <em>Cyornis </em>blue-flycatchers to blue flycatcher.</p>
<p>In accord with widespread usage (e.g., Eaton et al. 2016), change the spelling of the English name of <em>Cyornis ruckii</em> from Rueck’s Blue-Flycatcher to Rück’s Blue Flycatcher.</p>
<p>Reference:</p>
<p>Eaton, J.A., <NAME>, <NAME>, and <NAME>. 2016. Birds of the Indonesian Archipelago: Greater Sundas and Wallacea. Lynx Edicions, Barcelona.</p>
<p> </p>
<p>page 453, <strong>Hainan Blue-Flycatcher <em>Cyornis hainanus</em></strong></p>
<p>With the transfer of subspecies <em>klossi</em> from Blue-throated Flycatcher <em>Cyornis rubeculoides</em> to Hainan Blue Flycatcher, the latter no longer is monotypic; therefore we insert an entry for the nominate subspecies, <em>Cyornis hainanus hainanus</em>, immediately following the species heading.</p>
<p>Subspecies <em>klossi</em>, previously classified as a subspecies of Blue-throated Flycatcher <em>Cyornis rubeculoides</em>, is transferred to Hainan Blue Flycatcher, following Zhang et al. (2015). Position <em>klossi</em> to immediately follow nominate <em>hainanus</em>.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2015. Unexpected divergence and lack of divergence revealed in continental Asian <em>Cyornis</em> flycatchers (Aves: Muscicapidae). Molecular Phylogenetics and Evolution 94: 232-241.</p>
<p> </p>
<p>page 452, <strong>Island Flycatcher <em>Eumyias panayensis</em></strong></p>
<p>Change the English name of <em>Eumyias panayensis</em> from Island Flycatcher to Turquoise Flycatcher.</p>
<p> </p>
<p>page 457, <strong>Cape Robin-Chat <em>Cossypha caffra</em></strong></p>
<p>Revise the range description of subspecies <em>iolaema</em> from “Mts. of extreme s Sudan to Kenya, Zambia and Mozambique” to “extreme southern South Sudan, eastern Democratic Republic of the Congo, Uganda, and Kenya south to Malawi and northern Mozambique”.</p>
<p> </p>
<p>page 457, <strong>Blue-shouldered Robin-Chat <em>Cossypha cyanocampter</em></strong></p>
<p>Revise the range description of subspecies <em>bartteloti</em> from “NE Democratic Republic of the Congo to s Sudan, Uganda and w Kenya” to “northeastern Democratic Republic of the Congo, southern South Sudan, Uganda, western Kenya, and extreme northwestern Tanzania”.</p>
<p> </p>
<p>page 457, <strong>Gray-winged Robin-Chat <em>Cossypha polioptera</em></strong></p>
<p>Revise the range description of the monotypic group Gray-winged Robin-Chat (Gray-winged) <em>Cossypha polioptera polioptera</em> from “S Sudan to Uganda, nw Tanzania, n Angola and nw Zambia” to “southern South Sudan, eastern and southeastern Democratic Republic of the Congo, Uganda, western Kenya, Rwanda, Burundi, northwestern Tanzania, northern Angola, and northwestern Zambia”.</p>
<p> </p>
<p>page 457, <strong>Rüppell’s Robin-Chat <em>Cossypha semirufa</em></strong></p>
<p>Revise the range description of nominate <em>semirufa</em> from “SE Sudan (Boma Hills) to Eritrea, Ethiopia and n Kenya” to “Eritrea, Ethiopia (except central east), southeastern South Sudan (Boma Hills), and northern Kenya”.</p>
<p>Revise the range description of subspecies <em>donaldsoni</em> from “E and se Ethiopia (Harrar and e Gallaland)” to “central eastern Ethiopia”.</p>
<p> </p>
<p>page 458, <strong>Snowy-crowned Robin-Chat <em>Cossypha niveicapilla</em></strong></p>
<p>Revise the range description of nominate <em>niveicapilla</em> from “Senegal to s Sudan, sw Ethiopia, Uganda, Kenya and Tanzania” to “southern Mauritania and Senegal east to southern South Sudan and western Ethiopia, south to southwestern and northeastern Democratic Republic of the Congo, northwestern and northeastern Angola, and northwestern Tanzania”.</p>
<p> </p>
<p>page 458, <strong>White-crowned Robin-Chat <em>Cossypha albicapillus</em></strong></p>
<p>Revise the range description of subspecies <em>omoensis </em>from “Extreme se Sudan to sw Ethiopia” to “southeastern South Sudan and southwestern Ethiopia”.</p>
<p> </p>
<p>page 458, <strong>Spotted Morning-Thrush <em>Cichladusa guttata</em></strong></p>
<p>Revise the range description of nominate <em>guttata</em> from “S Sudan to w Uganda, Democratic Republic of the Congo and nw Kenya (w of Lake Turkana)” to “eastern South Sudan and southwestern Ethiopia to northeastern Democratic Republic of the Congo, northern Uganda, and northwestern Kenya (west of Lake Turkana)”.</p>
<p>Revise the range description of subspecies <em>intercalans</em> from “SW Ethiopia to Kenya, Tanzania to e Democratic Republic of the Congo” to “southern Ethiopia to central Kenya and central Tanzania”.</p>
<p> </p>
<p>page 414, <strong>Brown-chested Alethe <em>Pseudalethe poliocephala</em></strong></p>
<p>Revise the range description of subspecies <em>giloensis</em> from “S Sudan” to “southeastern South Sudan (Imatong Mountains), population in adjacent northeastern Uganda presumably also this subspecies”.</p>
<p>Revise the range description of subspecies <em>carruthersi</em> from “Extreme s Sudan to ne Democratic Republic of the Congo, Uganda and w Kenya” to “southeastern Central African Republic, northeastern Democratic Republic of the Congo, Uganda, and western Kenya”.</p>
<p> </p>
<p>page 455, <strong>Forest Robin <em>Stiphrornis erythrothorax</em></strong></p>
<p>Revise the range description of the monotypic group Forest Robin (Eastern) <em>Stiphrornis erythrothorax xanthogaster</em> from “E Cameroon to e Democratic Republic of the Congo, w Uganda and extreme s Sudan” to “southeastern Cameroon south to eastern Gabon, east to northern Democratic Republic of the Congo, extreme southwestern South Sudan, and western Uganda”.</p>
<p> </p>
<p>page 456, <strong>Siberian Blue Robin <em>Larvivora cyane</em></strong></p>
<p>Revise the range description of nominate <em>cyane</em> from “S Siberia (Altai Mts. to Sea of Okhotsk); > to Indonesia” to “breeds south central Siberia (and perhaps adjacent northeastern Kazakhastan) and northern Mongolia; the species winters in southern China, Indochina, the Thai-Malay Peninsula, Sumatra, Java, and Borneo, but the nonbreeding distributions of the two subspecies are not well differentiated”.</p>
<p>Revise the range description of subspecies <em>bochaiensis</em> from “breeds eastern Siberia to northeastern China, Korea and Japan; winters to Indochina, Thai-Malay Peninsula, and Borneo, possibly also the Greater Sundas” to “breeds eastern Siberia to northeastern China, Korea and Japan; the species winters in southern China, Indochina, the Thai-Malay Peninsula, Sumatra, Java, and Borneo, but the nonbreeding distributions of the two subspecies are not well differentiated”.</p>
<p> </p>
<p>page 402, <strong>Chestnut-winged Whistling-Thrush <em>Myophonus castaneus</em></strong></p>
<p>Change the English name of <em>Myophonus castaneus</em> from Chestnut-winged Whistling-Thrush to Sumatran Whistling-Thrush (Eaton et al. 2016).</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2016. Birds of the Indonesian Archipelago: Greater Sundas and Wallacea. Lynx Edicions, Barcelona.</p>
<p> </p>
<p>page 461, <strong>White-crowned Forktail <em>Enicurus leschenaulti</em></strong></p>
<p>With the recognition of the nominate subspecies as a new monotypic group, change the scientific name of the group White-crowned Forktail (White-crowned) from <em>Enicurus leschenaulti</em> [<em>leschenaulti</em> Group] to <em>Enicurus leschenaulti</em> [<em>frontalis</em> Group].</p>
<p> </p>
<p>page 461, <strong>White-tailed Robin <em>Cinclidium leucurum</em></strong></p>
<p>Change the scientific name of White-tailed Robin from <em>Cinclidium leucurum</em> to <em>Myiomela leucura</em> (Zuccon and Ericson 2010).</p>
<p>Change the spelling of the subspecies name <em>cambodianum</em> to <em>cambodiana</em>.</p>
<p>Reference:</p>
<p>Zuccon, D., and <NAME>. 2010. A multi-gene phylogeny disentangles the chat-flycatcher complex (Aves: Muscicapidae). Zoologica Scripta 39: 213–224.</p>
<p> </p>
<p>page 461, <strong>Sunda Robin <em>Cinclidium diana</em></strong></p>
<p>Change the scientific name of <NAME> from <em>Cinclidium diana</em> to <em>Myiomela diana</em> (Zuccon and Ericson 2010).</p>
<p>Change the spelling of the subspecies name <em>sumatranu</em> to <em>sumatrana</em>.</p>
<p>Reference:</p>
<p><NAME>., and <NAME>. 2010. A multi-gene phylogeny disentangles the chat-flycatcher complex (Aves: Muscicapidae). Zoologica Scripta 39: 213–224.</p>
<p> </p>
<p>page 454, <strong>Pygmy Blue-Flycatcher <em>Ficedula hodgsoni</em></strong></p>
<p>Change the English name of <em>Ficedula hodgsoni</em> from Pygmy Blue-Flycatcher to Pygmy Flycatcher (Eaton et al. 2016).</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2016. Birds of the Indonesian Archipelago: Greater Sundas and Wallacea. Lynx Edicions, Barcelona.</p>
<p> </p>
<p>page 451, <strong>Russet-tailed Flycatcher <em>Ficedula crypta</em></strong></p>
<p>Change the English name of <em>Ficedula crypta </em>from Russet-tailed Flycatcher to Cryptic Flycatcher, to conform to widespread usage (Sibley and Monroe 1990, Dickinson et al. 1991, Kennedy et al. 2000).</p>
<p>References:</p>
<p><NAME>., <NAME>, and <NAME>. 1991. The birds of the Philippines. An annotated check-list. British Ornithologists’ Union Check-list number 12. British Ornithologists’ Union, London.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, Jr., and <NAME>. 2000. A guide to the birds of the Philippines. Oxford University Press, New York and Oxford, United Kingdom.</p>
<p><NAME>., and <NAME>, Jr. 1990. Distribution and taxonomy of birds of the world. Yale University Press, New Haven, Connecticut.</p>
<p> </p>
<p>page 401, <strong>Little Rock-Thrush <em>Monticola rufocinereus</em></strong></p>
<p>Revise the range description of nominate <em>rufocinereus</em> from “Mts. of se Sudan to Ethiopia, e Uganda, w Kenya and ne Tanzania” to “Eritrea, Ethiopia, northern Somalia, southern South Sudan, northeastern Uganda, Kenya, and northern Tanzania”.</p>
<p>Revise the range description of subspecies <em>sclateri</em> from “W Saudi Arabia” to “southwestern Arabian Peninsula (southwestern Saudi Arabia and western Yemen)”.</p>
<p> </p>
<p>page 401, <strong>Blue Rock-Thrush <em>Monticola solitarius</em></strong></p>
<p>Correct English name of the polytypic group <em>Monticola solitarius solitarius/longirostris</em> from Blue Rock-Thrush (<em>solitarius</em> Group) to Blue Rock-Thrush (<em>solitarius/longirostris</em>).</p>
<p> </p>
<p>page 462, <strong>African Stonechat <em>Saxicola torquatus</em></strong></p>
<p>Revise the range description of the monotypic group African Stonechat (Ethiopian) <em>Saxicola torquatus albofasciatus</em> from “Highlands of Ethiopia, se Sudan and ne Uganda” to “western and central Ethiopia, southeastern South Sudan, and northeastern Uganda”.</p>
<p> </p>
<p>page 465, <strong>Sooty Chat <em>Myrmecocichla nigra</em></strong></p>
<p>Revise the range description from “Nigeria to Angola, extreme s Sudan, Tanzania and Zambia” to “patchily distributed from southeastern Nigeria south to southern Angola and southern Zambia, east to southern South Sudan, southwestern Kenya, and eastern Tanzania”.</p>
<p> </p>
<p>page 465, <strong>White-fronted Black-Chat <em>Myrmecocichla albifrons</em></strong></p>
<p>Revise the range description of subspecies <em>clericalis</em> from “S Sudan (west of the Nile) to ne Democratic Republic of the Congo and n Uganda” to “South Sudan, northeastern Democratic Republic of the Congo, and northern Uganda”.</p>
<p> </p>
<p>page 466, <strong>Mocking Cliff-Chat <em>Thamnolaea cinnamomeiventris</em></strong></p>
<p>Revise the range description of subspecies <em>subrufipennis</em> from “Extreme se Sudan to sw Ethiopia, Zambia and Malawi” to “southeastern South Sudan and southwestern Ethiopia south to Zambia and Malawi”.</p>
<p> </p>
<p>page 465, <strong>Familiar Chat <em>Cercomela familiaris</em></strong></p>
<p>Revise the range description of subspecies <em>falkensteini</em> from “Ghana to sw Sudan, n Ethiopia, Uganda and Kenya” to “southeastern Senegal and northern Guinea east to southern Sudan, western South Sudan, northwestern Ethiopia, southern Uganda, and Tanzania, south to the Zambezi Valley”.</p>
<p>Revise the range description of subspecies <em>omoensis</em> from “SE Sudan (Boma Hills) to sw Ethiopia” to “southeastern South Sudan, southwestern Ethiopia, northeastern Uganda, and</p>
<p>northwestern Kenya”.</p>
<p> </p>
<p>page 401, <strong>Rufous Flycatcher-Thrush <em>Neocossyphus fraseri</em></strong></p>
<p>Revise the range description of subspecies <em>rubicundus </em>from “Nigeria to Central African Republic, w Democratic Republic of the Congo, Gabon and Angola” to “southeastern Nigeria east to Central African Republic, south to northern Angola and northwestern Zambia”.</p>
<p>Revise the range description of subspecies <em>vulpinus</em> from “S Sudan to Uganda, ne Democratic Republic of the Congo, nw Zambia and nw Tanzania” to “southwestern South Sudan, northeastern Democratic Republic of the Congo, and Uganda”.</p>
<p> </p>
<p>page 403, <strong>Spotted Ground-Thrush <em>Geokichla guttata</em></strong></p>
<p>Revise the range description of the monotypic group Spotted Ground-Thrush (Lotti) <em>Geokichla guttata maxis</em> from “S Sudan” to “southern South Sudan”.</p>
<p>Revise the range description of the monotypic group Spotted Ground-Thrush (Congo)</p>
<p><em>Geokichla guttata lippensi</em> from “Coastal e Kenya and Tanzania” to “southeastern Democratic Republic of the Congo (Upemba National Park)”.</p>
<p> </p>
<p>page 403, <strong>Abyssinian Ground-Thrush <em>Geokichla piaggiae</em></strong></p>
<p>Revise the range description of nominate <em>piaggiae</em> from “Ethiopia to se Sudan, n Kenya, sw Uganda and mts. of e Democratic Republic of the Congo” to “southeastern South Sudan, Ethiopia, eastern Democratic Republic of the Congo, eastern Uganda, and northern and western Kenya”.</p>
<p> </p>
<p>page 406, <strong>Spotted Nightingale-Thrush <em>Catharus dryas</em></strong></p>
<p>Subspecies <em>harrisoni</em>, with range “Highlands of se Mexico (Oaxaca)”, is considered to be a junior synonym of <em>ovandensis</em> (Collar 2005), and is deleted. Revise the range of <em>ovandensis</em> from “Highlands of s Mexico (Chiapas)” to “highlands of southern Mexico (southeastern Oaxaca and Chiapas)”.</p>
<p>Revise the range description of nominate <em>dryas </em>from “W Guatemala (Sierra de las Minas) to Honduras; w Ecuador” to “highlands of Guatemala, Honduras, and northern Nicaragua” (Chavarría and Batchelder 2012).</p>
<p>Subspecies <em>ecuadoreanus</em>, with range “Andes of w Ecuador”, is considered to be a junior synonym of <em>maculatus</em> (Collar 2005), and is deleted. Revise the range description of <em>maculatus </em>from “E slope of Andes of Colombia to e Ecuador, e Peru and n Bolivia” to “Andes of Venezuela and of Colombia (except for Pacific slope) and eastern slope of Andes of Ecuador, Peru, and northern Bolivia; western slope of Andes in Ecuador”.</p>
<p>Revise the range description of subspecies <em>blakei </em>from “Andes of extreme n Argentina (Jujuy and Salta)” to “Andes of southern Bolivia (Tarija) and northern Argentina (Jujuy and Salta)”.</p>
<p>References:</p>
<p><NAME>., and <NAME>. 2012. Seven new records for Nicaragua and range extensions for two additional species. Cotinga 34: 28–32.</p>
<p><NAME>. 2005. Family Turdidae (thrushes). Pages 514-807 in <NAME>, <NAME>, and <NAME> (editors), Handbook of the birds of the world. Volume 10. Lynx Edicions, Barcelona.</p>
<p> </p>
<p>page 407, <strong>Abyssinian Thrush <em>Turdus abyssinicus</em></strong></p>
<p>Revise the range description of nominate <em>abyssinicus</em> from “Highlands of Ethiopia, se Sudan, n Uganda, Kenya and n Tanzania” to “Eritrea, Ethiopia, southeastern South Sudan, northern Uganda, Kenya, and northern Tanzania”.</p>
<p> </p>
<p>page 408, <strong>African Thrush <em>Turdus pelios</em></strong></p>
<p>Revise the range description of nominate <em>pelios </em>from “E Cameroon to Chad, s Sudan, Eritrea and Ethiopia” to “eastern Cameroon to southern Chad, southern Sudan, South Sudan (except extreme south), Eritrea, and northern and central Ethiopia”.</p>
<p>Revise the range description of subspecies <em>centralis </em>from “N Democratic Republic of the Congo to s Sudan, sw Ethiopia, Uganda, Kenya and Tanzania” to “southern Central African Republic and eastern Congo to northern Democratic Republic of the Congo, southern South Sudan, southern Ethiopia, Uganda, western Kenya, and northwestern Tanzania”.</p>
<p> </p>
<p>page 409, <strong>Island Thrush <em>Turdus poliocephalus</em></strong></p>
<p>The name <em>carbonarius</em> is preoccupied in <em>Turdus</em>; change the scientific name of the monotypic group Island Thrush (Ashy) from <em>Turdus poliocephalus carbonarius</em> to <em>Turdus poliocephalus erebus</em> (Mayr and Gilliard 1952, Beehler and Pratt 2016).</p>
<p>Change the English name of the monotypic group <em>Turdus poliocephalus poliocephalus</em> from Island Thrush (Norfolk Island) to Island Thrush (Norfolk I.).</p>
<p>Change the English name of the monotypic group <em>Turdus poliocephalus vinitinctus</em> from Island Thrush (Lord Howe Island) to Island Thrush (Lord Howe I.).</p>
<p>References:</p>
<p><NAME>., and <NAME>. 2016. Birds of New Guinea: distribution, taxonomy, and systematics. Princeton University Press, Princeton, New Jersey.</p>
<p><NAME>., and <NAME>. 1952. <a href="http://digitallibrary.amnh.org/bitstream/handle/2246/4053/v2/dspace/ingest/pdfSource/nov/N1577.pdf?sequence=1&isAllowed=y">Six new subspecies of birds from the highlands of New Guinea</a>. American Museum Novitates number 1577.</p>
<p> </p>
<p>pages 411-412, <strong>Black-billed Thrush <em>Turdus ignobilis</em></strong></p>
<p>With the recognition of <em>Turdus ignobilis murinus</em> as a new monotypic group, change the scientific name of the polytypic group Black-billed Thrush (Drab) from <em>Turdus ignobilis </em>[<em>ignobilis </em>Group] to <em>Turdus ignobilis ignobilis/goodfellowi</em>.</p>
<p>Subspecies <em>murinus</em> is removed from the group Black-billed Thrush (Drab), and is recognized as a new monotypic group, Black-billed Thrush (Pantepui) <em>Turdus ignobilis murinus</em>.</p>
<p> </p>
<p>page 399, <strong>Le Conte’s Thrasher <em>Toxostoma lecontei</em></strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2017), change the English name of <em>Toxostoma lecontei</em> from Le Conte’s Thrasher to LeConte’s Thrasher, “to conform to the generally accepted spelling of the name of entomologist <NAME> LeConte, for whom the species was named (Mearns and Mearns 1992, Jobling 2010)” (Chesser et al. 2017).</p>
<p>Change the English name of the polytypic group <em>Toxostoma lecontei lecontei/macmillanorum</em> from Le Conte’s Thrasher (Le Conte’s) to LeConte’s Thrasher (LeConte’s).</p>
<p>Change the English name of the monotypic group <em>Toxostoma lecontei arenicola</em> from Le Conte’s Thrasher (Vizcaino) to LeConte’s Thrasher (Vizcaino).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site">Fifty-eighth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p><NAME>. 2010. The Helm dictionary of scientific bird names. <NAME>, London, United Kingdom.</p>
<p>Mearns, B., and <NAME>. 1992. Audubon to Xantus: the lives of those commemorated in North American bird names. Academic Press, New York, New York.</p>
<p> </p>
<p>page 598, <strong>Black-winged Starling <em>Acridotheres melanopterus</em></strong></p>
<p>With the addition of two additional subspecies, revise the range description of nominate <em>melanopterus</em> from “Lowlands of Java, Bali and Lombok” to “western and central Java and Madura”.</p>
<p>Add two previously overlooked subspecies of Black-winged Starling: subspecies <em>tricolor</em>, with range “eastern Java”; and subspecies <em>tertius</em>, with range “Bali, Nusa Penida, and Lombok” (Amadon 1962).</p>
<p>Reference:</p>
<p>Amadon, D. 1962. <a href="https://biodiversitylibrary.org/page/14485485">Family Sturnidae, starlings</a>. Pages 75-121 in E. Mayr and <NAME>, Jr. (editors), Check-list of birds of the world. Volume XV. Museum of Comparative Zoology, Cambridge, Massachusetts.</p>
<p> </p>
<p>page 601, <strong>Waller’s Starling <em>Onychognathus walleri</em></strong></p>
<p>Revise the range description of the monotypic group Waller’s Starling (Albertine) <em>Onychognathus walleri elgonensis</em> from “Kenya (west of Rift Valley) to Uganda, se Sudan and e Democratic Republic of the Congo” to “southern South Sudan, eastern Democratic Republic of the Congo, western and eastern (Mt. Elgon) Uganda, Rwanda, Burundi, and Kenya (west of the Rift Valley)”.</p>
<p> </p>
<p>page 601, <strong>Bristle-crowned Starling <em>Onychognathus salvadorii</em></strong></p>
<p>Revise the range description from “Extreme se Sudan to ne Uganda, s Ethiopia, Somalia and n Kenya” to “central Ethiopia, northwestern and southern Somalia, and northern Kenya”.</p>
<p> </p>
<p>page 601, <strong>Sharpe’s Starling <em>Pholia sharpii</em></strong></p>
<p>Revise the range description from “S Sudan to Ethiopia, n Tanzania, w Uganda, Rwanda and e Democratic Republic of the Congo” to “patchily distributed in Ethiopia, southeastern South Sudan, eastern Democratic Republic of the Congo, Uganda, Kenya, Rwanda, Burundi, and Tanzania”.</p>
<p> </p>
<p>page 601, <strong>Stuhlmann’s Starling <em>Poeoptera stuhlmanni</em></strong></p>
<p>Revise the range description from “Mts. of s Ethiopia to s Sudan, Uganda, Tanzania and e Democratic Republic of the Congo” to “patchily distributed in southwestern Ethiopia, southern South Sudan (Imatong Mountains), eastern Democratic Republic of the Congo, Uganda, western Kenya, Rwanda, Burundi, and western Tanzania”.</p>
<p> </p>
<p>page 600, <strong>Shelley’s Starling <em>Lamprotornis shelleyi</em></strong></p>
<p>Revise the range description from “Acacia of Somalia and s Ethiopia to se Sudan and se Kenya” to “breeds southeastern South Sudan, southern and eastern Ethiopia, and northern Somalia; partial resident, but as nonbreeder withdraws from Somalia and northeastern Ethiopia, disperses south to southern Somalia, Kenya, and northeastern Tanzania”.</p>
<p> </p>
<p>page 600, <strong>Rüppell’s Starling <em>Lamprotornis purpuroptera</em></strong></p>
<p>Revise the range description of subspecies <em>aeneocephalus</em> from “Eritrea, Sudan and n Ethiopia” to “southeastern Sudan, South Sudan, northwestern Eritrea, and western Ethiopia”.</p>
<p>Revise the range description of nominate <em>purpuroptera </em>from “S Ethiopia to Somalia, s Sudan, Uganda, w Kenya and Tanzania” to “central and southern Ethiopia, northeastern Democratic Republic of the Congo, Uganda, Kenya, southern Somalia, Rwanda, Burundi, and northern and western Tanzania”.</p>
<p> </p>
<p>page 600, <strong>Long-tailed Glossy Starling <em>Lamprotornis caudatus</em></strong></p>
<p>Revise the range description from “Savanna of Senegal to s Sudan (Nile River region)” to “extreme southern Mauritania to northern Guinea east to southern Sudan and northwestern South Sudan”.</p>
<p> </p>
<p>page 600, <strong>Superb Starling <em>Lamprotornis superbus</em></strong></p>
<p>Revise the range description from “SE Sudan and Ethiopia to Somalia, Uganda, Kenya and Tanzania” to “southeastern South Sudan, Ethiopia, Somalia, northern Uganda, Kenya, and Tanzania”.</p>
<p> </p>
<p>page 599, <strong>Lesser Blue-eared Starling <em>Lamprotornis chloropterus</em></strong></p>
<p>Revise the range description of the monotypic group Lesser Blue-eared Starling (Lesser) <em>Lamprotornis chloropterus chloropterus</em> from “Senegal to s Sudan, Eritrea, Ethiopia, n Uganda and w Kenya” to “Senegambia east to southern Sudan, South Sudan, Eritrea, western Ethiopia, northern Uganda, and western Kenya”.</p>
<p> </p>
<p>page 600, <strong>Bronze-tailed Starling <em>Lamprotornis chalcurus</em></strong></p>
<p>Revise the range description of subspecies <em>emini</em> from “E Cameroon to s Sudan and w Kenya” to “eastern Cameroon to southwestern Sudan, South Sudan, extreme northeastern Democratic Republic of the Congo, northern Uganda, and extreme western Kenya”.</p>
<p> </p>
<p>page 386, <strong>Orange-bellied Leafbird <em>Chloropsis hardwickii</em></strong></p>
<p>Revise the range description of nominate <em>hardwickii</em> from “E Himalayas to sw China, Myanmar, n Thailand and n Vietnam” to “eastern Himalayas to southwestern China (southeastern Tibet, Yunnan), Myanmar, northwestern Thailand, and northern Laos”.</p>
<p>Revise the range description of subspecies <em>melliana</em> from “S China (Guangxi, Fujian and Guangdong) to n Vietnam” to “southeastern China (Guizhou and Guangxi east to Zhejiang) to central Laos and central Vietnam”.</p>
<p> </p>
<p>page 535, <strong>Western Violet-backed Sunbird <em>Anthreptes longuemarei</em></strong></p>
<p>Revise the range description of subspecies <em>haussarum</em> from “Liberia to n Cameroon, s Sudan, n Uganda; vagrant to w Kenya” to “eastern Guinea and Liberia to northern Cameroon, extreme southwestern Sudan, South Sudan, northeastern Democratic Republic of the Congo, northwestern Uganda, and western Kenya”.</p>
<p> </p>
<p>page 535, <strong>Kenya Violet-backed Sunbird <em>Anthreptes orientalis</em></strong></p>
<p>Revise the range description from “Somalia to se Sudan, Ethiopia, n Uganda, Kenya and ne Tanzania” to “southeastern South Sudan, southern and eastern Ethiopia, northwestern and southern Somalia, northern Uganda, Kenya, and Tanzania”.</p>
<p> </p>
<p>page 535, <strong>Seimund’s Sunbird <em>Anthreptes seimundi</em></strong></p>
<p>Revise the range description of subspecies <em>minor</em> from “Nigeria to Cameroon, n Angola, Democratic Republic of the Congo, s Sudan, Rwanda, Uganda” to “southern Nigeria and southern Cameroon east to Central African Republic, southern South Sudan, Uganda, and Rwanda, south to northern Angola and central Democratic Republic of the Congo”.</p>
<p> </p>
<p>page 535, <strong>Green Sunbird <em>Anthreptes rectirostris</em></strong></p>
<p>Revise the range description of the monotypic group Green Sunbird (Gray-throated) <em>Anthreptes rectirostris tephrolaemus</em> from “S Nigeria to Angola, s Sudan, Uganda and w Kenya; Bioko” to “southeastern Benin and southern Nigeria east to southern Central African Republic, southern South Sudan, Uganda, southwestern Kenya, and northwestern Tanzania, south to northern Angola and southern Democratic Republic of the Congo; Bioko”.</p>
<p> </p>
<p>page 535, <strong>Collared Sunbird <em>Hedydipna collaris</em></strong></p>
<p>Revise the range description of subspecies <em>somereni</em> from “Extreme se Nigeria to n Democratic Republic of the Congo, nw Angola and sw Sudan” to “southeastern Nigeria south to northwestern Angola and east to northern Democratic Republic of the Congo and southwestern South Sudan”.</p>
<p>Revise the range description of subspecies <em>garguensis</em> from “E Angola to se Sudan, Uganda, Kenya, nw Tanzania and Zambia” to “southeastern South Sudan, extreme southwestern Ethiopia, eastern and southeastern Democratic Republic of the Congo, Uganda, western Kenya, Rwanda, Burundi, western Tanzania, eastern Angola, and northern Zambia”.</p>
<p> </p>
<p>page 535, <strong>Pygmy Sunbird <em>Hedydipna platura</em></strong></p>
<p>Revise the range description from “SW Mauritania to n Nigeria, ne Democratic Republic of the Congo, s Sudan and n Uganda” to “Mauritania and Senegal east to Sudan, South Sudan, and perhaps locally in western Ethiopia, south to northern Ivory Coast, northern Nigeria, northern Central African Republic, northeastern Democratic Republic of the Congo, and northern Uganda; resident in northern part of range, only a breeding visitor to southern portions of range”.</p>
<p> </p>
<p>page 535, <strong>Nile Valley Sunbird <em>Hedydipna metallica</em></strong></p>
<p>Revise the range description from “N Egypt to Sudan, n Ethiopia, Somalia, sw Arabia, Yemen, Oman” to “breeds Egypt (primarily the Nile Valley) to Sudan, northeastern South Sudan, Eritrea, northern Ethiopia, Djibouti, northwestern Somalia, southwestern Saudi Arabia, Yemen, and southwestern Oman; nonbreeding visitor to northern Egypt (Cairo) and northeastern Somalia”.</p>
<p> </p>
<p>page 536, <strong>Green-headed Sunbird <em>Cyanomitra verticalis</em></strong></p>
<p>Revise the range description of subspecies <em>viridisplendens</em> from “E Democratic Republic of the Congo to s Sudan, w Kenya, Tanzania, Malawi and ne Zambia” to “western and southern South Sudan and eastern Democratic Republic of the Congo to central Kenya, south to northeastern Zambia and northern Malawi”.</p>
<p>Correct the spelling of the scientific name of the polytypic group Green-headed Sunbird (Blue-headed) from <em>Cyanomitra verticalis cyanocephala/boehndorffi</em> to <em>Cyanomitra verticalis cyanocephala/bohndorffi</em>.</p>
<p> </p>
<p>page 537, <strong>Green-throated Sunbird <em>Chalcomitra rubescens</em></strong></p>
<p>Revise the range description of nominate <em>rubescens</em> from “E Cameroon to n Angola, se Sudan, Kenya, Tanzania and Zambia” to “central and southern Cameroon south to northern Angola and northwestern Zambia, east to southern South Sudan, Uganda, western Kenya, and northwestern Tanzania”.</p>
<p> </p>
<p>page 537, <strong>Scarlet-chested Sunbird <em>Chalcomitra senegalensis</em></strong></p>
<p>Revise the range description of subspecies <em>acik</em> from “Cameroon to ne Democratic Republic of the Congo, Cent. African Rep., sw Sudan, nw Uganda” to “northern Cameroon to Central African Republic, southwestern Sudan, western and central South Sudan, northeastern Democratic Republic of the Congo, and northwestern Uganda”.</p>
<p>Revise the range description of subspecies <em>proteus</em> from “SE Sudan (Boma Hills), Eritrea, Ethiopia and n Kenya” to “southeastern Sudan, southeastern South Sudan (Boma Hills), Eritrea, and Ethiopia”.</p>
<p>Revise the range description of subspecies <em>lamperti</em> from “E Democratic Republic of the Congo to s Sudan, Uganda, Kenya and Tanzania” to “eastern Democratic Republic of the Congo, southern South Sudan (Imatong Mountains, Leboni Forest), Rwanda, Burundi, Uganda (except northwest), southwestern Kenya, and Tanzania”.</p>
<p> </p>
<p>page 538, <strong>Tacazze Sunbird <em>Nectarinia tacazze</em></strong></p>
<p>Revise the range description of subspecies <em>jacksoni</em> from “Mts. of se Sudan to e Uganda, w Kenya and ne Tanzania” to “southeastern South Sudan, northern and eastern Uganda, western Kenya, and northern Tanzania”.</p>
<p> </p>
<p>page 538, <strong>Malachite Sunbird <em>Nectarinia famosa</em></strong></p>
<p>Revise the range description of subspecies <em>cupreonitens</em> from “E Democratic Republic of the Congo to se Sudan, Ethiopia, Uganda, Kenya, Tanzania, Malawi” to “Ethiopia and southeastern South Sudan, eastern Democratic Republic of the Congo, Uganda, Kenya, Rwanda, Burundi, Tanzania, and northern Malawi”.</p>
<p> </p>
<p>page 539, <strong>Northern Double-collared Sunbird <em>Cinnyris reichenowi</em></strong></p>
<p>Revise the range description of the monotypic group Northern Double-collared Sunbird (Eastern) <em>Cinnyris reichenowi reichenowi</em> from “Highlands of e Democratic Republic of the Congo to Uganda, se Sudan and w Kenya” to “southern South Sudan, eastern Democratic Republic of the Congo, northeastern, eastern and southwestern Uganda, western Kenya, Rwanda, and Burundi”.</p>
<p> </p>
<p>page 539, <strong>Mariqua Sunbird <em>Cinnyris mariquensis</em></strong></p>
<p>Revise the range description of subspecies <em>osiris</em> from “Extreme se Sudan to Eritrea, Ethiopia, n Kenya and n Uganda” to “Eritrea, Ethiopia, northwestern Somalia, southern South Sudan, northern Uganda, and northern Kenya”.</p>
<p> </p>
<p>page 539, <strong>Red-chested Sunbird <em>Cinnyris erythrocercus</em></strong></p>
<p>Revise the range description from “S Sudan to e Democratic Republic of the Congo, Uganda, w Kenya and nw Tanzania” to “South Sudan, eastern Democratic Republic of the Congo, Uganda, southwestern Kenya, Rwanda, Burundi, and northwestern Tanzania”.</p>
<p> </p>
<p>page 539, <strong>Tsavo Sunbird <em>Cinnyris tsavoensis</em></strong></p>
<p>Revise the range description from “S Somalia to s Sudan, Ethiopia, e Kenya and ne Tanzania” to “southern Somalia, eastern Kenya, and northeastern Tanzania”.</p>
<p> </p>
<p>page 540, <strong>Palestine Sunbird <em>Cinnyris osea</em></strong></p>
<p>Revise the range description of the monotypic group Palestine Sunbird (Decorse’s) <em>Cinnyris osea decorsei</em> from “Lake Chad to s Sudan, extreme ne Democratic Republic of the Congo and nw Uganda” to “very patchily distributed in eastern Cameroon, southwestern Chad (Lake Chad), Central African Republic, southwestern Sudan, extreme northeastern Democratic Republic of the Congo, southwestern South Sudan, and northwestern Uganda”.</p>
<p> </p>
<p>page 540, <strong>Shining Sunbird <em>Cinnyris habessinicus</em></strong></p>
<p>Revise the range description of subspecies <em>turkanae</em> from “SE Sudan to s Ethiopia, sw Somalia, n Kenya and ne Uganda” to “southeastern South Sudan, southern Ethiopia, southwestern Somalia, northeastern Uganda, and northern Kenya”.</p>
<p> </p>
<p>page 540, <strong>Splendid Sunbird <em>Cinnyris coccinigastrus</em></strong></p>
<p>Revise the range description from “Senegal to sw Mali, Gabon, ne Democratic Republic of the Congo, sw Sudan and Uganda” to “Senegal to Sierra Leone, east to southern Nigeria, Cameroon, Central African Republic, northeastern Democratic Republic of the Congo, and southwestern South Sudan; nonbreeding visitor to northeastern Gabon”.</p>
<p> </p>
<p>page 367, <strong>Western Yellow Wagtail <em>Motacilla flava</em></strong></p>
<p>The polytypic group Western Yellow Wagtail (<em>lutea/flavissima</em>) <em>Motacilla flava lutea/flavissima</em> is partitioned into two separate monotypic groups, Western Yellow Wagtail (<em>flavissima</em>) <em>Motacilla flava flavissima</em>, and Western Yellow Wagtail (<em>lutea</em>) <em>Motacilla flava lutea</em>.</p>
<p>The polytypic group Western Yellow Wagtail (<em>flava/beema</em>) <em>Motacilla flava flava/beema</em> is partitioned into two separate monotypic groups, Western Yellow Wagtail (<em>flava</em>) <em>Motacilla flava flava</em>, and Western Yellow Wagtail (<em>beema</em>) <em>Motacilla flava beema</em>.</p>
<p>The polytypic group Western Yellow Wagtail (Ashy-headed) <em>Motacilla flava</em> [<em>cinereocapilla</em> Group] is partitioned into three separate monotypic groups, Western Yellow Wagtail (<em>iberiae</em>) <em>Motacilla flava iberiae</em>, Western Yellow Wagtail (<em>cinereocapilla</em>) <em>Motacilla flava cinereocapilla</em>, and Western Yellow Wagtail (<em>pygmaea</em>) <em>Motacilla flava pygmaea</em>.</p>
<p> </p>
<p>page 367, <strong>African Pied Wagtail <em>Motacilla aguimp</em></strong></p>
<p>Revise the range description of subspecies <em>vidua</em> from “Sierra Leone to s Sudan, Ethiopia, Kenya and South Africa” to “southeastern Senegal and eastern Gambia east to Sierra Leone to southern Mali, southern Chad, eastern Sudan, Ethiopia, and southern Somalia, south to Angola, northern and eastern Botswana, and eastern South Africa (south to Eastern Cape); Nile Valley of southern Egypt and northern Sudan”.</p>
<p> </p>
<p>page 363, <strong>African Pipit <em>Anthus cinnamomeus</em></strong></p>
<p>Revise the range description of subspecies <em>stabilis</em> from “C and se Sudan” to “central and southeastern Sudan and eastern South Sudan”.</p>
<p> </p>
<p>page 364, <strong>Plain-backed Pipit <em>Anthus leucophrys</em></strong></p>
<p>Revise the range description of subspecies <em>zenkeri</em> from “S Mali to s Sudan, n Democratic Republic of the Congo, w Uganda, w Kenya and nw Tanzania” to “southern Mali and Guinea to southwestern Sudan and South Sudan south to northern Democratic Republic of the Congo, Uganda, western Kenya, Rwanda, Burundi, and northwestern Tanzania”.</p>
<p>Revise the range description of subspecies <em>omoensis</em> from “Extreme e Sudan and Ethiopia” to “southeastern Sudan, eastern South Sudan, Eritrea, and northern and western Ethiopia”.</p>
<p> </p>
<p>page 366, <strong>Golden Pipit <em>Tmetothylacus tenellus</em></strong></p>
<p>Revise the range description from “Arid scrub of se Sudan to Ethiopia, Somalia, Kenya and n Tanzania” to “southeastern South Sudan, southern and eastern Ethiopia, Somalia, northern Uganda, eastern Kenya, and eastern Tanzania; partially nomadic or migratory”.</p>
<p> </p>
<p>page 366, <strong>Fuelleborn’s Longclaw <em>Macronyx fuelleborni</em></strong></p>
<p>In accord with widespread usage (e.g., Britton 1980, Keith et al. 1992, Dowsett and Forbes-Watson 1993), change the English name of <em>Macronyx fuelleborni</em> from Fuelleborn’s Longclaw to Fülleborn’s Longclaw.</p>
<p>References:</p>
<p><NAME>. (editor). 1980. Birds of east Africa. East Africa Natural History Society, Nairobi.</p>
<p><NAME>., and A.D. Forbes-Watson. 1993. Checklist of birds of the Afrotropical and Malagasy regions. Volume 1: species limits and distribution. Tauraco Press, Liège, Belgium.</p>
<p>Keith, S., <NAME>, and <NAME> (editors). 1992. The birds of Africa. Volume IV. Academic Press, London.</p>
<p> </p>
<p>page 645, <strong>Golden-crowned Warbler <em>Basileuterus culicivorus</em></strong></p>
<p>Correct the scientific name of the polytypic group Golden-crowned Warbler (Golden-crowned) from <em>Basileuterus culicivorus</em> [<em>auricapillus</em> Group] to <em>Basileuterus culicivorus</em> [<em>auricapilla</em> Group].</p>
<p> </p>
<p>page 643, <strong>Red Warbler <em>Cardellina rubra</em></strong></p>
<p>Revise the range description of nominate <em>rubra</em> from “Mts. of w Mexico (Jalisco and Michoacán to Veracruz and Oaxaca)” to “south central Mexico (transvolcanic belt, from Jalisco and Michoacán to Veracruz and northern Oaxaca)”.</p>
<p>Revise the range description of subspecies <em>rowleyi</em> from “S Mexico (mountains of Oaxaca in Lachao Nuevo region)” to “southern Mexico (mountains of Guerrero and southern Oaxaca)”.</p>
<p> </p>
<p>page 650, <strong>Black-capped Hemispingus <em>Hemispingus atropileus</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.09</a>), Black-capped Hemispingus <em>Hemispingus atropileus</em> is removed from the genus <em>Hemispingus</em>, based on genetic evidence that <em>Hemispingus</em> is not monophyletic (Burns et al. 2014); instead it is classified in the newly described genus <em>Kleinothraupis</em> (Burns et al. 2016). Change the scientific name from <em>Hemispingus atropileus</em> to <em>Kleinothraupis atropileus</em>. The sequence of species of <em>Kleinothraupis</em> is revised.</p>
<p>Change the scientific name of the monotypic group Black-capped Hemispingus (Black-capped) from <em>Hemispingus atropileus atropileus</em> to <em>Kleinothraupis atropileus atropileus</em>.</p>
<p>Change the scientific name of the monotypic group Black-capped Hemispingus (White-browed) from <em>Hemispingus atropileus auricularis</em> to <em>Kleinothraupis atropileus auricularis</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p><NAME>., <NAME>, and <NAME>. 2016. A genus-level classification of the family Thraupidae (Class Aves: Order Passeriformes). Zootaxa 4088: 329-354.</p>
<p> </p>
<p>page 650, <strong>Orange-browed Hemispingus <em>Hemispingus calophrys</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.09</a>), Orange-browed Hemispingus <em>Hemispingus calophrys</em> is removed from the genus <em>Hemispingus</em>, based on genetic evidence that <em>Hemispingus</em> is not monophyletic (Burns et al. 2014); instead it is classified in the newly described genus <em>Kleinothraupis</em> (Burns et al. 2016). Change the scientific name from <em>Hemispingus calophrys</em> to <em>Kleinothraupis calophrys</em>. The sequence of species of <em>Kleinothraupis</em> is revised.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p><NAME>., <NAME>, and <NAME>. 2016. A genus-level classification of the family Thraupidae (Class Aves: Order Passeriformes). Zootaxa 4088: 329-354.</p>
<p> </p>
<p>page 650, <strong>Parodi’s Hemispingus <em>Hemispingus parodii</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.09</a>), Parodi’s Hemispingus <em>Hemispingus parodii</em> is removed from the genus <em>Hemispingus</em>, based on genetic evidence that <em>Hemispingus</em> is not monophyletic (Burns et al. 2014); instead it is classified in the newly described genus <em>Kleinothraupis</em> (Burns et al. 2016). Change the scientific name from <em>Hemispingus parodii</em> to <em>Kleinothraupis parodii</em>. The sequence of species of <em>Kleinothraupis</em> is revised.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p><NAME>., <NAME>, and <NAME>. 2016. A genus-level classification of the family Thraupidae (Class Aves: Order Passeriformes). Zootaxa 4088: 329-354.</p>
<p> </p>
<p>page 651, <strong>Gray-capped Hemispingus <em>Hemispingus reyi</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.09</a>), Gray-capped Hemispingus <em>Hemispingus reyi</em> is removed from the genus <em>Hemispingus</em>, based on genetic evidence that <em>Hemispingus</em> is not monophyletic (Burns et al. 2014); instead it is classified in the newly described genus <em>Kleinothraupis</em> (Burns et al. 2016). Change the scientific name from <em>Hemispingus reyi</em> to <em>Kleinothraupis reyi</em>. The sequence of species of <em>Kleinothraupis</em> is revised.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p><NAME>., <NAME>, and <NAME>. 2016. A genus-level classification of the family Thraupidae (Class Aves: Order Passeriformes). Zootaxa 4088: 329-354.</p>
<p> </p>
<p>page 651, <strong>Oleaginous Hemispingus <em>Hemispingus frontalis</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.10</a>), Oleaginous Hemispingus <em>Hemispingus frontalis</em> is removed from the genus <em>Hemispingus</em>, based on genetic evidence that <em>Hemispingus</em> is not monophyletic (Burns et al. 2014); instead it is classified in the genus <em>Sphenopsis</em> (Burns et al. 2016). Change the scientific name from <em>Hemispingus frontalis</em> to <em>Sphenopsis frontalis</em>.</p>
<p>Change the spelling of the subspecies name <em>iteratus</em> to <em>iterata</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p><NAME>., <NAME>, and <NAME>. 2016. A genus-level classification of the family Thraupidae (Class Aves: Order Passeriformes). Zootaxa 4088: 329-354.</p>
<p> </p>
<p>page 651, <strong>Black-eared Hemispingus <em>Hemispingus melanotis</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.10</a>), Black-eared Hemispingus <em>Hemispingus melanotis</em> is removed from the genus <em>Hemispingus</em>, based on genetic evidence that <em>Hemispingus </em>is not monophyletic (Burns et al. 2014); instead it is classified in the genus <em>Sphenopsis </em>(Burns et al. 2016). Change the scientific name from <em>Hemispingus melanotis</em> to <em>Sphenopsis melanotis</em>.</p>
<p>Change the spelling of the name of the monotypic group Black-eared Hemispingus (Western) from <em>Hemispingus melanotis ochreaceus</em> to <em>Sphenopsis melanotis ochracea</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p><NAME>., <NAME>, and <NAME>. 2016. A genus-level classification of the family Thraupidae (Class Aves: Order Passeriformes). Zootaxa 4088: 329-354.</p>
<p> </p>
<p>pages 650-651, <strong>tanagers genus <em>Thlypopsis</em></strong></p>
<p>In accord with AOS-SACC, the sequence of species in <em>Thlypopsis</em> is revised, following Burns et al. (2014). The new sequence of species is</p>
<p>Orange-headed Tanager <em>Thlypopsis sordida</em></p>
<p>Buff-bellied Tanager <em>Thlypopsis inornata</em></p>
<p>Fulvous-headed Tanager <em>Thlypopsis fulviceps</em></p>
<p>Chestnut-headed Tanager <em>Thlypopsis pyrrhocoma</em></p>
<p>Rust-and-yellow Tanager <em>Thlypopsis ruficeps</em></p>
<p>Superciliaried Hemispingus <em>Thlypopsis superciliaris</em></p>
<p>Rufous-chested Tanager <em>Thlypopsis ornata</em></p>
<p>Brown-flanked Tanager <em>Thlypopsis pectoralis</em></p>
<p>Reference:</p>
<p><NAME>., <NAME>, P.<NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p> </p>
<p>page 651, <strong>Chestnut-headed Tanager <em>Pyrrhocoma ruficeps</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.11</a>), Chestnut-headed Tanager is merged into the genus <em>Thlypopsis</em>, on the basis of genetic evidence that it is embedded in that genus (Burns et al. 2014). Because the name <em>ruficeps</em> is preoccupied in <em>Thlypopsis</em>, a new species name is needed as well. Change the scientific name from <em>Pyrrhocoma ruficeps</em> to <em>Thlypopsis pyrrhocoma</em> (Burns et al. 2016).</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p><NAME>., <NAME>, and <NAME>. 2016. A genus-level classification of the family Thraupidae (Class Aves: Order Passeriformes). Zootaxa 4088: 329-354.</p>
<p> </p>
<p>page 650, <strong>Superciliaried Hemispingus <em>Hemispingus superciliaris</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.11</a>), Superciliaried Hemispingus is merged into the genus <em>Thlypopsis</em>, on the basis of genetic evidence that it is embedded in that genus (Burns et al. 2014). Change the scientific name from <em>Hemispingus superciliaris</em> to <em>Thlypopsis superciliaris</em> (Burns et al. 2016). Reposition Superciliaried Hemispingus to follow Rust-and-yellow Tanager <em>Thlypopsis ruficeps</em>.</p>
<p>Change the scientific name of the monotypic group Superciliaried Hemispingus (Yellow-browed) from <em>Hemispingus superciliaris chrysophrys</em> to <em>Thlypopsis superciliaris chrysophrys</em>.</p>
<p>Change the scientific name of the polytypic group Superciliaried Hemispingus (Superciliaried) from <em>Hemispingus superciliaris</em> [<em>superciliaris</em> Group] to <em>Thlypopsis superciliaris</em> [<em>superciliaris</em> Group].</p>
<p>Change the scientific name of the polytypic group Superciliaried Hemispingus (White-bellied) from <em>Hemispingus superciliaris leucogastrus/insignis</em> to <em>Thlypopsis superciliaris leucogastra/insignis</em>.</p>
<p>Change the spelling of the subspecies name <em>leucogastrus</em> to <em>leucogastra</em>.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p><NAME>., <NAME>, and <NAME>. 2016. A genus-level classification of the family Thraupidae (Class Aves: Order Passeriformes). Zootaxa 4088: 329-354.</p>
<p> </p>
<p>page 669, <strong>Plain-tailed Warbling-Finch <em>Poospiza alticola</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.14</a>), Plain-tailed Warbling-Finch is removed from <em>Poospiza</em>, based on genetic evidence that the genus <em>Poospiza</em> is not monophyletic (Burns et al. 2014). Change the scientific name from <em>Poospiza alticola</em> to <em>Microspingus alticola</em> (Burns et al. 2016). Reposition <em>Microspingus</em> to immediately follow Brown-flanked Tanager <em>Thlypopsis pectoralis</em>.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p><NAME>., <NAME>, and <NAME>. 2016. A genus-level classification of the family Thraupidae (Class Aves: Order Passeriformes). Zootaxa 4088: 329-354.</p>
<p> </p>
<p>page 669, <strong>Rusty-browed Warbling-Finch <em>Poospiza erythrophrys</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.14</a>), Rusty-browed Warbling-Finch is removed from <em>Poospiza</em>, based on genetic evidence that the genus <em>Poospiza</em> is not monophyletic (Burns et al. 2014). Change the scientific name from <em>Poospiza erythrophrys</em> to <em>Microspingus erythrophrys</em> (Burns et al. 2016).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p><NAME>., <NAME>, and <NAME>. 2016. A genus-level classification of the family Thraupidae (Class Aves: Order Passeriformes). Zootaxa 4088: 329-354.</p>
<p> </p>
<p>page 669, <strong>Buff-throated Warbling-Finch <em>Poospiza lateralis</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.14</a>), Buff-throated Warbling-Finch is removed from <em>Poospiza</em>, based on genetic evidence that the genus <em>Poospiza</em> is not monophyletic (Burns et al. 2014). Change the scientific name from <em>Poospiza lateralis</em> to <em>Microspingus lateralis</em> (Burns et al. 2016).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p><NAME>., <NAME>, and <NAME>. 2016. A genus-level classification of the family Thraupidae (Class Aves: Order Passeriformes). Zootaxa 4088: 329-354.</p>
<p> </p>
<p>page 669, <strong>Gray-throated Warbling-Finch <em>Poospiza cabanisi</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.14</a>), Gray-throated Warbling-Finch is removed from <em>Poospiza</em>, based on genetic evidence that the genus <em>Poospiza</em> is not monophyletic (Burns et al. 2014). Change the scientific name from <em>Poospiza cabanisi</em> to <em>Microspingus cabanisi</em> (Burns et al. 2016).</p>
<p>References:</p>
<p><NAME>., <NAME>, P.O. Title, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p><NAME>., <NAME>, and <NAME>. 2016. A genus-level classification of the family Thraupidae (Class Aves: Order Passeriformes). Zootaxa 4088: 329-354.</p>
<p> </p>
<p>page 669, <strong>Ringed Warbling-Finch <em>Poospiza torquata</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.14</a>), Ringed Warbling-Finch is removed from <em>Poospiza</em>, based on genetic evidence that the genus <em>Poospiza</em> is not monophyletic (Burns et al. 2014). Change the scientific name from <em>Poospiza torquata</em> to <em>Microspingus torquatus</em> (Burns et al. 2016).</p>
<p>Change the spelling of the nominate subspecies from <em>torquata</em> to <em>torquatus</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, P.O. Title, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p><NAME>., <NAME>, and <NAME>. 2016. A genus-level classification of the family Thraupidae (Class Aves: Order Passeriformes). Zootaxa 4088: 329-354.</p>
<p> </p>
<p>page 669, <strong>Black-capped Warbling-Finch <em>Poospiza melanoleuca</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.14</a>), Black-capped Warbling-Finch is removed from <em>Poospiza</em>, based on genetic evidence that the genus <em>Poospiza</em> is not monophyletic (Burns et al. 2014). Change the scientific name from <em>Poospiza melanoleuca</em> to <em>Microspingus melanoleucus</em> (Burns et al. 2016).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p><NAME>., <NAME>, and <NAME>. 2016. A genus-level classification of the family Thraupidae (Class Aves: Order Passeriformes). Zootaxa 4088: 329-354.</p>
<p> </p>
<p>page 669, <strong>Cinereous Warbling-Finch <em>Poospiza cinerea</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.14</a>), Cinereous Warbling-Finch is removed from <em>Poospiza</em>, based on genetic evidence that the genus <em>Poospiza</em> is not monophyletic (Burns et al. 2014). Change the scientific name from <em>Poospiza cinerea</em> to <em>Microspingus cinereus</em> (Burns et al. 2016).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p><NAME>., <NAME>, and <NAME>. 2016. A genus-level classification of the family Thraupidae (Class Aves: Order Passeriformes). Zootaxa 4088: 329-354.</p>
<p> </p>
<p>page 650, <strong>Three-striped Hemispingus <em>Hemispingus trifasciatus</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.14</a>), Three-striped Hemispingus is removed from <em>Hemispingus</em>, based on genetic evidence that the genus <em>Hemispingus</em> is not monophyletic (Burns et al. 2014). Change the scientific name from <em>Hemispingus trifasciatus</em> to <em>Microspingus trifasciatus </em>(Burns et al. 2016).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p><NAME>., <NAME>, and <NAME>. 2016. A genus-level classification of the family Thraupidae (Class Aves: Order Passeriformes). Zootaxa 4088: 329-354.</p>
<p> </p>
<p>page 658, <strong>Chestnut-bellied Mountain-Tanager <em>Dubusia castaneoventris</em></strong></p>
<p>Subspecies <em>peruviana</em>, with range “Andes of e Peru (north to La Libertad)”, is considered to be a junior synonym of nominate <em>castaneoventris</em>, with range “Andes of w Bolivia (La Paz, Cochabamba, w Santa Cruz)” (Remsen 1984), and is deleted; consequently, Chestnut-bellied Mountain-Tanager becomes monotypic. Revise the range of the species to “east slope of the Andes of Peru (La Libertad to Puno) and Bolivia (La Paz to western Santa Cruz)”.</p>
<p>Reference:</p>
<p><NAME>., Jr. 1984. Natural history notes on some poorly known Bolivian birds. Part 2. Gerfaut 74: 163-179.</p>
<p> </p>
<p>page 660, <strong>Dotted Tanager <em>Tangara varia</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.19</a>), Dotted Tanager is removed from the genus <em>Tangara</em>, based on genetic evidence that <em>Tangara</em> is not monophyletic (Burns et al. 2014). Change the scientific name from <em>Tangara varia</em> to <em>Ixothraupis varia</em> (Burns et al. 2016). Reposition <em>Ixothraupis</em> to follow the genus <em>Thraupis</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p>Burns, K.J., <NAME>, and <NAME>. 2016. A genus-level classification of the family Thraupidae (Class Aves: Order Passeriformes). Zootaxa 4088: 329-354.</p>
<p> </p>
<p>page 660, <strong>Rufous-throated Tanager <em>Tangara rufigula</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.19</a>), Rufous-throated Tanager is removed from the genus <em>Tangara</em>, based on genetic evidence that <em>Tangara </em>is not monophyletic (Burns et al. 2014). Change the scientific name from <em>Tangara rufigula</em> to <em>Ixothraupis rufigula</em> (Burns et al. 2016).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p><NAME>., <NAME>, and <NAME>. 2016. A genus-level classification of the family Thraupidae (Class Aves: Order Passeriformes). Zootaxa 4088: 329-354.</p>
<p> </p>
<p>page 660, <strong>Speckled Tanager <em>Tangara guttata</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.19</a>), Speckled Tanager is removed from the genus <em>Tangara</em>, based on genetic evidence that <em>Tangara</em> is not monophyletic (Burns et al. 2014). Change the scientific name from <em>Tangara guttata</em> to <em>Ixothraupis guttata</em> (Burns et al. 2016).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p><NAME>., <NAME>, and <NAME>. 2016. A genus-level classification of the family Thraupidae (Class Aves: Order Passeriformes). Zootaxa 4088: 329-354.</p>
<p> </p>
<p>page 660, <strong>Yellow-bellied Tanager <em>Tangara xanthogastra</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.19</a>), Yellow-bellied Tanager is removed from the genus <em>Tangara</em>, based on genetic evidence that <em>Tangara</em> is not monophyletic (Burns et al. 2014). Change the scientific name from <em>Tangara xanthogastra</em> to <em>Ixothraupis xanthogastra</em> (Burns et al. 2016).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p><NAME>., <NAME>, and <NAME>. 2016. A genus-level classification of the family Thraupidae (Class Aves: Order Passeriformes). Zootaxa 4088: 329-354.</p>
<p> </p>
<p>page 660, <strong>Spotted Tanager <em>Tangara punctata</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.19</a>), Spotted Tanager is removed from the genus <em>Tangara</em>, based on genetic evidence that <em>Tangara</em> is not monophyletic (Burns et al. 2014). Change the scientific name from <em>Tangara punctata</em> to <em>Ixothraupis punctata</em> (Burns et al. 2016).</p>
<p>References:</p>
<p><NAME>., <NAME>, P.O. Title, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p><NAME>., <NAME>, and <NAME>. 2016. A genus-level classification of the family Thraupidae (Class Aves: Order Passeriformes). Zootaxa 4088: 329-354.</p>
<p> </p>
<p class="p1"><span class="s1">page 648, <strong>Giant Conebill <i>Oreomanes fraseri</i></strong></span></p>
<p class="p1"><span class="s1">In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.15</a>), Giant Conebill is transferred to <i>Conirostrum</i>, based on genetic data that the genus <i>Oreomanes </i>is embedded in <i>Conirostrum</i> (Burns et al. 2014). The name <i>fraseri</i> is preoccupied in <i>Conirostrum</i>, however, and so the species name changes to the available name <i>binghami</i> (Burns et al. 2016). Change the scientific name from <i>Oreomanes fraseri </i>to <i>Conirostrum binghami</i>.</span></p>
<p class="p1"><span class="s1">Reference:</span></p>
<p class="p1"><span class="s1"> <NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</span></p>
<p> </p>
<p>page 648, <strong>conebills genus <em>Conirostrum</em></strong></p>
<p>In accord with AOS-SACC, the sequence of species of conebill (<em>Conirostrum</em>) is revised, based on genetic evidence (Burns et al. 2014). The new sequence of species is</p>
<p>Bicolored Conebill <em>Conirostrum bicolor</em></p>
<p>Pearly-breasted Conebill <em>Conirostrum margaritae</em></p>
<p>Chestnut-vented Conebill <em>Conirostrum speciosum</em></p>
<p>White-eared Conebill <em>Conirostrum leucogenys</em></p>
<p>Giant Conebill <em>Conirostrum binghami</em></p>
<p>White-browed Conebill <em>Conirostrum ferrugineiventre</em></p>
<p>Blue-backed Conebill <em>Conirostrum sitticolor</em></p>
<p>Capped Conebill <em>Conirostrum albifrons</em></p>
<p>Tamarugo Conebill <em>Conirostrum tamarugense</em></p>
<p>Rufous-browed Conebill <em>Conirostrum rufum</em></p>
<p>Cinereous Conebill <em>Conirostrum cinereum</em></p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p> </p>
<p>page 668, <strong>White-winged Diuca-Finch <em>Diuca speculifera</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.16</a>, and resulting discussion), White-winged Diuca-Finch is merged into <em>Idiopsar</em>, based on new phylogenetic evidence (Burns et al. 2014, Barker et al. 2015). Change the scientific name from <em>Diuca speculifera</em> to <em>Idiopsar speculifer</em>.</p>
<p>Change the spelling of the nominate subspecies from <em>speculifera</em> to <em>speculifer</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2015. <a href="http://www.bioone.org/doi/pdf/10.1642/AUK-14-110.1">New insights into New World biogeography: an integrated view from the phylogeny of blackbirds, cardinals, sparrows, tanagers, warblers, and allies</a>. Auk 132: 333-348.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p> </p>
<p>page 667, <strong>Plumbeous Sierra-Finch <em>Phrygilus unicolor</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.17</a>), Plumbeous Sierra-Finch and Ash-breasted Sierra-Finch are removed from the genus <em>Phrygilus</em>, and are placed in the genus <em>Geospizopsis</em>, based on genetic data that the genus <em>Phrygilus</em> does not form a monophyletic group (Burns et al. 2014, Barker et al. 2015, Burns et al. 2016). Change the scientific name of Plumbeous Sierra-Finch from <em>Phrygilus unicolor</em> to <em>Geospizopsis unicolor</em>. Because the genus <em>Geospizopsis</em> is most closely related to <em>Haplospiza</em> and related genera (Burns et al. 2014, Barker et al. 2015), reposition <em>Geospizopsis</em> to immediately precede the genera <em>Haplospiza</em>, <em>Spodiornis</em>, and <em>Acanthidops</em>.</p>
<p>Change the spelling of the subspecies name <em>nivarius</em> to <em>nivaria</em>.</p>
<p>Change the spelling of the subspecies name<em> tucumanus </em>to <em>tucumana</em>.</p>
<p>Change the spelling of the subspecies name <em>ultimus</em> to <em>ultima</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2015. New insights into New World biogeography: an integrated view from the phylogeny of blackbirds, cardinals, sparrows, tanagers, warblers, and allies. Auk 132: 333-348.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p><NAME>., <NAME>, and <NAME>. 2016. A genus-level classification of the family Thraupidae (Class Aves: Order Passeriformes). Zootaxa 4088: 329-354.</p>
<p> </p>
<p>page 668, <strong>Ash-breasted Sierra-Finch <em>Phrygilus plebejus</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.17</a>), Plumbeous Sierra-Finch and Ash-breasted Sierra-Finch are removed from the genus <em>Phrygilus</em>, and are placed in the genus <em>Geospizopsis</em>, based on genetic data that the genus <em>Phrygilus</em> does not form a monophyletic group (Burns et al. 2014, Barker et al. 2015). Change the scientific name of Ash-breasted Sierra-Finch from <em>Phrygilus plebejus</em> to <em>Geospizopsis plebejus</em>. Because the genus <em>Geospizopsis</em> is most closely related to <em>Haplospiza</em> and related genera (Burns et al. 2014, Barker et al. 2015), reposition <em>Geospizopsis</em> to immediately precede the genera <em>Haplospiza</em>, <em>Spodiornis</em>, and <em>Acanthidops</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2015. New insights into New World biogeography: an integrated view from the phylogeny of blackbirds, cardinals, sparrows, tanagers, warblers, and allies. Auk 132: 333-348.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p><NAME>., <NAME>, and <NAME>. 2016. A genus-level classification of the family Thraupidae (Class Aves: Order Passeriformes). Zootaxa 4088: 329-354.</p>
<p> </p>
<p>page 673, <strong>Uniform Finch <em>Haplospiza unicolor</em></strong></p>
<p>Reposition Uniform Finch to precede Slaty Finch <em>Spodiornis rusticus</em>.</p>
<p> </p>
<p>page 673, <strong>Slaty Finch <em>Haplospiza rustica</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.16</a>, and resulting discussion), Slaty Finch is removed from <em>Haplospiza</em> and is placed in the monotypic genus <em>Spodiornis</em>, based on new phylogenetic evidence (Burns et al. 2014, Barker et al. 2015). Change the scientific name from <em>Haplospiza rustica</em> to <em>Spodiornis rusticus</em>.</p>
<p>Change the spelling of the subspecies name <em>rustica</em> to <em>rusticus</em>.</p>
<p>Change the spelling of the subspecies name <em>arcana</em> to <em>arcanus</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2015. New insights into New World biogeography: an integrated view from the phylogeny of blackbirds, cardinals, sparrows, tanagers, warblers, and allies. Auk 132: 333-348.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p> </p>
<p>page 668, <strong>Nightingale Finch <em>Nesospiza acunhae</em></strong></p>
<p>Change the English name of the monotypic group <em>Nesospiza acunhae acunhae</em> from Nightingale Finch (Inaccessible Island) to Nightingale Finch (Inaccessible I.).</p>
<p>Change the English name of the monotypic group <em>Nesospiza acunhae questi</em> from Nightingale Finch (Nightingale) to Nightingale Finch (Nightingale I.).</p>
<p> </p>
<p>page 668, <strong>Wilkins’s Finch <em>Nesospiza wilkinsi</em></strong></p>
<p>Change the English name of the monotypic group <em>Nesospiza wilkinsi dunnei</em> from Wilkins’s Finch (Inaccessible Island) to Wilkins’s Finch (Inaccessible I.).</p>
<p>Change the English name of the monotypic group <em>Nesospiza wilkinsi wilkinsi</em> from Wilkins’s Finch (Nightingale Island) to Wilkins’s Finch (Nightingale I.).</p>
<p> </p>
<p>page 650, <strong>Black-headed Hemispingus <em>Hemispingus verticalis</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.07</a>), Black-headed Hemispingus and Drab Hemispingus are removed from the genus <em>Hemispingus</em>, based on genetic evidence that the genus <em>Hemispingus</em> is not monophyletic (Burns et al. 2014). Change the scientific name of Black-headed Hemispingus from <em>Hemispingus verticalis</em> to <em>Pseudospingus verticalis</em> (Burns et al. 2016). Reposition <em>Pseudospingus</em> to precede Gray-hooded Bush Tanager <em>Cnemoscopus rubrirostris</em> and Bay-chested Warbling-Finch <em>Castanozoster thoracicus</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p><NAME>., <NAME>, and <NAME>. 2016. A genus-level classification of the family Thraupidae (Class Aves: Order Passeriformes). Zootaxa 4088: 329-354.</p>
<p> </p>
<p>page 650, <strong>Drab Hemispingus <em>Hemispingus xanthophthalmus</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.07</a>), Black-headed Hemispingus and Drab Hemispingus are removed from the genus <em>Hemispingus</em>, based on genetic evidence that the genus <em>Hemispingus</em> is not monophyletic (Burns et al. 2014). Change the scientific name of Drab Hemispingus from <em>Hemispingus xanthophthalmus</em> to <em>Pseudospingus xanthophthalmus</em> (Burns et al. 2016). Reposition <em>Pseudospingus</em> to precede Gray-hooded Bush Tanager <em>Cnemoscopus rubrirostris</em> and Bay-chested Warbling-Finch <em>Castanozoster thoracicus</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p><NAME>., <NAME>, and <NAME>. 2016. A genus-level classification of the family Thraupidae (Class Aves: Order Passeriformes). Zootaxa 4088: 329-354.</p>
<p> </p>
<p>page 650, <strong>Gray-hooded Bush Tanager <em>Cnemoscopus rubrirostris</em></strong></p>
<p>In accord with AOS-SACC, transfer Gray-hooded Bush Tanager <em>Cnemoscopus rubrirostris</em> to a position immediately following Drab Hemispingus <em>Pseudospingus xanthophthalmus</em>, based on genetic evidence that <em>Cnemoscopus</em> and <em>Pseudospingus</em> are sister genera (Burns et al. 2014).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p> </p>
<p>page 669, <strong>Bay-chested Warbling-Finch <em>Poospiza thoracica</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.13</a>), Bay-chested Warbling-Finch is removed from the genus <em>Poospiza</em>, based on genetic evidence that <em>Poospiza</em> is not monophyletic (Burns et al. 2014). Change the scientific name from <em>Poospiza thoracica</em> to <em>Castanozoster thoracicus</em> (Burns et al. 2016).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p><NAME>., <NAME>, and <NAME>. 2016. A genus-level classification of the family Thraupidae (Class Aves: Order Passeriformes). Zootaxa 4088: 329-354.</p>
<p> </p>
<p>page 650, <strong>Slaty-backed Hemispingus <em>Hemispingus goeringi</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.08</a>), Slaty-backed Hemispingus is removed from the genus <em>Hemispingus</em>, based on genetic evidence that the genus <em>Hemispingus</em> is not monophyletic (Burns et al. 2014); instead, Slaty-backed Hemispingus and Rufous-browed Hemispingus are embedded in <em>Poospiza</em>. Change the scientific name of Slaty-backed Hemispingus from <em>Hemispingus goeringi</em> to <em>Poospiza goeringi</em> (Burns et al. 2016). Reposition Slaty-backed Hemispingus to immediately follow Bay-chested Warbling-Finch <em>Castanozoster thoracicus</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p><NAME>., <NAME>, and <NAME>. 2016. A genus-level classification of the family Thraupidae (Class Aves: Order Passeriformes). Zootaxa 4088: 329-354.</p>
<p> </p>
<p>page 650, <strong>Rufous-browed Hemispingus <em>Hemispingus rufosuperciliaris</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.08</a>), Rufous-browed Hemispingus is removed from the genus <em>Hemispingus</em>, based on genetic evidence that the genus <em>Hemispingus</em> is not monophyletic (Burns et al. 2014); instead, Slaty-backed Hemispingus and Rufous-browed Hemispingus are embedded in <em>Poospiza</em>. Change the scientific name of Rufous-browed Hemispingus from <em>Hemispingus rufosuperciliaris </em>to <em>Poospiza rufosuperciliaris</em> (Burns et al. 2016). Reposition Rufous-browed Hemispingus to immediately follow Bay-chested Warbling-Finch <em>Castanozoster thoracicus</em> and Slaty-backed Hemispingus <em>Poospiza goeringi</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p><NAME>., <NAME>, and <NAME>. 2016. A genus-level classification of the family Thraupidae (Class Aves: Order Passeriformes). Zootaxa 4088: 329-354.</p>
<p> </p>
<p>page 669, <strong>Cochabamba Mountain-Finch <em>Compsospiza garleppi</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.08</a>), Cochabamba Mountain-Finch is removed from the genus <em>Compsospiza,</em> based on genetic evidence that the genus <em>Compsospiza</em> is embedded in <em>Poospiza</em> (Burns et al. 2014). Change the scientific name of Cochabamba Mountain-Finch from <em>Compsospiza garleppi</em> to <em>Poospiza garleppi</em> (Burns et al. 2016). Reposition the two species of mountain-finch to immediately follow Collared Warbling-Finch <em>Poospiza hispaniolensis</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p><NAME>., <NAME>, and <NAME>. 2016. A genus-level classification of the family Thraupidae (Class Aves: Order Passeriformes). Zootaxa 4088: 329-354.</p>
<p> </p>
<p>page 669, <strong>Tucuman Mountain-Finch <em>Compsospiza baeri</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.08</a>), Tucuman Mountain-Finch is removed from the genus <em>Compsospiza</em>, based on genetic evidence that the genus <em>Compsospiza</em> is embedded in <em>Poospiza</em> (Burns et al. 2014). Change the scientific name of Tucuman Mountain-Finch from <em>Compsospiza baeri</em> to <em>Poospiza baeri</em> (Burns et al. 2016). Reposition the two species of mountain-finch to immediately follow Collared Warbling-Finch <em>Poospiza hispaniolensis</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p><NAME>., <NAME>, and <NAME>. 2016. A genus-level classification of the family Thraupidae (Class Aves: Order Passeriformes). Zootaxa 4088: 329-354.</p>
<p> </p>
<p>page 688, <strong>Mangrove Finch <em>Camarhynchus heliobates</em></strong></p>
<p>Revise the range description from “Mangrove swamps of Galapagos Islands (Fernandina and Isabela)” to “restricted to very small area in mangroves on west coast of Isabela (Galapagos Islands); formerly also on eastern coast of Isabela, and on Fernandina Island” (Wiedenfeld 2006).</p>
<p>Reference:</p>
<p>Wiedenfeld, D.A. 2006. Aves, The Galapagos Islands, Ecuador. Check List 2: 1-27.</p>
<p> </p>
<p>page 689, <strong>Black-throated Saltator <em>Saltator atricollis</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop730.htm">Proposal 730.03</a>), Black-throated Saltator is removed from the genus <em>Saltator</em> and placed in <em>Saltatricula</em> (Burns et al. 2014). Change the scientific name from <em>Saltator atricollis</em> to <em>Saltatricula atricollis</em>.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p> </p>
<p>page 649, <strong>Common Chlorospingus <em>Chlorospingus flavopectus</em></strong></p>
<p>Revise the range description of subspecies <em>dwighti</em> from “Caribbean slope of s Mexico (Chiapas) and e Guatemala” to “Caribbean slope of southern Mexico (Chiapas) and eastern Guatemala; population in Belize presumably also this subspecies”.</p>
<p> </p>
<p>page 684, <strong>Grasshopper Sparrow <em>Ammodramus savannarum</em></strong></p>
<p>Previously we recognized three groups in Grasshopper Sparrow: a polytypic group Grasshopper Sparrow (Western) <em>Ammodramus savannarum perpallidus/ammolegus</em>, and two monotypic groups, Grasshopper Sparrow (Eastern) <em>Ammodramus savannarum pratensis</em> and Grasshopper Sparrow (Florida) <em>Ammodramus savannarum floridanus</em>. These three groups are not field identifiable, however, and so are dissolved. We continue to recognize <em>perpallidus</em>, <em>ammolegus, pratensis</em>, and <em>floridanus</em> as subspecies.</p>
<p> </p>
<p>page 684, <strong>Le Conte’s Sparrow <em>Ammodramus leconteii</em></strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2017), change the English name of <em>Ammodramus leconteii</em> from Le Conte’s Sparrow to LeConte’s Sparrow, “to conform to the generally accepted spelling of the name of entomologist <NAME> LeConte, for whom the species was named (Mearns and Mearns 1992, Jobling 2010)” (Chesser et al. 2017).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site">Fifty-eighth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p><NAME>. 2010. The Helm dictionary of scientific bird names. <NAME>, London, United Kingdom.</p>
<p>Mearns, B., and <NAME>. 1992. Audubon to Xantus: the lives of those commemorated in North American bird names. Academic Press, New York, New York.</p>
<p> </p>
<p>page 685, <strong>Song Sparrow <em>Melospiza melodia</em></strong></p>
<p>Revise the range description of subspecies <em>heermanni</em> from “cistmontane California (Central Valley, Suisan Bay to Mojave, Colorado deserts), coastal Calif (from northern Monterey County to northern Baja)” to “central and southwestern California (including the Central Valley) and northwestern Baja California”.</p>
<p>Revise the range description of subspecies <em>graminea</em> from “Channel Is. (off southern California), Los Coronados Is. (off northwestern Baja California)” to “California Channel Islands (San Clemente, San Miguel, Santa Cruz, Santa Rosa, Anacapa) off of southern California and Los Coronados Islands, west of Baja California. Formerly also Santa Barbara Island (Channel Islands), but now extirpated”.</p>
<p>Revise the range description of subspecies <em>fallax</em> from “southern Nevada to southwestern Utah, southeastern Calif to northwestern Baja California, western Mexico (northeastern Sonora)” to “southeastern Nevada and southwestern Utah south to southeastern California, northeastern Baja California, and northeastern Sonora”.</p>
<p> </p>
<p>pages 665-667, <strong>Buntings and New World Sparrows Family Emberizidae</strong></p>
<p>With the removal of the New World species from Emberizidae, change the English name of this family from Buntings and New World Sparrows to Old World Buntings.</p>
<p> </p>
<p>page 665, <strong>Gray-hooded Bunting <em>Emberiza buchanani</em></strong></p>
<p>In accord with widespread usage (e.g. Inskipp et al. 1996, MacKinnon and Phillipps 2000, Rasmussen and Anderton 2012), change the English name of <em>Emberiza buchanani</em> from Gray-hooded Bunting to Gray-necked Bunting.</p>
<p>References:</p>
<p><NAME>., <NAME>, and <NAME>. 1996. An annotated checklist of the birds of the Oriental Region. Oriental Bird Club, Sandy, Bedfordshire, United Kingdom.</p>
<p><NAME>., and <NAME>. 2000. A field guide to the birds of China. Oxford University Press, New York and Oxford, United Kingdom.</p>
<p><NAME>., and <NAME>. 2012. Birds of South Asia. The Ripley guide. Volume 2: attributes and status. Second Edition. Smithsonian Institution and Lynx Edicions, Washington D.C. and Barcelona.</p>
<p> </p>
<p>page 665, <strong>Cinnamon-breasted Bunting <em>Emberiza tahapisi</em></strong></p>
<p>Revise the range description of subspecies <em>septemstriata</em> from “Sudan east of the Nile to w and n Ethiopia” to “southeastern Sudan, northeastern South Sudan, Eritrea, and northwestern Ethiopia; possibly a hybrid swarm between nominate <em>tahapisi</em> and Gosling’s Bunting <em>Emberiza goslingi</em>“.</p>
<p>Revise the range description of nominate <em>tahapisi</em> from “Gabon to Uganda, s Sudan, Ethiopia, Somalia and South Africa” to “central and southeastern Ethiopia and northern Somalia south to South Africa; Gabon, Angola, and central and southern Democratic Republic of the Congo”.</p>
<p>Following the entry for <em>Emberiza tahapisi tahapisi</em>, insert a previously overlooked subspecies, <em>nivenorum</em> Winterbottom 1965, with range “western Namibia and (presumably) adjacent southwestern Angola”.</p>
<p>Reference:</p>
<p>Winterbottom, J.M. 1964 [1965]. Results of the Percy Fitzpatrick Institute-Windhoek State Museum Joint Ornithological Expeditions: report of the birds of Game Reserve No. 2. Cimbebasia number 9.</p>
<p> </p>
<p>page 665, <strong>Gosling’s Bunting <em>Emberiza goslingi</em></strong></p>
<p>Revise the range description from “Gambia to Sudan west of the Nile and extreme ne Zaïre” to “southern Mauritania to southern Senegal, east to southeastern Chad, northeastern Central African Republic, southwestern Sudan, and northwestern South Sudan; possibly also extreme northeastern Democratic Republic of the Congo”.</p>
<p> </p>
<p>page 666, <strong>Golden-breasted Bunting <em>Emberiza flaviventris</em></strong></p>
<p>Revise the range description of subspecies <em>kalaharica</em> from “S Angola to se Sudan, Kenya, Mozambique and n South Africa” to “southern South Sudan and central and southeastern Angola south to northeastern Namibia, Botswana, Mozambique, and northern South Africa”.</p>
<p> </p>
<p>page 666, <strong>Brown-rumped Bunting <em>Emberiza affinis</em></strong></p>
<p>Revise the range description of nominate <em>affinis</em> from “S Sudan, n Uganda and adjacent Zaïre” to “South Sudan, northeastern Democratic Republic of the Congo, northern Uganda, and northwestern Kenya; population in southeastern Sudan and northwestern Ethiopia presumably also this subspecies”.</p>
<p> </p>
<p>page 666, <strong>Cabanis’s Bunting <em>Emberiza cabanisi</em></strong></p>
<p>Revise the range description of the monotypic group Cabanis’s Bunting (Cabanis’s) <em>Emberiza cabanisi cabanisi</em> from “Sierra Leone to s Sudan, ne Zaïre and nw Uganda” to “Guinea and Sierra Leone to South Sudan, northeastern Democratic Republic of the Congo, and northwestern Uganda”.</p>
<p>Revise the range description of the monotypic group Cabanis’s Bunting (Three-streaked) <em>Emberiza cabanisi orientalis</em> from “S Zaïre to Tanzania, Angola, Zambia, Zimbabwe, n Mozambique” to “Gabon east to southern Democratic Republic of the Congo and Tanzania south to Angola, Zambia, Zimbabwe, and Mozambique”.</p>
<p> </p>
<p>page 654, <strong>Red-crowned Ant-Tanager <em>Habia rubica</em></strong></p>
<p>Change the subspecies name <em>mesoptamia</em> to the correct original spelling, <em>mesopotamia</em>.</p>
<p> </p>
<p>page 691, <strong>Blue-black Grosbeak <em>Cyanocompsa cyanoides</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop724.htm">Proposal 724</a>), change the scientific name of Blue-black Grosbeak from <em>Cyanocompsa cyanoides</em> to <em>Cyanoloxia cyanoides</em>. This change is based on genetic evidence that Blue-black Grosbeak is more closely related to Glaucous-blue Grosbeak <em>Cyanoloxia glaucocaerulea</em> than it is to the type species of <em>Cyanocompsa</em>, Blue Bunting <em>Cyanocompsa parellina</em> (Bryson et al. 2014).</p>
<p>Change the scientific name of the polytypic group Blue-black Grosbeak (Blue-black) <em>Cyanocompsa cyanoides</em> [<em>cyanoides</em> Group] to <em>Cyanoloxia cyanoides</em> [<em>cyanoides</em> Group].</p>
<p>Change the scientific name of the monotypic group Blue-black Grosbeak (Amazonian) <em>Cyanocompsa cyanoides rothschildii</em> to <em>Cyanoloxia cyanoides rothschildii</em>.</p>
<p>Reference:</p>
<p><NAME>., Jr., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Diversification across the New World within the ‘blue’ cardinalids (Aves: Cardinalidae). Journal of Biogeography 41: 587-599.</p>
<p> </p>
<p>page 691, <strong>Ultramarine Grosbeak <em>Cyanocompsa brissonii</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop724.htm">Proposal 724</a>), change the scientific name of Ultramarine Grosbeak from <em>Cyanocompsa brissonii</em> to <em>Cyanoloxia brissonii</em>. This change is based on genetic evidence that Ultramarine Grosbeak is more closely related to Glaucous-blue Grosbeak <em>Cyanoloxia glaucocaerulea</em> than it is to the type species of <em>Cyanocompsa</em>, Blue Bunting <em>Cyanocompsa parellina</em> (Bryson et al. 2014).</p>
<p>Reference:</p>
<p>Bryson, R.W., Jr., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Diversification across the New World within the ‘blue’ cardinalids (Aves: Cardinalidae). Journal of Biogeography 41: 587-599.</p>
<p> </p>
<p>pages 691-698, 701, <strong>Troupials and allies Family Icteridae</strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2017), the sequence of genera in Icteridae is revised, following Powell et al. (2014). The new sequence of genera is:</p>
<p><em>Xanthocephalus</em></p>
<p><em>Dolichonyx</em></p>
<p><em>Sturnella</em></p>
<p><em>Amblycercus</em></p>
<p><em>Cassiculus</em></p>
<p><em>Psarocolius</em></p>
<p><em>Cacicus</em></p>
<p><em>Icterus</em></p>
<p><em>Nesopsar</em></p>
<p><em>Agelaius</em></p>
<p><em>Molothrus</em></p>
<p><em>Dives</em></p>
<p><em>Ptiloxena</em></p>
<p><em>Euphagus</em></p>
<p><em>Quiscalus</em></p>
<p><em>Hypopyrrhus</em></p>
<p><em>Lampropsar</em></p>
<p><em>Gymnomystax</em></p>
<p><em>Macroagelaius</em></p>
<p><em>Curaeus</em></p>
<p><em>Amblyramphus</em></p>
<p><em>Anumara</em></p>
<p><em>Gnorimopsar</em></p>
<p><em>Oreopsar</em></p>
<p><em>Agelaioides</em></p>
<p><em>Agelasticus</em></p>
<p><em>Chrysomus</em></p>
<p><em>Xanthopsar</em></p>
<p><em>Pseudoleistes</em></p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site">Fifty-eighth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. A comprehensive species-level molecular phylogeny of the New World blackbirds (Icteridae). Molecular Phylogenetics and Evolution 71: 94-112.</p>
<p> </p>
<p>pages 625-639, 701, <strong>Finches, Euphonias, and Allies Family Fringillidae</strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2017), the sequence of genera and species in Fringillidae is revised, following Arnaiz-Villena et al. (2007, 2008), Nguembock et al. (2009), Lerner et al. (2011), and Zuccon et al. (2012). The new sequence of genera is:</p>
<p><em>Fringilla</em></p>
<p><em>Chlorophonia</em></p>
<p><em>Euphonia</em></p>
<p><em>Mycerobas</em></p>
<p><em>Coccothraustes</em></p>
<p><em>Eophona</em></p>
<p><em>Melamprosops</em></p>
<p><em>Oreomystis</em></p>
<p><em>Paroreomyza</em></p>
<p><em>Loxioides</em></p>
<p><em>Telespiza</em></p>
<p><em>Chloridops</em></p>
<p><em>Rhodacanthis</em></p>
<p><em>Ciridops</em></p>
<p><em>Palmeria</em></p>
<p><em>Himatione</em></p>
<p><em>Drepanis</em></p>
<p><em>Psittirostra</em></p>
<p><em>Dysmorodrepanis</em></p>
<p><em>Pseudonestor</em></p>
<p><em>Hemignathus</em></p>
<p><em>Akialoa</em></p>
<p><em>Magumma</em></p>
<p><em>Chlorodrepanis</em></p>
<p><em>Viridonia</em></p>
<p><em>Loxops</em></p>
<p><em>Carpodacus</em></p>
<p><em>Pinicola</em></p>
<p><em>Pyrrhula</em></p>
<p><em>Rhodopechys</em></p>
<p><em>Bucanetes</em></p>
<p><em>Agraphospiza</em></p>
<p><em>Pyrrhoplectes</em></p>
<p><em>Callacanthis</em></p>
<p><em>Procarduelis</em></p>
<p><em>Leucosticte</em></p>
<p><em>Haemorhous</em></p>
<p><em>Rhodospiza</em></p>
<p><em>Rhynchostruthus</em></p>
<p><em>Chloris</em></p>
<p><em>Linurgus</em></p>
<p><em>Crithagra</em></p>
<p><em>Linaria</em></p>
<p><em>Acanthis</em></p>
<p><em>Loxia</em></p>
<p><em>Chrysocorythus</em></p>
<p><em>Carduelis</em></p>
<p><em>Serinus</em></p>
<p><em>Spinus</em></p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2007. Bayesian phylogeny of Fringillinae birds: status of the singular African oriole finch Linurgus olivaceus and evolution and heterogeneity of the genus <em>Carpodacus</em>. Acta Zoologia Sinica 53: 826-834.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2008. <a href="https://benthamopen.com/contents/pdf/TOOENIJ/TOOENIJ-1-1.pdf">Mitochondrial DNA phylogenetic definition of a group of ‘arid-zone’ Carduelini finches</a>. The Open Ornithology Journal 1: 1-7.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site">Fifty-eighth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2011. Multilocus resolution of phylogeny and timescale in the extant adaptive radiation of Hawaiian honeycreepers. Current Biology 21: 1838-1844.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus</em> and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 638, <strong>Scarlet Finch <em>Haematospiza sipahi</em></strong></p>
<p>Scarlet Finch is transferred from the monotypic genus <em>Haematospiza</em> to <em>Carpodacus</em>, following Zuccon et al. (2012); change the scientific name from <em>Haematospiza sipahi</em> to <em>Carpodacus sipahi</em>.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 701, <strong><NAME>beak <em>Chaunoproctus ferreorostris</em></strong></p>
<p><NAME> is transferred from the monotypic genus <em>Chaunoproctus</em> to <em>Carpodacus</em>, following Zuccon et al. (2012); change the scientific name from <em>Chaunoproctus ferreorostris</em> to <em>Carpodacus ferreorostris</em>.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 638, <strong>Long-tailed Rosefinch <em>Uragus sibiricus</em></strong></p>
<p>Long-tailed Rosefinch is transferred from the monotypic genus <em>Uragus</em> to <em>Carpodacus</em>, following Zuccon et al. (2012) and Tietze et al. (2013); change the scientific name from <em>Uragus sibiricus</em> to <em>Carpodacus sibiricus</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2013. Complete phylogeny and historical biogeography of true rosefinches (Aves: <em>Carpodacus</em>). Zoological Journal of Linnean Society 169: 215-234.</p>
<p> </p>
<p>page 629, <strong>Crimson-browed Finch <em>Pinicola subhimachala</em></strong></p>
<p>Crimson-browed Finch is transferred from <em>Pinicola </em>to <em>Carpodacus</em>, following Zuccon et al. (2012) and Tietze et al. (2013); change the scientific name from <em>Pinicola subhimachala</em> to <em>Carpodacus subhimachalus</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2013. Complete phylogeny and historical biogeography of true rosefinches (Aves: <em>Carpodacus</em>). Zoological Journal of Linnean Society 169: 215-234.</p>
<p> </p>
<p>page 629, <strong>Blanford’s Rosefinch <em>Carpodacus rubescens</em></strong></p>
<p>Blanford’s Rosefinch is not a member of <em>Carpodacus</em> (Zuccon et al. 2012, Tietze et al. 2013), and is transferred to a new genus (Zuccon et al. 2012); change the scientific name from <em>Carpodacus rubescens </em>to <em>Agraphospiza rubescens</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and Y-H. Sun. 2013. Complete phylogeny and historical biogeography of true rosefinches (Aves: <em>Carpodacus</em>). Zoological Journal of Linnean Society 169: 215-234.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 629, <strong>Dark-breasted Rosefinch <em>Carpodacus nipalensis</em></strong></p>
<p>Dark-breasted Rosefinch is not a member of <em>Carpodacus</em> (Zuccon et al. 2012, Tietze et al. 2013), and is transferred to the genus <em>Procarduelis</em>; change the scientific name from <em>Carpodacus nipalensis</em> to <em>Procarduelis nipalensis</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and Y-H. Sun. 2013. Complete phylogeny and historical biogeography of true rosefinches (Aves: <em>Carpodacus</em>). Zoological Journal of Linnean Society 169: 215-234.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 631, <strong>European Greenfinch <em>Chloris chloris</em></strong></p>
<p>Revise the range description of nominate <em>chloris</em> from “British Isles, n Europe, Corsica and Sardinia; winters to s Europe” to “British Isles, n Europe, Corsica and Sardinia; winters to s Europe. The species also now is established in southeastern Australia, Norfolk Island, New Zealand, and South America (northeastern Argentina, Uruguay, and extreme southern Brazil), but subspecific affinities of these feral populations are not known”.</p>
<p> </p>
<p>page 634, <strong>White-rumped Seedeater <em>Serinus leucopygius</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and many species of <em>Serinus</em> are transferred to the genus <em>Crithagra</em> (Ryan et al. 2004, Nguembock et al. 2009, Zuccon et al. 2012).</p>
<p>Change the scientific name of White-rumped Seedeater from <em>Serinus leucopygius</em> to <em>Crithagra leucopygia</em>. Change the spelling of the name of the nominate subspecies from <em>leucopygius</em> to <em>leucopygia</em>. Revise the range description of the nominate subspecies from “Central and s Sudan to w Eritrea, w Ethiopia and nw Uganda” to “eastern Sudan, northern Eritrea, western and central Ethiopia, South Sudan, extreme northeastern Democratic Republic of the Congo (north end of Lake Albert), and northwestern Uganda”.</p>
<p>References:</p>
<p>Nguembock B., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera Serinus and Carduelis and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus</em> canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 635, <strong>Yellow-fronted Canary <em>Serinus mozambicus</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and many species of <em>Serinus</em> are transferred to the genus <em>Crithagra</em> (Ryan et al. 2004, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of Yellow-fronted Canary from <em>Serinus mozambicus</em> to <em>Crithagra mozambica</em>.</p>
<p>Change subspecies name <em>barbatus </em>to <em>barbata</em>. Revise the range description of <em>barbata</em> from “S Chad to Sudan, ne Democratic Republic of the Congo, w Kenya and central Tanzania” to “southern Chad, Central African Republic, western Sudan, western and southern South Sudan, eastern Democratic Republic of the Congo, Uganda, southwestern Kenya, and central Tanzania”.</p>
<p>Revise the range description of subspecies <em>grotei</em> from “S Sudan (east of the Nile) to Eritrea and w Ethiopia” to “southeastern Sudan (east of the Nile), eastern South Sudan, and western and southwestern Ethiopia”.</p>
<p>Revise the range description of subspecies <em>gommaensis </em>from “W Ethiopia (Lake Tana to Gomma)” to “Eritrea and northwestern and central Ethiopia”.</p>
<p>Change the spelling of the nominate subspecies from <em>mozambicus</em> to <em>mozambica</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus </em>and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus</em> canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 634, <strong>African Citril <em>Serinus citrinelloides</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and many species of <em>Serinus</em> are transferred to the genus <em>Crithagra</em> (Ryan et al. 2004, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of African Citril from <em>Serinus citrinelloides</em> to <em>Crithagra citrinelloides.</em></p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus</em> and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus</em> canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 634, <strong>Western Citril <em>Serinus frontalis</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and many species of <em>Serinus</em> are transferred to the genus <em>Crithagra</em> (Ryan et al. 2004, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of Western Citril from <em>Serinus frontalis</em> to <em>Crithagra frontalis</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus</em> and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus </em>canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 634, <strong>Southern Citril <em>Serinus hypostictus</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and many species of <em>Serinus </em>are transferred to the genus <em>Crithagra</em> (Ryan et al. 2004, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of Southern Citril from <em>Serinus hypostictus</em> to <em>Crithagra hyposticta</em>.</p>
<p>Revise the range description of subspecies <em>brittoni </em>from “S Sudan and w Kenya” to “southeastern South Sudan and extreme western Kenya”.</p>
<p>Change the spelling of the name of the nominate subspecies from <em>hypostictus</em> to <em>hyposticta</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus</em> and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus</em> canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 634, <strong>Black-faced Canary <em>Serinus capistratus</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and many species of <em>Serinus</em> are transferred to the genus <em>Crithagra</em> (Ryan et al. 2004, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of Black-faced Canary from <em>Serinus capistratus</em> to <em>Crithagra capistrata</em>.</p>
<p>Change the spelling of the name of the nominate subspecies from <em>capistratus</em> to <em>capistrata</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus</em> and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus </em>canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 634, <strong>Papyrus Canary <em>Serinus koliensis</em></strong></p>
<p>The genus <em>Serinus </em>is not monophyletic, and many species of <em>Serinus</em> are transferred to the genus <em>Crithagra</em> (Ryan et al. 2004, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of Papyrus Canary from <em>Serinus koliensis</em> to <em>Crithagra koliensis</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus</em> and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus </em>canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 634, <strong>Forest Canary <em>Serinus scotops</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and many species of <em>Serinus </em>are transferred to the genus <em>Crithagra </em>(Ryan et al. 2004, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of Forest Canary from <em>Serinus scotops</em> to <em>Crithagra scotops</em>.</p>
<p>Change the spelling of the subspecies name <em>umbrosus</em> to <em>umbrosa</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus</em> and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus </em>canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 634, <strong>Olive-rumped Serin <em>Serinus rothschildi</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and many species of <em>Serinus</em> are transferred to the genus <em>Crithagra</em> (Ryan et al. 2004, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of Olive-rumped Serin from <em>Serinus rothschildi</em> to <em>Crithagra rothschildi</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus</em> and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus </em>canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 634, <strong>Black-throated Canary <em>Serinus atrogularis</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and many species of <em>Serinus</em> are transferred to the genus <em>Crithagra</em> (Ryan et al. 2004, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of Black-throated Canary from <em>Serinus atrogularis</em> to <em>Crithagra atrogularis</em>.</p>
<p>Change the spelling of the subspecies name <em>impiger</em> to <em>impigra</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus</em> and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus </em>canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p><NAME>., R. Prŷs-Jones, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 635, <strong>Reichenow’s Seedeater <em>Serinus reichenowi</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and many species of <em>Serinus</em> are transferred to the genus <em>Crithagra</em> (Ryan et al. 2004, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of Reichenow’s Seedeater from <em>Serinus reichenowi</em> to <em>Crithagra reichenowi</em>.</p>
<p>Revise the range description from “SE Sudan to s Ethiopia, Somalia, ne Uganda, Kenya, ne Tanzania” to “Djibouti, Ethiopia, southeastern South Sudan, southern Somalia, northeastern Uganda, Kenya, and Tanzania”.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus</em> and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus </em>canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 635, <strong>Yellow-rumped Serin <em>Serinus xanthopygius</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and many species of <em>Serinus </em>are transferred to the genus <em>Crithagra</em> (Ryan et al. 2004, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of Yellow-rumped Serin from <em>Serinus xanthopygius</em> to <em>Crithagra xanthopygia</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus</em> and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus </em>canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 635, <strong>Lemon-breasted Seedeater <em>Serinus citrinipectus</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and many species of <em>Serinus</em> are transferred to the genus <em>Crithagra</em> (Ryan et al. 2004, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of Lemon-breasted Seedeater from <em>Serinus citrinipectus</em> to <em>Crithagra citrinipectus</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus</em> and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus </em>canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 635, <strong>White-bellied Canary <em>Serinus dorsostriatus</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and many species of <em>Serinus</em> are transferred to the genus <em>Crithagra</em> (Ryan et al. 2004, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of White-bellied Canary from <em>Serinus dorsostriatus</em> to <em>Crithagra dorsostriata</em>.</p>
<p>Change the spelling of the name of the nominate subspecies from <em>dorsostriatus</em> to <em>dorsostriata</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus</em> and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus </em>canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 635, <strong>Yellow-throated Serin <em>Serinus flavigula</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and many species of <em>Serinus</em> are transferred to the genus <em>Crithagra</em> (Ryan et al. 2004, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of Yellow-throated Serin from <em>Serinus flavigula</em> to <em>Crithagra flavigula</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus</em> and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus </em>canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 635, <strong>Salvadori’s Serin <em>Serinus xantholaemus</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and many species of <em>Serinus</em> are transferred to the genus <em>Crithagra</em> (Ryan et al. 2004, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of Salvadori’s Serin from <em>Serinus xantholaemus</em> to <em>Crithagra xantholaema</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus</em> and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus </em>canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 635, <strong>Northern Grosbeak-Canary <em>Serinus donaldsoni</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and many species of <em>Serinus </em>are transferred to the genus <em>Crithagra</em> (Ryan et al. 2004, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of Northern Grosbeak-Canary from <em>Serinus donaldsoni</em> to <em>Crithagra donaldsoni</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus</em> and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus </em>canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 635, <strong>Southern Grosbeak-Canary <em>Serinus buchanani</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and many species of <em>Serinus</em> are transferred to the genus <em>Crithagra</em> (Ryan et al. 2004, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of Southern Grosbeak-Canary from <em>Serinus buchanani</em> to <em>Crithagra buchanani</em>.</p>
<p>References:</p>
<p>Nguembock B., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus</em> and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus </em>canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 635, <strong>Brimstone Canary <em>Serinus sulphuratus</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and many species of <em>Serinus</em> are transferred to the genus <em>Crithagra</em> (Ryan et al. 2004, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of Brimstone Canary from <em>Serinus sulphuratus</em> to <em>Crithagra sulphurata</em>.</p>
<p>Change the spelling of the name of the nominate subspecies from <em>sulphuratus</em> to <em>sulphurata</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus</em> and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus </em>canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 635, <strong>Yellow Canary <em>Serinus flaviventris</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and many species of <em>Serinus</em> are transferred to the genus <em>Crithagra</em> (Ryan et al. 2004, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of Yellow Canary from <em>Serinus flaviventris</em> to <em>Crithagra flaviventris</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus</em> and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus </em>canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 635, <strong>White-throated Canary <em>Serinus albogularis</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and many species of <em>Serinus</em> are transferred to the genus <em>Crithagra</em> (Ryan et al. 2004, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of White-throated Canary from <em>Serinus albogularis</em> to <em>Crithagra albogularis</em>.</p>
<p>Change the spelling of the subspecies name <em>crocopygius</em> to <em>crocopygia</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus</em> and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus </em>canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 635, <strong>Streaky Seedeater <em>Serinus striolatus</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and many species of <em>Serinus</em> are transferred to the genus <em>Crithagra</em> (Ryan et al. 2004, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of Streaky Seedeater from <em>Serinus striolatus</em> to <em>Crithagra striolata</em>.</p>
<p>Change the spelling of the name of the nominate subspecies from <em>striolatus</em> to <em>striolata</em>.</p>
<p>Revise the range description for nominate <em>striolata</em> from “Eritrea, Ethiopia, s Sudan, e Uganda, Kenya and n Tanzania” to “Eritrea, Ethiopia, extreme southeastern South Sudan, eastern Uganda, Kenya, and northern Tanzania”.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus</em> and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus </em>canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 635, <strong>Yellow-browed Seedeater <em>Serinus whytii</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and many species of <em>Serinus</em> are transferred to the genus <em>Crithagra </em>(Ryan et al. 2004, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of Yellow-browed Seedeater from <em>Serinus whytii</em> to <em>Crithagra whytii</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus</em> and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p>Ryan, P.G., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus </em>canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 635, <strong>Thick-billed Seedeater <em>Serinus burtoni</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and many species of <em>Serinus</em> are transferred to the genus <em>Crithagra</em> (Ryan et al. 2004, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of Thick-billed Seedeater from <em>Serinus burtoni</em> to <em>Crithagra burtoni</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus</em> and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus </em>canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 636, <strong>Tanzania Seedeater <em>Serinus melanochrous</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and many species of <em>Serinus</em> are transferred to the genus <em>Crithagra</em> (Ryan et al. 2004, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of Tanzania Seedeater from <em>Serinus melanochrous</em> to <em>Crithagra melanochroa</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus</em> and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus </em>canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 636, <strong>Principe Seedeater <em>Serinus rufobrunneus</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and many species of <em>Serinus</em> are transferred to the genus <em>Crithagra</em> (Ryan et al. 2004, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of Principe Seedeater from <em>Serinus rufobrunneus</em> to <em>Crithagra rufobrunnea</em>.</p>
<p>Change the spelling of the name of the nominate subspecies from <em>rufobrunneus</em> to <em>rufobrunnea</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and E. Pasquet. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus</em> and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus </em>canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 628, <strong>Sao Tome Grosbeak <em>Neospiza concolor</em></strong></p>
<p>Sao Tome Grosbeak is embedded in the genus <em>Crithagra</em> (Melo et al. 2017). Change the scientific name from <em>Neospiza concolor</em> to <em>Crithagra concolor</em>. Revise the range description from “São Tomé (rediscovered in 1996 after 101-year absence)” to “São Tomé (Gulf of Guinea)”.</p>
<p>Reference:</p>
<p>Melo, M., <NAME>, <NAME>, and <NAME>. 2017. The endangered São Tomé Grosbeak <em>Neospiza concolor</em> is the world’s largest canary. Ibis 159: 673-679.</p>
<p> </p>
<p>page 636, <strong>Black-eared Seedeater <em>Serinus mennelli</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and many species of <em>Serinus</em> are transferred to the genus <em>Crithagra </em>(Ryan et al. 2004, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of Black-eared Seedeater from <em>Serinus mennelli</em> to <em>Crithagra mennelli</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus</em> and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus </em>canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 636, <strong>Streaky-headed Seedeater <em>Serinus gularis</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and many species of <em>Serinus</em> are transferred to the genus <em>Crithagra</em> (Ryan et al. 2004, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of Streaky-headed Seedeater from <em>Serinus gularis</em> to <em>Crithagra gularis</em>.</p>
<p>Change the scientific name of the polytypic group Streaky-headed Seedeater (West African) from <em>Serinus gularis</em> [<em>canicapilla</em> Group] to <em>Crithagra gularis</em> [<em>canicapilla</em> Group].</p>
<p>Revise the range description of subspecies <em>elgonensis</em> from “S Sudan to w Kenya” to “northwestern Central African Republic and adjacent southern Chad, South Sudan, northeastern Democratic Republic of the Congo, northern Uganda, and extreme western Kenya”.</p>
<p>Change the scientific name of the polytypic group Streaky-headed Seedeater (Streaky-headed) from <em>Serinus gularis</em> [<em>gularis</em> Group] to <em>Crithagra gularis</em> [<em>gularis</em> Group].</p>
<p>References:</p>
<p>Nguembock B., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus</em> and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus </em>canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 636, <strong>Reichard’s Seedeater <em>Serinus reichardi</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and many species of <em>Serinus</em> are transferred to the genus <em>Crithagra</em> (Ryan et al. 2004, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of Reichard’s Seedeater from <em>Serinus reichardi</em> to <em>Crithagra reichardi</em>.</p>
<p>Revise the range description of the monotypic group Reichard’s Seedeater (Stripe-breasted) <em>Crithagra reichardi striatipectus</em> from “Mts. of s Sudan to s Ethiopia and central highlands of Kenya” to “eastern and southeastern South Sudan, Ethiopia, and western and central Kenya”.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus</em> and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus </em>canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 636, <strong>Brown-rumped Seedeater <em>Serinus tristriatus</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and many species of <em>Serinus </em>are transferred to the genus <em>Crithagra</em> (Ryan et al. 2004, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of Brown-rumped Seedeater from <em>Serinus tristriatus</em> to <em>Crithagra tristriata</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus</em> and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus </em>canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 636, <strong>Yemen Serin <em>Serinus menachensis</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and many species of <em>Serinus</em> are transferred to the genus <em>Crithagra</em> (Ryan et al. 2004, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of Yemen Serin from <em>Serinus menachensis</em> to <em>Crithagra menachensis</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus</em> and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus </em>canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 633, <strong>Ankober Serin <em>Carduelis ankoberensis</em></strong></p>
<p>Ankober Serin was classified by Fry and Keith (2004) in <em>Carduelis</em>, but it was described as a species of <em>Serinus</em> (Ash 1979), and is considered by most authorities to be closely related to Yemen Serin <em>Crithagra menachensis</em>. The genus <em>Serinus</em> is not monophyletic, and many species of <em>Serinus</em> are transferred to the genus <em>Crithagra</em> (Ryan et al. 2004, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of Ankober Serin from <em>Carduelis ankoberensis</em> to <em>Crithagra ankoberensis</em>.</p>
<p>References:</p>
<p>Ash, J.S. 1979. A new species of serin from Ethiopia. Ibis 121: 1-7.</p>
<p><NAME>., and <NAME> (editors). 2004. The birds of Africa. Volume VII. Academic Press, London.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus</em> and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus </em>canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 636, <strong>Cape Siskin <em>Pseudochloroptila totta</em></strong></p>
<p>The genus <em>Pseudochloroptila</em> is embedded in the group of species transferred from <em>Serinus</em> to the genus <em>Crithagra</em> (Ryan et al. 2004). Change the scientific name of Cape Siskin from <em>Pseudochloroptila totta</em> to <em>Crithagra totta</em>.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus </em>canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p> </p>
<p>page 636, <strong>Drakensberg Siskin <em>Pseudochloroptila symonsi</em></strong></p>
<p>The genus <em>Pseudochloroptila</em> is embedded in the group of species transferred from <em>Serinus</em> to the genus <em>Crithagra</em> (Ryan et al. 2004). Change the scientific name of Drakensberg Siskin from <em>Pseudochloroptila symonsi</em> to <em>Crithagra symonsi</em>.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus </em>canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p> </p>
<p>page 633, <strong>Twite <em>Carduelis flavirostris</em></strong></p>
<p>The genus <em>Carduelis</em> is not monophyletic, and so Twite and the linnets are transferred to the genus <em>Linaria</em> (Arnaiz-Villena et al. 2007, Zuccon et al. 2012). Change the scientific name of Twite from <em>Carduelis flavirostris</em> to <em>Linaria flavirostris</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2007. Bayesian phylogeny of Fringillinae birds: status of the singular African oriole finch <em>Linurgus olivaceus</em> and evolution and heterogeneity of the genus <em>Carpodacus</em>. Acta Zoologia Sinica 53: 826-834.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 633, <strong>Eurasian Linnet <em>Carduelis cannabina</em></strong></p>
<p>The genus <em>Carduelis</em> is not monophyletic, and so Twite and the linnets are transferred to the genus <em>Linaria</em> (Arnaiz-Villena et al. 2007, Zuccon et al. 2012). Change the scientific name of Eurasian Linnet from <em>Carduelis cannabina</em> to <em>Linaria cannabina</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2007. Bayesian phylogeny of Fringillinae birds: status of the singular African oriole finch <em>Linurgus olivaceus</em> and evolution and heterogeneity of the genus <em>Carpodacus</em>. Acta Zoologia Sinica 53: 826-834.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 633, <strong>Yemen Linnet <em>Carduelis yemenensis</em></strong></p>
<p>The genus <em>Carduelis</em> is not monophyletic, and so Twite and the linnets are transferred to the genus <em>Linaria</em> (Arnaiz-Villena et al. 2007, Zuccon et al. 2012). Change the scientific name of Yemen Linnet from <em>Carduelis yemenensis</em> to <em>Linaria yemenensis</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2007. Bayesian phylogeny of Fringillinae birds: status of the singular African oriole finch <em>Linurgus olivaceus</em> and evolution and heterogeneity of the genus <em>Carpodacus</em>. Acta Zoologia Sinica 53: 826-834.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 633, <strong>Warsangli Linnet <em>Carduelis johannis</em></strong></p>
<p>The genus <em>Carduelis</em> is not monophyletic, and so Twite and the linnets are transferred to the genus <em>Linaria</em> (Arnaiz-Villena et al. 2007, Zuccon et al. 2012). Change the scientific name of Warsangli Linnet from <em>Carduelis johannis</em> to <em>Linaria johannis</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2007. Bayesian phylogeny of Fringillinae birds: status of the singular African oriole finch <em>Linurgus olivaceus</em> and evolution and heterogeneity of the genus <em>Carpodacus</em>. Acta Zoologia Sinica 53: 826-834.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 634, <strong>Mountain Serin <em>Serinus estherae</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and as a result Mountain Serin is transferred to the genus <em>Chrysocorythus</em> (Zuccon et al. 2012). Change the scientific name of Mountain Serin from <em>Serinus estherae</em> to <em>Chrysocorythus estherae</em>.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 634, <strong>Citril Finch <em>Carduelis citrinella</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and as a result the citril finches are transferred to the genus <em>Carduelis</em> (Arnaiz-Villena 2007, 2008, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of Citril Finch from <em>Serinus citrinella</em> to <em>Carduelis citrinella</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2007. Bayesian phylogeny of Fringillinae birds: status of the singular African oriole finch <em>Linurgus olivaceus</em> and evolution and heterogeneity of the genus <em>Carpodacus</em>. Acta Zoologia Sinica 53: 826-834.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2008. Mitochondrial DNA phylogenetic definition of a group of ‘arid-zone’ Carduelini finches. The Open Ornithology Journal 1: 1-7.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus</em> and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 634, <strong>Corsican Finch <em>Serinus corsicanus</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and as a result the citril finches are transferred to the genus <em>Carduelis</em> (Arnaiz-Villena 2007, 2008, Nguembock et al. 2009, Zuccon et al. 2012). Change the scientific name of Corsican Finch from <em>Serinus corsicanus</em> to <em>Carduelis corsicana</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2007. Bayesian phylogeny of Fringillinae birds: status of the singular African oriole finch <em>Linurgus olivaceus</em> and evolution and heterogeneity of the genus <em>Carpodacus</em>. Acta Zoologia Sinica 53: 826-834.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2008. <a href="https://benthamopen.com/contents/pdf/TOOENIJ/TOOENIJ-1-1.pdf">Mitochondrial DNA phylogenetic definition of a group of ‘arid-zone’ Carduelini finches</a>. The Open Ornithology Journal 1: 1-7.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2009. Molecular phylogeny of Carduelinae (Aves, Passeriformes, Fringillidae) proves polyphyletic origin of the genera <em>Serinus </em>and <em>Carduelis</em> and suggests redefined generic limits. Molecular Phylogenetics and Evolution 51: 169-181.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 634, <strong>Abyssinian Siskin <em>Serinus nigriceps</em></strong></p>
<p>In accord with prevailing usage (e.g. Fry and Keith 2004, Sinclair and Ryan 2010), change the English name of <em>Serinus nigriceps</em> from Abyssinian Siskin to Ethiopian Siskin.</p>
<p>References:</p>
<p><NAME>., and <NAME> (editors). 2004. The birds of Africa. Volume VII. Academic Press, London.</p>
<p><NAME>., and <NAME>. 2010. Birds of Africa south of the Sahara. Second edition. Struik Nature, Cape Town.</p>
<p> </p>
<p>page 636, <strong>Black-headed Canary <em>Alario alario</em></strong></p>
<p>The genus <em>Alario</em> is embedded in <em>Serinus</em> (Ryan et al. 2004, Arnaiz-Villena et al. 2007, 2008). Change the scientific name of Black-headed Canary from <em>Alario alario</em> to <em>Serinus alario</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2007. Bayesian phylogeny of Fringillinae birds: status of the singular African oriole finch <em>Linurgus olivaceus</em> and evolution and heterogeneity of the genus <em>Carpodacus</em>. Acta Zoologia Sinica 53: 826-834.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2008. <a href="https://benthamopen.com/contents/pdf/TOOENIJ/TOOENIJ-1-1.pdf">Mitochondrial DNA phylogenetic definition of a group of ‘arid-zone’ Carduelini finches</a>. The Open Ornithology Journal 1: 1-7.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus </em>canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p> </p>
<p>page 636, <strong>Damara Canary <em>Alario leucolaemus</em></strong></p>
<p>The genus <em>Alario</em> is embedded in <em>Serinus</em> (Ryan et al. 2004, Arnaiz-Villena et al. 2007, 2008). Change the scientific name of Damara Canary from <em>Alario leucolaemus</em> to <em>Serinus leucolaemus</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2007. Bayesian phylogeny of Fringillinae birds: status of the singular African oriole finch <em>Linurgus olivaceus</em> and evolution and heterogeneity of the genus <em>Carpodacus</em>. Acta Zoologia Sinica 53: 826-834.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2008. <a href="https://benthamopen.com/contents/pdf/TOOENIJ/TOOENIJ-1-1.pdf">Mitochondrial DNA phylogenetic definition of a group of ‘arid-zone’ Carduelini finches</a>. The Open Ornithology Journal 1: 1-7.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2004. Systematics of <em>Serinus </em>canaries and the status of Cape and Yellow-crowned Canaries inferred from mtDNA and morphology. Ostrich 75: 288-294.</p>
<p> </p>
<p>page 634, <strong>Tibetan Serin <em>Serinus thibetanus</em></strong></p>
<p>The genus <em>Serinus</em> is not monophyletic, and as a result Tibetan Serin is transferred to the genus <em>Spinus</em> (Zuccon et al. 2012). Change the scientific name of Tibetan Serin from <em>Serinus thibetanus</em> to <em>Spinus thibetanus</em>.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2012. The phylogenetic relationships and generic limits of finches (Fringillidae). Molecular Phylogenetics and Evolution 62: 581-596.</p>
<p> </p>
<p>page 632, <strong>Lesser Goldfinch <em>Spinus psaltria</em></strong></p>
<p>Correct the subspecies name <em>columbianus</em> to the correct original spelling <em>colombianus</em>.</p>
<p> </p>
<p>page 603, <strong>Shelley’s Rufous Sparrow <em>Passer shelleyi</em></strong></p>
<p>Revise the range description from “SE Sudan to ne Uganda, e Ethiopia and nw Somalia” to “southeastern South Sudan, southern and eastern Ethiopia, northwestern Somalia, northern Uganda, and extreme western Kenya”.</p>
<p> </p>
<p>page 603, <strong>Northern Gray-headed Sparrow <em>Passer griseus</em></strong></p>
<p>Revise the range description of nominate <em>griseus</em> from “Senegal to s Chad, n Cameroon, n Gabon and s Sudan” to “southern Mauritania and Senegal to Cameroon, western Central African Republic, southern Chad, south to northern Gabon”.</p>
<p>Revise the range description of subspecies <em>ugandae</em> from “Angola to s Sudan, n Ethiopia, Kenya and ne Tanzania” to “eastern Sudan, Eritrea, and northwestern Ethiopia south to southern Gabon, Angola, northern Namibia, northern Zimbabwe, Malawi, and central Tanzania”.</p>
<p> </p>
<p>page 603, <strong>Parrot-billed Sparrow <em>Passer gongonensis</em></strong></p>
<p>Revise the range description from “Extreme se Sudan to s Ethiopia, s Somalia, Kenya and ne Tanzania” to “extreme southeastern South Sudan, southern Ethiopia, eastern Uganda, Kenya, southern Somalia, and northeastern Tanzania”.</p>
<p> </p>
<p>page 603, <strong>Arabian Golden Sparrow <em>Passer euchlorus</em></strong></p>
<p>Revise the range description from “SW Saudi Arabia to South Yemen, adj. Ethiopia and n Somalia” to “southwestern Arabian Peninsula (southwestern Saudi Arabia and western Yemen); Djibouti and northwestern Somalia”.</p>
<p> </p>
<p>page 603, <strong>Chestnut Sparrow <em>Passer eminibey</em></strong></p>
<p>Revise the range description from “S Sudan and sw Ethiopia to Uganda, Somalia and n Tanzania” to “central Chad, southwestern Sudan, South Sudan, and southwestern Ethiopia to northeastern Uganda, Kenya, and central and eastern Tanzania”.</p>
<p> </p>
<p>page 603, <strong>Yellow-spotted Petronia <em>Petronia pyrgita</em></strong></p>
<p>Revise the range description of nominate <em>pyrgita</em> from “SE Sudan to s Ethiopia, Somalia, ne Uganda and ne Tanzania” to “Eritrea, Ethiopia, Somalia, southeastern South Sudan, northeastern Uganda, Kenya, and northern Tanzania”.</p>
<p> </p>
<p>page 604, <strong>Bush Petronia <em>Petronia dentata</em></strong></p>
<p>Revise the range description from “Senegambia to s Sudan, Ethiopia and s Arabian Peninsula” to “southern Mauritania to Guinea, east to southern Sudan, South Sudan, Eritrea, and western Ethiopia; western Yemen”.</p>
<p> </p>
<p>page 604, <strong>White-billed Buffalo-Weaver <em>Bubalornis albirostris</em></strong></p>
<p>Revise the range description from “Senegambia to s Sudan, Ethiopia, n Uganda and nw Kenya” to “southern Mauritania to Guinea, east to southern Sudan, western Ethiopia, and northern Eritrea, south to southeastern South Sudan, northeastern Uganda, and northwestern Kenya”.</p>
<p> </p>
<p>page 605, <strong>Speckle-fronted Weaver <em>Sporopipes frontalis</em></strong></p>
<p>Revise the range description of subspecies <em>emini</em> from “S Sudan to ne Uganda, Kenya and n Tanzania” to “southern South Sudan, northern Uganda, western Kenya, and northern and central Tanzania”.</p>
<p> </p>
<p>page 605, <strong>Gray-headed Social-Weaver <em>Pseudonigrita arnaudi</em></strong></p>
<p>Revise the range description of nominate <em>arnaudi</em> from “SW Sudan to s Ethiopia, Kenya and n Tanzania” to “southeastern South Sudan, southern Ethiopia, southern Somalia, northeastern Uganda, Kenya, and extreme northern Tanzania”.</p>
<p> </p>
<p>page 606, <strong>Red-headed Weaver <em>Anaplectes rubriceps</em></strong></p>
<p>Revise the range description of the monotypic group Red-headed Weaver (Northern) <em>Anaplectes rubriceps leuconotos</em> from “Senegambia to s Sudan, Ethiopia, Kenya, Tanzania and Malawi” to “Senegambia to southern Chad, northern Central African Republic, southwestern and southern Sudan, South Sudan, Eritrea, Ethiopia, and northern Somalia, south through Uganda and Kenya to Malawi”.</p>
<p> </p>
<p>page 606, <strong>Baglafecht Weaver <em>Ploceus baglafecht</em></strong></p>
<p>Revise the range description of subspecies <em>eremobius</em> from “NE Democratic Republic of the Congo and sw Sudan” to “northeastern Democratic Republic of the Congo and southwestern South Sudan”.</p>
<p>Revise the range description of the monotypic group Baglafecht Weaver (Emin’s) <em>Ploceus baglafecht emini</em> from “SE Sudan and n Uganda” to “southeastern South Sudan, central and eastern Ethiopia, and northern Uganda”.</p>
<p> </p>
<p>page 606, <strong>Little Weaver <em>Ploceus luteolus</em></strong></p>
<p>Revise the range description from “Senegambia to ne Democratic Republic of the Congo, s Sudan, Ethiopia, Kenya and Tanzania” to “southern Mauritania and Senegambia east to southern Sudan, South Sudan, Eritrea, and Ethiopia to northeastern Democratic Republic of the Congo, Uganda, western Kenya, and northern Tanzania”.</p>
<p> </p>
<p>page 606, <strong>Black-necked Weaver <em>Ploceus nigricollis</em></strong></p>
<p>Revise the range description of nominate <em>nigricollis</em> from “E Cameroon to n Angola, s Sudan, s Democratic Republic of the Congo, w Kenya, nw Tanzania” to “eastern Cameroon south to northern Angola and southern Democratic Republic of the Congo, east to extreme southern South Sudan, Uganda, western Kenya, and northwestern Tanzania”.</p>
<p> </p>
<p>page 606, <strong>Spectacled Weaver <em>Ploceus ocularis</em></strong></p>
<p>Revise the range description of the monotypic group Spectacled Weaver (Yellow-throated) <em>Ploceus ocularis crocatus</em> from “Cameroon to s Sudan, s Ethiopia, Angola and Zambia” to “Cameroon to extreme southern South Sudan and southwestern Ethiopia, south to Angola and through East Africa (west of the Rift Valley) to northeastern Namibia, northern Botswana, Zambia (west of the Muchinga Mountains), and northwestern Zimbabwe”.</p>
<p> </p>
<p>page 606, <strong>Black-billed Weaver <em>Ploceus melanogaster</em></strong></p>
<p>Revise the range description of the monotypic group Black-billed Weaver (Eastern) <em>Ploceus melanogaster stephanophorus</em> from “S Sudan to Uganda, e Democratic Republic of the Congo, Burundi, w Kenya and w Tanzania” to “extreme southeastern South Sudan, eastern Democratic Republic of the Congo, western and eastern Uganda, western Kenya, Rwanda, and extreme northwestern Tanzania”.</p>
<p> </p>
<p>page 607, <strong>Northern Masked-Weaver <em>Ploceus taeniopterus</em></strong></p>
<p>Revise the range description from “Extreme se Sudan to ne Democratic Republic of the Congo, n Uganda and s Ethiopia” to “Sudan, western and southwestern Ethiopia, South Sudan, northeastern Democratic Republic of the Congo, and western Kenya”.</p>
<p> </p>
<p>page 607, <strong>Lesser Masked-Weaver <em>Ploceus intermedius</em></strong></p>
<p>Revise the range description of nominate <em>intermedius</em> from “SE Sudan to s Ethiopia, Somalia, e Democratic Republic of the Congo, Kenya and Tanzania” to “Ethiopia, northwestern and southern Somalia, southeastern South Sudan, northeastern Democratic Republic of the Congo, Kenya, Rwanda, and Tanzania”.</p>
<p> </p>
<p>page 607, <strong>Katanga Masked-Weaver <em>Ploceus katangae</em></strong></p>
<p>We add a previously overlooked subspecies, <em>upembae</em> Verheyen 1953, with range “southeastern Democratic Republic of the Congo (headwaters of the Lualaba River, Katanga)”.</p>
<p>With the addition of subspecies <em>upembae</em>, revise the range description of nominate <em>katangae</em> from “Spottily distributed e Democratic Republic of the Congo and n Zambia” to “spottily distributed in northern Zambia and immediately adjacent southeastern Democratic Republic of the Congo”.</p>
<p> </p>
<p>page 607, <strong>Heuglin’s Masked-Weaver <em>Ploceus heuglini</em></strong></p>
<p>Revise the range description from “Senegambia to sw Sudan, s Uganda and nw Kenya” to “Senegambia to southern Chad and northern Central African Republic; southwestern South Sudan (and southeastern Central African Republic?), northeastern Democratic Republic of the Congo, Uganda and western Kenya”.</p>
<p> </p>
<p>pages 607-608<strong>, Village Weaver <em>Ploceus cucullatus</em></strong></p>
<p>Revise the range description of subspecies <em>bohndorffi </em>from “N Gabon to n Democratic Republic of the Congo, Uganda, w Kenya and sw Sudan” to “northern Gabon, Central African Republic, northern Democratic Republic of the Congo, southwestern South Sudan, Uganda, western Kenya, and northwestern Tanzania”.</p>
<p>Revise the range description of subspecies <em>abyssinicus </em>from “Ethiopia and se Sudan” to “Eritrea, Ethiopia, southeastern Sudan, and eastern South Sudan”.</p>
<p> </p>
<p>page 608, <strong>Black-headed Weaver <em>Ploceus melanocephalus</em></strong></p>
<p>Revise the range description of subspecies <em>dimidiatus </em>from “E Sudan to nw Ethiopia, Uganda, Kenya, Tanzania and Zambia” to “Eritrea, northwestern Ethiopia, southeastern Sudan, southern South Sudan, eastern Democratic Republic of the Congo, Uganda, Rwanda, Burundi, western Kenya, northwestern Tanzania, and northeastern Zambia”.</p>
<p> </p>
<p>page 608, <strong>Golden-backed Weaver <em>Ploceus jacksoni</em></strong></p>
<p>Revise the range description from “Extreme se Sudan to Uganda, Burundi, Kenya and n Tanzania” to “southern South Sudan, Uganda, Burundi, western Kenya, and Tanzania”.</p>
<p> </p>
<p>page 608, <strong>Chestnut Weaver <em>Ploceus rubiginosus</em></strong></p>
<p>Revise the range description of the monotypic group Chestnut Weaver (Chestnut) <em>Ploceus rubiginosus rubiginosus</em> from “Extreme se Sudan to Ethiopia, Somalia, Kenya and n Tanzania” to “Eritrea, Ethiopia, northwestern and southern Somalia, extreme southeastern South Sudan, northeastern Uganda, Kenya, and Tanzania”.</p>
<p> </p>
<p>page 608, <strong>Cinnamon Weaver <em>Ploceus badius</em></strong></p>
<p>Revise the range description from “Sudan (Nile tributaries south to Uganda border)” to “Nile Valley of central and southern Sudan and of South Sudan”.</p>
<p> </p>
<p>page 608, <strong>Yellow-mantled Weaver <em>Ploceus tricolor</em></strong></p>
<p>Revise the range description of subspecies <em>interscapularis</em> from “N Democratic Republic of the Congo to w Uganda, Kenya and extreme sw Sudan” to “patchily distributed in northern and central Democratic Republic of the Congo, extreme southwestern South Sudan, and Uganda; northwestern Angola”.</p>
<p> </p>
<p>page 608, <strong>Brown-capped Weaver <em>Ploceus insignis</em></strong></p>
<p>Revise the range description from “SE Nigeria to n Angola, s Sudan, Kenya and w Tanzania; Bioko” to “southeastern Nigeria, southwestern Cameroon, and Bioko; western Angola; extreme southern South Sudan, eastern Democratic Republic of the Congo, western and eastern Uganda, western Kenya, western Rwanda, western Burundi, and western Tanzania”.</p>
<p> </p>
<p>page 609, <strong>Asian Golden Weaver <em>Ploceus hypoxanthus</em></strong></p>
<p>Change the subspecies name <em>hymenaicus</em> to the older available name <em>chryseus</em> (Pittie and Dickinson 2008).</p>
<p>Reference:</p>
<p><NAME>., and <NAME>. 2008. <a href="https://859e3154-a-62cb3a1a-s-sites.googlegroups.com/site/aasheeshpittie/documents/IB.4.1.AsianGolden.pdf?attachauth=ANoY7coiFhmSohgWQ81SZl40gSw6-bs-V4lFx_-OlRsEn4Y4JWVnWcqBP2m4z0rcjmQBQ2fNOq6skme1wODm1QF1Q4VxJayzHatoJeLdpdi-LDgN5gSgyG8iwZG5uAdXVmpeabI04OCARNe6J65o40UMtxCxWkhzLQcg8dFqLcYnlBcKZj3IFDPR0aT6-fpMtqLGeKafZi1A1D-DkYp5F-VzFgEDrPXVx-8dCP5fdSWLbJ4HhgYROlg%3D&attredirects=0">The oldest name of the mainland form of the Asian Golden Weaver <em>Ploceus hypoxanthus</em></a>. Indian Birds 4: 22.</p>
<p> </p>
<p>page 609, <strong>Compact Weaver <em>Pachyphantes superciliosus</em></strong></p>
<p>Revise the range description from “Sierra Leone to Angola, s Sudan, Ethiopia, Uganda and w Kenya” to “patchily distributed from southwestern Senegal and Guinea-Bissau east to southeastern Sudan, eastern and southern South Sudan, and western Ethiopia south to northern Angola, southern Democratic Republic of the Congo, Rwanda, Burundi, southwestern Kenya, and northwestern Tanzania”.</p>
<p> </p>
<p>page 609, <strong>Cardinal Quelea <em>Quelea cardinalis</em></strong></p>
<p>Revise the range description from “S Sudan to Ethiopia, Uganda, Kenya, e Democratic Republic of the Congo, Zambia, Zimbabwe” to “southern South Sudan, northeastern Democratic Republic of the Congo, Uganda, and western Kenya south to central and eastern Zambia and central Malawi; rare nonbreeding visitor to Ethiopia”.</p>
<p> </p>
<p>page 610, <strong>Northern Red Bishop <em>Euplectes franciscanus</em></strong></p>
<p>Revise the range description from “Senegal to Sudan, e Democratic Republic of the Congo, Ethiopia, n Uganda and nw Kenya” to “southern Mauritania south to northern Liberia, east to Eritrea, Ethiopia, northwestern and southern Somalia, northeastern Democratic Republic of the Congo, Uganda, and Kenya”.</p>
<p> </p>
<p>page 610, <strong>Yellow-crowned Bishop <em>Euplectes afer</em></strong></p>
<p>Revise the range of nominate <em>afer</em> from “S Mauritania to Chad, CAR, w Sudan, n Democratic Republic of the Congo and nw Angola” to “southern Mauritania south patchily to Sierra Leone, east to western Sudan, and south to Congo, northern Democratic Republic of the Congo, and western Angola”.</p>
<p>Revise the range description of subspecies <em>taha</em> from “S Sudan, s Ethiopia and Somalia to South Africa” to “southern Angola east to South Sudan and southwestern Ethiopia, south to South Africa”.</p>
<p> </p>
<p>page 610, <strong>White-winged Widowbird <em>Euplectes albonotatus</em></strong></p>
<p>Revise the range of subspecies <em>eques</em> from “CAR to Sudan, Ethiopia and s Tanzania” to “Central African Republic, southwestern Sudan, southeastern South Sudan, Ethiopia, northeastern Democratic Republic of the Congo, Uganda, Rwanda, Burundi, Kenya, and Tanzania”.</p>
<p> </p>
<p>page 610, <strong>Yellow-mantled Widowbird <em>Euplectes macroura</em></strong></p>
<p>Revise the range description of nominate <em>macroura</em> from “Senegal to s Sudan, Democratic Republic of the Congo, Angola, Zambia and Malawi” to “southern Senegal to Liberia, east to southern Sudan and South Sudan, south to central Angola, Zambia, eastern Zimbabwe, western Mozambique, and Malawi”.</p>
<p> </p>
<p>page 610, <strong>Red-collared Widowbird <em>Euplectes ardens</em></strong></p>
<p>Revise the range description of subspecies <em>laticauda</em> from “Highlands of se Sudan, Eritrea and Ethiopia” to “highlands of eastern South Sudan, Eritrea and Ethiopia”.</p>
<p>Revise the range description of the monotypic group Red-collared Widowbird (Red-collared) <em>Euplectes ardens ardens</em> from “Sierra Leone to Uganda, sw Sudan and Tanzania to South Africa” to “Senegal and Sierra Leone east to southern South Sudan and western Uganda, south to central Angola, Zimbabwe, Zambia, Mozambique, and eastern South Africa”.</p>
<p> </p>
<p>page 610, <strong>Fan-tailed Widowbird <em>Euplectes axillaris</em></strong></p>
<p>Revise the range description of subspecies <em>phoeniceus</em> from “S Sudan to s Ethiopia, Uganda, w Kenya, w Tanzania and Zambia” to “southeastern Sudan, South Sudan, and western Ethiopia south through eastern Democratic Republic of the Congo, Uganda, and western Kenya to eastern Zambia and northern Malawi”.</p>
<p> </p>
<p>page 611, <strong>Grosbeak Weaver <em>Amblyospiza albifrons</em></strong></p>
<p>Revise the range description of subspecies <em>melanota</em> from “Ethiopia, s Sudan, Uganda, w Kenya, ne Democratic Republic of the Congo, nw Tanzania” to “South Sudan, western and central Ethiopia, eastern Democratic Republic of the Congo, Rwanda, Burundi, Uganda, western Kenya, and northwestern Tanzania”.</p>
<p> </p>
<p>page 611, <strong>Gray-headed Nigrita <em>Nigrita canicapillus</em></strong></p>
<p>Revise the range description of subspecies <em>schistaceus</em> from “S Sudan to n Democratic Republic of the Congo, sw Uganda, w Kenya and n Tanzania” to “extreme southern South Sudan, eastern Democratic Republic of the Congo, Rwanda, Burundi, Uganda, western Kenya, and northwestern Tanzania”.</p>
<p> </p>
<p>page 612, <strong>Gray-headed Oliveback <em>Nesocharis capistrata</em></strong></p>
<p>Revise the range description from “Guinea to s Mali, Cameroon, n Democratic Republic of the Congo, sw Sudan and w Uganda” to “Guinea-Bissau and Guinea east to Central African Republic, southwestern South Sudan, northeastern Democratic Republic of the Congo, and western Uganda”.</p>
<p> </p>
<p>page 612, <strong>Yellow-bellied Waxbill <em>Coccopygia quartinia</em></strong></p>
<p>Revise the range description of subspecies <em>kilimensis </em>from “SE Sudan, e Democratic Republic of the Congo, Uganda, Kenya to central Tanzania” to “extreme southeastern South Sudan, eastern Democratic Republic of the Congo, Uganda, Kenya, and northern and central Tanzania”.</p>
<p> </p>
<p>page 612, <strong>Shelley’s Crimson-wing <em>Cryptospiza shelleyi</em></strong></p>
<p>In accord with widespread usage (e.g., Dowsett and Forbes-Watson 1993, Fry and Keith 2004), change the English name of <em>Cryptospiza shelleyi</em> from Shelley’s Crimson-wing to Shelley’s Crimsonwing.</p>
<p>References:</p>
<p><NAME>., and A.D. Forbes-Watson. 1993. Checklist of birds of the Afrotropical and Malagasy regions. Volume 1: species limits and distribution. Tauraco Press, Liège, Belgium.</p>
<p><NAME>., and <NAME> (editors). 2004. The birds of Africa. Volume VII. Academic Press, London.</p>
<p> </p>
<p>page 612, <strong>Dusky Crimson-wing <em>Cryptospiza jacksoni</em></strong></p>
<p>In accord with widespread usage (e.g., Dowsett and Forbes-Watson 1993, Fry and Keith 2004), change the English name of <em>Cryptospiza jacksoni</em> from Dusky Crimson-wing to Dusky Crimsonwing.</p>
<p>References:</p>
<p><NAME>., and A.D. Forbes-Watson. 1993. Checklist of birds of the Afrotropical and Malagasy regions. Volume 1: species limits and distribution. Tauraco Press, Liège, Belgium.</p>
<p><NAME>., and <NAME> (editors). 2004. The birds of Africa. Volume VII. Academic Press, London.</p>
<p> </p>
<p>page 612, <strong>Abyssinian Crimson-wing <em>Cryptospiza salvadorii</em></strong></p>
<p>In accord with widespread usage (e.g., Dowsett and Forbes-Watson 1993, Fry and Keith 2004), change the English name of <em>Cryptospiza salvadorii</em> from Abyssinian Crimson-wing to Abyssinian Crimsonwing.</p>
<p>Revise the range description of subspecies <em>kilimensis</em> from “S Sudan to e Uganda, se Kenya and n Tanzania” to “southern South Sudan, eastern Uganda, Kenya (except the north), and northern Tanzania”.</p>
<p>References:</p>
<p><NAME>., and A.D. Forbes-Watson. 1993. Checklist of birds of the Afrotropical and Malagasy regions. Volume 1: species limits and distribution. Tauraco Press, Liège, Belgium.</p>
<p><NAME>., and <NAME> (editors). 2004. The birds of Africa. Volume VII. Academic Press, London.</p>
<p> </p>
<p>page 612, <strong>Red-faced Crimson-wing <em>Cryptospiza reichenovii</em></strong></p>
<p>In accord with widespread usage (e.g., Dowsett and Forbes-Watson 1993, Fry and Keith 2004), change the English name of <em>Cryptospiza reichenovii</em> from Red-faced Crimson-wing to Red-faced Crimsonwing.</p>
<p>Change the English name of the monotypic group <em>Cryptospiza reichenovii reichenovii</em> from Red-faced Crimson-wing (Western) to Red-faced Crimsonwing (Western).</p>
<p>Change the English name of the polytypic group <em>Cryptospiza reichenovii australis</em>/<em>ocularis</em> from Red-faced Crimson-wing (Eastern) to Red-faced Crimsonwing (Eastern).</p>
<p>References:</p>
<p>Dowsett, R.J., and A.D. Forbes-Watson. 1993. Checklist of birds of the Afrotropical and Malagasy regions. Volume 1: species limits and distribution. Tauraco Press, Liège, Belgium.</p>
<p>Fry, C.H., and S. Keith (editors). 2004. The birds of Africa. Volume VII. Academic Press, London.</p>
<p> </p>
<p>page 612, <strong>Fawn-breasted Waxbill <em>Estrilda paludicola</em></strong></p>
<p>Revise the range description of the monotypic group Fawn-breasted Waxbill (Abyssinian) <em>Estrilda paludicola ochrogaster</em> from “Highlands of extreme e Sudan” to “western Ethiopia and eastern South Sudan”.</p>
<p>Revise the range description of nominate <em>paludicola</em> from “S Sudan, ne Democratic Republic of the Congo, Uganda and w Kenya” to “Central African Republic, southern Sudan, western South Sudan, northeastern Democratic Republic of the Congo, northern and central Uganda, and western Kenya”.</p>
<p> </p>
<p>page 612, <strong>Crimson-rumped Waxbill <em>Estrilda rhodopyga</em></strong></p>
<p>Revise the range description of subspecies <em>centralis</em> from “S Sudan to n Malawi” to “southern South Sudan, central and southern Ethiopia, and northern and southern Somalia south to northern Malawi”.</p>
<p> </p>
<p>page 612, <strong>Black-rumped Waxbill <em>Estrilda troglodytes</em></strong></p>
<p>Revise the range description from “Senegambia to s Sudan, nw Ethiopia, sw Eritrea and w Kenya” to “southern Mauritania and Senegambia east to southern Sudan, South Sudan, Eritrea, Uganda, and western Kenya; also widely introduced, to the Caribbean (Puerto Rico, Guadeloupe, Martinique), Hawaiian Islands, Spain, and Japan”.</p>
<p> </p>
<p>pages 612-613, <strong>Common Waxbill <em>Estrilda astrild</em></strong></p>
<p>Revise the range description of subspecies <em>macmillani</em> from “Central and s Sudan” to “South Sudan”.</p>
<p> </p>
<p>page 613, <strong>Black-cheeked Waxbill <em>Estrilda charmosyna</em></strong></p>
<p>Revise the range description of nominate <em>charmosyna</em> from “Extreme s Sudan to Ethiopia, Somalia, ne Uganda and ne Kenya” to “southern South Sudan, Ethiopia, Somalia, northeastern Uganda, and northern and eastern Kenya”.</p>
<p> </p>
<p>page 613, <strong>Red-headed Bluebill <em>Spermophaga ruficapilla</em></strong></p>
<p>Revise the range description of nominate <em>ruficapilla</em> from “S Sudan to Uganda, w Kenya, w Tanzania and nw Angola” to “northern Angola to southern South Sudan, Uganda, western Kenya, and western Tanzania”.</p>
<p> </p>
<p>page 613, <strong>Red-cheeked Cordonbleu <em>Uraeginthus bengalus</em></strong></p>
<p>Revise the range description of nominate <em>bengalus </em>from “Senegal to Sudan, Eritrea, Ethiopia, Uganda and w Kenya” to “Senegal south to Guinea, east to Eritrea, Ethiopia, northeastern Democratic Republic of the Congo, Uganda, and Kenya (west of the Rift Valley)”.</p>
<p> </p>
<p>page 614, <strong>Blue-capped Cordonbleu <em>Uraeginthus cyanocephalus</em></strong></p>
<p>Revise the range description from “Extreme se Sudan to se Ethiopia, s Somalia, Kenya and n Tanzania” to “extreme southeastern South Sudan, extreme southeastern Ethiopia, southern Somalia, Kenya, and Tanzania”.</p>
<p> </p>
<p>page 614, <strong>Purple Grenadier <em>Granatina ianthinogaster</em></strong></p>
<p>Revise the range description from “Extreme se Sudan to s Ethiopia, Kenya, Uganda, Tanzania” to “southeastern South Sudan, southern Ethiopia, Somalia, northeastern Uganda, Kenya, and Tanzania”.</p>
<p> </p>
<p>page 614, <strong>Dybowski’s Twinspot <em>Euschistospiza dybowskii</em></strong></p>
<p>Revise the range description from “Senegambia and Guinea to n Democratic Republic of the Congo and extreme s Sudan” to “discontinuously distributed from southeastern Senegal to Guinea, east to northeastern Democratic Republic of the Congo, southern South Sudan, and northwestern Uganda”.</p>
<p> </p>
<p>page 614, <strong>Brown Twinspot <em>Clytospiza monteiri</em></strong></p>
<p>Revise the range description from “SE Nigeria to s Chad, s Sudan, w Uganda, w Kenya and n Angola” to “southeastern Nigeria south to northern Angola, east to southern South Sudan, western Uganda, and western Kenya”.</p>
<p> </p>
<p>page 614, <strong>Red-winged Pytilia <em>Pytilia phoenicoptera</em></strong></p>
<p>Revise the range description of subspecies <em>emini</em> from “Cameroon to n Democratic Republic of the Congo, n Uganda and extreme s Sudan” to “Cameroon to South Sudan, northeastern Democratic Republic of the Congo, and northern Uganda”.</p>
<p> </p>
<p>page 614, <strong>Green-winged Pytilia <em>Pytilia melba</em></strong></p>
<p>Revise the range description of subspecies <em>citerior</em> from “Senegal to Burkina Faso, s Chad, n Cameroon and w Sudan” to “southern Mauritania south to Guinea-Bissau, east to southern Sudan, western South Sudan, and northwestern Ethiopia”.</p>
<p>Revise the range description of subspecies <em>soudanensis</em> from “E Sudan to Ethiopia, Somalia, Kenya and ne Tanzania” to “southeastern South Sudan, Ethiopia, Somalia, Uganda, northern and eastern Kenya, and northeastern Tanzania”.</p>
<p> </p>
<p>page 614, <strong>Red-billed Firefinch <em>Lagonosticta senegala</em></strong></p>
<p>Revise the range description of subspecies <em>rhodopsis</em> from “E Mali to Sudan and lowlands of w Eritrea and Ethiopia” to “eastern Mali to western Sudan, northern and western South Sudan, Eritrea, and western Ethiopia”.</p>
<p>Revise the range description of subspecies <em>brunneiceps</em> from “Highlands of Ethiopia, Eritrea and se Sudan” to “southeastern South Sudan, and central Ethiopia”.</p>
<p> </p>
<p>page 614, <strong>Bar-breasted Firefinch <em>Lagonosticta rufopicta</em></strong></p>
<p>Revise the range description of subspecies <em>lateritia</em> from “NE Democratic Republic of the Congo, s Sudan, w Ethiopia, Uganda and Kenya” to “South Sudan, western Ethiopia, northeastern Democratic Republic of the Congo, Uganda, and western Kenya”.</p>
<p> </p>
<p>page 615, <strong>Black-faced Firefinch <em>Lagonosticta larvata</em></strong></p>
<p>Revise the range description of the monotypic group Black-faced Firefinch (Gray) <em>Lagonosticta larvata nigricollis</em> from “Central and s Mali to Sudan and Uganda” to “central and southern Mali east to southwestern Sudan, western and southern South Sudan, northeastern Democratic Republic of the Congo, and northern Uganda”.</p>
<p>Revise the range description of the monotypic group Black-faced Firefinch (Reddish) <em>Lagonosticta larvata larvata</em> from “W Ethiopia and e Sudan” to “southeastern Sudan, eastern South Sudan and western Ethiopia”.</p>
<p> </p>
<p>page 615, <strong>Black-bellied Firefinch <em>Lagonosticta rara</em></strong></p>
<p>Revise the range description of nominate <em>rara</em> from “N Cameroon to n Democratic Republic of the Congo, s Sudan, n Uganda and w Kenya” to “Cameroon to South Sudan, northeastern Democratic Republic of the Congo, northern Uganda, and western Kenya”.</p>
<p> </p>
<p>page 615, <strong>African Firefinch <em>Lagonosticta rubricata</em></strong></p>
<p>Revise the range description of subspecies <em>congica</em> from “Cameroon to s Sudan, Angola, w Uganda and Democratic Republic of the Congo” to “Cameroon south to northern Angola, east to South Sudan, northeastern Democratic Republic of the Congo, and western Uganda”.</p>
<p>Revise the range description of subspecies <em>haematocephala</em> from “Sudan, Ethiopia, Uganda, Kenya, Tanzania to Mozambique” to “eastern South Sudan, Eritrea, and Ethiopia south through Uganda and Kenya to Zambia, eastern Zimbabwe, Malawi, and central Mozambique”.</p>
<p> </p>
<p>page 615, <strong>Jameson’s Firefinch <em>Lagonosticta rhodopareia</em></strong></p>
<p>Revise the range description of nominate <em>rhodopareia</em> from “Ethiopia, Sudan and w Uganda to n Kenya” to “eastern South Sudan, Ethiopia, northeastern Uganda, and northern Kenya”.</p>
<p> </p>
<p>page 615, <strong>Cut-throat <em>Amadina fasciata</em></strong></p>
<p>Revise the range description of nominate <em>fasciata</em> from “Senegal to Sudan” to “southern Mauritania to southern Senegal, east to Sudan and South Sudan”.</p>
<p>Revise the range description of subspecies <em>alexanderi</em> from “Eritrea, Ethiopia, Somalia to se Sudan, Uganda, Kenya, Tanzania” to “Eritrea, Ethiopia, Somalia, southeastern South Sudan, northeastern Uganda, Kenya, and Tanzania”.</p>
<p> </p>
<p>page 615, <strong>Zebra Waxbill <em>Sporaeginthus subflavus</em></strong></p>
<p>Revise the range description of nominate <em>subflavus </em>from “Senegal to s Sudan, Ethiopia and w Kenya; South Yemen” to “extreme southwestern Mauritania and Senegal south to Liberia, east to western Sudan, South Sudan, Ethiopia, northeastern Democratic Republic of the Congo, Uganda, Kenya, and northwestern Tanzania; western Yemen”.</p>
<p> </p>
<p>page 615, <strong>Black-faced Quailfinch <em>Ortygospiza atricollis</em></strong></p>
<p>Revise the range description of subspecies <em>ugandae</em> from “S Sudan, nw Uganda, ne Democratic Republic of the Congo and w Kenya” to “southern Sudan, South Sudan, Uganda, and western Kenya”.</p>
<p> </p>
<p>page 617, <strong>Bronze Mannikin <em>Spermestes cucullata</em></strong></p>
<p>Revise the range description of nominate <em>cucullata</em> from “Senegal to w Kenya and nw Angola; São Tomé and Príncipe” to “Senegal south to Liberia, east to southwestern South Sudan and western Kenya, south to Gabon and Congo; Bioko, Príncipe, São Tomé and Annobón”.</p>
<p>Revise the range description of subspecies <em>scutata</em> from “Ethiopia and Sudan to Natal and e Cape Province; Comoro Is.” to “Ethiopia and adjacent southeastern Sudan and eastern South Sudan to Angola, northern and eastern Botswana, and eastern South Africa; Comoro Islands”.</p>
<p> </p>
<p>page 618, <strong>African Silverbill <em>Euodice cantans</em></strong></p>
<p>Revise the range description of nominate <em>cantans </em>from “Senegal to central Sudan” to “Mauritania and Senegal east to central Sudan and northwestern South Sudan”.</p>
<p>Revise the range description of subspecies <em>orientalis</em> from “Sudan to Ethiopia, Somalia, Kenya and n Tanzania; se Arabia” to “extreme southeastern Egypt, eastern Sudan, Eritrea, Ethiopia, Djibouti, Somalia, southeastern South Sudan, Kenya and northern Tanzania; southern Arabian Peninsula (southwestern Saudi Arabia, Yemen, and western Oman)”.</p>
<p> </p>
<p>page 620, <strong>Pin-tailed Whydah <em>Vidua macroura</em></strong></p>
<p>Revise the range description from “Senegambia to s Chad, s Sudan, s Somalia and South Africa” to “very widespread in Sub-Saharan Africa, from southern Mauritania east to Eritrea, south to South Africa; introduced in the southern United States (California, Florida) and the Caribbean (Puerto Rico)”.</p>
<p> </p>
<p>page 620, <strong>Sahel Paradise-Whydah <em>Vidua orientalis</em></strong></p>
<p>Revise the range description of nominate <em>orientalis</em> from “Chad to Sudan and Eritrea” to “northern Cameroon and southern Chad east to southern Sudan, northern South Sudan, Eritrea, and western Ethiopia”.</p>
<p> </p>
<p>page 620, <strong>Eastern Paradise-Whydah <em>Vidua paradisaea</em></strong></p>
<p>Revise the range description from “Angola to se Sudan, Ethiopia and Kenya south to ne S Africa” to “southeastern Sudan, Ethiopia, and Somalia south to northeastern South Africa, also west across southern Africa to Angola and northern Namibia”.</p>
<p> </p>
<p>page 620, <strong>Steel-blue Whydah <em>Vidua hypocherina</em></strong></p>
<p>Revise the range description from “S Sudan to Ethiopia, Somalia, Uganda, Kenya and Tanzania” to “southern South Sudan, Ethiopia, Somalia, northern Uganda, Kenya, and northern and central Tanzania”.</p>
<p> </p>
<p>page 620, <strong>Straw-tailed Whydah <em>Vidua fischeri</em></strong></p>
<p>Revise the range description from “Extreme se Sudan to Ethiopia, Somalia, Uganda and Tanzania” to “southeastern South Sudan, Ethiopia, Somalia, northeastern Uganda, Kenya, and Tanzania”.</p>
<p> </p>
<p>page 620, <strong>Village Indigobird <em>Vidua chalybeata</em></strong></p>
<p>Revise the range description of subspecies <em>neumanni</em> from “E Mali and Burkina Faso to s Sudan and Eritrea” to “eastern Mali, northern Ivory Coast, and Burkina Faso east to Sudan and South Sudan”.</p>
<p>Revise the range description of subspecies <em>ultramarina</em> from “Ethiopia” to “Eritrea and Ethiopia”.</p>
<p> </p>
<p>page 621, <strong>Wilson’s Indigobird <em>Vidua wilsoni</em></strong></p>
<p>Revise the range description from “Senegambia to n Democratic Republic of the Congo, s Sudan and nw Ethiopia” to “Senegambia to Guinea, east to northeastern Democratic Republic of the Congo, South Sudan, and western Ethiopia”.</p>
<p> </p>
<p>page 621, <strong>Quailfinch Indigobird <em>Vidua nigeriae</em></strong></p>
<p>Revise the range description from “Discontinuous in Gambia, Mali, Nigeria, Cameroon and Sudan” to “patchily distributed in Gambia, Mali, Nigeria, northern Cameroon, and southeastern South Sudan”.</p>
<p> </p>
<p>page 621, <strong>Jambandu Indigobird <em>Vidua raricola</em></strong></p>
<p>Revise the range description from “Sierra Leone to n Nigeria, n Cameroon, n Democratic Republic of the Congo and s Sudan” to “discontinuously distributed from Sierra Leone east to southwestern Sudan, northeastern Democratic Republic of the Congo, southern South Sudan, and western Ethiopia”.</p>
<p> </p>
<p>page 621, <strong>Baka Indigobird <em>Vidua larvaticola</em></strong></p>
<p>Revise the range description from “SE Senegal to Cameroon, n Democratic Republic of the Congo, Sudan and w Ethiopia” to “patchily distributed from Guinea-Bissau and northeastern Ivory Coast east to southwestern and eastern Sudan, northeastern South Sudan, and western Ethiopia”.</p>
<p> </p>
<p>page 621, <strong>Cameroon Indigobird <em>Vidua camerunensis</em></strong></p>
<p>Revise the range description from “Sierra Leone to e Cameroon, ne Democratic Republic of the Congo and s Sudan” to “discontinuously distributed from Sierra Leone east to northeastern Democratic Republic of the Congo and South Sudan”.</p>
<p><strong>GROUPS</strong></p>
<p><strong>GROUPS – newly created groups</strong></p>
<p> </p>
<p>Yellow-legged Tinamou (zabele) <em>Crypturellus noctivagus zabele</em></p>
<p> </p>
<p>Yellow-legged Tinamou (noctivagus) <em>Crypturellus noctivagus noctivagus</em></p>
<p> </p>
<p>Bare-faced Curassow (Belem) <em>Crax fasciolata pinima</em></p>
<p> </p>
<p>Bare-faced Curassow (Bare-faced) <em>Crax fasciolata fasciolata/grayi</em></p>
<p> </p>
<p>Chestnut-necklaced Partridge (Chestnut-necklaced) <em>Arborophila charltonii charltonii/atjenensis</em></p>
<p> </p>
<p>Chestnut-necklaced Partridge (Sabah) <em>Arborophila charltonii graydoni</em></p>
<p> </p>
<p>Moorland Francolin (Moorland) <em>Scleroptila psilolaema psilolaema</em></p>
<p> </p>
<p>Moorland Francolin (Elgon) <em>Scleroptila psilolaema elgonensis</em></p>
<p> </p>
<p>Shelley’s Francolin (Shelley’s) <em>Scleroptila shelleyi shelleyi</em></p>
<p> </p>
<p>Shelley’s Francolin (Whyte’s) <em>Scleroptila shelleyi whytei</em></p>
<p> </p>
<p>Crestless Fireback (Malay) <em>Lophura erythrophthalma erythrophthalma</em></p>
<p> </p>
<p>Crestless Fireback (Bornean) <em>Lophura erythrophthalma pyronota</em></p>
<p> </p>
<p>Black-shouldered Kite (African) <em>Elanus caeruleus caeruleus</em></p>
<p> </p>
<p>Black-shouldered Kite (Asian) <em>Elanus caeruleus [vociferus Group]</em></p>
<p> </p>
<p>Variable Goshawk (Lesser Sundas) <em>Accipiter hiogaster sylvestris</em></p>
<p> </p>
<p>Variable Goshawk (Variable) <em>Accipiter hiogaster [hiogaster Group]</em></p>
<p> </p>
<p>Sharp-shinned Hawk (Madrean) <em>Accipiter striatus madrensis</em></p>
<p> </p>
<p>Woodford’s Rail (Bougainville) <em>Nesoclopeus woodfordi tertius</em></p>
<p> </p>
<p>Woodford’s Rail (Santa Isabel) <em>Nesoclopeus woodfordi immaculatus</em></p>
<p> </p>
<p>Woodford’s Rail (Guadalcanal) <em>Nesoclopeus woodfordi woodfordi</em></p>
<p> </p>
<p>Mangrove Rail (Fonseca) <em>Rallus longirostris berryorum</em></p>
<p> </p>
<p>Red-breasted Dotterel (Northern) <em>Charadrius obscurus aquilonius</em></p>
<p> </p>
<p>Red-breasted Dotterel (Southern) <em>Charadrius obscurus obscurus</em></p>
<p> </p>
<p>Three-banded Plover (African) <em>Charadrius tricollaris tricollaris</em></p>
<p> </p>
<p>Three-banded Plover (Madagascar) <em>Charadrius tricollaris bifrontatus</em></p>
<p> </p>
<p>Painted Buttonquail (New Caledonian) <em>Turnix varius novaecaledoniae</em></p>
<p> </p>
<p>Painted Buttonquail (Painted) <em>Turnix varius varius/scintillans</em></p>
<p> </p>
<p>Metallic Pigeon (Metallic) <em>Columba vitiensis [vitiensis Group]</em></p>
<p> </p>
<p>Metallic Pigeon (Samoan) <em>Columba vitiensis castaneiceps</em></p>
<p> </p>
<p>Eurasian Collared-Dove (Eurasian) <em>Streptopelia decaocto decaocto</em></p>
<p> </p>
<p>Eurasian Collared-Dove (Burmese) <em>Streptopelia decaocto xanthocycla</em></p>
<p> </p>
<p>Bronze Ground-Dove (Western) <em>Alopecoenas beccarii beccarii</em></p>
<p> </p>
<p>Bronze Ground-Dove (Eastern) <em>Alopecoenas beccarii [johannae Group]</em></p>
<p> </p>
<p>Spinifex Pigeon (Rufous-bellied) <em>Geophaps plumifera ferruginea</em></p>
<p> </p>
<p>Spinifex Pigeon (White-bellied) <em>Geophaps plumifera plumifera/leucogaster</em></p>
<p> </p>
<p>Pheasant Pigeon (Green-naped) <em>Otidiphaps nobilis nobilis</em></p>
<p> </p>
<p>Pheasant Pigeon (White-naped) <em>Otidiphaps nobilis aruensis</em></p>
<p> </p>
<p>Pheasant Pigeon (Gray-naped) <em>Otidiphaps nobilis cervicalis</em></p>
<p> </p>
<p>Pheasant Pigeon (Black-naped) <em>Otidiphaps nobilis insularis</em></p>
<p> </p>
<p>Southern Crowned-Pigeon (Sclater’s) <em>Goura scheepmakeri sclaterii</em></p>
<p> </p>
<p>Southern Crowned-Pigeon (Scheepmaker’s) <em>Goura scheepmakeri scheepmakeri</em></p>
<p> </p>
<p>African Green-Pigeon (African) <em>Treron calvus [calvus Group]</em></p>
<p> </p>
<p>African Green-Pigeon (Gray-breasted) <em>Treron calvus delalandii/granti</em></p>
<p> </p>
<p>Red-eared Fruit-Dove (Red-eared) <em>Ptilinopus fischeri fischeri/centralis</em></p>
<p> </p>
<p>Red-eared Fruit-Dove (Lompobattang) <em>Ptilinopus fischeri meridionalis</em></p>
<p> </p>
<p>Ornate Fruit-Dove (Western) <em>Ptilinopus ornatus ornatus</em></p>
<p> </p>
<p>Ornate Fruit-Dove (Eastern) <em>Ptilinopus ornatus gestroi</em></p>
<p> </p>
<p>Crimson-crowned Fruit-Dove (Tongan) <em>Ptilinopus porphyraceus porphyraceus</em></p>
<p> </p>
<p>Crimson-crowned Fruit-Dove (Samoan) <em>Ptilinopus porphyraceus fasciatus</em></p>
<p> </p>
<p>Yellow-bibbed Fruit-Dove (Geelvink) <em>Ptilinopus solomonensis speciosus</em></p>
<p> </p>
<p>Yellow-bibbed Fruit-Dove (Yellow-banded) <em>Ptilinopus solomonensis [solomonensis Group]</em></p>
<p> </p>
<p>Bare-faced Go-away-bird (Brown-faced) <em>Corythaixoides personatus personatus</em></p>
<p> </p>
<p>Bare-faced Go-away-bird (Black-faced) <em>Corythaixoides personatus leopoldi</em></p>
<p> </p>
<p>Crested Coua (Crested) <em>Coua cristata [cristata Group]</em></p>
<p> </p>
<p>Crested Coua (Chestnut-vented) <em>Coua cristata pyropyga</em></p>
<p> </p>
<p>Chestnut-breasted Malkoha (Mentawai) <em>Phaenicophaeus curvirostris oeneicaudus</em></p>
<p> </p>
<p>Chestnut-breasted Malkoha (Chestnut-breasted) <em>Phaenicophaeus curvirostris [curvirostris Group]</em></p>
<p> </p>
<p>Hantu Boobook (Buru) <em>Ninox squamipila hantu</em></p>
<p> </p>
<p>Hantu Boobook (Seram) <em>Ninox squamipila squamipila</em></p>
<p> </p>
<p>Solomons Boobook (West Solomons) <em>Ninox jacquinoti [jacquinoti Group]</em></p>
<p> </p>
<p>Solomons Boobook (Guadalcanal) <em>Ninox jacquinoti granti</em></p>
<p> </p>
<p>Solomons Boobook (Malaita) <em>Ninox jacquinoti malaitae</em></p>
<p> </p>
<p>Solomons Boobook (Makira) <em>Ninox jacquinoti roseoaxillaris</em></p>
<p> </p>
<p>Straight-billed Hermit (bourcieri) <em>Phaethornis bourcieri bourcieri</em></p>
<p> </p>
<p>Straight-billed Hermit (major) <em>Phaethornis bourcieri major</em></p>
<p> </p>
<p>Gray-chinned Hermit (Gray-chinned) <em>Phaethornis griseogularis griseogularis/zonura</em></p>
<p> </p>
<p>Gray-chinned Hermit (Porculla) <em>Phaethornis griseogularis porcullae</em></p>
<p> </p>
<p>Wedge-billed Hummingbird (Eastern) <em>Schistes geoffroyi geoffroyi/chapmani</em></p>
<p> </p>
<p>Wedge-billed Hummingbird (Western) <em>Schistes geoffroyi albogularis</em></p>
<p> </p>
<p>Antillean Mango (Hispaniolan) <em>Anthracothorax dominicus dominicus</em></p>
<p> </p>
<p>Antillean Mango (Puerto Rican) <em>Anthracothorax dominicus aurulentus</em></p>
<p> </p>
<p>Festive Coquette (Butterfly) <em>Lophornis chalybeus verreauxii/klagesi</em></p>
<p> </p>
<p>Festive Coquette (Festive) <em>Lophornis chalybeus chalybeus</em></p>
<p> </p>
<p>Bearded Mountaineer (Western) <em>Oreonympha nobilis albolimbata</em></p>
<p> </p>
<p>Bearded Mountaineer (Eastern) <em>Oreonympha nobilis nobilis</em></p>
<p> </p>
<p>Violet-throated Starfrontlet (Huanuco) <em>Coeligena violifer dichroura</em></p>
<p> </p>
<p>Violet-throated Starfrontlet (Apurimac) <em>Coeligena violifer albicaudata</em></p>
<p> </p>
<p>Violet-throated Starfrontlet (Cuzco) <em>Coeligena violifer osculans</em></p>
<p> </p>
<p>Violet-throated Starfrontlet (Bolivian) <em>Coeligena violifer violifer</em></p>
<p> </p>
<p>Booted Racket-tail (White-booted) <em>Ocreatus underwoodii [underwoodii Group]</em></p>
<p> </p>
<p>Booted Racket-tail (Peruvian) <em>Ocreatus underwoodii peruanus</em></p>
<p> </p>
<p>Booted Racket-tail (Anna’s) <em>Ocreatus underwoodii annae</em></p>
<p> </p>
<p>Booted Racket-tail (Adda’s) <em>Ocreatus underwoodii addae</em></p>
<p> </p>
<p>White-tailed Hillstar (Rufous-gaped) <em>Urochroa bougueri bougueri</em></p>
<p> </p>
<p>White-tailed Hillstar (White-tailed) <em>Urochroa bougueri leucura</em></p>
<p> </p>
<p>Black-throated Brilliant (Black-throated) <em>Heliodoxa schreibersii schreibersii</em></p>
<p> </p>
<p>Black-throated Brilliant (Black-breasted) <em>Heliodoxa schreibersii whitelyana</em></p>
<p> </p>
<p>Brace’s Emerald (Brace’s) <em>Chlorostilbon bracei bracei</em></p>
<p> </p>
<p>Brace’s Emerald (Caribbean) <em>Chlorostilbon bracei elegans</em></p>
<p> </p>
<p>Gray-breasted Sabrewing (largipennis) <em>Campylopterus largipennis largipennis</em></p>
<p> </p>
<p>Gray-breasted Sabrewing (obscurus) <em>Campylopterus largipennis obscurus</em></p>
<p> </p>
<p>Gray-breasted Sabrewing (Dry Forest) <em>Campylopterus largipennis calcirupicola</em></p>
<p> </p>
<p>Gray-breasted Sabrewing (diamantinensis) <em>Campylopterus largipennis diamantinensis</em></p>
<p> </p>
<p>Sapphire-spangled Emerald (Sapphire-spangled) <em>Amazilia lactea lactea/zimmeri</em></p>
<p> </p>
<p>Sapphire-spangled Emerald (Spot-vented) <em>Amazilia lactea bartletti</em></p>
<p> </p>
<p>Berylline Hummingbird (Northern) <em>Amazilia beryllina beryllina/viola</em></p>
<p> </p>
<p>Berylline Hummingbird (Sumichrast’s) <em>Amazilia beryllina [devillei Group]</em></p>
<p> </p>
<p>Black Dwarf Hornbill (Western) <em>Horizocerus hartlaubi hartlaubi</em></p>
<p> </p>
<p>Black Dwarf Hornbill (Eastern) <em>Horizocerus hartlaubi granti</em></p>
<p> </p>
<p>Blue-banded Kingfisher (Malay) <em>Alcedo euryzona peninsulae</em></p>
<p> </p>
<p>Blue-banded Kingfisher (Javan) <em>Alcedo euryzona euryzona</em></p>
<p> </p>
<p>Banded Kingfisher (Banded) <em>Lacedo pulchella [pulchella Group]</em></p>
<p> </p>
<p>Banded Kingfisher (Black-faced) <em>Lacedo pulchella melanops</em></p>
<p> </p>
<p>Lilac-cheeked Kingfisher (Sangihe) <em>Cittura cyanotis sanghirensis</em></p>
<p> </p>
<p>Lilac-cheeked Kingfisher (Sulawesi) <em>Cittura cyanotis cyanotis</em></p>
<p> </p>
<p>Moustached Kingfisher (Bougainville) <em>Actenoides bougainvillei bougainvillei</em></p>
<p> </p>
<p>Moustached Kingfisher (Guadalcanal) <em>Actenoides bougainvillei excelsus</em></p>
<p> </p>
<p>Green-backed Kingfisher (Blue-headed) <em>Actenoides monachus monachus</em></p>
<p> </p>
<p>Green-backed Kingfisher (Black-headed) <em>Actenoides monachus capucinus</em></p>
<p> </p>
<p>Scaly-breasted Kingfisher (Scaly-breasted) <em>Actenoides princeps princeps/erythrorhamphus</em></p>
<p> </p>
<p>Scaly-breasted Kingfisher (Plain-backed) <em>Actenoides princeps regalis</em></p>
<p> </p>
<p>Lilac-breasted Roller (Blue-breasted) <em>Coracias caudatus lorti</em></p>
<p> </p>
<p>Lilac-breasted Roller (Lilac-breasted) <em>Coracias caudatus caudatus</em></p>
<p> </p>
<p>Pied Puffbird (Lesser) <em>Notharchus tectus subtectus</em></p>
<p> </p>
<p>Pied Puffbird (Greater) <em>Notharchus tectus tectus/picatus</em></p>
<p> </p>
<p>Crescent-chested Puffbird (Lesser) <em>Malacoptila striata minor</em></p>
<p> </p>
<p>Crescent-chested Puffbird (Greater) <em>Malacoptila striata striata</em></p>
<p> </p>
<p>Yellow-billed Jacamar (Yellow-billed) <em>Galbula albirostris albirostris</em></p>
<p> </p>
<p>Yellow-billed Jacamar (Cerise-crowned) <em>Galbula albirostris chalcocephala</em></p>
<p> </p>
<p>Gray-throated Barbet (Gray-throated) <em>Gymnobucco bonapartei bonapartei</em></p>
<p> </p>
<p>Gray-throated Barbet (Gray-headed) <em>Gymnobucco bonapartei cinereiceps</em></p>
<p> </p>
<p>Naked-faced Barbet (Naked-faced) <em>Gymnobucco calvus calvus/congicus</em></p>
<p> </p>
<p>Naked-faced Barbet (Pale-throated) <em>Gymnobucco calvus vernayi</em></p>
<p> </p>
<p>White-eared Barbet (White-lined) <em>Stactolaema leucotis leucogrammica</em></p>
<p> </p>
<p>White-eared Barbet (White-eared) <em>Stactolaema leucotis leucotis/kilimensis</em></p>
<p> </p>
<p>Blue-eared Barbet (Blue-eared) <em>Psilopogon duvaucelii cyanotis/orientalis</em></p>
<p> </p>
<p>Blue-eared Barbet (Black-eared) <em>Psilopogon duvaucelii [duvaucelii Group]</em></p>
<p> </p>
<p>Lemon-throated Barbet (Lemon-throated) <em>Eubucco richardsoni richardsoni/nigriceps</em></p>
<p> </p>
<p>Lemon-throated Barbet (Flame-throated) <em>Eubucco richardsoni aurantiicollis/purusianus</em></p>
<p> </p>
<p>Versicolored Barbet (Blue-cowled) <em>Eubucco versicolor steerii</em></p>
<p> </p>
<p>Versicolored Barbet (Blue-chinned) <em>Eubucco versicolor glaucogularis</em></p>
<p> </p>
<p>Versicolored Barbet (Blue-moustached) <em>Eubucco versicolor versicolor</em></p>
<p> </p>
<p>Lettered Aracari (Humboldt’s) <em>Pteroglossus inscriptus humboldti</em></p>
<p> </p>
<p>Lettered Aracari (Lettered) <em>Pteroglossus inscriptus inscriptus</em></p>
<p> </p>
<p>Red-necked Aracari (Western) <em>Pteroglossus bitorquatus sturmii</em></p>
<p> </p>
<p>Red-necked Aracari (Eastern) <em>Pteroglossus bitorquatus bitorquatus/reichenowi</em></p>
<p> </p>
<p>Golden-collared Toucanet (Red-billed) <em>Selenidera reinwardtii reinwardtii</em></p>
<p> </p>
<p>Golden-collared Toucanet (Green-billed) <em>Selenidera reinwardtii langsdorffii</em></p>
<p> </p>
<p>White-throated Toucan (Red-billed) <em>Ramphastos tucanus tucanus</em></p>
<p> </p>
<p>White-throated Toucan (Cuvier’s) <em>Ramphastos tucanus cuvieri/inca</em></p>
<p> </p>
<p>White-barred Piculet (Marajo) <em>Picumnus cirratus macconnelli/confusus</em></p>
<p> </p>
<p>White-barred Piculet (White-barred) <em>Picumnus cirratus [cirratus Group]</em></p>
<p> </p>
<p>Crimson-mantled Woodpecker (Crimson-mantled) <em>Colaptes rivolii [rivolii Group]</em></p>
<p> </p>
<p>Crimson-mantled Woodpecker (Black-crowned) <em>Colaptes rivolii atriceps</em></p>
<p> </p>
<p>Green-barred Woodpecker (Green-barred) <em>Colaptes melanochloros melanochloros/nattereri</em></p>
<p> </p>
<p>Green-barred Woodpecker (Golden-breasted) <em>Colaptes melanochloros [melanolaimus Group]</em></p>
<p> </p>
<p>Andean Flicker (Northern) <em>Colaptes rupicola cinereicapillus</em></p>
<p> </p>
<p>Andean Flicker (Southern) <em>Colaptes rupicola rupicola/puna</em></p>
<p> </p>
<p>Lineated Woodpecker (Lineated) <em>Dryocopus lineatus [lineatus Group]</em></p>
<p> </p>
<p>Lineated Woodpecker (Dusky-winged) <em>Dryocopus lineatus fuscipennis</em></p>
<p> </p>
<p>Checker-throated Woodpecker (Checker-throated) <em>Picus mentalis humii</em></p>
<p> </p>
<p>Checker-throated Woodpecker (Javan) <em>Picus mentalis mentalis</em></p>
<p> </p>
<p>Buff-rumped Woodpecker (Buff-rumped) <em>Meiglyptes tristis grammithorax</em></p>
<p> </p>
<p>Buff-rumped Woodpecker (White-rumped) <em>Meiglyptes tristis tristis</em></p>
<p> </p>
<p>Gray-and-buff Woodpecker (Gray-and-buff) <em>Hemicircus concretus sordidus</em></p>
<p> </p>
<p>Gray-and-buff Woodpecker (Red-crested) <em>Hemicircus concretus concretus</em></p>
<p> </p>
<p>Blue-rumped Parrot (Blue-rumped) <em>Psittinus cyanurus cyanurus/pontius</em></p>
<p> </p>
<p>Blue-rumped Parrot (Simeulue) <em>Psittinus cyanurus abbotti</em></p>
<p> </p>
<p>Double-eyed Fig-Parrot (Double-eyed) <em>Cyclopsitta diophthalma [diophthalma Group]</em></p>
<p> </p>
<p>Double-eyed Fig-Parrot (Coxen’s) <em>Cyclopsitta diophthalma coxeni</em></p>
<p> </p>
<p>Large Fig-Parrot (Large) <em>Psittaculirostris desmarestii [desmarestii Group]</em></p>
<p> </p>
<p>Large Fig-Parrot (Yellow-naped) <em>Psittaculirostris desmarestii godmani</em></p>
<p> </p>
<p>Large Fig-Parrot (Red-faced) <em>Psittaculirostris desmarestii cervicalis</em></p>
<p> </p>
<p>Yellow-and-green Lorikeet (Mustard-capped) <em>Trichoglossus flavoviridis meyeri</em></p>
<p> </p>
<p>Yellow-and-green Lorikeet (Yellow-and-green) <em>Trichoglossus flavoviridis flavoviridis</em></p>
<p> </p>
<p>Festive Parrot (Northern) <em>Amazona festiva bodini</em></p>
<p> </p>
<p>Festive Parrot (Southern) <em>Amazona festiva festiva</em></p>
<p> </p>
<p>Mealy Parrot (Northern) <em>Amazona farinosa guatemalae/virenticeps</em></p>
<p> </p>
<p>Mealy Parrot (Southern) <em>Amazona farinosa farinosa</em></p>
<p> </p>
<p>White-bellied Parrot (Black-legged) <em>Pionites leucogaster xanthomerius</em></p>
<p> </p>
<p>White-bellied Parrot (Yellow-tailed) <em>Pionites leucogaster xanthurus</em></p>
<p> </p>
<p>White-bellied Parrot (Green-thighed) <em>Pionites leucogaster leucogaster</em></p>
<p> </p>
<p>Red-shouldered Macaw (Northern) <em>Diopsittaca nobilis nobilis</em></p>
<p> </p>
<p>Red-shouldered Macaw (Southern) <em>Diopsittaca nobilis cumanensis/longipennis</em></p>
<p> </p>
<p>Black-and-red Broadbill (Irrawaddy) <em>Cymbirhynchus macrorhynchos affinis</em></p>
<p> </p>
<p>Black-and-red Broadbill (Black-and-red) <em>Cymbirhynchus macrorhynchos [macrorhynchos Group]</em></p>
<p> </p>
<p>Silver-breasted Broadbill (Gray-browed) <em>Serilophus lunatus rubropygius</em></p>
<p> </p>
<p>Silver-breasted Broadbill (Silver-breasted) <em>Serilophus lunatus [lunatus Group]</em></p>
<p> </p>
<p>Banded Broadbill (Banded) <em>Eurylaimus javanicus [harterti Group]</em></p>
<p> </p>
<p>Banded Broadbill (Javan) <em>Eurylaimus javanicus javanicus</em></p>
<p> </p>
<p>Ivory-breasted Pitta (Ivory-breasted) <em>Pitta maxima maxima</em></p>
<p> </p>
<p>Ivory-breasted Pitta (Morotai) <em>Pitta maxima morotaiensis</em></p>
<p> </p>
<p>Black-crested Antshrike (Streak-fronted) <em>Sakesphorus canadensis pulchellus</em></p>
<p> </p>
<p>Black-crested Antshrike (Black-crested) <em>Sakesphorus canadensis [canadensis Group]</em></p>
<p> </p>
<p>Rufous-capped Antshrike (Northern) <em>Thamnophilus ruficapillus [subfasciatus Group]</em></p>
<p> </p>
<p>Rufous-capped Antshrike (Southern) <em>Thamnophilus ruficapillus ruficapillus/cochabambae</em></p>
<p> </p>
<p>Ornate Antwren (Western) <em>Epinecrophylla ornata [ornata Group]</em></p>
<p> </p>
<p>Ornate Antwren (Eastern) <em>Epinecrophylla ornata hoffmannsi</em></p>
<p> </p>
<p>Rufous-winged Antwren (Northern) <em>Herpsilochmus rufimarginatus [scapularis Group]</em></p>
<p> </p>
<p>Rufous-winged Antwren (Southern) <em>Herpsilochmus rufimarginatus rufimarginatus</em></p>
<p> </p>
<p>Dusky Antbird (tyrannina/crepera) <em>Cercomacroides tyrannina tyrannina/crepera</em></p>
<p> </p>
<p>Dusky Antbird (saturatior/vicina) <em>Cercomacroides tyrannina saturatior/vicina</em></p>
<p> </p>
<p>Chestnut-backed Antbird (Chestnut-backed) <em>Poliocrania exsul [exsul Group]</em></p>
<p> </p>
<p>Chestnut-backed Antbird (Short-tailed) <em>Poliocrania exsul maculifer/cassini</em></p>
<p> </p>
<p>Collared Crescentchest (Double-collared) <em>Melanopareia torquata bitorquata</em></p>
<p> </p>
<p>Collared Crescentchest (Collared) <em>Melanopareia torquata torquata/rufescens</em></p>
<p> </p>
<p>Tawny Antpitta (Northern) <em>Grallaria quitensis alticola</em></p>
<p> </p>
<p>Tawny Antpitta (Western) <em>Grallaria quitensis quitensis</em></p>
<p> </p>
<p>Tawny Antpitta (Southern) <em>Grallaria quitensis atuensis</em></p>
<p> </p>
<p>Long-tailed Woodcreeper (Little) <em>Deconychura longicauda [typica Group]</em></p>
<p> </p>
<p>Long-tailed Woodcreeper (Northern) <em>Deconychura longicauda longicauda</em></p>
<p> </p>
<p>Long-tailed Woodcreeper (Southern) <em>Deconychura longicauda [pallida Group]</em></p>
<p> </p>
<p>Wedge-billed Woodcreeper (pectoralis Group) <em>Glyphorynchus spirurus [pectoralis Group]</em></p>
<p> </p>
<p>Wedge-billed Woodcreeper (spirurus Group) <em>Glyphorynchus spirurus [spirurus Group]</em></p>
<p> </p>
<p>Wedge-billed Woodcreeper (albigularis) <em>Glyphorynchus spirurus albigularis</em></p>
<p> </p>
<p>Wedge-billed Woodcreeper (cuneatus Group) <em>Glyphorynchus spirurus [cuneatus Group]</em></p>
<p> </p>
<p>Cinnamon-throated Woodcreeper (devillei) <em>Dendrexetastes rufigula devillei</em></p>
<p> </p>
<p>Cinnamon-throated Woodcreeper (rufigula) <em>Dendrexetastes rufigula rufigula</em></p>
<p> </p>
<p>Cinnamon-throated Woodcreeper (paraensis/moniliger) <em>Dendrexetastes rufigula paraensis/moniliger</em></p>
<p> </p>
<p>Spot-crowned Woodcreeper (Northern) <em>Lepidocolaptes affinis affinis/lignicida</em></p>
<p> </p>
<p>Spot-crowned Woodcreeper (Southern) <em>Lepidocolaptes affinis neglectus</em></p>
<p> </p>
<p>Plain Xenops (mexicanus Group) <em>Xenops minutus [mexicanus Group]</em></p>
<p> </p>
<p>Plain Xenops (genibarbis Group) <em>Xenops minutus [genibarbis Group]</em></p>
<p> </p>
<p>Plain Xenops (White-throated) <em>Xenops minutus minutus</em></p>
<p> </p>
<p>Blackish Cinclodes (Black) <em>Cinclodes antarcticus maculirostris</em></p>
<p> </p>
<p>Blackish Cinclodes (Blackish) <em>Cinclodes antarcticus antarcticus</em></p>
<p> </p>
<p>Pale-browed Treehunter (Pale-tailed) <em>Cichlocolaptes leucophrus leucophrus</em></p>
<p> </p>
<p>Pale-browed Treehunter (Rufous-tailed) <em>Cichlocolaptes leucophrus holti</em></p>
<p> </p>
<p>Buff-throated Foliage-gleaner (turdinus) <em>Automolus ochrolaemus turdinus</em></p>
<p> </p>
<p>Buff-throated Foliage-gleaner (ochrolaemus) <em>Automolus ochrolaemus ochrolaemus</em></p>
<p> </p>
<p>Buff-throated Foliage-gleaner (auricularis) <em>Automolus ochrolaemus auricularis</em></p>
<p> </p>
<p>White-throated Barbtail (White-throated) <em>Premnoplex tatei tatei</em></p>
<p> </p>
<p>White-throated Barbtail (Paria) <em>Premnoplex tatei pariae</em></p>
<p> </p>
<p>Marcapata Spinetail (Pale-crowned) <em>Cranioleuca marcapatae weskei</em></p>
<p> </p>
<p>Marcapata Spinetail (Rufous-crowned) <em>Cranioleuca marcapatae marcapatae</em></p>
<p> </p>
<p>Light-crowned Spinetail (White-crowned) <em>Cranioleuca albiceps albiceps</em></p>
<p> </p>
<p>Light-crowned Spinetail (Buffy-crowned) <em>Cranioleuca albiceps discolor</em></p>
<p> </p>
<p>Greater Antillean Elaenia (Jamaican) <em>Elaenia fallax fallax</em></p>
<p> </p>
<p>Greater Antillean Elaenia (Hispaniolan) <em>Elaenia fallax cherriei</em></p>
<p> </p>
<p>Highland Elaenia (Highland) <em>Elaenia obscura obscura</em></p>
<p> </p>
<p>Highland Elaenia (Brazilian) <em>Elaenia obscura sordida</em></p>
<p> </p>
<p>Olive-striped Flycatcher (Olive-streaked) <em>Mionectes olivaceus olivaceus</em></p>
<p> </p>
<p>Olive-striped Flycatcher (Olive-striped) <em>Mionectes olivaceus [galbinus Group]</em></p>
<p> </p>
<p>Slaty-capped Flycatcher (transandinus) <em>Leptopogon superciliaris transandinus</em></p>
<p> </p>
<p>Ornate Flycatcher (Western) <em>Myiotriccus ornatus ornatus/stellatus</em></p>
<p> </p>
<p>Ornate Flycatcher (Eastern) <em>Myiotriccus ornatus phoenicurus/aureiventris</em></p>
<p> </p>
<p>Bronze-olive Pygmy-Tyrant (annectens/berlepschi) <em>Pseudotriccus pelzelni annectens/berlepschi</em></p>
<p> </p>
<p>Bronze-olive Pygmy-Tyrant (pelzelni/peruvianus) <em>Pseudotriccus pelzelni pelzelni/peruvianus</em></p>
<p> </p>
<p>Tawny-crowned Pygmy-Tyrant (Tawny-crowned) <em>Euscarthmus meloryphus meloryphus/paulus</em></p>
<p> </p>
<p>Tawny-crowned Pygmy-Tyrant (Tawny-fronted) <em>Euscarthmus meloryphus fulviceps</em></p>
<p> </p>
<p>Olivaceous Flatbill (Western) <em>Rhynchocyclus olivaceus [aequinoctialis Group]</em></p>
<p> </p>
<p>Olivaceous Flatbill (Eastern) <em>Rhynchocyclus olivaceus [olivaceus Group]</em></p>
<p> </p>
<p>White-throated Spadebill (Western) <em>Platyrinchus mystaceus [albogularis Group]</em></p>
<p> </p>
<p>White-throated Spadebill (Eastern) <em>Platyrinchus mystaceus [mystaceus Group]</em></p>
<p> </p>
<p>Cliff Flycatcher (Cliff) <em>Hirundinea ferruginea ferruginea/sclateri</em></p>
<p> </p>
<p>Cliff Flycatcher (Swallow) <em>Hirundinea ferruginea bellicosa/pallidior</em></p>
<p> </p>
<p>Greater Pewee (Mexican) <em>Contopus pertinax pertinax</em></p>
<p> </p>
<p>Greater Pewee (Central American) <em>Contopus pertinax minor</em></p>
<p> </p>
<p>Riverside Tyrant (Riverside) <em>Knipolegus orenocensis orenocensis/xinguensis</em></p>
<p> </p>
<p>Riverside Tyrant (Sclater’s) <em>Knipolegus orenocensis sclateri</em></p>
<p> </p>
<p>White-winged Black-Tyrant (White-rumped) <em>Knipolegus aterrimus heterogyna</em></p>
<p> </p>
<p>White-winged Black-Tyrant (White-winged) <em>Knipolegus aterrimus aterrimus/anthracinus</em></p>
<p> </p>
<p>Brown-backed Chat-Tyrant (Brown-backed) <em>Ochthoeca fumicolor [fumicolor Group]</em></p>
<p> </p>
<p>Brown-backed Chat-Tyrant (Rufous-browed) <em>Ochthoeca fumicolor superciliosa</em></p>
<p> </p>
<p>Green Manakin (Choco) <em>Cryptopipo holochlora litae/suffusa</em></p>
<p> </p>
<p>Green Manakin (Green) <em>Cryptopipo holochlora holochlora/viridior</em></p>
<p> </p>
<p>Black-tailed Tityra (Eastern) <em>Tityra cayana braziliensis</em></p>
<p> </p>
<p>Black-tailed Tityra (Western) <em>Tityra cayana cayana</em></p>
<p> </p>
<p>Gray-collared Becard (Eastern) <em>Pachyramphus major [major Group]</em></p>
<p> </p>
<p>Gray-collared Becard (Western) <em>Pachyramphus major uropygialis</em></p>
<p> </p>
<p>Dusky Myzomela (Red-brown) <em>Myzomela obscura rubrobrunnea</em></p>
<p> </p>
<p>Dusky Myzomela (Obi) <em>Myzomela obscura rubrotincta</em></p>
<p> </p>
<p>Dusky Myzomela (Moluccan) <em>Myzomela obscura simplex/mortyana</em></p>
<p> </p>
<p>Dusky Myzomela (Dusky) <em>Myzomela obscura [obscura Group]</em></p>
<p> </p>
<p>Red Myzomela (Red) <em>Myzomela cruentata cruentata/coccinea</em></p>
<p> </p>
<p>Red Myzomela (Reddish) <em>Myzomela cruentata [erythrina Group]</em></p>
<p> </p>
<p>Sulawesi Myzomela (Sulawesi) <em>Myzomela chloroptera [chloroptera Group]</em></p>
<p> </p>
<p>Sulawesi Myzomela (Bacan) <em>Myzomela chloroptera batjanensis</em></p>
<p> </p>
<p>Wakolo Myzomela (Seram) <em>Myzomela wakoloensis elisabethae</em></p>
<p> </p>
<p>Wakolo Myzomela (Buru) <em>Myzomela wakoloensis wakoloensis</em></p>
<p> </p>
<p>Red-collared Myzomela (Red-collared) <em>Myzomela rosenbergii rosenbergii/wahgiensis</em></p>
<p> </p>
<p>Red-collared Myzomela (Long-billed) <em>Myzomela rosenbergii longirostris</em></p>
<p> </p>
<p>Blue-faced Honeyeater (White-quilled) <em>Entomyzon cyanotis albipennis</em></p>
<p> </p>
<p>Blue-faced Honeyeater (Blue-faced) <em>Entomyzon cyanotis [cyanotis Group]</em></p>
<p> </p>
<p>Fan-tailed Gerygone (Fan-tailed) <em>Gerygone flavolateralis [flavolateralis Group]</em></p>
<p> </p>
<p>Fan-tailed Gerygone (Rennell) <em>Gerygone flavolateralis citrina</em></p>
<p> </p>
<p>Crested Satinbird (Red) <em>Cnemophilus macgregorii sanguineus</em></p>
<p> </p>
<p>Crested Satinbird (Yellow) <em>Cnemophilus macgregorii macgregorii</em></p>
<p> </p>
<p>Spotted Berrypecker (Thick-billed) <em>Melanocharis crassirostris crassirostris</em></p>
<p> </p>
<p>Spotted Berrypecker (Spotted) <em>Melanocharis crassirostris piperata/viridescens</em></p>
<p> </p>
<p>Crested Berrypecker (Eastern) <em>Paramythia montium montium/brevicauda</em></p>
<p> </p>
<p>Crested Berrypecker (Western) <em>Paramythia montium olivacea/alpina</em></p>
<p> </p>
<p>Western Whipbird (Black-throated) <em>Psophodes nigrogularis nigrogularis/oberon</em></p>
<p> </p>
<p>Western Whipbird (White-bellied) <em>Psophodes nigrogularis leucogaster/lashmari</em></p>
<p> </p>
<p>Gray-chinned Minivet (Gray-chinned) <em>Pericrocotus solaris [solaris Group]</em></p>
<p> </p>
<p>Gray-chinned Minivet (Gray-throated) <em>Pericrocotus solaris montanus/cinereigula</em></p>
<p> </p>
<p>Large Cuckooshrike (Indian) <em>Coracina macei macei/layardi</em></p>
<p> </p>
<p>Large Cuckooshrike (Large) <em>Coracina macei [nipalensis Group]</em></p>
<p> </p>
<p>Large Cuckooshrike (Malay) <em>Coracina macei larutensis</em></p>
<p> </p>
<p>Bar-bellied Cuckooshrike (Bar-bellied) <em>Coracina striata [striata Group]</em></p>
<p> </p>
<p>Bar-bellied Cuckooshrike (Visayan) <em>Coracina striata panayensis</em></p>
<p> </p>
<p>Black-and-white Triller (Northern) <em>Lalage melanoleuca melanoleuca</em></p>
<p> </p>
<p>Black-and-white Triller (Southern) <em>Lalage melanoleuca minor</em></p>
<p> </p>
<p>Solomons Cuckooshrike (Solomon) <em>Edolisoma holopolium holopolium/tricolor</em></p>
<p> </p>
<p>Solomons Cuckooshrike (New Georgia) <em>Edolisoma holopolium pygmaeum</em></p>
<p> </p>
<p>Rusty-breasted Whistler (Salayar) <em>Pachycephala fulvotincta teysmanni</em></p>
<p> </p>
<p>Rusty-breasted Whistler (Rusty-breasted) <em>Pachycephala fulvotincta [fulvotincta Group]</em></p>
<p> </p>
<p>Chinese Gray Shrike (Chinese) <em>Lanius sphenocercus sphenocercus</em></p>
<p> </p>
<p>Chinese Gray Shrike (Giant) <em>Lanius sphenocercus giganteus</em></p>
<p> </p>
<p>Scrub Greenlet (Yellow-green) <em>Hylophilus flavipes viridiflavus/xuthus</em></p>
<p> </p>
<p>Scrub Greenlet (Scrub) <em>Hylophilus flavipes [flavipes Group]</em></p>
<p> </p>
<p>Scrub Greenlet (Tobago) <em>Hylophilus flavipes insularis</em></p>
<p> </p>
<p>Lemon-chested Greenlet (Lemon-chested) <em>Hylophilus thoracicus griseiventris/aemulus</em></p>
<p> </p>
<p>Lemon-chested Greenlet (Rio de Janeiro) <em>Hylophilus thoracicus thoracicus</em></p>
<p> </p>
<p>Slaty-capped Shrike-Vireo (Pale-legged) <em>Vireolanius leucotis mikettae</em></p>
<p> </p>
<p>Slaty-capped Shrike-Vireo (Slaty-capped) <em>Vireolanius leucotis [leucotis Group]</em></p>
<p> </p>
<p>Tawny-crowned Greenlet (Rufous-fronted) <em>Tunchiornis ochraceiceps ferrugineifrons/viridior</em></p>
<p> </p>
<p>Tawny-crowned Greenlet (Olive-crowned) <em>Tunchiornis ochraceiceps luteifrons</em></p>
<p> </p>
<p>Olive-brown Oriole (Timor) <em>Oriolus melanotis melanotis</em></p>
<p> </p>
<p>Olive-brown Oriole (Wetar) <em>Oriolus melanotis finschi</em></p>
<p> </p>
<p>Black-and-crimson Oriole (Black-and-crimson) <em>Oriolus cruentus [consanguineus Group]</em></p>
<p> </p>
<p>Black-and-crimson Oriole (Javan) <em>Oriolus cruentus cruentus</em></p>
<p> </p>
<p>Hair-crested Drongo (Hair-crested) <em>Dicrurus hottentottus [hottentottus Group]</em></p>
<p> </p>
<p>Hair-crested Drongo (Short-tailed) <em>Dicrurus hottentottus striatus/samarensis</em></p>
<p> </p>
<p>Northern Fantail (Seram) <em>Rhipidura rufiventris cinerea</em></p>
<p> </p>
<p>Northern Fantail (Banda Sea) <em>Rhipidura rufiventris hoedti</em></p>
<p> </p>
<p>Northern Fantail (Biak) <em>Rhipidura rufiventris kordensis</em></p>
<p> </p>
<p>Northern Fantail (Northern) <em>Rhipidura rufiventris isura</em></p>
<p> </p>
<p>Rusty-bellied Fantail (Sulawesi) <em>Rhipidura teysmanni teysmanni/toradja</em></p>
<p> </p>
<p>Rusty-bellied Fantail (Taliabu) <em>Rhipidura teysmanni sulaensis</em></p>
<p> </p>
<p>Arafura Fantail (Supertramp) <em>Rhipidura dryas [semicollaris Group]</em></p>
<p> </p>
<p>Arafura Fantail (Arafura) <em>Rhipidura dryas dryas</em></p>
<p> </p>
<p>Brown Fantail (Bougainville) <em>Rhipidura drownei drownei</em></p>
<p> </p>
<p>Brown Fantail (Guadalcanal) <em>Rhipidura drownei ocularis</em></p>
<p> </p>
<p>Streaked Fantail (Vanuatu) <em>Rhipidura verreauxi spilodera</em></p>
<p> </p>
<p>Streaked Fantail (Fiji) <em>Rhipidura verreauxi layardi/erythronota</em></p>
<p> </p>
<p>Streaked Fantail (Taveuni) <em>Rhipidura verreauxi rufilateralis</em></p>
<p> </p>
<p>Streaked Fantail (New Caledonia) <em>Rhipidura verreauxi verreauxi</em></p>
<p> </p>
<p>African Crested-Flycatcher (Eastern) <em>Trochocercus cyanomelas [bivittatus Group]</em></p>
<p> </p>
<p>African Crested-Flycatcher (Southern) <em>Trochocercus cyanomelas cyanomelas/segregus</em></p>
<p> </p>
<p>Rufous Paradise-Flycatcher (Northern) <em>Terpsiphone cinnamomea unirufa</em></p>
<p> </p>
<p>Rufous Paradise-Flycatcher (Southern) <em>Terpsiphone cinnamomea cinnamomea/talautensis</em></p>
<p> </p>
<p>Black-tailed Monarch (Djaul) <em>Symposiachrus verticalis ateralbus</em></p>
<p> </p>
<p>Black-tailed Monarch (Black-tailed) <em>Symposiachrus verticalis verticalis</em></p>
<p> </p>
<p>Dull Flycatcher (Mussau) <em>Myiagra hebetior hebetior</em></p>
<p> </p>
<p>Dull Flycatcher (Velvet) <em>Myiagra hebetior eichhorni</em></p>
<p> </p>
<p>Dull Flycatcher (Djaul) <em>Myiagra hebetior cervinicolor</em></p>
<p> </p>
<p>Black Magpie (Malay) <em>Platysmurus leucopterus leucopterus</em></p>
<p> </p>
<p>Black Magpie (Bornean) <em>Platysmurus leucopterus aterrimus</em></p>
<p> </p>
<p>Black-collared Jay (Black-collared) <em>Cyanolyca armillata armillata/meridana</em></p>
<p> </p>
<p>Black-collared Jay (Quindio) <em>Cyanolyca armillata quindiuna</em></p>
<p> </p>
<p>Paradise-crow (Halmahera) <em>Lycocorax pyrrhopterus pyrrhopterus/morotensis</em></p>
<p> </p>
<p>Paradise-crow (Obi) <em>Lycocorax pyrrhopterus obiensis</em></p>
<p> </p>
<p>Torrent Flycatcher (Torrent) <em>Monachella muelleriana muelleriana</em></p>
<p> </p>
<p>Torrent Flycatcher (New Britain) <em>Monachella muelleriana coultasi</em></p>
<p> </p>
<p>Lemon-bellied Flycatcher (Lemon-bellied) <em>Microeca flavigaster [flavigaster Group]</em></p>
<p> </p>
<p>Lemon-bellied Flycatcher (Kimberley) <em>Microeca flavigaster tormenti</em></p>
<p> </p>
<p>Pacific Robin (Pacific) <em>Petroica multicolor [pusilla Group]</em></p>
<p> </p>
<p>Pacific Robin (Norfolk) <em>Petroica multicolor multicolor</em></p>
<p> </p>
<p>Dunn’s Lark (African) <em>Eremalauda dunni dunni</em></p>
<p> </p>
<p>Dunn’s Lark (Arabian) <em>Eremalauda dunni eremodites</em></p>
<p> </p>
<p>Common House-Martin (Western) <em>Delichon urbicum urbicum/meridionale</em></p>
<p> </p>
<p>Common House-Martin (Eastern) <em>Delichon urbicum lagopodum</em></p>
<p> </p>
<p>Plain-tailed Wren (Plain-tailed) <em>Pheugopedius euophrys [euophrys Group]</em></p>
<p> </p>
<p>Plain-tailed Wren (Gray-browed) <em>Pheugopedius euophrys schulenbergi</em></p>
<p> </p>
<p>Chestnut-breasted Wren (Northern) <em>Cyphorhinus thoracicus dichrous</em></p>
<p> </p>
<p>Chestnut-breasted Wren (Southern) <em>Cyphorhinus thoracicus thoracicus</em></p>
<p> </p>
<p>Musician Wren (Imeri) <em>Cyphorhinus arada transfluvialis</em></p>
<p> </p>
<p>Musician Wren (Gray-eared) <em>Cyphorhinus arada salvini</em></p>
<p> </p>
<p>Musician Wren (Musician) <em>Cyphorhinus arada arada</em></p>
<p> </p>
<p>Musician Wren (Gray-flanked) <em>Cyphorhinus arada griseolateralis</em></p>
<p> </p>
<p>Musician Wren (Rondonia) <em>Cyphorhinus arada interpositus</em></p>
<p> </p>
<p>Musician Wren (Ferruginous) <em>Cyphorhinus arada modulator</em></p>
<p> </p>
<p>Stripe-throated Bulbul (Pale-eyed) <em>Pycnonotus finlaysoni davisoni</em></p>
<p> </p>
<p>Stripe-throated Bulbul (Stripe-throated) <em>Pycnonotus finlaysoni finlaysoni/eous</em></p>
<p> </p>
<p>Puff-throated Bulbul (Gray-crowned) <em>Alophoixus pallidus griseiceps</em></p>
<p> </p>
<p>Puff-throated Bulbul (Puff-throated) <em>Alophoixus pallidus [pallidus Group]</em></p>
<p> </p>
<p>Ochraceous Bulbul (Ochraceous) <em>Alophoixus ochraceus [ochraceus Group]</em></p>
<p> </p>
<p>Ochraceous Bulbul (Chestnut-vented) <em>Alophoixus ochraceus ruficrissus/fowleri</em></p>
<p> </p>
<p>Gray-cheeked Bulbul (Gray-cheeked) <em>Alophoixus bres tephrogenys/gutturalis</em></p>
<p> </p>
<p>Gray-cheeked Bulbul (Brown-cheeked) <em>Alophoixus bres bres/balicus</em></p>
<p> </p>
<p>Sula Golden-Bulbul (Banggai) <em>Alophoixus longirostris harterti</em></p>
<p> </p>
<p>Sula Golden-Bulbul (Sula) <em>Alophoixus longirostris longirostris</em></p>
<p> </p>
<p>Gray-eyed Bulbul (Gray-eyed) <em>Iole propinqua [propinqua Group]</em></p>
<p> </p>
<p>Gray-eyed Bulbul (innectens) <em>Iole propinqua innectens</em></p>
<p> </p>
<p>Olive Bulbul (Olive) <em>Iole viridescens viridescens/lekhakuni</em></p>
<p> </p>
<p>Olive Bulbul (Baker’s) <em>Iole viridescens cinnamomeoventris</em></p>
<p> </p>
<p>Yellowish Bulbul (Yellowish) <em>Hypsipetes everetti everetti/samarensis</em></p>
<p> </p>
<p>Yellowish Bulbul (Sulu) <em>Hypsipetes everetti haynaldi</em></p>
<p> </p>
<p>Yellowish Bulbul (Camiguin) <em>Hypsipetes everetti catarmanensis</em></p>
<p> </p>
<p>Ashy Bulbul (Cinereous) <em>Hemixos flavala cinereus</em></p>
<p> </p>
<p>Ashy Bulbul (Green-winged) <em>Hemixos flavala connectens</em></p>
<p> </p>
<p>Sunda Bulbul (Sumatran) <em>Ixos virescens sumatranus</em></p>
<p> </p>
<p>Sunda Bulbul (Javan) <em>Ixos virescens virescens</em></p>
<p> </p>
<p>Scrub Warbler (Western) <em>Scotocerca inquieta saharae/theresae</em></p>
<p> </p>
<p>Scrub Warbler (Eastern) <em>Scotocerca inquieta [inquieta Group]</em></p>
<p> </p>
<p>Brownish-flanked Bush Warbler (Brownish-flanked) <em>Horornis fortipes [fortipes Group]</em></p>
<p> </p>
<p>Brownish-flanked Bush Warbler (Taiwan) <em>Horornis fortipes robustipes</em></p>
<p> </p>
<p>Aberrant Bush Warbler (Aberrant) <em>Horornis flavolivaceus [flavolivaceus Group]</em></p>
<p> </p>
<p>Aberrant Bush Warbler (Perplexing) <em>Horornis flavolivaceus intricatus/oblitus</em></p>
<p> </p>
<p>Sulawesi Leaf Warbler (Sulawesi) <em>Phylloscopus sarasinorum nesophilus</em></p>
<p> </p>
<p>Sulawesi Leaf Warbler (Lompobattang) <em>Phylloscopus sarasinorum sarasinorum</em></p>
<p> </p>
<p>Timor Leaf Warbler (Flores) <em>Phylloscopus presbytes floris</em></p>
<p> </p>
<p>Timor Leaf Warbler (Timor) <em>Phylloscopus presbytes presbytes</em></p>
<p> </p>
<p>Sunda Warbler (Sumatran) <em>Seicercus grammiceps sumatrensis</em></p>
<p> </p>
<p>Sunda Warbler (Javan) <em>Seicercus grammiceps grammiceps</em></p>
<p> </p>
<p>Papyrus Yellow-Warbler (Papyrus) <em>Calamonastides gracilirostris gracilirostris</em></p>
<p> </p>
<p>Papyrus Yellow-Warbler (Zambian) <em>Calamonastides gracilirostris bensoni</em></p>
<p> </p>
<p>Eurasian Reed Warbler (Siwa) <em>Acrocephalus scirpaceus ammon</em></p>
<p> </p>
<p>Fernbird (New Zealand) <em>Megalurus punctatus [punctatus Group]</em></p>
<p> </p>
<p>Fernbird (Snares) <em>Megalurus punctatus caudatus</em></p>
<p> </p>
<p>Chestnut-backed Bush Warbler (Sulawesi) <em>Locustella castanea castanea</em></p>
<p> </p>
<p>Chestnut-backed Bush Warbler (Buru) <em>Locustella castanea disturbans</em></p>
<p> </p>
<p>Chestnut-backed Bush Warbler (Seram) <em>Locustella castanea musculus</em></p>
<p> </p>
<p>Guadalcanal Thicketbird (Santo) <em>Megalurulus whitneyi whitneyi</em></p>
<p> </p>
<p>Guadalcanal Thicketbird (Guadalcanal) <em>Megalurulus whitneyi turipavae</em></p>
<p> </p>
<p>Black-throated Prinia (Black-throated) <em>Prinia atrogularis atrogularis</em></p>
<p> </p>
<p>Black-throated Prinia (Rufous-crowned) <em>Prinia atrogularis khasiana</em></p>
<p> </p>
<p>Yellow-bellied Prinia (Yellow-bellied) <em>Prinia flaviventris [flaviventris Group]</em></p>
<p> </p>
<p>Yellow-bellied Prinia (Chinese) <em>Prinia flaviventris sonitans</em></p>
<p> </p>
<p>Lesser Whitethroat (curruca/blythi) <em>Sylvia curruca curruca/blythi</em></p>
<p> </p>
<p>Lesser Whitethroat (halimodendri) <em>Sylvia curruca halimodendri</em></p>
<p> </p>
<p>Brown-winged Parrotbill (Yunnan) <em>Sinosuthora brunnea ricketti</em></p>
<p> </p>
<p>Brown-winged Parrotbill (Brown-winged) <em>Sinosuthora brunnea brunnea/styani</em></p>
<p> </p>
<p>White-breasted White-eye (Abyssinian) <em>Zosterops abyssinicus [abyssinicus Group]</em></p>
<p> </p>
<p>White-breasted White-eye (Kenya) <em>Zosterops abyssinicus flavilateralis/jubaensis</em></p>
<p> </p>
<p>Bridled White-eye (Bridled) <em>Zosterops conspicillatus conspicillatus</em></p>
<p> </p>
<p>Bridled White-eye (Saipan) <em>Zosterops conspicillatus saypani</em></p>
<p> </p>
<p>Cream-throated White-eye (Morotai) <em>Zosterops atriceps dehaani</em></p>
<p> </p>
<p>Cream-throated White-eye (Halmahera) <em>Zosterops atriceps fuscifrons</em></p>
<p> </p>
<p>Cream-throated White-eye (Bacan) <em>Zosterops atriceps atriceps</em></p>
<p> </p>
<p>Black-fronted White-eye (Black-fronted) <em>Zosterops minor [chrysolaemus Group]</em></p>
<p> </p>
<p>Black-fronted White-eye (Green-fronted) <em>Zosterops minor minor/rothschildi</em></p>
<p> </p>
<p>Gray-throated White-eye (Bougainville) <em>Zosterops ugiensis hamlini</em></p>
<p> </p>
<p>Gray-throated White-eye (Gray-throated) <em>Zosterops ugiensis ugiensis/oblitus</em></p>
<p> </p>
<p>Gray-cheeked Tit-Babbler (Gray-cheeked) <em>Mixornis flavicollis flavicollis</em></p>
<p> </p>
<p>Gray-cheeked Tit-Babbler (Kangean) <em>Mixornis flavicollis prillwitzi</em></p>
<p> </p>
<p>Chestnut-winged Babbler (Chestnut-winged) <em>Cyanoderma erythropterum [erythropterum </em></p>
<p><em>Group]</em></p>
<p> </p>
<p>Chestnut-winged Babbler (Gray-hooded) <em>Cyanoderma erythropterum bicolor/rufum</em></p>
<p> </p>
<p>Pale-breasted Illadopsis (Pale-breasted) <em>Illadopsis rufipennis rufipennis/extrema</em></p>
<p> </p>
<p>Pale-breasted Illadopsis (Gray-breasted) <em>Illadopsis rufipennis distans/pugensis</em></p>
<p> </p>
<p>Black-capped Babbler (Black-capped) <em>Pellorneum capistratum [nigrocapitatum Group]</em></p>
<p> </p>
<p>Black-capped Babbler (Rufous-browed) <em>Pellorneum capistratum capistratum</em></p>
<p> </p>
<p>Limestone Wren-Babbler (Grayish) <em>Turdinus crispifrons crispifrons/annamensis</em></p>
<p> </p>
<p>Limestone Wren-Babbler (Rufous) <em>Turdinus crispifrons calcicola</em></p>
<p> </p>
<p>Jungle Babbler (Jungle) <em>Turdoides striata [striata Group]</em></p>
<p> </p>
<p>Jungle Babbler (Black-winged) <em>Turdoides striata somervillei</em></p>
<p> </p>
<p>Moustached Laughingthrush (Western) <em>Ianthocincla cineracea cineracea/strenua</em></p>
<p> </p>
<p>Moustached Laughingthrush (Eastern) <em>Ianthocincla cineracea cinereiceps</em></p>
<p> </p>
<p>Black-throated Laughingthrush (Black-throated) <em>Ianthocincla chinensis [chinensis Group]</em></p>
<p> </p>
<p>Black-throated Laughingthrush (Hainan) <em>Ianthocincla chinensis monachus</em></p>
<p> </p>
<p>Silver-eared Mesia (Silver-eared) <em>Leiothrix argentauris [argentauris Group]</em></p>
<p> </p>
<p>Silver-eared Mesia (Sumatran) <em>Leiothrix argentauris laurinae/rookmakeri</em></p>
<p> </p>
<p>Spectacled Barwing (Eastern) <em>Actinodura ramsayi radcliffei/yunnanensis</em></p>
<p> </p>
<p>Spectacled Barwing (Western) <em>Actinodura ramsayi ramsayi</em></p>
<p> </p>
<p>Asian Fairy-bluebird (Asian) <em>Irena puella [puella Group]</em></p>
<p> </p>
<p>Asian Fairy-bluebird (Palawan) <em>Irena puella tweeddalii</em></p>
<p> </p>
<p>Brown-streaked Flycatcher (Brown-streaked) <em>Muscicapa williamsoni williamsoni</em></p>
<p> </p>
<p>Brown-streaked Flycatcher (Umber) <em>Muscicapa williamsoni umbrosa</em></p>
<p> </p>
<p>Oriental Magpie-Robin (Oriental) <em>Copsychus saularis [saularis Group]</em></p>
<p> </p>
<p>Oriental Magpie-Robin (Black) <em>Copsychus saularis [amoenus Group]</em></p>
<p> </p>
<p>White-rumped Shama (Barusan) <em>Copsychus malabaricus [melanurus Group]</em></p>
<p> </p>
<p>Blue-breasted Flycatcher (Blue-breasted) <em>Cyornis herioti herioti</em></p>
<p> </p>
<p>Blue-breasted Flycatcher (Rufous-breasted) <em>Cyornis herioti camarinensis</em></p>
<p> </p>
<p>Pale Blue Flycatcher (Unicolored) <em>Cyornis unicolor unicolor</em></p>
<p> </p>
<p>Pale Blue Flycatcher (Diao Luo) <em>Cyornis unicolor diaoluoensis</em></p>
<p> </p>
<p>Pale Blue Flycatcher (Hartert’s) <em>Cyornis unicolor harterti</em></p>
<p> </p>
<p>Tickell’s Blue Flycatcher (Tickell’s) <em>Cyornis tickelliae tickelliae/jerdoni</em></p>
<p> </p>
<p>Tickell’s Blue Flycatcher (Indochinese) <em>Cyornis tickelliae [sumatrensis Group]</em></p>
<p> </p>
<p>Flores Jungle-Flycatcher (Russet-backed) <em>Cyornis oscillans oscillans</em></p>
<p> </p>
<p>Flores Jungle-Flycatcher (Sumba) <em>Cyornis oscillans stresemanni</em></p>
<p> </p>
<p>Chestnut-tailed Jungle-Flycatcher (Philippine) <em>Cyornis ruficauda [ruficauda Group]</em></p>
<p> </p>
<p>Chestnut-tailed Jungle-Flycatcher (Sulu) <em>Cyornis ruficauda ocularis</em></p>
<p> </p>
<p>Chestnut-tailed Jungle-Flycatcher (Crocker) <em>Cyornis ruficauda ruficrissa/isola</em></p>
<p> </p>
<p>Vivid Niltava (Large) <em>Niltava vivida oatesi</em></p>
<p> </p>
<p>Vivid Niltava (Small) <em>Niltava vivida vivida</em></p>
<p> </p>
<p>Indigo Flycatcher (Rufous-vented) <em>Eumyias indigo ruficrissa/cerviniventris</em></p>
<p> </p>
<p>Indigo Flycatcher (Javan) <em>Eumyias indigo indigo</em></p>
<p> </p>
<p>Great Shortwing (Minahasa) <em>Heinrichia calligyna simplex</em></p>
<p> </p>
<p>Great Shortwing (Great) <em>Heinrichia calligyna calligyna/picta</em></p>
<p> </p>
<p>White-browed Shortwing (Himalayan) <em>Brachypteryx montana cruralis</em></p>
<p> </p>
<p>White-browed Shortwing (Chinese) <em>Brachypteryx montana sinensis</em></p>
<p> </p>
<p>White-browed Shortwing (Taiwan) <em>Brachypteryx montana goodfellowi</em></p>
<p> </p>
<p>White-browed Shortwing (Philippine) <em>Brachypteryx montana [poliogyna Group]</em></p>
<p> </p>
<p>White-browed Shortwing (Bornean) <em>Brachypteryx montana erythrogyna</em></p>
<p> </p>
<p>White-browed Shortwing (Sumatran) <em>Brachypteryx montana saturata</em></p>
<p> </p>
<p>White-browed Shortwing (Javan) <em>Brachypteryx montana montana</em></p>
<p> </p>
<p>White-browed Shortwing (Flores) <em>Brachypteryx montana floris</em></p>
<p> </p>
<p>Japanese Robin (Japanese) <em>Larvivora akahige akahige/rishirensis</em></p>
<p> </p>
<p>Japanese Robin (Izu) <em>Larvivora akahige tanensis</em></p>
<p> </p>
<p>Ryukyu Robin (Ryukyu) <em>Larvivora komadori komadori/subrufus</em></p>
<p> </p>
<p>Ryukyu Robin (Okinawa) <em>Larvivora komadori namiyei</em></p>
<p> </p>
<p>White-crowned Forktail (White-crowned) <em>Enicurus leschenaulti [frontalis Group]</em></p>
<p> </p>
<p>White-crowned Forktail (Javan) <em>Enicurus leschenaulti leschenaulti</em></p>
<p> </p>
<p>White-tailed Robin (White-tailed) <em>Myiomela leucura leucura/montium</em></p>
<p> </p>
<p>White-tailed Robin (Cambodian) <em>Myiomela leucura cambodiana</em></p>
<p> </p>
<p>Sunda Robin (Sumatran) <em>Myiomela diana sumatrana</em></p>
<p> </p>
<p>Sunda Robin (Javan) <em>Myiomela diana diana</em></p>
<p> </p>
<p>Red-breasted Wheatear (Buff-breasted) <em>Oenanthe bottae bottae</em></p>
<p> </p>
<p>Red-breasted Wheatear (Rusty-breasted) <em>Oenanthe bottae frenata</em></p>
<p> </p>
<p>Orange-headed Thrush (Orange-headed) <em>Geokichla citrina [citrina Group]</em></p>
<p> </p>
<p>Orange-headed Thrush (White-throated) <em>Geokichla citrina cyanota</em></p>
<p> </p>
<p>Orange-headed Thrush (Plain-winged) <em>Geokichla citrina albogularis/andamanensis</em></p>
<p> </p>
<p>Orange-headed Thrush (Buff-throated) <em>Geokichla citrina [aurimacula Group]</em></p>
<p> </p>
<p>Andean Solitaire (plumbeiceps) <em>Myadestes ralloides plumbeiceps</em></p>
<p> </p>
<p>Andean Solitaire (venezuelensis/candelae) <em>Myadestes ralloides venezuelensis/candelae</em></p>
<p> </p>
<p>Andean Solitaire (ralloides) <em>Myadestes ralloides ralloides</em></p>
<p> </p>
<p>Spotted Nightingale-Thrush (Gould’s) <em>Catharus dryas dryas/ovandensis</em></p>
<p> </p>
<p>Spotted Nightingale-Thrush (Sclater’s) <em>Catharus dryas maculatus/blakei</em></p>
<p> </p>
<p>Rufous-brown Solitaire (Chestnut-throated) <em>Cichlopsis leucogenys chubbi</em></p>
<p> </p>
<p>Rufous-brown Solitaire (Peruvian) <em>Cichlopsis leucogenys peruviana</em></p>
<p> </p>
<p>Rufous-brown Solitaire (Guianan) <em>Cichlopsis leucogenys gularis</em></p>
<p> </p>
<p>Rufous-brown Solitaire (Rufous-brown) <em>Cichlopsis leucogenys leucogenys</em></p>
<p> </p>
<p>Black-billed Thrush (Pantepui) <em>Turdus ignobilis murinus</em></p>
<p> </p>
<p>White-necked Myna (Northern) <em>Streptocitta albicollis torquata</em></p>
<p> </p>
<p>White-necked Myna (Southern) <em>Streptocitta albicollis albicollis</em></p>
<p> </p>
<p>Asian Pied Starling (Asian) <em>Gracupica contra [contra Group]</em></p>
<p> </p>
<p>Asian Pied Starling (Javan) <em>Gracupica contra jalla</em></p>
<p> </p>
<p>Vinous-breasted Starling (Burmese) <em>Acridotheres burmannicus burmannicus</em></p>
<p> </p>
<p>Vinous-breasted Starling (Vinous-breasted) <em>Acridotheres burmannicus leucocephalus</em></p>
<p> </p>
<p>Black-winged Starling (Black-winged) <em>Acridotheres melanopterus melanopterus</em></p>
<p> </p>
<p>Black-winged Starling (Gray-backed) <em>Acridotheres melanopterus tricolor</em></p>
<p> </p>
<p>Black-winged Starling (Gray-rumped) <em>Acridotheres melanopterus tertius</em></p>
<p> </p>
<p>Blue-winged Leafbird (Blue-winged) <em>Chloropsis cochinchinensis [moluccensis Group]</em></p>
<p> </p>
<p>Blue-winged Leafbird (Javan) <em>Chloropsis cochinchinensis cochinchinensis</em></p>
<p> </p>
<p>Orange-bellied Leafbird (Orange-bellied) <em>Chloropsis hardwickii hardwickii/malayana</em></p>
<p> </p>
<p>Orange-bellied Leafbird (Grayish-crowned) <em>Chloropsis hardwickii lazulina/melliana</em></p>
<p> </p>
<p>Flame-crowned Flowerpecker (Yellow-crowned) <em>Dicaeum anthonyi anthonyi</em></p>
<p> </p>
<p>Flame-crowned Flowerpecker (Flame-crowned) <em>Dicaeum anthonyi kampalili/masawan</em></p>
<p> </p>
<p>Fire-breasted Flowerpecker (Fire-breasted) <em>Dicaeum ignipectus [ignipectus Group]</em></p>
<p> </p>
<p>Fire-breasted Flowerpecker (Cambodian) <em>Dicaeum ignipectus cambodianum</em></p>
<p> </p>
<p>Fire-breasted Flowerpecker (Fire-throated) <em>Dicaeum ignipectus [luzoniense Group]</em></p>
<p> </p>
<p>Fire-breasted Flowerpecker (Sumatran) <em>Dicaeum ignipectus beccarii</em></p>
<p> </p>
<p>Blood-breasted Flowerpecker (Blood-breasted) <em>Dicaeum sanguinolentum </em></p>
<p><em>sanguinolentum/rhodopygiale</em></p>
<p> </p>
<p>Blood-breasted Flowerpecker (Sumba) <em>Dicaeum sanguinolentum wilhelminae</em></p>
<p> </p>
<p>Blood-breasted Flowerpecker (Timor) <em>Dicaeum sanguinolentum hanieli</em></p>
<p> </p>
<p>Mistletoebird (Pink-breasted) <em>Dicaeum hirundinaceum keiense/fulgidum</em></p>
<p> </p>
<p>Mistletoebird (Aru) <em>Dicaeum hirundinaceum ignicolle</em></p>
<p> </p>
<p>Mistletoebird (Mistletoebird) <em>Dicaeum hirundinaceum hirundinaceum</em></p>
<p> </p>
<p>Purple-throated Sunbird (Purple-throated) <em>Leptocoma sperata [sperata Group]</em></p>
<p> </p>
<p>Purple-throated Sunbird (Orange-lined) <em>Leptocoma sperata juliae</em></p>
<p> </p>
<p>Miombo Sunbird (Western) <em>Cinnyris manoensis pintoi</em></p>
<p> </p>
<p>Miombo Sunbird (Eastern) <em>Cinnyris manoensis manoensis/amicorum</em></p>
<p> </p>
<p>Beautiful Sunbird (Beautiful) <em>Cinnyris pulchellus pulchellus</em></p>
<p> </p>
<p>Beautiful Sunbird (Gorgeous) <em>Cinnyris pulchellus melanogastrus</em></p>
<p> </p>
<p>Shining Sunbird (Shining) <em>Cinnyris habessinicus [habessinicus Group]</em></p>
<p> </p>
<p>Shining Sunbird (Arabian) <em>Cinnyris habessinicus hellmayri/kinneari</em></p>
<p> </p>
<p>Madagascar Sunbird (Grand Comoro) <em>Cinnyris notatus moebii</em></p>
<p> </p>
<p>Madagascar Sunbird (Moheli) <em>Cinnyris notatus voeltzkowi</em></p>
<p> </p>
<p>Madagascar Sunbird (Long-billed) <em>Cinnyris notatus notatus</em></p>
<p> </p>
<p>Fork-tailed Sunbird (Fork-tailed) <em>Aethopyga christinae latouchii/sokolovi</em></p>
<p> </p>
<p>Fork-tailed Sunbird (Hainan) <em>Aethopyga christinae christinae</em></p>
<p> </p>
<p>Western Yellow Wagtail (flavissima) <em>Motacilla flava flavissima</em></p>
<p> </p>
<p>Western Yellow Wagtail (lutea) <em>Motacilla flava lutea</em></p>
<p> </p>
<p>Western Yellow Wagtail (flava) <em>Motacilla flava flava</em></p>
<p> </p>
<p>Western Yellow Wagtail (beema) <em>Motacilla flava beema</em></p>
<p> </p>
<p>Western Yellow Wagtail (iberiae) <em>Motacilla flava iberiae</em></p>
<p> </p>
<p>Western Yellow Wagtail (cinereocapilla) <em>Motacilla flava cinereocapilla</em></p>
<p> </p>
<p>Western Yellow Wagtail (pygmaea) <em>Motacilla flava pygmaea</em></p>
<p> </p>
<p>Hylocitrea (Northern) <em>Hylocitrea bonensis bonensis</em></p>
<p> </p>
<p>Hylocitrea (Southern) <em>Hylocitrea bonensis bonthaina</em></p>
<p> </p>
<p>Olive-crowned Yellowthroat (Baird’s) <em>Geothlypis semiflava bairdi</em></p>
<p> </p>
<p>Olive-crowned Yellowthroat (Olive-crowned) <em>Geothlypis semiflava semiflava</em></p>
<p> </p>
<p>Citrine Warbler (Northern) <em>Myiothlypis luteoviridis [luteoviridis Group]</em></p>
<p> </p>
<p>Citrine Warbler (Peruvian) <em>Myiothlypis luteoviridis striaticeps</em></p>
<p> </p>
<p>Citrine Warbler (Bolivian) <em>Myiothlypis luteoviridis euophrys</em></p>
<p> </p>
<p>Golden-fronted Redstart (Golden-fronted) <em>Myioborus ornatus chrysops</em></p>
<p> </p>
<p>Golden-fronted Redstart (Yellow-fronted) <em>Myioborus ornatus ornatus</em></p>
<p> </p>
<p>Ringed Warbling-Finch (Ringed) <em>Microspingus torquatus torquatus</em></p>
<p> </p>
<p>Ringed Warbling-Finch (Black-breasted) <em>Microspingus torquatus pectoralis</em></p>
<p> </p>
<p>Scarlet-bellied Mountain-Tanager (Scarlet-bellied) <em>Anisognathus igniventris [lunulatus Group]</em></p>
<p> </p>
<p>Scarlet-bellied Mountain-Tanager (Fire-bellied) <em>Anisognathus igniventris igniventris</em></p>
<p> </p>
<p>Blue-winged Mountain-Tanager (Blue-winged) <em>Anisognathus somptuosus [somptuosus Group]</em></p>
<p> </p>
<p>Blue-winged Mountain-Tanager (Bolivian) <em>Anisognathus somptuosus flavinucha</em></p>
<p> </p>
<p>Blue-and-yellow Tanager (Green-mantled) <em>Pipraeidea bonariensis darwinii</em></p>
<p> </p>
<p>Blue-and-yellow Tanager (Blue-and-yellow) <em>Pipraeidea bonariensis [bonariensis Group]</em></p>
<p> </p>
<p>Orange-eared Tanager (Orange-eared) <em>Chlorochrysa calliparaea calliparaea/bourcieri</em></p>
<p> </p>
<p>Orange-eared Tanager (Blue-throated) <em>Chlorochrysa calliparaea fulgentissima</em></p>
<p> </p>
<p>Golden-naped Tanager (Golden-naped) <em>Tangara ruficervix [ruficervix Group]</em></p>
<p> </p>
<p>Golden-naped Tanager (Rusty-naped) <em>Tangara ruficervix [fulvicervix Group]</em></p>
<p> </p>
<p>Black-headed Tanager (Black-headed) <em>Tangara cyanoptera cyanoptera</em></p>
<p> </p>
<p>Black-headed Tanager (Black-hooded) <em>Tangara cyanoptera whitelyi</em></p>
<p> </p>
<p>Lesser Antillean Tanager (St. Vincent) <em>Tangara cucullata versicolor</em></p>
<p> </p>
<p>Lesser Antillean Tanager (Grenada) <em>Tangara cucullata cucullata</em></p>
<p> </p>
<p>Blue-and-black Tanager (Blue-and-black) <em>Tangara vassorii vassorii/branickii</em></p>
<p> </p>
<p>Blue-and-black Tanager (Spot-bellied) <em>Tangara vassorii atrocoerulea</em></p>
<p> </p>
<p>Flame-faced Tanager (Flame-faced) <em>Tangara parzudakii parzudakii/urubambae</em></p>
<p> </p>
<p>Flame-faced Tanager (Yellow-faced) <em>Tangara parzudakii lunigera</em></p>
<p> </p>
<p>Cinereous Conebill (Ochraceous) <em>Conirostrum cinereum fraseri</em></p>
<p> </p>
<p>Cinereous Conebill (Cinereous) <em>Conirostrum cinereum cinereum/littorale</em></p>
<p> </p>
<p>Orange-billed Sparrow (aurantiirostris Group) <em>Arremon aurantiirostris [aurantiirostris Group]</em></p>
<p> </p>
<p>Orange-billed Sparrow (erythrorhynchus) <em>Arremon aurantiirostris erythrorhynchus</em></p>
<p> </p>
<p>Orange-billed Sparrow (spectabilis) <em>Arremon aurantiirostris spectabilis</em></p>
<p> </p>
<p>Black-capped Sparrow (Black-capped) <em>Arremon abeillei abeillei</em></p>
<p> </p>
<p>Black-capped Sparrow (Marañon) <em>Arremon abeillei nigriceps</em></p>
<p> </p>
<p>Pectoral Sparrow (Yellow-mandibled) <em>Arremon taciturnus axillaris</em></p>
<p> </p>
<p>Pectoral Sparrow (Pectoral) <em>Arremon taciturnus taciturnus/nigrirostris</em></p>
<p> </p>
<p>White-eared Ground-Sparrow (Gray-crowned) <em>Melozone leucotis occipitalis</em></p>
<p> </p>
<p>White-eared Ground-Sparrow (White-eared) <em>Melozone leucotis leucotis/nigrior</em></p>
<p> </p>
<p>Rose-breasted Chat (Rose-breasted) <em>Granatellus pelzelni pelzelni</em></p>
<p> </p>
<p>Rose-breasted Chat (Rose-bellied) <em>Granatellus pelzelni paraensis</em></p>
<p> </p>
<p>Russet-backed Oropendola (Russet-backed) <em>Psarocolius angustifrons [angustifrons Group]</em></p>
<p> </p>
<p>Russet-backed Oropendola (Green-billed) <em>Psarocolius angustifrons oleagineus</em></p>
<p> </p>
<p>Olive Oropendola (Amazonian) <em>Psarocolius bifasciatus yuracares/neivae</em></p>
<p> </p>
<p>Olive Oropendola (Para) <em>Psarocolius bifasciatus bifasciatus</em></p>
<p> </p>
<p>Blue Chaffinch (Tenerife) <em>Fringilla teydea teydea</em></p>
<p> </p>
<p>Blue Chaffinch (Gran Canaria) <em>Fringilla teydea polatzeki</em></p>
<p> </p>
<p>Antillean Euphonia (Hispaniolan) <em>Euphonia musica musica</em></p>
<p> </p>
<p>Antillean Euphonia (Puerto Rican) <em>Euphonia musica sclateri</em></p>
<p> </p>
<p>Antillean Euphonia (Lesser) <em>Euphonia musica flavifrons</em></p>
<p> </p>
<p>Brown Bullfinch (Brown) <em>Pyrrhula nipalensis [nipalensis Group]</em></p>
<p> </p>
<p>Brown Bullfinch (Malay) <em>Pyrrhula nipalensis waterstradti</em></p>
<p> </p>
<p>Reichard’s Seedeater (Stripe-breasted) <em>Crithagra reichardi striatipectus</em></p>
<p> </p>
<p>Reichard’s Seedeater (Reichard’s) <em>Crithagra reichardi reichardi</em></p>
<p> </p>
<p>Mountain Serin (Mountain) <em>Chrysocorythus estherae [estherae Group]</em></p>
<p> </p>
<p>Mountain Serin (Mindanao) <em>Chrysocorythus estherae mindanensis</em></p>
<p> </p>
<p>European Goldfinch (European) <em>Carduelis carduelis [carduelis Group]</em></p>
<p> </p>
<p>European Goldfinch (Eastern) <em>Carduelis carduelis [caniceps Group]</em></p>
<p> </p>
<p>Red-headed Weaver (Northern) <em>Anaplectes rubriceps leuconotos</em></p>
<p> </p>
<p>Red-headed Weaver (Red) <em>Anaplectes rubriceps jubaensis</em></p>
<p> </p>
<p>Red-headed Weaver (Southern) <em>Anaplectes rubriceps rubriceps</em></p>
<p> </p>
<p>Katanga Masked-Weaver (Upemba) <em>Ploceus katangae upembae</em></p>
<p> </p>
<p>Katanga Masked-Weaver (Katanga) <em>Ploceus katangae katangae</em></p>
<p> </p>
<p>Red-collared Widowbird (Red-cowled) <em>Euplectes ardens laticauda/suahelicus</em></p>
<p> </p>
<p>Red-collared Widowbird (Red-collared) <em>Euplectes ardens ardens</em></p>
<p> </p>
<p>Crimson Finch (White-bellied) <em>Neochmia phaeton evangelinae</em></p>
<p> </p>
<p>Crimson Finch (Black-bellied) <em>Neochmia phaeton phaeton</em></p>
</div><!-- .page-content -->
</div><!-- .standard_wrap -->
</div><!-- #content_area -->
</div><!-- .inner_section -->
</div><!-- #content_section -->
<div id="footer_section">
<div class="inner_section clearfix cmfix">
<div class="footer_col1">
<a href="http://www.cornell.edu/"><img src="http://www.birds.cornell.edu/clementschecklist/wp-content/themes/cornelllab/images/logo_cornell.gif" id="footer_logo" /></a>
</div>
<div class="footer_col3">
<ul>
<li class="footer_list"><a href="http://www.birds.cornell.edu/page.aspx?pid=1644" class="footer_link first">Contact Us</a></li>
<li class="footer_list"><a href="http://www.birds.cornell.edu/Page.aspx?pid=1635" class="footer_link">Privacy Policy</a></li>
<li class="footer_list"><a href="http://www.birds.cornell.edu/Page.aspx?pid=1636" class="footer_link">Terms of Use</a></li>
<li class="footer_list"><a href="http://www.allaboutbirds.org" class="footer_link last">All About Birds</a></li>
</ul>
</div>
<div class="footer_col2">
<p><a href="http://www.birds.cornell.edu" class="copyright_link">Copyright ©<span id="copyrightyear"> 2012</span> Cornell University</a></p>
<p>Cornell Lab of Ornithology<br />
159 Sapsucker Woods Rd<br />
Ithaca, NY 14850<br />
Tel: <span style="color:#ffffff" class="phone">800.843.2473</span></p>
</div>
</div>
</div>
<script>// By <NAME> & tweaked by <NAME>
// Makes embedded YouTube iframes fluid-width: on load, record each video's
// aspect ratio from its hard-coded width/height attributes, then on every
// window resize rescale each video to the current container width.
jQuery(document).ready(function($){
// Find all YouTube videos
var $allVideos = $("iframe[src^='http://www.youtube.com']"),
// The element that is fluid width
$fluidEl = $(".embed-vid");
// Figure out and save aspect ratio for each video
$allVideos.each(function() {
$(this)
// height/width here come from the iframe's original HTML attributes
.data('aspectRatio', this.height / this.width)
// and remove the hard coded width/height
.removeAttr('height')
.removeAttr('width');
});
// When the window is resized
// (You'll probably want to debounce this)
$(window).resize(function() {
var newWidth = $fluidEl.width();
// Resize all videos according to their own aspect ratio
$allVideos.each(function() {
var $el = $(this);
$el
.width(newWidth)
.height(newWidth * $el.data('aspectRatio'));
});
// Kick off one resize to fix all videos on page load
}).resize();
});</script>
</body>
</html>
<file_sep># Clements Bird Checklist
<img src="images/ClementsCover.jpg" align="right" width=140/>
List of changes to Clements checklist from http://www.birds.cornell.edu/clementschecklist/. That web site provides downloadable versions of the checklist (.xlsx and .csv) but only the latest version (currently 2018). Previous versions were recovered from the Internet Archive.
<file_sep><?php
// Parse an eBird taxonomy CSV and emit its taxonomic hierarchy as a GML
// directed graph on stdout: one node per distinct taxon / name prefix
// (class, order, family, genus, species, species code), one edge per
// child -> parent link.
ini_set("auto_detect_line_endings", true); // vital because some files have Windows ending

$nodes = array();        // node id => label
$nodes_map = array();    // label => node id (deduplicates shared ancestors)
$edges = array();        // child node id => parent node id
$node_count = 0;
$row_count = 0;
$header = array();
$header_lookup = array(); // column name => column index (built from the header row)
$done = false;

// Earlier taxonomy releases kept for reference; the last assignment wins.
$filename = '2017/eBird_Taxonomy_v2017_18Aug2017.csv';
$filename = '2018/eBird_Taxonomy_v2018_14Aug2018.csv';
$filename = '2019/eBird_Taxonomy_v2019.csv';

// Open the file exactly once and check the handle we actually read from.
// (The original opened the file twice and leaked the first handle.)
$file_handle = @fopen($filename, "r") or die("couldn't open $filename");

while (!feof($file_handle) && !$done)
{
	$row = fgetcsv(
		$file_handle,
		0,      // no line-length limit
		',',
		'"'
	);

	$go = is_array($row);

	// First row is the header: remember it and build the name => index lookup.
	if ($go && ($row_count == 0))
	{
		$header = $row;
		$n = count($header);
		for ($i = 0; $i < $n; $i++)
		{
			$header_lookup[$header[$i]] = $i;
		}
		$go = false;
	}

	if ($go)
	{
		// Turn the row into an object keyed by column name, skipping empty
		// cells so isset() below doubles as a "column present" test.
		$obj = new stdclass;
		foreach ($row as $k => $v)
		{
			// Strict comparison so a legitimate "0" cell is not dropped.
			if ($v !== '')
			{
				$obj->{$header[$k]} = $v;
			}
		}

		// Build the root-to-leaf path for this taxon, e.g.
		// Aves / Struthioniformes / Struthionidae / Struthio / Struthio camelus / <code>
		$path = [];
		$path[] = 'Aves';
		if (isset($obj->ORDER1))
		{
			$path[] = $obj->ORDER1;
		}
		if (isset($obj->FAMILY))
		{
			// Drop the trailing English gloss, e.g. "Struthionidae (Ostriches)".
			$family = preg_replace('/\s+\(.*$/', '', $obj->FAMILY);
			$path[] = $family;
		}
		if (isset($obj->SCI_NAME))
		{
			// Normalise the scientific name so spaces only separate path levels:
			// strip parentheticals and hyphenate multi-word tokens.
			$s = $obj->SCI_NAME;
			$s = preg_replace('/\s+\(.*$/', '', $s);
			$s = preg_replace('/ Group/i', '-Group', $s);
			$s = preg_replace('/undescribed form/', 'undescribed-form', $s);

			// Every proper prefix of the name (genus, genus+species, ...)
			// becomes an internal node; the species code is the leaf.
			$parts = explode(' ', $s);
			$n = count($parts);
			$x = array();
			for ($i = 0; $i < $n - 1; $i++)
			{
				$x[] = $parts[$i];
				$str = join(' ', $x);
				if ($str != $path[count($path) - 1])
				{
					$path[] = $str;
				}
			}
			$path[] = $obj->SPECIES_CODE;
		}
		else
		{
			$path[] = $obj->SPECIES_CODE;
		}

		// Register any path elements not seen before as nodes.
		$n = count($path);
		for ($i = 0; $i < $n; $i++)
		{
			if (!isset($nodes_map[$path[$i]]))
			{
				$nodes[$node_count] = $path[$i];
				$nodes_map[$path[$i]] = $node_count;
				$node_count++;
			}
		}

		// Link each path element to its parent (child -> parent edges).
		for ($i = ($n - 1); $i > 0; $i--)
		{
			$from = $nodes_map[$path[$i]];
			$to = $nodes_map[$path[$i - 1]];
			$edges[$from] = $to;
		}
	}

	$row_count++;
	if ($row_count > 30000) // safety cap on rows processed
	{
		$done = true;
	}
}
fclose($file_handle); // the original never released the handle

// Emit the tree in GML format.
echo "graph [\n";
echo "directed 1\n";
foreach ($nodes as $k => $v)
{
	echo "node [";
	echo " id " . $k . "";
	echo " label \"" . addcslashes($v, '"') . "\"";
	echo " ]\n";
}
foreach ($edges as $k => $v)
{
	echo "edge [";
	echo " source $v";
	echo " target $k";
	echo " ]\n";
}
echo "]\n";
?>
<file_sep><!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<title>Updates &amp; Corrections – August 2018 | Clements Checklist</title>
<link rel="SHORTCUT ICON" href="http://www.birds.cornell.edu/images/FavIcon.ico" type="image/x-icon" /><link rel="ICON" href="http://www.birds.cornell.edu/images/FavIcon.ico" type="image/x-icon" />
<link rel="icon"
type="image/png"
href="http://www.birds.cornell.edu/bbimages/aab/favicon.png" />
<link type="text/css" rel="stylesheet" href="http://www.birds.cornell.edu/clementschecklist/wp-content/themes/cornelllab/style.css" />
<link rel='dns-prefetch' href='//s.w.org' />
<link rel="alternate" type="application/rss+xml" title="Clements Checklist » Feed" href="http://www.birds.cornell.edu/clementschecklist/feed/" />
<link rel="alternate" type="application/rss+xml" title="Clements Checklist » Comments Feed" href="http://www.birds.cornell.edu/clementschecklist/comments/feed/" />
<script type="text/javascript">
// Auto-generated WordPress 4.9.4 emoji-support shim (minified vendor code —
// do not hand-edit): probes canvas rendering of flag/emoji glyphs and loads
// the twemoji fallback script only when native support is missing.
window._wpemojiSettings = {"baseUrl":"https:\/\/s.w.org\/images\/core\/emoji\/2.4\/72x72\/","ext":".png","svgUrl":"https:\/\/s.w.org\/images\/core\/emoji\/2.4\/svg\/","svgExt":".svg","source":{"concatemoji":"http:\/\/www.birds.cornell.edu\/clementschecklist\/wp\/wp-includes\/js\/wp-emoji-release.min.js?ver=4.9.4"}};
!function(a,b,c){function d(a,b){var c=String.fromCharCode;l.clearRect(0,0,k.width,k.height),l.fillText(c.apply(this,a),0,0);var d=k.toDataURL();l.clearRect(0,0,k.width,k.height),l.fillText(c.apply(this,b),0,0);var e=k.toDataURL();return d===e}function e(a){var b;if(!l||!l.fillText)return!1;switch(l.textBaseline="top",l.font="600 32px Arial",a){case"flag":return!(b=d([55356,56826,55356,56819],[55356,56826,8203,55356,56819]))&&(b=d([55356,57332,56128,56423,56128,56418,56128,56421,56128,56430,56128,56423,56128,56447],[55356,57332,8203,56128,56423,8203,56128,56418,8203,56128,56421,8203,56128,56430,8203,56128,56423,8203,56128,56447]),!b);case"emoji":return b=d([55357,56692,8205,9792,65039],[55357,56692,8203,9792,65039]),!b}return!1}function f(a){var c=b.createElement("script");c.src=a,c.defer=c.type="text/javascript",b.getElementsByTagName("head")[0].appendChild(c)}var g,h,i,j,k=b.createElement("canvas"),l=k.getContext&&k.getContext("2d");for(j=Array("flag","emoji"),c.supports={everything:!0,everythingExceptFlag:!0},i=0;i<j.length;i++)c.supports[j[i]]=e(j[i]),c.supports.everything=c.supports.everything&&c.supports[j[i]],"flag"!==j[i]&&(c.supports.everythingExceptFlag=c.supports.everythingExceptFlag&&c.supports[j[i]]);c.supports.everythingExceptFlag=c.supports.everythingExceptFlag&&!c.supports.flag,c.DOMReady=!1,c.readyCallback=function(){c.DOMReady=!0},c.supports.everything||(h=function(){c.readyCallback()},b.addEventListener?(b.addEventListener("DOMContentLoaded",h,!1),a.addEventListener("load",h,!1)):(a.attachEvent("onload",h),b.attachEvent("onreadystatechange",function(){"complete"===b.readyState&&c.readyCallback()})),g=c.source||{},g.concatemoji?f(g.concatemoji):g.wpemoji&&g.twemoji&&(f(g.twemoji),f(g.wpemoji)))}(window,document,window._wpemojiSettings);
</script>
<style type="text/css">
img.wp-smiley,
img.emoji {
display: inline !important;
border: none !important;
box-shadow: none !important;
height: 1em !important;
width: 1em !important;
margin: 0 .07em !important;
vertical-align: -0.1em !important;
background: none !important;
padding: 0 !important;
}
</style>
<script type='text/javascript' src='http://www.birds.cornell.edu/clementschecklist/wp/wp-includes/js/jquery/jquery.js?ver=1.12.4'></script>
<script type='text/javascript' src='http://www.birds.cornell.edu/clementschecklist/wp/wp-includes/js/jquery/jquery-migrate.min.js?ver=1.4.1'></script>
<script type='text/javascript' src='http://www.birds.cornell.edu/clementschecklist/wp/wp-content/plugins/mailchimp-widget/js/mailchimp-widget-min.js?ver=4.9.4'></script>
<script type='text/javascript' src='http://www.birds.cornell.edu/clementschecklist/wp-content/themes/cornelllab/js/cornell-custom.js?ver=4.9.4'></script>
<link rel='https://api.w.org/' href='http://www.birds.cornell.edu/clementschecklist/wp-json/' />
<link rel="EditURI" type="application/rsd+xml" title="RSD" href="http://www.birds.cornell.edu/clementschecklist/wp/xmlrpc.php?rsd" />
<link rel="wlwmanifest" type="application/wlwmanifest+xml" href="http://www.birds.cornell.edu/clementschecklist/wp/wp-includes/wlwmanifest.xml" />
<meta name="generator" content="WordPress 4.9.4" />
<link rel="canonical" href="http://www.birds.cornell.edu/clementschecklist/updates-corrections-2018/" />
<link rel='shortlink' href='http://www.birds.cornell.edu/clementschecklist/?p=727' />
<link rel="alternate" type="application/json+oembed" href="http://www.birds.cornell.edu/clementschecklist/wp-json/oembed/1.0/embed?url=http%3A%2F%2Fwww.birds.cornell.edu%2Fclementschecklist%2Fupdates-corrections-2018%2F" />
<link rel="alternate" type="text/xml+oembed" href="http://www.birds.cornell.edu/clementschecklist/wp-json/oembed/1.0/embed?url=http%3A%2F%2Fwww.birds.cornell.edu%2Fclementschecklist%2Fupdates-corrections-2018%2F&amp;format=xml" />
<!-- Google Tag Manager -->
<script>(function(w,d,s,l,i){w[l]=w[l]||[];w[l].push({'gtm.start':
new Date().getTime(),event:'gtm.js'});var f=d.getElementsByTagName(s)[0],
j=d.createElement(s),dl=l!='dataLayer'?'&l='+l:'';j.async=true;j.src=
'//www.googletagmanager.com/gtm.js?id='+i+dl;f.parentNode.insertBefore(j,f);
})(window,document,'script','dataLayer','GTM-P7854M');</script>
<!-- End Google Tag Manager -->
<script type="text/javascript">
// Page-chrome behaviors: text-resize toggle, mobile section/search slide-down
// panels (mutually exclusive), search-box clearing on focus, and wrapping of
// .caption-image images in caption markup built from their alt text.
jQuery(document).ready(function($){
	// Toggle the font-size chooser when its wrapper is clicked.
	$("#text_resize_wrapper").click(function(){
		$(".resizer").slideToggle('fast', function() {});
		return false;
	});
	$("#text_resize_wrapper").css( 'cursor', 'pointer' );
	// Mobile "sections" menu: opening it closes the search panel first so
	// only one panel is visible at a time. unbind() avoids stacked handlers.
	$("#mobile_sections_link").unbind("click").click(
		function (event) {
			if($("#mobile_sections").is(':visible')) {
				$("#mobile_sections").slideUp();
			}
			else {
				$("#mobile_search").slideUp();
				$("#mobile_sections").slideDown();
			}
	});
	// Mobile search panel: mirror image of the sections handler above.
	$("#mobile_search_link").unbind("click").click(
		function (event) {
			if($("#mobile_search").is(':visible')) {
				$("#mobile_search").slideUp();
			}
			else {
				$("#mobile_sections").slideUp();
				$("#mobile_search").slideDown();
			}
	});
	// Clear the placeholder text when the search field gains focus.
	$('#search_text').focus(function() {
		$(this).val('');
	});
	//GET IMAGES WITH CAPTION CLASS AND WRAP WITH DIV, ADD CAPTION SPAN AND TEXT-ALIGN RIGHT BOTH IMAGE AND CAPTION
	$('img.caption-image').each(function(index) {
		var parentNode = $(this).parent();
		var newNode = $('<div />').addClass('caption-wrap').append($(this)).append('<span class="caption-text">' + $(this).attr("alt") + '</span>');
		parentNode.append(newNode);
	});
});
</script>
</head>
<body>
<div id="head_section">
<div class="inner_section clearfix cmfix" >
<div class="header_right mobile-device">
<ul id="toolbar" class="clearfix">
<li><a href="/enews" id="getEnews"><span>Get eNews</span></a></li>
<li><a href="http://www.birds.cornell.edu/page.aspx?pid=1644" id="contactUs"><span>Contact Us</span></a></li>
<li><a href="https://secure3.birds.cornell.edu/SSLpage.aspx?pid=1601" id="donateNow"><span>Donate Now</span></a></li>
</ul><!-- #toolbar -->
<div id="search_section" >
<form method="get" id="searchform" action="http://www.birds.cornell.edu/clementschecklist/">
<fieldset>
<input type="text" value="" name="s" id="s" />
<div id="search_button_wrap"><input type="submit" id="searchsubmit" value="" /></div>
</fieldset>
</form> </div><!-- #search_section -->
</div><!-- .header_right -->
<a href="http://www.birds.cornell.edu" id="logo"><span>Cornell Lab of Ornithology</span></a>
<h1 id="tagline" >
<a href="http://www.birds.cornell.edu/clementschecklist/" class="tagline_link" title="Clements Checklist" rel="home">Clements Checklist</a>
</h1>
</div><!-- .inner_section -->
</div><!-- #head_section -->
<div id="nav_section" class="clearfix other_device">
<div class="inner_section">
<div id="menu-wrapper" >
<div class="menu-main-menu-container"><ul id="menu-main-menu" class="menu"><li class="menu-item menu-item-home"><a href="http://www.birds.cornell.edu/clementschecklist/" title="Home">Home</a></li><li id="menu-item-8" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-has-children menu-item-8"><a href="http://www.birds.cornell.edu/clementschecklist/about/">About the Book</a>
<ul class="sub-menu">
<li id="menu-item-164" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-164"><a href="http://www.birds.cornell.edu/clementschecklist/about/preface/">Preface to the 6th Edition</a></li>
<li id="menu-item-171" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-171"><a href="http://www.birds.cornell.edu/clementschecklist/about/purchasing/">Purchasing</a></li>
<li id="menu-item-165" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-165"><a href="http://www.birds.cornell.edu/clementschecklist/about/methods/">Methods</a></li>
</ul>
</li>
<li id="menu-item-43" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-43"><a href="http://www.birds.cornell.edu/clementschecklist/jamesclements/"><NAME></a></li>
<li id="menu-item-174" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-has-children menu-item-174"><a href="http://www.birds.cornell.edu/clementschecklist/updateindex/">Latest Updates</a>
<ul class="sub-menu">
<li id="menu-item-746" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-746"><a href="http://www.birds.cornell.edu/clementschecklist/august-2018/">August 2018</a></li>
<li id="menu-item-680" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-680"><a href="http://www.birds.cornell.edu/clementschecklist/august-2017/">August 2017</a></li>
<li id="menu-item-641" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-641"><a href="http://www.birds.cornell.edu/clementschecklist/overview-august-2016/">August 2016</a></li>
<li id="menu-item-528" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-528"><a href="http://www.birds.cornell.edu/clementschecklist/overview-august-2015/">August 2015</a></li>
<li id="menu-item-480" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-480"><a href="http://www.birds.cornell.edu/clementschecklist/2014-overview/">August 2014</a></li>
<li id="menu-item-365" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-365"><a href="http://www.birds.cornell.edu/clementschecklist/overview-august-2013/">August 2013</a></li>
<li id="menu-item-20" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-20"><a href="http://www.birds.cornell.edu/clementschecklist/updateindex/sep12overview/">September 2012</a></li>
<li id="menu-item-35" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-35"><a href="http://www.birds.cornell.edu/clementschecklist/updateindex/23aug2011overview/">August 2011</a></li>
<li id="menu-item-170" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-170"><a href="http://www.birds.cornell.edu/clementschecklist/updateindex/feb11overview/">February 2011</a></li>
<li id="menu-item-169" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-169"><a href="http://www.birds.cornell.edu/clementschecklist/updateindex/dec09overview/">December 2009</a></li>
<li id="menu-item-168" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-168"><a href="http://www.birds.cornell.edu/clementschecklist/updateindex/dec08overview/">December 2008</a></li>
<li id="menu-item-167" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-167"><a href="http://www.birds.cornell.edu/clementschecklist/updateindex/oct07overview/">October 2007</a></li>
<li id="menu-item-166" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-166"><a href="http://www.birds.cornell.edu/clementschecklist/updateindex/july07/">July 2007</a></li>
</ul>
</li>
<li id="menu-item-161" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-161"><a href="http://www.birds.cornell.edu/clementschecklist/download/">Downloadable Checklist</a></li>
<li id="menu-item-163" class="menu-item menu-item-type-post_type menu-item-object-page menu-item-163"><a href="http://www.birds.cornell.edu/clementschecklist/contact/">Contact</a></li>
</ul></div> </div><!-- #menu-wrapper -->
</div><!-- .inner_section -->
</div><!-- #nav_section -->
<div id="mobile_nav_section" class="mobile_device">
<div class="clearfix">
<div class="inner_section mobile_nav_container">
<a href="http://www.birds.cornell.edu/clementschecklist/" id="mobile_home_link"
class='mobile_nav_link'
> </a>
<a href="#" id="mobile_sections_link"
class='mobile_nav_link active' >Website Sections</a>
<a href="#" id="mobile_search_link"
class='mobile_nav_link' >Search</a>
</div>
</div>
</div>
<div id="mobile_sections" class="mobile_device">
<div class="clearfix">
<ul class="mobile_sections_nav">
<div class="mobile-menu-main-menu-container"><ul id="menu-main-menu-1" class="menu"><li class="menu-item menu-item-home"><a href="http://www.birds.cornell.edu/clementschecklist/" title="Home">Home</a></li><li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-has-children menu-item-8"><a href="http://www.birds.cornell.edu/clementschecklist/about/">About the Book</a>
<ul class="sub-menu">
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-164"><a href="http://www.birds.cornell.edu/clementschecklist/about/preface/">Preface to the 6th Edition</a></li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-171"><a href="http://www.birds.cornell.edu/clementschecklist/about/purchasing/">Purchasing</a></li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-165"><a href="http://www.birds.cornell.edu/clementschecklist/about/methods/">Methods</a></li>
</ul>
</li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-43"><a href="http://www.birds.cornell.edu/clementschecklist/jamesclements/"><NAME></a></li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-has-children menu-item-174"><a href="http://www.birds.cornell.edu/clementschecklist/updateindex/">Latest Updates</a>
<ul class="sub-menu">
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-746"><a href="http://www.birds.cornell.edu/clementschecklist/august-2018/">August 2018</a></li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-680"><a href="http://www.birds.cornell.edu/clementschecklist/august-2017/">August 2017</a></li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-641"><a href="http://www.birds.cornell.edu/clementschecklist/overview-august-2016/">August 2016</a></li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-528"><a href="http://www.birds.cornell.edu/clementschecklist/overview-august-2015/">August 2015</a></li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-480"><a href="http://www.birds.cornell.edu/clementschecklist/2014-overview/">August 2014</a></li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-365"><a href="http://www.birds.cornell.edu/clementschecklist/overview-august-2013/">August 2013</a></li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-20"><a href="http://www.birds.cornell.edu/clementschecklist/updateindex/sep12overview/">September 2012</a></li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-35"><a href="http://www.birds.cornell.edu/clementschecklist/updateindex/23aug2011overview/">August 2011</a></li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-170"><a href="http://www.birds.cornell.edu/clementschecklist/updateindex/feb11overview/">February 2011</a></li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-169"><a href="http://www.birds.cornell.edu/clementschecklist/updateindex/dec09overview/">December 2009</a></li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-168"><a href="http://www.birds.cornell.edu/clementschecklist/updateindex/dec08overview/">December 2008</a></li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-167"><a href="http://www.birds.cornell.edu/clementschecklist/updateindex/oct07overview/">October 2007</a></li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-166"><a href="http://www.birds.cornell.edu/clementschecklist/updateindex/july07/">July 2007</a></li>
</ul>
</li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-161"><a href="http://www.birds.cornell.edu/clementschecklist/download/">Downloadable Checklist</a></li>
<li class="menu-item menu-item-type-post_type menu-item-object-page menu-item-163"><a href="http://www.birds.cornell.edu/clementschecklist/contact/">Contact</a></li>
</ul></div> </ul>
</div>
</div>
<div id="mobile_search" class="mobile_device">
<div class="clearfix">
<form method="get" action="http://www.birds.cornell.edu/clementschecklist/">
<div id="mobile_search_wrapper" >
<input type="text" id="mobile_search_input" name="s" />
</div>
<div id="mobile_expandable_wrap">
<div id="mobile_search_btn_wrap">
<input type="submit" id="mobile_search_btn" value="" />
</div>
</div>
</form>
</div>
</div>
<div id="breadcrumb_section"><div class="inner_section"><a href="http://www.birds.cornell.edu/clementschecklist">Home</a> » Updates &amp; Corrections – August 2018</div></div>
<div id="content_section" class="cmfix">
<div class="inner_section clearfix">
<div id="content_area">
<div class="standard_wrap">
<h3 class="page-title">Updates &amp; Corrections – August 2018</h3>
<div class="page-content">
<p><strong>To accompany the eBird/Clements Checklist v2018 spreadsheet<br />
Posted 6 August 2018</strong></p>
<p>The Updates and Corrections are grouped into four sections. Within each section, items are listed in the order in which they are encountered in the eBird/Clements Checklist v2018 spreadsheet, although we also continue to reference by page number the relevant entry in the last published edition of the Clements Checklist (6th, 2007).</p>
<p>The four sections are</p>
<p>1 <strong>Species</strong> – gains and losses (posted 14 August 2018)</p>
<p>2 <strong>Orders and</strong> <strong>Families</strong> – gains, losses, and changes to order or family composition or nomenclature (posted 14 August 2018)</p>
<p>3 <strong>Standard Updates and Corrections</strong> – all other changes, listed in sequence as they occur in the spreadsheet (incomplete documentation posted 14 August 2018)</p>
<p>4 <strong>Groups</strong> – a list of new groups (posted 14 August 2018)</p>
<p><strong>SPECIES</strong></p>
<p><strong>SPECIES GAINS (splits and newly recognized species)</strong></p>
<p><strong> </strong>page 29,<strong> Comb Duck <em>Sarkidiornis melanotos</em></strong></p>
<p>Comb Duck <em>Sarkidiornis melanotos</em> is split into two monotypic species (Kear 2005): Knob-billed Duck <em>Sarkidiornis melanotos</em> and Comb Duck <em>Sarkidiornis sylvicola</em>.</p>
<p>Reference:</p>
<p><NAME> (editor). 2005. Ducks, geese and swans. Oxford University Press, Oxford, United Kingdom.</p>
<p><strong> </strong></p>
<p>page 30, <strong>Mallard <em>Anas platyrhynchos</em></strong></p>
<p>The monotypic group Mallard (Mexican) <em>Anas platyrhynchos diazi</em> is elevated to species rank as Mexican Duck <em>Anas diazi</em>. Genetic divergence of Mexican Duck from Mallard is at comparable levels to other similar taxa that are recognized as species, such as American Black Duck <em>Anas rubripes</em> and Mottled Duck <em>Anas fulvigula</em> (McCracken et al. 2001, Lavretsky et al. 2014a, 2014b, 2015); and hybridization between Mallard and Mexican Duck has not been demonstrated to occur at higher levels than between Mallard and other species in the Mallard complex. Revise the range description for Mexican Duck from “S Texas, New Mexico and Arizona south to central Mexico” to “southeastern Arizona, southern New Mexico, and western Texas (Trans-Pecos region) south to central Mexico.”</p>
<p>References:</p>
<p><NAME>., <NAME>, and <NAME>. 2014a. Rapid radiation and hybridization contribute to weak differentiation and hinder phylogenetic inferences in the New World Mallard complex (<em>Anas</em> spp.). Auk 131: 524-538.</p>
<p><NAME>., <NAME>, and <NAME>. 2014b. Phylogenetics of a recent radiation in the mallards and allies (Aves: <em>Anas</em>): inferences from a genomic transect and the multispecies coalescent. Molecular Phylogenetics and Evolution 70: 402-411.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>., <NAME>, and <NAME>. 2015. Speciation genomics and a role for the Z chromosome in the early stages of divergence between Mexican ducks and mallards. Molecular Ecology 24: 5364–5378.</p>
<p><NAME>., <NAME>, and <NAME>. 2001. Molecular population genetics, phylogeography, and conservation biology of the mottled duck (<em>Anas fulvigula</em>). Conservation Genetics 2: 87-102.</p>
<p> </p>
<p>page 33, <strong>White-winged Scoter <em>Melanitta fusca</em></strong></p>
<p>White-winged Scoter <em>Melanitta fusca</em> is split into two species, following Livezey (1995), Sangster et al. (2005), and Collinson et al. (2006): a monotypic Velvet Scoter <em>Melanitta fusca</em>, and polytypic White-winged Scoter <em>Melanitta deglandi</em>, including subspecies <em>deglandi</em> and <em>stejnegeri</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2006. <a href="https://britishbirds.co.uk/wp-content/uploads/article_files/V99/V99_N04/V99_N04_P183_201_A002.pdf">Species limits within the genus <em>Melanitta</em>, the scoters</a>. British Birds 99: 183-201.</p>
<p><NAME>. 1995. <a href="https://sora.unm.edu/sites/default/files/journals/condor/v097n01/p0233-p0255.pdf">Phylogeny and evolutionary ecology of modern seaducks (Anatidae: Mergini)</a>. Condor 97: 233-255.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2005. Taxonomic recommendations for British birds: third report. Ibis 147: 821-826.</p>
<p> </p>
<p>page 33, <strong>Ruddy Duck <em>Oxyura jamaicensis</em></strong></p>
<p>In accord with AOS-NACC (AOU 1998), Ruddy Duck <em>Oxyura jamaicensis</em> is split into two species, a monotypic Ruddy Duck <em>Oxyura jamaicensis</em> and a polytypic Andean Duck <em>Oxyura ferruginea</em> (with subspecies <em>andina</em> and <em>ferruginea</em>).</p>
<p>Reference:</p>
<p>American Ornithologists’ Union. 1998. Check-list of North American birds. Seventh edition. American Ornithologists’ Union, Washington, DC.</p>
<p> </p>
<p>page addition (2018), <strong>Spotted Green Pigeon <em>Caloenas maculata</em></strong></p>
<p>Insert Spotted Green Pigeon <em>Caloenas maculata</em> immediately following Nicobar Pigeon <em>Caloenas nicobarica</em>, following Heupink et al. (2014). This is an extinct species, which is known from only two specimens (one of which now is lost) and one contemporaneous illustration, all apparently from the mid or late 18th century (van Grouw 2013). Its former distribution is not known, but it probably was native to an island or island group in the South Pacific. The date of its extinction also is unknown.</p>
<p>References:</p>
<p><NAME>, H. 2014. <a href="http://boc-online.org/bulletins/downloads/BBOC1344-vanGrouw.pdf">The Spotted Green Pigeon <em>Caloenas maculata</em>: as dead as a Dodo, but what else do we know about it?</a> Bulletin of the British Ornithologists’ Club 134: 291-301.</p>
<p>Heupink, T.H., H, <NAME>, and <NAME>. 2014. <a href="https://bmcevolbiol.biomedcentral.com/track/pdf/10.1186/1471-2148-14-136?site=bmcevolbiol.biomedcentral.com">The mysterious Spotted Green Pigeon and its relation to the Dodo and its kindred</a>. BMC Evolutionary Biology 14: 136.</p>
<p> </p>
<p>page 121, <strong>Southern Crowned-Pigeon <em>Goura scheepmakeri</em></strong></p>
<p>The two subspecies of Southern Crowned-Pigeon <em>Goura scheepmakeri</em> are not each other’s closest relatives: subspecies <em>sclaterii</em> is sister to Western Crowned-Pigeon <em>Goura cristata</em>, but nominate <em>scheepmakeri</em> is sister to Victoria Crowned-Pigeon <em>Goura victoria</em> (Bruxaux et al. 2017). Consequently Southern Crowned-Pigeon is split into two monotypic species: Sclater’s Crowned-Pigeon <em>Goura sclaterii</em>, and Scheepmaker’s Crowned-Pigeon <em>Goura scheepmakeri</em>.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2018. Recovering the evolutionary history of crowned pigeons (Columbidae: <em>Goura</em>): implications for the biogeography and conservation of New Guinea lowland birds. Molecular Phylogenetics and Evolution 120: 248-258.</p>
<p> </p>
<p>page 209, <strong>Wedge-billed Hummingbird <em>Schistes geoffroyi</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop774.htm">Proposal 774</a>), we split Wedge-billed Hummingbird <em>Schistes geoffroyi</em> into two monotypic species, following del Hoyo and Collar (2014) and Donegan et al. (2015): White-throated Wedgebill <em>Schistes albogularis</em> and Geoffroy’s Wedgebill <em>Schistes geoffroyi</em>. These English names are provisional, as AOS-SACC has not yet adopted names for either species.</p>
<p>Revise the range description of White-throated Wedgebill from “Western and Central Andes of Colombia and w Ecuador” to “both slopes of Western Andes and west slope of Central Andes of Colombia and west slope of Andes of western Ecuador”.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2015. <a href="http://www.proaves.org/wp-content/uploads/2015/12/Listado-y-Splits-Conservacion-Colombiana-23-3-48.pdf">Revision of the status of bird species occurring in Colombia, with discussion of BirdLife International’s new taxonomy</a>. Conservación Colombiana 23: 3-48.</p>
<p>del <NAME>., and <NAME>. 2014. HBW and BirdLife International illustrated checklist of the birds of the world. Volume 1. Lynx Edicions, Barcelona.</p>
<p> </p>
<p>page 204, <strong>White-tailed Hillstar <em>Urochroa leucura</em></strong></p>
<p>We split White-tailed Hillstar <em>Urochroa leucura</em> into two monotypic species, based on AOS-SACC <a href="http://www.museum.lsu.edu/~Remsen/SACCprop775.htm">Proposal 775</a>, and following del Hoyo and Collar (2014) and Donegan et al. (2015): Rufous-gaped Hillstar <em>Urochroa bougueri</em> and White-tailed Hillstar <em>Urochroa leucura</em>. These English names are provisional, as AOS-SACC has not yet adopted names for either species.</p>
<p>Revise the range description for White-tailed Hillstar from “Andes of s Colombia (Nariño) to e Ecuador and ne Peru” to “east slope of Andes from southern Colombia (Nariño) to eastern Ecuador (south at least to Morona-Santiago) and northern Peru (Amazonas, San Martín)”.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2015. <a href="http://www.proaves.org/wp-content/uploads/2015/12/Listado-y-Splits-Conservacion-Colombiana-23-3-48.pdf">Revision of the status of bird species occurring in Colombia, with discussion of BirdLife International’s new taxonomy</a>. Conservación Colombiana 23: 3-48.</p>
<p>del Hoyo, J., and <NAME>. 2014. HBW and BirdLife International illustrated checklist of the birds of the world. Volume 1. Lynx Edicions, Barcelona.</p>
<p> </p>
<p>page addition (2017), <strong>Gray-breasted Sabrewing <em>Campylopterus largipennis</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/%7ERemsen/SACCprop756.htm">Proposal 756</a>), the monotypic group Gray-breasted Sabrewing (Dry Forest) <em>Campylopterus largipennis calcirupicola</em> is recognized as a separate species, Dry-forest Sabrewing <em>Campylopterus calcirupicola</em>. This English name is provisional, as AOS-SACC has not yet adopted a name for this species.</p>
<p> </p>
<p>page 166, <strong>Vermiculated Screech-Owl <em>Megascops guatemalae</em></strong></p>
<p>Vermiculated Screech-Owl <em>Megascops guatemalae</em> is split into three species, following Ridgely and Greenfield (2001), Dantas et al. (2016), and Krabbe (2017); see also AOS-SACC <a href="http://www.museum.lsu.edu/~Remsen/SACCprop771.htm">Proposal 771</a>: Middle American Screech-Owl <em>Megascops guatemalae</em>, including subspecies <em>hastatus</em>, <em>cassini</em>, <em>fuscus</em>, <em>thompsoni</em>, <em>guatemalae</em>, <em>dacrysistactus</em>, and <em>vermiculatus</em>; Choco Screech-Owl <em>Megascops centralis</em>; and Foothill Screech-Owl <em>Megascops roraimae</em>, including subspecies <em>roraimae</em>, <em>pallidus</em>, and <em>napensis</em>.</p>
<p>Change the English name of the polytypic group <em>Megascops guatemalae</em> [<em>guatemalae</em> Group] from Vermiculated Screech-Owl (Guatemalan) to Middle American Screech-Owl (Middle American).</p>
<p>Subspecies <em>tomlini</em>, with range “NW Mexico (se Sonora and sw Chihuahua to Sinaloa)”, is considered to be a junior synonym of <em>hastatus</em> (Marshall 1967), and is deleted. Revise the range description of <em>hastatus</em> from “W Mexico (sw Sinaloa to Oaxaca)” to “western Mexico (southeastern Sonora to Oaxaca)”.</p>
<p>Change the English name of the monotypic group <em>Megascops guatemalae vermiculatus</em> from Vermiculated Screech-Owl (Vermiculated) to Middle American Screech-Owl (Vermiculated).</p>
<p>Change the names of the monotypic group Vermiculated Screech-Owl (Roraima) <em>Megascops guatemalae roraimae</em> to Foothill Screech-Owl (Roraima) <em>Megascops roraimae roraimae</em>.</p>
<p>Change the names of the polytypic group Vermiculated Screech-Owl (Foothill) <em>Megascops guatemalae</em> [<em>napensis</em> Group] to Foothill Screech-Owl (Foothill) <em>Megascops roraimae</em> [<em>napensis</em> Group].</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2016. Molecular systematics of the New World screech-owls (<em>Megascops</em>: Aves, Strigidae): biogeographic and taxonomic implications. Molecular Phylogenetics and Evolution 94: 626-634.</p>
<p><NAME>. 2017. <a href="http://asociacioncolombianadeornitologia.org/wp-content/uploads/2017/12/16eA08.pdf">A new species of <em>Megascops</em> (Strigidae) from the Sierra Nevada de Santa Marta, Colombia, with notes on voices of New World screech-owls</a>. Ornitología Colombiana 16: eA08-1-27.</p>
<p><NAME>. 1967. Parallel variation in North and Middle American screech-owls. Monographs of the Western Foundation of Vertebrate Zoology 1.</p>
<p><NAME>., and <NAME>. 2001. The birds of Ecuador: status, distribution, and taxonomy. Cornell University Press, Ithaca, New York.</p>
<p> </p>
<p>page addition (2018), <strong>Santa Marta Screech-Owl <em>Megascops gilesi</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop769.htm">Proposal 769</a>), we add a recently described species, Santa Marta Screech-Owl <em>Megascops gilesi</em> Krabbe 2017, following Dantas et al. (2016) and Krabbe (2017), with range “northern Colombia (Sierra Nevada de Santa Marta)”. Position Santa Marta Screech-Owl to immediately follow Eastern Screech-Owl <em>Megascops asio</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2016. Molecular systematics of the New World screech-owls (<em>Megascops</em>: Aves, Strigidae): biogeographic and taxonomic implications. Molecular Phylogenetics and Evolution 94: 626-634.</p>
<p><NAME>. 2017. <a href="http://asociacioncolombianadeornitologia.org/wp-content/uploads/2017/12/16eA08.pdf">A new species of <em>Megascops</em> (Strigidae) from the Sierra Nevada de Santa Marta, Colombia, with notes on voices of New World screech-owls</a>. Ornitología Colombiana 16: eA08-1-27.</p>
<p> </p>
<p>page 256, <strong>Blond-crested Woodpecker <em>Celeus flavescens</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/%7ERemsen/SACCprop742.htm">Proposals 742</a> and <a href="http://www.museum.lsu.edu/%7ERemsen/SACCprop793.htm">793</a>), the monotypic group Blond-crested Woodpecker (Ochre-backed) <em>Celeus flavescens ochraceus</em> is recognized as a separate species, Ochre-backed Woodpecker <em>Celeus ochraceus</em>, following Benz and Robbins (2011) and Benz et al. (2015). Reposition Ochre-backed Woodpecker to immediately follow Kaempfer’s Woodpecker <em>Celeus obrieni</em>. Revise the range description from “Lower Amazonian and e Brazil south to e Bahia” to “lower Amazonian Brazil (south of the Amazon in Pará); northeastern Brazil, from Maranhão south to northern Goías and east to Ceará, Pernambuco, and western Bahia”.</p>
<p>With the removal of the group Blond-crested Woodpecker (Ochre-backed) <em>Celeus flavescens ochraceus</em> from Blond-crested Woodpecker, the polytypic group Blond-crested Woodpecker (Blond-crested) <em>Celeus flavescens flavescens</em>/<em>intercedens</em> no longer is necessary, and is deleted.</p>
<p>References:</p>
<p><NAME>., and <NAME>. 2011. Molecular phylogenetics, vocalizations, and species limits in <em>Celeus</em> Woodpeckers (Aves: Picidae). Molecular Phylogenetics and Evolution 61: 29–44.</p>
<p><NAME>., <NAME>, and <NAME>. 2015. Phylogenetic relationships of the Helmeted Woodpecker (<em>Dryocopus galeatus</em>): a case of interspecific mimicry? Auk 132: 938-950.</p>
<p> </p>
<p>page 293, <strong>Russet Antshrike <em>Thamnistes anabatinus</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop758.htm">Proposal 758</a>), subspecies <em>rufescens</em> of Russet Antshrike <em>Thamnistes anabatinus</em> is recognized as a separate species, Rufescent Antshrike <em>Thamnistes rufescens</em>, following Isler and Whitney (2017). Position Rufescent Antshrike immediately following Russet Antshrike.</p>
<p>Change the scientific name of the group Russet Antshrike (Andean) from Russet Antshrike (Andean) <em>Thamnistes anabatinus</em> [<em>aequatorialis</em> Group] to <em>Thamnistes anabatinus aequatorialis</em>/<em>gularis</em>.</p>
<p>Reference:</p>
<p><NAME>., and <NAME>. 2017. Species limits in the genus <em>Thamnistes</em> (Aves: Passeriformes: Thamnophilidae): an evaluation based on vocalizations. Zootaxa 4291: 192–200.</p>
<p> </p>
<p>page addition (2018), <strong>Cordillera Azul Antbird <em>Myrmoderus eowilsoni</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop763.htm">Proposal 763</a>), we recognize a newly described species, Cordillera Azul Antbird <em>Myrmoderus eowilsoni</em> Moncrieff, Johnson, Lane, Beck, Angulo, and Fagan 2018, following Moncrieff et al. (2018), with range “northern Peru: local on ridge crests of Cordillera Azul (eastern San Martín)”. Insert Cordillera Azul Antbird immediately following Ferruginous-backed Antbird <em>Myrmoderus ferrugineus</em>.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2018. A new species of antbird (Passeriformes: Thamnophilidae) from the Cordillera Azul, San Martín, Peru. Auk 135: 114-126.</p>
<p> </p>
<p>page 305, <strong>Thrush-like Antpitta <em>Myrmothera campanisona</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop785.htm">Proposal 785</a>), Thrush-like Antpitta <em>Myrmothera campanisona</em> is split into two species, following Carneiro et al. (2018; see also Krabbe and Schulenberg 2003): a polytypic Thrush-like Antpitta <em>Myrmothera campanisona</em>, composed of subspecies <em>modesta</em>, <em>dissors</em>, <em>campanisona</em>, <em>signata</em>, and <em>minor</em>; and a monotypic Tapajos Antpitta <em>Myrmothera subcanescens</em>. Revise the range description of Tapajos Antpitta from “N Brazil south of R. Amazon (R. Madeira to R. Tapajós)” to “Amazonian Brazil, south of Amazon, from the right (east) bank of the Rio Madeira to the upper Rio Xingu”.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2018. Molecular systematics and biogeography of lowland antpittas (Aves, Grallariidae): the role of vicariance and dispersal in the diversification of a widespread Neotropical lineage. Molecular Phylogenetics and Evolution 120: 375-389.</p>
<p><NAME>., and <NAME>. 2003. Family Formicariidae (ground-antbirds). Pages 682-731 in <NAME>, <NAME>, and <NAME> (editors), Handbook of the birds of the world. Volume 8. Lynx Edicions, Barcelona.</p>
<p> </p>
<p>page 307, <strong>Rusty-breasted Antpitta <em>Grallaricula ferrugineipectus</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop784.htm">Proposal 784</a>), Rusty-breasted Antpitta <em>Grallaricula ferrugineipectus</em> is split into two species, following Van Doren et al. (2018): a polytypic Rusty-breasted Antpitta <em>Grallaricula ferrugineipectus</em>, composed of subspecies <em>rara</em> and <em>ferrugineipectus</em>; and a monotypic Leymebamba Antpitta <em>Grallaricula leymebambae</em>. The English name for <em>Grallaricula leymebambae</em> is provisional, as AOS-SACC has not yet adopted a name for this species.</p>
<p>Reference:</p>
<p><NAME>, B.M., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2018. Species limits in the Rusty-breasted Antpitta (<em>Grallaricula ferrugineipectus</em>) complex. Wilson Journal of Ornithology 130: 152-167.</p>
<p> </p>
<p>page 281, <strong>Buff-throated Foliage-gleaner <em>Automolus ochrolaemus</em></strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2018), the monotypic group Buff-throated Foliage-gleaner (<em>exsertus</em>) <em>Automolus ochrolaemus exsertus</em> is split from Buff-throated Foliage-gleaner and is recognized as a separate species, Chiriqui Foliage-gleaner <em>Automolus exsertus</em>, following Freeman and Montgomery (2017).</p>
<p>References:</p>
<p>Chesser, R.T., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2018. <a href="http://www.bioone.org/doi/pdf/10.1642/AUK-18-62.1">Fifty-ninth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 135: 798-813.</p>
<p><NAME>., and <NAME>. 2017. Using song playback experiments to measure</p>
<p>species recognition between geographically isolated populations: a comparison with acoustic trait analyses. Auk 134: 857-870.</p>
<p> </p>
<p>page 324, <strong>Paltry Tyrannulet <em>Zimmerius vilissimus</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop741.htm">Proposal 741</a>), Paltry Tyrannulet <em>Zimmerius vilissimus</em> is split into four species, following Traylor (1982) and Rheindt et al. (2013). The monotypic group Paltry Tyrannulet (Paltry) <em>Zimmerius vilissimus vilissimus</em> now is recognized as Guatemalan Tyrannulet <em>Zimmerius vilissimus</em>; the monotypic group Paltry Tyrannulet (Mistletoe) <em>Zimmerius vilissimus parvus</em> becomes Mistletoe Tyrannulet <em>Zimmerius parvus</em>; the polytypic group Paltry Tyrannulet (Mountain) <em>Zimmerius vilissimus improbus/tamae</em> becomes Spectacled Tyrannulet <em>Zimmerius improbus</em>, including <em>tamae</em> as a subspecies; and the monotypic group Paltry Tyrannulet (Venezuelan) <em>Zimmerius vilissimus petersi</em> becomes Venezuelan Tyrannulet <em>Zimmerius petersi</em>.</p>
<p>Revise the range description for Guatemalan Tyrannulet from “S Mexico (Chiapas) to El Salvador” to “highlands of southern Mexico (Chiapas), Guatemala, and western El Salvador”.</p>
<p>Revise the range description for Mistletoe Tyrannulet from “Honduras to Panama and extreme nw Colombia (Chocó)” to “lowlands from eastern Guatemala, Belize, and Honduras south to Panama and extreme northwestern Colombia (Chocó)”.</p>
<p>Revise the range description for subspecies <em>tamae</em> from “Santa Marta Mountains (ne Colombia)” to “Santa Marta Mountains (northern Colombia) and the Sierra de Perijá (on the Colombia/Venezuela border)”. Reposition <em>tamae</em> to precede, rather than to follow, nominate <em>improbus</em>.</p>
<p>Revise the range description for nominate <em>improbus</em> from “Andes of n Colombia and Sierra de Perijá (w Venezuela)” to “Andes of northern Colombia (Norte de Santander) and of Venezuela”.</p>
<p>References:</p>
<p><NAME>., <NAME>, and <NAME>. 2013. Rampant polyphyly indicates cryptic diversity in a clade of Neotropical flycatchers (Aves: Tyrannidae). Biological Journal of the Linnean Society 108: 889–900.</p>
<p><NAME>.A., Jr. 1982. <a href="https://biodiversitylibrary.org/page/2765469">Notes on tyrant flycatchers (Aves: Tyrannidae)</a>. <a href="https://biodiversitylibrary.org/page/2765464">Fieldiana new series number 13</a>.</p>
<p> </p>
<p>page 317, <strong>Striped Manakin <em>Machaeropterus regulus</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop761.htm">Proposal 761</a>), Striped Manakin <em>Machaeropterus regulus</em> is split into two species, following Whittaker and Oren (1999) and Lane et al. (2017): a polytypic Striolated Manakin <em>Machaeropterus striolatus</em> (including subspecies <em>antioquiae</em>, <em>striolatus</em>, <em>obscurostriatus</em>, <em>zulianus</em>, and <em>aureopectus</em>); and a monotypic Kinglet Manakin <em>Machaeropterus regulus</em>.</p>
<p>Change the names of the polytypic group Striped Manakin (Western) <em>Machaeropterus regulus</em> [<em>striolatus</em> Group] to Striolated Manakin (Striolated) <em>Machaeropterus striolatus</em> [<em>striolatus</em> Group].</p>
<p>Change the names of the monotypic group Striped Manakin (<em>aureopectus</em>) <em>Machaeropterus regulus aureopectus</em> to Striolated Manakin (<em>aureopectus</em>) <em>Machaeropterus striolatus aureopectus</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, and <NAME>. 2017. A new species of manakin (Aves: Pipridae; <em>Machaeropterus</em>) from Peru with a taxonomic reassessment of the Striped Manakin (<em>M. regulus</em>) complex. Zootaxa 4320: 379–390.</p>
<p><NAME>., and <NAME>. 1999. <a href="https://biodiversitylibrary.org/page/40025516">Important ornithological records from the Rio Juruá, western Amazonia, including twelve additions to the Brazilian avifauna</a>. <a href="https://biodiversitylibrary.org/page/40025498">Bulletin of the British Ornithologists’ Club 119: 235–260</a>.</p>
<p> </p>
<p>page addition (2018), <strong>Painted Manakin <em>Machaeropterus eckelberryi</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop761.htm">Proposal 761</a>), we recognize a newly described species, Painted Manakin <em>Machaeropterus eckelberryi</em> Lane, Kratter, and O’Neill 2017 (Lane et al. 2017), with range “northern Peru: very local on ridgecrests of Andean foothills in eastern San Martín and southwestern Loreto”. Position Painted Manakin between Striolated Manakin <em>Machaeropterus striolatus</em> and Kinglet Manakin <em>Machaeropterus regulus</em>.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, and <NAME>. 2017. A new species of manakin (Aves: Pipridae; <em>Machaeropterus</em>) from Peru with a taxonomic reassessment of the Striped Manakin (<em>M. regulus</em>) complex. Zootaxa 4320: 379–390.</p>
<p> </p>
<p>page 516, <strong>Variegated Fairywren <em>Malurus lamberti</em></strong></p>
<p>Variegated Fairywren <em>Malurus lamberti</em> is split into two species: a monotypic Variegated Fairywren <em>Malurus lamberti</em>, and a polytypic Purple-backed Fairywren <em>Malurus assimilis</em>, including subspecies <em>rogersi</em>, <em>dulcis</em>, <em>assimilis</em>, and <em>bernieri</em>. The action is based on McLean et al. (2012, 2017a, 2017b).</p>
<p>Change the names of the polytypic group Variegated Fairywren (Lavender-flanked) <em>Malurus lamberti dulcis</em>/<em>rogersi</em> to Purple-backed Fairywren (Lavender-flanked) <em>Malurus assimilis dulcis</em>/<em>rogersi</em>.</p>
<p>Revise the range description of subspecies <em>rogersi</em> from “NE Northern Territory (Kimberley region)” to “northern Australia (Kimberley region in northeastern Western Australia)”.</p>
<p>Revise the range description of subspecies <em>dulcis </em>from “N Northern Territory (central Arnhem Land)” to “northern Australia (central Arnhem Land in northern Northern Territory)”.</p>
<p>Subspecies <em>bernieri</em>, formerly included (incorrectly!) in a polytypic group with Variegated Fairywren <em>Malurus lamberti</em>, instead is more closely related to nominate <em>assimilis</em>, and so joins a different group with that taxon. Change the names of the group Variegated Fairywren (Purple-flanked) <em>Malurus lamberti assimilis</em> to Purple-backed Fairywren (Purple-backed) <em>Malurus assimilis assimilis</em>/<em>bernieri</em>.</p>
<p>Revise the range description of subspecies <em>bernieri </em>from “Bernier Island, Western Australia” to “western Australia (Bernier Island, Western Australia)”.</p>
<p>Revise the range description of Variegated Fairywren from “SE Australia (Fitzroy R., Queensland to Bateman’s Bay, NSW)” to “coastal southeastern Australia (eastern New South Wales, and southeastern Queensland north to Gladstone)”.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2012. Speciation in chestnut-shouldered fairy-wrens (<em>Malurus</em> spp.) and rapid phenotypic divergence in variegated fairy-wrens (<em>Malurus lamberti</em>): a multilocus approach. Molecular Phylogenetics and Evolution 63: 668-678.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2017a. Reassessment of a possible case of intraspecific gene flow across Australia’s Great Dividing Range in the variegated fairy wren, <em>Malurus lamberti</em> (Aves: Maluridae), and its systematic consequences. Biological Journal of the Linnean Society 122: 210-233.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2017b. Phylogeography and geno-phenotypic discordance in a widespread Australian bird, the Variegated Fairy-wren, <em>Malurus lamberti</em> (Aves: Maluridae). Biological Journal of the Linnean Society 121: 655-669.</p>
<p> </p>
<p>page addition (2018), <strong>Rote Myzomela <em>Myzomela irianawidodoae</em></strong></p>
<p>We recognize a newly described species, Rote Myzomela <em>Myzomela irianawidodoae</em> Prawiradilaga, Baveja, Suparno, Ashari, Ng, Gwee, Verbelen, and Rheindt 2017 (Eaton et al. 2016, Prawiradilaga et al. 2017), with range “Rote, Lesser Sundas”. Position Rote Myzomela immediately following Sumba Myzomela <em>Myzomela dammermani</em>.</p>
<p>References:</p>
<p>Eaton, J.A., <NAME>, <NAME>, and <NAME>. 2016. Birds of the Indonesian Archipelago: Greater Sundas and Wallacea. Lynx Edicions, Barcelona.</p>
<p>Prawiradilaga, D.M., <NAME>, Suparno, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2017. <a href="http://e-journal.biologi.lipi.go.id/index.php/treubia/article/view/3414/2909">A colourful new species of <em>Myzomela</em> honeyeater from Rote Island in eastern Indonesia</a>. Treubia 44: 77-100.</p>
<p> </p>
<p>page 574, <strong>Mountain Sooty Boubou <em>Laniarius poensis</em></strong></p>
<p>The monotypic group Mountain Sooty Boubou (Ruwenzori) <em>Laniarius poensis holomelas</em> is split as a separate species, Albertine Boubou <em>Laniarius holomelas</em> (Voelker et al. 2010).</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2010. A new species of boubou (Malaconotidae: <em>Laniarius</em>) from the Albertine Rift. Auk 127: 678-689.</p>
<p> </p>
<p>page 623, <strong>Red-eyed Vireo <em>Vireo olivaceus</em></strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2018), Red-eyed Vireo <em>Vireo olivaceus</em> is split into two species, following Battey and Klicka (2017) (see also Slager et al. 2014): a monotypic Red-eyed Vireo <em>Vireo olivaceus</em>; and a polytypic Chivi Vireo <em>Vireo chivi</em>, including subspecies <em>caucae</em>, <em>griseobarbatus</em>, <em>pectoralis</em>, <em>solimoensis</em>, <em>vividior</em>, <em>tobagensis</em>, <em>agilis</em>, <em>diversus</em>, and <em>chivi</em>.</p>
<p>Change the names of the polytypic group Red-eyed Vireo (resident Chivi) <em>Vireo olivaceus</em> [<em>agilis</em> Group] to Chivi Vireo (resident) <em>Vireo chivi</em> [<em>agilis </em>Group].</p>
<p>Revise the range description of subspecies <em>pectoralis</em> from “N Peru (middle Marañón Valley)” to “middle Marañón Valley of extreme southern Ecuador (Zamora-Chinchipe) and northern Peru (Cajamarca, Amazonas, La Libertad)”.</p>
<p>Revise the range description of subspecies <em>solimoensis</em> from “W Amazonian Brazil to e Ecuador and ne Peru” to “western Amazon River and tributaries, from eastern Ecuador and northern Peru east to central Brazil (east to the west bank of Madeira)”. Reposition subspecies <em>solimoensis</em> to follow subspecies <em>tobagensis</em>.</p>
<p>Revise the range description of subspecies <em>agilis</em> from “Coastal ne Brazil (Pará to Rio de Janeiro)” to “eastern Amazon River and tributaries (west to the Madeira), and coastal northeastern and eastern Brazil (south to Rio de Janeiro)”.</p>
<p>Change the names of the polytypic group Red-eyed Vireo (migratory Chivi) <em>Vireo olivaceus chivi</em>/<em>diversus</em> to Chivi Vireo (migratory) <em>Vireo chivi chivi</em>/<em>diversus</em>.</p>
<p>References:</p>
<p><NAME>., and <NAME>. 2017. Cryptic speciation and gene flow in a migratory songbird species complex: insights from the Red-Eyed Vireo (<em>Vireo olivaceus</em>). Molecular Phylogenetics and Evolution 113: 67-75.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2018. <a href="http://www.bioone.org/doi/pdf/10.1642/AUK-18-62.1">Fifty-ninth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 135: 798-813.</p>
<p><NAME>., <NAME>, <NAME>., <NAME>, and <NAME>. 2014. A multilocus phylogeny of a major New World avian radiation: the Vireonidae. Molecular Phylogenetics and Evolution 80: 95-104.</p>
<p> </p>
<p>page 470, <strong>Rusty-bellied Fantail <em>Rhipidura teysmanni</em></strong></p>
<p>Rusty-bellied Fantail <em>Rhipidura teysmanni</em> is split into two species, following Ng et al. (2017b): a polytypic Sulawesi Fantail <em>Rhipidura teysmanni</em>, with subspecies <em>toradja</em> and <em>teysmanni</em>; and a monotypic Taliabu Fantail <em>Rhipidura sulaensis</em>.</p>
<p>Reference:</p>
<p>Ng, N.S.R., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2017b. The effects of Pleistocene climate change on biotic differentiation in a montane songbird clade from Wallacea. Molecular Phylogenetics and Evolution 114: 353-366.</p>
<p> </p>
<p>page 592, <strong>Eurasian Magpie <em>Pica pica</em></strong></p>
<p>Eurasian Magpie <em>Pica pica</em> is split into five species, following Song et al. (2018; see also Kryukov et al. 2017).</p>
<p>The monotypic group Eurasian Magpie (North African) <em>Pica pica mauritanica</em> becomes Maghreb Magpie <em>Pica mauritanica</em>.</p>
<p>The monotypic group Eurasian Magpie (Arabian) <em>Pica pica asirensis</em> becomes Asir Magpie <em>Pica asirensis</em>.</p>
<p>The monotypic group Eurasian Magpie (Black-rumped) <em>Pica pica bottanensis</em> becomes Black-rumped Magpie <em>Pica bottanensis</em>.</p>
<p>Subspecies <em>serica</em>, which we formerly included in the polytypic group Eurasian Magpie (Eurasian) <em>Pica pica</em> [<em>pica</em> Group], is recognized as a species, Oriental Magpie <em>Pica serica</em>. Revise the range description of <em>serica</em> from “S China to Myanmar, Indochina, Hainan and Taiwan” to “southeastern Russia, northeastern China, and Korea south through eastern China, Taiwan, and Hainan to northern Myanmar, northern Laos, and northern Vietnam”.</p>
<p>Remaining as subspecies of Eurasian Magpie are Eurasian Magpie (Iberian) <em>Pica pica melanotos</em>; the members of the pica Group: <em>pica</em>, <em>fennorum</em>, <em>bactriana</em>, and <em>leucoptera</em>; and the monotypic group Eurasian Magpie (Kamchatkan) <em>Pica pica camtschatica</em>. Subspecies <em>galliae</em>, with range “W Europe to Balkans”, is considered to be a junior synonym of nominate <em>pica</em> (Cramp and Perrins 1994), and is deleted. Revise the range description of nominate <em>pica</em> from “British Isles, s Scandinavia, central and e Europe to Asia Minor” to “Europe, from the British Isles, France, and southern Scandinavia to eastern Europe and Asia Minor”. Subspecies <em>hemileucoptera</em>, with range “W and s Siberia to Outer Mongolia”, is considered to be a junior synonym of <em>bactriana</em> (Cramp and Perrins 1994), and is deleted. Revise the range description of <em>bactriana</em> from “Central Russia to n India and w Tibet” to “western and southern Siberia (east to Lake Baikal) and central Asia, south to the Caucasus region east to Pakistan and northwestern India”.</p>
<p>References:</p>
<p><NAME>., and <NAME> (editors). 1994. Handbook of the birds of Europe, the Middle East, and North Africa. The birds of the Western Palearctic. Volume 8. Oxford University Press, New York and Oxford, United Kingdom.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>’kin, <NAME>, <NAME>, and <NAME>. 2017. Deep phylogeographic breaks in Magpie <em>Pica pica</em> across the Holarctic: concordance with bioacoustics and phenotypes. Zoological Science 34: 185-200.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2018. Complete taxon sampling of the avian genus <em>Pica</em> (magpies) reveals ancient relictual populations and synchronous Late-Pleistocene demographic expansion across the Northern Hemisphere. Journal of Avian Biology 49: e01612.</p>
<p> </p>
<p>page 385, <strong>Firecrest <em>Regulus ignicapilla</em></strong></p>
<p>The monotypic group Firecrest (Madeira) <em>Regulus ignicapilla madeirensis</em> is recognized as a separate species, Madeira Firecrest <em>Regulus madeirensis</em>, following Päckert et al. (2001, 2003) and Sangster et al. (2005). Reposition Madeira Firecrest to immediately follow Goldcrest <em>Regulus regulus</em>.</p>
<p>The polytypic group Firecrest (European) <em>Regulus ignicapilla ignicapilla</em>/<em>balearicus</em> no longer is necessary, and is dissolved. Change the English name of <em>Regulus ignicapilla</em> from Firecrest to Common Firecrest.</p>
<p>References:</p>
<p><NAME>., <NAME>, and <NAME>. 2001. Lautäußerungen der Sommergoldhähnchen von den Inseln Madeira und Mallorca (<em>Regulus ignicapillus madeirensis</em>, <em>R. i. balearicus</em>). Journal für Ornithologie 142: 16-29.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2003. Phylogenetic signal in the song of crests and kinglets (Aves: <em>Regulus</em>). Evolution 53: 616-629.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2005. Taxonomic recommendations for British birds: third report. Ibis 147: 821-826.</p>
<p> </p>
<p>page 428, <strong>Russet Bush Warbler <em>Locustella mandelli</em></strong></p>
<p>Subspecies <em>idonea</em> is elevated to species rank as Dalat Bush Warbler <em>Locustella idonea</em>, following Alström et al. (2015b). Revise the range description from “Langbian Plateau (Vietnam)” to “highlands of central and southern Vietnam (Kon Tum and Da Lat plateaus)”.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2015b. <a href="https://avianres.biomedcentral.com/track/pdf/10.1186/s40657-015-0016-z?site=avianres.biomedcentral.com">Integrative taxonomy of the Russet Bush Warbler <em>Locustella mandelli</em> complex reveals a new species from central China</a>. Avian Research 6: 9.</p>
<p> </p>
<p>page 448, <strong>Henna-tailed Jungle-Flycatcher <em>Cyornis colonus</em></strong></p>
<p>Henna-tailed Jungle-Flycatcher <em>Cyornis colonus</em> is split into two monotypic species, following Garg et al. (2018): Banggai Jungle-Flycatcher <em>Cyornis pelingensis</em> and Sula Jungle-Flycatcher <em>Cyornis colonus</em>. Subspecies <em>subsolanus</em>, known from a single specimen (now lost) from Sulawesi, is believed to refer to a mislabelled specimen of nominate <em>colonus</em> (Eaton et al. 2016), and is deleted.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2016. Birds of the Indonesian Archipelago: Greater Sundas and Wallacea. Lynx Edicions, Barcelona.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2018. Pleistocene land bridges act as semipermeable agents of avian gene flow in Wallacea. Molecular Phylogenetics and Evolution 125: 196-203.</p>
<p> </p>
<p>page 411, <strong>Slaty Thrush <em>Turdus nigriceps</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop786.htm">Proposal 786</a>), Slaty Thrush <em>Turdus nigriceps</em> is split into two monotypic species, following Ridgely and Tudor (1989), Voelker et al. (2007), Nylander et al. (2008), Cerqueira et al. (2016), and Avendaño et al. (2017): Andean Slaty Thrush <em>Turdus nigriceps</em> and Eastern Slaty Thrush <em>Turdus subalaris</em>. These English names are provisional, as AOS-SACC has not yet adopted names for either species.</p>
<p>Revise the range description of Andean Slaty Thrush from “Andes of s Ecuador to Peru, Bolivia and nw Argentina” to “breeds in the Andes of southern Bolivia (north to Santa Cruz) and northwestern Argentina (south to Córdoba), also locally in southwestern Ecuador and northwestern Peru; nonbreeding migrant to the east slope of the Andes from southeastern Ecuador to Bolivia”.</p>
<p>Revise the range description of Eastern Slaty Thrush from “S Brazil (Goiás, Mato Grosso, Paraná) to Paraguay, ne Argentina” to “breeds southernmost Brazil (north to Paraná), northeastern Argentina, and southeastern Paraguay; winters north to south central Brazil (north to Mato Grosso and Goiás)”.</p>
<p>References:</p>
<p><NAME>., <NAME>, and <NAME>. 2017. On the importance of geographic and taxonomic sampling in phylogeography: a reevaluation of diversification and species limits in a Neotropical thrush (Aves, Turdidae). Molecular Phylogenetics and Evolution 111: 87–97.</p>
<p><NAME>., <NAME>, and <NAME>. 2016. Phylogeography, inter-specific limits and diversification of <em>Turdus ignobilis</em> (Aves: Turdidae). Molecular Phylogenetics and Evolution 97: 177–186.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2008. Accounting for phylogenetic uncertainty in biogeography: a Bayesian approach to dispersal-vicariance analysis of the thrushes (Aves: <em>Turdus)</em>. Systematic Biology 57: 257-268.</p>
<p><NAME>., and <NAME>. 1989. The birds of South America. Volume I. University of Texas Press, Austin, Texas.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2007. Molecular systematics of a speciose, cosmopolitan songbird genus: defining the limits of, and relationships among, the <em>Turdus</em> thrushes. Molecular Phylogenetics and Evolution 42: 422-434.</p>
<p> </p>
<p>page 409, <strong>Island Thrush <em>Turdus poliocephalus</em></strong></p>
<p>The monotypic group Island Thrush (Taiwan) <em>Turdus poliocephalus niveiceps</em> is recognized as a separate species, Taiwan Thrush <em>Turdus niveiceps</em>, following Nylander et al. (2008).</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2008. Accounting for phylogenetic uncertainty in biogeography: a Bayesian approach to dispersal-vicariance analysis of the thrushes (Aves: <em>Turdus)</em>. Systematic Biology 57: 257-268.</p>
<p> </p>
<p>page 365, <strong>Yellowish Pipit <em>Anthus lutescens</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop765.htm">Proposal 765</a>), the monotypic group Yellowish Pipit (Peruvian) <em>Anthus lutescens peruvianus</em> is recognized as a separate species, Peruvian Pipit <em>Anthus peruvianus</em>, following van Els and Norambuena (2018). Accordingly, the formerly recognized polytypic group Yellowish Pipit (Yellowish) <em>Anthus lutescens lutescens</em>/<em>parvus</em> no longer is needed, and is deleted.</p>
<p>Reference:</p>
<p>van Els, P., and <NAME>. 2018. A revision of species limits in Neotropical pipits <em>Anthus</em> based on multilocus genetic and vocal data. Ibis 160: 158-172.</p>
<p> </p>
<p>page 691, <strong>Blue-black Grosbeak <em>Cyanoloxia cyanoides</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop736.htm">Proposal 736</a>), Blue-black Grosbeak is split into two species, following Bryson et al. (2014) and García et al. (2016): a polytypic Blue-black Grosbeak <em>Cyanoloxia cyanoides</em> (with subspecies <em>concreta</em>, <em>caerulescens</em>, and <em>cyanoides</em>); and a monotypic Amazonian Grosbeak <em>Cyanoloxia rothschildii</em>.</p>
<p>Revise the range description of nominate <em>cyanoides</em> from “Cent. and e Panama to Colombia, nw Venezuela and w Ecuador” to “central Panama south, west of the Andes, to extreme northwestern Peru (Tumbes, Piura), and east across northern Colombia (south to Norte de Santander) and northwestern and northern Venezuela (to Miranda)”.</p>
<p>Revise the range description of Amazonian Grosbeak from “E Colombia to Venezuela, the Guianas, Amaz. Brazil and Bolivia” to “eastern Colombia (north at least to western Meta) to southern and northeastern Venezuela (north to Sucre) east to the Guianas, south through Amazonia to Bolivia”.</p>
<p>Reference:</p>
<p><NAME>., Jr., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Diversification across the New World within the ‘blue’ cardinalids (Aves: Cardinalidae). Journal of Biogeography 41: 587-599.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2016. Congruence of phenotypic and genetic variation at the subspecific level in a Neotropical passerine. Ibis 158: 844-856.</p>
<p> </p>
<p>page 668, <strong>Nightingale Finch <em>Nesospiza acunhae</em></strong></p>
<p>page 668,<strong> Wilkins’s Finch <em>Nesospiza wilkinsi</em></strong></p>
<p>Change the English name of <em>Nesospiza acunhae</em> from Nightingale Finch to Inaccessible Island Finch. Change the English name of the monotypic group <em>Nesospiza acunhae acunhae</em> from Nightingale Finch (Inaccessible I.) to Inaccessible Island Finch (Lowland). Revise the range description from “Inaccessible I. (s Atlantic Ocean)” to “Inaccessible Island, south Atlantic Ocean (coastal lowlands of Inaccessible Island); subspecies of Inaccessible Island Finch intergrade where their distributions abut”. Add subspecies <em>fraseri</em> Ryan 2008 (Ryan 2008), with range “Inaccessible Island, south Atlantic Ocean (central plateau of Inaccessible Island); subspecies of Inaccessible Island Finch intergrade where their distributions abut”. Insert <em>fraseri</em> immediately following nominate <em>acunhae</em>. We also recognize <em>fraseri</em> as a new monotypic group, Inaccessible Island Finch (Upland) <em>Nesospiza acunhae fraseri</em>.</p>
<p>Subspecies <em>dunnei</em> properly belongs with Inaccessible Island Finch <em>Nesospiza acunhae</em>, not with Wilkins’s Finch <em>Nesospiza wilkinsi</em>. Position subspecies <em>dunnei</em> immediately following subspecies <em>fraseri</em>. Change the names of the monotypic group Wilkins’s Finch (Inaccessible I.) <em>Nesospiza wilkinsi dunnei</em> to Inaccessible Island Finch (Dunn’s) <em>Nesospiza acunhae dunnei</em>. Revise the range description from “Inaccessible I. (s Atlantic Ocean)” to “Inaccessible Island, south Atlantic Ocean (coast and eastern part of interior plateau of Inaccessible Island); subspecies of Inaccessible Island Finch intergrade where their distributions abut”.</p>
<p>The monotypic group Nightingale Finch (Nightingale I.) <em>Nesospiza acunhae questi</em> is recognized as a species, Nightingale Island Finch <em>Nesospiza questi</em>, following Ryan et al. (2007) and Ryan (2008).</p>
<p>References:</p>
<p><NAME>. 2008. Taxonomic and conservation implications of ecological speciation in <em>Nesospiza</em> buntings in Tristan da Cunha. Bird Conservation International 18: 20-29.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2007. Ecological speciation in South Atlantic Island finches. Science 315: 1420-1423.</p>
<p> </p>
<p>page 669, <strong>Black-and-rufous Warbling-Finch <em>Poospiza nigrorufa</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop753.htm">Proposal 753</a>), Black-and-rufous Warbling-Finch <em>Poospiza nigrorufa</em> is split into two monotypic species, following Shultz and Burns (2013) and Jordan et al. (2017): Black-and-rufous Warbling-Finch <em>Poospiza nigrorufa</em> and Black-and-chestnut Warbling-Finch <em>Poospiza whitii</em>. Reposition Black-and-chestnut Warbling-Finch to immediately follow Cinnamon Warbling-Finch <em>Poospiza ornata</em> (thus preceding Black-and-rufous Warbling-Finch).</p>
<p>The previously recognized subspecies <em>wagneri</em>, with range “Andes of Bolivia on Mt. Chulumaní (La Paz)”, is considered to be a junior synonym of <em>whitii</em> (Jordan et al. 2017), and is deleted; revise the range description of <em>whitii</em> from “Andes of w Bolivia (La Paz and Cochabamba) to nw Argentina” to “Andes of Bolivia (north to La Paz) and northwestern Argentina (south to San Luis and western Córdoba)”.</p>
<p>Revise the range description for Black-and-rufous Warbling-Finch from “SE Paraguay to se Brazil, Uruguay and central Argentina” to “northeastern Argentina (eastern Formosa south to Buenos Aires), southeastern Paraguay, Uruguay, and southern Brazil (Paraná to Rio Grande do Sul)”.</p>
<p>References:</p>
<p><NAME>., <NAME>, and <NAME>. 2017. Mate recognition systems and species limits in a warbling-finch complex (<em>Poospiza nigrorufa</em>/<em>whitii</em>). Emu 117: 344-358.</p>
<p><NAME>., and <NAME>. 2013. Plumage evolution in relation to light environment in a novel clade of Neotropical tanagers. Molecular Phylogenetics and Evolution 66: 112–125.</p>
<p> </p>
<p>page 670, <strong>White-collared Seedeater <em>Sporophila torqueola</em></strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2018), White-collared Seedeater <em>Sporophila torqueola</em> is split into two species, following Mason et al. (2018). The polytypic group White-collared Seedeater (White-collared) <em>Sporophila torqueola</em> [<em>morelleti</em> Group] becomes Morelet’s Seedeater <em>Sporophila morelleti</em> (including subspecies <em>sharpei</em> and <em>morelleti</em>); and the monotypic group White-collared Seedeater (Cinnamon-rumped) <em>Sporophila torqueola torqueola</em> becomes Cinnamon-rumped Seedeater <em>Sporophila torqueola</em> (including subspecies <em>torqueola</em> and <em>atriceps</em>).</p>
<p>Subspecies <em>mutanda</em> of Morelet’s Seedeater, with range “Pacific slope of s Mexico (Chiapas) to Guatemala and El Salvador”, is considered to be a junior synonym of nominate <em>morelleti</em> (Hellmayr 1938, Monroe 1968, Eitniear 2012), and is deleted. Revise the range of <em>morelleti</em> from “Caribbean slope of s Mexico (Veracruz) to extreme w Panama” to “Caribbean slope of Mexico (north to Veracruz) south to western Panama (Ngäbe-Buglé); Pacific slope from southern Mexico (Chiapas) south to western Panama (Chiriquí)”.</p>
<p>We add a previously overlooked subspecies of Cinnamon-rumped Seedeater, <em>atriceps</em> Baird 1867 (Paynter 1970, Dickinson and Christidis 2014), with distribution “western Mexico (central Sinaloa south to northern Jalisco; feral population in Baja California Sur derived from <em>atriceps</em>, <em>torqueola</em>, or a mix of both taxa)”.</p>
<p>References:</p>
<p>Chesser, R.T., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2018. <a href="http://www.bioone.org/doi/pdf/10.1642/AUK-18-62.1">Fifty-ninth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 135: 798-813.</p>
<p><NAME>., and <NAME>. 2014. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 2. Aves Press, Eastbourne, United Kingdom.</p>
<p><NAME>. 2012. White-collared Seedeater (<em>Sporophila torqueola</em>), version 2.0. In The Birds of North America (<NAME>, editor). Cornell Lab of Ornithology, Ithaca, New York.</p>
<p><NAME>. 1938. <a href="https://biodiversitylibrary.org/page/2760783">Catalogue of birds of the Americas. Part XI</a>. <a href="http://biodiversitylibrary.org/page/2760590">Field Museum of Natural History Zoological Series volume 13, part 11</a>.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2018. <a href="https://onlinelibrary.wiley.com/doi/abs/10.1002/ece3.3799">Hidden endemism, deep polyphyly, and repeated dispersal across the Isthmus of Tehuantepec: diversification of the White-collared Seedeater complex (Thraupidae: <em>Sporophila torqueola</em>)</a>. Ecology and Evolution 8: 1867–1881.</p>
<p><NAME>. 1968. A distributional survey of the birds of Honduras. Ornithological Monographs number 7. American Ornithologists’ Union.</p>
<p><NAME>., Jr. 1970. <a href="https://biodiversitylibrary.org/page/14483373">Subfamily Emberizinae, buntings and American sparrows</a>. Pages 3-214 in R.A. Paynter, Jr. and <NAME> (editors), <a href="https://biodiversitylibrary.org/page/14483224">Check-list of birds of the world. Volume XIII</a>. Museum of Comparative Zoology, Cambridge, Massachusetts.</p>
<p> </p>
<p><strong>SPECIES LOSSES (lumps and other deletions)</strong></p>
<p><strong> </strong></p>
<p>page 165, <strong>Colombian Screech-Owl <em>Megascops colombianus</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop770.htm">Proposal 770</a>), Colombian Screech-Owl <em>Megascops colombianus </em>is lumped with Rufescent Screech-Owl <em>Megascops ingens</em>, following Dantas et al. (2016) and Krabbe (2017). We recognize two new groups in Rufescent Screech-Owl: a monotypic Rufescent Screech-Owl (Colombian) <em>Megascops ingens colombianus</em>, and a polytypic Rufescent Screech-Owl (Rufescent) <em>Megascops ingens ingens/venezuelanus</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2016. Molecular systematics of the New World screech-owls (<em>Megascops</em>: Aves, Strigidae): biogeographic and taxonomic implications. Molecular Phylogenetics and Evolution 94: 626-634.</p>
<p><NAME>. 2017. <a href="http://asociacioncolombianadeornitologia.org/wp-content/uploads/2017/12/16eA08.pdf">A new species of <em>Megascops</em> (Strigidae) from the Sierra Nevada de Santa Marta, Colombia, with notes on voices of New World screech-owls</a>. Ornitología Colombiana 16: eA08-1-27.</p>
<p> </p>
<p>page addition (2011), <strong>White-chested Tinkerbird <em>Pogoniulus makawai</em></strong></p>
<p>The enigmatic White-chested Tinkerbird <em>Pogoniulus makawai</em> is known from only a single specimen. Genetic evidence suggests that it is not a separate species, but instead is embedded within Yellow-rumped Tinkerbird <em>Pogoniulus bilineatus</em> (Kirschel et al. 2018). Provisionally we recognize <em>makawai</em> as a monotypic group, Yellow-rumped Tinkerbird (White-chested) <em>Pogoniulus bilineatus makawai</em>, although it is possible that broader genetic sampling may reveal that <em>makawai</em> is not a valid taxon at all.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, and <NAME>. 2018. Investigation of the status of the enigmatic White-chested Tinkerbird <em>Pogoniulus makawai</em> using molecular analysis of the type specimen. Ibis 160: 673-680.</p>
<p> </p>
<p>page 275, <strong>Baron’s Spinetail <em>Cranioleuca baroni</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop762.htm">Proposal 762</a>), Baron’s Spinetail <em>Cranioleuca baroni</em> is lumped with Line-cheeked Spinetail <em>Cranioleuca antisiensis</em>, following Seeholzer and Brumfield (2018). Each of the two former species is retained as a polytypic group: Line-cheeked Spinetail (Line-cheeked), <em>Cranioleuca antisiensis antisiensis</em>/<em>palamblae</em>, and Line-cheeked Spinetail (Baron’s) <em>Cranioleuca antisiensis</em> [<em>baroni</em> Group].</p>
<p>Reference:</p>
<p>Seeholzer, G.F., and R.T. Brumfield. 2018. Isolation-by-distance, not incipient ecological speciation, explains genetic differentiation in an Andean songbird (Aves: Furnariidae: <em>Cranioleuca antisiensis</em>, Line-cheeked Spinetail) despite near three-fold body size change across an environmental gradient. Molecular Ecology 27: 279–296.</p>
<p> </p>
<p>page 381, <strong>Liberian Greenbul <em>Phyllastrephus leucolepis</em></strong></p>
<p>Liberian Greenbul <em>Phyllastrephus leucolepis</em> is genetically indistinguishable from Icterine Greenbul <em>Phyllastrephus icterinus</em> (Collinson et al. 2018). As Liberian Greenbul has been known from only a single specimen, it is most probable that the “Liberian Greenbul” is only a rare plumage variant of Icterine Greenbul, and so is deleted.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2018. Taxonomic status of the Liberian Greenbull <em>Phyllastrephus leucolepis</em> and the conservation of the Cavalla Forest, Liberia. Journal of Ornithology 159: 19-27.</p>
<p> </p>
<p>page 385, <strong>Canary Islands Kinglet <em>Regulus teneriffae</em></strong></p>
<p>Canary Islands Kinglet <em>Regulus teneriffae</em> is lumped into Goldcrest <em>Regulus regulus</em>, following Päckert et al. (2003, 2006). We continue to recognize <em>teneriffae</em> as a new monotypic group, Goldcrest (Tenerife) <em>Regulus regulus teneriffae</em>. Revise the range description for <em>teneriffae</em> from “Coniferous and mixed forests of Canary Islands” to “west central Canary Islands (La Gomera, Tenerife)”.</p>
<p>We add a previously overlooked subspecies, <em>ellenthalarae</em> Päckert, Dietzen, Martens, Wink, and Kvist 2006 (Päckert et al. 2006), with range “western Canary Islands (La Palma and El Hierro)”. We also recognize this taxon as a new monotypic group, Goldcrest (western Canary Islands) <em>Regulus regulus ellenthalarae</em>. Insert <em>ellenthalarae</em> immediately following the polytypic group Goldcrest (European) <em>Regulus regulus</em> [<em>regulus</em> Group].</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2003. Phylogenetic signal in the song of crests and kinglets (Aves: <em>Regulus</em>). Evolution 53: 616-629.</p>
<p><NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2006. Radiation of Atlantic goldcrests <em>Regulus regulus</em> spp.: evidence of a new taxon from the Canary Islands. Journal of Avian Biology 37: 364–380.</p>
<p> </p>
<p>page 428, <strong>Timor Bush Warbler <em>Locustella timorensis</em></strong></p>
<p>Timor Bush Warbler <em>Locustella timorensis</em> is considered to be a subspecies of Javan Bush Warbler <em>Locustella montis</em>, following Alström et al. (2015b). We recognize each of the two subspecies of Javan Bush Warbler as new monotypic groups: Javan Bush Warbler (Javan) <em>Locustella montis montis</em> and Javan Bush Warbler (Timor) <em>Locustella montis timorensis</em>.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2015b. <a href="https://avianres.biomedcentral.com/track/pdf/10.1186/s40657-015-0016-z?site=avianres.biomedcentral.com">Integrative taxonomy of the Russet Bush Warbler <em>Locustella mandelli</em> complex reveals a new species from central China</a>. Avian Research 6: 9.</p>
<p> </p>
<p>page 655, <strong>Cherrie’s Tanager <em>Ramphocelus costaricensis</em></strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2018), Passerini’s Tanager <em>Ramphocelus passerinii</em> and Cherrie’s Tanager <em>Ramphocelus costaricensis</em> are lumped as Scarlet-rumped Tanager <em>Ramphocelus passerinii</em>, based in part on Freeman and Montgomery (2017). We recognize each subspecies as separate monotypic groups, Scarlet-rumped Tanager (Passerini’s) <em>Ramphocelus passerinii passerinii</em> and Scarlet-rumped Tanager (Cherrie’s) <em>Ramphocelus passerinii costaricensis</em>.</p>
<p>Revise the range description of nominate <em>passerinii</em> from “S Mexico (se Veracruz and ne Oaxaca) to w Panama” to “Atlantic slope of Central America, from southern Mexico (southeastern Veracruz and northeastern Oaxaca) to western Panama (Ngäbe-Buglé)”.</p>
<p>References:</p>
<p>Chesser, R.T., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2018. <a href="http://www.bioone.org/doi/pdf/10.1642/AUK-18-62.1">Fifty-ninth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 135: 798-813.</p>
<p><NAME>., and <NAME>. 2017. Using song playback experiments to measure species recognition between geographically isolated populations: a comparison with acoustic trait analyses. Auk 134: 857-870.</p>
<p><strong>ORDERS AND FAMILIES</strong></p>
<p><strong>ORDERS (newly recognized orders)</strong></p>
<p>pages 151-152, <strong>Musophagiformes Musophagidae (Turacos)</strong></p>
<p>We recognize a new order, Musophagiformes, to encompass Turacos Musophagidae (Dickinson and Remsen 2013, Winkler et al. 2015); Musophagidae previously was classified in Cuculiformes. Position Musophagiformes to immediately follow Otidiformes.</p>
<p>References:</p>
<p><NAME>., and <NAME>. (editors). 2013. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 1. Aves Press, Eastbourne, United Kingdom.</p>
<p><NAME>., <NAME>, and <NAME>. 2015. Bird families of the world. Lynx Edicions, Barcelona.</p>
<p><strong>ORDERS (sequence)</strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2016) and AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop723.htm">Proposal 723</a>), the sequence of orders of nonpasserine birds is revised, following Hackett et al. (2008), Jarvis et al. (2014), Prum et al. (2015), and other sources. As a result, the sequence of orders between Galliformes and Coliiformes becomes:</p>
<p>Phoenicopteriformes</p>
<p>Podicipediformes</p>
<p>Columbiformes</p>
<p>Mesitornithiformes</p>
<p><span class="s1">Pterocliformes</span></p>
<p>Otidiformes</p>
<p>Musophagiformes</p>
<p>Cuculiformes</p>
<p>Caprimulgiformes</p>
<p>Opisthocomiformes</p>
<p>Gruiformes</p>
<p>Charadriiformes</p>
<p>Eurypygiformes</p>
<p>Phaethontiformes</p>
<p>Gaviiformes</p>
<p>Sphenisciformes</p>
<p>Procellariiformes</p>
<p>Ciconiiformes</p>
<p>Suliformes</p>
<p>Pelecaniformes</p>
<p>Cathartiformes</p>
<p class="p1"><span class="s1">Accipitriformes</span></p>
<p>Strigiformes</p>
<p>The sequence of orders from Struthioniformes through Galliformes, and from Coliiformes through Passeriformes, remains unchanged.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2016. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-16-77.1?code=coop-site">Fifty-seventh supplement to the American Ornithologists’ Union <em>Check-list of North American birds</em></a>. Auk 133: 544-560.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2008. A phylogenomic study of birds reveals their evolutionary history. Science 320: 1763-1768.</p>
<p>Jarvis, E.D., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>’Brien, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Whole-genome analyses resolve early branches in the tree of life of modern birds. Science 346: 1320-1331.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2015. A comprehensive phylogeny of birds (Aves) using targeted next-generation DNA sequencing. Nature 526: 569-573.</p>
<p><strong>F</strong><strong>AMILIES (newly recognized families)</strong></p>
<p>pages 13-14, 699, <strong>Southern Storm-Petrels Oceanitidae</strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2018), we recognize a new family of storm-petrels, Southern Storm-Petrels Oceanitidae, following Nunn and Stanley (1998), Hackett et al. (2008), Prum et al. (2015), and Reddy et al. (2017). Oceanitidae includes the genera <em>Oceanites</em>, <em>Garrodia</em>, <em>Pelagodroma</em>, <em>Fregetta</em>, and <em>Nesofregetta</em>.</p>
<p>Resequence the families of tube-nosed seabirds from Albatrosses Diomedeidae; Shearwaters and Petrels Procellariidae; and Storm-Petrels Hydrobatidae to Albatrosses Diomedeidae; Southern Storm-Petrels Oceanitidae; Northern Storm-Petrels Hydrobatidae; and Shearwaters and Petrels Procellariidae.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2018. <a href="http://www.bioone.org/doi/pdf/10.1642/AUK-18-62.1">Fifty-ninth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 135: 798-813.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2008. A phylogenomic study of birds reveals their evolutionary history. Science 320: 1763-1768.</p>
<p><NAME>., and <NAME>. 1998. Body size effects and rates of cytochrome <em>b</em> evolution in tube-nosed seabirds. Molecular Biology and Evolution 15: 1360–1371.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2015. A comprehensive phylogeny of birds (Aves) using targeted next-generation DNA sequencing. Nature 526: 569-573.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2017. Why do phylogenomic data sets yield conflicting trees? Data type influences the avian tree of life more than taxon sampling. Systematic Biology 66: 857-879.</p>
<p> </p>
<p>page 586, <strong>Crested Shrikejay Platylophidae</strong></p>
<p>Both morphological and genetic evidence indicate that Crested Jay <em>Platylophus galericulatus</em> is not a member of Corvidae (Crows, Jays, and Magpies) (Jønsson et al. 2008, Manegold 2008, Jønsson et al. 2011a, Aggerbeck et al. 2014). Its relationships remain unresolved: it may be near the base of the corvoid radiation (Jønsson et al. 2011a), or it may be sister to Laniidae (Shrikes) (Jønsson et al. 2008, Aggerbeck et al. 2014). We follow Winkler et al. (2015) in recognizing a new family, Platylophidae, for this species. Provisionally we position Platylophidae immediately following Oreoicidae (Australo-Papuan Bellbirds). Change the English name from Crested Jay to Crested Shrikejay.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Resolving deep lineage divergences in core corvoid passerine birds supports a proto-Papuan island origin. Molecular Phylogenetics and Evolution 70: 272-285.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2008. Explosive avian radiations and multi-directional dispersal across Wallacea: evidence from the Campephagidae and other Crown Corvida (Aves). Molecular Phylogenetics and Evolution 47: 221-236.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2011a. Major global radiation of corvoid birds originated in the proto-Papuan archipelago. Proceedings of the National Academy of Sciences 108: 2328–2333.</p>
<p><NAME>. 2008. Morphological characters of the tongue skeleton reveal phylogenetic relationships within the Corvidae (Oscines, Passeriformes). Emu 108: 321-330.</p>
<p><NAME>., <NAME>, and <NAME>. 2015. Bird families of the world. Lynx Edicions, Barcelona.</p>
<p><strong>FAMILIES (composition)</strong></p>
<p>page 473, <strong>Cerulean Paradise-Flycatcher <em>Eutrichomyias rowleyi</em></strong></p>
<p>Cerulean Paradise-Flycatcher is not a member of Monarch Flycatchers Monarchidae, but instead is most closely related to Drongo Fantail <em>Chaetorhynchus papuensis</em> and to the silktails (<em>Lamprolia)</em> (Jønsson et al. 2018). This set of species may merit recognition as a separate family (Silktails and Allies Lamproliidae; Jønsson et al. 2018), but currently we classify them as basal members of Fantails Rhipiduridae. Reposition Cerulean Paradise-Flycatcher to immediately follow Drongo Fantail. Revise the range description from “Rediscovered 1995 on Sangihe I. after considered extinct” to “Sangihe Island (northeast of Sulawesi)”.</p>
<p>Reference:</p>
<p><NAME>., <NAME>. <NAME>, <NAME>, and <NAME>. 2018. Relicts of the lost arc: high-throughput sequencing of the <em>Eutrichomyias rowleyi</em> (Aves: Passeriformes) holotype uncovers an ancient biogeographic link between the Philippines and Fiji. Molecular Phylogenetics and Evolution 120: 28–32.</p>
<p> </p>
<p>page 419, <strong>Rufous-vented Prinia <em>Prinia burnesii</em></strong></p>
<p>page 419, <strong>Swamp Prinia <em>Prinia cinerascens</em></strong></p>
<p>Rufous-vented Prinia is not a member of the genus <em>Prinia</em>, nor does it even belong to Cisticolidae (Cisticolas and Allies), the family that includes <em>Prinia</em>; instead, it is a member of Pellorneidae (Ground Babblers and Allies) (Olsson et al. 2013). Change the scientific name from <em>Prinia burnesii</em> to <em>Laticilla burnesii</em> (Olsson et al. 2013, Dickinson and Christidis 2014). The relationships of <em>Laticilla</em> within Pellorneidae are unresolved; provisionally we position <em>Laticilla</em> immediately following the genus <em>Schoeniparus</em> (Dickinson and Christidis 2014).</p>
<p>With the addition of subspecies <em>nepalicola</em> Baral et al. 2008, Rufous-vented Prinia no longer is monotypic. Add an entry for the nominate subspecies, <em>burnesii</em>, immediately following the heading for the species. Revise the description of nominate <em>burnesii</em> from “Elephant and sarkhan grass of Pakistan to nw India (w Punjab)” to “Indus River Valley of Pakistan, and northwestern India (Punjab)”.</p>
<p>We add a previously overlooked subspecies, <em>nepalicola</em> Baral et al. (2007, 2008), with range “eastern Nepal”. Insert subspecies <em>nepalicola</em> immediately following the entry for nominate <em>burnesii</em>.</p>
<p>Swamp Prinia, which formerly was classified as a subspecies of Rufous-vented Prinia, follows that species to the genus <em>Laticilla </em>and to the family Pellorneidae (Ground Babblers and Allies) (Olsson et al. 2013). Change the scientific name from <em>Prinia cinerascens</em> to <em>Laticilla cinerascens </em>(Olsson et al. 2013, Dickinson and Christidis 2014).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, and S. GC. 2007. A new subspecies of Rufous-vented Prinia <em>Prinia burnesii</em> (Aves: Cisticolidae) from Nepal. Danphe 16 (4): 1-10.</p>
<p> </p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, and S. GC. 2008. A substitute name for <em>Prinia burnesii nipalensis</em>. Danphe 17 (1): 1-2.</p>
<p><NAME>., and <NAME>. 2014. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 2. Aves Press, Eastbourne, United Kingdom.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2013. Systematic revision of the avian family Cisticolidae based on a multi-locus phylogeny of all genera. Molecular Phylogenetics and Evolution 66: 790-799.</p>
<p> </p>
<p>page 461, <strong>Grandala <em>Grandala coelicolor</em></strong></p>
<p>Grandala is removed from Old World Flycatchers Muscicapidae and is transferred to Thrushes and Allies Turdidae, following Jønsson and Fjeldså (2006). Reposition Grandala at the head of Turdidae, immediately following Heuglin’s Wheatear <em>Oenanthe heuglini</em>.</p>
<p>Reference:</p>
<p><NAME>., and <NAME>. 2006. A phylogenetic supertree of oscine passerines (Aves: Passeri). Zoologica Scripta 35: 149-186.</p>
<p> </p>
<p>page 649, <strong>Red-billed Pied Tanager <em>Lamprospiza melanoleuca</em></strong></p>
<p>page 653, <strong>Olive-green Tanager <em>Orthogonys chloricterus</em></strong></p>
<p>Red-billed Pied Tanager <em>Lamprospiza melanoleuca</em> and Olive-green Tanager <em>Orthogonys chloricterus</em> are members of Mitrospingidae (Mitrospingid Tanagers), not of Thraupidae (Tanagers and Allies) (Barker et al. 2013). Reposition Red-billed Pied Tanager to follow Olive-backed Tanager <em>Mitrospingus oleagineus</em>. Reposition Olive-green Tanager to follow Olive-backed Tanager and Red-billed Pied Tanager.</p>
<p>Reference:</p>
<p>Barker, F.K., <NAME>, <NAME>, <NAME>, and <NAME>. 2013. Going to extremes: contrasting rates of diversification in a recent radiation of New World passerine birds. Systematic Biology 62: 298-320.</p>
<p><strong>FAMILIES (sequence)</strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2017), the sequence of families of the nine-primaried oscines is revised, following Barker et al. (2013, 2015). The sequence of families that we adopt is</p>
<p>Fringillidae Finches, Euphonias, and Allies</p>
<p>Calcariidae Longspurs and Snow Buntings</p>
<p>Rhodinocichlidae Thrush-Tanager</p>
<p>Emberizidae Old World Buntings</p>
<p>Passerellidae New World Sparrows</p>
<p>Calyptophilidae Chat-Tanagers</p>
<p>Phaenicophilidae Hispaniolan Tanagers</p>
<p>Nesospingidae Puerto Rican Tanager</p>
<p>Spindalidae Spindalises</p>
<p>Zeledoniidae Wrenthrush</p>
<p>Teretistridae Cuban Warblers</p>
<p>Icteriidae Yellow-breasted Chat</p>
<p>Icteridae Troupials and Allies</p>
<p>Parulidae New World Warblers</p>
<p>Mitrospingidae Mitrospingid Tanagers</p>
<p>Cardinalidae Cardinals and Allies</p>
<p>Thraupidae Tanagers and Allies</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2013. Going to extremes: contrasting rates of diversification in a recent radiation of New World passerine birds. Systematic Biology 62: 298-320.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2015. New insights into New World biogeography: an integrated view from the phylogeny of blackbirds, cardinals, sparrows, tanagers, warblers, and allies. Auk 132: 333-348.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site">Fifty-eighth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p><strong>FAMILIES (nomenclature)</strong></p>
<p>page 581, <strong>Pityriaseidae Bristlehead</strong></p>
<p>Change the spelling of the family name Pityriaseidae to Pityriasidae (Dickinson and Christidis 2014).</p>
<p>Reference:</p>
<p><NAME>., and <NAME>. 2014. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 2. Aves Press, Eastbourne, United Kingdom.</p>
<p> </p>
<p>pages 649-650, 664, 676-687, <strong>Passerellidae New World Buntings and Sparrows</strong></p>
<p>Change the English name of the family Passerellidae from New World Buntings and Sparrows to New World Sparrows.</p>
<p> </p>
<p><strong>STANDARD UPDATES and CORRECTIONS</strong></p>
<p>The compilation of these updates and corrections is in progress; the full set of revisions will be posted as soon as this documentation is available. Initially we post a small subset of these updates and corrections, focusing on changes that affect the names (English or scientific) of species, or on the composition of species. In the meantime, please note that <em>all</em> revisions are documented in the eBird/Clements Checklist v2018 downloadable spreadsheet.</p>
<p><strong> </strong></p>
<p>page 28, <strong><NAME> <em>Tadorna radjah</em></strong></p>
<p>Change the scientific name of <NAME> from <em>Tadorna radjah</em> to <em>Radjah radjah</em>, based on genetic evidence that <em>Tadorna</em> is paraphyletic (Sraml et al. 1996, Gonzalez et al. 2009). Reposition Radjah Shelduck to immediately follow the genus <em>Chloephaga</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, and <NAME>. 2009. Phylogenetic relationships based on two mitochondrial genes and hybridization patterns in Anatidae. Journal of Zoology 279: 310-318.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 1996. Molecular relationships within Australasian waterfowl (Anseriformes). Australian Journal of Zoology 44: 47-58.</p>
<p> </p>
<p>page 32, <strong>White-eyed Duck <em>Aythya australis</em></strong></p>
<p>Change the English name of <em>Aythya australis</em> from White-eyed Duck to Hardhead, to conform to widespread usage (e.g., Christidis and Boles 2008, Menkhorst et al. 2017).</p>
<p>References:</p>
<p><NAME>. and W.E. Boles. 2008. Systematics and taxonomy of Australian birds. CSIRO Publishing, Melbourne.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2017. The Australian bird guide. CSIRO Publishing, Clayton South, Victoria, Australia.</p>
<p> </p>
<p>page 129, <strong>Zoe Imperial-Pigeon <em>Ducula zoeae</em></strong></p>
<p>Change the English name of <em>Ducula zoeae</em> from Zoe Imperial-Pigeon to Zoe’s Imperial-Pigeon (Jobling 2010).</p>
<p>Reference:</p>
<p>Jobling, J.A. 2010. The Helm dictionary of scientific bird names. <NAME>, London, United Kingdom.</p>
<p> </p>
<p>page 152, <strong>Ruwenzori Turaco <em>Ruwenzorornis johnstoni</em></strong></p>
<p>Change the spelling of the English name of <em>Ruwenzorornis johnstoni</em> from Ruwenzori Turaco to Rwenzori Turaco.</p>
<p>Change the spelling of the English name of the monotypic group <em>Ruwenzorornis johnstoni johnstoni </em>from Ruwenzori Turaco (Ruwenzori) to Rwenzori Turaco (Rwenzori). Change the range description from “Ruwenzori Mountains (northeastern Democratic Republic of the Congo and southwestern Uganda)” to “Rwenzori Mountains (northeastern Democratic Republic of the Congo and southwestern Uganda)”.</p>
<p>Change the spelling of the English name of the monotypic group <em>Ruwenzorornis johnstoni bredoi</em> from Ruwenzori Turaco (Mt. Kabobo) to Rwenzori Turaco (Mt. Kabobo).</p>
<p>Change the spelling of the English name of the monotypic group <em>Ruwenzorornis johnstoni kivuensis </em>from Ruwenzori Turaco (Kivu) to Rwenzori Turaco (Kivu).</p>
<p> </p>
<p>page 182, <strong>Montane Nightjar <em>Caprimulgus ruwenzorii</em></strong></p>
<p>Change the English name of <em>Caprimulgus ruwenzorii</em> from Montane Nightjar to Rwenzori Nightjar.</p>
<p> </p>
<p>page 82, <strong>African Crake <em>Crecopsis egregia</em></strong></p>
<p>African Crake <em>Crecopsis egregia</em> is merged into the genus <em>Crex</em>, following Urban et al. (1986); change the scientific name from <em>Crecopsis egregia</em> to <em>Crex egregia</em>.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, and <NAME>. 1986. The birds of Africa. Volume II. Academic Press, London.</p>
<p> </p>
<p>page 80, <strong>Slaty-breasted Rail <em>Gallirallus striatus</em></strong></p>
<p>Slaty-breasted Rail is not a member of the genus <em>Gallirallus</em>, but instead is related to the genus <em>Lewinia</em> (Garcia-R et al. 2014). Change the scientific name of Slaty-breasted Rail from <em>Gallirallus striatus</em> to <em>Lewinia striata</em>.</p>
<p>Change the spelling of the subspecies name <em>taiwanus</em> to <em>taiwana</em>.</p>
<p>Change the spelling of the nominate subspecies name <em>striatus</em> to <em>striata</em>.</p>
<p>Change the spelling of the subspecies name <em>paratermus </em>to <em>paraterma</em>.</p>
<p>Reference:</p>
<p>Garcia-R., J.C., <NAME>, and <NAME>. 2014. Deep global evolutionary radiation in birds: diversification and trait evolution in the cosmopolitan bird family Rallidae. Molecular Phylogenetics and Evolution 81: 96–108.</p>
<p> </p>
<p>page 84, <strong>Invisible Rail <em>Habroptila wallacii</em></strong></p>
<p>Invisible Rail <em>Habroptila wallacii</em> is a basal member of the genus <em>Gallirallus</em> (Kirchman 2012); change the scientific name from <em>Habroptila wallacii</em> to <em>Gallirallus wallacii</em>.</p>
<p>Reference:</p>
<p><NAME>. 2012. Speciation of flightless rails on islands: a DNA-based phylogeny of the typical rails of the Pacific. Auk 129: 56-69.</p>
<p> </p>
<p>page 84, <strong>Chestnut Rail <em>Eulabeornis castaneoventris</em></strong></p>
<p>Chestnut Rail <em>Eulabeornis castaneoventris</em> is a basal member of the genus <em>Gallirallus</em> (Kirchman 2012, Garcia-R et al. 2014); change the scientific name from <em>Eulabeornis castaneoventris</em> to <em>Gallirallus castaneoventris</em>.</p>
<p>References:</p>
<p>Garcia-R., J.C., <NAME>, and <NAME>. 2014. Deep global evolutionary radiation in birds: diversification and trait evolution in the cosmopolitan bird family Rallidae. Molecular Phylogenetics and Evolution 81: 96–108.</p>
<p><NAME>. 2012. Speciation of flightless rails on islands: a DNA-based phylogeny of the typical rails of the Pacific. Auk 129: 56-69.</p>
<p> </p>
<p>page 80, <strong>Woodford’s Rail <em>Nesoclopeus woodfordi</em></strong></p>
<p>Woodford’s Rail <em>Nesoclopeus woodfordi</em> is embedded in the genus <em>Gallirallus</em> (Garcia-R et al. 2014). Change the scientific name from <em>Nesoclopeus woodfordi</em> to <em>Gallirallus woodfordi</em>. Reposition Woodford’s Rail to immediately follow New Britain Rail <em>Gallirallus insignis</em>.</p>
<p>Change the scientific name of the monotypic group Woodford’s Rail (Bougainville) from <em>Nesoclopeus woodfordi tertius</em> to <em>Gallirallus woodfordi tertius</em>.</p>
<p>Change the scientific name of the monotypic group Woodford’s Rail (Santa Isabel) from <em>Nesoclopeus woodfordi immaculatus</em> to <em>Gallirallus woodfordi immaculatus</em>.</p>
<p>Change the scientific name of the monotypic group Woodford’s Rail (Guadalcanal) from <em>Nesoclopeus woodfordi woodfordi</em> to <em>Gallirallus woodfordi woodfordi</em>.</p>
<p>Reference:</p>
<p>Garcia-R., J.C., <NAME>, and <NAME>. 2014. Deep global evolutionary radiation in birds: diversification and trait evolution in the cosmopolitan bird family Rallidae. Molecular Phylogenetics and Evolution 81: 96–108.</p>
<p> </p>
<p>page 699, <strong>Bar-winged Rail <em>Nesoclopeus poecilopterus</em></strong></p>
<p>The genus <em>Nesoclopeus</em> is embedded in the genus <em>Gallirallus</em> (Garcia-R et al. 2014). Change the scientific name of Bar-winged Rail from <em>Nesoclopeus poecilopterus</em> to <em>Gallirallus poecilopterus</em>. Reposition Bar-winged Rail to immediately follow Woodford’s Rail <em>Gallirallus woodfordi</em>.</p>
<p>Reference:</p>
<p>Garcia-R., J.C., <NAME>, and <NAME>. 2014. Deep global evolutionary radiation in birds: diversification and trait evolution in the cosmopolitan bird family Rallidae. Molecular Phylogenetics and Evolution 81: 96–108.</p>
<p> </p>
<p>page 85, <strong>Black-backed Swamphen <em>Porphyrio indicus</em></strong></p>
<p>One subspecies of Black-backed Swamphen, <em>Porphyrio indicus viridis</em>, is reassigned to Gray-headed Swamphen <em>Porphyrio poliocephalus</em>. Reposition <em>Porphyrio poliocephalus viridis</em> to immediately follow <em>Porphyrio poliocephalus poliocephalus</em>.</p>
<p> </p>
<p>page 36, <strong>Black-shouldered Kite <em>Elanus caeruleus</em></strong></p>
<p>In accord with current widespread usage (e.g., Mullarney et al. 1999, Rasmussen and Anderton 2005, Beehler and Pratt 2016), change the English name of <em>Elanus caeruleus</em> from Black-shouldered Kite to Black-winged Kite.</p>
<p>Change the English name of the monotypic group <em>Elanus caeruleus caeruleus</em> from Black-shouldered Kite (African) to Black-winged Kite (African).</p>
<p>Change the English name of the polytypic group <em>Elanus caeruleus</em> [<em>vociferus</em> Group] from Black-shouldered Kite (Asian) to Black-winged Kite (Asian).</p>
<p>References:</p>
<p><NAME>., and <NAME>. 2016. Birds of New Guinea: distribution, taxonomy, and systematics. Princeton University Press, Princeton, New Jersey.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 1999. The complete guide to the birds of Europe. Princeton University Press, Princeton, New Jersey.</p>
<p><NAME>., and <NAME>. 2005. Birds of South Asia. The Ripley guide. Volume 2: attributes and status. Smithsonian Institution and Lynx Edicions, Washington D.C. and Barcelona.</p>
<p> </p>
<p>page 36, <strong>Australian Kite <em>Elanus axillaris</em></strong></p>
<p>In accord with current usage (e.g., Christidis and Boles 2008), change the English name of <em>Elanus axillaris</em> from Australian Kite to Black-shouldered Kite.</p>
<p>Reference:</p>
<p><NAME>. and <NAME>. 2008. Systematics and taxonomy of Australian birds. CSIRO Publishing, Melbourne.</p>
<p> </p>
<p>page 47, <strong>Great Philippine Eagle <em>Pithecophaga jefferyi</em></strong></p>
<p>Change the English name of <em>Pithecophaga jefferyi</em> from Great Philippine Eagle to Philippine Eagle (Dickinson et al. 1991, Kennedy et al. 2000).</p>
<p>References:</p>
<p><NAME>., <NAME>, and <NAME>. 1991. The birds of the Philippines. An annotated check-list. British Ornithologists’ Union Check-list number 12. British Ornithologists’ Union, London.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>., and <NAME>. 2000. A guide to the birds of the Philippines. Oxford University Press, New York and Oxford, United Kingdom.</p>
<p> </p>
<p>page 40, <strong>Frances’s Goshawk <em>Accipiter francesiae</em></strong></p>
<p>In accord with widespread usage (Langrand 1990, Morris and Hawkins 1998, Sinclair and Langrand 1998, Safford and Hawkins 2013), change the English name of <em>Accipiter francesiae</em> from Frances’s Goshawk to Frances’s Sparrowhawk.</p>
<p>References:</p>
<p><NAME>. 1990. Guide to the birds of Madagascar. Yale University Press, New Haven, Connecticut.</p>
<p><NAME>., and <NAME>. 1998. Birds of Madagascar: a photographic guide. Yale</p>
<p>University Press, New Haven, Connecticut.</p>
<p><NAME>., and <NAME> (editors). 2013. The birds of Africa. Volume VIII. The Malagasy region. <NAME>, London.</p>
<p><NAME>., and <NAME>. 1998. Birds of the Indian Ocean islands: Madagascar, Mauritius, Réunion, Rodrigues, Seychelles and the Comoros. Struik Publishers, Cape Town.</p>
<p> </p>
<p>page 161, <strong>Madagascar Red Owl <em>Tyto soumagnei</em></strong></p>
<p>Change the English name of <em>Tyto soumagnei</em> from Madagascar Red Owl to Red Owl.</p>
<p> </p>
<p>page 163, <strong>European Scops-Owl <em>Otus scops</em></strong></p>
<p>In accord with widespread usage (e.g., British Ornithologists’ Union 2013), change the English name of <em>Otus scops</em> from European Scops-Owl to Eurasian Scops-Owl.</p>
<p>Change the English name of the polytypic group Otus scops [scops Group] from European Scops-Owl (European) to Eurasian Scops-Owl (Eurasian).</p>
<p>Change the English name of the monotypic group Otus scops cyprius from European Scops-Owl (Cyprus) to Eurasian Scops-Owl (Cyprus).</p>
<p>Reference:</p>
<p>British Ornithologists’ Union. 2013. The British List: A Checklist of Birds of Britain (8th edition). Ibis 155: 635-676.</p>
<p> </p>
<p>page 174, <strong>White-browed Owl <em>Ninox superciliaris</em></strong></p>
<p>White-browed Owl is a member of the genus <em>Athene</em>, not <em>Ninox</em> (Wink et al. 2008, Koparde et al. 2018). Change the scientific name from <em>Ninox superciliaris</em> to <em>Athene superciliaris</em>. Reposition White-browed Owl to immediately follow Forest Owlet <em>Athene blewitti</em>.</p>
<p>References:</p>
<p>Koparde, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2018. <a href="http://journals.plos.org/plosone/article?id=10.1371/journal.pone.0192359">The critically endangered forest owlet <em>Heteroglaux blewitti</em> is nested within the currently recognized <em>Athene</em> clade: a century-old debate addressed</a>. PLoS ONE 13: e0192359.</p>
<p> </p>
<p>page 175, <strong>African Long-eared Owl <em>Asio abyssinicus</em></strong></p>
<p>Change the English name of <em>Asio abyssinicus</em> from African Long-eared Owl to Abyssinian Owl.</p>
<p>Revise the range description for subspecies <em>graueri</em> from “Ruwenzori Mountains, Mt. Kabobo (eastern Democratic Republic of the Congo), and Mt. Kenya (Kenya)” to “Rwenzori Mountains (northeastern Democratic Republic of the Congo and southwestern Uganda), Mt. Kabobo (eastern Democratic Republic of the Congo), and Mt. Kenya (Kenya)”.</p>
<p> </p>
<p>page 176, <strong>Madagascar Long-eared Owl <em>Asio madagascariensis</em></strong></p>
<p>Change the English name of <em>Asio madagascariensis</em> from Madagascar Long-eared Owl to Madagascar Owl.</p>
<p> </p>
<p>page 228, <strong>Cuckoo-Roller <em>Leptosomus discolor</em></strong></p>
<p>Change the English name of <em>Leptosomus discolor</em> from Cuckoo-Roller to Cuckoo-roller.</p>
<p> </p>
<p>page 217, <strong>Black-backed Dwarf-Kingfisher <em>Ceyx erithaca</em></strong></p>
<p>page 217, <strong>Rufous-backed Dwarf-Kingfisher <em>Ceyx rufidorsa</em></strong></p>
<p>Subspecies <em>motleyi,</em> <em>captus</em>, and <em>jungei</em>, all previously classified as subspecies of Black-backed Dwarf-Kingfisher, instead belong in Rufous-backed Dwarf-Kingfisher. Change the respective scientific names from <em>Ceyx erithaca motleyi</em> to <em>Ceyx rufidorsa motleyi</em>; from <em>Ceyx erithaca captus</em> to <em>Ceyx rufidorsa captus</em>; and from <em>Ceyx erithaca jungei</em> to <em>Ceyx rufidorsa jungei</em>.</p>
<p> </p>
<p>page 246, <strong>African Piculet <em>Sasia africana</em></strong></p>
<p>Change the scientific name of African Piculet from <em>Sasia africana</em> to <em>Verreauxia africana</em> (Fuchs et al. 2006).</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2006. Molecular phylogeny and biogeographic history of the piculets (Piciformes: Picumninae). Journal of Avian Biology 37: 487-496.</p>
<p> </p>
<p>page 250, <strong>Sulawesi Woodpecker <em>Dendrocopos temminckii</em></strong></p>
<p>page 250, <strong>Philippine Woodpecker <em>Dendrocopos maculatus</em></strong></p>
<p>page 250, <strong>Sulu Woodpecker <em>Dendrocopos ramsayi</em></strong></p>
<p>page 250, <strong>Brown-capped Woodpecker <em>Dendrocopos nanus</em></strong></p>
<p>page 250, <strong>Sunda Woodpecker <em>Dendrocopos moluccensis</em></strong></p>
<p>page 250, <strong>Gray-capped Woodpecker <em>Dendrocopos canicapillus</em></strong></p>
<p>page 250, <strong>Pygmy Woodpecker <em>Dendrocopos kizuki</em></strong></p>
<p>The traditional <em>Dendrocopos</em> is not monophyletic, and consequently is partitioned into five genera (Fuchs and Pons 2015; see also Shakya et al. 2017).</p>
<p>Change the scientific name of Sulawesi Woodpecker from <em>Dendrocopos temminckii</em> to <em>Yungipicus temminckii</em> (Fuchs and Pons 2015).</p>
<p>Change the scientific name of Philippine Woodpecker from <em>Dendrocopos maculatus</em> to <em>Yungipicus maculatus</em> (Fuchs and Pons 2015).</p>
<p>Change the scientific name of Sulu Woodpecker from <em>Dendrocopos ramsayi</em> to <em>Yungipicus ramsayi</em> (Fuchs and Pons 2015).</p>
<p>Change the scientific name of Brown-capped Woodpecker from <em>Dendrocopos nanus</em> to <em>Yungipicus nanus</em> (Fuchs and Pons 2015).</p>
<p>Change the scientific name of Sunda Woodpecker from <em>Dendrocopos moluccensis</em> to <em>Yungipicus moluccensis</em> (Fuchs and Pons 2015).</p>
<p>Change the scientific name of Gray-capped Woodpecker from <em>Dendrocopos canicapillus</em> to <em>Yungipicus canicapillus</em> (Fuchs and Pons 2015).</p>
<p>Change the scientific name of Pygmy Woodpecker from <em>Dendrocopos kizuki</em> to <em>Yungipicus kizuki</em> (Fuchs and Pons 2015).</p>
<p>The sequence of species of <em>Yungipicus</em> is revised (Fuchs and Pons 2015, Dufort 2016, Shakya et al. 2017).</p>
<p>References:</p>
<p><NAME>. 2016. An augmented supermatrix phylogeny of the avian family Picidae reveals uncertainty deep in the family tree. Molecular Phylogenetics and Evolution 94: 313-326.</p>
<p><NAME>., and <NAME>. 2015. A new classification of the pied woodpeckers assemblage (Dendropicini: Picidae) based on a comprehensive multi-locus phylogeny. Molecular Phylogenetics and Evolution 88: 28-37.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2017. Tapping the woodpecker tree for evolutionary insight. Molecular Phylogenetics and Evolution 116: 182-191.</p>
<p> </p>
<p>page 251, <strong>Yellow-crowned Woodpecker <em>Dendrocopos mahrattensis</em></strong></p>
<p>The traditional <em>Dendrocopos</em> is not monophyletic, and consequently is partitioned into five genera (Fuchs and Pons 2015; see also Shakya et al. 2017). Change the scientific name of Yellow-crowned Woodpecker from <em>Dendrocopos mahrattensis</em> to <em>Leiopicus mahrattensis</em> (Fuchs and Pons 2015).</p>
<p>References:</p>
<p><NAME>., and <NAME>. 2015. A new classification of the pied woodpeckers assemblage (Dendropicini: Picidae) based on a comprehensive multi-locus phylogeny. Molecular Phylogenetics and Evolution 88: 28-37.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2017. Tapping the woodpecker tree for evolutionary insight. Molecular Phylogenetics and Evolution 116: 182-191.</p>
<p> </p>
<p>page 251, <strong>Middle Spotted Woodpecker <em>Dendrocopos medius</em></strong></p>
<p>page 251, <strong>Brown-fronted Woodpecker <em>Dendrocopos auriceps</em></strong></p>
<p>page 251, <strong>Arabian Woodpecker <em>Dendrocopos dorae</em></strong></p>
<p>The traditional <em>Dendrocopos</em> is not monophyletic, and consequently is partitioned into five genera (Fuchs and Pons 2015; see also Dufort 2016, Shakya et al. 2017).</p>
<p>Change the scientific name of Middle Spotted Woodpecker from <em>Dendrocopos medius</em> to <em>Dendrocoptes medius</em> (Fuchs and Pons 2015).</p>
<p>Change the scientific name of Brown-fronted Woodpecker from <em>Dendrocopos auriceps</em> to <em>Dendrocoptes auriceps</em> (Fuchs and Pons 2015).</p>
<p>Change the scientific name of Arabian Woodpecker from <em>Dendrocopos dorae</em> to <em>Dendrocoptes dorae</em> (Fuchs and Pons 2015).</p>
<p>The sequence of species of <em>Dendrocoptes</em> is revised.</p>
<p>References:</p>
<p><NAME>. 2016. An augmented supermatrix phylogeny of the avian family Picidae reveals uncertainty deep in the family tree. Molecular Phylogenetics and Evolution 94: 313-326.</p>
<p><NAME>., and <NAME>. 2015. A new classification of the pied woodpeckers assemblage (Dendropicini: Picidae) based on a comprehensive multi-locus phylogeny. Molecular Phylogenetics and Evolution 88: 28-37.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2017. Tapping the woodpecker tree for evolutionary insight. Molecular Phylogenetics and Evolution 116: 182-191.</p>
<p> </p>
<p>page 249, <strong>Abyssinian Woodpecker <em>Dendropicos abyssinicus</em></strong></p>
<p>page 249, <strong>Melancholy Woodpecker <em>Dendropicos lugubris</em></strong></p>
<p>page 249, <strong>Gabon Woodpecker <em>Dendropicos gabonensis</em></strong></p>
<p>page 249, <strong>Elliot’s Woodpecker <em>Dendropicos elliotii</em></strong></p>
<p>page 249, <strong>Little Gray Woodpecker <em>Dendropicos elachus</em></strong></p>
<p>page 249, <strong>Speckle-breasted Woodpecker <em>Dendropicos poecilolaemus</em></strong></p>
<p>page 249, <strong>Cardinal Woodpecker <em>Dendropicos fuscescens</em></strong></p>
<p>page 249, <strong>Bearded Woodpecker <em>Dendropicos namaquus</em></strong></p>
<p>page 249, <strong>Fire-bellied Woodpecker <em>Dendropicos pyrrhogaster</em></strong></p>
<p>page 249<strong>, Golden-crowned Woodpecker <em>Dendropicos xantholophus</em></strong></p>
<p>page 249, <strong>Stierling’s Woodpecker <em>Dendropicos stierlingi</em></strong></p>
<p>page 250, <strong>Brown-backed Woodpecker <em>Dendropicos obsoletus</em></strong></p>
<p>page 250, <strong>African Gray Woodpecker <em>Dendropicos goertae</em></strong></p>
<p>page 250, <strong>Mountain Gray Woodpecker <em>Dendropicos spodocephalus</em></strong></p>
<p>page 250, <strong>Olive Woodpecker <em>Dendropicos griseocephalus</em></strong></p>
<p>Change the scientific name of Abyssinian Woodpecker from <em>Dendropicos abyssinicus</em> to <em>Chloropicus abyssinicus</em> (Dickinson and Remsen 2013, Dufort 2016, Shakya et al. 2017).</p>
<p>Change the scientific name of Melancholy Woodpecker from <em>Dendropicos lugubris</em> to <em>Chloropicus lugubris</em> (Dickinson and Remsen 2013, Dufort 2016, Shakya et al. 2017).</p>
<p>Change the scientific name of Gabon Woodpecker from <em>Dendropicos gabonensis</em> to <em>Chloropicus gabonensis</em> (Dickinson and Remsen 2013, Dufort 2016, Shakya et al. 2017).</p>
<p>Change the scientific name of Elliot’s Woodpecker from <em>Dendropicos elliotii</em> to <em>Chloropicus elliotii</em> (Dickinson and Remsen 2013, Dufort 2016, Shakya et al. 2017). Change the scientific name of the monotypic group Elliot’s Woodpecker (Elliot’s) from <em>Dendropicos elliotii elliotii</em> to <em>Chloropicus elliotii elliotii</em>. Change the scientific name of the monotypic group Elliot’s Woodpecker (Johnston’s) from <em>Dendropicos elliotii johnstoni</em> to <em>Chloropicus elliotii johnstoni</em>.</p>
<p>Change the scientific name of Little Gray Woodpecker from <em>Dendropicos elachus</em> to <em>Chloropicus elachus</em> (Dickinson and Remsen 2013, Dufort 2016, Shakya et al. 2017).</p>
<p>Change the scientific name of Speckle-breasted Woodpecker from <em>Dendropicos poecilolaemus</em> to <em>Chloropicus poecilolaemus</em> (Dickinson and Remsen 2013, Dufort 2016, Shakya et al. 2017).</p>
<p>Change the scientific name of Cardinal Woodpecker from <em>Dendropicos fuscescens</em> to <em>Chloropicus fuscescens</em> (Dickinson and Remsen 2013, Dufort 2016, Shakya et al. 2017).</p>
<p>Change the scientific name of Bearded Woodpecker from <em>Dendropicos namaquus</em> to <em>Chloropicus namaquus</em> (Dickinson and Remsen 2013, Dufort 2016, Shakya et al. 2017).</p>
<p>Change the scientific name of Fire-bellied Woodpecker from <em>Dendropicos pyrrhogaster</em> to <em>Chloropicus pyrrhogaster</em> (Dickinson and Remsen 2013, Dufort 2016, Shakya et al. 2017).</p>
<p>Change the scientific name of Golden-crowned Woodpecker from <em>Dendropicos xantholophus</em> to <em>Chloropicus xantholophus</em> (Dickinson and Remsen 2013, Dufort 2016, Shakya et al. 2017).</p>
<p>Change the scientific name of Stierling’s Woodpecker from <em>Dendropicos stierlingi</em> to <em>Chloropicus stierlingi</em> (Dickinson and Remsen 2013, Dufort 2016, Shakya et al. 2017).</p>
<p>Change the scientific name of Brown-backed Woodpecker from <em>Dendropicos obsoletus</em> to <em>Chloropicus obsoletus</em> (Dickinson and Remsen 2013, Dufort 2016, Shakya et al. 2017).</p>
<p>Change the scientific name of African Gray Woodpecker from <em>Dendropicos goertae</em> to <em>Chloropicus goertae</em> (Dickinson and Remsen 2013, Dufort 2016, Shakya et al. 2017). Change the scientific name of the polytypic group African Gray Woodpecker (Gray) from <em>Dendropicos goertae</em> [<em>goertae</em> Group] to <em>Chloropicus goertae</em> [<em>goertae</em> Group]. Change the scientific name of the monotypic group African Gray Woodpecker (Sahel) from <em>Dendropicos goertae koenigi</em> to <em>Chloropicus goertae koenigi</em>.</p>
<p>Change the scientific name of Mountain Gray Woodpecker from <em>Dendropicos spodocephalus</em> to <em>Chloropicus spodocephalus</em> (Dickinson and Remsen 2013, Dufort 2016, Shakya et al. 2017).</p>
<p>Change the scientific name of Olive Woodpecker from <em>Dendropicos griseocephalus</em> to <em>Chloropicus griseocephalus</em> (Dickinson and Remsen 2013, Dufort 2016, Shakya et al. 2017). Change the scientific name of the monotypic group Olive Woodpecker (Red-bellied) from <em>Dendropicos griseocephalus ruwenzori</em> to <em>Chloropicus griseocephalus ruwenzori</em>. Change the scientific name of the monotypic group Olive Woodpecker (Montane) from <em>Dendropicos griseocephalus kilimensis</em> to <em>Chloropicus griseocephalus kilimensis</em>. Change the scientific name of the monotypic group Olive Woodpecker (Southern) from <em>Dendropicos griseocephalus griseocephalus</em> to <em>Chloropicus griseocephalus griseocephalus</em>.</p>
<p>The sequence of species of <em>Chloropicus</em> is revised (Shakya et al. 2017; see also Fuchs and Pons 2015, Dufort 2016).</p>
<p>References:</p>
<p><NAME>., and <NAME>. (editors). 2013. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 1. Aves Press, Eastbourne, United Kingdom.</p>
<p>Dufort, M.J. 2016. An augmented supermatrix phylogeny of the avian family Picidae reveals uncertainty deep in the family tree. Molecular Phylogenetics and Evolution 94: 313-326.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2017. Tapping the woodpecker tree for evolutionary insight. Molecular Phylogenetics and Evolution 116: 182-191.</p>
<p> </p>
<p>page 260, <strong>Okinawa Woodpecker <em>Sapheopipo noguchii</em></strong></p>
<p>The genus <em>Sapheopipo</em> is embedded within <em>Dendrocopos</em> (Fuchs and Pons 2015; see also Shakya et al. 2017). Change the scientific name of Okinawa Woodpecker from <em>Sapheopipo noguchii</em> to <em>Dendrocopos noguchii</em>.</p>
<p>References:</p>
<p><NAME>., and <NAME>. 2015. A new classification of the pied woodpeckers assemblage (Dendropicini: Picidae) based on a comprehensive multi-locus phylogeny. Molecular Phylogenetics and Evolution 88: 28-37.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2017. Tapping the woodpecker tree for evolutionary insight. Molecular Phylogenetics and Evolution 116: 182-191.</p>
<p> </p>
<p>pages 250-251, <strong>Lesser Spotted Woodpecker <em>Dendrocopos minor</em></strong></p>
<p>page 251, <strong>Crimson-breasted Woodpecker <em>Dendrocopos cathpharius</em></strong></p>
<p>page 252, <strong>Downy Woodpecker <em>Picoides pubescens</em></strong></p>
<p>page 252, <strong>Nuttall’s Woodpecker <em>Picoides nuttallii</em></strong></p>
<p>page 252, <strong>Ladder-backed Woodpecker <em>Picoides scalaris</em></strong></p>
<p>page 252, <strong>Red-cockaded Woodpecker <em>Picoides borealis</em></strong></p>
<p>page 253, <strong>Hairy Woodpecker <em>Picoides villosus</em></strong></p>
<p>page 253, <strong>White-headed Woodpecker <em>Picoides albolarvatus</em></strong></p>
<p>page 253, <strong>Smoky-brown Woodpecker <em>Picoides fumigatus</em></strong></p>
<p>page 252, <strong>Strickland’s Woodpecker <em>Picoides stricklandi</em></strong></p>
<p>page 252, <strong>Arizona Woodpecker <em>Picoides arizonae</em></strong></p>
<p>page 254, <strong>Red-rumped Woodpecker <em>Veniliornis kirkii</em></strong></p>
<p>page 254, <strong>Golden-collared Woodpecker <em>Veniliornis cassini</em></strong></p>
<p>page 254, <strong>White-spotted Woodpecker <em>Veniliornis spilogaster</em></strong></p>
<p>page 252, <strong>Checkered Woodpecker <em>Veniliornis mixtus</em></strong></p>
<p>page 252, <strong>Striped Woodpecker <em>Veniliornis lignarius</em></strong></p>
<p>page 254, <strong>Blood-colored Woodpecker <em>Veniliornis sanguineus</em></strong></p>
<p>page 253, <strong>Little Woodpecker <em>Veniliornis passerinus</em></strong></p>
<p>page 254, <strong>Dot-fronted Woodpecker <em>Veniliornis frontalis</em></strong></p>
<p>page 253, <strong>Scarlet-backed Woodpecker <em>Veniliornis callonotus</em></strong></p>
<p>page 253, <strong>Yellow-vented Woodpecker <em>Veniliornis dignus</em></strong></p>
<p>page 253, <strong>Bar-bellied Woodpecker <em>Veniliornis nigriceps</em></strong></p>
<p>page 254, <strong>Red-stained Woodpecker <em>Veniliornis affinis</em></strong></p>
<p>page 254, <strong>Choco Woodpecker <em>Veniliornis chocoensis</em></strong></p>
<p>page 254, <strong>Yellow-eared Woodpecker <em>Veniliornis maculifrons</em></strong></p>
<p>The traditional <em>Dendrocopos</em> is not monophyletic, and consequently is partitioned into five genera (Fuchs and Pons 2015; see also Shakya et al. 2017).</p>
<p>Change the scientific name of Lesser Spotted Woodpecker from <em>Dendrocopos minor</em> to <em>Dryobates minor</em> (Fuchs and Pons 2015, Dufort 2016, Shakya et al. 2017).</p>
<p>Change the scientific name of Crimson-breasted Woodpecker from <em>Dendrocopos cathpharius</em> to <em>Dryobates cathpharius</em> (Fuchs and Pons 2015, Dufort 2016, Shakya et al. 2017). Change the scientific name of the polytypic group Crimson-breasted Woodpecker (Scarlet-breasted) from <em>Dendrocopos cathpharius</em> [<em>cathpharius</em> Group] to <em>Dryobates cathpharius</em> [<em>cathpharius</em> Group]. Change the scientific name of the polytypic group Crimson-breasted Woodpecker (Crimson-breasted) from <em>Dendrocopos cathpharius</em> [<em>pernyii</em> Group] to <em>Dryobates cathpharius</em> [<em>pernyii</em> Group].</p>
<p>In accord with AOS-NACC (Chesser et al. 2018), following Fuchs and Pons (2015), Dufort (2016), Shakya et al. (2017), and other references, make the following changes to the scientific names of these New World woodpeckers:</p>
<p>Change the scientific name of Downy Woodpecker from <em>Picoides pubescens</em> to <em>Dryobates pubescens</em>. Change the scientific name of the polytypic group Downy Woodpecker (Eastern) from <em>Picoides pubescens pubescens</em>/<em>medianus</em> to <em>Dryobates pubescens pubescens</em>/<em>medianus</em>. Change the scientific name of the polytypic group Downy Woodpecker (Rocky Mts.) from <em>Picoides pubescens leucurus/glacialis </em>to <em>Dryobates pubescens leucurus</em>/<em>glacialis</em>. Change the scientific name of the polytypic group Downy Woodpecker (Pacific) from <em>Picoides pubescens gairdnerii</em>/<em>turati</em> to <em>Dryobates pubescens gairdnerii/turati</em>.</p>
<p>Change the scientific name of Nuttall’s Woodpecker from <em>Picoides nuttallii</em> to <em>Dryobates nuttallii</em>.</p>
<p>Change the scientific name of Ladder-backed Woodpecker from <em>Picoides scalaris</em> to <em>Dryobates scalaris</em>, following Fuchs and Pons (2015), Dufort (2016), Shakya et al. (2017), and other references.</p>
<p>Change the scientific name of Red-cockaded Woodpecker from <em>Picoides borealis</em> to <em>Dryobates borealis</em>, following Fuchs and Pons (2015), Dufort (2016), Shakya et al. (2017), and other references.</p>
<p>Change the scientific name of Hairy Woodpecker from <em>Picoides villosus</em> to <em>Dryobates villosus</em>, following Fuchs and Pons (2015), Dufort (2016), Shakya et al. (2017), and other references. Change the scientific name of the polytypic group Hairy Woodpecker (Eastern) from <em>Picoides villosus</em> [<em>villosus</em> Group] to <em>Dryobates villosus</em> [<em>villosus</em> Group]. Change the scientific name of the polytypic group Hairy Woodpecker (Pacific) from <em>Picoides villosus</em> <em>[harrisi</em> Group] to <em>Dryobates villosus</em> [<em>harrisi</em> Group]. Change the scientific name of the polytypic group Hairy Woodpecker (Rocky Mts) from <em>Picoides villosus orius</em>/<em>icastus</em> to <em>Dryobates villosus orius</em>/<em>icastus</em>. Change the scientific name of the polytypic group Hairy Woodpecker (South Mexican) from <em>Picoides villosus</em> <em>jardinii</em>/<em>sanctorum</em> to <em>Dryobates villosus jardinii</em>/<em>sanctorum</em>. Change the scientific name of the monotypic group Hairy Woodpecker (Costa Rican) from <em>Picoides villosus extimus</em> to <em>Dryobates villosus extimus</em>.</p>
<p>Change the scientific name of White-headed Woodpecker from <em>Picoides albolarvatus</em> to <em>Dryobates albolarvatus</em>.</p>
<p>Change the scientific name of Smoky-brown Woodpecker from <em>Picoides fumigatus</em> to <em>Dryobates fumigatus</em>.</p>
<p>Change the scientific name of Strickland’s Woodpecker from <em>Picoides stricklandi</em> to <em>Dryobates stricklandi</em>.</p>
<p>Change the scientific name of Arizona Woodpecker from <em>Picoides arizonae</em> to <em>Dryobates arizonae.</em></p>
<p>Change the scientific name of Red-rumped Woodpecker from <em>Veniliornis kirkii</em> to <em>Dryobates kirkii.</em></p>
<p>Change the scientific name of Golden-collared Woodpecker from <em>Veniliornis cassini</em> to <em>Dryobates cassini</em>.</p>
<p>Change the scientific name of White-spotted Woodpecker from <em>Veniliornis spilogaster</em> to <em>Dryobates spilogaster</em>.</p>
<p>Change the scientific name of Checkered Woodpecker from <em>Veniliornis mixtus</em> to <em>Dryobates mixtus</em>.</p>
<p>Change the scientific name of Striped Woodpecker from <em>Veniliornis lignarius</em> to <em>Dryobates lignarius</em>.</p>
<p>Change the scientific name of Blood-colored Woodpecker from <em>Veniliornis sanguineus</em> to <em>Dryobates sanguineus</em>.</p>
<p>Change the scientific name of Little Woodpecker from <em>Veniliornis passerinus</em> to <em>Dryobates passerinus.</em></p>
<p>Change the scientific name of Dot-fronted Woodpecker from <em>Veniliornis frontalis</em> to <em>Dryobates frontalis</em>.</p>
<p>Change the scientific name of Scarlet-backed Woodpecker from <em>Veniliornis callonotus</em> to <em>Dryobates callonotus</em>.</p>
<p>Change the scientific name of Yellow-vented Woodpecker from <em>Veniliornis dignus</em> to <em>Dryobates dignus</em>.</p>
<p>Change the scientific name of Bar-bellied Woodpecker from <em>Veniliornis nigriceps</em> to <em>Dryobates nigriceps</em>.</p>
<p>Change the scientific name of Red-stained Woodpecker from <em>Veniliornis affinis</em> to <em>Dryobates affinis</em>.</p>
<p>Change the scientific name of Choco Woodpecker from <em>Veniliornis chocoensis</em> to <em>Dryobates chocoensis</em>.</p>
<p>Change the scientific name of Yellow-eared Woodpecker from <em>Veniliornis maculifrons</em> to <em>Dryobates maculifrons</em>.</p>
<p>The sequence of species of <em>Dryobates</em> is revised (Fuchs and Pons 2015, Dufort 2016, Shakya et al. 2017); see also Chesser et al. (2018).</p>
<p>References:</p>
<p>Chesser, R.T., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>., <NAME>, <NAME>, and <NAME>. 2018. <a href="http://www.bioone.org/doi/pdf/10.1642/AUK-18-62.1">Fifty-ninth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 135: 798-813.</p>
<p><NAME>. 2016. An augmented supermatrix phylogeny of the avian family Picidae reveals uncertainty deep in the family tree. Molecular Phylogenetics and Evolution 94: 313-326.</p>
<p><NAME>., and <NAME>. 2015. A new classification of the pied woodpeckers assemblage (Dendropicini: Picidae) based on a comprehensive multi-locus phylogeny. Molecular Phylogenetics and Evolution 88: 28-37.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2017. Tapping the woodpecker tree for evolutionary insight. Molecular Phylogenetics and Evolution 116: 182-191.</p>
<p> </p>
<p>page 258, <strong>Banded Woodpecker <em>Picus miniaceus</em></strong></p>
<p>pages 258-259, <strong>Greater Yellownape <em>Picus flavinucha</em></strong></p>
<p>page 259, <strong>Checker-throated Woodpecker <em>Picus mentalis</em></strong></p>
<p>The traditional genus <em>Picus </em>is not monophyletic (Fuchs et al. 2008, Dufort 2016, and Shakya et al. 2017).</p>
<p>Change the scientific name of Banded Woodpecker from <em>Picus miniaceus</em> to <em>Chrysophlegma miniaceum</em> (Fuchs et al. 2008). Change the spelling of the subspecies name <em>perlutus</em> to <em>perlutum</em>. Change the spelling of the subspecies name <em>malaccensis</em> to <em>malaccense</em>. Change the spelling of the subspecies name <em>niasensis</em> to <em>niasense</em>. Change the spelling of the name of the nominate subspecies from <em>miniaceus</em> to <em>miniaceum</em>.</p>
<p>Change the scientific name of Greater Yellownape from <em>Picus flavinucha</em> to <em>Chrysophlegma flavinucha</em> (Fuchs et al. 2008). Change the spelling of the subspecies name <em>mystacalis</em> to <em>mystacale</em>.</p>
<p>Change the scientific name of Checker-throated Woodpecker from <em>Picus mentalis</em> to <em>Chrysophlegma mentale</em> (Fuchs et al. 2008). Change the scientific name of the monotypic group Checker-throated Woodpecker (Checker-throated) from <em>Picus mentalis humii</em> to <em>Chrysophlegma mentale humii</em>. Change the scientific name of the monotypic group Checker-throated Woodpecker (Javan) from <em>Picus mentalis mentalis</em> to <em>Chrysophlegma mentale mentale</em>.</p>
<p>References:</p>
<p>Dufort, M.J. 2016. An augmented supermatrix phylogeny of the avian family Picidae reveals uncertainty deep in the family tree. Molecular Phylogenetics and Evolution 94: 313-326.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2008. Molecular support for a rapid cladogenesis of the woodpecker clade Malarpicini, with further insights into the genus <em>Picus</em> (Piciformes: Picinae). Molecular Phylogenetics and Evolution 48: 34-36.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2017. Tapping the woodpecker tree for evolutionary insight. Molecular Phylogenetics and Evolution 116: 182-191.</p>
<p> </p>
<p>page 141, <strong>Greater Vasa Parrot <em>Mascarinus vasa</em></strong></p>
<p>page 141, <strong>Lesser Vasa Parrot <em>Mascarinus niger</em></strong></p>
<p>page 141, <strong>Seychelles Parrot <em>Mascarinus barklyi</em></strong></p>
<p>Vasa parrots are not closely related to Mascarene Parrot <em>Mascarinus mascarin</em> (Podsiadlowski et al. 2017).</p>
<p>Change the scientific name of Greater Vasa-Parrot from <em>Mascarinus vasa</em> to <em>Coracopsis vasa</em>, and change the English name from Greater Vasa-Parrot to Greater Vasa Parrot (e.g., Dowsett and Forbes-Watson 1993, Safford and Hawkins 2013).</p>
<p>Change the scientific name of Lesser Vasa-Parrot from <em>Mascarinus niger</em> to <em>Coracopsis nigra</em>, and change the English name from Lesser Vasa-Parrot to Lesser Vasa Parrot (e.g., Dowsett and Forbes-Watson 1993, Safford and Hawkins 2013). Change the spelling of the name of the nominate subspecies from <em>niger</em> to <em>nigra</em>.</p>
<p>Change the scientific name of Seychelles Parrot from <em>Mascarinus barklyi</em> to <em>Coracopsis barklyi</em>. Reposition Seychelles Parrot to follow, not precede, Lesser Vasa Parrot.</p>
<p>References:</p>
<p><NAME>., and A.D. Forbes-Watson. 1993. Checklist of birds of the Afrotropical and Malagasy regions. Volume 1: species limits and distribution. Tauraco Press, Liège, Belgium.</p>
<p><NAME>., <NAME>, and <NAME>. 2017. Revising the phylogenetic position of the extinct Mascarene Parrot <em>Mascarinus mascarin</em> (Linnaeus 1771) (Aves: Psittaciformes: Psittacidae). Molecular Phylogenetics and Evolution 107: 499-502.</p>
<p><NAME>., and <NAME> (editors). 2013. The birds of Africa. Volume VIII. The Malagasy region. <NAME>, London.</p>
<p> </p>
<p>page 700, <strong>Stephens Island Wren <em>Xenicus lyalli</em></strong></p>
<p>Change the scientific name of Stephens Island Wren from <em>Xenicus lyalli</em> to <em>Traversia lyalli</em> (Mitchell et al. 2016); reposition Stephens Island Wren at the beginning, not at the end, of New Zealand Wrens Acanthisittidae (Mitchell et al. 2016).</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2016. Ancient mitochondrial genomes clarify the evolutionary history of New Zealand’s enigmatic acanthisittid wrens. Molecular Phylogenetics and Evolution 102: 295-304.</p>
<p> </p>
<p>page 265, <strong>Sunbird Asity <em>Neodrepanis coruscans</em></strong></p>
<p>page 265, <strong>Yellow-bellied Asity <em>Neodrepanis hypoxantha</em></strong></p>
<p>Change the English name of <em>Neodrepanis coruscans</em> from Sunbird Asity to Common Sunbird-Asity (Sinclair and Langrand 1998, Safford and Hawkins 2013).</p>
<p>Change the English name of <em>Neodrepanis hypoxantha</em> from Yellow-bellied Asity to Yellow-bellied Sunbird-Asity (Sinclair and Langrand 1998, Safford and Hawkins 2013).</p>
<p>References:</p>
<p><NAME>., and <NAME> (editors). 2013. The birds of Africa. Volume VIII. The Malagasy region. Ch<NAME>, London.</p>
<p><NAME>., and <NAME>. 1998. Birds of the Indian Ocean islands: Madagascar, Mauritius, Réunion, Rodrigues, Seychelles and the Comoros. Struik Publishers, Cape Town.</p>
<p> </p>
<p>page 300, <strong>White-lined Antbird <em>Percnostola lophotes</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop744.htm">Proposal 744</a>), change the scientific name of White-lined Antbird from <em>Percnostola lophotes</em> to <em>Myrmoborus lophotes</em>, following Isler et al. (2013). Reposition White-lined Antbird to immediately follow Black-tailed Antbird <em>Myrmoborus melanurus</em>.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, and <NAME>. 2013. Taxonomic revision of <em>Myrmeciza</em> (Aves: Passeriformes: Thamnophilidae) into 12 genera based on phylogenetic, morphological, behavioral, and ecological data. Zootaxa 3717: 469-497.</p>
<p> </p>
<p>page 303, <strong>White-throated Antbird <em>Gymnopithys salvini</em></strong></p>
<p>page 303, <strong>Lunulated Antbird <em>Gymnopithys lunulatus</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop740.htm">Proposal 740</a>), change the scientific name of White-throated Antbird from <em>Gymnopithys salvini</em> to <em>Oneillornis salvini</em>, following Brumfield et al. (2007) and Isler et al. (2014).</p>
<p>Change the scientific name of Lunulated Antbird from <em>Gymnopithys lunulatus</em> to <em>Oneillornis lunulatus,</em> following Brumfield et al. (2007) and Isler et al. (2014).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2007. Phylogenetic conservatism and antiquity of a tropical specialization: army-ant-following in the typical antbirds (Thamnophilidae). Molecular Phylogenetics and Evolution 45: 1-13.</p>
<p>Isler, M.L., <NAME>, and <NAME>. 2014. Systematics of the obligate ant-following clade of antbirds (Aves: Passeriformes: Thamnophilidae). Wilson Journal of Ornithology 126: 635–648.</p>
<p> </p>
<p>page 324, <strong>Golden-faced Tyrannulet <em>Zimmerius chrysops</em></strong></p>
<p>Following Rheindt et al. (2013, 2014; and see also AOS-SACC <a href="http://www.museum.lsu.edu/~Remsen/SACCprop766.htm">Proposal 766</a>), the monotypic group Golden-faced Tyrannulet (Loja) <em>Zimmerius chrysops flavidifrons</em> is reclassified as Peruvian Tyrannulet (Loja) <em>Zimmerius viridiflavus flavidifrons</em>. Position <em>flavidifrons</em> immediately following the heading for Peruvian Tyrannulet <em>Zimmerius viridiflavus</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, and <NAME>. 2013. Rampant polyphyly indicates cryptic diversity in a clade of Neotropical flycatchers (Aves: Tyrannidae). Biological Journal of the Linnean Society 108: 889–900.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2014. Introgression and phenotypic assimilation in <em>Zimmerius </em>flycatchers (Tyrannidae): population genetic and phylogenetic inferences from genome-wide SNPs. Systematic Biology 63: 134-152.</p>
<p> </p>
<p>page 586, <strong>Tooth-billed Catbird <em>Scenopoeetes dentirostris</em></strong></p>
<p>Change the English name of <em>Scenopoeetes dentirostris</em> from Tooth-billed Catbird to Tooth-billed Bowerbird (Schodde and Mason 1999, Christidis and Boles 2008).</p>
<p>References:</p>
<p><NAME>. and <NAME>. 2008. Systematics and taxonomy of Australian birds. CSIRO Publishing, Melbourne.</p>
<p><NAME>., and <NAME>. 1999. The directory of Australian birds. Passerines. CSIRO Publishing, Canberra.</p>
<p> </p>
<p>page 586, <strong>Macgregor’s Bowerbird <em>Amblyornis macgregoriae</em></strong></p>
<p>Revise the spelling of the English name of <em>Amblyornis macgregoriae</em> from Macgregor’s Bowerbird to MacGregor’s Bowerbird (Beehler and Pratt 2016).</p>
<p>Reference:</p>
<p><NAME>., and <NAME>. 2016. Birds of New Guinea: distribution, taxonomy, and systematics. Princeton University Press, Princeton, New Jersey.</p>
<p> </p>
<p>page 467, <strong>Ruwenzori Batis <em>Batis diops</em></strong></p>
<p>Change the spelling of the English name of <em>Batis diops</em> from Ruwenzori Batis to Rwenzori Batis.</p>
<p> </p>
<p>page 574, <strong>Mountain Sooty Boubou <em>Laniarius poensis</em></strong></p>
<p>Change the English name of <em>Laniarius poensis</em> from Mountain Sooty Boubou to Western Boubou.</p>
<p>Change the English name of the monotypic group <em>Laniarius poensis camerunensis</em> from Mountain Sooty Boubou (Cameroon) to Western Boubou (Cameroon).</p>
<p>Change the English name of the monotypic group <em>Laniarius poensis poensis</em> from Mountain Sooty Boubou (Bioko) to Western Boubou (Bioko).</p>
<p> </p>
<p>page 487, <strong>Rusty Pitohui <em>Colluricincla ferruginea</em></strong></p>
<p>Change the scientific name of Rusty Pitohui from <em>Colluricincla ferruginea</em> to <em>Pseudorectes ferrugineus</em> (Dickinson and Christidis 2014).</p>
<p>Change the spelling of subspecies name <em>leucorhyncha</em> to <em>leucorhynchus</em>.</p>
<p>Change the spelling of subspecies name <em>fusca</em> to <em>fuscus</em>.</p>
<p>Change the spelling of subspecies name <em>ferruginea</em> to <em>ferrugineus</em>.</p>
<p>Change the spelling of subspecies name <em>holerythra</em> to <em>holerythrus</em>.</p>
<p>Change the spelling of subspecies name <em>clara</em> to <em>clarus</em>.</p>
<p>Reference:</p>
<p><NAME>., and <NAME>. 2014. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 2. Aves Press, Eastbourne, United Kingdom.</p>
<p> </p>
<p>page 487, <strong>White-bellied Pitohui <em>Colluricincla incerta</em></strong></p>
<p>Change the scientific name of White-bellied Pitohui from <em>Colluricincla incerta</em> to <em>Pseudorectes incertus</em> (Dickinson and Christidis 2014).</p>
<p>Reference:</p>
<p><NAME>., and <NAME>. 2014. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 2. Aves Press, Eastbourne, United Kingdom.</p>
<p> </p>
<p>page 485, <strong>Vanuatu Whistler <em>Pachycephala chlorura</em></strong></p>
<p>Subspecies <em>vanikorensis</em> is transferred from Vanuatu Whistler to Temotu Whistler <em>Pachycephala vanikorensis</em>, and subspecies <em>littayei</em> is transferred to Vanuatu Whistler from New Caledonian Whistler <em>Pachycephala caledonica</em>. The sequence of subspecies of Vanuatu Whistler is revised.</p>
<p>Revise the range description of subspecies <em>intacta</em> from “Vanuatu and Banks Group” to “Vanuatu (Banks Group, and larger islands of Vanuatu south to Efate)”.</p>
<p>Revise the range description of nominate <em>chlorura</em> from “Erromango (Vanuatu)” to “southern Vanuatu (Erromango)”.</p>
<p>Revise the range description of subspecies <em>cucullata</em> from “Aneityum (Vanuatu)” to “southern Vanuatu (Aneityum)”.</p>
<p> </p>
<p>page 486, <strong>New Caledonian Whistler <em>Pachycephala caledonica</em></strong></p>
<p>We follow Mayr (1967) in recognizing New Caledonian Whistler <em>Pachycephala caledonica</em> as monotypic. Transfer subspecies <em>littayei</em> from New Caledonian Whistler to Vanuatu Whistler <em>Pachycephala chlorura</em>, and position <em>littayei </em>immediately following subspecies <em>cucullata</em>. Revise the range description from “Loyalty Islands (Lifou and Maré)” to “Loyalty Islands (Ouvéa and Lifou)”. Delete the entry for <em>Pachycephala caledonica caledonica</em>.</p>
<p>Reference:</p>
<p>Mayr, E. 1967. <a href="https://biodiversitylibrary.org/page/14482148">Subfamily Pachycephalinae, whistlers or thickheads</a>. Pages 3-51 in R.A. Paynter, Jr. (editor), <a href="https://biodiversitylibrary.org/page/14482120">Check-list of birds of the world. Volume XII</a>. Museum of Comparative Zoology, Cambridge, Massachusetts.</p>
<p> </p>
<p>page 485, <strong>Temotu Whistler <em>Pachycephala utupuae</em></strong></p>
<p>With the transfer of subspecies <em>vanikorensis</em> from Vanuatu Whistler <em>Pachycephala chlorura</em> to Temotu Whistler, change the scientific name of Temotu Whistler from <em>Pachycephala utupuae</em> to <em>Pachycephala vanikorensis</em>.</p>
<p>Reposition subspecies <em>ornata</em> to precede (rather than follow) subspecies <em>utupuae</em>. Revise the range description of subspecies <em>ornata</em> from “N Santa Cruz Islands” to “northern Santa Cruz Islands (Nendo, Reef Islands, and Duff Islands)”.</p>
<p>Revise the range description of nominate <em>utupuae</em> from “Utupua I. (Solomon Islands)” to “central Santa Cruz Islands (Utupua Island)”.</p>
<p>The affinities of subspecies <em>vanikorensis</em> are not known with certainty. Previously we classified <em>vanikorensis</em> as a subspecies of Vanuatu Whistler <em>Pachycephala chlorura</em>, but on the basis of geography, it seems more likely to belong with Temotu Whistler instead. Position subspecies <em>vanikorensis</em> immediately following nominate <em>utupuae</em>. Revise the range description of <em>vanikorensis</em> from “Vanikoro and Santa Cruz Islands” to “southern Santa Cruz Islands (Vanikoro)”.</p>
<p> </p>
<p>page 571, <strong>Southern Gray Shrike <em>Lanius meridionalis</em></strong></p>
<p><em>Lanius meridionalis</em> is not polytypic, but instead is best considered to be a monotypic species; all subspecies previously assigned to this species are transferred to Great Gray Shrike <em>Lanius excubitor</em> (Olsson et al. 2010, Peer et al. 2011). Change the English name of <em>Lanius meridionalis</em> from Southern Gray Shrike to Iberian Gray Shrike. Position Iberian Gray Shrike immediately following Northern Shrike <em>Lanius borealis</em>. Revise the range description from “Iberian Peninsula and s France; > to nw Africa” to “Iberian Peninsula and southern France”.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2010. The <em>Lanius excubitor</em> (Aves, Passeriformes) conundrum—taxonomic dilemma when molecular and non-molecular data tell different stories. Molecular Phylogenetics and Evolution 55: 347–357.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2011. Complex biogeographic history of <em>Lanius</em> shrikes and its implications for the evolution of defenses against avian brood parasitism. Condor 113: 385–394.</p>
<p> </p>
<p>page 571, <strong>Great Gray Shrike <em>Lanius excubitor</em></strong></p>
<p>Subspecies <em>koenigi</em>, <em>algeriensis</em>, <em>elegans</em>, and <em>leucopygos</em>, previously classified as subspecies of Southern Gray Shrike <em>Lanius meridionalis</em> (now Iberian Gray Shrike <em>Lanius meridionalis</em>), are transferred to Great Gray Shrike (Olsson et al. 2010, Peer et al. 2011). Collectively these four subspecies also represent a new polytypic group, Great Gray Shrike (Sahara) <em>Lanius excubitor</em> [<em>elegans</em> Group].</p>
<p>Subspecies <em>jebelmarrae</em>, with range “W Sudan (Darfur)”, is considered to be a junior synonym of <em>leucopygos</em> (Rand 1960, Yosef 2008), and is deleted. Revise the range description of <em>leucopygos</em> from “S Sahara (Mali to Nile River valley of the Sudan)” to “southern Sahara and Sahel from Mauritania to western and central Sudan”. Note that Nikolaus (1984) also did not recognize <em>leucopygos</em>, but considered this name to be based on specimens of nonbreeding <em>aucheri</em>, not <em>leucopygos</em>.</p>
<p>Subspecies <em>aucheri</em> and <em>buryi</em>, previously classified as subspecies of Southern Gray Shrike <em>Lanius meridionalis</em> (now Iberian Gray Shrike <em>Lanius meridionalis</em>), are transferred to Great Gray Shrike (Olsson et al. 2010, Peer et al. 2011). Collectively these two subspecies also represent a new polytypic group, Great Gray Shrike (Arabian) <em>Lanius excubitor aucheri</em>/<em>buryi</em>. Subspecies <em>theresae</em>, with range “Galilee hills of n Israel and s Lebanon”, is considered to be a junior synonym of <em>aucheri </em>(Vaurie 1955), and is deleted. Revise the range description of <em>aucheri </em>from “W coast of Red Sea to s Iran and Arabian Peninsula” to “eastern Sudan south to northern Somalia; southern Lebanon, Israel, Jordan, and Arabian Peninsula east to Iran”.</p>
<p>Subspecies <em>uncinatus</em>, previously classified as a subspecies of Southern Gray Shrike <em>Lanius meridionalis</em> (now Iberian Gray Shrike <em>Lanius meridionalis</em>), is transferred to Great Gray Shrike (Olsson et al. 2010, Peer et al. 2011). We also recognize <em>uncinatus</em> as a new monotypic group, Great Gray Shrike (Socotra) <em>Lanius excubitor uncinatus</em>.</p>
<p>The monotypic group Southern Gray Shrike (Steppe) <em>Lanius meridionalis pallidirostris</em> is transferred to Great Gray Shrike (Olsson et al. 2010, Peer et al. 2011). Change the name of this group to Great Gray Shrike (Steppe) <em>Lanius excubitor pallidirostris</em>.</p>
<p>Subspecies <em>lahtora</em>, previously classified as a subspecies of Southern Gray Shrike <em>Lanius meridionalis</em> (now Iberian Gray Shrike <em>Lanius meridionalis</em>), is transferred to Great Gray Shrike (Olsson et al. 2010, Peer et al. 2011). We also recognize <em>lahtora</em> as a new monotypic group, Great Gray Shrike (Indian) <em>Lanius excubitor lahtora</em>.</p>
<p>References:</p>
<p>Nikolaus, G. 1984. <a href="https://biodiversitylibrary.org/page/40084976"><em>Lanius excubitor</em> “<em>jebelmarrae</em>”</a>. Bulletin of the British Ornithologists’ Club 104: 147.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2010. The <em>Lanius excubitor</em> (Aves, Passeriformes) conundrum—taxonomic dilemma when molecular and non-molecular data tell different stories. Molecular Phylogenetics and Evolution 55: 347–357.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2011. Complex biogeographic history of <em>Lanius</em> shrikes and its implications for the evolution of defenses against avian brood parasitism. Condor 113: 385–394.</p>
<p><NAME>. 1960. <a href="https://biodiversitylibrary.org/page/14481033">Family Laniidae, shrikes and allies</a>. Pages 309–365 in E. Mayr and <NAME>, Jr. (editors), <a href="https://biodiversitylibrary.org/page/14480984">Check-list of birds of the world. Volume IX</a>. Museum of Comparative Zoology, Cambridge, Massachusetts.</p>
<p><NAME>. 2008. Family Laniidae (shrikes). Pages 732-796 in <NAME>, <NAME>, and <NAME> (editors), Handbook of the birds of the world. Volume 13. Lynx Edicions, Barcelona.</p>
<p> </p>
<p>page 578, <strong>Crow-billed Drongo <em>Dicrurus annectans</em></strong></p>
<p>Correct the spelling of the species name from <em>annectans</em> to <em>annectens </em>(Dickinson and Christidis 2014).</p>
<p>Reference:</p>
<p><NAME>., and <NAME>. 2014. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 2. Aves Press, Eastbourne, United Kingdom.</p>
<p> </p>
<p>page 579, <strong>Hair-crested Drongo <em>Dicrurus hottentottus</em></strong></p>
<p>Previously we recognized a large, polytypic group Hair-crested Drongo (Hair-crested) <em>Dicrurus hottentottus </em>[<em>hottentottus</em> Group], with 11 subspecies. One subspecies (<em>viridinitens</em>) is transferred to Sumatran Drongo <em>Dicrurus sumatranus</em>, and we also partition Hair-crested Drongo (Hair-crested) into nine groups. Therefore change the scientific name of the (now greatly smaller!) group Hair-crested Drongo (Hair-crested) from <em>Dicrurus hottentottus</em> [<em>hottentottus</em> Group] to <em>Dicrurus hottentottus hottentottus</em>/<em>brevirostris</em>.</p>
<p>We recognize a previously overlooked subspecies, <em>faberi</em> Hoogerwerf 1962, following Rocamora and Yeatman-Berthelot (2009), with range “Panaitan Island and islands in Jakarta Bay, western Java”. Insert subspecies <em>faberi</em> immediately following the heading for the new polytypic group Hair-crested Drongo (Javan) <em>Dicrurus hottentottus jentincki</em>/<em>faberi</em>.</p>
<p>Revise the range description of subspecies <em>jentincki</em> from “Bali and Kangean Islands” to “eastern Java, Bali, Masalembu, and Kangean Island”.</p>
<p>Revise the range description of subspecies <em>leucops</em> from “Sulawesi and adjacent islands, including Talaud Is., Sangihe, Banggai Is., and Tukangbesi Is.” to “Sulawesi, Matasiri Island (Java Sea), Sanghie, and Siau”.</p>
<p>We recognize a previously overlooked subspecies, <em>banggaiensis</em> Vaurie 1952, following Rocamora and Yeatman-Berthelot (2009), with range “Banggai Islands, off of eastern Sulawesi”. Insert subspecies <em>banggaiensis</em> immediately following subspecies <em>leucops</em>.</p>
<p>Reference:</p>
<p>Rocamora, G.J., and <NAME>. 2009. Family Dicruridae (drongos). Pages 172-226 in <NAME>, <NAME>, and <NAME> (editors), Handbook of the birds of the world. Volume 14. Lynx Edicions, Barcelona.</p>
<p> </p>
<p>page 579, <strong>Sumatran Drongo <em>Dicrurus sumatranus</em></strong></p>
<p>With the transfer of subspecies <em>viridinitens</em> from Hair-crested Drongo to Sumatran Drongo, we add an entry for the nominate subspecies, <em>sumatranus</em>. We also recognize <em>sumatranus</em> as a new monotypic group, Sumatran Drongo (Sumatran) <em>Dicrurus sumatranus sumatranus</em>. Position nominate <em>sumatranus</em> to immediately follow the heading for the species.</p>
<p>Subspecies <em>viridinitens</em>, previously considered to be a subspecies of Hair-crested Drongo <em>Dicrurus hottentottus</em>, is transferred to Sumatran Drongo, following Rocamora and Yeatman-Berthelot (2009). We also recognize <em>viridinitens</em> as a new monotypic group, Sumatran Drongo (Mentawai) <em>Dicrurus sumatranus viridinitens</em>. Position <em>viridinitens</em> to immediately follow nominate <em>sumatranus</em>.</p>
<p>Reference:</p>
<p><NAME>., and <NAME>. 2009. Family Dicruridae (drongos). Pages 172-226 in <NAME>, <NAME>, and <NAME> (editors), Handbook of the birds of the world. Volume 14. Lynx Edicions, Barcelona.</p>
<p> </p>
<p>page 577, <strong>Pygmy Drongo-Fantail <em>Chaetorhynchus papuensis</em></strong></p>
<p>Change the English name of <em>Chaetorhynchus papuensis</em> from Pygmy Drongo-Fantail to Drongo Fantail.</p>
<p> </p>
<p>page 475, <strong>Fantailed Monarch <em>Symposiachrus axillaris</em></strong></p>
<p>Change the English name of <em>Symposiachrus axillaris</em> from Fantailed Monarch to Fan-tailed Monarch.</p>
<p> </p>
<p>page 477, <strong>Kolombangra Monarch <em>Symposiachrus browni</em></strong></p>
<p>Change the English name of <em>Symposiachrus browni</em> from Kolombangra Monarch to Kolombangara Monarch.</p>
<p>Revise the range description of nominate <em>browni </em>from “Kolombangra, Vonavona, Kohinggo, New Georgia, and Vangunu (Solomon Islands)” to “Kolombangara, Vonavona, Kohinggo, New Georgia, and Vangunu (Solomon Islands)”.</p>
<p> </p>
<p>page 587, <strong>Gray Jay <em>Perisoreus canadensis</em></strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2018), change the English name of <em>Perisoreus canadensis</em> from Gray Jay to Canada Jay (see Strickland 2017).</p>
<p>Change the English name of the polytypic group <em>Perisoreus canadensis</em> [<em>canadensis</em> Group] from Gray Jay (Northern) to Canada Jay (Northern).</p>
<p>Change the English name of the polytypic group <em>Perisoreus canadensis capitalis</em>/<em>albescens </em>from Gray Jay (Rocky Mts.) to Canada Jay (Rocky Mts.).</p>
<p>Change the English name of the polytypic group <em>Perisoreus canadensis</em> [<em>obscurus</em> Group] from Gray Jay (Pacific) to Canada Jay (Pacific).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, A.W. Kratter, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2018. <a href="http://www.bioone.org/doi/pdf/10.1642/AUK-18-62.1">Fifty-ninth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 135: 798-813.</p>
<p>Strickland, D. 2017. How the Canada Jay lost its name and why it matters. Ontario Birds 35: 1-16.</p>
<p> </p>
<p>page 584, <strong>Vogelkop Superb Bird-of-Paradise <em>Lophorina niedda</em></strong></p>
<p>Change the English name of <em>Lophorina niedda</em> from Vogelkop Superb Bird-of-Paradise to Vogelkop Lophorina.</p>
<p> </p>
<p>page 584, <strong>Greater Superb Bird-of-Paradise <em>Lophorina superba</em></strong></p>
<p>Change the English name of <em>Lophorina superba</em> from Greater Superb Bird-of-Paradise to Greater Lophorina.</p>
<p> </p>
<p>page 584, <strong>Lesser Superb Bird-of-Paradise <em>Lophorina minor</em></strong></p>
<p>Change the English name of <em>Lophorina minor</em> from Lesser Superb Bird-of-Paradise to Lesser Lophorina.</p>
<p> </p>
<p>page 361, <strong>Greater Striped-Swallow <em>Cecropis cucullata</em></strong></p>
<p>Greater Striped-Swallow <em>Cecropis cucullata</em> and Lesser Striped-Swallow <em>Cecropis abyssinica</em> are not sister species; therefore change “Striped-Swallow” to “Striped Swallow”.</p>
<p> </p>
<p>page 361, <strong>Lesser Striped-Swallow <em>Cecropis abyssinica</em></strong></p>
<p>Greater Striped-Swallow <em>Cecropis cucullata</em> and Lesser Striped-Swallow <em>Cecropis abyssinica</em> are not sister species; therefore change “Striped-Swallow” to “Striped Swallow”.</p>
<p> </p>
<p>page 468, <strong>Yellow-bellied Fairy-Fantail <em>Chelidorhynx hypoxantha</em></strong></p>
<p>Correct the spelling of the species name from <em>hypoxantha</em> to <em>hypoxanthus</em> (Dickinson and Christidis 2014).</p>
<p>Reference:</p>
<p>Dickinson, E.C., and <NAME>. 2014. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 2. Aves Press, Eastbourne, United Kingdom.</p>
<p> </p>
<p>page 528, <strong>Yellow Tit <em>Machlolophus holsti</em></strong></p>
<p>Change the English name of <em>Machlolophus holsti</em> from Yellow Tit to Taiwan Yellow Tit.</p>
<p> </p>
<p>page 527, <strong>Black-lored Tit <em>Machlolophus xanthogenys</em></strong></p>
<p>Change the English name of <em>Machlolophus xanthogenys</em> from Black-lored Tit to Himalayan Black-lored Tit.</p>
<p> </p>
<p>page 527, <strong>Indian Tit <em>Machlolophus aplonotus</em></strong></p>
<p>Change the English name of <em>Machlolophus aplonotus</em> from Indian Tit to Indian Yellow Tit (Rasmussen and Anderton 2005).</p>
<p>Reference:</p>
<p><NAME>., and <NAME>. 2005. Birds of South Asia. The Ripley guide. Volume 2: attributes and status. Smithsonian Institution and Lynx Edicions, Washington D.C. and Barcelona.</p>
<p> </p>
<p>page 532, <strong>African Spotted-Creeper <em>Salpornis salvadori</em></strong></p>
<p>Change the English name from African Spotted-Creeper to African Spotted Creeper.</p>
<p> </p>
<p>page 532, <strong>Indian Spotted-Creeper <em>Salpornis spilonotus</em></strong></p>
<p>Change the English name from Indian Spotted-Creeper to Indian Spotted Creeper. Change the spelling of the species name from <em>spilonotus</em> to <em>spilonota</em> (Dickinson and Christidis 2014).</p>
<p>Reference:</p>
<p><NAME>., and <NAME>. 2014. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 2. Aves Press, Eastbourne, United Kingdom.</p>
<p> </p>
<p>page 388, <strong>Rufous-throated Dipper <em>Cinclus schulzi</em></strong></p>
<p>Change the scientific name of Rufous-throated Dipper from <em>Cinclus schulzi</em> to <em>Cinclus schulzii</em> (Dickinson and Christidis 2014).</p>
<p>Revise the range description from “E slope of Andes of extreme nw Argentina and se Bolivia” to “Andes of southern Bolivia (Chuquisaca and Tarija) and northwestern Argentina (south to Catamarca and Tucumán)”.</p>
<p>Reference:</p>
<p><NAME>., and <NAME>. 2014. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 2. Aves Press, Eastbourne, United Kingdom.</p>
<p> </p>
<p>page 375, <strong>Black-and-white Bulbul <em>Pycnonotus melanoleucos</em></strong></p>
<p>The traditional, broadly defined genus <em>Pycnonotus</em> is not monophyletic, at least if the genus <em>Spizixos</em> is recognized (Oliveros and Moyle 2010, Shakya and Sheldon 2018, Fuchs et al. 2018b). Change the scientific name of Black-and-white Bulbul from <em>Pycnonotus melanoleucos</em> to <em>Brachypodius melanoleucos</em>.</p>
<p>The sequence of species of <em>Brachypodius </em>is revised.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2018b. Phylogenetic affinities of the enigmatic Bare-faced Bulbul <em>Pycnonotus hualon</em> with description of a new genus. Ibis 160: 659-665.</p>
<p><NAME>., and <NAME>. 2010. Origin and diversification of Philippine bulbuls. Molecular Phylogenetics and Evolution 54: 822-832.</p>
<p><NAME>., and <NAME>. 2018. The phylogeny of the world’s bulbuls (Pycnonotidae) inferred using a supermatrix approach. Ibis 159: 498-509.</p>
<p> </p>
<p>page 377, <strong>Puff-backed Bulbul <em>Pycnonotus eutilotus</em></strong></p>
<p>The traditional, broadly defined genus <em>Pycnonotus</em> is not monophyletic, at least if the genus <em>Spizixos</em> is recognized (Oliveros and Moyle 2010, Shakya and Sheldon 2018, Fuchs et al. 2018b). Change the scientific name of Puff-backed Bulbul from <em>Pycnonotus eutilotus</em> to <em>Brachypodius eutilotus</em>.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>owie. 2018b. Phylogenetic affinities of the enigmatic Bare-faced Bulbul <em>Pycnonotus hualon</em> with description of a new genus. Ibis 160: 659-665.</p>
<p><NAME>., and <NAME>. 2010. Origin and diversification of Philippine bulbuls. Molecular Phylogenetics and Evolution 54: 822-832.</p>
<p><NAME>., and <NAME>. 2018. The phylogeny of the world’s bulbuls (Pycnonotidae) inferred using a supermatrix approach. Ibis 159: 498-509.</p>
<p> </p>
<p>page 377, <strong>Yellow-wattled Bulbul <em>Pycnonotus urostictus</em></strong></p>
<p>The traditional, broadly defined genus <em>Pycnonotus</em> is not monophyletic, at least if the genus <em>Spizixos</em> is recognized (Oliveros and Moyle 2010, Shakya and Sheldon 2018, Fuchs et al. 2018b). Change the scientific name of Yellow-wattled Bulbul from <em>Pycnonotus urostictus</em> to <em>Brachypodius urostictus</em>.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2018b. Phylogenetic affinities of the enigmatic Bare-faced Bulbul <em>Pycnonotus hualon</em> with description of a new genus. Ibis 160: 659-665.</p>
<p><NAME>., and <NAME>. 2010. Origin and diversification of Philippine bulbuls. Molecular Phylogenetics and Evolution 54: 822-832.</p>
<p><NAME>., and <NAME>. 2018. The phylogeny of the world’s bulbuls (Pycnonotidae) inferred using a supermatrix approach. Ibis 159: 498-509.</p>
<p> </p>
<p>page 375, <strong>Gray-headed Bulbul <em>Pycnonotus priocephalus</em></strong></p>
<p>The traditional, broadly defined genus <em>Pycnonotus</em> is not monophyletic, at least if the genus <em>Spizixos</em> is recognized (Oliveros and Moyle 2010, Shakya and Sheldon 2018, Fuchs et al. 2018b). Change the scientific name of Gray-headed Bulbul from <em>Pycnonotus priocephalus</em> to <em>Brachypodius priocephalus</em> (Dickinson and Christidis 2014).</p>
<p>References:</p>
<p><NAME>., and <NAME>. 2014. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 2. Aves Press, Eastbourne, United Kingdom.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2018b. Phylogenetic affinities of the enigmatic Bare-faced Bulbul <em>Pycnonotus hualon</em> with description of a new genus. Ibis 160: 659-665.</p>
<p><NAME>., and <NAME>. 2010. Origin and diversification of Philippine bulbuls. Molecular Phylogenetics and Evolution 54: 822-832.</p>
<p><NAME>., and <NAME>. 2018. The phylogeny of the world’s bulbuls (Pycnonotidae) inferred using a supermatrix approach. Ibis 159: 498-509.</p>
<p> </p>
<p>page 375, <strong>Black-headed Bulbul <em>Pycnonotus atriceps</em></strong></p>
<p>The traditional, broadly defined genus <em>Pycnonotus</em> is not monophyletic, at least if the genus <em>Spizixos</em> is recognized (Oliveros and Moyle 2010, Shakya and Sheldon 2018, Fuchs et al. 2018b). Change the scientific name of Black-headed Bulbul from <em>Pycnonotus atriceps</em> to <em>Brachypodius atriceps</em> (Dickinson and Christidis 2014).</p>
<p>References:</p>
<p><NAME>., and <NAME>. 2014. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 2. Aves Press, Eastbourne, United Kingdom.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2018b. Phylogenetic affinities of the enigmatic Bare-faced Bulbul <em>Pycnonotus hualon</em> with description of a new genus. Ibis 160: 659-665.</p>
<p><NAME>., and <NAME>. 2010. Origin and diversification of Philippine bulbuls. Molecular Phylogenetics and Evolution 54: 822-832.</p>
<p><NAME>., and <NAME>. 2018. The phylogeny of the world’s bulbuls (Pycnonotidae) inferred using a supermatrix approach. Ibis 159: 498-509.</p>
<p> </p>
<p>page 376, <strong>Andaman Bulbul <em>Pycnonotus fuscoflavescens</em></strong></p>
<p>The traditional, broadly defined genus <em>Pycnonotus</em> is not monophyletic, at least if the genus <em>Spizixos</em> is recognized (Oliveros and Moyle 2010, Shakya and Sheldon 2018, Fuchs et al. 2018b). Change the scientific name of Andaman Bulbul from <em>Pycnonotus fuscoflavescens</em> to <em>Brachypodius fuscoflavescens</em> (Dickinson and Christidis 2014).</p>
<p>References:</p>
<p><NAME>., and <NAME>. 2014. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 2. Aves Press, Eastbourne, United Kingdom.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2018b. Phylogenetic affinities of the enigmatic Bare-faced Bulbul <em>Pycnonotus hualon</em> with description of a new genus. Ibis 160: 659-665.</p>
<p><NAME>., and <NAME>. 2010. Origin and diversification of Philippine bulbuls. Molecular Phylogenetics and Evolution 54: 822-832.</p>
<p><NAME>., and <NAME>. 2018. The phylogeny of the world’s bulbuls (Pycnonotidae) inferred using a supermatrix approach. Ibis 159: 498-509.</p>
<p> </p>
<p>page 378, <strong>Spectacled Bulbul <em>Pycnonotus erythropthalmos</em></strong></p>
<p>The traditional, broadly defined genus <em>Pycnonotus</em> is not monophyletic, at least if the genus <em>Spizixos</em> is recognized (Oliveros and Moyle 2010, Shakya and Sheldon 2018, Fuchs et al. 2018b). Change the scientific name of Spectacled Bulbul from <em>Pycnonotus erythropthalmos</em> to <em>Rubigula erythropthalmos</em> (Fishpool and Tobias 2005, Shakya and Sheldon 2018, Fuchs et al. 2018b).</p>
<p>The sequence of species of <em>Rubigula</em> is revised.</p>
<p>References:</p>
<p>Fishpool, L.D.C., and <NAME>. 2005. Family Pycnonotidae (bulbuls). Pages 124-250 in <NAME>, <NAME>, and <NAME> (editors), Handbook of the birds of the world. Volume 10. Lynx Edicions, Barcelona.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2018b. Phylogenetic affinities of the enigmatic Bare-faced Bulbul <em>Pycnonotus hualon</em> with description of a new genus. Ibis 160: 659-665.</p>
<p><NAME>., and <NAME>. 2010. Origin and diversification of Philippine bulbuls. Molecular Phylogenetics and Evolution 54: 822-832.</p>
<p><NAME>., and <NAME>. 2018. The phylogeny of the world’s bulbuls (Pycnonotidae) inferred using a supermatrix approach. Ibis 159: 498-509.</p>
<p> </p>
<p>page 376, <strong>Gray-bellied Bulbul <em>Pycnonotus cyaniventris</em></strong></p>
<p>The traditional, broadly defined genus <em>Pycnonotus</em> is not monophyletic, at least if the genus <em>Spizixos</em> is recognized (Oliveros and Moyle 2010, Shakya and Sheldon 2018, Fuchs et al. 2018b). Change the scientific name of Gray-bellied Bulbul from <em>Pycnonotus cyaniventris</em> to <em>Rubigula cyaniventris</em> (Fishpool and Tobias 2005, Shakya and Sheldon 2018, Fuchs et al. 2018b).</p>
<p>References:</p>
<p>Fishpool, L.D.C., and <NAME>. 2005. Family Pycnonotidae (bulbuls). Pages 124-250 in <NAME>, <NAME>, and <NAME> (editors), Handbook of the birds of the world. Volume 10. Lynx Edicions, Barcelona.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2018b. Phylogenetic affinities of the enigmatic Bare-faced Bulbul <em>Pycnonotus hualon</em> with description of a new genus. Ibis 160: 659-665.</p>
<p><NAME>., and <NAME>. 2010. Origin and diversification of Philippine bulbuls. Molecular Phylogenetics and Evolution 54: 822-832.</p>
<p><NAME>., and <NAME>. 2018. The phylogeny of the world’s bulbuls (Pycnonotidae) inferred using a supermatrix approach. Ibis 159: 498-509.</p>
<p> </p>
<p>page 376, <strong>Scaly-breasted Bulbul <em>Pycnonotus squamatus</em></strong></p>
<p>The traditional, broadly defined genus <em>Pycnonotus</em> is not monophyletic, at least if the genus <em>Spizixos</em> is recognized (Oliveros and Moyle 2010, Shakya and Sheldon 2018, Fuchs et al. 2018b). Change the scientific name of Scaly-breasted Bulbul from <em>Pycnonotus squamatus</em> to <em>Rubigula squamata</em> (Fishpool and Tobias 2005, Shakya and Sheldon 2018, Fuchs et al. 2018b).</p>
<p>Change the spelling of the nominate subspecies from <em>squamatus</em> to <em>squamata</em>.</p>
<p>References:</p>
<p>Fishpool, L.D.C., and <NAME>. 2005. Family Pycnonotidae (bulbuls). Pages 124-250 in <NAME>, <NAME>, and <NAME> (editors), Handbook of the birds of the world. Volume 10. Lynx Edicions, Barcelona.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2018b. Phylogenetic affinities of the enigmatic Bare-faced Bulbul <em>Pycnonotus hualon</em> with description of a new genus. Ibis 160: 659-665.</p>
<p><NAME>., and <NAME>. 2010. Origin and diversification of Philippine bulbuls. Molecular Phylogenetics and Evolution 54: 822-832.</p>
<p><NAME>., and <NAME>. 2018. The phylogeny of the world’s bulbuls (Pycnonotidae) inferred using a supermatrix approach. Ibis 159: 498-509.</p>
<p> </p>
<p>page 376, <strong>Black-crested Bulbul <em>Pycnonotus flaviventris</em></strong></p>
<p>The traditional, broadly defined genus <em>Pycnonotus</em> is not monophyletic, at least if the genus <em>Spizixos</em> is recognized (Oliveros and Moyle 2010, Shakya and Sheldon 2018, Fuchs et al. 2018b). Change the scientific name of Black-crested Bulbul from <em>Pycnonotus flaviventris</em> to <em>Rubigula flaviventris</em> (Fishpool and Tobias 2005, Shakya and Sheldon 2018, Fuchs et al. 2018b).</p>
<p>Change the spelling of the subspecies name <em>negatus</em> to <em>negata</em>.</p>
<p>Change the spelling of the subspecies name <em>auratus</em> to <em>aurata</em>.</p>
<p>References:</p>
<p>Fishpool, L.D.C., and <NAME>. 2005. Family Pycnonotidae (bulbuls). Pages 124-250 in <NAME>, <NAME>, and <NAME> (editors), Handbook of the birds of the world. Volume 10. Lynx Edicions, Barcelona.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2018b. Phylogenetic affinities of the enigmatic Bare-faced Bulbul <em>Pycnonotus hualon</em> with description of a new genus. Ibis 160: 659-665.</p>
<p>Oliveros, C.H., and <NAME>. 2010. Origin and diversification of Philippine bulbuls. Molecular Phylogenetics and Evolution 54: 822-832.</p>
<p><NAME>., and <NAME>. 2018. The phylogeny of the world’s bulbuls (Pycnonotidae) inferred using a supermatrix approach. Ibis 159: 498-509.</p>
<p> </p>
<p>page 376, <strong>Flame-throated Bulbul <em>Pycnonotus gularis</em></strong></p>
<p>The traditional, broadly defined genus <em>Pycnonotus</em> is not monophyletic, at least if the genus <em>Spizixos</em> is recognized (Oliveros and Moyle 2010, Shakya and Sheldon 2018, Fuchs et al. 2018b). Change the scientific name of Flame-throated Bulbul from <em>Pycnonotus gularis</em> to <em>Rubigula gularis</em> (Fishpool and Tobias 2005, Shakya and Sheldon 2018, Fuchs et al. 2018b).</p>
<p>References:</p>
<p>Fishpool, L.D.C., and <NAME>. 2005. Family Pycnonotidae (bulbuls). Pages 124-250 in <NAME>, <NAME>, and <NAME> (editors), Handbook of the birds of the world. Volume 10. Lynx Edicions, Barcelona.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2018b. Phylogenetic affinities of the enigmatic Bare-faced Bulbul <em>Pycnonotus hualon</em> with description of a new genus. Ibis 160: 659-665.</p>
<p><NAME>., and <NAME>. 2010. Origin and diversification of Philippine bulbuls. Molecular Phylogenetics and Evolution 54: 822-832.</p>
<p><NAME>., and <NAME>. 2018. The phylogeny of the world’s bulbuls (Pycnonotidae) inferred using a supermatrix approach. Ibis 159: 498-509.</p>
<p> </p>
<p>page 376, <strong>Black-capped Bulbul <em>Pycnonotus melanicterus</em></strong></p>
<p>The traditional, broadly defined genus <em>Pycnonotus </em>is not monophyletic, at least if the genus <em>Spizixos</em> is recognized (Oliveros and Moyle 2010, Shakya and Sheldon 2018, Fuchs et al. 2018b). Change the scientific name of Black-capped Bulbul from <em>Pycnonotus melanicterus</em> to <em>Rubigula melanicterus</em> (Fishpool and Tobias 2005, Shakya and Sheldon 2018, Fuchs et al. 2018b).</p>
<p>References:</p>
<p>Fishpool, L.D.C., and <NAME>. 2005. Family Pycnonotidae (bulbuls). Pages 124-250 in <NAME>, <NAME>, and <NAME> (editors), Handbook of the birds of the world. Volume 10. Lynx Edicions, Barcelona.</p>
<p><NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2018b. Phylogenetic affinities of the enigmatic Bare-faced Bulbul <em>Pycnonotus hualon</em> with description of a new genus. Ibis 160: 659-665.</p>
<p><NAME>., and <NAME>. 2010. Origin and diversification of Philippine bulbuls. Molecular Phylogenetics and Evolution 54: 822-832.</p>
<p><NAME>., and <NAME>. 2018. The phylogeny of the world’s bulbuls (Pycnonotidae) inferred using a supermatrix approach. Ibis 159: 498-509.</p>
<p> </p>
<p>page 376, <strong>Ruby-throated Bulbul <em>Pycnonotus dispar</em></strong></p>
<p>The traditional, broadly defined genus <em>Pycnonotus</em> is not monophyletic, at least if the genus <em>Spizixos</em> is recognized (Oliveros and Moyle 2010, Shakya and Sheldon 2018, Fuchs et al. 2018b). Change the scientific name of Ruby-throated Bulbul from <em>Pycnonotus dispar</em> to <em>Rubigula dispar</em> (Fishpool and Tobias 2005, Shakya and Sheldon 2018, Fuchs et al. 2018b).</p>
<p>References:</p>
<p>Fishpool, L.D.C., and <NAME>. 2005. Family Pycnonotidae (bulbuls). Pages 124-250 in J. del Hoyo, <NAME>, and <NAME> (editors), Handbook of the birds of the world. Volume 10. Lynx Edicions, Barcelona.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2018b. Phylogenetic affinities of the enigmatic Bare-faced Bulbul <em>Pycnonotus hualon</em> with description of a new genus. Ibis 160: 659-665.</p>
<p><NAME>., and <NAME>. 2010. Origin and diversification of Philippine bulbuls. Molecular Phylogenetics and Evolution 54: 822-832.</p>
<p><NAME>., and <NAME>. 2018. The phylogeny of the world’s bulbuls (Pycnonotidae) inferred using a supermatrix approach. Ibis 159: 498-509.</p>
<p> </p>
<p>page 376, <strong>Bornean Bulbul <em>Pycnonotus montis</em></strong></p>
<p>The traditional, broadly defined genus <em>Pycnonotus</em> is not monophyletic, at least if the genus <em>Spizixos</em> is recognized (Oliveros and Moyle 2010, Shakya and Sheldon 2018, Fuchs et al. 2018b). Change the scientific name of Bornean Bulbul from <em>Pycnonotus montis</em> to <em>Rubigula montis</em> (Fishpool and Tobias 2005, Shakya and Sheldon 2018, Fuchs et al. 2018b).</p>
<p>References:</p>
<p>Fishpool, L.D.C., and <NAME>. 2005. Family Pycnonotidae (bulbuls). Pages 124-250 in <NAME>, <NAME>, and <NAME> (editors), Handbook of the birds of the world. Volume 10. Lynx Edicions, Barcelona.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2018b. Phylogenetic affinities of the enigmatic Bare-faced Bulbul <em>Pycnonotus hualon</em> with description of a new genus. Ibis 160: 659-665.</p>
<p><NAME>., and <NAME>. 2010. Origin and diversification of Philippine bulbuls. Molecular Phylogenetics and Evolution 54: 822-832.</p>
<p><NAME>., and <NAME>. 2018. The phylogeny of the world’s bulbuls (Pycnonotidae) inferred using a supermatrix approach. Ibis 159: 498-509.</p>
<p> </p>
<p>page addition (2010), <strong>Bare-faced Bulbul <em>Pycnonotus hualon</em></strong></p>
<p>The recently described Bare-faced Bulbul is best considered not a member of the genus <em>Pycnonotus</em>, but instead as representing a separate monotypic genus, <em>Nok</em> (Fuchs et al. 2018b). Change the scientific name of Bare-faced Bulbul from <em>Pycnonotus hualon</em> to <em>Nok hualon</em>.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2018b. Phylogenetic affinities of the enigmatic Bare-faced Bulbul <em>Pycnonotus hualon</em> with description of a new genus. Ibis 160: 659-665.</p>
<p> </p>
<p>page 385, <strong>Nicobar Bulbul <em>Hypsipetes virescens</em></strong></p>
<p>The species name <em>virescens</em> Blyth 1845 is permanently invalid, as it “was preoccupied in a broad genus <em>Hypsipetes</em> by <em>Ixos virescens</em> Temminck, 1825, and was replaced before 1961” (Dickinson and Christidis 2014); therefore change the scientific name of Nicobar Bulbul from <em>Hypsipetes virescens</em> to <em>Hypsipetes nicobariensis</em>.</p>
<p>Reference:</p>
<p><NAME>., and <NAME>. 2014. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 2. Aves Press, Eastbourne, United Kingdom.</p>
<p> </p>
<p>page 440, <strong>White-spectacled Warbler <em>Seicercus affinis</em></strong></p>
<p>The genus <em>Seicercus</em> is embedded within the genus <em>Phylloscopus</em> (Alström et al. 2018a), and the species name <em>affinis</em> Moore 1854 for White-spectacled Warbler is preoccupied in <em>Phylloscopus</em> by <em>affinis </em>Tickell 1833 (Tickell’s Leaf Warbler). Consequently, change the scientific name of White-spectacled Warbler from <em>Seicercus affinis</em> to <em>Phylloscopus intermedius</em> (Watson et al. 1986a, del Hoyo and Collar 2016, Alström et al. 2018a).</p>
<p>Change the subspecies name <em>affinis</em> to <em>zosterops</em> Elliott and del Hoyo 2016 (in del Hoyo and Collar 2016).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2018a. Complete species-level phylogeny of the leaf warbler (Aves: Phylloscopidae) radiation. Molecular Phylogenetics and Evolution 126: 141-152.</p>
<p>del Hoyo, J., and <NAME>. 2016. HBW and BirdLife International illustrated checklist of the birds of the world. Volume 2. Lynx Edicions, Barcelona.</p>
<p><NAME>., <NAME>., and <NAME>. 1986a. <a href="https://biodiversitylibrary.org/page/14483960">Family Sylviidae, Old World warblers</a>. Pages 3-294 in E. Mayr and G.W. Cottrell (editors), <a href="https://biodiversitylibrary.org/page/14483698">Check-list of birds of the world. Volume XI</a>. Museum of Comparative Zoology, Cambridge, Massachusetts.</p>
<p> </p>
<p>page 440, <strong>Gray-cheeked Warbler <em>Seicercus poliogenys</em></strong></p>
<p>The genus <em>Seicercus</em> is embedded within the genus <em>Phylloscopus</em> (Alström et al. 2018a). Consequently, change the scientific name of Gray-cheeked Warbler from <em>Seicercus poliogenys</em> to <em>Phylloscopus poliogenys</em> (del Hoyo and Collar 2016, Alström et al. 2018a).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2018a. Complete species-level phylogeny of the leaf warbler (Aves: Phylloscopidae) radiation. Molecular Phylogenetics and Evolution 126: 141-152.</p>
<p>del <NAME>., and <NAME>. 2016. HBW and BirdLife International illustrated checklist of the birds of the world. Volume 2. Lynx Edicions, Barcelona.</p>
<p> </p>
<p>page 440, <strong>Golden-spectacled Warbler <em>Seicercus burkii</em></strong></p>
<p>The genus <em>Seicercus</em> is embedded within the genus <em>Phylloscopus</em> (Alström et al. 2018a). Consequently, change the scientific name of Golden-spectacled Warbler from <em>Seicercus burkii</em> to <em>Phylloscopus burkii</em> (del Hoyo and Collar 2016, Alström et al. 2018a).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2018a. Complete species-level phylogeny of the leaf warbler (Aves: Phylloscopidae) radiation. Molecular Phylogenetics and Evolution 126: 141-152.</p>
<p>del Hoyo, J., and <NAME>. 2016. HBW and BirdLife International illustrated checklist of the birds of the world. Volume 2. Lynx Edicions, Barcelona.</p>
<p> </p>
<p>page 440, <strong>Gray-crowned Warbler <em>Seicercus tephrocephalus</em></strong></p>
<p>The genus <em>Seicercus</em> is embedded within the genus <em>Phylloscopus</em> (Alström et al. 2018a). Consequently, change the scientific name of Gray-crowned Warbler from <em>Seicercus tephrocephalus</em> to <em>Phylloscopus tephrocephalus</em> (del Hoyo and Collar 2016, Alström et al. 2018a).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2018a. Complete species-level phylogeny of the leaf warbler (Aves: Phylloscopidae) radiation. Molecular Phylogenetics and Evolution 126: 141-152.</p>
<p>del Hoyo, J., and <NAME>. 2016. HBW and BirdLife International illustrated checklist of the birds of the world. Volume 2. Lynx Edicions, Barcelona.</p>
<p> </p>
<p>page 440, <strong>Whistler’s Warbler <em>Seicercus whistleri</em></strong></p>
<p>The genus <em>Seicercus</em> is embedded within the genus <em>Phylloscopus</em> (Alström et al. 2018a). Consequently, change the scientific name of Whistler’s Warbler from <em>Seicercus whistleri</em> to <em>Phylloscopus whistleri</em> (del Hoyo and Collar 2016, Alström et al. 2018a).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2018a. Complete species-level phylogeny of the leaf warbler (Aves: Phylloscopidae) radiation. Molecular Phylogenetics and Evolution 126: 141-152.</p>
<p>del Hoyo, J., and <NAME>. 2016. HBW and BirdLife International illustrated checklist of the birds of the world. Volume 2. Lynx Edicions, Barcelona.</p>
<p> </p>
<p>page 440, <strong>Bianchi’s Warbler <em>Seicercus valentini</em></strong></p>
<p>The genus <em>Seicercus</em> is embedded within the genus <em>Phylloscopus</em> (Alström et al. 2018a). Consequently, change the scientific name of Bianchi’s Warbler from <em>Seicercus valentini</em> to <em>Phylloscopus valentini</em> (del Hoyo and Collar 2016, Alström et al. 2018a).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2018a. Complete species-level phylogeny of the leaf warbler (Aves: Phylloscopidae) radiation. Molecular Phylogenetics and Evolution 126: 141-152.</p>
<p>del <NAME>., and <NAME>. 2016. HBW and BirdLife International illustrated checklist of the birds of the world. Volume 2. Lynx Edicions, Barcelona.</p>
<p> </p>
<p>page 440, <strong>Martens’s Warbler <em>Seicercus omeiensis</em></strong></p>
<p>The genus <em>Seicercus</em> is embedded within the genus <em>Phylloscopus</em> (Alström et al. 2018a). Consequently, change the scientific name of Martens’s Warbler from <em>Seicercus omeiensis</em> to <em>Phylloscopus omeiensis</em> (del Hoyo and Collar 2016, Alström et al. 2018a).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2018a. Complete species-level phylogeny of the leaf warbler (Aves: Phylloscopidae) radiation. Molecular Phylogenetics and Evolution 126: 141-152.</p>
<p>del Hoyo, J., and <NAME>. 2016. HBW and BirdLife International illustrated checklist of the birds of the world. Volume 2. Lynx Edicions, Barcelona.</p>
<p> </p>
<p>page 440<strong>, Plain-tailed Warbler <em>Seicercus soror</em></strong></p>
<p>Change the English name of <em>Seicercus soror</em> from Plain-tailed Warbler to Alström’s Warbler (Rheindt 2006, Brazil 2009, Dickinson and Christidis 2014, del Hoyo and Collar 2016).</p>
<p>The genus <em>Seicercus</em> is embedded within the genus <em>Phylloscopus</em> (Alström et al. 2018a). Consequently, change the scientific name of Alström’s Warbler from <em>Seicercus soror</em> to <em>Phylloscopus soror</em> (del Hoyo and Collar 2016, Alström et al. 2018a).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2018a. Complete species-level phylogeny of the leaf warbler (Aves: Phylloscopidae) radiation. Molecular Phylogenetics and Evolution 126: 141-152.</p>
<p>Brazil, M. 2009. Birds of East Asia. China, Taiwan, Korea, Japan, and Russia. Princeton University Press, Princeton, New Jersey.</p>
<p><NAME>., and <NAME>. 2014. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 2. Aves Press, Eastbourne, United Kingdom.</p>
<p>del <NAME>., and <NAME>. 2016. HBW and BirdLife International illustrated checklist of the birds of the world. Volume 2. Lynx Edicions, Barcelona.</p>
<p>Rheindt, F.E. 2006. Splits galore: the revolution in Asian leaf warbler systematics. BirdingASIA 5: 25-39.</p>
<p> </p>
<p>page 440, <strong>Chestnut-crowned Warbler <em>Seicercus castaniceps</em></strong></p>
<p>The genus <em>Seicercus</em> is embedded within the genus <em>Phylloscopus</em> (Alström et al. 2018a). Consequently, change the scientific name of Chestnut-crowned Warbler from <em>Seicercus castaniceps</em> to <em>Phylloscopus castaniceps</em> (del Hoyo and Collar 2016, Alström et al. 2018a).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2018a. Complete species-level phylogeny of the leaf warbler (Aves: Phylloscopidae) radiation. Molecular Phylogenetics and Evolution 126: 141-152.</p>
<p>del <NAME>., and <NAME>. 2016. HBW and BirdLife International illustrated checklist of the birds of the world. Volume 2. Lynx Edicions, Barcelona.</p>
<p> </p>
<p>page 440, <strong>Yellow-breasted Warbler <em>Seicercus montis</em></strong></p>
<p>The genus <em>Seicercus</em> is embedded within the genus <em>Phylloscopus</em> (Alström et al. 2018a). Consequently, change the scientific name of Yellow-breasted Warbler from <em>Seicercus montis</em> to <em>Phylloscopus montis</em> (del Hoyo and Collar 2016, Alström et al. 2018a).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2018a. Complete species-level phylogeny of the leaf warbler (Aves: Phylloscopidae) radiation. Molecular Phylogenetics and Evolution 126: 141-152.</p>
<p>del Hoyo, J., and <NAME>. 2016. HBW and BirdLife International illustrated checklist of the birds of the world. Volume 2. Lynx Edicions, Barcelona.</p>
<p> </p>
<p>page 440, <strong>Sunda Warbler <em>Seicercus grammiceps</em></strong></p>
<p>The genus <em>Seicercus</em> is embedded within the genus <em>Phylloscopus</em> (Alström et al. 2018a). Consequently, change the scientific name of Sunda Warbler from <em>Seicercus grammiceps</em> to <em>Phylloscopus grammiceps</em> (del Hoyo and Collar 2016, Alström et al. 2018a).</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2018a. Complete species-level phylogeny of the leaf warbler (Aves: Phylloscopidae) radiation. Molecular Phylogenetics and Evolution 126: 141-152.</p>
<p>del <NAME>., and <NAME>. 2016. HBW and BirdLife International illustrated checklist of the birds of the world. Volume 2. Lynx Edicions, Barcelona.</p>
<p> </p>
<p>page 439, <strong>Davison’s Leaf Warbler <em>Phylloscopus davisoni</em></strong></p>
<p>The name <em>davisoni</em> Oates 1889 is preoccupied in <em>Phylloscopus</em> by <em>davisoni</em> Sharpe 1888 (<em>Phylloscopus montis davisoni</em> Yellow-breasted Warbler), and is replaced by the new name <em>muleyitensis</em> (Dickinson and Christidis 2014).</p>
<p>Previously we considered Davison’s Leaf Warbler to be monotypic. However, subspecies <em>intensior</em>, previously classified as a subspecies of Kloss’s Leaf Warbler <em>Phylloscopus ogilviegranti</em>, properly belongs with Davison’s Leaf Warbler (del Hoyo and Collar 2016). Because the name <em>intensior </em>Deignan 1956 is older than the name <em>muleyitensis</em> (Dickinson and Christidis 2014), change the species name of Davison’s Leaf Warbler from <em>Phylloscopus davisoni</em> to <em>Phylloscopus intensior</em>. Position nominate <em>intensior</em> to follow subspecies <em>muleyitensis</em>.</p>
<p>References:</p>
<p><NAME>., and <NAME>. 2014. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 2. Aves Press, Eastbourne, United Kingdom.</p>
<p>del <NAME>., and <NAME>. 2016. HBW and BirdLife International illustrated checklist of the birds of the world. Volume 2. Lynx Edicions, Barcelona.</p>
<p> </p>
<p>page 439, <strong>Kloss’s Leaf Warbler <em>Phylloscopus ogilviegranti</em></strong></p>
<p>Subspecies <em>intensior</em>, previously classified as a subspecies of Kloss’s Leaf Warbler, properly belongs with Davison’s Leaf Warbler <em>Phylloscopus intensior</em> (del Hoyo and Collar 2016).</p>
<p>Reposition subspecies <em>klossi</em> to follow, rather than precede, nominate <em>ogilviegranti</em>.</p>
<p>Reference:</p>
<p>del <NAME>., and <NAME>. 2016. HBW and BirdLife International illustrated checklist of the birds of the world. Volume 2. Lynx Edicions, Barcelona.</p>
<p> </p>
<p>page 440, <strong>Kolombangra Leaf Warbler <em>Phylloscopus amoenus</em></strong></p>
<p>Change the English name of <em>Phylloscopus amoenus</em> from Kolombangra Leaf Warbler to Kolombangara Leaf Warbler.</p>
<p>Revise the range description from “montane forests of Kolombangra (central Solomon Islands)” to “montane forests of Kolombangara (central Solomon Islands)”.</p>
<p> </p>
<p>page 431, <strong>Thick-billed Warbler <em>Iduna aedon</em></strong></p>
<p>Thick-billed Warbler is better classified in a monotypic genus (Arbadi et al. 2014). Change the scientific name from <em>Iduna aedon</em> to <em>Arundinax aedon</em> (Pittie and Dickinson 2013, Dickinson and Christidis 2014).</p>
<p>References:</p>
<p><NAME>., <NAME>, and <NAME>. 2014. A re-evaluation of phylogenetic relationships within reed warblers (Aves: Acrocephalidae) based on eight molecular loci and ISSR profiles. Molecular Phylogenetics and Evolution 78: 304-313.</p>
<p><NAME>., and <NAME>. 2014. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 2. Aves Press, Eastbourne, United Kingdom.</p>
<p><NAME>., and <NAME>. 2013. The dating of the Second Supplement to Jerdon’s Catalogue of the birds of the peninsula of India in the Madras Journal of Literature and Science, volume 13 number 31. Zoological Bibliography 2: 151-166.</p>
<p> </p>
<p>page 421, <strong>Roberts’s Prinia <em>Prinia robertsi</em></strong></p>
<p>Roberts’s Prinia <em>Prinia robertsi</em> is not a member of the genus <em>Prinia</em> (Clancey 1991, Urban et al. 1997, Olsson et al. 2013). Change the scientific name to <em>Oreophilais robertsi</em>, and the English name to Roberts’s Warbler.</p>
<p>References:</p>
<p><NAME>. 1991. <a href="https://biodiversitylibrary.org/page/40027686">The generic status of Roberts’ Prinia of the south-eastern Afrotropics</a>. Bulletin of the British Ornithologists’ Club 111: 217-222.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2013. Systematic revision of the avian family Cisticolidae based on a multi-locus phylogeny of all genera. Molecular Phylogenetics and Evolution 66: 790-799.</p>
<p>Urban, E.K., <NAME>, and S. Keith. 1997. The birds of Africa. Volume V. Academic Press, London.</p>
<p> </p>
<p>page 421, <strong>Namaqua Prinia <em>Prinia substriata</em></strong></p>
<p>Namaqua Prinia <em>Prinia substriata</em> is not a member of the genus <em>Prinia</em> (Brooke and Dean 1990, Olsson et al. 2013); change the scientific name to <em>Phragmacia substriata</em>, and change the English name to Namaqua Warbler (Brooke and Dean 1990, Hockey et al. 2005).</p>
<p>References:</p>
<p><NAME>., and <NAME>. 1990. On the biology and taxonomic position of <em>Drymoica substriata</em> Smith, the so-called Namaqua Prinia. Ostrich 61: 50-55.</p>
<p>Hockey, P.A.R., <NAME>, and <NAME> (editors). 2005. Roberts Birds of southern Africa. VII edition. Trustees of the John Voelcker Bird Book Fund, Cape Town.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2013. Systematic revision of the avian family Cisticolidae based on a multi-locus phylogeny of all genera. Molecular Phylogenetics and Evolution 66: 790-799.</p>
<p> </p>
<p>page 422, <strong>Black-collared Apalis <em>Apalis pulchra</em></strong></p>
<p>Black-collared Apalis is not a member of the genus <em>Apalis </em>(Nguembock et al. 2008b, Olsson et al. 2013); change the scientific name from <em>Apalis pulchra</em> to <em>Oreolais pulchra</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2008b. Polyphyly of the genus <em>Apalis</em> and new generic name for the species <em>pulchra</em> and <em>ruwenzorii</em>. Ibis 150: 756-765.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2013. Systematic revision of the avian family Cisticolidae based on a multi-locus phylogeny of all genera. Molecular Phylogenetics and Evolution 66: 790-799.</p>
<p> </p>
<p>page 422, <strong>Ruwenzori Apalis <em>Apalis ruwenzorii</em></strong></p>
<p>Change the spelling of the English name of <em>Apalis ruwenzorii</em> from Ruwenzori Apalis to Rwenzori Apalis. Rwenzori Apalis is not a member of the genus <em>Apalis</em> (Nguembock et al. 2008b, Olsson et al. 2013); change the scientific name from <em>Apalis ruwenzorii</em> to <em>Oreolais ruwenzorii</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2008b. Polyphyly of the genus <em>Apalis</em> and new generic name for the species <em>pulchra</em> and <em>ruwenzorii</em>. Ibis 150: 756-765.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2013. Systematic revision of the avian family Cisticolidae based on a multi-locus phylogeny of all genera. Molecular Phylogenetics and Evolution 66: 790-799.</p>
<p> </p>
<p>page 424, <strong>Red-fronted Warbler <em>Urorhipis rufifrons</em></strong></p>
<p>Red-fronted Warbler <em>Urorhipis rufifrons</em> is embedded in the genus <em>Prinia</em> (Olsson et al. 2013); change the scientific name to <em>Prinia rufifrons</em>, and change the English name to Red-fronted Prinia.</p>
<p>Change the names of the polytypic group Red-fronted Warbler (Red-fronted) <em>Urorhipis rufifrons rufifrons</em>/<em>smithi</em> to Red-fronted Prinia (Red-fronted) <em>Prinia rufifrons rufifrons</em>/<em>smithi</em>.</p>
<p>Change the names of the monotypic group Red-fronted Warbler (Rufous-backed) <em>Urorhipis rufifrons rufidorsalis</em> to Red-fronted Prinia (Rufous-backed) <em>Prinia rufifrons rufidorsalis</em>.</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2013. Systematic revision of the avian family Cisticolidae based on a multi-locus phylogeny of all genera. Molecular Phylogenetics and Evolution 66: 790-799.</p>
<p> </p>
<p>page 444, <strong>Banded Warbler <em>Sylvia boehmi</em></strong></p>
<p>In accord with widespread usage (e.g., Britton 1980, Dowsett and Forbes-Watson 1993, Urban et al. 1997, Stevenson and Fanshawe 2002), change the English name of <em>Sylvia boehmi</em> from Banded Warbler to Banded Parisoma.</p>
<p>References:</p>
<p><NAME>. (editor). 1980. Birds of east Africa. East Africa Natural History Society, Nairobi.</p>
<p><NAME>., and <NAME>. 1993. Checklist of birds of the Afrotropical and Malagasy regions. Volume 1: species limits and distribution. Tauraco Press, Liège, Belgium.</p>
<p><NAME>., and <NAME>. 2002. Field guide to the birds of East Africa. T & A D Poyser, London.</p>
<p>Urban, E.K., <NAME>, and <NAME>. 1997. The birds of Africa. Volume V. Academic Press, London.</p>
<p> </p>
<p>page 444, <strong>Brown Warbler <em>Sylvia lugens</em></strong></p>
<p>In accord with widespread usage (e.g., Britton 1980, Dowsett and Forbes-Watson 1993, Urban et al. 1997, Stevenson and Fanshawe 2002), change the English name of <em>Sylvia lugens</em> from Brown Warbler to Brown Parisoma.</p>
<p>Change the English name of the polytypic group <em>Sylvia lugens</em> [<em>lugens</em> Group] from Brown Warbler (Brown) to Brown Parisoma (Brown).</p>
<p>Change the English name of the monotypic group <em>Sylvia lugens griseiventris</em> from Brown Warbler (Gray-vented) to Brown Parisoma (Gray-vented).</p>
<p>References:</p>
<p><NAME>. (editor). 1980. Birds of east Africa. East Africa Natural History Society, Nairobi.</p>
<p><NAME>., and <NAME>. 1993. Checklist of birds of the Afrotropical and Malagasy regions. Volume 1: species limits and distribution. Tauraco Press, Liège, Belgium.</p>
<p><NAME>., and <NAME>. 2002. Field guide to the birds of East Africa. T & A D Poyser, London.</p>
<p><NAME>., <NAME>, and <NAME>. 1997. The birds of Africa. Volume V. Academic Press, London.</p>
<p> </p>
<p>page 550, <strong>White-breasted White-eye <em>Zosterops abyssinicus</em></strong></p>
<p>In accord with widespread usage (e.g., Britton 1980, Dowsett and Forbes-Watson 1993, Zimmerman 1996, Fry et al. 2000, Stevenson and Fanshawe 2002), change the English name of <em>Zosterops abyssinicus</em> from White-breasted White-eye to Abyssinian White-eye.</p>
<p>Change the English name of the polytypic group <em>Zosterops abyssinicus</em> [<em>abyssinicus</em> Group] from White-breasted White-eye (Abyssinian) to Abyssinian White-eye (Abyssinian).</p>
<p>Change the English name of the polytypic group <em>Zosterops abyssinicus flavilateralis</em>/<em>jubaensis</em> from White-breasted White-eye (Kenya) to Abyssinian White-eye (Kenya).</p>
<p>References:</p>
<p><NAME>. (editor). 1980. Birds of east Africa. East Africa Natural History Society, Nairobi.</p>
<p><NAME>., and <NAME>. 1993. Checklist of birds of the Afrotropical and Malagasy regions. Volume 1: species limits and distribution. Tauraco Press, Liège, Belgium.</p>
<p><NAME>., <NAME>, and E.<NAME> (editors). 2000. The birds of Africa. Volume VI. Academic Press, London.</p>
<p><NAME>., and <NAME>. 2002. Field guide to the birds of East Africa. T & A D Poyser, London.</p>
<p><NAME>., <NAME>, and <NAME>. 1996. Birds of northern Kenya and northern Tanzania. Princeton University Press, Princeton, New Jersey.</p>
<p> </p>
<p>page 550, <strong>Cape White-eye <em>Zosterops capensis</em></strong></p>
<p>Change the scientific name of Cape White-eye from <em>Zosterops capensis</em> to <em>Zosterops virens</em> (Moreau 1967, Thompson and Taylor 2014, Dickinson and Christidis 2015).</p>
<p>Change the scientific name of the polytypic group Cape White-eye (Cape) from <em>Zosterops capensis</em> [<em>capensis</em> Group] to <em>Zosterops virens</em> [<em>capensis</em> Group].</p>
<p>Change the scientific name of the monotypic group Cape White-eye (Green) from <em>Zosterops capensis virens</em> to <em>Zosterops virens virens</em>.</p>
<p>References:</p>
<p><NAME>., and <NAME>. 2015. List of errata for Vol. 2 plus corrigenda in respect of range statements and additional errata from Vol. 1 (2013). Aves Press, Eastbourne, United Kingdom.</p>
<p><NAME>. 1967. <a href="https://biodiversitylibrary.org/page/14482450">Family Zosteropidae, white-eyes. African and Indian Ocean taxa</a>. <a href="https://biodiversitylibrary.org/page/14482443">Pages 326-337</a> in R.A. Paynter, Jr. (editor), Check-list of birds of the world. Volume XII. Museum of Comparative Zoology, Cambridge, Massachusetts.</p>
<p><NAME>., and <NAME>. 2014. Is the Cape White-eye <em>Zosterops virens</em> or <em>Zosterops capensis?</em> Ostrich 85: 197–199.</p>
<p> </p>
<p>page 554, <strong>Kolombangra White-eye <em>Zosterops murphyi</em></strong></p>
<p>Change the English name of <em>Zosterops murphyi</em> from Kolombangra White-eye to Kolombangara White-eye. Revise the range description from “Kolombangra (central Solomon Islands)” to “Kolombangara (central Solomon Islands)”.</p>
<p> </p>
<p>page 490, <strong>Sukatschev’s Laughingthrush <em>Ianthocincla sukatschewi</em></strong></p>
<p>In accord with widespread usage (e.g., Inskipp et al. 1996, MacKinnon and Phillipps 2000), change the English name of <em>Ianthocincla sukatschewi</em> from Sukatschev’s Laughingthrush to Snowy-cheeked Laughingthrush.</p>
<p>References:</p>
<p><NAME>., <NAME>, and <NAME>. 1996. An annotated checklist of the birds of the Oriental Region. Oriental Bird Club, Sandy, Bedfordshire, United Kingdom.</p>
<p><NAME>., and <NAME>. 2000. A field guide to the birds of China. Oxford University Press, New York and Oxford, United Kingdom.</p>
<p> </p>
<p>page 491, <strong>Ashambu Laughingthrush <em>Montecincla meridionale</em></strong></p>
<p>Correct the spelling of the species name of Ashambu Laughingthrush from <em>meridionale</em> to <em>meridionalis</em> (Robin et al. 2017).</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2017. <a href="https://bmcevolbiol.biomedcentral.com/articles/10.1186/s12862-017-0882-6">Two new genera of songbirds represent endemic radiations from the Shola Sky Islands of the Western Ghats, India</a>. BMC Evolutionary Biology 17: 31.</p>
<p> </p>
<p>page 413, <strong>Nilgiri Blue Robin <em>Sholicola major</em></strong></p>
<p>Change the English name of <em>Sholicola major</em> from Nilgiri Blue Robin to Nilgiri Sholakili (Robin et al. 2017).</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2017. <a href="https://bmcevolbiol.biomedcentral.com/articles/10.1186/s12862-017-0882-6">Two new genera of songbirds represent endemic radiations from the Shola Sky Islands of the Western Ghats, India</a>. BMC Evolutionary Biology 17: 31.</p>
<p> </p>
<p>page 413, <strong>White-bellied Blue Robin <em>Sholicola albiventris</em></strong></p>
<p>Change the English name of <em>Sholicola albiventris</em> from White-bellied Blue Robin to White-bellied Sholakili (Robin et al. 2017).</p>
<p>Reference:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2017. <a href="https://bmcevolbiol.biomedcentral.com/articles/10.1186/s12862-017-0882-6">Two new genera of songbirds represent endemic radiations from the Shola Sky Islands of the Western Ghats, India</a>. BMC Evolutionary Biology 17: 31.</p>
<p> </p>
<p>page 453, <strong>White-bellied Blue Flycatcher <em>Cyornis pallipes</em></strong></p>
<p>Change the spelling of the species name from <em>pallipes</em> to <em>pallidipes </em>(Dickinson and Christidis 2014).</p>
<p>Reference:</p>
<p><NAME>., and <NAME>. 2014. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 2. Aves Press, Eastbourne, United Kingdom.</p>
<p> </p>
<p>page 414, <strong>Brown-chested Alethe <em>Pseudalethe poliocephala</em></strong></p>
<p>page 414, <strong>Red-throated Alethe <em>Pseudalethe poliophrys</em></strong></p>
<p>page 414, <strong>Cholo Alethe <em>Pseudalethe choloensis</em></strong></p>
<p>page 414, <strong>White-chested Alethe <em>Pseudalethe fuelleborni</em></strong></p>
<p>The genus name <em>Chamaetylas</em> Heine 1860 has priority over <em>Pseudalethe</em> Beresford 2003 (Dickinson and Christidis 2014).</p>
<p>Change the scientific name of Brown-chested Alethe from <em>Pseudalethe poliocephala</em> to <em>Chamaetylas poliocephala</em>.</p>
<p>Change the scientific name of the monotypic group Brown-chested Alethe (Gray-headed) from <em>Pseudalethe poliocephala poliocephala</em> to <em>Chamaetylas poliocephala poliocephala</em>.</p>
<p>Change the scientific name of the monotypic group Brown-chested Alethe (Gabela) from <em>Pseudalethe poliocephala hallae</em> to <em>Chamaetylas poliocephala hallae</em>.</p>
<p>Change the scientific name of the monotypic group Brown-chested Alethe (Chestnut-backed) from <em>Pseudalethe poliocephala compsonota</em> to <em>Chamaetylas poliocephala compsonota</em>.</p>
<p>Change the scientific name of the polytypic group Brown-chested Alethe (Brown-chested) from <em>Pseudalethe poliocephala</em> [<em>carruthersi</em> Group] to <em>Chamaetylas poliocephala</em> [<em>carruthersi</em> Group].</p>
<p>Change the scientific name of Red-throated Alethe from <em>Pseudalethe poliophrys</em> to <em>Chamaetylas poliophrys</em>.</p>
<p>Change the scientific name of Cholo Alethe from <em>Pseudalethe choloensis</em> to <em>Chamaetylas choloensis.</em></p>
<p>Change the scientific name of White-chested Alethe from <em>Pseudalethe fuelleborni</em> to <em>Chamaetylas fuelleborni</em>.</p>
<p>Reference:</p>
<p><NAME>., and <NAME>. 2014. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 2. Aves Press, Eastbourne, United Kingdom.</p>
<p> </p>
<p>page 456, <strong>Black-throated Blue Robin <em>Calliope obscura</em></strong></p>
<p>In accord with widespread usage (e.g. Inskipp et al. 1996, MacKinnon and Phillipps 2000, Robson 2000), change the English name of <em>Calliope obscura</em> from Black-throated Blue Robin to Blackthroat.</p>
<p>References:</p>
<p><NAME>., <NAME>, and <NAME>. 1996. An annotated checklist of the birds of the Oriental Region. Oriental Bird Club, Sandy, Bedfordshire, United Kingdom.</p>
<p><NAME>., and <NAME>. 2000. A field guide to the birds of China. Oxford University Press, New York and Oxford, United Kingdom.</p>
<p><NAME>. 2000. A guide to the birds of southeast Asia. Princeton University Press, Princeton, New Jersey.</p>
<p> </p>
<p>page 450, <strong>Korean Flycatcher <em>Ficedula zanthopygia</em></strong></p>
<p>In accord with widespread usage (e.g., Inskipp et al. 1996, MacKinnon and Phillipps 2000, Robson 2000, Brazil 2009), change the English name of <em>Ficedula zanthopygia</em> from Korean Flycatcher to Yellow-rumped Flycatcher.</p>
<p>References:</p>
<p>Brazil, M. 2009. Birds of East Asia. China, Taiwan, Korea, Japan, and Russia. Princeton University Press, Princeton, New Jersey.</p>
<p><NAME>., <NAME>, and <NAME>. 1996. An annotated checklist of the birds of the Oriental Region. Oriental Bird Club, Sandy, Bedfordshire, United Kingdom.</p>
<p><NAME>., and <NAME>. 2000. A field guide to the birds of China. Oxford University Press, New York and Oxford, United Kingdom.</p>
<p><NAME>. 2000. A guide to the birds of southeast Asia. Princeton University Press, Princeton, New Jersey.</p>
<p> </p>
<p>page 452, <strong>Black-and-rufous Flycatcher <em>Ficedula nigrorufa</em></strong></p>
<p>In accord with widespread usage (e.g. Inskipp et al. 1996, Rasmussen and Anderton 2005), change the English name of <em>Ficedula nigrorufa</em> from Black-and-rufous Flycatcher to Black-and-orange Flycatcher.</p>
<p>References:</p>
<p><NAME>., <NAME>, and <NAME>. 1996. An annotated checklist of the birds of the Oriental Region. Oriental Bird Club, Sandy, Bedfordshire, United Kingdom.</p>
<p><NAME>., and <NAME>ton. 2005. Birds of South Asia. The Ripley guide. Volume 2: attributes and status. Smithsonian Institution and Lynx Edicions, Washington D.C. and Barcelona.</p>
<p> </p>
<p>page 401, <strong>Blue-capped Rock-Thrush <em>Monticola cinclorhynchus</em></strong></p>
<p>Change the spelling of the species name from <em>cinclorhynchus</em> to <em>cinclorhyncha</em> (Dickinson and Christidis 2014).</p>
<p>Reference:</p>
<p><NAME>., and <NAME>. 2014. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 2. Aves Press, Eastbourne, United Kingdom.</p>
<p> </p>
<p>page 401, <strong>Finsch’s Flycatcher-Thrush <em>Neocossyphus finschii</em></strong></p>
<p>Correct the spelling of the species name from <em>finschii</em> to <em>finschi</em> (Dickinson and Christidis 2014).</p>
<p>Reference:</p>
<p><NAME>., and <NAME>. 2014. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 2. Aves Press, Eastbourne, United Kingdom.</p>
<p> </p>
<p>page 403, <strong>Oberlaender’s Ground-Thrush <em>Geokichla oberlaenderi</em></strong></p>
<p>Change the English name of <em>Geokichla oberlaenderi</em> from Oberlaender’s Ground-Thrush to Oberländer’s Ground-Thrush (Urban et al. 1997).</p>
<p>Revise the range description for subspecies <em>ruwenzorii</em> from “Ruwenzori Mountains (Democratic Republic of the Congo/Uganda border)” to “Rwenzori Mountains (northeastern Democratic Republic of the Congo and southwestern Uganda)”.</p>
<p>Reference:</p>
<p>Urban, E.K., <NAME>, and <NAME>. 1997. The birds of Africa. Volume V. Academic Press, London.</p>
<p> </p>
<p>page 410, <strong>Chinese Thrush <em>Turdus mupinensis</em></strong></p>
<p>Chinese Thrush is basal to the genus <em>Turdus</em>, and is sister to Groundscraper Thrush <em>Psophocichla litsitsirupa</em> (Nylander et al. 2008). Change the scientific name of Chinese Thrush from <em>Turdus mupinensis</em> to <em>Otocichla mupinensis</em> (Dickinson and Christidis 2014).</p>
<p>References:</p>
<p><NAME>., and <NAME>. 2014. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 2. Aves Press, Eastbourne, United Kingdom.</p>
<p>Nylander, J.A.A., <NAME>, <NAME>, and <NAME>. 2008. Accounting for phylogenetic uncertainty in biogeography: a Bayesian approach to dispersal-vicariance analysis of the thrushes (Aves: <em>Turdus</em>). Systematic Biology 57: 257-268.</p>
<p> </p>
<p>page 404, <strong><NAME> <em>Nesocichla eremita</em></strong></p>
<p>The monotypic genus <em>Nesocichla</em> is embedded within the genus <em>Turdus</em> (Klicka et al. 2005, Jønsson and Fjeldså 2006, Voelker et al. 2007, Nylander et al. 2008); change the scientific name of <NAME> from <em>Nesocichla eremita</em> to <em>Turdus eremita</em>.</p>
<p>References:</p>
<p><NAME>., and <NAME>. 2006. A phylogenetic supertree of oscine passerines (Aves: Passeri). Zoologica Scripta 35: 149-186.</p>
<p><NAME>., <NAME>, and <NAME>. 2005. A molecular phylogenetic analysis of the “true thrushes” (Aves: Turdinae). Molecular Phylogenetics and Evolution 34: 486-500.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2008. Accounting for phylogenetic uncertainty in biogeography: a Bayesian approach to dispersal-vicariance analysis of the thrushes (Aves: <em>Turdus</em>). Systematic Biology 57: 257-268.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2007. Molecular systematics of a speciose, cosmopolitan songbird genus: defining the limits of, and relationships among, the <em>Turdus</em> thrushes. Molecular Phylogenetics and Evolution 42: 422-434.</p>
<p> </p>
<p>page 598, <strong>Spot-winged Starling <em>Saroglossa spiloptera</em></strong></p>
<p>Correct the spelling of the species name from <em>spiloptera</em> to <em>spilopterus</em> (Dickinson and Christidis 2014).</p>
<p>Reference:</p>
<p><NAME>., and <NAME>. 2014. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 2. Aves Press, Eastbourne, United Kingdom.</p>
<p> </p>
<p>page 535, <strong>Kenya Violet-backed Sunbird <em>Anthreptes orientalis</em></strong></p>
<p>In accord with widespread usage (e.g., Britton 1980, Dowsett and Forbes-Watson 1993, Zimmerman et al. 1996, Fry et al. 2000, Stevenson and Fanshawe 2002), change the English name of <em>Anthreptes orientalis</em> from Kenya Violet-backed Sunbird to Eastern Violet-backed Sunbird.</p>
<p>References:</p>
<p><NAME>. (editor). 1980. Birds of east Africa. East Africa Natural History Society, Nairobi.</p>
<p><NAME>., and <NAME>. 1993. Checklist of birds of the Afrotropical and Malagasy regions. Volume 1: species limits and distribution. Tauraco Press, Liège, Belgium.</p>
<p><NAME>., <NAME>, and <NAME> (editors). 2000. The birds of Africa. Volume VI. Academic Press, London.</p>
<p><NAME>., and <NAME>. 2002. Field guide to the birds of East Africa. T & A D Poyser, London.</p>
<p><NAME>., <NAME>, and <NAME>. 1996. Birds of northern Kenya and northern Tanzania. Princeton University Press, Princeton, New Jersey.</p>
<p> </p>
<p>page 535, <strong>Seimund’s Sunbird <em>Anthreptes seimundi</em></strong></p>
<p>In accord with widespread usage (e.g., Britton 1980, Dowsett and Forbes-Watson 1993, Fry et al. 2000, Borrow and Demey 2001), change the English name of <em>Anthreptes seimundi</em> from Seimund’s Sunbird to Little Green Sunbird.</p>
<p>References:</p>
<p><NAME>., and <NAME>. 2001. A guide to birds of western Africa. Princeton University Press, Princeton, New Jersey.</p>
<p><NAME>. (editor). 1980. Birds of east Africa. East Africa Natural History Society, Nairobi.</p>
<p><NAME>., and <NAME>. 1993. Checklist of birds of the Afrotropical and Malagasy regions. Volume 1: species limits and distribution. Tauraco Press, Liège, Belgium.</p>
<p><NAME>., <NAME>, and <NAME> (editors). 2000. The birds of Africa. Volume VI. Academic Press, London.</p>
<p> </p>
<p>page 436, <strong>Sao Tome Short-tail <em>Amaurocichla bocagii</em></strong></p>
<p>Sao Tome Short-tail <em>Amaurocichla bocagii</em> is nested within <em>Motacilla</em>, and apparently is the sister species to Madagascar Wagtail <em>Motacilla flaviventris</em> (Harris et al. 2018; see also Alström et al. 2015a). Change the scientific name of Sao Tome Short-tail to <em>Motacilla bocagii</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>,<NAME>, <NAME>, <NAME>, and <NAME>. 2015a. Dramatic niche shifts and morphological change in two insular bird species. Royal Society Open Science 2: 140364.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2018. Discordance between genomic divergence and phenotypic variation in a rapidly evolving avian genus (<em>Motacilla</em>). Molecular Phylogenetics and Evolution 120: 183-195.</p>
<p> </p>
<p>page 664, <strong>Crested Bunting <em>Melophus lathami</em></strong></p>
<p>Crested Bunting, formerly classified in a monotypic genus as <em>Melophus lathami</em>, is embedded within the genus <em>Emberiza</em> (Alström et al. 2008, Päckert et al. 2015). Position Crested Bunting to immediately follow Brown-rumped Bunting <em>Emberiza affinis</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2008. Phylogeny and classification of the Old World Emberizini (Aves, Passeriformes). Molecular Phylogenetics and Evolution 47: 960–973.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2015. Phylogenetic relationships of endemic bunting species (Aves, Passeriformes, Emberizidae, <em>Emberiza</em>) from the eastern Qinghai-Tibet Plateau. Vertebrate Zoology 65: 135-150.</p>
<p> </p>
<p>page 664, <strong>Rock Bunting <em>Emberiza cia</em></strong></p>
<p>page 664, <strong>Godlewski’s Bunting <em>Emberiza godlewskii</em></strong></p>
<p>Subspecies <em>hordei</em> of Rock Bunting, with range “Greece, central Asia Minor and Levant”, is considered to be a junior synonym of nominate <em>cia</em> (Kirwan et al. 2008, Rising 2011), and is deleted. Revise the range description of <em>cia</em> from “Iberian Peninsula and s Europe to w Asia Minor and N Africa” to “Iberian Peninsula and s Europe to w Asia Minor and N Africa”.</p>
<p>Subspecies <em>prageri</em> of Rock Bunting, with range “Crimea, Caucasus, ne Turkey and nw Iran”, is considered to be a junior synonym of subspecies <em>par</em> (Rising 2011), and is deleted. Revise the range description of <em>par </em>from “N and central Iran to Pakistan, nw India and s Altai Mountains” to “Crimea and the Caucasus east to Iran, Afghanistan, northern Pakistan, northeast to the Altai Mountains (eastern Kazakhstan, northwestern China, southern Russia, and western Mongolia); partially migratory”.</p>
<p>Subspecies <em>stracheyi</em>, which we previously classified under Godlewski’s Bunting, properly belongs with Rock Bunting (Martens 1972, Rasmussen and Anderton 2012). Change the scientific name from <em>Emberiza godlewskii stracheyi</em> to <em>Emberiza cia stracheyi</em>. Reposition <em>stracheyi </em>to immediately follow <em>Emberiza cia par</em>. Revise the range description from “W Himalayas (Chitral to Ladakh)” to “western Himalayas (northern Pakistan to southwestern China and western Nepal)”.</p>
<p>Subspecies <em>flemingorum</em>, which we previously classified under Godlewski’s Bunting, properly belongs with Rock Bunting (Martens 1972, Rasmussen and Anderton 2012). Change the scientific name from <em>Emberiza godlewskii flemingorum</em> to <em>Emberiza cia flemingorum</em>. Reposition <em>flemingorum </em>to immediately follow <em>Emberiza cia stracheyi</em>. Revise the range description from “Nepal” to “central Nepal”.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2008. The birds of Turkey. <NAME>, London.</p>
<p><NAME>. 1972. Brutverbreitung paläarktischer Vögel im Nepal-Himalaya. Bonner Zoologische Beiträge 23: 95-121.</p>
<p><NAME>., and <NAME>. 2012. Birds of South Asia. The Ripley guide. Volume 2: attributes and status. Second Edition. Smithsonian Institution and Lynx Edicions, Washington D.C. and Barcelona.</p>
<p><NAME>. 2011. Family Emberizidae (buntings and New World sparrows). Pages 428-693 in <NAME>, <NAME>, and <NAME> (editors), Handbook of birds of the world. Volume 16. Lynx Edicions, Barcelona.</p>
<p> </p>
<p>page 664, <strong>Slaty Bunting <em>Latoucheornis siemsseni</em></strong></p>
<p>Slaty Bunting, formerly classified in a monotypic genus as <em>Latoucheornis siemsseni</em>, is embedded within the genus <em>Emberiza</em> (Alström et al. 2008, Päckert et al. 2015). Change the scientific name to <em>Emberiza siemsseni</em>. Position Slaty Bunting to immediately follow Striolated Bunting <em>Emberiza striolata</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2008. Phylogeny and classification of the Old World Emberizini (Aves, Passeriformes). Molecular Phylogenetics and Evolution 47: 960–973.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2015. Phylogenetic relationships of endemic bunting species (Aves, Passeriformes, Emberizidae, <em>Emberiza</em>) from the eastern Qinghai-Tibet Plateau. Vertebrate Zoology 65: 135-150.</p>
<p> </p>
<p>page 684, <strong>LeConte’s Sparrow <em>Ammodramus leconteii</em></strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2018), change the scientific name of LeConte’s Sparrow from <em>Ammodramus leconteii</em> to <em>Ammospiza leconteii</em>, based on genetic evidence that <em>Ammodramus</em> is not monophyletic (Klicka and Spellman 2007, Klicka et al. 2014, Barker et al. 2015, and other references). Position <em>Ammospiza</em> immediately following Vesper Sparrow <em>Pooecetes gramineus</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2015. New insights into New World biogeography: an integrated view from the phylogeny of blackbirds, cardinals, sparrows, tanagers, warblers, and allies. Auk 132: 333-348.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2018. <a href="http://www.bioone.org/doi/pdf/10.1642/AUK-18-62.1">Fifty-ninth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 135: 798-813.</p>
<p><NAME>., and <NAME>. 2007. A molecular evaluation of the North American “grassland” sparrow clade. Auk 124: 537-551.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>, Jr. 2014. A comprehensive multilocus assessment of sparrow (Aves: Passerellidae) relationships. Molecular Phylogenetics and Evolution 77: 177-182.</p>
<p> </p>
<p>page 683, <strong>Seaside Sparrow <em>Ammodramus maritimus</em></strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2018), change the scientific name of Seaside Sparrow from <em>Ammodramus maritimus</em> to <em>Ammospiza maritima</em>, based on genetic evidence that <em>Ammodramus</em> is not monophyletic (Klicka and Spellman 2007, Klicka et al. 2014, Barker et al. 2015, and other references).</p>
<p>Change the scientific name of the polytypic group Seaside Sparrow (Atlantic) from <em>Ammodramus maritimus maritimus</em>/<em>macgillivraii</em> to <em>Ammospiza maritima maritima</em>/<em>macgillivraii</em>.</p>
<p>Change the spelling of the name of the nominate subspecies from <em>maritimus</em> to <em>maritima</em>.</p>
<p>Change the scientific name of the monotypic group Seaside Sparrow (Dusky) from <em>Ammodramus maritimus nigrescens</em> to <em>Ammospiza maritima nigrescens</em>.</p>
<p>Change the scientific name of the polytypic group Seaside Sparrow (Gulf of Mexico) from <em>Ammodramus maritimus</em> [<em>sennetti</em> Group] to <em>Ammospiza maritima</em> [<em>sennetti</em> Group].</p>
<p>Change the scientific name of the monotypic group Seaside Sparrow (Cape Sable) from <em>Ammodramus maritimus mirabilis</em> to <em>Ammospiza maritima mirabilis</em>.</p>
<p>References:</p>
<p>Barker, F.K., <NAME>, <NAME>, <NAME>, and <NAME>. 2015. New insights into New World biogeography: an integrated view from the phylogeny of blackbirds, cardinals, sparrows, tanagers, warblers, and allies. Auk 132: 333-348.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2018. <a href="http://www.bioone.org/doi/pdf/10.1642/AUK-18-62.1">Fifty-ninth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 135: 798-813.</p>
<p><NAME>., and <NAME>. 2007. A molecular evaluation of the North American “grassland” sparrow clade. Auk 124: 537-551.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>, Jr. 2014. A comprehensive multilocus assessment of sparrow (Aves: Passerellidae) relationships. Molecular Phylogenetics and Evolution 77: 177-182.</p>
<p> </p>
<p>page 684, <strong>Nelson’s Sparrow <em>Ammodramus nelsoni</em></strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2018), change the scientific name of Nelson’s Sparrow from <em>Ammodramus nelsoni</em> to <em>Ammospiza nelsoni</em>, based on genetic evidence that <em>Ammodramus</em> is not monophyletic (Klicka and Spellman 2007, Klicka et al. 2014, Barker et al. 2015, and other references).</p>
<p>Change the scientific name of the polytypic group Nelson’s Sparrow (Interior) from <em>Ammodramus nelsoni</em>/<em>alter</em> to <em>Ammospiza nelsoni</em>/<em>altera</em>.</p>
<p>Change the spelling of the subspecies name <em>alter</em> to <em>altera</em>.</p>
<p>Change the scientific name of the monotypic group Nelson’s Sparrow (Atlantic Coast) from <em>Ammodramus nelsoni subvirgatus</em> to <em>Ammospiza nelsoni subvirgata</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2015. New insights into New World biogeography: an integrated view from the phylogeny of blackbirds, cardinals, sparrows, tanagers, warblers, and allies. Auk 132: 333-348.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2018. <a href="http://www.bioone.org/doi/pdf/10.1642/AUK-18-62.1">Fifty-ninth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 135: 798-813.</p>
<p><NAME>., and <NAME>. 2007. A molecular evaluation of the North American “grassland” sparrow clade. Auk 124: 537-551.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>, Jr. 2014. A comprehensive multilocus assessment of sparrow (Aves: Passerellidae) relationships. Molecular Phylogenetics and Evolution 77: 177-182.</p>
<p> </p>
<p>page 684, <strong>Saltmarsh Sparrow <em>Ammodramus caudacutus</em></strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2018), change the scientific name of Saltmarsh Sparrow from <em>Ammodramus caudacutus</em> to <em>Ammospiza caudacuta</em>, based on genetic evidence that <em>Ammodramus</em> is not monophyletic (Klicka and Spellman 2007, Klicka et al. 2014, Barker et al. 2015, and other references).</p>
<p>Change the spelling of the name of the nominate subspecies from <em>caudacutus</em> to <em>caudacuta</em>.</p>
<p>Change the spelling of the subspecies name <em>diversus</em> to <em>diversa</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2015. New insights into New World biogeography: an integrated view from the phylogeny of blackbirds, cardinals, sparrows, tanagers, warblers, and allies. Auk 132: 333-348.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2018. <a href="http://www.bioone.org/doi/pdf/10.1642/AUK-18-62.1">Fifty-ninth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 135: 798-813.</p>
<p><NAME>., and <NAME>. 2007. A molecular evaluation of the North American “grassland” sparrow clade. Auk 124: 537-551.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>, Jr. 2014. A comprehensive multilocus assessment of sparrow (Aves: Passerellidae) relationships. Molecular Phylogenetics and Evolution 77: 177-182.</p>
<p> </p>
<p>page 684, <strong>Baird’s Sparrow <em>Ammodramus bairdii</em></strong></p>
<p>page 684, <strong>Henslow’s Sparrow <em>Ammodramus henslowii</em></strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2018), change the scientific name of Baird’s Sparrow from <em>Ammodramus bairdii</em> to <em>Centronyx bairdii</em>, based on genetic evidence that <em>Ammodramus</em> is not monophyletic (Klicka and Spellman 2007, Klicka et al. 2014, Barker et al. 2015, and other references).</p>
<p>Change the scientific name of Henslow’s Sparrow from <em>Ammodramus henslowii</em> to <em>Centronyx henslowii</em>.</p>
<p>Position <em>Centronyx</em> immediately following Savannah Sparrow <em>Passerculus sandwichensis</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, and <NAME>. 2015. New insights into New World biogeography: an integrated view from the phylogeny of blackbirds, cardinals, sparrows, tanagers, warblers, and allies. Auk 132: 333-348.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2018. <a href="http://www.bioone.org/doi/pdf/10.1642/AUK-18-62.1">Fifty-ninth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 135: 798-813.</p>
<p><NAME>., and <NAME>. 2007. A molecular evaluation of the North American “grassland” sparrow clade. Auk 124: 537-551.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>, Jr. 2014. A comprehensive multilocus assessment of sparrow (Aves: Passerellidae) relationships. Molecular Phylogenetics and Evolution 77: 177-182.</p>
<p> </p>
<p>page 693, <strong>Red-breasted Meadowlark <em>Sturnella militaris</em></strong></p>
<p>page 693, <strong>White-browed Meadowlark <em>Sturnella superciliaris</em></strong></p>
<p>page 693, <strong>Peruvian Meadowlark <em>Sturnella bellicosa</em></strong></p>
<p>page 693, <strong>Pampas Meadowlark <em>Sturnella defilippii</em></strong></p>
<p>page 693, <strong>Long-tailed Meadowlark <em>Sturnella loyca</em></strong></p>
<p>Meadowlarks (<em>Sturnella</em>) consist of two clades; although these are sister taxa, the divergence between them is very deep (Powell et al. 2014), and these clades are best considered to be separate genera (Dickinson and Christidis 2014, Remsen et al. 2016, Chesser et al. 2017; see also AOS-SACC <a href="http://www.museum.lsu.edu/~Remsen/SACCprop778.htm">Proposal 778</a>).</p>
<p>Change the scientific name of Red-breasted Meadowlark from <em>Sturnella militaris</em> to <em>Leistes militaris</em>.</p>
<p>Change the scientific name of White-browed Meadowlark from <em>Sturnella superciliaris</em> to <em>Leistes superciliaris</em>.</p>
<p>Change the scientific name of Peruvian Meadowlark from <em>Sturnella bellicosa</em> to <em>Leistes bellicosus</em>. Change the spelling of the nominate subspecies from <em>bellicosa</em> to <em>bellicosus</em>.</p>
<p>Change the scientific name of Pampas Meadowlark from <em>Sturnella defilippii</em> to <em>Leistes defilippii.</em></p>
<p>Change the scientific name of Long-tailed Meadowlark from <em>Sturnella loyca</em> to <em>Leistes loyca</em>. Change the spelling of the subspecies name from <em>catamarcana</em> to <em>catamarcanus</em>. Change the spelling of the subspecies name from <em>falklandica</em> to <em>falklandicus</em>.</p>
<p>References:</p>
<p>Chesser, R.T., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>., <NAME>, <NAME>, and <NAME>. 2017. <a href="http://www.americanornithologypubs.org/doi/pdf/10.1642/AUK-17-72.1?code=coop-site">Fifty-eighth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 134: 751-773.</p>
<p><NAME>., and <NAME>. 2014. The Howard & Moore complete checklist of the birds of the world. Fourth edition. Volume 2. Aves Press, Eastbourne, United Kingdom.</p>
<p>Powell, A.F.L.A., <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. A comprehensive species-level molecular phylogeny of the New World blackbirds (Icteridae). Molecular Phylogenetics and Evolution 71: 94-112.</p>
<p><NAME>., Jr., A.F.L.A. Powell, <NAME>, <NAME>, and <NAME>. 2016. A revised classification of the Icteridae (Aves) based on DNA sequence data. Zootaxa 4093: 285-292.</p>
<p> </p>
<p>page 643, <strong>Masked Yellowthroat <em>Geothlypis aequinoctialis</em></strong></p>
<p>page 643, <strong>Olive-crowned Yellowthroat <em>Geothlypis semiflava</em></strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2018), the monotypic group Masked Yellowthroat (Chiriqui) <em>Geothlypis aequinoctialis chiriquensis</em> is transferred to Olive-crowned Yellowthroat <em>Geothlypis semiflava</em>, following Escalante et al. (2009) and Freeman and Montgomery (2017). Change English name to Olive-crowned Yellowthroat (Chiriqui), and the scientific name to <em>Geothlypis semiflava chiriquensis</em>. Position <em>chiriquensis</em> immediately following the monotypic group Olive-crowned Yellowthroat (Baird’s) <em>Geothlypis semiflava bairdi</em>. Revise the range description from “Lowlands of sw Costa Rica and w Panama (w Chiriquí)” to “Pacific slope of extreme southern Costa Rica and western Panama (western Chiriquí)”.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2018. <a href="http://www.bioone.org/doi/pdf/10.1642/AUK-18-62.1">Fifty-ninth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 135: 798-813.</p>
<p><NAME>., L. Márquez-Valdelamar, <NAME>, <NAME>, and <NAME>. 2009. Evolutionary history of a prominent North American warbler clade: the <em>Oporornis</em>–<em>Geothlypis </em>complex. Molecular Phylogenetics and Evolution 53: 668-678.</p>
<p><NAME>., and <NAME>. 2017. Using song playback experiments to measure species recognition between geographically isolated populations: a comparison with acoustic trait analyses. Auk 134: 857-870.</p>
<p> </p>
<p>page 673, <strong>Puerto Rican Bullfinch <em>Loxigilla portoricensis</em></strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2018), change the scientific name of Puerto Rican Bullfinch from <em>Loxigilla portoricensis</em> to <em>Melopyrrha portoricensis</em>, based on genetic evidence that <em>Loxigilla</em> is polyphyletic, and that Puerto Rican and Greater Antillean bullfinches are more closely related to Cuban Bullfinch <em>Melopyrrha nigra</em> than to other species of <em>Loxigilla</em> (Burns et al. 2014).</p>
<p>Reposition Puerto Rican Bullfinch to immediately follow Orangequit <em>Euneornis campestris</em>.</p>
<p>References:</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2018. <a href="http://www.bioone.org/doi/pdf/10.1642/AUK-18-62.1">Fifty-ninth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 135: 798-813.</p>
<p> </p>
<p>page 673, <strong>Greater Antillean Bullfinch <em>Loxigilla violacea</em></strong></p>
<p>In accord with AOS-NACC (Chesser et al. 2018), change the scientific name of Greater Antillean Bullfinch from <em>Loxigilla violacea</em> to <em>Melopyrrha violacea</em>, based on genetic evidence that <em>Loxigilla</em> is polyphyletic, and that Puerto Rican and Greater Antillean bullfinches are more closely related to Cuban Bullfinch <em>Melopyrrha nigra</em> than to other species of <em>Loxigilla</em> (Burns et al. 2014).</p>
<p>Reposition Greater Antillean Bullfinch to immediately follow Cuban Bullfinch.</p>
<p>References:</p>
<p>Burns, K.J., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Phylogenetics and diversification of tanagers (Passeriformes: Thraupidae), the largest radiation of Neotropical songbirds. Molecular Phylogenetics and Evolution 75: 41-77.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, Jr., <NAME>, <NAME>, and <NAME>. 2018. <a href="http://www.bioone.org/doi/pdf/10.1642/AUK-18-62.1">Fifty-ninth supplement to the American Ornithological Society’s <em>Check-list of North American birds</em></a>. Auk 135: 798-813.</p>
<p> </p>
<p>page 687, <strong>Española Cactus-Finch <em>Geospiza conirostris</em></strong></p>
<p>In accord with AOS-SACC (<a href="http://www.museum.lsu.edu/~Remsen/SACCprop747.htm">Proposal 747</a>), change the English name of <em>Geospiza conirostris</em> from Española Cactus-Finch to Española Ground-Finch.</p>
<p> </p>
<p>page 602, <strong>Russet Sparrow <em>Passer rutilans</em></strong></p>
<p>Change the scientific name of Russet Sparrow from <em>Passer rutilans</em> to <em>Passer cinnamomeus</em>; Mlíkovský (2011) reported that the name <em>cinnamomeus</em> has priority over <em>rutilans</em>.</p>
<p>Reference:</p>
<p>Mlíkovský, J. 2011. Correct name for the Asian Russet Sparrow. Chinese Birds 2: 109-110.</p>
<p> </p>
<p>page 603, <strong>Yellow-spotted Petronia <em>Petronia pyrgita</em></strong></p>
<p>Change the scientific name of Yellow-spotted Petronia from <em>Petronia pyrgita</em> to <em>Gymnornis pyrgita</em> (Dickinson 2003, Fjeldså et al. 2010, Price et al. 2014).</p>
<p>References:</p>
<p><NAME>. (editor). 2003. The Howard & Moore complete checklist of the birds of the world. Third edition. Princeton University Press, Princeton, New Jersey.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2010. The Cinnamon Ibon <em>Hypocryptadius cinnamomeus</em> is a forest canopy sparrow. Ibis 152: 747-760.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Niche filling slows the diversification of Himalayan songbirds. Nature 509: 222-225.</p>
<p> </p>
<p>page 603, <strong>Chestnut-shouldered Petronia <em>Petronia xanthocollis</em></strong></p>
<p>Change the scientific name of Chestnut-shouldered Petronia from <em>Petronia xanthocollis</em> to <em>Gymnornis xanthocollis</em> (Dickinson 2003, Fjeldså et al. 2010, Price et al. 2014).</p>
<p>References:</p>
<p><NAME>. (editor). 2003. The Howard & Moore complete checklist of the birds of the world. Third edition. Princeton University Press, Princeton, New Jersey.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2010. The Cinnamon Ibon <em>Hypocryptadius cinnamomeus</em> is a forest canopy sparrow. Ibis 152: 747-760.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Niche filling slows the diversification of Himalayan songbirds. Nature 509: 222-225.</p>
<p> </p>
<p>page 603, <strong>Yellow-throated Petronia <em>Petronia superciliaris</em></strong></p>
<p>Change the scientific name of Yellow-throated Petronia from <em>Petronia superciliaris</em> to <em>Gymnornis superciliaris</em> (Dickinson 2003, Fjeldså et al. 2010, Price et al. 2014).</p>
<p>References:</p>
<p><NAME>. (editor). 2003. The Howard & Moore complete checklist of the birds of the world. Third edition. Princeton University Press, Princeton, New Jersey.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2010. The Cinnamon Ibon <em>Hypocryptadius cinnamomeus</em> is a forest canopy sparrow. Ibis 152: 747-760.</p>
<p>Price, T.D., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Niche filling slows the diversification of Himalayan songbirds. Nature 509: 222-225.</p>
<p> </p>
<p>page 604, <strong>Bush Petronia <em>Petronia dentata</em></strong></p>
<p>Change the scientific name of Bush Petronia from <em>Petronia dentata</em> to <em>Gymnornis dentata</em> (Dickinson 2003, Fjeldså et al. 2010, Price et al. 2014).</p>
<p>References:</p>
<p><NAME>. (editor). 2003. The Howard & Moore complete checklist of the birds of the world. Third edition. Princeton University Press, Princeton, New Jersey.</p>
<p><NAME>., <NAME>, <NAME>, and <NAME>. 2010. The Cinnamon Ibon <em>Hypocryptadius cinnamomeus</em> is a forest canopy sparrow. Ibis 152: 747-760.</p>
<p><NAME>., <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, and <NAME>. 2014. Niche filling slows the diversification of Himalayan songbirds. Nature 509: 222-225.</p>
<p> </p>
<p>page 604, <strong>Rock Petronia <em>Petronia petronia</em></strong></p>
<p>In accord with widespread usage (e.g., Inskipp et al. 1996, Fry and Keith 2004, Rasmussen and Anderton 2005), change the English name of <em>Petronia petronia</em> from Rock Petronia to Rock Sparrow.</p>
<p>References:</p>
<p><NAME>., and <NAME> (editors). 2004. The birds of Africa. Volume VII. Academic Press, London.</p>
<p><NAME>., <NAME>, and <NAME>. 1996. An annotated checklist of the birds of the Oriental Region. Oriental Bird Club, Sandy, Bedfordshire, United Kingdom.</p>
<p><NAME>., and <NAME>. 2005. Birds of South Asia. The Ripley guide. Volume 2: attributes and status. Smithsonian Institution and Lynx Edicions, Washington D.C. and Barcelona.</p>
<p> </p>
<p>page 605, <strong>Social Weaver <em>Philetairus socius</em></strong></p>
<p>In accord with widespread usage (Dowsett and Forbes-Watson 1993, Fry and Keith 2004, Hockey et al. 2005), change the English name of <em>Philetairus socius</em> from Social Weaver to Sociable Weaver.</p>
<p>References:</p>
<p><NAME>., and <NAME>. 1993. Checklist of birds of the Afrotropical and Malagasy regions. Volume 1: species limits and distribution. Tauraco Press, Liège, Belgium.</p>
<p><NAME>., and <NAME> (editors). 2004. The birds of Africa. Volume VII. Academic Press, London.</p>
<p><NAME>., <NAME>, and <NAME> (editors). 2005. Roberts Birds of southern Africa. VII edition. Trustees of the John Voelcker Bird Book Fund, Cape Town.</p>
<p> </p>
<p><strong>GROUPS</strong></p>
<p><strong>GROUPS – newly created groups</strong></p>
<p> </p>
<p>Speckled Chachalaca (Speckled) <em>Ortalis guttata guttata/subaffinis</em></p>
<p>Speckled Chachalaca (Parana) <em>Ortalis guttata remota</em></p>
<p>Sultan’s Cuckoo-Dove (Sulawesi) <em>Macropygia doreya</em> [<em>albicapilla</em> Group]</p>
<p>Sultan’s Cuckoo-Dove (Sultan’s) <em>Macropygia doreya</em> [<em>doreya</em> Group]</p>
<p>Thick-billed Pigeon (Thick-billed) <em>Treron curvirostra curvirostra</em></p>
<p>Thick-billed Pigeon (Barusan) <em>Treron curvirostra hypothapsinus</em></p>
<p>Little Bronze-Cuckoo (Pied) <em>Chrysococcyx minutillus crassirostris</em></p>
<p>Cape Petrel (Antarctic) <em>Daption capense capense</em></p>
<p>Cape Petrel (Snares) <em>Daption capense australe</em></p>
<p>Lesser Frigatebird (Lesser) <em>Fregata ariel ariel/iredaeli</em></p>
<p>Lesser Frigatebird (Trindade) <em>Fregata ariel trinitatis</em></p>
<p>Brown Pelican (Galapagos) <em>Pelecanus occidentalis urinator</em></p>
<p>Shikra (African) <em>Accipiter badius sphenurus/polyzonoides</em></p>
<p>Shikra (Asian) <em>Accipiter badius</em> [<em>badius</em> Group]</p>
<p>Roadside Hawk (Northern) <em>Rupornis magnirostris</em> [<em>magnirostris</em> Group]</p>
<p>Roadside Hawk (Southern) <em>Rupornis magnirostris</em> [<em>pucherani</em> Group]</p>
<p>Rajah Scops-Owl (Sumatran) <em>Otus brookii solokensis</em></p>
<p>Rajah Scops-Owl (Bornean) <em>Otus brookii brookii</em></p>
<p>Sulawesi Scops-Owl (Sulawesi) <em>Otus manadensis manadensis</em></p>
<p>Sulawesi Scops-Owl (Banggai) <em>Otus manadensis mendeni</em></p>
<p>Rufescent Screech-Owl (Rufescent) <em>Megascops ingens ingens/venezuelanus</em></p>
<p>Collared Owlet (Collared) <em>Glaucidium brodiei brodiei/pardalotum</em></p>
<p>Collared Owlet (Sunda) <em>Glaucidium brodiei sylvaticum/borneense</em></p>
<p>Burrowing Owl (<em>guadeloupensis</em> Group) <em>Athene cunicularia</em> [<em>guadeloupensis</em> Group]</p>
<p>Burrowing Owl (Andean) <em>Athene cunicularia</em> [<em>tolimae</em> Group]</p>
<p>Burrowing Owl (Littoral) <em>Athene cunicularia nanodes/juninensis</em></p>
<p>Burrowing Owl (<em>grallaria</em>) <em>Athene cunicularia grallaria</em></p>
<p>Burrowing Owl (Southern) <em>Athene cunicularia</em> [<em>cunicularia</em> Group]</p>
<p>Brown Wood-Owl (Brown) <em>Strix leptogrammica</em> [<em>indranee</em> Group]</p>
<p>Brown Wood-Owl (Bornean) <em>Strix leptogrammica</em> [<em>leptogrammica</em> Group]</p>
<p>Black-throated Trogon (<em>tenellus</em>) <em>Trogon rufus tenellus</em></p>
<p>Black-throated Trogon (<em>cupreicauda</em>) <em>Trogon rufus cupreicauda</em></p>
<p>Black-throated Trogon (<em>rufus</em> Group) <em>Trogon rufus</em> [<em>rufus</em> Group]</p>
<p>Black-throated Trogon (<em>chrysochloros</em>) <em>Trogon rufus chrysochloros</em></p>
<p>Orange-breasted Trogon (Spice) <em>Harpactes oreskios</em> [<em>dulitensis</em> Group]</p>
<p>Orange-breasted Trogon (Orange-breasted) <em>Harpactes oreskios oreskios</em></p>
<p>Yellow-rumped Tinkerbird (Yellow-rumped) <em>Pogoniulus bilineatus</em> [<em>bilineatus</em> Group]</p>
<p>Southern Emerald-Toucanet (Black-billed) <em>Aulacorhynchus albivitta cyanolaemus</em></p>
<p>Long-tailed Parakeet (Andaman) <em>Psittacula longicauda tytleri</em></p>
<p>Long-tailed Parakeet (Nicobar) <em>Psittacula longicauda nicobarica</em></p>
<p>Long-tailed Parakeet (Long-tailed) <em>Psittacula longicauda longicauda/defontainei</em></p>
<p>Long-tailed Parakeet (Enggano) <em>Psittacula longicauda modesta</em></p>
<p>Dot-winged Antwren (Boucard’s) <em>Microrhopias quixensis</em> [<em>boucardi</em> Group]</p>
<p>Dot-winged Antwren (<em>microstictus</em>) <em>Microrhopias quixensis microstictus</em></p>
<p>Dot-winged Antwren (<em>quixensis</em>) <em>Microrhopias quixensis quixensis</em></p>
<p>Dot-winged Antwren (<em>nigriventris</em>) <em>Microrhopias quixensis nigriventris</em></p>
<p>Dot-winged Antwren (White-tailed) <em>Microrhopias quixensis albicauda</em>/<em>intercedens</em></p>
<p>Dot-winged Antwren (<em>bicolor</em>) <em>Microrhopias quixensis bicolor</em></p>
<p>Dot-winged Antwren (<em>emiliae</em>) <em>Microrhopias quixensis emiliae</em></p>
<p>Streak-chested Antpitta (Colombian Valleys) <em>Hylopezus perspicillatus pallidior</em></p>
<p>Streak-chested Antpitta (Baudo) <em>Hylopezus perspicillatus periophthalmicus</em></p>
<p>Plain Softtail (<em>dimorpha</em>) <em>Thripophaga fusciceps dimorpha</em></p>
<p>Plain Softtail (<em>obidensis</em>) <em>Thripophaga fusciceps obidensis</em></p>
<p>Plain Softtail (<em>fusciceps</em>) <em>Thripophaga fusciceps fusciceps</em></p>
<p>Peruvian Tyrannulet (Peruvian) <em>Zimmerius viridiflavus viridiflavus</em></p>
<p>Snethlage’s Tody-Tyrant (Igapo) <em>Hemitriccus minor pallens</em></p>
<p>Snethlage’s Tody-Tyrant (Snethlage’s) <em>Hemitriccus minor minor</em>/<em>snethlageae</em></p>
<p>Cinnamon Flycatcher (Santa Marta) <em>Pyrrhomyias cinnamomeus assimilis</em></p>
<p>Cinnamon Flycatcher (Venezuelan) <em>Pyrrhomyias cinnamomeus</em> [<em>vieillotioides</em> Group]</p>
<p>Cinnamon Flycatcher (Andean) <em>Pyrrhomyias cinnamomeus cinnamomeus</em>/<em>pyrrhopterus</em></p>
<p>Dusky-capped Flycatcher (<em>tuberculifer</em>/<em>pallidus</em>) <em>Myiarchus tuberculifer tuberculifer</em>/<em>pallidus</em></p>
<p>Dusky-capped Flycatcher (<em>nigriceps</em>/<em>atriceps</em>) <em>Myiarchus tuberculifer nigriceps</em>/<em>atriceps</em></p>
<p>Swainson’s Flycatcher (<em>phaeonotus</em>) <em>Myiarchus swainsoni phaeonotus</em></p>
<p>Swainson’s Flycatcher (<em>swainsoni</em> Group) <em>Myiarchus swainsoni</em> [<em>swainsoni</em> Group]</p>
<p>Blue-backed Manakin (<em>pareola</em>/<em>atlantica</em>) <em>Chiroxiphia pareola pareola</em>/<em>atlantica</em></p>
<p>Blue-backed Manakin (<em>napensis</em>) <em>Chiroxiphia pareola napensis</em></p>
<p>Blue-backed Manakin (<em>regina</em>) <em>Chiroxiphia pareola regina</em></p>
<p>Little Shrikethrush (Waigeo) <em>Colluricincla megarhyncha affinis</em></p>
<p>Little Shrikethrush (Mamberamo) <em>Colluricincla megarhyncha</em> [<em>obscura</em> Group]</p>
<p>Little Shrikethrush (Sepik-Ramu) <em>Colluricincla megarhyncha</em> [<em>tappenbecki</em> Group]</p>
<p>Little Shrikethrush (Variable) <em>Colluricincla megarhyncha</em> [<em>fortis</em> Group]</p>
<p>Little Shrikethrush (Tagula) <em>Colluricincla megarhyncha discolor</em></p>
<p>Little Shrikethrush (Rufous) <em>Colluricincla megarhyncha</em> [<em>rufogaster</em> Group]</p>
<p>Great Gray Shrike (Great Gray) <em>Lanius excubitor</em> [<em>excubitor</em> Group]</p>
<p>Great Gray Shrike (Sahara) <em>Lanius excubitor</em> [<em>elegans</em> Group]</p>
<p>Great Gray Shrike (Arabian) <em>Lanius excubitor aucheri</em>/<em>buryi</em></p>
<p>Great Gray Shrike (Socotra) <em>Lanius excubitor uncinatus</em></p>
<p>Great Gray Shrike (Indian) <em>Lanius excubitor lahtora</em></p>
<p>Black-naped Oriole (Philippine) <em>Oriolus chinensis</em> [<em>chinensis</em> Group]</p>
<p>Black-naped Oriole (Talaud) <em>Oriolus chinensis melanisticus</em></p>
<p>Black-naped Oriole (Sulawesi) <em>Oriolus chinensis</em> [<em>frontalis</em> Group]</p>
<p>Black-naped Oriole (Tenggara) <em>Oriolus chinensis broderipi</em>/<em>boneratensis</em></p>
<p>Square-tailed Drongo (Saturnine) <em>Dicrurus ludwigii sharpei</em>/<em>saturnus</em></p>
<p>Square-tailed Drongo (Square-tailed) <em>Dicrurus ludwigii</em> [<em>ludwigii</em> Group]</p>
<p>Fork-tailed Drongo (Glossy-backed) <em>Dicrurus adsimilis divaricatus</em></p>
<p>Fork-tailed Drongo (Clancey’s) <em>Dicrurus adsimilis apivorus</em></p>
<p>Fork-tailed Drongo (<em>adsimilis</em>/<em>fugax</em>) <em>Dicrurus adsimilis adsimilis</em>/<em>fugax</em></p>
<p>Velvet-mantled Drongo (Fanti) <em>Dicrurus modestus atactus</em></p>
<p>Velvet-mantled Drongo (<em>coracinus</em>) <em>Dicrurus modestus coracinus</em></p>
<p>Hair-crested Drongo (Hair-crested) <em>Dicrurus hottentottus hottentottus</em>/<em>brevirostris</em></p>
<p>Hair-crested Drongo (Bornean) <em>Dicrurus hottentottus borneensis</em></p>
<p>Hair-crested Drongo (Javan) <em>Dicrurus hottentottus jentincki</em>/<em>faberi</em></p>
<p>Hair-crested Drongo (White-eyed) <em>Dicrurus hottentottus leucops</em>/<em>banggaiensis</em></p>
<p>Hair-crested Drongo (Obi) <em>Dicrurus hottentottus guillemardi</em></p>
<p>Hair-crested Drongo (Sula) <em>Dicrurus hottentottus pectoralis</em></p>
<p>Hair-crested Drongo (Palawan) <em>Dicrurus hottentottus palawanensis</em></p>
<p>Hair-crested Drongo (Cuyo) <em>Dicrurus hottentottus cuyensis</em></p>
<p>Hair-crested Drongo (Sulu) <em>Dicrurus hottentottus suluensis</em></p>
<p>Sumatran Drongo (Sumatran) <em>Dicrurus sumatranus sumatranus</em></p>
<p>Sumatran Drongo (Mentawai) <em>Dicrurus sumatranus viridinitens</em></p>
<p>Wallacean Drongo (Lombok) <em>Dicrurus densus vicinus</em></p>
<p>Wallacean Drongo (Flores) <em>Dicrurus densus bimaensis</em></p>
<p>Wallacean Drongo (Sumba) <em>Dicrurus densus sumbae</em></p>
<p>Wallacean Drongo (Timor) <em>Dicrurus densus densus</em></p>
<p>Wallacean Drongo (Tanimbar) <em>Dicrurus densus kuehni</em></p>
<p>Wallacean Drongo (Kai) <em>Dicrurus densus megalornis</em></p>
<p>Spangled Drongo (Morotai) <em>Dicrurus bracteatus morotensis</em></p>
<p>Spangled Drongo (Halmahera) <em>Dicrurus bracteatus atrocaeruleus</em></p>
<p>Spangled Drongo (Buru) <em>Dicrurus bracteatus buruensis</em></p>
<p>Spangled Drongo (Seram) <em>Dicrurus bracteatus amboinensis</em></p>
<p>Spangled Drongo (Papuan) <em>Dicrurus bracteatus carbonarius</em></p>
<p>Spangled Drongo (Bismarck) <em>Dicrurus bracteatus laemostictus</em></p>
<p>Spangled Drongo (Guadalcanal) <em>Dicrurus bracteatus meeki</em></p>
<p>Spangled Drongo (Makira) <em>Dicrurus bracteatus longirostris</em></p>
<p>Spangled Drongo (Spangled) <em>Dicrurus bracteatus</em> [<em>bracteatus</em> Group]</p>
<p>Hooded Crow (Hooded) <em>Corvus cornix</em> [<em>cornix</em> Group]</p>
<p>Hooded Crow (Mesopotamian) <em>Corvus cornix capellanus</em></p>
<p>Sultan Tit (Yellow-crested) <em>Melanochlora sultanea</em> [<em>sultanea</em> Group]</p>
<p>Sultan Tit (Black-crested) <em>Melanochlora sultanea gayeti</em></p>
<p>Japanese Tit (Japanese) <em>Parus minor</em> [<em>minor</em> Group]</p>
<p>Japanese Tit (<em>commixtus</em>) <em>Parus minor commixtus</em></p>
<p>Japanese Tit (Amami) <em>Parus minor amamiensis</em></p>
<p>Japanese Tit (Okinawa) <em>Parus minor okinawae</em></p>
<p>Japanese Tit (Ishigaki) <em>Parus minor nigriloris</em></p>
<p>Eurasian Nuthatch (Western) <em>Sitta europaea</em> [<em>europaea</em> Group]</p>
<p>Eurasian Nuthatch (White-bellied) <em>Sitta europaea</em> [<em>asiatica</em> Group]</p>
<p>Eurasian Nuthatch (Buff-bellied) <em>Sitta europaea</em> [<em>roseilia</em> Group]</p>
<p>Eurasian Nuthatch (Chinese) <em>Sitta europaea sinensis</em>/<em>formasana</em></p>
<p>White-lored Gnatcatcher (White-lored) <em>Polioptila albiloris albiloris</em>/<em>vanrossemi</em></p>
<p>White-lored Gnatcatcher (Yucatan) <em>Polioptila albiloris albiventris</em></p>
<p>Tropical Gnatcatcher (<em>plumbiceps</em>/<em>anteocularis</em>) <em>Polioptila plumbea plumbiceps</em>/<em>anteocularis</em></p>
<p>Tropical Gnatcatcher (<em>innotata</em>) <em>Polioptila plumbea innotata</em></p>
<p>Tropical Gnatcatcher (<em>plumbea</em>) <em>Polioptila plumbea plumbea</em></p>
<p>Tropical Gnatcatcher (<em>parvirostris</em>) <em>Polioptila plumbea parvirostris</em></p>
<p>Tropical Gnatcatcher (<em>atricapilla</em>) <em>Polioptila plumbea atricapilla</em></p>
<p>Goldcrest (European) <em>Regulus regulus</em> [<em>regulus</em> Group]</p>
<p>Goldcrest (western Canary Islands) <em>Regulus regulus ellenthalarae</em></p>
<p>Goldcrest (Asian) <em>Regulus regulus</em> [<em>himalayensis</em> Group]</p>
<p>Northern Crombec (Northern) <em>Sylvietta brachyura brachyura</em>/<em>carnapi</em></p>
<p>Northern Crombec (Eastern) <em>Sylvietta brachyura leucopsis</em></p>
<p>Greenish Warbler (<em>viridanus</em>) <em>Phylloscopus trochiloides viridanus</em></p>
<p>Greenish Warbler (<em>trochiloides</em>/<em>ludlowi</em>) <em>Phylloscopus trochiloides trochiloides</em>/<em>ludlowi</em></p>
<p>Greenish Warbler (<em>obscuratus</em>) <em>Phylloscopus trochiloides obscuratus</em></p>
<p>Menetries’s Warbler (<em>mystacea</em>) <em>Sylvia mystacea mystacea</em></p>
<p>Menetries’s Warbler (<em>rubescens</em>/<em>turcmenica</em>) <em>Sylvia mystacea rubescens</em>/<em>turcmenica</em></p>
<p>African Yellow White-eye (Green) <em>Zosterops senegalensis</em> [<em>stuhlmanni</em> Group]</p>
<p>African Yellow White-eye (Southern) <em>Zosterops senegalensis anderssoni</em>/<em>stierlingi</em></p>
<p>Broad-ringed White-eye (Ethiopian) <em>Zosterops poliogastrus poliogastrus</em></p>
<p>Broad-ringed White-eye (Kaffa) <em>Zosterops poliogastrus kaffensis</em></p>
<p>Broad-ringed White-eye (Mbulu) <em>Zosterops poliogastrus mbuluensis</em></p>
<p>Broad-ringed White-eye (Kilimanjaro) <em>Zosterops poliogastrus eurycricotus</em></p>
<p>Broad-ringed White-eye (South Pare) <em>Zosterops poliogastrus winifredae</em></p>
<p>Chestnut-backed Scimitar-Babbler (Sunda) <em>Pomatorhinus montanus bornensis</em>/<em>occidentalis</em></p>
<p>Chestnut-backed Scimitar-Babbler (Javan) <em>Pomatorhinus montanus montanus</em>/<em>ottolanderi</em></p>
<p>Horsfield’s Babbler (Hartert’s) <em>Turdinus sepiarius tardinatus</em>/<em>barussanus</em></p>
<p>Horsfield’s Babbler (Horsfield’s) <em>Turdinus sepiarius sepiarius</em></p>
<p>Horsfield’s Babbler (Salvadori’s) <em>Turdinus sepiarius rufiventris</em>/<em>harterti</em></p>
<p>Short-billed Pipit (Puna) <em>Anthus furcatus brevirostris</em></p>
<p>Short-billed Pipit (Fork-tailed) <em>Anthus furcatus furcatus</em></p>
<p>Pine Grosbeak (Eurasian) <em>Pinicola enucleator</em> [<em>enucleator</em> Group]</p>
<p>Pine Grosbeak (Pacific Northwest) <em>Pinicola enucleator flammula</em></p>
<p>Pine Grosbeak (Queen Charlotte) <em>Pinicola enucleator carlottae</em></p>
<p>Pine Grosbeak (Rocky Mts.) <em>Pinicola enucleator montana</em></p>
<p>Pine Grosbeak (California) <em>Pinicola enucleator californica</em></p>
<p>Pine Grosbeak (Taiga) <em>Pinicola enucleator leucura</em></p>
<p>Red Crossbill (Central American or type 11) <em>Loxia curvirostra mesamericana</em></p>
<p>Godlewski’s Bunting (<em>godlewskii</em> Group) <em>Emberiza godlewskii</em> [<em>godlewskii</em> Group]</p>
<p>Godlewski’s Bunting (<em>yunnanensis</em>/<em>khamensis</em>) <em>Emberiza godlewskii yunnanensis</em>/<em>khamensis</em></p>
<p>Saffron-billed Sparrow (Stripe-crowned) <em>Arremon flavirostris dorbignii</em></p>
<p>Saffron-billed Sparrow (Gray-backed) <em>Arremon flavirostris polionotus</em></p>
<p>Saffron-billed Sparrow (Saffron-billed) <em>Arremon flavirostris flavirostris</em></p>
<p>Rufous-collared Sparrow (Rufous-collared) <em>Zonotrichia capensis</em> [<em>capensis</em> Group]</p>
<p>Rufous-collared Sparrow (Patagonian) <em>Zonotrichia capensis australis</em></p>
<p>Unicolored Blackbird (Azara’s) <em>Agelasticus cyanopus atroolivaceus</em>/<em>unicolor</em></p>
<p>Unicolored Blackbird (Yellow-breasted) <em>Agelasticus cyanopus cyanopus</em></p>
<p>Tropical Parula (South American) <em>Setophaga pitiayumi</em> [<em>pitiayumi</em> Group]</p>
<p>Buff-breasted Mountain-Tanager (Carriker’s) <em>Dubusia taeniata carrikeri</em></p>
<p>Buff-breasted Mountain-Tanager (Buff-breasted) <em>Dubusia taeniata taeniata</em></p>
<p>Golden Tanager (<em>aurulenta</em> Group) <em>Tangara arthus</em> [<em>aurulenta</em> Group]</p>
<p>Golden Tanager (<em>pulchra</em> Group) <em>Tangara arthus</em> [<em>pulchra</em> Group]</p>
<p>Capped Conebill (White-capped) <em>Conirostrum albifrons</em> [<em>albifrons</em> Group]</p>
<p>Capped Conebill (Blue-capped) <em>Conirostrum albifrons</em> [<em>atrocyaneum</em> Group]</p>
<p>Inaccessible Island Finch (Upland) <em>Nesospiza acunhae fraseri</em></p>
<p>White-bellied Seedeater (Bicolored) <em>Sporophila leucoptera bicolor</em></p>
<p>White-bellied Seedeater (Gray-backed) <em>Sporophila leucoptera</em> [<em>leucoptera</em> Group]</p>
<p>Bananaquit (Greater Antillean) <em>Coereba flaveola</em> [<em>flaveola</em> Group]</p>
<p>Bananaquit (Puerto Rico) <em>Coereba flaveola portoricensis</em></p>
<p>Bananaquit (Lesser Antillean) <em>Coereba flaveola</em> [<em>bartholemica</em> Group]</p>
<p>Bananaquit (St. Vincent) <em>Coereba flaveola atrata</em></p>
<p>Bananaquit (Grenada) <em>Coereba flaveola aterrima</em></p>
<p>Bananaquit (Los Roques) <em>Coereba flaveola lowii</em></p>
<p>Bananaquit (<em>laurae</em>/<em>melanornis</em>) <em>Coereba flaveola laurae</em>/<em>melanornis </em></p>
<p>Bananaquit (Continental) <em>Coereba flaveola</em> [<em>luteola</em> Group]</p>
<p><NAME> (<em>pallidus</em>/<em>productus)</em> <em>Camarhynchus pallidus pallidus</em>/<em>productus</em></p>
<p><NAME> (<em>striatipecta</em>) <em>Camarhynchus pallidus striatipecta</em></p>
</div><!-- .page-content -->
</div><!-- .standard_wrap -->
</div><!-- #content_area -->
</div><!-- .inner_section -->
</div><!-- #content_section -->
<div id="footer_section">
<div class="inner_section clearfix cmfix">
<div class="footer_col1">
<a href="http://www.cornell.edu/"><img src="http://www.birds.cornell.edu/clementschecklist/wp-content/themes/cornelllab/images/logo_cornell.gif" id="footer_logo" /></a>
</div>
<div class="footer_col3">
<ul>
<li class="footer_list"><a href="http://www.birds.cornell.edu/page.aspx?pid=1644" class="footer_link first">Contact Us</a></li>
<li class="footer_list"><a href="http://www.birds.cornell.edu/Page.aspx?pid=1635" class="footer_link">Privacy Policy</a></li>
<li class="footer_list"><a href="http://www.birds.cornell.edu/Page.aspx?pid=1636" class="footer_link">Terms of Use</a></li>
<li class="footer_list"><a href="http://www.allaboutbirds.org" class="footer_link last">All About Birds</a></li>
</ul>
</div>
<div class="footer_col2">
<p><a href="http://www.birds.cornell.edu" class="copyright_link">Copyright ©<span id="copyrightyear"> 2012</span> Cornell University</a></p>
<p>Cornell Lab of Ornithology<br />
159 Sapsucker Woods Rd<br />
Ithaca, NY 14850<br />
Tel: <span style="color:#ffffff" class="phone">800.843.2473</span></p>
</div>
</div>
</div>
<script>// By <NAME> & tweaked by <NAME>
// Makes embedded YouTube iframes scale fluidly with the page width while
// preserving each video's aspect ratio.
// NOTE(review): the selector only matches http:// embeds; https:// YouTube
// iframes would be skipped -- confirm whether that is intended.
jQuery(document).ready(function($){
// Find all YouTube videos
var $allVideos = $("iframe[src^='http://www.youtube.com']"),
// The element that is fluid width
$fluidEl = $(".embed-vid");
// Figure out and save aspect ratio for each video
$allVideos.each(function() {
$(this)
.data('aspectRatio', this.height / this.width)
// and remove the hard coded width/height
.removeAttr('height')
.removeAttr('width');
});
// When the window is resized
// (You'll probably want to debounce this)
$(window).resize(function() {
var newWidth = $fluidEl.width();
// Resize all videos according to their own aspect ratio
$allVideos.each(function() {
var $el = $(this);
$el
.width(newWidth)
.height(newWidth * $el.data('aspectRatio'));
});
// Kick off one resize to fix all videos on page load
}).resize();
});</script>
</body>
</html>
| a864c4fb4ae032ade1d5f6fe5399040d45387809 | [
"Markdown",
"HTML",
"PHP"
] | 4 | HTML | rdmpage/clements-bird-checklist | a50cfe59132f711ad7fd135386efcfd35d855379 | 851a2aa32eb4659452e65b1a8e8c12316f1e298e |
refs/heads/main <file_sep>var gTrans = {
    // Translation table: UI string key -> { languageCode: translated text }.
    // Keys correspond to data-trans attribute values in the HTML.
    title: {
        en: 'Edit Text Lines',
        he: 'עורך הטקסט'
    },
    backToGallery: {
        en: 'Back to Gallery',
        he: 'חזרה לגלריה',
    },
    memes: {
        en: 'MEMES',
        he: 'ממים',
    },
    gallery: {
        en: 'GALLERY',
        he: 'גלריה',
    },
    about: {
        en: 'ABOUT',
        he: 'עלינו',
    },
    download: {
        en: 'Download',
        he: 'הורדה',
    }
}
var gCurrLang = 'en';
function getTrans(transKey) {
    // Resolve a UI string for the active language, falling back to English.
    // Unknown keys yield the literal marker 'UNKNOWN' so they stand out.
    var entry = gTrans[transKey];
    if (!entry) return 'UNKNOWN';
    return entry[gCurrLang] || entry['en'];
}
function doTrans() {
    // Re-translate every element tagged with a data-trans attribute.
    // (Removed a leftover console.dir(el) debug statement.)
    var els = document.querySelectorAll('[data-trans]')
    els.forEach(function(el) {
        var txt = getTrans(el.dataset.trans)
        // Inputs are translated via their placeholder; other elements
        // get their visible text replaced.
        if (el.nodeName === 'INPUT') {
            el.setAttribute('placeholder', txt)
        } else {
            el.innerText = txt
        }
    })
}
function setLang(lang) {
    // Set the active UI language code ('en' / 'he').
    // Callers must invoke doTrans() afterwards to re-render the texts.
    gCurrLang = lang;
}
function formatNumOlder(num) {
    // Legacy number formatter: the locale is hard-coded to Spanish ('es')
    // instead of gCurrLang -- prefer formatNum() for new code.
    return num.toLocaleString('es')
}
function formatNum(num) {
    // Format a number with the grouping rules of the active UI language.
    var formatter = new Intl.NumberFormat(gCurrLang);
    return formatter.format(num);
}
function formatCurrency(num) {
return new Intl.NumberFormat('he-IL', { style: 'currency', currency: 'ILS' }).format(num);
}
function formatDate(time) {
    // Locale-aware date-time string, e.g. "Oct 5, 2021, 3:24 PM".
    var options = {
        year: 'numeric',
        month: 'short',
        day: 'numeric',
        hour: 'numeric',
        minute: 'numeric',
        hour12: true,
    };
    return new Intl.DateTimeFormat(gCurrLang, options).format(time);
}<file_sep>function downloadCanvas(elLink) {
    // Point the download anchor at a data-URL snapshot of the meme canvas,
    // so the browser saves the current canvas content as an image.
    const data = gCanvas.toDataURL()
    elLink.href = data
}
function renderImg(img) {
    // Draw the image scaled to fill the whole meme canvas.
    // Fix: this referenced an undefined global `gElCanvas` (ReferenceError
    // at runtime); the canvas element used everywhere else in this code is
    // `gCanvas`.
    gCtx.drawImage(img, 0, 0, gCanvas.width, gCanvas.height);
}
function resizeCanvas() {
    // Fit the canvas to its container's current width, minus a small margin.
    var elContainer = document.querySelector('.canvas-container');
    gCanvas.width = elContainer.offsetWidth - 20
}<file_sep>'use strict'
// Module state for the meme editor controller.
var gCanvas;            // the <canvas id="my-canvas"> element
var gCtx;               // its 2D rendering context
var gTxt;               // (unused here; presumably set by the text service)
var isEditing = false;  // true while a text line is selected for editing
// (Removed a leftover console.log(gMeme) debug statement.)
function init() {
    // Grab the canvas and its 2D context, start on the gallery view
    // (editor and about hidden), and wire up the canvas mouse handlers.
    gCanvas = document.querySelector('#my-canvas');
    gCtx = gCanvas.getContext('2d');
    document.querySelector('.editor').hidden = true;
    document.querySelector('.about').hidden = true;
    addEventListener();
}
function toggleMenu() {
    // Open/close the responsive hamburger menu; CSS keys off this class.
    document.body.classList.toggle('menu-open');
}
function onDrowText() {
    // Copy the text input's value into the currently selected line, then
    // repaint the meme.
    // NOTE(review): renderMeme() paints asynchronously (inside img.onload),
    // so the drawText(id) call below may race with it -- confirm it is needed.
    var id = gMeme.selectedLineIdx
    gMeme.lines[id].txt = document.querySelector('[name=text]').value;
    renderMeme()
    drawText(id)
}
function onEditorOpen(elImg) {
    // Enter the editor with the clicked gallery image: draw it, swap the
    // gallery view for the editor, remember the source, and fit the canvas.
    var imgSrc = elImg.src;
    drawImg(imgSrc);
    document.querySelector('.opening-window').hidden = true;
    document.querySelector('.editor').hidden = false;
    gMeme.selectedImgSrc = imgSrc;
    resizeCanvas();
}
function onEditorClose() {
    // Leave the editor: show the gallery again and hide the other views.
    document.querySelector('.opening-window').hidden = false;
    document.querySelector('.editor').hidden = true;
    document.querySelector('.about').hidden = true;
}
// Render
// Repaints the whole meme: the background image first (asynchronously,
// once it has loaded), then every text line, and -- while a line is being
// edited -- the selection rectangle around the selected line.
function renderMeme() {
    var img = new Image()
    img.src = gMeme.selectedImgSrc;
    img.onload = () => {
        // Background image scaled to fill the canvas.
        gCtx.drawImage(img, 0, 0, gCanvas.width, gCanvas.height)
        gMeme.lines.forEach(line => {
            // Each line carries its own color, font, size, alignment
            // and position in the model.
            gCtx.lineWidth = 2;
            gCtx.fillStyle = line.color;
            gCtx.strokeStyle = 'black'
            gCtx.font = `${line.size}px ${line.font}`;
            gCtx.textAlign = line.align;
            gCtx.fillText(line.txt, line.positionx, line.positiony);
            gCtx.strokeText(line.txt, line.positionx, line.positiony)
            // Outline the selected line while editing.
            // NOTE(review): this runs once per line, so the rectangle is
            // drawn repeatedly -- presumably harmless, but could be hoisted.
            if (isEditing) {
                setRect(gMeme.lines[gMeme.selectedLineIdx].positionx, gMeme.lines[gMeme.selectedLineIdx].positiony)
            }
        });
    }
}
function onAddLine() {
    // Select the next slot and create a new text line there (model side is
    // handled by addLine() in the service), then draw it.
    // NOTE(review): no upper bound here -- presumably addLine() caps the
    // number of lines; confirm in the meme service.
    gMeme.selectedLineIdx++
    addLine()
    drawText(gMeme.selectedLineIdx)
}
function onChangeLine() {
    // Cycle the selection to the next text line, wrapping to the first.
    // Fix: the wrap index was hard-coded to 2 (assuming exactly 3 lines),
    // which selects an out-of-range index when fewer lines exist; wrapping
    // by the actual line count is equivalent for 3 lines and safe otherwise.
    gMeme.selectedLineIdx = (gMeme.selectedLineIdx + 1) % gMeme.lines.length;
    isEditing = true;
    renderMeme();
}
function onClearLine() {
    // Remove the selected line from the model (service-side clearLine),
    // wipe the canvas, and repaint everything from scratch.
    clearLine(gMeme.lines[gMeme.selectedLineIdx].id)
    gCtx.clearRect(0, 0, gCanvas.width, gCanvas.height);
    renderMeme()
}
function onChangeColor() {
    // Apply the color picker's value to the selected line and repaint.
    var elColorInput = document.querySelector('[name=color]');
    gMeme.lines[gMeme.selectedLineIdx].color = elColorInput.value;
    renderMeme();
}
// Move the selected line to a fixed horizontal anchor and repaint.
// (Shared helper replaces three near-identical alignment handlers.)
function _alignSelectedLine(x) {
    gMeme.lines[gMeme.selectedLineIdx].positionx = x;
    renderMeme();
}

function onLeftAline() {
    _alignSelectedLine(50);
}

function onCenterAline() {
    _alignSelectedLine(250);
}

function onRightAline() {
    _alignSelectedLine(400);
}
function onMinusSize() {
    // Shrink the selected line's font by 10px, clamped so it never reaches
    // zero or a negative size (which would make the text invisible/invalid).
    var line = gMeme.lines[gMeme.selectedLineIdx];
    line.size = Math.max(10, line.size - 10);
    renderMeme();
}

function onPlusSize() {
    // Grow the selected line's font by 10px and repaint.
    gMeme.lines[gMeme.selectedLineIdx].size += 10;
    renderMeme();
}
function onInputStiker(icon) {
    // Stamp an emoji sticker onto the canvas and record it in the model.
    gCtx.lineWidth = 2;
    gCtx.strokeStyle = 'black'
    // Fix: the generic font family was misspelled 'san-serif', which is an
    // invalid family name and silently ignored by the canvas.
    gCtx.font = '100px sans-serif'
    gCtx.fillText(icon, 400, 300);
    inputStiker(icon)
}
function onLineUp() {
    // Nudge the selected line 30px towards the top of the canvas.
    var currLine = gMeme.lines[gMeme.selectedLineIdx];
    currLine.positiony -= 30;
    renderMeme();
}

function onLineDown() {
    // Nudge the selected line 30px towards the bottom of the canvas.
    var currLine = gMeme.lines[gMeme.selectedLineIdx];
    currLine.positiony += 30;
    renderMeme();
}
function onShowAbout() {
    // Switch to the About view, hiding the editor and the gallery.
    document.querySelector('.about').hidden = false;
    document.querySelector('.editor').hidden = true;
    document.querySelector('.opening-window').hidden = true;
}
// Wire up the mouse handlers used for dragging text on the canvas.
// NOTE(review): this declaration shadows the global window.addEventListener
// within this script -- consider renaming (e.g. addCanvasListeners).
function addEventListener() {
    gCanvas.addEventListener('mousedown', startPosition)
    gCanvas.addEventListener('mouseup', finishPosition)
}
function onSetLang(lang) {
    // Switch the UI language and re-render all translated texts.
    setLang(lang);
    // Hebrew is right-to-left; flip the page direction accordingly.
    document.body.classList.toggle('rtl', lang === 'he');
    doTrans();
}
function onSearchByKeyWord(elInput) {
    // TODO: gallery search is not implemented yet -- the keyword is read
    // from the input but never used.
    var searchInput = elInput.value
} | edd43d2466a00e7af32c2c1106ec41959a61671b [
"JavaScript"
] | 3 | JavaScript | moriyaeldar/Memegenerator | 7190d8fe853a1f592521895277c4d5e09370bae1 | d2d9316c07dae150a337342d2dbb0b570225b11e |
refs/heads/master | <file_sep>/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
/**
 * Static, pure conversions between the five supported temperature scales
 * (Celsius, Fahrenheit, Kelvin, Rankine, Reaumur).
 */
public class TemperatureConversion
{
    // Scale offsets: freezing point of water in Fahrenheit (32), absolute
    // zero offset for Kelvin relative to Celsius (273.15), and for Rankine
    // relative to Fahrenheit (459.67).
    public static final double
    INDEX_F= 32,
    INDEX_K= 273.15,
    INDEX_RA = 459.67;

    /** Converts {@code c} degrees Celsius to the scale {@code temp}. */
    public static double celciusTO(Temperature temp, double c){
        switch (temp){
            case FAHRENHEIT:
                return (c * 9 / 5) + INDEX_F;
            case KELVIN:
                return c + INDEX_K;
            case RANKINE:
                return (c * 1.8) + INDEX_F + INDEX_RA;
            case REAUMUR:
                return c * 0.8;
            default:
                // CELCIUS (or unknown): identity.
                return c;
        }
    }

    /** Converts {@code f} degrees Fahrenheit to the scale {@code temp}. */
    public static double fahrenheitTO(Temperature temp, double f){
        switch (temp){
            case CELCIUS:
                return (f - INDEX_F) * 5 / 9;
            case KELVIN:
                return fahrenheitTO(Temperature.CELCIUS, f) + INDEX_K;
            case RANKINE:
                return f + INDEX_RA;
            case REAUMUR:
                return (f - INDEX_F) / 2.25;
            default:
                return f;
        }
    }

    /** Converts {@code k} kelvins to the scale {@code temp}. */
    public static double kelvinTO(Temperature temp, double k){
        switch (temp){
            case CELCIUS:
                return k - INDEX_K;
            case FAHRENHEIT:
                // Bug fix: Celsius -> Fahrenheit requires the +32 offset
                // (INDEX_F); the previous code wrongly added INDEX_K
                // (273.15), producing results ~241 degrees too high.
                return (kelvinTO(Temperature.CELCIUS, k) * 9 / 5) + INDEX_F;
            case RANKINE:
                return k * 1.8;
            case REAUMUR:
                return kelvinTO(Temperature.CELCIUS, k) * 0.8;
            default:
                return k;
        }
    }

    /** Converts {@code ra} degrees Rankine to the scale {@code temp}. */
    public static double rankineTO(Temperature temp, double ra){
        switch (temp){
            case CELCIUS:
                return (rankineTO(Temperature.FAHRENHEIT, ra) - INDEX_F) / 1.8;
            case FAHRENHEIT:
                return ra - INDEX_RA;
            case KELVIN:
                return ra / 1.8;
            case REAUMUR:
                return (rankineTO(Temperature.FAHRENHEIT, ra) - INDEX_F) / 2.25;
            default:
                return ra;
        }
    }

    /** Converts {@code re} degrees Reaumur to the scale {@code temp}. */
    public static double reaumurTO(Temperature temp, double re){
        switch (temp){
            case CELCIUS:
                return re * 1.25;
            case FAHRENHEIT:
                return (re * 2.25) + INDEX_F;
            case KELVIN:
                return reaumurTO(Temperature.CELCIUS, re) + INDEX_K;
            case RANKINE:
                return reaumurTO(Temperature.FAHRENHEIT, re) + INDEX_RA;
            default:
                return re;
        }
    }

    /** Dispatches a conversion of {@code degree} from one scale to another. */
    public static double convert(Temperature from, Temperature to, double degree){
        switch (from){
            case CELCIUS: return celciusTO(to, degree);
            case FAHRENHEIT: return fahrenheitTO(to, degree);
            case KELVIN: return kelvinTO(to, degree);
            case RANKINE: return rankineTO(to, degree);
            case REAUMUR: return reaumurTO(to, degree);
            default: return celciusTO(to, degree);
        }
    }
}
<file_sep>package fnn.smirl.temperature.converter;
import android.view.*;
import android.widget.*;
import android.app.Activity;
import android.os.Bundle;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.TextView.OnEditorActionListener;
public class MainActivity extends Activity implements
OnEditorActionListener, OnItemSelectedListener
{
	@Override
	public void onItemSelected(AdapterView<?> p1, View p2, int p3, long p4)
	{
		// TODO: Implement this method
		// Remember which temperature scale each spinner now shows, then
		// re-run the conversion so the fields stay in sync.
		try{
			if (p1.getId() == R.id.spinner1){
				t1 = (Temperature) p1.getSelectedItem();
			}else if (p1.getId() == R.id.spinner2){
				t2 = (Temperature) p1.getSelectedItem();
			}
			// NOTE(review): getText().toString() never returns null, so this
			// check is always true; an empty field is caught by the catch below.
			if (et1.getText().toString() != null){
				convert(swap);
			}
		}catch (Exception ee){} // NOTE(review): errors are silently swallowed
	}
@Override
public void onNothingSelected(AdapterView<?> p1)
{
// TODO: Implement this method
}
@Override
public boolean onEditorAction(TextView p1, int p2, KeyEvent p3)
{
// TODO: Implement this method
try{
if (p1.getId() == R.id.et1){
swap = true;
convert(swap);
tv1.setText("From:");
tv2.setText("To:");
}else
if (p1.getId() == R.id.et2){
swap = false;
convert(swap);
tv2.setText("From:");
tv1.setText("To:");
}
}catch (Exception ee){}
return true;
}
boolean swap = true;
Spinner spinner1, spinner2;
EditText et1, et2;
TextView tv1, tv2;
Temperature temps[];
Temperature t1 = Temperature.CELCIUS;
Temperature t2 = Temperature.CELCIUS;
@Override
protected void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
temps = Temperature.values();
tv1 = (TextView)findViewById(R.id.tv1);
tv2 = (TextView)findViewById(R.id.tv2);
et1 = (EditText)findViewById(R.id.et1);
et1.setOnEditorActionListener(this);
et2 = (EditText)findViewById(R.id.et2);
et2.setOnEditorActionListener(this);
spinner1 = (Spinner)findViewById(R.id.spinner1);
spinner2 = (Spinner)findViewById(R.id.spinner2);
ArrayAdapter<Temperature> adapter =
new ArrayAdapter<Temperature>(this, android.R.layout.simple_dropdown_item_1line , temps);
spinner1.setAdapter(adapter);
spinner2.setAdapter(adapter);
spinner1.setOnItemSelectedListener(this);
spinner2.setOnItemSelectedListener(this);
}
private void convert(boolean sw){
if (sw){
double d = Double.parseDouble(et1.getText().toString());
double ans = TemperatureConversion.convert(t1, t2, d);
et2.setText("" + ans);
}else{
double d = Double.parseDouble(et2.getText().toString());
double ans = TemperatureConversion.convert(t2, t1, d);
et1.setText("" + ans);
}
}
}
<file_sep># TemperatureConversion
Conversion of temperature in 4 common degrees :
- Celcius
- Fahrenheit
- Kelvin
- Rankine
- Réaumur
The class consists of 6 public methods :
1 for each type of degree and 1 general.
#Temperature
an enum class for static int values of the five types of degrees. | feb9ef845c29acd2a61590e2c6e5b0f15c60e391 | [
"Markdown",
"Java"
] | 3 | Java | francisnnumbi/TemperatureConversion | 74171096fd42fb5939dce592cc862fde4ad49f22 | 6e8bdd7f3ef94026dec59979fa952eebf62da739 |
refs/heads/master | <file_sep><?php
namespace RentalManager\Main\Traits;
use RentalManager\Main\Common\ModelMethods;
/**
* Created by PhpStorm.
* User: gorankrgovic
* Date: 9/10/18
* Time: 10:00 AM
*/
trait RMUnitTrait
{
use ModelMethods;
/**
* @param $attr
* @return bool
*/
public function hasAttribute($attr)
{
return array_key_exists($attr, $this->attributes);
}
// METHODS
// ---------------------------
/**
* Set tags attribute as the json encoded string
*
* @param $value
*/
public function setTagsAttribute($value)
{
$this->attributes['tags'] = ( $value ) ? json_encode($value) : null;
}
/**
* Return tags attribute as the json decoded array
*
* @param $value
* @return mixed|null
*/
public function getTagsAttribute($value)
{
return ( $value ) ? json_decode( $value, true ) : null;
}
// RELATION BINDINGS
// ---------------------------
/**
* Associate the property - you can pass a name, object or an ID
*
* @param $object
* @return static
*/
public function associateProperty( $object )
{
return $this->associateModel('properties', $object, true);
}
/**
* @return $this
*/
public function dissociateProperty()
{
return $this->dissociateModel('properties', true);
}
// RELATIONS
// ---------------------------
/**
* Get the property that owns the property detail
*
* @return mixed
*/
public function property()
{
return $this->belongsTo(
'App\RentalManager\Main\Property',
'property_id'
);
}
}
<file_sep><?php
namespace RentalManager\Main\Commands;
use Illuminate\Console\Command;
/**
* Class SetupModelsCommand
* @package Propeller\Commands
*/
class SetupModelsCommand extends Command
{
/**
* The console command name.
*
* @var string
*/
protected $name = 'rm:setup-models-main';
/**
* The console command description.
*
* @var string
*/
protected $description = 'Setup models for the main package';
/**
* Commands to call with their description.
*
* @var array
*/
protected $calls = [
'rm:model-contact' => 'Creating the Contact model',
'rm:model-lease-duration' => 'Creating the LeaseDuration model',
'rm:model-location' => 'Creating the Location model',
'rm:model-property' => 'Creating the Property model',
'rm:model-property-type' => 'Creating the PropertyType model',
'rm:model-provider' => 'Creating the Provider model',
'rm:model-rental-restriction' => 'Creating the RentalRestriction model',
'rm:model-rental-type' => 'Creating the RentalType model',
'rm:model-unit' => 'Creating the Unit model',
];
/**
* Create a new command instance
*
* @return void
*/
public function __construct()
{
parent::__construct();
}
/**
* Execute the console command.
*
* @return void
*/
public function handle()
{
foreach ($this->calls as $command => $info) {
$this->line(PHP_EOL . $info);
$this->call($command);
}
}
}
<file_sep><?php
namespace RentalManager\Main;
/**
* Created by PhpStorm.
* User: gorankrgovic
* Date: 9/10/18
* Time: 7:57 AM
*/
/**
* Class Main
* @package RentalManager\Main
*/
class Main {
/**
* Laravel application.
*
* @var \Illuminate\Foundation\Application
*/
public $app;
/**
* Base constructor.
* @param $app
*/
public function __construct($app)
{
$this->app = $app;
}
}
<file_sep><?php
namespace RentalManager\Main\Traits;
use RentalManager\Main\Common\ModelMethods;
/**
* Created by PhpStorm.
* User: gorankrgovic
* Date: 9/10/18
* Time: 9:59 AM
*/
trait RMPropertyTrait
{
use ModelMethods;
// METHODS
// ---------------------------
/**
* Set tags attribute as the json encoded string
*
* @param $value
*/
public function setTagsAttribute($value)
{
$this->attributes['tags'] = ( $value ) ? json_encode($value) : null;
}
/**
* Return tags attribute as the json decoded array
*
* @param $value
* @return mixed|null
*/
public function getTagsAttribute($value)
{
return ( $value ) ? json_decode( $value, true ) : null;
}
// RELATION BINDINGS
// ---------------------------
/**
* Associate the property type - you can pass a name, object or an ID
*
* @param $object
* @return static
*/
public function associatePropertyType( $object )
{
return $this->associateModel('property_types', $object, true);
}
/**
* @return $this
*/
public function dissociatePropertyType()
{
return $this->dissociateModel('property_types', true);
}
/**
* @param $object
* @return static
*/
public function associateRentalType( $object )
{
return $this->associateModel('rental_types', $object, true);
}
/**
* @return $this
*/
public function dissociateRentalType()
{
return $this->dissociateModel('rental_types', true);
}
/**
* @param $object
* @return static
*/
public function associateLeaseDuration( $object )
{
return $this->associateModel('lease_durations', $object, true);
}
/**
* @return $this
*/
public function dissociateLeaseDuration()
{
return $this->dissociateModel('lease_durations', true);
}
/**
* @param $object
* @return static
*/
public function associateLocation( $object )
{
return $this->associateModel('locations', $object, true);
}
/**
* @return $this
*/
public function dissociateLocation()
{
return $this->dissociateModel('locations', true);
}
/**
* @param $object
* @return static
*/
public function associateRentalRestriction( $object )
{
return $this->associateModel('rental_restrictions', $object, true);
}
/**
* @return $this
*/
public function dissociateRentalRestriction()
{
return $this->dissociateModel('rental_restrictions', true);
}
/**
* @param $object
* @return static
*/
public function associateProvider( $object )
{
return $this->associateModel('providers', $object, true);
}
/**
* @return $this
*/
public function dissociateProvider()
{
return $this->dissociateModel('providers', true);
}
// RELATIONS
// ---------------------------
/**
* @return mixed
*/
public function contact()
{
return $this->hasOne(
'App\RentalManager\Main\Contact',
'property_id'
);
}
/**
* Property type relation
*
* @return mixed
*/
public function property_type()
{
return $this->belongsTo(
'App\RentalManager\Main\PropertyType',
'property_type_id');
}
/**
* Provider relation
*
* @return mixed
*/
public function provider()
{
return $this->belongsTo(
'App\RentalManager\Main\Provider',
'provider_id'
);
}
/**
* Get the units for this property
*
* @return mixed
*/
public function units()
{
return $this->hasMany(
'App\RentalManager\Main\Unit',
'property_id'
);
}
/**
* Rental type
*
* @return mixed
*/
public function rental_type()
{
return $this->belongsTo(
'App\RentalManager\Main\RentalType',
'rental_type_id');
}
/**
* Lease duration
*
* @return mixed
*/
public function lease_duration()
{
return $this->belongsTo(
'App\RentalManager\Main\LeaseDuration',
'lease_duration_id'
);
}
/**
* Lease duration
*
* @return mixed
*/
public function location()
{
return $this->belongsTo(
'App\RentalManager\Main\Location',
'location_id'
);
}
/**
* Rental restriction
*
* @return mixed
*/
public function rental_restriction()
{
return $this->belongsTo(
'App\RentalManager\Main\RentalRestriction',
'rental_restriction_id'
);
}
/**
* Handle dynamic method calls into the model.
*
* @param string $method
* @param array $parameters
* @return mixed
*/
public function __call($method, $parameters)
{
if (!preg_match('/^can[A-Z].*/', $method)) {
return parent::__call($method, $parameters);
}
}
}
<file_sep><?php
namespace RentalManager\Main\Models;
use Illuminate\Database\Eloquent\Model;
use RentalManager\Main\Traits\RMLocationTrait;
/**
* Created by PhpStorm.
* User: gorankrgovic
* Date: 9/10/18
* Time: 9:57 AM
*/
class RMLocation extends Model
{
use RMLocationTrait;
/**
* The database table used by the model.
*
* @var string
*/
protected $table;
/**
* Model constructor.
*
* @param array $attributes
*/
public function __construct(array $attributes = [])
{
parent::__construct($attributes);
// Set the table
$this->table = 'locations';
}
}
<file_sep><?php
namespace RentalManager\Main\Traits;
/**
* Created by PhpStorm.
* User: gorankrgovic
* Date: 9/10/18
* Time: 9:59 AM
*/
trait RMRentalRestrictionTrait
{
/**
* @return mixed
*/
public function properties()
{
return $this->hasMany(
'App\RentalManager\Main\Property',
'rental_restriction_id'
);
}
}
<file_sep><?php
namespace RentalManager\Main\Traits;
use RentalManager\Main\Common\ModelMethods;
/**
* Created by PhpStorm.
* User: gorankrgovic
* Date: 9/10/18
* Time: 9:58 AM
*/
trait RMContactTrait
{
use ModelMethods;
/**
* @return mixed
*/
public function property()
{
return $this->belongsTo(
'App\RentalManager\Main\Property',
'property_id'
);
}
/**
* @param $object
* @return static
*/
public function associateProperty( $object )
{
return $this->associateModel('property', $object, true);
}
/**
* @return $this
*/
public function dissociateProperty()
{
return $this->dissociateModel('property', true);
}
}
<file_sep><?php
namespace RentalManager\Main;
use Illuminate\Support\ServiceProvider;
/**
* Created by PhpStorm.
* User: gorankrgovic
* Date: 9/10/18
* Time: 7:55 AM
*/
/**
* Class MainServiceProvider
* @package RentalManager\Main
*/
class MainServiceProvider extends ServiceProvider
{
/**
* Indicates if loading of the provider is deferred.
*
* @var bool
*/
protected $defer = false;
/**
* The commands to be registered.
*
* @var array
*/
protected $commands = [
'MakeContact' => 'command.rentalmanager.contact',
'MakeLeaseDuration' => 'command.rentalmanager.lease-duration',
'MakeLocation' => 'command.rentalmanager.location',
'MakeProperty' => 'command.rentalmanager.property',
'MakePropertyType' => 'command.rentalmanager.property-type',
'MakeProvider' => 'command.rentalmanager.provider',
'MakeRentalRestriction' => 'command.rentalmanager.rental-restriction',
'MakeRentalType' => 'command.rentalmanager.rental-type',
'MakeUnit' => 'command.rentalmanager.unit',
'Seeder' => 'command.rentalmanager.seeder',
'Setup' => 'command.rentalmanager.setup',
'SetupModels' => 'command.rentalmanager.setup-models',
];
/**
* Bootstrap the application events.
*
* @return void
*/
public function boot()
{
// Merge config file for the current app
$this->mergeConfigFrom(__DIR__.'/../config/rentalmanager.php', 'rentalmanager');
// Publish the config files
$this->publishes([
__DIR__.'/../config/rentalmanager.php' => config_path('rentalmanager.php')
], 'rentalmanager');
$this->loadMigrationsFrom(__DIR__.'/../migrations');
}
/**
* Register any package services.
*
* @return void
*/
public function register()
{
// Register the app
$this->registerApp();
// Register Commands
$this->registerCommands();
}
/**
* Register the application bindings.
*
* @return void
*/
private function registerApp()
{
$this->app->bind('rentalmanager', function ($app) {
return new Main($app);
});
$this->app->alias('rentalmanager', 'RentalManager\Main');
}
/**
* Register the given commands.
*
* @return void
*/
protected function registerCommands()
{
foreach (array_keys($this->commands) as $command) {
$method = "register{$command}Command";
call_user_func_array([$this, $method], []);
}
$this->commands(array_values($this->commands));
}
protected function registerSeederCommand()
{
$this->app->singleton('command.rentalmanager.seeder', function () {
return new \RentalManager\Main\Commands\SeederCommand();
});
}
protected function registerSetupCommand()
{
$this->app->singleton('command.rentalmanager.setup', function () {
return new \RentalManager\Main\Commands\SetupCommand();
});
}
protected function registerSetupModelsCommand()
{
$this->app->singleton('command.rentalmanager.setup-models', function () {
return new \RentalManager\Main\Commands\SetupModelsCommand();
});
}
// Models
protected function registerMakeLeaseDurationCommand()
{
$this->app->singleton('command.rentalmanager.lease-duration', function ($app) {
return new \RentalManager\Main\Commands\Generators\MakeLeaseDurationCommand($app['files']);
});
}
protected function registerMakeContactCommand()
{
$this->app->singleton('command.rentalmanager.contact', function ($app) {
return new \RentalManager\Main\Commands\Generators\MakeContactCommand($app['files']);
});
}
protected function registerMakeLocationCommand()
{
$this->app->singleton('command.rentalmanager.location', function ($app) {
return new \RentalManager\Main\Commands\Generators\MakeLocationCommand($app['files']);
});
}
protected function registerMakePropertyCommand()
{
$this->app->singleton('command.rentalmanager.property', function ($app) {
return new \RentalManager\Main\Commands\Generators\MakePropertyCommand($app['files']);
});
}
protected function registerMakePropertyTypeCommand()
{
$this->app->singleton('command.rentalmanager.property-type', function ($app) {
return new \RentalManager\Main\Commands\Generators\MakePropertyTypeCommand($app['files']);
});
}
protected function registerMakeProviderCommand()
{
$this->app->singleton('command.rentalmanager.provider', function ($app) {
return new \RentalManager\Main\Commands\Generators\MakeProviderCommand($app['files']);
});
}
protected function registerMakeRentalRestrictionCommand()
{
$this->app->singleton('command.rentalmanager.rental-restriction', function ($app) {
return new \RentalManager\Main\Commands\Generators\MakeRentalRestrictionCommand($app['files']);
});
}
protected function registerMakeRentalTypeCommand()
{
$this->app->singleton('command.rentalmanager.rental-type', function ($app) {
return new \RentalManager\Main\Commands\Generators\MakeRentalTypeCommand($app['files']);
});
}
protected function registerMakeUnitCommand()
{
$this->app->singleton('command.rentalmanager.unit', function ($app) {
return new \RentalManager\Main\Commands\Generators\MakeUnitCommand($app['files']);
});
}
/**
* Get the services provided.
*
* @return array
*/
public function provides()
{
return array_values($this->commands);
}
}
<file_sep># Main package for the Rentbits Rental Manager
<file_sep><?php
namespace RentalManager\Main\Models;
use Illuminate\Database\Eloquent\Model;
use RentalManager\Main\Traits\RMPropertyTypeTrait;
/**
* Created by PhpStorm.
* User: gorankrgovic
* Date: 9/10/18
* Time: 9:56 AM
*/
class RMPropertyType extends Model
{
use RMPropertyTypeTrait;
/**
* The database table used by the model.
*
* @var string
*/
protected $table;
/**
* Model constructor.
*
* @param array $attributes
*/
public function __construct(array $attributes = [])
{
parent::__construct($attributes);
// Set the table
$this->table = 'property_types';
}
}
<file_sep><?php
use Illuminate\Support\Facades\Schema;
use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;
class RentalManagerMainSetupTables extends Migration {
/**
* Run the migrations.
*
* @return void
*/
public function up()
{
// Providers
Schema::create('providers', function(Blueprint $table) {
$table->increments('id');
$table->string('name')->nullable();
$table->string('slug')->unique()->nullable();
$table->timestamps();
});
// Property types
Schema::create('property_types', function(Blueprint $table) {
$table->increments('id');
$table->string('name')->unique();
$table->string('slug')->unique();
});
// Rental types
Schema::create('rental_types', function(Blueprint $table) {
$table->increments('id');
$table->string('name')->unique();
$table->string('slug')->unique();
});
// Rental restrictions
Schema::create('rental_restrictions', function(Blueprint $table) {
$table->increments('id');
$table->string('name')->unique();
$table->string('slug')->unique();
});
// Lease durations
Schema::create('lease_durations', function(Blueprint $table) {
$table->increments('id');
$table->string('name')->unique();
$table->string('slug')->unique();
});
// Locations
Schema::create('locations', function(Blueprint $table) {
$table->increments('id');
$table->string('google_place_id')->index()->nullable();
$table->string('searchable_name')->nullable();
$table->string('display_name')->nullable();
$table->string('street_number')->nullable();
$table->string('street_name')->nullable();
$table->string('neighborhood')->nullable();
$table->string('city')->nullable();
$table->string('state_code')->nullable();
$table->string('county')->nullable();
$table->string('borough')->nullable();
$table->string('postal_code')->nullable();
$table->decimal('lat', 10, 8)->index()->nullable(); // lat
$table->decimal('lng', 11, 8)->index()->nullable(); // lng
$table->timestamps();
});
// Properties
Schema::create('properties', function(Blueprint $table) {
$table->increments('id');
$table->string('foreign_id')->index()->nullable();
$table->unsignedInteger('provider_id')->index()->nullable();
$table->unsignedInteger('property_type_id')->index()->nullable(); // property type
$table->unsignedInteger('rental_type_id')->index()->nullable(); // rental type
$table->unsignedInteger('rental_restriction_id')->index()->nullable(); // rental restriction
$table->unsignedInteger('lease_duration_id')->index()->nullable(); // lease duration
$table->unsignedInteger('location_id')->index()->nullable(); // property location
$table->string('slug')->nullable()->unique();
$table->boolean('is_community')->default(false);
$table->string('name')->nullable();
$table->text('lease_terms')->nullable();
$table->text('description')->nullable();
$table->text('tags')->nullable(); // for storing custom tags or keywords
$table->decimal('min_price', 10, 2)->nullable()->index(); // field for storing aggregate price
$table->decimal('min_baths', 4, 2)->nullable()->index(); // field for storing aggregate baths
$table->integer('min_beds')->nullable()->index(); // field for storing aggregate beds
$table->integer('min_sqft')->nullable()->index(); // field for storing aggregate sqft
$table->boolean('featured')->default(false);
// Status stuff
$table->enum('status', [
// Viewable by everyone
'active',
// Expired listing
'expired',
// System admin has blocked for uncertain reason (reason can be set in a field)
'blocked'
])->nullable()->index();
$table->string('status_reason')->nullable();
$table->softDeletes();
$table->timestamps();
$table->foreign('property_type_id')->references('id')->on('property_types')->onDelete('cascade');
$table->foreign('rental_type_id')->references('id')->on('rental_types')->onDelete('cascade');
$table->foreign('rental_restriction_id')->references('id')->on('rental_restrictions')->onDelete('cascade');
$table->foreign('lease_duration_id')->references('id')->on('lease_durations')->onDelete('cascade');
$table->foreign('location_id')->references('id')->on('locations')->onDelete('cascade');
$table->foreign('provider_id')->references('id')->on('providers')->onDelete('cascade');
});
// Units
Schema::create('units', function(Blueprint $table) {
$table->increments('id');
$table->unsignedInteger('property_id')->index()->nullable();
$table->string('foreign_id')->nullable();
$table->enum('type', ['unit', 'floor_plan'])->nullable();
$table->string('name')->nullable(); // a name
$table->string('apt_unit_ste')->nullable(); // a separate field for the apartment, unit or whatever
$table->integer('total_units')->nullable();
$table->integer('available_units')->nullable();
$table->integer('beds')->index()->nullable();
$table->decimal('baths', 4, 1)->index()->nullable();
$table->integer('sqft')->index()->index()->nullable();
$table->decimal('security_deposit', 10, 2)->nullable();
$table->decimal('price_min', 10, 2)->index()->nullable();
$table->decimal('price_max', 10, 2)->index()->nullable();
$table->decimal('pets_fee', 10, 2)->nullable();
$table->boolean('pets')->default(false);
$table->text('pets_info')->nullable();
$table->text('tags')->nullable(); // for storing custom tags, amenities or keywords
$table->timestamp('available_at')->nullable();
$table->timestamps();
$table->softDeletes();
$table->foreign('property_id')->references('id')->on('properties')->onDelete('cascade');
});
// Contacts
Schema::create('contacts', function(Blueprint $table) {
$table->increments('id');
$table->unsignedInteger('property_id')->nullable()->index();
$table->enum('owner', ['company', 'owner', 'tenant'])->nullable()->index();
$table->enum('method', ['email', 'api', 'redirect'])->nullable();
$table->string('url')->nullable();
$table->string('email_to')->nullable();
$table->string('email_cc')->nullable();
$table->string('phone')->nullable();
$table->timestamps();
$table->foreign('property_id')->references('id')->on('properties')->onDelete('cascade');
});
}
/**
* Down
*/
public function down()
{
Schema::dropIfExists('providers');
Schema::dropIfExists('property_types');
Schema::dropIfExists('rental_types');
Schema::dropIfExists('rental_restrictions');
Schema::dropIfExists('lease_durations');
Schema::dropIfExists('locations');
Schema::dropIfExists('properties');
Schema::dropIfExists('units');
Schema::dropIfExists('contacts');
}
}
<file_sep><?php
/**
* Created by PhpStorm.
* User: gorankrgovic
* Date: 9/10/18
* Time: 9:50 AM
*/
return [
// Seeder data - default
'seeder' => [
/**
* Default providers
*/
'providers' => [
[
'name' => 'Rentbits',
'slug' => 'rentbits'
],
[
'name' => 'ApartmentList',
'slug' => 'apartmentlist'
],
[
'name' => 'Zumper',
'slug' => 'zumper'
],
[
'name' => 'RentLingo',
'slug' => 'rentlingo'
]
],
/**
* Property type seeder data
*/
'property_type' => [
[
'name' => 'Apartment',
'slug' => 'apartments'
],
[
'name' => 'Condo',
'slug' => 'condos',
],
[
'name' => 'House',
'slug' => 'houses'
],
[
'name' => 'Townhouse',
'slug' => 'townhouses'
],
[
'name' => 'Duplex',
'slug' => 'duplexes'
]
],
/**
* Rental type seeder data
*/
'rental_type' => [
[
'name' => 'Regular',
'slug' => 'regular'
],
[
'name' => 'Room for rent',
'slug' => 'room-for-rent',
],
[
'name' => 'Sublet',
'slug' => 'sublet'
],
[
'name' => 'Corporate',
'slug' => 'corporate'
]
],
/**
* Rental restrictions seeder data
*/
'rental_restriction' => [
[
'name' => 'No restrictions',
'slug' => 'no-restrictions'
],
[
'name' => 'Senior housing',
'slug' => 'senior-housing',
],
[
'name' => 'Student housing',
'slug' => 'student-housing'
],
[
'name' => 'Military housing',
'slug' => 'military-housing'
],
[
'name' => 'Income restricted',
'slug' => 'income-restricted'
]
],
/**
* Lease duration data
*/
'lease_duration' => [
[
'name' => 'Short term',
'slug' => 'short-term'
],
[
'name' => 'Long term',
'slug' => 'long-term'
],
[
'name' => 'Flexible',
'slug' => 'flexible'
],
[
'name' => 'Rent to own',
'slug' => 'rent-to-own'
]
],
]
];
<file_sep><?php
namespace RentalManager\Main\Models;
use Illuminate\Database\Eloquent\Model;
use Illuminate\Database\Eloquent\SoftDeletes;
use RentalManager\Main\Traits\RMPropertyTrait;
/**
* Created by PhpStorm.
* User: gorankrgovic
* Date: 9/10/18
* Time: 9:57 AM
*/
class RMProperty extends Model
{
use SoftDeletes, RMPropertyTrait;
/**
* The database table used by the model.
*
* @var string
*/
protected $table;
/**
* Model constructor.
*
* @param array $attributes
*/
public function __construct(array $attributes = [])
{
parent::__construct($attributes);
// Set the table
$this->table = 'properties';
// Set the dates
$this->dates = [
'deleted_at'
];
}
}
<file_sep><?php
namespace RentalManager\Main\Common;
use Illuminate\Support\Str;
/**
* Created by PhpStorm.
* User: gorankrgovic
* Date: 9/10/18
* Time: 10:38 AM
*/
trait ModelMethods
{
/**
* Alias to eloquent associate() method
*
* @param string $relationship
* @param mixed $object
* @return static
*/
private function associateModel($relationship, $object, $singular = false)
{
if ( $singular )
{
// In associate there is just a singular method
$relationship = Str::singular($relationship);
}
$this->$relationship()->associate($object);
return $this;
}
/**
* Alias to eloquent dissociate method
*
* @param $relationship
* @param bool $singular
* @return $this
*/
private function dissociateModel($relationship, $singular = false)
{
if ( $singular )
{
// In associate there is just a singular method
$relationship = Str::singular($relationship);
}
$this->$relationship()->dissociate();
return $this;
}
}
<file_sep><?php
namespace RentalManager\Main\Models;
use Illuminate\Database\Eloquent\Model;
use RentalManager\Main\Traits\RMRentalRestrictionTrait;
/**
* Created by PhpStorm.
* User: gorankrgovic
* Date: 9/10/18
* Time: 9:56 AM
*/
class RMRentalRestriction extends Model
{
use RMRentalRestrictionTrait;
/**
* The database table used by the model.
*
* @var string
*/
protected $table;
/**
* Model constructor.
*
* @param array $attributes
*/
public function __construct(array $attributes = [])
{
parent::__construct($attributes);
// Set the table
$this->table = 'rental_restrictions';
}
}
| a18e7a9b3e6b2f297dfbb681469cca2b30c3b4f5 | [
"Markdown",
"PHP"
] | 15 | PHP | rentalmanager/main | 98a00fcec917ffd09558af6b192d921145fc3942 | 86cb2294d9ec5c153cecda2027cfe4839da3b6dc |
refs/heads/main | <repo_name>KSZV640/Proyecto-Heladeria<file_sep>/BL.Heladeria/ClienteBL.cs
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data.Entity;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace BL.Heladeria
{
/// <summary>
/// Business-logic layer for client records. Keeps an in-memory
/// BindingList so WinForms grids bound to it refresh automatically.
/// NOTE(review): returns the Resultado type declared elsewhere in the
/// project while this file defines an unused Resultado2 — confirm which
/// type is intended.
/// </summary>
public class ClienteBL
{
    // Live backing store shared with the UI through ObtenerCliente().
    BindingList<Cliente> ListaCliente { get; set; }

    public ClienteBL()
    {
        ListaCliente = new BindingList<Cliente>();
    }

    /// <summary>
    /// Returns the bindable client list (a live reference, not a copy).
    /// </summary>
    public BindingList<Cliente> ObtenerCliente()
    {
        return ListaCliente;
    }

    /// <summary>
    /// Validates the client and, for new clients (id == 0), assigns the
    /// next sequential id. Returns the validation outcome.
    /// </summary>
    public Resultado GuardarCliente(Cliente cliente)
    {
        var resultado2 = Validar(cliente);
        if (resultado2.Exitoso == false)
        {
            return resultado2;
        }
        if (cliente.id == 0)
        {
            // BUGFIX: Enumerable.Max throws InvalidOperationException on
            // an empty sequence; start ids at 1 when no clients exist yet.
            cliente.id = ListaCliente.Count == 0
                ? 1
                : ListaCliente.Max(item => item.id) + 1;
        }
        resultado2.Exitoso = true;
        return resultado2;
    }

    /// <summary>
    /// Appends a blank client row; the UI edits it in place via binding.
    /// </summary>
    public void AgregarCliente()
    {
        var nuevoCliente = new Cliente();
        ListaCliente.Add(nuevoCliente);
    }

    /// <summary>
    /// Removes the client with the given id; returns false when not found.
    /// </summary>
    public bool EliminarCliente(int id)
    {
        // FirstOrDefault avoids removing from the collection while a
        // foreach is still iterating it.
        var cliente = ListaCliente.FirstOrDefault(item => item.id == id);
        if (cliente == null)
        {
            return false;
        }
        ListaCliente.Remove(cliente);
        return true;
    }

    // Basic field validation; only the name is mandatory.
    private Resultado Validar(Cliente cliente)
    {
        var resultado = new Resultado();
        resultado.Exitoso = true;
        if (string.IsNullOrEmpty(cliente.Nombre) == true)
        {
            resultado.Mensaje = "Ingrese un nombre";
            resultado.Exitoso = false;
        }
        return resultado;
    }
}
/// <summary>
/// Plain data holder for one client record; bound to the clients grid
/// in FormClientes through a BindingList.
/// </summary>
public class Cliente
{
// Primary key; 0 marks an unsaved record (ClienteBL assigns the next id on save).
public int id { get; set; }
// Display name; the only field ClienteBL.Validar requires.
public string Nombre { get; set; }
// Contact phone number (free-form text, not validated here).
public string Tel { get; set; }
// Contact e-mail address (not validated here).
public string Email { get; set; }
// Active flag shown as a checkbox in the UI.
public bool Activo { get; set; }
}
/// <summary>
/// Outcome of an operation: success flag plus an optional message.
/// NOTE(review): appears unused in this file — ClienteBL returns the
/// Resultado type declared elsewhere; confirm whether this duplicate
/// is still needed.
/// </summary>
public class Resultado2
{
// True when the operation succeeded.
public bool Exitoso { get; set; }
// Human-readable detail, typically a validation error to show the user.
public string Mensaje { get; set; }
}
}
<file_sep>/heladeria/FormMenu.cs
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace heladeria
{
/// <summary>
/// Main MDI container: hosts the application's child forms and shows
/// the login dialog on startup.
/// </summary>
public partial class FormMenu : Form
{
    public FormMenu()
    {
        InitializeComponent();
    }

    /// <summary>
    /// Instantiates a child form of type T, parents it to this MDI
    /// container and shows it non-modally.
    /// </summary>
    private void ShowMdiChild<T>() where T : Form, new()
    {
        var child = new T();
        child.MdiParent = this;
        child.Show();
    }

    // Shows the login dialog modally; called at startup and on demand.
    private void login()
    {
        var formLogin = new FormLogin();
        formLogin.ShowDialog();
    }

    private void FormMenu_Load(object sender, EventArgs e)
    {
        // Require authentication as soon as the main window opens.
        login();
    }

    private void loginToolStripMenuItem_Click(object sender, EventArgs e)
    {
        login();
    }

    private void productosToolStripMenuItem_Click(object sender, EventArgs e)
    {
        ShowMdiChild<FormProductos>();
    }

    private void reporteDeVentasToolStripMenuItem_Click(object sender, EventArgs e)
    {
        ShowMdiChild<FormRVentas>();
    }

    private void reporteDeClientesToolStripMenuItem_Click(object sender, EventArgs e)
    {
        ShowMdiChild<FormClientes>();
    }

    private void facturaDeudoresToolStripMenuItem_Click(object sender, EventArgs e)
    {
        ShowMdiChild<FormVentas>();
    }

    private void reporteDeEfectivoToolStripMenuItem_Click(object sender, EventArgs e)
    {
        ShowMdiChild<FormREfectivo>();
    }

    private void facturaToolStripMenuItem_Click(object sender, EventArgs e)
    {
        ShowMdiChild<FormFactura>();
    }

    // The handlers below are wired up by the designer but currently do
    // nothing; kept empty so the designer event bindings stay valid.
    private void accesoRapidoToolStripMenuItem_Click(object sender, EventArgs e)
    {
    }

    private void almacenToolStripMenuItem_Click(object sender, EventArgs e)
    {
    }

    private void heladeriaToolStripMenuItem_Click(object sender, EventArgs e)
    {
    }
}
}
<file_sep>/heladeria/FormFactura.cs
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace heladeria
{
/// <summary>
/// Invoice (factura) child form. Currently only initializes its
/// designer-generated components; every event handler below is a
/// designer-wired stub awaiting implementation.
/// </summary>
public partial class FormFactura : Form
{
public FormFactura()
{
InitializeComponent();
}
// Designer-wired stub; intentionally empty for now.
private void activoCheckBox_CheckedChanged(object sender, EventArgs e)
{
}
// Designer-wired stub; intentionally empty for now.
private void activoLabel_Click(object sender, EventArgs e)
{
}
// Designer-wired stub; intentionally empty for now.
private void FormFactura_Load(object sender, EventArgs e)
{
}
// Designer-wired stub; intentionally empty for now.
private void idTextBox_TextChanged(object sender, EventArgs e)
{
}
// Designer-wired stub; intentionally empty for now.
private void idLabel_Click(object sender, EventArgs e)
{
}
}
}
<file_sep>/BL.Heladeria/DatosdeInicio.cs
using System;
using System.Collections.Generic;
using System.Data.Entity;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using static BL.Heladeria.SeguridadBL;
namespace BL.Heladeria
{
/// <summary>
/// Entity Framework database initializer: seeds the default admin user,
/// the product categories and the product types the first time the
/// database is created.
/// </summary>
public class DatosdeInicio : CreateDatabaseIfNotExists<Contexto>
{
    protected override void Seed(Contexto contexto)
    {
        // NOTE(review): hard-coded default credentials — change the
        // admin password after the first deployment.
        var usuarioAdmin = new Usuario();
        usuarioAdmin.Nombre = "admin";
        usuarioAdmin.Contrasena = "123";
        contexto.Usuarios.Add(usuarioAdmin);

        // One Categoria row per name; this loop replaces the twelve
        // near-identical creation blocks of the previous version
        // (insertion order preserved).
        string[] categorias =
        {
            "Hogar", "Cocina", "Frutas", "Verduras", "Lacteos", "Bebidas",
            "Granos", "Limpieza", "Carnes", "Despensa", "Mascotas", "Pan"
        };
        foreach (var descripcion in categorias)
        {
            var categoria = new Categoria();
            categoria.Descripcion = descripcion;
            contexto.Categorias.Add(categoria);
        }

        // Product origin types.
        string[] tipos = { "Producto Nacional", "Producto Extranjero" };
        foreach (var descripcion in tipos)
        {
            var tipo = new Tipo();
            tipo.Descripcion = descripcion;
            contexto.Tipos.Add(tipo);
        }

        base.Seed(contexto);
    }
}
}
<file_sep>/heladeria/FormClientes.cs
using BL.Heladeria;
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace heladeria
{
// CRUD screen for clients, driven by a BindingNavigator over
// listaClienteBindingSource (bound to the ClienteBL business layer).
public partial class FormClientes : Form
{
    // Business-logic facade for client persistence.
    ClienteBL _clientes;

    public FormClientes()
    {
        InitializeComponent();
        _clientes = new ClienteBL();
        // Load every client into the navigator's binding source.
        listaClienteBindingSource.DataSource = _clientes.ObtenerCliente();
    }

    // Empty designer stub kept because the .Designer.cs file wires it up.
    private void activoCheckBox_CheckedChanged(object sender, EventArgs e)
    {
    }

    // Save button: commits the row currently being edited.
    private void clienteBindingNavigatorSaveItem_Click(object sender, EventArgs e)
    {
        listaClienteBindingSource.EndEdit();
        var cliente = (Cliente) listaClienteBindingSource.Current;
        var resultado2 = _clientes.GuardarCliente(cliente);
        if (resultado2.Exitoso == true)
        {
            // Refresh the bound controls and leave add/edit mode.
            listaClienteBindingSource.ResetBindings(false);
            DeshabilitarHabilitarBotones(true);
        }
        else
        {
            MessageBox.Show(resultado2.Mensaje);
        }
    }

    // "+" button: starts a new client row and switches the UI to add mode.
    private void bindingNavigatorAddNewItem_Click(object sender, EventArgs e)
    {
        _clientes.AgregarCliente();
        listaClienteBindingSource.MoveLast();
        DeshabilitarHabilitarBotones(false);
    }

    // valor == true: normal browsing; valor == false: add/edit mode where the
    // navigator is locked and only Save/Cancel remain usable.
    private void DeshabilitarHabilitarBotones(bool valor)
    {
        bindingNavigatorMoveFirstItem.Enabled = valor;
        bindingNavigatorMoveLastItem.Enabled = valor;
        bindingNavigatorMovePreviousItem.Enabled = valor;
        bindingNavigatorMoveNextItem.Enabled = valor;
        bindingNavigatorPositionItem.Enabled = valor;
        bindingNavigatorAddNewItem.Enabled = valor;
        bindingNavigatorDeleteItem.Enabled = valor;
        toolStripButton1Cancelar.Visible = !valor;
    }

    // Delete button: asks for confirmation before removing the current row.
    private void bindingNavigatorDeleteItem_Click(object sender, EventArgs e)
    {
        if (idTextBox.Text != "")
        {
            var resultado2 = MessageBox.Show("Desea eliminar este registro?", "Eliminar", MessageBoxButtons.YesNo);
            if (resultado2 == DialogResult.Yes)
            {
                var id = Convert.ToInt32(idTextBox.Text);
                Eliminar(id);
            }
        }
    }

    // Removes the client with the given id via the business layer and
    // restores browsing mode on success.
    private void Eliminar(int id)
    {
        var resultado = _clientes.EliminarCliente(id);
        if (resultado == true)
        {
            listaClienteBindingSource.ResetBindings(false);
            DeshabilitarHabilitarBotones(true);
        }
        else
        {
            MessageBox.Show("Ocurrio un error al eliminar el cliente");
        }
    }

    // Cancel while adding: restore browsing mode.
    // NOTE(review): Eliminar(0) presumably discards the not-yet-saved row, but
    // it will show the delete-error dialog whenever EliminarCliente(0) returns
    // false - confirm the intended behaviour against ClienteBL.
    private void toolStripButton1Cancelar_Click(object sender, EventArgs e)
    {
        DeshabilitarHabilitarBotones(true);
        Eliminar(0);
    }
}
}
| 60e40e3aea8006c8d3a6fc48d98269e784e6369d | [
"C#"
] | 5 | C# | KSZV640/Proyecto-Heladeria | cb0def74234cc08fc28d3a687efcfb8867ab613a | 03d121354d1078611d2f364fcaf70f3215da517b |
refs/heads/master | <repo_name>areThereAnyUserNamesLeft/microservices-task<file_sep>/PROGRESS.md
# My Progress - <NAME>
I am choosing to use Go for the backend as it seems a good choice bearing in mind the JD - I can't say I have built a microservice aside from cloud functions on AWS Lambda and DynamoDB, where a lot of the work is done for you, so I am enjoying this as a challenge.
## TL;DR
I did not finish 100%, in fact I finished the API and packaged it as a Microsevice. I had a plan to use a MySQL DB as the storage which sucked up a lot of time and really was not nesessary for the small scale of the task.
In the end I implemented a nice little protocol that writes to a CSV and altered it 5 different ways to accomedate the different API calls.
There is a Task breakdown below followed by a rough time line I put together to "show my workings" - I Really have tried to be honest as I can and not try to hide any bad decissions I made as I think it is only fair to show you my process warts and all.
I'd like to think I'd have finished the task had I not spent 11:00-16:30 trying to implement the DB - live and learn, I guess.
### Task breakdown & initial thoughts
1. build MS
Ideas
- **Restful or gRPC?** Restful as I am only just getting to grips with gRPC and would not want to spoil an oppertunity just to be showing off that I think know something fancy better than I really do.
Use suggested endpoints - I'll use these as they'll tick the boxes:
- [GET] /users - Returns a list of user ids
- [GET] /users/{id} - returns a users specific details
- [POST] /users - Returns a list of user ids
- [POST] /users/{id} - Returns a users specific details
2. Dockerize
- Ubuntu?
- MySQL
- GO?
- ...
3. Build UI
- Not the crocodile closest to the boat right now
##### Where I can I'll cut corners for speed and not introduce complication of writing from scratch?
###### ~0930 Planning / Research
Borrowed / Repurposed API code from [http://himarsh.org/build-restful-api-microservice-with-go/](http://himarsh.org/build-restful-api-microservice-with-go/) as a boilerplate for a RestAPI and Tests. It is a lso kind of documented which is nice. This pretty much covers the backend of the project brief and on closer inspection has a Dockerfile ready and waiting in the accompanying repo. Taking the pressure off my writing code and allowing me to just refactor and repurpose what is here into a useable API for my ###### ~2350 Delete working now for updatenafarious purposes (Mwa ha ha ha ha...).
###### ~1100 Copy/Paste/Refactor - DB Choices
Code refactored and all seems to be running. The example I chose to use has a MySQL DB running locally and serving on port :3306 (as usual) - I guess I need to make that now.
- **~~Plan Ai)~~**
I've not used MySQL in a little while having moved to Arch Linux, who as a community use MariaDB over MySQL - see [https://wiki.archlinux.org/index.php/MySQL](https://wiki.archlinux.org/index.php/MySQL). This creates a blocker, so I've got some work to do here to make sure it is working locally.
- **Plan Aii)**
Look at dockerized versions of MySQL - probably should have used this as my first approch *there is a lesson to be learnt here*...
- **Plan B)**
If it seems like a non starter by around Lunchtime, I'll look to remove the MySQL DB from the boilerplate and replace it with text file or Json file as storage to allow me to look at other features - this also has an advantage of not requiring Sipsynergy to configure a DB for looking at the project.
###### ~1200 Choice Made!
**Choice made** - I downloaded a copy of MySQL from the AUR (Arch user repo) [https://aur.archlinux.org/packages/mysql/](https://aur.archlinux.org/packages/mysql/) but it seems like it will need an unklnown amount of troubleshooting around chroot environments and permissions - in short a potential time suck.
Considering dockerized versions of MySQL [https://hub.docker.com/_/mysql/](https://hub.docker.com/_/mysql/) - Hitting some issues with Docker now so time for a system reboot...
###### ~1230 Docker nightmare (turned out to be MySql versioning)
**Dockers working now!**
For my reference-
Launching with:
`sudo docker run --name userAPI-mysql -e MYSQL_ROOT_PASSWORD=<PASSWORD> -d mysql:8`
Container number: `659d98a83fb2703d461db02be49a0ea782ff146a1349e9d13fe93fe9d67bbb8c`
Name:`userAPI-mysql`
MySQL version: `8`
password: `<PASSWORD>`
CLI connect command:
`docker run -it --link userAPI-mysql:mysql --rm mysql sh -c 'exec mysql -h"$MYSQL_PORT_3306_TCP_ADDR" -P"$MYSQL_PORT_3306_TCP_PORT" -uroot -p"$MYSQL_ENV_MYSQL_ROOT_PASSWORD"'``
###### ~1310 LunchTime - One last try!
Before admitting defeat and turning to plan B as MySQL is throwing an error. I am going to create a fresh image with an older version of MySQL as 8 seems to have a new plugin(!?! - latest is not always best) and throws this error...
`ERROR 2059 (HY000): Authentication plugin 'caching_sha2_password' cannot be loaded: /usr/lib/mysql/plugin/caching_sha2_password.so: cannot open shared object file: No such file or directory`
Launched command:
`sudo docker run --name microservice-mysql -e MYSQL_ROOT_PASSWORD=<PASSWORD> -d mysql:5.7`
Containeer Number:`dcdfac0fe16a7b2608ba7c7ffd8d5bc4165e0faf1047cb17edc9ed3ddae36bf2`
Name:`microservice-mysql`
MySQL version: `5.7`
password: `<PASSWORD>`
CLI connect command:
`docker run -it --link microservice-mysql:mysql --rm mysql sh -c 'exec mysql -h"$MYSQL_PORT_3306_TCP_ADDR" -P"$MYSQL_PORT_3306_TCP_PORT" -uroot -p"$MYSQL_ENV_MYSQL_ROOT_PASSWORD"'``
###### ~1320 Success
**I'm in to mySQL CLI** - *Lesson here - MySQL 8 needs a plugin to avoid SHA2 password encryption error and docker hub documentation out of date currently around this!*
###### ~1330 Database sculpting
### DB build:
I am going to make only one table with three columns as below
### Table: USERS
|columns name|id|user_name|user_status|date|
|-|---|----------|------------|-|
|**Data type**|*int64=>BIGINT*|*string=>varchar(100) |string=>varchar(40) (I considered a bool but may want more options so will make it a str)|date=>date|
Manual commands - I might get to putting these isn a dockerfile but right now I'll put them here and add them to a config at the end...
`CREATE DATABASE USERS`
`create table users(id BIGINT NOT NULL AUTO_INCREMENT, user_name VARCHAR(100) NOT NULL, user_status VARCHAR(40), date DATE, PRIMARY KEY (id));
`
`insert into USERS (user_name, user_status,date) VALUES ("<NAME>", "Present", NOW());
`
` select * from users;`
<pre>
+----+--------------+-------------+------------+
| id | user_name | user_status | date |
+----+--------------+-------------+------------+
| 1 | <NAME> | Present | 2018-03-22 |
+----+--------------+-------------+------------+
</pre>
###### ~1400 Connecting stuff and E2E API testing
Run: `docker ps` and not container id
Then: `docker inspect container_id | grep IPAddress`
###### ~1500 Troubleshooting - So close
Tests are failing to get to the DB with connection refused
I have commented out the lines that try to create the DB in the db.go file so I get more meaning ful messages from the tests.
- GET tests = 404s
- POST test = 200
- PUT test = 404
Curl comes back with the same...
`docker `
<code>HTTP/1.1 404 Not Found
Content-Type: application/json; charset=utf-8
Date: Thu, 22 Mar 2018 15:24:53 GMT
Content-Length: 37
{"error":"no user(s) into the table"}</code>
2 ideas -
1. Move on to dockerizing the Go app to link the application within containers. I believe that the connection could be refused due to the docker MySQL container not being given express permission.
2. Keep trying to figure it out via Google searches
I am going to choose option 1. as it is aligned with where I am heading anyway and if it seems more likely to be the issue.
building dockerfile went well
Now first a bit of a brain break.
###### ~1630 Off to the Kids Martial Arts Class

###### ~1830 Back from the Kids Martial Arts Class
Give permissions for app to speak to DB
`sudo docker run --name microservices-task --link microservice-mysql:mysql -d microservices-task`
Then `ran docker inspect...`
and got <code> "SecondaryIPAddresses": null,
"IPAddress": "",
"IPAddress": "",
</code> So it looks like I need to rebuild with a valid IP exposed rather than just a port!
Looked at `docker logs xxx`
and see my old friend
`Create table failed dial tcp 172.17.0.4:3306: connect: connection refused`
The program is running but does not reach the DB
###### ~1930 Is it time for **plan B)**? - *kick MySQL to the kerb*
I'd love to run MySQL as part of this but the brief does not require it and I have already spent 1/2 a day trying to get a good connection - It is not worth it bearing in mind the small scall of the DB needed - I am sad to say -I am going to build a flat file store and replace the DB connection, ***Really, Really big lesson I have learnt here*** but conversely I have built the docker container and it is working which is something worth while.
###### ~2015 Back on target - kicking myself for not admitting failiure earlier
Running =>
`curl -i http://localhost:8080/api/v1/users`
Returns =>
<pre>
`[{"id":"1","user_name":" <NAME>","user_status":" Present","date":" 22-03-2018"}]`
</pre>
###### ~2040 two down...
Now both get requests are behaving
###### ~2150 Post request working - now UPDATE
Made a delete function while working on update
###### ~2230 Delete working now for update
OK finally managed to do some coding and the API is working flawlessly
Although I think the update function will allow a user to update with blank values.
###### ~2350 Delete working now for update
Spent some time building the docker image to realise I have not compiled the main.go - and test was failing where it was still looking for the DB - *Rookie mistake*
###### ~0010 Done!
Docker image working instructions at the bottom
#### Have a play --- Local host examples
If you `go run main.go` you can have a play using the following curls
- `curl -i http://localhost:8080/api/v1/users`
- Should give a list of users in Json...
- `curl -i http://localhost:8080/api/v1/users/1`
- Should just give you one user...
- `curl -i -X POST -H "Content-Type: application/json" -d "{ \"user_status\": \"No longer potus\", \"user_name\": \"<NAME>\", "date": "04-11-2020" }" http://localhost:8080/api/v1/users`
- Will create a new user
- `curl -i -X PUT -H "Content-Type: application/json" -d "{ \"user_status\": \"Potus\", \"user_name\": \"<NAME>\", \"date\": \"01-04-2018\" }" http://localhost:8080/api/v1/users/3
`
- Will update all of the fields
- `curl -i -X DELETE http://localhost:8080/api/v1/users/3`
- Will remove the person at id number 3
You'll see the affects in the users.csv file which took the place of the DB as each of these are called.
### Docker instructions
Run
=>`docker build -t microservice-test .`
then
=> `docker run -P microservice-test`
then `docker ps`
Grab the `<Container-ID\>`
then
`docker inspect <Container-ID> | grep IPAddress`
grab the `<\IP-Address\>`
Then
`curl -i <\IP-Address\>:8080/api/v1/users`
just replace Localhost in the 'go run ...' examples with the `<\IP-Address\>`
<pre>
curl -i <\IP-Address\>:8080/api/v1/users
</pre>
- Should give a list of users in Json...
<pre>
curl -i <\IP-Address\>:8080/api/v1/users/1
</pre>
- Should just give you one user...
<pre>
curl -i -X POST -H "Content-Type: application/json" -d "{ \"user_status\": \"No longer potus\", \"user_name\": \"<NAME>\", "date": "04-11-2020" }" <\IP-Address\>:8080/api/v1/users
</pre>
- Will create a new user
<pre>
curl -i -X PUT -H "Content-Type: application/json" -d "{ \"user_status\": \"Potus\", \"user_name\": \"<NAME>\", \"date\": \"01-04-2018\" }" <\IP-Address\>:8080/api/v1/users/3
</pre>
- Will update all of the fields
<pre>
curl -i -X DELETE <\IP-Address\>:8080/api/v1/users/3
</pre>
- Will remove the person at id number 3
## I think that about covers it...
Thanks for the opportunity to do this - I have enjoyed it and learnt some lessons.
Sorry for the typos and I hope to speak in the future...
### Post script
I ended up finishing the app with a frontend built in Vue -
If you start the server *locally* and then run `npm run dev` in the `frontend/vueapp01` folder and then visit [localhost:8080](localhost:8080)
You'll see an interface to allow you to Create, Read and Delete Users.
<file_sep>/main.go
package main
import (
"microservice-task/app"
"time"
"github.com/gin-gonic/gin"
"github.com/itsjamie/gin-cors"
)
// SetupRouter builds the gin engine used both by main() and by the tests:
// a permissive CORS layer plus the /api/v1 user CRUD routes.
func SetupRouter() *gin.Engine {
	router := gin.Default()
	// Apply the CORS middleware to the whole router (it works with groups
	// too): any origin may call the four CRUD verbs.
	router.Use(cors.Middleware(cors.Config{
		Origins:         "*",
		Methods:         "GET, PUT, POST, DELETE",
		RequestHeaders:  "Origin, Authorization, Content-Type",
		ExposedHeaders:  "",
		MaxAge:          50 * time.Second,
		Credentials:     true,
		ValidateHeaders: false,
	}))
	// Versioned API group; the handlers live in the app package.
	v1 := router.Group("api/v1")
	{
		v1.GET("/users", app.GetUsers)
		v1.GET("/users/:id", app.GetUser)
		v1.POST("/users", app.PostUser)
		v1.PUT("/users/:id", app.UpdateUser)
		v1.DELETE("/users/:id", app.DeleteUser)
	}
	return router
}
// main wires the router together and serves HTTP on :8090 (blocks forever).
func main() {
	SetupRouter().Run(":8090")
}
<file_sep>/main_test.go
package main
import (
"bytes"
"net/http"
"net/http/httptest"
"testing"
"github.com/gin-gonic/gin"
)
// TestGetUser checks that GET /api/v1/users/:id answers HTTP 200.
func TestGetUser(t *testing.T) {
	gin.SetMode(gin.TestMode)
	testRouter := SetupRouter()
	req, err := http.NewRequest("GET", "/api/v1/users/1", nil)
	if err != nil {
		// Fatalf (not Errorf): with a nil request ServeHTTP would panic.
		t.Fatalf("building GET /api/v1/users/1 request failed: %v", err)
	}
	resp := httptest.NewRecorder()
	testRouter.ServeHTTP(resp, req)
	if resp.Code != 200 {
		t.Errorf("GET /api/v1/users/1 failed with status code %d.", resp.Code)
	}
}
// TestGetUsers checks that GET /api/v1/users answers HTTP 200.
func TestGetUsers(t *testing.T) {
	gin.SetMode(gin.TestMode)
	testRouter := SetupRouter()
	req, err := http.NewRequest("GET", "/api/v1/users", nil)
	if err != nil {
		// Fatalf (not Errorf): with a nil request ServeHTTP would panic.
		t.Fatalf("building GET /api/v1/users request failed: %v", err)
	}
	resp := httptest.NewRecorder()
	testRouter.ServeHTTP(resp, req)
	if resp.Code != 200 {
		t.Errorf("GET /api/v1/users failed with status code %d.", resp.Code)
	}
}
// TestPostUser checks that POST /api/v1/users creates a user (HTTP 201).
func TestPostUser(t *testing.T) {
	gin.SetMode(gin.TestMode)
	testRouter := SetupRouter()
	body := bytes.NewBuffer([]byte("{\"user_status\": \"83\", \"user_name\": \"100\"}"))
	req, err := http.NewRequest("POST", "/api/v1/users", body)
	if err != nil {
		// Check the error BEFORE touching req: the original set a header
		// first, which dereferences a nil request when NewRequest fails.
		t.Fatalf("building POST /api/v1/users request failed: %v", err)
	}
	req.Header.Set("Content-Type", "application/json")
	resp := httptest.NewRecorder()
	testRouter.ServeHTTP(resp, req)
	if resp.Code != 201 {
		t.Errorf("POST /api/v1/users failed with status code %d.", resp.Code)
	}
}
// TestPutUser checks that PUT /api/v1/users/:id updates a user (HTTP 200).
func TestPutUser(t *testing.T) {
	gin.SetMode(gin.TestMode)
	testRouter := SetupRouter()
	body := bytes.NewBuffer([]byte("{\"user_status\": \"83\", \"user_name\": \"100\"}"))
	req, err := http.NewRequest("PUT", "/api/v1/users/1", body)
	if err != nil {
		// Check the error BEFORE touching req: the original set a header
		// first, which dereferences a nil request when NewRequest fails.
		t.Fatalf("building PUT /api/v1/users/1 request failed: %v", err)
	}
	req.Header.Set("Content-Type", "application/json")
	resp := httptest.NewRecorder()
	testRouter.ServeHTTP(resp, req)
	if resp.Code != 200 {
		t.Errorf("PUT /api/v1/users/1 failed with status code %d.", resp.Code)
	}
}
<file_sep>/app/app.go
package app
import (
"bufio"
"encoding/csv"
"io"
"log"
"os"
"strconv"
"strings"
"github.com/gin-gonic/gin"
// _ "github.com/go-sql-driver/mysql"
)
// User is one row of the CSV-backed user store. Id is the CSV line key and
// every field travels as a string, both in the file and in the JSON API.
type User struct {
	Id         string `json:"id"`
	UserName   string `json:"user_name"`
	UserStatus string `json:"user_status"`
	Date       string `json:"date"`
}
//var dbmap = initDb()
// GetUsers handles GET /api/v1/users: it reads users.csv and returns every
// row as a JSON array of User records.
func GetUsers(c *gin.Context) {
	csvFile, err := os.Open("users.csv")
	if err != nil {
		// Without the backing file there is nothing to list. The original
		// ignored this error and later panicked on the nil reader.
		c.JSON(404, gin.H{"error": "no user(s) into the table"})
		return
	}
	defer csvFile.Close()

	var users []User
	reader := csv.NewReader(bufio.NewReader(csvFile))
	for {
		line, err := reader.Read()
		if err == io.EOF {
			break
		} else if err != nil {
			// Malformed CSV row: report and stop instead of continuing the
			// loop and indexing a nil record (the original fell through).
			c.JSON(404, gin.H{"error": "user not found"})
			return
		}
		users = append(users, User{
			Id:         line[0],
			UserName:   line[1],
			UserStatus: line[2],
			Date:       line[3],
		})
	}
	c.JSON(200, users)
	// curl -i http://localhost:8080/api/v1/users
}
// GetUser handles GET /api/v1/users/:id: it scans users.csv for the row whose
// first column equals :id and returns it as JSON.
// NOTE(review): when no row matches, the original answered 200 with a
// zero-value User; that behaviour is preserved for API compatibility.
func GetUser(c *gin.Context) {
	id := c.Params.ByName("id")

	csvFile, err := os.Open("users.csv")
	if err != nil {
		// The original ignored this error and panicked on the nil reader.
		c.JSON(404, gin.H{"error": "user not found"})
		return
	}
	defer csvFile.Close()

	var user User
	reader := csv.NewReader(bufio.NewReader(csvFile))
	for {
		line, err := reader.Read()
		if err == io.EOF {
			break
		} else if err != nil {
			// Malformed CSV row: report and stop instead of indexing a nil
			// record (the original fell through).
			c.JSON(404, gin.H{"error": "user not found"})
			return
		}
		if strings.TrimRight(id, "\n") == line[0] {
			user = User{
				Id:         line[0],
				UserName:   line[1],
				UserStatus: line[2],
				Date:       line[3],
			}
		}
	}
	c.JSON(200, user)
	// curl -i http://localhost:8080/api/v1/users/1
}
// checkError aborts the whole process via log.Fatal when err is non-nil.
// NOTE(review): callers treat any file error as fatal to the server -
// confirm that killing the process is the intended failure mode.
func checkError(message string, err error) {
	if err != nil {
		log.Fatal(message, err)
	}
}
// increment allocates the next user id: it counts the lines of users.csv and
// returns count+1 as a decimal string (ids are tied to line positions, and
// DeleteUser keeps tombstone lines so the count never shrinks).
func increment() string {
	csvFile, err := os.Open("users.csv")
	if err != nil {
		// Fatal matches checkError's convention; the original ignored the
		// error and panicked on the nil reader instead.
		log.Fatal("Cannot open users file ", err)
	}
	defer csvFile.Close()

	reader := csv.NewReader(bufio.NewReader(csvFile))
	next := 0
	for {
		// Incremented before each Read, so the EOF pass makes this one past
		// the last data line - exactly the next free id.
		next++
		if _, err := reader.Read(); err == io.EOF {
			break
		} else if err != nil {
			log.Fatal(err)
		}
	}
	return strconv.Itoa(next)
}
// returnUsers loads every row of users.csv into memory; it is the shared
// read step for the rewrite-the-whole-file mutations (Post/Update/Delete).
func returnUsers() []User {
	csvFile, err := os.Open("users.csv")
	if err != nil {
		// The original ignored this error and panicked on the nil reader.
		log.Fatal("error: cannot open users file ", err)
	}
	defer csvFile.Close()

	var users []User
	reader := csv.NewReader(bufio.NewReader(csvFile))
	for {
		line, err := reader.Read()
		if err == io.EOF {
			break
		} else if err != nil {
			log.Fatal("error: user not found")
		}
		users = append(users, User{
			Id:         line[0],
			UserName:   line[1],
			UserStatus: line[2],
			Date:       line[3],
		})
	}
	return users
}
// PostUser handles POST /api/v1/users: binds the JSON body to a User, appends
// it (with a freshly allocated id) to users.csv and echoes it back with 201.
func PostUser(c *gin.Context) {
	var user User
	c.Bind(&user)

	// Reject records with empty mandatory fields before touching the file
	// (the original read the whole file first and validated afterwards).
	if user.UserStatus == "" || user.UserName == "" {
		c.JSON(422, gin.H{"error": "fields are empty"})
		return
	}

	// Rewrite the whole file: every existing row plus the new one. Both
	// reads happen before os.Create truncates the file.
	rows := [][]string{}
	for _, u := range returnUsers() {
		rows = append(rows, []string{u.Id, u.UserName, u.UserStatus, u.Date})
	}
	rows = append(rows, []string{increment(), user.UserName, user.UserStatus, user.Date})

	file, err := os.Create("users.csv")
	checkError("Cannot create file", err)
	defer file.Close()
	writer := csv.NewWriter(file)
	defer writer.Flush()
	for _, row := range rows {
		checkError("Cannot write to file", writer.Write(row))
	}
	c.JSON(201, user)
	// curl -i -X POST -H "Content-Type: application/json" -d "{ \"user_status\": \"Chasing Windmills\", \"user_name\": \"<NAME>\", "date": "01-07-1604" }" http://localhost:8080/api/v1/users
}
// UpdateUser handles PUT /api/v1/users/:id: rewrites users.csv with the row
// for :id replaced by the bound JSON body, then echoes the new values.
func UpdateUser(c *gin.Context) {
	id := c.Params.ByName("id")
	var user User
	c.Bind(&user)

	// NOTE(review): a single non-empty field is enough to pass validation, so
	// an update can blank the other columns (and PostUser requires BOTH name
	// and status) - confirm this asymmetry is intended before tightening it.
	if user.UserName == "" && user.UserStatus == "" && user.Date == "" {
		c.JSON(404, gin.H{"error": "user not found"})
		return
	}

	// Build the replacement image of the file in memory.
	replacement := []string{id, user.UserName, user.UserStatus, user.Date}
	rows := [][]string{}
	for _, u := range returnUsers() {
		if strings.TrimRight(id, "\n") == u.Id {
			rows = append(rows, replacement)
		} else {
			rows = append(rows, []string{u.Id, u.UserName, u.UserStatus, u.Date})
		}
	}

	file, err := os.Create("users.csv")
	checkError("Cannot create file", err)
	defer file.Close()
	writer := csv.NewWriter(file)
	defer writer.Flush()
	for _, row := range rows {
		checkError("Cannot write to file", writer.Write(row))
	}
	c.JSON(200, user)
	// curl -i -X PUT -H "Content-Type: application/json" -d "{ \"user_status\": \"Slaying Giants\", \"user_name\": \"<NAME>\", \"date\": \"03-07-1604\" }" http://localhost:8080/api/v1/users/3
}
// DeleteUser handles DELETE /api/v1/users/:id.
// The matched row is overwritten with an empty-field "tombstone" that keeps
// its id: the file's line count - which increment() uses to allocate new
// ids - must stay stable, otherwise freed ids would collide with later rows.
func DeleteUser(c *gin.Context) {
	id := c.Params.ByName("id")
	var user User // zero value: both the tombstone payload and the response body

	tombstone := []string{id, user.UserName, user.UserStatus, user.Date}
	rows := [][]string{}
	for _, u := range returnUsers() {
		if strings.TrimRight(id, "\n") == u.Id {
			rows = append(rows, tombstone)
		} else {
			rows = append(rows, []string{u.Id, u.UserName, u.UserStatus, u.Date})
		}
	}

	file, err := os.Create("users.csv")
	checkError("Cannot create file", err)
	defer file.Close()
	writer := csv.NewWriter(file)
	defer writer.Flush()
	for _, row := range rows {
		checkError("Cannot write to file", writer.Write(row))
	}
	c.JSON(200, user)
	// curl -i -X DELETE http://localhost:8080/api/v1/users/1
}
| 7efb58c506f2e6433834e649405d73c1bb5b7c40 | [
"Markdown",
"Go"
] | 4 | Markdown | areThereAnyUserNamesLeft/microservices-task | 4f1b842f4ab77a556aac5e5669b7517aff0abc70 | a72dd0538cab2601a04db390582c0cd9ecc24b78 |
refs/heads/master | <repo_name>raymondzh/ConnectFour<file_sep>/minimax.py
def minimax(val, depth, board):
    """Pick the best column index for player ``val`` (+1 or -1).

    Tries every legal move, scores the resulting position with the
    opposing evaluator (``max`` after a -1 move, ``min`` after a +1
    move) and keeps the move with the best score from ``val``'s point
    of view.  ``depth`` is the remaining search depth in plies.
    """
    moves = possible_moves(board)
    # Assumes at least one legal move exists - TODO confirm callers
    # never invoke this on a full board.
    best_move = moves[0]
    best_val = val * (-100)  # worst possible score for this player
    for curr_move in moves:
        new_board = board.copy()
        move(curr_move, val, new_board)
        if val == -1:
            curr_val = max(depth, new_board)
            if curr_val < best_val:  # minimizer keeps the lowest score
                best_val = curr_val
                best_move = curr_move
        else:
            curr_val = min(depth, new_board)
            if curr_val > best_val:  # maximizer keeps the highest score
                best_val = curr_val
                best_move = curr_move
    return best_move
def min(depth, board):
    """Score ``board`` for the minimizing player, searching ``depth`` plies.

    NOTE: intentionally shadows the builtin ``min`` - minimax() calls it
    by this name, so the name is kept.
    """
    # Bug fix: board_filled() was called without its board argument,
    # which raised TypeError as soon as this branch was reached.
    if depth == 0 or board_filled(board):
        return value(board)
    best_val = 100
    for curr_move in possible_moves(board):
        new_board = board.copy()
        move(curr_move, -1, new_board)
        curr_val = max(depth - 1, new_board)  # opponent (maximizer) replies
        if curr_val < best_val:
            best_val = curr_val
    return best_val
def max(depth, board):
    """Score ``board`` for the maximizing player, searching ``depth`` plies.

    NOTE: intentionally shadows the builtin ``max`` - minimax() calls it
    by this name, so the name is kept.
    """
    if depth == 0 or board_filled(board):
        return value(board)
    best_val = -100
    for curr_move in possible_moves(board):
        new_board = board.copy()
        move(curr_move, 1, new_board)
        # Bug fix: the reply belongs to the minimizer; the original
        # recursed into max() again, so the players never alternated.
        curr_val = min(depth - 1, new_board)
        if curr_val > best_val:
            best_val = curr_val
    return best_val
def possible_moves(board):
    """Return the playable column indices (0-6) of the 7x6 board.

    A column accepts a piece while its top cell (flat indices 0-6) is
    still empty.  The original scanned range(9) - a leftover from a 3x3
    tic-tac-toe board - and so also reported second-row cells 7 and 8
    as distinct moves.
    """
    return [col for col in range(7) if board[col] == 0]
def board_filled(board):
    """Return True when every cell of the board is occupied.

    The original only checked the first 9 cells (tic-tac-toe leftover);
    a Connect Four board has 42 cells, so a board with any empty cell
    from index 9 on was wrongly reported as full.
    """
    return all(cell != 0 for cell in board)
def over(board):
    """Return 4 if player +1 has four in a row, -4 for player -1, else 0.

    Cells live in a flat 42-element list, row-major: cell (x, y) is
    board[7 * y + x] with x in 0-6 (column) and y in 0-5 (row).
    """
    def cell(x, y):
        return board[7 * y + x]

    # Vertical runs: every column, start rows 0-2.
    for x in range(7):
        for y in range(3):
            total = sum(cell(x, y + k) for k in range(4))
            if total == 4 or total == -4:
                return total
    # Horizontal runs: start columns 0-3, every row.
    for x in range(4):
        for y in range(6):
            total = sum(cell(x + k, y) for k in range(4))
            if total == 4 or total == -4:
                return total
    # Diagonal runs, start rows 0-2.
    for x in range(4):
        for y in range(3):
            down_right = sum(cell(x + k, y + k) for k in range(4))
            # Bug fix: the down-left start column must track x (3..6);
            # the original hard-coded column 6 and missed the three
            # diagonals starting at columns 3, 4 and 5.
            down_left = sum(cell(x + 3 - k, y + k) for k in range(4))
            if abs(down_right) == 4:
                return down_right
            if abs(down_left) == 4:
                return down_left
    return 0
def value(board):
    # Static evaluation: only terminal wins score (+/-4 via over), else 0.
    return over(board)
def move(column, turn, board):
    """Drop a piece for ``turn`` (+1/-1) into ``column``, mutating ``board``.

    Starting from the column's top cell, the piece falls row by row
    (steps of 7 in the flat list) until the cell below is occupied or
    the bottom row (indices 35-41) is reached.  The redundant
    ``column = column`` self-assignment from the original is dropped.
    """
    while column + 7 < 42 and board[column + 7] == 0:
        column = column + 7
    board[column] = turn
def access(x, y, board):
    """Read the cell at column ``x``, row ``y`` of the flat 7x6 board."""
    index = 7 * y + x
    return board[index]
<file_sep>/main.py
from time import sleep
import minimax as mm
def main():
    """Interactive game loop: human (X, +1) against the minimax AI (0, -1)."""
    # Flat 7x6 board, row-major; 0 = empty, 1 = human, -1 = computer.
    board = [0, 0, 0, 0, 0, 0, 0,
             0, 0, 0, 0, 0, 0, 0,
             0, 0, 0, 0, 0, 0, 0,
             0, 0, 0, 0, 0, 0, 0,
             0, 0, 0, 0, 0, 0, 0,
             0, 0, 0, 0, 0, 0, 0, ]
    display(board);
    # Play until the board fills up or somebody connects four.
    while not mm.board_filled(board) and mm.over(board) == 0:
        # Human move: columns are entered 1-based, stored 0-based.
        # NOTE(review): no validation that the input is 1-7 or that the
        # column still has room - confirm whether that is acceptable.
        player_move = int(input("Move: ")) - 1
        mm.move(player_move, 1, board)
        display(board)
        if (mm.board_filled(board)) or mm.over(board) != 0:
            break
        print("Thinking...")
        # Depth-1 minimax reply for the computer player (-1).
        next_move = mm.minimax(-1, 1, board)
        sleep(2)  # artificial pause so the "thinking" feels deliberate
        mm.move(next_move % 7, -1, board)
        display(board)
        print("Your Turn:")
    # if mm.value(board) == 0:
    #     print("Tie")
    # else:
    #     print("The winner is {0}".format(mm.value(board)))
    # TODO: print winner
def display(board):
    """Print the board as ASCII art: X = human (1), 0 = computer (-1)."""
    print()
    print()
    for i in range(6):  # rows, top to bottom
        for j in range(7):  # columns, left to right
            val = mm.access(j, i, board)
            if val == -1:
                print(" 0 ", end="")
            elif val == 1:
                print(" X ", end="")
            else:
                print(" ", end="")
            if j < 6:
                print("|", end="")  # column separator
        print("")
        if i < 5:
            print("---------------------------")  # row separator
def test():
    """Ad-hoc check: a down-right diagonal of four 1s should score as a win."""
    board = [0, 0, 0, 0, 0, 0, 0,
             0, 0, 0, 0, 0, 0, 0,
             0, 0, 0, 1, 0, 0, 0,
             0, 0, 0, 0, 1, 0, 0,
             0, 0, 0, 0, 0, 1, 0,
             0, 0, 0, 0, 0, 0, 1, ]
    print(mm.value(board))
if __name__ == "__main__":
    # Entry point: run the interactive game loop.
    main()
    # Leftover 3x3 experiments, kept for reference:
    # board = [1, -1, 1,
    #          0, -1, 0,
    #          0, 0, 1]
    # print(mm.minimax(-1, board))
    # test()
| 56c85daa281e1832c54a7ebf8f49f0e57b5c7a2b | [
"Python"
] | 2 | Python | raymondzh/ConnectFour | 0cf57b489908e2caab448dc6be0c3fd27e93dec7 | b5ea7c1e9497824368e799076a6fc4b21cf263af |
refs/heads/master | <repo_name>IrvingV/Panduit<file_sep>/Java/Panel_2_2.java
import javax.swing.*;
import java.awt.*;
// Diagnostics tab: read-only display of loop timings and displacement values
// for the conveyor simulation (main/graphics program times plus extremes).
public class Panel_2_2 extends JPanel{
    // One DisplayVar per measured quantity: (label, number format, unit).
    DisplayVar t1 = new DisplayVar("actual mainprog time"," ####", "us");
    DisplayVar t1min = new DisplayVar("min mainprog time"," ####", "us");
    DisplayVar t1max = new DisplayVar("max mainprog time"," ####", "us");
    DisplayVar t2 = new DisplayVar("actual grafprog time"," ####", "us");
    DisplayVar t2min = new DisplayVar("min grafprog time"," ####", "us");
    DisplayVar t2max = new DisplayVar("max grafprog time"," ####", "us");
    DisplayVar vp = new DisplayVar("pixelverplaatsing vp"," ####", "pixels");
    DisplayVar dx = new DisplayVar("dx "," ####", "mm");
    DisplayVar dt = new DisplayVar("dt "," ####", "ms");
    DisplayVar dtp = new DisplayVar("dt product "," ####", "ms");

    public Panel_2_2() {
        // One display row per variable, stacked vertically.
        setLayout(new GridLayout(0, 1));
        add(t1 );
        add(t1min);
        add(t1max);
        add(t2 );
        add(t2min);
        add(t2max);
        add(vp );
        add(dx );
        add(dt );
        add(dtp );
    }

    // Refresh every child display with its current value; presumably called
    // from the simulation loop each cycle - confirm in MainInit.
    public void update() {
        t1 .update();
        t1min.update();
        t1max.update();
        t2 .update();
        t2min.update();
        t2max.update();
        vp .update();
        dx .update();
        dt .update();
        dtp .update();
    }
}<file_sep>/Java/ButtonOnOff.java
import javax.swing.*; // JPanel
import java.awt.event.*; // actionListener
import java.awt.*; // Color, Layou
// Two-line toggle button with edge detection: status flips on every click,
// and update() publishes one-cycle rising/falling edge flags.
public class ButtonOnOff extends JPanel {
    public boolean status=false;     // current toggle state (true = on/yellow)
    public boolean ri_status=false;  // rising edge: became true since last update()
    public boolean fe_status=false;  // falling edge: became false since last update()
    private boolean old_status;      // state seen at the previous update()
    private JButton btn;

    public ButtonOnOff(String line1, String line2) {
        // Two caption lines rendered via HTML on a single button.
        String ss="<html><center>" + line1 + "<br>" + line2 + "</center></html>";
        btn= new JButton(ss);
        btn.addActionListener( new ActionListener() {
            public void actionPerformed(ActionEvent arg0) {
                // Toggle on click and repaint immediately.
                status=!status;
                update();
            }
        });
        add(btn );
        update();
    }

    // Recolor the button and recompute the edge flags. NOTE(review): the
    // flags only stay valid for one update() cycle - callers must poll them
    // before the next call.
    public void update() {
        if (status) {
            btn.setBackground(Color.yellow);
        } else {
            btn.setBackground(Color.lightGray);
        }
        ri_status = status && !old_status;
        fe_status = !status && old_status;
        old_status = status;
    }
}
<file_sep>/Java/Panel_1_4.java
import javax.swing.*;
import java.awt.*;
// "Program" parameter tab: the configurable sample time plus a read-only
// level showing how long the program actually took to execute.
public class Panel_1_4 extends JPanel{
    // Slider/Level arguments: (label, format, min, max, unit).
    Slider prgt = new Slider("program sample time" ," 00.0 " , 5 , 55 , "ms");
    LevelInt mest = new LevelInt("time program executed" ," 00000 " , 0 , 5000 , "us");

    public Panel_1_4() {
        // Stack the two rows vertically.
        setLayout(new GridLayout(0, 1));
        add(prgt);
        add(mest);
    }

    // Refresh both child widgets with their current values.
    public void update() {
        prgt.update();
        mest.update();
    }
}<file_sep>/Java/Pan_Parameters.java
import javax.swing.*;
import java.awt.*;
// Container panel grouping the four parameter tabs (production, scoop,
// sensors, program) into a single JTabbedPane.
public class Pan_Parameters extends JPanel{
    Panel_1_1 tab1 = new Panel_1_1();
    Panel_1_2 tab2 = new Panel_1_2();
    Panel_1_3 tab3 = new Panel_1_3();
    Panel_1_4 tab4 = new Panel_1_4();

    public Pan_Parameters() {
        JTabbedPane tabbedPane = new JTabbedPane();
        tabbedPane.addTab("Production", tab1);
        tabbedPane.addTab("Scoop", tab2);
        tabbedPane.addTab("Sensors", tab3);
        tabbedPane.addTab("Program", tab4);
        // Make the tabbed pane fill this panel completely.
        setLayout(new GridLayout(0,1));
        add(tabbedPane);
    }

    // Propagate the periodic refresh to every tab, visible or not.
    public void update() {
        tab1.update();
        tab2.update();
        tab3.update();
        tab4.update();
    }
}<file_sep>/Java/conveyor.java
import java.awt.*;
import javax.swing.*;
/**
 * Applet entry point for the conveyor simulation.
 * Lifecycle: init() builds the simulation objects, start() runs its loop;
 * stop()/destroy() have nothing to release.
 */
public class conveyor extends JApplet
{
    MainInit mn; // simulation bootstrap object, created once in init()

    public void init()
    {
        mn = new MainInit();
    }

    public void start() {
        mn.MainLoop(); // hands control to the simulation's main loop
    }

    public void stop()
    {
        // nothing to suspend
    }

    public void destroy()
    {
        // nothing to release
    }

    /** @return a short human-readable description of the applet. */
    public String getAppletInfo()
    {
        return "Title: \nAuthor: \nA simple applet example description. ";
    }

    /** @return parameter metadata rows: {name, value range, description}. */
    public String[][] getParameterInfo()
    {
        return new String[][] {
            {"firstParameter", "1-10", "description of first parameter"},
            {"status", "boolean", "description of second parameter"},
            {"images", "url", "description of third parameter"}
        };
    }
}
<file_sep>/Java/LevelInt.java
import javax.swing.*; // JPanel
import java.awt.*; // Color, Layou
import java.util.*; // Hashtable
import java.text.DecimalFormat;
/**
 * A read-only level indicator: a header label, a formatted value label and a
 * horizontal slider that tracks {@code valInt}. The slider background is
 * colored green/yellow/red depending on where the value sits relative to the
 * alarm thresholds i_LL/i_L/i_H/i_HH.
 *
 * NOTE(review): the thresholds are hardcoded to 1000/800/400/200 in the
 * constructor regardless of min/max; the commented-out code suggests they
 * were once meant to scale with the configured range — confirm intent.
 */
public class LevelInt extends JPanel{
public Integer valInt=0;      // value shown on the slider (set by the caller, then update())
public Double valDouble=0.0;  // value shown in the text label, rendered through df
public DecimalFormat df;      // number format for the value label
private Integer i_Max=100, i_Min=10;
private Integer delta, i_HH, i_H, i_L, i_LL;  // alarm thresholds (see class note)
private Hashtable labelTable = new Hashtable();  // only used by the commented-out tick labels
private JSlider jsld_val;
JLabel jlbl_val ;
/**
 * @param s      header text
 * @param format DecimalFormat pattern for the value label
 * @param min    slider minimum
 * @param max    slider maximum
 * @param dim    unit text shown after the value
 */
public LevelInt(String s, String format, Integer min, Integer max, String dim) {
df = new DecimalFormat(format);
i_Min = min;
i_Max = max;
delta = (i_Max-i_Min);
// Fixed thresholds — not derived from min/max (see class-level note).
i_HH = 1000;
i_H = 800;
i_L = 400;
i_LL = 200;
JLabel jlbl_hdr;
jlbl_hdr = new JLabel(s);
jlbl_hdr.setFont(new Font("LucidaConsole", Font.PLAIN, 12));
jlbl_val = new JLabel();
jlbl_val.setFont(new Font("LucidaConsole", Font.BOLD, 12));
jlbl_val.setBorder(BorderFactory.createLineBorder(Color.black));
JLabel jlbl_dim;
jlbl_dim = new JLabel(dim);
jlbl_dim.setFont(new Font("LucidaConsole", Font.PLAIN, 12));
jsld_val = new JSlider(JSlider.HORIZONTAL, i_Min, i_Max, i_Min);
//Create the label table
// labelTable.put( new Integer( i_HH ), new JLabel("HH") );
// labelTable.put( new Integer( i_H ), new JLabel("H") );
// labelTable.put( new Integer( i_L ), new JLabel("L") );
// labelTable.put( new Integer( i_LL ), new JLabel("LL") );
// jsld_val.setLabelTable( labelTable );
// jsld_val.setPaintLabels(true);
// Layout: header/value/unit on top, slider filling the center.
JPanel panelNorth = new JPanel(new FlowLayout());
panelNorth.add(jlbl_hdr );
panelNorth.add(jlbl_val);
panelNorth.add(jlbl_dim);
setLayout(new BorderLayout());
add(panelNorth, BorderLayout.NORTH);
add(jsld_val, BorderLayout.CENTER);
update();
}
// Late adjustment of the slider's upper bound.
public void setSliderMax(int m) {
jsld_val.setMaximum(m);
}
/**
 * Push valInt/valDouble into the widgets and recolor the slider:
 * red outside [i_LL, i_HH], yellow outside [i_L, i_H], green otherwise.
 */
public void update() {
jsld_val.setValue(valInt);
jlbl_val.setText(df.format(valDouble));
boolean yellow=false;
boolean red=false;
// delta = (i_Max-i_Min);
// i_HH = i_Min + delta * (HH/100);
// i_H = i_Min + delta * (H /100);
// i_L = i_Min + delta * (L /100);
// i_LL = i_Min + delta * (LL/100);
if ( valInt > i_H || valInt < i_L ) yellow=true;
if ( valInt > i_HH || valInt < i_LL ) {
red=true;
yellow =false;
}
if (yellow) jsld_val.setBackground(Color.yellow);
if (red) jsld_val.setBackground(Color.red);
if (!yellow && !red) jsld_val.setBackground(Color.green);
}
}
<file_sep>/Java/PAN_Grafiek.java
import javax.swing.*;
import java.awt.*;
import java.text.DecimalFormat;
/**
 * An off-screen-buffered plotting window. The constructor opens a JFrame
 * whose content is a CanvasPanel; all plot* methods draw into an off-screen
 * Image, and refreshcanvas() blits it to the screen.
 *
 * Coordinate model: setAxis() establishes a linear mapping from logical
 * units to pixels (dX/dYPixelsPerUnit); plot* methods take logical
 * coordinates and convert to pixels, with the Y axis flipped so that larger
 * logical Y is higher on screen.
 */
class PAN_Grafiek extends JPanel {
public boolean xObjectBuilded = false,  // set true once the frame/buffer exist
xDrawInit;                              // flag for callers; set true after construction
public double t;                        // nanoseconds spent in the last paint() blit
private CanvasPanel canvas;
private Graphics2D graphic;             // draws into the off-screen buffer
private Image canvasImage;              // the off-screen buffer itself
private JFrame frame;
private int wd_dx = 8,                  // current x of the watchdog marker
iB, iH,                                 // buffer width/height in pixels
iXminPx, iXmaxPx,
iXminIn, iXmaxIn,                       // axis extents, in pixels (Px) and logical units (In)
iYminPx, iYmaxPx,
iYminIn, iYmaxIn;
private Integer iTemp;
private double t0,
dXPixelsPerUnit,
dYPixelsPerUnit;
/**
 * Opens the plotting window.
 * @param iSchermPosX screen x of the window
 * @param iSchermPosY screen y of the window
 * @param iBreedte    width in pixels (Dutch: "breedte")
 * @param iHoogte     height in pixels (Dutch: "hoogte")
 */
public PAN_Grafiek( int iSchermPosX,
int iSchermPosY,
int iBreedte,
int iHoogte ){
iB=iBreedte;
iH=iHoogte;
frame = new JFrame();
canvas = new CanvasPanel();
canvas.setPreferredSize(new Dimension(iBreedte,iHoogte));
frame.setContentPane(canvas);
frame.setBounds(iSchermPosX,iSchermPosY,iBreedte,iHoogte);
frame.pack();
frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
// Off-screen buffer, cleared to black.
canvasImage = canvas.createImage(iBreedte,iHoogte);
graphic = (Graphics2D)canvasImage.getGraphics();
graphic.setColor(Color.black);
graphic.fillRect(0,0,iBreedte,iHoogte);
frame.setVisible(true);
xObjectBuilded = true;
xDrawInit = true;
}
/**
 * Defines the logical-to-pixel mapping for both axes and titles the window.
 * DimX/DimY are accepted but not used here.
 */
public void setAxis( String title,
int SpanX,
int MinX,
String DimX,
int SpanY,
int MinY,
String DimY ) {
frame.setTitle(title);
dXPixelsPerUnit = (double) iB/SpanX;
iXminIn = MinX;
iXminPx = (int) ((double) MinX * dXPixelsPerUnit);
iXmaxIn = MinX + SpanX;
iXmaxPx = (int) ((double) (MinX + SpanX) * dXPixelsPerUnit);
dYPixelsPerUnit = (double) iH/SpanY;
iYminIn = MinY;
iYminPx = (int) ((double) MinY * dYPixelsPerUnit);
iYmaxIn = MinY + SpanY;
iYmaxPx = (int) ((double) (MinY + SpanY) * dYPixelsPerUnit);
}
// Sets the drawing color for subsequent plot* calls.
public void setColor(Color c) {
graphic.setColor(c);
}
// Horizontal axis line at y=0, inset 20 logical units from each end.
public void plotXaxis() {
plotLine( iXminIn+20, 0, iXmaxIn-20, 0 );
}
// Vertical axis line at x=0, inset 20 logical units from each end.
public void plotYaxis() {
plotLine( 0, iYminIn+20, 0, iYmaxIn-20 );
}
public void plotGrid() {
// not implemented
}
// Rectangle in logical coordinates; (x,y) is the lower-left corner.
public void plotRectangle(int x, int y, int b, int h ) {
int xx = (int) ( (double) (x * dXPixelsPerUnit) );
int bb = (int) ( (double) (b * dXPixelsPerUnit) );
int yy = (int) ( (double) (y * dYPixelsPerUnit) );
int hh = (int) ( (double) (h * dYPixelsPerUnit) );
graphic.drawRect( xx-iXminPx, iH-hh+iYminPx-yy, bb, hh );
}
// Single pixel at a logical coordinate (drawn as a zero-length line).
public void plotPixel(int x, int y ) {
int xx = (int) ( (double) (x * dXPixelsPerUnit) );
int yy = (int) ( (double) (y * dYPixelsPerUnit) );
graphic.drawLine( xx-iXminPx, iH+iYminPx-yy, xx-iXminPx, iH+iYminPx-yy );
}
// Line segment in logical coordinates (Y flipped to screen space).
public void plotLine(int x1, int y1, int x2, int y2 ) {
int xx1 = (int) ( (double) (x1 * dXPixelsPerUnit) );
int xx2 = (int) ( (double) (x2 * dXPixelsPerUnit) );
int yy1 = (int) ( (double) (y1 * dYPixelsPerUnit) );
int yy2 = (int) ( (double) (y2 * dYPixelsPerUnit) );
graphic.drawLine(xx1-iXminPx, iH+iYminPx-yy1, xx2-iXminPx, iH+iYminPx-yy2);
}
// Text anchored at a logical coordinate.
public void plotString(String s, int x, int y) {
int xx = (int) ( (double) (x * dXPixelsPerUnit) );
int yy = (int) ( (double) (y * dYPixelsPerUnit) );
graphic.drawString(s, (int) xx-iXminPx, iH+iYminPx-yy);
}
// Rectangle inset d pixels from every edge (pixel coordinates, not logical).
public void plotBorder(int d) {
graphic.drawRect( d, d, iB-2*d, iH-2*d);
}
// Left-pointing arrow whose tip is at (x,y), in logical units.
public void plotArrowLeft(int x, int y) {
plotLine(x-16, y+4, x, y);
plotLine(x-16, y-4, x, y);
plotLine(x-32, y, x-16, y);
}
// Right-pointing arrow whose tip is at (x,y), in logical units.
public void plotArrowRight(int x, int y) {
plotLine(x+16, y+4, x, y);
plotLine(x+16, y-4, x, y);
plotLine(x+32, y, x+16, y);
}
// Small sensor glyph: a 12x10 box centered on x with a stub below it.
public void plotSensor(int x, int y) {
plotRectangle (x-6, y, 12, 10);
plotLine ( x, y, x, y-3);
}
// private int plotInteger(int newn, int prevn, int x, int y) {
// if (newn!=prevn) {
// setColor(Color.black);
// String s=String.format("% 5d", prevn);
// plotString(s, x, iXmax/2-y);
// setColor(Color.white);
// s=String.format("% 5d", newn);
// plotString(s, x, iXmax/2-y);
// }
// return(newn);
// }
// private double plotDouble(double newn, String format, double prevn, int x, int y) {
// if (newn!=prevn) {
// // graf.setColor(Color.black);
// DecimalFormat df = new DecimalFormat(format);
// // graf.drawString(df.format(prevn), x, h-y);
// // graf.setColor(Color.white);
// // graf.drawString(df.format(newn), x, h-y);
// }
// return(newn);
// }
// private void drawString(String s, int x, int y) {
// // graf.drawString(s, x, h/2-y);
// }
// private void fillRect(int x, int y, int l, int b){
// for(int i=1; i<=b; i++) {
// plotLine(x,y+i,x+l,y+i);
// }
// }
// private double drwDouble(double var, double db,String format,int x, int y){
// double db_ret = plotDouble(var, format, db, x, y);
// return(db_ret);
// }
/**
 * Liveness indicator: a small white square that steps 5 px right on every
 * call (pixel coordinates), wrapping back to x=8, erasing its old position.
 */
public void watchdog(){
graphic.setColor( Color.black );
graphic.drawRect(wd_dx,8,5,5);
wd_dx=wd_dx+5;
if (wd_dx>30) {
wd_dx=8;
}
graphic.setColor( Color.white );
graphic.drawRect(wd_dx,8,5,5);
}
// Requests a repaint, which blits the off-screen buffer to the window.
public void refreshcanvas() {
canvas.repaint();
}
// The visible surface: paint() copies the off-screen image and records the
// blit duration (nanoseconds) into the public field t.
private class CanvasPanel extends JPanel{
public void paint(Graphics g) {
t0 = System.nanoTime();
g.drawImage(canvasImage, 0, 0, null);
t = System.nanoTime() - t0;
}
}
}
<file_sep>/Java/Slider.java
import javax.swing.*; // JPanel
import java.awt.*; // Color
import java.awt.event.*; // actionListener
import javax.swing.event.*; // ChangeListener, ChangeEvent
import java.text.DecimalFormat;
/**
 * A compound input widget for a double value in [min, max]: a header label,
 * a formatted free-entry text field, four step buttons (--, -, +, ++) and a
 * horizontal slider, all kept in sync.
 *
 * The slider works on an internal 0..1000 scale; a1/b1 map slider ticks to
 * the value, a2/b2 map the value back to slider ticks.
 */
public class Slider extends JPanel{
    public Double val;            // current value, in user units
    public Double oldval = 0.0;   // value at the last refresh (change detection)
    public DecimalFormat df;      // format for the text field
    public String hdr; String frm; double mn; double mx; String dm; // hdr is set; the rest are kept for compatibility but never assigned here
    public boolean stopped = false;  // set true when the user commits a value via the text field
    private int sliderval;           // current position on the internal 0..1000 scale
    private Double i_Max, i_Min, a1, b1, a2, b2;
    private boolean blockUpdate = false;
    private JLabel jlbl_hdr, jlbl_dim;
    private JFormattedTextField jtxf_val;
    private JSlider jsld_val;

    /**
     * @param s      header text
     * @param format DecimalFormat pattern for the text field
     * @param min    minimum value (user units)
     * @param max    maximum value (user units)
     * @param dim    unit text shown after the field
     */
    public Slider(String s, String format, double min, double max, String dim) {
        hdr = s;
        df = new DecimalFormat(format);
        // Linear maps between slider ticks (0..1000) and user units.
        a1 = (max - min) / 1000;
        b1 = min;
        a2 = 1000 / (max - min);
        b2 = -a2 * min;
        // NOTE(review): a1*min+b1 equals min only when min == 0; for other
        // minima the initial val differs from min. Kept as-is — it is
        // overwritten by the first slider/button/text interaction. Confirm.
        val = a1 * min + b1;
        i_Min = min;
        i_Max = max;

        jlbl_hdr = new JLabel(s);
        jlbl_hdr.setFont(new Font("LucidaConsole", Font.PLAIN, 12));
        jlbl_hdr.setToolTipText("Integer: min = " + i_Min + ", max = " + i_Max);

        jsld_val = new JSlider(JSlider.HORIZONTAL, 0, 1000, val.intValue());
        jsld_val.addChangeListener(new ChangeListener() {
            public void stateChanged(ChangeEvent e) {
                sliderval = jsld_val.getValue();
                val = a1 * sliderval + b1;
                update();
            }
        });

        // Step buttons: ++/-- move 100 ticks (10%), +/- move 10 ticks (1%),
        // always clamped to the 0..1000 slider scale.
        JButton btn_1 = new JButton("++");
        btn_1.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent arg0) {
                sliderval = sliderval + 100; if (sliderval > 1000) sliderval = 1000;
                val = a1 * sliderval + b1;
                update();
            }
        });
        JButton btn_2 = new JButton("+");
        btn_2.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent arg0) {
                sliderval = sliderval + 10; if (sliderval > 1000) sliderval = 1000;
                val = a1 * sliderval + b1;
                update();
            }
        });
        JButton btn_3 = new JButton("-");
        btn_3.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent arg0) {
                sliderval = sliderval - 10; if (sliderval < 0) sliderval = 0;
                val = a1 * sliderval + b1;
                update();
            }
        });
        JButton btn_4 = new JButton("--");
        btn_4.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent arg0) {
                sliderval = sliderval - 100; if (sliderval < 0) sliderval = 0;
                val = a1 * sliderval + b1;
                update();
            }
        });

        // Free entry: accept only parseable values inside [min, max];
        // reject with a red background otherwise.
        jtxf_val = new JFormattedTextField();
        jtxf_val.setFont(new Font("LucidaConsole", Font.BOLD, 12));
        jtxf_val.setBorder(BorderFactory.createLineBorder(Color.black));
        jtxf_val.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent evt) {
                JFormattedTextField fi = (JFormattedTextField) evt.getSource();
                fi.setBackground(Color.white);
                try {
                    Double temp = Double.parseDouble(fi.getText());
                    if (temp <= i_Max && temp >= i_Min) {
                        val = temp;
                        Double d = a2 * val + b2;
                        sliderval = d.intValue();
                        stopped = true;
                    }
                    else {
                        fi.setBackground(Color.red); // out of range
                    }
                }
                catch (NumberFormatException fout) {
                    fi.setBackground(Color.red);     // not a number
                }
                update();
            }
        });

        jlbl_dim = new JLabel(dim);
        jlbl_dim.setFont(new Font("LucidaConsole", Font.PLAIN, 12));

        // Layout: header/field/unit row, then the button row, slider below.
        JPanel panel_north1 = new JPanel(new FlowLayout());
        panel_north1.add(jlbl_hdr);
        panel_north1.add(jtxf_val);
        panel_north1.add(jlbl_dim);
        JPanel panel_north2 = new JPanel(new GridLayout(1, 0));
        panel_north2.add(btn_4);
        panel_north2.add(btn_3);
        panel_north2.add(btn_2);
        panel_north2.add(btn_1);
        JPanel panel_north = new JPanel(new GridLayout(0, 1));
        panel_north.add(panel_north1);
        panel_north.add(panel_north2);
        setLayout(new BorderLayout());
        add(panel_north, BorderLayout.NORTH);
        add(jsld_val, BorderLayout.CENTER);
    }

    /** Refresh the slider position and text field if the value changed. */
    public void update() {
        // BUG FIX: val and oldval are boxed Doubles, so the original
        // "val != oldval" compared object identity and the change guard
        // fired on (almost) every call. equals() compares the values.
        if (!val.equals(oldval)) {
            jsld_val.setValue(sliderval);
            jtxf_val.setText(df.format(val));
            oldval = val;
        }
    }
}
import javax.swing.*;
import java.awt.*;
public class Panel_1_3 extends JPanel{
Slider dist = new Slider("Distance between sensors"," 000 " , 10, 330 , "mm");
ButtonOnOff shbu = new ButtonOnOff("Show buffer","");
public Panel_1_3() {
setLayout(new GridLayout(0, 1));
add(dist);
add(shbu);
}
public void update() {
dist.update();
shbu.update();
}
} | d2ce17ddd8254f975c56a3840ed66f913ff94a5d | [
"Java"
] | 9 | Java | IrvingV/Panduit | 80c823f3e8e3298ca41088ed1c7c51fb0f0a127b | 0433fe1735944cb9e19eeb171545deaeb8bdc908 |
refs/heads/main | <repo_name>dschrier34/PoetryApplication<file_sep>/backend/src/main/resources/application.properties
spring.datasource.url=jdbc:h2:./database/stanza.mv.db
spring.jpa.hibernate.ddl-auto=update
<file_sep>/backend/src/main/java/stanzafinalproject/demo/storage/PoemsStorage.java
package stanzafinalproject.demo.storage;
import org.springframework.stereotype.Service;
import stanzafinalproject.demo.resources.Poems;
/**
 * Service-layer facade over {@code PoemRepository} for example poems.
 */
@Service
public class PoemsStorage {

    private PoemRepository poemRepo;

    /** Spring injects the repository via constructor injection. */
    public PoemsStorage(PoemRepository poemRepo) {
        this.poemRepo = poemRepo;
    }

    /** @return every stored poem. */
    public Iterable <Poems> retrieveAllPoems(){
        return poemRepo.findAll();
    }

    /**
     * Look up one poem by primary key.
     *
     * @throws java.util.NoSuchElementException if no poem has the given id
     */
    public Poems retrieveById(Long id){
        // orElseThrow() is the documented, intention-revealing equivalent of
        // Optional.get(); both throw NoSuchElementException when empty.
        return poemRepo.findById(id).orElseThrow();
    }

    /** Insert or update the given poem. */
    public void savePoems(Poems poemsToSave){
        poemRepo.save(poemsToSave);
    }

    /** Delete the poem with the given primary key (if present). */
    public void deletePoemsById(Long id) {
        poemRepo.deleteById(id);
    }
}
<file_sep>/frontend/js/user.js
import { clearChildren, deleteUserPoem, getSingleUserPoem } from "./app.js";
/**
 * Render the "View & Edit Your Poems" page into .main-content.
 * For each poem in userName.userPoems, builds a card with title, content,
 * and Edit/Delete buttons wired to getSingleUserPoem / deleteUserPoem.
 *
 * @param {Object} userName - user object; assumed to carry a userPoems array
 *   of {id, title, poemContent} — TODO confirm against the API response shape.
 */
const userPoemsElement = function (userName) {
  // NOTE(review): this const shadows the enclosing function's own name;
  // inside this body, `userPoemsElement` is the DOM container, not the function.
  const userPoemsElement = document.querySelector(".main-content");
  clearChildren(userPoemsElement);
  const userPoemsMainDiv = document.createElement("div");
  userPoemsMainDiv.classList.add("user-poems-main-div");
  userPoemsElement.appendChild(userPoemsMainDiv);
  const userPoemsDiv = document.createElement("div");
  userPoemsDiv.classList.add("user-poems-div");
  userPoemsMainDiv.appendChild(userPoemsDiv);
  // Header is prepended so it renders above the poems container.
  const userPoemsHeader = document.createElement("h1");
  userPoemsHeader.classList.add("user-poems-header");
  userPoemsHeader.innerText = "View & Edit Your Poems";
  userPoemsMainDiv.prepend(userPoemsHeader);
  // One card per saved poem.
  userName.userPoems.forEach((userPoems) => {
    const singleUserPoemDiv = document.createElement("div");
    singleUserPoemDiv.classList.add("single-user-poem-div");
    singleUserPoemDiv.setAttribute("id", "userPoemEditor");
    singleUserPoemDiv.setAttribute("method", "post");
    userPoemsDiv.appendChild(singleUserPoemDiv);
    const singleUserPoemHeader = document.createElement("h2");
    singleUserPoemHeader.classList.add("single-user-poem-h2");
    singleUserPoemHeader.innerText = userPoems.title;
    singleUserPoemDiv.appendChild(singleUserPoemHeader);
    // innerHTML: poemContent may contain markup saved from the rich editor.
    const singleUserPoemP = document.createElement("p");
    singleUserPoemP.classList.add("single-user-poem-p");
    singleUserPoemP.innerHTML = userPoems.poemContent;
    singleUserPoemDiv.appendChild(singleUserPoemP);
    // Edit: load this poem back into the editor view.
    const userPoemEditButton = document.createElement("button");
    userPoemEditButton.classList.add("poem-edit-button");
    userPoemEditButton.innerText = "Edit";
    userPoemEditButton.addEventListener("click", () => {
      getSingleUserPoem(userPoems.id);
    });
    singleUserPoemDiv.appendChild(userPoemEditButton);
    // Delete: remove the poem server-side.
    const userPoemDeleteButton = document.createElement("button");
    userPoemDeleteButton.classList.add("poem-delete-button");
    userPoemDeleteButton.innerText = "Delete";
    userPoemDeleteButton.addEventListener("click", () => {
      deleteUserPoem(userPoems.id);
    });
    singleUserPoemDiv.appendChild(userPoemDeleteButton);
  });
};
export { userPoemsElement };
<file_sep>/frontend/js/poem-choice-page.js
import { poemTypeElement } from "./poemTypeView.js";
/**
 * Render the poem-type chooser into .main-content: one button per poem type,
 * each loading that type's editor view via poemTypeElement().
 *
 * @param {Array<Object>} poemType - poem-type objects carrying a typeName
 */
const poemChoiceElement = function (poemType) {
  const mainContent = document.querySelector(".main-content");
  clearChildren(mainContent);

  const typeBox = document.createElement("div");
  typeBox.classList.add("typeOfPoemBox");
  mainContent.appendChild(typeBox);

  for (const poemTypeEntry of poemType) {
    const typeButton = document.createElement("button");
    typeButton.classList.add("poem-type-button");
    typeButton.innerText = poemTypeEntry.typeName;
    typeButton.addEventListener("click", () => poemTypeElement(poemTypeEntry));
    typeBox.appendChild(typeButton);
  }

  // NOTE: returns the function itself, matching the original behavior.
  return poemChoiceElement;
};
/** Remove every child node of the given element. */
const clearChildren = function (element) {
  while (element.lastChild) {
    element.removeChild(element.lastChild);
  }
};
export { poemChoiceElement };
<file_sep>/frontend/js/landing.js
import { getPoemTypes } from "./app.js";
/**
 * Build the landing page's <main> element: a single "Create" button that
 * loads the poem-type menu via getPoemTypes().
 *
 * @returns {HTMLElement} the detached <main class="main-content"> element
 */
const landing = function () {
  const mainElement = document.createElement("main");
  mainElement.classList.add("main-content");

  const buttonWrapper = document.createElement("div");
  buttonWrapper.classList.add("divCreateButton");
  mainElement.appendChild(buttonWrapper);

  const createButton = document.createElement("button");
  createButton.classList.add("create-button");
  createButton.innerText = "Create";
  createButton.addEventListener("click", () => getPoemTypes());
  // The second append moves the button into the wrapper (same as original).
  mainElement.appendChild(createButton);
  buttonWrapper.appendChild(createButton);

  return mainElement;
};
export { landing };
<file_sep>/frontend/js/devs.js
import { clearChildren } from "./app.js";
/**
 * Render the developers page into .main-content: one card (image + caption)
 * per team member. Refactored from six copy-pasted blocks into a single
 * data-driven loop producing the identical DOM (classes devs-div-1..6,
 * ids devsImage1..6, caption classes devs-image-cap-1..6).
 *
 * @returns {HTMLElement} the .main-content container
 */
const devsElement = function () {
  const devsElement = document.querySelector(".main-content");
  clearChildren(devsElement);

  const devsMainDiv = document.createElement("div");
  devsMainDiv.classList.add("devs-div-main");
  devsElement.appendChild(devsMainDiv);

  // One entry per developer card, in display order.
  const developers = [
    { src: "images/eric-profile-pic.jpg", caption: "<NAME>" },
    { src: "images/dylon-profile-pic.jpg", caption: "<NAME>" },
    { src: "images/placeholder.jpg", caption: "<NAME>" },
    { src: "images/duane-profile-pic.jpg", caption: "<NAME>" },
    { src: "images/lyna-profile-pic.png", caption: "<NAME>" },
    { src: "images/steph-profile-pic.jpg", caption: "<NAME>" },
  ];

  developers.forEach((dev, index) => {
    const n = index + 1; // original markup is 1-indexed

    const cardDiv = document.createElement("div");
    cardDiv.classList.add(`devs-div-${n}`);
    devsMainDiv.appendChild(cardDiv);

    const cardImage = document.createElement("img");
    cardImage.setAttribute("id", `devsImage${n}`);
    cardImage.src = dev.src;
    cardDiv.appendChild(cardImage);

    const cardCaption = document.createElement("figcaption");
    cardCaption.classList.add(`devs-image-cap-${n}`);
    cardCaption.innerText = dev.caption;
    cardDiv.appendChild(cardCaption);
  });

  return devsElement;
};
export { devsElement };
<file_sep>/backend/src/main/java/stanzafinalproject/demo/storage/ExamplePoemTypeRepository.java
package stanzafinalproject.demo.storage;
import org.springframework.data.repository.CrudRepository;
import stanzafinalproject.demo.resources.ExamplePoemType;
/**
 * Spring Data CRUD repository for {@code ExamplePoemType} entities
 * (Long primary key). All methods are inherited from CrudRepository.
 */
public interface ExamplePoemTypeRepository extends CrudRepository<ExamplePoemType, Long> {
}
<file_sep>/frontend/js/poemTypeView.js
import { getRandomExamplePoem, saveUserPoem } from "./app.js";
import { addTextEditor } from "./syllableCounter.js";
import { addWordGenerator } from "./wordGenerator.js";
/**
 * Render the full editor view for one poem type into .main-content:
 * left column = type description + a random example; middle = title input,
 * rich-text toolbar (document.execCommand) and contenteditable editor
 * (attached by addTextEditor); right column = tools / word generator.
 *
 * @param {Object} examplePoemType - assumed {typeName, typeDescription, ...}
 *   — TODO confirm against the backend's ExamplePoemType shape.
 */
const poemTypeElement = function (examplePoemType) {
const poemTypeContent = document.querySelector(".main-content");
clearChildren(poemTypeContent);
// Outer container and three-column wrapper.
const containerDiv = document.createElement("div");
containerDiv.setAttribute("id", "containerDiv");
containerDiv.classList.add("descriptionDiv");
poemTypeContent.appendChild(containerDiv);
// wrapper div for 3x columns:
const wrapperForFlexboxOrGrid = document.createElement("div");
wrapperForFlexboxOrGrid.classList.add("wrapperForFlexboxOrGrid");
containerDiv.appendChild(wrapperForFlexboxOrGrid);
// Left column: poem-type name and description.
const leftColumn = document.createElement("div");
leftColumn.setAttribute("id", "leftColumn");
wrapperForFlexboxOrGrid.appendChild(leftColumn);
const descHeader = document.createElement("h2");
descHeader.classList.add("description-header");
descHeader.innerText = examplePoemType.typeName;
leftColumn.appendChild(descHeader);
const typeDesP = document.createElement("p");
typeDesP.classList.add("type-description-p");
typeDesP.innerHTML = examplePoemType.typeDescription;
leftColumn.appendChild(typeDesP);
//poem type description & editor
const editorDiv = document.createElement("div");
editorDiv.classList.add("editor-div");
wrapperForFlexboxOrGrid.appendChild(editorDiv);
//title input
const titleInput = document.createElement("input");
titleInput.classList.add("title-input");
titleInput.setAttribute("id", "titleInput");
titleInput.setAttribute("placeholder", "Add a title");
titleInput.setAttribute("method", "post");
editorDiv.appendChild(titleInput);
//editor toolbar
// Formatting buttons drive the (deprecated) document.execCommand API
// against the contenteditable editor that addTextEditor() attaches.
const poemEditorFieldset = document.createElement("fieldset");
poemEditorFieldset.classList.add("poem-editor-fieldset");
editorDiv.appendChild(poemEditorFieldset);
const italicButton = document.createElement("button");
italicButton.classList.add("fontStyle-italic");
italicButton.setAttribute("title", "Italicize Highlighted Text");
italicButton.innerHTML = `<i class="fas fa-italic"></i>`;
italicButton.addEventListener("click", () => {
document.execCommand("italic", false, null);
});
poemEditorFieldset.appendChild(italicButton);
// NOTE(review): only the bold handler calls e.preventDefault(); the other
// formatting buttons do not — confirm whether that asymmetry is intended.
const boldButton = document.createElement("button");
boldButton.classList.add("fontStyle-bold");
boldButton.setAttribute("title", "Bold Highlighted Text");
boldButton.innerHTML = `<i class="fas fa-bold"></i>`;
boldButton.addEventListener("click", (e) => {
e.preventDefault();
document.execCommand("bold", false, null);
});
poemEditorFieldset.appendChild(boldButton);
const underlineButton = document.createElement("button");
underlineButton.classList.add("fontStyle-underline");
underlineButton.setAttribute("title", "Underline Highlighted Text");
underlineButton.innerHTML = `<i class="fas fa-underline"></i>`;
underlineButton.addEventListener("click", () => {
document.execCommand("underline", false, null);
});
poemEditorFieldset.appendChild(underlineButton);
const strikeButton = document.createElement("button");
strikeButton.classList.add("fontStyle-strikethrough");
strikeButton.setAttribute("title", "Strikethrough Highlighted Text");
strikeButton.innerHTML = `<i class="fas fa-strikethrough"></i>`;
strikeButton.addEventListener("click", () => {
document.execCommand("strikethrough", false, null);
});
poemEditorFieldset.appendChild(strikeButton);
// Font-family picker.
// NOTE(review): changeFont/chooseColor/changeSize are not defined in this
// module — presumably globals from another script; and inside this arrow
// function `this` is NOT the <select> element. Verify before relying on it.
const fontSelect = document.createElement("select");
fontSelect.classList.add("input");
fontSelect.setAttribute("id", "font-input");
fontSelect.addEventListener("change", () => {
changeFont(this);
});
poemEditorFieldset.appendChild(fontSelect);
const optionArial = document.createElement("option");
optionArial.value = "Arial";
optionArial.innerText = "Arial";
fontSelect.appendChild(optionArial);
const optionBallet = document.createElement("option");
optionBallet.value = "Ballet";
optionBallet.innerText = "Ballet";
fontSelect.appendChild(optionBallet);
const optionHelvetica = document.createElement("option");
optionHelvetica.value = "Helvetica";
optionHelvetica.innerText = "Helvetica";
fontSelect.appendChild(optionHelvetica);
const optionNewsreader = document.createElement("option");
optionNewsreader.value = "Newsreader";
optionNewsreader.innerText = "Newsreader";
fontSelect.appendChild(optionNewsreader);
const optionNotoSansJp = document.createElement("option");
optionNotoSansJp.value = "Noto Sans JP";
optionNotoSansJp.innerText = "Noto Sans JP";
fontSelect.appendChild(optionNotoSansJp);
const optionPtSerif = document.createElement("option");
optionPtSerif.value = "PT Serif";
optionPtSerif.innerText = "PT Serif";
fontSelect.appendChild(optionPtSerif);
const optionRoboto = document.createElement("option");
optionRoboto.value = "Roboto";
optionRoboto.innerText = "Roboto";
fontSelect.appendChild(optionRoboto);
const optionSource = document.createElement("option");
optionSource.value = "Source Sans Pro";
optionSource.innerText = "Source Sans Pro";
fontSelect.appendChild(optionSource);
const optionTimesNew = document.createElement("option");
optionTimesNew.value = "Times New Roman";
optionTimesNew.innerText = "Times New Roman";
fontSelect.appendChild(optionTimesNew);
const optionTruculenta = document.createElement("option");
optionTruculenta.value = "Truculenta";
optionTruculenta.innerText = "Truculenta";
fontSelect.appendChild(optionTruculenta);
//
// Alignment and undo/redo controls.
const alignLeftButton = document.createElement("button");
alignLeftButton.classList.add("align-left-button");
alignLeftButton.innerHTML = `<i class="fas fa-align-left"></i>`;
alignLeftButton.addEventListener("click", () => {
document.execCommand("justifyLeft", false, null);
});
poemEditorFieldset.appendChild(alignLeftButton);
const alignCenterButton = document.createElement("button");
alignCenterButton.classList.add("align-center-button");
alignCenterButton.innerHTML = `<i class="fas fa-align-center"></i>`;
alignCenterButton.addEventListener("click", () => {
document.execCommand("justifyCenter", false, null);
});
poemEditorFieldset.appendChild(alignCenterButton);
const alignRightButton = document.createElement("button");
alignRightButton.classList.add("align-right-button");
alignRightButton.innerHTML = `<i class="fas fa-align-right"></i>`;
alignRightButton.addEventListener("click", () => {
document.execCommand("justifyRight", false, null);
});
poemEditorFieldset.appendChild(alignRightButton);
const redoButton = document.createElement("button");
redoButton.classList.add("redo-apply-button");
redoButton.innerHTML = `<i class="fas fa-redo-alt"></i>`;
redoButton.addEventListener("click", () => {
document.execCommand("redo", false, null);
});
poemEditorFieldset.appendChild(redoButton);
const undoButton = document.createElement("button");
undoButton.classList.add("undo-apply-button");
undoButton.innerHTML = `<i class="fas fa-undo-alt"></i>`;
undoButton.addEventListener("click", () => {
document.execCommand("undo", false, null);
});
poemEditorFieldset.appendChild(undoButton);
// Color picker and font-size select (handlers are external — see note above).
const colorChoice = document.createElement("input");
colorChoice.classList.add("color-apply");
colorChoice.setAttribute("type", "color");
colorChoice.setAttribute("id", "myColor");
colorChoice.addEventListener("change", () => {
chooseColor();
});
poemEditorFieldset.appendChild(colorChoice);
const fontSizeSelect = document.createElement("select");
fontSizeSelect.classList.add("input");
fontSizeSelect.id = "fontSize";
fontSizeSelect.addEventListener("change", () => {
changeSize();
});
poemEditorFieldset.appendChild(fontSizeSelect);
const optionSize1 = document.createElement("option");
optionSize1.value = "1";
optionSize1.innerText = "1";
fontSizeSelect.appendChild(optionSize1);
// Decorative arrow pointing at the syllable counter, positioned inline.
const imageLabelSyllableCountArrow = document.createElement("img");
imageLabelSyllableCountArrow.setAttribute(
"src",
"./images/syllables-arrow2.png"
);
imageLabelSyllableCountArrow.style.height = "22px";
imageLabelSyllableCountArrow.style.position = "relative";
imageLabelSyllableCountArrow.style.top = "7px";
imageLabelSyllableCountArrow.style.left = "18px";
poemEditorFieldset.appendChild(imageLabelSyllableCountArrow);
// Attaches the contenteditable editor (#editor1) and syllable counter.
addTextEditor();
const rightColumn = document.createElement("div");
rightColumn.setAttribute("id", "rightColumn");
wrapperForFlexboxOrGrid.appendChild(rightColumn);
//poem type example random
const typeExamplesDiv = document.createElement("div");
typeExamplesDiv.classList.add("type-examples-div");
leftColumn.appendChild(typeExamplesDiv);
const typeExamplesHeader = document.createElement("h2");
typeExamplesHeader.classList.add("type-examples-header");
typeExamplesHeader.innerHTML = `Read an Example`;
typeExamplesDiv.appendChild(typeExamplesHeader);
let typeExamplesP = document.createElement("p");
typeExamplesP.classList.add("type-examples-p");
getRandomExamplePoem(examplePoemType.typeName);
typeExamplesDiv.appendChild(typeExamplesP);
const anotherExampleButton = document.createElement("button");
anotherExampleButton.classList.add("another-example-button");
anotherExampleButton.innerText = "Show me another";
anotherExampleButton.addEventListener("click", () => {
clearChildren(typeExamplesP);
getRandomExamplePoem(examplePoemType.typeName);
});
typeExamplesDiv.appendChild(anotherExampleButton);
//download poem function
// Wrap the editor's HTML in a minimal standalone document for download.
function getFrontHTML() {
return `<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Document</title>
</head>
<body>
`;
}
function getBackHTML() {
return `
</body>
</html>`;
}
// Trigger a client-side download via a temporary data-URI anchor.
function download(filename, text) {
let downloadElement = document.createElement("a");
downloadElement.setAttribute(
"href",
"data:text/plain;charset=utf-8," + encodeURIComponent(text)
);
downloadElement.setAttribute("download", filename);
downloadElement.style.display = "none";
editorDiv.appendChild(downloadElement);
downloadElement.click();
editorDiv.removeChild(downloadElement);
}
//user poem buttons (download, share, reset)
const userPoemOptionsDiv = document.createElement("div");
userPoemOptionsDiv.classList.add("user-poem-options-div");
editorDiv.appendChild(userPoemOptionsDiv);
const downloadButton = document.createElement("button");
downloadButton.classList.add("download-button");
downloadButton.innerText = "Download";
downloadButton.addEventListener("click", () => {
let text =
getFrontHTML() +
document.getElementById("editor1").innerHTML +
getBackHTML();
let filename = "yournewfile.html";
download(filename, text);
});
userPoemOptionsDiv.appendChild(downloadButton);
const saveButton = document.createElement("button");
saveButton.classList.add("save-button");
saveButton.innerText = "Save to Account";
saveButton.addEventListener("click", () => {
saveUserPoem();
});
userPoemOptionsDiv.appendChild(saveButton);
// Reset clears both the editor body and the title field.
const resetButton = document.createElement("button");
resetButton.classList.add("reset-button");
resetButton.innerText = "Reset";
resetButton.addEventListener("click", () => {
let resetEditor = document.getElementById("editor1");
let resetTitle = document.getElementById("titleInput");
resetEditor.innerText = "";
resetTitle.value = "";
});
userPoemOptionsDiv.appendChild(resetButton);
//tools button
const toolsDiv = document.createElement("div");
toolsDiv.classList.add("tools-div");
containerDiv.appendChild(toolsDiv);
const toolsButton = document.createElement("h2");
toolsButton.innerText = "Tools";
toolsDiv.appendChild(toolsButton);
rightColumn.appendChild(toolsDiv);
addWordGenerator();
//poem type example random
// NOTE(review): this second .type-examples-p paragraph duplicates the one
// created above and triggers a second getRandomExamplePoem fetch; it looks
// like a leftover, but getRandomExamplePoem may target these elements by
// selector — confirm before removing.
typeExamplesP = document.createElement("p");
typeExamplesP.classList.add("type-examples-p");
getRandomExamplePoem(examplePoemType.typeName);
containerDiv.appendChild(typeExamplesP);
// Inline layout tweaks applied after the view is assembled.
wrapperForFlexboxOrGrid.style.position = 'relative';
wrapperForFlexboxOrGrid.style.top = '-35px';
document.body.style.backgroundPosition = 'center -65px';
return poemTypeElement;
};
const clearChildren = function (element) {
  // Remove every child node, detaching from the tail until none remain.
  for (;;) {
    if (!element.firstChild) {
      return;
    }
    element.removeChild(element.lastChild);
  }
};
export { poemTypeElement };
<file_sep>/backend/src/main/java/stanzafinalproject/demo/storage/PoemRepository.java
package stanzafinalproject.demo.storage;
import org.springframework.data.repository.CrudRepository;
import stanzafinalproject.demo.resources.Poems;
// Spring Data CRUD repository for Poems entities, keyed by their Long id.
// Query methods (save, findById, findAll, ...) are generated by Spring Data.
public interface PoemRepository extends CrudRepository <Poems,Long>{
}
<file_sep>/backend/src/main/java/stanzafinalproject/demo/controllers/UserController.java
package stanzafinalproject.demo.controllers;
import org.springframework.web.bind.annotation.*;
import stanzafinalproject.demo.resources.User;
import stanzafinalproject.demo.resources.UserPoem;
import stanzafinalproject.demo.storage.UserPoemStorage;
import stanzafinalproject.demo.storage.UserStorage;
@RestController
@RequestMapping("/api/user")
public class UserController {

    private final UserStorage userStorage;
    private final UserPoemStorage userPoemStorage;

    public UserController(UserStorage userStorage, UserPoemStorage userPoemStorage) {
        this.userStorage = userStorage;
        this.userPoemStorage = userPoemStorage;
    }

    /** GET /api/user — list every registered user. */
    @GetMapping("")
    public Iterable<User> retrieveAllUsers() {
        return userStorage.retrieveALlUsers();
    }

    /** GET /api/user/{id} — look up a single user by primary key. */
    @GetMapping("/{id}")
    public User retrieveUserById(@PathVariable long id) {
        return userStorage.retrieveById(id);
    }

    /** POST /api/user — persist a new user and echo it back. */
    @PostMapping("")
    public User saveUser(@RequestBody User userNameToAdd) {
        userStorage.saveUser(userNameToAdd);
        return userNameToAdd;
    }

    /** PATCH /api/user/{id}/poem — attach a poem to an existing user. */
    @PatchMapping("/{id}/poem")
    public User addPoemToUser(@RequestBody UserPoem poemToAdd, @PathVariable Long id) {
        User owner = userStorage.retrieveById(id);
        poemToAdd.addUserToPoem(owner);
        userPoemStorage.addUserPoem(poemToAdd);
        return owner;
    }
}
<file_sep>/backend/src/main/java/stanzafinalproject/demo/resources/Poems.java
package stanzafinalproject.demo.resources;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import java.util.Objects;
// JPA entity describing one catalogued poem (author, title and poetic form).
@Entity
public class Poems {
// Author of the poem.
private String poet;
// Title of the poem.
private String title;
// Poetic form, e.g. "Haiku".
private String form;
// Database-generated primary key.
@Id
@GeneratedValue
private Long id;
public Poems(String poet, String title, String form) {
this.poet = poet;
this.title = title;
this.form = form;
}
// No-arg constructor required by JPA; protected so application code
// cannot create uninitialized instances.
protected Poems() {
}
public Long getId() {
return id;
}
public String getPoet() {
return poet;
}
public String getTitle() {
return title;
}
public String getForm() {
return form;
}
// Field-by-field equality; null-safe via Objects.equals.
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
Poems poems = (Poems) o;
return Objects.equals(poet, poems.poet) && Objects.equals(title, poems.title) && Objects.equals(form, poems.form) && Objects.equals(id, poems.id);
}
@Override
public int hashCode() {
return Objects.hash(poet, title, form, id);
}
}
<file_sep>/backend/src/test/java/stanzafinalproject/demo/JpaWiringTest.java
package stanzafinalproject.demo;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;
import org.springframework.boot.test.autoconfigure.orm.jpa.TestEntityManager;
import stanzafinalproject.demo.storage.PoemRepository;
import stanzafinalproject.demo.resources.Poems;
import static org.assertj.core.api.Assertions.assertThat;
// Verifies the Spring Data JPA wiring: a saved Poems entity can be read back
// from the database through PoemRepository.
@DataJpaTest
public class JpaWiringTest {
// Repository under test, injected by the JPA test slice.
@Autowired
private PoemRepository poemRepo;
// Allows explicit flush/clear of the persistence context.
@Autowired
private TestEntityManager entityManager;
@Test
public void PoemRepoShouldRetrieveAllPoems(){
Poems testPoems = new Poems("Poet","Title of Poem", "Haiku");
poemRepo.save(testPoems);
// Flush to the database and clear the first-level cache so the
// following findById is a real round-trip rather than a cache hit.
entityManager.flush();
entityManager.clear();
Poems retrievePoems = poemRepo.findById(testPoems.getId()).get();
assertThat(retrievePoems).isEqualTo(testPoems);
}
}
<file_sep>/frontend/js/header.js
import { loginElement } from "./login.js";
const createHeader = function () {
  // Build the page header containing a single Login button that opens
  // the login dialog.
  const header = document.createElement("header");
  header.classList.add("main-header");

  const loginButton = document.createElement("button");
  loginButton.classList.add("login-button");
  loginButton.innerText = "Login";
  loginButton.addEventListener("click", () => loginElement());
  header.appendChild(loginButton);

  return header;
};
export { createHeader };
<file_sep>/backend/src/main/java/stanzafinalproject/demo/resources/ExamplePoem.java
package stanzafinalproject.demo.resources;
import java.util.Objects;

import javax.persistence.*;
/**
 * JPA entity for a curated example poem, linked many-to-one to its poem type.
 */
@Entity
public class ExamplePoem {

    private String title;
    private String poet;

    // Poem URLs can exceed the default 255-character column length.
    @Column(length = 500)
    private String poemUrl;

    // Database-generated primary key.
    @Id
    @GeneratedValue
    private Long id;

    @ManyToOne
    private ExamplePoemType examplePoemType;

    /** No-arg constructor required by JPA. */
    protected ExamplePoem() {
    }

    public ExamplePoem(String title, String poet, ExamplePoemType examplePoemType, String poemUrl) {
        this.title = title;
        this.poet = poet;
        this.examplePoemType = examplePoemType;
        this.poemUrl = poemUrl;
    }

    public String getTitle() {
        return title;
    }

    public String getPoet() {
        return poet;
    }

    public ExamplePoemType getExamplePoemType() {
        return examplePoemType;
    }

    public String getPoemUrl() {
        return poemUrl;
    }

    public Long getId() {
        return id;
    }

    /**
     * Field-by-field equality. Uses Objects.equals for null-safety and for
     * consistency with the Poems entity's equals implementation.
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        ExamplePoem that = (ExamplePoem) o;
        return Objects.equals(title, that.title)
                && Objects.equals(poet, that.poet)
                && Objects.equals(poemUrl, that.poemUrl)
                && Objects.equals(id, that.id)
                && Objects.equals(examplePoemType, that.examplePoemType);
    }

    @Override
    public int hashCode() {
        return Objects.hash(title, poet, poemUrl, id, examplePoemType);
    }
}
| 72823f33dd31bbfb325e2af9ff91484ba61d2891 | [
"JavaScript",
"Java",
"INI"
] | 14 | INI | dschrier34/PoetryApplication | 7d6400df0293ed1f5342275bf8ee3cea815c8e31 | 971de88f372ecc412d1ef2df140fa1696534e255 |
refs/heads/master | <file_sep>import React, { Component } from "react";
import {
View,
StyleSheet,
Animated,
TouchableWithoutFeedback
} from "react-native";
export default class App extends Component {
constructor() {
super();
this.state = {
animValue: new Animated.Value(250)
};
}
handleSelect = () => {
this.state.animValue._value > 250
? Animated.timing(this.state.animValue, {
toValue: 250,
duration: 500
}).start()
: Animated.timing(this.state.animValue, {
toValue: 450,
duration: 500
}).start();
};
renderRectangle = () => {
let rotateAnimation = this.state.animValue.interpolate({
inputRange: [250, 450],
outputRange: ["0deg", "360deg"]
});
const customStyle = {
height: this.state.animValue,
transform: [{ rotate: rotateAnimation }]
};
return (
<Animated.View useNativeDriver style={[styles.rectangle, customStyle]}>
<TouchableWithoutFeedback onPress={() => this.handleSelect()}>
<View style={{ flex: 1 }} />
</TouchableWithoutFeedback>
</Animated.View>
);
};
render() {
return <View style={styles.container}>{this.renderRectangle()}</View>;
}
}
// Styles: centered container plus the animated rectangle (height is
// supplied dynamically by the Animated.Value).
const styles = StyleSheet.create({
container: {
flex: 1,
justifyContent: "center",
alignItems: "center"
},
rectangle: {
backgroundColor: "#2c3e50",
width: 250
}
});
<file_sep>import React, { Component } from "react";
import { View, StyleSheet, TouchableWithoutFeedback } from "react-native";
import * as Animatable from "react-native-animatable";
export default class App extends Component {
constructor() {
super();
this.state = {
trigger: false
};
}
handleSelect = () => {
this.setState({
trigger: this.state.trigger ? false : true
});
};
renderRectangle = () => (
<Animatable.View
useNativeDriver
transition={["rotate", "scaleY"]}
duration={500}
style={[
styles.rectangle,
{
scaleY: this.state.trigger ? 1.5 : 1,
transform: [{ rotate: this.state.trigger ? "0deg" : "360deg" }]
}
]}
>
<TouchableWithoutFeedback onPress={() => this.handleSelect()}>
<View style={{ flex: 1 }} />
</TouchableWithoutFeedback>
</Animatable.View>
);
render() {
return <View style={styles.container}>{this.renderRectangle()}</View>;
}
}
// Styles: centered container plus the fixed-size animatable rectangle.
const styles = StyleSheet.create({
container: {
flex: 1,
justifyContent: "center",
alignItems: "center"
},
rectangle: {
backgroundColor: "#2c3e50",
width: 250,
height: 250
}
});
<file_sep>import React, { Component } from "react";
import { View, StyleSheet, Text, TouchableHighlight } from "react-native";
import AppAnimatable from "./AppAnimatable";
import AppAnimated from "./AppAnimated";
export default class App extends Component {
constructor() {
super();
this.state = {
animationMethod: "animated"
};
}
handleSelect = () => {
this.setState({
animationMethod:
this.state.animationMethod === "animated" ? "animatable" : "animated"
});
};
renderRectangle = () =>
this.state.animationMethod === "animatable" ? (
<AppAnimatable />
) : (
<AppAnimated />
);
render() {
return (
<View style={styles.container}>
<TouchableHighlight
style={{
width: 250,
height: 250,
alignItems: "center",
justifyContent: "center"
}}
onPress={this.handleSelect}
>
<Text>
Switch to{" "}
{this.state.animationMethod === "animated"
? "animatable"
: "animated"}
</Text>
</TouchableHighlight>
{this.renderRectangle()}
</View>
);
}
}
// Styles for the switcher screen.
// NOTE(review): `rectangle` appears unused here — the rectangles are rendered
// by the child AppAnimated/AppAnimatable components; confirm and remove.
const styles = StyleSheet.create({
container: {
flex: 1,
justifyContent: "center",
alignItems: "center"
},
rectangle: {
backgroundColor: "#2c3e50",
width: 250,
height: 250
}
});
| 58863a0fe7a49a604e65297e58b81774a4a53f04 | [
"JavaScript"
] | 3 | JavaScript | Krisztiaan/react-native-animated-animatable-example | 1d7d8885075b46a1b1d530abcfeb595907a86945 | 1e84d7da53da75ea8a9e603a5d9cae7ba32aa107 |
refs/heads/master | <file_sep>This plugin is based on check_2hoptrace.sh. It is modified to do the trace between 2 hops with TCP SYN packets rather than UDP or ICMP packets.
<file_sep>#! /bin/bash
PROG="sudo /bin/traceroute -n -T"
GREP="/bin/egrep"
HOST=$1
FILE=/tmp/check_tracert-`date +%N`.txt
START="$2"
LAST="$3"
if [ $# -lt 2 ]
then
echo "Usage: check_tracert [host] [ip we should route via] [last hop]"
exit 2
fi
$PROG $HOST > $FILE
RESULT=`$GREP -c "($START|$LAST)" $FILE`
case "$RESULT" in
0) echo "CRITICAL! Routing not matching!!!"
rm -f $FILE
echo $RESULT > /tmp/result
exit 2
;;
1) echo "WARNING! One hop route not found!"
rm -f $FILE
echo $RESULT > /tmp/result
exit 1
;;
2) echo "OK! Currently routing via $LAST"
rm -f $FILE
echo $RESULT > /tmp/result
exit 0
;;
*) echo "UNKNOWN!"
rm -f $FILE
echo $RESULT > /tmp/result
exit 3
esac
| ab43ceabcc28102452074ec619abf84d011d89fb | [
"Markdown",
"Shell"
] | 2 | Markdown | gouldchu/Plugins | 51148ef31d9665fb39bd7953d548ad7e1f9650fa | a5374fe1e7944e591865a5f313822dead9569ec8 |
refs/heads/master | <repo_name>pinkeshgupta5/CountriesApp<file_sep>/app/src/main/java/com/countries/pinkesh/countries/room/CountryDatabase.java
package com.countries.pinkesh.countries.room;
import android.arch.persistence.db.SupportSQLiteOpenHelper;
import android.arch.persistence.room.Database;
import android.arch.persistence.room.DatabaseConfiguration;
import android.arch.persistence.room.InvalidationTracker;
import android.arch.persistence.room.Room;
import android.arch.persistence.room.RoomDatabase;
import android.content.Context;
import android.support.annotation.NonNull;
import com.countries.pinkesh.countries.models.BaseCountry;
/**
 * Room database singleton holding saved countries.
 *
 * Fixes: the previous version manually overrode createOpenHelper,
 * createInvalidationTracker and clearAllTables with null/no-op bodies.
 * Room generates those methods in its own subclass; overriding them to
 * return null breaks the generated implementation at runtime, and
 * clearAllTables merely dropped the singleton reference instead of
 * clearing tables. getInstance is now synchronized so concurrent first
 * calls cannot build two database instances.
 */
@Database(entities = {BaseCountry.class}, version = 1, exportSchema = false)
public abstract class CountryDatabase extends RoomDatabase {

    public abstract CountryDao getcountryDao();

    private static CountryDatabase countryDatabase;

    public static synchronized CountryDatabase getInstance(Context context) {
        if (countryDatabase == null) {
            countryDatabase = buildDatabaseInstance(context);
        }
        return countryDatabase;
    }

    @NonNull
    private static CountryDatabase buildDatabaseInstance(Context context) {
        // allowMainThreadQueries is kept for behavioral compatibility with
        // existing callers, though queries should move off the main thread.
        return Room.databaseBuilder(context,
                CountryDatabase.class,
                "countrydatabase")
                .allowMainThreadQueries().build();
    }
}
<file_sep>/app/src/main/java/com/countries/pinkesh/countries/Utilities/Network.java
package com.countries.pinkesh.countries.Utilities;
import android.content.Context;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
/**
* Created by pingupta on 2/27/2018.
*/
public class Network {
    /**
     * Returns true when the device currently has an active, connected network.
     */
    public static boolean checkNetworkAvailability(Context context) {
        ConnectivityManager manager =
                (ConnectivityManager) context.getSystemService(Context.CONNECTIVITY_SERVICE);
        NetworkInfo info = manager.getActiveNetworkInfo();
        return info != null && info.isConnected();
    }
}
<file_sep>/app/src/main/java/com/countries/pinkesh/countries/CountryDetailsActivity.java
package com.countries.pinkesh.countries;
import android.arch.persistence.room.Room;
import android.content.Context;
import android.content.Intent;
import android.graphics.drawable.PictureDrawable;
import android.net.Uri;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import com.bumptech.glide.Glide;
import com.bumptech.glide.RequestBuilder;
import com.bumptech.glide.request.RequestOptions;
import com.countries.pinkesh.countries.models.BaseCountry;
import com.countries.pinkesh.countries.room.CountryDatabase;
import com.countries.pinkesh.countries.svg.SvgSoftwareLayerSetter;
import butterknife.BindView;
import butterknife.ButterKnife;
import io.reactivex.Completable;
import io.reactivex.CompletableObserver;
import io.reactivex.android.schedulers.AndroidSchedulers;
import io.reactivex.disposables.Disposable;
import io.reactivex.functions.Action;
import io.reactivex.schedulers.Schedulers;
import static com.bumptech.glide.load.resource.drawable.DrawableTransitionOptions.withCrossFade;
// Shows the full details of one country (passed in via the "countrydata"
// Intent extra) and lets the user save its name + flag for offline use.
public class CountryDetailsActivity extends AppCompatActivity {
// ButterKnife-bound views for each country attribute.
@BindView(R.id.imageView)
ImageView ImageViewFlag;
@BindView(R.id.textView_name)
TextView textView_Name;
@BindView(R.id.textView_capital)
TextView textView_Capital;
@BindView(R.id.textView_region)
TextView textView_Region;
@BindView(R.id.textView_subregion)
TextView textView_Subregion;
@BindView(R.id.textView_callincodes)
TextView textView_CallinCodes;
@BindView(R.id.textView_curcode)
TextView textView_Curcode;
@BindView(R.id.textView_curname)
TextView textView_Curname;
@BindView(R.id.textView_cursymbol)
TextView textView_Cursymbol;
@BindView(R.id.textView_Langiso1)
TextView textView_LAngsio1;
@BindView(R.id.textView_Langiso2)
TextView textView_Langsio2;
@BindView(R.id.textView_LangName)
TextView textView_LangName;
@BindView(R.id.textView_LangNative)
TextView textView_LangNative;
@BindView(R.id.textView_TimeZones)
TextView textView_Timezones;
@BindView(R.id.button_save)
Button Button_Save;
// Room database used for offline persistence.
private CountryDatabase countryDatabase;
// Country being displayed, deserialized from the launching Intent.
private BaseCountry baseCountry;
// Glide request builder that renders the SVG flag.
private RequestBuilder<PictureDrawable> requestBuilder;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_country_details);
ButterKnife.bind(this);
Intent intent = getIntent();
Bundle extras = intent.getExtras();
// NOTE(review): extras and the "countrydata" extra are assumed non-null;
// a missing extra would NPE here — confirm against the launching code.
baseCountry = (BaseCountry) extras.getSerializable("countrydata");
countryDatabase = CountryDatabase.getInstance(this);
//countryDatabase = Room.databaseBuilder(this,CountryDatabase.class,"database").build();
// Country name
textView_Name.setText(baseCountry.getName());
//Capital
textView_Capital.setText(baseCountry.getCapital());
//Region
textView_Region.setText(baseCountry.getRegion());
//Subregion
textView_Subregion.setText(baseCountry.getSubregion());
// Calling codes — only the first entry is shown.
textView_CallinCodes.setText(baseCountry.getCallingCodes().get(0).toString());
// Currencies — only the first entry is shown; assumes a non-empty list.
textView_Curcode.setText(baseCountry.getCurrencies().get(0).getCode());
textView_Curname.setText(baseCountry.getCurrencies().get(0).getName());
textView_Cursymbol.setText(baseCountry.getCurrencies().get(0).getSymbol());
// Languages — only the first entry is shown; assumes a non-empty list.
textView_LAngsio1.setText(baseCountry.getLanguages().get(0).getIso639_1());
textView_Langsio2.setText(baseCountry.getLanguages().get(0).getIso639_2());
textView_LangName.setText(baseCountry.getLanguages().get(0).getName());
textView_LangNative.setText(baseCountry.getLanguages().get(0).getNativeName());
// Time zones — only the first entry is shown.
textView_Timezones.setText(baseCountry.getTimezones().get(0).toString());
// Load the SVG flag via Glide with a software-layer SVG decoder.
requestBuilder = Glide.with(this)
.as(PictureDrawable.class).apply(new RequestOptions().placeholder(R.drawable.ic_launcher_background)
.error(R.drawable.ic_launcher_background)).transition(withCrossFade())
.listener(new SvgSoftwareLayerSetter());
Uri uri = Uri.parse(baseCountry.getFlag());
requestBuilder.load(uri).into(ImageViewFlag);
// Save button: insert (name, flag URL) into Room on an IO thread and
// toast on completion.
Button_Save.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Completable.fromAction(new Action() {
@Override
public void run() throws Exception {
BaseCountry baseCountryDatainsert = new BaseCountry(baseCountry.getName(),baseCountry.getFlag());
countryDatabase.getcountryDao().insertAll(baseCountryDatainsert);
}
}).observeOn(AndroidSchedulers.mainThread()).subscribeOn(Schedulers.io()).subscribe(new CompletableObserver() {
@Override
public void onSubscribe(Disposable d) {
}
@Override
public void onComplete() {
Toast.makeText(CountryDetailsActivity.this, "DataInserted", Toast.LENGTH_SHORT).show();
}
@Override
public void onError(Throwable e) {
// NOTE(review): insert failures are silently swallowed here — consider
// at least logging the throwable.
}
});
}
});
}
}<file_sep>/app/src/main/java/com/countries/pinkesh/countries/adapter/CountryAdapter.java
package com.countries.pinkesh.countries.adapter;
import android.content.Context;
import android.graphics.drawable.PictureDrawable;
import android.net.Uri;
import android.support.annotation.NonNull;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Filter;
import android.widget.Filterable;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.bumptech.glide.Glide;
import com.bumptech.glide.RequestBuilder;
import com.bumptech.glide.load.engine.DiskCacheStrategy;
import com.bumptech.glide.load.model.StreamEncoder;
import com.bumptech.glide.request.RequestOptions;
import com.countries.pinkesh.countries.R;
import com.countries.pinkesh.countries.models.BaseCountry;
import com.countries.pinkesh.countries.svg.SvgDecoder;
import com.countries.pinkesh.countries.svg.SvgDrawableTranscoder;
import com.countries.pinkesh.countries.svg.SvgSoftwareLayerSetter;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import butterknife.BindView;
import butterknife.ButterKnife;
import static com.bumptech.glide.load.resource.drawable.DrawableTransitionOptions.withCrossFade;
// RecyclerView adapter showing countries (flag + name) with text filtering
// on the country name. Row taps are forwarded through CountryAdapterListener.
public class CountryAdapter extends RecyclerView.Adapter<CountryAdapter.CountryViewHolder> implements Filterable {
Context ctx;
// Full, unfiltered data set.
List<BaseCountry> baseCountriesList;
// Subset currently displayed; initially aliases the full list.
private List<BaseCountry> baseCountryListFiltered;
// Callback invoked when a row is tapped.
private CountryAdapterListener listener;
// Glide builder used to render the SVG flags.
private RequestBuilder<PictureDrawable> requestBuilder;
public CountryAdapter(Context ctx, List<BaseCountry> baseCountriesList, CountryAdapterListener listener) {
this.ctx = ctx;
this.baseCountriesList = baseCountriesList;
this.listener = listener;
this.baseCountryListFiltered = baseCountriesList;
}
@NonNull
@Override
public CountryViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
View item = LayoutInflater.from(parent.getContext()).inflate(R.layout.country_itemview, parent, false);
CountryViewHolder countryViewHolder = new CountryViewHolder(item);
return countryViewHolder;
}
@Override
public void onBindViewHolder(@NonNull CountryViewHolder holder, int position) {
BaseCountry baseCountry = baseCountryListFiltered.get(position);
//Glide.with(ctx).load(baseCountry.getFlag()).into(holder.imageViewFlag);
// Picasso.with(ctx).load(baseCountry.getFlag()).resize(120,60).into(holder.imageViewFlag);
holder.textViewName.setText(baseCountry.getName());
// SVG flags need the custom decoder + software layer listener.
requestBuilder = Glide.with(ctx)
.as(PictureDrawable.class).apply(new RequestOptions().placeholder(R.drawable.ic_launcher_background)
.error(R.drawable.ic_launcher_background)).transition(withCrossFade())
.listener(new SvgSoftwareLayerSetter());
Uri uri = Uri.parse(baseCountry.getFlag());
requestBuilder.load(uri).into(holder.imageViewFlag);
}
@Override
public int getItemCount() {
return baseCountryListFiltered.size();
}
// Holds one row's views and wires the click-through to the listener.
public class CountryViewHolder extends RecyclerView.ViewHolder {
@BindView(R.id.textView_name)
TextView textViewName;
@BindView(R.id.imageView_flag)
ImageView imageViewFlag;
@BindView(R.id.LinearLayout)
LinearLayout linearLayout;
public CountryViewHolder(View itemView) {
super(itemView);
ButterKnife.bind(this, itemView);
itemView.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
// send selected Country in callback
listener.onCountrySelected(baseCountryListFiltered.get(getAdapterPosition()));
}
});
}
}
// Case-insensitive substring filter over country names. An empty query
// restores the full list.
@Override
public Filter getFilter() {
return new Filter() {
@Override
protected FilterResults performFiltering(CharSequence charSequence) {
String charString = charSequence.toString();
if (charString.isEmpty()) {
baseCountryListFiltered = baseCountriesList;
} else {
List<BaseCountry> filteredList = new ArrayList<>();
for (BaseCountry row : baseCountriesList) {
// name match condition. this might differ depending on your requirement
// here we are looking for name or phone number match
if (row.getName().toLowerCase().contains(charString.toLowerCase())) {
filteredList.add(row);
}
}
baseCountryListFiltered = filteredList;
}
FilterResults filterResults = new FilterResults();
filterResults.values = baseCountryListFiltered;
return filterResults;
}
@Override
protected void publishResults(CharSequence charSequence, FilterResults filterResults) {
baseCountryListFiltered = (ArrayList<BaseCountry>) filterResults.values;
notifyDataSetChanged();
}
};
}
// Implemented by the host screen to react to row selection.
public interface CountryAdapterListener {
void onCountrySelected(BaseCountry baseCountry);
}
}
<file_sep>/app/src/main/java/com/countries/pinkesh/countries/remote/ApiService.java
package com.countries.pinkesh.countries.remote;
import com.countries.pinkesh.countries.models.BaseCountry;
import java.util.ArrayList;
import retrofit2.Call;
import retrofit2.http.GET;
// Retrofit API definition for the REST Countries service.
public interface ApiService {
// GET <base-url>/all — fetch every country with full details.
@GET("all")
Call<ArrayList<BaseCountry>> getallcountriesdata();
}
<file_sep>/README.md
# CountriesApp
An Android app called 'Countries' that enables the user to search for countries by name and view details about them.
It uses a rich UI and modern Android Architecture Components: LiveData, ViewModel, Room, Retrofit, and ButterKnife dependency injection.
Online Search: This is the landing screen of the application.
The screen is empty initially; data is shown only once the user starts searching.
There should be a search box on top, User should be able to search with country name (Use the API provided in Reference section),
for each character entered in the search box the search result should refresh.
The search result should be displayed as a list, each row in the list will contain the country flag and country name.
The flag image should be loaded lazily. On selecting an item in the list, the details of the country should be shown.
Country Details: This screen will display the details of the country selected.
This screen will have country flag, country name, capital, calling code, region, sub region, time zone, currencies and languages.
There will be a button in the screen to save the country for offline.
On clicking of this button, the country should be persisted locally including the flag image.
Offline: When the device is offline the landing screen will show all the countries saved for offline.
While offline the search should be performed on the saved countries,
the save for offline button will be hidden in this flow and rest of the flow remains the same
| 9e4d082b743ccf90649b2bee7dedcfc6793f8495 | [
"Markdown",
"Java"
] | 6 | Java | pinkeshgupta5/CountriesApp | 1412e1e627c207af1d0fbc6a42a4af250f0dc91d | a3e1647a68d22f3819d42184c7ae9b2f2fc6f8ef |
refs/heads/master | <file_sep>#ifndef TWIDDLE_H
#define TWIDDLE_H
// Coordinate-descent ("twiddle") tuner for the PID gains.
// NOTE(review): this header uses std::vector and the PID type but includes
// neither <vector> nor "PID.h"; it relies on the including file pulling
// them in first — consider making the header self-contained.
class Twiddle {
public:
/**
* Current iteration
*/
int iteration;
/**
* Constructor
*
*/
Twiddle();
/**
* Initialize the tuner
* @param (p_, dp_) The initial parameter values and their increments
*/
void Init(std::vector<double> p_, std::vector<double> dp_);
/**
* Destructor.
*/
virtual ~Twiddle();
/**
* Print current status of the twiddle search
*/
void PrintResults();
/**
* Run one twiddle step and update the PID gains
* @param (pid, err) pid object to return, and current cte error
*/
PID UpdateTwiddle(PID pid, double err);
/**
* Return the best (lowest) error seen so far
*/
double GetBestError();
private:
/**
* Parameters to tune `p` and their increments `dp`
*/
std::vector<double> p;
std::vector<double> dp;
/**
* Index of the parameter currently being tuned, and best error so far
*/
int index;
double best_err;
/**
* Flag to trigger different logic in the algorithm
*/
bool isImproving;
/**
* Logic applied when the last change improved the error
* @param (pid, err) pid object to return, and current cte error
*/
PID DidImprove(PID pid, double err);
/**
* Logic applied when the last change did not improve the error
* @param (pid, err) pid object to return, and current cte error
*/
PID DidNotImprove(PID pid, double err);
};
#endif // TWIDDLE_H<file_sep>## PID Controller
[](http://www.cplusplus.org/)
[](http://www.udacity.com/drive)
In this project we use a PID controller to control the steering angle and maximum speed possible that allows our car to stay on the road in a virtual environment. This project involves the Udacity Simulator which can be downloaded [here](https://github.com/udacity/self-driving-car-sim/releases).
<img src="https://github.com/ajimenezjulio/P8_PID_Controller/blob/master/docs/pid.gif">
## Goal
The objective of this project is to drive at the maximum possible speed (70 mph was reached) along a road with steep curves, using a PID controller to set the steering angle that keeps the car on the road, with the cross track error (`cte`) as the quantity to minimize.
At each time step the simulator provides the cross track error (`cte`), the steering angle and the speed, which the controller uses to respond appropriately to changes.
## Dependencies
* cmake >= 3.5
* All OSes: [click here for installation instructions](https://cmake.org/install/)
* make >= 4.1(mac, linux), 3.81(Windows)
* Linux: make is installed by default on most Linux distros
* Mac: [install Xcode command line tools to get make](https://developer.apple.com/xcode/features/)
* Windows: [Click here for installation instructions](http://gnuwin32.sourceforge.net/packages/make.htm)
* gcc/g++ >= 5.4
* Linux: gcc / g++ is installed by default on most Linux distros
* Mac: same deal as make - [install Xcode command line tools]((https://developer.apple.com/xcode/features/)
* Windows: recommend using [MinGW](http://www.mingw.org/)
* [uWebSockets](https://github.com/uWebSockets/uWebSockets)
* Run either `./install-mac.sh` or `./install-ubuntu.sh`.
* If you install from source, checkout to commit `e94b6e1`, i.e.
```
git clone https://github.com/uWebSockets/uWebSockets
cd uWebSockets
git checkout e94b6e1
```
Some function signatures have changed in v0.14.x. See [this PR](https://github.com/udacity/CarND-MPC-Project/pull/3) for more details.
* Simulator. You can download these from the [project intro page](https://github.com/udacity/self-driving-car-sim/releases) in the classroom.
Another option is this guide to Windows set-up for the project [(here)](https://s3-us-west-1.amazonaws.com/udacity-selfdrivingcar/files/Kidnapped_Vehicle_Windows_Setup.pdf) if the environment you have set up for the Sensor Fusion projects does not work for this project. There's also an experimental patch for windows in this [PR](https://github.com/udacity/CarND-PID-Control-Project/pull/3).
## Running the Code
A script for the process of cleaning, directory creation, build and execution was provided, so you only have to run it from the project root.
```
> ./clean_and_make.sh
```
For a manual approach the next commands should be executed.
```
> rm -r build
> mkdir build && cd build
> cmake .. && make
> ./pid
```
Tips for setting up your environment can be found [here](https://classroom.udacity.com/nanodegrees/nd013/parts/40f38239-66b6-46ec-ae68-03afd8a601c8/modules/0949fca6-b379-42af-a919-ee50aa304e6a/lessons/f758c44c-5e40-4e01-93b5-1a82aa4e044f/concepts/23d376c7-0195-4276-bdf0-e02f1f3c665d).
## Implementation
The directory structure of this repository is as follows:
```
.
├── CMakeLists.txt
├── README.md
├── clean_and_make.sh
├── src
├── json.hpp
├── PID.h
├── Twiddle.h
├── Twiddle.cpp
├── PID.cpp
├── main.cpp
```
## Details
**PIDs**
1. Firstly, two PID instances (one for steering and one for speed) are initialized with the following parameters in P I D order: `steering = 0.045, 0.005, 0.9 | speed = 0.33, 0.0027, 0.02` .
2. At each point each instance is updated using the cross track error `cte` value provided by the simulator.
3. Finally, the error of all the components is calculated and propagated back to the control variable in the simulator.
**Twiddle**
1. An instance of the twiddle algorithm is created, the parameters to tune `p` as well as the increments `dp` must be given.
2. The flag `isTwiddleActive` must be set to use the twiddle algorithm to fine tune parameters P, I and D.
3. The algorithm will update the current parameter based on the established increment and will reset the state of the car, there is the possibility that the car may get stuck, so in addition to the state rest the error will be updated.
**Notes**
1. The algorithm was tested for the simulator in the `fastest` setting for `performance`, using a higher resolution such as `fantastic` led to unexpected behavior due to longer delay time for the update, unless you have a powerful enough computer these values may not work at higher resolutions.
1. The twiddle algorithm was used but the training process was too slow or erratic so manual trial and error intervention was used to reach the final value.
<file_sep>#include <math.h>
#include <uWS/uWS.h>
#include <iostream>
#include <string>
#include "json.hpp"
#include "PID.h"
#include "Twiddle.h"
// for convenience
using nlohmann::json;
using std::string;
// Angle helpers: pi plus degree/radian conversions.
constexpr double pi() { return M_PI; }

// Convert an angle in degrees to radians.
double deg2rad(double x) {
  return x * pi() / 180;
}

// Convert an angle in radians to degrees.
double rad2deg(double x) {
  return x * 180 / pi();
}
// Extract the JSON payload from a SocketIO message body.
// Returns the "[...]" substring when present; returns "" when the body
// contains "null" or has no bracketed payload.
string hasData(string s) {
  if (s.find("null") != string::npos) {
    return "";
  }
  const auto open = s.find_first_of("[");
  const auto close = s.find_last_of("]");
  if (open == string::npos || close == string::npos) {
    return "";
  }
  return s.substr(open, close - open + 1);
}
int main() {
uWS::Hub h;
/**
* Parameters
*/
// Parameters to use in PIDs
std::vector<double> steeringParameters = {0.045, 0.005, 0.9};
std::vector<double> speedParameters = {0.33, 0.0027, 0.02};
// Increments for twiddle
std::vector<double> dp = {0.05, 0.001, 0.08};
// Flag to activate twiddle algorithm for parameter tuning
bool isTwiddleActive = false;
/**
* Initialize the PIDs and Twiddle
*/
// Steering PID
PID pid;
// Speed PID
PID speedPID;
// Twiddle
Twiddle twiddle;
// Initialize PIDs
pid.Init(steeringParameters[0], steeringParameters[1], steeringParameters[2]);
speedPID.Init(speedParameters[0], speedParameters[1], speedParameters[2]);
// Initialize Twiddle
twiddle.Init(steeringParameters, dp);
h.onMessage([&pid, &speedPID, &twiddle, &isTwiddleActive](uWS::WebSocket<uWS::SERVER> ws, char *data, size_t length,
uWS::OpCode opCode) {
// "42" at the start of the message means there's a websocket message event.
// The 4 signifies a websocket message
// The 2 signifies a websocket event
if (length && length > 2 && data[0] == '4' && data[1] == '2') {
auto s = hasData(string(data).substr(0, length));
if (s != "") {
auto j = json::parse(s);
string event = j[0].get<string>();
if (event == "telemetry") {
// j[1] is the data JSON object
double cte = std::stod(j[1]["cte"].get<string>());
double speed = std::stod(j[1]["speed"].get<string>());
double angle = std::stod(j[1]["steering_angle"].get<string>());
double steer_value;
/**
* Extra variables for speed
*/
double throttle_value;
double speedValue = 70.0; // 70mph
/**
* Calculate steering value here, remember the steering value is
* [-1, 1].
* NOTE: Feel free to play around with the throttle and speed.
* Maybe use another PID controller to control the speed!
*/
// Update PIDs
pid.UpdateError(cte);
speedPID.UpdateError(speedValue - speed);
steer_value = pid.TotalError();
// Instead of having negative parameters, just multiply by -1 (speed should be positive
// for going forward and PID.TotalError() returns a negative value
throttle_value = -1 * speedPID.TotalError() * (speedValue - speed);
// DEBUG
//std::cout << "CTE: " << cte << " Steering Value: " << steer_value << std::endl;
if (isTwiddleActive && (pid.TotalError() > twiddle.GetBestError())) {
if (speed < speedValue * .1 && twiddle.iteration > 30) {
std::cout << "Car got stuck!" << std::endl;
pid = twiddle.UpdateTwiddle(pid, (double) INT_MAX);
}
else
{
pid = twiddle.UpdateTwiddle(pid, pid.TotalError());
}
twiddle.PrintResults();
std::string msg = "42[\"reset\",{}]";
ws.send(msg.data(), msg.length(), uWS::OpCode::TEXT);
}
else {
json msgJson;
msgJson["steering_angle"] = steer_value;
msgJson["throttle"] = throttle_value;
auto msg = "42[\"steer\"," + msgJson.dump() + "]";
//std::cout << msg << std::endl;
ws.send(msg.data(), msg.length(), uWS::OpCode::TEXT);
}
} // end "telemetry" if
} else {
// Manual driving
string msg = "42[\"manual\",{}]";
ws.send(msg.data(), msg.length(), uWS::OpCode::TEXT);
}
} // end websocket message if
}); // end h.onMessage
h.onConnection([&h](uWS::WebSocket<uWS::SERVER> ws, uWS::HttpRequest req) {
std::cout << "Connected!!!" << std::endl;
});
h.onDisconnection([&h](uWS::WebSocket<uWS::SERVER> ws, int code,
char *message, size_t length) {
ws.close();
std::cout << "Disconnected" << std::endl;
});
int port = 4567;
if (h.listen(port)) {
std::cout << "Listening to port " << port << std::endl;
} else {
std::cerr << "Failed to listen to port" << std::endl;
return -1;
}
h.run();
}<file_sep>#include <iostream>
#include <climits>
#include <vector>
#include "PID.h"
#include "Twiddle.h"
Twiddle::Twiddle() {}
Twiddle::~Twiddle() {}
void Twiddle::Init(std::vector<double> p_, std::vector<double> dp_) {
// Parameters to tune and increments
p = p_;
dp = dp_;
// Current parameter index being trated and best_error
index = 0;
best_err = (double) INT_MAX;
// Flags to activate different cases in the algorithm
isImproving = true;
// Keep track of the number of iterations
iteration = 0;
}
void Twiddle::PrintResults() {
std::cout << "P = [ " << p[0] <<","<< p[1] << "," << p[2] << "]" << std::endl;
std::cout << "DP = [ " << dp[0] <<","<< dp[1] << "," << dp[2] << "]" << std::endl;
std::cout << "Best Error " << best_err << std::endl;
}
PID Twiddle::UpdateTwiddle(PID pid, double err) {
// Increase counter
iteration++;
// If improving
if(err < best_err) {
isImproving = true;
return Twiddle::DidImprove(pid, err);
}
// Try on the other direction
else if (isImproving) {
p[index] -= 2 * dp[index];
pid.Init(p[0], p[1], p[2]);
isImproving = false;
}
// If no improvement
else {
isImproving = true;
return Twiddle::DidNotImprove(pid, err);
}
return pid;
}
PID Twiddle::DidImprove(PID pid ,double err) {
// Update error
best_err = err;
// Update increment
dp[index] *= 1.1;
// Prepare next index for the next iteration
index = (index + 1) % 3;
p[index] += dp[index];
pid.Init(p[0], p[1], p[2]);
return pid;
}
PID Twiddle::DidNotImprove(PID pid ,double err) {
// Return value to original state
p[index]+= dp[index];
// Update increment
dp[index] *= 0.9;
// Prepare next index for the next iteration
index = (index + 1) % 3;
p[index]+= dp[index];
pid.Init(p[0], p[1], p[2]);
return pid;
}
double Twiddle::GetBestError() {
return best_err;
} | 1bed664305fb2188b338f664812ce3acf27105bf | [
"Markdown",
"C++"
] | 4 | C++ | ajimenezjulio/P8_PID_Controller | 829a7c17dc1cd978901f9660db5c1d4411984696 | 46ade94072eb1928023819d1767f85818577ed0e |
refs/heads/master | <file_sep>using UnityEngine;
using System.Collections;
public class BlockTeleporter : BaseBlock {
// Use this for initialization
void Start () {
}
// Update is called once per frame
void Update () {
}
// When this function it checks for how many teleporters
//Are in play, stores them, checks how mant there are then
//moves the ball acordingly.
public void Tele()
{
GameObject[] Teleporters = GameObject.FindGameObjectsWithTag("BT");
int index;
index = Random.Range(0, Teleporters.Length);
if (Teleporters.Length > 2)
{
GameObject.FindGameObjectWithTag("Ball").transform.position = Teleporters[index].transform.position;
}
else if (Teleporters.Length == 2)
{
if (transform.position == Teleporters[0].transform.position)
{
GameObject.FindGameObjectWithTag("Ball").transform.position = Teleporters[1].transform.position;
}
else if (transform.position == Teleporters[1].transform.position)
{
GameObject.FindGameObjectWithTag("Ball").transform.position = Teleporters[0].transform.position;
}
}
}
}
<file_sep>using UnityEngine;
using System.Collections;
using UnityEngine.EventSystems;
public class WinLoseButtons : MonoBehaviour {
public EventSystem LevelEventSystem;
// Use this for initialization
void Start () {
}
// Update is called once per frame
void Update () {
}
public void Main()
{
Application.LoadLevel("Main");
}
public void Replay()
{
Application.LoadLevel("Level-1");
}
public void Quit()
{
Application.Quit();
Debug.Log("Quit");
}
}
<file_sep>using UnityEngine;
using System.Collections;
public class placeblock : MonoBehaviour {
//public RaycastHit hit = new RaycastHit();
public Vector3 mousePosition { get; set; }
// Use this for initialization
void Start () {
}
// Update is called once per frame
void Update () {
}
public void CanPlace( bool place)
{
Ray ray = Camera.main.ScreenPointToRay(Input.mousePosition);
RaycastHit hit;
if (Physics.Raycast (ray, out hit))
{
mousePosition = hit.point;
Debug.DrawLine (Camera.main.transform.position, mousePosition, Color.green);
//left click - place block
if ((Input.GetMouseButtonDown(0) && hit.transform.tag == "GridBlock") == true)
{
}
}
}
}
<file_sep>using UnityEngine;
using System.Collections;
public class BlockSlow : BaseBlock {
// Use this for initialization
void Start () {
}
// Update is called once per frame
void Update () {
}
public void Slow(float MoveSpeed)
{
MoveSpeed = MoveSpeed / 2;
}
}
<file_sep>using UnityEngine;
using System.Collections;
public class BlockGravity : BaseBlock {
// Use this for initialization
void Start () {
}
// Update is called once per frame
void Update () {
}
public void Grav()
{
float speed = 10.0f;
GameObject grav;
grav = GameObject.FindGameObjectWithTag("BG");
float step = speed * Time.deltaTime;
transform.position = Vector3.MoveTowards(transform.position, grav.transform.position, step);
}
}
<file_sep>using UnityEngine;
using System.Collections;
public class BlockLeft : BaseBlock {
// Use this for initialization
void Start () {
}
// Update is called once per frame
void Update () {
}
public void Left()
{
transform.rotation = Quaternion.AngleAxis(90, Vector3.left);
}
}
<file_sep>using UnityEngine;
using System.Collections;
public class Ball : MonoBehaviour {
//make RigidBody
public Rigidbody rb;
//speed of ball
public float moveSpeed = 20.00f;
// Set up the audio
public AudioClip A1;
public AudioClip A2;
public AudioClip B1;
public AudioClip B2;
public AudioClip C1;
public AudioClip C2;
public AudioClip D1;
public AudioClip D2;
public AudioClip E1;
public AudioClip E2;
public AudioClip F1;
public AudioClip F2;
public AudioClip G1;
public AudioClip G2;
// Use this for initialization
void Start () {
// declare the RigidBody
rb = GetComponent<Rigidbody>();
}
// Update is called once per frame
void Update () {
}
void FixedUpdate()
{
rb.MovePosition(transform.position + transform.forward * moveSpeed * Time.deltaTime);
}
//All check and call their block and function
public void WhatBlock(Collider otherObject)
{
if (otherObject.tag == "BG")
{
// calls function in the sound manager to play AudioClip
SoundContoller.Instance.RandomSFX(B1, B2);
// Dont know why it wont call
//Commented out so I could Test Other Things first
//otherObject.GetComponent<BlockGravity>().Grav;
}
else if (otherObject.tag == "BL")
{
// calls function in the sound manager to play AudioClip
SoundContoller.Instance.RandomSFX(C1, C2);
// Dont know why it wont call
//Commented out so I could Test Other Things first
//otherObject.GetComponent<BlockLeft>().Left;
}
else if (otherObject.tag == "BR")
{
// calls function in the sound manager to play AudioClip
SoundContoller.Instance.RandomSFX(D1, D2);
// Dont know why it wont call
//Commented out so I could Test Other Things first
//otherObject.GetComponent<BlockRight>().Right;
}
else if (otherObject.tag == "BT")
{
// calls function in the sound manager to play AudioClip
SoundContoller.Instance.RandomSFX(E1, E2);
// Dont know why it wont call
//Commented out so I could Test Other Things first
//otherObject.GetComponent<BlockTeleport>().Tele;
}
else if (otherObject.tag == "BS")
{
// calls function in the sound manager to play AudioClip
SoundContoller.Instance.RandomSFX(G1, G2);
// Dont know why it wont call
//Commented out so I could Test Other Things first
//otherObject.GetComponent<BlockSlow>().Slow;
}
}
}
<file_sep>using UnityEngine;
// Tell it to use UI
using UnityEngine.UI;
using System.Collections;
//tell it to use event system
using UnityEngine.EventSystems;
public class UIController : MonoBehaviour {
// tracking the time and blocks used
private float finalTime;
private int blocksUsed;
// Timer
private float timeLeft;
public float ballSpeed;
// Texts for UI in level
public Text FinalTime;
public Text BlocksUsed;
public Text TimeLeft;
public Text BallSpeed;
//Text For UI in WinLose
public Text WL;
public Text Time;
//public event system varible
public EventSystem LevelEventSystem;
// Use this for initialization
void Start () {
// Timer
timeLeft = 60.00f;
// Tracking
blocksUsed = 0;
finalTime = 0.00f;
//Texts in UI
SetTextCalls();
}
// Update is called once per frame
void Update () {
ballSpeed = GameObject.FindGameObjectWithTag("Ball").GetComponent<Ball>().moveSpeed;
//Call Timer Function
Timer();
//Has the user clicked the primary mouse button
if (Input.GetMouseButtonDown(0))
{
//Is the cursor over the UI?
if (LevelEventSystem.IsPointerOverGameObject())
{
Debug.Log("Over UI!");
}
else
{
//setup ray based on mouse possition
Ray ray = Camera.main.ScreenPointToRay(Input.mousePosition);
//Perform a raycast to determine what we have hit
RaycastHit hitResults;
if (Physics.Raycast(ray, out hitResults))
{
//retrive the game obgect ray collided with
GameObject hitObject = hitResults.collider.gameObject;
//attempting th retrive platform
Platform platform = hitObject.GetComponent<Platform>();
if (platform != null)
{
platform.ReactToMouseClick();
}
else
{
// Attempting to find platform from parent
platform = hitObject.GetComponentInParent<Platform>();
if (platform != null)
{
platform.ReactToMouseClick();
}
}
}
}
}
}
// Countdown timer - save final thime then load WinLose
public void Timer()
{
//count for blocks
if (Input.GetMouseButtonDown(0))
{
blocksUsed = blocksUsed + 1;
}
//Counters
finalTime += Time.deltaTime;
timeLeft -= Time.deltaTime;
//Texts in UI updates
SetTextCalls();
//Time runn out - end game
if (timeLeft <= 0)
{
PlayerPrefs.SetInt("Blocks Used: ", blocksUsed);
PlayerPrefs.SetFloat("Time: ", finalTime);
Application.LoadLevel("WinLose");
}
}
// Func to call the text lines
public void SetTextCalls()
{
FinalTime.text = "Time: " + finalTime.ToString();
BlocksUsed.text = "Blocks Used: " + blocksUsed.ToString();
TimeLeft.text = "Time Left: " + timeLeft.ToString();
BallSpeed.text = "Ball Speed: " + ballSpeed.ToString();
}
public void WinLose()
{
Time.text = "Time: " + PlayerPrefs.GetFloat("Time: ");
if (PlayerPrefs.GetFloat("Time: ") > 0)
{
WL.text = "WIN";
}
else
{
WL.text = "LOSE";
}
}
}
<file_sep>using UnityEngine;
using System.Collections;
using UnityEngine.Audio;
public class SoundContoller : MonoBehaviour {
public AudioSource Main;
public AudioSource SFX;
public static SoundContoller Instance = null;
public float lowPitchRange = 0.95f;
public float highPitchRange = 1.05f;
// Use this for initialization
void Awake () {
if (Instance == null)
Instance = this;
else if (Instance != null)
Destroy(gameObject);
DontDestroyOnLoad(gameObject);
}
public void PlaySingle (AudioClip clip)
{
SFX.clip = clip;
SFX.Play();
}
public void RandomSFX(params AudioClip[] clips)
{
int RandomIndex = Random.Range(0, clips.Length);
float randomPitch = Random.Range(lowPitchRange, highPitchRange);
SFX.pitch = randomPitch;
SFX.clip = clips[RandomIndex];
SFX.Play();
}
}
<file_sep>using UnityEngine;
using System.Collections;
public class MainButtons : MonoBehaviour {
public void Play()
{
Application.LoadLevel("Level-1");
}
public void Credits()
{
Application.LoadLevel("Credits");
}
public void Close()
{
Application.Quit();
print("Quit");
}
}
<file_sep>using UnityEngine;
using System.Collections;
using UnityEngine.UI;
using UnityEngine.EventSystems;
public class Platform : MonoBehaviour
{
// declaring new objects to tie to script
public GameObject PrefabToSpawn;
public GameObject AnchorPoint;
// Before to make this script is part of the UI and the buttons it's self
// var to check if any blocks are already there
private GameObject AttachedBuilding = null;
// Declare Eventsys
public EventSystem LevelEventSystem;
// Use this for initialization
void Start()
{
}
// Update is called once per frame
void Update()
{
}
public void ReactToMouseClick()
{
// Is there already a building?
if (AttachedBuilding != null)
{
// If Building is there then destroy it and set to null
GameObject.Destroy(AttachedBuilding);
AttachedBuilding = null;
}
else
{
// Spawn the new Prefab
AttachedBuilding = GameObject.Instantiate(PrefabToSpawn);
// Possition the Prefab at the anchor point
AttachedBuilding.transform.position = AnchorPoint.transform.position;
AttachedBuilding.transform.SetParent(AnchorPoint.transform);
}
}
// Calls fow what block to spawn from the button
public void Swap(GameObject Block)
{
PrefabToSpawn = Block;
}
}
<file_sep>using UnityEngine;
using System.Collections;
public class CreditsBack : MonoBehaviour {
public void Back()
{
Application.LoadLevel("Main");
}
}
<file_sep># Physics-based-game
ye
<file_sep>using UnityEngine;
using System.Collections;
public class BlockRight : BaseBlock {
// Use this for initialization
void Start () {
}
// Update is called once per frame
void Update () {
}
public void Right()
{
transform.rotation = Quaternion.AngleAxis(-90, Vector3.left);
}
}
| 8953700927d70b60ca85823f7bd027147f4def09 | [
"Markdown",
"C#"
] | 14 | C# | 1005779/Physics-based-game | 77e2f35e9607d519a1cb93ab234510b2dadc7392 | 945b8c61200b423283573cc205148073299418a0 |
refs/heads/master | <repo_name>DavidBoehm/Templates<file_sep>/README.md
# Templates
A collection of templates I use to start my projects,
<file_sep>/bs_p5_template/assets/js/sketch.js
var myFont;
function preload() {
myFont = loadFont('assets/fonts/quicksand/Quicksand-Bold.otf');
}
function setup(){
frameRate(60);
createCanvas(600, 600);
angleMode(DEGREES)
}
function draw(){
background(0);
// translate(width / 4.1, height / 2);
var hr = hour();
var mn = minute();
var sc = second();
var zero
var zero2
var middle = (width/2, height/2);
minuteZero(mn)
secondZero(sc)
function minuteZero(mn){
if (10 > mn) {
zero = "0";
//console.log("it is");
}
else {
zero = "";
//console.log("it isnt");
}
}
function secondZero(sc){
if (10 > sc) {
zero2 = "0";
//console.log("it is");
}
else {
zero2 = "";
//console.log("it isnt");
}
}
push()
fill(255);
noStroke();
textFont(myFont);
text(hr + ':' + zero + mn + ':' + zero2 + sc, 10, 200);
textSize(20)
pop()
let end1 = map(sc, 0, 60, 0, 360);
let end2 = map(mn, 0, 60, 0, 360);
let end3 = map(hr % 12, 0, 12, 0, 360);
push();
let color = map(sc, 0, 60, 0, 256);
let spin = map(sc, 0, 60, 0, 360);
translate(0 , 600);
rotate(-90);
fill(200,200,200);
stroke(1);
strokeWeight(4);
noFill();
stroke(255);
fill(0, 0, 0, 0,)
ellipse(middle, middle, 322);
stroke(200 ,100, 10);
strokeWeight(20);
push()
arc(middle, middle, 100, 100, 0, end3);
arc(middle, middle, 200, 200, 0, end2);
pop()
stroke(220 , color, pulse);
arc(middle, middle, 300, 300, 0, end1);
pop();
var frequency = 2 * PI / 178;
var pulse = sin(waveSin);
var waveSin = function clockPulse(){
var i = 0;
if (i < 360){ i++ }
else { i--};
console.log(i)
return(i)
}
console.print(i)
}
<file_sep>/p5_template/README.md
#Summary
A p5 template that includes all extra libraries but does not utilize them all.
## Contents
p5
p5.gibber
p5.gui
p5.scenemanager
p5.serialport
p5.play
p5.dimensions
### CDN links
# Issues<file_sep>/bs_p5_template/assets/js/digital.js
var myFont;
function preload() {
myFont = loadFont('assets/fonts/quicksand/Quicksand-Bold.otf');
}
function setup(){
frameRate(60);
createCanvas(600, 600);
angleMode(DEGREES)
rectMode(CENTER)
}
function draw(){
background(0);
var hr = hour();
var mn = minute();
var sc = second();
var zero;
var zero2;
var middle = (width/2, height/2);
var box = 20;
minuteZero(mn)
secondZero(sc)
function minuteZero(mn){
if (10 > mn) {
zero = "0";
//console.log("it is");
}
else {
zero = "";
//console.log("it isnt");
}
}
function secondZero(sc){
if (10 > sc) {
zero2 = "0";
//console.log("it is");
}
else {
zero2 = "";
//console.log("it isnt");
}
}
push()
fill(255);
noStroke();
textFont(myFont);
text(hr + ':' + zero + mn + ':' + zero2 + sc, 10, 200);
textSize(20)
pop()
let end1 = map(sc, 0, 60, 0, 60);
let end2 = map(mn, 0, 60, 0, 360);
let end3 = map(hr % 12, 0, 12, 0, 360);
rect(box, sc*box, box, box)
rect(box*2, mn*box, box, box)
rect(box*3, hr*box, box, box)
}
| 9704b34fe907c3d6a5b0c4d1d2ed0ae4bbf59446 | [
"Markdown",
"JavaScript"
] | 4 | Markdown | DavidBoehm/Templates | 57448775cf7616fc8f65c735d66d716a9f2d05e6 | f8cd1d45ab8aa9f6488cf4f9de38f829069f90b9 |
refs/heads/master | <repo_name>rygei8/DailyProgrammer<file_sep>/README.md
DailyProgrammer
===============
Programming challenges found on the /r/dailyprogrammer subreddit.
<file_sep>/HexTo8x8Bitmap/main.cpp
// Project: Zoom, Rotate, Invert Hex to Bitmap Picture
// Author: <NAME>
// Date: 7/20/14
#include <iostream>
#include "BitmapPicture.h"
using namespace std;
int main()
{
// Testing
BitmapPicture test;
// Initialize Bitmap
test.SetHexToBitmap( "18 3c 7E 7e 18 18 18 18" );
cout << test.GetBitmap() << endl << endl;
// Zoom 2x
test.ZoomIn();
cout << test.GetBitmap() << endl << endl;
// Zoom 4x
test.ZoomIn();
cout << test.GetBitmap() << endl << endl;
// Attempt to Zoom 8x
test.ZoomIn();
// Zoom out twice
test.ZoomOut();
cout << test.GetBitmap() << endl << endl;
test.ZoomOut();
cout << test.GetBitmap() << endl << endl;
// Rotate cw, then ccw
test.Rotate( true );
cout << test.GetBitmap() << endl << endl;
test.Rotate( false );
cout << test.GetBitmap() << endl << endl;
// Invert
test.Invert();
cout << test.GetBitmap() << endl << endl;
return 0;
}<file_sep>/HexTo8x8Bitmap/BitmapPicture.cpp
// BitmapPicture.cpp - BitmapPicture class implementation
// <NAME>
#pragma once
#include "BitmapPicture.h"
#include <vector>
// init constructor
BitmapPicture::BitmapPicture( string hex )
{
SetHexToBitmap( hex );
Zoom = 1;
}
// accept a string of hexadecimal input and convert it into a bitmap
void BitmapPicture::SetHexToBitmap( string hex )
{
Bitmap = "";
for( unsigned int i = 0; i < hex.length(); i++ )
{
hex[i] = toupper( hex[i] );
switch( hex[i] )
{
case '0': Bitmap += " "; break;
case '1': Bitmap += " x"; break;
case '2': Bitmap += " x "; break;
case '3': Bitmap += " xx"; break;
case '4': Bitmap += " x "; break;
case '5': Bitmap += " x x"; break;
case '6': Bitmap += " xx "; break;
case '7': Bitmap += " xxx"; break;
case '8': Bitmap += "x "; break;
case '9': Bitmap += "x x"; break;
case 'A': Bitmap += "x x "; break;
case 'B': Bitmap += "x xx"; break;
case 'C': Bitmap += "xx "; break;
case 'D': Bitmap += "xx x"; break;
case 'E': Bitmap += "xxx "; break;
case 'F': Bitmap += "xxxx"; break;
case ' ': Bitmap += "\n"; break;
default: break;
}
}
}
// Zoom in to a max of x4 by duplicating bits horizontally and vertically
void BitmapPicture::ZoomIn()
{
// verify Zoom is x1 or x2
if( Zoom == 1 || Zoom == 2 )
{
// predetermine bitmap length, as it will be changing in loop
int BitmapLength = Bitmap.length();
// traverse through bitmap via i, use bitPos to keep track of changing position in bitmap, use endLine to keep track of end of prev line
for( int i = 0, bitPos = 0, endLine = 0; i < BitmapLength; i++ )
{
if( Bitmap[bitPos] != '\n' )
{
// duplicate each bit
Bitmap.insert( bitPos + 1, Bitmap.substr( bitPos, 1 ) );
// adjust bitPos accordingly
bitPos += 2;
}
// if we hit end of line
else if( Bitmap[bitPos] == '\n' )
{
// duplicate the line
Bitmap.insert( bitPos + 1, Bitmap.substr( endLine, bitPos - endLine + 1 ) );
// adjust bitPos (must account for entirely new added line), endLine
bitPos += bitPos - endLine + 2;
endLine = bitPos;
}
}
// adjust Zoom
Zoom = Zoom * 2;
}
// notify user of error
else
{
cout << "Sorry, you can only zoom in to x4" << endl << endl;
}
}
// Zoom out after zooming in
void BitmapPicture::ZoomOut()
{
// verify Zoom is x2 or x4
if( Zoom == 2 || Zoom == 4 )
{
// traverse through bitmap via i, use endLine to keep track of end of prev line
for( unsigned int i = 0, endLine = 0; i < Bitmap.length(); i++ )
{
if( Bitmap[i] != '\n' )
{
// erase duplicate bit
Bitmap.erase( i + 1, 1 );
}
// if we hit end of line
else if( Bitmap[i] == '\n' )
{
// erase next line, length will be (i - endLine)*2 + 1 to account for zoomed out length and '\n' character
Bitmap.erase( i + 1, ( i - endLine ) * 2 + 1 );
// adjust endLine
endLine = i + 1;
}
}
// adjust Zoom
Zoom = Zoom/2;
}
// notify user of error
else
{
cout << "Sorry, you cannot zoom out any further" << endl << endl;
}
}
// Rotate 90 degrees - cw if bool passed is true, ccw otherwise (assumes bitmap is square)
void BitmapPicture::Rotate( bool cw )
{
// vector to capture each line of Bitmap
vector<string> lines;
// keep track of end of prev line
int endLine = 0;
// traverse through Bitmap
for( unsigned int i = 0; i < Bitmap.length(); i++ )
{
// check if at the end of a line
if( Bitmap[i] == '\n' )
{
// add each line to the vector, not including '\n'
lines.push_back( Bitmap.substr( endLine, i - endLine ) );
// adjust endLine
endLine = i + 1;
}
// also must check if at end of last line
else if( i == Bitmap.length() - 1 )
{
// you are adding final line one iteration earlier than other lines, so you must add 1 to substring length
lines.push_back( Bitmap.substr( endLine, i - endLine + 1 ) );
}
}
// clear Bitmap
Bitmap = "";
// each line of the Bitmap must be recreated
for( unsigned int i = 0; i < lines.size(); i ++ )
{
// line length
int lineLength = lines[i].length();
// must traverse through each bit of each line
for( int j = lineLength - 1; j >= 0; j-- )
{
// rotate clockwise
if( cw )
{
// starting in bottom left with first bit of last line, traverse up through each line, then right to next bit
Bitmap += lines[j][i];
}
// rotate counter-clockwise
else
{
// starting in top right with last bit of first line, traverse down through each line, then left to next line
Bitmap += lines[lineLength - j - 1][lineLength - i - 1];
}
}
Bitmap += '\n';
}
}
// Flip the bits in the bitmap, turning on those that were off and vice versa
void::BitmapPicture::Invert()
{
for( unsigned int i = 0; i < Bitmap.length(); i++ )
{
// if there is an x, replace it with a space
if( Bitmap[i] == 'x' )
{
Bitmap[i] = ' ';
}
// if there is a space, replace it with an x
else if( Bitmap[i] == ' ' )
{
Bitmap[i] = 'x';
}
}
}<file_sep>/HexTo8x8Bitmap/BitmapPicture.h
// BitmapPicture.h - BitmapPicture class declaration
// <NAME>
#pragma once
#include <iostream>
#include <string>
using namespace std;
class BitmapPicture
{
private:
string Bitmap;
int Zoom;
public:
// Default/Init constructor, Destructor
BitmapPicture() { Zoom = 1; }
BitmapPicture( string hex );
~BitmapPicture() {}
// Set/Get Bitmap methods
void SetHexToBitmap( string hex );
string GetBitmap() { return Bitmap; }
// Zoom in to a max of 4x by duplicating bits horizontally and vertically
void ZoomIn();
// Zoom out after zooming in
void ZoomOut();
// Rotate 90 degrees - cw if bool passed is true, ccw otherwise (assumes bitmap is square)
void Rotate( bool cw );
// Flip the bits in the bitmap, turning on those that were off and vice versa
void Invert();
}; | 0b9507c1398eef968ae0c7f8ef908b3d72f168a5 | [
"Markdown",
"C++"
] | 4 | Markdown | rygei8/DailyProgrammer | 0afd2f10c223406ff4d6f14f74ca7bfeb03537eb | d00ab5702abf37c5ce40b77236d6782f874e9c79 |
refs/heads/master | <repo_name>n19012/graduation-task<file_sep>/プロトタイプ.py
import tkinter
import tkinter.filedialog
def load_text():
typ = [("Text", "*.txt"), ("Python", "*.py")]
fn = tkinter.filedialog.askopenfilename(filetypes=typ)
if fn != "":
f = None
try:
f = open(fn, 'r', encoding="utf-8")
te.delete("1.0", "end")
te.insert("1.0", f.read())
except:
f = open(fn, 'r', encoding="shift-jis")
te.delete("1.0", "end")
te.delete("1.0", f.read())
finally:
if f != None:
f.close()
def save_text():
typ = [("Text", "*.txt")]
fn = tkinter.filedialog.asksaveasfilename(filetypes=typ)
if fn != "":
if fn[-4:] != ".txt":
fn = fn + ".txt"
with open(fn, 'w', encoding="utf-8") as f:
f.write(te.get("1.0", "end-1c"))
def col_black():
te.configure(bg="black", fg="white", insertbackground="white")
def col_white():
te.configure(bg="white", fg="black", insertbackground="black")
root = tkinter.Tk()
root.title("テキストエディタ")
fr = tkinter.Frame()
fr.pack(expand=True, fill=tkinter.BOTH)
te = tkinter.Text(fr, width=80, height=30)
sc = tkinter.Scrollbar(fr, orient=tkinter.VERTICAL, command=te.yview)
te.pack(expand=True, fill=tkinter.BOTH)
te["yscrollcommand"] = sc.set
mbar = tkinter.Menu()
mcom = tkinter.Menu(mbar, tearoff=0)
mcom.add_command(label="読み込み", command=load_text)
mcom.add_separator()
mcom.add_command(label="書き込み", command=save_text)
mbar.add_cascade(label="ファイル", menu=mcom)
mcom2 = tkinter.Menu(mbar, tearoff=0)
mcom2.add_command(label='黒', command=col_black)
mcom2.add_command(label='白', command=col_white)
mbar.add_cascade(label="背景色", menu=mcom2)
root["menu"] = mbar
root.mainloop()
<file_sep>/README.md
# graduation-task
## メンバー
n19012 永吉功和
n19002 儀間和音
## プロダクト
#### テキストエディタ
ファイルダイアログを使ってファイルの読み込みや書き込みを容易にする
背景を白黒すぐ簡単に切り替える機能をつけてその時の環境に応じた視認性を確保する
半角カタカナを自動で全角に変換する機能をつける
自動更新機能をつけてパソコンの急なトラブルなどに備える
テキストエディタ内で計算できるようにする
## 概要
PCを使う環境が安定しない営業マンなどがその場の環境に合わせて見やすくできる
古い文章ファイルに使われてる半角カタカナを全角に変換して統一できる
家計簿を作る際に四則演算機能でより利便性をあげることができる
## スケジュール
#### 承認予定日
9月24日
#### 予備調査終了日
10月
#### 実装開始日
11月後半~12月中旬
#### プレゼン作成
12月後半~
| e39f0e220e49cf482982aad55c288c266660695f | [
"Markdown",
"Python"
] | 2 | Python | n19012/graduation-task | fa8a7c20d79540842bdeb755842077923ebbaa1b | 4c0e6162c54c46d944173bb68377db262b9a1be5 |
refs/heads/master | <repo_name>magnusp/ghstats<file_sep>/src/main/java/se/fortnox/ghstats/User.java
package se.fortnox.ghstats;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.time.LocalDateTime;
public class User {
private String name;
@JsonProperty("dateOfBirth")
public LocalDateTime getDob() {
return dob;
}
private LocalDateTime dob;
public User(String name) {
this.name = name;
this.dob = LocalDateTime.now();
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
<file_sep>/src/main/java/se/fortnox/ghstats/SimpleRepo.java
package se.fortnox.ghstats;
import org.springframework.data.r2dbc.repository.query.Query;
import org.springframework.data.repository.reactive.ReactiveCrudRepository;
import reactor.core.publisher.Mono;
public interface SimpleRepo extends ReactiveCrudRepository<Simple, Long> {
@Query("SELECT 'hello' AS value")
Mono<Simple> doQuery();
}
<file_sep>/src/test/java/se/fortnox/ghstats/MainTests.java
package se.fortnox.ghstats;
import io.fabric8.mockwebserver.DefaultMockServer;
import org.assertj.core.api.Assertions;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.test.context.TestPropertySource;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.test.web.reactive.server.WebTestClient;
import reactor.test.StepVerifier;
import se.fortnox.ghstats.github.GithubRepo;
import se.fortnox.ghstats.github.GithubUser;
import java.util.Arrays;
@RunWith(SpringRunner.class)
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
@TestPropertySource(properties = {"web-client.mapped-resource.GithubResource.base-url=http://localhost:20222", "web-client.default-url=http://localhost:20222"})
public class MainTests {
@SuppressWarnings("SpringJavaInjectionPointsAutowiringInspection")
@Autowired
private WebTestClient webTestClient;
@Autowired
UserResource userResource;
@Before
public void setup() {
GithubRepo githubRepo = new GithubRepo();
githubRepo.setName("hej");
GithubUser githubUser = new GithubUser("Test user");
DefaultMockServer server = new DefaultMockServer();
server
.expect()
.get()
.withPath("/users/mock")
.andReturn(200, githubUser)
.withHeader(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_VALUE)
.always();
server
.expect()
.get()
.withPath("/users/mock/repos")
.andReturn(200, Arrays.asList(githubRepo))
.withHeader(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_VALUE)
.always();
try {
// TODO Expectations should be set up per test
server.start(20222);
} catch (RuntimeException ex) {
// NOOP
}
}
@Test
public void canRequest() {
webTestClient.get()
.uri("/api/users/mock")
.accept(MediaType.APPLICATION_JSON_UTF8)
.exchange()
.expectBody(GithubUser.class)
.value(returnedGithubUser -> Assertions.assertThat(returnedGithubUser.getName()).isNotEmpty());
}
@Test
public void canCall() {
StepVerifier
.create(userResource.describe("mock"))
.expectSubscription()
.consumeNextWith(githubUser -> Assertions.assertThat(githubUser.getName()).isEqualTo("Test user"))
.verifyComplete();
}
}
| eccf045ad97a97ca2c127aa8eca0b3b931d8fdeb | [
"Java"
] | 3 | Java | magnusp/ghstats | 3a33d31bc427c3f8382eb28a303908d1613eeaef | c9308408909fdecd4eccc1ff7a2e26d1e3e99bb1 |
refs/heads/master | <file_sep>__author__ = 'iluki_000'
| fb294a9ce1f7b83950dcf648caf4fa5b7333019b | [
"Python"
] | 1 | Python | Madog000/test | a832a5cccb1350bc71348a044cb30a7b4589c269 | 41e95625b46b5333fb2d028b1658b3c2241e289f |
refs/heads/master | <repo_name>cjluzzl/harbor<file_sep>/scripts/actions.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# File: WeiboBot\actions.py
# Author: MingshiCai <EMAIL>
# Date: 2019-12-18 00:59:34
import json
import os
import re
import shutil
from os import getenv
from requests import get, Session
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from time import sleep
from marshaller import Marshaller
class Actions:
    """Class for weibo actions.

    A stateful facade over a selenium webdriver (for login/navigation) plus a
    requests ``Session`` (for the JSON APIs). You must call `init` and
    `login` first, and `set_user_credentials` / `set_cookies` before any of
    the API helpers.
    """

    driver = None  # set default driver to get intelisense
    base_url = 'https://m.weibo.com'
    # Profile feed endpoint. The first placeholder (container id) is filled
    # once by set_user_credentials; the remaining one takes paging params.
    weibo_api = (
        "https://m.weibo.cn/api/container/getIndex?containerid={}"
        "-_WEIBO_SECOND_PROFILE_WEIBO{}"
    )  # &page_type=03&page={}
    comment_api = (
        "https://m.weibo.cn/comments/hotflow"
        "?id={}&mid={}&max_id_type=0"  # mid from card
    )
    user_info_api = (
        "https://m.weibo.cn/profile/info?uid={}"
    )
    likes_api = (
        "https://m.weibo.cn/api/attitudes/show?id={}&page=1"  # mid
    )
    repost_api = (
        "https://m.weibo.cn/api/statuses/repostTimeline?id={}&page=1"  # mid
    )
    container_id = ''
    uid = ''
    driver_path = getenv('CHROMEDRIVER_PATH', './chromedriver.exe')
    session = Session()

    @classmethod
    def find_css(cls, selector, single=True):
        """Find element by css selector.
        Args:
            selector: (str) css selector
            single: (boolean) whether select single or multiple elements
        Returns:
            (Element or [Element, ])
        """
        if single:
            return cls.driver.find_element_by_css_selector(selector)
        return cls.driver.find_elements_by_css_selector(selector)

    @classmethod
    def find_xpath(cls, xpath, single=True):
        """Find element(s) by xpath.
        Args:
            xpath: (str) xpath
            single: (boolean) whether select single or multiple elements
        Returns:
            (Element or [Element, ])
        """
        if single:
            return cls.driver.find_element_by_xpath(xpath)
        # Fixed: the multi-element branch previously called
        # find_element_by_xpath as well, so single=False still returned
        # one element instead of a list.
        return cls.driver.find_elements_by_xpath(xpath)

    @classmethod
    def wait(cls, seconds=1):
        """Sleep for *seconds*; chainable."""
        sleep(seconds)
        return cls

    @classmethod
    def init(cls, directory, headless=False):
        """Init weibo bot.
        Args:
            directory: (str) driver default download directory
            headless: (boolean) whether run selenium in headless mode
        """
        cls.directory = directory
        chromeOptions = webdriver.ChromeOptions()
        chromeOptions.add_experimental_option(
            "prefs", {
                "download.default_directory": cls.directory,
                "download.prompt_for_download": False,
                "download.directory_upgrade": True, "safebrowsing.enabled": True
            })
        if headless:
            chromeOptions.add_argument("headless")
            chromeOptions.add_argument('window-size=1200x800')
        cls.driver = webdriver.Chrome(
            cls.driver_path, chrome_options=chromeOptions
        )
        return cls

    @classmethod
    def login(cls, loginname, password):
        """Login through the mobile site form.
        Args:
            loginname: (str)
            password: (str)
        """
        cls.driver.get(cls.base_url)
        sleep(2)
        cls.find_xpath("//a[contains(text(),'登录/注册')]").click()
        sleep(2)
        cls.find_xpath("//a[contains(text(),'用帐号密码登录')]").click()
        sleep(2)
        cls.find_xpath("//input[@id='loginName']").clear()
        cls.find_xpath("//input[@id='loginName']").send_keys(loginname)
        cls.find_xpath('//*[@id="loginPassword"]').clear()
        # Fixed: the committed code contained a redaction artifact
        # ``send_keys(<PASSWORD>)``, which is a syntax error; the actual
        # argument is the ``password`` parameter.
        cls.find_xpath('//*[@id="loginPassword"]').send_keys(password)
        cls.find_xpath("//a[@id='loginAction']").click()
        sleep(2)
        return cls

    @classmethod
    def set_user_credentials(cls):
        """Set container id and uid by navigating to the profile page."""
        cls.click_btn_profile().wait()
        cls.uid = re.findall(r'\d+', cls.driver.current_url)[0]
        cls.scroll_to_bottom().click_btn_view_all().wait(2)
        cls.container_id = re.findall(r'\d+_', cls.driver.current_url)[0]
        # Bake the container id into the feed URL template once and for all.
        cls.weibo_api = cls.weibo_api.format(cls.container_id, '{}')
        return cls

    @classmethod
    def set_cookies(cls):
        """Set session cookies with webdriver's current cookies."""
        cls.session.cookies.clear()
        for cookie in cls.driver.get_cookies():
            cls.session.cookies.set(cookie['name'], cookie['value'])
        return cls

    @classmethod
    def patch_post_extra_info(cls, post_item):
        """Patch reposts, likes and comments data onto a post item.
        Args:
            post_item: (dict) marshalled post, or a falsy value
        Returns:
            (dict or None)
        """
        if not post_item:
            return None
        mid = post_item['id']
        for key, api, extra_mid in [
            ['comments', cls.comment_api, True],
            ['likes', cls.likes_api, False],
            ['reposts', cls.repost_api, False]
        ]:
            # Fixed: ``api.format(*(mid, mid) if extra_mid else mid)`` starred
            # the *string* itself in the else branch, unpacking it into single
            # characters so only the first digit of the id reached the URL.
            url = api.format(mid, mid) if extra_mid else api.format(mid)
            post_item[key] = cls.session.get(url).json()
        return post_item

    @classmethod
    def download_original_image(cls, urls, mid):
        """Download and save images of one weibo.
        Args:
            urls: ([str, ]) image urls list
            mid: (str) weibo mid (used as the subdirectory name)
        """
        pic_dir = './download/users/{}/images/{}'.format(cls.uid, mid)
        if not os.path.exists(pic_dir):
            os.makedirs(pic_dir)
        for pic_index, url in enumerate(urls):
            res = cls.session.get(url, stream=True)
            pic_suffix = url.split('.')[-1]
            with open(
                '{}/{}.{}'.format(pic_dir, pic_index, pic_suffix), 'wb'
            ) as pic_file:
                # Stream the body straight to disk without decoding.
                res.raw.decode_content = True
                shutil.copyfileobj(res.raw, pic_file)

    @classmethod
    def get_posts_single_page(cls, page_index=None):
        """Fetch, enrich and cache all posts of a single feed page.
        Args:
            page_index: (int) page index; None means the first page
        Returns:
            [dict, ]
        """
        suffix = '&page_type=03&page={}'.format(
            page_index) if page_index else ''
        res = cls.session.get(cls.weibo_api.format(suffix))
        cards = res.json()['data']['cards']
        posts = []
        for card in cards:
            post = cls.patch_post_extra_info(Marshaller.mobile_card(card))
            if post:
                if post['pic_num'] > 0:
                    pic_urls = [item['large']['url'] for item in post['pics']]
                    cls.download_original_image(pic_urls, post['id'])
                posts.append(post)
        cls.cache_posts(posts, page_index)
        # Throttle between pages to avoid rate limiting.
        cls.wait(10)
        return posts

    @classmethod
    def cache_posts(cls, posts, page_index):
        """Save one page of posts locally as JSON.
        Args:
            posts: ([dict, ]) list of posts
            page_index: (int)
        """
        with open(
            './download/users/{}/posts/{}.json'.format(cls.uid, page_index),
            'w+', encoding='UTF8'
        ) as json_file:
            json.dump(posts, json_file, ensure_ascii=False)

    @classmethod
    def click_btn_profile(cls):
        """Click the nav btn to go to the homepage; chainable."""
        cls.find_css('.nav-left').click()
        return cls

    @classmethod
    def all_weibos_count(cls):
        """Return the logged-in user's total weibo count from the info API."""
        return int(cls.session.get(
            cls.user_info_api.format(cls.uid)
        ).json()['data']['user']['statuses_count'])

    @classmethod
    def click_btn_view_all(cls):
        """Go to the all-weibos page; chainable."""
        cls.find_css('.lite-btn-more').click()
        return cls

    @classmethod
    def scroll_to_bottom(cls):
        """Scroll the page to the bottom (triggers lazy loading); chainable."""
        cls.driver.execute_script(
            "window.scrollTo(0, document.body.scrollHeight)"
        )
        return cls
<file_sep>/scripts/README.md
# 微博备份脚本
可以保存这些内容:
- 文本
- 赞、转发、评论
- 日期、来自
- 图片、视频
## 依赖
- [chromedriver](https://sites.google.com/a/chromium.org/chromedriver/getting-started)
## 用法
Powershell:
```powershell
$env:WEIBO_USERNAME = [USERNAME]; $env:WEIBO_PASSWORD = [PASSWORD]; python3 weibo_bot.py
```
Bash:
```bash
WEIBO_USERNAME='[USERNAME]' WEIBO_PASSWORD='[PASSWORD]' python3 weibo_bot.py
```
<file_sep>/scripts/README_en.md
# Script to backup weibo
A project for backuping own weibo content, including:
- fulltext
- reposts
- comments
- likes
- meta info
- original images
- videos
## Requirements
- [chromedriver](https://sites.google.com/a/chromium.org/chromedriver/getting-started) should be installed
## Usage
Powershell:
```powershell
$env:WEIBO_USERNAME = [USERNAME]; $env:WEIBO_PASSWORD = [PASSWORD]; python3 weibo_bot.py
```
Bash:
```bash
WEIBO_USERNAME='[USERNAME]' WEIBO_PASSWORD='[PASSWORD]' python3 weibo_bot.py
```
<file_sep>/harbor/utils.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# File: harbor/utils.py
# Author: MingshiCai <EMAIL>
# Created Date: 2019-08-06 10:28:04
# ----
# Last Modified:
# Modified By:
# ----
# Copyright (c) 2019 MingshiCai <EMAIL>
import re
from arrow import get
from uuid import uuid1
from harbor.constant import DIRTY_PATTERN, FILTER_PATTERNS
def uuid():
    """Return a freshly generated time-based (UUID1) identifier as a plain string."""
    generated = uuid1()
    return str(generated)
def excerpt(given_str):
    """Return the first non-word-delimited token of *given_str* plus "...".

    Strings of length four or less are considered too short to excerpt and
    yield ``None``.
    """
    if len(given_str) <= 4:
        return None
    first_token = re.split(r'[\W]', given_str)[0]
    return first_token + '...'
def get_milliseconds(time=None):
    """Return *time* (a datetime-like moment; now when omitted) as epoch milliseconds.

    Args:
        time: `datetime.datetime` or anything ``arrow.get`` accepts
    Return:
        an `int`
    """
    moment = get(time or get())
    # NOTE(review): arrow < 1.0 exposes ``timestamp`` as an int attribute; on
    # arrow >= 1.0 it is a method and this expression would need
    # ``int(moment.timestamp())`` — confirm the pinned arrow version.
    return moment.timestamp * 1000 + int(moment.format('SSS'))
def filter_words(given_str):
    """Remove every blacklisted word in ``FILTER_PATTERNS`` from *given_str*.

    Fixed: the previous body passed the whole ``FILTER_PATTERNS`` *list* as
    the single pattern argument of ``purified_string`` (and thus ``re.sub``),
    which raises ``TypeError`` — ``re.sub`` takes one pattern, so the
    blacklist patterns must be applied one at a time.
    """
    result = given_str
    for pattern in FILTER_PATTERNS:
        result = purified_string(result, pattern)
    return result
def https_and_large(given_url):
    """Upgrade an http:// URL to https://.

    Falsy inputs (None, empty string) are passed through as ``None``.
    TODO: large pic require login/token
    """
    if not given_url:
        return None
    return given_url.replace('http://', 'https://')
def purified_string(given_str, pattern=DIRTY_PATTERN):
    """Remove every match of *pattern* from *given_str*.

    The default pattern (``DIRTY_PATTERN``) strips literal "\\n" sequences
    and all whitespace runs.
    """
    cleaned = re.sub(pattern, '', given_str)
    return cleaned
<file_sep>/harbor/models/base.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# File: models/base.py
# Author: MingshiCai <EMAIL>
# Created Date: 2019-08-06 10:32:15
# ----
# Last Modified:
# Modified By:
# ----
# Copyright (c) 2019 MingshiCai <EMAIL>
from peewee import SqliteDatabase, Model
# Single on-disk SQLite database shared by every model in the package.
db = SqliteDatabase('weibo.db')


class BaseModel(Model):
    """Common peewee base class binding all models to the ``weibo.db`` database."""

    class Meta:
        database = db
<file_sep>/harbor/marshallers/factory.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# File: marshaller/factory.py
# Author: MingshiCai <EMAIL>
# Created Date: 2019-08-06 10:34:37
# ----
# Last Modified:
# Modified By:
# ----
# Copyright (c) 2019 MingshiCai <EMAIL>
from typing import NewType
from harbor.utils import (
uuid, get_milliseconds, excerpt, https_and_large, purified_string
)
from harbor.models.post import Post
# Annotation alias for Post ("this doesn't work yet" per the original author —
# NewType over a peewee model adds no real checking here).
PostItem = NewType('Post', Post)  # this doesn't work yet.


class Marshaller:
    """Post marshaller base class.

    Accumulates ``Post`` model instances via :meth:`marshall` and renders an
    importable export document for the selected blog engine via
    :attr:`result`. Only the Ghost 2.x format is implemented.
    """

    # Names accepted by __init__; each needs matching `_<name>` and
    # `_<name>_json` methods on this class.
    available_marshallers = ['ghost']
    # Ghost export-format version stamped into the document's meta block.
    ghost_version = '2.14.0'

    def __init__(self, marshaller):
        """Init
        Args:
            marshaller: `str` marshaller name (one of ``available_marshallers``)
        Raises:
            KeyError: for an unknown marshaller name.
        """
        if marshaller not in self.available_marshallers:
            raise KeyError('Given marshaller key is not available.')
        self._marshaller = marshaller
        self._post_id = 100  # to avoid duplicated entry, set with a big num.
        self._results = []
        self._posts = []
        self._json = {}

    @property
    def result(self):
        """Marshalled dict obj.

        Builds the engine-specific envelope around the accumulated posts.
        Return:
            a dict
        """
        getattr(self, '_{}_json'.format(self._marshaller))()
        return self._json

    @property
    def posts(self):
        """The accumulated, already-marshalled posts.
        Return:
            a list of dict
        """
        return self._posts

    def marshall(self, post_obj: PostItem):
        """Marshall post object by dispatching to the engine-specific method.
        Args:
            post_obj: `models.Post`
        """
        getattr(self, '_{}'.format(self._marshaller))(post_obj)

    def _ghost_json(self):
        """JSON marshaller for ghost.

        Wraps ``self._posts`` into a Ghost import document with a single
        '微博' tag and one synthetic user.
        """
        tag_id = 2
        user_id = 99
        weibo_user_name = 'Latina_XXX'  # customize
        # NOTE(review): posts are created with author_id/created_by = 1 in
        # `_ghost`, not this user_id (99) — confirm which id the target Ghost
        # instance actually expects.
        self._json = {
            'meta': {
                'exported_on': get_milliseconds(),
                'version': self.ghost_version
            },
            'data': {
                'posts': self._posts,
                'tags': [{
                    'id': tag_id,
                    'name': '微博',
                    'slug': 'weibo',
                    'description': ''  # customize it if necessary
                }],
                'posts_tags': [
                    {'tag_id': tag_id, 'post_id': post_item['id']}
                    for post_item in self._posts
                ],
                'users': [{
                    "id": user_id,
                    "name": weibo_user_name,
                    "slug": '_'.join(weibo_user_name.split(' ')).lower(),
                    "email": "<EMAIL>",
                    "profile_image": None,
                    "cover_image": None,
                    "bio": None,
                    "website": None,
                    "location": None,
                    "accessibility": None,
                    "meta_title": None,
                    "meta_description": None,
                    "created_at": get_milliseconds(),
                    "created_by": user_id,
                    "updated_at": get_milliseconds(),
                    "updated_by": user_id
                }],
            }
        }

    def _ghost(self, post_obj: PostItem):
        """Marshaller for Ghost blog engine: append one post dict.
        Args:
            post_obj: `models.Post`
        tags, post_tags, posts, users :: content, date, img_src, visibility
        """
        temp = {
            'id': self._post_id,
            'title': excerpt(post_obj.content) or '无题',
            'slug': '/weibo-{}'.format(uuid()),
            # Minimal mobiledoc document with the purified content as a
            # single paragraph section.
            'mobiledoc': (
                "{\"version\":\"0.3.1\",\"atoms\":[],\"cards\":[],"
                "\"markups\":[],\"sections\":[[1,\"p\","
                "[[0,[],0,\"" + purified_string(post_obj.content) + "\"]]]]}"
            ),
            'plaintext': post_obj.content,
            'feature_image': https_and_large(post_obj.img_src),
            "page": 0,
            "featured": 0,
            "status": "published",
            "published_at": get_milliseconds(post_obj.time),
            "published_by": 1,
            "meta_title": None,
            "meta_description": None,
            "author_id": 1,
            "created_at": get_milliseconds(post_obj.time),
            "created_by": 1,
            "updated_at": get_milliseconds(post_obj.time),
            "updated_by": 1
        }
        self._posts.append(temp)
        self._post_id += 1
<file_sep>/harbor/spider/main.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# File: spider/main.py
# Author: MingshiCai <EMAIL>
# Created Date: 2019-08-06 10:36:40
# ----
# Last Modified:
# Modified By:
# ----
# Copyright (c) 2019 MingshiCai <EMAIL>
import logging
import json
import re
from arrow import get as ar
from bs4 import BeautifulSoup as BS
from requests import get
from tqdm import tqdm
from time import sleep
from harbor.constant import (
URL_TPL, HEADER_PATH_TPL, REFERER_URL_TPL, LIKE_NUM_PATTERN,
COMMENT_NUM_PATTERN, REPO_NUM_PATTERN, REPO_TEST_PATTERN, TIME_PATTERN,
DATE_FMTS, SOURCE_DEVICE_PATTERN, VISIBILITY_PATTERN
)
from harbor.marshallers.factory import Marshaller
from harbor.models.attachment import Attachment
from harbor.models.post import Post
# Module-level logger for crawl diagnostics.
LOGGER = logging.getLogger(__name__)


class Spider:
    """A spider that does everything.

    Crawls the mobile weibo profile pages of one user, parses each post,
    persists it through the peewee models and feeds it to a ``Marshaller``
    for the final JSON export.

    Not an abstract factory.
    """

    def __init__(self, headers, uid, marshaller_name):
        # A cookie is mandatory: weibo.cn serves nothing to anonymous clients.
        if not headers['Cookie']:
            raise KeyError("Empty cookie.")
        self._all_page_num = None
        self._all_post_num = 0
        # Year used to complete "MM月DD日" dates, which omit the year.
        self._current_year = ar().year
        self._current_page = 0
        self._current_html = None
        self._post_item = None
        self._attachment_item = None
        self._uid = uid
        self._headers = headers
        self._marshaller = Marshaller(marshaller_name)

    @property
    def url(self):
        """Dynamically get url and `_current_page` auto increment.
        """
        self._current_page += 1
        return URL_TPL.format(self._uid, self._current_page)

    @property
    def headers(self):
        """Dynamically get `:path` and `referer` header.
        """
        self._headers['path'] = HEADER_PATH_TPL.format(
            self._uid, self._current_page)
        self._headers['referer'] = REFERER_URL_TPL.format(
            self._uid, self._current_page - 1
        )
        return self._headers

    @property
    def has_post(self):
        # More pages remain while the counter is below the known total.
        return True if self._current_page < self._all_page_num else False

    def _set_all_page_num(self):
        """Initialize all page num.

        Reads "微博[N]" (post count) and "/M页" (page count) off the first
        profile page.
        """
        res = get(self.url, headers=self.headers)
        post_num = re.findall(r'微博\[(\d+)\]', res.text)[0]
        page_num = re.findall(r'\/(\d+)页', res.text)[0]
        # The `url` property above incremented the page counter; undo that so
        # the crawl still starts from page 1.
        self._current_page -= 1
        self._all_page_num = int(page_num)
        self._all_post_num = int(post_num)

    def _get_html(self):
        # Retry (politely, every 4 s) until weibo returns a 200 for this page.
        url = self.url
        res = None
        while True:
            res = get(url, headers=self.headers)
            if res.status_code == 200:
                break
            sleep(4)
        self._current_html = res.text

    def _parse(self):
        """Extract info from HTML content.
        TODO: refractor
        """
        soup = BS(self._current_html, 'lxml')
        for item in soup.select('div.c'):
            temp = {}
            # main content
            ctt = item.select('span.ctt')
            if not ctt:
                continue
            weibo_body = item.select('div')
            if len(weibo_body) > 1:
                temp['content'] = weibo_body[0].text
                btn_group = weibo_body[1].text
            else:
                temp['content'] = weibo_body[0].select('span.ctt')[0].text
                btn_group = weibo_body[0].text
            temp['is_repost'] = True if REPO_TEST_PATTERN.match(
                temp['content']) else False
            try:
                temp['like_num'] = LIKE_NUM_PATTERN.findall(btn_group)[0]
                temp['cmt_num'] = COMMENT_NUM_PATTERN.findall(btn_group)[0]
                temp['repo_num'] = REPO_NUM_PATTERN.findall(btn_group)[0]
            except Exception:
                # Counter row missing/odd layout: leave the counts unset.
                pass
            # NOTE(review): 'cmt_num'/'repo_num' do not match the Post model's
            # comment_num/repost_num field names, so those two columns are
            # never populated — confirm which side should be renamed.
            cmt = item.select('.cmt')
            # visibility
            if cmt:
                try:
                    temp['visibility'] = VISIBILITY_PATTERN.findall(
                        cmt[0].text)[0]
                except Exception:
                    # No visibility marker on public posts — that is fine.
                    pass
            # img in main content
            img = item.select('div a img')
            img_src = img[0].attrs['src'] if img else None
            temp['img_src'] = img_src
            LOGGER.debug('img_src: {}'.format(img_src))
            # time & source device
            ct = item.select('span.ct')
            if ct:
                ct = ct[0]
                text = ct.text
                reg_result = TIME_PATTERN.findall(text)[0]
                # First alternative lacks the year: prefix the current one.
                temp['time'] = ar(
                    '{}年{}'.format(self._current_year, reg_result[0]),
                    DATE_FMTS[0]
                ).naive if reg_result[0] else ar(
                    reg_result[1], DATE_FMTS[1]
                ).naive
                temp['source'] = SOURCE_DEVICE_PATTERN.findall(text)[0]
            self._post_item = Post(**temp)
            self._attachment_item = Attachment(
                uri=img_src, post=self._post_item)
            self._store()

    def _store(self):
        """Commit to SQL database.
        Can be async.
        """
        self._post_item.save()
        self._attachment_item.save()
        self._marshaller.marshall(self._post_item)

    def _dump(self):
        """Dump JSON object to document database.
        """
        with open('weibo_dumps.json', 'w+', encoding='utf-8') as f:
            json.dump(self._marshaller.result, f, ensure_ascii=False)

    def start(self):
        """Start spider. Auto exit when no more post.
        """
        if not self._all_page_num:
            self._set_all_page_num()
        LOGGER.debug('overall pages: {}'.format(self._all_page_num))
        with tqdm(total=self._all_page_num) as progress_bar:
            while self.has_post:
                self._get_html()
                self._parse()
                # 10 post each page.
                progress_bar.update(1)
                progress_bar.set_description(
                    'Page #{}'.format(self._current_page))
                sleep(4)
        self._dump()
def main():
    """Entry point: create the tables, then crawl and export once.

    NOTE(review): HEADERS['Cookie'] is set to '' below, and Spider.__init__
    raises KeyError("Empty cookie.") for a falsy cookie *before* the
    try-block — as committed, this function always fails until a real cookie
    and uid are filled in.
    """
    from harbor.models.base import db
    from harbor.models.post import Post
    from harbor.models.attachment import Attachment
    from harbor.constant import HEADERS
    db.create_tables([Attachment, Post])
    HEADERS['Cookie'] = ''  # fill in a logged-in weibo.cn cookie
    uid = ''  # fill in the numeric uid of the account to crawl
    marshaller = 'ghost'
    s = Spider(HEADERS, uid, marshaller)
    try:
        s.start()
    except Exception as e:
        # Best-effort: log where the crawl stopped and dump what we have.
        LOGGER.debug(e)
        LOGGER.debug(s._current_page)
        s._dump()


if __name__ == "__main__":
    main()
<file_sep>/scripts/weibo_bot.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# File: WeiboBot\wbbot.py
# Last Edited By: MingshiCai <EMAIL>
# Date: 2019-12-15 00:08:57
from os import getenv
from tqdm import tqdm
from actions import Actions
class WeiboBot:
    """Drives an ``Actions`` browser/API session to back up a user's weibos."""

    def __init__(self, directory, loginname, password, headless=False):
        """Start a browser session and log in.

        Args:
            directory: (str) download directory handed to the webdriver
            loginname: (str) weibo account name
            password: (str) weibo account password
            headless: (bool) run the browser without a visible window
        """
        self.acts = Actions.init(directory, headless)
        self.acts.login(loginname, password)

    def save_posts(self):
        """Fetch and persist all weibo content, page by page.

        A failing page is retried up to three times before giving up; the
        retry budget is reset after every successful page.
        """
        self.acts.wait().set_user_credentials().set_cookies()
        all_weibos_count = self.acts.all_weibos_count()
        progress = tqdm(total=all_weibos_count, desc='all weibos')
        posts = []
        page_index = 1
        retry = 3
        while len(posts) < all_weibos_count and retry > 0:
            try:
                page_posts = self.acts.get_posts_single_page(page_index)
            except Exception as e:
                print(e)
                retry -= 1
            else:
                posts.extend(page_posts)
                # Fixed: the old code called ``progress.update(len(posts))``
                # with the running total every iteration — tqdm's update()
                # takes an *increment*, so the bar double-counted and
                # overshot. Advance it by the new page's posts only.
                progress.update(len(page_posts))
                page_index += 1
                retry = 3
def main():
    """Script entry point: log in with env-supplied credentials and back up all posts."""
    backup_directory = './weibo_backup'
    backup_bot = WeiboBot(
        backup_directory,
        getenv('WEIBO_USERNAME', 'default-username'),
        getenv('WEIBO_PASSWORD', '<PASSWORD>'),
    )
    backup_bot.save_posts()


if __name__ == "__main__":
    main()
<file_sep>/harbor/models/attachment.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# File: models/attachment.py
# Author: MingshiCai <EMAIL>
# Created Date: 2019-08-06 10:32:42
# ----
# Last Modified:
# Modified By:
# ----
# Copyright (c) 2019 MingshiCai <EMAIL>
from peewee import CharField, ForeignKeyField
from harbor.models.base import BaseModel
from harbor.models.post import Post
class Attachment(BaseModel):
    """A media file (currently the first image) attached to a ``Post``."""

    # Remote URL of the image; None when the post has no picture.
    uri = CharField(max_length=500, null=True)
    # Owning post; reachable from the other side as post.attachments.
    post = ForeignKeyField(Post, backref='attachments')
<file_sep>/harbor/marshallers/mobile_marshaller.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# File: marshaller\mobile_marshaller.py
# Author: MingshiCai <EMAIL>
# Date: 2019-12-18 10:40:47
class MobileMarshaller:
    """Class for marshalling weibo json objects from the mobile (h5) API.

    ``mobile_card`` reduces one raw "card" item to a flat dict containing
    only the fields listed in ``mobile_card_keys``.
    """

    # card_type values we understand; only 'post' cards carry a weibo.
    mobile_card_types = {
        9: 'post', 31: 'search-box'
    }
    # Fields copied out of card['mblog']. A list entry denotes a nested path
    # and is flattened into a dotted key (e.g. 'title.text').
    mobile_card_keys = [
        'attitudes_count', 'pics', 'pic_num', 'id', 'created_at',
        'comments_count', 'favorited', 'reads_count', 'reposts_count',
        'source', 'text', 'textLength', 'visible', 'weibo_position',
        ['title', 'text']
    ]

    @classmethod
    def mobile_card(cls, card_object):
        """Extract main content from a weibo h5 json card.
        Args:
            card_object: (dict) card item from the weibo api
        Returns:
            ({str: object} or None) None for non-post cards.
        """
        if cls.mobile_card_types.get(card_object['card_type'], None) != 'post':
            return None
        result = {}
        card = card_object['mblog']
        for key in cls.mobile_card_keys:
            if isinstance(key, str):
                # Copy the field when present; absent fields are simply
                # skipped (previously each miss was caught and printed,
                # spamming stdout once per missing field).
                if key in card:
                    result[key] = card[key]
            else:
                # Nested path. Fixed: this branch had no guard at all, so any
                # card without e.g. a 'title' raised KeyError and aborted the
                # whole page instead of skipping the field.
                value = card
                try:
                    for sub_key in key:
                        value = value[sub_key]
                except (KeyError, TypeError):
                    continue
                result['.'.join(key)] = value
        return result
<file_sep>/harbor/main.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# File: harbor/main.py
# Author: MingshiCai <EMAIL>
# Created Date: 2019-08-06 10:27:19
# ----
# Last Modified:
# Modified By:
# ----
# Copyright (c) 2019 MingshiCai <EMAIL>
import logging
from os import getenv
from harbor.spider.mobile_bot import WeiboBot
LOGGER = logging(__name__)
def main():
    """Entry point: back up all posts of the account named by the
    WEIBO_USERNAME / WEIBO_PASSWORD environment variables."""
    save_to_directory = './weibo_backup'
    LOGGER.info('start mobile weibo bot')
    bot = WeiboBot(
        save_to_directory,
        getenv('WEIBO_USERNAME', 'default-username'),
        getenv('WEIBO_PASSWORD', '<PASSWORD>'),
    )
    bot.save_posts()
    LOGGER.info('exit normally')


if __name__ == "__main__":
    main()
<file_sep>/README.md
# Harbor
微博个人账户备份,可以保存这些内容:
- 文本
- 赞、转发、评论
- 日期、来自
- 图片、视频
## 简单使用
直接运行 `scripts` 文件夹下的脚本。
Powershell:
```powershell
$env:WEIBO_USERNAME = [USERNAME]; $env:WEIBO_PASSWORD = [PASSWORD]; python3 weibo_bot.py
```
Bash:
```bash
WEIBO_USERNAME='[USERNAME]' WEIBO_PASSWORD='[PASSWORD]' python3 weibo_bot.py
```
## 进阶使用
通过 pip 安装:`pip install weibo-harbor`
### 依赖
- Python 3.7 and up
- [chromedriver](https://sites.google.com/a/chromium.org/chromedriver/getting-started)
### 安装
使用 python setup-tools 安装和运行.
```
$ python3 -m venv env
$ python3 -m pip install -r requirements.txt
$ python3 setup.py install
$ env/bin/harbor
```
### 运行
Powershell:
```powershell
$env:WEIBO_USERNAME = [USERNAME]; $env:WEIBO_PASSWORD = [PASSWORD]; harbor
```
Bash:
```bash
WEIBO_USERNAME='[USERNAME]' WEIBO_PASSWORD='[PASSWORD]' harbor
```
## 开发
```bash
python3 -m venv env
. env/bin/activate
pip install -e .
```
## 测试
```bash
python3 setup.py nosetests
```
更新了 `setup.py` 中的依赖后,请运行 `pip-compile` 来刷新 `requirements.txt` 文件.
## 欢迎贡献代码
Pull requests are welcome. For major changes, please open an issue first to discuss what you would like to change.
Please make sure to update tests as appropriate.
## License
[MIT](https://choosealicense.com/licenses/mit/)
<file_sep>/harbor/models/post.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# File: models/post.py
# Author: MingshiCai <EMAIL>
# Created Date: 2019-08-06 10:33:20
# ----
# Last Modified:
# Modified By:
# ----
# Copyright (c) 2019 MingshiCai <EMAIL>
from peewee import (CharField, IntegerField, TextField, DateTimeField,
BooleanField)
from harbor.models.base import BaseModel
class Post(BaseModel):
    """A single crawled weibo post."""

    # Full post text (includes the repost prefix when is_repost is True).
    content = TextField()
    # Publication time parsed from the mobile page.
    time = DateTimeField()
    like_num = IntegerField(null=True)
    # NOTE(review): the spider fills dict keys named 'cmt_num'/'repo_num',
    # not 'comment_num'/'repost_num', so these two columns appear to stay
    # NULL — confirm the intended names against spider/main.py.
    comment_num = IntegerField(null=True)
    repost_num = IntegerField(null=True)
    # Client string, e.g. "来自iPhone客户端".
    source = CharField(max_length=20)
    # Restricted-visibility marker (e.g. 仅自己可见); None for public posts.
    visibility = CharField(max_length=10, null=True)
    # URL of the first image, duplicated in the Attachment table.
    img_src = CharField(max_length=100, null=True)
    is_repost = BooleanField(null=True)
<file_sep>/harbor/constant.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# File: harbor/constant.py
# Author: MingshiCai <EMAIL>
# Created Date: 2019-08-06 10:28:15
# ----
# Last Modified:
# Modified By:
# ----
# Copyright (c) 2019 MingshiCai <EMAIL>
import re
# Word blacklist applied by utils.filter_words to crawled text.
FILTER_PATTERNS = [
    re.compile(r'反动')
]
# Literal "\n" escapes and whitespace runs removed by utils.purified_string.
DIRTY_PATTERN = re.compile(r'(\\n)|(\s+)')
# Mobile-site profile page URL: {uid}, {page number}.
URL_TPL = 'https://weibo.cn/{}/profile?page={}'
# Values for the ':path' and 'referer' request headers, same placeholders.
HEADER_PATH_TPL = '/{}/profile?page={}'
REFERER_URL_TPL = 'https://weibo.cn/{}/profile?page={}'
# Counters scraped from the button row under each post, e.g. "赞[12]".
LIKE_NUM_PATTERN = re.compile(r'赞\[(\d+)\]')
COMMENT_NUM_PATTERN = re.compile(r'评论\[(\d+)\]')
REPO_NUM_PATTERN = re.compile(r'转发\[(\d+)\]')
# A post body starting with "转发了"/"Repost" marks a repost.
REPO_TEST_PATTERN = re.compile(r'(转发了)|(Repost)')
# Either "MM月DD日 HH:mm" (current year implied) or a full timestamp.
TIME_PATTERN = re.compile(
    r'(\d+月\d+日 [\d\:]+)|(\d{4}\-\d{2}\-\d{2} [\d\:]+)')
# Client string, e.g. "来自iPhone客户端".
SOURCE_DEVICE_PATTERN = re.compile(r'(来自.+)')
# Restricted-visibility markers such as "[仅自己可见]".
VISIBILITY_PATTERN = re.compile(r'\[([仅自].+)\]')
# Parse formats matching the two TIME_PATTERN alternatives, in order.
DATE_FMTS = (
    'YYYY年MM月DD日 HH:mm', 'YYYY-MM-DD HH:mm:ss'
)
# Request headers for weibo.cn; 'Cookie' must be filled in by the caller.
HEADERS = {
    'authority': 'weibo.cn',
    'method': 'GET',
    'scheme': 'https',
    'dnt': '1',
    'upgrade-insecure-requests': '1',
    'user-agent': ('Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS X) '
                   'AppleWebKit/604.1.38 (KHTML, like Gecko) Version/11.0 '
                   'Mobile/15A372 Safari/604.1'),
    'Cookie': None  # set to your own cookie
}
<file_sep>/tests/unit/test_utils.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# File: unit/test_utils.py
# Author: MingshiCai <EMAIL>
# Created Date: 2019-08-06 11:21:36
# ----
# Last Modified:
# Modified By:
# ----
# Copyright (c) 2019 MingshiCai <EMAIL>
import logging
from nose.tools import assert_equals
from harbor import utils
# for debugging, avoid `print`
LOGGER = logging.getLogger(__name__)
def test_http_to_https():
    """`utils.https_and_large` should rewrite the scheme from http to https."""
    source_url = 'http://abc.com'
    expected_url = 'https://abc.com'
    assert_equals(utils.https_and_large(source_url), expected_url)
<file_sep>/harbor/marshallers/__init__.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# File: marshaller/__init__.py
# Author: MingshiCai <EMAIL>
# Created Date: 2019-08-06 10:35:27
# ----
# Last Modified:
# Modified By:
# ----
# Copyright (c) 2019 MingshiCai <EMAIL>
<file_sep>/README_en.md
# Harbor
A package to crawl, backup and export weibo content.
## Installation
Install via pip: `pip install weibo-harbor`
### Requirements
- Python 3.7 and up
## Usage
Install and run binary via python setup-tools.
```
$ python3 -m venv env
$ python3 -m pip install -r requirements.txt
$ python3 setup.py install
$ env/bin/harbor
```
## Development
```
$ python3 -m venv env
$ . env/bin/activate
$ pip install -e .
```
Once you update requirements, make sure use `pip-compile` to refresh `requirements.txt`.
## Contributing
Pull requests are welcome. For major changes, please open an issue first to discuss what you would like to change.
Please make sure to update tests as appropriate.
## License
[MIT](https://choosealicense.com/licenses/mit/)
| 08ac90014cf16e586ef550a8a70ed51043ba571b | [
"Markdown",
"Python"
] | 17 | Python | cjluzzl/harbor | cadc11387b1275f9660252682d83e1a93dde2793 | f46d92fd3c163591b30d638fc4b1accdfcde5f83 |
refs/heads/master | <repo_name>cnv1989/tips<file_sep>/Tipper/BillManager.swift
//
// BillManager.swift
// Tipper
//
// Created by <NAME> on 8/24/14.
// Copyright (c) 2014 cnv. All rights reserved.
//
import UIKit
/// One restaurant bill: the base amount, the chosen tip fraction and when it
/// was entered. `total`, `tip` and `dateString` are derived on demand.
struct Bill {
    var bill = 0.00
    // Tip as a fraction, e.g. 0.2 for 20%.
    var tipPercentage = 0.00
    var date: NSDate!

    // Bill plus tip.
    var total: Double {
        return (bill + bill * tipPercentage)
    }
    // Tip amount alone.
    var tip: Double {
        return (bill * tipPercentage)
    }
    // `date` rendered in the short time style (e.g. "3:30 PM").
    var dateString: String {
        var formatter = NSDateFormatter()
        formatter.timeStyle = .ShortStyle
        return formatter.stringFromDate(date)
    }
}
/// In-memory history of entered bills (not persisted).
class BillManager: NSObject {
    var bills = [Bill]()

    /// Append `bill` to the history.
    func addBill(bill: Bill) {
        bills.append(bill)
    }
}
<file_sep>/README.md
tips
====
iOS app to calculate tip based on the bill.
This app has following functionality.
1. It calculates tip and total amount based on the tip percentage selected.
2. It also calculates the split amount among one, two and three people.
3. It allows user to change the default tip percentages.
4. It stores the tip percentages in percentage storage such that user preferences are retained.
<file_sep>/Tipper/SettingsViewController.swift
//
// SettingsViewController.swift
// Tipper
//
// Created by <NAME> on 8/24/14.
// Copyright (c) 2014 cnv. All rights reserved.
//
import UIKit
/// Lets the user edit the three tip percentages. Fields display whole
/// percents (e.g. "18") and write back to the shared store as fractions.
class SettingsViewController: UIViewController {
    @IBOutlet weak var itSucked: UITextField!
    @IBOutlet weak var itWasOk: UITextField!
    @IBOutlet weak var itWasAmazing: UITextField!

    // Each action converts the field's whole-percent text to a fraction.
    @IBAction func updateItSucked(sender: AnyObject) {
        tipsControl.itSucked = NSString(string: itSucked.text).doubleValue / 100
    }
    @IBAction func updateItWasOk(sender: AnyObject) {
        tipsControl.itWasOk = NSString(string: itWasOk.text).doubleValue / 100
    }
    @IBAction func updateItWasAmazing(sender: AnyObject) {
        tipsControl.itWasAmazing = NSString(string: itWasAmazing.text).doubleValue / 100
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        // Pre-fill the fields with the stored fractions as whole percents.
        itSucked.text = String(Int(floor(tipsControl.itSucked * 100)))
        itWasOk.text = String(Int(floor(tipsControl.itWasOk * 100)))
        itWasAmazing.text = String(Int(floor(tipsControl.itWasAmazing * 100)))
    }
}
<file_sep>/Tipper/AppDelegate.swift
//
// AppDelegate.swift
// Tipper
//
// Created by <NAME> on 8/24/14.
// Copyright (c) 2014 cnv. All rights reserved.
//
import UIKit
/// Holds the three configurable tip percentages (stored as fractions, e.g.
/// 0.18) and persists them in NSUserDefaults as a comma-separated string.
class TipsControl {
    var itSucked: Double {
        didSet {
            println("isSucked did got set.")
        }
    }
    var itWasOk:Double {
        didSet {
            println("itWasOk dit got set.")
        }
    }
    var itWasAmazing:Double {
        didSet {
            println("itWasAmazing did got set.")
        }
    }

    /// The arguments act only as fallbacks: `load()` runs last and replaces
    /// them with any previously persisted values.
    init(itSucked: Double = 0.18, itWasOk: Double = 0.2, itWasAmazing: Double = 0.22) {
        self.itSucked = itSucked
        self.itWasOk = itWasOk
        self.itWasAmazing = itWasAmazing
        self.load()
    }

    /// Serialize the three fractions as "a,b,c" with two decimals each.
    func getString() -> String {
        return String(format: "%.2f,%.2f,%.2f", arguments: [self.itSucked, self.itWasOk, itWasAmazing])
    }

    /// Persist to NSUserDefaults under the "tipper_app_tips" key.
    func save () {
        var defaults = NSUserDefaults.standardUserDefaults()
        defaults.setObject(self.getString(), forKey: "tipper_app_tips")
        defaults.synchronize()
    }

    /// Restore previously saved percentages, if any were stored.
    func load () {
        var defaults = NSUserDefaults.standardUserDefaults()
        if var tipperTipsOb = defaults.objectForKey("tipper_app_tips") as? String {
            var stringValue = tipperTipsOb
            var tipsArray = stringValue.componentsSeparatedByString(",")
            self.itSucked = NSString(string: tipsArray[0]).doubleValue
            self.itWasOk = NSString(string: tipsArray[1]).doubleValue
            self.itWasAmazing = NSString(string: tipsArray[2]).doubleValue
        }
    }
}
var tipsControl = TipsControl()
@UIApplicationMain
class AppDelegate: UIResponder, UIApplicationDelegate {
    // App delegate: persists the shared tip settings across foreground /
    // background lifecycle transitions.

    var window: UIWindow?

    func application(application: UIApplication!, didFinishLaunchingWithOptions launchOptions: NSDictionary!) -> Bool {
        // Override point for customization after application launch.
        return true
    }

    func applicationWillResignActive(application: UIApplication!) {
        // Sent when the application is about to move from active to inactive
        // state (incoming call, user quitting, ...). No state to save yet.
    }

    func applicationDidEnterBackground(application: UIApplication!) {
        // Persist the tip percentages so they survive termination.
        tipsControl.save()
    }

    func applicationWillEnterForeground(application: UIApplication!) {
        // Restore persisted tip percentages on the way back to foreground.
        tipsControl.load()
    }

    func applicationDidBecomeActive(application: UIApplication!) {
        // Also reload when becoming active (covers the first launch path).
        tipsControl.load()
    }

    func applicationWillTerminate(application: UIApplication!) {
        // Last chance to persist before the process is killed.
        tipsControl.save()
    }
}
<file_sep>/Tipper/TipsViewController.swift
//
// TipsViewController.swift
// Tipper
//
// Created by <NAME> on 8/24/14.
// Copyright (c) 2014 cnv. All rights reserved.
//
import UIKit
/// Main screen: enter a bill amount, pick a tip segment and see the tip,
/// the total and per-person splits for one, two and three people.
class TipsViewController: UIViewController {
    @IBOutlet weak var tipsSegmentControl: UISegmentedControl!
    @IBOutlet weak var billField: UITextField!
    @IBOutlet weak var tipField: UILabel!
    @IBOutlet weak var totalField: UILabel!
    @IBOutlet weak var splitByOne: UILabel!
    @IBOutlet weak var splitByTwo: UILabel!
    @IBOutlet weak var splitByThree: UILabel!

    override func viewDidLoad() {
        super.viewDidLoad()
        // Label each segment with its stored percentage, e.g. "18%".
        var itSuckedString = String(Int(floor(tipsControl.itSucked * 100))) + "%"
        tipsSegmentControl.setTitle(itSuckedString, forSegmentAtIndex: 0)
        var itWasOkString = String(Int(floor(tipsControl.itWasOk * 100))) + "%"
        tipsSegmentControl.setTitle(itWasOkString, forSegmentAtIndex: 1)
        var itWasAmazingString = String(Int(floor(tipsControl.itWasAmazing * 100))) + "%"
        tipsSegmentControl.setTitle(itWasAmazingString, forSegmentAtIndex: 2)
        // Put the cursor straight into the bill field.
        billField.becomeFirstResponder()
    }

    /// Recompute tip, total and the splits on every keystroke / segment change.
    @IBAction func onEditingChanged(sender: AnyObject) {
        var tipPercentages = [tipsControl.itSucked, tipsControl.itWasOk, tipsControl.itWasAmazing]
        var tipPercentage = tipPercentages[tipsSegmentControl.selectedSegmentIndex]
        var billAmount = NSString(string: billField.text).doubleValue
        var tip = billAmount * tipPercentage
        var total = billAmount + tip
        tipField.text = String(format: "$%.2f", tip)
        totalField.text = String(format: "$%.2f", total)
        splitByOne.text = String(format: "$%.2f", total)
        splitByTwo.text = String(format: "$%.2f", total/2)
        splitByThree.text = String(format: "$%.2f", total/3)
    }

    @IBAction func onEditingBegin(sender: AnyObject) {
    }

    @IBAction func onEditingDidEnd(sender: AnyObject) {
    }

    /// Re-format the field as a currency string once editing settles.
    @IBAction func onValueChanged(sender: AnyObject) {
        var billAmount = NSString(string: billField.text.stringByTrimmingCharactersInSet(NSCharacterSet.symbolCharacterSet())).doubleValue
        billField.text = String(format: "$%.2f", billAmount)
    }
}
| f71492c806b726f5dfe77b4b78ece6da2ea1db16 | [
"Swift",
"Markdown"
] | 5 | Swift | cnv1989/tips | 5c66af1a8e5b1a11274a4c03e05a09b3c0815514 | 4c82094f793c6f3f60cc34fdfb288b016cee5251 |
refs/heads/master | <repo_name>RomanShestakov/unix_programming<file_sep>/Vagrantfile
# -*- mode: ruby -*-
# vi: set ft=ruby :
# some of the scrips are re-used from this github project:
# https://github.com/ANXS/erlang
# Vagrantfile API/syntax version. Don't touch unless you know what you're doing!
VAGRANTFILE_API_VERSION = "2"
# run shell provision on the host machine to download external roles for Ansible
# https://github.com/enginyoyen/ansible-best-practises
system("
if [ #{ARGV[0]} = 'up' ]; then
echo 'You are doing vagrant up and can execute your script'
./provision/scripts/role_update.sh
fi
")
Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
# All Vagrant configuration is done here. The most common configuration
# options are documented and commented below. For a complete reference,
# please see the online documentation at vagrantup.com.
config.vm.define "vagrant1" do |vagrant1|
vagrant1.vm.box = "virtualbox-centos7"
vagrant1.vm.host_name = "base1.local"
vagrant1.vm.network :private_network, ip: "192.168.111.222"
vagrant1.vm.network "forwarded_port", guest: 80, host: 8080
vagrant1.vm.network "forwarded_port", guest: 8888, host: 8888
vagrant1.vm.network "forwarded_port", id: "ssh", guest: 22, host: 2222
vagrant1.vm.provider "virtualbox" do |pmv|
pmv.memory = 4096
pmv.cpus = 4
end
end
# config.vm.define "vagrant2" do |vagrant2|
# vagrant2.vm.box = "virtualbox-centos7"
# vagrant2.vm.host_name = "base2.local"
# vagrant2.vm.network :private_network, ip: "192.168.111.223"
# vagrant2.vm.network "forwarded_port", guest: 80, host: 8081
# vagrant2.vm.network "forwarded_port", guest: 8888, host: 8889
# end
# sync folders
config.vm.synced_folder "/Users/romanshestakov/development", "/home/vagrant/development"
# # Enable SSH agent forwarding for github
# # https://coderwall.com/p/p3bj2a
# config.ssh.username = 'vagrant'
# config.ssh.private_key_path = [ '~/.vagrant.d/insecure_private_key', '~/.ssh/id_rsa' ]
config.ssh.forward_agent = true
config.ssh.insert_key = false
config.vm.boot_timeout = 900
# run ansible
config.vm.provision "ansible" do |ansible|
ansible.sudo = true
ansible.verbose = "vvv"
ansible.playbook = 'provision/ansible/playbooks/vm.yml'
ansible.host_key_checking = false
end
end
| d2db0695de4330d1be407d74f1b53255f1a51321 | [
"Ruby"
] | 1 | Ruby | RomanShestakov/unix_programming | 47355bdd258b983bd62e057f54ced2dc9c0e6c48 | 80e0185cd98700c49a9f1e2a54cb6647b5d544f3 |
refs/heads/master | <repo_name>IrisDS/distScreen<file_sep>/js/main.js
input = document.getElementById('myFileInput');
var id = genID();
var url = "ws://192.168.0.104:9876/ws/screen/" + id
var binaryString
function sendPic() {
var file = myInput.files[0];
var fileReader = new FileReader();
var Socket = new WebSocket(url);
fileReader.onload = function(readerEvt) {
binaryString = readerEvt.target.result;
}
fileReader.readAsDataURL(file);
Socket.onopen = function(){
Socket.send(JSON.stringify({"cmd" : "UPLOAD_IMAGE", "image" :
binaryString, "width" : (window.innerWidth > 0) ?
window.innerWidth : screen.width}));
}
$("#myFileInput").remove();
Socket.onmessage = function(message) {
myData = JSON.parse(message)
switch (myData.value) {
case "PLAY":
var vid = document.getElementById("source_video");
vid.src = URL;
break;
case "PLAY_AT":
// start/resume playback @
break;
case "PAUSE":
// Pause video
break;
case "MAKE_ADMIN":
// upgrade connection to admin
break;
}
}
}
function genID(){
return Math.random().toString(36).substr(2) + Math.random().toString(36).substr(2);
}
myInput.addEventListener('change', sendPic, false);
| 83ff0c78704a8080af4c9aaedb33409e3ac694b9 | [
"JavaScript"
] | 1 | JavaScript | IrisDS/distScreen | b9e0b8d3a22d330442272fbde9844eec420d0c13 | 1ac90c1a530b921827ab6e2c18536415e699c53e |
refs/heads/master | <file_sep>package com.moneytransfer.service;
import com.moneytransfer.domain.entities.Account;
import com.moneytransfer.domain.repository.BaseDao;
import lombok.NonNull;
import java.math.BigDecimal;
public class AccountServiceImpl implements AccountService {
private BaseDao<Account> accountDao;
public AccountServiceImpl(@NonNull final BaseDao<Account> accountDao) {
this.accountDao = accountDao;
}
@Override
public Account create(@NonNull final String owner) throws Exception {
return this.accountDao.save(new Account(owner));
}
@Override
public synchronized Account deposit(@NonNull final String accountNumber,
@NonNull final BigDecimal amount) throws Exception {
final Account account = this.accountDao.findById(accountNumber);
account.deposit(amount);
return this.accountDao.save(account);
}
@Override
public synchronized Account withdraw(@NonNull final String accountNumber,
@NonNull final BigDecimal amount) throws Exception {
final Account account = this.accountDao.findById(accountNumber);
account.withdraw(amount);
return this.accountDao.save(account);
}
}
<file_sep># Money transfer API
This project is a very simple RESTful API for money transfer using SOLID and KISS principles.
It's built on top of:
- Java JDK 11
- Spark Java web server
- Hibernate ORM
- H2 in-memory database
- JUnit 5
### Maven commands
- `$ mvn test`: Run all the test cases (unit, integration, e2e)
- `$ mvn package`: Create a new jar in the target folder (`target/money-transfer-api-1.0-SNAPSHOT.jar`)
### How to execute standalone application
- `$ java -jar money-transfer-api-1.0-SNAPSHOT.jar`
> This will initialize the server on port `4567`
### Important information
The project has basically only two entities: `Account` and `Transaction`.
- **Account**: has `number`, `owner` and `balance` and can perform `deposit` and `withdraw` operations.
- **Transaction**: has `origin`, `destination`, `amount` and `date` and can perform `transfer` operation.
Restrictions applied:
- Account must have a `owner` and initial balance is `0.00`
- Account/Transaction `number` is generated when object is being created
- There is only one default currency and values must have 2 decimal places
- Only positive amounts are allowed for `deposit` and `withdraw`
- The `withdraw` operation is only allowed if account has sufficient `balance`
- Accounts must exist for any of the operations
- It's not possible to transfer to own account
### Endpoints implemented
|Method|Endpoint|Description|Sample|
|---|---|---|---|
|POST|/api/accounts|Create a new account|`curl -X POST localhost:4567/api/accounts -d '{"owner": "<NAME>"}'`|
|PUT|/api/accounts/:id/deposit|Deposit money in account|`curl -X PUT localhost:4567/api/accounts/a3718ab<KEY>/deposit -d '{"amount": 1000.00}'`|
|PUT|/api/accounts/:id/withdraw|Withdraw money from account|`curl -X PUT localhost:4567/api/accounts/<KEY>/withdraw -d '{"amount": 200}'`|
|POST|/api/transfers|Create a new transfer|`curl -X POST localhost:4567/api/transfers -d '{"originAccountNumber": "<KEY>", "destinationAccountNumber": "<KEY>", "amount": 250.59}'`|<file_sep>package com.moneytransfer.domain.entities;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.NonNull;
import lombok.ToString;
import javax.persistence.Entity;
import javax.persistence.Id;
import java.io.Serializable;
import java.math.BigDecimal;
import static com.moneytransfer.utils.Constants.DECIMAL_PLACES;
import static java.math.BigDecimal.ZERO;
import static java.math.RoundingMode.HALF_UP;
import static java.util.UUID.randomUUID;
@Entity
@EqualsAndHashCode
@Getter
@ToString
public class Account implements Serializable {
private static final long serialVersionUID = -5933713453986435201L;
@Id
private String number;
private String owner;
private BigDecimal balance;
public Account() { }
public Account(@NonNull final String owner) {
this.number = randomUUID().toString();
setOwner(owner);
setBalance(ZERO);
}
public void deposit(@NonNull final BigDecimal amount) {
if (amount.compareTo(ZERO) <= 0) {
throw new IllegalArgumentException("Amount for deposit should be greater than zero");
}
final BigDecimal newBalance = this.balance.add(amount);
setBalance(newBalance);
}
public void withdraw(@NonNull final BigDecimal amount) {
if (amount.compareTo(ZERO) <= 0) {
throw new IllegalArgumentException("Amount for withdraw should be greater than zero");
}
if (amount.compareTo(this.balance) > 0) {
throw new IllegalArgumentException("Account balance should contain value for withdraw");
}
final BigDecimal newBalance = this.balance.subtract(amount);
setBalance(newBalance);
}
private void setOwner(@NonNull final String owner) {
if (owner.trim().isEmpty()) {
throw new IllegalArgumentException("Account owner should not be empty");
}
this.owner = owner;
}
private void setBalance(@NonNull final BigDecimal balance) {
this.balance = balance.setScale(DECIMAL_PLACES, HALF_UP);
}
}
<file_sep>package com.moneytransfer.service;
import com.moneytransfer.domain.entities.Account;
import com.moneytransfer.domain.entities.Transaction;
import com.moneytransfer.domain.repository.BaseDao;
import lombok.NonNull;
import java.math.BigDecimal;
public class TransactionServiceImpl implements TransactionService {
private BaseDao<Account> accountDao;
private BaseDao<Transaction> transactionDao;
public TransactionServiceImpl(@NonNull final BaseDao<Account> accountDao,
@NonNull final BaseDao<Transaction> transactionDao) {
this.accountDao = accountDao;
this.transactionDao = transactionDao;
}
@Override
public synchronized Transaction transfer(@NonNull final String originAccountNumber,
@NonNull final String destinationAccountNumber,
@NonNull final BigDecimal amount) throws Exception {
final Account origin = this.accountDao.findById(originAccountNumber);
final Account destination = this.accountDao.findById(destinationAccountNumber);
final Transaction transaction = new Transaction(origin, destination, amount);
transaction.transfer();
this.accountDao.save(origin);
this.accountDao.save(destination);
return this.transactionDao.save(transaction);
}
}
<file_sep>package com.moneytransfer.utils;
import com.google.gson.Gson;
import com.moneytransfer.domain.entities.Account;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
public class JsonTransformerTest {
@Test
public void testObjectShouldBeConvertedIntoJson() {
final Account account = new Account("<NAME>");
String json = new Gson().toJson(account);
assertEquals(json, new JsonTransformer().render(account));
}
}
<file_sep>package com.moneytransfer.domain.request;
import lombok.AllArgsConstructor;
import lombok.Data;
import java.math.BigDecimal;
@Data
@AllArgsConstructor
public class UpdateBalanceRequest {
private BigDecimal amount;
}
<file_sep>package com.moneytransfer.domain.repository;
import lombok.extern.slf4j.Slf4j;
import org.hibernate.boot.Metadata;
import org.hibernate.boot.MetadataSources;
import org.hibernate.boot.registry.StandardServiceRegistry;
import org.hibernate.boot.registry.StandardServiceRegistryBuilder;
import static org.hibernate.boot.registry.StandardServiceRegistryBuilder.destroy;
@Slf4j
public class HibernateSessionFactory implements SessionFactory<org.hibernate.SessionFactory> {
private StandardServiceRegistry registry;
private org.hibernate.SessionFactory sessionFactory;
@Override
public org.hibernate.SessionFactory getSessionFactory() {
if (this.sessionFactory == null) {
try {
this.registry = new StandardServiceRegistryBuilder().configure().build();
MetadataSources sources = new MetadataSources(this.registry);
Metadata metadata = sources.getMetadataBuilder().build();
this.sessionFactory = metadata.getSessionFactoryBuilder().build();
} catch (Exception e) {
log.error("Error creating session factory", e.getMessage());
shutdown();
}
}
return this.sessionFactory;
}
private void shutdown() {
if (this.registry != null) {
destroy(this.registry);
}
}
}
<file_sep>package com.moneytransfer.domain.entities;
import org.junit.jupiter.api.Test;
import java.math.BigDecimal;
import static com.moneytransfer.utils.Constants.DECIMAL_PLACES;
import static java.math.BigDecimal.ONE;
import static java.math.BigDecimal.TEN;
import static java.math.BigDecimal.ZERO;
import static java.math.RoundingMode.HALF_UP;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class AccountTest {
@Test
public void testNewAccountShouldRaiseErrorIfOwnerIsNull() {
assertThrows(IllegalArgumentException.class,
() -> new Account(null),
"Account must have an owner");
}
@Test
public void testNewAccountShouldRaiseErrorIfOwnerIsEmpty() {
assertThrows(IllegalArgumentException.class,
() -> new Account(" "),
"Account owner must not be empty");
}
@Test
public void testNewAccountShouldHaveANumberAndBalanceShouldBeZero() {
final Account account = new Account("<NAME>");
assertNotNull(account.getNumber(), "Account must have a number");
assertEquals(ZERO.setScale(DECIMAL_PLACES), account.getBalance(),
"Account must be initialized with a balance of zero");
}
@Test
public void testNewAccountsShouldHaveDifferentNumbers() {
final Account account1 = new Account("<NAME>");
final Account account2 = new Account("<NAME>");
assertNotEquals(account1.getNumber(), account2.getNumber());
}
@Test
public void testErrorShouldBeRaisedIfDepositIsEqualToNull() {
assertThrows(IllegalArgumentException.class,
() -> new Account("<NAME>").deposit(null),
"Deposit must contain a non-null value");
}
@Test
public void testErrorShouldBeRaisedIfDepositHasNegativeValue() {
assertThrows(IllegalArgumentException.class,
() -> new Account("<NAME>").deposit(new BigDecimal(-10d)),
"Deposit must contain a value greater than zero");
}
@Test
public void testErrorShouldBeRaisedIfDepositIsEqualToZero() {
assertThrows(IllegalArgumentException.class,
() -> new Account("<NAME>").deposit(ZERO),
"Deposit must contain a value greater than zero");
}
@Test
public void testDepositShouldIncreaseBalance() {
final Account account = new Account("<NAME>");
final BigDecimal previousBalance = account.getBalance();
account.deposit(TEN);
final BigDecimal newBalance = account.getBalance();
assertTrue(newBalance.compareTo(previousBalance) > 0,
"Deposit must increase balance");
}
@Test
public void testBalanceShouldAlwaysHaveTwoDecimalPlaces() {
final Account account = new Account("<NAME>");
assertEquals(DECIMAL_PLACES, account.getBalance().scale(),
"Balance must have two decimal places");
account.deposit(new BigDecimal(5.376d));
assertEquals(DECIMAL_PLACES, account.getBalance().scale(),
"Balance must have two decimal places");
account.withdraw(new BigDecimal(2.5d));
assertEquals(DECIMAL_PLACES, account.getBalance().scale(),
"Balance must have two decimal places");
}
@Test
public void testBalanceShouldBeRoundedUp() {
final Account account = new Account("<NAME>");
account.deposit(new BigDecimal(5.375d));
assertEquals(new BigDecimal(5.38d).setScale(DECIMAL_PLACES, HALF_UP),
account.getBalance(),
"Balance must be rounded up");
}
@Test
public void testBalanceShouldBeRoundedDown() {
final Account account = new Account("<NAME>");
account.deposit(new BigDecimal(5.374d));
assertEquals(new BigDecimal(5.37d).setScale(DECIMAL_PLACES, HALF_UP),
account.getBalance(),
"Balance must be rounded down");
}
@Test
public void testErrorShouldBeRaisedIfWithdrawIsEqualToNull() {
final Account account = new Account("<NAME>");
assertThrows(IllegalArgumentException.class,
() -> account.withdraw(null),
"Withdraw must contain a non-null value");
}
@Test
public void testErrorShouldBeRaisedIfWithdrawHasNegativeValue() {
final Account account = new Account("<NAME>");
assertThrows(IllegalArgumentException.class,
() -> account.withdraw(new BigDecimal(-10d)),
"Withdraw must contain a value greater than zero");
}
@Test
public void testErrorShouldBeRaisedIfWithdrawIsEqualToZero() {
final Account account = new Account("<NAME>");
assertThrows(IllegalArgumentException.class,
() -> account.withdraw(ZERO),
"Withdraw must contain a value greater than zero");
}
@Test
public void testErrorShouldBeRaisedIfWithdrawIsGreaterThanBalance() {
final Account account = new Account("<NAME>");
assertThrows(IllegalArgumentException.class,
() -> account.withdraw(ONE),
"Account balance must contain value for withdraw");
}
@Test
public void testWithdrawShouldDecreaseBalance() {
final Account account = new Account("<NAME>");
account.deposit(TEN);
final BigDecimal previousBalance = account.getBalance();
account.withdraw(ONE);
final BigDecimal newBalance = account.getBalance();
assertTrue(newBalance.compareTo(previousBalance) < 0,
"Withdraw must decrease balance");
}
}
<file_sep>package com.moneytransfer.service;
import com.moneytransfer.domain.entities.Account;
import com.moneytransfer.domain.repository.AccountDao;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.math.BigDecimal;
import java.util.NoSuchElementException;
import static com.moneytransfer.utils.Constants.DECIMAL_PLACES;
import static java.math.BigDecimal.ONE;
import static java.math.BigDecimal.TEN;
import static java.math.BigDecimal.ZERO;
import static java.math.RoundingMode.HALF_UP;
import static java.util.UUID.randomUUID;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class AccountServiceTest {
private AccountDao accountDao;
private AccountService accountService;
@BeforeEach
void beforeEach() {
this.accountDao = mock(AccountDao.class);
this.accountService = new AccountServiceImpl(this.accountDao);
}
@Test
public void testShouldRaiseAnErrorIfOwnerIsNull() {
assertThrows(IllegalArgumentException.class,
() -> this.accountService.create(null),
"Account owner must be non-null");
}
@Test
public void testShouldRaiseAnErrorIfOwnerIsEmpty() {
assertThrows(IllegalArgumentException.class,
() -> this.accountService.create(" "),
"Account owner must be non-empty");
}
@Test
public void testShouldCreateAccount() throws Exception {
final String owner = "<NAME>";
when(this.accountDao.save(any())).thenReturn(new Account(owner));
final Account savedAccount = this.accountService.create(owner);
assertNotNull(savedAccount, "Account must be saved");
assertEquals(owner, savedAccount.getOwner(), "Saved account must have provided owner");
assertNotNull(savedAccount.getNumber(), "Saved account must have a number");
assertEquals(ZERO.setScale(DECIMAL_PLACES, HALF_UP), savedAccount.getBalance(),
"Saved account must have initial balance as zero");
}
@Test
public void testShouldRaiseAnErrorIfAccountNumberForDepositIsNull() {
assertThrows(IllegalArgumentException.class,
() -> this.accountService.deposit(null, ONE),
"Account number must be non-null");
}
@Test
public void testShouldRaiseAnErrorIfAmountForDepositIsNull() {
assertThrows(IllegalArgumentException.class,
() -> this.accountService.deposit(randomUUID().toString(), null),
"Amount for deposit must be non-null");
}
@Test
public void testShouldRaiseAnErrorIfAccountForDepositIsNotFound() throws Exception {
final String nonExistentAccountNumber = randomUUID().toString();
when(this.accountDao.findById(nonExistentAccountNumber)).thenThrow(new NoSuchElementException());
assertThrows(NoSuchElementException.class,
() -> this.accountService.deposit(nonExistentAccountNumber, ONE),
"Account for deposit must exist");
}
@Test
public void testShouldRaiseAnErrorIfAmountForDepositIsNegative() throws Exception {
final String accountNumber = randomUUID().toString();
when(this.accountDao.findById(accountNumber)).thenReturn(new Account("<NAME>"));
assertThrows(IllegalArgumentException.class,
() -> this.accountService.deposit(accountNumber, new BigDecimal(-10d)),
"Amount for deposit must be greater than zero");
}
@Test
public void testShouldRaiseAnErrorIfAmountForDepositIsZero() throws Exception {
final String accountNumber = randomUUID().toString();
when(this.accountDao.findById(accountNumber)).thenReturn(new Account("<NAME>"));
assertThrows(IllegalArgumentException.class,
() -> this.accountService.deposit(accountNumber, ZERO),
"Amount for deposit must be greater than zero");
}
@Test
public void testShouldIncreaseAccountBalanceAfterDeposit() throws Exception {
final String accountNumber = randomUUID().toString();
final BigDecimal amount = ONE;
final Account account = new Account("<NAME>");
final BigDecimal previousBalance = account.getBalance();
when(this.accountDao.findById(accountNumber)).thenReturn(account);
this.accountService.deposit(accountNumber, amount);
assertTrue(account.getBalance().compareTo(previousBalance) > 0,
"Account balance must increase after deposit");
assertEquals(previousBalance.add(amount), account.getBalance(),
"Account balance must increase by given amount");
}
@Test
public void testShouldRaiseAnErrorIfAccountNumberForWithdrawIsNull() {
assertThrows(IllegalArgumentException.class,
() -> this.accountService.withdraw(null, ONE),
"Account number must be non-null");
}
@Test
public void testShouldRaiseAnErrorIfAmountForWithdrawIsNull() {
assertThrows(IllegalArgumentException.class,
() -> this.accountService.withdraw(randomUUID().toString(), null),
"Amount for withdraw must be non-null");
}
@Test
public void testShouldRaiseAnErrorIfAccountForWithdrawIsNotFound() throws Exception {
final String nonExistentAccountNumber = randomUUID().toString();
when(this.accountDao.findById(nonExistentAccountNumber)).thenThrow(new NoSuchElementException());
assertThrows(NoSuchElementException.class,
() -> this.accountService.withdraw(nonExistentAccountNumber, ONE),
"Account for withdraw must exist");
}
@Test
public void testShouldRaiseAnErrorIfAmountForWithdrawIsNegative() throws Exception {
final String accountNumber = randomUUID().toString();
when(this.accountDao.findById(accountNumber)).thenReturn(new Account("<NAME>"));
assertThrows(IllegalArgumentException.class,
() -> this.accountService.withdraw(accountNumber, new BigDecimal(-10d)),
"Amount for withdraw must be greater than zero");
}
@Test
public void testShouldRaiseAnErrorIfAmountForWithdrawIsZero() throws Exception {
final String accountNumber = randomUUID().toString();
when(this.accountDao.findById(accountNumber)).thenReturn(new Account("<NAME>"));
assertThrows(IllegalArgumentException.class,
() -> this.accountService.withdraw(accountNumber, ZERO),
"Amount for withdraw must be greater than zero");
}
@Test
public void testShouldRaiseAnErrorIfAmountForWithdrawIsGreaterThanAccountBalance() throws Exception {
final String accountNumber = randomUUID().toString();
when(this.accountDao.findById(accountNumber)).thenReturn(new Account("<NAME>"));
assertThrows(IllegalArgumentException.class,
() -> this.accountService.withdraw(accountNumber, ONE),
"Account must contain specified amount for withdraw");
}
@Test
public void testShouldDecreaseAccountBalanceAfterWithdraw() throws Exception {
final String accountNumber = randomUUID().toString();
final BigDecimal amount = ONE;
final Account account = new Account("<NAME>");
account.deposit(TEN);
final BigDecimal previousBalance = account.getBalance();
when(this.accountDao.findById(accountNumber)).thenReturn(account);
this.accountService.withdraw(accountNumber, amount);
assertTrue(account.getBalance().compareTo(previousBalance) < 0,
"Account balance must decrease after deposit");
assertEquals(previousBalance.subtract(amount), account.getBalance(),
"Account balance must decrease by given amount");
}
}
| 46d05d16dc4266d14b13500280288261697346a8 | [
"Markdown",
"Java"
] | 9 | Java | ugocastro/money-transfer-api | d28adc006dee26cb5588bf0eaa6468d16395f22d | 080432bb091bc8ef43db1e694968c989c6053dee |
refs/heads/master | <repo_name>vsoch/staged-recipes<file_sep>/recipes/rdfind/build.sh
# conflicts with '#include <version>'
rm -f ${SRC_DIR}/VERSION
./bootstrap.sh
./configure --prefix=$PREFIX
make -j${CPU_COUNT}
# Need a non coreutils binary for testing
sed -i.bak "s,which ls,which grep,g" testcases/symlinking_action.sh
sed -i.bak "s,which ls,which grep,g" testcases/hardlink_fails.sh
make -j${CPU_COUNT} check
make -j${CPU_COUNT} install
<file_sep>/recipes/muq/build.sh
#!/bin/bash
PYTHON_INCLUDE_DIR=$($PYTHON -c 'import distutils.sysconfig, sys; sys.stdout.write(distutils.sysconfig.get_python_inc())')
PYTHON_LIBRARY=$($PYTHON -c 'from distutils.sysconfig import get_config_var; import os, sys; sys.stdout.write(os.path.join(get_config_var("LIBDIR"),get_config_var("LDLIBRARY")))')
cd build
cmake \
-DCMAKE_INSTALL_PREFIX=$PREFIX \
-DPYTHON_INSTALL_PREFIX=$SP_DIR \
-DMUQ_BOOST_DIR=$PREFIX \
-DMUQ_EIGEN3_DIR=$PREFIX/include \
-DMUQ_HDF5_DIR=$PREFIX \
-DMUQ_NANOFLANN_DIR=$PREFIX \
-DMUQ_SUNDIALS_DIR=$PREFIX \
-DMUQ_NLOPT_DIR=$PREFIX \
-DMUQ_NANOFLANN_DIR=$PREFIX \
-DMUQ_USE_PYTHON=ON \
-DPYTHON_INCLUDE_DIR=$PYTHON_INCLUDE_DIR \
-DPYTHON_LIBRARY=$PYTHON_LIBRARY \
-DPYTHON_EXECUTABLE=$PYTHON \
-DCMAKE_INCLUDE_PATH=$PREFIX/include \
$SRC_DIR
make -j$CPU_COUNT
make install
| 0dd717cafbcc61b175bfde5602c99d7e9bf7faa0 | [
"Shell"
] | 2 | Shell | vsoch/staged-recipes | 00839efe505c9fc4b87838bda5b9f81c38079833 | b68ceb7964333b0d4ef5ba2126cffc90951c9a89 |
refs/heads/master | <file_sep>from django.contrib import admin
from .models import Employee, Organization,Survey,Question,SurveyEmployee, SurveyQuestion,SurveyResponse
admin.site.register(Employee)
admin.site.register(Organization)
admin.site.register(SurveyQuestion)
admin.site.register(Question)
admin.site.register(Survey)
admin.site.register(SurveyEmployee)
admin.site.register(SurveyResponse)
<file_sep>from django.db import models
from django.core.exceptions import ValidationError
class Organization(models.Model):
company_name = models.CharField(max_length=200)
location = models.CharField(max_length=100)
description = models.CharField(max_length=200)
def __str__(self):
return self.company_name
class Employee(models.Model):
emp_name = models.CharField(max_length=200)
emp_username = models.CharField(max_length=100)
emp_password = models.CharField(max_length=100)
emp_designation = models.CharField(max_length=100)
emp_address = models.CharField(max_length=200)
company = models.ForeignKey(Organization, on_delete=models.CASCADE)
def __str__(self):
return self.emp_name
class Meta:
verbose_name_plural = 'Employees'
class Survey(models.Model):
survey_name = models.CharField(max_length=200)
description = models.CharField(max_length=200)
date = models.DateField()
def __str__(self):
return self.survey_name
class Meta:
verbose_name_plural = 'surveys'
def validate_list(value):
'''takes a text value and verifies that there is at least one comma '''
values = value.split(',')
if len(values) < 2:
raise ValidationError(
"The selected field requires an associated list of choices. Choices must contain more than one item.")
class Question(models.Model):
TEXT = 'text'
RADIO = ' radio '
SELECT = 'select'
SELECT_MULTIPLE = 'select-multiple'
INTEGER = 'integer'
Question_types = (
(TEXT, 'text'),
(RADIO, 'radio'),
(SELECT, 'select'),
(SELECT_MULTIPLE, 'Select Multiple'),
(INTEGER, 'integer'),
)
survey = models.ForeignKey(Survey, on_delete=models.CASCADE, blank=False, null=True)
question = models.TextField()
is_required = models.BooleanField()
question_type = models.CharField(max_length=200, choices=Question_types, default=TEXT)
choices = models.TextField(blank=True, null=True,
help_text='if the question type is "radio,'
'" "select," or "select multiple"'
' provide a comma-separated list of options for this question .')
def save(self, *args, **kwargs):
if (self.question_type == Question.RADIO or self.question_type == Question.SELECT
or self.question_type == Question.SELECT_MULTIPLE):
validate_list(self.choices)
super(Question, self).save(*args, **kwargs)
def get_choices(self):
""" parse the choices field and return a tuple formatted appropriately
for the 'choices' argument of a form widget."""
choices = self.choices.split(',')
return choices
def __str__(self):
return self.question
class SurveyEmployee(models.Model):
employee = models.ForeignKey(Employee, on_delete=models.CASCADE)
survey = models.ForeignKey(Survey, on_delete=models.CASCADE)
# def __str__(self):
# return self.survey, self.employee
class SurveyQuestion(models.Model):
survey = models.ForeignKey(Survey, on_delete=models.CASCADE)
question = models.ForeignKey(Question, on_delete=models.CASCADE)
# def __str__(self):
# return self.survey, self.question
class SurveyResponse(models.Model):
employee = models.ForeignKey(Employee, on_delete=models.CASCADE)
survey = models.ForeignKey(Survey, on_delete=models.CASCADE)
question = models.ForeignKey(Question, on_delete=models.CASCADE)
response = models.TextField(blank=True, null=True)
created_date = models.DateField(auto_now_add=True)
SaveStatus = models.BooleanField(default=False)
# def __str__(self):
# return self.survey, self.employee, self.response
<file_sep># Survey-App
1. Admin:
a. Users –
a.i. CRUD
a.i.1. Create employees for respective organization
b. Question Library
b.i. Create and Manage Questions –
b.i.1. CRUD
b.i.2. Should be able to Create Survey using following types of questions: (https://www.questionpro.com/tour/sample-questions.html ) :
b.i.2.a. Open Ended Questions - Questionnaire: Open ended questions which are in the form of Comment Box / Single Row Text / Numeric Input / Email Address questions are designed to collect narrative responses. Assign open-ended text as custom variables for data pre-population within the survey.
c. Setup surveys
c.i. Create and Manage Survey - CRUD
c.ii. Assign questions to survey
d. Assign Surveys to Employees
d.i. Bulk Assignment
e. Send email notification to user on – survey assignment, survey completion
2. Employee:
a. Take surveys
a.i. Survey Renderer – Implement pagination considering 5 questions each page.
a.ii. Continue survey in case user lefts in between
b. Landing page for Employee – welcome message, display survey assignment link to start/continue survey
<file_sep># Generated by Django 2.1.5 on 2019-02-13 06:44
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('Surveyapp', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='question',
name='survey',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='Surveyapp.Survey'),
),
]
<file_sep># Generated by Django 2.1.5 on 2019-02-13 06:02
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Employee',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('emp_name', models.CharField(max_length=200)),
('emp_username', models.CharField(max_length=100)),
('emp_password', models.CharField(max_length=100)),
('emp_designation', models.CharField(max_length=100)),
('emp_address', models.CharField(max_length=200)),
],
options={
'verbose_name_plural': 'Employees',
},
),
migrations.CreateModel(
name='Organization',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('company_name', models.CharField(max_length=200)),
('location', models.CharField(max_length=100)),
('description', models.CharField(max_length=200)),
],
),
migrations.CreateModel(
name='Question',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('question', models.TextField()),
('is_required', models.BooleanField()),
('question_type', models.CharField(choices=[('text', 'text'), (' radio ', 'radio'), ('select', 'select'), ('select-multiple', 'Select Multiple'), ('integer', 'integer')], default='text', max_length=200)),
('choices', models.TextField(blank=True, help_text='if the question type is "radio," "select," or "select multiple" provide a comma-separated list of options for this question .', null=True)),
],
),
migrations.CreateModel(
name='Survey',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('survey_name', models.CharField(max_length=200)),
('description', models.CharField(max_length=200)),
('date', models.DateField()),
],
options={
'verbose_name_plural': 'surveys',
},
),
migrations.CreateModel(
name='SurveyEmployee',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('employee', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Surveyapp.Employee')),
('survey', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Surveyapp.Survey')),
],
),
migrations.CreateModel(
name='SurveyQuestion',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('question', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Surveyapp.Question')),
('survey', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Surveyapp.Survey')),
],
),
migrations.CreateModel(
name='SurveyResponse',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('response', models.TextField(blank=True, null=True)),
('created_date', models.DateField(auto_now_add=True)),
('SaveStatus', models.BooleanField(default=False)),
('employee', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Surveyapp.Employee')),
('question', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Surveyapp.Question')),
('survey', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Surveyapp.Survey')),
],
),
migrations.AddField(
model_name='employee',
name='company',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='Surveyapp.Organization'),
),
]
<file_sep>from . import views
from django.urls import path
urlpatterns = [
path('', views.index, name='index'),
path('employee/', views.employee, name='employee'),
path('login/', views.login, name='login'),
path('que_list/<int:survey_id>', views.question_list, name='que_list'),
path('save/<int:survey_id>', views.save, name='save'),
path('logout/', views.logout, name='logout'),
path('sendmail/', views.send_email, name='sendmail'),
]<file_sep>DateTime==4.3
Django==2.1.7
psycopg2==2.7.7
pytz==2018.9
zope.interface==4.6.0
<file_sep>from django.shortcuts import render, redirect
from .forms import LoginForm
from .models import Employee, Survey, SurveyEmployee, Question, SurveyResponse
from django.core.mail import EmailMessage
def index(request):
return render(request, 'Surveyapp/home.html')
def question_list(request, survey_id):
m = request.session['username']
emp_record = Question.objects.filter(surveyquestion__survey_id=survey_id)
question_all = Question.objects.all()
context = {'session': m, 'survey_id': survey_id, 'question_list': emp_record}
return render(request, 'Surveyapp/question_list.html', context)
def employee(request):
m = request.session['username']
emp = Employee.objects.get(emp_username=m)
emp_record = SurveyEmployee.objects.filter(employee=emp.id)
Completed_survey = list()
incomplete_survey = list()
assign_survey = list()
total_survey = list()
for survey in emp_record:
survey_count = SurveyResponse.objects.filter(employee_id=emp.id, survey_id=survey.survey_id).count()
print("survey_count*****", survey_count)
if survey_count:
if SurveyResponse.objects.filter(survey_id=survey.survey_id, employee_id=emp.id, SaveStatus=True):
Completed_survey.append(survey)
else:
incomplete_survey.append(survey)
else:
assign_survey.append(survey)
incomplete_surveylen = len(incomplete_survey)
Completed_surveylen = len(Completed_survey)
context = {'session': m, 'total_survey': total_survey, 'survey_list': emp_record,
'completed_survey': Completed_survey, 'incomplete_survey': incomplete_survey,
'assign_survey': assign_survey, 'complete_count': Completed_surveylen,
'incomplete_count': incomplete_surveylen}
send_email(request)
return render(request, "Surveyapp/survey.html", context)
def login(request):
form = LoginForm()
context = {'form': form}
if request.method == "POST":
username = request.POST.get("username")
password = request.POST.get("<PASSWORD>")
if Employee.objects.get(emp_username=username, emp_password=password):
m = request.session['username'] = username
print("Session Name = "+m)
return redirect('employee')
return render(request, "Surveyapp/login.html", context)
return render(request, "Surveyapp/login.html", context)
def save(request, survey_id):
m = request.session['username']
emp = Employee.objects.get(emp_username=m)
for name in request.POST:
print("question id: ", name)
if name != "csrfmiddlewaretoken" and name != "submitform":
isRecord = SurveyResponse.objects.filter(survey=Survey.objects.get(id=survey_id),
employee=Employee.objects.get(id=emp.id),
question=Question.objects.get(id=name))
if not isRecord:
if request.POST[name]:
surveyResponseObj = SurveyResponse()
surveyResponseObj.survey = Survey.objects.get(id=survey_id)
surveyResponseObj.employee = Employee.objects.get(id=emp.id)
surveyResponseObj.question = Question.objects.get(id=name)
surveyResponseObj.response = request.POST[name]
if request.POST['submitform'] == "Save":
surveyResponseObj.SaveStatus = False
else:
surveyResponseObj.SaveStatus = True
surveyResponseObj.save()
return redirect("employee")
def send_email(request):
try:
name = request.session['username']
email = EmailMessage('Survey Link', 'http://127.0.0.1:8000/employee/', to=['<EMAIL>'])
print("---------------mail sent---------------")
except Exception as e:
print(e)
return redirect('employee')
def logout(request):
try:
del request.session['username']
except KeyError:
pass
return redirect('login')
<file_sep>class PasswordError(Exception):
def __init__(self,msg):
self.msg =msg
def __str__(self):
return self.msg
class InvaliPassword(PasswordError):
pass
class PasswordchangedDayago(PasswordError):
pass
| 4705f2dae165103f4b5384bdd5b4e31aea77ec1a | [
"Markdown",
"Python",
"Text"
] | 9 | Python | priyanshdeshmukh/Survey-App | a69131387c4b14e3abefb539ee5e20d6279ca927 | be8506e93bd4a2dbd1ba3b692cf1577bf8ee0a14 |
refs/heads/main | <file_sep>let a = "x";
let b = "c";
// 자바에서 변수 선언시, 상수 : const
// 변수 : let
// let a = "y" --> 오류발생
a = "hello";
console.log(a);
// hello //<file_sep>const mathScore = prompt("수학 몇점?");
const engScore = prompt("영어 몇점?");
const result = (mathScore + engScore)/2;
console.log(result);<file_sep>const name = 'mike'
const age = 30;
const namea = 'Mike';
const nameb = "Mike";
const namec = `Mike`;
// - infinity - //
const x = 1/0;
console.log(x);
// - boolean - //
console.log( namea== 'Mike');
//true//
// null 과 undefined
let ages;
console.log(ages);
let user = null;
console.log(user);
// - type of 연산자 - //
console.log(typeof 3);
//number//
console.log(typeof name); //ㅇㅣ거 왜이래
//string//
console.log(typeof true);
//boolean//
console.log(typeof "xxx");
//string//
console.log(typeof null);
//object// --> 객체형
console.log(typeof undefined);
//undefined//
// - ``의 중요성 - //
const myname = "hoyoung";
const message = `My name is ${myname}`;
const message2 = 'My name is ${myname}';
console.log(message);
//My name is hoyoung
console.log(message2);
//My name is ${myname}
// - 문자 + 숫자 - //
const a = "나는";
const b = "입니다";
console.log(a + age +"살" + b);
// 나는30살입니다<file_sep># Server_Javascript
consol.log("hi")<file_sep>
const nodemailer = require('nodemailer'); // nodemailer 선언
// email 계정정보
const email = {
// 이 부분은 mailtrap.io 에서 퍼온 코드
host: "smtp.mailtrap.io",
port: 2525,
auth: {
user: "c9991be77dda47",
pass: "<PASSWORD>"
}
}
const send = async (option) =>{
// 이메일 보내는 함수 선언 , 마찬가지로 퍼옴
nodemailer.createTransport(email).sendMail(option, (error, info) => {
if(error) {
console.log(error);
}else {
console.log(info);
// sendMail메소드 이용, 콜백함수를 만들어 에러발생시 콘솔 출력
return info.response;
}
})
}
let email_data = {
from: '<EMAIL>',
to: '<EMAIL>',
subject: '테스트메일',
text: '1시간안에 node.js 파헤치기'
}
send(email_data)
// 이메일 보내기<file_sep>// prompt - 입력받기 , alert - 팝업 confirm - 확인 or 취소 팝업
const namea = prompt("write your name :");
alert("Welcome," + namea );
confirm("Are you an adult?")
| e0dfa4a135f9816f97ae2541e571be1f248b6147 | [
"JavaScript",
"Markdown"
] | 6 | JavaScript | tkarndbrtk/Server_Javascript | e9e8147965372d94bcc68f04b214609b35cade0b | 957a7a54fda27d1f6ff6d1a064d9622879af1031 |
refs/heads/master | <repo_name>jahnavilatha589/Jahnavi_Logging<file_sep>/cleanCode/src/main/java/com/epam/cleanCode/CalculateInterest.java
package com.epam.cleanCode;
import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.util.Scanner;
import org.apache.log4j.Logger;
/**
* Hello world!
*
*/
public class CalculateInterest
{
static Logger logger = Logger.getLogger(CalculateInterest.class);
public static void main( String[] args ) throws IOException
{
logger.debug("This is DEBUG STATEMENT");
logger.info("This is INFO STATEMENT");
logger.warn("This is WARN STATEMENT");
logger.error("This is ERROR STATEMENT");
logger.fatal("This is FATAL STATEMENT");
Scanner sc = new Scanner(System.in);
logger.info("Choose either option 1 or 2 :"+"\n");
logger.info("1.Simple Interest"+"\n");
logger.info("2.Compound Interest"+"\n");
SimpleInterest simpleInterest = new SimpleInterest();
CompoundInterest compoundInterest = new CompoundInterest();
int choice = sc.nextInt();
switch(choice){
case 1:simpleInterest.readValues();
simpleInterest.calculateSimpleInterest();
simpleInterest.displayAmount();
break;
case 2:compoundInterest.readValues();
compoundInterest.calculateCompoundInterest();
compoundInterest.displayAmount();
break;
default:logger.info("Please Enter Valid Option!");
}
}
}
| cabb3039f25dc2783fd6faa30161680ff77b068b | [
"Java"
] | 1 | Java | jahnavilatha589/Jahnavi_Logging | 0ca32733a025c8de8f5a3552675694b85d6feb63 | e81afe89501910cfd7b9a2593467d66f4a4890a8 |
refs/heads/main | <file_sep>package pl.droidsonroids.unknownfeatures
import android.app.usage.NetworkStats
import android.app.usage.NetworkStatsManager
import android.content.Context
import android.net.ConnectivityManager
import android.net.TrafficStats
class TrafficStatistics {
init {
println(TrafficStats.getMobileTxBytes())
println(TrafficStats.getTotalTxBytes())
}
fun dumpStats(context:Context) {
val networkStatsManager = context.getSystemService(NetworkStatsManager::class.java)
val bucket = NetworkStats.Bucket()
networkStatsManager.querySummary(ConnectivityManager.TYPE_WIFI, null, 0, Long.MAX_VALUE).getNextBucket(bucket)
println(bucket.rxBytes)
}
}<file_sep>package pl.droidsonroids.unknownfeatures
import android.app.Service
import android.content.Intent
import android.os.IBinder
class RemoteService : Service() {
override fun onCreate() {
super.onCreate()
}
override fun onBind(intent: Intent): IBinder {
// Return the interface
return binder
}
private val binder = object : IRemoteService.Stub() {
override fun getPid(): Int {
return 123456
}
override fun basicTypes(
anInt: Int,
aLong: Long,
aBoolean: Boolean,
aFloat: Float,
aDouble: Double,
aString: String
) {
// Does nothing
}
}
}<file_sep>package pl.droidsonroids.receiver
import android.content.ComponentName
import android.content.Intent
import android.content.ServiceConnection
import android.os.Bundle
import android.os.IBinder
import androidx.activity.ComponentActivity
import pl.droidsonroids.unknownfeatures.IRemoteService
class MainActivity : ComponentActivity() {
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
val intent = Intent().setClassName("pl.droidsonroids.unknownfeatures", "RemoteService")
bindService(intent, RemoteServiceConnection(), BIND_AUTO_CREATE)
}
inner class RemoteServiceConnection :ServiceConnection {
override fun onServiceConnected(name: ComponentName?, service: IBinder?) {
val asInterface: IRemoteService = IRemoteService.Stub.asInterface(service)
println(asInterface.pid)
}
override fun onServiceDisconnected(name: ComponentName?) {
//TODO
}
}
}<file_sep>package pl.droidsonroids.library
import pl.droidsonroids.unknownfeatures.R
class Lib {
init {
R.string.text_in_library
}
}<file_sep>package pl.droidsonroids.unknownfeatures
import android.accounts.AccountManager
import android.app.Notification
import android.app.NotificationChannel
import android.app.NotificationManager
import android.graphics.Color
import android.os.Bundle
import android.os.UserManager
import com.google.android.material.snackbar.Snackbar
import androidx.appcompat.app.AppCompatActivity
import androidx.navigation.findNavController
import androidx.navigation.ui.AppBarConfiguration
import androidx.navigation.ui.navigateUp
import androidx.navigation.ui.setupActionBarWithNavController
import android.view.Menu
import android.view.MenuItem
import androidx.viewbinding.BuildConfig
import pl.droidsonroids.unknownfeatures.databinding.ActivityMainBinding
class MainActivity : AppCompatActivity() {
private lateinit var appBarConfiguration: AppBarConfiguration
private lateinit var binding: ActivityMainBinding
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
binding = ActivityMainBinding.inflate(layoutInflater)
setContentView(binding.root)
setSupportActionBar(binding.toolbar)
val navController = findNavController(R.id.nav_host_fragment_content_main)
appBarConfiguration = AppBarConfiguration(navController.graph)
setupActionBarWithNavController(navController, appBarConfiguration)
binding.fab.setOnClickListener { view ->
Snackbar.make(view, "Replace with your own action", Snackbar.LENGTH_LONG)
.setAction("Action", null).show()
}
println(BuildConfig.DEBUG)
TrafficStatistics().dumpStats(this)
showNotification()
//println(getString(R.string.text_in_library))
println(getSystemService(UserManager::class.java).isUserAGoat)
}
private fun showNotification() {
val notificationManager = getSystemService(NotificationManager::class.java)
with(notificationManager) {
val channel =
NotificationChannel("id", "channel", NotificationManager.IMPORTANCE_DEFAULT).apply {
description = "description"
setShowBadge(true)
}
createNotificationChannel(channel)
val notification = Notification.Builder(applicationContext, channel.id)
.setCategory("category")
.setContentTitle("title")
.setColor(Color.YELLOW)
.setChronometerCountDown(true)
.setColorized(true)
.setNumber(7)
.setSmallIcon(R.drawable.ic_launcher_foreground)
.build()
notify(1, notification)
}
}
override fun onCreateOptionsMenu(menu: Menu): Boolean {
// Inflate the menu; this adds items to the action bar if it is present.
menuInflater.inflate(R.menu.menu_main, menu)
return true
}
override fun onOptionsItemSelected(item: MenuItem): Boolean {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
return when (item.itemId) {
R.id.action_settings -> true
else -> super.onOptionsItemSelected(item)
}
}
override fun onSupportNavigateUp(): Boolean {
val navController = findNavController(R.id.nav_host_fragment_content_main)
return navController.navigateUp(appBarConfiguration)
|| super.onSupportNavigateUp()
}
}<file_sep>package pl.droidsonroids.unknownfeatures
import android.content.ContentProvider
import android.content.ContentValues
import android.database.Cursor
import android.net.Uri
class TestContentProvider : ContentProvider() {
override fun delete(uri: Uri, selection: String?, selectionArgs: Array<String>?): Int {
TODO("Implement this to handle requests to delete one or more rows")
}
override fun getType(uri: Uri): String? {
TODO(
"Implement this to handle requests for the MIME type of the data" +
"at the given URI"
)
}
override fun insert(uri: Uri, values: ContentValues?): Uri? {
TODO("Implement this to handle requests to insert a new row.")
}
override fun onCreate(): Boolean {
return true
}
override fun query(
uri: Uri, projection: Array<String>?, selection: String?,
selectionArgs: Array<String>?, sortOrder: String?
): Cursor? {
TODO("Implement this to handle query requests from clients.")
}
override fun update(
uri: Uri, values: ContentValues?, selection: String?,
selectionArgs: Array<String>?
): Int {
TODO("Implement this to handle requests to update one or more rows.")
}
} | 08c21602b0678d8897cd82fef159015c3b9422c0 | [
"Kotlin"
] | 6 | Kotlin | DroidsOnRoidsPlayground/unknown_android_features | 5fb797208069bc0ba9e315f09844651cf887f5ab | 0bee57366a5ed95673df329880dcef8c7e52f3dc |
refs/heads/master | <file_sep>from django.shortcuts import render,redirect
from django.conf import settings as s
curl=s.CURRENT_URL2
c=s.CURRENT_URL
def home(req):
return render(req,'User-Home.html',{'c':curl,'a':"home"})
def manageorder(req):
return render(req,'User-Manage Order.html',{'c':curl,'a':"ManageOption",'b':"ManageOrder"})
def changepwd(req):
return render(req,'User-Change Password.html',{'c':curl,'a':"settings",'b':"changepwd"})
def logout(req):
return redirect(c)
# def services(req):
# return render(req,'Services.html',{'c':curl,'a':"services"})
# def register(req):
# if req.method=="GET":
# return render(req,'Register.html',{'c':curl,'a':"register"})
# if req.method=="POST":
# return render(req,'Register.html',{'c':curl,'a':"register",'out':"Registration Successful"})
# def login(req):
# if req.method=="GET":
# return render(req,'Login.html',{'c':curl,'a':"login"})
# if req.method=="POST":
# return render(req,'Login.html',{'c':curl,'a':"login","out":"Log In Successful"})
<file_sep>from django.shortcuts import render,redirect
from django.conf import settings as s
curl=s.CURRENT_URL
def home(req):
return render(req,'Home.html',{'c':curl,'a':"home"})
def about(req):
return render(req,'About.html',{'c':curl,'a':"about"})
def contact(req):
return render(req,'Contact.html',{'c':curl,'a':"contact"})
def services(req):
return render(req,'Services.html',{'c':curl,'a':"services"})
def register(req):
if req.method=="GET":
return render(req,'Register.html',{'c':curl,'a':"register"})
if req.method=="POST":
return render(req,'Register.html',{'c':curl,'a':"register",'out':"Registration Successful"})
def login(req):
if req.method=="GET":
return render(req,'Login.html',{'c':curl,'a':"login"})
if req.method=="POST":
#return render(req,'Login.html',{'c':curl,'a':"login","out":"Log In Successful"})
#return redirect(curl+'Admin/')
return redirect(curl+'User/')
<file_sep>from django.shortcuts import render,redirect
from django.conf import settings as s
curl=s.CURRENT_URL1
c=s.CURRENT_URL
def home(req):
return render(req,'Admin-Home.html',{'c':curl,'a':"home"})
# def profile(req):
# return render(req,'Admin-Profile.html',{'c':curl,'a':"profile"})
def logout(req):
return redirect(c)
def manageuser(req):
return render(req,'Admin-ManageUser.html',{'c':curl,'a':"ManageOption",'b':"manageuser"})
def changepwd(req):
return render(req,'Admin-ChangePwd.html',{'c':curl,'a':"Settings",'b':"changepwd"})
# def services(req):
# return render(req,'Services.html',{'c':curl,'a':"services"})
# def register(req):
# if req.method=="GET":
# return render(req,'Register.html',{'c':curl,'a':"register"})
# if req.method=="POST":
# return render(req,'Register.html',{'c':curl,'a':"register",'out':"Registration Successful"})
# def login(req):
# if req.method=="GET":
# return render(req,'Login.html',{'c':curl,'a':"login"})
# if req.method=="POST":
# return render(req,'Login.html',{'c':curl,'a':"login","out":"Log In Successful"})
<file_sep>{% extends 'User-Home.html' %}
{% block me %}
<div id="me">
<div id="a" class='ManageOrder'><a href="{{c}}manageorder">Manage Orders</a></div>
<div id="a" class=""><a href="{{c}}">Order Cancellation</a></div>
<div class=""><a href="{{c}}">Payment History</a></div>
</div>
{% endblock %}
{% block banner %}
{% endblock %}
{% block content %}
<h1>Manage Order</h1>
{% endblock %}
<file_sep> function Name()
{
var name=document.getElementById("name").value
if(name.length<1)
{
document.getElementById("sp1").innerHTML="*required"
}
else
{
document.getElementById("sp1").innerHTML=""
}
}
function User()
{
var user=document.getElementById("user").value
var reg= /^\S*$/
if(user.length<1)
{
document.getElementById("sp2").innerHTML="*required"
}
else if(reg.test(user))
{
document.getElementById("sp2").innerHTML=""
}
else
{
document.getElementById("sp2").innerHTML="*no space between characters"
}
}
function Pwd()
{
var pwd=document.getElementById("pwd").value
if(pwd.length<1)
{
document.getElementById("sp3").innerHTML="*required"
}
else if(pwd.length<8 || pwd.length>16)
{
document.getElementById("sp3").innerHTML="*password must have 8-16 characters"
}
else
{
document.getElementById("sp3").innerHTML=""
}
}
function Cpwd()
{
var cpwd=document.getElementById("cpwd").value
var pwd=document.getElementById("pwd").value
if(cpwd.length<1)
{
document.getElementById("sp4").innerHTML="*required"
}
else if(pwd!==cpwd)
{
document.getElementById("sp4").innerHTML="*password does not match "
}
else
{
document.getElementById("sp4").innerHTML=""
}
}
function Email()
{
var email=document.getElementById("email").value
var reg=/^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/
if(email.length<1)
{
document.getElementById("sp5").innerHTML="*required"
}
else if(reg.test(email))
{
document.getElementById("sp5").innerHTML=""
}
else
{
document.getElementById("sp5").innerHTML="*invalid email"
}
}
function Mobile()
{
var mobile=document.getElementById("mobile").value
var reg= /^\+?([0-9]{2})\)?[-. ]?([0-9]{5})[-. ]?([0-9]{5})$/
if(mobile.length<5)
{
document.getElementById("sp6").innerHTML="*required"
}
else if(reg.test(mobile))
{
document.getElementById("sp6").innerHTML=""
}
else
{
document.getElementById("sp6").innerHTML="*invalid mobile number"
}
}
function Add()
{
var add=document.getElementById("add").value
if(add.length<1)
{
document.getElementById("sp7").innerHTML="*required"
}
else
{
document.getElementById("sp7").innerHTML=""
}
}
function Sub()
{
var sp1=document.getElementById("sp1").innerHTML
var sp2=document.getElementById("sp2").innerHTML
var sp3=document.getElementById("sp3").innerHTML
var sp4=document.getElementById("sp4").innerHTML
var sp5=document.getElementById("sp5").innerHTML
var sp6=document.getElementById("sp6").innerHTML
var sp7=document.getElementById("sp7").innerHTML
var name=document.getElementById("name").value
var user=document.getElementById("user").value
var pwd=document.getElementById("pwd").value
var cpwd=document.getElementById("cpwd").value
var email=document.getElementById("email").value
var mobile=document.getElementById("mobile").value
var add=document.getElementById("add").value
if(name.length==0 || user.length==0 || pwd.length==0 || cpwd.length==0 || email.length==0 || mobile.length==4 || add.length==0)
{
document.getElementById("sp8").innerHTML="Please fill the mandatory fields"
return false
}
if (sp1.length<1 && sp2.length<1 && sp3.length<1 && sp4.length<1 && sp5.length<1 && sp6.length<1 && sp7.length<1)
{
return true
}
else
{
document.getElementById("sp8").innerHTML="Please fill the Registration form correctly"
return false
}
}
function Log()
{
var sp2=document.getElementById("sp2").innerHTML
var sp3=document.getElementById("sp3").innerHTML
var user=document.getElementById("user").value
var pwd=document.getElementById("pwd").value
if(user.length==0 || pwd.length==0)
{
document.getElementById("sp9").innerHTML="Please fill the Log In Details"
return false
}
else if (sp2.length<1 && sp3.length<1)
{
return true
}
else
{
document.getElementById("sp9").innerHTML="User Name or Password Incorrect"
return false
}
}
<file_sep>"""BookMyMeal URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from App import views as v
from Admin import views as v2
from User import views as v3
urlpatterns = [
path('admin/', admin.site.urls),
path('',v.home),
path('about',v.about),
path('contact',v.contact),
path('services',v.services),
path('register',v.register),
path('login',v.login),
# Admin
path('Admin/',v2.home),
# path('Admin/profile',v2.profile),
# path('Admin/contact',v2.contact),
path('Admin/changepwd',v2.changepwd),
path('Admin/manageuser',v2.manageuser),
path('Admin/logout',v2.logout),
# User
path('User/',v3.home),
# path('User/profile',v3.profile),
path('User/manageorder',v3.manageorder),
path('User/changepwd',v3.changepwd),
# path('User/register',v3.register),
path('User/logout',v3.logout)
]
| a45281264efb36f252ab06bd5b43c7a5b18507ea | [
"JavaScript",
"Python",
"HTML"
] | 6 | Python | Suryam26/BookMyMeal | 3918e58a29fc5801dd491a859de22af4e7bd6a17 | b56d456ae6ee07476cc64f0087c49dadcdc563d4 |
refs/heads/master | <repo_name>YspAnimal/BeerRankingProject<file_sep>/WordAnalyzeModule.R
require(sqldf)
require(RSQLite)
require(tm)
require(dplyr)
require(Rstem)
#require(Snowball)
library(wordnet)
library(wordcloud)
library(RODBCext)
## ---- Data access ------------------------------------------------------------
## Open the local SQLite databases and fetch the joined review rows analysed below.
# NOTE(review): only `db1` (NewBase/BeerDB.sqlite) is ever queried; `db`
# (BeerDB.sqlite) is opened and never used — presumably a leftover. Confirm
# whether it can be removed entirely.
db1 <- dbConnect(SQLite(), dbname="NewBase/BeerDB.sqlite")
db <- dbConnect(SQLite(), dbname="BeerDB.sqlite")
# Whole-table snapshots, kept for interactive inspection.
BeerStyles <- dbReadTable(db1, "Styles")
Beers <- dbReadTable(db1, "Beers")
GeneralInfo <- dbReadTable(db1, "GeneralInfo")
BeerReviews <- dbReadTable(db1, "BeerReviewsNew")
# Three alternative queries; only SQLQueryReviews is executed below.
SQLQueryStyles <- "SELECT BeerStyle, Type, style FROM BeerStyles"
SQLQueryDescription <- "SELECT style, Name, Beers.BeerLink, Description FROM Beers JOIN GeneralInfo
                ON (Beers.BeerLink = GeneralInfo.BeerLink)"
SQLQueryReviews <- "SELECT style, Name, Beers.BeerLink, V2 FROM Beers JOIN BeerReviewsNew
                ON (Beers.BeerLink = BeerReviewsNew.V1)"
#WHERE Beers.BeerLink = 71"
myQuery <- dbSendQuery(db1, SQLQueryReviews)
Beerdata <- dbFetch(myQuery, n = -1)
dbClearResult(myQuery)  # release the pending result set before disconnecting
dbDisconnect(db)
dbDisconnect(db1)       # BUG FIX: db1 was left open (connection leak); only the unused db was closed
## ---- Corpus preparation -----------------------------------------------------
## Collapse all texts for style 71 into one document and clean it with tm.
# BUG FIX: the active query is SQLQueryReviews, whose free-text column is V2;
# SQLQueryDescription exposes it as Description. The original hard-coded
# `select(Description)` / `temp$Description`, which errors on the review data.
# Pick whichever text column the fetched frame actually carries.
temp <- filter(Beerdata, style == "71")
text_col <- if ("Description" %in% names(temp)) temp$Description else temp$V2
# (removed: `tm_map(temp, asPlain)` — tm_map() only accepts a corpus, not a
#  data frame, and its result was discarded anyway)
temp_txt <- paste(text_col, collapse = " ")  # one big document for the style
temp.vec <- VectorSource(temp_txt)
temp.cor <- Corpus(temp.vec)
#summary(temp.cor)
#inspect(temp.cor)
temp.cor <- tm_map(temp.cor, content_transformer(tolower))
temp.cor <- tm_map(temp.cor, removePunctuation)
temp.cor <- tm_map(temp.cor, removeNumbers)
# Domain stop words: generic beer vocabulary would dominate the cloud otherwise.
Beerstopwords <- c(stopwords("english"), "beer", "beers", "bottle", "conditioned", "ale", "abbey", "brewed")
temp.cor <- tm_map(temp.cor, removeWords, Beerstopwords)
temp.cor <- tm_map(temp.cor, stripWhitespace)
DTM <- DocumentTermMatrix(temp.cor)
#TDM <- TermDocumentMatrix(temp.cor)
#inspect(TDM)
inspect(DTM)
## ---- Term frequencies and word cloud ----------------------------------------
# BUG FIX: the original computed the same frequency vector twice (`frequency`
# and `DTM_v` were identical, and DTM_Mat was assigned twice); compute once.
DTM_Mat <- as.matrix(DTM)
frequency <- sort(colSums(DTM_Mat), decreasing = TRUE)
head(frequency)
frequency[which(frequency>=5)]  # terms mentioned at least 5 times
DTM_d <- data.frame(word = names(frequency), freq = frequency)
table(DTM_d$freq)               # distribution of term frequencies
pal2 <- brewer.pal(8,"Dark2")
png("wordcloud_Beers_Rew.png", width=480,height=300)
wordcloud(DTM_d$word,DTM_d$freq, scale=c(8,.5),min.freq=2,
          max.words=Inf, random.order=FALSE, rot.per=.30, colors=pal2)
dev.off()
<file_sep>/WebScrapModule.R
require(rvest)
require(reshape2)
require(stringr)
require(plyr)
require(sqldf)
require(data.table)
require(parallel)
require(XML)       # readHTMLTable() used in makeBeerDF() comes from XML, not rvest
# Keep scraped character columns as plain strings, not factors.
options(stringsAsFactors = FALSE)
## Scrape ratebeer's style-overview page into a single data frame.
## `link` is the styles page URL. The result has one row per beer style:
## the style name (melt's `value`), its top-level group (melt's `L1`), and
## the style's relative link in column `beer_links`.
GetBeerStylesDataframe <- function (link) {
  overview_page <- read_html(link)
  # Top-level group headers shown on the page.
  group_names <- overview_page %>% html_nodes(".groupname") %>% html_text()
  # One <li> node set per group.
  group_items <- overview_page %>%
    html_nodes(".styleGroup") %>%
    lapply(html_nodes, "li")
  # Style names as a named list keyed by group, melted to long form.
  style_list <- lapply(group_items, html_text)
  names(style_list) <- group_names
  beer_styles <- melt(style_list)
  # Per-style relative links, flattened in the same order as the styles.
  # (Variable name `beer_links` is kept: cbind uses it as the column name,
  # and callers reference stylesFrame$beer_links.)
  beer_links <- group_items %>%
    lapply(html_nodes, "a") %>%
    lapply(html_attr, "href") %>%
    unlist
  beer_styles <- cbind(beer_styles, beer_links)
  return(beer_styles)
}
#### Aggregate full information about beer styles into one data frame --------
## For every style page, read the 9th <script> block and extract the eight
## query parameters (style id, sort, order, min, max, retired, new, mine)
## expected by ratebeer's AJAX beer-table endpoint.
# NOTE(review): the indices 9 and c(6,9,...,27) are tied to the 2016-era page
# layout and will silently break if ratebeer changes its markup — confirm.
stylesFrame <- GetBeerStylesDataframe("http://www.ratebeer.com/beerstyles")
parlist <- list()
for (i in seq_len(nrow(stylesFrame))) {
  styleLink <- as.vector(stylesFrame$beer_links[[i]])
  StylePage <- paste0("http://www.ratebeer.com", styleLink)
  ScriptTXT <- read_html(StylePage) %>% html_nodes("script")
  ScriptTXT <- html_text(ScriptTXT[9]) %>% strsplit("[\r\n\t]") %>% unlist
  needPar <- ScriptTXT[c(6,9,12,15,18,21,24,27)]
  needParVal <- gsub('([[:punct:]])([[:alpha:]]*)([[:blank:]]*)', "", needPar)  # keep only the values
  parlist[[i]] <- needParVal
}
parlist <- do.call(rbind, parlist)
stylesFrame <- cbind(stylesFrame, parlist, stringsAsFactors = FALSE)
# Column names are derived from the raw "name=value" strings of the LAST style
# processed (`needPar` deliberately leaks out of the loop above): strip
# punctuation/blanks, then digits, leaving only the parameter names.
# (BUG FIX: a dead duplicate recomputation of needParVal was removed here.)
needParNames <- gsub('([[:punct:]]*)([[:blank:]]*)', "", needPar)
needParNames <- gsub('[[:digit:]]*', "", needParNames)
names(stylesFrame) <- c("BeerStyle", "Type", "Link", needParNames) # Rename columns in dataset
#### Build the per-style AJAX link later used to scrape each beer table.
stylesFrame <- mutate(stylesFrame, JSlink = paste0("http://www.ratebeer.com", "/ajax/top-beer-by-style.asp?", "style=", style,
                                 "&sort=", sort, "&order=", order, "&min=", min, "&max=", max,
                                 "&retired=", retired, "&new=", new, "&mine=", mine))
## Fetch the ranked-beer table for each AJAX style link.
## `LinkList`: character vector of JSlink URLs (one per style).
## Returns a data.table: first column replaced by the style id parsed from the
## link, original table columns, plus a `links` column of absolute beer URLs.
## NOTE: readHTMLTable() comes from the XML package (see require(XML) at top).
makeBeerDF <- function(LinkList){
  d <- lapply(LinkList, function(i){
    # BUG FIX (idiom): local was named `table`, shadowing base::table.
    beers <- readHTMLTable(i, as.data.frame = TRUE)[[1]]   # ranked beers table
    links <- read_html(i) %>% html_nodes("a") %>% html_attr("href")
    beers[, 1] <- str_extract(i, "\\d+")                   # style id from the link
    links <- paste0("http://www.ratebeer.com", links)      # make URLs absolute
    beers <- cbind(beers, links, stringsAsFactors = FALSE)
  } )
  rbindlist(d)
  #do.call(rbind, d)
  #as.data.frame(d)
  #names(d) <- c("style", "Name", "Count", "ABV", "Score", "BeerLink")
  #return(d)
}
# Fetch every style's beer list and give the combined table readable names.
beerTable <- makeBeerDF(stylesFrame$JSlink)
names(beerTable) <- c("style", "Name", "Count", "ABV", "Score", "BeerLink")
####Try to scrap General information and description about beers.
# Scrape overall score, brewer and description for each beer page.
# BeerLink: character vector of relative ratebeer.com beer URLs.
# Returns a data.frame with columns Overall, Brewed, Description, BeerLink.
makeBeerGeneralInformationDF <- function(BeerLink) {
  d <- lapply(BeerLink, function(i) {
    URL <- paste0("http://www.ratebeer.com", i)
    Info <- read_html(URL) %>%
      html_nodes("#container table+ div:nth-child(2) , #_brand4 span , #_aggregateRating6 span , #_description3") %>%
      html_text()
    # Keep the score / brewer / description fields and tag with the source URL.
    append(Info[c(2, 4, 5)], URL)
  })
  # Bug fix: previously the do.call(rbind, ...) and as.data.frame(...) results
  # were discarded and the raw list was returned with mis-assigned names.
  # Bind the per-beer rows and name the columns on the actual return value.
  d <- as.data.frame(do.call(rbind, d), stringsAsFactors = FALSE)
  names(d) <- c("Overall", "Brewed", "Description", "BeerLink")
  return(d)
}
# Per-beer general info; slow — one HTTP request per beer in beerTable.
BeerGeneralInformation <- makeBeerGeneralInformationDF(beerTable$BeerLink)
##Try to scrap beer reviews
# Scrape the full review text for each beer, in parallel via mclapply.
# For each beer link: read the review page count, then fetch every page and
# collect its comment <div>s. Returns a list with one matrix (reviews, link)
# per beer. NOTE: beers whose page count is NA or <= 2 yield no reviews at
# all — presumably intentional to skip low-review beers, but verify.
makeBeerReviewsDF <- function(BeerLink) {
d <- mclapply(BeerLink, function(Link){
PageNumber <-read_html(Link) %>% html_nodes("b.ballno+ .ballno") %>% html_text() #Scrap max page namber for selected beer
InfoV <- c()
if (!is.na(PageNumber)) {
if (as.numeric(PageNumber)>2) {
for (i in c(1:as.numeric(PageNumber))) {
# Review pages follow the pattern <beer-url>1/<page>/ .
URL <- paste0(Link, "1/", i,"/")
Info <- read_html(URL) %>% html_nodes("#container div br+ div") %>% html_text() #Scrap all comment at page
#Info <- Info[1:(length(Info)-2)]
InfoV <- append(InfoV, Info)
# Progress trace (link, total pages, current page).
print(c(Link, PageNumber, i))
}
}
}
# Pair every review with its beer link.
do.call(cbind, list(InfoV, Link))
})
#do.call(rbind, d)
#as.data.frame(d)
#names(d) <- c("Overall", "Brewed", "Description", "BeerLink")
#do.call(rbind, lapply(d, unlist))
return(d)
}
# Scrape all reviews (very slow), plus a one-beer smoke test below.
BeerReviews <- makeBeerReviewsDF(BeerGeneralInformation$BeerLink)
Test <- makeBeerReviewsDF("http://www.ratebeer.com/beer/t-ij-natte/6057/")
# Manual, inlined version of the review-scrape loop used for debugging.
PageNumber <-read_html("http://www.ratebeer.com/beer/t-ij-natte/6057/") %>% html_nodes("b.ballno+ .ballno") %>% html_text() #Scrap max page namber for selected beer
InfoV <- c()
if ((!is.na(PageNumber))&(PageNumber>2)) {
for (i in c(2:PageNumber)) {
URL <- paste0("http://www.ratebeer.com/beer/la-choulette-de-noel/23326/", "1/", i,"/")
Info <- read_html(URL) %>% html_nodes("#container div br+ div") %>% html_text() #Scrap all comment at page
#Info <- Info[1:(length(Info)-2)]
InfoV <- append(InfoV, Info)
print(c("http://www.ratebeer.com/beer/la-choulette-de-noel/23326/", PageNumber, i))
}
}
###Read the scraped data frames back from the SQLite database
db <- dbConnect(SQLite(), dbname="BeerDB.sqlite")
stylesFrame <- dbReadTable(db, "Styles")
beerTable <- dbReadTable(db, "Beers")
BeerGeneralInformation <- dbReadTable(db, "GeneralInfo")
dbDisconnect(db)
###Write data frames to the SQLite database (overwrites existing tables)
db <- dbConnect(SQLite(), dbname="BeerDB.sqlite")
dbWriteTable(conn = db, name = "Styles", value = stylesFrame, row.names = FALSE, overwrite = TRUE)
dbWriteTable(conn = db, name = "Beers", value = beerTable, row.names = FALSE, overwrite = TRUE)
dbWriteTable(conn = db, name = "GeneralInfo", value = as.data.frame(BeerGeneralInformation), row.names = FALSE, overwrite = TRUE)
# Sanity checks that the writes round-trip.
dbReadTable(db, "Styles")
str(dbReadTable(db, "Beers"))
BeerGeneralInformation <- dbReadTable(db, "GeneralInfo")
dbDisconnect(db)
#test scrap comments!!!!!
# BeerGeneralInformation$BeerLink[1]
# URL <- BeerGeneralInformation$BeerLink[1]
# PageNumber <-read_html(URL) %>% html_nodes("b.ballno+ .ballno") %>% html_text() #Scrap max page namber for selected beer
# for (i in c(2:3)) {
# URLf <- paste0(URL, "1/", i,"/")
# Info <- read_html(URLf) %>% html_nodes("#container div br+ div") %>% html_text() #Scrap all comment at page
# Info <- Info[1:(length(Info)-2)]
# Info <- append(Info, Info)
# }
# t <- cbind(Info, URL)
#
#
#
# Infot <- read_html(URL) %>%
# html_nodes("#container div br+ div") %>%
# html_text()
# d <- lapply(BeerGeneralInformation$BeerLink[1:2], function(Link){
# PageNumber <-read_html(Link) %>% html_nodes("b.ballno+ .ballno") %>% html_text() #Scrap max page namber for selected beer
# InfoV <- c()
# for (i in c(2:PageNumber)) {
# URL <- paste0(Link, "1/", i,"/")
# Info <- read_html(URL) %>% html_nodes("#container div br+ div") %>% html_text() #Scrap all comment at page
# Info <- Info[1:(length(Info)-2)]
# InfoV <- append(InfoV, Info)
# #do.call(cbind, list(Info))
# }
# #Info <- append(Info[c(2,4,5)], URL)
# do.call(cbind, list(InfoV, Link))
# })
# eee <- do.call(rbind, lapply(d, unlist))
#
#
# eee <- bindlist(d)
# eee <- as.data.frame(d)
# do.call(rbind, d)
#
####Try to scrap user comments and rating scores.
# ResultURL <- paste0("http://www.ratebeer.com", beerTable[1, ]$BeerLink)
# beerGeneralInfo <- read_html(ResultURL) %>% html_nodes("#container div table:nth-child(8)") %>% html_text()
# beerGeneralInfo[, 1] <- stylesFrame[1, ]$style
# beerGeneralInfo <- cbind(beerGeneralInfo[, 1:5], beerLinks)
#
# html_attr(beerGeneralInfo, "id")
# test <- html_text(beerGeneralInfo)
#
#BeerGeneralInformation <- as.data.frame(BeerGeneralInformation)
#names(BeerGeneralInformation) <- c("Overall", "Brewed", "Description", "BeerLink")
## #container div table:nth-child(8)
# ResultURL <- paste0("http://www.ratebeer.com", "/ajax/top-beer-by-style.asp?", "style=", stylesFrame[1, ]$style,
# "&sort=", stylesFrame[1, ]$sort, "&order=", stylesFrame[1, ]$order, "&min=", stylesFrame[1, ]$min, "&max=", stylesFrame[1, ]$max,
# "&retired=", stylesFrame[1, ]$retired, "&new=", stylesFrame[1, ]$new, "&mine=", stylesFrame[1, ]$mine)
#
#
#
# styleLink <- as.vector(stylesFrame$Link[[1]])
# StylePage <- paste0("http://www.ratebeer.com", styleLink)
#
# #GetBeersTable
# ScriptTXT <- read_html(StylePage) %>% html_nodes("script")
# ScriptTXT <- html_text(ScriptTXT[9]) %>% strsplit("[\r\n\t]") %>% unlist
# needPar <- ScriptTXT[c(6,9,12,15,18,21,24,27)]
# needParVal <- gsub('([[:punct:]])([[:alpha:]]*)([[:blank:]]*)', "", needPar)
# needParNames <- gsub('([[:punct:]]*)([[:blank:]]*)', "", needPar)
# needParNames <- gsub('[[:digit:]]*', "", needParNames)
#
# ParDataFrame <- as.data.frame(rbind(needParVal))
# names(ParDataFrame) <- needParNames
#
#
#
#
# #needPar <- cbind(needParNames, needParVal)
#
#
#
#
# library(XML)
# beerTable <- readHTMLTable(ResultURL, as.data.frame = TRUE)[[1]]
# beerLinks <- read_html(ResultURL) %>% html_nodes("a") %>% html_attr("href")
# beerTable[, 1] <- stylesFrame[1, ]$style
# beerTable <- cbind(beerTable[, 1:5], beerLinks)
##Test PhantomJS
# write out a script phantomjs can process
# library(RSelenium)
# pJS <- phantom()
# remDr <- remoteDriver(browserName = "chrome")
# remDr$open()
# remDr$navigate(StylePage)# process it with phantomjs
# result <- remDr$phantomExecute("var page = http://www.ratebeer.com/beerstyles/abbey-dubbel/71/;
# var fs = require('fs');
# page.onLoadFinished = function(status) {
# var file = fs.open('output.htm', \"w\");
# file.write(page.content);
# file.close();
# phantom.exit();
# };")
#
# str(beer_styles)
#
# html_children(beer_stylesHTML[[1]])
#
#beer_stylesMatrix <- data.frame(sapply(beer_styles, '[', seq(max(sapply(beer_styles, length)))))
###!!!Test scrapping
# beer_stylespage <- read_html("http://www.ratebeer.com/beerstyles")
# beer_stylesUP <- html_nodes(beer_stylespage, ".groupname")
# beer_stylesCustomL <- html_nodes(beer_stylespage, ".styleGroup")
# beer_stylesCustom <- html_nodes(beer_stylesCustomL, "a")
#
# beer_stylesTable <- html_nodes(beer_stylespage, ".col-lg-12 table")
#
# table
# v <- html_text(beer_stylesCustom)
#
# html_text(beer_stylesCustom, trim = TRUE)
# html_attrs(beer_stylesCustom)[[1]]
#
#
# Ta <- html_table(beer_stylesTable, fill = TRUE)
# html_table(beer_stylesCustom)
# str<file_sep>/ui.R
# Shiny UI: word-cloud explorer over beer reviews. The Type/Style dropdowns
# are rendered server-side (uiOutput) so Style can depend on Type.
fluidPage(
# Application title
titlePanel("Beer reviews Cloud"),
sidebarLayout(
# Sidebar with the dependent dropdowns and word-cloud tuning sliders
sidebarPanel(
uiOutput("choose_Type"),
uiOutput("choose_Style"),
sliderInput("freq",
"Minimum Frequency:",
min = 10, max = 200, value = 100),
sliderInput("max",
"Maximum Number of Words:",
min = 30, max = 500, value = 100),
hr(),
# Recompute trigger; the server isolates on this button.
actionButton("update", "Change")
),
# Show Word Cloud
mainPanel(
#plotOutput("plotDescriptions"),
tabsetPanel(type = "tabs",
tabPanel("As product is"),
tabPanel("As people think", plotOutput("plotReviews"))
)
)
)
)<file_sep>/scrape.js
// PhantomJS helper: render a RateBeer style page (JS included) and dump the
// resulting HTML to stdout so R can parse the fully rendered markup.
var page = require('webpage').create();
var targetUrl = 'http://www.ratebeer.com/beerstyles/abbey-dubbel/71/';
page.open(targetUrl, function () {
    console.log(page.content); // rendered page source
    phantom.exit();
});
<file_sep>/server.R
# Shiny server: renders the dependent Type/Style dropdowns and draws a word
# cloud of the selected style's reviews.
function(input, output, session) {
# Define a reactive expression for the document term matrix
# Drop-down selection box: beer types come from the global TypesFrame.
output$choose_Type <- renderUI({
selectInput("Type", "Type", as.list(Types))
})
# Styles depend on the currently selected type.
output$choose_Style <- renderUI({
if(is.null(input$Type))
return()
Styles <- GetStyles(input$Type)
selectInput("Style", "Styles", as.list(Styles))
})
# Term/frequency table for the selected style (expensive; memoised).
terms <- reactive({
if(is.null(input$Style))
return()
# Change when the "update" button is pressed...
input$update
# ...but not for anything else
isolate({
withProgress({
setProgress(message = "Processing corpus...")
getTermMatrix(input$Style)
})
})
})
# Make the wordcloud drawing predictable during a session
wordcloud_rep <- repeatable(wordcloud)
output$plotReviews <- renderPlot({
if(is.null(input$Style))
return()
v <- terms()
wordcloud_rep(v$word, v$freq, scale=c(4,0.5),
min.freq = input$freq, max.words=input$max, random.order=FALSE,
colors=brewer.pal(8, "Dark2"), vfont=c("gothic english", "plain"), rot.per=.45)
})
}#colors=brewer.pal(8, "Dark2")<file_sep>/global.R
require(shiny)
require(sqldf)
require(RSQLite)
require(tm)
require(dplyr)
library(wordcloud)
library(memoise)
require(RColorBrewer)
#library(RODBCext)
#require(Rstem)
#require(Snowball)
#library(wordnet)
# Load the small style/type lookup into memory once at app start, then close
# the connection; the Shiny handlers work off TypesFrame / Types.
db <- dbConnect(SQLite(), dbname="BeerDB.sqlite")
SQLQueryTypesStyles <- "SELECT BeerStyle, Type, style FROM Styles"
myQuery <- dbSendQuery(db, SQLQueryTypesStyles)
TypesFrame <- dbFetch(myQuery, n = -1)
Types <- unique(TypesFrame$Type)
dbDisconnect(db)
GetStyles <- function(type){
  # Return the distinct beer-style names belonging to one beer type, using
  # the in-memory TypesFrame lookup loaded from SQLite at start-up.
  styles_for_type <- filter(TypesFrame, Type == type)
  unique(styles_for_type$BeerStyle)
}
# Build a word-frequency table (data.frame of word, freq) from all reviews of
# the given beer style. Memoised: each style's corpus is processed once per
# session.
getTermMatrix <- memoise(function(style){
# Map the human-readable style name back to its numeric style code.
styleN <- TypesFrame[which(TypesFrame$BeerStyle == style), ]$style
db <- dbConnect(SQLite(), dbname="BeerDB.sqlite")
# NOTE(review): values are pasted straight into SQL; safe only while style
# codes come from our own table — do not expose to raw user input.
SQLQueryBeers <- paste0("SELECT BeerLink FROM Beers WHERE Style = '", styleN, "'")
myQuery <- dbSendQuery(db, SQLQueryBeers)
Style <- dbFetch(myQuery, n = -1)
dbDisconnect(db)
db <- dbConnect(SQLite(), dbname="BeerDB.sqlite")
# Fetch the review text for every beer of the style.
data <- lapply(Style, function(y){
dbGetQuery(db, paste0("SELECT V1 FROM BeerReviewsNew WHERE V2='", y, "'"))
})
dbDisconnect(db)
data <- data$BeerLink
#tm_map(data, asPlain)
#temp_txt <- paste(temp$Description, collapse = " ")
# Standard tm pipeline: lowercase, strip punctuation/numbers/stopwords.
temp.vec <- VectorSource(data)
temp.cor <- Corpus(temp.vec)
#summary(temp.cor)
#inspect(temp.cor)
temp.cor <- tm_map(temp.cor, content_transformer(tolower))
temp.cor <- tm_map(temp.cor, removePunctuation)
temp.cor <- tm_map(temp.cor, removeNumbers)
#temp.cor <- tm_map(temp.cor, removeNumbers)
# Domain stopwords: review boilerplate that would dominate every cloud.
Beerstopwords <- c(stopwords("english"),"aroma", "appearance","taste",
"palate","overall","beer", "beers", "bottle", "conditioned",
"ale", "abbey", "brewed", "beer", "ale", "brewered", "abbey",
"bottle", "aroma", "flavour", "gagooglefillslotbeerpage")
temp.cor <- tm_map(temp.cor, removeWords, Beerstopwords)
temp.cor <- tm_map(temp.cor, stripWhitespace)
#DTM <- TermDocumentMatrix(temp.cor)
DTM <- DocumentTermMatrix(temp.cor)
DTM_Mat <- as.matrix(DTM)
DTM_Mat <- sort(colSums(DTM_Mat),decreasing=TRUE)
# Final value (returned): word/frequency data.frame sorted by frequency.
DTM_Mat <- data.frame(word = names(DTM_Mat),freq=DTM_Mat)
#DTM_v <- sort(colSums(DTM_Mat),decreasing=TRUE)
#DTM_d <- data.frame(word = names(DTM_v),freq=DTM_v)
#table(DTM_d$freq)
#pal2 <- brewer.pal(8,"Dark2")
#png("wordcloud_Beers_Rew.png", width=480,height=300)
#wordcloud(DTM_d$word,DTM_d$freq, scale=c(8,.5),min.freq=2,
# max.words=Inf, random.order=FALSE, rot.per=.30, colors=pal2)
#dev.off()
#return(data)
#sort(rowSums(DTM_Mat), decreasing = TRUE)
})
# db1 <- dbConnect(SQLite(), dbname="NewBase/BeerDB.sqlite")
# db <- dbConnect(SQLite(), dbname="BeerDB.sqlite")
#
# BeerStyles <- dbReadTable(db1, "Styles")
# Beers <- dbReadTable(db1, "Beers")
# GeneralInfo <- dbReadTable(db1, "GeneralInfo")
# BeerReviews <- dbReadTable(db1, "BeerReviewsNew")
#
#
#
#
#
#
#
# SQLQueryStyles <- "SELECT BeerStyle, Type, style FROM BeerStyles"
#
# SQLQueryDescription <- "SELECT style, Name, Beers.BeerLink, Description FROM Beers JOIN GeneralInfo
# ON (Beers.BeerLink = GeneralInfo.BeerLink)"
#
# SQLQueryReviews <- "SELECT style, Name, Beers.BeerLink, V2 FROM Beers JOIN BeerReviewsNew
# ON (Beers.BeerLink = BeerReviewsNew.V1)"
#
#
# #WHERE Beers.BeerLink = 71"
#
# myQuery <- dbSendQuery(db1, SQLQueryReviews)
# Beerdata <- dbFetch(myQuery, n = -1)
# dbDisconnect(db)
#
# temp <- filter(Beerdata, style == "71") %>% select(Description)
#
# tm_map(temp, asPlain)
# temp_txt <- paste(temp$Description, collapse = " ")
# temp.vec <- VectorSource(temp_txt)
# temp.cor <- Corpus(temp.vec)
# #summary(temp.cor)
# #inspect(temp.cor)
#
# temp.cor <- tm_map(temp.cor, content_transformer(tolower))
# temp.cor <- tm_map(temp.cor, removePunctuation)
# temp.cor <- tm_map(temp.cor, removeNumbers)
# #temp.cor <- tm_map(temp.cor, removeNumbers)
# Beerstopwords <- c(stopwords("english"), "beer", "beers", "bottle", "conditioned", "ale", "abbey", "brewed", "beer", "ale", "brewered", "abbey", "bottle", "aroma", "flavour")
# temp.cor <- tm_map(temp.cor, removeWords, Beerstopwords)
# temp.cor <- tm_map(temp.cor, stripWhitespace)
# DTM <- DocumentTermMatrix(temp.cor)
# #TDM <- TermDocumentMatrix(temp.cor)
# #inspect(TDM)
# inspect(DTM)
# DTM_Mat <- as.matrix(DTM)
# frequency <- colSums(as.matrix(DTM))
# frequency <- sort(frequency, decreasing = TRUE)
# head(frequency)
# frequency[which(frequency>=5)]
#
# DTM_Mat <- as.matrix(DTM)
# DTM_v <- sort(colSums(DTM_Mat),decreasing=TRUE)
# DTM_d <- data.frame(word = names(DTM_v),freq=DTM_v)
# table(DTM_d$freq)
# pal2 <- brewer.pal(8,"Dark2")
# png("wordcloud_Beers_Rew.png", width=480,height=300)
# wordcloud(DTM_d$word,DTM_d$freq, scale=c(8,.5),min.freq=2,
# max.words=Inf, random.order=FALSE, rot.per=.30, colors=pal2)
# dev.off()
| 440362a85a3150f89ec37ff6da493dec39bc8ab9 | [
"JavaScript",
"R"
] | 6 | R | YspAnimal/BeerRankingProject | 575fd6c926c65fc34fbc44b74c1157211888de20 | dec73e4e446389bda843a428c1bbd69d9e1cda7f |
refs/heads/master | <file_sep>basic.forever(function () {
for (let j = 0; j <= 4; j++) {
for (let I = 0; I <= j; I++) {
led.plot(4 + I - j, 0 + 0)
}
}
})
| 90792eba7d08a4ff42e1fcd66f54a506e1f8789e | [
"TypeScript"
] | 1 | TypeScript | 410501052/code-1104 | afa74e99913801968b05f70309697869b55d96ce | f73becf809c1acc1d907e158860d3123943622bc |
refs/heads/master | <file_sep>using System.Collections.Generic;
namespace Combo.Models
{
/// <summary>
/// One side's stack of units, parsed from a list of single-letter codes
/// (A=Armor, B=Bowman, C=Cavalry, H=Healer, M=Mage, P=Priest, S=Spearman,
/// W=Wizard). Tracks per-type counts and derived stats used by BattleSide.
/// </summary>
public class Army
{
/// <summary>Remaining morale; the army loses when this reaches 0.</summary>
public int Morale { get; set; } = 3;
/// <summary>Action points: base 5, +1 per Cavalry unit.</summary>
public int Actions { get; set; } = 5;
public enum UnitType {
Armor,
Bowman,
Cavalry,
Priest,
Spearman,
Mage,
Wizard,
Healer,
}
/// <summary>Count of each unit type in the stack (every type pre-seeded to 0).</summary>
public Dictionary<UnitType, int> UnitTypeCount { get; set; } = new Dictionary<UnitType, int>()
{
{ UnitType.Armor, 0 },
{ UnitType.Bowman, 0 },
{ UnitType.Cavalry, 0 },
{ UnitType.Healer, 0 },
{ UnitType.Priest, 0 },
{ UnitType.Spearman, 0 },
{ UnitType.Mage, 0 },
{ UnitType.Wizard, 0 },
};
/// <summary>Recognised units in stack order.</summary>
public List<UnitType> Units { get; set; } = new List<UnitType>();
// NOTE: UnitString/UnitCount accumulate for EVERY input letter, including
// unrecognised ones (e.g. "L"), because they are updated outside the
// switch. Unrecognised letters therefore still contribute a die roll in
// BattleSide.Roll — confirm this is intended.
public string UnitString { get; set; }
public int UnitCount { get; set; }
/// <summary>Parse the stack from its single-letter unit codes.</summary>
public Army(List<string> stack)
{
foreach(string unit in stack)
{
switch(unit)
{
case "A":
UnitTypeCount[UnitType.Armor]++;
Units.Add(UnitType.Armor);
break;
case "B":
UnitTypeCount[UnitType.Bowman]++;
Units.Add(UnitType.Bowman);
break;
case "C":
UnitTypeCount[UnitType.Cavalry]++;
// Cavalry grants an extra action point.
Actions++;
Units.Add(UnitType.Cavalry);
break;
case "H":
UnitTypeCount[UnitType.Healer]++;
Units.Add(UnitType.Healer);
break;
case "M":
UnitTypeCount[UnitType.Mage]++;
Units.Add(UnitType.Mage);
break;
case "P":
UnitTypeCount[UnitType.Priest]++;
// Priest morale bonus is currently disabled.
//Morale++;
Units.Add(UnitType.Priest);
break;
case "S":
UnitTypeCount[UnitType.Spearman]++;
Units.Add(UnitType.Spearman);
break;
case "W":
UnitTypeCount[UnitType.Wizard]++;
Units.Add(UnitType.Wizard);
break;
default:
break;
}
UnitCount++;
UnitString = UnitString + unit;
}
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Combo.Models
{
/// <summary>
/// A full battle between two armies that share one die. Rounds are fought
/// until one army's morale is exhausted.
/// </summary>
public class Engagement
{
public BattleSide _attacker { get; private set; }
public BattleSide _defender { get; private set; }
public Engagement(Army attacker, Army defender, Dice dice)
{
_attacker = new BattleSide(attacker, dice);
_defender = new BattleSide(defender, dice);
}
/// <summary>
/// Fight rounds until either side's morale hits 0.
/// Returns true when the attacker wins (defender ran out of morale first).
/// </summary>
public bool Battle()
{
while(_attacker._army.Morale > 0 && _defender._army.Morale > 0)
{
Fight();
}
return _attacker._army.Morale > 0;
}
/// <summary>
/// One round: both sides roll until their totals differ (exact ties are
/// re-rolled), then the loser drops one morale point.
/// Returns true if the attacker won the round.
/// </summary>
public bool Fight()
{
int attackerSum = 0;
int defenderSum = 0;
while(attackerSum == defenderSum)
{
attackerSum = _attacker.Battle(_defender._army);
defenderSum = _defender.Battle(_attacker._army);
}
if (attackerSum > defenderSum)
{
_defender._army.Morale--;
return true;
} else
{
_attacker._army.Morale--;
return false;
}
}
}
/// <summary>
/// One army plus its dice. Computes a round score: one die per unit, mage
/// re-rolls, and counter-unit bonuses against the enemy composition.
/// </summary>
public class BattleSide
{
public Army _army { get; set; }
// Raw die results for the current round (one per unit).
public List<int> _rolls { get; set; }
public Dice _dice;
public BattleSide(Army army, Dice dice)
{
_army = army;
_dice = dice;
}
/// <summary>
/// Roll for the round and return the total:
/// dice sum, plus for each counter pairing (own count x enemy count):
/// Bowman>Spearman, Cavalry>Bowman, Spearman>Cavalry, Wizard>Mage,
/// Wizard>Priest; plus a flat +1 per Armor unit.
/// </summary>
public int Battle(Army enemy)
{
Roll(_dice);
// Account for Mage(s)
MageAffect();
var sum = _rolls.Sum();
// Account for Archer Bonus (Bowman > Spearman)
sum = sum + _army.UnitTypeCount[Army.UnitType.Bowman] * enemy.UnitTypeCount[Army.UnitType.Spearman];
// Account for Cavalry Bonus (Cavalry > Bowman)
sum = sum + _army.UnitTypeCount[Army.UnitType.Cavalry] * enemy.UnitTypeCount[Army.UnitType.Bowman];
// Account for Levy Bonus (Spearman > Cavalry)
sum = sum + _army.UnitTypeCount[Army.UnitType.Spearman] * enemy.UnitTypeCount[Army.UnitType.Cavalry];
// Account for Wizard Bonus (Wizard > Mage & Wizard > Priest)
sum = sum + _army.UnitTypeCount[Army.UnitType.Wizard] * enemy.UnitTypeCount[Army.UnitType.Mage];
sum = sum + _army.UnitTypeCount[Army.UnitType.Wizard] * enemy.UnitTypeCount[Army.UnitType.Priest];
// Account for Armor Bonus
sum = sum + _army.UnitTypeCount[Army.UnitType.Armor];
return sum;
}
// Roll one die per unit in the stack (UnitCount, not Units.Count).
private void Roll(Dice dice)
{
_rolls = new List<int>();
for (int i = 0; i < _army.UnitCount; i++)
{
_rolls.Add(dice.Roll());
}
}
// Lowest current roll and its index.
private (int val, int index) GetMin()
{
var min = _rolls.Min();
return (min, _rolls.IndexOf(min));
}
// Each mage may re-roll the lowest die once, but only if that die is at
// or below half the die's faces (rounded up).
private void MageAffect()
{
var mageCount = _army.UnitTypeCount[Army.UnitType.Mage];
while (mageCount > 0)
{
(int val, int index) = GetMin();
if (val <= Math.Ceiling((double)_dice.Sides / 2))
{
ReRoll(index);
}
mageCount--;
}
}
private void ReRoll(int index)
{
_rolls[index] = _dice.Roll();
}
}
}
<file_sep># comboSim
A small Monte Carlo combat simulator: it pits every four-unit army composition against every other and writes a table of win percentages per die size.
<file_sep>using System;
namespace Combo.Models
{
/// <summary>A fair n-sided die (6 by default) backed by a shared RNG.</summary>
public class Dice
{
public int Sides { get; } = 6;
// Shared Random source; public so the owner supplies/seeds it.
public readonly Random _rand;
public Dice(Random rand)
{
_rand = rand;
}
/// <summary>Die with a custom number of sides.</summary>
public Dice(Random rand, int sides) : this(rand)
{
Sides = sides;
}
/// <summary>One roll, uniform in [1, Sides].</summary>
public int Roll()
{
return _rand.Next(1, Sides + 1);
}
}
}
<file_sep>using Combo.Models;
using System;
using System.Collections.Generic;
namespace Combo
{
class Program
{
    // Die shared by every simulated engagement. Initialised to a 6-sided die
    // so the program also works with no CLI argument — previously _dice
    // stayed null in that case and Main crashed on _dice.Sides.
    static Dice _dice = new Dice(new Random());
    // Number of simulated battles per army pairing.
    static readonly int simCount = 1000;

    static void Main(string[] args)
    {
        var set = new List<string> { "L", "A", "C", "M", "S", "P" };
        var results = new List<List<double>>();
        var combinations = GenerateCombinations(set, 4);
        // Optional first CLI argument: number of sides on the die.
        if (args.Length > 0)
        {
            _dice = new Dice(new Random(), Int32.Parse(args[0]));
        }
        using (System.IO.StreamWriter file = new System.IO.StreamWriter($"./Engagements/{_dice.Sides}d.txt"))
        {
            // Header row: one column per army composition.
            file.Write($"{_dice.Sides}d\t");
            foreach (var combination in combinations)
            {
                string combinationStr = string.Join("", combination);
                file.Write($"{combinationStr}\t");
            }
            file.WriteLine();
            // Win-rate matrix; only the upper triangle is simulated, the
            // lower triangle is mirrored as 100 - opposite cell.
            var index = 0;
            foreach (var combination in combinations)
            {
                results.Add(new List<double>());
                string combinationStr = string.Join("", combination);
                file.Write($"{combinationStr}\t");
                var _index = 0;
                foreach (var _combination in combinations)
                {
                    if (_index >= index)
                    {
                        results[index].Add(Simulate(combination, _combination));
                    }
                    else
                    {
                        results[index].Add(100 - results[_index][index]);
                    }
                    file.Write(String.Format("{0,5:##0.0}\t", results[index][_index]));
                    _index++;
                }
                file.WriteLine();
                index++;
            }
        }
        Console.WriteLine("Done");
        Console.ReadKey();
    }

    // All k-element combinations with repetition drawn from combinationList
    // (the head remains available to its own sub-combinations).
    // NOTE: consumes the caller's list via RemoveAt while recursing.
    private static List<List<T>> GenerateCombinations<T>(List<T> combinationList, int k)
    {
        var combinations = new List<List<T>>();
        if (k == 0)
        {
            combinations.Add(new List<T>());
            return combinations;
        }
        if (combinationList.Count == 0)
        {
            return combinations;
        }
        T head = combinationList[0];
        var copiedCombinationList = new List<T>(combinationList);
        List<List<T>> subcombinations = GenerateCombinations(copiedCombinationList, k - 1);
        foreach (var subcombination in subcombinations)
        {
            subcombination.Insert(0, head);
            combinations.Add(subcombination);
        }
        combinationList.RemoveAt(0);
        combinations.AddRange(GenerateCombinations(combinationList, k));
        return combinations;
    }

    // Run simCount battles of armyOne attacking armyTwo; return the
    // attacker's win percentage in [0, 100].
    private static double Simulate(List<string> armyOne, List<string> armyTwo)
    {
        var wins = 0;
        for (int i = 0; i < simCount; i++)
        {
            Engagement engagement = new Engagement(new Army(armyOne), new Army(armyTwo), _dice);
            if (engagement.Battle())
            {
                wins++;
            }
        }
        return (double)wins / simCount * 100;
    }
}
}
| 445026c5c02d48f5a3d19f0b847f538bd8c6e7c7 | [
"Markdown",
"C#"
] | 5 | C# | gensior/comboSim | e7b3357c8f87ef5cf91de0169f6c03845288c260 | cdfd208d9c7f3a80ef785e857f0412033e34cec0 |
refs/heads/master | <repo_name>texas-delaney/language-game<file_sep>/src/reducers.js
// Reducer for the current word/translation pair.
export const word = (state, action) => {
  switch (action.type) {
    case 'ADD_WORD': {
      // Stamp the payload with a ms-timestamp id.
      const newWord = Object.assign({}, action.data, {
        id: +new Date
      });
      // Bug fix: previously Object.assign(state, newWord) mutated the
      // existing state object in place. Redux reducers must be pure, so
      // merge into a fresh object instead.
      return Object.assign({}, state, newWord);
    }
    default:
      return state || {};
  }
}
// Reducer for the three answer-choice language codes.
// ADD_LANGUAGES replaces the list wholesale; anything else keeps the
// current list (empty list when the store is uninitialised).
export const languages = (state, action) =>
  action.type === 'ADD_LANGUAGES' ? action.data : (state || [])
// Reducer for the player's score counter (starts at 0).
export const score = (state, action) => {
  if (action.type === 'INC_SCORE') return state + 1;
  if (action.type === 'DEC_SCORE') return state - 1;
  return state || 0;
}
// Reducer for the loading-spinner flag (false when uninitialised).
export const loading = (state, action) => {
  if (action.type === 'START_LOADING') return true;
  if (action.type === 'FINISH_LOADING') return false;
  return state || false;
}
<file_sep>/src/actions.js
// Action creators for the language-game store; handled in reducers.js.
export const addWord = word => ({ type: 'ADD_WORD' , data: word })
export const chooseLanguage = languages => ({ type: 'ADD_LANGUAGES', data: languages })
export const incrementScore = () => ({ type: 'INC_SCORE' });
export const decrementScore = () => ({ type: 'DEC_SCORE' });
export const startLoading = () => ({ type: 'START_LOADING' });
export const finishLoading = () => ({ type: 'FINISH_LOADING' });
<file_sep>/src/components/HomeWrapper.jsx
import React, { Component } from 'react';
import { connect } from 'react-redux';
import AlertContainer from 'react-alert';
import languageList from '../languageList'
import { addWord, chooseLanguage, incrementScore, startLoading, finishLoading } from '../actions';
// Bind the action creators to dispatch so the component can invoke them as
// plain function props.
const mapDispatchToProps = dispatch => ({
addTranslation: word => dispatch(addWord(word)),
chooseLanguage: languages => dispatch(chooseLanguage(languages)),
incrementScore: () => dispatch(incrementScore()),
startLoading: () => dispatch(startLoading()),
finishLoading: () => dispatch(finishLoading())
})
// Expose the store slices this component reads as props.
const mapStateToProps = ({word, languages, score, loading}) => ({
word,
languages,
score,
loading
})
// Main game screen: fetches a random English word, translates it into a
// random language via Yandex, and asks the player to guess the language.
class HomeWrapper extends Component {
constructor(props){
super(props);
// Settings for the react-alert container rendered in render().
this.alertOptions = {
offset: 0,
position: 'top left',
theme: 'light',
time: 5000,
transition: 'scale'
};
}
// Random integer in [min, max]. NOTE(review): called with
// (languageList.length, 0), so index languageList.length is reachable and
// would yield undefined — likely an off-by-one; verify against languageList.
randomNumber(max, min) {
return Math.floor(Math.random() * (max - min + 1)) + min
}
/**
* Randomize array element order in-place.
* Using Durstenfeld shuffle algorithm.
*/
shuffleArray(array) {
for (var i = array.length - 1; i > 0; i--) {
var j = Math.floor(Math.random() * (i + 1));
var temp = array[i];
array[i] = array[j];
array[j] = temp;
}
return array;
}
// Build one round: pick the answer language plus two decoys, fetch a random
// 3-5 letter word, translate it, then push word + choices into the store.
// Retries itself when the API returns the word untranslated.
// NOTE(review): the Yandex API key is hard-coded below — move to config.
generateTranslation() {
this.props.startLoading()
let language = languageList[this.randomNumber(languageList.length, 0)]
let languageOptions = [
language,
languageList[this.randomNumber(languageList.length, 0)],
languageList[this.randomNumber(languageList.length, 0)]
]
languageOptions = this.shuffleArray(languageOptions)
let wordLength = Math.floor(Math.random() * (5 - 3 + 1)) + 3
// Get a Random Word
fetch(`http://randomword.setgetgo.com/get.php?len=${wordLength}`)
.then(response => {
return response.text()
}).then(data => {
console.log(data)
let url = `https://translate.yandex.net/api/v1.5/tr.json/translate?key=trnsl.1.1.20160712T072542Z.499526d43e5d5cd2.7a480431617b86f5294ac82c2d8398e81aca76d2&text=${data}&lang=en-${language}`
// Translate the random word
fetch(url)
.then(urlResponse => {
return urlResponse.json()
}).then(urlData => {
//If the word cannot be translated generate a new one
if (urlData.text.toString() === data) {
throw new Error('word had no translation, let me find a new one ...');
}
console.log(urlData.text.toString());
// Call the redux action with the word data
this.props.addTranslation(Object.assign({}, {
word: data,
translatedWord: urlData.text.toString(),
language
}));
}).then(() => {
this.props.chooseLanguage([...languageOptions])
this.props.finishLoading()
}).catch((err) => {
// Any failure (including the no-translation throw) starts a new round.
console.log(err)
this.generateTranslation()
})
})
//
// fetch(url)
// .then(response => {
// return response.json()
// }).then(data => {
// this.props.addTranslation(Object.assign({}, {
// word,
// translatedWord: data.text.toString(),
// language
// }));
// }).then(() => {
// this.props.chooseLanguage([language, "en", "fr"])
// })
}
// Handle an answer click: score it, show feedback, start the next round.
selectLanguage(language) {
if (language === this.props.word.language) {
this.props.incrementScore()
this.showAlert("Correct Answer!!")
} else {
this.showAlert("Incorrect! The answer was " + this.props.word.language)
}
this.generateTranslation()
}
// Relies on the global `msg` set via the AlertContainer ref in render();
// calling this before the first render would fail.
showAlert(message){
msg.show(message, {
time: 2000,
type: 'success',
});
}
// Either the "Start Game" button (no round yet) or the current question.
gameState() {
if (this.props.languages.length < 1) {
return <button onClick={this.generateTranslation.bind(this)}>Start Game</button>
} else {
return (
<div>
<h3>Points: {this.props.score}</h3>
<br/>
<p>Original Word: {this.props.word.word}</p>
<p>Translated Word: {this.props.word.translatedWord}</p>
<br/>
<form>
<input onClick={this.selectLanguage.bind(this, this.props.languages[0])} type="button" defaultValue={this.props.languages[0]}/>
<input onClick={this.selectLanguage.bind(this, this.props.languages[1])} type="button" defaultValue={this.props.languages[1]}/>
<input onClick={this.selectLanguage.bind(this, this.props.languages[2])} type="button" defaultValue={this.props.languages[2]}/>
</form>
</div>
)
}
}
render() {
let loadingSpinner = this.props.loading ? (<div>Loading...</div>) : ""
return(
<div>
<AlertContainer ref={(a) => global.msg = a} {...this.alertOptions} />
<h1>Language Score Game</h1>
{this.props.loading ? loadingSpinner : this.gameState()}
</div>
)
}
}
export default connect(mapStateToProps, mapDispatchToProps)(HomeWrapper);
| 56511f64b47894d81ff03c50a15589410611a58f | [
"JavaScript"
] | 3 | JavaScript | texas-delaney/language-game | 83f990b746702451266a53e9a4d9668628d71020 | f52cea2adad6889071dc3aadda3c9ac2a3843d9a |
refs/heads/master | <repo_name>azidesu/yogames_application<file_sep>/src/pages/login/login.ts
import { Component } from '@angular/core';
import { IonicPage, NavController, NavParams } from 'ionic-angular';
import {RegistrationPage} from "../registration/registration";
/**
* Generated class for the LoginPage page.
*
* See https://ionicframework.com/docs/components/#navigation for more info on
* Ionic pages and navigation.
*/
@IonicPage()
@Component({
selector: 'page-login',
templateUrl: 'login.html',
styles:[
`
.valid{
color: #32db64;
}
.invalid{
color: #f53d3d;
}
`
]
})
export class LoginPage {

  // "true"/"false" flags consumed by the template to toggle the
  // valid/invalid CSS classes defined in the component decorator.
  UsernameCheck = "";
  UsernameEmailvalue = "";
  PasswordCheck = "";
  Passwordvalue = "";

  constructor(public navCtrl: NavController, public navParams: NavParams) {
  }

  // Username/email is considered valid once non-empty.
  usernameValidation(){
    if (this.UsernameEmailvalue.length > 0){
      this.UsernameCheck = "true";
    }else{
      this.UsernameCheck = "false";
    }
  }

  // Bug fix: both branches previously assigned the same value to
  // PasswordCheck, so the password field never toggled between the valid
  // and invalid states; mirror the username validation instead.
  passwordValidation(){
    if(this.Passwordvalue.length > 0){
      this.PasswordCheck = "true";
    }else{
      this.PasswordCheck = "false";
    }
  }

  // Navigate to the sign-up page.
  GoToRegistration(){
    this.navCtrl.push(RegistrationPage);
  }
}
<file_sep>/src/pages/game-offer/game-offer.module.ts
import { NgModule } from '@angular/core';
import { IonicPageModule } from 'ionic-angular';
import { GameOfferPage } from './game-offer';
// Ionic page module: registers GameOfferPage for deep-link loading via
// IonicPageModule.forChild.
@NgModule({
declarations: [
GameOfferPage,
],
imports: [
IonicPageModule.forChild(GameOfferPage),
],
})
export class GameOfferPageModule {}
<file_sep>/src/pages/upload-game/upload-game.module.ts
import { NgModule } from '@angular/core';
import { IonicPageModule } from 'ionic-angular';
import { UploadGamePage } from './upload-game';
// Ionic page module: registers UploadGamePage for deep-link loading via
// IonicPageModule.forChild.
@NgModule({
declarations: [
UploadGamePage,
],
imports: [
IonicPageModule.forChild(UploadGamePage),
],
})
export class UploadGamePageModule {}
<file_sep>/src/app/app.module.ts
import { BrowserModule } from '@angular/platform-browser';
import { ErrorHandler, NgModule } from '@angular/core';
import { IonicApp, IonicErrorHandler, IonicModule } from 'ionic-angular';
import { MyApp } from './app.component';
import { HomePage } from '../pages/home/home';
import { ListPage } from '../pages/list/list';
import { StatusBar } from '@ionic-native/status-bar';
import { SplashScreen } from '@ionic-native/splash-screen';
import { LoginPage } from "../pages/login/login";
import { RegistrationPage } from "../pages/registration/registration";
import {ContactUsPage} from "../pages/contact-us/contact-us";
import {FavoritePage} from "../pages/favorite/favorite";
import { UploadGamePage } from "../pages/upload-game/upload-game";
import {TimelinePage} from "../pages/timeline/timeline";
import {GameOfferPage} from "../pages/game-offer/game-offer";
import {GameOrderPage} from "../pages/game-order/game-order";
import {NotificationsPage} from "../pages/notifications/notifications";
import {AboutPage} from "../pages/about/about";
import {UsersPage} from "../pages/users/users";
import {MygamesPage} from "../pages/mygames/mygames";
// Root Angular/Ionic module. Pages appear both in `declarations` and in
// `entryComponents` because Ionic's NavController instantiates them
// dynamically. Fix: NotificationsPage was listed twice in each array;
// the redundant duplicates are removed.
@NgModule({
  declarations: [
    MyApp,
    HomePage,
    ListPage,
    LoginPage,
    RegistrationPage,
    ContactUsPage,
    FavoritePage,
    UploadGamePage,
    TimelinePage,
    GameOfferPage,
    GameOrderPage,
    NotificationsPage,
    AboutPage,
    UsersPage,
    MygamesPage,
  ],
  imports: [
    BrowserModule,
    IonicModule.forRoot(MyApp),
  ],
  bootstrap: [IonicApp],
  entryComponents: [
    MyApp,
    HomePage,
    ListPage,
    LoginPage,
    RegistrationPage,
    ContactUsPage,
    FavoritePage,
    UploadGamePage,
    TimelinePage,
    GameOfferPage,
    GameOrderPage,
    NotificationsPage,
    AboutPage,
    UsersPage,
    MygamesPage,
  ],
  providers: [
    StatusBar,
    SplashScreen,
    // Route uncaught errors through Ionic's error handler.
    {provide: ErrorHandler, useClass: IonicErrorHandler}
  ]
})
export class AppModule {}
<file_sep>/src/pages/mygames/mygames.module.ts
import { NgModule } from '@angular/core';
import { IonicPageModule } from 'ionic-angular';
import { MygamesPage } from './mygames';
// Ionic page module: registers MygamesPage for deep-link loading via
// IonicPageModule.forChild.
@NgModule({
declarations: [
MygamesPage,
],
imports: [
IonicPageModule.forChild(MygamesPage),
],
})
export class MygamesPageModule {}
<file_sep>/src/app/app.component.ts
import { Component, ViewChild } from '@angular/core';
import { Nav, Platform } from 'ionic-angular';
import { StatusBar } from '@ionic-native/status-bar';
import { SplashScreen } from '@ionic-native/splash-screen';
import { HomePage } from '../pages/home/home';
import { ListPage } from '../pages/list/list';
import { LoginPage } from "../pages/login/login";
import { RegistrationPage } from "../pages/registration/registration";
import { ContactUsPage } from "../pages/contact-us/contact-us";
import { FavoritePage } from "../pages/favorite/favorite";
import { UploadGamePage } from "../pages/upload-game/upload-game";
import { TimelinePage } from "../pages/timeline/timeline";
import { AboutPage } from "../pages/about/about";
import {GameOrderPage} from "../pages/game-order/game-order";
import {UsersPage} from "../pages/users/users";
import {MygamesPage} from "../pages/mygames/mygames";
import {NotificationsPage} from "../pages/notifications/notifications";
@Component({
templateUrl: 'app.html'
})
export class MyApp {
@ViewChild(Nav) nav: Nav;
rootPage: any = TimelinePage;
// This attr will show Login/Registration text on the menu when the user click it
SHOW_LOGIN_REGISTER_LINKS = false ;
SHOW_GAME_REQUEST = false ;
SHOW_PROFILE_OPTIONS_LINKS = false;
// pages: Array<{title: string, component: any}>;
constructor(public platform: Platform, public statusBar: StatusBar, public splashScreen: SplashScreen) {
this.initializeApp();
// used for an example of ngFor and navigation
// this.pages = [
// { title: 'الرئيسية', component: TimelinePage },
// { title: 'اتصل بنا', component: ContactUsPage },
// { title: 'حول', component: ContactUsPage },
//
//
// ];
}
initializeApp() {
this.platform.ready().then(() => {
// Okay, so the platform is ready and our plugins are available.
// Here you can do any higher level native things you might need.
this.statusBar.styleDefault();
this.splashScreen.hide();
});
}
openPage(page) {
// Reset the content nav to have just this page
// we wouldn't want the back button to show in this scenario
if(page == 'الرئيسية')
this.nav.setRoot(TimelinePage);
else if(page == 'حول')
this.nav.setRoot(AboutPage);
else if (page == 'تواصل معنا')
this.nav.setRoot(ContactUsPage);
else if (page == 'تسجيل الدخول')
this.nav.setRoot(LoginPage);
else if (page == 'تسجيل جديد')
this.nav.setRoot(RegistrationPage);
else if (page == 'طلب لعبة')
this.nav.setRoot(GameOrderPage);
else if (page == 'تعديل الملف الشخصي')
this.nav.setRoot(UsersPage);
else if (page == 'العابي')
this.nav.setRoot(MygamesPage);
else if (page == 'الاشعارات')
this.nav.setRoot(NotificationsPage);
}
loginAndRegistrationTrigger() {
if(this.SHOW_LOGIN_REGISTER_LINKS == false)
this.SHOW_LOGIN_REGISTER_LINKS = true;
else
this.SHOW_LOGIN_REGISTER_LINKS = false;
}
SupportTrigger(){
if(this.SHOW_GAME_REQUEST == false)
this.SHOW_GAME_REQUEST = true;
else
this.SHOW_GAME_REQUEST = false
}
ProfileOptions(){
if(this.SHOW_PROFILE_OPTIONS_LINKS == false)
this.SHOW_PROFILE_OPTIONS_LINKS = true;
else
this.SHOW_PROFILE_OPTIONS_LINKS = false
}
}
<file_sep>/src/pages/registration/registration.ts
import { Component } from '@angular/core';
import { IonicPage, NavController, NavParams } from 'ionic-angular';
import {tokenReference} from "@angular/compiler";
/**
* Generated class for the RegistrationPage page.
*
* See https://ionicframework.com/docs/components/#navigation for more info on
* Ionic pages and navigation.
*/
@IonicPage()
@Component({
selector: 'page-registration',
templateUrl: 'registration.html',
styles: [
`
.valid{
color: #32db64;
}
.invalid {
color: #f53d3d;
}
`
]
})
export class RegistrationPage {
full_name_text = "";
fullNameValidationCheck = "";
username = "";
usernameCheck = "";
email = "";
emailCheck = "";
phone = "";
phoneCheck = "";
gender = "";
genderCheck = "";
date = "";
dateCheck = "";
password = "";
passwordCheck = "";
repassword = "";
repasswordCheck = "";
agree = "";
agreeCheck = "";
button_status = "";
constructor(public navCtrl: NavController, public navParams: NavParams) {
}
fullNameValidation(){
this.submitBtnChecker();
var reg: RegExp = /^([a-zA-Zأ-ي ]{3,32})$/;
if(reg.test(this.full_name_text) == true){
this.fullNameValidationCheck = "true";
}else {
this.fullNameValidationCheck = "false";
}
}
usernameValidation(){
this.submitBtnChecker();
var reg = /^([A-Za-z0-9.]){3,20}([A-Za-z0-9_\-.]{0,20})$/;
if(reg.test(this.username) == true){
this.usernameCheck = "true";
}else {
this.usernameCheck = "false";
}
}
emailValidation(){
this.submitBtnChecker();
var reg = /^([A-Za-z0-9_\-.]{1,20})\@([A-Za-z]{1,20})\.([A-Za-z]{2,4})$/;
if(reg.test(this.email) == true){
this.emailCheck = "true";
}else {
this.emailCheck = "false";
}
}
phoneValidation(){
this.submitBtnChecker();
var reg = /05([0-9]{8})$/;
if(reg.test(this.phone) == true){
this.phoneCheck = "true";
}else {
this.phoneCheck = "false";
}
}
genderValidation(){
this.submitBtnChecker();
let choose = this.gender;
if(choose == "male" || choose == "female"){
this.genderCheck = "true";
}else {
this.genderCheck = "false";
}
}
dateValidation(){
this.submitBtnChecker();
let date = this.date;
if(date != ""){
this.dateCheck = "true";
}else {
this.dateCheck = "false";
}
}
passwordValidation(){
this.submitBtnChecker();
this.repasswordValidation();
if(this.password.length < 6 || this.password.length > 32){
this.passwordCheck = "false";
}else {
this.passwordCheck = "true"
}
}
repasswordValidation(){
this.submitBtnChecker();
let password = <PASSWORD>;
if(this.repassword == password){
this.repasswordCheck = "true";
}else {
this.repasswordCheck = "false";
}
}
agreeValidation(){
let check = this.agree;
if(check == "true"){
this.agreeCheck = "true";
}else {
this.agreeCheck = "false";
}
}
submitBtnChecker(){
if(this.fullNameValidationCheck == "true" && this.usernameCheck == "true" && this.emailCheck == "true" && this.phoneCheck == "true" && this.genderCheck == "true" && this.passwordCheck == "true" && this.repasswordCheck == "true"){
this.button_status = 'enabled';
}else {
this.button_status = 'disabled';
}
}
}
<file_sep>/src/pages/game-order/game-order.module.ts
import { NgModule } from '@angular/core';
import { IonicPageModule } from 'ionic-angular';
import { GameOrderPage } from './game-order';
@NgModule({
declarations: [
GameOrderPage,
],
imports: [
IonicPageModule.forChild(GameOrderPage),
],
})
export class GameOrderPageModule {}
| 1617070b2f67559459761f3277e9262c5c6dec53 | [
"TypeScript"
] | 8 | TypeScript | azidesu/yogames_application | 05e9d3e4ffbac8c0b5f22e202cfe291922922d4a | ac411ebca9abefde3c3395204795792f2816ea19 |
refs/heads/master | <file_sep>/* eslint-disable */
Array.prototype.unique = function () {
const l = this.length
const u = {}
const a = []
let i
for (i = 0; i < l; i++) {
if (u.hasOwnProperty(this[i])) continue
a.push(this[i])
u[this[i]] = 1
}
return a
}
String.prototype.capitalize = function () {
return this.charAt(0).toUpperCase() + this.slice(1).toLowerCase()
}
<file_sep>import axios from 'axios'
export const http = axios.create()
export default function install (Vue) {
Object.defineProperty(Vue.prototype, '$http', {
get () {
return http
},
})
}
<file_sep># Vue Crossword
[](https://travis-ci.com/Piterden/vue-crossword)
A Vue.js based crossword puzzle builder and filler front-end application.

## Demo
[**Russian Words DB Demo**](https://0l3q5ozyl.codesandbox.io/)
## Features
- [x] Supports 2-31 symbols words lengths.
- [x] Supports any grid size.
- [x] Random symmetric grid generator.
- [x] Automatic calculation of required to fill letter cells and clues numeration.
- [x] Words suggestions from API depending on length and filled letters.
- [x] Clues suggestions for filled words from API.
## ToDo
- [ ] User login (OAuth2).
- [ ] Save built crosswords.
- [ ] Add and save new words and clues.
- [ ] Search within suggested words by added letters.
- [ ] Search in clues of suggested words.
- [ ] Add taxonomy for clues.
- [ ] Automatic generation of full filled crossword.
- [ ] Add different types of symmetry to grid generation.
- [ ] Add analyzer and smart corrector for generated grids (to control words length).
- [ ] Improve the logic of API queries.
- [ ] Make a print version.
- [ ] Fix mobile version.
## Usage
There are a few simple rules:
- Generated forms of vertical and horizontal questions allow you to fill cells inside them with letters.
- Grid cells do not allow you to fill them with letters but you can edit a grid geometry by clicking on them.
- You can change the size of a grid pressing the `Change Size` button which enables *the size change mode*. In this mode requests to the API are temporarily disabled and suggesting words would be updated on exit to the normal edit mode.
- When word is filled, you could select the clue or enter your one. The cells of filled words are marked with a green color.
## Build Setup
``` bash
# install dependencies
npm i
# serve with hot reload at localhost:8080
npm run dev
# build for production with minification
npm run build
# build for production and view the bundle analyzer report
npm run build --report
# run unit tests
npm run unit
# run e2e tests
npm run e2e
# run all tests
npm test
```
## Built With
- [VueJS](https://vuejs.org/) - The Progressive JavaScript Framework.
- [CodeSandbox](https://codesandbox.io) - The online code editor for ...
## Contributing
PR's are appreciated.
## Authors
- **<NAME>** - *Code|Idea* - [Piterden](https://github.com/Piterden)
- **mvrlin** - *Help with styles* - [mvrlin](https://github.com/mvrlin)
## License
This project is licensed under the MIT License - see the [LICENSE.md](https://github.com/Piterden/chessbot/blob/master/LICENSE.md) file for details
| f810f6a2dea7caf82b9c6ab86e85e30910f7d792 | [
"JavaScript",
"Markdown"
] | 3 | JavaScript | Alejandro94bk/vue-crossword | efcc02954d13473534439b1d95a9d148cc731cba | 0b0269a71f0722aa6514dfde8e28fe16e6511c02 |
refs/heads/main | <file_sep>jogador1 = int(input("Exerça sua jogada, jogador 1 (1 = pedra, 2 = tesoura, 3 = papel):"))
jogador2 = int(input("Exerça sua jogada, jogador 2 (1 = pedra, 2 = tesoura, 3 = papel):"))
pedra = 1
tesoura = 2
papel = 3
if jogador1 == pedra and jogador2 == pedra:
print("Empate!")
if jogador1 == tesoura and jogador2 == tesoura:
print("Empate!")
if jogador1 == papel and jogador2 == papel:
print("Empate!")
if jogador1 == pedra and jogador2 == tesoura:
print("Vitória do jogador 1!")
if jogador1 == tesoura and jogador2 == pedra:
print("Vitória do jogador 2!")
if jogador1 == pedra and jogador2 == papel:
print("Vitória do jogador 2!")
if jogador1 == papel and jogador2 == pedra:
print("Vitória do jogador 1!")
if jogador1 == papel and jogador2 == tesoura:
print("Vitória do jogador 2!")
if jogador1 == tesoura and jogador2 == papel:
print("Vitória do jogador 1!") | 6b1a2986b800673102d951fd21d3da4bf1c9f099 | [
"Python"
] | 1 | Python | HelenDeunerFerreira/pedraPapelOuTesoura | 74c4f708e5ca8e4026c1a28a4e20664e035a7e2c | 7a496d6685d1b62ad9fabc4ed68b5af83114a1f5 |
refs/heads/master | <repo_name>Limarychard/First-project-node<file_sep>/index.js
// find ELE NOS PERMITE ENCONTRAR PERMISSÃO NO ARRAY E ASSIM QUE ELE ENCONTRAR ELE RETORNA A INFORMAÇÃO
// findIndex ELE NOS PERMITE ENCONTRAR PERMISSÃO NO ARRAY E ASSIM QUE ELE ENCONTRAR ELE RETORNA O LOCAL ONDE ESTÁ A INFORMAÇÃO
// Middleware => INTERCEPTADOR => Tem o poder de parar ou alterar dados da requisição
const { request, response } = require("express")
const express = require("express")
const uuid = require('uuid')
const port = 3000
const app = express()
app.use(express.json())
const users = []
const checkUserId = (request, response, next) => {
const { id } = request.params
const index = users.findIndex(user => user.id === id)
if(index < 0){
return response.status(404).json({ error: "User not found"})
}
request.userIndex = index
request.userId = id
next()
}
app.get('/users', (request, response) => {
return response.json(users)
})
app.post('/users', (request, response) => {
const { name, age } = request.body
const user = { id: uuid.v4(), name, age }
users.push(user)
return response.status(201).json(user)
})
app.put('/users/:id', checkUserId, (request, response) => {
const { name, age } = request.body
const index = request.userIndex
const id = request.userId
const updadeUser = { id, name, age }
users[index] = updadeUser
return response.json(updadeUser)
})
app.delete('/users/:id', checkUserId, (request, response) => {
const index = request.userIndex
users.splice(index, 1)
return response.status(204).json()
})
app.listen(port, () => {
console.log(`🚀 Server started on port ${port}`)
})
| 41d0a315248280be3c9080524de4795670781d3b | [
"JavaScript"
] | 1 | JavaScript | Limarychard/First-project-node | 748e89b549e02ee194b219a11d4d3d6007671217 | 8c09efaf65363f78eed19dd8d47fc5e83a41c42b |
refs/heads/master | <repo_name>Fungopro/algor_mag<file_sep>/horspul.py
string = 'asdgdlfkgjsldkjgl;ksdajfg;skdjfg;lksdjfg;ksdjf;gkjsdf;lkgjsd;lkgjsd;lkfjg;lskdjg;lksdfjg;lksdjf;g'
substring = 'ksdajfg;skdjfg;lksdjfg'
def horspul(s, ss):
len_ss = len(ss)-1
len_s = len(s)-1
if len(s) < len(ss):
return 0
i = 0
while i + len_ss < len_s:
res = False
for ii in range(0, len_ss):
if s[i+ii] != ss[ii]:
res = True
if res:
j = 0
for ri in reversed(range(0, len_ss)):
if s[i+len_ss] == ss[ri]:
i += len_ss - ri
break
else:
return i+1
# else:
if __name__ == '__main__':
print(horspul(string, substring))
<file_sep>/main.py
# This is a sample Python script.
# Press Shift+F10 to execute it or replace it with your code.
# Press Double Shift to search everywhere for classes, files, tool windows, actions, and settings.
from parso.parser import Stack
def func(list):
# Use a breakpoint in the code line below to debug your script.
st = Stack()
opened = 0
for item in list:
# print(item)
if item == '(':
st.append(1)
# print('open')
elif item == ')' and len(st) != 0:
# try:
st.pop()
print('close')
else:
print(opened)
# print('nothing')
if len(st):
print('error')
print(st)
# Press the green button in the gutter to run the script.
if __name__ == '__main__':
func('())(, ((())))')
# See PyCharm help at https://www.jetbrains.com/help/pycharm/
<file_sep>/knut.py
str = 'abbabababaaababaabababbaaabbbbaabbbaabbbabbbaabbaabbababbababaaaba'
def func_knut_morris(str, substring):
substring = list(substring)
suffix_arr = [1] * (len(substring) + 1)
i = 1
for pos in range(len(substring)):
while i <= pos and substring[pos] != substring[pos - i]:
i += suffix_arr[pos-i]
suffix_arr[pos+1] = i
i = 0
count_symbols = 0
for c in str:
while count_symbols == len(substring) or \
count_symbols >= 0 and substring[count_symbols] != c:
i += suffix_arr[count_symbols]
count_symbols -= suffix_arr[count_symbols]
count_symbols += 1
if count_symbols == len(substring):
yield i
if __name__ == '__main__':
subsrt = 'aabbb'
print([i for i in func_knut_morris(str, subsrt)])
<file_sep>/leftside.py
arr = [1, 30, 0, 2, 5]
def left_side_smaller(array, n):
print("_, ", end="")
# Начать со второго элемента
for i in range(1, n):
for j in range(i - 1, -2, -1):
if array[j] < array[i]:
print(array[j], ", ", end="")
break
if j == -1:
print("_, ", end="")
if __name__ == '__main__':
n = len(arr)
left_side_smaller(arr, n)
<file_sep>/lab2.py
op = {'+': lambda x, y: int(x) + int(y),
'*': lambda x, y: int(x) * int(y),
'/': lambda x, y: int(x) / int(y),
'-': lambda x, y: int(x) - int(y),
'^': lambda x, y: int(x) ^ int(y)}
def func(arr):
stack = []
for item in arr:
if item in op:
stack.append(op.get(item)(stack.pop(), stack.pop()))
else:
stack.append(item)
return stack[0]
if __name__ == '__main__':
print(func('3 4 2 * 1 5 - 2 ^ / +'.split(' ')))
<file_sep>/rabina.py
string = 'abbabababaaababaabababbaaabbbbaabbbaabbbabbbaabbaabbababbababaaaba'
def func_rabin_karp(source, substring):
hash_str = hash(source[:len(substring)])
hash_substr = hash(substring)
res = []
for i in range(0, len(source)):
if hash_str == hash_substr:
res.append(i)
hash_str = hash(source[i:i+len(substring)])
return res
if __name__ == '__main__':
subsrt = 'aabbb'
print('res',func_rabin_karp(string, subsrt))
| c5f6aeb7e4e363370ecf7639ff6a2901305daf4e | [
"Python"
] | 6 | Python | Fungopro/algor_mag | 0795101ffa1fcc1d6d094deb31e4e5c910a9e746 | 4b729e04f70db1a23a5055227c1b8627388c5e2a |
refs/heads/master | <repo_name>youzaiyouzaiE/-<file_sep>/TTNews/TTDefines.h
//
// TTDefines.h
// TTNews
//
// Created by jiahui on 2016/11/17.
// Copyright © 2016年 瑞文戴尔. All rights reserved.
//
#ifndef TTDefines_h
#define TTDefines_h
#define INITIALIZE_URL @"https://passportforapp.skykiwi.com/v2/register/init.do"
#define CHECK_EMAIL_URL @"https://passportforapp.skykiwi.com/v2/register/isExist.do"
#define PICTURE_VERIFY_CODE_URL @"https://passportforapp.skykiwi.com/v2/register/picVerifycode.do"////guid
#define SEND_EMAIL_URL @"https://passportforapp.skykiwi.com/v2/register/sendmail.do"
#define REGISTER_URL @"https://passportforapp.skykiwi.com/v2/register/done.do"
#define LOGIN_URL @"https://passportforapp.skykiwi.com/v2/login/logging.do"
#define USER_INFO_URL @"https://passportforapp.skykiwi.com/v2/member/self.do"
#endif /* TTDefines_h */
| c100b313a42555ae3caca28671b82e56dcebc029 | [
"C"
] | 1 | C | youzaiyouzaiE/- | 0fee639196a134b8661c89383de9174d37447f70 | 89a0119759162d720aceea8a639d3442836ce8f2 |
refs/heads/master | <file_sep><?php
require 'PHPMailerAutoload.php';
function mail_with_attachment($server, $username, $password, $port, $to, $files = array()){
$debug_text += $server . '<br>';
$debug_text += $username . '<br>';
$debug_text += $password. '<br>';
$debug_text += $port. '<br>';
$debug_text += $to. '<br>';
$mail = new PHPMailer;
//$mail->SMTPDebug = 3;
$mail->isSMTP();
$mail->Host = $server;
$mail->SMTPAuth = true;
$mail->Username = $username;
$mail->Password = <PASSWORD>;
$mail->SMTPSecure = 'tls';
$mail->Port = $port;
$mail->SMTPOptions = array(
'ssl' => array(
'verify_peer' => false,
'verify_peer_name' => false,
'allow_self_signed' => true
)
);
//$mail -> SMTPOptions = [ 'ssl' => [ 'verify_peer' => false ] ];
$mail->setFrom($username, 'web mailer');
$mail->addAddress($to);
foreach($files as $file){
$mail->addAttachment($_SERVER['DOCUMENT_ROOT'] . $file);
}
$mail->isHTML(true);
$mail->Subject = 'Server backup ' . $_SERVER['SERVER_NAME'];
$mail->Body = "Backup server " . $_SERVER['SERVER_NAME'] . ' with succes, here are your files.';
if(!$mail->send()) {
return 'Mailer Error: ' . $mail->ErrorInfo ;
} else {
return 'Message has been sent';
}
}
?><file_sep><?php
require 'mail/mail.php';
require 'ftp.php';
require 'database.php';
require 'file_operations.php';
class backupWeb {
// debug
public $debug = array();
// database variables
private $db_use = false;
private $db_host;
private $db_username;
private $db_password;
private $db_name;
private $db_backup_name;
// mail variables
private $mail_use = false;
private $mail_host;
private $mail_username;
private $mail_password;
private $mail_port = 587;
private $mail_to;
// ftp variables
private $ftp_use = false;
private $ftp_host;
private $ftp_username;
private $ftp_password;
private $ftp_path;
// local save variables
private $local_use = false;
private $delete_days = 10;
private $location = '/tmp_bk_class/';
private $subfolder;
// public
public $backup_base = '/';
public $backup_base_name = 'web_backup';
///////////
// functions //
public function set_db($host, $username, $password, $name){
$this->debug[] = __LINE__ . ' use database, set variables';
$this -> db_host = $host;
$this -> db_username = $username;
$this -> db_password = $<PASSWORD>;
$this -> db_name = $name;
$this -> db_use = true;
}
public function set_mail($server, $username, $password, $port, $to){
$this->debug[] = __LINE__ . ' use mailserver, set variables';
$this -> mail_use = true;
$this -> mail_host = $server;
$this -> mail_username = $username;
$this -> mail_password = $<PASSWORD>;
$this -> mail_port = $port;
$this -> mail_to = $to;
}
public function set_ftp($server, $username, $password, $path){
$this->debug[] = __LINE__ . " use ftp server, set variables";
$this -> ftp_use = true;
$this -> ftp_host = $server;
$this -> ftp_username = $username;
$this -> ftp_password = $<PASSWORD>;
$this -> ftp_path = $path;
}
public function set_local($path, $days = 0){
$this->debug[] = __LINE__ . " save local, set variables";
$this->delete_days = $days;
$this->location = $path;
$this->local_use = true;
}
public function debug(){
echo "DEBUG FUNCTION BACKUP CLASS<br><br>";
foreach($this->debug as $row){
echo $row . '<br>';
}
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////// MAIN
////////////////////////////////////////////////////////////
public function backup(){
// naming
$excludefolder = str_replace('/','', $this->location);
$this->location = $this->location . 'backup_' . date('d-m-Y_H-i') . '/';
$this_backup_name = str_replace(' ', '_', $this->backup_base_name . '_'.date('d-m-Y_H-i').'.zip');
// check subfolder
if(!file_exists($_SERVER['DOCUMENT_ROOT'] . $this->location)){
$this->debug[] = __LINE__ . " Folder not existing, creating dir";
mkdir($_SERVER['DOCUMENT_ROOT'] . $this->location, 0777, true);
} else {
$this->debug[] = __LINE__ . " Success: Backup folder found";
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////// DATABASE BACKUP
////////////////////////////////////////////////////////////
if($this->db_use == true){
// backup the database
// checking all variables
if($this -> db_host == ''){
$this->debug[] = 'line: '. __LINE__ .' '. ' database host not specified';
$this->db_use = false;
}
if($this -> db_username == ''){
$this->debug[] = 'line: '. __LINE__ .' '. ' database username not specified';
$this->db_use = false;
}
if($this -> db_password == ''){
$this->debug[] = 'line: '. __LINE__ .' '. ' database password not specified. not necessarily a problem';
}
if($this -> db_name == ''){
$this->debug[] = 'line: '. __LINE__ .' '. ' database name not specified';
$this->db_use = false;
}
// database name
$this->db_backup_name = $this->location . str_replace(' ', '_', $this-> db_name . '_'.date('d-m-Y_H-i').'.sql');
// backup the database backup
if($this->db_use == true){
$this->debug[] = 'Making database backup.....';
$this->debug[] = backup_db($this -> db_host, $this -> db_username, $this -> db_password, $this -> db_name, $this->db_backup_name);
}
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////// MAKE BACKUP FROM FILES
////////////////////////////////////////////////////////////
echo ExtendedZip::zipTree(
$excludefolder,
$_SERVER['DOCUMENT_ROOT'],
$_SERVER['DOCUMENT_ROOT'] . $this->location . $this_backup_name,
ZipArchive::CREATE,
'',
$this->db_backup_name
);
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////// MAIL
////////////////////////////////////////////////////////////
if($this->db_use == true && $this->mail_use == true){
// backup the database
// checking all variables
if($this -> mail_host == ''){
$this->debug[] = 'line: '. __LINE__ .' '. ' mail host not specified';
$this->mail_use = false;
}
if($this -> mail_username == ''){
$this->debug[] = 'line: '. __LINE__ .' '. ' mail username not specified';
$this->mail_use = false;
}
if($this -> mail_password == ''){
$this->debug[] = 'line: '. __LINE__ .' '. ' mail password not specified';
$this->mail_use = false;
}
if($this -> mail_to == ''){
$this->debug[] = 'line: '. __LINE__ .' '. ' mail name not specified';
$this->mail_to = false;
}
// backup the database backup
if($this->mail_use == true){
$this->debug[] = 'Making mail.....';
$this->debug[] = mail_with_attachment(
$this -> mail_host,
$this -> mail_username,
$this -> mail_password,
$this -> mail_port,
$this->mail_to,
array($this->db_backup_name)
);
}
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//////////// FTP
////////////////////////////////////////////////////////////
if($this->ftp_use = true){
// backup the database
// checking all variables
if($this->ftp_host == ''){
$this->debug[] = 'line: '. __LINE__ .' '. ' ftp host not specified';
$this->ftp_use = false;
}
if($this -> ftp_username == ''){
$this->debug[] = 'line: '. __LINE__ .' '. ' ftp username not specified';
$this->ftp_use = false;
}
if($this -> ftp_password == ''){
$this->debug[] = 'line: '. __LINE__ .' '. ' ftp password not specified';
$this->ftp_use = false;
}
if($this -> ftp_path == ''){
$this->debug[] = 'line: '. __LINE__ .' '. ' ftp path not specified';
$this->ftp_use = false;
}
// backup the database backup
if(uploadFTP(
$this->ftp_host,
$this->ftp_username,
$this->ftp_password,
$_SERVER['DOCUMENT_ROOT'] . $this->location . $this_backup_name,
$this -> ftp_path,
$this_backup_name)
){
$this->debug[] = 'line: '. __LINE__ .' '. ' file uploaded to ftp';
} else {
$this->debug[] = 'line: '. __LINE__ .' '. ' ftp upload problem';
}
}
if($this->local_use == true){
$this->debug[] = 'line: '. __LINE__ .' '. ' keep folder';
delete_old($_SERVER['DOCUMENT_ROOT'] . '/' . $excludefolder , $this->delete_days); // delete de files IN de folder
} else {
$this->debug[] = 'line: '. __LINE__ .' '. ' delete folder';
removeDirectory($_SERVER['DOCUMENT_ROOT'] . '/' . $excludefolder); // delete de hele folder
}
}
}
?><file_sep><?php
class ExtendedZip extends ZipArchive {
// Member function to add a whole file system subtree to the archive
public function addTree($dirname, $localname = '', $exclude) {
if ($localname)
$this->addEmptyDir($localname);
$this->_addTree($dirname, $localname, $exclude);
}
// Internal function, to recurse
protected function _addTree($dirname, $localname, $exclude) {
$dir = opendir($dirname);
while ($filename = readdir($dir)) {
// Discard . and ..
if ($filename == '.' || $filename == '..')
continue;
if(strpos($filename, $exclude) > -1){
continue;
}
// Proceed according to type
$path = $dirname . '/' . $filename;
$localpath = $localname ? ($localname . '/' . $filename) : $filename;
if (is_dir($path)) {
// Directory: add & recurse
$this->addEmptyDir($localpath);
$this->_addTree($path, $localpath, $exclude);
}
else if (is_file($path)) {
// File: just add
$this->addFile($path, $localpath);
}
}
closedir($dir);
}
// Helper function
public static function zipTree($exclude, $dirname, $zipFilename, $flags = 0, $localname = '', $extrafile = '') {
$zip = new self();
$zip->open($zipFilename, $flags);
$zip->addTree($dirname, $localname, $exclude);
if($extrafile != ''){
$zip->addFile($_SERVER['DOCUMENT_ROOT'] . $extrafile, 'sql_backup.sql');
}
$zip->close();
}
}
function delete_old($path, $days){
// Open the directory
$files = array_diff(scandir($path), array('.', '..'));
foreach($files as $file){
if (filemtime($path.$file) < ( time() - ( $days * 60 * 60 * 24 ) ) ){
removeDirectory($path.'/'.$file);
}
}
}
function removeDirectory($path) {
$files = glob($path . '/*');
foreach ($files as $file) {
is_dir($file) ? removeDirectory($file) : unlink($file);
}
rmdir($path);
return;
}
?><file_sep><?php
function uploadFTP($server, $username, $password, $local_file, $path, $remote_file){
if(!file_exists($local_file)){
ftp_close($connection);
return 'File niet gevonden';
}
// login
$connection = ftp_connect($server);
if (@ftp_login($connection, $username, $password)){
// success
} else{
ftp_close($connection);
return 'mislukt (connection)';
}
if(ftp_chdir($connection, $path)){
if(! ftp_put($connection, $remote_file, $local_file, FTP_ASCII) ){
ftp_close($connection);
return 'mislukt (ftp_put)';
}
} else {
ftp_close($connection);
return 'mislukt (chdir)';
}
ftp_close($connection);
return true;
}
?><file_sep>
php code ->
```php
require 'your_path/backup_autoload.php';
$backupweb = new backupWeb; // create new instance
$backupweb -> backup_base = '/'; // set the folder to be backup'ed (use '/' to backup your public root folder)
$backupweb -> backup_base_name = 'yourwebsite'; // set the basename of your backup (timestamp wil be added)
$backupweb -> set_db(HOST, USERNAME, PASSWORD, NAME); // (optional) use this if you want to backup your database
$backupweb -> set_mail(server, username, password, port, to); // (optional) use this if you want to send your database over mail
$backupweb -> set_ftp(server, username, password, path); // (optional) use this if you want to send your backup to an external ftp server
$backupweb -> set_local(path, delete_old_days); // (optional) use this if you want to backup locally (and delete old backups)
$backupweb -> backup(); // execute
//$backupweb -> debug(); // (optional) debug the process
``` | 94018b6fe4cf2b2934ef0d398b338f365b5cf6d7 | [
"Markdown",
"PHP"
] | 5 | PHP | MGramser/backup_class | a19d394efb6e2404941f4f1e7cd431b7eb2c62ac | 2963976dd43e01894daa7cff20ed544dac02dfe4 |
refs/heads/main | <file_sep>const Pool = require('pg').Pool
// Single shared connection pool for the "estudiantes" PostgreSQL database.
// Every route handler below issues its queries through this pool.
// NOTE(review): connection credentials are hard-coded in source; consider
// moving them to environment variables — confirm deployment setup first.
const pool = new Pool({
    user: 'jean',
    host:'localhost',
    database: 'estudiantes',
    password: '<PASSWORD>',
    port: '5432',
})
// Create a new student row from the JSON request body; replies 201 on success.
// Any query error is thrown (crashes the handler — consistent with the other
// handlers in this module).
const createEstudiante = function(request, response){
    const {Nombre, Apellidos, fechaNacimiento, Telefono, correoElectronico} = request.body
    const sql = 'INSERT INTO Estudiante(Nombre, Apellidos, fechaNacimiento,Telefono, correoElectronico) VALUES ($1,$2,$3,$4,$5)'
    const values = [Nombre, Apellidos, fechaNacimiento, Telefono, correoElectronico]
    pool.query(sql, values, (err) => {
        if (err) throw err
        response.status(201).send('El estudiante ha sido agregado')
    })
}
// List every student ordered by id; responds with a JSON array of rows.
const getEstudiantes = function(request, response){
    const sql = 'SELECT * FROM estudiante ORDER BY idEstudiante ASC'
    pool.query(sql, (err, result) => {
        if (err) throw err
        response.status(200).json(result.rows)
    })
}
// Update only the Nombre column of the student addressed by :idEstudiante.
// (The other columns are intentionally not updatable through this route.)
const updateEstudiante = function(request,response){
    const idEstudiante = parseInt(request.params.idEstudiante)
    const {Nombre} = request.body
    const sql = 'UPDATE estudiante set Nombre =$2 where idEstudiante = $1'
    pool.query(sql, [idEstudiante, Nombre], (err) => {
        if (err) throw err
        response.status(200).send('usuario actualizado')
    })
}
// Delete the student addressed by :idEstudiante and confirm with a 200.
const deleteEstudiante = function(request, response){
    const idEstudiante = parseInt(request.params.idEstudiante)
    const sql = 'DELETE FROM estudiante WHERE idEstudiante = $1'
    pool.query(sql, [idEstudiante], (err) => {
        if (err) throw err
        response.status(200).send('se ha eliminado el estudiante')
    })
}
// Create a new carrera (degree program) row; replies 201 on success.
const createCarrera = function(request, response){
    const {Carrera} = request.body
    const sql = 'INSERT INTO Carrera(Carrera) VALUES ($1)'
    pool.query(sql, [Carrera], (err) => {
        if (err) throw err
        response.status(201).send('La carrera ha sido agregada')
    })
}
// List every carrera ordered by id; responds with a JSON array of rows.
const getCarreras = function(request, response){
    const sql = 'SELECT * FROM carrera ORDER BY idCarrera ASC'
    pool.query(sql, (err, result) => {
        if (err) throw err
        response.status(200).json(result.rows)
    })
}
// Rename the carrera addressed by :idCarrera.
const updateCarrera = function(request,response){
    const idCarrera = parseInt(request.params.idCarrera)
    const {Carrera} = request.body
    const sql = 'UPDATE carrera set Carrera =$2 where idCarrera = $1'
    pool.query(sql, [idCarrera, Carrera], (err) => {
        if (err) throw err
        response.status(200).send('carrera actualizada')
    })
}
// Delete the carrera addressed by :idCarrera and confirm with a 200.
const deleteCarrera = function(request, response){
    const idCarrera = parseInt(request.params.idCarrera)
    const sql = 'DELETE FROM carrera WHERE idCarrera = $1'
    pool.query(sql, [idCarrera], (err) => {
        if (err) throw err
        response.status(200).send('se ha eliminado la carrera')
    })
}
// Create a matrícula appointment linking a student and a carrera.
// Replies 201 on success. The two foreign-key violations are mapped to
// user-friendly 500 messages.
// Fix: the original sent NO response when the error matched neither FK
// message (only console.log), leaving the HTTP request hanging — a generic
// 500 fallback now guarantees the client always gets an answer.
// NOTE(review): matching on the full pg error message text is fragile
// (locale/version dependent); node-postgres exposes error.constraint,
// which would be more robust — confirm pg driver version before switching.
const createCita = function(request, response){
    const {idEstudiante,idCarrera, Cita, TiempoSesion} = request.body
    pool.query('INSERT INTO CitaMatricula(idEstudiante,idCarrera, Cita, TiempoSesion)'+
    'VALUES ($1,$2,$3,$4)',
    [idEstudiante,idCarrera, Cita, TiempoSesion],
    function(error, results){
        if(error){
            console.log(error.message)
            if(error.message == 'insert or update on table "citamatricula" violates foreign key constraint "fk_estudiante"'){
                response.status(500).send('El estudiante no está registrado en la base de datos')
            }else if(error.message == 'insert or update on table "citamatricula" violates foreign key constraint "fk_carrera"'){
                response.status(500).send('la carrera no está registrada en la base de datos')
            }else{
                // Fallback: never leave the client waiting on an unexpected error.
                response.status(500).send(error.message)
            }
        }else{
            response.status(201).send('La cita ha sido agregado')
        }
    })
}
// List every matrícula appointment ordered by id; responds with JSON rows.
const getCitas = function(request, response){
    const sql = 'SELECT * FROM CitaMatricula ORDER BY idCita ASC'
    pool.query(sql, (err, result) => {
        if (err) throw err
        response.status(200).json(result.rows)
    })
}
// Replace every field of the appointment addressed by :idCita.
// Unlike the student/carrera handlers, query errors here are reported to
// the client as a 500 instead of being thrown.
const updateCita = function(request,response){
    const idCita = parseInt(request.params.idCita)
    const {idEstudiante, idCarrera, Cita, TiempoSesion} = request.body
    const sql = 'UPDATE CitaMatricula set idEstudiante=$2, idCarrera=$3, Cita=$4, TiempoSesion=$5 where idCita = $1'
    pool.query(sql, [idCita, idEstudiante, idCarrera, Cita, TiempoSesion], (err) => {
        if (err) {
            response.status(500).send(err.message)
        } else {
            response.status(200).send('cita actualizada')
        }
    })
}
// Deletes the CitaMatricula row matching the :idCita route parameter.
const deleteCita = function(request, response){
    const idCita = parseInt(request.params.idCita)
    const afterDelete = function(error, results){
        if(error){
            throw error
        }
        response.status(200).send('se ha eliminado la cita de matricula')
    }
    pool.query('DELETE FROM CitaMatricula WHERE idCita = $1', [idCita], afterDelete)
}
// Returns the joined student / career / appointment details for one cita.
const getInfo = function(request, response){
    const idCita = parseInt(request.params.idCita)
    const sql = "SELECT e.idEstudiante, concat(e.Nombre, concat(' ',e.Apellidos)) as nombre,e.correoElectronico,"+
    "c.Carrera, m.Cita, m.TiempoSesion "+
    "from CitaMatricula m "+
    "inner join carrera c on m.idCarrera = c.idCarrera "+
    "inner join estudiante e on e.idEstudiante = m.idEstudiante "+
    "where idCita = $1 "+
    "order by idCita ASC"
    pool.query(sql, [idCita], function(error, results){
        if(error){
            throw error
        }
        response.status(200).json(results.rows)
    })
}
// Public handler map consumed by the Express route registrations.
module.exports = {
    getEstudiantes,
    createEstudiante,
    updateEstudiante,
    deleteEstudiante,
    //carrera
    createCarrera,
    getCarreras,
    updateCarrera,
    deleteCarrera,
    //citas
    createCita,
    getCitas,
    updateCita,
    deleteCita,
    getInfo,
}
<file_sep>#!/bin/bash
#insert tablas
# Manual smoke tests against the running API on localhost:4000.
# Commented lines are earlier one-off inserts kept for reference.
#curl --data "Nombre=<NAME>&Apellidos=<NAME>&fechaNacimiento=19-08-1989&Telefono=82453225&correoElectronico=<EMAIL>" http://localhost:4000/estudiante
#curl --data "Nombre=<NAME>&Apellidos=<NAME>&fechaNacimiento=13-10-1992&Telefono=84652156&correoElectronico=<EMAIL>" http://localhost:4000/estudiante
curl --data "Nombre=Sara&Apellidos=<NAME>&fechaNacimiento=05-01-2000&Telefono=65658456&correoElectronico=<EMAIL>" http://localhost:4000/estudiante
#curl --data "Carrera=Ingenieria en computación" http://localhost:4000/Carrera
#curl --data "Carrera=Ingenieria en industrial" http://localhost:4000/Carrera
#curl --data "Carrera=Educación matemática" http://localhost:4000/Carrera
curl --data "Carrera=Producción industrial" http://localhost:4000/Carrera
#curl --data "idEstudiante=3&idCarrera=4&Cita=10-06-2021 16:00&TiempoSesion=00:30:00" http://localhost:4000/cita
#curl --data "idEstudiante=4&idCarrera=5&Cita=10-05-2021 16:30&TiempoSesion=00:30:00" http://localhost:4000/cita
#curl --data "idEstudiante=5&idCarrera=6&Cita=11-05-2021 15:00&TiempoSesion=00:30:00" http://localhost:4000/cita
curl --data "idEstudiante=6&idCarrera=5&Cita=11-05-2021 15:30&TiempoSesion=00:30:00" http://localhost:4000/cita
#update cita
# NOTE(review): "TiempoSesion=00:045:00" looks like a typo for 00:45:00 -- confirm
curl -X PUT -d "idEstudiante=4" -d "idCarrera=4" -d "Cita=11-05-2021 17:00" -d "TiempoSesion=00:045:00" http://localhost:4000/cita/22
#borrar cita
curl -X "DELETE" http://localhost:4000/cita/26
#get info
curl http://localhost:4000/info/24
<file_sep>const express = require('express')
const bodyParser = require('body-parser')
const app = express()
const port = 4000
const db = require('./crud')
// Body parsing: accept both JSON and form-urlencoded payloads
// (the curl smoke tests post form data).
app.use(bodyParser.json())
app.use(
    bodyParser.urlencoded({
        extended: true,
    })
)
// Health-check / sanity endpoint.
app.get('/', function (request, response){
    response.json({info: 'prueba de servidor'})
})
// REST routes -- handlers live in ./crud.
app.get('/estudiantes',db.getEstudiantes)
app.post('/estudiante',db.createEstudiante)
app.put('/estudiante/:idEstudiante',db.updateEstudiante)
app.delete('/estudiante/:idEstudiante',db.deleteEstudiante)
//carrera
app.post('/carrera',db.createCarrera)
app.get('/carreras',db.getCarreras)
app.put('/carrera/:idCarrera',db.updateCarrera)
app.delete('/carrera/:idCarrera',db.deleteCarrera)
//citas
app.post('/cita',db.createCita)
app.get('/citas',db.getCitas)
app.put('/cita/:idCita',db.updateCita)
app.delete('/cita/:idCita',db.deleteCita)
//info
app.get('/info/:idCita',db.getInfo)
app.listen(port, function(){
console.log('la app está corriendo en el puerto ${port}')
})<file_sep>CREATE TABLE Estudiante
(
idEstudiante SERIAL PRIMARY KEY,
Nombre VARCHAR(25) NOT NULL,
Apellidos VARCHAR(50) NOT NULL,
fechaNacimiento DATE NOT NULL,
Telefono INT NOT NULL,
correoElectronico VARCHAR(50) NOT NULL
);
-- Catalogue of degree programmes; referenced by CitaMatricula via fk_carrera.
CREATE TABLE Carrera
(
idCarrera SERIAL PRIMARY KEY, -- auto-incrementing surrogate key
Carrera VARCHAR(30) NOT NULL -- programme name
);
-- Enrolment appointment linking a student to a career.
-- Fix: idEstudiante/idCarrera were declared SERIAL, which silently created two
-- useless auto-increment sequences and wrong nextval() defaults on plain
-- foreign-key columns. INTEGER NOT NULL matches the referenced SERIAL keys'
-- underlying type (SERIAL implies NOT NULL, so nullability is unchanged).
CREATE TABLE CitaMatricula
(
idCita SERIAL PRIMARY KEY,
idEstudiante INTEGER NOT NULL,
idCarrera INTEGER NOT NULL,
Cita TIMESTAMP NOT NULL,       -- appointment date/time
TiempoSesion TIME NOT NULL,    -- session duration
CONSTRAINT fk_estudiante FOREIGN KEY(idEstudiante)
REFERENCES Estudiante(idEstudiante),
CONSTRAINT fk_carrera FOREIGN KEY(idCarrera)
REFERENCES Carrera(idCarrera)
);
"JavaScript",
"SQL",
"Shell"
] | 4 | JavaScript | JPHuntV/t3-Hunt_Alvarez_Martinez | d13db5643e5983e04038f8e399581b2893d547f9 | 0508a2a65dbeae211e1b9753d1d193ec701809e7 |
refs/heads/master | <file_sep>import urllib
from bs4 import BeautifulSoup
import pandas as pd
def export_to_excel(frame):
    """Write *frame* to 'output.xlsx' in the current working directory."""
    destination = "output.xlsx"
    frame.to_excel(destination)
    print ("***Excel downloaded!***")
def push_to_mainframe(frame, data, col):
    """Append one record to *frame* and return the combined DataFrame.

    data -- dict of scalar values for a single row
    col  -- column order for the new row
    The input frame is not mutated. Fix: uses pd.concat because
    DataFrame.append was deprecated and removed in pandas 2.0.
    """
    f1 = pd.DataFrame(data, index=[0], columns=col)  # one-row frame from the dict
    return pd.concat([frame, f1], ignore_index=True)
def make_url(input_n):
    """Build the 3GPP DynaReport URL for a spec reference number.

    Series numbers below 10 are padded with a leading zero (1.01 -> '0101');
    the decimal point is then stripped to form the page name.
    """
    padded = ("0" + str(input_n)) if input_n // 10 == 0 else str(input_n)
    ref_n = padded.replace(".", "")
    return "https://www.3gpp.org/DynaReport/" + ref_n + ".htm"
#opening the URL
def get_all_data(url):
    """Scrape one 3GPP DynaReport spec page and return its bibliographic
    fields as a dict keyed by the module-level ``col`` column names."""
    URL = url
    _file = urllib.request.urlopen(URL)
    patent_html = _file.read()
    soup = BeautifulSoup(patent_html, 'html.parser')
    #print(soup.prettify()) # view the html
    #intializing bilbliographic data
    data = dict()
    twog_val=0
    threeg_val=0
    lte_val=0
    fiveg_val=0
    #fetching data
    ref = soup.find("span", { "id" : "referenceVal" }).text
    title = soup.find("span", { "id" : "titleVal" }).text
    tstype = soup.find("span", { "id" : "typeVal" }).text
    ipr = soup.find("span", { "id" : "initialPlannedReleaseVal" }).text
    # NOTE(review): the elif chain flags at most ONE radio technology per spec;
    # if a page can have several checkboxes checked, later ones are lost -- confirm.
    if soup.find(id="radioTechnologyVals_0", checked="checked"):
        twog_val= 1
    elif soup.find(id="radioTechnologyVals_1", checked="checked"):
        threeg_val= 1
    elif soup.find(id="radioTechnologyVals_2", checked="checked"):
        lte_val= 1
    elif soup.find(id="radioTechnologyVals_3", checked="checked"):
        fiveg_val= 1
    # col is the module-level column-name list defined below.
    data[col[0]]= ref
    data[col[1]]= title
    data[col[2]]= tstype
    data[col[3]]= ipr
    data[col[4]]= twog_val
    data[col[5]]= threeg_val
    data[col[6]]= lte_val
    data[col[7]]= fiveg_val
    return data
#data=get_all_data()
#print (data)
#input execel
# Driver: read reference numbers from input.xlsx, scrape each DynaReport
# page, accumulate rows, and export the result to output.xlsx.
excel_file='input.xlsx'
df_input=pd.read_excel(excel_file)
df_input=df_input.dropna()
#output dataframe init
col=["Reference","Title","Type","Initial Planned Release","2G","3G","LTE","5G"]
df_output=pd.DataFrame()
for i in range(df_input.shape[0]):
    # first column of each row holds the spec reference number
    iref=df_input.iloc(0)[i][0]
    URL=make_url(iref)
    data=get_all_data(URL)
    print(URL)
    print(data)
    df_output=push_to_mainframe(df_output,data,col)
export_to_excel(df_output)
<file_sep>import urllib2
import simplejson
from bs4 import BeautifulSoup
# Python 2 one-off: scrape number, assignee and claim text from a Google
# Patents page (legacy layout).
URL = 'https://www.google.co.in/patents/US5606609'
req = urllib2.Request(URL, headers={'User-Agent' : "python"})
_file = urllib2.urlopen(req)
patent_html = _file.read()
soup = BeautifulSoup(patent_html, 'html.parser')
patentNumber = soup.find("span", { "class" : "patent-number" }).text
assigneeMetaTag = soup.find("meta", { "scheme" : "assignee"})
patentAssignee = assigneeMetaTag.attrs["content"]
claimTag=soup.find("div", { "class" : "claim-text"}).text
# claim number 2 specifically (div with num="2")
claimTag2=soup.find("div",{"num" :"2"}).text
print "patent no.: ", patentNumber
print "assignee: ", patentAssignee
print claimTag2
<file_sep>import epo_ops
import xmltodict, json
import xml.etree.ElementTree as ET
import sys
def beautify(response):
    """Strip the ops:world-patent-data wrapper tags (with their namespace
    declarations) from the raw OPS response body so the remaining XML can be
    parsed without namespace prefixes."""
    opening_tag = '<ops:world-patent-data xmlns="http://www.epo.org/exchange" xmlns:ops="http://ops.epo.org" xmlns:xlink="http://www.w3.org/1999/xlink">'
    closing_tag = '</ops:world-patent-data>'
    xml = response.content
    for fragment in (opening_tag, closing_tag):
        xml = xml.replace(fragment, '')
    return xml
def get_dates():
    """Return (publication date string, list of priority dates as ints).

    Reads the module-level ``tree`` parsed in the main block; scans priority
    claims by sequence number 1..9 and stops at the first missing one.
    """
    #gives publication and application dates
    publication_date=tree.find('.//publication-reference/document-id[@document-id-type="epodoc"]/date').text
    priority_list=[]
    for i in range(1,10):
        prior_date=tree.find('.//priority-claims/priority-claim[@sequence="'+str(i)+'"]/document-id[@document-id-type="epodoc"]/date')
        if prior_date!= None:
            priority_list.append(int(prior_date.text))
        else:
            break
    return publication_date, priority_list
def savefile(xml):
    """Write the cleaned XML to data.xml for later parsing."""
    with open('data.xml', 'w') as f:
        f.write(xml)
def published_data(client):
    """Fetch bibliographic data for a hard-coded CN publication via OPS."""
    response = client.published_data( # Retrieve bibliography data
    reference_type = 'publication', # publication, application, priority
    input = epo_ops.models.Docdb('101430697', 'CN', 'B'), # original, docdb, epodoc
    endpoint = 'biblio', # optional, defaults to biblio in case of published_data
    #optional, list of constituents
    )
    return response
#main
# Fetch one publication's biblio data from EPO OPS, clean and save the XML,
# then extract number, dates, assignee and title. Python 2 script.
if __name__ == "__main__":
    pat= sys.argv # to be used in case of working with agrguments
    # SECURITY NOTE(review): OPS credentials are hard-coded here; move them to
    # environment variables / a config file before sharing this script.
    client = epo_ops.Client(key='<KEY>', secret='<KEY>') # Instantiate client
    #get data from epo
    response=published_data(client)
    xml=beautify(response) #cleaning XML
    savefile(xml) #saving XML doc for parsing
    # tree is read as a module-level global by get_dates()
    tree = ET.parse('data.xml')
    root = tree.getroot()
    pat_num=tree.find('.//publication-reference/document-id[@document-id-type="epodoc"]/doc-number').text #obselete stuff
    publication_date, priority_list= get_dates()
    assignee = tree.find('.//parties/applicants/applicant/[@data-format="original"]/applicant-name/name').text #many has data-format="original"
    try:
        title = tree.find('.//invention-title[@lang="en"]').text #some times not present, cases CN
    except AttributeError:
        print "Error: Title could not be extracted!"
        title="Error!!!"
    print pat_num, assignee, title, publication_date, priority_list
<file_sep>from PyQt5.QtWidgets import *
from PyQt5 import QtCore, QtWidgets, QtWebEngineWidgets
import pandas as pd
import urllib.request as urllib
import sys,time
# Module-level Qt application and parent widget for the input dialog.
app = QtWidgets.QApplication(sys.argv)
window = QWidget()
#loader = QtWebEngineWidgets.QWebEngineView()
#loader.setZoomFactor(1)
#loader.page().pdfPrintingFinished.connect(lambda *args: print('finished:', args))
def download_pdfurl_file(download_url, filename):
    """Download *download_url* and save the raw bytes to *filename*.

    Fix: both the response and the output file are now closed via context
    managers -- the original left the file handle (and the response) open
    if read() or write() raised.
    """
    # module aliases urllib.request as urllib, so urlopen is urllib.request.urlopen
    with urllib.urlopen(download_url) as response:
        with open(filename, 'wb') as file:
            file.write(response.read())
def emit_pdf(filename,loader,loop):
    """Print the loaded page to *filename* as PDF, then quit *loop* once
    Qt signals pdfPrintingFinished."""
    #print("I'm here")
    #loader.show()
    loader.page().printToPdf(filename)
    loader.page().pdfPrintingFinished.connect(lambda: loop.exit())
    a= lambda filename: print(filename, "downloaded")
    a(filename)
def main(filename='mastersheet.xlsx'):
    """Process each spreadsheet row (name, url, target file, type): save PDFs
    directly, render other URLs to PDF via QtWebEngine, skip videos."""
    excel_file=filename
    df=pd.read_excel(excel_file)
    df=df.dropna()
    for i in range(df.shape[0]):
        details=[]
        for j in range(df.shape[1]):
            #print(df.iloc(0)[i][j])
            details.append(df.iloc(0)[i][j])
        print("Downloading files for: ",details[0])
        if details[3].lower()=='video':
            print("Videos cannot be downloaded, please download manually!!")
        elif details[3].upper()=='PDF':
            download_pdfurl_file(details[1],details[2])
        else:
            #print("here")
            loader = QtWebEngineWidgets.QWebEngineView()
            loader.setZoomFactor(1)
            loader.load(QtCore.QUrl(details[1]))
            # local event loop blocks until emit_pdf exits it, so the lambda's
            # reference to `details` is safe despite the enclosing loop
            loop = QtCore.QEventLoop()
            loader.loadFinished.connect(lambda: emit_pdf(details[2],loader,loop))
            loop.exec_()
def getUserInput():
    """Ask the user for the mastersheet filename and run main() with it.

    Returns True when processing started, False when the file was missing.
    """
    text, okPressed = QInputDialog.getText(window, "Enter excel filename","for e.g- mastersheet.xlsx", QLineEdit.Normal,"mastersheet.xlsx")
    if okPressed:
        print("filename: ",text)
        try:
            if text !='':
                main(str(text))
                return True
            else:
                # empty input falls back to the default mastersheet.xlsx
                main()
                return True
        except FileNotFoundError:
            alert1 = QMessageBox()
            alert1.setText('File specified not found. Keep [.exe file] and [excel file] in same folder and try again.')
            alert1.exec_()
            return False
getUserInput()
# NOTE(review): sys.exit() raises SystemExit here, so the app.exec_() call on
# the next line is unreachable -- confirm the intended shutdown order.
sys.exit()
app.exec_()<file_sep># -*- coding: utf-8 -*-
"""
Created on Tue Jan 23 10:59:17 2018
@author: opuser1
"""
import pandas as pd
# Aggregate the per-combination classification reports: append the column
# means of each c_report_<i>_.csv as one row of 00result_test.csv
# (header written only for the first file).
f=open('observations2/00result_test.csv','a')
mean_list=pd.DataFrame()
for i in range(63):
    df=pd.read_csv('observations2/c_report_'+str(i)+'_.csv')
    temp=pd.DataFrame(df.mean()).transpose()
    if i==0:
        temp.to_csv(f)
    else:
        temp.to_csv(f,header=False)
f.close() <file_sep># PatentTools
1.This includes tools for scraping patent data from google patents.This is included in 'reponse_patents'. <br/>
2. A multi-dimensional classification algorithm using logistic regression and decision trees for ranking a patent is provided in 'observation_analysis'. The data is randomly split 80:20 into training and test sets, and the number of input variables is also varied. The classifier's output on the test data is saved for each of these combinations. The test files compile the results of each iteration into a single CSV file.<br/>
3. EPO data fetching engine
<file_sep># -*- coding: utf-8 -*-
"""
Created on Sat Oct 12 18:12:46 2019
@author: <NAME>
"""
from Tkinter import *
from tkFileDialog import *
def main():
    """Show the patent-entry dialog; blocks until submit destroys the window.

    Returns the module-level (patents_tuple, filename1) set by the submit
    callback. Python 2 / Tkinter.
    """
    root = Tk()
    root.title("Enter Patents")
    L1 = Label(root, text="Patent Numbers (Enter Patent Numbers separated by comma) ")
    L1.grid(row = 0, padx = 2)
    T1 = Text(root, height=5, width=40)
    T1.grid(row = 0, column = 2, padx = 5, pady = 5)
    T2 = Text(root, height=1, width=40)
    T2.grid(row = 2, column = 2, padx = 5, pady = 5)
    L2 = Label(root, text="Select Destination File")
    L2.grid(row = 2, column = 0)
    B2 = Button(root,text='Browse',command= lambda: opendialog(T2))
    B2.grid(row = 2, column = 3, padx = 5)
    B1 = Button(root, text='Submit',command=lambda: submitpatents_filename (T1, T2, root))
    B1.grid(row = 5, column = 1, padx = 5, pady = 5)
    root.mainloop()
    return patents_tuple,filename1
def submitpatents_filename(T1,T2,root):
    """Submit callback: publish the comma-split patent tuple and the chosen
    destination filename as module globals, then close the dialog."""
    global patents_tuple
    global filename1
    patents = T1.get("1.0","end-1c")
    patents_tuple = tuple(patents.split(',')) #USE THIS AS FUTURE VARIABLE
    filename1 = T2.get("1.0","end-1c") #USE THIS AS FUTURE VARIABLE
    root.destroy()
    return patents_tuple, filename1
def opendialog(T2):
    """Browse callback: pick a file and echo its path into the T2 text box."""
    global filename
    filename = askopenfilename(title="Select file")
    T2.insert(0.0, filename)
<file_sep># -*- coding: utf-8 -*-
"""
Created on Fri Jan 19 16:52:08 2018
@author: opuser1
"""
import pandas as pd
import itertools
import pickle
from sklearn.linear_model import LogisticRegression
logreg = LogisticRegression()
from sklearn.ensemble import RandomForestClassifier
rf = RandomForestClassifier(random_state=1)
#defining random state allows generation of same results at all times.
from sklearn import metrics
iterations=1
#importing train set and test set
df=pd.read_excel('Complete_Das.xlsx','Combined')#train set ;High:1, Medium+Low:0,NA:-1
# NOTE(review): this chained assignment also overwrites df, so BOTH train and
# test point at corning.xlsx -- looks unintentional, confirm.
df_test=df=pd.read_excel('corning.xlsx','test_set')#High:1, Medium+Low:0,NA:-1
#some data accumulators
report_data=[]
comb_list=[]
array=['LoC','NoFC','NoBC','FR','RL','TtG']
loc='test2'
def save_model(model_param):
    """Train the random forest on *model_param* columns of df (target 'EoO')
    and pickle the fitted model under <loc>/dump_model.obj."""
    dump_file=open(loc+'/dump_model.obj','wb')
    X=df[model_param]
    Y=df['EoO'] #target parameter
    #model fitting, training
    clf=rf.fit(X, Y)
    train_accuracy=clf.score(X, Y)
    print "Train Accuracy:",train_accuracy
    pickle.dump(clf,dump_file)
def predictions(pred):
    """Save predicted labels next to patent numbers for the current
    combination (uses module-level df, loc and index)."""
    submission=pd.DataFrame({
        "Patent Number":df['PatNo'],
        "Extent of Overlap":pred
    })
    submission.to_csv(loc+'/Classified_patents'+str(index)+'.csv',index=False)
def getClassificationReportSorted(report):
    """Parse sklearn's plain-text classification_report into one flat dict
    (per-class 0/1 metrics plus the avg/total row) and append it to the
    module-level report_data list.

    NOTE(review): this relies on the exact column spacing and line layout of
    a specific sklearn version's report string -- fragile, confirm before
    upgrading sklearn.
    """
    lines=report.split('\n')
    row={}
    rowdata=lines[2].split('      ')
    row['precision0']=float(rowdata[3])
    row['recall0']=float(rowdata[4])
    row['f1_score0']=float(rowdata[5])
    row['support0']=float(rowdata[6])
    rowdata=lines[3].split('      ')
    row['precision1']=float(rowdata[3])
    row['recall1']=float(rowdata[4])
    row['f1_score1']=float(rowdata[5])
    row['support1']=float(rowdata[6])
    rowdata=lines[5].split('   ')
    row['Total_precision']=float(rowdata[1])
    row['Total_recall']=float(rowdata[2])
    row['Total_f1_score']=float(rowdata[3])
    row['Total_support']=float(rowdata[4])
    report_data.append(row)
    #print report_data
def getCombinations(array):
    """Fill the module-level comb_list with every non-empty combination of
    *array*, ordered by combination size and then by itertools order."""
    for size in range(1, len(array) + 1):
        for combo in itertools.combinations(array, size):
            comb_list.append(list(combo))
    # A CSV of the generated combinations can be produced with:
    #   pd.DataFrame(comb_list).to_csv('observations/00combinationsOfvariables.csv', index=False)
#main
# For every variable combination: train the random forest, score train/test,
# save per-patent predictions, and accumulate the parsed classification
# report; finally dump all reports to one CSV.
getCombinations(array)
index=0
for i in range(len(comb_list)):
    temp=comb_list[i]
    #col=['LoC','NoFC','NoBC','FR','RL'] I need a function to generate combinations of this
    col=temp#['LoC','NoFC'] #paramters used for training
    X=df[col]
    Y=df['EoO'] #target parameter
    #model fitting, training
    clf=rf.fit(X, Y)
    train_accuracy=clf.score(X, Y)
    print "Train Accuracy:",train_accuracy
    #testing
    X3_test = df_test[col]
    Y3_test = df_test['EoO']
    Y3test_pred = rf.predict(X3_test)
    predictions(Y3test_pred)
    test_accuracy=rf.score(X3_test, Y3_test)
    # NOTE(review): message says "logistic regression" but the model is rf.
    print('Accuracy of logistic regression classifier on test set: {:.2f}'.format(test_accuracy))
    report=metrics.classification_report(Y3_test,Y3test_pred)
    #print report
    getClassificationReportSorted(report)
    index+=1
    print index
dict_frame=pd.DataFrame(report_data)# can use orient=''precision0''
dict_frame.to_csv(loc+'/classification_report_'+loc+'.csv',index=False)
del report_data[:]
#save_model(['LoC','NoFC'])
#Free some RAM
#todo(done): create permutations function, nomeculature for the csv, do something with the data
#Dumping Models
<file_sep># -*- coding: utf-8 -*-
"""
Created on Fri Jan 19 16:52:08 2018
@author: opuser1
"""
#import numpy as np
import pandas as pd
import itertools
#import matplotlib.pyplot as plt
#import seaborn as sb
from sklearn.model_selection import train_test_split
#import statsmodels.api as sm
from sklearn.linear_model import LogisticRegression
from sklearn import metrics
# Number of random train/test splits evaluated per variable combination.
iterations=5
df=pd.read_excel('Complete_Das.xlsx','Combined')#High:1, Medium+Low:0,NA:-1
df_new=pd.DataFrame()
report_data=[]
combinations=[]
comb_list=[]
array=['LoC','NoFC']
#array=['LoC','NoFC','NoBC','FR','RL']
#
def getClassificationReport(report):
    """Parse each class row of sklearn's text report into a dict and append
    it to report_data. Kept for reference but unused (see comment below)."""
    #this fucntion is not used due to diifrent layout used in excel recording!!
    lines=report.split('\n')
    for line in lines[2:-3]:
        row={}
        rowdata=line.split('      ')
        row['class']=rowdata[2]
        row['precision']=float(rowdata[3])
        row['recall']=float(rowdata[4])
        row['f1_score']=float(rowdata[5])
        row['support']=float(rowdata[6])
        report_data.append(row)
def getClassificationReportSorted(report):
    """Parse sklearn's text classification_report into a flat dict and append
    it to the module-level report_data.

    NOTE(review): the same ``row`` dict object is appended twice (once before
    and once after the Total_* keys are added); since both entries reference
    one dict, report_data ends up with duplicate full rows -- confirm whether
    that is intended for the CSV layout.
    """
    lines=report.split('\n')
    row={}
    rowdata=lines[2].split('      ')
    row['precision0']=float(rowdata[3])
    row['recall0']=float(rowdata[4])
    row['f1_score0']=float(rowdata[5])
    row['support0']=float(rowdata[6])
    rowdata=lines[3].split('      ')
    row['precision1']=float(rowdata[3])
    row['recall1']=float(rowdata[4])
    row['f1_score1']=float(rowdata[5])
    row['support1']=float(rowdata[6])
    report_data.append(row)
    rowdata=lines[5].split('   ')
    row['Total_precision']=float(rowdata[1])
    row['Total_recall']=float(rowdata[2])
    row['Total_f1_score']=float(rowdata[3])
    row['Total_support']=float(rowdata[4])
    report_data.append(row)
def getCombinations(array):
    """Populate the module-level comb_list with every non-empty combination
    of *array* (sizes ascending), via the intermediate ``combinations`` list."""
    for i in range(len(array)):
        combinations.append(list(itertools.combinations(array,i+1)))
    for i in range (len(combinations)):
        for j in range(len(combinations[i])):
            comb_list.append(list(combinations[i][j]))
    print comb_list
index=0
#main
# For each variable combination, run `iterations` random 80/20 splits of a
# logistic-regression fit and dump the parsed reports of that combination
# to its own c_report_<index>_.csv.
getCombinations(array)
for i in range(len(comb_list)):
    temp=comb_list[i]
    #col=['LoC','NoFC','NoBC','FR','RL'] I need a function to generate combinations of this
    col=temp#['LoC','NoFC'] #paramters used for training
    for k in range(iterations):
        train, test = train_test_split(df,test_size=0.2)
        X=train[col]
        Y=train['EoO'] #target parameter
        #model fitting, training
        logreg = LogisticRegression()
        clf=logreg.fit(X, Y)
        train_accuracy=clf.score(X, Y)
        #testing
        X3_test = test[col]
        Y3_test = test['EoO']
        Y3test_pred = logreg.predict(X3_test)
        test_accuracy=logreg.score(X3_test, Y3_test)
        print('Accuracy of logistic regression classifier on test set: {:.2f}'.format(test_accuracy))
        report=metrics.classification_report(Y3_test,Y3test_pred)
        print report
        getClassificationReportSorted(report)
    dict_frame=pd.DataFrame(report_data)
    dict_frame.to_csv('c_report_'+str(index)+'_.csv',index=False)
    report_data=[]
    index=index+1
    print index
#todo(done): create permutations function, nomeculature for the csv, do something with the data
#Dumping Models
<file_sep>from PyQt5.QtWidgets import *
from PyQt5 import QtCore, QtWidgets, QtWebEngineWidgets
import pandas as pd
import urllib.request as urllib
import sys,time
class PDF_Conv:
    """Downloads the documents listed in an Excel mastersheet: PDFs are saved
    directly; other URLs are rendered to PDF through QtWebEngine."""

    def __init__(self):
        # Fix: the original signature was __init__() with no self, so
        # PDF_Conv() raised TypeError on every instantiation.
        pass

    def download_pdfurl_file(self, download_url, filename):
        """Fetch *download_url* and write the raw bytes to *filename*.

        Context managers close both handles even if read/write raises
        (the original leaked them on error).
        """
        with urllib.urlopen(download_url) as response:
            with open(filename, 'wb') as file:
                file.write(response.read())

    def main(self, filename='mastersheet.xlsx'):
        """Process each spreadsheet row: (name, url, target file, type)."""
        df = pd.read_excel(filename)
        df = df.dropna()
        for i in range(df.shape[0]):
            details = [df.iloc(0)[i][j] for j in range(df.shape[1])]
            print("Downloading files for: ", details[0])
            if details[3].lower() == 'video':
                print("Videos cannot be downloaded, please download manually!!")
            elif details[3].upper() == 'PDF':
                # Fix: was an unqualified download_pdfurl_file(...) call,
                # which raised NameError at runtime.
                self.download_pdfurl_file(details[1], details[2])
            else:
                # NOTE(review): emit_pdf is not defined in this module; this
                # branch still fails at runtime and needs the helper from the
                # sibling script -- TODO port it in.
                loader = QtWebEngineWidgets.QWebEngineView()
                loader.setZoomFactor(1)
                loader.load(QtCore.QUrl(details[1]))
                loop = QtCore.QEventLoop()
                loader.loadFinished.connect(lambda: emit_pdf(details[2], loader, loop))
                loop.exec_()
class GUI(PDF_Conv):
    """Dialog wrapper around PDF_Conv.

    NOTE(review): this class has several apparent defects left untouched here:
    getUserInput lacks ``self``; it reads ``window``, which is only a local in
    __init__; and it calls ``main(...)`` unqualified instead of ``self.main``.
    Confirm intent before fixing -- the sibling script's module-level version
    suggests this was a partial refactor.
    """
    def __init__(self):
        # QApplication/QWidget are bound to locals only -- they are garbage
        # collected when __init__ returns (likely unintended, see class note).
        app = QtWidgets.QApplication(sys.argv)
        window = QWidget()
    def getUserInput():
        text, okPressed = QInputDialog.getText(window, "Enter excel filename","for e.g- mastersheet.xlsx", QLineEdit.Normal,"mastersheet.xlsx")
        QInputDialog.labelText("Hi")
        if okPressed:
            print("filename: ",text)
            try:
                if text !='':
                    main(str(text))
                    return True
                else:
                    main()
                    return True
            except FileNotFoundError:
                alert1 = QMessageBox()
                alert1.setText('File specified not found. Keep [.exe file] and [excel file] in same folder and try again.')
                alert1.exec_()
                return False
# Instantiate on import/run (fails until __init__'s signature issues are resolved).
ob=GUI()
<file_sep>import urllib2
import simplejson
from bs4 import BeautifulSoup
#opening the patent
# Python 2 script: scrape bibliographic data, assignee, claims and inventors
# from the legacy Google Patents page layout.
URL = 'https://www.google.co.in/patents/US5606609'
req = urllib2.Request(URL, headers={'User-Agent' : "python"})
_file = urllib2.urlopen(req)
patent_html = _file.read()
soup = BeautifulSoup(patent_html, 'html.parser')
# Field names matching the order of the single-patent-bibdata table cells.
bibKeys=("Publication number","Publication type","Application number","Publication Date","Filing Date","Priority Date","Fee status")
bibList={}
#fetching the patent number
patentNumber = soup.find("span", { "class" : "patent-number" }).text
#fetching the assignee
assigneeMetaTag = soup.find("meta", { "scheme" : "assignee"})
patentAssignee = assigneeMetaTag.attrs["content"]
print "Assignee: ",patentAssignee
#fetching the claims
claimTag=soup.find("div", { "class" : "claim-text"}).text
#fetching all the inventors
def getInventorNames():
    """Print all inventors from the page's meta tags as one comma-joined string."""
    inventor=soup.find_all("meta", { "scheme" : "inventor",})
    i=1
    dist=len(inventor)
    inventorName=""
    for tag in inventor:
        inventorName=inventorName+tag['content']
        if i<dist:
            inventorName=inventorName+", "
        i=i+1
        #print tag['content']
    print "Inventors:",inventorName
#another way to fetch filing and publication , although priority can't be scraped using this
filingDate=soup.find("meta", { "scheme" : "dateSubmitted"})
publicationDate=soup.find("meta", { "scheme" : "issued"})
#mainBibData
def mainBibData():
    """Fill bibList by pairing bibKeys with the bibdata table cells in order.

    NOTE(review): raises IndexError if the page has more cells than bibKeys.
    """
    i=0
    bibData=soup.find_all("td",{ "class":"single-patent-bibdata"})
    for ptag in bibData:
        bibList[bibKeys[i]]=ptag.text
        i=i+1
        print ptag.text
    print bibList.items()
mainBibData()
getInventorNames()
#print bibList['Publication number']<file_sep># -*- coding: utf-8 -*-
"""
Created on Wed Jan 17 19:29:42 2018
@author: opuser1
"""
import urllib2
from bs4 import BeautifulSoup
import pandas as pd
#opening the patent
# Python 2: scrape one Google Patents page, then (further below) merge the
# fields into a Word template. claimNum selects which claim div to read.
claimNum="1"
URL = 'https://www.google.co.in/patents/US5606609'
req = urllib2.Request(URL, headers={'User-Agent' : "python"})
_file = urllib2.urlopen(req)
patent_html = _file.read()
soup = BeautifulSoup(patent_html, 'html.parser')
bibKeys=("Publication number","Publication type","Application number","Publication Date","Filing Date","Priority Date","Fee status")
bibList={}
#fetching the patent number
patentNumber = soup.find("span", { "class" : "patent-number" }).text
#fetching the assignee
assigneeMetaTag = soup.find("meta", { "scheme" : "assignee"})
patentAssignee = assigneeMetaTag.attrs["content"]
#print "Assignee: ",patentAssignee
#fetching the claims
claimTag=soup.find("div", { "class" : "claim-text"}).text
claimTag2=soup.find("div",{"num" :claimNum}).text
#print "Claim:",claimTag2
#fetching all the inventors
def getInventorNames():
    """Return all inventors from the page's meta tags as one ', '-joined string."""
    inventor=soup.find_all("meta", { "scheme" : "inventor",})
    i=1
    dist=len(inventor)
    inventorName=""
    for tag in inventor:
        inventorName=inventorName+tag['content']
        if i<dist:
            inventorName=inventorName+", "
        i=i+1
        #print tag['content']
    #print "Inventors:",inventorName
    return inventorName
#another way to fetch filing and publication , although priority can't be scraped using this
filingDate=soup.find("meta", { "scheme" : "dateSubmitted"})
publicationDate=soup.find("meta", { "scheme" : "issued"})
#mainBibData
def mainBibData():
    """Fill bibList from the bibdata table cells plus claim/inventor/assignee.

    NOTE(review): raises IndexError if the page has more cells than bibKeys.
    """
    i=0
    bibData=soup.find_all("td",{ "class":"single-patent-bibdata"})
    for ptag in bibData:
        bibList[bibKeys[i]]=ptag.text
        i=i+1
    bibList["Claim"]=claimTag
    bibList["Inventors"]=getInventorNames()
    bibList["Assignee"]=patentAssignee
    #print bibList.items()
mainBibData()
#print bibList.items()
# Persist the scraped fields as a one-row CSV for the mail-merge step below.
df=pd.DataFrame(bibList,index=[0])
print df.head()
df.to_csv('PythonExport.csv', sep=',')
from mailmerge import MailMerge
from datetime import date
df=pd.read_csv('PythonExport.csv')
print(df.iloc[0,1])
template = "Practical-Business-Python.docx"
document = MailMerge(template)
print(document.get_merge_fields())
di={"Name":"Sherni"}
document.merge(
status='Gold',
city='Springfield',
phone_number=df.iloc[0,2],
Business=df.iloc[0,1],
zip=df.iloc[0,3],
purchases=df.iloc[0,4],
shipping_limit=df.iloc[0,5],
state=df.iloc[0,6],
address=df.iloc[0,7],
date='{:%d-%b-%Y}'.format(date.today()),
discount='5%',
recipient="sdfs")
document.write('DIA_'+bibList["Publication number"]+' {:%d-%b-%Y}'.format(date.today())+'.docx')
# plan to dump data in a file<file_sep>import epo_ops
import xmltodict, json
import xml.etree.ElementTree as ET
import pprint
client = epo_ops.Client(key='<KEY>', secret='<KEY>') # Instantiate client
response = client.published_data( # Retrieve bibliography data
reference_type = 'publication', # publication, application, priority
input = epo_ops.models.Docdb('1417800', 'EP', 'B1'), # original, docdb, epodoc
endpoint = 'biblio', # optional, defaults to biblio in case of published_data
# optional, list of constituents
)
doc = xmltodict.parse(response.text)
#print doc
data=[]
for article in doc['ops:world-patent-data']['exchange-documents']['exchange-document']['bibliographic-data']['publication-reference']['document-id']:
if article['@document-id-type'] == 'docdb':
#print article
country=article['country']
patnum = country + article['doc-number']
kind = article['kind']
publication_date = article['date']
print "Patent Num:", patnum+kind+ ", has the publication date: ", publication_date
break
da=doc['ops:world-patent-data']['exchange-documents']['exchange-document']['bibliographic-data']['priority-claims']['priority-claim'][1]
da=da['document-id'][0]
priorpat = da['doc-number']
priority_date = da['date']
print "here"
print priorpat
print priority_date
#print "Patent Num:", patnum+ ", has the priority date: ", priority_date +", the patent takes the priority from ",priorpat
<file_sep>import epo_ops
from lxml import etree as ET
from datetime import date
from mailmerge import MailMerge
from report_ui import *
import pandas as pd
import time
# XML namespace map used by every find()/findall() in this module.
NS = {
    "ops": "http://ops.epo.org",
    "epo": "http://www.epo.org/exchange",
    "ft": "http://www.epo.org/fulltext",
    "reg": "http://www.epo.org/register",
}
#US5606609A, EP1417800B1
#patent_list=['EP1417800B1']
def read_patent_from_excel(path):
    """Stub: will load patent numbers from a spreadsheet (not implemented)."""
    df=path #to be developed
def save_to_xmlfile(xml):
    """Dump the raw family-response XML to data_family.xml for inspection."""
    with open('data_family.xml', 'w') as handle:
        handle.write(xml)
def get_kind_code(patent):
    """Split a patent string like 'EP1417800B1' into (country, number, kind).

    Assumes the country code is the first two characters. Kind codes are
    either one trailing letter ('A') or letter+digit ('B1'). US numbers
    longer than 10 digits get the 5th character dropped when it is '0'
    (Espacenet application-number compatibility).

    Fixes vs. the original ord()-range logic:
    - a single LOWERCASE trailing kind letter was never detected (the first
      branch tested the second-to-last character's case instead of the
      last character's);
    - kind digits excluded '0' (range started at ord > 48).
    """
    patent = str(patent).strip().replace(" ", "")  # sanitization, remove spaces
    country = patent[:2]
    number = patent[2:]
    kind = ""
    lastchar = patent[-1]
    secondlastchar = patent[-2]
    if lastchar.isalpha():
        # single-letter kind code, upper or lower case
        kind = lastchar
        number = patent[2:-1]
    elif lastchar.isdigit():
        if secondlastchar.isalpha():
            # letter+digit kind code such as B1 / A1
            kind = secondlastchar + lastchar
            number = patent[2:-2]
        else:
            print("Kind code absent or some alien characters in the patent number " + patent)
            # raise exception, do not process the patent
    # make US applications Espacenet compatible
    if country == 'US':
        if len(number) > 10 and number[4] == '0':
            number = number[:4] + number[5:]
    return country, number, kind
def get_the_dates(bib_data):
    """Return (publication date string, list of priority dates as ints),
    scanning priority claims by sequence number until one is missing."""
    priority_list=[]
    #gives publication and application dates
    pub_date=bib_data.find("./epo:publication-reference/epo:document-id[@document-id-type='epodoc']/epo:date", NS).text
    i=1
    while(1):
        prior_date=bib_data.find('./epo:priority-claims/epo:priority-claim[@sequence="'+str(i)+'"]/epo:document-id[@document-id-type="epodoc"]/epo:date', NS)
        if prior_date!= None:
            priority_list.append(int(prior_date.text))
            i=i+1
        else:
            break
    return pub_date, priority_list
def get_bibdata(tree, NS):
    """Collect bibliographic fields (family id, title, dates, assignee,
    inventors) for the exchange documents in *tree*.

    NOTE(review): ``return data`` sits outside the loop, so only the LAST
    document's data dict is returned when several documents are present --
    confirm whether that is intended.
    """
    documents = tree.findall("./epo:exchange-documents/epo:exchange-document", NS)
    for document in documents:
        data = dict()
        #scraping
        data["Family Id"] = document.attrib["family-id"]
        bib_data = document.find("./epo:bibliographic-data", NS)
        #Dates
        app_date=get_application_date(bib_data)
        pub, prior=get_the_dates(bib_data)
        try:
            title=get_title(bib_data) #title
        except AttributeError:
            title=" "
            pass
        try:
            assignee=get_assignee(bib_data)#assignee/applicant
        except AttributeError:
            assignee=" "
            pass
        try:
            inventors=get_inventor_data(bib_data)
        except AttributeError:
            inventors=" "
            pass
        #data logging
        data["Title"]= title
        data["Publication date"]= pub
        data['Earliest priority']= str(min(prior)) #
        data['Assignee']=assignee
        data['Application Date']=app_date
        # trailing ", " separator is stripped here
        data['Inventors']=inventors[:-2]
    return data
def get_assignee(bib_data):
    """Return the first original-format applicant name."""
    assignee = bib_data.find('./epo:parties/epo:applicants/epo:applicant/[@data-format="original"]/epo:applicant-name/epo:name', NS).text #many has data-format="original"
    return assignee
def get_application_date(bib_data):
    """Return the epodoc application date string."""
    app_date = bib_data.find("./epo:application-reference/epo:document-id[@document-id-type='epodoc']/epo:date", NS).text #many has data-format="original"
    return app_date
def get_title(bib_data):
    """Return the invention title text, preferring the English one; None if
    no title element exists at all.

    Fixes: the original called .text on the find() result before checking for
    None (AttributeError whenever no English title existed -- exactly the case
    the fallback was written for), and its fallback branch returned the
    Element object instead of its text.
    """
    node = bib_data.find("./epo:invention-title[@lang='en']", NS)
    if node is None:  # no English title -> fall back to any language
        node = bib_data.find("./epo:invention-title", NS)
    return node.text if node is not None else None
def XMLparser(response):
    """Parse an OPS HTTP response body into an element tree root."""
    xml=response.text
    tree = ET.fromstring(xml.encode("utf-8"))
    return tree
def get_family_data(tree):
    """Return the docdb publication numbers of every family member in *tree*.

    Application references are looked up too but deliberately not collected
    (see inline comments).
    """
    doc_db_list = list()
    for el in tree.findall("./ops:patent-family/ops:family-member", NS):
        pub_ref = el.find('./epo:publication-reference/epo:document-id[@document-id-type="docdb"]',NS)
        if pub_ref is not None:
            a=get_complete_patent_num(pub_ref)
            doc_db_list.append(a) #can return from here, no need to execute further for published ones
        app_ref = el.find('./epo:application-reference/epo:document-id[@document-id-type="docdb"]',NS)
        if app_ref is not None:
            b=get_complete_patent_num(app_ref) #not being used for now in the output of family members
            #doc_db_list.append(b)
    return doc_db_list
def get_inventor_data(tree):
    """Return all original-format inventor names joined by ', ', with a
    trailing ', ' that the caller strips via inventors[:-2].

    Fix: the original concatenated names with NO separator, so the caller's
    [:-2] slice chopped the last two characters off the final inventor's
    name instead of removing a separator.
    """
    names = []
    for el in tree.findall('./epo:parties/epo:inventors/epo:inventor[@data-format="original"]/epo:inventor-name/epo:name', NS):
        names.append(el.text)
    return ", ".join(names) + ", " if names else ""
def get_complete_patent_num(base_object):
number=base_object.find('./epo:doc-number',NS).text
country=base_object.find('./epo:country',NS).text
kind=base_object.find('./epo:kind',NS).text
return country+number+kind
#############################################################
# API Calls
def family_data_api(client, patent): #returns complete family data
country, pat_num, kind= get_kind_code(patent)
response=client.family(reference_type='publication',
input=epo_ops.models.Docdb(pat_num, country, kind),
endpoint=None, constituents=None)
#save_to_xmlfile(response.content) #view xml response
tree = XMLparser(response)
return get_family_data(tree)
def published_data_api(client, patent): #returns complete biblio data
    """Fetch bibliographic data (title, dates, assignee, ...) for a publication."""
    country, pat_num, kind = get_kind_code(patent)
    docdb = epo_ops.models.Docdb(pat_num, country, kind)
    # 'biblio' is the default endpoint for published_data; kept explicit here.
    response = client.published_data(reference_type='publication',
                                     input=docdb,
                                     endpoint='biblio')
    tree = XMLparser(response)
    return get_bibdata(tree, NS)
##############################################################
def export_to_excel(frame):
    """Write the accumulated results frame to 'output.xlsx'.

    Uses print() (works on both Python 2 and 3); the previous version used
    the Python-2-only print statement.
    """
    frame.to_excel("output.xlsx")
    print("***Excel downloaded!***")
def push_to_mainframe(frame, data):
    """Append one record (dict) to the accumulating DataFrame and return it.

    Column order comes from the module-level ``col`` list. Uses pd.concat
    because DataFrame.append was deprecated and removed in pandas 2.0.
    """
    row = pd.DataFrame(data, index=[0], columns=col)  # single-row frame from the dict
    return pd.concat([frame, row], ignore_index=True)
def list_to_str(lst):
    """Join elements as 'a| b| ' — every element gets a trailing '| '.

    Callers strip the final delimiter with [:-2]. Implemented with join
    instead of the previous quadratic string += loop.
    """
    return "".join(str(element) + "| " for element in lst)
def populate_doc(frameX,frameY,patent_listX, patent_listY):
    """Fill the Word mail-merge template with the X/Y patent tables.

    Row layout of frameX/frameY follows the module-level ``col`` list:
    0=Patent No., 1=Title, 3=Application Date, 4=Earliest priority,
    5=Publication date, 6=Assignee, 7=Inventors, 8=Family members.
    Writes the merged document to 'NEW REPORT1.docx'.
    """
    template = "TEMPLATE 5.docx"
    document = MailMerge(template)
    # One dict per patent row; keys must match merge fields in the template.
    XListofdict=[]
    YListofdict=[]
    XList2ofdict=[]
    for x in range(len(patent_listX)):
        Xdict={}
        Xdict["X_PAT_NUMBER"]=frameX.iloc[x,0]
        Xdict["X_PAT_TITLE"]=frameX.iloc[x,1]
        Xdict["X_PAT_ASSIGNEE"]=frameX.iloc[x,6]
        Xdict["X_PAT_PUBDATE"]=frameX.iloc[x,5]
        Xdict["X_PAT_APPDATE"]=frameX.iloc[x,3]
        Xdict["X_PAT_PRIORDATE"]=frameX.iloc[x,4]
        Xdict["X_PAT_INVENTORS"]=frameX.iloc[x,7]
        Xdict["X_PAT_FAMILY"]=frameX.iloc[x,8]
        XListofdict.append(Xdict)
    # NOTE(review): a single Y patent is silently skipped by this `> 1` test --
    # confirm whether the intent was `>= 1`.
    if len(patent_listY)>1:
        print patent_listY
        for x in range(len(patent_listY)):
            Ydict={}
            Ydict["Y_PAT_NUMBER"]=frameY.iloc[x,0]
            Ydict["Y_PAT_TITLE"]=frameY.iloc[x,1]
            Ydict["Y_PAT_ASSIGNEE"]=frameY.iloc[x,6]
            Ydict["Y_PAT_PUBDATE"]=frameY.iloc[x,5]
            Ydict["Y_PAT_APPDATE"]=frameY.iloc[x,3]
            Ydict["Y_PAT_PRIORDATE"]=frameY.iloc[x,4]
            Ydict["Y_PAT_INVENTORS"]=frameY.iloc[x,7]
            Ydict["Y_PAT_FAMILY"]=frameY.iloc[x,8]
            YListofdict.append(Ydict)
    # Second pass over X builds numbered merge fields (X_PAT_1_*, X_PAT_2_*, ...)
    # consumed by merge_pages (one page per patent).
    for x in range(len(patent_listX)):
        Xdict1 = {}
        Xdict1["X_PAT_{}_NUMBER".format(x+1)]=frameX.iloc[x,0]
        Xdict1["X_PAT_{}_TITLE".format(x+1)]=frameX.iloc[x,1]
        Xdict1["X_PAT_{}_ASSIGNEE".format(x+1)]=frameX.iloc[x,6]
        Xdict1["X_PAT_{}_PUBDATE".format(x+1)]=frameX.iloc[x,5]
        Xdict1["X_PAT_{}_APPDATE".format(x+1)]=frameX.iloc[x,3]
        Xdict1["X_PAT_{}_PRIORDATE".format(x+1)]=frameX.iloc[x,4]
        Xdict1["X_PAT_{}_INVENTORS".format(x+1)]=frameX.iloc[x,7]
        Xdict1["X_PAT_{}_FAMILY".format(x+1)]=frameX.iloc[x,8]
        XList2ofdict.append(Xdict1)
    #print YListofdict
    document.merge_rows('X_PAT_NUMBER', XListofdict)
    document.merge_rows('Y_PAT_NUMBER', YListofdict)
    document.merge_pages(XList2ofdict)
    #document.merge(**Ydict)
    document.write('NEW REPORT1.docx')
# US8139109B2,US5606609A,EP1417800B1
if __name__ == "__main__":
    # Accumulator frames for the two patent groups (X and Y).
    frameX = pd.DataFrame()
    frameY = pd.DataFrame()
    # Column layout shared by push_to_mainframe() and populate_doc().
    col = ['Patent No.', 'Title', 'Family Id', 'Application Date',
           'Earliest priority', 'Publication date', 'Assignee',
           'Inventors', 'Family members']
    client = epo_ops.Client(key='<KEY>', secret='<KEY>')  # Instantiate OPS client
    # dir_path, f1, f2 come from the GUI and are currently unused here.
    patent_listX, patent_listY, dir_path, f1, f2 = main()

    def process_patents(frame, patents):
        """Fetch biblio + family data for each patent and append to frame.

        Best-effort per patent: a failure is reported and the loop moves on
        (previously done with bare except clauses, which also swallowed
        KeyboardInterrupt/SystemExit).
        """
        for patent in patents:
            try:
                print("\n Processing " + patent + "....\n")
                data = published_data_api(client, patent)
                family_list = family_data_api(client, patent)
                # De-duplicate family members; [:-2] strips the trailing "| ".
                data['Family members'] = list_to_str(list(set(family_list)))[:-2]
                data['Patent No.'] = patent.replace(" ", "")
                frame = push_to_mainframe(frame, data)
                print(data)
            except epo_ops.exceptions.MissingRequiredValue:
                print("%s : ERROR: Number, country code and kind code must be present!" % patent)
            except Exception as exc:  # keep best-effort behaviour, but name the error
                print("HTTP Error: %s" % exc)
        return frame

    tick = time.time()  # start timer
    frameX = process_patents(frameX, patent_listX)
    frameY = process_patents(frameY, patent_listY)
    export_to_excel(frameX)  # default name output.xlsx
    populate_doc(frameX, frameY, patent_listX, patent_listY)
    tock = time.time()
    print("Execution time: %s Sec" % (tock - tick))
#Playground below:
# US8139109B2,US5606609A,EP1417800B1
'''
def write_to_xml(tree):
tree.write('output11.xml')
documents = tree.findall("./epo:exchange-documents/epo:exchange-document", NS)
for document in documents:
i=1
while(1):
bib_data = document.find("./epo:bibliographic-data", NS)
pub_date=bib_data.find("./epo:publication-reference/epo:document-id[@document-id-type='epodoc']/epo:date", NS).text
prior_date=bib_data.find('./epo:priority-claims/epo:priority-claim[@sequence="'+str(i)+'"]/epo:document-id[@document-id-type="epodoc"]/epo:date', NS)
if prior_date!= None:
priority_list.append(int(prior_date.text))
i=i+1
else:
break
print pub_date, priority_list
'''
'''
documents = tree.findall("./epo:exchange-documents/epo:exchange-document", NS)
for document in documents:
data = dict()
data["family_id"] = document.attrib["family-id"]
bib_data = document.find("./epo:bibliographic-data", NS)
title = bib_data.find("./epo:invention-title[@lang='en']", NS).text
if title is None: #when title in not present in english
title = bib_data.find("./epo:invention-title", NS)
pub_date=bib_data.find("./epo:publication-reference/epo:document-id[@document-id-type='epodoc']/epo:date", NS).text
print bib_data, title, pub_date
''' | 3af63b444a558b213cabfa761b6efcc1a77b7d64 | [
"Markdown",
"Python"
] | 14 | Python | ankitk50/PatentTools | 218205e8f9b37999da16ce94b3396ee5f3ebc0c1 | 3b39aa19ec8df17a686210bdc164e93bece5f2f1 |
refs/heads/master | <file_sep>#!/bin/bash
# Rebuild the base station binary and run it with a fresh test log.
set -e           # abort if the build fails instead of running a stale binary
rm -f test.log   # -f: tolerate a missing log on first run
touch test.log
go build -o basestation server.go
./basestation
<file_sep>package main
import (
"fmt"
"net/http"
log "github.com/HackRVA/master-base-2019/filelogging"
lb "github.com/HackRVA/master-base-2019/leaderboard"
ss "github.com/HackRVA/master-base-2019/serverstartup"
"github.com/HackRVA/master-base-2019/sync"
api "github.com/HackRVA/master-base-2019/webapi"
"github.com/gorilla/mux"
"github.com/spf13/viper"
)
var logger = log.Ger

// main wires up the HTTP API routes, starts the background loops
// (sync loop for non-master stations, leaderboard, badge wrangler)
// and serves the API on port 8000.
func main() {
	ss.InitConfiguration()
	r := mux.NewRouter()
	r.HandleFunc("/api/newgame", api.NewGame).Methods("POST")
	r.HandleFunc("/api/nextgame", api.NextGame).Methods("GET")
	r.HandleFunc("/api/games", api.AllGames).Methods("GET")
	r.HandleFunc("/api/info/all", api.AllInfo).Methods("GET")
	r.HandleFunc("/api/info/{id}", api.Info).Methods("GET")
	r.HandleFunc("/api/zombie", api.Zombie).Methods("POST")
	http.Handle("/", r)
	fmt.Println("running web server on port 8000")
	// Only non-master stations sync against the master.
	if !viper.GetBool("isMaster") {
		sync.StartSyncLoop()
	}
	lb.StartLeaderboardLoop()
	ss.StartBadgeWrangler()
	// ListenAndServe only returns on failure; the error was previously
	// discarded, so a bind failure exited silently.
	if err := http.ListenAndServe(":8000", nil); err != nil {
		logger.Fatal().Err(err).Msg("http server failed")
	}
}
<file_sep>#!/bin/bash
##
## Http GET to fetch the next
## scheduled game
##
curl -X GET \
http://localhost:3000/api/nextgame<file_sep>package main
import (
"fmt"
fifo "github.com/HackRVA/master-base-2019/fifo"
irp "github.com/HackRVA/master-base-2019/irpacket"
)
// main attaches to the badge input FIFO and dumps every packet that
// arrives, for manual inspection.
func main() {
	incoming := make(chan *irp.Packet)
	go fifo.ReadFifo(fifo.BadgeInFile, incoming)
	for {
		p := <-incoming
		fmt.Println("\nPacket received from packetsIn channel")
		p.Print()
		p.PrintPayload()
		fmt.Println()
	}
}
<file_sep>package badgewrangler
// #include <../lasertag-protocol.h>
import "C"
import (
"strings"
"time"
log "github.com/HackRVA/master-base-2019/filelogging"
gm "github.com/HackRVA/master-base-2019/game"
irp "github.com/HackRVA/master-base-2019/irpacket"
"github.com/hackebrot/go-repr/repr"
)
const (
	beaconInterval = 2 * time.Second // pause between beacon packets
	beaconDelay = 5 * time.Second // grace period after sending a new game before re-enabling the beacon
	receivedPacketTimeout = 500 * time.Millisecond // max gap between packets of one badge dump
)
// Values for expecting: the state machine in ReceivePackets tracks which
// opcode it expects next using these aliases of the protocol opcodes.
const (
	SenderBadgeID = C.OPCODE_BADGE_IDENTITY
	GameID = C.OPCODE_GAME_ID
	RecordCount = C.OPCODE_BADGE_RECORD_COUNT
	BadgeID = C.OPCODE_BADGE_UPLOAD_HIT_RECORD_BADGE_ID
	Timestamp = C.OPCODE_BADGE_UPLOAD_HIT_RECORD_TIMESTAMP
	Team = C.OPCODE_SET_BADGE_TEAM
	UserName = C.OPCODE_USERNAME_DATA
)
var debug = false // verbose logging toggle, see SetDebug
var irErr = false // IR-error simulation toggle, see SetIrErr
var logger = log.Ger.With().Str("pkg", "badgewrangler").Logger()
// SetDebug - sets the debugging on and off
// (gates the verbose per-packet logging throughout this package)
func SetDebug(isDebug bool) {
	debug = isDebug
}
// SetIrErr - sets IR error simulation on and off
// (when on, GameData.Packets injects one corrupted packet into the dump)
func SetIrErr(isIrErr bool) {
	irErr = isIrErr
}
// Hit - The data comprising a Hit
type Hit struct {
	GameID uint16
	BadgeID uint16 // badge that scored the hit (9-bit payload on the wire)
	Timestamp uint16 // 12-bit payload; presumably game-relative time -- confirm units
	Team uint8 // 3-bit payload
}
// BadgeIDPacket - return a hit's badgeID packet
func (h *Hit) BadgeIDPacket() *irp.Packet {
	return BuildBadgeUploadHitRecordBadgeID(h.BadgeID)
}
// TimestampPacket - return a hit's timestamp packet
func (h *Hit) TimestampPacket() *irp.Packet {
	return BuildBadgeUploadHitRecordTimestamp(h.Timestamp)
}
// TeamPacket - return a hit's team packet
func (h *Hit) TeamPacket() *irp.Packet {
	return BuildBadgeUploadHitRecordTeam(h.Team)
}
// GameData - The game data dump from a badge
type GameData struct {
	BadgeID uint16 // badge the dump came from
	GameID uint16
	Hits []*Hit
	UserName string // plain ASCII; encoded as five 2-letter segments on the wire
}
// BadgeIDPacket - return gameData's BadgeID packet
func (gd *GameData) BadgeIDPacket() *irp.Packet {
	return BuildBadgeIdentity(gd.BadgeID)
}
// GameIDPacket - return gameData's gameID packet
func (gd *GameData) GameIDPacket() *irp.Packet {
	return BuildBadgeUploadHitRecordGameID(gd.GameID)
}
// HitCountPacket - return gameData's hit count packet
func (gd *GameData) HitCountPacket(hitCount uint16) *irp.Packet {
	return BuildBadgeUploadRecordCount(hitCount)
}
// UserNameSegmentPacket - return 2 letter segment of the gameData's user name
func (gd *GameData) UserNameSegmentPacket(userNameSegment uint16) *irp.Packet {
	return BuildUserNameSegment(userNameSegment)
}
// Packets - return a slice containing all the gameData packets
//
// Layout: badge ID, game ID, hit count, then three packets per hit
// (badge ID, timestamp, team), then five user-name segment packets.
// When irErr is true, the second hit's badge-ID packet is replaced with
// a bogus game-start-time packet to simulate IR corruption.
func (gd *GameData) Packets(irErr bool) []*irp.Packet {
	packets := make([]*irp.Packet, 0, len(gd.Hits)*3+8)
	packets = append(packets,
		gd.BadgeIDPacket(),
		gd.GameIDPacket(),
		gd.HitCountPacket(uint16(len(gd.Hits))))
	for i, hit := range gd.Hits {
		if i == 1 && irErr {
			packets = append(packets, irp.BuildPacket(uint16(0), C.OPCODE_SET_GAME_START_TIME<<12|uint16(19&0x0fff)))
		} else {
			packets = append(packets, hit.BadgeIDPacket())
		}
		packets = append(packets, hit.TimestampPacket(), hit.TeamPacket())
	}
	encodedName := EncodeNameBytes(gd.UserName)
	for i := 0; i < 10; i += 2 {
		packets = append(packets, gd.UserNameSegmentPacket(CompressNameBytes(encodedName[i:i+2])))
	}
	return packets
}
// TransmitBadgeDump - place the gameData element's packets on an outbound *Packet channel
func (gd *GameData) TransmitBadgeDump(packetsOut chan *irp.Packet, irErr bool) {
	packets := gd.Packets(irErr)
	for i := range packets {
		packetsOut <- packets[i]
	}
}
// PrintUnexpectedPacketError - print expected vs. unexpected character error
func PrintUnexpectedPacketError(expected uint8, got uint8) {
logger.Error().Msgf("Expected \"%s\" packet but got \"%s\" packet instead",
irp.GetPayloadSpecs(expected).Description,
irp.GetPayloadSpecs(got).Description)
}
// ReceivePackets - Receives incoming Packets, supresses beacon, and sends out GameData
//
// State machine for a badge dump. The expected packet sequence is:
// badge identity -> game ID -> record count -> (badge ID, timestamp,
// team) per hit -> five user-name segments. The beacon is held (true on
// beaconHoldOut) while a dump is in flight; it is released here only on
// timeout. NOTE(review): after a *successful* dump the beacon is not
// released here -- presumably TransmitNewGamePackets does that once the
// new game has been sent; confirm that pairing.
func ReceivePackets(packetsIn chan *irp.Packet, gameDataOut chan *GameData, beaconHoldOut chan bool) {
	if debug {
		logger.Debug().Msg("Start processing packets")
	}
	var opcode uint8
	var expecting uint8 = SenderBadgeID
	var gameData *GameData
	var hitCount uint16
	var hitsRecorded uint16
	var startTime time.Time
	var packet *irp.Packet
	var letters uint8
	bsIn := make([]byte, 2)
	bsName := make([]byte, 10)
	for {
		// Abandon a half-received dump if the badge goes quiet for too long.
		if expecting != SenderBadgeID {
			if time.Now().Sub(startTime) > receivedPacketTimeout {
				if debug {
					logger.Debug().Msg("Game dump timeout")
				}
				expecting = SenderBadgeID
				beaconHoldOut <- false
			}
		}
		// Poll with a 500ms timeout so the timeout check above keeps running
		// even when no packets arrive.
		select {
		case packet = <-packetsIn:
		case <-time.After(500 * time.Millisecond):
			continue
		}
		opcode = packet.Opcode()
		switch opcode {
		case C.OPCODE_BADGE_IDENTITY:
			if expecting == SenderBadgeID {
				// Start of a dump: suppress the beacon and begin a new record.
				beaconHoldOut <- true
				startTime = time.Now()
				gameData = &GameData{
					BadgeID: uint16(packet.Payload & 0x01ff)}
				expecting = GameID
				if debug {
					logger.Debug().Msgf("** Sender Badge ID Received: %s", repr.Repr(gameData.BadgeID))
				}
			} else {
				PrintUnexpectedPacketError(expecting, opcode)
			}
		case C.OPCODE_GAME_ID:
			if expecting == GameID {
				startTime = time.Now()
				gameData.GameID = uint16(packet.Payload & 0x0fff)
				expecting = RecordCount
				if debug {
					logger.Debug().Msgf("** Game ID Received: %s", repr.Repr(gameData.GameID))
				}
			} else {
				PrintUnexpectedPacketError(expecting, opcode)
			}
		case C.OPCODE_BADGE_RECORD_COUNT:
			if expecting == RecordCount {
				startTime = time.Now()
				hitCount = uint16(packet.Payload & 0x0fff)
				hitsRecorded = 0
				gameData.Hits = make([]*Hit, hitCount)
				if hitCount > 0 {
					expecting = BadgeID
					if debug {
						logger.Debug().Msgf("** Badge Record Count Received: %s", repr.Repr(hitCount))
					}
				} else {
					// Zero hits: skip straight to the user-name segments.
					expecting = UserName
					letters = 0
				}
			} else {
				PrintUnexpectedPacketError(expecting, opcode)
			}
		case C.OPCODE_BADGE_UPLOAD_HIT_RECORD_BADGE_ID:
			if expecting == BadgeID && hitsRecorded < hitCount {
				startTime = time.Now()
				hit := &Hit{
					GameID: gameData.GameID,
					BadgeID: uint16(packet.Payload & 0x01ff)}
				gameData.Hits[hitsRecorded] = hit
				expecting = Timestamp
				if debug {
					logger.Debug().Msgf("** Badge Upload Hit Record Badge ID Received: %s", repr.Repr(gameData.Hits[hitsRecorded].BadgeID))
				}
			} else {
				PrintUnexpectedPacketError(expecting, opcode)
			}
		case C.OPCODE_BADGE_UPLOAD_HIT_RECORD_TIMESTAMP:
			if expecting == Timestamp && hitsRecorded < hitCount {
				startTime = time.Now()
				gameData.Hits[hitsRecorded].Timestamp = uint16(packet.Payload & 0x0fff)
				expecting = Team
				if debug {
					logger.Debug().Msgf("** Badge Upload Hit Record Timestamp Received: %s", repr.Repr(gameData.Hits[hitsRecorded].Timestamp))
				}
			} else {
				PrintUnexpectedPacketError(expecting, opcode)
			}
		case C.OPCODE_SET_BADGE_TEAM:
			if expecting == Team && hitsRecorded < hitCount {
				startTime = time.Now()
				gameData.Hits[hitsRecorded].Team = uint8(packet.Payload & 0x07)
				if debug {
					logger.Debug().Msgf("** Badge Upload Hit Record Team Received: %s", repr.Repr(gameData.Hits[hitsRecorded].Team))
				}
				// Team is the last field of a hit record; advance or move on
				// to the user name once all hits are in.
				if hitsRecorded++; hitsRecorded == hitCount {
					expecting = UserName
					letters = 0
				} else {
					expecting = BadgeID
				}
			} else {
				PrintUnexpectedPacketError(expecting, opcode)
			}
		case C.OPCODE_USERNAME_DATA:
			// NOTE(review): unlike the cases above, an unexpected user-name
			// packet is silently dropped (no else branch) -- confirm intent.
			if expecting == UserName && letters < 10 {
				startTime = time.Now()
				bsIn = ExpandNameBytes(uint16(packet.Payload & 0x03ff))
				bsName[letters], bsName[letters+1] = bsIn[0], bsIn[1]
				letters += 2
				if debug {
					logger.Debug().Msgf("** Badge User Name Letters Received: %d", letters)
				}
				if letters == 10 {
					// Dump complete: publish it and reset for the next badge.
					gameData.UserName = strings.TrimSpace(DecodeNameBytes(bsName))
					gameDataOut <- gameData
					if debug {
						logger.Debug().Msgf("UserName: %s", gameData.UserName)
						logger.Debug().Msg("GameData Complete!")
					}
					hitsRecorded = 0
					hitCount = 0
					gameData = nil
					expecting = SenderBadgeID
				}
			}
		default:
			{
			}
			if debug {
				logger.Error().Msgf("** Opcode %s never expected", repr.Repr(opcode))
			}
		}
	}
}
// The builders below each wrap one protocol opcode: the opcode occupies the
// top 4 bits of the 16-bit payload and the value is masked into the rest.
// Game-configuration packets are addressed to a specific badge; upload/echo
// packets use the broadcast address.
// BuildGameStartTime - Build a game start time packet
func BuildGameStartTime(game *gm.Game) *irp.Packet {
	return irp.BuildPacket(game.BadgeID, C.OPCODE_SET_GAME_START_TIME<<12|uint16(game.StartTime&0x0fff))
}
// BuildGameDuration - Build a game duration packet
func BuildGameDuration(game *gm.Game) *irp.Packet {
	return irp.BuildPacket(game.BadgeID, C.OPCODE_SET_GAME_DURATION<<12|game.Duration&0x0fff)
}
// BuildGameVariant - Build a game variant packet
func BuildGameVariant(game *gm.Game) *irp.Packet {
	return irp.BuildPacket(game.BadgeID, C.OPCODE_SET_GAME_VARIANT<<12|uint16(game.Variant))
}
// BuildGameTeam - Build a game team packet
func BuildGameTeam(game *gm.Game) *irp.Packet {
	return irp.BuildPacket(game.BadgeID, C.OPCODE_SET_BADGE_TEAM<<12|uint16(game.Team))
}
// BuildGameID - Build a game ID packet
func BuildGameID(game *gm.Game) *irp.Packet {
	return irp.BuildPacket(game.BadgeID, C.OPCODE_GAME_ID<<12|uint16(game.GameID&0x0fff))
}
// BuildBeacon - Build the "beacon" packet (broadcast badge-dump request)
func BuildBeacon() *irp.Packet {
	return irp.BuildPacket(uint16(C.BADGE_IR_BROADCAST_ID), C.OPCODE_REQUEST_BADGE_DUMP<<12)
}
// BuildBadgeUploadHitRecordGameID - Build the game ID packet for the hit record
func BuildBadgeUploadHitRecordGameID(gameID uint16) *irp.Packet {
	return irp.BuildPacket(uint16(C.BADGE_IR_BROADCAST_ID), C.OPCODE_GAME_ID<<12|gameID&0x0fff)
}
// BuildBadgeUploadRecordCount - Build the badge record count packet
func BuildBadgeUploadRecordCount(recordCount uint16) *irp.Packet {
	return irp.BuildPacket(uint16(C.BADGE_IR_BROADCAST_ID), C.OPCODE_BADGE_RECORD_COUNT<<12|recordCount&0x0fff)
}
// BuildBadgeUploadHitRecordBadgeID - Build the badge ID packet for a hit record
func BuildBadgeUploadHitRecordBadgeID(hitBadgeID uint16) *irp.Packet {
	return irp.BuildPacket(uint16(C.BADGE_IR_BROADCAST_ID), C.OPCODE_BADGE_UPLOAD_HIT_RECORD_BADGE_ID<<12|hitBadgeID&0x01ff)
}
// BuildBadgeUploadHitRecordTeam - Build the team packet for the hit record
func BuildBadgeUploadHitRecordTeam(team uint8) *irp.Packet {
	return irp.BuildPacket(uint16(C.BADGE_IR_BROADCAST_ID), C.OPCODE_SET_BADGE_TEAM<<12|uint16(team&0x07))
}
// BuildBadgeUploadHitRecordTimestamp - Build the timestamp packet for the hit record
func BuildBadgeUploadHitRecordTimestamp(timestamp uint16) *irp.Packet {
	return irp.BuildPacket(uint16(C.BADGE_IR_BROADCAST_ID), C.OPCODE_BADGE_UPLOAD_HIT_RECORD_TIMESTAMP<<12|timestamp&0x0fff)
}
// BuildBadgeIdentity - Build the badge identity packet
func BuildBadgeIdentity(senderBadgeID uint16) *irp.Packet {
	return irp.BuildPacket(uint16(C.BADGE_IR_BROADCAST_ID), C.OPCODE_BADGE_IDENTITY<<12|senderBadgeID&0x01ff)
}
// BuildUserNameSegment - Build a 2 letter segment of the user name
func BuildUserNameSegment(userNameSegment uint16) *irp.Packet {
	return irp.BuildPacket(uint16(C.BADGE_IR_BROADCAST_ID), C.OPCODE_USERNAME_DATA<<12|userNameSegment&0x03ff)
}
// TransmitNewGamePackets - Receives games, transmits their packets to the
// badge, and re-enables the beacon after a grace period.
func TransmitNewGamePackets(packetsOut chan *irp.Packet, gameIn chan *gm.Game, beaconHold chan bool) {
	for {
		game := <-gameIn
		if debug {
			game.Logger(logger).Debug().Msg("Send game to badge")
		}
		NewGamePackets(packetsOut, game)
		// Give the badge time to process before the beacon resumes.
		time.Sleep(beaconDelay)
		beaconHold <- false
	}
}
// NewGamePackets - Put the new game packets on the channel
//
// Start time and duration are always sent; variant, team and game ID
// follow only when a non-zero duration is set.
func NewGamePackets(packetsOut chan *irp.Packet, game *gm.Game) {
	packetsOut <- BuildGameStartTime(game)
	packetsOut <- BuildGameDuration(game)
	if game.Duration == 0 {
		return
	}
	packetsOut <- BuildGameVariant(game)
	packetsOut <- BuildGameTeam(game)
	packetsOut <- BuildGameID(game)
}
// TransmitBeacon - Transmits "beacon" packets to the badge to trigger gameData upload
// Switchable based on input from beaconHoldIn channel
func TransmitBeacon(packetsOut chan *irp.Packet, beaconHoldIn chan bool) {
	hold := false
	if debug {
		logger.Debug().Msg("Beacon is on")
	}
	for {
		// Pick up any hold/release request without blocking.
		select {
		case hold = <-beaconHoldIn:
			if debug {
				if hold {
					logger.Debug().Msg("Beacon is off")
				} else {
					logger.Debug().Msg("Beacon is on")
				}
			}
		default:
		}
		if !hold {
			packetsOut <- BuildBeacon()
			time.Sleep(beaconInterval)
		}
	}
}
// BadgeHandlePackets - packet handler for the badge simulator
//
// Replies to base-station beacons with the full game-data dump; the
// new-game configuration packets are just logged.
func BadgeHandlePackets(packetsIn chan *irp.Packet, packetsOut chan *irp.Packet, gameData *GameData) {
	if debug {
		logger.Debug().Msg("Start handling packets")
	}
	for {
		packet := <-packetsIn
		opcode := packet.Opcode()
		desc, data := packet.PayloadDescData()
		switch opcode {
		case C.OPCODE_REQUEST_BADGE_DUMP:
			gameData.TransmitBadgeDump(packetsOut, irErr)
		case C.OPCODE_SET_GAME_START_TIME,
			C.OPCODE_SET_GAME_DURATION,
			C.OPCODE_SET_GAME_VARIANT,
			C.OPCODE_SET_BADGE_TEAM,
			C.OPCODE_GAME_ID:
			// All new-game configuration packets share the same log line.
			logger.Debug().Msgf("[%s] packet received, payload: %d", desc, data)
		default:
			if debug {
				logger.Debug().Uint8("opcode", opcode).Msgf("[%s] packet not handled yet.", desc)
			}
		}
	}
}
<file_sep>package database
import (
"strings"
"testing"
"time"
bw "github.com/HackRVA/master-base-2019/badgewrangler"
gm "github.com/HackRVA/master-base-2019/game"
)
// testGameData is the shared fixture for the database tests.
// NOTE(review): the hit records carry GameID 101 while the dump itself is
// GameID 1234 -- confirm whether that mismatch is intentional.
var testGameData = &bw.GameData{
	BadgeID: uint16(332),
	GameID: uint16(1234),
	Hits: []*bw.Hit{
		{GameID: uint16(101), BadgeID: uint16(101), Timestamp: uint16(33), Team: uint8(2)},
		{GameID: uint16(101), BadgeID: uint16(100), Timestamp: uint16(103), Team: uint8(2)},
		{GameID: uint16(101), BadgeID: uint16(101), Timestamp: uint16(203), Team: uint8(2)},
		{GameID: uint16(101), BadgeID: uint16(101), Timestamp: uint16(303), Team: uint8(2)},
		{GameID: uint16(101), BadgeID: uint16(101), Timestamp: uint16(403), Team: uint8(2)},
		{GameID: uint16(101), BadgeID: uint16(101), Timestamp: uint16(503), Team: uint8(2)},
		{GameID: uint16(101), BadgeID: uint16(101), Timestamp: uint16(603), Team: uint8(2)},
		{GameID: uint16(101), BadgeID: uint16(101), Timestamp: uint16(703), Team: uint8(2)},
	},
}
// TestSaveGameData -- Tests SaveGameData and GetGameData
// (round-trips the fixture through the store and compares field by field)
func TestSaveGameData(t *testing.T) {
	SaveGameData(testGameData)
	stored := GetGameData()[0]
	if stored.BadgeID != testGameData.BadgeID {
		t.Error("BadgeID does not match test data")
	}
	if stored.GameID != testGameData.GameID {
		t.Error("GameID does not match test data")
	}
	for i, hit := range stored.Hits {
		want := testGameData.Hits[i]
		if hit.BadgeID != want.BadgeID {
			t.Error("BadgeId - hit data doesn't match testData")
		}
		if hit.Timestamp != want.Timestamp {
			t.Error("Timestamp - hit data doesn't match testData")
		}
		if hit.Team != want.Team {
			t.Error("Team - hit data doesn't match testData")
		}
	}
}
// TestStrGameData checks that StrGameData renders the stored dump the same
// way as GameDataWithSent.ToString.
func TestStrGameData(t *testing.T) {
	wrapped := &GameDataWithSent{GameData: *testGameData}
	expected := wrapped.ToString()
	if strings.Compare(StrGameData()[0], expected) != 0 {
		t.Error("the strings don't match -- StrGameData isn't working correctly")
	}
}
// TestSchedule -- tests scheduling and GetNext
// (GetNext is stateful: it alternates team assignments, so the two calls
// below must stay in order)
func TestSchedule(t *testing.T) {
	startAt := time.Now().Local().Add(2 * time.Minute)
	scheduled := gm.Game{
		BadgeID: 555,
		Team: 55,
		Duration: 12,
		Variant: 1,
		AbsStart: startAt.Unix(),
	}
	ScheduleGame(scheduled)
	first := GetNext()
	second := GetNext()
	if first.BadgeID != scheduled.BadgeID {
		t.Error("Test BadgeID does not match with next scheduled game")
	}
	if first.Team != 2 {
		t.Error("Team assignment is not working correctly")
	}
	if second.Team != 1 {
		t.Error("Team assignment is not working correctly")
	}
	if first.Duration != scheduled.Duration {
		t.Error("Test Duration does not match with next scheduled game")
	}
	if first.Variant != scheduled.Variant {
		t.Error("Test Variant does not match with next scheduled game")
	}
}
// TestZombie -- testing that zombie is created and then it should switch to human
// (for each game, the first determineTeam call yields patient zero (1),
// subsequent calls yield humans (2); the call sequence is load-bearing)
func TestZombie(t *testing.T) {
	r1 := determineTeam(2, 2)
	r2 := determineTeam(2, 2)
	r3 := determineTeam(2, 2)
	if r1 != 1 {
		t.Error("determine team should send PatientZero")
	}
	if r2 != 2 {
		t.Error("determine team should start sending humans")
	}
	if r3 != 2 {
		t.Error("determine team should still be sending humans")
	}
	r4 := determineTeam(2, 3)
	r5 := determineTeam(2, 3)
	r6 := determineTeam(2, 3)
	if r4 != 1 {
		t.Error("determine team should send PatientZero")
	}
	if r5 != 2 {
		t.Error("determine team should start sending humans")
	}
	if r6 != 2 {
		t.Error("determine team should still be sending humans")
	}
}
<file_sep>package badgewrangler
// EncodeNameBytes - Encode the ascii User Name as a byte array of 5 bit characters
//
// Mapping: 'A'-'Z' -> 0-25, '_' -> 26, anything else -> 27 (blank).
// The result is always exactly 10 bytes: shorter names are padded with 27
// and longer names are truncated. (The previous version indexed out of
// range and panicked for names longer than 10 characters.)
func EncodeNameBytes(name string) []byte {
	bs := make([]byte, 10)
	for i := range bs {
		bs[i] = 27 // default: blank / padding
	}
	for i, c := range name {
		if i >= 10 {
			break // truncate over-long names instead of panicking
		}
		switch {
		case c >= 'A' && c <= 'Z':
			bs[i] = byte(c) - 'A'
		case c == '_':
			bs[i] = byte(26)
		default:
			bs[i] = byte(27)
		}
	}
	return bs
}
// CompressNameBytes - Compress the two element array of character bytes into 5 bit uints
//
// Packs bytes[0] into bits 9-5 and bytes[1] into bits 4-0 of the result.
func CompressNameBytes(bytes []byte) uint16 {
	return uint16(bytes[0])<<5 | uint16(bytes[1])
}
// ExpandNameBytes - Expand the 5 bit uints into an 2 element array of regular bytes
//
// Inverse of CompressNameBytes: bits 9-5 become element 0, bits 4-0
// become element 1.
func ExpandNameBytes(fragment uint16) []byte {
	return []byte{
		byte((fragment >> 5) & 0x1f),
		byte(fragment & 0x1f),
	}
}
// DecodeNameBytes - Convert the array of 5 bit characters into an ascii string
//
// Inverse of EncodeNameBytes: 0-25 -> 'A'-'Z', 26 -> '_', anything else
// -> ' ' (callers trim the padding). The redundant `b >= 0` test was
// removed: b is an unsigned byte, so it was always true (staticcheck SA4003).
func DecodeNameBytes(bytes []byte) string {
	var decoded string
	for _, b := range bytes {
		switch {
		case b <= 25:
			decoded += string(b + 'A')
		case b == 26:
			decoded += "_"
		default:
			decoded += " "
		}
	}
	return decoded
}
<file_sep>#!/bin/bash
##
## Http GET to fetch the all games
##
curl -X GET \
http://localhost:3000/api/games<file_sep>package utility
import (
"fmt"
"math"
"testing"
)
// TestInspectProcess prints hex/binary representations of the int12
// payload round trip for manual inspection; it makes no assertions
// (TestRoundTrip below checks correctness).
func TestInspectProcess(t *testing.T) {
	starttime := int16(-42)
	ustarttime := Int12fromInt16toUint16(starttime)
	custarttime := Int12fromUint16toInt16(ustarttime)
	fmt.Printf(" 42: %#7[1]x - %016[1]b\n", uint16(42))
	fmt.Printf("-42: %#7[1]x - %016[1]b\n", starttime)
	fmt.Printf("-42 as Int12 payload in Uint16: %#7[1]x - %016[1]b\n", ustarttime)
	fmt.Printf("-42 as Int16 from payload: %#7[1]x - %016[1]b\n", custarttime)
	fmt.Printf("MaxUint12: %6d - %#7[1]x - %016[1]b\n", 0x0fff)
	fmt.Printf("MaxInt16: %6d - %#7[1]x - %016[1]b\n", math.MaxInt16)
}
// TestRoundTrip verifies that packing an int16 into a 12-bit payload and
// unpacking it again restores the original value.
func TestRoundTrip(t *testing.T) {
	original := int16(-42)
	payload := Int12fromInt16toUint16(original)
	restored := Int12fromUint16toInt16(payload)
	if original != restored {
		t.Errorf("round trip of int12 payload does not equal start")
	}
}
<file_sep>package gameinfo
import (
bw "github.com/HackRVA/master-base-2019/badgewrangler"
gm "github.com/HackRVA/master-base-2019/game"
"time"
)
// BadgeInfo ties a badge's most recently uploaded game data to the time
// it was last seen.
type BadgeInfo struct {
	TimeSeen time.Time // Time a badge was last retrieved
	ID uint16
	Data bw.GameData
}
// GameInfo aggregates a game's details with the badge uploads received
// for it, keyed by badge ID.
type GameInfo struct {
	ID uint16
	Details gm.Game
	Badges map[uint16]BadgeInfo
}
// NewGameInfo builds a GameInfo for the supplied game. The badge map is
// always initialised; game details are copied in only when the game is
// non-zero.
func NewGameInfo(game gm.Game) *GameInfo {
	info := GameInfo{Badges: make(map[uint16]BadgeInfo)}
	if (gm.Game{}) != game {
		info.Details = game
		info.ID = game.GameID
	}
	return &info
}
// UpdateBadgeData records a badge's game-data dump in the GameInfo,
// stamping it with the current UTC time. Empty dumps are ignored.
func UpdateBadgeData(gameInfo GameInfo, gameData bw.GameData) GameInfo {
	// Nothing to record for an empty dump.
	if len(gameData.Hits) == 0 && gameData.GameID == 0 {
		return gameInfo
	}
	now := time.Now().UTC()
	if existing, ok := gameInfo.Badges[gameData.BadgeID]; ok {
		existing.TimeSeen = now
		existing.Data = gameData
		// Always re-keyed by the badge ID originally used to store the entry.
		gameInfo.Badges[existing.ID] = existing
	} else {
		gameInfo.Badges[gameData.BadgeID] = BadgeInfo{
			TimeSeen: now,
			ID: gameData.BadgeID,
			Data: gameData,
		}
	}
	return gameInfo
}
<file_sep>package serial
import (
"fmt"
"io"
"time"
log "github.com/HackRVA/master-base-2019/filelogging"
irp "github.com/HackRVA/master-base-2019/irpacket"
"github.com/HackRVA/master-base-2019/utility"
"github.com/hackebrot/go-repr/repr"
"github.com/tarm/serial"
)
const (
	sendDelay = 40 * time.Millisecond // pause after each packet written to the port
)
var logger = log.Ger.With().Str("pkg", "serial").Logger()
var debug = false // verbose packet logging toggle, see SetDebug
var connected = true // simulated IR link state, see SetConnected
var serialConn *serial.Port // set by OpenPort; used by all read/write helpers
// SetDebug - set debug on/off
// (gates the per-packet byte logging in ReadSerial/WriteSerial)
func SetDebug(isDebug bool) {
	debug = isDebug
}
// SetConnected - If true, passes packets to the channels;
// if false, the packets dispappear into the ether
// in a simulation of IR communication
func SetConnected(isConnected bool) {
	connected = isConnected
}
// OpenPort - Open a serial port
//
// Opens portName at the given baud rate and stores the connection in the
// package-level serialConn. Failure is fatal: it is reported to stdout
// and the logger, then the process exits.
func OpenPort(portName string, baud int) {
	port, err := serial.OpenPort(&serial.Config{Name: portName, Baud: baud})
	if err != nil {
		fmt.Printf("Error opening port %s\n", err)
		logger.Fatal().Err(err).Msgf("Error opening port: %s", portName)
	}
	serialConn = port
}
//ReadSerial - Reads a badge packet from the serial port
//
// Accumulates single bytes until a full 4-byte packet is available, then
// decodes it and forwards it on packetsIn (unless the simulated link is
// switched off). EOF on the port is fatal. Fix: bytes are appended only
// after a successful 1-byte read -- the previous version appended the
// stale buffer byte on errors and short reads, injecting spurious 0x00
// bytes and shifting the 4-byte packet framing.
func ReadSerial(packetsIn chan *irp.Packet) {
	buf := make([]byte, 1)
	packetBuffer := make([]byte, 0, 4)
	for {
		for len(packetBuffer) < 4 {
			byteCount, err := serialConn.Read(buf)
			if err != nil {
				if err == io.EOF {
					fmt.Println("Fatal serial error: EOF on serial port:", err)
					logger.Fatal().Err(err).Msgf("Error, EOF on serial port: %s\n", err)
				}
				logger.Debug().Msgf("Error reading packet: %s", err)
				continue // nothing valid was read
			}
			if byteCount != 1 {
				// Short read: buf holds no fresh byte, try again.
				logger.Debug().Msgf("Expected 1 byte from serial read, got %d", byteCount)
				continue
			}
			packetBuffer = append(packetBuffer, buf[0])
		}
		if debug {
			logger.Debug().Str("bytes", "in").Hex("packet bytes", packetBuffer).Msgf("bytes in: %s", repr.Repr(packetBuffer))
		}
		packet := irp.PacketBytes(packetBuffer).Packet()
		if debug {
			packetLogger := packet.LoggerPlus(logger)
			packetLogger.Debug().Str("microtime", utility.MicroTime()).Str("serial", "in").Msgf("Packet read from serial and routed to channel")
		}
		if connected {
			packetsIn <- packet
		}
		packetBuffer = packetBuffer[:0]
	}
}
// InitIR - writes an IR initialization sequence to the serial port
//
// Sends the 8-byte handshake "ZsYnCxX#" and flushes; any failure is fatal.
func InitIR() {
	if debug {
		logger.Debug().Msg("Initializing IR")
	}
	initSeq := []byte("ZsYnCxX#")
	n, err := serialConn.Write(initSeq)
	if err != nil {
		logger.Fatal().Err(err).Msg("Error initializing IR")
	}
	if n != 8 {
		logger.Fatal().Msg("IR init did not write 8 bytes")
	}
	if err := serialConn.Flush(); err != nil {
		logger.Fatal().Err(err).Msg("Error flushing the buffer")
	}
}
// WriteSerial - writes a packet to the serial port
//
// Consumes packets from packetsOut forever. When the simulated link is
// off (connected == false) the packet is dropped. Note the sendDelay
// sleep sits outside the connected check, so the send rate is throttled
// either way.
func WriteSerial(packetsOut chan *irp.Packet) {
	for {
		packet := <-packetsOut
		if connected {
			if debug {
				packetLogger := packet.LoggerPlus(logger)
				packetLogger.Debug().Str("microtime", utility.MicroTime()).Str("serial", "out").Msgf("Packet to write received from channel")
			}
			bytes := packet.Bytes()
			if debug {
				logger.Debug().Str("bytes", "out").Hex("packet bytes", bytes).Msgf("bytes out: %s", repr.Repr(bytes))
			}
			byteCount, err := serialConn.Write(bytes)
			if err != nil {
				logger.Error().Msgf("Error writing packet: %s", err)
			}
			if byteCount != 4 {
				logger.Error().Msg("Packet written was not 4 bytes")
			}
			err = serialConn.Flush()
			if err != nil {
				logger.Error().Msgf("Error flushing the buffer: %s", err)
			}
		}
		time.Sleep(sendDelay)
	}
}
// IRFilter - filters out packets for other applications
// (only packets addressed to 0x13 are forwarded)
func IRFilter(packetsIn chan *irp.Packet, packetsOut chan *irp.Packet) {
	for {
		p := <-packetsIn
		if p.Address != 0x13 {
			continue
		}
		packetsOut <- p
	}
}
<file_sep>package main
import (
"fmt"
bw "github.com/HackRVA/master-base-2019/badgewrangler"
log "github.com/HackRVA/master-base-2019/filelogging"
irp "github.com/HackRVA/master-base-2019/irpacket"
"github.com/HackRVA/master-base-2019/serial"
term "github.com/nsf/termbox-go"
)
var logger = log.Ger
// connStatus values: whether the simulator reacts to the base station.
const (
	listening = iota
	ignoring = iota
)
// errStatus values: whether the simulator injects IR errors into dumps.
const (
	erroring = iota
	correct = iota
)
// reset redraws the terminal before re-rendering the status menu.
func reset() {
	term.Sync() // re-syncs termbox's back buffer with the terminal -- presumably just cosmetic; confirm
}
// main runs an interactive badge simulator: it wires up the serial
// read/write goroutines, hands canned game data to the badge wrangler,
// and drives a termbox UI where F5 toggles listening to the base station,
// F6 toggles simulated IR errors, and Esc quits.
func main() {
	// Set up input and output channels
	packetsIn := make(chan *irp.Packet)
	packetsOut := make(chan *irp.Packet)
	// Start disconnected and quiet; the F5 key toggles the connection.
	serial.SetConnected(false)
	serial.SetDebug(false)
	serial.OpenPort("/dev/ttyUSB1", 9600)
	go serial.ReadSerial(packetsIn)
	go serial.WriteSerial(packetsOut)
	bw.SetDebug(true)
	err := term.Init()
	if err != nil {
		panic(err)
	}
	defer term.Close()
	// UI state plus the key labels displayed for the two toggles.
	connStatus := ignoring
	errStatus := correct
	nextConnStatusLabel := "Listen"
	nextErrStatusLabel := "IR Error"
	// Canned game data the simulated badge reports to the base station.
	gameData := &bw.GameData{
		BadgeID:  uint16(333),
		GameID:   uint16(1234),
		UserName: "BARNEY",
		Hits: []*bw.Hit{
			{BadgeID: uint16(101), Timestamp: uint16(33), Team: uint8(2)},
			{BadgeID: uint16(101), Timestamp: uint16(103), Team: uint8(2)},
			{BadgeID: uint16(101), Timestamp: uint16(203), Team: uint8(2)},
			{BadgeID: uint16(101), Timestamp: uint16(303), Team: uint8(2)},
			{BadgeID: uint16(101), Timestamp: uint16(403), Team: uint8(2)},
			{BadgeID: uint16(101), Timestamp: uint16(503), Team: uint8(2)},
			{BadgeID: uint16(101), Timestamp: uint16(603), Team: uint8(2)},
			{BadgeID: uint16(101), Timestamp: uint16(703), Team: uint8(2)}}}
	/*
		gameData := &bw.GameData{
			BadgeID: uint16(333),
			GameID:  uint16(1234),
			Hits:    []*bw.Hit{}}
	*/
	go bw.BadgeHandlePackets(packetsIn, packetsOut, gameData)
	reset()
keyPressListenerLoop:
	for {
		// Redraw the status banner for the current toggle states, then
		// apply the matching serial/wrangler settings.
		switch connStatus {
		case listening:
			fmt.Println("Listening to base station")
			serial.SetConnected(true)
			nextConnStatusLabel = "Ignore"
		case ignoring:
			fmt.Println("Ignoring base station")
			serial.SetConnected(false)
			nextConnStatusLabel = "Listen"
		}
		switch errStatus {
		case correct:
			fmt.Println("Sending Correct Data")
			bw.SetIrErr(false)
			nextErrStatusLabel = "IR Error"
		case erroring:
			fmt.Println("Simulating IR Error in Data")
			bw.SetIrErr(true)
			nextErrStatusLabel = "No Error"
		}
		fmt.Println("F5:", nextConnStatusLabel, " F6:", nextErrStatusLabel, " Esc: Quit")
		// Block on the next key event and flip the corresponding toggle.
		switch ev := term.PollEvent(); ev.Type {
		case term.EventKey:
			switch ev.Key {
			case term.KeyEsc:
				break keyPressListenerLoop
			case term.KeyF5:
				if connStatus == listening {
					connStatus = ignoring
				} else {
					connStatus = listening
				}
			case term.KeyF6:
				if errStatus == erroring {
					errStatus = correct
				} else {
					errStatus = erroring
				}
			}
		case term.EventError:
			panic(ev.Err)
		}
		reset()
	}
}
<file_sep>package main
import (
"fmt"
msg "github.com/HackRVA/master-base-2019/badgewrangler"
irp "github.com/HackRVA/master-base-2019/irpacket"
"github.com/HackRVA/master-base-2019/serial"
term "github.com/nsf/termbox-go"
)
// reset redraws the terminal via termbox (cosmetic only).
func reset() {
	term.Sync() // cosmetic purpose?
}
// main is a small interactive sender: it opens the serial port, builds a
// beacon packet, and transmits it each time 'p' is pressed; Esc quits.
func main() {
	serial.SetDebug(true)
	packetsOut := make(chan *irp.Packet)
	serial.OpenPort("/dev/ttyUSB0", 9600)
	go serial.WriteSerial(packetsOut)
	packet := msg.BuildBeacon()
	err := term.Init()
	if err != nil {
		panic(err)
	}
	defer term.Close()
	reset()
keyPressListenerLoop:
	for {
		switch ev := term.PollEvent(); ev.Type {
		case term.EventKey:
			if ev.Key == term.KeyEsc {
				fmt.Println("Esc pressed")
				break keyPressListenerLoop
			} else if ev.Ch == 'p' {
				// Show what will go over the wire, then queue it for the
				// serial writer goroutine.
				fmt.Println("\nPacket built:")
				packet.Print()
				fmt.Println()
				packetsOut <- packet
			}
		case term.EventError:
			panic(ev.Err)
		}
	}
}
<file_sep>package irpacket
import (
"fmt"
"testing"
)
// Reference field values used to assemble the canonical test packet.
const start uint8 = 1      // start bit
const command uint8 = 1    // cmd bit
const address uint8 = 0x13 // 5-bit address (like a port number)
const badgeid uint16 = 0x000
const payload uint16 = 0x4000
// testRawPacket assembles the canonical RawPacket from the test constants
// by OR-ing each shifted field into place.
func testRawPacket() RawPacket {
	bits := StartBit(start)
	bits |= CommandBit(command)
	bits |= AddressBits(address)
	bits |= BadgeIDBits(badgeid)
	bits |= PayloadBits(payload)
	return RawPacket(bits)
}
// TestBitShifting is a visual/demonstration test: it prints how each field
// packs into the 32-bit packet and how the packet splits into wire bytes.
// It makes no assertions; it exists so `go test -v` shows the layout.
func TestBitShifting(t *testing.T) {
	fmt.Println("See how the bits pack")
	fmt.Println()
	fmt.Println("Badge packet is 32-bits:")
	fmt.Println("1 start bit")
	fmt.Println("1 cmd bit")
	fmt.Println("5 address bits (like port number)")
	fmt.Println("9 badge id bits")
	fmt.Println("16 payload bits")
	fmt.Println()
	fmt.Printf("start - %#6x - %6[1]d - %08[1]b\n", start)
	fmt.Printf("command - %#6x - %6[1]d - %08[1]b\n", command)
	fmt.Printf("address - %#6x - %6[1]d - %08[1]b\n", address)
	fmt.Printf("badgeid - %#6x - %6[1]d - %016[1]b\n", badgeid)
	fmt.Printf("payload - %#6x - %6[1]d - %016[1]b\n", payload)
	fmt.Println()
	fmt.Printf("(start & 0x01) << 31 - %032b - %#[1]x\n", StartBit(start))
	fmt.Printf("(command & 0x01) << 30 - %032b - %#[1]x\n", CommandBit(command))
	fmt.Printf("(address & 0x01f) << 25 - %032b - %#[1]x\n", AddressBits(address))
	fmt.Printf("(badgeid & 0x1ff) << 16 - %032b - %#[1]x\n", BadgeIDBits(badgeid))
	fmt.Printf("(payload & 0x0ffff) - %032b - %#[1]x\n", PayloadBits(payload))
	fmt.Println()
	fmt.Printf("bits or'd together - %032b - %#[1]x\n", testRawPacket())
	// Little-endian byte split: byte1 is the low-order byte.
	byte1 := uint8(testRawPacket() & 0x0ff)
	byte2 := uint8((testRawPacket() >> 8) & 0x0ff)
	byte3 := uint8((testRawPacket() >> 16) & 0x0ff)
	byte4 := uint8((testRawPacket() >> 24) & 0x0ff)
	fmt.Printf("Bytes: %#x, %#x, %#x, %#x\n", byte1, byte2, byte3, byte4)
	fmt.Printf("Bytes: %d, %d, %d, %d\n", byte1, byte2, byte3, byte4)
	fmt.Println("Byte Slice:", RawPacketToBytes(testRawPacket()))
	fmt.Printf("12 bit integer mask: %#7x - %016[1]b\n", 0x0800)
}
// TestReadPacket verifies that ReadPacket decodes every field of the
// canonical raw packet back to the constants it was built from.
// The original failure messages read "X = Y" even though they fire when
// X != Y; they now report the mismatch with got/want values.
func TestReadPacket(t *testing.T) {
	testPacket := ReadPacket(testRawPacket())
	fmt.Println()
	PrintPacket(testPacket)
	fmt.Println()
	testPacket.Print()
	fmt.Println()
	if testPacket.Start != start {
		t.Errorf("ReadPacket(testRawPacket()).Start = %d, want %d", testPacket.Start, start)
	}
	if testPacket.Command != command {
		t.Errorf("ReadPacket(testRawPacket()).Command = %d, want %d", testPacket.Command, command)
	}
	if testPacket.Address != address {
		t.Errorf("ReadPacket(testRawPacket()).Address = %#x, want %#x", testPacket.Address, address)
	}
	if testPacket.BadgeID != badgeid {
		t.Errorf("ReadPacket(testRawPacket()).BadgeID = %d, want %d", testPacket.BadgeID, badgeid)
	}
	if testPacket.Payload != payload {
		t.Errorf("ReadPacket(testRawPacket()).Payload = %#x, want %#x", testPacket.Payload, payload)
	}
}
// TestBuildPacket verifies BuildPacket fills in the fixed header fields
// (start, command, address) and carries the badge id and payload through.
// Failure messages now report got/want instead of asserting equality.
func TestBuildPacket(t *testing.T) {
	testPacket := BuildPacket(badgeid, payload)
	if testPacket.Start != start {
		t.Errorf("testPacket.Start = %d, want %d", testPacket.Start, start)
	}
	if testPacket.Command != command {
		t.Errorf("testPacket.Command = %d, want %d", testPacket.Command, command)
	}
	if testPacket.Address != address {
		t.Errorf("testPacket.Address = %#x, want %#x", testPacket.Address, address)
	}
	if testPacket.BadgeID != badgeid {
		t.Errorf("testPacket.BadgeID = %d, want %d", testPacket.BadgeID, badgeid)
	}
	if testPacket.Payload != payload {
		t.Errorf("testPacket.Payload = %#x, want %#x", testPacket.Payload, payload)
	}
}
// TestWritePacket verifies the round trip: a packet built from the test
// constants must serialize to exactly the canonical raw packet.
// The failure message now reports got/want instead of asserting equality.
func TestWritePacket(t *testing.T) {
	testPacket := BuildPacket(badgeid, payload)
	if got := WritePacket(testPacket); got != testRawPacket() {
		t.Errorf("WritePacket(testPacket) = %#x, want %#x", got, testRawPacket())
	}
}
<file_sep>package main
import "fmt"
// main is a scratch demo exploring how Go copies strings into byte arrays
// and slices, and how reslicing consumes a buffer two bytes at a time —
// the same 2-byte chunking the badge name encoding uses.
func main() {
	name := "BARNY_FIFE"
	// Print each byte of the name with its index.
	for i, c := range name {
		fmt.Println(i, " => ", string(c))
	}
	// copy into a fixed-size array vs. a made slice — same result.
	var ba [10]byte
	copy(ba[:], name)
	fmt.Println("name:", []byte(name), "ba:", ba)
	bs := make([]byte, 10)
	copy(bs[:], name)
	fmt.Println("name:", []byte(name), "bs:", bs)
	// Repeatedly split off the first two bytes (head) from the rest.
	bx := make([]byte, 2)
	bx, bs = bs[0:2], bs[2:]
	fmt.Println("bx:", bx, "bs:", bs)
	bx, bs = bs[0:2], bs[2:]
	fmt.Println("bx:", bx, "bs:", bs)
	bx, bs = bs[0:2], bs[2:]
	fmt.Println("bx:", bx, "bs:", bs)
	bx, bs = bs[0:2], bs[2:]
	fmt.Println("bx:", bx, "bs:", bs)
	bx, bs = bs[0:2], bs[2:]
	fmt.Println("bx:", bx, "bs:", bs)
	// append to a nil slice allocates as needed.
	var bs3 []byte
	bs3 = append(bs3, 14)
}
<file_sep>package database
import (
"encoding/json"
bw "github.com/HackRVA/master-base-2019/badgewrangler"
)
// GameDataWithSent -- extending GameData locally to easily unmarshal.
// Sent tracks whether this record has already been delivered to the
// leaderboard, so it is not re-sent on the next sync.
type GameDataWithSent struct {
	bw.GameData
	Sent bool
}
// MarshalBinary encodes the record as JSON (satisfies
// encoding.BinaryMarshaler).
func (g *GameDataWithSent) MarshalBinary() ([]byte, error) {
	return json.Marshal(g)
}
// UnmarshalBinary decodes JSON into the receiver (satisfies
// encoding.BinaryUnmarshaler).
//
// Fix: the receiver must be a pointer. The original value receiver
// unmarshalled into a temporary copy of the struct, so the caller's
// value was never populated; MarshalBinary already uses a pointer
// receiver, and this now matches it.
func (g *GameDataWithSent) UnmarshalBinary(data []byte) error {
	if err := json.Unmarshal(data, g); err != nil {
		return err
	}
	return nil
}
// ToString renders the record as its JSON string form.
// Marshal errors are intentionally ignored; an empty string is returned.
func (g *GameDataWithSent) ToString() string {
	encoded, _ := g.MarshalBinary()
	return string(encoded)
}
<file_sep>package game
import (
zl "github.com/rs/zerolog"
)
// Game - The game specification sent to the badge.
type Game struct {
	BadgeID   uint16 // ID of badge receiving the game
	AbsStart  int64  // Unix time game starts
	StartTime int16  // The number of seconds from now game starts
	Duration  uint16 // 0x0fff — game length in seconds (12 usable bits)
	Variant   uint8  // 0x0f — game mode (free-for-all, team, zombie, ...)
	Team      uint8  // 0x0f — team assigned to the receiving badge
	GameID    uint16 // 0x0fff — unique id for this scheduled game
}
// Logger returns a child logger with every Game field attached as a
// structured context field.
func (g Game) Logger(logger zl.Logger) zl.Logger {
	ctx := logger.With()
	ctx = ctx.Uint16("BadgeID", g.BadgeID)
	ctx = ctx.Int64("AbsStart", g.AbsStart)
	ctx = ctx.Int16("StartTime", g.StartTime)
	ctx = ctx.Uint16("Duration", g.Duration)
	ctx = ctx.Uint8("Variant", g.Variant)
	ctx = ctx.Uint8("Team", g.Team)
	ctx = ctx.Uint16("GameID", g.GameID)
	return ctx.Logger()
}
<file_sep>package webapi
import (
"encoding/json"
bw "github.com/HackRVA/master-base-2019/badgewrangler"
)
// SendGameData - Send GameData to Leaderboard.
// Consumes records from gameDataIn forever and logs their JSON form.
//
// Fix: the original discarded json.Marshal's error and would log a
// corrupt/empty payload; marshal failures are now logged and the record
// skipped.
func SendGameData(gameDataIn chan *bw.GameData) {
	for {
		gameData := <-gameDataIn
		gameDataJSON, err := json.Marshal(gameData)
		if err != nil {
			logger.Error().Msgf("error marshalling game data: %s", err)
			continue
		}
		logger.Debug().Msg("Send Game Data: " + string(gameDataJSON))
	}
}
<file_sep>package main
import (
"fmt"
"time"
)
// main compares two millisecond-timestamp helpers against the raw
// nanosecond clock and a formatted wall-clock time.
func main() {
	a := makeTimeStamp()
	b := makeTimeStamp2()
	fmt.Println(time.Now().Format("2006-01-02 15:04:05.000000"))
	fmt.Printf("%d \n", time.Now().UnixNano())
	fmt.Printf("%d \n", a)
	fmt.Printf("%d \n", b)
}
// makeTimeStamp returns the current Unix time in milliseconds, derived by
// dividing the nanosecond clock by time.Millisecond.
func makeTimeStamp() int64 {
	nanos := time.Now().UnixNano()
	return nanos / int64(time.Millisecond)
}
// makeTimeStamp2 returns the current Unix time in milliseconds using a
// literal nanoseconds-per-millisecond divisor (1e6).
func makeTimeStamp2() int64 {
	const nanosPerMilli = int64(1e6)
	return time.Now().UnixNano() / nanosPerMilli
}
<file_sep>package leaderboard
import (
"encoding/json"
"fmt"
"io/ioutil"
"net/http"
"strconv"
"strings"
"time"
db "github.com/HackRVA/master-base-2019/database"
log "github.com/HackRVA/master-base-2019/filelogging"
"github.com/spf13/viper"
)
// logger is the package-scoped file logger, tagged for this package.
var logger = log.Ger.With().Str("pkg", "leaderboard").Logger()

// an in browser editor exists on the leaderboard webserver
// users scripts are fetched and queued up for transmitting to the badge

// script is one user script as returned by the leaderboard API.
type script struct {
	Content string `json:"content"`
	Name    string `json:"name"`
}

// UserScripts -- stores scripts of user and hash
type UserScripts struct {
	Scripts []string
}

// GameDataResponse -- response from leaderboard when we send GameData
type GameDataResponse struct {
	Status  string `json:"status"`
	Message string `json:"message"`
}
// postGameData sends the accumulated gameData records to the leaderboard's
// consume endpoint as a JSON array, and zeroes the local game data once the
// leaderboard acknowledges receipt.
//
// Fixes: early return on the send error instead of a nested else, and the
// previously ignored ReadAll error is now handled.
func postGameData(gameData []string) {
	uri := viper.GetString("leaderBoard_API") + "consume"
	payload := strings.NewReader(`{"data":[` + strings.Join(gameData, ",") + `]}`)
	req, _ := http.NewRequest("POST", uri, payload)
	req.Header.Add("Content-Type", "application/json")
	res, sendErr := http.DefaultClient.Do(req)
	defer closeResponse(res) // closeResponse is nil-safe
	if sendErr != nil {
		logger.Error().Msg("error sending to leaderboard")
		return
	}
	body, readErr := ioutil.ReadAll(res.Body)
	if readErr != nil {
		logger.Error().Msg("error reading leaderboard response")
		return
	}
	var g GameDataResponse
	if err := json.Unmarshal(body, &g); err != nil {
		logger.Error().Msg("error unmarshalling Json response from leaderboard")
	}
	// NOTE: "recieved" is misspelled by the leaderboard server; this
	// comparison must match the server's response verbatim.
	if g.Message == "recieved user data" {
		logger.Info().Msg("sent data to leaderboard")
		db.ZeroGameData()
	}
}
// closeResponse closes the response body, tolerating a nil response so it
// can be deferred before the request error is checked.
func closeResponse(res *http.Response) {
	if res == nil {
		return
	}
	res.Body.Close()
}
// FetchScripts -- fetch user's scripts from leaderboard api and print the
// response body.
//
// Fixes: the previously ignored ReadAll error is handled, and the body is
// printed as text — the original printed the raw []byte, which renders as
// a list of numbers rather than the script contents.
func FetchScripts(BadgeID uint16) {
	uri := viper.GetString("leaderBoard_API")
	b := strconv.Itoa(int(BadgeID))
	resp, err := http.Get(uri + "users/" + b + "/scripts")
	if err != nil {
		logger.Error().Msgf("error fetching user %d scripts", BadgeID)
		return
	}
	defer resp.Body.Close()
	body, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		logger.Error().Msgf("error reading scripts for user %d: %s", BadgeID, err)
		return
	}
	fmt.Println(string(body))
}
// sendToLeaderboard pushes pending game data to the leaderboard on each
// ticker tick, until a value arrives on quit.
func sendToLeaderboard(interval *time.Ticker, quit chan struct{}) {
	for {
		select {
		case <-quit:
			logger.Debug().Msg("stopping routine that sends data to leaderboard.")
			interval.Stop()
			return
		case <-interval.C:
			logger.Debug().Msg("attempt to send data to leaderboard")
			postGameData(db.StrGameData())
		}
	}
}
// StartLeaderboardLoop starts the background goroutine that pushes game
// data to the leaderboard every 30 seconds.
func StartLeaderboardLoop() {
	ticker := time.NewTicker(30 * time.Second)
	stop := make(chan struct{})
	go sendToLeaderboard(ticker, stop)
}
<file_sep>package main
import (
"fmt"
"github.com/spf13/viper"
)
// main is a config-loading demo: it registers defaults, attempts to read
// baseconfig from the standard locations, and prints the effective values.
func main() {
	viper.SetDefault("serialPort", "/dev/ttyACM0")
	viper.SetDefault("ir", true)
	viper.SetDefault("serialDebug", false)
	viper.SetDefault("bwDebug", false)
	fmt.Println("testing configs...")
	viper.SetConfigName("baseconfig")
	viper.AddConfigPath("/etc/basestation")
	viper.AddConfigPath("$HOME/etc/basestation")
	err := viper.ReadInConfig()
	if err != nil {
		// A missing config file is not fatal — defaults apply.
		fmt.Printf("No config file: %s\nUsing Config Defaults\n", err)
	}
	fmt.Println("serialPort:", viper.GetString("serialPort"))
	fmt.Println("ir:", viper.GetBool("ir"))
	fmt.Println("serialDebug:", viper.GetBool("serialDebug"))
	fmt.Println("badgeWranglerDebug:", viper.GetBool("bwDebug"))
}
<file_sep>package main
import (
"fmt"
msg "github.com/HackRVA/master-base-2019/badgewrangler"
"github.com/HackRVA/master-base-2019/fifo"
irp "github.com/HackRVA/master-base-2019/irpacket"
serial "github.com/HackRVA/master-base-2019/serial"
term "github.com/nsf/termbox-go"
)
// reset redraws the terminal via termbox (cosmetic only).
func reset() {
	term.Sync() // cosmetic purpose?
}
// main is an interactive base-station tester: it prints every packet read
// from the serial port, sends a beacon packet when 'b' is pressed, and
// quits on Esc. Termbox events are pumped into a channel so they can be
// multiplexed with incoming packets in one select.
//
// Fix: the select previously had an empty `default:` case, so the loop
// never blocked and spun at 100% CPU. Removing it lets the select sleep
// until a packet or key event arrives; behavior is otherwise unchanged.
func main() {
	serial.SetDebug(true)
	packetsIn := make(chan *irp.Packet)
	packetsOut := make(chan *irp.Packet)
	serial.OpenPort("/dev/ttyACM0", 19200)
	serial.InitIR()
	go serial.ReadSerial(packetsIn)
	go serial.WriteSerial(packetsOut)
	beaconPacket := msg.BuildBeacon()
	err := term.Init()
	if err != nil {
		panic(err)
	}
	defer term.Close()
	reset()
	// Forward termbox events into a channel for the select below.
	termEvent := make(chan term.Event)
	go func(termEvent chan term.Event) {
		for {
			ev := term.PollEvent()
			termEvent <- ev
		}
	}(termEvent)
keyPressListenerLoop:
	for {
		select {
		case packet := <-packetsIn:
			fmt.Println("\nPacket received from", fifo.BadgeOutFile, "channel")
			fmt.Println("Esc to quit")
			packet.Print()
			packet.PrintPayload()
			fmt.Println()
		case ev := <-termEvent:
			switch ev.Type {
			case term.EventKey:
				if ev.Key == term.KeyEsc {
					fmt.Println("Esc pressed")
					break keyPressListenerLoop
				} else if ev.Ch == 'b' {
					fmt.Println("\nBeacon packet built:")
					beaconPacket.Print()
					fmt.Println()
					packetsOut <- beaconPacket
				}
			case term.EventError:
				panic(ev.Err)
			}
		}
	}
}
<file_sep>package serverstartup
import (
"fmt"
bw "github.com/HackRVA/master-base-2019/badgewrangler"
db "github.com/HackRVA/master-base-2019/database"
"github.com/HackRVA/master-base-2019/game"
irp "github.com/HackRVA/master-base-2019/irpacket"
"github.com/HackRVA/master-base-2019/serial"
"github.com/spf13/viper"
)
// InitConfiguration - Initialize the configuration.
// Registers defaults for every setting, binds leaderBoard_API to the
// environment, reads baseconfig from the standard locations (falling back
// to the defaults when no file exists), and prints the effective values.
func InitConfiguration() {
	// Config init
	fmt.Println("Configuration Settings...")
	viper.SetDefault("serialPort", "/dev/ttyACM0")
	viper.SetDefault("baud", 9600)
	viper.SetDefault("ir", true)
	viper.SetDefault("serialDebug", false)
	viper.SetDefault("bwDebug", false)
	viper.SetDefault("leaderBoard_API", "http://localhost:5000/api/")
	viper.SetDefault("isMaster", true)
	viper.SetDefault("master_URL", "http://10.200.200.234:8000")
	// Allow the leaderboard URL to be overridden via the environment.
	viper.BindEnv("leaderBoard_API")
	viper.SetConfigName("baseconfig")
	viper.AddConfigPath("/etc/basestation")
	viper.AddConfigPath("$HOME/etc/basestation")
	err := viper.ReadInConfig()
	if err != nil {
		// A missing config file is not fatal — defaults apply.
		fmt.Printf("No config file: %s\nUsing Config Defaults\n", err)
	}
	fmt.Println(" serialPort:", viper.GetString("serialPort"))
	fmt.Println(" baud:", viper.GetInt("baud"))
	fmt.Println(" ir:", viper.GetBool("ir"))
	fmt.Println(" serialDebug:", viper.GetBool("serialDebug"))
	fmt.Println("badgeWranglerDebug:", viper.GetBool("bwDebug"))
	fmt.Println(" leaderBoard_API:", viper.GetString("leaderBoard_API"))
	fmt.Println(" isMaster:", viper.GetBool("isMaster"))
	fmt.Println(" master_URL:", viper.GetString("master_URL"))
}
// StartBadgeWrangler - Start up the badge wrangler.
// Wires the full packet pipeline: serial reader -> IR address filter ->
// badge wrangler, plus the beacon/new-game transmitters and the database
// goroutine that exchanges game data for the next scheduled game.
func StartBadgeWrangler() {
	// Set up input and output channels
	packetsIn := make(chan *irp.Packet)
	packetsOut := make(chan *irp.Packet)
	filteredIn := make(chan *irp.Packet)
	gameDataIn := make(chan *bw.GameData)
	gameDataOut := make(chan *bw.GameData)
	beaconHold := make(chan bool)
	gameOut := make(chan *game.Game)
	serial.SetDebug(viper.GetBool("serialDebug"))
	bw.SetDebug(viper.GetBool("bwDebug"))
	serial.OpenPort(viper.GetString("serialPort"), viper.GetInt("baud"))
	// Only initialize the IR transceiver when enabled in config.
	if viper.GetBool("ir") {
		serial.InitIR()
	}
	go serial.ReadSerial(packetsIn)
	go serial.IRFilter(packetsIn, filteredIn)
	go serial.WriteSerial(packetsOut)
	go bw.ReceivePackets(filteredIn, gameDataIn, beaconHold)
	go bw.TransmitBeacon(packetsOut, beaconHold)
	go bw.TransmitNewGamePackets(packetsOut, gameOut, beaconHold)
	go db.DataInGameOut(gameDataIn, gameDataOut, gameOut)
}
<file_sep>package main
import (
"os"
foo "github.com/HackRVA/master-base-2019/examples/logging/zap/testpkglog"
log "go.uber.org/zap"
)
// logger is the package logger carrying a pkg=main field.
// NOTE(review): this file imports go.uber.org/zap as `log`, but the API
// used here (*log.Entry, SetOutput, WithFields, Fields) is logrus's, not
// zap's — confirm the intended import; as written this looks like it
// cannot compile against zap.
var logger *log.Entry

func init() {
	log.SetOutput(os.Stdout)
	file, err := os.OpenFile("test.log", os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0666)
	if err == nil {
		log.SetOutput(file)
	} else {
		log.Debug("Failed to log to file, using default stderr")
	}
	logger = log.WithFields(log.Fields{"pkg": "main"})
}
// main exercises the file logger from this package and from a helper
// package, including a multiline message.
// NOTE(review): SetLevel/DebugLevel are logrus API, not zap — see the
// import note on the logger declaration above this function's file scope.
func main() {
	log.SetLevel(log.DebugLevel)
	logger.Info("first message in main")
	foo.Foo()
	logger.Info("This is a\nmultiline log.")
}
/*
func doit() {
gameDataC := make(chan *GameData)
gameSpecC = make(chan *gameSpec)
beaconHoldC = make(chan *beaconHoldC)
go msg.ReceivePackets(badgeOutC, gameDataC, beaconHoldC)
go msg.TransmitPackets(badgeInC, gameSpecC, beaconHoldC)
go dst.HandOutGameSpecs(gameDataC, gameSpecC)
for {}
}
*/
<file_sep>#!/bin/bash
##
## Http post to create a new game
## AbsStart must be 10 digit
## unix time in the future
##
# POST a new game spec to the local base-station API (port 3000).
curl -X POST \
http://localhost:3000/api/newgame \
-H 'Content-Type: application/json' \
-d '{
"AbsStart": 1551746973,
"Duration": 13,
"Variant": 1
}'
/* We have 16 bits of payload. Let's say the high order 4 bits are the opcode.
* That gives us 16 opcodes, with 12 bits of payload per opcode for single
* packet opcodes (we can have multi-packet opcodes if needed).
* All integer values are transmitted little endian (low order byte first).
*
* Badge packet is 32-bits:
* 1 start bit
* 1 cmd bit
* 5 address bits (like port number)
* 9 badge id bits
* 16 payload bits
*
*/
#define BADGE_IR_GAME_ADDRESS 0x1A /* Arbitrary 5 bits for now ... we will need to coordinate this later */
/* NOTE(review): the Go side (serial.IRFilter and the irpacket tests)
 * currently uses address 0x13, not 0x1A — confirm which value is
 * canonical before the badge and base station are expected to talk. */
#define OPCODE_SET_GAME_START_TIME 0x00
/* Low 12 bits of payload are signed seconds until game starts, up to +/- 34 minutes. */
#define OPCODE_SET_GAME_DURATION 0x01
/* low 12 bits of payload are duration in seconds */
#define OPCODE_HIT 0x02
/* Low 4 bits of payload are team id of shooter */
#define OPCODE_SET_BADGE_TEAM 0x03
/* Low 4 bits of payload are the team ID */
#define OPCODE_REQUEST_BADGE_DUMP 0x04
/* Set game variant. Low 4 bits of payload contain game variant. */
#define OPCODE_SET_GAME_VARIANT 0x05
#define GAME_VARIANT_FREE_FOR_ALL 0
#define GAME_VARIANT_TEAM_BATTLE 1
#define GAME_VARIANT_ZOMBIE 2
#define GAME_VARIANT_CAPTURE_THE_BADGE 3
#define GAME_VARIANT_NONE 0x0f
/* NOTE(review): opcode 0x06 is unassigned in this excerpt — confirm
 * whether that gap is intentional. */
#define OPCODE_BADGE_RECORD_COUNT 0x07
/* low 12 bits contain count of records about to be uploaded to base station */
#define OPCODE_BADGE_UPLOAD_HIT_RECORD_BADGE_ID 0x08
/* low 9 bits contain badge id of shooter */
#define OPCODE_GAME_ID 0x0a
/* payload is 16 bit unique game ID. This opcode is bidirectional. Base
 * station transmits this to the badge at the beginning of a game, and the
 * badge transmits it back to the base station when syncing. */
#define OPCODE_BADGE_UPLOAD_HIT_RECORD_TIMESTAMP 0x09
/* 16 bits timestamp of hit, seconds since game start */
<file_sep>package database
import (
"encoding/json"
"fmt"
"time"
bw "github.com/HackRVA/master-base-2019/badgewrangler"
log "github.com/HackRVA/master-base-2019/filelogging"
gm "github.com/HackRVA/master-base-2019/game"
"github.com/cnf/structhash"
scribble "github.com/nanobox-io/golang-scribble"
)
// logger is the package-scoped file logger, tagged for this package.
var logger = log.Ger.With().Str("pkg", "database").Logger()

// gamesSent counts handed-out games; used to alternate team assignments.
var gamesSent = 0

// sendingZombie selects which zombie-variant team is handed out next.
var sendingZombie = false
// ScheduleGame -- save gamespec to database.
// Assigns a 16-bit GameID derived from the current time and writes the
// game under a struct hash key.
//
// Fix: the original built a mask in a loop and then *multiplied* the time
// by it (`currentTime * bitMask`) before truncating, scrambling the ID.
// The intent — keeping the low 16 bits of the timestamp — is now done
// with a plain mask.
// NOTE(review): the hash key is computed before GameID is assigned, so
// the key does not reflect the ID; preserved from the original — confirm
// that is intended.
func ScheduleGame(game gm.Game) {
	hash, err := structhash.Hash(game, 1)
	if err != nil {
		logger.Error().Msgf("error scheduling game: %s", err)
	}
	currentTime := time.Now().UTC().UnixNano()
	game.GameID = uint16(currentTime & 0xFFFF)
	// create a new scribble database, providing a destination for the database to live
	db, _ := scribble.New("./data", nil)
	logger.Info().Msg("scheduling game")
	if werr := db.Write("games", hash, game); werr != nil {
		logger.Error().Msgf("error writing scheduled game: %s", werr)
	}
}
// SaveGameData -- save game data to db.
// Wraps the incoming data in GameDataWithSent (Sent=false) and writes it
// under a struct hash key.
//
// Fix: the db.Write error was silently discarded; it is now logged.
func SaveGameData(data *bw.GameData) {
	d := &GameDataWithSent{}
	d.GameData = *data
	d.Sent = false
	hash, err := structhash.Hash(d, 1)
	if err != nil {
		logger.Error().Msgf("error saving game data: %s", err)
	}
	db, _ := scribble.New("./data", nil)
	logger.Info().Msg("saving game data")
	if werr := db.Write("game_data", hash, d); werr != nil {
		logger.Error().Msgf("error writing game data: %s", werr)
	}
}
// ZeroGameData -- Sets all game data as sent
func ZeroGameData() {
gameData := GetGameData()
db, _ := scribble.New("./data", nil)
for _, g := range gameData {
oldHash, err := structhash.Hash(g, 1)
g.Sent = true
logger.Debug().Msgf("zeroing game_data for badgeID: %d", g.BadgeID)
if err != nil {
logger.Error().Msgf("error saving zeroed game data: %s", err)
}
db.Write("game_data", oldHash, g)
logger.Debug().Msg("zeroing game data")
}
}
// notSent returns the subset of gd for which f reports true.
// (Despite the name it is a generic filter; callers pass the
// "not yet sent" predicate.)
func notSent(gd []GameDataWithSent, f func(GameDataWithSent) bool) []GameDataWithSent {
	kept := make([]GameDataWithSent, 0)
	for _, candidate := range gd {
		if f(candidate) {
			kept = append(kept, candidate)
		}
	}
	return kept
}
// StrGameData returns every pending game-data record rendered as its JSON
// string form.
func StrGameData() []string {
	var rendered []string
	for _, record := range GetGameData() {
		rendered = append(rendered, record.ToString())
	}
	return rendered
}
// GetGameData reads every game_data record from the scribble store and
// returns only those not yet sent to the leaderboard.
func GetGameData() []GameDataWithSent {
	store, _ := scribble.New("./data", nil)
	records, _ := store.ReadAll("game_data")
	all := make([]GameDataWithSent, 0, len(records))
	for _, raw := range records {
		var rec GameDataWithSent
		if err := json.Unmarshal([]byte(raw), &rec); err != nil {
			fmt.Println("Error", err)
		}
		all = append(all, rec)
	}
	return notSent(all, func(g GameDataWithSent) bool {
		return !g.Sent
	})
}
// ToggleZombie sets whether the next zombie-variant game hands out the
// zombie team (true) or the human team (false), and announces the mode.
func ToggleZombie(send bool) {
	sendingZombie = send
	if sendingZombie {
		fmt.Println("sending zombies")
	} else {
		fmt.Println("sending humans")
	}
}
// determineTeam picks the team for the next badge based on the game
// variant. It also increments the package-level gamesSent counter, which
// drives the alternation for team battles. Team 0 is never returned.
// (gameID is currently unused but kept for interface stability.)
func determineTeam(variant uint8, gameID uint16) uint8 {
	gamesSent++
	switch variant {
	case 0: // FREE FOR ALL — everyone on team 1
		return 1
	case 1: // TEAM BATTLE — alternate between teams 1 and 2
		return uint8(gamesSent%2 + 1)
	case 2: // ZOMBIES! — team 1 is zombie, team 2 is human
		if sendingZombie {
			return 1
		}
		return 2
	case 3: // CAPTURE BADGE
		return 1
	default:
		return 1 // we should not use a zero value for team
	}
}
// GetNext -- return the next game.
// Scans the scheduled games and returns the first one that has not yet
// finished (now < AbsStart + Duration) — i.e. a currently-running or
// future game — with Team and StartTime filled in for the receiving
// badge. If none qualifies, a zero-valued Game (AbsStart == 0) is
// returned as the sentinel.
func GetNext() gm.Game {
	t := time.Now()
	var g gm.Game
	g.AbsStart = 0
	games := GetGames()
	for _, game := range games {
		logger.Debug().Msgf(
			"now: %d nextGame: %d in the future: %t",
			int64(t.Unix()),
			game.AbsStart,
			int64(t.Unix()) < game.AbsStart+int64(game.Duration))
		// return the first game that is greater than now
		if int64(t.Unix()) < game.AbsStart+int64(game.Duration) {
			game.Team = determineTeam(game.Variant, game.GameID)
			// StartTime is negative once the game has already started.
			game.StartTime = int16(game.AbsStart - t.Unix())
			return game
		}
	}
	return g
}
// GetGames loads every scheduled game from the scribble store.
func GetGames() []gm.Game {
	store, _ := scribble.New("./data", nil)
	records, _ := store.ReadAll("games")
	loaded := make([]gm.Game, 0, len(records))
	for _, raw := range records {
		var entry gm.Game
		json.Unmarshal([]byte(raw), &entry)
		loaded = append(loaded, entry)
	}
	return loaded
}
// DataInGameOut - stores the game data and gets the current/next game.
// For every record arriving on gameDataIn it first replies with the next
// scheduled game on gameOut (so the badge gets its game promptly), then
// persists the received data.
func DataInGameOut(gameDataIn chan *bw.GameData, gameDataOut chan *bw.GameData, gameOut chan *gm.Game) {
	for {
		gameData := <-gameDataIn
		logger.Debug().Msg("DataInGameOut received gameData from GameDataIn channel")
		nextGame := GetNext()
		gameOut <- &nextGame
		logger.Debug().Msg("DataInGameOut sent nextGame to gameOut channel")
		SaveGameData(gameData)
	}
}
<file_sep>package main
import (
"fmt"
"net"
"os"
"os/signal"
"syscall"
)
// main is a minimal unix-domain-socket echo listener: it prints whatever
// each client sends to /tmp/unixdomain and removes the socket file on
// SIGINT.
func main() {
	listener, err := net.ListenUnix("unix", &net.UnixAddr{Name: "/tmp/unixdomain", Net: "unix"})
	if err != nil {
		panic(err)
	}
	// Clean up the socket file when interrupted.
	sigs := make(chan os.Signal, 1)
	signal.Notify(sigs, os.Interrupt)
	go func() {
		for sig := range sigs {
			if sig == syscall.SIGINT {
				fmt.Println(sig)
				os.Remove("/tmp/unixdomain")
				os.Exit(0)
			}
		}
	}()
	for {
		conn, err := listener.AcceptUnix()
		if err != nil {
			panic(err)
		}
		var buf [1024]byte
		n, err := conn.Read(buf[:])
		if err != nil {
			panic(err)
		}
		fmt.Printf("%s\n", string(buf[:n]))
		conn.Close()
	}
}
<file_sep>package main
// #include <../../lasertag-protocol.h>
import "C"
import "fmt"
// showOpcodes prints every opcode/variant constant imported from the C
// header via cgo, so the Go and C sides can be compared by eye.
func showOpcodes() {
	//fmt.Println("BADGE_IR_GAME_ADDRESS", C.BADGE_IR_GAME_ADDRESS)
	fmt.Println("OPCODE_SET_GAME_START_TIME", C.OPCODE_SET_GAME_START_TIME)
	fmt.Println("OPCODE_SET_GAME_DURATION", C.OPCODE_SET_GAME_DURATION)
	fmt.Println("OPCODE_HIT", C.OPCODE_HIT)
	fmt.Println("OPCODE_SET_BADGE_TEAM", C.OPCODE_SET_BADGE_TEAM)
	fmt.Println("OPCODE_REQUEST_BADGE_DUMP", C.OPCODE_REQUEST_BADGE_DUMP)
	fmt.Println("OPCODE_SET_GAME_VARIANT", C.OPCODE_SET_GAME_VARIANT)
	fmt.Println("GAME_VARIANT_FREE_FOR_ALL", C.GAME_VARIANT_FREE_FOR_ALL)
	fmt.Println("GAME_VARIANT_TEAM_BATTLE", C.GAME_VARIANT_TEAM_BATTLE)
	fmt.Println("GAME_VARIANT_ZOMBIE", C.GAME_VARIANT_ZOMBIE)
	fmt.Println("GAME_VARIANT_CAPTURE_THE_BADGE", C.GAME_VARIANT_CAPTURE_THE_BADGE)
	fmt.Println("GAME_VARIANT_NONE", C.GAME_VARIANT_NONE)
	fmt.Println("OPCODE_BADGE_RECORD_COUNT", C.OPCODE_BADGE_RECORD_COUNT)
	fmt.Println("OPCODE_BADGE_UPLOAD_HIT_RECORD_BADGE_ID", C.OPCODE_BADGE_UPLOAD_HIT_RECORD_BADGE_ID)
	fmt.Println("OPCODE_GAME_ID", C.OPCODE_GAME_ID)
	fmt.Println("OPCODE_BADGE_UPLOAD_HIT_RECORD_TIMESTAMP", C.OPCODE_BADGE_UPLOAD_HIT_RECORD_TIMESTAMP)
	fmt.Println("OPCODE_BADGE_IDENTITY", C.OPCODE_BADGE_IDENTITY)
}
// main dumps the cgo-imported protocol constants.
func main() {
	showOpcodes()
}
<file_sep>package sync
import (
"encoding/json"
"io/ioutil"
"net/http"
"time"
db "github.com/HackRVA/master-base-2019/database"
log "github.com/HackRVA/master-base-2019/filelogging"
gm "github.com/HackRVA/master-base-2019/game"
"github.com/spf13/viper"
)
// logger is the package-scoped file logger.
// Fix: the pkg tag previously said "leaderboard" (copy-paste from that
// package) even though this is the sync package, mislabeling every log line.
var logger = log.Ger.With().Str("pkg", "sync").Logger()
// closeResponse closes the response body; safe to defer with a nil
// response (e.g. when the request itself failed).
func closeResponse(res *http.Response) {
	if res == nil {
		return
	}
	res.Body.Close()
}
// fetchScheduledGames fetches scheduled games from the MASTER master base
// station. Returns nil when the master is unreachable.
//
// Fix: the original discarded http.Get's error and then dereferenced the
// nil response, panicking whenever the master was down.
func fetchScheduledGames() []gm.Game {
	uri := viper.GetString("master_URL") + "/api/games"
	resp, err := http.Get(uri)
	if err != nil {
		logger.Error().Msgf("could not reach MASTER master base station: %s", err)
		return nil
	}
	defer closeResponse(resp)
	body, _ := ioutil.ReadAll(resp.Body)
	var gms []gm.Game
	if jsonErr := json.Unmarshal(body, &gms); jsonErr != nil {
		logger.Error().Msg("could not get schedule game from MASTER master base station")
	}
	return gms
}
// isInLocalDB reports whether a game with the same GameID is already
// stored in the local database.
func isInLocalDB(game gm.Game) bool {
	for _, stored := range db.GetGames() {
		if stored.GameID == game.GameID {
			return true
		}
	}
	return false
}
// pushToLocalDB schedules in the local database every game that has not
// yet finished and is not already stored locally.
func pushToLocalDB(games []gm.Game) {
	now := time.Now().Unix()
	for _, game := range games {
		if now >= game.AbsStart+int64(game.Duration) {
			continue // game already over
		}
		if !isInLocalDB(game) {
			db.ScheduleGame(game)
		}
	}
}
// fetchGames pulls scheduled games from the master base station on each
// ticker tick and stores new ones locally, until a value arrives on quit.
//
// Fix: the debug messages were copy-pasted from the leaderboard sender
// and claimed to be "sending data to leaderboard"; they now describe
// what this routine actually does.
func fetchGames(interval *time.Ticker, quit chan struct{}) {
	for {
		select {
		case <-interval.C:
			logger.Debug().Msg("attempt to fetch scheduled games from master base station")
			pushToLocalDB(fetchScheduledGames())
		case <-quit:
			logger.Debug().Msg("stopping routine that fetches games from master base station.")
			interval.Stop()
			return
		}
	}
}
// StartSyncLoop starts the background goroutine that fetches scheduled
// games from the MASTER master base station every 30 seconds.
func StartSyncLoop() {
	ticker := time.NewTicker(30 * time.Second)
	stop := make(chan struct{})
	go fetchGames(ticker, stop)
}
<file_sep>package info
import (
"encoding/json"
"strconv"
log "github.com/HackRVA/master-base-2019/filelogging"
gm "github.com/HackRVA/master-base-2019/game"
gi "github.com/HackRVA/master-base-2019/gameinfo"
scribble "github.com/nanobox-io/golang-scribble"
)
// logger is the package-scoped file logger, tagged for this package.
var logger = log.Ger.With().Str("pkg", "info").Logger()
// WriteGameInfo overwrites the 'info' entry keyed by the game's ID.
func WriteGameInfo(gameInfo gi.GameInfo) {
	store, _ := scribble.New("./data", nil)
	key := strconv.FormatInt(int64(gameInfo.ID), 10)
	if err := store.Write("info", key, gameInfo); err != nil {
		logger.Error().Msgf("error writing to the database: %s", err)
	}
}
// GetAllInfo loads every info entry from the scribble store.
func GetAllInfo() []gi.GameInfo {
	store, _ := scribble.New("./data", nil)
	resultSet, _ := store.ReadAll("info")
	allInfo := make([]gi.GameInfo, 0, len(resultSet))
	for _, raw := range resultSet {
		var entry gi.GameInfo
		json.Unmarshal([]byte(raw), &entry)
		allInfo = append(allInfo, entry)
	}
	return allInfo
}
// GetInfo retrieves the info entry for the given game ID; on a read
// failure the error is logged and a zero-valued GameInfo is returned.
func GetInfo(gameID uint16) gi.GameInfo {
	store, _ := scribble.New("./data", nil)
	var gameInfo gi.GameInfo
	key := strconv.FormatUint(uint64(gameID), 10)
	if err := store.Read("info", key, &gameInfo); err != nil {
		logger.Error().Msgf("Error reading: %s", err)
	}
	return gameInfo
}
// GetOldInfo retrieves the backup ("<id>old") info entry for the given
// game ID; on a read failure the error is logged and a zero-valued
// GameInfo is returned.
func GetOldInfo(gameID uint16) gi.GameInfo {
	store, _ := scribble.New("./data", nil)
	var gameInfo gi.GameInfo
	backupKey := strconv.FormatUint(uint64(gameID), 10) + "old"
	if err := store.Read("info", backupKey, &gameInfo); err != nil {
		logger.Error().Msgf("Error reading: %s", err)
	}
	return gameInfo
}
// AddNewGameEntryToGameInfo reads any existing info entry for the game,
// initializes it from the game if none exists, stores the game as the
// entry's Details, and writes it back.
//
// Fix: db.Read was passed gameInfo by value, so the read could never
// populate the struct (scribble unmarshals into the pointer it is given);
// it now receives &gameInfo.
func AddNewGameEntryToGameInfo(game gm.Game) {
	var gameInfo gi.GameInfo
	// Establish driver connection
	db, err := scribble.New("./data", nil)
	logger.Info().Msg("Adding new entry")
	if err != nil {
		logger.Error().Msgf("Driver failure: %s", err)
		return
	}
	err = db.Read("info", strconv.FormatUint((uint64)(game.GameID), 10), &gameInfo)
	if err != nil {
		logger.Error().Msgf("Read error: %s", err)
	}
	// If no prior gameInfo information exists for this game,
	// initialize gameInfo with the game's ID; either way the game
	// becomes the entry's Details.
	if gameInfo.ID == 0 {
		gameInfo.ID = game.GameID
	}
	gameInfo.Details = game
	err = db.Write("info", strconv.FormatInt((int64)(gameInfo.ID), 10), &gameInfo)
	if err != nil {
		logger.Error().Msgf("error writing to the database: %s", err)
	}
}
// UpdateGameInfo -- updates the game info present in the database.
// If no record exists yet, one is written. Otherwise the current entry is
// rotated to a "<id>old" backup key before the new value is stored.
//
// Fix: the backup read and delete were keyed off oldGameInfo.ID, which is
// still the zero value at that point, so they always targeted "0old"
// instead of this game's backup entry. All keys are now derived from the
// incoming gameInfo.ID.
func UpdateGameInfo(gameInfo gi.GameInfo) {
	var storedGameInfo gi.GameInfo
	var oldGameInfo gi.GameInfo
	// Establish driver connection
	db, err := scribble.New("./data", nil)
	if err != nil {
		logger.Error().Msgf("Driver failure: %s", err)
		return
	}
	key := strconv.FormatUint(uint64(gameInfo.ID), 10)
	backupKey := key + "old"
	// retrieve a single entry from info
	err = db.Read("info", key, &storedGameInfo)
	if err != nil {
		logger.Warn().Msgf("game info update failed: %s", err)
		WriteGameInfo(gameInfo)
		return
	}
	// Rotate the backup entry for this game.
	err = db.Read("info", backupKey, &oldGameInfo)
	if err != nil {
		logger.Warn().Msgf("Reading game info backup failed: %s", err)
	}
	err = db.Delete("info", backupKey)
	if err != nil {
		logger.Warn().Msgf("Deleting backup failed: %s", err)
	}
	err = db.Write("info", backupKey, storedGameInfo)
	if err != nil {
		logger.Error().Msgf("Writing Backup failed: %s", err)
		return
	}
	err = db.Write("info", key, gameInfo)
	if err != nil {
		logger.Error().Msgf("Writing game info entry failed: %s", err)
	}
}
<file_sep>package filelogging
import (
"os"
zl "github.com/rs/zerolog"
)
// Ger - the file logger object shared by every package in this project.
var Ger zl.Logger

// init configures Ger to append to test.log, falling back to stderr when
// the file cannot be opened.
//
// Fix: the fallback branch logged "using default stderr" but never
// actually configured Ger, leaving it the zero-value Logger — nothing was
// written anywhere. Ger is now pointed at stderr before logging the
// warning.
func init() {
	file, err := os.OpenFile("test.log", os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0666)
	if err == nil {
		Ger = zl.New(file).With().Timestamp().Logger()
	} else {
		Ger = zl.New(os.Stderr).With().Timestamp().Logger()
		Ger.Debug().Msg("Failed to log to file, using default stderr")
	}
}
// SetGlobalLevel - Set the global logging level (thin wrapper over
// zerolog's package-level setting; affects every logger in the process).
func SetGlobalLevel(level zl.Level) {
	zl.SetGlobalLevel(level)
}
<file_sep>package main
import (
log "github.com/HackRVA/master-base-2019/filelogging"
"github.com/rs/zerolog"
)
// main is a smoke test for the shared file logger: it enables debug-level
// output and writes two messages (one multiline) through log.Ger.
func main() {
	log.SetGlobalLevel(zerolog.DebugLevel)
	log.Ger.Info().Msg("first message in main")
	log.Ger.Info().Msg("This is a\nmultiline log.")
}
/*
func doit() {
gameDataC := make(chan *GameData)
gameSpecC = make(chan *gameSpec)
beaconHoldC = make(chan *beaconHoldC)
go msg.ReceivePackets(badgeOutC, gameDataC, beaconHoldC)
go msg.TransmitPackets(badgeInC, gameSpecC, beaconHoldC)
go dst.HandOutGameSpecs(gameDataC, gameSpecC)
for {}
}
*/
<file_sep>package badgewrangler
import (
"fmt"
"strings"
"testing"
)
func roundTripName(nameIn string) string {
bsIn := EncodeNameBytes(nameIn)
var bsOut []byte
//var bx []byte
bx := make([]byte, 2)
pkts := make([]uint16, 5)
for i := 0; i < 5; i++ {
bx, bsIn = bsIn[0:2], bsIn[2:]
pkts[i] = CompressNameBytes(bx)
}
for i := 0; i < 5; i++ {
bsOut = append(bsOut, ExpandNameBytes(pkts[i])...)
}
return strings.TrimSpace(DecodeNameBytes(bsOut))
}
func TestNameEncoding(t *testing.T) {
name := "BARNY_FIFE"
fmt.Println("name:", name)
bs := EncodeNameBytes(name)
fmt.Println("name:", []byte(name))
fmt.Println("bs:", bs)
name2 := "AARON"
fmt.Println("name2", name2)
bs2 := EncodeNameBytes(name2)
fmt.Println("name2:", []byte(name2))
fmt.Println("bs:", bs2)
firstTwo := bs[0:2]
fmt.Println("firstTwo:", firstTwo)
fmt.Printf("letter mask: %#7x - %016[1]b\n", 0x01f)
fmt.Println("decoded name1:", DecodeNameBytes(bs), ":")
fmt.Println("decoded name2:", DecodeNameBytes(bs2), ":")
if roundTripName(name) != name {
t.Errorf("roundTripName(name) == name")
}
if roundTripName(name2) != name2 {
t.Errorf("roundTripName(name2) == name2")
}
}
<file_sep>package main
import (
"flag"
fifo "github.com/HackRVA/master-base-2019/fifo"
log "github.com/HackRVA/master-base-2019/filelogging"
irp "github.com/HackRVA/master-base-2019/irpacket"
)
var namedPipe string
var logger = log.Ger
func main() {
channelInPtr := flag.Bool("in", false, "read ingoing badge Channel (otherwise outgoing)")
flag.Parse()
if *channelInPtr {
namedPipe = fifo.BadgeInFile
} else {
namedPipe = fifo.BadgeOutFile
}
packetsIn := make(chan *irp.Packet)
go fifo.ReadFifo(namedPipe, packetsIn)
for {
packet := <-packetsIn
packetLogger := packet.Logger(logger)
packetLogger.Info().Msg("Packet received from " + fifo.BadgeOutFile)
}
}
<file_sep>package fifo
import (
"bufio"
"io"
"os"
log "github.com/HackRVA/master-base-2019/filelogging"
irp "github.com/HackRVA/master-base-2019/irpacket"
"github.com/hackebrot/go-repr/repr"
)
var debug = false
var logger = log.Ger
// SetDebug - sets the debugging on or off
func SetDebug(isDebug bool) {
debug = isDebug
}
var connected = true
// SetConnected - If true, passes packets to the channels;
// if false, the packets dispappear into the ether
// in a simulation of IR communication
func SetConnected(isConnected bool) {
connected = isConnected
}
// BadgeOutFile - The path of the named pipe from the badge
var BadgeOutFile = "/tmp/fifo-from-badge"
// BadgeInFile - The path of the named pipe to the badge
var BadgeInFile = "/tmp/fifo-to-badge"
// ReadFifo - Reads a badge packet off of the named pipe (fifo)
func ReadFifo(fifoInFile string, packetsIn chan *irp.Packet) {
if debug {
logger.Debug().Msgf("Opening named pipe %s\n", fifoInFile)
}
fifoFd, err := os.OpenFile(fifoInFile, os.O_RDONLY, os.ModeNamedPipe)
if err != nil {
logger.Fatal().Msgf("Open Named pipe file error: %s", err)
}
buf := make([]byte, 4)
reader := bufio.NewReader(fifoFd)
for {
buf[0], buf[1], buf[2], buf[3] = 0, 0, 0, 0
byteCount, err := reader.Read(buf)
if err != io.EOF {
if err != nil {
logger.Debug().Msgf("Error reading packet: %s", err)
}
if byteCount != 4 {
logger.Debug().Msg("Packet read is not 4 bytes")
}
if debug {
logger.Debug().Msgf("bytes in: %s", repr.Repr(buf))
}
packet := irp.PacketBytes(buf).Packet()
if debug {
packetLogger := packet.Logger(logger)
packetLogger.Debug().Msgf("Packet read and routed to channel from: %s", fifoInFile)
}
if connected {
packetsIn <- packet
}
}
}
}
// WriteFifo - Writes a badge packet to the named pipe (fifo)
func WriteFifo(fifoOutFile string, packetsOut chan *irp.Packet) {
if debug {
logger.Debug().Msgf("Opening named pipe %s\n", fifoOutFile)
}
fifoFd, err := os.OpenFile(fifoOutFile, os.O_WRONLY, os.ModeNamedPipe)
if err != nil {
logger.Fatal().Msgf("Open Named pipe error: %s", err)
}
writer := bufio.NewWriter(fifoFd)
for {
packet := <-packetsOut
if connected {
if debug {
packetLogger := packet.Logger(logger)
packetLogger.Debug().Msgf("Packet to write received from channel: %s", fifoOutFile)
}
bytes := packet.Bytes()
//irp.RawPacketToBytes(irp.WritePacket(packet))
if debug {
logger.Debug().Msgf("bytes out: %s", repr.Repr(bytes))
}
byteCount, err := writer.Write(bytes)
if err != nil {
logger.Error().Msgf("Error writing packet: %s", err)
}
if byteCount != 4 {
logger.Error().Msg("Packet written was not 4 bytes")
}
err = writer.Flush()
if err != nil {
logger.Error().Msgf("Error flushing buffer: %s", err)
}
}
}
}
<file_sep># master-base
Master Base Station has been built for the [RVASec](https://rvasec.com/) conference by members at [HackRVA](https://hackrva.org).
The Master Base Station communicates with the RVASec conference attendees' badges over IR to schedule LaserTag Games. Additionally, The Base Station can help attendees transmit code to their badge.
## Connecting A badge over serial
The Base station expects a badge to be connected over usb on `/dev/ttyACM0`
## Config File
A config file can be created here: `/etc/basestation/baseconfig.yaml`
This is were you can override variables such as:
```
leaderBoard_API: "http://192.168.1.2:5000/api/"
serialPort: /dev/ttyACM0
```
note: default values exist if a config file is not present.
## Start Up Script
The start up script will compile and run a binary called `basestation`
```
$ sh build_and_run.sh
```<file_sep>package main
import (
"net"
"os"
)
func main() {
connType := "unix" // or "unixgram" or "unixpacket"
laddr := net.UnixAddr{"/tmp/unixdomaincli", connType}
conn, err := net.DialUnix(connType, &laddr /*can be nil*/, &net.UnixAddr{"/tmp/unixdomain", connType})
if err != nil {
panic(err)
}
defer os.Remove("/tmp/unixdomaincli")
_, err = conn.Write([]byte("hello"))
if err != nil {
panic(err)
}
conn.Close()
}
<file_sep>#!/bin/bash
mkdir -p /etc/basestation/
echo "serialPort: /dev/ttyACM0
ir: true
serialDebug: true
bwDebug: true" > /etc/basestation/baseconfig.yaml<file_sep>package webapi
import (
"encoding/json"
"io/ioutil"
"net/http"
"strconv"
"time"
db "github.com/HackRVA/master-base-2019/database"
log "github.com/HackRVA/master-base-2019/filelogging"
gm "github.com/HackRVA/master-base-2019/game"
gi "github.com/HackRVA/master-base-2019/gameinfo"
info "github.com/HackRVA/master-base-2019/info"
mux "github.com/gorilla/mux"
)
var logger = log.Ger.With().Str("pkg", "webapi").Logger()
// SendZombie -- json to toggle zombie
type SendZombie struct {
SendZombie bool `json:"sendZombie"`
}
// NewGame - function to schedule newgame
func NewGame(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
var e gm.Game
b, _ := ioutil.ReadAll(r.Body)
json.Unmarshal(b, &e)
db.ScheduleGame(e)
info.AddNewGameEntryToGameInfo(e)
j, _ := json.Marshal(e)
w.Write(j)
}
// NextGame -- returns the game that is sheduled next
func NextGame(w http.ResponseWriter, r *http.Request) {
t := time.Now()
w.Header().Set("Content-Type", "application/json")
next := func() gm.Game {
var g gm.Game
g.AbsStart = 0
games := db.GetGames()
for _, game := range games {
// return the first game that is greater than now
if int64(t.Unix()) < game.AbsStart+int64(game.Duration) {
game.StartTime = int16(game.AbsStart - t.Unix())
return game
}
}
return g
}()
var gameInfo gi.GameInfo
info.AddNewGameEntryToGameInfo(next)
gameInfo = info.GetInfo(next.GameID)
logger.Info().Msgf("gameInfo ID: %d", next.GameID)
if gameInfo.ID == 0 {
info.AddNewGameEntryToGameInfo(next)
}
if next.AbsStart == 0 {
j, _ := json.Marshal("There are no games scheduled")
w.Write(j)
} else {
j, _ := json.Marshal(next)
w.Write(j)
}
}
// AllGames - returns all scheduled games
func AllGames(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
j, _ := json.Marshal(db.GetGames())
w.Write(j)
}
// Info -- Handler that serves up gameInfo
func Info(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
var game gm.Game
var gameInfo gi.GameInfo
params := mux.Vars(r)
b, _ := ioutil.ReadAll(r.Body)
json.Unmarshal(b, &game)
gameIDFromRequest, err := strconv.ParseUint(params["id"], 10, 16)
if err != nil {
logger.Error().Msgf("Info: %s", err)
}
gameInfo = info.GetInfo(uint16(gameIDFromRequest))
if gameInfo.ID == 0 {
info.AddNewGameEntryToGameInfo(game)
gameInfo = info.GetInfo(game.GameID)
}
j, _ := json.Marshal(gameInfo)
w.Write(j)
}
// AllInfo -- handler that returns all GameInfo
func AllInfo(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
j, _ := json.Marshal(info.GetAllInfo())
w.Write(j)
}
// Zombie -- toggles whether or not we are sending zombie based \
// the data being posted
func Zombie(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
var z SendZombie
b, _ := ioutil.ReadAll(r.Body)
json.Unmarshal(b, &z)
db.ToggleZombie(z.SendZombie)
j, _ := json.Marshal(z)
w.Write(j)
}
<file_sep>package irpacket
// #include <../lasertag-protocol.h>
import "C"
import (
"fmt"
"strconv"
utl "github.com/HackRVA/master-base-2019/utility"
zl "github.com/rs/zerolog"
)
// Start - default Start value
const Start = 1
// Command - default Command value
const Command = 1
// BadgeIRGameAddress - Default Game Address until I understand what is going on.
const BadgeIRGameAddress = 0x13
// PayloadSpec - The data describing a payload type
type PayloadSpec struct {
Opcode uint8
Description string
Mask uint16
}
// PayloadSpecList - Slice of Payload Structures
var payloadSpecList = []PayloadSpec{
{C.OPCODE_SET_GAME_START_TIME, "Set Game Start Time", 0x0fff},
{C.OPCODE_SET_GAME_DURATION, "St Game Duration", 0x0fff},
{C.OPCODE_HIT, "Badge Hit (team)", 0x07},
{C.OPCODE_SET_BADGE_TEAM, "Set Badge Team", 0x07},
{C.OPCODE_REQUEST_BADGE_DUMP, "Request Badge Dump", 0x0},
{C.OPCODE_SET_GAME_VARIANT, "Set Game Variant", 0x0f},
{C.OPCODE_BADGE_RECORD_COUNT, "Badge Record Count", 0x0fff},
{C.OPCODE_BADGE_UPLOAD_HIT_RECORD_BADGE_ID, "Badge Upload Hit Record Badge ID", 0x01ff},
{C.OPCODE_GAME_ID, "Game ID", 0x0fff},
{C.OPCODE_BADGE_UPLOAD_HIT_RECORD_TIMESTAMP, "Badge Upload Hit Record Timestamp", 0x0fff},
{C.OPCODE_BADGE_IDENTITY, "Badge Identity", 0x03ff}}
var payloadSpecMap map[uint8]PayloadSpec
func init() {
payloadSpecMap = make(map[uint8]PayloadSpec)
for _, payload := range payloadSpecList {
payloadSpecMap[payload.Opcode] = payload
}
}
// GetPayloadSpecs - returns a PayloadData
func GetPayloadSpecs(opcode uint8) PayloadSpec {
return payloadSpecMap[opcode]
}
// RawPacket - the unsigned integer representing the raw badge packet
type RawPacket uint32
// ReadPacket - read a packet from a rawPacket
func ReadPacket(rawPacket RawPacket) *Packet {
return &Packet{
Start: uint8((rawPacket >> 31) & 0x01),
Command: uint8((rawPacket >> 30) & 0x01),
Address: uint8((rawPacket >> 25) & 0x1f),
BadgeID: uint16((rawPacket >> 16) & 0x1ff),
Payload: uint16(rawPacket & 0x0ffff)}
}
// RawPacketToBytes - convert a rawPacket to a four byte array
func RawPacketToBytes(rawPacket RawPacket) []byte {
return []byte{uint8(rawPacket & 0x0ff),
uint8((rawPacket >> 8) & 0x0ff),
uint8((rawPacket >> 16) & 0x0ff),
uint8((rawPacket >> 24) & 0x0ff)}
}
// Packet - return a Packet
func (r RawPacket) Packet() *Packet {
return ReadPacket(r)
}
// Bytes - return the PacketBytes
func (r RawPacket) Bytes() PacketBytes {
return r.Packet().Bytes()
}
// PacketBytes - the bytes containing the raw packet
type PacketBytes []byte
// BytesToRawPacket - convert four byte array to a RawPacket
func BytesToRawPacket(bytes PacketBytes) RawPacket {
return RawPacket(uint32(bytes[0]) | uint32(bytes[1])<<8 | uint32(bytes[2])<<16 | uint32(bytes[3])<<24)
}
// RawPacket - return the RawPacket
func (b PacketBytes) RawPacket() RawPacket {
return BytesToRawPacket(b)
}
// Packet - return the Packet
func (b PacketBytes) Packet() *Packet {
return b.RawPacket().Packet()
}
// Packet structure for badge messages
type Packet struct {
Start uint8
Command uint8
Address uint8
BadgeID uint16
Payload uint16
}
// BuildPacket - Build a packet
func BuildPacket(
badgeid uint16, payload uint16) *Packet {
return &Packet{
Start: Start,
Command: Command,
Address: BadgeIRGameAddress,
BadgeID: badgeid,
Payload: payload}
}
// StartBit - return a rawPacket with placed Start bit
func StartBit(start uint8) RawPacket {
return ((RawPacket(start) & 0x01) << 31)
}
// CommandBit - return a rawPacket with placed Command bit
func CommandBit(command uint8) RawPacket {
return ((RawPacket(command) & 0x01) << 30)
}
// AddressBits - return a rawPacket with placed Address bits
func AddressBits(address uint8) RawPacket {
return ((RawPacket(address) & 0x01f) << 25)
}
// BadgeIDBits - return a rawPacket with placed BadgeID bits
func BadgeIDBits(badgeid uint16) RawPacket {
return ((RawPacket(badgeid) & 0x1ff) << 16)
}
// PayloadBits - return a rawPacket with placed Payload bits
func PayloadBits(payload uint16) RawPacket {
return (RawPacket(payload) & 0x0ffff)
}
// WritePacket - return a rawPacket from a packet
func WritePacket(packet *Packet) RawPacket {
return RawPacket(StartBit(packet.Start) |
CommandBit(packet.Command) |
AddressBits(packet.Address) |
BadgeIDBits(packet.BadgeID) |
PayloadBits(packet.Payload))
}
// PrintPacket - print out a packet's contents
func PrintPacket(packet *Packet) {
fmt.Printf(" packet: %x\n", WritePacket(packet))
fmt.Printf(" cmd: %#6x - %6[1]d\n", packet.Command)
fmt.Printf(" start: %#6x - %6[1]d\n", packet.Start)
fmt.Printf(" address: %#6x - %6[1]d\n", packet.Address)
fmt.Printf("badge ID: %#6x - %6[1]d\n", packet.BadgeID)
fmt.Printf(" payload: %#6x - %6[1]d\n", packet.Payload)
}
// PacketLogger - return a packet sublogger
func PacketLogger(logger zl.Logger, packet *Packet) zl.Logger {
return logger.With().
Uint8("cmd", packet.Command).
Uint8("start", packet.Start).
Uint8("address", packet.Address).
Uint16("badge ID", packet.BadgeID).
Uint16("payload", packet.Payload).
Str("opcode", packet.OpcodeDescription()).
Int16("payload data", packet.PayloadData()).Logger()
}
// PacketLoggerPlus - return a better packet sublogger
func PacketLoggerPlus(logger zl.Logger, packet *Packet) zl.Logger {
return logger.With().
Str("packet", fmt.Sprintf("%x", packet.RawPacket())).
Str("cmd", fmt.Sprintf("%#x - %[1]d", packet.Command)).
Str("start", fmt.Sprintf("%#x - %[1]d", packet.Start)).
Str("address", fmt.Sprintf("%#x - %[1]d", packet.Address)).
Str("badge ID", fmt.Sprintf("%#x - %[1]d", packet.BadgeID)).
Str("payload", fmt.Sprintf("%#x - %[1]d", packet.Payload)).
Str("opcode", fmt.Sprintf("%#x - %[1]d - %s", packet.Opcode(), packet.OpcodeDescription())).
Str("payload data", fmt.Sprintf("%#x - %[1]d", packet.PayloadData())).Logger()
}
// PrintPayload - Print out the payload particulars
func PrintPayload(packet *Packet) {
opcode := uint8(packet.Payload >> 12)
pd := payloadSpecMap[opcode]
fmt.Println(strconv.Itoa(int(opcode))+":"+pd.Description+":", packet.Payload&pd.Mask)
}
// GetPayload - Returns the description and value of a packet's payload
func GetPayload(packet *Packet) (string, int16) {
opcode := uint8(packet.Payload >> 12)
pd := payloadSpecMap[opcode]
uintPayload := packet.Payload & pd.Mask
if opcode == C.OPCODE_SET_GAME_START_TIME {
return pd.Description, utl.Int12fromUint16toInt16(uintPayload)
}
return pd.Description, int16(uintPayload)
}
// Print - method to print a packet's contents
func (p Packet) Print() { PrintPacket(&p) }
// Opcode - Return Opcode of a packet's payload
func (p Packet) Opcode() uint8 {
return uint8(p.Payload >> 12)
}
// PayloadData - Return payload data from a packet
func (p *Packet) PayloadData() int16 {
_, payload := GetPayload(p)
return payload
}
// PayloadDescData - Return a payload description and data from a packet
func (p *Packet) PayloadDescData() (string, int16) {
return GetPayload(p)
}
// OpcodeDescription - Return the opcode descriptin from a packet
func (p *Packet) OpcodeDescription() string {
return GetPayloadSpecs(p.Opcode()).Description
}
// RawPacket - return a packet's rawPacket
func (p Packet) RawPacket() RawPacket {
return WritePacket(&p)
}
// Bytes - return a packet's raw bytes
func (p Packet) Bytes() []byte {
return RawPacketToBytes(WritePacket(&p))
}
// PrintPayload - prints a packets' payload opcode and value
func (p Packet) PrintPayload() {
PrintPayload(&p)
}
// Logger - returns a logger for this packet
func (p *Packet) Logger(logger zl.Logger) zl.Logger {
return PacketLogger(logger, p)
}
// LoggerPlus - returns a better logger for this packet
func (p *Packet) LoggerPlus(logger zl.Logger) zl.Logger {
return PacketLoggerPlus(logger, p)
}
<file_sep>package utility
import "time"
// Int12fromUint16toInt16 - Convert int12 payload to int16
func Int12fromUint16toInt16(x uint16) int16 {
if (x & 0x0800) > 0 {
x = x | 0x0f000
}
return int16(x)
}
// Int12fromInt16toUint16 - convert int16 to int12 payload
func Int12fromInt16toUint16(x int16) uint16 {
if x < 0 {
x = x & 0x0fff
}
return uint16(x)
}
// MicroTime - Timestamp in microseconds
func MicroTime() string {
return time.Now().Format("2006-01-02 15:04:05.000000")
}
<file_sep>package main
import (
"fmt"
log "github.com/HackRVA/master-base-2019/filelogging"
ss "github.com/HackRVA/master-base-2019/serverstartup"
term "github.com/nsf/termbox-go"
)
var logger = log.Ger
func reset() {
term.Sync() // cosmetic purpose?
}
func main() {
err := term.Init()
if err != nil {
panic(err)
}
defer term.Close()
reset()
ss.InitConfiguration()
ss.StartBadgeWrangler()
keyPressListenerLoop:
for {
switch ev := term.PollEvent(); ev.Type {
case term.EventKey:
if ev.Key == term.KeyEsc {
fmt.Println("Esc pressed")
break keyPressListenerLoop
}
case term.EventError:
panic(ev.Err)
}
}
}
<file_sep>package testpkglog
import (
log "github.com/sirupsen/logrus"
)
var logger *log.Entry
func init() {
logger = log.WithFields(log.Fields{"pkg": "tesgpkglog"})
}
func Foo() {
logger.Debug("Here we are in the package")
}
<file_sep>package main
import (
bw "github.com/HackRVA/master-base-2019/badgewrangler"
fifo "github.com/HackRVA/master-base-2019/fifo"
irp "github.com/HackRVA/master-base-2019/irpacket"
)
func main() {
packetsOut := make(chan *irp.Packet)
beaconHold := make(chan bool)
go fifo.WriteFifo(fifo.BadgeInFile, packetsOut)
go bw.TransmitBeacon(packetsOut, beaconHold)
for {
}
}
<file_sep>package main
import (
"fmt"
irp "github.com/HackRVA/master-base-2019/irpacket"
serial "github.com/HackRVA/master-base-2019/serial"
)
func main() {
serial.SetDebug(true)
packetsIn := make(chan *irp.Packet)
serial.OpenPort("/dev/ttyUSB1", 9600)
go serial.ReadSerial(packetsIn)
for {
packet := <-packetsIn
fmt.Println("\nPacket received from packetsIn channel")
packet.Print()
packet.PrintPayload()
fmt.Println()
}
}
<file_sep>package main
import (
"bytes"
"fmt"
"io/ioutil"
"net/http"
"time"
log "github.com/HackRVA/master-base-2019/filelogging"
)
var logger = log.Ger
func main() {
waitDuration, _ := time.ParseDuration("2m")
startTime := time.Now().Local().Add(waitDuration)
url := "http://10.200.200.234:8000/api/newgame"
var jsonStr = []byte(fmt.Sprintf(`{
"body":123,
"AbsStart": %d,
"Duration": 480,
"Variant": 0
}`, startTime.Unix()))
req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonStr))
req.Header.Set("Content-Type", "application/json")
client := &http.Client{}
resp, err := client.Do(req)
if err != nil {
panic(err)
}
defer resp.Body.Close()
logger.Info().Msgf("response Status: %s", resp.Status)
logger.Info().Msgf("response Headers: %s", resp.Header)
body, _ := ioutil.ReadAll(resp.Body)
logger.Info().Msgf("response Body: %s", string(body))
}
| df0e04094c634b19d325a320f0cf0cce22bc5201 | [
"Markdown",
"C",
"Go",
"Shell"
] | 47 | Shell | HackRVA/master-base-2019 | fdfaeafb5abc5a729a1f63e4aa89359dbc5aca66 | 309abe796f84ef591f5036312bd506036fcc4719 |
refs/heads/master | <repo_name>rakarnik/bioconda-recipes<file_sep>/recipes/autoconf/build.sh
#!/bin/sh
./configure --prefix=$PREFIX
make
sed -i.bak '1 s|^.*$|#!/usr/bin/env perl|g' bin/autom4te
sed -i.bak '1 s|^.*$|#!/usr/bin/env perl|g' bin/autoheader
sed -i.bak '1 s|^.*$|#!/usr/bin/env perl|g' bin/autoreconf
sed -i.bak '1 s|^.*$|#!/usr/bin/env perl|g' bin/ifnames
sed -i.bak '1 s|^.*$|#!/usr/bin/env perl|g' bin/autoscan
sed -i.bak '1 s|^.*$|#!/usr/bin/env perl|g' bin/autoupdate
make install
| c709bb3e1b9abfe5206c85e4c412c06b11e41bec | [
"Shell"
] | 1 | Shell | rakarnik/bioconda-recipes | f3a0513f5721f0013e702f79f7ef0c88927a600e | 86ee1e6b2fa4fdeee6d35d5820d17115a59b7b72 |
refs/heads/main | <repo_name>sidrakshe28/miniontalk<file_sep>/app.js
var btnTranslate =document.querySelector("#btn-translate");
var txtInput=document.querySelector("#txt-input");
var outputDiv=document.querySelector("#output");
var serverURL="https://api.funtranslations.com/translate/minion.json";
function getTranslateURL(text){
return serverURL + "?" + "text=" + text
}
function errorHandler(error){
console.log("error occured",error);
alert("something wrong happend")
}
function clickHandler(){
var inputText=txtInput.value;
//calling server for processing
fetch(getTranslateURL(inputText))
.then(response => response.json())
.then(json => {
var translateText=json.contents.translated;
outputDiv.innerHTML=translateText;
})
.catch(errorHandler)
};
btnTranslate.addEventListener("click", clickHandler)<file_sep>/README.md
<p align="center">
<img src="header.png" alt="minion" width="400px">
</p>
Translator to convert your text from English to Minion language
## :film_projector: DEMO
<p align="center">
<img src="miniontalk.gif" alt="minion">
</p>
| 0b8d9a666e6c7bb5804f72ef126f179b3e3e8050 | [
"JavaScript",
"Markdown"
] | 2 | JavaScript | sidrakshe28/miniontalk | 7134afabbb67ffbbd08bbaf7191f631d810db93c | ff57f18abaf972355d999460778b4e555ef5c60f |
refs/heads/master | <repo_name>elifgc/Slack<file_sep>/src/day_1/Login.java
package day_1;
public class Login {
public static void main(String[] args) {
System.out.println("hello");
//yesss
//gFDSA
System.out.println(" Iwrite something");
//I write something on line14
}
}
| f1504f13fdb083c5212a6bdaa8f9baf1c6be1ce1 | [
"Java"
] | 1 | Java | elifgc/Slack | ec8a3050d740a4f99d778ad3810294f1a2a0a44e | f448afa057e96bf6b6a1f01eb5c1e92a98cb4f5f |
refs/heads/master | <repo_name>FarihaKhalid/Python-Progrmming-CISCO<file_sep>/Qno.6.py
No1 = input("Enter first number: ")
No2 = input("ENter Secound number: ")
No3 = int(No1) + int(No2)
print("Your answer is: ", No3)<file_sep>/Qno3.py
import datetime
Current = datetime.datetime.now()
print (Current.strftime("Date is " "%d-%m-%Y " "and Time is" " %H:%M:%S"))
<file_sep>/Qno2.py
import platform
print(platform.sys.version)<file_sep>/Qno4.py
print("Please enter value of radius of a circle: " )
radius1 = input()
area = float(radius1) * float(radius1) * 3.14159
print("Circle of radius is: ", area)
<file_sep>/Qno5.py
First_Name = input("Enter your First Name: ")
Last_Name = input("Enter your Last Name: ")
First_Name_L = (len(First_Name))
Last_Name_L = (len(Last_Name))
print(Last_Name, " ", First_Name ) | cea3ef10a407ee7ad2068b30a23b76a0074c2788 | [
"Python"
] | 5 | Python | FarihaKhalid/Python-Progrmming-CISCO | 8609a248a2f3082e64cf6d4696bd77d5ee363ca2 | 3b45077b899238220f67f4b1d6930a1e4939d35d |
refs/heads/master | <file_sep><!--<?php while (have_posts()) : the_post(); ?>
<?php get_template_part('templates/page', 'header'); ?>
<?php get_template_part('templates/content', 'page'); ?>
<?php endwhile; ?>-->
<!-- personal.jpg -->
<div class="row text-center">
<div class="col-md-6">
<div class="photo">
<img src="<?=get_stylesheet_directory_uri();?>/dist/images/personal.jpg">
</div>
<!-- Name -->
<div class="name">
<div style="text-align: center;"><strong><NAME></strong></div>
</div>
</div>
<!-- Description Text -->
<div class="col-md-6">
<div class="about-copy text-center">
<div style="text-align: center;"><em>"A <strong>Motion Graphics Artist</strong> and <strong>Website Designer.</em></strong></div>
<div style="text-aling: center;"><em>As a Freelancer, i've worked with a variety of companies.</em></div>
<div style="text-aling: center;"><em>Ranging from <strong>Adobe Systems </strong>to small start-ups.</em></div>
<div style="text-align: center;"><em>I also do <strong>Creative Design</strong> for <strong>Jackalope Media</strong>.</em></div>
<div style="text-align: center;">
<div><em>And am the Co-Founder of <strong>Brake The Cycle</strong>. A local Utah Organization,</em></div>
<div><em>in support of suicide prevention".</em></div>
</div>
</div>
</div>
</div>
<!-- Price Sheet Text -->
<div class="about-copy-2">
<div style="text-align: center;">In need of an Animated Video?</div>
<div style="text-align: center; font-size: 20px;"><em>Take a look at my competitive prices below!</em></div>
</div>
<!-- pricing.svg -->
<div class="pricing text-center">
<svg class="pricing img-responsive" width="626" height="420"><?php get_template_part( 'assets/svg/inline', 'pricing.svg' ); ?></svg>
</div>
<div class="about-copy-3">
<div style="text-align: center;">Check out my Projects Page for examples!</div>
<div style="text-align: center; font-size: 20px;"><em>Animations, Vlogs, Social Videos and more!</em></div>
</div>
<!-- Bottom Text -->
<div class="about-inquire">
<div style="text-align: center;"><h4>For project inquiries, please fill out the form on the Contact Page</h4></div>
</div>
| 814801a1dd010b527fa6a49199d6eef63c1055c5 | [
"PHP"
] | 1 | PHP | Jaron-C/jaronimocall | 01822c05301435fa473727d7f0a3a45cbdd2169c | d73017f8f36274d675d76c1d344bee16ce6ce193 |
refs/heads/main | <repo_name>instance-id/Tommy.Serializer<file_sep>/Tommy.Serializer.Tests/TestModels/TestDataNoDefault.cs
// ----------------------------------------------------------------------------
// -- Project : https://github.com/instance-id/Tommy.Serializer --
// -- instance.id 2020 | http://github.com/instance-id | http://instance.id --
// ----------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
namespace Tommy.Serializer.Tests
{
/// <summary>
/// Data model decorated with Tommy.Serializer attributes (table name, comments,
/// sort order, ignore). Members are declared without initializers, so every
/// property starts at its CLR default value.
/// </summary>
[TommyTableName("tablename")]
public class TestDataNoDefault
{
    [TommyIgnore]
    public string TestIgnoreProperty { get; [ExcludeFromCodeCoverage] set; }

    [TommyComment(" Comment for date property")]
    public DateTime TestDateComment { get; set; }

    [TommyComment(" Comment for Dictionary<K,V> property")]
    public Dictionary<string, string> TestDictionaryComment { get; set; }

    [TommyComment(" Comment for string property\n Testing second line comment\n" +
"This and subsequent items should appear after the sorted properties")]
    public string TestStringComment { get; set; }

    [TommyComment(@" This item should be a blank string : Testing null value")]
    public string TestNullString { get; set; }

    [TommyComment(@" Comment testing multiline verbatim strings #1
Comment testing multiline verbatim strings #2
Comment testing multiline verbatim strings #3")]
    public string TestComment { get; set; }

    [TommyComment(" Comment for bool property")]
    public bool TestBoolComment { get; set; }
    public bool TestBool { get; set; }

    [TommyComment(" Comment for int property")]
    public int TestIntComment { get; set; }
    public int TestInt { get; set; }

    // ulong is the C# keyword alias for System.UInt64; declared with the alias for
    // consistency with TestULongArray / TestULongList below and the TestData demo model.
    [TommySortOrder(1)]
    [TommyComment(@" Comment for ulong property
This item should appear second as it's sort order is : 1")]
    public ulong TestUlongComment { get; set; }
    public ulong TestUlong { get; set; }

    [TommySortOrder(2)]
    [TommyComment(@" Comment for float property
This item should appear third as it's sort order is : 2")]
    public float TestFloatComment { get; set; }
    public float TestFloat { get; set; }

    [TommyComment(" Comment for double property")]
    public double TestDoubleComment { get; set; }
    public double TestDouble { get; set; }

    [TommyComment(" Comment for decimal property")]
    public decimal TestDecimalComment { get; set; }
    public decimal TestDecimal { get; set; }

    [TommyComment(" Comment for IntArray property")]
    public int[] TestIntArrayComment { get; set; }

    [TommySortOrder(0)]
    [TommyComment(@" This item should appear first as it's sort order is : 0")]
    public int[] TestIntArray { get; set; }

    [TommyComment(@" Comment for List<string> property")]
    public List<string> TestStringListComment { get; set; }
    public List<string> TestStringList { get; set; }

    [TommyComment(@" Comment for ulong array property")]
    public ulong[] TestULongArray { get; set; }

    [TommyComment(@" Comment for List<ulong> property")]
    public List<ulong> TestULongList { get; set; }
}
}
<file_sep>/Tommy.Serializer.Demo/Models/TestData.cs
// ----------------------------------------------------------------------------
// -- Project : https://github.com/instance-id/Tommy.Serializer --
// -- instance.id 2020 | http://github.com/instance-id | http://instance.id --
// ----------------------------------------------------------------------------
using System;
using System.Collections.Generic;
#pragma warning disable 414
namespace Tommy.Serializer.Demo
{
/// <summary>
/// Demo data model decorated with Tommy.Serializer attributes (table name, include,
/// ignore, comments, sort order). Every member is initialized with sample data so the
/// demo can serialize a fully populated table.
/// </summary>
[TommyTableName("tablename")]
public class TestData
{
    [TommyInclude]
    private string TestIncludeProperty { get; set; } = "I should show up in the created file even when private";

    [TommyInclude]
    [TommySortOrder(4)]
    [TommyComment(@" Comment for private field
This item should appear fifth as it's sort order is : 4")]
    private string testIncludePrivateField = "I should be included even when private";

    [TommyInclude]
    [TommySortOrder(3)]
    [TommyComment(@" Comment for public field
This item should appear fourth as it's sort order is : 3")]
    public string TestIncludePublicField = "Public string Data";

    [TommyIgnore]
    public string TestIgnoreProperty { get; set; } = "I should not show up in the created file";

    [TommyComment(" Comment for date property")]
    public DateTime TestDateComment { get; set; } = DateTime.Now;

    [TommyComment(" Comment for Dictionary<K,V> property")]
    public Dictionary<string, string> TestDictionaryComment { get; set; } =
        new Dictionary<string, string>{{"string1Key", "string1Value"}, {"string2Key", "string2Value"}};

    [TommyComment(" Comment for string property\n Testing second line comment\n" +
"This and subsequent items should appear after the sorted properties")]
    public string TestStringComment { get; set; } = "Test String";

    [TommyComment(@" This item should be a blank string : Testing null value")]
    public string TestNullString { get; set; }

    [TommyComment(@" Comment testing multiline verbatim strings #1
Comment testing multiline verbatim strings #2
Comment testing multiline verbatim strings #3")]
    public string TestComment { get; set; } = "Test String";

    [TommyComment(" Comment for bool property")]
    public bool TestBoolComment { get; set; } = true;
    public bool TestBool { get; set; }

    [TommyComment(" Comment for int property")]
    public int TestIntComment { get; set; } = 1;
    public int TestInt { get; set; } = 1;

    [TommySortOrder(1)]
    [TommyComment(@" Comment for ulong property
This item should appear second as it's sort order is : 1")]
    public ulong TestUlongComment { get; set; } = 444543646457048001;
    public ulong TestUlong { get; set; } = 444543646457048001;

    [TommySortOrder(2)]
    [TommyComment(@" Comment for float property
This item should appear third as it's sort order is : 2")]
    public float TestFloatComment { get; set; } = 123.123f;
    public float TestFloat { get; set; } = 123.123f;

    [TommyComment(" Comment for double property")]
    public double TestDoubleComment { get; set; } = 1234.123;
    public double TestDouble { get; set; } = 1234.123;

    // 0.11m is the exact decimal literal; it avoids the double -> decimal conversion
    // performed by new decimal(0.11) while producing the same value.
    [TommyComment(" Comment for decimal property")]
    public decimal TestDecimalComment { get; set; } = 0.11m;
    public decimal TestDecimal { get; set; } = 0.11m;

    [TommyComment(" Comment for IntArray property")]
    public int[] TestIntArrayComment { get; set; } = new[] {1, 2, 3, 4};

    [TommySortOrder(0)]
    [TommyComment(@" This item should appear first as it's sort order is : 0")]
    public int[] TestIntArray { get; set; } = new[] {1, 2, 3, 4};

    [TommyComment(@" Comment for List<string> property")]
    public List<string> TestStringListComment { get; set; } = new List<string> {"string1", "string2", "string3"};
    public List<string> TestStringList { get; set; } = new List<string> {"string1", "string2", "string3"};

    [TommyComment(@" Comment for ulong array property")]
    public ulong[] TestULongArray { get; set; } = new ulong[] {448543646457048001, 448543646457048002, 448543646457048003, 448543646457048004};

    [TommyComment(@" Comment for List<ulong> property")]
    public List<ulong> TestULongList { get; set; } = new List<ulong> {448543646457048001, 448543646457048002, 448543646457048003};
}
}
<file_sep>/Tommy.Serializer.Tests/TestUtilities.cs
// ----------------------------------------------------------------------------
// -- Project : https://github.com/instance-id/Tommy.Serializer --
// -- instance.id 2020 | http://github.com/instance-id | http://instance.id --
// ----------------------------------------------------------------------------
using System;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.IO;
using System.Reflection;
namespace Tommy.Serializer.Tests
{
[ExcludeFromCodeCoverage]
public static class TestUtilities
{
    #region Demo Helpers

    /// <summary>
    /// Reads the value of the property named <paramref name="propName"/> from
    /// <paramref name="src"/> via reflection. Returns null when no matching
    /// property exists for the given binding flags.
    /// </summary>
    public static object GetPropertyValue(
        this object src,
        string propName,
        BindingFlags bindingAttr = BindingFlags.Instance | BindingFlags.Public)
    {
        var property = src.GetType().GetProperty(propName, bindingAttr);
        return property == null ? null : property.GetValue(src, null);
    }

    /// <summary>
    /// Check whether the application is running in debug mode in order to determine where to export the file
    /// </summary>
    /// <param name="config">The string name of the output file to create</param>
    /// <returns>The full path in which the file will be created</returns>
    public static string DeterminePath(this string config)
    {
        string combined;
        try
        {
            // Under a debugger the working directory is bin/<config>/<tfm>,
            // so step back up to the project root before appending the name.
            combined = Debugger.IsAttached
                ? Path.Combine(Directory.GetCurrentDirectory(), "../../../", config)
                : Path.Combine(Directory.GetCurrentDirectory(), config);
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex);
            throw;
        }
        return Path.GetFullPath(combined);
    }

    #endregion
}
}
<file_sep>/Tommy.Serializer.Tests/TestModels/TestData.cs
// ----------------------------------------------------------------------------
// -- Project : https://github.com/instance-id/Tommy.Serializer --
// -- instance.id 2020 | http://github.com/instance-id | http://instance.id --
// ----------------------------------------------------------------------------
using System;
using System.Diagnostics.CodeAnalysis;
namespace Tommy.Serializer.Tests
{
// Minimal serialization fixture: one public auto-property per primitive type
// the serializer handles, each with a known initial value. [TommyTableName]
// places all of them under the [tablename] TOML table.
[ExcludeFromCodeCoverage]
[TommyTableName("tablename")]
public class TestData
{
// NOTE(review): DateTime.Now makes this value differ on every run — fine for
// a write-side fixture, but not byte-for-byte reproducible.
public DateTime TestDate { get; set; } = DateTime.Now;
public string TestString { get; set; } = "Test String";
public bool TestBool { get; set; } = true;
public int TestInt { get; set; } = 1;
// Large ulong: exercises values above int range.
public ulong TestUlong { get; set; } = 444543646457048001;
public float TestFloat { get; set; } = 123.123f;
public double TestDouble { get; set; } = 1234.123;
public decimal TestDecimal { get; set; } = new decimal(0.11);
}
}
<file_sep>/Tommy.Serializer.Tests/TestModels/TestDataComments.cs
// ----------------------------------------------------------------------------
// -- Project : https://github.com/instance-id/Tommy.Serializer --
// -- instance.id 2020 | http://github.com/instance-id | http://instance.id --
// ----------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
namespace Tommy.Serializer.Tests
{
// Fixture exercising every Tommy.Serializer attribute: [TommyTableName],
// [TommyComment] (the attribute strings state the expected TOML comments),
// [TommySortOrder] (explicit ordering of a few members) and [TommyIgnore].
// Accessors marked [ExcludeFromCodeCoverage] are excluded from coverage stats.
// NOTE: member declaration order and the attribute string contents are part of
// the expected serializer output — do not reorder or reword them.
[TommyTableName("tablename")]
public class TestDataComments
{
// Must be absent from the generated file.
[TommyIgnore]
public string TestIgnoreProperty { [ExcludeFromCodeCoverage] get; [ExcludeFromCodeCoverage] set; } = "I should not show up in the created file";
// NOTE(review): DateTime.Now is non-deterministic between runs.
[TommyComment(" Comment for date property")]
public DateTime TestDateComment { get; [ExcludeFromCodeCoverage] set; } = DateTime.Now;
[TommyComment(" Comment for Dictionary<K,V> property")]
public Dictionary<string, string> TestDictionaryComment { get; [ExcludeFromCodeCoverage] set; } =
new Dictionary<string, string>{{"string1Key", "string1Value"}, {"string2Key", "string2Value"}};
[TommyComment(" Comment for string property\n Testing second line comment\n" +
"This and subsequent items should appear after the sorted properties")]
public string TestStringComment { get; [ExcludeFromCodeCoverage] set; } = "Test String";
// Deliberately left null to test null handling.
[TommyComment(@" This item should be a blank string : Testing null value")]
public string TestNullString { get; [ExcludeFromCodeCoverage] set; }
[TommyComment(@" Comment testing multiline verbatim strings #1
Comment testing multiline verbatim strings #2
Comment testing multiline verbatim strings #3")]
public string TestComment { get; [ExcludeFromCodeCoverage] set; } = "Test String";
[TommyComment(" Comment for bool property")]
public bool TestBoolComment { get; [ExcludeFromCodeCoverage] set; } = true;
public bool TestBool { get; [ExcludeFromCodeCoverage] set; }
[TommyComment(" Comment for int property")]
public int TestIntComment { get; [ExcludeFromCodeCoverage] set; } = 1;
public int TestInt { get; [ExcludeFromCodeCoverage] set; } = 1;
[TommySortOrder(1)]
[TommyComment(@" Comment for ulong property
This item should appear second as it's sort order is : 1")]
public ulong TestUlongComment { get; [ExcludeFromCodeCoverage] set; } = 444543646457048001;
public ulong TestUlong { get; [ExcludeFromCodeCoverage] set; } = 444543646457048001;
[TommySortOrder(2)]
[TommyComment(@" Comment for float property
This item should appear third as it's sort order is : 2")]
public float TestFloatComment { get; [ExcludeFromCodeCoverage] set; } = 123.123f;
public float TestFloat { get; [ExcludeFromCodeCoverage] set; } = 123.123f;
[TommyComment(" Comment for double property")]
public double TestDoubleComment { get; [ExcludeFromCodeCoverage] set; } = 1234.123;
public double TestDouble { get; [ExcludeFromCodeCoverage] set; } = 1234.123;
[TommyComment(" Comment for decimal property")]
public decimal TestDecimalComment { get; [ExcludeFromCodeCoverage] set; } = new decimal(0.11);
public decimal TestDecimal { get; [ExcludeFromCodeCoverage] set; } = new decimal(0.11);
[TommyComment(" Comment for IntArray property")]
public int[] TestIntArrayComment { get; [ExcludeFromCodeCoverage] set ; } = new[] {1, 2, 3, 4};
// Sort order 0: must be emitted first.
[TommySortOrder(0)]
[TommyComment(@" This item should appear first as it's sort order is : 0")]
public int[] TestIntArray { get; [ExcludeFromCodeCoverage] set ; } = new[] {1, 2, 3, 4};
[TommyComment(@" Comment for List<string> property")]
public List<string> TestStringListComment { get; [ExcludeFromCodeCoverage] set ; }
= new List<string> {"string1", "string2", "string3"};
public List<string> TestStringList { get; [ExcludeFromCodeCoverage] set ; }
= new List<string> {"string1", "string2", "string3"};
[TommyComment(@" Comment for ulong array property")]
public ulong[] TestULongArray { get; [ExcludeFromCodeCoverage] set ; }
= new ulong[] {448543646457048001, 448543646457048002, 448543646457048003, 448543646457048004};
[TommyComment(@" Comment for List<ulong> property")]
public List<ulong> TestULongList { get; [ExcludeFromCodeCoverage] set ; }
= new List<ulong> {448543646457048001, 448543646457048002, 448543646457048003};
}
}
<file_sep>/Tommy.Serializer.Demo/Program.cs
// ----------------------------------------------------------------------------
// -- Project : https://github.com/instance-id/Tommy.Serializer --
// -- instance.id 2020 | http://github.com/instance-id | http://instance.id --
// ----------------------------------------------------------------------------
using System;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Reflection;
namespace Tommy.Serializer.Demo
{
// Demo entry point: serializes the sample data classes to TOML files, reads
// one back, and dumps the deserialized property/field values to the console.
class Program
{
static void Main(string[] args)
{
// Output locations depend on whether a debugger is attached (see DeterminePath).
var path = "TestData.toml".DeterminePath();
var pathCombined = "TestDataCombined.toml".DeterminePath();
var testData = new TestData();
var testData2 = new TestData2();
// -- Takes the TestData class and writes its default values to disk.
TommySerializer.ToTomlFile(testData, path);
// -- Write both TestData and TestData2 values to single file.
TommySerializer.ToTomlFile(new object[] {testData, testData2}, pathCombined);
// ---------------
// -- Reads the file created from TestData and displays the values in the console.
TestDataNoDefault loadTestData = TommySerializer.FromTomlFile<TestDataNoDefault>(path);
string classData = null;
// Properties: skip anything the serializer itself ignores.
var props = loadTestData.GetType().GetProperties(Utilities.bindingFlags)
.Where(x => !Attribute.IsDefined(x, typeof(TommyIgnore)));
foreach (var prop in props)
classData += $"Name: {prop.Name} Value: {loadTestData.GetPropertyValue(prop.Name)}\n";
// Fields: also filter out compiler-generated auto-property backing fields.
var fields = loadTestData.GetType().GetFields(Utilities.bindingFlags)
.Where(x => !x.Name.Contains("k__BackingField")
&& !Attribute.IsDefined(x, typeof(TommyIgnore)));
foreach (var field in fields)
classData += $"Name: {field.Name} Value: {loadTestData.GetFieldValue(field.Name)}\n";
Console.WriteLine(classData);
}
}
#region Demo Helpers
public static class Utilities
{
    // Reflection scope used throughout the demo: instance members,
    // both public and non-public.
    public static BindingFlags bindingFlags = BindingFlags.Instance | BindingFlags.Public | BindingFlags.NonPublic;

    /// <summary>Returns the value of the named property on <paramref name="src"/>, or null when absent.</summary>
    public static object GetPropertyValue(this object src, string propName)
    {
        var property = src.GetType().GetProperty(propName, bindingFlags);
        return property?.GetValue(src, null);
    }

    /// <summary>Returns the value of the named field on <paramref name="src"/>, or null when absent.</summary>
    public static object GetFieldValue(this object src, string fieldName)
    {
        var field = src.GetType().GetField(fieldName, bindingFlags);
        return field?.GetValue(src);
    }

    /// <summary>
    /// Check whether the application is running in debug mode in order to determine where to export the file
    /// </summary>
    /// <param name="config">The string name of the output file to create</param>
    /// <returns>The full path in which the file will be created</returns>
    public static string DeterminePath(this string config)
    {
        string relative;
        try
        {
            // Under a debugger the working directory is the build output
            // folder, so step back up to the project root first.
            if (Debugger.IsAttached)
                relative = Path.Combine(Directory.GetCurrentDirectory(), "../../../", config);
            else
                relative = Path.Combine(Directory.GetCurrentDirectory(), config);
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex);
            throw;
        }
        return Path.GetFullPath(relative);
    }
}
#endregion
}
<file_sep>/Tommy.Serializer.Tests/StartToEndProcessTest.cs
// ----------------------------------------------------------------------------
// -- Project : https://github.com/instance-id/Tommy.Serializer --
// -- instance.id 2020 | http://github.com/instance-id | http://instance.id --
// ----------------------------------------------------------------------------
using FluentAssertions;
using Xunit;
namespace Tommy.Serializer.Tests
{
// End-to-end round trip: serialize TestDataComments to an in-memory stream,
// deserialize it into TestDataNoDefault, then compare property dumps.
public class StartToEndProcessTest
{
[Fact]
public void ProcessTest()
{
// Empty path: ToTomlFile is invoked with writeToMemory = true, so no file
// is touched and the returned stream feeds FromTomlFile directly.
var path = "";
string originalData = null;
string processedData = null;
var testData = new TestDataComments();
var memoryStream = TommySerializer.ToTomlFile(testData, path, true);
var loadTestData = TommySerializer.FromTomlFile<TestDataNoDefault>(path, memoryStream);
var originalProperties = testData.GetType().GetProperties();
var processedProperties = loadTestData.GetType().GetProperties();
// NOTE(review): both dumps read values from loadTestData (only the property
// *names* come from the original type). If the intent was to compare the
// pre-serialization values, the first loop should read from testData — but
// that would fail on DateTime.Now round-trip precision; confirm intent.
foreach (var prop in originalProperties)
originalData += $"Name: {prop.Name} Value: {loadTestData.GetPropertyValue(prop.Name)}\n";
foreach (var prop in processedProperties)
processedData += $"Name: {prop.Name} Value: {loadTestData.GetPropertyValue(prop.Name)}\n";
processedData.Should().ContainAll(originalData);
}
}
}
<file_sep>/README.md
# Tommy.Serializer
The primary goal of this library is to add automatic serialization/deserialization of a class instance to the [Tommy Toml library](https://github.com/dezhidki/Tommy).
##### (De)Serialization of a class instance to/from a file has been implemented, along with the ability to work with `List<primitive>`, `Primitive[]`, and basic `Dictionary<primitive, primitive>`. If more advanced usage is required, just let me know.
## Installation
Currently, to install this extension either download the [extensions file](https://github.com/instance-id/Tommy.Serializer/blob/main/Tommy.Serializer/TommySerializer.cs)
and add it to your project, or create a new C# script in your project and simply copy and paste the contents of the script.
I may look into making a nuget package of it as well.
## Components
<details open>
<summary>Attributes</summary>
<table>
<!-- ---------------------------------------------- -->
<tr>
<td> Attribute </td> <td> Usage </td> <td> Result </td>
</tr>
<tr>
<a name="tommytablename"></a>
<td valign="top" style="padding-top: 25px"> TommyTableName </td>
<td valign="top">
```c#
// Designates a class as a Toml Table and applies all
// contained properties as children of that table
[TommyTableName("mytablename")]
public class TestData { //... }
```
</td>
<td valign="top">
```toml
[mytablename]
```
</td>
</tr>
<!-- ---------------------------------------------- -->
<tr>
<a name="tommycomment"></a>
<td valign="top" style="padding-top: 25px"> TommyComment </td>
<td valign="top">
```c#
// String value which will be used as a
// comment for the property/field
[TommyComment(" Comment for string property")]
public string TestString { get; set; } = "Test String"
```
</td>
<td valign="top">
```toml
# Comment for string property
TestString = "Test String"
```
</td>
</tr>
<!-- ---------------------------------------------- -->
<tr>
<a name="tommysortorder"></a>
<td valign="top" style="padding-top: 25px"> TommySortOrder </td>
<td valign="top">
```c#
// Determines the order in which the
// properties will be written to file
[TommySortOrder(1)]
[TommyComment(" Sort order 1")]
public float TestFloat1 { get; set; } = 234.234f;
[TommySortOrder(0)]
[TommyComment(" Sort order 0")]
public float TestFloat0 { get; set; } = 123.123f;
```
</td>
<td valign="top">
```toml
# Sort order 0
TestFloat0 = 123.123
# Sort order 1
TestFloat1 = 234.234
```
</td>
</tr>
<!-- ---------------------------------------------- -->
<tr>
<a name="tommyinclude"></a>
<td valign="top" style="padding-top: 25px"> TommyInclude </td>
<td valign="top">
```c#
// Designates a private field to be
// included by the Tommy processor
[TommyInclude]
private string testIncludeField = "I'm private, so what?";
```
</td>
<td valign="top">
```toml
testIncludeField = "I'm private, so what?"
```
</td>
</tr>
<!-- ---------------------------------------------- -->
<tr>
<a name="tommyignore"></a>
<td valign="top" style="padding-top: 25px"> TommyIgnore </td>
<td valign="top">
```c#
// Designates a property to be ignored
// by the Tommy processor
[TommyIgnore]
public string TestIgnoreProperty { get; set; }
```
</td>
<td valign="top">
```toml
```
</td>
</tr>
</table>
</details>
## Usage
While attributes are included for specific situations, if a property or field is public, they will be included automatically, unless the [\[TommyIgnore\]](#tommyignore) attribute is applied to them.
### Saving to file
---
### Single Data Object to File
```c#
using Tommy.Serializer;
TestData testData = new TestData();
string path = "path/to/TestData.toml";
TommySerializer.ToTomlFile(testData, path);
```
### Multiple Data Objects to Single File
#### NOTE: When outputting multiple data objects to a single file, while not required, it is advised that each data class utilize the [\[TommyTableName\]](#tommytablename) attribute to encapsulate the data under the proper table (primarily so that you can choose their table name). If the attribute is omitted, the object's type name is used as the table name automatically.
```c#
var testData = new TestData();
var testData2 = new TestData2();
var path = "path/to/TestData.toml";
TommySerializer.ToTomlFile(new object[] {testData, testData2}, path);
```
### Multiple Data Objects to Multiple Files
```c#
var testData = new TestData();
var path = "path/to/TestData.toml";
var testData2 = new TestData2();
var path2 = "path/to/TestData2.toml";
TommySerializer.ToTomlFile(testData, path);
TommySerializer.ToTomlFile(testData2, path2);
```
### Data from file
---
```c#
var path = "path/to/TestData.toml";
TestData testData = TommySerializer.FromTomlFile<TestData>(path);
```
---
## Included Example
If you download the complete solution from this repo and run the Demo project, it will use the following data class and produce the output file seen below that.
<details>
<summary>Data Class</summary>
```c#
[TommyTableName("tablename")]
public class TestData
{
[TommyInclude]
private string TestIncludeProperty { get; set; } = "I should show up in the created file even when private";
[TommyInclude]
[TommySortOrder(4)]
[TommyComment(@" Comment for private field
This item should appear fifth as it's sort order is : 4")]
private string testIncludePrivateField = "I should be included even when private";
[TommyInclude]
[TommySortOrder(3)]
[TommyComment(@" Comment for public field
This item should appear fourth as it's sort order is : 3")]
public string TestIncludePublicField = "Public string Data";
[TommyIgnore]
public string TestIgnoreProperty { get; set; } = "I should not show up in the created file";
[TommyComment(" Comment for date property")]
public DateTime TestDateComment { get; set; } = DateTime.Now;
[TommyComment(" Comment for string property\n Testing second line comment\n" +
"This and subsequent items should appear after the sorted properties")]
public string TestStringComment { get; set; } = "Test String";
[TommyComment(@" This item should be a blank string : Testing null value")]
public string TestNullString { get; set; }
[TommyComment(@" Comment testing multiline verbatim strings #1
Comment testing multiline verbatim strings #2
Comment testing multiline verbatim strings #3")]
public string TestComment { get; set; } = "Test String";
[TommyComment(" Comment for bool property")]
public bool TestBoolComment { get; set; } = true;
public bool TestBool { get; set; }
[TommyComment(" Comment for int property")]
public int TestIntComment { get; set; } = 1;
public int TestInt { get; set; } = 1;
[TommySortOrder(1)]
[TommyComment(@" Comment for ulong property
This item should appear second as it's sort order is : 1")]
public ulong TestUlongComment { get; set; } = 444543646457048001;
public ulong TestUlong { get; set; } = 444543646457048001;
[TommySortOrder(2)]
[TommyComment(@" Comment for float property
This item should appear third as it's sort order is : 2")]
public float TestFloatComment { get; set; } = 123.123f;
public float TestFloat { get; set; } = 123.123f;
[TommyComment(" Comment for double property")]
public double TestDoubleComment { get; set; } = 1234.123;
public double TestDouble { get; set; } = 1234.123;
[TommyComment(" Comment for decimal property")]
public decimal TestDecimalComment { get; set; } = new decimal(0.11);
public decimal TestDecimal { get; set; } = new decimal(0.11);
[TommyComment(" Comment for IntArray property")]
public int[] TestIntArrayComment { get; set; } = new[] {1, 2, 3, 4};
[TommySortOrder(0)]
[TommyComment(@" This item should appear first as it's sort order is : 0")]
public int[] TestIntArray { get; set; } = new[] {1, 2, 3, 4};
[TommyComment(@" Comment for List<string> property")]
public List<string> TestStringListComment { get; set; } = new List<string> {"string1", "string2", "string3"};
public List<string> TestStringList { get; set; } = new List<string> {"string1", "string2", "string3"};
[TommyComment(@" Comment for ulong array property")]
public ulong[] TestULongArray { get; set; } = new ulong[] {448543646457048001, 448543646457048002, 448543646457048003};
[TommyComment(@" Comment for List<ulong> property")]
public List<ulong> TestULongList { get; set; } = new List<ulong> {448543646457048001, 448543646457048002, 448543646457048003};
[TommyComment(" Comment for Dictionary<K,V> property")]
public Dictionary<string, string> TestDictionaryComment { get; set; } =
new Dictionary<string, string>{{"string1Key", "string1Value"}, {"string2Key", "string2Value"}};
}
```
</details>
<details>
<summary>Output of Above Data Class</summary>
```toml
[tablename]
# This item should appear first as it's sort order is : 0
TestIntArray = [ 1, 2, 3, 4 ]
# Comment for ulong property
# This item should appear second as it's sort order is : 1
TestUlongComment = 444543646457048001
# Comment for float property
# This item should appear third as it's sort order is : 2
TestFloatComment = 123.123
# Comment for public field
# This item should appear fourth as it's sort order is : 3
TestIncludePublicField = "Public string Data"
# Comment for private field
# This item should appear fifth as it's sort order is : 4
testIncludePrivateField = "I should be included even when private"
TestIncludeProperty = "I should show up in the created file even when private"
# Comment for date property
TestDateComment = 2020-12-13 15:06:18
# Comment for string property
# Testing second line comment
# This and subsequent items should appear after the sorted properties
TestStringComment = "Test String"
# This item should be a blank string : Testing null value
TestNullString = ""
# Comment testing multiline verbatim strings #1
# Comment testing multiline verbatim strings #2
# Comment testing multiline verbatim strings #3
TestComment = "Test String"
# Comment for bool property
TestBoolComment = true
TestBool = false
# Comment for int property
TestIntComment = 1
TestInt = 1
TestUlong = 444543646457048001
TestFloat = 123.123
# Comment for double property
TestDoubleComment = 1234.123
TestDouble = 1234.123
# Comment for decimal property
TestDecimalComment = 0.11
TestDecimal = 0.11
# Comment for IntArray property
TestIntArrayComment = [ 1, 2, 3, 4 ]
# Comment for List<string> property
TestStringListComment = [ "string1", "string2", "string3" ]
TestStringList = [ "string1", "string2", "string3" ]
# Comment for ulong array property
TestULongArray = [ 448543646457048001, 448543646457048002, 448543646457048003 ]
# Comment for List<ulong> property
TestULongList = [ 448543646457048001, 448543646457048002, 448543646457048003 ]
# Comment for Dictionary<K,V> property
[tablename.TestDictionaryComment]
DictionaryKeys = [ "string1Key", "string2Key" ]
DictionaryValues = [ "string1Value", "string2Value" ]
```
</details>
## Contributing
Pull requests are welcome. For major changes, please open an issue first to discuss what you would like to change.
Please make sure to update tests as appropriate (once there are some, of course).
## License
[MIT](https://choosealicense.com/licenses/mit/)
---

<file_sep>/Tommy.Serializer.Tests/ComponentTests.cs
// ----------------------------------------------------------------------------
// -- Project : https://github.com/instance-id/Tommy.Serializer --
// -- instance.id 2020 | http://github.com/instance-id | http://instance.id --
// ----------------------------------------------------------------------------
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using FluentAssertions;
using Xunit;
using Xunit.Sdk;
// ReSharper disable RedundantExplicitArrayCreation
namespace Tommy.Serializer.Tests
{
// Unit tests for the serializer's building blocks: TOML node construction
// (GetTomlNode), sort-order handling (SortNodes), and reconstruction of
// generic dictionaries/lists/arrays from parsed TOML.
public class ComponentTests
{
// Every property type on TestData must map to the TomlNode subtype listed in
// NodeLookup; an unsupported type (the TypeCode enum here) must throw with
// the exact "Was not able to process item ..." message.
[Fact]
public void TestGetTomlNode()
{
var testData = new TestData();
var properties = testData.GetType().GetProperties();
for (int i = 0; i < properties.Length; i++)
{
var result = TommySerializer.GetTomlNode(properties[i].GetValue(testData), properties[i].PropertyType);
(result as TomlNode).Should().BeOfType(NodeLookup[properties[i].PropertyType]);
}
Action act = () => TommySerializer.GetTomlNode(12, typeof(TypeCode));
act.Should().Throw<Exception>()
.WithMessage($"Was not able to process item {typeof(TypeCode).Name}")
}
// Nodes with explicit sort orders (0, 1, 2) must come first in that order;
// the SortOrder == -1 nodes follow (see expectedResults below).
[Theory]
[MemberData(nameof(SortNodeData))]
public void TestSortingNodesOrder<T>(T[] nodes)
{
var nodeArray = nodes as SortNode[];
var nodeList = nodeArray!.ToList();
var sortedNodes = nodeList.SortNodes((from l in nodeList select l.SortOrder).Max())
var expectedResults = new[] {"intField", "floatField", "stringField", "boolField", "dateField"};
for (var i = 0; i < sortedNodes.Count; i++)
sortedNodes[i].Name.Should().Be(expectedResults[i]);
}
// Renders the key/value arrays as a DictionaryKeys/DictionaryValues TOML
// snippet, parses it back, and checks CreateGenericDictionary restores
// every pair with the correct generic key/value types.
[Theory]
[MemberData(nameof(DictionaryData))]
public void TestBuildDictionaryGeneric<T, V>(T[] keys, V[] values)
{
var keyList = keys.ToList();
var valueList = values.ToList();
var dictionary = Enumerable.Range(0, keys.Length).ToDictionary(i => keys[i], i => values[i]);
// Quote only non-numeric, non-bool elements so the snippet is valid TOML
// (bools are lower-cased to match TOML's true/false literals).
var keyString = typeof(T).IsNumeric() || typeof(T) == typeof(bool)
? keyList.Aggregate("", (first, next) => first + $"{next}, ")
: keyList.Aggregate("", (first, next) => first + $"\"{next}\", ")
var valueString = typeof(V).IsNumeric() || typeof(V) == typeof(bool)
? typeof(V) == typeof(bool)
? valueList.Aggregate("", (first, next) => first + $"{next.ToString()?.ToLower()}, ")
: valueList.Aggregate("", (first, next) => first + $"{next}, ")
: valueList.Aggregate("", (first, next) => first + $"\"{next}\", ")
// Drop the trailing ", " left by the Aggregate calls.
keyString = keyString.Remove(keyString.Length - 2);
valueString = valueString.Remove(valueString.Length - 2);
string testTomlDictionaryFile = $@"
[tablename]
[tablename.TestDictionaryComment]
DictionaryKeys = [ {keyString} ]
DictionaryValues = [ {valueString} ]";
var tomlData = GetTableData(testTomlDictionaryFile);
dictionary = (Dictionary<T, V>) TommySerializer.CreateGenericDictionary(GetFieldData(tomlData), dictionary.GetType());
dictionary.Should().ContainKeys(keyList);
dictionary.Should().ContainValues(valueList);
}
// Same round trip for flat arrays/lists: render, parse, then rebuild both a
// List<T> and a T[] via CreateGenericList and check the contents survive.
[Theory]
[MemberData(nameof(ArrayData))]
public void TestBuildArrayList<T>(T[] values)
{
var valueList = values.ToList();
var valueString = typeof(T).IsNumeric() || typeof(T) == typeof(bool)
? typeof(T) == typeof(bool)
? valueList.Aggregate("", (first, next) => first + $"{next.ToString()?.ToLower()}, ")
: valueList.Aggregate("", (first, next) => first + $"{next}, ")
: valueList.Aggregate("", (first, next) => first + $"\"{next}\", ")
valueString = valueString.Remove(valueString.Length - 2);
string testTomlArrayFile = $@"
[tablename]
TestArray = [ {valueString} ]";
var tomlData = GetTableData(testTomlArrayFile);
var genericList = new List<T>();
var genericArray = Array.Empty<T>();
var listType = genericList.GetType();
var arrayType = genericArray.GetType();
genericList = (List<T>) TommySerializer.CreateGenericList(GetArrayData(tomlData), listType);
genericArray = (T[]) TommySerializer.CreateGenericList(GetArrayData(tomlData), arrayType);
genericList.Should().Contain(valueList);
genericArray.Should().Contain(values);
}
#region Processing Methods
// Parses a TOML string into a TomlTable.
private TomlTable GetTableData(string tomlData)
{
using StringReader reader = new StringReader(tomlData);
using TOMLParser parser = new TOMLParser(reader);
return parser.Parse();
}
// Returns the first field node under the first table (assumes the fixture
// TOML has exactly one table with the target entry first).
private TomlNode GetFieldData(TomlTable tomltable)
{
var tableName = tomltable.Keys.ToArray()[0];
var fieldName = tomltable[tableName].Keys.ToArray()[0];
return tomltable[tableName][fieldName];
}
// Same as GetFieldData but unwraps the node into its raw array elements.
private TomlNode[] GetArrayData(TomlTable tomltable)
{
var tableName = tomltable.Keys.ToArray()[0];
var fieldName = tomltable[tableName].Keys.ToArray()[0];
return tomltable[tableName][fieldName].AsArray.RawArray.ToArray();
}
#endregion
#region Data
// Expected CLR type -> TomlNode subtype mapping used by TestGetTomlNode.
public static Dictionary<Type, Type> NodeLookup = new Dictionary<Type, Type>
{
{typeof(float), typeof(TomlFloat)},
{typeof(int), typeof(TomlInteger)},
{typeof(bool), typeof(TomlBoolean)},
{typeof(long), typeof(TomlInteger)},
{typeof(double), typeof(TomlFloat)},
{typeof(ulong), typeof(TomlInteger)},
{typeof(string), typeof(TomlString)},
{typeof(decimal), typeof(TomlFloat)},
{typeof(DateTime), typeof(TomlDateTime)}
};
// public static IEnumerable<object[]> TomlNodeData
// {
// get
// {
// yield return new object[] {typeof(TomlBoolean), (bool) true};
// yield return new object[] {typeof(TomlFloat), (double) 12.45};
// yield return new object[] {typeof(TomlFloat), (float) 12.45f};
// yield return new object[] {typeof(TomlInteger), (int) 4321};
// yield return new object[] {typeof(TomlInteger), (long) 1231231231231};
// yield return new object[] {typeof(TomlInteger), (ulong) 444543646457048001};
// yield return new object[] {typeof(TomlDateTime), DateTime.Parse("2020-12-12 15:36:16")};
// yield return new object[] {typeof(TomlString), "String Data"};
// }
// } // @formatter:on1
// Fixture for TestSortingNodesOrder: three explicitly ordered nodes plus two
// with SortOrder -1 (no explicit order).
public static IEnumerable<object[]> SortNodeData // @formatter:off
{
get { yield return new object[] {new SortNode[] {
new SortNode { Name = "boolField", SortOrder = -1, Value = new TomlBoolean { Comment = "Comment for bool", Value = true}},
new SortNode { Name = "stringField", SortOrder = 2, Value = new TomlString { Comment = "Comment for string", Value = "String Value s"}},
new SortNode { Name = "floatField", SortOrder = 1, Value = new TomlFloat { Comment = "Comment for float", Value = 1.432f}},
new SortNode { Name = "intField", SortOrder = 0, Value = new TomlInteger { Comment = "Comment for int", Value = 6}},
new SortNode { Name = "dateField", SortOrder = -1, Value = new TomlDateTime { Comment = "Comment for Date", Value =
DateTime.Parse("2020-12-12 15:36:16")}}}}; }
} // @formatter:on
// Element arrays covering each supported primitive element type.
public static IEnumerable<object[]> ArrayData
{
get // @formatter:off
{
yield return new object[] { new int[] { 1, 2, 3, 4}};
yield return new object[] { new bool[] { true, false, true, false}};
yield return new object[] { new double[] { 11.22, 22.33, 33.44, 44.55}};
yield return new object[] { new float[] { 11.22f, 22.33f, 33.44f, 44.55f}};
yield return new object[] { new string[] { "one", "two", "three", "four"}};
yield return new object[] { new ulong[] { 444543646457048001, 444543646457048002, 444543646457048003}};
}
} // @formatter:on
// Key/value array pairs covering mixed primitive key and value types.
public static IEnumerable<object[]> DictionaryData
{
get // @formatter:off
{
yield return new object[] { new int[] { 1, 2, 3, 4}, new int[] { 4, 3, 2, 1}};
yield return new object[] { new int[] { 4, 3, 2, 1}, new string[] { "one", "two", "three", "four"}};
yield return new object[] { new string[] { "one", "two", "three", "four"}, new int[] { 4, 3, 2, 1}};
yield return new object[] { new string[] { "one", "two", "three", "four"}, new bool[] { true, false, true, false}};
yield return new object[] { new double[] { 11.22, 22.33, 33.44, 44.55}, new float[] { 11.22f, 22.33f, 33.44f, 44.55f}};
yield return new object[] { new float[] { 11.22f, 22.33f, 33.44f, 44.55f}, new double[] { 11.22, 22.33, 33.44, 44.55}};
yield return new object[] { new string[] { "one", "two", "three", "four"}, new string[] { "one", "two", "three", "four"}};
yield return new object[]
{
new ulong[] {444543646457048001, 444543646457048002, 444543646457048003},
new ulong[] {544543646457048001, 544543646457048002, 544543646457048003}
}; // @formatter:on
}
}
#endregion
}
}
<file_sep>/Tommy.Serializer/Properties/AssemblyInfo.cs
using System.Runtime.CompilerServices;
// Allow internal visibility for testing purposes.
[assembly: InternalsVisibleTo("Tommy.Serializer.Tests")]
<file_sep>/Tommy.Serializer.Tests/TestModels/TestData2.cs
// ----------------------------------------------------------------------------
// -- Project : https://github.com/instance-id/Tommy.Serializer --
// -- instance.id 2020 | http://github.com/instance-id | http://instance.id --
// ----------------------------------------------------------------------------
using System.Diagnostics.CodeAnalysis;
namespace Tommy.Serializer.Tests
{
// Second fixture used alongside TestData when writing multiple objects to a
// single file; [TommyTableName] keeps its members under a distinct
// [nametest2] table. Attribute strings are part of the expected output.
[TommyTableName("nametest2")]
[ExcludeFromCodeCoverage]
public class TestData2
{
[TommyComment(" Comment for string property\n Testing second line comment\n" +
"This and subsequent items should appear after the sorted properties")]
public string TestStringComment2 { get; set; } = "Test String";
// Deliberately null to test null handling.
[TommyComment(@" This item should be a blank string : Testing null value")]
public string TestString2 { get; set; }
[TommyComment(@" Comment testing multiline verbatim strings #1
Comment testing multiline verbatim strings #2
Comment testing multiline verbatim strings #3")]
public string TestComment2 { get; set; } = "Test String";
[TommyComment(" Comment for bool property")]
public bool TestBoolComment2 { get; set; } = true;
public bool TestBool2 { get; set; }
[TommyComment(" Comment for int property")]
public int TestIntComment2 { get; set; } = 1;
public int TestInt2 { get; set; } = 1;
[TommySortOrder(1)]
[TommyComment(@" Comment for ulong property
This item should appear second as it's sort order is : 1")]
public ulong TestUlongComment2 { get; set; } = 448543646457048970;
public ulong TestUlong2 { get; set; } = 448543646457048970;
// Must be absent from the generated file.
[TommyIgnore]
public string TestIgnoreProperty2 { get; set; } = "I should not show up in the created file";
}
}
| fb7d69bda6505b5018e045592980230e85c07cec | [
"Markdown",
"C#"
] | 11 | C# | instance-id/Tommy.Serializer | 3d4c7cb34f5ba716813a234727ec19fecaa146c4 | 500bbd65810405b8a2f382f2d658e6168396dc62 |
refs/heads/master | <repo_name>rhodunda/programming-univbasics-3-methods-default-values-lab-atlanta-web-111819<file_sep>/lib/meal_choice.rb
# Prints a two-line description of a meal built from two vegetables and a
# protein. The original file had the `def` line commented out
# (`#meal_choice{veg1,veg2,protein}`), leaving a dangling `end` (a syntax
# error) and three undefined locals; this restores the intended method.
# Default values let the method be called with no arguments, per the
# "default values" lab. TODO(review): confirm the expected default strings
# against the lab's spec file.
def meal_choice(veg1 = "broccoli", veg2 = "carrots", protein = "tofu")
  puts "what a nutritious meal!"
  puts "A plate of #{protein} with #{veg1} and #{veg2}"
end
| d1935f6a04bea4f05b09210e5e5a6df929d52c25 | [
"Ruby"
] | 1 | Ruby | rhodunda/programming-univbasics-3-methods-default-values-lab-atlanta-web-111819 | 2a2295feca27e68154207ca0216d111c49e107a0 | c040c3b5f353d3ef125a5ec1d68f565b05876539 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.