branch_name
stringclasses
149 values
text
stringlengths
23
89.3M
directory_id
stringlengths
40
40
languages
listlengths
1
19
num_files
int64
1
11.8k
repo_language
stringclasses
38 values
repo_name
stringlengths
6
114
revision_id
stringlengths
40
40
snapshot_id
stringlengths
40
40
refs/heads/master
<file_sep><?php $path=$_SERVER['SCRIPT_FILENAME']; for ($i=0; $i<4; $i++) $path=dirname($path); $wpLoadPath=$path."/wp-load.php"; require_once $wpLoadPath; echo "<h1>Users</h1>"; $queryarg="%".$_REQUEST["q"]."%"; $query=$wpdb->prepare( "SELECT * ". "FROM wp_users ". "WHERE display_name LIKE %s", $queryarg ); $users=$wpdb->get_results($query); foreach ($users as $user) { $s=""; $s.="<div class='find-someone-result-entry'>"; $s.=get_avatar($user->ID); $s.=" <div class='find-someone-result-name'>"; $s.=" <a href='".get_author_posts_url($user->ID)."'>".$user->display_name."</a>"; $s.=" </div>"; $s.=" <div class='find-someone-result-info'>"; $s.=" ".date("F Y",strtotime($user->user_registered)); $s.=" </div>"; $s.="</div>\n"; echo $s; //$user->display_name."<br/>"; } <file_sep>find-something -------------- Search users using an ajax drop down.<file_sep><?php /* Plugin Name: Find Someone Plugin URI: http://github.com/limikael/find-someone Description: Find someone on the site. Version: 0.0.1 */ function find_someone() { wp_enqueue_script("find-someone"); $s.="<script>"; $s.="FIND_SOMETHING_RESULT_SCRIPT='".plugins_url()."/find-someone/find-someone-result.php';"; $s.="</script>"; $s.="<div class='find-someone'>"; $s.="<input type='text' class='find-someone-input' placeholder='Find someone...'/><br/>"; $s.="<div class='find-someone-result-wrapper'>"; $s.="<div class='find-someone-result'>"; $s.="hello"; $s.="</div></div></div>"; return $s; } add_shortcode("find-someone", "find_someone"); add_shortcode("find_someone", "find_someone"); function find_something_enqueue_scripts() { wp_register_script("find-someone",plugins_url()."/find-someone/find-someone.js"); wp_register_style("find-someone",plugins_url()."/find-someone/find-someone.css"); wp_enqueue_style("find-someone"); } add_action('wp_enqueue_scripts','find_something_enqueue_scripts');
ad33d424c45296db5740933dee1820550b97db68
[ "Markdown", "PHP" ]
3
PHP
limikael/find-someone
35dff2b06c8d223a7f4a39f407f4a588feb92232
1264bb4db1e8dfa2ac1a832826747469cf826d44
refs/heads/master
<file_sep>package com.example.karthik.sms_app; import android.Manifest; import android.app.PendingIntent; import android.content.Intent; import android.content.pm.PackageManager; import android.net.Uri; import android.os.Bundle; import android.support.design.widget.FloatingActionButton; import android.support.design.widget.Snackbar; import android.support.v4.app.ActivityCompat; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.Toolbar; import android.telephony.SmsManager; import android.util.Log; import android.view.View; import android.widget.AdapterView; import android.widget.Button; import android.widget.EditText; import android.widget.Toast; public class SendSMSActivity extends AppCompatActivity { Button sendSmsButton; EditText toPhoneNumber; EditText smsMessageET; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_send_sms); sendSmsButton = (Button) findViewById(R.id.btnSendSms); toPhoneNumber = (EditText) findViewById(R.id.editTextPhoneNo); smsMessageET = (EditText) findViewById(R.id.editTextSms); sendSmsButton.setOnClickListener(new View.OnClickListener(){ public void onClick(View v) { sendSms(); } }); if (ActivityCompat.checkSelfPermission(this, Manifest.permission.SEND_SMS) != PackageManager.PERMISSION_GRANTED) { Log.d("PLAYGROUND", "Permission is not granted, requesting"); ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.SEND_SMS}, 123); sendSmsButton.setEnabled(false); } else { Log.d("PLAYGROUND", "Permission is granted"); } } private void sendSms() { // ActivityCompat.requestPermissions(this,new String[]{ // Manifest.permission.SEND_SMS},1); // Intent intent=new Intent(getApplicationContext(),SendSMSActivity.class); // PendingIntent pi=PendingIntent.getActivity(getApplicationContext(), 0, intent,0); // String toPhone = toPhoneNumber.getText().toString(); String smsMessage = smsMessageET.getText().toString(); try { 
SmsManager smsManager = SmsManager.getDefault(); Toast.makeText(this, toPhone,Toast.LENGTH_LONG).show(); Toast.makeText(this, smsMessage,Toast.LENGTH_LONG).show(); smsManager.sendTextMessage(toPhone, null, smsMessage, null, null); Toast.makeText(this, "SMS Sent",Toast.LENGTH_LONG).show(); } catch(Exception e){ Toast.makeText(getApplicationContext(), "SMS faild, please try again.", Toast.LENGTH_LONG).show(); e.printStackTrace(); } // Log.i("Send SMS", ""); // Intent smsIntent = new Intent(Intent.ACTION_VIEW); // //// smsIntent.setData(Uri.parse("smsto:")); //// smsIntent.setType("vnd.android-dir/mms-sms"); // smsIntent.putExtra("address" , new String (toPhone)); // smsIntent.putExtra("sms_body" , smsMessage); // // try { // startActivity(smsIntent); // finish(); // Log.i("Finished sending SMS...", ""); // } // catch (android.content.ActivityNotFoundException ex) { // Toast.makeText(SendSMSActivity.this, // "SMS faild, please try again later.", Toast.LENGTH_SHORT).show(); // } } public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) { if (requestCode == 123) { if (grantResults.length > 0 && grantResults[0] == PackageManager.PERMISSION_GRANTED) { Log.d("PLAYGROUND", "Permission has been granted"); // textView.setText("You can send SMS!"); sendSmsButton.setEnabled(true); } else { Log.d("PLAYGROUND", "Permission has been denied or request cancelled"); // textView.setText("You can not send SMS!"); sendSmsButton.setEnabled(false); } } } public void goToInbox(View v) { Intent intent = new Intent(SendSMSActivity.this,RecieveSMSActivity.class); startActivity(intent); } }
1553fae4930f85ba2f5bf099c98c1caffa85b167
[ "Java" ]
1
Java
KarthikNani/SMS_APPP
56c19f4bf950e204611c4349f54ad6b8ed0ba123
0dee6672a0b93fa5c94da3bd124a93e8bb40e4d9
refs/heads/master
<repo_name>Colivhq/NewCoLive<file_sep>/utils/getRoutes.js const axios = require('axios'); const appRoutes = require('../routes.json'); const Prismic = require('prismic-javascript'); module.exports = async function getAppRoutes() { // Fetch blogPosts as object with languages as attributes and slugs as their values // fetchBlogPosts() // Iterate over each blog post const blogPosts = []; const pages = []; const routes = []; await Prismic.getApi(process.env.PRISMIC_ENDPOINT).then((api) => { // Get all Pages route api.query('').then(function(response) { response.results.forEach(function(page, index) { if(page.type == 'page') { pages.push({ uid: page.uid}) } if(page.type == 'blogpage') { blogPosts.push({ uid: page.uid}) } }); // response is the response object, response.results holds the documents }); }); const appartments = await axios.post('https://asia-east2-colivhq-backend.cloudfunctions.net/apiHomes', { //"operatorId": "HaF6mb19L6AzWVavPr5t", "neighborhoodId": "", "cityId": "" },{ headers: { Authorization: `Bearer ${process.env.COLIV_HQ_KEY}` } }) const homeList = []; appartments.data.data.forEach(function(appartment, index) { if(appartment.homeId != undefined) { homeList.push({ uid: appartment.homeId}) } }); // and every route defined in routes.json for (let k = 0; k < appRoutes.length; k += 1) { let routePath = appRoutes[k]; //const blogUid = blogPosts[k].uid; // replace the language placeholder with current language // If the route includes 'blog', iterate over all fetched posts in current language if (routePath.includes('/blog/')) { for (let postIndex = 0; postIndex < blogPosts.length; postIndex += 1) { routes.push(routePath.replace(/:uid/, blogPosts[postIndex].uid)); } } else if (routePath.includes('/findahome/')) { for (let postIndex = 0; postIndex < homeList.length; postIndex += 1) { routes.push(routePath.replace(/:uid/, homeList[postIndex].uid)); } } else if (routePath.includes('/page/')) { for (let postIndex = 0; postIndex < pages.length; postIndex += 1) { 
routes.push(routePath.replace(/:uid/, pages[postIndex].uid)); } } else { routes.push(routePath); } } // Return all available routes return routes; }; <file_sep>/plugins/vue-carousel.js import Vue from 'vue' import { Carousel, Slide } from 'vue-carousel'; //import owlCarousel from 'vue-owl-carousel'; Vue.component('carousel', Carousel); //Vue.component('owlCarousel', owlCarousel); Vue.component('slide', Slide);<file_sep>/README.md # NewCoLive NewCoLive Private template Template website for a new coliving business (based on ColivHQ backend and Prismic.io CMS) 1. Create a new repo using this as a template 2. Follow the step-by-step guide to create your one website for a coliving operator using Prismic.io as CMS, ColivHQ.com as the inventory repository, and this Nuxt.js frontend: https://colivhq.com/blog/how-to-create-a-website-for-your-coliving-business-with-home-inventory-listing-in-less-than-one-hour Visit us at colivhq.com for more info
c76be8b07594c7d6cfc967007e27e9c6efdb7901
[ "JavaScript", "Markdown" ]
3
JavaScript
Colivhq/NewCoLive
b7e6d0e2aaef8d901a7bf8009dce2b7096a0bdc3
2ac7c6c66a6e874e40b58e2356dca6a5037e3841
refs/heads/master
<repo_name>imronreviady/ez<file_sep>/ez-includes/application/models/Slide_m.php <?php class Slide_M extends MY_Model { protected $_table_name = 'slider'; protected $_primary_key = 'id'; protected $_order_by = 'id desc'; public $rules = array( 'title' => array( 'field' => 'title', 'label' => 'Title', 'rules' => 'trim|required|xss_clean' ), 'alias' => array( 'field' => 'alias', 'label' => 'Alias', 'rules' => 'trim|required|xss_clean' ), 'sliderType' => array( 'field' => 'sliderType', 'label' => 'Slider type', 'rules' => 'trim|required|xss_clean' ) ); public function get_new () { $slide = new stdClass(); $slide->title = ''; $slide->alias = ''; $slide->sliderType = ''; $slide->sliderSource = array(); $slide->customize = array(); return $slide; } }<file_sep>/ez-includes/application/controllers/Contact.php <?php defined('BASEPATH') OR exit('No direct script access allowed'); class Contact extends Frontend_Controller { /** * Set Default frontpage. * * @return mixed load view file */ public function index() { // Set page title for title tag $data['page_title'] = ez_line('contact'); // Set view file $data['main_content'] = 'contact'; // Load view file with data $this->load->view($this->pref->active_theme.'/layouts/main',$data); } } <file_sep>/ez-includes/application/controllers/Langswitch.php <?php class Langswitch extends Frontend_Controller { public function __construct() { parent::__construct(); // Load required helper $this->load->helper('url'); } /** * Switch language on session. * * @param string $language Language name * * @return mixed Change language on session data */ function switchLanguage($language = "") { $language = ($language != "") ? 
$language : "english"; $this->session->set_userdata('site_lang', $language); // Redirect user to referrer url redirect($_SERVER['HTTP_REFERER']); } } <file_sep>/ez-includes/application/models/Comment_m.php <?php class Comment_M extends MY_Model { protected $_table_name = 'comments'; protected $_primary_key = 'id'; protected $_primary_filter = 'intval'; protected $_order_by = 'created desc'; public $rules = array(); function __construct() { parent::__construct(); } public function get($id = NULL, $single = FALSE) { $this->db->select('comments.*, p.title as post, p.id as post_id, p.slug as post_slug, p.post_type'); $this->db->join('posts as p', 'comments.post_id=p.id', 'left'); $this->_primary_key = 'comments.id'; return parent::get($id, $single); } public function get_comments($post = null) { if(!is_null($post)) { $this->db->where('post_id', $post); } return $this->get(NULL, FALSE); } } <file_sep>/ez-includes/application/models/Category_m.php <?php class Category_M extends MY_Model { protected $_table_name = 'categories'; public $rules = array( 'title' => array( 'field' => 'title', 'label' => 'Title', 'rules' => 'trim|required|max_length[100]|xss_clean' ), 'slug' => array( 'field' => 'slug', 'label' => 'Slug', 'rules' => 'trim|required|max_length[100]|url_title|callback__unique_slug|xss_clean' ), 'parent_id' => array( 'field' => 'parent_id', 'label' => 'Parent category', 'rules' => 'trim|is_natural|xss_clean' ), 'order' => array( 'field' => 'order', 'label' => 'Order', 'rules' => 'trim|required|is_natural|xss_clean' ) ); public function get_new () { $article = new stdClass(); $article->title = ''; $article->slug = ''; $article->body = ''; $article->parent_id = 0; $article->order = ''; $map = directory_map(APPPATH . 'language', 1); foreach($map as $lang) { $lang = str_replace(DIRECTORY_SEPARATOR, '', $lang); if(is_dir(APPPATH . 
'language/'.$lang) and $lang != 'english') { $title = 'title_'.$lang; $body = 'body_'.$lang; $article->$title = ''; $article->$body = ''; } } return $article; } public function get($id = NULL, $single = FALSE, $published = FALSE, $category = NULL) { $this->db->select('categories.*, b.*'); $this->db->join('post_meta as b', 'categories.id=b.post_id AND b.type="category"', 'left'); $this->_primary_key = 'categories.id'; return parent::get($id, $single); } public function get_last($limit = 5) { $this->db->limit($limit); return $this->get(); } }<file_sep>/ez-includes/admin/categories/edit.php <?php defined('BASEPATH') OR exit('No direct script access allowed');?> <div class="row"> <?php echo form_open();?> <div class="col-md-12"> <div class="nav-tabs-custom"> <ul class="nav nav-tabs"> <li class="active"><a href="#english" data-toggle="tab">English</a></li> <?php $map = directory_map(APPPATH . 'language', 1); $d = 0; foreach($map as $lang) { $lang = str_replace(DIRECTORY_SEPARATOR, '', $lang); if(is_dir(APPPATH . 'language/'.$lang) and $lang != 'english') { ?> <li><a data-toggle="tab" href="#<?=$lang?>"><?=$lang?></a></li> <?php } $d++; } ?> </ul> <div class="tab-content"> <div id="english" class="tab-pane fade in active"> <div class="form-group" style="margin-right: 0; margin-left: 0"> <?php echo form_label(ez_line('title'),'title'); echo form_input('title',set_value('title',$category->title),'class="form-control"'); echo form_error('title', '<p class="text-danger">', '</p>'); ?> </div> <div class="form-group" style="margin-right: 0; margin-left: 0"> <label><?=ez_line('body')?></label> <?php echo form_textarea('body', set_value('body', $category->body, FALSE), array('class' => 'form-control tinymce', 'id' => 'body')); ?> <?php echo form_error('body', '<p class="text-danger">', '</p>'); ?> </div> </div> <?php $map = directory_map(APPPATH . 'language', 1); $c = 0; foreach($map as $lang) { $lang = str_replace(DIRECTORY_SEPARATOR, '', $lang); if(is_dir(APPPATH . 
'language/'.$lang) and $lang != 'english') { $title = 'title_'.$lang; $body = 'body_'.$lang; ?> <div id="<?=$lang?>" class="tab-pane fade"> <div class="form-group" style="margin-right: 0; margin-left: 0"> <label for="exampleInputEmail1"><?=ez_line('title')?></label> <?php echo form_input($title, set_value($title, $category->$title), array('class' => 'form-control')); ?> <?php echo form_error($title, '<p class="text-danger">', '</p>'); ?> </div> <div class="form-group" style="margin-right: 0; margin-left: 0"> <label><?=ez_line('body')?></label> <?php echo form_textarea($body, set_value($body, $category->$body, FALSE), array('class' => 'form-control tinymce', 'id' => 'body')); ?> <?php echo form_error($body, '<p class="text-danger">', '</p>'); ?> </div> </div> <?php $c++; } } ?> </div> </div> <div class="form-group"> <?php echo form_label(ez_line('slug'),'slug'); echo form_input('slug',set_value('slug',$category->slug),'class="form-control"'); echo form_error('slug', '<p class="text-danger">', '</p>'); ?> </div> <div class="form-group"> <?php $list = array(0 => 'No parent'); foreach (list_cats() as $option): if($option->parent_id == 0 and $option->id != @$this->uri->segment(4)) { $list[$option->id] = $option->title; } endforeach; echo form_label('Parent','parent_id'); echo form_dropdown('parent_id', $list, $category->parent_id); echo form_error('parent_id', '<p class="text-danger">', '</p>'); ?> </div> <div class="form-group"> <?php echo form_label(ez_line('order'),'order'); echo form_input('order',set_value('order',$category->order),'class="form-control"'); echo form_error('order', '<p class="text-danger">', '</p>'); ?> </div> <?php echo form_submit('submit', ez_line('save'), 'class="btn btn-primary btn-lg"');?> </div> <?php echo form_close();?> </div><file_sep>/ez-includes/application/controllers/admin/Shortcodes.php <?php class Shortcodes extends Admin_Controller { public function __construct () { parent::__construct(); } public function parse() { $code = 
$this->input->post('scode'); $data['content'] = $this->Shortcodes->parse($code); $this->load->ext_view('admin', 'layouts/shortcode',$data); } }<file_sep>/README.md # ez ini project ez banget <file_sep>/ez-includes/application/libraries/Widget.php <?php defined('BASEPATH') OR exit('No direct script access allowed'); class Widget { function __construct() { $this->ci = & get_instance(); log_message('debug', 'Widget Library: Initialized'); } function latest_posts($limit = 7, $category = null) { $data['posts'] = list_posts($limit, 0, $category); return $this->ci->load->view($this->ci->pref->active_theme . '/widgets/latest_posts', $data, TRUE); } function latest_comments($limit = 7) { $data['comments'] = latest_comments($limit); return $this->ci->load->view($this->ci->pref->active_theme . '/widgets/latest_comments', $data, TRUE); } function list_cats($expect = null) { $this->ci->db->select('*'); if($expect != null) { $this->ci->db->where_not_in('id', $expect); } $data['categories'] = $this->ci->db->get('categories')->result(); return $this->ci->load->view($this->ci->pref->active_theme . 
'/widgets/categories', $data, TRUE); } }<file_sep>/ez-content/themes/themeone/contact.php <div class="main"> <section class="module bg-dark-60 about-page-header parallax-bg" data-background="<?=theme_folder('themeone')?>assets/images/blog_bg.jpg"> <div class="container"> <div class="row"> <div class="col-sm-6 col-sm-offset-3"> <h2 class="module-title font-alt"><?=ez_line('contact')?></h2> </div> </div> </div> </section> <section class="module" id="contact"> <div class="container"> <div class="row"> <div class="col-sm-6 col-sm-offset-3"> <h2 class="module-title font-alt">Get in touch</h2> <div class="module-subtitle font-serif"></div> </div> </div> <div class="row"> <div class="col-sm-6 col-sm-offset-3"> <form id="contactForm" role="form" method="post" action="<?=base_url('home/contact')?>"> <div class="alert alert-success success_message" style="display: none"></div> <div class="alert alert-danger error_message" style="display: none"></div> <div class="form-group"> <label class="sr-only" for="name">Name</label> <input class="form-control" type="text" id="name" name="name" placeholder="Your Name*" required="required" data-validation-required-message="Please enter your name."/> <p class="help-block text-danger"></p> </div> <div class="form-group"> <label class="sr-only" for="email">Email</label> <input class="form-control" type="email" id="email" name="email" placeholder="Your Email*" required="required" data-validation-required-message="Please enter your email address."/> <p class="help-block text-danger"></p> </div> <div class="form-group"> <textarea class="form-control" rows="7" id="message" name="message" placeholder="Your Message*" required="required" data-validation-required-message="Please enter your message."></textarea> <p class="help-block text-danger"></p> </div> <div class="text-center"> <button class="btn btn-block btn-round btn-d" id="cfsubmit" type="submit">Submit</button> </div> </form> <div class="ajax-response font-alt" 
id="contactFormResponse"></div> </div> </div> </div> </section> <file_sep>/ez-includes/application/models/Contact_m.php <?php class Contact_M extends MY_Model { protected $_table_name = 'contact'; protected $_order_by = ''; public $rules = array(); }<file_sep>/ez-content/themes/ez/account/account_menu.php <style type="text/css"> .form-actions { margin-top: 15px; } </style> <ul class="nav nav-pills nav-stacked"> <li class="<?php echo is_profile() ? 'active' : null; ?>"><?php echo anchor('account/profile', ez_line('profile')); ?></li> <li class="<?php echo is_account_settings() ? 'active' : null; ?>"><?php echo anchor('account/settings', ez_line('account_settings')); ?></li> <?php if ($account->password) : ?> <li class="<?php echo is_account_password() ? 'active' : null; ?>"><?php echo anchor('account/password', ez_line('password')); ?></li> <?php endif; ?> <li class="<?php echo ($current == 'sign_out') ? 'active' : null; ?>"><?php echo anchor('account/sign_out', ez_line('logout')); ?></li> </ul><file_sep>/ez-includes/application/controllers/admin/Module.php <?php class Module extends Admin_Controller { public function __construct () { parent::__construct(); $this->load->model('Modules_m'); } public function index () { // Redirect unauthorized users if ( ! $this->authorization->is_permitted('retrieve_modules')) { $this->session->set_flashdata('access_error', ez_line('access_denied')); redirect('admin'); } // Fetch all posts $data['modules'] = $this->Modules_m->get(); $data['main_content'] = 'modules/index'; $data['page_title'] = ez_line('modules') . ' <span class="badge bg-aqua">' . count_table('modules') . '</span>'; $this->load->ext_view('admin', 'layouts/main',$data); } /** * Uninstall Module */ function uninstall($module) { if(is_demo() == FALSE) { $path = FCPATH . 
'ez-content/modules/'.$module; delete_files($path, true); if(rmdir($path)) { $db_module = $this->Modules_m->get_by('module_name', $module); $this->Modules_m->delete($db_module->id); // Set successfully flashdata $this->session->set_flashdata('message', 'Your module have been deleted.'); // redirect user to referrer url redirect($this->agent->referrer()); } else { // Set successfully flashdata $this->session->set_flashdata('error', 'Your module can not delete.'); // redirect user to referrer url redirect($this->agent->referrer()); } } else { // Set successfully flashdata $this->session->set_flashdata('error', 'This option not work in demo site.'); // redirect user to referrer url redirect($this->agent->referrer()); } } /** * Module settings */ public function setting($id) { $data['module'] = $this->Modules_m->get($id)->module_name; // Set page title $data['page_title'] = ucfirst($data['module']) . ' ' . ez_line('settings'); // Set view file $data['main_content'] = 'modules/setting'; // Load view file with data $this->load->ext_view('admin', 'layouts/main',$data); } /** * Update theme options on database. 
*/ public function update () { if(is_demo() == FALSE) { // Loop the post data in foreach foreach($this->input->post() as $key => $value){ // update current option value update_option($key, $value); } // Set successfully flashdata $this->session->set_flashdata('message', 'Your settings have been saved successfully.'); // redirect user to referrer url redirect($this->agent->referrer()); } else { // Set successfully flashdata $this->session->set_flashdata('error', 'This option not work in demo site.'); // redirect user to referrer url redirect($this->agent->referrer()); } } public function enable ($id) { if(is_demo() == FALSE) { $this->Modules_m->save(array('statue' => 'enable'), $id); $this->session->set_flashdata('message', ez_line('enabled', $this->lang->line('module') )); redirect('admin/module'); } else { // Set successfully flashdata $this->session->set_flashdata('error', 'This option not work in demo site.'); // redirect user to referrer url redirect($this->agent->referrer()); } } public function disable ($id) { if(is_demo() == FALSE) { $this->Modules_m->save(array('statue' => 'disable'), $id); $this->session->set_flashdata('message', ez_line('disabled', $this->lang->line('module') )); redirect('admin/module'); } else { // Set successfully flashdata $this->session->set_flashdata('error', 'This option not work in demo site.'); // redirect user to referrer url redirect($this->agent->referrer()); } } }<file_sep>/ez-includes/application/controllers/admin/Media.php <?php class Media extends Admin_Controller { public function __construct() { parent::__construct(); } function index() { // Redirect unauthorized users if ( ! 
$this->authorization->is_permitted('manage_media')) { $this->session->set_flashdata('access_error', ez_line('access_denied')); redirect($this->agent->referrer()); } $data['main_content'] = 'media/index'; $data['page_title'] = ez_line('media'); $this->load->ext_view('admin', 'layouts/main',$data); } }<file_sep>/ez-includes/application/controllers/admin/Categories.php <?php class Categories extends Admin_Controller { public function __construct() { parent::__construct(); } function index() { // Redirect unauthorized users if ( ! $this->authorization->is_permitted('retrieve_cats')) { $this->session->set_flashdata('access_error', ez_line('access_denied')); redirect($this->agent->referrer()); } $data['categories'] = $this->Category_m->get(); $data['main_content'] = 'categories/index'; $data['page_title'] = ez_line('categories') . ' <span class="badge bg-aqua">' . count_table('categories') . '</span>'; $this->load->ext_view('admin', 'layouts/main',$data); } public function edit ($id = NULL) { // Fetch a page or set a new one if ($id) { // Redirect unauthorized users if ( ! $this->authorization->is_permitted('update_cats')) { $this->session->set_flashdata('access_error', ez_line('access_denied')); redirect($this->agent->referrer()); } $data['category'] = $this->Category_m->get($id); count($data['category']) || $data['errors'][] = 'page could not be found'; $data['page_title'] = ez_line('edit', $this->lang->line('category') ); } else { // Redirect unauthorized users if ( ! 
$this->authorization->is_permitted('create_cats')) { $this->session->set_flashdata('access_error', ez_line('access_denied')); redirect($this->agent->referrer()); } $data['category'] = $this->Category_m->get_new(); $data['page_title'] = ez_line('add', $this->lang->line('category') ); } // Set up the form $rules = $this->Category_m->rules; $this->form_validation->set_rules($rules); // Process the form if ($this->form_validation->run($this) == TRUE) { $postArray = array( 'title', 'body', 'slug', 'parent_id', 'order' ); $map = directory_map(APPPATH . 'language', 1); foreach($map as $lang) { $lang = str_replace(DIRECTORY_SEPARATOR, '', $lang); if(is_dir(APPPATH . 'language/'.$lang) and $lang != 'english') { $title = 'title_'.$lang; $body = 'body_'.$lang; if(!empty($title) or !empty($body)) { array_push($postArray, $title, $body); } } } $data = $this->Category_m->array_from_post($postArray); foreach($map as $lang) { $lang = str_replace(DIRECTORY_SEPARATOR, '', $lang); if(is_dir(APPPATH . 'language/'.$lang) and $lang != 'english') { $title = 'title_'.$lang; $body = 'body_'.$lang; unset($data[$title]); unset($data[$body]); } } if(is_demo() == FALSE) { $post_id = $this->Category_m->save($data, $id); } else { // Set successfully flashdata $this->session->set_flashdata('error', 'This option not work in demo site.'); // redirect user to referrer url redirect($this->agent->referrer()); } $map = directory_map(APPPATH . 'language', 1); $i = 0; foreach($map as $lang) { $lang = str_replace(DIRECTORY_SEPARATOR, '', $lang); if(is_dir(APPPATH . 
'language/'.$lang) and $lang != 'english') { $title = 'title_'.$lang; $body = 'body_'.$lang; $title_val = $this->input->post($title); $body_val = $this->input->post($body); if(!empty($title_val) or !empty($body_val)) { if($id) { $data = array( $title => $title_val, $body => $body_val, 'type' => 'category' ); $new = FALSE; if(count($this->db->where('post_id', $post_id)->where('type', 'category')->get('post_meta')->result()) < 1) { $new = TRUE; } if($new) { $data = array( 'post_id' => $post_id, $title => $title_val, $body => $body_val, 'type' => 'category' ); $this->db->set($data); $this->db->insert('post_meta'); } else { $this->db->set($data); $this->db->where('post_id', $post_id); $this->db->update('post_meta'); } } else { $data = array( 'post_id' => $post_id, $title => $title_val, $body => $body_val, 'type' => 'category' ); if($i == 0) { $data = array( 'post_id' => $post_id, $title => $title_val, $body => $body_val, 'type' => 'category' ); $this->db->set($data); $this->db->insert('post_meta'); } else { $data = array( $title => $title_val, $body => $body_val, 'type' => 'category' ); $this->db->set($data); $this->db->where('post_id', $post_id); $this->db->update('post_meta'); } } } else { $data = array( 'post_id' => $post_id, $title => $title_val, $body => $body_val, 'type' => 'category' ); $this->db->set($data); $this->db->where('post_id', $post_id); $this->db->update('post_meta'); } } $i++; } $this->session->set_flashdata('message', ez_line('saved', $this->lang->line('category') )); redirect('admin/categories'); } $data['main_content'] = 'categories/edit'; $this->load->ext_view('admin', 'layouts/main',$data); } public function delete_multi () { // Redirect unauthorized users if ( ! 
$this->authorization->is_permitted('delete_cats')) { $this->session->set_flashdata('access_error', ez_line('access_denied')); redirect($this->agent->referrer()); } $id = $this->input->post('id'); if(is_demo() == FALSE) { $this->Category_m->delete_multi($id); $this->session->set_flashdata('message', ez_line('deleted', $this->lang->line('categories') )); redirect('admin/categories'); } else { // Set successfully flashdata $this->session->set_flashdata('error', 'This option not work in demo site.'); // redirect user to referrer url redirect($this->agent->referrer()); } } public function delete ($id) { // Redirect unauthorized users if ( ! $this->authorization->is_permitted('delete_cats')) { $this->session->set_flashdata('access_error', ez_line('access_denied')); redirect($this->agent->referrer()); } if(is_demo() == FALSE) { $this->Category_m->delete($id); $this->session->set_flashdata('message', ez_line('deleted', $this->lang->line('category') )); redirect('admin/categories'); } else { // Set successfully flashdata $this->session->set_flashdata('error', 'This option not work in demo site.'); // redirect user to referrer url redirect($this->agent->referrer()); } } public function _unique_slug ($str) { // Do NOT validate if slug already exists // UNLESS it's the slug for the current page $id = $this->uri->segment(4); $this->db->where('categories.slug', $this->input->post('slug')); ! 
$id || $this->db->where('categories.id !=', $id); $page = $this->Category_m->get(); if (count($page)) { $this->form_validation->set_message('_unique_slug', 'This %s is currently used for another category.'); return FALSE; } return TRUE; } }<file_sep>/ez-includes/application/models/Modules_m.php <?php class Modules_m extends MY_Model { protected $_table_name = 'modules'; }<file_sep>/ez-includes/application/controllers/admin/Dashboard.php <?php class Dashboard extends Admin_Controller { public function __construct() { parent::__construct(); } function index() { // Set view file $data['main_content'] = 'dashboard/index'; // Set page title $data['page_title'] = ez_line('dashboard'); // Load view file with data $this->load->ext_view('admin', 'layouts/main',$data); } } <file_sep>/ez-includes/application/controllers/admin/Slider.php <?php class Slider extends Admin_Controller { public function __construct () { parent::__construct(); } public function index () { // Redirect unauthorized users if ( ! $this->authorization->is_permitted('retrieve_sliders')) { $this->session->set_flashdata('access_error', ez_line('access_denied')); redirect($this->agent->referrer()); } // Fetch all posts $data['sliders'] = $this->Slide_m->get(); $data['main_content'] = 'sliders/index'; $data['page_title'] = ez_line('sliders'); $this->load->ext_view('admin', 'layouts/main',$data); } public function edit ($id = NULL) { // Fetch a post or set a new one if ($id) { // Redirect unauthorized users if ( ! $this->authorization->is_permitted('update_sliders')) { $this->session->set_flashdata('access_error', ez_line('access_denied')); redirect($this->agent->referrer()); } $data['slide'] = $this->Slide_m->get($id); count($data['slide']) || $data['errors'][] = 'slide could not be found'; $data['page_title'] = ez_line('edit', 'slider'); } else { // Redirect unauthorized users if ( ! 
$this->authorization->is_permitted('create_sliders')) { $this->session->set_flashdata('access_error', ez_line('access_denied')); redirect($this->agent->referrer()); } $data['slide'] = $this->Slide_m->get_new(); $data['page_title'] = ez_line('add', 'slider'); } $data['categories'] = $this->Category_m->get(); $data['posts'] = $this->Post_m->get(); // Set up the form $rules = $this->Slide_m->rules; $this->form_validation->set_rules($rules); // Process the form if ($this->form_validation->run($this) == TRUE) { $data = $this->Slide_m->array_from_post(array( 'title', 'alias', 'sliderType', 'sliderSource', 'customize' )); foreach($data as $key => $value){ if( is_array($data[$key]) ) { $data[$key] = base64_encode( serialize($value) ); } elseif( is_null($value) ) { unset( $key ); } } if(is_demo() == FALSE) { if($this->input->post()) { $slider_id = $this->Slide_m->save($data, $id); $this->session->set_flashdata('message', ez_line('saved', 'slider')); redirect('admin/slider'); } } else { // Set successfully flashdata $this->session->set_flashdata('error', 'This option not work in demo site.'); // redirect user to referrer url redirect($this->agent->referrer()); } } $data['main_content'] = 'sliders/edit'; $this->load->ext_view('admin', 'layouts/main',$data); } public function delete_multi () { // Redirect unauthorized users if ( ! $this->authorization->is_permitted('delete_sliders')) { $this->session->set_flashdata('access_error', ez_line('access_denied')); redirect($this->agent->referrer()); } $id = $this->input->post('id'); if(is_demo() == FALSE) { $this->Slide_m->delete_multi($id); $this->session->set_flashdata('message', ez_line('deleted', $this->lang->line('sliders') )); redirect('admin/slider'); } else { // Set successfully flashdata $this->session->set_flashdata('error', 'This option not work in demo site.'); // redirect user to referrer url redirect($this->agent->referrer()); } } public function delete ($id) { // Redirect unauthorized users if ( ! 
$this->authorization->is_permitted('delete_sliders')) { $this->session->set_flashdata('access_error', ez_line('access_denied')); redirect($this->agent->referrer()); } if(is_demo() == FALSE) { $this->Slide_m->delete($id); $this->session->set_flashdata('message', ez_line('deleted', $this->lang->line('page') )); redirect('admin/slider'); } else { // Set successfully flashdata $this->session->set_flashdata('error', 'This option not work in demo site.'); // redirect user to referrer url redirect($this->agent->referrer()); } } }<file_sep>/ez-includes/admin/apperance/edit_modal.php <div class="form-group"> <label for="edititemType">Select type</label> <select name="type" id="edititemType" class="form-control not"> <option value="">Select Type</option> <option value="home" <?=$item->type == 'home' ? ' selected' : null?>><?=ez_line('home')?></option> <option value="contact" <?=$item->type == 'contact' ? ' selected' : null?>><?=ez_line('contact')?></option> <option value="posts" <?=$item->type == 'posts' ? ' selected' : null?>><?=ez_line('posts')?></option> <option value="page" <?=$item->type == 'page' ? ' selected' : null?>>Page</option> <option value="category" <?=$item->type == 'category' ? ' selected' : null?>>Category</option> <option value="post" <?=$item->type == 'post' ? ' selected' : null?>>Post</option> <option value="custom" <?=$item->type == 'custom' ? 
' selected' : null?>>Custom link</option> </select> </div> <div class="form-group"> <label for="edittypeItems">Select item</label> <select name="edittypeItems" data-placeholder="Select item" id="edittypeItems" class="form-control not"> </select> </div> <div class="form-group"> <label for="edititemId">Item id</label> <input type="text" name="item_id" class="form-control" id="edititemId"/> </div> <div class="form-group"> <label for="edititemTitle">Title</label> <input type="text" name="title" class="form-control" id="edititemTitle"/> </div> <div class="form-group"> <label for="edititemUrl">link</label> <input type="text" name="link" class="form-control" id="edititemUrl"/> </div> <script type="text/javascript"> $(document).ready(function(){ var itemType = $("#edititemType :selected").val(); //alert(itemType); var postUrl = "<?= base_url() ?>admin/menu/list_items/" + itemType; if(itemType !== 'custom' && itemType !== 'home' && itemType !== 'contact' && itemType !== 'posts') { $.post(postUrl, { itemType: itemType }, function(data){ var result = '<option value="">Select item</option>'; $.each(data, function(i, data) { var link = '<?=base_url()?>' + itemType + '/'; link += data.id + "/" + data.slug; var curId = '<?=$item->item_id?>'; var isSel = ''; if(curId === data.id) { isSel = ' selected'; } result += "<option value='" + link + "' data-id='"+ data.id +"'" + isSel + ">" + data.title + "</option>"; $('#edititemTitle').val(data.title); $('#edititemId').val(data.id); $('#edititemUrl').val(link); }); $('#edittypeItems').html(result); }); } else { $('#edittypeItems').html("<option value=''>Select item</option>"); $('#edititemTitle').val('<?=$item->title?>'); $('#edititemId').val('0'); $('#edititemUrl').val('<?=$item->link?>'); } $("#edititemType").on('change', function() { var itemType = this.value; //alert(itemType); var postUrl = "<?= base_url() ?>admin/menu/list_items/" + itemType; if(itemType !== 'custom' && itemType !== 'home' && itemType !== 'contact' && itemType !== 
'posts') { $.post(postUrl, { itemType: itemType }, function(data){ var result = '<option value="">Select item</option>'; $.each(data, function(i, data) { var link = '<?=base_url()?>' + itemType + '/'; link += data.id + "/" + data.slug; result += "<option value='" + link + "' data-id='"+ data.id +"'>" + data.title + "</option>"; }); $('#edittypeItems').html(result); }); $('#edititemTitle').val(''); $('#edititemId').val(''); $('#edititemUrl').val(''); } else if(itemType == 'home') { $('#edittypeItems').html("<option value=''>Select item</option>"); $('#edititemTitle').val('Home'); $('#edititemId').val('0'); $('#edititemUrl').val('<?=base_url()?>'); } else if(itemType == 'posts') { $('#edittypeItems').html("<option value=''>Select item</option>"); $('#edititemTitle').val('Posts'); $('#edititemId').val('0'); $('#edititemUrl').val('<?=base_url('posts')?>'); } else if(itemType == 'contact') { $('#edittypeItems').html("<option value=''>Select item</option>"); $('#edititemTitle').val('Contact'); $('#edititemId').val('0'); $('#edititemUrl').val('<?=base_url('contact')?>'); } else { $('#edittypeItems').html("<option value=''>Select item</option>"); $('#edititemTitle').val(''); $('#edititemId').val('0'); $('#edititemUrl').val(''); } }); $("#edittypeItems").on('change', function() { var edititemTitle = $("#edittypeItems option:selected").text(); var edititemURL = this.value; var edititemId = $("#edittypeItems option:selected").attr('data-id'); $('#edititemTitle').val(edititemTitle); $('#edititemId').val(edititemId); $('#edititemUrl').val(edititemURL); }); }); </script><file_sep>/ez-includes/application/models/Siteconfig.php <?php class Siteconfig extends CI_Model { public function __construct() { parent::__construct(); } public function get_all() { return $this->db->get('config_data'); } }<file_sep>/ez-includes/application/config/ez_config.php <?php $config['site_name'] = 'ez CMS'; $config['demo_site'] = FALSE; $config['templates_path'] = 'ez-content/themes/'; 
$config['timezone'] = 'Africa/Cairo';<file_sep>/ez-includes/application/controllers/Seo.php <?php Class Seo extends CI_Controller { function sitemap() { $data['posts'] = $this->Post_m->get(); $data['pages'] = $this->Page_m->get(); header("Content-Type: text/xml;charset=iso-8859-1"); $this->load->ext_view('admin', 'sitemap',$data); } }<file_sep>/ez-includes/application/controllers/Retrieve_Shortcode.php <?php class Retrieve_Shortcode extends Frontend_Controller { public function __construct () { parent::__construct(); } public function retrieve() { $shortcode = '[' . $this->input->post('shortcode') . '/]'; $shortcode = str_replace('+', ' ', $shortcode); echo do_shortcode($shortcode); } }
9faa95339d0e737c7336ae8ae1022cbee7d6cfa0
[ "Markdown", "PHP" ]
23
PHP
imronreviady/ez
2834ecba3d0cf9272b4a16631fdb144c89e72dad
2ba479b8cef09c3160aa4f98169827bc26511ec4
refs/heads/master
<repo_name>Beau-Cross/IntegerSplitter<file_sep>/main.cpp #include <iostream> #include <cmath> using namespace std; void border(){ for (int i = 0; i < 50; i++){ cout << "-"; } cout << endl; } int findLength(int input){ int length; int inputC = input; float base = 10; for (int i = 1; i <= 20; i++){ input = inputC; int postPow = pow(base,i); input %= postPow; if (input == inputC){ length = i; i = 20; } } return length; } void fillArray(int numbers[], int length, int input){ int inputN = 0; float base = 10; for (int i = 1; i <= length; i++){ int postPow = pow(base, i); inputN = input%postPow; numbers[length-i] = inputN; input -= inputN; } } void calculations(int input,int numbers[]){ } int main(){ int input = -1; int length; border(); cout << "This program will split a positive integer into an array." << endl; cout << "What number would you like to convert? "; cin >> input; while (input < 0){ border(); cout << "Sorry, that number is invalid. Please choose a positive integer. "; cin >> input; } border(); //Initializing the Array length = findLength(input); int numbers[length] = {0}; //Giving Values to the Array fillArray(numbers, length, input); for (int i = 1; i <= length; i++){ cout << "numbers[" << i-1 << "] = " << numbers[i-1] << endl; } //Calculations based on Array return 0; }
2270ae0e19b028f33279133ea32c9cc760b6ac55
[ "C++" ]
1
C++
Beau-Cross/IntegerSplitter
2e6629ac952fe778f8cf5c3088e294fdb2a87ffd
23635c3619beba510509282646a345d0b6c4d5ca
refs/heads/master
<file_sep>import numpy as np import pandas as pd import torch import torch.nn as nn import torch.nn.functional as F import torch.optim as optim from sklearn.metrics import f1_score from sklearn.model_selection import train_test_split from torch.utils.data import DataLoader, Dataset from transformers import BertModel, BertTokenizer # https://medium.com/swlh/painless-fine-tuning-of-bert-in-pytorch-b91c14912caa class BertClassifier(nn.Module): def __init__(self, freeze_bert = True, hidden_size=1024, num_layers=25, num_class=2): super(BertClassifier, self).__init__() #Instantiating BERT model obeject self.bert_layer = BertModel.from_pretrained( 'bert-large-uncased', output_hidden_states=True, output_attentions=True, attention_probs_dropout_prob=0.5, hidden_dropout_prob=0.5 ) self.hidden_size = hidden_size self.num_layers = num_layers #Freeze bert layers if freeze_bert: for p in self.bert_layer.parameters(): p.requires_grad = False #Classification Layer self.weights = nn.Parameter(torch.rand(self.num_layers, 1)) self.dropouts = nn.ModuleList([ nn.Dropout(0.5) for _ in range(5) ]) self.fc = nn.Linear(self.hidden_size, num_class-1) def forward(self, seq, attn_masks): ''' Inputs: -seq: Tensor of shape [B,T] containing token ids of sequences -attn_masks: Tensor of shape [B, T] conatining attention masks to be used to avoid contribution of PAD tokens ''' #Feeding the input to BERT model to obtain contextualized representation cont_reps, _ = self.bert_layer(seq, attention_mask = attn_masks)[-2:] batch_size = seq.shape[0] ht_cls = torch.cat(cont_reps)[:, :1, :].view(self.num_layers, batch_size, 1, self.hidden_size) #Obtaining the representation of [CLS} head atten = torch.sum(ht_cls * self.weights.view(self.num_layers, 1, 1, 1), dim=[1, 3]) atten = F.softmax(atten.view(-1), dim=0) feature = torch.sum(ht_cls * atten.view(self.num_layers, 1, 1, 1), dim=[0, 2]) for i, dropout in enumerate(self.dropouts): if i==0: h = self.fc(dropout(feature)) else: h += self.fc(dropout(feature)) 
h = h / len(self.dropouts) return h def freeze_bert(self): for p in self.bert_layer.parameters(): p.requires_grad = False class EarlyStopping: """Early stops the training if validation loss doesn't improve after a given patience.""" def __init__(self, patience=7, verbose=False, delta=0): """ Args: patience (int): How long to wait after last time validation loss improved. Default: 7 verbose (bool): If True, prints a message for each validation loss improvement. Default: False delta (float): Minimun change in the monitored quantity to qualify as an improvement. Default: 0 """ self.patience = patience self.verbose = verbose self.counter = 0 self.best_score = None self.early_stop = False self.val_loss_min = np.Inf self.delta = delta def __call__(self, val_loss, model): score = -val_loss if self.best_score is None: self.best_score = score #self.save_checkpoint(val_loss, model) elif score < self.best_score + self.delta: self.counter += 1 print(f'EarlyStopping counter; {self.counter} out of {self.patience}') if self.counter >= self.patience: self.early_stop = True else: self.best_score = score #self.save_checkpoint(val_loss, model) self.counter = 0 def save_checkpoint(self, val_loss, model): '''Saves model when validation loss decrease.''' if self.verbose: print(f'Validation loss descreased ({self.val_loss_min:.6f}) --> {val_loss:.6f}. 
Saving model ...') torch.save(model.state_dict(), 'checkpoint.pt') self.val_loss_min = val_loss def train(net, criterion, opti, train_loader, val_loader, max_eps, patience, print_every): best_acc = 0 early_stopping = EarlyStopping(patience=patience, verbose=True) for ep in range(1, max_eps+1): if ep > 1: net.freeze_bert() for it, (_, seq, attn_masks, labels) in enumerate(train_loader): #Clear gradients opti.zero_grad() #Converting these to cuda tensors seq, attn_masks, labels = seq.cuda(), attn_masks.cuda(), labels.cuda() # freeze bert layer from the 2 iteration #Obtaining the logits from the model logits = net(seq, attn_masks) #Computing loss loss = criterion(logits.squeeze(-1), labels.float()) #Back propagating the gradients loss.backward() #Optimization step opti.step() if (it+1) % print_every == 0: acc = get_accuracy_from_logits(logits, labels) print("Iteration {} of epochs {} complete. Loss : {} Accuracy : {}".format(it+1, ep, loss.item(), acc)) # print score train_f1, train_acc, train_loss = evaluate(net, criterion, train_loader) print("Epoch {} complete! Train F1 : {}, Train Accuracy : {}, Train Loss : {}".format(ep, train_f1, train_acc, train_loss)) val_f1, val_acc, val_loss = evaluate(net, criterion, val_loader) print("Epoch {} complete! 
Validation F1 : {}, Validation Accuracy : {}, Validation Loss : {}".format(ep, val_f1, val_acc, val_loss)) early_stopping(val_loss, net) if val_acc > best_acc: print("Best validation accuracy improved from {} to {}".format(best_acc, val_acc)) best_acc = val_acc if early_stopping.early_stop: print("Early stopping") break def get_accuracy_from_logits(logits, labels): probs = torch.sigmoid(logits.unsqueeze(-1)) soft_probs = (probs > 0.5).long() acc = (soft_probs.squeeze() == labels).float().mean() return acc def get_class_from_logits(logits): probs = torch.sigmoid(logits.unsqueeze(-1)) return (probs > 0.5).long().squeeze().tolist() def evaluate(net, criterion, dataloader): net.eval() mean_acc, mean_loss = 0, 0 count = 0 y, y_pred = [], [] with torch.no_grad(): for _, seq, attn_masks, labels in dataloader: seq, attn_masks, labels = seq.cuda(), attn_masks.cuda(), labels.cuda() logits = net(seq, attn_masks) mean_loss += criterion(logits.squeeze(-1), labels.float()).item() mean_acc += get_accuracy_from_logits(logits, labels) count += 1 y_pred += get_class_from_logits(logits) y += labels.tolist() f1 = f1_score(y, y_pred, average=None)[0] return f1, mean_acc / count, mean_loss / count class TweetDataset(Dataset): def __init__(self, df, maxlen): #Store the contents of the file in a pandas dataframe self.df = df #Initialize the BERT tokenizer self.tokenizer = BertTokenizer.from_pretrained('bert-large-uncased', do_lower_case=True) self.maxlen= maxlen def __len__(self): return len(self.df) def __getitem__(self, index): #Selecting the sentence and label at the specified index in the dataframe Id = self.df.loc[index, 'id'] sentence = self.df.loc[index, 'text'] label = self.df.loc[index, 'target'] #Preprocessing the text to be suitable for BERT tokens = self.tokenizer.tokenize(sentence) tokens = ['[CLS]'] + tokens + ['[SEP]'] if len(tokens) < self.maxlen: tokens = tokens + ['[PAD]' for _ in range(self.maxlen - len(tokens))] else: tokens = tokens[:self.maxlen-1] + ['[SEP]'] 
tokens_ids = self.tokenizer.convert_tokens_to_ids(tokens) tokens_ids_tensor = torch.tensor(tokens_ids) #Obtaining the attention mask i.e a tensor containing 1s for no padded tokens and 0s for padded ones attn_mask = (tokens_ids_tensor != 0).long() return Id, tokens_ids_tensor, attn_mask, label # http://jalammar.github.io/a-visual-guide-to-using-bert-for-the-first-time/ def main(train_loader, valid_loader, freeze_bert=False, lr=1e-5, print_every=2000, max_eps=5, patience=1): net = BertClassifier(freeze_bert = freeze_bert) criterion = nn.BCEWithLogitsLoss() # AdamW param_optimizer = list(net.named_parameters()) no_decay = ['bias', 'LayerNorm.bias', 'LayerNorm.weight'] optimizer_grouped_parameters = [ {'params': [p for n, p in param_optimizer if not any(nd in n for nd in no_decay)], 'weight_decay': 0.01}, {'params': [p for n, p in param_optimizer if any(nd in n for nd in no_decay)], 'weight_decay': 0.0} ] opti = optim.AdamW(optimizer_grouped_parameters, lr=lr, eps=1e-6) net.cuda() train(net, criterion, opti, train_loader, valid_loader, max_eps, patience, print_every) return net def predict(net, test_loader): y_pred = {} for ids, seq, attn_masks, _ in test_loader: #Converting these to cuda tensors ids, seq, attn_masks = ids.cuda(), seq.cuda(), attn_masks.cuda() #Obtaining the logits from the model logits = net(seq, attn_masks) for Id, label in zip(ids, get_class_from_logits(logits)): y_pred[Id.long().item()] = label return pd.DataFrame([[index, label] for (index, label) in sorted(y_pred.items(), key=lambda x: x[0])], columns=('id', 'target')) if __name__ == "__main__": df, df_test = pd.read_csv('./data/train.csv'), pd.read_csv('./data/test_leak.csv') df_train, df_valid = train_test_split(df, test_size=0.15, random_state=42, stratify=df.target.values.tolist()) df_train, df_valid = df_train.reset_index(), df_valid.reset_index() maxlen, batch_size = 40, 8 #Creating instances of training and validation set train_set = TweetDataset(df = df, maxlen = maxlen) test_set = 
TweetDataset(df = df_test, maxlen = maxlen) #Creating intsances of training and validation dataloaders train_loader = DataLoader(train_set, batch_size = batch_size, num_workers = 5) test_loader = DataLoader(test_set, batch_size = batch_size, num_workers = 5) net = main( train_loader, test_loader, freeze_bert=False, lr=1e-5, print_every=2000, max_eps=5, patience=1 ) submit = predict(net, test_loader) submit.to_csv('./output/submit.csv', index=None)
fac8bdd7ad98600b35e200ea43433b01180949ec
[ "Python" ]
1
Python
FooQoo/bert-torch
5876e09391fe3b2f9e8b6460820db779cd9b8dfa
cd8343356bb064c98877a67079d0b8a555b5578f
refs/heads/master
<file_sep># PhD project meeting notes 2020-09-02 Attendees: <NAME>, Gib ## Agenda * properties of ewas paper * Item2 * AOB ## properties of ewas paper ### Calculating standardised betas Can we go over how to calculate beta per sd increase __NOTES:__ Some meeting notes ## Item2 Some pre-meeting notes __NOTES:__ Some meeting notes ## AOB * AOB item1<file_sep># useful functions - mainly for tidying data new_load <- function(file) { temp_space <- new.env() var <- load(file, temp_space) out <- get(var, temp_space) rm(temp_space) return(out) } comma <- function(x) format(x, digits = 2, big.mark = ",") tidy_nums <- function(df) { df[] <- lapply(df, comma) return(df) } tidy_colnames <- function(df) { colnames(df) <- gsub("_", "-", colnames(df)) return(df) } num_to_text <- function(x, start_of_sentence = FALSE) { if (!x%%1 == 0) warning("X is not an integer") if (start_of_sentence) { out <- numbers_to_words(x) out <- stringr::str_to_sentence(out) } else { if (x < 11 & x > -1) { out <- numbers_to_words(x) } else { out <- x } } return(out) }<file_sep># PhD project meeting notes 2020-05-12 Attendees: <NAME>, Gib ## Agenda * Funded extension * ewas-gwas comparison simulations and paper * AOB ## Funded extension Think I should just apply for the full 6 months extension. Wellcome has already put the money in the account... ## ewas-gwas comparison simulations and paper Goal of simulations: Help understand the scenarios for which there is no more overlap of EWAS and GWAS genes/pathways than expected by chance. 
Simulation 1: Model how changes in percentage of EWAS hits that are causal influence overlap overview: X randomly sampled causal genes Y randomly sampled consequential genes N GWAS genes (N determined by empirical data) are randomly sampled from X N EWAS genes (N determined by empirical data) are randomly sampled from X and Y at proportions determined by a varying parameter, ECaG Test: Fisher's Exact test to determine if there is more overlap between EWAS genes/pathways and GWAS genes/pathways than expected (expected determined from the total number of genes/pathways). Simulation 2: Model how changes in overlap between causal and consequential genes influence overlap See Y for illustration overview: GWAS and EWAS genes taken from the empirical data X causal genes Y consequential genes Causal genes = GWAS genes + randomly sample from all genes to make up X causal genes Consequential genes = EWAS genes (except those also GWAS genes) + randomly sample from all genes that aren't causal genes All trait genes = Causal genes + consequential genes Test: Fisher's exact to determine if there is more overlap between EWAS genes/pathways and GWAS genes/pathways than expected (expected determined from the number of genes from 'all trait genes') ### Thinking about simulation 4 __IGNORE THIS GIB__ my ramblings here won't really make sense without me to explain them known parameters: All genes (AG) N trait genes (NG) gwas genes (GG) ewas genes (EG) varying: causal and consequential gene overlap (CaConO) end goal: Causal genes (CaG) consequential genes (ConG) additional terms key: overlapping genes (OG) Trait genes (TG) --> All causal and consequential genes gwas only genes (GOG) ewas only genes (EOG) casual ewas genes (CaEG) consequential ewas genes (ConEG) leftover genes (LG) Overlapping causal and consequential genes (CaConG) N. 
= number of OG = EG in GG GOG = GG not in OG EOG = EG not in OG LG = AG not in GG and AG not in EG CaEG = OG and sample N.EOG x CaConO genes from EOG CaG = CaEG and GOG and sample NG - N.CaEG - N.GOG genes from LG ConEG = EG - CaEG CaConG = CaEG and sample (N.CaG - N.CaEG) * CaConO from CaG not in CaEG ConG = CaConG and ConEG and sample NG - N.CaConG - N.ConEG from LG not in CaG and LG not in ConEG allgenes = CaG and ConG <file_sep><style> body { text-align: justify} </style> Below is a list of publications I have been involved with throughout my PhD, some of which is presented in this thesis. I conducted the vast majority of the work, but in certain scenarios others also contributed and I have detailed this in contributions statements at the start of each chapter. The first and senior authors for each piece of work have agreed on my contributions detailed in these statements. <br> \textbf{Relevant to this thesis} <br> Battram,T., Richmond,R.C., Baglietto,L., Haycock,P.C., Perduca,V., Bojesen,S.E., Gaunt,T.R., Hemani,G., Guida,F., Carreras-Torres,R., et al. (2019) Appraising the causal relevance of DNA methylation for risk of lung cancer. Int. J. Epidemiol. <br> Battram,T., Gaunt,T.R., Speed,D., Timpson,N.J. and Hemani,G. (2020) Exploring the variance in complex traits captured by DNA methylation assays. bioRxiv <br> Battram,T., Yousefi,P., Crawford,C., Prince,C., Babaei,M., Khodabakhsh,S., Whitehurst,O., Mahoney,L., Hemani,G., Gaunt,T.R., et al. The EWAS Catalog: a database of epigenome-wide association studies. Submitted to Bioinformatics November 2020. <br> \textbf{Not relevant to this thesis} <br> Battram,T., Hoskins,L., Hughes,D.A., Kettunen,J., Ring,S.M., Smith,G.D. and Timpson,N.J. (2019) Coronary artery disease, genetic risk and the metabolome in young individuals. Wellcome Open Res. <br> Howe,L.J., Battram,T., Morris,T.T., Hartwig,F.P., Hemani,G., Davies,N.M. and Smith,G.D. (2020) Assortative mating and within-spouse pair comparisons. bioRxiv. 
<br> <NAME>., McCartney,D.L., Patxot,M., Anchieri,L., Battram,T., Christiansen,C., Costeira,R., Walker,R.M., Morris,S.W., Campbell,A., et al. (2020) Bayesian reassessment of the epigenetic architecture of complex traits. Nat. Commun. <br> Jamieson,E., Korologou-Linden,R., Wootton,R.E., Guyatt,A.L., Battram,T., Burrows,K., Gaunt,T.R., Tobin,M.D., Munafò,M., <NAME>., et al. (2020) Smoking, DNA Methylation, and Lung Function: a Mendelian Randomization Analysis to Investigate Causal Pathways. Am. J. Hum. Genet.<file_sep># PhD project meeting notes 2020-08-18 Attendees: <NAME>, Chaz ## Agenda * Enrichment analyses for properties of EWAS project ## Enrichment analyses for properties of EWAS project * What I want: give you a list or multiple lists of CpGs and then you do enrichment analyses on those and give me the output * What does LOLA output? + Does it do genomic region as well as epigenetic mark enrichment? Lots of different things -- Chaz can output everything and we can run through what may or may not be significant * Is there a recommended number of CpGs that should be input? No - Chaz previously worked on analyses using roughly 100 sites * When do you think you can do the work? Starting asap! * Are you ok to have all the scripts on git? 
+ Need this for thesis + Will start with separate repo on gitlab that’ll be private + Then will move this to github when all finished Will make a separate git repo <file_sep>\textbf{ALSPAC} - Avon Longitudinal Study of Parents and Children \newline \textbf{ARIES} - Accessible Resource for Integrated Epigenomics Studies \newline \textbf{AUC} - area under the curve \newline \textbf{BMI} - body mass index \newline \textbf{CCHS} - Copenhagen City Heart Study \newline \textbf{CI} - confidence interval \newline \textbf{DMP} - differentially methylated position \newline \textbf{DMR} - differentially methylated region \newline \textbf{EFO} - experimental factor ontology \newline \textbf{ENCODE} - Encyclopedia of DNA Elements \newline \textbf{EPIC} - European Prospective Investigation into Cancer and Nutrition \newline \textbf{EWAS} - epigenome-wide association study \newline \textbf{FDR} - false discovery rate \newline \textbf{GEO} - Gene Expression Omnibus \newline \textbf{GO} - Gene Ontology \newline \textbf{GoDMC} - Genetics of DNA Methylation Consortium \newline \textbf{GTEx} - Gene-Tissue Expression (consortium) \newline \textbf{GWAS} - genome-wide association study \newline \textbf{h\textsuperscript{2}} - narrow-sense heritability \newline \textbf{H\textsuperscript{2}} - broad-sense heritability \newline \textbf{h\textsuperscript{2}\textsubscript{EWAS}} - the proportion of trait variation captured by DNA methylation commonly measured in epigenome-wide association studies \newline \textbf{h\textsuperscript{2}\textsubscript{SNP}} - SNP-heritability \newline \textbf{HM450 array} - Illumina Infinium HumanMethylation450 BeadChip \newline \textbf{HMEPIC array} - Illumina Infinium HumanMethylationEPIC BeadChip \newline \textbf{HR} - hazard ratio \newline \textbf{IQR} - interquartile range \newline \textbf{KEGG} - Kyoto Encyclopedia of Genes and Genomes \newline \textbf{LD} - linkage disequilibrium \newline \textbf{LOLA} - Locus Overlap Analysis \newline \textbf{MCCS} - 
Melbourne Collaborative Cohort Study \newline \textbf{MR} - Mendelian randomization \newline \textbf{MRM} - methylation relationship matrix \newline \textbf{mQTL} - methylation quantitative trait loci \newline \textbf{NOWAC} - Norwegian Women and Cancer \newline \textbf{NSHDS} - Northern Sweden Health and Disease Study \newline \textbf{OR} - odds ratio \newline \textbf{PACE} - Pregnancy and Childhood Epigenetics (consortium) \newline \textbf{QC} - quality control \newline \textbf{REML} - restricted maximum likelihood \newline \textbf{ROC} - receiver operating characteristic \newline \textbf{SNP} - single nucleotide polymorphism \newline \textbf{SV} - surrogate variable \newline \textbf{SVA} - surrogate variable analysis \newline \textbf{TAG} - Tobacco and Genetics (consortium) \newline \textbf{TCGA} - The Cancer Genome Atlas \newline \textbf{TRICL-ILCCO} - Transdisciplinary Research in Cancer of the Lung and The International Lung Cancer Consortium \newline<file_sep># PhD project meeting notes 2020-07-01 Attendees: <NAME>, Gib, <NAME>, Nic ## Agenda * General updates * Run through of ewas-gwas comparison paper ## General updates * Currently inputting data into the EWAS Catalog + Don't think this will take a huge amount of time - roughly 1 month I think + As technical changes have been made, I can get going with scripts to analyse the data as soon as the manuscript for the ewas-gwas comparison paper is finished * Started reformatting results chapters ## Run through of ewas-gwas comparison paper see [ewas-gwas-comparison-paper.pptx](ewas-gwas-comparison-paper.pptx) (or click __[here](ewas-gwas-comparison-paper.pdf)__ to view pdf on github) __NOTES:__ * We can never tell if using both EWAS and GWAS is redundant because we're not capturing all of the genome or all of the methylome. But what we can say is if they tend to be sampling from the same distribution or not. 
So if there were 100 genes related to a trait, GWAS had the power to pick up 10 and EWAS had the power to pick up 20 then you'd expect there to be at least 2 genes that overlap. If they're not sampling from the same distribution e.g. if EWAS is sampling from just downstream genes and GWAS is just sampling from upstream genes then you'd expect lower overlap and if EWAS was sampling almost randomly from the genome (because of confounding) then you'd also expect lower overlap. * When assessing physical overlap, could do some sensitivity analyses to change the distance and see if that really alters the number of overlapping sites found * When assessing pathway overlap, we could use EpiGraphDB to extract pathways * It might be a good idea to make the pathway overlap of all GWAS and EWAS available somehow. Could do so via epigraphdb with an R package that links to the data through some API. * Write manuscript in an Results-Methods format * Make the intro as short as possible as the concept is pretty straight forward and the methods need more explaining<file_sep># PhD project meeting notes 2020-07-10 Attendees: <NAME>, Gib ## Agenda * h<sup>2</sup><sub>EWAS</sub> paper phenotypes * ewas-gwas comparison simulations * AOB ## h<sup>2</sup><sub>EWAS</sub> paper phenotypes Phenotypes are cleaned up. Had to chuck a few out because they're completely meaningless. Such as phenotypes that reflect the accuracy of some coding tool. Still have 400 phenotypes though Need a way to make data available. Was thinking zenodo, already have an account, but main issue is uploading the data takes FOREVER as my upload speed isn't very fast. Could use bc4 and their zenodo's API to make upload speed faster? Have also added in supplementary table with these phenotypes and number of hits each phenotype has at the two main P val thresholds used and updated the numbers in the paper. ## ewas-gwas comparison simulations ### Using epigraphDB * Maps genes to pathways via proteins. 
This restricts to protein coding genes so the number of genes reduces from roughly 60000 to roughly 18000. Don't think this is really an issue - as long as method across study types (EWAS and GWAS) is the same... * It's unclear how they are mapping protein-protein interactions. For now can use their defaults, but need to find out what these are... * Can map protein-protein interactions via an intermediate protein. Again, doesn't really matter. * Most proteins are not part of a reactome pathway, which I thought combined various other pathway databases, but on closer inspection it doesn't. + How they add pathway info: "An external domain expert provides his or her expertise, a curator formalizes it into the database structure, and an external domain expert reviews the representation" * From this I think the databases can be used together, BUT it may look a little better and read easier if all our analysis went through epigraphdb -- so we'd have reactome as our pathway database and a combination of intact and stringdb as our protein interaction database. * Main issue is understanding how epigraphdb defines protein-protein interactions ### Results narrative Not sure adding simulations containing actual empirical data adds much. If we can't make it clear exactly what these simulations mean or how they fit in by the end of the meeting I want to drop them. Results narrative without it: 1. Little physical overlap 2. These scenarios would mean we would likely detect overlap 3. We didn't detect overlap (thus either no meaningful overlap or too little to detect) 4. There is more overlap between EWAS and GWAS of different traits in some cases Results narrative with extra simulations: 1. Little physical overlap 2. These scenarios would mean we would likely detect overlap 3. We didn't detect overlap 4. 
From the empirical data, it is likely the overlap between causal and consequential genes is this ([simple_simulation4_gene.pdf](simple_simulation4_gene.pdf), [simple_simulation4_go.pdf](simple_simulation4_go.pdf), [simple_simulation4_kegg.pdf](simple_simulation4_kegg.pdf)), but that depends on other parameters too... 5. There is more overlap between EWAS and GWAS of different traits in some cases __NOTES__: - Other set of simulations do show something -- or could be useful at least. They are concerned with the underlying architectures! - For each trait characterise using a radar plot - Change sim4 to see if you get different distributions when you add in a trait that has loads of gwas and ewas gene overlap! - If there is some change then need to make the 'n-genes' parameter more dynamic as it looks as though the number of genes is just drowning out any signal - Also need to check how much overlap is due to non-coding regions --> Can just use hgnc mappings to check this - Might be interesting to check if there is some kind of common theme in terms of the genes that do overlap ## AOB Turned down job offer Streamlining analysis in python...<file_sep># --------------------------------------------- # Using ieugwasr::ld_clump() # --------------------------------------------- # This script just gives an example of where the # ieugwasr::ld_clump() function did not work pkgs <- c("tidyverse", "gwasvcf", "ieugwasr") lapply(pkgs, require, character.only = T) gwasdb_path <- "/mnt/storage/private/mrcieu/data/IGD/data/public" # --------------------------------------------- # extract data # --------------------------------------------- set_bcftools(path = genetics.binaRies::get_bcftools_binary()) id <- "ukb-d-20542" filename <- paste0(id, ".vcf.gz") vcffile <- file.path(gwasdb_path, id, filename) vcf <- query_gwas(vcffile, pval=5e-8) dat <- vcf_to_tibble(vcf) dat$pval <- 10^(-dat$LP) path_to_ref <- "data/1kg-data/EUR" ## CHANGE THIS!! 
clumped_dat <- ld_clump( dplyr::tibble(rsid=dat$rsid, pval=dat$pval, id=dat$id), plink_bin = genetics.binaRies::get_plink_binary(), bfile = path_to_ref )<file_sep># PhD project meeting notes 2021-06-16 Attendees: <NAME>, Gib ## Agenda * [ewas-gwas comp](#item1) * [properties ewas](#item2) * [AOB](#aob) ## EWAS GWAS comp <a name="item1"></a> any updates on paper? - notes on paper __NOTES:__ Some meeting notes ## properties of ewas code review <a name="item2"></a> Could someone in team review the code?? __NOTES:__ Some meeting notes ## Power analyses HOW TO DO?!? ## AOB <a name="aob"></a> * AOB item1<file_sep># PhD project meeting notes 2020-10-02 Attendees: <NAME>, Gib ## Agenda * Properties of EWAS work * Updating ewas-gwas comparison scripts * Updates on everything else * AOB ## Properties of EWAS work Charlie ran analysis for each of the 6 LOLA annotation types If we just look at "encode segmentation" then that would make things easiest I think in terms of talking about genomic regions. [encode segmentation results](encode_segmentation_or_plots.pdf) Of roughly 10,000 sites at p<1e-7, roughly 6000 are hypomethylated in relation to trait... If DNAm changes are tagging changes in gene expression, then this is what I'd expect given enrichment for transcribed genes + TSS How to present these results concisely?? - see notes __NOTES:__ Things seem same no matter how portion up the CpG sites (i.e. no difference between "all" CpGs and "replicated") Double check that Charlie sent all results regardless of P value etc. -- not many cell types in encode_segmentation collection Do chromatin states stuff (ENCODE and ROADMAP) Present results as something like: There is enrichment across these regions (FIGURE(S)). Then can discuss why this might be. Enrichment at TFBS fits with notion that TFs are what are altering DNAm in relation to traits -- confounding + reverse causation. Also fits with GoDMC --> CpGs of TransQTLs in TFBS too... 
## Updating ewas-gwas comparison scripts Updating scripts to make second run through easier. * Is there a way to search the studies data for the open gwas project like the website? Or do I need to come up with my own "fuzzy" matching code in R? * Think opengwas project is missing data from a big smoking/alcohol gwas: [pmid=30643251](https://pubmed.ncbi.nlm.nih.gov/30643251/) -- send details * Also, the "ieu-b" IDs aren't present here: "/mnt/storage/private/mrcieu/data/IGD/data/public" - is extracting data using that directory reproducible (will it keep being updated??) -- just use the R package!!! * Smoking phenotypes... + miss phenotyped former vs never + current vs never EWAS == ever smoked regularly GWAS? -- should be fine. Could do one of two things: 1. supplementary analyses looking at another smoking GWAS. 2. Just use the results from the "all-vs-all" pathway enrichment correlation stuff! ## Updates on everything else - Will email Nic + Tom as still no comments on ewas-gwas comparison paper or any other chapters - Still waiting on others for EWAS Catalog data -- emailed Paul about it as need his GEO data - Chapter 7 has been edited into the thesis -- Going to have a quick run through then send it over to you. Will highlight things that need to be checked -- 1. acknowledgement of others work, 2. whether to move datasets into "Data sources" section. + Chapter 7 is also very long because of all the supplementary figures and tables that needed to be included - Discussion comments __NOTES:__ ## AOB * Mock vivas: + Matt + Josine + Lavinia + <NAME> + Gemma + End of november! * Find out about whether the data.bris data has been accepted! and then can submit h2ewas paper! <file_sep># PhD project meeting notes 2020-06-17 Attendees: <NAME>, Gib ## Agenda * thesis intro * ewas-gwas comparison simulations * AOB ## thesis intro Finished. Worth sending to you first? 
## ewas-gwas comparison simulations As expected I screwed up the analysis for simulations we checked last time. Here are new results: [methods_simulations_overall_res.pdf](methods_simulations_overall_res.pdf) and [methods_test_auc_plot.pdf](methods_test_auc_plot.pdf) see [simulations_flowchart.pdf](simulations_flowchart.pdf) for how the "pathway down" approach looks Results from old sims here: [methods_test_pathway_down_auc_plot.pdf](methods_test_pathway_down_auc_plot.pdf). Simulations including correlation between pathway enrichment scores are on their way! Want to be finished with this part of the project asap. I'm happy with the simulations and think the manuscript can go: 1. Region overlap 2. Simulations to check best method (and that a method could work) for gene/pathway overlap 3. gene/pathway overlap 4. gene/pathway overlap across all GWAS (+ EWAS) Sound good? Also need to add in a new phenotype: blood pressure. To-do: 1. Check the rho-p results 2. If not predictive then stick with gene overlap 3. Extract GWAS of same trait and assess gene/pathway overlap at varying p thresholds to determine p threshold going forward 4. Then assess overlap of all EWAS and all GWAS 5. Write up and send paper round 6. Think about journals to publish this in - potentials include Genome Biology, Genome Research, Nat Comms, elife<file_sep># PhD project meeting notes 2021-03-12 Attendees: <NAME>, Gib ## Agenda * [ewas-gwas comparison paper](#item1) * [Item2](#item2) * [AOB](#aob) ## ewas-gwas comparison paper <a name="item1"></a> ### New traits Only 6 more traits - lots more EWAS of N>4500, but just weren't viable Excluded some new studies due to trait duplication and excluded an EWAS of homocysteine GRS/single mutation ### Overlap results [region overlap plot](all_traits_overlap_bar.pdf) New [gene overlap table](gene-overlap-tab.png) New [geneset overlap table](geneset-overlap-tab.png) Interesting that there is overlap for diastolic and systolic blood pressure! 
- suggests that EWAS may be picking up some causal genes more than expected by chance. ### 14 ewas vs. all gwas Data available on the open gwas project website isn't available in the vcf files on bc4 - at least not at the normal path: .../public/GWAS-ID/ It's just biobank japan and ieu-b data Running the new ewas traits through these scripts there weren't any associations greater than expected by chance ### Rest of analyses Final simulations running on bc3 - takes roughly 6 days Then can write up new results and send out __NOTES:__ Some meeting notes ## Item2 <a name="item2"></a> Some pre-meeting notes __NOTES:__ Some meeting notes ## AOB <a name="aob"></a> * $\sigma$<sup>2</sup><sub>HM450</sub> paper been sent out<file_sep># PhD project meeting notes 2020-06-18 Attendees: <NAME>, Gib, Nic, <NAME> ## Agenda * general updates * h<sup>2</sup><sub>EWAS</sub> manuscript ## general updates * Finished technical updates to EWAS Catalog, just need to update the data in there now * Should have an ewas-gwas pathway comparison project manuscript by the end of this month * h<sup>2</sup><sub>EWAS</sub> manuscript comments should be addressed by the end of this week * Sending intro - Please can you get it back to me in 3 weeks. This way I'll have a week to go over comments before our next monthly meeting. ## h<sup>2</sup><sub>EWAS</sub> manuscript Latest version: [h2ewas_paper_TRG_njt.docx](h2ewas_paper_TRG_njt.docx) Comments to go over: 1. Nic's on intro 2. Tom's fig 4 (related to Nic's at start of discussion) --> Is it all just lack of power? 3. Nic's in 2nd paragraph of discussion __NOTES__ Added some comments to the manuscript ## ewas-gwas comparison project don't use causal and consequential. Just use GWAS-specific and EWAS-specific pathways for pathway analysis, worth using stringdb instead of other pathway databases Interesting question: could you recapitulate GWAS genes from EWAS genes. Of course this depends on the trait... 
Also important to think about how genetic architecture and epigenetic architecture of traits would relate to their "overlap". Strikingly there is so little overlap between EWAS and GWAS. This suggests they are measuring separate things... And gives weight to the idea that DNA methylation is more similar to other observational phenotypes than genotype <file_sep># PhD project meeting notes 2020-08-20 Attendees: <NAME>, Gib, Nic ## Agenda * Career advice * How to present properties of EWAS * Cellular heterogeneity section * AOB ## Career advice __NOTES:__ Get a mentor early doors!! ## How to present properties of EWAS Was thinking best way to present it was: we have this database of EWAS associations, is there some underlying pattern to EWAS results that can help explain why some CpGs are associated with traits, whilst others aren't? One reason associations can come about is because of bias - do studies tend to adjust for batch? how many DMPs may have been the result of faulty probes? Have many results been replicated? Could also go into cellular heterogeneity stuff (talk about that later) If they're not due to bias then are there characteristics of CpG sites that makes them more likely to be identified? Or have an increased effect size? -- variability and effect size, average DNAm level and effect size, heritability and effect size, enrichment of sites for genomic features __NOTES:__ Yes that works -- keep with this Could use the RA differences to illustrate how important it is to interogate these issues in EWAS! -- worth finding out what is likely the main reason for differences. If it's likely something everyone knows about then just point it out in the discussion of thesis, but if not then this could form some kind of letter! Worth also characterising the _ABCG1_ findings a bit... ## Cellular heterogeneity section Not sure if it should be part of this project or whether it's a whole new project... [current paper](paper.pdf) use the tissue data!! Can use r2! 
Higher r2 means more cellular specificity. Could have this talked about in the discussion ## AOB * Make a formal timeline that can be sent to supervisors! <file_sep>## ---- load-data-03 -------------------------------- studies <- read_tsv(file.path(tab_path, "03-ewas_catalog", "studies.txt")) results <- read_tsv(file.path(tab_path, "03-ewas_catalog", "results.txt")) ## ---- catalog-data-setup -------------------------------- pub_studies <- studies %>% dplyr::filter(!is.na(PMID), Author != "<NAME>") pub_results <- results %>% dplyr::filter(StudyID %in% pub_studies$StudyID) geo_aries_results <- results %>% dplyr::filter(!StudyID %in% pub_studies$StudyID) n_pubs <- length(unique(pub_studies$PMID)) n_aries <- sum(studies$Consortium %in% "ARIES" & studies$Author == "<NAME>") n_geo <- sum(studies$Consortium %in% "GEO") n_geo_aries <- sum(n_aries, n_geo) # geo info geo_date <- "2020-10-12" geo_exp_n <- 136 ## ---- catalog-use -------------------------------- include_graphics(file.path(fig_path, "03-ewas_catalog", "using_the_catalog.pdf")) ## ---- catalog-project-workflow -------------------------------- include_graphics(file.path(fig_path, "03-ewas_catalog", "project_flowchart.pdf"))<file_sep># PhD project meeting notes 2020-11-06 Attendees: <NAME>, Gib ## Agenda * [Finishing thesis](#item1) * [Item2](#item2) * [AOB](#aob) ## Finishing thesis <a name="item1"></a> * Sending over things to check + Chapter summaries + Abstract + Contribution statements + etc. * Inconsistencies between chapters/justifying decisisons made by others (or made 3 years ago) + Thresholds needed -- explain why they are needed and what the thresholds chosen were arbitrary, but with reasons: + P < 1e-4 -- less stringent than conventional threshold as wanted to be able to see replication across studies, but not too lenient as that would be computationally difficult to handle + N > 100 -- wanted to be low enough to include studies of rarer phenotypes and in less studied samples (e.g. 
non-Europeans), but didn't want to include studies that were very underpowered
	+ ncpg > 100,000
	+ Need broad reasons for arbitrary choices -- have a reason for each one!

__NOTES:__ Some meeting notes

## Item2 <a name="item2"></a>

Some pre-meeting notes

__NOTES:__ Some meeting notes

## AOB <a name="aob"></a>

* AOB item1<file_sep># PhD project meeting notes 2021-01-29

Attendees: <NAME>, Gib

## Agenda

* [Vectology](#item1)
* [Item2](#item2)
* [AOB](#aob)

## Vectology <a name="item1"></a>

* How does opengwas database work?
	+ https://github.com/MRCIEU/opengwas-metadata-curation

__NOTES:__ Some meeting notes

## Item2 <a name="item2"></a>

Some pre-meeting notes

__NOTES:__ Some meeting notes

## AOB <a name="aob"></a>

* Put h2ewas paper on biorxiv and submit to Genome Medicine
* Put data in one place and scripts in another!!
* Snakemake half way through using godmc - really useful

Snakemake examples:
https://github.com/explodecomputer/covid-uob-pooling/blob/master/Snakefile
https://github.com/MRCIEU/godmc_phase2_analysis/blob/master/20_coloc-cpg-gwas/Snakefile
https://github.com/MRCIEU/godmc_phase2_analysis/blob/master/20_coloc-cpg-gwas/bc4-cluster.json
https://github.com/MRCIEU/godmc_phase2_analysis/tree/master/20_coloc-cpg-gwas
https://github.com/MRCIEU/godmc_phase2_analysis/blob/master/16_cellcount-mr/README.md
<file_sep># PhD project meeting notes 2020-08-03

Attendees: <NAME>, Gib

## Agenda

* Intro comments
* Thesis results chapters
* AOB

## Intro comments

Thesis quality...

See highlighted sections for discussion

__NOTES:__ Some meeting notes

## Thesis results chapters

Should I send them all over?? One is published work and I'm just updating it so that the supplementary is now part of the main text and there is a paragraph linking things...

__NOTES:__ Send over results chapters and just say to check intro of them! --- need to know if they link together nicely...

## AOB

* Matt sent back a few thoughts on the h<sup>2</sup><sub>EWAS</sub> paper. 
Not really things that can be added, but thoughts for future work.<file_sep># PhD project meeting notes 2020-07-03

Attendees: <NAME>, Gib

## Agenda

* h<sup>2</sup><sub>EWAS</sub> paper phenotypes
* ewas-gwas comparison simulations
* AOB

## h<sup>2</sup><sub>EWAS</sub> paper phenotypes

Every time I go back to the analysis/data I find something else wrong... Currently unsure how to label phenotypes such as "New carpet in bedroom in past YR". I was thinking that I should just keep many of the phenotype labels like this "as is" and then just label when the phenotype was measured.

I also found that there were some duplicated phenotypes at different time points, e.g. Time taken to walk 3 metres at FOM2 and FOM3. I double checked the correlation and weirdly they aren't very correlated (r = 0.3). BUT this doesn't mean they don't covary similarly with DNAm at different time points. So I checked the h<sup>2</sup><sub>EWAS</sub> estimates and number of DMPs at various thresholds for this phenotype and they are pretty similar.

Also many of the same questions over different time periods. If I excluded all the duplicates then I'd end up excluding ~90 traits. Ideally I'd combine them all into one phenotype and this is possible for those duplicates in the ~400 traits I've used BUT it seems a little bad to derive these variables post looking at correlation between phenotypes...

Could add to discussion --> Traits may be duplicated, but they are not correlated and the analysis and this shouldn't affect things.

## ewas-gwas comparison simulations

[gene-up simulations](methods_test_gene_up_auc_plot_NEW.pdf) make sense, but the [pathway down simulations](methods_test_pathway_down_auc_plot_NEW.pdf) are all over the place...

So the number of genes picked up when doing the pathway down approach and using stringdb just isn't very high at all... This means little overlap and wacky results. 
[Gene overlap plot](methods_test_gene_overlap_only_res_NEW.pdf) and [pathway overlap plot](methods_test_pathway_overlap_only_res_NEW.pdf). This doesn't happen with "gene-up" approach because the number of genes are set... and the minimum number of EWAS genes is 25 and the minimum number of GWAS genes is 50 (n-genes x power). [Genes per pathway](genes_per_pathway_distributions.pdf) and summary tables: ``` pathway_id n_genes Length:337 Min. : 1.0 Class :character 1st Qu.: 43.0 Mode :character Median : 87.0 Mean : 115.9 3rd Qu.: 153.0 Max. :1627.0 pathway_id n_genes Length:18467 Min. : 1.00 Class :character 1st Qu.: 1.00 Mode :character Median : 3.00 Mean : 20.62 3rd Qu.: 10.00 Max. :11935.00 pathway_id n_genes Length:12329 Min. : 1.00 Class :character 1st Qu.: 4.00 Mode :character Median : 16.00 Mean : 52.46 3rd Qu.: 65.00 Max. :1352.00 ``` Guess it'd be best to just alter the parameters to make sure enough genes are captured by the "pathway down" approach and then repeat the analysis with the data from epigraph db??? Need to try and combine simulations in manuscript: 1. Set of arbitrary scenarios show in which situations we have power to detect overlap 2. Using empirical data to estimate which scenario the traits fit into? ## AOB Job offer ## To-do For h<sup>2</sup><sub>EWAS</sub> project 1. just label phenotypes "as is" then add to limitations of the paper. 2. Try and find something to describe some of the phenotypes that aren't clear from their alspac label For ewas-gwas comparison project 3. Extract data from epigraphdb 4. Set up simulations with that data (both with and without using empirical data!) 5. Write out some narative with the simulations <file_sep># PhD project meeting notes 2021-09-16 Attendees: <NAME>, Gib ## Agenda * [Project updates](#item1) * [Mini-projects](#item2) * [AOB](#aob) ## Project updates <a name="item1"></a> Really want to get everything at least up on biorxiv by xmas. 
### EWAS Catalog Going to Wellcome Open - just need to reformat ### Properties of EWAS Go through suggested changes and meet again in roughly a month * put meeting in calendar for about a months ### h2ewas "Reviews Received" ### comparison of EWAS and GWAS Emailed Nic + Tom again to look at it - can ask Nic if he's started and then do an updated version. __NOTES:__ Some meeting notes ## Mini-projects <a name="item2"></a> Been chatting to Matt + Paul + Hannah about clustering traits based on data in the catalog. I mentioned that one use of this could be selecting traits where multivariate regression might improve power for EWAS. This was something you mentioned (e.g. using MTAG), but could also be a mini-project I think. Could take one of the other ideas you had during the PhD and do them as a mini-project ## docker + phd container plugins + themes -- only writing themes download git setup instance be able develop locally case studies page table of contents on side __NOTES:__ Some meeting notes ## AOB <a name="aob"></a> * AOB item1<file_sep>#!/bin/bash # date_to_use=$1 if [[ -z "$date_to_use" ]]; then echo 'using todays date as date' date_to_use=$(date +'%Y-%m-%d') else echo 'date given' fi echo 'date of meeting set to' echo $date_to_use new_dir=meetings/$date_to_use mkdir $new_dir cp notes_template.md $new_dir/notes.md subl $new_dir/notes.md<file_sep># PhD project meeting notes 2020-07-14 Attendees: <NAME>, Gib ## Agenda * Mapping variants to proteins * AOB ## Mapping variants to proteins up to 30% of variants/cpgs identified by a trait were mapped to ensembl gene ids that do not map to proteins. Way that variants/cpgs are mapped to genes currently is: Does it lie within a gene? -- if yes maps to that gene, if no maps to nearest gene Should there be a cutoff when mapping to nearest gene? 
-- if not then I can just map all variants/cpgs to the nearest protein coding gene and keep it I guess I could do some sensitivity analysis whereby I remove variants/cpgs that are very far from the gene?? Here are distance to gene stats with ensembl gene id and hgnc symbols: __NOTES:__ Check how much overlap is lost --> If much overlap is lost, to the point where we can't do the downstream analyses then we should keep with ensembl gene ids mapping If changing mapping to proteins then can have the other results in the supplementary <file_sep># PhD project meeting notes 2020-10-27 Attendees: <NAME>, Gib, <NAME>, Nic ## Agenda * [Properties of EWAS major comments](#item1) * [Other thesis comments](#item2) * [AOB](#aob) ## Properties of EWAS major comments <a name="item1"></a> * Variation of DNA methylation and effect size. Expect an association? Could variation in DNAm be selected for? + we've only measured between cell-type variance -- just looking at the methylation level only you could be missing the trick... + less variation means higher noise to signal ratio + Frame this as an array design thing! + Speak to practical perspective and can then discuss the potential biological explanations later + Look at whether replicated probes are also less variable * Differences between hyper- and hypo-methylated probes + Do you see more associations amongst EWAS in any particular direction? + Smoking may be an exception -- may need to partition traits! * Worth contrasting GWAS results of few samples to EWAS results of few samples? + Nic Q: is there a contrast which follows power - going back to the infinite power experiment I have mentioned before - in that situation, do the EWAS and GWAS results become more similar as GWAS picks up the “environment” and confounding like EWAS… in contrast, as power falls away, GWAS becomes less able to pickup those really broad and polygenic contributions whilst EWAS still shows a footprint of confounders?? -- something for discussion... 
* Distribution of r<sup>2</sup> values -- large values for sum of r<sup>2</sup> suggests to me that the results are biased or some correlation structure driving up r<sup>2</sup> + Would average r<sup>2</sup> for each site (which limits values in the r<sup>2</sup> range) be more appropriate? + method for detecting inflated r<sup>2</sup> could maybe be improved by dividing sum(r<sup>2</sup>) by the sample size, then you're finding outliers based on having higher r<sup>2</sup> than expected for the sample size * Limitations around replication + Power of studies + Reporting of top hits only + Given sample size of replication dataset (and assumption effect size is unbiased) -- know what expected effect size dist should be... -- Gib sending code for this! * Good to make the point that EWAS may be viewed as hypothesis-free, BUT the array was designed to pick up certain regions -- only 2% of sites! __NOTES:__ Some meeting notes ## Other thesis comments <a name="item2"></a> * Linking chapters * Talking about role of others __NOTES:__ Make a summary of things at the start of chapter which includes what other people have done! Need to use passive voice! ## AOB <a name="aob"></a> * ARDE form -- Gib sending off * Update Nic + Tom + Gib with plan!!<file_sep># PhD project meeting notes 2020-06-26 Attendees: <NAME>, Gib ## Agenda * planning genetics jc * h<sup>2</sup><sub>EWAS</sub> paper * ewas-gwas comp ## planning genetics jc Suggested scheduling for 12 months + having something in place if someone drops out of presenting. What Matt does is great and I get a fair bit out of it, but seems to be a lot of effort... ## h<sup>2</sup><sub>EWAS</sub> paper ### Feedback/questions on the gwasvcf package If I'm using the genetics.binaRies package anyway, is there any point in setting bcftools? It says on the gwasvcf package site that it has functionality to clump the data, but I couldn't find that functionality... ended up using the ieugwasr package instead. Rest was pretty simple! 
### snp-count ~ h<sup>2</sup><sub>SNP</sub> Unsure about memory with bluecrystal... Weird error with `ld_clump()` function in ieugwasr package. `Clumping ukb-d-20542, 3 variants, using EUR population reference PLINK v1.90b6.10 64-bit (17 Jun 2019) www.cog-genomics.org/plink/1.9/ (C) 2005-2019 <NAME>, <NAME> GNU General Public License v3 Logging to /tmp/RtmpTuWy3r/file7c43596e0875.log. Options in effect: --bfile data/1kg-data/EUR --clump /tmp/RtmpTuWy3r/file7c43596e0875 --clump-kb 10000 --clump-p1 0.99 --clump-r2 0.001 --out /tmp/RtmpTuWy3r/file7c43596e0875 128240 MB RAM detected; reserving 64120 MB for main workspace. 8550156 variants loaded from .bim file. 503 people (0 males, 0 females, 503 ambiguous) loaded from .fam. Ambiguous sex IDs written to /tmp/RtmpTuWy3r/file7c43596e0875.nosex . Using 1 thread (no multithreaded calculations invoked). Before main variant filters, 503 founders and 0 nonfounders present. Calculating allele frequencies... done. 8550156 variants and 503 people pass filters and QC. Note: No phenotypes present. Warning: No significant --clump results. Skipping. Error in file(file, "rt") : cannot open the connection Calls: lapply ... extract_data -> ld_clump -> ld_clump_local -> read.table -> file In addition: Warning message: In file(file, "rt") : cannot open file '/tmp/RtmpTuWy3r/file7c43596e0875.clumped': No such file or directory` So for a gwas dataset there were 3 hits at P<5e-8, but plink hasn't said they were removed from the dataset (i.e. not in reference panel), yet it says 'No significant --clump results'. To me this warning indicates there were no results with P< p1 (p1 = 0.99...). Excluding that result: h<sup>2</sup><sub>SNP</sub> associated with the presence of a GWAS hit, beta = 21.9 [95%CI 19.6, 24.1] and the association between number of SNPs identified (when the number is above 0) and h<sup>2</sup><sub>SNP</sub> (mean increase of 1.5, [95%CI 0.93, 2.5] SNPs when h2SNP increases by 0.1). 
So the association is larger than n-DMPs ~ h<sup>2</sup><sub>EWAS</sub> association. ### Other When I was thinking about the paper I realised I didn't really think about the fact that the time of measurement of phenotypes may impact the results. I checked the phenotypes and there were ~100 that were clinic phenotypes taken from around the same time the blood for DNAm measurements was taken. Most of the rest of the ~300 phenotypes were measured prior to DNAm measurement. As a lot of EWAS don't measure DNA methylation at the same time as their phenotype of interest, I don't think this is a big problem, but I think I should add a sentence in the discussion along the lines of: "Not only does DNA methylation vary with time, but the covariation of DNA methylation and phenotypes may change over time. This means an h<sup>2</sup><sub>EWAS</sub> estimate may vary with time. This likely also the case for h<sup>2</sup><sub>SNP</sub> estimates (ref), but the magnitude of change over time may be different." Ideally, should try and predict DMPs with h2ewas in a different sample!!! ### Submitting Sent off alspac checklist. Happy to submit when I get that back? ## ewas-gwas comp Tried using stringdb as Tom G suggested. Essentially for each potential protein-protein interaction it has different ways of assessing whether they might interact including textmining, annotated pathways, experimental evidence. Then it scores these from 0-1 with 1 being we've detected an interaction here and we're confident that it's true (DOESN'T SAY ANYTHING ABOUT THE STRENGTH OF SPECIFICITY). They also have a combined score, _S_, which takes into account different avenues of evidence, _S_ = 1 - &prod;<sub>i</sub>(1 - _S_<sub>i</sub>) where _S<sub>i</sub>_ is one of the roughly 5 different scores. 
So to incorporate stringdb data into the analysis I essentially said for any one protein, all the genes linked with that protein are those mapped to all the proteins that one protein interacts with at a given score threshold. I used 0.9 as the threshold. See [proteins-linked-to-genes.pdf](proteins-linked-to-genes.pdf) for an illustration of how genes are linked to a protein (mapping of genes to proteins by genomic position). Plenty of protein-protein interactions with combined score >0.9. ## To-do 1. Send Gib reproducible code for what went wrong with ieugwasr::ld_clump() 2. Put snp-count ~ h2snp into results 3. Put EWAS from h2ewas paper into the ewas catalog 4. Add in extra bits into the discussion of h2ewas paper 5. Send paper to Matt and tell Caroline about it 6. Setup for genetics journal club to be continued in September <file_sep># PhD project meeting notes 2022-02-09 Attendees: <NAME>, Gib ## Agenda * [h2ewas paper](#item1) ## h2ewas paper <a name="item1"></a> plan for the paper given pat leave?? * Could get rough response by end of Thursday (not including 1000 iterations) * OR just get it finished and submit once Tom G + Nic + Doug are happy? - (will send to them anyways) Here is where I'm at: Have run analyses in all people and when removing relateds. Here are the [comparisons](new-cohorts-vs-fom1-estimates-with-unrelateds.png) Interpretation: 5 of 7 estimates overlap with CIs from original analyses, however, results tend to be larger than 0. This suggests that variance captured by DNAm is still likely to be low (near to zero), it will vary a lot by trait (as concluded before) and as samples increase we can get a clearer view on the variance captured by DNAm across traits. Differences seen here may also reflect the timepoint difference. With the original FOM analyses we did not restrict phenotypes to one time point as that is not what is done in EWAS. However, here most of the measures are derived at the same time as DNAm is measured. Questions: 1. 
Why do we have such small CIs for some? 2. I guess PC generation needs to be done each time I do an iteration? 3. One tough thing a reviewer has a problem with is the "unexpected" values in the results - i.e. variance captured < 0. I think it's going to be hard to add anything extra to clarify this issue... __NOTES:__ Some meeting notes<file_sep># PhD project meeting notes 2020-10-16 Attendees: <NAME>, Gib ## Agenda * [properties of ewas](#properties-of-ewas) * [ibsc](#ibsc) * [AOB](#aob) ## properties of ewas ### Ethnic differences * 27 studies in the catalog have done EWAS using multiple ethnicities * EWAS have been coducted in different ethnicities for 7 traits * There have been at least 4 papers that have done a direct comparison of EWAS results in different ethnicities -- not sure it's worth us exploring this? * could put a few sentences in discussion?? + Here's what we found ### Enrichment work * [chromatin state plots](chromatin_states_enrichment_boxplots.pdf) * [tfbs enrichment plot](cpg_corebg_matched_all_enrichment_All_OR.pdf) waiting for Josine to give me access to a folder so I can have all the data needed to re-create the tfbs plot... __NOTES:__ chrom state: - log scale y-axis - rows as type - mixture of colours and symbols for plot what current 450k array is targeting. Is that a good idea? what current epic array is targeting. Is that a good idea? tissue specificity strict p versus not strict p all versus others ## ibsc Could help out if you still need it -- Gib to send over deets __NOTES:__ Some meeting notes ## AOB * Thinking of submitting h2ewas stuff to ajhg -- go for it <file_sep># PhD project meeting notes 2020-04-23 Attendees: <NAME>, Gib Agenda: * github to track meetings * when to start the properties of ewas project * ewas-gwas comparison simulations * predicting the evolutionary trajectories of human traits * AOB ## github to track meetings How to use issues to help? 
__NOTES:__ Can add stuff as a todo list if wanted ## when to start the properties of ewas project There are a lot of papers that are waiting to be input into the ewas catalog currently... so I'm not sure if it's best to start the project now or wait for a bit. __NOTES:__ Clean data manually in EWAS Catalog first before starting. Can put together some scripts, but would be best if the data in the ewas catalog were all there --> Chat to Matt and Paul about timing. ## ewas-gwas comparison simulations See `sim_report.pdf` __NOTES:__ Talking about it on Friday ## predicting the evolutionary trajectories of human traits Do you know what's happening in terms of jobs funded by fellowship funds? Also, what would your thoughts be on starting dates etc.? __NOTES:__ GH is going to look at fellowship stuff again and get back to TB ## AOB ### m2 manuscript New plan: make changes GH suggested and send to all with deadline to submit (give 3 weeks). ### EWAS Catalog Getting upload pipeline sorted may be harder than previously thought: IEU database --> API orchestrates uploads. Goes through several docker containers to check EVERYTHING and outputs it into a document. It doesn't get input until somebody OKs the document Give everything possible an ontology and only allow ontologies to be input! <file_sep># PhD project meeting notes 2020-09-25 Attendees: <NAME>, Gib ## Agenda * ewas-gwas comparison paper * colocalisation project * AOB ## ewas-gwas comparison paper Giving an example of "biological insight" differences using the pathways was very subjective. Most of the pathways for which genes were enriched were very generic such as "protein binding" so had to look through all the traits to find one in which the GWAS pathways identified made sense... 
__NOTES:__ Could send to people for comments on writing and tell them cba to conduct more analyses ## colocalisation project would be up for doing the colocalisation project at some point in the future if nobody else has done/wants to do it. __NOTES:__ Gib putting some simulations together + may end up doing by himself -- will discuss project more in a few months time! ## AOB * Check whether formal process means handing in project<file_sep>--- author: '<NAME>' date: 'November 2020' # Month and year of submission title: "Complex trait architecture through the lens of epigenome-wide association studies" university: "University of Bristol" faculty: "Health Sciences" school: "Bristol Medical School" group: "MRC Integrative Epidemiology Unit" degree: "Population Health Sciences" logo: figure/index/UoBcrest.pdf # logo will only show on PDF bibliography: bib/thesis.bib # location of your bibliography csl: csl/american-journal-of-epidemiology.csl # location of your referencing style knit: "bookdown::render_book" # doesnt need changing site: bookdown::bookdown_site # doesnt need changing lot: true lof: true space_between_paragraphs: true graphics: yes fig_caption: true header-includes: \usepackage[T1]{fontenc} \usepackage{lmodern} output: thesisdown::thesis_pdf: toc: true toc-depth: 4 latex_engine: pdflatex # thesisdown::thesis_gitbook: default # thesisdown::thesis_word: default # thesisdown::thesis_epub: default <!-- geometry: left=0cm, right=0cm, top=0cm, bottom=0cm # This doesn't work --> abstract: | `r if(knitr:::is_latex_output()) paste(readLines("00-abstract.Rmd"), collapse = '\n ')` acknowledgements: | `r if(knitr:::is_latex_output()) paste(readLines("00-acknowledgements.Rmd"), collapse = '\n ')` publications: | `r if(knitr:::is_latex_output()) paste(readLines("00-publications.Rmd"), collapse = '\n ')` declaration: | `r if(knitr:::is_latex_output()) paste(readLines("00-declaration.Rmd"), collapse = '\n ')` abbreviations: | `r if(knitr:::is_latex_output()) 
paste(readLines("00-abbreviations.Rmd"), collapse = '\n ')` --- ```{r thesis-setup, include=FALSE} # you need these packages if(!require(devtools)) install.packages("devtools", repos = "http://cran.rstudio.com") if(!require(thesisdown)) devtools::install_github("ismayc/thesisdown") library(thesisdown) if(!require(kableExtra)) devtools::install_github("haozhu233/kableExtra") library(kableExtra) if(!require(tinytex)) devtools::install_github('yihui/tinytex') library(tinytex) options(tinytex.verbose = TRUE) if(!require(flextable)) devtools::install_github("davidgohel/flextable") library(flextable) # extra packages I use in a lot of chunks library(tidyverse) # simple tidying of data library(RColorBrewer) # customising plot colours library(gridExtra) # plotting multiple graphs on the same page library(readxl) # reading in excel spreadsheets library(xfun) # misc functions including numbers_to_words() # this will allow us to make word documents with relatively ok tables doc.type <- knitr::opts_knit$get('rmarkdown.pandoc.to') # set figure and table paths fig_path <- "figure" tab_path <- "data" stopifnot(file.exists(fig_path) | file.exists(tab_path)) # some useful functions source("chunks/useful_functions.R", local = knitr::knit_global()) ``` ```{r setoptions, eval = TRUE, echo = FALSE} opts_chunk$set(echo = FALSE, warning = FALSE, message = TRUE, cache = FALSE, dpi = 300, fig.align = "center", out.width = "100%") ``` <file_sep># PhD project meeting notes 2020-07-23 Attendees: <NAME>, Gib, <NAME>, Nic ## Agenda * Thesis intro comments * Current timeline plan ## Thesis intro comments ### Overarching comments * Many comments were specific and easy to change which is nice. I completely agree about restructuring the section discussing variance captured by all DNA methylation sites measured genome-wide -- need to explain what the problem is in DNA methylation before jumping to heritability measurements. 
* The other overarching comment was: "Got to here (end of inferring biology from signals section) and there has been a lot of discussion about a broad set of issues pertinent to EWAS and methylation – I have kind of forgotten what the job of the thesis is though – how does this all knit together?" -- This is something I really struggled with when writing the intro, knitting things together...
    + To remedy this just write something at the start of the "Using methods from genetics to help inform future EWAS section" that states something along the lines of 'In order to remedy some of the problems EWAS face and to help understand whether the "experiment" of measuring DNA methylation across many cohorts and studies has been successful, we can borrow ideas and methods developed in genetics and genetic epidemiology.'
    + Also need to add: 'There is no reason to believe that genotype and DNA methylation measurements share the same properties. They are distinct entities and it might be helpful to liken DNA methylation to more common phenotypic measurements rather than genotypic measurements. That being said, there are specific methods and ideas from genetics that can be adapted to explore epigenetics.'
    + Then give examples as per TG's comment.

Would be worth talking more philosophically about the work. Why were the probes for 450k used? Was the "experiment" of measuring DNA methylation for the 450k in blood worthwhile or has everyone wasted their money? DNAm thought to be important enough to create multiple 'chips', but why?

Are epigenetic signals surprising? There is an abundance of signal for certain phenotypes with low sample size, but of course you'd expect that if you liken DNAm to a phenotype rather than to a genotype. Things are written as if DNAm comes before changes in trait.
State explicitly, early on in the intro, that DNAm changes can come after or before trait changes Also worth discussing in more detail that there is experimental evidence that DNAm does not come before gene silencing in many cases. Change wording from DNAm 'capturing' trait variance to DNAm and traits covarying! ### Specific comments * See intro doc --> Highlighted uncertain bits __NOTES:__ Some meeting notes ## Current timeline plan ### 2020-07-20 - 2020-07-26 * Intro comments attended to and re-written * ewas-gwas comp draft (Chapter 6) written ### 2020-07-27 - 2020-08-02 * Intro checked over and sent back * ewas-gwas comp draft finished and sent ### 2020-08-03 - 2020-08-09 * Finished extracting EWAS Catalog papers * Finished properties of ewas (Chapter 4) analysis (analysis plan already agreed upon and fairly simple to run) ### 2020-08-10 - 2020-08-16 * Properties of ewas first draft written and sent off * Started discussion ### 2020-08-17 - 2020-08-23 * Finished discussion and sent * Finished any outstanding results work and/or attended to comments __NOTES:__ Some meeting notes ## AOB * Send analysis plan to PY and MS for "properties of EWAS paper"<file_sep># PhD project meeting notes 2020-07-31 Attendees: <NAME>, Gib ## Agenda * ewas-gwas comp discussion * Thesis results chapters * Intro comments * AOB ## ewas-gwas comp paper ### Results StringDB appears to outperform other databases in terms of power to detect overlap: [methods_test_gene_up_auc_plot_all_databases.pdf](methods_test_gene_up_auc_plot_all_databases.pdf). However, to create this database I used stringent cutoff thresholds for confidence scores (CS > 0.9) to define a protein-protein interactions. This meant only 12k genes/proteins remained and so number of pathways the genes identified in empirical analyses is limited... In some cases, we're removing 1/3 of the genes identified in the ewas/gwas... Could re-map and run things again, but don't think it's worth it. 
Also a little weird to use stringdb + epigraphdb estimated ppi... ` trait eg eg_in_sdb gg gg_in_sdb <chr> <int> <int> <int> <int> 1 alcohol_consumption_per_day 313 130 196 84 2 former_versus_never_smoking 260 123 24 16 3 current_versus_never_smoking 1781 784 310 87 4 body_mass_index 227 118 231 86 5 glucose 11 7 50 24 6 insulin 30 12 7 4 7 educational_attainment 23 13 303 101 ` ### Discussion Been thinking about discussion of paper and why you might not find correlation between enriched pathways of EWAS and any GWAS even if you had good study power: [pathway-overlap-diagram.pdf](pathway-overlap-diagram.pdf). I'd assume the likely scenario is confounding/reverse causation + CpGs related to lots of different traits downstream... This is kind of going to be tested in the 'properties of EWAS' paper - pleiotropy of CpG sites... Points I want to make: * Little overlap suggests new information gained from EWAS * However, this information is hard to interpret - uncertain what is causing associations + uncertain what impact DNA methylation has on downstream traits * Limitations Any other big points to make?? __NOTES:__ Make sure continuity across the paper is clear --> So if you use diagrams to discuss things then make sure these are pointed out before simulations! Overlap is low -- simulations look to see what would permit 0 overlap and there COULD still be overlap -- use more informed more knowledge based views of literature ## Thesis results chapters Should I send them all over?? One is published work and I'm just updating it so that the supplementary is now part of the main text and there is a paragraph linking things... __NOTES:__ Some meeting notes ## Intro comments Thesis quality... See highlighted sections for discussion __NOTES:__ Will go over thesis stuff in next meeting ## AOB * Memory in bluecrystal 4... 
If a job is going to take up more memory does that memory get partitioned automatically across the processors/nodes OR is the memory just the memory used overall? * Extracting data from IEU Open GWAS Database files that were needed for all EWAS-GWAS overlap stuff and found that this ID existed in the gwas info (`ieugwasr::gwasinfo()`) "bbj-a-73", but VCF wasn't found in the same format as other GWAS IDs in the data repository (ID/ID.vcf.gz) and the ID itself didn't exist as it's own repo. * Also there are a fair few VCFs that have 'length 0' apparently...<file_sep># PhD project meeting notes 2020-05-21 Attendees: CANCELLED ## Project progress ### EWAS Catalog Next week we're starting the final part of updating the catalog before sending out the manuscript (updating with newer publications). So hoping this will be done very soon. ### Properties of EWAS Will start analysis once data in EWAS Catalog is updated. After the last meeting I've updated the [analysis plan](properties-of-ewas-plan.pdf). If you can have another quick look through it and give any comments that'd be great. I don't really want to get moving with this project until I'm happy with the EWAS Catalog, but if we're happy with the analysis plan then as soon as that project is finished then I can immediately get started with this. ### Comparison of EWAS and GWAS First draft of manuscript is written. Need to work out a couple of kinks and then will send round. ## Thesis writing Formatted DNAm-lung cancer MR into the results section. Started on intro. Hoping to get a first draft of this to you by the end of this month or in the first week of June. 
<file_sep># PhD project meeting notes 2020-10-30 Attendees: <NAME>, Gib ## Agenda * [ewas-gwas comparison geneset filtering](#item1) * [ewas catalog data](#item2) * [AOB](#aob) ## ewas-gwas comparison geneset filtering <a name="item1"></a> * Realised that a lot of go term genesets are only one gene so tested if removing these (and massive genesets of >5000 genes) had an impact on power. Answer is no: [plot](go_sim1_filtered_terms_power.pdf) * Not limiting to "independent" SNPs means there are a lot of genes... -- currently just added this to the limitations. + All SNPs at P<5e-8 kept and mapped to either gene they're within OR nearest gene + For BMI, if the independent SNPs are taken from the Yengo paper then mapped to genes a few more genes than SNPs are present (expected), but this is about 3x fewer genes than with the method currently used... + Worth doing a sensitivity analyses: limit to independent SNPs AND/OR eQTL stuff. could also look at various other GWAS that have done gene mapping! * Some of the new architecture sim results are wild: [plot](architecture_sims_crp_fvns_only_correlation_of_pathway_enrichment_scores.png) __NOTES:__ <NAME> -- repeatability of probes -- could see if repeatable probes more likely to come up as EWAS hits ## ewas catalog data <a name="item2"></a> * Need to "timestamp" results chapters that use EWAS Catalog data. + Was thinking to have a sentence or two at the end of that chapter + For ewas-gwas comparison it's weird because of glucose + insulin EWAS... + Can mention will update for the paper and that just added those two studies! __NOTES:__ Some meeting notes ## AOB <a name="aob"></a> * AOB item1<file_sep># PhD project meeting notes 2020-06-22 Attendees: <NAME>, Gib ## Agenda * h<sup>2</sup><sub>EWAS</sub> manuscript ## h<sup>2</sup><sub>EWAS</sub> manuscript Problems with h<sup>2</sup><sub>SNP</sub> - GWAS hits prediction work: 1. 
If define hits above expected as threshold x one million then get too many 'sig' hits above number expected and if you take into account n-traits then you get too few... (formula is GWAS hits above expected ~ h<sup>2</sup><sub>SNP</sub>) 2. The extract_instruments() function appears to be a bit temperamental... pre-lim results: [h2_snp-count_roc.pdf](h2_snp-count_roc.pdf), [h2_snp-count_roc_strict.pdf](h2_snp-count_roc_strict.pdf) Just use bc4 and https://mrcieu.github.io/gwasvcf/ to extract data and run the analysis... * access bc4 * file is stored here: /mnt/storage/private/mrcieu/research/scratch/IGD/data/public
 + directory for every id + in directory there is a vcf file Finished going over comments. Also wanted to ask if I need to generate a new B number for this project or whether I can use an existing one... When going through this version of the manuscript please check the UKB section of the methods + supplement. It's now a very minor part of the analysis so just added a single sentence in the results to reference it and put the plot in the supplement. So I didn't put much detail in to describe the data... There are some 'unresolved' comments in the manuscript. I've tried to answer them but realise I might not have done it justice. - Add in to results the imprecision and interpretation. I'll send over a 'clean' (with only comments) and 'unclean' version. To-do: 1. Submit proposal to ALSPAC 2. Add imprecision into results of manuscript 3. Change results section to match model of gwas-hits ~ h2snp 4. Send clean version and fully commented version to Gib 5. Apply for bc4 access 6. Make script for gwas-hits ~ h2snp analysis. 7. Run script and put results in manuscript <file_sep># PhD project meeting notes 2020-09-18 Attendees: <NAME>, Gib ## Agenda * ewas-gwas comparison paper * properties of ewas * AOB ## ewas-gwas comparison paper Once we have new data, the number of EWAS with sample size > 4500 will be roughly double what we have currently... I guess an idea would be to keep it at seven traits in the thesis and expand it for the paper?? - try and keep paper and thesis chapter together for as long as possible Added in the extra simulations after the empirical results. so results sections goes: - Overlap in genomic regions identified - Assessment of power to detect overlap (sims 1) - Empirical overlap - Using empirical overlap to understand architecture of traits - Overlap of non-corresponding EWAS and GWAS Was thinking just a z-test to determine whether there is a difference between empirical correlation of enrichment scores and mean simulated value. 
Also, I think based on our discussions I'm going to remove "consequential" from the manuscript and just use "non-causal" instead. I'll also add to the discussion that sampling genes at random to populate the "non-causal" genes is likely to be incorrect and any genes identified through confounding or reverse causation in EWAS may be correlated with the causal genes in some way. - this sounds fine __NOTES:__ Some meeting notes ## properties of ewas [Association between variance and mean meth level](variance_vs_avg_meth.png) [mean meth level and effect size](avg_meth_vs_absolute_effect_size.png) What was thinking behind centering on zero? - effect size vs. abs(mean_meth - 0.5) + variance + h2 Also calculate AUC for all of them Genomic features stuff central to chapter -- interested in probe design. __NOTES:__ Some meeting notes ## AOB * AOB item1<file_sep># PhD project meeting notes 2020-08-14 Attendees: <NAME>, Gib ## Agenda * Job * Properties of EWAS * AOB ## Job Will be applying __NOTES:__ Use interview as an opportunity to interview them! Be clear about you what you want from job! First post-doc is quite important... With Doug, learning LMM, could end up with ## Properties of EWAS ### Issue 1: bias due to ARIES contributing wealth of data - Not an issue because the number of CpGs identified is actually fairly low. Only really an issue when looking at traits, but can just mention that... ### Issue 2: Haven't collected variance explained data - Can estimate rsq!!! + rsq = (t^2) / (t^2 + n - 1) -- t is pretty much just z for large sample sizes - Can also use this to estimate the effect estimates per SD increase + 
rsq = vx / vy
 + vx = b^2 * var(x) + b = sqrt(rsq / var(x))
 ### Issue 3: So many CpGs identified as DMPs -- roughly 350k - define an EWAS as a study aiming to look at changes in DNA methylation between the same samples that differ only by the trait of interest (e.g. height or a disease). This would exclude things like age and cross-tissue analyses and so should remove some of the analyses that gave loads of hits! - Could also do some sensitivity analyses where we remove actual EWAS with crazy high numbers of CpGs... -- e.g. the rheumatoid arthritis one ### Issue 4: Getting variability and avg DNAm data - GoDMC has data on both variance and average methylation levels. Should also have data for each cohort. Gib will send this over. - Need to test for heterogeneity between cohorts and remove any mean methylation levels or variances that aren't consistent across cohorts. ### Issue 5: Getting DNAm heritability data - can't access DNAm heritability results here: [http://www.epigenomicslab.com/online-data-resources](http://www.epigenomicslab.com/online-data-resources/) - can use estimates from van dongen study or another study - Gib sending over estimates ### ANOTHER IDEA - CELL SPECIFICITY - Cell type specificity is obvs a big issue - Also could be missing a lot of stuff by not using single cell stuff - There is correlation between DNA methylation sites across tissues, suggesting stability in DNAm, but would we expect to find associations at these positions across tissues? + If this stability in DNAm just reflects reliable probes measuring those sites well then the associations track across tissues, but we may be missing plenty more that are poorly measured + However, the stability in DNAm across tissues could reflect the redundancy of DNAm at those sites. The sites that correlate across tissues may be reflected by either constant or little gene expression at nearby genes. For example, correlation at housekeeping genes is expected. - Could group CpGs by conserved across tissues vs. 
not conserved and look at gene expression of nearby genes and/or specifically house keeping genes. - Josine may have some data on correlation between tissue types, but look around the literature first For enrichment analyses - can have a separate repo for Charlie, so can be completely up front about what her contributions were to the project! ## AOB * <NAME> got back to me about making data available and just sent me this: http://www.bristol.ac.uk/staff/researchers/data/publishing-research-data/. and said "The IEU does not have an RDSF space dedicated to this. Most PIs or programme leads should have a 5TB RDSF space which you can use for this process." If you don't have space, you need to: 1. Register as a data steward and register the project, 2. Make me a data deputy, 3. Wait for me to sort everything out, 4. request final data publication when I've set it up. <file_sep># PhD project meeting notes 2021-05-21 Attendees: <NAME>, Gib ## Agenda * [properties of ewas](#item1) * [AOB](#aob) ## properties of ewas <a name="item1"></a> Main question(s): Can we explain EWAS hits using available EWAS summary stats and could this be used to inform future study design? Could change it so title is: Insights gained from the analysis of XXX EWAS. This would allow a description of the data and for it to be presented properly. The results for the thesis chapter was kind of split into 3 sections: 1. A description of the catalog and associations in it (not answering main question) 2. How much are false positives likely contributing to results? (faulty probes, inclusion of certain covariates, replication rate) 3. Are there innate characteristics of the DNAm sites identified that make them more likely to be identified in EWAS? (h2, var, where they are in genome) Technical difficulty with estimating R2 as we have winners curse. - could use Amanda Forde's package: https://github.com/amandaforde/winnerscurse/. 2 and 3 answer main question. 
Replication needs thinking about - presentation and work-wise. __NOTES:__ Some meeting notes ## AOB <a name="aob"></a> * AOB item1<file_sep># ------------------------------------------------ # Simulations to compare methods for assessing overlap of pathways # ------------------------------------------------ # There are 3 (kind of 2.5) methods look at whether there is more # overlap between pathways identified by EWAS and GWAS than expected # by chance. This script aims to assess which is the best! pkgs <- c("tidyverse", "FDb.InfiniumMethylation.hg19", "IlluminaHumanMethylation450kanno.ilmn12.hg19") lapply(pkgs, require, character.only = T) dir <- "~/main_project/epi_gen_comp/" setwd(dir) devtools::load_all("~/repos/usefunc/") source("R/mapping_functions.R") source("R/simulation_functions.R") args <- commandArgs(trailingOnly = TRUE) sim_n <- as.numeric(args[1]) # simulation number (can be 1, 2, 3 or 4) split <- args[2] split <- as.numeric(unlist(strsplit(split, ","))) message("sim function = ", sim_n) message("split = ", split) # gene ontology terms - table of terms + genes linked to the terms # ensembl_gene_id pathway_id description go_terms <- read_tsv("data/derived/gene_ontology_terms.txt") %>% dplyr::filter(!is.na(pathway_id)) # same as above but for kegg kegg_terms <- read_tsv("data/derived/kegg_terms.txt") # gene names + positions # ensembl_gene_id hgnc_symbol chromosome_name start_position end_position all_genes <- read_tsv("data/derived/ensembl_genes.txt", guess_max = 1e6) all_genes$size <- all_genes$end_position - all_genes$start_position summary(all_genes$size) # unique pathways u_go <- unique(go_terms$pathway_id) u_kegg <- unique(kegg_terms$pathway_id) # ------------------------------------------------ # Setup simulations # ------------------------------------------------ # First simulation, want to see in which situations the methods # are able to detect true overlap above chance (i.e. 
# when percent_ewas_causal = 0)
params <- expand.grid(
  n_genes = c(500, 1000, 2500, 5000, 10000, 25000),
  gwas_power = c(0.1),
  ewas_power = c(0.05, 0.1, 0.2, 0.5, 1),
  percent_ewas_causal = c(0, 0.01, 0.05, 0.1, 0.5, 1),
  enriched_paths_only = c(TRUE, FALSE),
  pathway = c("kegg", "go"),
  sim = c(1:1),
  gene_overlap = NA,
  pathway_overlap = NA,
  or_g = NA,
  or_p = NA,
  p_g = NA,
  p_p = NA
)

extract_pathways <- function(gwas_genes, ewas_genes, databases) {
  # Extract the unique pathway IDs linked to the GWAS and EWAS gene lists
  # from each of the supplied databases. Each database data frame must be
  # named DATABASE_terms (e.g. "go" -> go_terms) and contain the columns
  # ensembl_gene_id + pathway_id.
  #
  # FIX: the loop previously indexed `database[x]`, an object that is not
  # defined in this function's scope -- it now uses the `databases`
  # argument that is actually passed in.
  out_paths <- lapply(seq_along(databases), function(x) {
    pathway_dat <- get(paste0(databases[x], "_terms"))
    gwasp <- pathway_dat %>%
      dplyr::filter(ensembl_gene_id %in% gwas_genes) %>%
      pull(pathway_id) %>%
      unique
    ewasp <- pathway_dat %>%
      dplyr::filter(ensembl_gene_id %in% ewas_genes) %>%
      pull(pathway_id) %>%
      unique
    return(list(gwas = gwasp, ewas = ewasp))
  })
  names(out_paths) <- databases
  # FIX: callers (e.g. sim_func) access the result as res$gwas / res$ewas,
  # which silently returned NULL from the old unnamed nested list. When a
  # single database is supplied, return its gwas/ewas lists directly so
  # that access pattern works.
  if (length(databases) == 1) {
    return(out_paths[[1]])
  }
  return(out_paths)
}

perform_enrichment <- function(identified_genes, pathway_df, background_genes) {
  # Uses Fisher's exact test to perform geneset enrichment analyses for
  # each pathway in `pathway_df`, returning one row per pathway with the
  # pathway_id, enrichment OR, raw p-value and BH-adjusted p-value.
  unique_pathways <- unique(pathway_df$pathway_id)
  len_up <- length(unique_pathways)
  enriched_paths <- map_dfr(seq_along(unique_pathways), function(x) {
    message("Pathway ", x, " of ", len_up)
    pathway_of_interest <- unique_pathways[x]
    pathway_genes <- pathway_df %>%
      dplyr::filter(pathway_id %in% pathway_of_interest) %>%
      pull(ensembl_gene_id)
    res <- overlap_test(group1 = identified_genes,
                        group2 = pathway_genes,
                        all_variables = background_genes)
    # FIX: use the explicit `p.value` element of the htest object rather
    # than relying on `$p` partial matching
    data.frame(pathway_id = pathway_of_interest,
               enrich_or = res$estimate,
               enrich_p = res$p.value)
  })
  # FIX: p.adjust() was previously called inside the per-pathway loop, so
  # each p-value was "adjusted" on its own (a no-op). Multiple-testing
  # correction must be applied across all pathways at once.
  enriched_paths$adj_enrich_p <- p.adjust(enriched_paths$enrich_p, method = "BH")
  return(enriched_paths)
}

overlap_test <- function(group1, group2, all_variables) {
  # Asks if there is more of an overlap between group one and group two
  # than expected by chance if those groups were randomly sampled from all
  # the variables. Returns the "htest" object from a one-sided (greater)
  # Fisher's exact test on the resulting 2x2 table.
  q <- sum(group1 %in% group2)           # overlap between the two groups
  m <- length(group2) - q                # in group2 but not group1
  k <- length(group1) - q                # in group1 but not group2
  n <- length(all_variables) - q - m - k # in neither group
  tab <- matrix(c(q, m, k, n), 2, 2)
  out <- fisher.test(tab, alternative = "greater")
  return(out)
}

sim_func <- function(n_genes, ewas_power, gwas_power, percent_ewas_causal,
                     database, enriched_paths_only) {
  # One simulation replicate: sample the causal genes for a trait, derive
  # the genes a GWAS and an EWAS of given power would detect, then test
  # gene-level and pathway-level overlap between the two studies.
  pathway_dat <- get(paste0(database, "_terms"))
  # sample the causal genes for the trait
  causal_trait_genes <- sample(all_genes$ensembl_gene_id, n_genes)
  # FIX: previously sampled from the undefined object `trait_genes` with a
  # nonsensical size argument (`causal_trait_genes * gwas_power`). The
  # GWAS detects a gwas_power fraction of the causal genes.
  gwas_genes <- sample(causal_trait_genes, n_genes * gwas_power)
  n_ewas_genes <- n_genes * ewas_power
  n_ewas_causal_genes <- n_ewas_genes * percent_ewas_causal
  # EWAS genes = a percent_ewas_causal share of truly causal genes plus
  # randomly drawn (non-causal) genes making up the remainder
  ewas_genes <- c(sample(causal_trait_genes, n_ewas_causal_genes),
                  sample(all_genes$ensembl_gene_id, n_ewas_genes - n_ewas_causal_genes))
  # perform gene overlap test
  outg <- overlap_test(ewas_genes, gwas_genes, all_genes$ensembl_gene_id)
  # perform enrichment analysis if needed, otherwise take all pathways
  # linked to the identified genes
  if (enriched_paths_only) {
    gwas_enrich <- perform_enrichment(gwas_genes, pathway_dat, all_genes$ensembl_gene_id)
    gwasp <- gwas_enrich %>%
      dplyr::filter(adj_enrich_p < 0.05) %>%
      pull("pathway_id")
    ewas_enrich <- perform_enrichment(ewas_genes, pathway_dat, all_genes$ensembl_gene_id)
    ewasp <- ewas_enrich %>%
      dplyr::filter(adj_enrich_p < 0.05) %>%
      pull("pathway_id")
  } else {
    all_pathways <- extract_pathways(gwas_genes, ewas_genes, database)
    gwasp <- all_pathways$gwas
    ewasp <- all_pathways$ewas
  }
  # perform overlap tests for pathways
  # FIX: `pathwas_dat` was a typo for `pathway_dat`; also pass the unique
  # pathway IDs so the Fisher-table background is not inflated by the
  # one-row-per-gene structure of the pathway data
  outp <- overlap_test(ewasp, gwasp, unique(pathway_dat$pathway_id))
  # write out results
  out <- list(gene_res = outg,
              gene_overlap = sum(ewas_genes %in% gwas_genes),
              pathway_overlap = sum(ewasp %in% gwasp),
              pathway_res = outp)
  return(out)
}

params2 <- expand.grid(
  n_genes = c(500, 1000, 2500, 5000, 10000, 25000),
  gwas_power = c(0.1),
  ewas_power = c(0.05, 0.1, 0.2, 0.5, 1),
percent_ewas_causal = c(0, 0.01, 0.05, 0.1, 0.5, 1), sim = c(1:1), gene_overlap = NA, pearsons_rho = NA, spearmans_rho = NA ) sim_func2 <- function(n_genes, ewas_power, gwas_power, percent_ewas_causal) { # simulation function pathway_dat <- get(paste0(database, "_terms")) # sample genes causal_trait_genes <- sample(all_genes$ensembl_gene_id, n_genes) gwas_genes <- sample(trait_genes, causal_trait_genes * gwas_power) n_ewas_genes <- n_genes * ewas_power n_ewas_causal_genes <- n_ewas_genes * percent_ewas_causal ewas_genes <- c(sample(causal_trait_genes, n_ewas_causal_genes), sample(all_genes$ensembl_gene_id, n_ewas_genes - n_ewas_causal_genes)) # perform gene overlap test outg <- overlap_test(ewas_genes, gwas_genes, all_genes$ensembl_gene_id) } # * power to detect correlation/overlap between the GWAS (or EWAS, doesn't matter) of the same trait given differing powers of the study # + Take range for N genes from range of actual EWAS/GWAS (roughly from 50 to 2500) # + Essentially simulation will be: Here are X genes. Study one detected X genes and study two detected Y genes. Vary X and Y and compare power each of the methods have to detect overlap/correlation (will need to create null for methods 1 and 2) # So these simulations should show which methods we'd see overlap when we expect it (i.e. when X is very similar to Y) and when we wouldn't expect it (i.e. when there is no similarity between X and Y) as well as scenarios in between. <file_sep># PhD project meeting notes 2020-06-12 Attendees: <NAME>, Gib ## Agenda * h<sup>2</sup><sub>EWAS</sub> manuscript * ewas-gwas comparison: method for measuring overlap * AOB ## h<sup>2</sup><sub>EWAS</sub> manuscript Nic trying to get comments back by the end of the day ## ewas-gwas comparison: method for measuring overlap See plots: [methods_test_auc_plot.pdf](methods_test_auc_plot.pdf) and [methods_simulations_overall_res.pdf](methods_simulations_overall_res.pdf). __NOTES__ Try a pathway down approach! -- i.e. 
for a trait there are X pathways. Then for the GWAS genes and EWAS genes sample from these pathways. Then map these genes to pathways and check gene/pathway overlap. Worth also varying the amount of pathway overlap! Double check didn't do something similar already, feels familiar... Also potentially worth making an R package to let users input positions/genes and have then output which GWAS the users positions/genes correlate with in terms of pathways <file_sep>cd index/ Rscript -e "require(knitr); require(markdown); require(thesisdown); bookdown::render_book('index.Rmd', output_format = 'all', clean = FALSE)"<file_sep>## ---- load-data-02 -------------------------------- tp_02 <- file.path(tab_path, "02-data_sources") fp_02 <- file.path(fig_path, "02-data_sources") data_overview_tab <- readxl::read_xlsx(file.path(tp_02, "data_sources_overview.xlsx")) data_overview_cap <- "Overview of data used in this thesis" ## ---- data-overview-tab -------------------------------- kbl(data_overview_tab, format = "latex", caption = data_overview_cap, booktabs = TRUE) %>% kable_styling(latex_options = c("striped", "HOLD_position", "scale_down")) %>% add_footnote(c("ARIES = Accessible Resource for Integrated Epigenomic Studies", "GEO = Gene Expression Omnibus", "IEU = Integrative Epidemiology Unit", "EPIC-Italy = Italian strand of the European Prospective Investigation into Cancer and Nutrition study", "MCCS = Melbourne Collaborative Cohort Study", "NOWAC = Norwegian Women and Cancer", "NSHDS = Northern Sweden Health and Disease Study", "TRICL-ILCCO = Transdisciplinary Research in Cancer of the Lung and The International Lung Cancer Consortium", "CCHS = Copenhagen City Heart Study", "GTEx = Genotype-Tissue Expression (project)", "DNAm = DNA methylation"), notation = "none", escape = FALSE) %>% add_footnote(c("Data source created in Chapter 3"), notation = "symbol")<file_sep># PhD project meeting notes 2020-05-12 Attendees: <NAME>, Gib ## Agenda * general updates * ewas-gwas 
comparison simulations and paper * AOB ## general updates * Sent out h<sup>2</sup><sub>EWAS</sub> paper. + <NAME> he doesn't really need to comment on this version. + Also I ended up not condensing the intro too much. The current length of the manuscript is still pretty long, I think a lot of the info there is needed. Can cut down when submitting if needs be. * We decided on a pretty simple upload button for the EWAS catalog. Meeting about it tomorrow, but essentially as long as there is some place people can upload data for when it's submitted then that'll be great. I'm going to say to Paul and Matt that I want 1. to only spend max 1 more week on automating user upload and 2. to have everything else ready * Started writing intro __NOTES:__ * When writing intro, think about exactly what you want to convey to the reader. It doesn't need to be like a lesson, but more of a prelude to the work! ## ewas-gwas comparison simulations and paper see `sim_report.pdf`. __NOTES:__ Change simulations so that the GWAS genes themselves are extracted (not just number) and they are set to be causal in simulations and then all EWAS genes are set to be consequential (unless overlap between consequential and causal genes is 0! Then any EWAS genes that are GWAS genes will have to be causal only!). Could add a parameter to change the overlap between causal and consequential genes. NEW INTERPRETATION: As proportion of overlap between causal and consequential genes changes this happens (GWAS and EWAS overlap increases). Based on empirical data, it is likely there is X amount of overlap between causal and consequential genes/pathways. ## AOB Gib is going to plan the future work required for his fellowship when he has the chance! Could also think about writing a grant to do some quant gen work using the GWAS data in IEU GWAS database... 
<file_sep>--- author: '<NAME>' date: 'November 2020' # Month and year of submission title: "Complex trait architecture through the lens of epigenome-wide association studies" university: "University of Bristol" faculty: "Health Sciences" school: "Bristol Medical School" group: "MRC Integrative Epidemiology Unit" degree: "Population Health Sciences" logo: figure/index/UoBcrest.pdf # logo will only show on PDF bibliography: bib/thesis.bib # location of your bibliography csl: csl/american-journal-of-epidemiology.csl # location of your referencing style knit: "bookdown::render_book" # doesnt need changing site: bookdown::bookdown_site # doesnt need changing lot: true lof: true space_between_paragraphs: true graphics: yes fig_caption: true header-includes: \usepackage[T1]{fontenc} \usepackage{lmodern} output: thesisdown::thesis_pdf: toc: true toc-depth: 4 latex_engine: pdflatex # thesisdown::thesis_gitbook: default # thesisdown::thesis_word: default # thesisdown::thesis_epub: default <!-- geometry: left=0cm, right=0cm, top=0cm, bottom=0cm # This doesn't work --> abstract: | <style> body { text-align: justify} </style> Quantifying underlying DNA methylation signatures of complex traits presents an opportunity to identify biomarkers and modes of disease intervention. Years of epigenome-wide association studies (EWAS) have shown signatures vary greatly by trait and the interpretation of signals remains difficult. This thesis explores potential explanations for this and examines the role of EWAS in understanding complex traits. <br> To ascertain necessary data, I led a collection of EWAS results and developed a web resource for storing and querying the 975,574 associations across 1244 EWAS. Evidence was found that results for EWAS that accounted for common biases, such as batch effects and cell composition, could partially be explained by variance and heritability of DNA methylation. 
Further, identified sites were enriched in promoter regions, enhancer regions and transcription factor binding sites. <br> Across the EWAS surveyed, DNA methylation was commonly measured in blood at roughly 450,000 sites genome-wide. I examined the predictive capacity of DNA methylation in this context and found that it captured little variance of 400 independent complex traits. <br> Next, commonalities between the overlap in biology highlighted by EWAS and GWAS of corresponding traits was explored and I found that the genes and genesets identified were substantially different. Trait aetiology may still be explored through EWAS, but the largely differential biology highlighted suggests the majority of EWAS results here are due to confounding and reverse causation. <br> Mendelian randomization (MR) analyses further suggested residual confounding as being responsible for EWAS results as marked differences were found between an EWAS meta-analysis of lung cancer and the corresponding MR analyses. <br> Through cataloguing published results and integrating methods and results from other fields, this thesis identifies limitations to the current EWAS study design that reveal the complexity of the role of DNA methylation on mediating the path from genotype or environment to phenotype. acknowledgements: | <style> body { text-align: justify} </style> I would like to thank all of the lovely people who have helped me throughout my PhD. There are so many people to thank and if I detailed my gratitude towards each of them (my friends, family, supervisors, and colleagues) for all their support then I would have no time to write my thesis... All those who have helped me know who they are and hopefully know I appreciate them and love them dearly. There are just a couple of extremely special mentions I would like to make. Firstly, I am forever indebted to my parents for everything they have done to support me in every aspect of my life. 
From looking after me during difficult times when growing up, to taking my mind off of work by having a chat and pouring me another glass of wine, they are always there when I need them. Secondly, I thank my beautiful flatmates, <NAME> and <NAME>. During a pandemic they have kept me sane (as sane as I could be), made me laugh every single day and have brought me countless (yeah I said it Tom) cups of tea. Writing this thesis would have been LITERALLY impossible without them. publications: | <style> body { text-align: justify} </style> Below is a list of publications I have been involved with throughout my PhD, some of which is presented in this thesis. I conducted the vast majority of the work, but in certain scenarios others also contributed and I have detailed this in contributions statements at the start of each chapter. The first and senior authors for each piece of work have agreed on my contributions detailed in these statements. <br> \textbf{Relevant to this thesis} <br> Battram,T., Richmond,R.C., Baglietto,L., Haycock,P.C., Perduca,V., Bojesen,S.E., Gaunt,T.R., Hemani,G., Guida,F., Carreras-Torres,R., et al. (2019) Appraising the causal relevance of DNA methylation for risk of lung cancer. Int. J. Epidemiol. <br> Battram,T., Gaunt,T.R., Speed,D., Timpson,N.J. and Hemani,G. (2020) Exploring the variance in complex traits captured by DNA methylation assays. bioRxiv <br> Battram,T., Yousefi,P., Crawford,C., Prince,C., Babaei,M., Khodabakhsh,S., Whitehurst,O., Mahoney,L., Hemani,G., Gaunt,T.R., et al. The EWAS Catalog: a database of epigenome-wide association studies. Submitted to Bioinformatics November 2020. <br> \textbf{Not relevant to this thesis} <br> Battram,T., Hoskins,L., Hughes,D.A., Kettunen,J., Ring,S.M., Smith,G.D. and Timpson,N.J. (2019) Coronary artery disease, genetic risk and the metabolome in young individuals. Wellcome Open Res. <br> Howe,L.J., Battram,T., Morris,T.T., Hartwig,F.P., Hemani,G., Davies,N.M. and Smith,G.D. 
(2020) Assortative mating and within-spouse pair comparisons. bioRxiv. <br> <NAME>., McCartney,D.L., Patxot,M., Anchieri,L., Battram,T., Christiansen,C., Costeira,R., Walker,R.M., Morris,S.W., Campbell,A., et al. (2020) Bayesian reassessment of the epigenetic architecture of complex traits. Nat. Commun. <br> Jamieson,E., Korologou-Linden,R., Wootton,R.E., Guyatt,A.L., Battram,T., Burrows,K., Gaunt,T.R., Tobin,M.D., Munafò,M., <NAME>., et al. (2020) Smoking, DNA Methylation, and Lung Function: a Mendelian Randomization Analysis to Investigate Causal Pathways. Am. J. Hum. Genet. declaration: | <style> body { text-align: justify} </style> I declare that the work in this dissertation was carried out in accordance with the requirements of the University's Regulations and Code of Practice for Research Degree Programmes and that it has not been submitted for any other academic award. Except where indicated by specific reference in the text, the work is the candidate's own work. Work done in collaboration with, or with the assistance of, others, is indicated as such. Any views expressed in the dissertation are those of the author. 
<br> \bigskip \bigskip \bigskip \bigskip \bigskip Signed \bigskip \bigskip \bigskip \bigskip \bigskip Dated abbreviations: | \textbf{ALSPAC} - Avon Longitudinal Study of Parents and Children \newline \textbf{ARIES} - Accessible Resource for Integrated Epigenomics Studies \newline \textbf{AUC} - area under the curve \newline \textbf{BMI} - body mass index \newline \textbf{CCHS} - Copenhagen City Heart Study \newline \textbf{CI} - confidence interval \newline \textbf{DMP} - differentially methylated position \newline \textbf{DMR} - differentially methylated region \newline \textbf{EFO} - experimental factor ontology \newline \textbf{ENCODE} - Encyclopedia of DNA Elements \newline \textbf{EPIC} - European Prospective Investigation into Cancer and Nutrition \newline \textbf{EWAS} - epigenome-wide association study \newline \textbf{FDR} - false discovery rate \newline \textbf{GEO} - Gene Expression Omnibus \newline \textbf{GO} - Gene Ontology \newline \textbf{GoDMC} - Genetics of DNA Methylation Consortium \newline \textbf{GTEx} - Gene-Tissue Expression (consortium) \newline \textbf{GWAS} - genome-wide association study \newline \textbf{h\textsuperscript{2}} - narrow-sense heritability \newline \textbf{H\textsuperscript{2}} - broad-sense heritability \newline \textbf{h\textsuperscript{2}\textsubscript{EWAS}} - the proportion of trait variation captured by DNA methylation commonly measured in epigenome-wide association studies \newline \textbf{h\textsuperscript{2}\textsubscript{SNP}} - SNP-heritability \newline \textbf{HM450 array} - Illumina Infinium HumanMethylation450 BeadChip \newline \textbf{HMEPIC array} - Illumina Infinium HumanMethylationEPIC BeadChip \newline \textbf{HR} - hazard ratio \newline \textbf{IQR} - interquartile range \newline \textbf{KEGG} - Kyoto Encyclopedia of Genes and Genomes \newline \textbf{LD} - linkage disequilibrium \newline \textbf{LOLA} - Locus Overlap Analysis \newline \textbf{MCCS} - Melbourne Collaborative Cohort Study \newline 
\textbf{MR} - Mendelian randomization \newline \textbf{MRM} - methylation relationship matrix \newline \textbf{mQTL} - methylation quantitative trait loci \newline \textbf{NOWAC} - Norwegian Women and Cancer \newline \textbf{NSHDS} - Northern Sweden Health and Disease Study \newline \textbf{OR} - odds ratio \newline \textbf{PACE} - Pregnancy and Childhood Epigenetics (consortium) \newline \textbf{QC} - quality control \newline \textbf{REML} - restricted maximum likelihood \newline \textbf{ROC} - receiver operating characteristic \newline \textbf{SNP} - single nucleotide polymorphism \newline \textbf{SV} - surrogate variable \newline \textbf{SVA} - surrogate variable analysis \newline \textbf{TAG} - Tobacco and Genetics (consortium) \newline \textbf{TCGA} - The Cancer Genome Atlas \newline \textbf{TRICL-ILCCO} - Transdisciplinary Research in Cancer of the Lung and The International Lung Cancer Consortium \newline --- <!--chapter:end:index.Rmd--> # Introduction The regulatory processes that occur within human cells can differentiate healthy and unhealthy individuals. DNA methylation, a small chemical modification to the DNA, is a part of the molecular machinery governing these regulatory processes. Therefore, understanding how variation in DNA methylation across a population may manifest in observable phenotypic differences, may yield great medical and sociological insights. Technological advances over recent years have enabled measurement of DNA methylation at thousands of sites across the genome in hundreds and thousands of samples. Coupled with these advances have come epigenome-wide association studies (EWAS), which aim to identify and quantify relationships between DNA methylation and other human phenotypes. Hundreds of EWAS have now been performed and successful identification of these relationships vary widely by the trait being studied [@Joehanes2016; @Breitling2011; @Wahl2017; @Yang2013; @Reese2019]. 
Further, the interpretation of any identified signal is not straightforward [@Birney2016]. This thesis explores explanations for apparent differences between EWAS results and applies methods to aid interpretation and future study design for EWAS. In this chapter I present the historical interpretation of ‘information flow’ in human cells (the central dogma), describe DNA methylation in the context of regulatory processes that augment that information flow, discuss its potential for use in population level studies and describe the current state of EWAS research. Then I explain how we might be able to draw on the methods developed by geneticists to understand 1. what information has been gained from EWAS, 2. what information is left to gain from EWAS and 3. the causal nature of DNA methylation-trait associations identified in EWAS. ## Moving on from the central dogma of molecular biology {#central-dogma} To gauge how molecular mechanisms result in more observable phenotypes, it is important to understand how molecular machinery interacts. The central dogma of molecular biology was originally proposed by <NAME> [@Cobb2017; @CRICK1958] and described how information flowed from nucleic acids to proteins within cells (__Figure \@ref(fig:central-dogma)__). Crick postulated that information could flow from nucleic acids to proteins, but not from proteins to nucleic acids. By information, Crick was specifically referencing changes in polymer sequence. Although this is generally the process of information flow, it does not describe other complex interactions that impact function without changing polymer sequence. Post-translational and post-transcriptional modifications can influence the lifespan and function of proteins and RNA respectively [@Hafner2019; @Corbett2018; @Wang2014; @Filipowicz2008]. 
Proteins of the same polypeptide sequence can take on slightly different conformations when interacting with other cellular factors [@Alberts2017] and certain proteins (known as prions) can even alter the conformation of other polypeptides with the same sequence [@Colby2011]. Further, modifications to the DNA and to DNA-bound proteins may have a profound influence on the concentration of certain gene transcripts as well as the post-transcriptional splicing of transcripts [@Kornberg1999; @Gibney2010]. As mentioned, DNA methylation is one such modification that can occur. (ref:central-dogma-cap) __The central dogma of molecular biology__. The dogma stipulates that sequence changes (that is the addition, removal or mutation of elements) can only occur in the direction of the arrows. 1. DNA replication 2. Transcription 3. Translation 4. Reverse transcription 5. RNA replication 6. Direct DNA-protein translation. (ref:central-dogma-scap) The central dogma of molecular biology \begin{figure} {\centering \includegraphics[width=1\linewidth]{figure/01-introduction/central-dogma} } \caption[(ref:central-dogma-scap)]{(ref:central-dogma-cap)}(\#fig:central-dogma) \end{figure} The importance of these molecular alterations to phenotypic change is exemplified in the developmental stages of human life. Humans start as a single cell and after roughly nine months are transformed into a multicellular organism with trillions of cells, including hundreds of distinct cell types. As these cells arise from a single progenitor, they must contain identical genetic sequences (bar somatic mutations which occur at a low rate – recently estimated to be roughly one mutation per cell division [@Milholland2017]). The process by which the body is able to create such diversely functioning cells and tissues, must come from regulation of how the genetic sequence is read and from the regulation of its products. 
In addition to the intra-individual variation that makes the multicellular nature of humans possible, there is also inter-individual variation in molecular traits [@Garg2018; @Hughes2015]. DNA methylation is now being used widely by epidemiologists to try and understand how molecular changes across individuals, that do not involve direct sequence alterations, might result in variation amongst observable phenotypes in the population. The full rationale and details of these studies will be described later, but first I will introduce what is known about the role DNA methylation has within cells as this is necessary to understand how it may influence observable phenotypes. ## DNA methylation as part of the regulatory machinery {#dnam-as-part-of-regulation} DNA methylation is correlated with gene expression levels and has been hypothesised to contribute to gene regulation [@Suzuki2008; @Siegfried1999; @Bird2002; @Jones2012]. This is the main mechanism by which it is thought DNA methylation influences variability in phenotypes. However, DNA methylation is just one of many epigenetic marks that are involved in gene regulation. In this section I describe epigenetics and evidence gathered about the function of different epigenetic marks. ### Defining epigenetics The definition of epigenetics is much debated [@Greally2018]. In the 1940s the ‘epigenetic landscape’ was introduced by Waddington to describe how genes influence cell fates [@Stern2000]. Since then the term “epigenetics” has been used in many forms, so to avoid confusion, throughout the rest of the thesis, I will define epigenetics as: “the study of mitotically (and potentially meiotically) heritable alterations in gene expression that are not caused by changes in DNA sequence” [@Gibney2010]. In an extension to this, epigenetic marks therefore refer to chemical changes to the genome and genome-bound proteins that are mitotically heritable (i.e. 
changes that remain after cell division) and may influence gene expression without changing the DNA sequence. ### Histone modifications DNA within the nuclei of human cells is contained in chromatin. This chromatin consists of DNA strands wrapped around histone proteins in structures called nucleosomes, forming the "beads on a string" structures that can be viewed under a microscope [@Kornberg1974; @Li2011; @Olins2003]. This is necessary to allow DNA to be compressed enough to fit into nuclei [@Kornberg1974; @Li2011; @Olins2003]. Further, chromatin states can be manipulated to alter the access other proteins have to the DNA. If a region of the genome is in a state of heterochromatin then it is highly condensed, which prevents RNA polymerases accessing the DNA and transcribing its code [@Li2011; @Olins2003]. Euchromatic DNA is less condensed, facilitating easier transcription. Using the beads on a string analogy, one can think of heterochromatin as being when the string is wrapped tightly around the beads, causing the beads to be in close proximity of each other, leaving little or no free string, whereas euchromatin, the beads will be separate and there will be stretches of free string that can be accessed. Without histones this packaging of DNA into chromatin would not be possible [@Kornberg1974; @Olins2003]. The "beads" are histone octomers - four unique histone proteins each present twice in the nucleosome. Post-translational modifications can occur to any of the histone monomers and these have been associated with both positive and negative changes in gene expression [@Bannister2011; @Berger2007; @Jenuwein2001]. Histone modifications are numerous and complex in nature. 
To briefly describe the complexity, there are at least nine types of histone modifications that can occur [@Kouzarides2007], each of the histone monomers can be modified across many different sites [@Kouzarides2007; @Berger2007; @Jenuwein2001], and for any one site multiple of the same modification can occur [@Berger2007; @Jenuwein2001]. It is the combination of modifications across all histones that plays a role in gene expression regulation [@Berger2007; @Jenuwein2001]. Furthermore, histone modifications are subject to rapid change upon environmental stimulus to help induce or repress gene expression [@Bannister2011]. Therefore, considerable variation in histone states between individuals at the same sites and in the same tissue might exist, and this could explain some variation in health outcomes. However, the complexity of histone modifications, and the practical difficulties in collecting or assaying samples to assess these epigenetic marks, remains a barrier to their wide-spread measurement for use in population-based analyses [@Birney2016]. They may become far more prominent in the future as our understanding and ability to measure the modifications in a meaningful way increases. ### DNA methylation {#dna-methylation} DNA methylation is the addition of a methyl group to DNA. This primarily occurs at the 5' cytosine where a cytosine precedes a guanine in DNA sequence (CpG site), however the DNA may be methylated at other sites [@Jones2012]. Little is known about the role of non-CpG site DNA methylation in humans and current EWAS tend to only measure CpG methylation. Two papers initially suggested that the function of this epigenetic mark was to repress gene expression [@Holliday1975; @Riggs1975]. Since then, association with various important intracellular processes such as X-inactivation, genomic imprinting and suppression of transposon action have been elucidated [@Riggs1975; @Bell2000; @Yoder1997]. 
This genomic modification is also conserved amongst a wide variety of species, including various bacteria, plants, fungi, and mammals [@Casadesus2006; @Cokus2008; @Rountree1997; @Jones2012]. Interestingly, one of the hypothesised functions of DNA methylation – protection against ‘parasitic genomic sequences’ – is common to both human cells and bacterial cells [@Jones2012; @Casadesus2006]. However, the relationship with gene expression may not be the same in prokaryotic organisms [@Casadesus2006]. Despite the abundance of research conducted in the area in the last 50 years, the role DNA methylation plays in regulating gene expression within human cells is not fully understood and research is still ongoing. One thing research has revealed is that the location of DNA methylation is important to its relationship with gene expression. CpG sites are not randomly distributed throughout the genome but are often found in clusters [@Jones2012] and it is thought that methylation and de-methylation of CpG sites in groups, is what drives their association with regulatory function [@Jones2012]. Clusters known as ‘CpG islands’ are found at the majority of protein coding genes and constitute small areas of the genome that are enriched for CpG sites [@Jones2012; @Illingworth2009]. The location of these islands as well as other CpG sites relative to genes and other regulatory elements is also of importance. Several studies have shown that higher levels of DNA methylation at transcription start sites tends to be associated with lower levels of gene expression [@Jones2012; @Ando2019; @Deaton2011], but gene body DNA methylation is positively correlated with expression (__Figure \@ref(fig:dnam-functions)__) [@Wolf1984; @Hellman2007]. This suggests that regulation of DNA methylation in clusters at specific sites relative to genes is important in determining observed relationships with gene expression. 
Supporting this, there are clear biological processes that regulate DNA methylation at nearby sites together, for example, CpGs at transcription factor binding sites can be de-methylated as a group when the transcription factor binds [@Zhu2016]. Further, nearby sites are often correlated [@Jaffe2012; @Suderman2018]. However, there is no evidence to suggest that neighbouring sites do indeed act in tandem or whether it is likely one site from the group is driving regulatory function. This is something I explore in __Chapter \@ref(h2ewas-chapter)__. These strong associations between DNA methylation and gene expression do not necessarily mean that the addition or removal of methyl groups will actively impact gene expression. Elucidating the causal nature of the association between DNA methylation changes, at single sites or across regions, and gene expression has been fraught with difficulties and has often provided conflicting results. One study showed an enzyme that catalyzes the addition of methyl groups to the DNA, DNA methyltransferase 3A, is required in haematopoeitic stem cells for them to differentiate, suggesting gene expression changes required for differentiation were not possible without addition of methyl groups to the DNA [@Challen2012]. However, studies have provided evidence that DNA methylation is unlikely to initiate the ‘silencing’ of gene expression and may occur at transcription start sites of genes after they’ve already been repressed [@Lock1987; @Ohm2007]. To further complicate things, if DNA methylation does influence gene expression, the mechanism of action is unclear and may depend on the gene being examined. One study showed the presence of DNA methylation at the binding sites of the transcription factor, _MYC_, was inversely associated with its binding [@Prendergast1991], but another study suggested the presence of DNA methylation didn’t have the same impact on the binding of the transcription factor, _SP1_ [@Harrington1988]. 
Although the body of work presented in this thesis does not aim to explore if and how DNA methylation influences gene expression, it is important to note the relationship between the two isn’t clear when thinking of the implications of DNA methylation-trait associations [@Hemani2017]. This will be discussed further in the following sections. (ref:dnam-functions-cap) __Simplified diagrams of the associations between DNA methylation and gene regulation__. In _a_, CpG sites are methylated at the promoter region, but not in the gene body, which is associated with lack of gene expression. In _b_, the opposite is occurring. (ref:dnam-functions-scap) Simplified diagrams of the associations between DNA methylation and gene regulation \begin{figure} {\centering \includegraphics[width=1\linewidth]{figure/01-introduction/dnam-gene-expression} } \caption[(ref:dnam-functions-scap)]{(ref:dnam-functions-cap)}(\#fig:dnam-functions) \end{figure} ## Population-based studies of DNA methylation associations {#dnam-phs} The importance of DNA methylation in disease has already been established in rare developmental disorders caused by aberrant imprinting patterns [@Wrzeska2004; @Nicholls2000]. In the past 15 years, technological advances have enabled researchers to measure DNA methylation across thousands of sites in the genome at low cost [@Rakyan2011; @Relton2010]. This provides the opportunity to study the relationship between DNA methylation and common traits with more complicated aetiologies, where large samples may be needed to ascertain reliable associations. Despite years of these studies, the relationship between DNA methylation and complex traits is unclear. In this section I discuss the study of complex traits, the appeal of studying DNA methylation for public health, introduce the most common study design to assess DNA methylation-trait associations, EWAS, and overview some successes and complications of the work. 
### Studying complex trait associations

Using DNA methylation in population-based studies comprises studying its association with complex traits, which are phenotypes that are caused by a myriad of factors. The phenotypic value of any complex trait can be partitioned into its corresponding genetic ($G$) and environmental effects ($E$) like so

\begin{equation}
z = G + E
(\#eq:phenotypic-values)
\end{equation}

Both the genotypic and environmental values can further be partitioned into various effects [@Lynch1998]. As $E$ can be defined as any variation in $z$ that is not explained by $G$, this also includes stochastic processes, which may account for a large proportion of $E$. It’s important to remember that DNA methylation itself is a complex trait and thus methylation of DNA at a given CpG site is the result of a variety of genetic and environmental factors [@Jones2012]. However, unless explicitly stated, when discussing complex traits, I will be referring to human health and socio-economic outcomes rather than molecular phenotypes such as DNA methylation and gene expression.

Phenotypic values for any complex trait will vary across the population as each individual has a unique genomic sequence (except monozygotic twins) and is exposed to a variety of different environments, both external and internal. Studying the associations between complex traits and other measures across the population can help deduce the aetiology of that trait, but phenotypic values may also covary with measures that have no implication for that trait's aetiology. Identifying these covariations can still be useful for phenotype prediction.

It should be noted that different fields of study may have different views on the importance of certain factors for complex traits.
As discussed, there is evidence that DNA methylation may be inconsequential to gene expression changes [@Lock1987; @Ohm2007], making it an unattractive measure to study when interested in the aetiology of cellular phenotypes in molecular biology. For epidemiological studies, understanding how DNA methylation covaries with complex traits could help provide useful predictors and despite the difficulty in untangling the contributions of $E$ to $z$, could yet yield insights into the underlying biology of complex traits. This will be explained in more detail in the coming sections.

### The potential value of DNA methylation measures to epidemiologists {#appeal-of-dnam}

Epigenetic modifications are of potential interest to those studying any phenotype. Arguably, epigenetics could be required at some level for all phenotypic changes and, if causal, could be the difference between individuals who develop disease and those who do not [@Relton2010]. Further, epigenetic marks are modifiable, which means theoretically it would be possible to prevent or treat disease by altering such epigenetic patterns of individuals [@Weaver2004]. However, there are large practical issues with targeting DNA methylation with pharmaceutical agents, which are discussed in more detail in __Section \@ref(problems-for-ewas)__. Even if targeting epigenetic marks is not easy, as long as it is possible to observe them, they could be used as diagnostic biomarkers and predictors [@Birney2016; @Relton2010; @Koch2018; @Hentze2019]. Thus, the ability to measure, and the research into understanding epigenetic mechanisms, could have broad consequences for public health.

A major difference between DNA methylation and other epigenetic marks is that DNA methylation is more stable.
Enzymes do exist that can actively de-methylate the DNA, for example the ten-eleven translocation (TET) enzymes, but cell division or excision of the nucleotide is required for full de-methylation of a DNA molecule [@Cortellino2011; @Kohli2013]. Biologically, this suggests DNA methylation might be involved in long-term repression of gene expression, which is thought to be the case for X-inactivation [@Venolia1983], and practically it makes studying the epigenetic mark easier because stability ensures the marks are more resistant to changes after collection of samples. Also, even though it is not clear that DNA methylation precedes gene expression regulation, the regulatory processes that govern whether genes are transcribed are linked. The totality of epigenetic marks and the chromatin structure of a genomic region may be thought of as the "epigenetic state" of that region. It is this state and not one mark that is likely responsible for the regulation of gene expression [@Bonasio2010; @Allis2016]. However, as the marks correlate with one another as well as chromatin state, one may be able to proxy this state by measuring a limited number of the marks. There are known examples of how DNA methylation tends to associate with other epigenetic marks, including positive correlation with the histone modification H3K9me3 [@Jones2012] and histone deacetylation [@Jones2012; @Wade2001]. This means measuring DNA methylation may capture the epigenetic state, even if addition or removal of methyl groups to the DNA would have little impact on gene expression. Recently, it has been shown that epigenetic marks can be used to predict each other with high accuracy [@Ernst2015], adding weight to the argument DNA methylation measurements capture far more than just DNA methylation itself. 
### Epigenome-wide association studies {#ewas} When considering designing a study to assess whether one trait associates with another, usually there are _a priori_ hypotheses or evidence that suggest studying the association would be of relevance to scientific understanding and public health. For studying the association between DNA methylation and complex traits, there is an abundance of evidence suggesting this could yield results of relevance. As mentioned previously, no phenotypic change is possible without some molecular change, DNA methylation is a relatively easy molecular measurement to make and it is known to highly correlate with an important component of cellular regulation – gene expression. Further, DNA methylation has the potential to improve upon the prediction of complex traits beyond what can be done with current epidemiological and clinical measures [@Relton2010] as well as with genetic variants [@Shah2015; @Reed2020]. EWAS are the most common study design for assessing the association between DNA methylation and a complex trait. They typically involve measuring hundreds of thousands of DNA methylation sites across the genome in a case-control or cohort setting and using linear models to assess the association between DNA methylation and the trait of interest. Ideally, in every sample used in an EWAS, DNA methylation would be measured across all sites in the genome. Unfortunately, this is not currently possible and sequencing technologies that offer something similar are often very expensive. There are three alternatives available to measuring DNA methylation for studies assessing the relationship between the epigenetic mark and complex traits of interest. Firstly, one could sequence a small portion of the genome if this section is of particular interest. This candidate gene model was employed by the genetics community before the era of genome-wide association studies. 
It led to a large number of false positives due to poor statistical practice and publication bias [@Bosker2011; @Hirschhorn2002; @Munafo2006]. Therefore, this candidate gene approach is unlikely to be profitable unless the genes targeted already have very good evidence for epigenetic variation with the trait of interest. As complex traits are polygenic [@Lynch1998] and we have incomplete knowledge of their underlying biology, a hypothesis-free approach, that samples from as much of the genome as possible, is preferable. Secondly, measuring DNA methylation on repeat sequences of the genome, such as long interspersed nuclear elements (LINEs) and short interspersed nuclear elements (SINEs), can provide estimates for global DNA methylation changes [@Yang2004]. These measurements indicate if a trait is related to large perturbations of DNA methylation across the genome, but give little mechanistic insight into what effects these changes may be having, as methylation at functional genes is not measured. Thirdly, one could employ an array approach that covers DNA methylation genome-wide at selected sites. This last approach is the most common for population-based studies as it enables measurement of DNA methylation at hundreds of thousands of sites at a relatively cheap price per sample [@Dedeurwaerder2011]. Without capabilities to measure methylation at every site in the genome, one must decide which sites are worth measuring. Current commonly used array technologies include the Illumina Infinium HumanMethylation450 BeadChip (HM450 array) and the Illumina Infinium HumanMethylationEPIC BeadChip (HMEPIC array), which measure DNA methylation at over 450,000 sites and over 800,000 sites respectively. They cover roughly 1.5-4% of CpG sites in the genome [@Lovkvist2016]. 
In order to capture what was thought to be the most relevant DNA methylation sites in relation to complex traits, the probes were chosen to map to 99% of RefSeq genes and predominantly map to the promoter regions of these genes [@Illumina2012]. One reason for this was to potentially help improve interpretation of EWAS findings [@Illumina2012]. Identifying methylation changes at a specific gene suggests investigating the relationship of that gene and the complex trait further may yield interesting results, whereas interpreting complex trait associations with DNA methylation at a site in a relatively uncharacterized region of the genome would be more difficult for obvious reasons. The strongest association between DNA methylation and gene expression comes at CpG sites clustered around transcription start sites [@Jones2012], thus enrichment of probes targeting promoter regions may also help with interpretation of EWAS. To measure DNA methylation, these array technologies, as well as sequencing techniques, often begin with bisulphite conversion of the DNA [@Rakyan2011]. This converts any un-methylated cytosine base to uracil and leaves methylated cytosines unchanged [@Li2011]. The DNA samples can then be distributed amongst the array and the probes on the array will bind cytosines present at the regions for which their sequence corresponds [@Li2011]. If a probe binds its target cytosine, then it will fluoresce, and this fluorescence can be quantified to give ‘beta values’. Beta values range between zero and one, with zero corresponding to no methylation across all DNA molecules (in a sample) at the target cytosine and one corresponding to methylation across all DNA samples at the target cytosine. DNA methylation is by nature a binary feature, but mixtures of DNA molecules (i.e. multiple cells) mean that a continuous variable is generated unless single cell procedures are adopted. 
This study design has been widely adopted over the past ten years and the relationship between a plethora of traits, from smoking to anthropometric measures to childhood adversities, and DNA methylation has now been studied [@Joehanes2016; @Breitling2011; @Wahl2017; @Yang2013]. There are also large consortia that are pooling samples to gain power for these studies, for example the Pregnancy and Childhood Epigenetics (PACE) consortium [@Felix2018]. A few traits have been identified as being associated with large variations in DNA methylation, one of which is smoking, where strong associations across thousands of sites have been identified and many replicated in EWAS of smoking [@Joehanes2016]. It has been revealed that the association at some sites is driven by smoking causing changes in DNA methylation and over time these DNA methylation changes may be (mostly) reversible by giving up smoking [@Philibert2016]. Also, DNA methylation can be used to predict smoking status, and one study has provided evidence that DNA methylation of a single locus can predict smoking status with high accuracy [@Philibert2020]. Another measure shown to relate to large variation in DNA methylation across the genome is age. Similarly to smoking, DNA methylation makes a highly accurate predictor for age and is thought to be able to establish whether rate of ‘biological’ aging is different from chronological aging [@Horvath2013; @Jones2015; @Horvath2018]. These studies have shown that large perturbations in the DNA methylome can be related to complex traits and highlight the potential for EWAS to identify accurate predictors for these traits. ### Problems for EWAS {#problems-for-ewas} Interpreting the associations discovered in EWAS, with regards to understanding trait biology and development of interventions that target DNA methylation, require two key components. Firstly, the causal nature of associations between DNA methylation and the complex trait of interest, i.e. 
do the associations reflect epigenetic perturbations that are impacting complex trait variation, is the reverse true, or are the effect estimates biased? Secondly, the cellular consequence of DNA methylation variation at the identified genomic regions. As postulated previously, DNA methylation may be marking an epigenetic state and alterations to the whole state could be required to impact phenotypic variation. Elucidating the truth from these components is extremely difficult. Therefore, despite the promise of understanding the underlying biological processes related to traits, studying the relationship between DNA methylation and complex traits provides many practical difficulties that often make the results of EWAS hard to interpret [@Birney2016]. #### Confounding {#confounding} As discussed with the example of smoking EWAS, DNA methylation changes may come as a response to complex traits. This inherently leads to two issues within EWAS that are pervasive across observational epidemiology: confounding and reverse causation. Having EWAS identify molecular changes that are caused by variation in the trait of interest (reverse causation) is not necessarily a limitation. In fact, this allows for downstream consequences of a trait to be explored and hypotheses regarding molecular mediation to be tested. Confounding amongst EWAS is a far bigger issue. This is where the traits of interest share a common cause, which can generate spurious associations and bias effect estimates, hindering correct interpretation of the association between traits. For example, smoking is a common cause of both DNA methylation and lung cancer risk; thus, without adequate adjustment for smoking behaviour in an EWAS of lung cancer, one might incorrectly infer that changes in DNA methylation that are the result of smoking cause an alteration in lung cancer risk.
Complex traits (including molecular traits) are strongly correlated with each other, often in clusters, which can lead to large amounts of measured and unmeasured confounding being present in EWAS [@Lawlor2004; @Relton2012]. Of course, in order to produce therapies to prevent or treat disease by altering DNA methylation or other parts of the epigenome, causality must be established. Therefore, problems of confounding must be overcome in EWAS to use these results to start developing methods of targeting DNA methylation changes; this is discussed further in __Section \@ref(establishing-causality)__. #### Cell type heterogeneity {#cell-type-heterogeneity} As discussed, epigenetic factors guide differentiation of a single pluripotent cell to hundreds of cell types in human development. As these cell types can have large differences in morphology and function, it is clear that epigenetic marks, including DNA methylation, will vary between cell types [@Houseman2012; @Jaffe2014]. This poses two distinct problems for EWAS. Firstly, when collecting samples to measure DNA methylation, unless cells are purified, a pool of cell types will be present in the samples, each with their own distinct DNA methylation patterns. This can lead to issues of confounding by cell type. For example, in a case-control study, cases may be more likely to have increased numbers of CD4+ Th2 immune cells and these cells may on average have a higher level of DNA methylation at site X. In this scenario, if one were to take blood cells, measure DNA methylation, and assess the association between DNA methylation and the trait of interest, one might find an association between DNA methylation at site X and the trait, but this may just be a function of the increased number of CD4+ Th2 cells present in cases and site X may have no causal relationship with the trait itself.
In reality, there are often thousands of CpG sites that have differential methylation between cell types, which could result in a host of biased effect estimates. There have been efforts to try and account for cell type heterogeneity in EWAS [@Houseman2012; @McGregor2016; @Teschendorff2017], but to completely prevent its confounding effects, cells should be collected from a homogenous tissue or purified. In addition to generating false positives, this confounding could mask true effects found within specific cell types. The second problem arising from cell-type specific patterns of DNA methylation is the uncertainty that the cell type being studied is one in which DNA methylation covaries with the trait of interest. Non-invasive cells to collect, such as blood, skin, and saliva, are common amongst epidemiological studies, but it is unclear whether EWAS in these studies are relevant to a large proportion of complex traits. This is studied, with regards to blood, in __Chapter \@ref(h2ewas-chapter)__. Studies have actually shown high levels of correlation between DNA methylation across cell types at many CpGs [@Forest2018], but it is unknown whether the correlated sites are important to trait variation. Further, this correlation may complicate interpretation of EWAS findings with regards to translational potential. Associations may be found in blood because those CpGs correlate with causal CpGs in another tissue, such as the brain, in this situation an intervention on DNA methylation levels in blood will fail to illicit the expected response and it may transpire that targeting the causal CpGs in the correct tissue is unfeasible. #### Measuring DNA methylation {#measuring-dna-methylation} DNA methylation arrays face certain technical issues. Some probes map to single nucleotide polymorphisms (SNPs), which can lead to inadvertent detection of genetic variation, others map to multiple sites across the genome (i.e. are non-specific) and others may cross-hybridise. 
Batch effects can also substantially bias results in EWAS [@Price2018]. Considerable effort has been made to characterise the arrays to identifiy potentially faulty probes [@Zhou2017; @Naeem2014] and methods developed (some originally for use in RNA-based studies) to help correct for batch effects [@Leek2007; @Ritchie2015; @Perrier2018]. In __Chapter \@ref(properties-of-ewas)__ I explore the extent to which batch effects tend to be removed in current EWAS and whether EWAS results are enriched for potentially faulty probes not removed by study authors. #### Complexity of regulatory mechanisms EWAS identify single sites in the genome for which DNA methylation variation is associated with a trait of interest. As discussed, DNA methylation at a single site will likely be correlated with DNA methylation at neighbouring sites and other nearby epigenetic marks. This makes inferring mechanism of action very difficult. Differentially methylated region (DMR) analysis is often employed, which aims to determine if multiple neighbouring sites share an association with the trait of interest with the same direction of effect [@Jaffe2014; @Suderman2018]. These give evidence as to whether the sites covary similarly with the trait of interest, but do not provide evidence that the sites are acting independently or not. There are ways to circumvent the issues of biological complexity, but without additional gene expression data these often involve assuming the genes immediately adjacent to DNA methylation changes are of importance to the trait. However, no systematic evaluation of whether this assumption holds true for the majority of cases has been conducted. #### Treatments {#treatments} Currently there are therapies used in the clinic that target enzymes responsible for epigenetic alterations, for example DNA methyltransferase inhibitors and histone deacetylase inhibitors [@Piekarz2009; @Nebbioso2018]. 
They are primarily used to treat cancers, but as with many cancer treatments, are highly toxic. These therapies impact the epigenome globally and do not target any specific regions of the genome. This makes them highly undesirable for most diseases and as of yet there are no epigenetic therapies targeting specific regions of the genome. Methods, such as adapting CRISPR-cas9 enzymes, are being used in laboratories to alter DNA methylation at specific sites [@Pickar-Oliver2019], and some have even achieved _in vivo_ targeted epigenetic modulation in mice [@Liao2017]. However, it is unclear whether these techniques can be scaled up for clinical use in humans and how long it may take to overcome the various complications. In summary, there is great potential for EWAS to identify sites in the genome that could be targeted for treatment, but there are several challenges still to overcome. A great importance should be placed on using the data available to inform future designs of EWAS to maximise the potential of these studies. ## Using methods from statistical genetics to help inform future EWAS {#genetics-in-ewas} In order to remedy some of the problems EWAS face and to help understand whether the "experiment" of measuring DNA methylation across many epidemiological cohorts and studies has been successful, we can borrow ideas and methods developed in statistical genetics and genetic epidemiology. Statistical genetics is concerned with ascertaining the connection between traits and genetic variation. Germline genetic variants, the units of measurement for genome-wide association studies (GWAS), are fixed from conception and the association between these variants and complex traits tends to be unconfounded [@DaveySmith2003; @DaveySmith2014]. Therefore, the properties of these variants and DNA methylation are different and one would expect the genetic and epigenetic architectures of complex traits to differ. 
However, genetic epidemiologists have had to overcome problems to help interpret GWAS, which are also pertinent to EWAS. These include understanding how much trait variation is captured by all the variants used in the study and how to infer function from genetic variation. Further, cataloging genetic associations has proven an invaluable resource for the research community [@Buniello2019] and these variants can be used as tools to augment the understanding of DNA methylation-trait associations. In this section I briefly describe some examples of these efforts and explain how they might be adapted to help inform future EWAS. ### Catalogues of genome-wide associations {#gwas-catalog} Cataloging genome-wide associations has a broad range of applications for researchers, from replication of GWAS, to identifying overlapping GWAS signals between traits, to pooling the data to try and understand the genetic architecture of complex traits as a whole. There are multiple databases now available to the genetic epidemiologist community that have catalogued these associations. These include manually curated databases of publicly available GWAS data, The GWAS Catalog, [@Buniello2019] and the IEU OpenGWAS Project [@Elsworth2020; @Hemani2018]. A corollary database for EWAS is likely to also provide value for epigenetic epidemiologists. At the very least it would provide an easy tool to assess whether results replicate. Catalogues such as EWASdb [@Liu2019] and the EWAS Atlas [@Li2019] are currently available but fall short of some key researcher requirements including ease of use and access to full summary statistics. The development of a new database, The EWAS Catalog, is the focus of __Chapter \@ref(ewas-catalog)__. ### Total variance captured by all sites measured genome-wide {#heritability} In addition to cataloguing the information gained, efforts also need to be made in understanding the epigenetic architecture of complex traits to enable interpretation of these data. 
As discussed, the phenotypic value of any trait can be partitioned into genetic effects and environmental effects. Thus, the variation of phenotypic values are the function of the genetic variance ($\sigma^2_{G}$) and environmental variance ($\sigma^2_{E}$), \begin{equation} \sigma^2_{z} = \sigma^2_{G} + \sigma^2_{E} (\#eq:phenotypic-variance) \end{equation} The environmental effects could be split into a large number of different factors, most of which would negligibly influence the phenotypic variance. Knowing the extent to which each factor contributes to phenotypic variation is important for two reasons. Firstly, if a factor substantially influenced phenotypic variation then by modifying that factor one could modify the phenotypic values across a population. Secondly, one can identify which factors may best predict phenotypic values within a population. Currently, it is unknown how much variation in complex traits is attributable to DNA methylation changes. As discussed, assessing whether DNA methylation affects complex traits is difficult, but understanding whether they covary can still help quantify its total predictive capacity. Further, understanding how DNA methylation, as measured in contemporary EWAS, covaries with complex traits can help give insight on the validity of current study designs. Many EWAS have been conducted and few have identified strong associations that capture substantial complex trait variation. Tissue types used and sites measured may partially explain this. It is a possibility that DNA methylation might not covary with many complex traits, or it could be that the associations between DNA methylation at individual sites are numerous, but the associations at each site are too small to detect with current sample sizes. 
By combining information across all sites measured, one could quantify the total variation captured by DNA methylation for a complex trait of interest and so could properly assess the utility of association studies using the tissue types and sites measured. Methods have already been developed to assess the total contribution of genetic variants measured in GWAS to complex trait variation [@Yang2010; @Speed2012] and in __Chapter \@ref(h2ewas-chapter)__, I repurpose these methods developed to estimate the proportion of phenotypic variance correlated with DNA methylation across a range of phenotypes. ### Inferring biology from signals {#inferring-biology-from-signals} As discussed, the complexity of cellular processes makes it difficult to infer what consequences a change in methylation at a specific site may have. Similarly, for the majority of SNPs identified in GWAS, the functional change that relates to an association between genetic variation at that site and the trait of interest is unclear. Complex traits themselves are the result of a large number of complicated biological pathways that are determined by potentially thousands of gene products. It is often assumed that the signal from GWAS highlight genomic regions of importance to the trait and thus as a step to investigate the nature of the signal, sites identified are mapped to nearby genes. These genes can then be mapped to pathways and gene set enrichment analysis performed to assess whether the genes identified are present in any particular pathways more than expected by chance. This method has been adopted by epigenetic epidemiologists for use to examine EWAS signal [@Phipson2016]. Given the DNA methylation probes on contemporary arrays were chosen specifically based on their proximity to protein coding genes, this gene mapping technique may actually be more valid for CpG sites. 
Establishing causality from DNA methylation signal is difficult (See __Sections \@ref(dnam-phs) and \@ref(establishing-causality)__). Thus, when applying gene set enrichment analyses to identify prominent pathways in EWAS signals, the pathways identified may be downstream consequences of one or many confounders rather than of aetiological relevance to the trait of interest. Further, the EWAS signals, and therefore pathways they might influence, may be a consequence of trait variation. This is important to remember when interpreting the results of such an analysis, but it does not render the results useless. There is a huge body of work that characterises gene action and relationships of this gene action with various traits. By mapping EWAS signals to genes and pathways, a path between the trait (or a confounder) and changes in DNA methylation might become clearer. One example of this comes with EWAS of smoking, that have consistently identified DNA methylation at the _AHRR_ gene [@Joehanes2016; @Philibert2012; @Shenker2013; @Zeilinger2013; @Bojesen2017]. This gene is known to play a role in handling toxic substances found in tobacco smoke [@Haarmann-Stemmann2006; @Larigot2018]. Thus, large changes in DNA methylation related to this gene points towards epigenetic changes at that locus influencing the cellular response to smoking. This shows, that despite difficulties in interpreting EWAS findings and subsequent pathway analysis, EWAS can actually add to the pool of information about underlying trait biology when used in conjuncture with other evidence. Although, it is important to note this is one of many sites identified in smoking EWAS and there are plenty of sites for which changes in DNA methylation are yet to be explained. Understanding both the causes and consequences of complex traits are pertinent to intervening on health outcomes. 
As EWAS has the potential to identify both, it could identify important facets of trait biology missed by GWAS; however, it is unclear whether the analogous gene set enrichment design adopted by EWAS is currently adding to the information discovered by GWAS.
#### Mendelian randomization {#mr-01} One method that aims to mitigate confounding and reverse causation is Mendelian randomization (MR) [@DaveySmith2003; @DaveySmith2014; @Richmond2016], which uses genetic variants as proxies for the exposure of interest in an instrumental variable framework (illustrated in __Figure \@ref(fig:mr-diagram)__). Using genetic variants as instruments has the advantage that the direction of effect will always be from instrument to exposure and not vice versa, making interpretation of the studies simpler. Furthermore, unlike environmental phenotypes, that tend to be highly correlated and clustered into groups, genetic variants associated with a trait tend to be unconfounded [@DaveySmith2003; @DaveySmith2014]. In the absence of assortative mating, genetic variants should be distributed randomly across the population, so in effect those grouped by genotype should exhibit differences in exposure, but confounding factors should not differ between genotype groups [@DaveySmith2014]. Assortative mating has been reliably shown to occur with some traits such as social behaviours and anthropometric measures [@Silventoinen2003; @Maes1997; @Eaves1981]. Assortment tends to occur on visible social factors and so intentional assortment based on DNA methylation profiles is very unlikely. However, DNA methylation may associate with factors that are assorted on, for example alcohol consumption [@Howe2019; @Dugue2019], which may lead to unintentional assortment on DNA methylation profiles. The impact this may have on MR studies using DNA methylation has not been explored, and this analysis is beyond the scope of this thesis but is something that should be noted when assessing the reliability of such MR studies. #### Availability of data for MR Another advantage of MR is the data it uses. 
Thousands of GWAS have been conducted giving researchers ample instruments for a wide variety of traits and many of these instruments are easily accessible through databases such as the GWAS Catalog [@Buniello2019] and IEU OpenGWAS Project [@Elsworth2020; @Hemani2018]. Furthermore, it isn’t necessary to use individual-level data to conduct MR studies; summary statistics from GWAS are all that is needed to provide data in a two-sample MR framework [@Inoue2010; @Pierce2013]. This is especially valuable to conducting MR studies using DNA methylation data as DNA methylation is not widely measured across cohorts and case-control studies. Thus, without a method to combine summary data from both GWAS of DNA methylation and GWAS of other complex traits, well-powered MR studies would not be possible to assess the potential effect of DNA methylation on complex traits (and _vice versa_). #### Assumptions of MR In order for MR analyses to be valid, they must satisfy three instrumental variable assumptions, these are illustrated in __Figure \@ref(fig:mr-diagram)__. Testing assumption one, the instruments associate with the exposure of interest, is simple, but the other two assumptions cannot technically be proven to be true. Horizontal pleiotropy, where genetic variants associate with more variables than just the exposure of interest, can lead to violations in assumptions two and three. Ideally, MR would be performed in the context where the genetic effect on the exposure had been characterised such that the mechanism of action was understood clearly. This would help give evidence against assumptions two and three being broken. Unfortunately, this is rarely possible. However, a plethora of methods have now been developed to test for pleiotropic effects, given the exposure of interest has multiple independent genetic variants reliably associated with it. (ref:mr-diagram-cap) __Mendelian randomization__. 
Mendelian randomization can be used to test the causal nature of exposure-outcome relationships provided the assumptions are met. Assumption 1. There is an association between the instrument and the exposure. Assumption 2. There are no associations between the instrument and outcome, except through the exposure. Assumption 3. The instrument is not associated with any factors that confound the exposure-outcome relationship. (ref:mr-diagram-scap) Mendelian randomization \begin{figure}[!h] {\centering \includegraphics[width=1\linewidth]{figure/01-introduction/mr-diagram} } \caption[(ref:mr-diagram-scap)]{(ref:mr-diagram-cap)}(\#fig:mr-diagram) \end{figure} #### Applying MR in a DNA methylation context MR can be applied to studies of DNA methylation by using methylation quantitative trait loci (mQTL), genetic variants associated with changes in DNA methylation levels, as proxies for DNA methylation variation [@Relton2010; @Relton2012; @Richardson2018]. As mentioned previously, using a two-sample MR framework is especially useful to help increase power for these studies [@Relton2010]. For each DNA methylation site few independent mQTLs have been identified [@Gaunt2016], which prevents the use of various tests to examine whether the instruments are likely to be pleiotropic. Further, without reliable associations between SNPs and DNA methylation at some sites, MR cannot be conducted. Both cis-mQTLs (mQTLs within 1Mb of the DNA methylation site) and trans-mQTLs (mQTLs over 1Mb away from the DNA methylation site) have been identified in GWAS of DNA methylation variation. As genetic architecture of DNA methylation changes is also not well understood, the mechanism of action for each mQTL can only be speculated at present. Cis-mQTLs are thought to be less likely to be pleiotropic than trans-mQTLs as the mechanism of action seems more likely to be related to the binding of regulatory machinery that may influence DNA methylation levels [@Relton2012; @Neumeyer2020]. 
For example, a genetic variant may decrease the affinity of a transcription factor for that region and so the transcription factor will bind less frequently and/or for a shorter period, this would lead to increased methylation at that site [@Johnston2019]. On the contrary, the mechanism of trans-mQTL action, especially those on separate chromosomes to the DNA methylation site of interest, is more likely to be pleiotropic [@Neumeyer2020], for example a trans-mQTL could influence gene expression of a transcription factor that binds many sites and alters their DNA methylation [@Bonder2017], this would make the trans-mQTL associate with multiple DNA methylation sites. Therefore, if one limits mQTLs to those in cis, this gives greater confidence that horizontal pleiotropy isn’t biasing results. Due to the complexity of cellular regulatory mechanisms (__Section \@ref(dnam-as-part-of-regulation)__), it may be impossible to identify the exact cause of changes in complex traits, even with replicated MR results that give strong evidence of an effect of DNA methylation on a complex trait. As mentioned, DNA methylation varies both temporally and across cell types. If the instruments used to represent DNA methylation are viewed as influencing life-time differences in DNA methylation between individuals, then temporal variation can be ignored. However, cell type-specific effects are likely to occur for some mQTLs. In some cases genetic variation may influence proximal DNA methylation by altering the binding affinity of transcription factors for that region. In cells for which chromatin structure prohibits binding of transcription factors to that region, genetic variation would have no impact on DNA methylation. Thus, genetic variation would have an impact with DNA methylation in some cells but not others. 
With all this in mind, it is important to maintain the idea that making strong conclusions of causality in the context of DNA methylation is difficult, but triangulating evidence from multiple sources could be key to understanding the role of DNA methylation in underlying trait biology [@Lawlor2016]. Such evidence can come from different sources of statistical methodology that can be used to assess causality in observational studies [@Pearl2010; @Hernan2016; @Hernan2018]. One that has been applied in EWAS includes taking advantage of temporal measurements [@Baglietto2017]. If DNA methylation is measured before the trait of interest, then chances of reverse causation are greatly diminished, although confounding may still be an issue. DNA methylation associations may also be tested across different, relevant tissue types and molecular biology can be used to augment epidemiological evidence. Tools exist to experimentally manipulate DNA methylation at specific regions of the genome in cultured cells or in model organisms [@Pickar-Oliver2019; @Liao2017]. Using a negative control design, one could follow up any findings from an epidemiological study like an EWAS, in the laboratory by assessing if changes in the DNA methylation sites identified have any intracellular impact. This could be done for a variety of tissue types of interest. To truly provide strong evidence that changes in DNA methylation are causally related to a trait, one must take a cross-disciplinary approach. ## Overview of thesis aims DNA methylation has great potential for use in an epidemiological sense and as samples with DNA methylation data continue to grow it is important to understand the limitations of EWAS and how to maximise its potential. 
My thesis aims to address this by exploring what information has been gained from EWAS (__Chapters \@ref(ewas-catalog) and \@ref(properties-of-ewas)__), what information is still to gain from EWAS (__Chapter \@ref(h2ewas-chapter)__), whether EWAS might add to our biological understanding of complex traits above GWAS (__Chapter \@ref(ewas-gwas-comp-chapter)__) and by applying MR in a particular case, the potential for confounding in EWAS (__Chapter \@ref(dnam-lung-cancer-mr)__). See the flowchart in __Figure \@ref(fig:thesis-flowchart)__ for a graphical depiction. In __Chapter \@ref(ewas-catalog)__ a database of published EWAS is curated and made publicly available, which will be used in later chapters. The aim of __Chapter \@ref(properties-of-ewas)__ is to analyse the results present in this database jointly to allow the discovery of commonalities across methylome-trait associations and provide a platform to explore what is driving these commonalities. Further, the chapter explores the extent to which published results are reliable by assessing replication rate and whether sites measured by unreliable probes are prominent. After exploring the information already gained from EWAS, __Chapter \@ref(h2ewas-chapter)__ investigates the information still to gain from EWAS. The aim of the chapter is to apply methods developed to assess SNP-heritability to estimate the proportion of complex trait variation that is associated with sites commonly measured in EWAS. Quantifying the total covariation of DNA methylation measured in EWAS and complex traits enables an evaluation of whether study design should be altered to maximise understanding of complex traits through DNA methylation. __Chapter \@ref(ewas-gwas-comp-chapter)__ will then aim to assess whether the discoveries of EWAS may provide extra biological insight for traits of interest on top of those from GWAS. 
Tests will be applied to assess whether there is more overlap between the sites, genes or pathways identified by some large EWAS (N > 4500) and their corresponding GWAS than expected by chance. Finally, __Chapter \@ref(dnam-lung-cancer-mr)__ will apply MR to explore the causal nature of associations between DNA methylation and lung cancer. This application case-study will compare and contrast findings to conventional EWAS estimates to give an example of the potential residual confounding that can be present in EWAS. (ref:thesis-flowchart-cap) __Thesis flowchart__. Flowchart showing questions being asked in the thesis and the corresponding chapters attempting to help answer them. (ref:thesis-flowchart-scap) Thesis flowchart \blandscape \begin{figure}[htbp] {\centering \includegraphics[width=1\linewidth]{figure/01-introduction/thesis-flowchart} } \caption[(ref:thesis-flowchart-scap)]{(ref:thesis-flowchart-cap)}(\#fig:thesis-flowchart) \end{figure} \elandscape <!--chapter:end:01-introduction.Rmd--> # Data sources {#data-sources} I utilize multiple data sources throughout this thesis and in this chapter, each of these sources will be described in relevant detail so that they can be referenced for future results chapters. __Table \@ref(tab:data-overview-tab)__ gives a list of the data sources used, which results chapters they appear in and what type of data I extracted from each source. It should be noted that I also use databases of molecular and genomic annotations, for example the Gene Ontology resource that brings together evidence across fields to annotate genes based on proposed function, pathways they are involved in and where the gene products are active in the cell [@Ashburner2000; @Carbon2019]. These are not described here, but are described in the chatpers they were used. 
## Accessible Resource for Integrated Epigenomic Studies (ARIES) {#aries-02} ### Summary {#aries-summary} ARIES is a subsection of the Avon Longitudinal Study of Parents and Children (ALSPAC) prospective birth cohort [@Relton2015-aries]. This data source contains dense phenotypic data, DNA methylation data measured by the HM450 array, and genotype data from 1018 mother–child pairs. Whole blood samples were collected for DNA methylation measurement at three timepoints in children and two in their mothers, the timepoints with mean ages in years (in brackets) are as follows for children: birth, childhood (7.5), adolescence (17.1) and for mothers: during pregnancy (28.7), and at middle age (46.9). Within this thesis, data from mothers at middle age was used for all the analyses and after exclusion of individuals during quality control steps, this lead to a maximum of 940 mothers used in any analyses. ### ALSPAC description {#alspac-description-02} ALSPAC recruited pregnant women in the Bristol and Avon area, United Kingdom, with an expected delivery date between April 1991 and December 1992 (http://www.bris.ac.uk/alspac/). Over 14,000 pregnancies have been followed up (both children and parents) throughout the life-course. Full details of the cohort have been published previously [@Fraser2013; @Boyd2013]. Ethical approval for ALSPAC was obtained from the ALSPAC Ethics and Law Committee and from the UK National Health Service Local Research Ethics Committees. Written informed consent was obtained from both the parent/guardian and, after the age of 16, children provided written assent. The study website contains details of all the data that is available through a fully searchable data dictionary (http://www.bristol.ac.uk/alspac/researchers/access/). Study data were collected and managed using REDCap electronic data capture tools hosted at ALSPAC [@Harris2009; @Harris2019]. 
REDCap (Research Electronic Data Capture) is a secure, web-based software platform designed to support data capture for research studies, providing 1) an intuitive interface for validated data capture; 2) audit trails for tracking data manipulation and export procedures; 3) automated export procedures for seamless data downloads to common statistical packages; and 4) procedures for data integration and interoperability with external sources. ### ALSPAC genetic data {#alspac-genetic-data} Mothers were genotyped using the Illumina human660W-quad genome-wide single nucleotide polymorphism (SNP) genotyping platform (Illumina Inc., San Diego, CA, USA) at the Centre National de Génotypage (CNG; Paris, France). SNPs were removed if they displayed more than 5% missingness or a Hardy-Weinberg equilibrium P value of less than 1.0e-06. Additionally, SNPs with a minor allele frequency of less than 1% were removed. Samples were excluded if they displayed more than 5% missingness, had indeterminate X chromosome heterozygosity or extreme autosomal heterozygosity. Samples showing evidence of population stratification were identified by multidimensional scaling of genome-wide identity by state pairwise distances using the four HapMap populations as a reference, and then excluded. Cryptic relatedness was assessed using a IBD estimate of more than 0.125 which is expected to correspond to roughly 12.5% alleles shared IBD or a relatedness at the first cousin level. Related subjects that passed all other quality control thresholds were retained during subsequent phasing and imputation. Imputation of mother’s genotype data in ALSPAC was done with ALSPAC children’s data. So, genotypes in common between the sample of mothers and sample of children were combined. SNPs with genotype missingness above 1% due to poor quality were removed along with subjects due to potential ID mismatches. Haplotypes were estimated using ShapeIT (v2.r644) which utilises relatedness during phasing. 
A phased version of the 1000 genomes reference panel (Phase 1, Version 3) was obtained from the Impute2 reference data repository (phased using ShapeIt v2.r644, haplotype release date Dec 2013). Imputation of the target data was performed using Impute V2.2.2 against the reference panel (all polymorphic SNPs excluding singletons), using all 2186 reference haplotypes (including non-Europeans). ### ARIES DNA methylation data {#aries-dnam-data} Following DNA extraction samples were bisulfite converted using the Zymo EZ DNA Methylation kit (Zymo, Irvine, CA). Genome-wide methylation was measured using the HM450 array. The arrays were scanned using an Illumina iScan, with initial quality review using GenomeStudio. During the data generation process, a wide range of batch variables were recorded in a purpose-built laboratory information management system (LIMS). The LIMS also reported quality control (QC) metrics from the standard control probes on the HM450 array for each sample. Methylation data were normalised in R with the wateRmelon package [@Pidsley2013] using the Touleimat and Tost [@Touleimat2012] algorithm to reduce the non-biological differences between probes. Cell proportions (CD8+ and CD4+ T cells, B cells, monocytes, natural killer cells, and granulocytes) were estimated using an algorithm proposed by Houseman et al. [@Houseman2012]. ## Gene expression omnibus (GEO) {#geo-02} The GEO database is an online repository that allows deposition of functional genomics datasets that are made publically available. It contains individual participant data from various EWAS and so was queried to recruit additional datasets for entry into The EWAS Catalog (__Chapter \@ref(ewas-catalog)__). More details of how the database was queried for The EWAS Catalog can be found in __Section \@ref(geo-data-extraction)__. 
## UK Biobank {#uk-biobank-02} The UK Biobank is a prospective cohort study that recruited over 500,000 people aged 37-73 years were recruited for the study between 2006 and 2010. Phenotype and genotype data were collected from assessment centres all over the United Kingdom. The quality control and the participants have been described in detail elsewhere [@Collins2012; @Allen2014; @Bycroft2018]. Importantly, this data was made easily accessible to researchers, which has lead to multiple efforts to perform hundreds of GWAS in parallel and make the summary data from these openly available. This can be found on platforms such as the IEU OpenGWAS Project (__Section \@ref(ieu-opengwas-project-02)__). The GWAS summary statistics from these efforts were utilized in __Chapters \@ref(h2ewas-chapter) and \@ref(ewas-gwas-comp-chapter)__. Further, the Neale Lab (http://www.nealelab.is/uk-biobank/) calculated h^2^~SNP~ using LD-score regression for the traits for which they performed GWAS and made this data publically available. This was used in __Chapter \@ref(h2ewas-chapter)__. More details can be found in those chapters. ## IEU OpenGWAS Project {#ieu-opengwas-project-02} A team at the IEU recently produced a database of full GWAS summary statistics, the IEU OpenGWAS Project [@Elsworth2020; @Hemani2018]. This is accompanied by a website and packages to extract the data and perform various analyses [@Elsworth2020; @Hemani2018]. There are thousands of GWAS present in the database. For each GWAS, the database stores meta-data such as sample size, participant ancestry and sex, as well as full association statistics (betas, standard errors, P values) for all genetic variants assayed in the GWAS, i.e. not just those surpassing a given P value threshold. There are thousands of GWAS with this data present in the database. 
These come from studies for which the authors have released full summary statistics and from many GWAS performed in large-scale biobanks by members of the IEU OpenGWAS Project team or by other groups.
Within EPIC-Italy a nested case-control study was conducted utilizing incident cases diagnosed within follow-up and healthy controls individually matched to cases by sex, date of birth (±5 years), date of inclusion in the study and study centre. Analysis was performed for 185 incident cases diagnosed within follow-up and matched controls. Laboratory procedures were carried out at the Human Genetics Foundation (Turin, Italy) and DNA extracted from buffy coats as previously described [@Baglietto2017]. All participants signed an informed consent form, and the ethical review boards of the International Agency for Research on Cancer and of each local participating centre approved the study protocol. ### Melbourne Collaborative Cohort Study (MCCS) {#mccs} The MCCS is a prospective cohort study of 41,514 volunteers (24,469 women) aged between 27 and 76 years at baseline (1990-1994). At baseline attendance, participants completed questionnaires that measured demographic characteristics and lifestyle factors. Height and weight were directly measured, and a blood sample was collected and stored. Incident cases of lung cancer were identified through linkage with the State and National Cancer Registries during follow-up up to the end of 2011. The MCCS sample included 367 cases and 367 matched controls selected from MCCS participants who were lung cancer free at the age of diagnosis of the matching case (density sampling). Matching variables included sex, date of blood collection (within 6 months), date of birth (within 1 year), country of birth (Australia and UK versus Southern Europe), type of biospecimen (lymphocyte, buffy coat and dried blood spot) and smoking status (never smokers; short-term former smokers: quitting smoking less than 10 years before blood draw; long-term former smokers: quitting smoking 10 years or more before blood draw; current light smokers: less than 15 cigarettes per day at blood draw; and current heavy smokers: 15 cigarettes or more at blood draw). 
For the MCCS, laboratory procedures were carried out at the Genetic Epidemiology Laboratory, the University of Melbourne according to manufacturers' protocols. DNA extraction from lymphocytes and buffy coats was performed as previously described [@Baglietto2017]. The Cancer Council Victoria's Human Research Ethics Committee approved the study protocol. Subjects gave written consent to participate and for the investigators to obtain access to their medical records. ### Norwegian Women and Cancer (NOWAC) {#nowac} The biobank of the NOWAC cohort was established in the years 2003-2006. Those who filled in an eight-page questionnaire and accepted the invitation to donate blood were sent blood drawing equipment together with a two-page epidemiological questionnaire. Around 50 000 women returned two tubes of blood to the Institute of Community Medicine at UiT The Arctic University of Norway and data linkage to the National Cancer Registry of Norway was performed. During follow-up to the end of 2011, 132 eligible cases of lung cancer were identified and were used for the EWAS. For each case, one control with an available blood sample was selected and matched on time since blood sampling and year of birth in order to control for effects of storage time and ageing. The cases and the controls were processed together for all laboratory procedures in order to reduce any batch effect. Laboratory procedures were carried out at the Human Genetics Foundation (Turin, Italy). DNA extraction from buffy coats was performed as previously described [@Baglietto2017]. All participants gave informed consent. The study was approved by the Regional Committee for Medical and Health Research Ethics in North Norway. Data storage and linkage was approved by the Norwegian Data Inspectorate. 
### Northern Sweden Health and Disease Study (NSHDS) {#nshds} NSHDS is an ongoing prospective cohort and intervention study intended for health promotion of the population of Västerbotten County in northern Sweden. All residents were invited to participate by attending a health check-up at their local health care centre at 40, 50 and 60 years of age. At the health check-up, participants were asked to complete a self-administered questionnaire covering various factors such as education, smoking habits, physical activity and diet. In addition, height and weight were measured and participants were asked to donate a blood sample. Incident lung cancer cases were identified through linkage to the regional cancer registry. One control was chosen at random for each lung cancer case from appropriate risk sets consisting of all cohort members alive and free of cancer (except non-melanoma skin cancer) at the time of diagnosis of the index case. Matching criteria were the same as for the MCCS except there was no matching for type of biospecimens as DNA was extracted from whole blood for all samples. After quality control, a total of 234 incident lung cancer cases and 234 individually matched controls were available for this analysis. Laboratory procedures for NSHDS were carried out at two sites. DNA extraction from the buffy coat was conducted at Umeå University, Sweden, as previously described. Illumina Infinium HumanMethylation450 BeadChip analysis was conducted at the ALSPAC/IEU Laboratory at the University of Bristol. All study subjects provided written informed consent at time of the recruitment into the NSHDS. ## Transdisciplinary Research in Cancer of the Lung and The International Lung Cancer Consortium (TRICL-ILCCO) {#tricl-ilcco-02} To perform the two-sample MR analyses in __Chapter \@ref(dnam-lung-cancer-mr)__, summary statistics from a large lung cancer GWAS were required. This was taken from a GWAS using data from the TRICL-ILCCO consortium. 
The Transdisciplinary Research in Cancer of the Lung and The International Lung Cancer Consortium (TRICL-ILCCO) comprises 26 cohorts, which provide data on 29,863 lung cancer cases and 55,586 controls. These individuals were genotyped using various different methods, with the most recent and largest cohort being genotyped by the Illumina Infinium OncoArray-500K BeadChip (Illumina Inc. San Diego, CA). This data went through extensive quality control and harmonisation before the meta-analysis was performed.
In addition, participants reported on alcohol consumption, occupational exposure to dust and/or welding fumes, exposure to passive smoking, education, and familial cases of lung cancer. The answers were reviewed together with an examiner at the day of attendance. Body mass index was calculated as measured weight in kilograms divided by measured height (in meters) squared. For lung cancer (ICD7, codes 1624 or 4624 until 1977, and ICD10, code C34 from 1978 and onwards), the date of first diagnosis was taken from the national Danish Cancer Registry from 1943 to December 2012. ### DNA methylation data {#cchs-dnam-data} At the physical examination, blood samples were drawn for DNA from which _AHRR_ methylation extent was measured [@Bojesen2017]. The _AHRR_ cg05575921 methylation extent was measured in duplicate samples of bisulphite treated DNA from peripheral blood from 9,234 individuals. A Taqman assay was used that was developed in the CCHS laboratory, and included standard curves, as well as internal controls in each 384-well plate. Coefficients of variation at the methylation level of 71% varied from 5.0 to 6.7%. Laboratory technicians were blinded to smoking and disease status of the individuals. Results were validated with pyrosequencing on a subset of samples [@Bojesen2017]. ### Genotype data {#cchs-genotype-data} Genotypes from the iCOGs array [@Bojesen2013] and prospective data on lung cancer incidence were also available for these participants. Of the 9234 individuals, genotype data from iCOGS on 8778 were available. In short, DNA isolated from leukocytes was genotyped with a custom Illumina iSelect genotyping array, designed to test genetic variants related to breast, ovary and prostate cancer, comprising roughly 211,000 SNPs after rigorous quality control [@Bojesen2013]. 
\begin{table}[H] \caption{(\#tab:data-overview-tab)Overview of data used in this thesis} \centering \resizebox{\linewidth}{!}{ \begin{tabular}[t]{llll} \toprule data-source & chapters-used-in & data-type & data\\ \midrule \cellcolor{gray!6}{ARIES} & \cellcolor{gray!6}{3, 4, 5, 7} & \cellcolor{gray!6}{individual and summary} & \cellcolor{gray!6}{DNAm, genotypic, phenotypic, GWAS}\\ EWAS Catalog* & 3, 4, 6 & summary & EWAS\\ \cellcolor{gray!6}{GEO} & \cellcolor{gray!6}{3} & \cellcolor{gray!6}{individual} & \cellcolor{gray!6}{DNAm, phenotypic}\\ IEU OpenGWAS Project & 5, 6 & summary & GWAS\\ \cellcolor{gray!6}{EPIC-Italy} & \cellcolor{gray!6}{7} & \cellcolor{gray!6}{summary} & \cellcolor{gray!6}{EWAS}\\ \addlinespace MCCS & 7 & summary & EWAS\\ \cellcolor{gray!6}{NOWAC} & \cellcolor{gray!6}{7} & \cellcolor{gray!6}{summary} & \cellcolor{gray!6}{EWAS}\\ NSHDS & 7 & summary & EWAS\\ \cellcolor{gray!6}{TRICL-ILCCO} & \cellcolor{gray!6}{7} & \cellcolor{gray!6}{summary} & \cellcolor{gray!6}{GWAS}\\ CCHS & 7 & individual & DNAm, genotypic, phenotypic\\ \addlinespace \cellcolor{gray!6}{GTEx} & \cellcolor{gray!6}{7} & \cellcolor{gray!6}{summary} & \cellcolor{gray!6}{GWAS}\\ \bottomrule \multicolumn{4}{l}{\textsuperscript{} ARIES = Accessible Resource for Integrated Epigenomic Studies}\\ \multicolumn{4}{l}{\textsuperscript{} GEO = Gene Expression Omnibus}\\ \multicolumn{4}{l}{\textsuperscript{} IEU = Integrative Epidemiology Unit}\\ \multicolumn{4}{l}{\textsuperscript{} EPIC-Italy = Italian strand of the European Prospective Investigation into Cancer and Nutrition study}\\ \multicolumn{4}{l}{\textsuperscript{} MCCS = Melbourne Collaborative Cohort Study}\\ \multicolumn{4}{l}{\textsuperscript{} NOWAC = Norwegian Women and Cancer}\\ \multicolumn{4}{l}{\textsuperscript{} NSHDS = Northern Sweden Health and Disease Study}\\ \multicolumn{4}{l}{\textsuperscript{} TRICL-ILCCO = Transdisciplinary Research in Cancer of the Lung and The International Lung Cancer Consortium}\\ 
\multicolumn{4}{l}{\textsuperscript{} CCHS = Copenhagen City Heart Study}\\ \multicolumn{4}{l}{\textsuperscript{} GTEx = Genotype-Tissue Expression (project)}\\ \multicolumn{4}{l}{\textsuperscript{} DNAm = DNA methylation}\\ \multicolumn{4}{l}{\textsuperscript{*} Data source created in Chapter 3}\\ \end{tabular}} \end{table} <!--chapter:end:02-data_sources.Rmd--> # The EWAS Catalog: a database of epigenome-wide association studies {#ewas-catalog} ## Chapter summary {#chapter-summary-03} Before investigating the properties of DNA methylation-trait associations, I sought to bring together published EWAS results to help identify studies for use in future chapters and to allow joint analysis of currently available EWAS results in __Chapter \@ref(properties-of-ewas)__. To this end, I led a project to produce The EWAS Catalog. This database contains manually curated CpG-trait associations (at P<1x10^-4^) from published EWAS, each assaying over 100,000 CpGs in at least 100 individuals. The database currently contains these 582,801 associations from 218 published studies as well as summary statistics for 392,773 associations from 428 EWAS using data from ARIES (__Section \@ref(aries-summary)__) and GEO (__Section \@ref(geo-02)__). This database provides a platform for future chapters in this thesis and will give researchers the opportunity to quickly and easily query EWAS associations to gain insight into the molecular underpinnings of disease as well as the impact of traits and exposures on the DNA methylome. ### Contributions statement {#contributions-statement-03} I developed the website (after its original production), conducted all EWAS in ARIES and GEO, extracted the most published data, have led the project for the past 18 months and wrote everything presented in this chapter, however I was not the sole contributor. 
Contributions by others are listed below: * <NAME> produced the original website and R package and originally led the project * <NAME> re-formatted and developed the website. Relative contributions to this development can be found here: https://github.com/ewascatalog/ewascatalog2/graphs/contributors * <NAME>fi extracted GEO data * <NAME> and <NAME> are part of the core development team (along with myself) and continue to provide expert guidance * <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME>, <NAME> all extracted some published data ## Introduction {#introduction-03} In recent years, there has been a dramatic increase in the number of EWAS being performed and published due to technological advancements making it possible to measure DNA methylation at hundreds of thousands of CpG sites cheaply and effectively [@Logue2017; @Rakyan2011; @Illumina2012; @IlluminaEPIC]. Curating these EWAS results and making them freely available would enable researchers to use the data to inform or enhance studies on specific traits of genomic regions of interest. Furthermore, the joint analysis of these data could provide insights into the properties of DNA methylation-trait associations, potentially leading to an understanding of how to improve design of EWAS. The latter is explored in __Chapter \@ref(properties-of-ewas)__. At the time of making the database, to my knowledge, there were no databases that had collected well-curated EWAS on all traits (not just diseases) in an online database accessible to researchers. During production one database fulfilled those metrics: EWAS Atlas [@Li2019]. Other databases are available but are limited to certain diseases (e.g. MethHC [@Huang2015]). The EWAS Atlas provides a website with annotated CpG sites and information on traits. 
Ideally a database of EWAS results will provide summary statistics, including betas, standard errors and p-values where provided from publications, in an easily accessible manner. This enables researchers to explore various aspects of the published data without having to retrieve the published article.
(ref:catalog-project-workflow-scap) EWAS Catalog project flowchart \begin{figure} {\centering \includegraphics[width=1\linewidth]{figure/03-ewas_catalog/project_flowchart} } \caption[(ref:catalog-project-workflow-scap)]{(ref:catalog-project-workflow-cap)}(\#fig:catalog-project-workflow) \end{figure} ## Methods {#methods-03} ### Implementation The EWAS Catalog web-app was built using the Django Python package (https://djangoproject.com). The data is stored in a combination of MySQL databases and fast random access files [@Li2011] and can be queried via the web app or the R package (www.github.com/ewascatalog/ewascatalog-r/). ### Overview of publication data extraction To identify publications, periodic literature searches are performed in PubMed using the search terms: "epigenome-wide" OR "epigenome wide" OR "EWAS" OR "genome-wide AND methylation" OR "genome wide AND methylation". To try and maximise quality and usefulness of data, and minimize computational burden, criteria for inclusion of a study into The EWAS Catalog were developed. These criteria and reasons for them are as follows: 1. The EWAS performed must contain over 100 humans. Limiting the sample size to 100 or more individuals was done to try and remove EWAS that would be highly underpowered, but make sure EWAS of rarer phenotypes and in smaller cohorts (for example non-European cohorts) were not excluded. 2. The DNA methylation data must be genome-wide. As discussed, published candidate gene studies are at risk of publication bias and thus were excluded. 3. The analysis must contain over 100,000 CpG sites. Similarly to the previous criterion, this was to avoid studies performing more targeted DNA methylation measurements. 4. The study must include previously unpublished EWAS summary statistics. This was to prevent duplication of results. CpG-phenotype associations are extracted from studies at P<1x10^-4^. 
A P-value cut-off was imposed to avoid a large computational burden from storing millions of associations. This threshold was chosen to be more lenient than the conventional EWAS P-value threshold as there is potential information from associations reported with P-values above P<1x10^-7^, for example when trying to replicate associations in a study with a smaller sample size. All these criteria along with the variables extracted are documented on the website (www.ewascatalog.org/documentation). Experimental factor ontology (EFO) terms were mapped to traits to unify representation of these traits. These EFO terms were manually entered after looking up the trait in the European Bioinformatics Institute database (www.ebi.ac.uk/efo). Based on these criteria, from 2021-05-25, The EWAS Catalog contained 582,801 associations from 218 published studies. ### New EWAS performed {#new-ewas-03} #### Overview of GEO data extraction {#geo-data-extraction} To recruit additional datasets suitable for new EWAS analysis, the geograbi R package (https://github.com/yousefi138/geograbi) was used to both query GEO for experiments matching The EWAS Catalog inclusion criteria (described above) and extract relevant DNA methylation and phenotype information. The GEO database is briefly described in __Section \@ref(geo-02)__. The query of this database was performed on 2020-10-12 and identified 136 such experiments with 32,555 samples where DNA methylation and phenotype information could be successfully extracted. From these, the aim was to repeat the analyses performed in the publications linked by PubMed IDs to each GEO record. Thus, I looked up the corresponding full texts for each dataset and identified the main variables of interest. Of the 136 putative GEO studies, only 41 (30%) contained sufficient information to replicate the original analysis. 
#### Overview of ALSPAC data used {#alspac-03} EWAS were conducted for 387 continuous and binary traits in peripheral blood DNA methylation of ALSPAC mothers in middle age (N = 940), generated as part of the ARIES project [@Relton2015-aries]. All phenotypes used in this chapter were measured at the same time blood was drawn for DNA methylation measurement. The ARIES dataset is summarised and is described in more detail in __Section \@ref(aries-02)__. Ancestry principal components were used as covariates in the EWAS. These were generated within ALSPAC mothers using PLINK (v1.9). Quality control and imputation of the genetic data are described in __Section \@ref(alspac-genetic-data)__. After quality control and imputation, independent SNPs (r^2^ < 0.01) were used to calculate the top 10 ancestry principal components. As discussed, batch effects and cell type heterogeneity may account for a large proportion of covariation observed between DNA methylation and a phenotype of interest. In an attempt to combat this, surrogate variables were generated from the DNA methylation data using the smartsva R package [@Chen2017]. Surrogate variables capture variation in DNA methylation that is orthogonal to the relationship between the trait of interest and DNA methylation [@Chen2017; @Leek2007]. When used in an EWAS model, they capture the largest portion of DNA methylation variation that is not due to the trait of interest. Thus, if batch effects and cell type heterogeneity are causing substantial variation in the DNA methylation data, this should be captured by the surrogate variables [@Leek2007]. Values of continuous phenotypes were defined as outliers if greater than three multiplied by the interquartile range (IQR) or if less than minus three multiplied by the IQR and set to missing, then all phenotypes with 100 or more non-missing values were kept for further analysis. 
To ensure all phenotypes were approximately normal, each of their distributions were examined and then transformed. If a variable was deemed right-skewed, it was log-transformed then its distribution was re-assessed by eye. Square-roots and cube-roots were used to try and approximate normality if log-transformation did not work. If a variable was deemed left-skewed, it was squared, and the distribution re-assessed by eye. #### EWAS statistical models For all EWAS using ARIES and GEO data, linear regression models were fitted with DNA methylation at each site as the outcome and the phenotype as the exposure. DNA methylation was coded as beta values between 0 and 1. For a particular site, a beta value of 0 represents no methylation being detected in all cells measured and a value of 1 represents all cells being methylated at that site. For the 387 EWAS conducted using participant data from ARIES, covariates included age, the top 10 ancestry principal components, and 20 surrogate variables. For the 41 EWAS conducted using data extracted from GEO, just 20 surrogate variables were included as covariates. For GEO other covariates were considered, but surrogate variables only were used for two reasons: 1) to help automate the process and 2) because covariates used in the original EWAS were not included with many of the GEO datasets. Statistical analyses were conducted in R (Version 3.6.2). The smartsva package [@Chen2017] was used to create surrogate variables and the ewaff R package (https://github.com/perishky/ewaff) was used to conduct the EWAS, all p-values are two-sided. ## Results {#results-03} ### Database interface and use There are two ways to access this large, curated database: through the main website www.ewascatalog.org or by using the R package “ewascatalog”. 
The website provides a simple user interface, whereby there is one simple search bar and an advanced search bar to explore the database and links to tabs that contain documentation on the contents and how to cite its use (__Figure \@ref(fig:catalog-use)__). Users may enter a CpG, gene, genome position, trait, EFO term, author name, or PubMed ID into the search bar and it will rapidly return detail for relevant EWAS associations, including CpG, trait, sample size, publication and association (effect size and P value) (__Figure \@ref(fig:catalog-use)__). This information along with additional information such as ancestry, outcome, exposure units, and tissue analysed are available for download as a tab-separated value (tsv) text file. Unlike other EWAS databases, the option is provided to download summary results for both the user’s search and for the entire database. Further, users may upload their own data which will be parsed by a pipeline designed to check the data and format it for input into The EWAS Catalog MySQL database. (ref:catalog-use-cap) __Using The EWAS Catalog__. On the left hand side is the home page. On the top right hand side are examples of searches possible: 1. CpG sites, 2. genes and 3. traits. Below the searches shows the pages directed to after searching for "Vitamin B6 intake". Circled in red is the download button; this button enables the user to download the results of their search as a tab-separated value file. This file will contain the information shown on the website as well as additional analysis information. (ref:catalog-use-scap) Using The EWAS Catalog \blandscape \begin{figure}[htbp] {\centering \includegraphics[width=1\linewidth]{figure/03-ewas_catalog/using_the_catalog} } \caption[(ref:catalog-use-scap)]{(ref:catalog-use-cap)}(\#fig:catalog-use) \end{figure} \elandscape The R package, along with installation instructions and examples are available at https://github.com/ewascatalog/ewascatalog-r/. 
Once installed, the database can be queried directly in R using the “ewascatalog()” function similar to the website: simply supply the function with a CpG site, gene, genome position or trait and the function returns the same output as is downloadable from the website. ## Discussion {#discussion-03} In this chapter, a database of previously published EWAS and the full summary statistics of 428 newly performed EWAS within ALSPAC and GEO has been established. This is freely available for all researchers to use and provides a platform to explore what information has been gained from EWAS as well as a platform that can be used to pool all existing data to gain new insights into both the EWAS study itself and how DNA methylation associates with traits. Despite the fact The EWAS Atlas has similar aims to The EWAS Catalog, the latter provides full summary statistics, extra information, a user-friendly platform to enable more downstream analyses, and a pipeline for users to upload their own data and receive a citable DOI for it. The EWAS Catalog team will continue to collate and upload newly published EWAS and further increase the number of full summary statistics on the website by performing additional EWAS on available datasets and by inviting EWAS authors to provide full summary statistics. Currently work is ongoing to include additional functionality to allow users to easily and systematically compare their EWAS findings to EWAS in the database. With this full summary data, it is possible to make greater strides into discovering the epigenetic architecture of traits. In this chapter, a platform has been generated that enables us to examine 1) what information has been gained from EWAS and 2) what could explain EWAS associations, which will be explored in __Chapter \@ref(properties-of-ewas)__. 
<!--chapter:end:03-ewas_catalog.Rmd--> # Properties of epigenome-wide association studies {#properties-of-ewas} ## Chapter summary {#chapter-summary-04} The EWAS Catalog database developed in __Chapter \@ref(ewas-catalog)__ contains thousands of associations from hundreds of studies. By far the most common method of measuring DNA methylation amongst these EWAS is in blood using the Illumina Infinium HumanMethylation450 BeadChip (HM450 array). This platform assays fewer than 2% of CpG sites in the human genome, and those selected are ascertained for regions hypothesised to be relevant to gene regulation. Understanding what drives the associations found by measuring DNA methylation in this way could help prioritise CpG sites or regions of the genome to target for future technologies used in EWAS and further, it could guide current EWAS study design (for example by discovering sites which could be removed before analysis). In this chapter I use the data collected for The EWAS Catalog to evaluate the characteristics of known DNA methylation associations. Of the data in the EWAS Catalog, 9.9% of reported associations are from CpGs measured by probes recently reported as unreliable and 21% of studies did not account for both batch effects and cellular composition, suggesting some associations may be false positives. However, after removal of these potentially false positive results, characteristics of DNA methylation were still associated with EWAS results. An increase in heritability by 10% was associated with a decrease in the modulus of the standardised effect size by 8% [95% CI: 8.3% decrease, 7.7% decrease]. It was also observed that the modulus of the standardised effect size decreased by 26% [95% CI: 26% decrease, 25% decrease] per one standard deviation increase in the variance of DNA methylation. 
Differentially methylated positions (DMPs) were found to be present in actively transcribed promoter regions, enhancer regions and in over 100 transcription factor binding sites more than expected by chance. DMPs were also found to be depleted for heterochromatic regions, poised and bivalent promoters, regions repressed by polycomb proteins and quiescent regions. Targeting genomic regions for measurement of DNA methylation that are enriched for EWAS results, and avoiding regions that are depleted for EWAS results, may increase probability of identifying DMPs in future EWAS. These enrichment and depletion analyses were conducted across 25 distinct tissues and the strongest associations were found in blood, suggesting conducting EWAS in different tissues will likely yield tissue-specific results. The results presented in this chapter (and later chapters) are using data from The EWAS Catalog when published data present in the database was from before 2019-01-01. ### Contributions statement {#contributions-statement-04} I wrote all the sections and performed all analyses except the enrichment analyses using LOLA, which was performed by <NAME>. I performed the downstream analyses from LOLA output (visualisation of the results and some further statistical analyses). ## Introduction {#introduction-04} Hundreds of EWAS have been conducted in the last 10-15 years, yet no systematic evaluation of published EWAS across complex traits has been conducted. By exploring the patterns of association across a large group of EWAS, one can discover potential explanations for the results found, that may shed light on technical issues affecting previous studies as well as shared epigenetic architectures across traits. Since the inception of EWAS, it has become clear that batch effects and cellular heterogeneity can generate false positives and bias effect sizes [@Price2018; @Forest2018; @Jaffe2014]. 
Also, as discussed in __Section \@ref(measuring-dna-methylation)__, characterisation of probes used by common arrays (e.g. the HM450 array and the HMEPIC array) has shown that unreliable methylation measurements may occur because of cross-hybridisation of probes, non-specific probe mapping and SNPs being present at the binding sites of probes [@Zhou2017; @Naeem2014]. Despite this, there are examples of replication amongst EWAS results [@Nano2017; @Kaushal2017; @Morris2017; @Hedman2017; @Braun2017; @Teschendorff2015; @Zeilinger2013]. Further functional characterisation of EWAS results, such as new experimental studies or the application of existing gene function knowledge, can also be used to bolster evidence that changes in DNA methylation estimated are unlikely due to bias [@Rakyan2011; @Corbin2019]. By way of an example, changes in DNA methylation at _AHRR_ have been replicated across multiple smoking EWAS [@Zeilinger2013; @Elliott2014; @Joehanes2016; @Bojesen2017] and as functional research has implicated this gene in handling toxic substances found in tobacco smoke [@Zudaire2008], it seems unlikely these findings are chance occurrences. The characteristics of the DNA methylome may also explain some EWAS findings. Heritability varies across DNA methylation sites [@VanDongen2016; @McRae2014; @Hannon2018], and so if genetic effects are driving EWAS associations, either through confounding or with DNA methylation as a mediator, one would expect heritable sites to be commonly identified in EWAS. Variance is also heterogeneous across sites [@Garg2018] and at sites where variation is low, the ratio of noise to signal may be greater. Thus, some studies have advocated removing these sites to prevent generating false positives and to reduce the multiple testing burden [@Meng2010; @Logue2017]. However, it is unclear how variance in DNA methylation relates to the magnitude of effect estimates. 
Experimental studies have shown DNA methylation changes at different locations of the genome correlate with different regulatory functions. For example, an increase in DNA methylation at transcriptional start sites is correlated with a decrease in gene expression [@Jones2012; @Ando2019; @Deaton2011], but an increase in DNA methylation within a gene body shows the opposite association [@Wolf1984; @Hellman2007]. As discussed in __Section \@ref(ewas)__, the understanding that the genomic contexts in which DNA methylation occurs is related to gene regulation likely contributed to the design of contemporary arrays that measure DNA methylation. Yet, it is not known whether targeting protein-coding regions and enhancers has likely led to an increase in discovery of DNA methylation-trait associations. Understanding underlying factors that drive EWAS results is essential for future study design. This may come in the form of consideration of potential biasing factors, or by selecting certain DNA methylation sites based on their specific characteristics. Further, the HM450 and HMEPIC arrays both capture less than 5% of the total number of CpG sites in the genome, therefore understanding the characteristics of DNA methylation-trait associations could provide vital information when designing future studies targeting the other 95%. Also, by examining the commonalities of EWAS results, one has the potential to uncover links between traits that have not previously been made or to identify new potential mediating factors between traits. In this chapter I first describe the data present in The EWAS Catalog going on to explore the factors that predict EWAS hits. \newpage ## Methods {#methods-04} ### Epigenome-wide association studies data {#ewas-data-04} All the data for the analyses were extracted from The EWAS Catalog (__Chapter \@ref(ewas-catalog)__). Data were extracted when The EWAS Catalog had published EWAS data from before the start of 2019 (i.e. 
the data do not completely reflect that presented in __Chapter \@ref(ewas-catalog)__). Studies were removed that compared DNA methylation levels between tissue, race, and age. This was done because these variables are not complex traits and thus the properties of those study results are unlikely to be informative when attempting to understand how to best design EWAS. Overall, this left 614 EWAS, including 387 EWAS from the ARIES subsection of ALSPAC (__Section \@ref(aries-02)__) [@Relton2015-aries; @Fraser2013; @Boyd2013] and 40 EWAS performed using data from the gene expression omnibus (GEO) resource (__Section \@ref(geo-02)__). See __Chapter \@ref(ewas-catalog)__ for more details on the EWAS. ### Description of catalog data {-#description-of-data} DMPs, unless otherwise stated, will be defined as DNA methylation sites associated with a trait at P<1x10^-7^. Each of the CpGs in the Catalog are annotated to genes, using data from the meffil R package [@Min2018]. T-statistics ($t$) were calculated using P-values, sample sizes ($n$) and the qt() function in R. $r^2$ values were calculated from t-statistics as follows \begin{equation} r^2 = \frac{t^2} {t^2 + n - 1} (\#eq:r-squared) \end{equation} To identify traits for which r^2^ values might be inflated, r^2^ values were first summed and divided by the sample size (N). The base 10 log of these values were taken to approximate a normal distribution. Then a z-test was performed to assess for which studies the sum of r^2^ values divided by N were greater than the mean of summed r^2^ values divided by N across all studies. From the z-test, those with a FDR-corrected P-value of less than 0.05 were labelled as having inflated r^2^ values. ### Identifying faulty probes {#identifying-faulty-probes} By far the most common method to measure DNA methylation across the studies in The EWAS Catalog is using the HM450 array. 
Since its development, the array has been extensively characterised [@Price2018; @Forest2018; @Jaffe2014; @Zhou2017] and it was found not all probes measure DNA methylation reliably. Some probes map to CpG sites that are influenced by SNPs, others are non-specific and some are prone to cross-hybridisation. Probes were assigned to be 'potentially faulty' if they were characterised as such by Zhou et al. [@Zhou2017]. ### Replication {#replication-methods-04} A study-wide significant association (P<1x10^-7^) was deemed to be replicated if it had been identified by another study at P<1x10^-4^ (i.e. it was replicated if reported by another study in The EWAS Catalog). It should be re-iterated that the published data collected for The EWAS Catalog was scraped from the journal articles and even though the threshold for inclusion into The EWAS Catalog remained constant at P<1x10^-4^, there is heterogeneity in reporting of EWAS associations. Therefore, some studies would not have reported any results with a P-value lower than the conventional EWAS P-value threshold (P<1x10^-7^), making power a key limitation for attempts to assess replication. The replicability of EWAS within the database was assessed using two methods. Firstly, replication within studies is recorded in the EWAS Catalog, thus a simple lookup for any studies that performed a replication or meta-analysed discovery and replication datasets was conducted. Secondly, a lookup of results for any traits for which multiple EWAS had been conducted was performed. The Catalog also contains results from studies that have uploaded their data to GEO as well as results from the re-analysis of that data (details in __Section \@ref(new-ewas-03)__). These re-analyses adjusted for 20 surrogate variables only as many studies did not provide a complete set of covariates to GEO. To assess the potential reliability of these EWAS, the original EWAS results were looked up in the results of the re-analysed data. 
### Selecting data to assess DNA methylation characteristics Before further analyses, all potentially faulty probes and probes that mapped to sex chromosomes were removed. Studies that did not include batch and cell composition as covariates in at least one EWAS model were also excluded, and studies for which re-analysis of the data replicated less than 10% of the findings were removed. ### DNA methylation characteristics The relationship between the characteristics, heritability and variance, of DNA methylation at each CpG site and EWAS effect size was assessed. To allow this across traits, beta coefficients were standardised, $\beta_{standard}$, like so, \begin{equation} \beta_{standard} = \frac{\beta\sigma(x)} {\sigma(y)} (\#eq:standardised-beta-coeffs) \end{equation} where $\beta$ = beta coefficient, $\sigma$ = standard deviation, $x$ = independent variable, $y$ = dependent variable. As individual participant data were not available to us, the variance in DNA methylation sites was approximated by the variance in DNA methylation at sites as supplied by the Genetics of DNA Methylation Consortium (GoDMC) [@Min2020] and the trait variance was estimated by rearranging equation \@ref(eq:r-squared-from-beta) depending on whether DNA methylation was the independent ($x$) or dependent ($y$) variable in the model. \begin{equation} r^2 = \frac{\beta^2\sigma^2(x)} {\sigma^2(y)} (\#eq:r-squared-from-beta) \end{equation} GoDMC [@Min2020] also provided the mean levels of DNA methylation at each site. Heritability of DNA methylation at each site has been previously estimated by McRae et al. 2014 [@McRae2014] and Van Dongen et al. 2016 [@VanDongen2016]. These values were kindly made publicly available by the authors of those studies; in this chapter the estimates of heritability from twin data (Van Dongen et al. 2016 [@VanDongen2016]) were used. 
Relationships between each characteristic and effect size were assessed using linear regression, fitting the absolute value of the standardised effect size as the dependent variable and the characteristic as the independent variable. The absolute values of standardised effect sizes were transformed using the natural log to approximate normality. The relationship between tendency for a DMP to replicate and heritability and variance were also assessed. Logistic regression models were fitted with the binary variable of a DMP replicating in at least one study (yes or no) used as the outcome measure and heritability and variance fitted as the independent variables. It was also tested whether heritability and variance could predict whether a CpG site was likely to be identified as a DMP in EWAS by generating receiver operating characteristic (ROC) curves and quantifying the area under these curves (AUC). ### Enrichment tests {#enrichment-tests-04} Assessment of enrichment of DMPs amongst various genomic regions was carried out to help understand whether selecting regions to measure could maximise EWAS yield. Locus Overlap Analysis (LOLA) [@Sheffield2016] was used to assess whether DMPs identified in the EWAS Catalog were enriched for 25 chromatin states and 167 transcription factor binding sites in 127 different cell types comprising 25 distinct tissues. These data were generated by the Roadmap Epigenomics Project [@Kundaje2015] and ENCODE [@Dunham2012]. Five different groups of DMPs were defined for the enrichment analyses: * Group A - all sites associated with any complex trait at the conventional P-value threshold used in EWAS, P<1x10^-7^. * Group B - a subset of group A, all sites associated with any complex trait at a more stringent threshold, P<1.6x10^-10^. Multiple EWAS were conducted to produce the results in the database and so the stricter threshold of group B aimed to limit the false discovery rate by taking into account the multiple EWAS. 
* Group C - DMPs replicated at P<1x10^-4^ in any other EWAS of the same trait. * Group D - a subset of group A, but restricted to results from studies where DNA methylation was measured in whole blood. * Group E - a subset of group B, but restricted to results from studies where DNA methylation was measured in whole blood. To assess enrichment, LOLA performs Fisher's exact test and generates an odds ratio that can be interpreted as the odds of the DMPs being within an annotation divided by the odds of the background (non-DMP) sites being within that annotation. Genomic annotations may differ by CG content and thus a differential CG content of regions containing the DMPs of interest and the background group of CpG sites might bias enrichment estimates. Thus, background sites were matched on CG content before the analysis. All analyses were completed using R (version 3.6.2). \newpage ## Results {#results-04} ### Description of the catalog {-#catalog-description} Before assessing the factors predicting DMPs, a brief summary of the data in The EWAS Catalog (as used for this study, see __Section \@ref(ewas-data-04)__ for details) is presented (__Table \@ref(tab:study-data-tab)__). The percentage of hypermethylated sites in relation to traits was 52% and there were five CpGs that associated with more than ten traits (__Figure \@ref(fig:traits-manhattan)__). Here are those sites with gene names, as mapped using Illumina-provided annotations, in brackets: cg01940273 (_-_), cg05575921 (_AHRR_), cg00574958 (_CPT1A_), cg17901584 (_DHCR24_), cg06500161 (_ABCG1_). cg06500161 (_ABCG1_) was associated with more traits than any other site - 71 traits. These correspond mostly to metabolites, weight-related traits, and type two diabetes. 
\begin{table}[!h] \caption{(\#tab:study-data-tab)Description of data present in the EWAS Catalog} \centering \resizebox{\linewidth}{!}{ \begin{tabular}[t]{ll} \toprule study-trait & value\\ \midrule \cellcolor{gray!6}{Number of EWAS} & \cellcolor{gray!6}{614}\\ Unique traits & 556\\ \cellcolor{gray!6}{Number of samples} & \cellcolor{gray!6}{389527}\\ Median sample size (range) & 536 (93 - 13474)\\ \cellcolor{gray!6}{Number of associations} & \cellcolor{gray!6}{155976}\\ \addlinespace Unique CpGs identified & 129670\\ \cellcolor{gray!6}{Unique genes identified} & \cellcolor{gray!6}{19305}\\ Sex (\%) & Both (38.6), Females (52.0), Males (2.1)\\ \cellcolor{gray!6}{Ethnicities} & \cellcolor{gray!6}{EUR (75.3), Unclear (12.5), AFR (4.6), Other (3.6), ADM (1.6), EAS (1.4), SAS (1.0)}\\ Age (\%) & Adults (72.5), Geriatrics (11.2), Children (4.9), Infants (4.4)\\ \addlinespace \cellcolor{gray!6}{Number of tissue types} & \cellcolor{gray!6}{42}\\ Most common tissues (\%) & whole blood (84.14), cord blood (4.34), cd4+ t-cells (2.60), placenta (1.24), saliva (0.99)\\ \bottomrule \multicolumn{2}{l}{\textsuperscript{} Identified associations were defined as those P < 1x10\textsuperscript{-7}}\\ \multicolumn{2}{l}{\textsuperscript{} Results for Sex, Ethnicities, Age, and Most common tissues were calculated per EWAS.}\\ \multicolumn{2}{l}{\textsuperscript{} For example, if one EWAS (or meta-analysis) contained just African individuals then that would be counted as one.}\\ \multicolumn{2}{l}{\textsuperscript{} EUR = European, AFR = African, ADM = Admixed, EAS = East Asian, SAS = South Asian}\\ \end{tabular}} \end{table} Next estimates of the trait variance (see equation \@ref(eq:r-squared)) explained by each association were assessed. This indicates the predictive performance from EWAS, although it should be noted that winner's curse will artificially inflate the performance, even amongst EWAS with true positive results. 
The proportion of trait variance (ranging from 0 to 1) that correlated with DNA methylation (r^2^) at each site varied from 0.0011 to 0.97 with a median of 0.093 (__Figure \@ref(fig:rsq-distribution)__). The sum of r^2^ values ranged greatly from 0.0055 to 23,879 (__Figure \@ref(fig:rsq-sum-distribution)__), with a median of 1.2. There was evidence that 54 studies had a total sum of r^2^ values greater than the mean (FDR < 0.05) and r^2^ values from individual associations from these studies made up the majority of r^2^ values greater than 0.1 (__Figure \@ref(fig:rsq-distribution)__). When excluding those studies from the results, the median r^2^ value at individual sites was 0.023. (ref:rsq-distribution-cap) __Distribution of r^2^ values across all CpG sites in The EWAS Catalog__. Each EWAS can identify multiple differentially methylated positions, each of which will capture some variance of the trait of interest for that EWAS (r^2^). $\sum {r^2}$ is the sum of r^2^ values, the distribution of which is shown in __Figure \@ref(fig:rsq-sum-distribution)__. Fifty-four studies were identified for which there was some evidence that the sum of r^2^ values were greater than the mean across all studies. All of the differentially methylated positions identified by those studies are highlighted in blue on the plot. These results suggest that some associations within the database are likely to be inflated, yet for most traits, variation at individual DNA methylation sites captures little trait variance. Summing the r^2^ values indicates a substantial proportion of trait variance can be captured by multiple DNA methylation sites for some traits, but this can only be estimated by jointly modelling the contribution of all sites to trait variance. This is explored in __Chapter \@ref(h2ewas-chapter)__. Here, the sum of r^2^ values is used to indicate whether the results of a study are likely inflated and thus may not be reliable. 
\pagebreak (ref:traits-manhattan-cap) __Number of unique traits associated with DNA methylation at each CpG__. Sites associated with more than 10 unique traits are highlighted in orange and labelled. (ref:traits-manhattan-scap) Number of unique traits associated with DNA methylation at each CpG \begin{figure}[!hp] {\centering \includegraphics[width=1\linewidth]{figure/04-properties_of_ewas/traits_per_dmp_at_1e-07} } \caption[(ref:traits-manhattan-scap)]{(ref:traits-manhattan-cap)}(\#fig:traits-manhattan) \end{figure} (ref:rsq-distribution-scap) Distribution of r^2^ values across all CpG sites in The EWAS Catalog \begin{figure}[!hp] {\centering \includegraphics[width=1\linewidth]{thesis_files/figure-latex/rsq-distribution-1} } \caption[(ref:rsq-distribution-scap)]{(ref:rsq-distribution-cap)}(\#fig:rsq-distribution) \end{figure} (ref:rsq-sum-distribution-cap) __Distribution of the sum of r^2^ values across each study in The EWAS Catalog__. (ref:rsq-sum-distribution-scap) Distribution of the sum of r^2^ values across each study in The EWAS Catalog \begin{figure}[!hp] {\centering \includegraphics[width=1\linewidth]{thesis_files/figure-latex/rsq-sum-distribution-1} } \caption[(ref:rsq-sum-distribution-scap)]{(ref:rsq-sum-distribution-cap)}(\#fig:rsq-sum-distribution) \end{figure} \pagebreak ### Robustness of results {#robustness-of-results} As discussed, cellular heterogeneity, batch effects and inclusion of faulty probes can lead to false positives in EWAS. The extent to which this might be the case within EWAS included within The EWAS Catalog was explored. Each study may have reported results across multiple EWAS models, adjusting for different covariates. In at least one model, 579 studies adjusted for batch effects, 518 studies adjusted for cell composition, and 489 adjusted for both. Of all DMPs identified, 9.3% were measured by potentially faulty probes and an extra 0.64% were present on sex chromosomes (__Figure \@ref(fig:faulty-probes-plot)__). 
(ref:faulty-probes-cap) __The percentage of DMPs that may have been identified by faulty probes and the percentage of EWAS that reported identifying at least one of these probes__. The left-hand bar represents all DMPs reported across all EWAS that fit into the categories shown, the right-hand bar represents the number of EWAS that include CpGs that fit into the categories shown. Some CpGs are both on a sex chromosome and were identified as faulty by Zhou et al. They were labelled as 'potentially faulty'. (ref:faulty-probes-scap) The percentage of DMPs that may have been identified by faulty probes and the percentage of EWAS that reported identifying at least one of these probes \begin{figure}[!h] {\centering \includegraphics[width=1\linewidth]{thesis_files/figure-latex/faulty-probes-plot-1} } \caption[(ref:faulty-probes-scap)]{(ref:faulty-probes-cap)}(\#fig:faulty-probes-plot) \end{figure} There were 30 studies that performed a meta-analysis of discovery and replication samples. A further 48 studies performed a separate replication analysis. Together, this provides 1666 associations within the EWAS Catalog that have been replicated at P < 1x10^-4^. From the studies that uploaded their data to GEO, the association between DNA methylation and the phenotype of interest from the original study was re-analysed, including 20 surrogate variables as covariates. Both the original study results and the results from the re-analysis of the phenotype of interest are in The EWAS Catalog database for 9 studies. Across the studies, between 0% and 96.875% of DMPs were replicated at P < 1x10^-4^ (__Table \@ref(tab:geo-reanalysis-tab)__). Some of these EWAS reported very few DMPs (some only 1) and as they would have used different models, replicating the single reported result was not expected. 
\begin{table}[H] \caption{(\#tab:geo-reanalysis-tab)GEO re-analysis replication} \centering \resizebox{\linewidth}{!}{ \begin{tabular}[t]{llll} \toprule Trait & N-DMPs & N-replicated & Percent-replicated\\ \midrule \cellcolor{gray!6}{Age at menarche} & \cellcolor{gray!6}{1} & \cellcolor{gray!6}{0} & \cellcolor{gray!6}{0.00}\\ Arsenic exposure & 12 & 0 & 0.00\\ \cellcolor{gray!6}{Fetal alcohol spectrum disorder} & \cellcolor{gray!6}{19} & \cellcolor{gray!6}{1} & \cellcolor{gray!6}{5.26}\\ Inflammatory bowel disease & 14 & 13 & 92.86\\ \cellcolor{gray!6}{Nevus count} & \cellcolor{gray!6}{1} & \cellcolor{gray!6}{0} & \cellcolor{gray!6}{0.00}\\ \addlinespace Psoriasis & 16 & 0 & 0.00\\ \cellcolor{gray!6}{Rheumatoid arthritis} & \cellcolor{gray!6}{47,875} & \cellcolor{gray!6}{116} & \cellcolor{gray!6}{0.24}\\ Smoking & 32 & 31 & 96.88\\ \cellcolor{gray!6}{Smoking} & \cellcolor{gray!6}{30} & \cellcolor{gray!6}{12} & \cellcolor{gray!6}{40.00}\\ \bottomrule \multicolumn{4}{l}{\textsuperscript{} N-DMPs = number of differentially methylated positions identified at P < 1x10\textsuperscript{-7}}\\ \multicolumn{4}{l}{\textsuperscript{} N-replicated = number of DMPs replicated in the GEO re-analysis at P < 1x10\textsuperscript{-4}}\\ \end{tabular}} \end{table} Using the catalog data, DMPs were examined to see if they were also associated with that same trait in another study at P<1x10^-4^. There were 72 studies that shared a common phenotype of interest. Replication rate, judged as the percentage of CpGs also present in any other study of the same trait with P<1x10^-4^, varied from 0 to 100 between studies (__Table \@ref(tab:replication-tab)__, __Table \@ref(tab:replication-tab-smoking)__, __Table \@ref(tab:replication-tab-bmi)__). 
For many of the traits, the number of identified DMPs was low (0 studies reported one DMP), therefore the low replication rate for these studies is not completely unexpected given potential study heterogeneity in important factors such as study power, age, sex, ancestry and study design. However, there were also three studies that identified over 100 DMPs and none of them replicated, including an EWAS of smoking for which there are many high-powered replication studies. \linebreak \begin{table} \caption{(\#tab:replication-tab)Replication rate} \centering \resizebox{\linewidth}{!}{ \begin{tabular}[t]{lllll} \toprule Trait & N-DMPs & N-replicated & N-replication-studies & Prop-replicated\\ \midrule \cellcolor{gray!6}{glucose} & \cellcolor{gray!6}{4} & \cellcolor{gray!6}{1} & \cellcolor{gray!6}{1} & \cellcolor{gray!6}{0.25000}\\ insulin & 3 & 1 & 2 & 0.33333\\ \cellcolor{gray!6}{insulin} & \cellcolor{gray!6}{1} & \cellcolor{gray!6}{0} & \cellcolor{gray!6}{2} & \cellcolor{gray!6}{0.00000}\\ alzheimers & 21 & 5 & 1 & 0.23810\\ \cellcolor{gray!6}{alzheimers} & \cellcolor{gray!6}{25} & \cellcolor{gray!6}{7} & \cellcolor{gray!6}{1} & \cellcolor{gray!6}{0.28000}\\ \addlinespace Birth weight & 27 & 0 & 4 & 0.00000\\ \cellcolor{gray!6}{Birth weight} & \cellcolor{gray!6}{1} & \cellcolor{gray!6}{0} & \cellcolor{gray!6}{4} & \cellcolor{gray!6}{0.00000}\\ Birth weight & 2 & 0 & 4 & 0.00000\\ \cellcolor{gray!6}{Triglycerides} & \cellcolor{gray!6}{4} & \cellcolor{gray!6}{2} & \cellcolor{gray!6}{3} & \cellcolor{gray!6}{0.50000}\\ Triglycerides & 11 & 6 & 3 & 0.54545\\ \addlinespace \cellcolor{gray!6}{Triglycerides} & \cellcolor{gray!6}{33} & \cellcolor{gray!6}{26} & \cellcolor{gray!6}{3} & \cellcolor{gray!6}{0.78788}\\ Triglycerides & 1 & 1 & 3 & 1.00000\\ \cellcolor{gray!6}{Waist circumference} & \cellcolor{gray!6}{172} & \cellcolor{gray!6}{6} & \cellcolor{gray!6}{2} & \cellcolor{gray!6}{0.03488}\\ Waist circumference & 11 & 3 & 2 & 0.27273\\ \cellcolor{gray!6}{Waist circumference} 
& \cellcolor{gray!6}{2} & \cellcolor{gray!6}{1} & \cellcolor{gray!6}{2} & \cellcolor{gray!6}{0.50000}\\ \addlinespace Type II diabetes & 11 & 2 & 2 & 0.18182\\ \cellcolor{gray!6}{Type II diabetes} & \cellcolor{gray!6}{6} & \cellcolor{gray!6}{0} & \cellcolor{gray!6}{2} & \cellcolor{gray!6}{0.00000}\\ Type II diabetes & 1 & 1 & 2 & 1.00000\\ \cellcolor{gray!6}{HOMA-IR} & \cellcolor{gray!6}{1} & \cellcolor{gray!6}{1} & \cellcolor{gray!6}{1} & \cellcolor{gray!6}{1.00000}\\ HOMA-IR & 5 & 1 & 1 & 0.20000\\ \addlinespace \cellcolor{gray!6}{Schizophrenia} & \cellcolor{gray!6}{3} & \cellcolor{gray!6}{0} & \cellcolor{gray!6}{2} & \cellcolor{gray!6}{0.00000}\\ Schizophrenia & 163 & 0 & 2 & 0.00000\\ \cellcolor{gray!6}{C-reactive protein} & \cellcolor{gray!6}{3} & \cellcolor{gray!6}{3} & \cellcolor{gray!6}{1} & \cellcolor{gray!6}{1.00000}\\ C-reactive protein & 226 & 17 & 1 & 0.07522\\ \cellcolor{gray!6}{High-density lipoprotein cholesterol} & \cellcolor{gray!6}{2} & \cellcolor{gray!6}{2} & \cellcolor{gray!6}{1} & \cellcolor{gray!6}{1.00000}\\ \addlinespace High-density lipoprotein cholesterol & 63 & 5 & 1 & 0.07937\\ \cellcolor{gray!6}{Serum high-density lipoprotein cholesterol} & \cellcolor{gray!6}{22} & \cellcolor{gray!6}{17} & \cellcolor{gray!6}{1} & \cellcolor{gray!6}{0.77273}\\ Serum high-density lipoprotein cholesterol & 213 & 11 & 1 & 0.05164\\ \cellcolor{gray!6}{Serum low-density lipoprotein cholesterol} & \cellcolor{gray!6}{61} & \cellcolor{gray!6}{0} & \cellcolor{gray!6}{1} & \cellcolor{gray!6}{0.00000}\\ Serum total cholesterol & 1 & 0 & 2 & \vphantom{1} 0.00000\\ \addlinespace \cellcolor{gray!6}{Serum total cholesterol} & \cellcolor{gray!6}{111} & \cellcolor{gray!6}{0} & \cellcolor{gray!6}{2} & \cellcolor{gray!6}{0.00000}\\ Serum total cholesterol & 1 & 0 & 2 & 0.00000\\ \cellcolor{gray!6}{Serum triglycerides} & \cellcolor{gray!6}{46} & \cellcolor{gray!6}{38} & \cellcolor{gray!6}{1} & \cellcolor{gray!6}{0.82609}\\ Serum triglycerides & 99 & 33 & 1 & 0.33333\\ 
\cellcolor{gray!6}{Rheumatoid arthritis} & \cellcolor{gray!6}{47,875} & \cellcolor{gray!6}{8} & \cellcolor{gray!6}{1} & \cellcolor{gray!6}{0.00017}\\ \addlinespace Rheumatoid arthritis & 6 & 0 & 1 & 0.00000\\ \cellcolor{gray!6}{Depression} & \cellcolor{gray!6}{1} & \cellcolor{gray!6}{0} & \cellcolor{gray!6}{1} & \cellcolor{gray!6}{0.00000}\\ Depression & 2 & 0 & 1 & 0.00000\\ \bottomrule \multicolumn{5}{l}{\textsuperscript{} N-DMPs = number of differentially methylated positions identified at P<1x10\textsuperscript{-7}}\\ \multicolumn{5}{l}{\textsuperscript{} N-replicated = number of DMPs replicated in the GEO re-analysis at P<1x10\textsuperscript{-4}}\\ \multicolumn{5}{l}{\textsuperscript{} N-replication-studies = number of studies for which replication was examined}\\ \multicolumn{5}{l}{\textsuperscript{} Prop-replicated = proportion of DMPs replicated.}\\ \end{tabular}} \end{table} \begin{table} \caption{(\#tab:replication-tab-smoking)Replication rate in EWAS of smoking} \centering \resizebox{\linewidth}{!}{ \begin{tabular}[t]{lllll} \toprule Trait & N-DMPs & N-replicated & N-replication-studies & Prop-replicated\\ \midrule \cellcolor{gray!6}{smoking} & \cellcolor{gray!6}{1} & \cellcolor{gray!6}{1} & \cellcolor{gray!6}{21} & \cellcolor{gray!6}{\vphantom{2} 1.00000}\\ smoking & 1 & 1 & 21 & \vphantom{1} 1.00000\\ \cellcolor{gray!6}{smoking} & \cellcolor{gray!6}{10} & \cellcolor{gray!6}{10} & \cellcolor{gray!6}{21} & \cellcolor{gray!6}{1.00000}\\ smoking & 1,065 & 862 & 21 & 0.80939\\ \cellcolor{gray!6}{smoking} & \cellcolor{gray!6}{1} & \cellcolor{gray!6}{1} & \cellcolor{gray!6}{21} & \cellcolor{gray!6}{1.00000}\\ \addlinespace smoking & 22 & 20 & 21 & 0.90909\\ \cellcolor{gray!6}{smoking} & \cellcolor{gray!6}{30} & \cellcolor{gray!6}{9} & \cellcolor{gray!6}{21} & \cellcolor{gray!6}{0.30000}\\ smoking & 44 & 42 & 21 & 0.95455\\ \cellcolor{gray!6}{smoking} & \cellcolor{gray!6}{32} & \cellcolor{gray!6}{31} & \cellcolor{gray!6}{21} & \cellcolor{gray!6}{0.96875}\\ 
smoking & 450 & 417 & 21 & 0.92667\\ \addlinespace \cellcolor{gray!6}{smoking} & \cellcolor{gray!6}{37} & \cellcolor{gray!6}{28} & \cellcolor{gray!6}{21} & \cellcolor{gray!6}{0.75676}\\ smoking & 3 & 3 & 21 & 1.00000\\ \cellcolor{gray!6}{smoking} & \cellcolor{gray!6}{60} & \cellcolor{gray!6}{57} & \cellcolor{gray!6}{21} & \cellcolor{gray!6}{0.95000}\\ smoking & 171 & 171 & 21 & 1.00000\\ \cellcolor{gray!6}{smoking} & \cellcolor{gray!6}{258} & \cellcolor{gray!6}{257} & \cellcolor{gray!6}{21} & \cellcolor{gray!6}{0.99612}\\ \addlinespace smoking & 20 & 1 & 21 & 0.05000\\ \cellcolor{gray!6}{smoking} & \cellcolor{gray!6}{2,780} & \cellcolor{gray!6}{1,117} & \cellcolor{gray!6}{21} & \cellcolor{gray!6}{0.40180}\\ smoking & 524 & 424 & 21 & 0.80916\\ \cellcolor{gray!6}{smoking} & \cellcolor{gray!6}{192} & \cellcolor{gray!6}{0} & \cellcolor{gray!6}{21} & \cellcolor{gray!6}{0.00000}\\ smoking & 177 & 172 & 21 & 0.97175\\ \addlinespace \cellcolor{gray!6}{maternal\_smoking\_in\_pregnancy} & \cellcolor{gray!6}{19} & \cellcolor{gray!6}{19} & \cellcolor{gray!6}{4} & \cellcolor{gray!6}{1.00000}\\ maternal\_smoking\_in\_pregnancy & 24 & 24 & 4 & 1.00000\\ \cellcolor{gray!6}{maternal\_smoking\_in\_pregnancy} & \cellcolor{gray!6}{1,591} & \cellcolor{gray!6}{413} & \cellcolor{gray!6}{4} & \cellcolor{gray!6}{0.25959}\\ maternal\_smoking\_in\_pregnancy & 121 & 121 & 4 & 1.00000\\ \cellcolor{gray!6}{maternal\_smoking\_in\_pregnancy} & \cellcolor{gray!6}{4} & \cellcolor{gray!6}{4} & \cellcolor{gray!6}{4} & \cellcolor{gray!6}{1.00000}\\ \bottomrule \multicolumn{5}{l}{\textsuperscript{} N-DMPs = number of differentially methylated positions identified at P<1x10\textsuperscript{-7}}\\ \multicolumn{5}{l}{\textsuperscript{} N-replicated = number of DMPs replicated in the GEO re-analysis at P<1x10\textsuperscript{-4}}\\ \multicolumn{5}{l}{\textsuperscript{} N-replication-studies = number of studies for which replication was examined}\\ \multicolumn{5}{l}{\textsuperscript{} Prop-replicated = 
proportion of DMPs replicated.}\\ \end{tabular}} \end{table} \begin{table} \caption{(\#tab:replication-tab-bmi)Replication rate in EWAS of body mass index} \centering \resizebox{\linewidth}{!}{ \begin{tabular}[t]{lllll} \toprule Trait & N-DMPs & N-replicated & N-replication-studies & Prop-replicated\\ \midrule \cellcolor{gray!6}{Body mass index} & \cellcolor{gray!6}{2} & \cellcolor{gray!6}{2} & \cellcolor{gray!6}{8} & \cellcolor{gray!6}{1.00000}\\ Body mass index & 133 & 83 & 8 & 0.62406\\ \cellcolor{gray!6}{Body mass index} & \cellcolor{gray!6}{13} & \cellcolor{gray!6}{8} & \cellcolor{gray!6}{8} & \cellcolor{gray!6}{0.61538}\\ Body mass index & 14 & 12 & 8 & 0.85714\\ \cellcolor{gray!6}{Body mass index} & \cellcolor{gray!6}{3} & \cellcolor{gray!6}{1} & \cellcolor{gray!6}{8} & \cellcolor{gray!6}{0.33333}\\ \addlinespace Body mass index & 5 & 3 & 8 & 0.60000\\ \cellcolor{gray!6}{Body mass index} & \cellcolor{gray!6}{821} & \cellcolor{gray!6}{306} & \cellcolor{gray!6}{8} & \cellcolor{gray!6}{0.37272}\\ Body mass index & 182 & 113 & 8 & 0.62088\\ \cellcolor{gray!6}{Body mass index} & \cellcolor{gray!6}{1} & \cellcolor{gray!6}{1} & \cellcolor{gray!6}{8} & \cellcolor{gray!6}{1.00000}\\ \bottomrule \multicolumn{5}{l}{\textsuperscript{} N-DMPs = number of differentially methylated positions identified at P<1x10\textsuperscript{-7}}\\ \multicolumn{5}{l}{\textsuperscript{} N-replicated = number of DMPs replicated in the GEO re-analysis at P<1x10\textsuperscript{-4}}\\ \multicolumn{5}{l}{\textsuperscript{} N-replication-studies = number of studies for which replication was examined}\\ \multicolumn{5}{l}{\textsuperscript{} Prop-replicated = proportion of DMPs replicated.}\\ \end{tabular}} \end{table} \pagebreak Before continuing to assess what CpG characteristics might, in part, explain some associations found in EWAS, sites were removed that were identified by potentially faulty probes and were on either of the sex chromosomes. 
Further, studies that did not include batch effects and cell composition as covariates in at least one EWAS model were removed, as were studies for which fewer than 10% of sites identified in the original analyses were identified in a re-analysis using the data provided via GEO. Overall, this left 619 EWAS and 54961 associations (at P<1x10^-4^). <!-- Smoking is associated with large changes in DNA methylation across the genome (REF) and is associated with many different traits (REF). Thus, it may confound DNA methylation associations found in the catalog. If this was the case, one might expect smoking related CpGs to appear more in the catalog than expected by chance. The DMPs identified by EWAS of traits other than smoking were enriched for smoking related CpG sites (P = X). --> <!-- ### Correlation across tissues {-#correlation-across-tissues} * There is correlation between DNA methylation sites across tissues, suggesting stability in DNAm, but would we expect to find associations at these positions across tissues? * Replication of sites across tissues for same traits? * Do sites that are highly correlated across tissues appear more than expected by chance in whole blood EWAS? + If yes then suggests the correlation might be due to decreased measurement error at those sites + If the opposite (correlated sites appear less than expected by chance), then it suggests that correlation occurs at positions that don't really matter (i.e. are just stable because of things like being at housekeeping genes) * Can check housekeeping gene theory if needed --> ### CpG characteristics {#cpg-characteristics} Using the selected EWAS results, it was investigated whether the characteristics of DNA methylation at CpG sites explained associations found in EWAS. It has previously been suggested that sites at which DNA methylation variability is low should be removed [@Meng2010; @Logue2017]. 
The rationale for this is that if total variation is low then the ratio of variation due to technical effects to variation due to biological effects will be greater and thus any association with a complex trait is more likely to be due to technical artefacts. However, it is unknown whether this may be removing sites pertinent to complex trait variation. When assessing whether variance at a CpG site was associated with the odds of a DMP being replicated in at least one other study, it was found that an increase of variance by one standard deviation was associated with decreased odds of a DMP replicating (OR = 0.83 [95% CI: 0.79, 0.87] per sd increase in CpG variance). This contrasts with the notion that removing sites with lower variance will reduce the chances of identifying sites due to technical artefacts. There was also strong evidence of an inverse association between variance at a CpG site and effect size. It was observed that an increase in the variance of DNA methylation by one standard deviation was associated with a decrease in absolute standardised effect size by 26% [95% CI: 26% decrease, 25% decrease]. This suggests that removal of sites with little variation may reduce the chances of discovering changes in DNA methylation that have larger effects, whilst not removing more unreliable sites. <!-- DNA methylation is a binary feature, a CpG site can either be methylated or not on a particular DNA molecule. However, when measuring methylation across multiple DNA molecules, the proportion of those molecules methylated at a given site will be between 0 and 1. If DNA methylation at a given site is important for specific regulatory functions within a group of cells, one might expect that site to be methylated (or unmethylated) in the majority of the cells. Thus, changes in methylation away from an extreme, might have more of an impact on cellular function. 
There was strong evidence of an association between mean DNA methylation levels and negative effect sizes (P = 5.1e-86, __Table \@ref(tab:cpg-chars-tab)__) and an inverse association between mean methylation levels and positive effect sizes (P = 1e-99, __Table \@ref(tab:cpg-chars-tab)__). --> DNA methylation changes are heritable [@McRae2014; @VanDongen2016], and DNA methylation could mediate the effects of genotype on complex traits or genotype might confound the association between DNA methylation and complex traits. It was found that an increase in heritability by 10% was associated with increased odds of a DMP replicating (OR = 1.9 [95% CI: 1.9, 2]). An increase in heritability by 10% was also associated with a decrease in absolute standardised effect size by 8% [95% CI: 8.3% decrease, 7.7% decrease]. Heritability also had a capacity to predict whether a site would be identified as a DMP in at least one study (AUC = 0.76), whereas variability only had a modest ability to do so (AUC = 0.59) and further did not add to the predictive capacity of heritability (combined AUC = 0.76). <!-- There was evidence that effect sizes tended to be greater in more heritable sites (P = 5.9e-05, __Table \@ref(tab:cpg-chars-tab)__). The combined variance in effect size estimates explained by DNA methylation variability and heritability was 0.084 (__Table \@ref(tab:cpg-chars-tab)__). 
\linebreak \begin{table}[!h] \caption{(\#tab:cpg-chars-tab)Association between CpG chars and associations in EWAS} \centering \resizebox{\linewidth}{!}{ \begin{tabular}[t]{lllll} \toprule characteristic & beta & r\textsuperscript{2} & p & auc\\ \midrule \cellcolor{gray!6}{avg-meth (beta>0)} & \cellcolor{gray!6}{-6.7e-01} & \cellcolor{gray!6}{0.08377} & \cellcolor{gray!6}{1.0e-99} & \cellcolor{gray!6}{NA}\\ avg-meth (beta<0) & 4.3e-01 & 0.09138 & 5.1e-86 & NA\\ \cellcolor{gray!6}{variance} & \cellcolor{gray!6}{-1.2e+03} & \cellcolor{gray!6}{0.01806} & \cellcolor{gray!6}{1.0e-99} & \cellcolor{gray!6}{0.62}\\ h\textsuperscript{2} & 8.1e-02 & 0.00037 & 5.9e-05 & 0.78\\ \cellcolor{gray!6}{variance + h\textsuperscript{2}} & \cellcolor{gray!6}{NA} & \cellcolor{gray!6}{0.08395} & \cellcolor{gray!6}{NA} & \cellcolor{gray!6}{0.78}\\ \bottomrule \multicolumn{5}{l}{\textsuperscript{} avg-meth = average methylation level}\\ \multicolumn{5}{l}{\textsuperscript{} beta > 0 = DNA methylation hypermethylated with respect to the trait}\\ \multicolumn{5}{l}{\textsuperscript{} beta < 0 = DNA methylation hypomethylated with respect to the trait}\\ \multicolumn{5}{l}{\textsuperscript{} auc = area under the curve}\\ \end{tabular}} \end{table} \linebreak --> ### Enrichment of DMPs for genomic annotations {#dmp-enrichment-results} As the position of DNA methylation relative to genes is pertinent to its association with gene expression (__Section \@ref(dna-methylation)__) [@Jones2012; @Ando2019; @Deaton2011; @Wolf1984; @Hellman2007], the enrichment of DMPs identified in The EWAS Catalog across genomic regions and chromatin states were assessed (__Figure \@ref(fig:chrom-state-plot)__). Across all tissues, there was a trend for sites to be enriched for promoter regions (OR > 1). 
Evidence of enrichment across different enhancer types was mixed and there was a trend towards depletion of sites within heterochromatic regions, poised and bivalent promoters, regions repressed by polycomb proteins and quiescent regions (__Figure \@ref(fig:chrom-state-plot)__, OR < 1). (ref:chrom-state-cap) __Enrichment of DMPs for 25 chromatin states__. Chromatin states across the genome of 127 cell types comprising 25 distinct tissues were available from the Roadmap Epigenomics Project. Using LOLA, the enrichment of DMPs from across all data in The EWAS Catalog for chromatin states was assessed. DMPs were divided into five categories as detailed in __Section \@ref(enrichment-tests-04)__. The x-axis shows the 25 chromatin states: TssA, Active TSS; PromU, Promoter Upstream TSS; PromD1, Promoter Downstream TSS with DNase; PromD2, Promoter Downstream TSS; Tx5', Transcription 5'; Tx, Transcription; Tx3', Transcription 3'; TxWk, Weak transcription; TxReg, Transcription Regulatory; TxEnh5', Transcription 5' Enhancer; TxEnh3', Transcription 3' Enhancer; TxEnhW, Transcription Weak Enhancer; EnhA1, Active Enhancer 1; EnhA2, Active Enhancer 2; EnhAF, Active Enhancer Flank; EnhW1, Weak Enhancer 1; EnhW2, Weak Enhancer 2; EnhAc, Enhancer Acetylation Only; DNase, DNase only; ZNF/Rpts, ZNF genes & repeats; Het, Heterochromatin; PromP, Poised Promoter; PromBiv, Bivalent Promoter; ReprPC, Repressed PolyComb; Quies, Quiescent/Low. (ref:chrom-state-scap) Enrichment of DMPs for 25 chromatin states \begin{figure}[!htp] {\centering \includegraphics[width=1\linewidth]{figure/04-properties_of_ewas/chromatin_states_enrichment_boxplots_onepage} } \caption[(ref:chrom-state-scap)]{(ref:chrom-state-cap)}(\#fig:chrom-state-plot) \end{figure} The DMPs identified by EWAS were also enriched for transcription factor binding sites. Of the 167 transcription factor binding sites tested, there was evidence that identified DMPs were enriched in 158 of them in at least one tissue type (FDR < 0.05). 
The strongest enrichments were found for transcription factor binding sites as measured in blood (median OR ranged from 1.5 to 1.7 based on how DMPs were defined - see __Section \@ref(enrichment-tests-04)__ for details) and varied from tissue to tissue, but overall enrichment was observed more often than not across all tissues (__Figure \@ref(fig:tfbs-plot)__). (ref:tfbs-cap) __Enrichment of DMPs for 167 transcription factor binding sites__. Using LOLA, the enrichment of DMPs from across all data in The EWAS Catalog for 167 transcription factor binding sites confirmed across 25 distinct tissues was assessed. DMPs were divided into five categories as detailed in __Section \@ref(enrichment-tests-04)__. The x-axis shows the 25 distinct tissues. Not all transcription factor binding sites have been confirmed across all tissues. For some tissues (e.g. "Eye" and "Gingiva") only five have been confirmed, but in blood over 131 have been confirmed. (ref:tfbs-scap) Enrichment of DMPs for 167 transcription factor binding sites \begin{figure} {\centering \includegraphics[width=1\linewidth]{figure/04-properties_of_ewas/tfbs_enrichment_plot} } \caption[(ref:tfbs-scap)]{(ref:tfbs-cap)}(\#fig:tfbs-plot) \end{figure} \newpage ## Discussion {#discussion-04} Understanding the nature of EWAS associations is imperative for biological inference. Using data from The EWAS Catalog this chapter shows that many CpGs associate with multiple different unique traits and the magnitude of these associations is partly explained by the characteristics of DNA methylation levels. False positives may also explain a proportion of EWAS associations. Roughly 10% of the DMPs identified were measured by potentially faulty probes and the median percentage of CpGs that could be replicated across studies was 52%. 
### Identifying mediators {#identifying-mediators} Identifying modifiable molecular traits that mediate the effect of complex traits on disease is something that motivates a substantial portion of molecular epidemiology research [@Relton2010; @Relton2012]. Having a database of associations between DNA methylation and various traits and diseases may enable easy identification of potential mediators that warrant follow-up. Overall, DNA methylation at 126,673 CpGs is associated with multiple traits. The CpG that was identified in the most EWAS, cg06500161 (_ABCG1_), had evidence from multiple studies that methylation at that site was associated with weight-related traits such as body mass index [@Mendelson2017; @Wahl2017; @Shah2015; @Demerath2015] and waist circumference [@Demerath2015], roughly 60 metabolites [@Hedman2017; @Sayols-Baixeras2016; @Braun2017; @Kriebel2016; @Pfeiffer2015] and with type 2 diabetes [@Chambers2015]. Some studies have explored these associations further, for example, two studies used Mendelian randomization (MR) to provide evidence that body mass index caused changes in methylation at this site [@Mendelson2017; @Wahl2017]. However, full characterisation and assessment of whether methylation at that site mediates the effect of adverse adiposity on any diseases has not been undertaken and could be followed up. ### Biased results {#biased-results} The potential biases in EWAS have been well documented [@Birney2016] and were discussed at length in __Section \@ref(problems-for-ewas)__. It is encouraging that the majority of studies include batch effects and cell composition in at least one of their models (79%). However, there are still some studies including probes that have been characterised as faulty. Differences in cell composition, sample ethnicity, covariates used and other differential biases between studies might explain the low replication rate in some cases. 
However, studies only tend to report associations below the conventional EWAS P-value threshold, P<1x10^-7^, so differences in study power could also be a major factor. ### Understanding CpG characteristics {#understanding-cpg-characteristics} Characteristics of DNA methylation discovered in experimental studies, such as its association with gene expression, were used to select sites to measure DNA methylation for array-based technology used in many EWAS [@Illumina2012]. Individual studies may also select CpGs to analyse from those assayed based on statistical characteristics such as high variance [@Meng2010; @Logue2017]. Our results suggest removing CpG sites with low variances may make it more likely to remove sites with greater effects. Variance had a modest ability to predict whether or not a CpG site was likely to be identified in an EWAS, and it did not add to the predictive ability of heritability, despite explaining a higher proportion of variance in effect estimates. This may be explained by two things. Firstly, having a lower variance in the independent or dependent variable increases the standard error of the beta coefficient in a linear regression. Secondly, heritability will in part determine variance of DNA methylation. ### Choosing sites to measure {#choosing-sites-to-measure} As discussed in __Section \@ref(ewas)__, the HM450 array was designed to capture DNA methylation in various regions of the genome. The probes of the array target roughly only 2% of CpG sites in the genome, yet target over 99% of protein coding genes and predominantly target the promoter regions of these genes [@Illumina2012]. The newer HMEPIC array captures much of what the HM450 array does, and further covers 58% of FANTOM5 enhancers [@Pidsley2016]. The trend for DMPs to be enriched for promoter regions (compared to regions of similar CG density) suggests there may have been some justification for the chosen sites. 
However, not all promoter regions were enriched with DMPs and bivalent promoters were depleted for DMPs. Enrichment of enhancers was also seen, but the magnitude of enrichment was smaller. When designing future arrays, these results suggest that continuing to target promoters and enhancers, whilst avoiding gene regions that are less likely to be actively transcribed may yield more associations in EWAS. Despite the tissue specific nature of DNA methylation, the regions for which DMPs identified in The EWAS Catalog were found to be enriched were fairly consistent across tissues. However, enrichment of DMPs tended to be greater for blood-based genomic annotations, perhaps reflecting the fact the majority of EWAS in The EWAS Catalog were conducted using DNA methylation measured in whole blood. ### Limitations {#limitations-04} Individual participant data were not available and thus to calculate standardised betas, the variance of the trait had to be estimated from external measures of DNA methylation. If the GoDMC sample is not representative of the sample used for the study EWAS then these estimates may be substantially biased. Further, many studies do not report the effect estimates from their statistical analyses. If there is a marked difference in the studies that do not report effect sizes and those that do, then any associations between standardised effect estimates and DNA methylation site characteristics are likely to be biased. Like other observable phenotypes, DNA methylation varies under many contexts. Age, sex, tissue type, population, socioeconomic position and many other factors may influence the results of EWAS [@Birney2016; @Relton2010; @Relton2012]. The majority of EWAS conducted have used DNA methylation measured in whole blood from European adults making the results not necessarily apply broadly outside those bounds. The need for tissue-specific data has been discussed previously in __Section \@ref(problems-for-ewas)__. 
Differences in DNA methylation between ethnic groups have been shown previously [@ToinetCronje2020] and the predictive value of a smoking-related methylation score was shown to differ between Europeans and South Asians [@Elliott2014]. This suggests any biological insight and population health benefits that may be the result of EWAS are likely to be maximised by diversifying populations. It is unclear from the work in this chapter whether the CpG characteristics and genomic annotations that show evidence that they influence EWAS results, will also influence EWAS results in the same way within a more ethnically diverse selection of samples. ## Conclusion {#conclusion-04} This chapter demonstrates the potential for using large-scale EWAS databases to understand DNA methylation-trait associations. It was found that study design flaws can help explain some associations. However, it is noteworthy that the vast majority of studies have accounted for some potential biasing factors, for example 79% of studies adjusted for batch effects and cell composition. Further, there was an inverse association between DNA methylation variability and effect size, suggesting that studies that remove variable sites prior to analysis could be excluding important regions from the analysis. Finally, cg06500161 _ABCG1_ was identified as being associated with 71 traits that share known biological relationships. This highlights the potential to use The EWAS Catalog to identify molecular markers that might underlie the relationship between traits. <!--chapter:end:04-properties_of_ewas.Rmd--> # Exploring the variance in complex traits captured by DNA methylation assays {#h2ewas-chapter} ## Chapter summary {#chapter-summary-05} In __Chapter \@ref(properties-of-ewas)__, various aspects of DNA methylation were shown to correlate with the chances of a CpG site being identified in epigenome-wide association studies (EWAS). 
However, whether these identified CpG sites, or those that do not associate with traits at a given P-value threshold, capture much complex trait variance is unclear. Quantifying the total covariation between the DNA methylation marks most commonly measured in EWAS and complex traits may reveal how much more information is to be gained from the current EWAS design by increasing sample sizes. By re-purposing methods developed to estimate SNP-heritability, I estimated the proportion of phenotypic variation captured by 421,693 blood derived DNA methylation markers (h^2^~EWAS~) across 400 independent traits (1 - [r] > 0.4). The mean h^2^~EWAS~ was zero, with evidence for regular cigarette smoking exhibiting the largest association with all markers (h^2^~EWAS~ = 0.42) and the only one with evidence of h^2^~EWAS~ > 0 (FDR < 0.1). Though underpowered to determine the h^2^~EWAS~ value for any one trait, h^2^~EWAS~ was predictive of the number of EWAS hits across the traits analysed (AUC=0.7). Modelling the contributions of the methylome on a per-site versus a per-region basis gave varied h^2^~EWAS~ estimates (r=0.47) but neither approach obtained substantially higher model fits across all traits. Our analysis indicates that most complex traits do not heavily associate with the markers commonly measured in EWAS within blood. However, it is likely DNA methylation does capture variation in some traits and h^2^~EWAS~ may be a reasonable way to prioritise these traits that are likely to yield associations in EWAS. ### Contributions statement {#contributions-statement-05} I performed all analyses and wrote everything in this chapter. ## Introduction {#introduction-05} Epigenome-wide association studies (EWAS) aim to assess the association between phenotypes of interest and DNA methylation across hundreds of thousands of CpG sites throughout the genome [@Birney2016; @Rakyan2011]. 
As seen in __Chapter \@ref(properties-of-ewas)__, many recent EWAS yielded few sites across the genome with strong evidence for association. The proportion of total trait variance associated with each of these sites tended to be small (__Figure \@ref(fig:rsq-distribution)__), but when modelled together, may be large. There is a need to have a global view of the contribution of DNA methylation to complex traits in order to interpret these results. There are multiple possible reasons for there being few EWAS signals. Firstly, DNA methylation varies between cells and tissues, thus any changes related to a trait may occur in any number of tissues. Currently, because of ease of access and cost, the most common tissue used for EWAS is blood, which may not capture changes in DNA methylation related to the trait of interest [@Birney2016; @Rakyan2011]. Secondly, the commonly used technologies probe a small percentage of the total number of potentially methylated sites. In the absence of full knowledge of the correlation structure across methylation site variation, it is therefore difficult to fully understand coverage in current measures. Two more possibilities are that DNA methylation variation is actually not associated with the traits studied or that the associations are many but individually too small to detect with current sample sizes (Box 1). Interpretation of the paucity of EWAS hits is difficult because there is no knowledge of the total contribution of methylation variation to the trait. However, analogous to the calculation of genetic heritability estimates, which have now been expanded to make inference across non-familial population-level data (SNP heritability), the total contribution of methylation markers to complex traits can potentially be estimated. 
This could give insight into the underlying patterns of association between DNA methylation markers and complex traits (See Box 2 for a simple explanation of SNP heritability (or h^2^~SNP~) and its application to DNA methylation). SNP heritability estimates are sensitive to assumptions of the underlying genetic architecture and there are different ways in which to model the contribution of each SNP to the overall genetic component. The original model of calculating h^2^~SNP~ introduced by Yang et al. assumes that each variant has an effect that is independent of the regional linkage disequilibrium (LD) structure as each variant is unweighted (the blanket model), and this effectively assumes regions of high LD contribute more to phenotypic variance [@Yang2010]. Speed et al. proposed a new model, which considered the LD between SNPs so that each region of high LD can effectively be counted as a singular effect (the grouping model) [@Speed2012]. Finding which models fit the data better helps ensure a more accurate estimation of the proportion of DNA methylation association with a trait, further contrasting these models could also be biologically informative. Gene regions are methylated in a coordinated fashion, which is associated with changes in gene expression [@Jones2009; @Jones2012], with a tendency for promoter regions to be unmethylated and gene body regions to be methylated when gene expression is activated [@Jones2012]. This, amongst other complex patterns of gene regulation, induces a correlation structure within EWAS data, and it is not clear whether a single site is driving an association and neighbouring sites are consequentially correlated, or if the cumulative contributions of all neighbouring sites associate with the regulatory process. 
In EWAS, a common strategy is to collapse DNA methylation sites into groups based on proximity and if they share the same direction of association and potentially magnitude of association, this is often called differentially methylated region (DMR) analysis [@Chen2016]. This, however, does not explain whether the sites within groups are acting independently and cumulatively or as a set of distinct influences. __Figure \@ref(fig:h2ewas-model-comp)__ shows a representation of how the differences in models apply to DNA methylation data at a single small region using one specific example. Of course, there are far more scenarios possible and furthermore, the models aren’t restricted to a single small region in the genome. They apply to all sites, as do the DMR methods used in EWAS. Thus, by applying both methods to DNA methylation data across multiple phenotypes and comparing their utility, insight can be gained into how DNA methylation operates across gene regions. Furthermore, it is important to find the model that best fits the data to help prevent biased estimates. This chapter aims to estimate h^2^~EWAS~ values across a plethora of traits and assesses whether this estimate may be useful in identifying traits for which EWAS will likely yield successful identification of associated DNA methylation sites. To do this I perform hundreds of EWAS studies and evaluate if h^2^~EWAS~ estimates are predictive of the number of sites identified by the EWAS at various P value thresholds. I also compare the performance of different models underlying h^2^~EWAS~ estimates to infer likely methylation architecture of complex traits. *** **Box 1** The need for larger sample sizes in GWAS has been empirically demonstrated across a broad range of traits. For height and body mass index (BMI), the number of associations dramatically increased from 12 to 3290 and from one to 941, respectively after increasing sample sizes by ~670,000 [@Lettre2008; @Frayling2007; @Yengo2018]. 
This trend can be seen for many traits. Similar to early GWAS, many EWAS are discovering few sites strongly associated with complex traits. However, an example that suggests promise for increasing sample sizes for EWAS is seen with BMI, where an EWAS of 459 individuals identified just five sites, but increasing the sample size to over 5,000 led to identification of 278 sites [@Dick2014; @Wahl2017]. While we can continue to improve sample sizes in EWAS, there is a need to determine the upper limit of the information we can obtain from EWAS of complex traits like BMI. Furthermore, the BMI EWAS example may be unrepresentative of other traits, so having a corollary test for estimating h^2^~SNP~ for DNA methylation would help us understand if we’re capturing relevant information from the current arrays we are using in EWAS. Such information could inform future study designs in terms of growing sample sizes with the current assays available versus designing new assays. *** *** **Box 2** Methods used to estimate h^2^~SNP~ use restricted maximum likelihood (REML) tests to estimate the proportion of variance attributable to these genetic variants. Essentially this assesses whether individuals that are genetically similar are more likely to be phenotypically similar. If those individuals that have a high genetic overlap tend to correlate strongly phenotypically compared to those that don’t have high genetic overlap, then the phenotype of interest will have a high h^2^~SNP~. Unlike genetic variants, DNA methylation is responsive to the environment [@Birney2016] and determining causal directionality between DNA methylation markers associated with traits is not trivial [@Relton2010; @Tahara2015; @Kandimalla2013]. Therefore, estimating the proportion of trait variation captured by DNA methylation variation (which will henceforth be denoted as h^2^~EWAS~) using the same techniques will ascertain effects going in both directions as well as associations due to confounding. 
The combination of these mechanisms may increase power to detect trait-DNA methylation association, and could be the reason that so many DNA methylation markers are found in small EWAS compared to similarly sized GWAS [@Wahl2017]. *** (ref:h2ewas-model-comp-cap) __Comparison of the grouping and blanket models in the context of the relationship between DNA methylation and gene expression__. Both regions are exactly the same; the only difference is how each model assumes the methylation sites should be treated. The grouping model down-weights the contribution of correlated CpGs, effectively grouping them, and the blanket model assumes each CpG independently associates with a trait. As seen here, the grouping of correlated CpG sites may not be the correct thing to do as some of the sites may be acting independently of their correlated partners. (ref:h2ewas-model-comp-scap) Comparison of the grouping and blanket models in the context of the relationship between DNA methylation and gene expression \begin{figure} {\centering \includegraphics[width=1\linewidth]{figure/05-h2ewas/m2_model_comparison} } \caption[(ref:h2ewas-model-comp-scap)]{(ref:h2ewas-model-comp-cap)}(\#fig:h2ewas-model-comp) \end{figure} ## Method {#method-05} ### Study samples {#study-samples-05} All data used in this chapter came from the ARIES subsection of the ALSPAC cohort (see __Section \@ref(aries-02)__ for study details). This chapter uses phenotypic and DNA methylation data from 940 mothers at middle age for which data could be extracted. Not all individuals had data on the various covariates used in the analyses, thus the sample size varied with each analysis (mean of 805 and range of 491 to 940). Continuous and binary phenotypes measured in mothers were extracted from the cohort using the ‘alspac’ R package (github.com/explodecomputer/alspac) and went through extensive quality control. Originally over 15,000 traits that were related to the mothers were extracted from the database. 
After the quality control process, which is detailed in __Figure \@ref(fig:h2ewas-pheno-qc)__, there were 2408 traits left for analysis. (ref:h2ewas-pheno-qc-cap) __A summary of the data cleaning steps__. Binary variables with fewer than 100 cases or controls were removed. Variables with "no clear relevance" to the mothers were identified manually, most were descriptive of how samples were measured, for example the fieldworker that examined the mother several months or years after blood draw. Normality of traits after rank normal transformations were assessed using a Shapiro-Wilk test. Those with some evidence of non-normality (P < 0.05), were re-examined by eye and removed if thought to be non-normal. (ref:h2ewas-pheno-qc-scap) A summary of the data cleaning steps \begin{figure}[!hp] {\centering \includegraphics[width=0.675\linewidth,height=0.75\textheight]{figure/05-h2ewas/m2_data_cleaning} } \caption[(ref:h2ewas-pheno-qc-scap)]{(ref:h2ewas-pheno-qc-cap)}(\#fig:h2ewas-pheno-qc) \end{figure} All continuous traits were rank-normalised for further analyses. A Shapiro-Wilk test of normality was performed on these rank-normalised traits and for those with some evidence of non-normality (P < 0.05), the distribution of those traits was re-examined by eye to ensure it was approximately normal. It was found that any non-normality of phenotype distributions corresponded to an inflation of zero values. These traits were removed and overall there were 2408 traits left for analyses. These traits do not necessarily represent independent phenotypes and as such I wanted to prevent correlated traits skewing results. The absolute Pearson’s correlation coefficient between each trait was subtracted from one (1 –[r]). Then traits were greedily selected where 1 – [r] > 0.4 with any other trait. 
This left 400 traits, which consisted of ~30% clinically measured variables (including roughly 50 metabolites and some anthropometric traits), ~25% health related questions (for example “have you ever had asthma?”), ~40% behavioural and social traits (for example educational attainment variables, use of pesticide, and having pets), and ~5% of traits were related to the partner or child of the participant (for example the employment status of the partner). ### DNA methylation data {#dna-methylation-data-05} The DNA methylation data also came from ARIES. Measurement and quality control of these data are described in __Section \@ref(aries-dnam-data)__. Probes were excluded if they were present on either of the sex chromosomes, a SNP/control probe, had a detection p value > 0.05 across over 10% of samples or were identified as problematic by Zhou et al. [@Zhou2017]. This left 421,693 CpG sites for analyses. Before analysis a linear regression model was fitted with beta values for methylation (which ranges from 0 (no cytosines methylated) to 1 (all cytosines methylated)) as the outcome against batch variables (plate ID in ALSPAC) modelled as a random effect to help remove the effects of batch in the subsequent analyses. ### REML analysis {#reml-analysis} Using LDAK [@Speed2017] the relationship between the methylomes (as measured by the HM450 array) of 940 individuals was estimated by producing a DNA methylation relationship matrix (MRM). This matrix was used as input for the REML analysis to estimate the proportion of a trait’s variation that was explained by DNA methylation (h^2^~EWAS~). Age, the top 10 ancestry principal components, and derived cell proportions were added as covariates to the model. 
When producing the MRM, probes were scaled by their observed variance and the weighting of each probe was based on the variance of DNA methylation at that site using the formula below: \begin{equation} f_{j}(1-f_{j})^{(\alpha/2)} (\#eq:mrm-weights) \end{equation} where $f_j(1-f_j)$ is the variance of methylation at CpG $j$. The higher the alpha value the more weight is given to probes with greater variance; an alpha value of -1 gives equal weight to probes with low and high variance. The alpha value of -0.25 was chosen because previous analysis by Speed et al. [@Speed2017] suggested that this value was optimal for measuring h^2^~SNP~. Furthermore, it was hypothesised that probes with a greater variance would contribute more to trait variance. As the method was applied to DNA methylation data in this chapter, sensitivity analyses were conducted. MRMs were created specifying the alpha value at increasing increments of 0.25 from -2 to 0, and the association between h^2^~EWAS~ and the number of EWAS hits was re-assessed for each. The mean of the MRM diagonal should be 1 and the variance close to 0, as the diagonal values essentially represent the correlation between an individual’s methylome with itself, although values are expected to vary slightly from 1. For the MRMs it was identified that some diagonal elements were very high (> 2), which caused the diagonal to have a high variance (0.13). To assess whether these values could skew results, sensitivity analyses were conducted, removing individuals with varying diagonal value cut-offs. Like h^2^~SNP~ estimates, h^2^~EWAS~ estimates should range from zero to one. If a trait has a true h^2^~EWAS~ value of zero, there is no association between the methylome and that trait, and if h^2^~EWAS~ equals one then DNA methylation has the capacity to completely predict that trait. 
However, estimation of h^2^~EWAS~ can be fairly imprecise and without constraining the software it’s possible to get estimates of h^2^~EWAS~ that are outside 0-1 due to large standard errors. These point estimates have to be erroneous by definition. Even though the grouping model effectively groups sites together, it is actually likely to increase the number of parameters because without the weightings imposed by this model, the blanket model essentially ignores sites that are not neighbouring others. Therefore, larger standard errors are expected with the grouping model. The grouping model applies a sliding window approach, with windows of 100kb, to capture the correlation between neighbouring sites and weight sites according to the correlation structure of the region. When applying the grouping model, the number of sites that were weighted were 45,863 (out of 421,693) and the number of sites neighbouring any single CpG site ranged from 29 to 28,217. ### Generating genetic principal components {#generating-genetic-principal-components} Ancestry principal components were generated within ALSPAC mothers using PLINK (v1.9). Before analysis, genetic data went through quality control and were imputed, full details can be found in __Section \@ref(alspac-genetic-data)__. After quality control and imputation, independent SNPs (r^2^ < 0.01) were used to calculate the top 10 ancestry principal components. ### Epigenome-wide association studies {#methods-ewas-05} EWAS were conducted for 400 selected traits (see __Section \@ref(study-samples-05)__ for selection process) within the ALSPAC cohort. For all traits, linear regression models were fitted with beta values of DNA methylation as the outcome and the phenotype as the exposure. Covariates included age, the top 10 ancestry principal components and cell proportions. 
### Association between h^2^~EWAS~ and epigenome-wide association studies results {#methods-h2ewas-dmp} DMPs were extracted from the EWAS at P value thresholds ranging from 10^-3^ to 10^-7^. It was assessed whether h^2^~EWAS~ could predict that the number of identified DMPs in an EWAS was greater than number of DMPs expected to be identified at a given P threshold defined as the number of sites tested multiplied by the threshold. The traits were also “pruned” in the same way as described above, to prevent including overly correlated traits and biasing results. The sensitivity and specificity of this prediction was calculated and a receiver operating characteristic (ROC) curve was plotted. At p-value thresholds of 10^-6.5^ and 10^-7^ there were fewer than 100 traits for which EWAS identified any sites, so these thresholds were removed from the analysis. The association between the number of DMPs identified at P<1x10^-5^ and h^2^~EWAS~ values was assessed using a negative binomial hurdle model with the number of DMPs identified fitted as the outcome and h^2^~EWAS~ as the exposure. The negative binomial hurdle Poisson regression model results are twofold. The first of which assesses whether there is an association between the binary trait of whether a DMP was identified by EWAS (dependent variable) and h^2^~EWAS~ (independent variable). The second is a zero-truncated model, i.e. the zero values are removed from the model and the association between number of DMPs (dependent variable) and h^2^~EWAS~ (independent variable) is assessed. The same method was applied to estimate the association between the number of SNPs identified in GWAS at P<5x10^-8^ and h^2^~SNP~. SNPs associated with 485 traits in UK Biobank (see __Section \@ref(uk-biobank-02)__ for a brief description of the cohort) were extracted using the IEU OpenGWAS Project [@Hemani2018]. The h^2^~SNP~ estimates were extracted from http://www.nealelab.is/uk-biobank/. 
All analyses were conducted in R (version 3.3.3) or using the command line software LDAK [@Speed2017], GCTA [@Yang2011], and PLINK [@Chang2015]. For the EWAS analyses, the meffil R package was used [@Min2018]. A one-sided P value was used to assess if the h^2^~EWAS~ for a trait was > 0, and two-sided P values were used for everything else. ## Results {#results-05} A flowchart showing our study design and giving a summary of the results is shown in __Figure \@ref(fig:h2ewas-study-design)__. (ref:h2ewas-study-design-cap) __Study design with a summary of the results__. ALSPAC = Avon Longitudinal Study of Parents and Children, QC = quality control, EWAS = epigenome-wide association study, MRM = methylation relationship matrix, AUC = area under curve. (ref:h2ewas-study-design-scap) Study design with a summary of the results \begin{figure} {\centering \includegraphics[width=1\linewidth]{figure/05-h2ewas/m2_workflow} } \caption[(ref:h2ewas-study-design-scap)]{(ref:h2ewas-study-design-cap)}(\#fig:h2ewas-study-design) \end{figure} ### Estimating the proportion of phenotypic variance associated with DNA methylation {#estimating-h2ewas} Two models were used to estimate the total contribution of all DNA methylation sites to the variation (h^2^~EWAS~) for each of 400 traits within up to 940 individuals. The mean for both models was zero with ranges of -0.4 to 0.4 and -0.5 to 0.4 for the blanket and grouping models respectively __Figure \@ref(fig:h2ewas-estimates)__. The estimates were imprecise, the mean standard error was 0.03 and 0.05 for the blanket and grouping models respectively. The trait with the greatest evidence for h^2^~EWAS~ estimates being above zero was having smoked cigarettes regularly (FDR-corrected P = 0.06 and 0.10 for the blanket and grouping models respectively). 
The correlation between the h^2^~EWAS~ estimates of the two models was 0.47 and there was evidence that on average the estimates of the grouping model were higher (Paired t-test P = 1.8x10^-5^, __Figure \@ref(fig:h2ewas-estimates)__), and the mean difference between estimates was 0.018. There was little evidence that either of the models fit the data better (had higher likelihoods) across the 400 traits tested (difference in median likelihoods = 0.19, Wilcoxon’s paired ranked sum test P = 0.73). Further, the majority of h^2^~EWAS~ estimate differences between the traits were small. (ref:h2ewas-estimates-cap) __A comparison of h^2^~EWAS~ estimates given by applying REML using the blanket and grouping models across 400 traits__. The blue dashed line is at x=y. Values with h^2^~EWAS~ lower than 0 are due to imprecision in h^2^~EWAS~ estimates as the true estimate cannot be negative. Smoked_cigs_reg = smoked cigarettes regularly. The h^2^~EWAS~ of this phenotype has the greatest evidence for being above 0 for both the blanket and grouping model (Uncorrected P = 1.44x10^-4^ and P = 2.61x10^-4^, respectively). (ref:h2ewas-estimates-scap) A comparison of h^2^~EWAS~ estimates given by applying REML using the blanket and grouping models across 400 traits \begin{figure} {\centering \includegraphics[width=1\linewidth]{figure/05-h2ewas/model_m2_comparison} } \caption[(ref:h2ewas-estimates-scap)]{(ref:h2ewas-estimates-cap)}(\#fig:h2ewas-estimates) \end{figure} ### Sensitivity analyses when estimating the proportion of phenotypic variance associated with DNA methylation {#results-sensitivity-analyses-05} After examination of the MRMs required to produce the h^2^~EWAS~ estimates, for both the blanket and grouping model some unexpected values were observed. Ninety-six diagonal elements had values over 1.5 when using the blanket model, with the maximum value being 3.562. 
When assessing the impact of these potential outliers in the MRM on results it was found that the median and range of h^2^~EWAS~ estimates varied little (__Figure \@ref(fig:h2ewas-sens)__). The likelihood of the models tended to be greater as more outliers were removed (lower threshold for classing a diagonal element as an outlier), but it still did not vary greatly (__Figure \@ref(fig:h2ewas-sens)__). The weight of predictors used to produce the MRMs was also examined. As more weight was given to sites where methylation variation was greater (increasing alpha value) the h^2^~EWAS~ estimates were slightly higher (__Figure \@ref(fig:h2ewas-sens)__). However, the likelihood varied little; the median likelihood had a range of 2 across the alpha values (__Figure \@ref(fig:h2ewas-sens)__). (ref:h2ewas-sens-cap) __Boxplots summarising sensitivity analyses__. __A__ and __B__ Analyses estimating h^2^~EWAS~ were repeated after removing individuals who had high diagonal values in the methylation relationship matrix (MRM). Varying thresholds were used to define individuals as an outlier, NA indicates that no individuals were removed from the analysis. __A__ shows the likelihood estimate (model fit) variation and __B__ shows the variation in h^2^~EWAS~ estimates. Analyses were also repeated in the same vein, varying $\alpha$ values (see equation \@ref(eq:mrm-weights)) (__C__ and __D__). (ref:h2ewas-sens-scap) Boxplots summarising sensitivity analyses \begin{figure}[!htp] {\centering \includegraphics[width=1\linewidth]{figure/05-h2ewas/sens_boxplots} } \caption[(ref:h2ewas-sens-scap)]{(ref:h2ewas-sens-cap)}(\#fig:h2ewas-sens) \end{figure} ### EWAS analyses {#results-ewas-analyses-05} In order to assess the association between h^2^~EWAS~ and EWAS results, EWAS of 400 traits were performed. No associations were found at the strict P value cut-off of P<2.5x10^-10^ (conventional EWAS P-value threshold, 1x10^-7^, divided by the number of traits, 400). 
A total of 29 associations between traits and CpGs were identified at the conventional EWAS P value cut-off – P<1x10^-7^. Of the traits tested, 16 had at least one EWAS hit, with the maximum number of CpGs associated with a trait being 13 (smoked cigarettes regularly). As there were so few traits with any identified hits, I took forward results from the lenient P value threshold of P<1x10^-5^, at which 340 traits had at least one EWAS hit. __Table \@ref(tab:model-testing-tab)__ shows each trait and the number of differentially methylated positions identified at varying P value thresholds. As the distribution of hit count data was heavily right skewed with an inflation at 0 and 1 (__Figure \@ref(fig:h2ewas-dmp-dist)__), to test the association between h^2^~EWAS~ and number of DMPs I opted to test goodness of fit for variations of Poisson models. Of the 6 models tested, the negative binomial hurdle Poisson regression model fit the data best (had a higher likelihood). It was found there was some evidence for an association between number of EWAS hits and h^2^~EWAS~ (__Figure \@ref(fig:dmps-and-h2ewas)__). There was some evidence of association between the presence of DMPs and h^2^~EWAS~ (beta = 6.2, [95%CI 2.5, 10]) as well as some evidence of an association between number of DMPs (when the number is above 0) and h^2^~EWAS~ (mean increase of 0.63, [95%CI 0.41, 0.84] DMPs when h^2^~EWAS~ increases by 0.1). Applying the same method to GWAS data, evidence was found that the presence of identified SNPs associated with h^2^~SNP~ (beta = 21.9 [95%CI 19.6, 24.1]) and that the number of SNPs identified (when the number is above 0) associated with h^2^~SNP~ (mean increase of 1.5, [95%CI 0.93, 2.5] SNPs when h^2^~SNP~ increases by 0.1). (ref:h2ewas-dmp-dist-cap) __The distribution of the number of DNA methylation sites identified at P<1x10^-5^ across EWAS of 400 traits__. 
(ref:h2ewas-dmp-dist-scap) The distribution of the number of DNA methylation sites identified at P<1x10^-5^ across EWAS of 400 traits \begin{figure} {\centering \includegraphics[width=1\linewidth]{figure/05-h2ewas/FOM_hit_count_distribution} } \caption[(ref:h2ewas-dmp-dist-scap)]{(ref:h2ewas-dmp-dist-cap)}(\#fig:h2ewas-dmp-dist) \end{figure} (ref:dmps-and-h2ewas-cap) __Association between h^2^~EWAS~ and number of DMPs identified in EWAS__. The correlation between DNA methylation and the variance of traits (h^2^~EWAS~) was calculated using REML analysis using the blanket and grouping models. EWAS were conducted on all the same traits and the distribution of the number of DMPs identified at P<1x10^-5^ and h^2^~EWAS~ are plotted above. Any traits where the h^2^~EWAS~ estimate is below 0 are coloured grey. The true h^2^~EWAS~ value of a trait cannot be negative, but sample sizes in this analysis are small so the estimates are imprecise. (ref:dmps-and-h2ewas-scap) Association between h^2^~EWAS~ and number of DMPs identified in EWAS \begin{figure} {\centering \includegraphics[width=1\linewidth]{figure/05-h2ewas/m2_hit_count_scatter_p5_test} } \caption[(ref:dmps-and-h2ewas-scap)]{(ref:dmps-and-h2ewas-cap)}(\#fig:dmps-and-h2ewas) \end{figure} \begin{table}[H] \caption{(\#tab:model-testing-tab)Summary of how well models fit to test the association between $h^2_{EWAS}$ and the number of differentially methylated positions identified across 400 traits at P < 1x10$^{-5}$.} \centering \resizebox{\linewidth}{!}{ \begin{tabular}[t]{lrr} \toprule Model & Log(likelihood) & DF\\ \midrule \cellcolor{gray!6}{Poisson} & \cellcolor{gray!6}{-1561} & \cellcolor{gray!6}{2}\\ Negative binomial & -972 & 3\\ \cellcolor{gray!6}{Hurdle-negative binomial} & \cellcolor{gray!6}{-954} & \cellcolor{gray!6}{5}\\ Hurdle & -1500 & 4\\ \cellcolor{gray!6}{Zero-inflated negative binomial} & \cellcolor{gray!6}{-972} & \cellcolor{gray!6}{5}\\ \addlinespace Zero-inflated Poisson & -1501 & 4\\ \bottomrule 
\multicolumn{3}{l}{\textsuperscript{} DF = degrees of freedom.}\\ \end{tabular}} \end{table} \pagebreak The ability of h^2^~EWAS~ estimated by both models to predict whether the number of DMPs identified was greater than expected was assessed at varying P value thresholds. ROC curves were produced and the area under the curve (AUC) ranged from 0.65 and 0.67 at P<1x10^-6^ to 0.79 and 0.71 at P<1x10^-3^ for the blanket and grouping models respectively and the predictive ability remained fairly stable as the threshold increased (__Figure \@ref(fig:h2ewas-dmp-roc-curve)__). (ref:h2ewas-dmp-roc-curve-cap) __The ability of h^2^~EWAS~ values to predict whether the number of differentially methylated positions identified in an EWAS is higher than expected by chance__. ROC curves for h^2^~EWAS~ values predicting number of DMPs at differing P value thresholds. AUC = area under the curve. (ref:h2ewas-dmp-roc-curve-scap) The ability of h^2^~EWAS~ values to predict whether the number of differentially methylated positions identified in an EWAS is higher than expected by chance \begin{figure}[!hb] {\centering \includegraphics[width=1\linewidth]{figure/05-h2ewas/roc_plot} } \caption[(ref:h2ewas-dmp-roc-curve-scap)]{(ref:h2ewas-dmp-roc-curve-cap)}(\#fig:h2ewas-dmp-roc-curve) \end{figure} \pagebreak ## Discussion {#discussion-05} The global contribution of DNA methylation to complex trait variance can inform researchers of how to design future studies that seek to discover new DNA methylation sites associated with their trait of interest. In this chapter I apply methods designed to estimate the predictive capacity of variants across a SNP-chip (h^2^~SNP~), to DNA methylation data measured in blood with the HM450 array across 400 independent traits, giving a distribution of the contribution of all sites typically measured in EWAS to complex trait variance. 
Although sample size was too small to reliably estimate h^2^~EWAS~ for any one trait, the distribution of estimates suggests little complex trait variation is captured by DNA methylation at the sites measured and h^2^~EWAS~ may be a good measure to identify traits for which EWAS will yield associations.

### Estimation of h^2^~EWAS~ {#estimation-of-h2ewas}

The true h^2^~EWAS~ of a trait gives the total predictive capacity of DNA methylation for that trait, which is equivalent to the proportion of that trait’s total variance that is associated with changes in DNA methylation. Knowing this information can help design future EWAS studies. A low value of h^2^~EWAS~ doesn’t necessarily mean there is little correlation between DNA methylation and a trait; it could transpire that unmeasured sites contribute more to the association. It is important to remember that roughly 1.5% of CpG sites are targeted by the HM450 array and DNA can be methylated elsewhere (not at cytosine bases). Therefore, whole genome bisulphite sequencing, or a similar technique, may show that the variance of complex traits captured by DNA methylation is far higher. Furthermore, even if h^2^~EWAS~ is low and the sites discovered already do not explain all of the h^2^~EWAS~ estimate, there may still be value in increasing sample size to identify more DMPs as well as increase the precision of h^2^~EWAS~ estimates. DMPs discovered may not be highly correlated with a trait, but this doesn’t mean the potential biological information gained isn’t valuable. For example, if a change in the levels of protein X has a large effect on a trait and a change in DNA methylation has a small effect on the levels of protein X, then the effect of that DNA methylation change on the trait will be small, but identifying that DMP could lead to discovering the importance of the protein. Another thing to consider is that DNA methylation is tissue and cell specific.
This means that h^2^~EWAS~ may vary a lot depending on what tissue the methylation is measured in. The true underlying genetic architecture of complex traits is still unknown, and therefore it is difficult to know the appropriate model to choose when estimating the contribution of all measured SNPs to phenotypic variance amongst unrelated individuals and arguments for each model depending on this underlying genetic architecture are still being put forward [@Speed2017; @Gazal2017; @Speed2018; @Speed2020], thus the attempts made in this study to re-purpose genomic REML are likely to suffer the same flaws that researchers are trying to overcome in genetic data. With this in mind, in addition to the imprecise estimates of h^2^~EWAS~ presented here (due to the small sample sizes of available data), individual trait h^2^~EWAS~ values should be treated with caution. This doesn’t exclude the possibility that estimating h^2^~EWAS~ may be useful and other methods are already being developed to measure the association between DNA methylation at all sites and complex traits [@TrejoBanos2020].

### Future EWAS {#future-ewas-05}

Heritability estimates from family-based studies gave an _a priori_ justification for the pursuit of gene mapping endeavours that eventually gave rise to GWAS, as they demonstrated variation in complex traits had a substantial genetic component. However, evidence that DNA methylation contributed to trait variation was not ascertained before EWAS were first conducted. To justify collecting more samples and continuing with EWAS research in the current vein, methods such as the one presented in this study should be used to show DNA methylation does substantially contribute to trait variance. It has become clear from the GWAS era of genetics that for complex traits, such as coronary artery disease, many common genetic variants with small effects make up the genetic component of the trait [@VanderHarst2018; @Visscher2017].
This suggests a large number of molecular pathways contribute to these traits. DNA methylation at CpGs is heritable [@VanDongen2016; @Gaunt2016], thus it would be expected that the DNA methylation architecture of a trait will somewhat reflect the genetic architecture of the trait, although this has not been empirically tested. Despite uncertainty of h^2^~EWAS~ estimates for individual traits, I show h^2^~EWAS~ has a modest ability to predict whether the number of EWAS hits will be greater than expected by chance at a given P value threshold. This predictive ability remained stable as the P value threshold for detection increased from P<1x10^-6^ to P<1x10^-3^. These results suggest that increasing sample sizes for traits which truly associate with DNA methylation should result in the discovery of more DMPs. Furthermore, these results support a model in which small changes in methylation at many CpGs across the genome are related to complex traits.

### Contributions of individual CpG sites {#contribution-of-individual-cpg-sites}

The original model for measuring h^2^~SNP~ assumed all genetic variants contributed the same effect on a trait [@Yang2010]. Speed et al. offered an alternative model assuming a different underlying genetic architecture, whereby genetic variants in regions of high LD contributed less to the variance of a trait than more independent variants. Both groups have shown that the performance of the models depends on the alignment of the trait’s architecture with the models’ underlying assumptions. Previous literature has suggested that it is the methylation across groups of CpGs that may affect how other molecules interact with DNA and influence cellular functions such as gene expression [@Jones2012]. Furthermore, CpGs are not randomly distributed throughout the genome – many exist in close proximity within “islands” or other regions, suggesting that grouping of the CpGs may have functionality.
However, the most common method used in EWAS is to treat CpG sites as independent. Here, the models proposed by Speed et al. (the grouping model) and Yang et al. (the blanket model) for estimating h^2^~EWAS~ were tested across 400 traits. The model fit the data better (had a higher likelihood) 207 times for the blanket model and 193 times for the grouping model. Thus, for over half the traits, treating DNA methylation sites as independent seems to be preferable and even though there is correlation between CpG sites, which allows them to be grouped, it might be that in some groups of correlated sites, individual sites within the group contribute separately to trait variance. It’s important to note that the grouping method takes into account correlation between CpGs within 100Kb of each other. Differential methylation at CpG sites may be correlated for a variety of biological reasons, for example, CpGs lying within a transcription factor binding site will be regulated together, but also, they will be correlated with CpGs that lie in other binding sites for that same transcription factor and these may be many megabases away. This is relevant to the relationship between DNA methylation and complex traits because transcription factor regulation might be the link between complex traits and DNA methylation. Even though grouping CpG sites might yet be the best way to model the relationship between DNA methylation and complex traits, the optimum way to group sites is unknown and will likely change depending on the trait of interest.

### Limitations {#limitations-05}

The main limitation of the chapter is the small sample size (maximum N = 940) to estimate the h^2^~EWAS~. This meant the precision of our h^2^~EWAS~ estimates was very low and so our power to assess their ability to predict number of DMPs and find individual trait h^2^~EWAS~ values was low.
To circumvent this problem, trends across multiple traits are assessed and I do not make strong conclusions for any one trait. As mentioned previously, the HM450 array captures a small percentage of the total DNA methylome and h^2^~EWAS~ estimates will likely vary upon assaying more DNA methylation sites. Furthermore, when measuring more sites, it might be that one of the models fits the data better. Nevertheless, the results of this study can still give evidence towards the hypothesis that differential methylation at many sites across the genome each contribute minimally to the overall association between the methylome and a complex trait. Unlike germline genetic variants, there is intra-individual (between tissue and time dependent) DNA methylation variation [@Birney2016; @Rakyan2011]. Thus, it is to be expected that the variation of h^2^~EWAS~ estimates across traits is partly a product of the tissue and timepoint of choice. However, within the tissue biologically pertinent to the complex trait of interest, the number of pathways that associate with variation in that trait is likely to remain high, for example there are many processes affecting, or affected by, cancer development [@Hanahan2011]. Thus, it would still be expected that differential methylation at many CpG sites each associate with a trait, but the effect sizes are small. The same can be said when estimating h^2^~EWAS~ at various timepoints. Estimates of h^2^~EWAS~ will be a product of the environment and genetic make-up of the participants they are measured in. Therefore, the results here may vary by population and by sex. However, participants used in this study are considered to be representative of the larger ALSPAC cohort [@Relton2015], which is itself considered to be representative of a large majority of women from the UK and potentially other high-income countries [@Fraser2013].
This suggests the results will be generalisable to a large group of samples for which EWAS are conducted, but replication in these samples as well as in different populations would provide greater confidence in the generalisability of the results. A wide range of complex traits was used in the analysis, but there are some notable absences. Rarer diseases and diseases that predominantly impact the elderly are not present in this study. The results presented here cannot be generalised to those traits. The factors important for the correlation structure of DNA methylation data are less known than those for linkage disequilibrium structure of genetic variants. Therefore, when applying models, such as the grouping model here, that aim to account for correlation of neighbouring DNA methylation sites, some of the important structure captured may be missed for example by trans-correlations (over 1Mb). A model that estimates h^2^~EWAS~ by incorporating all of the underlying correlation of DNA methylation data may therefore outperform both models tested here. ## Conclusion {#conclusion-05} Overall, the number of traits with good evidence for h^2^~EWAS~ > 0 was low (only smoking behaviour met the threshold of FDR < 0.1) and mean h^2^~EWAS~ value across both models was roughly 0, suggesting that for many traits DNA methylation variation as measured on the HM450 array in blood is of little relevance. However, these estimates varied greatly and therefore DNA methylation measured in this way will likely have relevance for some traits, for example smoking cigarettes regularly. Further, these estimates were correlated with the number of DMPs identified, suggesting that for traits whose variance associates with DNA methylation then increasing sample size will yield an increase in the number of CpGs identified in EWAS. I also provide evidence that there is value in assessing individual CpG-trait associations as opposed to groups of correlated CpG sites within 100Kb. 
However, this does not preclude the possibility that a more complex model of CpG site correlation may provide a better fit.

<!--chapter:end:05-h2ewas.Rmd-->

# A comparison of the genes and genesets identified by EWAS and GWAS of eight complex traits {#ewas-gwas-comp-chapter}

## Chapter summary {#chapter-summary-06}

Despite little evidence that DNA methylation correlates highly with complex trait variation (__Chapter \@ref(h2ewas-chapter)__), it is of interest to understand whether the DNA methylation sites identified may be valuable in understanding the underlying biology of complex traits. Identifying the genes, properties of these genes and pathways to understand the underlying biology of complex traits responsible for differential health states in the population is a common goal of EWAS and GWAS. Genetic variants identified in GWAS will not be caused by phenotypic variation and their associations with complex traits are unlikely to be confounded. Whilst DNA methylation changes identified in EWAS do not share the same properties, the goal of identifying facets of genomic change that cause differential health states among the population remains the same for both studies. A simple method, routinely applied, to aid in this goal involves mapping regions of the genome identified by the study to genes and genesets. In this chapter I use data from The EWAS Catalog (__Chapter \@ref(ewas-catalog)__) and the IEU OpenGWAS Project to assess whether EWAS are identifying similar genes and genesets to GWAS, which indicates if EWAS is capturing the same underlying biology of a trait. Across eight different traits with large EWAS (N > 4500), the number of 500kb genomic regions with both an associated genetic variant (P<5x10^-8^) and a differentially methylated position (P<1x10^-7^) identified was a small percentage of the total number of variants and differentially methylated positions identified (ranging from 0% to 9%).
Fisher's exact test was used to estimate whether genes identified by EWAS were more likely to be also identified by GWAS. The overlap was found to be no more than expected by chance across all the traits (P > 0.05 in all cases). Further, correlation between Gene Ontology term enrichment scores was no higher than expected by chance. Overall, the EWAS findings for these eight traits are vastly different to the GWAS findings and implicate different biological genesets. By way of example, the GWAS of alcohol consumption identified genes enriched in the ethanol catabolism pathway (FDR < 0.1), whereas the corresponding EWAS identified no genes in this pathway. Simulations suggested these results were likely due to the majority of EWAS signal being the consequence of trait variation or confounding. However, they did not preclude the possibility that some of the differentially methylated positions may be pertinent to the aetiology of the traits. Currently, interpreting EWAS is difficult, but even if only a small proportion of the DNA methylation sites identified are causally related in some way to the traits of interest here, it is likely they are acting through distinct biological pathways. ### Contributions statement {#contributions-statement-06} I conducted all analyses and wrote everything in this chapter. ## Introduction {#introduction-06} Often in EWAS, the potential biological implications of differentially methylated positions or regions (DMPs or DMRs) will be investigated further through genomic annotations [@Sharp2017; @Reese2019; @Everson2019; @Chen2020]. As discussed in __Section \@ref(dna-methylation)__, previous studies have demonstrated a relationship between DNA methylation levels and proximal genes [@Jones2012; @Illingworth2009]. 
This observation has led to it being commonplace to map sites identified in EWAS to nearby genes and these genes and their function are often probed to ascertain their relevance to the trait of interest [@Sharp2017; @Reese2019; @Everson2019; @Chen2020]. Further, genes can be grouped with others into “genesets” that have similar functionality or lie within the same pathway. Over-represented genesets may provide an insight into the molecular biology of a trait, for example, DNA methylation sites identified in an autoimmune disease EWAS might tag genes within immunological pathways more than expected by chance. Other assessments may be made to infer potential biological understanding, including enrichment of other epigenetic marks at the regions identified [@Breeze2016] and follow-up experimental studies [@Rakyan2011]. However, using open access databases to investigate tagged genes and genesets is a simple and potentially effective approach to further biological understanding. GWAS often use similar approaches to EWAS to help infer function from the signals identified [@Rakyan2011; @Gallagher2018]. However, the properties of genetic variants and DNA methylation differ, making potential inferences from EWAS and GWAS diverge. Importantly, DNA methylation is responsive to environmental stimuli, thus making associations identified in EWAS potentially attributable to reverse causation and to confounding [@Birney2016; @Relton2010; @Relton2015]. It should be noted that GWAS may be susceptible to confounding, but it is reasonable to assume it is less pervasive amongst GWAS associations [@DaveySmith2003; @DaveySmith2014]. Given what is known about the properties of genetic variants, a direct comparison between EWAS and GWAS results can provide insight into what biological information EWAS is capturing. If EWAS are highlighting a similar set of genes and genesets, it suggests changes in DNA methylation are identifying facets of trait aetiology.
In the event that GWAS and EWAS are not highlighting similar genes and genesets, it is plausible that EWAS may still be identifying facets of trait aetiology. If DNA methylation mediates non-genetic effects and if sites are mapped to genes or genesets incorrectly then overlap will not be guaranteed even if DNA methylation identified is aetiologically relevant. When DNA methylation mediates the effect of genetic variants distal to their genomic position on complex traits, the genes identified by GWAS and EWAS will also differ, but the genesets would likely overlap. The lack of overlap could also reflect the fact associations in EWAS may be driven by confounding and reverse causation. Despite the caveats mentioned, one might still expect to detect overlap in genes and genesets identified by EWAS and GWAS of corresponding traits in the absence of confounding and reverse causation. The extent to which this expectation holds is discussed in more detail in the __Section \@ref(discussion-06)__. In this chapter, the overlap between genes and genesets identified by GWAS and EWAS of eight complex traits is explored and the scenarios in which one would and would not expect to find overlap are modelled through simulations. ## Results {#results-06} ### Study data {#study-data} Traits were selected for which EWAS had been conducted with over 4500 samples and for which corresponding GWAS summary data were available. At the time of conducting these analyses, traits were identified using data from The EWAS Catalog (__Chapter \@ref(ewas-catalog)__) that contained EWAS published before 2019. The EWAS of glucose and insulin were also added at a later date. Traits and study data information is in __Table \@ref(tab:trait-data-tab-06)__. 
\begin{table}[!h] \caption{(\#tab:trait-data-tab-06)Study data} \centering \resizebox{\linewidth}{!}{ \begin{tabular}[t]{lllllll} \toprule trait & ewas-author & ewas-pmid & ewas-n & gwas-author & gwas-pmid & gwas-n\\ \midrule \cellcolor{gray!6}{current versus never smoking} & \cellcolor{gray!6}{<NAME>} & \cellcolor{gray!6}{27651444} & \cellcolor{gray!6}{9,389} & \cellcolor{gray!6}{Liu M} & \cellcolor{gray!6}{30643251} & \cellcolor{gray!6}{632,802}\\ former versus never smoking & <NAME> & 27651444 & 13,474 & Elsworth B & NA & 424,960\\ \cellcolor{gray!6}{alcohol consumption per day} & \cellcolor{gray!6}{Liu C} & \cellcolor{gray!6}{27843151} & \cellcolor{gray!6}{9,643} & \cellcolor{gray!6}{Liu M} & \cellcolor{gray!6}{30643251} & \cellcolor{gray!6}{537,349}\\ c-reactive protein & Ligthart S & 27955697 & 8,863 & Ligthart S & 30388399 & 204,402\\ \cellcolor{gray!6}{body mass index} & \cellcolor{gray!6}{Wahl S} & \cellcolor{gray!6}{28002404} & \cellcolor{gray!6}{10,238} & \cellcolor{gray!6}{Yengo L} & \cellcolor{gray!6}{30124842} & \cellcolor{gray!6}{681,275}\\ \addlinespace educational attainment & <NAME> & 29086770 & 10,767 & <NAME> & 30038396 & 766,345\\ \cellcolor{gray!6}{insulin} & \cellcolor{gray!6}{Liu J} & \cellcolor{gray!6}{31197173} & \cellcolor{gray!6}{4,740} & \cellcolor{gray!6}{Manning AK} & \cellcolor{gray!6}{22581228} & \cellcolor{gray!6}{51,750}\\ glucose & <NAME> & 31197173 & 4,808 & Manning AK & 22581228 & 58,074\\ \bottomrule \multicolumn{7}{l}{\textsuperscript{} Where gwas-pmid = NA, the GWAS were conducted as part of a UK Biobank GWAS pipeline within the Univeristy of Bristol's Integrative Epidemiology Unit and can be found on the OpenGWAS Project website (see Methods for more)}\\ \end{tabular}} \end{table} ### Genomic position overlap {#genomic-position-overlap} The genome was divided into 5591 500kb non-overlapping regions and mapped EWAS and GWAS signals for each trait to these regions (see __Section \@ref(methods-06)__ for more details). 
The number of regions that were identified by one study type and not the other was higher for each trait than the number of overlapping regions (__Figure \@ref(fig:overlap-barplot)__). Further, the magnitude of the greatest GWAS effect estimate in each region had little ability to predict whether or not a DNA methylation site was likely to be identified in the same region (AUC range = 0.43-0.56, __Figure \@ref(fig:auc-plot)__). (ref:position-overlap) __Overlap between genomic positions identified by corresponding EWAS and GWAS__. The genome was divided into 500Kb regions. Those where no probes on the HM450 array measured DNA methylation were excluded from the analysis. Regions were counted as being identified by a GWAS if one or more SNPs associated with the trait and as being identified by an EWAS if one or more CpGs associated with the trait. Neither = no EWAS or GWAS sites identified in the region, GWAS = GWAS sites only were identified, EWAS = EWAS sites only were identified, Both = Both EWAS and GWAS sites were identified, AC = alcohol consumption per day, BMI = body mass index, CRP = c-reactive protein, CsNs = current smokers vs never smokers, EA = educational attainment, FsNs = former smokers vs never smokers. (ref:position-overlap-scap) Overlap between genomic positions identified by corresponding EWAS and GWAS \begin{figure} {\centering \includegraphics[width=1\linewidth]{figure/06-ewas_gwas_comparison/all_traits_overlap_bar} } \caption[(ref:position-overlap-scap)]{(ref:position-overlap)}(\#fig:overlap-barplot) \end{figure} (ref:auc-plot-cap) __Can genetic variant associations predict the presence of DNA methylation associations in the same region?.__ For each 500kb region in the genome, the largest SNP-trait effect size was extracted. ROC curves were produced to determine whether these could predict whether a differentially methylated position related to the same trait was present in the same 500kb region. 
The areas under these curves (AUC), with their confidence intervals, are plotted for each trait. The red dashed line is at AUC = 0.5, which represents a prediction no better than chance. AC = alcohol consumption per day, BMI = body mass index, CRP = c-reactive protein, CsNs = current smokers vs never smokers, EA = educational attainment, FsNs = former smokers vs never smokers. Note: insulin is not present in this plot as there was GWAS and EWAS signal that overlapped across all the 500kb regions.

(ref:auc-plot-scap) Can genetic variant associations predict the presence of DNA methylation associations in the same region?

\begin{figure}

{\centering \includegraphics[width=1\linewidth]{thesis_files/figure-latex/auc-plot-1}

}

\caption[(ref:auc-plot-scap)]{(ref:auc-plot-cap)}(\#fig:auc-plot)
\end{figure}

### Assessing power to detect shared annotations between GWAS and EWAS {#assessing-power-sims}

Genomic function and trait biology are not divided into discrete 500kb genomic chunks, thus GWAS and EWAS could still be identifying similar facets of trait biology without identifying the same genomic regions. I sought to assess whether the genes and genesets identified overlapped more than expected by chance, thus genomic positions were mapped to genes and genes to genesets (details in __Section \@ref(methods-06)__). Overlap between genes identified was assessed using Fisher's exact test. Two methods for testing overlap between genesets were considered: one simply mapped genes to genesets and used Fisher's exact test in the same way as assessing overlap between identified genes. The other generated 'enrichment scores' for each geneset and assessed correlation between the geneset enrichment scores across studies. Assuming all genetic effects on DNA methylation are proximal, if all DMPs that associated with a trait were on the pathway from SNP to disease, then EWAS and GWAS would be identifying genes from the exact same geneset (i.e. genes that caused changes in the trait).
The more DMPs that are identified because of confounding effects or reverse causation, the smaller the chance of overlap, assuming that causal and responsive genesets are distinct. As mentioned, the presence of trans-mQTLs would make the overlap of genes identified by GWAS and EWAS less likely despite both studies identifying aetiologically relevant factors. However, geneset overlap will be more likely to be maintained regardless of trans-mQTL presence. In a scenario where no DMPs are causing phenotypic changes, any overlap in genes and genesets found would be entirely attributable to chance (ignoring potential feedback loops whereby the trait causes changes in genesets that it is affected by). Simulations were run to assess which scenarios the enrichment and annotation methods had power to detect whether there was more overlap than expected by chance. Power was also assessed across different annotation methods. A schematic of how the simulations were set up can be found in __Figure \@ref(fig:method-simulations-schematic)__. (ref:sim1-schematic-cap) __Flowchart demonstrating how the first set of simulations were set up__. The flowchart (under the title "Generating simulated data") shows how the simulated data were generated. The boxes under "Analysis" show the analyses performed with the simulated data. Underlined parameters were varied. The simulations were repeated 1000 times for each set of parameters. (ref:sim1-schematic-scap) Flowchart demonstrating how the first set of simulations were set up \begin{figure}[!hp] {\centering \includegraphics[width=1\linewidth]{figure/06-ewas_gwas_comparison/simulations-gene-up-flowchart} } \caption[(ref:sim1-schematic-scap)]{(ref:sim1-schematic-cap)}(\#fig:method-simulations-schematic) \end{figure} Under each scenario, the ability to predict whether EWAS were identifying, in part, the same set of genes as GWAS ('causal genes') compared to a random set of genes ('associated genes') was tested. 
As expected, predictive capacity increased as study power and the proportion of causal DMPs increased (__Figure \@ref(fig:sim1-summ-plot)__, __Figure \@ref(fig:sim1-full-plot5)__). Predictive ability tended to increase as the number of identified genes increased, but this parameter was largely inconsequential when the proportion of causal DMPs was low (__Figure \@ref(fig:sim1-full-plot5)__). Performance was similar across annotation methods and between methods attempting to assess geneset overlap, with assessment of gene overlap performing better (__Figure \@ref(fig:sim1-summ-plot)__). Overall there was more power to detect overlap in genes than overlap in genesets. Between the geneset methods, there was more power to detect correlation between enrichment scores than direct overlap in genesets. Therefore, gene overlap and correlation between geneset enrichment scores were taken forward for the empirical analyses. Assessing correlations of GO term genesets had slightly more power (__Figure \@ref(fig:sim1-summ-plot)__) than other geneset databases, so these geneset annotations were taken forward. (ref:sim1-sum-plot-cap) __Power to detect overlap between genes and genesets identified by corresponding EWAS and GWAS.__ Simulations were set up as illustrated in __Figure \@ref(fig:method-simulations-schematic)__. The area under receiver operator curves (AUC) was used to estimate the ability to distinguish between results generated when EWAS and GWAS were sampling, in part, from the same set of causal genes and results generated when EWAS were sampling random genes from the genome. The header of each set indicates the proportion of genes identified by the simulated EWAS that were set to be causal. or_g = assessing overlap of genes, or_p = assessing overlap of genesets, rho_p = assessing correlation between geneset enrichment scores. go = Gene Ontology, ppi = protein-protein interaction database from EpiGraphDB. 
This is a summary of the results; full results can be found in __Figure \@ref(fig:sim1-full-plot5)__.

(ref:sim1-sum-plot-scap) Power to detect overlap between genes and genesets identified by corresponding EWAS and GWAS

\begin{figure}

{\centering \includegraphics[width=1\linewidth]{figure/06-ewas_gwas_comparison/methods_test_gene_up_auc_plot_all_databases_summary}

}

\caption[(ref:sim1-sum-plot-scap)]{(ref:sim1-sum-plot-cap)}(\#fig:sim1-summ-plot)
\end{figure}

### Gene and geneset overlap between GWAS and EWAS {#gwas-ewas-overlap}

For the eight traits used, the number of genes identified by EWAS and GWAS that overlapped was low and for two traits no genes identified by the studies overlapped (__Table \@ref(tab:empirical-gene-tab)__). The number of genesets that overlapped was higher, peaking at 1,243 for GWAS and EWAS of body mass index (__Table \@ref(tab:empirical-pathway-tab)__). There was no strong evidence that the number of overlapping genes identified was more than expected by chance (__Table \@ref(tab:empirical-gene-tab)__). However, power to detect a high level of gene overlap was low in most cases. There was also little evidence that correlation between enrichment scores of the GO terms was greater than expected by chance (__Table \@ref(tab:empirical-pathway-tab)__). As suggested by the previous simulations, this does not preclude the possibility that EWAS are identifying changes in DNA methylation that are upstream of phenotypic change.
\linebreak \begin{table}[!h] \caption{(\#tab:empirical-gene-tab)Overlap of genes identified by EWAS and GWAS} \centering \resizebox{\linewidth}{!}{ \begin{tabular}[t]{llllllll} \toprule trait & n-ewas-genes & n-gwas-genes & gene-overlap & obs-OR & exp-gene-overlap & exp-OR & p-diff\\ \midrule \cellcolor{gray!6}{current versus never smoking} & \cellcolor{gray!6}{1,933} & \cellcolor{gray!6}{312} & \cellcolor{gray!6}{27} & \cellcolor{gray!6}{2.9} & \cellcolor{gray!6}{39} & \cellcolor{gray!6}{3.42} & \cellcolor{gray!6}{0.322}\\ former versus never smoking & 282 & 320 & 9 & 6.4 & 7 & 3.43 & 0.022\\ \cellcolor{gray!6}{alcohol consumption per day} & \cellcolor{gray!6}{361} & \cellcolor{gray!6}{196} & \cellcolor{gray!6}{3} & \cellcolor{gray!6}{2.6} & \cellcolor{gray!6}{3} & \cellcolor{gray!6}{2.44} & \cellcolor{gray!6}{0.908}\\ c-reactive protein & 189 & 302 & 3 & 3.2 & 1 & 0.91 & 0.121\\ \cellcolor{gray!6}{body mass index} & \cellcolor{gray!6}{232} & \cellcolor{gray!6}{3,221} & \cellcolor{gray!6}{23} & \cellcolor{gray!6}{2.0} & \cellcolor{gray!6}{39} & \cellcolor{gray!6}{3.04} & \cellcolor{gray!6}{0.052}\\ \addlinespace educational attainment & 25 & 1,594 & 1 & 1.5 & 3 & 3.00 & 0.430\\ \cellcolor{gray!6}{insulin} & \cellcolor{gray!6}{36} & \cellcolor{gray!6}{5} & \cellcolor{gray!6}{0} & \cellcolor{gray!6}{0.0} & \cellcolor{gray!6}{0} & \cellcolor{gray!6}{0.00} & \cellcolor{gray!6}{1.000}\\ glucose & 15 & 50 & 0 & 0.0 & 0 & 0.00 & 1.000\\ \bottomrule \multicolumn{8}{l}{\textsuperscript{} exp = expected, obs = observed}\\ \multicolumn{8}{l}{\textsuperscript{} odds ratios (ORs) can be interpreted as the odds of a gene being identified by EWAS and a GWAS over the odds of a gene being identified by an EWAS but not by a GWAS.}\\ \multicolumn{8}{l}{\textsuperscript{} exp-OR = the mean OR after repeating the analysis 1000 times, randomly sampling EWAS genes equal to the number identified in the empirical analysis.}\\ \end{tabular}} \end{table} \begin{table}[!h] 
\caption{(\#tab:empirical-pathway-tab)Correlation of geneset enrichment scores between EWAS and GWAS} \centering \resizebox{\linewidth}{!}{ \begin{tabular}[t]{lllllll} \toprule trait & n-ewas-genes & n-gwas-genes & geneset-overlap & obs-cor & exp-cor & p-diff\\ \midrule \cellcolor{gray!6}{current versus never smoking} & \cellcolor{gray!6}{1,933} & \cellcolor{gray!6}{312} & \cellcolor{gray!6}{1,053} & \cellcolor{gray!6}{0.200} & \cellcolor{gray!6}{0.22} & \cellcolor{gray!6}{0.034}\\ former versus never smoking & 282 & 320 & 661 & 0.298 & 0.30 & 0.994\\ \cellcolor{gray!6}{alcohol consumption per day} & \cellcolor{gray!6}{361} & \cellcolor{gray!6}{196} & \cellcolor{gray!6}{562} & \cellcolor{gray!6}{0.259} & \cellcolor{gray!6}{0.27} & \cellcolor{gray!6}{0.575}\\ c-reactive protein & 189 & 302 & 600 & 0.265 & 0.26 & 0.798\\ \cellcolor{gray!6}{body mass index} & \cellcolor{gray!6}{232} & \cellcolor{gray!6}{3,221} & \cellcolor{gray!6}{1,243} & \cellcolor{gray!6}{0.187} & \cellcolor{gray!6}{0.20} & \cellcolor{gray!6}{0.229}\\ \addlinespace educational attainment & 25 & 1,594 & 215 & 0.105 & 0.13 & 0.173\\ \cellcolor{gray!6}{insulin} & \cellcolor{gray!6}{36} & \cellcolor{gray!6}{5} & \cellcolor{gray!6}{16} & \cellcolor{gray!6}{0.093} & \cellcolor{gray!6}{0.12} & \cellcolor{gray!6}{0.376}\\ glucose & 15 & 50 & 49 & 0.153 & 0.16 & 0.809\\ \bottomrule \multicolumn{7}{l}{\textsuperscript{} For each geneset, odds of study genes being in the geneset divided by the odds of the study genes not being in the geneset were assessed and correlation between these odds ratios are given here.}\\ \multicolumn{7}{l}{\textsuperscript{} exp-cor = the mean correlation between odds ratios after repeating the analysis 1000 times, randomly sampling EWAS genes equal to the number identified in the empirical analysis}\\ \multicolumn{7}{l}{\textsuperscript{} geneset-overlap indicates the number of gene ontology terms that map to both genes identified by the EWAS and GWAS.}\\ \end{tabular}} 
\end{table} \linebreak There were 16 GO term genesets that were commonly enriched (FDR < 0.1) for both the EWAS and GWAS traits. Of these, zero were specific (contained under 100 genes). There were 51 specific genesets (geneset size < 100 genes) that did not overlap between studies of corresponding traits, for example, the genes identified by the GWAS of alcohol consumption were enriched for the “ethanol catabolism” pathway (geneset size = 12 genes), however none of the genes identified by the EWAS were present in this pathway. ### Understanding architecture from geneset overlap {#architecture-sims} The total number of genes and genesets that can be identified by GWAS and EWAS is unknown and the proportion of causal and non-causal genes EWAS will identify is also unknown. Thus it is uncertain how much overlap there will be between genes and genesets of corresponding GWAS and EWAS as sample sizes increase and more genes and genesets are discovered. Using the empirical data from __Table \@ref(tab:empirical-pathway-tab)__ to inform simulations, I sought to identify the proportion of overlap between causal and associated genes (genes that may or may not be causal) and how this is related to the number of genes yet to be discovered. For the simulations, three sets of genes were linked to each trait: genes identified by the GWAS (known GWAS genes), genes identified by the EWAS (known EWAS genes) and a random set of genes sampled from the total set of Ensembl gene IDs (excluding the genes identified by the EWAS and GWAS). Those genes identified by the GWAS and a number of the randomly selected set of genes were assigned as "causal" and the genes identified by the EWAS and the rest of the randomly selected set of genes were assigned as "associated". The overlap between the causal and associated genes as well as the number of total genes (causal and associated) was pre-determined. 
From these pools of causal and associated genes, $X$ genes were sampled randomly to represent GWAS genes and $Y$ genes were sampled randomly to represent EWAS genes. The number of genes, $X$ and $Y$, sampled was equal to the number identified in the empirical GWAS and EWAS, respectively. Having generated the GWAS and EWAS genes, enrichment of GO terms was performed and the correlation between enrichment scores across all the terms was estimated. Under a single simulation scenario, the number of total genes and the proportion of causal and associated gene overlap was pre-determined. The known GWAS and EWAS genes were determined by those discovered from the empirical results, thus the random set of genes was varied to alter the total number of genes and proportion of overlap. The proportion of overlap was 0, 0.01, 0.1, 0.5 or 1 and the total number of genes was split evenly into causal and associated genes. The number of associated genes (and causal genes) equalled the total number of EWAS and GWAS genes discovered multiplied by 1, 2, 3, 5, 10 or 20. For each scenario, the analyses were repeated 1000 times for each trait. A schematic of the methods for these simulations can be found in __Figure \@ref(fig:arch-simulations-schematic)__ and it is described in full in __Section \@ref(methods-06)__. (ref:arch-simulations-schematic-cap) __Flowchart demonstrating how the second set of simulations were set up for each trait.__ Phenotypic variation may be caused by changes in gene/protein polymers (causal genes) and can be associated with changes in gene/protein polymers via other routes such as confounding or reverse causation (associated genes). In these simulations the causal genes were a mix of genes identified by GWAS, known GWAS genes (KGG), and a random set of genes, simulated GWAS genes (SGG). The associated genes were a mix of genes identified by EWAS, known EWAS genes (KEG), and a random set of genes, simulated EWAS genes (SEG). 
The level of overlap in the causal and associated genes was modified by changing the overlap in the SGG and SEG. The number of causal and associated genes was kept the same for each simulation, but this number varied between simulations. The minimum number of causal genes and the minimum number of associated genes was equal to the sum of KGG and KEG. The "simulated GWAS" step in the simulation simply equates to randomly sampling from the causal genes. The number of genes sampled was equal to the number of KGG. The "simulated EWAS" step was identical except that the number of genes sampled was equal to the number of KEG, sampled from the associated genes. Geneset enrichment was performed as described in __Section \@ref(methods-06)__. The simulations were repeated 1000 times for each set of parameters. (ref:arch-simulations-schematic-scap) Flowchart demonstrating how the second set of simulations were set up for each trait \begin{figure} {\centering \includegraphics[width=1\linewidth]{figure/06-ewas_gwas_comparison/architecture-simulations-schematic} } \caption[(ref:arch-simulations-schematic-scap)]{(ref:arch-simulations-schematic-cap)}(\#fig:arch-simulations-schematic) \end{figure} From the simulations it was determined that, for seven of eight traits, it is likely that if the number of genes discovered currently is near to the total genes to discover for these traits, the proportion of overlap between causal and associated genes is low. However, for former vs. never smoking, there was greatest evidence for a scenario where the causal and consequential gene overlap was high. The results from the analysis of former vs. never smoking and c-reactive protein (representing simulations for the other traits) are shown in __Figure \@ref(fig:arch-simulations-crp-fvns)__. __Figure \@ref(fig:arch-simulations-supp-res)__ shows the same results for the other traits. Each simulation was repeated 1000 times. 
Evidence for a difference in the empirically determined correlation of geneset enrichment scores and the mean correlation of geneset enrichment scores across simulations was assessed using a z-test for difference. There was some evidence against 53 simulation scenarios (FDR < 0.05). Across the traits, the scenarios that were least likely tended to be when the number of genes yet to discover was low, and the overlap between causal and associated genes was high, except for former vs. never smoking, highlighting architecture differences between traits. (ref:arch-simulations-crp-fvns-cap) __Simulations to understand the likely number of genes still to identify in EWAS and GWAS of c-reactive protein and former vs. never smoking under different trait architectures__. Simulations were set up as illustrated in __Figure \@ref(fig:arch-simulations-schematic)__. Correlation of geneset enrichment scores from empirical data (__Table \@ref(tab:empirical-pathway-tab)__) is shown as a red dashed line. Box plots show the range of enrichment score correlations from 1000 simulations using the parameters indicated. The number of causal and associated genes, as well as the overlap between these genes were varied. N~KEG~ = number of known EWAS genes, N~KGG~ = number of known GWAS genes, N~KTG~ = number of known total genes (N~KEG~ + N~KGG~). By way of an example, when N~KTG~ = 491 and the ratio of causal and associated genes relative to N~KTG~ is 1:1, the number of causal genes in the simulations will be 491 and the number of associated genes in the simulations will be 491. Scenarios which lie close to the empirical result (red dashed line) are more likely to reflect the true underlying number of genes related to a trait and the true overlap between the causal and associated genes. Where there is some evidence that the empirical results are different to the simulations (z-test FDR < 0.05) the box outline is grey. 
(ref:arch-simulations-crp-fvns-scap) Simulations to understand the likely number of genes still to identify in EWAS and GWAS of c-reactive protein and smoking (former vs. never smokers) under different trait architectures \begin{figure} {\centering \includegraphics[width=1\linewidth]{figure/06-ewas_gwas_comparison/architecture_sims_crp_fvns_only_correlation_of_pathway_enrichment_scores} } \caption[(ref:arch-simulations-crp-fvns-scap)]{(ref:arch-simulations-crp-fvns-cap)}(\#fig:arch-simulations-crp-fvns) \end{figure} ### Overlap of non-corresponding EWAS and GWAS {#matching-gwas-to-ewas} It may be the case that EWAS of one trait are actually more closely related to GWAS of another trait. For example, if DNA methylation changes related to BMI were mediating the effect of BMI on changes in metabolites, then one could expect to see greater correspondence in identified genes and genesets with GWAS of those metabolites. To test if this was the case for any of the EWAS in this study, 1886 GWAS with SNPs associated at 5x10^-8^ with a sample size above 5000 from the IEU OpenGWAS Project [@Elsworth2020] were extracted. For each GWAS and the eight EWAS, enrichment scores were calculated using geneset enrichment analysis and correlation between the enrichment scores was calculated, as in previous analyses. Across all pairwise comparisons, correlations between enrichment scores ranged from -0.014 to 1 and had a mean of 0.12. The mean correlation between GWAS traits and EWAS traits was 0.23, which was higher than the correlations between just GWAS traits (0.12). Amongst just GWAS results, there was evidence that 14653 pairwise enrichment score correlations were greater than the mean (FDR < 0.05). However, there was little evidence that any pairwise correlations between enrichment scores derived from EWAS and GWAS were greater than the mean correlation (FDR > 0.05). 
This suggests that the signal from EWAS is not capturing aspects of any specific factor that impacts the aetiology of any of the eight traits of interest. ## Discussion {#discussion-06} Several EWAS papers have compared their findings with those of the corresponding trait GWAS [@Kurushima2019; @Everson2019; @Agha2019; @Odintsova2019; @Geurtsen2019], but it's unknown if any overlap that might occur should be attributed to shared underlying genetic and epigenetic architectures or if it occurs by chance. In this study, the genes and genesets identified by eight large EWAS (N>4500) were not identified in their corresponding GWAS any more than expected by chance. Simulations suggested these EWAS could still be identifying aspects of trait aetiology, but it is likely most DMPs identified are due to confounding or reverse causation. Further simulations suggested that the overlap between genes that impact phenotypic variation and those that might be identified through confounded analyses or reverse causation is likely to be low. However, if the number of genes still to identify in EWAS and GWAS is high, it is possible that the overlap between "causal" and "associated" genes could be high also. ### Overlap expected {#overlap-expected} GWAS identifies the effects of genetic variation on complex traits. These effects are less likely to be confounded than associations estimated between observational phenotypes [@DaveySmith2003; @DaveySmith2014]. Thus, one would expect overlap between genes and genesets identified by EWAS and GWAS of the same trait if the DMPs identified are also of aetiological relevance. Assuming mapping of DMPs and SNPs to genes is correct, the genes identified by EWAS may cause variation in complex traits without overlapping with genes identified in GWAS. 
Under the scenario where the effect of a SNP on a complex trait is mediated by a distal DNA methylation site (or the gene that site is tagging), GWAS and EWAS may identify genes that do not overlap, but that are along the same causal pathway from genome to complex trait. One plausible mechanism by which trans-mQTLs may act is via transcription factors. A trans-mQTL may influence the transcription of a nearby transcription factor and then the transcription factor could cause a change in DNA methylation at distal sites. One study has provided evidence this may occur frequently [@Bonder2017]. In this scenario the genes proximal to the identified SNP and DMP would lie on the same causal pathway and so would likely be part of the same genesets. This is likely not the only plausible mechanism of trans-mQTL function though. DNA methylation may also mediate non-genetic effects, allowing for causal DMPs to be identified at genes not near pertinent genetic variation. However, there is strong evidence that the majority of DNA methylation sites have a heritable component. As such any effect of DNA methylation on a trait could be influenced by genetic variation. If DNA methylation is influenced by proximal genetic variants then the discovery of the same gene(s) will be a function of GWAS and EWAS power. If only distal genetic variants influence the DNA methylation site, then the overlap of genesets is a function of regulatory mechanisms and power. These two issues, and likely others, may introduce noise into the results. However, there is overwhelming evidence that confounding and reverse causation are pervasive across observational epidemiology [@Lawlor2004; @Sattar2017; @DaveySmith1992; @Fewell2007] as well as within EWAS [@Birney2016; @Heijmans2012]. This suggests that the evidence that EWAS and GWAS are not identifying any more overlapping genes and genesets than expected by chance is likely due to identified DMPs being mostly the result of reverse causation or confounding. 
As the simulations showed, even if an EWAS identifies DMPs that cause a change in the trait, if the majority of DMPs identified are due to confounding or reverse causation then the overlap will be indistinguishable from the overlap expected by chance. Thus, our empirical results do not preclude the possibility that some DMPs identified by the EWAS are pertinent to trait aetiology. For body mass index, work has already suggested the trait causes changes in DNA methylation rather than _vice versa_ [@Philibert2016; @Wahl2017], supporting our findings here. Further, one study suggested DNA methylation changes capture different components of body mass index variance than genetic variation [@Shah2015] and another estimated the percentage of trait variance captured by DNA methylation was 75.7% when accounting for genotype [@TrejoBanos2020]. Simulations involving empirical data from former vs. never smoking EWAS and GWAS suggested that overlap between causal and associated genes was high. This is surprising for two reasons. Firstly, it differs from the current vs. never smoking results, suggesting distinct genetic or epigenetic architectures of those traits. Secondly, there is evidence that for DNA methylation changes identified in relation to smoking, smoking is likely causing DNA methylation variation and not _vice versa_. Although the statistical tests suggested that it was unlikely that the proportion of causal and associated genes is 0.1 or lower, it should be noted that the absolute difference between simulated results and empirical results was not great. If EWAS is discovering some DMPs that influence genes that cause changes in the trait of interest, study power is the limiting factor for detecting overlap. For traits with a weak polygenic architecture (few genes explain most of the heritability), such as gene expression [@Wheeler2016; @Mogil2018], discovering almost total overlap would be inevitable even with modest sample sizes. 
### Little overlap with any GWAS {#little-overlap-with-any-gwas} Little correlation was found between geneset enrichment scores for EWAS and GWAS of non-corresponding traits. In a scenario where DNA methylation was capturing a specific facet of a trait one might expect correlation between EWAS of the original trait and GWAS of that facet. For example, if changes in DNA methylation associated with smoking were mostly responsible for the effect of smoking on lung cancer then one would expect to observe an overlap between the genes and pathways identified by an EWAS of smoking and GWAS of lung cancer. This specific example is explored, in part, in __Chapter \@ref(dnam-lung-cancer-mr)__ under a Mendelian randomization framework and has been examined before, with a study suggesting methylation at two sites (of over 1000 smoking-related sites) mediate over 30% of the effect of smoking on lung cancer [@Fasanelli2015]. The results of either study suggest most sites will not mediate the effect of smoking on lung cancer and thus there would be little overlap between genes and pathways of an EWAS of smoking and a GWAS of lung cancer. This is corroborated by the results of our study: there was little evidence that correlation of pathway enrichment scores between the two, 0.15, was greater than the mean correlation enrichment score across all GWAS-GWAS and GWAS-EWAS correlations, 0.12 (FDR > 0.05). It is important to note that overlap between EWAS and GWAS genesets may be missed even if this mediation model is true for various traits. As shown in the simulations, detecting this overlap depends on individual study power as well as the underlying genetic and epigenetic architecture of the trait. There are further factors that may limit detection of geneset overlap, which are discussed later on. 
### Information gained from EWAS {#information-gained} The fact that genes and pathways identified from GWAS and EWAS of the same traits are seemingly very separate suggests we are gaining new information from EWAS, even if interpreting the new information may be difficult. Key to interpreting the EWAS results would be to try and disentangle whether the EWAS results are likely due to confounding (as is explored in __Chapter \@ref(dnam-lung-cancer-mr)__). Interpreting EWAS can also be difficult due to cell type heterogeneity and the complexity of mechanisms which mediate DNA methylation changes [@Birney2016; @Jones2012; @Houseman2012; @Jaffe2014; @Hemani2017]. Due to these difficulties it should not be concluded that EWAS definitely help increase our biological understanding of complex traits. Rather, DNA methylation is capturing different biological information. Regardless of biological insight gained, translational impact may still be gleaned from DNA methylation studies; DNA methylation may aid diagnoses by acting as a reliable biomarker or could help predict various health outcomes [@Relton2010]. There are also benefits to understanding the biological consequences of a trait, something that EWAS might help identify and GWAS will not (at least not directly). This does depend on further research to understand how changes in DNA methylation downstream of complex trait variation are relevant to human health. Further, establishing where exactly DNA methylation may lie on the causal pathway may be difficult and work is ongoing to discover this for various traits [@Neumeyer2020]. Use of causal inference methods such as Mendelian randomization [@DaveySmith2014; @DaveySmith2003; @Relton2012] can be applied (and are in __Chapter \@ref(dnam-lung-cancer-mr)__), but this still comes with various caveats, as discussed in __Section \@ref(establishing-causality)__ [@Relton2012; @Neumeyer2020]. 
Some studies also try and confirm effects experimentally [@Rakyan2011] and use previous biological knowledge of the trait to try and understand EWAS results. However, some of the biological knowledge of complex traits relating to genes and pathways comes from GWAS of those traits. This study suggests that EWAS is unlikely to identify many genes proximal to genetic variation pertinent to the trait of interest and further the genesets are unlikely to overlap with those identified in GWAS. Therefore, comparison of EWAS results to those of a corresponding GWAS is unlikely to explain a large proportion of DNA methylation-trait associations observed in EWAS. This may make inference from EWAS difficult, yet it seems likely the interpretability of DNA methylation studies will continue to improve over the coming years, as understanding the underlying epigenetic architecture of complex traits could still provide translational benefits [@Relton2010]. ### Limitations {#limitations-06} As discussed, detecting gene or pathway overlap depends on the genetic and DNA methylation architecture of the trait. Here only eight traits, two of which are smoking behaviour traits, have been studied. This means the results cannot be generalised to all or even the majority of complex traits. These analyses could be repeated by setting a less restrictive sample size limit, but it was felt that would make the results less reliable and impossible in many circumstances where too few DNA methylation sites had been discovered by EWAS. As sample sizes increase and technologies measuring more DNA methylation sites become more common, it would be interesting to repeat the analysis. Often in GWAS and EWAS, prioritisation of SNPs and DMPs identified occurs before functional mapping. 
Prioritisation for both studies may be informed by prior knowledge of the trait, prior understanding of molecular biology, predicted consequences of observed variation (for example using Ensembl's Variant Effect Predictor [@McLaren2016]), replication of findings or a number of other methods. In this study, I did not perform any prioritisation (besides the conventional P value threshold cut-offs) and thus may have increased the amount of "noise" in the signal taken forward for functional annotation. Unfortunately, this extra prioritisation of sites is not tractable when comparing many different association studies and may reduce power to detect any overlap between genes and pathways. However, added noise is unlikely to prevent detection of true overlap if that true overlap is substantial, as shown by the simulations (__Figure \@ref(fig:sim1-summ-plot)__). The nearest gene, by chromosomal position, to a DMP or SNP is not necessarily the gene of interest. SNPs may have effects on genes distal to their position [@Aguet2017]. Also, the correlation between genetic variants inflates associations of variants proximal to the true causal variant, which may map to unrelated genes. Further, the correlation structure in DNA methylation data may induce associations between complex traits at a site far from where variation in DNA methylation causes complex trait changes [@Aguet2017]. Therefore, the mapping of DMPs and SNPs to genes in this study could likely be improved. The "correct" method for this mapping has not yet been established though and even though some tools are available (such as eQTL studies), there are caveats to them too [@Huang2018; @Kanduri2019]. Our understanding of molecular pathways is not complete and thus attributing genes to certain pathways or functionalities may be erroneous. 
However, the results remained consistent across four different methods that annotate genes to pathways, suggesting differences in mapping genes to genesets should not impact our conclusions. Biological information gained from EWAS and GWAS may be defined in various ways and depending on the interpretation of this, one could alter methods used to extract biological information. However, first exploring the genomic regions identified and then mapping these to potentially relevant biological genesets is common amongst GWAS and EWAS [@Sharp2017; @Reese2019; @Everson2019; @Chen2020; @Yengo2018; @Visscher2017; @Ellinghaus2016] and gave a simple way to compare the information from the two study types. ## Conclusion {#conclusion-06} Overall this chapter provides evidence that, for eight complex traits, there is little overlap between genes and genesets identified by EWAS and GWAS of the same trait. Given the differences in properties between DNA methylation and genetic variants, the results presented in this study may apply to other traits, but this is still to be confirmed. Where lack of overlap between genes and genesets is found, it suggests EWAS may be providing new biological information, however, the interpretability of EWAS is still in question and with current methods it is hard to determine if EWAS results are attributable to confounding or reverse causation. Regardless, as causal inference within epigenetic epidemiology improves, it seems likely we will be able to interpret this apparent gain in biological information. ## Methods {#methods-06} ### Samples {#samples-06} EWAS summary data and GWAS summary data were extracted from the EWAS Catalog (__Chapter \@ref(ewas-catalog)__) and the IEU OpenGWAS Project [@Hemani2018; @Elsworth2020] respectively. The data was extracted in July 2019, when the EWAS Catalog contained data published before 2019. 
For traits that had multiple EWAS with a sample size of greater than 4500, the EWAS with the largest sample size was used; the same was applied to the GWAS. The sample size, first authors and PubMed IDs can be found in __Table \@ref(tab:trait-data-tab-06)__. ### Overlapping genomic regions {#overlapping-genomic-regions} Each chromosome was divided into 500Kb blocks, each block that did not contain a DNA methylation site measured by the HM450 array was removed. For each trait, the genome blocks that had one or more EWAS sites and one or more GWAS sites that reached the set p-value threshold were tallied. The p-value threshold was set at a lenient P<1x10^-5^ or, if it was lower, the maximum reported p-value in the EWAS of that trait. ### Mapping sites to genes and genesets {#mapping-sites-to-genes-and-genesets} The R package biomaRt [@Durinck2009] was used to extract Ensembl gene ids along with chromosome positions of all genes. The package was also used to extract Gene Ontology (GO) terms [@Ashburner2000; @Carbon2019] and map these to the Ensembl gene ids. The R package limma [@Ritchie2015] was used to extract KEGG terms [@Kanehisa2000; @Kanehisa2019] and these were mapped to Ensembl gene ids. Protein-protein interaction data, which includes data from StringDB [@Szklarczyk2019] and IntAct [@Orchard2014], and terms from the Reactome database [@Jassal2020] were extracted from [EpiGraphDB](http://www.epigraphdb.org/) [@Liu2020]. CpG sites associated with traits at P<1x10^-7^ and SNPs associated with traits at P<5x10^-8^ were taken forward to be mapped to genes. The correlation structure present in genetic and DNA methylation data makes it difficult to ascertain the precise site driving any signal observed. Thus, no filtering based on correlation between variants or CpG sites was performed. Each CpG site identified by EWAS and used in the analyses was mapped to the nearest gene (Ensembl gene ID) by chromosome position. 
If a CpG site lay within the bounds of multiple genes then the site was mapped to all of those genes. Therefore, one CpG site could map to multiple genes and one gene could map to multiple CpG sites. The same gene mapping approach was used for variants identified in GWAS. The positions of CpG sites were extracted using the R package meffil [@Min2018]. ### Methods for assessing overlap {#methods-for-assessing-overlap} To test the overlap between genes identified ORs were generated as follows \begin{equation} OR_{gene-overlap} = \frac{odds_{EG}} {odds_{EnG}} (\#eq:overlap-of-genes) \end{equation} where $odds_{EG}$ is the odds of a gene being identified in EWAS and GWAS and $odds_{EnG}$ is the odds of a gene being identified in EWAS, but not in GWAS. Genes may map to genesets by chance. Often in EWAS and GWAS, enrichment for any genesets are tested by assessing whether the genes identified are more common in any geneset than expected by chance. Power was compared between mapping genes to genesets and directly assessing overlap like in Equation \@ref(eq:overlap-of-genes) and correlation between enrichment scores for each geneset. Enrichment scores are also odds ratios generated in a similar way to those in Equation \@ref(eq:overlap-of-genes): \begin{equation} OR_{geneset-enrichment} = \frac{odds_{GS}} {odds_{nGS}} (\#eq:geneset-enrichment) \end{equation} where $odds_{GS}$ is the odds of a gene being annotated to the geneset and $odds_{nGS}$ is the odds of a gene not being annotated to the geneset. For many genesets, genes annotated to that geneset would not be identified in an EWAS or GWAS, causing the enrichment score for many genesets to be zero. Further, some genesets would have very large enrichment scores. This made the relationship between enrichment scores generated for the EWAS results and the enrichment scores generated for the GWAS non-normal. 
Thus, to examine the relationship between the two, Spearman's rank correlation coefficients of the logarithm of enrichment scores were used. ### Testing power to detect overlap {#testing-power-to-detect-overlap} Simulations were set up as seen in __Figure \@ref(fig:method-simulations-schematic)__. The simulations iterated over each set of parameters 1000 times. For each iteration, two sets of genes, GWAS genes and EWAS genes, were sampled from the total set of genes. Each iteration assessed gene overlap and geneset overlap between these gene sets using Equation \@ref(eq:overlap-of-genes). Equation \@ref(eq:geneset-enrichment) was used to generate enrichment scores for each gene set and then correlation between the enrichment scores was assessed. GWAS genes were only sampled from a set of "causal" genes and a proportion of EWAS genes were sampled from the set of causal genes and the rest from the set of "associated" genes. Receiver operator characteristic (ROC) curves were generated to assess whether it was possible to predict the gene overlap, geneset overlap and enrichment score correlations for scenarios where the proportion of causal EWAS genes was greater than zero from the scenario where the proportion of causal EWAS genes was zero. The area under these ROC curves was then calculated in each case. These simulations were repeated for each geneset and protein-protein interaction database. The protein-protein interaction and Reactome databases all map only to protein coding genes, whereas the GO and KEGG databases map to all Ensembl gene IDs. To compare predictive ability across annotation methods, Ensembl gene IDs that were not protein coding genes were excluded. A comparison between the performance of models when mapping to all Ensembl gene IDs or to protein coding genes was made for GO and KEGG databases (__Figure \@ref(fig:sim1-go-kegg-gene-comp5)__). 
From these simulations, the best method to assess geneset overlap, and the best geneset annotation method to assess that overlap and the scenarios (i.e. study power required, proportion of DMPs that need to be causal) in which it could be expected to be able to detect overlap could be deduced. ### Empirical analyses {#empirical-analyses} The DNA methylation sites identified in the EWAS at P<1x10^-7^ and the SNPs identified in the GWAS at P < 5x10^-8^ were mapped to genes and genesets. Overlap between genes was calculated as before (Equation \@ref(eq:overlap-of-genes)), enrichment scores were generated and correlated as described above. Expected overlap was generated to compare to the observed results. For this, random positions were chosen in the genome equal to the number of DMPs identified in the EWAS. These genes were then used to assess gene and geneset overlap as for the observed results. This was repeated 1000 times to generate a null distribution and a z-test was used to assess whether there was a difference between the observed results and the mean of the null distribution. There is a correlation structure within DNA methylation data [@Zhang2015], and it was hypothesised this might contribute to the observed results. By randomly sampling positions from the genome, a new correlation structure between DMPs would be generated. I tested whether sampling the genome in a non-random way, aimed at keeping some correlation structure, altered the results. To generate new data whilst attempting to keep a similar correlation structure, a fixed number of base pairs were added to each of the DMPs identified in the empirical analysis such that \begin{equation} BP_{new} = BP_{dmp} + max(G) \times I (\#eq:generating-new-positions) \end{equation} where $BP_{new}$ = base pair of new site, $BP_{dmp}$ = base pair of DMP identified in the EWAS, $G$ = gene size, and $I$ = iteration. 
If $BP_{new}$ extended beyond the end of a chromosome, the position moved onto the next chromosome, with positions moving past the end of chromosome 22 being moved to chromosome 1. Overall, the overlap between genes and genesets identified by EWAS and GWAS did not change across null distribution sampling methods. ### Understanding architecture from geneset overlap {#architecture-sims-method} Simulations were set up as illustrated in __Figure \@ref(fig:arch-simulations-schematic)__. Here I describe simulations for a single trait. These were repeated for all traits. Firstly, SNPs identified in GWAS and DMPs identified in EWAS were mapped to genes as described in __Section \@ref(gwas-ewas-overlap)__. Genes were then randomly sampled from Ensembl gene IDs and were assigned as either "causal", meaning changes in that gene effect variation in the phenotype across individuals, or "associated", meaning changes in the gene are associated with the phenotype across individuals, but the nature of association is not known. The empirically identified (or "known") GWAS genes ($KGG$) were added to the list of causal genes and the empirically identified (or "known") EWAS genes ($KEG$) were added to the list of associated genes. This combined set of causal and associated genes can be thought of as all the genes related to the trait of interest. A number of genes, equal to the number of $KGG$ ($N_{KGG}$), was sampled from the causal set of genes and assigned to be the "GWAS genes" in the simulations. A number of genes, equal to the number of $KEG$ ($N_{KEG}$), was sampled from the associated set of genes and assigned to be the "EWAS genes" in the simulations. Then geneset enrichment analyses for both the EWAS and GWAS genes were performed (equation \@ref(eq:geneset-enrichment)) and correlation between the enrichment scores was assessed as previously. 
In these simulations, the number of total genes was varied and the number of causal and associated genes was always set to be half of the total number of genes related to a trait. The total number of genes was proportional to the total number of known genes ($N_{KTG} = N_{KGG} + N_{KEG}$). The smallest number of total genes for any simulation was double the number of $N_{KTG}$ and the greatest number of total genes was 40 times $N_{KTG}$. The other variable set to vary between simulations was the proportion of overlap between causal and associated genes. This varied between zero and one, where zero represented the scenario where only the overlap in $KGG$ and $KEG$ would be present in the overlap between causal and associated genes and one represented the scenario where all causal and associated genes were the same. For each simulation scenario, the simulations were repeated 1000 times and box plots show the range of output from those 1000 repeats. ### Assessing the correlation between geneset enrichment results {#assessing-the-correlation-between-geneset-enrichment-results} GWAS were extracted from the IEU OpenGWAS Project [@Elsworth2020] with the following criteria: * Sample size > 5000 * European population * For binary traits, cases and controls had to be greater than 500 * Full genome-wide results, i.e. not just associations between a molecular trait and variants in cis. For each GWAS, all SNPs that associated with the trait at P<5x10^-8^ were extracted. CpGs associated with the eight EWAS at P<1x10^-7^ were then extracted and mapped to genes. For each study, enrichment scores were generated for GO terms as before (Equation \@ref(eq:geneset-enrichment)) and correlation between them assessed. When assessing whether gene overlap or geneset enrichment score correlations were greater than the mean, a z-test was performed. As multiple tests were performed the false discovery rate was limited by applying the Benjamini-Hochberg method [@Benjamini1995]. 
<!--chapter:end:06-ewas_gwas_comparison.Rmd--> # Appraising the causal relevance of DNA methylation for risk of lung cancer {#dnam-lung-cancer-mr} ## Chapter summary {#chapter-summary-07} The results of __Chapter \@ref(ewas-gwas-comp-chapter)__ suggested that epigenome-wide association studies (EWAS) might be identifying differential facets of complex trait biology when compared to genome-wide association studies (GWAS) of corresponding traits. As discussed in __Section \@ref(problems-for-ewas)__, observational studies such as EWAS suffer from issues of reverse causation and confounding, which could have explained the results in __Chapter \@ref(ewas-gwas-comp-chapter)__. In this chapter I attempt to explore the extent to which EWAS results may be confounded by examining EWAS of one trait, lung cancer, and comparing these to corresponding results from Mendelian randomization analyses. DNA methylation has been postulated to mediate over 30% of the effect of smoking on lung cancer [@Fasanelli2015], making this case of particular interest to potential lung cancer preventative strategies. I first performed a meta-analysis of four lung cancer EWAS (918 cases, 918 controls). Next, I conducted a two-sample MR analysis, using genetic instruments for methylation at CpG sites identified in the EWAS meta-analysis, and 29,863 cases and 55,586 controls from the TRICL-ILCCO lung cancer consortium, to generate estimates of the causal relationship between methylation at these sites and lung cancer. Sixteen CpG sites were identified from the EWAS meta-analysis (FDR < 0.05), 14 of which were associated with genetic variants at P<5x10^-8^, which could be used as instruments. MR provided little evidence that DNA methylation in peripheral blood at the 14 CpG sites plays a causal role in lung cancer development (FDR > 0.05), including for cg05575921 (_AHRR_) where methylation is strongly associated with both smoke exposure and lung cancer risk. 
The results contrast with previous observational and mediation analyses, which have made strong claims regarding the causal role of DNA methylation on lung cancer risk. Yet, they add evidence to the conclusions of __Chapter \@ref(ewas-gwas-comp-chapter)__ - that findings in EWAS may not be of aetiological relevance. Previous suggestions of a mediating role of methylation at sites identified in peripheral blood, such as cg05575921 (_AHRR_), could be unfounded. However, these results do not preclude the possibility that differential DNA methylation at other sites is causally involved in lung cancer development, especially within lung tissue. ### Contributions statement {#contributions-statement-07} I performed the main analyses: the meta-analysis of the EWAS, the MR using mQTLs identified in ARIES, the replication of mQTLs in the NSHDS cohort and the supplementary analyses assessing the association between mQTLs and gene expression using GTEx data. I also wrote the majority of the text. Some supplementary analyses, which provided more clarity on the relationship between DNA methylation and lung cancer, were performed by others. <NAME> performed analysis in the CCHS cohort (see __Sections \@ref(ahrr-one-sample-mr-methods) and \@ref(ahrr-one-sample-mr)__) and <NAME> wrote these sections. <NAME> analysed differences in lung tumour and adjacent healthy tissue (see __Sections \@ref(lc-heathly-v-normal-methods) and \@ref(lc-heathly-v-normal)__). ## Introduction {#introduction-07} Lung cancer is the most common cause of cancer-related death worldwide [@Ferlay2013]. Several DNA methylation changes have been recently identified in relation to lung cancer risk [@Fasanelli2015; @Baglietto2017; @McCarthy2016]. However, these epigenetic marks are sensitive to reverse causation, being affected by cancer processes [@Jones2002], and are also prone to confounding, for example by socio-economic and lifestyle factors [@Borghol2012; @Elliott2014]. 
One CpG site, cg05575921 within the aryl hydrocarbon receptor repressor (_AHRR_) gene, has been consistently replicated in relation to both smoking [@Joehanes2016] and lung cancer [@Fasanelli2015; @Baglietto2017; @Bojesen2017] and functional evidence suggests that this region could be causally involved in lung cancer [@Zudaire2008]. However, the observed association between methylation and lung cancer might simply reflect separate effects of smoking on lung cancer and DNA methylation, i.e. the association may be a result of confounding [@Richmond2016], including residual confounding after adjustment for self-reported smoking behaviour [@Fewell2007; @Munafo2012]. Furthermore, recent EWAS of lung cancer have revealed additional CpG sites which may be causally implicated in development of the disease [@Fasanelli2015; @Baglietto2017]. As discussed in __Section \@ref(establishing-causality)__, Mendelian randomization (MR) can be used to help infer causality in associations between DNA methylation and complex traits [@Relton2012; @Relton2015; @Richardson2017] under a paradigm that largely mitigates the problem of confounding if certain assumptions are met (__Figure \@ref(fig:mr-diagram)__). Briefly, MR uses genetic variants as proxies for the exposure of interest (here DNA methylation) under an instrumental variable framework. If the key assumptions of MR are met (__Figure \@ref(fig:mr-diagram)__), any associations observed between the proxied exposure and outcome will reflect a causal relationship. In this study, I performed a meta-analysis of four lung cancer EWAS (918 case-control pairs) from prospective cohort studies to identify CpG sites associated with lung cancer risk and applied MR to investigate whether the observed DNA methylation changes at these sites are causally linked to lung cancer. 
## Methods {#methods-07} ### EWAS study details {#ewas-study-details} A meta-analysis of four lung cancer case-control EWAS was conducted to identify DNA methylation sites associated with lung cancer. In total there were 918 cases and 918 matched controls for the analysis. A description of the studies, how controls were matched to cases and an outline of the study populations, laboratory methods, data pre-processing and quality control methods can be found in __Section \@ref(lc-ewas-data)__ and they are described in detail elsewhere [@Baglietto2017]. ### EWAS Meta-analysis {#methods-ewas-meta-analysis} To quantify the association between the methylation level at each CpG and the risk of lung cancer, conditional logistic regression models were fitted for beta values of methylation (which ranges from 0 (no cytosines methylated) to 1 (all cytosines methylated)) on lung cancer status for the four studies. Surrogate variables were computed in the four studies using the SVA R package [@Leek2016] and the proportion of CD8+ and CD4+ T cells, B cells, monocytes, natural killer cells and granulocytes within whole blood were derived from DNA methylation [@Houseman2012]. The following EWAS models were included in the meta-analysis: Model 1 – unadjusted; Model 2 – adjusted for 10 surrogate variables (SVs); Model 3 – adjusted for 10 SVs and derived cell proportions. EWAS stratified by smoking status was also conducted (never (N=304), former (N=648) and current smoking (N=857)). For Model 1 and Model 2, the case-control studies not matched on smoking status (EPIC-Italy and NOWAC) were adjusted for smoking. An inverse-variance weighted fixed effects meta-analysis was performed of the EWAS (918 case-control pairs) using the [METAL software](http://csg.sph.umich.edu/abecasis/metal/). Direction of effect, effect estimates and the I^2^ statistic were used to assess heterogeneity across the studies in addition to effect estimates across smoking strata (never, former and current). 
All sites identified at a false discovery rate (FDR) < 0.05 in Model 2 and 3 were also present in the sites identified in Model 1. The effect size differences between models for all sites identified in Model 1 were assessed by a Kruskal-Wallis test and a post-hoc Dunn’s test. There was little evidence for a difference (P > 0.1), so to maximize inclusion into the MR analyses sites identified in the unadjusted model (Model 1) were taken forward. ### Mendelian randomization {#methods-mendelian-randomization-07} The relationship between sites identified in the meta-analysis of EWAS and lung cancer risk was re-assessed under a two-sample MR framework to establish whether differential methylation at these sites was likely to be causal [@Inoue2010; @Pierce2013]. #### Sample 1: Accessible Resource for Integrated Epigenomic Studies (ARIES) In the first sample, mQTL-methylation effect estimates ($\beta_{GP}$) for each CpG site of interest were identified in an mQTL database from the Accessible Resource for Integrated Epigenomic Studies (ARIES) (http://www.mqtldb.org). Details on the methylation pre-processing, genotyping and quality control (QC) pipelines are described in __Section \@ref(aries-02)__. ARIES data was used in lieu of the larger Genetics of DNA Methylation Consortium (GoDMC) as GoDMC data was not available at the time of this study. However, as mentioned in __Section \@ref(results-mendelian-randomization-07)__, using the smaller sample from ARIES did not limit our power. Further discussion of this can be found in __Section \@ref(discussion-07)__. If there was evidence for an mQTL-CpG site association in ARIES in at least one time-point, it was assessed whether the mQTL replicated across time points in ARIES (FDR < 0.05, same direction of effect). Further, this association was re-analysed using linear regression of methylation on each genotyped SNP available in an independent cohort (NSHDS), using RVTESTS [@Zhan2016]. 
The same NSHDS samples on which DNA methylation was measured were genotyped using the Illumina Infinium OncoArray-500k BeadChip (Illumina Inc. San Diego, CA) and quality control parameters were applied under the recently published TRICL-ILCCO GWAS study on lung cancer [@McKay2017]. Genetic imputation was performed on these samples using the Haplotype Reference Consortium (HRC) Panel (release 1) [@McCarthy2016] through the Michigan Imputation Server [@Das2016]. Replicated mQTLs were included where possible to reduce the effect of winner’s curse using effect estimates from ARIES. The instrument strength of the mQTLs were assessed by the variance explained in methylation by each mQTL (r^2^) as well as the F-statistic in ARIES __Table \@ref(tab:sup-tab1-07)__. \linebreak \begin{table}[!h] \caption{(\#tab:sup-tab1-07)Instrument strength in ARIES} \centering \resizebox{\linewidth}{!}{ \begin{tabular}[t]{llllllll} \toprule SNP & CpG & Beta & SE & P & N & F & r\textsuperscript{2}\\ \midrule \cellcolor{gray!6}{rs1048691} & \cellcolor{gray!6}{cg23387569} & \cellcolor{gray!6}{0.35} & \cellcolor{gray!6}{0.053} & \cellcolor{gray!6}{3.9e-11} & \cellcolor{gray!6}{834} & \cellcolor{gray!6}{45} & \cellcolor{gray!6}{0.05}\\ rs1939110 & cg11660018 & -0.40 & 0.048 & 2.6e-16 & 834 & 70 & 0.08\\ \cellcolor{gray!6}{rs13087163} & \cellcolor{gray!6}{cg01901332} & \cellcolor{gray!6}{-0.19} & \cellcolor{gray!6}{0.035} & \cellcolor{gray!6}{5.8e-08} & \cellcolor{gray!6}{834} & \cellcolor{gray!6}{30} & \cellcolor{gray!6}{0.03}\\ rs7927381 & cg01901332 & 0.38 & 0.069 & 3.9e-08 & 834 & 31 & 0.04\\ \cellcolor{gray!6}{rs878481} & \cellcolor{gray!6}{cg05951221} & \cellcolor{gray!6}{-0.32} & \cellcolor{gray!6}{0.042} & \cellcolor{gray!6}{5.9e-14} & \cellcolor{gray!6}{834} & \cellcolor{gray!6}{58} & \cellcolor{gray!6}{0.07}\\ \addlinespace rs1048691 & cg16823042 & 0.32 & 0.053 & 2.8e-09 & 834 & 36 & 0.04\\ \cellcolor{gray!6}{rs734568} & \cellcolor{gray!6}{cg03636183} & \cellcolor{gray!6}{0.28} & 
\cellcolor{gray!6}{0.043} & \cellcolor{gray!6}{6.7e-11} & \cellcolor{gray!6}{834} & \cellcolor{gray!6}{44} & \cellcolor{gray!6}{0.05}\\ rs72967500 & cg23771366 & -0.63 & 0.062 & 1.3e-22 & 834 & 102 & 0.11\\ \cellcolor{gray!6}{rs3748971} & \cellcolor{gray!6}{cg21566642} & \cellcolor{gray!6}{-0.50} & \cellcolor{gray!6}{0.080} & \cellcolor{gray!6}{5.3e-10} & \cellcolor{gray!6}{834} & \cellcolor{gray!6}{39} & \cellcolor{gray!6}{0.05}\\ rs9643220 & cg25305703 & 0.34 & 0.049 & 7.1e-12 & 834 & 48 & 0.05\\ \addlinespace \cellcolor{gray!6}{rs77433148} & \cellcolor{gray!6}{cg08709672} & \cellcolor{gray!6}{-0.80} & \cellcolor{gray!6}{0.137} & \cellcolor{gray!6}{6.3e-09} & \cellcolor{gray!6}{834} & \cellcolor{gray!6}{34} & \cellcolor{gray!6}{0.04}\\ rs17518433 & cg09935388 & -0.33 & 0.046 & 1.8e-12 & 834 & 51 & 0.06\\ \cellcolor{gray!6}{rs463924} & \cellcolor{gray!6}{cg26963277} & \cellcolor{gray!6}{-0.39} & \cellcolor{gray!6}{0.045} & \cellcolor{gray!6}{6.8e-18} & \cellcolor{gray!6}{834} & \cellcolor{gray!6}{78} & \cellcolor{gray!6}{0.09}\\ rs56080708 & cg27241845 & 0.72 & 0.070 & 2.4e-23 & 834 & 105 & 0.11\\ \cellcolor{gray!6}{rs11744553} & \cellcolor{gray!6}{cg05575921} & \cellcolor{gray!6}{0.22} & \cellcolor{gray!6}{0.040} & \cellcolor{gray!6}{7.2e-08} & \cellcolor{gray!6}{834} & \cellcolor{gray!6}{30} & \cellcolor{gray!6}{0.03}\\ \addlinespace rs11746538 & cg05575921 & -0.37 & 0.058 & 3.0e-10 & 834 & 41 & 0.05\\ \bottomrule \multicolumn{8}{l}{\textsuperscript{} SE = standard error, P = P value, N = sample size}\\ \multicolumn{8}{l}{\textsuperscript{} F = F statistic, r\textsuperscript{2} = Variance explained}\\ \end{tabular}} \end{table} #### Sample 2: Transdisciplinary Research in Cancer of the Lung and The International Lung Cancer Consortium In the second, independent, sample, summary data was extracted from a GWAS meta-analysis of lung cancer risk conducted by the TRICL-ILCCO consortium (29,863 cases, 55,586 controls). 
A brief description of the data can be found in __Section \@ref(tricl-ilcco-02)__. This summary data was used to obtain mQTL-lung cancer estimates ($\beta_{GD}$). For each independent mQTL (r^2^ < 0.01), I calculated the log OR per SD unit increase in methylation by the formula $\frac{\beta_{GD}} {\beta_{GP}}$ (Wald ratio). Standard errors were approximated by the delta method [@Thomas2007]. Where multiple independent mQTLs were available for one CpG site, these were combined in a fixed effects meta-analysis after weighting each ratio estimate by the inverse variance of their associations with the outcome (the IVW method). Heterogeneity in Wald ratios across mQTLs was estimated using Cochran’s Q test, which can be used to indicate horizontal pleiotropy [@Bowden2015]. Differences between the observational and MR estimates were assessed using a Z-test for difference. The power to detect the observational effect estimates in the two-sample MR analysis was assessed _a priori_, based on an alpha of 0.05, sample size of 29,863 cases and 55,586 controls (from TRICL-ILCCO) and calculated variance explained (r^2^). #### Lung cancer subtype analysis {#lc-subtypes-methods} MR analyses were also performed to investigate the impact of methylation on lung cancer subtypes in TRICL-ILCCO: adenocarcinoma (11,245 cases, 54,619 controls), small cell carcinoma (2791 cases, 20,580 controls), and squamous cell carcinoma (7704 cases, 54,763 controls). The association in never smokers (2303 cases, 6995 controls) and ever smokers (23,848 cases, 16,605 controls) [@McKay2017] was also assessed. Differences between the smoking subgroups were assessed using a Z-test for difference. #### Association of mQTLs and smoking {#mqtl-smoking-mr-methods} Smoking is known to be associated with DNA methylation changes at many CpG sites [@Joehanes2016; @Shenker2013; @Zeilinger2013] and causes lung cancer. 
Therefore, smoking was thought to be the most likely confounder of any associations between DNA methylation and lung cancer. As discussed in __Section \@ref(mr-01)__, the genetic variants associated with an exposure of interest tend not to be associated with confounders. To ensure that the mQTLs used in the MR analyses were not exerting their effect on the exposure via smoking, the association between these mQTLs and four smoking behaviours was tested. This analysis was performed using GWAS of four smoking behaviours: number of cigarettes per day, smoking cessation rate, smoking initiation and age of smoking initiation from the Tobacco and Genetics (TAG) consortium (N=74,053) [@Furberg2010]. ### Supplementary analyses {#methods-supplementary-analyses-07} #### Assessing the potential causal effect of _AHRR_ methylation: one sample MR {#ahrr-one-sample-mr-methods} Given previous findings implicating methylation at _AHRR_ in relation to lung cancer [@Fasanelli2015; @Baglietto2017], a one-sample MR analysis [@Haycock2016] was performed of _AHRR_ methylation on lung cancer incidence using individual-level data from the Copenhagen City Heart Study (CCHS) (357 incident cases, 8401 remaining free of lung cancer). A description of this study and details on the phenotyping, measurement of DNA methylation and genotyping can be found in __Section \@ref(cchs-02)__. #### Identification of mQTLs for CCHS one-sample MR mQTLs located within 1Mb of cg05575921 _AHRR_ were identified in ARIES (FDR<0.05). Of those mQTLs which replicated within the CCHS, an LD pruning step was performed using a less stringent r^2^ threshold of 0.2, and an unweighted allele score was generated, calculated by coding and then summing the alleles to reflect the average number of methylation-increasing alleles carried by an individual. 
Associations between the allele score and several potential confounding factors (sex, alcohol consumption, smoking status, occupational exposure to dust and/or welding fumes, passive smoking) were investigated. Then MR analyses were performed using two-stage Cox regression, with adjustment for age and sex, and further stratified by smoking status. #### Tumour and adjacent normal methylation patterns {#lc-heathly-v-normal-methods} DNA methylation data from lung cancer tissue and matched normal adjacent tissue (N=40 squamous cell carcinoma and N=29 adenocarcinoma), profiled as part of The Cancer Genome Atlas (TCGA), were used to assess tissue-specific DNA methylation changes across sites identified in the meta-analysis of EWAS, as outlined previously [@Teschendorff2015]. #### mQTL association with gene expression Gene expression at genes annotated to CpG sites identified in the lung cancer EWAS was examined in whole blood and lung tissue using data from the Gene-Tissue Expression (GTEx) consortium [@GTExConsortium2013]. Analyses were conducted in Stata (version 14) and R (version 3.2.2). For the two-sample MR analysis the MR-Base R package TwoSampleMR [@Hemani2016] was used. An adjusted P value that limited the FDR was calculated using the Benjamini-Hochberg method [@Benjamini1995]. All statistical tests were two-sided. ## Results {#results-07} A flowchart representing the study design along with a summary of the results at each step is displayed in __Figure \@ref(fig:fig1-07)__. ### EWAS meta-analysis {#results-ewas-meta-analysis} The basic meta-analysis adjusted for study-specific covariates identified 16 CpG sites which were hypomethylated in relation to lung cancer (FDR<0.05, Model 1, __Figure \@ref(fig:fig2-07)__). Adjusting for 10 surrogate variables (Model 2) and derived cell counts (Model 3) gave similar results (__Table \@ref(tab:tab1-07)__). 
The direction of effect at the 16 sites did not vary between studies (median I^2^=38.6) (__Table \@ref(tab:sup-tab2-07)__), but there was evidence for heterogeneity of effect estimates at some sites when stratifying individuals by smoking status (__Table \@ref(tab:tab1-07)__). (ref:fig1-07-cap) __Study design with results summary__. ARIES = Accessible Resource for Integrated Epigenomic Studies, TRICL-ILLCO = Transdisciplinary Research in Cancer of the Lung and The International Lung Cancer Consortium, MR = Mendelian randomization, CCHS = Copenhagen City Heart Study, TCGA = The Cancer Genome Atlas. * = 2000 individuals with samples at multiple timepoints. (ref:fig1-07-scap) Study design with results summary \blandscape \begin{figure}[htbp] {\centering \includegraphics[width=1\linewidth]{figure/07-dnam_lungcancer_mr/Figure_1} } \caption[(ref:fig1-07-scap)]{(ref:fig1-07-cap)}(\#fig:fig1-07) \end{figure} \elandscape (ref:fig2-07-cap) __Observational associations of DNA methylation and lung cancer: A fixed effects meta-analysis of lung cancer EWAS weighted on the inverse variance was performed to establish the observational association between differential DNA methylation and lung cancer__. Left-hand side: Manhattan plot, all points above the solid line are at P < 1x10^-7^ and all points above the dashed line (and triangular points) are at FDR < 0.05. In total 16 CpG sites are associated with lung cancer (FDR < 0.05). Right-hand side: Quantile-quantile plot of the EWAS results (same data as the Manhattan plot). 
(ref:fig2-07-scap) Observational associations of DNA methylation and lung cancer: A fixed effects meta-analysis of lung cancer EWAS weighted on the inverse variance was performed to establish the observational association between differential DNA methylation and lung cancer \blandscape \begin{figure}[htbp] \includegraphics[width=0.5\linewidth]{figure/07-dnam_lungcancer_mr/Figure_2a} \includegraphics[width=0.5\linewidth]{figure/07-dnam_lungcancer_mr/Figure_2b} \caption[(ref:fig2-07-scap)]{(ref:fig2-07-cap)}(\#fig:fig2-07) \end{figure} \elandscape \begin{landscape}\begin{table}[!h] \caption{(\#tab:tab1-07)Meta-analyses of EWAS of lung cancer using four separate cohorts: 16 CpG sites associated with lung cancer at false discovery rate < 0.05.} \centering \resizebox{\linewidth}{!}{ \begin{tabular}[t]{l>{}llllllllllllllllllll} \toprule \multicolumn{3}{c}{ } & \multicolumn{3}{c}{basic} & \multicolumn{3}{c}{sv-adjusted} & \multicolumn{3}{c}{sv-and-cell-count} & \multicolumn{3}{c}{never-smokers} & \multicolumn{3}{c}{former-smokers} & \multicolumn{3}{c}{current-smokers} \\ \cmidrule(l{3pt}r{3pt}){4-6} \cmidrule(l{3pt}r{3pt}){7-9} \cmidrule(l{3pt}r{3pt}){10-12} \cmidrule(l{3pt}r{3pt}){13-15} \cmidrule(l{3pt}r{3pt}){16-18} \cmidrule(l{3pt}r{3pt}){19-21} CpG & Gene & chr:pos & OR & SE & P & OR & SE & P & OR & SE & P & OR & SE & P & OR & SE & P & OR & SE & P\\ \midrule \cellcolor{gray!6}{cg05575921} & \em{\cellcolor{gray!6}{AHRR}} & \cellcolor{gray!6}{5:373378} & \cellcolor{gray!6}{0.47} & \cellcolor{gray!6}{0.047} & \cellcolor{gray!6}{1.4e-16} & \cellcolor{gray!6}{0.45} & \cellcolor{gray!6}{0.053} & \cellcolor{gray!6}{6.3e-14} & \cellcolor{gray!6}{0.45} & \cellcolor{gray!6}{0.055} & \cellcolor{gray!6}{3.6e-13} & \cellcolor{gray!6}{0.93} & \cellcolor{gray!6}{0.22} & \cellcolor{gray!6}{0.717} & \cellcolor{gray!6}{0.46} & \cellcolor{gray!6}{0.084} & \cellcolor{gray!6}{6.1e-07} & \cellcolor{gray!6}{0.71} & \cellcolor{gray!6}{0.066} & \cellcolor{gray!6}{5.4e-05}\\ cg21566642 & 
\em{ALPPL2} & 2:233284661 & 0.54 & 0.045 & 1.7e-15 & 0.53 & 0.050 & 2.5e-13 & 0.51 & 0.051 & 3.1e-13 & 0.89 & 0.14 & 0.418 & 0.52 & 0.081 & 1.4e-06 & 0.75 & 0.067 & 3.7e-04\\ \cellcolor{gray!6}{cg06126421} & \em{\cellcolor{gray!6}{IER3}} & \cellcolor{gray!6}{6:30720080} & \cellcolor{gray!6}{0.58} & \cellcolor{gray!6}{0.046} & \cellcolor{gray!6}{2.1e-13} & \cellcolor{gray!6}{0.54} & \cellcolor{gray!6}{0.054} & \cellcolor{gray!6}{2.5e-11} & \cellcolor{gray!6}{0.51} & \cellcolor{gray!6}{0.054} & \cellcolor{gray!6}{3.9e-12} & \cellcolor{gray!6}{0.78} & \cellcolor{gray!6}{0.19} & \cellcolor{gray!6}{0.222} & \cellcolor{gray!6}{0.56} & \cellcolor{gray!6}{0.087} & \cellcolor{gray!6}{1.9e-05} & \cellcolor{gray!6}{0.73} & \cellcolor{gray!6}{0.112} & \cellcolor{gray!6}{1.8e-02}\\ cg03636183 & \em{F2RL3} & 19:17000585 & 0.64 & 0.045 & 8.0e-12 & 0.61 & 0.053 & 8.2e-10 & 0.61 & 0.054 & 1.6e-09 & 0.91 & 0.17 & 0.553 & 0.62 & 0.084 & 7.5e-05 & 0.79 & 0.069 & 2.9e-03\\ \cellcolor{gray!6}{cg05951221} & \em{\cellcolor{gray!6}{ALPPL2}} & \cellcolor{gray!6}{2:233284402} & \cellcolor{gray!6}{0.66} & \cellcolor{gray!6}{0.045} & \cellcolor{gray!6}{9.7e-11} & \cellcolor{gray!6}{0.64} & \cellcolor{gray!6}{0.051} & \cellcolor{gray!6}{1.8e-09} & \cellcolor{gray!6}{0.63} & \cellcolor{gray!6}{0.052} & \cellcolor{gray!6}{1.5e-09} & \cellcolor{gray!6}{0.87} & \cellcolor{gray!6}{0.18} & \cellcolor{gray!6}{0.409} & \cellcolor{gray!6}{0.63} & \cellcolor{gray!6}{0.082} & \cellcolor{gray!6}{7.2e-05} & \cellcolor{gray!6}{0.82} & \cellcolor{gray!6}{0.066} & \cellcolor{gray!6}{7.4e-03}\\ \addlinespace cg01940273 & \em{ALPPL2} & 2:233284934 & 0.69 & 0.050 & 4.2e-08 & 0.68 & 0.058 & 7.3e-07 & 0.69 & 0.061 & 3.6e-06 & 1.14 & 0.23 & 0.428 & 0.57 & 0.086 & 2.6e-05 & 0.88 & 0.068 & 6.6e-02\\ \cellcolor{gray!6}{cg23771366} & \em{\cellcolor{gray!6}{PRSS23}} & \cellcolor{gray!6}{11:86510998} & \cellcolor{gray!6}{0.77} & \cellcolor{gray!6}{0.040} & \cellcolor{gray!6}{1.1e-07} & \cellcolor{gray!6}{0.73} & 
\cellcolor{gray!6}{0.051} & \cellcolor{gray!6}{1.5e-06} & \cellcolor{gray!6}{0.71} & \cellcolor{gray!6}{0.052} & \cellcolor{gray!6}{5.6e-07} & \cellcolor{gray!6}{1.09} & \cellcolor{gray!6}{0.16} & \cellcolor{gray!6}{0.490} & \cellcolor{gray!6}{0.62} & \cellcolor{gray!6}{0.076} & \cellcolor{gray!6}{1.4e-05} & \cellcolor{gray!6}{0.86} & \cellcolor{gray!6}{0.061} & \cellcolor{gray!6}{2.0e-02}\\ cg11660018 & \em{PRSS23} & 11:86510915 & 0.79 & 0.037 & 1.2e-07 & 0.70 & 0.051 & 2.0e-07 & 0.68 & 0.053 & 8.9e-08 & 0.94 & 0.13 & 0.586 & 0.75 & 0.071 & 1.0e-03 & 0.84 & 0.053 & 4.2e-03\\ \cellcolor{gray!6}{cg26963277} & \em{\cellcolor{gray!6}{KCNQ1}} & \cellcolor{gray!6}{11:2722407} & \cellcolor{gray!6}{0.67} & \cellcolor{gray!6}{0.055} & \cellcolor{gray!6}{1.2e-07} & \cellcolor{gray!6}{0.64} & \cellcolor{gray!6}{0.068} & \cellcolor{gray!6}{3.8e-06} & \cellcolor{gray!6}{0.62} & \cellcolor{gray!6}{0.069} & \cellcolor{gray!6}{2.5e-06} & \cellcolor{gray!6}{0.54} & \cellcolor{gray!6}{0.17} & \cellcolor{gray!6}{0.014} & \cellcolor{gray!6}{0.72} & \cellcolor{gray!6}{0.110} & \cellcolor{gray!6}{1.5e-02} & \cellcolor{gray!6}{0.71} & \cellcolor{gray!6}{0.087} & \cellcolor{gray!6}{1.6e-03}\\ cg27241845 & \em{ALPPL2} & 2:233250370 & 0.67 & 0.055 & 1.4e-07 & 0.68 & 0.067 & 1.7e-05 & 0.67 & 0.069 & 2.5e-05 & 0.75 & 0.21 & 0.193 & 0.68 & 0.108 & 5.0e-03 & 0.73 & 0.087 & 3.1e-03\\ \addlinespace \cellcolor{gray!6}{cg23387569} & \em{\cellcolor{gray!6}{AGAP2}} & \cellcolor{gray!6}{12:58120011} & \cellcolor{gray!6}{0.71} & \cellcolor{gray!6}{0.049} & \cellcolor{gray!6}{1.5e-07} & \cellcolor{gray!6}{0.70} & \cellcolor{gray!6}{0.058} & \cellcolor{gray!6}{3.7e-06} & \cellcolor{gray!6}{0.68} & \cellcolor{gray!6}{0.059} & \cellcolor{gray!6}{1.9e-06} & \cellcolor{gray!6}{0.79} & \cellcolor{gray!6}{0.16} & \cellcolor{gray!6}{0.169} & \cellcolor{gray!6}{0.71} & \cellcolor{gray!6}{0.107} & \cellcolor{gray!6}{1.0e-02} & \cellcolor{gray!6}{0.75} & \cellcolor{gray!6}{0.079} & \cellcolor{gray!6}{2.5e-03}\\ 
cg09935388 & \em{GFI1} & 1:92947588 & 0.68 & 0.055 & 2.5e-07 & 0.67 & 0.066 & 9.7e-06 & 0.67 & 0.070 & 3.0e-05 & 0.96 & 0.24 & 0.844 & 0.74 & 0.127 & 4.2e-02 & 0.68 & 0.075 & 1.1e-04\\ \cellcolor{gray!6}{cg01901332} & \em{\cellcolor{gray!6}{ARRB1}} & \cellcolor{gray!6}{11:75031054} & \cellcolor{gray!6}{0.72} & \cellcolor{gray!6}{0.048} & \cellcolor{gray!6}{2.8e-07} & \cellcolor{gray!6}{0.69} & \cellcolor{gray!6}{0.064} & \cellcolor{gray!6}{1.1e-05} & \cellcolor{gray!6}{0.66} & \cellcolor{gray!6}{0.064} & \cellcolor{gray!6}{2.2e-06} & \cellcolor{gray!6}{1.02} & \cellcolor{gray!6}{0.21} & \cellcolor{gray!6}{0.922} & \cellcolor{gray!6}{0.60} & \cellcolor{gray!6}{0.093} & \cellcolor{gray!6}{1.5e-04} & \cellcolor{gray!6}{0.78} & \cellcolor{gray!6}{0.072} & \cellcolor{gray!6}{3.9e-03}\\ cg25305703 & \em{CASC21} & 8:128378218 & 0.72 & 0.049 & 4.5e-07 & 0.72 & 0.067 & 1.1e-04 & 0.71 & 0.069 & 1.5e-04 & 0.80 & 0.17 & 0.210 & 0.76 & 0.106 & 2.6e-02 & 0.77 & 0.075 & 3.2e-03\\ \cellcolor{gray!6}{cg16823042} & \em{\cellcolor{gray!6}{AGAP2}} & \cellcolor{gray!6}{12:58119992} & \cellcolor{gray!6}{0.74} & \cellcolor{gray!6}{0.049} & \cellcolor{gray!6}{1.1e-06} & \cellcolor{gray!6}{0.73} & \cellcolor{gray!6}{0.058} & \cellcolor{gray!6}{1.5e-05} & \cellcolor{gray!6}{0.70} & \cellcolor{gray!6}{0.059} & \cellcolor{gray!6}{5.9e-06} & \cellcolor{gray!6}{0.83} & \cellcolor{gray!6}{0.18} & \cellcolor{gray!6}{0.309} & \cellcolor{gray!6}{0.72} & \cellcolor{gray!6}{0.100} & \cellcolor{gray!6}{7.4e-03} & \cellcolor{gray!6}{0.80} & \cellcolor{gray!6}{0.080} & \cellcolor{gray!6}{1.3e-02}\\ \addlinespace cg08709672 & \em{AVPR1B} & 1:206224334 & 0.75 & 0.048 & 1.4e-06 & 0.76 & 0.058 & 1.1e-04 & 0.74 & 0.060 & 5.3e-05 & 0.73 & 0.17 & 0.102 & 0.74 & 0.085 & 3.5e-03 & 0.82 & 0.079 & 2.1e-02\\ \bottomrule \multicolumn{21}{l}{\textsuperscript{} Meta-analyses of lung cancer EWAS adjusted for study specific covariates (basic, N = 1809),}\\ \multicolumn{21}{l}{\textsuperscript{} basic model + surrogate 
variables (sv-adjusted, N = 1809), basic model + surrogate variables + derived cell counts (sv-and-cell-count, N = 1809).}\\ \multicolumn{21}{l}{\textsuperscript{} Meta-analyses were also conducted stratified by smoking status (never-smokers (N = 304), former-smokers (N = 648), current-smokers (N = 857)) using the basic model}\\ \multicolumn{21}{l}{\textsuperscript{} OR = odds ratio per SD increase in DNA methylation, SE = standard error, chr:pos = chromosome:position}\\ \end{tabular}} \end{table} \end{landscape} \begin{landscape}\begin{table}[!h] \caption{(\#tab:sup-tab2-07)Heterogeneity between studies and smoker groups in the meta-analysis of EWAS in four cohorts} \centering \resizebox{\linewidth}{!}{ \begin{tabular}[t]{llllllllllllllllllllll} \toprule \multicolumn{1}{c}{ } & \multicolumn{3}{c}{basic} & \multicolumn{3}{c}{sv-adjusted} & \multicolumn{3}{c}{sv-and-cell-count} & \multicolumn{3}{c}{never-smokers} & \multicolumn{3}{c}{former-smokers} & \multicolumn{3}{c}{current-smokers} & \multicolumn{3}{c}{comp} \\ \cmidrule(l{3pt}r{3pt}){2-4} \cmidrule(l{3pt}r{3pt}){5-7} \cmidrule(l{3pt}r{3pt}){8-10} \cmidrule(l{3pt}r{3pt}){11-13} \cmidrule(l{3pt}r{3pt}){14-16} \cmidrule(l{3pt}r{3pt}){17-19} \cmidrule(l{3pt}r{3pt}){20-22} CpG & Dir & I\textsuperscript{2} & P & Dir & I\textsuperscript{2} & P & Dir & I\textsuperscript{2} & P & Dir & I\textsuperscript{2} & P & Dir & I\textsuperscript{2} & P & Dir & I\textsuperscript{2} & P & Dir & I\textsuperscript{2} & P\\ \midrule \cellcolor{gray!6}{cg01901332} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{0} & \cellcolor{gray!6}{0.631} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{12} & \cellcolor{gray!6}{0.335} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{29} & \cellcolor{gray!6}{0.237} & \cellcolor{gray!6}{{+}{-}{-}{-}} & \cellcolor{gray!6}{11} & \cellcolor{gray!6}{0.337} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{0} & \cellcolor{gray!6}{0.970} & \cellcolor{gray!6}{{-}{-}{-}{-}} & 
\cellcolor{gray!6}{18} & \cellcolor{gray!6}{0.301} & \cellcolor{gray!6}{{+}{-}{-}} & \cellcolor{gray!6}{63} & \cellcolor{gray!6}{0.066}\\ cg01940273 & {-}{-}{-}{-} & 35 & 0.201 & {-}{-}{-}{-} & 61 & 0.053 & {-}{-}{-}{-} & 60 & 0.056 & {+}{-}{-}{+} & 59 & 0.064 & {-}{-}{-}{-} & 7 & 0.356 & {-}{-}{-}{+} & 34 & 0.206 & {+}{-}{-} & 81 & 0.005\\ \cellcolor{gray!6}{cg03636183} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{42} & \cellcolor{gray!6}{0.159} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{76} & \cellcolor{gray!6}{0.006} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{71} & \cellcolor{gray!6}{0.015} & \cellcolor{gray!6}{{+}{-}{+}{-}} & \cellcolor{gray!6}{26} & \cellcolor{gray!6}{0.254} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{30} & \cellcolor{gray!6}{0.231} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{0} & \cellcolor{gray!6}{0.540} & \cellcolor{gray!6}{{-}{-}{-}} & \cellcolor{gray!6}{33} & \cellcolor{gray!6}{0.225}\\ cg05575921 & {-}{-}{-}{-} & 47 & 0.131 & {-}{-}{-}{-} & 73 & 0.012 & {-}{-}{-}{-} & 70 & 0.018 & {+}{-}{-}{+} & 0 & 0.481 & {-}{-}{-}{-} & 0 & 0.433 & {-}{-}{-}{-} & 34 & 0.207 & {-}{-}{-} & 71 & 0.033\\ \cellcolor{gray!6}{cg05951221} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{45} & \cellcolor{gray!6}{0.139} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{61} & \cellcolor{gray!6}{0.053} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{54} & \cellcolor{gray!6}{0.092} & \cellcolor{gray!6}{{-}{-}{+}{+}} & \cellcolor{gray!6}{0} & \cellcolor{gray!6}{0.862} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{33} & \cellcolor{gray!6}{0.217} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{15} & \cellcolor{gray!6}{0.315} & \cellcolor{gray!6}{{-}{-}{-}} & \cellcolor{gray!6}{44} & \cellcolor{gray!6}{0.168}\\ \addlinespace cg06126421 & {?}{-}{-}{-} & 69 & 0.041 & {-}{-}{-}{-} & 67 & 0.027 & {-}{-}{-}{-} & 68 & 0.024 & {?}{-}{-}{-} & 0 & 0.464 & {?}{-}{-}{-} & 11 & 
0.326 & {?}{-}{-}{-} & 0 & 0.400 & {-}{-}{-} & 22 & 0.278\\ \cellcolor{gray!6}{cg08709672} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{12} & \cellcolor{gray!6}{0.333} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{52} & \cellcolor{gray!6}{0.101} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{57} & \cellcolor{gray!6}{0.071} & \cellcolor{gray!6}{{-}{-}{+}{+}} & \cellcolor{gray!6}{55} & \cellcolor{gray!6}{0.085} & \cellcolor{gray!6}{{-}{-}{-}{+}} & \cellcolor{gray!6}{0} & \cellcolor{gray!6}{0.584} & \cellcolor{gray!6}{{-}{-}{-}{+}} & \cellcolor{gray!6}{0} & \cellcolor{gray!6}{0.735} & \cellcolor{gray!6}{{-}{-}{-}} & \cellcolor{gray!6}{0} & \cellcolor{gray!6}{0.657}\\ cg09935388 & {-}{-}{-}{-} & 20 & 0.291 & {-}{-}{-}{-} & 50 & 0.110 & {-}{-}{-}{-} & 29 & 0.241 & {-}{-}{-}{+} & 0 & 0.967 & {-}{+}{-}{-} & 60 & 0.056 & {-}{-}{-}{-} & 0 & 0.729 & {-}{-}{-} & 0 & 0.529\\ \cellcolor{gray!6}{cg11660018} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{0} & \cellcolor{gray!6}{0.476} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{7} & \cellcolor{gray!6}{0.358} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{0} & \cellcolor{gray!6}{0.455} & \cellcolor{gray!6}{{+}{-}{+}{+}} & \cellcolor{gray!6}{0} & \cellcolor{gray!6}{0.699} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{9} & \cellcolor{gray!6}{0.349} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{0} & \cellcolor{gray!6}{0.557} & \cellcolor{gray!6}{{-}{-}{-}} & \cellcolor{gray!6}{16} & \cellcolor{gray!6}{0.305}\\ cg16823042 & {-}{-}{-}{-} & 13 & 0.330 & {-}{-}{+}{-} & 0 & 0.482 & {-}{-}{+}{-} & 0 & 0.554 & {+}{-}{+}{-} & 19 & 0.293 & {-}{-}{-}{+} & 68 & 0.024 & {-}{-}{-}{+} & 0 & 0.919 & {-}{-}{-} & 0 & 0.648\\ \addlinespace \cellcolor{gray!6}{cg21566642} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{46} & \cellcolor{gray!6}{0.136} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{38} & \cellcolor{gray!6}{0.184} & 
\cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{18} & \cellcolor{gray!6}{0.300} & \cellcolor{gray!6}{{+}{-}{+}{+}} & \cellcolor{gray!6}{0} & \cellcolor{gray!6}{0.681} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{0} & \cellcolor{gray!6}{0.798} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{65} & \cellcolor{gray!6}{0.035} & \cellcolor{gray!6}{{-}{-}{-}} & \cellcolor{gray!6}{69} & \cellcolor{gray!6}{0.040}\\ cg23387569 & {-}{-}{-}{-} & 29 & 0.239 & {-}{-}{+}{-} & 35 & 0.204 & {-}{-}{+}{-} & 22 & 0.279 & {-}{-}{+}{-} & 0 & 0.633 & {-}{-}{-}{+} & 76 & 0.005 & {-}{-}{-}{+} & 0 & 0.624 & {-}{-}{-} & 0 & 0.890\\ \cellcolor{gray!6}{cg23771366} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{42} & \cellcolor{gray!6}{0.161} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{75} & \cellcolor{gray!6}{0.007} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{72} & \cellcolor{gray!6}{0.013} & \cellcolor{gray!6}{{+}{+}{+}{+}} & \cellcolor{gray!6}{0} & \cellcolor{gray!6}{0.805} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{0} & \cellcolor{gray!6}{0.518} & \cellcolor{gray!6}{{-}{-}{-}{+}} & \cellcolor{gray!6}{27} & \cellcolor{gray!6}{0.249} & \cellcolor{gray!6}{{+}{-}{-}} & \cellcolor{gray!6}{81} & \cellcolor{gray!6}{0.006}\\ cg25305703 & {-}{-}{-}{-} & 53 & 0.096 & {-}{-}{-}{-} & 0 & 0.461 & {-}{-}{-}{-} & 4 & 0.373 & {-}{-}{-}{-} & 0 & 0.780 & {-}{-}{-}{-} & 28 & 0.246 & {-}{-}{-}{-} & 0 & 0.793 & {-}{-}{-} & 0 & 0.981\\ \cellcolor{gray!6}{cg26963277} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{0} & \cellcolor{gray!6}{0.512} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{0} & \cellcolor{gray!6}{0.516} & \cellcolor{gray!6}{{-}{-}{-}{-}} & \cellcolor{gray!6}{0} & \cellcolor{gray!6}{0.430} & \cellcolor{gray!6}{{-}{-}{+}{+}} & \cellcolor{gray!6}{53} & \cellcolor{gray!6}{0.095} & \cellcolor{gray!6}{{-}{-}{-}{+}} & \cellcolor{gray!6}{0} & \cellcolor{gray!6}{0.466} & \cellcolor{gray!6}{{-}{-}{-}{-}} & 
\cellcolor{gray!6}{17} & \cellcolor{gray!6}{0.308} & \cellcolor{gray!6}{{-}{-}{-}} & \cellcolor{gray!6}{10} & \cellcolor{gray!6}{0.329}\\ \addlinespace cg27241845 & {-}{-}{-}{-} & 57 & 0.075 & {-}{-}{-}{-} & 48 & 0.122 & {-}{-}{-}{-} & 41 & 0.163 & {-}{-}{-}{+} & 0 & 0.643 & {-}{-}{-}{+} & 0 & 0.672 & {-}{-}{-}{-} & 32 & 0.221 & {-}{-}{-} & 0 & 0.846\\ \bottomrule \multicolumn{22}{l}{\textsuperscript{} Dir = Direction of effect}\\ \multicolumn{22}{l}{\textsuperscript{} I\textsuperscript{2} = Heterogeneity I-squared value}\\ \multicolumn{22}{l}{\textsuperscript{} P = Heterogeneity P value}\\ \multicolumn{22}{l}{\textsuperscript{} chr:pos = chromosome:position}\\ \multicolumn{22}{l}{\textsuperscript{} sv-adjusted = surrogate variables included as covariates in analysis}\\ \multicolumn{22}{l}{\textsuperscript{} sv-and-cell-count = surrogate variables and derived cell counts included as covariates in analysis}\\ \multicolumn{22}{l}{\textsuperscript{} never-smokers = basic model in never smokers only}\\ \multicolumn{22}{l}{\textsuperscript{} former-smokers = basic model in former smokers only}\\ \multicolumn{22}{l}{\textsuperscript{} current-smokers = basic model in current smokers only}\\ \multicolumn{22}{l}{\textsuperscript{} comp = comparison of smoker groups.}\\ \end{tabular}} \end{table} \end{landscape} ### Mendelian randomization {#results-mendelian-randomization-07} I identified 15 independent mQTLs (r^2^<0.01) associated with methylation at 14 of 16 CpGs. Ten mQTLs replicated at FDR<0.05 in NSHDS (__Table \@ref(tab:sup-tab3-07)__). MR power analyses indicated >99% power to detect ORs for lung cancer of the same magnitude as those in the meta-analysis of EWAS. There was little evidence for an effect of methylation at these 14 sites on lung cancer (FDR>0.05, __Table \@ref(tab:sup-tab4-07)__). 
For nine of 14 CpG sites the point estimates from the MR analysis were in the same direction as in the EWAS, but of a much smaller magnitude (Z-test for difference, P<0.001) (__Figure \@ref(fig:fig3-07)__). For 9 out of the 16 mQTL-CpG associations, there was strong replication across time points (__Table \@ref(tab:sup-tab5-07)__) and 10 out of 16 mQTL-CpG associations replicated at FDR<0.05 in an independent adult cohort (NSHDS). Using mQTL effect estimates from NSHDS for the 10 CpG sites that replicated (FDR<0.05), findings were consistent with limited evidence for a causal effect of peripheral blood-derived DNA methylation on lung cancer (__Figure \@ref(fig:sup-fig1-07)__). \begin{landscape}\begin{table}[!h] \caption{(\#tab:sup-tab3-07)The SNP-exposure association estimates from ARIES and NSHDS} \centering \resizebox{\linewidth}{!}{ \begin{tabular}[t]{ll>{}lllllllllll} \toprule CpG & CpG chr:pos & Gene & SNP & SNP chr:pos & A1 & A2 & MAF & Beta (95\% CI) & P & NSHDS Beta (95\% CI) & NSHDS P & Trans\\ \midrule \cellcolor{gray!6}{cg16823042} & \cellcolor{gray!6}{12:58119992} & \em{\cellcolor{gray!6}{AGAP2}} & \cellcolor{gray!6}{rs1048691} & \cellcolor{gray!6}{12:58152948} & \cellcolor{gray!6}{T} & \cellcolor{gray!6}{C} & \cellcolor{gray!6}{0.207} & \cellcolor{gray!6}{0.321 (0.216, 0.426)} & \cellcolor{gray!6}{2.8e-09} & \cellcolor{gray!6}{0.176 (0.031, 0.322)} & \cellcolor{gray!6}{1.8e-02} & \cellcolor{gray!6}{N}\\ cg23387569 & 12:58120011 & \em{AGAP2} & rs1048691 & 12:58152948 & T & C & 0.208 & 0.355 (0.251, 0.458) & 3.9e-11 & 0.186 (0.04, 0.331) & 1.2e-02 & N\\ \cellcolor{gray!6}{cg05575921*} & \cellcolor{gray!6}{5:373378} & \em{\cellcolor{gray!6}{AHRR}} & \cellcolor{gray!6}{rs11746538} & \cellcolor{gray!6}{5:427466} & \cellcolor{gray!6}{A} & \cellcolor{gray!6}{C} & \cellcolor{gray!6}{0.121} & \cellcolor{gray!6}{-0.369 (-0.482, -0.255)} & \cellcolor{gray!6}{3.0e-10} & \cellcolor{gray!6}{-0.062 (-0.315, 0.19)} & \cellcolor{gray!6}{6.3e-01} & 
\cellcolor{gray!6}{N}\\ cg05575921* & 5:373378 & \em{AHRR} & rs11744553 & 5:26366 & C & G & 0.311 & 0.217 (0.139, 0.295) & 7.2e-08 & 0.085 (-0.058, 0.228) & 2.4e-01 & N\\ \cellcolor{gray!6}{cg27241845} & \cellcolor{gray!6}{2:233250370} & \em{\cellcolor{gray!6}{ALPPL2}} & \cellcolor{gray!6}{rs56080708} & \cellcolor{gray!6}{2:233274475} & \cellcolor{gray!6}{A} & \cellcolor{gray!6}{C} & \cellcolor{gray!6}{0.078} & \cellcolor{gray!6}{0.716 (0.579, 0.852)} & \cellcolor{gray!6}{2.4e-23} & \cellcolor{gray!6}{0.464 (0.244, 0.684)} & \cellcolor{gray!6}{3.6e-05} & \cellcolor{gray!6}{N}\\ \addlinespace cg05951221 & 2:233284402 & \em{ALPPL2} & rs878481 & 2:233285872 & G & C & 0.408 & -0.319 (-0.401, -0.237) & 5.9e-14 & -0.182 (-0.313, -0.052) & 6.0e-03 & N\\ \cellcolor{gray!6}{cg21566642*} & \cellcolor{gray!6}{2:233284661} & \em{\cellcolor{gray!6}{ALPPL2}} & \cellcolor{gray!6}{rs3748971} & \cellcolor{gray!6}{2:233250683} & \cellcolor{gray!6}{T} & \cellcolor{gray!6}{C} & \cellcolor{gray!6}{0.074} & \cellcolor{gray!6}{-0.593 (-0.743, -0.443)} & \cellcolor{gray!6}{2.7e-14} & \cellcolor{gray!6}{0.111 (-0.115, 0.338)} & \cellcolor{gray!6}{3.4e-01} & \cellcolor{gray!6}{N}\\ cg01901332* & 11:75031054 & \em{ARRB1} & rs7927381 & 11:67346743 & T & C & 0.082 & 0.382 (0.247, 0.517) & 3.9e-08 & -0.191 (-0.4, 0.018) & 7.3e-02 & Y\\ \cellcolor{gray!6}{cg01901332*} & \cellcolor{gray!6}{11:75031054} & \em{\cellcolor{gray!6}{ARRB1}} & \cellcolor{gray!6}{rs13087163} & \cellcolor{gray!6}{3:77329538} & \cellcolor{gray!6}{A} & \cellcolor{gray!6}{C} & \cellcolor{gray!6}{0.390} & \cellcolor{gray!6}{-0.194 (-0.263, -0.124)} & \cellcolor{gray!6}{5.8e-08} & \cellcolor{gray!6}{0.11 (-0.019, 0.239)} & \cellcolor{gray!6}{9.4e-02} & \cellcolor{gray!6}{Y}\\ cg08709672* & 1:206224334 & \em{AVPR1B} & rs77433148 & 5:135967502 & T & A & 0.018 & -0.804 (-1.07, -0.535) & 6.3e-09 & -0.221 (-0.784, 0.342) & 4.4e-01 & Y\\ \addlinespace \cellcolor{gray!6}{cg25305703} & \cellcolor{gray!6}{8:128378218} & 
\em{\cellcolor{gray!6}{CASC21}} & \cellcolor{gray!6}{rs9643220} & \cellcolor{gray!6}{8:128386926} & \cellcolor{gray!6}{A} & \cellcolor{gray!6}{G} & \cellcolor{gray!6}{0.227} & \cellcolor{gray!6}{0.343 (0.247, 0.440)} & \cellcolor{gray!6}{7.1e-12} & \cellcolor{gray!6}{0.232 (0.078, 0.385)} & \cellcolor{gray!6}{3.0e-03} & \cellcolor{gray!6}{N}\\ cg03636183 & 19:17000585 & \em{F2RL3} & rs734568 & 19:17015685 & T & C & 0.361 & 0.284 (0.199, 0.368) & 6.7e-11 & 0.203 (0.074, 0.332) & 2.0e-03 & N\\ \cellcolor{gray!6}{cg09935388} & \cellcolor{gray!6}{1:92947588} & \em{\cellcolor{gray!6}{GFI1}} & \cellcolor{gray!6}{rs17518433} & \cellcolor{gray!6}{1:92599172} & \cellcolor{gray!6}{A} & \cellcolor{gray!6}{T} & \cellcolor{gray!6}{0.236} & \cellcolor{gray!6}{-0.330 (-0.421, -0.240)} & \cellcolor{gray!6}{1.8e-12} & \cellcolor{gray!6}{-0.186 (-0.339, -0.033)} & \cellcolor{gray!6}{1.7e-02} & \cellcolor{gray!6}{N}\\ cg26963277 & 11:2722407 & \em{KCNQ1} & rs463924 & 11:2717680 & T & C & 0.304 & -0.394 (-0.482, -0.307) & 6.8e-18 & -0.277 (-0.41, -0.145) & 4.0e-05 & N\\ \cellcolor{gray!6}{cg11660018} & \cellcolor{gray!6}{11:86510915} & \em{\cellcolor{gray!6}{PRSS23}} & \cellcolor{gray!6}{rs1939110} & \cellcolor{gray!6}{11:86515072} & \cellcolor{gray!6}{T} & \cellcolor{gray!6}{C} & \cellcolor{gray!6}{0.286} & \cellcolor{gray!6}{-0.404 (-0.498, -0.309)} & \cellcolor{gray!6}{2.6e-16} & \cellcolor{gray!6}{-0.229 (-0.385, -0.073)} & \cellcolor{gray!6}{4.0e-03} & \cellcolor{gray!6}{N}\\ \addlinespace cg23771366 & 11:86510998 & \em{PRSS23} & rs72967500 & 11:86505120 & T & C & 0.132 & -0.628 (-0.750, -0.506) & 1.3e-22 & -0.35 (-0.534, -0.166) & 1.9e-04 & N\\ \bottomrule \multicolumn{13}{l}{\textsuperscript{} * = SNPs used as an instrumental variables were not replicated in the independent dataset (NSHDS)}\\ \multicolumn{13}{l}{\textsuperscript{} Trans = trans mQTL (Yes/No)}\\ \multicolumn{13}{l}{\textsuperscript{} chr:position = chromosome:position}\\ \multicolumn{13}{l}{\textsuperscript{} 
MAF = minor allele frequency}\\ \multicolumn{13}{l}{\textsuperscript{} A1 = effect allele}\\ \multicolumn{13}{l}{\textsuperscript{} P = P value}\\ \end{tabular}} \end{table} \end{landscape} \begin{table}[!h] \caption{(\#tab:sup-tab4-07)Full results for MR of DNA methylation of 14 CpG sites on lung cancer} \centering \resizebox{\linewidth}{!}{ \begin{tabular}[t]{>{}llllll} \toprule Gene & CpG & N SNP & Outcome & OR (95\% CI) & P\\ \midrule \em{\cellcolor{gray!6}{AGAP2}} & \cellcolor{gray!6}{cg16823042} & \cellcolor{gray!6}{1} & \cellcolor{gray!6}{Lung cancer} & \cellcolor{gray!6}{0.937 (0.858, 1.02)} & \cellcolor{gray!6}{0.149}\\ \em{AGAP2} & cg23387569 & 1 & Lung cancer & 0.943 (0.871, 1.02) & 0.149\\ \em{\cellcolor{gray!6}{AHRR}} & \cellcolor{gray!6}{cg05575921*} & \cellcolor{gray!6}{2} & \cellcolor{gray!6}{Lung cancer} & \cellcolor{gray!6}{0.936 (0.870, 1.01)} & \cellcolor{gray!6}{0.081}\\ \em{ALPPL2} & cg27241845 & 1 & Lung cancer & 0.981 (0.926, 1.04) & 0.522\\ \em{\cellcolor{gray!6}{ALPPL2}} & \cellcolor{gray!6}{cg05951221} & \cellcolor{gray!6}{1} & \cellcolor{gray!6}{Lung cancer} & \cellcolor{gray!6}{1.02 (0.949, 1.10)} & \cellcolor{gray!6}{0.558}\\ \addlinespace \em{ALPPL2} & cg21566642* & 1 & Lung cancer & 0.922 (0.847, 1.00) & 0.058\\ \em{\cellcolor{gray!6}{ARRB1}} & \cellcolor{gray!6}{cg01901332*} & \cellcolor{gray!6}{2} & \cellcolor{gray!6}{Lung cancer} & \cellcolor{gray!6}{0.943 (0.871, 1.02)} & \cellcolor{gray!6}{0.146}\\ \em{AVPR1B} & cg08709672* & 1 & Lung cancer & 1.08 (0.954, 1.21) & 0.235\\ \em{\cellcolor{gray!6}{CASC21}} & \cellcolor{gray!6}{cg25305703} & \cellcolor{gray!6}{1} & \cellcolor{gray!6}{Lung cancer} & \cellcolor{gray!6}{1.00 (0.924, 1.09)} & \cellcolor{gray!6}{0.956}\\ \em{F2RL3} & cg03636183 & 1 & Lung cancer & 0.942 (0.864, 1.03) & 0.172\\ \addlinespace \em{\cellcolor{gray!6}{GFI1}} & \cellcolor{gray!6}{cg09935388} & \cellcolor{gray!6}{1} & \cellcolor{gray!6}{Lung cancer} & \cellcolor{gray!6}{1.03 (0.941, 1.12)} & 
\cellcolor{gray!6}{0.554}\\ \em{KCNQ1} & cg26963277 & 1 & Lung cancer & 0.962 (0.903, 1.03) & 0.236\\ \em{\cellcolor{gray!6}{PRSS23}} & \cellcolor{gray!6}{cg11660018} & \cellcolor{gray!6}{1} & \cellcolor{gray!6}{Lung cancer} & \cellcolor{gray!6}{0.972 (0.912, 1.04)} & \cellcolor{gray!6}{0.372}\\ \em{PRSS23} & cg23771366 & 1 & Lung cancer & 0.953 (0.901, 1.01) & 0.086\\ \bottomrule \multicolumn{6}{l}{\textsuperscript{} N SNP = number of SNPs used in the analysis as instrumental variables}\\ \multicolumn{6}{l}{\textsuperscript{} * = Instrumental variables for that CpG site did not replicate in an independent}\\ \multicolumn{6}{l}{dataset (NSHDS)}\\ \multicolumn{6}{l}{\textsuperscript{} Where N SNP = 1, the Wald ratio estimate is used}\\ \multicolumn{6}{l}{\textsuperscript{} Where N SNP > 1, the Wald ratio estimates were meta-analyzed and the estimates}\\ \multicolumn{6}{l}{were weighted by the inverse variance of the association with the outcome}\\ \end{tabular}} \end{table} <!-- \pagebreak --> (ref:fig3-07-cap) __Mendelian randomization (MR) vs. observational analysis__. Two-sample MR was carried out with methylation at 14/16 CpG sites identified in the EWAS meta-analysis as the exposure and lung cancer as the outcome. cg01901332 and cg05575921 had 2 instruments so the estimate was calculated using the inverse variance weighted method, for the rest the MR estimate was calculated using a Wald ratio. Only 14 of 16 sites could be instrumented using mQTLs from mqtldb.org. * = instrumental variable not replicated in independent dataset (NSHDS). The sites for which instrumental variables have not been replicated are cg01901332, cg21566642, cg05575921 and cg08709672. OR = odds ratio per SD increase in DNA methylation. (ref:fig3-07-scap) Mendelian randomization (MR) vs. 
observational analysis \begin{figure} {\centering \includegraphics[width=1\linewidth]{figure/07-dnam_lungcancer_mr/Figure_3} } \caption[(ref:fig3-07-scap)]{(ref:fig3-07-cap)}(\#fig:fig3-07) \end{figure} \begin{landscape}\begin{table}[!h] \caption{(\#tab:sup-tab5-07)The association between mQTLs and their CpG sites across the five timepoints in ARIES} \centering \resizebox{\linewidth}{!}{ \begin{tabular}[t]{l>{}lllllllllllllllll} \toprule \multicolumn{3}{c}{ } & \multicolumn{3}{c}{During pregnancy} & \multicolumn{3}{c}{Middle age} & \multicolumn{3}{c}{Birth} & \multicolumn{3}{c}{Childhood} & \multicolumn{3}{c}{Adolescence} \\ \cmidrule(l{3pt}r{3pt}){4-6} \cmidrule(l{3pt}r{3pt}){7-9} \cmidrule(l{3pt}r{3pt}){10-12} \cmidrule(l{3pt}r{3pt}){13-15} \cmidrule(l{3pt}r{3pt}){16-18} CpG & Gene & SNP & Beta & SE & P & Beta & SE & P & Beta & SE & P & Beta & SE & P & Beta & SE & P\\ \midrule \cellcolor{gray!6}{cg01901332} & \em{\cellcolor{gray!6}{ARRB1}} & \cellcolor{gray!6}{rs13087163} & \cellcolor{gray!6}{-0.007} & \cellcolor{gray!6}{0.003} & \cellcolor{gray!6}{4.1e-02} & \cellcolor{gray!6}{-0.004} & \cellcolor{gray!6}{0.003} & \cellcolor{gray!6}{2.5e-01} & \cellcolor{gray!6}{-0.003} & \cellcolor{gray!6}{0.002} & \cellcolor{gray!6}{1.1e-01} & \cellcolor{gray!6}{-0.003} & \cellcolor{gray!6}{0.003} & \cellcolor{gray!6}{1.7e-01} & \cellcolor{gray!6}{-0.015} & \cellcolor{gray!6}{0.003} & \cellcolor{gray!6}{2.9e-07}\\ cg01901332 & \em{ARRB1} & rs7927381 & -0.003 & 0.006 & 5.9e-01 & -0.005 & 0.006 & 4.2e-01 & 0.015 & 0.003 & 7.9e-07 & 0.000 & 0.004 & 9.8e-01 & 0.004 & 0.005 & 4.5e-01\\ \cellcolor{gray!6}{cg03636183} & \em{\cellcolor{gray!6}{F2RL3}} & \cellcolor{gray!6}{rs734568} & \cellcolor{gray!6}{0.029} & \cellcolor{gray!6}{0.006} & \cellcolor{gray!6}{3.6e-07} & \cellcolor{gray!6}{0.027} & \cellcolor{gray!6}{0.005} & \cellcolor{gray!6}{4.7e-07} & \cellcolor{gray!6}{0.039} & \cellcolor{gray!6}{0.007} & \cellcolor{gray!6}{3.9e-08} & \cellcolor{gray!6}{0.031} & 
\cellcolor{gray!6}{0.005} & \cellcolor{gray!6}{2.1e-10} & \cellcolor{gray!6}{0.031} & \cellcolor{gray!6}{0.005} & \cellcolor{gray!6}{3.6e-10}\\ cg05575921 & \em{AHRR} & rs11744553 & 0.002 & 0.004 & 5.9e-01 & -0.005 & 0.004 & 2.0e-01 & 0.008 & 0.003 & 2.3e-03 & 0.009 & 0.002 & 1.3e-07 & 0.003 & 0.002 & 2.1e-01\\ \cellcolor{gray!6}{cg05575921} & \em{\cellcolor{gray!6}{AHRR}} & \cellcolor{gray!6}{rs11746538} & \cellcolor{gray!6}{-0.013} & \cellcolor{gray!6}{0.006} & \cellcolor{gray!6}{2.9e-02} & \cellcolor{gray!6}{-0.011} & \cellcolor{gray!6}{0.005} & \cellcolor{gray!6}{3.7e-02} & \cellcolor{gray!6}{-0.014} & \cellcolor{gray!6}{0.004} & \cellcolor{gray!6}{3.1e-04} & \cellcolor{gray!6}{-0.016} & \cellcolor{gray!6}{0.002} & \cellcolor{gray!6}{4.8e-11} & \cellcolor{gray!6}{-0.010} & \cellcolor{gray!6}{0.003} & \cellcolor{gray!6}{9.6e-04}\\ \addlinespace cg05951221 & \em{ALPPL2} & rs878481 & -0.006 & 0.001 & 1.3e-06 & -0.006 & 0.001 & 4.6e-06 & -0.001 & 0.000 & 1.6e-03 & -0.005 & 0.001 & 1.3e-08 & -0.005 & 0.001 & 1.9e-12\\ \cellcolor{gray!6}{cg08709672} & \em{\cellcolor{gray!6}{AVPR1B}} & \cellcolor{gray!6}{rs77433148} & \cellcolor{gray!6}{-0.014} & \cellcolor{gray!6}{0.008} & \cellcolor{gray!6}{9.1e-02} & \cellcolor{gray!6}{-0.004} & \cellcolor{gray!6}{0.008} & \cellcolor{gray!6}{6.2e-01} & \cellcolor{gray!6}{0.010} & \cellcolor{gray!6}{0.008} & \cellcolor{gray!6}{2.3e-01} & \cellcolor{gray!6}{-0.032} & \cellcolor{gray!6}{0.006} & \cellcolor{gray!6}{6.9e-07} & \cellcolor{gray!6}{0.002} & \cellcolor{gray!6}{0.007} & \cellcolor{gray!6}{8.2e-01}\\ cg09935388 & \em{GFI1} & rs17518433 & -0.037 & 0.009 & 3.6e-05 & -0.024 & 0.010 & 1.6e-02 & -0.028 & 0.010 & 6.0e-03 & -0.054 & 0.010 & 4.2e-08 & -0.063 & 0.010 & 9.9e-11\\ \cellcolor{gray!6}{cg11660018} & \em{\cellcolor{gray!6}{PRSS23}} & \cellcolor{gray!6}{rs1939110} & \cellcolor{gray!6}{-0.017} & \cellcolor{gray!6}{0.003} & \cellcolor{gray!6}{6.5e-10} & \cellcolor{gray!6}{-0.014} & \cellcolor{gray!6}{0.003} & 
\cellcolor{gray!6}{1.2e-05} & \cellcolor{gray!6}{-0.004} & \cellcolor{gray!6}{0.003} & \cellcolor{gray!6}{1.9e-01} & \cellcolor{gray!6}{-0.014} & \cellcolor{gray!6}{0.003} & \cellcolor{gray!6}{8.0e-07} & \cellcolor{gray!6}{-0.012} & \cellcolor{gray!6}{0.003} & \cellcolor{gray!6}{1.0e-05}\\ cg16823042 & \em{AGAP2} & rs1048691 & 0.016 & 0.003 & 5.1e-08 & 0.013 & 0.003 & 1.0e-05 & 0.011 & 0.004 & 3.5e-03 & 0.019 & 0.003 & 1.3e-08 & 0.015 & 0.004 & 3.5e-05\\ \addlinespace \cellcolor{gray!6}{cg21566642} & \em{\cellcolor{gray!6}{ALPPL2}} & \cellcolor{gray!6}{rs3748971} & \cellcolor{gray!6}{-0.009} & \cellcolor{gray!6}{0.006} & \cellcolor{gray!6}{1.6e-01} & \cellcolor{gray!6}{-0.011} & \cellcolor{gray!6}{0.006} & \cellcolor{gray!6}{6.9e-02} & \cellcolor{gray!6}{-0.007} & \cellcolor{gray!6}{0.003} & \cellcolor{gray!6}{6.2e-03} & \cellcolor{gray!6}{-0.034} & \cellcolor{gray!6}{0.004} & \cellcolor{gray!6}{9.2e-15} & \cellcolor{gray!6}{-0.024} & \cellcolor{gray!6}{0.004} & \cellcolor{gray!6}{2.4e-08}\\ cg23387569 & \em{AGAP2} & rs1048691 & 0.026 & 0.004 & 1.6e-10 & 0.020 & 0.004 & 2.2e-07 & 0.014 & 0.005 & 2.2e-03 & 0.027 & 0.004 & 2.6e-10 & 0.020 & 0.004 & 1.5e-06\\ \cellcolor{gray!6}{cg23771366} & \em{\cellcolor{gray!6}{PRSS23}} & \cellcolor{gray!6}{rs72967500} & \cellcolor{gray!6}{-0.010} & \cellcolor{gray!6}{0.002} & \cellcolor{gray!6}{2.4e-06} & \cellcolor{gray!6}{-0.014} & \cellcolor{gray!6}{0.003} & \cellcolor{gray!6}{6.7e-07} & \cellcolor{gray!6}{-0.010} & \cellcolor{gray!6}{0.001} & \cellcolor{gray!6}{1.8e-16} & \cellcolor{gray!6}{-0.011} & \cellcolor{gray!6}{0.001} & \cellcolor{gray!6}{2.1e-13} & \cellcolor{gray!6}{-0.012} & \cellcolor{gray!6}{0.002} & \cellcolor{gray!6}{1.3e-12}\\ cg25305703 & \em{CASC21} & rs9643220 & 0.028 & 0.004 & 1.5e-10 & 0.023 & 0.004 & 1.6e-07 & 0.032 & 0.005 & 2.6e-09 & 0.020 & 0.003 & 1.1e-08 & 0.018 & 0.004 & 3.3e-06\\ \cellcolor{gray!6}{cg26963277} & \em{\cellcolor{gray!6}{KCNQ1}} & \cellcolor{gray!6}{rs463924} & 
\cellcolor{gray!6}{-0.018} & \cellcolor{gray!6}{0.002} & \cellcolor{gray!6}{1.6e-14} & \cellcolor{gray!6}{-0.015} & \cellcolor{gray!6}{0.003} & \cellcolor{gray!6}{1.2e-08} & \cellcolor{gray!6}{-0.008} & \cellcolor{gray!6}{0.003} & \cellcolor{gray!6}{5.6e-03} & \cellcolor{gray!6}{-0.012} & \cellcolor{gray!6}{0.002} & \cellcolor{gray!6}{6.3e-09} & \cellcolor{gray!6}{-0.015} & \cellcolor{gray!6}{0.002} & \cellcolor{gray!6}{7.5e-12}\\ \addlinespace cg27241845 & \em{ALPPL2} & rs56080708 & 0.038 & 0.011 & 4.5e-04 & 0.052 & 0.012 & 2.5e-05 & 0.095 & 0.010 & 6.4e-22 & 0.102 & 0.010 & 8.4e-25 & 0.051 & 0.011 & 4.4e-06\\ \bottomrule \multicolumn{18}{l}{\textsuperscript{} P = p value}\\ \end{tabular}} \end{table} \end{landscape} (ref:sup-fig1-07-cap) __Comparison of two-sample Mendelian randomization results when using the discovery (ARIES, n = 1018) and replication (NSHDS, n = 468)__. On the left-hand side of each column the bracketed numbers represent the number of instrumental variables for that CpG site. (ref:sup-fig1-07-scap) Comparison of two-sample Mendelian randomization results when using the discovery (ARIES, n = 1018) and replication (NSHDS, n = 468) \begin{figure} {\centering \includegraphics[width=1\linewidth]{figure/07-dnam_lungcancer_mr/sup_fig1} } \caption[(ref:sup-fig1-07-scap)]{(ref:sup-fig1-07-cap)}(\#fig:sup-fig1-07) \end{figure} There was little evidence of different effect estimates between ever and never smokers at individual CpG sites (__Figure \@ref(fig:sup-fig2-07)__, Z-test for difference, P>0.5). 
There was some evidence for a possible effect of methylation at cg21566642-_ALPPL2_ and cg23771366-_PRSS23_ on squamous cell lung cancer (OR=0.85 [95% CI=0.75,0.97] and 0.91 [95% CI=0.84,1.00] per SD [14.4% and 5.8%] increase, respectively) as well as methylation at cg23387569-_AGAP2_, cg16823042-_AGAP2_, and cg01901332-_ARRB1_ on lung adenocarcinoma (OR=0.86 [95% CI=0.77,0.96], 0.84 [95% CI=0.74,0.95], and 0.89 [95% CI=0.80,1.00] per SD [9.47%, 8.35%, and 8.91%] increase, respectively). However, none of the results withstood multiple testing correction (FDR<0.05) (__Figure \@ref(fig:sup-fig3-07)__). For those CpGs where multiple mQTLs were used as instruments (cg05575921-_AHRR_ and cg01901332-_ARRB1_), there was limited evidence for heterogeneity in MR effect estimates (Q-test, P>0.05, __Table \@ref(tab:sup-tab6-07)__). (ref:sup-fig2-07-cap) __DNA methylation – lung cancer Mendelian randomization effect estimates in ever and never smokers__. On the left-hand side of each column the bracketed numbers represent the number of instrumental variables for that CpG site. * indicates that the SNP(s) being used to instrument that CpG site are more than 1MB away from the CpG site in the genome (trans). (ref:sup-fig2-07-scap) DNA methylation – lung cancer Mendelian randomization effect estimates in ever and never smokers \begin{figure} {\centering \includegraphics[width=1\linewidth]{figure/07-dnam_lungcancer_mr/sup_fig2-never_v_ever_smokers} } \caption[(ref:sup-fig2-07-scap)]{(ref:sup-fig2-07-cap)}(\#fig:sup-fig2-07) \end{figure} (ref:sup-fig3-07-cap) __Mendelian randomization of DNA methylation on three lung cancer subgroups__. On the left-hand side of each column the bracketed numbers represent the number of instrumental variables for that CpG site. * indicates that the SNP(s) being used to instrument that CpG site are more than 1MB away from the CpG site in the genome (trans). Squamous = squamous cell carcinoma, Small = small cell carcinoma, Adeno = adenocarcinoma. 
(ref:sup-fig3-07-scap) Mendelian randomization of DNA methylation on three lung cancer subgroups \begin{figure} {\centering \includegraphics[width=1\linewidth]{figure/07-dnam_lungcancer_mr/sup_fig3-lc_subtype_mr} } \caption[(ref:sup-fig3-07-scap)]{(ref:sup-fig3-07-cap)}(\#fig:sup-fig3-07) \end{figure} Single mQTLs for cg05575921-_AHRR_, cg27241845-_ALPPL2_, and cg26963277-_KCNQ1_ showed some evidence of association with smoking cessation (former vs. current smokers), although these associations were not below the FDR<0.05 threshold (__Figure \@ref(fig:sup-fig4-07)__). \pagebreak \begin{table}[!h] \caption{(\#tab:sup-tab6-07)Estimates of heterogeneity of MR estimates across multiple SNPs} \centering \resizebox{\linewidth}{!}{ \begin{tabular}[t]{lllll} \toprule CpG & outcome & N SNP & Q & P\\ \midrule \cellcolor{gray!6}{cg05575921} & \cellcolor{gray!6}{Lung cancer (ever)} & \cellcolor{gray!6}{2} & \cellcolor{gray!6}{1.838} & \cellcolor{gray!6}{0.17}\\ cg05575921 & Small cell lung cancer & 2 & 0.019 & 0.89\\ \cellcolor{gray!6}{cg05575921} & \cellcolor{gray!6}{Lung cancer (never)} & \cellcolor{gray!6}{2} & \cellcolor{gray!6}{1.424} & \cellcolor{gray!6}{0.23}\\ cg05575921 & Lung adenocarcinoma & 2 & 0.437 & 0.51\\ \cellcolor{gray!6}{cg05575921} & \cellcolor{gray!6}{Lung cancer} & \cellcolor{gray!6}{2} & \cellcolor{gray!6}{0.003} & \cellcolor{gray!6}{0.96}\\ \addlinespace cg05575921 & Squamous cell lung cancer & 2 & 1.168 & 0.28\\ \cellcolor{gray!6}{cg01901332} & \cellcolor{gray!6}{Lung cancer (ever)} & \cellcolor{gray!6}{2} & \cellcolor{gray!6}{0.085} & \cellcolor{gray!6}{0.77}\\ cg01901332 & Small cell lung cancer & 2 & 0.004 & 0.95\\ \cellcolor{gray!6}{cg01901332} & \cellcolor{gray!6}{Lung cancer (never)} & \cellcolor{gray!6}{2} & \cellcolor{gray!6}{0.321} & \cellcolor{gray!6}{0.57}\\ cg01901332 & Lung adenocarcinoma & 2 & 0.780 & 0.38\\ \addlinespace \cellcolor{gray!6}{cg01901332} & \cellcolor{gray!6}{Lung cancer} & \cellcolor{gray!6}{2} & \cellcolor{gray!6}{0.965} & 
\cellcolor{gray!6}{0.33}\\ cg01901332 & Squamous cell lung cancer & 2 & 1.266 & 0.26\\ \bottomrule \multicolumn{5}{l}{\textsuperscript{} N SNP = number of SNPs used in the analysis as instrumental}\\ \multicolumn{5}{l}{variables}\\ \multicolumn{5}{l}{\textsuperscript{} Q = Cochran’s Q statistic}\\ \multicolumn{5}{l}{\textsuperscript{} Lung cancer (ever) = lung cancer in ever smokers}\\ \multicolumn{5}{l}{\textsuperscript{} Lung cancer (never) = lung cancer in never smokers}\\ \multicolumn{5}{l}{\textsuperscript{} Where P < 0.05, there is good evidence of heterogeneity across}\\ \multicolumn{5}{l}{individual SNPs}\\ \end{tabular}} \end{table} (ref:sup-fig4-07-cap) __Associations of mQTLs and smoking behaviours__. Some SNPs that were genotyped in the TRICL consortium were not within the TAG consortium, thus were not available for analysis here. Units for the traits: AoI (age of smoking initiation) = log years, EvN (ever vs. never smoked) = log odds, FvS (former vs. current smoker) = log odds. CpD = cigarettes smoked per day. (ref:sup-fig4-07-scap) Associations of mQTLs and smoking behaviours \begin{figure}[!hp] {\centering \includegraphics[width=1\linewidth]{figure/07-dnam_lungcancer_mr/sup_fig4-mqtl_smoking_mr} } \caption[(ref:sup-fig4-07-scap)]{(ref:sup-fig4-07-cap)}(\#fig:sup-fig4-07) \end{figure} \pagebreak #### Potential causal effect of _AHRR_ methylation on lung cancer risk: one sample MR {#ahrr-one-sample-mr} In the CCHS, a per (average methylation-increasing) allele change in a four-mQTL allele score was associated with a 0.73% [95% CI=0.56,0.90] increase in methylation (P<1x10^-10^) and explained 0.8% of the variance in cg05575921-_AHRR_ methylation (F-statistic=74.2). Confounding factors were not strongly associated with the genotypes in this cohort (P>=0.11) (__Table \@ref(tab:sup-tab7-07)__). 
Results provided some evidence for an effect of cg05575921 methylation on total lung cancer risk (HR=0.30 [95% CI=0.10,1.00] per SD (9.2%) increase) (__Table \@ref(tab:sup-tab8-07)__). The effect estimate did not change substantively when stratified by smoking status (__Table \@ref(tab:sup-tab8-07)__). \linebreak \begin{table}[!h] \caption{(\#tab:sup-tab7-07)Association of \textit{AHRR} methylation and methylation allele score with confounding factors in the CCHS} \centering \resizebox{\linewidth}{!}{ \begin{tabular}[t]{lllllll} \toprule \multicolumn{1}{c}{ } & \multicolumn{3}{c}{Methylation} & \multicolumn{3}{c}{Allele score} \\ \cmidrule(l{3pt}r{3pt}){2-4} \cmidrule(l{3pt}r{3pt}){5-7} Confounder & Beta & 95 CI & P & Beta & 95 CI & P\\ \midrule \cellcolor{gray!6}{Sex} & \cellcolor{gray!6}{-0.62} & \cellcolor{gray!6}{-0.94; -0.29} & \cellcolor{gray!6}{2.0e-04} & \cellcolor{gray!6}{-1e-02} & \cellcolor{gray!6}{-0.04; 0.06} & \cellcolor{gray!6}{0.72}\\ Alcohol & 0.00 & -0.001; 0.009 & 8.5e-01 & 0e+00 & -0.000; 0.000 & 0.15\\ \cellcolor{gray!6}{Former vs never smokers} & \cellcolor{gray!6}{-3.07} & \cellcolor{gray!6}{-3.54; -2.61} & \cellcolor{gray!6}{2.0e-38} & \cellcolor{gray!6}{4e-03} & \cellcolor{gray!6}{-0.07; 0.08} & \cellcolor{gray!6}{0.91}\\ Current vs never smokers & -10.76 & -11.30; -10.22 & 1.0e-50 & 1e-02 & -0.08; 0.10 & 0.82\\ \cellcolor{gray!6}{Exposure to dust} & \cellcolor{gray!6}{-0.65} & \cellcolor{gray!6}{-1.04; -0.26} & \cellcolor{gray!6}{1.0e-03} & \cellcolor{gray!6}{-5e-02} & \cellcolor{gray!6}{-0.12; 0.01} & \cellcolor{gray!6}{0.11}\\ \addlinespace Exposure to passive smoking & -0.46 & -0.78; -0.15 & 4.0e-03 & -3e-02 & -0.08; 0.03 & 0.34\\ \cellcolor{gray!6}{Current use of tobacco to per cigarette equivalent} & \cellcolor{gray!6}{-0.06} & \cellcolor{gray!6}{-0.08; -0.03} & \cellcolor{gray!6}{9.0e-05} & \cellcolor{gray!6}{-1e-03} & \cellcolor{gray!6}{-0.005; 0.004} & \cellcolor{gray!6}{0.72}\\ Cumulative use of tobacco to per pack-year & -0.05 & 
-0.06; -0.04 & 4.0e-28 & -5e-04 & -0.002; 0.001 & 0.54\\ \bottomrule \multicolumn{7}{l}{\textsuperscript{} For the allele score, genotypic effects were scaled to equate to the same magnitude of effect as a per 1\% increase in}\\ \multicolumn{7}{l}{methylation. Regressions were adjusted for the other factors in the table}\\ \end{tabular}} \end{table} \begin{table}[!h] \caption{(\#tab:sup-tab8-07)One-sample MR analysis of the effect of \textit{AHRR} methylation (\%) on lung cancer risk in the CCHS} \centering \resizebox{\linewidth}{!}{ \begin{tabular}[t]{lllll} \toprule Smoking status & Total N & N events & Age and sex adjusted HR (95\% CI) & P\\ \midrule \cellcolor{gray!6}{All} & \cellcolor{gray!6}{8,758} & \cellcolor{gray!6}{357} & \cellcolor{gray!6}{0.88 (0.78; 1.00)} & \cellcolor{gray!6}{0.05}\\ Current & 4,262 & 305 & 0.90 (0.79; 1.03) & 0.12\\ \cellcolor{gray!6}{Former} & \cellcolor{gray!6}{2,548} & \cellcolor{gray!6}{43} & \cellcolor{gray!6}{0.86 (0.61; 1.22)} & \cellcolor{gray!6}{0.41}\\ Never & 1,948 & 9 & 0.83 (0.38; 1.85) & 0.66\\ \bottomrule \multicolumn{5}{l}{\textsuperscript{} HR = hazard ratio, P = P value}\\ \end{tabular}} \end{table} \linebreak Given contrasting findings with the main MR analysis, where cg05575921-_AHRR_ methylation was not causally implicated in lung cancer, and the lower power in the one-sample analysis to detect an effect of equivalent size to the observational results (power = 19% at alpha = 0.05), a further two-sample MR was performed based on the four mQTLs using data from both CCHS (sample one) and the TRICL-ILCCO consortium (sample two). Results showed no strong evidence for a causal effect of DNA methylation on total lung cancer risk (OR=1.00 [95% CI=0.83,1.10] per SD increase) (__Figure \@ref(fig:sup-fig5-07)__). 
There was also limited evidence for an effect of cg05575921-_AHRR_ methylation when stratified by cancer subtype and smoking status (__Figure \@ref(fig:sup-fig5-07)__) and no strong evidence for heterogeneity of the mQTL effects (__Table \@ref(tab:sup-tab9-07)__). Conclusions were consistent when MR-Egger [@Bowden2015] was applied (__Figure \@ref(fig:sup-fig5-07)__) and when accounting for correlation structure between the mQTLs (__Table \@ref(tab:sup-tab9-07)__). (ref:sup-fig5-07-cap) __Two-sample Mendelian randomization analysis of DNA methylation at _AHRR_ on lung cancer__. Analysis are divided into these categories: a) All lung cancer b) Squamous cell carcinoma c) Adenocarcinoma d) Small cell carcinoma e) All lung cancer in never smokers only f) All lung cancer in ever smokers only (ref:sup-fig5-07-scap) Two-sample Mendelian randomization analysis of DNA methylation at _AHRR_ on lung cancer \blandscape \begin{figure}[htbp] {\centering \includegraphics[width=1\linewidth]{figure/07-dnam_lungcancer_mr/sup_fig5-ahrr_2samp} } \caption[(ref:sup-fig5-07-scap)]{(ref:sup-fig5-07-cap)}(\#fig:sup-fig5-07) \end{figure} \elandscape \begin{table}[!h] \caption{(\#tab:sup-tab9-07)Two sample MR analysis for \textit{AHRR}} \centering \resizebox{\linewidth}{!}{ \begin{tabular}[t]{lllllllll} \toprule \multicolumn{2}{c}{ } & \multicolumn{2}{c}{FE meta-analysis} & \multicolumn{2}{c}{Correction for correlation} & \multicolumn{3}{c}{ } \\ \cmidrule(l{3pt}r{3pt}){3-4} \cmidrule(l{3pt}r{3pt}){5-6} Lung cancer & N SNP & Beta & SE & Beta & SE & Het-Q & Het-DF & Het-P\\ \midrule \cellcolor{gray!6}{All} & \cellcolor{gray!6}{4} & \cellcolor{gray!6}{-0.005} & \cellcolor{gray!6}{0.010} & \cellcolor{gray!6}{-0.004} & \cellcolor{gray!6}{0.009} & \cellcolor{gray!6}{2.73} & \cellcolor{gray!6}{3} & \cellcolor{gray!6}{0.43}\\ Squamous cell & 4 & 0.003 & 0.024 & 0.004 & 0.014 & 6.65 & 3 & 0.08\\ \cellcolor{gray!6}{Adenocarcinoma} & \cellcolor{gray!6}{4} & \cellcolor{gray!6}{-0.022} & 
\cellcolor{gray!6}{0.011} & \cellcolor{gray!6}{-0.022} & \cellcolor{gray!6}{0.012} & \cellcolor{gray!6}{1.06} & \cellcolor{gray!6}{3} & \cellcolor{gray!6}{0.79}\\ Small cell carcinoma & 4 & 0.002 & 0.021 & 0.001 & 0.022 & 0.14 & 3 & 0.99\\ \cellcolor{gray!6}{All in never smokers} & \cellcolor{gray!6}{4} & \cellcolor{gray!6}{-0.003} & \cellcolor{gray!6}{0.025} & \cellcolor{gray!6}{-0.003} & \cellcolor{gray!6}{0.026} & \cellcolor{gray!6}{1.37} & \cellcolor{gray!6}{3} & \cellcolor{gray!6}{0.71}\\ \addlinespace All in ever smokers & 4 & -0.017 & 0.010 & -0.016 & 0.011 & 1.63 & 3 & 0.65\\ \bottomrule \multicolumn{9}{l}{\textsuperscript{} N SNP = number of SNPs used in the analysis as instrumental variables}\\ \multicolumn{9}{l}{\textsuperscript{} FE = fixed effects}\\ \multicolumn{9}{l}{\textsuperscript{} Q = Cochran’s Q statistic}\\ \multicolumn{9}{l}{\textsuperscript{} DF = degrees of freedom}\\ \multicolumn{9}{l}{\textsuperscript{} P = P value}\\ \end{tabular}} \end{table} ### Tumour and adjacent normal lung tissue methylation patterns {#lc-heathly-v-normal} For cg05575921-_AHRR_, there was no strong evidence for differential methylation between adenocarcinoma tissue and adjacent healthy tissue (P=0.963), and weak evidence for hypermethylation in squamous cell carcinoma tissue (P=0.035) (__Figure \@ref(fig:fig4-07)__, __Table \@ref(tab:sup-tab10-07)__). For the other CpG sites there was evidence for a difference in DNA methylation between tumour and healthy adjacent tissue at several sites in both adenocarcinoma and squamous cell carcinoma, with consistent differences for CpG sites in _ALPPL2_ (cg21566642, cg05951221 and cg01940273), as well as cg23771366-_PRSS23_, cg26963277-_KCNQ1_, cg09935388-_GFI1_, cg01901332-_ARRB1_, cg08709672-_AVPR1B_ and cg25305703-_CASC21_. However, hypermethylation in tumour tissue was found for the majority of these sites, which is the opposite to what was observed in the EWAS analysis. 
\pagebreak (ref:fig4-07-cap) __Differential DNA methylation in lung cancer tissue__. A comparison of methylation at each of the 16 CpG sites identified in the meta-analysis was made between lung cancer tissue and adjacent healthy lung tissue for patients with lung adenocarcinoma (__A__) and squamous cell lung cancer (__B__). Data from The Cancer Genome Atlas was used for this analysis. (ref:fig4-07-scap) Differential DNA methylation in lung cancer tissue \begin{center}\includegraphics[width=1\linewidth,height=0.4\textheight]{figure/07-dnam_lungcancer_mr/Figure_4a} \end{center} \begin{figure}[!hp] {\centering \includegraphics[width=1\linewidth,height=0.4\textheight]{figure/07-dnam_lungcancer_mr/Figure_4b} } \caption[(ref:fig4-07-scap)]{(ref:fig4-07-cap)}(\#fig:fig4-07) \end{figure} \pagebreak \begin{table}[!h] \caption{(\#tab:sup-tab10-07)Comparison of MR results with tumour-healthy tissue differential methylation} \centering \resizebox{\linewidth}{!}{ \begin{tabular}[t]{l>{}lllllllll} \toprule \multicolumn{2}{c}{ } & \multicolumn{2}{c}{Adeno-MR} & \multicolumn{2}{c}{Adeno-T/H} & \multicolumn{2}{c}{SCC-MR} & \multicolumn{2}{c}{SCC-T/H} \\ \cmidrule(l{3pt}r{3pt}){3-4} \cmidrule(l{3pt}r{3pt}){5-6} \cmidrule(l{3pt}r{3pt}){7-8} \cmidrule(l{3pt}r{3pt}){9-10} CpG & Gene & Direction & P & Direction & P & Direction & P & Direction & P\\ \midrule \cellcolor{gray!6}{cg23387569} & \em{\cellcolor{gray!6}{AGAP2}} & \cellcolor{gray!6}{hypo} & \cellcolor{gray!6}{0.006} & \cellcolor{gray!6}{hyper} & \cellcolor{gray!6}{8.8e-01} & \cellcolor{gray!6}{hyper} & \cellcolor{gray!6}{0.364} & \cellcolor{gray!6}{hypo} & \cellcolor{gray!6}{5.0e-01}\\ cg05575921 & \em{AHRR} & hypo & 0.089 & hypo & 9.6e-01 & hypo & 0.052 & hyper & 3.5e-02\\ \cellcolor{gray!6}{cg05951221} & \em{\cellcolor{gray!6}{ALPPL2}} & \cellcolor{gray!6}{hyper} & \cellcolor{gray!6}{0.916} & \cellcolor{gray!6}{hyper} & \cellcolor{gray!6}{8.0e-03} & \cellcolor{gray!6}{hyper} & \cellcolor{gray!6}{0.273} & 
\cellcolor{gray!6}{hyper} & \cellcolor{gray!6}{1.0e-09}\\ cg21566642 & \em{ALPPL2} & hypo & 0.673 & hyper & 1.0e-04 & hypo & 0.016 & hyper & 8.0e-07\\ \cellcolor{gray!6}{cg27241845} & \em{\cellcolor{gray!6}{ALPPL2}} & \cellcolor{gray!6}{hyper} & \cellcolor{gray!6}{0.532} & \cellcolor{gray!6}{hypo} & \cellcolor{gray!6}{7.6e-02} & \cellcolor{gray!6}{hypo} & \cellcolor{gray!6}{0.101} & \cellcolor{gray!6}{hyper} & \cellcolor{gray!6}{2.8e-01}\\ \addlinespace cg01901332 & \em{ARRB1} & hypo & 0.045 & hyper & 3.0e-16 & hypo & 0.778 & hyper & 8.0e-12\\ \cellcolor{gray!6}{cg08709672} & \em{\cellcolor{gray!6}{AVPR1B}} & \cellcolor{gray!6}{hyper} & \cellcolor{gray!6}{0.129} & \cellcolor{gray!6}{hyper} & \cellcolor{gray!6}{3.0e-02} & \cellcolor{gray!6}{hypo} & \cellcolor{gray!6}{0.862} & \cellcolor{gray!6}{hyper} & \cellcolor{gray!6}{2.0e-02}\\ cg25305703 & \em{CASC21} & hyper & 0.170 & hypo & 3.0e-05 & hypo & 0.792 & hypo & 8.0e-04\\ \cellcolor{gray!6}{cg03636183} & \em{\cellcolor{gray!6}{F2RL3}} & \cellcolor{gray!6}{hypo} & \cellcolor{gray!6}{0.151} & \cellcolor{gray!6}{hyper} & \cellcolor{gray!6}{8.0e-04} & \cellcolor{gray!6}{hyper} & \cellcolor{gray!6}{0.758} & \cellcolor{gray!6}{hypo} & \cellcolor{gray!6}{7.6e-01}\\ cg09935388 & \em{GFI1} & hypo & 0.831 & hyper & 2.0e-04 & hyper & 0.567 & hyper & 2.0e-20\\ \addlinespace \cellcolor{gray!6}{cg26963277} & \em{\cellcolor{gray!6}{KCNQ1}} & \cellcolor{gray!6}{hypo} & \cellcolor{gray!6}{0.299} & \cellcolor{gray!6}{hyper} & \cellcolor{gray!6}{3.6e-02} & \cellcolor{gray!6}{hypo} & \cellcolor{gray!6}{0.314} & \cellcolor{gray!6}{hyper} & \cellcolor{gray!6}{3.0e-03}\\ cg23771366 & \em{PRSS23} & hyper & 0.819 & hypo & 4.0e-09 & hypo & 0.047 & hypo & 3.3e-02\\ \cellcolor{gray!6}{cg11660018} & \em{\cellcolor{gray!6}{PRSS23}} & \cellcolor{gray!6}{equal} & \cellcolor{gray!6}{0.999} & \cellcolor{gray!6}{hypo} & \cellcolor{gray!6}{3.0e-08} & \cellcolor{gray!6}{hypo} & \cellcolor{gray!6}{0.062} & \cellcolor{gray!6}{hypo} & 
\cellcolor{gray!6}{1.3e-01}\\ \bottomrule \multicolumn{10}{l}{\textsuperscript{} T/H = comparison of tumour and healthy tissue}\\ \multicolumn{10}{l}{\textsuperscript{} Adeno = Lung adenocarcinoma}\\ \multicolumn{10}{l}{\textsuperscript{} SCC = squamous cell carcinoma}\\ \multicolumn{10}{l}{\textsuperscript{} P = P value}\\ \multicolumn{10}{l}{\textsuperscript{} hyper = hypermethylation is associated with lung cancer}\\ \multicolumn{10}{l}{\textsuperscript{} hypo = hypomethylation is associated with lung cancer}\\ \multicolumn{10}{l}{\textsuperscript{} For tumour/healthy tissue comparison, pos = hypermethylation of the CpG within the tumour tissue (neg is}\\ \multicolumn{10}{l}{the opposite)}\\ \end{tabular}} \end{table} \pagebreak ### Gene expression associated with mQTLs in blood and lung tissue Of the 10 genes annotated to the 14 CpG sites, eight genes were expressed sufficiently to be detected in lung (_AVPR1B_ and _CASC21_ were not) and seven in blood (_AVPR1B_, _CASC21_ and _ALPPL2_ were not). Of these, gene expression of _ARRB1_ could not be investigated as the mQTLs in that region were not present in the GTEx data. rs3748971 and rs878481, mQTLs for cg21566642 and cg05951221 respectively, were associated with increased expression of _ALPPL2_ in lung tissue (P=0.002 and P=0.0001). No other mQTLs were associated with expression of the annotated gene at a Bonferroni corrected P value threshold (P<0.05/19=0.0026) (__Table \@ref(tab:sup-tab11-07)__). 
\begin{table}[!h] \caption{(\#tab:sup-tab11-07)mQTL-gene expression analysis in lung and whole blood using data from GTEx} \centering \resizebox{\linewidth}{!}{ \begin{tabular}[t]{>{}llllllllllllll} \toprule \multicolumn{8}{c}{ } & \multicolumn{3}{c}{Lung} & \multicolumn{3}{c}{Whole blood} \\ \cmidrule(l{3pt}r{3pt}){9-11} \cmidrule(l{3pt}r{3pt}){12-14} Gene & SNP & CpG & Trans & SNP chr:pos & A1 & A2 & MAF & Beta & SE & P & Beta & SE & P\\ \midrule \em{\cellcolor{gray!6}{AGAP2}} & \cellcolor{gray!6}{rs1048691} & \cellcolor{gray!6}{cg16823042} & \cellcolor{gray!6}{N} & \cellcolor{gray!6}{12:58152948} & \cellcolor{gray!6}{T} & \cellcolor{gray!6}{C} & \cellcolor{gray!6}{0.207} & \cellcolor{gray!6}{0.011} & \cellcolor{gray!6}{0.046} & \cellcolor{gray!6}{0.81500} & \cellcolor{gray!6}{0.053} & \cellcolor{gray!6}{0.037} & \cellcolor{gray!6}{0.153}\\ \em{AGAP2} & rs1048691 & cg23387569 & N & 12:58152948 & T & C & 0.208 & 0.011 & 0.046 & 0.81500 & 0.053 & 0.037 & 0.153\\ \em{\cellcolor{gray!6}{AHRR}} & \cellcolor{gray!6}{rs11746538} & \cellcolor{gray!6}{cg05575921} & \cellcolor{gray!6}{N} & \cellcolor{gray!6}{5:26366} & \cellcolor{gray!6}{A} & \cellcolor{gray!6}{C} & \cellcolor{gray!6}{0.121} & \cellcolor{gray!6}{-0.009} & \cellcolor{gray!6}{0.075} & \cellcolor{gray!6}{0.90100} & \cellcolor{gray!6}{-0.050} & \cellcolor{gray!6}{0.072} & \cellcolor{gray!6}{0.491}\\ \em{AHRR} & rs11744553 & cg05575921 & N & 5:427466 & C & G & 0.311 & -0.259 & 0.103 & 0.01230 & -0.032 & 0.110 & 0.773\\ \em{\cellcolor{gray!6}{ALPPL2}} & \cellcolor{gray!6}{rs56080708} & \cellcolor{gray!6}{cg27241845} & \cellcolor{gray!6}{N} & \cellcolor{gray!6}{2:233250683} & \cellcolor{gray!6}{A} & \cellcolor{gray!6}{C} & \cellcolor{gray!6}{0.078} & \cellcolor{gray!6}{0.020} & \cellcolor{gray!6}{0.131} & \cellcolor{gray!6}{0.87800} & \cellcolor{gray!6}{NA} & \cellcolor{gray!6}{NA} & \cellcolor{gray!6}{NA}\\ \addlinespace \em{ALPPL2} & rs878481 & cg05951221 & N & 2:233274475 & G & C & 0.408 & 0.396 & 0.123 & 
0.00148 & NA & NA & NA\\ \em{\cellcolor{gray!6}{ALPPL2}} & \cellcolor{gray!6}{rs3748971} & \cellcolor{gray!6}{cg21566642} & \cellcolor{gray!6}{N} & \cellcolor{gray!6}{2:233285872} & \cellcolor{gray!6}{T} & \cellcolor{gray!6}{C} & \cellcolor{gray!6}{0.074} & \cellcolor{gray!6}{0.257} & \cellcolor{gray!6}{0.065} & \cellcolor{gray!6}{0.00011} & \cellcolor{gray!6}{NA} & \cellcolor{gray!6}{NA} & \cellcolor{gray!6}{NA}\\ \em{F2RL3} & rs734568 & cg03636183 & N & 19:17015685 & T & C & 0.361 & 0.071 & 0.048 & 0.14500 & -0.093 & 0.039 & 0.017\\ \em{\cellcolor{gray!6}{GFI1}} & \cellcolor{gray!6}{rs17518433} & \cellcolor{gray!6}{cg09935388} & \cellcolor{gray!6}{N} & \cellcolor{gray!6}{1:92599172} & \cellcolor{gray!6}{A} & \cellcolor{gray!6}{T} & \cellcolor{gray!6}{0.236} & \cellcolor{gray!6}{0.114} & \cellcolor{gray!6}{0.052} & \cellcolor{gray!6}{0.03130} & \cellcolor{gray!6}{-0.001} & \cellcolor{gray!6}{0.036} & \cellcolor{gray!6}{0.986}\\ \em{KCNQ1} & rs463924 & cg26963277 & N & 11:2717680 & T & C & 0.304 & 0.008 & 0.038 & 0.83000 & 0.003 & 0.028 & 0.919\\ \addlinespace \em{\cellcolor{gray!6}{PRSS23}} & \cellcolor{gray!6}{rs1939110} & \cellcolor{gray!6}{cg11660018} & \cellcolor{gray!6}{N} & \cellcolor{gray!6}{11:86505120} & \cellcolor{gray!6}{T} & \cellcolor{gray!6}{C} & \cellcolor{gray!6}{0.286} & \cellcolor{gray!6}{-0.151} & \cellcolor{gray!6}{0.092} & \cellcolor{gray!6}{0.10100} & \cellcolor{gray!6}{0.026} & \cellcolor{gray!6}{0.042} & \cellcolor{gray!6}{0.534}\\ \em{PRSS23} & rs72967500 & cg23771366 & N & 11:86515072 & T & C & 0.132 & -0.061 & 0.067 & 0.36300 & 0.014 & 0.027 & 0.614\\ \bottomrule \multicolumn{14}{l}{\textsuperscript{} Trans = trans mQTL (Yes/No)}\\ \multicolumn{14}{l}{\textsuperscript{} chr:pos = chromosome:position}\\ \multicolumn{14}{l}{\textsuperscript{} MAF = minor allele frequency}\\ \multicolumn{14}{l}{\textsuperscript{} A1 = effect allele}\\ \multicolumn{14}{l}{\textsuperscript{} P = P value}\\ \end{tabular}} \end{table} \pagebreak ## Discussion 
{#discussion-07} In this study, 16 CpG sites were associated, at P<1x10^-7^, with lung cancer in a meta-analysis of EWAS, of which 14 have been previously identified in relation to smoke exposure [@Joehanes2016] and six were highlighted in a previous study as being associated with lung cancer [@Baglietto2017]. This previous study used the same data from the four cohorts investigated here, but in a discovery and replication, rather than meta-analysis framework. Overall, under the MR framework there was limited evidence supporting a potential causal effect of methylation at the CpG sites identified in peripheral blood on lung cancer. This aligns with the results presented in __Chapter \@ref(ewas-gwas-comp-chapter)__, where evidence was found that EWAS results were unlikely to be aetiologically relevant. These findings are in contrast to previous analyses suggesting that methylation at two CpG sites investigated (in _AHRR_ and _F2RL3_) mediated > 30% of the effect of smoking on lung cancer risk [@Fasanelli2015]. This previous study used methods which are sensitive to residual confounding and measurement error that may have biased results [@Richmond2016; @Hemani2017]. These limitations are largely overcome using MR [@Richmond2016]. While there was some evidence for an effect of methylation at some of the other CpG sites on risk of subtypes of lung cancer, these effects were not robust to multiple testing correction and were not validated in the analysis of tumour and adjacent normal lung tissue methylation nor in gene expression analysis. A major strength of the study was the use of two-sample MR to integrate an extensive epigenetic resource and summary data from a large lung cancer GWAS to appraise causality of observational associations with >99% power. Evidence against the observational findings was also acquired through tissue-specific DNA methylation and gene expression analyses. 
Limitations include potential “winner’s curse” which may bias causal estimates in a two-sample MR analysis towards the null if the discovery sample for identifying genetic instruments is used as the first sample, as was done for the main MR analysis using data from ARIES [@Burgess2011]. However, findings were similar when using replicated mQTLs in NSHDS, indicating the potential impact of this bias was minimal (__Figure \@ref(fig:sup-fig1-07)__). Another limitation relates to the potential issue of consistency and validity of the instruments across the two samples. For a minority of the mQTL-CpG associations (4 out of 16), there was limited replication across time points and in particular, 6 mQTLs were not strongly associated with DNA methylation in adults. Further, the primary data used for the first sample in the two-sample MR was ARIES, which contained no male adults at the time of the study. If the mQTLs identified vary by sex and time, then this could bias the results. However, the replication cohort NSHDS contains adult males. Therefore, the 10 mQTLs that replicated in NSHDS are unlikely to be biased by the sex discordance. Also, the findings for cg05575921 _AHRR_ in CCHS, which contains both adult males and females, were replicated in a two-sample MR analysis, suggesting these results are also not influenced by sex discordance. Caution is therefore warranted when interpreting the null results for the two-sample MR estimates for the CpG sites for which mQTLs were not replicated, which could be the result of weak-instrument bias. The lack of independent mQTLs for each CpG site did not allow us to properly appraise horizontal pleiotropy in the MR analyses. Where possible I only included cis-acting mQTLs to minimise pleiotropy and investigated heterogeneity where there were multiple independent mQTLs. Three mQTLs were nominally associated with smoking phenotypes, but not to the extent that this would bias the MR results substantially. 
Since this study was conducted, the Genetics of DNA Methylation Consortium (GoDMC) has released a large set of SNP-DNA methylation associations from a sample size that is over ten times that of ARIES [@Min2020]. The genetic architecture of DNA methylation, as revealed by that paper and others, has shown that few independent cis-acting mQTLs can be identified for the vast majority of DNA methylation sites, with Min et al. detecting a median of two independent cis-acting mQTLs across all DNA methylation sites measured [@Min2020]. Therefore repeating this analysis using instruments identified from GoDMC is unlikely to alter the results or conclusions, especially as there was over 99% power to detect effects as large as the observational associations using data presented in this chapter. Some of the mQTLs used influence multiple CpGs in the same region, suggesting genomic control of methylation at a regional rather than single CpG level. This was untested, but methods to detect differentially methylated regions (DMRs) and identify genetic variants which proxy for them may be fruitful in probing the effect of methylation across gene regions. A further limitation relates to the inconsistency in effect estimates between the one- and two-sample MR analysis to appraise the causal role of _AHRR_ methylation. While findings in CCHS were supportive of a causal effect of _AHRR_ methylation on lung cancer (HR=0.30 [95% CI=0.10,1.00] per SD), in two-sample MR this site was not causally implicated (OR=1.00 [95% CI=0.83,1.10] per SD increase). It was verified that this was not due to differences in the genetic instruments used, nor due to issues of weak instrument bias. Given the CCHS one-sample MR had little power (19% at alpha = 0.05) to detect a causal effect with a size equivalent to that of the observational analysis, there should be more confidence in the results from the two-sample approach. 
Peripheral blood may not be the ideal tissue to assess the association between DNA methylation and lung cancer. While a high degree of concordance in mQTLs has been observed across lung tissue, skin and peripheral blood DNA [@Shi2014], this was not directly evaluated here. A possible explanation for a lack of causal effect at _AHRR_ is due to the limitation of tissue specificity as it was found that the mQTLs used to instrument cg05575921 were not strongly related to expression of _AHRR_ in lung tissue. However, findings from MR analysis were corroborated by the lack of evidence for differential methylation at _AHRR_ between lung adenocarcinoma tissue and adjacent healthy tissue, and weak evidence for hypermethylation (opposite to the expected direction) in squamous cell lung cancer tissue. This result may be interesting in itself as smoking is hypothesized to influence squamous cell carcinoma more than adenocarcinoma. However, the result conflicts with that found in the MR analysis. Furthermore, another study investigating tumorous lung tissue (N=511) found only weak evidence for an association between smoking and cg05575921 _AHRR_ methylation, that did not survive multiple testing correction (P=0.02) [@Freeman2016]. These results do not fully exclude _AHRR_ from involvement in the disease process. _AHRR_ and _AHR_ form a regulatory feedback loop, which means that the actual effect of differential methylation or differential expression of _AHR_/_AHRR_ on pathway activity is complex [@Chen2017]. In addition, some of the CpG sites identified in the EWAS were found to be differentially methylated in the tumour and adjacent normal lung tissue comparison. While this could represent a false negative result of the MR analysis, it is of interest that differential methylation in the tissue comparison analysis was typically in the opposite direction to that observed in the EWAS. 
Furthermore, while this method can be used to minimise confounding, it does not fully eliminate the possibility of bias due to reverse causation (whereby cancer induces changes in DNA methylation) or intra-individual confounding e.g. by gene expression. Therefore, it does not give conclusive evidence that DNA methylation changes at these sites are not relevant to the development of lung cancer. While DNA methylation in peripheral blood may be predictive of lung cancer risk, according to the present analysis it is unlikely to play a causal role in lung carcinogenesis at the CpG sites investigated. Findings from this study issue caution over the use of traditional mediation analyses to implicate intermediate biomarkers (such as DNA methylation) in pathways linking an exposure with disease, given the potential for residual confounding in this context [@Richmond2016]. However, the findings of this study do not preclude the possibility that other DNA methylation changes (i.e. changes at different sites in the genome or in different tissues) are causally related to lung cancer (or other smoking-associated disease) [@Gao2016]. <!--chapter:end:07-dnam_lungcancer_mr.Rmd--> # Discussion {#discussion-thesis} ## Overview {#overview-08} Phenotypic variation cannot occur without molecular change. Identifying the changes that cause phenotypic variation adds to the aetiological understanding of traits and thus has the potential to uncover novel therapeutic targets. Further, studying the association between molecular marks and complex traits may yield new, valid predictors that could augment current prediction capacity within clinical practice [@Relton2010]. This is true regardless of whether the molecular marks identified are causal. 
Given the strong links made by experimental researchers between DNA methylation and the regulatory processes in cells [@Suzuki2008; @Siegfried1999; @Bird2002; @Jones2012; @Ando2019; @Deaton2011; @Wolf1984; @Hellman2007], there has been precedence for studying the covariation between DNA methylation and complex traits with the hope of discovering new regulatory features underlying trait biology. Over the past 15 years, hundreds of EWAS have been conducted, but the number of associations identified have varied dramatically by trait. EWAS have identified numerous associations between DNA methylation and complex traits such as smoking [@Joehanes2016; @Shenker2013; @Zeilinger2013] and body mass index [@Mendelson2017; @Wahl2017; @Shah2015; @Demerath2015]. However, there are also many examples where EWAS have identified few sites that reliably associate with a complex trait; in The EWAS Catalog (__Chapter \@ref(ewas-catalog)__), roughly 25% of the EWAS report fewer than 10 associations. Given the phenotypic nature of DNA methylation, one might expect to see covariation of complex traits and DNA methylation to be identified in abundance across the majority of EWAS. This has not been the case thus far. Understanding the aspects of EWAS that can explain associations across complex traits is imperative to progressing study designs and was the focus of this thesis. In this body of work hundreds of published EWAS along with hundreds more EWAS I conducted were catalogued in an open access database. Using these results, I showed that unaccounted for technical effects may explain over 10% of reported associations. When accounting for these technical effects, the total predictive capacity of DNA methylation measured using the HM450 array in whole blood was low across 400 complex traits (median was near zero). 
This did vary substantially between traits, yet having smoked cigarettes was the only trait for which there was evidence that the total predictive capacity was greater than zero (FDR < 0.1). This suggests that the lack of association between DNA methylation (as measured in blood by the HM450 array) and some complex traits is unlikely due to power, and we should not expect to see similar levels of wide-spread perturbations of DNA methylation in relation to traits like with smoking, when similar sample sizes for analysis are obtained. This apparent lack of covariation between DNA methylation and complex traits may be surprising given the phenotypic nature of DNA methylation, but as this study (as with most EWAS) only measured methylation at less than 2% of the total number of CpG sites across the genome and in only one tissue, it is plausible that much of the covariation between DNA methylation and complex traits was missed. Despite evidence of little predictive capacity, there are still EWAS that have identified numerous DMPs. However, it was found that EWAS for eight traits identified no more genes and genesets in common with corresponding GWAS than expected by chance, suggesting that the majority of DMPs identified were either acting through pathways independent of genetics or were not of aetiological relevance for the traits examined. Confounding and reverse causation may explain the marked differences in EWAS and GWAS results of corresponding traits. In the specific scenario of an EWAS of lung cancer, whether DNA methylation changes may be on the causal pathway to disease was examined. Observational EWAS of lung cancer identified associations with large effect sizes between DNA methylation, measured long before disease onset, and lung cancer risk. However, in a well powered Mendelian randomization (MR) analysis, none of these associations could be replicated. This work also revealed some promising aspects of the current EWAS study design. 
Firstly, the regions targeted by probes on the HM450 array were designed to be enriched for promoters of protein-coding genes and work presented in __Chapter \@ref(properties-of-ewas)__ suggests DMPs are also enriched for these regions. Therefore, this decision may have increased likelihood in identifying sites associated with complex traits. Secondly, the evidence that EWAS are identifying separate genes and genesets to GWAS does not preclude the possibility that these EWAS associations are useful. In fact, if overlap was very high then it would suggest a large amount of redundancy in the studies, but instead it is likely the study types are capturing very different information. Overall though, these findings suggest that substantial changes may be required in the design of EWAS to improve predictive capacity and enhance aetiological insight. ## Extensions to this work {#extensions-to-work} Indeed the issues pertaining to some design flaws and potential improvements that can aid interpretation of EWAS has already been reviewed [@Birney2016; @Rakyan2011; @Heijmans2012; @Lappalainen2017]. Lappalainen & Greally also suggested moving to conducting "second generation EWAS", which focused on improving interpretability of EWAS, especially when conducting them as a means to understand trait biology [@Lappalainen2017]. They argue that there are four key aspects which should be the focus for future EWAS. Firstly, the hypothesised method for which cellular changes will result in phenotypic variation should be clear. Secondly, participants and cell samples collected should be obtained to answer necessary questions regarding causality. Thirdly, cell types present in samples should be measured as precisely as possible. Finally, as many complementary genome-wide assays as possible should be conducted in the same samples and these assays should survey the maximum amount of loci possible. 
Reflecting on the work presented in this thesis, and following on from these points, here I discuss ideas that could be applied to improve both yield and interpretation of EWAS. ### Beyond the HM450 array in blood {#beyond-the-hm450} The work in this thesis focuses on DNA methylation measures taken for the majority of published EWAS, namely the use of the HM450 array to measure DNA methylation in blood samples. The HMEPIC array is now regularly used, but as will be explained, many of the limitations of EWAS identified in this thesis apply to studies using the contemporary array technology. However, given the correlation structure across all DNA methylation sites in the genome is not known, this work can not be generalised to DNA methylation studies as a whole. Further, although certain aspects of observational EWAS, such as propensity for confounding, will always hinder interpretation of results, other aspects of this thesis may not apply to data from other tissues. There is overwhelming evidence that complex traits are polygenic (influenced by multiple genes) [@Timpson2017; @Visscher2017] and some hypothesise that they may even be omnigenic (influenced by potentially all genes, even if the influence of some genes is almost immeasurably small) [@Boyle2017]. As DNA methylation is highly correlated with factors of gene expression regulation, it seems likely that either DNA methylation or something coupled to it has a substantial impact on the aetiology of many complex traits. Thus, the issues uncovered with EWAS in this thesis, such as the lack of covariation between DNA methylation and complex traits, may be because too few sites are being measured in the wrong tissues. Finucane et al. devised a method that used LD-score regression and publicly available gene expression data to identify the most relevant tissue and cell types for 34 traits [@Finucane2018]. 
Without access to large datasets with DNA methylation available in a plethora of tissues, molecular epidemiologists could rely on inferences made from studies such as this to help design future EWAS. Importantly, before such work is used to inform future studies, the definition of the relevant cell type should be clear. This will undoubtedly depend on the phenotype of interest, but will also differ based on the hypotheses being tested. The impact of different methylation states of the genome, such as hydroxymethylation, is also beginning to be explored [@Wilkins2020; @Marshall2020; @Greco2016]. The current methods employed by EWAS, which revolve around bisulphite conversion, capture all methylation states as one measurement. If these states have differing effects, separating these out will increase power and impart greater biological understanding. In a similar vein to other omics, sequencing technology will likely continue to improve and will become cheap enough to use across population samples [@Shendure2017; @Stark2019]. Therefore, identifying the differential methylation states using techniques such as single-molecule real-time (SMRT) sequencing will hopefully become more frequent [@Song2012]. Also, despite the vast majority of EWAS being conducted using blood (__Table \@ref(tab:study-data-tab)__), there are 42 tissue types recorded in The EWAS Catalog. The Cancer Genome Atlas (TCGA) has recorded DNA methylation data in a large number of tissues also [@Weinstein2013], and the trend to measure other tissues is likely to continue, especially as the number of reliable sites identified in EWAS tends to be low. As discussed in __Section \@ref(appeal-of-dnam)__, DNA methylation is a relatively stable molecular trait and this is consistent across tissues. Therefore, if tissues can be sampled, measurement of DNA methylation is simpler than other molecular traits such as RNA levels and histone modifications across any tissue. 
In addition, manuscripts using techniques to measure gene expression and DNA methylation in single cells are becoming more common [@Hedlund2018; @Papalexi2018; @Karemaker2018; @Gravina2015; @Farlik2015]. Simple experiments have also shown that single cell RNA sequencing can give much greater resolution of cell types in a sample compared to other techniques [@Lappalainen2017]. Given the issues of cell specificity, these studies may prove pivotal in advancement of the field of epigenetic epidemiology. As discussed in __Chapter \@ref(properties-of-ewas)__, DNA methylation is a binary feature, yet by pooling groups of cells and measuring methylation at each site as a proportion of the DNA molecules that are methylated, a continuous measure is derived. If cells of the same type are identical then one might expect they would be methylated at the same sites as one another [@Birney2016]. However, a mosaicism of methylation across cells may exist. If so, then pooling groups of cells for analysis, and taking account of differences between higher order cell types, will remain a valid approach for EWAS. However, if strong evidence is provided that this cellular mosaicism is improbable between cells of the same type, rapid movement towards single-cell techniques is likely. ### New methods and data {#new-methods-and-data} There is still substantial interest in assessing the association between DNA methylation and complex traits. In 2020, there have been over 100 EWAS conducted that have been or will be added to The EWAS Catalog. These EWAS are excluding studies that did not meet the criteria for inclusion into the database and studies that did not report findings relevant to the database. Further, new methods are being developed to measure the association between DNA methylation and complex traits. 
Two recent examples include <NAME> et al., who created a command line tool to conduct EWAS using different models, OSCA, [@Zhang2019] and a study that developed a Bayesian approach to conduct EWAS, using various confounders including genotype in their model [@TrejoBanos2020]. This interest will hopefully lead to a continuation in developments across the field of molecular epidemiology, but the complex nature of molecular pathways to disease may limit the speed at which robust aetiological inferences are made. Causal inference methods such as MR aid aetiological studies, but the lack of independent instruments coupled with the complexity of gene regulation make it difficult to be confident the key assumptions of MR (__Figure \@ref(fig:mr-diagram)__) are met [@Relton2010; @Richardson2018; @Relton2012; @Min2020]. Colocalization is now being readily used in the context of molecular MR [@Min2020; @Richardson2018], but this only provides evidence that the complex trait and the molecular phenotype have the same putatively causal genetic variant(s) [@Richardson2018], and do not provide evidence against these variants being pleiotropic. Lack of large-scale studies was a problem in identifying valid instruments for individual CpG sites, including within my study in __Chapter \@ref(dnam-lung-cancer-mr)__, but this has been partially alleviated by the formation of Genetics of DNA Methylation Consortium (GoDMC) [@Min2020]. This consortium has already yielded interesting results. Min et al. performed MR to establish whether there was likely an effect of DNA methylation changes, measured by the HM450 array, on 116 complex traits [@Min2020]. Similarly to the results presented in this thesis, they found that evidence for the aetiological relevance of DNA methylation measured across those sites was lacking. 
In the future, establishing tissue-specific mQTLs will be important in understanding whether tissue-specific DNA methylation changes are likely to have an effect on complex trait variation. For prediction, the complexity of establishing causality and understanding cellular function need not apply. Whether the goal is to identify DNA methylation differences that co-exist with disease states and could therefore help diagnose these states, or truly predict diseases before they arrive is important to consider. For the former, cross-sectional studies suffice, but for the latter cohort studies that have DNA methylation data collected prior to disease onset such as ALSPAC [@Fraser2013; @Boyd2013] are required. Larger studies, such as Generation Scotland [@Smith2013], provide the opportunity for developing more precise predictors and further, the power gained from the extra samples would enable one to estimate the total predictive capacity of DNA methylation for individual traits with relatively high precision using the methods laid out in __Chapter \@ref(h2ewas-chapter)__. ### Beyond DNA methylation {#beyond-dnam} As the premise of aetiological EWAS is that DNA methylation changes may impact upon some cellular function, which leads to phenotypic changes, reliable associations between DNA methylation and complex traits is not enough to impart translatable opportunities. How DNA methylation is changing these cellular functions is key for two reasons, 1. It might not be possible to target DNA methylation directly to treat/prevent disease and 2. DNA methylation may just be tagging some other epigenetic mark and in fact may be inconsequential to phenotypic state. Therefore, once a link between DNA methylation and complex traits is established, it is required that experimental work is undertaken to fill in the gap of how DNA methylation changes relate to cellular function differences that influence the trait. 
With initiatives such as ENCODE and the Roadmap Epigenomics Project [@Dunham2012; @RoadmapEpigenomicsConsortium2015], along with gene expression datasets such as GTEx [@GTExConsortium2013], the functional implications of DNA methylation changes may be predicted. However, experimental work should be used to confirm these predictions. As discussed in __Section \@ref(dnam-as-part-of-regulation)__, DNA methylation is not the only epigenetic mark in human cells. Technology has been a hindrance to population based studies of histone modifications genome-wide. At least one such study has been conducted [@Marzi2018], and the frequency is likely to increase as the cost of high throughput chromatin immunoprecipitation procedures, such as ChIP-seq, decreases. Recently large-scale datasets have been developed to analyse the relationship between protein expression and gene expression with complex traits [@GTExConsortium2013; @Uhlen2015; @Thul2017]. Proteins are often the ultimate target for many pharmaceuticals. As understanding of function and quantification of proteins and their variants increases, protein-based epidemiological studies may supersede EWAS with regards to their frequency. Indeed pharmaceutical companies may start interrogating potential drugs by identifying targetable proteins. DNA methylation, and other molecular traits, may also add to the evidence that regulation of a particular protein target is important for disease development or progression. However, as discussed, the interpretability of EWAS makes it difficult to discern whether or not observed differences in DNA methylation between individuals are important for disease aetiology. Similarly to DNA methylation studies, problems with confounding and cell specificity will be present in protein and gene based studies. Modifications to RNA transcripts and proteins provide an additional level of complication. 
RNA transcripts can be spliced and altered by post-transcriptional modifications and proteins can be modified post-translation of gene transcript [@Hafner2019; @Corbett2018; @Wang2014; @Filipowicz2008]. In extreme cases, proteins have been known to have their function reversed after their state is modified. The protein p53 naturally acts as a tumour suppressor, but during cancer development there is evidence that the protein can be modified and become oncogenic [@Hafner2019]. Studying each molecular measure will come with its own difficulties, and each can add to our understanding of human phenotypic variation. Therefore, instead of any one study superseding others, it seems likely that using a combination to untangle biological complexity will become more common. In fact, one thing Lappalainen and Greally stress is how, in their opinion, investment in deep molecular phenotyping of the same samples, although expensive, will likely pay high dividends with regards to interpretability of molecular epidemiological studies [@Lappalainen2017]. ## Final conclusions {#final-conclusions} The human body is unfathomably complex. Understanding it requires a huge concerted effort from the entire, global research community. Returning to the example of _p53_, by 2010 there had been nearly 50,000 studies on this single family of genes [@Lane2010]. The decades of research conducted to elucidate the function, importance and interactions of this single family of genes illustrates the patience required to understand the molecular underpinnings of disease. Given the brevity of molecular epidemiology's existence, I'd argue that judging whether it has been a success or failure is a frivolous task. However, to speed up development of the field, there is a need to understand the limits of the current methodologies. 
This thesis provides evidence that the study design of current EWAS is unlikely to provide substantial improvements in the ability to predict or understand the aetiological aspects of complex traits. Key next steps will be diversifying the tissue and cell types collected as well as the DNA methylation sites measured in the genome. Further, more experimental studies, that will inevitably come with time, will aid in epidemiological inferences. It is important to note that this thesis, and many other molecular epidemiology studies, primarily focuses on samples of European origin. Datasets exist that contain individuals from different ethnic backgrounds and there have been EWAS conducted in non-Europeans (__Table \@ref(tab:study-data-tab)__), but there is certainly a bias towards European samples. To realise the full potential of molecular epidemiology and to help benefit all in our society, efforts should be made to reduce the bias in sample collection and expand analyses, where possible, to those of all ethnic backgrounds. Despite the apparent shortcomings, there is still great potential for EWAS to uncover important facets of complex trait biology. Imperative to the development of the studies will be the constant re-assessment of the predictive capacity and aetiological insights that can be gleaned from DNA methylation as more data is collected in different samples. <!--chapter:end:08-discussion.Rmd--> \appendix <!-- If you feel it necessary to include an appendix, it goes here. --> # Further assessment of the overlap between genes and genesets identified by EWAS and GWAS \pagebreak (ref:sim1-full-plot-cap) __Full results from simulations assessing power to detect overlap between genes and genesets identified by corresponding EWAS and GWAS__. Simulations were set up as illustrated in __Figure \@ref(fig:method-simulations-schematic)__. 
The ability to distinguish between results generated when EWAS and GWAS were sampling, in part, from the same set of causal genes and results generated when EWAS was sampling random genes from the genome. The header of each set indicates the proportion of genes identified by the simulated EWAS that were set to be causal. or_g = assessing overlap of genes, or_p = assessing overlap of genesets, rho_p = assessing correlation between geneset enrichment scores. go = gene ontology, ppi = protein-protein interaction database from EpiGraphDB. (ref:sim1-full-plot-scap) Full results from simulations assessing power to detect overlap between genes and genesets identified by corresponding EWAS and GWAS \begin{center}\includegraphics[width=1\linewidth]{figure/06-ewas_gwas_comparison/method_test_gene_up_all/PEC_0.05} \end{center} \begin{center}\includegraphics[width=1\linewidth]{figure/06-ewas_gwas_comparison/method_test_gene_up_all/PEC_0.1} \end{center} \begin{center}\includegraphics[width=1\linewidth]{figure/06-ewas_gwas_comparison/method_test_gene_up_all/PEC_0.2} \end{center} \begin{center}\includegraphics[width=1\linewidth]{figure/06-ewas_gwas_comparison/method_test_gene_up_all/PEC_0.5} \end{center} \begin{figure} {\centering \includegraphics[width=1\linewidth]{figure/06-ewas_gwas_comparison/method_test_gene_up_all/PEC_1} } \caption[(ref:sim1-full-plot-scap)]{(ref:sim1-full-plot-cap)}(\#fig:sim1-full-plot5) \end{figure} (ref:arch-simulations-supp-res-cap) __Simulations to understand the likely number of genes still to identify in EWAS and GWAS of six traits under different trait architectures__. Simulations were set up as illustrated in __Figure \@ref(fig:arch-simulations-schematic)__. Correlation of geneset enrichment scores from empirical data (__Table \@ref(tab:empirical-pathway-tab)__), is shown as a red dashed line. Box plots show the range of enrichment score correlations from 1000 simulations using the parameters indicated. 
The number of causal and associated genes, as well as the overlap between these genes were varied. N~KEG~ = number of known EWAS genes, N~KGG~ = number of known GWAS genes, N~KTG~ = number of known total genes (N~KEG~ + N~KGG~). By way of an example, when N~KTG~ = 491 and the ratio of causal and associated genes relative to N~KTG~ is 1:1, the number of causal genes in the simulations will be 557 and the number of associated genes in the simulations will be 491. Scenarios which lie close to the empirical result (red dashed line) are more likely to reflect the true underlying number of genes related to a trait and the true overlap between the causal and associated genes. (ref:arch-simulations-supp-res-scap) Simulations to understand the likely number of genes still to identify in EWAS and GWAS of six traits under different trait architectures \begin{figure} {\centering \includegraphics[width=1\linewidth]{figure/06-ewas_gwas_comparison/architecture_sims_other_traits_corr} } \caption[(ref:arch-simulations-supp-res-scap)]{(ref:arch-simulations-supp-res-cap)}(\#fig:arch-simulations-supp-res) \end{figure} (ref:sim1-go-kegg-gene-comp-cap) __Power to detect overlap between genes and genesets identified by corresponding EWAS and GWAS when mapping signal to all genes and protein coding genes__. Simulations were set up as illustrated in __Figure \@ref(fig:method-simulations-schematic)__. The ability to distinguish between results generated when EWAS and GWAS were sampling, in part, from the same set of causal genes and results generated when EWAS was sampling random genes from the genome. The header of each set indicates the proportion of genes identified by the simulated EWAS that were set to be causal. or_g = assessing overlap of genes, or_p = assessing overlap of genesets, rho_p = assessing correlation between geneset enrichment scores. 
go = gene ontology, suffix of '-genes' denotes using all Ensembl gene IDs for the analysis and the suffix of '-proteins' denotes using only protein coding genes. (ref:sim1-go-kegg-gene-comp-scap) Power to detect overlap between genes and genesets identified by corresponding EWAS and GWAS when mapping signal to all genes and protein coding genes \begin{center}\includegraphics[width=1\linewidth]{figure/06-ewas_gwas_comparison/method_test_gene_v_protein/PEC_0.05} \end{center} \begin{center}\includegraphics[width=1\linewidth]{figure/06-ewas_gwas_comparison/method_test_gene_v_protein/PEC_0.1} \end{center} \begin{center}\includegraphics[width=1\linewidth]{figure/06-ewas_gwas_comparison/method_test_gene_v_protein/PEC_0.2} \end{center} \begin{center}\includegraphics[width=1\linewidth]{figure/06-ewas_gwas_comparison/method_test_gene_v_protein/PEC_0.5} \end{center} \begin{figure} {\centering \includegraphics[width=1\linewidth]{figure/06-ewas_gwas_comparison/method_test_gene_v_protein/PEC_1} } \caption[(ref:sim1-go-kegg-gene-comp-scap)]{(ref:sim1-go-kegg-gene-comp-cap)}(\#fig:sim1-go-kegg-gene-comp5) \end{figure} <!--chapter:end:09-appendix.Rmd--> <!-- The bib chunk below must go last in this document according to how R Markdown renders. More info is at http://rmarkdown.rstudio.com/authoring_bibliographies_and_citations.html --> \backmatter <!-- If you'd like to change the name of the bibliography to something else, delete "References" and replace it. --> # References {-} <!-- This manually sets the header for this unnumbered chapter. --> \markboth{References}{References} <!-- To remove the indentation of the first entry. --> \noindent <!-- To create a hanging indent and spacing between entries. These three lines may need to be removed for styles that don't require the hanging indent. --> \setlength{\parindent}{-0.20in} \setlength{\leftskip}{0.20in} \setlength{\parskip}{8pt} <!-- This is just for testing with more citations for the bibliography at the end. 
Add other entries into the list here if you'd like them to appear in the bibliography even if they weren't explicitly cited in the document. -->

---

<!--chapter:end:99-references.Rmd-->
<file_sep># knitr chunk definitions for Chapter 6 (EWAS-GWAS comparison).
# Read into the Rmd via knitr::read_chunk(); each "## ---- label ----" header
# is a chunk label referenced by the chapter text. Assumes `fig_path` and
# `tab_path` are defined upstream, and that the tidyverse, knitr, kableExtra
# and scales (`comma`) are attached -- TODO confirm against the Rmd setup chunk.

## ---- load-data-06 --------------------------------
fig_p <- file.path(fig_path, "06-ewas_gwas_comparison")
tab_p <- file.path(tab_path, "06-ewas_gwas_comparison")

# auc data
auc_dat <- read_tsv(file.path(tab_p, "auc_data.txt"))

# study data
# trait_dat <- read_tsv(file.path(tab_p, "study_data.txt"))
gwas_trait_dat <- read_tsv(file.path(tab_p, "gwas_studies_info.tsv"))
ewas_trait_dat <- read_tsv(file.path(tab_p, "ewas_studies_info.tsv"))

# divided genome data
div_dat_groups <- read_tsv(file.path(tab_p, "groups.txt"))

# load simulation data for plot
load(file.path(tab_p, "methods_test_gene_up_auc_plot_all_databases_ggplotgrobs.RData"))

# gene overlap res
load(file.path(tab_p, "gene_overlap_results.RData"))

# pathway enrichment score res
load(file.path(tab_p, "pathway_enrichment_results.RData"))

# load architecture simulation z-test data
asim_ztest <- read_tsv(file.path(tab_p, "arch_sims_ztest_sig_res.tsv"))

# all info from correlations of all gwas and ewas
# (the .RData file provides an object called `out_list`)
load(file.path(tab_p, "ewas_gwas_comp_stats.RData"))
gwas_ewas_comp_stats <- out_list

# load in enriched geneset examples data
load(file.path(tab_p, "enriched_geneset_examples.RData"))

#' Convert a small integer to its English word for use in prose.
#'
#' @param x integer; values 0-10 are converted to words, anything else is
#'   returned unchanged. Warns (but still proceeds) if x is not a whole number.
#' @param start_of_sentence logical; if TRUE the word is title-cased.
#' @return character word for 0-10, otherwise x itself.
num_to_text <- function(x, start_of_sentence = FALSE) {
    nums <- 0:10
    names(nums) <- c("zero", "one", "two", "three", "four", "five",
                     "six", "seven", "eight", "nine", "ten")
    if (start_of_sentence) names(nums) <- stringr::str_to_title(names(nums))
    if (!x %% 1 == 0) warning("X is not an integer")
    if (x < 11 & x > -1) {
        out <- names(nums)[nums == x]
    } else {
        out <- x
    }
    return(out)
}

# Replace underscores with hyphens in column names (LaTeX-table friendly).
tidy_colnames <- function(df) {
    colnames(df) <- gsub("_", "-", colnames(df))
    return(df)
}

# Replace underscores with spaces in the `trait` column for display.
tidy_traits <- function(df) {
    df$trait <- gsub("_", " ", df$trait)
    return(df)
}

# Format every column with scales::comma(); returns a tibble of character
# columns with the original column names preserved.
tidy_nums <- function(df) {
    cols <- colnames(df)
    out <- map_dfc(1:ncol(df), function(x) {
        vals <- df[[x]]
        comma(vals)
    })
    colnames(out) <- cols
    return(out)
}

# Abbreviate "observed"/"expected" in column names to "obs"/"exp".
shorten_obs_exp <- function(df) {
    colnames(df) <- gsub("observed", "obs", colnames(df))
    colnames(df) <- gsub("expected", "exp", colnames(df))
    return(df)
}

## ---- trait-data-setup-06 --------------------------------
# some changes to trait_dat: pair each EWAS with its corresponding GWAS,
# keep only traits that have a GWAS match, and tidy for the LaTeX table
trait_dat_tab <- ewas_trait_dat %>%
    left_join(gwas_trait_dat, by = c("Trait" = "ec_trait")) %>%
    dplyr::select(-trait) %>%
    dplyr::select(trait = Trait, ewas_author = Author, ewas_pmid = PMID,
                  ewas_n = N, gwas_author = author, gwas_pmid = pmid,
                  gwas_n = sample_size) %>%
    dplyr::filter(!is.na(gwas_author)) %>%
    mutate(ewas_pmid = as.character(ewas_pmid),
           gwas_pmid = as.character(gwas_pmid)) %>%
    tidy_colnames %>%
    tidy_traits %>%
    tidy_nums

## ---- trait-data-tab-06 --------------------------------
kable(trait_dat_tab, format = "latex", booktabs = TRUE,
      caption = "Study data") %>%
    kable_styling(latex_options = c("striped", "hold_position", "scale_down")) %>%
    add_footnote(c("Where gwas-pmid = NA, the GWAS were conducted as part of a UK Biobank GWAS pipeline within the University of Bristol's Integrative Epidemiology Unit and can be found on the OpenGWAS Project website (see Methods for more)"),
                 notation = "none")

## ---- gwas-predicting-ewas-setup --------------------------------
# AUC of GWAS results predicting EWAS hits, one point (+ CI) per trait;
# red dashed line marks AUC = 0.5 (no discrimination)
beta <- c("max_beta")
auc_res <- auc_dat %>%
    dplyr::filter(predictor == beta)
auc_range <- paste(comma(range(auc_res$auc)), collapse = "-")
auc_plot <- ggplot(auc_res, aes(x = trait, y = auc)) +
    geom_point(position = position_dodge(width = 0.9)) +
    geom_linerange(aes(ymin = auc_ci_low, ymax = auc_ci_upper),
                   position = position_dodge(width = 0.9)) +
    labs(x = bquote("Trait"), y = "Area under the curve") +
    scale_y_continuous(limits = c(0.2, 1)) +
    geom_hline(yintercept = 0.5, colour = "red", linetype = "dashed") +
    scale_x_discrete("Trait", labels = c("alcohol_consumption_per_day" = "AC",
                                         "body_mass_index" = "BMI",
                                         "c-reactive_protein" = "CRP",
                                         "educational_attainment" = "EA",
                                         "former_versus_never_smoking" = "FsNs",
                                         "current_versus_never_smoking" = "CsNs",
                                         "glucose" = "Glucose",
                                         "insulin" = "Insulin")) +
    theme_bw()
region_overlap_barplot_file <- file.path(fig_p, "all_traits_overlap_bar.pdf")

## ---- overlap-barplot --------------------------------
include_graphics(region_overlap_barplot_file)

## ---- auc-plot --------------------------------
print(auc_plot)

## ---- methods-sims-setup --------------------------------
# Pre-rendered simulation figures; the stopifnot() calls fail fast if any
# expected PEC_*.pdf file is missing from the figure directories
methods_sims_schematic <- file.path(fig_p, "simulations-gene-up-flowchart.pdf")
methods_sims_summ_res_file <- file.path(fig_p, "methods_test_gene_up_auc_plot_all_databases_summary.pdf")
methods_sims_all_res_dir <- file.path(fig_p, "method_test_gene_up_all")
methods_sims_go_kegg_gene_res_dir <- file.path(fig_p, "method_test_gene_v_protein")
PEC <- c(0.05, 0.1, 0.2, 0.5, 1)
files_in_dirs <- paste0("PEC_", PEC, ".pdf")
stopifnot(all(files_in_dirs %in% list.files(methods_sims_all_res_dir)))
stopifnot(all(files_in_dirs %in% list.files(methods_sims_go_kegg_gene_res_dir)))
methods_sims_all_res_files <- file.path(methods_sims_all_res_dir, files_in_dirs)
methods_sims_go_kegg_gene_res_files <- file.path(methods_sims_go_kegg_gene_res_dir, files_in_dirs)

## ---- method-simulations-schematic --------------------------------
include_graphics(methods_sims_schematic)

## ---- sim1-summ-plot --------------------------------
include_graphics(methods_sims_summ_res_file)

## ---- sim1-full-plot1 --------------------------------
include_graphics(methods_sims_all_res_files[1])

## ---- sim1-full-plot2 --------------------------------
include_graphics(methods_sims_all_res_files[2])

## ---- sim1-full-plot3 --------------------------------
include_graphics(methods_sims_all_res_files[3])

## ---- sim1-full-plot4 --------------------------------
include_graphics(methods_sims_all_res_files[4])

## ---- sim1-full-plot5 --------------------------------
include_graphics(methods_sims_all_res_files[5])

## ---- sim1-go-kegg-gene-comp1 --------------------------------
include_graphics(methods_sims_go_kegg_gene_res_files[1])

## ---- sim1-go-kegg-gene-comp2 --------------------------------
include_graphics(methods_sims_go_kegg_gene_res_files[2])

## ---- sim1-go-kegg-gene-comp3 --------------------------------
include_graphics(methods_sims_go_kegg_gene_res_files[3])

## ---- sim1-go-kegg-gene-comp4 --------------------------------
include_graphics(methods_sims_go_kegg_gene_res_files[4])

## ---- sim1-go-kegg-gene-comp5 --------------------------------
include_graphics(methods_sims_go_kegg_gene_res_files[5])

## ---- empirical-results-setup --------------------------------
# Tidy the gene-overlap and pathway-enrichment result tables (objects
# `gene_overlap_tabs` / `pathway_enrich_tabs` come from the .RData loads above)
gene_empirical_tabs <- gene_overlap_tabs %>%
    map(shorten_obs_exp) %>%
    map(tidy_colnames) %>%
    map(tidy_traits) %>%
    map(tidy_nums)
gene_empirical_out_tab <- gene_empirical_tabs$go
# NB: comparison is against " 0" (leading space) because tidy_nums/comma()
# pads the formatted numbers
n_traits_no_g_overlap <- sum(gene_empirical_out_tab[['gene-overlap']] == " 0")
pathway_empirical_tabs <- pathway_enrich_tabs %>%
    map(shorten_obs_exp) %>%
    map(tidy_colnames) %>%
    map(tidy_traits) %>%
    map(tidy_nums)
pathway_empirical_out_tab <- pathway_empirical_tabs$go
max_p_overlap <- max(pathway_empirical_out_tab[['geneset-overlap']])
mpo_trait <- pathway_empirical_out_tab %>%
    dplyr::filter(`geneset-overlap` == max_p_overlap) %>%
    pull(trait)

## ---- empirical-gene-tab --------------------------------
kable(gene_empirical_out_tab, format = "latex",
      caption = "Overlap of genes identified by EWAS and GWAS",
      booktabs = TRUE) %>%
    kable_styling(latex_options = c("striped", "hold_position", "scale_down")) %>%
    add_footnote(c("exp = expected, obs = observed",
                   "odds ratios (ORs) can be interpreted as the odds of a gene being identified by EWAS and a GWAS over the odds of a gene being identified by an EWAS but not by a GWAS.",
                   "exp-OR = the mean OR after repeating the analysis 1000 times, randomly sampling EWAS genes equal to the number identified in the empirical analysis."),
                 notation = "none")

## ---- empirical-pathway-tab --------------------------------
kable(pathway_empirical_out_tab, format = "latex",
      caption = "Correlation of geneset enrichment scores between EWAS and GWAS",
      booktabs = TRUE) %>%
    kable_styling(latex_options = c("striped", "hold_position", "scale_down")) %>%
    add_footnote(c("For each geneset, odds of study genes being in the geneset divided by the odds the study genes not being in the geneset were assessed and correlation between these odds ratios are given here.",
                   "expected-cor = the mean correlation between odds ratios after repeating the analysis 1000 times, randomly sampling EWAS genes equal to the number identified in the empirical analysis",
                   "geneset-overlap indicates the number of gene ontology terms that map to both genes identified by the EWAS and GWAS."),
                 notation = "none")

## ---- arch-sims-setup --------------------------------
arch_sims_schematic <- file.path(fig_p, "architecture-simulations-schematic.pdf")
arch_sims_main_res <- file.path(fig_p, "architecture_sims_crp_fvns_only_correlation_of_pathway_enrichment_scores.png")
arch_sims_supp_file <- file.path(fig_p, "architecture_sims_other_traits_corr.png")
asim_ztest_out <- asim_ztest %>%
    tidy_colnames() %>%
    tidy_traits() %>%
    tidy_nums()

## ---- arch-simulations-schematic --------------------------------
include_graphics(arch_sims_schematic)

## ---- arch-simulations-crp-fvns --------------------------------
include_graphics(arch_sims_main_res)

## ---- arch-simulations-supp-res --------------------------------
include_graphics(arch_sims_supp_file)

## ---- arch-simulations-ztest-tab --------------------------------
kable(asim_ztest_out, format = "latex",
      caption = "Simulation scenarios with evidence they differ from what is expected from empirical data",
      booktabs = TRUE) %>%
    kable_styling(latex_options = c("striped", "hold_position", "scale_down")) %>%
    add_footnote(c("For each geneset, odds of selected genes being in the geneset divided by the odds the selected genes not being in the geneset were assessed and correlation between these odds ratios are given here.",
                   "N-KTG = the number of 'total known genes', which is the sum of genes identified in EWAS and GWAS",
                   "R-KTG = the ratio of causal and associated genes relative to N-KTG used in the simulations. 1:1 means there are N-KTG causal genes and N-KTG associated genes.",
                   "ca-assoc-overlap = set overlap of causal and associated genes",
                   "expected-cor = the mean correlation between odds ratios from 1000 repeats of the given simulation scenario",
                   "observed-cor = the correlation observed just using the empirical datasets."),
                 notation = "none")

## ---- all-correlations-setup --------------------------------
# Map IEU OpenGWAS study IDs back to human-readable trait names for the
# GWAS-vs-GWAS enrichment-score correlation table
ieugwas_studies <- gwas_ewas_comp_stats$gwas_studies
cor_range <- gwas_ewas_comp_stats$cor_range
cor_mean <- gwas_ewas_comp_stats$cor_mean
ewas_gwas_mean <- gwas_ewas_comp_stats$ewas_gwas_mean
gwas_gwas_mean <- gwas_ewas_comp_stats$gwas_gwas_mean
gwas_cor_tab <- gwas_ewas_comp_stats$gwas_cor_tab %>%
    mutate(trait1 = tolower(trait1), trait2 = tolower(trait2)) %>%
    left_join(ieugwas_studies, by = c("trait1" = "id")) %>%
    left_join(ieugwas_studies, by = c("trait2" = "id")) %>%
    mutate(trait1 = trait.x, trait2 = trait.y) %>%
    dplyr::select(trait1, trait2, rho, p = p_diff) %>%
    tidy_colnames() %>%
    tidy_nums()
lc_sm_rho <- gwas_ewas_comp_stats$lc_sm_cor$rho
go_heatmap_file <- file.path(fig_p, "go_all_correlations_heatmap.png")

## ---- heatmap-go --------------------------------
include_graphics(go_heatmap_file)

## ---- gwas-cor-tab --------------------------------
kable(gwas_cor_tab, format = "latex",
      caption = "Correlation of enrichment scores between GWAS",
      booktabs = TRUE) %>%
    kable_styling(latex_options = c("striped", "hold_position", "scale_down"))
<file_sep># PhD project meeting notes 2020-06-03

Attendees: <NAME>, Gib

## Agenda

* ewas-gwas comparison simulations
* ewas-gwas comparison harmonising methods
* AOB

## ewas-gwas comparison simulations

see plots:

`simple_simulation4_gene.pdf`
`simple_simulation4_go.pdf`
`simple_simulation4_kegg.pdf`

## ewas-gwas
comparison harmonising methods * method 1 = map sites to genes, genes to pathways and assess overlap * method 2 = map sites to genes, perform geneset enrichment analysis to generate pathway enrichment scores and then look at correlation With method 1 a null distribution was generated each time a comparison was made, making it take much longer. Options: - Apply method 1 to assessing overlap between all the GWAS + seven EWAS generating a null distribution using permutations --> will take roughly 4h to generate the null distribution for each trait - Apply method 1, but use the empirical data as its own null (null for GWAS1-GWAS2 overlap is all other pairwise overlaps for GWAS1) - Apply method 2 when comparing EWAS and GWAS of same traits __NOTES:__ - Could do enrichment approach whereby you get "enriched pathways" and look for overlap between them. - High OR = easy to get an association by chance. Would be nice to weight pathways by the number of genes in them - Ideally, we'd have simulations that tell us whether the methods we're using are giving us the correct answer and how power influences them. Then we could have empirical data to help validate them. Change to 2500 causal genes and in that are the 100 GWAS genes already detected. Then sample 100 genes from the 2500 then go to look at overlap. Also, would be worth trying to simplify things so that we can have a probability that the output is somewhere in the parameter space OR probabilities for each parameter space. Thinking some kind of likelihood function needs to be specified then we maximise the likelihood of where the outcome would sit in each parameter. 
--> TO BE DISCUSSED LATER <file_sep># PhD project meeting notes 2021-05-07 Attendees: <NAME>, Gib ## Agenda * [project updates](#item1) * [Item2](#item2) * [AOB](#aob) ## project updates <a name="item1"></a> * DNAm-her stuff is still at Genome Biology * ewas-gwas comparison stuff and properties of ewas stuff I need to write up + haven't found time __NOTES:__ Some meeting notes ## Item2 <a name="item2"></a> Some pre-meeting notes __NOTES:__ Some meeting notes ## AOB <a name="aob"></a> * AOB item1<file_sep># PhD project meeting notes 2020-08-07 Attendees: <NAME>, Gib ## Agenda * ewas-gwas comp * Item2 * AOB ## ewas-gwas comp Looking at correlation between pathway enrichment scores between GWAS of the same traits is more complex than I first thought it would be... Some correlations close to 0. At first I thought oddities could be because the number of SNPs being identified was low amongst a lot of GWAS [n_snps_histogram.pdf](n_snps_histogram.pdf). However, it isn't as simple as that: [snp_diff_rho_plots.pdf](snp_diff_rho_plots.pdf), [prop_snp_diff_rho_plots.pdf](prop_snp_diff_rho_plots.pdf). Also, I accidentally stopped the architecture simulation code early so waiting for it to finish... Really want the paper for this project to be sent off by the end of this week OR by the end of Monday. So I'm going to send it without those results in I think __NOTES:__ Change overlap correlation plots x-axis to genes and not snps because those snps ain't clumped... 
Send paper without second set of sims is fine ## Item2 Some pre-meeting notes __NOTES:__ Some meeting notes ## AOB * Will email Doug about job * <file_sep># PhD project meeting notes 2021-01-15 Attendees: <NAME>, Gib ## Agenda * [Plans for each results chapter](#item1) * [Timeline](#item2) * [AOB](#aob) ## Plans for each results chapter <a name="item1"></a> ### EWAS Catalog * Submitting next week -- still working with the main team * Data has also been updated - useful for other papers ### Properties of EWAS * Need to re-run things with the new EWAS Catalog data * Also Matt and Paul made a great point about another set of analysis that should be run * This hasn't been written in paper-format yet so will likely take the longest to get moving ### h2ewas * Need to make some minor changes based on the comments from viva examiners + Josine. * Josine made the point that results could be confounded by genotype, but I don't think we need to add in genotype to the analyses because evidence suggests that genotype confounds things towards the null anyway * Someone also took offence to us saying "EWAS" yield few associations * Will submit to AJHG very soon after making changes -- worth doing pre-submission enquiry? * May have to change format if AJHG reject it ### ewas-gwas comparison * Need to re-run things with new EWAS Catalog data * Apart from that, I don't think there were many comments on this so happy to submit/put it on biorxiv after I've re-run the analyses * Where to submit? -- Nat Comms or Genome Biology? 
* ### DNAm-lung cancer * Published __NOTES:__ Some meeting notes ## Timeline <a name="item2"></a> * EWAS Catalog submitted by end of next week * h2ewas submitted by the end of next week * ewas-gwas comparison analyses re-run by next meeting (in 2 weeks) * ewas-gwas comparison paper submitted and on biorxiv within a month * properties of ewas analyses re-run for meeting in 2 months * properties of ewas paper submitted and on biorxiv within 3 months __NOTES:__ Some meeting notes ## AOB <a name="aob"></a> * AOB item1<file_sep>## ---- load-data-04 -------------------------------- tp_04 <- file.path(tab_path, "04-properties_of_ewas") fp_04 <- file.path(fig_path, "04-properties_of_ewas") # study descriptions study_dat <- read_tsv(file.path(tp_04, "data_description.tsv")) # traits per DMP t_dat <- read_tsv(file.path(tp_04, "n_traits_per_cpg.tsv")) # hyper/hypo percentages dir_of_effect <- new_load(file.path(tp_04, "meth_direction_of_effect.RData")) # r-squared values rsq_dat <- read_tsv(file.path(tp_04, "rsquared_data.tsv")) rsq_sum_dat <- read_tsv(file.path(tp_04, "sum_of_rsquared_data.tsv")) # geo re-analysis results geo_rean <- read_tsv(file.path(tp_04, "geo_reanalysis_data.tsv")) # robust results summary data rob_summary <- new_load(file.path(tp_04, "robust_summary.RData")) # faulty probes and batch batch_and_f_probes <- new_load(file.path(tp_04, "batch_and_faulty_probes.RData")) # replication data replication_dat <- new_load(file.path(tp_04, "replication_data.RData")) # cpg-chars data cpg_chars_res <- read_tsv(file.path(tp_04, "characteristics_assoc_tab.tsv")) cpg_chars_res_2 <- new_load(file.path(tp_04, "cpg_chars_results.RData")) # tfbs enrichment data tfbs_en <- new_load(file.path(tp_04, "tfbs_data_for_chapter.RData")) ## ---- study-data-setup -------------------------------- study_caption <- "Description of data present in the EWAS Catalog" n_ewas_04 <- study_dat[study_dat$study_trait == "Number of EWAS", "value", drop=T] study_dat <- study_dat %>% 
tidy_colnames() ## ---- study-data-tab -------------------------------- kable(study_dat, format = "latex", caption = study_caption, booktabs = TRUE) %>% kable_styling(latex_options = c("striped", "hold_position", "scale_down")) %>% add_footnote(c("Identified associations were defined as those P < 1x10\\textsuperscript{-7}", "Results for Sex, Ethnicities, Age, and Most common tissues were calculated per EWAS.", "For example, if one EWAS (or meta-analysis) contained just Afican indviduals then that would be counted as one.", "EUR = European, AFR = African, ADM = Admixed, EAS = East Asian, SAS = South Asian"), notation = "none", escape = FALSE) ## ---- traits-manhattan-setup -------------------------------- h_genes <- t_dat %>% dplyr::filter(is_highlight == "yes") %>% arrange(n_traits) h_cpgs <- h_genes$CpG names(h_cpgs) <- h_genes$Gene h_cpg_genes <- paste0(h_cpgs, " (_", names(h_cpgs), "_)", collapse = ", ") trait_man_file <- file.path(fp_04, "traits_per_dmp_at_1e-07.png") ## ---- traits-manhattan -------------------------------- include_graphics(trait_man_file) ## ---- rsq-setup -------------------------------- min_r2 <- min(rsq_dat$rsq, na.rm = T) max_r2 <- max(rsq_dat$rsq, na.rm = T) # sum of rsq values rsq_sum_plot <- ggplot(rsq_sum_dat, aes(x = log_total_rsq)) + geom_histogram(fill = "blue", colour = "black") + theme_bw(base_size = 15) + labs(x = expression(log[10](sum(r^{2}))), y = "EWAS") # rsq values split by how high suspect_studies <- rsq_sum_dat %>% dplyr::filter(p_diff_adj < 0.05) new_rsq_dat <- rsq_dat %>% mutate(high_total_rsq = ifelse(StudyID %in% suspect_studies$StudyID, TRUE, FALSE)) # change in rsq new_med_rsq <- new_rsq_dat %>% group_by(high_total_rsq) %>% summarise(med_rsq = median(rsq,na.rm=T)) %>% dplyr::filter(high_total_rsq == FALSE) %>% pull(med_rsq) rsq_plot <- ggplot(new_rsq_dat, aes(x = rsq, fill = as.factor(high_total_rsq), alpha = as.factor(high_total_rsq))) + geom_histogram(colour = "black", binwidth = 0.01, position = "identity") + 
theme_bw(base_size = 15) + scale_alpha_manual(values = c("FALSE" = 1, "TRUE" = 0.3), guide = "none") + labs(x = expression(r^{2}), y = "Differentially methylated positions", fill = expression(inflated ~ ~sum(r^{2}))) ## ---- rsq-distribution -------------------------------- print(rsq_plot) ## ---- rsq-sum-distribution -------------------------------- print(rsq_sum_plot) ## ---- batch-faulty-setup -------------------------------- batch_tab <- batch_and_f_probes$batch f_probes <- batch_and_f_probes$faulty_probes percent_sex_probes <- f_probes %>% dplyr::filter(highlight == "on sex chromosome") %>% dplyr::filter(cpg_or_ewas == "cpg") %>% pull(percent) percent_faulty_probes <- f_probes %>% dplyr::filter(highlight == "potentially faulty") %>% dplyr::filter(cpg_or_ewas == "cpg") %>% pull(percent) f_probes_plot <- ggplot(f_probes, aes(x = cpg_or_ewas, y = percent, fill = highlight)) + geom_bar(colour = "black", stat="identity") + scale_fill_manual(values=c("white", "#E69F00", "#56B4E9")) + labs(fill = "") + theme_bw(base_size = 15) + theme(axis.title.x = element_blank(), legend.position="bottom") ## ---- faulty-probes-plot -------------------------------- print(f_probes_plot) ## ---- replication-setup -------------------------------- rep_study_n <- replication_dat$rep_study_n rep_rates <- replication_dat$rep_rates rep_tab <- rep_rates %>% dplyr::select(Trait = trait, N_DMPs = n_cpgs, N_replicated = rep_cpgs, N_replication_studies = n_rep_studies, Prop_replicated = prop_rep) %>% # dplyr::select(-studyid) %>% tidy_nums() %>% tidy_colnames() rep_tab_no_smoking <- rep_tab %>% dplyr::filter(!grepl("smoking", Trait)) %>% dplyr::filter(!grepl("Body mass index", Trait)) rep_tab_smoking <- rep_tab %>% dplyr::filter(grepl("smoking", Trait)) rep_tab_bmi <- rep_tab %>% dplyr::filter(grepl("Body mass index", Trait)) rep_caption <- "Replication rate" rep_caption_smoking <- "Replication rate in EWAS of smoking" rep_caption_bmi <- "Replication rate in EWAS of body mass index" 
rep_tab_footnote <- c("N-DMPs = number of differentially methylated positions identified at P<1x10\\textsuperscript{-7}", "N-replicated = number of DMPs replicated in the GEO re-analysis at P<1x10\\textsuperscript{-4}", "N-replication-studies = number of studies for which replication was examined", "Prop-replicated = proportion of DMPs replicated.") # geo re-analysis stuff geo_rean_tab <- geo_rean %>% dplyr::select(Trait = trait, N_DMPs = ori_dmps, N_replicated = n_rep, Percent_replicated = rep_percent) %>% tidy_nums() %>% tidy_colnames() %>% arrange(Trait) geo_rean_caption <- "GEO re-analysis replication" ## ---- replication-tab -------------------------------- kable(rep_tab_no_smoking, format = "latex", caption = rep_caption, booktabs = TRUE) %>% kable_styling(latex_options = c("striped", "scale_down")) %>% add_footnote(rep_tab_footnote, notation = "none", escape = FALSE) ## ---- replication-tab-smoking -------------------------------- kable(rep_tab_smoking, format = "latex", caption = rep_caption_smoking, booktabs = TRUE) %>% kable_styling(latex_options = c("striped", "scale_down")) %>% add_footnote(rep_tab_footnote, notation = "none", escape = FALSE) ## ---- replication-tab-bmi -------------------------------- kable(rep_tab_bmi, format = "latex", caption = rep_caption_bmi, booktabs = TRUE) %>% kable_styling(latex_options = c("striped", "scale_down")) %>% add_footnote(rep_tab_footnote, notation = "none", escape = FALSE) ## ---- geo-reanalysis-tab -------------------------------- kable(geo_rean_tab, format = "latex", caption = geo_rean_caption, booktabs = TRUE) %>% kable_styling(latex_options = c("striped", "HOLD_position", "scale_down")) %>% add_footnote(c("N-DMPs = number of differentially methylated positions identified at P < 1x10\\textsuperscript{-7}", "N-replicated = number of DMPs replicated in the GEO re-analysis at P < P < 1x10\\textsuperscript{-4}"), notation = "none", escape = FALSE) ## ---- cpg-characteristics-setup -------------------------------- # 
avg_meth_file <- paste0(fp_04, "figures/avg_meth_vs_effect_size.png") # var_meth_file <- paste0(fp_04, "figures/var_meth_vs_effect_size.png") # h2_twin_meth_file <- paste0(fp_04, "figures/h2_twin_vs_effect_size.png") # h2_twin_dmp_file <- paste0(fp_04, "figures/dmp_h2_twin_scatter.png") cpg_chars_file <- file.path(fp_04, "combined_characteristics_plots.png") cpg_chars_tab <- cpg_chars_res %>% tidy_nums() %>% tidy_colnames() colnames(cpg_chars_tab)[colnames(cpg_chars_tab) == "r2"] <- "r\\textsuperscript{2}" h2_row <- grep("^h2", cpg_chars_tab$characteristic) h2_and_var_row <- grep("variance \\+ h2", cpg_chars_tab$characteristic) cpg_chars_tab[h2_row, "characteristic"] <- "h\\textsuperscript{2}" cpg_chars_tab[h2_and_var_row, "characteristic"] <- "variance + h\\textsuperscript{2}" chars <- unique(cpg_chars_res$characteristic) assoc_val <- colnames(cpg_chars_res)[colnames(cpg_chars_res) != "characteristic"] x <- chars[1] char_vals <- lapply(chars, function(x) { df <- dplyr::filter(cpg_chars_res, characteristic == x) out <- lapply(assoc_val, function(i) df[[i]]) names(out) <- assoc_val return(out) }) names(char_vals) <- chars cpg_chars_cap <- "Association between CpG chars and associations in EWAS" get_cc_res <- function(outcome, pred_nam) { ## outcome needs to be "rep" "effect" or "dmp" df <- get(paste0("cc_", outcome)) out <- dplyr::filter(df, predictor == pred_nam) return(out) } cc_rep <- cpg_chars_res_2$log_model cc_effect <- cpg_chars_res_2$lm_model cc_dmp <- cpg_chars_res_2$dmp_pred ## ---- cpg-chars-plot -------------------------------- include_graphics(cpg_chars_file) ## ---- cpg-chars-tab -------------------------------- kable(cpg_chars_tab, format = "latex", caption = cpg_chars_cap, booktabs = TRUE, escape = FALSE) %>% kable_styling(latex_options = c("striped", "hold_position", "scale_down")) %>% add_footnote(c("avg-meth = average methylation level", "beta > 0 = DNA methylation hypermethylated with respect to the trait", "beta < 0 = DNA methylation 
hypomethylated with respect to the trait", "auc = area under the curve"), notation = "none", escape = FALSE) ## ---- enrichment-setup -------------------------------- chrom_state_file <- file.path(fp_04, "chromatin_states_enrichment_boxplots_onepage.pdf") tfbs_file <- file.path(fp_04, "tfbs_enrichment_plot.pdf") strict_p_threshold <- comma(1e-7 / rob_summary$n_ewas) power <- gsub(".*e-", "", strict_p_threshold) strict_p_threshold <- gsub(paste0("e-", power), paste0("x10^-", power, "^"), strict_p_threshold) ## ---- chrom-state-plot -------------------------------- include_graphics(chrom_state_file) ## ---- tfbs-plot -------------------------------- include_graphics(tfbs_file) <file_sep># PhD project meeting notes 2021-10-19 Attendees: <NAME>, Gib ## Agenda * [properties of ewas analyses](#item1) * [AOB](#aob) ## Properties of ewas analyses <a name="item1"></a> Expected replication: We have Beta, SE, P, N in discovery and N in replication Can estimate whether the sample size in replication is high enough to be able to detect an effect (assuming the effect + SD are the same in the replication). Could also check the sample size is high enough to detect an effect if the true effect is actually at the lower end of the CI. This would give us X expected replications across the studies and I'd just do a binomial test to see if the actual replications are as great or greater than the expected replications? See code Gib sent! -- will need to remember that the replication study won't always contain CpG. So need to extract the replication P value (i.e. the highest P-value from the study) from the results. How many axes of phenotypic space does CpG associate with? 450,000 CpGs by 2000 traits matrix - any cells where data is missing can be set to 0. Taking PCs - means each PC will have cluster of traits. 
Could do z-scores instead of betas Will be able to tell which traits contribute high and which contribute low to each thing of a PC [Last report](prop-ewas-results.html) for reference. __NOTES:__ Some meeting notes ## AOB <a name="aob"></a> * ewas-gwas comparison paper - realised I need Tom + Nic to reply as I don't have their funding info...<file_sep># PhD project meeting notes 2020-07-24 Attendees: <NAME>, Gib ## Agenda * Thesis intro * EWAS-GWAS comparison paper * AOB ## Thesis intro Do you want to take a look at the next draft? Or just comment on the additions. I'll probs have it done over the weekend. Putting a more philosophical spin on things... __NOTES:__ Begin with big picture stuff! Maybe opening paragraph should be something more big picture about gene regulation or disease variation. OR could signpost where you're going. Kind of helps tell the reader is what is coming next! Gib is sending by end of the day. ## EWAS-GWAS comparison paper ### intro Struggling to succinctly say why it's important to know if EWAS and GWAS are sampling from the same distribution of genes/pathways. Can kind of think about it like this: Little overlap expected because of confounding and reverse causation with DNAm-trait associations BUT would be expected that DNAm is picking up some upstream events (even if not causing changes in trait directly). If large overlap then suggests EWAS haven't added much, but as identifying more sites then there is reason to believe increase in EWAS sample size would yield more biological information than increase in GWAS sample size of same amount. If no overlap then it's unclear if EWAS is identifying any upstream genes and pathways that aren't caused by confounding effects (of course could also be identifying downstream genes + pathways). Why appraise comparison? --> Both trying to accomplish same things. 
Important to make point that r2 is limited in GWAS --> All upstream so heritability limited and spread across potentially lots of predictors so low power. Triangulation could be mentioned. Important point: Constraints on GWAS mean we can make certain inferences about the relationship between SNPs and traits. EWAS don't share these constraints. So inference is harder and large overlap suggests they are picking up upstream stuff and so being successful in that regard! ### results section Do you have a paper you've written or would recommend to help with writing the results with the simulations I've done? -- <NAME> Should results section for empirical overlap go like this: - Used Fisher's exact test (either correlation of enrichment scores or not) - Little evidence for overlap - This could be due any overlap being completely at random, or if EWAS and GWAS were identifying similar biological pathways then we might not have power to detect it depending on the genetic and epigenetic architectures of the traits and the power of the studies. - Certain architectures (i.e. high percentage of EWAS hits causing trait changes), study power, gene mapping, and pathway database used all play a role in the power to detect overlap between genes/pathways. - Brief description of simulations --- issue with this is that we have to back track a bit from we simulated the scenarios TO oh yeah and from these simulations we determined the pathway database we used in the empirical results performed the best OR should it start with simulations like: - We know there are various pathway databases and that other factors (listed above) may influence ability to detect overlap above that expected by chance. Therefore we set up simulations to test which databases performed best under which scenarios. - Brief description of simulations - Results of simulations - Empirical results --- issue with this is it's probably less interesting to have talked about the simulations etc. 
and then just report a bunch of null results... SECOND ONE! Could have two rows of 0.05 and two rows of 1 Could change into lines to avoid weird x-axis stuff with the sim plot. ## AOB <file_sep># PhD project meeting notes 2020-09-16 Attendees: <NAME>, Gib ## Agenda * Results from EWAS-GWAS comparison simulations * Properties of EWAS * Thesis structure * AOB ## Results from EWAS-GWAS comparison simulations _Premise of simulations_ We have known EWAS genes and known GWAS genes and a simulated number of causal + consequential genes. We mix the EWAS genes in the simulated consequential genes and pick out a number equal to the number of known EWAS genes. We do the same with GWAS genes and simulated causal genes. We check the overlap between causal and consequential genes picked out, generate enrichment scores for GO terms for each geneset and then assess correlation of enrichment scores. We vary the number of simulated causal + consequential genes as well as the proportion of overlap between causal + consequential genes. _Results_ [plot1](architecture_sims_correlation_of_pathway_enrichment_scores.pdf) [plot2](architecture_sims_log10_gene_overlap.pdf) For each trait it seems the only real scenarios we can rule out are: number of genes is close to what has been discovered and overlap is high... So we either haven't discovered all genes with EWAS + GWAS or the overlap between causal and consequential genes is likely not high. How can I represent this?? -- good idea to just put two of the plots in (such as BMI + current vs. never) and talk about inference that can be made from these. __NOTES:__ Presenting these sims: - Prose overview of what want to achieve with these simulations. - Brief mention of methods - Results - Full methods in Methods section later For rest of paper, it's worth grounding it in reality by talking about actual pathways that were found. 
SO pick a trait and check the pathways that were enriched for EWAS hits and GWAS hits and compare what those pathways are. This can be put at the end of the section where lack of empirical overlap is established. ## Properties of EWAS Genomic features analysis needed! -- intro includes talking about how DNAm sites choosing ## Thesis structure OK to have boxes in thesis chapters? - should be fine. How to add v large tables in thesis that would just be spreadsheets in supplement for papers? - email the library and ask them what they recommend. Some guidance: http://www.bristol.ac.uk/academic-quality/pg/pgrcode/annex4/ __NOTES:__ Some meeting notes ## AOB * OK to present at epi-epi on September 28th? -- yes, move to 1pm * Getting comments back -- Gib will ask them to get back with comments if needs be * Guess I can use my PhD money to go to virtual conferences? -- was thinking of putting in an abstract for properties of EWAS stuff -- yeah this is fine! <file_sep># PhD project meeting notes 2020-06-03 Attendees: <NAME>, Gib ## Agenda * general updates * ewas-gwas comparison paper * ewas-gwas comparison simulations * AOB ## general updates * Going to start updating EWAS Catalog from next week * Thesis introduction first draft pretty much ready --> should be with you by the end of the week * Keen to get the gwas-ewas comparison manuscript done by the end of next week ## ewas-gwas comparison paper Question: Can EWAS add to the biological information gained from conducting GWAS? Brief methods: Extract data for EWAS (N>4500) and corresponding GWAS. Match sites identified to regions/genes/pathways and check overlap at each stage. Explore pathway overlap between EWAS and other GWAS. ### Results * Little overlap in genomic regions identified * Large number of overlapping pathways, but no more than expected by chance * Scenarios in which we'd expect this to occur are... 
(simulations) * Generally EWAS pathway enrichment correlates poorly with all GWAS pathway enrichments, with some exceptions (e.g. insulin EWAS and body fatness GWAS). __NOTES__ - Should check to see why the methods aren't consistent between pathway overlap stuff - For initial overlap analysis could do this: 1. Keep overlap stuff same for looking at gene overlap 2. For pathways, do enrichment analysis then check for overlap of "enriched" pathways - Problem with correlation of just enrichments - May have highly variable enrichment scores! --> should check this out... - Could look at genetic correlations when assessing whether enrichment scores should be highly correlated for traits - If GWAS that should match don't match, it suggests that pathway enrichment doesn't necessarily give much overlap. ## ewas-gwas comparison simulations Goal of simulations: Help understand the scenarios for which there is no more overlap of EWAS and GWAS genes/pathways than expected by chance. ### Simulation 1 Model how changes in percentage of EWAS hits that are causal influence overlap overview: X randomly sampled causal genes Y randomly sampled consequential genes N GWAS genes (N determined by empirical data) are randomly sampled from X N EWAS genes (N determined by empirical data) are randomly sampled from X and Y at proportions determined by a varying parameter Test: Fisher's Exact test to determine if there is more overlap between EWAS genes/pathways and GWAS genes/pathways than expected (expected determined from the total number of genes/pathways). 
### Simulation 2 Model how changes in overlap between causal and consequential genes influence overlap overview: GWAS and EWAS genes taken from the empirical data X causal genes Y consequential genes Causal genes = GWAS genes + randomly sample from all genes to make up X causal genes Consequential genes = EWAS genes (except those also GWAS genes) + randomly sample from all genes that aren't causal genes All trait genes = Causal genes + consequential genes Test: Fisher's exact to determine if there is more overlap between EWAS genes/pathways and GWAS genes/pathways than expected (expected determined from the number of genes from 'all trait genes') ### To-do * Re-do simulations * Try and make methods across sections of project the same * Check data more carefully for pathway enrichment correlation stuff <file_sep># PhD project meeting notes 2020-10-09 Attendees: <NAME>, Gib ## Agenda * Comments * ewas-gwas comparisons * AOB ## Comments * Worth going through your comments first?? -- started this for intro -- yeah * How to return revised documents? -- add previous comments back to new pdf?? 
--- latexdiff * Properties of EWAS comments * New equipment -- can get a [new battery](https://www.ifixit.com/Store/Mac/MacBook-Air-13-Inch-Late-2010-2017-Replacement-Battery/IF188-113?o=2) __NOTES:__ properties of ewas: - Would be nice to touch on differences in populations: + Have a look at the data and see if we can draw any conclusions from why it's important to include different populations in analyses Had a look at literature and found there are a few studies looking at differences between populations: [1](https://clinicalepigeneticsjournal.biomedcentral.com/articles/10.1186/s13148-019-0805-z), [2](https://clinicalepigeneticsjournal.biomedcentral.com/articles/10.1186/1868-7083-6-4), [3](https://www.thelancet.com/journals/landia/article/PIIS2213-8587(15)00127-8/fulltext), [4](https://onlinelibrary.wiley.com/doi/abs/10.1002/gepi.21789) Don't bother sending revised intro to TG + NT. When ready just send the whole thesis and say cast eyes over it to see which parts need to comment on. ## ewas-comparisons Started re-doing analysis adding in the latest GWAS results and no change in conclusions (so far) -- even though re-doing analysis meant adding a new trait (CRP) __NOTES:__ Some meeting notes ## AOB * Got some mocks set up! * Should be aware of what single cell could bring to the table -- could make up some sort of digested version of conclusions and ask about whether this speaks to moving towards single cell.<file_sep># PhD project meeting notes 2020-09-17 Attendees: <NAME>, Gib, Nic ## Agenda * General updates * Supervisor questions/ideas for future * AOB ## General Updates - Job doesn't start until 4th Jan - Want to be finished working on chapters by early-mid October __NOTES:__ Some meeting notes ## Supervisor questions/ideas for future Check with Sharen about when stipend ends - does it end when you hand in or not??? Let <NAME> know about order to give comments back! 
Change thesis title, send form again __NOTES:__ Some meeting notes ## AOB * AOB item1<file_sep>## ---- load-data-07 -------------------------------- data_dir <- "data/07-dnam_lungcancer_mr" figure_dir <- "figure/07-dnam_lungcancer_mr" supp_tables_file <- "supplementary-tables.xlsx" supp_tables_path <- file.path(data_dir, supp_tables_file) supp_tables <- supp_tables_path %>% excel_sheets() %>% set_names() %>% map(read_excel, path = supp_tables_path) tables_file <- "tables.xlsx" table1 <- read_excel(file.path(data_dir, tables_file)) ## ---- figures-setup-07 -------------------------------- # main figs fig1 <- file.path(figure_dir, "Figure_1.jpg") fig2a <- file.path(figure_dir, "Figure_2a.jpg") fig2b <- file.path(figure_dir, "Figure_2b.jpg") fig3 <- file.path(figure_dir, "Figure_3.jpg") fig4a <- file.path(figure_dir, "Figure_4a.jpg") fig4b <- file.path(figure_dir, "Figure_4b.jpg") # sup figs sup_figs <- file.path(figure_dir, grep("sup_fig", list.files(figure_dir), value=T)) ## ---- fig1-07 -------------------------------- include_graphics(fig1) ## ---- fig2-07 -------------------------------- include_graphics(c(fig2a, fig2b)) ## ---- fig3-07 -------------------------------- include_graphics(fig3) ## ---- fig4a-07 -------------------------------- include_graphics(fig4a) ## ---- fig4-07 -------------------------------- include_graphics(fig4b) ## ---- sup-fig1-07 -------------------------------- include_graphics(grep("fig1", sup_figs, value=T)) ## ---- sup-fig2-07 -------------------------------- include_graphics(grep("fig2", sup_figs, value=T)) ## ---- sup-fig3-07 -------------------------------- include_graphics(grep("fig3", sup_figs, value=T)) ## ---- sup-fig4-07 -------------------------------- include_graphics(grep("fig4", sup_figs, value=T)) ## ---- sup-fig5-07 -------------------------------- include_graphics(grep("fig5", sup_figs, value=T)) ## ---- tables-setup-07 -------------------------------- ### functions unite_chr_pos <- function(df) { ### put chr and pos 
together if (!any(grepl("position", colnames(df), ignore.case = T))) return(df) if (any(grepl("CpG Chr", colnames(df), ignore.case = T))) { df <- df %>% unite("CpG chr:pos", `CpG Chr`:`CpG Position`, sep=":") } if (any(grepl("SNP Chr", colnames(df), ignore.case = T))) { df <- df %>% unite("SNP chr:pos", `SNP Chr`:`SNP Position`, sep=":") } if (any(grepl("^Position", colnames(df), ignore.case = T))) { df <- df %>% unite("chr:pos", Chr:Position, sep=":") } return(df) } get_added_header <- function(df) { cols_to_sort <- grep(".*_.*", colnames(df), value = T) prefixes <- unique(gsub("_.*", "", cols_to_sort)) suffixes <- unique(gsub(".*_", "", cols_to_sort)) pref_ncols <- map_dbl(prefixes, function(x) length(grep(x, colnames(df)))) no_pref_ncols <- sum(!colnames(df) %in% cols_to_sort) aha <- setNames(c(no_pref_ncols, pref_ncols), c(" ", prefixes)) return(aha) } get_gene_col <- function(df) { grep("gene", colnames(df), ignore.case = T) } sort_dir_for_latex <- function(dirs) { chars <- nchar(dirs) if (!length(unique(chars)) == 1) stop("need character length of each string to be the same") vals <- seq(1, unique(chars), by=1) out <- map_chr(dirs, function(x) { adirection <- lapply(vals, function(i) { achar <- substr(x, i, i) paste0("{", achar, "}") }) paste0(unlist(adirection), collapse = "") }) return(out) } #### tables table1 <- unite_chr_pos(table1) table1_aha <- get_added_header(table1) table1_cap <- "Meta-analyses of EWAS of lung cancer using four separate cohorts: 16 CpG sites associated with lung cancer at false discovery rate < 0.05." 
## -- Format main table (table 1) -----------------------------------------
# Strip the "PREFIX_" group labels from the column names (the groups are
# re-added as a spanning header via table1_aha), then tidy numbers/names
# and locate the gene column so it can be italicised.
colnames(table1) <- gsub(".*_", "", colnames(table1))
table1 <- table1 %>%
    tidy_nums() %>%
    tidy_colnames()
table1_gene_col <- get_gene_col(table1)

#### supplementary tables

# Tables whose columns carry a "PREFIX_" group label need an extra header
# row spanning each group of prefixed columns.
tables_to_sort <- map_lgl(supp_tables, function(st) {
    any(grepl("_", colnames(st)))
})
tables_to_sort <- names(tables_to_sort)[tables_to_sort]

supp_tables <- lapply(supp_tables, unite_chr_pos)

# Wrap every character of the direction-of-effect ("_Dir") columns in
# braces (e.g. "+-+-" -> "{+}{-}{+}{-}") so LaTeX renders each sign
# separately; all other columns pass through unchanged.
cols <- colnames(supp_tables[["S2"]])
supp_tables[["S2"]] <- map_dfc(seq_along(supp_tables[["S2"]]), function(x) {
    col_nam <- colnames(supp_tables[["S2"]])[x]
    column <- supp_tables[["S2"]][[col_nam]]
    names(column) <- col_nam
    if (!grepl("_Dir", col_nam)) return(column)
    out <- sort_dir_for_latex(column)
    return(out)
})
colnames(supp_tables[["S2"]]) <- cols

## get additional header data for supplementary tables that need it!
# (removed a leftover debugging assignment `st=tables_to_sort[4]` and a
# stray print() call here)
add_header <- lapply(tables_to_sort, function(st) {
    sup <- supp_tables[[st]]
    get_added_header(sup)
})
names(add_header) <- tables_to_sort

## clean up column names and numbers
add_header <- add_header # no-op removed? -- NOTE(review): see comment below
clean_supp_tables <- lapply(seq_along(supp_tables), function(x) {
    st_nam <- names(supp_tables)[x]
    st <- supp_tables[[x]]
    if (st_nam %in% tables_to_sort) {
        colnames(st) <- gsub(".*_", "", colnames(st))
    }
    st <- st %>%
        tidy_nums() %>%
        tidy_colnames()
    return(st)
})
names(clean_supp_tables) <- names(supp_tables)

# NOTE(review): manual patch of the first Dir cell -- presumably the value
# is mangled by the tidying step above; confirm against the source workbook.
clean_supp_tables[["S2"]][1, 2] <- "{-}{-}{-}{-}"

## get tables with "Gene" in column names and convert these columns to italics
gene_cols <- lapply(clean_supp_tables, get_gene_col)

## manually change anything if needs be
# r2 / I2 column headers rendered as LaTeX superscripts
col_to_change <- grep("r2", colnames(clean_supp_tables[["S1"]]))
colnames(clean_supp_tables[["S1"]])[col_to_change] <- "r\\textsuperscript{2}"
col_to_change <- grep("I2", colnames(clean_supp_tables[["S2"]]))
colnames(clean_supp_tables[["S2"]])[col_to_change] <- "I\\textsuperscript{2}"

# manually do this one because it isn't like the others!
add_header[["S9"]] <- c(" " = 2, "FE meta-analysis" = 2,
                        "Correction for correlation" = 2, " " = 3)

# kable_styling() options per table; prefixed (long) tables also repeat
# their header when split across pages.
ks_latex_options <- lapply(names(clean_supp_tables), function(st_nam) {
    out <- c("striped", "hold_position", "scale_down")
    if (st_nam %in% tables_to_sort) out <- c(out, "repeat_header")
    return(out)
})
names(ks_latex_options) <- names(clean_supp_tables)

# Captions for supplementary tables S1-S11.
captions <- list(
    S1 = "Instrument strength in ARIES",
    S2 = "Heterogeneity between studies and smoker groups in the meta-analysis of EWAS in four cohorts",
    S3 = "The SNP-exposure association estimates from ARIES and NSHDS",
    S4 = "Full results for MR of DNA methylation of 14 CpG sites on lung cancer",
    S5 = "The association between mQTLs and their CpG sites across the five timepoints in ARIES",
    S6 = "Estimates of heterogeneity of MR estimates across multiple SNPs",
    S7 = "Association of \\textit{AHRR} methylation and methylation allele score with confounding factors in the CCHS",
    S8 = "One-sample MR analysis of the effect of \\textit{AHRR} methylation (\\%) on lung cancer risk in the CCHS",
    S9 = "Two sample MR analysis for \\textit{AHRR}",
    S10 = "Comparison of MR results with tumour-healthy tissue differential methylation",
    S11 = "mQTL-gene expression analysis in lung and whole blood using data from GTEx")

## ---- tab1-07 --------------------------------
kable(table1, booktabs = TRUE, caption = table1_cap) %>%
    add_footnote(c("Meta-analyses of lung cancer EWAS adjusted for study specific covariates (basic, N = 1809),",
                   "basic model + surrogate variables (sv-adjusted, N = 1809), basic model + surrogate variables + derived cell counts (sv-and-cell-count, N = 1809).",
                   "Meta-analyses were also conducted stratified by smoking status (never-smokers (N = 304), former-smokers (N = 648), current-smokers (N = 857)) using the basic model",
                   "OR = odds ratio per SD increase in DNA methylation, SE = standard error, chr:pos = chromosome:position"),
                 notation = "none") %>%
    add_header_above(table1_aha) %>%
    kable_styling(latex_options = c("striped", "hold_position", "scale_down", "repeat_header"),
                  position = "center") %>%
    column_spec(table1_gene_col, italic = TRUE) %>%
    landscape()

## ---- sup-tab1-07 --------------------------------
kbl(clean_supp_tables[["S1"]], booktabs = TRUE, caption = captions[["S1"]], escape = FALSE) %>%
    add_footnote(c("SE = standard error, P = P value, N = sample size",
                   "F = F statistic, r\\textsuperscript{2} = Variance explained"),
                 notation = "none", escape = FALSE) %>%
    kable_styling(latex_options = ks_latex_options[["S1"]])

## ---- sup-tab2-07 --------------------------------
kbl(clean_supp_tables[["S2"]], booktabs = TRUE, caption = captions[["S2"]], escape = FALSE) %>%
    add_footnote(c("Dir = Direction of effect",
                   "I\\textsuperscript{2} = Heterogeneity I-squared value",
                   "P = Heterogeneity P value",
                   "chr:pos = chromosome:position",
                   "sv-adjusted = surrogate variables included as covariates in analysis",
                   "sv-and-cell-count = surrogate variables and derived cell counts included as covariates in analysis",
                   "never-smokers = basic model in never smokers only",
                   "former-smokers = basic model in former smokers only",
                   "current-smokers = basic model in current smokers only",
                   "comp = comparison of smoker groups."),
                 notation = "none", escape = FALSE) %>%
    add_header_above(add_header[["S2"]]) %>%
    kable_styling(latex_options = ks_latex_options[["S2"]]) %>%
    landscape()

## ---- sup-tab3-07 --------------------------------
kbl(clean_supp_tables[["S3"]], booktabs = TRUE, caption = captions[["S3"]]) %>%
    add_footnote(c("* = SNPs used as an instrumental variables were not replicated in the independent dataset (NSHDS)",
                   "Trans = trans mQTL (Yes/No)",
                   "chr:position = chromosome:position",
                   "MAF = minor allele frequency",
                   "A1 = effect allele",
                   "P = P value"),
                 notation = "none") %>%
    add_header_above(add_header[["S3"]]) %>%
    kable_styling(latex_options = ks_latex_options[["S3"]]) %>%
    column_spec(gene_cols[["S3"]], italic = TRUE) %>%
    landscape()

## ---- sup-tab4-07 --------------------------------
kbl(clean_supp_tables[["S4"]], booktabs = T, caption = captions[["S4"]]) %>% add_header_above(add_header[["S4"]]) %>% add_footnote(c("N SNP = number of SNPs used in the analysis as instrumental variables", "* = Instrumental variables for that CpG site did not replicate in an independent dataset (NSHDS)", "Where N SNP = 1, the Wald ratio estimate is used", "Where N SNP > 1, the Wald ratio estimates were meta-analyzed and the estimates were weighted by the inverse variance of the association with the outcome"), notation = "none") %>% kable_styling(latex_options = ks_latex_options[["S4"]]) %>% column_spec(gene_cols[["S4"]], italic = TRUE) ## ---- sup-tab5-07 -------------------------------- kbl(clean_supp_tables[["S5"]], booktabs = T, caption = captions[["S5"]]) %>% add_header_above(add_header[["S5"]]) %>% add_footnote(c("P = p value"), notation = "none") %>% kable_styling(latex_options = ks_latex_options[["S5"]]) %>% column_spec(gene_cols[["S5"]], italic = TRUE) %>% landscape() ## ---- sup-tab6-07 -------------------------------- kbl(clean_supp_tables[["S6"]], booktabs = T, caption = captions[["S6"]]) %>% add_header_above(add_header[["S6"]]) %>% add_footnote(c("N SNP = number of SNPs used in the analysis as instrumental variables", "Q = Cochrane’s Q statistic", "Lung cancer (ever) = lung cancer in ever smokers", "Lung cancer (never) = lung cancer in never smokers", "Where P < 0.05, there is good evidence of heterogeneity across individual SNPs"), notation = "none") %>% kable_styling(latex_options = c("striped", "scale_down", "hold_position")) ## ---- sup-tab7-07 -------------------------------- kbl(clean_supp_tables[["S7"]], booktabs = T, caption = captions[["S7"]]) %>% add_header_above(add_header[["S7"]]) %>% add_footnote(c("For the allele score, genotypic effects were scaled to equate to the same magnitude of effect as a per 1% increase in methylation. 
Regressions were adjusted for the other factors in the table"), notation = "none") %>% kable_styling(latex_options = ks_latex_options[["S7"]]) ## ---- sup-tab8-07 -------------------------------- kbl(clean_supp_tables[["S8"]], booktabs = T, caption = captions[["S8"]]) %>% add_header_above(add_header[["S8"]]) %>% add_footnote(c("HR = hazard ratio, P = P value"), notation = "none") %>% kable_styling(latex_options = ks_latex_options[["S8"]]) ## ---- sup-tab9-07 -------------------------------- kbl(clean_supp_tables[["S9"]], booktabs = T, caption = captions[["S9"]]) %>% add_header_above(add_header[["S9"]]) %>% add_footnote(c("N SNP = number of SNPs used in the analysis as instrumental variables", "FE = fixed effects", "Q = Cochrane’s Q statistic", "DF = degrees of freedom", "P = P value"), notation = "none") %>% kable_styling(latex_options = ks_latex_options[["S9"]]) ## ---- sup-tab10-07 -------------------------------- kbl(clean_supp_tables[["S10"]], booktabs = T, caption = captions[["S10"]]) %>% add_header_above(add_header[["S10"]]) %>% add_footnote(c("T/H = comparison of tumour and healthy tissue", "Adeno = Lung adenocarcinoma", "SCC = squamous cell carcinoma", "P = P value", "hyper = hypermethylation is associated with lung cancer", "hypo = hypomethylation is associated with lung cancer", "For tumour/healthy tissue comparison, pos = hypermethylation of the CpG within the tumour tissue (neg is the opposite)"), notation = "none") %>% kable_styling(latex_options = ks_latex_options[["S10"]]) %>% column_spec(gene_cols[["S10"]], italic = TRUE) ## ---- sup-tab11-07 -------------------------------- kbl(clean_supp_tables[["S11"]], booktabs = T, caption = captions[["S11"]]) %>% add_header_above(add_header[["S11"]]) %>% add_footnote(c("Trans = trans mQTL (Yes/No)", "chr:pos = chromosome:position", "MAF = minor allele frequency", "A1 = effect allele", "P = P value"), notation = "none") %>% kable_styling(latex_options = ks_latex_options[["S11"]]) %>% 
column_spec(gene_cols[["S11"]], italic = TRUE)
<file_sep># PhD project meeting notes DATE Attendees: <NAME>, Gib ## Agenda * [Item1](#item1) * [Item2](#item2) * [AOB](#aob) ## Item1 <a name="item1"></a> Some pre-meeting notes __NOTES:__ Some meeting notes ## Item2 <a name="item2"></a> Some pre-meeting notes __NOTES:__ Some meeting notes ## AOB <a name="aob"></a> * AOB item1<file_sep># PhD project meeting notes 2022-01-07 Attendees: <NAME>, Gib ## Agenda * [sigma2_hm450 update](#item1) * [other updates](#item2) * [AOB](#aob) ## sigma2_hm450 update <a name="item1"></a> We have until February 14th now - increased time needed as bc4 is down. PCs?? - Would it be best to calculate PCs on the 1000 children separately to the rest of the samples? need to check if data is imputed on HRC! - want genetic data to be imputed in same way Use 20 PCs - first few might be batch! Repeating analyses - remove 1000 random people? or 1000 random non-related people? [prelim res](new-cohorts-vs-fom1-estimates.png) - without adjustment for PCs AND including relateds __NOTES:__ To do: 1. Check genetic data imputed to HRC 2. Combine genetic data 3. Generate PCs in unrelateds (see https://gist.github.com/samwalrus/aac96371de8f5c4e70d0de53e60040ce) 4. Project those PCs onto related individuals (see https://github.com/MRCIEU/godmc/blob/master/resources/genetics/pcs_relateds.R) 5. Change code so 20 PCs are used in model 6. Re-run initial analyses 7. Run analyses in each cohort individually 8. Run analyses 1000 times, removing 1000 (or so) random individuals each time 9. Generate plot comparing sig2hm450 across all different analyses for each trait - so should have point estimates + CI for sig2hm450 in Mums only, dads only, sabre only, kids only, combined, iterations (mean of distribution is point estimate) Other project ideas (or to do if paper is rejected): 1. what is the "missing" EWAS heritability thing? - extract large EWAS with corresponding traits in ARIES.
Assess sigma2hm450 of traits in ARIES. Then use large EWAS to create a DNAm score in ARIES and estimate r2. Then see difference between the 2. 2. Does sample size decrease bias things in EWAS towards the null?? - should be no. ## other updates <a name="item2"></a> EWAS Catalog paper finally sent off <NAME> gave comments on ewas-gwas comp paper. Caroline messaged me about it and has some comments on it too... but hasn't given me the comments yet. Will work on Tom's comments and send off asap. __NOTES:__ Some meeting notes ## AOB <a name="aob"></a> * Check this out and get back to Gib: https://uob.sharepoint.com/:w:/t/grp-GoDMCMR/EcJpFcwbk1hLjYM-C-NcfCcBHUsSuxvV94l2D5vt1Nm_rg?e=dZwJAb&wdLOR=cF06884C1-6EF0-C04F-A5E5-C96A163DA8FD <file_sep># PhD project meeting notes 2020-08-28 Attendees: <NAME>, Gib ## Agenda * Properties of EWAS paper * Viva examiners * AOB ## Properties of EWAS paper Why is it important to know about the number of studies adjusting for batch when we already know they should be doing that? Got data on heterogeneity of CpG mean/sd methylation? [current results](paper.pdf) Check this paper for ideas: https://www.nature.com/articles/s42003-018-0261-x ### Weird RA results Found out I actually ran some analyses using GEO data. I only adjusted for SVs and they adjusted for Age, sex, smoking, cell comp. I found 27 DMPs and they found almost 50,000... They show changes when adjusting for batch for only 10 DMPs, but it has pretty large consequences for effect size + P value... __NOTES:__ Worth defining set of EWAS that are suspect and removing them OR putting them to one side and repeating analyses using both those studies and others. ## Viva examiners Other options: - <NAME> - <NAME> Could also have one geneticist who has done a bit of epigenetics (or at least QTL stuff) - <NAME> - <NAME> - <NAME> - <NAME> __NOTES:__ GH is going to contact Janine and Ele. 
## AOB * Don't have a thesis title...<file_sep># PhD project meeting notes 2020-11-09 Attendees: <NAME>, Gib ## Agenda * [properties of ewas](#item1) * [Item2](#item2) * [AOB](#aob) ## properties of ewas <a name="item1"></a> ### plotting transcription factor enrichments * [plot1](tfbs_enrichment_plot.pdf) * [plot2](tfbs_enrichment_plot_v2.pdf) * No plot - just text like "DMPs were enriched for the binding sites of XXX transcription factors in at least one tissue type." Text should be there. Make plot be like plot 1 (tissue on x-axis), but have five boxes for each tissue (one for each cpg list) instead of faceting it! Problem: Don't have data from all tissues for all tfbs... SO could add to the text. "In the ENCODE data available, there were three transcription factors for which the binding sites were confirmed in 10 or more tissues (CTCF in 23 tissues, POL2 in 12 tissues, EZH2 in 10 tissues). There was/wasn't agreement..." -- fine Also figure 4.5 (rendering) -- fine ### r-squared * Moved to sum(r-squared) / N + Need to be able to justify why using "mean" as cutoff!! -- can do so in text and for viva * plots? ### cpg characteristics and ewas results * you ask for beta + SE, but rank transformed so worth giving?? * Seeing if variance is related to replication: + limit to cpgs that have the chance to be replicated + create "replicability" variable = has a CpG been replicated in at least one study? + can variability predict replicability? -- roc(rep ~ varcpg) + worth doing same for mean methylation and heritability? -- can just use a logisitc regression model and put them all in the same model........... __NOTES:__ Check distributions again. 
Could use rank transformations as a sensitivity analysis ## Item2 <a name="item2"></a> Some pre-meeting notes __NOTES:__ Some meeting notes ## AOB <a name="aob"></a> * AOB item1<file_sep># PhD project meeting notes 2020-06-08 Attendees: <NAME>, Gib ## Agenda * ewas-gwas comparison: method for measuring overlap * AOB ## ewas-gwas comparison: method for measuring overlap ### method 1 1. Extract ANY pathways tagged by EWAS/GWAS 2. Test to see if there is more overlap than expected by chance Notes on method: * Allows comparison it always gives pathways * Some pathways cover so many genes that even when no genes overlap there will be a lot of pathway overlap (although not always more than expected by chance) * Quick to run and simple to interpret ### method 2 1. Extract ENRICHED pathways tagged by EWAS/GWAS 2. Test to see if there is more overlap than expected by chance Notes on method: * Makes things far more specific and along with method 1 should give the upper and lower bounds for overlap * Not enough enriched pathways to check overlap most of the time... --> for alcohol consumption per day, only 2 ewas pathways and one gwas pathway "enriched" ### method 3 1. Run enrichment tests for all pathways and extract ORs 2. Test correlation between ORs Notes on method: * Haven't tried using permutations to estimate what the null is... + Could say any correlation with adjusted-p < 0.05 gives evidence for some correlation (but can get this with very small |r|) * Need to test whether non-parametric method is better for assessing correlation (remember that r assumes that E(Y|X) is linear!) ### simulations for looking at methods See code for testing whether there is much power to detect whether there is more of an overlap than expected by chance: [comparing_overlap_methods_sims.R] Old simulations kind of give results for the current method: see [simple_simulation3_go_overlap_OR.pdf] and [simple_simulation3_kegg_overlap_OR.pdf]. 
They show that there really isn't much power to detect overlap when it is really there (i.e. both ewas and gwas are causal) and when it isn't really there (i.e. when neither are causal). Change simulations so that ewas is 100% causal or 0% causal. Then use ROC curves to see which method can predict whether EWAS is causal or not. To-do: - re-run simulations to test methods - re-do empirical data analysis with the correct method - re-run the simulations to check which scenario our empirical results fall into! <file_sep># PhD project meeting notes 2021-02-26 Attendees: <NAME>, Gib ## Agenda * [Teaching](#item1) * [updates on other work](#item2) * [AOB](#aob) ## Teaching <a name="item1"></a> Teaching opportunities?? * Talk to Kaitlin about omics-MR stuff __NOTES:__ Some meeting notes ## updates on other work <a name="item2"></a> * h2ewas going off today/soon + wanted to see cover letter? - send it to Gib + need to re-arrange if it's in journals "guidelines"? - no need to reformat * started going through ewas-gwas. May present at the end of the week __NOTES:__ Some meeting notes ## AOB <a name="aob"></a> * AOB item1<file_sep># PhD project meeting notes 2020-04-23 Attendees: <NAME>, Gib, <NAME> Agenda: - Update on all current projects - Discussion of analysis plan for "properties of EWAS" chapter ## Update on all current projects ### EWAS Catalog Lots of work on the EWAS Catalog recently, it's looking much better! Meeting 2020-04-24, but overall I think we're pretty close to finishing everything! Things to sort out: * Getting it on shark server (Tom may need your help again) * Sending paper out Also, I've reformatted the ewas-catalog paper for my thesis, I just need to update it with the new features we'll add in. __NOTES__ * Could be port conflicts causing problems on shark server (specifically port 3306 on mysql server) - change in docker container. * Could switch over to crashdown or another server if shark is still causing issues. 
### Comparison of EWAS and GWAS Main question of this project: could EWAS be providing extra biological information? Analysis for this is done and manuscript is written up. There is one result that Gib and I are going to discuss tomorrow (2020-04-24) and then hopefully soon I can send the manuscript to all of you. ### Discussion of analysis plan for "properties of EWAS" chapter See `analysis_plan.pdf`. __NOTES__ * Don't just show top 10 cpgs or genes. Better to show a distribution -- would be nice to have a manhattan with cpgs on the x-axis and no. of traits the cpg is associated with at p<1x10^-7^ on the y-axis. * Would be better to look at r^2^ distribution rather than beta distribution * GoDMC have CpG variability data available * Look at variance vs. effect size as well as methylation level vs. effect size * Speak to Charlie about CpGs under selection, could be incorporated here ## Actions * Make meeting notes and things available on github to make it easier to keep track of things * Update the analysis plan with ideas from today's meeting * Discuss with Matt how he is planning on doing a clustering analysis on all EWAS in the catalog * <file_sep># PhD project meeting notes 2020-07-17 Attendees: <NAME>, Gib ## Agenda * Team meetings * EWAS-GWAS Comparison project progress * AOB ## Team meetings Would be nice to have a general chat about science and how to improve things in the department/for each other with work __NOTES:__ Good idea, can make this a thing. ## EWAS-GWAS Comparison project progress No real difference between gene overlap when mapping to protein coding genes compared to ensembl gene ids. Therefore: Can use proteins :D Everything else is setup to run sequentially. In the simulations we've done so far, there hasn't been a huge difference between using enrichment score correlations and all pathway overlap to test whether there is more pathway overlap than expected by chance. 
__NOTES:__ It would be useful to note some of the raw numbers in the manuscript -- e.g. the number of genes that overlap Lack of correlation when using spearman suggests no systematic inflation of pathway correlation. -- may have arisen if pathways always get tagged Should just run correlations without permutations and then run permutations if there is high correlation so can follow up an "interesting" result. ## AOB * Will send h<sup>2</sup><sub>EWAS</sub> paper over<file_sep># PhD project meeting notes 2020-07-20 Attendees: <NAME>, Gib ## Agenda * ewas-gwas comparison project simulations * making EWAS data available * AOB ## ewas-gwas comparison project simulations Simulations assessing method seems to show there is more power using other pathway databases... [methods_test_gene_up_auc_plot_all_databases.pdf](methods_test_gene_up_auc_plot_all_databases.pdf) __NOTES:__ * Sort out sims by: + re-running GO + KEGG whilst mapping to only protein coding genes + Altering n-genes for things that map just to protein coding genes + re-running stringdb simulations with protein coding genes only * Once all that is done the plan is for results: + Simulation set one -- display results for all methods AND pathways + Empirical analyses -- display results for only enrichment score correlations AND best powered pathway. Then put other pathway res in supplement + Simulation set two -- same as empirical analyses + All GWAS vs. some EWAS, just use best pathway and don't include other shite to complicate things ## making EWAS data available So I've got uploading to zenodo working-ish Tried uploading all 400 in zip file and it gave me an error saying the file was too large to upload via an API (file is about 5Gb, but zenodo can take 50Gb files). 
Then tried dividing up into zipped folders of 40 and got a "ConnectionError" Then tried just taking 10 files and putting them into a zipped folder of 10 and got a "json.decoder.JSONDecodeError" After sequentially removing files from that folder I found it worked at 8 files in the folder. Although I'm not sure if this is a function of number/size of the files or the actual files themselves. Could be a problem with one of the ewas files for some reason. Still to be tested... __NOTES__: * Could just ask <NAME> to help out! ## AOB * Please can have overarching thoughts on intro ready for discussion on Thursday<file_sep># PhD project meeting notes 2020-10-15 Attendees: <NAME>, Gib, Nic, <NAME> ## Agenda * [h<sup>2</sup><sub>EWAS</sub> name change](#name-change) * [random thesis bits](#random-thesis-bits) * [comments](#comments) * [plan going forward](#plan-going-forward) * [AOB](#aob) ## h<sup>2</sup><sub>EWAS</sub> name change <a name="name-change"></a> * Don't mind how we name h<sup>2</sup><sub>EWAS</sub> + originally called it m<sup>2</sup> + Doug asked for it to be changed to h<sup>2</sup><sub>EWAS</sub> + I like the idea it has "EWAS" in the name + If h<sup>2</sup> makes it seem as though we're measuring heritability of DNAm then happy to change it + Nic suggested m<sub>r<sup>2</sup></sub> -- ?? + sigma-squared_HM450!! __NOTES:__ Some meeting notes ## random thesis bits * Mocks setup * setting up one drive -- do this! + Add in clear instructions! * Anything else to think about at this stage? + Noooope __NOTES:__ Think about next steps -- stay enthusiastic for viva! ## comments * anything you guys want to bring up? 
### overarching thoughts/important points * Adding in a "Data sources" chapter + Think that it may not be "needed" as I can put the data in the first results chapter it's used in and then just refer back to it + BUT could be useful as the use of data differs between some chapters -- may be confusing saying in the EWAS Catalog chapter "we used ALSPAC data for xyz" and then having to refer back to that + Also could be used to appraise each dataset? need long description and short one -- short one to link back to as a memory aid -- just keep in mind the people reading these chapters! * Updating results with new data + Definitely will be updating The EWAS Catalog, BUT may not have time to update properties of ewas and ewas-gwas comp... + Important to think about how to frame this in thesis Need to make it clear when the data is from! Can say this snapshot was from time X. * Incorporating other peoples work into thesis: + EWAS Catalog -- Team effort (<NAME> made original website) + Properties of EWAS -- Charlie has done enrichment analyses + DNAm-lung cancer MR -- Becky joint first author AND <NAME> did analysis on TCGA data ### ewas-gwas comparison * TRG: "GO is not pathway" - very good point... GO terms place genes into "functional groups" + Need to just be specific with this! -- should be fine using all 3 GO "groups" together. Add this to discussion! * Being more cautious with end of abstract: "However, they did not preclude the posibility that some of the differentially methylated positions may be pertinent to the aetiology of the traits. Thus, combining GWAS and EWAS (amongst other studies), rather than using one study design, will likely be more fruitful in understanding trait biology." -- Agree caution is needed! Find it a little hard to strike a balance of being cautious but not implying EWAS is pointless in this thesis... + sentence is contradictory! 
+ In general throughout the thesis need to remember that it's not saying EWAS is shite, but rather caution should be exercised when interpreting EWAS and the original thinking of "this could mediate effect" is likely not true in the majority of cases * TRG great point: Differences in properties of DNAm and genetic variants extends beyond just confounding and reverse causation -- correlation structure and ?? * Reasons to expect differences between gene and pathway overlap between EWAS and GWAS: + confounding + reverse causation + trans mQTL effects (only differences in genes?) + DNAm mediating non-genetic causes of trait (only differences in genes?) -- also question of if you had an infinite sample size would you detect all genes relevant to trait aetiology via GWAS? + mis-mapping of CpGs or SNPs to genes (yes, but for intro good to assume we are not doing this? Even though of course we will be...) -- fundamental question! + power * NJT great Q: "what would happen if you had infinite power for both the GWAS and the EWAS? One assumes that the overlap would then be a function of the genetic architecture of the trait of interest. If you had something truly poly/omnigenic, then you would get overlap I guess as you find “genetics of” confounding. For mono/oligogenic traits, overlap will always be capped. This may make you reflect more on the power/capacity of the EWAS and GWAS and how this may have shaped your learning here (e.g. educational attainment which you have)?" + Definitely think it's worth discussing this, but in the intro or discussion? * More ambitious with conclusion -- best way to talk about expansion to more than 7 traits? 
__notes:__ Need to justify relative plausiblility of confounding/reverse causation compared to other things brought up (see above) Could have connections between downstream and upstream pathways --- needs to be mentioned, but hard to think about how to actually do this analysis Need to make it clear EWAS is focused in blood whereas genetic variants would be assumed to have an effect in each tissue. ### EWAS Catalog * Justifying decisions made before I came on-board? -- e.g. why include only associations at P<1e-4? * Comparison of EWAS Catalog to other datasets to get across why pooling data is useful ### Properties of EWAS * will discuss these at next meeting! ### Intro * Structure of intro to whole thesis + GH: could re-structure intro so it is more high-level + NJT: "opening lines are key to getting the reader with you - use turns of phrase that ensure they get that you are deeply aware of the properties of the data you are focused on for your work" + agree with both -- struggled a lot to write this... * Couple of things I didn't quite understand from comments, but can leave to next meeting + including this from NJT: '“omewide” era where hypothese were abandoned for acephaly"' __NOTES:__ Some meeting notes ## Plan going forward * Comments on everything apart from 1. properties of ewas work and 2. 
discussion * Immediatly after meeting - will send round discussion with Gibs comments * By end of weekend -- will incorporate some of Nic's comments on properties of EWAS (except more difficult ones I want to discuss) + send round a copy of this WITH some extra analyses I left out of first draft + Think it would be nice to focus on this chapter in next meeting (Oct 27th) * Will incorporate ewas-gwas comparison comments and put into thesis -- will then have all chapters in one place SET UP SHAREPOINT ## AOB * AOB item1<file_sep>## ---- load-data-05 -------------------------------- fig_p <- file.path(fig_path, "05-h2ewas") tab_p <- file.path(tab_path, "05-h2ewas") ## ---- h2ewas-model-comp -------------------------------- include_graphics(file.path(fig_p, "m2_model_comparison.pdf")) ## ---- h2ewas-pheno-qc -------------------------------- include_graphics(file.path(fig_p, "m2_data_cleaning.pdf")) ## ---- h2ewas-pheno-corr-all -------------------------------- include_graphics(file.path(fig_p, "test_correlation_plot_all.png")) ## ---- h2ewas-pheno-corr-subset -------------------------------- include_graphics(file.path(fig_p, "test_correlation_plot.png")) ## ---- h2ewas-study-design -------------------------------- include_graphics(file.path(fig_p, "m2_workflow.pdf")) ## ---- h2ewas-estimates -------------------------------- include_graphics(file.path(fig_p, "model_m2_comparison.pdf")) ## ---- h2ewas-sens -------------------------------- include_graphics(file.path(fig_p, "sens_boxplots.pdf")) ## ---- h2ewas-dmp-dist -------------------------------- include_graphics(file.path(fig_p, "FOM_hit_count_distribution.pdf")) ## ---- model-testing-setup -------------------------------- mod_tests <- read_tsv(file.path(tab_p, "m2_hits_model_testing.txt")) tidy_model_names <- tibble(model = c("ML-Pois", "NB", "Hurdle-NB", "Hurdle", "ZINB", "ZIP"), full_name = c("Poisson", "Negative binomial", "Hurdle-negative binomial", "Hurdle", "Zero-inflated negative binomial", "Zero-inflated 
Poisson")) tidy_column_names <- c("Model", "Log(likelihood)", "DF") mod_tests_out <- tidy_model_names %>% left_join(mod_tests) %>% dplyr::select(Model = full_name, `Log(likelihood)` = logLik, DF = Df) mod_tests_cap <- "Summary of how well models fit to test the association between $h^2_{EWAS}$ and the number of differentially methylated positions identified across 400 traits at P < 1x10$^{-5}$." ## ---- model-testing-tab -------------------------------- kable(mod_tests_out, format = "latex", caption = mod_tests_cap, booktabs = TRUE, escape = FALSE) %>% kable_styling(latex_options = c("striped", "HOLD_position", "scale_down")) %>% add_footnote(c("DF = degrees of freedom."), notation = "none") ## ---- dmps-and-h2ewas -------------------------------- include_graphics(file.path(fig_p, "m2_hit_count_scatter_p5_test.pdf")) ## ---- h2ewas-dmp-roc-curve -------------------------------- include_graphics(file.path(fig_p, "roc_plot.pdf")) <file_sep># PhD project meeting notes 2020-10-23 Attendees: <NAME>, Gib ## Agenda * [ewas-gwas comparison v2](#item1) * [Item2](#item2) * [AOB](#aob) ## ewas-gwas comparison v2 <a name="item1"></a> * How can I replace pathway in intro/discussion? -- feels weird to use pathway throughout and then in the main analyses use a database that isn't technically a pathway database... + Can use term "genesets"!!! -- probably should define how genesets are different from pathways and that pathways are made up of genesets * Justifying why confounding/reverse causation is reason that EWAS and GWAS may differ -- if all DNAm sites have a genetic component (???) then would a GWAS large enough capture genes captured by EWAS? + also is it enough in the intro to put differences in EWAS and GWAS likely mean EWAS results due to confounding/reverse causation and then explain other options in the discussion? + "extent to which this holds is discussed in more detail in the discussion" * Tom suggested filtering out big pathways as a sensitivity analyses but how?? 
-- looking at the data it seems... * In the paper I talk about overlap between "causal" and "non-causal" genes, but technically speaking a gene can't be causal and non-causal... + Use term associated set also!! * Don't understand one of Nic's comments (not mentioning mediation -- in discussion) -- that's the only part where I discuss the results of all GWAS vs. all GWAS vs. 7 EWAS. + Make it clear mediation is one mechanism by which this could occur * Tom mentioned "reverse causality" is dismissed as a bad thing... + [paper](https://pubmed.ncbi.nlm.nih.gov/32228717/) showing prediction longitudinally may be unlikely __NOTES:__ Some meeting notes ## Item2 <a name="item2"></a> Some pre-meeting notes __NOTES:__ Some meeting notes ## AOB <a name="aob"></a> * Going to ask Doug to present [this](https://www.biorxiv.org/content/10.1101/2020.08.24.265280v1?rss=1) at genetics JC. Thoughts? <file_sep># PhD project meeting notes 2020-09-04 Attendees: <NAME>, Gib ## Agenda * Properties of EWAS project * Viva * AOB ## Viva The suggested examiners sound good to me. Will get draft abstract prepped Start date for post-doc is Jan 4th so viva in December would be perfect if possible. __NOTES:__ ## properties of ewas project Little bit unsure about how to present the relationship between DNA methylation-trait associations and various cpg characteristics... __NOTES:__ * There may be an issue with low variance CpGs as the technical variation to true variation ratio increases so it's more likely variation is due to technical effects rather than actual effects! * make results section quite large by discussing stuff in the results section! - rationale, result, interpretation! ## AOB * Another thought on MR EvE stuff - think it's really worth emphasising what using a machine learning approach might add to using the manual approach. If I'm interested in using MR to estimate the effect of a small number of exposures on a small number of outcomes (i.e. most MR studies) then why would I not use the manual approach? 
<file_sep># A thesis attempt Thesis written using [thesisdown](https://github.com/ismayc/thesisdown). This was altered to comply with the University of Bristol's regulations by <NAME>: [bristolthesis](https://github.com/mattlee821/bristolthesis). Current version of the thesis can be found [here](index/_book/thesis.pdf) Plan for September/October: | Task | Comments needed | Comments needed from | Complete | | ------------------------------- |:---------------:|:--------------------:|:--------:| | Discussion V1 | N | | Y | | EWAS-GWAS comparisons V2 | N | | N | | Chapters integrated into thesis | N | | N | | Properties of EWAS V2 | N | | N | | Discussion V2 | Y | TG + NT | N | | Intro V3 | Y | TG | N | | DNAm-lung cancer MR V2 | N | TG + GH | N | ## Meetings Meeting notes and items can be found in [meetings](meetings) ## changes to formatting ### removing red boxes around cross-references Add `\usepackage[hidelinks]{hyperref}` to [template.tex](index/template.tex) -- this can replace `\usepackage{hyperref}` ### list of figures Within chunk header, use `fig.scap='small caption'`. In this example `small caption` will appear in the list of figures and the figure caption will remain as what was stated for the option `fig.cap` ### toc depth To manually change the toc depth, in [index.Rmd](index/index.Rmd), swap `thesisdown::thesis_pdf: default` for ` thesisdown::thesis_pdf: toc: true toc-depth: TOC-DEPTH-CHOICE ` -- remember indents (not sure how to show on readme) To make sure the depth of heading numbers matches the toc depth, add ` \setcounter{secnumdepth}{$toc-depth$}` to [template.tex](index/template.tex) below `\setcounter{tocdepth}{$toc-depth$}` -- around line 155. ## log of errors when building ### Error: Package inputenc Error: Unicode character (U+2003) (inputenc) not set up for use with LaTeX. __Extra details__: This occured when copying text over from a paper into a chapter. Unicode character (U+2003) is just whitespace. 
__Solution__: Just re-entered the text part by part and re-built the thesis each time to check it worked. ### Unable to get references for Word __Solution__: Only knit to Word + remove abstract, abbreviations, acknowledgements etc. from the yaml header. __NOTE__: knitting to pdf requires these to be present in the yaml header!
df80492428867e54ea20e8f005ad454680b4c96c
[ "Markdown", "R", "RMarkdown", "Shell" ]
71
Markdown
thomasbattram/thesis
6de0595b7eed13dc22d62fd8453da4cd0ae9c845
fd883027d3796cf61dffa62036d1d385a04e35e9
refs/heads/master
<file_sep>// QlikView extension "CVL/BasicTable": renders the object's hypercube data
// as a plain two-column HTML table, with row clicks forwarded back to
// QlikView as selections. The trailing `true` flag is passed to
// Qva.AddExtension -- consult the Qva API documentation for its exact meaning.
Qva.AddExtension('CVL/BasicTable', function() {
    // Create a variable to hold generated html
    var html = "<table>";
    // Local variable to hold the reference to QlikView
    // (needed because `this` is rebound inside the click handler below)
    var _this = this;
    // function to handle users clicking on a row
    // NOTE(review): this installs a single global handler on `window`; if two
    // instances of this extension exist on one sheet, the second instance
    // overwrites the first one's handler -- confirm whether that is intended.
    window.oncvlrowclick = function(rowIx) {
        _this.Data.SelectRow(rowIx);
    }
    // Cycle Through the data
    for (var i = 0; i < this.Data.Rows.length; i++) {
        // get the row
        var row = this.Data.Rows [i];
        // Generate html
        // NOTE(review): cell text is concatenated into HTML without escaping,
        // so values containing markup will be rendered as HTML.
        html += "<tr><td onclick='oncvlrowclick("+i+")'>" + row[0].text + "</td><td onclick='oncvlrowclick("+i+")'>" + row[1].text + "</td></tr>";
    }
    // Finalise the html
    html += "</table>";
    // Set the HTML
    this.Element.innerHTML = html;
},true);<file_sep>// Minimal QlikView extension: writes a static greeting into the
// extension's DOM element.
Qv.AddExtension("Hello", function () {
    // Set the extension object's inner Html
    this.Element.innerHTML = 'Hello World';
});
<file_sep>// QlikView extension "ET-HelloWorld": echoes four custom text properties
// (Layout.Text0 .. Layout.Text3) into the extension's DOM element so that
// property-panel bindings can be inspected.
Qv.AddExtension("ET-HelloWorld", function () {
    // Set the extension object's inner Html, by referencing several properties
    this.Element.innerHTML = 'Title: ' + this.Layout.Text0.text + '<br/>' +
        'Color Title: ' + this.Layout.Text1.text + '<br/>' +
        'Sub-Title:' + this.Layout.Text2.text + '<br/>' +
        'Show Sub-Title: ' +this.Layout.Text3.text;
});
<file_sep># BI Experiments ## Qlikview and MSSQL BI Stack scripts and Macros
b6d8fefdbe0aff0feac3055d288e11c3fc10729b
[ "JavaScript", "Markdown" ]
4
JavaScript
derruba2000/BI.experiments
a1de55beb877033ac7cdbb741e23ee0935fe62b0
739837af97531e2dae066384c11e4c6be254b091
refs/heads/master
<file_sep>#!/usr/bin/python3 import re import argparse argparser = argparse.ArgumentParser() argparser.add_argument('filename', nargs=1, help='file to search for acronyms') argparser.add_argument('-e', '--encoding', nargs=1, help='input file encoding (utf_8,latin_1,...)', default=['utf_8']) args = argparser.parse_args() filename = args.filename[0] encoding = args.encoding[0] # Regex for potential acronyms: capital anywhere after the first letter acrre = re.compile(r'[a-zA-Z0-9]+[A-Z][a-zA-Z0-9]*') # Find possible acronyms acronyms = set() with open(filename,'r', encoding=encoding) as f: for line in f: for acr in acrre.findall(line): acronyms.add(acr) # Turn into a sorted list templist = list(acronyms) templist.sort() # Remove redundant plurals acrlist = [] prev = '' for acr in templist: if acr != prev+'s': acrlist.append(acr) prev = acr # Output results for acr in acrlist: print(acr) <file_sep># acronyms Find potential acronyms in text. Example usage on command line: ``` pdftotext thesis.pdf thesis.txt python3 acronyms.py -e latin_1 thesis.txt ```
d11215d227df7218e368a24142497c1b976374b1
[ "Markdown", "Python" ]
2
Python
tuomaura/acronyms
2d7969a04ab8e40be40dc8a41b5a0ab7f1f2bc47
d640b5ff44eb276efc3135d67b423b4e1233f78a
refs/heads/master
<file_sep>Simple PNG decoder for Computer Graphics class. Language: - C++ Installation: - Just copy the 8 files into your project. Preparation: - Subclass the `cmps3120::png_receptor`. - Override the `set_header` method. That's how you will get notified of the width and height of the image. - Override the `set_pixel` method. That's how you will get the pixel values for the image. Usage: - Create an instance of `cmps3120::png_decoder`. - Set the receptor for the decoder by calling the `set_receptor` method. - Call `read_file` for easy PNG file reading, or use one of the `put` methods to send chunks of data directly. Sample code: ```cpp using namespace cmps3120; // Your image/texture class header: class image : public png_receptor { ... public: void set_header (png_header h); void set_pixel (unsigned int x, unsigned int y, unsigned int level, png_pixel v); } // Your image/texture class implementation void image::set_header(png_header h) { //set your width and height fields m_width = h.width; m_height = h.height; //Setup the space for the image. //You would replace this line with code that // creates an array or initializes a std::vector, // for example. allocate_space(m_width,m_height); } void image::set_pixel (unsigned int x, unsigned int y, unsigned int level, png_pixel v) { //Replace these lines with code that takes the pixel values // and stores them in the array created in set_header. // //Remember that the color values from v are 16-bit, i.e. range // 0 - 65535. unsigned int position = coordinates_to_index(x,y); m_array[position] = v.r; } // ... 
//The part of the code where you load the images image the_sprite; png_decoder the_decoder; png_error result; the_decoder.set_receptor( &the_sprite ); result = the_decoder.read_file("sprite.png"); if (result == PNG_DONE) std::cout << "The image loaded successfully."; else std::cout << "There was an error loading the image."; ``` <file_sep>////////////////////////////////////////////////////////////////////////////// // // --- shader.cpp --- // ////////////////////////////////////////////////////////////////////////////// #include "SourcePath.h" std::string source_path = "/Users/noahhendlish/Desktop/Fall 2018/CMPS/Intro to Computer Graphics/project/raytracer/raytracing"; <file_sep> #if !(defined CMPS3120_LABS_ZENC_H) #define CMPS3120_LABS_ZENC_H #include "./zss.h" namespace cmps3120 { class zenc_hash_line { private: unsigned int *m_dta; unsigned int m_length; zenc_hash_line(unsigned int *p, unsigned int s); friend class zenc_hash; public: /** * Destructor */ ~zenc_hash_line(void); /** * @return the first value on the line */ operator unsigned int&(void); /** * @return the first value on the line */ operator const unsigned int&(void) const; /** * @param j the position in the hash * @return the value at the given position on the line */ unsigned int& operator[](unsigned int j); /** * @param j the position in the hash * @return the value at the given position on the line */ const unsigned int& operator[](unsigned int j) const; /** * @return the length of the line */ unsigned int length(void) const; }; class zenc_hash_const_line { private: const unsigned int *m_dta; unsigned int m_length; zenc_hash_const_line(const unsigned int *p, unsigned int s); friend class zenc_hash; public: /** * Destructor */ ~zenc_hash_const_line(void); /** * @return the first value on the line */ operator const unsigned int&(void) const; /** * @param j the position in the hash * @return the value at the given position on the line */ const unsigned int& operator[](unsigned int j) const; /** * 
@return the length of the line */ unsigned int length(void) const; }; class zenc_hash { private: unsigned int m_size; unsigned int m_length; unsigned int *m_table; public: /** * Constructor */ zenc_hash(void); /** * Desctructor */ ~zenc_hash(void); /** * @return the size of the hash */ unsigned int get_size(void) const; /** * @return the number of disambiguation entries * at each hash value */ unsigned int get_entry_length(void) const; /** * Resize the hash table. * @param sz the desired size of the hash * @param lh the number of disambiguation entries * in the hash * @return whether the resize was successful */ bool resize(unsigned int sz, unsigned int lh = 4); /** * Add an entry to the hash table. * @param hash_base the value to use as the basis * for computing the hash * @param value the value to add to the hash */ void put_entry(unsigned int hash_base, unsigned int value); /** * Get a hash table entry. * @param i the hash table value to use for indexing * @param j the position in the hash * @return the value at the given coordinate */ unsigned int& at(unsigned int i, unsigned int j = 0); /** * Get a hash table entry. * @param i the hash table value to use for indexing * @param j the position in the hash * @return the value at the given coordinate */ const unsigned int& at(unsigned int i, unsigned int j = 0) const; /** * Get a hash table entry. * @param i the hash table value to use for indexing * @param j the position in the hash * @return the value at the given coordinate */ unsigned int& operator()(unsigned int i, unsigned int j); /** * Get a hash table entry. * @param i the hash table value to use for indexing * @param j the position in the hash * @return the value at the given coordinate */ const unsigned int& operator()(unsigned int i, unsigned int j) const; /** * Get a hash table entry. 
* @param i the hash table value to use for indexing * @return a reference to the most recent value at the given index */ zenc_hash_line operator[](unsigned int i); /** * Get a hash table entry. * @param i the hash table value to use for indexing * @return a reference to the most recent value at the given index */ zenc_hash_const_line operator[](unsigned int i) const; }; enum zenc_pair_stop_t { zenc_pair_stop }; class zenc_pair { private: unsigned int nvalue; unsigned int dvalue; unsigned short n_ext; unsigned int d_ext; public: /** * Constructor * @param x a single literal character */ zenc_pair(unsigned char x); /** * Constructor * @param l a byte length to be repeated * @param d the distance backward to look */ zenc_pair(unsigned int len, unsigned int dist); /** * Constructor for a stop code */ zenc_pair(enum zenc_pair_stop_t); /** @return the literal or length code */ unsigned int literal(void) const; /** @return the literal or length code */ unsigned int length(void) const; /** @return the length extension value */ unsigned int length_ext(void) const; /** @return the distance or length code */ unsigned int distance(void) const; /** @return the distance extension value */ unsigned int distance_ext(void) const; }; class zenc : public zss { public: zenc(void); ~zenc(void); void reset(void); private: unsigned int m_d_block_size; zss_buffer m_bytes_pend; bool m_last_block; bool m_has_blocks; unsigned long int m_shift_out; zss_header m_xhdr; int m_bithold, m_bitcount; unsigned int m_hashpos; zenc_hash m_hash; zss_error put_char(unsigned char x); zss_error put_eof(void); zss_error put_char_or_eof(int ); int gen_bits(int x); int output_block(void); int push_bit(int B); int push_resync(void); void next_hash(unsigned char x); /** * @param curs the position in the bytes-pending buffer * @return the number of bytes skipped, or zero if the * a skip code is not vailable */ unsigned int try_hash(unsigned int curs); /** * @param curs the current position in the input bytes 
pending buffer * @param pos the distance to go backward; 1 is the most recent * @return the byte at that position in hypothetical history, * or zero if not available */ unsigned char augmented_previous(unsigned int curs, unsigned int pos); /** * Push a code to the output stream. * @param ndv the length/distance pair to push to the output stream * @return a \link zss_error\endlink value */ int augmented_push_bit(const zenc_pair& ndv); }; }; #endif /*CMPS3120_LABS_ZENC_H*/ <file_sep> #include "./png.h" #include <stdlib.h> #include <stdexcept> #include <limits.h> #include <string.h> #include <stdio.h> #include <errno.h> #define pass (0) namespace cmps3120 { static unsigned long int png_crc_cache[256]; static bool png_crc_cache_tf = false; static void png_make_crc_cache(void); unsigned long int png_color_recast (unsigned long int nw, unsigned int bd, unsigned int ct, bool no_throw_tf) { int u = 1; if (bd == 16) { if (ct & PNG_HAS_PALETTE) { /* pass; */ } else { if (ct & PNG_HAS_COLOR) u+=2; if (ct & PNG_HAS_ALPHA) u++; } if (nw > (UINT_MAX/(2*u))) { errno = ERANGE; if (!no_throw_tf) throw std::range_error("cmps3120::png_color_recast"); return UINT_MAX; } else { return nw*2*u; } } else if (bd == 8) { if (ct & PNG_HAS_PALETTE) { pass; } else { if (ct & PNG_HAS_COLOR) u+=2; if (ct & PNG_HAS_ALPHA) u++; } if (nw > (UINT_MAX/u)) { errno = ERANGE; if (!no_throw_tf) throw std::range_error("cmps3120::png_color_recast"); return UINT_MAX; } else { return nw*u; } } else if ((ct & PNG_HAS_PALETTE) || (!ct)) { switch (bd) { case 1: { return (nw/8)+((nw&7) ? 1 : 0); }break; case 2: { return (nw/4)+((nw&3) ? 1 : 0); }break; case 4: { return (nw/2)+((nw&1) ? 
1 : 0); }break; default: { errno = EDOM; if (!no_throw_tf) throw std::domain_error("cmps3120::png_color_recast"); return 0; }break; } } if (!no_throw_tf) throw std::domain_error("cmps3120::png_color_recast"); errno = EDOM; return 0; } unsigned char png_paeth_predict (unsigned char left, unsigned char up, unsigned char corner) { int point = (int)left, xleft, xup, xcorner; point += (int)up; point -= (int)corner; xleft = abs(point-left); xup = abs(point-up); xcorner=abs(point-corner); if ((xleft<=xup) && (xleft<=xcorner)) return left; else if (xup<=xcorner) return up; else return corner; } png_header::png_header(void) : width(1), height(1), bit_depth(1), color_type(0), compression_type(0), filter_type(0), interlace_type(0) { } png_header::~png_header(void) { } bool png_header::is_supported(void) const { if (compression_type != 0) return false; if (filter_type != 0) return false; if (interlace_type > 1) return false; switch (color_type) { case 0: if ((bit_depth != 1) && (bit_depth != 2) && (bit_depth != 4) && (bit_depth != 8) && (bit_depth != 16)) return false; break; case 2: case 4: case 6: if ((bit_depth != 8) && (bit_depth != 16)) return false; break; case 3: if ((bit_depth != 1) && (bit_depth != 2) && (bit_depth != 4) && (bit_depth != 8)) return false; break; default: return false; } return true; } png_pixel::png_pixel() : r(0), g(0), b(0), a(0) {} png_pixel::png_pixel(const png_pixel& p) : r(p.r), g(p.g), b(p.b), a(p.a) {} png_pixel& png_pixel::operator=(const png_pixel& p) { r=p.r;g=p.g;b=p.b;a=p.a;return *this; } png_pixel::png_pixel(const png_color& p) : r(p.r*257), g(p.g*257), b(p.b*257), a(p.a*257) {} png_pixel& png_pixel::operator=(const png_color& p) { r=p.r*257;g=p.g*257;b=p.b*257;a=p.a*257;return *this; } png_pixel::operator png_color(void) const { png_color out; out.r = (r+128)>>8; out.g = (g+128)>>8; out.b = (b+128)>>8; out.a = (a+128)>>8; return out; } bool png_pixel::operator==(const png_pixel& other) { return (other.r == r)&&(other.g==g) 
&&(other.b==b)&&(other.a==a); } png_pixel png_pixel::down_cast(unsigned int n) const { png_pixel out; unsigned int i = 1; if (n < 16) { i = 65535/((1<<n)-1); } if (!i) i = 1; out.r = r/i; out.g = g/i; out.b = b/i; out.a = a/i; return out; } png_pixel png_pixel::up_cast(unsigned int n) const { png_pixel out; unsigned int i = 1; if (n < 16) { i = 65535/((1<<n)-1); } if (!i) i = 1; out.r = r*i; out.g = g*i; out.b = b*i; out.a = a*i; return out; } unsigned int png_pixel::gray(void) const { unsigned long int out = 0; out = r*5; out += g*9; out += b*2; return (unsigned int)(out >> 4); } void png_make_crc_cache(void) { unsigned long c; int n, k; for (n = 0; n < 256; n++) { c = (unsigned long) n; for (k = 0; k < 8; k++) { if (c & 1) c = 0xedb88320L ^ (c >> 1); else c = c >> 1; } png_crc_cache[n] = c; } png_crc_cache_tf = true; } png_checksum::png_checksum(unsigned long int l) : m_x(l^0xffffffff) { /*fprintf(stderr,"cache %s %08lx\n",(png_crc_cache_tf?"true":"false"), m_x );*/ if (!png_crc_cache_tf) png_make_crc_cache(); } png_checksum& png_checksum::add(unsigned char ch) { m_x = png_crc_cache[(m_x ^ ch) & 0xff] ^ (m_x >> 8); return *this; } png_checksum::operator unsigned long int(void) const { return m_x^0xffffffff; } png_palette::png_palette(void) : m_colors(NULL), m_count(0) { m_alpha.r=0; m_alpha.g=0; m_alpha.b=0; m_alpha.a=0; } png_palette::png_palette(const png_palette& other) : m_colors(NULL), m_count(0) { m_alpha = other.m_alpha; if (resize(other.m_count)) { if (m_count) memcpy(m_colors,other.m_colors,sizeof(png_color)*m_count); } } png_palette& png_palette::operator=(const png_palette& other) { m_alpha = other.m_alpha; if (resize(other.m_count)) { if (m_count) memcpy(m_colors,other.m_colors,sizeof(png_color)*m_count); } return *this; } png_palette::~png_palette(void) { if (m_colors != NULL) { free(m_colors); m_colors = NULL; } m_count = 0; } unsigned int png_palette::size(void) const { return m_count; } bool png_palette::resize(unsigned int l) { if (l == m_count) 
{ return true; } else if (!l) { if (m_colors != NULL) { free(m_colors); m_colors = NULL; } m_count = 0; return true; } else if (l <= ((UINT_MAX/sizeof(png_color))-16)) { png_color* nc = (png_color*)realloc(m_colors,sizeof(png_color)*l); if (nc != NULL) { m_colors = nc; for (; m_count < l; m_count++) { m_colors[m_count].r = 0; m_colors[m_count].g = 0; m_colors[m_count].b = 0; m_colors[m_count].a = 255; } m_count = l; return true; } else return false; } else return false; } png_color& png_palette::operator[](unsigned int i) { if (i >= m_count) throw std::out_of_range("png_palette::operator[]"); return m_colors[i]; } const png_color& png_palette::operator[](unsigned int i) const { if (i >= m_count) throw std::out_of_range("png_palette::operator[]"); return m_colors[i]; } png_pixel& png_palette::get_transparent_pixel(void){ return m_alpha; } const png_pixel& png_palette::get_transparent_pixel(void) const { return m_alpha; } int png_palette::closest_match(png_pixel v, int siz) { unsigned int i, l = ((siz >= 0) ? 
siz : m_count); int j = -1; double lsq = 0; if (l > m_count) l = m_count; if (m_colors == NULL) return -1; for (i = 0; i < l; i++) { double nu, nv; png_pixel p = m_colors[i]; nu = v.r-p.r; nv = nu*nu; nu = v.g-p.g; nv+= nu*nu; nu = v.b-p.b; nv+= nu*nu; nu = v.a-p.a; nv+= nu*nu; if (nv == 0) return i; else if (nv < lsq || (!i)) { lsq = nv; j = i; } } return j; } png_buffer::png_buffer(void) : m_dta(NULL), m_siz(0) { } png_buffer::png_buffer(const png_buffer& b) : m_dta(NULL), m_siz(0) { if (b.m_siz && (b.m_dta != NULL)) { unsigned char* nc = (unsigned char*)calloc (b.m_siz,sizeof(unsigned char)); if (nc != NULL) { memcpy(nc,b.m_dta,b.m_siz*sizeof(unsigned char)); m_dta = nc; m_siz = b.m_siz; } } } png_buffer& png_buffer::operator=(const png_buffer& b) { if (m_dta != NULL) { free(m_dta); m_dta = NULL; } m_siz = 0; if (b.m_siz && (b.m_dta != NULL)) { unsigned char* nc = (unsigned char*)calloc (b.m_siz,sizeof(unsigned char)); if (nc != NULL) { memcpy(nc,b.m_dta,b.m_siz*sizeof(unsigned char)); m_dta = nc; m_siz = b.m_siz; } } return *this; } png_buffer::~png_buffer(void) { if (m_dta != NULL) free(m_dta); m_dta = NULL; m_siz = 0; } unsigned char* png_buffer::data(void){ return m_dta; } const unsigned char* png_buffer::data(void) const{ return m_dta; } unsigned int png_buffer::size(void) const{ return m_siz; } bool png_buffer::resize(unsigned int s) { unsigned char *nc; if (s) { if (m_dta != NULL) { nc = (unsigned char*)realloc(m_dta,s*sizeof(unsigned char)); if (nc != NULL) { m_dta = nc; if (s > m_siz) { memset(m_dta+m_siz,0,sizeof(unsigned char)*(s-m_siz)); } m_siz = s; } else { return false; } } else { nc = (unsigned char*)calloc(s,sizeof(unsigned char)); if (nc != NULL) { m_dta = nc; m_siz = s; } else { return false; } } } else { if (m_dta != NULL) free(m_dta); m_dta = NULL; m_siz = 0; } return true; } unsigned char& png_buffer::operator[](unsigned int i) { return *(m_dta+i); } const unsigned char& png_buffer::operator[](unsigned int i) const { return *(m_dta+i); } 
bool png_buffer::append(int C) { unsigned int xs = m_siz; if (xs >= ((UINT_MAX-1)/sizeof(unsigned char))) { return false; } if (!resize(xs+1)) return false; *(m_dta+xs) = (unsigned char)(C&255); return true; } png_adam7_data::png_adam7_data(int level) { if ((level >= 1) && (level <= 7)) { w = (1<<((7-level)>>1)); h = (1<<((8-level)>>1)); xoff = ((level&1) ? 0 : (1<<((6-level)>>1))); yoff = (((level&1)&&(level^1)) ? (1<<((7-level)>>1)) : 0); } else { w=1; h=1; xoff=0; yoff=0; } /*fprintf(stderr,"level %i [w %u h %u x %u y %u]\n",level, w,h,xoff,yoff);*/ l=level; } int png_adam7_data::get_level(void) const{ return l;} unsigned int png_adam7_data::compute_width(unsigned int inw) { unsigned int outw = (inw+(w-1))/(xoff+w); /* */ //fprintf(stderr,"level %i cw < %u > %u\n",l,inw,outw); return outw; } unsigned int png_adam7_data::compute_height(unsigned int inh) { unsigned int outh = (inh+(h-1))/(yoff+h); /* */ //fprintf(stderr,"level %i ch < %u > %u\n",l,inh,outh); return outh; } unsigned int png_adam7_data::real_x(unsigned int ix) { unsigned int ox = ix*(xoff+w) + xoff; //fprintf(stderr,"level %i rx < %u > %u\n",l,ix,ox); return ox; } unsigned int png_adam7_data::real_y(unsigned int iy) { unsigned int oy = iy*(yoff+h) + yoff; //fprintf(stderr,"level %i ry < %u > %u\n",l,iy,oy); return oy; } /* png_receptor */ png_receptor::~png_receptor(void) { } png_header png_receptor::get_header(void){ return png_header(); } void png_receptor::set_header(png_header h){ /*pass*/return; } png_pixel png_receptor::get_pixel(unsigned int x, unsigned int y, unsigned int level) { return png_pixel(); } void png_receptor::set_pixel (unsigned int x, unsigned int y, unsigned int level, png_pixel v) { /*pass*/return; } png_base::png_base(void) : m_xerr(0), m_receptor(NULL) { reset(); } png_base::~png_base(void) { } void png_base::reset(void) { m_xerr = 0; m_crc = png_checksum(); readmode = 0; submode = 0;sidemode = 0; xpect_nothing = 0; xlong = 0; xcharc=0; xpect_chunks = 0; xdiv_width = 0; 
xdiv_height = 0; xdiv_xpos = 0; xdiv_ypos = 0; xdiv_index=0; filter_typ = 0; filter_dist =0; filter_backlog.resize(0); memset(&filter_log[0],0,8*sizeof(unsigned char)); reset_sub(); } png_error png_base::get_error(void) const{ return (png_error)m_xerr; } png_error png_base::put(const png_buffer& dta, unsigned int *pos) { return put(dta.data(), dta.size(), pos); } png_error png_base::put (const unsigned char* dta, unsigned int siz, unsigned int *pos) { int xout = m_xerr; unsigned int i; for (i = 0; (!xout) && (i < siz); i++, (xout?0:xcharc++)) { xout = put_char(dta[i]); } if (pos) { *pos = i; } m_xerr = xout; return (png_error)xout; } png_header& png_base::get_header(void) { return m_header; } const png_header& png_base::get_header(void) const { return m_header; } png_palette& png_base::get_palette(void){ return m_palette; } const png_palette& png_base::get_palette(void) const{ return m_palette; } png_adam7_data& png_base::get_interlace_data(void){ return m_idata; } const png_adam7_data& png_base::get_interlace_data(void) const{ return m_idata; } png_receptor* png_base::get_receptor(void) const { return m_receptor; } void png_base::set_receptor(png_receptor* r) { m_receptor = r; } unsigned long int png_base::get_checksum(void) const { return m_crc; } void png_base::reset_checksum(void) { m_crc = png_checksum(); } unsigned long int png_base::get_char_count(void) const { return xcharc; } void png_base::reset_sub(void) { pass; } void png_base::put_previous(unsigned char ch) { m_crc.add(ch); } void png_base::set_error(png_error v) { m_xerr = v; } png_error png_base::put_char(unsigned char) { return PNG_UNDEFINED; } png_error png_base::get(png_buffer& dta, unsigned int *pos) { return get(dta.data(), dta.size(), pos); } png_error png_base::get (unsigned char* dta, unsigned int siz, unsigned int *pos) { int xout = m_xerr; unsigned int i; for (i = 0; (!xout) && (i < siz); (xout?0:i++), (xout?0:xcharc++)) { xout = get_byte(dta[i]); } if (pos) { *pos = i; } m_xerr = xout; 
return (png_error)xout; } png_error png_base::get_byte(unsigned char& y) { return PNG_UNDEFINED; } }; <file_sep> #include "./zenc.h" #include <stdlib.h> #include <limits.h> #include <string.h> namespace cmps3120 { zenc_hash_line::zenc_hash_line(unsigned int *p, unsigned int s) : m_dta(p), m_length(s) { } zenc_hash_line::~zenc_hash_line(void) { m_dta = NULL; m_length = 0; } zenc_hash_line::operator unsigned int&(void) { return *m_dta; } zenc_hash_line::operator const unsigned int&(void) const { return *m_dta; } unsigned int& zenc_hash_line::operator[](unsigned int j) { return m_dta[j]; } const unsigned int& zenc_hash_line::operator[](unsigned int j) const { return m_dta[j]; } unsigned int zenc_hash_line::length(void) const { return m_length; } zenc_hash_const_line::zenc_hash_const_line (const unsigned int *p, unsigned int s) : m_dta(p), m_length(s) { } zenc_hash_const_line::~zenc_hash_const_line(void) { m_dta = NULL; m_length = 0; } zenc_hash_const_line::operator const unsigned int&(void) const { return *m_dta; } const unsigned int& zenc_hash_const_line::operator[](unsigned int j) const { return m_dta[j]; } unsigned int zenc_hash_const_line::length(void) const { return m_length; } zenc_hash::zenc_hash(void) : m_size(0), m_length(0), m_table(NULL) { } zenc_hash::~zenc_hash(void) { if (m_table != NULL) { free(m_table); m_table = NULL; } m_size = 0; m_length = 0; } unsigned int zenc_hash::get_size(void) const { return m_size; } unsigned int zenc_hash::get_entry_length(void) const { return m_length; } bool zenc_hash::resize(unsigned int sz, unsigned int lh) { unsigned int xl; if (!lh) lh = 1; if (sz < ((UINT_MAX/sizeof(unsigned int))/lh)) { xl = sz*lh; if (xl == m_size*m_length) { m_size = sz; m_length = lh; } else if (xl) { unsigned int *ptr = (unsigned int*)calloc (xl,sizeof(unsigned int)); if (ptr != NULL) { if (m_table != NULL) { free(m_table); m_table = NULL; } m_table = ptr; m_size = sz; m_length = lh; } else { return false; } } else { if (m_table != NULL) { 
free(m_table); m_table = NULL; } m_table = NULL; m_size = sz; m_length = lh; } } return true; } void zenc_hash::put_entry(unsigned int hash_base, unsigned int value) { if ((m_table != NULL) && (m_size > 0) && (m_length > 0)) { if (m_length > 1) { memmove(&at(hash_base,1),&at(hash_base,0), sizeof(unsigned int)*(m_length-1)); } at(hash_base) = value; } } unsigned int& zenc_hash::at(unsigned int i, unsigned int j) { if (m_size) i %= m_size; if ((i >= m_size) || (j >= m_length)) return *(unsigned int*)NULL; return m_table[i*m_length+j]; } const unsigned int& zenc_hash::at(unsigned int i, unsigned int j) const { if (m_size) i %= m_size; if ((i >= m_size) || (j >= m_length)) return *(unsigned int*)NULL; return m_table[i*m_length+j]; } unsigned int& zenc_hash::operator()(unsigned int i, unsigned int j) { return at(i,j); } const unsigned int& zenc_hash::operator() (unsigned int i, unsigned int j) const { return at(i,j); } zenc_hash_line zenc_hash::operator[](unsigned int i) { return zenc_hash_line(&at(i), m_length); } zenc_hash_const_line zenc_hash::operator[] (unsigned int i) const { return zenc_hash_const_line(&at(i), m_length); } zenc_pair::zenc_pair(unsigned char x) : nvalue(x&255), dvalue(0), n_ext(0), d_ext(0) {} zenc_pair::zenc_pair(unsigned int len, unsigned int dist) : nvalue(257), dvalue(0), n_ext(0), d_ext(0) { if ((len < 3) || (len > 258) || (!dist) || (dist > 32768)) { nvalue = 286; dvalue = 32; } else { if (len >= 3 && len <= 10) nvalue = 257+(len-3); else if (len >= 11 && len <= 18) { len -= 11; nvalue = 265+(len>>1); n_ext = len&1; } else if (len >= 19 && len <= 34) { len -= 19; nvalue = 269+(len>>2); n_ext = len&3; } else if (len >= 35 && len <= 66) { len -= 35; nvalue = 273+(len>>3); n_ext = len&7; } else if (len >= 67 && len <= 130) { len -= 67; nvalue = 277+(len>>4); n_ext = len&15; } else if (len >= 131 && len <= 257) { len -= 131; nvalue = 281+(len>>5); n_ext = len&31; } else if (len == 258) { nvalue = 285; } /* distances */ d_ext = dist-1; for (len = 
0; d_ext && len < 32; len++, d_ext>>=1) ; if (len < 3) { dvalue = dist-1; d_ext = 0; } else { dvalue = (len<<1)+(((dist-1)>>(len-2))&1)-2; d_ext = (dist-1)&((1<<(len-2))-1); } } } zenc_pair::zenc_pair(enum zenc_pair_stop_t) : nvalue(256), dvalue(0), n_ext(0), d_ext(0) {} unsigned int zenc_pair::literal(void) const { return nvalue; } unsigned int zenc_pair::length(void) const { return nvalue; } unsigned int zenc_pair::length_ext(void) const { return n_ext; } unsigned int zenc_pair::distance(void) const { return dvalue; } unsigned int zenc_pair::distance_ext(void) const { return d_ext; } zenc::zenc(void) : m_d_block_size(1024) { reset(); } zenc::~zenc(void) { } void zenc::reset(void) { reset_sub(); m_last_block = false; m_has_blocks = false; m_shift_out = 0; m_bytes_pend.resize(0); m_hash.resize(0,0); m_hash.resize(256,4); m_hashpos = 0; m_bithold = 0; m_bitcount = 0; } zss_error zenc::put_char(unsigned char x) { return put_char_or_eof(x&255); } zss_error zenc::put_eof(void) { return put_char_or_eof(-1); } zss_error zenc::put_char_or_eof(int x) { int out; bool need_redo; do { out = ZSS_UNDEFINED; need_redo = false; switch (readmode) { case 0: { out = 0; if (xlong == 0) { bool ok = setup_window(); if (!ok) { out = ZSS_MEMORY; } else { m_xhdr = get_header(); m_xhdr.set_check(); m_shift_out = m_xhdr; } } if (xlong < 2) { if (!append_no_history ((m_shift_out>>((1-xlong)<<3))&255) ) out = ZSS_MEMORY; xlong++; } if (xlong >= 2) { if (m_xhdr.fdict) readmode = 1; else readmode = 2; xlong = 0; } need_redo = true; }break; case 1: /* dictionary */ { out = 0; if (xlong < 4) { if (!append_no_history ((get_dict_checksum()>>((3-xlong)<<3))&255) ) out = ZSS_MEMORY; xlong++; } if (xlong >= 4) { readmode = 2; xlong = 0; } need_redo = true; }break; case 3: /* checksum */ { out = 0; if (xlong < 4) { if (!append_no_history ((get_checksum()>>((3-xlong)<<3))&255) ) out = ZSS_MEMORY; xlong++; } if (xlong >= 4) { readmode = 4; out = ZSS_DONE; } else{ need_redo = true; } }break; case 4: /* 
end of stream */ { out = ZSS_DONE; }break; case 2: { out = gen_bits(x); if (out == ZSS_DONE) { readmode = 3; out = 0; push_resync(); need_redo = true; } }break; default: /*out = ZSS_UNDEFINED;*/ break; } } while ((!out) && (need_redo)); return (zss_error) out; } int zenc::gen_bits(int x) { int out = 0; if (x >= 0) { x &= 255; if (m_bytes_pend.append(x) ) { if (m_bytes_pend.size() > m_d_block_size) { out = output_block(); } } else { out = ZSS_MEMORY; } } else { /* end of data */ m_last_block = true; out = output_block(); } return out; } int zenc::output_block(void) { int outmode, putmode = get_header().flevel; switch (putmode) { case 0: /* direct output */ { unsigned int i; outmode = push_bit(m_last_block?1:0); if (!outmode) outmode = push_bit(0); if (!outmode) outmode = push_bit(0); push_resync(); unsigned long int xlen, xnlen, xpos = 0; xlen = m_bytes_pend.size(); xnlen = ~xlen; do { if (xlen > 65535) xlen = 65535; xnlen = ~xlen; append_no_history((xlen )&255); append_no_history((xlen >>8)&255); append_no_history((xnlen )&255); append_no_history((xnlen>>8)&255); for (i = 0; (i < m_bytes_pend.size())&&(!outmode); i++) { if (!append_no_history(m_bytes_pend[i])) outmode = ZSS_MEMORY; next_hash(m_bytes_pend[i]); } if (xpos >= (m_bytes_pend.size()-xlen)) { xlen = 0; xpos = m_bytes_pend.size(); } else { xpos += xlen; xlen = m_bytes_pend.size()-xpos; } } while(xlen); m_has_blocks = true; if (m_last_block) outmode = ZSS_DONE; m_bytes_pend.resize(0); }break; case 1: /* fixed Huffman codes */ case 2: /* TODO dynamic Huffman codes, 7-9 */ case 3: /* TODO dynamic Huffman codes, 4-10, with removal */ { unsigned int i, l; outmode = push_bit(m_last_block?1:0); if (!outmode) outmode = push_bit(1); if (!outmode) outmode = push_bit(0); if (!outmode) { set_codes(zss_huffs::for_fixed()); set_distances(zss_huffs::for_distance()); get_codes().sort_by_value(); get_distances().sort_by_value(); for (i = 0; (!outmode) && (i < m_bytes_pend.size()); i++) { l = try_hash(i); if (l==1) { 
outmode = get_error(); } else if (l) { i += l-1; } else { outmode = augmented_push_bit(m_bytes_pend[i]); if (!outmode) next_hash(m_bytes_pend[i]); } } augmented_push_bit(zenc_pair_stop); } m_has_blocks = true; if (m_last_block) outmode = ZSS_DONE; m_bytes_pend.resize(0); }break; } return outmode; } int zenc::push_bit(int x) { m_bithold = (m_bithold)|((x?1:0)<<m_bitcount); m_bitcount++; if (m_bitcount >= 8) { if (!append_no_history(m_bithold&255)) return ZSS_MEMORY; m_bithold = 0; m_bitcount = 0; } return 0; } int zenc::push_resync(void) { if (m_bitcount) { if (!append_no_history(m_bithold&255)) return ZSS_MEMORY; m_bithold = 0; m_bitcount = 0; } m_bithold = 0; return 0; } void zenc::next_hash(unsigned char x) { put_previous(x); if (get_history_length() >= 3) { unsigned int accum; accum = get_previous(1); accum += get_previous(2); accum += get_previous(3); m_hash.put_entry(accum,m_hashpos++); } } unsigned int zenc::try_hash(unsigned int curs) { if ((curs >= 3) && (m_bytes_pend.size() >= 6) && (curs < m_bytes_pend.size()-3)) { unsigned int j, k, back, relback; unsigned int accum; accum = m_bytes_pend[curs]&255; accum += m_bytes_pend[curs+1]&255; accum += m_bytes_pend[curs+2]&255; for (j = 0; j < m_hash[accum].length(); j++) { back = m_hash[accum][j]; /* confirm the usability of the history position */ if (back > m_hashpos-3) continue; relback = m_hashpos-back; if (relback > get_history_length()) continue; if (get_previous(relback) != m_bytes_pend[curs]) continue; if (get_previous(relback-1) != m_bytes_pend[curs+1]) continue; if (get_previous(relback-2) != m_bytes_pend[curs+2]) continue; /* try to go as far as possible */ k = 3; next_hash(m_bytes_pend[curs++]); next_hash(m_bytes_pend[curs++]); next_hash(m_bytes_pend[curs++]); for (; k < 258 && curs < m_bytes_pend.size(); curs++) { if (get_previous(relback) == m_bytes_pend[curs]) { next_hash(m_bytes_pend[curs]); k++; } else break; } augmented_push_bit(zenc_pair(k,relback)); return k; } } return 0; } unsigned char 
zenc::augmented_previous (unsigned int curs, unsigned int pos) { if (pos <= curs) { return m_bytes_pend[curs-pos]; } else { return get_previous(pos-curs); } } int zenc::augmented_push_bit(const zenc_pair& ndv) { int out = 0; unsigned int i, l; if ((ndv.literal() >= 0) && (ndv.literal() <= 256)) { zss_huff_pair hp = get_codes().get_bits(ndv.literal()); if (hp.len) { while (hp.len--) { out = push_bit((hp.bits&(1<<hp.len))?1:0); } } else { out = ZSS_DATA_ERROR; } } else if ((ndv.literal() >= 257) && (ndv.literal() <= 285)) { zss_huff_pair hp; hp = get_codes().get_bits(ndv.length()); if (hp.len) { while (hp.len--) { out = push_bit((hp.bits&(1<<hp.len))?1:0); } } else { out = ZSS_DATA_ERROR; } if (!out) { if (ndv.length() < 261) { l = 0; } else if (ndv.length() == 285) { l = 0; } else { l = (ndv.length()-261)>>2; } for (i = 0; (!out) && i < l; i++) { out = push_bit((ndv.length_ext()&(1<<i))?1:0); } } if (!out) { hp = get_distances().get_bits(ndv.distance()); if (hp.len) { while (hp.len--) { out = push_bit((hp.bits&(1<<hp.len))?1:0); } } else { out = ZSS_DATA_ERROR; } } if (!out) { if (ndv.distance() < 2) { l = 0; } else { l = (ndv.distance()-2)>>1; } for (i = 0; (!out) && i < l; i++) { out = push_bit((ndv.distance_ext()&(1<<i))?1:0); } } } else { out = ZSS_DATA_ERROR; } return out; } }; <file_sep> #if !(defined CMPS3120_LABS_PNGENC) #define CMPS3120_LABS_PNGENC #include "./png.h" #include "./zenc.h" namespace cmps3120 { /** * \brief pseudo-random number generator */ class png_enc_random { private: mutable unsigned int m_v; public: /** * Constructor * @param v a seed value */ png_enc_random(unsigned int v = 1); /** * Copy constructor */ png_enc_random(const png_enc_random& ); /** * Destructor */ ~png_enc_random(void); /** * @return the next random number value */ operator unsigned int(void) const; /** * Set the seed. 
* @param v the new seed value */ png_enc_random& operator=(unsigned int v); /** * Assignment */ png_enc_random& operator=(const png_enc_random& ); }; /** * Portable Network Graphics image encoder */ class png_encoder : public png_base { private: zenc xenc; unsigned char xbuf[16]; unsigned int m_max; png_enc_random m_rand; png_buffer m_pend; unsigned int m_pendpos; png_pixel m_tmptrns; unsigned int m_d_chunk_size; public: /** * Constructor */ png_encoder(void); /** * Destructor */ ~png_encoder(void); /** * @return the maximum dimension allowed */ unsigned int get_max_dimension(void) const; /** * Set the maximum dimension allowed * @param x the new maximum */ void set_max_dimension(unsigned int x); /** * Write a file. * @param name the name of the PNG file to write * @return the last error code. If the write was successful, * PNG_DONE is returned. * @note don't forget to set the png_receptor first */ png_error write_file(const char* name); protected: void reset_sub(void); png_error get_byte(unsigned char&); int generate_sample(void); }; }; #endif /*CMPS3120_PNGENC*/ <file_sep>////////////////////////////////////////////////////////////////////////////// // // --- SourcePath.h --- // Created by <NAME> // ////////////////////////////////////////////////////////////////////////////// #ifndef __SOURCEPATH_H__ #define __SOURCEPATH_H__ #include <string> extern std::string source_path; #endif // __SOURCEPATH_H__ <file_sep> #include "./zdec.h" #include <stdlib.h> #include <stdio.h> namespace cmps3120 { static const char zdec_dyn_codes[] = { 16, 17, 18, 0, 8, 7, 9, 6, 10, 5, 11, 4, 12, 3, 13, 2, 14, 1, 15 }; zdec::zdec(void) : zss() { } zdec::~zdec(void) { } void zdec::reset(void) { reset_sub(); bit_pos = 0; last_block = false; } zss_error zdec::put_eof(void) { if (readmode == 9) { return ZSS_DONE; } else { return ZSS_MISSING_CHECKSUM; } } int zdec::compute_fetch(int rval) { if ((rval < 257) || (rval > 285)) return ZSS_DATA_FETCH; if (rval < 265) { num_fetch = rval-254; ti = 
0; } else if (rval < 285) { rval -= 261; ti = (rval>>2); num_fetch = 3+((4+(rval&3))<<ti); } else /*if (rval == 285)*/ { num_fetch = 258; ti = 0; } return 0; } int zdec::compute_back_long(int rval) { if ((rval < 0) || (rval > 29)) return ZSS_DATA_BACK; if (rval < 4) { back_long = rval+1; ti = 0; } else if (rval < 30) { rval -= 2; ti = (rval>>1); back_long = 1+((2+(rval&1))<<ti); } return 0; } zss_error zdec::put_char(unsigned char x) { unsigned int out = 0; /* * states: * 0 - header * 1 - dict * 2 - other decompression methods * 3 - checksum * 4 - DEFLATE : block id * 5 - direct - block length * 6 - direct - one's complement block length * 7 - direct - data * 8 - fixed - data * 9 - done */ switch (readmode) { case 0: { if (submode < 2) { xlong <<= 8; xlong |= x&255; submode++; } if (submode >= 2) { zss_header nhdr(xlong); if (!nhdr.is_valid()) { return ZSS_BAD_HEADER; } set_header(nhdr); if (nhdr.fdict) readmode = 1; else if (nhdr.xmethod == 8) { readmode = 4; if (!setup_window() ) { out = ZSS_MEMORY; } } else readmode = 2; submode = 0; } }break; case 1: { if (submode < 4) { xpect_dict_adler <<= 8; xpect_dict_adler |= x&255; submode++; } if (submode >= 4) { if ((!is_dict_set()) || (get_dict_checksum() != xpect_dict_adler)) { return ZSS_DICTIONARY; } else if (get_header().xmethod == 8) { readmode = 4; if (!setup_window() ) { out = ZSS_MEMORY; } } else readmode = 2; submode = 0; } }break; case 3: { /*fprintf(stderr,"*%08lxv%08lx*\n", xlong,(unsigned long)get_checksum());*/ if (si < 4) { xlong = (xlong<<8)|(x&255); si++; } if (si >= 4) { if (xlong ^ (unsigned long)get_checksum()) { out = ZSS_CHECKSUM; } else { readmode = 9; out = ZSS_DONE; } } }break; case 4: case 8: case 10: { int i; for (i = 0; (i < 8)&&(!out); i++) { bit_pos = i; out = process_bit((x&(1<<i))?1:0); } }break; case 5: { /* byte direct, length */ if (si < 2) { xlong |= ((x&255)<<(si*8)); si++; } if (si >= 2) { back_long = xlong; si = 0; readmode = 6; xlong = 0; } }break; case 6: { /* byte direct, 
negative length */ if (si < 2) { xlong |= ((x&255)<<(si*8)); si++; } if (si >= 2) { if ((back_long&=65535) ^ ((~xlong)&65535)) out = ZSS_BLOCK_ERROR; else { si = 0; readmode = 7; xlong = 0; if (!back_long) { readmode = 4; submode = 0; } } } }break; case 7: { /* byte direct, data */ if (back_long) back_long--; append(x&255); if (!back_long) { if (last_block) { readmode = 3; submode = 4; si = 0; xlong = 0; } else { readmode = 4; submode = 0; } } }break; case 9: { /* do nothing */ }break; default: return 2; } return out; } zss_error zdec::process_bit(int b) { int out = 0; switch (submode) { case 0: { last_block = b?true:false; xlong = 0; si = 0; submode = 1; }break; case 1: { if (si < 2) { xlong |= (b?1:0)<<(si++); } if (si >= 2) { switch (xlong) { case 0: set_codes(zss_huffs::for_direct()); set_distances(zss_huffs()); submode = 2; readmode = 5; si = 0; break; case 1: set_codes(zss_huffs::for_fixed()); set_distances(zss_huffs::for_distance()); readmode = 8; submode = 3; si = 0; xlong = 0; break; case 2: set_codes(zss_huffs()); set_distances(zss_huffs()); readmode = 10; submode = 10; si = 0; xlong = 0; ti = 2; break; case 3: out = ZSS_BLOCK_ERROR; break; } } }break; case 2: { /* vsync to direct */ if (bit_pos) break; readmode = 5; si = 0; } case 4: { /* vsync to checksum */ if (bit_pos) break; readmode = 3; si = 0; } case 3: case 13: { /* fixed - data */ /* dynamic - data */ int rval; if (si <= 32) { xlong = (xlong<<1)|(b?1:0); si++; /*fprintf(stderr,"(%lo %i)",xlong,si);*/ rval = get_codes().get_value(xlong,si); if (rval >= 0) { /*fprintf(stderr,"<%i %lx:%i>",rval,get_char_count(), bit_pos);*/ if (rval == 256) { /*fprintf(stderr,"*%lo-(%08lx:%i)>%08lx*\n", xlong,get_char_count(),bit_pos, (unsigned long)get_checksum());*/ if (last_block) { readmode = 3; submode = 4; si = 0; xlong = 0; } else { readmode = 4; submode = 0; } } else if (rval > 256){ /* do fancy backtrack stuff */ out = compute_fetch(rval); si = 0; xlong = 0; if (ti > 0) submode = 5; else submode = 6; } 
else { append(rval&255); } xlong = 0; si = 0; } } if (si > 32) { out = ZSS_DATA_ERROR; } }break; case 5: case 15: { /* fixed - data size (extra) */ if (si < ti) { xlong |= (b?1:0)<<(si++); } if (si >= ti) { num_fetch += xlong; submode++; si = 0; xlong = 0; } }break; case 6: case 16: { /* fixed - distance */ int rval; if (si <= 32) { xlong = (xlong<<1)|(b?1:0); si++; /*fprintf(stderr,"(~%lo %i)",xlong,si);*/ rval = get_distances().get_value(xlong,si); if (rval >= 0) { /*fprintf(stderr,"<~%i>",rval);*/ if ((rval >= 0) && (rval <= 29)) { out = compute_back_long(rval); if (ti > 0) submode ++; else submode += 2; } else out = ZSS_DATA_ERROR; xlong = 0; si = 0; } } if (si > 32) { out = ZSS_DATA_ERROR; } }if ((submode != 8) && (submode != 18)) break; case 7: case 17: if ((submode == 7) || (submode == 17)) { /* fixed - distance (extra) */ if (si < ti) { xlong |= (b?1:0)<<(si++); } if (si >= ti) { back_long += xlong; submode++; si = 0; xlong = 0; } else break; } case 8: case 18: { do { unsigned char cc = get_previous(back_long); if (!append(cc&255)) { out = ZSS_OVERFLOW; break; } } while (--num_fetch); if (!num_fetch) { submode -= 5; } }break; case 10: { /* dynamic - header */ if (si < (ti?5:4)) { xlong |= (b?1:0)<<(si++); } if (si >= (ti?5:4)) { /*fprintf(stderr,"*%lo %lu*",xlong,xlong);*/ switch (ti) { case 2: hlit = xlong+257; break; case 1: hdist = xlong+1; break; case 0: hclen = xlong+4; break; } ti--; xlong = 0; si = 0; } if (ti<0) { ti=0;si=0;xlong=0;submode=11; set_extended_codes(zss_huffs()); /*fprintf(stderr,"@(%i,%i,%i)",hclen,hlit,hdist);*/ } }break; case 11: { /* dynamic - extended codes */ if (si < 3) { xlong |= (b?1:0)<<(si++); } if (si >= 3) { get_extended_codes().append_code(xlong,zdec_dyn_codes[ti]); ti++; si = 0; xlong = 0; } if (ti >= hclen) { get_extended_codes().sort_by_value(); bool is_made = get_extended_codes().sort_and_make(); /*fprintf(stderr,"%%%s%%", (is_made?"true":"false"));*/ if (!is_made) out = ZSS_EX_DATA_ERROR; submode = 12; ti = 0; 
num_fetch = 0; set_codes(zss_huffs()); } }break; case 12: { /* dynamic - length/distance extension */ int rval; if (si <= 32) { xlong = (xlong<<1)|(b?1:0); si++; /*fprintf(stderr,"(`%lo %i)",xlong,si);*/ rval = get_extended_codes().get_value(xlong,si); if (rval >= 0) { /*fprintf(stderr,"<`%i>",rval);*/ if (rval == 16) { /* do fancy backtrack stuff */ submode = 14; si = 0; xlong = 0; back_long = 2; /* num_fetch = num_fetch; */ } else if (rval == 17) { /* do fancy backtrack stuff */ submode = 14; si = 0; xlong = 0; num_fetch = 0; back_long = 3; } else if (rval == 18) { /* do fancy backtrack stuff */ submode = 14; si = 0; xlong = 0; num_fetch = 0; back_long = 7; } else { if (ti < hlit) get_codes().append_code(rval,ti); else if ((ti-hlit) < hdist) get_distances().append_code(rval,ti-hlit); num_fetch = rval; ti++; } xlong = 0; si = 0; if (ti >= (hlit+hdist)) { /*fprintf(stderr,"--");*/ get_codes().sort_and_make(); /*fprintf(stderr,"--");*/ get_distances().sort_and_make(); /*get_distances().print_err();*/ /*fprintf(stderr,"--");*/ ti = 0; submode = 13; xlong = 0; si = 0; num_fetch = 0; } } } if (si > 32) { out = ZSS_EX_DATA_ERROR; } }break; case 14: { if (si < (int)back_long) { xlong |= (b?1:0)<<(si++); } if (si >= (int)back_long) { switch (back_long) { case 2: case 3: xlong += 3; break; case 7: xlong += 11; break; } for (si = 0; (si < (int)xlong) && (ti < (hlit+hdist)); si++, ti++) { if (ti < hlit) get_codes().append_code(num_fetch,ti); else if ((ti-hlit) < hdist) get_distances().append_code(num_fetch,ti-hlit); } xlong = 0; si = 0; if (ti >= (hlit+hdist)) { /*fprintf(stderr,"--");*/ get_codes().sort_and_make(); /*fprintf(stderr,"--");*/ /*get_distances().print_err();*/ get_distances().sort_and_make(); /*fprintf(stderr,"--\n"); get_distances().print_err();*/ ti = 0; submode = 13; xlong = 0; si = 0; num_fetch = 0; } else { submode = 12; xlong = 0; si = 0; /* num_fetch = num_fetch */ } } }break; default: { out = 2; }break; } return out; } }; <file_sep> #include <stdlib.h> 
#include "./zss.h" #include <string.h> #include <limits.h> #include <stdio.h> namespace cmps3120 { struct zss_huff_entry { unsigned long int bits; #if (USHORT_MAX >= 289) unsigned short len; short val; #else unsigned int len; int val; #endif /*USHORT_MAX*/ }; zss_huffs::zss_huffs(void) : m_count(0), m_entries(NULL), m_alloc(0), m_sort(0) { } zss_huffs::~zss_huffs(void) { if (m_entries != NULL) { free(m_entries); m_entries = NULL; } m_count = 0; } zss_huffs::zss_huffs(const zss_huffs& h) : m_count(0), m_entries(NULL), m_alloc(0), m_sort(0) { if (h.m_count && resize(h.m_count)) { memcpy(m_entries,h.m_entries,sizeof(struct zss_huff_entry)*m_count); m_sort = h.m_sort; } } zss_huffs& zss_huffs::operator=(const zss_huffs& h) { if (h.m_count && resize(h.m_count)) { memcpy(m_entries,h.m_entries,sizeof(struct zss_huff_entry)*m_count); m_sort = h.m_sort; } else if ((!h.m_count) || (h.m_entries == NULL)) { if (m_entries != NULL) { free(m_entries); m_entries = NULL; } m_count = 0; m_sort = 0; } return *this; } unsigned int zss_huffs::get_count(void) const { return m_count; } bool zss_huffs::resize(unsigned int nc) { if (nc >= ((UINT_MAX/sizeof(struct zss_huff_entry))-16)) { return false; } else if (nc) { if (nc <= m_alloc) { m_count = nc; if (m_count < (m_alloc>>2)) { struct zss_huff_entry* ne = (struct zss_huff_entry*)realloc (m_entries,(m_alloc>>1)*sizeof(struct zss_huff_entry)); if (ne != NULL) { m_entries = ne; m_alloc = (m_alloc>>1); } } } else { struct zss_huff_entry* ne = (struct zss_huff_entry*)realloc (m_entries,nc*sizeof(struct zss_huff_entry)); if (ne != NULL) { m_entries = ne; if (nc > m_count) { memset(m_entries+m_count,0, sizeof(struct zss_huff_entry)*(nc-m_count)); } m_count = nc; m_alloc = nc; } else return false; } m_sort = 0; } else { if (m_entries != NULL) { free(m_entries); m_entries = NULL; } m_count = 0; m_sort = 0; } return true; } bool zss_huffs::append_code (unsigned int xlen, unsigned int val) { unsigned int nsiz = m_count; if (!resize(nsiz+1)) return 
false; m_entries[nsiz].bits = 0; m_entries[nsiz].len = xlen; m_entries[nsiz].val = val; m_sort =0; return true; } int zss_huffs::cmp_entry_v(const void* aa, const void* bb) { const struct zss_huff_entry *a = (const struct zss_huff_entry*)aa; const struct zss_huff_entry *b = (const struct zss_huff_entry*)bb; if (a->len < b->len) return -1; if (a->len > b->len) return +1; if (a->bits < b->bits) return -1; if (a->bits > b->bits) return +1; return 0; } int zss_huffs::cmp_entry_w(const void* aa, const void* bb) { const struct zss_huff_entry *a = (const struct zss_huff_entry*)aa; const struct zss_huff_entry *b = (const struct zss_huff_entry*)bb; if (a->val < b->val) return -1; if (a->val > b->val) return +1; return 0; } void zss_huffs::sort_by_length(void) { unsigned int i; struct zss_huff_entry *xe; if (m_entries == NULL) return; for (i = 0, xe = m_entries; i < m_count; i++, xe++) { xe->bits = i; } qsort(m_entries, m_count, sizeof(struct zss_huff_entry), &zss_huffs::cmp_entry_v); /*for (i = 0, xe = m_entries; i < m_count; i++, xe++) { fprintf(stderr,"[%lo %u %i]\n",xe->bits,xe->len,xe->val); }*/ m_sort = 1; } void zss_huffs::sort_by_bits(void) { /*unsigned int i; struct zss_huff_entry *xe; fprintf(stderr,"!!\n");*/ if (m_entries == NULL) return; qsort(m_entries, m_count, sizeof(struct zss_huff_entry), &zss_huffs::cmp_entry_v); /*for (i = 0, xe = m_entries; i < m_count; i++, xe++) { fprintf(stderr,"[%lo %u %i]\n",xe->bits,xe->len,xe->val); }*/ m_sort = 2; } void zss_huffs::print_err(void) const { unsigned int i; struct zss_huff_entry *xe; fprintf(stderr,"!!\n"); if (m_entries == NULL) return; for (i = 0, xe = m_entries; i < m_count; i++, xe++) { fprintf(stderr,"[%lo %u %i]\n",xe->bits,xe->len,xe->val); } } void zss_huffs::sort_by_value(void) { /*unsigned int i; struct zss_huff_entry *xe; fprintf(stderr,"~~\n");*/ if (m_entries == NULL) return; qsort(m_entries, m_count, sizeof(struct zss_huff_entry), &zss_huffs::cmp_entry_w); /*for (i = 0, xe = m_entries; i < m_count; 
i++, xe++) { fprintf(stderr,"[%lo %u %i]\n",xe->bits,xe->len,xe->val); }*/ m_sort = 3; } bool zss_huffs::make_codes(void) { unsigned int i, j = 0; unsigned long int l = 0; if (m_entries == NULL) return true; for (i = 0; i < m_count; i++) { if (j < m_entries[i].len) { l<<=(m_entries[i].len-j); j = m_entries[i].len; } else if (!j) { m_entries[i].bits = 0; continue; } if ((1UL<<j)&l) return false; m_entries[i].bits = l; l++; } return true; } bool zss_huffs::sort_and_make(void) { bool res; sort_by_length(); res = make_codes(); if (res) sort_by_bits(); return res; } zss_huffs zss_huffs::for_direct(void) { unsigned int i; zss_huffs out; for (i = 0; i < 256; i++) { out.append_code(8,i); } out.sort_and_make(); return out; } zss_huffs zss_huffs::for_distance(void) { unsigned int i; zss_huffs out; for (i = 0; i < 31; i++) { out.append_code(5,i); } out.sort_and_make(); return out; } zss_huffs zss_huffs::for_fixed(void) { unsigned int i; zss_huffs out; for (i = 0; i < 144; i++) { out.append_code(8,i); } for (; i < 256; i++) { out.append_code(9,i); } for (; i < 280; i++) { out.append_code(7,i); } for (; i < 288; i++) { out.append_code(8,i); } out.sort_and_make(); return out; } int zss_huffs::get_value(unsigned long int bits, unsigned int len) const { struct zss_huff_entry xe, *xv; if ((m_entries == NULL) || (!m_count)) return -1; xe.bits = bits; xe.len = len; xe.val = -1; if (m_sort == 2) { xv = (struct zss_huff_entry*)bsearch (&xe,m_entries,m_count,sizeof(struct zss_huff_entry), &cmp_entry_v); } else { unsigned int l; xv = NULL; for (l = 0; l < m_count; l++) { if (cmp_entry_v(&xe,m_entries+l) == 0) { xv = m_entries+l; break; } } } if (xv == NULL) return -1; return xv->val; } zss_huff_pair zss_huffs::get_bits(unsigned int value) const { struct zss_huff_entry xe, *xv; zss_huff_pair out = {0,0}; if ((m_entries == NULL) || (!m_count)) return out; xe.bits = -1; xe.len = -1; xe.val = value; if (m_sort == 3) { xv = (struct zss_huff_entry*)bsearch (&xe,m_entries,m_count,sizeof(struct 
zss_huff_entry), &cmp_entry_w); } else { unsigned int l; xv = NULL; for (l = 0; l < m_count; l++) { if (cmp_entry_w(&xe,m_entries+l) == 0) { xv = m_entries+l; break; } } } if (xv == NULL) return out; out.bits = xv->bits; out.len = xv->len; return out; } zss_header::zss_header(void) : fcheck(0), fdict(0), flevel(1), xmethod(8), xinfo(7) { } zss_header::zss_header(unsigned int v) { fcheck = v&31; v>>=5; fdict = v&1; v>>=1; flevel = v&3; v>>=2; xmethod = v&15; v>>=4; xinfo = v&15; } zss_header::operator unsigned int (void) const { unsigned int v; v = xinfo&15; v<<=4; v |= xmethod&15; v<<=2; v |= flevel&3; v<<=1; v |= fdict&1; v<<=5; v |= fcheck&31; return v; } void zss_header::set_check(void) { unsigned int v = ((unsigned int)*this)&(~31U); fcheck = 31-(v % 31); } bool zss_header::is_valid(void) const { unsigned int v = (unsigned int)*this; return !(v % 31); } zss_checksum::zss_checksum(unsigned long int l) : xva(l&65535), xvb((l>>16)&65535) { } zss_checksum& zss_checksum::add(unsigned char ch) { xva = ((unsigned long int)xva+ch)%65521UL; xvb = ((unsigned long int)xvb+xva)%65521UL; return *this; } zss_checksum::operator unsigned long int(void) const { return ((unsigned long int)xva)|(((unsigned long int)xvb)<<16); } zss_buffer::zss_buffer(void) : m_dta(NULL), m_siz(0) { } zss_buffer::zss_buffer(const zss_buffer& b) : m_dta(NULL), m_siz(0) { if (b.m_siz && (b.m_dta != NULL)) { unsigned char* nc = (unsigned char*)calloc (b.m_siz,sizeof(unsigned char)); if (nc != NULL) { memcpy(nc,b.m_dta,b.m_siz*sizeof(unsigned char)); m_dta = nc; m_siz = b.m_siz; } } } zss_buffer& zss_buffer::operator=(const zss_buffer& b) { if (m_dta != NULL) { free(m_dta); m_dta = NULL; } m_siz = 0; if (b.m_siz && (b.m_dta != NULL)) { unsigned char* nc = (unsigned char*)calloc (b.m_siz,sizeof(unsigned char)); if (nc != NULL) { memcpy(nc,b.m_dta,b.m_siz*sizeof(unsigned char)); m_dta = nc; m_siz = b.m_siz; } } return *this; } zss_buffer::~zss_buffer(void) { if (m_dta != NULL) free(m_dta); m_dta = NULL; 
m_siz = 0; } unsigned char* zss_buffer::data(void){ return m_dta; } const unsigned char* zss_buffer::data(void) const{ return m_dta; } unsigned int zss_buffer::size(void) const{ return m_siz; } bool zss_buffer::resize(unsigned int s) { unsigned char *nc; if (s) { if (m_dta != NULL) { nc = (unsigned char*)realloc(m_dta,s*sizeof(unsigned char)); if (nc != NULL) { m_dta = nc; if (s > m_siz) { memset(m_dta+m_siz,0,sizeof(unsigned char)*(s-m_siz)); } m_siz = s; } else { return false; } } else { nc = (unsigned char*)calloc(s,sizeof(unsigned char)); if (nc != NULL) { m_dta = nc; m_siz = s; } else { return false; } } } else { if (m_dta != NULL) free(m_dta); m_dta = NULL; m_siz = 0; } return true; } unsigned char& zss_buffer::operator[](unsigned int i) { return *(m_dta+i); } const unsigned char& zss_buffer::operator[](unsigned int i) const { return *(m_dta+i); } bool zss_buffer::append(int C) { unsigned int xs = m_siz; if (xs >= ((UINT_MAX-1)/sizeof(unsigned char))) { return false; } if (!resize(xs+1)) return false; *(m_dta+xs) = (unsigned char)(C&255); return true; } zss::zss(void) : xerr(0), hist(), pend(), hist_pos(0), hist_len(0), pend_pos(0), xhdr(0), dict_set_tf(0), dict_adler(0), readmode(0), submode(0), xpect_dict_adler(0), xlong(0), xcharc(0) { } zss::~zss(void) { hist.resize(0); pend.resize(0); } zss_error zss::get_error(void) const { return xerr; } void zss::set_error(zss_error v) { xerr = v; } void zss::reset(void) { reset_sub(); } void zss::reset_sub(void) { readmode = 0; hist.resize(0); pend.resize(0); hist_len = 0; hist_pos = 0; pend_pos = 0; xerr = 0; submode = 0; xhdr = 0; xlong = 0; xadler = 1; dict_set_tf = false; dict_adler = 1; xpect_dict_adler = 1; xcharc = 0; } unsigned char zss::get_previous(unsigned int pos) const { if (pos > hist_len) return 0; else if (pos > hist.size()) return 0; else if (pos > hist_pos) return hist[hist.size()-(pos-hist_pos)]; else return hist[hist_pos-pos]; } void zss::put_previous(unsigned char ch) { if (hist_len > 
hist.size()) hist_len = hist.size(); xadler.add(ch); if (!hist.size()) return; if (hist_pos < hist_len) { hist[hist_pos++] = ch; } else if (hist_pos < hist.size()) { hist_len++; hist[hist_pos++] = ch; } else { hist[(hist_pos=0)++] = ch; } } zss_error zss::put(const zss_buffer& dta, unsigned int *pos) { return put(dta.data(), dta.size(), pos); } zss_error zss::put (const unsigned char* dta, unsigned int siz, unsigned int *pos) { int xout = xerr; unsigned int i; for (i = 0; (!xout) && (i < siz); i++, (xout?0:xcharc++)) { xout = put_char(dta[i]); } if (pos) { *pos = i; } xerr = xout; return (zss_error)xout; } zss_error zss::finish(void) { int xout = xerr; if (!xout) { xout = put_eof(); } xerr = xout; return (zss_error)xout; } zss_error zss::put_eof(void) { return ZSS_UNDEFINED; } zss_error zss::put_char(unsigned char x) { return ZSS_UNDEFINED; } bool zss::is_dict_set(void) const { return dict_set_tf; } unsigned long zss::get_dict_checksum(void) const { return dict_adler; } bool zss::set_dictionary(const zss_buffer& dct) { if (readmode < 2) { unsigned int i; for (i = 0; i < dct.size(); i++) { put_previous(dct[i]); } dict_adler = xadler; xadler = 1; return true; } else return false; } const zss_header& zss::get_header(void) const { return xhdr; } void zss::set_header(const zss_header& h) { xhdr = h; } bool zss::setup_window(void) { unsigned int siz; if (xhdr.xinfo >= 8) return false; siz = 1<<(xhdr.xinfo+8); return hist.resize(siz); } zss_huffs& zss::get_codes(void) { return codes; } const zss_huffs& zss::get_codes(void) const { return codes; } void zss::set_codes(const zss_huffs& h) { codes = h; } zss_huffs& zss::get_extended_codes(void) { return xcodes; } const zss_huffs& zss::get_extended_codes(void) const { return xcodes; } void zss::set_extended_codes(const zss_huffs& h) { xcodes = h; } zss_huffs& zss::get_distances(void) { return dists; } const zss_huffs& zss::get_distances(void) const { return dists; } void zss::set_distances(const zss_huffs& h) { dists = h; } 
bool zss::append(unsigned char ch) { if (!pend.append(ch&255) ) return false; put_previous(ch&255); return true; } bool zss::append_no_history(unsigned char ch) { return pend.append(ch&255); } unsigned int zss::get_pending_count(void) const { return pend.size(); } zss_buffer& zss::get_pending(void){ return pend; } const zss_buffer& zss::get_pending(void) const{ return pend; } void zss::clear_pending(void){ pend.resize(0); } unsigned long int zss::get_checksum(void) const { return xadler; } unsigned int zss::get_history_length(void) const { return hist_len; } unsigned long int zss::get_char_count(void) const { return xcharc; } }; <file_sep>ADD_LIBRARY(pngdecode png.cpp png.h pngdec.cpp pngdec.h pngenc.cpp pngenc.h zdec.cpp zdec.h zenc.cpp zenc.h zss.cpp zss.h)<file_sep>cmake_minimum_required(VERSION 2.8) PROJECT(RAYTRACER) SET(CMAKE_BUILD_TYPE "Release") if (!MSVC) SET(CMAKE_CXX_FLAGS "-Wno-deprecated") endif() #Compile and Link GLFW ADD_SUBDIRECTORY(glfw-3.2) link_libraries(glfw) include_directories(${glfw_INCLUDE_DIRS}) include_directories("${CMAKE_SOURCE_DIR}/glfw-3.2/deps") add_library(glad "${CMAKE_SOURCE_DIR}/glfw-3.2/deps/glad/glad.h" "${CMAKE_SOURCE_DIR}/glfw-3.2/deps/glad.c") link_libraries(glad) ADD_SUBDIRECTORY(pngdecode) link_libraries(pngdecode) include_directories(${CMAKE_SOURCE_DIR}/pngdecode) SET(MY_SOURCE_PATH ${CMAKE_SOURCE_DIR}) CONFIGURE_FILE(${CMAKE_SOURCE_DIR}/source/common/SourcePath.cpp.in ${CMAKE_SOURCE_DIR}/source/common/SourcePath.cpp) include_directories(${CMAKE_SOURCE_DIR}/source/common ${CMAKE_SOURCE_DIR}/shaders) add_executable(raytracer WIN32 MACOSX_BUNDLE source/main.cpp source/common/common.h source/common/CheckError.h source/common/mat.h source/common/ObjMesh.cpp source/common/ObjMesh.h source/common/SourcePath.cpp source/common/SourcePath.h source/common/Trackball.cpp source/common/Trackball.h source/common/Object.cpp source/common/Object.h source/common/vec.h shaders/fshader.glsl shaders/vshader.glsl) #Windows cleanup if (MSVC) # 
Tell MSVC to use main instead of WinMain for Windows subsystem executables set_target_properties(raytracer PROPERTIES LINK_FLAGS "/ENTRY:mainCRTStartup") endif() #Apple cleanup if (APPLE) set_target_properties(raytracer PROPERTIES MACOSX_BUNDLE_BUNDLE_NAME "simple") set_target_properties(raytracer PROPERTIES MACOSX_BUNDLE_SHORT_VERSION_STRING ${GLFW_VERSION} MACOSX_BUNDLE_LONG_VERSION_STRING ${GLFW_VERSION_FULL} MACOSX_BUNDLE_ICON_FILE glfw.icns) endif() <file_sep> #if !(defined CMPS3120_LABS_ZDEC_H) #define CMPS3120_LABS_ZDEC_H #include "./zss.h" namespace cmps3120 { class zdec : public zss { public: zdec(void); ~zdec(void); void reset(void); private: int bit_pos, si, ti; bool last_block; unsigned long int back_long; int num_fetch; int hclen, hlit, hdist; zss_error put_char(unsigned char x); zss_error put_eof(void); zss_error process_bit(int b); int compute_fetch(int rval); int compute_back_long(int rval); }; }; #endif /*CMPS3120_LABS_ZDEC_H*/ <file_sep> #if !(defined CMPS3120_LABS_PNG_H) #define CMPS3120_LABS_PNG_H namespace cmps3120 { /** * Error codes from a \link png_base \endlink */ typedef enum png_error_t { /** Undefined operation or state */ PNG_UNDEFINED = -1, /** No error (yet) */ PNG_OK = 0, /** End of image */ PNG_DONE = 1, /** Bad file signature */ PNG_BAD_SIGNATURE = 2, /** Bad checksum */ PNG_BAD_CHECKSUM = 3, /** Duplicate or misplaced chunk. 
*/ PNG_BAD_CHUNK = 4, /** A chunk is missing necessary data */ PNG_MISSING_DATA = 5, /** Header reports unsupported features */ PNG_UNSUPPORTED_HEADER = 6, /** Primary chunk unsupported */ PNG_UNSUPPORTED_CHUNK = 7, /** memory issues */ PNG_NO_MEMORY = 8, /** zip stream issues */ PNG_ZSS_ERROR = 9, /** the file could not be opened for reading */ PNG_FILE_READ_FAILED = 10, /** the file could not be opened for reading */ PNG_FILE_WRITE_FAILED = 11, /** no receptor has been set */ PNG_MISSING_RECEPTOR = 12, /** * no palette has been set although the receptor's * header requires a palete */ PNG_MISSING_PALETTE = 13, PNG_LAST_ERROR = 14 } png_error; /** * Common chunk flags */ typedef enum png_chunk_flag_t { PNG_CC_IEND = 16, PNG_CC_IHDR = 1, PNG_CC_PLTE = 2, PNG_CC_IDAT = 8, PNG_CC_tRNS = 4 } png_chunk_flag; /** * Portable Network Graphics color type */ typedef enum png_color_type_t { /** @note \verbatim restrict bit-depth: 1,2,4,8\endverbatim */ PNG_HAS_PALETTE = 1, /** @note \verbatim restrict bit-depth: 8,16\endverbatim */ PNG_HAS_COLOR = 2, /** @note \verbatim restrict bit-depth: 8,16\endverbatim */ PNG_HAS_ALPHA = 4, PNG_GRAYSCALE = 0, PNG_RGB = 2, PNG_RGB_PALETTE = 3, PNG_GRAYSCALE_ALPHA = 4, PNG_RGBA = 6 } png_color_type; /** * Convert a pixel width into a scanline length. 
* @param nw the pixel width * @param bd sample bit depth * @param ct a \link png_color_type \endlink value * @param no_throw_tf if true, no exceptions will be thrown * @return the length of a scanline given the arguments * @throw std::range_error if the scanline is too large */ unsigned long int png_color_recast (unsigned long int nw, unsigned int bd, unsigned int ct, bool no_throw_tf = false); /** * Run the Paeth predictor * @param left the left value * @param up the up value * @param corner the up-left value * @return the chosen value */ unsigned char png_paeth_predict (unsigned char left, unsigned char up, unsigned char corner); /** * Portable Network Graphics compression method */ typedef enum png_compression_type_t { /** zlib Deflate algorithm */ PNG_DEFLATE = 0 } png_compression_type; /** * Portable Network Graphics filter method */ typedef enum png_filter_type_t { /** default 5-filter adaptive */ PNG_ADAPTIVE = 0 } png_filter_type; /** * Portable Network Graphics interlace method */ typedef enum png_interlace_type_t { /** no interlace */ PNG_NO_INTERLACE = 0, /** Adam7 interlace */ PNG_ADAM7_INTERLACE = 1 } png_interlace_type; class png_checksum { private: unsigned long int m_x; public: png_checksum(unsigned long int l = 0UL); png_checksum& add(unsigned char ch); operator unsigned long int(void) const; }; /** * Portable Network Graphics image header */ struct png_header { public: /** * Default constructor */ png_header(void); /** * Destructor * @note does nothing */ ~png_header(void); /** * Pixel width */ unsigned long int width; /** * Pixel height */ unsigned long int height; /** * Bit depth per sample */ unsigned char bit_depth; /** * a \link png_color_type \endlink value */ unsigned char color_type; /** * a \link png_compression_type \endlink value */ unsigned char compression_type; /** * a \link png_filter_type \endlink value */ unsigned char filter_type; /** * a \link png_interlace_type \endlink value */ unsigned char interlace_type; /** * @return 
whether this implementation of PNG supports * the given header */ bool is_supported(void) const; }; /** * Palette color entry */ struct png_color { unsigned char r; unsigned char g; unsigned char b; unsigned char a; }; /** * Pixel in PNG image data */ struct png_pixel { /** Default constructor */ png_pixel(); /** Copy constructor */ png_pixel(const png_pixel& ); /** Assignment operator */ png_pixel& operator=(const png_pixel& ); /** Copy constructor */ png_pixel(const png_color& ); /** Assignment operator */ png_pixel& operator=(const png_color& ); /** Conversion operator */ operator png_color(void) const; /** Comparison operator */ bool operator==(const png_pixel& other); /** * Convert from 16-bit to lower bit depth * @param n the lower bit depth * @return a copy of this pixel in the lower bit depth */ png_pixel down_cast(unsigned int n) const; /** * Convert from lower bit depth to 16-bit * @param n the lower bit depth * @return a copy of this pixel at depth of 16 bits per sample */ png_pixel up_cast(unsigned int n) const; /** * Converts the pixel RGB values to a single grayscale value. * @return a value between [0, 65535], inclusive. */ unsigned int gray(void) const; /** Red value, range 0 - 65535 */ unsigned int r; /** Green value, range 0 - 65535 */ unsigned int g; /** Blue value, range 0 - 65535 */ unsigned int b; /** Alpha value, range 0 - 65535 */ unsigned int a; }; /** * Portable Network Graphics palette */ class png_palette { private: png_color* m_colors; unsigned int m_count; png_pixel m_alpha; public: /** * Constructor */ png_palette(void); /** * Copy constructor */ png_palette(const png_palette& ); /** * Assignment */ png_palette& operator=(const png_palette& ); /** * Destructor */ ~png_palette(void); /** * @return the size of the palette */ unsigned int size(void) const; /** * Resize the palette. 
* @param l the desired size * @return zero on success */ bool resize(unsigned int l); /** * @param i an index into the palette * @return the entry at the given index * @throw std::out_of_range if i is out of range */ png_color& operator[](unsigned int i); /** * @param i an index into the palette * @return the entry at the given index * @throw std::out_of_range if i is out of range */ const png_color& operator[](unsigned int i) const; /** * @return the pixel value to be marked as transparent */ png_pixel& get_transparent_pixel(void); /** * @return the pixel value to be marked as transparent */ const png_pixel& get_transparent_pixel(void) const; /** * Try to match a pixel to a palette color. * @param v the pixel color to try to match * @param siz one plus the last index to check; if negative, * the entire palette is checked * @return the index of the best match, or -1 if none of the * palette could be checked */ int closest_match(png_pixel v, int siz = -1); }; /** * Data used by the PNG processor. */ class png_buffer { private: unsigned char* m_dta; unsigned int m_siz; public: png_buffer(void); png_buffer(const png_buffer& ); png_buffer& operator=(const png_buffer& ); ~png_buffer(void); unsigned char* data(void); const unsigned char* data(void) const; unsigned int size(void) const; bool resize(unsigned int s); unsigned char& operator[](unsigned int ); const unsigned char& operator[](unsigned int ) const; bool append(int C); }; class png_adam7_data { private: unsigned char w, h, xoff, yoff; int l; public: png_adam7_data(int level = 0); /** * @return the interlace level described by this data set */ int get_level(void) const; /** * Compute the width of a pass. * @param inw the input width * @return the height of interlace pass described by * this data set */ unsigned int compute_width(unsigned int inw); /** * Compute the height of a pass. 
* @param inh the input height * @return the height of interlace pass described by * this data set */ unsigned int compute_height(unsigned int inh); /** * Convert interlace coordinates to real coordinates. * @param inx x-axis interlace coordinate * @return the x-coordinate of the corresponding real coordinate */ unsigned int real_x(unsigned int ix); /** * Convert interlace coordinates to real coordinates. * @param iny y-axis interlace coordinate * @return the y-coordinate of the corresponding real coordinate */ unsigned int real_y(unsigned int iy); }; /** * \link png_base \endlink input/output receptor * \note \b interface * * To use with the png_decoder class, override the * \link #set_header \endlink and \link #set \endlink * methods. The width, height and other information * will be sent through the set_header method, while * the pixel values will be sent through the set * method. * */ class png_receptor { public: virtual ~png_receptor(void); virtual png_header get_header (void); virtual void set_header (png_header h); virtual png_pixel get_pixel (unsigned int x, unsigned int y, unsigned int level); virtual void set_pixel (unsigned int x, unsigned int y, unsigned int level, png_pixel v); }; /** * Portable Network Graphics stream processor * base class * */ class png_base { private: int m_xerr; png_header m_header; png_palette m_palette; png_checksum m_crc; png_adam7_data m_idata; png_receptor *m_receptor; public: /** * Constructor */ png_base(void); /** * Destructor */ virtual ~png_base(void); /** * Reset the stream. * @note Calls \link #reset_sub \endlink */ void reset(void); /** * Get the last error. * @return the last error encountered */ png_error get_error(void) const; /** * Process an array of character/byte data. 
* @param dta the PNG image data to process * @param[out] pos the position in the data array at which the * last error was obtained * @return an error code, or zero on success */ png_error put(const png_buffer& dta, unsigned int *pos = 0L); /** * Process an array of character/byte data. * @param dta the PNG image data to process * @param siz the number of bytes in the data * @param[out] pos the position in the data array at which the * last error was obtained * @return an error code, or zero on success */ png_error put (const unsigned char* dta, unsigned int siz, unsigned int *pos = 0L); /** * @return the header */ png_header& get_header(void); /** * @return the header */ const png_header& get_header(void) const; /** * @return the palette */ png_palette& get_palette(void); /** * @return the palette */ const png_palette& get_palette(void) const; /** * Get the chunk checksum. */ unsigned long int get_checksum(void) const; /** * Reset the chunk checksum. */ void reset_checksum(void); /** * @return the interlace data set */ png_adam7_data& get_interlace_data(void); /** * @return the interlace data set */ const png_adam7_data& get_interlace_data(void) const; /** * @return the number of characters processed so far. */ unsigned long int get_char_count(void) const; /** * @return the current receptor */ png_receptor* get_receptor(void) const; /** * Set the receptor accessed by this processor * @param r the receptor to use */ void set_receptor(png_receptor* r); /** * Generate an array of character/byte data. * @param[out] dta the generated PNG image data is placed here * @param[out] pos the position in the data array at which the * last error was obtained * @return an error code, or zero on success */ png_error get(png_buffer& dta, unsigned int *pos = 0L); /** * Generate an array of character/byte data. 
* @param[out] dta the generated PNG image data is placed here * @param maxsiz the maximum number of bytes to output to * the given array * @param[out] pos the position in the data array at which the * last error was obtained * @return an error code, or zero on success */ png_error get (unsigned char* dta, unsigned int maxsiz, unsigned int *pos = 0L); protected: /** for use by subclasses */ int readmode; /** for use by subclasses */ int submode; /** for use by subclasses */ int sidemode; /** for use by subclasses */ unsigned long int xpect_nothing, xlong, xpect_length, xpect_chunks; /** for use by subclasses: shows current character count */ unsigned long int xcharc; unsigned long int xdiv_width, xdiv_height, xdiv_xpos, xdiv_ypos, xdiv_index; unsigned short int filter_typ; unsigned short int filter_dist; unsigned char filter_log[8]; png_buffer filter_backlog; virtual void reset_sub(void); void put_previous(unsigned char ch); void set_error(png_error v); /** * @return an error code, or zero on success */ virtual png_error put_char(unsigned char); /** * @param[out] y the output byte * @return an error code, or zero on success */ virtual png_error get_byte(unsigned char& y); }; }; #endif /*CMPS3120_LABS_PNG_H*/ <file_sep> #if !(defined CMPS3120_LABS_ZSS_H) #define CMPS3120_LABS_ZSS_H namespace cmps3120 { /** * zip stream error codes */ enum zss_error_t { ZSS_OK = 0, /** end of zip stream */ ZSS_DONE = 1, /** operation undefined */ ZSS_UNDEFINED = 2, /** bad zip stream header */ ZSS_BAD_HEADER = 3, /** request for dictionary */ ZSS_DICTIONARY = 4, /** memory issues */ ZSS_MEMORY = 5, /** Huffman codes acquired an error */ ZSS_CODE_ERROR = 6, /** block construction error */ ZSS_BLOCK_ERROR = 7, /** data was poorly encoded */ ZSS_DATA_ERROR = 8, /** bad terminating checksum */ ZSS_CHECKSUM = 9, /** overflow; pull some pending data and try again */ ZSS_OVERFLOW = 10, /** data was poorly encoded */ ZSS_DATA_FETCH = 11, /** data was poorly encoded */ ZSS_DATA_BACK = 12, /** 
dynamic code bank read error */ ZSS_EX_DATA_ERROR = 13, /** the stream was terminated too early */ ZSS_MISSING_CHECKSUM = 14, ZSS_LAST = 15 }; typedef int zss_error; struct zss_huff_entry; /** * Huffman code bits with length */ struct zss_huff_pair { unsigned long int bits; unsigned short len; }; /** * Huffman code bank. */ class zss_huffs { private: unsigned int m_count; struct zss_huff_entry *m_entries; unsigned int m_alloc; unsigned char m_sort; public: zss_huffs(void); ~zss_huffs(void); zss_huffs(const zss_huffs& ); zss_huffs& operator=(const zss_huffs& ); unsigned int get_count(void) const; bool resize(unsigned int nc); /** * @param xc the code to add to the code bank * @param xlen the length of the code in bits * @param val the value that the code represents * @return a success flag */ bool append_code (unsigned int xlen, unsigned int val); /** * Sort codes and make trees. */ bool sort_and_make(void); /** * @return a code bank for a direct data block */ static zss_huffs for_direct(void); /** * @return a code bank for a fixed-code data bank */ static zss_huffs for_fixed(void); /** * @return a code bank for a fixed-code distance bank */ static zss_huffs for_distance(void); /** * @param bits the bits of the code to look up * @param len the length of the code to look up * @return the value connected to the given bit sequence, * or -1 if the bit sequence could not be matched */ int get_value(unsigned long int bits, unsigned int len) const; /** * @param value the value to look up * @return the code bits for a given value; if the value * was not found, the code length will be set to zero */ zss_huff_pair get_bits(unsigned int value) const; /** * Sort characters by values. */ void sort_by_value(void); void print_err(void) const; protected: /** * Sort characters by code length. */ void sort_by_length(void); /** * Sort characters by bits, shorter bit codes first. */ void sort_by_bits(void); /** * Make codes by code length. 
*/ bool make_codes(void); /** * Comparison function */ static int cmp_entry_v(const void* a, const void* b); /** * Comparison function */ static int cmp_entry_w(const void* a, const void* b); }; /** * Adler32 checksum */ class zss_checksum { private: unsigned int xva, xvb; public: zss_checksum(unsigned long int l = 1UL); zss_checksum& add(unsigned char ch); operator unsigned long int(void) const; }; /** * Zip-stream header */ struct zss_header { public: /** * Constructor for a default zip-stream header */ zss_header(void); /** * Constructor for a header from a packed integer */ zss_header(unsigned int ); /** * @return the zip-stream header paked into a single integer */ operator unsigned int (void) const; /** * @return whether the header is valid */ bool is_valid(void) const; /** * Computes and sets the check value for the ehader */ void set_check(void); /** * Check value, such that when the header is packed into * an integer, the integer is a multiple of 31. */ unsigned int fcheck : 5; /** * A flag, whether the described zip-stream uses a dictionary */ unsigned int fdict : 1; /** * Compression level. */ unsigned int flevel : 2; /** * Compression method. 8 = deflate */ unsigned int xmethod : 4; /** * Compression information. The meaning depends on the * compression method. */ unsigned int xinfo : 4; }; /** * Data used by the zip stream decoder. */ class zss_buffer { private: unsigned char* m_dta; unsigned int m_siz; public: /** * Constructor */ zss_buffer(void); /** * Copy constructor */ zss_buffer(const zss_buffer& ); /** * Assignment */ zss_buffer& operator=(const zss_buffer& ); /** * Destructor */ ~zss_buffer(void); /** * @return the data stored in the buffer */ unsigned char* data(void); /** * @return the data stored in the buffer */ const unsigned char* data(void) const; /** * @return the size of the buffer */ unsigned int size(void) const; /** * Resize the buffer. 
* @param s the desired size of the buffer * @return whether the resize was successful */ bool resize(unsigned int s); /** * Array indexing operator * @return the byte at the given index */ unsigned char& operator[](unsigned int ); /** * Array indexing operator * @return the byte at the given index */ const unsigned char& operator[](unsigned int ) const; /** * Add a byte to the end of the buffer. * @param C the value of the byte to add to the buffer * @return whether the append operation was successful */ bool append(int C); }; /** * Single-use zip stream transcoder. * @todo zip stream encoder */ class zss { private: int xerr; zss_buffer hist; zss_buffer pend; zss_huffs codes; zss_huffs dists; zss_huffs xcodes; unsigned int hist_pos; unsigned int hist_len; unsigned int pend_pos; zss_header xhdr; zss_checksum xadler; bool dict_set_tf; unsigned long dict_adler; public: /** Constructor */ zss(void); /** Destructor */ virtual ~zss(void); /** * Reset the zip-stream transcoder. */ virtual void reset(void); /** * @return any error code posted on the transcoder */ zss_error get_error(void) const; /** * Process an array of character/byte data. * @param dta the zip stream data to process * @param[out] pos the position in the data array at which the * last error was obtained * @return an error code, or zero on success */ zss_error put(const zss_buffer& dta, unsigned int *pos = 0L); /** * Process an array of character/byte data. * @param dta the zip stream data to process * @param siz the number of bytes in the data * @param[out] pos the position in the data array at which the * last error was obtained * @return an error code, or zero on success */ zss_error put (const unsigned char* dta, unsigned int siz, unsigned int *pos = 0L); /** * @return whether a dictionary has been set */ bool is_dict_set(void) const; /** * @return the current dictionary's checksum, if it has been set */ unsigned long get_dict_checksum(void) const; /** * Set dictionary. 
* @return a success flag */ bool set_dictionary(const zss_buffer& dct); /** * @return the header */ const zss_header& get_header(void) const; /** * Set the zip stream header. * @param h the new header */ void set_header(const zss_header& h); /** * @return the active code bank */ zss_huffs& get_codes(void); /** * @return the active code bank */ const zss_huffs& get_codes(void) const; /** * Set the active code bank. * @param h the code bank to set */ void set_codes(const zss_huffs& h); /** * @return the active distance code bank */ zss_huffs& get_distances(void); /** * @return the active distance code bank */ const zss_huffs& get_distances(void) const; /** * Set the active distance code bank. * @param h the distance code bank to set */ void set_distances(const zss_huffs& h); /** * @return the active code bank */ zss_huffs& get_extended_codes(void); /** * @return the active code bank */ const zss_huffs& get_extended_codes(void) const; /** * Set the active code bank. * @param h the code bank to set */ void set_extended_codes(const zss_huffs& h); /** * Append to the output buffer, without affecting the * history. * @param ch the character to append * @return a success flag * @note does not call @link #put_previous@endlink */ bool append_no_history(unsigned char ch); /** * Append to the output buffer. * @param ch the character to append * @return a success flag * @note also calls @link #put_previous@endlink */ bool append(unsigned char ch); /** * @return the number of bytes pending */ unsigned int get_pending_count(void) const; /** * @return the buffer of pending bytes */ zss_buffer& get_pending(void); /** * @return the buffer of pending bytes */ const zss_buffer& get_pending(void) const; /** * Drop all pending bytes. */ void clear_pending(void); /** * Get the stream checksum. */ unsigned long int get_checksum(void) const; /** * Get the length of the history. 
* @return the length of the history */ unsigned int get_history_length(void) const; /** * @return the number of characters processed so far. */ unsigned long int get_char_count(void) const; /** * Notify of end of stream. * @return a \link zss_error_t\endlink value */ zss_error finish(void); protected: /** for use by subclasses */ int readmode; /** for use by subclasses */ int submode; /** for use by subclasses */ unsigned long int xpect_dict_adler, xlong; /** for use by subclasses: shows current character count */ unsigned long int xcharc; /** * Reset the base transcoder values. */ void reset_sub(void); /** * Get a previous character in the stream. * @param pos the number of steps to retreat * @return the character, or zero if the character could * not be obtained */ unsigned char get_previous(unsigned int pos) const; /** * Write a character into the history. * @param ch the character to add * @note the history is limited by the window size * @see @link #setup_window @endlink */ void put_previous(unsigned char ch); /** * Directly set the error code. * @param v the error code to post on the transcoder * @note Normally, this function need not be called by subclasses. * Return values from \link put_char \endlink and \link put_eof * \endlink are automatically posted on the transcoder. */ void set_error(zss_error v); /** * Setup the history window based on the current header. 
* @return whether the window setup was successful */ bool setup_window(void); /** * @return an error code, or zero on success */ virtual zss_error put_eof(void); /** * @return an error code, or zero on success */ virtual zss_error put_char(unsigned char); }; } #endif /*CMPS3120_LABS_ZSS_H*/ <file_sep> #include "pngenc.h" #include <stdlib.h> #include <stdio.h> #include <time.h> #include <string.h> namespace cmps3120 { unsigned char pngenc_std_header[8] = {137, 80, 78, 71, 13, 10, 26, 10 }; unsigned char pngenc_chunk_iend[4] = { 0x49, 0x45, 0x4E, 0x44 }; unsigned char pngenc_chunk_ihdr[4] = { 0x49, 0x48, 0x44, 0x52 }; unsigned char pngenc_chunk_plte[4] = { 0x50, 0x4c, 0x54, 0x45 }; unsigned char pngenc_chunk_trns[4] = { 0x74, 0x52, 0x4e, 0x53 }; unsigned char pngenc_chunk_idat[4] = { 0x49, 0x44, 0x41, 0x54 }; png_enc_random::png_enc_random(unsigned int v) : m_v(v) { } png_enc_random::png_enc_random(const png_enc_random& v) : m_v(v.m_v) { } png_enc_random::~png_enc_random(void) { } png_enc_random::operator unsigned int(void) const { unsigned int out = m_v; m_v >>= 1; m_v *= 0xce3f5a29; m_v += 0x39402958; //m_v ^= 0xa92536ab; //m_v ^= out; out = m_v%RAND_MAX; return out; } png_enc_random& png_enc_random::operator=(unsigned int v) { m_v = v; return *this; } png_enc_random& png_enc_random::operator=(const png_enc_random& v) { m_v = v.m_v; return *this; } png_encoder::png_encoder(void) : xenc(), m_max(100000), m_d_chunk_size(1024) { reset(); } png_encoder::~png_encoder(void) { } unsigned int png_encoder::get_max_dimension(void) const { return m_max; } void png_encoder::set_max_dimension(unsigned int x) { m_max = x; } png_error png_encoder::write_file(const char* name) { FILE *nfile = fopen(name,"wb"); if (nfile != NULL) { unsigned int readch; unsigned char buf[256]; reset(); while (!get_error()) { get(buf,256,&readch); if (readch > 0) fwrite(buf,sizeof(unsigned char),readch,nfile); } fclose(nfile); return get_error(); } else { return PNG_FILE_WRITE_FAILED; } } void 
png_encoder::reset_sub(void) { m_rand = time(NULL); memset(xbuf,0,sizeof(xbuf)); } png_error png_encoder::get_byte(unsigned char& y) { int out = 0, i; switch (readmode) { case 0: /* header */ { if (submode < 8) { y = pngenc_std_header[submode]; submode++; } if (submode >= 8) { readmode = 1; submode = 0; } }break; case 1: /* IHDR length */ { if (submode < 4) { y = (submode!=3 ? 0 : 13); submode++; } if (submode >= 4) { readmode = 2; submode = 0; reset_checksum(); } }break; case 2: /* IHDR tag */ { if (submode < 4) { y = pngenc_chunk_ihdr[submode]; put_previous(pngenc_chunk_ihdr[submode]); submode++; } if (submode >= 4) { readmode = 3; submode = 0; } }break; case 3: /* IHDR data */ { if (submode == 0) { png_header xh; if (get_receptor() != NULL) { xh = get_receptor()->get_header(); if (!xh.is_supported()) out = PNG_UNSUPPORTED_HEADER; } else { xh.width = 1; xh.height = 1; } memset(xbuf,0,sizeof(xbuf)); for (i = 0; i < 4; i++) { xbuf[i] = (xh.width>>((3-i)<<3))&255; } for (i = 0; i < 4; i++) { xbuf[4+i] = (xh.height>>((3-i)<<3))&255; } xbuf[8] = xh.bit_depth; xbuf[9] = xh.color_type; xbuf[10]= xh.compression_type; xbuf[11]= xh.filter_type; xbuf[12]= xh.interlace_type; get_header() = (xh); } if (submode < 13) { y = xbuf[submode]; put_previous(xbuf[submode]); submode++; } if (submode >= 13) { readmode = 4; submode = 0; } }break; case 4: case 8: case 12: case 16: case 24: { if (submode == 0) { xlong = get_checksum(); reset_checksum(); } if (submode < 4) { y = (xlong>>((3-submode)<<3))&255; submode++; } if (submode >= 4) { if (readmode == 4) { if (get_palette().size() > 0) readmode = 5; else if (get_palette().get_transparent_pixel().a && (!(get_header().color_type & PNG_HAS_PALETTE))) readmode = 13; else if (get_header().color_type & PNG_HAS_PALETTE) { readmode = 5; out = PNG_MISSING_PALETTE; } else readmode = 17; submode = 0; } else if (readmode == 8) { if (get_header().color_type & PNG_HAS_PALETTE) readmode = 9; else if (get_palette().get_transparent_pixel().a) readmode 
= 13; else readmode = 17; submode = 0; } else if ((readmode == 12) || (readmode == 16)) { readmode = 17; submode = 0; } else if (readmode == 24) { readmode = 25; submode = 0; } } }break; case 5: /* block length */ case 9: case 13: case 21: { if (submode == 0) { if (readmode == 5) { xlong = get_palette().size()*3; if (xlong > 768) xlong = 768; } else if (readmode == 9) { xlong = get_palette().size(); if (xlong > 256) xlong = 256; } else if (readmode == 13) { xlong = ((get_header().color_type & 2) ? 6 : 2); } else xlong = 0; } if (submode < 4) { y = (xlong>>((3-submode)<<3))&255; submode++; } if (submode >= 4) { if (readmode == 5) readmode = 6; else if (readmode == 9) readmode = 10; else if (readmode == 13) readmode = 14; else if (readmode == 21) readmode = 22; submode = 0; } }break; case 6: /* PLTE tag */ { if (submode < 4) { y = pngenc_chunk_plte[submode]; put_previous(pngenc_chunk_plte[submode]); submode++; } if (submode >= 4) { readmode = 7; submode = 0; } }break; case 7: /* palette data */ { if ((unsigned int)submode < xlong) { unsigned int sm = submode/3; if (sm < get_palette().size()) { switch (submode%3) { case 0: y = get_palette()[sm].r; put_previous(get_palette()[sm].r); break; case 1: y = get_palette()[sm].g; put_previous(get_palette()[sm].g); break; case 2: y = get_palette()[sm].b; put_previous(get_palette()[sm].b); break; } submode++; } } if ((unsigned int)submode >= xlong) { readmode = 8; submode = 0; } }break; case 10: /* tRNS tag */ case 14: { if (submode < 4) { y = pngenc_chunk_trns[submode]; put_previous(pngenc_chunk_trns[submode]); submode++; } if (submode >= 4) { if (readmode == 10) readmode = 11; else if (readmode == 14) readmode = 15; submode = 0; } }break; case 11: { if ((unsigned int)submode < xlong) { if ((unsigned int)submode < get_palette().size()) { y = get_palette()[submode].a; put_previous(get_palette()[submode].a); submode++; } } if ((unsigned int)submode >= xlong) { readmode = 12; submode = 0; } }break; case 15: { if (submode == 0) { 
m_tmptrns = get_palette().get_transparent_pixel().down_cast (get_header().bit_depth); } if ((unsigned int)submode < xlong) { switch (submode) { case 0: y = (m_tmptrns.r>>8)&255;break; case 1: y = (m_tmptrns.r)&255;break; case 2: y = (m_tmptrns.g>>8)&255;break; case 3: y = (m_tmptrns.g)&255;break; case 4: y = (m_tmptrns.b>>8)&255;break; case 5: y = (m_tmptrns.b)&255;break; } } if ((unsigned int)submode >= xlong) { readmode = 16; submode = 0; } }break; case 17: case 18: case 19: { int err = 0; if (submode == 0) { if (readmode == 17) { xenc.reset(); xenc.set_header(zss_header()); readmode++; sidemode = 0; if (get_header().interlace_type == 1) { get_interlace_data() = png_adam7_data(1); xdiv_width = get_interlace_data().compute_width (get_header().width); xdiv_height = get_interlace_data().compute_height (get_header().height); xdiv_xpos = 0; xdiv_ypos = 0; } else { get_interlace_data() = png_adam7_data(0); xdiv_width = get_header().width; xdiv_height= get_header().height; xdiv_xpos = 0; xdiv_ypos = 0; } filter_typ =0; filter_backlog.resize(0); try { filter_backlog.resize(png_color_recast(xdiv_width, get_header().bit_depth, get_header().color_type) ); filter_dist = png_color_recast(1, get_header().bit_depth, get_header().color_type); /*fprintf(stderr,"filtr %i <- %i,%i\n",filter_dist, get_header().color_type, get_header().bit_depth );*/ } catch (...) 
{ out = PNG_UNSUPPORTED_HEADER; } } xenc.clear_pending(); submode++; } if (submode == 1) { while ((xenc.get_pending_count() < m_d_chunk_size) && (readmode < 19) && (!err)) { xbuf[0] = generate_sample(); err = xenc.put(&xbuf[0],1,NULL); } m_pendpos = 0; if ((readmode >= 19) && (!err)) err = xenc.finish(); if (err && (err != 1)) out = PNG_ZSS_ERROR; submode++; xlong = xenc.get_pending_count(); } if (submode == 2) { /* length */ if (m_pendpos < 4) { y = (xlong>>((3-m_pendpos)<<3))&255; m_pendpos++; } else { submode++; m_pendpos = 0; } } if (submode == 3) { /* tag */ if (m_pendpos < 4) { y = pngenc_chunk_idat[m_pendpos]; put_previous(pngenc_chunk_idat[m_pendpos]); m_pendpos++; } else { submode++; m_pendpos = 0; } } if (submode == 4) { if (m_pendpos < xenc.get_pending_count()) { y = xenc.get_pending()[m_pendpos]; put_previous(xenc.get_pending()[m_pendpos]); m_pendpos++; } else { submode = 5; m_pendpos = 0; } } if (submode == 5) { if (m_pendpos == 0) { xlong = get_checksum(); reset_checksum(); } if (m_pendpos < 4) { y = (xlong>>((3-m_pendpos)<<3))&255; m_pendpos++; } if (m_pendpos >= 4) { submode = 0; if (readmode == 19) { readmode =21; } } } }break; case 22: /* IEND tag */ { if (submode < 4) { y = pngenc_chunk_iend[submode]; put_previous(pngenc_chunk_iend[submode]); submode++; } if (submode >= 4) { readmode = (xlong ? 
23 : 24); submode = 0; } }break; case 23: { if (xlong) { put_previous(y); xlong--; } if (!xlong) readmode = 24; }break; case 25: /* end of stream */ { out = PNG_DONE; }break; default: { out = PNG_UNDEFINED; }break; } return (png_error)out; } int png_encoder::generate_sample(void) { int ch = 0; unsigned char xc; int xreadmode = get_interlace_data().get_level()+10; unsigned int i, lv; if (xreadmode == 10) xreadmode = 9; switch (sidemode) { case 0: /* filter */ { filter_typ = m_rand%5u; sidemode++; xdiv_index=0; ch = filter_typ; /*fprintf(stderr,"<%u %lu>\n",filter_typ, xenc.get_char_count() );*/ }break; case 1: /* data */ { if (get_receptor() != NULL) { png_receptor *rcpt = get_receptor(); png_adam7_data &idta = get_interlace_data(); switch (get_header().bit_depth) { case 1: { xc = 0; if (get_header().color_type & PNG_HAS_PALETTE) { for (i = 0; (i < 8)&&(xdiv_xpos<xdiv_width); i++, xdiv_xpos++) { lv = get_palette().closest_match (rcpt->get_pixel (idta.real_x(xdiv_xpos),idta.real_y(xdiv_ypos), idta.get_level()), 2); xc |= ((lv&1)<<(7-i)); } } else { for (i = 0; (i < 8)&&(xdiv_xpos<xdiv_width); i++, xdiv_xpos++) { lv = (rcpt->get_pixel (idta.real_x(xdiv_xpos),idta.real_y(xdiv_ypos), idta.get_level()).gray()>>15); xc |= ((lv&1)<<(7-i)); } } }break; case 2: { xc = 0; if (get_header().color_type & PNG_HAS_PALETTE) { for (i = 0; (i < 4)&&(xdiv_xpos<xdiv_width); i++, xdiv_xpos++) { lv = get_palette().closest_match (rcpt->get_pixel (idta.real_x(xdiv_xpos),idta.real_y(xdiv_ypos), idta.get_level()), 4); xc |= ((lv&3)<<((3-i)<<1)); } } else { for (i = 0; (i < 4)&&(xdiv_xpos<xdiv_width); i++, xdiv_xpos++) { lv = (rcpt->get_pixel (idta.real_x(xdiv_xpos),idta.real_y(xdiv_ypos), idta.get_level()).gray()>>14); xc |= ((lv&3)<<((3-i)<<1)); } } }break; case 4: { xc = 0; if (get_header().color_type & PNG_HAS_PALETTE) { for (i = 0; (i < 2)&&(xdiv_xpos<xdiv_width); i++, xdiv_xpos++) { lv = get_palette().closest_match (rcpt->get_pixel (idta.real_x(xdiv_xpos),idta.real_y(xdiv_ypos), 
idta.get_level()), 16); xc |= ((lv&15)<<((1-i)<<2)); } } else { for (i = 0; (i < 2)&&(xdiv_xpos<xdiv_width); i++, xdiv_xpos++) { lv = (rcpt->get_pixel (idta.real_x(xdiv_xpos),idta.real_y(xdiv_ypos), idta.get_level()).gray()>>12); xc |= ((lv&15)<<((1-i)<<2)); } } }break; case 8: { if (get_header().color_type == 0) { xc = (rcpt->get_pixel (idta.real_x(xdiv_xpos),idta.real_y(xdiv_ypos), idta.get_level()).gray()>>8); xdiv_xpos++; } else if (get_header().color_type == 2) { switch (xdiv_index%3) { case 0: m_tmptrns = rcpt->get_pixel (idta.real_x(xdiv_xpos),idta.real_y(xdiv_ypos), idta.get_level()); xc = m_tmptrns.r>>8; break; case 1: xc = m_tmptrns.g>>8; break; case 2: xc = m_tmptrns.b>>8; xdiv_xpos++; break; } } else if (get_header().color_type == 3) { lv = get_palette().closest_match (rcpt->get_pixel (idta.real_x(xdiv_xpos),idta.real_y(xdiv_ypos), idta.get_level()), 256); xc = lv; xdiv_xpos++; } else if (get_header().color_type == 4) { switch (xdiv_index%2) { case 0: m_tmptrns = rcpt->get_pixel (idta.real_x(xdiv_xpos),idta.real_y(xdiv_ypos), idta.get_level()); xc = m_tmptrns.gray()>>8; break; case 1: xc = m_tmptrns.a>>8; xdiv_xpos++; break; } } else if (get_header().color_type == 6) { switch (xdiv_index%4) { case 0: m_tmptrns = rcpt->get_pixel (idta.real_x(xdiv_xpos),idta.real_y(xdiv_ypos), idta.get_level()); xc = m_tmptrns.r>>8; break; case 1: xc = m_tmptrns.g>>8; break; case 2: xc = m_tmptrns.b>>8; break; case 3: xc = m_tmptrns.a>>8; xdiv_xpos++; break; } } }break; case 16: { if (get_header().color_type == 0) { switch (xdiv_index%2) { case 0: m_tmptrns.r = (rcpt->get_pixel (idta.real_x(xdiv_xpos),idta.real_y(xdiv_ypos), idta.get_level()).gray()); xc = (m_tmptrns.r>>8)&255; break; case 1: xc = (m_tmptrns.r)&255; xdiv_xpos++; break; } } else if (get_header().color_type == 2) { switch (xdiv_index%6) { case 0: m_tmptrns = rcpt->get_pixel (idta.real_x(xdiv_xpos),idta.real_y(xdiv_ypos), idta.get_level()); xc = (m_tmptrns.r>>8)&255; break; case 1: xc = (m_tmptrns.r&255); 
break; case 2: xc = (m_tmptrns.g>>8)&255; break; case 3: xc = (m_tmptrns.g&255); break; case 4: xc = (m_tmptrns.b>>8)&255; break; case 5: xc = (m_tmptrns.b&255); xdiv_xpos++; break; } } else if (get_header().color_type == 3) { switch (xdiv_index%2) { case 0: m_tmptrns.r = get_palette().closest_match (rcpt->get_pixel (idta.real_x(xdiv_xpos),idta.real_y(xdiv_ypos), idta.get_level()), 65535); xc = (m_tmptrns.r>>8)&255; break; case 1: xc = (m_tmptrns.r)&255; xdiv_xpos++; } } else if (get_header().color_type == 4) { switch (xdiv_index%4) { case 0: m_tmptrns = rcpt->get_pixel (idta.real_x(xdiv_xpos),idta.real_y(xdiv_ypos), idta.get_level()); m_tmptrns.r = m_tmptrns.gray(); xc = (m_tmptrns.r>>8)&255; break; case 1: xc = (m_tmptrns.r)&255; break; case 2: xc = (m_tmptrns.a>>8)&255; break; case 3: xc = (m_tmptrns.a)&255; xdiv_xpos++; break; } } else if (get_header().color_type == 6) { switch (xdiv_index%8) { case 0: m_tmptrns = rcpt->get_pixel (idta.real_x(xdiv_xpos),idta.real_y(xdiv_ypos), idta.get_level()); xc = (m_tmptrns.r>>8)&255; break; case 1: xc = m_tmptrns.r&255; break; case 2: xc = (m_tmptrns.g>>8)&255; break; case 3: xc = (m_tmptrns.g&255); break; case 4: xc = (m_tmptrns.b>>8)&255; break; case 5: xc = (m_tmptrns.b&255); break; case 6: xc = (m_tmptrns.a>>8)&255; break; case 7: xc = (m_tmptrns.a&255); xdiv_xpos++; break; } } }break; default: xc = m_rand; break; } } else xc = m_rand; xc&=255; // ch = xc; switch (filter_typ) { case 0: /* no effect */ break; case 1: { if ((xdiv_index >= filter_dist) && (xdiv_index < filter_backlog.size())) { ch = (xc-filter_log[(xdiv_index-filter_dist)&7])&255; } //ch=0; }break; case 2: { if (xdiv_index < filter_backlog.size()) { ch = (xc-filter_backlog[xdiv_index])&255; } //ch=0; }break; case 3: { unsigned int avg = 0; if ((xdiv_index >= filter_dist) && (xdiv_index < filter_backlog.size())) { avg += filter_log[(xdiv_index-filter_dist)&7]; } if (xdiv_index < filter_backlog.size()) { avg += filter_backlog[xdiv_index]; } ch = 
(xc-(avg>>1))&255; //ch=0; }break; case 4: { unsigned int a = 0, b = 0, c = 0, xpt; if ((xdiv_index >= filter_dist) && (xdiv_index < filter_backlog.size())) { a = filter_log[(xdiv_index-filter_dist)&7]; c = filter_backlog[xdiv_index-filter_dist]; } if (xdiv_index < filter_backlog.size()) { b = filter_backlog[xdiv_index]; } xpt = png_paeth_predict(a,b,c); ch = (xc-xpt)&255; //ch = 0; }break; default: break; } if (xdiv_index < filter_backlog.size()) { if (xdiv_index >= 8) filter_backlog[xdiv_index-8] = filter_log[xdiv_index&7]; filter_log[xdiv_index&7] = xc; xdiv_index++; } if (xdiv_index >= filter_backlog.size()) { sidemode = 0; xdiv_ypos++; /*fprintf(stderr,"++ %lu\n",xdiv_ypos);*/ for (xdiv_xpos = ((xdiv_index >= 8) ? (xdiv_index-8) : 0); xdiv_xpos<xdiv_index; xdiv_xpos++) { filter_backlog[xdiv_xpos] = filter_log[xdiv_xpos&7]; } xdiv_index = 0; xdiv_xpos=0; memset(&filter_log[0],0,8*sizeof(unsigned char)); } if (xdiv_ypos >= xdiv_height) { /* on to next interlace level */ if (((xreadmode >= 11) && (xreadmode <= 17)) || (xreadmode == 9)) { /* recompute data */ if ((xreadmode == 17) || (xreadmode == 9)) { xreadmode++; /*fprintf(stderr,"x19: y < h: %lu < %lu\n", xdiv_ypos,xdiv_height);*/ readmode = 19; } else if ((xreadmode >= 11) && (xreadmode <= 16)) { do { xreadmode++; /*fprintf(stderr,"xreadmode++ %i\n",xreadmode);*/ get_interlace_data() = png_adam7_data(xreadmode-10); xdiv_width = get_interlace_data().compute_width (get_header().width); xdiv_height = get_interlace_data().compute_height (get_header().height); } while ( ((xreadmode >= 11) && (xreadmode <= 17)) && ((xdiv_width == 0)|| (xdiv_height==0))); xdiv_xpos = 0; xdiv_ypos = 0; xdiv_index= 0; filter_typ =0; filter_backlog.resize(0); filter_backlog.resize(png_color_recast(xdiv_width, get_header().bit_depth, get_header().color_type,true) ); filter_dist = png_color_recast(1, get_header().bit_depth, get_header().color_type,true); if (xreadmode > 17) readmode = 19; } } } }break; default: readmode = 19; break; } /* 
TODO fix */ return ch; } }; <file_sep>///////////////////////////////////////////////////////////////////////////// // // --- Object.cpp --- // Created by <NAME> // ////////////////////////////////////////////////////////////////////////////// #include "common.h" #include <stdio.h> #include <stdlib.h> /* -------------------------------------------------------------------------- */ /* -------------------------------------------------------------------------- */ //Line: P = P1 + t (P2 – P1), t ∈ [0,1] //Ray: P = P1 + t V , t ∈ [0,∞) //Phase 1 (2.1) intersect functions: //transform ray in world space to object space, call raySphereIntersection() or raySquareIntersection() //2.2: Populate IntersectionValues result //3.1: raySphereIntersection() // Intersect a ray with a sphere //4.1: raySquareIntersection() // Intersect a ray with a square Object::IntersectionValues Sphere::intersect(vec4 p0_w, vec4 V_w){ //normalize V_w? //Let C be the local object definition space (OS) to world coordinate //space (WS) Transform the WS ray’s origin Rstart_ws and direction //Rdirection_ws to OS. 
//modeling transformation: {Model}ws = C {Model}os //Origin is just R′start_os = C-1Rstart_ws //R′direction_os = (C*)-1Rdirection_ws //So tws = tos / || R′direction_os || //The intersection point in WS is: Pws = Rstart_ws + tws * Rdirection_ws //Pick any vector Vos in the tangent plane: Vws = C Vos //V_w = normalize(V_w); //V_w.w = 0; IntersectionValues result; vec4 p0_o = INVC*p0_w; vec4 V_o = INVCStar*V_w; double L = length(V_o); V_o = normalize(V_o); double t_o = raySphereIntersection(p0_o, V_o); //result.t_o = t_o; result.t_w = t_o/L; //result.P_o = P0_o; result.P_w = p0_w + result.t_w * V_w; result.P_o = p0_o +t_o*V_o; result.N_o = result.P_o; result.N_o.w = 0; result.N_o = normalize(result.N_o); //also add this to other obj****:: NORMALIZE result.N_w = TRANINVC * result.N_o; result.N_w.w = 0; result.N_w = normalize(result.N_w); return result; } /* -------------------------------------------------------------------------- */ /* ------ Ray = p0 + t*V sphere at origin O and radius r : Find t ------- */ double Sphere::raySphereIntersection(vec4 p0, vec4 V, vec4 O, double r){ double t = std::numeric_limits< double >::infinity(); //TODO: Ray-sphere intersection; //in slides (get 2 roots of equation) //define a b (from slides), find roots //default: doublt t = infinity //sphere|P–O|^2 -r^2 =0 //Substitute ray form into sphere equation: |P0 + tV – O|^2 – r^2 = 0. 
//Solve quadratic equation at2 + bt + c = 0 where //a=1; b=2V·(P0–O); c=|P0–O|^2 –r^2 //(expand|P0+tV–O|2 =|tV+P0–O|2 =|tV+(P0 –O)|2 and recallthat V · V = 1) //t=(-b±sqrt(b2 –4ac))/2a //0, 1, or 2 solutions!a double a = 1; double b = dot(2*V,(p0-O)); double c = length(p0-O)*length(p0-O) - pow(r,2);; double discriminant =(pow(b,2) - 4*a*c); if(discriminant<0){ return t; } double sqroot = sqrt(discriminant); double t_0 = (-b + sqroot)/(2*a); double t_1 = (-b - sqroot)/(2*a); //take the one that is smaller //if less than epsilon (non-negative/zero) if(t_0 > EPSILON || t_1 > EPSILON){ if(t_0 < EPSILON){ t_0 =std::numeric_limits< double >::infinity(); } if(t_1 < EPSILON){ t_1= std::numeric_limits< double >::infinity(); } t = fmin(t_0, t_1); return fmin(t_0, t_1); } return t; } /* -------------------------------------------------------------------------- */ /* -------------------------------------------------------------------------- */ Object::IntersectionValues Square::intersect(vec4 p0_w, vec4 V_w){ IntersectionValues result; //TODO: Ray-square setup vec4 p0_o = INVC*p0_w; vec4 V_o = INVCStar*V_w; double L = length(V_o); V_o = normalize(V_o); double t_o = raySquareIntersection(p0_o, V_o); result.t_w = t_o/L; //result.t_o = t_0; //assume p and v are same as obj and pass to above function //if( == infinity){std::numeric_limits< double >::infinity();} result.P_w = p0_w + result.t_w * V_w; result.P_o = p0_o +t_o*V_o; result.N_o = vec4(0.0,0.0,1.0,0.0); //result.N_o.w = 0; //result.N_o = normalize(result.N_o); result.N_w = TRANINVC * result.N_o; result.N_w.w = 0; result.N_w = normalize(result.N_w); return result; } /* -------------------------------------------------------------------------- */ /* -------------------------------------------------------------------------- */ double Square::raySquareIntersection(vec4 p0, vec4 V){ double t = std::numeric_limits< double >::infinity(); //TODO: Ray-square intersection; //Now you have to write code that intersects unit square on 
the XY­plane with Z=0. // t= N·(S–P1)/N·(P2–P1) //p1=p0 //p2-p1 = V //N = (0,0,1,1) (positive z) //S = any point in plane (good point to pick is origin (0,0,0,1)) vec4 N = vec4(0,0,1,0); //where z is positive (normal z is positive) vec4 S = vec4(0,0,0,1); //origin V = normalize(V); //dot product if : dot(N, V) ==0, return inf double t_1 = (dot(N, (S-p0)))/(dot(N, V)); //if t < 0 --> it is infinity (no intersect) if(dot(N,V) == 0){ return t; } if(t_1 <= EPSILON){ //t = t_1; return t; } //else find p, check if within bounds: (1,1), (1,-1), (-1,-1), (-1,1) //P = P1 + t (P2 – P1) //if p in range xy -1 to 1, hit square, return new t vec4 P = p0 + t_1*V; //if((std::abs(P.x) < 1+EPSILON) && (std::abs(P.y) < 1+EPSILON)){ if((std::abs(P.x) < 1+EPSILON) && (std::abs(P.y) < 1+EPSILON)){ return t_1; } return t; /* vec4 norm = normalize(p0); vec4 x_plane = norm.x; vec4 y_plane = norm.y; vec4 z_plane = norm.z; */ } <file_sep>/* * (c) Copyright 1993, 1994, Silicon Graphics, Inc. * ALL RIGHTS RESERVED * Permission to use, copy, modify, and distribute this software for * any purpose and without fee is hereby granted, provided that the above * copyright notice appear in all copies and that both the copyright notice * and this permission notice appear in supporting documentation, and that * the name of Silicon Graphics, Inc. not be used in advertising * or publicity pertaining to distribution of the software without specific, * written prior permission. * * THE MATERIAL EMBODIED ON THIS SOFTWARE IS PROVIDED TO YOU "AS-IS" * AND WITHOUT WARRANTY OF ANY KIND, EXPRESS, IMPLIED OR OTHERWISE, * INCLUDING WITHOUT LIMITATION, ANY WARRANTY OF MERCHANTABILITY OR * FITNESS FOR A PARTICULAR PURPOSE. IN NO EVENT SHALL SILICON * GRAPHICS, INC. 
BE LIABLE TO YOU OR ANYONE ELSE FOR ANY DIRECT,
 * SPECIAL, INCIDENTAL, INDIRECT OR CONSEQUENTIAL DAMAGES OF ANY
 * KIND, OR ANY DAMAGES WHATSOEVER, INCLUDING WITHOUT LIMITATION,
 * LOSS OF PROFIT, LOSS OF USE, SAVINGS OR REVENUE, OR THE CLAIMS OF
 * THIRD PARTIES, WHETHER OR NOT SILICON GRAPHICS, INC. HAS BEEN
 * ADVISED OF THE POSSIBILITY OF SUCH LOSS, HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, ARISING OUT OF OR IN CONNECTION WITH THE
 * POSSESSION, USE OR PERFORMANCE OF THIS SOFTWARE.
 *
 * US Government Users Restricted Rights
 * Use, duplication, or disclosure by the Government is subject to
 * restrictions set forth in FAR 52.227.19(c)(2) or subparagraph
 * (c)(1)(ii) of the Rights in Technical Data and Computer Software
 * clause at DFARS 252.227-7013 and/or in similar or successor
 * clauses in the FAR or the DOD or NASA FAR Supplement.
 * Unpublished-- rights reserved under the copyright laws of the
 * United States. Contractor/manufacturer is Silicon Graphics,
 * Inc., 2011 N. Shoreline Blvd., Mountain View, CA 94039-7311.
 *
 * OpenGL(TM) is a trademark of Silicon Graphics, Inc.
 */
/*
 * Trackball code:
 *
 * Implementation of a virtual trackball.
 * Implemented by <NAME>, lots of ideas from <NAME> and
 * the August '88 issue of Siggraph's "Computer Graphics," pp. 121-129.
 *
 * Vector manip code:
 *
 * Original code from:
 * <NAME>, <NAME>, <NAME>, and <NAME>
 *
 * Much mucking with by:
 * <NAME>
 */

#if defined(_WIN32)
#pragma warning (disable:4244)  /* disable bogus conversion warnings */
#endif

#include "common.h"

/*
 * This size should really be based on the distance from the center of
 * rotation to the point on the object underneath the mouse. That
 * point would then track the mouse as closely as possible. This is a
 * simple example, though, so that is left as an Exercise for the
 * Programmer.
 */
#define TRACKBALLSIZE (0.8f)

/*
 * Local function prototypes (not defined in trackball.h)
 */
static float tb_project_to_sphere(float, float, float);
static void normalize_quat(float [4]);

/* Set a 3-vector to zero. */
void TrackBall::vzero(float *v)
{
    v[0] = 0.0;
    v[1] = 0.0;
    v[2] = 0.0;
}

/* Set the three components of a 3-vector. */
void TrackBall::vset(float *v, float x, float y, float z)
{
    v[0] = x;
    v[1] = y;
    v[2] = z;
}

/* Component-wise 3-vector subtraction: dst = src1 - src2. */
void TrackBall::vsub(const float *src1, const float *src2, float *dst)
{
    dst[0] = src1[0] - src2[0];
    dst[1] = src1[1] - src2[1];
    dst[2] = src1[2] - src2[2];
}

/* Copy the first three components of v1 into v2. */
void TrackBall::vcopy(const float *v1, float *v2)
{
    register int i;
    for (i = 0 ; i < 3 ; i++)
        v2[i] = v1[i];
}

/* 3-vector cross product: cross = v1 x v2 (safe if cross aliases an input). */
void TrackBall::vcross(const float *v1, const float *v2, float *cross)
{
    float temp[3];

    temp[0] = (v1[1] * v2[2]) - (v1[2] * v2[1]);
    temp[1] = (v1[2] * v2[0]) - (v1[0] * v2[2]);
    temp[2] = (v1[0] * v2[1]) - (v1[1] * v2[0]);
    vcopy(temp, cross);
}

/* Euclidean length of a 3-vector. */
float TrackBall::vlength(const float *v)
{
    return sqrt(v[0] * v[0] + v[1] * v[1] + v[2] * v[2]);
}

/* Scale a 3-vector in place by the given factor. */
void TrackBall::vscale(float *v, float div)
{
    v[0] *= div;
    v[1] *= div;
    v[2] *= div;
}

/* Normalize a 3-vector in place (no guard against zero length). */
void TrackBall::vnormal(float *v)
{
    vscale(v,1.0/vlength(v));
}

/* 3-vector dot product. */
float TrackBall::vdot(const float *v1, const float *v2)
{
    return v1[0]*v2[0] + v1[1]*v2[1] + v1[2]*v2[2];
}

/* Component-wise 3-vector addition: dst = src1 + src2. */
void TrackBall::vadd(const float *src1, const float *src2, float *dst)
{
    dst[0] = src1[0] + src2[0];
    dst[1] = src1[1] + src2[1];
    dst[2] = src1[2] + src2[2];
}

/*
 * Ok, simulate a track-ball. Project the points onto the virtual
 * trackball, then figure out the axis of rotation, which is the cross
 * product of P1 P2 and O P1 (O is the center of the ball, 0,0,0)
 * Note: This is a deformed trackball-- is a trackball in the center,
 * but is deformed into a hyperbolic sheet of rotation away from the
 * center. This particular function was chosen after trying out
 * several variations.
 *
 * It is assumed that the arguments to this routine are in the range
 * (-1.0 ... 1.0)
 */
void TrackBall::trackball(float q[4], float p1x, float p1y, float p2x, float p2y)
{
    float a[3]; /* Axis of rotation */
    float phi;  /* how much to rotate about axis */
    float p1[3], p2[3], d[3];
    float t;

    if (p1x == p2x && p1y == p2y) {
        /* Zero rotation */
        vzero(q);
        q[3] = 1.0;
        return;
    }

    /*
     * First, figure out z-coordinates for projection of P1 and P2 to
     * deformed sphere
     */
    vset(p1,p1x,p1y,tb_project_to_sphere(TRACKBALLSIZE,p1x,p1y));
    vset(p2,p2x,p2y,tb_project_to_sphere(TRACKBALLSIZE,p2x,p2y));

    /*
     * Now, we want the cross product of P1 and P2
     */
    vcross(p2,p1,a);

    /*
     * Figure out how much to rotate around that axis.
     */
    vsub(p1,p2,d);
    t = vlength(d) / (2.0*TRACKBALLSIZE);

    /*
     * Avoid problems with out-of-control values...
     */
    if (t > 1.0) t = 1.0;
    if (t < -1.0) t = -1.0;
    phi = 2.0 * asin(t);

    axis_to_quat(a,phi,q);
}

/*
 * Given an axis and angle, compute quaternion.
 */
void TrackBall::axis_to_quat(float a[3], float phi, float q[4])
{
    vnormal(a);
    vcopy(a,q);
    vscale(q,sin(phi/2.0));
    q[3] = cos(phi/2.0);
}

/*
 * Multiply two quaternions (can be done in place).
 */
/* NOTE(review): vcopy copies only the first 3 components, so dest[3]
 * (the scalar part) is never written here — verify against callers. */
void TrackBall::quat_mult(float q1[4], float q2[4], float dest[4])
{
    float tmp[4];
    tmp[0] = q1[3]*q2[0] + q1[0]*q2[3] + q1[1]*q2[2] - q1[2]*q2[1];
    tmp[1] = q1[3]*q2[1] - q1[0]*q2[2] + q1[1]*q2[3] + q1[2]*q2[0];
    tmp[2] = q1[3]*q2[2] + q1[0]*q2[1] - q1[1]*q2[0] + q1[2]*q2[3];
    tmp[3] = q1[3]*q2[3] - q1[0]*q2[0] - q1[1]*q2[1] - q1[2]*q2[2];
    vcopy(tmp,dest);
}

/*
 * Project an x,y pair onto a sphere of radius r OR a hyperbolic sheet
 * if we are away from the center of the sphere.
 */
static float tb_project_to_sphere(float r, float x, float y)
{
    float d, t, z;

    d = sqrt(x*x + y*y);
    if (d < r * 0.70710678118654752440) {    /* Inside sphere */
        z = sqrt(r*r - d*d);
    } else {           /* On hyperbola */
        t = r / 1.41421356237309504880;
        z = t*t / d;
    }
    return z;
}

/*
 * Given two rotations, e1 and e2, expressed as quaternion rotations,
 * figure out the equivalent single rotation and stuff it into dest.
 *
 * This routine also normalizes the result every RENORMCOUNT times it is
 * called, to keep error from creeping in.
 *
 * NOTE: This routine is written so that q1 or q2 may be the same
 * as dest (or each other).
 */
#define RENORMCOUNT 97

void TrackBall::add_quats(float q1[4], float q2[4], float dest[4])
{
    static int count=0;
    float t1[4], t2[4], t3[4];
    float tf[4];

#if 0
printf("q1 = %f %f %f %f\n", q1[0], q1[1], q1[2], q1[3]);
printf("q2 = %f %f %f %f\n", q2[0], q2[1], q2[2], q2[3]);
#endif

    vcopy(q1,t1);
    vscale(t1,q2[3]);

    vcopy(q2,t2);
    vscale(t2,q1[3]);

    vcross(q2,q1,t3);
    vadd(t1,t2,tf);
    vadd(t3,tf,tf);
    tf[3] = q1[3] * q2[3] - vdot(q1,q2);

#if 0
printf("tf = %f %f %f %f\n", tf[0], tf[1], tf[2], tf[3]);
#endif

    dest[0] = tf[0];
    dest[1] = tf[1];
    dest[2] = tf[2];
    dest[3] = tf[3];

    if (++count > RENORMCOUNT) {
        count = 0;
        normalize_quat(dest);
    }
}

/*
 * Quaternions always obey: a^2 + b^2 + c^2 + d^2 = 1.0
 * If they don't add up to 1.0, dividing by their magnitued will
 * renormalize them.
 *
 * Note: See the following for more information on quaternions:
 *
 * - <NAME>., Animating rotation with quaternion curves, Computer
 * Graphics 19, No 3 (Proc. SIGGRAPH'85), 245-254, 1985.
 * - <NAME>., Quaternion calculus as a basic tool in computer
 * graphics, The Visual Computer 5, 2-13, 1989.
 */
static void normalize_quat(float q[4])
{
    int i;
    float mag;

    mag = sqrt(q[0]*q[0] + q[1]*q[1] + q[2]*q[2] + q[3]*q[3]);
    for (i = 0; i < 4; i++) q[i] /= mag;
}

/*
 * Build a rotation matrix, given a quaternion rotation.
 *
 */
void TrackBall::build_rotmatrix(float m[4][4], float q[4])
{
    m[0][0] = 1.0 - 2.0 * (q[1] * q[1] + q[2] * q[2]);
    m[0][1] = 2.0 * (q[0] * q[1] - q[2] * q[3]);
    m[0][2] = 2.0 * (q[2] * q[0] + q[1] * q[3]);
    m[0][3] = 0.0;

    m[1][0] = 2.0 * (q[0] * q[1] + q[2] * q[3]);
    m[1][1]= 1.0 - 2.0 * (q[2] * q[2] + q[0] * q[0]);
    m[1][2] = 2.0 * (q[1] * q[2] - q[0] * q[3]);
    m[1][3] = 0.0;

    m[2][0] = 2.0 * (q[2] * q[0] - q[1] * q[3]);
    m[2][1] = 2.0 * (q[1] * q[2] + q[0] * q[3]);
    m[2][2] = 1.0 - 2.0 * (q[1] * q[1] + q[0] * q[0]);
    m[2][3] = 0.0;

    m[3][0] = 0.0;
    m[3][1] = 0.0;
    m[3][2] = 0.0;
    m[3][3] = 1.0;
}

/* 4x4 matrix product: out = m0 * m1 (out may alias an input). */
void TrackBall::matxmat(float out[4][4],float m0[4][4],float m1[4][4])
{
    float r[4][4];
    int i,j,k;
    for(i=0;i<4;i++) {
        for(j=0;j<4;j++) {
            r[i][j] = 0;
            for(k=0;k<4;k++) {
                r[i][j] += m0[i][k]*m1[k][j];
            }
        }
    }
    matcopy(out,r);
    return;
}

/* Set a 4x4 matrix to the identity. */
void TrackBall::matident(float m[4][4])
{
    int i,j;
    for(i=0;i<4;i++) {
        for(j=0;j<4;j++) {
            m[i][j] = (i == j)?1:0;
        }
    }
    return;
}

/* Apply the upper-left 3x3 of mat to 3-vector in; result in out
 * (in and out may alias). */
void TrackBall::matmult(float *in,float *out,float mat[4][4])
{
    float tmp[3];
    tmp[0] = (in[0]*mat[0][0] + in[1]*mat[0][1] + in[2]*mat[0][2]);
    tmp[1] = (in[0]*mat[1][0] + in[1]*mat[1][1] + in[2]*mat[1][2]);
    tmp[2] = (in[0]*mat[2][0] + in[1]*mat[2][1] + in[2]*mat[2][2]);
    vcopy(tmp,out);
    return;
}

/* Invert the upper-left 3x3 of A in place via cofactors; the fourth
 * row/column is left untouched.  Warns and returns on a singular matrix. */
void TrackBall::matinvert(float A[4][4])
{
    /*
     * a b c   a b
     * d e f   d e
     * g h i   g h
     * a b c   a b
     * d e f   d e
     */
    double a,b,c,d,e,f,g,h,i;
    double Da,Db,Dc,Dd,De,Df,Dg,Dh,Di;
    double detA;
    long int k,j;

    /* assign temp vars */
    a = A[0][0];
    b = A[0][1];
    c = A[0][2];
    d = A[1][0];
    e = A[1][1];
    f = A[1][2];
    g = A[2][0];
    h = A[2][1];
    i = A[2][2];

    /* calc determinat of 3x3 A */
    detA = (a * e * i) + (d * h * c) + (b * f * g);
    detA = detA - (c * e * g) - (a * f * h) - (b * d * i);

    /* check for singular matrix */
    if (detA == 0.0) {
        fprintf(stderr,"Warning, singular matrix detected.\n");
        return;
    }

    /* calc sub (2x2) determinats if Dx row and column is eliminated */
    /* and mult by (-1)^i+j */
    Da = ((e * i) - (f * h));
    Db = ((f * g) - (d * i));
    Dc = ((d * h) - (g * e));
    Dd = ((h * c) - (b * i));
    De = ((i * a) - (c * g));
    Df = ((g * b) - (a * h));
    Dg = ((b * f) - (e * c));
    Dh = ((c * d) - (a * f));
    Di = ((a * e) - (b * d));

    /* build the transpose matrix from the sub determinats */
    A[0][0] = Da; A[1][0] = Db; A[2][0] = Dc;
    A[0][1] = Dd; A[1][1] = De; A[2][1] = Df;
    A[0][2] = Dg; A[1][2] = Dh; A[2][2] = Di;

    /* devide by detA */
    for (k=0; k<3; k++) {
        for (j=0; j<3; j++)
            A[k][j] = A[k][j]/detA;
    }
    return;
}

/* Copy 4x4 matrix b into a. */
void TrackBall::matcopy(float a[4][4],float b[4][4])
{
    memcpy(a,b,16*sizeof(float));
}

/* Transpose a 4x4 matrix in place. */
void TrackBall::mattrans(float a[4][4])
{
    float r[4][4];
    int i,j;
    for(i=0;i<4;i++) {
        for(j=0;j<4;j++) {
            r[i][j] = a[j][i];
        }
    }
    matcopy(a,r);
}

/* Print a 4x4 matrix (column-wise) to stdout, with optional label s. */
void TrackBall::matprint(float a[4][4],char *s)
{
    int i;
    if (s) printf("Matrix:%s\n",s);
    for(i=0;i<4;i++) {
        printf("%6.3f %6.3f %6.3f %6.3f\n",a[0][i],a[1][i],a[2][i],a[3][i]);
    }
}

/* Full 4x4 inverse in place via the adjoint; no-op if det == 0. */
void TrackBall::matinv4x4(float A[4][4])
{
    float r[4][4];
    int i,j;
    float det = matadjoint(r,A);
    if (det == 0.0) return;
    for(i=0;i<4;i++) {
        for(j=0;j<4;j++) {
            r[i][j] /= det;
        }
    }
    matcopy(A,r);
    return;
}

/* 2x2 determinant helper. */
static float det2x2( float a1, float b1, float a2, float b2 )
{
    return (a1*b2 - b1*a2);
}

/* 3x3 determinant helper (cofactor expansion along the first row). */
static float det3x3( float a1, float b1, float c1, float a2, float b2, float c2, float a3, float b3, float c3 )
{
    return ( a1*det2x2(b2,c2, b3,c3)
           - b1*det2x2(a2,c2, a3,c3)
           + c1*det2x2(a2,b2, a3,b3) );
}

/* Compute the adjoint (transposed cofactor matrix) of _data into r and
 * return the 4x4 determinant. */
float TrackBall::matadjoint(float r[4][4],float _data[4][4])
{
    float outDet;
    float a1=_data[0][0],b1=_data[0][1],c1=_data[0][2],d1=_data[0][3],
          a2=_data[1][0],b2=_data[1][1],c2=_data[1][2],d2=_data[1][3],
          a3=_data[2][0],b3=_data[2][1],c3=_data[2][2],d3=_data[2][3],
          a4=_data[3][0],b4=_data[3][1],c4=_data[3][2],d4=_data[3][3];

    r[0][0] = det3x3(b2,c2,d2, b3,c3,d3, b4,c4,d4);
    r[1][0] = -det3x3(a2,c2,d2, a3,c3,d3, a4,c4,d4);
    r[2][0] = det3x3(a2,b2,d2, a3,b3,d3, a4,b4,d4);
    r[3][0] = -det3x3(a2,b2,c2, a3,b3,c3, a4,b4,c4);

    r[0][1] = -det3x3(b1,c1,d1, b3,c3,d3, b4,c4,d4);
    r[1][1] = det3x3(a1,c1,d1, a3,c3,d3, a4,c4,d4);
    r[2][1] = -det3x3(a1,b1,d1, a3,b3,d3, a4,b4,d4);
    r[3][1] = det3x3(a1,b1,c1, a3,b3,c3, a4,b4,c4);

    r[0][2] = det3x3(b1,c1,d1, b2,c2,d2, b4,c4,d4);
    r[1][2] = -det3x3(a1,c1,d1, a2,c2,d2, a4,c4,d4);
    r[2][2] = det3x3(a1,b1,d1, a2,b2,d2, a4,b4,d4);
    r[3][2] = -det3x3(a1,b1,c1, a2,b2,c2, a4,b4,c4);

    r[0][3] = -det3x3(b1,c1,d1, b2,c2,d2, b3,c3,d3);
    r[1][3] = det3x3(a1,c1,d1, a2,c2,d2, a3,c3,d3);
    r[2][3] = -det3x3(a1,b1,d1, a2,b2,d2, a3,b3,d3);
    r[3][3] = det3x3(a1,b1,c1, a2,b2,c2, a3,b3,c3);

    outDet = a1 * r[0][0] + b1 * r[1][0] + c1 * r[2][0] + d1 * r[3][0];
    return(outDet);
}
<file_sep># Raytracing Project
Raytracing Project for Computer Graphics Class
<file_sep>
#if !(defined CMPS3120_LABS_PNGDEC_H)
#define CMPS3120_LABS_PNGDEC_H

#include "./png.h"
#include "./zdec.h"
#include <stdio.h>

namespace cmps3120 {
  /**
   * Sample code:
   * \code
// Your image/texture class:
class image : public cmps3120::png_receptor {
  ...
public:
  void set_header (cmps3120::png_header h);
  void set_pixel (unsigned int x, unsigned int y, unsigned int level, cmps3120::png_pixel v);
}
// ...
//The part of the code where you load the images
image the_sprite;
png_decoder the_decoder;
png_error result;
the_decoder.set_receptor( &the_sprite );
result = the_decoder.read_file("sprite.png");
if (result == PNG_DONE)
  std::cout << "The image loaded successfully.";
else
  std::cout << "There was an error loading the image.";
   * \endcode
   */
  class png_decoder : public png_base {
  private:
    zdec xdec;
    unsigned char xbuf[16];
    unsigned int m_max;
  public:
    png_decoder(void);
    ~png_decoder(void);
    /**
     * @return the maximum dimension allowed
     */
    unsigned int get_max_dimension(void) const;
    /**
     * Set the maximum dimension allowed
     * @param x the new maximum
     */
    void set_max_dimension(unsigned int x);
    /**
     * Read a file.
     * @param name the name of the PNG file to read
     * @return the last error code. If the read was successful,
     * PNG_DONE is returned.
* @note don't forget to set the png_receptor first */ png_error read_file(const char* name); protected: void reset_sub(void); png_error put_char(unsigned char); void process_sample(unsigned char); }; }; #endif /* CMPS3120_LABS_PNGDEC_H */ <file_sep> #include "./pngdec.h" #include <string.h> #include <stdio.h> #include <stdlib.h> namespace cmps3120 { unsigned char pngdec_std_header[8] = {137, 80, 78, 71, 13, 10, 26, 10 }; unsigned char pngdec_chunk_iend[4] = { 0x49, 0x45, 0x4E, 0x44 }; unsigned char pngdec_chunk_ihdr[4] = { 0x49, 0x48, 0x44, 0x52 }; unsigned char pngdec_chunk_plte[4] = { 0x50, 0x4c, 0x54, 0x45 }; unsigned char pngdec_chunk_trns[4] = { 0x74, 0x52, 0x4e, 0x53 }; unsigned char pngdec_chunk_idat[4] = { 0x49, 0x44, 0x41, 0x54 }; png_decoder::png_decoder(void) : xdec(), m_max(60000) { reset(); } png_decoder::~png_decoder(void) { } png_error png_decoder::read_file(const char* name) { FILE *nfile = fopen(name,"rb"); if (nfile != NULL) { int readch; unsigned char buf[256]; reset(); while ((!get_error()) && ((readch = fread(buf,sizeof(char),256,nfile)) > 0) ) { put(buf,readch,NULL); } fclose(nfile); return get_error(); } else { return PNG_FILE_READ_FAILED; } } unsigned int png_decoder::get_max_dimension(void) const{ return m_max; } void png_decoder::set_max_dimension(unsigned int x){ m_max = x; } void png_decoder::reset_sub(void) { get_palette().resize(0); xdec.reset(); xpect_length = 0; xpect_chunks = 0; } png_error png_decoder::put_char(unsigned char ch) { int outres = 0; switch (readmode) { case 0: /* header */ { if (submode < 8) { xbuf[submode] = ch; submode++; } if (submode >= 8) { if (memcmp(xbuf,pngdec_std_header,8) ) { outres = PNG_BAD_SIGNATURE; } else { readmode = 1; submode = 0; xpect_length = 0; } } }break; case 1: /* chunk length */ { if (submode < 4) { xpect_length = (xpect_length<<8)+ch; submode++; } if (submode >= 4) { readmode = 2; submode = 0; } }break; case 2: /* chunk name */ { if (submode < 4) { xbuf[submode] = ch; put_previous(ch); 
submode++; } if (submode >= 4) { xbuf[4] =0; /*fprintf(stderr," (%s chunk[[:%5lx] +x%5lx %lo)\n",xbuf, xcharc,xpect_length,xpect_chunks);*/ if (memcmp(pngdec_chunk_iend,xbuf,4) == 0) { readmode = 5; xpect_chunks |= PNG_CC_IEND; } else if (memcmp(pngdec_chunk_ihdr,xbuf,4) == 0) { if (xpect_chunks & PNG_CC_IHDR) { outres = PNG_BAD_CHUNK; } else if (!xpect_length) { outres = PNG_MISSING_DATA; } else { readmode = 6; submode = 0; xpect_chunks |= PNG_CC_IHDR; } } else if (memcmp(pngdec_chunk_plte,xbuf,4) == 0) { if ((xpect_chunks & PNG_CC_PLTE) || (!(xpect_chunks&PNG_CC_IHDR)) ) { outres = PNG_BAD_CHUNK; } else { readmode = 7; submode = 0; xpect_chunks |= PNG_CC_PLTE; } } else if (memcmp(pngdec_chunk_trns,xbuf,4) == 0) { if ((get_header().color_type & PNG_HAS_PALETTE) && (!(xpect_chunks&PNG_CC_PLTE)) ) { outres = PNG_BAD_CHUNK; } else if ((!(get_header().color_type & PNG_HAS_PALETTE)) && (!(xpect_chunks&PNG_CC_IHDR)) ) { outres = PNG_BAD_CHUNK; } else { readmode = 8; submode = 0; xpect_chunks |= PNG_CC_tRNS; } } else if (memcmp(pngdec_chunk_idat,xbuf,4) == 0) { if ((get_header().color_type & PNG_HAS_PALETTE) && (!(xpect_chunks&PNG_CC_PLTE)) ) { outres = PNG_BAD_CHUNK; } else if ((!(get_header().color_type & PNG_HAS_PALETTE)) && (!(xpect_chunks&PNG_CC_IHDR)) ) { outres = PNG_BAD_CHUNK; } else { submode = 0; if (!(xpect_chunks & PNG_CC_IDAT)) { if (get_header().interlace_type == 1) { get_interlace_data() = png_adam7_data(1); xdiv_width = get_interlace_data().compute_width (get_header().width); xdiv_height = get_interlace_data().compute_height (get_header().height); xdiv_xpos = 0; xdiv_ypos = 0; readmode = 11; } else { get_interlace_data() = png_adam7_data(0); xdiv_width = get_header().width; xdiv_height= get_header().height; xdiv_xpos = 0; xdiv_ypos = 0; readmode = 9; } filter_typ =0; filter_backlog.resize(0); try { filter_backlog.resize(png_color_recast(xdiv_width, get_header().bit_depth, get_header().color_type) ); filter_dist = png_color_recast(1, 
get_header().bit_depth, get_header().color_type); /*fprintf(stderr,"filtr %i <- %i,%i\n",filter_dist, get_header().color_type, get_header().bit_depth );*/ } catch (...) { outres = PNG_UNSUPPORTED_HEADER; } xpect_chunks |= PNG_CC_IDAT; } else { if (get_header().interlace_type == 1) { readmode = 10+get_interlace_data().get_level(); } else { readmode = 9; } } /*fprintf(stderr,"readmode <= %u\n",readmode);*/ } } else if (!(xbuf[0]&0x20)) { outres = PNG_UNSUPPORTED_CHUNK; } else { readmode = 5;xbuf[4] =0; /*fprintf(stderr,"%s chunk[[:%5lx] %lu %lo\n",xbuf, xcharc,xpect_length,xpect_chunks);*/ } if (xpect_length == 0) { readmode = 3; submode = 0; xpect_nothing = 0; } } }break; case 3: { if (submode < 4) { xpect_nothing = (xpect_nothing<<8)+ch; submode++; } if (submode >= 4) { /*fprintf(stderr," {%08x v %08x}\n", xpect_nothing, get_checksum());*/ if (xpect_nothing == get_checksum()) { if (xpect_chunks & PNG_CC_IEND) { readmode = 4; outres = PNG_DONE; } else { /* read next chunk */ readmode = 1; submode =0; xpect_length=0; } reset_checksum(); } else outres = PNG_BAD_CHECKSUM; } }break; case 4: /* end of PNG file */ { outres = PNG_DONE; }break; case 5: /* end-of-image/unknown chunk */ case 10: case 18: { if (xpect_length > 0) { put_previous(ch); xpect_length--; } if (!xpect_length) { readmode = 3; submode =0; xpect_nothing = 0; } }break; case 6: /* header */ { if (xpect_length > 0) { put_previous(ch); xpect_length--; if (submode < 13) xbuf[submode++] = ch; } if (!xpect_length) { if (submode < 13) { outres = PNG_MISSING_DATA; } else { int i; png_header xhdr; xhdr.width = 0; xhdr.height = 0; for (i = 0; i < 4; i++) xhdr.width = (xhdr.width<<8) + xbuf[i]; for (i = 4; i < 8; i++) xhdr.height= (xhdr.height<<8) + xbuf[i]; xhdr.bit_depth = xbuf[8]; xhdr.color_type = xbuf[9]; xhdr.compression_type = xbuf[10]; xhdr.filter_type = xbuf[11]; xhdr.interlace_type = xbuf[12]; get_header() = xhdr; if ((xhdr.is_supported() ) && (xhdr.width <= m_max) && (xhdr.height <= m_max)) { if 
(get_receptor() != NULL) { get_receptor()->set_header(xhdr); } } else outres = PNG_UNSUPPORTED_HEADER; } readmode = 3; submode =0; xpect_nothing = 0; } }break; case 7: /* palette */ { if (submode == 0) { xlong = xpect_length/3; if (xlong > 256) xlong = 256; if (get_palette().resize(xlong) ) { /* do nothing */ } else { outres = PNG_NO_MEMORY; } } if ((unsigned int)(submode/3) < get_palette().size()) { try{ switch (submode%3) { case 0: get_palette()[submode/3].r = ch;break; case 1: get_palette()[submode/3].g = ch;break; case 2: get_palette()[submode/3].b = ch;break; } }catch(...) { }; submode++; } if (xpect_length > 0) { xpect_length--; put_previous(ch); } if (!xpect_length) { readmode = 3; submode =0; xpect_nothing = 0; } }break; case 8: /* simple transparency */ { if (get_header().color_type & PNG_HAS_PALETTE) { if ((unsigned int)submode < get_palette().size()) { try{ get_palette()[submode].a = ch; }catch(...) { }; submode++; } } else { if (submode < 6) { png_pixel& xp = get_palette().get_transparent_pixel(); switch (submode) { case 0: xp.r = ch;break; case 1: xp.r = (xp.r<<8)+ch; if (!(get_header().color_type&PNG_HAS_COLOR)) { xp.g=xp.r; xp.b=xp.r; xp = xp.up_cast(get_header().bit_depth); xp.a = 65535; }break; case 2: xp.g = ch;break; case 3: xp.g = (xp.r<<8)+ch;break; case 4: xp.b = ch;break; case 5: xp.b = (xp.r<<8)+ch; xp = xp.up_cast(get_header().bit_depth); xp.a = 65535;break; } submode++; } } if (xpect_length > 0) { xpect_length--; put_previous(ch); } if (!xpect_length) { readmode = 3; submode =0; xpect_nothing = 0; } }break; case 9: /* pixel data : no interlace */ { if (xpect_length > 0) { xbuf[0] = ch; xdec.put(&xbuf[0],1,NULL); if (xdec.get_pending_count()) { const zss_buffer &xlongbuf = xdec.get_pending(); unsigned int xslong; for (xslong = 0; xslong < xlongbuf.size(); xslong++) { process_sample(xlongbuf[xslong]); } xdec.clear_pending(); } if ((xdec.get_error() != 0) && (xdec.get_error() != 1)) { outres = PNG_ZSS_ERROR; } xpect_length--; 
put_previous(ch); } if (!xpect_length) { readmode = 3; submode =0; xpect_nothing = 0; } // }break; case 11: case 12: case 13: case 14: case 15: case 16: case 17: /* pixel data : interlace */ { if (xpect_length > 0) { xbuf[0] = ch; xdec.put(&xbuf[0],1,NULL); if (xdec.get_pending_count()) { const zss_buffer &xlongbuf = xdec.get_pending(); unsigned int xslong; for (xslong = 0; xslong < xlongbuf.size(); xslong++) { process_sample(xlongbuf[xslong]); } xdec.clear_pending(); } if ((xdec.get_error() != 0) && (xdec.get_error() != 1)) { outres = PNG_ZSS_ERROR; } xpect_length--; put_previous(ch); } if (!xpect_length) { readmode = 3; submode =0; xpect_nothing = 0; } // }break; default: outres = PNG_UNDEFINED; break; } return (png_error)outres; } void png_decoder::process_sample(unsigned char xc) { int i; png_pixel nv; switch (sidemode) { case 0: /* filter */ { filter_typ = xc; sidemode++; xdiv_index=0; }break; case 1: { switch (filter_typ) { case 0: /* no effect */ break; case 1: { if ((xdiv_index >= filter_dist) && (xdiv_index < filter_backlog.size())) { xc = (xc+filter_log[(xdiv_index-filter_dist)&7])&255; } //xc=0; }break; case 2: { if (xdiv_index < filter_backlog.size()) { xc = (xc+filter_backlog[xdiv_index])&255; } //xc=0; }break; case 3: { unsigned int avg = 0; if ((xdiv_index >= filter_dist) && (xdiv_index < filter_backlog.size())) { avg += filter_log[(xdiv_index-filter_dist)&7]; } if (xdiv_index < filter_backlog.size()) { avg += filter_backlog[xdiv_index]; } xc = (xc+(avg>>1))&255; //xc=0; }break; case 4: { unsigned int a = 0, b = 0, c = 0, xpt; if ((xdiv_index >= filter_dist) && (xdiv_index < filter_backlog.size())) { a = filter_log[(xdiv_index-filter_dist)&7]; c = filter_backlog[xdiv_index-filter_dist]; } if (xdiv_index < filter_backlog.size()) { b = filter_backlog[xdiv_index]; } xpt = png_paeth_predict(a,b,c); xc = (xc+xpt)&255; //xc = 0; }break; default: break; } if (get_receptor() != NULL) { png_receptor *rcpt = get_receptor(); png_adam7_data &idta = 
get_interlace_data(); switch (get_header().bit_depth) { case 1: if (get_header().color_type & PNG_HAS_PALETTE) for (i = 0; (i < 8)&&(xdiv_xpos<xdiv_width); i++, xdiv_xpos++) { nv.r = (xc&(1<<(7-i)))?1:0; if (nv.r < get_palette().size()) { nv = get_palette()[nv.r]; } else { nv.r = 0;nv.g = 0;nv.b = 0;nv.a=0; } rcpt->set_pixel(idta.real_x(xdiv_xpos),idta.real_y(xdiv_ypos), (readmode>=11)?readmode-10:0, nv); } else for (i = 0; i < 8; i++,xdiv_xpos++) { nv.r = (xc&(1<<(7-i)))?1:0; nv.g = nv.r*65535; nv.b = nv.g; nv.r = nv.g; nv.a = 65535; if (get_palette().get_transparent_pixel() == nv) { nv.a = 0; } rcpt->set_pixel(idta.real_x(xdiv_xpos),idta.real_y(xdiv_ypos), (readmode>=11)?readmode-10:0, nv); }break; case 2: if (get_header().color_type & PNG_HAS_PALETTE) for (i = 0; i < 8; i+=2, xdiv_xpos++) { nv.r = (xc&(3<<(6-i)))>>(6-i); if (nv.r < get_palette().size()) { nv = get_palette()[nv.r]; } else { nv.r = 0;nv.g = 0;nv.b = 0;nv.a=0; } rcpt->set_pixel(idta.real_x(xdiv_xpos),idta.real_y(xdiv_ypos), (readmode>=11)?readmode-10:0, nv); } else for (i = 0; i < 8; i+=2,xdiv_xpos++) { nv.r = (xc&(3<<(6-i)))>>(6-i); nv.g = nv.r*21845; nv.b = nv.g; nv.r = nv.g; nv.a = 65535; if (get_palette().get_transparent_pixel() == nv) { nv.a = 0; } rcpt->set_pixel(idta.real_x(xdiv_xpos),idta.real_y(xdiv_ypos), (readmode>=11)?readmode-10:0, nv); }break; case 4: if (get_header().color_type & PNG_HAS_PALETTE) for (i = 0; i < 8; i+=4, xdiv_xpos++) { nv.r = (xc&(15<<(4-i)))>>(4-i); if (nv.r < get_palette().size()) { nv = get_palette()[nv.r]; } else { nv.r = 0;nv.g = 0;nv.b = 0;nv.a=0; } rcpt->set_pixel(idta.real_x(xdiv_xpos),idta.real_y(xdiv_ypos), (readmode>=11)?readmode-10:0, nv); } else for (i = 0; i < 8; i+=4,xdiv_xpos++) { nv.r = (xc&(15<<(4-i)))>>(4-i); nv.g = nv.r*4369; nv.b = nv.g; nv.r = nv.g; nv.a = 65535; if (get_palette().get_transparent_pixel() == nv) { nv.a = 0; } rcpt->set_pixel(idta.real_x(xdiv_xpos),idta.real_y(xdiv_ypos), (readmode>=11)?readmode-10:0, nv); }break; case 8: { switch 
(get_header().color_type) { case 0: { nv.r = xc*257; nv.g = nv.r; nv.b = nv.r; nv.a = 65535; if (get_palette().get_transparent_pixel() == nv) { nv.a = 0; } rcpt->set_pixel(idta.real_x(xdiv_xpos),idta.real_y(xdiv_ypos), (readmode>=11)?readmode-10:0, nv); xdiv_xpos++; }break; case 2: if (xdiv_index%3 == 2){ nv.r = filter_log[(xdiv_index-2)&7]*257; nv.g = filter_log[(xdiv_index-1)&7]*257; nv.b = xc*257; nv.a = 65535; if (get_palette().get_transparent_pixel() == nv) { nv.a = 0; } rcpt->set_pixel(idta.real_x(xdiv_xpos),idta.real_y(xdiv_ypos), (readmode>=11)?readmode-10:0, nv); xdiv_xpos++; }break; case 3: { if (xc < get_palette().size()) { nv = get_palette()[xc]; } else { nv.r = 0;nv.g = 0;nv.b = 0;nv.a=0; } rcpt->set_pixel(idta.real_x(xdiv_xpos),idta.real_y(xdiv_ypos), (readmode>=11)?readmode-10:0, nv); xdiv_xpos++; }break; case 4: if (xdiv_index%2 == 1){ nv.r = filter_log[(xdiv_index-1)&7]*257; nv.g = nv.r; nv.b = nv.r; nv.a = xc*257; rcpt->set_pixel(idta.real_x(xdiv_xpos),idta.real_y(xdiv_ypos), (readmode>=11)?readmode-10:0, nv); xdiv_xpos++; }break; case 6: if (xdiv_index%4 == 3){ nv.r = filter_log[(xdiv_index-3)&7]*257; nv.g = filter_log[(xdiv_index-2)&7]*257; nv.b = filter_log[(xdiv_index-1)&7]*257; nv.a = xc*257; rcpt->set_pixel(idta.real_x(xdiv_xpos),idta.real_y(xdiv_ypos), (readmode>=11)?readmode-10:0, nv); xdiv_xpos++; }break; } }break; case 16: { switch (get_header().color_type) { case 0: if (xdiv_index%2 == 1){ nv.r = (filter_log[(xdiv_index-1)&7]<<8)+xc; nv.g = nv.r; nv.b = nv.r; nv.a = 65535; if (get_palette().get_transparent_pixel() == nv) { nv.a = 0; } rcpt->set_pixel(idta.real_x(xdiv_xpos),idta.real_y(xdiv_ypos), (readmode>=11)?readmode-10:0, nv); xdiv_xpos++; }break; case 2: if (xdiv_index%6 == 5){ nv.r = (filter_log[(xdiv_index-5)&7]<<8)+ filter_log[(xdiv_index-4)&7]; nv.g = (filter_log[(xdiv_index-3)&7]<<8)+ filter_log[(xdiv_index-2)&7]; nv.b = (filter_log[(xdiv_index-1)&7]<<8)+xc; nv.a = 65535; if (get_palette().get_transparent_pixel() == nv) { nv.a 
= 0; } rcpt->set_pixel(idta.real_x(xdiv_xpos),idta.real_y(xdiv_ypos), (readmode>=11)?readmode-10:0, nv); xdiv_xpos++; }break; case 3: if (xdiv_index&1){ nv.r = (filter_log[(xdiv_index-1)&7]<<8)+xc; if (nv.r < get_palette().size()) { nv = get_palette()[nv.r]; } else { nv.r = 0;nv.g = 0;nv.b = 0;nv.a=0; } rcpt->set_pixel(idta.real_x(xdiv_xpos),idta.real_y(xdiv_ypos), (readmode>=11)?readmode-10:0, nv); xdiv_xpos++; }break; case 4: if (xdiv_index%4 == 3){ nv.r = (filter_log[(xdiv_index-3)&7]<<8)+ filter_log[(xdiv_index-2)&7]; nv.g = nv.r; nv.b = nv.r; nv.a = (filter_log[(xdiv_index-1)&7]<<8)+xc; rcpt->set_pixel(idta.real_x(xdiv_xpos),idta.real_y(xdiv_ypos), (readmode>=11)?readmode-10:0, nv); xdiv_xpos++; }break; case 6: if ((xdiv_index&7) == 7){ nv.r = (filter_log[(xdiv_index-7)&7]<<8)+ filter_log[(xdiv_index-6)&7]; nv.g = (filter_log[(xdiv_index-5)&7]<<8)+ filter_log[(xdiv_index-4)&7]; nv.b = (filter_log[(xdiv_index-3)&7]<<8)+ filter_log[(xdiv_index-2)&7]; nv.a = (filter_log[(xdiv_index-1)&7]<<8)+xc; rcpt->set_pixel(idta.real_x(xdiv_xpos),idta.real_y(xdiv_ypos), (readmode>=11)?readmode-10:0, nv); xdiv_xpos++; }break; } }break; } } if (xdiv_index < filter_backlog.size()) { if (xdiv_index >= 8) filter_backlog[xdiv_index-8] = filter_log[xdiv_index&7]; filter_log[xdiv_index&7] = xc; xdiv_index++; } if (xdiv_index >= filter_backlog.size()) { sidemode = 0; xdiv_ypos++; for (xdiv_xpos = ((xdiv_index >= 8) ? 
(xdiv_index-8) : 0); xdiv_xpos<xdiv_index; xdiv_xpos++) { filter_backlog[xdiv_xpos] = filter_log[xdiv_xpos&7]; } xdiv_index = 0; xdiv_xpos=0; memset(&filter_log[0],0,8*sizeof(unsigned char)); } if (xdiv_ypos >= xdiv_height) { /* on to next interlace level */ if (((readmode >= 11) && (readmode <= 17)) || (readmode == 9)) { //readmode++; /* recompute data */ if ((readmode == 17) || (readmode == 9)) readmode++; else if ((readmode >= 11) && (readmode <= 16)) { do { readmode++; /*fprintf(stderr,"readmode++ %i\n",readmode);*/ get_interlace_data() = png_adam7_data(readmode-10); xdiv_width = get_interlace_data().compute_width (get_header().width); xdiv_height = get_interlace_data().compute_height (get_header().height); } while ( ((readmode >= 11) && (readmode <= 17)) && ((xdiv_width == 0)|| (xdiv_height==0))); xdiv_xpos = 0; xdiv_ypos = 0; xdiv_index= 0; filter_typ =0; filter_backlog.resize(0); filter_backlog.resize(png_color_recast(xdiv_width, get_header().bit_depth, get_header().color_type,true) ); filter_dist = png_color_recast(1, get_header().bit_depth, get_header().color_type,true); } } } }break; } return /*xres */; } };
be6b8917be00a203c418765a65b2665cbd41c36c
[ "Markdown", "CMake", "C++" ]
20
Markdown
noahhendlish/Raytracing
43b50e66ff0129c8679bc89fe9b1ff4e67c4d113
ec072c5841552ff23f1f0aeea74df3f9c9c748cc
refs/heads/master
<repo_name>Lotrotk/portion<file_sep>/Cargo.toml [package] name = "portion" version = "0.2.0" authors = ["Lotrotk <<EMAIL>>"] edition = "2018" license-file = "LICENSE.txt" readme = "README.md" description = "Dedicated wrappers for floating point numbers representing a portion of some numerical entity." repository = "https://github.com/Lotrotk/portion" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] <file_sep>/src/f64.rs /// The floating point type. pub type Flt = f64; /// A value in range [0..1]. #[derive(Clone, Copy, Debug, PartialEq, PartialOrd)] pub struct Portion(Flt); /// The relative portion of one (numerator) within another (denominator). /// Two segments are considered here : [0..denominator] and ]denominator..1]. /// If the numerator lies within first segment the resulting value is simply numerator/denumerator. /// Otherwise the relative portion within the second segment is returned. pub enum Within { /// Represents the portion within the interval [0..denominator]. First(Portion), /// Represents the portion within the interval ]denominator..1]. Second(Portion), } /// Any quantity that can be multiplied with a floating point value. /// /// ``` /// use portion::f64::{ApplyPortion, Flt}; /// struct Speed(f64); /// /// impl ApplyPortion for Speed { /// fn apply_portion(self, portion: Flt) -> Self { /// Self(self.0 * portion) /// } /// } /// ``` pub trait ApplyPortion { /// * `portion` - A value in range [0..1] fn apply_portion(self, portion: Flt) -> Self; } impl Portion { /// The minimum value. /// /// ``` /// use portion::f64::Portion; /// let p = Portion::zero(); /// assert_eq!(Portion::value(p), 0.0); /// ``` pub const fn zero() -> Self { Portion(0.0) } /// ``` /// use portion::f64::Portion; /// let p = Portion::half(); /// assert_eq!(Portion::value(p), 0.5); /// ``` pub const fn half() -> Self { Portion(0.5) } /// The unit and maximum value. 
/// /// ``` /// use portion::f64::Portion; /// let p = Portion::one(); /// assert_eq!(Portion::value(p), 1.0); /// ``` pub const fn one() -> Self { Portion(1.0) } /// Creates a portion at run time. /// /// ``` /// use portion::f64::Portion; /// let p = Portion::new(-0.5); /// assert!(p.is_err()); /// let p = Portion::new(0.0); /// assert!(p.is_ok()); /// let p = Portion::new(0.5); /// assert!(p.is_ok()); /// let p = Portion::new(1.0); /// assert!(p.is_ok()); /// let p = Portion::new(1.5); /// assert!(p.is_err()); /// for value in &[std::f64::NAN, std::f64::INFINITY, std::f64::NEG_INFINITY,] { /// let p = Portion::new(*value); /// assert!(p.is_err()); /// } /// ``` pub fn new(value: Flt) -> Result<Self, ()> { if 0.0 <= value && value <= 1. { Ok(Portion(value)) } else { Err(()) } } /// Returns a floating point value in range [0..1]. /// /// This is intentionally not a method. Avoid it. /// /// ``` /// use portion::f64::Portion; /// let p = Portion::new(0.25).unwrap(); /// assert_eq!(Portion::value(p), 0.25); /// ``` pub const fn value(p: Portion) -> Flt { p.0 } /// Multiplies the portion with a scalar. /// /// Note that multiplying with another portion is more optimized. /// /// ``` /// use portion::f64::Portion; /// let x = Portion::new(0.25).unwrap(); /// let y = x.scale(2.0).unwrap(); /// assert_eq!(Portion::value(y), 0.5); /// let y = x.scale(10.0); /// assert!(y.is_err()); /// let y = x.scale(-1.0); /// assert!(y.is_err()); /// ``` pub fn scale(self, s: Flt) -> Result<Self, ()> { Self::new(self.0 * s) } /// Apply the portion to a certain quantity. /// /// This is the recommended way of applying a portion. The quantity type should follow the newtype idiom. 
/// /// ``` /// use portion::f64::{ApplyPortion, Flt, Portion}; /// struct Speed(f64); /// /// impl ApplyPortion for Speed { /// fn apply_portion(self, portion: Flt) -> Self { /// Self(self.0 * portion) /// } /// } /// /// fn half_speed(speed: Speed) -> Speed { /// Portion::half().apply(speed) /// } /// /// fn test_half_speed() { /// let speed = half_speed(Speed(-16.0)); /// assert_eq!(speed.0, -8.0); /// } /// /// test_half_speed(); /// ``` pub fn apply<Q>(self, quantity: Q) -> Q where Q: ApplyPortion, { quantity.apply_portion(self.0) } /// Returns the difference to 1. /// /// ``` /// use portion::f64::Portion; /// let x = Portion::new(0.25).unwrap(); /// let y = x.complement(); /// assert_eq!(Portion::value(y), 0.75); /// ``` pub fn complement(self) -> Portion { Portion(1.0 - self.0) } /// The relative portion of one (numerator) within another (denominator). /// /// ``` /// use portion::f64::{Portion, Within}; /// let x = Portion::new(0.125).unwrap(); /// let y = Portion::half(); /// let z = x.within(y); /// if let Within::First(first) = z { /// assert_eq!(Portion::value(first), 0.25); /// } else { /// panic!("This should really lie in the first segment"); /// } /// /// let x = Portion::new(0.875).unwrap(); /// let y = Portion::half(); /// let z = x.within(y); /// if let Within::Second(second) = z { /// assert_eq!(Portion::value(second), 0.75); /// } else { /// panic!("This should really lie in the second segment"); /// } /// /// ``` pub fn within(self, denominator: Self) -> Within { if self.0 <= denominator.0 { Within::First(Portion(self.0 / denominator.0)) } else { Within::Second(Portion((self.0 - denominator.0) / (1.0 - denominator.0))) } } } impl std::ops::Mul for Portion { type Output = Self; fn mul(self, rhs: Self) -> Self::Output { Portion(self.0.mul(rhs.0)) } } impl std::ops::Mul<SPortion> for Portion { type Output = SPortion; fn mul(self, rhs: SPortion) -> Self::Output { SPortion(self.0.mul(rhs.0)) } } impl std::ops::Add for Portion { type Output = 
Result<Self, ()>; fn add(self, rhs: Self) -> Self::Output { Portion::new(self.0.add(rhs.0)) } } impl std::ops::Add<SPortion> for Portion { type Output = Result<SPortion, ()>; fn add(self, rhs: SPortion) -> Self::Output { SPortion::new(self.0.add(rhs.0)) } } impl std::ops::Sub for Portion { type Output = SPortion; fn sub(self, rhs: Self) -> Self::Output { SPortion(self.0.sub(rhs.0)) } } impl std::ops::Sub<SPortion> for Portion { type Output = Result<SPortion, ()>; fn sub(self, rhs: SPortion) -> Self::Output { SPortion::new(self.0.sub(rhs.0)) } } impl std::ops::Neg for Portion { type Output = SPortion; fn neg(self) -> Self::Output { SPortion(self.0.neg()) } } impl Eq for Portion {} impl Ord for Portion { #[allow(clippy::float_cmp)] fn cmp(&self, other: &Self) -> std::cmp::Ordering { if self.0 < other.0 { std::cmp::Ordering::Less } else if self.0 == other.0 { std::cmp::Ordering::Equal } else { std::cmp::Ordering::Greater } } } #[cfg(test)] mod tests_portion { use super::*; #[test] fn test_within() { let x = Portion::new(0.75).unwrap(); let y = Portion::zero(); match x.within(y) { Within::Second(_) => {} _ => panic!("This should really lie in the second segment"), }; let x = Portion::new(0.75).unwrap(); let y = Portion::one(); match x.within(y) { Within::First(_) => {} _ => panic!("This should really lie in the first segment"), }; } #[test] fn test_mul_self() { let x = Portion::half(); let y = x * x; assert_eq!(Portion::value(y), 0.25); } #[test] fn test_mul_negative() { let x = Portion::half(); let y = -SPortion::half(); let z = x * y; assert_eq!(SPortion::value(z), -0.25); } #[test] fn test_add_self() { let x = Portion::new(0.25).unwrap(); let y = (x + x).unwrap(); assert_eq!(Portion::value(y), 0.5); let x = Portion::new(0.75).unwrap(); let y = x + x; assert!(y.is_err()); } #[test] fn test_add_negative() { let x = Portion::new(0.25).unwrap(); let y = SPortion::new(0.5).unwrap(); let z = (x + y).unwrap(); assert_eq!(SPortion::value(z), 0.75); let y = 
SPortion::new(-0.5).unwrap(); let z = (x + y).unwrap(); assert_eq!(SPortion::value(z), -0.25); let y = SPortion::new(0.9).unwrap(); let z = x + y; assert!(z.is_err()); } #[test] fn test_sub_self() { let x = Portion::new(0.25).unwrap(); let y = Portion::new(0.5).unwrap(); let z = x - y; assert_eq!(SPortion::value(z), -0.25); } #[test] fn test_sub_negative() { let x = Portion::new(0.25).unwrap(); let y = SPortion::new(0.5).unwrap(); let z = (x - y).unwrap(); assert_eq!(SPortion::value(z), -0.25); let y = SPortion::new(-0.5).unwrap(); let z = (x - y).unwrap(); assert_eq!(SPortion::value(z), 0.75); let y = SPortion::new(-0.9).unwrap(); let z = x - y; assert!(z.is_err()); } #[test] fn test_neg() { let x: SPortion = -Portion::new(0.25).unwrap(); assert_eq!(SPortion::value(x), -0.25); } #[test] fn test_ord() { let x = Portion::new(0.25).unwrap(); let y = Portion::half(); assert!(x < y); assert!(y > x); assert_eq!(x, x); } } /// A signed portion: a value in range [-1..1]. /// This type represents the difference between two unsigned portions. #[derive(Clone, Copy, Debug, PartialEq, PartialOrd)] pub struct SPortion(Flt); impl SPortion { /// The minimum value. /// /// ``` /// use portion::f64::SPortion; /// let sp = SPortion::minus_one(); /// assert_eq!(SPortion::value(sp), -1.0); /// ``` pub const fn minus_one() -> Self { SPortion(-1.0) } /// The zero value. /// /// ``` /// use portion::f64::SPortion; /// let sp = SPortion::zero(); /// assert_eq!(SPortion::value(sp), 0.0); /// ``` pub const fn zero() -> Self { SPortion(0.0) } /// ``` /// use portion::f64::SPortion; /// let sp = SPortion::half(); /// assert_eq!(SPortion::value(sp), 0.5); /// ``` pub const fn half() -> Self { SPortion(0.5) } /// The unit and maximum value. /// /// ``` /// use portion::f64::SPortion; /// let sp = SPortion::one(); /// assert_eq!(SPortion::value(sp), 1.0); /// ``` pub const fn one() -> Self { SPortion(1.0) } /// Creates a portion at run time. 
/// /// ``` /// use portion::f64::SPortion; /// let sp = SPortion::new(-1.5); /// assert!(sp.is_err()); /// let sp = SPortion::new(-1.0); /// assert!(sp.is_ok()); /// let sp = SPortion::new(0.0); /// assert!(sp.is_ok()); /// let sp = SPortion::new(1.0); /// assert!(sp.is_ok()); /// let sp = SPortion::new(1.5); /// assert!(sp.is_err()); /// for value in &[std::f64::NAN, std::f64::INFINITY, std::f64::NEG_INFINITY,] { /// let sp = SPortion::new(*value); /// assert!(sp.is_err()); /// } /// ``` pub fn new(value: Flt) -> Result<Self, ()> { if -1.0 <= value && value <= 1. { Ok(SPortion(value)) } else { Err(()) } } /// Returns a floating point value in range [-1..1]. /// /// This is intentionally not a method. Avoid it. /// /// ``` /// use portion::f64::SPortion; /// let sp = SPortion::new(-0.25).unwrap(); /// assert_eq!(SPortion::value(sp), -0.25); /// ``` pub const fn value(sp: SPortion) -> Flt { sp.0 } /// Multiplies the portion with a scalar. /// /// Note that multiplying with another portion is more optimized. /// /// ``` /// use portion::f64::SPortion; /// let x = SPortion::new(0.25).unwrap(); /// let y = x.scale(2.0).unwrap(); /// assert_eq!(SPortion::value(y), 0.5); /// let y = x.scale(10.0); /// assert!(y.is_err()); /// let y = x.scale(-1.0).unwrap(); /// assert_eq!(SPortion::value(y), -0.25); /// ``` pub fn scale(self, s: Flt) -> Result<Self, ()> { Self::new(self.0 * s) } /// Converts to a positive portion, if the value is not negative. /// /// ``` /// use portion::f64::{Portion, SPortion}; /// let x = SPortion::half(); /// let y = x.to_portion().unwrap(); /// assert_eq!(Portion::value(y), 0.5); /// let x = SPortion::new(-0.5).unwrap(); /// let y = x.to_portion(); /// assert!(y.is_err()); /// ``` pub fn to_portion(self) -> Result<Portion, ()> { Portion::new(self.0) } /// Removes the value's sign. 
/// /// ``` /// use portion::f64::{Portion, SPortion}; /// let x = SPortion::half(); /// let y = x.abs(); /// assert_eq!(Portion::value(y), 0.5); /// let x = -SPortion::half(); /// let y = x.abs(); /// assert_eq!(Portion::value(y), 0.5); /// ``` pub fn abs(self) -> Portion { Portion(Flt::abs(self.0)) } } impl From<Portion> for SPortion { fn from(p: Portion) -> Self { SPortion(p.0) } } impl std::ops::Mul for SPortion { type Output = Self; fn mul(self, rhs: Self) -> Self::Output { SPortion(self.0.mul(rhs.0)) } } impl std::ops::Mul<Portion> for SPortion { type Output = Self; fn mul(self, rhs: Portion) -> Self::Output { SPortion(self.0.mul(rhs.0)) } } impl std::ops::Add for SPortion { type Output = Result<Self, ()>; fn add(self, rhs: Self) -> Self::Output { SPortion::new(self.0.add(rhs.0)) } } impl std::ops::Sub for SPortion { type Output = Result<Self, ()>; fn sub(self, rhs: Self) -> Self::Output { SPortion::new(self.0.sub(rhs.0)) } } impl std::ops::Neg for SPortion { type Output = Self; fn neg(self) -> Self::Output { SPortion(self.0.neg()) } } impl Eq for SPortion {} impl Ord for SPortion { #[allow(clippy::float_cmp)] fn cmp(&self, other: &Self) -> std::cmp::Ordering { if self.0 < other.0 { std::cmp::Ordering::Less } else if self.0 == other.0 { std::cmp::Ordering::Equal } else { std::cmp::Ordering::Greater } } } #[cfg(test)] mod tests_dportion { use super::*; #[test] fn test_from_portion() { let x = Portion::half(); let y: SPortion = x.into(); assert_eq!(SPortion::value(y), 0.5); } #[test] fn test_mul() { let x = SPortion::half(); let y = -x * x; assert_eq!(SPortion::value(y), -0.25); } #[test] fn test_mul_positive() { let x = -SPortion::half(); let y = Portion::half(); let z = x * y; assert_eq!(SPortion::value(z), -0.25); } #[test] fn test_add() { let x = SPortion::new(0.25).unwrap(); let y = SPortion::new(-0.5).unwrap(); let z = (x + y).unwrap(); assert_eq!(SPortion::value(z), -0.25); let x = SPortion::new(0.75).unwrap(); let y = x + x; assert!(y.is_err()); let x = 
SPortion::new(-0.75).unwrap(); let y = x + x; assert!(y.is_err()); } #[test] fn test_sub() { let x = SPortion::new(0.25).unwrap(); let y = SPortion::new(0.5).unwrap(); let z = (x - y).unwrap(); assert_eq!(SPortion::value(z), -0.25); let x = SPortion::new(0.75).unwrap(); let y = SPortion::new(-0.75).unwrap(); let z = x - y; assert!(z.is_err()); let x = SPortion::new(-0.75).unwrap(); let y = SPortion::new(0.75).unwrap(); let z = x - y; assert!(z.is_err()); } #[test] fn test_neg() { let x = -SPortion::new(0.25).unwrap(); assert_eq!(SPortion::value(x), -0.25); } #[test] fn test_ord() { let x = -SPortion::half(); let y = SPortion::half(); assert!(x < y); assert!(y > x); assert_eq!(x, x); } } /*The code above is practically a copy pase of f32's where 32 is replaced by 64*/ use crate::f32 as F32; impl From<F32::Portion> for Portion { fn from(p: F32::Portion) -> Self { Self(F32::Portion::value(p).into()) } } impl From<F32::SPortion> for SPortion { fn from(sp: F32::SPortion) -> Self { Self(F32::SPortion::value(sp).into()) } } <file_sep>/README.md # Portion [![crates.io](https://docs.rs/portion/badge.svg)](https://crates.io/crates/portion) Dedicated wrappers for floating point numbers representing a portion of some numerical entity. <file_sep>/src/lib.rs pub mod f32; pub mod f64;
36b0c4430e79dcc8c6ce0a60398232ed539c5fb5
[ "TOML", "Rust", "Markdown" ]
4
TOML
Lotrotk/portion
74c2eaa827ebd6de5f36080ccf44626bd8d76875
0ef3e67a73bf1d5e09bb779819743f442ae025f9
refs/heads/master
<repo_name>JadeMatrix/stickers.moe-API<file_sep>/src/api/user.cpp #line 2 "api/user.cpp" #include "user.hpp" #include "media.hpp" #include "../common/config.hpp" #include "../common/formatting.hpp" #include "../common/logging.hpp" #include "../common/postgres.hpp" #include "../common/redis.hpp" #include "../common/timestamp.hpp" #include <fstream> namespace { stickers::bigid write_user_details( std::unique_ptr< pqxx::connection >& connection, stickers::user & user, const stickers::audit::blame & blame, bool generate_id, bool signup = false ) { pqxx::work transaction{ *connection }; std::string current_email; if( user.info.avatar_hash ) stickers::assert_media_exist( transaction, { *user.info.avatar_hash } ); if( generate_id ) { auto result{ transaction.exec_params( PSQL( INSERT INTO users.user_core ( user_id, _a_revision, _email_current, password ) VALUES ( DEFAULT, $1, TRUE, ROW( $2, $3, $4, $5 ) ) RETURNING user_id ; ), blame.when, user.info.password.type(), pqxx::binarystring( user.info.password.hash() ), pqxx::binarystring( user.info.password.salt() ), user.info.password.factor() ) }; result[ 0 ][ "user_id" ].to< stickers::bigid >( user.id ); } else { auto result{ transaction.exec_params( PSQL( SELECT email FROM users.user_emails WHERE user_id = $1 AND current ; ), user.id ) }; if( result.size() >= 1 ) current_email = result[ 0 ][ "email" ].as< std::string >(); } // TODO: update user password std::string add_user_revision{ PSQL( INSERT INTO users.user_revisions ( user_id, revised, revised_by, revised_from, display_name, real_name, avatar_hash ) VALUES ( $1, $2, $3, $4, $5, $6, $7 ) ; ) }; transaction.exec_params( add_user_revision, user.id, blame.when, ( signup ? 
user.id : blame.who ), blame.where, user.info.display_name, user.info.real_name, user.info.avatar_hash ); if( user.info.email != current_email ) { // TODO: figure out a way to have an "invalid" signup email transaction.exec_params( PSQL( INSERT INTO users.user_emails VALUES ( $1, $2, $3, $4, $5, $6 ) ; ), user.id, user.info.email, signup, blame.when, blame.who, blame.where ); stickers::send_validation_email( user.id ); } transaction.commit(); return user.id; } template< typename T > pqxx::result query_user_records_by( pqxx::work& transaction, const std::string& field, const T& iden ) { std::string query_string; ff::fmt( query_string, PSQL( SELECT user_id, ( password ).type AS password_type, ( password ).hash AS password_hash, ( password ).salt AS password_salt, ( password ).factor AS password_factor, created, revised, display_name, real_name, avatar_hash, email FROM users.users WHERE {0} = $1 AND NOT deleted ; ), field ); pqxx::result result{ transaction.exec_params( query_string, iden ) }; transaction.commit(); return result; } stickers::user_info compile_user_info_from_row( const pqxx::row& row ) { stickers::password pw; if( row[ "password_type" ].as< stickers::password_type >() == stickers::password_type::SCRYPT ) { unsigned char factor, block_size, parallelization; stickers::scrypt::split_libscrypt_mcf_factor( row[ "password_factor" ].as< unsigned int >(), factor, block_size, parallelization ); pw = stickers::scrypt{ pqxx::binarystring( row[ "password_hash" ] ).str(), pqxx::binarystring( row[ "password_salt" ] ).str(), factor, block_size, parallelization }; } return { pw, row[ "created" ]. as< stickers::timestamp >(), row[ "revised" ]. as< stickers::timestamp >(), row[ "display_name" ]. as< std::string >(), row[ "real_name" ].get< std::string >(), row[ "avatar_hash" ].get< stickers::sha256 >(), row[ "email" ]. 
as< std::string >(), }; } } namespace stickers // Passwords //////////////////////////////////////////////// { void password::cleanup() { switch( _type ) { case password_type::RAW: { // Clang fails to compile this without the `using` statement using std::string; ( &raw_value ) -> string::~string(); break; } case password_type::SCRYPT: ( &scrypt_value ) -> scrypt::~scrypt(); break; default: break; } _type = password_type::INVALID; invalid_value = nullptr; } const char* password::type_name() const { switch( _type ) { case password_type::RAW: return "raw"; case password_type::SCRYPT: return "scrypt"; default: return "invalid"; } } password::password() : _type { password_type::INVALID }, invalid_value{ nullptr } {} password::password( const password& o ) : _type{ password_type::INVALID } { *this = o; } password::password( const std::string& v ) : _type { password_type::RAW }, raw_value{ v } {} password::password( const scrypt& v ) : _type { password_type::SCRYPT }, scrypt_value{ v } {} password::~password() { cleanup(); } bool password::operator==( const password& o ) const { if( _type != o._type ) { if( o._type == password_type::RAW ) return *this == o.raw_value; else return false; } switch( _type ) { case password_type::RAW: { bool equals{ true }; auto slen{ raw_value.size() > o.raw_value.size() ? raw_value.size() : o.raw_value.size() }; for( decltype( slen ) i = 0; i < slen; ++i ) { char c1{ i >= raw_value.size() ? o.raw_value[ i ] : raw_value[ i ] }; char c2{ i >= o.raw_value.size() ? 
raw_value[ i ] : o.raw_value[ i ] };
                    equals = equals && ( c1 == c2 );
                }
                return equals;
            }
        case password_type::SCRYPT:
            return scrypt_value == o.scrypt_value;
        default:
            return false;
        }
    }
    bool password::operator!=( const password& o ) const
    {
        return !( *this == o );
    }
    
    // Compare against a plaintext candidate; a stored scrypt value re-hashes
    // the candidate with the stored salt & parameters before comparing
    bool password::operator==( const std::string& raw ) const
    {
        switch( _type )
        {
        case password_type::RAW:
            return *this == password( raw );
            break;
        case password_type::SCRYPT:
            return scrypt_value == scrypt::make(
                raw,
                scrypt_value.raw_salt(),
                scrypt_value.factor(),
                scrypt_value.block_size(),
                scrypt_value.parallelization(),
                scrypt_value.raw_digest().size()
            );
            break;
        default:
            return false;
        }
    }
    bool password::operator!=( const std::string& raw ) const
    {
        return !( *this == raw );
    }
    
    password& password::operator=( const password& o )
    {
        switch( o._type )
        {
        case password_type::RAW:
            *this = o.raw_value;
            break;
        case password_type::SCRYPT:
            *this = o.scrypt_value;
            break;
        default:
            cleanup();
            break;
        }
        return *this;
    }
    password& password::operator=( const std::string& v )
    {
        // `cleanup()` destroys the previously active union member, then the
        // new one is constructed in place
        cleanup();
        _type = password_type::RAW;
        new( &raw_value ) std::string( v );
        return *this;
    }
    password& password::operator=( const scrypt& v )
    {
        cleanup();
        _type = password_type::SCRYPT;
        new( &scrypt_value ) scrypt( v );
        return *this;
    }
    
    // Raw digest bytes (empty unless scrypt)
    std::string password::hash() const
    {
        switch( _type )
        {
        case password_type::SCRYPT:
            return scrypt_value.raw_digest();
        default:
            return "";
        }
    }
    // Raw salt bytes (empty unless scrypt)
    std::string password::salt() const
    {
        switch( _type )
        {
        case password_type::SCRYPT:
            return scrypt_value.raw_salt();
        default:
            return "";
        }
    }
    // Combined libscrypt MCF work-factor field (0 unless scrypt)
    long password::factor() const
    {
        switch( _type )
        {
        case password_type::SCRYPT:
            return scrypt::make_libscrypt_mcf_factor(
                scrypt_value.factor(),
                scrypt_value.
block_size(), scrypt_value.parallelization() ); default: return 0; } } password hash_password( const std::string& raw ) { std::ifstream urandom( "/dev/urandom", std::ios::binary ); if( !urandom.good() ) throw hash_error{ "failed to open /dev/urandom" }; char salt[ scrypt::default_salt_size ]; urandom.read( salt, scrypt::default_salt_size ); if( !urandom.good() ) throw hash_error{ "failed to read from /dev/urandom" }; return password{ scrypt::make( raw.c_str(), raw.size(), salt, scrypt::default_salt_size ) }; } } namespace stickers // User management ////////////////////////////////////////// { user create_user( const user_info& info, const audit::blame& blame, bool signup ) { STICKERS_LOG( log_level::VERBOSE, "creating new user" ); auto connection{ postgres::connect() }; user new_user{ blame.who, info }; if( new_user.info.password.type() == password_type::RAW ) { new_user.info.password = <PASSWORD>( new_user.info.password.value< std::string >() ); } else if( new_user.info.password.type() != password_type::INVALID ) STICKERS_LOG( log_level::WARNING, "creating user with a pre-set password" ); new_user.id = write_user_details( connection, new_user, blame, true, signup ); STICKERS_LOG( log_level::INFO, "created new user with id ", new_user.id ); return new_user; } user_info load_user( const bigid& id ) { auto connection{ postgres::connect() }; pqxx::work transaction{ *connection }; auto result{ query_user_records_by( transaction, "user_id", id ) }; if( result.size() < 1 ) throw no_such_user::by_id( id, "loading" ); return compile_user_info_from_row( result[ 0 ] ); } user_info update_user( const user& u, const audit::blame& blame ) { auto connection{ postgres::connect() }; user updated_user{ u }; if( updated_user.info.password.type() == password_type::RAW ) updated_user.info.password = <PASSWORD>( updated_user.info.password.value< std::string >() ); write_user_details( connection, updated_user, blame, false ); return updated_user.info; } void delete_user( const bigid& 
id, const audit::blame& blame ) { auto connection{ postgres::connect() }; pqxx::work transaction{ *connection }; transaction.exec_params( PSQL( INSERT INTO users.user_deletions ( user_id, deleted, deleted_by, deleted_from ) VALUES ( $1, $2, $3, $4 ) ON CONFLICT DO NOTHING ; ), id, blame.when, blame.who, blame.where ); transaction.commit(); } user load_user_by_email( const std::string& email ) { auto connection{ postgres::connect() }; pqxx::work transaction{ *connection }; auto result{ query_user_records_by( transaction, "email", email ) }; if( result.size() < 1 ) throw no_such_user::by_email( email, "loading" ); return { result[ 0 ][ "user_id" ].as< bigid >(), compile_user_info_from_row( result[ 0 ] ) }; } void send_validation_email( const bigid& id ) { // IMPLEMENT: } } namespace stickers // Exceptions /////////////////////////////////////////////// { no_such_user::no_such_user( const std::string& msg ) : no_such_record_error( msg ) {} no_such_user no_such_user::by_id( const bigid& id, const std::string& purpose ) { return no_such_user{ "no such user with ID " + static_cast< std::string >( id ) + " (" + purpose + ")" }; } no_such_user no_such_user::by_email( const std::string& email, const std::string& purpose ) { return no_such_user{ "no such user with email " + email + " (" + purpose + ")" }; } } namespace stickers // Assertions /////////////////////////////////////////////// { void _assert_users_exist_impl::exec( pqxx::work & transaction, const std::string& ids_string ) { std::string query_string; ff::fmt( query_string, PSQL( WITH lookfor AS ( SELECT UNNEST( ARRAY[ {0} ] ) AS user_id ) SELECT lookfor.user_id FROM lookfor LEFT JOIN users.users_core AS uc ON uc.user_id = lookfor.user_id LEFT JOIN users.user_deletions AS ud ON ud.user_id = uc.user_id WHERE uc.user_id IS NULL OR ud.user_id IS NOT NULL ; ), ids_string ); auto result{ transaction.exec( query_string ) }; if( result.size() > 0 ) throw no_such_user::by_id( result[ 0 ][ 0 ].as< bigid >(), "assert" ); } } 
<file_sep>/src/handlers/list.cpp #line 2 "handlers/list.cpp" #include "handlers.hpp" #include "../api/user.hpp" #include "../api/list.hpp" #include "../common/json.hpp" #include <show/constants.hpp> namespace stickers { void handlers::get_list( show::request& request, const handler_vars_type& variables ) { auto found_user_id_variable{ variables.find( "user_id" ) }; if( found_user_id_variable == variables.end() ) throw handler_exit{ show::code::NOT_FOUND, "need a user ID" }; stickers::bigid user_id{ bigid::MIN() }; try { user_id = bigid::from_string( found_user_id_variable -> second ); } catch( const std::invalid_argument& e ) { throw handler_exit{ show::code::NOT_FOUND, "need a valid user ID" }; } try { nlj::json list{ nlj::json::array() }; for( auto& item : get_user_list( user_id ) ) list.push_back( nlj::json{ { "product_id", static_cast< std::string >( item.product_id ) }, { "quantity", item.quantity }, { "updated" , to_iso8601_str( item.updated ) } } ); auto list_json{ list.dump() }; show::response response{ request.connection(), show::HTTP_1_1, show::code::OK, { show::server_header, { "Content-Type", { "application/json" } }, { "Content-Length", { std::to_string( list_json.size() ) } } } }; response.sputn( list_json.c_str(), list_json.size() ); } catch( const no_such_user& nsu ) { throw handler_exit{ show::code::NOT_FOUND, "no such user" }; } } void handlers::add_list_item( show::request& request, const handler_vars_type& variables ) { throw handler_exit{ show::code::NOT_IMPLEMENTED, "" }; } void handlers::update_list_item( show::request& request, const handler_vars_type& variables ) { throw handler_exit{ show::code::NOT_IMPLEMENTED, "" }; } void handlers::remove_list_item( show::request& request, const handler_vars_type& variables ) { throw handler_exit{ show::code::NOT_IMPLEMENTED, "" }; } } <file_sep>/src/common/jwt.hpp #pragma once #ifndef STICKERS_MOE_COMMON_JWT_HPP #define STICKERS_MOE_COMMON_JWT_HPP #include "json.hpp" #include "timestamp.hpp" #include 
"uuid.hpp" #include <exception> #include <optional> #include <string> namespace stickers { struct jwt { // Types /////////////////////////////////////////////////////////////// enum class signature_alg { // HS256, HS512 }; enum class jwt_type { JWT, JWT_PLUS }; class structure_error : public std::runtime_error { using runtime_error::runtime_error; }; class validation_error : public std::runtime_error { using runtime_error::runtime_error; }; // Members ///////////////////////////////////////////////////////////// jwt_type typ = jwt_type::JWT_PLUS ; signature_alg alg = signature_alg::HS512; std::optional< uuid > jti ; std::optional< std::string > kid ; std::optional< stickers::timestamp > iat ; std::optional< stickers::timestamp > nbf ; std::optional< stickers::timestamp > exp ; nlj::json claims = "{}"_json ; // Functions /////////////////////////////////////////////////////////// static const jwt parse( const std::string& raw ); static const jwt parse( const std::string& raw, const std::map< std::string, std::string >& signing_keys ); static const jwt parse_no_validate( const std::string& raw ); static std::string serialize( const jwt& token ); static std::string serialize( const jwt& token, const std::map< std::string, std::string >& signing_keys ); }; } #endif <file_sep>/src/common/hashing.hpp #pragma once #ifndef STICKERS_MOE_COMMON_HASHING_HPP #define STICKERS_MOE_COMMON_HASHING_HPP #include "postgres.hpp" #include <cryptopp/sha.h> #include <cctype> #include <exception> #include <sstream> #include <string> namespace stickers { class hash_error : public std::runtime_error { using runtime_error::runtime_error; }; class sha256 { // So libpqxx can use sha256's protected default constructor friend sha256 pqxx::field::as< sha256 >() const; friend sha256 pqxx::field::as< sha256 >( const sha256& ) const; protected: sha256(); CryptoPP::SecByteBlock digest; public: sha256( const CryptoPP::SecByteBlock& ); sha256( const std::string& ); sha256( const char*, std::size_t ); 
sha256( const sha256& );
        sha256( sha256&& );
        
        sha256& operator =( const sha256& );
        sha256& operator =( sha256&& );
        
        bool operator ==( const sha256& ) const;
        bool operator !=( const sha256& ) const;
        bool operator < ( const sha256& ) const;
        bool operator > ( const sha256& ) const;
        bool operator <=( const sha256& ) const;
        bool operator >=( const sha256& ) const;
        
        // Digest as raw bytes / as a hex string
        std::string raw_digest() const;
        std::string hex_digest() const;
        
        // Hash the given input in one shot
        static sha256 make( const char*, std::size_t );
        static sha256 make( const std::string& );
        // Reconstruct a digest value from its hex representation
        static sha256 from_hex_string( const std::string& );
        
        // Incremental hashing helper
        class builder
        {
        protected:
            CryptoPP::SHA256 algorithm;
            
        public:
            void append( const char*, std::size_t );
            void append( const std::string& );
            // Produce the digest & reset the builder for reuse
            sha256 generate_and_clear();
        };
    };
    
    // Stored scrypt password hash plus the parameters used to derive it
    class scrypt
    {
    protected:
        scrypt();
        
        std::string digest;
        std::string salt;
        unsigned char _factor;
        unsigned char _block_size;
        unsigned char _parallelization;
        
    public:
        scrypt( // Load from database fields
            const std::string& digest,
            const std::string& salt,
            unsigned char factor,
            unsigned char block_size,
            unsigned char parallelization
        );
        scrypt( const scrypt& );
        scrypt( scrypt&& );
        
        scrypt& operator =( const scrypt& );
        scrypt& operator =( scrypt&& );
        
        bool operator ==( const scrypt& ) const;
        bool operator !=( const scrypt& ) const;
        bool operator < ( const scrypt& ) const;
        bool operator > ( const scrypt& ) const;
        bool operator <=( const scrypt& ) const;
        bool operator >=( const scrypt& ) const;
        
        std::string   raw_digest     () const;
        std::string   hex_digest     () const;
        std::string   raw_salt       () const;
        std::string   hex_salt       () const;
        unsigned char factor         () const;
        unsigned char block_size     () const;
        unsigned char parallelization() const;
        
        // Defaults from libscrypt v1.21
        static const unsigned char default_salt_size      { 16 };
        static const unsigned char default_factor         { 14 };
        static const unsigned char default_block_size     {  8 };
        static const unsigned char default_parallelization{ 16 };
        static const unsigned char default_digest_size    { 64 };
        
        // Derive a new hash from plaintext input
        static scrypt make( const
char* input,
            std::size_t input_len,
            const char* salt,
            std::size_t salt_len,
            unsigned char factor          = default_factor,
            unsigned char block_size      = default_block_size,
            unsigned char parallelization = default_parallelization,
            std::size_t digest_size       = default_digest_size
        );
        static scrypt make(
            const std::string& input,
            const std::string& salt,
            unsigned char factor          = default_factor,
            unsigned char block_size      = default_block_size,
            unsigned char parallelization = default_parallelization,
            std::size_t digest_size       = default_digest_size
        );
        
        // Pack / unpack the three scrypt parameters into the single integer
        // field used by libscrypt's MCF format
        static unsigned int make_libscrypt_mcf_factor(
            unsigned char factor          = default_factor,
            unsigned char block_size      = default_block_size,
            unsigned char parallelization = default_parallelization
        );
        static void split_libscrypt_mcf_factor(
            unsigned int combined,
            unsigned char& factor,
            unsigned char& block_size,
            unsigned char& parallelization
        );
    };
}


// Template specialization of `pqxx::string_traits<>(&)` for `stickers::sha256`,
// which allows use of `pqxx::field::to<>(&)` and `pqxx::field::as<>(&)`
namespace pqxx
{
    template<> struct string_traits< stickers::sha256 >
    {
        using subject_type = stickers::sha256;
        
        static constexpr const char* name() noexcept
        {
            return "stickers::sha256";
        }
        
        static constexpr bool has_null() noexcept { return false; }
        
        static bool is_null( const stickers::sha256& ) { return false; }
        
        [[noreturn]] static stickers::sha256 null()
        {
            internal::throw_null_conversion( name() );
        }
        
        // Parse a PostgreSQL BYTEA hex literal of the form "\x<64 hex chars>"
        // NOTE(review): `strlen` is unqualified here and relies on a
        // transitive declaration — confirm <cstring> reaches this header
        static void from_string( const char str[], stickers::sha256& h )
        {
            auto len{ strlen( str ) };
            
            if(
                len == 64 + 2
                && str[ 0 ] == '\\'
                && str[ 1 ] == 'x'
            )
                try
                {
                    h = stickers::sha256::from_hex_string(
                        std::string( str + 2, len - 2 )
                    );
                    return;
                }
                catch( const stickers::hash_error& he ) {}
            
            throw argument_error{
                "Failed conversion to "
                + std::string{ name() }
                + ": '"
                + std::string{ str }
                + "'"
            };
        }
        
        // Serialize for use in a query: printable bytes pass through,
        // everything else is hex-escaped
        static std::string to_string( const stickers::sha256& h )
        {
            std::string encoded{ h.raw_digest() };
            std::ostringstream decoded;
            decoded << std::hex;
            for( auto& b : encoded )
if( std::isprint( b ) ) decoded << b; else decoded << "\\x" << static_cast< unsigned int >( b ); return decoded.str(); } }; } #endif <file_sep>/src/handlers/auth.cpp #line 2 "handlers/auth.cpp" #include "handlers.hpp" #include "../api/user.hpp" #include "../common/auth.hpp" #include "../common/config.hpp" #include "../common/logging.hpp" #include "../server/parse.hpp" #include <show/constants.hpp> namespace stickers { void handlers::signup( show::request& request, const handler_vars_type& variables ) { throw handler_exit{ show::code::NOT_IMPLEMENTED, "Not implemented" }; } void handlers::login( show::request& request, const handler_vars_type& variables ) { auto content_doc{ parse_request_content( request ) }; if( !content_doc.is_a< map_document >() ) throw handler_exit{ show::code::BAD_REQUEST, "invalid data format" }; auto& content{ content_doc.get< map_document >() }; for( const auto& field : { std::string{ "email" }, std::string{ "password" } } ) if( content.find( field ) == content.end() ) throw handler_exit{ show::code::BAD_REQUEST, "missing required field \"" + field + "\"" }; else if( !content[ field ].is_a< string_document >() ) throw handler_exit{ show::code::BAD_REQUEST, "required field \"" + field + "\" must be a string" }; try { auto user{ load_user_by_email( content[ "email" ].get< string_document >() ) }; if( user.info.password == content[ "password" ].get< string_document >() ) { permissions_assert_all( get_user_permissions( user.id ), { "log_in" } ); auto auth_jwt{ generate_auth_token_for_user( user.id, { user.id, "user login", now(), request.client_address() } ) }; auto auth_token{ jwt::serialize( auth_jwt ) }; STICKERS_LOG( stickers::log_level::INFO, "user with ID ", user.id, " logged in from ", request.client_address() ); auto token_message_json{ nlj::json{ { "jwt" , auth_token }, { "user_id", user.id } }.dump() }; show::response response{ request.connection(), show::HTTP_1_1, show::code::OK, { show::server_header, { "Authorization", { "Bearer " 
+ auth_token } }, { "Content-Type", { "application/json" } }, { "Content-Length", { std::to_string( token_message_json.size() ) } }, { "Location", { "/user/" + static_cast< std::string >( user.id ) } }, { "Set-Cookie", { config()[ "auth" ][ "token_cookie_name" ].get< std::string >() + "=" + auth_token + "; expires=" + to_http_ts_str( *auth_jwt.exp ) + "; domain=" + config()[ "auth" ][ "token_cookie_domain" ].get< std::string >() } } } }; response.sputn( token_message_json.c_str(), token_message_json.size() ); return; } } catch( const no_such_user& e ) {} throw handler_exit{ show::code::UNAUTHORIZED, "check email & password" }; } } <file_sep>/src/api/list.cpp #line 2 "api/list.cpp" #include "list.hpp" #include "../api/user.hpp" #include "../common/postgres.hpp" namespace { } namespace stickers { std::vector< list_entry > get_user_list( const bigid& user_id ) { // Assert user exists auto user_info{ load_user( user_id ) }; auto connection = postgres::connect(); pqxx::work transaction{ *connection }; auto result = transaction.exec_params( PSQL( SELECT product_id, revised, quantity FROM lists.user_product_lists WHERE user_id = $1 ), user_id ); transaction.commit(); std::vector< list_entry > list; for( const auto& row : result ) list.push_back( { row[ "product_id" ].as< bigid >(), row[ "quantity" ].as< unsigned long >(), row[ "revised" ].as< timestamp >() } ); return list; } } <file_sep>/src/server/parse.hpp #pragma once #ifndef STICKERS_MOE_SERVER_PARSE_HPP #define STICKERS_MOE_SERVER_PARSE_HPP #include "../common/document.hpp" namespace stickers { document parse_request_content( show::request& ); } #endif <file_sep>/src/server/handler.hpp #pragma once #ifndef STICKERS_MOE_SERVER_HANDLER_HPP #define STICKERS_MOE_SERVER_HANDLER_HPP #include <show.hpp> #include <functional> // std::function #include <map> namespace stickers { using handler_vars_type = std::map< std::string, std::string >; using handler_type = std::function< void( show::request&, const handler_vars_type& ) 
>; class handler_exit { public: show::response_code response_code; std::string message; handler_exit( show::response_code response_code, const std::string& message ) : response_code{ response_code }, message { message } {} }; } #endif<file_sep>/src/common/sorting.cpp #line 2 "common/sorting.cpp" #include "sorting.hpp" #include <exception> namespace { using char_type = stickers::byte_string::value_type; static const char_type key_segment_new { 0x80 }; static const char_type key_segment_min_head{ 0x01 }; static const char_type key_segment_min { 0x00 }; static const char_type key_segment_max { 0xff }; } namespace stickers { byte_string next_sorting_key_between( std::optional< byte_string > before, std::optional< byte_string > after ) { if( ( before && before -> size() < 1 ) || ( after && after -> size() < 1 ) ) throw std::invalid_argument{ "zero-length sorting key" }; else if( before && after && *before >= *after ) throw std::invalid_argument{ "'before' sorting key must be less than 'after'" }; if( !before && !after ) return { key_segment_new }; else if( before && !after ) { byte_string new_key; auto before_iter{ before -> begin() }; while( before_iter != before -> end() && *before_iter == key_segment_max ) { new_key += key_segment_max; ++before_iter; } if( before_iter == after -> end() ) new_key += key_segment_new; else new_key += *before_iter + 1; return new_key; } else if( !before && after ) { byte_string new_key; auto after_iter{ after -> begin() }; while( after_iter != after -> end() && *after_iter == key_segment_min ) { new_key += key_segment_min; ++after_iter; } if( after_iter == after -> end() ) throw std::invalid_argument{ "invalid sorting key passed for 'after' (all min octet)" }; if( *after_iter == key_segment_min_head ) new_key += { key_segment_min, key_segment_new }; else new_key += *after_iter - 1; return new_key; } else { byte_string new_key; auto before_iter{ before -> begin() }; auto after_iter{ after -> begin() }; decltype( *before_iter - *after_iter 
) diff; while( before_iter != before -> end() && after_iter != after -> end() && ( diff = *before_iter - *after_iter ) <= 1 ) { new_key += *before_iter; ++before_iter; ++ after_iter; } if( before_iter == before -> end() || after_iter == after -> end() ) { if( before_iter != before -> end() ) new_key += next_sorting_key_between( byte_string{ before_iter, before -> end() }, std::nullopt ); else if( after_iter != after -> end() ) new_key += next_sorting_key_between( std::nullopt, byte_string{ after_iter, after -> end() } ); else new_key += next_sorting_key_between( std::nullopt, std::nullopt ); } else new_key += diff / 2; return new_key; } } } <file_sep>/src/api/media.hpp #pragma once #ifndef STICKERS_MOE_API_MEDIA_HPP #define STICKERS_MOE_API_MEDIA_HPP #include "../audit/blame.hpp" #include "../common/crud.hpp" #include "../common/hashing.hpp" #include "../common/postgres.hpp" #include "../common/timestamp.hpp" #include <experimental/filesystem> #include <optional> #include <string> #include <streambuf> #include <show.hpp> namespace stickers { enum class media_decency { SAFE, QUESTIONABLE, EXPLICIT }; struct media_info { std::experimental::filesystem::path file_path; std::string file_url; std::string mime_type; media_decency decency; std::optional< std::string > original_filename; timestamp uploaded; bigid uploaded_by; }; struct media { sha256 file_hash; media_info info; }; // May throw `indeterminate_mime_type` or `unacceptable_mime_type` media save_media( std::streambuf & file_contents, const std::optional< std::string >& original_filename, const std::optional< std::string >& mime_type, media_decency decency, const audit::blame & blame ); // May throw `indeterminate_mime_type`, `unacceptable_mime_type`, or // `handler_exit` media save_media( show::request & upload_request, const audit::blame& blame ); media_info load_media_info( const sha256& ); class _assert_media_exist_impl { template< class Container > friend void assert_media_exist( pqxx::work &, const 
Container& );
        
        _assert_media_exist_impl();
        
        static void exec( pqxx::work&, const std::string& );
    };
    
    // ACID-safe assert; if any of the supplied hashes do not correspond to a
    // record, this will throw `no_such_media` for one of those hashes
    template< class Container = std::initializer_list< sha256 > >
    void assert_media_exist(
        pqxx::work     & transaction,
        const Container& hashes
    )
    {
        _assert_media_exist_impl::exec(
            transaction,
            postgres::format_variable_list( transaction, hashes )
        );
    }
    
    class no_such_media : public no_such_record_error
    {
    public:
        const sha256 hash;
        no_such_media( const sha256& );
    };
    
    // The MIME type of an upload could not be determined at all
    class indeterminate_mime_type : public std::runtime_error
    {
    public:
        indeterminate_mime_type();
    };
    
    // The MIME type was determined but is not accepted for storage
    class unacceptable_mime_type : public std::invalid_argument
    {
    public:
        const std::string mime_type;
        unacceptable_mime_type( const std::string& mime_type );
    };
}


// Template specialization of `pqxx::string_traits<>(&)` for
// `stickers::media_decency`, which allows use of `pqxx::field::to<>(&)` and
// `pqxx::field::as<>(&)`
namespace pqxx
{
    template<> struct string_traits< stickers::media_decency >
    {
        using subject_type = stickers::media_decency;
        
        static constexpr const char* name() noexcept
        {
            return "stickers::media_decency";
        }
        
        static constexpr bool has_null() noexcept { return false; }
        
        static bool is_null( const subject_type& ) { return false; }
        
        [[noreturn]] static stickers::media_decency null()
        {
            internal::throw_null_conversion( name() );
        }
        
        // Parse the PostgreSQL enum values "safe" / "questionable" /
        // "explicit"
        static void from_string( const char str[], subject_type& v )
        {
            std::string strval{ str };
            if( strval == "safe" )
                v = stickers::media_decency::SAFE;
            else if( strval == "questionable" )
                v = stickers::media_decency::QUESTIONABLE;
            else if( strval == "explicit" )
                v = stickers::media_decency::EXPLICIT;
            else
                throw argument_error{
                    "Failed conversion to "
                    + static_cast< std::string >( name() )
                    + ": '"
                    + strval
                    + "'"
                };
        }
        
        static std::string to_string( const subject_type& v )
        {
            switch( v )
            {
            case stickers::media_decency::SAFE:
                return "safe";
            case
stickers::media_decency::QUESTIONABLE: return "questionable"; case stickers::media_decency::EXPLICIT: return "explicit"; } } }; } #endif <file_sep>/src/handlers/handlers.hpp #pragma once #ifndef STICKERS_MOE_HANDLERS_HANDLERS_HPP #define STICKERS_MOE_HANDLERS_HANDLERS_HPP #include "../server/handler.hpp" namespace stickers { namespace handlers { void signup( show::request&, const handler_vars_type& ); void login( show::request&, const handler_vars_type& ); void create_user( show::request&, const handler_vars_type& ); void get_user( show::request&, const handler_vars_type& ); void edit_user( show::request&, const handler_vars_type& ); void delete_user( show::request&, const handler_vars_type& ); void get_list( show::request&, const handler_vars_type& ); void add_list_item( show::request&, const handler_vars_type& ); void update_list_item( show::request&, const handler_vars_type& ); void remove_list_item( show::request&, const handler_vars_type& ); void create_person( show::request&, const handler_vars_type& ); void get_person( show::request&, const handler_vars_type& ); void edit_person( show::request&, const handler_vars_type& ); void delete_person( show::request&, const handler_vars_type& ); void create_shop( show::request&, const handler_vars_type& ); void get_shop( show::request&, const handler_vars_type& ); void edit_shop( show::request&, const handler_vars_type& ); void delete_shop( show::request&, const handler_vars_type& ); void create_design( show::request&, const handler_vars_type& ); void get_design( show::request&, const handler_vars_type& ); void edit_design( show::request&, const handler_vars_type& ); void delete_design( show::request&, const handler_vars_type& ); void create_product( show::request&, const handler_vars_type& ); void get_product( show::request&, const handler_vars_type& ); void edit_product( show::request&, const handler_vars_type& ); void delete_product( show::request&, const handler_vars_type& ); void upload_media( show::request&, 
const handler_vars_type& ); void get_media_info( show::request&, const handler_vars_type& ); } } #endif <file_sep>/src/dependency_test.cpp #include <iostream> #include <memory> #include <string> #include "common/formatting.hpp" #include "common/hashing.hpp" #include "common/json.hpp" #include "common/postgres.hpp" #include "common/redis.hpp" //////////////////////////////////////////////////////////////////////////////// std::unique_ptr< pqxx::connection > connect( const std::string& dbname ) { return std::unique_ptr< pqxx::connection >( new pqxx::connection( "user=postgres dbname=" + dbname ) ); } int main( int argc, char* argv[] ) { if( argc < 2 ) { ff::writeln( std::cerr, "usage: ", argv[ 0 ], " <dbname>" ); return -1; } try { auto connection = connect( std::string( argv[ 1 ] ) ); pqxx::work transaction( *connection ); nlj::json test_json = { { "foo", true }, { "bar", { 1234123, "Hello World", true, 3.14 } } }; connection -> prepare( "test_query", PSQL( SELECT $1::JSONB->'bar' AS test_json; ) ); pqxx::result result = transaction.exec_prepared( "test_query", test_json.dump() ); transaction.commit(); redox::Redox redox; if( !redox.connect( "localhost", 6379 ) ) { ff::writeln( std::cerr, "could not connect to Redis server" ); return 1; } redox.set( "test_json", result[ 0 ][ "test_json" ].as< std::string >() ); test_json = nlj::json::parse( redox.get( "test_json" ) ); std::string message = test_json[ 1 ]; redox.disconnect(); ff::writeln( std::cout, message ); CryptoPP::SecByteBlock abDigest( CryptoPP::SHA256::DIGESTSIZE ); CryptoPP::SHA256().CalculateDigest( abDigest.begin(), ( byte* )message.c_str(), message.size() ); std::string message_hash; CryptoPP::HexEncoder( new CryptoPP::StringSink( message_hash ) ).Put( abDigest.begin(), abDigest.size() ); ff::writeln( std::cout, message_hash ); } catch( const std::exception &e ) { ff::writeln( std::cerr, e.what() ); return 1; } catch( ... 
) { ff::writeln( std::cerr, "uncaught non-std::exception in main()" ); return 2; } return 0; } <file_sep>/src/common/postgres.cpp #line 2 "common/postgres.cpp" #include "postgres.hpp" #include "config.hpp" #include "logging.hpp" namespace stickers { namespace postgres { std::unique_ptr< pqxx::connection > connect() { auto& pg_config{ config()[ "database" ] }; // TODO: Make all of these optional return connect( pg_config[ "host" ].get< std::string >(), pg_config[ "port" ].get< int >(), pg_config[ "user" ].get< std::string >(), pg_config[ "pass" ].get< std::string >(), pg_config[ "dbname" ].get< std::string >() ); } std::unique_ptr< pqxx::connection > connect( const std::string& host, unsigned int port, const std::string& user, const std::string& pass, const std::string& dbname ) { auto connection{ std::make_unique< pqxx::connection >( "host=" + ( host == "" ? "''" : host ) + " port=" + std::to_string( port ) + " user=" + ( user == "" ? "''" : user ) + " password=" + ( pass == "" ? "''" : pass ) + " dbname=" + ( dbname == "" ? 
"''" : dbname ) ) }; STICKERS_LOG( log_level::VERBOSE, "created PostgreSQL connection {host=", connection -> hostname(), " port=", connection -> port(), " user=", connection -> username(), " dbname=", connection -> dbname(), "}" ); return connection; } } }<file_sep>/src/common/auth.cpp #line 2 "common/auth.cpp" #include "auth.hpp" #include "logging.hpp" #include "postgres.hpp" #include "string_utils.hpp" #include "../api/user.hpp" // stickers::no_such_user #include "../common/config.hpp" #include <algorithm> // std::set_difference() #include <iterator> // std::inserter namespace { bool extract_auth_from_token( const std::string& token_string, stickers::auth_info& info ) { try { auto auth_jwt{ stickers::jwt::parse( token_string ) }; auto found_user_id{ auth_jwt.claims.find( "user_id" ) }; auto found_permissions{ auth_jwt.claims.find( "permissions" ) }; stickers::permissions_type permissions; if( found_permissions != auth_jwt.claims.end() && found_permissions.value().is_array() ) for( const auto& permission : found_permissions.value() ) permissions.insert( permission.get< std::string >() ); else throw stickers::authentication_error{ "missing required claim \"permissions\"" }; if( found_user_id != auth_jwt.claims.end() && found_user_id.value().is_string() ) { auto user_id{ stickers::bigid::MIN() }; try { user_id = stickers::bigid::from_string( found_user_id.value().get< std::string >() ); } catch( const std::invalid_argument& e ) { throw stickers::authentication_error{ "required claim \"user_id\" not a valid bigid" }; } info = { user_id, permissions }; return true; } else throw stickers::authentication_error{ "missing required claim \"user_id\"" }; } catch( const stickers::jwt::structure_error& e ) { // Ignore any tokens that aren't JWTs STICKERS_LOG( stickers::log_level::VERBOSE, "skipping unusable token (not a JWT: ", e.what(), ")" ); return false; } catch( const stickers::jwt::validation_error& e ) { // Ignore any tokesn that don't pass validation (may not // even 
belong to this site) STICKERS_LOG( stickers::log_level::VERBOSE, "skipping unusable token (not valid: ", e.what(), ")" ); return false; } } } namespace stickers { auth_info authenticate( const show::request& request ) { bool header_found{ false }; bool auth_found{ false }; auth_info info{ bigid::MIN(), {} }; auto authorization_headers{ request.headers().find( "Authorization" ) }; if( authorization_headers != request.headers().end() ) { header_found = true; std::string bearer_begin{ "Bearer " }; for( const auto& header_value : authorization_headers -> second ) { if( header_value.find( bearer_begin ) == 0 ) { auth_found = extract_auth_from_token( header_value.substr( bearer_begin.size() ), info ); if( auth_found ) break; } } } auto cookie_headers{ request.headers().find( "Cookie" ) }; if( cookie_headers != request.headers().end() ) { header_found = true; std::string cookie_begin{ config()[ "auth" ][ "token_cookie_name" ].get< std::string >() + "=" }; for( const auto& header_value : cookie_headers -> second ) { if( header_value.find( cookie_begin ) == 0 ) { auth_found = extract_auth_from_token( header_value.substr( cookie_begin.size() ), info ); if( auth_found ) break; } } } if( !header_found ) throw authentication_error{ "missing auth header (\"Authorization\" or \"Cookie\")" }; else if( !auth_found ) throw authentication_error{ "no usable authentication tokens found" }; else return info; } jwt generate_auth_token_for_user( bigid user_id, const audit::blame& blame ) { auto permissions{ get_user_permissions( user_id ) }; auto token_lifetime = std::chrono::hours{ config()[ "auth" ][ "token_lifetime_hours" ].get< int >() }; jwt token{ .iat = now(), .nbf = now(), .exp = now() + token_lifetime, .claims = { { "user_id", static_cast< std::string >( user_id ) }, { "permissions", permissions }, { "blame", { { "who" , static_cast< std::string >( blame.who ) }, { "what" , blame.what }, { "when" , to_iso8601_str( blame.when ) }, { "where", blame.where } } } } }; STICKERS_LOG( 
log_level::INFO, "user ", blame.who, " generated auth token for user ", user_id, " from ", blame.where, " at ", to_iso8601_str( blame.when ), " ", blame.what ); return token; } // void set_user_permissions( // bigid user_id, // const permissions_type& permissions // ) // { // } permissions_type get_user_permissions( bigid user_id ) { auto connection{ postgres::connect() }; pqxx::work transaction{ *connection }; pqxx::result result{ transaction.exec_params( PSQL( SELECT p.permission AS permission FROM users.users AS u JOIN permissions.role_permissions AS rp ON u.user_role_id = rp.role_id JOIN permissions.permissions AS p ON rp.permission_id = p.permission_id WHERE u.user_id = $1 ; ), user_id ) }; transaction.commit(); if( result.size() < 1 ) { // Assert that the user even exists; if so, continue to return an // empty set of permissions; if not, throw `no_such_user` // FIXME: This (possibly) requires an extra database access for e.g. // banned users, who we most likely _don't_ want causing extra load auto user_info = load_user( user_id ); } permissions_type permissions; for( const auto& row : result ) permissions.insert( row[ "permission" ].as< std::string >() ); return permissions; } void permissions_assert_any( const permissions_type& got, const permissions_type& expect ) { std::set< std::string > difference; std::set_difference( expect.begin(), expect.end(), got.begin(), got.end(), std::inserter( difference, difference.begin() ) ); if( difference.size() == expect.size() ) throw authorization_error{ "missing one of these permissions: \"" + join( difference, std::string{ "\", \"" } ) + "\"" }; } void permissions_assert_all( const permissions_type& got, const permissions_type& expect ) { std::set< std::string > difference; std::set_difference( expect.begin(), expect.end(), got.begin(), got.end(), std::inserter( difference, difference.begin() ) ); if( difference.size() ) throw authorization_error{ "missing permissions: \"" + join( difference, std::string{ "\", \"" } ) 
+ "\"" }; } } <file_sep>/src/api/user.hpp #pragma once #ifndef STICKERS_MOE_API_USER_HPP #define STICKERS_MOE_API_USER_HPP #include <exception> #include <optional> #include <string> #include "../audit/blame.hpp" #include "../common/bigid.hpp" #include "../common/crud.hpp" #include "../common/hashing.hpp" #include "../common/postgres.hpp" #include "../common/timestamp.hpp" namespace stickers // Passwords //////////////////////////////////////////////// { enum class password_type { INVALID = 0, RAW, SCRYPT }; class password { protected: password_type _type; union { void* invalid_value; std::string raw_value; scrypt scrypt_value; }; void cleanup(); const char* type_name() const; public: password(); password( const password & ); password( const std::string& ); password( const scrypt & ); ~password(); password_type type() const { return _type; } template< typename T > T& value(); std::string hash() const; std::string salt() const; long factor() const; bool operator==( const password& ) const; bool operator!=( const password& ) const; // Check hashed password against a raw password string bool operator==( const std::string& ) const; bool operator!=( const std::string& ) const; password& operator=( const password & ); password& operator=( const std::string& ); password& operator=( const scrypt & ); }; template<> inline std::string& password::value< std::string >() { if( _type != password_type::RAW ) throw std::logic_error{ "attempt to get wrong type of value (raw) from " "stickers::password (" + static_cast< std::string >( type_name() ) + ")" }; return raw_value; } template<> inline scrypt& password::value< scrypt >() { if( _type != password_type::SCRYPT ) throw std::logic_error{ "attempt to get wrong type of value (scrypt) from " "stickers::password (" + static_cast< std::string >( type_name() ) + ")" }; return scrypt_value; } // Return a fresh hashing of a new password using the preferred method password hash_password( const std::string& ); } namespace stickers // User 
management ////////////////////////////////////////// { struct user_info { password <PASSWORD>; timestamp created; timestamp revised; std::string display_name; std::optional< std::string > real_name; std::optional< sha256 > avatar_hash; std::string email; }; struct user { bigid id; user_info info; }; user create_user( const user_info&, const audit::blame&, bool signup = true ); user_info load_user( const bigid & ); user_info update_user( const user &, const audit::blame& ); void delete_user( const bigid &, const audit::blame& ); user load_user_by_email( const std::string& ); // TODO: Move void send_validation_email( const bigid& ); class _assert_users_exist_impl { template< class Container > friend void assert_users_exist( pqxx::work &, const Container& ); _assert_users_exist_impl(); static void exec( pqxx::work&, const std::string& ); }; // ACID-safe assert; if any of the supplied hashes do not correspond to a // record, this will throw `no_such_user` for one of those hashes template< class Container = std::initializer_list< bigid > > void assert_users_exist( pqxx::work & transaction, const Container& hashes ) { _assert_users_exist_impl::exec( transaction, postgres::format_variable_list( transaction, hashes ) ); } } namespace stickers // Exceptions /////////////////////////////////////////////// { class no_such_user : public no_such_record_error { protected: no_such_user( const std::string& ); public: static no_such_user by_id ( const bigid&, const std::string& ); static no_such_user by_email( const std::string&, const std::string& ); }; } // Template specialization of `pqxx::string_traits<>(&)` for // `stickers::password_type`, which allows use of `pqxx::field::to<>(&)` and // `pqxx::field::as<>(&)` namespace pqxx { template<> struct string_traits< stickers::password_type > { using subject_type = stickers::password_type; static constexpr const char* name() noexcept { return "stickers::password_type"; } static constexpr bool has_null() noexcept { return false; } 
static bool is_null( const stickers::password_type& ) { return false; } [[noreturn]] static stickers::password_type null() { internal::throw_null_conversion( name() ); } static void from_string( const char str[], stickers::password_type& pt ) { std::string s( str ); if( s == "scrypt" ) pt = stickers::password_type::SCRYPT; else if( s == "invalid" ) pt = stickers::password_type::INVALID; else throw argument_error{ "Failed conversion to " + static_cast< std::string >( name() ) + ": '" + static_cast< std::string >( str ) + "'" }; } static std::string to_string( stickers::password_type pt ) { switch( pt ) { case stickers::password_type::SCRYPT: return "scrypt"; default: return "invalid"; } } }; } #endif <file_sep>/src/audit/blame.hpp #pragma once #ifndef STICKERS_MOE_AUDIT_BLAME_HPP #define STICKERS_MOE_AUDIT_BLAME_HPP #include "../common/bigid.hpp" #include "../common/timestamp.hpp" #include <stdexcept> #include <string> #include <utility> namespace stickers { namespace audit { struct blame { const bigid who; // User ID const std::string what; // High-level operation description const timestamp when; // Request timestamp const std::string where; // IP address }; } } #endif <file_sep>/CMakeLists.txt CMAKE_MINIMUM_REQUIRED( VERSION 3.6 ) SET( CMAKE_C_COMPILER /usr/local/Cellar/llvm/6.0.0/bin/clang ) SET( CMAKE_CXX_COMPILER /usr/local/Cellar/llvm/6.0.0/bin/clang++ ) SET( CMAKE_CXX_STANDARD 17 ) SET( CMAKE_CXX_STANDARD_REQUIRED ON ) PROJECT( "stickers.moe API" CXX ) FIND_LIBRARY( PQXX_LIBRARY pqxx ) FIND_LIBRARY( REDOX_LIBRARY redox ) FIND_LIBRARY( FASTFORMAT_LIBRARY FastFormat ) FIND_LIBRARY( CRYPTOPP_LIBRARY cryptopp ) FIND_LIBRARY( TZ_LIBRARY tz ) FIND_LIBRARY( CURL_LIBRARY curl ) FIND_LIBRARY( SCRYPT_LIBRARY scrypt ) ADD_EXECUTABLE( server src/api/design.cpp src/api/list.cpp src/api/media.cpp src/api/person.cpp src/api/shop.cpp src/api/user.cpp src/common/auth.cpp src/common/bigid.cpp src/common/config.cpp src/common/document.cpp src/common/hashing.cpp 
src/common/jwt.cpp src/common/postgres.cpp src/common/sorting.cpp src/common/timestamp.cpp src/common/uuid.cpp src/handlers/auth.cpp src/handlers/design.cpp src/handlers/list.cpp src/handlers/media.cpp src/handlers/person.cpp src/handlers/product.cpp src/handlers/shop.cpp src/handlers/user.cpp src/server/main.cpp src/server/parse.cpp src/server/routing.cpp src/server/server.cpp ) TARGET_LINK_LIBRARIES( server "-L/usr/local/Cellar/llvm/6.0.0/lib" "-lc++experimental" ${PQXX_LIBRARY} ${REDOX_LIBRARY} ${FASTFORMAT_LIBRARY} ${CRYPTOPP_LIBRARY} ${TZ_LIBRARY} ${CURL_LIBRARY} ${SCRYPT_LIBRARY} ) ADD_EXECUTABLE( password_gen src/api/media.cpp src/api/user.cpp src/common/bigid.cpp src/common/config.cpp src/common/document.cpp src/common/hashing.cpp src/common/postgres.cpp src/common/timestamp.cpp src/common/uuid.cpp src/server/parse.cpp src/utilities/password_gen.cpp ) TARGET_LINK_LIBRARIES( password_gen "-L/usr/local/Cellar/llvm/5.0.1/lib" "-lc++experimental" ${PQXX_LIBRARY} ${FASTFORMAT_LIBRARY} ${CRYPTOPP_LIBRARY} ${TZ_LIBRARY} ${CURL_LIBRARY} ${SCRYPT_LIBRARY} ) ADD_EXECUTABLE( dependency_test src/dependency_test.cpp ) TARGET_LINK_LIBRARIES( dependency_test ${PQXX_LIBRARY} ${REDOX_LIBRARY} ${FASTFORMAT_LIBRARY} ${CRYPTOPP_LIBRARY} ${TZ_LIBRARY} ${CURL_LIBRARY} ${SCRYPT_LIBRARY} ) <file_sep>/src/common/postgres.hpp #pragma once #ifndef STICKERS_MOE_COMMON_POSTGRES_HPP #define STICKERS_MOE_COMMON_POSTGRES_HPP #define PQXX_HAVE_OPTIONAL #include <pqxx/pqxx> #include <memory> #include <string> #define PSQL( ... ) #__VA_ARGS__ namespace stickers { namespace postgres { std::unique_ptr< pqxx::connection > connect(); std::unique_ptr< pqxx::connection > connect( const std::string& host, unsigned int port, const std::string& user, const std::string& pass, const std::string& dbname ); // Format a sequence of query parameters as a comma-separated list which // can then be formatted into a query string surrounded by the // appropriate delimiters (parentheses etc.) 
template< typename Iterable > std::string format_variable_list( const pqxx::work& transaction, const Iterable & values ) { std::string s; // Use the begin() and end() functions like range-based `for` auto current{ begin( values ) }; auto last{ end( values ) }; while( true ) { s += transaction.quote( *current ); ++current; if( current == last ) break; else s += ','; } return s; } } } #endif <file_sep>/src/api/shop.cpp #line 2 "api/shop.cpp" #include "shop.hpp" #include "../api/person.hpp" #include "../common/formatting.hpp" #include "../common/logging.hpp" namespace { stickers::bigid write_shop_details( stickers::shop & shop, const stickers::audit::blame& blame, bool generate_id ) { auto connection{ stickers::postgres::connect() }; pqxx::work transaction{ *connection }; if( generate_id ) { auto result{ transaction.exec_params( PSQL( INSERT INTO shops.shops_core ( shop_id, _a_revision ) VALUES ( DEFAULT, $1 ) RETURNING shop_id ; ), blame.when ) }; result[ 0 ][ "shop_id" ].to< stickers::bigid >( shop.id ); } else stickers::assert_shops_exist( transaction, { shop.id } ); stickers::assert_people_exist( transaction, { shop.info.owner_person_id } ); transaction.exec_params( PSQL( INSERT INTO shops.shop_revisions ( shop_id, revised, revised_by, revised_from, shop_name, shop_url, founded, closed, owner_id ) VALUES ( $1, $2, $3, $4, $5, $6, $7, $8, $9 ) ; ), shop.id, blame.when, blame.who, blame.where, shop.info.name, shop.info.url, shop.info.founded, shop.info.closed, shop.info.owner_person_id ); transaction.commit(); return shop.id; } } namespace stickers // Person //////////////////////////////////////////////////// { shop create_shop( const shop_info & info, const audit::blame& blame ) { shop s{ bigid::MIN(), info }; write_shop_details( s, blame, true ); return s; } shop_info load_shop( const bigid& id ) { auto connection{ postgres::connect() }; pqxx::work transaction{ *connection }; auto result{ transaction.exec_params( PSQL( SELECT created, revised, shop_name, 
shop_url, founded::TIMESTAMPTZ, closed::TIMESTAMPTZ, owner_id FROM shops.shops WHERE shop_id = $1 AND NOT deleted ; ), id ) }; transaction.commit(); if( result.size() < 1 ) throw no_such_shop{ id }; auto& row{ result[ 0 ] }; shop_info info{ row[ "created" ].as< timestamp >(), row[ "revised" ].as< timestamp >(), row[ "shop_name" ].as< std::string >(), row[ "shop_url" ].as< std::string >(), row[ "owner_id" ].as< bigid >(), std::nullopt, std::nullopt }; if( !row[ "founded" ].is_null() ) info.founded = row[ "founded" ].as< timestamp >(); if( !row[ "closed" ].is_null() ) info.closed = row[ "closed" ].as< timestamp >(); return info; } shop_info update_shop( const shop& s, const audit::blame& blame ) { auto updated_shop{ s }; write_shop_details( updated_shop, blame, true ); return updated_shop.info; } void delete_shop( const bigid& id, const audit::blame& blame ) { auto connection{ postgres::connect() }; pqxx::work transaction{ *connection }; assert_shops_exist( transaction, { id } ); auto result{ transaction.exec_params( PSQL( INSERT INTO shops.shop_deletions ( shop_id, deleted, deleted_by, deleted_from ) VALUES ( $1, $2, $3, $4 ) ; ), id, blame.when, blame.who, blame.where ) }; transaction.commit(); } } namespace stickers // Exception //////////////////////////////////////////////// { no_such_shop::no_such_shop( const bigid& id ) : no_such_record_error{ "no such shop with ID " + static_cast< std::string >( id ) }, id{ id } {} } namespace stickers // Assertion ///////////////////////////////////////////////// { void _assert_shops_exist_impl::exec( pqxx::work & transaction, const std::string& ids_string ) { std::string query_string; ff::fmt( query_string, PSQL( WITH lookfor AS ( SELECT UNNEST( ARRAY[ {0} ] ) AS shop_id ) SELECT lookfor.shop_id FROM lookfor LEFT JOIN shops.shops_core AS sc ON sc.shop_id = lookfor.shop_id LEFT JOIN shops.shop_deletions AS sd ON sd.shop_id = sc.shop_id WHERE sc.shop_id IS NULL OR sd.shop_id IS NOT NULL ; ), ids_string ); auto result{ 
transaction.exec( query_string ) }; if( result.size() > 0 ) throw no_such_shop{ result[ 0 ][ 0 ].as< bigid >() }; } } <file_sep>/src/handlers/shop.cpp #line 2 "handlers/shop.cpp" #include "handlers.hpp" #include "../api/shop.hpp" #include "../common/auth.hpp" #include "../common/crud.hpp" #include "../common/json.hpp" #include "../server/parse.hpp" #include <show/constants.hpp> namespace { void shop_to_json( const stickers::bigid & id, const stickers::shop_info& info, nlj::json & shop_json ) { shop_json = { { "shop_id" , id }, { "created" , stickers::to_iso8601_str( info.created ) }, { "revised" , stickers::to_iso8601_str( info.revised ) }, { "name" , info.name }, { "url" , info.url }, { "founded" , nullptr }, { "closed" , nullptr }, { "owner_person_id", info.owner_person_id } }; if( info.founded ) shop_json[ "founded" ] = stickers::to_iso8601_str( *info.founded ); if( info.closed ) shop_json[ "closed" ] = stickers::to_iso8601_str( *info.closed ); } stickers::shop_info shop_info_from_document( const stickers::document& details_doc ) { if( !details_doc.is_a< stickers::map_document >() ) throw stickers::handler_exit{ show::code::BAD_REQUEST, "invalid data format" }; auto& details_map{ details_doc.get< stickers::map_document >() }; for( const auto& field : { std::string{ "name" }, std::string{ "url" }, std::string{ "owner_person_id" } } ) if( details_map.find( field ) == details_map.end() ) throw stickers::handler_exit{ show::code::BAD_REQUEST, "missing required field \"" + static_cast< std::string >( field ) + "\"" }; else if( !details_map[ field ].is_a< stickers::string_document >() ) throw stickers::handler_exit{ show::code::BAD_REQUEST, "required field \"" + static_cast< std::string >( field ) + "\" must be a string" }; for( const auto& field : { std::string{ "founded" }, std::string{ "closed" } } ) if( details_map.find( field ) == details_map.end() ) throw stickers::handler_exit{ show::code::BAD_REQUEST, "missing required field \"" + static_cast< std::string >( 
field ) + "\"" }; else if( !details_map[ field ].is_a< stickers:: null_document >() && !details_map[ field ].is_a< stickers::string_document >() ) throw stickers::handler_exit{ show::code::BAD_REQUEST, "required field \"" + static_cast< std::string >( field ) + "\" must be a string or null" }; auto owner_person_id{ stickers::bigid::MIN() }; try { owner_person_id = stickers::bigid::from_string( details_map[ "owner_person_id" ].get< stickers::string_document >() ); } catch( const std::invalid_argument& e ) { throw stickers::handler_exit{ show::code::BAD_REQUEST, "required field \"owner_person_id\" not a valid ID" }; } std::optional< stickers::timestamp > founded; if( !details_map[ "founded" ].is_a< stickers::null_document >() ) { bool valid{ false }; if( details_map[ "founded" ].is_a< stickers::string_document >() ) try { founded = stickers::from_iso8601_str( details_map[ "founded" ].get< stickers::string_document >() ); valid = true; } catch( const std::invalid_argument& e ) {} if( !valid ) throw stickers::handler_exit{ show::code::BAD_REQUEST, "required field \"founded\" not a valid ISO 8601 timestamp" }; } std::optional< stickers::timestamp > closed; if( !details_map[ "closed" ].is_a< stickers::null_document >() ) { bool valid{ false }; if( details_map[ "closed" ].is_a< stickers::string_document >() ) try { closed = stickers::from_iso8601_str( details_map[ "closed" ].get< stickers::string_document >() ); valid = true; } catch( const std::invalid_argument& e ) {} if( !valid ) throw stickers::handler_exit{ show::code::BAD_REQUEST, "required field \"closed\" not a valid ISO 8601 timestamp" }; } return { stickers::now(), stickers::now(), details_map[ "name" ].get< stickers::string_document >(), details_map[ "url" ].get< stickers::string_document >(), owner_person_id, founded, closed }; } } namespace stickers { void handlers::create_shop( show::request& request, const handler_vars_type& variables ) { auto auth{ authenticate( request ) }; permissions_assert_all( 
auth.user_permissions, { "edit_public_pages" } ); try { auto created{ create_shop( shop_info_from_document( parse_request_content( request ) ), { auth.user_id, "create shop handler", now(), request.client_address() } ) }; nlj::json shop_json; shop_to_json( created.id, created.info, shop_json ); auto shop_json_string{ shop_json.dump() }; show::response response{ request.connection(), show::HTTP_1_1, show::code::CREATED, { show::server_header, { "Content-Type", { "application/json" } }, { "Content-Length", { std::to_string( shop_json_string.size() ) } }, { "Location", { "/shop/" + static_cast< std::string >( created.id ) } } } }; response.sputn( shop_json_string.c_str(), shop_json_string.size() ); } catch( const no_such_record_error& e ) { throw handler_exit{ show::code::BAD_REQUEST, e.what() }; } } void handlers::get_shop( show::request& request, const handler_vars_type& variables ) { auto found_shop_id_variable{ variables.find( "shop_id" ) }; if( found_shop_id_variable == variables.end() ) throw handler_exit{ show::code::NOT_FOUND, "need a shop ID" }; auto shop_id{ bigid::MIN() }; try { shop_id = bigid::from_string( found_shop_id_variable -> second ); } catch( const std::invalid_argument& e ) { throw handler_exit{ show::code::NOT_FOUND, "need a valid shop ID" }; } try { auto info{ load_shop( shop_id ) }; nlj::json shop_json; shop_to_json( shop_id, info, shop_json ); auto shop_json_string{ shop_json.dump() }; show::response response{ request.connection(), show::HTTP_1_1, show::code::OK, { show::server_header, { "Content-Type", { "application/json" } }, { "Content-Length", { std::to_string( shop_json_string.size() ) } } } }; response.sputn( shop_json_string.c_str(), shop_json_string.size() ); } catch( const no_such_shop& e ) { throw handler_exit{ show::code::NOT_FOUND, e.what() }; } } void handlers::edit_shop( show::request& request, const handler_vars_type& variables ) { auto auth{ authenticate( request ) }; permissions_assert_all( auth.user_permissions, { 
"edit_public_pages" } ); auto found_shop_id_variable{ variables.find( "shop_id" ) }; if( found_shop_id_variable == variables.end() ) throw handler_exit{ show::code::NOT_FOUND, "need a shop ID" }; stickers::bigid shop_id{ bigid::MIN() }; try { shop_id = bigid::from_string( found_shop_id_variable -> second ); } catch( const std::invalid_argument& e ) { throw handler_exit{ show::code::NOT_FOUND, "need a valid shop ID" }; } try { auto updated_info{ update_shop( { shop_id, shop_info_from_document( parse_request_content( request ) ) }, { auth.user_id, "update shop handler", now(), request.client_address() } ) }; nlj::json shop_json; shop_to_json( shop_id, updated_info, shop_json ); auto shop_json_string = shop_json.dump(); show::response response{ request.connection(), show::HTTP_1_1, show::code::OK, { show::server_header, { "Content-Type", { "application/json" } }, { "Content-Length", { std::to_string( shop_json_string.size() ) } } } }; response.sputn( shop_json_string.c_str(), shop_json_string.size() ); } catch( const no_such_shop& e ) { throw handler_exit{ show::code::NOT_FOUND, e.what() }; } catch( const no_such_record_error& e ) { throw handler_exit{ show::code::BAD_REQUEST, e.what() }; } } void handlers::delete_shop( show::request& request, const handler_vars_type& variables ) { auto auth{ authenticate( request ) }; permissions_assert_all( auth.user_permissions, { "edit_public_pages" } ); auto found_shop_id_variable{ variables.find( "shop_id" ) }; if( found_shop_id_variable == variables.end() ) throw handler_exit{ show::code::NOT_FOUND, "need a shop ID" }; stickers::bigid shop_id{ bigid::MIN() }; try { shop_id = bigid::from_string( found_shop_id_variable -> second ); } catch( const std::invalid_argument& e ) { throw handler_exit{ show::code::NOT_FOUND, "need a valid shop ID" }; } try { delete_shop( shop_id, { auth.user_id, "delete shop handler", now(), request.client_address() } ); std::string null_json{ "null" }; show::response response{ request.connection(), 
show::HTTP_1_1, show::code::OK, { show::server_header, { "Content-Type", { "application/json" } }, { "Content-Length", { std::to_string( null_json.size() ) } } } }; response.sputn( null_json.c_str(), null_json.size() ); } catch( const no_such_shop& e ) { throw handler_exit{ show::code::NOT_FOUND, e.what() }; } } } <file_sep>/src/api/shop.hpp #pragma once #ifndef STICKERS_MOE_API_SHOP_HPP #define STICKERS_MOE_API_SHOP_HPP #include "../audit/blame.hpp" #include "../common/bigid.hpp" #include "../common/crud.hpp" #include "../common/postgres.hpp" #include "../common/timestamp.hpp" #include <exception> #include <optional> #include <string> namespace stickers { struct shop_info { timestamp created; timestamp revised; std::string name; std::string url; bigid owner_person_id; // TODO: maybe use a date type? std::optional< timestamp > founded; std::optional< timestamp > closed; }; struct shop { bigid id; shop_info info; }; shop create_shop( const shop_info&, const audit::blame& ); shop_info load_shop( const bigid & ); shop_info update_shop( const shop &, const audit::blame& ); void delete_shop( const bigid &, const audit::blame& ); class _assert_shops_exist_impl { template< class Container > friend void assert_shops_exist( pqxx::work &, const Container& ); _assert_shops_exist_impl(); static void exec( pqxx::work&, const std::string& ); }; // ACID-safe assert; if any of the supplied IDs do not correspond to a // record, this will throw `no_such_shop` for one of those IDs template< class Container = std::initializer_list< bigid > > void assert_shops_exist( pqxx::work & transaction, const Container& ids ) { _assert_shops_exist_impl::exec( transaction, postgres::format_variable_list( transaction, ids ) ); } class no_such_shop : public no_such_record_error { public: const bigid id; no_such_shop( const bigid& ); }; } #endif <file_sep>/src/server/server.hpp #pragma once #ifndef STICKERS_MOE_SERVER_SERVER_HPP #define STICKERS_MOE_SERVER_SERVER_HPP namespace stickers { void 
run_server(); } #endif <file_sep>/src/server/parse.cpp #line 2 "server/parse.cpp" #include "parse.hpp" #include "handler.hpp" #include "../common/config.hpp" #include "../common/json.hpp" #include "../common/logging.hpp" #include "../common/string_utils.hpp" #include <show/constants.hpp> #include <show/multipart.hpp> #include <cmath> #include <iterator> // std::istreambuf_iterator #include <optional> #include <vector> namespace // Utilities ///////////////////////////////////////////////////////// { struct content_type_info { std::string mime_type; std::optional< std::string > header_remainder; }; std::optional< content_type_info > content_type_from_headers( const show::headers_type& headers ) { auto found_content_type{ headers.find( "Content-Type" ) }; if( found_content_type == headers.end() || found_content_type -> second.size() != 1 ) return std::nullopt; auto& header_value{ found_content_type -> second[ 0 ] }; auto split_pos{ header_value.find( ";" ) }; auto mime_type{ header_value.substr( 0, split_pos ) }; if( split_pos != std::string::npos ) { ++split_pos; while( split_pos < header_value.size() ) switch( header_value[ split_pos ] ) { case ' ': case '\t': case '\n': case '\r': ++split_pos; break; default: return content_type_info{ mime_type, header_value.substr( split_pos ) }; } } return content_type_info{ mime_type, std::nullopt }; } std::string boundary_from_content_type_remainder( const std::string & mime_type, const std::optional< std::string >& header_remainder ) { if( header_remainder ) { std::string boundary_designator{ "boundary=" }; auto boundary_begin{ header_remainder -> find( boundary_designator ) }; if( boundary_begin != std::string::npos ) { boundary_begin += boundary_designator.size(); auto boundary_end{ header_remainder -> find( ";", boundary_begin ) }; auto boundary{ header_remainder -> substr( boundary_begin, boundary_end - boundary_begin ) }; if( boundary.size() > 0 ) return boundary; } } throw stickers::handler_exit{ 
show::code::BAD_REQUEST, ( "Missing boundary for " + stickers::log_sanitize( mime_type ) ) }; } struct multipart_info { std::optional< std::string > name; std::optional< std::string > filename; }; std::optional< std::string > decode_filename( std::string::size_type filename_start, const std::string & header_value, const show::headers_type& request_headers ) { auto found_user_agent{ request_headers.find( "User-Agent" ) }; if( found_user_agent != request_headers.end() && found_user_agent -> second.size() == 1 ) { auto& user_agent{ found_user_agent -> second[ 0 ] }; if( user_agent.find( "Chrome" ) != std::string::npos || user_agent.find( "Safari" ) != std::string::npos ) { /* Parse from `filename="` to `"` non-inclusive, then replace all `%22` with `"` (but no other percent/URL-encoded sequences) */ auto filename{ header_value.substr( filename_start, header_value.find( "\"", filename_start ) - filename_start ) }; std::string::size_type esc_seq_pos; while( ( esc_seq_pos = header_value.find( "%22" ) ) != std::string::npos ) filename.replace( esc_seq_pos, 3, "\"" ); return filename; } else if( user_agent.find( "Firefox" ) != std::string::npos ) { /* Parse from `filename="` to `"` non-inclusive, replacing all `\"` with `"` (but no other escape sequences, including `\\`) */ auto raw_filename{ header_value.substr( filename_start ) }; std::string filename; filename.reserve( raw_filename.size() ); for( std::string::size_type i{ 0 }; i < raw_filename.size(); ++i ) if( raw_filename[ i ] == '\\' && i + 1 < raw_filename.size() && raw_filename[ i + 1 ] == '"' ) { filename += '"'; ++i; } else if( raw_filename[ i ] == '"' ) break; else filename += raw_filename[ i ]; return filename; } else if( user_agent.find( "Windows" ) != std::string::npos ) /* Apparently IE doesn't encode the filename at all because `"` is not permissable in NTFS filenames anyways. 
Source: https://github.com/rack/rack/issues/323 */ return header_value.substr( filename_start, header_value.find( "\"", filename_start ) - filename_start ); } // Best-guess if no other method worked/matched #if 01 auto end_pos{ header_value.find( "\"; ", filename_start ) }; if( end_pos == std::string::npos ) end_pos = header_value.find( "\"", filename_start ); return header_value.substr( filename_start, end_pos - filename_start ); #else return std::nullopt; #endif } std::optional< multipart_info > split_multipart_info( const show::headers_type& headers, const show::headers_type& request_headers ) { auto found_content_disp{ headers.find( "Content-Disposition" ) }; if( found_content_disp == headers.end() || found_content_disp -> second.size() != 1 ) return std::nullopt; multipart_info info; /* Examples: Content-Disposition: form-data; name="submit-name" Content-Disposition: file; filename="file1.txt" Content-Disposition: form-data; name="files"; filename="file1.txt" */ auto& header_value{ found_content_disp -> second[ 0 ] }; std::string name_designator { "name=\"" }; std::string filename_designator_1{ "filename*=UTF-8''" }; std::string filename_designator_2{ "filename=\"" }; auto name_begin{ header_value.find( name_designator ) }; if( name_begin != std::string::npos ) { name_begin += name_designator.size(); auto name_end{ header_value.find( "\"; ", name_begin ) }; if( name_end == std::string::npos ) name_end = header_value.find( "\"", name_begin ); info.name = header_value.substr( name_begin, name_end - name_begin ); } auto filename_begin{ header_value.find( filename_designator_1 ) }; if( filename_begin != std::string::npos ) { filename_begin += filename_designator_1.size(); try { info.filename = show::url_decode( header_value.substr( filename_begin, header_value.find( ";", filename_begin ) - filename_begin ) ); } catch( const show::url_decode_error& e ) { throw stickers::handler_exit{ show::code::BAD_REQUEST, ( "RFC 5987 filename in Content-Disposition header not " 
"a valid URL-encoded string: " + std::string{ e.what() } ) }; } } else { filename_begin = header_value.find( filename_designator_2 ); if( filename_begin != std::string::npos ) info.filename = decode_filename( filename_begin + filename_designator_2.size(), header_value, request_headers ); } return info; } stickers::document json_to_document( nlj::json& parsed ) { if( parsed.is_null() ) return nullptr; else if( parsed.is_boolean() ) return parsed.get< bool >(); else if( parsed.is_number() ) { double integer_part; double fraction_part{ std::modf( parsed.get< double >(), &integer_part ) }; if( fraction_part > 0 ) return parsed.get< double >(); else return parsed.get< long >(); } else if( parsed.is_object() ) { stickers::document doc{ stickers::map_document{} }; for( auto pair = parsed.begin(); pair != parsed.end(); ++pair ) doc.get< stickers::map_document >()[ pair.key() ] = json_to_document( pair.value() ); return doc; } else if( parsed.is_array() ) { stickers::document doc{ stickers::map_document{} }; for( stickers::int_document i = 0; i < parsed.size(); ++i ) doc.get< stickers::map_document >()[ i ] = json_to_document( parsed[ i ] ); return doc; } else /*if( parsed.is_string() )*/ return std::move( parsed.get< std::string >() ); } } namespace // Parser declarations /////////////////////////////////////////////// { /* These (can be) recursive so they need the following arguments: buffer stream buffer being read from headers headers for the current segment of content (may be request headers) method request method for inclusion in error messages request_headers top-level request headers for user agent string, etc. 
*/
    // Forward declarations -- these parsers can recurse into each other via
    // parse_request_content_recursive() (e.g. a multipart segment may itself
    // contain JSON, form-urlencoded, or nested multipart content)
    
    stickers::document parse_json(
        std::streambuf & buffer,
        const show::headers_type& headers,
        const std::string & method,
        const show::headers_type& request_headers
    );
    stickers::document parse_form_urlencoded(
        std::streambuf & buffer,
        const show::headers_type& headers,
        const std::string & method,
        const show::headers_type& request_headers
    );
    stickers::document parse_multipart(
        std::streambuf & buffer,
        const show::headers_type& headers,
        const std::string & method,
        const show::headers_type& request_headers
    );
}

namespace // Recursive request parse implementation ////////////////////////
{
    // Dispatch to the appropriate parser based on this segment's
    // Content-Type header; any unrecognized (or missing) type falls through
    // to a single binary blob (the raw bytes as a string document)
    stickers::document parse_request_content_recursive(
        std::streambuf & buffer,
        const show::headers_type& headers,
        const std::string & method,
        const show::headers_type& request_headers
    )
    {
        auto content_type{ content_type_from_headers( headers ) };
        if( content_type )
        {
            auto& mime_type{ content_type -> mime_type };
            // e.g. "multipart" from "multipart/form-data" -- any multipart
            // subtype is handled by the same parser
            auto mime_supertype{ mime_type.substr(
                0,
                mime_type.find( "/" )
            ) };
            
            if( mime_type == "application/json" )
                return parse_json(
                    buffer,
                    headers,
                    method,
                    request_headers
                );
            else if( mime_type == "application/x-www-form-urlencoded" )
                return parse_form_urlencoded(
                    buffer,
                    headers,
                    method,
                    request_headers
                );
            else if( mime_supertype == "multipart" )
                return parse_multipart(
                    buffer,
                    headers,
                    method,
                    request_headers
                );
        }
        
        // Default to a single binary blob; drains the buffer to EOF
        stickers::document parsed{ std::string{
            std::istreambuf_iterator< char >{ &buffer },
            {}
        } };
        if( content_type )
            parsed.mime_type = content_type -> mime_type;
        return parsed;
    }
}

namespace // Parser implementations ////////////////////////////////////////
{
    // Parse the buffer as a JSON document; signals 400 Bad Request (via
    // handler_exit) on malformed input
    stickers::document parse_json(
        std::streambuf & buffer,
        const show::headers_type& headers,
        const std::string & method,
        const show::headers_type& request_headers
    )
    {
        nlj::json parsed;
        bool malformed_json{ false };
        try
        {
            std::istream request_stream{ &buffer };
            request_stream >> parsed;
            // A stream left in a non-good state also counts as a parse
            // failure
            malformed_json = !request_stream.good();
        }
        catch( const nlj::json::parse_error& e )
        {
malformed_json = true; } if( malformed_json ) throw stickers::handler_exit{ show::code::BAD_REQUEST, "Malformed JSON payload" }; else { auto doc{ json_to_document( parsed ) }; doc.mime_type = "application/json"; return doc; } } stickers::document parse_form_urlencoded( std::streambuf & buffer, const show::headers_type& headers, const std::string & method, const show::headers_type& request_headers ) { stickers::document doc{ stickers::map_document{} }; doc.mime_type = "application/x-www-form-urlencoded"; for( const auto& exp_set : stickers::split< std::vector< std::string > >( std::string{ std::istreambuf_iterator< char >{ &buffer }, {} }, std::string{ "&" } ) ) { auto exp{ stickers::split< std::vector< std::string > >( exp_set, std::string{ "=" } ) }; if( exp.size() == 1 ) doc.get< stickers::map_document >()[ show::url_decode( exp[ 0 ] ) ] = true; else { auto value{ show::url_decode( exp.back() ) }; for( auto i = 0; i < exp.size() - 1; ++i ) doc.get< stickers::map_document >()[ show::url_decode( exp[ i ] ) ] = value; } } return doc; } stickers::document parse_multipart( std::streambuf & buffer, const show::headers_type& headers, const std::string & method, const show::headers_type& request_headers ) { auto content_type{ content_type_from_headers( headers ) }; if( !content_type ) return std::string{ std::istreambuf_iterator< char >{ &buffer }, {} }; show::multipart parser{ buffer, boundary_from_content_type_remainder( content_type -> mime_type, content_type -> header_remainder ) }; stickers::document doc{ stickers::map_document{} }; for( auto& segment : parser ) { stickers::document segment_name{}; auto segment_doc{ parse_request_content_recursive( segment, segment.headers(), method, request_headers ) }; auto info{ split_multipart_info( segment.headers(), request_headers ) }; if( info ) { if( info -> filename ) segment_doc.name = *( info -> filename ); if( info -> name ) segment_name = *( info -> name ); } doc.get< stickers::map_document >().emplace( std::make_pair( 
std::move( segment_name ), std::move( segment_doc ) ) ); } doc.mime_type = content_type -> mime_type; return doc; } } namespace stickers // Request parse //////////////////////////////////////////// { document parse_request_content( show::request& request ) { auto max_length{ config()[ "server" ][ "max_request_bytes" ].get< std::streamsize >() }; if( request.unknown_content_length() ) throw handler_exit{ show::code::BAD_REQUEST, "Missing \"Content-Length\" header" }; else if( request.content_length() > max_length ) throw handler_exit{ show::code::PAYLOAD_TOO_LARGE, ( "Maximum request content size is " + std::to_string( max_length ) + " bytes" ) }; return parse_request_content_recursive( request, request.headers(), request.method(), request.headers() ); } } <file_sep>/src/common/timestamp.cpp #line 2 "common/timestamp.cpp" #include "timestamp.hpp" #include <date/iso_week.h> #include <iomanip> #include <sstream> namespace stickers { timestamp current_timestamp() { return std::chrono::system_clock::now(); } timestamp from_iso8601_str( const std::string& s ) { timestamp ts; if( !from_iso8601_str( s, ts ) ) throw std::invalid_argument{ "failed to parse " + s + " as an ISO 8601 timestamp" }; return ts; } bool from_iso8601_str( const std::string& s, timestamp& ts ) { std::istringstream stream{ s }; stream >> date::parse( "%F %T%z", ts ); return !stream.fail(); } std::string to_iso8601_str( const timestamp& ts ) { return date::format( "%F %T%z", ts ); } std::string to_http_ts_str( const timestamp& ts ) { std::stringstream weekday_abbreviation; weekday_abbreviation << static_cast< iso_week::year_weeknum_weekday >( std::chrono::time_point_cast< date::days >( ts ) ).weekday(); return ( weekday_abbreviation.str() // timestamps serialize to UTC/GMT by default + date::format( " %d-%m-%Y %H:%M:%S GMT", std::chrono::time_point_cast< std::chrono::seconds >( ts ) ) ); } timestamp from_unix_time( unsigned int unix_time ) { return timestamp{ std::chrono::duration_cast< 
std::chrono::microseconds >( std::chrono::seconds{ unix_time } ) }; } unsigned int to_unix_time( const timestamp& ts ) { return std::chrono::duration_cast< std::chrono::seconds >( ts.time_since_epoch() ).count(); } } <file_sep>/src/common/config.hpp #pragma once #ifndef STICKERS_MOE_COMMON_CONFIG_HPP #define STICKERS_MOE_COMMON_CONFIG_HPP #include <string> #include "json.hpp" namespace stickers { const nlj::json& config(); void set_config( const nlj::json & ); void set_config( const std::string& ); // void open_config( const std::string& ); enum class log_level { SILENT = 00, ERROR = 10, WARNING = 20, INFO = 30, VERBOSE = 40, DEBUG = 50 }; log_level current_log_level(); } #endif <file_sep>/src/common/uuid.cpp #line 2 "common/uuid.cpp" #include "uuid.hpp" #include <cryptopp/hex.h> #include <uuid/uuid.h> namespace stickers { uuid::uuid() {} uuid::uuid( const std::string& str ) { if( str.size() != 16 ) throw std::invalid_argument{ "UUID string must be 16 bytes" }; value = str; } uuid::uuid( const char* c_str, std::size_t c_str_len ) : uuid{ std::string{ c_str, c_str_len } } {} uuid::uuid( const uuid& o ) : value{ o.value } {} std::string uuid::raw_value() const { return value; } std::string uuid::hex_value() const { std::string hex; CryptoPP::StringSource{ value, true, new CryptoPP::HexEncoder{ new CryptoPP::StringSink{ hex } } }; return hex; } std::string uuid::hex_value_8_4_4_4_12() const { auto unbroken{ hex_value() }; return ( unbroken.substr( 0, 8 ) + "-" + unbroken.substr( 8, 4 ) + "-" + unbroken.substr( 12, 4 ) + "-" + unbroken.substr( 16, 4 ) + "-" + unbroken.substr( 20, 12 ) ); } uuid uuid::generate() { uuid_t generated; uuid_generate( generated ); return { reinterpret_cast< char* >( generated ), 16 }; } uuid uuid::from_string( const std::string& s ) { std::string::size_type max_possible_length{ 32 + 4 }; if( s.size() > max_possible_length ) throw std::invalid_argument{ "max standard UUID representation length is " + std::to_string( max_possible_length ) + " 
chars" }; std::string filtered( 32, '\0' ); for( auto c : s ) if( c != '-' ) filtered += c; std::string unhexed; CryptoPP::StringSource{ filtered, true, new CryptoPP::HexDecoder{ new CryptoPP::StringSink{ unhexed } } }; return uuid{ unhexed }; } } <file_sep>/src/handlers/product.cpp #line 2 "handlers/product.cpp" #include "handlers.hpp" #include <show/constants.hpp> namespace stickers { void handlers::create_product( show::request& request, const handler_vars_type& variables ) { throw handler_exit{ show::code::NOT_IMPLEMENTED, "" }; } void handlers::get_product( show::request& request, const handler_vars_type& variables ) { throw handler_exit{ show::code::NOT_IMPLEMENTED, "" }; } void handlers::edit_product( show::request& request, const handler_vars_type& variables ) { throw handler_exit{ show::code::NOT_IMPLEMENTED, "" }; } void handlers::delete_product( show::request& request, const handler_vars_type& variables ) { throw handler_exit{ show::code::NOT_IMPLEMENTED, "" }; } } <file_sep>/src/common/document.cpp #line 2 "common/document.cpp" #include "document.hpp" #include "../common/logging.hpp" #include <sstream> namespace stickers // Document ///////////////////////////////////////////////// { document::document( const char* c_str ) : document( std::string{ c_str } ) {} const document& map_document::operator []( const document_key_type& key ) const { return this -> at( key ); } std::ostream& operator <<( std::ostream& out, const document& doc ) { out << "doc("; if( doc.mime_type ) out << "mime=\"" << log_sanitize( *doc.mime_type ) << "\"" ; if( doc.name ) out << ( doc.mime_type ? 
", " : "" ) << "name=\"" << log_sanitize( *doc.name ) << "\"" ; out << "):"; if( doc.is_a< null_document >() ) { out << "null"; } else if( doc.is_a< string_document >() ) { if( doc.get< string_document >().size() > 64 ) out << "<string[" << doc.get< string_document >().size() << "]>" ; else out << '"' << log_sanitize( doc.get< string_document >() ) << '"' ; } else if( doc.is_a< bool_document >() ) { out << ( doc.get< bool_document >() ? "true" : "false" ); } else if( doc.is_a< int_document >() ) { out << doc.get< int_document >(); } else if( doc.is_a< float_document >() ) { out << doc.get< float_document >(); } else if( doc.is_a< map_document >() ) { out << '{'; for( auto iter = doc.get< map_document >().begin(); iter != doc.get< map_document >().end(); /* incremented in loop */ ) { out << iter -> first << " => " << iter -> second << ( ++iter != doc.get< map_document >().end() ? ", " : "" ) ; } out << '}'; } return out; } } <file_sep>/src/api/design.hpp #pragma once #ifndef STICKERS_MOE_API_DESIGN_HPP #define STICKERS_MOE_API_DESIGN_HPP #include "../audit/blame.hpp" #include "../common/bigid.hpp" #include "../common/hashing.hpp" #include "../common/postgres.hpp" #include "../common/timestamp.hpp" #include <exception> #include <string> #include <vector> #include <initializer_list> namespace stickers { struct design_info { timestamp created; timestamp revised; std::string description; std::vector< sha256 > images; std::vector< bigid > contributors; }; struct design { bigid id; design_info info; }; design create_design( const design_info&, const audit::blame& ); design_info load_design( const bigid & ); design_info update_design( const design &, const audit::blame& ); void delete_design( const bigid &, const audit::blame& ); class _assert_designs_exist_impl { template< class Container > friend void assert_designs_exist( pqxx::work &, const Container& ); _assert_designs_exist_impl(); static void exec( pqxx::work&, const std::string& ); }; // ACID-safe assert; if any of 
the supplied IDs do not correspond to a // record, this will throw `no_such_design` for one of those IDs template< class Container = std::initializer_list< bigid > > void assert_designs_exist( pqxx::work & transaction, const Container& ids ) { _assert_designs_exist_impl::exec( transaction, postgres::format_variable_list( transaction, ids ) ); } class no_such_design : public std::runtime_error { public: const bigid id; no_such_design( const bigid& ); }; } #endif <file_sep>/src/common/json.hpp #pragma once #ifndef STICKERS_MOE_COMMON_JSON_HPP #define STICKERS_MOE_COMMON_JSON_HPP #include <nlohmann/json.hpp> namespace nlj = nlohmann; #endif <file_sep>/src/api/design.cpp #line 2 "api/design.cpp" #include "design.hpp" #include "../api/person.hpp" #include "../common/formatting.hpp" #include "../common/logging.hpp" #include <algorithm> // std::set_difference() #include <set> #include <tuple> namespace { std::vector< stickers::sha256 > get_images_for_design( const stickers::bigid& id, pqxx::work& transaction ) { auto result{ transaction.exec_params( PSQL( SELECT image_hash FROM designs.design_images WHERE design_id = $1 AND NOT deleted ORDER BY weight ; ), id ) }; std::vector< stickers::sha256 > images; for( const auto& row : result ) images.push_back( row[ "image_hash" ].as< stickers::sha256 >() ); return images; } std::vector< stickers::bigid > get_contributors_for_design( const stickers::bigid& id, pqxx::work& transaction ) { auto result{ transaction.exec_params( PSQL( SELECT person_id FROM designs.design_contributors WHERE design_id = $1 AND NOT deleted ; ), id ) }; std::vector< stickers::bigid > contributors; for( const auto& row : result ) contributors.push_back( row[ "person_id" ].as< stickers::bigid >() ); return contributors; } stickers::bigid write_design_details( stickers::design & design, const stickers::audit::blame& blame, bool generate_id ) { auto connection{ stickers::postgres::connect() }; pqxx::work transaction{ *connection }; if( generate_id ) { auto 
result{ transaction.exec_params( PSQL( INSERT INTO designs.designs_core ( design_id, _a_revision ) VALUES ( DEFAULT, $1 ) RETURNING design_id ; ), blame.when ) }; result[ 0 ][ "design_id" ].to< stickers::bigid >( design.id ); } else stickers::assert_designs_exist( transaction, { design.id } ); transaction.exec_params( PSQL( INSERT INTO designs.design_revisions ( design_id, revised, revised_by, revised_from, description ) VALUES ( $1, $2, $3, $4, $5 ) ; ), design.id, blame.when, blame.who, blame.where, design.info.description ); std::vector< stickers::sha256 > old_images; std::set < stickers::bigid > contributors_to_remove; std::set < stickers::bigid > contributors_to_add; // Don't have to do as much work if we know there are no relations if( generate_id ) { for( const auto& contributor : design.info.contributors ) contributors_to_add.insert( contributor ); } else { old_images = get_images_for_design( design.id, transaction ); std::set< stickers::bigid > old_contributors; std::set< stickers::bigid > new_contributors; for( const auto& contributor : get_contributors_for_design( design.id, transaction ) ) old_contributors.insert( contributor ); for( const auto& contributor : design.info.contributors ) new_contributors.insert( contributor ); std::set_difference( // In old but not in new old_contributors.begin(), old_contributors.end(), new_contributors.begin(), new_contributors.end(), std::inserter( contributors_to_remove, contributors_to_remove.begin() ) ); std::set_difference( // In new but not in old new_contributors.begin(), new_contributors.end(), old_contributors.begin(), old_contributors.end(), std::inserter( contributors_to_add, contributors_to_add.begin() ) ); } if( design.info.images != old_images ) { if( old_images.size() > 0 ) { // TODO: remove images } if( design.info.images.size() > 0 ) { // TODO: add images & weights } } if( contributors_to_remove.size() > 0 ) { // No need to check these exist, they were just pulled from the DB std::string query_string; 
ff::fmt( query_string, PSQL( UPDATE designs.design_contributor_revisions SET removed = $2 removed_by = $3 removed_from = $4 WHERE design_id = $1 AND person_id IN ( {0} ) AND removed IS NULL ; ), stickers::postgres::format_variable_list( transaction, contributors_to_remove ) ); transaction.exec_params( query_string, design.id, blame.when, blame.who, blame.where ); } if( contributors_to_add.size() > 0 ) { stickers::assert_people_exist( transaction, contributors_to_add ); auto columns = { "design_id", "added", "added_by", "added_from", "person_id" }; pqxx::tablewriter inserter( transaction, "designs.design_contributor_revisions", columns.begin(), columns.end() ); for( const auto& contributor : contributors_to_add ) { auto values = { pqxx::string_traits< stickers::bigid >::to_string( design.id ), pqxx::string_traits< stickers::timestamp >::to_string( blame.when ), pqxx::string_traits< stickers::bigid >::to_string( blame.who ), pqxx::string_traits< std::string >::to_string( blame.where ), pqxx::string_traits< stickers::bigid >::to_string( contributor ) }; inserter.insert( values ); } inserter.complete(); } transaction.commit(); return design.id; } } namespace stickers // Design /////////////////////////////////////////////////// { design create_design( const design_info & info, const audit::blame& blame ) { design s{ bigid::MIN(), info }; write_design_details( s, blame, true ); return s; } design_info load_design( const bigid& id ) { auto connection{ postgres::connect() }; pqxx::work transaction{ *connection }; auto result{ transaction.exec_params( PSQL( SELECT created, revised, description FROM designs.designs WHERE design_id = $1 AND NOT deleted ; ), id ) }; if( result.size() < 1 ) throw no_such_design{ id }; auto& row{ result[ 0 ] }; design_info info{ row[ "created" ].as< timestamp >(), row[ "revised" ].as< timestamp >(), row[ "description" ].as< std::string >(), get_images_for_design ( id, transaction ), get_contributors_for_design( id, transaction ) }; 
transaction.commit(); return info; } design_info update_design( const design& s, const audit::blame& blame ) { auto updated_design{ s }; write_design_details( updated_design, blame, true ); return updated_design.info; } void delete_design( const bigid& id, const audit::blame& blame ) { auto connection{ postgres::connect() }; pqxx::work transaction{ *connection }; assert_designs_exist( transaction, { id } ); auto result{ transaction.exec_params( PSQL( INSERT INTO designs.design_deletions ( design_id, deleted, deleted_by, deleted_from ) VALUES ( $1, $2, $3, $4 ) ; ), id, blame.when, blame.who, blame.where ) }; transaction.commit(); } } namespace stickers // Exception //////////////////////////////////////////////// { no_such_design::no_such_design( const bigid& id ) : std::runtime_error{ "no such design with ID " + static_cast< std::string >( id ) }, id{ id } {} } namespace stickers // Assertion //////////////////////////////////////////////// { void _assert_designs_exist_impl::exec( pqxx::work & transaction, const std::string& ids_string ) { std::string query_string; ff::fmt( query_string, PSQL( WITH lookfor AS ( SELECT UNNEST( ARRAY[ {0} ] ) AS design_id ) SELECT lookfor.design_id FROM lookfor LEFT JOIN designs.designs_core AS dc ON dc.design_id = lookfor.design_id LEFT JOIN designs.design_deletions AS dd ON dd.design_id = dc.design_id WHERE dc.design_id IS NULL OR dd.design_id IS NOT NULL ; ), ids_string ); auto result{ transaction.exec( query_string ) }; if( result.size() > 0 ) throw no_such_design{ result[ 0 ][ 0 ].as< bigid >() }; } } <file_sep>/src/server/routing.cpp #line 2 "server/routing.cpp" #include "routing.hpp" #include "server.hpp" #include "../common/auth.hpp" #include "../common/config.hpp" #include "../common/logging.hpp" #include "../common/json.hpp" #include "../common/string_utils.hpp" #include "../handlers/handlers.hpp" #include <show/constants.hpp> #include <exception> #include <map> #include <vector> namespace { struct routing_node { using 
methods_type = std::map< std::string, stickers::handler_type, show::_less_ignore_case_ASCII >; using subs_type = std::map< std::string, routing_node >; using variable_type = std::pair< std::string, routing_node >; methods_type methods; subs_type subs; variable_type* variable; }; void handle_options_request( show::request& request, const routing_node* current_node ) { std::vector< std::string > methods_list, subs_list; for( auto& method : current_node -> methods ) methods_list.push_back( method.first ); for( auto& sub : current_node -> subs ) subs_list.push_back( sub.first ); nlj::json options_object = { { "methods" , methods_list }, { "subs" , subs_list }, { "variable_sub", static_cast< bool >( current_node -> variable ) } }; std::string json_string = options_object.dump(); show::response response{ request.connection(), show::HTTP_1_1, show::code::OK, { show::server_header, { "Content-Type" , { "application/json" } }, { "Content-Length", { std::to_string( json_string.size() ) } } } }; response.sputn( json_string.c_str(), json_string.size() ); } } namespace { routing_node::variable_type user_manip{ "user_id", { { { "GET" , stickers::handlers:: get_user }, { "PUT" , stickers::handlers:: edit_user }, { "DELETE", stickers::handlers::delete_user } }, {}, nullptr } }; routing_node::variable_type list_entry_manip{ "list_entry_id", { { { "PUT" , stickers::handlers::update_list_item }, { "DELETE", stickers::handlers::remove_list_item } }, {}, nullptr } }; routing_node::variable_type list_subs{ "user_id", { { { "GET" , stickers::handlers::get_list }, { "POST", stickers::handlers::add_list_item } }, {}, &list_entry_manip } }; routing_node::variable_type person_manip{ "person_id", { { { "GET" , stickers::handlers:: get_person }, { "PUT" , stickers::handlers:: edit_person }, { "DELETE", stickers::handlers::delete_person } }, {}, nullptr } }; routing_node::variable_type shop_manip{ "shop_id", { { { "GET" , stickers::handlers:: get_shop }, { "PUT" , stickers::handlers:: edit_shop 
}, { "DELETE", stickers::handlers::delete_shop } }, {}, nullptr } }; routing_node::variable_type design_manip{ "design_id", { { { "GET" , stickers::handlers:: get_design }, { "PUT" , stickers::handlers:: edit_design }, { "DELETE", stickers::handlers::delete_design } }, {}, nullptr } }; routing_node::variable_type product_manip{ "product_id", { { { "GET" , stickers::handlers:: get_product }, { "PUT" , stickers::handlers:: edit_product }, { "DELETE", stickers::handlers::delete_product } }, {}, nullptr } }; routing_node::variable_type media_info{ "hash", { { { "GET", stickers::handlers::get_media_info } }, {}, nullptr } }; const routing_node tree{ {}, { { "signup", { { { "POST", stickers::handlers::signup } }, {}, nullptr } }, { "login", { { { "POST", stickers::handlers::login } }, {}, nullptr } }, { "user", { { { "POST", stickers::handlers::create_user } }, {}, &user_manip } }, { "list", { {}, {}, &list_subs } }, { "person", { { { "POST", stickers::handlers::create_person } }, {}, &person_manip } }, { "shop", { { { "POST", stickers::handlers::create_shop } }, {}, &shop_manip } }, { "design", { { { "POST", stickers::handlers::create_design } }, {}, &design_manip } }, { "product", { { { "POST", stickers::handlers::create_product } }, {}, &product_manip } }, { "media", { {}, { { "upload", { { { "POST", stickers::handlers::upload_media } }, {}, nullptr } }, { "info", { {}, {}, &media_info } } }, nullptr } } }, nullptr }; } namespace stickers { void route_request( show::request& request ) { bool handler_finished{ false }; // Make copies of these constants: show::response_code error_code { show::code::BAD_REQUEST }; show::headers_type error_headers{ show::server_header }; std::string error_message; try { handler_vars_type variables; // Need a mutable pointer to const data, so `const auto*` const auto* current_node{ &tree }; for( auto& element : request.path() ) { auto found_sub{ current_node -> subs.find( element ) }; if( found_sub != current_node -> subs.end() ) { 
current_node = &( found_sub -> second ); } else if( current_node -> variable ) { variables[ current_node -> variable -> first ] = element; current_node = &( current_node -> variable -> second ); } else throw handler_exit{ show::code::NOT_FOUND, "" }; } auto found_method = current_node -> methods.find( request.method() ); if( found_method != current_node -> methods.end() ) { found_method -> second( request, variables ); } else if( request.method() == "OPTIONS" ) { handle_options_request( request, current_node ); } else if( request.method() == "HEAD" ) { // TODO: HEAD method implementation for CORS throw handler_exit{ show::code::NOT_IMPLEMENTED, "CORS not implemented" }; } else throw handler_exit{ show::code::METHOD_NOT_ALLOWED, "" }; handler_finished = true; } catch( const show::connection_interrupted& ci ) { throw; } catch( const handler_exit& he ) { error_code = he.response_code; error_message = he.message; } catch( const authentication_error& ae ) { error_code = show::code::UNAUTHORIZED; // HTTP :^) error_message = "this action requires authentication credentials"; error_headers[ "WWW-Authenticate" ] = { "Bearer realm=\"stickers.moe JWT\"" }; STICKERS_LOG( log_level::INFO, "unauthenticated ", request.method(), " request from ", request.client_address(), " on ", join( request.path(), std::string{ "/" } ), ": ", ae.what() ); } catch( const authorization_error& ae ) { error_code = show::code::FORBIDDEN; error_message = "you are not permitted to perform this action (" + static_cast< std::string >( ae.what() ) + ")" ; STICKERS_LOG( log_level::INFO, "unauthorized ", request.method(), " request from ", request.client_address(), " on ", join( request.path(), std::string{ "/" } ), ": ", ae.what() ); } catch( const std::exception& e ) { error_code = show::code::INTERNAL_SERVER_ERROR; error_message = "please try again later"; STICKERS_LOG( log_level::ERROR, "uncaught std::exception in route_request(show::request&): ", e.what() ); } catch( ... 
) { error_code = show::code::INTERNAL_SERVER_ERROR; error_message = "please try again later"; STICKERS_LOG( log_level::ERROR, "uncaught non-std::exception in route_request(show::request&)" ); } if( !request.unknown_content_length() ) request.flush(); if( handler_finished ) return; nlj::json error_object{ { "message", error_message }, { "contact", config()[ "server" ][ "admin" ] } }; std::string error_json{ error_object.dump() }; error_headers[ "Content-Type" ] = { "application/json" }; error_headers[ "Content-Length" ] = { std::to_string( error_json.size() ) }; show::response response{ request.connection(), show::HTTP_1_1, error_code, error_headers }; response.sputn( error_json.c_str(), error_json.size() ); } } <file_sep>/src/server/main.cpp #line 2 "server/main.cpp" #include "server.hpp" #include "../common/config.hpp" #include "../common/json.hpp" #include "../common/logging.hpp" #include <fstream> #include <iostream> #include <cstdlib> // std::srand() #include <ctime> // std::time() int main( int argc, char* argv[] ) { if( argc < 2 ) { STICKERS_LOG( stickers::log_level::ERROR, "usage: ", argv[ 0 ], " config.json" ); return -1; } std::srand( std::time( nullptr ) ); try { { nlj::json config; std::ifstream config_file{ argv[ 1 ] }; if( config_file.is_open() ) { config_file >> config; if( !config_file.good() ) { STICKERS_LOG( stickers::log_level::ERROR, "config file ", argv[ 1 ], " not a valid JSON file" ); return -1; } } else { STICKERS_LOG( stickers::log_level::ERROR, "could not open config file ", argv[ 1 ] ); return -1; } stickers::set_config( config ); } stickers::run_server(); } catch( const std::exception &e ) { STICKERS_LOG( stickers::log_level::ERROR, "uncaught std::exception in main(): ", e.what() ); return -1; } catch( ... 
) { STICKERS_LOG( stickers::log_level::ERROR, "uncaught non-std::exception in main()" ); return -1; } return 0; } <file_sep>/src/common/document.hpp #pragma once #ifndef STICKERS_MOE_COMMON_DOCUMENT_HPP #define STICKERS_MOE_COMMON_DOCUMENT_HPP #include <show.hpp> #include <map> #include <optional> #include <ostream> #include <string> #include <variant> #include <vector> namespace stickers { class document; using document_key_type = document; using null_document = std::monostate; using string_document = std::string; using bool_document = bool; using int_document = long; using float_document = double; // Rather than just alias `std::map<...>` to `map_document`, make it a new // class that exposes `std::map::at()` using the access operator class map_document : public std::map< document_key_type, document > { public: using std::map< document_key_type, document >::map; using std::map< document_key_type, document >::operator []; const document& operator []( const document_key_type& ) const; }; using _document_base = std::variant< null_document, string_document, bool_document, int_document, float_document, map_document >; // Requires public inheritance so comparison operators can be used class document : public _document_base { public: // Inherit all of `std::variant`'s constructors... using _document_base::variant; // ... and add a string-type constructor from C-strings because // `std::variant` (understandably) can't do this by default, but it's // nice to have for map access. 
document( const char* ); std::optional< std::string > mime_type; std::optional< std::string > name; template< typename T > constexpr bool is_a() const { return std::holds_alternative< T >( *this ); } template< typename T > constexpr const T& get() const { return std::get< T >( *this ); } template< typename T > constexpr T& get() { return std::get< T >( *this ); } }; // Lossy, intended for debugging purposes std::ostream& operator <<( std::ostream&, const document& ); } #endif <file_sep>/src/common/bigid.hpp #pragma once #ifndef STICKERS_MOE_COMMON_BIGID_HPP #define STICKERS_MOE_COMMON_BIGID_HPP #include <exception> #include <string> #include "postgres.hpp" namespace stickers { class bigid { // So libpqxx can use bigid's protected default constructor friend bigid pqxx::field::as< bigid >() const; friend bigid pqxx::field::as< bigid >( const bigid& ) const; private: long long value; bigid(); public: bigid( long long ); bigid( const bigid& ); operator long long( ) const; operator std::string( ) const; operator bool( ) const; const bigid& operator =( const bigid& ) ; const bigid& operator =( long long ) ; bigid operator +( long long ) const; bigid operator -( long long ) const; const bigid& operator ++( ) ; const bigid& operator --( ) ; const bigid& operator +=( long long ) ; const bigid& operator -=( long long ) ; bool operator ==( const bigid& ) const; bool operator !=( const bigid& ) const; bool operator <( const bigid& ) const; bool operator >( const bigid& ) const; bool operator <=( const bigid& ) const; bool operator >=( const bigid& ) const; static bigid MIN(); static bigid MAX(); static bigid from_string( const std::string& ); }; } // Template specialization of `pqxx::string_traits<>(&)` for `stickers::bigid`, // which allows use of `pqxx::field::to<>(&)` and `pqxx::field::as<>(&)` namespace pqxx { template<> struct string_traits< stickers::bigid > { using subject_type = stickers::bigid; static constexpr const char* name() noexcept { return "stickers::bigid"; } 
static constexpr bool has_null() noexcept { return false; } static bool is_null( const stickers::bigid& ) { return false; } [[noreturn]] static stickers::bigid null() { internal::throw_null_conversion( name() ); } static void from_string( const char str[], stickers::bigid& id ) { bool conversion_error{ false }; try { long long llid; string_traits< long long >::from_string( str, llid ); id = llid; } catch( const std::invalid_argument& e ) { throw argument_error{ "Failed conversion to " + static_cast< std::string >( name() ) + ": '" + static_cast< std::string >( str ) + "'" }; } } static std::string to_string( const stickers::bigid& id ) { return string_traits< long long >::to_string( id ); } }; } #endif <file_sep>/src/server/routing.hpp #pragma once #ifndef STICKERS_MOE_SERVER_ROUTING_HPP #define STICKERS_MOE_SERVER_ROUTING_HPP #include <show.hpp> namespace stickers { void route_request( show::request& ); } #endif <file_sep>/src/common/bigid.cpp #line 2 "common/bigid.cpp" #include "bigid.hpp" namespace stickers // BigID //////////////////////////////////////////////////// { bigid::bigid() {} bigid::bigid( long long v ) { *this = v; } bigid::bigid( const bigid& o ) { *this = o; } bigid::operator long long() const { return value; } bigid::operator std::string() const { return std::to_string( value ); } bigid::operator bool() const { // Should always be true return static_cast< bool >( value ); } const bigid& bigid::operator =( const bigid& o ) { value = o.value; return *this; } const bigid& bigid::operator =( long long v ) { if( v >= MIN().value && v <= MAX().value ) value = v; else throw std::invalid_argument{ "can't convert \"" + std::to_string( v ) + "\" to a bigid" }; return *this; } bigid bigid::operator +( long long i ) const { return bigid{ value + i }; } bigid bigid::operator -( long long i ) const { return bigid{ value - i }; } const bigid& bigid::operator ++() { *this = value + 1; return *this; } const bigid& bigid::operator --() { *this = value - 1; return 
*this;
    }
    
    const bigid& bigid::operator +=( long long i )
    {
        *this = value + i;
        return *this;
    }
    
    const bigid& bigid::operator -=( long long i )
    {
        *this = value - i;
        return *this;
    }
    
    bool bigid::operator ==( const bigid& o ) const { return value == o.value; }
    bool bigid::operator !=( const bigid& o ) const { return value != o.value; }
    bool bigid::operator  <( const bigid& o ) const { return value  < o.value; }
    bool bigid::operator  >( const bigid& o ) const { return value  > o.value; }
    bool bigid::operator <=( const bigid& o ) const { return value <= o.value; }
    bool bigid::operator >=( const bigid& o ) const { return value >= o.value; }
    
    // Smallest permissible bigid (inclusive); sets `value` directly through
    // the private default constructor, bypassing the range check in
    // `operator =`
    bigid bigid::MIN()
    {
        bigid min;
        min.value = 1000000000000000000ll;
        return min;
    }
    
    // Largest permissible bigid (inclusive) — equal to LLONG_MAX
    bigid bigid::MAX()
    {
        bigid max;
        max.value = 9223372036854775807ll;
        return max;
    }
    
    // Parse a decimal string into a bigid; throws `std::invalid_argument` on
    // malformed input (translated from `pqxx::argument_error`) or on values
    // outside the bigid range (thrown by `operator =`)
    bigid bigid::from_string( const std::string& str )
    {
        try
        {
            long long llid;
            pqxx::string_traits< long long >::from_string(
                str.c_str(),
                llid
            );
            return llid;
        }
        catch( const pqxx::argument_error& e )
        {
            throw std::invalid_argument{
                "can't convert \"" + str + "\" to a bigid"
            };
        }
    }
}
<file_sep>/src/api/list.hpp
#pragma once
#ifndef STICKERS_MOE_API_LIST_HPP
#define STICKERS_MOE_API_LIST_HPP

#include "../common/bigid.hpp"
#include "../common/timestamp.hpp"

#include <vector>

namespace stickers
{
    // A single entry in a user's product list
    struct list_entry
    {
        bigid         product_id;   // Product this entry refers to
        unsigned long quantity;     // How many the user has/wants
        timestamp     updated;      // Last time this entry changed
    };
    
    // Fetch all list entries for the given user ID
    std::vector< list_entry > get_user_list( const bigid& );
}

#endif
<file_sep>/src/api/person.hpp
#pragma once
#ifndef STICKERS_MOE_API_PERSON_HPP
#define STICKERS_MOE_API_PERSON_HPP

#include "../audit/blame.hpp"
#include "../common/bigid.hpp"
#include "../common/crud.hpp"
#include "../common/postgres.hpp"
#include "../common/timestamp.hpp"

#include <exception>
#include <string>
#include <variant>

namespace stickers
{
    // Mutable details of a person record; the owning ID lives in `person`
    struct person_info
    {
        timestamp   created;
        timestamp   revised;
        std::string about;
        
        // Either a free-form display name or the ID of a linked user account
        std::variant< std::string, bigid > identifier;
        
        // True if this person is backed by a site user account
        bool has_user() const
        {
            return std::holds_alternative< bigid >( identifier );
        }
}; struct person { bigid id; person_info info; }; person create_person( const person_info&, const audit::blame& ); person_info load_person( const bigid & ); person_info update_person( const person &, const audit::blame& ); void delete_person( const bigid &, const audit::blame& ); class _assert_people_exist_impl { template< class Container > friend void assert_people_exist( pqxx::work &, const Container& ); _assert_people_exist_impl(); static void exec( pqxx::work&, const std::string& ); }; // ACID-safe assert; if any of the supplied IDs do not correspond to a // record, this will throw `no_such_person` for one of those IDs template< class Container = std::initializer_list< bigid > > void assert_people_exist( pqxx::work & transaction, const Container& ids ) { _assert_people_exist_impl::exec( transaction, postgres::format_variable_list( transaction, ids ) ); } class no_such_person : public no_such_record_error { public: const bigid id; no_such_person( const bigid& ); }; } #endif <file_sep>/src/common/crud.hpp #pragma once #ifndef STICKERS_MOE_COMMON_CRUD_HPP #define STICKERS_MOE_COMMON_CRUD_HPP #include <exception> namespace stickers { class no_such_record_error : public std::runtime_error { using std::runtime_error::runtime_error; }; } #endif <file_sep>/src/common/jwt.cpp #line 2 "common/jwt.cpp" #include "jwt.hpp" #include "config.hpp" #include "hashing.hpp" #include "logging.hpp" #include "string_utils.hpp" #include <cryptopp/hex.h> #include <cryptopp/hmac.h> #include <show/base64.hpp> #include <cstdlib> // std::rand() namespace stickers { const jwt jwt::parse( const std::string& raw ) { auto signing_keys = config()[ "auth" ][ "jwt_keys" ].get< std::map< std::string, std::string > >(); for( auto& kv : signing_keys ) kv.second = show::base64_decode( kv.second ); return parse( raw, signing_keys ); } const jwt jwt::parse( const std::string& raw, const std::map< std::string, std::string >& signing_keys ) { auto token{ parse_no_validate( raw ) }; auto split_pos{ 
raw.rfind( "." ) }; auto payload{ raw.substr( 0, split_pos ) }; std::string signature; try { signature = show::base64_decode( raw.substr( split_pos + 1 ), show::base64_chars_urlsafe ); } catch( const show::base64_decode_error& e ) { throw validation_error{ "signature segment is not valid base64" }; } if( !token.kid ) { STICKERS_LOG( log_level::WARNING, ( "attempt to authenticate with JWT without a signing key ID;" " full token: " ), log_sanitize( raw ) ); throw validation_error{ "missing signing key ID" }; } auto signing_key_found{ signing_keys.find( *token.kid ) }; if( signing_key_found == signing_keys.end() ) { STICKERS_LOG( log_level::WARNING, "attempt to authenticate with JWT using unknown signing key ", log_sanitize( *token.kid ), "; full token: ", log_sanitize( raw ) ); throw validation_error{ "unknown signing key ID" }; } switch( token.alg ) { case signature_alg::HS512: { bool validated{ false }; CryptoPP::HMAC< CryptoPP::SHA512 > hmac{ reinterpret_cast< const CryptoPP::byte* >( signing_key_found -> second.c_str() ), signing_key_found -> second.size() }; // Seems like a good place to use // `CryptoPP::HashVerificationFilter::PUT_HASH`, but there's no // documentation on how to use that flag. I hate Crypto++. 
CryptoPP::StringSource{ payload + signature, true, new CryptoPP::HashVerificationFilter{ hmac, new CryptoPP::ArraySink{ reinterpret_cast< CryptoPP::byte* >( &validated ), sizeof( validated ) }, ( CryptoPP::HashVerificationFilter::HASH_AT_END | CryptoPP::HashVerificationFilter::PUT_RESULT ) } }; if( !validated ) { STICKERS_LOG( log_level::WARNING, ( "attempt to authenticate with JWT with invalid " "signature; full token: " ), log_sanitize( raw ) ); throw validation_error{ "signature mismatch" }; } } break; } if( token.nbf && *token.nbf > now() ) throw validation_error{ "premature token" }; if( token.exp && *token.exp <= now() ) throw validation_error{ "expired token" }; // TODO: check jti against blacklist return token; } const jwt jwt::parse_no_validate( const std::string& raw ) { jwt token; std::string headers_string; std::string claims_string; { auto jwt_segments{ split< std::vector< std::string > >( raw, std::string{ "." } ) }; if( jwt_segments.size() != 3 ) throw structure_error{ "token does not have 3 segments" }; try { headers_string = show::base64_decode( jwt_segments[ 0 ], show::base64_chars_urlsafe ); } catch( const show::base64_decode_error& e ) { throw validation_error{ "header segment is not valid base64" }; } try { claims_string = show::base64_decode( jwt_segments[ 1 ], show::base64_chars_urlsafe ); } catch( const show::base64_decode_error& e ) { throw validation_error{ "claims segment is not valid base64" }; } } nlj::json header; try { header = nlj::json::parse( headers_string ); } catch( const nlj::json::parse_error& e ) { throw structure_error{ "header segment is not valid JSON" }; } if( !header.is_object() ) throw structure_error{ "header segment is not a JSON object" }; auto alg{ header.find( "alg" ) }; auto kid{ header.find( "kid" ) }; if( alg == header.end() ) throw structure_error{ "header missing required key \"alg\"" }; if( kid == header.end() ) throw structure_error{ "header missing required key \"kid\"" }; if( !alg.value().is_string() ) 
throw structure_error{ "header key \"alg\" is not a string" }; if( !kid.value().is_string() ) throw structure_error{ "header key \"kid\" is not a string" }; auto alg_string{ alg.value().get< std::string >() }; if( alg_string == "HS512" ) token.alg = signature_alg::HS512; else { // Officially, JWT specifies "none" as a possible algorithm, which // is a considerable security hole. This just logs anyone possibly // trying to take advantage of that. STICKERS_LOG( log_level::WARNING, ( "attempt to authenticate with JWT with unknown hashing " "algorithm \"" ), log_sanitize( alg_string ), "\"; full token: ", log_sanitize( raw ) ); throw structure_error{ "unsupported hashing algorithm \"" + log_sanitize( alg_string ) + "\"" }; } token.kid = kid.value().get< std::string >(); try { token.claims = nlj::json::parse( claims_string ); } catch( const nlj::json::parse_error& e ) { throw structure_error{ "claims segment is not valid JSON" }; } if( !token.claims.is_object() ) throw structure_error{ "claims segment is not a JSON object" }; // Check known header keys ///////////////////////////////////////////// auto typ{ header.find( "typ" ) }; auto jti{ header.find( "jti" ) }; if( typ == header.end() ) throw structure_error{ "missing required header field \"typ\"" }; else try { auto typ_string = typ.value().get< std::string >(); if( typ_string == "JWT+" ) token.typ = jwt_type::JWT_PLUS; else if( typ_string == "JWT" ) token.typ = jwt_type::JWT; else throw structure_error{ "unsupported JWT type \"" + log_sanitize( typ_string ) + "\"" }; } catch( const nlj::json::parse_error& e ) { throw structure_error{ "header field \"typ\" is not a JSON string" }; } if( jti != header.end() ) try { auto jti_string{ jti.value().get< std::string >() }; try { token.jti = uuid::from_string( jti_string ); } catch( const std::invalid_argument& e ) { throw structure_error{ "unsupported non-UUID \"jti\" field \"" + log_sanitize( jti_string ) + "\": " + std::string{ e.what() } }; } } catch( const 
nlj::json::parse_error& e ) { throw structure_error{ "header field \"jti\" is not a JSON string" }; } // Check known claims ////////////////////////////////////////////////// auto iat{ token.claims.find( "iat" ) }; auto nbf{ token.claims.find( "nbf" ) }; auto exp{ token.claims.find( "exp" ) }; if( iat != token.claims.end() ) try { if( token.typ == jwt_type::JWT_PLUS ) try { token.iat = from_iso8601_str( iat.value().get< std::string >() ); } catch( const std::invalid_argument& e ) { throw structure_error{ "claim field \"iat\" is not a valid ISO 8601 " "timestamp " }; } else token.iat = from_unix_time( iat.value().get< unsigned int >() ); } catch( const nlj::json::parse_error& e ) { throw structure_error{ "claim field \"iat\" is not a JSON " + std::string{ token.typ == jwt_type::JWT_PLUS ? "string" : "unsigned integer" } }; } if( nbf != token.claims.end() ) try { if( token.typ == jwt_type::JWT_PLUS ) try { token.nbf = from_iso8601_str( nbf.value().get< std::string >() ); } catch( const std::invalid_argument& e ) { throw structure_error{ "claim field \"nbf\" is not a valid ISO 8601 " "timestamp " }; } else token.nbf = from_unix_time( nbf.value().get< unsigned int >() ); } catch( const nlj::json::parse_error& e ) { throw structure_error{ "claim field \"nbf\" is not a JSON " + std::string{ token.typ == jwt_type::JWT_PLUS ? "string" : "unsigned integer" } }; } if( exp != token.claims.end() ) try { if( token.typ == jwt_type::JWT_PLUS ) try { token.exp = from_iso8601_str( exp.value().get< std::string >() ); } catch( const std::invalid_argument& e ) { throw structure_error{ "claim field \"exp\" is not a valid ISO 8601 " "timestamp " }; } else token.exp = from_unix_time( exp.value().get< unsigned int >() ); } catch( const nlj::json::parse_error& e ) { throw structure_error{ "claim field \"exp\" is not a JSON " + std::string{ token.typ == jwt_type::JWT_PLUS ? 
"string" : "unsigned integer" } }; } return token; } std::string jwt::serialize( const jwt& token ) { auto signing_keys{ config()[ "auth" ][ "jwt_keys" ].get< std::map< std::string, std::string > >() }; for( auto& kv : signing_keys ) kv.second = show::base64_decode( kv.second ); return serialize( token, signing_keys ); } std::string jwt::serialize( const jwt& token, const std::map< std::string, std::string >& signing_keys ) { nlj::json header{ { "alg", "HS512" } }; if( token.typ == jwt_type::JWT_PLUS ) header[ "typ" ] = "JWT+"; else header[ "typ" ] = "JWT"; if( token.jti ) header[ "jti" ] = token.jti -> hex_value(); else header[ "jti" ] = uuid::generate().hex_value(); std::string signing_key_string; if( token.kid ) { // Find the specified signing key in `signing_keys` auto key_found{ signing_keys.find( *token.kid ) }; if( key_found == signing_keys.end() ) throw std::runtime_error{ "specified a JWT signing key that does not exist in the " "available options" }; header[ "kid" ] = key_found -> first; signing_key_string = key_found -> second; } else { // Choose a random signing key from `signing_keys` auto random_int{ std::rand() % signing_keys.size() }; auto key_to_use{ signing_keys.begin() }; for( int i = 0; i < random_int; ++i ) ++key_to_use; header[ "kid" ] = key_to_use -> first; signing_key_string = key_to_use -> second; } // Make a writable copy of the claims auto claims{ token.claims }; auto iat{ now() }; if( token.iat ) iat = *token.iat; if( token.typ == jwt_type::JWT_PLUS ) claims[ "iat" ] = to_iso8601_str( iat ); else claims[ "iat" ] = to_unix_time( iat ); if( token.nbf ) { if( token.typ == jwt_type::JWT_PLUS ) claims[ "nbf" ] = to_iso8601_str( *token.nbf ); else claims[ "nbf" ] = to_unix_time( *token.nbf ); } if( token.exp ) { if( token.typ == jwt_type::JWT_PLUS ) claims[ "exp" ] = to_iso8601_str( *token.exp ); else claims[ "exp" ] = to_unix_time( *token.exp ); } std::string header_claims_string{ show::base64_encode( header.dump(), show::base64_chars_urlsafe 
) + "." + show::base64_encode( claims.dump(), show::base64_chars_urlsafe ) }; std::string signature_string; CryptoPP::HMAC< CryptoPP::SHA512 > hmac{ reinterpret_cast< const CryptoPP::byte* >( signing_key_string.c_str() ), signing_key_string.size() }; CryptoPP::StringSource{ header_claims_string, true, new CryptoPP::HashFilter{ hmac, new CryptoPP::StringSink{ signature_string } } }; return header_claims_string + "." + show::base64_encode( signature_string, show::base64_chars_urlsafe ); } }<file_sep>/src/common/timestamp.hpp #pragma once #ifndef STICKERS_MOE_COMMON_TIMESTAMP_HPP #define STICKERS_MOE_COMMON_TIMESTAMP_HPP #include "postgres.hpp" #include <date/date.h> #include <string> namespace stickers { using timestamp = date::sys_time< std::chrono::microseconds >; const timestamp& now(); // Implemented in ../server/server.cpp timestamp current_timestamp(); timestamp from_iso8601_str( const std::string& ); bool from_iso8601_str( const std::string&, timestamp& ); std::string to_iso8601_str( const timestamp& ); std::string to_http_ts_str( const timestamp& ); timestamp from_unix_time( unsigned int ); unsigned int to_unix_time( const timestamp& ); } // Template specialization of `pqxx::string_traits<>(&)` for // `stickers::timestamp`, which allows use of `pqxx::field::to<>(&)` and // `pqxx::field::as<>(&)` namespace pqxx { template<> struct string_traits< stickers::timestamp > { using subject_type = stickers::timestamp; static constexpr const char* name() noexcept { return "stickers::timestamp"; } static constexpr bool has_null() noexcept { return false; } static bool is_null( const stickers::timestamp& ) { return false; } [[noreturn]] static stickers::timestamp null() { internal::throw_null_conversion( name() ); } static void from_string( const char str[], stickers::timestamp& ts ) { if( !stickers::from_iso8601_str( std::string{ str } + "00", ts ) ) throw argument_error{ "Failed conversion to " + static_cast< std::string >( name() ) + ": '" + static_cast< std::string 
>( str ) + "'" }; } static std::string to_string( const stickers::timestamp& ts ) { return stickers::to_iso8601_str( ts ); } }; } #endif <file_sep>/src/api/person.cpp #line 2 "api/person.cpp" #include "person.hpp" #include "../api/user.hpp" #include "../common/formatting.hpp" #include "../common/logging.hpp" namespace { stickers::bigid write_person_details( stickers::person & person, const stickers::audit::blame& blame, bool generate_id ) { auto connection{ stickers::postgres::connect() }; pqxx::work transaction{ *connection }; if( generate_id ) { auto result{ transaction.exec_params( PSQL( INSERT INTO people.people_core ( person_id, _a_revision ) VALUES ( DEFAULT, $1 ) RETURNING person_id ; ), blame.when ) }; result[ 0 ][ "person_id" ].to< stickers::bigid >( person.id ); } else stickers::assert_people_exist( transaction, { person.id } ); std::string add_person_revision_query_string{ PSQL( INSERT INTO people.person_revisions ( person_id, revised, revised_by, revised_from, person_name, person_user, about ) VALUES ( $1, $2, $3, $4, $5, $6, $7 ) ; ) }; if( person.info.has_user() ) { const auto& user_id{ std::get< stickers::bigid >( person.info.identifier ) }; stickers::assert_users_exist( transaction, { user_id } ); transaction.exec_params( add_person_revision_query_string, person.id, blame.when, blame.who, blame.where, nullptr, user_id, person.info.about ); } else transaction.exec_params( add_person_revision_query_string, person.id, blame.when, blame.who, blame.where, std::get< std::string >( person.info.identifier ), nullptr, person.info.about ); transaction.commit(); return person.id; } } namespace stickers // Person /////////////////////////////////////////////////// { person create_person( const person_info & info, const audit::blame& blame ) { person p{ bigid::MIN(), info }; write_person_details( p, blame, true ); return p; } person_info load_person( const bigid& id ) { auto connection{ postgres::connect() }; pqxx::work transaction{ *connection }; auto result{ 
transaction.exec_params( PSQL( SELECT created, revised, person_name, person_user, about FROM people.people WHERE person_id = $1 AND NOT deleted ; ), id ) }; transaction.commit(); if( result.size() < 1 ) throw no_such_person{ id }; auto& row{ result[ 0 ] }; if( row[ "person_user" ].is_null() ) { return { row[ "created" ].as< timestamp >(), row[ "revised" ].as< timestamp >(), row[ "about" ].as< std::string >(), row[ "person_name" ].as< std::string >() }; } else { return { row[ "created" ].as< timestamp >(), row[ "revised" ].as< timestamp >(), row[ "about" ].as< std::string >(), row[ "person_user" ].as< bigid >() }; } } person_info update_person( const person& p, const audit::blame& blame ) { auto updated_person{ p }; write_person_details( updated_person, blame, true ); return updated_person.info; } void delete_person( const bigid& id, const audit::blame& blame ) { auto connection{ postgres::connect() }; pqxx::work transaction{ *connection }; assert_people_exist( transaction, { id } ); auto result{ transaction.exec_params( PSQL( INSERT INTO people.person_deletions ( person_id, deleted, deleted_by, deleted_from ) VALUES ( $1, $2, $3, $4 ) ; ), id, blame.when, blame.who, blame.where ) }; transaction.commit(); } } namespace stickers // Exception //////////////////////////////////////////////// { no_such_person::no_such_person( const bigid& id ) : no_such_record_error{ "no such person with ID " + static_cast< std::string >( id ) }, id{ id } {} } namespace stickers // Assertion //////////////////////////////////////////////// { void _assert_people_exist_impl::exec( pqxx::work & transaction, const std::string& ids_string ) { std::string query_string; ff::fmt( query_string, PSQL( WITH lookfor AS ( SELECT UNNEST( ARRAY[ {0} ] ) AS person_id ) SELECT lookfor.person_id FROM lookfor LEFT JOIN people.people_core AS pc ON pc.person_id = lookfor.person_id LEFT JOIN people.person_deletions AS pd ON pd.person_id = pc.person_id WHERE pc.person_id IS NULL OR pd.person_id IS NOT NULL ; 
), ids_string ); auto result{ transaction.exec( query_string ) }; if( result.size() > 0 ) throw no_such_person{ result[ 0 ][ 0 ].as< bigid >() }; } } <file_sep>/src/common/hashing.cpp #line 2 "common/hashing.cpp" #include "hashing.hpp" #include <cryptopp/hex.h> #include <libscrypt.h> #include <cmath> #include <utility> // std::move<>() namespace { char unhex( char c ) { if( c >= '0' && c <= '9' ) return c - '0'; if( c >= 'A' && c <= 'F' ) return c - 'A' + 10; if( c >= 'a' && c <= 'f' ) return c - 'a' + 10; throw stickers::hash_error{ "char with value " + std::to_string( c ) + " out of range for hex" }; } } namespace stickers // SHA256 /////////////////////////////////////////////////// { sha256::sha256() : digest{ CryptoPP::SHA256::DIGESTSIZE } {} sha256::sha256( const CryptoPP::SecByteBlock& b ) : digest{ CryptoPP::SHA256::DIGESTSIZE } { if( b.size() == CryptoPP::SHA256::DIGESTSIZE ) digest = b; else throw hash_error{ "mismatch between digest and input sizes constructing a sha256 " "object (need " + std::to_string( CryptoPP::SHA256::DIGESTSIZE ) + " bytes, got " + std::to_string( b.size() ) + ")" }; } sha256::sha256( const std::string& s ) : digest{ CryptoPP::SHA256::DIGESTSIZE } { if( s.size() == CryptoPP::SHA256::DIGESTSIZE ) for( std::size_t i = 0; i < CryptoPP::SHA256::DIGESTSIZE; ++i ) digest.BytePtr()[ i ] = ( CryptoPP::byte )s[ i ]; else throw hash_error{ "mismatch between digest and input sizes constructing a sha256 " "object (need " + std::to_string( CryptoPP::SHA256::DIGESTSIZE ) + " bytes, got " + std::to_string( s.size() ) + ")" }; } sha256::sha256( const char* s, std::size_t l ) : digest{ CryptoPP::SHA256::DIGESTSIZE } { if( l == CryptoPP::SHA256::DIGESTSIZE ) for( std::size_t i = 0; i < CryptoPP::SHA256::DIGESTSIZE; ++i ) digest.BytePtr()[ i ] = static_cast< CryptoPP::byte >( s[ i ] ); else throw hash_error{ "mismatch between digest and input sizes constructing a sha256 " "object (need " + std::to_string( CryptoPP::SHA256::DIGESTSIZE ) + " bytes, got 
" + std::to_string( l ) + ")" }; } sha256::sha256( const sha256& o ) : digest{ o.digest } {} sha256::sha256( sha256&& o ) : digest{ std::move( o.digest ) } {} sha256& sha256::operator =( const sha256& o ) { digest = o.digest; return *this; } sha256& sha256::operator =( sha256&& o ) { digest = std::move( o.digest ); return *this; } bool sha256::operator==( const sha256& o ) const { // `CryptoPP::SecBlock<>::operator==()` is constant-time return digest == o.digest; } bool sha256::operator!=( const sha256& o ) const { return digest != o.digest; } bool sha256::operator <( const sha256& o ) const { int first_diff{ 0 }; for( CryptoPP::SecByteBlock::size_type i{ 0 }; i < CryptoPP::SHA256::DIGESTSIZE; ++i ) if( first_diff == 0 ) first_diff = digest.data()[ i ] - o.digest.data()[ i ]; return first_diff < 0; } bool sha256::operator >( const sha256& o ) const { return !( *this <= o ); } bool sha256::operator <=( const sha256& o ) const { return *this < o || *this == o; } bool sha256::operator >=( const sha256& o ) const { return !( *this < o ); } std::string sha256::raw_digest() const { return { reinterpret_cast< const char* >( digest.data() ), digest.size() }; } std::string sha256::hex_digest() const { std::string hex; CryptoPP::StringSource{ reinterpret_cast< const CryptoPP::byte* >( digest.data() ), digest.size(), true, new CryptoPP::HexEncoder{ new CryptoPP::StringSink{ hex } } }; return hex; } sha256 sha256::make( const char* s, std::size_t l ) { sha256 h; CryptoPP::SHA256{}.CalculateDigest( h.digest.begin(), reinterpret_cast< const CryptoPP::byte* >( s ), l ); return h; } sha256 sha256::make( const std::string& s ) { return make( s.c_str(), s.size() ); } sha256 sha256::from_hex_string( const std::string& s ) { if( s.size() != CryptoPP::SHA256::DIGESTSIZE * 2 ) throw hash_error{ "mismatch between digest and input sizes constructing a sha256 " "object from hex string (need " + std::to_string( CryptoPP::SHA256::DIGESTSIZE * 2 ) + " chars, got " + std::to_string( s.size() ) 
+ ")" }; sha256 h; std::size_t i{ 0 }; for( auto& b : h.digest ) { b = ( unhex( s[ i ] ) << 4 ) | ( unhex( s[ i + 1 ] ) ); i += 2; } return h; } void sha256::builder::append( const char* s, std::size_t l ) { algorithm.Update( reinterpret_cast< const CryptoPP::byte* >( s ), l ); } void sha256::builder::append( const std::string& s ) { append( s.c_str(), s.size() ); } sha256 sha256::builder::generate_and_clear() { sha256 h; algorithm.Final( h.digest.begin() ); return h; } } namespace stickers // SCRYPT /////////////////////////////////////////////////// { scrypt::scrypt() {} scrypt::scrypt( const std::string& digest, const std::string& salt, unsigned char factor, unsigned char block_size, unsigned char parallelization ) : digest { digest }, salt { salt }, _factor { factor }, _block_size { block_size }, _parallelization{ parallelization } {} scrypt::scrypt( const scrypt& o ) : salt { o.salt }, digest { o.digest }, _factor { o._factor }, _block_size { o._block_size }, _parallelization{ o._parallelization } {} scrypt::scrypt( scrypt&& o ) : salt { std::move( o.salt ) }, digest { std::move( o.digest ) }, _factor { std::move( o._factor ) }, _block_size { std::move( o._block_size ) }, _parallelization{ std::move( o._parallelization ) } {} scrypt& scrypt::operator =( const scrypt& o ) { salt = o.salt ; digest = o.digest ; _factor = o._factor ; _block_size = o._block_size ; _parallelization = o._parallelization; return *this; } scrypt& scrypt::operator =( scrypt&& o ) { salt = std::move( o.salt ); digest = std::move( o.digest ); _factor = std::move( o._factor ); _block_size = std::move( o._block_size ); _parallelization = std::move( o._parallelization ); return *this; } bool scrypt::operator==( const scrypt& o ) const { bool equals{ true }; equals = equals && ( _factor == o._factor ); equals = equals && ( _block_size == o._block_size ); equals = equals && ( _parallelization == o._parallelization ); std::size_t slen; // `std::string::size()` is guaranteed constant-time as of 
C++11 slen = salt.size() > o.salt.size() ? salt.size() : o.salt.size(); for( std::size_t i = 0; i < slen; ++i ) { char c1 = i >= salt.size() ? ~o.salt[ i ] : salt[ i ]; char c2 = i >= o.salt.size() ? ~ salt[ i ] : o.salt[ i ]; equals = equals && ( c1 == c2 ); } slen = digest.size() > o.digest.size() ? digest.size() : o.digest.size(); for( std::size_t i = 0; i < slen; ++i ) { char c1 = i >= digest.size() ? ~o.digest[ i ] : digest[ i ]; char c2 = i >= o.digest.size() ? ~ digest[ i ] : o.digest[ i ]; equals = equals && ( c1 == c2 ); } return equals; } bool scrypt::operator!=( const scrypt& o ) const { return !( *this == o ); } bool scrypt::operator <( const scrypt& o ) const { std::size_t slen; // `std::string::size()` is guaranteed constant-time as of C++11 auto first_diff{ salt.size() - o.salt.size() }; if( first_diff == 0 ) first_diff = digest.size() - o.digest.size(); slen = salt.size() > o.salt.size() ? salt.size() : o.salt.size(); for( std::size_t i = 0; i < slen; ++i ) { char c1 = i >= salt.size() ? o.salt[ i ] : salt[ i ]; char c2 = i >= o.salt.size() ? salt[ i ] : o.salt[ i ]; if( first_diff == 0 ) first_diff = c1 - c2; } slen = digest.size() > o.digest.size() ? digest.size() : o.digest.size(); for( std::size_t i = 0; i < slen; ++i ) { char c1 = i >= digest.size() ? o.digest[ i ] : digest[ i ]; char c2 = i >= o.digest.size() ? 
digest[ i ] : o.digest[ i ]; if( first_diff == 0 ) first_diff = c1 - c2; } if( first_diff == 0 ) first_diff = make_libscrypt_mcf_factor( _factor, _block_size, _parallelization ) - make_libscrypt_mcf_factor( o._factor, o._block_size, o._parallelization ); return first_diff < 0; } bool scrypt::operator >( const scrypt& o ) const { return !( *this <= o ); } bool scrypt::operator <=( const scrypt& o ) const { return *this < o || *this == o; } bool scrypt::operator >=( const scrypt& o ) const { return !( *this < o ); } std::string scrypt::raw_digest() const { return digest; } std::string scrypt::hex_digest() const { std::string hex; CryptoPP::StringSource{ digest, true, new CryptoPP::HexEncoder{ new CryptoPP::StringSink{ hex } } }; return hex; } std::string scrypt::raw_salt() const { return salt; } std::string scrypt::hex_salt() const { std::string hex; CryptoPP::StringSource{ salt, true, new CryptoPP::HexEncoder{ new CryptoPP::StringSink{ hex } } }; return hex; } unsigned char scrypt::factor() const { return _factor; } unsigned char scrypt::block_size() const { return _block_size; } unsigned char scrypt::parallelization() const { return _parallelization; } scrypt scrypt::make( const char* input, std::size_t input_len, const char* salt, std::size_t salt_len, unsigned char factor, unsigned char block_size, unsigned char parallelization, std::size_t digest_size ) { scrypt sc; // Can't use list initialization to call this `std::string` constructor // when compiling with clang sc.digest = std::string( digest_size, '\0' ); auto result{ libscrypt_scrypt( reinterpret_cast< const uint8_t* >( input ), input_len, reinterpret_cast< const uint8_t* >( salt ), salt_len, static_cast< uint64_t >( pow( 2, factor ) ), block_size, parallelization, reinterpret_cast< uint8_t* >( sc.digest.data() ), sc.digest.size() ) }; if( result ) throw hash_error{ "failed to make scrypt (libscrypt_scrypt() returned " + std::to_string( result ) + ")" }; sc.salt = std::string{ salt, salt_len }; sc._factor 
= factor; sc._block_size = block_size; sc._parallelization = parallelization; return sc; } scrypt scrypt::make( const std::string& input, const std::string& salt, unsigned char factor, unsigned char block_size, unsigned char parallelization, std::size_t digest_size ) { return make( input.c_str(), input.size(), salt.c_str(), salt.size(), factor, block_size, parallelization, digest_size ); } unsigned int scrypt::make_libscrypt_mcf_factor( unsigned char factor, unsigned char block_size, unsigned char parallelization ) { return ( static_cast< unsigned int >( factor << 16 ) | static_cast< unsigned int >( block_size << 8 ) | static_cast< unsigned int >( parallelization << 0 ) ); } void scrypt::split_libscrypt_mcf_factor( unsigned int combined, unsigned char& factor, unsigned char& block_size, unsigned char& parallelization ) { factor = combined >> 16; block_size = combined >> 8; parallelization = combined >> 0; } } <file_sep>/src/handlers/design.cpp #line 2 "handlers/design.cpp" #include "handlers.hpp" #include "../api/design.hpp" #include "../common/auth.hpp" #include "../common/crud.hpp" #include "../common/json.hpp" #include "../server/parse.hpp" #include <show/constants.hpp> #include <utility> // std::move<>() namespace { void design_to_json( const stickers::bigid & id, const stickers::design_info& info, nlj::json & design_json ) { auto images_array{ nlj::json::array() }; auto contributors_array{ nlj::json::array() }; for( auto& image_hash : info.images ) images_array.push_back( image_hash.hex_digest() ); for( auto& contributor_id : info.contributors ) contributors_array.push_back( contributor_id ); design_json = { { "design_id" , id }, { "created" , stickers::to_iso8601_str( info.created ) }, { "revised" , stickers::to_iso8601_str( info.revised ) }, { "description" , info.description }, { "images" , images_array }, { "contributors", contributors_array } }; } stickers::design_info design_info_from_document( const stickers::document& details_doc ) { if( 
!details_doc.is_a< stickers::map_document >() ) throw stickers::handler_exit{ show::code::BAD_REQUEST, "invalid data format" }; auto& details_map{ details_doc.get< stickers::map_document >() }; for( const auto& field : { std::string{ "description" } } ) if( details_map.find( field ) == details_map.end() ) throw stickers::handler_exit{ show::code::BAD_REQUEST, "missing required field \"" + static_cast< std::string >( field ) + "\"" }; else if( !details_map[ field ].is_a< stickers::string_document >() ) throw stickers::handler_exit{ show::code::BAD_REQUEST, "required field \"" + static_cast< std::string >( field ) + "\" must be a string" }; for( const auto& field : { std::string{ "images" }, std::string{ "contributors" } } ) if( details_map.find( field ) == details_map.end() ) throw stickers::handler_exit{ show::code::BAD_REQUEST, "missing required field \"" + static_cast< std::string >( field ) + "\"" }; else if( !details_map[ field ].is_a< stickers::map_document >() ) throw stickers::handler_exit{ show::code::BAD_REQUEST, "required field \"" + static_cast< std::string >( field ) + "\" must be an array" }; std::vector< stickers::sha256 > images; std::vector< stickers::bigid > contributors; for( auto& pair : details_map[ "images" ].get< stickers::map_document >() ) try { images.emplace_back( stickers::sha256::from_hex_string( pair.second.get< stickers::string_document >() ) ); } catch( const stickers::hash_error& e ) { throw stickers::handler_exit{ show::code::BAD_REQUEST, ( "missing required field \"images\" must be an array of" " SHA-256 hashes" ) }; } for( auto& pair : details_map[ "contributors" ].get< stickers::map_document >() ) try { contributors.emplace_back( stickers::bigid::from_string( pair.second.get< stickers::string_document >() ) ); } catch( const std::invalid_argument& e ) { throw stickers::handler_exit{ show::code::BAD_REQUEST, ( "missing required field \"contributors\" must be an" " array of bigids" ) }; } return { stickers::now(), stickers::now(), 
details_map[ "description" ].get< stickers::string_document >(), std::move( images ), std::move( contributors ) }; } } namespace stickers { void handlers::create_design( show::request& request, const handler_vars_type& variables ) { auto auth{ authenticate( request ) }; permissions_assert_all( auth.user_permissions, { "edit_public_pages" } ); try { auto created{ create_design( design_info_from_document( parse_request_content( request ) ), { auth.user_id, "create design handler", now(), request.client_address() } ) }; nlj::json design_json; design_to_json( created.id, created.info, design_json ); auto design_json_string{ design_json.dump() }; show::response response{ request.connection(), show::HTTP_1_1, show::code::CREATED, { show::server_header, { "Content-Type", { "application/json" } }, { "Content-Length", { std::to_string( design_json_string.size() ) } }, { "Location", { "/design/" + static_cast< std::string >( created.id ) } } } }; response.sputn( design_json_string.c_str(), design_json_string.size() ); } catch( const no_such_record_error& e ) { throw handler_exit{ show::code::BAD_REQUEST, e.what() }; } } void handlers::get_design( show::request& request, const handler_vars_type& variables ) { auto found_design_id_variable{ variables.find( "design_id" ) }; if( found_design_id_variable == variables.end() ) throw handler_exit{ show::code::NOT_FOUND, "need a design ID" }; auto design_id{ bigid::MIN() }; try { design_id = bigid::from_string( found_design_id_variable -> second ); } catch( const std::invalid_argument& e ) { throw handler_exit{ show::code::NOT_FOUND, "need a valid design ID" }; } try { auto info{ load_design( design_id ) }; nlj::json design_json; design_to_json( design_id, info, design_json ); auto design_json_string{ design_json.dump() }; show::response response{ request.connection(), show::HTTP_1_1, show::code::OK, { show::server_header, { "Content-Type", { "application/json" } }, { "Content-Length", { std::to_string( design_json_string.size() ) } 
} } }; response.sputn( design_json_string.c_str(), design_json_string.size() ); } catch( const no_such_design& e ) { throw handler_exit{ show::code::NOT_FOUND, e.what() }; } } void handlers::edit_design( show::request& request, const handler_vars_type& variables ) { auto auth{ authenticate( request ) }; permissions_assert_all( auth.user_permissions, { "edit_public_pages" } ); auto found_design_id_variable{ variables.find( "design_id" ) }; if( found_design_id_variable == variables.end() ) throw handler_exit{ show::code::NOT_FOUND, "need a design ID" }; stickers::bigid design_id{ bigid::MIN() }; try { design_id = bigid::from_string( found_design_id_variable -> second ); } catch( const std::invalid_argument& e ) { throw handler_exit{ show::code::NOT_FOUND, "need a valid design ID" }; } try { auto updated_info{ update_design( { design_id, design_info_from_document( parse_request_content( request ) ) }, { auth.user_id, "update design handler", now(), request.client_address() } ) }; nlj::json design_json; design_to_json( design_id, updated_info, design_json ); auto design_json_string = design_json.dump(); show::response response{ request.connection(), show::HTTP_1_1, show::code::OK, { show::server_header, { "Content-Type", { "application/json" } }, { "Content-Length", { std::to_string( design_json_string.size() ) } } } }; response.sputn( design_json_string.c_str(), design_json_string.size() ); } catch( const no_such_design& e ) { throw handler_exit{ show::code::NOT_FOUND, e.what() }; } catch( const no_such_record_error& e ) { throw handler_exit{ show::code::BAD_REQUEST, e.what() }; } } void handlers::delete_design( show::request& request, const handler_vars_type& variables ) { auto auth{ authenticate( request ) }; permissions_assert_all( auth.user_permissions, { "edit_public_pages" } ); auto found_design_id_variable{ variables.find( "design_id" ) }; if( found_design_id_variable == variables.end() ) throw handler_exit{ show::code::NOT_FOUND, "need a design ID" }; 
stickers::bigid design_id{ bigid::MIN() }; try { design_id = bigid::from_string( found_design_id_variable -> second ); } catch( const std::invalid_argument& e ) { throw handler_exit{ show::code::NOT_FOUND, "need a valid design ID" }; } try { delete_design( design_id, { auth.user_id, "delete design handler", now(), request.client_address() } ); std::string null_json{ "null" }; show::response response{ request.connection(), show::HTTP_1_1, show::code::OK, { show::server_header, { "Content-Type", { "application/json" } }, { "Content-Length", { std::to_string( null_json.size() ) } } } }; response.sputn( null_json.c_str(), null_json.size() ); } catch( const no_such_design& e ) { throw handler_exit{ show::code::NOT_FOUND, e.what() }; } } } <file_sep>/src/api/media.cpp #line 2 "api/media.cpp" #include "media.hpp" #include "../common/config.hpp" #include "../common/formatting.hpp" #include "../common/logging.hpp" #include "../common/uuid.hpp" #include "../handlers/handlers.hpp" #include "../server/parse.hpp" #include <show.hpp> #include <show/constants.hpp> #include <show/multipart.hpp> #include <fstream> #include <sstream> namespace // Utilities ///////////////////////////////////////////////////////// { std::string standard_extension_for_mime_type( const std::string& mime_type ) { if( mime_type == "image/jpeg" ) return ".jpeg"; else if( mime_type == "image/png" ) return ".png"; else if( mime_type == "image/gif" ) return ".gif"; else if( mime_type == "video/webm" ) return ".webm"; else if( mime_type == "text/plain" ) return ".txt"; else throw stickers::unacceptable_mime_type{ mime_type }; } std::string format_image_subpath( const stickers::sha256& hash, const std::string & mime_type ) { auto hash_hex{ hash.hex_digest() }; return ( hash_hex.substr( 0, 2 ) + "/" + hash_hex.substr( 2, 2 ) + "/" + hash_hex.substr( 4 ) + standard_extension_for_mime_type( mime_type ) ); } std::experimental::filesystem::path image_hash_to_disk_path( const stickers::sha256& hash, const 
std::string & mime_type ) { return std::experimental::filesystem::u8path( stickers::config()[ "media" ][ "media_directory" ].get< std::string >() + "/" + format_image_subpath( hash, mime_type ) ); } std::string image_hash_to_url( const stickers::sha256& hash, const std::string & mime_type ) { return ( stickers::config()[ "media" ][ "base_url" ].get< std::string >() + format_image_subpath( hash, mime_type ) ); } std::string guess_mime_type( const std::optional< std::string >& file_name, const std::optional< std::string >& mime_type, const std::string & beginning_chunk, const std::string & ending_chunk ) { std::string extension; if( file_name ) extension = show::_ASCII_upper( file_name -> substr( file_name -> rfind( "." ) ) ); // See https://www.garykessler.net/library/file_sigs.html for more // information static const std::string magic_num_jpeg{ '\xff', '\xd8' }; static const std::string magic_num_png { '\x89', '\x50', '\x4e', '\x47', '\x0d', '\x0a', '\x1a', '\x0a' }; static const std::string magic_num_gif7{ '\x47', '\x49', '\x46', '\x38', '\x37', '\x61' }; static const std::string magic_num_gif9{ '\x47', '\x49', '\x46', '\x38', '\x39', '\x61' }; static const std::string magic_num_webm{ '\x1a', '\x45', '\xdf', '\xa3' }; static const std::string trailer_jpeg{ '\xff', '\xd9' }; static const std::string trailer_png { '\x49', '\x45', '\x4e', '\x44', '\xae', '\x42', '\x60', '\x82' }; static const std::string trailer_gif { '\x00', '\x3b' }; if( ( !mime_type || ( *mime_type == "image/jpeg" ) ) && ( !file_name || ( extension == ".JPG" || extension == ".JPEG" ) ) && beginning_chunk.substr( 0, magic_num_jpeg.size() ) == magic_num_jpeg && ending_chunk.substr( ending_chunk.size() - trailer_jpeg.size() ) == trailer_jpeg && beginning_chunk[ 2 ] == '\xff' && ( beginning_chunk[ 3 ] >= '\xe0' && beginning_chunk[ 3 ] <= '\xef' ) ) return "image/jpeg"; else if( ( !mime_type || ( *mime_type == "image/png" ) ) && ( !file_name || extension == ".PNG" ) && beginning_chunk.substr( 0, 
magic_num_png.size() ) == magic_num_png && ending_chunk.substr( ending_chunk.size() - trailer_png.size() ) == trailer_png ) return "image/png"; else if( ( !mime_type || ( *mime_type == "image/gif" ) ) && ( !file_name || extension == ".GIF" ) && ( beginning_chunk.substr( 0, magic_num_gif7.size() ) == magic_num_gif7 || beginning_chunk.substr( 0, magic_num_gif9.size() ) == magic_num_gif9 ) && ending_chunk.substr( ending_chunk.size() - trailer_gif.size() ) == trailer_gif ) return "image/gif"; else if( ( !mime_type || ( *mime_type == "video/webm" ) ) && ( !file_name || extension == ".WEBM" ) && beginning_chunk.substr( 0, magic_num_webm.size() ) == magic_num_webm ) return "video/webm"; else throw stickers::indeterminate_mime_type{}; } } namespace // Internal implementations ////////////////////////////////////////// { stickers::media_info load_media_info_impl( const stickers::sha256& hash, pqxx::work& transaction ) { auto result{ transaction.exec_params( PSQL( SELECT mime_type, decency, original_filename, uploaded, uploaded_by FROM media.images WHERE image_hash = $1 ; ), pqxx::binarystring{ hash.raw_digest() } ) }; if( result.size() < 1 ) throw stickers::no_such_media{ hash }; auto& row{ result[ 0 ] }; auto mime_type{ row[ "mime_type" ].as< std::string >() }; stickers::media_info info{ image_hash_to_disk_path( hash, mime_type ), image_hash_to_url ( hash, mime_type ), mime_type, row[ "decency" ].as< stickers::media_decency >(), std::nullopt, row[ "uploaded" ].as< stickers::timestamp >(), row[ "uploaded_by" ].as< stickers::bigid >() }; if( !row[ "original_filename" ].is_null() ) info.original_filename = row[ "original_filename" ].as< std::string >(); return info; } struct file_info { std::experimental::filesystem::path temp_file_path; stickers::sha256 file_hash; std::string mime_type; }; file_info save_temp_file( std::streambuf & file_contents, const std::optional< std::string >& original_filename, const std::optional< std::string >& sent_mime_type ) { auto temp_file_id{ 
stickers::uuid::generate() }; auto temp_file_path{ std::experimental::filesystem::u8path( stickers::config()[ "media" ][ "temp_file_location" ].get< std::string >() + "/" + temp_file_id.hex_value() ) }; std::experimental::filesystem::create_directories( temp_file_path.parent_path() ); std::fstream temp_file{ temp_file_path, std::ios::out | std::ios::in | std::ios::binary }; std::istream file_stream{ &file_contents }; std::streamsize file_size{ 0 }; stickers::sha256::builder hash_builder; // Write the file to temp location in kilobyte-sized chunks until less // than a kilobyte is left (reading another kilobyte will fail), then // write what's left in the file char buffer[ 1024 ]; while( file_stream.read( buffer, sizeof( buffer ) ) ) { temp_file.write ( buffer, sizeof( buffer ) ); hash_builder.append( buffer, sizeof( buffer ) ); file_size += sizeof( buffer ); } auto remaining{ file_stream.gcount() }; temp_file.write ( buffer, remaining ); hash_builder.append( buffer, remaining ); file_size += remaining; STICKERS_LOG( stickers::log_level::VERBOSE, "wrote ", file_size, " bytes to temp file \"", stickers::log_sanitize( temp_file_path ), "\"" ); auto file_hash{ hash_builder.generate_and_clear() }; temp_file.seekg( 0, std::ios::beg ); std::streamsize chunk_bytes{ 64 }; // Once again running afoul of the CLang stdlib bug where the // `std::string(char*, count)` constructor can't be used with curly // braces std::string beginning_chunk; if( temp_file.read( buffer, chunk_bytes ) ) beginning_chunk = std::string( buffer, chunk_bytes ); else { remaining = temp_file.gcount(); temp_file.read( buffer, remaining ); beginning_chunk = std::string( buffer, remaining ); } std::string detected_mime_type; try { if( file_size < chunk_bytes ) detected_mime_type = guess_mime_type( original_filename, sent_mime_type, beginning_chunk, beginning_chunk ); else { temp_file.seekg( file_size - chunk_bytes ); temp_file.read( buffer, chunk_bytes ); detected_mime_type = guess_mime_type( 
original_filename, sent_mime_type, beginning_chunk, std::string( buffer, chunk_bytes ) ); } } catch( ... ) { std::experimental::filesystem::remove( temp_file_path ); throw; } return { temp_file_path, file_hash, detected_mime_type }; } file_info save_temp_file( std::string & file_contents, const std::optional< std::string >& original_filename, const std::optional< std::string >& sent_mime_type ) { auto mime_type{ guess_mime_type( original_filename, sent_mime_type, file_contents.substr( 0, 64 ), file_contents.substr( file_contents.size() - 64 ) ) }; auto file_hash{ stickers::sha256::make( file_contents ) }; auto temp_file_id{ stickers::uuid::generate() }; auto temp_file_path{ std::experimental::filesystem::u8path( stickers::config()[ "media" ][ "temp_file_location" ].get< std::string >() + "/" + temp_file_id.hex_value() ) }; std::fstream temp_file{ temp_file_path, std::ios::out | std::ios::binary }; temp_file.write( file_contents.data(), file_contents.size() ); STICKERS_LOG( stickers::log_level::VERBOSE, "wrote ", file_contents.size(), " bytes to temp file \"", stickers::log_sanitize( temp_file_path ), "\"" ); return { temp_file_path, file_hash, mime_type }; } stickers::media save_media_impl( const std::experimental::filesystem::path& temp_file_path, const stickers::sha256 & file_hash, const std::string & mime_type, stickers::media_decency decency, const std::optional< std::string > & original_filename, const stickers::audit::blame & blame ) { auto connection{ stickers::postgres::connect() }; pqxx::work transaction{ *connection }; try { // Early return if media already exists return { file_hash, load_media_info_impl( file_hash, transaction ) }; } catch( const stickers::no_such_media& e ) {} transaction.exec_params( PSQL( INSERT INTO media.images ( image_hash, mime_type, decency, original_filename, uploaded, uploaded_by, uploaded_from ) VALUES ( $1, $2, $3, $4, $5, $6, $7 ) ; ), pqxx::binarystring{ file_hash.raw_digest() }, mime_type, decency, original_filename, 
blame.when, blame.who, blame.where ); auto final_file_path{ image_hash_to_disk_path( file_hash, mime_type ) }; std::experimental::filesystem::create_directories( final_file_path.parent_path() ); std::experimental::filesystem::rename( temp_file_path, final_file_path ); STICKERS_LOG( stickers::log_level::VERBOSE, "moved temp file \"", stickers::log_sanitize( temp_file_path ), "\" to final location \"", stickers::log_sanitize( temp_file_path ), "\"" ); // Commit transaction _after_ moving file transaction.commit(); STICKERS_LOG( stickers::log_level::INFO, "user ", blame.who, " uploaded file with SHA-256 ", file_hash.hex_digest() ); return { file_hash, { final_file_path, image_hash_to_url( file_hash, mime_type ), mime_type, decency, original_filename, blame.when, blame.who } }; } } namespace stickers // Media //////////////////////////////////////////////////// { media save_media( std::streambuf & file_contents, const std::optional< std::string >& original_filename, const std::optional< std::string >& mime_type, media_decency decency, const audit::blame & blame ) { auto info{ save_temp_file( file_contents, original_filename, mime_type ) }; return save_media_impl( info.temp_file_path, info.file_hash, info.mime_type, decency, original_filename, blame ); } media save_media( show::request & upload_request, const audit::blame& blame ) { auto upload_doc{ parse_request_content( upload_request ) }; if( !upload_doc.is_a< stickers::map_document >() ) throw stickers::handler_exit{ show::code::BAD_REQUEST, "invalid data format" }; auto& upload_map{ upload_doc.get< stickers::map_document >() }; auto found_decency_field = upload_map.find( "decency" ); auto decency{ media_decency::SAFE }; if( found_decency_field == upload_map.end() ) throw handler_exit{ show::code::BAD_REQUEST, "missing required field \"decency\"" }; auto& decency_field{ found_decency_field -> second }; if( !decency_field.is_a< string_document >() ) throw handler_exit{ show::code::BAD_REQUEST, "required field 
\"decency\" must be a string" }; else if( decency_field.get< string_document >() == "safe" ) decency = media_decency::SAFE; else if( decency_field.get< string_document >() == "questionable" ) decency = media_decency::QUESTIONABLE; else if( decency_field.get< string_document >() == "explicit" ) decency = media_decency::EXPLICIT; else throw handler_exit{ show::code::BAD_REQUEST, ( "required field \"decency\" must be one of \"safe\", " "\"questionable\", or \"explicit\"" ) }; // TODO: `stickers::file_document` type auto found_file_field{ upload_map.find( "file" ) }; if( found_file_field == upload_map.end() ) throw handler_exit{ show::code::BAD_REQUEST, "missing required field \"file\"" }; auto& file_field{ found_file_field -> second }; if( !file_field.is_a< string_document >() || !file_field.mime_type ) throw handler_exit{ show::code::BAD_REQUEST, "required field \"file\" must be a binary segment" }; auto info{ save_temp_file( file_field.get< string_document >(), file_field.name, file_field.mime_type ) }; return save_media_impl( info.temp_file_path, info.file_hash, info.mime_type, decency, file_field.name, blame ); } media_info load_media_info( const sha256& hash ) { auto connection{ postgres::connect() }; pqxx::work transaction{ *connection }; return load_media_info_impl( hash, transaction ); } } namespace stickers // Exceptions /////////////////////////////////////////////// { no_such_media::no_such_media( const sha256& hash ) : no_such_record_error{ "no such media record for file with hash " + hash.hex_digest() }, hash{ hash } {} indeterminate_mime_type::indeterminate_mime_type() : std::runtime_error{ "indeterminate_mime_type" } {} unacceptable_mime_type::unacceptable_mime_type( const std::string& mime_type ) : std::invalid_argument{ "unnaceptable or unsupported MIME type \"" + log_sanitize( mime_type ) + "\"" }, mime_type{ mime_type } {} } namespace stickers // Assertion //////////////////////////////////////////////// { void _assert_media_exist_impl::exec( 
pqxx::work & transaction, const std::string& ids_string ) { std::string query_string; ff::fmt( query_string, PSQL( WITH lookfor AS ( SELECT UNNEST( ARRAY[ {0} ] ) AS image_hash ) SELECT lookfor.image_hash FROM lookfor LEFT JOIN media.images AS img ON img.image_hash = lookfor.image_hash WHERE img.image_hash IS NULL ; ), ids_string ); auto result{ transaction.exec( query_string ) }; if( result.size() > 0 ) throw no_such_media{ result[ 0 ][ 0 ].as< sha256 >() }; } } <file_sep>/src/common/redis.hpp #pragma once #ifndef STICKERS_MOE_COMMON_REDIS_HPP #define STICKERS_MOE_COMMON_REDIS_HPP #include <redox.hpp> #endif <file_sep>/src/server/server.cpp #line 2 "server/server.cpp" #include "server.hpp" #include "routing.hpp" #include "../common/config.hpp" #include "../common/timestamp.hpp" #include "../common/logging.hpp" #include <show.hpp> #include <show/constants.hpp> #include <list> #include <mutex> #include <sstream> #include <thread> // Request global time handling ------------------------------------------------ namespace { thread_local stickers::timestamp request_time; void set_request_time_to_now() { request_time = stickers::current_timestamp(); } } namespace stickers { const timestamp& now() { return request_time; } } // Connection workers ---------------------------------------------------------- namespace { std::mutex worker_count_mutex; unsigned long long worker_count{ 0 }; void handle_connection( show::connection* connection ) { std::stringstream worker_id; worker_id << std::this_thread::get_id(); STICKERS_LOG( stickers::log_level::VERBOSE, "handling connection from ", connection -> client_address(), " with worker ", worker_id.str() ); connection -> timeout( stickers::config()[ "server" ][ "wait_for_connection" ] ); while( true ) try { show::request request{ *connection }; set_request_time_to_now(); stickers::route_request( request ); // HTTP/1.1 support auto connection_header{ request.headers().find( "Connection" ) }; if( connection_header != 
request.headers().end() && connection_header -> second.size() == 1 ) { const std::string& ch_val{ connection_header -> second[ 0 ] }; if( ch_val == "keep-alive" ) continue; else if( ch_val == "close" ) break; } if( request.protocol() <= show::HTTP_1_0 ) break; } catch( const show::request_parse_error& rpe ) { STICKERS_LOG( stickers::log_level::INFO, "client ", connection -> client_address(), " sent a malformed request" ); show::response response{ *connection, show::HTTP_1_1, show::code::BAD_REQUEST, { show::server_header, { "Content-Length", { "0" } } } }; break; } catch( const show::connection_interrupted& ci ) { STICKERS_LOG( stickers::log_level::VERBOSE, "connection to client ", connection -> client_address(), ( " interrupted (client disconnected or timed out), " "closing connection" ) ); break; } catch( const std::exception& e ) { STICKERS_LOG( stickers::log_level::ERROR, "uncaught exception in handle_connection(): ", e.what() ); break; } delete connection; { std::lock_guard< std::mutex > guard{ worker_count_mutex }; --worker_count; } STICKERS_LOG( stickers::log_level::VERBOSE, "cleaning up worker ", worker_id.str() ); } } namespace stickers { void run_server() { show::server server{ stickers::config()[ "server" ][ "host" ], stickers::config()[ "server" ][ "port" ], stickers::config()[ "server" ][ "wait_for_connection" ] }; while( true ) { try { auto connection{ new show::connection{ server.serve() } }; std::lock_guard< std::mutex > guard{ worker_count_mutex }; ++worker_count; std::thread worker{ handle_connection, connection }; worker.detach(); } catch( const show::connection_timeout& ct ) { STICKERS_LOG( log_level::VERBOSE, "timed out waiting for connection, looping..." 
); } if( current_log_level() >= log_level::VERBOSE ) { std::lock_guard< std::mutex > guard{ worker_count_mutex }; if( worker_count > 0 ) STICKERS_LOG( log_level::VERBOSE, "currently serving ", worker_count, " connections" ); } } } } <file_sep>/src/common/logging.hpp #pragma once #ifndef STICKERS_MOE_COMMON_LOGGING_HPP #define STICKERS_MOE_COMMON_LOGGING_HPP #include "config.hpp" #include "timestamp.hpp" #include "formatting.hpp" #include <ctime> #include <cctype> #include <iostream> #include <map> #include <sstream> #include <thread> namespace stickers { template< typename... Args > void log( log_level level, std::string file_name, long long file_line, Args... args ) { static const std::map< log_level, std::string > level_strings{ { log_level::SILENT , "SILENT" }, { log_level::ERROR , "ERROR" }, { log_level::WARNING, "WARNING" }, { log_level::INFO , "INFO" }, { log_level::VERBOSE, "VERBOSE" }, { log_level::DEBUG , "DEBUG" } }; if( current_log_level() >= level ) { auto t{ std::time( nullptr ) }; std::stringstream time_string; time_string << std::put_time( std::localtime( &t ), "%F %T%z" ); std::stringstream thread_id_string; thread_id_string << std::this_thread::get_id(); ff::writeln( ( level == log_level::ERROR ? std::cerr : std::cout ), "[" , level_strings.at( level ), "]", "[" , time_string.str() , "]", "[thread ", thread_id_string.str() , "]", current_log_level() >= log_level::DEBUG ? ( static_cast< std::string >( "[" ) + file_name + ":" + std::to_string( file_line ) + "] " ) : "", args... ); } } inline std::string log_sanitize( const std::string& raw ) { std::stringstream sanitized; sanitized << std::hex; for( const auto& c : raw ) { if( c == '"' ) sanitized << '\\'; if( isprint( c ) ) sanitized << c; else sanitized << "\\x" << std::setw( 2 ) << std::setfill( '0' ) << static_cast< unsigned int >( static_cast< unsigned char >( c ) ) ; } return sanitized.str(); } } #define STICKERS_LOG( LEVEL, ... 
) stickers::log( \ LEVEL, \ __FILE__, \ __LINE__, \ __VA_ARGS__ \ ) #endif <file_sep>/src/handlers/media.cpp #line 2 "handlers/media.cpp" #include "handlers.hpp" #include "../api/media.hpp" #include "../common/auth.hpp" #include "../common/config.hpp" #include "../common/json.hpp" #include "../common/logging.hpp" #include "../server/parse.hpp" #include <show/constants.hpp> namespace { void media_info_to_json( const stickers::sha256 & hash, const stickers::media_info& info, nlj::json & media_json ) { media_json = { { "hash" , hash.hex_digest() }, { "location" , info.file_url }, { "mime_type" , info.mime_type }, { "original_filename", nullptr }, { "uploaded" , stickers::to_iso8601_str( info.uploaded ) }, { "uploaded_by" , info.uploaded_by } }; if( info.original_filename ) media_json[ "original_filename" ] = *info.original_filename; switch( info.decency ) { case stickers::media_decency::SAFE: media_json[ "decency" ] = "safe"; break; case stickers::media_decency::QUESTIONABLE: media_json[ "decency" ] = "questionable"; break; case stickers::media_decency::EXPLICIT: media_json[ "decency" ] = "explicit"; break; } } } namespace stickers { void handlers::upload_media( show::request& request, const handler_vars_type& variables ) { auto auth = authenticate( request ); permissions_assert_all( auth.user_permissions, { "edit_public_pages" } ); try { auto uploaded{ save_media( request, { auth.user_id, "upload media", now(), request.client_address() } ) }; nlj::json media_json; media_info_to_json( uploaded.file_hash, uploaded.info, media_json ); auto media_json_string{ media_json.dump() }; show::response response{ request.connection(), show::HTTP_1_1, show::code::CREATED, { show::server_header, { "Content-Type", { "application/json" } }, { "Content-Length", { std::to_string( media_json_string.size() ) } }, { "Location", { uploaded.info.file_url } } } }; response.sputn( media_json_string.c_str(), media_json_string.size() ); } catch( const indeterminate_mime_type& e ) { throw 
stickers::handler_exit{ show::code::BAD_REQUEST, "indeterminate mime type" }; } catch( const unacceptable_mime_type& e ) { throw stickers::handler_exit{ show::code::BAD_REQUEST, "media not a supported MIME/file type" }; } } void handlers::get_media_info( show::request& request, const handler_vars_type& variables ) { auto found_hash_variable{ variables.find( "hash" ) }; if( found_hash_variable == variables.end() ) throw handler_exit{ show::code::NOT_FOUND, "need an image hash" }; try { auto hash{ sha256::from_hex_string( found_hash_variable -> second ) }; auto info{ load_media_info( hash ) }; nlj::json media_json; media_info_to_json( hash, info, media_json ); auto media_json_string{ media_json.dump() }; show::response response{ request.connection(), show::HTTP_1_1, show::code::OK, { show::server_header, { "Content-Type", { "application/json" } }, { "Content-Length", { std::to_string( media_json_string.size() ) } } } }; response.sputn( media_json_string.c_str(), media_json_string.size() ); } catch( const hash_error& e ) { throw handler_exit{ show::code::NOT_FOUND, "need a valid image hash" }; } catch( const no_such_media& e ) { throw handler_exit{ show::code::NOT_FOUND, e.what() }; } } } // DEBUG: ////////////////////////////////////////////////////////////////////// namespace { const std::string form{ R"###(<!doctype html> <html> <head> <meta charset=utf-8> <title>Form Analyzer</title> </head> <body> <form action="http://0.0.0.0:9090/media/upload" method="post" enctype="multipart/form-data"> <div> <label for="file">File:</label> <input type="file" id="file" name="file"></input> </div> <div> <label for="decency">Decency:</label> <select id="decency" name="decency"> <option value="safe">Safe</option> <option value="questionable">Questionable</option> <option value="explicit">Explicit</option> </select> </div> <div> <button type="submit">Upload</button> </div> </form> </body> </html> )###" }; } namespace stickers { void handlers::upload_media_debug_form( show::request& 
request, const handler_vars_type& variables ) { show::response response{ request.connection(), show::http_protocol::HTTP_1_0, { 200, "OK" }, { show::server_header, { "Content-Type" , { "text/html" } }, { "Content-Length", { std::to_string( form.size() ) } } } }; response.sputn( form.c_str(), form.size() ); } } <file_sep>/src/common/config.cpp #line 2 "common/config.cpp" #include "config.hpp" #include <thread> namespace { std::mutex config_mutex; nlj::json global_config; // Start at verbose so anything that happens before the config is loaded can // be diagnosed stickers::log_level log_level_cache{ stickers::log_level::VERBOSE }; void set_log_level() { auto level_setting{ global_config.find( "log_level" ) }; if( level_setting == global_config.end() || !level_setting -> is_string() ) log_level_cache = stickers::log_level::INFO; else if( *level_setting == "SILENT" ) log_level_cache = stickers::log_level::SILENT; else if( *level_setting == "ERROR" ) log_level_cache = stickers::log_level::ERROR; else if( *level_setting == "WARNING" ) log_level_cache = stickers::log_level::WARNING; else if( *level_setting == "INFO" ) log_level_cache = stickers::log_level::INFO; else if( *level_setting == "VERBOSE" ) log_level_cache = stickers::log_level::VERBOSE; else if( *level_setting == "DEBUG" ) log_level_cache = stickers::log_level::DEBUG; else log_level_cache = stickers::log_level::INFO; } } namespace stickers { const nlj::json& config() { // Not exactly great, but should be fine most of the time :^) std::lock_guard< std::mutex > guard{ config_mutex }; return global_config; } void set_config( const nlj::json& o ) { std::lock_guard< std::mutex > guard{ config_mutex }; if( global_config == nullptr ) global_config = o; set_log_level(); } void set_config( const std::string& s ) { std::lock_guard< std::mutex > guard{ config_mutex }; if( global_config == nullptr ) global_config = nlj::json::parse( s ); set_log_level(); } // void open_config( const std::string& f ) // { // 
std::lock_guard< std::mutex > guard{ config_mutex }; // std::ifstream config_file( f ); // if( config_file.is_open() ) // { // config_file >> global_config; // set_log_level(); // } // else // { // ff::writeln( // std::cerr, // "could not open config file ", // argv[ 1 ] // ); // return 2; // } // } log_level current_log_level() { std::lock_guard< std::mutex > guard{ config_mutex }; return log_level_cache; } } <file_sep>/src/handlers/user.cpp #line 2 "handlers/user.cpp" #include "handlers.hpp" #include "../api/media.hpp" #include "../api/user.hpp" #include "../common/auth.hpp" #include "../common/crud.hpp" #include "../common/json.hpp" #include "../common/logging.hpp" #include "../server/parse.hpp" #include <show/constants.hpp> #include <array> namespace stickers { void handlers::create_user( show::request& request, const handler_vars_type& variables ) { auto auth{ authenticate( request ) }; permissions_assert_all( auth.user_permissions, { "create_user" } ); auto details_doc{ parse_request_content( request ) }; if( !details_doc.is_a< map_document >() ) throw handler_exit{ show::code::BAD_REQUEST, "invalid data format" }; auto& details_map{ details_doc.get< map_document >() }; for( const auto& field : { std::string{ "password" }, std::string{ "display_name" }, std::string{ "email" } } ) if( details_map.find( field ) == details_map.end() ) throw handler_exit{ show::code::BAD_REQUEST, "missing required field \"" + static_cast< std::string >( field ) + "\"" }; else if( !details_map[ field ].is_a< string_document >() ) throw handler_exit{ show::code::BAD_REQUEST, "required field \"" + static_cast< std::string >( field ) + "\" must be a string" }; user_info details; details.password = details_map[ "password" ].get< string_document >(); details.created = now(); details.display_name = details_map[ "display_name" ].get< string_document >(); details.email = details_map[ "email" ].get< string_document >(); try { auto created_user{ create_user( details, { auth.user_id, "create 
user handler", now(), request.client_address() } ) }; nlj::json details_json{ { "user_id" , created_user.id }, { "created" , to_iso8601_str( created_user.info.created ) }, { "revised" , to_iso8601_str( created_user.info.revised ) }, { "display_name", created_user.info.display_name }, { "email" , created_user.info.email } }; if( created_user.info.real_name ) details_json[ "real_name" ] = *created_user.info.real_name; else details_json[ "real_name" ] = nullptr; if( created_user.info.avatar_hash ) details_json[ "avatar" ] = load_media_info( *created_user.info.avatar_hash ).file_url; else details_json[ "avatar" ] = nullptr; auto user_json{ details_json.dump() }; show::response response{ request.connection(), show::HTTP_1_1, show::code::CREATED, { show::server_header, { "Content-Type", { "application/json" } }, { "Content-Length", { std::to_string( user_json.size() ) } }, { "Location", { "/user/" + static_cast< std::string >( created_user.id ) } } } }; response.sputn( user_json.c_str(), user_json.size() ); } catch( const no_such_record_error& e ) { throw handler_exit{ show::code::BAD_REQUEST, e.what() }; } } void handlers::get_user( show::request& request, const handler_vars_type& variables ) { auto found_user_id_variable{ variables.find( "user_id" ) }; if( found_user_id_variable == variables.end() ) throw handler_exit{ show::code::NOT_FOUND, "need a user ID" }; stickers::bigid user_id{ bigid::MIN() }; try { user_id = bigid::from_string( found_user_id_variable -> second ); } catch( const std::invalid_argument& e ) { throw handler_exit{ show::code::NOT_FOUND, "need a valid user ID" }; } try { auto info{ load_user( user_id ) }; nlj::json user_json{ { "user_id" , user_id }, { "created" , to_iso8601_str( info.created ) }, { "revised" , to_iso8601_str( info.revised ) }, { "display_name", info.display_name }, { "email" , info.email } }; if( info.real_name ) user_json[ "real_name" ] = *info.real_name; else user_json[ "real_name" ] = nullptr; if( info.avatar_hash ) user_json[ 
"avatar" ] = load_media_info( *info.avatar_hash ).file_url; else user_json[ "avatar" ] = nullptr; auto user_json_string = user_json.dump(); show::response response{ request.connection(), show::HTTP_1_1, show::code::OK, { show::server_header, { "Content-Type", { "application/json" } }, { "Content-Length", { std::to_string( user_json_string.size() ) } } } }; response.sputn( user_json_string.c_str(), user_json_string.size() ); } catch( const no_such_user& e ) { throw handler_exit{ show::code::NOT_FOUND, "no such user" }; } } void handlers::edit_user( show::request& request, const handler_vars_type& variables ) { // Authenticate first, check permissions later auto auth{ authenticate( request ) }; auto found_user_id_variable{ variables.find( "user_id" ) }; if( found_user_id_variable == variables.end() ) throw handler_exit{ show::code::NOT_FOUND, "need a user ID" }; stickers::bigid user_id{ bigid::MIN() }; try { user_id = bigid::from_string( found_user_id_variable -> second ); } catch( const std::invalid_argument& e ) { throw handler_exit{ show::code::NOT_FOUND, "need a valid user ID" }; } if( auth.user_id == user_id ) permissions_assert_all( auth.user_permissions, { "edit_own_user" } ); else permissions_assert_all( auth.user_permissions, { "edit_any_user" } ); try { throw handler_exit{ show::code::NOT_IMPLEMENTED, "Not implemented" }; } catch( const no_such_user& e ) { throw handler_exit{ show::code::NOT_FOUND, "no such user" }; } catch( const no_such_record_error& e ) { throw handler_exit{ show::code::BAD_REQUEST, e.what() }; } } void handlers::delete_user( show::request& request, const handler_vars_type& variables ) { // Authenticate first, check permissions later auto auth{ authenticate( request ) }; auto found_user_id_variable{ variables.find( "user_id" ) }; if( found_user_id_variable == variables.end() ) throw handler_exit{ show::code::NOT_FOUND, "need a user ID" }; stickers::bigid user_id{ bigid::MIN() }; try { user_id = bigid::from_string( found_user_id_variable 
-> second ); } catch( const std::invalid_argument& e ) { throw handler_exit{ show::code::NOT_FOUND, "need a valid user ID" }; } if( auth.user_id == user_id ) permissions_assert_all( auth.user_permissions, { "delete_own_user" } ); else permissions_assert_all( auth.user_permissions, { "delete_any_user" } ); try { delete_user( user_id, { auth.user_id, "delete user handler", now(), request.client_address() } ); std::string null_json = "null"; show::response response{ request.connection(), show::HTTP_1_1, show::code::OK, { show::server_header, { "Content-Type", { "application/json" } }, { "Content-Length", { std::to_string( null_json.size() ) } } } }; response.sputn( null_json.c_str(), null_json.size() ); } catch( const no_such_user& nsu ) { throw handler_exit{ show::code::NOT_FOUND, "no such user" }; } } } <file_sep>/src/common/formatting.hpp #pragma once #ifndef STICKERS_MOE_COMMON_FORMATTING_HPP #define STICKERS_MOE_COMMON_FORMATTING_HPP // #include <fastformat/inserters/ch.hpp> // #include <fastformat/inserters/to_x.hpp> // #include <fastformat/shims/conversion/filter_type/bool.hpp> // #include <fastformat/shims/conversion/filter_type/reals.hpp> // #include <fastformat/shims/conversion/filter_type/void_pointers.hpp> // #include <fastformat/sinks/ostream.hpp> #pragma clang diagnostic push #pragma clang diagnostic ignored "-Wmacro-redefined" #define __clang_major__ 9 #include <fastformat/ff.hpp> #define __clang_major__ 5 #pragma clang diagnostic pop #ifdef FASTFORMAT_NO_FILTER_TYPE_CONVERSION_SHIM_SUPPORT #error Cannot compile this file with a compiler that does not support the filter_type mechanism (FastFormat) #endif // Double expansion trick to print the value of a macro #define MACROTOSTR_A( D ) #D #define MACROTOSTR( D ) MACROTOSTR_A( D ) #define PTR_HEX_WIDTH ( ( int )( sizeof( void* ) * 2 ) ) #endif <file_sep>/src/common/string_utils.hpp #pragma once #ifndef STICKERS_MOE_COMMON_STRING_UTILS_HPP #define STICKERS_MOE_COMMON_STRING_UTILS_HPP #include <string> 
namespace stickers { template< typename CollectionType, typename ValueType > CollectionType split( const ValueType& v, const ValueType& joiner ) { CollectionType c; auto segment_begin{ v.begin() }; for( auto iter = v.begin(); iter != v.end(); ++iter ) { { auto joiner_candidate_begin{ iter + 1 - joiner.size() }; auto joiner_candidate_end { iter + 1 }; if( ValueType{ joiner_candidate_begin, joiner_candidate_end } == joiner ) { c.emplace_back( segment_begin, joiner_candidate_begin ); segment_begin = joiner_candidate_end; } } } c.emplace_back( segment_begin, v.end() ); return c; } template< typename CollectionType, typename ValueType > ValueType join( const CollectionType& c, const ValueType & joiner ) { ValueType v; for( auto iter = c.begin(); iter != c.end(); ) { v += *iter; if( ++iter != c.end() ) v += joiner; } return v; } } #endif <file_sep>/src/common/uuid.hpp #pragma once #ifndef STICKERS_MOE_COMMON_UUID_HPP #define STICKERS_MOE_COMMON_UUID_HPP #include "postgres.hpp" #include <exception> // std::invalid_argument #include <sstream> // std::ostringstream #include <string> namespace stickers { class uuid { // So libpqxx can use uuid's protected default constructor friend uuid pqxx::field::as< uuid >() const; friend uuid pqxx::field::as< uuid >( const uuid& ) const; protected: std::string value; uuid(); public: uuid( const std::string& ); uuid( const char*, std::size_t ); uuid( const uuid& ); std::string raw_value () const; std::string hex_value () const; std::string hex_value_8_4_4_4_12() const; // Use a function rather than the default constructor as there is // overhead generating a new UUID which we'd like to avoid in some cases // where the default constructor would be used static uuid generate(); // Parse from a 32-char hex string or 8-4-4-4-12 hex string static uuid from_string( const std::string& ); }; } // Template specialization of `pqxx::string_traits<>(&)` for `stickers::uuid`, // which allows use of `pqxx::field::to<>(&)` and `pqxx::field::as<>(&)` 
namespace pqxx { template<> struct string_traits< stickers::uuid > { using subject_type = stickers::uuid; static constexpr const char* name() noexcept { return "stickers::uuid"; } static constexpr bool has_null() noexcept { return false; } static bool is_null( const stickers::uuid& ) { return false; } [[noreturn]] static stickers::uuid null() { internal::throw_null_conversion( name() ); } static void from_string( const char str[], stickers::uuid& v ) { try { v = stickers::uuid( str ); } catch( const std::invalid_argument& e ) { throw argument_error{ "Failed conversion to " + static_cast< std::string >( name() ) + ": '" + static_cast< std::string >( str ) + "'" }; } } static std::string to_string( const stickers::uuid& v ) { std::string encoded{ v.raw_value() }; std::ostringstream decoded; decoded << std::hex; for( auto& b : encoded ) if( std::isprint( b ) ) decoded << b; else decoded << "\\x" << static_cast< unsigned int >( b ); return decoded.str(); } }; } #endif <file_sep>/src/handlers/person.cpp #line 2 "handlers/person.cpp" #include "handlers.hpp" #include "../api/person.hpp" #include "../common/auth.hpp" #include "../common/crud.hpp" #include "../common/json.hpp" #include "../server/parse.hpp" #include <show/constants.hpp> namespace { void person_to_json( const stickers::bigid & id, const stickers::person_info& info, nlj::json & person_json ) { person_json = { { "person_id", id }, { "created" , stickers::to_iso8601_str( info.created ) }, { "revised" , stickers::to_iso8601_str( info.revised ) }, { "about" , info.about } }; if( info.has_user() ) { person_json[ "name" ] = nullptr; person_json[ "user_id" ] = std::get< stickers::bigid >( info.identifier ); } else { person_json[ "user_id" ] = nullptr; person_json[ "name" ] = std::get< std::string >( info.identifier ); } } stickers::person_info person_info_from_document( const stickers::document& details_doc ) { if( !details_doc.is_a< stickers::map_document >() ) throw stickers::handler_exit{ show::code::BAD_REQUEST, 
"invalid data format" }; auto& details_map{ details_doc.get< stickers::map_document >() }; if( details_map.find( "about" ) == details_map.end() ) throw stickers::handler_exit{ show::code::BAD_REQUEST, "missing required field \"about\"" }; else if( !details_map[ "about" ].is_a< stickers::string_document >() ) throw stickers::handler_exit{ show::code::BAD_REQUEST, "required field \"about\" must be a string" }; bool has_user{ details_map.find( "user_id" ) != details_map.end() }; if( !has_user && details_map.find( "name" ) == details_map.end() ) throw stickers::handler_exit{ show::code::BAD_REQUEST, "missing one of \"name\" or \"user\"" }; if( has_user ) { if( !details_map[ "user_id" ].is_a< stickers::string_document >() ) throw stickers::handler_exit{ show::code::BAD_REQUEST, "required field \"user\" must be a string" }; stickers::bigid user_id{ stickers::bigid::MIN() }; try { user_id = stickers::bigid::from_string( details_map[ "user_id" ].get< stickers::string_document >() ); } catch( const std::invalid_argument& e ) { throw stickers::handler_exit{ show::code::BAD_REQUEST, "required field \"user\" is not a valid user ID" }; } return { stickers::now(), stickers::now(), details_map[ "about" ].get< stickers::string_document >(), user_id }; } else { if( !details_map[ "name" ].is_a< stickers::string_document >() ) throw stickers::handler_exit{ show::code::BAD_REQUEST, "required field \"name\" must be a string" }; return { stickers::now(), stickers::now(), details_map[ "about" ].get< stickers::string_document >(), details_map[ "name" ].get< stickers::string_document >() }; } } } namespace stickers { void handlers::create_person( show::request& request, const handler_vars_type& variables ) { auto auth{ authenticate( request ) }; permissions_assert_all( auth.user_permissions, { "edit_public_pages" } ); try { auto created{ create_person( person_info_from_document( parse_request_content( request ) ), { auth.user_id, "create person handler", now(), request.client_address() } ) 
}; nlj::json person_json; person_to_json( created.id, created.info, person_json ); auto person_json_string{ person_json.dump() }; show::response response{ request.connection(), show::HTTP_1_1, show::code::CREATED, { show::server_header, { "Content-Type", { "application/json" } }, { "Content-Length", { std::to_string( person_json_string.size() ) } }, { "Location", { "/person/" + static_cast< std::string >( created.id ) } } } }; response.sputn( person_json_string.c_str(), person_json_string.size() ); } catch( const no_such_record_error& e ) { throw handler_exit{ show::code::BAD_REQUEST, e.what() }; } } void handlers::get_person( show::request& request, const handler_vars_type& variables ) { auto found_person_id_variable{ variables.find( "person_id" ) }; if( found_person_id_variable == variables.end() ) throw handler_exit{ show::code::NOT_FOUND, "need a person ID" }; bigid person_id{ bigid::MIN() }; try { person_id = bigid::from_string( found_person_id_variable -> second ); } catch( const std::invalid_argument& e ) { throw handler_exit{ show::code::NOT_FOUND, "need a valid person ID" }; } try { auto info{ load_person( person_id ) }; nlj::json person_json; person_to_json( person_id, info, person_json ); auto person_json_string{ person_json.dump() }; show::response response{ request.connection(), show::HTTP_1_1, show::code::OK, { show::server_header, { "Content-Type", { "application/json" } }, { "Content-Length", { std::to_string( person_json_string.size() ) } } } }; response.sputn( person_json_string.c_str(), person_json_string.size() ); } catch( const no_such_person& e ) { throw handler_exit{ show::code::NOT_FOUND, e.what() }; } } void handlers::edit_person( show::request& request, const handler_vars_type& variables ) { auto auth{ authenticate( request ) }; permissions_assert_all( auth.user_permissions, { "edit_public_pages" } ); auto found_person_id_variable{ variables.find( "person_id" ) }; if( found_person_id_variable == variables.end() ) throw handler_exit{ 
show::code::NOT_FOUND, "need a person ID" }; stickers::bigid person_id{ bigid::MIN() }; try { person_id = bigid::from_string( found_person_id_variable -> second ); } catch( const std::invalid_argument& e ) { throw handler_exit{ show::code::NOT_FOUND, "need a valid person ID" }; } try { auto updated_info{ update_person( { person_id, person_info_from_document( parse_request_content( request ) ) }, { auth.user_id, "update person handler", now(), request.client_address() } ) }; nlj::json person_json; person_to_json( person_id, updated_info, person_json ); auto person_json_string{ person_json.dump() }; show::response response{ request.connection(), show::HTTP_1_1, show::code::OK, { show::server_header, { "Content-Type", { "application/json" } }, { "Content-Length", { std::to_string( person_json_string.size() ) } } } }; response.sputn( person_json_string.c_str(), person_json_string.size() ); } catch( const no_such_person& e ) { throw handler_exit{ show::code::NOT_FOUND, e.what() }; } catch( const no_such_record_error& e ) { throw handler_exit{ show::code::BAD_REQUEST, e.what() }; } } void handlers::delete_person( show::request& request, const handler_vars_type& variables ) { auto auth{ authenticate( request ) }; permissions_assert_all( auth.user_permissions, { "edit_public_pages" } ); auto found_person_id_variable{ variables.find( "person_id" ) }; if( found_person_id_variable == variables.end() ) throw handler_exit{ show::code::NOT_FOUND, "need a person ID" }; stickers::bigid person_id{ bigid::MIN() }; try { person_id = bigid::from_string( found_person_id_variable -> second ); } catch( const std::invalid_argument& e ) { throw handler_exit{ show::code::NOT_FOUND, "need a valid person ID" }; } try { delete_person( person_id, { auth.user_id, "delete person handler", now(), request.client_address() } ); std::string null_json{ "null" }; show::response response{ request.connection(), show::HTTP_1_1, show::code::OK, { show::server_header, { "Content-Type", { "application/json" 
} }, { "Content-Length", { std::to_string( null_json.size() ) } } } }; response.sputn( null_json.c_str(), null_json.size() ); } catch( const no_such_person& e ) { throw handler_exit{ show::code::NOT_FOUND, e.what() }; } } } <file_sep>/src/common/auth.hpp #pragma once #ifndef STICKERS_MOE_COMMON_AUTH_HPP #define STICKERS_MOE_COMMON_AUTH_HPP #include "bigid.hpp" #include "jwt.hpp" #include "../audit/blame.hpp" #include <show.hpp> #include <exception> #include <set> #include <string> namespace stickers { using permissions_type = std::set< std::string >; struct auth_info { bigid user_id; permissions_type user_permissions; }; auth_info authenticate( const show::request& ); jwt generate_auth_token_for_user( bigid, const audit::blame& ); // void set_user_permissions( bigid, const permissions_type& ); permissions_type get_user_permissions( bigid ); void permissions_assert_any( const permissions_type& got, const permissions_type& expect ); void permissions_assert_all( const permissions_type& got, const permissions_type& expect ); class authentication_error : public std::runtime_error { using runtime_error::runtime_error; }; // Authorization error messages will be publicly displayed by the API class authorization_error : public std::runtime_error { using runtime_error::runtime_error; }; } #endif <file_sep>/src/utilities/password_gen.cpp #line 2 "utilities/password_gen.cpp" #include "../api/user.hpp" #include "../common/logging.hpp" #include "../common/postgres.hpp" #include <fstream> #include <iostream> #include <cstdlib> // std::srand() #include <ctime> // std::time() int main( int argc, char* argv[] ) { if( argc < 3 ) { STICKERS_LOG( stickers::log_level::ERROR, "usage: ", argv[ 0 ], " config.json password" ); return -1; } std::srand( std::time( nullptr ) ); try { { nlj::json config; std::ifstream config_file{ argv[ 1 ] }; if( config_file.is_open() ) { config_file >> config; if( !config_file.good() ) { STICKERS_LOG( stickers::log_level::ERROR, "config file ", argv[ 1 ], " 
not a valid JSON file" ); return -1; } } else { STICKERS_LOG( stickers::log_level::ERROR, "could not open config file ", argv[ 1 ] ); return 2; } stickers::set_config( config ); } auto pw{ stickers::hash_password( argv[ 2 ] ) }; std::cout << "('" << pqxx::string_traits< stickers::password_type >::to_string( pw.type() ) << "', decode('" << pw.value< stickers::scrypt >().hex_digest() << "', 'hex'), decode('" << pw.value< stickers::scrypt >().hex_salt() << "', 'hex'), " << pw.factor() << ")::users.password" << std::endl ; } catch( const std::exception &e ) { STICKERS_LOG( stickers::log_level::ERROR, "uncaught std::exception in main(): ", e.what() ); return -1; } catch( ... ) { STICKERS_LOG( stickers::log_level::ERROR, "uncaught non-std::exception in main()" ); return -1; } return 0; }<file_sep>/src/common/sorting.hpp #pragma once #ifndef STICKERS_MOE_COMMON_SORTING_HPP #define STICKERS_MOE_COMMON_SORTING_HPP #include <optional> #include <string> namespace stickers { class byte_string : public std::basic_string< std::uint8_t > { public: using std::basic_string< std::uint8_t >::basic_string; byte_string( const std::string& o ) : basic_string{ reinterpret_cast< const std::uint8_t* >( o.c_str() ), o.size() } {} byte_string( std::string&& o ) : basic_string{ *reinterpret_cast< byte_string* >( &o ) } {} byte_string( const char* o ) : basic_string{ reinterpret_cast< const std::uint8_t* >( o ) } {} }; byte_string next_sorting_key_between( std::optional< byte_string > before, std::optional< byte_string > after ); } #endif
997b43e4d5cf2c31388e1fd37a9b7a8b1c6f4c3a
[ "CMake", "C++" ]
59
C++
JadeMatrix/stickers.moe-API
c5ba58f6f2edd17763335fb0725036f6544c6f75
3d927100b295ce99872047ee7faa2e4086393738
refs/heads/master
<repo_name>nkchouhan/starterpack-app<file_sep>/app/models/photo.rb class Photo < ActiveRecord::Base acts_as_taggable paginates_per 12 mount_uploader :avatar, AvatarUploader belongs_to :user scope :desc, -> {order("created_at DESC")} end <file_sep>/app/controllers/photos_controller.rb class PhotosController < ApplicationController before_action :authenticate_user!, except: :show def new @photo = Photo.new end def create image_list="" ["photo_first", "photo_second", "photo_third", "photo_four"].each_with_index do |pho, index| if index==0 image_list = Magick::ImageList.new(params[pho].path) else image_list += Magick::ImageList.new(params[pho].path) end end title = "\n" + params["title"] montage = image_list.montage do self.tile = "2x2" self.title = title self.geometry = '200x200!+0+0' end name = Time.now.to_i.to_s + ".png" location = ('tmp/'+name) montage.write(location) photo = current_user.photos.new(title: title, tag_list: params[:tags]) photo.avatar = File.open(location) photo.save File.delete(location) redirect_to root_path end def show @photo = Photo.find(params[:id])#current_user.photos.find(params[:id]) end end <file_sep>/app/helpers/application_helper.rb module ApplicationHelper def formated_date(date) date.strftime('%m-%d-%Y') end def full_path(absolute_url) request.protocol + request.host_with_port + absolute_url end end
008fee6d53514f04004825603d512f84a871f6a4
[ "Ruby" ]
3
Ruby
nkchouhan/starterpack-app
d94cc06b9ebd28d8b5f92161c729b404edeafbe1
30bb12d81a89e68d163f2f30b5a71a2deb3698d2
refs/heads/master
<repo_name>satoshi-3104/Laravel-Weather-Research<file_sep>/app/Http/Services/DarkSky.php <?php namespace App\Services; use GuzzleHttp\Client; class DarkSky { public function researchWeather(string $city){ $appid = env('DARKSKY_ACCESS_KEY'); //各都市の緯度と軽度 $cities = array( "東京" => Array ( "latitude" => 36, "longitude" => 140, ), "金沢" => Array ( "latitude" => 36, "longitude" => 136, ) ); if (array_key_exists($city, $cities)) { $latitude = $cities[$city]['latitude']; $longitude = $cities[$city]['longitude']; } else { $latitude = null; $longitude = null; } $client = new Client(); $response = $client //latitude->緯度,longitude->経度 ->get("https://api.darksky.net/forecast/${appid}/${latitude},${longitude}?units=si&lang=ja", ['http_errors' => false]); return json_decode($response->getBody()->getContents(), true); } }
e0bff1ebeb79cba00ee2550b3b838adc4e4d5656
[ "PHP" ]
1
PHP
satoshi-3104/Laravel-Weather-Research
40974a3f0c27c58731c0e8e2f5b026435b9dedcb
901411005e5a231a6ca521b1c9bff2e3de268e1e
refs/heads/master
<repo_name>npntraining/CSJ_2018_11_18<file_sep>/module03-working-with-webcontrols/src/test/java/Facebook_DropDonwListTest.java import org.openqa.selenium.By; import org.openqa.selenium.WebElement; import org.openqa.selenium.support.ui.Select; import org.testng.annotations.Test; public class Facebook_DropDonwListTest extends WebDriverSetup{ @Test public void dropDownList() { driverSetup("http://facebook.com"); WebElement day = driver.findElement(By.id("day")); WebElement month = driver.findElement(By.id("month")); WebElement year = driver.findElement(By.id("year")); dropDown(day, "27"); dropDown(month, "5"); dropDown(year, "1980"); } public void dropDown(WebElement element, String text) { Select sel = new Select (element); sel.selectByValue(text); } } <file_sep>/module05-exception-handling/src/com/npntraining/handson/InvalidStudentIDException.java package com.npntraining.handson; public class InvalidStudentIDException extends java.lang.Exception { public InvalidStudentIDException(String message) { super(message); } } <file_sep>/module03-java-oops/src/com/npntraining/handson/inheritence/single/Test1.java package com.npntraining.handson.inheritence.single; public class Test1 { void testMethod1() { System.out.println("From testMethod1"); } } <file_sep>/TestNG/src/test/java/priority/Priority.java package priority; import org.testng.annotations.Test; public class Priority { @Test (priority = 2, description = "This is priority2 test case") public void test1() { System.out.println("This is first test case"); } @Test (priority = 3, description = "This is priority3 test case") public void test2() { System.out.println("This is second test case"); } @Test (priority = 1, description = "This is priority1 test case") public void test3() { System.out.println("This is third test case"); } } <file_sep>/module03-java-oops/src/com/npntraining/handson/inheritence/hierarchial/TestMain.java package com.npntraining.handson.inheritence.hierarchial; public class TestMain { void testMain() { 
System.out.println("From testMain method"); } public static void main(String[] args) { //Upcasting ClassA a = (ClassA) new TestMain(); a.testMain(); a.classA(); } } <file_sep>/module03-working-with-webcontrols/src/test/java/RadioButtonTest.java import org.openqa.selenium.By; import org.openqa.selenium.WebElement; import org.testng.Assert; import org.testng.annotations.Test; public class RadioButtonTest extends WebDriverSetup{ @Test public void testRadioButton() { driverSetup("file:///E:/NPN/NPN-Classes/CSJ_2018_11_18/Working%20with%20WebControls/htmls/RadioButton_Test1.html"); WebElement female = driver.findElement(By.xpath("//input[@value='female']")); female.click(); boolean selected = female.isSelected(); if(selected = true) { System.out.println("The female radio button is already selected" +selected); } else { System.out.println("The female radio button is not selected"+selected); } // Assert.assertEquals(selected, false); } } <file_sep>/module02-class-objects/src/com/npntraining/handson/InstanceBlock01_1.java package com.npntraining.handson; class Remote{ int num; { num=23; } public Remote(int num) { super(); this.num = num; } } public class InstanceBlock01_1 { } <file_sep>/TestNG/src/test/java/Annotations/Annotations.java package Annotations; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; import org.testng.annotations.BeforeMethod; import org.testng.annotations.BeforeTest; import org.testng.annotations.Test; public class Annotations { @AfterClass public void postAction() { System.out.println("This is After class"); } @Test public void testMethod() { System.out.println("This is a test case"); } @Test public void testMethod1() { System.out.println("This is testcase-1"); } public void testMethod2() { System.out.println("This is testcase-2"); } @BeforeClass public void preRequisite() { System.out.println("This is Before Class"); } @BeforeTest public void beforeTest() { System.out.println("This is Before Test"); } 
@BeforeMethod public void beforeMethod() { System.out.println("This is Before method"); } } <file_sep>/module02-class-objects/src/com/npntraining/assignments/Months.java package com.npntraining.assignments; public enum Months { January("Jan",31,"Winter"), February("Feb",28,"Winter"), March("Mar",31,"Summer"); private String monthN; private int noOfDays; private String season; private Months(String monthN, int noOfDays, String season) { this.monthN = monthN; this.noOfDays = noOfDays; this.season = season; } public String getMonthN() { return monthN; } public void setMonthN(String monthN) { this.monthN = monthN; } public int getNoOfDays() { return noOfDays; } public void setNoOfDays(int noOfDays) { this.noOfDays = noOfDays; } public String getSeason() { return season; } public void setSeason(String season) { this.season = season; } } <file_sep>/TestNG/src/test/java/selenium/Selenium.java package selenium; import org.openqa.selenium.WebDriver; import org.openqa.selenium.chrome.ChromeDriver; import org.testng.Assert; import org.testng.annotations.AfterClass; import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; public class Selenium { WebDriver driver; @BeforeClass public void setUp() { System.setProperty("webdriver.chrome.driver", "E:\\NPN\\NPN Training - Selenium Architect Training\\drivers\\chromedriver.exe"); driver = new ChromeDriver(); } @Test public void testSelenium() { driver.get("http://npntraining.com"); // String actualTitle = driver.getTitle(); //// System.out.println("The actual title is :: " + actualTitle); //// String expectedTitle="Big Data, Spark, Hadoop, Python Training Institute in Bangalore"; //// Assert.assertEquals(actualTitle, expectedTitle); Assert.assertEquals(driver.getTitle(), "Big Data, Spark, Hadoop, Python Training Institute in Bangalore"); } @AfterClass public void tearDown() { driver.quit(); } } <file_sep>/module02-class-objects/src/com/npntraining/assignments/Months_Mine_Real.java package 
com.npntraining.assignments; import javax.swing.JOptionPane; import com.npntraining.assignments.Months_Mine; public class Months_Mine_Real { public static void main(String[] args) { String monthName = JOptionPane.showInputDialog("Enter Month Name"); Months months = Months.valueOf(monthName); System.out.println("The Month Name Is:" + months.getMonthN()); System.out.println("The No of Days per " + months.getMonthN() + " is:" + months.getNoOfDays()); System.out.println("The Season of the month " + months.getMonthN() + " is:" + months.getSeason()); } } <file_sep>/module02-class-objects/src/com/npntraining/assignments/Months_Mine.java package com.npntraining.assignments; import javax.swing.JOptionPane; public class Months_Mine { private String setName; private int noOfDays; private String season; public static void main(String[] args) { String month = JOptionPane.showInputDialog("Enter Month Name:"); System.out.println("Month is:"+month); Months_Mine jan = new Months_Mine(); jan.setName="January"; jan.noOfDays=31; jan.season="Winter"; Months_Mine feb=new Months_Mine(); feb.setName="February"; feb.noOfDays=28; feb.season="Winter"; Months_Mine march=new Months_Mine(); march.setName="March"; march.noOfDays=31; march.season="Summer"; if (month == "January") { System.out.println("Shortname is"+ jan.setName); System.out.println("No.of Days is" + jan.noOfDays); System.out.println("For" + month + "The Season is:" + jan.season); } else if (month == "Febraury") { } } } <file_sep>/README.md # CSJ_2018_11_18 This Repository is for Selenium Certificate Training batch commenced from November 18, 2018. 
<file_sep>/module07-advanced-interactions/src/test/java/Mouse_ContextClick.java import org.openqa.selenium.By; import org.openqa.selenium.WebElement; import org.openqa.selenium.interactions.Actions; import org.testng.annotations.Test; public class Mouse_ContextClick extends WebDriverSetup{ @Test public void contextClickTest() { driverSetup("http://swisnl.github.io/jQuery-contextMenu/demo.html"); WebElement menu = driver.findElement(By.cssSelector("span.context-menu-one")); Actions action = new Actions(driver); action.contextClick(menu).perform(); } } <file_sep>/module02-class-objects/src/com/npntraining/handson/ThisKeyword_Demo02.java package com.npntraining.handson; class ThisKeyword2 { public ThisKeyword2() { // Auto generate constructor hub this("npn"); System.out.println("No Arguement Constructor"); } public ThisKeyword2(String str) { this("npn",123); System.out.println("String"); } public ThisKeyword2(String str,int num) { this(num,str); System.out.println("(String,int)"); } public ThisKeyword2(int num,String str) { System.out.println("(int,String)"); } } public class ThisKeyword_Demo02 { public static void main(String[] args) { ThisKeyword2 obj1=new ThisKeyword2(); ThisKeyword2 obj2=new ThisKeyword2("npn",123); } } <file_sep>/module03-working-with-webcontrols/src/test/java/WebDriverSetup.java import org.openqa.selenium.WebDriver; import org.openqa.selenium.chrome.ChromeDriver; import org.openqa.selenium.firefox.FirefoxDriver; public class WebDriverSetup { WebDriver driver; public void driverSetup(String appURL) { System.setProperty("webdriver.gecko.driver", "E:\\NPN\\NPN Training - Selenium Architect Training\\drivers\\geckodriver.exe"); driver=new FirefoxDriver(); driver.manage().window().maximize(); driver.get(appURL); } } <file_sep>/module02-class-objects/src/com/npntraining/handson/EmployeeClient.java package com.npntraining.handson; public class EmployeeClient { public static void main(String[] args) { Employee obj1 = new Employee(); obj1.setSalary(0); 
obj1.setCompanyName("Feel Good Factor"); obj1.setEmployeeId(1099); } } <file_sep>/module07-advanced-interactions/src/test/java/MouseBasedEvents.java import java.util.concurrent.TimeUnit; import org.openqa.selenium.By; import org.openqa.selenium.Keys; import org.openqa.selenium.Point; import org.openqa.selenium.WebElement; import org.openqa.selenium.interactions.Actions; import org.testng.annotations.Test; public class MouseBasedEvents extends WebDriverSetup{ //@Test public void dragAndDropTest() { driverSetup("file:///E:/NPN/NPN-Classes/CSJ_2018_11_18/Advanced%20Interactions/htmls/DragAndDrop.html"); WebElement target = driver.findElement(By.id("obliterate")); WebElement sourceOne = driver.findElement(By.id("one")); WebElement sourceTwo = driver.findElement(By.id("two")); WebElement sourceThree = driver.findElement(By.id("three")); WebElement sourceFour = driver.findElement(By.id("four")); WebElement sourceFive = driver.findElement(By.id("five")); Actions act = new Actions(driver); // This is flow approach // act.moveToElement(sourceOne).clickAndHold().release(target).build().perform(); // This is direct dragAndDrop approach // act.dragAndDrop(sourceTwo, target).perform(); //Flow approach for all the elements act.clickAndHold(sourceOne).release(target) .clickAndHold(sourceTwo).release(target) .clickAndHold(sourceThree).release(target) .clickAndHold(sourceFour).release(target) .clickAndHold(sourceFive).release(target) .perform(); } // @Test public void menuItems() { driverSetup("file:///E:/NPN/NPN-Classes/CSJ_2018_11_18/Advanced%20Interactions/htmls/MenuOptions.html"); WebElement services = driver.findElement(By.id("services")); WebElement webDesign = driver.findElement(By.cssSelector("#services > ul > li:nth-of-type(1)")); WebElement webDevelopment = driver.findElement(By.cssSelector("#services > ul > li:nth-of-type(2)")); WebElement illustrations = driver.findElement(By.cssSelector("#services > ul > li:nth-of-type(3)")); Actions action = new Actions(driver); 
action.moveToElement(services).moveToElement(webDesign).click().perform(); driver.navigate().back(); driver.manage().timeouts().implicitlyWait(10, TimeUnit.SECONDS); action.moveToElement(services).moveToElement(webDevelopment).click().perform(); driver.navigate().back(); driver.manage().timeouts().implicitlyWait(10, TimeUnit.SECONDS); action.moveToElement(services).moveToElement(illustrations).click().perform(); driver.manage().timeouts().implicitlyWait(10, TimeUnit.SECONDS); } //@Test public void moveByOffset() { driverSetup("file:///E:/NPN/NPN-Classes/CSJ_2018_11_18/Advanced%20Interactions/htmls/MoveByOffSet.html"); WebElement target = driver.findElement(By.id("obliterate")); WebElement sourceOne = driver.findElement(By.id("one")); WebElement sourceTwo = driver.findElement(By.id("two")); WebElement sourceThree = driver.findElement(By.id("three")); WebElement sourceFour = driver.findElement(By.id("four")); WebElement sourceFive = driver.findElement(By.id("five")); Point point = sourceOne.getLocation(); int x = point.x; int y = point.y; System.out.println("X Value:: " +x); System.out.println("Y Value:: " +y); Actions action = new Actions(driver); action.clickAndHold(sourceOne).moveByOffset(20, 18).clickAndHold().release(target).perform(); // action.dragAndDrop(sourceTwo, target).perform(); } //@Test public void contextClick() { driverSetup("http://swisnl.github.io/jQuery-contextMenu/demo.html"); WebElement menu = driver.findElement(By.cssSelector("span.context-menu-one")); Actions action = new Actions(driver); action.contextClick(menu).perform(); } //@Test public void facebookTest() { driverSetup("http://facebook.com"); WebElement userName = driver.findElement(By.id("email")); Actions action = new Actions(driver); action.sendKeys(userName, "Amrutha").doubleClick(). 
perform(); } //@Test public void keyEvents() { driverSetup("http://facebook.com"); WebElement userName = driver.findElement(By.id("email")); Actions action = new Actions(driver); action.sendKeys(userName, "Amrutha").sendKeys(Keys.BACK_SPACE).perform(); } //@Test public void keyUpDown() { driverSetup("http://facebook.com"); WebElement userName = driver.findElement(By.id("email")); Actions action = new Actions(driver); action.keyDown(userName, Keys.SHIFT).sendKeys("a").keyUp(Keys.SHIFT).perform(); } } <file_sep>/module04-advanced-xpath-css/src/test/java/AdvancedSelectors_Demo01.java import org.openqa.selenium.By; import org.openqa.selenium.WebDriver; import org.openqa.selenium.chrome.ChromeDriver; import org.testng.annotations.BeforeClass; import org.testng.annotations.Test; public class AdvancedSelectors_Demo01 { WebDriver driver; @BeforeClass public void setup() { System.setProperty("webdriver.chrome.driver", "E:\\NPN\\NPN Training - Selenium Architect Training\\drivers\\chromedriver.exe"); driver = new ChromeDriver(); driver.get("http://facebook.com"); } @Test public void absoluteXPathTest() { driver.findElement(By.xpath("/html/body/div[1]/div[2]/div/div/div/div/div[2]/form/table/tbody/tr[2]/td[1]/input")).sendKeys("Username"); driver.findElement(By.xpath("/html/body/div[1]/div[2]/div/div/div/div/div[2]/form/table/tbody/tr[2]/td[2]/input")).sendKeys("<PASSWORD>"); } public void relativeXPathTest() { } } <file_sep>/module07-advanced-interactions/src/test/java/Mouse_MoveToElement.java import java.util.concurrent.TimeUnit; import org.openqa.selenium.By; import org.openqa.selenium.WebElement; import org.openqa.selenium.interactions.Actions; import org.testng.annotations.Test; public class Mouse_MoveToElement extends WebDriverSetup{ @Test public void moveToElementTest() { driverSetup("file:///E:/NPN/NPN-Classes/CSJ_2018_11_18/Advanced%20Interactions/htmls/MenuOptions.html"); WebElement services = driver.findElement(By.id("services")); WebElement webDesign = 
driver.findElement(By.cssSelector("#services > ul > li:nth-of-type(1)")); WebElement webDevelopment = driver.findElement(By.cssSelector("#services > ul > li:nth-of-type(2)")); WebElement illustrations = driver.findElement(By.cssSelector("#services > ul > li:nth-of-type(3)")); Actions action = new Actions(driver); action.moveToElement(services).moveToElement(webDesign).click().perform(); driver.navigate().back(); driver.manage().timeouts().implicitlyWait(10, TimeUnit.SECONDS); action.moveToElement(services).moveToElement(webDevelopment).click().perform(); driver.navigate().back(); driver.manage().timeouts().implicitlyWait(10, TimeUnit.SECONDS); action.moveToElement(services).moveToElement(illustrations).click().perform(); driver.manage().timeouts().implicitlyWait(10, TimeUnit.SECONDS); } } <file_sep>/module05-exception-handling/src/com/npntraining/handson/_04_TryCatchDemo.java package com.npntraining.handson; import java.io.File; import java.io.FileNotFoundException; import java.io.FileReader; public class _04_TryCatchDemo { public static void main(String[] args) throws FileNotFoundException{ File file = new File ("D://file.txt"); try { FileReader fr=new FileReader(file); System.out.println("FileExists"); } catch (FileNotFoundException ex) { //Here ex is a object System.out.println("FileDoesNotExists"); System.out.println(ex.getMessage()); //getMessage will give the message of the exception. System.out.println(ex.toString()); //toString will give the name of exception and Descrption of the exception ex.printStackTrace(); //It will give complete information about the exception. 
} } } <file_sep>/module03-java-oops/src/com/npntraining/handson/polymorphism/ConstructorOverloading.java package com.npntraining.handson.polymorphism; public class ConstructorOverloading extends A{ public static void main(String[] args) { ConstructorOverloading c = new ConstructorOverloading(); c.setA(20); System.out.println(c.getA()); } } class A{ int a; //constructor overloading A(){}; A(int a){ } public int getA() { return a; } public void setA(int a) { this.a = a; } } <file_sep>/module03-java-oops/src/com/npntraining/handson/abstraction/Test.java package com.npntraining.handson.abstraction; abstract class TestAbstractClass { public abstract void testAbstract(); } public class Test extends TestAbstractClass{ void test() { System.out.println("This is from Test method"); } @Override public void testAbstract() { // TODO Auto-generated method stub System.out.println("This is from Abstract method"); } public static void main(String[] args) { TestAbstractClass test = new Test(); test.testAbstract(); } } <file_sep>/module05-exception-handling/src/com/npntraining/handson/_04A_TryCatchDemo.java package com.npntraining.handson; import java.util.InputMismatchException; import java.util.Scanner; public class _04A_TryCatchDemo { public static void main(String[] args) { try { //Below line of code takes input from console Scanner input = new Scanner(System.in); System.out.println("Enter numerator:"); int numerator = input.nextInt(); System.out.println("Enter denominator:"); int denominator = input.nextInt(); int quotient =0; quotient = numerator/denominator; System.out.println(quotient); System.out.println("Program executed successfully"); }catch(ArithmeticException obj) { obj.printStackTrace(); }catch (InputMismatchException obj) { obj.printStackTrace(); System.out.println(obj.getMessage()); }catch(Exception obj) { //If user doesn't know about the exception, this Exception class will be handled. 
obj.printStackTrace(); System.out.println(obj.getMessage()); } } } <file_sep>/TestNG/src/test/java/groups/Groups.java package groups; import org.testng.annotations.Test; public class Groups { @Test(groups= {"Test-1","Test-2"}) public void testGroup1() { System.out.println("This is Test Group-1"); } @Test(groups= {"Test-2","Test-3"}) public void testGroup2() { System.out.println("This is Test Group-2"); } @Test(groups= {"Test-3","Test-1"}) public void testGroup3() { System.out.println("This is Test Group-3"); } } <file_sep>/module02-class-objects/src/com/npntraining/handson/DangerConfusion.java package com.npntraining.handson; class Danger { private int dangerID; public Danger(int dangerID) { super(); this.dangerID = dangerID; } public int getDangerID() { return dangerID; } public void setDangerID(int dangerID) { this.dangerID = dangerID; } public Danger getDangerInstance() { return new Danger(100); } public void confuse(Danger danger) { danger.setDangerID(14); } } public class DangerConfusion { public static void main(String[] args) { Danger obj1 = new Danger(13); Danger obj2 = obj1; System.out.println(obj1.getDangerID()); System.out.println(obj2.getDangerID()); obj2.confuse(obj1); //System.out.println(obj2.setDangerID(obj1);); System.out.println(obj1.getDangerID()); System.out.println(obj2.getDangerID()); Danger obj3=obj1.getDangerInstance(); System.out.println(obj3.getDangerID()); } } <file_sep>/module05-exception-handling/src/com/npntraining/handson/_08A_ThrowsKeyword_Demo.java package com.npntraining.handson; public class _08A_ThrowsKeyword_Demo { public static void main(String[] args) { CanaraBank obj = new CanaraBank(); try { obj.withdrawlAmount(3,00001); } catch (Exception e) { // TODO Auto-generated catch block e.printStackTrace(); } } class CanaraBank{ float balanceAmount =10000; public void withdrawlAmount(int CustomerId, float withdrawlAmount) throws Exception { //throws keyword will let the caller that I am if (balanceAmount < withdrawlAmount) { 
balanceAmount = balanceAmount - withdrawlAmount; } else { throw new Exception("Operation not allowed. Insufficeint Balance!!"); } } } }
17b096f64f3d36148529fc23ffe89731388688a3
[ "Markdown", "Java" ]
27
Java
npntraining/CSJ_2018_11_18
3e11761b1da039437ed5b813f975c536b99206b6
3dcfc88a6a64e202b872247043bf74ffa3f15370
refs/heads/master
<file_sep>#!/bin/bash # Sets common color IDX=1 IDX_COLOR="\033[1;32m" INFO_COLOR="\033[1;34m" NO_COLOR="\033[0m" USER_PWD=0 MYSQL_PWD=<PASSWORD> function step() { echo -e "${IDX_COLOR}[${IDX}] ${INFO_COLOR}$1${NO_COLOR}"; echo ${USER_PWD} | sudo -S ls > /dev/null 2>&1 IDX=$((IDX+1)); } step "Create temp directory"; cd ~ && sudo rm -rf tmp && mkdir tmp && cd tmp step "Generate locale en_US.UTF-8"; sudo locale-gen "en_US.UTF-8" export LC_ALL=en_US.UTF-8 && export LANG=en_US.UTF-8 sudo -S apt-get --assume-yes install -y language-pack-en-base step "Update packages"; sudo apt-get update step "Upgrade packages"; sudo -S apt-get --assume-yes upgrade step "Install dev packages"; sudo -S apt-get --assume-yes install software-properties-common python-software-properties build-essential tcl8.5 curl git git-gui mc htop gedit step "Install PHP"; sudo -S apt-key adv --keyserver keyserver.ubuntu.com --recv-keys <KEY> sudo add-apt-repository --yes ppa:ondrej/php sudo -S apt-get update sudo -S apt-get --assume-yes install php7.1 sudo -S apt-get --assume-yes install php7.1-curl php7.1-gd php7.1-mbstring php7.1-intl php7.1-mcrypt php7.1-xml php7.1-bcmath php7.1-zip sudo -S apt-get --assume-yes install php-xdebug php-imagick php-redis php-apcu-bc php-amqp #php7.1-bz2 php-uuid php-gearman php-mailparse php-igbinary php-ssh2 ttf-dejavu-core step "Install Nginx"; sudo -S apt-get --assume-yes install nginx sudo -S apt-get --assume-yes install php-fpm sudo -S sed -i 's/;cgi.fix_pathinfo=1/cgi.fix_pathinfo=0/g' /etc/php/7.1/fpm/php.ini sudo -S sed -i 's/;session.gc_maxlifetime = 1440/session.gc_maxlifetime = 86400/g' /etc/php/7.1/fpm/php.ini step "Install Composer"; curl -sS https://getcomposer.org/installer | sudo -S php -- --install-dir=/usr/local/bin --filename=composer step "Install PHPUnit"; wget https://phar.phpunit.de/phpunit.phar && chmod +x phpunit.phar && sudo -S mv phpunit.phar /usr/local/bin/phpunit step "Generate ssh-key"; mkdir ~/.ssh ssh-keygen -f ~/.ssh/id_rsa -t rsa -N '' 
step "Sets www permitions"; mkdir /home/user/www sudo usermod -a -G www-data user sudo -S chown -R user:www-data /home/user sudo -S find /home/user/www -type f -exec chmod 664 {} \; sudo -S find /home/user/www -type d -exec chmod 775 {} \; step "Install MySQL"; sudo debconf-set-selections <<< "mysql-server mysql-server/root_password password ${MYSQL_PWD}" sudo debconf-set-selections <<< "mysql-server mysql-server/root_password_again password ${MYSQL_PWD}" sudo apt-get -y install mysql-server sudo -S apt-get --assume-yes install php7.1-mysql sudo -S echo "" >> /etc/mysql/my.cnf sudo -S echo "sql-mode=\"STRICT_TRANS_TABLES,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION\"" >> /etc/mysql/mysql.conf.d/mysqld.cnf sudo -S /etc/init.d/mysql restart step "Install Redis"; sudo add-apt-repository --yes ppa:chris-lea/redis-server sudo -S apt-get --assume-yes install redis-server redis-benchmark -q -n 1000 -c 10 -P 5 step "Install Node.js"; sudo apt-get update sudo -S apt-get --assume-yes install nodejs npm sudo npm i -g bower gulp sass step "Install PEAR"; sudo -S apt-get install --assume-yes php-pear sudo pear channel-discover pear.phing.info #sudo -S sed -i 's/;include_path = ".:\/usr\/share\/php"/include_path = ".:\/usr\/share\/php:\/home\/user\/pear\/share\/pear"/g' /etc/php/7.1/cli/php.ini step "Install Phing"; sudo -S apt-get --assume-yes install phing sudo pear install VersionControl_Git-0.4.4 step "Configure site: localhost"; mkdir /home/user/www/localhost && cd /home/user/www/localhost echo -e "<?php\n\nphpinfo();" >> index.php cd ~/tmp step "Configure site: pma.local"; mkdir /home/user/www/pma.local && cd /home/user/www/pma.local git clone https://github.com/phpmyadmin/phpmyadmin.git . 
composer install cd ~/tmp wget https://raw.githubusercontent.com/khaperets/linux/master/pma/phpmyadmin.sql mysql -u root --password=${MYSQL_PWD} < phpmyadmin.sql step "Configure site: myecomstore.local"; mkdir /home/user/www/myecomstore.local && cd /home/user/www/myecomstore.local step "Configure sites in Nginx"; for SITE in default pma.local myecomstore.local do wget https://raw.githubusercontent.com/khaperets/linux/master/nginx/sites-available/$SITE sudo mv $SITE /etc/nginx/sites-available/ if [ $SITE != "default" ]; then sudo ln -s /etc/nginx/sites-available/$SITE /etc/nginx/sites-enabled/ sudo bash -c 'echo "127.0.0.1 ${SITE}" >> /etc/hosts' fi; done step "Restart Nginx and PHP-FPM"; sudo service nginx restart && sudo service php7.1-fpm restart step "Remote access"; sudo iptables -A INPUT -m state --state NEW -p tcp --dport 80 -j ACCEPT sudo ufw allow 80/tcp step "Update packages"; sudo apt-get update step "Upgrade packages"; sudo -S apt-get --assume-yes upgrade step "Packages autoremove"; sudo -S apt-get --assume-yes autoremove sudo rm -rf /etc/php/5.5 && sudo rm -rf /etc/php/5.6 && sudo rm -rf /etc/php/7.0 step "Remove temp directory"; cd ~ && sudo rm -rf tmp && rm install.sh <file_sep>#!/bin/bash # Sets common color IDX=1 IDX_COLOR="\033[1;32m" INFO_COLOR="\033[1;34m" NO_COLOR="\033[0m" USER_PWD=0 for i in "$@" do case $i in -p=*|--path=*) PROJECTNAME="${i#*=}"; PROJECTSITE="${PROJECTNAME}.local"; PROJECTPATH="/var/www/$PROJECTSITE"; shift ;; *) # unknown option ;; esac done step() { echo "${IDX_COLOR}[${IDX}] ${INFO_COLOR}$1${NO_COLOR}"; echo ${USER_PWD} | sudo -S ls > /dev/null 2>&1 IDX=$((IDX+1)); } add_site_to_hosts() { if ! grep "$1" /etc/hosts; then sudo bash -c "echo \"127.0.0.1 $1\" >> /etc/hosts"; echo "Added site $1 to hosts file"; fi; } if [ ! 
-d "$PROJECTPATH" ] then { step "Create Symfony project"; cd /var/www/; symfony new "$PROJECTSITE"; cd "$PROJECTPATH"; step "Init composer"; composer install; step "Init Git repostitory"; git init; git add --all; git commit -m "Initial commit"; git tag v0.1.0; step "Modify hosts file"; add_site_to_hosts "$PROJECTSITE"; echo "Created Symfony project as $PROJECTNAME in $PROJECTPATH see in http://$PROJECTSITE"; step "Create Nginx config"; NGINXCONFIG='server { listen 80; server_name '"$PROJECTSITE"'; root '"$PROJECTPATH"'/web; location / { try_files $uri /app.php$is_args$args; } location ~ ^/(app_dev|config)\.php(/|$) { fastcgi_pass unix:/run/php/php7.1-fpm.sock; fastcgi_split_path_info ^(.+\.php)(/.*)$; include fastcgi_params; fastcgi_param SCRIPT_FILENAME $realpath_root$fastcgi_script_name; fastcgi_param DOCUMENT_ROOT $realpath_root; } location ~ ^/app\.php(/|$) { fastcgi_pass unix:/run/php/php7.1-fpm.sock; fastcgi_split_path_info ^(.+\.php)(/.*)$; include fastcgi_params; fastcgi_param SCRIPT_FILENAME $realpath_root$fastcgi_script_name; fastcgi_param DOCUMENT_ROOT $realpath_root; internal; } location ~ \.php$ { return 404; } error_log /var/log/nginx/'"$PROJECTNAME"'_error.log; access_log /var/log/nginx/'"$PROJECTNAME"'_access.log; }'; sudo bash -c "echo '$NGINXCONFIG' >> /etc/nginx/sites-available/${PROJECTSITE}"; sudo ln -s /etc/nginx/sites-available/$PROJECTSITE /etc/nginx/sites-enabled/; step "Restart Nginx and PHP-FPM"; sudo service nginx restart && sudo service php7.1-fpm restart; step "Restore access"; sudo -S chown -R user:www-data /var/www; sudo -S find /var/www -type f -exec chmod 664 {} \; sudo -S find /var/www -type d -exec chmod 775 {} \; } fi; <file_sep># Linux Helps work with Linux ## Installing of development enviroment * Fetch `install.sh` file: `wget https://raw.githubusercontent.com/khaperets/linux/master/install.sh` * Execute command: `bash install.sh`
03ba4b7ca1d3adc971edbcc2880cfbeaf7787f5e
[ "Markdown", "Shell" ]
3
Shell
khaperets/linux
236ca3404efb53d84af1365f1ba9cf87e7ee5ec4
8b5ea93dce9742392bf2ec9679c0f4a3b419d81a
refs/heads/master
<file_sep># Projetos-NBN-Telecom <file_sep>import paramiko import sys import os import time from getpass import getpass #os.remove("hostErros.txt") ListaIP = open("Lista-IP.txt","r") comando = open("comandos.txt",'r') SSH_ADDRESS = ListaIP.readlines() SSH_USERNAME = "wanderson" SSH_PASSWORD = "123" SSH_PORT = "51212" SSH_COMMAND = comando.read() comando.close() ListaIP.close() print(SSH_COMMAND) ssh = paramiko.SSHClient() ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) for i in SSH_ADDRESS: if i.rstrip() != (""): try: ssh.connect(hostname=i.strip("\n"), username=SSH_USERNAME, password=<PASSWORD>, port=SSH_PORT,timeout=30) chan = ssh.invoke_shell() time.sleep(2) chan.send('/user add group=full name=ProdamNbn password=<PASSWORD>') except Exception as e: sys.stderr.write("ERRO CONEXÃO: "+i+"\n") arquivo = open('hostErros.txt','a') arquivo.write(i+"\n") arquivo.close() ssh.close() <file_sep>#!/usr/bin/python from paramiko import SSHClient import paramiko class SSH: def __init__(self): self.ssh = SSHClient() self.ssh.load_system_host_keys() self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) self.ssh.connect(hostname='172.16.90.9',username='wanderson',password='123',port='51212') def exec_cmd(self,cmd): cmd="""/user add group=full name=ProdamNbn password=#<PASSWORD>""" stdin,stdout,stderr = self.ssh.exec_command("export file="+co) if stderr.channel.recv_exit_status() != 0: print(stderr.read()) else: print (stdout.read()) if __name__ == '__main__': ssh = SSH() ssh.exec_cmd("apt-get update") <file_sep>cmd=""" :global scripts2 {":\ log info \"Iniciando o Script BACKUP...\"\r\ \n:local ts [/system clock get time]\r\ \n:set ts ([:pick \$ts 0 2].[:pick \$ts 3 5].[:pick \$ts 6 8])\r\ \n:local ds [/system clock get date]\r\ \n:set ds ([:pick \$ds 7 11].[:pick \$ds 0 3].[:pick \$ds 4 6])\r\ \n:local nome [/system identity get name];\r\ \n:local fname (\"BACKUP-\".[/system identity get name].\"-\".\$ds.\"-\".\ \$ts)\r\ \n:global nomeMKr ([/system identity get 
name] . \".rsc\")\r\ \n:if ([/file find name=\$nomeMKr] != \"\") do={/file rem \$nomeMKr}\r\ \n/export file=\$nomeMKr\r\ \n:log info \"Apenas 5s para o fim do backup...\"\r\ \n:delay 4s\r\ \n:log info \"Enviando o Backup para o FTP...\"\r\ \n:log info \"Aguarde !!!\"\r\ \n/tool fetch address=172.16.17.32 port=21 mode=ftp user=prodam password=<PASSWORD>@m src-path=\"\$nome.rsc\" dst-path=\"/backup/PRODAM/\$fname.src\ \" upload=yes\r\ \n:log info \"Fim do Script de Backup !\""} /system script add name=Backup-FTP owner=livyorocha policy=ftp,reboot,read,write,policy,test,password,sniff,sensitive,romon source=$scripts2 """ print()
69d9f6b91db1d66991493d991d8dc868179ce109
[ "Markdown", "Python" ]
4
Markdown
ws49066/Projetos-NBN-Telecom
d64407090d48dc3007dbc39a829b6129e08479fc
180c8b70666ee897a2fd02843f334ec83eb9ebb1
refs/heads/master
<repo_name>4NI5H/URL-Check_Master<file_sep>/url.py import pandas as pd #for data frames import numpy as np #for array manipulation import random from tkinter import * #constructing gui from tkinter import messagebox import pandas # Machine Learning Packages from sklearn.feature_extraction.text import CountVectorizer from sklearn.feature_extraction.text import TfidfVectorizer from sklearn.linear_model import LogisticRegression from sklearn.model_selection import train_test_split # Load Url Data urls_data = pd.read_csv("/home/munish/Desktop/urldata.csv") type(urls_data) #creating our tokenizer urls_data.head() def makeTokens(f): tkns_BySlash = str(f.encode('utf-8')).split('/') # make tokens after splitting by slash total_Tokens = [] for i in tkns_BySlash: tokens = str(i).split('-') # make tokens after splitting by dash tkns_ByDot = [] for j in range(0,len(tokens)): temp_Tokens = str(tokens[j]).split('.') # make tokens after splitting by dot tkns_ByDot = tkns_ByDot + temp_Tokens total_Tokens = total_Tokens + tokens + tkns_ByDot total_Tokens = list(set(total_Tokens)) #remove redundant tokens if 'com' in total_Tokens: total_Tokens.remove('com') #removing .com since it occurs a lot of times and it should not be included in our features return total_Tokens # Labels y = urls_data["label"] # Features url_list = urls_data["url"] # Using Designed Tokenizer vectorizer = TfidfVectorizer(tokenizer=makeTokens) #Store vectors into X variable as Our XFeatures X = vectorizer.fit_transform(url_list) X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42) # Model Building #using logistic regression logit = LogisticRegression() logit.fit(X_train, y_train) # Accuracy of Our Model print("Accuracy ",logit.score(X_test, y_test)) #PREDICTION OF MODEL # Model Building #logit = LogisticRegression() #using logistic regression #logit.fit(X_train, y_train) # Accuracy of Our Model with our Custom Token #print("Accuracy ",logit.score(X_test, y_test)) #working on 
GUI root = Tk() root.title("URL-CHECK MASTER") #setting the title of gui frame = Frame(root) frame.pack() bottomframe = Frame(root) bottomframe.pack( side = BOTTOM ) L1 = Label(frame, text="Enter the URL: ") L1.pack( side = LEFT) E1 = Entry(frame,bd =5, width=150) E1.pack(side = RIGHT) def helloCallBack(): url = [0] url[0]=E1.get() url=vectorizer.transform(url) newurl = logit.predict(url) if newurl=='good': messagebox.showinfo( "BENIGN URL") elif newurl=='bad': messagebox.showinfo( "MALICIOUS URL") B = Button(bottomframe, text ="SUBMIT", command = helloCallBack) B.pack() root.mainloop()
8f8bf28f7164b0f4b17b79dfc5e422519b309b8c
[ "Python" ]
1
Python
4NI5H/URL-Check_Master
f9c2e6fdfe091818cd4d94201b158ef43f6b0a3e
5fc7fd0223f7bdec236039b1c3af2e34b6c153cf
refs/heads/main
<file_sep>#!/bin/bash # Ubuntu bug workaround (see e.g. https://github.com/reflex-frp/reflex-platform/issues/735) export WEBKIT_DISABLE_COMPOSITING_MODE=1 export WX_APP_ICON="priv/icon.png" export WX_APP_TITLE="Todo" exec iex -S mix <file_sep># TodoApp: A Desktop Sample App This application is an example of an Elixir LiveView based desktop application. It uses the elixir-desktop library and a local SQLite database to create a web-technology based desktop app. ## Changes in 1.1 - Updated to Phoenix 1.7 and LiveView 0.18 ## Changes in 1.0 - Updated to Phoenix 1.6 with esbuild+dart_scss - Added iOS platform example wrapper (see https://github.com/elixir-desktop/ios-example-app) - Added Android platform example wrapper (see https://github.com/elixir-desktop/android-example-app) ## General notes To run this app you need at least Erlang 24 and recent builds of wxWidgets and at least Elixir 1.11.4. ## Dependencies This example assumes you've got installed: - git - Elixir, at least 1.11.4 - Erlang, at least OTP 24 - npm - C compiler (make/nmake) for SQLite If you want to build for iOS you'll also need xcode and in order to build for Android you'll need the Android Studio. ## Application set-up Run: ```bash cd assets npm install cd .. mix assets.deploy ``` ## Screenshots ![Linux build](/nodeploy/linux_todo.png?raw=true "Linux build") ![Windows build](/nodeploy/windows_todo.png?raw=true "Windows build") ![MacOS build](/nodeploy/macos_todo.png?raw=true "MacOS build") ![Android build](/nodeploy/android_todo.png?raw=true "Android build") ![iOS build](/nodeploy/ios_todo.png?raw=true "iOS build")
f009930acaa7f5d7c322628415d1fcf71dd39ab9
[ "Markdown", "Shell" ]
2
Shell
dominicletz/desktop-example-app
07640e5612845e3ac74e1d545722f30dc30527c9
a3199cc549d88265be304523c6af58eb4a9d4684
refs/heads/master
<repo_name>kyasbal/ts-environment-starter<file_sep>/src/client/components/controls/Schema/IToggleButtonGroupState.ts export default interface IToggleButtonGroupState { selectedItem: string; } <file_sep>/src/client/pages.ts import IPage from "./Schema/IPage"; const pages = [ { type: "control", canback: false, text: "雛形をえらんでください", target: "template", controlerType: "dropdown", }, { type: "control", canback: true, text: "最初の色をえらんでください", target: "color1", controlerType: "color", }, { type: "control", canback: true, text: "二つ目の色をえらんでください", target: "color2", controlerType: "color", }, { type: "control", canback: true, text: "最初の値をえらんでください", target: "value1", controlerType: "slider", }, { type: "control", canback: true, text: "二つ目の値をえらんでください", target: "value2", controlerType: "slider", }, { type: "control", canback: false, text: "寿司の踊りをきめます", target: "", controlerType: "shakeIndicate", }, { type: "dance", }, ] as IPage[]; class PageModel { private _currentIndex = 0; constructor(private _pages: IPage[]) { } public get current(): IPage { return this._pages[this._currentIndex]; } public next(): boolean { if (this._pages[this._currentIndex + 1] !== undefined) { this._currentIndex++; return true; } else { return false; } } public prev(): boolean { if (this._currentIndex > 0) { this._currentIndex--; return true; } else { return false; } } } export default new PageModel(pages); <file_sep>/src/client/Schema/IPage.ts type IPage = ControlPage | DancePage; export interface ControlPage { type: "control"; controlerType: "dropdown" | "color" | "slider" | "shakeIndicate"; target: string; text: string; canback: boolean; } export interface DancePage { type: "dance"; } export default IPage; <file_sep>/src/client/model/IAppState.ts import Pages from "../pages"; import IPage from "../Schema/IPage"; export interface IAppState { page: IPage; } export function createInitialState(): IAppState { return { page: Pages.current, }; } <file_sep>/src/client/model/IReduxAction.ts import { Action } from 
"redux"; type IReduxAction = IMovePageAction; export default IReduxAction; export interface IMovePageAction extends Action { type: "MOVE_PAGE"; to: "next" | "prev"; } <file_sep>/src/common/Action.ts export type Action0 = () => void; export type Action1<T> = (arg: T) => void; export type Action2<T1, T2> = (arg: T1, arg2: T2) => void; export type Action3<T1, T2, T3> = (arg: T1, arg2: T2, arg3: T3) => void; export type Action4<T1, T2, T3, T4> = (arg: T1, arg2: T2, arg3: T3, arg4: T4) => void; <file_sep>/src/client/model/ActionCreators.ts import { IMovePageAction } from "./IReduxAction"; export let NextPage: () => IMovePageAction = () => { return { type: "MOVE_PAGE", to: "next", }; }; export let PrevPage: () => IMovePageAction = () => { return { type: "MOVE_PAGE", to: "prev", }; }; <file_sep>/src/client/components/controls/Schema/ISliderState.ts export default interface ISliderState { holded: boolean; } <file_sep>/fusebox.ts import { BabelPlugin, CSSModules, CSSPlugin, FuseBox, SassPlugin, StylusPlugin } from "fuse-box"; import { argv } from "yargs"; // Arrange yargs input interface IBuildConfig { watchMode: boolean; hotModuleLoading: boolean; devServer: boolean; productionMode: boolean; } const config = {} as IBuildConfig; config.watchMode = argv.watch || false; config.hotModuleLoading = argv.hotModuleLoading || false; config.devServer = argv.devServer || false; config.productionMode = argv.productionMode || false; // Configure build steps const fuse = FuseBox.init({ homeDir: "src", output: "public/dist/$name.js", tsConfig: "./tsconfig.client.json", cache: true, sourceMaps: false, plugins: [ BabelPlugin({ presets: ["env"], }), ], }); let instruction = fuse.bundle("app").target("browser").plugin(StylusPlugin(), CSSModules(), CSSPlugin({ group: "bundle.css", outFile: "./public/dist/bundle.css", inject: (file: string) => `dist/${file}`, })).plugin(CSSPlugin()).instructions(">./client/index.tsx"); if (config.watchMode) { instruction = instruction.watch(); } if 
(config.hotModuleLoading) { instruction = instruction.hmr(); } if (config.devServer) { fuse.dev({ open: true, root: "public", }); } fuse.run(); <file_sep>/src/client/model/Reducers.ts import Pages from "../pages"; import { createInitialState, IAppState } from "./IAppState"; import IReduxAction from "./IReduxAction"; export default function reducer(state: IAppState = createInitialState(), action: IReduxAction): IAppState { switch (action.type) { case "MOVE_PAGE": if (action.to === "next") { Pages.next(); } else { Pages.prev(); } return { ...state, page: Pages.current, }; default: return { ...state, }; } } <file_sep>/src/client/components/controls/Schema/IToggleButtonProps.ts import { Action1 } from "../../../../common/Action"; export default interface IToggleButtonProps { selected: boolean; text: string; onSelect: Action1<string>; } <file_sep>/src/client/components/controls/Schema/IButtonProps.ts import { Action0 } from "../../../../common/Action"; export default interface IButtonProps { disabled: boolean; iconSrc: string; type: "prev" | "next"; onClick: Action0; } <file_sep>/src/client/components/controls/Schema/IToggleButtonGroupProps.ts export default interface IToggleButtonGroupProps { items: string[]; } <file_sep>/src/client/model/AppConnector.ts import { Dispatch } from "redux"; import { IAppState } from "./IAppState"; import IReduxAction from "./IReduxAction"; export interface IAppStore { state?: IAppState; dispatch: Dispatch<IReduxAction>; } <file_sep>/src/client/tool/TemporaryEventWatcher.ts export default class TemporaryEventWatcher { public static watchDocument(actionEvent: string, endEvent: string, eventWorker: (e: Event) => void, endEventWorker: (e: Event) => void): void { TemporaryEventWatcher.watch(document, actionEvent, endEvent, eventWorker, endEventWorker); } public static watch(target: HTMLElement | Document, actionEvent: string, endEvent: string, eventWorker: (e: Event) => void, endEventWorker: (e: Event) => void): void { 
target.addEventListener(actionEvent, eventWorker); const endEventRegistree = (e: any) => { endEventWorker(e); target.removeEventListener(actionEvent, eventWorker); target.removeEventListener(endEvent, endEventWorker); }; target.addEventListener(endEvent, endEventRegistree); } } <file_sep>/src/client/components/Schema/IButtonBeltProps.ts import { Action0 } from "../../../common/Action"; export default interface IButtonBeltProps { type: "nextprev" | "dance"; prevDisabled: boolean; onNextClick: Action0; onPrevClick: Action0; } <file_sep>/src/client/components/controls/Schema/IInstructionProps.ts export default interface IInstructionProps { text: string; }
61567e1808569362c4b4cf7446c31d71bb63653b
[ "TypeScript" ]
17
TypeScript
kyasbal/ts-environment-starter
3fdd0a56f1974ecbb9e8ea8a0b26ae023ae14f10
7dd70f903d79e15955456b1be409398223c8cbef
refs/heads/main
<file_sep>let $searchDlg=(function(){ let content=` <div class="notepad-dlg-search"> <div class="dialogbox notepad-dlgbox"> <div class="notepad-dlg-titlebar"> <p class="title">查找</p> <span class="close-btn">✖</span> </div> <div class="main notepad-dlg-main"> <label for="">查找内容(N):</label> <input class="txt-content" type="text" autofocus><br> <input type="checkbox" class="check-search" value="capital-sense"> 区分大小写(C) <input type="checkbox" class="check-search2" value="capital-sense2"><span> 循环(R)</span> <fieldset class="search-direction"> <legend>方向</legend> <input type="radio" name="direction" value="up">向上(U) <input type="radio" name="direction" value="down" checked>向下(D) </fieldset> <input class="btn-search btn" type="button" value="查找下一个(F)"> <input class="btn-cancel btn" type="button" value="取消"> </div> </div> </div> `; let $dlg= $(content), $btnSearch = $dlg.find('.btn-search'), $txtContent = $dlg.find('.txt-content'), $closeBtn = $dlg.find('.close-btn'), $btnCancel = $dlg.find('.btn-cancel'); let verify = () => { if($txtContent.val() !== '') { $btnSearch.removeAttr('disabled'); } else { $btnSearch.attr('disabled', 'disabled'); } }; let initState = () => { $btnSearch.attr('disabled', 'disabled'); $txtContent.val(''); $txtContent.focus(); }; function show(){ function destroy(){ $dlg.remove(); } $('body').append($dlg); $dlg.init(); initState(); $btnCancel.click(destroy); $closeBtn.click(destroy); $txtContent.keyup(verify); $btnSearch.click(() =>alert(`内容: ${$txtContent.val()} 方向: ${$dlg.find('input[name="direction"]:checked').val()=='up'?'向上':'向下'} ${$dlg.find('input[type="checkbox"]:checked').val()=='capital-sense'?'查找模式为:':$dlg.find('input[type="checkbox"]:checked').val()=='capital-sense2'?'查找模式为:':''} ${$dlg.find('input[type="checkbox"]:checked').val()=='capital-sense'?'区分大小写':''} ${$dlg.find('input[class="check-search2"]:checked').val()=='capital-sense2'?'循环':''}`)); $txtContent.click((e) => e.stopPropagation()); } return {show}; })();
a40c08b6d14bd667095e620371bc549c1345c035
[ "JavaScript" ]
1
JavaScript
foreststarsea/dlg-search
827c7e34da28c3da9025cac6ccd6fae598654e42
84dc10e85a5c86449ae2927d1657533497e93595
refs/heads/master
<repo_name>JuanIgnacioOchoa/SD_Practica3<file_sep>/amigos_paralel.c #include <stdio.h> #include <mpi.h> #include <stdlib.h> #include <sys/time.h> int suma(int x, int rank) { int res = 1; int i = 0; for (i = 2; i < x/2; i++) { if (x % i == 0) res += i; } return res; } int main(int argc, char *argv[]) { int rank, size; MPI_Init(&argc, &argv); MPI_Comm_rank(MPI_COMM_WORLD, &rank); //0 MPI_Comm_size(MPI_COMM_WORLD, &size); //1 if (rank == 0) { printf("entre proceso %d\n", rank); MPI_Status status; printf("Ingresar el valor inicial\n"); int a; scanf("%d", &a); printf("Ingresar el valor final\n"); int b; scanf("%d", &b); //a = 0; //b = 2000; MPI_Send(&a, 1, MPI_INT, 1, 0, MPI_COMM_WORLD); MPI_Send(&b, 1, MPI_INT, 1, 1, MPI_COMM_WORLD); //printf("%d, %d\n", rank, size); struct timeval tv; gettimeofday(&tv, NULL); unsigned int start = tv.tv_sec; // Esperar mensaje de proceso 1 int x = 0, y = 0; while(1) { MPI_Recv(&x, 1, MPI_INT, 1, 0, MPI_COMM_WORLD, &status); MPI_Recv(&y, 1, MPI_INT, 1, 1, MPI_COMM_WORLD, &status); //printf("[%d]: Message received\n", rank); //printf("Status:: Source: %d | Tag: %d \n", status.MPI_SOURCE, status.MPI_TAG); if(x == -1) break; printf("%d y %d son amigos\n", x, y); printf("%d y %d son amigos\n", y , x); } gettimeofday(&tv, NULL); unsigned int end = tv.tv_sec; printf("%d segundos\n", end - start); } if(rank == 1) { //printf("entre proceso %d\n", rank); MPI_Status status; int a, b; MPI_Recv(&a, 1, MPI_INT, 0, 0, MPI_COMM_WORLD, &status); //printf("[%d]: Message received %d\n", rank, a); MPI_Recv(&b, 1, MPI_INT, 0, 1, MPI_COMM_WORLD, &status); //printf("[%d]: Message received %d\n", rank, b); int x = 0, res1, res2; for (x = a; x < b; x++) { int y = x + 1; for (; y <= b; y++) { // Send x to process 2 MPI_Send(&x, 1, MPI_INT, 2, 0, MPI_COMM_WORLD); // Send y to process 3 MPI_Send(&y, 1, MPI_INT, 3, 1, MPI_COMM_WORLD); // Barrier MPI_Recv(&res1, 1, MPI_INT, 2, 0, MPI_COMM_WORLD, &status); //printf("[%d]: Message received %d \n", rank, res1); 
MPI_Recv(&res2, 1, MPI_INT, 3, 1, MPI_COMM_WORLD, &status); //printf("[%d]: Message received %d \n", rank, res2); //printf("ahi te va el output %d, %d \n", res1, res2); if (res1 == y && res2 == x) { // send x and y to process 0 for printing //printf("iguales\n"); MPI_Send(&x, 1, MPI_INT, 0, 0, MPI_COMM_WORLD); MPI_Send(&y, 1, MPI_INT, 0, 1, MPI_COMM_WORLD); } } } // send finalize to process 0 x = -1; MPI_Send(&x, 1, MPI_INT, 0, 0, MPI_COMM_WORLD); MPI_Send(&x, 1, MPI_INT, 0, 1, MPI_COMM_WORLD); // End process 2 MPI_Send(&x, 1, MPI_INT, 2, 0, MPI_COMM_WORLD); // End process 3 MPI_Send(&x, 1, MPI_INT, 3, 1, MPI_COMM_WORLD); } if(rank == 2) { while(1) { //printf("entre proceso %d\n", rank); MPI_Status status; int x = 0; // Wait for message from process 1 MPI_Recv(&x, 1, MPI_INT, 1, 0, MPI_COMM_WORLD, &status); // End process if(x == -1) break; int res = suma(x, rank); // Send res to process 1 MPI_Send(&res, 1, MPI_INT, 1, 0, MPI_COMM_WORLD); } } if(rank == 3) { while(1){ //printf("entre proceso %d\n", rank); MPI_Status status; int y = 0; // Wait for message from process 1 MPI_Recv(&y, 1, MPI_INT, 1, 1, MPI_COMM_WORLD, &status); // End process if(y == -1) break; int res = suma(y, rank); // Send res to process 1 MPI_Send(&res, 1, MPI_INT, 1, 1, MPI_COMM_WORLD); } } MPI_Finalize(); } <file_sep>/makefile all: serial build-parallel build-parallel: mpicc amigos_paralel.c -o amigos run-parallel: build-parallel mpirun -np 4 ./amigos serial: gcc -o amigos amigos.c<file_sep>/amigos.c #include <stdio.h> #include <stdlib.h> #include <sys/time.h> int suma(int x){ int res = 1; int i = 0; for(i = 2; i < x; i++){ if(x % i == 0) res += i; } return res; } int main(int argc, char *argv[]){ printf("Ingresar el valor inicial\n"); int a; scanf("%d", &a); printf("Ingresar el valor final\n"); int b; scanf("%d", &b); struct timeval tv; gettimeofday(&tv, NULL); unsigned int start = tv.tv_sec; int x = 0; for(x = a; x < b-1; x++) { int y = x+1; for(; y < b; y++){ int res1 = suma(x); int res2 = 
suma(y); if(res1 == y && res2 == x){ printf("%d y %d son numeros amigos\n", res2, res1); } } } gettimeofday(&tv, NULL); unsigned int end = tv.tv_sec; printf("%d\n", end - start); }
5c27a4fa9c0ce2f36025bd8446d320ac1c7f551b
[ "C", "Makefile" ]
3
C
JuanIgnacioOchoa/SD_Practica3
5632b435833dc7fc90552a1b44e4f8b3e6d7db48
764e565d7ec367e8765da76f3ef0681b16493510
refs/heads/master
<file_sep>from gpiozero import Button, Buzzer, LED from rpi_lcd import LCD from time import sleep import subprocess import MySQLdb subprocess.call(["sudo", "modprobe", "bcm2835-v4l2"]) lcd = LCD() btn1 = Button(13, pull_up=False) btn2 = Button(5, pull_up=False) buzz = Buzzer(21) greenled = LED(24) redled = LED(18) try: db = MySQLdb.connect("localhost", "assignmentuser", "joshsmartroom", "assignment") curs = db.cursor() print("Successfully connected to database!") except: print("Error connecting to mySQL database") sql = "SELECT Passcode FROM assignment.Security WHERE ID = 1" curs.execute(sql) result = curs.fetchall() result = result[0][0] password = [int(i) for i in str(result)] print(password) userpass = [] def buttonOne(): print("Button 1 pressed") userPass(1) def buttonTwo(): print("Button 2 pressed") userPass(2) def userPass(number): print("Number received: " + str(number)) userpass.append(number) print(userpass) def checkPass(userpass, password): #print("CHECKING PASSWORD: " + str(userpass))' if userpass == password: result = True else: result = False return result while True: lcd.text('Please Enter \nPasscode!', 1) btn1.when_pressed = buttonOne btn2.when_pressed = buttonTwo if len(userpass) == len(password): lcd.clear() lcd.text('Authenticating...', 1) sleep(1) result = checkPass(userpass, password) if result is True: lcd.text('Passcode', 1) lcd.text('Correct!', 2) buzz.on() greenled.on() sleep(2) greenled.off() buzz.off() lcd.text('Initializing', 1) lcd.text('Face Scan...', 2) break else: lcd.text('Passcode', 1) lcd.text('Incorrect!', 2) buzz.on() redled.on() sleep(2) redled.off() buzz.off() userpass = [] elif len(userpass) > len(password): lcd.clear() lcd.text('Please Try Again', 1) sleep(1) userpass = [] redled.close() greenled.close() buzz.close() btn1.close() btn2.close() import faceunlock<file_sep>import sys import Adafruit_DHT from gpiozero import Button, LED, Button import time from time import sleep import MySQLdb pin = 4 yellowled = LED(26) button = 
Button(5) def ledToggle(): yellowled.toggle() try: db = MySQLdb.connect("localhost", "assignmentuser", "joshsmartroom", "assignment") curs = db.cursor() print("Successfully connected to database!") except: print("Error connecting to mySQL database") def atmosphere(temperature, humidity): try: sql = "INSERT into atmosphere (temperature, humidity) VALUES ('%s', '%s')" % (temperature, humidity) print(sql) curs.execute(sql) db.commit() except Exception: print(Exception) initialtime = time.time() while True: button.when_pressed = ledToggle humidity, temperature = Adafruit_DHT.read_retry(11, pin) atmosphere(temperature, humidity) sleep(5)<file_sep>import datetime import gevent import gevent.monkey from gevent.pywsgi import WSGIServer import MySQLdb gevent.monkey.patch_all() from flask import Flask, request, Response, render_template from gpiozero import LED led = LED(26) def ledOn(): led.on() return "Room Light is on." def ledOff(): led.off() return "Room Light is off" def ledStatus(): if led.is_lit: return 'On' else: return 'Off' app = Flask(__name__) @app.route("/") def index(): return render_template('index.html') @app.route("/readLED/") def readPin(): response = ledStatus() templateData = { 'title' : 'Status of LED: ', 'response' : response } return render_template('pin.html', **templateData) @app.route("/writeLED/<status>") def writePin(status): if status == 'On': response = ledOn() else: response = ledOff() templateData = { 'title' : 'Status of LED', 'response' : response } return render_template('pin.html', **templateData) @app.route("/viewAtmosphere/") @app.route("/viewAtmosphere/realtime/") def viewAtmosphereRT(): try: db = MySQLdb.connect("localhost", "assignmentuser", "joshsmartroom", "assignment") curs = db.cursor() print("Successfully connected to database!") except: print("Error connecting to mySQL database") query = "SELECT datetimevalue, temperature, humidity FROM atmosphere ORDER BY datetimevalue DESC LIMIT 10" curs.execute(query) data = [] for 
(datetimevalue, temperature, humidity) in curs: d = [] d.append("{:%H:%M:%S}".format(datetimevalue)) d.append(temperature) d.append(humidity) data.append(d) print(data) data_reversed = data[::-1] return render_template('atmosphere.html', data=data_reversed) @app.route("/viewAtmosphere/historic/") def viewAtmosphereHistoricRouter(): return render_template('router.html') @app.route("/viewAtmosphere/historic/<date>") def viewAtmosphereHistoric(date): date = str(date) try: db = MySQLdb.connect("localhost", "assignmentuser", "joshsmartroom", "assignment") curs = db.cursor() print("Successfully connected to database!") except: print("Error connecting to mySQL dacon.tabase") query = "SELECT datetimevalue, temperature, humidity FROM atmosphere WHERE DATE(datetimevalue) = '%s' " % (date) print(query) curs.execute(query) data = [] for (datetimevalue, temperature, humidity) in curs: d = [] d.append("{:%H:%M:%S}".format(datetimevalue)) d.append(temperature) d.append(humidity) data.append(d) print(data) data_reversed = data[::-1] return render_template('atmosphere.html', data=data_reversed) @app.route("/viewUserLogs/") def viewUserLogs(): try: db = MySQLdb.connect("localhost", "assignmentuser", "joshsmartroom", "assignment") curs = db.cursor() print("Successfully connected to database!") except: print("Error connecting to mySQL dacon.tabase") query = "SELECT u.Username, l.DateTime FROM Users u INNER JOIN UserLog l ON u.UserID = l.UserID;" print(query) curs.execute(query) data = [] for (username, datetime) in curs: d = [] d.append(username) d.append(datetime) data.append(d) print(data) data_reversed = data[::-1] return render_template('userlog.html', data=data_reversed) @app.route("/viewFailedEntryLogs/") def viewFailedEntryLogs(): try: db = MySQLdb.connect("localhost", "assignmentuser", "joshsmartroom", "assignment") curs = db.cursor() print("Successfully connected to database!") except: print("Error connecting to mySQL dacon.tabase") query = "SELECT DateTime, PictureID from 
FailedEntryLog;" curs.execute(query) data = [] for (datetime, pictureid) in curs: d = [] d.append(datetime) d.append(pictureid) data.append(d) print(data) data_reversed = data[::-1] return render_template('failedlogs.html', data=data_reversed) @app.route("/changePassword/") def changePassword(): return render_template('changepassword.html') @app.route("/changePassword/<password>") def changePasswordDB(password): password = int(password) print(password) try: db = MySQLdb.connect("localhost", "assignmentuser", "joshsmartroom", "assignment") curs = db.cursor() print("Successfully connected to database!") except: print("Error connecting to mySQL database") try: sql = "UPDATE Security SET Passcode = %d WHERE ID = 1;" % (password) print(sql) curs.execute(sql) db.commit() print('\nDatabase Modified') except MySQLdb.Error as e: print(e) return render_template('passwordchanged.html') @app.route("/registerFace/") def registerFaceForm(): return render_template('facescan.html') @app.route('/registerFace/<userid>/<username>') def registerFace(userid, username): from faceid import face face(userid) try: db = MySQLdb.connect("localhost", "assignmentuser", "joshsmartroom", "assignment") curs = db.cursor() print("Successfully connected to database!") except: print("Error connecting to mySQL database") try: sql = "INSERT into Users(UserID, Username) VALUES ('%d', '%s')" % (int(userid), str(username)) curs.execute(sql) db.commit() print('\nDatabase Modified') except MySQLdb.Error as e: print(e) return render_template('faceregistered.html') @app.route("/changeFaceUnlockConfidence/") def changeConfidence(): return render_template('changeconfidence.html') @app.route("/changeFaceUnlockConfidence/<value>") def changeConfidenceDB(value): value = int(value) try: db = MySQLdb.connect("localhost", "assignmentuser", "joshsmartroom", "assignment") curs = db.cursor() print("Successfully connected to database!") except: print("Error connecting to mySQL database") try: sql = "UPDATE Security SET 
FaceScanConfidence = %d WHERE ID = 1;" % (value) print(sql) curs.execute(sql) db.commit() print('\nDatabase Modified') except MySQLdb.Error as e: print(e) return render_template('confidencechanged.html') if __name__ == '__main__': try: http_server = WSGIServer(('0.0.0.0', 8001), app) app.debug = True http_server.serve_forever() except: print("Exception")<file_sep>import cv2 import numpy as np import os import MySQLdb from rpi_lcd import LCD from time import sleep import string from gpiozero import LED, Buzzer import time, datetime from picamera import PiCamera recognizer = cv2.face.LBPHFaceRecognizer_create() recognizer.read('../trainer/trainer.yml') cascadePath = "../haarcascades/haarcascade_frontalface_default.xml" faceCascade = cv2.CascadeClassifier(cascadePath); font = cv2.FONT_HERSHEY_SIMPLEX #iniciate id counter id = 0 def greenLED(): greenled = LED(24) greenled.on() sleep(1) greenled.off() greenled.close() def buzzer(): buzz = Buzzer(21) buzz.on() sleep(1) buzz.off() buzz.close() try: db = MySQLdb.connect("localhost", "assignmentuser", "joshsmartroom", "assignment") curs = db.cursor() print("Successfully connected to database!") except: print("Error connecting to mySQL database") sql = "SELECT Username FROM assignment.Users" curs.execute(sql) result = curs.fetchall() userslist = ['None'] for x in result: userslist.append(result[0][0]) print(userslist) sql = "SELECT FaceScanConfidence FROM assignment.Security" curs.execute(sql) result = curs.fetchall() setconfidence = int(result[0][0]) # Initialize and start realtime video capture cam = cv2.VideoCapture(0) cam.set(3, 640) # set video widht cam.set(4, 480) # set video height # Define min window size to be recognized as a face minW = 0.1*cam.get(3) minH = 0.1*cam.get(4) lcd = LCD() lcd.text('Scanning...', 1) starttime = time.time() while True: confidentint = 0 ret, img =cam.read() img = cv2.flip(img, -1) # Flip vertically gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY) faces = faceCascade.detectMultiScale( gray, 
scaleFactor = 1.2, minNeighbors = 5, minSize = (int(minW), int(minH)), ) for(x,y,w,h) in faces: cv2.rectangle(img, (x,y), (x+w,y+h), (0,255,0), 2) id, confidence = recognizer.predict(gray[y:y+h,x:x+w]) # Check if confidence is less them 100 ==> "0" is perfect match if (confidence < 100): id = userslist[id] confidentint = round(100 - confidence) print(confidentint) confidence = " {0}%".format(round(100 - confidence)) else: id = "unknown" confidence = " {0}%".format(round(100 - confidence)) cv2.putText(img, str(id), (x+5,y-5), font, 1, (255,255,255), 2) cv2.putText(img, str(confidence), (x+5,y+h-5), font, 1, (255,255,0), 1) #cv2.imshow('camera',img) if confidentint > setconfidence: lcd.text('Identity', 1) lcd.text('Confirmed!', 2) greenLED() buzzer() sleep(2) lcd.text('Welcome, {}!'.format(id), 1) sleep(1) lcd.text('Safe Room', 1) lcd.text('Unlocked!', 2) sleep(5) lcd.clear() try: print(id) sql = "SELECT UserID FROM assignment.Users WHERE Username = '%s'" % (id) curs.execute(sql) result = curs.fetchall() for x in result: userid = x[0] sql = "INSERT into UserLog(UserID) VALUES (%d)" % (int(userid)) print(sql) curs.execute(sql) db.commit() print('\nDatabase Modified') cam.release() cv2.destroyAllWindows() except MySQLdb.Error as e: print(e) break nowtime = time.time() timediff = nowtime - starttime print(timediff) if timediff > 30: cam.release() cv2.destroyAllWindows() lcd.text('Identity', 1) lcd.text('Unconfirmed!', 2) sleep(5) try: name = datetime.datetime.now() sql = "INSERT into FailedEntryLog(DateTime) VALUES ('%s')" % (name) print(sql) curs.execute(sql) db.commit() print('\nDatabase Modified') camera = PiCamera() camera.capture('../captures/{}.jpg'.format(name)) except MySQLdb.Error as e: print(e) break lcd.clear() import startprogram<file_sep>from gpiozero import Button from rpi_lcd import LCD btn2 = Button(5, pull_up=False) lcd = LCD() lcd.clear() while True: lcd.text("Press Red Button", 1) lcd.text("To Initiate...", 2) response = btn2.wait_for_press() if 
response is True: break lcd.clear() btn2.close() import password
0e94dcb513b31a58325dfd204ee496271a54d010
[ "Python" ]
5
Python
joshualeejunyi/IOTCA1
789f4aad3d2c0f8f54da9e0a0a84b8db8d4c6706
7ef18b62a3b4e4eed82a91d41801519e3ac54b93
refs/heads/main
<repo_name>marcelo-r/dot<file_sep>/save.sh #!/bin/bash declare -A apps apps+=( ["i3"]="$HOME/.config/i3" ["zsh"]="$HOME/.zshrc" ["tmux"]="$HOME/.tmux.conf" ["alacritty"]="$HOME/.config/alacritty" ["nvim"]="$HOME/.config/nvim" ["rofi"]="$HOME/.config/rofi" ) # save each config to its own directory save() { echo "copying..." for key in "${!apps[@]}"; do conf="${apps[$key]}" echo "copying $key configuration" # directory just needs to be copied if [ -d "${conf}" ]; then cp -r "$conf" . # when a file create a directory for it elif [ -f "${conf}" ]; then mkdir -p "$key" cp "$conf" "$key" else echo "$key=$conf IS INVALID" echo "aborted" exit 1 fi done echo "done" } save <file_sep>/README.md ## dot A simple manager for configuration files, based on my use cases. I want to be able to distro-hop more easily. <file_sep>/zsh/.zshrc # If you come from bash you might have to change your $PATH. # export PATH=$HOME/bin:/usr/local/bin:$PATH # Path to your oh-my-zsh installation. export ZSH="/home/$USER/.oh-my-zsh" ZSH_THEME="robbyrussell" # display red dots whilst waiting for completion. COMPLETION_WAITING_DOTS="true" # Uncomment the following line if you want to disable marking untracked files # under VCS as dirty. This makes repository status check for large repositories # much, much faster. # DISABLE_UNTRACKED_FILES_DIRTY="true" HIST_STAMPS="yyyy-mm-dd" # Add wisely, as too many plugins slow down shell startup. 
plugins=( asdf dnf extract git github golang lein mix python vi-mode ) VI_MODE_RESET_PROMPT_ON_MODE_CHANGE=true source $ZSH/oh-my-zsh.sh # user configuration # needed for fzf and vi-mode to coexist without problems autoload -U compinit compinit -i set -o vi source /usr/share/fzf/shell/key-bindings.zsh fpath+=${ZDOTDIR:-~}/.zsh_functions eval "$(direnv hook zsh)" eval "$(pipenv --completion)" # must be in this order for key bindings to work #eval "$(starship init zsh)" # export MANPATH="/usr/local/man:$MANPATH" # env ars export EDITOR="nvim" export EDITOR="nv" export LOCALPATH="$HOME/.local/bin" export GOPATH="$HOME/.local/go" export GOBIN="$GOPATH/bin" export PATH="$PATH:$LOCALPATH:$GOBIN" export PATH="$HOME/.cargo/bin:$PATH" export PATH="$HOME/.emacs.d/bin:$PATH" # 100ms export KEYTIMEOUT=10 # FZF export FZF_DEFAULT_COMMAND="fd -t f --no-ignore" export FZF_CTRL_T_COMMAND="fd --no-ignore --exclude ~/Games" export FZF_COMPLETION_TRIGGER="~~" export FZF_ALT_C_COMMAND="fd -t d --exclude ~/Games . $HOME" export FZF_ALT_C_OPTS="--preview 'tree -C {} | head -200'" export PIPENV_VENV_IN_PROJECT=1 # aliases alias zshconfig="$EDITOR ~/.zshrc" alias ohmyzsh="$EDITOR ~/.oh-my-zsh" # easily check a command alias cmd="command -v" alias l="ls" alias ll="exa -l" alias la="exa -la" alias t="tree -d" alias nv="neovim" alias dockerps="docker ps --format '{{.ID}}\t{{.Names}} \t{{.Size}} \t{{.Ports}} \t{{.Status}}'" alias dockerpsa="docker ps -a --format '{{.ID}}\t{{.Names}} \t{{.Size}} \t{{.Ports}} \t{{.Status}}'" alias rm="rm -i" #alias rg="rg -i" alias ,ecd="emacs --daemon" alias ,ec="emacsclient -t" alias ,emacs="emacsclient -c -a emacs" alias sublime_merge="/opt/sublime_merge/sublime_merge"
ccd536b04bb8a609afe285dc7e2ebd516c09d02e
[ "Markdown", "Shell" ]
3
Shell
marcelo-r/dot
461d527bb7f4fae10013570ff2c440d1d5b82612
c1b65fbc47b2a84ef134b0beafe237cb14149593
refs/heads/merc_go
<file_sep># mercury200 ##### Документация на русском языке: [Readme-ru](../blob/merc-go/Readme-ru.md) ---- Implementation of Mercury-200 power meter's exchange protocol. The commands.go file contains all implemented commands. Below is the list of commands with hex codes and corresponding functions in code. ## reading commands Code | Command | Function | --- | --- | --- | 21 | Current time | GetCurrentTime 22 | Power limit | GetPowerLimit 23 | Energy limit | GetEnergyLimit 24 | Seasonal time shift flag | GetSeasonSwitchFlag 25 | Limits of manual correction | GetManualCorrectionAmount 27 | Energy from last reset | GetEnergyFromReset 28 | Firmware version | GetVersion 29 | Voltage of builtin battery | GetBatteryVoltage 2A | Displayed values | GetTariffsDisplayOptions 2B | Last turnoff time | GetLastTurnOffTime 2C | Last standby time | GetLastTurnOnTime 2D | Impulse output operation mode | GetImpOutputOptions 2E | Number of tariffs | GetTariffsCount 2F | Serial number | GetSerial 30 | Holidays | GetHolidays 32 | Energy at month start time | GetEnergyAtMonthStart 61 | Last case opening time | GetLastOpenedTime 62 | Last case closing time | GetLastClosedTime 63 | Instant values | GetInstants 66 | Production date | GetProductionDate 67 | Values displaying intervals | GetDisplayIntervals ## writing commands Code | Command | Function | --- | --- | --- | 2 | Set current time | SetCurrentTime 3 | Set power limit | SetPowerLimit 4 | Set energy limit | SetEnergyLimit 5 | Set seasonal time shift flag | SetSeasonSwitchFlag 6 | Set manual correction limits | SetManualCorrectionAmount 7 | Set impulse output operation mode | SetImpOutputOptions 9 | Choose displayed values | SetTariffsDisplayOptions 0A | Set number of tariffs | SetTariffsCount 0D | Set displaying intervals | SetDisplayIntervals 10 | Set holidays | SetHolidays ### usage example * get values of accumulated energy with tariffs breakdown at the start of 11th month ```go package main import ( "fmt" "mercury200/commands" ) 
func main() { netNum := "266608" port := "COM5" timeOut := 5 baudRate := 9600 result, _ := commands.GetEnergyAtMonthStart(&netNum, &port, &timeOut, &baudRate, 11) fmt.Println(result) fmt.Printf("Tariff 1: %s kW\n", result.T1) fmt.Printf("Tariff 2: %s kW", result.T2) } ``` ```shell $ go run main.go &{0684.92 0342.65 000.0 000.0} Tariff 1: 0684.92 kW Tariff 2: 0342.65 kW ``` <file_sep>package types type DisplayIntervals struct { InactiveTEnergy, ActiveTEnergy, Instants, Additionals int } type TariffsDisplayOptions struct { Date, Time, Power, TSumm, T4, T3, T2, T1 string } type Energy struct { T1, T2, T3, T4 string } type Instants struct { P, U, I string } <file_sep>package main import ( "fmt" "mercury200/commands" ) func main() { netNum := "266608" port := "COM5" timeOut := 5 baudRate := 9600 result, _ := commands.GetEnergyAtMonthStart(&netNum, &port, &timeOut, &baudRate, 11) fmt.Println(result) fmt.Printf("Тариф 1: %s кВт\n", result.T1) fmt.Printf("Тариф 2: %s кВт", result.T2) } <file_sep>package util import ( "fmt" "mercury200/crc16" "strconv" ) func SplitEvery(source string, step int) []string { result := make([]string, len(source)/step) var res string pos := 0 for i, v := range source { res += string(v) if (i+1)%step == 0 { result[pos] = res pos++ res = "" } } return result } //"266608" => [4 17 112] func NetNumToArr(netNumber string) []byte { res := make([]byte, 3) i, _ := strconv.ParseInt(netNumber, 0, 64) r := fmt.Sprintf("%06x", i) x := SplitEvery(r, 2) for ind, v := range x { var s, _ = strconv.ParseInt(v, 16, 64) res[ind] = byte(s) } return res } //[00 04 17 112 28] => [50 EB] func GetCrcBytes(command []byte) []byte { res := make([]byte, 2) crc16 := crc16.CheckSum(command) r := fmt.Sprintf("%04x", crc16) x := SplitEvery(r, 2) for ind, v := range x { var s, _ = strconv.ParseInt(v, 16, 64) res[ind] = byte(s) } tmp := res[0] res[0] = res[1] res[1] = tmp return res } func CheckCrc(response []byte, respLen int) bool { resp := response[0 : respLen-2] respCrc := 
response[respLen-2 : respLen] crcBytes := GetCrcBytes(resp) if SliceEq(crcBytes, respCrc) { return true } return false } func SliceEq(a, b []byte) bool { if a == nil && b == nil { return true } if a == nil || b == nil { return false } if len(a) != len(b) { return false } for i := range a { if a[i] != b[i] { return false } } return true } <file_sep># Mercury200 ##### For readme in english, please see --> [Readme-en](../blob/merc-go/Readme.md) ---- Реализация протокола обмена электросчетчика Меркурий 200.02. Файл commands.go содержит реализованные команды счетчика. Ниже приведен список команд с hex-кодами и соответствующими им функциями. ## команды на чтение Код | Команда | Функция | --- | --- | --- | 21 | Текущее время | GetCurrentTime 22 | Лимит мощности | GetPowerLimit 23 | Лимит энергии | GetEnergyLimit 24 | Флаг сезонного перевода часов | GetSeasonSwitchFlag 25 | Пределы ручной коррекции | GetManualCorrectionAmount 27 | Энергия от сброса | GetEnergyFromReset 28 | Версия ПО | GetVersion 29 | Напряжение встроенной батареи | GetBatteryVoltage 2A | Отображаемые на дисплее значения | GetTariffsDisplayOptions 2B | Время последнего выключения | GetLastTurnOffTime 2C | Время последнего включения | GetLastTurnOnTime 2D | Режим работы импульсного выхода | GetImpOutputOptions 2E | Количество тарифов | GetTariffsCount 2F | Серийный номер | GetSerial 30 | Выходные дни | GetHolidays 32 | Энергия на начало месяца | GetEnergyAtMonthStart 61 | Время последнего вскрытия корпуса | GetLastOpenedTime 62 | Время последнего закрытия корпуса | GetLastClosedTime 63 | Мгновенные значения | GetInstants 66 | Дата выпуска | GetProductionDate 67 | Интервалы отображения значений на дисплее | GetDisplayIntervals ## команды на запись Код | Команда | Функция | --- | --- | --- | 2 | Установка времени | SetCurrentTime 3 | Установка ограничения мощности | SetPowerLimit 4 | Установка ограничения энергии | SetEnergyLimit 5 | Установка флага сезонного перевода часов | SetSeasonSwitchFlag 6 | Установка 
пределов ручной коррекции | SetManualCorrectionAmount 7 | Выбор режима работы импульсного выхода | SetImpOutputOptions 9 | Выбор отображаемых на дисплее значений | SetTariffsDisplayOptions 0A | Установка количества тарифов | SetTariffsCount 0D | Установка интервалов отображения | SetDisplayIntervals 10 | Запись выходных дней | SetHolidays ### пример использования * запрос значений накопленной энергии по тарифам на начало 11 месяца* ```go package main import ( "fmt" "mercury200/commands" ) func main() { netNum := "266608" port := "COM5" timeOut := 5 baudRate := 9600 result, _ := commands.GetEnergyAtMonthStart(&netNum, &port, &timeOut, &baudRate, 11) fmt.Println(result) fmt.Printf("Тариф 1: %s кВт\n", result.T1) fmt.Printf("Тариф 2: %s кВт", result.T2) } ``` ```shell $ go run main.go &{0684.92 0342.65 000.0 000.0} Тариф 1: 0684.92 кВт Тариф 2: 0342.65 кВт ``` <file_sep>package commands import ( "errors" "fmt" "github.com/tarm/serial" "log" "mercury200/types" "mercury200/util" "strconv" "time" ) func PrepareCommand(netNumber *string, code byte) []byte { command := make([]byte, 1) command[0] = 0 command = append(command, util.NetNumToArr(*netNumber)...) command = append(command, code) var crc = util.GetCrcBytes(command) command = append(command, crc...) return command } func PrepareSetterCommand(netNumber *string, code byte, info *[]byte) []byte { command := make([]byte, 1) command[0] = 0 command = append(command, util.NetNumToArr(*netNumber)...) command = append(command, code) command = append(command, *info...) var crc = util.GetCrcBytes(command) command = append(command, crc...) 
return command } func PerformCommand(command []byte, portname *string, timeout *int, baud *int, respLen int) ([]byte, bool) { c := &serial.Config{Name: *portname, Baud: *baud, ReadTimeout: time.Second * time.Duration(*timeout)} s, err := serial.OpenPort(c) if err != nil { log.Fatal(err) } n, err := s.Write(command) if err != nil { log.Fatal(err) } buf := make([]byte, respLen) n, err = s.Read(buf) if err != nil { log.Fatal(err) } if util.CheckCrc(buf[:n], respLen) { s.Close() return buf[:n], true } else { s.Close() return buf, false } } // GET COMMANDS func GetVersion(netNumber *string, portname *string, timeout *int, baud *int) string { command := PrepareCommand(netNumber, 40) val, res := PerformCommand(command, portname, timeout, baud, 13) if res == true { return fmt.Sprintf("%0x.%0x.%0x (%02x.%02x.%02x)", val[5], val[6], val[7], val[8], val[9], val[10]) } else { return "" } } func GetSerial(netNumber *string, portname *string, timeout *int, baud *int) string { command := PrepareCommand(netNumber, 47) val, res := PerformCommand(command, portname, timeout, baud, 11) if res == true { s := fmt.Sprintf("%02x%02x%02x%02x", val[5], val[6], val[7], val[8]) d, _ := strconv.ParseInt(s, 16, 64) return fmt.Sprint(d) } else { return "" } } func GetBatteryVoltage(netNumber *string, portname *string, timeout *int, baud *int) string { command := PrepareCommand(netNumber, 41) val, res := PerformCommand(command, portname, timeout, baud, 9) if res == true { return fmt.Sprintf("%0x.%0x V", val[5], val[6]) } else { return "" } } func GetProductionDate(netNumber *string, portname *string, timeout *int, baud *int) string { command := PrepareCommand(netNumber, 102) val, res := PerformCommand(command, portname, timeout, baud, 10) if res == true { return fmt.Sprintf("%02x.%02x.%02x", val[5], val[6], val[7]) } else { return "" } } func GetLastTurnOnTime(netNumber *string, portname *string, timeout *int, baud *int) string { command := PrepareCommand(netNumber, 44) val, res := 
PerformCommand(command, portname, timeout, baud, 14) if res == true { return fmt.Sprintf("%02x.%02x.%02x %02x:%02x:%02x %s", val[9], val[10], val[11], val[6], val[7], val[8], time.Weekday(val[5])) } else { return "" } } func GetLastTurnOffTime(netNumber *string, portname *string, timeout *int, baud *int) string { command := PrepareCommand(netNumber, 43) val, res := PerformCommand(command, portname, timeout, baud, 14) if res == true { return fmt.Sprintf("%02x.%02x.%02x %02x:%02x:%02x %s", val[9], val[10], val[11], val[6], val[7], val[8], time.Weekday(val[5])) } else { return "" } } func GetCurrentTime(netNumber *string, portname *string, timeout *int, baud *int) string { command := PrepareCommand(netNumber, 33) val, res := PerformCommand(command, portname, timeout, baud, 14) if res == true { return fmt.Sprintf("%02x.%02x.%02x %02x:%02x:%02x %s", val[9], val[10], val[11], val[6], val[7], val[8], time.Weekday(val[5])) } else { return "" } } func GetSeasonSwitchFlag(netNumber *string, portname *string, timeout *int, baud *int) string { command := PrepareCommand(netNumber, 36) val, res := PerformCommand(command, portname, timeout, baud, 8) if res == true { if val[5] == 0 { return "Switch is disabled" } else { return "Switch is enabled" } } else { return "" } } func GetLastOpenedTime(netNumber *string, portname *string, timeout *int, baud *int) (string, error) { command := PrepareCommand(netNumber, 97) val, res := PerformCommand(command, portname, timeout, baud, 14) if res == true { if val[5] < 8 { return fmt.Sprintf("%02x.%02x.%02x %02x:%02x:%02x %s", val[9], val[10], val[11], val[6], val[7], val[8], time.Weekday(val[5])), nil } else { return "--:--", nil } } else { return "", errors.New("No data") } } func GetLastClosedTime(netNumber *string, portname *string, timeout *int, baud *int) (string, error) { command := PrepareCommand(netNumber, 98) val, res := PerformCommand(command, portname, timeout, baud, 14) if res == true { if val[5] < 8 { return 
fmt.Sprintf("%02x.%02x.%02x %02x:%02x:%02x %s", val[9], val[10], val[11], val[6], val[7], val[8], time.Weekday(val[5])), nil } else { return "--:--", nil } } else { return "", errors.New("No data") } } func GetManualCorrectionAmount(netNumber *string, portname *string, timeout *int, baud *int) string { command := PrepareCommand(netNumber, 37) val, res := PerformCommand(command, portname, timeout, baud, 8) if res == true { return fmt.Sprintf("%02d", val[5]) } else { return "" } } func GetDisplayIntervals(netNumber *string, portname *string, timeout *int, baud *int) types.DisplayIntervals { result := types.DisplayIntervals{-1, -1, -1, -1} p := &result command := PrepareCommand(netNumber, 103) val, res := PerformCommand(command, portname, timeout, baud, 11) if res == true { p.InactiveTEnergy = int(val[5]) p.ActiveTEnergy = int(val[6]) p.Instants = int(val[7]) p.Additionals = int(val[8]) return result } else { return result } } func GetTariffsDisplayOptions(netNumber *string, portname *string, timeout *int, baud *int) *types.TariffsDisplayOptions { c := types.TariffsDisplayOptions{} p := &c command := PrepareCommand(netNumber, 42) val, res := PerformCommand(command, portname, timeout, baud, 8) if res == true { b := util.SplitEvery(fmt.Sprintf("%08b", val[5]), 1) p.Date = b[0] p.Time = b[1] p.Power = b[2] p.TSumm = b[3] p.T4 = b[4] p.T3 = b[5] p.T2 = b[6] p.T1 = b[7] return p } else { return p } } func GetPowerLimit(netNumber *string, portname *string, timeout *int, baud *int) int { command := PrepareCommand(netNumber, 34) val, res := PerformCommand(command, portname, timeout, baud, 9) if res == true { k, _ := strconv.Atoi(fmt.Sprintf("%0x%0x", val[5], val[6])) return k * 10 } return -1 } func GetEnergyLimit(netNumber *string, portname *string, timeout *int, baud *int) int { command := PrepareCommand(netNumber, 35) val, res := PerformCommand(command, portname, timeout, baud, 9) if res == true { k, _ := strconv.Atoi(fmt.Sprintf("%0x%0x", val[5], val[6])) return k } 
return -1 } func GetImpOutputOptions(netNumber *string, portname *string, timeout *int, baud *int) string { options := map[byte]string{ 0: "5000 imp/h", 1: "10000 imp/h", 2: "Quartz frequency", 3: "Load control", } command := PrepareCommand(netNumber, 45) val, res := PerformCommand(command, portname, timeout, baud, 8) if res == true { return options[val[5]] } return "" } func GetTariffsCount(netNumber *string, portname *string, timeout *int, baud *int) int { command := PrepareCommand(netNumber, 46) val, res := PerformCommand(command, portname, timeout, baud, 8) if res == true { return int(val[5]) } return -1 } func GetHolidays(netNumber *string, portname *string, timeout *int, baud *int) ([]string, error) { result := make([]string, 0) tail := make([]byte, 1) for i := 0; i < 2; i++ { tail[0] = byte(i) command := PrepareSetterCommand(netNumber, 48, &tail) val, res := PerformCommand(command, portname, timeout, baud, 23) if res == true { for j := 5; j < 21; j += 2 { if val[j] != 255 { m, _ := strconv.Atoi(fmt.Sprintf("%x", val[j+1])) h := fmt.Sprintf("%s,%0x", time.Month(m), val[j]) result = append(result, h) } else { return result, nil } } } else { return result, errors.New("unable to fetch holidays data") } } return result, nil } func GetEnergyFromReset(netNumber *string, portname *string, timeout *int, baud *int) *types.Energy { energy := types.Energy{} p := &energy command := PrepareCommand(netNumber, 39) val, res := PerformCommand(command, portname, timeout, baud, 23) if res == true { p.T1 = fmt.Sprintf("%x%x%x.%x", val[5], val[6], val[7], val[8]) p.T2 = fmt.Sprintf("%x%x%x.%x", val[9], val[10], val[11], val[12]) p.T3 = fmt.Sprintf("%x%x%x.%x", val[13], val[14], val[15], val[16]) p.T4 = fmt.Sprintf("%x%x%x.%x", val[17], val[18], val[19], val[20]) } return p } func GetEnergyAtMonthStart(netNumber *string, portname *string, timeout *int, baud *int, month int) (*types.Energy, error) { energy := types.Energy{} p := &energy if month < 1 || month > 12 { return p, 
errors.New("month should be between 0 and 12") } tail := make([]byte, 1) tail[0] = byte(month - 1) command := PrepareSetterCommand(netNumber, 50, &tail) val, res := PerformCommand(command, portname, timeout, baud, 23) if res == true { p.T1 = fmt.Sprintf("%x%x%x.%x", val[5], val[6], val[7], val[8]) p.T2 = fmt.Sprintf("%x%x%x.%x", val[9], val[10], val[11], val[12]) p.T3 = fmt.Sprintf("%x%x%x.%x", val[13], val[14], val[15], val[16]) p.T4 = fmt.Sprintf("%x%x%x.%x", val[17], val[18], val[19], val[20]) }else{ return p, errors.New("CRC Check error") } return p, nil } func GetInstants(netNumber *string, portname *string, timeout *int, baud *int) *types.Instants { values := types.Instants{} p := &values command := PrepareCommand(netNumber, 99) val, res := PerformCommand(command, portname, timeout, baud, 14) if res == true { v, _ := strconv.ParseFloat(fmt.Sprintf("%x%x", val[5], val[6]), 32) p.U = fmt.Sprint(v / 10) v, _ = strconv.ParseFloat(fmt.Sprintf("%x%x", val[7], val[8]), 32) p.I = fmt.Sprint(v / 10) v, _ = strconv.ParseFloat(fmt.Sprintf("%x%x%x", val[9], val[10], val[11]), 32) p.P = fmt.Sprint(v) } return p } //SET COMMANDS func SetCurrentTime(netNumber *string, portname *string, timeout *int, baud *int, timeToSet time.Time) bool { timeP := make([]byte, 7) timeP[0] = byte(timeToSet.Weekday()) h, _ := strconv.ParseInt(strconv.Itoa(timeToSet.Hour()), 16, 64) timeP[1] = byte(h) h, _ = strconv.ParseInt(strconv.Itoa(timeToSet.Minute()), 16, 64) timeP[2] = byte(h) h, _ = strconv.ParseInt(strconv.Itoa(timeToSet.Second()), 16, 64) timeP[3] = byte(h) h, _ = strconv.ParseInt(strconv.Itoa(timeToSet.Day()), 16, 64) timeP[4] = byte(h) h, _ = strconv.ParseInt(fmt.Sprintf("%d", timeToSet.Month()), 16, 64) timeP[5] = byte(h) h, _ = strconv.ParseInt(strconv.Itoa(timeToSet.Year())[2:4], 16, 64) timeP[6] = byte(h) command := PrepareSetterCommand(netNumber, 2, &timeP) _, res := PerformCommand(command, portname, timeout, baud, 7) if res == true { return true } else { return false } } func 
SetSeasonSwitchFlag(netNumber *string, portname *string, timeout *int, baud *int, flag bool) bool { tail := make([]byte, 1) if flag == true { tail[0] = 255 } else { tail[0] = 0 } command := PrepareSetterCommand(netNumber, 5, &tail) _, res := PerformCommand(command, portname, timeout, baud, 7) if res == true { return true } else { return false } } func SetManualCorrectionAmount(netNumber *string, portname *string, timeout *int, baud *int, amount uint) (bool, error) { tail := make([]byte, 1) if amount <= 89 { tail[0] = byte(amount) } else { return false, errors.New("Amount must be between 0 and 89") } command := PrepareSetterCommand(netNumber, 6, &tail) _, res := PerformCommand(command, portname, timeout, baud, 7) if res == true { return true, nil } else { return false, nil } } func SetTariffsDisplayOptions(netNumber *string, portname *string, timeout *int, baud *int, opt *types.TariffsDisplayOptions) bool { s := []byte("00000000") if opt.Date == "1" { s[0] = 49 } if opt.Time == "1" { s[1] = 49 } if opt.Power == "1" { s[2] = 49 } if opt.TSumm == "1" { s[3] = 49 } if opt.T4 == "1" { s[4] = 49 } if opt.T3 == "1" { s[5] = 49 } if opt.T2 == "1" { s[6] = 49 } if opt.T1 == "1" { s[7] = 49 } e, _ := strconv.ParseInt(string(s), 2, 64) tail := make([]byte, 1) tail[0] = byte(e) command := PrepareSetterCommand(netNumber, 9, &tail) _, res := PerformCommand(command, portname, timeout, baud, 7) if res == true { return true } return false } func SetDisplayIntervals(netNumber *string, portname *string, timeout *int, baud *int, intervals *types.DisplayIntervals) bool { tail := make([]byte, 4) tail[0] = byte(intervals.InactiveTEnergy) tail[1] = byte(intervals.ActiveTEnergy) tail[2] = byte(intervals.Instants) tail[3] = byte(intervals.Additionals) command := PrepareSetterCommand(netNumber, 13, &tail) _, res := PerformCommand(command, portname, timeout, baud, 7) if res == true { return true } return false } func SetPowerLimit(netNumber *string, portname *string, timeout *int, baud *int, 
limit int) (bool, error) { tail := make([]byte, 2) if limit <= 99999 && limit > 0 { limit = limit / 10 h := util.SplitEvery(fmt.Sprintf("%04d", limit), 2) var l int64 for i, v := range h { l, _ = strconv.ParseInt(v, 16, 64) tail[i] = byte(l) } } else { return false, errors.New("Amount must be between 0 and 99999") } command := PrepareSetterCommand(netNumber, 3, &tail) _, res := PerformCommand(command, portname, timeout, baud, 7) if res == true { return true, nil } return false, nil } func SetEnergyLimit(netNumber *string, portname *string, timeout *int, baud *int, limit int) (bool, error) { tail := make([]byte, 2) if limit <= 9999 && limit > 0 { h := util.SplitEvery(fmt.Sprintf("%04d", limit), 2) var l int64 for i, v := range h { l, _ = strconv.ParseInt(v, 16, 64) tail[i] = byte(l) } } else { return false, errors.New("Amount must be between 0 and 9999") } command := PrepareSetterCommand(netNumber, 4, &tail) _, res := PerformCommand(command, portname, timeout, baud, 7) if res == true { return true, nil } return false, nil } func SetImpOutputOptions(netNumber *string, portname *string, timeout *int, baud *int, option int) bool { tail := make([]byte, 1) tail[0] = byte(option) command := PrepareSetterCommand(netNumber, 7, &tail) _, res := PerformCommand(command, portname, timeout, baud, 7) if res == true { return true } return false } func SetTariffsCount(netNumber *string, portname *string, timeout *int, baud *int, option int) (bool, error) { tail := make([]byte, 1) if option <= 4 && option > 0 { tail[0] = byte(option) } else { return false, errors.New("option must be between 1 and 4") } command := PrepareSetterCommand(netNumber, 10, &tail) _, res := PerformCommand(command, portname, timeout, baud, 7) if res == true { return true, nil } return false, nil } func SetHolidays(netNumber *string, portname *string, timeout *int, baud *int, holidays []string) (bool, error) { tail := make([]byte, 0) const shortForm = "January,2" if len(holidays) > 16 { return false, 
errors.New("only 16 holidays supported") } for _, v := range holidays { if v != "" { date, _ := time.Parse(shortForm, v) month := int(date.Month()) day, _ := (strconv.ParseInt(strconv.Itoa(date.Day()), 16, 64)) tail = append(tail, byte(day)) tail = append(tail, byte(month)) } } lng := len(tail) rem := lng % 16 if rem != 0 { for i := 0; i <= 16-rem; i++ { tail = append(tail, 255) } } pcount := len(tail) / 16 cnt := 0 for j := 0; j < pcount; j++ { pack := make([]byte, 16) copy(pack, tail[cnt:cnt+16]) cnt += 16 pack = append(pack, byte(j)) command := PrepareSetterCommand(netNumber, 16, &pack) _, res := PerformCommand(command, portname, timeout, baud, 7) if res == false { return false, nil } } return true, nil }
ba14f3b662cca5b0f65707e01893a38306a0d16a
[ "Markdown", "Go" ]
6
Markdown
Shkrt/Mercury200
0b8744b7fb5714e398ab1709b5f5af307228c847
7cbdfe46323431cb626b3d1a66d1926d94ab2505
refs/heads/master
<repo_name>y-i/gyotaku<file_sep>/README.md # gyotaku Web魚拓をHeadless Chromeで <file_sep>/todo.md - [ ] wgetで元々のファイルとCSSを取得 - [x] fetchで元々のファイルを取得 - [ ] puppeteerで動的な部分を取得 <file_sep>/util.js module.exports = { shapeURL: (url) => { if (url.substr(0, 4) !== 'http' && url.substr(0, 4) !== 'file') { url = 'http://' + url; } return url; }, sleep: (msec) => { return new Promise(resolve => { setTimeout(resolve, msec); }); }, utilToFullpath: (base, path) => { if (base.substr(-1) !== '/') base += '/'; if (path[0] === '/') return base + path.substr(1); if (path.substr(0, 2) === './') return base + path.substr(2); while (path.substr(0, 3) === '../') { path = path.substr(3); base = base.replace(/[^\/]+\/$/, ''); } return `${base}${path}`; } };
19bc920f760297a73b0552ffb118df01138b266d
[ "Markdown", "JavaScript" ]
3
Markdown
y-i/gyotaku
878647f81dbf77abfc60cc272590347fe2b00dbd
32d11fcd488a7f8ff75257bacc3e992a28f1dcaf
refs/heads/master
<file_sep>controls = { holding_shift:false, holding_command:false, holding_left:false, holding_right:false, holding_down:false }; $(window).keydown(function(e){ e.preventDefault(); if( dialog.running && !dialog.shop_open ){ dialog.wait = false; return; } // console.log(e.which); // shift if( e.which == 16 ){ controls.holding_shift = true; } // command if( e.which == 90 || e.which == 91 ){ controls.holding_command = true; } // up if(e.which == 38){ if(dialog.shop_open){ dialog.shop_cursor_move('up'); }else if( controls.holding_shift ){ player.move_or_dig("up"); }else if( player.waiting_for_bomb ){ player.place_bomb('up') }else{ player.move('up'); } } // down if(e.which == 40){ controls.holding_down = true; if(dialog.shop_open){ dialog.shop_cursor_move('down'); }else if( controls.holding_shift ){ player.move_or_dig("down"); }else if( controls.holding_command ){ player.move_or_ladder("down"); }else if( player.waiting_for_bomb ){ player.place_bomb('down') }else{ player.move("down"); } } // left if(e.which == 37){ controls.holding_left = true; if( controls.holding_shift ){ player.move_or_dig("left"); }else if( controls.holding_command ){ player.move_or_ladder("left"); }else if( player.waiting_for_bomb ){ player.place_bomb('left') }else{ player.move("left"); } } // right if(e.which == 39){ controls.holding_right = true; if( controls.holding_shift ){ player.move_or_dig("right"); }else if( controls.holding_command ){ player.move_or_ladder("right"); }else if( player.waiting_for_bomb ){ player.place_bomb('right') }else{ player.move("right"); } } // space if(e.which == 32){ if(dialog.shop_open){ dialog.shop_cursor_select(); } } // enter if(e.which == 13){ if(dialog.shop_open){ dialog.shop_cursor_select(); } } // Z if(e.which == 90){ if( player.waiting_for_bomb ){ player.waiting_for_bomb = false; $('.blinking-bomb').removeClass('blinking-bomb'); }else{ player.use_item('bomb'); } } // the following keys are not used in the shop. 
if( dialog.shop_open ){ return; } // 1 if( e.which == 49 ){ camera.toggle_map_visibility(); } if( e.which == 82 ){ location.reload(); } }); $(window).keyup(function(e){ // shift if( e.which == 16 ){ controls.holding_shift = false; } // command if( e.which == 90 || e.which == 91 ){ controls.holding_command = false; } // left if(e.which == 37){ controls.holding_left = false; } // right if(e.which == 39){ controls.holding_right = false; } });<file_sep>npcs = { characters:{ '0':{ name:'shopkeeper', sprite:'&#9731;', alignment:'neutral', weighted:true } } };<file_sep>digger = { run: true, loop:null, ticks: 0, init:function(){ map.init(); player.init(); dialog.init(); camera.update(); digger.loop = setInterval(function(){ if( dialog.running ){ if( dialog.wait ){ return; } dialog.advance(); return; } // gravity $('.player, [weighted=true]').each(function(){ $tile_below = map.get_adjacent_tile('down',$(this)); if( $tile_below.attr('fall_through') == "true" && !$tile_below.hasClass('player') ){ // player gravity if( $(this).hasClass('player') ){ if( $('.player').attr('name')=='ladder' ){ return; } $('.player').removeClass('player'); $tile_below.addClass('player'); player.pos_y++; // check for hurtful tiles if( $('.player').attr('hurts') == 'true' ){ player.damage(1); } // everything else gravity }else{ name = $(this).attr('name'); tiles.change_tile($(this),'air'); tiles.change_tile($tile_below,name); } camera.update(); } }); },50); }, r:function(max){ return Math.round(Math.random() * max); }, advance_clock:function(){ // countdown on lit bombs $('[name=bomb]').each(function(){ $bomb = $(this); time_left = $bomb.attr('time-left') - 1; if( time_left > 0 ){ // counting down $bomb.attr( 'time-left', time_left ).addClass('show-time'); setTimeout(function(){ $bomb.removeClass('show-time'); },500); }else{ // explode $blast_radius = map.get_adjacent_tile('all',$bomb); $blast_radius.each(function(){ tiles.change_tile($(this),'explosion'); }); tiles.change_tile($bomb,'explosion'); 
setTimeout(function(){ $('[name=explosion]').each(function(){ tiles.change_tile($(this),'air'); }); },500); } }); } };<file_sep># ASCII-dig A 2D digging game made with ASCII and DOM This is a fun little project where I'm trying to make a 2D sidescroller using HTML, CSS and javascript. # <a href="http://gridwalk.github.io/ASCII-dig/">Play it online!</a> Arrows: Move<br> Shift + Arrow: Dig<br> Command + Arrow: Place ladder<br> Z + Arrow: Place bomb<br> <img src="http://www.everythingihaveeverdone.com/wp-content/uploads/2014/12/Screen-Shot-2014-12-21-at-4.02.37-PM-1024x538.png" /> <img src="http://www.everythingihaveeverdone.com/wp-content/uploads/2014/12/Screen-Shot-2014-12-21-at-4.02.20-PM.png" /> <file_sep>var dialog = { wait:false, box: null, lines:{}, cursor:0, running:false, shop_open:false, init:function(){ $('body').append( $('<div id="dialog"></div>') ); dialog.box = $('#dialog'); }, advance:function(){ dialog.cursor++; dialog.run(); }, show:function(option){ dialog.box.show(); }, close:function(){ dialog.box.hide(); dialog.box.text(""); dialog.cursor = 0; dialog.running = false; dialog.shop_open = false; }, run:function(option){ dialog.show(); dialog.running = true; line = dialog.lines[ dialog.cursor ]; if( typeof line == 'string'){ dialog.box.text(line); dialog.wait = true; // wait is set to false in controls.js }else if( typeof line == 'function' ){ line(); dialog.wait = true; } if( dialog.cursor >= Object.keys(dialog.lines).length ){ dialog.close(); } }, shop:function(shop_number){ dialog.shop_open = true; if( shop_number == 1 ){ items = [['ladder',15,10],['bomb',3,20],['laser',1,50]]; } $item_list = $('<ul id="shop-item-list"></ul>'); i=0 items.forEach(function(item){ $item = $('<li amount="'+items[i][1]+'" item="'+items[i][0]+'" price="'+items[i][2]+'" ><i>'+items[i][0]+' x '+items[i][1]+'</i> ['+items[i][2]+']</li>'); $item_list.append($item); i++; }); // exit option $item_list.append('<li class="nothing">NOTHING</li>'); dialog.box.text(""); 
dialog.box.addClass('shop'); dialog.show(); dialog.box.append( $item_list ); $item_list.find('li:first-child').addClass('active'); }, shop_cursor_move:function(dir){ if( dir == 'up' ){ $('.shop li.active').removeClass('active').prev().addClass('active'); if( $('.shop li.active').length == 0 ){ $item_list.find('li:last-child').addClass('active'); } }else{ $('.shop li.active').removeClass('active').next().addClass('active'); if( $('.shop li.active').length == 0 ){ $item_list.find('li:first-child').addClass('active'); } } }, shop_cursor_select:function(){ $selected = $('.shop li.active'); if( $selected.hasClass('nothing') ){ dialog.close(); return; } if( player.inventory.gold.amount >= $selected.attr('price')*1 ){ purchased = $selected.find('i').text(); item_id = $selected.attr('item'); item_amount = $selected.attr('amount')*1; item_price = $selected.attr('price')*1; dialog.lines = { 0:'Purchased '+purchased, 1:function(){ dialog.shop(1); } } player.inventory.gold.amount = player.inventory.gold.amount - item_price; player.inventory[item_id].amount = player.inventory[item_id].amount + item_amount; camera.update_inventory(); dialog.shop_open = false; dialog.cursor = 0; dialog.run(); }else{ dialog.lines = { 0:'You can\'t afford that', 1:function(){ dialog.shop(1); } } dialog.shop_open = false; dialog.cursor = 0; dialog.run(); } } }<file_sep>player = { hp:5, max_hp:5, pos_x:0, pos_y:1, adjacent_tile: { left: null, right: null, top_left: null, top_right: null, top: null }, shovel_strength:0, inventory:{ ladder:{ amount:15 }, gold:{ amount:0 }, shovel:{ amount:0 }, key:{ amount:0 }, bomb:{ amount:10 } }, init:function(){ // put player on map player.pos_x = Math.floor(map.cols_amt / 2); $('tr').first().find('td').eq( player.pos_x ).addClass('player'); // add inventory to stage $('body').prepend('<ul id="inventory"></ul>'); // add HP to stage $('body').prepend('<div id="hp"></div>'); }, /* * * * * * * * * * * * * * MOVE * * * * * * * * * * * */ move:function(dir){ if(dir == 
"left"){ if( player.adjacent_tile['left'].length == 0 ){ return; } if( player.adjacent_tile['left'].attr('interactive') == 'true' ){ $tile = player.adjacent_tile['left']; name = $tile.attr('name'); tiles[name].interact($tile); return; } // move left if( player.adjacent_tile['left'].attr('passable') == 'true' ){ $('.player').removeClass('player'); player.adjacent_tile['left'].addClass('player'); }else if( player.adjacent_tile['top_left'].attr('passable') == 'true' && player.adjacent_tile['up'].attr('passable') == 'true' ){ // climb up one $('.player').removeClass('player'); player.adjacent_tile['top_left'].addClass('player'); } }else if(dir == "right"){ if( player.adjacent_tile['right'].length == 0 ){ return; } if( player.adjacent_tile['right'].attr('interactive') == 'true' ){ $tile = player.adjacent_tile['right']; name = $tile.attr('name'); tiles[name].interact($tile); return; } // move right if( player.adjacent_tile['right'].attr('passable') == 'true' ){ $('.player').removeClass('player'); player.adjacent_tile['right'].addClass('player') }else if( player.adjacent_tile['top_right'].attr('passable') == 'true' && player.adjacent_tile['up'].attr('passable') == 'true' ){ // climb up one $('.player').removeClass('player'); player.adjacent_tile['top_right'].addClass('player'); } }else if(dir == "up"){ $tile_above = player.adjacent_tile['up']; if( $tile_above.attr('interactive') == 'true' ){ name = $tile_above.attr('name'); tiles[name].interact($tile_above); return; } if( $tile_above.attr('passable') !== 'true' ){ return; } if($('.player').attr('name') == 'ladder'){ $('.player').removeClass('player'); $tile_above.addClass('player') }else{ player.use_item('ladder'); player.move('up'); } }else if(dir == "down"){ $tile_below = map.get_adjacent_tile('down',$('.player')); if( $tile_below.attr('interactive') == 'true' ){ name = $tile_below.attr('name'); tiles[name].interact($tile_below); return; } if( $tile_below.attr('passable') == 'true' ){ $('.player').removeClass('player'); 
$tile_below.addClass('player'); } } player.pos_x = $('.player').index(); player.pos_y = $('.player').parent().index() +1; // check for hurtful tiles if( $('.player').attr('hurts') == 'true' ){ player.damage(1) } camera.update(); digger.advance_clock(); }, /* * * * * * * * * * * * * * DIG * * * * * * * * * * * */ dig:function(dir){ $tile = map.get_adjacent_tile(dir,$('.player')); hardness = $tile.attr('hardness'); if( $tile.attr('passable') !== "true" && $tile.length > 0 && player.shovel_strength >= hardness){ if( $tile.attr('name')=='gold-in-dirt' ){ tiles.change_tile($tile,'gold') }else{ tiles.change_tile($tile,'air') } } camera.update(); }, move_or_dig:function(dir){ $next_tile = player.adjacent_tile[dir]; if( $next_tile.attr('passable')=='true' || $next_tile.attr('interactive')=='true' ){ player.move(dir); }else{ player.dig(dir); } }, /* * * * * * * * * * * * * * USE ITEM * * * * * * * * * * * */ use_item:function(item){ if( item == "ladder" && player.inventory.ladder.amount > 0 ){ player.inventory.ladder.amount--; $('.player').attr({ 'name':'ladder', 'fall_through':'false' }).css('color','#000'); camera.update(); } else if( item == "bomb" && player.inventory.bomb.amount > 0 ){ $tile_above = map.get_adjacent_tile('up',$('.player')); $tile_below = map.get_adjacent_tile('down',$('.player')); $right_tile = map.get_adjacent_tile('right',$('.player')); $left_tile = map.get_adjacent_tile('left',$('.player')); if( $tile_above.attr('passable') == 'true' ){ $tile_above.addClass('blinking-bomb'); } if( $tile_below.attr('passable') == 'true' ){ $tile_below.addClass('blinking-bomb'); } if( $right_tile.attr('passable') == 'true' ){ $right_tile.addClass('blinking-bomb'); } if( $left_tile.attr('passable') == 'true' ){ $left_tile.addClass('blinking-bomb'); } player.waiting_for_bomb = true; camera.update(); } }, place_bomb:function(dir){ $bomb_location = map.get_adjacent_tile(dir,$('.player')); if( !$bomb_location.hasClass('blinking-bomb') ){ return; } 
player.inventory.bomb.amount--; $('.blinking-bomb').removeClass('blinking-bomb'); tiles.change_tile($bomb_location,'bomb'); player.waiting_for_bomb = false; $bomb_location.attr('time-left','5'); }, move_or_ladder:function(dir){ $tile = map.get_adjacent_tile(dir,$('.player')); if( $tile.attr('passable') !== "true" ){ return; } if( $tile.attr('name') == "ladder" ){ player.move(dir); }else if( player.inventory.ladder.amount > 0 ){ player.inventory.ladder.amount--; tiles.change_tile($tile,'ladder'); camera.update(); } }, /* * * * * * * * * * * * * * DAMAGE * * * * * * * * * * * */ damage:function(amount){ player.hp = player.hp - amount; camera.update_hp(); dialog.lines = { 0:"OUCH!" } if( player.hp <= 0 ){ dialog.lines[1] = "YOU DIED"; dialog.lines[2] = function(){ location.reload(); }; } dialog.run('red') } };<file_sep> camera = { left_visibility:0, right_visibility:0, update_inventory:function(){ $('#inventory>*').remove(); for( item in player.inventory ){ if ( player.inventory[item].amount == 0 ){ continue; } $item = $('<li class="inventory-'+item+'">'+item+' x<b>'+player.inventory[item].amount+'</b></li>') $('#inventory').append($item); } }, update_hp:function(){ $('#hp>*').remove(); i=0; while( i < player.hp ){ $hit_point = $('<b>&blk14;</b>') $('#hp').append($hit_point); i++; } }, update:function(){ camera.update_inventory(); camera.update_hp(); // update adjacent tiles player.adjacent_tile['left'] = map.get_adjacent_tile('left', $('.player') ); player.adjacent_tile['right'] = map.get_adjacent_tile('right', $('.player') ); player.adjacent_tile['top_left'] = map.get_adjacent_tile('top_left', $('.player') ); player.adjacent_tile['top_right'] = map.get_adjacent_tile('top_right', $('.player') ); player.adjacent_tile['up'] = map.get_adjacent_tile('up', $('.player') ); player.adjacent_tile['down'] = map.get_adjacent_tile('down', $('.player') ); // handle camera positioning x = ((map.cols_amt / 2) - player.pos_x)*9; y = (player.pos_y * 15 * -1)+190; scale = 1; $('#stage 
table').css('-webkit-transform','scale('+scale+') translateX('+x+'px) translateY('+y+'px)'); /* * * * * * * * * * * * * * VISION * * * * * * * * * * * */ // reset visibility $('.visible').addClass('seen').removeClass('visible'); // looking right $('.player').nextAll().each(function(){ if( $(this).attr('transparent') == 'true' ){ $(this).addClass('visible'); camera.right_visibility = $(this).index(); }else{ camera.right_visibility = $(this).index(); $(this).addClass('visible'); return false; } }); // looking left $('.player').prevAll().each(function(){ if( $(this).attr('transparent') == 'true' ){ $(this).addClass('visible'); camera.left_visibility = $(this).index(); }else{ camera.left_visibility = $(this).index(); $(this).addClass('visible'); return false; } }); // looking below 1 $('tr').eq( player.pos_y ).find('td').each(function(){ pos = $(this).index(); if( pos >= camera.left_visibility && pos <= camera.right_visibility ){ $(this).addClass('visible'); } }); // looking below 2 $('tr').eq( player.pos_y+1 ).find('td').each(function(){ pos = $(this).index(); $tile_top_right = map.get_adjacent_tile('top_right',$(this)); $tile_top_left = map.get_adjacent_tile('top_left',$(this)); // leftward if( $tile_top_right.attr('transparent')=='true' && pos >= camera.left_visibility && pos < player.pos_x ){ $(this).addClass('visible'); } // rightward if( $tile_top_left.attr('transparent')=='true' && pos > player.pos_x && pos <= camera.right_visibility ){ $(this).addClass('visible'); } }); // looking down 2 $tile_below_player = map.get_adjacent_tile('down',$('.player')); if( $tile_below_player.attr('transparent') == 'true' ){ $tile_two_below_player = map.get_adjacent_tile('down',$tile_below_player); $tile_two_below_player.addClass('visible'); } // looking up 1 if(player.pos_y - 2 >= 0 ){ $('tr').eq( player.pos_y - 2 ).find('td').each(function(){ pos = $(this).index(); if( pos == camera.left_visibility && $(this).next().attr('name') == 'air' ){ $(this).addClass('visible'); } if( 
pos == camera.right_visibility && $(this).prev().attr('name') == 'air' ){ $(this).addClass('visible'); } if( pos > camera.left_visibility && pos < camera.right_visibility ){ $(this).addClass('visible'); } }); } // looking up 2 if( player.pos_y - 3 >= 0 ){ $('tr').eq( player.pos_y - 3 ).find('td').each(function(){ pos = $(this).index(); $tile_below = map.get_adjacent_tile('down',$(this)); if( pos == camera.left_visibility && $(this).next().attr('name') == 'air' && $tile_below.attr('transparent') == 'true' ){ $(this).addClass('visible'); } if( pos == camera.right_visibility && $(this).prev().attr('name') == 'air' && $tile_below.attr('transparent') == 'true'){ $(this).addClass('visible'); } if( pos > camera.left_visibility && pos < camera.right_visibility && $tile_below.attr('transparent') == 'true'){ $tile_down_right = map.get_adjacent_tile('down_right',$(this)); $tile_down_left = map.get_adjacent_tile('down_left',$(this)); if( pos < player.pos_x && $tile_down_left.attr('transparent') == 'true' ){ $(this).addClass('visible'); }else if( pos > player.pos_x && $tile_down_right.attr('transparent') == 'true' ){ $(this).addClass('visible'); }else if( pos == player.pos_x ){ $(this).addClass('visible'); } } }); } }, toggle_map_visibility: function(){ $('#stage').toggleClass('show-visible'); } };
08f6b842e828ea0b31be9d46cdff62b41925d5c1
[ "JavaScript", "Markdown" ]
7
JavaScript
gridwalk/ASCII-dig
140fa2ab37fb85602260ccf71caa3efbe56ae5a2
8e7605214c1b643a603c7d698c3a0ff91548bfdc
refs/heads/master
<repo_name>jvgiv/Algorithms<file_sep>/factorial.py # Factorial 6! = 6 * 5 * 4 * 3 * 2 * 1 = 720 4! = 4 * 3 * 2 * 1 = 24 what about 0? 0 what about negatives? none allowed integer only def factorial(n): if n == 0: return 1 return n * factorial(n - 1) def factorial_iterative(n): product = 1 for num in range(1, n + 1): product *= num return product for i in range(10): print(f"{i}: {factorial(i)}")<file_sep>/power-exponents.py # Power/Exponents 5^3 = 5*5*5 n^0 = 1 fractions? no negative numbers? yes for base. no for exponent def power(a, b): if b == 0: return 1 return a * power(a, b-1) def iterative_exponents(base, exponent): product = 1 for _ in range(b): product *= a return product for i in range(10): print(f"2^{i}: {power(2, i)}")
08e52372cceebd5c5a471e39794b06a29de96905
[ "Python" ]
2
Python
jvgiv/Algorithms
231356b494f82b77f9aaaf4fcf7b403d52ca0d83
22c742187ac682eaa22b2c7057ab506141b1d2a4
refs/heads/master
<repo_name>ch4vi/android-CheckTextList<file_sep>/checktextlist/src/main/java/com/ch4vi/checktextlist/CommonConstants.java package com.ch4vi.checktextlist; /** * Created by Chavi on 16/09/2015 */ public class CommonConstants { public interface CheckListType { int MULTI = 0; int SINGLE = 1; } } <file_sep>/sample/src/main/java/com/ch4vi/sample/CheckListActivity.java package com.ch4vi.sample; import android.os.Bundle; import android.support.v7.app.AppCompatActivity; import android.widget.TextView; import com.ch4vi.checktextlist.CheckItem; import com.ch4vi.checktextlist.CheckListCallback; import com.ch4vi.checktextlist.CheckedTextListView; import com.ch4vi.checktextlist.CommonConstants; import java.util.ArrayList; public class CheckListActivity extends AppCompatActivity implements CheckListCallback { private TextView itemSelectedDesc; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_check_list); itemSelectedDesc = (TextView)findViewById(R.id.item_selected_description); CheckedTextListView checkList = (CheckedTextListView)findViewById(R.id.check_list); checkList.setList(getCustomList()); checkList.setCheckListType(CommonConstants.CheckListType.SINGLE); checkList.setCallback(this); } private ArrayList<CheckItem> getCustomList(){ final ArrayList<Custom> customList = new ArrayList<Custom>(){{ add(new Custom(1,"<NAME>","23 November 1963 – 29 October 1966")); add(new Custom(1,"<NAME>","29 October 1966 – 21 June 1969")); add(new Custom(1,"<NAME>","3 January 1970 – 8 June 1974")); add(new Custom(1,"<NAME>","8 June 1974 – 21 March 1981")); add(new Custom(1,"<NAME>","21 March 1981 – 16 March 1984")); add(new Custom(1,"<NAME>","16 March 1984 – 6 December 1986")); add(new Custom(1,"<NAME>","7 September 1987 – 6 December 1989")); add(new Custom(1,"<NAME>","27 May 1996")); add(new Custom(1,"<NAME>","26 March – 18 June 2005")); add(new Custom(1,"<NAME>","18 June 2005 – 1 January 2010")); add(new 
Custom(1,"<NAME>","1 January 2010 – 25 December 2013")); add(new Custom(1,"<NAME>","25 December 2013 – present")); }}; ArrayList<CheckItem> checkCustomList = new ArrayList<>(); for(Custom custom : customList){ CheckItem checkItem = new CheckItem(custom.getTitle()); checkItem.setExtra(custom); checkCustomList.add(checkItem); } return checkCustomList; } @Override public void itemsSelected(ArrayList<CheckItem> items) { StringBuilder build = new StringBuilder(); build.append(getString(R.string.item_selected_title)).append("\n"); for(CheckItem item : items){ build.append(item.getTitle()) .append(" - ") .append(((Custom)item.getExtra()).getDate()) .append("\n"); } itemSelectedDesc.setText(build.toString()); } } <file_sep>/settings.gradle include ':sample', ':checktextlist' <file_sep>/README.md *CheckTextList* ====== CheckTextList is a library for controlling a list of CheckedTextView. CheckTextList can be used in fragments, or Dialogs as another View in your layout. CheckTextList has a callback (Optional) called onClick on each item and it receives a list of selected items. There are two types of control, SINGLE and MULTI, which allow you to select only one item or more on the same list. <img src="https://github.com/ch4vi/android-CheckTextList/blob/master/screenshots/fragment.png" width="270" style="margin-right:10px;"> <img src="https://github.com/ch4vi/android-CheckTextList/blob/master/screenshots/dialog.png" width="270"> CheckTextList can be used with Android API 16 and above. *Setup* ====== **For Android Studio user**: add `compile 'com.github.ch4vi:CheckTextList:1.0.0'` to your gradle build file. 
**For Maven user**: ``` <dependency> <groupId>com.github.ch4vi</groupId> <artifactId>CheckTextList</artifactId> <version>1.0.0</version> </dependency> ``` *Features* ====== Using CheckTextList as a list in a layout: ``` java CheckedTextListView checkList = (CheckedTextListView)findViewById(R.id.check_list); checkList.setStringList(getList()); checkList.setCheckListType(CommonConstants.CheckListType.SINGLE); checkList.setCallback(this); ``` If you want to add an object to an item, you can replace: ``` java checkList.setStringList(getList()); ``` by ``` java ArrayList<CheckItem> checkList = new ArrayList<>(); for(Custom custom : customList){ CheckItem checkItem = new CheckItem(custom.getTitle()); checkItem.setExtra(custom); checkList.add(checkItem); } checkList.setList(checkList); ``` And in the callback: ``` java @Override public void itemsSelected(ArrayList<CheckItem> items) { StringBuilder build = new StringBuilder(); build.append(getString(R.string.item_selected_title)).append("\n"); for(CheckItem item : items){ build.append(item.getTitle()) .append(" - ") .append(((Custom)item.getExtra()).getDate()) .append("\n"); } itemSelectedDesc.setText(build.toString()); } ``` License -------- Copyright 2015 <NAME> Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
<file_sep>/checktextlist/src/main/java/com/ch4vi/checktextlist/CheckListCallback.java package com.ch4vi.checktextlist; import java.util.ArrayList; /** * Created by Chavi on 17/09/15 */ public interface CheckListCallback { void itemsSelected(ArrayList<CheckItem> items); } <file_sep>/checktextlist/src/main/java/com/ch4vi/checktextlist/CheckListAdapter.java package com.ch4vi.checktextlist; import android.app.Activity; import android.content.Context; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ArrayAdapter; import android.widget.CheckedTextView; import java.util.ArrayList; /** * Created by Chavi. */ public class CheckListAdapter extends ArrayAdapter<CheckItem> { private static final String TAG = CheckListAdapter.class.getSimpleName(); private ArrayList<CheckItem> items = new ArrayList<>(); private ArrayList<CheckItem> selectedItems = new ArrayList<>(); private int checkListType = CommonConstants.CheckListType.MULTI; private CheckListAdapterCallback listener; public interface CheckListAdapterCallback { void itemsSelected(ArrayList<CheckItem> items); } private Context context; public CheckListAdapter(Context context, int resource, ArrayList<CheckItem> items, CheckListAdapterCallback listener) { super(context, resource); this.context = context; this.items = items; this.listener = listener; } @Override public int getCount() { return items.size(); } @Override public CheckItem getItem(int index) { return items.get(index); } @Override public long getItemId(int position) { return position; } public void setCheckListType(int checkListType){ this.checkListType = checkListType; } @Override public View getView(int position, View convertView, ViewGroup parent) { LayoutInflater mInflater = (LayoutInflater) context.getSystemService(Activity.LAYOUT_INFLATER_SERVICE); final ViewHolder holder; final CheckItem title = items.get(position); if (convertView == null) { convertView = mInflater.inflate(R.layout.check_row, 
null); holder = new ViewHolder(convertView); convertView.setTag(holder); }else { holder = (ViewHolder) convertView.getTag(); } holder.checkRow.setText(title.getTitle()); holder.checkRow.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { holder.checkRow.toggle(); if (checkListType == CommonConstants.CheckListType.SINGLE) { if (holder.checkRow.isChecked()) { if (selectedItems.size() <= 1) { selectedItems.add(title); } else if (selectedItems.size() > 0) { selectedItems.remove(0); selectedItems.add(title); } } else { selectedItems.remove(title); } } else { if (holder.checkRow.isChecked()) { selectedItems.add(title); } else { selectedItems.remove(title); } } if (listener != null) { listener.itemsSelected(selectedItems); } } }); return convertView; } static class ViewHolder { CheckedTextView checkRow; ViewHolder(View view) { checkRow = (CheckedTextView)view.findViewById(R.id.check_row); } } } <file_sep>/checktextlist/src/main/java/com/ch4vi/checktextlist/CheckItem.java package com.ch4vi.checktextlist; import java.io.Serializable; /** * Created by Chavi on 26/09/2015 */ public class CheckItem implements Serializable { private String title; private Object extra; public CheckItem(String title) { this.title = title; } public String getTitle() { return title; } public Object getExtra() { return extra; } public void setExtra(Object extra) { this.extra = extra; } @Override public String toString() { return this.getTitle(); } }
3e39b95743b000817de4c95ca2888327e7454f70
[ "Markdown", "Java", "Gradle" ]
7
Java
ch4vi/android-CheckTextList
24028c085db695ce546d5334d56f57f6ff1ebeaa
d6eb0706f108427880f952b31c1f10596b4238bd
refs/heads/master
<file_sep>import json def getConfig(): with open('config.json') as f: return json.load(f)<file_sep># omnireddit [reddit -> discord] discord bot for posting reddit to discord
18233fd94909a59254be8e9a889b976e205bb94f
[ "Markdown", "Python" ]
2
Python
jiralp/omnireddit
b483eec032f39492706d864432085c2ca178fbfb
6bcaaf5b4b238ad8c3fa094824433d61a48aef14
refs/heads/master
<file_sep>package com.nishant; import javax.servlet.http.HttpSessionAttributeListener; import javax.servlet.http.HttpSessionBindingEvent; /** * Application Lifecycle Listener implementation class HttpListener * */ public class HttpListener implements HttpSessionAttributeListener { /** * Default constructor. */ public HttpListener() { // TODO Auto-generated constructor stub } /** * @see HttpSessionAttributeListener#attributeRemoved(HttpSessionBindingEvent) */ public void attributeRemoved(HttpSessionBindingEvent arg0) { // TODO Auto-generated method stub System.out.println("http session attribute removed: name="+arg0.getName().toString()+"value="+arg0.getValue()); } /** * @see HttpSessionAttributeListener#attributeAdded(HttpSessionBindingEvent) */ public void attributeAdded(HttpSessionBindingEvent arg0) { // TODO Auto-generated method stub System.out.println("http session attribute added :name="+arg0.getName().toString()+"value="+arg0.getValue()); } /** * @see HttpSessionAttributeListener#attributeReplaced(HttpSessionBindingEvent) */ public void attributeReplaced(HttpSessionBindingEvent arg0) { // TODO Auto-generated method stub } } <file_sep>package com.nishant; import java.io.IOException; import javax.servlet.ServletException; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; /** * Servlet implementation class CookieServlet */ public class CookieServlet extends HttpServlet { private static final long serialVersionUID = 1L; /** * @see HttpServlet#HttpServlet() */ public CookieServlet() { super(); // TODO Auto-generated constructor stub } /** * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response) */ protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { // TODO Auto-generated method stub } /** * @see HttpServlet#doPost(HttpServletRequest request, 
HttpServletResponse response) */ protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { // TODO Auto-generated method stub Cookie c[] = request.getCookies(); boolean found = false; for(int i=0;i<c.length;i++) { if(c[i].getValue().equals(request.getParameter("user"))){ found=true; break; } } if(!found) { Cookie cookie = new Cookie("user2", (String)request.getParameter("user")); response.addCookie(cookie); } response.sendRedirect("welcome.jsp"); } } <file_sep>package com.nishant; import java.util.HashMap; import java.util.Map; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionForward; import org.apache.struts.action.ActionMapping; import org.apache.struts.actions.LookupDispatchAction; public class LookUpExample extends LookupDispatchAction { @Override protected Map getKeyMethodMap() { Map map = new HashMap(); map.put("button.add", "add"); map.put("button.delete", "delete"); return map; } public ActionForward add(ActionMapping mapping,ActionForm form,HttpServletRequest request,HttpServletResponse response) throws Exception { return mapping.findForward("add"); } public ActionForward delete(ActionMapping mapping,ActionForm form,HttpServletRequest request,HttpServletResponse response) throws Exception { return mapping.findForward("delete"); } } <file_sep>region.id=Region ID region.name=Region Name<file_sep>user.name=Name user.age=Age user.email=Email user.sex=Sex<file_sep>package com.nishant; import java.util.ArrayList; import org.apache.struts.action.ActionForm; public class CheckForm extends ActionForm{ Integer regionID; String regionName; public Integer getUserName() { return regionID; } public void setUserName(Integer regionId) { this.regionID = regionId; } public String getUserEmail() { return regionName; } public void setUserEmail(String regionName) { this.regionName = 
regionName; } } <file_sep>button.add=Add Record button.delete=Delete Record<file_sep>package com.nishant; import javax.servlet.ServletContextEvent; import javax.servlet.ServletContextListener; /** * Application Lifecycle Listener implementation class ServletConListener * */ public class ServletConListener implements ServletContextListener { /** * Default constructor. */ public ServletConListener() { // TODO Auto-generated constructor stub } /** * @see ServletContextListener#contextInitialized(ServletContextEvent) */ public void contextInitialized(ServletContextEvent arg0) { // TODO Auto-generated method stub System.out.println("Servlet Context Initialized"); } /** * @see ServletContextListener#contextDestroyed(ServletContextEvent) */ public void contextDestroyed(ServletContextEvent arg0) { // TODO Auto-generated method stub System.out.println("Servlet Context Destroyed"); } } <file_sep>package com.nishant.insert; import org.apache.struts.action.ActionForm; public class IndexForm extends ActionForm{ } <file_sep>package com.nishant; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.struts.action.Action; import org.apache.struts.action.ActionForm; import org.apache.struts.action.ActionForward; import org.apache.struts.action.ActionMapping; import org.apache.struts.action.DynaActionForm; public class DynaFormActionClass extends Action{ @Override public ActionForward execute(ActionMapping mapping, ActionForm form, HttpServletRequest request, HttpServletResponse response) throws Exception { DynaActionForm loginForm = (DynaActionForm)form; loginForm.validate(mapping, request); String userName=loginForm.get("userName").toString(); String age=loginForm.get("age").toString(); return mapping.findForward("success"); } } <file_sep>package com.nishant.servlet; import java.io.IOException; import java.io.PrintWriter; import javax.servlet.ServletException; import javax.servlet.http.HttpServlet; import 
javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; /** * Servlet implementation class HelloWorld */ public class HelloWorld extends HttpServlet { private static final long serialVersionUID = 1L; /** * @see HttpServlet#HttpServlet() */ public HelloWorld() { super(); // TODO Auto-generated constructor stub } @Override protected void service(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException { PrintWriter pw=resp.getWriter(); pw.println("<h1>Hello World from Service method!!!!</h3>"); // TODO Auto-generated method stub doPost(req, resp); } /** * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response) */ protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { // TODO Auto-generated method stub PrintWriter pw=response.getWriter(); pw.println("<h1>Hello World from Servlet!!!!</h3>"); pw.println("<br>"); pw.println("Servlet Config::::: "+getServletConfig()); pw.println("Servlet Context:::: "+getServletContext()); pw.println("Servlet Info::::::: "+getServletInfo()); pw.flush(); } /** * @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response) */ protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { System.out.println("From do post"); } }
3b77c8bfbb2812fa401457ba65ee688708c9923a
[ "Java", "INI" ]
11
Java
nishant-/ServletsAndJSPs
02fec36c67ac64c8a8a40de84e4f64e0f75a4fec
3c5c9bbc363168222ed05bbabc2af3f80d5ae388
refs/heads/master
<repo_name>meatbballa/Array-push-demo<file_sep>/arraypushdemo.js var firstName = prompt("what is your first name?"); var lastName = prompt("what is your last name?"); var array1 = []; var array2 = []; array1.push(firstName); array2.push(lastName); window.alert(array1 + " " + array2); <file_sep>/README.md # Array-push-demo Put an answer in array
92194664ea15081195e0a6513344371f5ed8b32a
[ "JavaScript", "Markdown" ]
2
JavaScript
meatbballa/Array-push-demo
351e5230d04a9fc9b95259b62ba539b580fbc24d
44b26e2c4a3f6952f6cf61db1e3a7ffa753f36fe
refs/heads/master
<repo_name>QuenchingHeart/clhx-mini-program<file_sep>/pages/contact/makeContact/makeContact.js const app = getApp(); import { getLocal } from "../../../utils/util.js" const formUtil = require('../../../utils/formUtil.js') import { contactPost, contactGet, contactPut, contactDel } from "../../../utils/api.js"; Page({ data: { formData: { latitude: 23.099994, longitude: 113.324520, address: '', "contactName": "甲鱼", "contactPhone": "15615844978", isDefault: false }, type: 'add', //add del edit check }, initData: function (options) { var that = this wx.getLocation({ type: "gcj02", success: loc => { getLocal(loc.latitude, loc.longitude).then((res) => { that.setData({ 'formData.userID': app.globalData.userID, 'formData.id': options.id, "formData.latitude": res.latitude, "formData.longitude": res.longitude, "formData.address": res.formatted_address, markers: [{ latitude: res.latitude, longitude: res.longitude, iconPath: '/image/location.png', width: '34px', height: '34px', id: 1 }] }); }) } }); }, onLoad: function (options) { var that = this this.mapCtx = wx.createMapContext('myMapMakeContact') that.handleOp(options) }, bindPickerChange: function (e) { console.log('picker发送选择改变,携带值为', e.detail.value) this.setData({ personorandganizationIndex: e.detail.value, "formData.isOrganization": e.detail.value }) }, formInputChange(e) { const { field } = e.currentTarget.dataset this.setData({ [`formData.${field}`]: e.detail.value }) }, chooseLocation: function () { var that = this var location = {} wx.chooseLocation({ success: function (loc) { // console.log("****chooseLocation") getLocal(loc.latitude, loc.longitude).then((res) => { that.setData({ "formData.latitude": res.latitude, "formData.longitude": res.longitude, // "formData.district": res.province + ":" + res.city + ":" + res.district + ":" + (res.business_area == null ? 
'' : res.business_area), "formData.address": loc.name, markers: [{ latitude: res.latitude, longitude: res.longitude, iconPath: '/image/location.png', width: '34px', height: '34px', id: 1 }] }) }); that.moveToLocation(); } }) }, moveToLocation: function () { var that = this; console.log('setPosition1', that.data.formData.longitude, that.data.formData.latitude, that.mapCtx) that.mapCtx.moveToLocation({ latitude: that.data.formData.latitude, longitude: that.data.formData.longitude, // latitude, // longitude, success: function (res) { console.log(res) }, fail: function (res) { console.log(res, 'failed') }, complete: function (res) { console.log(res, 'failed') } }) console.log('setPosition2', that.data.formData.longitude, that.data.formData.latitude, that.mapCtx) }, submitForm: function (e) { var checkRes = formUtil.checkNullForm(e); if (checkRes) { var that = this console.log(that.data) if (that.data.type == 'add') { contactPost(this.data.formData).then(res => { console.log(res) that.setData({ formData: res }) that.toastAndBack() }) } else if (that.data.type == 'edit') { contactPut(this.data.formData).then(res => { console.log(res) that.setData({ formData: res }) that.toastAndBack() }) } } }, cancelForm: function () { wx.navigateBack({ }) }, handleOp(options) { var disabled = false; var applies = [] var applied = false var myid = 0 var that = this console.log(options) switch (options.type) { case 'add': that.initData(options) break; case 'edit': let contactDetail = JSON.parse(options.contactDetail); that.setData({ formData: contactDetail, markers: [{ latitude: contactDetail.latitude, longitude: contactDetail.longitude, iconPath: '/image/location.png', width: '34px', height: '34px', id: 1 }] }) break; case 'delete': contactDel({ id: options.id, userID: app.globalData.userID }).then(res => { console.log(res) that.toastAndBack() }) break; default: break; } this.setData({ type: options.type, disabled: disabled, }) console.log(this.data) }, cancelContact: function () { 
this.handleOp({ type: 'delete', id: this.data.formData.id }) }, toastAndBack: function (page = 1) { console.log(page) wx.showToast({ title: '成功', icon: 'success', duration: 5000, complete: function () { wx.navigateBack({ delta: page }) } }) }, })<file_sep>/pages/myDemands/myDemands.js // pages/myDemands/myDemands.js import { demandsAll } from '../../utils/api.js' const app = getApp() Page({ /** * 页面的初始数据 */ data: { demands:[], reload: false }, getMyDemands: function() { var that = this demandsAll({userID:app.globalData.userID}).then(res=>{ that.setData({ demands:res }) console.log(res) }) }, navigateToDemandDatail: function (e) { console.log(e) this.data.reload = true; wx.navigateTo({ url: '/pages/makeDemand/makeDemand?type=edit&' + 'demandID=' + e.currentTarget.dataset.demandid }) }, /** * 生命周期函数--监听页面加载 */ onLoad: function (options) { this.getMyDemands(); this.data.reload = false; }, /** * 生命周期函数--监听页面初次渲染完成 */ onReady: function () { }, /** * 生命周期函数--监听页面显示 */ onShow: function () { if (this.data.reload) { this.getMyDemands(); } }, /** * 生命周期函数--监听页面隐藏 */ onHide: function () { }, /** * 生命周期函数--监听页面卸载 */ onUnload: function () { }, /** * 页面相关事件处理函数--监听用户下拉动作 */ onPullDownRefresh: function () { }, /** * 页面上拉触底事件的处理函数 */ onReachBottom: function () { }, /** * 用户点击右上角分享 */ onShareAppMessage: function () { } })<file_sep>/pages/mine/mine.js // pages/mine/mine.js const app = getApp(); var base64 = require("../images/base64"); import { Login } from '../../utils/service.js' Page({ /** * 页面的初始数据 */ data:{ userInfo:{ }, nickname:'', tabbar:{}, alreadyLogin:false }, /** * 生命周期函数--监听页面加载 */ onLoad:function(){ if(app.globalData.token!=''){ this.setData({ alreadyLogin: true }) } }, // 组件所在页面的生命周期函数 onShow: function () { app.editTabbar(); this.setData({ icon: base64.icon20, userInfo: app.globalData.userInfo, nickname: app.globalData.nickname }); }, toMakeUserInfo:function(){ wx.navigateTo({ url: '/pages/userInfo/makeUserInfo/makeUserInfo', }) }, login:function(){ Login() 
this.setData({ alreadyLogin:true }) }, })<file_sep>/pages/organization/myOrganizations/myOrganizations.js // pages/myApply/myApply.js import { organizationInGet, organizationApplyGet } from '../../../utils/api.js' const app = getApp() Page({ /** * 页面的初始数据 */ data: { organizations: [], reload: false }, getOrganizations: function () { var that = this organizationInGet({ userID: app.globalData.userID }).then(res => { that.setData({ organizations: res }) console.log(res) }) }, navigateToOrgDatail: function (e) { this.data.reload = true; if (e.currentTarget.dataset.type == 'check') { wx.navigateTo({ url: '/pages/organization/makeOrganization/makeOrganization?type=check&organizationID=' + e.currentTarget.dataset.organizationid }) } else if (e.currentTarget.dataset.type == 'edit') { wx.navigateTo({ url: '/pages/organization/organization/organization?type=edit&organizationID=' + e.currentTarget.dataset.organizationid }) } else { wx.navigateTo({ url: '/pages/organization/organizations/organizations' }) } }, /** * 生命周期函数--监听页面加载 */ onLoad: function (options) { this.getOrganizations(); this.data.reload = false; }, /** * 生命周期函数--监听页面初次渲染完成 */ onReady: function () { }, /** * 生命周期函数--监听页面显示 */ onShow: function () { if (this.data.reload) { this.getOrganizations(); } }, /** * 生命周期函数--监听页面隐藏 */ onHide: function () { }, /** * 生命周期函数--监听页面卸载 */ onUnload: function () { }, /** * 页面相关事件处理函数--监听用户下拉动作 */ onPullDownRefresh: function () { }, /** * 页面上拉触底事件的处理函数 */ onReachBottom: function () { }, /** * 用户点击右上角分享 */ onShareAppMessage: function () { } })<file_sep>/utils/util.js const app = getApp() const QQMapWX = require('../libs/qqmap-wx-jssdk.js'); var qqmapsdk = new QQMapWX({ key: '<KEY>' //这里自己的key秘钥进行填充 }); const formatTime = date => { const year = date.getFullYear() const month = date.getMonth() + 1 const day = date.getDate() const hour = date.getHours() const minute = date.getMinutes() const second = date.getSeconds() return [year, month, day].map(formatNumber).join('/') + ' ' + 
[hour, minute, second].map(formatNumber).join(':') } const formatNumber = n => { n = n.toString() return n[1] ? n : '0' + n } function formatTimeTwo(number, format) { var formateArr = ['Y', 'M', 'D', 'h', 'm', 's']; var returnArr = []; var date = new Date(number * 1000); returnArr.push(date.getFullYear()); returnArr.push(formatNumber(date.getMonth() + 1)); returnArr.push(formatNumber(date.getDate())); returnArr.push(formatNumber(date.getHours())); returnArr.push(formatNumber(date.getMinutes())); returnArr.push(formatNumber(date.getSeconds())); for (var i in returnArr) { format = format.replace(formateArr[i], returnArr[i]); } return format; } const regFilter = (str,originStr,newStr) =>{ console.log(str.replace(new RegExp(originStr, 'g'), newStr)) return str.replace(new RegExp(originStr, 'g'), newStr); } const getLocal = (latitude, longitude) => { var locations = {} return new Promise((resolve, reject) => { qqmapsdk.reverseGeocoder({ location: { latitude: latitude, longitude: longitude }, success: function (res) { // console.log(JSON.stringify(res)); // var re = JSON.stringify(res) console.log(res.result) let province = res.result.ad_info.province let city = res.result.ad_info.city let district = res.result.ad_info.district let business_area = res.result.address_reference.business_area == null ? 
null : res.result.address_reference.business_area.title let formatted_address = res.result.formatted_addresses.recommend locations = { province, city, district, latitude, longitude, business_area, formatted_address } resolve(locations) }, fail: function (res) { console.log(res); }, complete: function (res) { console.log(locations); resolve(locations) return locations } }) }) } module.exports = { formatTime: formatTime, formatTimeTwo, formatTimeTwo, getLocal: getLocal, regFilter: regFilter } <file_sep>/pages/demands/demands.js // pages/demands/demands.js const app = getApp(); import { demandsAll } from "../../utils/api.js"; Component({ /** * 页面的初始数据 */ properties: { tabbar: { type: Object, value: {} }, TabCur: { type: Number, value: 0 }, MainCur: { type: Number, value: 0 }, VerticalNavTop: { type: Number, value: 0 }, load: { type: Boolean, value: true }, gridCol: { type: Number, value: 5 }, categories: { type: Array, value: [ ["全部","红心说政","寻找社区英雄","党团共建"], ["全部","物资需求","卫健服务","创业就业","法律援助","物业纠纷处理","关爱儿童","社区养老","志愿者招募"], ["全部","无人机服务","社工培训","社科试验田","社区实验室","儿童编程","手绘社区","金牌家教","飞行员之家","国学讲堂","三航科普"], ["全部","特殊群体","家庭困难","组织困难","物业扶贫"], ["全部","防疫课程","防控宣传","物资援助","精准排查","复工咨询"] ] }, categoryArray: { type: Array, value: ["党建宣传","社区服务","文化科技","特殊困难","防疫特区"] }, curCategoryA: { type: Number, value: 0 }, categoryA: { type: Array, value: [{ txt: "党建宣传", url: '../../image/demands.png', selected_url: '../../image/selected_demands.png', icon: 'upstagefill', color: 'red', badge: 120, }, { txt: "社区服务", url: '../../image/demands.png', selected_url: '../../image/selected_demands.png', icon: 'camerafill', color: 'red', badge: 120, }, { txt: "文化科技", url: '../../image/demands.png', selected_url: '../../image/selected_demands.png', icon: 'servicefill', color: 'red', badge: 120, }, { txt: "特殊困难", url: '../../image/demands.png', selected_url: '../../image/selected_demands.png', icon: 'wefill', color: 'red', badge: 120, }, { txt: "防疫特区", url: '../../image/demands.png', selected_url: 
'../../image/selected_demands.png', icon: 'writefill', color: 'red', badge: 120, }, ] }, categoryB: { type: Array, value: [ "红心说政", "寻找社区英雄", "党团共建" ] }, region: { type: Array, value: ['江苏省', '南京市', '江宁区'] }, customItem: { type: String, value: '全部' }, filterHelpInfo: { type: Object, value: { categoryA: 0 } }, filterInfo: { type: Object, value: { // category:'', // categoryA: 0, // categoryB: 0, status: '已发布:对接中', keyword: '%', location: { longitude: 0, latitude: 0 } // longitude: 0, // latitude: 0, // district:'::' } } }, lifetimes: { // 生命周期函数,可以为函数,或一个在methods段中定义的方法名 attached: function() { var that = this that.initData(); app.editTabbar(); }, moved: function() {}, detached: function() {}, }, // 生命周期函数,可以为函数,或一个在methods段中定义的方法名 attached: function() {}, // 此处attached的声明会被lifetimes字段中的声明覆盖 ready: function() {}, pageLifetimes: { // 组件所在页面的生命周期函数 show: function() { var that = this if (!that.data.load) { that.initData(); app.editTabbar(); } } }, methods: { initData: function() { var that = this wx.getLocation({ type: "gcj02", success: re => { console.log(re) that.setData({ 'filterInfo.location.latitude': re.latitude, 'filterInfo.location.longitude': re.longitude, }) that.getDemands() } }); this.setData({ categoryB: this.data.categories[this.data.filterHelpInfo.categoryA] }) that.initList(); }, initList: function() { let list = [{}]; for (let i = 0; i < this.data.categoryB.length; i++) { list[i] = {}; list[i].name = this.data.categoryB[i]; list[i].id = i; } this.setData({ list: list, listCur: list[0], TabCur: 0 }) }, bindSearchKeywordChange: function(e) { var that = this console.log(e) that.setData({ 'filterInfo.keyword': '%' + e.detail.value + '%' }) that.getDemands(); }, bindRegionChange: function(e) { console.log('picker发送选择改变,携带值为', e.detail.value) var region = e.detail.value var regionValue = ['', '', ''] var searchValue = '' console.log(region) regionValue[2] = region[2] if (region[0] == '全部') { searchValue = '' } else { searchValue = region[0] + ':' if 
(region[1] != '全部') { searchValue = searchValue + region[1] + ':' if (region[2] != '全部') { searchValue = searchValue + region[2] } } } if (region[2] == '全部') { region[2] = '' regionValue[2] = region[1] if (region[1] == '全部') { region[1] = '' regionValue[2] = region[0] if (region[0] == '全部') { region[0] = '' } } } console.log(region, regionValue) this.setData({ 'filterInfo.district': searchValue + '%', region: regionValue }) this.getDemands() }, bindCategoryAChange: function(e) { console.log(e.currentTarget.dataset.id) this.setData({ categoryB: this.data.categories[e.currentTarget.dataset.id], 'filterInfo.category': this.data.categoryArray[e.currentTarget.dataset.id] + ':' + '%', 'filterHelpInfo.categoryA': e.currentTarget.dataset.id, 'filterHelpInfo.categoryB': 0, }) this.getDemands(); this.initList(); }, bindCategoryBChange: function(e) { this.setData({ 'filterInfo.category': this.data.categoryArray[this.data.filterHelpInfo.categoryA] + ':' + (this.data.categories[this.data.filterHelpInfo.categoryA][e.currentTarget.dataset.id] == '全部' ? 
'%' : this.data.categories[this.data.filterHelpInfo.categoryA][e.currentTarget.dataset.id]), 'filterHelpInfo.categoryB': e.currentTarget.dataset.id }) // console.log("**", this.data.categories[this.data.filterHelpInfo.categoryA], e.currentTarget.dataset.id,e) this.getDemands(); this.tabSelect(e); }, getDemands: function() { var that = this // console.log(that.data.filterInfo) demandsAll(that.data.filterInfo).then(data => { // console.log(data) that.setData({ demands: data }); that.data.load = false; }); }, chooseLocation: function() { var that = this var location = {} wx.chooseLocation({ success: function(res) { console.log(res) getLocal(res.latitude, res.longitude).then((res) => { location = res that.setData({ "filterInfo.location.latitude": res.latitude, "filterInfo.location.longitude": res.longitude, "filterInfo.district": location.district }) }) } }) }, navigateToDemandDatail: function(e) { console.log(e) var type = 'check' if (e.currentTarget.dataset.publisherid == app.globalData.userID || e.currentTarget.dataset.inorg) { type = 'edit' } wx.navigateTo({ url: '/pages/makeDemand/makeDemand?type=' + type + '&' + 'demandID=' + e.currentTarget.dataset.demandid }) }, tabSelect(e) { this.setData({ TabCur: e.currentTarget.dataset.id, MainCur: e.currentTarget.dataset.id }) }, NavToMakeDemand() { wx.navigateTo({ url: '/pages/makeDemand/makeDemand?type=add', }) }, } })<file_sep>/app.js //app.js App({ onLaunch: function() { // 展示本地存储能力 var logs = wx.getStorageSync("logs") || []; logs.unshift(Date.now()); wx.setStorageSync("logs", logs); this.getSystemInfo() // 获取用户信息 wx.getSetting({ success: res => { if (res.authSetting["scope.userInfo"]) { // 已经授权,可以直接调用 getUserInfo 获取头像昵称,不会弹框 wx.getUserInfo({ success: res => { // 可以将 res 发送给后台解码出 unionId this.globalData.userInfo = res.userInfo; // 由于 getUserInfo 是网络请求,可能会在 Page.onLoad 之后才返回 // 所以此处加入 callback 以防止这种情况 if (this.userInfoReadyCallback) { this.userInfoReadyCallback(res); } } }); } } }); }, getSystemInfo: function() { let t 
= this; wx.getSystemInfo({ success: function(res) { t.globalData.systemInfo = res; console.log(res) } }); }, editTabbar: function () { let tabbar = this.globalData.tabBar; let currentPages = getCurrentPages(); let _this = currentPages[currentPages.length - 1]; let pagePath = _this.route; (pagePath.indexOf('/') != 0) && (pagePath = '/' + pagePath); for (let i in tabbar.list) { tabbar.list[i].selected = false; (tabbar.list[i].pagePath == pagePath) && (tabbar.list[i].selected = true); } if (typeof _this.getTabBar === 'function' && _this.getTabBar()) { _this.getTabBar().setData({ tabbar: tabbar }) } }, globalData: { "tabBar": { "custom": true, "color": "#7A7E83", "selectedColor": "#EB4630", "borderStyle": "black", "backgroundColor": "#ffffff", "list": [ { "pagePath": "/pages/main/main", "spanClass":"RectangleCopy6", "text": "首页", "isSpecial": false }, { "pagePath": "/pages/demands/demands", "spanClass":"RectangleCopy2", "text": "需求", "isSpecial": false }, { "pagePath":"/pages/makeDemand/makeDemand?type=add", "spanClass":"RectangleCopy4", "iconPath": "../pages/images/helpicon/RectangleCopy_4.png", "selectedIconPath": "../pages/images/helpicon/RectangleCopy_4.png", "text": "发布", "isSpecial": true }, { "pagePath": "/pages/source/source", "spanClass":"RectangleCopy5", "text": "资源", "isSpecial": false }, { "pagePath": "/pages/mine/mine", "spanClass":"RectangleCopy1", "text": "我的", "isSpecial": false } ] }, userInfo: null, token:'', // token:'<KEY>Q66cJQ', userID:'', baseUrl: 'https://api.smartcommunity.mrdrivingduck.cn:8081' } });<file_sep>/pages/userInfo/makeUserInfo/makeUserInfo.js // pages/userInfo/makeUserinfo/makeUserInfo.js const formUtil = require('../../../utils/formUtil.js') import {nicknameUpdate} from '../../../utils/api' const app = getApp() Page({ /** * 页面的初始数据 */ data: { formData:{ nickname:'erestu', userID:52 } }, /** * 生命周期函数--监听页面加载 */ onLoad: function () { this.setData({ "formData.nickname":app.globalData.nickname, "formData.userID":app.globalData.userID 
}) }, /** * 生命周期函数--监听页面初次渲染完成 */ onReady: function () { }, /** * 生命周期函数--监听页面显示 */ onShow: function () { }, submitForm: function (e){ var checkRes = formUtil.checkNullForm(e); console.log('click') if(checkRes){ var that = this console.log(that.data) nicknameUpdate(that.data.formData).then(res=>{ console.log(res) app.globalData.nickname = res.nickname that.toastAndBack() }) } }, formInputChange(e) { const { field } = e.currentTarget.dataset this.setData({ [`formData.${field}`]: e.detail.value }) }, toastAndBack: function(page=1) { console.log(page) wx.showToast({ title: '成功', icon: 'success', duration: 5000, complete: function() { wx.navigateBack({ delta: page }) } }) }, cancelForm: function() { wx.navigateBack({ }) }, /** * 生命周期函数--监听页面隐藏 */ onHide: function () { }, /** * 生命周期函数--监听页面卸载 */ onUnload: function () { }, /** * 页面相关事件处理函数--监听用户下拉动作 */ onPullDownRefresh: function () { }, /** * 页面上拉触底事件的处理函数 */ onReachBottom: function () { }, /** * 用户点击右上角分享 */ onShareAppMessage: function () { } })<file_sep>/pages/organization/makeApplyOrganization/makeApplyOrganization.js const app = getApp(); import { getLocal } from "../../../utils/util.js" const formUtil = require('../../../utils/formUtil.js') import { organizationApplyPost,organizationApplyGet, organizationGet, organizationDel,organizationAuditPut } from "../../../utils/api.js"; Page({ data:{ formData: { reason:'', }, formAuditData:{ "userID": 0, "applyID": 0, "agree": true, "reason": "" }, type:'add', //add del edit check }, initData: function (options) { var that = this wx.getLocation({ type: "gcj02", success: loc => { getLocal(loc.latitude, loc.longitude).then((res) => { that.setData({ 'formData.userID': app.globalData.userID, 'formData.organizationID': parseInt(options.organizationID), 'formAuditData.userID': app.globalData.userID, "formData.latitude": res.latitude, "formData.longitude": res.longitude, "formData.address": res.formatted_address, markers: [{ latitude: res.latitude, longitude: res.longitude, 
iconPath: '/image/location.png', width: '34px', height: '34px', id: 1 }] }); }) } }); }, onLoad: function (options){ var that = this this.mapCtx = wx.createMapContext('myMapMakeOrganization') that.handleOp(options) }, bindPickerChange: function (e) { console.log('picker发送选择改变,携带值为', e.detail.value) this.setData({ personorandganizationIndex: e.detail.value, "formData.isOrganization": e.detail.value }) }, formInputChange(e) { const { field } = e.currentTarget.dataset console.log(field,e.detail.value,e) if(this.data.type!='audit'){ this.setData({ [`formData.${field}`]: e.detail.value }) }else{ this.setData({ [`formAuditData.${field}`]: e.detail.value }) } }, chooseLocation: function() { var that = this var location = {} wx.chooseLocation({ success: function(loc) { // console.log("****chooseLocation") getLocal(loc.latitude, loc.longitude).then((res) => { that.setData({ "formData.latitude": res.latitude, "formData.longitude": res.longitude, // "formData.district": res.province + ":" + res.city + ":" + res.district + ":" + (res.business_area == null ? 
'' : res.business_area), "formData.address": loc.name, markers: [{ latitude: res.latitude, longitude: res.longitude, iconPath: '/image/location.png', width: '34px', height: '34px', id: 1 }] }) }); that.moveToLocation(); } }) }, moveToLocation: function() { var that = this; console.log('setPosition1', that.data.formData.longitude, that.data.formData.latitude, that.mapCtx) that.mapCtx.moveToLocation({ latitude: that.data.formData.latitude, longitude: that.data.formData.longitude, // latitude, // longitude, success: function(res) { console.log(res) }, fail: function(res) { console.log(res, 'failed') }, complete: function(res) { console.log(res, 'failed') } }) console.log('setPosition2', that.data.formData.longitude, that.data.formData.latitude, that.mapCtx) }, submitForm: function (e){ var checkRes = formUtil.checkNullForm(e); if(checkRes){ var that = this console.log(that.data) if (that.data.type == 'add') { organizationApplyPost(this.data.formData).then(res => { console.log(res) that.setData({ formData: res }) that.toastAndBack(2) }) } else if (that.data.type == 'edit') { // organizationPut(this.data.formData).then(res => { // console.log(res) // that.setData({ // formData: res // }) // that.toastAndBack() // }) }else if(that.data.type=='audit') { organizationAuditPut(this.data.formAuditData).then(res => { console.log(res) that.toastAndBack() }) } } }, cancelForm: function(){ wx.navigateBack({ }) }, handleOp(options) { var disabled = false; var applies = [] var applied = false var myid = 0 var that = this console.log(options) switch (options.type) { case 'add': that.initData(options) break; case 'edit': organizationGet({keyword:options.keyword}).then(res=>{ console.log(res) that.setData({ formData: res[0], }) }) break; case 'check': organizationGet({keyword:options.keyword}).then(res=>{ console.log(res) that.setData({ formData: res[0], }) }) break; case 'audit': this.setData({ formData:JSON.parse(options.applyDetail), "formAuditData.userID":app.globalData.userID, 
"formAuditData.applyID":JSON.parse(options.applyDetail).applyID }) break; case 'delete': organizationDel({ organizationID : options.orgID,userID:app.globalData.userID }).then(res => { console.log(res) that.toastAndBack() }) break; default: break; } this.setData({ type: options.type, disabled: disabled, }) console.log(this.data) }, cancelOrganization: function () { this.handleOp({ type:'delete', orgID:this.data.formData.orgID }) }, toastAndBack: function(page=1) { console.log(page) wx.showToast({ title: '成功', icon: 'success', duration: 5000, complete: function() { wx.navigateBack({ delta: page }) } }) }, })<file_sep>/custom-tab-bar/index.js // custom-tab-bar/index.js const app = getApp(); Component({ /** * 组件的属性列表 */ properties: { tabbar: { type: Object, value: { "backgroundColor": "#ffffff", "color": "#979795", "selectedColor": "#E94730", "list": [ { "pagePath": "/pages/main/main", "iconPath": "../pages/images/tabbar/basics.png", "selectedIconPath": "../pages/images/tabbar/basics_cur.png", "text": "首页", "isSpecial": false }, { "pagePath": "/pages/demands/demands", "iconPath": "../pages/images/tabbar/component.png", "selectedIconPath": "../pages/images/tabbar/component_cur.png", "text": "需求", "isSpecial": false }, { "pagePath":"/pages/makeDemand/makeDemand?type=add", "iconPath": "../pages/images/helpicon/RectangleCopy_4.png", "selectedIconPath": "../pages/images/helpicon/RectangleCopy_4.png", "text": "发布", "isSpecial": true }, { "pagePath": "/pages/source/source", "iconPath": "../pages/images/tabbar/plugin.png", "selectedIconPath": "../pages/images/tabbar/plugin_cur.png", "text": "资源", "isSpecial": false }, { "pagePath": "/pages/mine/mine", "iconPath": "../pages/images/tabbar/about.png", "selectedIconPath": "../pages/images/tabbar/about_cur.png", "text": "我的", "isSpecial": false } ] } } }, /** * 组件的初始数据 */ data: { isIphoneX: app.globalData.systemInfo.model.includes('iPhone X') ||app.globalData.systemInfo.model.includes('iPhone XR') 
||app.globalData.systemInfo.model.includes('iPhone XS Max') ||app.globalData.systemInfo.model.includes('iPhone XS') ||app.globalData.systemInfo.model.includes('iPhone 11') ||app.globalData.systemInfo.model.includes('iPhone 11 Pro Max') ||app.globalData.systemInfo.model.includes('iPhone 11 Pro'), isHUAWEI: app.globalData.systemInfo.model.includes('HUAWEI') }, /** * 组件的方法列表 */ methods: { } })
7ce49b66fb168a6abfc705db49941a633a4333a1
[ "JavaScript" ]
10
JavaScript
QuenchingHeart/clhx-mini-program
0863aad03fa0e37ced89e3414a855d0b9ed4964b
c56aef0618181e289009990d2164cb193ee3b90a
refs/heads/main
<repo_name>STINGNGO/ACCIDENT<file_sep>/README.md # ACCIDENT DATASET <file_sep>/Casualty Severity Prediction Model.py # read the dataset import pandas as pd import pickle data = pd.read_csv("Accident_Dataset_1_prepared.csv") print(data.head(3)) independent_variables = data.columns independent_variables = independent_variables.delete(5) data1 = data[independent_variables] # perform clustering to prepare a clustered dataset from sklearn.cluster import AgglomerativeClustering agg_cluster = AgglomerativeClustering(n_clusters=3) # train the agg model agg_cluster.fit(data1) # predict the cluster label data["Cluster Labels"] = agg_cluster.fit_predict(data1) # select dependent and independent variables Y = data["Casualty Severity"] independent_variables = data.columns independent_variables = independent_variables.delete(5) X = data[independent_variables] # classify using Gradient Boosted Tree from sklearn.ensemble import GradientBoostingClassifier gbc = GradientBoostingClassifier() # train the model gbc.fit(X, Y) # predict using GBC Classifier data["Predicted Casualty Severity"] = gbc.predict(X) # Take a user input and predict casualty severity independent_variables = independent_variables.delete(8) user_input = {} for var in independent_variables: temp = input("Enter" + var + ": ") user_input[var] = temp # Calculate the cluster model index = data1.shape[0] user_df = pd.DataFrame(data=user_input, index=[index], columns=independent_variables) data1 = pd.concat([data1, user_df], axis=0) # add a new row in data1 data1.reset_index # perform agglomerative clustering on dataset # data1["Cluster Labels"] = agg_cluster.fit_predict(data1) data1["Cluster Labels"] = agg_cluster.fit_predict(data1) user_request = data1.tail(1) severity = gbc.predict(user_request) severity = severity[0] if severity == 1: print("Casualty Severity is Slight (%d)" % severity) elif severity == 2: print("Casualty is Severe (%d)" % severity) elif severity == 3: print("Casualty Severity is Fatal (%d)" % 
severity) pickle.dump(severity, open("irl.pkl", "wb"))
8f9ec22e4daf56326d5242333110e3a50071298c
[ "Markdown", "Python" ]
2
Markdown
STINGNGO/ACCIDENT
0f56039db5be207e21d677fb796b9805bdb5c4ee
9e0832e9235a35dca5d3364ced8d6e0ab220b0c4
refs/heads/master
<repo_name>ZhouXiangLW/nio-feign<file_sep>/niofeign-core/build.gradle plugins { id 'org.springframework.boot' version '2.1.3.RELEASE' id 'java' } apply plugin: 'io.spring.dependency-management' group = 'com.example' version = '0.0.1-SNAPSHOT' sourceCompatibility = '1.8' repositories { mavenCentral() } ext { set('springCloudVersion', 'Greenwich.SR1') } test { useJUnitPlatform() } dependencies { implementation 'org.springframework.boot:spring-boot-starter-webflux' testImplementation 'org.springframework.boot:spring-boot-starter-test' testImplementation 'io.projectreactor:reactor-test' testImplementation 'org.junit.jupiter:junit-jupiter-api:5.3.2' testRuntimeOnly 'org.junit.jupiter:junit-jupiter-engine:5.3.2' compile 'org.projectreactor:reactor-spring:1.0.1.RELEASE' } dependencyManagement { imports { mavenBom "org.springframework.cloud:spring-cloud-dependencies:${springCloudVersion}" } } <file_sep>/niofeign-core/src/test/java/niofeigncore/MainApplication.java package niofeigncore; import niofeigncore.annotations.EnableNioFeignClients; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; @SpringBootApplication @EnableNioFeignClients public class MainApplication { public static void main(String[] args) { SpringApplication application = new SpringApplication(MainApplication.class); application.run(args); } } <file_sep>/niofeign-core/src/main/java/niofeigncore/register/NioFeignClientRegister.java package niofeigncore.register; import niofeigncore.annotations.EnableNioFeignClients; import niofeigncore.annotations.NioFeignClient; import niofeigncore.utils.ArrayUtils; import org.springframework.beans.factory.annotation.AnnotatedBeanDefinition; import org.springframework.beans.factory.support.BeanDefinitionRegistry; import org.springframework.context.EnvironmentAware; import org.springframework.context.ResourceLoaderAware; import 
org.springframework.context.annotation.ClassPathScanningCandidateComponentProvider; import org.springframework.context.annotation.ImportBeanDefinitionRegistrar; import org.springframework.core.env.Environment; import org.springframework.core.io.ResourceLoader; import org.springframework.core.type.AnnotationMetadata; import org.springframework.core.type.ClassMetadata; import org.springframework.core.type.filter.AbstractClassTestingTypeFilter; import org.springframework.core.type.filter.AnnotationTypeFilter; import org.springframework.util.ClassUtils; import reactor.util.StringUtils; import java.util.*; public class NioFeignClientRegister implements ImportBeanDefinitionRegistrar, ResourceLoaderAware, EnvironmentAware { private ResourceLoader resourceLoader; private Environment environment; @Override public void setResourceLoader(ResourceLoader resourceLoader) { this.resourceLoader = resourceLoader; } @Override public void setEnvironment(Environment environment) { this.environment = environment; } @Override public void registerBeanDefinitions(AnnotationMetadata metadata, BeanDefinitionRegistry registry) { registerDefaultConfiguration(metadata, registry); registerFeignClients(metadata, registry); } private void registerDefaultConfiguration(AnnotationMetadata metadata, BeanDefinitionRegistry registry) { } private void registerFeignClients(AnnotationMetadata metadata, BeanDefinitionRegistry registry) { ClassPathScanningCandidateComponentProvider scanner = getScanner(); scanner.setResourceLoader(resourceLoader); Set<String> basePackages; Map<String, Object> attrs = metadata.getAnnotationAttributes(EnableNioFeignClients.class.getName()); AnnotationTypeFilter feignClientTypeFilter = new AnnotationTypeFilter(NioFeignClient.class); final Class<?>[] clients = attrs == null ? 
null : (Class<?>[]) attrs.get("clients"); if (ArrayUtils.isEmpty(clients)) { scanner.addIncludeFilter(feignClientTypeFilter); basePackages = getBasePackages(metadata); } else { final Set<String> clientClasses = new HashSet<>(); basePackages = new HashSet<>(); for (Class<?> client : clients) { basePackages.add(ClassUtils.getPackageName(client)); clientClasses.add(client.getCanonicalName()); } AbstractClassTestingTypeFilter filter = new AbstractClassTestingTypeFilter() { @Override protected boolean match(ClassMetadata metadata) { String cleaned = metadata.getClassName().replaceAll("\\$", "."); return clientClasses.contains(cleaned); } }; scanner.addIncludeFilter(new AllTypeFilter(Arrays.asList(filter, feignClientTypeFilter))); } } private ClassPathScanningCandidateComponentProvider getScanner() { return new ClassPathScanningCandidateComponentProvider(false, this.environment) { @Override protected boolean isCandidateComponent(AnnotatedBeanDefinition beanDefinition) { return beanDefinition.getMetadata().isIndependent() && (!beanDefinition.getMetadata().isAnnotation()); } }; } private Set<String> getBasePackages(AnnotationMetadata importingClassMetadata) { Map<String, Object> attributes = importingClassMetadata .getAnnotationAttributes(EnableNioFeignClients.class.getCanonicalName()); Set<String> basePackages = new HashSet<>(); String[] packages = ArrayUtils.combine( (String[]) Objects.requireNonNull(attributes).get("value"), (String[])attributes.get("basePackages")); for (String p : packages) { if (StringUtils.hasText(p)) { basePackages.add(p); } } if (basePackages.isEmpty()) { basePackages.add(ClassUtils.getPackageName(importingClassMetadata.getClassName())); } return basePackages; } }
587e28e896a0deaa7e89a7d478cd6ee2377f41bd
[ "Java", "Gradle" ]
3
Gradle
ZhouXiangLW/nio-feign
1e6d5ea871364e32eb08116c25aca8c69ffb3bd4
f5d5441ac5d00eeaf4bf8b0cfaad47b10c042121
refs/heads/master
<repo_name>keyu-tian/BUAA-DS-2019Spring<file_sep>/assignments/0520_0610/dfs.c /* FileName: unique.c Author: Kevin Date: 27/05/19 18:01 Description: dfs(backtracking search) */ #include <stdio.h> #include <stdlib.h> #include <string.h> typedef enum {false, true} bool; #define MN 1003 int V, E, S, T; int dest[MN]; typedef struct { int v; int id; } Edge; Edge edge[MN][MN]; int tot[MN]; void add_edge(const int u, const int v, const int id) { edge[u][tot[u]].id = id; edge[u][tot[u]].v = v; tot[u]++; } int cmp(const void *p, const void *q) { return ((Edge *)p)->id - ((Edge *)q)->id; } void sort_edge() { int u; for (u=0; u<V; ++u) if (tot[u] > 1) qsort(edge[u], tot[u], sizeof(*edge[u]), cmp); } bool ins[MN]; int sta[MN], top; void dfs(const int u) { if (u == T) { int i; for (i=0; i<top; ++i) printf("%d ", sta[i]); puts(""); } int i, v, id; for (i=0; i<tot[u]; ++i) { id = edge[u][i].id; v = edge[u][i].v; if (!ins[v]) { ins[v] = true; sta[top++] = id; dfs(v); --top; ins[v] = false; } } } int main() { scanf("%d %d", &V, &E); int u, v, id; while (E--) { scanf("%d %d %d", &id, &u, &v); add_edge(u, v, id); add_edge(v, u, id); } sort_edge(); S = 0, T = V-1, ins[S] = true; dfs(S); return 0; } <file_sep>/assignments/0401_0422/edit.c /* FileName: edit.c Author: Kevin Date: 8/04/19 14:38 Description: text operation */ #include <stdio.h> #include <string.h> #include "list.h" #define MAX_VERSION 256 #define MAX_LEN 1024 typedef enum { INSERT_OP = 1, ERASE_OP = 2, UNDO_OP = 3, EXIT_OP = -1 } OPER_INDEX; typedef struct { int first; int second; } Pair; typedef struct { OPER_INDEX op; int pos, len; char str[MAX_LEN]; } Operation; Operation _opers[MAX_VERSION], *opers = _opers + 11; char temp_str[MAX_LEN]; List _strs[MAX_VERSION], *strs = _strs + 11; Pair read_input(void); void do_prev_opers(const int prev_n); void do_next_opers(const int prev_n, const int next_n); int main(void) { Pair pair = read_input(); int prev_n = pair.first, next_n = pair.second; do_prev_opers(prev_n); 
do_next_opers(prev_n, next_n); return 0; } Pair read_input(void) { int i, prev_n; gets(temp_str); scanf("%d", &prev_n); for (i=-prev_n; i<0; ++i) { scanf("%d %d %s", &opers[i].op, &opers[i].pos, opers[i].str); opers[i].len = strlen(opers[i].str); } for (i=0; true; ++i) { scanf("%d", &opers[i].op); if (opers[i].op == EXIT_OP) break; if (opers[i].op != UNDO_OP) { scanf("%d %s", &opers[i].pos, opers[i].str); opers[i].len = strlen(opers[i].str); } } return (Pair) { .first = prev_n, .second = i }; } void do_prev_opers(const int prev_n) { char *p; for (p=temp_str; *p; ++p) { push_back(strs, *p); } int i; int HEAD; for (HEAD=-1; HEAD>=-prev_n; --HEAD) { copy(strs+HEAD, strs+HEAD+1); switch (opers[HEAD].op) { case INSERT_OP: for (i=0; i<opers[HEAD].len; ++i) erase_at(strs+HEAD, opers[HEAD].pos); break; case ERASE_OP: if (opers[HEAD].pos == strs[HEAD].cnt) for (i=0; i<opers[HEAD].len; ++i) push_back(strs+HEAD, opers[HEAD].str[i]); else for (i=opers[HEAD].len-1; i>=0; --i) insert_before(strs+HEAD, opers[HEAD].pos, opers[HEAD].str[i]); break; default: break; } } } void do_next_opers(const int prev_n, const int next_n) { // opers[0, next_n) // strs[0] -> strs[1] -> ... 
-> strs[next_n] int now = 0; int i, k, erase_cnt; for (i=0; i<next_n; ++i) { switch (opers[i].op) { case INSERT_OP: ++now; copy(strs+now, strs+now-1); if (opers[i].pos == strs[now].cnt) for (k=0; k<opers[i].len; ++k) push_back(strs+now, opers[i].str[k]); else for (k=opers[i].len-1; k>=0; --k) insert_before(strs+now, opers[i].pos, opers[i].str[k]); break; case ERASE_OP: ++now; copy(strs+now, strs+now-1); sscanf(opers[i].str, "%d", &erase_cnt); for (k=0; k<erase_cnt; ++k) if (!erase_at(strs+now, opers[i].pos)) break; break; case UNDO_OP: if (now > -prev_n) --now; break; default: break; } } println(strs + now); } <file_sep>/projects/klist/klist.h #ifndef INC_K_LIST #define INC_K_LIST #include <stdio.h> #include <malloc.h> #include "config.h" typedef struct _Node { bool tag; Data data; struct _Node *next; struct _Node *prev; } Node; typedef struct { cint cnt; Node *head; Node *tail; } List; // constructor #define init_node(_p, _data, _prev, _next) \ _p->tag = false, \ _p->data = _data, \ _p->prev = _prev, \ _p->next = _next // comparison operator #define equal(n1, n2) n1 == n2 #define greater(n1, n2) n1 > n2 // swapping for any type #define swap(Type, v1, v2) \ do \ { \ Type temp = v1; \ v1 = v2; \ v2 = temp; \ } while (0) // basic functions void init(List *pL); void clear(List *pL); bool empty(const List *pL); cint size(const List *pL); List *copy(List *pDest, const List *pSrc); List *append(List *pDest, const List *pSrc); List *merge(List *pD, const List *pS1, const List *pS2); // pushing and popping bool push_front(List *pL, Data data); bool push_back(List *pL, Data data); Data pop_front(List *pL); Data pop_back(List *pL); // find xint find(const List *pL, const Data key); xint rfind(const List *pL, const Data key); Data at(const List *pL, const xint index); // insertion and erasion bool insert_before(List *pL, const xint index, const Data data); bool insert_after(List *pL, const xint index, const Data data); Data erase_node(List *pL, Node *p); cint 
erase_tagged(List *pL); xint erase_first_key(List *pL, const Data key); xint erase_last_key(List *pL, const Data key); cint erase_keys(List *pL, const Data key); Data erase_at(List *pL, const xint index); cint erase_interval(List *pL, const xint lb, const xint ub); // other operations to the data Data add_at(List *pL, const xint index, const Data data); cint add_interval(List *pL, const xint lb, const xint ub, const Data data); Data assign_at(List *pL, const xint index, const Data data); cint assign_interval(List *pL, const xint lb, const xint ub, const Data data); cint replace(List *pL, const Data oldData, const Data newData); bool swap_at(List *pL, const xint left, const xint right); // other operations to the whole list void reverse(List *pL); void unique(List *pL); void bubble_sort(List *pL); void println(const List *pL); // debug function (print the the number of times that the free and malloc functions are called) void debug_allocate(); #endif <file_sep>/assignments/0225_0318/example1.c #include <stdio.h> #include <ctype.h> #define ML 2333 char buf[ML], *cur = buf; void ReadExpr(void) { char *p, *q; scanf("%[^\n]", buf); for (p = q = buf; *p != '\0'; ++p) if (*p != ' ' && *p != '=') *q++ = *p; *q = '\0'; } int ScanNumber(void) { int ret = 0, sign = 1; if (*cur == '-') { ++cur; sign = -1; } while (isdigit(*cur)) { ret *= 10; ret += *cur - '0'; ++cur; } return sign * ret; } int CalcFactor(void) { int ret = ScanNumber(); while (*cur == '*' || *cur == '/') { char op = *cur++; int next = ScanNumber(); if (op == '*') ret *= next; else ret /= next; } return ret; } int CalcExpr(void) { int ret = CalcFactor(); while (*cur == '+' || *cur == '-') { char op = *cur++; int next = CalcFactor(); if (op == '+') ret += next; else ret -= next; } return ret; } int main() { ReadExpr(); printf("%d", CalcExpr()); return 0; } <file_sep>/assignments/0304_0325/encrypt.c #include <stdio.h> #include <string.h> #include <ctype.h> #define ML 666 const char *FIN = "encrypt.txt"; const char 
*FOUT = "output.txt"; int exist[1 << 7]; char key[ML], ins[ML], outs[ML]; int replace(void) { char *p, ch; scanf("%s", key); int top = strlen(key), i; for (p=key, i=0; i<top; ++i) { if (!exist[(int)key[i]]) { exist[(int)key[i]] = 1; *p++ = key[i]; } } top = p - key; key[top] = '\0'; for (ch='z'; ch>='a'; --ch) { if (!exist[(int)ch]) { key[top++] = ch; } } key[top] = '\0'; top = 0; for (p=ins; *p; ++p) { outs[top++] = islower(*p) ? key[*p - 'a'] : *p; } outs[top] = '\0'; return top; } int main() { FILE *fin = fopen(FIN, "rb"); FILE *fout = fopen(FOUT, "wb"); fseek(fin, 0, SEEK_END); int bytes = ftell(fin); fseek(fin, 0, SEEK_SET); fread(ins, bytes, 1, fin); bytes = replace(); fwrite(outs, bytes, 1, fout); fclose(fin); fclose(fout); return 0; } <file_sep>/assignments/0304_0325/chess.c #include <stdio.h> #include <string.h> #define R 19 #define C 19 #define DIR 8 #define valid(r, c) (r>=0 && r<R && c>=0 && c<C) int board[R][C]; const int dr[DIR] = {-1, 1, 0, 0, -1, -1, 1, 1}; const int dc[DIR] = {0, 0, -1, 1, -1, 1, -1, 1}; int check(const int cur_r, const int cur_c) { int win_player = 0; int cur_player = board[cur_r][cur_c]; if (cur_player) { int i, j, r, c; for (i=0; i<DIR; ++i) { int cnt = 0, blank = 0; r = cur_r, c = cur_c; for (j=0; j<3; ++j) { r += dr[i], c += dc[i]; if (valid(r, c) && board[r][c] == cur_player) { ++cnt; } else { break; } } r = cur_r + 4 * dr[i], c = cur_c + 4 * dc[i]; if (valid(r, c) && board[r][c] == 0) { blank = 1; } r = cur_r - dr[i], c = cur_c - dc[i]; if (valid(r, c) && board[r][c] == 0) { blank = 1; } if (cnt == 3 && blank) { win_player = cur_player; break; } } } return win_player; } int main() { int r, c; for (r=0; r<R; ++r) { for (c=0; c<C; ++c) { scanf("%d", board[r]+c); } } int found = 0; for (r=0; r<R && !found; ++r) { for (c=0; c<C && !found; ++c) { if (check(r, c)) { found = 1; printf("%d:%d,%d\n", board[r][c], r+1, c+1); } } } if (!found) { puts("No"); } return 0; } <file_sep>/projects/klist/config.h #ifndef INC_K_LIST_CONFIG 
#define INC_K_LIST_CONFIG typedef enum {false, true} bool; typedef int Data; typedef int cint; // integer for counting typedef int xint; // integer for indexing extern const Data INVALID_DATA; extern const xint INVALID_INDEX; extern const xint ZERO_INDEX; extern const cint CAPACITY; #endif <file_sep>/assignments/0304_0325/sort.c #include <stdio.h> #include <stdlib.h> #include <string.h> #define ML 33 #define MN 105 typedef struct { char name[ML]; int order; long long tel; } Person; Person person[MN]; int person_cmp(const void *p, const void *q) { Person *a = (Person *)p, *b = (Person *)q; int flag = strcmp(a->name, b->name); if (flag == 0) flag = a->order - b->order; return flag; } Person *unique(Person *begin, Person *end) { Person *left, *right; for (left=begin, right=begin+1; right!=end; ++right) { if (strcmp(left->name, right->name) != 0 || left->tel != right->tel) { if (left+1 != right) { *++left = *right; } else { ++left; } } } return left+1; } void add_index(Person *begin, Person *end) { Person *left, *right; int cnt = 0; char index_buf[ML]; for (left=begin, right=begin+1; right!=end; ++right) { if (strcmp(left->name, right->name) == 0) { sprintf(index_buf, "_%d", ++cnt); strcat(right->name, index_buf); } else { cnt = 0; left = right; } } } int main() { int i, n; scanf("%d", &n); for (i=0; i<n; ++i) { scanf("%s %lld", person[i].name, &person[i].tel); person[i].order = i; } qsort(person, n, sizeof(*person), person_cmp); n = unique(person, person+n) - person; add_index(person, person+n); for (i=0; i<n; ++i) { printf("%s %lld\n", person[i].name, person[i].tel); } return 0; } <file_sep>/assignments/0423_0520/bst.c /* FileName: bst.c Author: Kevin Date: 29/04/19 15:55 Description: implementation of BST */ #include <stdio.h> #include <stdlib.h> #include <string.h> #define T int #define less(t1, t2) t1 < t2 typedef struct _TNode { T val; struct _TNode *fa; struct _TNode *lc, *rc; } TNode; typedef struct { TNode *root; int cnt; } BST; #define construct_tnode(_ptr, 
_fa, _lc, _rc, _val) \ do \ { \ (_ptr) = (TNode *)malloc(sizeof(TNode)), \ (_ptr)->fa = _fa, \ (_ptr)->lc = _lc, \ (_ptr)->rc = _rc, \ (_ptr)->val = _val; \ } while (0) void init(BST *pB) { pB->cnt = 0; pB->root = NULL; } void rec_insert(TNode *pT, const T val) { if (less(val, pT->val)) { if (pT->lc == NULL) construct_tnode(pT->lc, pT, NULL, NULL, val); else rec_insert(pT->lc, val); } else // pT->val >= val { if (pT->rc == NULL) construct_tnode(pT->rc, pT, NULL, NULL, val); else rec_insert(pT->rc, val); } } void insert(BST *pB, const T val) { ++pB->cnt; if (pB->root == NULL) construct_tnode(pB->root, NULL, NULL, NULL, val); else rec_insert(pB->root, val); } void dfs_leaf(const TNode *pT, const int dep) { if (pT == NULL) return; if (pT->lc == NULL && pT->rc == NULL) { printf("%d %d\n", pT->val, dep); return; } dfs_leaf(pT->lc, dep+1); dfs_leaf(pT->rc, dep+1); } int main() { #ifdef _VSC_KEVIN freopen("in.in", "r", stdin); freopen("out.out", "w", stdout); #endif int n; scanf("%d", &n); BST bst; init(&bst); while (n--) { int val; scanf("%d", &val); insert(&bst, val); } dfs_leaf(bst.root, 1); return 0; } <file_sep>/assignments/0423_0520/article.c /* FileName: bst.c Author: Kevin Date: 29/04/19 16:23 Description: implementation of a BST */ #include <stdio.h> #include <stdlib.h> #include <string.h> #include <ctype.h> #define MP 65536 #define T const char * #define less(t1, t2) (strcmp(t1, t2) < 0) #define greater(t1, t2) (strcmp(t1, t2) > 0) #define equal(t1, t2) (strcmp(t1, t2) == 0) typedef struct _TNode { T val; int cnt; struct _TNode *fa; struct _TNode *lc, *rc; } TNode; TNode _mem_pool[MP], *_p_pool = _mem_pool; typedef struct { TNode *root; int cnt; } BST; #define construct_tnode(_ptr, _fa, _lc, _rc, _val) \ do \ { \ (_ptr) = _p_pool++, \ (_ptr)->fa = _fa, \ (_ptr)->lc = _lc, \ (_ptr)->rc = _rc, \ (_ptr)->val = _val; \ (_ptr)->cnt = 1; \ } while (0) void init(BST *pB) { pB->cnt = 0; pB->root = NULL; } void rec_insert(TNode *pT, T val) { if (less(val, pT->val)) { if 
(pT->lc == NULL) new_node(pT->lc, pT, NULL, NULL, val); else rec_insert(pT->lc, val); } else if (greater(val, pT->val)) { if (pT->rc == NULL) new_node(pT->rc, pT, NULL, NULL, val); else rec_insert(pT->rc, val); } else { ++pT->cnt; } } void insert(BST *pB, T val) { ++pB->cnt; if (pB->root == NULL) new_node(pB->root, NULL, NULL, NULL, val); else rec_insert(pB->root, val); } void dfs_leaf(const TNode *pT, const int dep) { if (pT == NULL) return; dfs_leaf(pT->lc, dep+1); printf("%s %d\n", pT->val, pT->cnt); dfs_leaf(pT->rc, dep+1); } #define ML 16384 char buf[ML], *cur = buf; int read_buf(const char *FILE_NAME) { FILE *fin = fopen(FILE_NAME, "rb"); fseek(fin, 0, SEEK_END); const int bytes = ftell(fin); fseek(fin, 0, SEEK_SET); fread(buf, bytes, 1, fin); fclose(fin); return bytes; } char *next_word(void) { while (*cur && !isalpha(*cur)) ++cur; if (!*cur) return NULL; char *begin = cur; do *cur = tolower(*cur); while (isalpha(*(++cur))); *cur++ = '\0'; return begin; } int main() { #ifdef _VSC_KEVIN freopen("in.in", "r", stdin); freopen("out.out", "w", stdout); #endif read_buf("article.txt"); BST bst; init(&bst); char *next; while ((next = next_word()) != NULL) insert(&bst, next); if (bst.root) { printf("%s", bst.root->val); if (bst.root->rc) { printf(" %s", bst.root->rc->val); if (bst.root->rc->rc) { printf(" %s", bst.root->rc->rc->val); } } printf("\n"); } dfs_leaf(bst.root, 1); return 0; } <file_sep>/assignments/0318_0408/article.c #include <stdio.h> #include <stdlib.h> #include <string.h> #include <ctype.h> #define LL long long #ifdef _KEVIN #define MAX_DLEN 100000 #define MAX_DCNT 10000 #define MAX_ALEN 400000 #define MAX_ACNT 50000 #else #define MAX_DLEN 6000000 #define MAX_DCNT 800000 #define MAX_ALEN 8000000 #define MAX_ACNT 1000000 #endif #define TABLE_SIZE MOD1+1 #define MOD1 950269 //#define MOD2 9999971 typedef struct _DNode { struct _DNode *next; char *p; } DNode; DNode d_pool[MAX_DCNT]; typedef struct _PNode { struct _PNode *next; int pos; } PNode; PNode 
p_pool[MAX_DCNT]; typedef struct _ANode { struct _ANode *next; struct _PNode *head; char *p; int cnt; } ANode; ANode a_pool[MAX_ACNT]; DNode *hsd[TABLE_SIZE]; ANode *hsa[TABLE_SIZE]; char len_vis[MAX_DLEN]; #define MA 26 #define islw(c) (c>='a'&&c<='z') #define isup(c) (c>='A'&&c<='Z') #define ctoi(c) c-'a' typedef struct { int cnt; PNode *head; char *p; } Ans; Ans ans[MAX_ACNT]; int hs_idx[MAX_ACNT]; #define swap(a,b) do{Ans t=a;a=b;b=t;}while(0) #define less(a,b) a.cnt>b.cnt||(a.cnt==b.cnt&&strcmp(a.p,b.p)<0) void quickSort(Ans *a,int n) { if(n<=1)return; if(n<6) { int i; for(i=1; i<n; i++) { int j=i; Ans t=a[j]; while(j&&(less(t, a[j-1])))a[j]=a[j-1],--j; a[j]=t; } return; } int m=n>>1; if(less(a[m], a[0]))swap(a[m],*a); if(less(a[n-1], a[0]))swap(a[n-1],*a); if(less(a[n-1], a[m]))swap(a[n-1],a[m]); Ans v=a[m]; a[m]=a[n-2]; a[n-2]=v; int i=0,j=n-2; while(1) { do ++i; while(less(a[i], v)); do --j; while(less(v, a[j])); if(i<j)swap(a[i],a[j]); else break; } swap(a[i],a[n-2]); quickSort(a,i); quickSort(a+i+1,n-1-i); } char _d_buf[MAX_DLEN], * const d_buf=_d_buf+1; char _a_buf[MAX_ALEN], * const a_buf=_a_buf+1; char out_buf[MAX_DLEN]; #define up_to_lw(who) \ for (now=who##_buf; now<end; now+=32) \ { \ if (isup(now[0])) now[0] += 32; \ if (isup(now[1])) now[1] += 32; \ if (isup(now[2])) now[2] += 32; \ if (isup(now[3])) now[3] += 32; \ if (isup(now[4])) now[4] += 32; \ if (isup(now[5])) now[5] += 32; \ if (isup(now[6])) now[6] += 32; \ if (isup(now[7])) now[7] += 32; \ if (isup(now[8])) now[8] += 32; \ if (isup(now[9])) now[9] += 32; \ if (isup(now[10])) now[10] += 32; \ if (isup(now[11])) now[11] += 32; \ if (isup(now[12])) now[12] += 32; \ if (isup(now[13])) now[13] += 32; \ if (isup(now[14])) now[14] += 32; \ if (isup(now[15])) now[15] += 32; \ if (isup(now[16])) now[16] += 32; \ if (isup(now[17])) now[17] += 32; \ if (isup(now[18])) now[18] += 32; \ if (isup(now[19])) now[19] += 32; \ if (isup(now[20])) now[20] += 32; \ if (isup(now[21])) now[21] += 32; \ if 
(isup(now[22])) now[22] += 32; \ if (isup(now[23])) now[23] += 32; \ if (isup(now[24])) now[24] += 32; \ if (isup(now[25])) now[25] += 32; \ if (isup(now[26])) now[26] += 32; \ if (isup(now[27])) now[27] += 32; \ if (isup(now[28])) now[28] += 32; \ if (isup(now[29])) now[29] += 32; \ if (isup(now[30])) now[30] += 32; \ if (isup(now[31])) now[31] += 32; \ } \ for (now-=32; now<end; ++now) \ if (isup(*now)) *now += 32 #define read_file(PATH, who) \ fin = fopen(PATH, "rb"), fseek(fin, 0, SEEK_END), end = who##_buf + (bytes = ftell(fin)), fseek(fin, 0, SEEK_SET), fread(who##_buf, bytes, 1, fin), fclose(fin) int main() { int bytes, i; ANode *pA, *poolA = a_pool; DNode *pD, *poolD = d_pool; PNode *pP, *poolP = p_pool; char *now, *end, *begin; FILE *fin; LL sum; int fd = 0; read_file("dictionary.txt", d); // up_to_lw(d); // have a try(delete this sentence) for (now=d_buf; now<end; ++now) { if (islw(*now) && !islw(now[-1])) { begin = now, sum = 0; do sum = (sum*131+*now++-'a')%MOD1; while (islw(*now)); *now = '\0'; // for (fd=0, pD=hsd[sum]; pD; pD=pD->next) // 字典不发生hash冲突 // if (!strcmp(pD->p, begin)) // { // fd = 1; // break; // } if (!fd) { pD = hsd[sum], hsd[sum] = poolD++, hsd[sum]->next = pD, hsd[sum]->p = begin; ++len_vis[now-begin]; } } } read_file("article.txt", a); up_to_lw(a); int tot = 0; for (now=a_buf; now<end; ++now) { if (islw(*now) && !islw(now[-1])) { begin = now, sum = 0; do sum = (sum*131+*now++-'a')%MOD1; while (islw(*now)); *now = '\0'; if (len_vis[now-begin] && hsd[sum]) // 有可能在字典中出现,也有可能是冲突了,要检查是不是真的出现了 { for (pD=hsd[sum]; pD; pD=pD->next) if (!strcmp(begin, pD->p)) goto found; // 真的在字典中出现了 goto not_found; // 冲突了,实际上没在字典中出现。 } else // 不可能在字典中出现 { not_found: // 以上两种情况都是没在字典中出现,都要添加到文章哈希中 if (hsa[sum]) // 已经有了,或者冲突了 { for (fd=0, pA=hsa[sum]; pA; pA=pA->next) if (!strcmp(pA->p, begin)) // 真的已经有了,加加cnt,然后加位置 { fd = ++pA->cnt, pP = pA->head, pA->head = poolP++, pA->head->pos = begin-a_buf, pA->head->next = pP; break; } if (!fd) // 冲突了,要加新的 pA = hsa[sum], 
hsa[sum] = poolA++, hsa[sum]->next = pA, hsa[sum]->head = poolP++, // 加第一个位置 hsa[sum]->head->pos = begin-a_buf, hsa[sum]->head->next = NULL, hsa[sum]->cnt = 1, hsa[sum]->p = begin; } else // 没有啊,必须添加 hsa[sum] = poolA++, hsa[sum]->next = NULL, hsa[sum]->head = poolP++, hsa[sum]->head->pos = begin-a_buf, hsa[sum]->head->next = NULL, hsa[sum]->cnt = 1, hsa[sum]->p = begin, hs_idx[tot++] = sum; } found:; } } int final_tot = 0; for (i=0; i<tot; ++i) // hava a try(unroll-loop) for (pA = hsa[hs_idx[i]]; pA; pA=pA->next) ans[final_tot].cnt = pA->cnt, ans[final_tot].head = pA->head, ans[final_tot++].p = pA->p; quickSort(ans, final_tot); char *p = out_buf; char *s; int sta[21], top = 0, tp, top2 = 0; for (i=0; i<final_tot; ++i) { for (s=ans[i].p; *s; ++s) *p++ = *s; *p++ = ' '; tp = ans[i].cnt, top = 0; do sta[top++] = tp%10; while (tp/=10); while (top--) *p++ = '0'+sta[top]; *p++ = ' '; for (pP=ans[i].head, top2=0; pP; pP=pP->next) hs_idx[top2++] = pP->pos; while (top2--) { tp = hs_idx[top2], top = 0; do sta[top++] = tp%10; while (tp/=10); while (top--) *p++ = '0'+sta[top]; *p++ = ' '; } p[-1] = '\n'; } FILE *fout = fopen("misspelling.txt", "wb"); bytes = p - out_buf; fwrite(out_buf, bytes, 1, fout); fclose(fout); return 0; } <file_sep>/assignments/0304_0325/books.c #include <stdio.h> #include <stdlib.h> #include <string.h> #define ML 50 #define MN 503 enum Oper { EXIT_OP, INS_OP, SEA_OP, DEL_OP }; typedef struct { char name[ML]; char author[ML]; char pub[ML]; char date[ML]; } Book; int tot; Book books[MN]; int book_cmp(const void *p, const void *q); void book_sort(void); void load_data(void); void save_data(void); void printf_book(const Book *pBook, FILE *fp); void insert_book(void); void search_book(void); void delete_book(void); int main(int argc, char *argv[]) { load_data(); int op; while (scanf("%d", &op), op != EXIT_OP) { switch (op) { case INS_OP: insert_book(); break; case SEA_OP: search_book(); break; case DEL_OP: delete_book(); break; } } save_data(); return 0; } 
int book_cmp(const void *p, const void *q) { return strcmp((char *)p, (char *)q); } void book_sort(void) { qsort(books, tot, sizeof(*books), book_cmp); } void load_data(void) { tot = 0; const char *FIN = "books.txt"; FILE *fin = fopen(FIN, "r"); while (fscanf(fin, "%s %s %s %s", books[tot].name, books[tot].author, books[tot].pub, books[tot].date) == 4) { ++tot; } book_sort(); fclose(fin); } void save_data(void) { const char *FOUT = "ordered.txt"; FILE *fout = fopen(FOUT, "w"); int i; for (i=0; i<tot; ++i) { printf_book(books+i, fout); } } void insert_book(void) { Book newBook; scanf("%s %s %s %s", newBook.name, newBook.author, newBook.pub, newBook.date); books[tot++] = newBook; book_sort(); } void printf_book(const Book *pBook, FILE *fp) { fprintf(fp, "%-50s%-20s%-30s%-10s\n", pBook->name, pBook->author, pBook->pub, pBook->date); } void search_book(void) { char key_word[ML]; scanf("%s", key_word); int i; for (i=0; i<tot; ++i) { if (strstr(books[i].name, key_word)) { printf_book(books+i, stdout); } } } void delete_book(void) { char key_word[ML]; scanf("%s", key_word); int i, j; for (i=0; i<tot; ++i) { if (strstr(books[i].name, key_word)) { for (j=i; j<tot-1; ++j) { books[j] = books[j+1]; } --tot; --i; } } } <file_sep>/assignments/0318_0408/monkey.c /* FileName: monkey.c Author: Kevin Date: 25/03/19 12:33 Description: Josephus Problem */ #include <stdio.h> #include <string.h> int main(void) { int i, n, m, k, r; scanf("%d %d %d", &n, &m, &k); for (i=1, r=0; i<=n; ++i) r = (r+m) % i; printf("%d", (r+k-1) % n + 1); return 0; } <file_sep>/assignments/0506_0610/find.c /* FileName: find.c Author: Kevin Date: 03/06/19 17:05 Description: binary search */ #include <stdio.h> #include <stdlib.h> #include <malloc.h> #include <string.h> #define ML 25 #define MC 257 #define MN 3600 #define NHASH 3001 #define MULT 37 int words_cnt; char words[MN][ML]; int begin[MC], end[MC]; int cmp_time; typedef unsigned int hint; typedef struct _Node { struct _Node *next, *prev; const char *s; } 
Node; typedef struct { Node *head, *tail; int cnt; } List; List hash[NHASH]; hint get_hash_code(const char *s); void push_first(List *pl, const char *s); void push_back(List *pl, const char *s); void insert(const char *s); int naive_search(const char *s); int naive_binary_search(const char *s); int hash_binary_search(const char *s); int hash_search(const char *s); int main() { memset(begin, -1, sizeof(begin)); FILE *fin = fopen("dictionary3000.txt", "r"); while (fscanf(fin, "%s", words[words_cnt]) == 1) { if (begin[words[words_cnt][0]] == -1) begin[words[words_cnt][0]] = words_cnt; end[words[words_cnt][0]] = words_cnt; insert(words[words_cnt]); ++words_cnt; } static char word[ML]; int mode, found; while (~scanf("%s %d", word, &mode)) { cmp_time = 0; switch (mode) { case 1: found = naive_search(word); printf("%d %d\n", found, cmp_time); break; case 2: found = naive_binary_search(word); printf("%d %d\n", found, cmp_time); break; case 3: found = hash_binary_search(word); printf("%d %d\n", found, cmp_time); break; case 4: found = hash_search(word); printf("%d %d\n", found, cmp_time); break; } } #ifdef _KEVIN system("pause"); #endif return 0; } hint get_hash_code(const char *s) { unsigned int h; for(h=0; *s; ++s) h = MULT*h + *s; return h % NHASH; } void push_first(List *pl, const char *s) { pl->head = pl->tail = (Node *)malloc(sizeof(Node)); pl->head->next = pl->head->prev = NULL; pl->head->s = s; } void push_back(List *pl, const char *s) { if (pl->cnt) { Node *p_new = (Node *)malloc(sizeof(Node)); p_new->prev = pl->tail, p_new->next = NULL, p_new->s = s; pl->tail = pl->tail->next = p_new; } else push_first(pl, s); ++pl->cnt; } void insert(const char *s) { hint hc = get_hash_code(s); push_back(hash+hc, s); } int naive_search(const char *s) { int i; for (i=0; i<words_cnt; ++i) { int cmp = strcmp(words[i], s); ++cmp_time; if (cmp == 0) return 1; if (cmp > 0) return 0; } return 0; } int naive_binary_search(const char *s) { int l = 0, r = words_cnt-1; while (l <= r) { int 
mid = (l+r) >> 1; int cmp = strcmp(words[mid], s); ++cmp_time; if (cmp == 0) return 1; if (cmp < 0) l = mid + 1; else r = mid - 1; } return 0; } int hash_binary_search(const char *s) { int l = begin[s[0]], r = end[s[0]]; if (l==-1 || r==-1) return 0; while (l <= r) { int mid = (l+r) >> 1; int cmp = strcmp(words[mid], s); ++cmp_time; if (cmp == 0) return 1; if (cmp < 0) l = mid + 1; else r = mid - 1; } return 0; } int hash_search(const char *s) { hint hc = get_hash_code(s); Node *p; for (p=hash[hc].head; p; p=p->next) { int cmp = strcmp(p->s, s); ++cmp_time; if (cmp == 0) return 1; if (cmp > 0) return 0; } return 0; } <file_sep>/assignments/0304_0325/replace.c #include <stdio.h> #include <stdlib.h> #include <string.h> #define ML 10000 const char *FIN = "filein.txt"; const char *FOUT = "fileout.txt"; char ins[ML], outs[ML]; char s[ML], d[ML]; int replace(void) { char *p; int top = 0; scanf("%s %s", s, d); int len_s = strlen(s), len_d = strlen(d), i; for (p=ins; *p; ++p) { if (!strncasecmp(p, s, len_s)) { p += len_s - 1; for (i=0; i<len_d; ++i) { outs[top++] = d[i]; } } else { outs[top++] = *p; } } outs[top] = '\0'; // 注意不能写成outs[top++]='\0',因为有效字符数是top,outs[top]这个位置就是第top+1个字符就应该是0 // 如果写成outs[top++]='\0',那么在这之后top就比有效字符数多1了,fwrite就多写了一个'\0',有可能就在末尾多了一个空格 return top; } int main() { FILE *fin = fopen(FIN, "rb"); FILE *fout = fopen(FOUT, "wb"); fseek(fin, 0, SEEK_END); int bytes = ftell(fin); fseek(fin, 0, SEEK_SET); fread(ins, bytes, 1, fin); bytes = replace(); fwrite(outs, bytes, 1, fout); fclose(fin); fclose(fout); return 0; } <file_sep>/README.md # BUAA-DS-2019Spring 历次作业编程题源码(作业截止后上传) 以及一些C89/C++14实现的DS(学校日常编程题 & 大作业限定C89 qwq) <file_sep>/assignments/0423_0520/expr_tree.c /* FileName: expr.c Author: Kevin Date: 29/04/9 18:56 Description: calculate the result of a given expression(by expression tree) */ #include <stdio.h> #include <string.h> #include <ctype.h> #define ML 2333 #define MP 65536 #define CALC(OP) \ do \ { \ if (postfix[i].op == #OP[0]) \ postfix[i].val 
= postfix[i].lc->val OP postfix[i].rc->val; \ } while (0) typedef enum {false, true} bool; typedef struct _Node { bool is_op; char op; int val; struct _Node *lc, *rc; } Node; Node op_sta[ML], *tree_sta[ML], postfix[ML]; int op_top, tree_top, node_tot; char prior[ML]; char buf[ML]; void init_prior(void); void read_buf(void); void get_postfix(void); Node *build_expr_tree(void); void print_node(const Node *p); int main() { init_prior(); read_buf(); get_postfix(); Node *root = build_expr_tree(); if (root != NULL) { print_node(root); if (root->lc != NULL) putchar(' '), print_node(root->lc); if (root->rc != NULL) putchar(' '), print_node(root->rc); putchar('\n'); printf("%d", root->val); } return 0; } void init_prior(void) { prior['('] = prior[')'] = 0; prior['+'] = prior['-'] = 1; prior['*'] = prior['/'] = 2; } void read_buf(void) { fgets(buf, ML-1, stdin); char *p, *q; for (p=q=buf; *p; ++p) if (!isspace(*p) && *p!='=') *q++ = *p; *q = '\0'; } void get_postfix(void) { Node next, out_op; char *p; for (p=buf, op_top=node_tot=0; *p; ++p) { if (isdigit(*p)) { int val = 0; do val *= 10, val += *p++ - '0'; while (isdigit(*p)); --p; next.is_op = false; next.val = val; postfix[node_tot++] = next; } else { if (*p == ')') { while (1) { out_op = op_sta[--op_top]; if (out_op.op == '(') break; postfix[node_tot++] = out_op; } } else { if (*p != '(') { while (op_top && prior[op_sta[op_top-1].op] >= prior[*p]) postfix[node_tot++] = op_sta[--op_top]; } next.is_op = true; next.op = *p; op_sta[op_top++] = next; } } } while (op_top) postfix[node_tot++] = op_sta[--op_top]; } Node *build_expr_tree(void) { int i; for (i=0; i<node_tot; ++i) { if (postfix[i].is_op) { postfix[i].rc = tree_sta[--tree_top]; postfix[i].lc = tree_sta[--tree_top]; CALC(+); CALC(-); CALC(*); CALC(/); } else { postfix[i].rc = NULL; postfix[i].lc = NULL; } tree_sta[tree_top++] = postfix+i; } return tree_sta[0]; } void print_node(const Node *p) { if (p->is_op) printf("%c", p->op); else printf("%d", p->val); } 
<file_sep>/assignments/0520_0610/subway.c /* FileName: subway.c Author: Kevin Date: 27/05/19 16:44 Description: Dijkstra or SPFA or Floyd */ #include <stdio.h> #include <stdlib.h> #include <string.h> typedef enum {false, true} bool; #define MLINE 20 #define ML 50 #define MV 300 int V; char station_name[MV][ML]; int get_id(const char *this_name); int line[MV][MV]; int dist[MV][MV], INF = 0x3f3f3f3f; void build_graph(void); void floyd(void); void print_path(const int src, const int dest); int main() { char src_name[ML], dest_name[ML]; scanf("%s %s", src_name, dest_name); build_graph(); floyd(); print_path(get_id(src_name), get_id(dest_name)); return 0; } int get_id(const char *this_name) // valid range: [1, V] { int id; bool found = false; for (id=1; id<=V; ++id) { if (strcmp(this_name, station_name[id]) == 0) { found = true; break; } } if (!found) strcpy(station_name[id=++V], this_name); return id; } void build_graph() { FILE *fin = fopen("bgstations.txt", "r"); int line_id, num_of_lines, num_of_stations; char this_name[ML]; fscanf(fin, "%d", &num_of_lines); while (num_of_lines--) { fscanf(fin, "%d %d", &line_id, &num_of_stations); int prev_id = -1, id; while (num_of_stations--) { fscanf(fin, "%s %*d", this_name); id = get_id(this_name); if (~prev_id) { dist[prev_id][id] = dist[id][prev_id] = 1; line[prev_id][id] = line[id][prev_id] = line_id; } prev_id = id; } } fclose(fin); } int path[MV][MV]; void floyd(void) { int i, j, k; for (i=1; i<=V; ++i) for (j=1; j<=V; ++j) if (!dist[i][j] && i!=j) dist[i][j] = INF; for (k=1; k<=V; ++k) for (i=1; i<=V; ++i) for (j=1; j<=V; ++j) if (dist[i][j] > dist[i][k] + dist[k][j]) dist[i][j] = dist[i][k] + dist[k][j], path[i][j] = k; } int sta[MV], top; void get_path(const int src, const int dest) { if (!path[src][dest]) sta[top++] = src; else get_path(src, path[src][dest]), get_path(path[src][dest], dest); } void print_path(const int src, const int dest) { top = 0; get_path(src, dest); sta[top++] = dest; printf("%s", 
station_name[src]); int i, j; for (i=0; i<top-1; i=j) { int cur_line = line[sta[i]][sta[i+1]]; for (j=i+1; j+1<top && line[sta[j]][sta[j+1]]==cur_line; ++j); printf("-%d(%d)-%s", cur_line, j-i, station_name[sta[j]]); } } <file_sep>/assignments/0401_0422/expr.c /* FileName: expr.c Author: Kevin Date: 8/04/19 12:33 Description: calculate the result of a given expression */ #include <stdio.h> #include <string.h> #include <ctype.h> #define ML 2333 char buf[ML], *cur = buf; void read_string(void); int get_num(void), get_fact(void), calc_term(void), calc_expr(void); int main() { read_string(); printf("%d", calc_expr()); return 0; } void read_string(void) { char *p, *q; scanf("%[^\n]", buf); for (p = q = buf; *p != '\0'; ++p) if (!strchr("=\n\r\t ", *p)) *q++ = *p; *q = '\0'; } int get_num(void) { int ret = 0, sign = 1; if (*cur == '-') { ++cur; sign = -1; } while (isdigit(*cur)) { ret *= 10; ret += *cur - '0'; ++cur; } return sign * ret; } int get_fact(void) { int ret = 0; if (*cur == '(') { ++cur; // '(' ret = calc_expr(); ++cur; // ')' } else { ret = get_num(); } return ret; } int calc_term(void) { int ret = get_fact(); while (*cur == '*' || *cur == '/' || *cur == '%') { char op = *cur++; int next = get_fact(); if (op == '*') ret *= next; else if (op == '/') ret /= next; else ret %= next; } return ret; } int calc_expr(void) { int ret = calc_term(); while (*cur == '+' || *cur == '-') { char op = *cur++; int next = calc_term(); if (op == '+') ret += next; else ret -= next; } return ret; } <file_sep>/assignments/0506_0610/sort.c /* FileName: queue.c Author: Kevin Date: 03/06/19 19:11 Description: sort */ #include <stdio.h> #include <stdlib.h> #include <string.h> #define MN 1003 int a[MN], cnt; void quickSort(int *b, int left, int right) { if (left < right) { int i, last; for (last=left, i=last+1; i<=right; i++) { if (b[i] < b[left]) { ++last; { int temp = b[last]; b[last] = b[i]; b[i] = temp; } } ++cnt; } { int temp = b[left]; b[left] = b[last]; b[last] = temp; } 
quickSort(b, left, last-1); quickSort(b, last+1, right); } } void merge(int *b, int left, int begin, int end) { static int tmp[MN]; int i = left, j = begin, q = left; for (++j; i<=begin && j<=end; ++cnt) tmp[q++] = b[i]<b[j] ? b[i++] : b[j++]; while (i<=begin) tmp[q++] = b[i++]; while (j<=end) tmp[q++] = b[j++]; for(i=left; i<=end; i++) b[i] = tmp[i]; } void mergeSort(int *b, int left, int right) { if (left < right) { int mid = (left + right) >> 1; mergeSort(b, left, mid); mergeSort(b, mid+1, right); merge(b, left, mid, right); } else return; } void heapadjust(int *h, int ik, int nk) { int j, tempk = h[ik]; j = 2*ik + 1; while (j < nk) { if (j+1<nk && h[j]<h[j+1]) ++j; if (tempk < h[j]) { h[(j-1)/2] = h[j]; j = 2*j+1; cnt++; } else { cnt++; break; } } h[(j-1) >> 1] = tempk; } void buildHeap(int *b, int n) { int i; for(i = n/2-1; i >= 0; i--) heapadjust(b, i, n); } void heapSort(int *b, int n) { if (n < 1) return; buildHeap(b, n); int i; for(i = n-1; i > 0; i--) { { int temp = b[i]; b[i] = b[0]; b[0] = temp; } heapadjust(b, 0, i); } } void bubbleSort(int *b, int n) { if (n < 1) return; int i, j, swapped; for (i = n-1; i > 0; i--) { swapped = 0; for (j = 0; j < i; j++) { if (b[j] > b[j+1]) { { int temp = b[j]; b[j] = b[j+1]; b[j+1] = temp; swapped = 1; } } cnt++; } if (!swapped) break; } } void selectSort(int *b, int n) { if (n < 1) return; int i, j, t; for(i = 0; i < n-1; i++) { t = i; for(j = i+1; j < n; j++) { if(b[t] > b[j]) { t = j; } cnt++; } { int temp = b[i]; b[i] = b[t]; b[t] = temp; } } } int main() { int n, op; scanf("%d %d", &n, &op); int i; for (i=0; i<n; i++) scanf("%d", a+i); switch (op) { case 1:selectSort(a, n);break; case 2:bubbleSort(a, n);break; case 3:heapSort(a, n);break; case 4:mergeSort(a, 0, n-1);break; case 5:quickSort(a, 0, n-1);break; } for (i=0; i<n; i++) printf("%d ", a[i]); printf("\n%d", cnt); return 0; } <file_sep>/assignments/0318_0408/encode.c /* FileName: encode.c Author: Kevin Date: 25/03/19 13:24 Description: encode a 
string(implementation) */ #include <stdio.h> #include <string.h> #include <ctype.h> #define ML 6666 #define KEY_LEN 95 const char *FIN = "in.txt"; const char *FOUT = "in_crpyt.txt"; int exist[1 << 8]; char code[1 << 8]; char ins[ML], outs[ML]; char key[ML]; void get_key(void) { char *p, ch; scanf("%[^\n]", key); int top = strlen(key), i; for (p=key, i=0; i<top; ++i) { if (!exist[(int)key[i]]) { exist[(int)key[i]] = 1; *p++ = (int)key[i]; } } top = p - key; for (ch=32; ch<=126; ++ch) { if (!exist[(int)ch]) { key[top++] = ch; } } key[top] = '\0'; } struct Node { int l, r; char ch; } list[ML]; void erase(int p) { list[list[p].l].r = list[p].r; list[list[p].r].l = list[p].l; } void gener_code(void) { int i, p, times; int before = 0, after = 0; for (i=0; i<KEY_LEN; ++i) list[i].l = (KEY_LEN+i-1) % KEY_LEN, list[i].r = (i+1) % KEY_LEN, list[i].ch = key[i]; p = 0; while (list[p].l != p) { before = list[p].ch; erase(p); times = before; while (times--) p = list[p].r; after = list[p].ch; code[before] = after; } code[after] = list[0].ch; } int main() { FILE *fin = fopen(FIN, "rb"); FILE *fout = fopen(FOUT, "wb"); fseek(fin, 0, SEEK_END); int bytes = ftell(fin); fseek(fin, 0, SEEK_SET); fread(ins, bytes, 1, fin); get_key(); gener_code(); int i; for (i=0; i<bytes; ++i) { if (code[(int)ins[i]]) outs[i] = code[(int)ins[i]]; else outs[i] = ins[i]; } fwrite(outs, bytes, 1, fout); fclose(fin); fclose(fout); return 0; } <file_sep>/assignments/0401_0422/list.h #ifndef LIST_H #define LIST_H #include <stdio.h> #include <malloc.h> //int free_cnt; //int malloc_cnt; //#define free(p) free((++free_cnt, p)) //#define malloc(s) malloc((++malloc_cnt, s)) typedef enum {false, true} bool; typedef char Data; typedef int cint; // integer for counting typedef int xint; // integer for indexing const Data INVALID_DATA = (char)-125; const xint INVALID_INDEX = -1; const xint ZERO_INDEX = 0; typedef struct _Node { bool tag; Data data; struct _Node *next; struct _Node *prev; } Node; typedef struct { cint 
cnt; Node *head; Node *tail; } List; #define init_node(_p, _data, _prev, _next) \ _p->tag = false, \ _p->data = _data, \ _p->prev = _prev, \ _p->next = _next #define equal(n1, n2) n1 == n2 #define less(n1, n2) n1 < n2 #define swap(Type, v1, v2) \ do \ { \ Type temp = v1; \ v1 = v2; \ v2 = temp; \ } while (0) void init(List *pL); void clear(List *pL); bool empty(const List *pL); cint size(const List *pL); List *copy(List *pD, const List *pS); List *append(List *pD, const List *pS); List *merge(List *pD, const List *pS1, const List *pS2); void push_first(List *pL, Data data); bool push_front(List *pL, Data data); bool push_back(List *pL, Data data); Data pop_last(List *pL); Data pop_front(List *pL); Data pop_back(List *pL); bool insert_before(List *pL, const xint index, const Data data); bool insert_after(List *pL, const xint index, const Data data); xint find(const List *pL, const Data key); xint rfind(const List *pL, const Data key); Node *ptr_at(const List *pL, const xint index); Data at(const List *pL, const xint index); Data erase_node(List *pL, Node *p); cint erase_tagged(List *pL); xint erase_first_key(List *pL, const Data key); xint erase_last_key(List *pL, const Data key); cint erase_keys(List *pL, const Data key); Data erase_at(List *pL, const xint index); cint erase_interval(List *pL, const xint lb, const xint ub); Data add_at(List *pL, const xint index, const Data data); cint add_interval(List *pL, const xint lb, const xint ub, const Data data); Data assign_at(List *pL, const xint index, const Data data); cint assign_interval(List *pL, const xint lb, const xint ub, const Data data); cint replace(List *pL, const Data oldData, const Data newData); bool swap_at(List *pL, const xint left, const xint right); void reverse(List *pL); void unique(List *pL); void sort(List *pL); void println(const List *pL); void init(List *pL) { pL->cnt = 0; pL->head = pL->tail = NULL; } void clear(List *pL) { Node *p, *q; for (p=pL->head; p; p=q) { q = p->next; free(p); } 
pL->head = pL->tail = NULL; pL->cnt = 0; } bool empty(const List *pL) { return !(pL->cnt); } cint size(const List *pL) { return pL->cnt; } List *copy(List *pD, const List *pS) { clear(pD); const Node *p; for (p=pS->head; p; p=p->next) push_back(pD, p->data); return pD; } List *append(List *pD, const List *pS) { const Node *p; for (p=pS->head; p; p=p->next) push_back(pD, p->data); return pD; } List *merge(List *pD, const List *pS1, const List *pS2) { clear(pD); const Node *p = pS1->head, *q = pS2->head; while (p && q) { if (less(p->data, q->data)) push_back(pD, p->data), p = p->next; else push_back(pD, q->data), q = q->next; } while (p) push_back(pD, p->data), p = p->next; while (q) push_back(pD, q->data), q = q->next; return pD; } void push_first(List *pL, Data data) { ++pL->cnt; Node *newNode = (Node *)malloc(sizeof(Node)); init_node(newNode, data, NULL, NULL); pL->head = pL->tail = newNode; } bool push_front(List *pL, Data data) { if (pL->cnt) { ++pL->cnt; Node *newNode = (Node *)malloc(sizeof(Node)); init_node(newNode, data, NULL, pL->head); pL->head = pL->head->prev = newNode; } else push_first(pL, data); return true; } bool push_back(List *pL, Data data) { if (pL->cnt) { ++pL->cnt; Node *newNode = (Node *)malloc(sizeof(Node)); init_node(newNode, data, pL->tail, NULL); pL->tail = pL->tail->next = newNode; } else push_first(pL, data); return true; } Data pop_last(List *pL) { Data data = pL->head->data; free(pL->head), --pL->cnt; pL->head = pL->tail = NULL; return data; } Data pop_front(List *pL) { if (pL->cnt) { if (pL->cnt == 1) return pop_last(pL); // guard clause Node *oldHead = pL->head; pL->head = pL->head->next; pL->head->prev = NULL; Data data = oldHead->data; free(oldHead), --pL->cnt; return data; } return INVALID_DATA; } Data pop_back(List *pL) { if (pL->cnt) { if (pL->cnt == 1) return pop_last(pL); // guard clause Node *oldTail = pL->tail; pL->tail = pL->tail->prev; pL->tail->next = NULL; Data data = oldTail->data; free(oldTail), --pL->cnt; return 
data; } return INVALID_DATA; } bool insert_before(List *pL, const xint index, const Data data) { Node *p = ptr_at(pL, index); if (p) { if (p != pL->head) { ++pL->cnt; Node *newNode = (Node *)malloc(sizeof(Node)); init_node(newNode, data, p->prev, p); newNode->prev->next = newNode; newNode->next->prev = newNode; } else push_front(pL, data); return true; } return false; } bool insert_after(List *pL, const xint index, const Data data) { Node *p = ptr_at(pL, index); if (p) { if (p != pL->tail) { ++pL->cnt; Node *newNode = (Node *)malloc(sizeof(Node)); init_node(newNode, data, p, p->next); newNode->prev->next = newNode; newNode->next->prev = newNode; } else push_back(pL, data); return true; } return false; } xint find(const List *pL, const Data key) { Node *p; bool found = false; xint now = ZERO_INDEX; for (p=pL->head; p; p=p->next, ++now) { if (equal(p->data, key)) { found = true; break; } } return found ? now : INVALID_INDEX; } xint rfind(const List *pL, const Data key) { Node *p; bool found = false; xint now = pL->cnt-1 + ZERO_INDEX; for (p=pL->tail; p; p=p->prev, --now) { if (equal(p->data, key)) { found = true; break; } } return found ? 
now : INVALID_INDEX; } Node *ptr_at(const List *pL, const xint index) { if (index >= ZERO_INDEX && index <= pL->cnt-1 + ZERO_INDEX) { Node *p; xint now; if (index < pL->cnt / 2) { now = ZERO_INDEX; for (p=pL->head; now<index; p=p->next, ++now) { ; } } else { now = pL->cnt-1 + ZERO_INDEX; for (p=pL->tail; now>index; p=p->prev, --now) { ; } } return p; } return NULL; } Data at(const List *pL, const xint index) { Node *p = ptr_at(pL, index); if (p) { return p->data; } return INVALID_DATA; } Data erase_node(List *pL, Node *p) { if (p == pL->head) return pop_front(pL); if (p == pL->tail) return pop_back(pL); Data data = p->data; p->prev->next = p->next; p->next->prev = p->prev; free(p), --pL->cnt; return data; } cint erase_tagged(List *pL) { cint erased_cnt = 0; Node *p, *q; for (p=pL->head; p; p=q) { q = p->next; if (p->tag) { ++erased_cnt; erase_node(pL, p); } } return erased_cnt; } xint erase_first_key(List *pL, const Data key) { xint now = ZERO_INDEX; bool found = false; Node *p, *q; for (p=pL->head; p; p=q, ++now) { q = p->next; if (equal(p->data, key)) { found = true; erase_node(pL, p); break; } } return found ? now : INVALID_INDEX; } xint erase_last_key(List *pL, const Data key) { xint now = pL->cnt-1 + ZERO_INDEX; bool found = false; Node *p, *q; for (p=pL->tail; p; p=q, --now) { q = p->prev; if (equal(p->data, key)) { found = true; erase_node(pL, p); break; } } return found ? 
now : INVALID_INDEX; } cint erase_keys(List *pL, const Data key) { cint erased_cnt = 0; Node *p, *q; for (p=pL->head; p; p=q) { q = p->next; if (equal(p->data, key)) { ++erased_cnt; erase_node(pL, p); } } return erased_cnt; } Data erase_at(List *pL, const xint index) { Node *p = ptr_at(pL, index); if (p) { return erase_node(pL, p); } return INVALID_DATA; } cint erase_interval(List *pL, const xint lb, const xint ub) // [lb, ub) { if (lb >= ub) return 0; // guard clause cint erased_cnt = 0; Node *pLB = ptr_at(pL, lb); Node *pUB = ptr_at(pL, ub-1); if (pLB && pUB) { Node *p, *q, *end = pUB->next; // Restore the pUB->next. Because *pUB will be erased, too. for (p=pLB; p!=end; p=q) { q = p->next; ++erased_cnt; erase_node(pL, p); } } return erased_cnt; // obviously, erased_cnt == (ub-lb) } Data add_at(List *pL, const xint index, const Data data) { Node *p = ptr_at(pL, index); if (p) { Data oldData = p->data; p->data += data; return oldData; } return INVALID_DATA; } cint add_interval(List *pL, const xint lb, const xint ub, const Data data) { if (lb >= ub) return 0; // guard clause cint added_cnt = 0; Node *pLB = ptr_at(pL, lb); Node *pUB = ptr_at(pL, ub-1); if (pLB && pUB) { Node *p; for (p=pLB; p!=pUB->next; p=p->next) { ++added_cnt; p->data += data; } } return added_cnt; // obviously, added_cnt == (ub-lb) } Data assign_at(List *pL, const xint index, const Data data) { Node *p = ptr_at(pL, index); if (p) { Data oldData = p->data; p->data = data; return oldData; } return INVALID_DATA; } cint assign_interval(List *pL, const xint lb, const xint ub, const Data data) { if (lb >= ub) return 0; // guard clause cint assigned_cnt = 0; Node *pLB = ptr_at(pL, lb); Node *pUB = ptr_at(pL, ub-1); if (pLB && pUB) { Node *p; for (p=pLB; p!=pUB->next; p=p->next) { ++assigned_cnt; p->data = data; } } return assigned_cnt; // obviously, assigned_cnt == (ub-lb) } cint replace(List *pL, const Data oldData, const Data newData) { cint replaced_cnt = 0; Node *p; for (p=pL->head; p; p=p->next) { 
if (equal(p->data, oldData)) { ++replaced_cnt; p->data = newData; } } return replaced_cnt; } bool swap_at(List *pL, const xint left, const xint right) { if (left == right) return false; // guard clause Node *p = ptr_at(pL, left); Node *q = ptr_at(pL, right); if (p && q) { swap(Data, p->data, q->data); return true; } return false; } void reverse(List *pL) { if (pL->cnt) { Node *p, *q; for (p=pL->head; p; p=q) { q = p->next; swap(Node *, p->prev, p->next); } swap(Node *, pL->head, pL->tail); } } void unique(List *pL) { if (pL->cnt < 2) return; // guard clause Node *p, *q; for (p=pL->head->next; p; p=q) { q = p->next; if (equal(p->data, p->prev->data)) { erase_node(pL, p); } } } void sort(List *pL) { if (pL->cnt < 2) return; // guard clause Node *end, *p; for (end=pL->tail; end!=pL->head; end=end->prev) { for (p=pL->head; p!=end; p=p->next) { if (less(p->next->data, p->data)) { swap(Data, p->data, p->next->data); } } } } void println(const List *pL) { const Node *p; for (p=pL->head; p; p=p->next) printf("%c", p->data); putchar('\n'); } #endif // LIST_H <file_sep>/projects/klist/config.c #include "config.h" const Data INVALID_DATA = -51287; const xint INVALID_INDEX = -1; const xint ZERO_INDEX = 0; const cint CAPACITY = 1e9; <file_sep>/assignments/0506_0610/seat.c /* FileName: article.c Author: Kevin Date: 03/06/19 18:45 Description: brace-matching */ #include <stdio.h> #include <stdlib.h> #include <string.h> #include <ctype.h> #define ML 55 #define MN 1007 typedef struct { int id, seat; char name[ML]; } Node; int n; Node a[MN]; int cmp_multi(const void *p, const void *q) { Node *a = (Node *)p, *b = (Node *)q; return a->seat != b->seat ? 
a->seat - b->seat : a->id - b->id; } int cmp_id(const void *p, const void *q) { return ((Node *)p)->id - ((Node *)q)->id; } void read_file() { FILE *fin = fopen("in.txt","r"); scanf("%d", &n); int i; for (i=0; i<n; i++) fscanf(fin, "%d %s %d", &a[i].id, a[i].name, &a[i].seat); qsort(a, n, sizeof(*a), cmp_multi); fclose(fin); } void write_ans() { FILE *fout = fopen("out.txt","w"); qsort(a ,n, sizeof(*a), cmp_id); int i; for (i=0; i<n; i++) fprintf(fout,"%d %s %d\n", a[i].id, a[i].name, a[i].seat); fclose(fout); } void check_first() { if (a->seat!=1) { a[n-1].seat = 1; qsort(a, n, sizeof(*a), cmp_multi); } } void check_all() { int i; for (i=0; i<n-1; i++) { if (a[i].seat != a[i+1].seat) { if (a[i+1].seat != a[i].seat+1) { a[n-1].seat = a[i].seat+1; qsort(a, n, sizeof(*a), cmp_multi); } } } for (i=0; i<n-1; i++) { if (a[i].seat == a[i+1].seat) { a[i+1].seat = a[n-1].seat+1; qsort(a, n, sizeof(*a), cmp_multi); } } } int main() { read_file(); check_first(); check_all(); write_ans(); return 0; } <file_sep>/projects/klist/main.c #include "klist.h" int main() { List list, *l = &list; init(l); push_back(l, 1); push_back(l, 3); push_front(l, 2); println(l); insert_after(l, 1, 5); insert_before(l, 0, 3); println(l); erase_at(l, 1); push_back(l, 3); println(l); bubble_sort(l); unique(l); reverse(l); println(l); clear(l); debug_allocate(); return 0; } <file_sep>/assignments/0423_0520/find.c /* FileName: bst.c Author: Kevin Date: 13/05/19 15:53 Description: LCA */ #include <stdio.h> #include <stdlib.h> #include <string.h> #define ML 21 #define MN 8192 typedef enum {false, true} bool; typedef struct { int lc, rc; const char *name; } Node; Node t[MN]; int tot; char fa_name[MN][ML]; char son_name[MN][2][ML]; char q_name[2][ML]; int find_index(const char *name) { int idx = -1, i; for (i=0; i<=tot; ++i) { if (strcmp(t[i].name, name) == 0) { idx = i; break; } } return idx; } int get_index(const char *name) { int idx = find_index(name); if (idx == -1) { idx = ++tot; t[idx].name = name; 
} return idx; } int dep[MN]; int dfn[MN << 1], dnt; int first[MN]; void dfs(const int u, const int fa) { dep[u] = dep[fa] + 1; dfn[++dnt] = u, first[u] = dnt; if (t[u].lc) { dfs(t[u].lc, u); dfn[++dnt] = u; } if (t[u].rc) { dfs(t[u].rc, u); dfn[++dnt] = u; } } int rmq(int l, int r) { if (l > r) { int tp = l; l = r; r = tp; } int i, min_o = dfn[l]; for (i=l+1; i<=r; ++i) { if (min_o > dfn[i]) min_o = dfn[i]; } return min_o; } int main() { FILE *fin = fopen("in.txt", "r"); tot = 0; t[0].name = "NULL"; int i, amount; fscanf(fin, "%d", &amount); for (i=0; i<amount; ++i) { fscanf(fin, "%s %s %s", fa_name[i], son_name[i][0], son_name[i][1]); int fa = get_index(fa_name[i]), lc = get_index(son_name[i][0]), rc = get_index(son_name[i][1]); t[fa].lc = lc, t[fa].rc = rc; } fclose(fin); dfs(1, 0); scanf("%s %s", q_name[0], q_name[1]); int u = find_index(q_name[0]), v = find_index(q_name[1]); if (dep[u] > dep[v]) { int tp = u; u = v; v = tp; } int dis = dep[v] - dep[u]; if (dis) { printf("%s %s %d", t[v].name, t[u].name, dis); } else { int lca_idx = rmq(first[u], first[v]); dis = dep[u] - dep[lca_idx]; printf("%s %s %d\n", t[lca_idx].name, q_name[0], dis); printf("%s %s %d\n", t[lca_idx].name, q_name[1], dis); } #ifdef _VSC_KEVIN system("pause"); #endif return 0; } <file_sep>/assignments/0401_0422/queue.c /* FileName: queue.c Author: Kevin Date: 8/04/19 15:08 Description: implement a Server-Custom system */ #include <stdio.h> #include <stdlib.h> #include <string.h> #define MN 16384 #define MIN_Q_CNT 3 #define MAX_Q_CNT 5 typedef enum {false, true} bool; typedef struct { int arrived_time; int tot_waiting_time; } Person; Person queue[MN]; int head, tail; int available_q_cnt; #define get_avr_len() ((tail-head) / available_q_cnt) void open_windows(void); void close_windows(void); int main() { int time, T, new_man_cnt; bool enqueued, dequeued; available_q_cnt = 3; for (scanf("%d", &T), time=0; true; ++time) { if (time < T) scanf("%d", &new_man_cnt); else new_man_cnt = 0; enqueued = 
false; while (new_man_cnt--) { enqueued = true; queue[tail].arrived_time = time; ++tail; } if (enqueued) // may open some new window(s) open_windows(); dequeued = false; int server_cnt = available_q_cnt; while (server_cnt-- && head < tail) { dequeued = true; queue[head].tot_waiting_time = time - queue[head].arrived_time; ++head; } if (dequeued) // may close some new window(s) close_windows(); if (time > T && head == tail) break; } int i; for (i=0; i<tail; ++i) printf("%d : %d\n", i+1, queue[i].tot_waiting_time); return 0; } void open_windows(void) { int avr_len; for (avr_len=get_avr_len(); avr_len>=7 && available_q_cnt<MAX_Q_CNT; avr_len=get_avr_len()) { ++available_q_cnt; } } void close_windows(void) { int avr_len; for (avr_len=get_avr_len(); avr_len<7 && available_q_cnt>MIN_Q_CNT; avr_len=get_avr_len()) { --available_q_cnt; } } <file_sep>/assignments/0225_0318/subtract.c #include <stdio.h> #include <string.h> #define ML 99 int main() { int i, len; char input[2][ML], buf[3][ML], *p_num[2], *p_ans; scanf("%s%s", input[0], input[1]); sprintf(buf[0], "%82s", input[0]); sprintf(buf[1], "%82s", input[1]); for (i=0; buf[0][i]==' '; buf[0][i++]='0'); for (i=0; buf[1][i]==' '; buf[1][i++]='0'); for (p_num[0]=buf[0], p_num[1]=buf[1]; *p_num[0]=='0' && *p_num[1]=='0'; ++p_num[0], ++p_num[1]); if (strcmp(p_num[0], p_num[1]) == 0) return putchar('0'), 0; if (strcmp(p_num[0], p_num[1]) < 0) { char *temp = p_num[0]; p_num[0] = p_num[1]; p_num[1] = temp; putchar('-'); } len = strlen(p_num[0]); for (p_ans=buf[2], i=len-1; i>=0; --i) { p_ans[i] = p_num[0][i] - p_num[1][i]; if (p_ans[i] < 0) { p_ans[i] += 10; --p_num[0][i-1]; } } for (i=0; p_ans[i]==0; ++i); for (; i<len; ++i) printf("%d", p_ans[i]); return 0; } <file_sep>/assignments/0225_0318/expand.c #include <stdio.h> #include <string.h> #include <ctype.h> #define ML 99 int same_type(const char a, const char b) { return (islower(a) && islower(b)) || (isupper(a) && isupper(b)) || (isdigit(a) && isdigit(b)); } int main() { int i, 
len; char _s[ML] = "", *s = _s+1, c; scanf("%s", s); for (i=0, len=strlen(s); i<len; ++i) { if (s[i] == '-') { if (s[i-1] < s[i+1] && same_type(s[i-1], s[i+1])) { for (c=s[i-1]+1; c<s[i+1]; ++c) { putchar(c); } } else { putchar('-'); } } else { putchar(s[i]); } } return 0; } <file_sep>/assignments/0401_0422/match.c /* FileName: article.c Author: Kevin Date: 8/04/19 11:19 Description: brace-matching */ #include <stdio.h> #include <stdlib.h> #include <string.h> #include <ctype.h> #define MAX_LEN 16384 char buf[MAX_LEN]; struct StackNode { char c; int line; } output_stack[MAX_LEN], match_stack[MAX_LEN]; int out_top, match_top; void read_buf(void); void erase_useless_braces(void); int main() { read_buf(); erase_useless_braces(); int line; char *p; for (p=buf, line=1; *p; ++p) { switch (*p) { case '\n': ++line; break; case '(': output_stack[out_top].c = *p, output_stack[out_top++].line = line; match_stack[match_top].c = *p, match_stack[match_top++].line = line; break; case ')': if (!match_top || match_stack[match_top-1].c != '(') return printf("without maching \')\' at line %d", line), 0; output_stack[out_top].c = *p, output_stack[out_top++].line = line; --match_top; break; case '{': if (match_top && match_stack[match_top-1].c == '(') return printf("without maching \'(\' at line %d", match_stack[match_top-1].line), 0; output_stack[out_top].c = *p, output_stack[out_top++].line = line; match_stack[match_top].c = *p, match_stack[match_top++].line = line; break; case '}': if (!match_top || match_stack[match_top-1].c != '{') return printf("without maching \'}\' at line %d", line), 0; output_stack[out_top].c = *p, output_stack[out_top++].line = line; --match_top; break; } } if (match_top) return printf("without maching \'%c\' at line %d", match_stack[match_top-1].c, match_stack[match_top-1].line), 0; int i; for (i=0; i<out_top; ++i) putchar(output_stack[i].c); return 0; } void read_buf(void) { const char *FIN = "example.c"; FILE *fin = fopen(FIN, "rb"); fseek(fin, 0, 
SEEK_END); int bytes = ftell(fin); fseek(fin, 0, SEEK_SET); fread(buf, bytes, 1, fin); fclose(fin); } #define erase_until(BEGIN, FLAG_STR) \ do \ { \ for (end = strstr(BEGIN, FLAG_STR); p<end; ++p) \ { \ if (strchr("(){}<>[]", *p)) \ *p = '?'; \ } \ } while (0) void erase_useless_braces(void) { char *p, *end; for (p=buf; *p; ++p) { if (p[0] == '/' && p[1] == '/') erase_until(p+2, "\n"); else if (p[0] == '/' && p[1] == '*') erase_until(p+2, "*/"); else if (p[0] == '\'') erase_until(p+1, "\'"); else if (p[0] == '\"') erase_until(p+1, "\""); } } <file_sep>/assignments/0520_0610/mst.c /* FileName: mst.c Author: Kevin Date: 27/05/19 16:31 Description: MST */ #include <stdio.h> #include <stdlib.h> #include <string.h> typedef enum {false, true} bool; #define MV 100007 int uf[MV]; int find(const int x) { return uf[x] ? uf[x]=find(uf[x]) : x; } bool merge(int x, int y) { if ((x=find(x)) == (y=find(y))) return false; return uf[x] = y, true; } typedef int dint; typedef struct { int u, v, id; dint d; } Edge; Edge ed[MV]; int cmp_d(const void *p, const void *q) { return ((Edge*)p)->d - ((Edge*)q)->d; } int cmp_id(const void *p, const void *q) { return ((Edge*)p)->id - ((Edge*)q)->id; } Edge chosen[MV]; int tot; int kruskal(const int E) { int cost = 0; qsort(ed, E, sizeof(*ed), cmp_d); int i; for (i=0; i<E; ++i) if (merge(ed[i].u, ed[i].v)) chosen[tot++] = ed[i], cost += ed[i].d; return cost; } int main() { int V, E; scanf("%d %d", &V, &E); int i; for (i=0; i<E; ++i) scanf("%d %d %d %d", &ed[i].id, &ed[i].u, &ed[i].v, &ed[i].d); printf("%d\n", kruskal(E)); qsort(chosen, tot, sizeof(*chosen), cmp_id); for (i=0; i<tot; ++i) printf("%d ", chosen[i].id); return 0; } <file_sep>/assignments/0318_0408/line.c /* FileName: line.c Author: Kevin Date: 25/03/19 12:27 Description: find the longest connected segmemts(by using the DSU) */ #include <stdio.h> #include <stdlib.h> #include <string.h> #define MN 133 #define equal(p1, p2) (p1.x==p2.x && p1.y==p2.y) typedef struct { int x; int y; } 
Point; typedef struct { Point src; Point dest; } Segment; Segment segs[MN]; int uf[MN]; int find(const int x) { if (uf[x] >= 0) return uf[x] = find(uf[x]); return x; } void merge(const int x, const int y) { int r1 = find(x), r2 = find(y); if (r1 < r2) uf[r1] += uf[r2], uf[r2] = r1; else if(r2 < r1) uf[r2] += uf[r1], uf[r1] = r2; } int main(void) { int i, j, n; scanf("%d", &n); memset(uf, -1, sizeof(*uf) * (n+1)); for (i=0; i<n; ++i) { scanf("%d %d %d %d", &segs[i].src.x, &segs[i].src.y, &segs[i].dest.x, &segs[i].dest.y); for (j=0; j<i; ++j) { if (equal(segs[i].src, segs[j].dest) || equal(segs[i].dest, segs[j].src)) merge(i, j); } } int max_idx = 0; for (i=1; i<n; ++i) { if (-uf[i] > -uf[max_idx]) max_idx = i; } Point src = segs[max_idx].src; for (i=1; i<n; ++i) { if (uf[i] == max_idx && segs[i].src.x < src.x) src = segs[i].src; } printf("%d %d %d", -uf[max_idx], src.x, src.y); return 0; } <file_sep>/projects/klist/klist.c #include <stdio.h> #include <malloc.h> #include "klist.h" static int free_cnt; static int malloc_cnt; #define free(p) free((++free_cnt, p)) #define malloc(s) malloc((++malloc_cnt, s)) // basic functions void init(List *pL) { pL->cnt = 0; pL->head = pL->tail = NULL; } void clear(List *pL) { Node *p, *q; for (p=pL->head; p; p=q) { q = p->next; free(p); } pL->head = pL->tail = NULL; pL->cnt = 0; } bool empty(const List *pL) { return !(pL->cnt); } cint size(const List *pL) { return pL->cnt; } List *copy(List *pDest, const List *pSrc) { clear(pDest); const Node *p; for (p=pSrc->head; p; p=p->next) push_back(pDest, p->data); return pDest; } List *append(List *pDest, const List *pSrc) { const Node *p; for (p=pSrc->head; p; p=p->next) push_back(pDest, p->data); return pDest; } List *merge(List *pD, const List *pS1, const List *pS2) { clear(pD); const Node *p = pS1->head, *q = pS2->head; while (p && q) { if (less(p->data, q->data)) push_back(pD, p->data), p = p->next; else push_back(pD, q->data), q = q->next; } while (p) push_back(pD, p->data), p = 
p->next; while (q) push_back(pD, q->data), q = q->next; return pD; } // pushing and popping static void push_first(List *pL, Data data) { ++pL->cnt; Node *newNode = (Node *)malloc(sizeof(Node)); init_node(newNode, data, NULL, NULL); pL->head = pL->tail = newNode; } bool push_front(List *pL, Data data) { if (pL->cnt >= CAPACITY) return false; // guard clause if (pL->cnt) { ++pL->cnt; Node *newNode = (Node *)malloc(sizeof(Node)); init_node(newNode, data, NULL, pL->head); pL->head = pL->head->prev = newNode; } else push_first(pL, data); return true; } bool push_back(List *pL, Data data) { if (pL->cnt >= CAPACITY) return false; // guard clause if (pL->cnt) { ++pL->cnt; Node *newNode = (Node *)malloc(sizeof(Node)); init_node(newNode, data, pL->tail, NULL); pL->tail = pL->tail->next = newNode; } else push_first(pL, data); return true; } static Data pop_last(List *pL) { Data data = pL->head->data; free(pL->head), --pL->cnt; pL->head = pL->tail = NULL; return data; } Data pop_front(List *pL) { if (pL->cnt) { if (pL->cnt == 1) return pop_last(pL); // guard clause Node *oldHead = pL->head; pL->head = pL->head->next; pL->head->prev = NULL; Data data = oldHead->data; free(oldHead), --pL->cnt; return data; } return INVALID_DATA; } Data pop_back(List *pL) { if (pL->cnt) { if (pL->cnt == 1) return pop_last(pL); // guard clause Node *oldTail = pL->tail; pL->tail = pL->tail->prev; pL->tail->next = NULL; Data data = oldTail->data; free(oldTail), --pL->cnt; return data; } return INVALID_DATA; } // find xint find(const List *pL, const Data key) { Node *p; bool found = false; xint now = ZERO_INDEX; for (p=pL->head; p; p=p->next, ++now) { if (equal(p->data, key)) { found = true; break; } } return found ? now : INVALID_INDEX; } xint rfind(const List *pL, const Data key) { Node *p; bool found = false; xint now = pL->cnt-1 + ZERO_INDEX; for (p=pL->tail; p; p=p->prev, --now) { if (equal(p->data, key)) { found = true; break; } } return found ? 
now : INVALID_INDEX; } static Node *ptr_at(const List *pL, const xint index) { if (index >= ZERO_INDEX && index <= pL->cnt-1 + ZERO_INDEX) { Node *p; xint now; if (index < pL->cnt / 2) { now = ZERO_INDEX; for (p=pL->head; now<index; p=p->next, ++now) { ; } } else { now = pL->cnt-1 + ZERO_INDEX; for (p=pL->tail; now>index; p=p->prev, --now) { ; } } return p; } return NULL; } Data at(const List *pL, const xint index) { Node *p = ptr_at(pL, index); if (p) { return p->data; } return INVALID_DATA; } // insertion and erasion bool insert_before(List *pL, const xint index, const Data data) { if (pL->cnt >= CAPACITY) return false; // guard clause Node *p = ptr_at(pL, index); if (p) { if (p != pL->head) { ++pL->cnt; Node *newNode = (Node *)malloc(sizeof(Node)); init_node(newNode, data, p->prev, p); newNode->prev->next = newNode; newNode->next->prev = newNode; } else push_front(pL, data); return true; } return false; } bool insert_after(List *pL, const xint index, const Data data) { if (pL->cnt >= CAPACITY) return false; // guard clause Node *p = ptr_at(pL, index); if (p) { if (p != pL->tail) { ++pL->cnt; Node *newNode = (Node *)malloc(sizeof(Node)); init_node(newNode, data, p, p->next); newNode->prev->next = newNode; newNode->next->prev = newNode; } else push_back(pL, data); return true; } return false; } Data erase_node(List *pL, Node *p) { if (p == pL->head) return pop_front(pL); if (p == pL->tail) return pop_back(pL); Data data = p->data; p->prev->next = p->next; p->next->prev = p->prev; free(p), --pL->cnt; return data; } cint erase_tagged(List *pL) { cint erased_cnt = 0; Node *p, *q; for (p=pL->head; p; p=q) { q = p->next; if (p->tag) { ++erased_cnt; erase_node(pL, p); } } return erased_cnt; } xint erase_first_key(List *pL, const Data key) { xint now = ZERO_INDEX; bool found = false; Node *p, *q; for (p=pL->head; p; p=q, ++now) { q = p->next; if (equal(p->data, key)) { found = true; erase_node(pL, p); break; } } return found ? 
now : INVALID_INDEX; } xint erase_last_key(List *pL, const Data key) { xint now = pL->cnt-1 + ZERO_INDEX; bool found = false; Node *p, *q; for (p=pL->tail; p; p=q, --now) { q = p->prev; if (equal(p->data, key)) { found = true; erase_node(pL, p); break; } } return found ? now : INVALID_INDEX; } cint erase_keys(List *pL, const Data key) { cint erased_cnt = 0; Node *p, *q; for (p=pL->head; p; p=q) { q = p->next; if (equal(p->data, key)) { ++erased_cnt; erase_node(pL, p); } } return erased_cnt; } Data erase_at(List *pL, const xint index) { Node *p = ptr_at(pL, index); if (p) { return erase_node(pL, p); } return INVALID_DATA; } cint erase_interval(List *pL, const xint lb, const xint ub) // [lb, ub) { if (lb >= ub) return 0; // guard clause cint erased_cnt = 0; Node *pLB = ptr_at(pL, lb); Node *pUB = ptr_at(pL, ub-1); if (pLB && pUB) { Node *p, *q, *end = pUB->next; // Restore the pUB->next. Because *pUB will be erased, too. for (p=pLB; p!=end; p=q) { q = p->next; ++erased_cnt; erase_node(pL, p); } } return erased_cnt; // obviously, erased_cnt == (ub-lb) } // other operations to the data Data add_at(List *pL, const xint index, const Data data) { Node *p = ptr_at(pL, index); if (p) { Data oldData = p->data; p->data += data; return oldData; } return INVALID_DATA; } cint add_interval(List *pL, const xint lb, const xint ub, const Data data) { if (lb >= ub) return 0; // guard clause cint added_cnt = 0; Node *pLB = ptr_at(pL, lb); Node *pUB = ptr_at(pL, ub-1); if (pLB && pUB) { Node *p; for (p=pLB; p!=pUB->next; p=p->next) { ++added_cnt; p->data += data; } } return added_cnt; // obviously, added_cnt == (ub-lb) } Data assign_at(List *pL, const xint index, const Data data) { Node *p = ptr_at(pL, index); if (p) { Data oldData = p->data; p->data = data; return oldData; } return INVALID_DATA; } cint assign_interval(List *pL, const xint lb, const xint ub, const Data data) { if (lb >= ub) return 0; // guard clause cint assigned_cnt = 0; Node *pLB = ptr_at(pL, lb); Node *pUB = 
ptr_at(pL, ub-1); if (pLB && pUB) { Node *p; for (p=pLB; p!=pUB->next; p=p->next) { ++assigned_cnt; p->data = data; } } return assigned_cnt; // obviously, assigned_cnt == (ub-lb) } cint replace(List *pL, const Data oldData, const Data newData) { cint replaced_cnt = 0; Node *p; for (p=pL->head; p; p=p->next) { if (equal(p->data, oldData)) { ++replaced_cnt; p->data = newData; } } return replaced_cnt; } bool swap_at(List *pL, const xint left, const xint right) { if (left == right) return false; // guard clause Node *p = ptr_at(pL, left); Node *q = ptr_at(pL, right); if (p && q) { swap(Data, p->data, q->data); return true; } return false; } // other operations to the whole list void reverse(List *pL) { if (pL->cnt) { Node *p, *q; for (p=pL->head; p; p=q) { q = p->next; swap(Node *, p->prev, p->next); } swap(Node *, pL->head, pL->tail); } } void unique(List *pL) { if (pL->cnt < 2) return; // guard clause Node *p, *q; for (p=pL->head->next; p; p=q) { q = p->next; if (equal(p->data, p->prev->data)) { erase_node(pL, p); } } } void bubble_sort(List *pL) { if (pL->cnt < 2) return; // guard clause Node *end, *p; for (end=pL->tail; end!=pL->head; end=end->prev) { for (p=pL->head; p!=end; p=p->next) { if (greater(p->data, p->next->data)) { swap(Data, p->data, p->next->data); } } } } void println(const List *pL) { const Node *p; for (p=pL->head; p; p=p->next) printf("%d ", p->data); putchar('\n'); } // debug function (print the the number of times that the free and malloc functions are called) void debug_allocate() { printf("malloc : %d times.\n", malloc_cnt); printf("free : %d times.\n", free_cnt); } <file_sep>/assignments/0225_0318/notation.c #include <stdio.h> #include <string.h> #define ML 107 int main() { char integer[ML] = "", decimal[ML] = "", zeros[ML] = ""; scanf("%[^.].%[0]", integer, zeros); scanf("%s", decimal); if (integer[0] == '0') { putchar(*decimal); if (strlen(decimal) > 1) printf(".%s", decimal+1); printf("e-%d", (int)strlen(zeros) + 1); } else { 
printf("%c.%s%s%se%d", *integer, integer+1, zeros, decimal, (int)strlen(integer) - 1); } return 0; } <file_sep>/assignments/0520_0610/graphSearch.c /* FileName: graphSearch.c Author: Kevin Date: 27/05/19 16:23 Description: bfs & dfs */ #include <stdio.h> #include <stdlib.h> #include <string.h> #define MV 203 #define SRC 0 typedef enum {false, true} bool; int V; bool g[MV][MV]; bool vis[MV]; void dfs(const int u) { printf("%d ", u); int v; for (v=0; v<V; ++v) if (g[u][v] && !vis[v]) vis[v] = true, dfs(v); } void bfs(const int src) { static int q[MV]; int hd = 0, tl = 0; vis[q[tl++]=src] = true; while (hd != tl) { const int u = q[hd++]; printf("%d ", u); int v; for (v=0; v<V; ++v) if (g[u][v] && !vis[v]) vis[q[tl++]=v] = true; } } int main() { int E; scanf("%d %d", &V, &E); int u, v; while (E--) { scanf("%d %d", &u, &v); g[u][v] = g[v][u] = true; } memset(vis, false, sizeof(*vis) * (V)); vis[SRC] = true, dfs(SRC); puts(""); memset(vis, false, sizeof(*vis) * (V)); bfs(SRC); puts(""); scanf("%d", &u); for (v=0; v<V; ++v) g[u][v] = g[v][u] = false; memset(vis, false, sizeof(*vis) * (V)); vis[SRC] = true, dfs(SRC); puts(""); memset(vis, false, sizeof(*vis) * (V)); bfs(SRC); puts(""); return 0; } <file_sep>/assignments/0423_0520/huffman.c /* FileName: bst.c Author: Kevin Date: 13/05/19 19:13 Description: Huffman-code */ #include <stdio.h> #include <stdlib.h> #include <string.h> #define ML 65536 #define bint unsigned long #define test(x, k) !!(((x) >> k) & 1u) #define set(x, k) ((x) |= 1u << k) #define reset(x, k) ((x) &= ~(1u << k)) int buf_len; char buf[ML << 1]; typedef enum {false, true} bool; typedef struct { bint code; char code_len; bool vis; int weight; int lc, rc; } Node; Node _t[ML], *t = _t+1; int tot; typedef struct { int first; int second; } pair; void init() { t['\0'].weight = 1; t[-1].weight = 0x3f3f3f3f; tot = 128; } void count() { char *p, *q; for (p=q=buf; *p; ++p) if (*p != '\n' && *p != '\r') *q++ = *p, ++t[*p].weight; *q++ = '\0'; // valid '\0' buf_len 
= q - buf; int i; for (i=0; i<=tot; ++i) if (t[i].weight == 0) t[i].vis = true; } pair find_min2() { pair min2 = {.first = -1, .second = -1}; int i; for (i=0; i<=tot; ++i) { if (!t[i].vis && t[i].weight < t[min2.second].weight) { min2.second = i; if (t[min2.second].weight < t[min2.first].weight) { int tp = min2.first; min2.first = min2.second; min2.second = tp; } } } return min2; } void code() { pair min2; int u, v; while (min2=find_min2(), (u=min2.first) != -1 && (v=min2.second) != -1) { int new_node = ++tot; t[new_node].lc = u; t[new_node].rc = v; t[new_node].weight = t[u].weight + t[v].weight; t[u].vis = t[v].vis = true; } } bint cur_code; int cur_code_len; void dfs(const int u) { if (t[u].lc == 0 && t[u].rc == 0) // leaf { t[u].code = cur_code; t[u].code_len = cur_code_len; } else // must have 2 sons (cuz Huffman Tree must be a full binary tree!) { ++cur_code_len, dfs(t[u].lc), --cur_code_len; set(cur_code, cur_code_len); ++cur_code_len, dfs(t[u].rc), --cur_code_len; reset(cur_code, cur_code_len); } } bool stack[ML]; int top; void write_node(const Node node) { int k; for (k=0; k<node.code_len; ++k) stack[top++] = test(node.code, k); } void write_ans(FILE *fout) { top = 0; Node node; int i; for (i=0; i<buf_len; ++i) write_node(t[buf[i]]); int res = top % 8; if (res) top += 8-res; int k; for (i=0; i<top; i+=8) { bint bits = 0; for (k=0; k<8; ++k) if (stack[i+k]) set(bits, (8-1-k)); printf("%x", bits); fputc(bits, fout); } } int main() { const char *FIN = "input.txt", *FOUT = "output.txt"; FILE *fin = fopen(FIN, "rb"); FILE *fout = fopen(FOUT, "w"); fseek(fin, 0, SEEK_END); buf_len = ftell(fin); fseek(fin, 0, SEEK_SET); fread(buf, buf_len, 1, fin); init(); count(); code(); cur_code_len = 0, dfs(tot); write_ans(fout); fclose(fin); fclose(fout); #ifdef _VSC_KEVIN system("pause"); #endif return 0; } <file_sep>/assignments/0318_0408/multi.c /* FileName: multi.c Author: Kevin Date: 25/03/19 13:07 Description: multiply two polynomials(implementation) */ #include 
<stdio.h> #include <stdlib.h> #include <string.h> #include <ctype.h> #define MN 2333 #define ML 233333 #define sort(begin, end) qsort(begin, (end)-(begin), sizeof(*begin), cmp) typedef struct { int k; int exp; } Term; Term multi(const Term a, const Term b); Term *merge_poly(Term *begin, Term *end); int cmp(const void *p, const void *q); char buf[ML], *pNow = buf; int next_int(void); Term po1[MN], po2[MN], ans[MN]; int tot1, tot2, tot_ans; int main() { int i, j; fread(buf, 1, ML, stdin); do { po1[tot1].k = next_int(); po1[tot1].exp = next_int(); ++tot1; } while (*pNow != '\n'); do { po2[tot2].k = next_int(); po2[tot2].exp = next_int(); ++tot2; } while (*pNow != '\n'); for (i=0; i<tot1; ++i) { for (j=0; j<tot2; ++j) { ans[tot_ans++] = multi(po1[i], po2[j]); } } sort(ans, ans+tot_ans); tot_ans = merge_poly(ans, ans+tot_ans) - ans; for (i=0; i<tot_ans; ++i) printf("%d %d ", ans[i].k, ans[i].exp); return 0; } Term multi(const Term a, const Term b) { return (Term) { .k = a.k * b.k, .exp = a.exp + b.exp }; } Term *merge_poly(Term *begin, Term *end) { if (end-begin <= 1) return end; Term *prev, *now; for (prev=begin, now=begin+1; now!=end; ++now) { if (now->exp != prev->exp) *++prev = *now; else prev->k += now->k; } return prev + 1; } int cmp(const void *p, const void *q) { return ((Term *)q)->exp - ((Term *)p)->exp; } int next_int(void) { int ret = 0; while (!isdigit(*pNow)) ++pNow; do ret = ret * 10 + *pNow++ - '0'; while (isdigit(*pNow)); return ret; }
8a4bb18b1b557e8d26407010a420c63fa82446d3
[ "Markdown", "C" ]
37
C
keyu-tian/BUAA-DS-2019Spring
81d358a95a8293eeb5fd625b1f689f43de2a08a9
fd29ab0a2083992ffc03acdec7875f9e4b57226f
refs/heads/master
<file_sep># Envoy-XDS DEMO

## 目录说明

+ java-control-plane: Envoy xDS 的 Java 版本 gRPC 实现
+ ratelimit-sample: 限流服务
<file_sep>package cn.zeran.ratelimitservice;

import io.envoyproxy.envoy.api.v2.ratelimit.RateLimitDescriptor;
import io.envoyproxy.envoy.service.ratelimit.v2.RateLimitRequest;
import io.envoyproxy.envoy.service.ratelimit.v2.RateLimitResponse;
import io.envoyproxy.envoy.service.ratelimit.v2.RateLimitServiceGrpc;
import io.grpc.stub.StreamObserver;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Demo implementation of Envoy's v2 Rate Limit Service (RLS).
 *
 * Admits the first {@code LIMIT} requests seen by this process and answers
 * OVER_LIMIT (Envoy then returns HTTP 429) for every request after that.
 * NOTE(review): COUNT is a process-wide counter that is never reset and is
 * not synchronized; a production limiter needs a time window and thread
 * safety — confirm whether that is intended for this demo.
 */
@GrpcService
public class MyRateLimitService extends RateLimitServiceGrpc.RateLimitServiceImplBase {

    private static Logger logger = LoggerFactory.getLogger(MyRateLimitService.class);

    // Requests admitted so far (static: shared by every call in this JVM).
    private static Integer COUNT = 0;
    // Maximum number of requests to admit before limiting kicks in.
    private static Integer LIMIT = 5;

    /**
     * Decides whether the request described by the first descriptor should be
     * rate limited, and completes the gRPC call with the verdict.
     *
     * @param request          RLS request; entries with keys "path"/"method"
     *                         are extracted for logging only
     * @param responseObserver receives exactly one RateLimitResponse
     */
    @Override
    public void shouldRateLimit(RateLimitRequest request, StreamObserver<RateLimitResponse> responseObserver) {
        RateLimitDescriptor descriptor = request.getDescriptors(0);
        String path = "";
        String method = "";
        for (RateLimitDescriptor.Entry entry : descriptor.getEntriesList()) {
            switch (entry.getKey()) {
                case "path":
                    path = entry.getValue();
                    break;
                case "method":
                    method = entry.getValue();
                    break;
                default:
                    break;
            }
        }
        logger.info("Request URL: " + method + " " + path + " COUNT:" + COUNT + "/" + LIMIT);
        if (COUNT >= LIMIT) {
            // Over the limit: report OVER_LIMIT so Envoy answers with HTTP 429.
            responseObserver
                    .onNext(RateLimitResponse.newBuilder().setOverallCode(RateLimitResponse.Code.OVER_LIMIT).build());
            responseObserver.onCompleted();
        } else {
            COUNT++;
            // BUG FIX: the under-limit branch previously never answered the RPC,
            // leaving Envoy blocked until its rate-limit timeout expired.
            responseObserver
                    .onNext(RateLimitResponse.newBuilder().setOverallCode(RateLimitResponse.Code.OK).build());
            responseObserver.onCompleted();
        }
    }
}
<file_sep># RateLimitService

当前Repo主要实现了基于 Envoy 的流量控制服务( RateLimitService ),该服务为 Envoy 的 RLS 相关 API 的实现,依赖 Envoy 的 Control-Plane API([java-control-plane](https://github.com/envoyproxy/java-control-plane))

## Submodule

+ java-control-plane

## Requirements

1. JDK 1.8
2. Maven
3. Gradle 5.4+
4. Envoy v1.12.0

## Build

### 1.
编译java-control-plane ```bash cd ../java-control-plane mvn clean package [INFO] ------------------------------------------------------------------------ [INFO] BUILD SUCCESS [INFO] ------------------------------------------------------------------------ ``` ### 2. 运行 ratelimit service ```bash gradle bootRun [INFO] grpc server is started at 8090 ``` ### 3. 启动 envoy 镜像进行测试: > 非mac系统,需要修改 src/envoy/config/envoy.yaml line:86 的 address地址,linux系统一般默认为 172.17.0.1,mac 默认为 docker.for.mac.localhost ```bash cd src/envoy # 编译镜像 docker build -t envoy-ratelimit:demo . # 启动容器 ./start.sh ``` ### 4. 测试 使用浏览器访问: http://localhost:8081 <file_sep>package cn.zeran.ratelimitservice; import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.context.annotation.Configuration; /** * GrpcConfiguration */ @Configuration @ConfigurationProperties("grpc.server") public class GrpcConfiguration { /** * grpc server listen port */ private Integer port = 9090; public Integer getPort() { return port; } public void setPort(Integer port) { this.port = port; } }<file_sep>package cn.zeran.ratelimitservice; import java.util.Map; import org.springframework.beans.BeansException; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.context.ConfigurableApplicationContext; @SpringBootApplication public class Application { public static void main(String[] args) { try { ConfigurableApplicationContext applicationContext = SpringApplication.run(Application.class, args); Map<String, Object> serviceBeanMap = applicationContext.getBeansWithAnnotation(GrpcService.class); GrpcLauncher launcher = applicationContext.getBean("grpcLauncher", GrpcLauncher.class); launcher.start(serviceBeanMap); } catch (BeansException ex) { ex.printStackTrace(); } } } <file_sep>#!/bin/bash # entrypoint in container # start nginx nginx -g "daemon on;" service nginx status # start envoy envoy -c 
/etc/envoy/envoy.yaml <file_sep>#!/bin/bash # stop running container docker stop ratelimit # start ratelimit container docker run -d --name=ratelimit --rm -v $(pwd)/config:/etc/envoy -p 8081:8081 -p 9901:9901 -e SERVICE_NAME:"RateLimitDemo" envoy-ratelimit:demo # show logs docker logs -f ratelimit<file_sep>grpc.server.port = 8090 server.port = 9999<file_sep>rootProject.name = 'ratelimit-service'<file_sep>buildscript { repositories { mavenCentral() } dependencies { classpath 'com.google.protobuf:protobuf-gradle-plugin:0.8.8' } } plugins { id 'org.springframework.boot' version '2.2.1.RELEASE' id 'io.spring.dependency-management' version '1.0.8.RELEASE' id 'java' } apply plugin: 'com.google.protobuf' group = 'cn.zeran' version = '0.0.1-SNAPSHOT' sourceCompatibility = '1.8' repositories { mavenCentral() } dependencies { implementation('org.springframework.boot:spring-boot-starter-web'){ exclude module: 'spring-boot-starter-tomcat' } implementation 'org.springframework.boot:spring-boot-starter-actuator' implementation 'io.grpc:grpc-netty-shaded:1.21.0' implementation 'io.grpc:grpc-protobuf:1.21.0' implementation 'io.grpc:grpc-stub:1.21.0' // implementation 'io.envoyproxy.controlplane:java-control-plane:0.1.20' implementation files('../java-control-plane/api/target/api-0.1.20-SNAPSHOT.jar') implementation files('../java-control-plane/server/target/server-0.1.20-SNAPSHOT.jar') testImplementation('org.springframework.boot:spring-boot-starter-test') { exclude group: 'org.junit.vintage', module: 'junit-vintage-engine' } } test { useJUnitPlatform() } protobuf { protoc { artifact = "com.google.protobuf:protoc:3.7.1" } plugins { grpc { artifact = 'io.grpc:protoc-gen-grpc-java:1.21.0' } } generateProtoTasks { all()*.plugins { grpc {} } } }
e62efbbca0fe6fa6336f401331b05a2f60bd5b21
[ "Markdown", "INI", "Gradle", "Java", "Shell" ]
10
Markdown
ZeRanW/envoy-xds-demo
07bd1a33eb097877c27fdf7ba391af70434185cc
029cee7c58831ddacf7bb56dd4322e7793316a90
refs/heads/master
<repo_name>ralfebert/Endpoint<file_sep>/Sources/Endpoint/Endpoint+URLSession.swift import Foundation import os #if canImport(FoundationNetworking) import FoundationNetworking #endif public extension Endpoint { @discardableResult /// Loads an endpoint by creating (and directly resuming) a data task. /// /// - Parameters: /// - endpoint: The endpoint. /// - onComplete: The completion handler. /// - Returns: The data task. func load(onComplete: @escaping (Result<A, Error>) -> Void) -> URLSessionDataTask { os_log(">>> %s", log: EndpointLogging.log, type: .debug, self.description) let task = self.urlSession.dataTask(with: self.request, completionHandler: { data, response, error in if let error = error { os_log("Error for %s: %s", log: EndpointLogging.log, type: .error, self.description, String(describing: error)) onComplete(.failure(error)) return } guard let httpResponse = response as? HTTPURLResponse else { onComplete(.failure(EndpointError(description: "Response was not a HTTPURLResponse"))) return } do { os_log("Got response: %i bytes", log: EndpointLogging.log, type: .debug, data?.count ?? 0) try self.validate(data, httpResponse) if let result = try self.parse(data, httpResponse) { onComplete(.success(result)) } else { onComplete(.failure(NoDataError())) } } catch let e { onComplete(.failure(e)) return } }) task.resume() return task } } #if canImport(Combine) import Combine @available(iOS 13, macOS 10.15, watchOS 6, tvOS 13, *) public extension Endpoint { /// Returns a publisher that wraps a URL session data task for a given Endpoint. /// /// - Parameters: /// - endpoint: The endpoint. /// - Returns: The publisher of a dataTask. 
func load() -> AnyPublisher<A, Error> { os_log(">>> %s", log: EndpointLogging.log, type: .debug, self.description) return self.urlSession.dataTaskPublisher(for: self.request) .mapError { (error) -> Error in os_log("Error for %s: %s", log: EndpointLogging.log, type: .error, self.description, String(describing: error)) return error } .tryMap { data, response in try self.handleResponse(data: data, response: response) } .eraseToAnyPublisher() } func handleResponse(data: Data, response: URLResponse) throws -> A { os_log("Got response: %i bytes", log: EndpointLogging.log, type: .debug, data.count) guard let httpResponse = response as? HTTPURLResponse else { throw EndpointError(description: "Response was not a HTTPURLResponse") } try self.validate(data, httpResponse) if let result = try self.parse(data, httpResponse) { return result } else { throw NoDataError() } } } #endif <file_sep>/Sources/Endpoint/Endpoint.swift import Foundation import os @_exported import SweetURLRequest public typealias ValidateFunction = (_ data: Data?, _ response: HTTPURLResponse) throws -> Void public struct EndpointExpectation { public static func validateStatusCode(_ validate: @escaping (_ statusCode: Int) -> Bool) -> ValidateFunction { return { (data: Data?, response: HTTPURLResponse) throws in let code = response.statusCode guard validate(code) else { throw WrongStatusCodeError(statusCode: code, response: response, responseBody: data) } } } public static func expectStatus(_ responseType: HTTPStatusCode.ResponseType) -> ValidateFunction { self.validateStatusCode { HTTPStatusCode.ResponseType(httpStatusCode: $0) == responseType } } public static func expectStatus(_ values: [HTTPStatusCode]) -> ValidateFunction { self.validateStatusCode { guard let statusCode = HTTPStatusCode(rawValue: $0) else { return false } return values.contains(statusCode) } } public static func expectStatus(_ value: HTTPStatusCode) -> ValidateFunction { self.validateStatusCode { HTTPStatusCode(rawValue: $0) == value } } 
public static let expectSuccess = expectStatus(.success) public static func emptyResponse(_ data: Data?, _: HTTPURLResponse) throws { guard let data = data else { return } guard data.count == 0 else { throw EndpointError(description: "Expected an empty response") } } public static func ignoreResponse(_: Data?, _: HTTPURLResponse) throws {} } public struct EndpointLogging { public static let log = OSLog(subsystem: "Endpoint", category: "Endpoint") } /// This describes an endpoint returning `A` values. It contains both a `URLRequest` and a way to parse the response. public struct Endpoint<A> { /// The request for this endpoint public let request: URLRequest /// The URLSession to use for this endpoint let urlSession: URLSession /// This is used to validate the response (like, check the status code). let validate: ValidateFunction /// This is used to (try to) parse a response into an `A`. public typealias ParseFunction = (_ data: Data?, _ response: HTTPURLResponse) throws -> A? let parse: ParseFunction /// Transforms the result public func map<B>(_ f: @escaping (A) -> B) -> Endpoint<B> { return Endpoint<B>(request: self.request, validate: self.validate, parse: { value, response in try self.parse(value, response).map(f) }) } /// Creates a new Endpoint from a request /// /// - Parameters: /// - request: the URL request /// - validate: this validates the response, f.e. checks the status code. /// - parse: this converts a response into an `A`. public init(request: URLRequest, urlSession: URLSession = .shared, validate: @escaping ValidateFunction = EndpointExpectation.expectSuccess, parse: @escaping ParseFunction) { self.request = request self.urlSession = urlSession self.validate = validate self.parse = parse } } extension Endpoint where A: Decodable { /// Creates a new Endpoint from a request that returns JSON /// /// - Parameters: /// - request: the URL request /// - validate: this validates the response, f.e. checks the status code. 
/// - parse: this converts a response into an `A`. public init(jsonRequest: URLRequest, urlSession: URLSession = .shared, validate: @escaping ValidateFunction = EndpointExpectation.expectSuccess, jsonDecoder: JSONDecoder = JSONDecoder()) { var jsonRequest = jsonRequest jsonRequest.headers.accept = .json self.init(request: jsonRequest, urlSession: urlSession, validate: validate, parse: jsonDecoder.decodeResponse) } } // Allow omitting the parse parameter for empty results of Void type extension Endpoint where A == Void { public init(request: URLRequest, urlSession: URLSession = .shared, validate: @escaping ValidateFunction = EndpointExpectation.expectSuccess) { self.init(request: request, urlSession: urlSession, validate: validate, parse: EndpointExpectation.ignoreResponse) } } // MARK: - CustomStringConvertible extension Endpoint: CustomStringConvertible { public var description: String { "\(self.request.httpMethod ?? "") \(self.request.url?.absoluteString ?? "")" } } /// Signals that a response's data was unexpectedly nil. public struct NoDataError: Error { public init() {} } /// An unknown error public struct EndpointError: Error { public var description: String public init(description: String) { self.description = description } } /// Signals that a response's status code was wrong. public struct WrongStatusCodeError: Error { public let statusCode: Int public let response: HTTPURLResponse? public let responseBody: Data? public init(statusCode: Int, response: HTTPURLResponse?, responseBody: Data?) { self.statusCode = statusCode self.response = response self.responseBody = responseBody } } extension JSONDecoder { public func decodeResponse<T: Decodable>(_ data: Data?, _: HTTPURLResponse) throws -> T { guard let data = data else { throw NoDataError() } return try self.decode(T.self, from: data) } }
452e278d0e0d223f7f470acae293771f6f0efa2f
[ "Swift" ]
2
Swift
ralfebert/Endpoint
0d0394781bd138dbc7a33ffdf58288f7e8e6b7f7
b423a02de9383fdd74bf5ef3add0fde7db714e77
refs/heads/main
<file_sep>""" All types in Python are Objects Strongly types and not coerced Object names function like pointers """ import time m = [9, 15, 24] def modify(k): k.append(39) print("k= ", k) def replace(g): g = [16, 3, 9] print("g= ", g) def banner(message, border="-"):#Default params line = border * len(message) print(line) print(message) print(line) def show_default(arg=time.ctime()): # Time is bound at runtime and will not progress when called again print(arg) def add_spam(menu=[]): # empty list is created then will reference the same object when called again menu.append("spam") return menu # Always use immutable types as default args count = 0 def show_count(): print(count) def set_count1(c): count = c # This is a locally bound variable and doesn't modify the global var def set_count(c): global count count = c # Now this will work """Built in collections """ #Tuples #Strings s = "New" + "found" + "land" s += " city"# += creates temporaries s.join('Do you know the way?') "unforgetable".partition('forget') # ('un', 'forget', 'able') Tuple # Tuble unpacking origin, _, destination = "Seattle-Boston".partition('-') # unpacked into unused variable "{0} north of {1}".format(59.6, 888) # inject variable into strings "Galactic position x={pos[0]}, y={pos[1]}, z={pos[2]}".format(pos=(65.2, 23.1, 82.2)) # F strings embed expressions with minimal syntax print(f'one plus one is {1+1}') # Lists r = [1, -4, 10, -16, 15] # Negative indices count backwards from end print(r[-1]) # gives you 15 print(r[-2]) # gives you -16 # Slice a list: s = [14, 1555, 1690, 170, 189, 199] print(s[1:3]) # [1555, 1690] print(s[1:-1]) # [1555, 1690, 170, 189] print(s[2:]) # [1690, 170, 189, 199] print(s[:2]) # [14, 1555] print(s[:])# shallow copy option # [14, 1555, 1690, 170, 189, 199] u = s.copy() # index() w = "the quick bronw fox jumps over the lazy dog".split()# default split on space a = [ [1,2], [3,4] ] b = a[:] # results in a shallow copy and object references are the same w = "the quick 
bronw fox jumps over the lazy dog".split() print(w) # ['the', 'quick', 'bronw', 'fox', 'jumps', 'over', 'the', 'lazy', 'dog'] i = w.index('fox') # Traceback (most recent call last): # File "<stdin>", line 1, in <module> # TypeError: 'builtin_function_or_method' object is not subscriptable i = w.index('fox') print(i) # 3 print(w[i]) # 'fox' # print(w.index('unicorn')) # Traceback (most recent call last): # File "<stdin>", line 1, in <module> # ValueError: 'unicorn' is not in list w.count('the') # result: 2 del w[3] print(w) # ['the', 'quick', 'bronw', 'jumps', 'over', 'the', 'lazy', 'dog'] w.remove('bronw') print(w) # ['the', 'quick', 'jumps', 'over', 'the', 'lazy', 'dog'] # Dictionaries # Order can't be counted on d = dict(a="ni") movies = dict(a="Wolf of Wallstreet", b="Dark Knight", c="Mr Nobody") print(movies) # {'a': 'Wolf of Wallstreet', 'b': 'Dark Knight', 'c': 'Mr Nobody'} # iterate through keys: movies.keys() # iterate through values: movies.values() for key, value in movies.items(): print(f"{key} => {value}") # a => Wolf of Wallstreet # b => Dark Knight # c => Mr Nobody # SET x = {1, 2, 3, 4} x.add(5) <file_sep>"""When ran, times the result of another function""" import sys import time from functools import wraps def time_sequence(function): @wraps(function) def timer(*args, **kwargs): start = time.time() # run function function(*args, **kwargs) end = time.time() elapsed = end - start print("%.3f" % elapsed) return timer @time_sequence def count_to(value): count = 0 while count < int(value): count += 1 print(f"Counted to {value}:{count} for you") if __name__ == '__main__': count_to(sys.argv[1]) <file_sep>"""Read and write from files with python. Methods: .read/write(file.name, mode='w|rt|b', encoding) .readline/writeline(must specify newlines) .seek(move to beginning with 0) Files are iterable objects when opened. Iterated by newlines. 
""" import sys f = open(sys.argv[1], mode='rt', encoding='utf-8') for line in f: # print(line) Adds extra newlines from what's in the file sys.stdout.write(line) f.close() <file_sep>-- POPULATING REDSHIFT: -- Learning the Copy Command copy dimproduct --optional column list, ordinal by default from 's3://pimpsonsbucket/dimproduct.csv' -- specify a data source iam_role 'arn:aws:iam::227967283010:role/RedshiftS3' -- authorization, several ways region 'us-west-2' -- format, and additional options ; select * from dimproduct;<file_sep>import sys def read_series(filename): try: f = open(filename, mode='rt', encoding='utf-8') # series = [] # for line in f: # a = int(line.strip()) # series.append(a) return [int(line.strip()) for line in f] finally: f.close() # return series def read_series_with(filename): """Use python built in context-manager function to handle close()""" with open(filename, mode='rt', encoding='utf-8') as f: return [int(line.strip()) for line in f] def main(filename): series = read_series(filename) print(series) # File like objects and duck typing into file like reading: def words_per_line(flo): """Counts line lengths for file like objects. usage: import corepy.file_reader with open('wasteland.txt', mode='rt', encoding='utf-8') as real_file: wpl = corepy.file_reader.words_per_line(real_file) from urllib.request import urlopen with urlopen('http://sixty-north.com/c/t.txt') as web_file: wpl = ... 
Args: flo: file like object Returns: """ return [len(line.split()) for line in flo.readlines()] <file_sep>import os from flask import (Flask, render_template, abort, jsonify, request, redirect, url_for) from datetime import datetime from model import db, save_db app = Flask(__name__) # Optionally configure with settings like 'static' location: # app = Flask(__name__, static_folder='/path/to/static/folder') # export FLASK_APP=flashcards.py # export FLASK_ENV=development # FROM Packt Flask Book # Setup variables to load from a file, with defaults DEBUG = True TESTING = True # From this file: # app.config.from_object(__name__) # From an object in a class: app.config.from_object('configuration.DevelopmentConfig') # From a #.cfg file: # app.config.from_pyfile('/path/to/config/file') # From Env variables: # app.config.from_envvar('PATH_TO_CONFIG_FILE') # SQLAlchemy: app.config['SQLALCHEMY_DATABASE_URI'] = os.environ('DATABASE_URI') # postgresql://yourusername:yourpassword@localhost/yournewdb. 
# Model data in PostgreSQL: # psycopg2 """ >>> import flashcards >>> flashcards.app.url_map Map([<Rule '/load_counter' (GET, HEAD, OPTIONS) -> load_counter>, <Rule '/date' (GET, HEAD, OPTIONS) -> date>, <Rule '/' (GET, HEAD, OPTIONS) -> welcome>, <Rule '/static/<filename>' (GET, HEAD, OPTIONS) -> static>]) """ @app.route("/") def welcome(): return render_template( "welcome.html", cards=db, ) @app.route("/date") def date(): return "This page was served at " + str(datetime.now()) # global count = 0 @app.route("/load_counter") def count_demo(): # name is irrelevant to route global count count += 1 return f"We loaded this page {count} times" @app.route("/card") def card_default(): return card_view(0) @app.route("/card/<int:index>") # parameter format def card_view(index): try: card = api_card_detail(index) return render_template("card.html", card=card, index=index, max_index=len(db) - 1 ) except IndexError: abort(404) @app.route('/add_card', methods=["GET", "POST"]) def add_card(): if request.method == "POST": # process the form data card = { "question": request.form['question'], "answer": request.form['answer'] } db.append(card) save_db() return redirect(url_for('card_view', index=len(db) - 1)) else: return render_template("add_card.html") @app.route('/remove_card/<int:index>', methods=["GET", "POST"]) def remove_card(index): try: if request.method == "POST": # db.pop(index) del db[index] save_db() return redirect(url_for('welcome')) else: card = api_card_detail(index) return render_template("remove_card.html", card=card) except IndexError: abort(404) @app.route("/api/card/") def api_card_list(): return jsonify(db) @app.route('/api/card/<int:index>') def api_card_detail(index): try: return db[index] # automatically serialized and returned as JSON except IndexError: abort(404) <file_sep>from typing import Optional def get_nested(obj, path, divider='.'): if divider in path: first_step = path.split(divider)[0] new_key = path.removeprefix(f"{first_step}{divider}") return 
get_nested(obj.get(first_step), new_key) else: return obj.get(path) obj = dict(outer=dict(inner='result', empty={})) print(get_nested(obj, 'outer.inner')) print(get_nested(obj, 'outer.empty')) # dotted = DotFindString(obj) <file_sep>select * from some_table; <file_sep>"""This actually won't work as written due to the synchronous nature of file manipulation""" import sys from pprint import pprint from codetiming import Timer import psycopg2.extras import asyncio async def worker(worker_name, queue, **kwargs): timer = Timer(text=f"Task {worker_name} elapsed time: {{:.3f}}") # conn = psycopg2.connect(dbname='dev', # host=f"{kwargs.get('domain')}.redshift.amazonaws.com", # port=5439, # user=kwargs.get('username'), # password=<PASSWORD>('password') # ) # with conn: # with conn.cursor() as curs: # while not queue.empty(): # query = await queue.get() # try: # timer.start() # curs.execute(query) # timer.stop() # except Exception as exception: # print(exception) # pprint(f"{worker_name} failed to run query:\n{query}") # conn.close() while not queue.empty(): delay = await queue.get() print(f"Task {worker_name} running") timer.start() await asyncio.sleep(delay) timer.stop() async def get_sql_script(file_name): async with open(file_name) as file: return await file.read() def main(file_name, schema='', **kwargs): queue = asyncio.Queue() with open(file_name) as file: sql_script = file.read() for query in sql_script.split(';'): queue.put(query.replace('<schema>', schema)) # THIS seems to limit to a single worker # conn = psycopg2.connect(dbname='dev', # host=f"{kwargs.get('domain')}.redshift.amazonaws.com", # port=5439, # user=kwargs.get('username'), # password=<PASSWORD>('password') # ) # with conn: # with conn.cursor() as curs: # with Timer(text="\nTotal elapsed time: {:.3f}"): # await asyncio.gather( # asyncio.create_task(worker('Nick', queue, cursor=curs)), # asyncio.create_task(worker('Devereux', queue, cursor=curs)), # ) # conn.close() # either way results in the same 
thing...? with Timer(text="\nTotal elapsed time: {:.3f}"): await asyncio.gather( asyncio.create_task(worker('Nick', queue, **kwargs)), asyncio.create_task(worker('Devereux', queue, **kwargs)), ) # Use static data as test: # Put some work in the queue # for work in [15, 10, 5, 2]: # await queue.put(work) # # # Run the tasks # with Timer(text="\nTotal elapsed time: {:.1f}"): # await asyncio.gather( # asyncio.create_task(worker("One", queue)), # asyncio.create_task(worker("Two", queue)), # ) if __name__ == '__main__': asyncio.run(main(sys.argv[1], schema=sys.argv[2], username=sys.argv[3], password=<PASSWORD>], domain=sys.argv[5])) <file_sep>""" model.py ------- Implements the model for our site by simulating a database Note: although this works as an example this won't be used in production. sqlalchemy is a way to do this for real """ import json def load_db(): with open("data/flashcards_db.json") as f: return json.load(f) def save_db(): with open("data/flashcards_db.json", "w") as f: return json.dump(db, f) db = load_db() <file_sep>"""Experimenting with Parsing and editing SQL Files""" import sys import sqlparse import sqlparse.tokens as ttypes import sqlparse.sql as sql def file_output(file_name): with open(file_name) as file: sql_text = file.read() return sql_text def parse_out_editable_pieces(file_name): sql_txt = file_output(file_name) view_create_commands = [cv for cv in sqlparse.split(sql_txt) if sqlparse.parse(cv)[0].token_first(skip_ws=True, skip_cm=True) .match(ttypes.Keyword.DDL, 'CREATE OR REPLACE')] for create in view_create_commands: parsed_create = sqlparse.parse(create)[0] create_tokens = [t for t in sql.TokenList(parsed_create.tokens) if t.ttype not in (ttypes.Whitespace, ttypes.Whitespace.Newline)] create_token_list = sql.TokenList(create_tokens) create_union_indexes = [] # TODO: Find start of Unions for index, token in enumerate(create_token_list): # Only find SELECT first then UNION ALL match_text = 'SELECT' if len(create_union_indexes) == 0 else 
'UNION ALL' target_type = ttypes.Keyword.DML if len(create_union_indexes) == 0 else ttypes.Keyword if token.match(target_type, match_text): create_union_indexes.append(index) print(create_union_indexes) # TODO: group unions into statements first_union = create_union_indexes[0] union_count = len(create_union_indexes) create_union_indexes.reverse() for index, union_index in enumerate(create_union_indexes): # Find the column declarations end = len(create_token_list.tokens)-1 if index == 0 else create_union_indexes[index-1] create_token_list.group_tokens(sql.Statement, start=union_index, end=end, include_end=False) # token_list.token_next_by(idx=union_location, t=[[sql.IdentifierList]], end=select_locations[(index + 1)]) # TODO: Iterate through created union statements to find each key for tk_index in range(first_union, (first_union+union_count)-1): # TODO: grab table name for mapping to update string union = create_token_list[tk_index] found_key = False for line in union: # TODO: Identify the list of column names if isinstance(line, sql.IdentifierList): # column_list = [t for t in sql.TokenList(token) # if t.ttype not in (ttypes.Whitespace, ttypes.Whitespace.Newline)] for identifier in line: # TODO: filter down to key if hasattr(identifier, 'tokens'): # Remove comments because the lump into the end of an identifier when split _stripped_values = [t.value for t in identifier.tokens if not isinstance(t, sql.Comment)] if isinstance(identifier, sql.Identifier) and 'channelmix_key' in _stripped_values: found_key = True print(f"Union {tk_index} channelmix key in identifier: {identifier}") if not found_key: print(f'Key not found for {line}') # TODO: How do I update the whole file and rewrite it gracefully? 
if __name__ == '__main__': parse_out_editable_pieces(sys.argv[1]) <file_sep>import sys import requests from database.redshift_query_runner import run_individual_queries_in_separate_function GITHUB_URL = 'https://api.github.com' """ Sample Result: [ { "name": "abacosdw.sql", "path": "database/sql/abacosdw.sql", "sha": "59afe29932259ac690360afdb0eb7485cf7ada6b", "size": 8682, "url": "https://api.github.com/repos/nickmcsimpson/python_playground/contents/database/sql/abacosdw.sql?ref=main", "html_url": "https://github.com/nickmcsimpson/python_playground/blob/main/database/sql/abacosdw.sql", "git_url": "https://api.github.com/repos/nickmcsimpson/python_playground/git/blobs/59afe29932259ac690360afdb0eb7485cf7ada6b", "download_url": "https://raw.githubusercontent.com/nickmcsimpson/python_playground/main/database/sql/abacosdw.sql", "type": "file", "_links": { "self": "https://api.github.com/repos/nickmcsimpson/python_playground/contents/database/sql/abacosdw.sql?ref=main", "git": "https://api.github.com/repos/nickmcsimpson/python_playground/git/blobs/59afe29932259ac690360afdb0eb7485cf7ada6b", "html": "https://github.com/nickmcsimpson/python_playground/blob/main/database/sql/abacosdw.sql" } } ] """ def get_list_of_contents_from_github_folder(org, repo, path): r = requests.get(f"{GITHUB_URL}/repos/{org}/{repo}/contents/{path}") if r.status_code == 200: return r.json() else: print(r.status_code) # 302Found # 403ForbiddenResource # 404NotFound def main(path, **kwargs): contents = get_list_of_contents_from_github_folder('nickmcsimpson', 'python_playground', path) for item in contents: print(item.get('name')) run_individual_queries_in_separate_function(contents[0].get('download_url'), get_file_from=url_open, **kwargs) def url_open(url): response = requests.get(url) return response.text if __name__ == '__main__': main(sys.argv[1], schema=sys.argv[2], username=sys.argv[3], password=<PASSWORD>], domain=sys.argv[5]) <file_sep>import sys from functools import wraps from pprint import 
pprint import psycopg2.extras from database.function_timer import time_sequence def query_wrapper(function): @wraps(function) def connection(*args, **kwargs): conn = psycopg2.connect(dbname='dev', host=f"{kwargs.get('domain')}.redshift.amazonaws.com", port=5439, user=kwargs.get('username'), password=<PASSWORD>('<PASSWORD>') ) # Open a cursor to perform database operations """When a connection exits the with block, if no exception has been raised by the block, the transaction is committed. In case of exception the transaction is rolled back. When a cursor exits the with block it is closed, releasing any resource eventually associated with it. The state of the transaction is not affected. """ with conn: with conn.cursor() as curs: function(*args, **kwargs, cursor=curs) conn.close() return connection @query_wrapper @time_sequence def run_script_with_file_context_manager(file_name, cursor=None, schema='', **kwargs): """Open file as context manager and execute as script""" with open(file_name) as file: try: sql_script = file.read().replace('<schema>', schema) cursor.executemany(sql_script, {}) """This likes to fail with no information as to why. Likely because of excess whitespace. "The current implementation of executemany() is (using an extremely charitable understatement) not particularly performing. These functions can be used to speed up the repeated execution of a statement against a set of parameters. By reducing the number of server roundtrips the performance can be orders of magnitude better than using executemany()." The primary benefit seems to come for prepared statements and inserts that can be reduced. Our use case, may not benefit, and the granularity of adding to the psycopg2 'commit' batch is more beneficial than attampting to execute a full script with no insight into where something fails. 
execute_batch is semantically similar: "but has a different implementation: Psycopg will join the statements into fewer multi-statement commands, each one containing at most page_size statements, resulting in a reduced number of server roundtrips." """ # psycopg2.extras.execute_batch(sql_script) except Exception as e: print(e) @query_wrapper @time_sequence def run_queries_with_file_context_manager(file_name, cursor=None, **kwargs): # Per docs this is no different than execute many """Open file as context manager and execute individually""" with open(file_name) as file: sql_script = file.read() for command in sql_script.split(';'): cursor.execute(command) @query_wrapper @time_sequence def run_individual_queries_in_separate_function(file_name, cursor=None, schema='', get_file_from=None, **kwargs): """Open file as context manager and execute individually via function call""" if get_file_from is not None: sql_script = get_file_from(file_name) else: with open(file_name) as file: sql_script = file.read() for command in sql_script.split(';'): if command.isspace() or command == '': continue try: run_singular_query(command.replace('<schema>', schema), cursor) except ValueError: print(f"Can't Format query:\n{command}") @time_sequence def run_singular_query(command, cursor=None): try: cursor.execute(command) except Exception as exception: print(exception) pprint(f"{command}") @time_sequence def run_individual_queries_in_separate_transactions(file_name, schema='', **kwargs): """Open file as context manager and execute individually via function call unique transactions""" with open(file_name) as file: sql_script = file.read() for command in sql_script.split(';'): if command.isspace() or command == '': continue try: run_singular_query_as_transaction(command.replace('<schema>', schema), **kwargs) except ValueError: print(f"Can't Format query:\n{command}") print('___________________________\n\n\n') @query_wrapper @time_sequence def run_singular_query_as_transaction(command, 
cursor=None, **kwargs): try: cursor.execute(command) except Exception as exception: print(exception) pprint(f"{command}") if __name__ == '__main__': run_individual_queries_in_separate_transactions(sys.argv[1], schema=sys.argv[2], username=sys.argv[3], password=<PASSWORD>[4], domain=sys.argv[5]) <file_sep>from datetime import date from pprint import pprint from marshmallow import Schema, fields, EXCLUDE, pre_load class BirthdaySchema(Schema): day = fields.Str() month = fields.Str() year = fields.Str() class ArtistSchema(Schema): name = fields.Str() birthday = fields.Nested(BirthdaySchema) class AlbumSchema(Schema): title = fields.Str() release_date = fields.Date() artist = fields.Nested(ArtistSchema()) class FamousSchema(Schema): name = fields.Str() artwork = fields.Str(data_key='title') birthyear = fields.Str() @pre_load def grab_name(self, data, **kwargs): artist = data.pop('artist') year = artist.get('history').get('year') data['name'] = artist.get('name') data['birthyear'] = year return data birthday = dict(year='1996', month='4', day='18') bowie = dict(name="<NAME>", birthday=birthday) album = dict(artist=bowie, title="Hunky Dory", release_date=date(1971, 12, 17)) album_schema = AlbumSchema() result = album_schema.dump(album) pprint(result, indent=2) # { 'artist': {'name': '<NAME>'}, # 'release_date': '1971-12-17', # 'title': 'Hunky Dory'} famous_schema = FamousSchema() result = famous_schema.load(album, unknown=EXCLUDE) pprint(result, indent=2) <file_sep># python_playground Learning Python
d26f2e69b8a87085a7b163515b7596816e2055fd
[ "Markdown", "SQL", "Python" ]
15
Python
nickmcsimpson/python_playground
30507e072a0d791ba875b80bcfd367bc003c9376
e32d1ad03c9c3fcfe67f0d3c077ed3f708d17436
refs/heads/master
<repo_name>seaneshbaugh/dandgcreditservice<file_sep>/javascripts/index.js $.fn.colorFade = function(color, duration) { if (duration == null) { duration = 500; } return this.each(function() { var original; original = $(this).css("color"); $(this).mouseover(function() { return $(this).stop().animate({ color: color }, duration); }); return $(this).mouseout(function() { return $(this).stop().animate({ color: original }, duration); }); }); }; var showAlertMessage = function(alertMessageTitle, alertMessageBody, alertMessageClass) { $("#alert-message-title").empty(); $("#alert-message-title").append(alertMessageTitle); $("#alert-message-body").empty(); $("#alert-message-body").append(alertMessageBody); $("#alert-message").removeClass(); $("#alert-message").addClass(alertMessageClass); $("#alert-message").css({ top: (Math.round($(window).height() / 2) - Math.round($("#alert-message").height() / 2)) + "px" }); $("#alert-message").css({ left: (Math.round($(window).width() / 2) - Math.round($("#alert-message").width() / 2)) + "px" }); $("#alert-message").stop().show().css({ opacity: 1.0 }).fadeTo(8000, 0.0); } $(function() { $("#main-nav ul li a").each(function() { return $(this).colorFade("#787878", 1200); }); var currentDate = new Date(); $("#copyright-year").text(currentDate.getFullYear()); $("#alert-message").on("mouseover", function(event) { if ($(this).css("opacity") !== "0") { $(this).stop().css({ opacity: 1.0 }); } }); $("#alert-message").on("mouseout", function(event) { $(this).fadeTo(2000, 0.0, function() { $("#alert-message-title").empty(); $("#alert-message-body").empty(); }); }); var validator = new FormValidator("contact-form", [{ name: "name", display: "Name", rules: "required|max_length[255]" }, { name: "email_address", display: "Email Address", rules: "required|valid_email|max_length[255]" }, { name: "phone_number", display: "Phone Number", rules: "required|callback_valid_phone" }, { name: "message", display: "Message", rules: "required|max_length[65535]" }], 
function(errors, event) { if (event && event.preventDefault) { event.preventDefault(); } else if (event) { event.returnValue = false; } if (errors.length > 0) { showAlertMessage("Error!", errors.join("<br />"), "error"); } else { $("#contact-form-submit").attr("disabled", true); $("#contact-form-ajax").val("1"); $.post($("#contact-form").attr("action"), $("#contact-form").serialize(), function(data, textStatus, jqXHR) { console.log(data); console.log(textStatus); console.log(jqXHR); var alertMessageClass; if (jqXHR.status === 200) { alertMessageClass = "success"; $("#contact-form-name").val(""); $("#contact-form-email-address").val(""); $("#contact-form-phone-number").val(""); $("#contact-form-message").val(""); } else { alertMessageClass = "error"; } showAlertMessage(data.response_title, data.response_message, alertMessageClass); $("#contact-form-submit").attr("disabled", false); }); } } ); validator.registerCallback("valid_phone", function(value) { return /^((([0-9]{1})*[- .(]*([0-9]{3})[- .)]*[0-9]{3}[- .]*[0-9]{4})+)*$/.test(value); }).setMessage("valid_phone", "The Phone Number field must be a valid phone number."); }); <file_sep>/README.md # dandgcreditservice.com Just a simple website with a contact form. <file_sep>/cgi-bin/contact.rb #!/usr/bin/ruby # contact.rb require "rubygems" require "logger" require "cgi" require "erb" require "active_support/inflector" require "yaml" require "net/smtp" require "json" require "RFC2822" log = Logger.new("../../contact.log") log.level = Logger::DEBUG begin cgi = CGI.new ajax = cgi.params["ajax"].to_s == "1" test = cgi.params["test"].to_s == "1" log.info "New contact form request from #{cgi.remote_addr}.\nAjax: #{ajax}\nTest: #{test}" if cgi.request_method != "POST" error_type = "Error 405 - Method Not Allowed" error_message = "Hey! You're not supposed to be here! You've most likely reached this page because you tried to use a GET instead of a POST. If you have no idea what that means don't worry. 
Just go <a href=\"/\">home</a> and everything will be alright." if ajax puts cgi.header("status" => "METHOD_NOT_ALLOWED", "type" => "application/json") puts({ :response_title => error_type, :response_message => error_message }.to_json) else puts cgi.header("status" => "METHOD_NOT_ALLOWED", "type" => "text/html") puts File.open("error.html.erb") { |file| ERB.new(file.read) }.result(binding) end log.error "Error 405 - Method Not Allowed\n#{cgi.remote_addr} attempted to send a #{cgi.request_method} request when a POST was expected." exit end params = { "name" => "", "email_address" => "", "phone_number" => "", "message" => "" } params.each do |key, value| params[key] = cgi.params[key].to_s log.debug "#{ActiveSupport::Inflector.titleize(key)}: #{params[key]}" if params[key] == "" error_type = "Error 400 - Bad Request" error_message = "Looks like you've forgotten to include your #{ActiveSupport::Inflector.titleize(key).downcase}! Your best bet is to go <a href=\"/\">back</a> and try again." if ajax puts cgi.header("status" => "BAD_REQUEST", "type" => "application/json") puts({ :response_title => error_type, :response_message => error_message }.to_json) else puts cgi.header("status" => "BAD_REQUEST", "type" => "text/html") puts File.open("error.html.erb") { |file| ERB.new(file.read) }.result(binding) end log.error "Error 400 - Bad Request\n#{cgi.remote_addr} attempted to send a request without their #{ActiveSupport::Inflector.titleize(key).downcase}.\n#{cgi.params.inspect}" exit end end if params["name"].length > 255 params["name"] = params["name"][0, 255] log.warn "#{cgi.remote_addr} attempted to send a request with a name that was too long and was truncated." end if params["email_address"].match(RFC2822::EmailAddress).nil? error_type = "Error 400 - Bad Request" error_message = "That's clearly not an email address. If you want our advice, <a href=\"/\">turn around</a> and try again with something that isn't gibberish." 
if ajax puts cgi.header("status" => "BAD_REQUEST", "type" => "application/json") puts({ :response_title => error_type, :response_message => error_message }.to_json) else puts cgi.header("status" => "BAD_REQUEST", "type" => "text/html") puts File.open("error.html.erb") { |file| ERB.new(file.read) }.result(binding) end log.error "Error 400 - Bad Request\n#{cgi.remote_addr} attempted to send a request with an invalid email address.\n#{cgi.params.inspect}" exit end if params["phone_number"].match(/^((([0-9]{1})*[- .(]*([0-9]{3})[- .)]*[0-9]{3}[- .]*[0-9]{4})+)*$/).nil? error_type = "Error 400 - Bad Request" error_message = "Hello? Oops, wrong number! Actually, that's not a phone number at all! Let's try <a href=\"/\">redialing</a> with a real U.S. phone number this time." if ajax puts cgi.header("status" => "BAD_REQUEST", "type" => "application/json") puts({ :response_title => error_type, :response_message => error_message }.to_json) else puts cgi.header("status" => "BAD_REQUEST", "type" => "text/html") puts File.open("error.html.erb") { |file| ERB.new(file.read) }.result(binding) end log.error "Error 400 - Bad Request\n#{cgi.remote_addr} attempted to send a request with an invalid phone number.\n#{cgi.params.inspect}" exit end if params["message"].length > 65535 params["message"] = params[:message][0, 65535] params["message"] << "\n\nThis message was too long to display in its entirety and has been truncated." log.warn "#{cgi.remote_addr} attempted to send a request with a message that was too long and was truncated." 
end smtp_settings = YAML.load_file("../../smtp_settings.yml") log.debug smtp_settings.inspect #validate settings here, maybe unless test Net::SMTP.start(smtp_settings["smtp"]["address"], smtp_settings["smtp"]["port"], smtp_settings["smtp"]["helo"], smtp_settings["smtp"]["user"], smtp_settings["smtp"]["secret"], :plain) do |smtp| smtp.send_message "From: D & G Credit Service Contact Form <<EMAIL>>\nTo: <<EMAIL>>\nSubject: New Message from D & G Credit Service Contact Form\n\nName: #{params["name"]}\nEmail: #{params["email_address"]}\nPhone: #{params["phone_number"]}\nMessage:\n#{params["message"]}", "<EMAIL>", "<EMAIL>" log.info "Email sent to admin with request information.\n#{cgi.params.to_s}" smtp.send_message "From: D & G Credit Service <<EMAIL>>\nTo: <#{params["email_address"]}>\nSubject: D & G Credit Service Contact Form Confirmation\n\nThis message has been sent to confirm your contact request with D & G Credit Service (http://dandgcreditservice.com/). We appreciate your interest and will respond as soon as possible (usually within a business day).\n\nIf you received this message in error please don't hesitate to send a reply to <EMAIL> and let us know someone goofed.", "<EMAIL>", params["email_address"] log.info "Email sent to sender with request confirmation.\n#{cgi.params.to_s}" end else log.debug "Contact form test." end if ajax puts cgi.header("status" => "OK", "type" => "application/json") puts({ :response_title => "Message Sent!", :response_message => "Thanks! Your message has been sent. You should receive a response shortly." }.to_json) else puts cgi.header("status" => "REDIRECT", "location" => "/thanks.html") end rescue Exception => error log.error "#{error.message}\nBacktrace:\n#{error.backtrace.join("\n")}" end
a1be4a6695ca00024bd45092eef3d6ed9ca04790
[ "JavaScript", "Ruby", "Markdown" ]
3
JavaScript
seaneshbaugh/dandgcreditservice
331f2695af259fe7675649bd52f1eebd0222b6ab
e66c3fb188dd5dd50eb32d8f80be7b4015b836c0
refs/heads/master
<file_sep>$(document).ready(function () { $(".btn").click(function () { const var1 = $('#var1').val(), var2 = $('#var2').val(), var3 = $('#var3').val(), var4 = $('#var4').val(), var5 = $('#var5').val(), var6 = $('#var6').val(), speach = $('#speach').val(); const result = $(".modal-body"); result.html(`Жили-были ${var1} да ${var2},Была у них ${var3},Снесла ${var3} ${var4}, не простое - золотое, ${var1} бил, бил - не разбил, ${var2} била, била - не разбила", "${var5} бежала, ${var6} задела, ${var4} упало и разбилось.,${var1} плачет, ${var2} плачет, а ${var3} кудахчет:"${speach}"`) }); });
4df205c47039890ed52d3c6e2938a21acc28a9fc
[ "JavaScript" ]
1
JavaScript
YaroslavDrozdovskiy/homework-modA5-yar
1d02c8825937ebf247cc0db5651b7f1a98f163e2
d5e154b3bd66f8763d25bebdf6026428485b838c
refs/heads/master
<repo_name>zamoraricardo15/Recursion<file_sep>/Reverse.py def reverse( text ): if len( text ) == 1: return text else: return text[ len(text) - 1] + reverse(text[0:len(text) - 1]) def reverse2( text, index ): if index == 0: return text[index] else: return text[index] + reverse2( text, index - 1 ) text = "hello world" result = reverse2( text, len( text ) - 1) print( result ) # Finding if a string is palindrome # radar => True # hello => False # Fibonacci sequence # 1 1 2 3 5 8 13 21 34 55 ... # Solve it with an iterative solution # Then solve it with a recursive solution # 5 => 1 1 2 3 5 # 7 => 1 1 2 3 5 8 13 # 5 => 5 # 7 => 13 <file_sep>/Sum.py # 5 = 5 + 4 + 3 + 2 + 1 = 15 def sum( num ): if num == 1: return 1 else: return num + sum( num - 1) result = sum( 10 ) print( result ) <file_sep>/HelloWorld.py def printHello( num ): if num == 0: return 0 else: print( "Hello world!" ) return printHello( num - 1 ) printHello( 5 )<file_sep>/Factorial.py def factorial( num ): if num == 1: return num else: print( num, "*" ) return num * factorial( num - 1 ) result = factorial( 5 ) print( result )
325779769fd73722f359e9660bba54e11bc720ab
[ "Python" ]
4
Python
zamoraricardo15/Recursion
96f9d7d1b9e5db5aa6dc1d03ffc07a4284b47146
11e1db96930cd60e256c179d9b1cf5f7f4c6863c
refs/heads/master
<file_sep>import { Component } from 'react' import { ReactComponentLinkedModelInterface } from '../model/react.component.linked.model' /** * @class ModelAwareComponent * * This class facilitates the registration / unregistration of a React component with the model which, when updated * will be able to refresh the component. */ export class ModelAwareComponent<P = any, S = any> extends Component<P, S> { protected _model: ReactComponentLinkedModelInterface protected registerModel (model: ReactComponentLinkedModelInterface) { this._model = model this._model.registerReactComponent(this) } protected unregisterModel () { this._model.unregisterReactComponent(this) } componentWillUnmount () { this.unregisterModel() } } <file_sep>import { log } from '../../lib/util/unified.logger' import { Configuration, ConfigurationData } from './configuration' import { browser } from 'webextension-polyfill-ts' import _ from 'lodash' import { ConfigurationProviderInterface } from '../../lib/interface/service.interface' export class ConfigurationProvider implements ConfigurationProviderInterface { private _config: Configuration private _currentProfileName: string private _currentEncryptionScheme: string private _currentEncryptionKey: string // eslint-disable-next-line no-useless-constructor constructor () { // this._config = new Configuration() } public initialize () { return new Promise<void>((resolve, reject) => { this.ensureAtLeastOneProfile().then(() => { log('ConfigurationProvider initialized.') resolve() }) }) } public async loadProfile (profileName: string, encryptionKey: string, encryptionSchemeName: string): Promise<void> { const profiles = await this.getAvailableProfileNames() if (!_.includes(profiles, profileName)) { throw new Error('Requested profile(' + profileName + ') is not available. Profiles: ' + JSON.stringify(profiles)) } const storageData = await this.readFromStorage(profileName) const encryptedProfileData = _.get(storageData, profileName, '') // decrypt here... 
const decryptedProfileData = encryptedProfileData this._config = new Configuration(decryptedProfileData) this._currentProfileName = profileName this._currentEncryptionScheme = encryptionSchemeName this._currentEncryptionKey = encryptionKey window.dispatchEvent(new CustomEvent('PPM', { detail: { type: 'config.state', value: 'loaded' }, bubbles: true, cancelable: true } )) } public async resetConfiguration (configData: ConfigurationData) { // encrypt here... const encryptedProfileData = configData await this.writeToStorage(this._currentProfileName, encryptedProfileData) window.dispatchEvent(new CustomEvent('PPM', { detail: { type: 'config.state', value: 'saved' }, bubbles: true, cancelable: true } )) await this.loadProfile(this._currentProfileName, this._currentEncryptionScheme, this._currentEncryptionKey) } protected async ensureAtLeastOneProfile () { return new Promise<void>((resolve, reject) => { this.getAvailableProfileNames().then(profiles => { if (!_.isEmpty(profiles)) { return resolve() } this._currentProfileName = 'DEFAULT2' this._currentEncryptionScheme = 'AesMd5' this._currentEncryptionKey = 'Paranoia' this._config = new Configuration() this.writeToStorage(this._currentProfileName, this._config.getAll()).then(() => { resolve() }) }) }) } protected async getAvailableProfileNames () { return new Promise<string[]>((resolve, reject) => { this.readFromStorage(null).then(data => { resolve(_.keys(data)) }) }) } protected async writeToStorage (profile: string, value: any) { return new Promise<void>((resolve, reject) => { const storage = this.getStorage() const data = _.set({}, profile, value) // log('Writing data: ' + JSON.stringify(data)) storage.set(data).then(() => { if (browser.runtime.lastError) { return reject(browser.runtime.lastError) } resolve() }) }) } protected async readFromStorage (profile: null | string) { return new Promise<any>((resolve, reject) => { const storage = this.getStorage() storage.get(profile).then(data => { // log('Read data from 
storage(profile=' + profile + '): ' + JSON.stringify(data)) if (browser.runtime.lastError) { return reject(browser.runtime.lastError) } resolve(data) }) }) } protected getStorage () { return browser.storage.sync } public isAvailable () { return !_.isUndefined(this._config) } public async getConfiguration () { return new Promise<Configuration>((resolve, reject) => { if (!this.isAvailable()) { return reject(new Error('Configuration is not available')) } return resolve(this._config) }) } } <file_sep>import * as _ from 'lodash' /** * Solves the location to tell the correct application ??? */ function locationHasHash (location: Location, hashCollection: string | string[]) { const locationHash = location.hash || '#' if (!_.isArray(hashCollection)) { hashCollection = [hashCollection] } // console.log('Comparing location hash: ', locationHash + ' to: ' + JSON.stringify(hashCollection)) return _.includes(hashCollection, locationHash) } /* Root path - Initial page */ export function mainMenu (location: Location) { return locationHasHash(location, '#') } export function login (location: Location) { return locationHasHash(location, '#login') } export function logout (location: Location) { return locationHasHash(location, '#logout') } export function header (location: Location) { return locationHasHash(location, ['#', '#login', '#logout']) } <file_sep>import * as _ from 'lodash' import { Component } from 'react' export interface ReactComponentLinkedModelInterface { registerReactComponent (component: Component): void unregisterReactComponent (component: Component): void refreshLinkedComponents (): void } /** * @class ReactComponentLinkedModel * * Helper class to facilitate the refresh of a related React component when the model changes */ export class ReactComponentLinkedModel implements ReactComponentLinkedModelInterface { private readonly boundComponents: Component[] constructor () { this.boundComponents = [] } /** * Call this method to trigger the refresh of all bound react 
components. */ public refreshLinkedComponents () { this.sanitizeBoundComponents() _.each(this.boundComponents, (cmp: Component) => { cmp.setState({}) }) } public registerReactComponent (component: Component) { let found = false _.each(this.boundComponents, cmp => { if (cmp === component) { found = true return false } return true }) if (!found) { this.boundComponents.push(component) } } public unregisterReactComponent (component: Component) { _.remove(this.boundComponents, cmp => { return cmp === component }) } /** * If the browser window is closed or reloaded, the DOM elements will be dead so must be removed from the list * Ref.: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Errors/Dead_object * @todo: if (Components.utils.isDeadWrapper(obj)) {} - would be cleaner * * @private */ private sanitizeBoundComponents () { if (!_.isEmpty(this.boundComponents)) { _.remove(this.boundComponents, DOMElement => { let isDead = false try { String(DOMElement) } catch (e) { isDead = true } return isDead }) } } } <file_sep>import { browser } from 'webextension-polyfill-ts' import * as _ from 'lodash' // const lang = 'en' /** * Translate a string from locales * @param key * @param substitutions */ export const getTranslatedMessage = (key: string, substitutions?: any): string => { let translated = browser.i18n.getMessage(key, substitutions) if (_.isEmpty(translated)) { console.log('I18n - Missing translation required: ' + key) translated = key } return translated } <file_sep>import { ReactComponentLinkedModel } from './react.component.linked.model' import { PasswordCard } from './password.card' import * as _ from 'lodash' export class PasswordList extends ReactComponentLinkedModel { private _items: PasswordCard[] constructor () { super() this._items = [] } get items (): PasswordCard[] { return this._items } public getLength (): number { return this._items.length } /** * Inserts an item into the todolist */ public addItem (item: PasswordCard): void { let found = 
false _.each(this.items, i => { if (i === item) { found = true return false } return true }) // Add an item if (found) { throw new Error('This item has already been registered!') } this._items.push(item) this.refreshLinkedComponents() } /** * Remove an item at a specific position */ public removeItemAt (index: number): void { index = index >= 0 ? index : 0 index = index < this.getLength() ? index : this.getLength() - 1 this._items.splice(index, 1) this.refreshLinkedComponents() } /** * @todo: implement me! */ public removeItem (item: PasswordCard): boolean { return false } public resetWithPasswordList (lst: PasswordList) { this._items = [] _.each(lst.items, pwd => { this._items.push(pwd) }) } } <file_sep>import * as _ from 'lodash' /** * Solves the location to tell the correct application ??? */ function locationHasHash (location: Location, hash: string) { // console.log('Comparing location hash: ', location.hash + ' to: ' + hash) let hashCollection = ['#' + hash, '#/' + hash] if (_.isEmpty(hash)) { hashCollection = [hash] } return _.includes(hashCollection, location.hash) } /* Root path - Initial page */ export function passwords (location: Location) { return locationHasHash(location, '') } export function info (location: Location) { return locationHasHash(location, 'info') } export function settings (location: Location) { return locationHasHash(location, 'settings') } // Always present apps export const navbar = (location: Location) => true export const footer = (location: Location) => true <file_sep>/** * BACKGROUND ENTRY POINT */ // Imports import * as _ from 'lodash' import { PPMApp } from './app/PPMApp' if (!_.has(window, 'PPMApp')) { // Create an instance of the PPMApp const app = new PPMApp() // Expose the PPMApp instance to the world _.set(window, 'PPMApp', app) // Run it app.run() } <file_sep>import { log } from '../../lib/util/unified.logger' import { PasswordList } from '../../lib/model/password.list' import { PasswordCard } from 
'../../lib/model/password.card' import { getRandomString } from '../../lib/util/utils' import * as _ from 'lodash' import { DataProviderInterface } from '../../lib/interface/service.interface' export class DataProvider implements DataProviderInterface { private readonly _pwdList: PasswordList constructor () { this._pwdList = new PasswordList() } public initialize () { return new Promise<void>((resolve, reject) => { setInterval(() => { this.modifyPasswordList() }, 5000) log('DataProvider initialized.') resolve() }) } public async getPasswordList () { return new Promise<PasswordList>((resolve, reject) => { setTimeout(() => { resolve(this._pwdList) }, 50) }) } protected modifyPasswordList () { const pwListLen = this._pwdList.getLength() const operations = ['add', 'remove'] let operation = _.nth(operations, Math.floor(Math.random() * operations.length)) if (pwListLen < 3) { operation = 'add' } if (pwListLen === 10) { operation = 'remove' } if (operation === 'add') { this._pwdList.addItem(this.getRandomPasscard()) } if (operation === 'remove') { this._pwdList.removeItemAt(0) } } protected getRandomPasscard = () => { return new PasswordCard({ id: getRandomString(12), name: getRandomString(32), text: '', dateCreated: new Date(), dateUpdated: new Date(), identifier: getRandomString(8) }) } } <file_sep>PPM Firefox Client ================== Install -------- There are some outdated packages such as `grunt-webpack` which require webpack v4. However this project needs webpack v5 so npm needs come convincing that this peer dependency mismatch is ok: > npm install npm install --legacy-peer-deps Bumping up the version ---------------------- Use `grunt version:project:patch` for bumping up patch versions. Use `grunt version:project:minor` for bumping up minor versions. 
Solving the 'eval' CSP issue: ----------------------------- https://github.com/webpack/webpack/issues/6461 https://github.com/webpack/webpack/issues/5627 Extension packaging ------------------- Lint the Production build by: > web-ext lint Sign the extension and build the signed xpi > web-ext sign api-key "user:xxx" api-secret "123" https://extensionworkshop.com/documentation/develop/web-ext-command-reference/#web-ext-sign https://github.com/mozilla/web-ext#using-web-ext-in-nodejs-code<file_sep>import React from 'react' import ReactDOM from 'react-dom' import singleSpaReact from 'single-spa-react' import Navbar from './app' function domElementGetter () { return document.getElementById('navigation') as any } const reactLifecycles = singleSpaReact({ React, ReactDOM, rootComponent: (Navbar as any), domElementGetter }) export const bootstrap = [ reactLifecycles.bootstrap ] export const mount = [ reactLifecycles.mount ] export const unmount = [ reactLifecycles.unmount ] <file_sep>/* eslint-disable camelcase */ import _ from 'lodash' export interface ConfigurationData { logger: { do_console_logging: boolean, test_element: string } } const defaultConfiguration: ConfigurationData = { logger: { do_console_logging: true, test_element: '' } } export class Configuration { private readonly _configuration: ConfigurationData constructor (data?: ConfigurationData) { this._configuration = defaultConfiguration if (!_.isUndefined(data) && _.isObject(data)) { _.extend(this._configuration, data) } } public get (path: string) { if (!_.has(this._configuration, path)) { throw new Error('Unknown get path(' + path + ') in configuration: ' + JSON.stringify(this._configuration)) } return _.get(this._configuration, path) } public set (path: string, value: any) { if (!_.has(this._configuration, path)) { throw new Error('Unknown set path(' + path + ') in configuration: ' + JSON.stringify(this._configuration)) } return _.set(this._configuration, path, value) } public getAll () { return 
this._configuration } } <file_sep>/** * SETTINGS ENTRY POINT */ import { registerApplication, start } from 'single-spa' import * as Activity from './activity.functions' const registerApps = () => { registerApplication({ name: 'navbar', app: () => import('./app/navbar/lifecycle'), activeWhen: Activity.navbar }) registerApplication({ name: 'footer', app: () => import('./app/footer/lifecycle'), activeWhen: Activity.footer }) registerApplication({ name: 'passwords', app: () => import('./app/passwords/lifecycle'), activeWhen: Activity.passwords }) registerApplication({ name: 'settings', app: () => import('./app/settings/lifecycle'), activeWhen: Activity.settings }) registerApplication({ name: 'info', app: () => import('./app/info/lifecycle'), activeWhen: Activity.info }) } registerApps() document.addEventListener('DOMContentLoaded', function () { start() }) <file_sep>import { LoggerService } from '../logger/logger.service' import { ConfigurationProvider } from '../configuration/configuration.provider' import { DataProvider } from '../data/data.provider' import { Cryptor } from '../cryptor/cryptor' import { log } from '../../lib/util/unified.logger' export class PPMApp { private readonly ___DO_AUTOLOGIN___ = true private readonly _loggerService: LoggerService private readonly _cryptor: Cryptor private readonly _configurationProvider: ConfigurationProvider private readonly _dataProvider: DataProvider constructor () { this._loggerService = new LoggerService() this._cryptor = new Cryptor() this._configurationProvider = new ConfigurationProvider() this._dataProvider = new DataProvider() } public run () { log('PPMApp initializing...') window.addEventListener('PPM', this.PPMCustomEventListener.bind(this) as EventListener, false) this._cryptor.initialize().then(() => { return this._configurationProvider.initialize() }).then(() => { return this._dataProvider.initialize() }).then(() => { log('PPMApp initialized.') window.dispatchEvent(new CustomEvent('PPM', { detail: { type: 
'app.state', value: 'initialized' }, bubbles: true, cancelable: true } )) if (this.___DO_AUTOLOGIN___) { log('Executing Autologin...') this._configurationProvider.loadProfile( 'DEFAULT', 'Paranoia', 'AesMd5').then(() => { log('Autologin done.') }) } }) } protected PPMCustomEventListener (e: CustomEvent<{ type: string, value: string }>) { if (e && e.type === 'PPM') { switch (e.detail.type) { case 'app.state': log('New App state: ' + e.detail.value) break case 'config.state': log('New Config state: ' + e.detail.value) break default: if (e.detail.type !== 'log.message') { log('Unhandled PPM CustomEvent: ', e) } } } } get cryptor (): Cryptor { return this._cryptor } get configurationProvider (): ConfigurationProvider { return this._configurationProvider } get dataProvider (): DataProvider { return this._dataProvider } get loggerService (): LoggerService { return this._loggerService } } <file_sep>import * as _ from 'lodash' import { PPMApp } from '../../background/app/PPMApp' import { browser } from 'webextension-polyfill-ts' export const getProjectConfigValue = (path: string, defaultValue: any = undefined) => { // eslint-disable-next-line @typescript-eslint/no-var-requires const pkgDef = require('../../../package.json') return _.get(pkgDef, path, defaultValue) } /** * Returns global variable */ export function getWindow (): any { return window } /** * Returns the background application instance */ export async function getBackgroundPage (): Promise<Window> { return await browser.runtime.getBackgroundPage() } /** * Returns the background application instance */ export async function getPPMApp (): Promise<PPMApp> { const bgp = await getBackgroundPage() return _.get(bgp, 'PPMApp') } // @todo: a better one? 
export function getRandomString (length = 8) { let s = '' const randomChar = () => { const n = Math.floor(Math.random() * 62) if (n < 10) { return n } // 1-10 if (n < 36) { return String.fromCharCode(n + 55) } // A-Z return String.fromCharCode(n + 61) // a-z } while (length--) { s += randomChar() } return s } <file_sep>import { log } from '../../lib/util/unified.logger' import { CryptorInterface } from '../../lib/interface/service.interface' export class Cryptor implements CryptorInterface { _schemes: any[] constructor () { this._schemes = [] } public initialize () { return new Promise<void>((resolve, reject) => { log('Initialized.') resolve() }) } } <file_sep>import { ReactComponentLinkedModel } from './react.component.linked.model' export class Card extends ReactComponentLinkedModel { private _id: string private _dateCreated: Date private _dateModified: Date private _identifier: string private _name: string protected _type: string constructor (data?: any) { super() if (data) { this._id = data.id this._dateCreated = data.dateCreated this._dateModified = data.dateModified this._identifier = data.identifier this._name = data.name } } get id (): string { return this._id } get dateCreated (): Date { return this._dateCreated } get dateModified (): Date { return this._dateModified } get identifier (): string { return this._identifier } get name (): string { return this._name } get type (): string { return this._type } } <file_sep>import { getPPMApp } from '../../lib/util/utils' import { LoggerServiceInterface } from '../../lib/interface/service.interface' export class LoggerService implements LoggerServiceInterface { /** @todo: Set me(_doConsoleLogging) to false! 
*/ private _doConsoleLogging = true constructor () { window.addEventListener('PPM', this.PPMCustomEventListener.bind(this) as EventListener, false) } /** * Get the relative configuration as soon it is available by the configurationProvider */ protected PPMCustomEventListener (e: CustomEvent<{ type: string, value: string, zone: string, message: string, optionalParams: [] }>) { if (e && e.type === 'PPM') { if (e.detail.type === 'config.state' && e.detail.value === 'loaded') { getPPMApp().then((PPMApp) => { return PPMApp.configurationProvider.getConfiguration() }).then(configuration => { this._doConsoleLogging = configuration.get('logger.do_console_logging') this.log('LoggerService', 'Console Logging Config changed to: ' + this._doConsoleLogging) }) } } } public log (message: any, ...optionalParams: any[]) { if (this._doConsoleLogging) { const prefix = '' console.log(prefix + message, ...optionalParams) } } } <file_sep>import { PasswordList } from '../model/password.list' /** * Collection of service interfaces */ export interface LoggerServiceInterface { log (zone?: string, message?: any, ...optionalParams: any[]): void } export interface DataProviderInterface { initialize (): Promise<void> getPasswordList (): Promise<PasswordList> } export interface ConfigurationProviderInterface { initialize (): Promise<void> loadProfile (profileName: string, encryptionKey: string, encryptionSchemeName: string): Promise<void> } export interface CryptorInterface { initialize (): Promise<void> } <file_sep>/** * POPUP ENTRY POINT */ import { registerApplication, start } from 'single-spa' import * as Activity from './activity.functions' const registerApps = () => { registerApplication({ name: 'main_menu', app: () => import('./app/main_menu/lifecycle'), activeWhen: Activity.mainMenu }) registerApplication({ name: 'header', app: () => import('./app/header/lifecycle'), activeWhen: Activity.header }) } registerApps() document.addEventListener('DOMContentLoaded', function () { start() }) 
<file_sep>import { Card } from './card' export class PasswordCard extends Card { private _text: string private _counter = 0 constructor (data?: any) { super(data) this._type = 'password' if (data) { this._text = data.text } setInterval(this.increaseCounter, 1000) } protected increaseCounter = () => { this._counter++ this.text = 'CNT___' + this._counter } get text (): string { return this._text } set text (value: string) { this._text = value this.refreshLinkedComponents() } } <file_sep>/* eslint-disable @typescript-eslint/no-var-requires */ const path = require('path') const _ = require('lodash') const commonConfig = require('./webpack/webpack.config.common') module.exports = _.extend(_.cloneDeep(commonConfig), { output: { path: path.resolve(__dirname, 'build-dev'), filename: 'js/[name].js' }, devtool: 'cheap-module-source-map', cache: { type: 'filesystem', cacheDirectory: path.resolve(__dirname, '.cache') } }) <file_sep>import { getPPMApp } from './utils' import _ from 'lodash' import { LoggerServiceInterface } from '../interface/service.interface' /** * A handy log() function that will pass log messages to the LoggerService running in the background */ let loggerService: LoggerServiceInterface export const log = (message?: any, ...optionalParams: any[]) => { getLoggerService().then(() => { loggerService.log(message, ...optionalParams) }) } const getLoggerService = async () => { return new Promise<void>((resolve, reject) => { if (!_.isUndefined(loggerService)) { return resolve() } getPPMApp().then((PPMApp) => { loggerService = PPMApp.loggerService resolve() }) }) } <file_sep>/* eslint-disable @typescript-eslint/no-var-requires */ const path = require('path') const TerserPlugin = require('terser-webpack-plugin') const _ = require('lodash') const commonConfig = require('./webpack/webpack.config.common') module.exports = _.extend(_.cloneDeep(commonConfig), { mode: 'production', output: { path: path.resolve(__dirname, 'build-prod'), filename: 'js/[name].js' }, 
optimization: { minimize: true, minimizer: [ new TerserPlugin({ parallel: true, extractComments: true }) ] } }) <file_sep>/** * Primarily used by webpack to avoid using eval * @type {Window} */ export default window
848e156efa7fb4e3413aa320dbfed362ba61ab67
[ "Markdown", "TypeScript", "JavaScript" ]
25
TypeScript
adamjakab/PPM_Firefox_Client
eec426ffd4d773041af6c9fd498231e42ef54cc7
9a648bed74360cc53bf14081e61685be09a1cd46
refs/heads/master
<file_sep>#https://code.google.com/codejam/contest/32016/dashboard#s=p0 import sys, datetime def solve(vectorSize, vectorOne, vectorTwo): minimumScalarProduct = 0 for i in range(vectorSize): minimumScalarProduct += vectorOne[i] * vectorTwo[i] return minimumScalarProduct def main(): startTime = datetime.datetime.now() inputFile = open(sys.argv[1], 'r') outputFile = open(sys.argv[2], 'w') cases = int(inputFile.readline()) for case in range(1, cases + 1): vectorSize = int(inputFile.readline()) vectorOne = sorted( tuple(map(int, inputFile.readline().split())), reverse = True) vectorTwo = sorted(tuple(map(int, inputFile.readline().split()))) outputData = 'Case #{0}: {1}'.format( case, solve(vectorSize, vectorOne, vectorTwo)) print(outputData) outputFile.write(outputData + '\n') inputFile.close() outputFile.close() print('Elapsed time: {0}'.format(datetime.datetime.now() - startTime)) if __name__ == '__main__': main() <file_sep># GCJ Google Code Jam practices. <file_sep>#https://code.google.com/codejam/contest/351101/dashboard#s=p1 import sys, datetime def solve(words): reversedWords = '' for word in words: reversedWords = word + ' ' + reversedWords return reversedWords[:-1] def main(): startTime = datetime.datetime.now() inputFile = open(sys.argv[1], 'r') outputFile = open(sys.argv[2], 'w') cases = int(inputFile.readline()) for case in range(1, cases + 1): words = tuple(inputFile.readline().split()) outputData = 'Case #{0}: {1}'.format(case, solve(words)) print(outputData) outputFile.write(outputData + '\n') inputFile.close() outputFile.close() print('Elapsed time: {0}'.format(datetime.datetime.now() - startTime)) if __name__ == '__main__': main() <file_sep>#https://code.google.com/codejam/contest/351101/dashboard#s=p0 import sys, datetime def solve(inputCredit, inputItems, inputPrices): for x, priceX in enumerate(inputPrices): for y, priceY in enumerate(inputPrices): if x != y and priceX + priceY == inputCredit: return '{0} {1}'.format(x + 1, y + 1) def main(): 
startTime = datetime.datetime.now() inputFile = open(sys.argv[1], 'r') outputFile = open(sys.argv[2], 'w') cases = int(inputFile.readline()) for case in range(1, cases + 1): inputCredit = int(inputFile.readline()) inputItems = int(inputFile.readline()) inputPrices = tuple(map(int, inputFile.readline().split())) outputData = 'Case #{0}: {1}'.format( case, solve(inputCredit, inputItems, inputPrices)) print(outputData) outputFile.write(outputData + '\n') inputFile.close() outputFile.close() print('Elapsed time: {0}'.format(datetime.datetime.now() - startTime)) if __name__ == '__main__': main()
882bec888f0491873e93a36bfd83ea8794742220
[ "Markdown", "Python" ]
4
Python
alexvilanovab/GCJ
f13e68cda959de4106ee79f954786054b40994a3
34f2f6abf22ff7b51d0046f69ce6134825fb0022
refs/heads/master
<file_sep>var contentContainer = document.getElementById("content-container"); //--------------------------------------------------- // TAB PAGE SETUP FUNCTIONS //--------------------------------------------------- function createProjectEntry(projectKey, project) { // Create project div var projectDiv = document.createElement("div"); projectDiv.id = projectKey + "-div"; // Add carousel var landscape = true; if(project.hasOwnProperty("orientation") && project.orientation === "portrait") landscape = false; projectDiv.appendChild(createCarousel(project.images, projectKey, landscape)); // Create project details div var projectDetails = document.createElement("div"); projectDetails.className = "project-container"; // Add project title var projectTitle = document.createElement("div"); projectTitle.className = "project-title"; projectTitle.innerHTML = project.title; projectDetails.appendChild(projectTitle); // Add project description var projectDescription = document.createElement("div"); projectDescription.className = "project-description"; projectDescription.innerHTML = project.description; projectDetails.appendChild(projectDescription); // Add project button if url provided if(project.url !== "") { var viewProjectButton = document.createElement("a"); viewProjectButton.className = "view-project-btn btn btn-default"; viewProjectButton.href = project.url; viewProjectButton.innerHTML = "View Project Page" projectDetails.appendChild(viewProjectButton); } projectDiv.appendChild(projectDetails); return projectDiv; } function createTabPageButton(tabName, tabPageDiv) { // Add tab button var tabButton = document.createElement("button"); tabButton.onclick = changeActiveTab; tabButton.innerHTML = tabName; tabButton.pageDiv = tabPageDiv; tabs[tabName] = tabButton; return tabButton; } function createProjectPageTab(tabName, projects) { var projectPageDiv = document.createElement("div"); projectPageDiv.className = "tab-page"; var i = 0; for(var projectKey in projects) { 
projectPageDiv.appendChild(createProjectEntry(projectKey, projects[projectKey])); if(i++ < Object.keys(projects).length - 1) { projectPageDiv.appendChild(document.createElement("hr")); } } tabMenu.appendChild(createTabPageButton(tabName, projectPageDiv)); return projectPageDiv; } function createImageOrVideoElement(imageOrVideo, landscape) { var element; if (imageOrVideo.endsWith(".mp4")) { element = document.createElement("video"); element.loop = "true"; element.muted = "true"; element.autoplay = "autoplay"; element.className = landscape ? "image-landscape" : "image-portrait"; source = document.createElement("source"); source.src = imageOrVideo; source.type = "video/mp4"; element.appendChild(source); } else { element = document.createElement("img"); element.className = landscape ? "image-landscape" : "image-portrait"; element.src = imageOrVideo; } return element; } function createCarousel(images, key, landscape) { if(images.length === 1) { return createImageOrVideoElement(images[0], landscape); } var carousel = document.createElement("div"); carousel.className = "carousel slide " + (landscape ? 
"image-landscape" : "image-portrait"); carousel.style.display = "flex"; carousel.id = key + "-carousel" carousel.setAttribute("data-interval", "false"); var indicators = document.createElement("ol"); indicators.className = "carousel-indicators"; carousel.appendChild(indicators); var inner = document.createElement("div"); inner.className = "carousel-inner"; carousel.appendChild(inner); for(var i = 0; i < images.length; i++) { var indicator = document.createElement("li"); indicator.setAttribute("data-target", "#" + carousel.id); indicators.appendChild(indicator); var imageDiv = document.createElement("div"); imageDiv.className = "item"; if(i == 0) { indicator.className = "active"; imageDiv.className += " active"; } imageDiv.appendChild(createImageOrVideoElement(images[i], landscape)); inner.appendChild(imageDiv); } var leftControl = document.createElement("a"); leftControl.href = "#" + carousel.id; leftControl.className = "left carousel-control"; leftControl.setAttribute("data-slide", "prev"); carousel.appendChild(leftControl); var leftIcon = document.createElement("span"); leftIcon.className = "glyphicon glyphicon-chevron-left"; leftControl.appendChild(leftIcon); var rightControl = document.createElement("a"); rightControl.href = "#" + carousel.id; rightControl.className = "right carousel-control"; rightControl.setAttribute("data-slide", "next"); carousel.appendChild(rightControl); var rightIcon = document.createElement("span"); rightIcon.className = "glyphicon glyphicon-chevron-right"; rightControl.appendChild(rightIcon); return carousel; } //--------------------------------------------------- // TAB MENU //--------------------------------------------------- var activeTab = null; function changeActiveTab() { if(activeTab) { activeTab.id = ""; activeTab.pageDiv.classList.remove("fade-in"); } this.id = "tab-active"; this.pageDiv.classList.add("fade-in"); activeTab = this; } // Show project directly if key provided // E.g. 
https://bitsauce.github.io/#overworld var initialProjectKey = location.hash.substr(1); if(initialProjectKey === "") { var url = new URL(window.location.href); initialProjectKey = url.searchParams.get("project"); } // Setup tab menu var tabMenu = document.getElementById("tab-menu"); var tabs = {}; var initalTabName = undefined; for(var tabName in projectTabs) { contentContainer.appendChild(createProjectPageTab(tabName, projectTabs[tabName])); // Check if any of the projects on this page should be displayed after load for(var projectKey in projectTabs[tabName]) { if(projectKey === initialProjectKey) { initalTabName = tabName; } } } $(document).ready(function() { if(initalTabName) { changeActiveTab.call(tabs[initalTabName]); // Schedule smooth scroll function smoothScroll() { $("#" + initialProjectKey + "-div")[0].scrollIntoView({ behavior: "smooth", block: "center" }); activeTab.pageDiv.removeEventListener("transitionend", smoothScroll); } activeTab.pageDiv.addEventListener("transitionend", smoothScroll); } else { changeActiveTab.call(tabs[Object.keys(tabs)[0]]); } }); function nextCarouselImage() { $(".carousel", activeTab.pageDiv).each(function() { $(this).carousel("next"); }); } /*var carouselFrequency = 10000; var carouselInterval = setInterval(nextCarouselImage, carouselFrequency); $('.carousel').on('slide.bs.carousel', function () { clearInterval(carouselInterval); carouselInterval = setInterval(nextCarouselImage, carouselFrequency); });*/ <file_sep>var projectTabs = { "Graphics & Games": { "overworld": { title: "Overworld", description: ` <p> Overworld is a 2D tile-based sandbox game written in C++ that I have been in developing in my spare time since late 2011. The project started of as a testbed for testing the features of my game engine, <a href=\"https://github.com/bitsauce/Sauce3D\">Sauce3D</a>, which was being developed alongside the game. 
Eventually, I started to dedicate more time to this project, as I had many ideas of how this game would differentiate itself from similar games such as <i>Minecraft</i> and <i>Terraria</i>. </p> <p> The game has the following features in its current state: </p> <ul> <li>Infinite world generation on the GPU</li> <li>Destructible terrain with seamless tiles</li> <li>Dynamic and static 2D lighting</li> <li>Socket based (UDP) client-server multiplayer through <a href=\"http://www.jenkinssoftware.com/\">RakNet</a></li> <li>2D skeletal animation system</li> <li>Items and inventory system</li> <li>Menus and in-game chat</li> </ul> <p> The project taught me to: </p> <ul> <li>Utilize the GPU for general purpose computations, reducing the workload of the CPU significantly.</li> <li>Design structures and utilize design patterns that allow for easy access of resources for the programmer.</li> <li>Manage Visual Studio projects with complicated dependencies, with a focus on ensuring out-of-the-box compilation.</li> <li>Avoid overscoping to reach a minimum viable product quicker.</li> </ul> `, url: "https://bitsauce.github.io/OverworldGame", images: [ "images/overworld/overworld_1.png", "images/overworld/overworld_2.png", "images/overworld/overworld_3.gif", "images/overworld/overworld_4.png", "images/overworld/networking_showcase.mp4" ] }, "engine": { title: "Sauce3D (Game Engine)", description: ` <p> Sauce3D is the name of the custom game engine that I have been working on since mid-2010. The project has primarily served as a means for me to develop a better understanding of how a modern game engine be constructed, where many of the design decisions of Sauce3D are inspired by popular game engines and libraries such as <a href=\"https://www.unrealengine.com/\">Unreal Engine</a>, <a href=\"https://libgdx.badlogicgames.com/\">libGDX</a>, and <a href=\"https://www.garagegames.com/\">Torque3D</a>. 
I have put great emphasis on ease-of-use for the programmer while also striving for excellent run-time performance. Sauce3D is written in C++, and it uses OpenGL 3.2 for rendering, and <a href=\"https://www.libsdl.org/\">Simple DirectMedia Library</a> for window management and communication with the OS. </p> <p> Here is a summary of the main features of Sauce3D: </p> <ul> <li>Engine:</li> <ul> <li>Automatic asset management, ensuring:</li> <ul> <li>Assets are easily available to all game classes.</li> <li>Assets are only loaded once they are needed.</li> </ul> <li>Logging macros that can dumps detailed information.</li> <li>Scene management that will automatically propagate events through a scene hierarchy.</li> <li>Input handling supporting keyboard, mouse, and gamepads.</li> </ul> <li>Graphics:</li> <ul> <li>Easy-to-use primitive rendering (indexed and non-indexed).</li> <li>.obj mesh loading</li> <li>Sprite batching to reduce draw calls.</li> <li>Bitmap font rendering through <a href=\"http://www.angelcode.com/products/bmfont/\">BMFont</a>.</li> <li>Textures and render targets supporting a variety of formats, including integer and double precision.</li> <li>Multiple renter targets.</li> <li>Automatic texture atlas generation.</li> <li>Vertex, fragment, and geometry shaders.</li> </ul> <li>Physics:</li> <ul> <li> Includes a custom implementation of a rigid body dynamics system that supports up to 1.5K bodies while maintaining 30 FPS on a 2.8GHz Intel i7. </li> </ul> <li>UI:</li> <ul> <li> Includes an implementation of a UI system that contains: <ul> <li>Resizable bitmap buttons.</li> <li>Modal dialog boxes.</li> <li>Single-line input fields.</li> <li>Cross-fade transitions.</li> </ul> </li> </ul> <li>Networking:</li> <ul> <li> Networking is not part of the engine. However, it could be easily implemented by, for example, adding <a href=\"http://www.jenkinssoftware.com/\">RakNet</a> to the project as I did in Overworld. 
</li> </ul> `, url: "https://github.com/bitsauce/Sauce3D", images: [ "images/sauce3d/physics_showcase.mp4", "images/sauce3d/shadow_casting_2d.mp4", "images/sauce3d/mandelbrot_zoom.mp4", "images/sauce3d/gui_showcase.mp4", "images/sauce3d/simple_3d.mp4" ] }, "soft-shadows": { title: "Screen-Space Soft Shadows", description: ` <p> As part of the course <i>CSE 274 – Selected Topics in Graphics</i> at UCSD winter 2018 – a course about sampling and reconstruction of visual appearance – we were interested in exploring the reconstruction of interactive soft shadows. We implemented the paper <a href=\"http://graphics.berkeley.edu/papers/UdayMehta-AAF-2012-12/\">Axis-Aligned Filtering for Interactive Sampled Soft Shadows</a> for the practical part of the course. </p> <p>In summary:</p> <ul> <li> We implemented <a href=\"http://graphics.berkeley.edu/papers/UdayMehta-AAF-2012-12/\">Axis-Aligned Filtering for Interactive Sampled Soft Shadows</a>; a method in which: </li> <ul> <li> Soft shadows are rendered by applying a spatially-varying screen-space gaussian blur, where the amount of blurring is determined through an analysis of the frequencies of the occlusion spectrum. </li> <li> The soft shadow rendering equation is solved by Monte Carlo sampling, sampling points on a <i>planar</i> light source. </li> <li> The method also uses <i>adaptive sampling</i> to ensure that regions with high uncertainty are sampled more to reduce the overall noise. </li> </ul> <li> Our results: </li> <ul> <li> We achieved interactive framerates of about 5-30 FPS on a Nvidia GTX 970. </li> <li> No temporal noise, however, there is some visible "smudging" in the penumbras of the shadows caused by complex geometry (e.g. the flower). 
</li> </ul> <li>Our implementation uses Nvidia's <a href=\"https://developer.nvidia.com/optix\">OptiX</a> ray-tracing framework for real-time ray-tracing.</li> <li>A more detailed write-up of our implementation can be found on <a href=\"https://bitsauce.github.io/Axis-Aligned-Filtering-Soft-Shadows/\">this page</a>.</li> </ul> `, url: "https://bitsauce.github.io/Axis-Aligned-Filtering-Soft-Shadows/", images: [ "images/soft-shadows/image_0.png", "images/soft-shadows/image_1.png", "images/soft-shadows/image_2.png", "images/soft-shadows/image_3.png", "images/soft-shadows/image_4.png", "images/soft-shadows/image_5.png", "images/soft-shadows/image_6.png" ] }, "optical-flow-smoke": { title: "Optical Flow-Based Smoke", description: ` <p> As part of the course <i>CSE 163 – Advanced Computer Graphics</i> at UCSD spring 2018, we were interested in replicating a smooth smoke/explosion effect that can be found in many modern games, e.g. Star Citizen. This implementation uses optical flow maps to blend between the pre-rendered frames to simulate smooth motion. </p> <p>Note that the floating crosses are watermarks generated by the free version of FumeFX.</p> <p>In summary:</p> <ul> <li> Alpha, diffuse, normal and optical flow maps are pre-computed by simulating the effect in 3ds Max with the FumeFX plugin. </li> <li> The optical flow maps are then used in a shader to smoothly blend the transitions between the pre-computed frames. </li> <li> A simple Phong shader is finally applied to the 2D billboard to give the effect some "depth." </li> <li> Runs at real-time speeds at nearly no performance cost. 
</li> </ul> `, url: "https://github.com/Mytino/Realtime_VFX", images: [ "images/optical-flow-smoke/image_0.gif" ] }, "image-and-mesh": { title: "Image and Mesh Processing", description: ` <p> As part of taking the course <a href=\"http://cseweb.ucsd.edu/~viscomp/classes/cse163/sp18/163.html\">CSE 163 – Advanced Computer Graphics</a> at UCSD spring 2018, we implemented various techniques in image and mesh processing: </p> <ul> <li>Image processing:</li> <ul> <li>Brightness, contrast, saturation, gamma adjustment.</li> <li>Quantization and dithering.</li> <li>Integer convolution for blurring, sharpening, and edge detection.</li> <li>Anti-aliased scale and shifting.</li> <li>Non-linear fish-eye filter.</li> </ul> <li>Mesh processing:</li> <ul> <li>Phong-based OpenGL rendering.</li> <li>Edge collapsing.</li> <li> Fast LOD generation by selecting edges that minimized quadric error, as described in Garland's <i><a href=\"https://mgarland.org/files/papers/quadrics.pdf\">Surface Simplification Using Quadric Error Metrics</a></i> paper. </li> </ul> </ul> `, url: "https://bitsauce.github.io/Geometric-Modeling-and-Mesh-Simplification/", images: [ "images/image-and-mesh/bunnies.jpg", "images/image-and-mesh/flower_quantize_plot.png", "images/image-and-mesh/flower_FloydSteinbergDither_plot.png", "images/image-and-mesh/mandrill_blur_plot.png", "images/image-and-mesh/flower_edgeDetect_plot.png", "images/image-and-mesh/wave_size_200_339_scale_plot.png", "images/image-and-mesh/shift_animation.gif", "images/image-and-mesh/shed.bmp", "images/image-and-mesh/mandrill.bmp" ] }, "holmgang": { title: "Holmgang (Global Game Jam 16)", description: ` <i>Settle your disputes through Holmgang. But pay heed to the rituals, or die like a Niding!</i> Holmgang is a local 1v1 2D fighting game. In the game, rituals act as gameplay modifiers throughout the fight. I was primary gameplay programmer in the 4-man team. 
Made with <a href=\"https://www.yoyogames.com/\">Game Maker Studio</a> `, url: "https://github.com/bitsauce/GGJ16", images: [ "images/holmgang/holmgang_1.png", "images/holmgang/holmgang_2.png", "images/holmgang/holmgang_3.png" ] }, "tankai": { title: "Tank AI", description: ` Framework for an AI competition for the game development student organization <a href=\"https://www.facebook.com/groups/1498086230434830/\">Abakus GameDev</a>. The game features top-down tank control gameplay, and the goal of the game is to be the last man standing. Commands are issued using a socket-based solution, and the game is written in Java with <a href=\"https://libgdx.badlogicgames.com/\">libGDX</a>. `, url: "https://github.com/bitsauce/AICompo", images: ["images/tankai_1.gif"] } }, "Machine Learning": { "reinforcement-learning": { title: "Deep Reinforcement Learning for Autonomous Vehicles – <p/> Master's Thesis", description: ` <p> Eager to explore a frontier of technology that promise to change society as we know it, I joined the autonomous vehicle lab for my final two semesters at NTNU. Per my supervisor, we decided to explore reinforcement learning for autonomous vehicles (AV). The prospect of creating artificial intelligence that can learn to drive and play games by trial-and-error was fascinating to me, and is also a project that allows me to combine my experience with game engines and computer vision since reinforcement learning for autonomous vehicles typically use driving simulators such as <a href=\"https://carla.readthedocs.io/\">CARLA</a> or <a href=\"https://github.com/Microsoft/AirSim\">AirSim</a>, both of which are simulators that run on <a href=\"https://www.unrealengine.com/">Unreal Engine 4</a>. </p> <p> For the practical part of this project, I implemented the <a href=\"https://blog.openai.com/openai-baselines-ppo/">Proximal Policy Optimization paper</a> by OpenAI – a method that, as of 2018, stands as the baseline in general-purpose reinforcement learning. 
The main finding in my <a href=\"https://github.com/bitsauce/CarRacing-v0-ppo/blob/master/Project_Report.pdf\">precursory study</a> (also see <a href=\"https://youtu.be/8X_LSy4TF84\">this video</a>) was that it was hugely impactful to scale the means of the gaussian distributions to the range of valid actions; for example, if action 0 represents the turning of the car, then its valid range of values might be [-45, 45] degrees. If the network is outputting an unbounded range of values (effectively [-∞, ∞]) for the action mean, it will take much longer for the network to converge. Scaling the means to the appropriate range for every action turned out to substantially increase training speed, and this operation is, to the best of my knowledge, not used by the authors of PPO nor is it present in the <a href=\"https://github.com/openai/baselines/\">official PPO source code</a> from OpenAI. </p> <p> The initial test environment was, admittedly, a bit simplistic. Therefore, I set out to create a custom RL environment in CARLA which is now made public under <a href=\"https://github.com/bitsauce/Carla-ppo\">this repository</a>. One challenge that arose when using a more complex environment is that the training time went up drastically. I spent the last half of my master's trying to find a good model that learned to drive reliably and within a a day. <a href=\"https://www.youtube.com/watch?feature=player_embedded&v=iF502iJKTIY\">This video</a> shows the results from those experiments, and this is the accompanying <a href=\"https://github.com/bitsauce/Carla-ppo/blob/master/doc/Accelerating_Training_of_DeepRL_Based_AV_Agents_Through_Env_Designs.pdf\">final report</a>. 
</p> `, url: "https://github.com/bitsauce/Carla-ppo", images: [ "images/reinforcement-learning/carla_trailing_cam.png", "images/reinforcement-learning/carla_state_space.png", "images/reinforcement-learning/ppo_training_pipeline.png", "images/reinforcement-learning/training_time.png" ] }, "keypoint-rcnn": { title: "Keypoint R-CNN", description: ` Extended the Mask R-CNN model to detect human keypoint in images of people as part of the course <a href=\"https://cseweb.ucsd.edu/classes/sp18/cse252C-a/\">CSE 252C - Selected Topics in Vision and Learning</a> at UCSD. By adding a keypoint prediction head parallel to the bounding box, classification and mask heads, this model predicts 17 different probability masks (one for each joint of the person) denoting the probability of finding a given joint type on a particular location in the image. The model was trained on the <a href=\"http://cocodataset.org/#keypoints-2018\">MS COCO human keypoint</a> dataset. `, url: "https://github.com/bitsauce/Keypoint_RCNN", images: [ "images/keypoint-rcnn/model_overview.png", "images/keypoint-rcnn/gt_example.png", "images/keypoint-rcnn/predicted_kps_examples.png", "images/keypoint-rcnn/prediction_kp_heatmap_2063.png", "images/keypoint-rcnn/model_heads.png" ] }, "deep-learning-3d": { title: "Deep Learning on 3D Data", description: ` Evaluated <a href=\"https://arxiv.org/abs/1612.00593\">PointNet</a>, <a href=\"https://arxiv.org/abs/1706.02413\">PointNet++</a> and <a href=\"https://arxiv.org/abs/1612.02808\">ShapePFCN</a> for segmenting point cloud data of plants in various stages of growth and various environments. Point clouds from the laser scanner would often exceed 1 million points, so we experimented with various data preprocessing and architectural changes to support these point clouds. 
`, url: "", images: [ "images/deep-learning-3d/plant_example.png", "images/deep-learning-3d/plant_segmentations.png", "images/deep-learning-3d/plant_shapepfcn.png" ] }, "image-captioning": { title: "Image Captioning", description: ` As part of the course <a href=\"https://cse.ucsd.edu/undergraduate/cse-190-topics-computer-science-and-engineering\">CSE 190 - Neural Networks</a>, we created a deep neural network for automatic captioning of images. The network uses a convolutional neural network (CNN) to generate features, passing the features through a recurrent nerual network (RNN) to generate captions for the images. The network was trained on the <a href=\"http://nlp.cs.illinois.edu/HockenmaierGroup/Framing_Image_Description/KCCA.html\">flickr8k dataset</a>. `, url: "", images: [ "images/image-captioning/sample_0.png", "images/image-captioning/sample_1.png", "images/image-captioning/sample_2.png", "images/image-captioning/model.png" ] }, "computer-vision": { title: "Autonomous Vehicle Perception", description: ` Trained Faster R-CNN on the <a href=\"https://github.com/udacity/self-driving-car\">udacity dataset</a> for my project in <a href=\"https://cse.ucsd.edu/undergraduate/cse-190-topics-computer-science-and-engineering\">TDT4265 - Computer Visison</a>. Detects all of these 5 classes: cars, trucks, pedestrians, traffic lights and bikers with moderate accuracy. Written in Python 3 using <a href=\"http://caffe.berkeleyvision.org/tutorial/interfaces.html\">pycaffe</a> -- a port of the deep learning framework, <a href=\"http://caffe.berkeleyvision.org/\">Caffe</a>. 
`, url: "https://github.com/bitsauce/Computer_Vision_Project", images: [ "images/computer-vision/image_0.png", "images/computer-vision/image_1.png", "images/computer-vision/image_2.png" ] } }, "Miscellaneous": { "grabster": { title: "Grabster", description: ` Summer job working as a backend and navigation programmer for Grabster's Android app (<a href=\"https://play.google.com/store/apps/details?id=no.grabster.android\">play store link</a>) made in <a href=\"https://facebook.github.io/react-native/\">React Native</a>. Grabster is an app which makes it easy to buy and sell homemade food. Any time you're hungry, you can just pick up your phone and see what's cooking in your neighborhood. `, url: "http://grabster.no/#/en?_k=ruj3ro", images: [ "https://lh3.googleusercontent.com/8ant_pQN91flxx-b-ONa2yLSpM62ko9SheIysXn4q8Ha-4UwUzEavzNSKTrgC7s6dQ=h900-rw", "https://lh3.googleusercontent.com/Ve3ZkrTeMPAJQxa1vygjRpJJ7aXxYeIbmuuDQl-Ybz9B9zRTqIZC8AyJTafgADucBlM=h900-rw" ], orientation: "portrait" }, "hoverlookup": { title: "HoverLookup Chrome Extension", description: ` Google Chrome extension written in JavaScript. Pressing CTRL + SHIFT while hovering over a word will show the wiktionary entry for that word in an inline window.\ As I was transitioning from Firefox to Chrome, where I had the <i>Wiktionary and Google Translate</i> add-on installed, I was in need of a similar extension. Thus, I made this extension and later published it to the Google Web Store (<a href=\"https://chrome.google.com/webstore/detail/hover-lookup/ogjdcbnhgjgabidifpnpiidgbkhlpnof\">link</a>). 
`, url: "https://github.com/bitsauce/HoverLookup_ChromeExtension", images: [ "images/hoverlookup/hoverlookup_1.png", "images/hoverlookup/hoverlookup_2.png", "images/hoverlookup/hoverlookup_3.png", "images/hoverlookup/hoverlookup_4.png" ] }, "aside": { title: "AngelScript IDE", description: ` A simple IDE for the scripting language <a href=\"http://www.angelcode.com/angelscript/\">Angel Script</a> made with <a href=\"https://www.qt.io/\">Qt 4</a> and <a href=\"http://pyqt.sourceforge.net/Docs/QScintilla2/\">QScintilla</a> for syntax highlighting. Supports opening and creating projects, cascade and tab-based editing, and simple debug functionality with breakpoint and code-stepping. `, url: "https://github.com/bitsauce/ASIDE", images: [ "https://i.imgur.com/ucEvRK3.png", "https://i.imgur.com/c9k6v7d.png" ] }, "compiler-construction": { title: "Compiler Construction", description: ` Created a basic compiler for the Very Simple Language (VSL) programming language. Does parsing, tokenizing and generates assembly code for the program. Some features include: lexical scoping, function calls, recursive function calls, branching, while-loops and return statements. Written in C. `, url: "", images: ["https://i.imgur.com/mAnSEpH.png"] } } };
a29d833d7eb0e89c5ff91be29e273cabc2c4647b
[ "JavaScript" ]
2
JavaScript
bitsauce/bitsauce.github.io
dbdfa0dd19f9a679962e70dc157ec77dd9e49e1b
b89d45d68c31912c4e88b13f365822f577d4a4f2
refs/heads/master
<repo_name>artan001/Codeigniter<file_sep>/application/models/feth_province_model.php <?php defined('BASEPATH') OR exit('No direct script access allowed'); /* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ /** * Description of feth_province_model * * @author Artan.s */ class feth_province_model extends CI_Model{ public function __construct() { parent::__construct(); } public function getProvince(){ $sql = "SELECT * FROM province"; $query = $this->db->query($sql); $query = $query->row(); return $query; } } <file_sep>/application/controllers/Test_login.php <?php defined('BASEPATH') OR exit('No direct script access allowed'); /* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ /** * Description of test_login * * @author Artan.s */ class Test_login extends CI_Controller { public function __construct() { parent::__construct(); $this->load->model('login_model'); } public function index(){ $this->load->view('testv'); } public function login_test() { $username = $this->input->post('username'); $password = $this->input->post('password'); $query = $this->login_model->user_login($username, $password); if ($query) { $chk = $query->type; if ($chk == '1') { $this->session->set_userdata('user', $chk); // echo $chk . "admin"; // echo '<div class="button banner_button"><a href="../test_login/logout">LOGOUT</a></div>'; $obj["message"] = "- กรุณตรวจสอบ username กับ password ให้<PASSWORD> \n"; } else { $this->session->set_userdata('user', $chk); $obj["message"] = "- กรุณตรวจสอบ username กับ password ให้<PASSWORD> \n"; // echo $chk . 
"user"; // echo '<div class="button banner_button"><a href="../test_login/logout">LOGOUT</a></div>'; } $obj["result"] = true; } else { $this->session->set_userdata('user', 'null'); // echo "ไม่มีในระบบ"; // echo '<div class="button banner_button"><a href="../test_login/logout">LOGOUT</a></div>'; $obj["result"] = false; $obj["message"] = "- กรุณตรวจสอบ username กับ password ให้ถ<PASSWORD> \n"; } echo json_encode($obj); } public function logout() { $this->session->sess_destroy(); redirect('/test_login'); exit; } } <file_sep>/application/views/layout/modal/modal_login.php <!-- Modal Login--> <div class="modal fade" id="modal-login" tabindex="-1" role="dialog" aria-labelledby="myModalLabel"> <div class="modal-dialog" role="document"> <div class="modal-content"> <div class="modal-header"> <h3>Sign In</h3> <div class="d-flex justify-content-end social_icon"> <span><i class="fab fa-facebook-square"></i></span> <span><i class="fab fa-google-plus-square"></i></span> <span><i class="fab fa-twitter-square"></i></span> </div> </div> <div class="modal-body"> <div class="card-body"> <form> <div class="input-group form-group"> <div class="input-group-prepend"> <span class="input-group-text"><i class="fas fa-user"></i></span> </div> <input type="text" class="form-control" placeholder="username" id ="username" name="username"> </div> <div class="input-group form-group"> <div class="input-group-prepend"> <span class="input-group-text"><i class="fas fa-key"></i></span> </div> <input type="password" class="form-control" placeholder="<PASSWORD>" id="password" name="password"> </div> <div class="input-group form-group"> <div class="input-group-prepend"> <span class="input-group-text" for="usertype">TYPE</span> </div> <select class="form-control" id="usertype"> <option selected>-SELECT-</option> <?php foreach ( $dbrow as $key ): ?> <?php echo'<option value = "'.$key->ID . '">' . 
$key->type_name .'</option>'; ?> <?php endforeach; ?> <!--<option value="1">ADMIN</option>--> <!--<option value="2">CUSTOMER</option>--> <!--<option value="3">EMPLOYEE</option>--> </select> </div> <div class="col-md-12"> <div class="alert alert-danger" style="display:none" role="alert" id="message-alert-login"></div> </div> <div class="card-footer"> <div class="d-flex justify-content-center "> Don't have an account?<a href="#">Sign Up</a> </div> <div class="d-flex justify-content-center"> <a href="#">Forgot your password?</a> </div> </div> </form> </div> </div> <div class="modal-footer"> <div class="form-group"> <!--<input type="submit" value="Login" id="btn-login1" class="btn float-right login_btn">--> <button type="button" id="btn-login" class="btn float-right login_btn">Login</button> <button type="button" class="btn float-right login_btn" data-dismiss="modal">Close</button> </div> </div> </div> </div> </div> <!--End Modal Login --> <!--Script Login--> <script src="assets/allaction_js/login.js"></script> <!--End Script Login--><file_sep>/application/views/template/TEMPLATE.php <!DOCTYPE html> <!-- To change this license header, choose License Headers in Project Properties. To change this template file, choose Tools | Templates and open the template in the editor. 
--> <html lang="en"> <head> <title>HOME</title> <meta charset="utf-8"> <meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta name="description" content="THANOOLUK shop project"> <meta name="viewport" content="width=device-width, initial-scale=1"> <!-- Bootstrap core CSS --> <link rel="stylesheet" type="text/css" href="assets/styles/bootstrap4/bootstrap.min.css"> <link href="assets/plugins/fontawesome-free-5.0.1/css/fontawesome-all.css" rel="stylesheet" type="text/css"> <link rel="stylesheet" type="text/css" href="assets/plugins/OwlCarousel2-2.2.1/owl.carousel.css"> <link rel="stylesheet" type="text/css" href="assets/plugins/OwlCarousel2-2.2.1/owl.theme.default.css"> <link rel="stylesheet" type="text/css" href="assets/plugins/OwlCarousel2-2.2.1/animate.css"> <link rel="stylesheet" type="text/css" href="assets/plugins/slick-1.8.0/slick.css"> <?php $this->load->view("/" . $headadd);?> <!-- jQuery --> <script src="assets/jquery/jquery-3.3.1.js"></script> <!--Fontawesome CDN--> <link rel="stylesheet" href="https://use.fontawesome.com/releases/v5.3.1/css/all.css" integrity="<KEY>" crossorigin="anonymous"> <!--Custom styles--> <link rel="stylesheet" type="text/css" href="assets/styles/bootstrap4/custom/login-styles.css"> </head> <body> <div class="super_container"> <?php $this->load->view("/" . $header); $this->load->view("/" . $page); $this->load->view("/" . $footer); foreach ( $modal as $value){ $this->load->view("/" . $value); } foreach ( $modaladd as $values){ $this->load->view("/" . 
$values); } ?> </div> <!--Jquery--> <script src="assets/jquery/jquery-3.3.1.min.js"></script> <!--Bootstrap--> <script src="assets/styles/bootstrap4/popper.js"></script> <script src="assets/styles/bootstrap4/bootstrap.min.js"></script> <script src="assets/plugins/greensock/TweenMax.min.js"></script> <script src="assets/plugins/greensock/TimelineMax.min.js"></script> <script src="assets/plugins/scrollmagic/ScrollMagic.min.js"></script> <script src="assets/plugins/greensock/animation.gsap.min.js"></script> <script src="assets/plugins/greensock/ScrollToPlugin.min.js"></script> <script src="assets/plugins/OwlCarousel2-2.2.1/owl.carousel.js"></script> <script src="assets/plugins/slick-1.8.0/slick.js"></script> <script src="assets/plugins/easing/easing.js"></script> <?php $this->load->view("/" . $scriptadd); ?> </body> </html> <file_sep>/application/config/my_constants.php <?php defined('BASEPATH') OR exit('No direct script access allowed'); /* * custom constant file * Create by powerbeer * */ define('UPLOAD_PATH',"userdata" .DIRECTORY_SEPARATOR); define('FILE_ATTACH_PATH',"file_attach" .DIRECTORY_SEPARATOR); define('LOG_WITH_SYSTEM', 1); // errors define('LOG_WITH_USER', 2); // errors define('TABLE_LOG','PCC_AUTHEN_LOG'); //Table Log //template define('TEMPLATE_A', 'template/TEMPLATE'); // หน้าหลักเรียกใช้ได้หมด แต่ต่างกับข้างล่างคือ ต้องกำหนอดส่วนของ TOP กับ DOWN ที่เรียกสคริป define('TEMPLATE_WELCOME', 'template/TEMPLATE-HOME'); // หน้าแรก define('TEMPLATE_SHOP', 'template/TEMPLATE-SHOP'); // หน้า shop define('TEMPLATE_PRODUCT', 'template/TEMPLATE-PRODUCT'); // หน้าสินค้า define('TEMPLATE_CART', 'template/TEMPLATE-CART'); // define('TEMPLATE_BLOG', 'template/TEMPLATE-BLOG'); // define('TEMPLATE_BLOGSINGLE', 'template/TEMPLATE-BLOGSINGLE'); // define('TEMPLATE_CONTACT', 'template/TEMPLATE-CONTACT'); // define('TEMPLATE_REGULAR', 'template/TEMPLATE-REGULAR'); // ?><file_sep>/application/controllers/Shop_Controller.php <?php defined('BASEPATH') OR exit('No direct script 
access allowed'); /* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ /** * Description of Shop_Controller * * @author Artan.s */ class Shop_Controller extends MY_Controller{ public function __construct() { parent::__construct(); } public function index(){ // $data = array(); // $data['header'] = 'layout/header'; // $data['page'] = 'layout/shop/page_shop'; // $data['footer'] = 'layout/footer'; // $data['modal'] = ['layout/modal/modal_login', // 'layout/modal/modal_register']; // $this->load->view(TEMPLATE_SHOP,$data); // เรียก TEMPLATE_ALL $this->headadd = 'layout/shop/head_shop'; $this->page = 'layout/shop/page_shop'; // $this->modaladd = ['layout/modal/modal_register']; ถ้ามี modalที่ต้องการเพิ่ม ให้เพิ่มเป็นรูปแบบ array ถ้าไม่มีให้ทำแบบด้านล่าง $this->modaladd = []; $this->scriptadd = 'layout/shop/down_shop'; $this->dataresult = TEMPLATE_A; //defind มาจาก my_constants ซึ่ง custom template เองใน view/template/'''' $this->newlayout(); } } <file_sep>/application/controllers/Login_Controller.php <?php defined('BASEPATH') OR exit('No direct script access allowed'); /* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. 
*/ /** * Description of Login_Controller * * @author Artan.s */ class Login_Controller extends MY_Controller { function __construct() { parent::__construct(); $this->load->model('login_model'); } public function index() { $this->headadd = 'layout/home/head_home'; $this->page = 'layout/home/page_home'; // $this->modaladd = ['layout/modal/modal_register']; ถ้ามี modalที่ต้องการเพิ่ม ให้เพิ่มเป็นรูปแบบ array ถ้าไม่มีให้ทำแบบด้านล่าง $this->modaladd = []; $this->scriptadd = 'layout/home/down_home'; $this->dataresult = TEMPLATE_A; //defind มาจาก my_constants ซึ่ง custom template เองใน view/template/'''' $this->newlayout(); // เรียก layout มาจาก MY_Controller ซึ่งกำหนด page และ template เอง } public function login() { $username = $this->input->post('username'); $password = $this->input->post('password'); $query = $this->login_model->user_login($username, $password); if ($query) { $status = $query->status; if ($status == "1") { $chk = $query->type; if ($chk == '1') { $this->session->set_userdata('user', $chk); } else { $this->session->set_userdata('user', $chk); } $obj["result"] = true; } elseif ($status == "2") { $obj["result"] = false; $obj["message"] = "- กรุณายืนยันตัวตน \n"; } else { $obj["result"] = false; $obj["message"] = "- กรุณายืนยันตัวตน \n"; } } else { $this->session->set_userdata('user', 'null'); $obj["result"] = false; $obj["message"] = "- กรุณาตรวจสอบ username กับ password ให้ถูกต้อง \n"; } echo json_encode($obj); } public function chkUserSession() { $chk = $this->seccion->userdata('user'); // ตรวจสอบ session } public function logout() { $this->session->sess_destroy(); redirect('/', 'refresh'); exit; } } <file_sep>/application/views/login_page.php <!DOCTYPE html> <!-- To change this license header, choose License Headers in Project Properties. To change this template file, choose Tools | Templates and open the template in the editor. 
--> <html lang="en"> <head> <title>THANOOLUK</title> <base href="<?php echo base_url(); ?>"> <meta charset="utf-8"> <meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta name="description" content="THANOOLUK shop project"> <meta name="viewport" content="width=device-width, initial-scale=1"> <!-- Bootstrap core CSS --> <link rel="stylesheet" type="text/css" href="assets/styles/bootstrap4/bootstrap.min.css"> <link href="assets/plugins/fontawesome-free-5.0.1/css/fontawesome-all.css" rel="stylesheet" type="text/css"> <link rel="stylesheet" type="text/css" href="assets/plugins/OwlCarousel2-2.2.1/owl.carousel.css"> <link rel="stylesheet" type="text/css" href="assets/plugins/OwlCarousel2-2.2.1/owl.theme.default.css"> <link rel="stylesheet" type="text/css" href="assets/plugins/OwlCarousel2-2.2.1/animate.css"> <link rel="stylesheet" type="text/css" href="assets/plugins/slick-1.8.0/slick.css"> <link rel="stylesheet" type="text/css" href="assets/styles/main_styles.css"> <link rel="stylesheet" type="text/css" href="assets/styles/responsive.css"> <!-- jQuery --> <script src="assets/jquery/jquery-3.3.1.js"></script> <!-- Custom styles for this template --> <!--<link href="assets/styles/bootstrap4/custom/shop-homepage.css" rel="stylesheet">--> <!--Fontawesome CDN--> <link rel="stylesheet" href="https://use.fontawesome.com/releases/v5.3.1/css/all.css" integrity="<KEY>" crossorigin="anonymous"> <!--Custom styles--> <link rel="stylesheet" type="text/css" href="assets/styles/bootstrap4/custom/styles.css"> </head> <body> <div class="container"> <h1 class="page-header text-center">CodeIgniter Ajax Login using jQuery</h1> <div class="row"> <div class="col-sm-4 col-sm-offset-4"> <div class="login-panel panel panel-primary"> <div class="panel-heading"> <h3 class="panel-title"><span class="glyphicon glyphicon-lock"></span> Login </h3> </div> <div class="panel-body"> <form > <!--<form method="post" action="login_controller/login_Test">--> <fieldset> <div class="form-group"> <input 
class="form-control" placeholder="Username" type="text" name="username" id="username"> </div> <div class="form-group"> <input class="form-control" placeholder="<PASSWORD>" type="<PASSWORD>" name="password" id="<PASSWORD>"> </div> <button type="submit" class="btn btn-lg btn-primary btn-block" id="login"><span id="logText">LOGIN</span></button> </fieldset> </form> </div> </div> <div id="responseDiv" class="alert text-center" style="margin-top:20px; display:none;"> <button type="button" class="close" id="clearMsg"><span aria-hidden="true">&times;</span></button> <span id="message"></span> </div> </div> </div> </div> <script> $(document).ready(function () { $('#login').on('click', function () { $.ajax({ url: "login_controller/login_Test", type: "POST", dataType: "JSON", data: { "username": username, "password": <PASSWORD>, }, success: function (resp) { console.log(resp); if (resp.result == true) { // window.location.replace(resp); alert('ok'); } else { // window.location = "test_c"; alert('NO'); } }, error: function (error) { console.log(error); } }) }); }); </script> <!--<script src="assets/allaction_js/login.js"></script>--> <script src="assets/js/custom/jquery-3.3.1.min.js"></script> <script src="https://cdnjs.cloudflare.com/ajax/libs/popper.js/1.14.7/umd/popper.min.js" integrity="<KEY>" crossorigin="anonymous"></script> <!--<script src="assets/styles/bootstrap4/popper.js"></script>--> <script src="assets/styles/bootstrap4/bootstrap.min.js"></script> <script src="assets/plugins/greensock/TweenMax.min.js"></script> <script src="assets/plugins/greensock/TimelineMax.min.js"></script> <script src="assets/plugins/scrollmagic/ScrollMagic.min.js"></script> <script src="assets/plugins/greensock/animation.gsap.min.js"></script> <script src="assets/plugins/greensock/ScrollToPlugin.min.js"></script> <script src="assets/plugins/OwlCarousel2-2.2.1/owl.carousel.js"></script> <script src="assets/plugins/slick-1.8.0/slick.js"></script> <script 
src="assets/plugins/easing/easing.js"></script> <script src="assets/js/custom/custom.js"></script> </body> </html> <file_sep>/application/views/layout/header.php <!-- Header --> <header class="header"> <!-- Top Bar --> <div class="top_bar"> <div class="container"> <div class="row"> <div class="col d-flex flex-row"> <div class="top_bar_contact_item"><div class="top_bar_icon"><img src="assets/images/phone.png" alt=""></div>092-448-4333</div> <div class="top_bar_contact_item"><div class="top_bar_icon"><img src="assets/images/mail.png" alt=""></div><a href="mailto:<EMAIL>"><EMAIL></a></div> <div class="top_bar_content ml-auto"> <div class="top_bar_menu"> <ul class="standard_dropdown top_bar_dropdown"> <li> <a href="#">English<i class="fas fa-chevron-down"></i></a> <ul> <li><a href="#">English</a></li> <li><a href="#">Thai</a></li> </ul> </li> <!--<li> <a href="#">$ US dollar<i class="fas fa-chevron-down"></i></a> <ul> <li><a href="#">EUR Euro</a></li> <li><a href="#">GBP British Pound</a></li> <li><a href="#">JPY Japanese Yen</a></li> </ul> </li>--> </ul> </div> <div class="top_bar_user"> <div class="user_icon"><img src="assets/images/user.svg" alt=""></div> <?php $ses = $this->session->userdata('user'); ?> <!--<div><a data-toggle="modal" data-target="#modal-login" href="#">Sign in</a></div>--> <?php if (!empty($ses)) { ?> <?php if ($ses == '1') { ?> <div><a onclick="Logout()" href="">Admin</a></div> <?php } elseif ($ses == '2') { ?> <div><a onclick="Logout()" href="">User</a></div> <?php } else { ?> <div><a onclick="Logout()" href="">Logout</a></div> <?php } ?> <?php } else { ?> <div><a data-toggle="modal" data-target="#modal-register" href="">Register</a></div> <div><a data-toggle="modal" data-target="#modal-login" href="">Sign in</a></div> <?php } ?> <!-- $ses = $this->session->userdata('user'); if (!empty($ses)){ <div><a id="btn-logout" href="#"> if ($ses == '1'){ ADMIN }elseif ($ses == '2'){ USER }else { LOGOUT } </a></div> }else{ <div><a data-toggle="modal" 
data-target="#modal-login" href="#">Sign in</a></div> }--> </div> </div> </div> </div> </div> </div> <!-- Header Main --> <div class="header_main"> <div class="container"> <div class="row"> <!-- Logo --> <div class="col-lg-2 col-sm-3 col-3 order-1"> <div class="logo_container"> <div class="logo"><a href="#">NADONG</a></div> </div> </div> <!-- Search --> <div class="col-lg-6 col-12 order-lg-2 order-3 text-lg-left text-right"> <div class="header_search"> <div class="header_search_content"> <div class="header_search_form_container"> <form action="#" class="header_search_form clearfix"> <input type="search" required="required" class="header_search_input" placeholder="Search for products..."> <div class="custom_dropdown"> <div class="custom_dropdown_list"> <span class="custom_dropdown_placeholder clc">All Categories</span> <i class="fas fa-chevron-down"></i> <ul class="custom_list clc"> <li><a class="clc" href="#">All Categories</a></li> <li><a class="clc" href="#">Computers</a></li> <li><a class="clc" href="#">Laptops</a></li> <li><a class="clc" href="#">Cameras</a></li> <li><a class="clc" href="#">Hardware</a></li> <li><a class="clc" href="#">Smartphones</a></li> </ul> </div> </div> <button type="submit" class="header_search_button trans_300" value="Submit"><img src="assets/images/search.png" alt=""></button> </form> </div> </div> </div> </div> <!-- Wishlist --> <div class="col-lg-4 col-9 order-lg-3 order-2 text-lg-left text-right"> <div class="wishlist_cart d-flex flex-row align-items-center justify-content-end"> <div class="wishlist d-flex flex-row align-items-center justify-content-end"> <div class="wishlist_icon"><img src="assets/images/heart.png" alt=""></div> <div class="wishlist_content"> <div class="wishlist_text"><a href="#">Wishlist</a></div> <div class="wishlist_count">115</div> </div> </div> <!-- Cart --> <div class="cart"> <div class="cart_container d-flex flex-row align-items-center justify-content-end"> <div class="cart_icon"> <img 
src="assets/images/cart.png" alt=""> <div class="cart_count"><span>10</span></div> </div> <div class="cart_content"> <div class="cart_text"><a href="#">Cart</a></div> <div class="cart_price">$85</div> </div> </div> </div> </div> </div> </div> </div> </div> <!-- Main Navigation --> <nav class="main_nav"> <div class="container"> <div class="row"> <div class="col"> <div class="main_nav_content d-flex flex-row"> <!-- Categories Menu --> <div class="cat_menu_container"> <div class="cat_menu_title d-flex flex-row align-items-center justify-content-start"> <div class="cat_burger"><span></span><span></span><span></span></div> <div class="cat_menu_text">categories</div> </div> <ul class="cat_menu"> <li><a href="#">Computers & Laptops <i class="fas fa-chevron-right ml-auto"></i></a></li> <li><a href="#">Cameras & Photos<i class="fas fa-chevron-right"></i></a></li> <li class="hassubs"> <a href="#">Hardware<i class="fas fa-chevron-right"></i></a> <ul> <li class="hassubs"> <a href="#">Menu Item<i class="fas fa-chevron-right"></i></a> <ul> <li><a href="#">Menu Item<i class="fas fa-chevron-right"></i></a></li> <li><a href="#">Menu Item<i class="fas fa-chevron-right"></i></a></li> <li><a href="#">Menu Item<i class="fas fa-chevron-right"></i></a></li> <li><a href="#">Menu Item<i class="fas fa-chevron-right"></i></a></li> </ul> </li> <li><a href="#">Menu Item<i class="fas fa-chevron-right"></i></a></li> <li><a href="#">Menu Item<i class="fas fa-chevron-right"></i></a></li> <li><a href="#">Menu Item<i class="fas fa-chevron-right"></i></a></li> </ul> </li> <li><a href="#">Smartphones & Tablets<i class="fas fa-chevron-right"></i></a></li> <li><a href="#">TV & Audio<i class="fas fa-chevron-right"></i></a></li> <li><a href="#">Gadgets<i class="fas fa-chevron-right"></i></a></li> <li><a href="#">Car Electronics<i class="fas fa-chevron-right"></i></a></li> <li><a href="#">Video Games & Consoles<i class="fas fa-chevron-right"></i></a></li> <li><a href="#">Accessories<i class="fas 
fa-chevron-right"></i></a></li> </ul> </div> <!-- Main Nav Menu --> <div class="main_nav_menu ml-auto"> <ul class="standard_dropdown main_nav_dropdown"> <li><a href="welcome">Home<i class="fas fa-chevron-down"></i></a></li> <li class="hassubs"> <a href="#">Super Deals<i class="fas fa-chevron-down"></i></a> <ul> <li> <a href="#">Menu Item<i class="fas fa-chevron-down"></i></a> <ul> <li><a href="#">Menu Item<i class="fas fa-chevron-down"></i></a></li> <li><a href="#">Menu Item<i class="fas fa-chevron-down"></i></a></li> <li><a href="#">Menu Item<i class="fas fa-chevron-down"></i></a></li> </ul> </li> <li><a href="#">Menu Item<i class="fas fa-chevron-down"></i></a></li> <li><a href="#">Menu Item<i class="fas fa-chevron-down"></i></a></li> <li><a href="#">Menu Item<i class="fas fa-chevron-down"></i></a></li> </ul> </li> <li class="hassubs"> <a href="#">Featured Brands<i class="fas fa-chevron-down"></i></a> <ul> <li> <a href="#">Menu Item<i class="fas fa-chevron-down"></i></a> <ul> <li><a href="#">Menu Item<i class="fas fa-chevron-down"></i></a></li> <li><a href="#">Menu Item<i class="fas fa-chevron-down"></i></a></li> <li><a href="#">Menu Item<i class="fas fa-chevron-down"></i></a></li> </ul> </li> <li><a href="#">Menu Item<i class="fas fa-chevron-down"></i></a></li> <li><a href="#">Menu Item<i class="fas fa-chevron-down"></i></a></li> <li><a href="#">Menu Item<i class="fas fa-chevron-down"></i></a></li> </ul> </li> <li class="hassubs"> <a href="#">Pages<i class="fas fa-chevron-down"></i></a> <ul> <li><a href="Shop_Controller">Shop<i class="fas fa-chevron-down"></i></a></li> <li><a href="Product_Controller">Product<i class="fas fa-chevron-down"></i></a></li> <!--<li><a href="Blog_Controller">Blog<i class="fas fa-chevron-down"></i></a></li>--> <!--<li><a href="BlogSingle_Controller">Blog Post<i class="fas fa-chevron-down"></i></a></li>--> <li><a href="Regular_Controller">Regular Post<i class="fas fa-chevron-down"></i></a></li> <li><a href="Cart_Controller">Cart<i 
class="fas fa-chevron-down"></i></a></li> <li><a href="Contact_Controller">Contact<i class="fas fa-chevron-down"></i></a></li> </ul> </li> <li class="hassubs"> <a href="#">Blog<i class="fas fa-chevron-down"></i></a> <ul> <li><a href="Blog_Controller">Blog<i class="fas fa-chevron-down"></i></a></li> <li><a href="BlogSingle_Controller">Blog Post<i class="fas fa-chevron-down"></i></a></li> </ul> </li> <li><a href="contact.html">Contact<i class="fas fa-chevron-down"></i></a></li> </ul> </div> <!-- Menu Trigger --> <div class="menu_trigger_container ml-auto"> <div class="menu_trigger d-flex flex-row align-items-center justify-content-end"> <div class="menu_burger"> <div class="menu_trigger_text">menu</div> <div class="cat_burger menu_burger_inner"><span></span><span></span><span></span></div> </div> </div> </div> </div> </div> </div> </div> </nav> </header><file_sep>/application/models/Register_Model.php <?php defined('BASEPATH') OR exit('No direct script access allowed'); /* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. 
*/ /** * Description of Register_Model * * @author Artan.s */ class Register_Model extends CI_Model{ public function __construct() { parent::__construct(); } } <file_sep>/assets/allaction_js/login.js $('#btn-login').on('click', function () { var message = ""; displayError(false, ""); $('#message-alert-login').hide(); var username = $('#username').val(); var password = $('#password').val(); var usertype = $('#usertype').val(); if (username == '') { message = message.concat("- กรุณากรอก USERNAME <br>"); } if (password == '') { message = message.concat("- กรุณากรอก PASSWORD <br>"); } if (message != '') { displayError(true, message); return; } $.ajax({ url: "Login_Controller/login", type: "POST", dataType: "JSON", data: { "username": username, "password": <PASSWORD>, }, success: function (resp) { console.log(resp); if (resp.result == true) { window.location = "welcome"; } else { displayError(true, resp.message); } }, error: function (error) { console.log(error); } }) }); function displayError(show, message) { //show ? $('#alert').show() : $('#alert').hide(); if (show) { $('#message-alert-login').show(); } else { $('#message-alert-login').hide(); } $('#message-alert-login').html(message); } function Logout(){ window.location = "login_controller/logout"; }<file_sep>/application/controllers/Welcome.php <?php defined('BASEPATH') OR exit('No direct script access allowed'); class Welcome extends MY_Controller { /** * Index Page for this controller. 
* * Maps to the following URL * http://example.com/index.php/welcome * - or - * http://example.com/index.php/welcome/index * - or - * Since this controller is set as the default controller in * config/routes.php, it's displayed at http://example.com/ * * So any other public methods not prefixed with an underscore will * map to /index.php/welcome/<method_name> * @see https://codeigniter.com/user_guide/general/urls.html */ function __construct() { parent::__construct(); } public function index() { $this->headadd = 'layout/home/head_home'; $this->page = 'layout/home/page_home'; // $this->modaladd = ['layout/modal/modal_register']; ถ้ามี modalที่ต้องการเพิ่ม ให้เพิ่มเป็นรูปแบบ array ถ้าไม่มีให้ทำแบบด้านล่าง $this->modaladd =[]; $this->scriptadd = 'layout/home/down_home'; $this->dataresult = TEMPLATE_A; //defind มาจาก my_constants ซึ่ง custom template เองใน view/template/'''' $this->newlayout(); // เรียก layout มาจาก MY_Controller ซึ่งกำหนด page และ template เอง // $result = $this->Login_Model->usertype(); // if ($result->num_rows() > 0) { // $data['dbrow'] = $result->result(); // } else { // $data['dbrow'] = null; // } // $result->free_result(); // $data['header'] = 'layout/header'; // $data['page'] = 'home'; // $data['footer'] = 'layout/footer'; // $data['down'] = 'layout/down_home'; // $data['modal'] = ['layout/modal/modal_login', // 'layout/modal/modal_register']; // $this->load->view(TEMPLATE_WELCOME, $data); } } <file_sep>/application/controllers/Product_Controller.php <?php defined('BASEPATH') OR exit('No direct script access allowed'); /* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. 
*/ /** * Description of Product_Controller * * @author Artan.s */ class Product_Controller extends MY_Controller { public function __construct() { parent::__construct(); } public function index() { // เรียก TEMPLATE แบบแยก // $this->modaladd = []; // $this->page = 'layout/product/page_product'; // $this->dataresult = TEMPLATE_PRODUCT; // $this->layout(); // เรียก TEMPLATE_ALL $this->headadd = 'layout/product/head_product'; $this->page = 'layout/product/page_product'; // $this->modaladd = ['layout/modal/modal_register']; ถ้ามี modalที่ต้องการเพิ่ม ให้เพิ่มเป็นรูปแบบ array ถ้าไม่มีให้ทำแบบด้านล่าง $this->modaladd = []; $this->scriptadd = 'layout/product/down_product'; $this->dataresult = TEMPLATE_A; //defind มาจาก my_constants ซึ่ง custom template เองใน view/template/'''' $this->newlayout(); } } <file_sep>/application/models/Login_Model.php <?php defined('BASEPATH') OR exit('No direct script access allowed'); /* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ /** * Description of Login_model * * @author Artan.s */ class Login_Model extends CI_Model { function __construct() { parent::__construct(); } public function login($username, $password) { $sql = "SELECT * FROM user WHERE Username = '$username' AND Password = '$<PASSWORD>' "; $query = $this->db->query($sql); return $query->row(); } public function user_login($username, $password) { $sql = "SELECT * FROM user WHERE Username = '$username' AND Password = '$<PASSWORD>' "; $query = $this->db->query($sql); return $query->row(); } public function usertype(){ $sql = "SELECT * FROM user_type "; $query = $this->db->query($sql); return $query; } } <file_sep>/application/views/testv.php <!DOCTYPE html> <!-- To change this license header, choose License Headers in Project Properties. To change this template file, choose Tools | Templates and open the template in the editor. 
--> <html> <head> <title>testlogin</title> <base href="<?php echo base_url(); ?>"> <meta charset="utf-8"> <meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta name="description" content="THANOOLUK shop project"> <meta name="viewport" content="width=device-width, initial-scale=1"> <link rel="stylesheet" type="text/css" href="assets/styles/bootstrap4/bootstrap.min.css"> <script src="assets/jquery/jquery-3.3.1.js"></script> </head> <body> <div class="container"> <h1 class="page-header text-center">CodeIgniter Ajax Login using jQuery</h1> <div class="row"> <div class="col-sm-4 col-sm-offset-4"> <div class="login-panel panel panel-primary"> <div class="panel-heading"> <h3 class="panel-title"><span class="glyphicon glyphicon-lock"></span> Login </h3> </div> <div class="panel-body"> <form method="post" action="test_login/login_test"> <fieldset> <div class="form-group"> <input class="form-control" placeholder="username" type="text" name="username"> </div> <div class="form-group"> <input class="form-control" placeholder="<PASSWORD>" type="<PASSWORD>" name="<PASSWORD>"> </div> <button type="submit" class="btn btn-lg btn-primary btn-block"><span id="logText"></span></button> </fieldset> </form> </div> </div> <div id="responseDiv" class="alert text-center" style="margin-top:20px; display:none;"> <button type="button" class="close" id="clearMsg"><span aria-hidden="true">LOGIN</span></button> <span id="message"></span> </div> <a class="dropdown-toggle" data-toggle="modal" data-target="#modal-login" href="#"> เข้าสู่ระบบ <i class="fa fa-user fa-fw"></i> </a> </div> </div> </div> <!-- Modal --> <div class="modal fade" id="modal-login" tabindex="-1" role="dialog" aria-labelledby="myModalLabel"> <div class="modal-dialog" role="document"> <div class="modal-content"> <div class="modal-header"> <h3>Sign In</h3> <div class="d-flex justify-content-end social_icon"> <span><i class="fab fa-facebook-square"></i></span> <span><i class="fab fa-google-plus-square"></i></span> <span><i 
class="fab fa-twitter-square"></i></span> </div> </div> <div class="modal-body"> <div class="card-body"> <form> <div class="input-group form-group"> <div class="input-group-prepend"> <span class="input-group-text"><i class="fas fa-user"></i></span> </div> <input type="text" class="form-control" placeholder="username" id ="username" name="username"> </div> <div class="input-group form-group"> <div class="input-group-prepend"> <span class="input-group-text"><i class="fas fa-key"></i></span> </div> <input type="<PASSWORD>" class="form-control" placeholder="<PASSWORD>" id="password" name="password"> </div> <div class="col-md-12"> <div class="alert alert-danger" style="display:none" role="alert" id="message-alert-login"></div> </div> <div class="card-footer"> <div class="d-flex justify-content-center "> Don't have an account?<a href="#">Sign Up</a> </div> <div class="d-flex justify-content-center"> <a href="#">Forgot your password?</a> </div> </div> </form> </div> </div> <div class="modal-footer"> <div class="form-group"> <!--<input type="submit" value="Login" id="btn-login1" class="btn float-right login_btn">--> <button type="button" id="btn-login" class="btn float-right login_btn">Login</button> <button type="button" class="btn float-right login_btn" data-dismiss="modal">Close</button> </div> </div> </div> </div> </div> <!-- Modal --> <div class="modal fade" id="modal-message" tabindex="-1" role="dialog" aria-labelledby="myModalLabel"> <div class="modal-dialog" role="document"> <div class="modal-content"> <div class="modal-header"> <button type="button" class="close" data-dismiss="modal" aria-label="Close"><span aria-hidden="true">&times;</span></button> <h4 class="modal-title" id="message-title"></h4> </div> <div class="modal-body" id="message-content"> </div> <div class="modal-footer"> <button type="button" class="btn btn-default" data-dismiss="modal">ปิด</button> </div> </div> </div> </div> <script > $(document).ready(function () { var title = 
getUrlParameter('title'); var message = getUrlParameter('message'); if (title !== undefined && message !== undefined) { $('#modal-message').modal('show'); $('#message-title').text(title); $('#message-content').text(message); } }) function getUrlParameter(sParam) { var sPageURL = decodeURIComponent(window.location.search.substring(1)), sURLVariables = sPageURL.split('&'), sParameterName, i; for (i = 0; i < sURLVariables.length; i++) { sParameterName = sURLVariables[i].split('='); if (sParameterName[0] === sParam) { return sParameterName[1] === undefined ? true : sParameterName[1]; } } } $('#btn-login').on('click', function () { var message = ""; displayError(false, ""); $('#message-alert-login').hide(); var username = $('#username').val(); var password = $('#password').val(); if (username == '') { message = message.concat("- กรุณากรอก USERNAME <br>"); } if (password == '') { message = message.concat("- กรุณากรอก PASSWORD <br>"); } if (message != '') { displayError(true, message); return; } $.ajax({ url: "test_login/login_test", type: "POST", dataType: "JSON", data: { "username": username, "password": <PASSWORD>, }, success: function (resp) { console.log(resp); if (resp.result == true) { window.location = "welcome"; } else { displayError(true, resp.message); } }, error: function (error) { console.log(error); } }) }); function displayError(show, message) { //show ? 
$('#alert').show() : $('#alert').hide(); if (show) { $('#message-alert-login').show(); } else { $('#message-alert-login').hide(); } $('#message-alert-login').html(message); } </script> <script src="assets/jquery/jquery-3.3.1.min.js"></script> <script src="https://cdnjs.cloudflare.com/ajax/libs/popper.js/1.14.7/umd/popper.min.js" integrity="<KEY>" crossorigin="anonymous"></script> <!--<script src="assets/styles/bootstrap4/popper.js"></script>--> <script src="assets/styles/bootstrap4/bootstrap.min.js"></script> </body> </html> <file_sep>/application/core/MY_Controller.php <?php defined('BASEPATH') OR exit('No direct script access allowed'); /* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ /** * Description of MY_Controller * * @author Artan.s */ class MY_Controller extends CI_Controller { public function __construct() { parent::__construct(); } public function chkuser(){ } var $data = array(); public function layout() { $this->load->model('Login_Model'); $result = $this->Login_Model->usertype(); if ($result->num_rows() > 0) { $this->data['dbrow'] = $result->result(); } else { $this->data['dbrow'] = null; } $result->free_result(); $this->data['header'] = 'layout/header'; $this->data['page'] = $this->page; $this->data['footer'] = 'layout/footer'; // $this->data['down'] = 'layout/down_home'; $this->data['modal'] = ['layout/modal/modal_login', 'layout/modal/modal_register']; $this->data['modaladd'] = $this->modaladd; $this->load->view($this->dataresult, $this->data); } // TEST NEW LAYOUT public function newlayout() { $this->load->model('Login_Model'); $this->data['headadd'] = $this->headadd; //เพิ่ม link หรือscript ส่วนของhead $result = $this->Login_Model->usertype(); if ($result->num_rows() > 0) { $this->data['dbrow'] = $result->result(); } else { $this->data['dbrow'] = null; } $result->free_result(); $this->data['header'] = 
'layout/header'; $this->data['page'] = $this->page; $this->data['footer'] = 'layout/footer'; // $this->data['down'] = 'layout/down_home'; $this->data['modal'] = ['layout/modal/modal_login', 'layout/modal/modal_register']; $this->data['modaladd'] = $this->modaladd; $this->data['scriptadd'] = $this->scriptadd; //เพิ่ม script ส่วนของscript ท้าย body $this->load->view($this->dataresult, $this->data); } } <file_sep>/application/views/layout/head_home.php <!DOCTYPE html> <!-- To change this license header, choose License Headers in Project Properties. To change this template file, choose Tools | Templates and open the template in the editor. --> <html lang="en"> <head> <title>OneTech</title> <base href="<?php echo base_url(); ?>"> <meta charset="utf-8"> <meta http-equiv="X-UA-Compatible" content="IE=edge"> <meta name="description" content="THANOOLUK shop project"> <meta name="viewport" content="width=device-width, initial-scale=1"> <link rel="stylesheet" type="text/css" href="assets/styles/bootstrap4/bootstrap.min.css"> <link href="assets/plugins/fontawesome-free-5.0.1/css/fontawesome-all.css" rel="stylesheet" type="text/css"> <link rel="stylesheet" type="text/css" href="assets/plugins/OwlCarousel2-2.2.1/owl.carousel.css"> <link rel="stylesheet" type="text/css" href="assets/plugins/OwlCarousel2-2.2.1/owl.theme.default.css"> <link rel="stylesheet" type="text/css" href="assets/plugins/OwlCarousel2-2.2.1/animate.css"> <link rel="stylesheet" type="text/css" href="assets/plugins/slick-1.8.0/slick.css"> <link rel="stylesheet" type="text/css" href="assets/styles/main_styles.css"> <link rel="stylesheet" type="text/css" href="assets/styles/responsive.css"> </head> <body>
f195f1c177395466bb8a98e7940d82fc68dc68a6
[ "JavaScript", "PHP" ]
17
PHP
artan001/Codeigniter
3d6a1c8330150be062cbb119f19ca23aa7d6ffd9
22efa63017955056ddefc252eba9dec3656fc077
refs/heads/master
<file_sep>snrnasm ======= SNRNASM RSS feed reader <file_sep>import urllib2 from xml.dom import minidom import array from shutil import copy from time import strftime from os import remove def parse(target): # start the log rec('log','%s:Downloading rss feed\n'%strftime('%Y %m %d %X')) # save the xml so it can be edited rssfeed = urllib2.urlopen("http://rmdb.stanford.edu/site_media/rss/isatab.xml") s = rssfeed.read() rssfeed.close() s = s.replace('&', '&amp;') rec('xml',s) feed = minidom.parse('/Applications/MAMP/htdocs/snrnasm/rmdb_xml.txt') snrnasmdata = True title = False; lasttitle = False link = False; links = [] seen = [] # go through each "item" in the feed for item in feed.getElementsByTagName('item'): for node in item.childNodes: if node.nodeType == 1 and node.hasChildNodes() == True: # get the info from each item and add it to a list of links snrnasmdata = check_node(node.nodeName, node.childNodes.item(0).nodeValue) if isinstance(snrnasmdata, unicode) and snrnasmdata[0:4] == 'link': link = snrnasmdata[5:]; lasttitle = title elif isinstance(snrnasmdata, unicode) and snrnasmdata[0:5] == 'title': title = snrnasmdata[5:] elif isinstance(snrnasmdata, unicode) and snrnasmdata[0:4] == 'desc': desc = snrnasmdata[5:] # when moving to the next node, make the link for the previous one ignoring duplicates if node.nodeName == 'description': if link not in seen: seen.append('%s'%link) try: links.append("<a href='%s'>%s:</a> %s\n"%(link, lasttitle, desc)) del desc except: links.append("<a href='%s'>%s</a>\n"%(link, title)) # sort the links links = sorted(set(links)) addlinks(links) # send all of the links found to addlinks() to be written to the file # close the log rec('log','%s =========================================================='%strftime('%Y %m %d %X')) def check_node(label, value): if label == 'title': # set the value for the title of the link return 'title %s'%value if label == 'link': return 'link %s'%value if label == 'description': return "desc %s"%value 
else: return # skip the fields other than 'link' 'title' and 'description' # def numerical(date): day = date[0:2] months = ['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec'] month = months.index(date[3:6]) if len(str(month))<2: month = '0%d'%month year = date[7:11] time = date[12:17] dat = '%s%s%s'%(year,month,day) return('%s %s')%(dat,time) def addlinks(links): # backup the file before writing the links, then load from the template copy('/Applications/MAMP/htdocs/snrnasm/browse.html', '/Applications/MAMP/htdocs/snrnasm/browse.old') copy('/Applications/MAMP/htdocs/snrnasm/template.html', '/Applications/MAMP/htdocs/snrnasm/browse.html') with open("/Applications/MAMP/htdocs/snrnasm/browse.html", 'a') as newfile: for link in links: newfile.write('%s<br>\n'%link) rec('log',link) # end the file newfile.write('\n</body>\n</html>') def rec(loc,msg): locs = {'log':'/Applications/MAMP/htdocs/snrnasm/log.txt','xml':'/Applications/MAMP/htdocs/snrnasm/rmdb_xml.txt'} loc = locs[loc] with open(loc, 'a') as output: output.write(msg) return try: parse('http://rmdb.stanford.edu/site_media/rss/isatab.xml') finally: remove('/Applications/MAMP/htdocs/snrnasm/rmdb_xml.txt')
5efea6a039f73a1f56cff4d86a543a5b8255ad24
[ "Markdown", "Python" ]
2
Markdown
chkiss/snrnasm
76a3ae6ca12898db67ac8a4697af5f4f8d208a2e
b27b18f9ac95d4008348a96de9fb414c2626a9e4
refs/heads/master
<repo_name>Radesh1/scripts<file_sep>/TrafficShape.sh #!/bin/bash ######################################################## #VARIAVEIS int_down=eth1 int_up=eth0 upload=1mbit download=1mbit mark=10 ip=10.0.0.0/24 ######################################################## start() { #CRIACAO DAS FILAS tc qdisc add dev $int_up ingress tc qdisc add dev $int_up root handle 1: htb r2q 0 tc class add dev $int_up parent 1: classid 1:1 htb rate $upload tc qdisc add dev $int_down ingress tc qdisc add dev $int_down root handle 1: htb r2q 0 tc class add dev $int_down parent 1: classid 1:1 htb rate $download ##################################################################################################### #FILTRO DE DOWNLOAD tc class add dev $int_down parent 1:1 classid 1:$mark htb rate $download tc filter add dev $int_down protocol ip parent 1:0 prio 1 u32 match ip dst $ip flowid 1:$mark ##################################################################################################### #FILTRO DE UPLOAD tc class add dev $int_up parent 1:1 classid 1:$mark htb rate $upload tc filter add dev $int_up parent 1:0 protocol ip prio 1 handle $mark fw classid 1:$mark ##################################################################################################### #CRIACAO DA REGRA NO FIREWALL iptables -t mangle -A POSTROUTING -s $ip -j MARK --set-mark $mark ##################################################################################################### } stop() { iptables -F -t mangle iptables -X -t mangle tc qdisc del dev $int_down root tc qdisc del dev $int_down ingress tc qdisc del dev $int_up root tc qdisc del dev $int_up ingress } restart() { stop sleep 1 start } show() { tc -s qdisc ls dev $int_down tc -s qdisc ls dev $int_up } case "$1" in start) echo -n "Starting bandwidth shaping: " start echo "done" ;; stop) echo -n "Stopping bandwidth shaping: " stop echo "done" ;; restart) echo -n "Restarting bandwidth shaping: " restart echo "done" ;; show) echo "Bandwidth shaping 
status for $IF:\n" show echo "" ;; *) pwd=$(pwd) echo "Usage: $(/usr/bin/dirname $pwd)/tc.bash {start|stop|restart|show}" ;; esac exit 0 <file_sep>/libcripto.sh #!/bin/bash #funções do corpo.sh testedependencias(){ dpkg --get-selections | grep zenity if [ "$?" -eq "0" ]; then echo " " else echo "Instalando dependencia necessaria pacote Zenity" sleep 2 apt-get install zenity -y fi dpkg --get-selections | grep openssl if [ "$?" -eq "0" ]; then echo " " else echo "Instalando dependencia necessaria pacote openssl" sleep 2 apt-get install openssl -y fi } modECB(){ keysize=$(zenity --list --title "Modo de operação ECB" --text "Selecione o tamanho da chave" --radiolist --column " " --column "Size" TRUE "128" FALSE "256") opcao1=$(zenity --list --title "Selecione uma das opções abaixo!" --width=600 --height=380 --column id --column Descrição 1 "Criptografar usando uma chave armazenada em um arquivo (formato hexadecimal)" 2 "Criptografar derivando a chave de uma senha" 3 "Criptografar com uma chave pseudorandômica") if [ $opcao1 == "1" ] then file=$(zenity --title "Selecione o arquivo que será cifrado!" --width=650 --height=380 --file-selection ) aux=$(zenity --title "Selecione o arquivo com a chave criptográfica!" --width=650 --height=380 --file-selection ) key=$(head -1 $aux) zenity --info --text "Selecione o diretório para salvar o arquivo cifrado" savedir=$(zenity --file-selection --save) openssl enc -aes-$keysize-ecb -in $file -out $savedir -K $key zenity --info --text "ARQUIVO CRIPTOGRAFADO!" fi if [ $opcao1 == "2" ] then file=$(zenity --title "Selecione o arquivo que será cifrado!" --width=650 --height=380 --file-selection ) zenity --info --text "A seguir, informe a senha que será usada na derivação da chave.\nOBS:Guarde essa senha pois sem ela não há como descriptografar!" 
senha=$(zenity --forms --add-password "Digite a senha") if [ $keysize == "128" ] then key=$(echo -n $senha | md5sum | cut -d' ' -f1) else key=$(echo -n $senha | sha256sum | cut -d' ' -f1) fi zenity --info --text "Selecione o diretório para salvar o arquivo cifrado" savedir=$(zenity --file-selection --save) openssl enc -aes-$keysize-ecb -in $file -out $savedir -K $key zenity --info --text "ARQUIVO CRIPTOGRAFADO!" fi if [ $opcao1 == "3" ] then file=$(zenity --title "Selecione o arquivo que será cifrado!" --width=650 --height=380 --file-selection ) aux=$[$keysize/8] key=$(openssl rand -hex $aux) zenity --info --text "Selecione o diretório para salvar o arquivo cifrado" savedir=$(zenity --file-selection --save) openssl enc -aes-$keysize-ecb -in $file -out $savedir -K $key echo $key > k.ENC.txt zenity --info --text "ARQUIVO CRIPTOGRAFADO!\n\nOBS: A chave usada para cifrar e decifrar, se encontram na pasta do projeto!" fi } modCBC(){ keysize=$(zenity --list --title "Modo de operação CBC" --text "Selecione o tamanho da chave" --radiolist --column " " --column "Size" TRUE "128" FALSE "256") opcao1=$(zenity --list --title "Selecione uma das opções abaixo!" --width=600 --height=380 --column id --column Descrição 1 "Criptografar usando uma chave armazenada em um arquivo (formato hexadecimal)" 2 "Criptografar derivando a chave de uma senha" 3 "Criptografar com uma chave pseudorandômica") if [ $opcao1 == "1" ] then file=$(zenity --title "Selecione o arquivo que será cifrado!" --width=650 --height=380 --file-selection ) aux=$(zenity --title "Selecione o arquivo com a chave criptográfica!" --width=650 --height=380 --file-selection ) aux1=$(zenity --title "Selecione o arquivo com o vetor inicial!" 
--width=650 --height=380 --file-selection ) key=$(head -1 $aux) iv=$(head -1 $aux1) zenity --info --text "Selecione o diretório para salvar o arquivo cifrado" savedir=$(zenity --file-selection --save) openssl enc -aes-$keysize-cbc -in $file -out $savedir -K $key -iv $iv zenity --info --text "ARQUIVO CRIPTOGRAFADO!" fi if [ $opcao1 == "2" ] then file=$(zenity --title "Selecione o arquivo que será cifrado!" --width=650 --height=380 --file-selection ) zenity --info --text "A seguir, informe a senha que será usada na derivação da chave.\nOBS:Guarde essa senha pois sem ela não há como descriptografar!" senha=$(zenity --forms --add-password "Digite a senha") if [ $keysize == "128" ] then key=$(echo -n $senha | md5sum | cut -d' ' -f1) zenity --info --text "Selecione o diretório para salvar o arquivo cifrado" savedir=$(zenity --file-selection --save) openssl enc -aes-$keysize-cbc -in $file -out $savedir -K $key -iv $key zenity --info --text "ARQUIVO CRIPTOGRAFADO!" else key=$(echo -n $senha | sha256sum | cut -d' ' -f1) iv=$(echo -n $senha | md5sum | cut -d' ' -f1) zenity --info --text "Selecione o diretório para salvar o arquivo cifrado" savedir=$(zenity --file-selection --save) openssl enc -aes-$keysize-cbc -in $file -out $savedir -K $key -iv $iv zenity --info --text "ARQUIVO CRIPTOGRAFADO!" fi fi if [ $opcao1 == "3" ] then file=$(zenity --title "Selecione o arquivo que será cifrado!" --width=650 --height=380 --file-selection ) aux=$[$keysize/8] key=$(openssl rand -hex $aux) iv=$(openssl rand -hex 16) zenity --info --text "Selecione o diretório para salvar o arquivo cifrado" savedir=$(zenity --file-selection --save) openssl enc -aes-$keysize-cbc -in $file -out $savedir -K $key -iv $iv echo $key > k.ENC.txt echo $iv > iv.ENC.txt zenity --info --text "ARQUIVO CRIPTOGRAFADO!\n\nOBS: A chave e o vetor inicial usados para cifrar e decifrar, se encontrão na pasta do projeto!" fi } decryptECB(){ opcao1=$(zenity --list --title "Selecione uma das opções abaixo!" 
--width=600 --height=380 --column id --column Descrição 1 "Descriptografar usando uma chave armazenada em um arquivo (formato hexadecimal)" 2 "Descriptografar derivando a chave de uma senha") if [ $opcao1 == "1" ] then keysize=$(zenity --title="Tamanho da chave" --text "Digite o tamanho da chave 128-256!" --entry) zenity --info --text "selecione o arquivo que sera decifrado" file=$(zenity --title "Selecione o arquivo que será decifrado!" --width=650 --height=380 --file-selection ) zenity --info --text "selecione a chave" aux=$(zenity --title "Selecione o arquivo com a chave criptográfica!" --width=650 --height=380 --file-selection ) key=$(head -1 $aux) zenity --info --text "Selecione o diretório para salvar o arquivo decifrado" savedir=$(zenity --file-selection --save) openssl enc -aes-$keysize-ecb -in $file -out $savedir -K $key -d zenity --info --text "ARQUIVO DESCRIPTOGRAFADO!" fi if [ $opcao1 == "2" ] then keysize=$(zenity --title="Tamanho da chave" --text "Digite o tamanho da chave 128-256!" --entry) file=$(zenity --title "Selecione o arquivo que será decifrado!" --width=650 --height=380 --file-selection ) zenity --info --text "A seguir, informe a senha que será usada para decifrar o arquivo!" senha=$(zenity --forms --add-password "Digite a senha") if [ $keysize == "128" ] then key=$(echo -n $senha | md5sum | cut -d' ' -f1) else key=$(echo -n $senha | sha256sum | cut -d' ' -f1) fi zenity --info --text "Selecione o diretório para salvar o arquivo decifrado" savedir=$(zenity --file-selection --save) openssl enc -aes-$keysize-ecb -in $file -out $savedir -K $key -d zenity --info --text "ARQUIVO DESCRIPTOGRAFADO!" fi } decryptCBC(){ opcao1=$(zenity --list --title "Selecione uma das opções abaixo!" 
--width=600 --height=380 --column id --column Descrição 1 "Descriptografar usando uma chave armazenada em um arquivo (formato hexadecimal)" 2 "Descriptografar derivando a chave de uma senha") if [ $opcao1 == "1" ] then keysize=$(zenity --title="Tamanho da chave" --text "Digite o tamanho da chave 128-256!" --entry) zenity --info --text "selecione o arquivo que será decifrado" file=$(zenity --title "Selecione o arquivo que será decifrado!" --width=650 --height=380 --file-selection ) zenity --info --text "selecione a chave" aux=$(zenity --title "Selecione o arquivo com a chave criptográfica!" --width=650 --height=380 --file-selection ) zenity --info --text "selecione o IV" aux2=$(zenity --title "Selecione o arquivo com o vetor inicial!" --width=650 --height=380 --file-selection ) key=$(head -1 $aux) iv=$(head -1 $aux2) zenity --info --text "Selecione o diretório para salvar o arquivo decifrado" savedir=$(zenity --file-selection --save) openssl enc -aes-$keysize-cbc -in $file -out $savedir -K $key -iv $iv -d zenity --info --text "ARQUIVO DESCRIPTOGRAFADO!" fi if [ $opcao1 == "2" ] then keysize=$(zenity --title="Tamanho da chave" --text "Digite o tamanho da chave 128-256!" --entry) file=$(zenity --title "Selecione o arquivo que será decifrado!" --width=650 --height=380 --file-selection ) zenity --info --text "A seguir, informe a senha que será usada para decifrar o arquivo!" senha=$(zenity --forms --add-password "<PASSWORD>") if [ $keysize == "128" ] then key=$(echo -n $senha | md5sum | cut -d' ' -f1) zenity --info --text "Selecione o diretório para salvar o arquivo decifrado" savedir=$(zenity --file-selection --save) openssl enc -aes-$keysize-cbc -in $file -out $savedir -K $key -iv $key -d zenity --info --text "ARQUIVO DESCRIPTOGRAFADO!" 
else key=$(echo -n $senha | sha256sum | cut -d' ' -f1) iv=$(echo -n $senha | md5sum | cut -d' ' -f1) zenity --info --text "Selecione o diretório para salvar o arquivo decifrado" savedir=$(zenity --file-selection --save) openssl enc -aes-$keysize-cbc -in $file -out $savedir -K $key -iv $iv -d zenity --info --text "ARQUIVO DESCRIPTOGRAFADO!" fi fi } <file_sep>/cripto.sh #!/bin/bash/ source libcripto.sh EXEC(){ opmod=$(zenity --list --title "Cifras" --text "Selecione uma opção" --radiolist --column " " --column "Encrypt/Decrypt" 1 "Encryptar" 2 "Decryptar") if [ $opmod == "Encryptar" ] then modENCRYPT else modDECRYPT fi } modENCRYPT(){ zenity --info --text "Realize os 3 passos em ordem para obter autenticação, integridade e confidencialidade, após realizar os passos clique em sair para salvar as chaves criadas na pasta do projeto" projetonome=$(zenity --title="Nome do projeto" --text "Digite o nome do projeto!" --entry) mkdir $projetonome while :;do opcao1=$(zenity --list --title "Selecione uma das opções abaixo!" --width=650 --height=380 --column id --column Descrição 1 "Etapa 1 - Cria um MAC de um arquivo com uma chave pseudoaleatoria do tamanho selecionado. " 2 "Etapa 2 - Cria uma chave pseudoaleatoria do tamanho selecionado e cifra um arquivo no formato ecb ou cbc com iv pseudoaleatorio." 3 "Etapa 3 - Gera um par de chaves assimetricas e criptografa usando uma chave publica." 4 " Sair - depois de realizado as etapas clique em sair para salvar todas suas chaves. ") if [ $opcao1 == "4" ] then zenity --info --text " Todas suas chaves foram salvas com sucesso!" break fi if [ $opcao1 == "1" ] then #keysize=$(zenity --list --title "Tamanho da chave do MAC pseudoaleatoria" --text "Selecione o tamanho da chave pseudoaleatoria do MAC" --radiolist --column " " --column "Size" TRUE "128" FALSE "256") # aux=$[$keysize/8] key=$(openssl rand -hex 16) echo $key > k.MAC.txt file=$(zenity --title "Selecione o arquivo que será criado o MAC com a chave aleatoria k.mac!" 
--width=650 --height=380 --file-selection ) openssl dgst -hmac $key -md5 -out x.MAC.txt < $file zenity --info --text "Chave aleatoria k.MAC e x.MAC do arquivo selecionado criadas na pasta $projetonome " fi if [ $opcao1 == "3" ] then opmod=$(zenity --list --title "Cifras" --text "Selecione uma opção" --radiolist --column " " --column "Criar par de chaves ou cifrar com chave publica existente" 1 "Criar" 2 "Cifrar com chave publica existente") if [ $opmod == "Criar" ] then rsakey=$(zenity --title="Nome da chave" --text "Digite o nome da chave Publica e Privada!" --entry) openssl genrsa -out $rsakey'.pr' openssl rsa -in $rsakey'.pr' -outform PEM -pubout -out $rsakey'.pub' zenity --info --text " As chaves foram criadas! " else mkdir $projetonome/encryptkeys file=$(zenity --title "Selecione o arquivo que será cifrado com a chave publica!" --width=650 --height=380 --file-selection ) # savedir=$(zenity --file-selection --save) keyencrypt=$(zenity --title "Selecione a chave publica!" --width=650 --height=380 --file-selection ) filenameencrypt=$(zenity --title="Nome do arquivo criptografado" --text "Digite o nome para salvar o novo arquivo criptografado" --entry) #echo "$file a $keyencrypt a $filenameencrypt" openssl rsautl -in $file -out $projetonome/encryptkeys/$filenameencrypt -inkey $keyencrypt -pubin -encrypt zenity --info --text " chave criptografada com nome de $filenameencrypt salva na pasta $projetonome/encryptkeys" #iv.ENC.txt x.MAC.txt k.MAC.txt k.ENC.txt fi fi if [ $opcao1 == "2" ] then opmod=$(zenity --list --title "Modo de operação" --text "Selecione o modo de operação" --radiolist --column " " --column "Algorítimo" TRUE "ECB" FALSE "CBC") if [ $opmod == "ECB" ] then modECB else modCBC fi fi mv iv.ENC.txt x.MAC.txt k.MAC.txt k.ENC.txt $projetonome done } modDECRYPT(){ while :;do mkdir decryptFiles opcao1=$(zenity --list --title "Selecione uma das opções abaixo!" 
--width=650 --height=380 --column id --column Descrição 1 "Etapa 1 - Descriptografar arquivos com chave RSA privada. " 2 "Etapa 2 - Descriptografar modos ECB e CBC ." 3 "Etapa 3 - Comparar MAC gerado de um arquivo." 4 " Sair - Sair ") if [ $opcao1 == "1" ] then zenity --info --text "Os arquivos descriptografados vão ser encontrados na pasta decryptFiles " zenity --info --text "Selecione o arquivo que será descriptografado !" file=$(zenity --title "Selecione o arquivo que será descriptografado com a chave RSA privada!" --width=650 --height=380 --file-selection ) zenity --info --text "Selecione a chave RSA privada!" keycrypte=$(zenity --title "Selecione a chave RSA privada!" --width=650 --height=380 --file-selection ) filenameencrypt=$(zenity --title="Nome do arquivo descriptografado" --text "Digite o nome para salvar o novo arquivo descriptografado" --entry) openssl rsautl -in $file -out decryptFiles/$filenameencrypt -inkey $keycrypte -decrypt zenity --info --text "Arquivo descriptografado !" fi if [ $opcao1 == "2" ] then mode=$(zenity --list --title "Selecione uma das opções abaixo!" --width=600 --height=380 --column id --column Descrição 1 "ECB" 2 "CBC") if [ $mode == "1" ] then decryptECB fi if [ $mode == "2" ] then decryptCBC fi fi if [ $opcao1 == "3" ] then zenity --info --text "Selecione o arquivo com o x.mac que deseja comparar" file1=$(zenity --title "Selecione o x.MAC.txt arquivo com o mac que deseja comparar" --width=650 --height=380 --file-selection ) zenity --info --text "Selecione o arquivo com a chave k.mac para gerar o x2.MAC do arquivo descriptografado" aux=$(zenity --title "Selecione o arquivo com chave k.mac.txt para gerar o x2.mac do arquivo descriptografado " --width=650 --height=380 --file-selection ) zenity --info --text "Selecione o arquivo do qual sera gerado o MAC e comparado com o primeiro MAC" file2=$(zenity --title "Selecione o arquivo do qual sera gerado o MAC!" 
--width=650 --height=380 --file-selection ) key=$(head -1 $aux) openssl dgst -hmac $key -md5 -out x2.MAC.tmp < $file2 diff $file1 x2.MAC.tmp if [ "$?" -eq "0" ]; then zenity --info --text "Parabéns os Mac coincidem!!! " else zenity --info --text "Os Mac selecionados nao coincidem! " fi fi if [ $opcao1 == "4" ] then break fi done } testedependencias EXEC
8d9276f387296545b340c719fa5892845c982a8a
[ "Shell" ]
3
Shell
Radesh1/scripts
aebed4a67d7946a2572950afeb4039187efb5d5b
5ed5b189bbb44bb76411cebb22e3dac70729af94
refs/heads/master
<file_sep>#!/bin/bash ARN_FILE_PATH=".serial" #Check for config file if [ ! -f ~/.aws/config ]; then echo "Missing AWS configuration" exit 0; fi #Check for credentials file if [ ! -f ~/.aws/credentials ]; then echo "Missing AWS Credentials" exit 0; fi #Get MFA Serial ARN mfa_device=$(cat ${ARN_FILE_PATH}) #Prompt MFA code from user read -p "Enter your 6 digit MFA code: " mfa_code #Use aws cli to get session token _sts_output=$(aws sts get-session-token \ --serial-number ${mfa_device} \ --token-code ${mfa_code}) #extract the credentials using jq AWS_ACCESS_KEY_ID=$(echo ${_sts_output} | jq -r '.Credentials.AccessKeyId') AWS_SECRET_ACCESS_KEY=$(echo ${_sts_output} | jq -r '.Credentials.SecretAccessKey') AWS_SESSION_TOKEN=$(echo ${_sts_output} | jq -r '.Credentials.SessionToken') #set the credentials in the config file aws configure set aws_access_key_id "${AWS_ACCESS_KEY_ID}" --profile temp aws configure set aws_secret_access_key "${AWS_SECRET_ACCESS_KEY}" --profile temp aws configure set aws_session_token "${AWS_SESSION_TOKEN}" --profile temp
c1a9f849540423851f44502095c6deb7b92e0db4
[ "Shell" ]
1
Shell
ajaymehul/stelligent-u
ecbe2e9d933c309bf7b4d0b6c4dc375cf99dd48c
8f3e87fd059072c484d2357b7241355d8237eb91
refs/heads/master
<file_sep># RowingView 几天前看到鸿阳大神公众号上的一篇推文,里面讨论的 UI 效果也确实让我这个小白惊叹。 {% img /2020/04/28/shen-qi-de-ui-xiao-chuan-ya-you-a-you/0.gif 200 200 %} 第一眼看上去好像没什么大不了的,不就是个列表么,仔细看下,不由得一声卧槽,这个小船!反正我看到这个 UI 效果是没有一点头绪,刚好有人已经做出来,所以就看着别人的 demo,了解下原理,然后试着自己做一个同样的效果。 整体来说搞懂三个方面的问题就可以了: * 路径的绘制 * 小船的移动 * 滑动进度 # 路径绘制 通过`Path`类,合理运用内部方法可以创建并保存一条完整的路径。通过`moveTo(x,y)`方法移动路径的起始点到指定点,然后通过`rLineTo(dx,dy)`方法绘制一条从当前点到指定点的直线,注意此处的 (dx,dy) 坐标是相对于当前点的坐标,而`lineTo(x,y)`方法中的坐标使用的则是原始坐标系。 `arcTo(left,top,right,bottom,startAngle,sweepAngle,forceMoveTo)`方法则用来添加一段圆弧。left、top、right、bottom 参数决定圆弧位置及大小;startAngle 参数决定了圆弧的起始角度,0 度代表默认是向下;sweepAngle 参数决定了圆弧对应的度数,该数为正时代表顺时针方向,为负时代表逆时针方向;forceMoveTo 是一个布尔值,为 false 则代表如果之前路径的最后一个点与圆弧的起始点不是一个点时,会将两个点连接起来,也就是说始终是一条路径,为 true 则不会连接,则可能会变为两条路径。 完整的代码如下: ```kotlin fun getPath(pos: Int, width: Float, height: Float): Path { val radius = 100f val firstHeight = height / 2 val firstWidth: Float val lastWidth: Float if (pos % 2 == 0) { firstWidth = width / 8 lastWidth = width / 8 * 7 } else { firstWidth = width / 8 * 7 lastWidth = width / 8 } return Path().apply { moveTo(firstWidth, 0f) //添加直线 rLineTo(0f, firstHeight) if (pos % 2 == 0) { //添加圆弧 arcTo(firstWidth, firstHeight - radius, firstWidth + 2 * radius, firstHeight + radius, 180f, -90f, false) rLineTo(lastWidth - firstWidth - 2 * radius, 0f) arcTo(lastWidth - 2 * radius, firstHeight + radius, lastWidth, firstHeight + 3 * radius, -90f, 90f, false) } else { arcTo(firstWidth - 2 * radius, firstHeight - radius, firstWidth, firstHeight + radius, 0f, 90f, false) rLineTo(lastWidth - firstWidth + 2 * radius, 0f) arcTo(lastWidth, firstHeight + radius, lastWidth + 2 * radius, firstHeight + 3 * radius, -90f, -90f, false) } rLineTo(0f, firstHeight - 2 * radius) } } ``` 从 y 轴来看,firstHeight 将路径分为两部分,上面是一条直线,下面则是两个圆弧加一段直线;firstWidth 与 lastWidth 代表路径在 x 轴上的两个拐点,分别是八分之一处与八分之七处;pos 则用于区分两种不同的路径。 有了路径之后,就可以通过`canvas?.drawPath(mPath, mPaint)`将这段路径绘制出来。现在结合 RecycleView,完整的河道图就出来啦。 ```kotlin class RiverView : ViewGroup { 
constructor(context: Context) : super(context) constructor(context: Context, attributeSet: AttributeSet) : super(context, attributeSet) private val mPaint = Paint().apply { style = Paint.Style.STROKE } private lateinit var mPath: Path init { setWillNotDraw(false) } fun setPath(path: Path) { mPath = path mPathMeasure = PathMeasure(mPath, false) } fun setRiverColor(color: Int) { mPaint.color = color } fun setRiverWidth(width: Float) { mPaint.strokeWidth = width } override fun onLayout(changed: Boolean, l: Int, t: Int, r: Int, b: Int) {} override fun onDraw(canvas: Canvas?) { super.onDraw(canvas) canvas?.drawPath(mPath, mPaint) } } ``` ```kotlin override fun onBindViewHolder(holder: RowingViewHolder, position: Int) { holder.itemView.mRiverView.apply { val width = context.resources.displayMetrics.widthPixels.toFloat() - context.dp2px(32f) val height = context.dp2px(240f) setBackgroundColor(data[position].cardColor) setPath(getPath(position, width, height)) setRiverWidth(100f) setRiverColor(data[position].riverColor) } } ``` 效果图如下: {% img /2020/04/28/shen-qi-de-ui-xiao-chuan-ya-you-a-you/1.jpg 200 200 %} # 小船的移动 对路径进行具体的处理,包括获取某一个进度点、该点的方向等信息,就需要用到 PathMeasure 类了。 在创建 PathMeasure 对象时,需要传入一个 Path 对象,代表需要处理的路径。 ```kotlin mPathMeasure = PathMeasure(mPath, false) ``` 第二个参数代表是否将 Path 处理为闭合的。也就是说,当该参数为 true 时,那么 PathMeasure 在处理 Path 时,即使传入的 Path 不是闭合的,PathMeasure 也会当做闭合的 Path 的去处理,需要注意的是,只是当做闭合的处理,而不是真的将 Path 变为闭合的,不会对原先的 Path 产生影响。 那么怎么样根据一个进度值就能使小船进行相应的移动呢,此时就需要用到 PathMeasure 中的 getLength 与 getPosTan 方法了。 ```java //获得路径的长度 public float getLength() //获取该路径 disance 处的点的坐标及方向的切线值 public boolean getPosTan(float distance, float pos[], float tan[]) ``` 说下 getPosTan 方法,distance 代表长度,也就是说路径 distance 处的点,这个变量可以传入`progress* mPathMeasure.getLength()`,progress 取值 0-1,代表进度值,这样就可以传入不同的 progress 值进行移动;pos[] 则要求我们传入一个 float 数组,当方法运行完成后该数组中就会包含此点的坐标 (pos[0],pos[1]);tan[] 同样是 float 数组,方法完成后同样会产生 tan[0] 与 tan[1] 两个值,代表了该点的方向值,怎么理解这两个值呢,此时就需要借助 Math 中的 tan 系列函数。 ```java Math.tan(x) 
Math.atan(x) Math.atan2(y,x) ``` 先说明一下,在代码中涉及到度数时,一般都采用了弧度去表示。弧度与度数的关系是:半径为 1 的圆中任意角度所对应的弧长,就是该角度的弧度。所以 360°对应的弧度就是 2π,180°的弧度值为π。 `Math.tan(PI / 4)`也就是 tan(45°),计算 45 度角的正切值,结果是 1。 `Math.atan(1)`也就是 arctan(1),计算正切值 1 所对应的角度,结果是 PI/4。这两个的关系就是 tan(arctan(x)) = x。 而`Math.atan2(y,x)`与`Math.atan(x)`的用途是一样的,只是传入的参数不同,这里的 y 代表角度对边的长度,x 代表角度邻边的长度,可以说 atan2(y,x) 与 atan(y / x) 得出的结果是一样的。或许看下面的图会更明白一点。 {% img /2020/04/28/shen-qi-de-ui-xiao-chuan-ya-you-a-you/2.jpg %} 明白了这个以后,那么 tan[] 中存储的值就是 atan2(y,x) 中的 x、y 值。所以该点的旋转角度就是 ```kotlin atan2(mTan[1], mTan[0]) * 180 / PI ``` 由于 atan2 方法返回的是弧度值,所以通过*180/PI 将其转为对应的角度。好了,有了位置以及对应的旋转角度,就可以在 onDraw 方法中处理小船的位置及方向啦。 ```kotlin fun move(progress: Float) { if (!this::mRowingView.isInitialized) { //小船 mRowingView = RowingView(context) addView(mRowingView) mRowingView.layout(0, 0, 200, 200) } mPathMeasure.getPosTan(progress * mPathMeasure.length, mPos, mTan) postInvalidate() } override fun onDraw(canvas: Canvas?) { super.onDraw(canvas) canvas?.drawPath(mPath, mPaint) if (this::mRowingView.isInitialized) { mRowingView.apply { translationX = mPos[0] - mRowingView.width / 2 translationY = mPos[1] - mRowingView.height / 2 rotation = (atan2(mTan[1], mTan[0]) * 180 / PI + 180).toFloat() } } } ``` 当我们调用 move 函数时,小船就会根据传入的 progress 移动到对应的位置啦。 # 滑动进度 好了,现在路径有了,小船也可以移动了,剩下的就是处理当手指滑动列表时,小船也要滑动对应的进度。监听 RecycleView 的滑动是不可避免的了,具体的处理代码如下。 ```kotlin mRecycleView.apply { layoutManager = mLayoutManager adapter = mAdapter addOnScrollListener(object : RecyclerView.OnScrollListener() { override fun onScrolled(recyclerView: RecyclerView, dx: Int, dy: Int) { super.onScrolled(recyclerView, dx, dy) val computeVerticalScrollOffset = mRecycleView.computeVerticalScrollOffset() val distance = dp2px(240f + 10 * 2).times(mAdapter.data.size).minus(mRecycleView.height).div(mAdapter.data.size) val progress = computeVerticalScrollOffset % distance / distance val pos = computeVerticalScrollOffset / distance.toInt() move(progress, pos) post { mAdapter.notifyItemChanged(pos, 1) 
} } }) } fun move(progress: Float, pos: Int) { val holder = mRecycleView.findViewHolderForAdapterPosition(pos) as? RowingAdapter.RowingViewHolder holder?.itemView?.findViewById<RiverView>(R.id.mRiverView)?.move(progress) } ``` 首先 computeVerticalScrollOffset 是 RecycleView 当前的总滑动长度。 然后,`dp2px(240f + 10 * 2).times(mAdapter.data.size)`计算了填充完数据后的 RecycleView 的高度,而`mRecycleView.height`表示当前 RecycleView 在屏幕上显示出来高度。之所以要减去这个,是因为小船刚开始在 RecycleView 的顶部,当我们滑动 RecycleView 到底部时,小船也需要移动到底部,这之间的距离就差了一个 Recycle 的 Height。之后除以`mAdapter.data.size`,得到的是每一个卡片所对应的滑动距离,即 distance。 之后,`computeVerticalScrollOffset % distance`获得的值表示在一个卡片上小船的进度,除以 distance 就得出了进度百分比 progress。 `computeVerticalScrollOffset / distance`计算出哪个卡片上的小船需要移动。有了 pos 与 progress,就知道了当前 RecycleView 的滑动进度对应的小船的滑动进度,对小船进行移动就行了。 还有一点,当小船移动到下一个卡片时,当前卡片上的小船需要消失,因此还需在 onBindViewHolder 函数中做些处理。 ```kotlin override fun onBindViewHolder(holder: RowingViewHolder, position: Int) { holder.itemView.mRiverView.apply { val width = context.resources.displayMetrics.widthPixels.toFloat() - context.dp2px(32f) val height = context.dp2px(240f) setBackgroundColor(data[position].cardColor) setPath(getPath(position, width, height)) setRiverWidth(100f) setRiverColor(data[position].riverColor) } if (holder.itemView.mRiverView != lastRiverView) { if (lastRiverView?.initRowing() == true) lastRiverView?.mRowingView?.visibility = View.GONE if (holder.itemView.mRiverView.initRowing()) holder.itemView.mRiverView.mRowingView.visibility = View.VISIBLE lastRiverView = holder.itemView.mRiverView } } ``` 好了,现在所有的问题都解决了,小船就可以快乐的游起来了。 {% img /2020/04/28/shen-qi-de-ui-xiao-chuan-ya-you-a-you/3.gif %} <file_sep>package com.ffeiyue.rowingview import android.content.Context import android.graphics.Path import kotlin.math.PI import kotlin.math.atan import kotlin.math.atan2 import kotlin.math.tan fun main() { println(tan(PI / 4)) } fun Context.dp2px(dp: Float) = dp * resources.displayMetrics.density + 0.5f fun getPath(pos: Int, width: Float, height: Float): Path { val 
radius = 100f val firstHeight = height / 2 val firstWidth: Float val lastWidth: Float if (pos % 2 == 0) { firstWidth = width / 8 lastWidth = width / 8 * 7 } else { firstWidth = width / 8 * 7 lastWidth = width / 8 } return Path().apply { moveTo(firstWidth, 0f) //添加直线 rLineTo(0f, firstHeight) if (pos % 2 == 0) { //添加圆弧 arcTo(firstWidth, firstHeight - radius, firstWidth + 2 * radius, firstHeight + radius, 180f, -90f, false) rLineTo(lastWidth - firstWidth - 2 * radius, 0f) arcTo(lastWidth - 2 * radius, firstHeight + radius, lastWidth, firstHeight + 3 * radius, -90f, 90f, false) } else { arcTo(firstWidth - 2 * radius, firstHeight - radius, firstWidth, firstHeight + radius, 0f, 90f, false) rLineTo(lastWidth - firstWidth + 2 * radius, 0f) arcTo(lastWidth, firstHeight + radius, lastWidth + 2 * radius, firstHeight + 3 * radius, -90f, -90f, false) } rLineTo(0f, firstHeight - 2 * radius) } }<file_sep>package com.ffeiyue.rowingview import android.content.Context import android.content.Intent import android.graphics.Color import androidx.appcompat.app.AppCompatActivity import android.os.Bundle import androidx.recyclerview.widget.LinearLayoutManager import androidx.recyclerview.widget.RecyclerView import kotlinx.android.synthetic.main.activity_main.* class MainActivity : AppCompatActivity() { companion object { fun start(context: Context) { context.startActivity(Intent(context, MainActivity::class.java)) } } override fun onCreate(savedInstanceState: Bundle?) 
{ super.onCreate(savedInstanceState) setContentView(R.layout.activity_main) val mAdapter = RowingAdapter(this) mAdapter.data.apply { add(RowingModel(Color.parseColor("#4bab64"), Color.parseColor("#5ec87b"))) add(RowingModel(Color.parseColor("#1578b9"), Color.parseColor("#40a4e9"))) add(RowingModel(Color.parseColor("#a78137"), Color.parseColor("#ddc367"))) add(RowingModel(Color.parseColor("#f99e2f"), Color.parseColor("#fdd675"))) add(RowingModel(Color.parseColor("#f42d00"), Color.parseColor("#fd6d49"))) } val mLayoutManager = LinearLayoutManager(this) mLayoutManager.orientation = LinearLayoutManager.VERTICAL mRecycleView.apply { layoutManager = mLayoutManager adapter = mAdapter addOnScrollListener(object : RecyclerView.OnScrollListener() { override fun onScrolled(recyclerView: RecyclerView, dx: Int, dy: Int) { super.onScrolled(recyclerView, dx, dy) val computeVerticalScrollOffset = mRecycleView.computeVerticalScrollOffset() val distance = dp2px(240f + 10 * 2).times(mAdapter.data.size).minus(mRecycleView.height).div(mAdapter.data.size) val progress = computeVerticalScrollOffset % distance / distance val pos = computeVerticalScrollOffset / distance.toInt() move(progress, pos) post { mAdapter.notifyItemChanged(pos, 1) } } }) } } fun move(progress: Float, pos: Int) { val holder = mRecycleView.findViewHolderForAdapterPosition(pos) as? RowingAdapter.RowingViewHolder holder?.itemView?.findViewById<RiverView>(R.id.mRiverView)?.move(progress) } } <file_sep>package com.ffeiyue.rowingview import android.content.Context import android.view.LayoutInflater import android.view.View import android.view.ViewGroup import androidx.recyclerview.widget.RecyclerView import kotlinx.android.synthetic.main.recycle_item_view.view.* class RowingAdapter(val context: Context) : RecyclerView.Adapter<RowingAdapter.RowingViewHolder>() { var data: MutableList<RowingModel> = mutableListOf() var lastRiverView: RiverView? 
= null override fun onBindViewHolder(holder: RowingViewHolder, position: Int) { holder.itemView.mRiverView.apply { val width = context.resources.displayMetrics.widthPixels.toFloat() - context.dp2px(32f) val height = context.dp2px(240f) setBackgroundColor(data[position].cardColor) setPath(getPath(position, width, height)) setRiverWidth(100f) setRiverColor(data[position].riverColor) } if (holder.itemView.mRiverView != lastRiverView) { if (lastRiverView?.initRowing() == true) lastRiverView?.mRowingView?.visibility = View.GONE if (holder.itemView.mRiverView.initRowing()) holder.itemView.mRiverView.mRowingView.visibility = View.VISIBLE lastRiverView = holder.itemView.mRiverView } } override fun onCreateViewHolder(parent: ViewGroup, viewType: Int) = RowingViewHolder(LayoutInflater.from(context).inflate(R.layout.recycle_item_view, parent, false)) override fun getItemCount() = data.size inner class RowingViewHolder(itemView: View) : RecyclerView.ViewHolder(itemView) }<file_sep>package com.ffeiyue.rowingview import android.graphics.Color import android.os.Bundle import android.util.Log import androidx.appcompat.app.AppCompatActivity import kotlinx.android.synthetic.main.activity_test.* class TestActivity : AppCompatActivity() { override fun onCreate(savedInstanceState: Bundle?) 
{ super.onCreate(savedInstanceState) setContentView(R.layout.activity_test) val width = resources.displayMetrics.widthPixels.toFloat() - dp2px(32f) val height = mCardView.layoutParams.height.toFloat() Log.d(TestActivity::class.simpleName, "width:$width,height:$height") mRiverView.apply { setBackgroundColor(Color.parseColor("#4bab64")) setRiverColor(Color.parseColor("#5ec87b")) setRiverWidth(100f) setPath(getPath(0, width, height)) } moveUp.setOnClickListener { MainActivity.start(this) } moveDown.setOnClickListener { mRiverView.move(0.01f) } } } <file_sep>package com.ffeiyue.rowingview data class RowingModel(val cardColor: Int, val riverColor: Int)<file_sep>package com.ffeiyue.rowingview import android.content.Context import android.graphics.Canvas import android.graphics.Paint import android.graphics.Path import android.graphics.PathMeasure import android.util.AttributeSet import android.util.Log import android.view.ViewGroup import kotlin.math.PI import kotlin.math.atan2 class RiverView : ViewGroup { constructor(context: Context) : super(context) constructor(context: Context, attributeSet: AttributeSet) : super(context, attributeSet) private val mPaint = Paint().apply { style = Paint.Style.STROKE } private lateinit var mPath: Path lateinit var mRowingView: RowingView private lateinit var mPathMeasure: PathMeasure private val mPos = FloatArray(2) private val mTan = FloatArray(2) init { setWillNotDraw(false) } fun setPath(path: Path) { mPath = path mPathMeasure = PathMeasure(mPath, false) } fun setRiverColor(color: Int) { mPaint.color = color } fun setRiverWidth(width: Float) { mPaint.strokeWidth = width } fun initRowing() = this::mRowingView.isInitialized fun move(progress: Float) { if (!this::mRowingView.isInitialized) { //小船 mRowingView = RowingView(context) addView(mRowingView) mRowingView.layout(0, 0, 200, 200) } mPathMeasure.getPosTan(progress * mPathMeasure.length, mPos, mTan) postInvalidate() } override fun onLayout(changed: Boolean, l: Int, t: Int, r: Int, 
b: Int) {} override fun onDraw(canvas: Canvas?) { super.onDraw(canvas) canvas?.drawPath(mPath, mPaint) if (this::mRowingView.isInitialized) { mRowingView.apply { translationX = mPos[0] - mRowingView.width / 2 translationY = mPos[1] - mRowingView.height / 2 rotation = (atan2(mTan[1], mTan[0]) * 180 / PI + 180).toFloat() } } } }
6ab443ab6d2be6d6b6c8618ef1557472f6e48618
[ "Markdown", "Kotlin" ]
7
Markdown
leaps339/RowingView
9c5a488b94137cf697bc590a9fe43a50d335cb7e
eca75dc3000d9b1d84a1516dfa89cd4de93669ad
refs/heads/master
<repo_name>ebracho/Counting-Sort<file_sep>/counting_sort.cpp #include <iostream> #include <vector> #include <fstream> #include <cstdlib> #include <time.h> using namespace std; const int MAX = 50; bool counting_sort(const int min, const int max, vector<int>& numbers) { vector<int> number_index(max - min); for(int i = 0; i < numbers.size(); i++) { if(numbers[i] < min || numbers[i] > max) return false; number_index[numbers[i]]++; } numbers.clear(); for(int i = 0; i < number_index.size(); i++) { for(int j = 0; j < number_index[i]; j++) { numbers.push_back(min + i); } } } int main() { ofstream unsorted("unsorted.txt"); ofstream sorted("sorted.txt"); srand(time(NULL)); vector<int> numbers(0); for(int i = 0; i < MAX; i++) { numbers.push_back(rand()%100); unsorted << numbers[i] << endl; } counting_sort(0, 100, numbers); for(int i = 0; i < MAX; i++) { sorted << numbers[i] << endl; } return 0; } <file_sep>/README.md Counting-Sort ============= Counting Sort implementation in C++. Counting Sort is an sorting algorithm which uses knowledge of the range of a set of integers to sort them in linear time ( O(n) ).
4151a092aceaf249705f6a82ce5e84cd74cbb908
[ "Markdown", "C++" ]
2
C++
ebracho/Counting-Sort
cbc98c4e98dcc69349c4314507c662656abd78ac
eb77f6b5d77837abce9a898f1e9ee458c6ea47ad
refs/heads/master
<repo_name>frdeso/lttng-msgpack<file_sep>/msgpack.c #include <assert.h> #include <endian.h> #include <stddef.h> #include <stdint.h> #include <string.h> #include "msgpack.h" #define MSGPACK_FIXSTR_ID_MASK 0xA0 #define MSGPACK_FIXMAP_ID_MASK 0x80 #define MSGPACK_FIXARRAY_ID_MASK 0x90 #define MSGPACK_NIL_ID 0xC0 #define MSGPACK_FALSE_ID 0xC2 #define MSGPACK_TRUE_ID 0xC3 #define MSGPACK_MAP16_ID 0xDE #define MSGPACK_ARRAY16_ID 0xDC #define MSGPACK_UINT64_ID 0xCF #define MSGPACK_INT64_ID 0xD3 #define MSGPACK_FLOAT64_ID 0xCB #define MSGPACK_STR16_ID 0xDA #define MSGPACK_FIXMAP_MAX_COUNT 15 #define MSGPACK_FIXARRAY_MAX_COUNT 15 #define MSGPACK_FIXSTR_MAX_LENGTH 31 static inline int lttng_msgpack_append_buffer( struct lttng_msgpack_writer *writer, const uint8_t *buf, size_t length) { int ret = 0; assert(buf); if (writer->write_pos + length > writer->end_write_pos) { ret = -1; goto end; } memcpy(writer->write_pos, buf, length); writer->write_pos += length; end: return ret; } static inline int lttng_msgpack_append_u8( struct lttng_msgpack_writer *writer, uint8_t value) { return lttng_msgpack_append_buffer(writer, &value, sizeof(value)); } static inline int lttng_msgpack_append_u16( struct lttng_msgpack_writer *writer, uint16_t value) { value = htobe16(value); return lttng_msgpack_append_buffer(writer, (uint8_t *) &value, sizeof(value)); } static inline int lttng_msgpack_append_u64( struct lttng_msgpack_writer *writer, uint64_t value) { value = htobe64(value); return lttng_msgpack_append_buffer(writer, (uint8_t *) &value, sizeof(value)); } static inline int lttng_msgpack_append_f64( struct lttng_msgpack_writer *writer, double value) { union { double d; uint64_t u; } u; u.d = value; return lttng_msgpack_append_u64(writer, u.u); } static inline int lttng_msgpack_append_i64( struct lttng_msgpack_writer *writer, int64_t value) { return lttng_msgpack_append_u64(writer, (uint64_t) value); } static inline int lttng_msgpack_encode_u64( struct lttng_msgpack_writer *writer, uint64_t 
value) { int ret; ret = lttng_msgpack_append_u8(writer, MSGPACK_UINT64_ID); if (ret) goto end; ret = lttng_msgpack_append_u64(writer, value); if (ret) goto end; end: return ret; } static inline int lttng_msgpack_encode_i64( struct lttng_msgpack_writer *writer, int64_t value) { int ret; ret = lttng_msgpack_append_u8(writer, MSGPACK_INT64_ID); if (ret) goto end; ret = lttng_msgpack_append_i64(writer, value); if (ret) goto end; end: return ret; } static inline int lttng_msgpack_encode_f64( struct lttng_msgpack_writer *writer, double value) { int ret; ret = lttng_msgpack_append_u8(writer, MSGPACK_FLOAT64_ID); if (ret) goto end; ret = lttng_msgpack_append_f64(writer, value); if (ret) goto end; end: return ret; } static inline int lttng_msgpack_encode_fixmap( struct lttng_msgpack_writer *writer, uint8_t count) { int ret = 0; assert(count <= MSGPACK_FIXMAP_MAX_COUNT); ret = lttng_msgpack_append_u8(writer, MSGPACK_FIXMAP_ID_MASK | count); if (ret) goto end; end: return ret; } static inline int lttng_msgpack_encode_map16( struct lttng_msgpack_writer *writer, uint16_t count) { int ret; assert(count > MSGPACK_FIXMAP_MAX_COUNT); ret = lttng_msgpack_append_u8(writer, MSGPACK_MAP16_ID); if (ret) goto end; ret = lttng_msgpack_append_u16(writer, count); if (ret) goto end; end: return ret; } static inline int lttng_msgpack_encode_fixarray( struct lttng_msgpack_writer *writer, uint8_t count) { int ret = 0; assert(count <= MSGPACK_FIXARRAY_MAX_COUNT); ret = lttng_msgpack_append_u8(writer, MSGPACK_FIXARRAY_ID_MASK | count); if (ret) goto end; end: return ret; } static inline int lttng_msgpack_encode_array16( struct lttng_msgpack_writer *writer, uint16_t count) { int ret; assert(count > MSGPACK_FIXARRAY_MAX_COUNT); ret = lttng_msgpack_append_u8(writer, MSGPACK_ARRAY16_ID); if (ret) goto end; ret = lttng_msgpack_append_u16(writer, count); if (ret) goto end; end: return ret; } static inline int lttng_msgpack_encode_fixstr( struct lttng_msgpack_writer *writer, const char *str, uint8_t 
len) { int ret; assert(len <= MSGPACK_FIXSTR_MAX_LENGTH); ret = lttng_msgpack_append_u8(writer, MSGPACK_FIXSTR_ID_MASK | len); if (ret) goto end; ret = lttng_msgpack_append_buffer(writer, (uint8_t *) str, len); if (ret) goto end; end: return ret; } static inline int lttng_msgpack_encode_str16( struct lttng_msgpack_writer *writer, const char *str, uint16_t len) { int ret; assert(len > MSGPACK_FIXSTR_MAX_LENGTH); ret = lttng_msgpack_append_u8(writer, MSGPACK_STR16_ID); if (ret) goto end; ret = lttng_msgpack_append_buffer(writer, (uint8_t *) str, len); if (ret) goto end; end: return ret; } int lttng_msgpack_begin_map(struct lttng_msgpack_writer *writer, size_t count) { int ret; if (count < 0 || count >= (1 << 16)) { ret = -1; goto end; } if (count <= MSGPACK_FIXMAP_MAX_COUNT) ret = lttng_msgpack_encode_fixmap(writer, count); else ret = lttng_msgpack_encode_map16(writer, count); end: return ret; } int lttng_msgpack_end_map(struct lttng_msgpack_writer *writer) { // nothing for now I think // Sanity check later? return 0; } int lttng_msgpack_begin_array( struct lttng_msgpack_writer *writer, size_t count) { int ret; if (count < 0 || count >= (1 << 16)) { ret = -1; goto end; } if (count <= MSGPACK_FIXARRAY_MAX_COUNT) ret = lttng_msgpack_encode_fixarray(writer, count); else ret = lttng_msgpack_encode_array16(writer, count); end: return ret; } int lttng_msgpack_end_array(struct lttng_msgpack_writer *writer) { // nothing for now I think // Sanity check later? 
return 0; } int lttng_msgpack_write_str(struct lttng_msgpack_writer *writer, const char *str) { int ret; size_t length = strlen(str); if (length < 0 || length >= (1 << 16)) { ret = -1; goto end; } if (length <= MSGPACK_FIXSTR_MAX_LENGTH) ret = lttng_msgpack_encode_fixstr(writer, str, length); else ret = lttng_msgpack_encode_str16(writer, str, length); end: return ret; } int lttng_msgpack_write_nil(struct lttng_msgpack_writer *writer) { return lttng_msgpack_append_u8(writer, MSGPACK_NIL_ID); } int lttng_msgpack_write_true(struct lttng_msgpack_writer *writer) { return lttng_msgpack_append_u8(writer, MSGPACK_TRUE_ID); } int lttng_msgpack_write_false(struct lttng_msgpack_writer *writer) { return lttng_msgpack_append_u8(writer, MSGPACK_FALSE_ID); } int lttng_msgpack_write_u64( struct lttng_msgpack_writer *writer, uint64_t value) { return lttng_msgpack_encode_u64(writer, value); } int lttng_msgpack_write_i64(struct lttng_msgpack_writer *writer, int64_t value) { return lttng_msgpack_encode_i64(writer, value); } int lttng_msgpack_write_f64(struct lttng_msgpack_writer *writer, double value) { return lttng_msgpack_encode_f64(writer, value); } void lttng_msgpack_writer_init( struct lttng_msgpack_writer *writer, uint8_t *buffer, size_t size) { assert(buffer); assert(size >= 0); writer->buffer = buffer; writer->write_pos = buffer; writer->end_write_pos = buffer + size; } void lttng_msgpack_writer_fini(struct lttng_msgpack_writer *writer) { memset(writer, 0, sizeof(*writer)); } <file_sep>/Makefile CC=gcc CFLAGS=-O0 -g -Wall all: main main: main.o msgpack.o $(CC) $^ -o $@ %.o : %.c %.h $(CC) -c $(CFLAGS) $< -o $@ .PHONY: clean: rm -f *.o main <file_sep>/msgpack.h #ifndef _LTTNG_MSGPACK_H #define _LTTNG_MSGPACK_H #include <stddef.h> #include <stdint.h> struct lttng_msgpack_writer { uint8_t *buffer; uint8_t *write_pos; const uint8_t *end_write_pos; }; void lttng_msgpack_writer_init( struct lttng_msgpack_writer *writer, uint8_t *buffer, size_t size); void 
lttng_msgpack_writer_fini(struct lttng_msgpack_writer *writer); int lttng_msgpack_write_nil(struct lttng_msgpack_writer *writer); int lttng_msgpack_write_true(struct lttng_msgpack_writer *writer); int lttng_msgpack_write_false(struct lttng_msgpack_writer *writer); int lttng_msgpack_write_u64( struct lttng_msgpack_writer *writer, uint64_t value); int lttng_msgpack_write_i64( struct lttng_msgpack_writer *writer, int64_t value); int lttng_msgpack_write_f64(struct lttng_msgpack_writer *writer, double value); int lttng_msgpack_write_str(struct lttng_msgpack_writer *writer, const char *value); int lttng_msgpack_begin_map(struct lttng_msgpack_writer *writer, size_t count); int lttng_msgpack_end_map(struct lttng_msgpack_writer *writer); int lttng_msgpack_begin_array( struct lttng_msgpack_writer *writer, size_t count); int lttng_msgpack_end_array(struct lttng_msgpack_writer *writer); #endif /* _LTTNG_MSGPACK_H */ <file_sep>/main.c #include "msgpack.h" #include <assert.h> #include <stdint.h> #include <stdio.h> #include <string.h> #define RUN_TEST(func, buf, values, nb_values, expected) \ do { \ memset(buf, 0, 4096); \ func(buf, values, nb_values); \ int _ret = memcmp(buf, expected, sizeof(expected)); \ if (_ret == 0) \ printf("Test %s(%s) passed\n", #func, #values); \ else \ printf("Test %s(%s) failed\n", #func, #values); \ } while (0); #define BUFFER_SIZE 4096 static void string_test(uint8_t *buf, const char *value, int unused2) { struct lttng_msgpack_writer writer; lttng_msgpack_writer_init(&writer, buf, BUFFER_SIZE); lttng_msgpack_write_str(&writer, value); lttng_msgpack_writer_fini(&writer); } static void int_test(uint8_t *buf, int64_t value, int unused2) { struct lttng_msgpack_writer writer; lttng_msgpack_writer_init(&writer, buf, BUFFER_SIZE); lttng_msgpack_write_i64(&writer, value); lttng_msgpack_writer_fini(&writer); } static void uint_test(uint8_t *buf, uint64_t value, int unused2) { struct lttng_msgpack_writer writer; lttng_msgpack_writer_init(&writer, buf, 
BUFFER_SIZE); lttng_msgpack_write_u64(&writer, value); lttng_msgpack_writer_fini(&writer); } static void float_test(uint8_t *buf, double value, int unused2) { struct lttng_msgpack_writer writer; lttng_msgpack_writer_init(&writer, buf, BUFFER_SIZE); lttng_msgpack_write_f64(&writer, value); lttng_msgpack_writer_fini(&writer); } static void array_double_test(uint8_t *buf, double *values, size_t nb_values) { int i = 0; struct lttng_msgpack_writer writer; lttng_msgpack_writer_init(&writer, buf, BUFFER_SIZE); lttng_msgpack_begin_array(&writer, nb_values); for (i = 0; i < nb_values; i++) { lttng_msgpack_write_f64(&writer, values[i]); } lttng_msgpack_end_array(&writer); lttng_msgpack_writer_fini(&writer); } static void simple_capture_example(uint8_t *buf, int unused, int unused2) { /* * This testcase tests the following json representation: * {"id":17,"captures":["meow mix",18, null,14.197]} */ struct lttng_msgpack_writer writer; lttng_msgpack_writer_init(&writer, buf, BUFFER_SIZE); lttng_msgpack_begin_map(&writer, 2); lttng_msgpack_write_str(&writer, "id"); lttng_msgpack_write_u64(&writer, 17); lttng_msgpack_write_str(&writer, "captures"); lttng_msgpack_begin_array(&writer, 4); lttng_msgpack_write_str(&writer, "meow mix"); lttng_msgpack_write_u64(&writer, 18); lttng_msgpack_write_nil(&writer); lttng_msgpack_write_f64(&writer, 14.197); lttng_msgpack_end_array(&writer); lttng_msgpack_end_map(&writer); lttng_msgpack_writer_fini(&writer); } static void nil_test(uint8_t *buf, int unused, int unused2) { struct lttng_msgpack_writer writer; lttng_msgpack_writer_init(&writer, buf, BUFFER_SIZE); lttng_msgpack_write_nil(&writer); lttng_msgpack_writer_fini(&writer); } int main(int argc, char *argv[]) { uint8_t buf[BUFFER_SIZE] = {0}; double arr_double[] = {1.1, 2.3, -12345.2}; RUN_TEST(nil_test, buf, 0, 0, "\xc0"); RUN_TEST(string_test, buf, "bye", 1, "\xa3\x62\x79\x65"); RUN_TEST(uint_test, buf, 1337, 1, "\xcf\x00\x00\x00\x00\x00\x00\x05\x39"); RUN_TEST(int_test, buf, -4242, 1, 
"\xd3\xff\xff\xff\xff\xff\xff\xef\x6e"); RUN_TEST(float_test, buf, 0.0, 1, "\xcb\x00\x00\x00\x00\x00\x00\x00\x00"); RUN_TEST(float_test, buf, 3.14159265, 1, "\xcb\x40\x09\x21\xfb\x53\xc8\xd4\xf1"); RUN_TEST(float_test, buf, -3.14159265, 1, "\xcb\xc0\x09\x21\xfb\x53\xc8\xd4\xf1"); RUN_TEST(array_double_test, buf, arr_double, sizeof(arr_double) / sizeof(arr_double[0]), "\x93" // fixarray size 3 "\xcb\x3f\xf1\x99\x99\x99\x99\x99\x9a" "\xcb\x40\x02\x66\x66\x66\x66\x66\x66" "\xcb\xc0\xc8\x1c\x99\x99\x99\x99\x9a"); RUN_TEST(simple_capture_example, buf, 0, 0, "\x82" // fixmap size 2 "\xa2\x69\x64" // fixstr size2 "\xcf\x00\x00\x00\x00\x00\x00\x00\x11" "\xa8\x63\x61\x70\x74\x75\x72\x65\x73" "\x94" // fixarray size 4 "\xa8\x6d\x65\x6f\x77\x20\x6d\x69\x78" "\xcf\x00\x00\x00\x00\x00\x00\x00\x12" "\xc0" "\xcb\x40\x2c\x64\xdd\x2f\x1a\x9f\xbe"); // fwrite(buf, 1, BUFFER_SIZE, stdout); return 0; } <file_sep>/README.md lttng-msgpack =============
5c7422c61485bd0eac91275c82bf16fade684bfe
[ "Markdown", "C", "Makefile" ]
5
C
frdeso/lttng-msgpack
d24c2e46f858c50e49ba8bf2413485c56087ad27
91bab3c33f3b37531bd9db1416376d8d8878c39d
refs/heads/master
<repo_name>Blonder/resizablelib<file_sep>/ResizableDialog/DemoDlg.cpp // DemoDlg.cpp : implementation file // #include "stdafx.h" #include "Demo.h" #include "DemoDlg.h" #ifdef _DEBUG #define new DEBUG_NEW #undef THIS_FILE static char THIS_FILE[] = __FILE__; #endif ///////////////////////////////////////////////////////////////////////////// // CDemoDlg dialog CDemoDlg::CDemoDlg(CWnd* pParent /*=NULL*/) : CResizableDialog(CDemoDlg::IDD, pParent) { //{{AFX_DATA_INIT(CDemoDlg) // NOTE: the ClassWizard will add member initialization here //}}AFX_DATA_INIT // Note that LoadIcon does not require a subsequent DestroyIcon in Win32 m_hIcon = AfxGetApp()->LoadIcon(IDR_MAINFRAME); m_dwGripTempState = 1; } void CDemoDlg::DoDataExchange(CDataExchange* pDX) { CResizableDialog::DoDataExchange(pDX); //{{AFX_DATA_MAP(CDemoDlg) // NOTE: the ClassWizard will add DDX and DDV calls here //}}AFX_DATA_MAP } BEGIN_MESSAGE_MAP(CDemoDlg, CResizableDialog) //{{AFX_MSG_MAP(CDemoDlg) ON_BN_CLICKED(IDC_RADIO1, OnRadio1) ON_BN_CLICKED(IDC_RADIO2, OnRadio2) ON_WM_CREATE() //}}AFX_MSG_MAP END_MESSAGE_MAP() ///////////////////////////////////////////////////////////////////////////// // CDemoDlg message handlers BOOL CDemoDlg::OnInitDialog() { CResizableDialog::OnInitDialog(); // Set the icon for this dialog. 
The framework does this automatically // when the application's main window is not a dialog SetIcon(m_hIcon, TRUE); // Set big icon SetIcon(m_hIcon, FALSE); // Set small icon // preset layout AddAnchor(IDOK, BOTTOM_RIGHT); AddAnchor(IDCANCEL, BOTTOM_RIGHT); AddAnchor(IDC_SPIN1, TOP_RIGHT); AddAnchor(IDC_LABEL1, TOP_LEFT); AddAnchor(IDC_EDIT1, TOP_LEFT, BOTTOM_RIGHT); AddAnchor(IDC_GROUP1, BOTTOM_LEFT, BOTTOM_RIGHT); AddAnchor(IDC_RADIO1, BOTTOM_LEFT); AddAnchor(IDC_RADIO2, BOTTOM_LEFT); // other initializations // grip is visible by default CheckRadioButton(IDC_RADIO1, IDC_RADIO2, IDC_RADIO2); GetDlgItem(IDC_RADIO2)->SetFocus(); SetDlgItemText(IDC_EDIT1, _T("CResizableDialog\r\n\r\n") _T("This dialog can be resized to full width and half the height of the screen.\r\n") _T("The minimum size is by default the one in the dialog editor.\r\n") _T("Size grip can be switched off.\r\n\r\n") _T("Try to maximize and then to restore the dialog!\r\n") _T("Isn't it cool?")); // min/max size settings // get desktop size CRect rc; GetDesktopWindow()->GetClientRect(&rc); // set max tracking size to half a screen SetMaxTrackSize(CSize(rc.Width(), rc.Height()/2)); // maximized position and size on top of the screen rc.bottom = 100; SetMaximizedRect(rc); // save/restore // (for dialog based app, default is a .INI file with // the application's name in the Windows directory) EnableSaveRestore(_T("DemoDlg")); return FALSE; // return TRUE unless you set the focus to a control } void CDemoDlg::OnRadio1() { HideSizeGrip(&m_dwGripTempState); UpdateSizeGrip(); } void CDemoDlg::OnRadio2() { ShowSizeGrip(&m_dwGripTempState); UpdateSizeGrip(); } #define WS_EX_LAYOUT_RTL 0x00400000 int CDemoDlg::OnCreate(LPCREATESTRUCT lpCreateStruct) { // ModifyStyleEx(0, WS_EX_LAYOUT_RTL); if (CResizableDialog::OnCreate(lpCreateStruct) == -1) return -1; return 0; } <file_sep>/ResizableLib/StdAfx.h // stdafx.h : include file for standard system include files, or project // specific include files that are used 
frequently, but are changed infrequently // ///////////////////////////////////////////////////////////////////////////// // // This file is part of ResizableLib // https://github.com/ppescher/resizablelib // // Copyright (C) 2000-2015 by <NAME> // mailto:<EMAIL> // // The contents of this file are subject to the Artistic License 2.0 // http://opensource.org/licenses/Artistic-2.0 // // If you find this code useful, credits would be nice! // ///////////////////////////////////////////////////////////////////////////// #if !defined(AFX_RESIZABLESTDAFX_H__INCLUDED_) #define AFX_RESIZABLESTDAFX_H__INCLUDED_ #if _MSC_VER > 1000 #pragma once #endif // _MSC_VER > 1000 // Set max target Windows platform #define WINVER 0x0501 #define _WIN32_WINNT 0x0501 // Use target Common Controls version for compatibility // with CPropertyPageEx, CPropertySheetEx #define _WIN32_IE 0x0500 #define VC_EXTRALEAN // Exclude rarely-used stuff from Windows headers #include <afxwin.h> // MFC core and standard components #include <afxext.h> // MFC extensions #include <afxcmn.h> // MFC support for Windows Common Controls #include <shlwapi.h> // DLL Version support #if _WIN32_WINNT >= 0x0501 #include <uxtheme.h> // Windows XP Visual Style API support #endif #ifndef WS_EX_LAYOUTRTL #pragma message("Please update your Windows header files, get the latest SDK") #pragma message("WinUser.h is out of date!") #define WS_EX_LAYOUTRTL 0x00400000 #endif #ifndef WC_BUTTON #pragma message("Please update your Windows header files, get the latest SDK") #pragma message("CommCtrl.h is out of date!") #define WC_BUTTON TEXT("Button") #define WC_STATIC TEXT("Static") #define WC_EDIT TEXT("Edit") #define WC_LISTBOX TEXT("ListBox") #define WC_COMBOBOX TEXT("ComboBox") #define WC_SCROLLBAR TEXT("ScrollBar") #endif #define RSZLIB_NO_XP_DOUBLE_BUFFER //{{AFX_INSERT_LOCATION}} // Microsoft Visual C++ will insert additional declarations immediately before the previous line. 
// https://msdn.microsoft.com/en-us/library/23k5d385.aspx #pragma warning(default:4061) // enumerator 'identifier' in a switch of enum 'enumeration' is not explicitly handled by a case label #pragma warning(default:4062) // enumerator 'identifier' in a switch of enum 'enumeration' is not handled //#pragma warning(default:4242) // 'identifier' : conversion from 'type1' to 'type2', possible loss of data //#pragma warning(default:4254) // 'operator' : conversion from 'type1' to 'type2', possible loss of data #pragma warning(default:4255) // 'function' : no function prototype given : converting '()' to '(void)' #pragma warning(default:4263) // 'function' : member function does not override any base class virtual member function #pragma warning(default:4266) // 'function' : no override available for virtual member function from base 'type'; function is hidden #pragma warning(default:4289) // nonstandard extension used : 'var' : loop control variable declared in the for - loop is used outside the for - loop scope #pragma warning(default:4296) // 'operator' : expression is always false #pragma warning(default:4339) // 'type' : use of undefined type detected in CLR meta - data - use of this type may lead to a runtime exception #pragma warning(default:4355) // 'this' : used in base member initializer list //#pragma warning(default:4365) // 'action' : conversion from 'type_1' to 'type_2', signed / unsigned mismatch //#pragma warning(default:4388) // signed / unsigned mismatch //#pragma warning(default:4431) // missing type specifier - int assumed.Note: C no longer supports default - int #pragma warning(default:4435) // 'class1' : Object layout under / vd2 will change due to virtual base 'class2' #pragma warning(default:4437) // dynamic_cast from virtual base 'class1' to 'class2' could fail in some contexts #pragma warning(default:4471) // a forward declaration of an unscoped enumeration must have an underlying type(int assumed) #pragma warning(default:4514) // 'function' : 
unreferenced inline function has been removed #pragma warning(default:4536) // 'type name' : type - name exceeds meta - data limit of 'limit' characters #pragma warning(default:4571) // informational: catch (…) semantics changed since Visual C++ 7.1; structured exceptions(SEH) are no longer caught #pragma warning(default:4574) // 'identifier' is defined to be '0': did you mean to use '#if identifier' ? #pragma warning(default:4608) // 'symbol1' has already been initialized by another union member in the initializer list, 'symbol2' #pragma warning(default:4619) // #pragma warning: there is no warning number 'number' #pragma warning(default:4623) // 'derived class' : default constructor could not be generated because a base class default constructor is inaccessible #pragma warning(default:4625) // 'derived class' : copy constructor could not be generated because a base class copy constructor is inaccessible #pragma warning(default:4626) // 'derived class' : assignment operator could not be generated because a base class assignment operator is inaccessible #pragma warning(default:4640) // 'instance' : construction of local static object is not thread - safe #pragma warning(default:4668) // 'symbol' is not defined as a preprocessor macro, replacing with '0' for 'directives' #pragma warning(default:4682) // 'symbol' : no directional parameter attribute specified, defaulting to[in] #pragma warning(default:4710) // 'function' : function not inlined #pragma warning(default:4767) // section name 'symbol' is longer than 8 characters and will be truncated by the linker #pragma warning(default:4820) // 'bytes' bytes padding added after construct 'member_name' #pragma warning(default:4837) // trigraph detected : '??%c' replaced by '%c' #pragma warning(default:4931) // we are assuming the type library was built for number - bit pointers #pragma warning(default:4962) // 'function' : profile - guided optimizations disabled because optimizations caused profile data to become 
inconsistent #pragma warning(default:4986) // 'symbol' : exception specification does not match previous declaration #pragma warning(default:4987) // nonstandard extension used : 'throw (...)' #pragma warning(default:4988) // 'symbol' : variable declared outside class / function scope #endif // !defined(AFX_RESIZABLESTDAFX_H__INCLUDED_)
9bf4ec0c497e30ffd4900046781c8dcee115d71b
[ "C", "C++" ]
2
C++
Blonder/resizablelib
4be351101fdc24cb0d6eb6812cbb33b429421468
abda20726e34405831b33a933faee3d32262f7a5
refs/heads/master
<repo_name>hitswa/The-Pet-Network-Internship-Task<file_sep>/dist/php/api.php <?php require_once 'classes/class.functions.php'; require_once 'classes/class.utilities.php'; require_once 'classes/randomuser.php'; require_once 'classes/jokeapi.php'; session_start(); $action = isset($_REQUEST['action']) ? $_REQUEST['action'] : ''; if(!empty($action)) { switch ($action) { case 'new_user': // generate a random user $randomUser = new Randomuser; $user = $randomUser->generateNewUser(); // generate random likes $utility = new Utility; $likes = $utility->generateRandomString('numaric',3); // generate two jokes $joke = new Jokeapi; $joke1 = $joke->generateNewJoke(); $joke2 = $joke->generateNewJoke(); // enter the value of user in user_master tabel and get user_id $fields = array( 'id' => 'null', 'name' => '\''.$user['name'].'\'', 'gender' => '\''.$user['gender'].'\'', 'email' => '\''.$user['email'].'\'', 'cell' => '\''.$user['cell'].'\'', 'phone' => '\''.$user['phone'].'\'', 'image' => '\''.$user['image'].'\'', 'dob' => '\''.$user['dob'].'\'', 'address' => '\''.$user['address'].'\'', 'likes_count' => '\''.$likes.'\'', ); $inserted= MySql::insertData('user_master',$fields); if($inserted['success']) { $user_id = $inserted['id']; $fields = array( 'id' => 'null', 'user_id' => '\''.$user_id.'\'', 'joke_id' => '\'1\'', 'joke' => '\''.$joke1.'\'', ); $joke1Inserted= MySql::insertData('jokes',$fields); $fields = array( 'id' => 'null', 'user_id' => '\''.$user_id.'\'', 'joke_id' => '\'2\'', 'joke' => '\''.$joke2.'\'', ); $joke2Inserted= MySql::insertData('jokes',$fields); if( $joke1Inserted['success'] && $joke2Inserted['success'] ) { // setting up session keys $_SESSION['user_id'] = $inserted['id']; $_SESSION['name'] = $user['name']; $_SESSION['gender'] = $user['gender']; $_SESSION['email'] = $user['email']; $_SESSION['cell'] = $user['cell']; $_SESSION['phone'] = $user['phone']; $_SESSION['image'] = $user['image']; $_SESSION['dob'] = $user['dob']; $_SESSION['address'] = 
$user['address']; $_SESSION['image'] = $user['image']; $_SESSION['status'] = 'unliked'; $_SESSION['likes_count']= $likes; $_SESSION['joke'] = $joke1; $data = array( 'user_id' => $user_id, 'name' => $user['name'], 'gender' => $user['gender'], 'email' => $user['email'], 'cell' => $user['cell'], 'phone' => $user['phone'], 'image' => $user['image'], 'dob' => $user['dob'], 'address' => $user['address'], 'likes_count' => $likes, 'joke1' => $joke1, 'joke2' => $joke2, ); $arr = array( 'success' => 1, 'data' => $data, 'error' => null, 'message' => 'success', 'code' => '001', ); header('Content-Type: application/json'); echo json_encode($arr); exit(); } else { $arr = array( 'success' => 0, 'data' => null, 'error' => null, 'message' => 'not able to insert jokes in database', 'code' => '002', ); header('Content-Type: application/json'); echo json_encode($arr); exit(); } } else { $arr = array( 'success' => 0, 'data' => null, 'error' => $inserted['error'], 'message' => 'not able to insert new user in database', 'code' => '003', ); header('Content-Type: application/json'); echo json_encode($arr); exit(); } break; case 'like': $user_id = isset($_REQUEST['user_id']) ? 
$_REQUEST['user_id'] : ''; if( !empty($user_id) ) { // increment the like value $fields = array( 'likes_count' => '`likes_count` + 1', ); $condition = array( 'id' => '\''.$user_id.'\'', ); $updated = MySql::updateData('user_master',$fields,$condition); if($updated['success']) { $qry = "SELECT um.`likes_count`,j.`joke` FROM `user_master` AS um LEFT JOIN `jokes` AS j ON um.`id`=j.`user_id` WHERE um.`id`='".$user_id."' AND j.`joke_id`='1';"; $result = MySql::fetchRow($qry); if($result['success']) { // setting latest values in session $_SESSION['likes_count']= $result['data']['likes_count']; $_SESSION['joke'] = $result['data']['joke']; $_SESSION['status'] = 'liked'; $data = array( 'likes_count' => $result['data']['likes_count'], 'joke' => $result['data']['joke'], ); $arr = array( 'success' => 1, 'data' => $data, 'error' => null, 'message' => 'success', 'code' => '004', ); header('Content-Type: application/json'); echo json_encode($arr); exit(); } else { $arr = array( 'success' => 0, 'data' => null, 'error' => $result['error'], 'message' => 'error in fetching data', 'code' => '005', ); header('Content-Type: application/json'); echo json_encode($arr); exit(); } } else { $arr = array( 'success' => 0, 'data' => null, 'error' => null, 'message' => 'error in incrementing likes', 'code' => '006', ); header('Content-Type: application/json'); echo json_encode($arr); exit(); } } else { $arr = array( 'success' => 0, 'data' => null, 'error' => null, 'message' => 'required data missing', 'code' => '007', ); header('Content-Type: application/json'); echo json_encode($arr); exit(); } break; case 'dislike': $user_id = isset($_REQUEST['user_id']) ? 
$_REQUEST['user_id'] : ''; if( !empty($user_id) ) { // decrement the like value $fields = array( 'likes_count' => '`likes_count` - 1', ); $condition = array( 'id' => '\''.$user_id.'\'', ); $updated = MySql::updateData('user_master',$fields,$condition); if($updated['success']) { $qry = "SELECT um.`likes_count`,j.`joke` FROM `user_master` AS um LEFT JOIN `jokes` AS j ON um.`id`=j.`user_id` WHERE um.`id`='".$user_id."' AND j.`joke_id`='2';"; $result = MySql::fetchRow($qry); if($result['success']) { // setting latest values in session $_SESSION['likes_count']= $result['data']['likes_count']; $_SESSION['joke'] = $result['data']['joke']; $_SESSION['status'] = 'disliked'; $data = array( 'likes_count' => $result['data']['likes_count'], 'joke' => $result['data']['joke'], ); $arr = array( 'success' => 1, 'data' => $data, 'error' => null, 'message' => 'success', 'code' => '008', ); header('Content-Type: application/json'); echo json_encode($arr); exit(); } else { $arr = array( 'success' => 0, 'data' => null, 'error' => $result['error'], 'message' => 'error in fetching data', 'code' => '009', ); header('Content-Type: application/json'); echo json_encode($arr); exit(); } } else { $arr = array( 'success' => 0, 'data' => null, 'error' => null, 'message' => 'error in decrementing likes', 'code' => '010', ); header('Content-Type: application/json'); echo json_encode($arr); exit(); } } else { $arr = array( 'success' => 0, 'data' => null, 'error' => null, 'message' => 'required data missing', 'code' => '011', ); header('Content-Type: application/json'); echo json_encode($arr); exit(); } break; default: $arr = array( 'success' => 0, 'data' => null, 'error' => null, 'message' => 'no such api found', 'code' => '012', ); header('Content-Type: application/json'); echo json_encode($arr); exit(); break; } } else { $arr = array( 'success' => 0, 'data' => null, 'error' => null, 'message' => 'require data missing', 'code' => '013', ); header('Content-Type: application/json'); echo 
json_encode($arr); exit(); }<file_sep>/dist/php/classes/class.images.php <?php require_once 'class.utilities.php'; class Images{ public static function str2img($strImage) { list($type, $data) = explode(';', $strImage); list(, $data) = explode(',', $data); $rand1 = Utility::generateRandomString('alphanumaric',10); $rand2 = Utility::generateRandomString('alphanumaric',10); $newName = $rand1 . "-" . $rand2; $extension = ""; if ($type=="data:image/png") { $extension = ".png"; $location = "uploads/" . $newName . $extension; $img = Images::convertStrToPNG($data,$location); return $location; } else if ($type=="data:image/jpg") { $extension = ".jpg"; $location = "uploads/" . $newName . $extension; $img = Images::convertStrToJPG($data,$location); return $location; } else if ($type=="data:image/jpeg") { $extension = ".jpg"; $location = "uploads/" . $newName . $extension; $img = Images::convertStrToJPEG($data,$location); return $location; } else if ($type=="data:image/gif") { $extension = ".gif"; $location = "uploads/" . $newName . 
$extension; $img = Images::convertStrToGIF($data,$location); return $location; } } public static function convertStrToJPG($img,$location) { $imageData = base64_decode($img); $source = imagecreatefromstring($imageData); $angle = 0; $rotate = imagerotate($source, $angle, 0); // if want to rotate the image $imageSave = imagejpeg($rotate,$location,100); imagedestroy($source); if(!empty($imageSave)) return true; return false; } public static function convertStrToJPEG($img,$location) { $imageData = base64_decode($img); $source = imagecreatefromstring($imageData); $angle = 0; $rotate = imagerotate($source, $angle, 0); // if want to rotate the image $imageSave = imagejpeg($rotate,$location,100); imagedestroy($source); if(!empty($imageSave)) return true; return false; } public static function convertStrToPNG($img,$location) { $imageData = base64_decode($img); $img = file_put_contents($location, $imageData); if(!empty($img)) return true; return false; } public static function convertStrToGIF($img,$location) { $imageData = base64_decode($img); $source = imagecreatefromstring($imageData); $imageSave = imagegif($source,$location); imagedestroy($source); return true; } public static function png2jpg($originalFile, $outputFile, $quality) { $image = imagecreatefrompng($originalFile); imagejpeg($image, $outputFile, $quality); imagedestroy($image); } public static function jpg2png($originalFile, $outputFile) { imagepng(imagecreatefromstring(file_get_contents($originalFile)), $outputFile); } public static function gif2png($originalFile, $outputFile) { imagepng(imagecreatefromstring(file_get_contents($originalFile)), $outputFile); } public static function png2gif($originalFile, $outputFile, $quality) { imagegif(imagecreatefrompng(file_get_contents($originalFile)), $outputFile); } public static function str2png($strImage) { $imageData = $strImage; $imageData = explode(',', $imageData); $imageData = $imageData[1]; $imageData = base64_decode($imageData); $rand1 = 
Utility::generateRandomString('alphanumaric',10); $rand2 = Utility::generateRandomString('alphanumaric',10); $imageName = $rand1 . "-" . $rand2 . ".png"; $newImage = "final/" . $imageName; file_put_contents($newImage, $imageData); return $imageName; } public static function str2jpg($strImage) { $imageData = $strImage; $imageData = explode(',', $imageData); $imageData = $imageData[1]; $imageData = base64_decode($imageData); $source = imagecreatefromstring($imageData); $angle = 0; $rotate = imagerotate($source, $angle, 0); // if want to rotate the image $rand1 = Utility::generateRandomString('alphanumaric',10); $rand2 = Utility::generateRandomString('alphanumaric',10); $imageName = $rand1 . "-" . $rand2 . ".jpg"; $newImage = "uploads/" . $imageName; file_put_contents($newImage, $imageData); $imageSave = imagejpeg($rotate,$newImage,100); imagedestroy($source); return $newImage; } } <file_sep>/dist/php/classes/jokeapi.php <?php class Jokeapi { function generateNewJoke() { // fetch joke $jokeAPI = 'http://api.icndb.com/jokes/random'; $data = array(); $options = array( 'http' => array( 'header' => "Content-type: application/x-www-form-urlencoded\r\n", 'method' => 'GET', 'content' => http_build_query($data) ) ); $context = stream_context_create($options); $result1 = file_get_contents($jokeAPI, false, $context); if ($result1 === FALSE) { /* Handle error */ } $joke = json_decode($result1,true); if( count($joke['value']['categories']) != 0 ) { return '<b>'.strtoupper($joke['value']['categories'][0]).' JOKE:</b> ' . $joke['value']['joke']; } else { return '<b>JOKE:</b> ' . 
$joke['value']['joke']; } } }<file_sep>/Readme.md This project is made for a task to clear a round of Internship at The Pet Network Note: Project requires the internet connection to fetch user and jokes from APIs Project - is in "dist" directory Database backup - is in "database" directory Video - https://youtu.be/ctwJrtieZQ8 You can change database connection values in "dist/php/classes/class.database.php" file Language used : PHP Database : MySQL API Used : FOR JOKES [http://api.icndb.com/jokes/random] FOR USER DETAILS [https://randomuser.me/api/]<file_sep>/dist/php/classes/class.functions.php <?php require_once 'class.database.php'; class MySql { // Sends the query to the connection public function Query($sql) { $this->_result = $this->_link->query($sql) or die(mysqli_error($this->_result)); $this->_numRows = mysqli_num_rows($this->_result); } // Inserts into databse public function UpdateDb($sql) { $this->_result = $this->_link->query($sql) or die(mysqli_error($this->_result)); return $this->_result; } // Return the number of rows public function NumRows() { return $this->_numRows; } // Check if row exists public static function checkRowExists($sql) { $db = Database::getInstance(); $mysqli = $db->getConnection(); $result = $mysqli->query($sql); if (!$result) { $res = array( 'success' => '0', 'error' => $mysqli->error, 'message' => 'invalid query or no result' ); return $res; exit(); } $rows = $result->num_rows; $error = mysqli_error($mysqli); // $mysqli->close(); if($error == NULL) { if ($rows>0) { $res = array( 'success' => '1', 'error' => NULL, 'message' => 'row exists' ); } else { $res = array( 'success' => '0', 'error' => $error, 'message' => 'no row exists' ); } } else { $res = array( 'success' => '0', 'error' => $error, 'message' => 'execution error' ); } return $res; exit(); } // Check if row exists public static function countRows($sql) { $db = Database::getInstance(); $mysqli = $db->getConnection(); $result = $mysqli->query($sql); $rows = 
$result->num_rows; return $rows; } // insert data in table public static function insertData($tablename,$dataArray) { $sql = "INSERT INTO `".$tablename."`("; foreach ($dataArray as $key=>$value){ $sql .= "`" . $key . "`,"; } $sql = trim($sql,","); $sql .= ") VALUES ("; foreach($dataArray as $key=>$value){ $sql .= $value . ","; } $sql = trim($sql,","); $sql .= ");"; // return $sql; $db = Database::getInstance(); $mysqli = $db->getConnection(); $result = $mysqli->query($sql); $error = mysqli_error($mysqli); $id = $mysqli->insert_id; if( $id != 0 ){ $res = array( 'success' => '1', 'id' => $id, 'error' => NULL, ); } else { $res = array( 'success' => '0', 'error' => $error, ); } return $res; } // update data in table public static function updateData($tablename,$dataArray,$condition) { $sql = "UPDATE `".$tablename."` SET "; foreach($dataArray as $key=>$value) { $sql .= "`".$key."`=".$value.","; } $sql = trim($sql,","); $sql .= " WHERE "; foreach($condition as $key=>$value) { $sql .= "`".$key."`=".$value." AND "; } $sql = substr($sql, 0, -4); $sql .= ";"; // return $sql; $db = Database::getInstance(); $mysqli = $db->getConnection(); $result = $mysqli->query($sql); if (!$result) { $res = array( 'success' => '0', 'error' => $mysqli->error, 'message' => 'invalid query' ); return $res; exit(); } $error = mysqli_error($mysqli); if($error == NULL) { if (!empty($result)) { $res = array( 'success' => '1', 'error' => NULL, 'message' => 'row updated successfully' ); } else { $res = array( 'success' => '0', 'error' => $error, 'message' => 'no row updated' ); } } else { $res = array( 'success' => '0', 'error' => $error, 'message' => 'execution error' ); } return $res; exit(); /*if(!empty($result)) return TRUE; return FALSE;*/ } // update data in table public static function deleteData($tablename,$condition) { $sql = "DELETE FROM `".$tablename."` WHERE "; foreach($condition as $key=>$value) { $sql .= "`".$key."`=".$value." 
AND "; } $sql = substr($sql, 0, -4); $sql .= ";"; $db = Database::getInstance(); $mysqli = $db->getConnection(); $error = mysqli_error($mysqli); if ($mysqli->query($sql) === TRUE) { $res = array( 'success' => '1', 'error' => NULL, ); } else { $res = array( 'success' => '0', 'error' => $error, ); } return $res; } // execute a query defined public static function Execute($sql) { $db = Database::getInstance(); $mysqli = $db->getConnection(); $result = $mysqli->query($sql); if($result) return TRUE; return FALSE; } public static function fetchRow($sql) { $db = Database::getInstance(); $mysqli = $db->getConnection(); $result = $mysqli->query($sql); if (!$result) { $res = array( 'success' => '0', 'error' => $mysqli->error, 'message' => 'invalid query' ); return $res; exit(); } $numrow = $result->num_rows; $error = mysqli_error($mysqli); if($result->num_rows > 0){ $rows = $result->fetch_assoc(); $res = array( 'success' => '1', 'data' => $rows, 'error' => NULL, ); } else { $res = array( 'success' => '0', 'data' => NULL, 'error' => $error, ); } return $res; } public static function fetchAllRows($sql) { $db = Database::getInstance(); $mysqli = $db->getConnection(); $result = $mysqli->query($sql); if (!$result) { $res = array( 'success' => '0', 'error' => $mysqli->error, 'message' => 'invalid query' ); return $res; exit(); } $numrow = $result->num_rows; $error = mysqli_error($mysqli); if($result->num_rows > 0){ $res = array( 'success' => '1', 'data' => $result, 'error' => NULL, ); } else { $res = array( 'success' => '0', 'data' => $result, 'error' => $error, ); } return $res; } // Fetchs the rows and return them public function Rows() { $rows = array(); for($x = 0; $x < $this->NumRows(); $x++) { $rows[] = mysqli_fetch_assoc($this->_result); } return $rows; } // Used by other classes to get the connection public function GetLink() { return $this->_link; } // Securing input data public function SecureInput($value) { return mysqli_real_escape_string($this->_link, $value); } } 
?><file_sep>/dist/index.php <?php session_start(); ?><!DOCTYPE HTML> <html> <head> <title>The pet network</title> <link rel="stylesheet" href="assets/css/bootstrap.min.css"> <link rel="stylesheet" href="assets/css/bootstrap-theme.min.css"> <link rel="stylesheet" type="text/css" href="assets/css/font-awesome.min.css"> <style type="text/css"> .image { border:5px solid #ccc; height:128px;width:128px; } .communication { color:#666; } .post { text-align: justify; border:1px solid black; padding:10px; background: white; height: 142px; width: 545px; } .loader { width: 100px; height: 100px; position: absolute; top:20px; bottom: 0; left: 0; right: 0; margin: auto; } </style> </head> <body> <header class="container"> <div class="row"> <div class="col-md-10 col-md-offset-2"> <h1>THE PET NETWORK</h1> </div> </div><br> <div class="row"> <div class="col-md-2"></div> <div class="col-md-8"> <div class="row well"> <div class="col-md-3"> <image> <img class="image" src="<?php echo isset($_SESSION['image']) ? $_SESSION['image'] : 'assets/img/empty-user-photo.png' ?>"> </image> </div> <div class="col-md-8"> <div class="row"> <div class="col-md-12"> <h2><i class="fa fa-user"></i> <span class="name"><?php echo isset($_SESSION['name']) ? $_SESSION['name'] : '' ?></span> <small><span class="gender"><?php if(isset($_SESSION['gender'])) { if($_SESSION['gender']=='male') { echo '<i class="fa fa-mars"></i>'; } else { echo '<i class="fa fa-venus"></i>'; } } ?></span></small></h2> </div> </div> <div class="row communication"> <div class="col-md-12"> <i class="fa fa-envelope"></i> <span class="email"><?php echo isset($_SESSION['email']) ? $_SESSION['email'] : '' ?></span>&nbsp;&nbsp;&nbsp;&nbsp; <i class="fa fa-phone"></i> <span class="phone"><?php echo isset($_SESSION['phone']) ? $_SESSION['phone'] : '' ?></span>&nbsp;&nbsp;&nbsp;&nbsp; <i class="fa fa-mobile"></i> <span class="cell"><?php echo isset($_SESSION['cell']) ? 
$_SESSION['cell'] : '' ?></span> </div> </div> <div class="row" style="color:#666"> <div class="col-md-12"> <i class="fa fa-map-marker"></i> <span class="address"><?php echo isset($_SESSION['address']) ? $_SESSION['address'] : '' ?></span> </div> </div> <div class="row" style="color:#666"> <div class="col-md-12"> <i class="fa fa-birthday-cake"></i> <span class="dob"><?php echo isset($_SESSION['dob']) ? $_SESSION['dob'] : '' ?></span> </div> </div> </div> </div> </div> <div class="col-md-2"></div> </div> </header><br> <div class="container"> <div class="row"> <div class="col-md-6 col-md-offset-3 well"> <p class="post"> <?php echo isset($_SESSION['joke']) ? $_SESSION['joke'] : '<div class="loader"><i class="fa fa-spinner fa-pulse fa-3x fa-fw"></i></div>' ?> </p> <div class="row"> <div class="col-md-12" style="color:#777"> <b>Total Likes:</b> <span class="likes_count"><?php echo isset($_SESSION['likes_count']) ? $_SESSION['likes_count'] : '0' ?></span> <?php if( isset($_SESSION['status']) && ( $_SESSION['status']=='unliked' || $_SESSION['status'] == 'disliked' ) ) { $uid = $_SESSION['user_id']; echo "<button class='btn pull-right likeButton btn-info' data-id='$uid'><i class='fa fa-thumbs-up'></i> Like</button>"; } else if( isset($_SESSION['status']) && $_SESSION['status']=='liked' ) { $uid = $_SESSION['user_id']; echo "<button class='btn pull-right likeButton btn-danger' data-id='$uid'><i class='fa fa-thumbs-down'></i> Unlike</button>"; } else { echo "<button class='btn pull-right likeButton btn-info' data-id=''><i class='fa fa-thumbs-up'></i> Like</button>"; } ?> </div> </div> </div> </div> </div> <script type="text/javascript" src="assets/js/jquery-3.3.1.min.js"></script> <script type="text/javascript" src="assets/js/bootstrap.min.js"></script> <script type="text/javascript" src="assets/js/all.js"></script> <script type="text/javascript" src="assets/js/bootstrap-notify.min.js"></script> <script type="text/javascript"> $(document).ready(function(){ var user_id; <?php 
if(!isset($_SESSION['user_id'])) : ?> $.notify({ message: 'Welcome' },{ type: 'info' }); console.log('welcome'); $('.likeButton').addClass('btn-disabled'); $.post('php/api.php?action=new_user', function(res){ // console.log(res); if(res.success) { user_id = res.data.user_id; $('.likeButton').data('id',user_id); $('.image').attr('src',res.data.image); $('.name').html(res.data.name); $('.email').html(res.data.email); $('.cell').html(res.data.cell); $('.phone').html(res.data.phone); $('.address').html(res.data.address); $('.dob').html(res.data.dob); $('.loader').css('display','none'); $('.post').html(res.data.joke1); $('.likes_count').html(res.data.likes_count); $('.likeButton').removeClass('btn-disabled'); $.notify({ message: 'New user created' },{ type: 'success' }); // console.log('New user created'); } else { $.notify({ message: 'Fails to create a new user, try again' },{ type: 'danger' }); } }); <?php else : ?> $.notify({ message: 'Welcome back' },{ type: 'info' }); console.log('welcome back'); <?php endif; ?> var state = "<?php if( isset($_SESSION['status']) && ( $_SESSION['status']=='unliked' || $_SESSION['status'] == 'disliked' ) ) { echo 'unliked'; } else if( isset($_SESSION['status']) && $_SESSION['status']=='liked' ) { echo 'liked'; } else { echo 'unliked'; }?>"; $('.likeButton').click(function(){ if(state=="unliked") { var likeUrl = 'php/api.php?action=like&user_id=' + $('.likeButton').data('id'); console.log(likeUrl); $.post(likeUrl, function(res){ console.log(res); if(res.success) { $('.post').html(res.data.joke); $('.likes_count').html(res.data.likes_count); $('.likeButton').html('<i class="fa fa-thumbs-down" aria-hidden="true"></i> Unlike'); $('.likeButton').removeClass( 'btn-info' ); $('.likeButton').addClass( 'btn-danger' ); $.notify({ message: 'Post liked' },{ type: 'success' }); console.log('liked') state = 'liked'; } else { $.notify({ message: 'Try again' },{ type: 'danger' }); } }); } else if(state=="liked") { var dislikeUrl = 
'php/api.php?action=dislike&user_id=' + $('.likeButton').data('id'); console.log(dislikeUrl); $.post(dislikeUrl, function(res){ console.log(res); if(res.success) { $('.post').html(res.data.joke); $('.likes_count').html(res.data.likes_count); $('.likeButton').html('<i class="fa fa-thumbs-up" aria-hidden="true"></i> Like'); $('.likeButton').removeClass( 'btn-danger' ); $('.likeButton').addClass( 'btn-info' ); $.notify({ message: 'Post disliked' },{ type: 'success' }); console.log('disliked') state = 'unliked'; } else { $.notify({ message: 'Try again' },{ type: 'danger' }); } }); } }); }); </script> </body> </html><file_sep>/dist/php/classes/common.php <?php // Error Reporting error_reporting(0); // E_ALL | 0 ini_set("display_errors", 0); // 0 | 1 // Enviornment $rootUrl = "http://localhost/tpn"; // timezone date_default_timezone_set('Asia/Kolkata'); ?><file_sep>/dist/php/classes/randomuser.php <?php class Randomuser { function generateNewUser() { // fetch the details of new user $randomuserAPI = 'https://randomuser.me/api/'; $data = array(); $options = array( 'http' => array( 'header' => "Content-type: application/x-www-form-urlencoded\r\n", 'method' => 'GET', 'content' => http_build_query($data) ) ); $context = stream_context_create($options); $result = @file_get_contents($randomuserAPI, false, $context); if ($result === FALSE) { /* Handle error */ } $data = json_decode($result,true); $data=$data['results'][0]; $_name = strtoupper($data['name']['title'] . '. ' . $data['name']['first'] . ' ' . $data['name']['last']); $_gender = $data['gender']; $_email = $data['email']; $_cell = $data['cell']; $_phone = $data['phone']; $_image = $data['picture']['large']; $dob = $data['dob']; $dateArray = explode( "-", explode(" ",$dob)[0] ); $months = ['', 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']; $_dob = $dateArray[2] . ', ' . $months[intval($dateArray[1])-1] . ' ' . 
$dateArray[0]; $location = $data['location']; $_address = $location['street'] . ', ' . $location['city'] . ', ' . $location['state'] . ', ' . $location['postcode']; $arr = array( "name" => $_name, "gender" => $_gender, "image" => $_image, "email" => $_email, "cell" => $_cell, "phone" => $_phone, "dob" => $_dob, "address" => $_address, ); return $arr; } }
da5c7e078aba205a1a0c133403db64cc69bfa41d
[ "Markdown", "PHP" ]
8
PHP
hitswa/The-Pet-Network-Internship-Task
7cf06bc2c9adf1f0871932e5ef2a1a8ca167b28b
325cbe26b16f1d4991fefccf162b70e9054de225
refs/heads/master
<file_sep>#!usr/bin/python from bs4 import BeautifulSoup import urllib2 import re sc=[] def scorecard(bat , ball): tup=[] #batting I innings p=bat.findAll("tr") for x in p: k=x.findAll("th") #print k for xx in k: #appending for a complete list tup.append(xx.get_text().strip()) sc.append(tup) tup=[] k=x.findAll("td")[0:len(k)-1] for xx in k: tup.append(xx.get_text().strip()) sc.append(tup) tup=[] sc.append([]) #bowling I innings #for heading part p=ball.find("tr") k=p.findAll("th") for xx in k: tup.append(xx.get_text().strip()) #for complete details sc.append(tup) tup=[] p=ball.findAll("tr")[1::2] for x in p: k=x.findAll("td") for xx in k: tup.append(xx.get_text().strip()) sc.append(tup) tup=[] def full_sc(url1): print "*** Full scorecard ***" content1 = urllib2.urlopen(url1).read() soup = BeautifulSoup(content1,"lxml") bat_inn = soup.findAll("table", "batting-table innings") bowl_inn = soup.findAll("table", "bowling-table") for i in range(len(bat_inn)): try: scorecard(bat_inn[i], bowl_inn[i]) except IndexError: continue #for print fromat sc.append(["*********************"]) ## printing the final scorecard for i in sc: print "\t".join(i) def main(): #url for the webpage for live matches url = "http://www.espncricinfo.com/ci/engine/match/index.html?view=live" content = urllib2.urlopen(url).read() soup = BeautifulSoup(content,"lxml") match_sec = soup.findAll("section", "default-match-block") #team to be searched team = raw_input("Enter the name of team ") #flag for successful search flag=0 for i in match_sec: In1= i.find("div", "innings-info-1") In2= i.find("div", "innings-info-2") status= i.find("div", "match-status") p = In1.find(text=re.compile(team , re.I )) q = In2.find(text=re.compile(team, re.I ) ) if(p != None or q != None): print In1.get_text() , In2.get_text(), status.get_text() flag=1 #for full scorecard fsc=raw_input("Do you want full score card? 
[Y/N]: ") fsc=fsc.lower() if(fsc=='y'): #function for full scorecard display p = i.find("div", "match-articles") #since the scorecard link tag is first tag in this div q = p.find("a") fs_link = q['href'] fs_link="http://www.espncricinfo.com" + fs_link + "?view=scorecard;wrappertype=none" full_sc(fs_link) break if(flag == 0): print "No Match for" , team if __name__ == "__main__": main() <file_sep># Cricscore A web Crawler built in python using BeautifulSoup to crawl the live score and status for the cricket match happening all over the world It has the facility to search by the team and also view full score card if wanted ##TODO - [X] Add CLI Integration for terminal - [ ] Format Output in Tabular Form - [ ] Add search by tournaments - [ ] View the next match / previous results ##USGE ####Using PIP ``` $ pip install cricket-score ``` ####Clone Github Package ``` $ git clone https://github.com/ankushg07/cricscore.git ``` ####Searching ``` $ cricscore Enter the Team: india ```
08221f8db95b68836edae153992feb48d8e1df8c
[ "Markdown", "Python" ]
2
Python
iKshitiz/cricscore
933bc337b4f31b665640c3d37bf20ba54480b4ed
c1a6e55b83309588621b8255950ced0a6b54da04
refs/heads/master
<file_sep>import React from 'react'; import { render } from 'react-dom'; import { Provider } from 'react-redux'; import { Router, Route, IndexRoute, browserHistory } from 'react-router'; import store from './store'; import App from './components/App'; import SignIn from './components/auth/SignIn'; import SignUp from './components/auth/SignUp'; import EntitiesMain from './components/EntitiesMain'; const display = document.querySelector('.container'); render( <Provider store={store}> <Router history={browserHistory}> <Route path='/' component={App}> <IndexRoute component={SignUp} /> <Route path='/signup' component={SignUp} /> <Route path='/profile'component={EntitiesMain} /> </Route> </Router> </Provider>, display)<file_sep>'use strict'; const Boom = require('boom'); const User = require('../models/User'); const UserLoginSchema = require('../joiSchemas/userLogin'); const verifyIfCurrentUser = require('../util/userfunc').verifyIfCurrentUser; const createToken = require('../util/createToken'); module.exports = { method: 'POST', path: '/signin', config: { pre: [ { method: verifyIfCurrentUser, assign: 'user' } ], handler: (request, reply) => { console.log('in currentUser', request.pre.user) reply({ auth_token: createToken(request.pre.user )}).code201 }, validate: { payload: UserLoginSchema } } };<file_sep>'use strict'; const Boom = require('boom'); const Sequelize = require('sequelize'); const User = require('../models/User'); function verifyIfCurrentUser(request, reply) { console.log('in verifyIfCurrentUser', request.payload) const password = request.payload.password; User.find({ where: Sequelize.or({ username: request.payload.username }, { email: request.payload.email }) }) .then((user) => { bcrypt.compare(password, user.password, (err, isValid) => { if(isValid) { reply(user); }else { reply(Boom.badRequest('Incorrect password')); } }) }) .catch(err => { reply(Boom.badRequest('Incorrect username or email')); }) } function verifyIfUniqueUser(request, reply) { 
console.log('in verifyIfUniqueUser', request.payload) // find matching email/username in database User.find({ // or findAll? where: Sequelize.or({ username: request.payload.username }, { email: request.payload.email }) }) .then((user) => { console.log('user', user) if(user.username === request.payload.username) { reply(Boom.badRequest('Username taken')); } if(user.email === request.payload.email) { reply(Boom.badRequest('Email taken')); } // if indeed a new user continue to route reply(request.payload); }); }; module.exports = { verifyIfCurrentUser, verifyIfUniqueUser };<file_sep>import React from 'react'; import { shallow } from 'enzyme'; import expect from 'expect'; import { CachedEntitiesList } from '../../components/CachedEntitiesList'; import Entities from '../../components/Entities'; import EntitiesText from '../../components/EntitiesText'; function setup() { const props = { cachedMatchingArticles: [] }; const wrapper = shallow( <CachedEntitiesList {...props} /> ) return { props, wrapper }; } describe('CachedEntitiesList', () => { let props; let wrapper; beforeEach(() => { ( { props, wrapper } = setup() ) }); it('should render self and subcomponents', () => { expect(wrapper.length).toEqual(1); expect(wrapper.find('div').length).toEqual(1); }); it('should render render correct number of Entities', () => { const testProps ={ cachedMatchingArticles: [{}, {}, {}] }; const testWrapper = shallow( <CachedEntitiesList {...Object.assign({}, props, testProps)} /> ) expect(testWrapper.find('div').length).toEqual(4); expect(testWrapper.find(Entities).length).toEqual(3); expect(testWrapper.find(EntitiesText).length).toEqual(3); expect(testWrapper.find(EntitiesText).length).toNotEqual(7); }); }); <file_sep>'use strict'; const Sequelize = require('sequelize'); const sequelizeConfig = require('../../config/database.config.js'); const SearchCache = sequelizeConfig.define('searchcache', { searchInput: { type: Sequelize.STRING(3000), unique: true }, entities: { type: 
Sequelize.JSONB }, entitiesText: Sequelize.STRING(1000) }); module.exports = SearchCache;<file_sep>'use strict'; const jwt = require('jsonwebtoken'); const secret = require('../../config/config'); function createToken(user) { console.log('in createToken', user) return jwt.sign({ id: user.id, username: user.username }, secret, { algorithm: 'HS256', expiresIn: '1h' }); }; module.exports = createToken;<file_sep>'use strict'; const bcrypt = require('bcrypt-nodejs'); const hashPassword = require('../util/hashPassword'); const Boom = require('boom'); const User = require('../models/User'); const CreateUserSchema = require('../joiSchemas/createUser'); const verifyIfUniqueUser = require('../util/userFunc').verifyIfUniqueUser; const createToken = require('../util/createToken'); module.exports = { method: 'POST', path: '/signup', config: { // verify user unique before handler runs pre: [ { method: verifyIfUniqueUser } ], handler: (request, reply) => { console.log('in createUser', request.payload) var user = { email: request.payload.email, username: request.payload.username, }; hashPassword(request.payload.password, (err, hash) => { if(err) { throw Boom.badRequest(err) }; user.password = <PASSWORD>; User.sync() .then(() => { return User.create({ email, username, password }) }) .then(() => reply({ auth_token: createToken(user) // issues JWT token on successful user creation. 
})).code(201) .catch(err => reply({ result: `ERROR in '/dbsave/textorurlsearch' path via server: ${err}` })) }) }, validate: { payload: CreateUserSchema; } } };<file_sep>const AYLIENTextAPI = require('aylien_textapi'); const textapi = new AYLIENTextAPI({ application_id: 'YOUR_API_ID', application_key: 'YOUR_API_KEY' }); module.exports = textapi;<file_sep>import React from 'react'; import { shallow } from 'enzyme'; import expect, { createSpy } from 'expect'; import { GeneralSearchBar } from '../../components/GeneralSearchBar'; function setup() { const props = { handleFormSubmit: () => {}, handleInputChange: () => {}, dbFetchEntities: () => {} }; const wrapper = shallow( <GeneralSearchBar {...props} onSubmit={props.handleFormSubmit} onChange={props.handleInputChange}/> ) return { props, wrapper }; } describe('CachedSearchBar', () => { let props; let wrapper; beforeEach(() => { ( { props, wrapper } = setup() ) }); it('should render self and subcomponents', () => { expect(wrapper.length).toEqual(1); expect(wrapper.find('form').length).toEqual(1); expect(wrapper.find('form').hasClass('input-group')).toEqual(true); expect(wrapper.find('form').prop('onSubmit')).toBeA('function'); expect(wrapper.find('form').find('input').length).toEqual(1); expect(wrapper.find('form').find('span').length).toEqual(1); expect(wrapper.find('form').find('span').find('button').length).toEqual(1); expect(wrapper.find('input').hasClass('form-control form-control-lg input-form')).toEqual(1); expect(wrapper.find('input').prop('value')).toBe(''); expect(wrapper.find('input').prop('onChange')).toBeA('function'); expect(wrapper.find('input').prop('placeholder')).toBe('Enter text or a URL~!'); expect(wrapper.find('span').hasClass('input-group-btn')).toEqual(true); expect(wrapper.find('button').hasClass('btn btn-secondary btn-lg input-btn')).toEqual(true); expect(wrapper.find('button').prop('type')).toBe('submit'); expect(wrapper.find('button').text()).toBe('Extract Entities'); }); });<file_sep>export 
function isWhatType(search) { let inputType = ''; if(search.startsWith('http://') || search.startsWith('https://')) { inputType = 'url'; }else { inputType = 'text'; }; return inputType; };<file_sep>import React from 'react'; import { shallow } from 'enzyme'; import expect from 'expect'; import { App } from '../../components/App'; describe('App', () => { let wrapper; beforeEach(() => { wrapper = shallow( <App /> ) }); it('should render self and subcomponents', () => { expect(wrapper.length).toEqual(1); expect(wrapper.find('div').length).toEqual(1); expect(wrapper.find('div').hasClass('container')).toEqual(true); }); }); <file_sep>import React, { Component, PropTypes } from 'react'; import shallowCompare from 'react/lib/shallowCompare'; import { connect } from 'react-redux'; import { dbFetchEntities } from '../redux/actions/actions'; export class GeneralSearchBar extends Component { constructor(props) { super(props); this.state = { search: '' }; this.handleInputChange = this.handleInputChange.bind(this); this.handleFormSubmit = this.handleFormSubmit.bind(this); } shouldComponentUpdate(nextProps, nextState) { return shallowCompare(this, nextProps, nextState); }; handleFormSubmit(event) { event.preventDefault(); console.log(this.state.search) this.props.dbFetchEntities(this.state.search); this.setState({ search: '' }); } handleInputChange(event) { this.setState({ search: event.target.value }); } render() { return ( <form className='input-group' onSubmit={ this.handleFormSubmit }> <input className='form-control form-control-lg input-form' value={ this.state.search } onChange={ this.handleInputChange } placeholder='Enter text or a URL~!' 
/> <span className='input-group-btn' > <button className='btn btn-secondary btn-lg input-btn' type='submit'> Extract Entities </ button> </span> </form> ) }; }; GeneralSearchBar.propTypes = { dbFetchEntities: React.PropTypes.func.isRequired }; export default connect(null, { dbFetchEntities })(GeneralSearchBar);<file_sep>import { combineReducers } from 'redux'; import { reducer as formReducer } from 'redux-form'; import entitiesReducer from './entitiesReducer'; import authReducer from './authReducer'; const rootReducer = combineReducers({ entity: entitiesReducer, auth: authReducer, form: formReducer }); export default rootReducer;<file_sep>// used for authentication module.exports = { secret: 'YOUR_SECRET_KEY_CAN_BE_RANDOM' };<file_sep>import React, { Component } from 'react'; import { reduxForm } from 'redux-form'; import { signUpUser } from '../../redux/actions/actions'; class Signup extends Component { handleSubmit(formProps) { this.props.signUpUser(formProps); } renderAlert() { if(this.props.errorMessage) { return( <div className='alert alert-danger'> <strong>Oops!</strong> {this.props.errorMessage} </div> ) } }; render() { const { handleSubmit, fields: { username, email, password, passwordConfirm } } = this.props; return ( <form className='signup m-x-auto' onSubmit={handleSubmit(this.handleSubmit.bind(this))}> <fieldset className='form-group'> <input className='form-control form-control-lg' type='text' placeholder='Username' {...username} /> { username.touched && username.error && <div className='alert alert-danger'>{username.error}</div> } </fieldset> <fieldset className='form-group'> <input className='form-control form-control-lg' type='email' placeholder='Email' {...email} /> { email.touched && email.error && <div className='alert alert-danger'>{email.error}</div> } </fieldset> <fieldset className='form-group'> <input className='form-control form-control-lg' type='password' placeholder='<PASSWORD>' {...password} /> { password.touched && password.error && 
<div className='alert alert-danger'>{password.error}</div> } </fieldset> <fieldset className='form-group'> <input className='form-control form-control-lg' type='password' placeholder='<PASSWORD> Password' {...passwordConfirm} /> { passwordConfirm.touched && passwordConfirm.error && <div className='alert alert-danger'>{passwordConfirm.error}</div> } </fieldset> {this.renderAlert()} <div className='text-sm-center'> <button action='submit' className='btn btn-primary btn-lg btn-sign' > Sign Up </button> </div> </form> ) } }; function validate(formProps) { const errors = {}; for(var prop in formProps) { if(prop !== 'passwordConfirm' && !formProps[prop]) { errors[prop] = `Please enter ${prop}`; }else if(prop === 'passwordConfirm' && !formProps[prop]) { errors[prop] = 'Please enter a password confirmation'; } } if(formProps.password !== formProps.passwordConfirm) { errors.password = "Must <PASSWORD> passwords." } return errors; } function mapStateToProps({ auth: { errorMessage = '' } = {}}) { return { errorMessage }; }; export default reduxForm({ form: 'signup', fields: ['username', 'email', 'password', 'passwordConfirm'], validate }, mapStateToProps, { signUpUser })(Signup);<file_sep>import React, { Component, PropTypes } from 'react'; import shallowCompare from 'react/lib/shallowCompare'; import { connect } from 'react-redux'; import GeneralSearchBar from './GeneralSearchBar'; import CachedSearchBar from './CachedSearchBar'; import EntitiesList from './EntitiesList'; import CachedEntitiesList from './CachedEntitiesList'; import CachedSearchList from './CachedSearchList'; import '../../client/styles.css'; export class EntitiesMain extends Component { shouldComponentUpdate(nextProps, nextState) { return shallowCompare(this, nextProps, nextState); }; render() { const { cachedMatchingArticles, ifNoResultsFound } = this.props; let renderView = cachedMatchingArticles.length ? <CachedEntitiesList /> : <EntitiesList />; let checkResults = (ifNoResultsFound === '') ? 
renderView : ( <div className='text-xs-right error'> { ifNoResultsFound } </div> ) return ( <div className='container-fluid'> <div className='row'> <div className='col-xs-12 col-sm-3'> <CachedSearchList /> </div><br /><br /> <div className='col-xs-12 col-sm-9'> <div className='row'> <div className='col-xs-12 col-sm-7'> <GeneralSearchBar /> </div> <div className='col-xs-12 col-sm-5'> <CachedSearchBar /> </div> </div><br /> { checkResults } </div> </div> </div> ) } }; EntitiesMain.propTypes = { cachedMatchingArticles: React.PropTypes.array.isRequired, ifNoResultsFound: React.PropTypes.string.isRequired }; function mapStateToProps({ entity:{ cachedMatchingArticles = [], ifNoResultsFound = '' }, auth: { authenticated = false } } = {}) { return { cachedMatchingArticles, ifNoResultsFound, authenticated }; }; export default connect(mapStateToProps)(EntitiesMain);<file_sep>const bcrypt = require('bcrypt-nodejs'); const hashPassword = function(password, cb) { bcrypt.genSalt(10, (err, salt) => { bcrypt.hash(password, salt, (err, hash) => { return cb(err, hash); }); }); }; module.exports = hashPassword;<file_sep>import React from 'react'; import { shallow } from 'enzyme'; import expect from 'expect'; import { EntitiesList } from '../../components/EntitiesList'; import Entities from '../../components/Entities'; import EntitiesText from '../../components/EntitiesText'; function setup() { const props = { entities: {date: ['Monday', 'Tuesday'], organization: ['Apple', 'DonutNation', 'Ninja Turtle Co.']}, entitiesText: 'some text from url' }; const wrapper = shallow( <EntitiesList {...props} /> ) return { props, wrapper }; }; describe('EntitiesList', () => { let props; let wrapper; beforeEach(() => { ( { props, wrapper } = setup() ) }); it('should render self and subcomponents', () => { expect(wrapper.length).toEqual(1); expect(wrapper.find('div').length).toEqual(1); expect(wrapper.find(Entities).length).toEqual(1); expect(wrapper.find(EntitiesText).length).toEqual(1); }); 
it('should not render Entities and/or EntitiesText if props are empty', () => { const testProps = { entities: {date: ['Monday', 'Tuesday'], organization: ['Apple', 'DonutNation', 'Ninja Turtle Co.']}, entitiesText: '' }; const testWrapper = shallow( <EntitiesList {...Object.assign({}, props, testProps)} /> ) const testProps2 = { entities: {}, entitiesText: 'some text from url' }; const testWrapper2 = shallow( <EntitiesList {...Object.assign({}, props, testProps2)} /> ) expect(testWrapper.find(Entities).length).toEqual(1); expect(testWrapper.find(EntitiesText).length).toEqual(0); expect(testWrapper2.find(Entities).length).toEqual(0); expect(testWrapper2.find(EntitiesText).length).toEqual(1); }); });<file_sep>import axios from 'axios'; import { browserHistory } from 'react-router'; import { checkStringLength } from './checkStringLength'; import { isWhatType } from './isWhatType'; import { CACHE_SEARCH, SEND_ENTITIES, RESET_CACHED_ARTICLES, RESET_ENTITIES, SEND_ARTICLES, NO_RESULTS, AUTH_USER } from './types'; const ROOT_URL = 'http://localhost:3000'; // Authentication actions: export function signUpUser({ username, email, password }) { return function(dispatch){ axios.post(`${ROOT_URL}/signup`, { username, email, password }) .then(res => { console.log('back from signup!', res) // dispatch({ type: AUTH_USER }) // localStorage.setItem('token', res.data.token); // browserHistory.push('/feature'); }) .catch((res) => { console.log('error', res) // console.log(res.data.error, res.data.errorMessage) // dispatch(authError(res.data.error)) }); } }; // Entity actions: // triggered when new input entered in cachedsearchbar export function fetchEntitiesCachedSearch(search) { return (dispatch) => { axios.get(`${ROOT_URL}/entities/dbmatch?search=${search}`) .then(res => { if(res.data.result === 'success') { dispatch(resetEntitiesAndEntitiesText()); dispatch(sendArticles(res.data.articlesFound)); }else { dispatch(noResults()); } }) .catch(err => { console.log(`action 
fetchEntitiesCachedSearch ${err}`) }) }; }; // triggered on general search and cachedsearched click // if in cachedSearch, that means it's in the database otherwise fetch from API export function dbFetchEntities(search, triggerFromCachedSearch) { let inputType; if(!triggerFromCachedSearch) { inputType = isWhatType(search); }; return (dispatch, getState) => { const cachedSearch = getState().entity.cachedSearch; if(triggerFromCachedSearch || cachedSearch.indexOf(search) !== -1) { axios.get(`${ROOT_URL}/entities/db?search=${search}`) .then(res => { if(res.data.result === 'success') { dispatch(resetCachedMatchingArticles()); dispatch(sendEntities(res.data.entities, res.data.entitiesText, search)); }; }) .catch(err => console.log(err)); }else { dispatch(fetchEntities(search, inputType)); }; }; }; // fetch from API export function fetchEntities(search, inputType) { return (dispatch) => { axios.get(`${ROOT_URL}/entities/api?${inputType}=${search}`) .then(res => { dispatch(resetCachedMatchingArticles()) dispatch(sendEntities(res.data.entities, res.data.text, search)); dispatch(cacheSearch(search)); // save to previous searches dispatch(dbSaveEntities(search, res)) // save to postgreSQL db }) .catch(err => console.log(`axios GET request: ${err}`)) }; }; // save entities to db export function dbSaveEntities(search, entitiesData) { const entities = entitiesData.data.entities; const entitiesText = checkStringLength(entitiesData.data.text).replace(/\n/g,''); return (dispatch) => { axios.post(`${ROOT_URL}/dbsave`, { search, entities, entitiesText }) .then(res => { console.log(`Saved in database ${res.data.result}fully!`); }) .catch(err => console.log('dbSaveEntitiesForNormalSearch:',err)); }; }; export function sendArticles(articles) { return { type: SEND_ARTICLES, articles }; }; export function sendEntities(entities, entitiesText, search) { return { type: SEND_ENTITIES, entities, entitiesText, search }; }; export function cacheSearch(search) { return { type: CACHE_SEARCH, 
search }; }; export function resetCachedMatchingArticles() { return { type: RESET_CACHED_ARTICLES }; }; export function resetEntitiesAndEntitiesText() { return { type: RESET_ENTITIES }; }; export function noResults() { return { type: NO_RESULTS } }<file_sep>import React from 'react'; import { shallow } from 'enzyme'; import expect from 'expect'; import { Entities } from '../../components/Entities'; function setup() { const props = { entities: {date: ['Monday', 'Tuesday'], organization: ['Apple', 'DonutNation', 'Ninja Turtle Co.']}, articleEntities: undefined, articleSearch: undefined }; const wrapper = shallow( <Entities {...props} /> ) return { props, wrapper }; }; describe('Entities', () => { let props; let wrapper; beforeEach(() => { ( { props, wrapper } = setup() ) }); it('should render self and subcomponents', () => { expect(wrapper.length).toEqual(1); expect(wrapper.find('div').length).toEqual(1); expect(wrapper.find('table').length).toEqual(1); expect(wrapper.find('table').at(0).hasClass('table table-entities table-bordered table-hover')).toEqual(true); expect(wrapper.find('table').find('thead').hasClass('table-header')).toEqual(true); expect(wrapper.find('table').find('thead').find('tr').length).toEqual(1); expect(wrapper.find('table').find('thead').find('tr').find('th').length).toEqual(2); expect(wrapper.find('table').find('thead').find('tr').find('th').at(0).hasClass('text-xs-center')).toEqual(true); expect(wrapper.find('th').at(0).text()).toBe(' Entity '); expect(wrapper.find('th').at(1).text()).toBe(' Entity Data '); expect(wrapper.find('table').find('tbody').length).toEqual(1); expect(wrapper.find('table').find('tbody').find('tr').length).toEqual(2); expect(wrapper.find('td').length).toEqual(4); }); it('should render previously found articles instead of entities prop if they exist', () => { const testProps = { entities: {}, articleEntities: {date: ['Monday', 'Tuesday'], organization: ['Apple', 'DonutNation', 'Ninja Turtle Co.'], people: ['Sandra', 
'Kyle', 'Phil']}, articleSearch: 'http://www.somewebsite.com' }; const testWrapper = shallow( <Entities {...Object.assign({}, props, testProps)} /> ) const allText = testWrapper.find('div').text().split(' '); expect(allText[2]).toBe(testProps.articleSearch); expect(testWrapper.find('table').find('tbody').find('tr').length).toEqual(3); expect(testWrapper.find('td').length).toEqual(6); }); });<file_sep>import React from 'react'; import { shallow } from 'enzyme'; import expect, { createSpy } from 'expect'; import { CachedSearchList } from '../../components/CachedSearchList'; function setup() { const props = { cachedSearch: [], dbFetchEntities: expect.createSpy() }; const wrapper = shallow( <CachedSearchList {...props} onClick={() => this.props.dbFetchEntities(search, true)} /> ) return { props, wrapper }; } describe('CachedSearchList', () => { let props; let wrapper; beforeEach(() => { ( { props, wrapper } = setup() ) }); it('should render self and subcomponents', () => { expect(wrapper.length).toEqual(1); expect(wrapper.find('div').length).toEqual(2); expect(wrapper.find('div').at(0).hasClass('cachedSearch-div text-center')).toEqual(true); expect(wrapper.find('div').at(1).hasClass('general-font cachedSearch-heading')).toEqual(true); expect(wrapper.find('div').at(1).text()).toEqual(' Previous Searches '); expect(wrapper.find('hr').length).toEqual(1); expect(wrapper.find('ul').length).toEqual(1); expect(wrapper.find('ul').hasClass('cachedSearch-ul list-unstyled general-font')).toEqual(true); expect(wrapper.find('ul').text()).toBe('\'None yet...\''); }); it('should render all the previous searches', () => { const testProps = { cachedSearch: ['one search', 'some website', 'some text', 'somesome', 'another website'], dbFetchEntities: () => {} } const testWrapper = shallow( <CachedSearchList {...Object.assign({}, props, testProps)} /> ) expect(testWrapper.find('ul').find('li').length).toEqual(5); 
expect(testWrapper.find('ul').find('li').at(0).hasClass('cachedSearch-text')).toEqual(true); }); it('should cut of search input longer than 20 characters at add elipses', () => { const testProps2 = { cachedSearch: ['one search', 'http://www.wsj.com/articles/videogame-consoles-get-upgrades-more-often-1473176586?mod=WSJ_TechWSJD_moreTopStories', 'some text', 'somesome', 'another website'], dbFetchEntities: () => {} }; const testWrapper2 = shallow( <CachedSearchList {...Object.assign({}, props, testProps2)} /> ) expect(testWrapper2.find('ul').find('li').at(1).text()).toBe(' http://www.wsj.com/... '); expect(testWrapper2.find('ul').find('li').at(0).text()).toBe(' one search '); }); });<file_sep>import React, { Component, PropTypes } from 'react'; import shallowCompare from 'react/lib/shallowCompare'; import { connect } from 'react-redux'; import { isWhatType } from '../redux/actions/isWhatType'; export class EntitiesText extends Component { shouldComponentUpdate(nextProps, nextState) { return shallowCompare(this, nextProps, nextState); }; checkStringLength(string) { if(string.length > 987) { string = `${string.substring(0,986)}` } return string; } render() { const { entitiesText, articleText, searchInput } = this.props; const whatTypeSearch = searchInput ? isWhatType(searchInput) : null; const renderLink = (whatTypeSearch === 'url') ? <a href={searchInput} target='_blank'>...Continue reading.</a> : null; return( <table className='table table-text-table table-bordered table-responsive'> <thead className='table-header'> <tr> <th className='text-xs-center'> Preview Text </th> </tr> </thead> <tbody> <tr> <td className='table-text text-justify container'> { articleText ? 
this.checkStringLength(articleText) : this.checkStringLength(entitiesText) }{ renderLink } </td> </tr> </tbody> </table> ) }; }; EntitiesText.propTypes = { entitiesText: React.PropTypes.string, articleText: React.PropTypes.string, searchInput: React.PropTypes.string }; function mapStateToProps({ entity: { entitiesText = '', searchInput= '' }} = {}) { return { entitiesText, searchInput }; }; export default connect(mapStateToProps)(EntitiesText);<file_sep>export const CACHE_SEARCH = 'CACHED_SEARCH'; export const SEND_ENTITIES = 'SEND_ENTITIES'; export const SEND_ARTICLES = 'SEND_ARTICLES'; export const RESET_CACHED_ARTICLES = 'RESET_CACHED_ARTICLES'; export const RESET_ENTITIES = 'RESET_ENTITIES'; export const NO_RESULTS = 'NO_RESULTS'; export const AUTH_USER = 'AUTH_USER'; <file_sep># Entity Analysis Have an article that you want to get entity data(ie. dates, people, organization, email etc.) from without manually reading line by line? + Entity Analysis takes url or word(s) and instantly extracts entities. + Stores searches in a previous search column. + Click any of the previous searches to review previous entities rendered. + Search for articles matching an entity you are looking for(will be searched from previously searched articles). + State is saved in local storage so simply return to view previous article/entities when needed. + Created with React/Redux, PostgreSQL, Node/Hapi.js and other tools. + Entities saved in PostgreSQL to reduce network requests to third-party API and to take data out of state for better performance. + Utilized methods such as shouldComponentUpdate and only connecting Components to props they will use rather than the entire state. This stops re-rendering when nothing has changed and improves app optimization. + Set up back-end authentication. + Added tests. #### Currently Working on + Setting up front-end authentication to work with back-end authentication. 
Thus for now go to *Start Extraction* link to get started, there is no need to signup/signin as it is not currently working. ## Get Started You will need to configure a few files in the *config folder* before getting started: + Sign up to obtain an *Aylien* ID and API key. + Have PostgreSQL installed on your OS. Include your username and password. Once you have that setup: + run *npm start* in one tab in your terminal. + run *npm run dev* in another tab. + run *psql entitydb* in another tab. ( *\d* to see all tables, *\x* to see tables in better view, *SELECT * FROM TABLENAMEHERE* to access the table, *\q* to exit). If you plan to reuse please make sure to .gitignore things like your API key and PostgreSQL username/password: *In .gitignore :* node_modules config/aylien.config.js config/database.config.js config/config.js npm_debug.log .DS_Store dist/<file_sep>'use strict'; const Hapi = require('hapi'); const http = require('https'); const handlebars = require('handlebars'); const inert = require('inert'); const vision = require('vision'); const hapi_cors = require('hapi-cors'); const Boom = require('boom'); const glob = require('glob'); const path = require('path'); const secret = require('../config/config'); const jwtAuth = require('hapi-auth-jwt'); const url = require('url'); const AYLIENTextAPI = require('../config/aylien.config'); const Sequelize = require('sequelize'); const sequelizeConfig = require('../config/database.config'); const SearchCache = require('./models/SearchCache'); const version = require('../package.json').version; const author = require('../package.json').author; const Log = require('log'); const log = new Log(); const port = 3000; // New Hapi Server const server = new Hapi.Server(); // Hapi plugins // when I include the glob here I get an error*** const plugins = [ inert, vision, jwtAuth ]; // server settings server.connection({ host: 'localhost', port: process.env.PORT || port, routes: { cors: true } }); server.register(plugins,(err) => { 
if(err) console.log(err); // name and scheme called 'jwt' // setting third 'required' option means every route must have a token server.auth.strategy('jwt', 'jwt', { key: secret, verifyOptionts: { algorithms: ['HS256'] } }); server.views({ engines: { html: handlebars }, relativeTo: __dirname, path: '../dist' }); // creates a route for every file in the routes folder // doesn't work*** glob.sync('./routes/*.js', { root: __dirname }).forEach(file => { const route = require(path.join(__dirname, file)); server.route(route); }); server.route([ { method: 'GET', path: '/', config: { handler: (request, reply) => { reply.view('index'); } } }, { method: 'GET', path: '/style.css', handler: (request, reply) => { reply.file('./client/style.css') } }, { method: 'GET', path: '/{param*}', handler: { directory: { path: 'dist'} } }, { method: 'GET', path: '/entities/api', handler: (request, reply) => { const inputType = Object.keys(request.query)[0]; const searchInput = request.query[inputType]; const getEntitiesObj = {}; getEntitiesObj[inputType] = searchInput; AYLIENTextAPI.entities(getEntitiesObj, function(error, response) { if (error === null) { reply(response); } }); } }, { // acquires entities based on previous search method: 'GET', path: '/entities/db', handler: (request, reply) => { const searchInput = request.query['search']; SearchCache.findAll({ attributes: ['entities', 'entitiesText'], where: { searchInput } }) .then((entitiesData) => { const entities = entitiesData[0].dataValues.entities; const entitiesText = entitiesData[0].dataValues.entitiesText; reply({ result: 'success', entities, entitiesText }) }) .catch(err => { reply({ result: `ERROR in '/entities/db' path via server: ${err}` }) }) } }, { // acquire articles that match an entity search method: 'GET', path: '/entities/dbmatch', handler: (request, reply) => { console.log('IN findEntityMatch') const search = request.query['search']; var articlesFound = []; SearchCache.findAll({ attributes: ['searchInput', 
'entities', 'entitiesText'], }) .then((data) => { data.forEach((obj) => { const article = obj.dataValues; const entities = obj.dataValues.entities; var searchExists = false; for(var prop in entities) { searchExists = entities[prop].some((entity) => { return entity.toLowerCase() === search.toLowerCase(); }); if(searchExists) { articlesFound.push(article); return; } }; }); if(articlesFound.length) { reply({ result: 'success', articlesFound }) }else { reply({ result: 'No articles found' }) } }); } }, { method: 'POST', path: '/dbsave', handler: (request, reply) => { const searchInput = request.payload.search; const entitiesText = request.payload.entitiesText; const entities = request.payload.entities; SearchCache.sync() .then(() => { return SearchCache.create({ searchInput, entities, entitiesText }) }) .then(() => reply({ result: 'success' })) .catch(err => reply({ result: `ERROR in '/dbsave/textorurlsearch' path via server: ${err}` })) } }, ]) }); server.start((err) => { if (err) throw err; // check if there is an error starting our server log.info(`NODE_ENV: ${process.env.NODE_ENV}`); log.info(`Version ${version} by ${author} running on port ${server.info.uri}`); });
02af0cc8d6f75e0808b4d23e3a297363982fdd76
[ "JavaScript", "Markdown" ]
25
JavaScript
Mashadim/EntityAnalyzer
e5b3ea6a0b2943058196ae52925b38628130ddcf
a404b43095653dec501a82df614802fac4a72a22
refs/heads/master
<repo_name>MattCoy/testpromo5<file_sep>/page2.php <!DOCTYPE html> <html> <head> <title></title> <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/4.0.0-beta.2/css/bootstrap.min.css" integrity="<KEY>" crossorigin="anonymous"> </head> <body> <?php echo 'bonjour à tous, ceci est la page 2'; ?> <a href="index.php">home</a> <div class="container"> <div class="row"> <div class="col-md-6"> <img src="https://www.smashingmagazine.com/wp-content/uploads/2015/06/07-functions-opt-small.jpg" class="img img-responsive"> </div> <div class="col-md-6"> <img src="https://static.independent.co.uk/s3fs-public/styles/article_small/public/thumbnails/image/2017/09/12/11/naturo-monkey-selfie.jpg" class="img img-responsive"> </div> </div> </div> <?php echo 'nous sommes le ' . date('d/m/Y'); ?> </body> </html>
06010bb6634d68f2b8f6057f23a4b44e74ae7adc
[ "PHP" ]
1
PHP
MattCoy/testpromo5
928b8d581af0d8156fdd7ee10278c92dd0d928a9
c133e57c8dc555bfa6e160998fce417649bceb96
refs/heads/master
<file_sep>class UsersController < ApplicationController before_action :require_user_logged_in, only: [:index, :show] def index @users = User.all.page(params[:page]) end def show @user = User.find(params[:id]) @microposts = @user.microposts.order('created_at DESC').page(params[:page]) counts(@user) end def new @user = User.new end def create @user = User.new(user_params) if @user.save flash[:success] = 'ユーザを登録しました。' redirect_to @user else flash.now[:danger] = 'ユーザの登録に失敗しました。' render :new end end def followings @user = User.find(params[:id]) @followings = @user.followings.page(params[:page]) counts(@user) end def followers @user = User.find(params[:id]) @followers = @user.followers.page(params[:page]) counts(@user) end private def user_params params.require(:user).permit(:name, :email, :password, :password_confirmation) end end
ec9a38f4c08bddf5280b47516d7435483dc74541
[ "Ruby" ]
1
Ruby
kanekotaiki/microposts
1a41b9a52c6647b2a5012b330fd9c6373dd0927c
ca17a9164ba78ab82375249039eb37e6a374ba8f
refs/heads/master
<repo_name>DavidBonnemaison/temperature<file_sep>/api/routes.js module.exports = app => { const sensor = require('./controller') app.route('/sensors') .get(sensor.readAll); app.route('/sensor/:sensorId') .get(sensor.read); }; <file_sep>/api/model.js const mongoose = require('mongoose'), measure = new mongoose.Schema({ id: Number, name: String, measure: Number, date: { type: Date, default: Date.now } }); module.exports = mongoose.model('Measure', measure); <file_sep>/api/controller.js const config = require('../config'), mongoose = require('mongoose'), measure = mongoose.model('Measure'); const readTemperature = sensorId => { return new Promise((resolve, reject) => { measure.findOne({ 'id': sensorId }, (err, result) => { if (err) reject(error); resolve(result); }).sort({ date: -1 }) }); } exports.read = (req, res) => { readTemperature(req.params.sensorId) .then(result => res.json(result)) .catch(error => res.send(error)); } exports.readAll = (req, res) => { Promise.all(config.sensors.map(sensor => readTemperature(sensor.id))) .then(result => res.json(result)) .catch(error => res.send(error)); } <file_sep>/app.js const config = require('./config'), express = require('express'), mongoose = require('mongoose'), app = express(), bodyParser = require('body-parser'), cron = require('./api/cron'), routes = require('./api/routes'), measure = require('./api/model'); app.use(bodyParser.urlencoded({ extended: true })); app.use(bodyParser.json()); routes(app); app.listen(config.express.port, () => console.log("Server is listening on port " + config.express.port));
5c4b6ccf961222a0aef283e8a8f140c1b02c8838
[ "JavaScript" ]
4
JavaScript
DavidBonnemaison/temperature
691d3e04d05e40337e2a6cd3ed7fdddc631439bd
aa6cb8b68773214cfbc95541e15cfcdb8b4713de
refs/heads/master
<file_sep>class MsgsController < ApplicationController http_basic_authenticate_with name: "admin", password: "<PASSWORD>", except: [:create, :new] def new @msg = Msg.new end def show @msg = Msg.find(params[:id]) end def index @msgs = Msg.all end def create @msg = Msg.new(msg_params) @msg.save redirect_to @msg end def destroy @msg = Msg.find(params[:id]) @msg.destroy redirect_to msgs_path end private def msg_params params.require(:msg).permit(:name, :email, :comments) end end
275d50a13af5290fed7001e70009f3c116c65137
[ "Ruby" ]
1
Ruby
desnyki/basicsolutionswebsite
526504afdd056900073a771ff2f5e7e2d94bf7a8
ecd103f3b67996afa6b6b38d08dfe46ca276f0b7
refs/heads/master
<repo_name>VeroWrede/React-Redux-Projects<file_sep>/src/components/containers/ChapterOne.js import React from 'react'; import { connect } from 'react-redux'; import chapterOne from '../ChapterOne.jsx'; import {switchPage} from '../../actions/simpleActionCreator.js'; import pages from '../../constants/pages.js'; const mapStateToProps = function (state) { return { page: state.page }; }; const mapDispatchToProps = function (dispatch) { return { onBackClick: () => dispatch(switchPage(pages.Home)), onInternationalRelationsClick: () => dispatch(switchPage(pages.InternationalRelations)), onDancingQueenClick: () => dispatch(switchPage(pages.DancingQueen)), onUSAClick: () => dispatch(switchPage(pages.USA)) }; }; export default connect(mapStateToProps, mapDispatchToProps)(chapterOne); <file_sep>/src/components/containers/DancingQueen.js import React from 'react'; import { connect } from 'react-redux'; import dancingQueen from '../DancingQueen.jsx'; import {switchPage} from '../../actions/simpleActionCreator.js'; import pages from '../../constants/pages.js'; const mapStateToProps = function (state) { return { page: state.page }; }; const mapDispatchToProps = function (dispatch) { return { onBackClick: () => dispatch(switchPage(pages.ChapterOne)), onCheatClick: () => dispatch(switchPage(pages.Cheat)), onMarryQuentinClick: () => dispatch(switchPage(pages.MarryQuentin)), onTeacherClick: () => dispatch(switchPage(pages.DanceTeacher)) }; }; export default connect(mapStateToProps, mapDispatchToProps)(dancingQueen); <file_sep>/src/reducers/rootReducer.js import actions from '../actions/actions.js'; export default function rootReducer(state = {}, action){ switch(action.type){ case actions.SWITCH_PAGE: return Object.assign({}, {page: action.page}) default: return state; } } <file_sep>/src/components/containers/InternationalRelations.js import React from 'react'; import { connect } from 'react-redux'; import internationalRelations from '../InternationalRelations.jsx'; import 
{switchPage} from '../../actions/simpleActionCreator.js'; import pages from '../../constants/pages.js'; const mapStateToProps = function (state) { return { }; }; const mapDispatchToProps = function (dispatch) { return { onBackClick: () => dispatch(switchPage(pages.ChapterOne)), onPoliticsClick: () => dispatch(switchPage(pages.Politician)), onTranslationClick: () => dispatch(switchPage(pages.Translations)) }; }; export default connect(mapStateToProps, mapDispatchToProps)(internationalRelations); <file_sep>/src/components/DancingQueen.jsx import React from 'react'; class DancingQueen extends React.Component { render() { return ( <div> <h1>Dancing Queen</h1> <hr/> <p> you become a dancing queen. </p> <p className="buttons"> <button onClick={this.props.onBackClick}> Back </button> </p> </div> ); } } export default DancingQueen; <file_sep>/src/actions/simpleActionCreator.js import actions from './actions.js'; export function switchPage(page){ return {type: actions.SWITCH_PAGE, page} } <file_sep>/src/components/ChapterOne.jsx import React from 'react'; class ChapterOne extends React.Component { render() { return ( <div> <h1>Chapter One</h1> <hr/> <p> Hello. A short overview of where this story starts: I am 18 years old and just finished high school in Geneva, Switzerland.The University of Geneva accepted me to study International Relations. But I'm not sure if I want to do that. Fortunately, I still have a choice! You have to admit that Geneva is pretty cool. It’s also been my home for some years now. I know the argentine Tango scene around here and have a great dancing partner! We could try to become real dancers, maybe even win the world championship? Dancing is to me like breathing, it comes naturally and I can’t live without it. If I want to make this my career, now is the chance. But then my studies would come in last. Do I care? Now, my dad got a job in Washington,DC… How exciting a brand new start would be! 
But hush, don’t tell anyone I said so, because as a good european I’m, of course filled with prejudice. </p> <p className="buttons"> <button onClick={this.props.onBackClick}> Back </button> <button onClick={this.props.onInternationalRelationsClick}> Study International Relations </button> <button onClick={this.props.onDancingQueenClick}> Become a Dancing Queen </button> <button onClick={this.props.onUSAClick}> Move to the US </button> </p> </div> ); } } export default ChapterOne; <file_sep>/src/components/Politician.jsx import React from 'react'; class Politician extends React.Component { render() { return ( <div> <h1>Politics</h1> <hr/> <p> If you want things to change you have to change them yourself. So I stick with politics. It’s a dreary path, one step forwards, two steps back. Sometimes it's just the step forwards and oh, how sweet those little victories are! Still, it’s a stressful job with little pay. Unless I get to swim with the big fish at the top. I could take the foreign service exam and become a diplomat! Good pay, nice parties, immunity, travelling to exotic places… The life of a diplomat can be sweet and glamorous, all while bringing about great change. But maybe that’s the easy way out? Big change comes from below. Doesn’t the world need fighters who don’t just do it for the money? </p> <p className="buttons"> <button onClick={this.props.onBackClick}> Back </button> <button onClick={this.props.onDiplomatClick}> Become a diplomat </button> <button onClick={this.props.onActivistClick}> Become an activist </button> </p> </div> ); } } export default Politician; <file_sep>/src/components/InternationalRelations.jsx import React from 'react'; class InternationalRelations extends React.Component { render() { return ( <div> <h1>International Relations</h1> <hr/> <p> Yes, studying is more important. Dancing will be a hobby. Geneva is one the best places to study anything related to politics after all. 
My aunt is a diplomat here and she has me seriously considering a political career. Though it really seems to be about a bunch of selfish weirdos and sweet talkers. Just look at whats going on in the world! It's certainly a field that requires patience and tact, a challenging position for my straight forward personality. During my studies I quickly picked up Japanese, my 5th language so far. I’d also love to learn Portuguese, so maybe Languages is more my thing? I could become a translator and interpreter at political conferences. </p> <p className="buttons"> <button onClick={this.props.onBackClick}> Back </button> <button onClick={this.props.onPoliticsClick}> Become a politician </button><button onClick={this.props.onTranslationClick}> Become an translater </button> </p> </div> ); } } export default InternationalRelations; <file_sep>/src/components/Teacher.jsx import React from 'react'; class Teacher extends React.Component { render() { return ( <div> <h1>The Teacher</h1> <hr/> <p> Teaching it is. Like my own mother I find myself in the world of teachers and I love it. My Husband has given up on the idea of having a family. Instead we use our income to travel the world. And I of course get a dog and a cat. We buy a little house in Yvoire, France and I when we retire to live there permanently I open a little chocolate store. With all the tourists my language skills come in handy. I wholeheartedly recommend a visit to this beautiful little medieval village of Yvoire, it must be one the most beautiful places in the world. The end. </p> <p className="buttons"> <button onClick={this.props.onBackClick}> Back </button> </p> </div> ); } } export default Teacher; <file_sep>/src/components/Diplomat.jsx import React from 'react'; class Diplomat extends React.Component { render() { return ( <div> <h1>The diplomat</h1> <hr/> <p> To do big things you need big power. I decide to become a diplomat and after weeks of studying I finally pass the exam. I did it! 
My first job as german diplomat takes me back to Berlin. I’ve been away for a long time and adjusting is hard. But eventually life goes back to normal; I find my group of friends and we have our weekly outings to our favorite expensive bar. One day a new face joins the troupe. Freshly relocated from Bavaria is a handsome, young lawyer named Hubert. Hubert is, like me a mongrel, of German and Mexican descent. We connect immediately. Before the year is over we move in together. Another year and we are married. Things are good, until … Shortly before our first child is due I find out that Hubert is gay. I am heartbroken, he denies it. Fortunately, the child’s arrival brings us back together and I’m so busy that I forget the entire story. Until our second child is due and the entire story repeats itself. This time however, Hubert’s lover Olaf steps in and under rivers of tears Hubert comes out of the closet. Fortunately for us all we are still in Germany, and Hubert’s sexuality is quickly accepted by everyone - except Hubert’s family, but ‘oh, well..’. We get used to our new life as a 5 headed family. As they say “ein Vater ist besser als kein Vater” (one father is better than no father), and just imagine how lucky our kids are to have two!? My two dads aren’t the jealous type, but rather good judges of character and soon my own love life flourishes again. 
The end </p> <p className="buttons"> <button onClick={this.props.onBackClick}> Back </button> </p> </div> ); } } export default Diplomat; <file_sep>/src/App.jsx // rendering of initail (home) page import React from 'react'; import { render } from 'react-dom'; import {Provider} from 'react-redux'; import configureStore from './redux/configureStore.js'; import initialState from './redux/initialState.js'; import App from './components/containers/storyBook.js'; const store = configureStore(initialState); render( <Provider store={store}> <App/> </Provider>, document.getElementById('appContainer') ); <file_sep>/src/components/containers/MoveToUS.js import React from 'react'; import { connect } from 'react-redux'; import usa from '../MoveToUS.jsx'; import {switchPage} from '../../actions/simpleActionCreator.js'; import pages from '../../constants/pages.js'; const mapStateToProps = function (state) { return { }; }; const mapDispatchToProps = function (dispatch) { return { onBackClick: () => dispatch(switchPage(pages.ChapterOne)), onBenClick: () => dispatch(switchPage(pages.Ben)), onAlexClick: () => dispatch(switchPage(pages.Alex)) }; }; export default connect(mapStateToProps, mapDispatchToProps)(usa); <file_sep>/src/components/containers/Translations.js import React from 'react'; import { connect } from 'react-redux'; import translations from '../Translations.jsx'; import {switchPage} from '../../actions/simpleActionCreator.js'; import pages from '../../constants/pages.js'; const mapStateToProps = function (state) { return { }; }; const mapDispatchToProps = function (dispatch) { return { onBackClick: () => dispatch(switchPage(pages.InternationalRelations)), onTeacherClick: () => dispatch(switchPage(pages.Teacher)), onHomeMumClick: () => dispatch(switchPage(pages.HomeMum)) }; }; export default connect(mapStateToProps, mapDispatchToProps)(translations); <file_sep>/src/components/HomePage.jsx import React from 'react'; class HomePage extends React.Component { render() { 
return ( <div> <h1>Intro</h1> <hr/> <p> Welcome! This is a short story featuring my life and what could have been my life had I taken different choices. Of course, only the storyline holding what actually happened is true. The others are made up. That being said, many of the choices the reader has to take would likely have been / where actual choices I would have faced on that path. This project is still in progress and I haven’t uploaded the whole story yet. I hope you enjoy the read! Vero </p> <p className="buttons"> <button onClick={this.props.onStartClick}> Start </button> </p> </div> ); } } export default HomePage; <file_sep>/src/components/MoveToUS.jsx import React from 'react'; class USA extends React.Component { render() { return ( <div> <h1>Welkome to the USA!</h1> <hr/> <p> you moved to the US. </p> <p className="buttons"> <button onClick={this.props.onBackClick}> Back </button> <button onClick={this.props.onBenClick}> You hang out with Ben </button> <button onClick={this.props.onAlexClick}> You hang out with Alex </button> </p> </div> ); } } export default USA; <file_sep>/src/redux/initialState.js import pages from '../constants/pages.js'; export default { page: pages.home } <file_sep>/src/components/storyBook.jsx // presentational (dumb) component import React from 'react'; import '../../node_modules/bootstrap/dist/css/bootstrap.min.css'; import '../styles/styles.css'; import pages from '../constants/pages.js'; import HomePage from './containers/HomePage.js'; import ChapterOne from './containers/ChapterOne.js'; import InternationalRelations from './containers/InternationalRelations.js'; import DancingQueen from './containers/DancingQueen.js'; import DanceTeacher from './containers/DanceTeacher.js'; import USA from './containers/MoveToUS.js'; import Politician from './containers/Politician.js'; import Diplomat from './containers/Diplomat.js'; import Activist from './containers/Activist.js'; import Translations from './containers/Translations.js'; import 
Teacher from './containers/Teacher.js'; import HomeMum from './containers/HomeMum.js'; import Cheat from './containers/Cheat.js'; import MarryQuentin from './containers/MarryQuentin.js'; import Ben from './containers/Ben.js'; import Alex from './containers/Alex.js'; class StoryBook extends React.Component{ render() { return ( <div > <div className="chapter"> {this.renderPage()} </div> </div> //<Footer ); } renderPage() { switch(this.props.page){ case pages.ChapterOne: return <ChapterOne/>; case pages.InternationalRelations: return <InternationalRelations/>; case pages.Politician: return <Politician/>; case pages.Diplomat: return <Diplomat/>; case pages.Activist: return <Activist/>; case pages.Translations: return <Translations/>; case pages.Teacher: return <Teacher/>; case pages.HomeMum: return <HomeMum/>; case pages.DancingQueen: return <DancingQueen/>; case pages.Quentin: return <Quentin/>; case pages.Cheat: return <Cheat/>; case pages.MarryQuentin: return <MarryQuentin/>; case pages.DanceTeacher: return <DanceTeacher/>; case pages.USA: return <USA/>; case pages.Ben: return <Ben/>; case pages.Alex: return <Alex/>; case pages.Home: default: return <HomePage/>; } } } export default StoryBook; <file_sep>/src/components/Alex.jsx import React from 'react'; class Alex extends React.Component { render() { return ( <div> <h1>Alex</h1> <hr/> <p> you're dating Alex. 
</p> <p className="buttons"> <button onClick={this.props.onBackClick}> Back </button> </p> </div> ); } } export default Alex; <file_sep>/src/components/containers/Cheat.js import React from 'react'; import { connect } from 'react-redux'; import cheat from '../Cheat.jsx'; import {switchPage} from '../../actions/simpleActionCreator.js'; import pages from '../../constants/pages.js'; const mapStateToProps = function (state) { return { }; }; const mapDispatchToProps = function (dispatch) { return { onBackClick: () => dispatch(switchPage(pages.DancingQueen)) }; }; export default connect(mapStateToProps, mapDispatchToProps)(cheat); <file_sep>/src/components/containers/HomePage.js import React from 'react'; import { connect } from 'react-redux'; import homePage from '../HomePage.jsx'; import {switchPage} from '../../actions/simpleActionCreator.js'; import pages from '../../constants/pages.js'; const mapStateToProps = function (state) { return { }; }; const mapDispatchToProps = function (dispatch) { return { onStartClick: () => dispatch(switchPage(pages.ChapterOne)), }; }; export default connect(mapStateToProps, mapDispatchToProps)(homePage);
db2dd2786de9b0fa10daac46fb89596ab2549f5f
[ "JavaScript" ]
21
JavaScript
VeroWrede/React-Redux-Projects
a6c30e6e8ee91e7e69af73c964fb0877c3c3a3b7
ffd9fa6460048e16583b8d7d565ce3cf1ee990bb
refs/heads/master
<file_sep> void writeFlashTest(void) { //ŻÔ╦°flash HAL_FLASH_Unlock(); // FLASH_EraseInitTypeDef f; f.TypeErase = FLASH_TYPEERASE_PAGES; f.PageAddress = addr; f.NbPages = 1; uint32_t PageError = 0; //?????? HAL_FLASHEx_Erase(&f, &PageError); //3??FLASH?? HAL_FLASH_Program(TYPEPROGRAM_WORD, addr, writeFlashData); //4???FLASH HAL_FLASH_Lock(); }<file_sep>#ifndef __ADS1X9X_HAL_DRIVER_H #define __ADS1X9X_HAL_DRIVER_H #include "stm32f4xx_hal.h" #include "dwt_stm32_delay.h" extern SPI_HandleTypeDef hspi1; #define ADS_REG_ID_ADDRESS 0x00 #define ADS_REG_CONFIG1_ADDRESS 0x01 #define ADS_REG_CONFIG1_DEFAULT 0x00 #define ADS_SINGLE_SHOT 0x80 #define ADS_DR(n) 0x01*(n) #define ADS_REG_CONFIG2_ADDRESS 0x02 #define ADS_REG_CONFIG2_DEFAULT 0x80 #define ADS_PDB_LOFF_COMP 0x40 #define ADS_PDB_REFBUF 0x20 #define ADS_VREF_4V 0x10 #define ADS_CLK_EN 0x08 #define ADS_INT_TEST 0x02 #define ADS_TEST_FREQ 0x01 #define ADS_REG_LOFF_ADDRESS 0x03 #define ADS_REG_LOFF_DEFAULT 0x10 #define ADS_COMP_TH(n) 0x20*(n) #define ADS_ILEAD_OFF(n) 0x04*(n) #define ADS_FLEAD_OFF 0x01 #define ADS_REG_CH1SET_ADDRESS 0x04 #define ADS_REG_CH1SET_DEFAULT 0x00 #define ADS_PD1 0x80 #define ADS_GAIN1(n) 0x10*(n) #define ADS_MUX1(n) 0x01*(n) #define ADS_REG_CH2SET_ADDRESS 0x05 #define ADS_REG_CH2SET_DEFAULT 0x00 #define ADS_PD2 0x80 #define ADS_GAIN2(n) 0x10*(n) #define ADS_MUX2(n) 0x01*(n) #define ADS_REG_RLDSENS_ADDRESS 0x06 #define ADS_REG_RLDSENS_DEFAULT 0x00 #define ADS_CHOP(n) 0x40*(n) #define ADS_PDB_RLD 0x20 #define ADS_RLD_LOFF_SENS 0x10 #define ADS_RLD2N 0x08 #define ADS_RLD2P 0x04 #define ADS_RLD1N 0x02 #define ADS_RLD1P 0x01 #define ADS_REG_LOFFSENS_ADDRESS 0x07 #define ADS_REG_LOFFSENS_DEFAULT 0x00 #define ADS_FLIP2 0x20 #define ADS_FLIP1 0x10 #define ADS_LOFF2N 0x08 #define ADS_LOFF2P 0x04 #define ADS_LOFF1N 0x02 #define ADS_LOFF1P 0x01 #define ADS_REG_LOFFSTAT_ADDRESS 0x08 #define ADS_REG_LOFFSTAT_DEFAULT 0x00 #define ADS_CLK_DIV 0x40 #define ADS_RLD_STAT 0x10 #define ADS_IN2N_OFF 0x08 #define 
ADS_IN2P_OFF 0x04 #define ADS_IN1N_OFF 0x02 #define ADS_IN1P_OFF 0x01 #define ADS_REG_RESP1_ADDRESS 0x09 #define ADS_REG_RESP1_DEFAULT 0x02 #define ADS_RESP_DEMOD_EN1 0x80 #define ADS_RESP_MOD_EN 0x40 #define ADS_RESP_PH(n) 0x04*(n) #define ADS_RESP_CTRL 0x01 #define ADS_REG_RESP2_ADDRESS 0x0A #define ADS_REG_RESP2_DEFAULT 0x01 #define ADS_CALIB_ON 0x80 #define ADS_RESP_FREQ 0x04 #define ADS_RLDREF_INT 0x02 #define ADS_REG_GPIO_ADDRESS 0x0B #define ADS_REG_GPIO_DEFAULT 0x0C #define ADS_GPIOC2 0x08 #define ADS_GPIOC1 0x04 #define ADS_GPIOD2 0x02 #define ADS_GPIOD1 0x01 #define ADS_WAKEUP 0x02 #define ADS_STANDBY 0x04 #define ADS_RESET 0x06 #define ADS_START 0x08 #define ADS_STOP 0x0A #define ADS_OFFSETCAL 0x1A #define ADS_RDATAC 0x10 #define ADS_SDATAC 0x11 #define ADS_RDATA 0x12 #define ADS_RREG 0x20 #define ADS_WREG 0x40 #define ADS_PIN_START_L() HAL_GPIO_WritePin(ADS_START_GPIO_Port, ADS_START_Pin, GPIO_PIN_RESET) #define ADS_PIN_START_H() HAL_GPIO_WritePin(ADS_START_GPIO_Port, ADS_START_Pin, GPIO_PIN_SET) #define ADS_PIN_RESET_L() HAL_GPIO_WritePin(ADS_RESET_GPIO_Port, ADS_RESET_Pin, GPIO_PIN_RESET) #define ADS_PIN_RESET_H() HAL_GPIO_WritePin(ADS_RESET_GPIO_Port, ADS_RESET_Pin, GPIO_PIN_SET) #define ADS_PIN_MAINCLKSEL_L() HAL_GPIO_WritePin(ADS_MAIN_CLKSEL_GPIO_Port, ADS_MAIN_CLKSEL_Pin, GPIO_PIN_RESET) #define ADS_PIN_MAINCLKSEL_H() HAL_GPIO_WritePin(ADS_MAIN_CLKSEL_GPIO_Port,ADS_MAIN_CLKSEL_Pin,GPIO_PIN_SET); #define ADS_PIN_CS_L() HAL_GPIO_WritePin(ADS_CS_GPIO_Port, ADS_CS_Pin, GPIO_PIN_RESET) #define ADS_PIN_CS_H() HAL_GPIO_WritePin(ADS_CS_GPIO_Port, ADS_CS_Pin, GPIO_PIN_SET) #define ADS_PIN_EN_L() HAL_GPIO_WritePin(ADS_EN_GPIO_Port, ADS_EN_Pin, GPIO_PIN_RESET) #define ADS_PIN_EN_H() HAL_GPIO_WritePin(ADS_EN_GPIO_Port, ADS_EN_Pin, GPIO_PIN_SET) #define ADS_SPI_Write(Data,Size) HAL_SPI_Transmit(&hspi1, Data, Size, 1000) #define ADS_SPI_Read(Data,Size) HAL_SPI_Receive(&hspi1,Data, Size, 1000) #define ADS_SPI_Delay(n) DWT_Delay_us(n) typedef struct { uint8_t 
Address; uint8_t Value; }ADS_RegisterDef; typedef struct { ADS_RegisterDef CONFIG1; ADS_RegisterDef CONFIG2; ADS_RegisterDef LOFF; ADS_RegisterDef CH1SET; ADS_RegisterDef CH2SET; ADS_RegisterDef RLD_SENS; ADS_RegisterDef LOFF_SENS; ADS_RegisterDef LOFF_STAT; ADS_RegisterDef RESP1; ADS_RegisterDef RESP2; ADS_RegisterDef GPIO; }ADS_ConfigDef; void ads1291_disable(void); void ads1291_init(void); extern void ADS_ReadStatue(uint8_t REG,uint8_t Num,uint8_t *pData,uint16_t Size); extern void ADS_Setting(uint8_t REG,uint8_t Num,uint8_t *pData,uint16_t Size ); extern void ADS_Command(uint8_t CMD); extern void ADS_ReadData(uint8_t *pRxData,uint16_t Size); extern void ADS_Config_Init(ADS_ConfigDef *Config); extern void ADS_Config(ADS_ConfigDef *Config); extern void ADS_init(void); #endif <file_sep>#ifndef _I2C_jacy_H #define _I2C_jacy_H #include "gpio.h" #include "main.h" #include "dwt_stm32_delay.h" #define SCL_H HAL_GPIO_WritePin(GPIOA, GPIO_PIN_8, GPIO_PIN_SET); #define SCL_L HAL_GPIO_WritePin(GPIOA, GPIO_PIN_8, GPIO_PIN_RESET); #define SDA_H HAL_GPIO_WritePin(GPIOC, GPIO_PIN_9, GPIO_PIN_SET); #define SDA_L HAL_GPIO_WritePin(GPIOC, GPIO_PIN_9, GPIO_PIN_RESET); #define SDA_read HAL_GPIO_ReadPin(GPIOC, GPIO_PIN_9) #define I2C_READ 0xB1 #define I2C_WRITE 0xB0 void I2C_GPIO_Config(void); void SDA_IN(void); void SDA_OUT(void); void RST_IN(void); void delay_us(uint8_t time); void delay_ms(uint8_t time); void I2C_Start(void); void I2C_Stop(void); void I2C_Ack(void); void I2C_NoAck(void); uint8_t I2C_WaitAck(void); void RcvAck(void); void i2c_clock(void); uint8_t inbyt(void); void outbyt(unsigned char i2c_data); void Write_Configuration(uint8_t Addr,uint32_t wdata); uint32_t Read_Configuration(unsigned int Addr); #endif <file_sep>//#include "main.h" //#include "stm32f4xx_hal.h" //#include "spi.h" //#include "tim.h" //#include "usart.h" //#include "gpio.h" ///* USER CODE BEGIN Includes */ //#include "string.h" //#include "dwt_stm32_delay.h" //#include "ads1x9x_hal_driver.h" 
//#include "IIC.h" //#include "pps960.h" //#include "afe4404_hw.h" //#include "agc_V3_1_19.h" //#include "hqerror.h" //extern uint8_t mode1_tran_data[753]; //extern uint8_t mode2_tran_data[2]; //extern uint8_t mode3_tran_data[5]; //void Mode1_tran(void) //{ // uint8_t // //}<file_sep>/* * pps960.c * * Created on: 2016年9月8日 * Author: cole */ #include <string.h> #include <stdbool.h> #include <stdio.h> #include <stdlib.h> #include <stdint.h> #include "afe4404_hw.h" #include "agc_V3_1_19.h" #include "hqerror.h" #include "pps960.h" #include "IIC.h" #include "main.h" #include "dwt_stm32_delay.h" #include "IIC.h" #include "usart.h" #define PPS960_ADDR (0xB0) extern uint16_t lifeQhrm; extern int8_t skin; uint16_t acc_check=0; uint16_t acc_check2=0; uint8_t pps964_is_init = 0; int8_t hr_okflag=false; int8_t Stablecnt=0; int8_t Unstablecnt=0; int8_t HR_HRV_enable=0;//0=>HR;1=>HRV;2=>HR+HRV; uint32_t displayHrm = 0; uint32_t pps_count; uint32_t pps_intr_flag=0; int8_t accPushToQueueFlag=0; extern uint16_t AccBuffTail; void PPS_DELAY_MS(uint32_t ms) { HAL_Delay(ms); } void pps960_Rest_SW(void) { PPS960_writeReg(0,0x8); PPS_DELAY_MS(50); } void pps960_disable(void) { pps964_is_init = 0; HAL_GPIO_WritePin(PPS_EN_GPIO_Port,PPS_EN_Pin,GPIO_PIN_RESET); HAL_Delay(200); } void init_pps960_sensor(void) { pps964_is_init = 1; HAL_GPIO_WritePin(PPS_EN_GPIO_Port,PPS_EN_Pin,GPIO_PIN_SET); HAL_Delay(200); HAL_GPIO_WritePin(PPS_RESET_GPIO_Port,PPS_RESET_Pin,GPIO_PIN_RESET); DWT_Delay_us(30); HAL_GPIO_WritePin(PPS_RESET_GPIO_Port,PPS_RESET_Pin,GPIO_PIN_SET); DWT_Delay_us(30); // pps960_Rest_SW(); init_PPS960_register(); PPS960_init(); } extern uint8_t control; uint8_t pps_test_flag=0; //uint8_t pps960_init_flag = 0; void pps960_sensor_task(void) { pps_intr_flag = 0; if(acc_check){ // 进入该函数获取原始数据 ALGSH_retrieveSamplesAndPushToQueue();//read pps raw data //move ALGSH_dataToAlg(); to message queue loop. and then send message at here. 
ALGSH_dataToAlg(); } } uint8_t cnt=0; uint16_t lifeHR = 0; uint16_t lifeskin = 0; void pps960_sensor_task2(void) { if(acc_check) { // sample=GetHRSampleCount(); ClrHRSampleCount(); cnt++;if(cnt>255)cnt=0; lifeQhrm = pps_getHR(); // snrValue=PP_GetHRConfidence();//for snr check skin = PPS_get_skin_detect(); //cut --> 计数 //lifeQhrm --> 心率值 //snrValue --> 信噪比 //sample --> 原始数据采样率,现为25HZ //skin --> 皮肤接触标志位,接触时为1,未接触为0 if(skin == 0) { lifeQhrm = 0; } // mode1_tran_data[750] = lifeQhrm; // mode1_tran_data[751] = skin; // HAL_UART_Transmit(&huart2,senddata,2,0xff); displayHrm = lifeQhrm;// } } void PPS960_writeReg(uint8_t regaddr, uint32_t wdata) { Write_Configuration(regaddr,wdata); } uint32_t PPS960_readReg(uint8_t regaddr) { return Read_Configuration(regaddr); } //#endif <file_sep>#include "IIC.h" /********************************** * Function Name : I2C_GPIO_Config * Description : I2C GPIO set * Input : None * Output : None * Return : None **********************************/ void I2C_GPIO_Config(void) { GPIO_InitTypeDef GPIO_InitStruct; /* GPIO Ports Clock Enable */ __HAL_RCC_GPIOA_CLK_ENABLE(); __HAL_RCC_GPIOC_CLK_ENABLE(); HAL_GPIO_WritePin(GPIOA, GPIO_PIN_8, GPIO_PIN_SET); HAL_GPIO_WritePin(GPIOC, GPIO_PIN_9, GPIO_PIN_SET); /*Configure GPIO pins*/ GPIO_InitStruct.Pin = GPIO_PIN_8; // PA8 --> SCL GPIO_InitStruct.Mode = GPIO_MODE_OUTPUT_PP; GPIO_InitStruct.Pull = GPIO_PULLUP; GPIO_InitStruct.Speed = GPIO_SPEED_FREQ_VERY_HIGH; HAL_GPIO_Init(GPIOA, &GPIO_InitStruct); GPIO_InitStruct.Pin = GPIO_PIN_9; // PC9 --> SDA GPIO_InitStruct.Mode = GPIO_MODE_OUTPUT_PP; GPIO_InitStruct.Pull = GPIO_PULLUP; GPIO_InitStruct.Speed = GPIO_SPEED_FREQ_VERY_HIGH; HAL_GPIO_Init(GPIOC, &GPIO_InitStruct); } void SDA_IN(void) { GPIO_InitTypeDef GPIO_InitStruct; /* GPIO Ports Clock Enable */ __HAL_RCC_GPIOC_CLK_ENABLE(); /*Configure GPIO pin : PtPin */ GPIO_InitStruct.Pin = GPIO_PIN_9; GPIO_InitStruct.Mode = GPIO_MODE_INPUT; GPIO_InitStruct.Speed = GPIO_SPEED_FREQ_VERY_HIGH; 
HAL_GPIO_Init(GPIOC, &GPIO_InitStruct); } void SDA_OUT(void) { GPIO_InitTypeDef GPIO_InitStruct; /* GPIO Ports Clock Enable */ __HAL_RCC_GPIOC_CLK_ENABLE(); HAL_GPIO_WritePin(GPIOC, GPIO_PIN_9, GPIO_PIN_SET); /*Configure GPIO pins*/ GPIO_InitStruct.Pin = GPIO_PIN_9; // PC9 --> SDA GPIO_InitStruct.Mode = GPIO_MODE_OUTPUT_PP; GPIO_InitStruct.Pull = GPIO_PULLUP; GPIO_InitStruct.Speed = GPIO_SPEED_FREQ_VERY_HIGH; HAL_GPIO_Init(GPIOC, &GPIO_InitStruct); } /******************************************************************************* * Function Name : I2C_delay * Description : Simulation IIC Timing series delay * Input : None * Output : None * Return : None ****************************************************************************** */ void delay_ms(uint8_t time) { HAL_Delay(time); } void delay_us(uint8_t time) { DWT_Delay_us(time); } /******************************************************************************* * Function Name : I2C_Start * Description : Master Start Simulation IIC Communication * Input : None * Output : None * Return : Wheather Start ****************************************************************************** */ void I2C_Start(void) { SDA_OUT(); SDA_H; SCL_H; delay_us(2); SDA_L; delay_us(2); SCL_L; } /******************************************************************************* * Function Name : I2C_Stop * Description : Master Stop Simulation IIC Communication * Input : None * Output : None * Return : None ****************************************************************************** */ void I2C_Stop(void) { SDA_OUT(); SDA_L; delay_us(4); SCL_H; delay_us(4); SDA_H; delay_us(4); } /******************************************************************************* * Function Name : I2C_Ack * Description : Master Send Acknowledge Single * Input : None * Output : None * Return : None ****************************************************************************** */ void I2C_Ack(void) { SCL_L; delay_us(4); SDA_OUT(); SDA_L; delay_us(4); SCL_H; 
delay_us(4); SCL_L; SDA_IN(); delay_us(30); } /******************************************************************************* * Function Name : I2C_NoAck * Description : Master Send No Acknowledge Single * Input : None * Output : None * Return : None ****************************************************************************** */ void I2C_NoAck(void) { SDA_OUT(); SDA_H; delay_us(4); SCL_H; delay_us(4); SCL_L; SDA_IN(); } /******************************************************************************* * Function Name : I2C_WaitAck * Description : Master wait slave ACK * Input : Will Send Date * Output : None * Return : None ****************************************************************************** */ uint8_t I2C_WaitAck(void) { uint16_t tempTime = 0; SDA_IN(); while(SDA_read) { tempTime++; if(tempTime>550) { I2C_Stop(); return 1; } } SCL_H; delay_us(4); SCL_L; return 0; } /******************************************************************************* * Function Name : I2C_Clock * Description : Master output CLOCK * Input : Will Send Date * Output : None * Return : None ****************************************************************************** */ void I2C_Clock(void) { SCL_L;//SCL=0 delay_us(4); SCL_H;//SCL=1 delay_us(4); SCL_L;//SCL=0 } /******************************************************************************* * Function Name : I2C_SendByte * Description : Master Send a Byte to Slave * Input : Will Send Date * Output : None * Return : None ****************************************************************************** */ void outbyt(unsigned char i2c_data) { unsigned char i; SDA_OUT(); SCL_L; for(i=0;i<8;i++) { if(i2c_data&0x80) { SDA_H;//SDA=1 } else { SDA_L;//SDA=0 } i2c_data <<=1; I2C_Clock(); } SDA_H; SDA_IN(); } /******************************************************************************* * Function Name : I2C_RadeByte * Description : Master Reserive a Byte From Slave * Input : None * Output : None * Return : Date From Slave 
****************************************************************************** */ uint8_t inbyt(void) { uint8_t i,i2c_data; SDA_IN();//SDA turn to input mode i2c_data=0; i=8; while(i--) { i2c_data<<=1; SCL_L;//SCL=0 delay_us(4); SCL_H;//SCL=1 i2c_data|= SDA_read; delay_us(4); } SCL_L;//SCL=0 return(i2c_data); } void Write_Configuration(uint8_t Addr,uint32_t wdata) { uint8_t temp[3]; uint32_t wd=wdata; temp[0]=(wd>>16) & 0xff; temp[1]=(wd>>8) & 0xff; temp[2]=wd & 0xff; I2C_Start(); outbyt(I2C_WRITE); I2C_WaitAck(); outbyt(Addr); I2C_WaitAck(); outbyt(temp[0]); I2C_WaitAck(); outbyt(temp[1]); I2C_WaitAck(); outbyt(temp[2]); I2C_WaitAck(); I2C_Stop(); } uint32_t Read_Configuration(unsigned int Addr) { char i,Data_Read[3]; uint32_t data; I2C_Start(); outbyt(I2C_WRITE); //WRITE I2C_WaitAck(); outbyt(Addr); I2C_WaitAck(); I2C_Start(); outbyt(I2C_READ); //READ I2C_WaitAck(); Data_Read[0] = inbyt(); I2C_Ack(); Data_Read[1] = inbyt(); I2C_Ack(); Data_Read[2] = inbyt(); I2C_NoAck(); I2C_Stop(); data = Data_Read[0] << 16 | Data_Read[1] << 8 | Data_Read[2]; return data; } <file_sep>#include "I2C_jacy.h" #include "ALIENTEK_BSP.h" #include "Configuration.h" u8 key_up=1; //松开标志 /********************************** * Function Name : I2C_GPIO_Config * Description : I2C GPIO set * Input : None * Output : None * Return : None **********************************/ void I2C_GPIO_Config(void) { GPIO_InitTypeDef GPIO_InitStructure; RCC_APB2PeriphClockCmd(RCC_APB2Periph_GPIOB , ENABLE); GPIO_SetBits(GPIOB,GPIO_Pin_10); GPIO_SetBits(GPIOB,GPIO_Pin_11); GPIO_InitStructure.GPIO_Pin = GPIO_Pin_10; //SCL GPIO_InitStructure.GPIO_Speed = GPIO_Speed_50MHz; GPIO_InitStructure.GPIO_Mode = GPIO_Mode_Out_PP;//推挽 GPIO_Init(GPIOB, &GPIO_InitStructure); GPIO_InitStructure.GPIO_Pin = GPIO_Pin_11; //SDA GPIO_InitStructure.GPIO_Speed = GPIO_Speed_50MHz; GPIO_InitStructure.GPIO_Mode = GPIO_Mode_Out_OD; GPIO_Init(GPIOB, &GPIO_InitStructure); GPIO_ResetBits(GPIOC,GPIO_Pin_0); GPIO_ResetBits(GPIOC,GPIO_Pin_1); 
GPIO_InitStructure.GPIO_Pin = GPIO_Pin_0; //CFGb GPIO_InitStructure.GPIO_Speed = GPIO_Speed_50MHz; GPIO_InitStructure.GPIO_Mode = GPIO_Mode_Out_PP;//推挽 GPIO_Init(GPIOC, &GPIO_InitStructure); GPIO_ResetBits(GPIOC,GPIO_Pin_0); GPIO_InitStructure.GPIO_Pin = GPIO_Pin_1; //RSTb GPIO_InitStructure.GPIO_Speed = GPIO_Speed_50MHz; GPIO_InitStructure.GPIO_Mode = GPIO_Mode_Out_PP; GPIO_Init(GPIOC, &GPIO_InitStructure); GPIO_ResetBits(GPIOC,GPIO_Pin_1); } void SDA_IN(void)//单片机输入数据 { GPIO_InitTypeDef GPIO_InitStructure; GPIO_InitStructure.GPIO_Pin = GPIO_Pin_11; GPIO_InitStructure.GPIO_Speed = GPIO_Speed_50MHz; GPIO_InitStructure.GPIO_Mode = GPIO_Mode_IPU; GPIO_Init(GPIOB, &GPIO_InitStructure); } void SDA_OUT(void)//单片机输出数据 { GPIO_InitTypeDef GPIO_InitStructure; GPIO_InitStructure.GPIO_Pin = GPIO_Pin_11; GPIO_InitStructure.GPIO_Speed = GPIO_Speed_50MHz; GPIO_InitStructure.GPIO_Mode = GPIO_Mode_Out_OD; GPIO_Init(GPIOB, &GPIO_InitStructure); } void RST_IN(void) { GPIO_InitTypeDef GPIO_InitStructure; GPIO_InitStructure.GPIO_Pin = GPIO_Pin_1; GPIO_InitStructure.GPIO_Speed = GPIO_Speed_50MHz; GPIO_InitStructure.GPIO_Mode = GPIO_Mode_IPD; GPIO_Init(GPIOC, &GPIO_InitStructure); } /******************************************************************************* * Function Name : I2C_delay * Description : Simulation IIC Timing series delay * Input : None * Output : None * Return : None ****************************************************************************** */ void I2C_delay(void) { delay_us(4); } void delay5ms(void) { int i=5000; while(i) { i--; } } /******************************************************************************* * Function Name : I2C_Start * Description : Master Start Simulation IIC Communication * Input : None * Output : None * Return : Wheather Start ****************************************************************************** */ void I2C_Start(void) { SDA_OUT(); SDA_H; SCL_H; I2C_delay(); SDA_L; I2C_delay(); SCL_L; } void i2c_clock(void) { SCL_L;//SCL=0 
I2C_delay(); SCL_H;//SCL=1 I2C_delay(); SCL_L;//SCL=0 } /******************************************************************************* * Function Name : I2C_Stop * Description : Master Stop Simulation IIC Communication * Input : None * Output : None * Return : None ****************************************************************************** */ void I2C_Stop(void) { SDA_OUT(); SDA_L; I2C_delay(); SCL_H; I2C_delay(); SDA_H; I2C_delay(); } /******************************************************************************* * Function Name : I2C_Ack * Description : Master Send Acknowledge Single * Input : None * Output : None * Return : None ****************************************************************************** */ void I2C_Ack(void) { SCL_L; delay_us(4); SDA_OUT(); SDA_L; I2C_delay(); SCL_H; I2C_delay(); SCL_L; SDA_IN(); delay_us(30); } /******************************************************************************* * Function Name : I2C_NoAck * Description : Master Send No Acknowledge Single * Input : None * Output : None * Return : None ****************************************************************************** */ void I2C_NoAck(void) { SDA_OUT(); SDA_H; I2C_delay(); SCL_H; I2C_delay(); SCL_L; SDA_IN(); } /******************************************************************************* * Function Name : I2C_WaitAck * Description : Master Reserive Slave Acknowledge Single * Input : None * Output : None * Return : None ****************************************************************************** */ //void I2C_WaitAck(void) //{ // unsigned char i; // SDA_IN(); // I2C_delay(); // I2C_delay(); // I2C_delay(); // I2C_delay(); // I2C_delay(); // I2C_delay(); // SCL_H; // I2C_delay(); // i=0x00F; // do // { // } // while(--i!=0); // SCL_L; // SDA_OUT(); //} void I2C_WaitAck(void) { SDA_IN(); while(Read_SDA == 1); SCL_H; delay_us(20); // while(Read_SDA == 0); SCL_L; } /******************************************************************************* * Function Name : 
I2C_SendByte * Description : Master Send a Byte to Slave * Input : Will Send Date * Output : None * Return : None ****************************************************************************** */ void outbyt(unsigned char i2c_data) //单片机输出数据 数据从高位到低位// { unsigned char i; SDA_OUT(); SCL_L; for(i=0;i<8;i++) { if(i2c_data&0x80) { SDA_H;//SDA=1 } else { SDA_L;//SDA=0 } i2c_data <<=1; i2c_clock(); } SDA_H; SDA_IN(); } /******************************************************************************* * Function Name : I2C_RadeByte * Description : Master Reserive a Byte From Slave * Input : None * Output : None * Return : Date From Slave ****************************************************************************** */ u16 inbyt(void)//单片机读数据 { u16 i,i2c_data,TmpIn; SDA_IN();//SDA turn to input mode i2c_data=0; i=8; while(i--) { i2c_data<<=1; SCL_L;//SCL=0 I2C_delay(); SCL_H;//SCL=1 TmpIn=SDA_DATA; TmpIn &=SDA; TmpIn >>=SDA_num; i2c_data|=TmpIn; I2C_delay(); } SCL_L;//SCL=0 return(i2c_data); } void RcvAck(void) { SDA_IN(); I2C_delay(); SCL_H; I2C_delay(); SCL_L; SDA_IN(); } u8 KEY_Scan(void) { u8 key = 0; if(key_up && (KEY2==1)) { key = KEY2; delay_ms(10); key_up=0; if(KEY2==1) { return 3; } } if(KEY2==0) key_up=1; return 0; } <file_sep>#include "stm32f4xx_hal.h" #include "USER_SPI_IO.h" #define SPI_IO_CLK_L() HAL_GPIO_WritePin(GPIOA, GPIO_PIN_5, GPIO_PIN_RESET) #define SPI_IO_CLK_H() HAL_GPIO_WritePin(GPIOA, GPIO_PIN_5, GPIO_PIN_SET) #define SPI_IO_SIMO_L() HAL_GPIO_WritePin(GPIOA, GPIO_PIN_6, GPIO_PIN_RESET) #define SPI_IO_SIMO_H() HAL_GPIO_WritePin(GPIOA, GPIO_PIN_6, GPIO_PIN_SET) #define SPI_IO_SOMI HAL_GPIO_ReadPin(GPIOA, GPIO_PIN_7) #define SPI_IO_CS_L() HAL_GPIO_WritePin(GPIOA, GPIO_PIN_4, GPIO_PIN_RESET) #define SPI_IO_CS_H() HAL_GPIO_WritePin(GPIOA, GPIO_PIN_4, GPIO_PIN_SET) #define SPI_IO_FCLK() user_spi_delay_us(1) void user_spi_delay_us(uint16_t delay) { while(delay--) { } } void spi_io_write(uint8_t TxData) { uint8_t i; SPI_IO_CLK_L(); for(i = 0;i<8;i++) { 
SPI_IO_FCLK(); if(TxData&(0x80>>i)) { SPI_IO_SIMO_H() ; } else { SPI_IO_SIMO_L() ; } SPI_IO_CLK_H(); SPI_IO_FCLK(); SPI_IO_CLK_L(); } SPI_IO_FCLK(); } uint8_t spi_io_read(void) { uint8_t i,RxData = 0; SPI_IO_CLK_L(); for(i = 0;i<8;i++) { SPI_IO_FCLK(); SPI_IO_CLK_H(); SPI_IO_FCLK(); SPI_IO_CLK_L(); if(SPI_IO_SOMI) { RxData |= (0x80>>i); } else { } } SPI_IO_FCLK(); return RxData; } void spi_io_writeread(uint8_t *pTxData,uint16_t TxSize,uint8_t *pRxData,uint16_t RxSize) { uint16_t i,j; for(i = 0;i<TxSize;i++) { spi_io_write(pTxData[i]); } for(j = 0;j<RxSize;j++) { pRxData[j] = spi_io_read(); } } void spi_io_multiwrite(uint8_t *pTxData,uint16_t TxSize) { uint16_t i; for(i = 0;i<TxSize;i++) { spi_io_write(pTxData[i]); } } void spi_io_multiread(uint8_t *pRxData,uint16_t RxSize) { uint16_t i; for(i = 0;i<RxSize;i++) { pRxData[i] = spi_io_read(); } } <file_sep>#ifndef __USER_SPI_IO_H #define __USER_SPI_IO_H #include "stm32f4xx_hal.h" void user_spi_delay_us(uint16_t delay); extern void spi_io_write(uint8_t TxData); extern uint8_t spi_io_read(void); extern void spi_io_writeread(uint8_t *pTxData,uint16_t TxSize,uint8_t *pRxData,uint16_t RxSize); extern void spi_io_multiwrite(uint8_t *pTxData,uint16_t TxSize); extern void spi_io_multiread(uint8_t *pRxData,uint16_t RxSize); #endif <file_sep>/** ****************************************************************************** * File Name : main.c * Description : Main program body ****************************************************************************** * * COPYRIGHT(c) 2018 STMicroelectronics * * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. 
Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * 3. Neither the name of STMicroelectronics nor the names of its contributors * may be used to endorse or promote products derived from this software * without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
* ****************************************************************************** */ /* Includes ------------------------------------------------------------------*/ #include "main.h" #include "stm32f4xx_hal.h" #include "spi.h" #include "tim.h" #include "usart.h" #include "gpio.h" /* USER CODE BEGIN Includes */ #include "string.h" #include "dwt_stm32_delay.h" #include "ads1x9x_hal_driver.h" #include "IIC.h" #include "pps960.h" #include "afe4404_hw.h" #include "agc_V3_1_19.h" #include "hqerror.h" /* USER CODE END Includes */ /* Private variables ---------------------------------------------------------*/ /* USER CODE BEGIN PV */ /* Private variables ---------------------------------------------------------*/ /* USER CODE END PV */ /* Private function prototypes -----------------------------------------------*/ void SystemClock_Config(void); void Error_Handler(void); /* USER CODE BEGIN PFP */ /* Private function prototypes -----------------------------------------------*/ /* USER CODE END PFP */ /* USER CODE BEGIN 0 */ uint8_t aRxBuffer[8] = {0}; uint8_t ADCData[165] = {0}; uint8_t mode1_tran_data[167] = {0}; uint8_t mode2_tran_data[17] = {0}; uint8_t mode3_tran_data[21] = {0}; uint8_t dataIsOk = 0; uint8_t sensor_open = 0; uint8_t sensor_is_init = 0; extern uint8_t ads1291_is_init; extern uint16_t acc_check; uint16_t lifeQhrm = 0; int8_t skin = 0; uint8_t Fatigue = 0; uint16_t HeartPack1 = 0; uint16_t HeartPack2 = 0; uint16_t HeartPack3 = 0; uint8_t lifeQhrmMax = 85; uint8_t FatigueMax = 80; uint8_t AlarmType = 0; uint8_t Alarm_status = 0; uint8_t work_mode = 0; void pps960_sensor_task(void); void pps960_sensor_task2(void); void sensor_switch(void); /* USER CODE END 0 */ int main(void) { /* USER CODE BEGIN 1 */ /* USER CODE END 1 */ /* MCU Configuration----------------------------------------------------------*/ /* Reset of all peripherals, Initializes the Flash interface and the Systick. 
*/ HAL_Init(); /* Configure the system clock */ SystemClock_Config(); /* Initialize all configured peripherals */ MX_GPIO_Init(); MX_USART2_UART_Init(); MX_TIM2_Init(); MX_TIM3_Init(); MX_SPI1_Init(); /* USER CODE BEGIN 2 */ if(DWT_Delay_Init()) { Error_Handler(); /* Call Error Handler */ } HAL_UART_Receive_IT(&huart2, aRxBuffer, 8); I2C_GPIO_Config(); /* USER CODE END 2 */ /* Infinite loop */ /* USER CODE BEGIN WHILE */ while (1) { /* USER CODE END WHILE */ /* USER CODE BEGIN 3 */ sensor_switch(); if(dataIsOk == 1 && work_mode == 1) { HAL_GPIO_WritePin(DATA_READY_GPIO_Port,DATA_READY_Pin,GPIO_PIN_SET); HAL_Delay(5); HAL_UART_Transmit(&huart2,mode1_tran_data,167,0xff); dataIsOk = 0; HAL_Delay(5); HAL_GPIO_WritePin(DATA_READY_GPIO_Port,DATA_READY_Pin,GPIO_PIN_RESET); } if(dataIsOk == 1 && work_mode == 2) { HAL_GPIO_WritePin(DATA_READY_GPIO_Port,DATA_READY_Pin,GPIO_PIN_SET); HAL_Delay(5); HAL_UART_Transmit(&huart2,mode2_tran_data,17,0xff); dataIsOk = 0; HAL_Delay(5); HAL_GPIO_WritePin(DATA_READY_GPIO_Port,DATA_READY_Pin,GPIO_PIN_RESET); } if(Alarm_status == 1 && work_mode == 2) { HAL_GPIO_WritePin(DATA_READY_GPIO_Port,DATA_READY_Pin,GPIO_PIN_SET); HAL_Delay(5); HAL_UART_Transmit(&huart2,mode3_tran_data,21,0xff); Alarm_status = 0; HAL_Delay(5); HAL_GPIO_WritePin(DATA_READY_GPIO_Port,DATA_READY_Pin,GPIO_PIN_RESET); } } /* USER CODE END 3 */ } /** System Clock Configuration */ void SystemClock_Config(void) { RCC_OscInitTypeDef RCC_OscInitStruct; RCC_ClkInitTypeDef RCC_ClkInitStruct; /**Configure the main internal regulator output voltage */ __HAL_RCC_PWR_CLK_ENABLE(); __HAL_PWR_VOLTAGESCALING_CONFIG(PWR_REGULATOR_VOLTAGE_SCALE2); /**Initializes the CPU, AHB and APB busses clocks */ RCC_OscInitStruct.OscillatorType = RCC_OSCILLATORTYPE_HSI; RCC_OscInitStruct.HSIState = RCC_HSI_ON; RCC_OscInitStruct.HSICalibrationValue = 16; RCC_OscInitStruct.PLL.PLLState = RCC_PLL_ON; RCC_OscInitStruct.PLL.PLLSource = RCC_PLLSOURCE_HSI; RCC_OscInitStruct.PLL.PLLM = 16; 
RCC_OscInitStruct.PLL.PLLN = 336; RCC_OscInitStruct.PLL.PLLP = RCC_PLLP_DIV4; RCC_OscInitStruct.PLL.PLLQ = 4; if (HAL_RCC_OscConfig(&RCC_OscInitStruct) != HAL_OK) { Error_Handler(); } /**Initializes the CPU, AHB and APB busses clocks */ RCC_ClkInitStruct.ClockType = RCC_CLOCKTYPE_HCLK|RCC_CLOCKTYPE_SYSCLK |RCC_CLOCKTYPE_PCLK1|RCC_CLOCKTYPE_PCLK2; RCC_ClkInitStruct.SYSCLKSource = RCC_SYSCLKSOURCE_PLLCLK; RCC_ClkInitStruct.AHBCLKDivider = RCC_SYSCLK_DIV1; RCC_ClkInitStruct.APB1CLKDivider = RCC_HCLK_DIV2; RCC_ClkInitStruct.APB2CLKDivider = RCC_HCLK_DIV1; if (HAL_RCC_ClockConfig(&RCC_ClkInitStruct, FLASH_LATENCY_2) != HAL_OK) { Error_Handler(); } /**Configure the Systick interrupt time */ HAL_SYSTICK_Config(HAL_RCC_GetHCLKFreq()/1000); /**Configure the Systick */ HAL_SYSTICK_CLKSourceConfig(SYSTICK_CLKSOURCE_HCLK); /* SysTick_IRQn interrupt configuration */ HAL_NVIC_SetPriority(SysTick_IRQn, 0, 0); } /* USER CODE BEGIN 4 */ void HAL_GPIO_EXTI_Callback(uint16_t GPIO_Pin) { static uint8_t cnt = 0; static uint16_t TimeNum = 0; uint8_t Rx[6] = {0}; uint32_t EEG_Data = 0; uint8_t LOFF_State = 0; uint8_t Data[3] = {0}; if(ads1291_is_init == 1) { ADS_ReadData(Rx,6); EEG_Data = ((Rx[3]*0xFFFFFF)+(Rx[4]*0xFFFF)+Rx[5]*0xFF+0x80000000)>>8; Data[0] = EEG_Data >> 16; Data[1] = EEG_Data >> 8 % 0xFF; Data[2] = EEG_Data & 0xFE; memcpy((ADCData + cnt * 3),Data,3); cnt ++; if(cnt == 50) { LOFF_State = ((Rx[0]<<4) & 0x10) | ((Rx[1] & 0x80)>>4); cnt = 0; TimeNum ++; HeartPack1 ++; mode1_tran_data[0] = 0xBB; mode1_tran_data[1] = 0xBB; mode1_tran_data[2] = 0xBB; mode1_tran_data[3] = 0x00; mode1_tran_data[4] = 0x9F; mode1_tran_data[5] = HeartPack1 >> 8; mode1_tran_data[6] = HeartPack1 & 0xFF; mode1_tran_data[7] = 0x01; mode1_tran_data[8] = skin; mode1_tran_data[9] = lifeQhrm; mode1_tran_data[10] = 0x02; mode1_tran_data[11] = Fatigue; mode1_tran_data[12] = 0x03; mode1_tran_data[13] = LOFF_State; memcpy(mode1_tran_data+14,ADCData,150); memset(ADCData,0,150); mode1_tran_data[164] = 0xFF; 
mode1_tran_data[165] = 0xFF; mode1_tran_data[166] = 0xFF; if(work_mode == 1) { dataIsOk = 1; } } if(TimeNum == 1500) { TimeNum = 0; HeartPack2 ++; mode2_tran_data[0] = 0xBB; mode2_tran_data[1] = 0xBB; mode2_tran_data[2] = 0xBB; mode2_tran_data[3] = 0x00; mode2_tran_data[4] = 0x09; mode2_tran_data[5] = HeartPack2 >> 8; mode2_tran_data[6] = HeartPack2 & 0xFF; mode2_tran_data[7] = 0x01; mode2_tran_data[8] = skin; mode2_tran_data[9] = lifeQhrm; mode2_tran_data[10] = 0x02; mode2_tran_data[11] = Fatigue; mode2_tran_data[12] = 0x03; mode2_tran_data[13] = LOFF_State; mode2_tran_data[14] = 0xFF; mode2_tran_data[15] = 0xFF; mode2_tran_data[16] = 0xFF; if(work_mode == 2) { dataIsOk = 1; } } if(lifeQhrm > lifeQhrmMax || Fatigue > FatigueMax) { HeartPack3 ++; if(lifeQhrm > lifeQhrmMax) { AlarmType = 0x01; } if(Fatigue > FatigueMax) { AlarmType = 0x02; } mode3_tran_data[0] = 0xBB; mode3_tran_data[1] = 0xBB; mode3_tran_data[2] = 0xBB; mode3_tran_data[3] = 0x00; mode3_tran_data[4] = 0x0D; mode3_tran_data[5] = HeartPack3 >> 8; mode3_tran_data[6] = HeartPack3 & 0xFF; mode3_tran_data[7] = 0x00; mode3_tran_data[8] = AlarmType; mode3_tran_data[9] = lifeQhrmMax; mode3_tran_data[10] = FatigueMax; mode3_tran_data[11] = 0x01; mode3_tran_data[12] = skin; mode3_tran_data[13] = lifeQhrm; mode3_tran_data[14] = 0x02; mode3_tran_data[15] = Fatigue; mode3_tran_data[16] = 0x03; mode3_tran_data[17] = LOFF_State; mode3_tran_data[18] = 0xFF; mode3_tran_data[19] = 0xFF; mode3_tran_data[20] = 0xFF; Alarm_status = 1; } } } void HAL_TIM_PeriodElapsedCallback(TIM_HandleTypeDef *htim) { if (htim->Instance == htim3.Instance) { // 获取心率数值 pps960_sensor_task2(); //1s触发一次 } if (htim->Instance == htim2.Instance) { // 获取心率原始数据 pps960_sensor_task(); //40ms触发一次 } } /** * @brief Rx Transfer completed callbacks. * @param huart pointer to a UART_HandleTypeDef structure that contains * the configuration information for the specified UART module. 
* @retval None */ void HAL_UART_RxCpltCallback(UART_HandleTypeDef *huart) { uint8_t AT_MODE0[8] = "AT+MODE0"; uint8_t AT_MODE1[8] = "AT+MODE1"; uint8_t AT_MODE2[8] = "AT+MODE2"; if(huart->Instance == USART2) { HAL_UART_Receive_IT(&huart2,aRxBuffer,8); if (memcmp(AT_MODE0, aRxBuffer, 8) == 0) { sensor_open = 0; work_mode = 0; printf("OK+MODE0"); } if (memcmp(AT_MODE1, aRxBuffer, 8) == 0) { sensor_open = 1; work_mode = 1; printf("OK+MODE1"); } if (memcmp(AT_MODE2, aRxBuffer, 8) == 0) { sensor_open = 1; work_mode = 2; printf("OK+MODE2"); } } } void sensor_switch(void) { if(sensor_open == 1 && sensor_is_init == 0) { sensor_is_init = 1; //pps960初始化 init_pps960_sensor(); acc_check = 1; HAL_TIM_Base_Start_IT(&htim2); HAL_TIM_Base_Start_IT(&htim3); //1291初始化 ads1291_init(); } if(sensor_open == 0 && sensor_is_init == 1) { sensor_is_init = 0; //1291去初始化 ads1291_disable(); //pps960去初始化 HAL_TIM_Base_Stop_IT(&htim2); HAL_TIM_Base_Stop_IT(&htim3); pps960_disable(); acc_check = 0; } } /* USER CODE END 4 */ /** * @brief This function is executed in case of error occurrence. * @param None * @retval None */ void Error_Handler(void) { /* USER CODE BEGIN Error_Handler */ /* User can add his own implementation to report the HAL error return state */ while(1) { } /* USER CODE END Error_Handler */ } #ifdef USE_FULL_ASSERT /** * @brief Reports the name of the source file and the source line number * where the assert_param error has occurred. 
* @param file: pointer to the source file name * @param line: assert_param error line source number * @retval None */ void assert_failed(uint8_t* file, uint32_t line) { /* USER CODE BEGIN 6 */ /* User can add his own implementation to report the file name and line number, ex: printf("Wrong parameters value: file %s on line %d\r\n", file, line) */ /* USER CODE END 6 */ } #endif /** * @} */ /** * @} */ /************************ (C) COPYRIGHT STMicroelectronics *****END OF FILE****/ <file_sep>#include "ads1x9x_hal_driver.h" #include "main.h" #include "stm32f4xx_hal.h" #include "dwt_stm32_delay.h" #define ADS_Delay_Num 10 uint8_t ads1291_is_init = 0; ADS_ConfigDef ADS_Config1; void ads1291_disable(void) { ads1291_is_init = 0; ADS_PIN_EN_L(); } void ads1291_init(void) { ads1291_is_init = 1; ADS_PIN_EN_H(); HAL_Delay(200); ADS_PIN_MAINCLKSEL_H(); HAL_Delay(100); ADS_PIN_RESET_H(); HAL_Delay(1000); ADS_PIN_RESET_L(); HAL_Delay(10); ADS_PIN_RESET_H(); ADS_PIN_START_L(); HAL_Delay(10); ADS_Command(ADS_SDATAC); ADS_init(); ADS_PIN_START_H(); ADS_Command(ADS_RDATAC); } void ADS_init(void) { ADS_Config_Init(&ADS_Config1); ADS_Config1.CONFIG1.Value = ADS_DR(1); ADS_Config1.CONFIG2.Value |= ADS_PDB_LOFF_COMP + ADS_PDB_REFBUF + ADS_INT_TEST + ADS_TEST_FREQ; ADS_Config1.LOFF.Value |= ADS_COMP_TH(0) + ADS_ILEAD_OFF(0);// ADS_Config1.CH1SET.Value |=ADS_GAIN1(6)+ADS_MUX1(0); ADS_Config1.CH2SET.Value |=ADS_PD2 + ADS_GAIN2(0) + ADS_MUX2(1);// ADS_Config1.RLD_SENS.Value |= ADS_RLD_LOFF_SENS + ADS_CHOP(0) + ADS_PDB_RLD + ADS_RLD1N + ADS_RLD1P;// ADS_Config1.LOFF_SENS.Value |= ADS_FLIP1 +ADS_LOFF1N + ADS_LOFF1P; ADS_Config1.LOFF_STAT.Value |= NULL; ADS_Config1.RESP1.Value |= NULL; ADS_Config1.RESP2.Value |= ADS_RLDREF_INT ;//+ ADS_CALIB_ON ADS_Config1.GPIO.Value |= NULL; ADS_Config(&ADS_Config1); } void ADS_Config_Init(ADS_ConfigDef *Config) { (*Config).CONFIG1.Address = ADS_REG_CONFIG1_ADDRESS; (*Config).CONFIG1.Value = ADS_REG_CONFIG1_DEFAULT; (*Config).CONFIG2.Address = 
ADS_REG_CONFIG2_ADDRESS; (*Config).CONFIG2.Value = ADS_REG_CONFIG2_DEFAULT; (*Config).LOFF.Address = ADS_REG_LOFF_ADDRESS; (*Config).LOFF.Value = ADS_REG_LOFF_DEFAULT; (*Config).CH1SET.Address = ADS_REG_CH1SET_ADDRESS; (*Config).CH1SET.Value = ADS_REG_CH1SET_DEFAULT; (*Config).CH2SET.Address = ADS_REG_CH2SET_ADDRESS; (*Config).CH2SET.Value = ADS_REG_CH2SET_DEFAULT; (*Config).RLD_SENS.Address = ADS_REG_RLDSENS_ADDRESS; (*Config).RLD_SENS.Value = ADS_REG_RLDSENS_DEFAULT; (*Config).LOFF_SENS.Address = ADS_REG_LOFFSENS_ADDRESS; (*Config).LOFF_SENS.Value = ADS_REG_LOFFSENS_DEFAULT; (*Config).LOFF_STAT.Address = ADS_REG_LOFFSTAT_ADDRESS; (*Config).LOFF_STAT.Value = ADS_REG_LOFFSTAT_DEFAULT; (*Config).RESP1.Address = ADS_REG_RESP1_ADDRESS; (*Config).RESP1.Value = ADS_REG_RESP1_DEFAULT; (*Config).RESP2.Address = ADS_REG_RESP2_ADDRESS; (*Config).RESP2.Value = ADS_REG_RESP2_DEFAULT; (*Config).GPIO.Address = ADS_REG_GPIO_ADDRESS; (*Config).GPIO.Value = ADS_REG_GPIO_DEFAULT; } void ADS_Config(ADS_ConfigDef *Config) { ADS_Setting((*Config).CONFIG1.Address,0,&(*Config).CONFIG1.Value,1); ADS_Setting((*Config).CONFIG2.Address,0,&(*Config).CONFIG2.Value,1); ADS_Setting((*Config).LOFF.Address,0,&(*Config).LOFF.Value,1); ADS_Setting((*Config).CH1SET.Address,0,&(*Config).CH1SET.Value,1); ADS_Setting((*Config).CH2SET.Address,0,&(*Config).CH2SET.Value,1); ADS_Setting((*Config).RLD_SENS.Address,0,&(*Config).RLD_SENS.Value,1); ADS_Setting((*Config).LOFF_SENS.Address,0,&(*Config).LOFF_SENS.Value,1); ADS_Setting((*Config).LOFF_STAT.Address,0,&(*Config).LOFF_STAT.Value,1); ADS_Setting((*Config).RESP1.Address,0,&(*Config).RESP1.Value,1); ADS_Setting((*Config).RESP2.Address,0,&(*Config).RESP2.Value,1); ADS_Setting((*Config).GPIO.Address,0,&(*Config).GPIO.Value,1); } void ADS_Setting(uint8_t REG,uint8_t Num,uint8_t *pData,uint16_t Size ) { REG |=ADS_WREG; ADS_PIN_CS_L(); ADS_SPI_Delay(ADS_Delay_Num); ADS_SPI_Write(&REG,1); ADS_SPI_Write(&Num,1); ADS_SPI_Write(pData,Size); 
ADS_SPI_Delay(ADS_Delay_Num); ADS_PIN_CS_H(); } void ADS_Command(uint8_t CMD) { ADS_PIN_CS_L(); ADS_SPI_Delay(ADS_Delay_Num); ADS_SPI_Write(&CMD,1); ADS_SPI_Delay(ADS_Delay_Num); ADS_PIN_CS_H(); } void ADS_ReadData(uint8_t *pRxData,uint16_t Size) { ADS_PIN_CS_L(); ADS_SPI_Delay(ADS_Delay_Num); ADS_SPI_Read(pRxData,Size); ADS_SPI_Delay(ADS_Delay_Num); ADS_PIN_CS_H(); } void ADS_ReadStatue(uint8_t REG,uint8_t Num,uint8_t *pData,uint16_t Size) { REG |= ADS_RREG; ADS_PIN_CS_L(); ADS_SPI_Delay(ADS_Delay_Num); ADS_SPI_Write(&REG,1); ADS_SPI_Write(&Num,1); ADS_SPI_Read(pData,Size); ADS_SPI_Delay(ADS_Delay_Num); ADS_PIN_CS_H(); }
3f5be26e8ae6439b45a7662b561a8c29b6c45eed
[ "C" ]
11
C
ribribrib2/STM32F401
134d63b5c3714d0e2f9ed929a1f148c8044f28ea
4b1655d519ca411f4650d7ac21b611c17bcea2bc
refs/heads/master
<repo_name>joelhelbling/spending-report<file_sep>/src/components/ReportsPagination.js import React, { Component } from 'react' import { ListGroupItem, ButtonGroup, Button } from 'react-bootstrap' class ReportsPagination extends Component { render() { return ( <ListGroupItem className="pagination clearfix"> Page &nbsp; <ButtonGroup> <Button bsStyle="default" disabled={true}>1</Button> </ButtonGroup> <Button bsStyle="primary" className="pull-right"> <span className="glyphicon glyphicon-plus-sign"></span> &nbsp; Add a new report </Button> </ListGroupItem> ) } } export default ReportsPagination <file_sep>/src/data/reports.js const reports = [ { id: 'foo', title: 'Four Winds Eclectic', created_at: Date.now(), updated_at: Date.now(), status: 'open', comment: 'A report is a beautiful thing', }, { id: 'bar', title: 'Wonderful putty --try it!', created_at: Date.now(), updated_at: Date.now(), status: 'closed', comment: 'A bicycle and a truck went on a date.', } ] export default reports <file_sep>/src/components/Reports.js import React from 'react' import ReportsCollection from './ReportsCollection' import ReportsPagination from './ReportsPagination' class Reports extends React.Component { render() { return ( <div> <ReportsCollection reports={this.props.reports} /> <ReportsPagination /> </div> ) } } export default Reports <file_sep>/src/actions/actionCreators.js export function addReport(report) { return { type: 'ADD_REPORT', report } } export function addLineItem(reportId, lineItem) { return { type: 'ADD_LINE_ITEM', reportId, lineItem } } export function removeLineItem(reportId, lineItemId) { return { type: 'REMOVE_LINE_ITEM', reportId, lineItemId } } export function editLineItem(reportId, lineItemId) { return { type: 'EDIT_LINE_ITEM', reportId, lineItemId } } export function updateLineItem(reportId, lineItemId, lineItem) { return { type: 'UPDATE_LINE_ITEM', reportId, lineItemId, lineItem } } <file_sep>/src/components/ReportSummary.js import React, { Component } from 'react' 
import { Link } from 'react-router' class ReportSummary extends Component { render() { const { report, reportId } = this.props const statusColor = 'warning' return ( <Link to={`/reports/${reportId}`} className="list-group-item list-group-item-warning clearfix"> <h4> <span className="glyphicon glyphicon-list-alt"></span> {report.title} <span className={`label label-${statusColor} pull-right`}>{report.status}</span> </h4> <div className="col-sm-5"> <em> <strong>0</strong> items, totalling <strong>0 Le</strong> </em> </div> <div className="col-sm-1"></div> <div className="col-sm-6">{report.comment}</div> </Link> ) } } export default ReportSummary <file_sep>/src/components/LineItemForm.test.js import React from 'react' import { mount } from 'enzyme' import { expect } from 'chai' import LineItemForm from './LineItemForm' import td from 'testdouble' describe('LineItemForm', () => { let [ component, reportId, lineItemId, description, unitCost, quantity, addLineItem, updateLineItem ] = [] describe('new line item', () => { beforeEach(() => { reportId = 'ABC123' description = 'dusty gloves' unitCost = 1000 quantity = 2 addLineItem = td.function('addLineItem') updateLineItem = td.function('updateLineItem') component = mount( <LineItemForm addLineItem={addLineItem} updateLineItem={updateLineItem} params={{reportId}} /> ) }) it('displays an "Add" button', () => { expect(component.find('button').text()).to.contain('Add') }) it('fires the addLineItem action', () => { component.ref('descriptionInput').get(0).value = 'dusty gloves' component.ref('unitCostInput').get(0).value = '1000' component.ref('quantityInput').get(0).value = '2' component.find('button.add-line-item').simulate('click') td.verify(addLineItem(reportId, { description, unitCost, quantity })) }) it('defaults quantity to 1', () => { component.ref('descriptionInput').get(0).value = 'dusty gloves' component.ref('unitCostInput').get(0).value = '1000' component.find('button.add-line-item').simulate('click') 
td.verify(addLineItem(reportId, { description, unitCost, quantity: 1 })) }) }) describe('edit line item', () => { beforeEach(() => { reportId = 'ABC123' lineItemId = '7' description = 'dusty gloves' unitCost = 1000 quantity = 2 addLineItem = td.function('addLineItem') updateLineItem = td.function('updateLineItem') component = mount( <LineItemForm addLineItem={addLineItem} updateLineItem={updateLineItem} params={{reportId}} id={lineItemId} lineItem={{ description, unitCost, quantity, }} /> ) }) it('displays an "Update" button', () => { expect(component.find('button').text()).to.contain('Update') }) it('fires the updateLineItem action', () => { component.ref('quantityInput').get(0).value = '3' component.find('button.add-line-item').simulate('click') td.verify(updateLineItem(reportId, lineItemId, { description, unitCost, quantity: 3 })) }) }) }) <file_sep>/src/components/LineItem.js import React, { Component } from 'react' import { ListGroupItem, ButtonGroup, Button } from 'react-bootstrap' import humanize from 'humanize-number' class LineItem extends Component { handleEdit = (e) => { e.preventDefault() const id = this.props.id const reportId = this.props.params.reportId this.props.editLineItem(reportId, id) } handleRemove = (e) => { e.preventDefault() const id = this.props.id const reportId = this.props.params.reportId this.props.removeLineItem(reportId, id) } render() { const { description, unitCost, quantity } = this.props.lineItem return ( <ListGroupItem className="clearfix"> <div className="col-sm-1 line-item-controls"> <ButtonGroup> <Button bsStyle="info" className="btn-xs edit" onClick={this.handleEdit}> <span className="glyphicon glyphicon-pencil"></span> </Button> <Button bsStyle="danger" className="btn-xs remove" onClick={this.handleRemove}> <span className="glyphicon glyphicon-trash"></span> </Button> </ButtonGroup> </div> <div className="col-sm-5 li-description">{description}</div> <div className="col-sm-2 li-unit-cost text-right">Le {humanize(unitCost || 
0)}</div> <div className="col-sm-1 li-quantity text-left">x{humanize(quantity || 0)}</div> <div className="col-sm-2 li-total text-right">Le {humanize((unitCost * quantity) || 0)}</div> </ListGroupItem> ) } } export default LineItem <file_sep>/src/reducers/reports.js function reports(state = {}, action) { let newState; switch(action.type) { case 'EDIT_LINE_ITEM' : newState = [...state] return newState.map((report) => { if (report.id === action.reportId) { report._editing = action.lineItemId } return report }) case 'UPDATE_LINE_ITEM' : newState = [...state] return newState.map((report) => { if (report.id === action.reportId) { delete(report._editing) } return report }) default: return state } } export default reports <file_sep>/src/components/Report.js import React from 'react' import { Link } from 'react-router' import { ListGroup, ListGroupItem, Button } from 'react-bootstrap' import humanize from 'humanize-number' import LineItem from './LineItem' import LineItemForm from './LineItemForm' class Report extends React.Component { renderLineItem = (report, lineItem, index) => { if (report._editing === index) { return ( <LineItemForm lineItem={lineItem} key={index} id={index} {...this.props} /> ) } else { return ( <LineItem lineItem={lineItem} key={index} id={index} {...this.props} /> ) } } renderAddLineItemForm = (isEditing) => { if (isEditing) { return ( <LineItemForm {...this.props} /> ) } } reportTotal = (lineItems) => { return lineItems.reduce((accumulator, li) => { return accumulator + li.unitCost*li.quantity }, 0) } render() { const { reports, lineItems, params } = this.props const reportId = params.reportId const report = reports.filter((report) => report.id === reportId)[0] return ( <div> <h2 className="clearfix"> <span className="label label-warning pull-right">{report.status}</span> <span className="glyphicon glyphicon-list-alt"></span> &nbsp; {report.title} </h2> <p> {report.comment} <Button bsStyle="primary" className="btn-xs edit-report-button" 
disabled={true}> <span className="glyphicon glyphicon-pencil"></span> </Button> <Button bsStyle="danger" className="btn-xs confirmed-delete" disabled={true}> <span className="glyphicon glyphicon-trash"></span> </Button> </p> <ListGroup> <ListGroupItem bsStyle="info" className="clearfix"> <Button bsStyle="success" className="pull-right" disabled={true}> <span className="glyphicon glyphicon-cloud-upload"></span> &nbsp; Close and send this report </Button> <h4 className="clearfix">Line Items</h4> <div className="col-sm-1">&nbsp;</div> <div className="li-header col-sm-5 text-left">Description</div> <div className="li-header col-sm-2 text-right">Unit cost</div> <div className="li-header col-sm-1">Quantity</div> <div className="li-header col-sm-2 text-right">Total</div> </ListGroupItem> { (lineItems[reportId] || []).map((lineItem, i) => this.renderLineItem(report, lineItem, i)) } { this.renderAddLineItemForm(typeof(report._editing) === 'undefined') } <ListGroupItem bsStyle="info" className="clearfix"> <div className="col-sm-8">&nbsp;</div> <div className="col-sm-1 text-right"> <strong>Total:</strong> </div> <div className="col-sm-2 text-right report-total"> Le { humanize(this.reportTotal(lineItems[reportId])) } </div> </ListGroupItem> </ListGroup> <Link to="/reports" className="pull-left"> <span className="glyphicon glyphicon-chevron-left"></span> &nbsp; Return to All Reports </Link> </div> ) } } export default Report <file_sep>/src/components/LineItem.test.js import React from 'react' import { mount } from 'enzyme' import { expect } from 'chai' import LineItem from './LineItem' import td from 'testdouble' describe('LineItem', () => { let [component, reportId, key, lineItem, removeLineItem, addLineItem, editLineItem ] = []; beforeEach(() => { removeLineItem = td.function('removeLineItem') editLineItem = td.function('editLineItem') reportId = "ABC123" key = 1 lineItem = { description: 'chick peas', unitCost: 10000, quantity: 3, } component = mount( <LineItem 
lineItem={lineItem} key={key} id={key} params={{reportId}} removeLineItem={removeLineItem} editLineItem={editLineItem} /> ) }) describe('displays', () => { test('description', () => { expect(component.find('.li-description').text()) .to.contain('chick peas') }) test('unit cost', () => { expect(component.find('.li-unit-cost').text()) .to.contain('10,000') }) test('quantity', () => { expect(component.find('.li-quantity').text()) .to.contain('x3') }) test('total', () => { expect(component.find('.li-total').text()) .to.contain('30,000') }) }) describe('delete button', () => { it('fires the removeLineItem action', () => { component.find('button.remove').simulate('click') td.verify(removeLineItem(reportId, key)) }) }) describe('edit button', () => { it('fires the editLineItem action', () => { component.find('button.edit').simulate('click') td.verify(editLineItem(reportId, key)) }) }) }) <file_sep>/src/reducers/lineItems.js function lineItems(state = {}, action) { let newState; switch(action.type) { case 'ADD_LINE_ITEM' : newState = {...state} newState[action.reportId] = newState[action.reportId] || [] newState[action.reportId].push( action.lineItem ) return newState case 'REMOVE_LINE_ITEM' : newState = {...state} delete(newState[action.reportId][action.lineItemId]) return newState case 'UPDATE_LINE_ITEM' : newState = {...state} newState[action.reportId][action.lineItemId] = action.lineItem return newState default: return state } } export default lineItems <file_sep>/src/components/Report.test.js import React from 'react' import { mount } from 'enzyme' import { expect } from 'chai' import Report from './Report' import LineItem from './LineItem' import reports from '../data/reports' import lineItems from '../data/lineItems' describe('Report', () => { let [expectedReport, expectedLineItems, params, report, expected] = []; beforeEach(() => { params = { reportId: 'foo' } expectedReport = reports.filter((report) => report.id === 'foo')[0] expectedLineItems = lineItems.foo 
report = mount( <Report reports={reports} lineItems={lineItems} params={params} /> ) }) describe('displays', () => { test('title', () => { expect(report.find('h2').text()) .to.include(expectedReport.title) }) test('status', () => { expect(report.find('h2 span.label').text()) .to.equal(expectedReport.status) }) test('comment', () => { expect(report.find('p').text()) .to.include(expectedReport.comment) }) test('line items', () => { expect(report.find(LineItem)) .to.have.length(expectedLineItems.length) }) test('total', () => { expect(report.find('.report-total').text()) .to.equal("Le 450,000") }) }) }) <file_sep>/src/components/ReportsCollection.js import React, { Component } from 'react' import { ListGroup } from 'react-bootstrap' import ReportSummary from './ReportSummary' class ReportsCollection extends Component { render() { const { reports } = this.props const keys = Object.keys(reports) return ( <ListGroup> { keys.map((key) => <ReportSummary report={reports[key]} key={key} reportId={key} />) } </ListGroup> ) } } export default ReportsCollection <file_sep>/src/data/lineItems.js const lineItems = { 'foo': [ { description: 'A flanged thinger', unitCost: 100000, quantity: 4, }, { description: 'Knarled kinks', unitCost: 50000, quantity: 1, } ] } export default lineItems
17816ba625408e1be197280af12fae7a33057592
[ "JavaScript" ]
14
JavaScript
joelhelbling/spending-report
84e16f9c3ad0ab6c92402b2e5cf0dd9b224189fd
1f47c6fba00deef195ec29289e33d440746ed8d2
refs/heads/master
<repo_name>elinoretenorio/php-snippets<file_sep>/flip_coin.php <?php // Flip a coin until you get three heads in a row $flip = 0; $heads = 0; while ($heads < 3) { $number = rand(0,1); $flip++; if ($number) { $heads++; echo 'H '; } else { $heads = 0; echo 'T '; } } echo $flip; ?> <file_sep>/print_multiples.php <?php /** * Prints all multiples of a number between a range of numbers * * @param int $start * @param int $end * @param int $multiple * @return string All the multiples of a given number * @author <NAME> * @email <EMAIL> */ function printMultiples($start, $end, $multiple=1) { $list = ''; for ($i=$start; $i<=$end; $i++) { if ($i%$multiple == 0) { $list .= $i . ' '; } } return $list; } echo printMultiples(1,100,6); ?> <file_sep>/README.md php-snippets ============ Collection of random PHP scripts <file_sep>/string_to_ascii.php <?php /** * Convert string of characters to ASCII * * @param string $string The string of characters you want to convert into ASCII * @return string $integer_string The string of ASCII characters with a divider * @author <NAME> * @email <EMAIL> */ function charToAscii($string, $divider='-') { $string_length = strlen($string); $integer_string = ''; for($i = 0; $i <= $string_length; $i++) { $integer_string .= ord($string[$i]); $separator = ($i != $string_length) ? $divider : ''; $integer_string .= $separator; } return $integer_string; } echo charToAscii("Hello, World!"); ?>
0a58ca0dda669e2aaa8718f2c6a00d226eac3865
[ "Markdown", "PHP" ]
4
PHP
elinoretenorio/php-snippets
8a11ff766300ddd55c7e3581399675afab6a64a3
8f8a092ca44d364c9caeccb2d341d28235d6c200
refs/heads/master
<file_sep>#!/usr/bin/python class UnionFind(object): def __init__(self, N): self._id = list(range(N)) def union(self, p, q): raise NotImplementedError() def find(self, p, q): raise NotImplementedError() def get_ids(self): return " ".join(map(str, self._id)) def do_unions(self, ustr): unions = tuple(tuple(map(int, v.split("-"))) for v in ustr.split(" ")) for vals in unions: self.union(*vals) for vals in unions: assert(self.find(*vals)) class QuickFind(UnionFind): def union(self, p, q): pId = self._id[p] qId = self._id[q] for i, val in enumerate(self._id): if val == pId: self._id[i] = qId def find(self, p, q): return self._id[p] == self._id[q] class QuickUnion(UnionFind): def __init__(self, N): super().__init__(N) self._sizes = [1]*N def union(self, p, q): roots = self._root(p), self._root(q) sizes = self._sizes[roots[0]], self._sizes[roots[1]] if sizes[1] > sizes[0]: # Reverse roots = roots[::-1] sizes = sizes[::-1] self._id[roots[1]] = roots[0] self._sizes[roots[0]] += self._sizes[roots[1]] def find(self, p, q): return self._root(p) == self._root(q) def get_sizes(self): return " ".join(map(str, self._sizes)) def _root(self, i): while i != self._id[i]: i = self._id[i] return i def get_height(arr, i): h = 1 while i != arr[i] and h <= len(arr): h += 1 i = arr[i] if h > len(arr): return None else: return h def get_heights(arrStr): arr = tuple(map(int, arrStr.split(" "))) return " ".join(map(str, (get_height(arr, i) for i in range(len(arr))))) def pr1(): print("Problem 1:") uf = QuickFind(10) uf.do_unions("6-3 9-7 0-6 8-4 4-0 1-5") print(uf.get_ids()) print() def pr2(): print("Problem 2:") uf = QuickUnion(10) uf.do_unions("0-6 7-2 5-2 8-9 6-8 5-4 2-1 2-3 3-0") print(uf.get_ids()) print() def pr3(): tests = [ "6 6 8 6 8 4 6 6 6 6", "6 1 2 3 7 9 6 7 8 9", "3 3 6 5 5 9 5 5 5 0", "3 8 9 3 8 5 8 9 3 6", "5 5 2 5 5 2 5 0 5 2"] print("Problem 3:") for test in tests: print(get_heights(test)) print() if __name__ == "__main__": pr1() pr2() pr3() <file_sep>#!/usr/bin/python3 
import random import math def is_sorted(data, lo=None, hi=None): if lo == None: lo = 0 if hi == None: hi = len(data) return all(data[i] >= data[i-1] for i in range(lo + 1, hi)) class SortAlgorithm(object): def observable_sort(self, data, statusCallback): raise NotImplementedError() def sort(self, data): return self.observable_sort(data, None) def get_sort_state(self, data, stepNumber): steps = 0 state = None def capture(data): nonlocal steps nonlocal state steps += 1 if steps == stepNumber: state = tuple(data) self.observable_sort(data, capture) assert(is_sorted(data)) return state class MergeSort(SortAlgorithm): @staticmethod def _merge(arr, aux, lo, mid, hi): assert(is_sorted(arr, lo, mid)) assert(is_sorted(arr, mid, hi)) i = lo j = mid k = lo while i < mid and j < hi: if arr[i] <= arr[j]: aux[k] = arr[i] i += 1 else: aux[k] = arr[j] j += 1 k += 1 while i < mid: aux[k] = arr[i] k += 1 i += 1 while j < hi: aux[k] = arr[j] k += 1 j += 1 assert(is_sorted(aux, lo, hi)) class TopDownMergeSort(MergeSort): def _sort(self, arr, aux, lo, hi, statusCallback): if hi - lo <= 1: return mid = int(lo + (hi - lo)/2) self._sort(aux, arr, lo, mid, statusCallback) self._sort(aux, arr, mid, hi, statusCallback) self._merge(arr, aux, lo, mid, hi) if statusCallback: statusCallback(arr) def observable_sort(self, data, statusCallback): aux = data[:] first, second = aux, data self._sort(first, second, 0, len(data), statusCallback) class BottomUpMergeSort(MergeSort): def observable_sort(self, data, statusCallback): if not data: return N = len(data) aux = [None]*N first, second = data, aux sz = 1 while sz < N: lo = 0 while lo < N - sz: self._merge(first, second, lo, lo + sz, min(lo+2*sz, N)) lo += 2*sz if statusCallback: statusCallback(data) second[lo:N] = first[lo:N] first, second = second, first sz *= 2 log2RoundDown = int(math.log(N,2)) evenTwo = log2RoundDown % 2 == 0 powerOfTwo = not (N & (N - 1)) if evenTwo or not is_sorted(data): data[:] = aux[:] def parse_input_string(inStr): return 
list(map(int, inStr.split(" "))) def format_output(data): return " ".join(map(str, data)) def test_sort(sortAlgorithm, N=10, maxLen=1000, seed=None): random.seed(seed) passed = True for _ in range(N): data = [random.randint(0, 100) for _ in range(random.randint(0, maxLen))] sortAlgorithm.sort(data) if not is_sorted(data): print("ERROR: Sorting algorithm {} produced bad data ->".format(sortAlgorithm)) print(len(data)) print(data) print() passed = False return passed def p1(): topDown = TopDownMergeSort() test_sort(topDown, 10, 5000) p1Str = "75 26 42 35 67 57 62 49 56 61 44 65" state = topDown.get_sort_state(parse_input_string(p1Str), 7) print("1: Step 7") print(format_output(state)) def p2(): bottomUp = BottomUpMergeSort() test_sort(bottomUp, 100, 1000) p2Str = "94 50 47 74 85 57 31 28 48 40" state = bottomUp.get_sort_state(parse_input_string(p2Str), 7) print("2: Step 7") print(format_output(state)) def main(): p1() p2() if __name__ == "__main__": main()
3db3f29affe6aab7fc753c12c26e61cbd2a8ef46
[ "Python" ]
2
Python
mgraczyk/coursera-algo1
643e7cf800be03287cefc76bed982dff0bf80dad
990e55e14629d6f0877ab50e0403affb4abbd3ec
refs/heads/master
<repo_name>sabarish007/scaleSIMExplorer<file_sep>/README.txt usage: explorer.py [-h] [-rt RUN_TYPE] [-ht ARRAY_HEIGHT] [-wt ARRAY_WIDTH] [-if IFMAP_SZ] [-fs FILTER_SZ] [-of OFMAP_SZ] [-cfg CFG_FILE] [-out OUT_FILE] Explore ScaleSIM design choices! optional arguments: -h, --help show this help message and exit -rt RUN_TYPE, --run_type RUN_TYPE -ht ARRAY_HEIGHT, --array_height ARRAY_HEIGHT -wt ARRAY_WIDTH, --array_width ARRAY_WIDTH -if IFMAP_SZ, --ifmap_sz IFMAP_SZ -fs FILTER_SZ, --filter_sz FILTER_SZ -of OFMAP_SZ, --ofmap_sz OFMAP_SZ -cfg CFG_FILE, --cfg_file CFG_FILE -out OUT_FILE, --out_file OUT_FILE <file_sep>/explorer.py import os import sys import re import pandas as pd import argparse out_folder = './stdout/' cfg_folder = './configs/' layer = re.compile('Commencing run for (.*)') cyc = re.compile('Cycles for compute :\s+(\d+)\s+cycles') ifbw = re.compile('DRAM IFMAP Read BW :\s+(\d+\.\d+)\s+Bytes/cycle') filbw = re.compile('DRAM Filter Read BW :\s+(\d+\.\d+)\s+Bytes/cycle') ofbw = re.compile('DRAM OFMAP Write BW :\s+(\d+\.\d+)\s+Bytes/cycle') def write_config(run_type = "mobile", array_height = 32, array_width = 32, ifmap_sz = 1024, filter_sz = 1024, ofmap_sz = 2048, cfg_file = 'test.cfg'): cfg_string = '[general]\nrun_name = "lab2_%s"\n\n[architecture_presets]\nArrayHeight: %d\nArrayWidth: %d\nIfmapSramSz: %d\nFilterSramSz: %d\nOfmapSramSz: %d\nIfmapOffset: 0\nFilterOffset: 10000000\nOfmapOffset: 20000000\nDataflow: ws' % (run_type, array_height, array_width, ifmap_sz, filter_sz, ofmap_sz) cfg_path = cfg_folder + cfg_file cfg_out = open(cfg_path, 'w') cfg_out.write(cfg_string) cfg_out.close print(cfg_string) return cfg_path def run_test(out_file='./stdout/out_latest.txt', cfg_path='./configs/lab2.cfg'): os.system('python scale.py -arch_config=%s -network=../Lab2/ece8893_lab2.csv > %s' % (cfg_path, out_file)) return out_file def check_constraints(max_bw_csv): max_bw = pd.read_csv(max_bw_csv) max_val = 0.0 bws = ['\tMax DRAM IFMAP Read BW', '\tMax DRAM Filter 
Read BW', '\tMax DRAM OFMAP Write BW'] for bw in bws: for val in max_bw[bw]: if val > max_val: max_val = val if float(val) > 20.0: print("Max BW exceeded: %s = %f" %(bw, val)) return False print("Max BW: %f" % max_val) return True def get_cost(out_path=None): cost = 0.0 output_fh = open(out_path,'r') for line in output_fh: m_layer = layer.match(line) if m_layer: d_layer = m_layer.group(1) m_cyc = cyc.match(line) if m_cyc: d_cyc = float(m_cyc.group(1)) m_ifbw = ifbw.match(line) if m_ifbw: d_ifbw = float(m_ifbw.group(1)) m_filbw = filbw.match(line) if m_filbw: d_filbw = float(m_filbw.group(1)) m_ofbw = ofbw.match(line) if m_ofbw: d_ofbw = float(m_ofbw.group(1)) cost = cost + (d_cyc * (d_ifbw + d_filbw + d_ofbw)) return cost def check_cost(cost): try: prevcost = open('/tmp/prevcost','r') p_cost = float(prevcost.readline()) prevcost.close except: p_cost = -1.0 prevcost = open('/tmp/prevcost','w') prevcost.write(str(cost)) prevcost.close if p_cost > 1: cost_diff = cost - p_cost print("CostDiff: %f" % cost_diff) if cost_diff > 0: print("Cost increased!") return False elif cost_diff == 0: print("No Change!") return True else: print("Cost decreased!") return True def main(in_run_type, in_array_height, in_array_width, in_ifmap_sz, in_filter_sz, in_ofmap_sz, in_cfg_file, out_file): cfg_path = write_config(run_type = in_run_type, array_height = in_array_height, array_width = in_array_width, ifmap_sz = in_ifmap_sz, filter_sz = in_filter_sz, ofmap_sz = in_ofmap_sz, cfg_file = in_cfg_file) out_path = run_test(out_file=out_folder+out_file, cfg_path=cfg_path) cost = get_cost(out_path) check_constraints('./outputs/lab2_%s/ece8893_lab2_max_bw.csv' % in_run_type) print("Cost: %s" % str(cost)) check_cost(cost) if __name__== "__main__": parser = argparse.ArgumentParser(description='Explore ScaleSIM design choices!') parser.add_argument('-rt','--run_type', default = "mobile", type=str, help='') parser.add_argument('-ht','--array_height', default = 32, type=int, help='') 
parser.add_argument('-wt','--array_width', default = 32, type=int, help='') parser.add_argument('-if','--ifmap_sz', default = 2048, type=int, help='') parser.add_argument('-fs','--filter_sz', default = 1024, type=int, help='') parser.add_argument('-of','--ofmap_sz', default = 2048, type=int, help='') parser.add_argument('-cfg','--cfg_file', default = 'test.cfg', type=str, help='') parser.add_argument('-out','--out_file', default = 'test.txt', type=str, help='') args = parser.parse_args() main(in_run_type=args.run_type, in_array_height=args.array_height, in_array_width=args.array_width, in_ifmap_sz=args.ifmap_sz, in_filter_sz=args.filter_sz, in_ofmap_sz=args.ofmap_sz, in_cfg_file=args.cfg_file, out_file=args.out_file)
166844b70fa55ea79a365dd9c1f8b87aebf2e8f7
[ "Python", "Text" ]
2
Text
sabarish007/scaleSIMExplorer
63d0d41fdeecf5408e02d488a8b6ef3b90e0ef76
8f0d6a00e70ddc6157643ca817d366fefc6ad808
refs/heads/master
<repo_name>Petchdy/day9_note<file_sep>/src/com/company/Main.java package com.company; import org.omg.CORBA.WStringSeqHelper; import java.util.Scanner; public class Main { public static void main(String[] args) { // write your code here Scanner scan = new Scanner(System.in); //String A = scan.nextLine(); //int n = scan.nextInt(); //char x = A.charAt(n-1); //System.out.println(x); //for (int i=0;i<n;i+=2){ // char c = A.charAt(i); // System.out.println(c); //} ///String s1 = scan.nextLine(); ///int c = s1.charAt(0); ///System.out.println(c); ////String s1 = scan.nextLine(); ////String s2 = scan.nextLine(); ////String s3 = s1.concat(s2); ////System.out.println(s3); ////System.out.println(s1+s2); ////boolean check = s1.equals(s2); ////if(s1.equalsIgnoreCase(s2)){//ไม่สนตัวพิมพ์เล็กพิมพ์ใหญ่ //// System.out.printf("same"); ////} ////else { //// System.out.printf("different"); ////} String s1 = scan.nextLine(); String s2 = scan.nextLine(); int index = scan.nextInt(); int x = s1.indexOf(s2,index); } }
195f9f4132cd293b7aba780afcb9b84529e4b171
[ "Java" ]
1
Java
Petchdy/day9_note
7c7dc30015e7352a723f1adfe25cddbcefd88878
b3e19a5c667860243a14319af9a206723b36792d
refs/heads/master
<repo_name>arjenvanderende/chip8<file_sep>/main.go package main import ( "flag" "fmt" "io" "log" "os" "github.com/arjenvanderende/chip8/chip8" "github.com/arjenvanderende/chip8/io/termbox" ) func main() { decompile := flag.Bool("decompile", false, "Print opcodes of the loaded ROM") filename := flag.String("romfile", "roms/fishie.ch8", "The ROM file to load") logfile := flag.String("logfile", "", "The file to log to") flag.Parse() // setup logging if *logfile != "" { f, err := os.Create(*logfile) if err != nil { log.Fatal(fmt.Errorf("Unable to create logfile: %v", err)) } log.SetOutput(f) } // load the ROM file cpu, err := chip8.Load(*filename) if err != nil { log.Fatal(err) } // disassemble opcodes if *decompile { printOpcodes(os.Stdout, cpu) } else { err = run(cpu) if err != nil { log.Fatal(err) } } } func printOpcodes(w io.Writer, cpu *chip8.CPU) { for { op := cpu.DisassembleOp() fmt.Fprintf(w, "%s\n", op) if !cpu.NextOp() { break } } } func run(cpu *chip8.CPU) error { // initialise I/O devices display, keyboard, closer, err := termbox.New() if err != nil { return fmt.Errorf("Unable to initialise graphics: %v", err) } defer closer() // run the program err = cpu.Run(display, keyboard) if err != nil { return fmt.Errorf("Program failed to run: %v", err) } return nil } <file_sep>/io/termbox/keyboard.go package termbox import ( "sync" "unicode" "github.com/arjenvanderende/chip8/io" "github.com/nsf/termbox-go" ) const keyPressDuration uint8 = 1 type keyboard struct { pressedKeys map[io.Key]uint8 waitForPress chan io.Key mutex sync.RWMutex } func newKeyboard() *keyboard { return &keyboard{ pressedKeys: make(map[io.Key]uint8), waitForPress: nil, } } // Maps keyboard to the following layout: // 1 2 3 C // 4 5 6 D // 7 8 9 E // A 0 B F var keyMapping = map[rune]io.Key{ rune('1'): io.Key1, rune('2'): io.Key2, rune('3'): io.Key3, rune('4'): io.KeyC, rune('q'): io.Key4, rune('w'): io.Key5, rune('e'): io.Key6, rune('r'): io.KeyD, rune('a'): io.Key7, rune('s'): io.Key8, 
rune('d'): io.Key9, rune('f'): io.KeyE, rune('z'): io.KeyA, rune('x'): io.Key0, rune('c'): io.KeyB, rune('v'): io.KeyF, } func (k *keyboard) poll() { for { event := termbox.PollEvent() switch event.Type { case termbox.EventKey: if key, ok := keyMapping[unicode.ToLower(event.Ch)]; ok { k.registerKeyPress(key) } else if event.Key == termbox.KeyEsc { k.registerKeyPress(io.KeyEsc) } case termbox.EventInterrupt: return } } } func (k *keyboard) registerKeyPress(key io.Key) { k.mutex.Lock() defer k.mutex.Unlock() k.pressedKeys[key] = keyPressDuration // emit event if WaitForKeyPress was invoked if k.waitForPress != nil { k.waitForPress <- key close(k.waitForPress) k.waitForPress = nil } } func (k *keyboard) close() { // send interrupt to unblock poll() termbox.Interrupt() } func (k *keyboard) Tick() { k.mutex.Lock() defer k.mutex.Unlock() for key := range k.pressedKeys { if k.pressedKeys[key] <= 0 { delete(k.pressedKeys, key) } else { k.pressedKeys[key] = k.pressedKeys[key] - 1 } } } func (k *keyboard) PressedButton() *io.Key { k.mutex.RLock() defer k.mutex.RUnlock() for key := range k.pressedKeys { return &key } return nil } func (k *keyboard) IsPressed(key io.Key) bool { k.mutex.RLock() defer k.mutex.RUnlock() _, ok := k.pressedKeys[key] return ok } <file_sep>/io/display.go package io const ( // DisplayWidth represents the width of the displat in pixels DisplayWidth = 64 // DisplayHeight represents the height of the display in pixels DisplayHeight = 32 ) // Display can draw pixels onto the display type Display interface { Clear() Draw(x, y int, sprite []byte) bool Flush() } <file_sep>/io/termbox/termbox.go package termbox import ( "github.com/arjenvanderende/chip8/io" "github.com/nsf/termbox-go" ) // Closer disposes the display and keyboard that the termbox library initialises type Closer func() // New initialises a display and keyboard device via the termbox library func New() (io.Display, io.Keyboard, Closer, error) { err := termbox.Init() if err != nil { return nil, 
nil, nil, err } termbox.SetInputMode(termbox.InputEsc) keyboard := newKeyboard() go keyboard.poll() return &display{}, keyboard, func() { // release all resources keyboard.close() termbox.Close() }, nil } <file_sep>/chip8/chip8.go package chip8 import ( "fmt" "io/ioutil" "log" "math/rand" "time" "github.com/arjenvanderende/chip8/io" ) const ( // clockRate represents the number of operations that the CPU can process per second clockRate int = 540 // programOffset represents the offset in memory where the program is loaded programOffset int = 0x200 ) var ( rnd = rand.New(rand.NewSource(time.Now().UnixNano())) digits = []byte{ 0xf0, 0x90, 0x90, 0x90, 0xf0, // 0 0x20, 0x60, 0x20, 0x20, 0x70, // 1 0xf0, 0x10, 0xf0, 0x80, 0xf0, // 2, 0xf0, 0x10, 0xf0, 0x10, 0xf0, // 3 0x90, 0x90, 0xf0, 0x10, 0x10, // 4 0xf0, 0x80, 0xf0, 0x10, 0xf0, // 5 0xf0, 0x80, 0xf0, 0x90, 0xf0, // 6 0xf0, 0x10, 0x20, 0x40, 0x40, // 7 0xf0, 0x90, 0xf0, 0x90, 0xf0, // 8 0xf0, 0x90, 0xf0, 0x10, 0xf0, // 9 0xf0, 0x90, 0xf0, 0x90, 0x90, // A 0xe0, 0x90, 0xe0, 0x90, 0xe0, // B 0xf0, 0x80, 0x80, 0x80, 0xf0, // C 0xe0, 0x90, 0x90, 0x90, 0xe0, // D 0xf0, 0x80, 0xf0, 0x80, 0xf0, // E 0xf0, 0x80, 0xf0, 0x80, 0x80, // F } ) // Memory represents the memory address space of the Chip-8 type Memory [0x1000]byte // CPU represents the Chip8 CPU type CPU struct { pc int // program counter memory Memory i uint16 // 16-bit register v [16]byte // 8-bit general purpose registers sp uint8 // stack pointer stack [16]int dt byte // delay timer st byte // sound timer programSize int prevPC int // program counter of previous interpret() call, used to detect multiple invocations when waiting for key press } // Load reads the program stored in the file into memory func Load(filename string) (*CPU, error) { // read ROM file bytes, err := ioutil.ReadFile(filename) if err != nil { return nil, fmt.Errorf("Unable to load Chip8 file %s: %v", filename, err) } // copy ROM into memory at program address cpu := CPU{ pc: programOffset, 
programSize: len(bytes), i: 0, v: [16]byte{}, sp: 0, dt: 0, st: 0, } // copy digits for op: Fx29 for i, b := range digits { cpu.memory[i] = b } // copy program for i, b := range bytes { cpu.memory[programOffset+i] = b } return &cpu, nil } // Run starts running the program func (cpu *CPU) Run(display io.Display, keyboard io.Keyboard) error { clock := time.NewTicker(time.Second / time.Duration(clockRate)) defer clock.Stop() frame := time.NewTicker(time.Second / time.Duration(60)) defer frame.Stop() for { select { case <-clock.C: // run the next tick of the program err := cpu.interpret(display, keyboard) if err != nil { return fmt.Errorf("Could not interpret op: %v", err) } // check if the user tried to quit the program if keyboard.IsPressed(io.KeyEsc) { return nil } keyboard.Tick() case <-frame.C: // TODO: play sound with sound timer is active cpu.decrementTimers() display.Flush() } } } func (cpu *CPU) printState(pc int, op string) { if cpu.prevPC != pc { log.Printf("op=%-40s pc=%03x next pc=%03x i=%03x v=%v\n", op, pc, cpu.pc, cpu.i, cpu.v) } cpu.prevPC = pc } func (cpu *CPU) interpret(display io.Display, keyboard io.Keyboard) error { op := cpu.DisassembleOp() defer cpu.printState(cpu.pc, op) nib1 := cpu.memory[cpu.pc] >> 4 vx := cpu.memory[cpu.pc] & 0x0f vy := cpu.memory[cpu.pc+1] >> 4 n := cpu.memory[cpu.pc+1] & 0x0f nn := cpu.memory[cpu.pc+1] nnn := uint16(cpu.memory[cpu.pc]&0x0f)<<8 + uint16(cpu.memory[cpu.pc+1]) switch nib1 { case 0x0: switch cpu.memory[cpu.pc+1] { case 0xe0: display.Clear() case 0xee: cpu.sp-- cpu.pc = cpu.stack[cpu.sp] default: return fmt.Errorf("Unknown 0") } case 0x1: cpu.pc = int(nnn) return nil case 0x2: cpu.stack[cpu.sp] = cpu.pc cpu.sp++ cpu.pc = int(nnn) return nil case 0x3: if cpu.v[vx] == nn { cpu.pc += 2 } case 0x4: if cpu.v[vx] != nn { cpu.pc += 2 } case 0x5: if cpu.v[vx] == cpu.v[vy] { cpu.pc += 2 } case 0x6: cpu.v[vx] = nn case 0x7: cpu.v[vx] = cpu.v[vx] + nn case 0x8: lastNib := cpu.memory[cpu.pc+1] & 0x0f switch lastNib { case 
0x0: cpu.v[vx] = cpu.v[vy] case 0x1: cpu.v[vx] = cpu.v[vx] | cpu.v[vy] case 0x2: cpu.v[vx] = cpu.v[vx] & cpu.v[vy] case 0x3: cpu.v[vx] = cpu.v[vx] ^ cpu.v[vy] case 0x4: // set carry flag acc := int16(cpu.v[vx]) + int16(cpu.v[vy]) if acc > 255 { cpu.v[0xf] = 1 } else { cpu.v[0xf] = 0 } cpu.v[vx] = byte(acc) case 0x5: // set borrow flag if cpu.v[vx] > cpu.v[vy] { cpu.v[0xf] = 1 } else { cpu.v[0xf] = 0 } cpu.v[vx] = cpu.v[vx] - cpu.v[vy] case 0x6: if cpu.v[vx]&0x1 > 0 { cpu.v[0xf] = 1 } else { cpu.v[0xf] = 0 } cpu.v[vx] = cpu.v[vx] / 2 case 0x7: // set borrow flag if cpu.v[vy] > cpu.v[vx] { cpu.v[0xf] = 1 } else { cpu.v[0xf] = 0 } cpu.v[vx] = cpu.v[vy] - cpu.v[vx] case 0xe: if cpu.v[vx]&0x80 > 0 { cpu.v[0xf] = 1 } else { cpu.v[0xf] = 0 } cpu.v[vx] = cpu.v[vx] * 2 default: return fmt.Errorf("Unknown 8: %1x", lastNib) } case 0x9: if cpu.v[vx] != cpu.v[vy] { cpu.pc += 2 } case 0xa: cpu.i = nnn case 0xc: cpu.v[vx] = byte(rnd.Intn(256)) & nn case 0xd: x := int(cpu.v[vx]) y := int(cpu.v[vy]) sprite := cpu.memory[cpu.i : cpu.i+uint16(n)] collision := display.Draw(x, y, sprite) if collision { cpu.v[0xf] = 0x1 } else { cpu.v[0xf] = 0x0 } case 0xe: switch cpu.memory[cpu.pc+1] { case 0x9e: if keyboard.IsPressed(io.Key(cpu.v[vx])) { cpu.pc += 2 } case 0xa1: if !keyboard.IsPressed(io.Key(cpu.v[vx])) { cpu.pc += 2 } default: return fmt.Errorf("Unknown E: %2x", cpu.memory[cpu.pc+1]) } case 0xf: switch cpu.memory[cpu.pc+1] { case 0x07: cpu.v[vx] = cpu.dt case 0x0a: key := keyboard.PressedButton() if key == nil || io.IsOperationalKey(*key) { // Skip processing the op at the current PC, allow the emulator // to process the operational key and let it loop to the same // op to start waiting for a key press again return nil } cpu.v[vx] = byte(*key) case 0x15: cpu.dt = cpu.v[vx] case 0x18: cpu.st = cpu.v[vx] case 0x1e: cpu.i += uint16(cpu.v[vx]) case 0x29: cpu.i = uint16(cpu.v[vx]) * 5 case 0x33: v := uint16(cpu.v[vx]) cpu.memory[cpu.i+0] = byte((v / 100) % 10) cpu.memory[cpu.i+1] = 
byte((v / 10) % 10) cpu.memory[cpu.i+2] = byte(v % 10) case 0x55: for i := uint16(0); i <= uint16(vx); i++ { cpu.memory[cpu.i+i] = cpu.v[i] } case 0x65: for i := uint16(0); i <= uint16(vx); i++ { cpu.v[i] = cpu.memory[cpu.i+i] } default: return fmt.Errorf("Unknown F: %2x", cpu.memory[cpu.pc+1]) } default: return fmt.Errorf("Unknown nib: %d", nib1) } cpu.pc += 2 return nil } func (cpu *CPU) decrementTimers() { if cpu.dt > 0 { cpu.dt-- } if cpu.st > 0 { cpu.dt-- } } // NextOp increments the PC to the next operation // Returns false when there are no more operations to read func (cpu *CPU) NextOp() bool { cpu.pc += 2 return cpu.pc <= programOffset+cpu.programSize } // DisassembleOp output the assembly for the operation at the PC. func (cpu *CPU) DisassembleOp() string { nib1 := cpu.memory[cpu.pc] >> 4 vx := cpu.memory[cpu.pc] & 0x0f vy := cpu.memory[cpu.pc+1] >> 4 n := cpu.memory[cpu.pc+1] & 0x0f nn := cpu.memory[cpu.pc+1] nnn := int16(cpu.memory[cpu.pc]&0x0f)<<8 + int16(cpu.memory[cpu.pc+1]) op := "not implemented" switch nib1 { case 0x0: switch cpu.memory[cpu.pc+1] { case 0xe0: op = fmt.Sprintf("%-10s", "CLS") case 0xee: op = fmt.Sprintf("%-10s", "RET") default: op = fmt.Sprintf("%-10s %03x", "SYS", nnn) } case 0x1: op = fmt.Sprintf("%-10s %03x", "JP", nnn) case 0x2: op = fmt.Sprintf("%-10s %03x", "CALL", nnn) case 0x3: op = fmt.Sprintf("%-10s V%01x, %02x", "SE", vx, nn) case 0x4: op = fmt.Sprintf("%-10s V%01x, %02x", "SNE", vx, nn) case 0x5: op = fmt.Sprintf("%-10s V%01x, V%01x", "SE", vx, vy) case 0x6: op = fmt.Sprintf("%-10s V%01x, %02x", "LD", vx, nn) case 0x7: op = fmt.Sprintf("%-10s V%01x, %02x", "ADD", vx, nn) case 0x8: lastNib := cpu.memory[cpu.pc+1] & 0x0f switch lastNib { case 0x0: op = fmt.Sprintf("%-10s V%01x, V%01x", "LD", vx, vy) case 0x1: op = fmt.Sprintf("%-10s V%01x, V%01x", "OR", vx, vy) case 0x2: op = fmt.Sprintf("%-10s V%01x, V%01x", "AND", vx, vy) case 0x3: op = fmt.Sprintf("%-10s V%01x, V%01x", "XOR", vx, vy) case 0x4: op = fmt.Sprintf("%-10s 
V%01x, V%01x", "ADD", vx, vy) case 0x5: op = fmt.Sprintf("%-10s V%01x, V%01x, V%01x", "SUB", vx, vx, vy) case 0x6: op = fmt.Sprintf("%-10s V%01x, V%01x", "SHR", vx, vy) case 0x7: op = fmt.Sprintf("%-10s V%01x, V%01x, V%01x", "SUBN", vx, vy, vy) case 0xe: op = fmt.Sprintf("%-10s V%01x, V%01x", "SHL", vx, vy) default: op = fmt.Sprintf("UNKNOWN 8") } case 0x9: op = fmt.Sprintf("%-10s V%01x, V%01x", "SNE", vx, vy) case 0xa: op = fmt.Sprintf("%-10s I,%03x", "LD", nnn) case 0xb: op = fmt.Sprintf("%-10s V0,%03x", "JP", nnn) case 0xc: op = fmt.Sprintf("%-10s V%01x, %02x", "RND", vx, nn) case 0xd: op = fmt.Sprintf("%-10s V%01x, V%01x, %01x", "DRW", vx, vy, n) case 0xe: switch cpu.memory[cpu.pc+1] { case 0x9e: op = fmt.Sprintf("%-10s V%01x", "SKP", vx) case 0xa1: op = fmt.Sprintf("%-10s V%01x", "SKNP", vx) default: op = fmt.Sprintf("UNKNOWN E") } case 0xf: switch cpu.memory[cpu.pc+1] { case 0x07: op = fmt.Sprintf("%-10s V%01x, DELAY", "LD", vx) case 0x0a: op = fmt.Sprintf("%-10s V%01x, KEY", "LD", vx) case 0x15: op = fmt.Sprintf("%-10s DELAY, V%01x", "LD", vx) case 0x18: op = fmt.Sprintf("%-10s SOUND, V%01x", "LD", vx) case 0x1e: op = fmt.Sprintf("%-10s I, V%01x", "ADD", vx) case 0x29: op = fmt.Sprintf("%-10s F, V%01x", "LD", vx) case 0x33: op = fmt.Sprintf("%-10s B, V%01x", "LD", vx) case 0x55: op = fmt.Sprintf("%-10s [I], V%01x", "LD", vx) case 0x65: op = fmt.Sprintf("%-10s V%01x,[I]", "LD", vx) default: op = fmt.Sprintf("UNKNOWN F") } } return fmt.Sprintf("%04x %02x %02x %s", cpu.pc, cpu.memory[cpu.pc], cpu.memory[cpu.pc+1], op) } <file_sep>/io/keyboard.go package io type Keyboard interface { Tick() IsPressed(Key) bool PressedButton() *Key } type Key byte const ( Key0 Key = 0x00 Key1 = 0x01 Key2 = 0x02 Key3 = 0x03 Key4 = 0x04 Key5 = 0x05 Key6 = 0x06 Key7 = 0x07 Key8 = 0x08 Key9 = 0x09 KeyA = 0x0A KeyB = 0x0B KeyC = 0x0C KeyD = 0x0D KeyE = 0x0E KeyF = 0x0F KeyEsc = 0xFF ) // IsOperationalKey checks if the specified key is an operational key (like the ESC key) func 
IsOperationalKey(key Key) bool { return key < Key0 || key > KeyF } <file_sep>/io/termbox/display.go package termbox import ( "log" "github.com/arjenvanderende/chip8/io" "github.com/nsf/termbox-go" ) type display struct { pixels [io.DisplayWidth * io.DisplayHeight]bool } func (s *display) Clear() { termbox.Clear(termbox.ColorDefault, termbox.ColorDefault) s.pixels = [io.DisplayWidth * io.DisplayHeight]bool{} } func (s *display) Flush() { termbox.Flush() } func (s *display) Draw(x, y int, sprite []byte) bool { collision := false for dy, line := range sprite { log.Printf("DRAW X=%d, Y=%d: %08b\n", x, y, line) for dx := 0; dx < 8; dx++ { // determine if pixel is on or off p := (((y + dy) * io.DisplayWidth) + x + dx) % (io.DisplayWidth * io.DisplayHeight) a := line&(1<<uint(7-dx)) > 0 b := s.pixels[p] on := a != b // collision detection if a == b && a == true { collision = true } // draw pixel rx := p % io.DisplayWidth ry := p / io.DisplayWidth if on { termbox.SetCell(rx, ry, '█', termbox.ColorGreen, termbox.ColorDefault) } else { termbox.SetCell(rx, ry, ' ', termbox.ColorDefault, termbox.ColorDefault) } // remember the state s.pixels[p] = on } } return collision }
78b8a0b44e59512cc757d32ffae2830cb50f08ad
[ "Go" ]
7
Go
arjenvanderende/chip8
f32acf042cd646d852a532143cd89a9d834985e4
8f761ef2530b3d2dd3d4d1471d41196bb7e49340
refs/heads/main
<repo_name>sajantanand/sciML-networks<file_sep>/rk4.py import numpy as np import sys import copy import matplotlib.pyplot as plt def dydt_sho_2D(t, y): x, y, px, py = y return np.array([px, py, -1*x, -1*y]) def dydt_sho_1D(t, y): x, p = y return np.array([p, -1*x]) def dydt_anharmonic(t, y): x, p = y #return np.array([p, -1*x - x**5]) return np.array([p, - x**5]) def rk4(dydt, y0, tspan, n): if (np.ndim(y0) == 0): m = 1 else: m = len(y0) tfirst, tlast = tspan dt = (tfirst - tlast) / n t = np.zeros(n+1) y = np.zeros((n+1, m)) t[0] = tfirst y[0,:] = y0 for i in range(n): f1 = dydt(t[i], y[i,:]) f2 = dydt(t[i] + dt / 2.0, y[i,:] + dt * f1 / 2.0) f3 = dydt(t[i] + dt / 2.0, y[i,:] + dt * f2 / 2.0) f4 = dydt(t[i] + dt, y[i,:] + dt * f3) t[i+1] = t[i] + dt y[i+1, :] = y[i,:] + dt * (f1 + 2.0 * f2 + 2.0 * f3 + f4) / 6.0 return t, y if __name__ == "__main__": t, y = rk4(dydt_sho_1D, (1,0), (0,10), 100000) print(y[:10, :]) fig = plt.figure(figsize=(4,4)) plt.plot(y[:,0], y[:,1]) plt.xlabel('x') plt.ylabel('p') plt.savefig('sho_phase_space.pdf') t, y = rk4(dydt_anharmonic, (1,0), (0,10), 100000) print(y[:10, :]) fig = plt.figure(figsize=(4,4)) plt.plot(y[:,0], y[:,1]) plt.xlabel('x') plt.ylabel('p') plt.savefig('anharmonic_phase_space.pdf') <file_sep>/README.md # sciML-networks Repository of networks used in sciML for Benchmark testing purposes
77c0085ee1acd12e371ea4e2063ff27a55c21fcf
[ "Markdown", "Python" ]
2
Python
sajantanand/sciML-networks
b1137c5e86694b8d7c5bc567327a79f3d5eb6275
5ff5a16c1d4d452b079a80c98bd8d7c1e86f8e2e
refs/heads/master
<file_sep>flake8==3.3.0 tox==2.6.0 <file_sep>[tox] envlist = py27 [testenv] deps= -r{toxinidir}/requirements.txt -r{toxinidir}/test-requirements.txt setenv= PYTHONWARNINGS=all commands = sh {toxinidir}/scripts/libgit2.sh [testenv:ubuntu] commands = sh {toxinidir}/scripts/libgit2.sh [flake8] ignore=E302,H306 exclude= libgit2-0.24.0, .git, .idea, .tox, *.egg-info, *.eggs, bin, dist, hapi <file_sep>import logging import os import yaml from hapi.services.tiller_pb2 import GetReleaseContentRequest from hapi.chart.template_pb2 import Template from hapi.chart.chart_pb2 import Chart from hapi.chart.metadata_pb2 import Metadata from hapi.chart.config_pb2 import Config from supermutes.dot import dotify from pyhelm import repo from supermutes.dot import dotify LOG = logging.getLogger('pyhelm') class ChartBuilder(object): ''' This class handles taking chart intentions as a paramter and turning those into proper protoc helm charts that can be pushed to tiller. It also processes chart source declarations, fetching chart source from external resources where necessary ''' def __init__(self, chart, parent=None): ''' Initialize the ChartBuilder class Note that tthis will trigger a source pull as part of initialization as its necessary in order to examine the source service many of the calls on ChartBuilder ''' # cache for generated protoc chart object self._helm_chart = None # record whether this is a dependency based chart self.parent = parent # store chart schema self.chart = dotify(chart) # extract, pull, whatever the chart from its source self.source_directory = self.source_clone() def source_clone(self): ''' Clone the charts source We only support a git source type right now, which can also handle git:// local paths as well ''' subpath = self.chart.source.get('subpath', '') if not 'type' in self.chart.source: LOG.exception("Need source type for chart %s", self.chart.name) return if self.parent: LOG.info("Cloning %s/%s as dependency for %s", self.chart.source.location, subpath, 
self.parent) else: LOG.info("Cloning %s/%s for release %s", self.chart.source.location, subpath, self.chart.name) if self.chart.source.type == 'repo': self._source_tmp_dir = repo.from_repo(self.chart.source.location, self.chart.name, self.chart.version) elif self.chart.source.type == 'directory': self._source_tmp_dir = self.chart.source.location else: LOG.exception("Unknown source type %s for chart %s", self.chart.name, self.chart.source.type) return return os.path.join(self._source_tmp_dir, subpath) def source_cleanup(self): ''' Cleanup source ''' repo.source_cleanup(self._source_tmp_dir) def get_metadata(self): ''' Process metadata ''' # extract Chart.yaml to construct metadata chart_yaml = dotify(yaml.load(open( os.path.join(self.source_directory, 'Chart.yaml')).read())) # construct Metadata object return Metadata( description=chart_yaml.description, name=chart_yaml.name, version=chart_yaml.version ) def get_files(self): ''' Return (non-template) files in this chart TODO(alanmeadows): Not implemented ''' return [] def get_values(self): ''' Return the chart (default) values ''' # create config object representing unmarshaled values.yaml if os.path.exists(os.path.join(self.source_directory, 'values.yaml')): raw_values = open(os.path.join(self.source_directory, 'values.yaml')).read() else: LOG.warn("No values.yaml in %s, using empty values", self.source_directory) raw_values = '' return Config(raw=raw_values) def get_templates(self): ''' Return all the chart templates ''' # process all files in templates/ as a template to attach to the chart # building a Template object import ipdb; ipdb.set_trace() templates = [] if not os.path.exists(os.path.join(self.source_directory, 'templates')): LOG.warn("Chart %s has no templates directory," "no templates will be deployed", self.chart.name) for root, _, files in os.walk(os.path.join(self.source_directory, 'templates'), topdown=True): for tpl_file in files: tname = os.path.relpath(os.path.join(root, tpl_file), 
os.path.join(self.source_directory, 'templates')) templates.append(Template(name=tname, data=open(os.path.join(root, tpl_file), 'rb').read())) return templates def get_helm_chart(self): ''' Return a helm chart object ''' if self._helm_chart: return self._helm_chart # dependencies # [process_chart(x, is_dependency=True) for x in chart.dependencies] dependencies = [] for chart in self.chart.get('dependencies', []): LOG.info("Building dependency chart %s for release %s", chart.name, self.chart.name) dependencies.append(ChartBuilder(chart).get_helm_chart()) helm_chart = Chart( metadata=self.get_metadata(), templates=self.get_templates(), dependencies=dependencies, values=self.get_values(), files=self.get_files(), ) self._helm_chart = helm_chart return helm_chart def dump(self): ''' This method is used to dump a chart object as a serialized string so that we can perform a diff It should recurse into dependencies ''' return self.get_helm_chart().SerializeToString() <file_sep># Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
import grpc from grpc.framework.common import cardinality from grpc.framework.interfaces.face import utilities as face_utilities <file_sep>grpcio==1.1.3 grpcio-tools==1.1.3 protobuf==3.2.0 supermutes==0.2.5 requests==2.14.2 PyYAML==3.12 <file_sep>try: from StringIO import StringIO except ImportError: from io import BytesIO as StringIO import itertools import os import pygit2 import requests import shutil import tarfile import tempfile import yaml def repo_index(repo_url): """Downloads the Chart's repo index""" index_url = os.path.join(repo_url, 'index.yaml') index = requests.get(index_url) return yaml.load(index.content) def from_repo(repo_url, chart, version=None): """Downloads the chart from a repo.""" _tmp_dir = tempfile.mkdtemp(prefix='pyhelm-', dir='/tmp') index = repo_index(repo_url) if chart not in index['entries']: raise RuntimeError('Chart not found in repo') versions = index['entries'][chart] if version is not None: versions = itertools.ifilter(lambda k: k['version'] == version, versions) try: metadata = sorted(versions, key=lambda x: x['version'])[0] for url in metadata['urls']: fname = url.split('/')[-1] try: req = requests.get(url, stream=True) fobj = StringIO(req.content) tar = tarfile.open(mode="r:*", fileobj=fobj) tar.extractall(_tmp_dir) return os.path.join(_tmp_dir, chart) except: # NOTE(flaper87): Catch requests errors # and untar errors pass except IndexError: raise RuntimeError('Chart version %s not found' % version) def git_clone(repo_url, branch='master'): """clones repo to a /tmp/ dir""" _tmp_dir = tempfile.mkdtemp(prefix='pyhelm-', dir='/tmp') pygit2.clone_repository(repo_url, _tmp_dir, checkout_branch=branch) return _tmp_dir def source_cleanup(target_dir): """Clean up source.""" shutil.rmtree(target_dir) <file_sep>====== PyHelm ====== Python bindings for the Helm package manager Helm gRPC --------- The helm gRPC libraries are located in the hapi directory. They were generated with the grpc_tools.protoc utility against Helm 2.1.3. 
Should you wish to re-generate them you can easily do so:: git clone https://github.com/kubernetes/helm ./helm python -m grpc_tools.protoc -I helm/_proto --python_out=. --grpc_python_out=. _proto/hapi/chart/* python -m grpc_tools.protoc -I helm/_proto --python_out=. --grpc_python_out=. _proto/hapi/services/* python -m grpc_tools.protoc -I helm/_proto --python_out=. --grpc_python_out=. _proto/hapi/release/* python -m grpc_tools.protoc -I helm/_proto --python_out=. --grpc_python_out=. _proto/hapi/version/* How to use Pyhelm ----------------- Looks like Pyhelm support only install chart from local folder 1. First you need repo_url and chart name to download chart:: from pyhelm.repo import from_repo chart_path = chart_versions = from_repo('https://kubernetes-charts.storage.googleapis.com/', 'mariadb') print(chart_path) "/tmp/pyhelm-kibwtj8d/mongodb" 2. Now you can see that chart folder of mongodb:: In [3]: ls -la /tmp/pyhelm-kibwtj8d/mongodb total 40 drwxr-xr-x 7 andrii wheel 224 Mar 21 17:26 ./ drwx------ 3 andrii wheel 96 Mar 21 17:26 ../ -rwxr-xr-x 1 andrii wheel 5 Jan 1 1970 .helmignore* -rwxr-xr-x 1 andrii wheel 261 Jan 1 1970 Chart.yaml* -rwxr-xr-x 1 andrii wheel 4394 Jan 1 1970 README.md* drwxr-xr-x 8 andrii wheel 256 Mar 21 17:26 templates/ 3. Next step to build ChartBuilder instance to manipulate with Tiller:: from pyhelm.chartbuilder import ChartBuilder chart = ChartBuilder({'name': 'mongodb', 'source': {'type': 'directory', 'location': '/tmp/pyhelm-kibwtj8d/mongodb'}}) # than we can get chart meta data etc In [9]: chart.get_metadata() Out[9]: name: "mongodb" version: "0.4.0" description: "Chart for MongoDB" 4. 
Install chart:: from pyhelm.chartbuilder import ChartBuilder from pyhelm.tiller import Tiller chart = ChartBuilder({'name': 'mongodb', 'source': {'type': 'directory', 'location': '/tmp/pyhelm-kibwtj8d/mongodb'}}) t.install_release(chart.get_helm_chart(), dry_run=False, namespace='default') Out[9]: release { name: "fallacious-bronco" info { status { code: 6 } first_deployed { seconds: 1521647335 nanos: 746785000 } last_deployed { seconds: 1521647335 nanos: 746785000 } Description: "Dry run complete" } chart {.... } <file_sep>import grpc import yaml import logging from hapi.services.tiller_pb2 import ReleaseServiceStub, ListReleasesRequest, \ InstallReleaseRequest, UpdateReleaseRequest, UninstallReleaseRequest, GetReleaseContentRequest, \ GetReleaseStatusRequest from hapi.chart.chart_pb2 import Chart from hapi.chart.config_pb2 import Config LOG = logging.getLogger('pyhelm') TILLER_PORT = 44134 TILLER_VERSION = b'2.3.1' TILLER_TIMEOUT = 300 RELEASE_LIMIT = 64 class Tiller(object): ''' The Tiller class supports communication and requests to the Tiller Helm service over gRPC ''' def __init__(self, host, port=44134): # init k8s connectivity self._host = host self._port = port # init tiller channel self.channel = self.get_channel() # init timeout for all requests # and assume eventually this will # be fed at runtime as an override self.timeout = TILLER_TIMEOUT @property def metadata(self): ''' Return tiller metadata for requests ''' return [(b'x-helm-api-client', TILLER_VERSION)] def get_channel(self): ''' Return a tiller channel ''' return grpc.insecure_channel('%s:%s' % (self._host, self._port)) def tiller_status(self): ''' return if tiller exist or not ''' if self._host: return True return False def get_release_content(self, name): """ Release content """ stub = ReleaseServiceStub(self.channel) req = GetReleaseContentRequest(name=name) release_content = stub.GetReleaseContent(req, self.timeout, metadata=self.metadata) return release_content def get_release_status(self, 
name): stub = ReleaseServiceStub(self.channel) req = GetReleaseStatusRequest(name=name) release_status = stub.GetReleaseStatus(req, self.timeout, metadata=self.metadata) return release_status def list_releases(self): ''' List Helm Releases ''' releases = [] stub = ReleaseServiceStub(self.channel) req = ListReleasesRequest(limit=RELEASE_LIMIT) release_list = stub.ListReleases(req, self.timeout, metadata=self.metadata) for y in release_list: releases.extend(y.releases) return releases def list_charts(self): ''' List Helm Charts from Latest Releases Returns list of (name, version, chart, values) ''' charts = [] for latest_release in self.list_releases(): try: charts.append((latest_release.name, latest_release.version, latest_release.chart, latest_release.config.raw)) except IndexError: continue return charts def _pre_update_actions(self, actions, namespace): ''' :params actions - array of items actions :params namespace - name of pod for actions ''' try: for action in actions.get('delete', []): name = action.get("name") action_type = action.get("type") if "job" in action_type: LOG.info("Deleting %s in namespace: %s", name, namespace) self.k8s.delete_job_action(name, namespace) continue LOG.error("Unable to execute name: %s type: %s ", name, type) except Exception: LOG.debug("PRE: Could not delete anything, please check yaml") try: for action in actions.get('create', []): name = action.get("name") action_type = action.get("type") if "job" in action_type: LOG.info("Creating %s in namespace: %s", name, namespace) self.k8s.create_job_action(name, action_type) continue except Exception: LOG.debug("PRE: Could not create anything, please check yaml") def _post_update_actions(self, actions, namespace): try: for action in actions.get('create', []): name = action.get("name") action_type = action.get("type") if "job" in action_type: LOG.info("Creating %s in namespace: %s", name, namespace) self.k8s.create_job_action(name, action_type) continue except Exception: LOG.debug("POST: 
Could not create anything, please check yaml") def update_release(self, chart, dry_run, namespace, name=None, pre_actions=None, post_actions=None, disable_hooks=False, values=None): ''' Update a Helm Release ''' values = Config(raw=yaml.safe_dump(values or {})) self._pre_update_actions(pre_actions, namespace) # build release install request stub = ReleaseServiceStub(self.channel) release_request = UpdateReleaseRequest( chart=chart, dry_run=dry_run, disable_hooks=disable_hooks, values=values, name=name or '') stub.UpdateRelease(release_request, self.timeout, metadata=self.metadata) self._post_update_actions(post_actions, namespace) def install_release(self, chart, namespace, dry_run=False, name=None, values=None): """ Create a Helm Release """ values = Config(raw=yaml.safe_dump(values or {})) # build release install request stub = ReleaseServiceStub(self.channel) release_request = InstallReleaseRequest( chart=chart, dry_run=dry_run, values=values, name=name or '', namespace=namespace) return stub.InstallRelease(release_request, self.timeout, metadata=self.metadata) def uninstall_release(self, release, disable_hooks=False, purge=True): """ :params - release - helm chart release name :params - purge - deep delete of chart deletes a helm chart from tiller """ # build release install request stub = ReleaseServiceStub(self.channel) release_request = UninstallReleaseRequest(name=release, disable_hooks=disable_hooks, purge=purge) return stub.UninstallRelease(release_request, self.timeout, metadata=self.metadata) def chart_cleanup(self, prefix, charts): """ :params charts - list of yaml charts :params known_release - list of releases in tiller :result - will remove any chart that is not present in yaml """ def release_prefix(prefix, chart): """ how to attach prefix to chart """ return "{}-{}".format(prefix, chart["chart"]["release_name"]) valid_charts = [release_prefix(prefix, chart) for chart in charts] actual_charts = [x.name for x in self.list_releases()] chart_diff = 
list(set(actual_charts) - set(valid_charts)) for chart in chart_diff: if chart.startswith(prefix): LOG.debug("Release: %s will be removed", chart) self.uninstall_release(chart)
b11e1bce9cb85cdb3c2a8065ade92f700fff8f55
[ "Python", "Text", "reStructuredText", "INI" ]
8
Text
andriisoldatenko/pyhelm
67b40b383014ce26cdd7542506cf22377ec3018a
939a69f9e0f2fe11940b745342f897b5368cce54
refs/heads/master
<repo_name>CharlesMoone/Image-DelayedLoad<file_sep>/js/imageLoad.js /** * Created by apple on 16/3/10. */ /* * 图片延时加载 * * imageSrc : 图片实际的地址,这里是相对于本js的地址 * address : 放置的位置,可以理解为$(x).append()中x的位置,这里需要的dom对象 * imageLoading : 加载图,这里是关闭其显示 * * */ function imageLoad(imageSrc) { //图片要防止的位置 var address = document.getElementsByClassName("imageLoad")[0]; //创建一个加载图替代没加载好的图 var imageLoading = document.createElement("div"); imageLoading.className = "imageLoading"; var loadingImage = document.createElement("img"); loadingImage.src = "../images/loading.png"; loadingImage.alt = "loading"; imageLoading.appendChild(loadingImage); address.appendChild(imageLoading); //图片加载 var image = new Image(); image.src = imageSrc; image.onload = function() { console.log("images loaded!"); //这是为了本地测试,实际使用可移除setTimeout setTimeout(function () { imageLoading.style.display = "none"; address.appendChild(image); }, 2000); } };
bd335e7162c62cab2649cd41de34712d28cc082f
[ "JavaScript" ]
1
JavaScript
CharlesMoone/Image-DelayedLoad
9ceac93b7a795892a9b5021e3905d759b728b4f0
087ca49813b6e3bb2e04ce3230764ed20305c747
refs/heads/master
<repo_name>Uni-Projects/HiC-Assignment-2<file_sep>/exercise1.c #include <stdio.h> int main (void) { short i = 0x1234; char x = -127; long sn1 = 1024726; long sn2 = 1023775; int y[2] = {0x11223344,0x44332211}; char *p = &x; int j; printf("---------------------------------------------------\n"); printf("address content (hex) content (dec)\n"); printf("---------------------------------------------------\n"); for (j=0; j<27;j++ ){ printf("%p %x %d\n",p,*p,*p); p++; } printf("---------------------------------------------------\n"); printf ("i address: %p\n", &i); printf ("x address: %p\n", &x); printf ("sn1 address: %p\n", &sn1); printf ("sn2 address: %p\n", &sn2); printf ("y[] address: %p\n", &y); printf("---------------------------------------------------\n"); printf("x is: %ld bytes \n", sizeof(x)); printf("i is: %ld bytes \n", sizeof(i)); printf("sn1 is: %ld bytes \n", sizeof(sn1)); printf("sn2 is: %ld bytes \n", sizeof(sn2)); printf("y[] is: %ld bytes \n", sizeof(y)); return 0; } <file_sep>/exercise2.c #include <stdio.h> #include <stdbool.h> int main() { bool t = true; bool f = false; printf("size of true: %ld bytes\n", sizeof(t)); printf("size of false: %ld bytes\n", sizeof(f)); bool *p = &t; printf("hex of true: %x\n", *p); printf("hex of false: %x\n", f); *p = 0xff3; printf("hex of true: %x\n", *p); return 0; } <file_sep>/exercise3.c #include <stdio.h> void addvector(int *r, const int *a, const int *b, unsigned int len) { unsigned int i; for(i=0 ; i<len ; i++) { *r = *a + *b; printf(" %d ",*r); r++; a++; b++; } } int memcmp(const void *s1, const void *s2, size_t n) { unsigned char a; unsigned char b; for(int i=0 ; i<n ; i++) { unsigned char a = *(unsigned char *)s1; unsigned char b = *(unsigned char *)s2; if(a!= b) return a - b; s1++; s2++; } return 0; } int memcmp_backwards(const void *s1, const void *s2, size_t n) { unsigned char a; unsigned char b; s1 = s1+n-1; s2 = s2+n-1; for(int i=0 ; i<n ; i++) { unsigned char a = *(unsigned char *)s1; unsigned char b = 
*(unsigned char *)s2; if(a!= b) return a - b; s1--; s2--; } return 0; } int main() { int len = 5; int q[5] = {1,2,3,4,6}; int w[5] = {1,2,3,4,6}; int e[5]; int *r = (int*)&e; int *a = (int*)&q; int *b = (int*)&w; //addvector(r,a,b,len); int res = memcmp_backwards(a,b,sizeof(w)); printf("%d\n",res); return 0; }
27895e3d99af0d3221d4b42246dc769646974ced
[ "C" ]
3
C
Uni-Projects/HiC-Assignment-2
29c752f3c47459b4a2e3635550fbc96ee69b4132
2546518b52f59810fe63e0ac6295e92af3117b02
refs/heads/master
<file_sep>CREATE TABLE PRODUTOS (Codprod int, Descrição varchar(20), Preco numeric(9,2), Qtde int, Cor char(20), CONSTRAINT PK_PROD PRIMARY KEY(Codprod)); insert into PRODUTOS VALUES (12345,'CHOCOLATE',5.99,150,'AZUL'), (23487,'AMACIANTE',15.50,30,'AMARELO'), (97854,'PEPSI',3.00,4,'VERMELHO'), (13647,'PAO',2.50,90,'AMARELO'), (46235,'ROUPAS',60.00,7,'PRETO'), (47813,'CELULAR',1499.90,2,'BRANCO'), (71325,'FINI',5.00,40,'ROSA'), (41587,'AGUA',2.00,13,'AZUL'), (13158,'ALVEJANTE',7.99,45,'CINZA'), (79879,'CARNE',19.90,60,'VERMELHO'); SELECT *FROM PRODUTOS DELETE *FROM PRODUTOS --Excluir todos os produtos com qtde abaixo de 5; DELETE *FROM PRODUTOS WHERE Qtde < 5; --Listar as cores sem duplicidade SELECT DISTINCT COR FROM PRODUTOS; --Selecionar todos os produtos de cor Azul, vermelho, branco e preto. SELECT *FROM PRODUTOS WHERE Cor IN('AZUL','VERMELHO','BRANCO','PRETO'); --Listar todos os produtos que tem descrição iniciado em ‘A’. SELECT *FROM PRODUTOS WHERE Descrição LIKE 'A%'; --Quantos produtos temos no cadastro? SELECT COUNT(*) AS 'PRODUTOS' FROM PRODUTOS; --Selecionar os produtos com preço entre 10 e 50 reais. SELECT *FROM PRODUTOS WHERE Preco BETWEEN 10 AND 50; --Qual o valor do produto mais caro? SELECT MAX(PRECO) AS 'MAIOR' FROM PRODUTOS; --Qual o valor do produto mais barato? SELECT MIN(PRECO) AS 'MENOR'FROM PRODUTOS; --Qual a média de preços? SELECT AVG(PRECO) AS 'MEDIA' FROM PRODUTOS; --Quantos produtos vermelhos temos no cadastro? SELECT COUNT(*) AS 'QTDE' FROM PRODUTOS WHERE Cor='VERMELHO'; --Atualizar o preço dos produtos com preço abaixo de 10.00 acrescentando 15%; UPDATE PRODUTOS SET Preco = Preco*1.15 WHERE Preco < 10.00; --Atualizar o preço dos produtos com preço acima de 500.00 com um desconto de 5% UPDATE PRODUTOS SET Preco = Preco*0.95 WHERE Preco > 500.00; --Qual o somatório das quantidades? 
SELECT SUM(QTDE) AS 'QTDE' FROM PRODUTOS; <file_sep>-- A) Criar a seguinte tabela: create table veiculos (Placa Char(7), Modelo varchar(40), Fabricante varchar(20), Ano int, Cor varchar(20), Combustível varchar(20), Preço numeric(10,2), constraint pk_pla primary key (Placa)); select * from veiculos; -- B) inserir registros: insert into veiculos values ('MUB2752','HOVER CUV 2.4 16V 4WD 5p Mec.','GREAT WALL',2007,'Preto','Disel',500000.00), ('JJI3658','Gallardo Spider LP560-4 ','LAMBORGHINI', 2009, 'Vermelho', 'GasolinaATV', 8000.00), ('MVF0777','MINI STAR CE 1.0 8V 53cv (Pick-Up)','CHANGAN',2011,'Cinza','Gasolina', 124820.00), ('MQI9306','MG6 1.8 16V Turbo 170cv Aut.','MG', 2011, 'Marrom', 'Etanol', 6458.00), ('JGG9286','GC2 1.0 12V 68cv 5p','GEELY', 2015, 'Amarelo', 'Dual-Flex', 346198.00), ('NIC0712','GOLF GTI MK7 2.0 16V 143CV','VOLKSWAGEN',2017,'VERMELHO','GASOLINA',110000.00), ('THC0420','CIVIC EX MT. 1.6 16V V-TEC 127CV','HONDA',2000,'PRETO','GASOLINA',15000.00), ('PJL0000','FOCUS GL MT 1.6 8V Z-TEC 113CV','FORD',2008,'PRETO','GASOLINA', 17000.00), ('TIT6969','AUDI RS3 3.6 20V 430CV','AUDI',2018,'BRANCO','GASOLINA', 300000.00), ('OVO2424','C3 1.6 8V 105CV','CITROËN',2004,'ROSA','FLEX',15000.00); -- C) Selecionar todos os veículos com modelo iniciado pela letra ‘F’ select * from VEICULOS where Fabricante like 'F%'; -- D) Selecionar todos os veículos de ano entre 2000 e 2008 select * from veiculos where Ano between 2000 and 2008 -- E) Selecionar todos de combustível FLEX select * from veiculos where Combustível= 'Flex' -- F) Selecionar todos do fabricante GM com ano superior a 2001 select * from veiculos where Fabricante = 'GM' and Ano > 2001 -- g) Qual a média de preço dos veículos; select Avg(PREÇO) AS 'media' from veiculos; --h) Qual o valor do veículo mais caro; select MAX(preço) as 'maior' from veiculos; --i) Qual o valor do veículo mais barato; select MIN (PREÇO) AS 'menor' from veiculos; --j) Acrescentar um aumento de 10% para todos os veículos do 
fabricante FIAT; update veiculos set Preço=Preço*0.9 where Modelo= 'MG6 1.8 16V Turbo 170cv Aut.'; --k) Quantos veículos prata temos no cadastro? SELECT COUNT(*)AS 'qtde' FROM veiculos WHERE Cor='prata'; --l) Qual o somatório dos preços? select SUM (Preço) as 'total' from veiculos; -- M) Excluir do cadastro todos os veículos com ano inferior a 1985 delete from veiculos where Ano < 2001 -- N) Selecionar todos dos fabricantes: GM, FORD e TOYOTA select * from veiculos where Fabricante in ('GM', 'FORD', 'TOYOTA') -- O) Listar todos os fabricantes, sem duplicidade select distinct fabricante from veiculos <file_sep># exercicios-sql-server-IBD Exercícios de Banco de Dados - SQL SERVER. Orientadas nas aulas na FATEC MAUÁ
921ad76136a2e6c473491006cec2f53e641c73fc
[ "Markdown", "SQL" ]
3
SQL
yurikomuta/exercicios-sql-server---IBD
0d5fba58ff4f013535583a27a39703c4b8d99f09
24f552fb07eeab8156296e99a339bf6e684261db
refs/heads/master
<file_sep>using System.Collections; using System.Collections.Generic; using UnityEngine; [RequireComponent(typeof(AudioSource))] public class AudioSampler : MonoBehaviour { public float[] samples = new float[512]; public float[] freqBands = new float[8]; public float[] bandBuffers = new float[8]; private float[] _bufferDecrease = new float[8]; private float[] freqBandHighest = new float[8]; public float[] audioBand = new float[8]; public float[] audioBandBuffer = new float[8]; private AudioSource _audioSource; private void Start() { _audioSource = GetComponent<AudioSource>(); } private void Update() { GetSpectrumAudioSource(); MakeFrequencyBands(); BandBuffer(); CreateAudioBands(); } private void GetSpectrumAudioSource() { _audioSource.GetSpectrumData(samples, 0, FFTWindow.Blackman); } private void MakeFrequencyBands() { int count = 0; // 8 is number of bands for (int i = 0; i < 8; i++) { float average = 0; int sampleCount = (int)Mathf.Pow(2, i) * 2; if (i == 7) { sampleCount += 2; } for (int j = 0; j < sampleCount; j++) { average += samples[count] * (count + 1); count++; } average /= count; freqBands[i] = average * 10; } } void BandBuffer() { for (int i = 0; i < 8; i++) { if (freqBands[i] > bandBuffers[i]) { bandBuffers[i] = freqBands[i]; _bufferDecrease[i] = 0.005f; } else if (freqBands[i] < bandBuffers[i]) { bandBuffers[i] -= _bufferDecrease[i]; _bufferDecrease[i] *= 1.2f; } } } void CreateAudioBands() { for (int i = 0; i < 8; i++) { if (freqBands[i] > freqBandHighest[i]) { freqBandHighest[i] = freqBands[i]; } audioBand[i] = (freqBands[i] / freqBandHighest[i]); audioBandBuffer[i] = (bandBuffers[i] / freqBandHighest[i]); } } } <file_sep>using System.Collections; using System.Collections.Generic; using UnityEngine; public class InitSampleCubes : MonoBehaviour { public int sampleSize = 512; public float radius = 10f; public float maxScale = 10f; public GameObject samplePrefab; public AudioSampler audioPeer; private GameObject[] _samples; void Start () { _samples = 
new GameObject[sampleSize]; float iterationRotation = 360.0f / sampleSize; for (int i = 0; i < sampleSize; i++) { GameObject currentSample = Instantiate(samplePrefab, Vector3.zero, Quaternion.identity, transform); currentSample.name = "Sample-" + i; currentSample.transform.eulerAngles = new Vector3(0, iterationRotation * i, 0); currentSample.transform.position = currentSample.transform.forward * radius; _samples[i] = currentSample; } } void Update () { for (int i = 0; i < sampleSize; i++) { Vector3 pastScale = _samples[i].transform.localScale; float newScale = audioPeer.samples[i] * maxScale; if(newScale <= 0) { newScale = Mathf.Pow(10, -8); } _samples[i].transform.localScale = new Vector3(pastScale.x, newScale, pastScale.z); } } } <file_sep> using UnityEngine; public class ParticlesSystem : MonoBehaviour { [SerializeField] private ComputeShader computeShader; [SerializeField] public ParticlesEmitter[] particlesEmitters; [SerializeField] private Material material; /// <summary> /// 256 Colours pallet for each emitter gradient. 
/// </summary> public Texture2D gradientsTexture; //Private variables private int _kernel; private ComputeBuffer particlesBuffer; private ComputeBuffer itteratorsBuffer; private ComputeBuffer constantsBuffer; private ComputeBuffer emittersBuffer; private uint particlesCount = 0; public const int VertCount = 1048576 * 4; //4Mb of particle instances //Emitters control structure struct EBufferStruct { public float pX, pY, pZ; //The position of this emitter public float vX, vY, vZ; //The initial velocity of the emission public float pSize; //The initial size of the particles generated by this emitter public float pLife; //The life-time of particles generated by this emitter public int mustEmit; //The emission flag for the particles from this emitter public uint firstId; //The index of the first particle for this emitter public uint count; //The amount of particle for this emitter public bool useGravity; //Should the particles be affected by gravity }; //Constants control structure struct CBufferStruct { public float time; //Time in seconds of this itteration public float deltaTime; //Delta-time in seconds from the last itteration public uint emittersCount; //The ammount of emitters }; //Particle Itterators control structure struct IBufferStruct { public uint cParticle; //The index of the current particle this itterator is at public uint aliveCount; //The amount of particles that are alive at a given time } //We initialize the buffers and the material used to draw. 
void Start() { CreateBuffers(); _kernel = computeShader.FindKernel("CSMain"); //Generate 256 colours pallet for each emitter gradientsTexture = new Texture2D(256, particlesEmitters.Length, TextureFormat.ARGB32, false, true); gradientsTexture.wrapMode = TextureWrapMode.Clamp; gradientsTexture.filterMode = FilterMode.Point; UpdateGradientTexture(); } private void UpdateGradientTexture() { //Generate 256 colours pallet from gradient Color[] colors = new Color[256]; for (int i = 0; i < particlesEmitters.Length; i++) { for (int j = 0; j < 256; j++) { float t = (float)j / 256; colors[j] = particlesEmitters[i].gradient.Evaluate(t); } gradientsTexture.SetPixels(0, i, 256, 1, colors); } gradientsTexture.Apply(false, false); } void CreateBuffers() { //Allocate emitters dependant buffers emittersBuffer = new ComputeBuffer(particlesEmitters.Length, 48); //Contains emitter properties (8*float+4*int -> 48 bytes) itteratorsBuffer = new ComputeBuffer(particlesEmitters.Length, 8); //Contains itterator information (2*uint -> 8 bytes) //Emitters information EBufferStruct[] emitters = new EBufferStruct[particlesEmitters.Length]; IBufferStruct[] itterators = new IBufferStruct[particlesEmitters.Length]; uint id = 0; for (int i = 0; i < particlesEmitters.Length; i++) { //Update emitters struct emitters[i].pX = particlesEmitters[i].pos.x; //Pos X emitters[i].pY = particlesEmitters[i].pos.y; //Pos Y emitters[i].pZ = particlesEmitters[i].pos.z; //Pos Z emitters[i].vX = particlesEmitters[i].speed.x; //Vel X emitters[i].vY = particlesEmitters[i].speed.y; //Vel Y emitters[i].vZ = particlesEmitters[i].speed.z; //Vel Z emitters[i].mustEmit = particlesEmitters[i].shouldEmit ? 
1 : 0; //Emission flag emitters[i].pSize = particlesEmitters[i].size; //Size emitters[i].pLife = particlesEmitters[i].life; //Life (in seconds) emitters[i].firstId = id; //First particle id emitters[i].count = particlesEmitters[i].count; //Amount of particles emitters[i].useGravity = particlesEmitters[i].useGravity; //Gravity usage flag //Update particle itterators itterators[i].cParticle = id; itterators[i].aliveCount = 0; //Update id for the next emitter id += emitters[i].count; //Make sure to update max particles count particlesCount += id; //Reset emission flag on particlesEmmiter particlesEmitters[i].shouldEmit = false; } emittersBuffer.SetData(emitters); itteratorsBuffer.SetData(itterators); //Particle information particlesBuffer = new ComputeBuffer(VertCount, 40); //Contains particles (10*float -> 40 bytes) float[] particles = new float[VertCount * 10]; for (int i = 0; i < VertCount; i++) { particles[i * 10 + 0] = 0; //Speed X particles[i * 10 + 1] = 0; //Speed Y particles[i * 10 + 2] = 0; //Speed Z particles[i * 10 + 3] = 0; //Pos X particles[i * 10 + 4] = 0; //Pos Y particles[i * 10 + 5] = 0; //Pos Z particles[i * 10 + 6] = 0; //Life particles[i * 10 + 7] = 0; //Size particles[i * 10 + 8] = 0; //Max Life particles[i * 10 + 9] = 0; //Emitter pallet pos } particlesBuffer.SetData(particles); //Constant information constantsBuffer = new ComputeBuffer(1, 12); //Contains constant values (3*float -> 12 bytes) CBufferStruct[] constants = new CBufferStruct[1]; constants[0].time = Time.time; constants[0].deltaTime = Time.deltaTime; constants[0].emittersCount = (uint)particlesEmitters.Length; constantsBuffer.SetData(constants); } //When this GameObject is disabled we must release the buffers or else Unity complains. 
private void OnDisable() { constantsBuffer.Release(); particlesBuffer.Release(); emittersBuffer.Release(); itteratorsBuffer.Release(); Destroy(gradientsTexture); } //After all rendering is complete we dispatch the compute shader and then set the material before drawing with DrawProcedural //this just draws the "mesh" as a set of points void OnPostRender() { material.SetPass(0); material.SetTexture("_GradientsTex", gradientsTexture); material.SetBuffer("pBuffer", particlesBuffer); Graphics.DrawProcedural(MeshTopology.Triangles, VertCount * 6); } //Run compute shader after rendering void LateUpdate() { Dispatch(); UpdateGradientTexture(); } //The meat of this script, it sets the constant buffer (current time) and then sets all of the buffers for the compute shader. //We then dispatch 32x32x1 groups of threads of our CSMain kernel. void Dispatch() { //Constant information CBufferStruct[] constants = new CBufferStruct[1]; constants[0].time = Time.time; constants[0].deltaTime = Time.deltaTime; constants[0].emittersCount = (uint)particlesEmitters.Length; constantsBuffer.SetData(constants); //Reset max particles count for recalculation particlesCount = 0; //Emitters information EBufferStruct[] emitters = new EBufferStruct[particlesEmitters.Length]; uint id = 0; for (int i = 0; i < particlesEmitters.Length; i++) { //Update emitters struct emitters[i].pX = particlesEmitters[i].pos.x; //Pos X emitters[i].pY = particlesEmitters[i].pos.y; //Pos Y emitters[i].pZ = particlesEmitters[i].pos.z; //Pos Z emitters[i].vX = particlesEmitters[i].speed.x; //Vel X emitters[i].vY = particlesEmitters[i].speed.y; //Vel Y emitters[i].vZ = particlesEmitters[i].speed.z; //Vel Z emitters[i].mustEmit = particlesEmitters[i].shouldEmit ? 
1 : 0; //Emission flag emitters[i].pSize = particlesEmitters[i].size; //Size emitters[i].pLife = particlesEmitters[i].life; //Life (in seconds) emitters[i].firstId = id; //First particle id emitters[i].count = particlesEmitters[i].count; //Amount of particles emitters[i].useGravity = particlesEmitters[i].useGravity; //Gravity usage flag //Update id for the next emitter id += emitters[i].count; //Make sure to update max particles count particlesCount += id; //Reset emission flag on particlesEmmiter particlesEmitters[i].shouldEmit = false; } emittersBuffer.SetData(emitters); computeShader.SetBuffer(_kernel, "cBuffer", constantsBuffer); computeShader.SetBuffer(_kernel, "pBuffer", particlesBuffer); computeShader.SetBuffer(_kernel, "eBuffer", emittersBuffer); computeShader.SetBuffer(_kernel, "cpBuffer", itteratorsBuffer); int sqrdParticlesCount = Mathf.CeilToInt(particlesCount / 1024.0f); computeShader.Dispatch(_kernel, sqrdParticlesCount, 1, 1); } }<file_sep>using System.Collections; using System.Collections.Generic; using UnityEngine; public class SampleObjectController : MonoBehaviour { public int band; public float startScale, scaleMultiplier; public AudioSampler audioSampler; public bool useBuffer; private Material _material; private void Start() { _material = GetComponentInChildren<MeshRenderer>().materials[0]; } void Update() { float audioBandValue; if (useBuffer) { transform.localScale = new Vector3(transform.localScale.x, (audioSampler.bandBuffers[band] * scaleMultiplier) + startScale, transform.localScale.z); audioBandValue = audioSampler.audioBandBuffer[band]; } else { transform.localScale = new Vector3(transform.localScale.x, (audioSampler.freqBands[band] * scaleMultiplier) + startScale, transform.localScale.z); audioBandValue = audioSampler.audioBand[band]; } Color newColor = new Color(audioBandValue, audioBandValue, audioBandValue); _material.SetColor("_EmissionColor", newColor); } } <file_sep>using System.Collections; using System.Collections.Generic; 
using UnityEngine; public class BeatDetectionExample : MonoBehaviour { private float _currentValue = 0; private Material _material; private void Start() { _material = GetComponent<MeshRenderer>().materials[0]; } private void Update() { Color newColor = new Color(_currentValue, _currentValue, _currentValue); _material.SetColor("_EmissionColor", newColor); _currentValue -= Time.deltaTime; if(_currentValue < 0) { _currentValue = 0; } } public void OnBeat() { _currentValue = 1f; } } <file_sep>using System.Collections; using System.Collections.Generic; using UnityEngine; public class EmitterSpawner : MonoBehaviour { public float dimension = 40f; public Transform target; public Gradient gradient; public ParticlesSystem pSystem; public AudioSampler aSampler; // Use this for initialization void Awake() { int count = 64; ParticlesEmitter[] pes = new ParticlesEmitter[count]; for (int i = 0; i < count; i++) { GameObject go = new GameObject(); float halfI = (i - count*0.5f) / count; go.transform.position = transform.position + Vector3.right * halfI * dimension + Vector3.back * Mathf.Abs(Mathf.Cos(halfI*Mathf.PI*0.5f)) * 45.0f + Vector3.forward * 45.0f; go.transform.parent = transform; pes[i] = go.AddComponent<ParticlesEmitter>(); pes[i].target = target; pes[i].targetSpeedMod = 10.0f; pes[i].speedOffset = Vector3.up * Mathf.Clamp(Mathf.Sqrt(1.0f - Mathf.Abs(halfI)), 0.6f, 1.0f) * 20.0f; pes[i].gradient = gradient; pes[i].life = Mathf.Clamp(1.0f - Mathf.Abs(halfI), 0.2f, 1.0f) * 3.0f; pes[i].targetSpeedMod = 6.0f; pes[i].count = 10000; pes[i].frequency = 1000; pes[i].audioSampler = aSampler; float fCount = count; pes[i].band = (int)(((float)i / fCount) * 8.0f); pes[i].size = 2.4f; } pSystem.particlesEmitters = pes; } } <file_sep>using UnityEngine; using System.IO; using System.Collections.Generic; class BeatLoader : MonoBehaviour { public float[] LoadBeatFile(string path) { List<float> beatTimes = new List<float>(); try { StreamReader sr = File.OpenText(path); 
while(!sr.EndOfStream) { beatTimes.Add(float.Parse(sr.ReadLine())); } return beatTimes.ToArray(); } catch (System.IO.IOException ex) { Debug.LogError("File not found: " + ex.Message); } return null; } } <file_sep>using UnityEngine; public class ParticlesEmitter : MonoBehaviour { /// <summary> /// The position of this emitter. /// </summary> [HideInInspector] public Vector3 pos; /// <summary> /// The speed of emision for this emitter. /// </summary> public Vector3 speed; public Vector3 direction; public Vector3 speedOffset; /// <summary> /// The target to points speed at. /// </summary> public Transform target; /// <summary> /// Modifier of the target vector. /// </summary> public float targetSpeedMod; /// <summary> /// The max amount of particles alive for this emitter. /// </summary> public uint count; /// <summary> /// The emission frequency in Hertz for this emitter. /// The higher, the faster the particles are emitted. /// </summary> public uint frequency; /// <summary> /// The life of emitted particles. /// </summary> public float life; /// <summary> /// The size of emitted particles /// </summary> public float size; /// <summary> /// If Emmited particles are affected by gravity. /// </summary> public bool useGravity = true; /// <summary> /// Gradient that defines the color of the particles over life-time. /// </summary> public Gradient gradient = new Gradient(); [SerializeField] private bool isEnabled = true; public AudioSampler audioSampler; public int band = 0; /// <summary> /// Is this emitter enabled? /// </summary> public bool IsEnabled { get { return isEnabled; } set { isEnabled = value; //Make sure 'shouldEmit' is true the moment the emitter is enabled if (isEnabled == true) { shouldEmit = true; } } } /// <summary> /// Is this emitter ready to emit a particle? /// </summary> [HideInInspector] public bool shouldEmit; /// <summary> /// The current time counter for this emitter. 
/// </summary> private float time = 0; private void Start() { time = 1.0f / frequency; } private void FixedUpdate() { pos = transform.position; Vector3 auxVec = (target.position - transform.position).normalized; if (target != null) direction = new Vector3(auxVec.x, 0, auxVec.y) * targetSpeedMod; float freq = Mathf.Clamp01(audioSampler.audioBand[band] * 0.333333f); Vector3 tanDir = Vector3.Cross(direction, Vector3.up).normalized; speed = direction + tanDir * (freq - 0.2f) * 20.0f + speedOffset + Vector3.up * freq * 20.0f; if (!IsEnabled) return; time -= Time.fixedDeltaTime; if (time <= 0) { shouldEmit = true; time = 1.0f / frequency; } } } <file_sep>using System.Collections; using System.Collections.Generic; using UnityEngine; public class RunExtractor : MonoBehaviour { public string exePath = ""; public string songPath = ""; public string outputExtenstion = ""; void Start() { string basePath = Application.dataPath + "/"; string filePath = basePath + exePath; string songDir = System.IO.Path.GetDirectoryName(songPath); string songFileName = System.IO.Path.GetFileNameWithoutExtension(songPath); System.Diagnostics.Process process = new System.Diagnostics.Process(); process.StartInfo.FileName = filePath; process.StartInfo.Arguments = basePath + "/" + songPath + " " + // Song file basePath + "/" + songDir + "/" + songFileName + "." 
+ outputExtenstion; // Output file process.Start(); } } <file_sep>using System.Collections; using System.Collections.Generic; using UnityEngine; using System.Linq; [RequireComponent(typeof(Planet))] public class PlanetSampleController : MonoBehaviour { [Header("Control Variables")] public Vector2 minMaxRoughness; public Vector2 minMaxRadius; public Vector2 minMaxStrength; public float speed = 5f; public AudioSampler audioSampler; private Planet _planet; private float _radius = 0.0f; private ShapeSettings.NoiseLayer _layer; void Start() { _planet = GetComponent<Planet>(); _layer = _planet.shapeSettings.noiseLayers[0]; } void Update() { _radius -= Time.deltaTime; if (_radius < 0f) { _radius = 0f; } float meanAmplitude = 0.0f; float mean1= 0.0f, mean2 = 0.0f; for (int i = 0; i < 8; i++) { meanAmplitude += audioSampler.bandBuffers[i]; if (i % 2 == 0) { mean1 += audioSampler.bandBuffers[i]; } else { mean2 += audioSampler.bandBuffers[i]; } } meanAmplitude /= 8f * 4; float newRoughness = meanAmplitude * (_radius + 1f) / 2f; _planet.shapeSettings.radius = Mathf.Lerp(minMaxRadius.x, minMaxRadius.y, _radius); _layer.noiseSettings.roughness = Mathf.Lerp(minMaxRoughness.x, minMaxRoughness.y, newRoughness); _layer.noiseSettings.strength = Mathf.Lerp(minMaxStrength.x, minMaxStrength.y, newRoughness); mean1 /= 4f; mean2 /= 4f; float[] sampleSubArray = audioSampler.samples.Take(32).ToArray(); float subArrayMean = 0.0f; for (int i = 0; i < 32; i++) { subArrayMean += sampleSubArray[i]; } subArrayMean /= 32; float centreVal = (mean1 > mean2) ? mean1 : -mean2; _layer.noiseSettings.centre.x += meanAmplitude * Time.deltaTime * speed * centreVal; _layer.noiseSettings.centre.y += subArrayMean * speed; _planet.GeneratePlanet(); } public void OnBeatEvent() { _radius = 1.0f; } }
c6178a90104c25b9186f92513492434c39cb0e87
[ "C#" ]
10
C#
RafaelFreita/SoundAnimation
82542411c11a5f0d479d315ae23b24534a305a25
a54973d0fc1d92e8166a951d5983bd256de4502a
refs/heads/master
<repo_name>wolf5/godmode<file_sep>/modules/rechnungen/menu.inc.php Rechnungen Position erstellen=position_erstellen.php Positionen=positionen.php Offene Rechnungen=offene.php Bezahlte Rechnungen=bezahlte.php Währungsrechner=waehrungsrechner.php Gutschriften Anzeigen/Editieren=gutschriften.php Gutschrift erstellen=gutschriften_add.php <file_sep>/modules/kontakte/modules/domains.inc.php <? print "<table width=300 cellpadding=0 cellspacing=0> <tr> <td width=100 valign=top>Domains:</td> <td>"; $query=mysql_query("SELECT id,domain FROM Domains WHERE kontakt='".$kontakt['id']."'"); for($i=0;list($dom_id,$domain)=mysql_fetch_row($query);$i++) { if($i>0) { print ", "; } print "<a href=\"../domains/domain.php?id=$dom_id&back=".urlencode($REQUEST_URI)."\">$domain</a>"; } print" </td> </tr> </table"; ?> <file_sep>/modules/rechnungen/bezahlte.php <? include("../../inc/config.inc.php"); include("../../inc/func.inc.php"); ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="../../main.css" type=text/css> </head> <body onLoad="document.getElementById('term').focus()"> <p class=titel>Rechnungen:Bezahlte Rechnungen</p> <form method=get action="<?=$PHP_SELF?>"> <input type=text name=term id=term value="<?=$term?>"> <input type=submit name=search value="Suchen"> </form> <? 
if($order=="datum"){ $order_int="ORDER BY rech.datum"; } else if($order=="betreff") { $order_int="ORDER BY rech.betreff"; } else if($order=="betrag") { $order_int="ORDER BY betrag"; } else if($order=="id") { $order_int="ORDER BY rech.id"; } else if($order=="bezahlt") { $order_int="ORDER BY rech.bezahlt"; } else { $order_int="ORDER BY kon.firma"; } if(!$start){ $start=0; } if($term){ $query=mysql_query("select rech.id, kon.id, DATE_FORMAT(rech.datum,'$_config_date'), DATE_FORMAT(rech.bezahlt,'$_config_date'),rech.betreff, sum($_config_posbetrag) as betrag FROM Rechnungen_positionen pos, Rechnungen rech, Kontakte kon WHERE rech.id = pos.rechnung AND rech.bezahlt is NOT NULL AND rech.kontakt = kon.id AND ".formatSearchString($term,array("kon.firma","kon.firma2"))." GROUP BY pos.rechnung $order_int LIMIT $start,$_config_entrysperpage"); } else { $query=mysql_query("select rech.id, kon.id, DATE_FORMAT(rech.datum,'$_config_date'), DATE_FORMAT(rech.bezahlt,'$_config_date'),rech.betreff, sum($_config_posbetrag) as betrag FROM Rechnungen_positionen pos, Rechnungen rech, Kontakte kon WHERE rech.id = pos.rechnung AND rech.bezahlt is NOT NULL AND rech.kontakt = kon.id GROUP BY pos.rechnung $order_int LIMIT $start,$_config_entrysperpage"); } if(@mysql_num_rows($query)<1) { print "<b>Keine Bezahlte Rechnungen</b>"; exit; } print "<form action=\"$PHP_SELF\" method=post> <table width=\"95%\" border=0 cellpadding=2 cellspacing=0> <tr> <td><a href=\"$PHP_SELF\"><b>Kontakt</b></a></td> <td><a href=\"$PHP_SELF?order=datum\"><b>Datum</b></a></td> <td><a href=\"$PHP_SELF?order=bezahlt\"><b>Valuta</b></a></td> <td><a href=\"$PHP_SELF?order=id\"><b>Nr.</b></a></td> <td><a href=\"$PHP_SELF?order=betreff\"><b>Betreff</b></a></td> <td align=right><a href=\"$PHP_SELF?order=betrag\"><b>Betrag</b></a></td> <td>&nbsp;</td> </tr>"; for($i=0;list($id,$kontakt,$datum,$bezahlt,$betreff,$betrag)=mysql_fetch_row($query);$i++) { if(($i%2)==0){ $bgcolor=$_config_tbl_bgcolor1; } else { 
$bgcolor=$_config_tbl_bgcolor2; } //Gutschriften vom Total abziehen $query2=mysql_query("SELECT sum(betrag) FROM Rechnungen_gutschriften WHERE bezahlt='$id'"); if(mysql_num_rows($query2)>0) { $betrag-= mysql_result($query2,0,0); } print "<tr> <td bgcolor=\"#$bgcolor\"><a href=\"../../modules/kontakte/kontakt.php?id=$kontakt&back=".urlencode($REQUEST_URI)."\">".getKontakt($kontakt)."</a></td> <td bgcolor=\"#$bgcolor\">$datum</td> <td bgcolor=\"#$bgcolor\"><a href=\"valuta_editieren.php?id=$id&back=".urlencode("bezahlte.php")."\">$bezahlt</a></td> <td bgcolor=\"#$bgcolor\">".str_pad($id,4,"0",STR_PAD_LEFT)."</td> <td bgcolor=\"#$bgcolor\" width=100>$betreff</td> <td align=right bgcolor=\"#$bgcolor\">".formatBetrag($betrag)."</td> <td bgcolor=\"#$bgcolor\"><a href=\"createPDF.php?id=$id\">PDF</a></td> </tr>\n"; } print "<tr> <td colspan=4 align=center>"; if($term) $attr="&term=$term"; if($order) $attr.="&order=$order"; if($start>0){ print "<a href=\"$PHP_SELF?start=".($start-$_config_entrysperpage)."$attr\"><<<</a>"; } if($term){ $query=mysql_query("SELECT count(*) FROM Rechnungen rech, Kontakte kon WHERE rech.bezahlt is NOT NULL AND kon.id = rech.kontakt AND ".formatSearchString($term,array("kon.firma","kon.firma2"))); } else { $query=mysql_query("SELECT count(*) FROM Rechnungen WHERE bezahlt is NOT NULL"); } if(($start+$_config_entrysperpage+1)<=mysql_result($query,0,0)){ if($start>0){ print " | "; } print "<a href=\"$PHP_SELF?start=".($start+$_config_entrysperpage)."$attr\">>>></a>"; } print "</td> </tr> </table> </form>"; ?> </body> </html> <file_sep>/modules/domains/show.php <? 
include("../../inc/config.inc.php"); include("../../inc/func.inc.php"); ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="../../main.css" type=text/css> <script src="../../inc/functions.js" type="text/javascript" language="javascript"></script> </head> <body onLoad="document.getElementById('term').focus()"> <p class=titel>Domains</p> <form method=get action="<?=$PHP_SELF?>"> <input type=text name=term id=term value="<?=$term?>"> <input type=submit name=search value="Suchen"> </form> <? if($order=="kontakt") $order="ORDER BY kon.firma"; else if($order=="betrag") $order="ORDER BY (abr.raten*dom.betrag)"; else $order="ORDER BY dom.domain"; if(!$start){ $start=0; } if($term){ $query=mysql_query("SELECT dom.id, dom.domain, kon.id, dom.startDate, dom.endDate, abr.raten * dom.betrag FROM Kontakte kon,Domains dom ,Zahlungsarten abr WHERE dom.kontakt = kon.id AND dom.abrechnungsart = abr.id AND dom.endDate is NULL AND ".formatSearchString($term,array("dom.domain","dom.id"))." 
$order LIMIT $start,$_config_entrysperpage"); } else { $query=mysql_query("SELECT dom.id, dom.domain, kon.id, dom.startDate, dom.endDate, (abr.raten * dom.betrag) FROM Kontakte kon,Domains dom ,Zahlungsarten abr WHERE dom.kontakt = kon.id AND dom.abrechnungsart = abr.id AND dom.endDate is NULL $order LIMIT $start,$_config_entrysperpage"); } if(@mysql_num_rows($query)>0) { print "<table border=0 cellpadding=3 cellspacing=0 width=\"95%\"> <tr> <td><b><a href=\"$PHP_SELF\">Domain</a></b></td> <td><b><a href=\"$PHP_SELF?order=person\">Kontakt</a></b></td> <td><b><a href=\"$PHP_SELF?order=betrag\">Betrag</a></b></td> </tr>\n"; $total=0; for($i=0;list($id,$domain,$kontakt,$startDate,$endDate,$betrag)=mysql_fetch_row($query);$i++) { if(($i%2)==0){ $bgcolor=$_config_tbl_bgcolor1; } else { $bgcolor=$_config_tbl_bgcolor2; } $total+=$betrag; print "<tr onmouseover=\"setPointer(this, 'over', '#$bgcolor', '#$_config_tbl_bghover', '')\" onmouseout=\"setPointer(this, 'out', '#$bgcolor', '#$_config_tbl_bghover', '')\" onclick=\"location.href='domain.php?id=$id&back=".urlencode($REQUEST_URI)."'\"> <td width=200$ bgcolor=\"#$bgcolor\">$domain</td> <td bgcolor=\"#$bgcolor\"><a href=\"../kontakte/kontakt.php?id=$kontakt&back=".urlencode($REQUEST_URI)."\">".getKontakt($kontakt)."</a></td> <td width=50 align=right bgcolor=\"#$bgcolor\">".formatBetrag($betrag)."</td> </tr>\n"; } print "<tr> <td colspan=3 align=center>"; if($term){ $attr="&term=$term"; } if($start>0){ print "<a href=\"$PHP_SELF?start=".($start-$_config_entrysperpage)."$attr\"><<<</a>"; } if($term){ $query=mysql_query("SELECT count(*) FROM Domains WHERE endDate is NULL AND ".formatSearchString($term,array("domain"))); } else { $query=mysql_query("SELECT count(*) FROM Domains where endDate is NULL"); } if(($start+$_config_entrysperpage+1)<=mysql_result($query,0,0)){ if($start>0){ print " | "; } print "<a href=\"$PHP_SELF?start=".($start+$_config_entrysperpage)."$attr\">>>></a>"; } print "</td> </tr> </table>\n"; } else { 
print "Keine Domains gefunden"; } ?> </body> </html> <file_sep>/modules/statistiken/excel.php <? include("../../inc/config.inc.php"); include("../../inc/db.inc.php"); include("../../inc/func.inc.php"); ?> <html> <head> <title>Sylon godmode</title> <link rel="stylesheet" href="../../main.css" type=text/css> </head> <body> <p class=titel>Statistiken:Excel Sheets</p> <br><br> <? $query=mysql_query("select DATE_FORMAT(rech.datum,'$_config_date') as datum, rech.id,rech.kontakt,rech.betreff,sum(pos.betrag) as betrag FROM Rechnungen rech, Rechnungen_positionen pos WHERE pos.rechnung = rech.id AND rech.bezahlt is NULL GROUP BY rech.id"); if(mysql_num_rows($query)>0){ $file=fopen("excel/offene_rechnungen.csv","w"); fputs($file,"Rech. Nr.;Datum;Text;Kontakt;Betrag\n"); while(list($datum,$id,$kontakt,$text,$betrag)=mysql_fetch_row($query)){ fputs($file,"$id;$datum;$text;".getKontakt($kontakt).";".formatBetrag($betrag)."\n"); } fclose($file); } ?> <a href="excel/offene_rechnungen.csv">Offene Rechnungen</a> </body> </html> <file_sep>/modules/timesheet/index.php <?php include("./inc/func.inc.php"); ?> <html> <head> <title>Sylon godmode</title> <link rel="stylesheet" href="../../main.css" type=text/css> <link rel="stylesheet" href="inc/css/ts.css" type=text/css> <script type="text/javascript" src="inc/js/jquery-1.4.4.min.js"></script> <script type="text/javascript" src="inc/js/jquery.stopwatch.js"></script> <script type="text/javascript" src="inc/js/func.js"></script> </head> <body> <p class=titel>Zeiterfassung</p> <br><br> <div id="stopwatch"></div> <fieldset> <legend>Kunden</legend> <? showButtons(); ?> </fieldset> <?php if(empty($_GET['nolist'])){?> <fieldset id="tableFS"> <legend>Liste von <?php echo date("F j, Y")?></legend> <div id="tableDiv"> <table id="timesheet" width="100%" cellpadding="3" cellspacing="0"> <? 
updateTable(true); ?> </table> </div> </fieldset> <?php }?> <table width="100%" height="200px"> <tr> <td width="50%"> <fieldset class="dFS"> <legend>Details</legend> <form id="editForm"> </form> </fieldset> </td> <td width="50%"> <fieldset class="dFS"> <legend>Uhr</legend> <div id="clock"> </div> </fieldset> </td> </tr> </table> </body> </html> <file_sep>/modules/produkte/func.inc.php <? function getProdukteList($formname,$selected,$breite,$text_null) { global $_config_produkte_show_field_in_select; $query = mysql_query("SELECT id,$_config_produkte_show_field_in_box FROM Produkte"); $select="<SELECT NAME=\"$formname\" style=\"width:".$breite."px;\"> <option value=0>$text_null</option>\n"; while(list($id,$field)=@mysql_fetch_row($query)) { if($id == $selected) $select.="<option value=$id SELECTED>$field</option>\n"; else $select.="<option value=$id>$field</option>\n"; } $select.="</SELECT>\n"; return $select; } function getPreiseList($formname,$selected,$breite,$text_null) { global $_config_produkte_preis1,$_config_produkte_preis1_name,$_config_produkte_preis2,$_config_produkte_preis2_name,$_config_produkte_preis3,$_config_produkte_preis3_name,$_config_produkte_preis4,$_config_produkte_preis4_name; $select="<SELECT NAME=\"$formname\" style=\"width:".$breite."px;\"> <option value=0>$text_null</option>\n"; for($i=1;$i<=4;$i++) { $var="_config_produkte_preis$i"; if($$var) { $var="_config_produkte_preis".$i."_name"; if($i == $selected) { $select.="<option value=$i SELECTED>".$$var."</option>\n"; } else { $select.="<option value=$i>".$$var."</option>\n"; } } } $select.="</SELECT>\n"; return $select; } ?> <file_sep>/modules/rechnungenOloidOld/createPDF.php <?php include("../../inc/config.inc.php"); include("../../inc/func.inc.php"); //SQL Selects, Titel setzen if($type=="mahnung") { $query = mysql_query("SELECT 
rech.id,rech.kontakt,rech.waehrung,DATE_FORMAT(mahn.datum,'$_config_date'),rech.bezahlt,mahn.adresse,mahn.betreff,mahn.text,mahn.footer,mahn.zahlungsfrist,mahn.besrnr FROM Rechnungen rech, Rechnungen_mahnungen mahn WHERE mahn.rechnung = rech.id AND mahn.id='$id'"); }
else { $query = mysql_query("SELECT id,kontakt,waehrung,DATE_FORMAT(datum,'$_config_date'),bezahlt,adresse,betreff,text,footer,zahlungsfrist,besrnr FROM Rechnungen WHERE id='$id'"); }
// Abort with a plain-text message when the requested id does not exist.
if(@mysql_num_rows($query)==0) { print "Die Rechnung Nr. '$id' existiert nicht."; die(); }
list($rech_id,$kontakt,$waehrung,$datum,$bezahlt,$adresse,$betreff,$text,$footer,$zahlungsfrist,$besrnr)=mysql_fetch_row($query);
// Debtor company name (used for the PDF title).
$query = mysql_query("SELECT firma FROM Kontakte WHERE id='$kontakt'"); list($firma)=mysql_fetch_row($query);
// Contact person assigned to this Kontakt (ko.pl) -> initials for "Sachbearbeiter".
$query=mysql_query("SELECT kp.vorname,kp.name FROM Kontakte_kontaktpersonen kp,Kontakte ko WHERE ko.id='$kontakt' AND ko.pl = kp.id");
if($type=="mahnung") { $title="Mahnung $firma"; } else { $title="Rechnung $firma"; }
// BESR suffix is appended to the invoice number with a dash when present.
if($besrnr) $besrnr="-$besrnr";
if(mysql_num_rows($query)==1) $sachbearbeiter=substr(mysql_result($query,0,0),0,1).substr(mysql_result($query,0,1),0,1);
if($sachbearbeiter) $sachbearbeiter="Sachbearbeiter: $sachbearbeiter";
//Rechnungspositionen holen
// For dunning letters, also include positions attached to the dunning itself
// (key='mahnung' / value=<mahnung id>).
if($type=="mahnung") { $rechnung_pos= mysql_query("SELECT text,text1,anzahl,betrag,waehrung,mwst,`key`,`value` FROM Rechnungen_positionen WHERE (rechnung='$rech_id' OR (`key`='mahnung' AND `value`='$id')) ORDER BY id"); }
else { $rechnung_pos= mysql_query("SELECT text,text1,anzahl,betrag,waehrung,mwst,`key`,`value` FROM Rechnungen_positionen WHERE rechnung='$id' ORDER BY id"); }
// Credit notes ("Gutschriften") applied against this invoice.
$gutschriften= mysql_query("SELECT text, betrag,waehrung,mwst FROM Rechnungen_gutschriften WHERE bezahlt='$id'");
//PDF erstellen, generelle Optionen
define('FPDF_FONTPATH',"$_config_root_path/fpdf/font/"); require("$_config_root_path/fpdf/fpdf.php");
$pdf = new FPDF(); $pdf->Open(); $pdf->SetTitle($title); $pdf->SetCreator("Sylon godmode"); 
$pdf->SetAuthor($_config_rechnung_pdf_author); $pdf->SetPDFfileName(str_replace(" ","_",$title).".pdf"); $pdf->SetDisplayMode("fullwidth","single"); $pdf->SetAutoPageBreak("margin",20); $pdf->SetFillColor(230); $pdf->SetLeftMargin(15); $pdf->AddPage();
//Header
// NOTE(review): the original letterhead rendering is kept disabled below.
/* $pdf->SetFont("Times","",24); //$pdf->Cell(20,5,"",0,0,"L"); $pdf->Text(40,15,$_config_rechnung_head_titel_logo); $pdf->SetFont("Arial","",10); $pdf->SetFont("Arial","B",10); $txt=split("\n",$_config_rechnung_head_titel_adresse); for($i=0,$height=19;$txt[$i];$i++,$height+=4){ if($i==1){ $pdf->SetFont("Arial","",10); } $pdf->Text(150,$height,$txt[$i]); } $pdf->Write(15,""); $pdf->Line($pdf->getX(),15,$pdf->getX()+190,15); */
// Recipient address block, pushed down into the envelope window area.
$pdf->SetFont("Arial","",10); $pdf->Write(5,"\n\n\n\n\n\n\n$adresse"); $pdf->Ln(); $pdf->Ln(); $pdf->Ln(); $pdf->Ln(); $pdf->Ln();
//Header: Rechnungsinfo's, Betreff, Text, etc.
// Invoice number = <kontakt id><zero-padded invoice id><optional -besrnr>.
$pdf->SetFont("Arial","B",10); $pdf->Cell(61,5,"Rechnungsnummer: ".$kontakt.str_pad($rech_id,4,"0",STR_PAD_LEFT).$besrnr,0,0,"L"); $pdf->Cell(61,5,$sachbearbeiter,0,0,"C"); $pdf->Cell(61,5,"$_config_rechnung_ort, $datum",0,1,"R"); $pdf->Write(5,"\n\n$betreff\n\n"); $pdf->SetFont("Arial","",10); if($text) $pdf->Write(5,"$text\n");
//Positionstitel
// Two-row column header for the positions table.
$pdf->SetFont("Arial","B",10); $pdf->Cell(59,5,"Produkt",0,0,"L"); $pdf->Cell(18,5,"Anz.",0,0,"R"); $pdf->Cell(35,5,"EP exkl.",0,0,"R"); $pdf->Cell(35,5,"EP Inkl.",0,0,"R"); $pdf->Cell(35,5,"Total",0,1,"R"); $pdf->Cell(59,5,"",0,0,"L"); $pdf->Cell(18,5,"Prod. 
Nr.",0,0,"R"); $pdf->Cell(35,5,"MWSt in %",0,0,"R"); $pdf->Cell(35,5,"MWSt Betrag",0,0,"R"); $pdf->Ln(); $pdf->SetFont("Arial","",9);
//Positionen
// Render each invoice position as a zebra-striped two-row entry and
// accumulate the net total ($total) and total VAT ($total_mwst).
while(list($text,$text1,$anzahl,$betrag,$waehrung_pos,$mwst,$key,$value)=mysql_fetch_row($rechnung_pos)) {
// The "Prod. Nr." column is only meaningful for product positions.
if($key!="produkt") $value="";
// Alternate the row background for readability.
if($bgcolor){ $bgcolor=0; } else { $bgcolor=1; }
$total+=($betrag*$anzahl); $total_mwst+=((($betrag/100)*$mwst)*$anzahl);
if(!$waehrung)$waehrung=1;
if($text1) $text1="\n".$text1;
// Remember Y so the multi-line description cell can be drawn over the same rows.
$y=$pdf->getY();
$pdf->Cell(77,5,$anzahl,0,0,"R",$bgcolor); $pdf->Cell(35,5,formatBetrag($betrag),0,0,"R",$bgcolor); $pdf->Cell(35,5,formatBetrag($betrag+(($betrag/100)*$mwst)),0,0,"R",$bgcolor); $pdf->Cell(35,5,"",0,1,"R",$bgcolor); $pdf->Cell(77,5,$value,0,0,"R",$bgcolor); $pdf->Cell(35,5,sprintf("%0.1f",$mwst)."%",0,0,"R",$bgcolor); $pdf->Cell(35,5,formatBetrag((($betrag/100)*$mwst)),0,0,"R",$bgcolor); $pdf->Cell(35,5,getWaehrung($waehrung_pos)." ".formatBetrag(waehrungRound(($betrag+(($betrag/100)*$mwst))*$anzahl,$waehrung_pos)),0,1,"R",$bgcolor);
$y2=$pdf->getY(); $pdf->setY($y);
$pdf->MultiCell(59,5,$text.$text1,0,"L",$bgcolor);
$y3 = $pdf->getY();
// If the description overflowed the two numeric rows, extend the stripe.
if($y3>$y2) { if($bgcolor) { $pdf->Rect(74,$y2,123,($y3-$y2),"F"); } $pdf->setY($y2+($y3-$y2)); } else { $pdf->setY($y2); } }
//Gutschriften
// Credit notes reduce the net total and the VAT total.
while(list($text,$betrag,$waehrung_pos,$mwst)=mysql_fetch_row($gutschriften)){
if($bgcolor){ $bgcolor=0; } else { $bgcolor=1; }
$total-=($betrag);
// FIX(review): this previously decremented $mwst_total, a variable that is
// never read -- the grand VAT total printed later is $total_mwst, so credit
// note VAT was silently ignored. Subtract from $total_mwst instead.
$total_mwst-=(($betrag/100)*$mwst);
if(!$waehrung)$waehrung=1;
$pdf->Cell(59,5,$text,0,0,"L",$bgcolor); $pdf->Cell(18,5,$anzahl,0,0,"R",$bgcolor); $pdf->Cell(35,5,formatBetrag($betrag),0,0,"R",$bgcolor); $pdf->Cell(35,5,formatBetrag($betrag+(($betrag/100)*$mwst)),0,0,"R",$bgcolor); $pdf->Cell(35,5,"",0,1,"R",$bgcolor); $pdf->Cell(59,5,"",0,0,"L",$bgcolor); $pdf->Cell(18,5,"",0,0,"R",$bgcolor); $pdf->Cell(35,5,sprintf("%0.1f",$mwst)."%",0,0,"R",$bgcolor); $pdf->Cell(35,5,formatBetrag((($betrag/100)*$mwst)),0,0,"R",$bgcolor); $pdf->Cell(35,5,getWaehrung($waehrung_pos)." 
".formatBetrag(waehrungRound(($betrag+(($betrag/100)*$mwst)),$waehrung_pos)),0,1,"R",$bgcolor); }
//Total
// Grand totals: net, VAT, and the amount to pay (net + VAT), all rounded
// per the invoice currency.
$pdf->Ln(); $pdf->SetFont("Arial","B",10); $pdf->Cell(59,5,"Nettobetrag",0,0,"L"); $pdf->Cell(53,5,"Totalbetrag MWSt",0,0,"R"); $pdf->Cell(70,5,"Zu überweisender Betrag",0,1,"R"); $pdf->Cell(59,5,getWaehrung($waehrung)." ".formatBetrag(waehrungRound($total,$waehrung)),0,0,"L"); $pdf->Cell(53,5,getWaehrung($waehrung)." ".formatBetrag(waehrungRound($total_mwst,$waehrung)),0,0,"R"); $pdf->Cell(70,5,getWaehrung($waehrung)." ".formatBetrag(waehrungRound($total+$total_mwst,$waehrung)),0,1,"R"); $pdf->Ln();
// Fixed footer: bank details at hard-coded page coordinates.
$pdf->Line(15,276,197,276); $pdf->SetFont("Arial","",7); $pdf->Text(15,280,"Bankverbindungen"); $pdf->Text(60.5,280,"UBS AG, Basel:"); $pdf->Text(106,280,"Konto Nr. 292-12250031.0"); $pdf->Text(151.5,280,"BLZ 3101"); $pdf->Text(60.5,284,"Deutsche Bank, Lörrach:"); $pdf->Text(106,284,"Konto Nr. 0 15 70 40 00"); $pdf->Text(151.5,284,"BLZ 683 700 24"); $pdf->Text(60.5,288,"MwSt.-Nr. (CH): 361 275"); $pdf->Text(106,288,"MwSt.-Nr. (D): 09039 / 62515")
//$_config_rechnung_text_footer
//Abschlusstext
// Count the footer's line breaks to decide whether it still fits on the page;
// otherwise start a new page before writing it.
$lines=0; for($i=0;$i<strlen($footer);$i++) { if(substr($footer,$i,1)=="\n") $lines++; }
if(($pdf->getY() + ($lines + 3.33))>250) { $pdf->addpage(); }
$pdf->SetFont("Arial","",10); $pdf->Write(5,"$footer");
$pdf->Output(); ?> <file_sep>/modules/rechnungenOloid/erstellen.php <?
// erstellen.php: interactive form to assemble invoice positions for a Kontakt.
// On submit ($erstellen), validates the product rows, replaces the Kontakt's
// unassigned positions (rechnung IS NULL) and redirects to erstellen1.php.
// WARNING(review): all request values are interpolated into SQL unescaped.
include("../../inc/config.inc.php"); include("../../inc/func.inc.php"); include("../produkte/func.inc.php");
if($erstellen) { $anz_objekte=count($produkt);
if(!$kontakt) { $err="Bitte geben Sie einen Kontakt an"; }
else {
// Per-row validation: product must exist and have price tier + quantity.
for($i=0;$i<$anz_objekte;$i++) { $query = mysql_query("SELECT COUNT(*) FROM Produkte WHERE nr_int='".$produkt[$i]."'"); if($produkt[$i] && mysql_result($query,0,0)==0) { $err = "Das Produkt '".$produkt[$i]."' existiert nicht"; } else if($produkt[$i] && !$preis[$i]) { $err="Geben Sie für Artikel ".($i+1)." 
bitte einen Preis an"; } else if($produkt[$i] && !$anzahl[$i]) { $err="Geben Sie für Artikel ".($i+1)." bitte eine Stückzahl an"; } if($produkt[$i]) $has_artikel=1; }
if(!$has_artikel) { $err="Bitte geben Sie mindestens einen Artikel an"; } }
if(!$err) {
// Replace any previously staged (unassigned) positions for this Kontakt.
$query=mysql_query("DELETE FROM Rechnungen_positionen WHERE kontakt='$kontakt' AND rechnung IS NULL");
for($i=0;$i<$anz_objekte;$i++) { if($produkt[$i]) {
// Pull product texts plus the chosen price tier column (preis1..preis4).
$query=mysql_query("SELECT text1,text2,waehrung,preis".$preis[$i].",nr_int FROM Produkte WHERE nr_int='".$produkt[$i]."'");
if(!($err=mysql_error())) { list($text1,$text2,$waehrung,$betrag,$nr_int)=mysql_fetch_row($query);
// Currency comes from the tier's configured currency variable.
eval("\$waehrung = \$_config_produkte_preis".$preis[$i]."_waehrung;");
// A manually entered price overrides the tier price.
if($preis1[$i]) { $betrag=$preis1[$i]; }
$query=mysql_query("INSERT INTO Rechnungen_positionen(kontakt,rechnung,text,text1,anzahl,betrag,waehrung,datum,mwst,`key`,`value`) VALUES('$kontakt',NULL,'$text1','$text2','".$anzahl[$i]."','$betrag','$waehrung',NOW(),'".$mwst[$i]."','produkt','$nr_int')");
// In popup mode the opener is redirected via onLoad instead of a header.
if($query) { if($popup) { $showErstellen=1; } else { header("Location: erstellen1.php?id=$kontakt"); } } else { $err=mysql_error(); } } else { $err=mysql_error(); } } }
// Optional free-form position (currency fixed to 1 = base currency).
if($position_anzahl) { $query=mysql_query("INSERT INTO Rechnungen_positionen(kontakt,rechnung,text,text1,anzahl,betrag,waehrung,datum,mwst) VALUES('$kontakt',NULL,'".$position_text."','','$position_anzahl','$position_betrag','1',NOW(),'".$position_mwst."')"); } } }
// Row management: $action is "add" or "del<n>" set by the JS submit_form().
if(!$rows) { $rows=3; }
if(substr($action,0,3)=="add") { $rows++; } else if(substr($action,0,3)=="del") { $delete = trim(substr($action,3)); $rows--; } ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="../../main.css" type=text/css> <script src="../../inc/functions.js" type="text/javascript" language="javascript"></script> <script language="JavaScript" type="text/javascript"> <!--
// Stores the requested action in the hidden field and posts the form.
function submit_form(arg){ document.getElementsByName('action')[0].value = arg; document.Formular.submit(); }
function popup(file) { 
window.open(file,'find','width=300,height=650,left=30,top=50,resizable=yes,scrollbars=no'); }
// setMwst/setPreis: PHP emits one assignment per visible row so the header
// controls can propagate a value to every row at once.
function setMwst(value) { <? for($i=0;$i<$rows;$i++) { print "document.getElementsByName('mwst[$i]')[0].value=value;"; } ?> }
function setPreis(value) { <? for($i=0;$i<$rows;$i++) { print "document.getElementsByName('preis[$i]')[0].value=value;"; } ?> }
//-->
</script> </head> <body<? if($showErstellen) print " onLoad=\"javascript:opener.location.href='erstellen1.php?id=$kontakt';\""; ?>> <p class=titel>Rechnungen:Rechnung erstellen</p> <a href="#" onclick="javascript:window.open('<?=$PHP_SELF?>?popup=1','rechnung_erstellen','fullscreen=no,scrollbars=auto,status=yes,menubar=no,toolbar=no,width=600,height=400,resizable=yes');">In neuem Fenster</a><br><br> <?
if($err) { print "<b>Fehler:</b> $err<br><br>"; }
// Form header: Kontakt picker plus global price-tier / MWSt propagation controls.
print "<form method=post name=\"Formular\" action=\"$PHP_SELF\"> <input type=hidden name=rows value=\"$rows\"> <input type=hidden name=action> <input type=hidden name=popup value=\"$popup\"> <table border=0> <tr> <td width=100>Kontakt</td> <td>&nbsp;</td> <td><input type=text name=kontakt value=\"$kontakt\" id=kontakt> <a href=\"javascript:popup('findkontakt.php');\">S</a></td> <td>&nbsp;</td> <td>".str_replace("<SELECT","<SELECT onChange=\"javascript:setPreis(this.value);\"",str_replace("120px;","120px;background-color:#$_config_tbl_bgcolor1;",getPreiseList("","",120,"Standard")))."</td> <td><input style=\"width:50px;background-color:#$_config_tbl_bgcolor1\" onChange=\"javascript:setMwst(this.value);\"></td> </tr>";
// $i walks the submitted arrays, $ii counts rendered rows (they diverge
// after a row deletion).
for($i=0,$ii=0;$ii<$rows;$i++) {
if($i == $delete && isset($delete)) { continue; }
// FIX(review): was isset($anzah[$i]) -- a typo for $anzahl, so the isset()
// guard was always false and the default quantity overwrote a deliberately
// emptied field on every round trip. Apply the default only when the row
// was never submitted.
if(!$anzahl[$i] && !isset($anzahl[$i])) { $anzahl[$i]=$_config_produkte_default_num_artikel; }
// Refresh the manual price from the selected tier for known products.
// NOTE(review): mixed $i/$ii indexing below is kept as-is; after a deletion
// these refer to different rows -- verify intent before touching.
if($produkt[$i] && $preis[$i]) { $query=mysql_query("SELECT preis".$preis[$ii]." 
FROM Produkte WHERE nr_int='".$produkt[$i]."'"); $preis1[$ii]=mysql_result($query,0,0); }
// One editable row: quantity, product nr (with search popup), manual price,
// price-tier select (clears the manual price on change), MWSt, and +/- links.
print "<tr> <td width=100>Position ".($ii+1)."</td> <td><input type=text size=4 maxlength=11 name=\"anzahl[$i]\" value=\"".$anzahl[$i]."\"> <td><input type=text name=produkt[$ii] value=\"".$produkt[$i]."\"> <a href=\"javascript:popup('findprodukt.php?field=$ii')\">S</a></td> <td><input type=input name=\"preis1[$ii]\" value=\"$preis1[$i]\" style=\"width:50px\"> <td>".str_replace("<SELECT","<SELECT onchange=\"javascript:document.getElementsByName('preis1[$i]')[0].value='';\" ",getPreiseList("preis[$ii]",$preis[$i],120,"Bitte Auswählen"))."</td> <td><input type=input name=\"mwst[$ii]\" value=\"$mwst[$i]\" style=\"width:50px\"> ";
if($rows>1) { print "<a href=\"#\" onclick=\"javascript:submit_form('del$ii')\">-</a>"; }
if(($ii+1)==$rows) { print " <a href=\"#\" onclick=\"javascript:submit_form('add')\">+</a>"; }
print "</td> </tr>"; $ii++; }
// Free-form position row plus submit/refresh buttons.
print "<tr> <td>Freie Position:</td> <td><input type=text size=4 maxlength=11 name=\"position_anzahl\" value=\"".$position_anzahl."\"> <td><input type=text name=\"position_text\" value=\"".$position_text."\"></td> <td>&nbsp;</td> <td><input type=text style=\"width:50px\" name=\"position_betrag\" value=\"".$position_betrag."\"></td> <td><input type=input name=\"position_mwst\" value=\"".$position_mwst."\" style=\"width:50px\"></td> </tr> <tr> <td width=100>&nbsp;</td> <td colspan=3><input type=submit value=\"Erstellen\" name=erstellen> <input type=button value=\"Aktualisieren\" onclick=\"javascript:submit_form('upd')\"> </tr> </table> </form>"; ?> </body> </html> <file_sep>/modules/rechnungen/gutschriften_edit.php <? 
// gutschriften_edit.php: edit form for a single credit note ("Gutschrift").
// Inputs via register_globals: $id (credit note), $back (return URL), and on
// $submit the edited fields ($kontakt, $text, $betrag, $aktiv, $datum).
// WARNING(review): request values are interpolated into SQL unescaped.
include("../../inc/config.inc.php"); include("../../inc/func.inc.php");
if(!$back) { $back="gutschriften.php"; }
if($submit) {
// "aktiv" in the form maps to the auszahlen column.
$query=mysql_query("UPDATE Rechnungen_gutschriften SET kontakt='$kontakt',text='$text',betrag='$betrag',auszahlen='$aktiv',datum='".date_CH_to_EN($datum)."' WHERE id='$id'");
if($query) { header("Location: ".urldecode($back)); } else { $error=mysql_error(); } } ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="../../main.css" type=text/css> </head> <body> <p class=titel>Rechnungen:Gutschrift Editieren</p> <?
if($error){ print "<b>Fehler:</b> $error<br><br>"; }
$query=mysql_query("SELECT kontakt,DATE_FORMAT(datum,'$_config_date'),betrag,text,auszahlen FROM Rechnungen_gutschriften WHERE id='$id'");
// FIX(review): the SELECT returns five columns but list() previously bound
// only four, so $aktiv never received the stored auszahlen value and the
// Aktiv dropdown did not reflect the database state.
list($kontakt,$datum,$betrag,$text,$aktiv)=mysql_fetch_row($query);
print "<form method=post action=\"$PHP_SELF?id=$id&back=".urlencode($back)."\"> <input type=hidden name=referer value=\"".$GLOBALS["HTTP_REFERER"]."\"> <table border=0 cellpadding=0 cellspacing=0> <tr> <td width=150>Gutschrift zugunsten:</td> <td>".getKontakteList("kontakt",$kontakt,250,"Bitte Auswählen")."</td> </tr> <tr> <td width=150 valign=top>text:</td> <td><textarea height=4 style=\"width:250;\" name=\"text\">$text</textarea></td> </tr> <tr> <td width=150>Betrag:</td> <td><input type=text name=betrag style=\"width:250;\" value=\"".formatBetrag($betrag)."\"></td> </tr> <tr> <td width=150>Datum:</td> <td><input type=text name=datum style=\"width:250;\" value=\"$datum\"></td> </tr> <tr> <td>Aktiv</td> <td> <select name=aktiv> <option value=1";
if($aktiv==1){ print " SELECTED"; }
print">Ja</option> <option value=0";
if($aktiv==0){ print " SELECTED"; }
print ">Nein</option> </select> </td> <tr> </table><br><br>\n <input type=submit name=submit value=\"Ändern\"> <input type=button value=\"Löschen\" onclick=\"javascript:location.href='gutschriften_delete.php?id=$id&back=".urlencode($back)."&backno=".urlencode($REQUEST_URI)."'\">\n</form>\n"; ?> </body> 
</html> <file_sep>/modules/rechnungenOloidOld/bezahlt.php <?
// bezahlt.php: marks invoice $id as paid on $datum, freezes the FX rate at
// payment time (fx1) and, if configured, pushes booking records into the
// external accounting database ("Buha") Queue table.
// WARNING(review): request values are interpolated into SQL unescaped.
include("../../inc/config.inc.php"); include("../../inc/func.inc.php");
if(!$back){ $back="offene.php"; }
if(!$backno){ $backno=$back; }
if($submit) {
// Look up the invoice currency and capture today's exchange rate as fx1.
$query=mysql_query("SELECT waehrung FROM Rechnungen WHERE id='$id'"); $waehrung=mysql_result($query,0,0); $fx1 = getFx($waehrung,1);
$query=mysql_query("UPDATE Rechnungen SET bezahlt='".date_CH_to_EN($datum)."',fx1='$fx1' WHERE id='$id'");
if(!($error=mysql_error())) {
//Gutschriften
// NOTE(review): releasing dependent credit notes is intentionally disabled.
/* if(isModule("domains")) { $query=mysql_query("SELECT Rechnungen_positionen.id FROM Rechnungen_positionen,Rechnungen WHERE Rechnungen.id = '$id' AND Rechnungen_positionen.rechnung = Rechnungen.id"); while(list($pos_id)=mysql_fetch_row($query)) { $query2=mysql_query("UPDATE Rechnungen_gutschriften SET auszahlen=1 WHERE abhaengig='$pos_id'"); } }*/
//Schnitstellen Buha
// Gather amounts and connect to the accounting DB only when a booking is due.
// $kursdifferenz = FX gain/loss between invoice rate (fx) and payment rate (fx1).
if($_config_bezahlte_rechnungen_buchen || $_config_kursveraenderungen_buchen) { $query=mysql_query("SELECT betreff,waehrung,fx,fx1 FROM Rechnungen WHERE id='$id'",$conn); list($text,$waehrung,$fx,$fx1)=mysql_fetch_row($query); $query=mysql_query("SELECT sum(betrag*fx) FROM Rechnungen_positionen WHERE rechnung='$id'",$conn); $betrag=mysql_result($query,0,0); $kursdifferenz = ($fx-$fx1)*$betrag; $buhadb = mysql_connect($_config_mysql_buchhaltung_host,$_config_mysql_buchhaltung_user,$_config_mysql_buchhaltung_password); mysql_select_db($_config_mysql_buchhaltung_db,$buhadb); }
// Book the paid invoice itself.
if($_config_bezahlte_rechnungen_buchen) { $query=mysql_query("INSERT INTO Queue(datum,beschreibung,kt_haben,kt_soll,betrag,waehrung,kurs,mwst,mwst_feld,belegnr) VALUES('".date_CH_to_EN($datum)."','$text','$_config_bezahlte_rechnungen_haben','$_config_bezahlte_rechnungen_soll','$betrag','$waehrung','$fx','0',NULL,'Godmode Import')",$buhadb); $error=mysql_error(); }
// Book the FX difference when the rate changed between invoicing and payment.
if($_config_kursveraenderungen_buchen) { if($fx != $fx1) { $query=mysql_query("INSERT INTO 
Queue(datum,beschreibung,kt_haben,kt_soll,betrag,waehrung,kurs,mwst,mwst_feld,belegnr,no_ask) VALUES('".date_CH_to_EN($datum)."','Kursveränderungen: $text','$_config_kursveraenderungen_haben','$_config_kursveraenderungen_soll','$kursdifferenz','1','1','0',NULL,'Godmode Import','1')",$buhadb); $error=mysql_error(); } }
if(!$error) { header("Location: $back"); } } } ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="../../main.css" type=text/css> </head> <body> <p class=titel>Rechnungen:Rechnung als Bezahlt Markieren</p> <?
if($err) { print "<b>Fehler:</b> $err"; }
// Refuse to mark an invoice without positions as paid.
$query=mysql_query("SELECT text, betrag FROM Rechnungen_positionen WHERE rechnung='$id'");
if(@mysql_num_rows($query)<1) { print "<b>Fehler</b>: Rechnung hat keine Positionen<br><br>\n"; }
else {
// Payment-date form, defaulting to today.
// NOTE(review): the form tag uses value= instead of action= -- it posts back
// to the current URL anyway; verify before changing.
print "<form method=post value=\"$PHP_SELF?id=$id&back=".urlencode($back)."\"> Datum : <input type=text name=datum value=\"".date("d.m.Y")."\"><br><br> <input type=submit name=submit value=\"Ausführen\"> <input type=button value=\"Zurück\" onclick=\"javascript:location.href='$backno'\"> </form>"; } ?> </body> </html> <file_sep>/modules/kontakte/modules/adresse.inc.php <?
// adresse.inc.php: address sub-form of the Kontakt editor. Expects $kontakt
// (row array) in scope. Field labels switch between private person
// (Name/Vorname) and company (Firma/Zusatz) based on the salutation.
if(anredeIsPrivate($kontakt['anrede'])) { $text_text1="Name"; $text_text2="Vorname"; } else { $text_text1="Firma"; $text_text2="Zusatz"; }
print "<table border=0 cellpadding=0 cellspacing=0> <tr> <td colspan=2><b>Adresse</b></td> </tr> <tr> <td width=100>Anrede</td> <td> <SELECT name=kontakt_anrede onChange=\"javascript:changeAnrede(this.value)\" style=\"width:200px\">";
// Salutation dropdown from the Kontakte_anreden table.
$query=mysql_query("SELECT id,anrede FROM Kontakte_anreden");
while(list($anrede_id,$anrede_text)=mysql_fetch_row($query)) { if($anrede_id == $kontakt['anrede']) print " <option value=\"$anrede_id\" SELECTED>$anrede_text</option>\n"; else print " <option value=\"$anrede_id\">$anrede_text</option>\n"; }
print" </SELECT> </td> </tr> <tr> <td width=100>$text_text1</td> <td><input type=text id=\"firma\" name=\"kontakt_firma\" value=\"".$kontakt['firma']."\" style=\"width:200px;\" maxlength=50></td> </tr> <tr> <td width=100>$text_text2</td> <td><input type=text name=\"kontakt_firma2\" value=\"".$kontakt['firma2']."\" style=\"width:200px;\" maxlength=50></td> </tr> <tr> <td width=100>Adresse</td> <td><input type=text name=\"kontakt_adresse\" value=\"".$kontakt['adresse']."\" style=\"width:200px;\" maxlength=50></td> </tr> <tr> <td width=100>Adresse 2</td> <td><input type=text name=\"kontakt_adresse2\" value=\"".$kontakt['adresse2']."\" style=\"width:200px;\" maxlength=50></td> </tr> <tr> <td width=100>PLZ / Ort</td> <td><input type=text name=\"kontakt_plz\" value=\"".$kontakt['plz']."\" style=\"width:45px;\" maxlength=10> <input type=text name=\"kontakt_ort\" value=\"".$kontakt['ort']."\" style=\"width:150px;\" maxlength=50></td> </tr> <td width=100>Land</td> <td><input type=text name=\"kontakt_land\" value=\"".$kontakt['land']."\" style=\"width:200px;\" maxlength=20></td> </tr> </table>"; ?> <file_sep>/modules/rapportierung/delete.php <? 
// rapportierung/delete.php: confirm-and-delete page for a single report row.
// $id selects the row to show, $del (set via the "Ja" link) performs the delete.
session_start(); include("../../inc/config.inc.php"); include("../../inc/func.inc.php"); include("func.inc.php");
if(!$back) { $back="show.php"; }
if(!$backno) { $backno=$back; }
if($del) { $query=mysql_query("DELETE FROM Rapportierung WHERE id='$del'"); if($query) { header("Location: ".urldecode($back)); } else { $err=mysql_error(); } } ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="../../main.css" type=text/css> </head> <body> <p class=titel>Rapportierung:Rapport Löschen</p> <?
if($err){ print "<b>Fehler:</b> $err<br><br>"; }
// Show a truncated preview of the report text in the confirmation question.
$query=mysql_query("SELECT text,DATE_FORMAT(date,'$_config_date') FROM Rapportierung WHERE id='$id'"); list($text,$date)=mysql_fetch_row($query);
if(strlen($text)>200) $text=substr($text,0,200)."...";
// NOTE(review): $back is urldecode()d into the href here (the edit pages use
// urlencode) -- consistent across all three delete pages in this module set,
// so presumably deliberate; verify before changing.
print "Möchten Sie den Rapport $text vom $date wirklich Löschen?<br><br> <a href=\"$PHP_SELF?del=$id&back=".urldecode($back)."\">[ Ja ]</a> <a href=\"".urldecode($backno)."\">[ Nein ]</a>"; ?> </body> </html> <file_sep>/modules/rechnungen/position_delete.php <?
// position_delete.php: confirm-and-delete for one invoice position. When the
// position was generated by the domains module, the domain's bezahltBis date
// is rolled back to the period start parsed from text1 ("<from> - <to>").
include("../../inc/config.inc.php"); include("../../inc/func.inc.php"); include("func.inc.php");
if(!$back) { $back="positionen.php"; }
if(!$backno) { $backno=$back; }
if($del) {
if(isModule("domains")) { $query=mysql_query("SELECT `key`,`value`,`text1` FROM `Rechnungen_positionen` WHERE `id` = '$del'"); list($key,$value,$text1)=mysql_fetch_row($query); if($key=="domains"){ $datum = trim(substr($text1,0,strpos($text1,"-"))); $query=mysql_query("UPDATE Domains SET bezahltBis='".date_CH_to_EN($datum)."' WHERE id='$value'"); } }
$query=mysql_query("DELETE FROM Rechnungen_positionen WHERE id='$del'");
if(!($error=mysql_error())) { header("Location: ".urldecode($back)); } } ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="../../main.css" type=text/css> </head> <body> <p class=titel>Rechnungen:Position Löschen</p> <?
if($err){ print "<b>Fehler:</b> $err<br><br>"; }
$query=mysql_query("SELECT text FROM Rechnungen_positionen WHERE id='$id'"); $text=mysql_result($query,0,0);
print "Möchten Sie die Position '$text' wirklich Löschen?<br><br> <a href=\"$PHP_SELF?del=$id&back=".urldecode($back)."\">[ Ja ]</a> <a href=\"".urldecode($backno)."\">[ Nein ]</a>"; ?> </body> </html> <file_sep>/modules/produkte/delete.php <?
// produkte/delete.php: confirm-and-delete page for a product row.
include("../../inc/config.inc.php"); include("../../inc/func.inc.php");
if(!$back) { $back="show.php"; }
if(!$backno) { $backno=$back; }
if($del) { $query=mysql_query("DELETE FROM Produkte WHERE id='$del'"); if($query) { header("Location: ".urldecode($back)); } else { $err=mysql_error(); } } ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="../../main.css" type=text/css> </head> <body> <p class=titel>Produkte:Produkt Löschen</p> <?
if($err){ print "<b>Fehler:</b> $err<br><br>"; }
// Confirmation shows the product text plus its internal number.
$query=mysql_query("SELECT text1,nr_int FROM Produkte WHERE id='$id'");
print "Möchten Sie das Produkt '".mysql_result($query,0,0)." (".mysql_result($query,0,1).")' wirklich Löschen?<br><br> <a href=\"$PHP_SELF?del=$id&back=".urldecode($back)."\">[ Ja ]</a> <a href=\"".urldecode($backno)."\">[ Nein ]</a>"; ?> </body> </html> <file_sep>/modules/kontakte/modules/kontoinformationen.inc.php <? 
// kontoinformationen.inc.php: bank-account sub-form of the Kontakt editor
// (Konto, Konto Nr., BLZ, Swift, Iban). Expects $kontakt (row array) in scope.
print "<table border=0 cellpadding=0 cellspacing=0> <tr> <td colspan=2><b>Kontoinformationen</b></td> </tr> <tr> <td width=100>Konto</td> <td><input type=text name=\"kontakt_konto\" value=\"".$kontakt['konto']."\" style=\"width:200px;\" maxlength=50></td> </tr> <tr> <td width=100>Konto Nr.</td> <td><input type=text name=\"kontakt_kontonr\" value=\"".$kontakt['kontonr']."\" style=\"width:200px;\" maxlength=50></td> </tr> <tr> <td width=100>BLZ</td> <td><input type=text name=\"kontakt_blz\" value=\"".$kontakt['blz']."\" style=\"width:200px;\" maxlength=50></td> </tr> <tr> <td width=100>Swift</td> <td><input type=text name=\"kontakt_swift\" value=\"".$kontakt['swift']."\" style=\"width:200px;\" maxlength=50></td> </tr> <tr> <td width=100>Iban</td> <td><input type=text name=\"kontakt_iban\" value=\"".$kontakt['iban']."\" style=\"width:200px;\" maxlength=50></td> </tr> </table>"; ?> <file_sep>/modules/kontakte/menu.inc.php Kontakte Anzeigen/Editieren=show.php Hinzuf&uuml;gen=kontakt.php?id=0 <file_sep>/modules/kontakte/modules/gebiete.inc.php <?
// gebiete.inc.php: "Gebiete Kontaktpersonen" sub-form -- up to three
// configurable responsibility areas, each mapped to one of the Kontakt's
// contact persons. Skipped for private persons and for new records ($id==0).
if(!anredeIsPrivate($kontakt['anrede']) && $id !=0) {
print "<table border=0 cellpadding=0 cellspacing=0> <tr> <td colspan=2><b>Gebiete Kontaktpersonen</b></td> </tr>";
if($_config_kontakte_gebiet1) { print "<tr> <td width=100>$_config_kontakte_gebiet1:</td> <td>".getKontaktpersonenList("kontakt_kontakt1",$kontakt['kontakt1'],200,"Bitte Auswählen",$kontakt['id'])."</td> </tr>"; }
if($_config_kontakte_gebiet2) { print "<tr> <td width=100>$_config_kontakte_gebiet2:</td> <td>".getKontaktpersonenList("kontakt_kontakt2",$kontakt['kontakt2'],200,"Bitte Auswählen",$kontakt['id'])."</td> </tr>"; }
if($_config_kontakte_gebiet3) { print "<tr> <td width=100>$_config_kontakte_gebiet3:</td> <td>".getKontaktpersonenList("kontakt_kontakt3",$kontakt['kontakt3'],200,"Bitte Auswählen",$kontakt['id'])."</td> </tr>"; }
print "</table>"; } ?> <file_sep>/modules/rechnungenOloidOld/findkontakt.php <?
// findkontakt.php: popup search dialog. Clicking a result row writes the
// Kontakt id into the opener's #kontakt field (setValue) and closes the popup.
// Inputs via register_globals: $term (search string), $start (paging offset).
include("../../inc/config.inc.php"); include("../../inc/func.inc.php"); ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="../../main.css" type=text/css> <script src="../../inc/functions.js" type="text/javascript" language="javascript"></script> <script type="text/javascript" language="javascript"> <!--
function setValue(val){ opener.document.getElementById('kontakt').value = val; self.close(); }
//-->
</script> </head> <body onLoad="document.getElementById('term').focus()"> <p class=titel>Kontakt Suchen</p> <form method=get action="<?=$PHP_SELF?>"> <input type=text name=term id=term value="<?=$term?>"> <input type=submit name=search value="Suchen"> </form> <?
if(!$start){ $start=0; }
// Active contacts, optionally filtered by the search term, paged by
// $_config_entrysperpage; $attr carries the term through the paging links.
if($term){ $query=mysql_query("SELECT id,firma FROM Kontakte kon WHERE aktiv=1 AND ".formatSearchString($term,array("id","firma","firma2","text"))." ORDER BY firma LIMIT $start,$_config_entrysperpage"); $attr="&term=$term"; }
else { $query=mysql_query("SELECT id,firma FROM Kontakte kon WHERE aktiv=1 ORDER BY firma LIMIT $start,$_config_entrysperpage"); }
echo mysql_error();
if(@mysql_num_rows($query)>0) {
// NOTE(review): $field_names/$field_size are not initialized in this file,
// so this header loop normally renders nothing -- verify against includes.
print "<table border=0 cellpadding=3 cellspacing=0 width=\"95%\"> <tr>";
for($i=0;$field_names[$i];$i++) { print "<td style=\"font-weight:bold;"; if($field_size[$i]!="*"){ print "width:".$field_size[$i]."px;"; } print "\">".$field_names[$i]."</td>\n"; }
print "</tr>\n";
// Zebra-striped, clickable result rows.
for($i=0;list($id,$firma)=mysql_fetch_array($query);$i++) { if(($i%2)==0){ $bgcolor=$_config_tbl_bgcolor1; } else { $bgcolor=$_config_tbl_bgcolor2; } print "<tr onmouseover=\"setPointer(this, 'over', '#$bgcolor', '#$_config_tbl_bghover', '')\" onmouseout=\"setPointer(this, 'out', '#$bgcolor', '#$_config_tbl_bghover', '')\" onclick=\"javascript:setValue('$id');\"> <td valign=top bgcolor=\"#$bgcolor\"$style>$firma</td>"; print "</tr>\n"; }
// Paging links (previous page when not at the start).
print "<tr> <td colspan=2 align=center>";
if($start>0){ print "<a href=\"$PHP_SELF?start=".($start-$_config_entrysperpage)."$attr\"><<<</a>"; } 
// Next-page link only when more active contacts remain beyond this page.
if($term){ $query=mysql_query("SELECT count(*) FROM Kontakte WHERE aktiv=1 AND ".formatSearchString($term,array("firma","firma2"))); }
else { $query=mysql_query("SELECT count(*) FROM Kontakte WHERE aktiv=1"); }
if(($start+$_config_entrysperpage+1)<=mysql_result($query,0,0)) { if($start>0){ print " | "; } print "<a href=\"$PHP_SELF?start=".($start+$_config_entrysperpage)."$attr\">>>></a>"; }
print "</td> </tr> </table>\n"; }
else { print "Keine Kontakte gefunden"; } ?> </body> </html> <file_sep>/modules/rechnungenOloid/valuta_editieren.php <?
// valuta_editieren.php: edit the value ("bezahlt") date of a paid invoice.
// Inputs via register_globals: $id, $datum (CH format, on $submit), $back.
// WARNING(review): request values are interpolated into SQL unescaped.
include("../../inc/config.inc.php"); include("../../inc/func.inc.php");
if(!$back){ $back="bezahlte.php"; }
if($submit) { $query=mysql_query("UPDATE Rechnungen SET bezahlt='".date_CH_to_EN($datum)."' WHERE id='$id'"); if(!($err=mysql_error())) { header("Location: $back"); } } ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="../../main.css" type=text/css> </head> <body> <p class=titel>Rechnungen:Valuta Datum Editieren</p> <?
if($err) { print "<b>Fehler:</b> $err"; }
$query=mysql_query("SELECT DATE_FORMAT(bezahlt,'$_config_date') FROM Rechnungen WHERE id='$id'");
if(@mysql_num_rows($query)<1) { print "<b>Fehler</b>: Rechnung nicht Gefunden<br><br>\n"; }
else { $bezahlt=mysql_result($query,0,0);
// NOTE(review): form tag uses value= instead of action= (same quirk as
// bezahlt.php) -- it posts back to the current URL; verify before changing.
print "<form method=post value=\"$PHP_SELF?id=$id&back=".urlencode($back)."\"> Datum : <input type=text name=datum value=\"$bezahlt\"><br><br> <input type=submit name=submit value=\"Ändern\"> <input type=button value=\"Zurück\" onclick=\"javascript:location.href='$back'\"> </form>"; } ?> </body> </html> <file_sep>/modules/statistiken/func.inc.php <? 
function getAlignment($ueberschriften,$i) { if(substr(trim($ueberschriften[$i]),0,1)=="<" ) { return "right"; } else if(substr(trim($ueberschriften[$i]),0,1)==">" ) { return "left"; } else { return "left"; } } ?> <file_sep>/modules/domains/menu.inc.php Domains Anzeigen/Editieren=show.php Hinzuf&uuml;gen=add.php Whois=whois.php Verrechnen=verrechnen.php <file_sep>/modules/domains/verrechnen1.php <? include("../../inc/config.inc.php"); include("../../inc/func.inc.php"); include("func.inc.php"); $query=mysql_query("SELECT dom.domain, dom.kontakt,dom.betrag,dom.waehrung,abr.monate FROM Domains dom,Zahlungsarten abr WHERE dom.id='$id' AND abr.id = dom.abrechnungsart"); list($domain,$kontakt,$betrag,$waehrung,$abr_mnt)=mysql_fetch_row($query); $bezahltBis=domain_bezahlt_bis($id); $raten=ceil(diff_month(strtotime("+$_config_domains_verrechnen_tage_vorher days"),strtotime(date_CH_to_EN($bezahltBis)))/12); $betrag=($betrag * $raten); $tmp_monate=$abr_mnt * $raten; $text="Hosting $domain"; if(date("d",strtotime(date_CH_to_EN($bezahltBis)))=="1"){ $zahlenBis=date("t.m.Y",strtotime("+".($tmp_monate-1)." 
month",strtotime(date_CH_to_EN($bezahltBis)))); } else { //Wüster PHP Date Bug if(date("d",strtotime(date_CH_to_EN($bezahltBis)))==31) { $zahlenBis=date("m.Y",strtotime("+$tmp_monate month",strtotime("-1 day",strtotime(date_CH_to_EN($bezahltBis))))); $zahlenBis="31.".$zahlenBis; } else { $zahlenBis=date("t.m.Y",strtotime("+$tmp_monate month",strtotime(date_CH_to_EN($bezahltBis)))); } } $text1="$bezahltBis - $zahlenBis"; $query=mysql_query("INSERT INTO Rechnungen_positionen(kontakt,rechnung,text,text1,betrag,mwst,waehrung,datum,`key`,value) VALUES('$kontakt',NULL,'$text','$text1','$betrag','$_config_domains_mwst','$waehrung',NOW(),'domains','$id')"); if(!($error=mysql_error())) { $query=mysql_query("UPDATE Domains SET bezahltBis='".date_CH_to_EN($zahlenBis)."' WHERE id='$id'"); if(!($error=mysql_error())) { header("Location: verrechnen.php"); } } ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="../../main.css" type=text/css> <script src="../../inc/functions.js" type="text/javascript" language="javascript"></script> </head> <body> <p class=titel>Domains:Verrechnen</p> <table width="100%" height="100%" border=0> <tr> <td align=center valign=middle> <? if($error){ print "<b>Fehler:</b> $error"; } ?> </td> </tr> </table> </body> </html> <file_sep>/modules/rapportierung/add.php <? 
include("../../inc/config.inc.php"); include("../../inc/func.inc.php"); include("func.inc.php"); if($submit) { if(!$employee) { $error="Bitte geben Sie einen Mitarbeiter an"; } else if(!$kontakt) { $error="Bitte geben Sie einen Kunden an"; } else if(!$code) { $error="Bitte geben Sie einen Code an"; } else if(!isset($time_clearable)) { $error="Bitte geben Sie die verrechenbaren Stunden an"; } else if(!$time) { $error="Bitte geben Sie die effektiven Stunden an"; } else if(!$date) { $error="Bitte geben Sie das Datum an"; } else if($time_clearable > $time) { $error="Die verrechenbare Zeit darf nicht grösser sein als die Effektive"; } else { if(!$time) $time=$time_clearable; $query=mysql_query("INSERT INTO Rapportierung(employee,kontakt,code,ansprechperson,text,date,time,time_clearable) VALUES('$employee','$kontakt','$code','$ansprechperson','$text','$date','$time','$time_clearable')"); if(mysql_error()) $error=mysql_error(); else{ $id=NULL; $kontakt=NULL; $code=NULL; $time_clearable=NULL; $time=NULL; $date=NULL; $date=NULL; $ansprechperson=NULL; $msg="Rapport erstellt"; } } } if(!$date) $date=date("d.m.Y"); if(!$employee) $employee=getHttpUserId(); ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="../../main.css" type=text/css> </head> <body onLoad="<?=error($error)?><?=alert($msg)?>document.getElementsByName('kontakt')[0].focus()"> <p class=titel>Rapportierung:Rapport erstellen</p> <? 
print "<form method=post action=\"$PHP_SELF?id=$id\">\n"; print "<table border=0> <tr> <td width=100>Intern:</td> <td>".getEmpList("employee",$employee,250,"Bitte auswählen")."</td> </tr> <tr> <td width=100>Kontakt:</td> <td>".getKontakteList("kontakt",$kontakt,250,"Bitte auswählen")."</td> </tr> <tr> <td width=100>Code:</td> <td>".getRapportCodeList("code",0,$code,250,"Bitte Auswählen")."</td> </tr> <tr> <td width=100 valign=top>Text:</td> <td><textarea name=text style=\"width:250px;height:100px;\">$text</textarea></td> </tr> <tr> <td width=100>Ansprechperson:</td> <td><input type=text maxlength=100 name=ansprechperson value=\"$ansprechperson\" style=\"width:250px\"></td> </tr> <tr> <td width=100>Zeit Verrechenbar:</td> <td><input type=text maxlength=255 name=time_clearable value=\"$time_clearable\" style=\"width:80px\"></td> </tr> <tr> <td width=100>Zeit Effektiv:</td> <td><input type=text maxlength=255 name=time value=\"$time\" style=\"width:80px\"></td> </tr> <tr> <td width=100>Datum:</td> <td><input type=text maxlength=255 name=date value=\"$date\" style=\"width:80px\"></td> </tr> </table>\n"; print "<input type=submit name=submit value=\"Hinzufügen\">\n</form>\n"; ?> </body> </html> <file_sep>/modules/timesheet/ajax.php <?php include("./inc/func.inc.php"); if (!empty($_POST['action'])){ echo $_POST['action'] == "add" ? 
addCustomer($_POST['id']) : removeCustomer($_POST['id']); } if (!empty($_POST['change'])){ echo setRunning($_POST['id']); } if (!empty($_POST['updateTable'])){ updateTable(true); } if (!empty($_POST['edit'])){ getMessureById($_POST['id']); } if (!empty($_POST['saveChanges'])){ saveChanges(); } ?><file_sep>/modules/timesheet/menu.inc.php Zeiterfassung Zeiterfassung (Popup)=index.php?nolist=true" target="blank Zeiterfassung=index.php Kundenwahl=customer.php Export=csv.php <file_sep>/modules/timesheet/inc/js/func.js $(document).ready(function(){ $(".cBox").click(function(){ manageCustomer($(this).attr("name"), $(this).attr("value")); }); $(".time").click(function(){ changeActive($(this).attr("value"), $(this)); }); addTableEvents(); $("#save").click(function(){ updateCurrentValues(); return false; }); setInterval('updateClock()', 1000); }); function manageCustomer(func,id){ var $el = $(this); $.post("ajax.php", {"action": func, "id":id}, function(data){if (data != "1"){alert("ERROR: ALLE IN DECKUNG")}else{ act = func=="add" ? 
"remove":"add"; //alert(act+" "+func); $("#"+id).attr("name", act); }}); } function changeActive(id, el){ $('#active').attr("id", ""); $(el).attr("id", "active"); $.post("ajax.php", {"change": "true", "id":id}, function(data){ /*if (data != "1"){alert(data)};*/ updateTable(true); edit(data); resetClock(); }); } function updateCurrentValues(){ $.post('ajax.php', $("#editForm").serializeArray(), function(data) { if (data != ""){alert(data)}; updateTable(true); }); } function edit(id){ $.post("ajax.php", {"edit": "true", "id":id}, function(data){ $("#editForm").html(data); $("#save").click(function(){ updateCurrentValues(); return false; }); }); } function updateTable(id){ $.post("ajax.php", {"updateTable": "true", "id":id}, function(data){ $("#timesheet").html(data); addTableEvents(); }); } function addTableEvents(){ $(".edit").click(function(){ edit($(this).attr("id")); }); $(".list:not(#isrunning)").mouseenter(function(){ var oldcolor = $(this).attr("bgcolor", "#ccffcc"); }).mouseleave(function(){ var bgcolor=$(this).index()%2==0?"#ffffff":"#aaaaaa"; $(this).attr("bgcolor" , bgcolor); }); } function resetClock(){ this.currentHours = 0 this.currentMinutes = 0; this.currentSeconds = 0; this.currentMinutes = ( currentMinutes < 10 ? "0" : "" ) + currentMinutes; this.currentHours = ( currentHours < 10 ? "0" : "" ) + currentHours; } var currentHours = 0; var currentMinutes = 0; var currentSeconds = 0; currentMinutes = ( currentMinutes < 10 ? "0" : "" ) + currentMinutes; currentHours = ( currentHours < 10 ? "0" : "" ) + currentHours; function updateClock () { currentSeconds++; if(currentSeconds >59) { currentMinutes++; currentSeconds = 0; currentMinutes = ( currentMinutes < 10 ? "0" : "" ) + currentMinutes; } if(currentMinutes >59) { currentHours++; currentMinutes =0; } currentSeconds = ( currentSeconds < 10 ? 
"0" : "" ) + currentSeconds; currentTimeString = currentHours + ":" + currentMinutes + ":" + currentSeconds; $("#clock").html(currentTimeString); }<file_sep>/modules/produkte/show.php <? include("../../inc/config.inc.php"); include("../../inc/func.inc.php"); ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="../../main.css" type=text/css> <script src="../../inc/functions.js" type="text/javascript" language="javascript"></script> </head> <body onLoad="document.getElementById('term').focus()"> <p class=titel>Produkte</p> <form method=get action="<?=$PHP_SELF?>"> <input type=text name=term id=term value="<?=$term?>"> <input type=submit name=search value="Suchen"> </form> <? /* Product list: optional full-text search via $term, sortable via $order, paged via $start. */ if(!$order) $order="nr_int"; /* fix(security): $order arrives straight from the request and is interpolated into SQL below -- restrict it to the sortable columns */ if(!in_array($order,array("nr_int","nr_ext","gruppe","text1","preis1"))) $order="nr_int"; if(!$start){ $start=0; } if($term){ /* fix: a space was missing before ORDER BY, producing "...)ORDER BY" (malformed SQL); cf. the search query in the Kontakte popup which uses ." ORDER BY" */ $query=mysql_query("SELECT id,nr_int,nr_ext,gruppe,text1,preis1 FROM Produkte WHERE ".formatSearchString($term,array("nr_int","nr_ext","gruppe","text1","text2","preis1","preis2","preis3","preis4","rabattstufe","warenbestand"))." ORDER BY $order LIMIT $start,$_config_entrysperpage"); } else { $query=mysql_query("SELECT id,nr_int,nr_ext,gruppe,text1,preis1 FROM Produkte ORDER BY $order LIMIT $start,$_config_entrysperpage"); } if(@mysql_num_rows($query)>0) { /* fix: first column header linked "?oder=nr_int" (typo) so sorting by internal product number never worked */ print "<table border=0 cellpadding=3 cellspacing=0 width=\"95%\"> <tr> <td><b><a href=\"$PHP_SELF?order=nr_int\">Int. Prod. Nr.</a></b></td> <td><b><a href=\"$PHP_SELF?order=nr_ext\">Ext. Prod. Nr.</a></b></td> <td><b><a href=\"$PHP_SELF?order=gruppe\">Prod.
Gruppe</a></b></td> <td><b><a href=\"$PHP_SELF?order=text1\">$_config_produkte_text1_name</a></b></td> <td><b><a href=\"$PHP_SELF?order=preis1\">$_config_produkte_preis1_name</a></b></td> </tr>\n"; for($i=0;list($id,$nr_int,$nr_ext,$gruppe,$text1,$preis1)=mysql_fetch_row($query);$i++) { if(($i%2)==0){ $bgcolor=$_config_tbl_bgcolor1; } else { $bgcolor=$_config_tbl_bgcolor2; } if(strlen($text1)>100) { $text1=substr($text1,0,100)."..."; } print "<tr onmouseover=\"setPointer(this, 'over', '#$bgcolor', '#$_config_tbl_bghover', '')\" onmouseout=\"setPointer(this, 'out', '#$bgcolor', '#$_config_tbl_bghover', '')\" onclick=\"location.href='edit.php?id=$id'\"> <td width=100 align=right bgcolor=\"#$bgcolor\">$nr_int</td> <td width=100 align=right bgcolor=\"#$bgcolor\">$nr_ext</td> <td width=100 bgcolor=\"#$bgcolor\">$gruppe</td> <td width=\"*\" bgcolor=\"#$bgcolor\">$text1</td> <td width=50 align=right bgcolor=\"#$bgcolor\">".formatBetrag($preis1)."</td> </tr>\n"; } print "<tr> <td colspan=6 align=center>"; if($term){ $attr="&term=$term"; } if($start>0){ print "<a href=\"$PHP_SELF?start=".($start-$_config_entrysperpage)."$attr\"><<<</a>"; } if($term){ $query=mysql_query("SELECT count(*) FROM Produkte WHERE ".formatSearchString($term,array("nr_int","nr_ext","gruppe","text1","text2","preis1","preis2","preis3","preis4","rabattstufe","warenbestand"))); } else { $query=mysql_query("SELECT count(*) FROM Produkte"); } if(($start+$_config_entrysperpage+1)<=mysql_result($query,0,0)){ if($start>0){ print " | "; } print "<a href=\"$PHP_SELF?start=".($start+$_config_entrysperpage)."$attr\">>>></a>"; } print "</td> </tr> </table>\n"; } else { print "Keine Produkte gefunden"; } ?> </body> </html> <file_sep>/modules/statistiken/offene_rechnungen.php <? 
include("../../inc/config.inc.php"); include("../../inc/func.inc.php"); ?> <html> <head> <title>Sylon godmode</title> <link rel="stylesheet" href="../../main.css" type=text/css> </head> <body> <p class=titel>Statistiken:Offene Rechnungen</p> <br><br> <? $query=mysql_query("select DATE_FORMAT(rech.datum,'$_config_date') as datum, rech.id,rech.kontakt,sum($_config_posbetrag) as betrag,sum(($_config_posbetrag)*rech.fx) as betrag_chf,rech.waehrung,rech.betreff,rech.zahlungsfrist FROM Rechnungen rech, Rechnungen_positionen pos WHERE pos.rechnung = rech.id AND rech.bezahlt is NULL GROUP BY rech.id ORDER BY datum"); echo mysql_error(); if(mysql_num_rows($query)>0){ print "<table border=0 cellspacing=0> <tr> <td><b>Rech.-Nr.</b></td> <td><b>Kontakt</b></td> <td width=100><b>Datum</b></td> <td width=100><b>Zahlungsfrist</b></td> <td width=100><b>Text</b></td> <td align=right width=100><b>Betrag</b></td> <td align=right width=100><b>Betrag CHF</b></td> </tr>\n"; $total=0; for($i=0;(list($datum,$id,$kontakt,$betrag,$betrag_chf,$waehrung,$text,$zahlungsfrist)=mysql_fetch_row($query));$i++) { if(($i%2)==0){ $bgcolor=$_config_tbl_bgcolor1; } else { $bgcolor=$_config_tbl_bgcolor2; } print "<tr> <td valign=top bgcolor=\"$bgcolor\">$id</td> <td valign=top bgcolor=\"$bgcolor\">".getKontakt($kontakt)."</td> <td width=100 valign=top bgcolor=\"$bgcolor\">$datum</td> <td width=100 valign=top bgcolor=\"$bgcolor\">".date($_config_date_php,strtotime("+$zahlungsfrist days",strtotime(date_CH_to_EN($datum))))."</td> <td valign=top bgcolor=\"$bgcolor\">$text</td>"; if($betrag!=$betrag_chf) { print "<td align=right valign=top bgcolor=\"$bgcolor\">".formatBetrag($betrag)." 
".getWaehrungHtml($waehrung)."</td>"; } else { print "<td bgcolor=\"$bgcolor\">&nbsp;</td>"; } print "<td align=right valign=top bgcolor=\"$bgcolor\">".formatBetrag($betrag_chf)."</td> </tr>"; $total+=$betrag_chf; } print "<tr> <td colspan=6><b>Total</b></td> <td align=right><b>".formatBetrag($total)."</b></td> </tr> </table>"; } ?> </body> </html> <file_sep>/modules/domains/$ <? include("../../inc/config.inc.php"); include("../../inc/func.inc.php"); include("func.inc.php"); ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="../../main.css" type=text/css> <script src="../../inc/functions.js" type="text/javascript" language="javascript"></script> </head> <body> <p class=titel>Domains:Verrechnen</p> <? $query2=mysql_query("SELECT id,art,monate FROM Zahlungsarten"); for($i=0;list($abrechnungsart_id,$abrechungsart_art,$abrechnungsart_monate)=mysql_fetch_row($query2);$i++){ if($order=="domain"){ $order="domain"; } else if($order=="betrag"){ $order="betrag"; } else { $order="firma"; } $query=mysql_query("SELECT dom.id,domain,kontakt,startDate,betrag,waehrung FROM Domains dom,Kontakte kon WHERE betrag>0 AND endDate is NULL AND abrechnungsart=$abrechnungsart_id AND dom.kontakt = kon.id ORDER BY $order"); if(@mysql_num_rows($query)>0) { $header=0; for($i=0;list($id,$domain,$kontakt,$startDate,$betrag,$waehrung)=mysql_fetch_row($query);) { //14 Tage vorher verrechnen if(strtotime(date_CH_to_EN(domain_bezahlt_bis($id))) < strtotime("+14 days")) { $domain_verrechnet=1; if($header==0){ $header=1; print "<div class=titel2>Domains $abrechungsart_art</div> <table cellpadding=2 cellspacing=0 border=0> <tr> <td width=300><a href=\"$PHP_SELF?order=kontakt\"><b>Kontakt</b></a></td> <td width=150><a href=\"$PHP_SELF?order=domain\"><b>Domain</b></a></td> <td width=100><b>Bezahlt bis</b></td> <td width=50 align=right><b>Raten</b></td> <td width=80 align=right><a href=\"$PHP_SELF?order=betrag\"><b>Betrag</b></a></td> <td width=80 align=right><b>Total</b></td> 
</tr>\n"; } if(($i%2) == 0){ $bgcolor=$_config_tbl_bgcolor1; } else { $bgcolor=$_config_tbl_bgcolor2; } $bezahltBis = domain_bezahlt_bis($id); $raten_faellig=ceil(diff_month(strtotime("+$_config_domains_verrechnen_tage_vorher days"),strtotime(date_CH_to_EN($bezahltBis)))/12); print "<tr onmouseover=\"setPointer(this, 'over', '#$bgcolor', '#$_config_tbl_bghover', '')\" onmouseout=\"setPointer(this, 'out', '#$bgcolor', '#$_config_tbl_bghover', '')\" onclick=\"location.href='verrechnen1.php?id=$id'\"> <td width=300 bgcolor=\"#$bgcolor\">"; if($kontakt!=$lastkontakt) { print "<a href=\"../../modules/kontakte/kontakt.php?id=$kontakt\">".getKontakt($kontakt)."</a></td>"; } else { print "&nbsp;"; } print "<td width=150 bgcolor=\"#$bgcolor\"><a href=\"domain.php?id=$id\">$domain</a></td> <td width=100 bgcolor=\"#$bgcolor\">$bezahltBis</td> <td width=50 align=right bgcolor=\"#$bgcolor\">$raten_faellig</td> <td width=80 align=right bgcolor=\"#$bgcolor\">".formatBetrag($betrag)." ".getWaehrungHtml($waehrung)."</td> <td width=80 align=right bgcolor=\"#$bgcolor\">".formatBetrag($betrag*$raten_faellig)." ".getWaehrungHtml($waehrung)."</td> </tr>"; $lastkontakt=$kontakt; $i++; } } if($header==1){ print "</table>\n<br><br>\n"; } } } if(!$domain_verrechnet){ print "Keine Domains zu verrechnen"; } ?> </body> </html> <file_sep>/modules/kontakte/modules/kontakt.inc.php <? 
print "<table border=0 cellpadding=0 cellspacing=0> <tr> <td colspan=2><b>Kontaktmöglichkeiten</b></td> </tr> <tr> <td width=100>Telefon</td> <td><input type=text name=\"kontakt_telefon1\" value=\"".$kontakt['telefon1']."\" style=\"width:200px;\" maxlength=20></td> </tr> <td width=100>Telefon</td> <td><input type=text name=\"kontakt_telefon2\" value=\"".$kontakt['telefon2']."\" style=\"width:200px;\" maxlength=20></td> </tr> <tr> <td width=100>Mobile</td> <td><input type=text name=\"kontakt_mobile\" value=\"".$kontakt['mobile']."\" style=\"width:200px;\" maxlength=20></td> </tr> <tr> <td width=100>Fax</td> <td><input type=text name=\"kontakt_fax\" value=\"".$kontakt['fax']."\" style=\"width:200px;\" maxlength=20></td> </tr> <tr> <td width=100><a href=\"mailto:".$kontakt['mail']."\">E-Mail</a></td> <td><input type=text name=\"kontakt_mail\" value=\"".$kontakt['mail']."\" style=\"width:200px;\" maxlength=50></td> </tr> <tr> <td width=100><a href=\"".$kontakt['www']."\" target=\"_blank\">WWW</a></td> <td><input type=text name=\"kontakt_www\" value=\"".$kontakt['www']."\" style=\"width:200px;\" maxlength=50></td> </tr> </table>"; ?> <file_sep>/trash/kontakte_klein/func.inc.php <? 
function getKontakteList($formname,$selected,$breite,$text_null) { $query = mysql_query("SELECT id, firma, name, vorname FROM Kontakte WHERE aktiv=1 ORDER BY concat(firma,name)"); $select="<SELECT NAME=\"$formname\" style=\"width:".$breite."px;\"> <option value=0>$text_null</option>\n"; while(list($id,$firma,$name,$vorname)=@mysql_fetch_row($query)) { $name_show=formatName($firma,$name,$vorname); if($id == $selected) $select.="<option value=$id SELECTED>$name_show</option>\n"; else $select.="<option value=$id>$name_show</option>\n"; } $select.="</SELECT>\n"; return $select; } function getAdminList($formname,$selected,$breite,$text_null) { $query = mysql_query("SELECT id, name, vorname FROM Kontakte WHERE aktiv=1 AND admin=1 ORDER BY name"); $select="<SELECT NAME=\"$formname\" style=\"width:".$breite."px;\"> <option value=0>$text_null</option>\n"; while(list($id,$name,$vorname)=@mysql_fetch_row($query)) { $name_show=formatName("",$name,$vorname); if($id == $selected) $select.="<option value=$id SELECTED>$name_show</option>\n"; else $select.="<option value=$id>$name_show</option>\n"; } $select.="</SELECT>\n"; return $select; } function getAnsprechpersonenList($formname,$selected,$breite) { $query=mysql_query("SELECT id,name,vorname FROM Kontakte WHERE admin=1 AND aktiv=1"); $select = "<SELECT name=\"$formname\" style=\"width:$breite;\">\n"; while(list($id,$name,$vorname)=mysql_fetch_row($query)) { if($id == $selected) $select.=" <option value=\"$id\" SELECTED>$vorname $name</option>\n"; else $select.=" <option value=\"$id\">$vorname $name</option>\n"; } $select.="</SELECT>"; return $select; } function getAnredeList($formname,$selected,$breite) { $query=mysql_query("SELECT id,anrede FROM Kontakte_anreden"); $select = "<SELECT name=\"$formname\" style=\"width:$breite"."px;\">\n"; while(list($id,$anrede)=mysql_fetch_row($query)) { if($id == $selected) $select.=" <option value=\"$id\" SELECTED>$anrede</option>\n"; else $select.=" <option 
value=\"$id\">$anrede</option>\n"; } $select.="</SELECT>\n"; return $select; } function getKontakt($id){ $query=@mysql_query("SELECT firma,name,vorname FROM Kontakte WHERE id='$id'"); if($query){ list($firma,$name,$vorname)=mysql_fetch_row($query); return formatName($firma,$name,$vorname); } else { return false; } } function getKontaktFirma($id){ $query=@mysql_query("SELECT firma FROM Kontakte WHERE id='$id'"); if($query && mysql_num_rows($query)>0){ return @mysql_result($query,0,0); } else { return false; } } function getKontaktName($id){ $query=@mysql_query("SELECT name FROM Kontakte WHERE id='$id'"); if($query && mysql_num_rows($query)>0) { return mysql_result($query,0,0); } else { return false; } } function getKontaktVorname($id){ $query=@mysql_query("SELECT vorname FROM Kontakte WHERE id='$id'"); if($query && mysql_num_rows($query)>0) { return mysql_result($query,0,0); } else { return false; } } function getAnrede($id){ $query=@mysql_query("SELECT anrede FROM Kontakte_anreden WHERE id='$id'"); if($query && mysql_num_rows($query)>0) { return mysql_result($query,0,0); } else { return false; } } ?> <file_sep>/modules/rechnungen/createPDF.php <?php include("../../inc/config.inc.php"); include("../../inc/func.inc.php"); //SQL Selects, Titel setzen $query = mysql_query("SELECT kontakt,waehrung,DATE_FORMAT(datum,'$_config_date'),bezahlt,adresse,betreff,text,footer,zahlungsfrist,besrnr FROM Rechnungen WHERE id='$id'"); if(@mysql_num_rows($query)==0) { print "Die Rechnung Nr. 
'$id' existiert nicht."; die(); } list($kontakt,$waehrung,$datum,$bezahlt,$adresse,$betreff,$text,$footer,$zahlungsfrist,$besrnr)=mysql_fetch_row($query); $query = mysql_query("SELECT firma FROM Kontakte WHERE id='$kontakt'"); list($firma)=mysql_fetch_row($query); $query=mysql_query("SELECT kp.vorname,kp.name FROM Kontakte_kontaktpersonen kp,Kontakte ko WHERE ko.id='$kontakt' AND ko.pl = kp.id"); $title="Rechnung $firma"; if($besrnr) $besrnr="-$besrnr"; if(mysql_num_rows($query)==1) $sachbearbeiter=substr(mysql_result($query,0,0),0,1).substr(mysql_result($query,0,1),0,1); if($sachbearbeiter) $sachbearbeiter="Sachbearbeiter: $sachbearbeiter"; //Rechnungspositionen holen $rechnung_pos= mysql_query("SELECT text,text1,anzahl,betrag,waehrung,mwst,`key`,`value` FROM Rechnungen_positionen WHERE rechnung='$id'"); $gutschriften= mysql_query("SELECT text, betrag,waehrung,mwst FROM Rechnungen_gutschriften WHERE bezahlt='$id'"); //PDF erstellen, generelle Optionen define('FPDF_FONTPATH',"$_config_root_path/fpdf/font/"); require("$_config_root_path/fpdf/fpdf.php"); $pdf = new FPDF(); $pdf->Open(); $pdf->SetTitle($title); $pdf->SetCreator("Sylon godmode"); $pdf->SetAuthor($_config_rechnung_pdf_author); $pdf->SetPDFfileName(str_replace(" ","_",$title).".pdf"); $pdf->SetDisplayMode("fullwidth","single"); $pdf->SetAutoPageBreak("margin",20); $pdf->SetFillColor(230); $pdf->SetLeftMargin(15); $pdf->AddPage(); //Header $pdf->SetFont("Times","",24); //$pdf->Cell(20,5,"",0,0,"L"); $pdf->Text(40,15,"Sylon"); $pdf->SetFont("Arial","",10); $pdf->SetFont("Arial","B",10); $txt=split("\n","Sylon\nPostfach 43\n4004 Basel\nTel. 
061 383 85 77\nFax 061 383 85 76\nhttp://www.sylon.net\ninfo@sylon.net"); for($i=0,$height=19;$txt[$i];$i++,$height+=4){ if($i==1){ $pdf->SetFont("Arial","",10); } $pdf->Text(150,$height,$txt[$i]); } $pdf->Write(15,""); $pdf->Line($pdf->getX(),15,$pdf->getX()+190,15); $pdf->Ln(); $pdf->SetFont("Arial","",10); $pdf->Write(5,"\n\n\n\n\n\n\n$adresse"); $pdf->Ln(); $pdf->Ln(); $pdf->Ln(); $pdf->Ln(); $pdf->Ln(); $pdf->Ln(); $pdf->Ln(); //Header: Rechnungsinfo's, Betreff, Text, etc. $pdf->SetFont("Arial","B",10); $pdf->Cell(61,5,"Rechnungsnummer: ".$kontakt.str_pad($id,4,"0",STR_PAD_LEFT).$besrnr,0,0,"L"); $pdf->Cell(61,5,$sachbearbeiter,0,0,"C"); $pdf->Cell(61,5,"$_config_rechnung_ort, $datum",0,1,"R"); $pdf->Write(5,"\n\n$betreff\n\n"); $pdf->SetFont("Arial","",10); if($text) $pdf->Write(5,"$text\n"); $pdf->Ln(); //Positionstitel $pdf->SetFont("Arial","B",10); $pdf->Cell(59,5,"Produkt",0,0,"L"); $pdf->Cell(18,5,"Anz.",0,0,"R"); $pdf->Cell(35,5,"EP exkl.",0,0,"R"); $pdf->Cell(35,5,"EP Inkl.",0,0,"R"); $pdf->Cell(35,5,"Total",0,1,"R"); $pdf->Cell(59,5,"",0,0,"L"); $pdf->Cell(18,5,"Prod. 
Nr.",0,0,"R"); $pdf->Cell(35,5,"MWSt in %",0,0,"R"); $pdf->Cell(35,5,"MWSt Betrag",0,0,"R"); $pdf->Ln(); $pdf->SetFont("Arial","",9); //Positionen while(list($text,$text1,$anzahl,$betrag,$waehrung_pos,$mwst,$key,$value)=mysql_fetch_row($rechnung_pos)) { if($key!="produkt") $value=""; if($bgcolor){ $bgcolor=0; } else { $bgcolor=1; } $total+=($betrag*$anzahl); $total_mwst+=((($betrag/100)*$mwst)*$anzahl); if(!$waehrung)$waehrung=1; if($text1) $text1="\n".$text1; $y=$pdf->getY(); $pdf->Cell(77,5,$anzahl,0,0,"R",$bgcolor); $pdf->Cell(35,5,formatBetrag($betrag),0,0,"R",$bgcolor); $pdf->Cell(35,5,formatBetrag($betrag+(($betrag/100)*$mwst)),0,0,"R",$bgcolor); $pdf->Cell(35,5,"",0,1,"R",$bgcolor); $pdf->Cell(77,5,$value,0,0,"R",$bgcolor); $pdf->Cell(35,5,sprintf("%0.1f",$mwst)."%",0,0,"R",$bgcolor); $pdf->Cell(35,5,formatBetrag((($betrag/100)*$mwst)),0,0,"R",$bgcolor); $pdf->Cell(35,5,getWaehrung($waehrung_pos)." ".formatBetrag(waehrungRound(($betrag+(($betrag/100)*$mwst))*$anzahl,$waehrung_pos)),0,1,"R",$bgcolor); $y2=$pdf->getY(); $pdf->setY($y); $pdf->MultiCell(59,5,$text.$text1,0,"L",$bgcolor); $y3 = $pdf->getY(); if($y3>$y2) { if($bgcolor) { $pdf->Rect(74,$y2,123,($y3-$y2),"F"); } $pdf->setY($y2+($y3-$y2)); } else { $pdf->setY($y2); } } //Gutschriften while(list($text,$betrag,$waehrung_pos,$mwst)=mysql_fetch_row($gutschriften)){ if($bgcolor){ $bgcolor=0; } else { $bgcolor=1; } $total-=($betrag); $mwst_total-=(($betrag/100)*$mwst); if(!$waehrung)$waehrung=1; $pdf->Cell(59,5,$text,0,0,"L",$bgcolor); $pdf->Cell(18,5,$anzahl,0,0,"R",$bgcolor); $pdf->Cell(35,5,"-".formatBetrag($betrag),0,0,"R",$bgcolor); $pdf->Cell(35,5,"-".formatBetrag($betrag+(($betrag/100)*$mwst)),0,0,"R",$bgcolor); $pdf->Cell(35,5,"",0,1,"R",$bgcolor); $pdf->Cell(59,5,"",0,0,"L",$bgcolor); $pdf->Cell(18,5,"",0,0,"R",$bgcolor); $pdf->Cell(35,5,sprintf("%0.1f",$mwst)."%",0,0,"R",$bgcolor); $pdf->Cell(35,5,formatBetrag((($betrag/100)*$mwst)),0,0,"R",$bgcolor); $pdf->Cell(35,5,getWaehrung($waehrung_pos)." 
-".formatBetrag(waehrungRound(($betrag+(($betrag/100)*$mwst)),$waehrung_pos)),0,1,"R",$bgcolor); } //Total $query=mysql_query("SELECT sum(anzahl*(betrag*fx)),sum((((anzahl*betrag)/100)*mwst)*fx) FROM Rechnungen_positionen WHERE rechnung='$id'"); list($total,$total_mwst)=mysql_fetch_row($query); $query=mysql_query("SELECT sum(betrag*fx),sum(((betrag/100)*mwst)*fx) FROM Rechnungen_gutschriften WHERE bezahlt='$id'"); list($total_gut,$total_mwst_gut)=mysql_fetch_row($query); $total-=$total_gut; /* fix: subtracted $total_mwst from itself (always zeroing the VAT total); must subtract the credit-note VAT, mirroring $total-=$total_gut above */ $total_mwst-=$total_mwst_gut; $pdf->Ln(); $pdf->SetFont("Arial","B",10); $pdf->Cell(59,5,"Nettobetrag",0,0,"L"); $pdf->Cell(53,5,"Totalbetrag MWSt",0,0,"R"); $pdf->Cell(70,5,"Zu überweisender Betrag",0,1,"R"); $pdf->Cell(59,5,getWaehrung($waehrung)." ".formatBetrag(waehrungRound($total,$waehrung)),0,0,"L"); $pdf->Cell(53,5,getWaehrung($waehrung)." ".formatBetrag(waehrungRound($total_mwst,$waehrung)),0,0,"R"); $pdf->Cell(70,5,getWaehrung($waehrung)." ".formatBetrag(waehrungRound($total+$total_mwst,$waehrung)),0,1,"R"); $pdf->Ln(); $pdf->Ln(); $pdf->Line(15,276,197,276); $pdf->SetFont("Arial","",7); //footer $pdf->Text(15,280,"Das Sylon Team verfolgt keine aktiven Marketing Projekte."); $pdf->Text(15,284,"Unser Marketing ist eine zufriedene Kundschaft. Sind Sie mit den Hosting Dienstleistungen des Sylon Teams zufrieden, empfehlen Sie uns bitte weiter."); $pdf->Text(15,288,"Für jeden vermittelten Auftrag schreiben wir Ihnen die ersten zwei Monatsgebühren des Neukunden gut. Besten Dank für Ihre Unterstützung."); //$_config_rechnung_text_footer //Abschlusstext $lines=0; for($i=0;$i<strlen($footer);$i++) { if(substr($footer,$i,1)=="\n") $lines++; } if(($pdf->getY() + ($lines + 103.33))>250) { $pdf->addpage(); } $pdf->SetFont("Arial","",10); $pdf->Write(5,"$footer"); $pdf->Output(); ?><file_sep>/modules/rechnungenOloid/mahnen.php <?
include("../../inc/config.inc.php"); include("../../inc/func.inc.php"); include("func.inc.php"); if($submit) { $query=mysql_query("INSERT INTO Rechnungen_mahnungen(rechnung,datum,adresse,betreff,text,footer,zahlungsfrist,besrnr) VALUES('$id','".date_CH_to_EN($datum)."','$adresse','$betreff','$text','$footer','$zahlungsfrist','$besrnr')"); if(!$error=mysql_error()) { $last_id=mysql_insert_id(); if($newpos_text) { $query=mysql_query("SELECT kontakt,waehrung,fx FROM Rechnungen WHERE id='$id'"); list($kontakt,$waehrung,$fx)=mysql_fetch_row($query); $query=mysql_query("INSERT INTO Rechnungen_positionen(kontakt,rechnung,text,text1,betrag,waehrung,fx,datum,`key`,value) VALUES('$kontakt','0','$newpos_text','$newpos_text1','$newpos_betrag','$waehrung','$fx',NOW(),'mahnung','$last_id')"); } header("Location: createPDF.php?type=mahnung&id=$last_id"); } } ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="../../main.css" type=text/css> </head> <body <?=error($error)?>> <p class=titel>Rechnungen:Mahnung erstellen</p> <? 
if(!$submit) { $query=mysql_query("SELECT kontakt,DATE_FORMAT(datum,'$_config_date'),adresse, betreff, text,footer,waehrung,besrnr FROM Rechnungen WHERE id='$id'"); list($kontakt,$datum,$adresse,$betreff,$text,$footer,$waehrung,$besrnr)=mysql_fetch_row($query); $betreff=str_replace("%DATUM%",$datum,$betreff); $text=str_replace("%DATUM%",$datum,$text); $footer=str_replace("%DATUM%",$datum,$footer); $_config_rechnung_mahnung_subject=str_replace("%DATUM%",$datum,$_config_rechnung_mahnung_subject); $betreff=$_config_rechnung_mahnung_subject; $zahlungsfrist=$_config_mahnung_rechnung_zahlungsfrist; } print "<form method=post action=\"$PHP_SELF?id=$id\"> <table width=100% border=0> <tr> <td height=100 valign=top> <textarea name=adresse style=\"width:400px;height:100px;\">$adresse</textarea> </td> </tr> <tr> <td align=right height=50 valign=top>".$_config_rechnung_ort.", <input type=text name=datum value=\"".date("d.m.Y")."\"></td> </tr> <tr> <td> <input type=text name=betreff maxlength=255 value=\"".$betreff."\" style=\"width:400px;\"><br><br> <SELECT onChange=\"document.getElementById('text').value=this.value\" style=\"width:400px;background-color:#$_config_tbl_bgcolor1;\"> <option value=\"".str_replace("%USER%",getEmp(getHttpUserId()),$text)."\">Rechnungstext</option> <option value=\"".str_replace("%USER%",getEmp(getHttpUserId()),$_config_rechnung_mahnung_text_1)."\">".get1Line($_config_rechnung_mahnung_text_1)."</option> <option value=\"".str_replace("%USER%",getEmp(getHttpUserId()),$_config_rechnung_mahnung_text_2)."\">".get1Line($_config_rechnung_mahnung_text_2)."</option> <option value=\"".str_replace("%USER%",getEmp(getHttpUserId()),$_config_rechnung_mahnung_text_3)."\">".get1Line($_config_rechnung_mahnung_text_3)."</option> <option value=\"".str_replace("%USER%",getEmp(getHttpUserId()),$_config_rechnung_mahnung_text_4)."\">".get1Line($_config_rechnung_mahnung_text_4)."</option> <option 
value=\"".str_replace("%USER%",getEmp(getHttpUserId()),$_config_rechnung_mahnung_text_5)."\">".get1Line($_config_rechnung_mahnung_text_5)."</option> </SELECT><br> <textarea name=text id=text style=\"width:400px;height:150px;\">".str_replace("%USER%",getEmp(getHttpUserId()),$text)."</textarea> </td> </tr> <tr> <td> <b>Rechnungen</b><br> <table border=0 width=100% cellspacing=0> <tr> <td style=\"width:60px;\"><b>Anzahl</b></td> <td><b>Text</b></td> <td><b>Text 1</b></td> <td align=right><b>Betrag</b></td> </tr>"; $query=mysql_query("SELECT id,anzahl,text,text1,$_config_posbetrag,waehrung FROM Rechnungen_positionen pos WHERE rechnung='$id'"); for($i=0;(list($pos_id,$pos_anzahl,$pos_text,$pos_text1,$betrag,$waehrung_pos)=mysql_fetch_row($query));$i++){ if(($i%2)==0){ $bgcolor=$_config_tbl_bgcolor1; } else { $bgcolor=$_config_tbl_bgcolor2; } print "<tr> <td bgcolor=\"#$bgcolor\">$pos_anzahl</td> <td bgcolor=\"#$bgcolor\">$pos_text</td> <td bgcolor=\"#$bgcolor\">$pos_text1</td> <td bgcolor=\"#$bgcolor\" align=right>".formatBetrag($betrag)." 
".getWaehrungHtml($waehrung_pos)."</td> </tr>\n"; } if(($i%2)==0){ $bgcolor=$_config_tbl_bgcolor1; } else { $bgcolor=$_config_tbl_bgcolor2; } /* fix: the newpos_text1 input was printed as value\"...\" (missing =), emitting a broken HTML attribute so a previously entered value was dropped on redisplay; the sibling newpos_text/newpos_betrag inputs show the intended form */ print "<tr> <td bgcolor=\"#$bgcolor\"></td> <td bgcolor=\"#$bgcolor\"><input type=text style=\"width:100px\" name=newpos_text value=\"$newpos_text\"></td> <td bgcolor=\"#$bgcolor\"><input type=text style=\"width:100px\" name=newpos_text1 value=\"$newpos_text1\"></td> <td bgcolor=\"#$bgcolor\" align=right><input type=text style=\"width:50px\" name=newpos_betrag value=\"$newpos_betrag\"></td> </tr>\n"; print "</table>\n"; //Gutschriften $query2=mysql_query("SELECT id,betrag,waehrung,text FROM Rechnungen_gutschriften WHERE bezahlt is NULL AND kontakt='$id' AND auszahlen=1"); if(@mysql_num_rows($query2)>0) { print "<br><b>Gutschriften</b><br><table border=0 width=100% cellpadding=0 cellspacing=0> <tr> <td>&nbsp;</td> <td><b>Text</b></td> <td align=right><b>Betrag</b></td> </tr>"; for($i=0;list($gutschrift_id,$gutschrift_betrag,$gutschrift_waehrung,$gutschrift_text)=mysql_fetch_row($query2);$i++) { if(($i%2)==0){ $bgcolor=$_config_tbl_bgcolor1; } else { $bgcolor=$_config_tbl_bgcolor2; } print "<tr> <td style=\"width:30px;\" bgcolor=\"#$bgcolor\"><input type=checkbox name=\"gut[$i]\" value=\"$gutschrift_id\" checked></td> <td bgcolor=\"#$bgcolor\">$gutschrift_text</td> <td align=right bgcolor=\"#$bgcolor\">".formatBetrag($gutschrift_betrag)." ".getWaehrungHtml($gutschrift_waehrung)."</td> </tr>\n"; } print "</table>"; } print "</td> </tr> <tr> <td> <table border=0 cellpadding=0 cellspacing=0 width=\"100%\"> <tr> <td valign=top><b>Total</b></td> <td align=right><b>"; //Total berechnen $query1=mysql_query("SELECT sum($_config_posbetrag),waehrung FROM Rechnungen_positionen pos WHERE rechnung='$id' GROUP BY waehrung"); for($i=0;(list($total_betrag,$total_waehrung)=mysql_fetch_row($query1));$i++) { if($i>0) print "<br>"; print formatBetrag($total_betrag)." 
".getWaehrungHtml($total_waehrung);
}

// Currency, payment deadline and BESR number of the invoice, followed by the
// selectable reminder footer text.
// FIX: the <option> labels used $_config_mahnung_rechnung_text2_N (words
// swapped) while the values used $_config_rechnung_mahnung_text2_N; the
// swapped variables are not defined anywhere, so every label was empty.
print "</b></td>
</tr>
</td>
</tr>
</table>
<p><b>Währung</b><br>
Mahnung erstellen in: ".getWaehrungHtml($waehrung)."</p>
<p><b>Zahlungsfrist</b><br>
<input type=text style=\"width:30;text-align:right\" maxlength=3 name=\"zahlungsfrist\" value=\"$zahlungsfrist\"> Tage</p>
<p><b>Besr Nr.</b><br>
<input type=text style=\"width:250;\" name=\"besrnr\" value=\"$besrnr\"></p>
</td>
</tr>
<tr>
<td><br><br>
<SELECT onChange=\"document.getElementById('footer').value=this.value\" style=\"width:400px;background-color:#$_config_tbl_bgcolor1;\">
<option value=\"".str_replace("%USER%",getEmp(getHttpUserId()),$footer)."\">Rechnungstext</option>
<option value=\"".str_replace("%USER%",getEmp(getHttpUserId()),$_config_rechnung_mahnung_text2_1)."\">".get1Line($_config_rechnung_mahnung_text2_1)."</option>
<option value=\"".str_replace("%USER%",getEmp(getHttpUserId()),$_config_rechnung_mahnung_text2_2)."\">".get1Line($_config_rechnung_mahnung_text2_2)."</option>
<option value=\"".str_replace("%USER%",getEmp(getHttpUserId()),$_config_rechnung_mahnung_text2_3)."\">".get1Line($_config_rechnung_mahnung_text2_3)."</option>
<option value=\"".str_replace("%USER%",getEmp(getHttpUserId()),$_config_rechnung_mahnung_text2_4)."\">".get1Line($_config_rechnung_mahnung_text2_4)."</option>
<option value=\"".str_replace("%USER%",getEmp(getHttpUserId()),$_config_rechnung_mahnung_text2_5)."\">".get1Line($_config_rechnung_mahnung_text2_5)."</option>
</SELECT><br>
<textarea name=footer id=footer style=\"width:400px;height:70px;\">".str_replace("%USER%",getEmp(getHttpUserId()),$footer)."</textarea>
</td>
</tr>
</table><br>";
?>
<input type=submit name=submit value="Mahnung Drucken">
</form>
</body>
</html>
<file_sep>/modules/rapportierung/show.php
<?
include("../../inc/config.inc.php");
include("../../inc/func.inc.php");
?>
<html>
<head>
<title><?=$_config_title?></title>
<link rel="stylesheet" href="../../main.css" type=text/css>
<script src="../../inc/functions.js" type="text/javascript" language="javascript"></script>
</head>
<body onLoad="document.getElementById('term').focus()">
<p class=titel>Rapporte</p>
<form method=get action="<?=$PHP_SELF?>">
<input type=text name=term id=term value="<?=$term?>">
<input type=submit name=search value="Suchen">
</form>
<?
// Sorting: only allow real columns of the Rapportierung table; anything else
// (including the old "kontakt"/"betrag" special cases that referenced tables
// not present in the query) falls back to the date column.
// FIX: the previous code produced invalid SQL in both branches - the search
// query appended "$order" without an ORDER BY keyword for the default case,
// while the plain query prefixed "ORDER BY" a second time.
if(!in_array($order,array("date","employee","kontakt","projekt","code","time","time_clearable","text")))
	$order="date";

if(!$start){
	$start=0;
}

// FIX: the search branch selected from the misspelled table "Rapportierug",
// omitted DATE_FORMAT, and searched column names copy/pasted from the
// domains module ("","dom.id"). It now matches the non-search branch.
// NOTE(review): the chosen search columns are a judgement call - confirm
// against how formatSearchString() is used elsewhere.
if($term){
	$query=mysql_query("SELECT id,employee,kontakt,projekt,code,text,DATE_FORMAT(date,'$_config_date'),time,time_clearable FROM Rapportierung WHERE ".formatSearchString($term,array("text","projekt","code","ansprechperson"))." ORDER BY $order LIMIT $start,$_config_entrysperpage");
} else {
	$query=mysql_query("SELECT id,employee,kontakt,projekt,code,text,DATE_FORMAT(date,'$_config_date'),time,time_clearable FROM Rapportierung ORDER BY $order LIMIT $start,$_config_entrysperpage");
}
echo mysql_error();

if(@mysql_num_rows($query)>0) {
	print "<table border=0 cellpadding=3 cellspacing=0 width=\"95%\">
<tr>
<td><b><a href=\"$PHP_SELF\">Kontakt</a></b></td>
<td><b><a href=\"$PHP_SELF?order=date\">Datum</a></b></td>
<td><b><a href=\"$PHP_SELF?order=employee\">Mitarbeiter</a></b></td>
<td><b><a href=\"$PHP_SELF?order=projekt\">Projekt</a></b></td>
<td><b><a href=\"$PHP_SELF?order=code\">Code</a></b></td>
<td><b><a href=\"$PHP_SELF?order=time\">Stunden</a></b></td>
<td><b><a href=\"$PHP_SELF?order=text\">Text</a></b></td>
</tr>\n";
	$total=0;
	for($i=0;list($id,$employee,$kontakt,$projekt,$code,$text,$date,$time,$time_clearable)=mysql_fetch_row($query);$i++) {
		if(($i%2)==0){
			$bgcolor=$_config_tbl_bgcolor1;
		} else {
			$bgcolor=$_config_tbl_bgcolor2;
		}
		// Long report texts are shortened for the overview table.
		if(strlen($text)>200)
			$text=substr($text,0,200)."...";
		print "<tr onmouseover=\"setPointer(this, 'over', '#$bgcolor', '#$_config_tbl_bghover', '')\" onmouseout=\"setPointer(this, 'out', '#$bgcolor', '#$_config_tbl_bghover', '')\" onclick=\"location.href='rapport.php?id=$id&back=".urlencode($REQUEST_URI)."'\">
<td width=200 bgcolor=\"#$bgcolor\"><a href=\"../kontakte/kontakt.php?id=$kontakt&back=".urlencode($REQUEST_URI)."\">".getKontakt($kontakt)."</a></td>
<td width=80 bgcolor=\"#$bgcolor\">$date</td>
<td width=80 bgcolor=\"#$bgcolor\">".getEmp($employee)."</td>
<td width=80 bgcolor=\"#$bgcolor\">$projekt</td>
<td width=40 bgcolor=\"#$bgcolor\">$code</td>
<td width=50 align=right bgcolor=\"#$bgcolor\">$time ($time_clearable)</td>
<td bgcolor=\"#$bgcolor\">$text</td>
</tr>\n";
	}
	// Pagination: keep the search term in the prev/next links.
	print "<tr>
<td colspan=3 align=center>";
	$attr="";
	if($term){
		$attr="&term=$term";
	}
	if($start>0){
		print "<a href=\"$PHP_SELF?start=".($start-$_config_entrysperpage)."$attr\"><<<</a>";
	}
	// FIX: the count query searched a non-existent "domain" column; it now
	// uses the same filter as the result query so paging works while
	// searching.
	if($term){
		$query=mysql_query("SELECT count(*) FROM Rapportierung WHERE ".formatSearchString($term,array("text","projekt","code","ansprechperson")));
	} else {
		$query=mysql_query("SELECT count(*) FROM Rapportierung");
	}
	if(($start+$_config_entrysperpage+1)<=mysql_result($query,0,0)){
		if($start>0){
			print " | ";
		}
		print "<a href=\"$PHP_SELF?start=".($start+$_config_entrysperpage)."$attr\">>>></a>";
	}
	print "</td>
</tr>
</table>\n";
} else {
	print "Keine Rapporte gefunden";
}
?>
</body>
</html>
<file_sep>/modules/rechnungen/mahnen.php
<?
include("../../inc/config.inc.php");
include("../../inc/func.inc.php");
include("func.inc.php");

if(!$back) {
	$back="offene.php";
}

if($submit) {
	// SECURITY FIX: $von/$an/$betreff come straight from the request; strip
	// CR/LF so a crafted value cannot inject additional mail headers or
	// recipients into the outgoing reminder mail.
	$von=str_replace(array("\r","\n"),"",$von);
	$an=str_replace(array("\r","\n"),"",$an);
	$betreff=str_replace(array("\r","\n"),"",$betreff);
	if(mail($an,$betreff,$nachricht,"From: $von")) {
		// NOTE(review): the INSERT still interpolates request values
		// unescaped - consider mysql_real_escape_string() here as well.
		$query = mysql_query("INSERT INTO Rechnungen_mahnungen(rechnung,datum,von,an,betreff,nachricht) VALUES('$rechnung',NOW(),'$von','$an','$betreff','$nachricht')");
		if($query) {
			header("Location: ".urldecode($back));
		} else {
			$err = mysql_error();
		}
	} else {
		$err = "Mahnung konnte nicht verschickt werden";
	}
}
?>
<html>
<head>
<title><?=$_config_title?></title>
<link rel="stylesheet" href="../../main.css" type=text/css>
</head>
<body>
<p class=titel>Rechnungen:Mahnen</p>
<?
if($err) {
	print "<b>Fehler:</b> $err";
}

// Load the invoice's base data for the reminder mail template.
$query = mysql_query("SELECT rech.kontakt,DATE_FORMAT(rech.datum,'$_config_date'),rech.betreff FROM Rechnungen rech WHERE rech.id='$id'");
list($kontakt,$datum,$betreff)=mysql_fetch_row($query);

$text=$_config_rechnung_mahnung_text1;

// Name, salutation and mail address of the billing contact.
list($name,$anrede,$mail) = getRechnungsAdresseMail($kontakt);
$text=str_replace("%ANREDE%",$anrede,$text);
$text=str_replace("%NAME%",$name,$text);
$text=str_replace("%DATUM%",$datum,$text);

print "<form method=post action=\"$PHP_SELF?back=".urlencode($back)."\">
<input type=hidden name=rechnung value=$id>
<table border=0>
<tr>
<td width=100>Von:</td>
<td><input type=text name=von value=\"$_config_mahnung_from\" style=\"width:300px\"></td>
</tr>
<tr>
<td width=100>An:</td>
<td><input type=text name=an value=\"$name <$mail>\" style=\"width:300px\"></td>
</tr>
<tr>
<td width=100>Betreff:</td>
<td><input type=text name=betreff value=\"Mahnung: Rechnung vom $datum\" style=\"width:300px;\">
</tr>
<tr>
<td width=100 valign=top>Nachricht:</td>
<td>
<textarea name=nachricht style=\"width:500px; height:300px;\">$text</textarea>
</td>
</tr>
<tr>
<td colspan=2><input type=submit name=submit value=\"Abschicken\"></td>
</tr>
</table>
</form>";
?>
</body>
</html>
<file_sep>/trash/kontakte_klein/menu.inc.php
Kontakte
Anzeigen/Editieren=show.php
Hinzuf&uuml;gen=add.php
<file_sep>/modules/rechnungen/delete.php
<?
include("../../inc/config.inc.php");
include("../../inc/func.inc.php");
include("func.inc.php");

if(!$back) {
	$back="offene.php";
}
if(!$backno) {
	$backno=$back;
}

if($del) {
	// Either delete the invoice positions / credit notes with the invoice, or
	// detach them so they can be billed again later.
	if($rem_pos) {
		$query=mysql_query("DELETE FROM Rechnungen_positionen WHERE rechnung='$del'");
		$query1=mysql_query("DELETE FROM Rechnungen_gutschriften WHERE bezahlt='$del'");
	} else {
		$query=mysql_query("UPDATE Rechnungen_positionen SET rechnung=NULL WHERE rechnung='$del'");
		$query1=mysql_query("UPDATE Rechnungen_gutschriften SET bezahlt=NULL WHERE bezahlt='$del'");
	}
	if(!($error=mysql_error())) {
		$query=mysql_query("DELETE FROM Rechnungen WHERE id='$del'");
		if(!($error=mysql_error())) {
			header("Location: ".urldecode($back));
		}
	}
}
?>
<html>
<head>
<title><?=$_config_title?></title>
<link rel="stylesheet" href="../../main.css" type=text/css>
</head>
<body>
<p class=titel>Rechnungen:Rechnung Löschen</p>
<?
if($error){
	print "<b>Fehler:</b> $error<br><br>";
}

$query=mysql_query("SELECT kontakt,DATE_FORMAT(datum,'$_config_date'),betreff,fixiert FROM Rechnungen WHERE id='$id'");
list($kontakt,$datum,$text,$fixiert)=mysql_fetch_row($query);

// FIX: the confirmation text contained the garbled word "for tfahren".
if($fixiert)
	print "Diese Rechnung wurde bereits automatisch verbucht.<br>Bitte kontaktieren Sie Ihren Buchhalter und/oder nehmen Sie die &Auml;nderungen in der Buchhaltung vor.<br><br>Möchten Sie fortfahren?<br>";
else
	print "Möchten Sie die Rechnung an <b>".getKontakt($kontakt)."</b> vom <b>$datum</b> mit dem Titel <b>$text</b> wirklich Löschen?<br>";

print "<form method=post action=\"$PHP_SELF\" name=delform>
<input type=hidden name=del value=$id>
<input type=hidden name=back value=\"$back\">
<input type=checkbox name=rem_pos value=true checked> Positionen Löschen<br><br>
<a href=\"#\" onclick=\"javascript:document.delform.submit();\">[ Ja ]</a>
<a href=\"".urldecode($backno)."\">[ Nein ]</a>";
?>
</body>
</html>
<file_sep>/modules/domains/func.inc.php
<?
// Returns the formatted "paid until" date of a domain, falling back to the
// domain's start date when bezahltBis is NULL/zero.
// FIX: ids are now forced to int (the old code interpolated the raw request
// value into SQL) and both dates are fetched with a single query instead of
// two round trips.
function domain_bezahlt_bis($id){
	global $_config_date;
	$id=intval($id);
	$query=mysql_query("SELECT DATE_FORMAT(bezahltBis,'$_config_date'),DATE_FORMAT(startDate,'$_config_date') FROM Domains WHERE id='$id'");
	list($bezahltBis,$startDate)=mysql_fetch_row($query);
	// NOTE(review): the "00.00.0000" sentinel assumes $_config_date is a
	// d.m.Y style format - revisit if the configured format ever changes.
	if($bezahltBis=="" || $bezahltBis=="00.00.0000") {
		return $startDate;
	}
	return $bezahltBis;
}

// Returns the domain name for a Domains.id.
function getDomain($id){
	$query=mysql_query("SELECT domain FROM Domains WHERE id='".intval($id)."'");
	return mysql_result($query,0,0);
}

// Number of whole calendar-month steps from $in_dateLow up to the month of
// $in_dateHigh (both unix timestamps). If the low date lies after the high
// date, 1 is returned (historic behaviour relied on by the billing code).
function diff_month($in_dateLow, $in_dateHigh) {
	if ($in_dateLow > $in_dateHigh) {
		// Originally the two dates were meant to be swapped here; the module
		// now deliberately returns the constant 1 instead.
		return 1;
	}
	$dateLow = $in_dateLow;
	$dateHigh = strftime('%m/%Y', $in_dateHigh);
	$periodDiff = 0;
	while (strftime('%m/%Y', $dateLow) != $dateHigh) {
		$periodDiff++;
		$dateLow = strtotime('+1 month', $dateLow);
	}
	return $periodDiff;
}
?>
<file_sep>/modules/rapportierung/rapport.php
<?
include("../../inc/config.inc.php");
include("../../inc/func.inc.php");
include("func.inc.php");

if(!$back) {
	$back="show.php";
}

if($submit) {
	// Validate the posted report before updating it.
	if(!$employee) {
		$error="Bitte geben Sie einen Mitarbeiter an";
	} else if(!$kontakt) {
		$error="Bitte geben Sie einen Kunden an";
	} else if(!$code) {
		$error="Bitte geben Sie einen Code an";
	} else if(!isset($time_clearable)) {
		$error="Bitte geben Sie die verrechenbaren Stunden an";
	} else if(!$time) {
		$error="Bitte geben Sie die effektiven Stunden an";
	} else if(!$date) {
		$error="Bitte geben Sie das Datum an";
	} else if($time_clearable > $time) {
		$error="Die verrechenbare Zeit darf nicht grösser sein als die Effektive";
	} else {
		// FIX: removed the dead `if(!$time) $time=$time_clearable;` - the
		// validation above already rejects an empty $time, so the assignment
		// could never run.
		$query=mysql_query("UPDATE Rapportierung SET employee='$employee',kontakt='$kontakt',code='$code',ansprechperson='$ansprechperson',text='$text',date='$date',time='$time',time_clearable='$time_clearable' WHERE id='$id'");
		if(mysql_error())
			$error=mysql_error();
		else{
			header("Location: $back?msg=".urlencode("Rapport geändert"));
		}
	}
}

// Load the report being edited; fall back to today / the current user for
// the date and employee fields.
$query=mysql_query("SELECT employee,kontakt,code,ansprechperson,text,DATE_FORMAT(date,'$_config_date'),time,time_clearable FROM Rapportierung WHERE id='$id'");
list($employee,$kontakt,$code,$ansprechperson,$text,$date,$time,$time_clearable)=mysql_fetch_row($query);
if(!$date)
	$date=date("d.m.Y");
if(!$employee)
	$employee=getHttpUserId();
?>
<html>
<head>
<title><?=$_config_title?></title>
<link rel="stylesheet" href="../../main.css" type=text/css>
</head>
<body onLoad="<?=error($error)?><?=alert($msg)?>document.getElementsByName('kontakt')[0].focus()">
<p class=titel>Rapportierung:Rapport erstellen</p>
<?
print "<form method=post action=\"$PHP_SELF?id=$id\">\n";
print "<table border=0>
<tr>
<td width=100>Intern:</td>
<td>".getEmpList("employee",$employee,250,"Bitte auswählen")."</td>
</tr>
<tr>
<td width=100>Kontakt:</td>
<td>".getKontakteList("kontakt",$kontakt,250,"Bitte auswählen")."</td>
</tr>
<tr>
<td width=100>Code:</td>
<td>".getRapportCodeList("code",0,$code,250,"Bitte Auswählen")."</td>
</tr>
<tr>
<td width=100 valign=top>Text:</td>
<td><textarea name=text style=\"width:250px;height:100px;\">$text</textarea></td>
</tr>
<tr>
<td width=100>Ansprechperson:</td>
<td><input type=text maxlength=100 name=ansprechperson value=\"$ansprechperson\" style=\"width:250px\"></td>
</tr>
<tr>
<td width=100>Zeit Verrechenbar:</td>
<td><input type=text maxlength=255 name=time_clearable value=\"$time_clearable\" style=\"width:80px\"></td>
</tr>
<tr>
<td width=100>Zeit Effektiv:</td>
<td><input type=text maxlength=255 name=time value=\"$time\" style=\"width:80px\"></td>
</tr>
<tr>
<td width=100>Datum:</td>
<td><input type=text maxlength=255 name=date value=\"$date\" style=\"width:80px\"></td>
</tr>
</table>\n";
print "<input type=submit name=submit value=\"Ändern\"> <input type=button onclick=\"javascript:location.href='delete.php?id=$id'\" value=\"Löschen\"></form>\n";
?>
</body>
</html>
<file_sep>/modules/rechnungen/gutschriften_add.php
<?
include("../../inc/config.inc.php");
include("../../inc/func.inc.php");

if($submit) {
	$query=mysql_query("INSERT INTO Rechnungen_gutschriften(kontakt,text,betrag,waehrung,datum) values ('$kontakt','$text','$betrag','$waehrung',NOW())");
	if($query){
		header("Location: gutschriften.php");
	} else {
		$error=mysql_error();
	}
}
?>
<html>
<head>
<title><?=$_config_title?></title>
<link rel="stylesheet" href="../../main.css" type=text/css>
</head>
<body onLoad="document.getElementsByName('kontakt')[0].focus()">
<p class=titel>Rechnungen:Neue Gutschrift</p>
<?
// Show the last database error above the form, if any.
if($error){
	print "<b>Fehler:</b> $error<br><br>";
}

// Entry form for a new credit note (Gutschrift).
print "<form method=post action=\"$PHP_SELF\">\n";
print "<table border=0 cellpadding=0 cellspacing=0>
<tr>
<td width=150>Gutschrift zugunsten:</td>
<td>".getKontakteList("kontakt",$kontakt,250,"Bitte Auswählen")."</td>
</tr>
<tr>
<td width=150 valign=top>Text:</td>
<td><textarea height=4 style=\"width:250;\" name=\"text\">$text</textarea></td>
</tr>
<tr>
<td width=150>Währung:</td>
<td>".getWaehrungsList("waehrung",$waehrung,100)."</td>
</tr>
<tr>
<td width=150>Betrag:</td>
<td><input type=text name=betrag style=\"width:100;\" value=\"$betrag\"></td>
</tr>
<tr>
</table><br><br>\n <input type=submit name=submit value=\"Hinzuf&uuml;gen\">\n</form>\n";
?>
</body>
</html>
<file_sep>/modules/kontakte/kontakt.php
<?
session_start();
include("../../inc/config.inc.php");
include("../../inc/func.inc.php");
include("func2.inc.php");

$debugLogString="";
// Support both the legacy register_globals array and $_POST.
$HTTP_POST_VARS = !empty($HTTP_POST_VARS) ? $HTTP_POST_VARS : $_POST;

// Default return target for the "back" navigation.
if(!isset($_SESSION["back"])){
	$_SESSION["back"] ="show.php";
}

/**
 * If an id is given, load the kontakt and its kontaktpersonen from the DB.
*/ //$id= isset($id)?$id:-1; if(isset($id)) { /* if(session_is_registered($kontakt)) { session_unregister($kontakt); unset($kontakt); } if(session_is_registered($kontaktpersonen)) { session_unregister($kontaktpersonen); unset($kontaktpersonen); } */ if(isset($_SESSION["kontakt"])) { //session_unregister($kontakt); unset($_SESSION["kontakt"]); } if(isset($_SESSION["kontaktpersonen"])) { //session_unregister($kontaktpersonen); unset($_SESSION["kontaktpersonen"]); $kontaktpersonen = NULL; } $query=mysql_query("SELECT id,DATE_FORMAT(erfasst,'$_config_date') as erfasst,DATE_FORMAT(updated,'$_config_date') as updated,aktiv,firma,firma2,anrede,adresse,adresse2,plz,ort,land,telefon1,telefon2,mobile,fax,mail,www,text,konto,kontonr,blz,swift,iban,pl,kontakt1,kontakt2,kontakt3 FROM Kontakte WHERE id='$id'"); if(!($error=mysql_error())) { $kontakt=array(); $kontakt = mysql_fetch_array($query, MYSQL_ASSOC); //session_register("kontakt"); $_SESSION["kontakt"] = $kontakt; $query = mysql_query("SELECT id, name,vorname, anrede,position,abteilung,adresse,adresse2,plz,ort,land,tel_privat,tel_gesch,tel_direkt,tel_mobile,fax,mail,mail2,text FROM Kontakte_kontaktpersonen WHERE firma = '$id' AND NOT firma='0'"); if(!($error=mysql_error())) { $kontaktpersonen=array(); for($i=0;$i<mysql_num_rows($query);$i++) { $kontaktpersonen[$i] = mysql_fetch_array($query, MYSQL_ASSOC); //$errorMsgFlo .= $kontaktpersonen[$i]; } //session_register("kontaktpersonen"); $_SESSION["kontaktpersonen"] = $kontaktpersonen; } } if(!$kontakt['anrede']) { $kontakt['anrede']=$_config_kontakte_default_anrede; } /** * if kontaktpersonId is update the arrays and save it ( with putting the submitform =1)! 
*/ } else { $debugLogString.="<h1>else:</h1><p>"; $debugLogString.=saveKontakte(); $debugLogString.="</p>"; //saveKontakte(); //$submit_form=1; } if($submit_form==1){ if(!$kontakt['firma']){ $error="Firma oder Name muss ausgefüllt werden"; }else { /** * Update or Insert into kontakt Table */ //UPDATE the kontakt values in database if( $kontakt['id']>0 ) { $keys = array_keys($kontakt); $sql=""; for($i=0;$i<count($keys);$i++) { if($i>0) $sql.=", "; $sql.=$keys[$i]."='".$kontakt[$keys[$i]]."'"; } $myQueryString = "UPDATE Kontakte SET updated=NOW(),$sql WHERE id='".$kontakt['id']."'"; $query=mysql_query($myQueryString); //INSERT the kontakt values in database }else { $keys = array_keys($kontakt); $fields=""; $values=""; for($i=0;$i<count($keys);$i++) { if($i>0) { $fields.=","; $values.=","; } $fields.=$keys[$i]; $values.="'".$kontakt[$keys[$i]]."'"; } $myQueryString = "INSERT INTO Kontakte($fields,erfasst) VALUES($values,NOW())"; $query=mysql_query($myQueryString); } /** * Update or Insert into kontaktpersonen Table */ if(!($error=mysql_error())) { for($i=0;$i<count($kontaktpersonen);$i++) { unset($sql,$fields,$values); $keys = array_keys($kontaktpersonen[$i]); //try to get the kontaktperosn wit iteration id if there is one in db $query=mysql_query("SELECT count(*) FROM Kontakte_kontaktpersonen WHERE id='".$kontaktpersonen[$i]['id']."'"); //UPDATE the kontaktpersonen values in database because ther was a entry found with the iteration id if(mysql_result($query,0,0)>0) { for($ii=0;$ii<count($keys);$ii++) { if($ii>0) $sql.=", "; $sql.=$keys[$ii]."='".$kontaktpersonen[$i][$keys[$ii]]."'"; } $query=mysql_query("UPDATE Kontakte_kontaktpersonen SET $sql WHERE id='".$kontaktpersonen[$i]['id']."'"); //INSERT the kontaktpersonen values in database otherwise }else { for($ii=0;$ii<count($keys);$ii++) { if($ii>0) { $fields.=","; $values.=","; } $fields.=$keys[$ii]; $values.="'".$kontaktpersonen[$i][$keys[$ii]]."'"; } $query=mysql_query("INSERT INTO 
Kontakte_kontaktpersonen(firma,$fields) VALUES('".$kontakt['id']."',$values)");
				}
			}
			// Everything saved: drop the working session and navigate back.
			if(!($error=mysql_error())) {
				session_destroy();
				header("Location: $back");
			}
		}
	}
}
?>
<html>
<head>
<title><?=$_config_title?></title>
<link rel="stylesheet" href="../../main.css" type=text/css>
<script src="../../inc/functions.js" type="text/javascript" language="javascript"></script>
<script type="text/javascript" language="javascript">
<!--
var state;
state=<?=$kontakt['anrede']?>;

// Re-submit the form when the salutation switches between the private (1/2)
// and company (3) layouts so the matching module set is rendered.
// NOTE(review): the second condition contains `(newState==1 && newState==2)`,
// which can never be true - it probably should read
// `state==3 && (newState==1 || newState==2)`. Confirm before changing.
function changeAnrede(newState)
{
	if((state==1 || state==2) && newState==3) {
		document.getElementById('submit_form').value=2;
		document.kontaktform.submit();
	}
	else if(state==3 || (newState==1 && newState==2)) {
		document.getElementById('submit_form').value=2;
		document.kontaktform.submit();
	}
}

// Redirect the whole form to the kontaktperson editor; id is the index of
// the person in the session array, -1 for a new one.
function showKontaktPerson(id)
{
	//alert(id);
	document.kontaktform.action='kontaktperson.php?key='+id;
	alert(document.kontaktform.action);
	document.kontaktform.submit();
}
//-->
</script>
</head>
<body onLoad="javascript:error('<?=addslashes($error)?>'); document.getElementById('firma').focus();">
<? echo $debugLogString?>
<p class="titel">Kontakte:Kontakt</p>
<?
if(count($kontakt)>0) {
	// The edit form is a 2-column grid; each of the four rows shows up to two
	// configurable module includes (left / right slot).
	print "<form method=post action=\"$PHP_SELF\" name=\"kontaktform\">
<input type=\"hidden\" name=\"submit_form\" id=\"submit_form\" value=\"1\">
<table border=0 cellpadding=0 cellspacing=0 width=\"95%\">";

	if($_config_kontakte_module1 || $_config_kontakte_module2 ){
		print " <tr> <td align=left valign=top >";
		if($_config_kontakte_module1) {
			foreach(split(";",$_config_kontakte_module1) as $module) {
				include("modules/$module.inc.php");
			}
		}
		print "</td> <td width=\"*\">&nbsp;</td> <td align=right valign=top>";
		if($_config_kontakte_module2) {
			foreach(split(";",$_config_kontakte_module2) as $module) {
				include("modules/$module.inc.php");
			}
		}
		print "</td> </tr>";
	}

	if($_config_kontakte_module3 || $_config_kontakte_module4 ){
		print "<tr> <td colspan=3 height=30>&nbsp;</td> </tr> <tr> <td align=left valign=top>";
		if($_config_kontakte_module3) {
			foreach(split(";",$_config_kontakte_module3) as $module) {
				include("modules/$module.inc.php");
			}
		}
		print "</td> <td width=\"*\">&nbsp;</td> <td align=right valign=top>";
		if($_config_kontakte_module4) {
			foreach(split(";",$_config_kontakte_module4) as $module) {
				include("modules/$module.inc.php");
			}
		}
		print " </td> </tr>";
	}

	if($_config_kontakte_module5 || $_config_kontakte_module6 ){
		print "<tr> <td colspan=3 height=30>&nbsp;</td> </tr> <tr> <td align=left valign=top>";
		if($_config_kontakte_module5) {
			foreach(split(";",$_config_kontakte_module5) as $module) {
				include("modules/$module.inc.php");
			}
		}
		print "</td> <td width=\"*\">&nbsp;</td> <td align=right valign=top>";
		if($_config_kontakte_module6) {
			foreach(split(";",$_config_kontakte_module6) as $module) {
				include("modules/$module.inc.php");
			}
		}
		print " </td> </tr>";
	}

	if($_config_kontakte_module7 || $_config_kontakte_module8 ){
		print "<tr> <td colspan=3 height=30>&nbsp;</td> </tr> <tr> <td align=left valign=top>";
		if($_config_kontakte_module7) {
			foreach(split(";",$_config_kontakte_module7) as $module) {
				include("modules/$module.inc.php");
			}
		}
		print "</td> <td 
width=\"*\">&nbsp;</td> <td align=right valign=top>";
		if($_config_kontakte_module8) {
			foreach(split(";",$_config_kontakte_module8) as $module) {
				include("modules/$module.inc.php");
			}
		}
		print " </td> </tr>";
	}

	print "</table><br>
<input type=\"submit\" name=\"submit_button\" value=\"Speichern\">
<input type=\"button\" onclick=\"javascript:location.href='".urldecode($back)."'\" value=\"Abbrechen\">
<input type=button onclick=\"javascript:location.href='delete.php?id=$id&back=".urlencode($back)."&backno=".urlencode($REQUEST_URI)."'\" value=\"Löschen\">\n";

	// Contact persons are only listed for companies, not private contacts.
	if(!anredeIsPrivate($kontakt['anrede'])) {
		//Kontaktpersonen
		print "<hr noshade width=95% align=left>";
		if(count($kontaktpersonen)>0) {
			print "<table width=95% border=0 cellpadding=0 cellspacing=0>
<tr>
<td><b>Name</b>
<td><b>Position</b></td>
<td><b>e-mail Adresse</b>
<td><b>Telefon</b>
</tr>\n";
			for($i=0;$i<(count($kontaktpersonen));$i++) {
				// Pick the "best" phone number: direct, mobile, office, private.
				// FIX: the mobile fallback tested the misspelled key
				// 'tel_mobilet', so a mobile-only contact showed no number.
				if($kontaktpersonen[$i]['tel_direkt']) {
					$tel=$kontaktpersonen[$i]['tel_direkt']." (Direkt)";
				}
				else if($kontaktpersonen[$i]['tel_mobile']) {
					$tel=$kontaktpersonen[$i]['tel_mobile']." (Mobile)";
				}
				else if($kontaktpersonen[$i]['tel_gesch']) {
					$tel=$kontaktpersonen[$i]['tel_gesch']." (Geschäft)";
				}
				else if($kontaktpersonen[$i]['tel_privat']) {
					$tel=$kontaktpersonen[$i]['tel_privat']." (Privat)";
				}
				else {
					$tel="";
				}
				if(($i%2)==0){
					$bgcolor=$_config_tbl_bgcolor1;
				} else {
					$bgcolor=$_config_tbl_bgcolor2;
				}
				// FIX: the mailto link text used $kontaktpersonen['mail'][$i]
				// (indices swapped), which is always empty; it now shows the
				// person's address like the href does.
				print "<tr onmouseover=\"setPointer(this, 'over', '#$bgcolor', '#$_config_tbl_bghover', '')\" onmouseout=\"setPointer(this, 'out', '#$bgcolor', '#$_config_tbl_bghover', '')\" onclick=\"showKontaktPerson(".($i).");\">
<td valign=top bgcolor=\"#$bgcolor\" width=\"25%\">".formatName("",$kontaktpersonen[$i]['name'],$kontaktpersonen[$i]['vorname'])."</td>
<td valign=top bgcolor=\"#$bgcolor\" width=\"25%\">".$kontaktpersonen[$i]['position']."</td>
<td valign=top bgcolor=\"#$bgcolor\" width=\"25%\"><a href=\"mailto:".$kontaktpersonen[$i]['mail']."\">".$kontaktpersonen[$i]['mail']."</a></td>
<td valign=top bgcolor=\"#$bgcolor\" width=\"25%\">$tel</td>
</tr>";
			}
			print "</table>";
		}
		print "<br><a href=\"#\" onclick=\"showKontaktPerson(-1);\">[ Neue Kontaktperson ]</a><br><br>";
	}
} else {
	print "Fehler: Kontakt existiert ev. nicht\n";
}
?>
</form>
<br />
<br />
<br />
<br />
<?
// Debug dump of the working session (left in by the original author; remove
// once the kontakt editing flow is stable).
echo "<pre>";
print_r($_SESSION);
echo "</pre>";
?>
</body>
</html>
<file_sep>/trash/kontakte_klein/edit.php
<?
include("../../inc/config.inc.php");
include("../../inc/db.inc.php");
include("../../inc/func.inc.php");

if($submit){
	if(!$name && !$firma){
		$fehler="Firma oder Name muss ausgefüllt werden";
	} else {
		$query_update=mysql_query("UPDATE Kontakte SET ansprechperson='$ansprechperson',name='$name',vorname='$vorname',anrede='$anrede',titel='$titel',firma='$firma',firma2='$firma2',abteilung='$abteilung',adresse='$adresse',adresse2='$adresse2',plz='$plz',ort='$ort',land='$land',tel_privat='$tel_privat',tel_gesch='$tel_gesch',tel_direkt='$tel_direkt',tel_mobile='$tel_mobile',fax='$fax',mail='$mail',mail2='$mail2',admin='$admin',text='$text',konto='$konto',kontonr='$kontonr',blz='$blz',swift='$swift',iban='$iban' WHERE id='$id'");
		if($query_update){
			header("Location: detail.php?id=$id");
		} else {
			$fehler="<b>Fehler</b>: Die Datenbank konnte nicht Aktualisiert werden.";
		}
	}
}
?>
<html>
<head>
<title><?=$_config_title?></title>
<link rel="stylesheet" href="../../main.css" type=text/css>
</head>
<body>
<p class=titel>Kontakte:Editieren</p>
<?
if($fehler)
	print $fehler."<br>\n";

$query=mysql_query("SELECT id,ansprechperson,name,vorname,firma,firma2,abteilung,anrede,titel,adresse,adresse2,plz,ort,land,tel_privat,tel_gesch,tel_direkt,tel_mobile,fax,mail,mail2,admin,text,konto,kontonr,blz,swift,iban FROM Kontakte kon WHERE kon.id='$id' AND kon.aktiv=1");
if($query) {
	// On first display or after a successful update, refresh the form values
	// from the database; after a failed save keep the posted values.
	if($query_update||!$submit)
		list($id,$ansprechperson,$name,$vorname,$firma,$firma2,$abteilung,$anrede,$titel,$adresse,$adresse2,$plz,$ort,$land,$tel_privat,$tel_gesch,$tel_direkt,$tel_mobile,$fax,$mail,$mail2,$admin,$text,$konto,$kontonr,$blz,$swift,$iban)=mysql_fetch_row($query);

	// FIX: the old code overwrote $ansprechperson and $anrede with the
	// rendered <select> HTML before passing them to the list helpers below,
	// so the stored value was never pre-selected.

	if($plz==0)
		$plz="";

	// FIX: added the missing <tr> tags before the "Land" and "Tel. Direkt."
	// rows, which previously produced invalid table markup.
	print "<form method=post action=\"$PHP_SELF?id=$id\">\n<table border=0>
<tr>
<td width=150>Name:</td>
<td><input type=text name=\"name\" value=\"$name\" style=\"width:200px;\" maxlength=50></td>
</tr>
<tr>
<td width=150>Vorname</td>
<td><input type=text name=\"vorname\" value=\"$vorname\" style=\"width:200px;\" maxlength=50></td>
</tr>
<tr>
<td width=150>Anrede</td>
<td>".getAnredeList("anrede",$anrede,"150")."</td>
</tr>
<tr>
<td width=150>Titel</td>
<td><input type=text name=\"titel\" value=\"$titel\" style=\"width:200px;\" maxlength=50></td>
</tr>
<tr>
<td width=150>Firma</td>
<td><input type=text name=\"firma\" value=\"$firma\" style=\"width:200px;\" maxlength=50></td>
</tr>
<tr>
<td width=150><NAME></td>
<td><input type=text name=\"firma2\" value=\"$firma2\" style=\"width:200px;\" maxlength=50></td>
</tr>
<tr>
<td width=150>Abteilung</td>
<td><input type=text name=\"abteilung\" value=\"$abteilung\" style=\"width:200px;\" maxlength=50></td>
</tr>
<tr>
<td width=150>Adresse</td>
<td><input type=text name=\"adresse\" value=\"$adresse\" style=\"width:200px;\" maxlength=50></td>
</tr>
<tr>
<td width=150>Adresse 2</td>
<td><input type=text name=\"adresse2\" value=\"$adresse2\" style=\"width:200px;\" maxlength=50></td>
</tr>
<tr>
<td width=150>PLZ / Ort</td>
<td><input type=text name=\"plz\" value=\"$plz\" style=\"width:45px;\" maxlength=10> <input type=text name=\"ort\" value=\"$ort\" style=\"width:150px;\" maxlength=50></td>
</tr>
<tr>
<td width=150>Land</td>
<td><input type=text name=\"land\" value=\"$land\" style=\"width:200px;\" maxlength=20></td>
</tr>
<tr>
<td width=150>Tel. Privat</td>
<td><input type=text name=\"tel_privat\" value=\"$tel_privat\" style=\"width:200px;\" maxlength=20></td>
</tr>
<tr>
<td width=150>Tel. Gesch.</td>
<td><input type=text name=\"tel_gesch\" value=\"$tel_gesch\" style=\"width:200px;\" maxlength=20></td>
</tr>
<tr>
<td width=150>Tel. Direkt.</td>
<td><input type=text name=\"tel_direkt\" value=\"$tel_direkt\" style=\"width:200px;\" maxlength=20></td>
</tr>
<tr>
<td width=150>Mobile</td>
<td><input type=text name=\"tel_mobile\" value=\"$tel_mobile\" style=\"width:200px;\" maxlength=20></td>
</tr>
<tr>
<td width=150>Fax</td>
<td><input type=text name=\"fax\" value=\"$fax\" style=\"width:200px;\" maxlength=20></td>
</tr>
<tr>
<td width=150>e-mail</td>
<td><input type=text name=\"mail\" value=\"$mail\" style=\"width:200px;\" maxlength=50></td>
</tr>
<tr>
<td width=150>e-mail 2</td>
<td><input type=text name=\"mail2\" value=\"$mail2\" style=\"width:200px;\" maxlength=50></td>
</tr>
<tr>
<td width=150>Admin</td>
<td>
<select name=admin width=150>";
	if($admin==1){
		print "<option value=0>Nein</option> <option value=1 SELECTED>Ja</option>\n";
	} else {
		print "<option value=0 SELECTED>Nein</option> <option value=1>Ja</option>\n";
	}
	print "</select>
</td>";
	if($_config_kontakte_show_ansprechperson){
		print "<tr>
<td width=150>Ansprechperson</td>
<td>".getAnsprechpersonenList("ansprechperson",$ansprechperson,"150")."</td>
</tr>";
	}
	print "<tr>
<td width=150 valign=top>Text</td>
<td><textarea name=text style=\"width:200px;height:100px\">$text</textarea></td>
</tr>
<tr>
<td width=150>Konto</td>
<td><input type=text name=\"konto\" value=\"$konto\" style=\"width:200px;\" maxlength=50></td>
</tr>
<tr>
<td width=150>Konto Nr.</td>
<td><input type=text name=\"kontonr\" value=\"$kontonr\" style=\"width:200px;\" maxlength=50></td>
</tr>
<tr>
<td width=150>BLZ</td>
<td><input type=text name=\"blz\" value=\"$blz\" style=\"width:200px;\" maxlength=50></td>
</tr>
<tr>
<td width=150>Swift</td>
<td><input type=text name=\"swift\" value=\"$swift\" style=\"width:200px;\" maxlength=50></td>
</tr>
<tr>
<td width=150>Iban</td>
<td><input type=text name=\"iban\" value=\"$iban\" style=\"width:200px;\" maxlength=50></td>
</tr>
</table>
<input type=submit name=submit value=\"&Auml;ndern\">\n</form><br><br>\n[ <a href=\"detail.php?id=$id\">View-only</a> ] [ <a href=\"show.php\">Übersicht</a> ]";
} else {
	print "Fehler: User existiert ev. nicht\n";
}
?>
</body>
</html>
<file_sep>/modules/rechnungenOloid/offene.php
<?
include("../../inc/config.inc.php");
include("../../inc/func.inc.php");
?>
<html>
<head>
<title><?=$_config_title?></title>
<link rel="stylesheet" href="../../main.css" type=text/css>
</head>
<body>
<p class=titel>Rechnungen:Offene Rechnungen</p>
<form method=get action="<?=$PHP_SELF?>">
<input type=text name=term id=term value="<?=$term?>">
<input type=submit name=search value="Suchen">
</form>
<?
if($term) {
	$query=mysql_query("SELECT rech.id,rech.fixiert,rech.kontakt,DATE_FORMAT(rech.datum,'$_config_date'),rech.betreff,rech.zahlungsfrist,rech.besrnr,rech.waehrung,sum($_config_posbetrag) FROM Rechnungen rech LEFT JOIN Rechnungen_positionen pos ON rech.id = pos.rechnung,Kontakte kon WHERE bezahlt is NULL AND ".formatSearchString($term,array("kon.firma","kon.firma2","rech.id","rech.betreff","rech.text","rech.footer"))." 
AND kon.id = rech.kontakt GROUP BY rech.id");
} else {
	$query=mysql_query("SELECT rech.id,rech.fixiert,rech.kontakt,DATE_FORMAT(rech.datum,'$_config_date'),rech.betreff,rech.zahlungsfrist,rech.besrnr,rech.waehrung,sum($_config_posbetrag) FROM Rechnungen rech LEFT JOIN Rechnungen_positionen pos ON rech.id = pos.rechnung WHERE bezahlt is NULL GROUP BY rech.id");
}
if(mysql_error()){
	print "<b>Fehler:</b> ".mysql_error();
	exit;
}else if(@mysql_num_rows($query)<1) {
	print "<b>Keine Offene Rechnungen</b>";
	exit;
}

print "<table border=0 cellpadding=2 cellspacing=0>
<tr>
<td><b>Kontakt</b></td>
<td><b>Datum</b></td>
<td><b>Nr.</b></td>
<td><b>Betreff</b></td>
<td><b>Betrag</b></td>
<td><b>Aktion</b></td>
<td><b>Mahnungen</b></td>
</tr>";

// FIX: removed the unused leftover `$change=-1;`.
for($i=0;list($id,$fixiert,$kontakt,$datum,$betreff,$zahlungsfrist,$besrnr,$waehrung,$betrag)=mysql_fetch_row($query);$i++) {
	if($i%2==0){
		$bgcolor=$_config_tbl_bgcolor1;
	} else {
		$bgcolor=$_config_tbl_bgcolor2;
	}
	// Overdue invoices (payment deadline passed) are rendered in red.
	if(strtotime("+$zahlungsfrist days",strtotime(date_CH_to_EN($datum)))< strtotime(date("Y-m-d")))
		$red = " style=\"color:red\"";
	else
		$red="";

	// Subtract credit notes already booked against this invoice.
	$query2=mysql_query("SELECT ((betrag+((betrag/100)*mwst))*fx) FROM Rechnungen_gutschriften WHERE bezahlt='$id'");
	while(list($betrag_gut)=mysql_fetch_row($query2))
		$betrag -= $betrag_gut;

	print "<tr>
<td width=180 valign=top bgcolor=\"#$bgcolor\"$red><a href=\"../kontakte/kontakt.php?id=$kontakt&back=".urlencode($REQUEST_URI)."\"$red>".getKontakt($kontakt)."</a></td>
<td width=80 valign=top bgcolor=\"#$bgcolor\"$red>$datum</td>
<td valign=top bgcolor=\"#$bgcolor\"$red>".str_pad($id,4,"0",STR_PAD_LEFT)."</td>
<td width=\"*\" valign=top bgcolor=\"#$bgcolor\"$red>$betreff</td>
<td width=100 valign=top align=right bgcolor=\"#$bgcolor\"$red>".formatBetrag($betrag)." ".getWaehrungHtml($waehrung)."</td>
<td width=280 valign=top bgcolor=\"#$bgcolor\"><a href=\"edit.php?id=$id&back=".urlencode($REQUEST_URI)."\"$red>Edit</a> ";
	print "<a href=\"bezahlt.php?id=$id&back=".urlencode($REQUEST_URI)."\"$red>Bezahlt</a> <a href=\"createPDF.php?id=$id\"$red>PDF</a> <a href=\"mahnen.php?id=$id&back=".urlencode($REQUEST_URI)."\"$red>Mahnen</a> <a href=\"delete.php?id=$id&back=".urlencode($REQUEST_URI)."\"$red>Löschen</a></td>";

	// Dates of reminders already sent for this invoice.
	$query2=mysql_query("SELECT DATE_FORMAT(datum,'$_config_date') FROM Rechnungen_mahnungen WHERE rechnung='$id'");
	$gemahnt="";
	if(@mysql_num_rows($query2)>0) {
		for($ii=0;list($datum)=mysql_fetch_row($query2);$ii++) {
			if($ii==0)
				$gemahnt.=$datum;
			else
				$gemahnt.=", $datum";
		}
	} else {
		$gemahnt="-";
	}
	print "<td width=80 bgcolor=\"#$bgcolor\"$red>$gemahnt</td>
</tr>\n";
}

// Totals per currency over all open invoices (credit notes subtracted).
print "<tr>
<td colspan=3><b>Total</b>
<td colspan=2 align=right><b>";
if($term) {
	$query=mysql_query("SELECT rech.id,sum($_config_posbetrag),rech.waehrung FROM Rechnungen_positionen pos, Rechnungen rech,Kontakte kon WHERE rech.id = pos.rechnung AND rech.bezahlt is NULL AND ".formatSearchString($term,array("kon.firma","kon.firma2","rech.id","rech.betreff","rech.text","rech.footer"))." AND kon.id = rech.kontakt GROUP BY rech.waehrung");
} else {
	$query=mysql_query("SELECT rech.id,sum($_config_posbetrag),rech.waehrung FROM Rechnungen_positionen pos, Rechnungen rech WHERE rech.id = pos.rechnung AND rech.bezahlt is NULL GROUP BY rech.waehrung");
}
for($i=0;(list($rech_id,$betrag,$waehrung)=mysql_fetch_row($query));$i++) {
	$query2=mysql_query("SELECT sum(((betrag+((betrag/100)*mwst))*fx)) FROM Rechnungen_gutschriften WHERE bezahlt='$rech_id' GROUP BY bezahlt");
	if(mysql_num_rows($query2)>0)
		$betrag-=mysql_result($query2,0,0);
	if($i>0)
		print "<br>";
	print formatBetrag($betrag)." 
".getWaehrungHtml($waehrung); } print "</b></td> <td colspan=2>&nbsp;</td> </tr> </table> <input type=button value=\"Exportieren\" onclick=\"javascript:location.href='export.php';\"> </form>"; ?> </body> </html> <file_sep>/modules/rechnungenOloid/export.php <? include("../../inc/config.inc.php"); include("../../inc/func.inc.php"); if(!$back){ $back="offene.php"; } if(!$backno){ $backno=$back; } function kontoExistiert($nr) { global $buchhaltung,$_config_mysql_buchhaltung_db; mysql_select_db($_config_mysql_buchhaltung_db); $query=mysql_query("SELECT count(*) FROM $buchhaltung"."_Konto WHERE nr='$nr'"); return mysql_result($query,0,0); } function createKonto($nr,$kontakt,$typ) { global $buchhaltung,$_config_mysql_buchhaltung_db; mysql_select_db($_config_mysql_buchhaltung_db); $query=mysql_query("SELECT max(sort) FROM $buchhaltung"."_Konto"); $sort=@mysql_result($query,0,0)+1; $query=mysql_query("SELECT id FROM $buchhaltung"."_Nebenkonto WHERE name='Debitoren'"); $nebenkonto=@mysql_result($query,0,0); $query=mysql_query("INSERT INTO $buchhaltung"."_Konto(nr,name,nebenkonto,typ,waehrung,show_waehrung,show_belegnr,show_mwst,show_datum,sort) Values('$nr','$kontakt','$nebenkonto','$typ',1,1,1,1,1,'$sort')"); return (strlen(mysql_error()==0)); } if($submit) { $query=mysql_query("SELECT rech.id,rech.datum,sum((pos.betrag*pos.anzahl)*pos.fx),mwst,rech.kontakt,rech.waehrung,rech.fx,kon.firma FROM Rechnungen rech,Rechnungen_positionen pos,Kontakte kon WHERE kon.id=rech.kontakt AND rech.id = pos.rechnung AND rech.fixiert=0 AND rech.bezahlt IS NULL GROUP BY rech.id"); if(mysql_num_rows($query)>0) { while((list($id,$datum,$betrag,$mwst,$kontakt,$waehrung,$fx,$firma)=mysql_fetch_row($query)) && !$error) { mysql_select_db($_config_mysql_db); $kt_soll=str_replace("%KONTAKT%",$kontakt,$_config_export_rechnungen_soll); $kt_haben=str_replace("%KONTAKT%",$kontakt,$_config_export_rechnungen_haben); $kontakt_txt=getKontakt($kontakt); $query2=mysql_query("UPDATE Rechnungen SET fixiert='1' 
WHERE id='$id'"); //MWSt.-Satz berechnen $query2=mysql_query("SELECT sum($_config_posbetrag) FROM Rechnungen_positionen pos WHERE pos.rechnung='$id'"); $betragMwst=mysql_result($query2,0,0); if($betragMwst == $betrag) { $mwst = 0; } else { $mwst = (($betragMwst / $betrag)-1)*100; } mysql_select_db($_config_mysql_buchhaltung_db); $query2=mysql_query("SELECT id FROM Buchhaltungen WHERE selected=1"); $buchhaltung=mysql_result($query2,0,0); if(!kontoExistiert($kt_soll)) { createKonto($kt_soll,$kontakt_txt,1); } if(!kontoExistiert($kt_haben)) { createKonto($kt_haben,$kontakt_txt,2); } $query3=mysql_query("INSERT INTO Queue(datum,beschreibung,kt_haben,kt_soll,betrag,waehrung,kurs,mwst,mwst_feld,belegnr) VALUES('$datum','Debit: $id, $firma','$kt_haben','$kt_soll','$betrag','$waehrung','$fx','$mwst',NULL,'I".str_pad($id,5,"0",STR_PAD_LEFT)."')"); $error=mysql_error(); mysql_select_db($_config_mysql_db); } if(!$error) { header("Location: $back"); } } } ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="../../main.css" type=text/css> </head> <body> <p class=titel>Rechnungen:Rechnungen Exportieren</p> <? 
if($err) { print "<b>Fehler:</b> $err"; } $query=mysql_query("SELECT rech.id, rech.text,sum($_config_posbetrag),rech.kontakt FROM Rechnungen rech,Rechnungen_positionen pos WHERE rech.id = pos.rechnung AND rech.fixiert=0 AND rech.bezahlt IS NULL GROUP BY rech.id"); if(mysql_error()) { print "<b>Fehler:</b> ".mysql_error(); } else if(@mysql_num_rows($query)<1) { print "<b>Fehler</b>: Keine Rechnungen zum Exportieren<br><br>\n"; } else { print "Folgende Rechnungen werden Exportiert:<br><br> <table border=0> <tr> <td><b>Kontakt</b></td> <td><b>Betreff</b></td> <td><b>Betrag</b></td> </tr>"; while(list($id,$text,$betrag,$kontakt)=mysql_fetch_row($query)) { print "<tr> <td>".getKontakt($kontakt)."</td> <td>$betreff</td> <td>".formatBetrag($betrag)."</td> </tr>"; } print "</table> <form method=post value=\"$PHP_SELF?id=$id&back=".urlencode($back)."\"> <input type=submit name=submit value=\"Ausführen\"> <input type=button value=\"Zurück\" onclick=\"javascript:location.href='$backno'\"> </form>"; } ?> </body> </html> <file_sep>/modules/rechnungenOloidOld/gutschriften.php <? include("../../inc/config.inc.php"); include("../../inc/func.inc.php"); ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="../../main.css" type=text/css> <script src="../../inc/functions.js" type="text/javascript" language="javascript"></script> </head> <body onLoad="document.getElementById('term').focus()"> <p class=titel>Rechnungen:Gutschriften</p> <form method=get action="<?=$PHP_SELF?>"> <input type=text name=term id=term value="<?=$term?>"> <input type=submit name=search value="Suchen"> </form> <? 
if($order=="kontakt"){ $order="concat(kon.firma,kon.name)"; } else if($order=="text"){ $order="gut.text"; } else if($order=="betrag"){ $order="gut.betrag"; } else if($order=="aktiv"){ $order="gut.auszahlen"; } else { $order="gut.id"; } if(!$start){ $start=0; } if($term){ $query=mysql_query("SELECT gut.id,gut.kontakt,gut.betrag,gut.text,gut.auszahlen FROM Rechnungen_gutschriften gut, Kontakte kon WHERE gut.bezahlt is NULL AND gut.kontakt = kon.id AND ".formatSearchString($term,array("kon.firma","kon.name","kon.vorname","gut.text","gut.betrag"))." ORDER BY $order"); } else { $query=mysql_query("SELECT gut.id,gut.kontakt,gut.betrag,gut.text,gut.auszahlen FROM Rechnungen_gutschriften gut, Kontakte kon WHERE kon.id = gut.kontakt AND bezahlt is NULL ORDER BY $order"); } echo mysql_error(); if(@mysql_num_rows($query)>0) { print "<table width=\"95%\" border=0 cellpadding=2 cellspacing=0> <tr> <td><b><a href=\"$PHP_SELF?order=kontakt\">Kontakt</a></b></td> <td><b><a href=\"$PHP_SELF?order=text\">Text</a></b></td> <td align=right><b><a href=\"$PHP_SELF?oder=betrag\">Betrag</a></b></td> <td><b><a href=\"$PHP_SELF?order=aktiv\">Aktiv</a></b></td> </tr>\n"; for($i=0;list($id,$kontakt,$betrag,$grund,$aktiv)=mysql_fetch_row($query);$i++) { if(($i%2)==0){ $bgcolor=$_config_tbl_bgcolor1; } else { $bgcolor=$_config_tbl_bgcolor2; } if($bezahlt==1) $ckecked="CHECKED"; if($aktiv==1) $aktiv="Ja"; else $aktiv="Nein"; print "<tr onmouseover=\"setPointer(this, 'over', '#$bgcolor', '#$_config_tbl_bghover', '')\" onmouseout=\"setPointer(this, 'out', '#$bgcolor', '#$_config_tbl_bghover', '')\" onclick=\"location.href='gutschriften_edit.php?id=$id&back=".urlencode($REQUEST_URI)."'\"> <td width=250 valign=top bgcolor=\"#$bgcolor\"><a href=\"../kontakte/kontakt.php?id=$kontakt&back=".urlencode($REQUEST_URI)."\">".getKontakt($kontakt)."</a></td> <td valign=top bgcolor=\"#$bgcolor\">$grund</td> <td width=30 valign=top align=right bgcolor=\"#$bgcolor\">".formatBetrag($betrag)."</td> <td width=40 
valign=top bgcolor=\"#$bgcolor\">$aktiv</td> </tr>"; } print "</table><br>"; } else print "<b>Keine Offene Gutschriften</b><br><br>"; print "[ <a href=\"gutschriften_add.php\">Neu</a> ]\n"; ?> </body> </html> <file_sep>/trash/kontakte_klein/detail.php <? session_start(); include("../../inc/config.inc.php"); include("../../inc/db.inc.php"); include("../../inc/func.inc.php"); $referer=$GLOBALS["HTTP_REFERER"]; if((!strstr($referer,"kontakte/detail.php") && !strstr($referer,"kontakte/edit.php")) || !$kontakte_back){ $kontakte_back=$referer; session_register("kontakte_back"); } ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="../../main.css" type=text/css> </head> <body> <p class=titel>Kontakte:Details</p> <? $query=mysql_query("SELECT id,admin,ansprechperson,name,vorname,firma,firma2,abteilung,titel,anrede,adresse,adresse2,plz,ort,land,tel_privat,tel_gesch,tel_direkt,tel_mobile,fax,mail,mail2,text,konto,kontonr,blz,swift,iban FROM Kontakte kon WHERE kon.id='$id' AND kon.aktiv=1"); if($query) { list($id,$admin,$ansprechperson_id,$name,$vorname,$firma,$firma2,$abteilung,$titel,$anrede,$adresse,$adresse2,$plz,$ort,$land,$tel_privat,$tel_gesch,$tel_direkt,$tel_mobile,$fax,$mail,$mail2,$text,$konto,$kontonr,$blz,$swift,$iban)=mysql_fetch_row($query); //Ansprechperson $query=mysql_query("SELECT id,name,vorname FROM Kontakte WHERE id='$ansprechperson_id'"); list($admin_id,$admin_name,$admin_vorname)=mysql_fetch_row($query); $ansprechperson="<a href=\"detail.php?id=$admin_id\">$admin_vorname $admin_name</a>"; //PLZ Korrektur if($plz==0) $plz=""; //Domains des Users if(isModule("domains")){ $query=mysql_query("SELECT id, domain FROM Domains WHERE (kontakt='$id' OR techkontakt='$id') AND endDate is NULL"); for($i=0;list($domain_id,$domain_name)=mysql_fetch_row($query);$i++){ if($i==0){ $domains.="<a href=\"../../modules/domains/detail.php?id=$domain_id\">$domain_name</a>"; } else { $domains.=", <a 
href=\"../../modules/domains/detail.php?id=$domain_id\">$domain_name</a>"; } } } print "<table border=0>\n"; if($anrede) print "<tr> <td width=150>Anrede:</td> <td>".getAnrede($anrede)."</td> </itr>\n"; if($titel) print "<tr> <td width=150>Titel</td> <td>$titel</td> </tr>\n"; if($name||$vorname) print "<tr> <td width=150>Name/Vorname:</td> <td>$name $vorname</td> </tr>\n"; if($firma) print "<tr> <td width=150>Firma</td> <td>$firma</td> </tr>\n"; if($firma2) print "<tr> <td width=150>Firma Zusatz</td> <td>$firma2</td> </tr>\n"; if($abteilung) print "<tr> <td width=150>Abteilung</td> <td>$abteilung</td> </tr>\n"; if($adresse) print "<tr> <td width=150>Adresse</td> <td>$adresse</td> </tr>\n"; if($adresse2) print "<tr> <td width=150>Adresse 2</td> <td>$adresse2</td> </tr>\n"; if($plz||$ort) print "<tr> <td width=150>PLZ/Ort</td> <td>$plz $ort</td> </tr>\n"; if($land) print "<tr> <td width=150>Land</td> <td>$land</td> </tr>\n"; if($tel_privat) print "<tr> <td width=150>Tel. Privat</td> <td>$tel_privat</td> </tr>\n"; if($tel_gesch) print "<tr> <td width=150>Tel. Gesch.</td> <td>$tel_gesch</td> </tr>\n"; if($tel_direkt) print "<tr> <td width=150>Tel. 
Direkt</td> <td>$tel_direkt</td> </tr>\n"; if($tel_mobile) print "<tr> <td width=150>Mobile</td> <td>$tel_mobile</td> </tr>\n"; if($fax) print "<tr> <td width=150>Fax</td> <td>$fax</td> </tr>\n"; if($mail) print "<tr> <td width=150>E-Mail</td> <td><a href=\"mailto:$mail\">$mail</a></td> </tr>\n"; if($mail2) print "<tr> <td width=150>E-Mail 2</td> <td><a href=\"mailto:$mail2\">$mail2</a></td> </tr>\n"; if($ansprechperson && $_config_kontakte_show_ansprechperson) print "<tr> <td width=150>Ansprechperson</td> <td>$ansprechperson</td> </tr>\n"; if($domains) print "<tr> <td width=150>Domains</td> <td>$domains</td> </tr>\n"; if($admin) print "<tr> <td width=150>Admin</td> <td>Ja</td> </tr>\n"; if($text) print "<tr> <td width=150 valign=top>Text</td> <td>$text</td> </tr>\n"; if($konto) print "<tr> <td width=150 valign=top>Konto</td> <td>$konto</td> </tr>\n"; if($kontonr) print "<tr> <td width=150 valign=top>Konto Nr.</td> <td>$kontonr</td> </tr>\n"; if($blz) print "<tr> <td width=150 valign=top>BLZ</td> <td>$blz</td> </tr>\n"; if($swift) print "<tr> <td width=150 valign=top>Swift</td> <td>$swift</td> </tr>\n"; if($iban) print "<tr> <td width=150 valign=top>Iban</td> <td>$iban</td> </tr>\n"; print "</table>\n<br><br> <a href=\"$kontakte_back\">[ Zurück ]</a> <a href=\"edit.php?id=$id\">[ Editieren ]</a> <a href=\"delete.php?id=$id\">[ Löschen ]</a>"; } else { print "Fehler: User existiert ev. nicht\n"; } ?> </body> </html> <file_sep>/modules/statistiken/statistik.php <? 
include("../../inc/config.inc.php"); include("../../inc/func.inc.php"); include("func.inc.php"); $query=mysql_query("SELECT titel,sql,ueberschriften,filename,datumsfeld,total FROM Statistiken WHERE id='$id'"); list($titel,$sql,$ueberschriften,$filename,$datumsfeld,$total)=mysql_fetch_row($query); ?> <html> <head> <title>Sylon godmode</title> <link rel="stylesheet" href="../../main.css" type=text/css> <style type="text/css"> .gitternetz { border-style:solid;border-collapse: collapse;border-width:1px;empty-cells:show;border-color: #000000 } </style> </head> <body> <p class=titel>Statistiken:<?=$titel?></p> <? print "<a href=\"csv.php?id=$id&start=$start&end=$end&sort=$sort&order=$order\">CSV</a> <br><br>"; if($datumsfeld) print "<form method=post action=\"$PHP_SELF?id=$id\"> <table border=0 cellpadding=0> <tr> <td>Anfang:</td> <td><input type=text maxlength=10 name=start value=\"$start\"></td> </tr> <tr> <td>Ende:</td> <td><input type=text maxlength=10 name=end value=\"$end\"></td> </tr> <tr> <td>&nbsp;</td> <td><input type=submit value=\"Aktualisieren\"></td> </tr> </table> </form> <br>"; eval("\$sql = \"$sql\";"); if($start) { $sql= str_replace("WHERE","WHERE $datumsfeld >= '".date_CH_to_EN($start)."' AND",$sql); } if($end) { $sql= str_replace("WHERE","WHERE $datumsfeld <= '".date_CH_to_EN($end)."' AND",$sql); } if($sort) { if(stristr($sql,"ORDER BY")) { $sql = substr($sql,0,strpos(strtolower($sql),"order by"))."ORDER BY $sort"; } else { $sql.= " ORDER BY $sort"; } if($order){ $sql.= " ".$order; } } $query=mysql_query($sql); if(mysql_error()) print "Fehler: ".mysql_error()."<br><br>SQL Query:<br>".$sql; if(mysql_num_rows($query)>0){ print "<table border=0 cellspacing=0 cellpadding=2 class=gitternetz> <tr>\n"; $i=0; $ueberschriften=split(",",$ueberschriften); foreach($ueberschriften as $ueberschrift) { $ueberschrift = str_replace(">","",$ueberschrift); $ueberschrift = str_replace("<","",$ueberschrift); if($sort==mysql_field_name($query,$i)) { $order="DESC"; } else { 
$order="ASC"; } print " <td class=gitternetz align=".getAlignment($ueberschriften,$i)."><b><nobr><a href=\"$PHP_SELF?id=$id&start=$start&end=$end&sort=".mysql_field_name($query,$i)."&order=$order\">$ueberschrift</a></nobr></b></td>\n"; $i++; } print "</tr>\n"; $total_val = array(); while($row=mysql_fetch_row($query)){ print "<tr>\n"; for($i=0;$i<mysql_num_fields($query);$i++) { if(strlen($row[$i])==0) $row[$i]="&nbsp;"; if(is_numeric($row[$i])) $total_val[$i]+=$row[$i]; print " <td class=gitternetz valign=top align=".getAlignment($ueberschriften,$i).">".$row[$i]."</td>\n"; } print "</tr>\n"; } if(strlen($total)>0) { $total = split(",",$total); print "<tr>\n"; for($i=0;$i<mysql_num_fields($query);$i++) { if(in_array(mysql_field_name($query,$i),$total)) { print " <td class=gitternetz align=".getAlignment($ueberschriften,$i)."><b>".formatBetrag($total_val[$i])."</b></td>\n"; } else { print " <td class=gitternetz>&nbsp;</td>\n"; } } print "</tr>\n"; } print "</table>"; } ?> <file_sep>/inc/func.inc.php <? 
if(file_exists("$_config_root_path/modules/kontakte/func.inc.php")){ global $_config_root_path; include("$_config_root_path/modules/kontakte/func.inc.php"); } function getGebieteList($formname,$selected,$breite,$default) { global $_config_kontakte_gebiet1,$_config_kontakte_gebiet2,$_config_kontakte_gebiet3; eval ("\$select$selected=\"SELECTED\";"); $select="<SELECT name=\"$formname\" style=\"width:$breite\">\n"; if(strlen($default)>0) $select.="<option value=0>$default</option>\n"; if(strlen($_config_kontakte_gebiet1)>1) $select .="<option value=1 $select1>$_config_kontakte_gebiet1</option>\n"; if(strlen($_config_kontakte_gebiet2)>1) $select .="<option value=2 $select2>$_config_kontakte_gebiet2</option>\n"; if(strlen($_config_kontakte_gebiet3)>1) $select .="<option value=3 $select3>$_config_kontakte_gebiet3</option>\n"; $select.="</SELECT>\n"; return $select; } function getKontakteModulesList($formname,$selected,$breite,$default) { global $_config_root_path; $select="<SELECT name=\"$formname\" style=\"width:$breite\">\n"; if(strlen($default)>0) $select.="<option value=0>$default</option>\n"; $handle = opendir("$_config_root_path/modules/kontakte/modules/"); while (false != ($file = readdir($handle))) { if ($file != "." && $file != ".." && $file != "CVS") { $file=str_replace(".inc.php","",$file); if($file==$selected) { $select.="<option value=\"$file\" SELECTED>".ucfirst($file)."</option>"; } else { $select.="<option value=\"$file\">".ucfirst($file)."</option>"; } } } closedir($handle); $select.="</SELECT>\n"; return $select; } function getModulesList($formname,$selected,$breite,$default) { global $_config_root_path; $select="<SELECT name=\"$formname\" style=\"width:$breite\">\n"; if(strlen($default)>0) $select.="<option value=0>$default</option>\n"; $handle = opendir("$_config_root_path/modules/"); while (false != ($file = readdir($handle))) { if ($file != "." && $file != ".." 
&& $file != "CVS") { if($file==$selected) { $select.="<option value=\"$file\" SELECTED>".ucfirst($file)."</option>"; } else { $select.="<option value=\"$file\">".ucfirst($file)."</option>"; } } } closedir($handle); $select.="</SELECT>\n"; return $select; } function alert($msg) { if($msg) { return "alert(".escapeshellarg(urldecode($msg)).");"; } } function error($msg) { if($msg) { return "alert('Fehler: ".substr(escapeshellarg(urldecode($msg)),1).");"; } } function getZahlungsart($id){ $query=mysql_query("SELECT art FROM Zahlungsarten WHERE id='$id'"); return mysql_result($query,0,0); } function getZahlungsartenList($formname,$selected,$breite,$default) { $query=mysql_query("SELECT id,art FROM Zahlungsarten"); $select="<SELECT name=\"$formname\" style=\"width:".$breite."px;\">\n"; if($default) $select.="<option value=0>$default</option>\n"; while(list($id,$art)=mysql_fetch_row($query)) { if($id == $selected) $select.=" <option value=$id SELECTED>$art</option>\n"; else $select.=" <option value=$id>$art</option>\n"; } $select.="</SELECT>\n"; return $select; } function date_EN_to_CH($date){ if(!checkdate(date("m",strtotime($date)),date("d",strtotime($date)),date("Y",strtotime($date)))) { return FALSE; } if($date=="0000-00-00"){ return NULL; } else if(strpos($date,"-")==FALSE) { return $date; } else { return date("d.m.Y",strtotime($date)); } } function date_CH_to_EN($date){ if(strpos($date,".")==FALSE) { return $date; } else { $date= preg_replace("|\b(\d+).(\d+).(\d+)\b|", "\\3-\\2-\\1", $date); if(checkdate(date("m",strtotime($date)),date("d",strtotime($date)),date("Y",strtotime($date)))) { return $date; } else { return FALSE; } } } function formatName($firma,$name,$vorname) { if($firma) return "$firma, $name $vorname"; else return "$name $vorname"; } function formatBetrag($betrag) { return number_format($betrag,2,".","'"); } function isModule($module){ global $_config_modules; return in_array($module,split(",",$_config_modules)); } function 
formatSearchString($term,$fields){ $term=split(" ",$term); for($i=0;$term[$i];$i++){ if($str){ $str .= "AND "; } $str.="( "; for($ii=0;$fields[$ii];$ii++){ if($ii>0){ $str.="OR "; } if(strpos($term[$i],"*")===FALSE) { $str .= $fields[$ii]." LIKE '%".$term[$i]."%' "; } else { $str .= $fields[$ii]." LIKE '".str_replace("*","%",$term[$i])."' "; } } $str.=") "; } return $str; } function getWaehrungsList($formname,$selected,$breite) { $query=mysql_query("SELECT id,text FROM Waehrungen"); $select="<SELECT name=\"$formname\" style=\"width:".$breite."px;\">\n"; while(list($id,$text)=mysql_fetch_row($query)) { if($id == $selected) $select.=" <option value=$id SELECTED>$text</option>\n"; else $select.=" <option value=$id>$text</option>\n"; } $select.="</SELECT>\n"; return $select; } function getWaehrung($id){ $query=mysql_query("SELECT text FROM Waehrungen WHERE id='$id'"); if(mysql_num_rows($query)>0) { return mysql_result($query,0,0); } else { return false; } } function getWaehrungHtml($id){ $query=mysql_query("SELECT html FROM Waehrungen WHERE id='$id'"); if(mysql_num_rows($query)>0) { return mysql_result($query,0,0); } else { return false; } } function getFx($waehrung,$waehrung1){ if($waehrung==$waehrung1){ return 1; } else { $query=mysql_query("SELECT yahoo_fx FROM Waehrungen WHERE id='$waehrung'"); if($query && mysql_num_rows($query)>0) { $fx_1=mysql_result($query,0,0); } else { return 0; } $query=mysql_query("SELECT yahoo_fx FROM Waehrungen WHERE id='$waehrung1'"); if($query && mysql_num_rows($query)>0) { $fx_2=mysql_result($query,0,0); } else { return false; } $file=implode("\n",file("http://de.finance.yahoo.com/waehrungsrechner/convert?amt=1&from=$fx_1&to=$fx_2")); $file=split("Zum Portfolio",substr($file,strpos($file,"Briefkurs",1000))); $fx = str_replace(",",".",substr($file[0], strpos($file[0],",",20) - 1 , 6)); if(is_numeric($fx)){ return $fx; } else { print "Achtung! 
Fx-berechnung meldet Fehler!"; return false; } } } function waehrungRound($betrag,$waehrung) { $query=mysql_query("SELECT round FROM Waehrungen WHERE id='$waehrung'"); if(mysql_result($query,0,0)==5){ return (round(20*$betrag))/20; } else { return $betrag; } } function waehrungCalc($betrag,$waehrung,$waehrung1) { return waehrungRound($betrag * getFx($waehrung,$waehrung1),$waehrung); } function getHttpUserId(){ global $_config_user,$PHP_AUTH_USER; if($_config_user[$PHP_AUTH_USER]) { return $_config_user[$PHP_AUTH_USER]; } else { return FALSE; } } function getYesNoList($formname,$width,$default) { $select="<SELECT name=\"$formname\" style=\"width:$width;\">"; if($default==1) $select.= "<option value=0>Nein</option> <option value=1 SELECTED>Ja</option>"; else $select.= "<option value=0 SELECTED>Nein</option> <option value=1>Ja</option>"; return $select; } ?><file_sep>/modules/statistiken/index.php <? include("../../inc/config.inc.php"); include("../../inc/func.inc.php"); ?> <html> <head> <title>Sylon godmode</title> <link rel="stylesheet" href="../../main.css" type=text/css> </head> <body> <p class=titel>Statistiken:‹bersicht</p> <br><br> <? $query=mysql_query("SELECT id,titel FROM Statistiken WHERE aktiv=1 ORDER BY titel"); while(list($id,$titel)=mysql_fetch_row($query)){ print "<a href=\"statistik.php?id=$id\">$titel</a> (<a href=\"csv.php?id=$id\">CSV</a>)<br>"; } ?> <file_sep>/modules/kontakte/modules/informationen.inc.php <? print " <table border=0 cellpadding=0 cellspacing=0> <tr> <td colspan=2><b>Informationen</b></td> </tr> <tr> <td width=100>Projektleiter</td> <td>".getEmpList("kontakt_pl",$kontakt['pl'],200,"Bitte Auswählen")."</td> </tr> <tr> <td width=100 valign=top>Informationen</td> <td><textarea name=\"kontakt_text\" style=\"width:200px;height:100px;\">".$kontakt['text']."</textarea></td> </tr> </table>"; ?> <file_sep>/modules/domains/domain.php <? 
include("../../inc/config.inc.php"); include("../../inc/func.inc.php"); include("func.inc.php"); if(!$back) { $back="show.php"; } //www.domain.ch verhindern if(substr($domain,0,4)=="www.") $domain=substr($domain,4); if($submit) { if(!$domain || !$kontakt || !$startDate || $betrag=="" || !$abrechnungsart) $error="Die Felder Domain, Kontakt, Seit, Betrag und Abrechnungsart müssen ausgefüllt werden"; else { $query_update=mysql_query("UPDATE Domains SET domain='$domain',aliase='$aliase',kontakt='$kontakt',startDate='".date_CH_to_EN($startDate)."', betrag='$betrag',waehrung='$waehrung',abrechnungsart='$abrechnungsart', text='$text' WHERE id='$id'"); if($query_update){ if($submit=="Ändern & Zurück") { header("Location: ".urldecode($back)); } } else { $error="Der Datenbankeintrag konnte nicht geändert werden"; } } } ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="../../main.css" type=text/css> </head> <body> <p class=titel>Domains:Domain</p> <? $query=mysql_query("SELECT dom.domain, dom.aliase,dom.kontakt, DATE_FORMAT(dom.startDate,'$_config_date'), dom.betrag, dom.waehrung,dom.abrechnungsart,dom.text FROM Domains dom WHERE dom.id='$id'"); if(@mysql_num_rows($query)>0) { if(!$query_update) list($domain, $aliase, $kontakt, $startDate,$betrag,$waehrung,$abrechnungsart,$text)=mysql_fetch_row($query); if($error) print "<b>Fehler:</b> $error<br><br>\n"; print "<form method=post action=\"$PHP_SELF?id=$id&back=".urlencode($back)."\">\n<table border=0i cellpadding=0 cellspacing=0> <tr> <td width=100>Domain</td> <td><input type=text name=\"domain\" value=\"$domain\" style=\"width:250px;\"></td> </tr> <tr> <td width=100>Aliase</td> <td><input type=text name=\"aliase\" value=\"$aliase\" style=\"width:250px;\"></td> </tr> <tr> <td width=100>"; if($kontakt) { print "<a href=\"../kontakte/kontakt.php?id=$kontakt\">Kontakt:</a>"; } else { print "Kontakt"; } print "</td> <td>".getKontakteList("kontakt",$kontakt,250,"Bitte Auswählen")."</td> </tr> <tr> <td 
width=100>Seit:</td> <td><input type=text name=\"startDate\" value=\"$startDate\" style=\"width:250px;\"></td> </tr> <tr> <td width=100>Bezahlt bis:</td> <td style=\"padding:2px 2px 2px 2px\">".domain_bezahlt_bis($id)."</td> </tr> <tr> <td width=100>Betrag:</td> <td><input type=text name=\"betrag\" value=\"$betrag\" style=\"width:250px;\"></td> </tr> <tr> <td width=100>Währung:</td> <td>".getWaehrungsList("waehrung",$waehrung,250)."</td> </tr> <tr> <td width=100>Abrechnungsart:</td> <td>".getZahlungsartenList("abrechnungsart",$abrechnungsart,"250","")."</td> </tr> <tr> <td width=100 valign=top>Text:</td> <td><textarea style=\"width:250;height:50;\" name=text>$text</textarea></td> </tr> </table><br><br>\n<input type=submit name=submit value=\"&Auml;ndern\"> <input type=submit name=submit value=\"&Auml;ndern & Zurück\"> <input type=button onclick=\"javascript:location.href='".urldecode($back)."'\" value=\"Zurück\"> <input type=button onclick=\"javascript:location.href='delete.php?id=$id&back=".urldecode($back)."&backno=".urlencode($REQUEST_URI)."'\" value=\"Löschen\"></form>\n"; } else { print "Fehler: User existiert ev. nicht\n"; } ?> </body> </html> <file_sep>/import.php <? 
include("inc/config.inc.php"); include("inc/db.inc.php"); //Tabellen Löschen if($sql) { $query=mysql_query("DELETE FROM Kontakte WHERE id != 1"); $query=mysql_query("DELETE FROM Kontakte_kontaktpersonen WHERE firma!=1"); } $handle = fopen ("daten_2.txt", "r"); while (!feof($handle)) { $buffer = fgets($handle, 4096); $jee=$buffer; $buffer = split(";",str_replace("\"","",str_replace("'","\\'",$buffer))); $firma=$buffer[2]; $anrede = $buffer[14]; if (!(stristr($anrede,"Sehr geehrte Damen und Herren")===FALSE)) { $anrede=3; } else if (!(stristr($anrede,"Monsieur le Maire")===FALSE)) { $anrede=6; } else if (!(stristr($anrede,"Herrn")===FALSE)) { $anrede=2; } else if (!(stristr($anrede,"Sehr geehrte Frau")===FALSE)) { $anrede=2; } else if (!(stristr($anrede,"Madame, cher Monsieur")===FALSE)) { $anrede=8; } else if (!(stristr($anrede,"Monsieur")===FALSE)) { $anrede=4; } else if (!(stristr($anrede,"Mister")===FALSE)) { $anrede=7; } else if (!(stristr($anrede,"Liebe")===FALSE)) { $anrede=2; } else if (!(stristr($anrede,"Lieber")===FALSE)) { $anrede=1; } else if (!(stristr($anrede,"Mrs.")===FALSE)) { $anrede=9; } else if (!(stristr($anrede,"Madame")===FALSE)) { $anrede=5; } else if (!(stristr($anrede,"Mr")===FALSE)) { $anrede=7; } else if (!(stristr($anrede,"Madam")===FALSE)) { $anrede=10; } else if (!(stristr($anrede,"Herr")===FALSE)) { $anrede=1; } else if (!(stristr($anrede,"Miss")===FALSE)) { $anrede=11; } if(!is_numeric($anrede)) { $anrede=3; } $firma=$buffer[2]; $vorname=$buffer[6]; $name=$buffer[7]; $position=$buffer[8]; $adresse1=$buffer[9]; $adresse2==$buffer[10]; $land==$buffer[11]; $plz==$buffer[12]; $ort=$buffer[13]; $telefon1=$buffer[15]; $tel_direkt=$buffer[16]; $tel_mobile=$buffer[17]; $tel_privat=$buffer[18]; $fax=$buffer[19]; $email=$buffer[23]; $text=$buffer[30]; $www=$buffer[32]; if($email=="") { $email=$buffer[33]; } $blz=$buffer[34]; $konto=$buffer[35]; $kundennummer=$buffer[31]; if($sql){ if(strlen(trim($firma))>0) { $query=mysql_query("INSERT INTO 
Kontakte(erfasst,firma,firma2,adresse,adresse2,plz,ort,land,telefon1,telefon2,mobile,fax,mail,www) VALUES(NOW(),'$firma','$firma2','$adresse','$adresse2','$plz','$ort','$land','$telefon1','',NULL,'$fax','$mail','$www')"); echo mysql_error(); $query=mysql_query("INSERT INTO Kontakte_kontaktpersonen(firma,name,vorname,position,adresse,adresse2,plz,ort,tel_privat,tel_gesch,tel_direkt,tel_mobile,fax,mail,mail2,text,anrede) VALUES('".mysql_insert_id()."','$name','$vorname','$position','','','','','$tel_privat','$telefon','$tel_direkt','$tel_mobile','$fax','$email',NULL,'$text','$anrede')"); echo mysql_error(); } else if(strlen(trim($name))>0) { $query=mysql_query("INSERT INTO Kontakte(erfasst,firma,firma2,adresse,adresse2,plz,ort,land,telefon1,telefon2,mobile,fax,mail,www,anrede) VALUES(NOW(),'$name','$vorname','$adresse','$adresse2','$plz','$ort','$land','$telefon1','$telefon2','$mobile','$fax','$mail','$www','$anrede')"); echo mysql_error(); } else { // echo str_replace("\n","",$jee)."\n"; } } } fclose ($handle); <file_sep>/modules/rechnungen/erstellen1.php <? session_start(); session_unregister("gut"); session_unregister("pos"); session_unregister("adresse"); session_unregister("betreff"); session_unregister("datum"); session_unregister("text"); session_unregister("footer"); session_unregister("id"); session_unregister("waehrung"); session_unregister("zahlungsfrist"); include("../../inc/config.inc.php"); include("../../inc/func.inc.php"); include("func.inc.php"); ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="../../main.css" type=text/css> </head> <body> <p class=titel>Rechnungen:Rechnungen erstellen</p> <? 
print "<form method=post action=\"erstellen2.php?id=$id\"> <table width=100% border=0> <tr> <td height=100 valign=top> <textarea name=adresse style=\"width:400px;height:100px;\">"; print getRechnungsAdresse($id); print "</textarea> </td> </tr> <tr> <td align=right height=50 valign=top>".$_config_rechnung_ort.", <input type=text name=datum value=\"".date("d.m.Y")."\"></td> </tr> <tr> <td> <input type=text name=betreff maxlength=255 value=\"".$_config_rechnung_subject."\" style=\"width:400px;\"><br><br> <SELECT onChange=\"document.getElementById('text').value=this.value\" style=\"width:400px;background-color:#$_config_tbl_bgcolor1;\"> <option value=\"".str_replace("%USER%",getEmp(getHttpUserId()),$_config_rechnung_text_1)."\">".get1Line($_config_rechnung_text_1)."</option> <option value=\"".str_replace("%USER%",getEmp(getHttpUserId()),$_config_rechnung_text_2)."\">".get1Line($_config_rechnung_text_2)."</option> <option value=\"".str_replace("%USER%",getEmp(getHttpUserId()),$_config_rechnung_text_3)."\">".get1Line($_config_rechnung_text_3)."</option> <option value=\"".str_replace("%USER%",getEmp(getHttpUserId()),$_config_rechnung_text_4)."\">".get1Line($_config_rechnung_text_4)."</option> <option value=\"".str_replace("%USER%",getEmp(getHttpUserId()),$_config_rechnung_text_5)."\">".get1Line($_config_rechnung_text_5)."</option> </SELECT><br> <textarea name=text id=text style=\"width:400px;height:150px;\">".str_replace("%USER%",getEmp(getHttpUserId()),$_config_rechnung_text_1)."</textarea> </td> </tr> <tr> <td> <b>Rechnungen</b><br> <table border=0 width=100% cellspacing=0> <tr> <td>&nbsp;</td> <td style=\"width:60px;\"><b>Anzahl</b></td> <td><b>Text</b></td> <td><b>Text 1</b></td> <td align=right><b>Betrag</b></td> </tr>"; $query=mysql_query("SELECT id,anzahl,text,text1,$_config_posbetrag,waehrung FROM Rechnungen_positionen pos WHERE rechnung is NULL AND kontakt='$id' ORDER BY id"); 
for($i=0;(list($pos_id,$pos_anzahl,$pos_text,$pos_text1,$betrag,$waehrung)=mysql_fetch_row($query));$i++){ if(($i%2)==0){ $bgcolor=$_config_tbl_bgcolor1; } else { $bgcolor=$_config_tbl_bgcolor2; } print "<tr> <td bgcolor=\"#$bgcolor\" style=\"width:30px;\"><input type=checkbox name=\"pos[$i]\" value=\"$pos_id\" checked></td> <td bgcolor=\"#$bgcolor\">$pos_anzahl</td> <td bgcolor=\"#$bgcolor\">$pos_text</td> <td bgcolor=\"#$bgcolor\">$pos_text1</td> <td bgcolor=\"#$bgcolor\" align=right>".formatBetrag($betrag)." ".getWaehrungHtml($waehrung)."</td> </tr>\n"; } print "</table>\n"; //Gutschriften $query2=mysql_query("SELECT id,betrag,waehrung,text FROM Rechnungen_gutschriften WHERE bezahlt is NULL AND kontakt='$id' AND auszahlen=1"); if(@mysql_num_rows($query2)>0) { print "<br><b>Gutschriften</b><br><table border=0 width=100% cellpadding=0 cellspacing=0> <tr> <td>&nbsp;</td> <td><b>Text</b></td> <td align=right><b>Betrag</b></td> </tr>"; for($i=0;list($gutschrift_id,$gutschrift_betrag,$gutschrift_waehrung,$gutschrift_text)=mysql_fetch_row($query2);$i++) { if(($i%2)==0){ $bgcolor=$_config_tbl_bgcolor1; } else { $bgcolor=$_config_tbl_bgcolor2; } print "<tr> <td style=\"width:30px;\" bgcolor=\"#$bgcolor\"><input type=checkbox name=\"gut[$i]\" value=\"$gutschrift_id\" checked></td> <td bgcolor=\"#$bgcolor\">$gutschrift_text</td> <td align=right bgcolor=\"#$bgcolor\">".formatBetrag($gutschrift_betrag)." ".getWaehrungHtml($gutschrift_waehrung)."</td> </tr>\n"; } print "</table>"; } print "</td> </tr> <tr> <td> <table border=0 cellpadding=0 cellspacing=0 width=\"100%\"> <tr> <td valign=top><b>Total</b></td> <td align=right><b>"; //Total berechnen $query1=mysql_query("SELECT sum($_config_posbetrag),waehrung FROM Rechnungen_positionen pos WHERE rechnung is NULL AND kontakt='$id' GROUP BY waehrung"); for($i=0;(list($betrag,$waehrung)=mysql_fetch_row($query1));$i++) { if($i>0) print "<br>"; print formatBetrag($betrag)." 
".getWaehrungHtml($waehrung); } print "</b></td> </tr> </td> </tr> </table> <p><b>Währung</b><br> Rechnung erstellen in: ".getWaehrungsList("waehrung","",100)."</p> <p><b>Zahlungsfrist</b><br> <input type=text style=\"width:30;text-align:right\" maxlength=3 name=\"zahlungsfrist\" value=\"$_config_rechnung_zahlungsfrist\"> Tage</p> <p><b>Besr Nr.</b><br> <input type=text style=\"width:250;\" name=\"besrnr\"></p> </td> </tr> <tr> <td><br><br> <SELECT onChange=\"document.getElementById('footer').value=this.value\" style=\"width:400px;background-color:#$_config_tbl_bgcolor1;\"> <option value=\"".str_replace("%USER%",getEmp(getHttpUserId()),$_config_rechnung_text2_1)."\">".get1Line($_config_rechnung_text2_1)."</option> <option value=\"".str_replace("%USER%",getEmp(getHttpUserId()),$_config_rechnung_text2_2)."\">".get1Line($_config_rechnung_text2_2)."</option> <option value=\"".str_replace("%USER%",getEmp(getHttpUserId()),$_config_rechnung_text2_3)."\">".get1Line($_config_rechnung_text2_3)."</option> <option value=\"".str_replace("%USER%",getEmp(getHttpUserId()),$_config_rechnung_text2_4)."\">".get1Line($_config_rechnung_text2_4)."</option> <option value=\"".str_replace("%USER%",getEmp(getHttpUserId()),$_config_rechnung_text2_5)."\">".get1Line($_config_rechnung_text2_5)."</option> </SELECT><br> <textarea name=footer id=footer style=\"width:400px;height:70px;\">".str_replace("%USER%",getEmp(getHttpUserId()),$_config_rechnung_text2_1)."</textarea> </td> </tr> </table><br>"; ?> <input type=submit value="Rechnung Drucken"> </form> </body> </html> <file_sep>/modules/rechnungen/position_erstellen.php <? 
include("../../inc/config.inc.php"); include("../../inc/func.inc.php"); if($submit) { if(!$kontakt) { $error="Bitte geben Sie einen Kunden an"; } else if(!$text) { $error="Bitte geben Sie einen Text an"; } else if(!$betrag || !is_numeric($betrag)) { $error="Bitte geben Sie einen Betrag an"; } else { $query=mysql_query("INSERT INTO Rechnungen_positionen(kontakt,rechnung,text,text1,betrag,waehrung,datum) VALUES('$kontakt',NULL,'$text','$text1','$betrag','$waehrung',NOW())"); if(mysql_error()) $error=mysql_error(); else{ header("Location: positionen.php"); } } } ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="../../main.css" type=text/css> </head> <body onLoad="document.getElementsByName('kontakt')[0].focus()"> <p class=titel>Rechungen:Position hinzufügen</p> <? if($error){ print "<b>Fehler:</b> $error<br><br>"; } print "<form method=post action=\"$PHP_SELF?id=$id&edit=1\">\n"; print "<table border=0> <tr> <td width=100>Kontakt:</td> <td>".getKontakteList("kontakt",$kontakt,250,"Bitte auswählen")."</td> </tr> <tr> <td width=100>Text:</td> <td><input type=text maxlength=255 name=text value=\"$text\" style=\"width:250px\"></td> </tr> <tr> <td width=100>Text 1:</td> <td><input type=text maxlength=255 name=text1 value=\"$text1\" style=\"width:250px\"></td> </tr> <tr> <td width=100>Betrag</td> <td><input type=text name=betrag value=\"$betrag\" style=\"width:100px;\"></td> </tr> <tr> <td width=100>Währung</td> <td>".getWaehrungsList("waehrung",$waehrung,100)."</td> </tr> </table>\n"; print "<input type=submit name=submit value=\"Hinzufügen\">\n</form>\n"; ?> </body> </html> <file_sep>/modules/rapportierung/func.inc.php <? 
/* Build an HTML <select> of rapport codes belonging to one project.
   $formname  - id/name attribute of the generated <select>
   $projekt   - project id whose Rapportierung_code rows are listed
               (NOTE(review): interpolated unescaped into SQL — presumably always
                a numeric id; confirm callers before exposing to user input)
   $selected  - code value to pre-select
   $breite    - widget width in pixels
   $text_null - label of the value=0 placeholder option
   Returns the complete <select> markup as a string. */
function getRapportCodeList($formname,$projekt,$selected,$breite,$text_null) { $query = mysql_query("SELECT code, name FROM Rapportierung_code WHERE projekt='$projekt' ORDER BY name"); echo mysql_error(); $select="<SELECT ID=\"$formname\" NAME=\"$formname\" style=\"width:".$breite."px;\"> <option value=0>$text_null</option>\n"; while(list($code,$name)=@mysql_fetch_row($query)) { if($code == $selected) $select.="<option value=$code SELECTED>$name</option>\n"; else $select.="<option value=$code>$name</option>\n"; } $select.="</SELECT>\n"; return $select; } ?> <file_sep>/modules/rechnungen/positionen.php <? include("../../inc/config.inc.php"); include("../../inc/func.inc.php"); ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="../../main.css" type=text/css> <script src="../../inc/functions.js" type="text/javascript" language="javascript"></script> </head> <body> <p class=titel>Rechnungen:Positionen</p> <? /* List all not-yet-invoiced positions (rechnung IS NULL), grouped by contact. */ $query=mysql_query("SELECT pos.id, pos.kontakt,pos.text,pos.text1,pos.betrag,pos.waehrung,DATE_FORMAT(pos.datum,'$_config_date') FROM Rechnungen_positionen pos, Kontakte kon WHERE Rechnung is NULL AND pos.kontakt = kon.id ORDER BY kon.firma"); if(@mysql_num_rows($query)>0){ print "<table cellpadding=2 cellspacing=0 border=0 width=\"95%\"> <tr> <td width=250><b>Kontakt</b></td> <td width=\"*\"><b>Text</b></td> <td width=80 align=right><b>Datum</b></td> <td width=100 align=right><b>Betrag</b></td> <td width=130><b>Aktion</b></td> </tr>\n"; for($i=0;list($id,$id_kon,$text,$text1,$betrag,$waehrung,$datum)=mysql_fetch_row($query);$i++){ /* Extra $i++ forces a zebra-colour change whenever a new contact group starts. */ if($lastid == $id_kon){ $i++; } if(($i%2)==0){ $bgcolor=$_config_tbl_bgcolor1; } else { $bgcolor=$_config_tbl_bgcolor2; } /* Shared cell attributes: whole row cell is click-through to the edit page. */ $td="onclick=\"location.href='position_edit.php?id=$id'\" bgcolor=\"#$bgcolor\" valign=top"; print "<tr onmouseover=\"setPointer(this, 'over', '#$bgcolor', '#$_config_tbl_bghover', '')\" onmouseout=\"setPointer(this, 'out', '#$bgcolor', '#$_config_tbl_bghover', '')\"> <td width=250 
bgcolor=\"#$bgcolor\" valign=top>"; /* Contact name is printed only on the first row of each contact group. */ if($id_kon!=$lastid) print "<a href=\"../kontakte/kontakt.php?id=$id_kon&back=".urlencode($REQUEST_URI)."\">".getKontakt($id_kon)."</a>"; else print "&nbsp;"; /* FIX: the anchor branch previously also printed "</td>", producing a duplicated close tag. */ print "</td> <td width=200 $td>$text</td> <td width=80 align=right $td>$datum</td> <td width=100 align=right $td>".formatBetrag($betrag)." ".getWaehrungHtml($waehrung)."</td> <td width=130 bgcolor=\"#$bgcolor\" valign=top>"; /* FIX: '>' after $td was missing, which left the Betrag <td> tag unclosed. */ if($id_kon!=$lastid) print "<a href=\"erstellen1.php?id=$id_kon\">Rechnung erstellen</a>"; print "</td> </tr>"; $lastid=$id_kon; } print "</table>\n<br><br>\n"; } else { print "<b>Keine offene Positionen gefunden</b>"; } ?> </body> </html> <file_sep>/modules/rechnungenOloid/erstellen2.php <? include("../../inc/config.inc.php"); include("../../inc/func.inc.php"); session_start(); /* Persist the posted invoice form data in the session unless this is a forced re-run. */ if(!$force){ session_register("pos"); session_register("gut"); session_register("positionen"); session_register("adresse"); session_register("betreff"); session_register("datum"); session_register("text"); session_register("footer"); session_register("id"); session_register("waehrung"); session_register("zahlungsfrist"); } ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="../../main.css" type=text/css> </head> <body> <p class=titel>Rechnungen:Rechnungen erstellen</p> <? 
//Prüfen ob diese Rechnung schon mal erstellt wurde $query=mysql_query("SELECT id FROM Rechnungen WHERE kontakt='$id' AND betreff='$betreff' AND datum = NOW()"); if(@mysql_num_rows($query)==0 || $force==1) { $query=mysql_query("INSERT INTO Rechnungen(kontakt,waehrung,fx,datum,adresse,betreff,text,footer,zahlungsfrist,besrnr) VALUES('$id','$waehrung','".getFx($waehrung,1)."','".date_CH_to_EN($datum)."','$adresse','$betreff','$text','$footer','$zahlungsfrist','$besrnr')"); if(!mysql_error()) { $last_id=mysql_insert_id(); if($pos) { foreach($pos as $pos_id) { $query=mysql_query("SELECT waehrung FROM Rechnungen_positionen WHERE id='$pos_id'"); $waehrung_pos=mysql_result($query,0,0); $fx=getFx($waehrung_pos,$waehrung); $query=mysql_query("UPDATE Rechnungen_positionen SET rechnung='$last_id',fx='$fx' WHERE id='$pos_id'"); if(mysql_error()){ $error=mysql_error(); } } } if($gut) { foreach($gut as $gut_id) { $query=mysql_query("SELECT waehrung FROM Rechnungen_gutschriften WHERE id='$gut_id'"); $waehrung_pos=mysql_result($query,0,0); $fx=getFx($waehrung_pos,$waehrung); $query=mysql_query("UPDATE Rechnungen_gutschriften SET bezahlt='$last_id',fx='$fx' WHERE id='$gut_id'"); if(mysql_error()){ $error=mysql_error(); } } } } else { $error=mysql_error(); } if($error){ print "<b>Fehler:</b> $error"; } else { session_unregister("pos"); session_unregister("gut"); session_unregister("adresse"); session_unregister("betreff"); session_unregister("datum"); session_unregister("text"); session_unregister("footer"); session_unregister("id"); session_unregister("waehrung"); print "<p>Die Rechnung wurde erfolgreich gespeichert.</p> <p>Klicken Sie <a href=\"createPDF.php?id=$last_id\" style=\"text-decoration:underline\">hier</a> für eine Rechnung im PDF-Format und <a href=\"positionen.php\" style=\"text-decoration:underline\">hier</a> um zurückzukehren."; } } else { print "<b>Achtung!</b> eine Rechnung an diesen Kunden mit diesem Betreff wurde heute schon einmal ausgelöst!<br> Um Rechnungen 
ein zweites mal auszudrucken, verwenden Sie bitte den Menupunkt <a href=\"rechnungen_offene.php\">Offene Rechnungen</a>.<br><br> Um die Rechnung ein zweites mal auszulösen, klicken Sie <a href=\"$PHP_SELF?force=1\" style=\"text-decoration:underline\">hier</a>."; } ?> </body> </html> <file_sep>/modules/kontakte/modules/adressinfo.inc.php <? if($updated=="00.00.0000") { $updated="-"; } if($id != 0) { print "<table border=0 cellpadding=0 cellspacing=0 width=300> <tr> <td width=100>Adressnummer:</td> <td>".$kontakt['id']."</td> </tr> <tr> <td width=100>Erfasst:</td> <td>".$kontakt['erfasst']."</td> </tr> <td width=100>Updated:</td> <td>".$kontakt['updated']."</td> </tr> </table>"; if(isModule("domains")) { include("domains.inc.php"); } } ?> <file_sep>/inc/config.inc.php <? //Version $_config_version="2002123102"; //Database $_config_mysql_host="localhost"; $_config_mysql_user="wolf5"; $_config_mysql_password="<PASSWORD>"; $_config_mysql_db="wolf5_gm"; //General Configuration $_config_root_path="/var/www/godmode"; $_config_whois_program="/usr/bin/whois"; //Module: Filemanager //$_config_filemanager_directory="/home//godmode/modules/filemanager/files"; //$_config_filemanager_directory_http="/modules/filemanager/files"; $conn=mysql_connect($_config_mysql_host,$_config_mysql_user,$_config_mysql_password); mysql_select_db($_config_mysql_db); $query=mysql_query("SELECT `key`,`value` FROM Konfiguration"); while(list($key,$value)=mysql_fetch_row($query)) { if($value=="true"||$value=="false") { eval("\$_config_"."$key = ($value=='true');"); } else { eval("\$_config_"."$key ='$value';"); } } $_config_date_php=str_replace("%","",$_config_date); $_config_posbetrag="pos.anzahl*((pos.betrag+((pos.betrag/100)*pos.mwst))*pos.fx)"; ?> <file_sep>/modules/timesheet/inc/func.inc.php <?php include("../../inc/config.inc.php"); function addCustomer($id){ return mysql_query("INSERT INTO Timesheet_Kontakte SET Kunde = ".$id." 
"); } function removeCustomer($id){ return mysql_query("DELETE FROM Timesheet_Kontakte WHERE Kunde = ".$id." LIMIT 1 "); } function getRunning(){ $result = mysql_query("SELECT id FROM Timesheet WHERE ISRUNNING = 1"); $running = mysql_fetch_array($result); return $running['id']; } function setRunning($id){ $running_id = getRunning(); mysql_query("UPDATE Timesheet SET ISRUNNING=0,end_stamp=CURRENT_TIMESTAMP WHERE id=".$running_id); echo mysql_error(); mysql_query("INSERT INTO Timesheet (`id`, `kunde`, `start_stamp`, `end_stamp`, `user`, `notice`, `ISRUNNING`) VALUES (NULL, '".$id."', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP, NULL, NULL, '1')"); echo mysql_error(); return mysql_insert_id(); } function updateTable($date){ $datefilter = !empty($date)? "WHERE start_stamp >= CURDATE()": ""; $query = "SELECT ts.id, ISRUNNING, notice, firma, firma2, start_stamp, end_stamp, TIMESTAMPDIFF(MINUTE,start_stamp,end_stamp) as ess FROM Timesheet as ts LEFT JOIN Kontakte as con ON ( con.id = ts.Kunde) ".$datefilter." ORDER BY start_stamp DESC, end_stamp DESC, ts.id DESC"; $result = mysql_query($query); $counter = 0; echo "<tr>"; echo "<td>Minuten</td><td>Kunde</td><td>Notiz</td><td>Von</td><td>Bis</td><td>Aktion</td>"; echo "</tr>"; while ($customer = mysql_fetch_array($result)){ $counter++; $id=""; $bgcolor= $counter%2==0 ? "#ffffff":"#aaaaaa"; if ($customer['ISRUNNING']== 1){ $bgcolor= "#f9dada"; $id=" id='isrunning'"; } echo "<tr bgcolor=\"$bgcolor\" class='list' ".$id.">"; echo "<td>".$customer['ess']."</td><td>".$customer['id']." 
".$customer['firma']."</td><td>".$customer['notice']."</td><td>".$customer['start_stamp']."</td><td>".$customer['end_stamp']."</td>";
        echo "<td><span class='edit' id='".$customer['id']."'>edit</span></td>";
        echo "</tr>";
    }
}

/* Render one quick-start button per customer registered in Timesheet_Kontakte.
   NOTE(review): $active is never defined in this scope and expands to nothing. */
function showButtons(){
    $query=mysql_query("SELECT tsk.Kunde, con.id, con.firma, con.firma2 FROM Kontakte as con, Timesheet_Kontakte as tsk WHERE con.id = tsk.Kunde ORDER BY con.id DESC");
    while ($customer = mysql_fetch_array($query)){
        echo "<button value='".$customer['id']."' class='time'".$active.">".$customer['firma']."</button>";
    }
}

/* Print the edit form for one time measurement (customer, notice, start/end).
   $id - Timesheet row id; cast to int before use (it arrives from the client). */
function getMessureById($id){
    $result = mysql_query("SELECT ts.id, ISRUNNING, notice, firma, firma2, start_stamp, end_stamp, TIMESTAMPDIFF(MINUTE,start_stamp,end_stamp) as ess FROM Timesheet as ts LEFT JOIN Kontakte as con ON ( con.id = ts.Kunde) WHERE ts.id = ".((int)$id));
    $resArray = mysql_fetch_array($result);
    echo "Kunde: ".$resArray['firma']."<br />";
    echo " ".$resArray['firma2']."<br />";
    echo "<table><tr><td rowspan='2'>Notiz:</td><td rowspan='2'><textarea name='notice'>".$resArray['notice']."</textarea></td>";
    echo "<td>von:</td><td><input name='start_stamp' value='".$resArray['start_stamp']."' /></td>";
    echo "<tr><td>bis:</td><td><input name='end_stamp' value='".$resArray['end_stamp']."' /></td></tr></table>";
    echo "<input type='hidden' value='true' name='saveChanges' />";
    echo "<input type='hidden' value='".$resArray['id']."' name='id' />";
    echo "<button id='save' onsubmit='return false'>Speichern</button>";
}

/* Store an edited notice for one Timesheet row.
   FIX: $_POST values were interpolated raw into the UPDATE statement (SQL
   injection); notice is now escaped and id cast to int. */
function saveChanges(){
    $notice = mysql_real_escape_string($_POST['notice']);
    $id = (int)$_POST['id'];
    $query="UPDATE Timesheet SET notice='".$notice."' WHERE id=".$id;
    mysql_query($query);
    echo mysql_error();
}
?>
<file_sep>/modules/rechnungenOloid/findprodukt.php
<? 
include("../../inc/config.inc.php"); include("../../inc/func.inc.php"); ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="../../main.css" type=text/css> <script src="../../inc/functions.js" type="text/javascript" language="javascript"></script> <script type="text/javascript" language="javascript"> <!-- function setValue(val){ opener.document.getElementsByName('produkt[<?=$field?>]')[0].value = val; self.close(); } //--> </script> </head> <body onLoad="document.getElementById('term').focus()"> <p class=titel>Produkt Suchen</p> <form method=get action="<?=$PHP_SELF?>"> <input type=hidden name=field value=<?=$field?>> <input type=text name=term id=term value="<?=$term?>"> <input type=submit name=search value="Suchen"> </form> <? $_config_entrysperpage=10; if(!$start){ $start=0; } if($term){ $query=mysql_query("SELECT nr_int,text1 FROM Produkte WHERE ".formatSearchString($term,array("nr_int","nr_ext","text1","text2"))." ORDER BY text1 LIMIT $start,$_config_entrysperpage"); $attr="&term=$term"; } else { $query=mysql_query("SELECT nr_int,text1 FROM Produkte ORDER BY text1 LIMIT $start,$_config_entrysperpage"); } if(@mysql_num_rows($query)>0) { print "<table border=0 cellpadding=3 cellspacing=0 width=\"95%\">"; for($i=0;list($nr_int,$text1)=mysql_fetch_array($query);$i++) { if(($i%2)==0){ $bgcolor=$_config_tbl_bgcolor1; } else { $bgcolor=$_config_tbl_bgcolor2; } print "<tr onmouseover=\"setPointer(this, 'over', '#$bgcolor', '#$_config_tbl_bghover', '')\" onmouseout=\"setPointer(this, 'out', '#$bgcolor', '#$_config_tbl_bghover', '')\" onclick=\"javascript:setValue('$nr_int');\"> <td valign=top bgcolor=\"#$bgcolor\"$style>$text1</td>"; print "</tr>\n"; } print "<tr> <td colspan=2 align=center>"; if($start>0){ print "<a href=\"$PHP_SELF?field=$field&start=".($start-$_config_entrysperpage)."$attr\"><<<</a>"; } if($term){ $query=mysql_query("SELECT count(*) FROM Kontakte WHERE aktiv=1 AND ".formatSearchString($term,array("firma","firma2",))); } 
else { $query=mysql_query("SELECT count(*) FROM Kontakte WHERE aktiv=1"); } if(($start+$_config_entrysperpage+1)<=mysql_result($query,0,0)) { if($start>0){ print " | "; } print "<a href=\"$PHP_SELF?field=$field&start=".($start+$_config_entrysperpage)."$attr\">>>></a>"; } print "</td> </tr> </table>\n"; } else { print "Kein Produkte gefunden"; } ?> </body> </html> <file_sep>/modules/rechnungenOloidOld/statistiken.php <? include("../../inc/config.inc.php"); include("../../inc/func.inc.php"); //EInnahmen pro Jahr $query=mysql_query("SELECT sum(dom.betrag*abr.raten) FROM Domains dom, Zahlungsarten abr WHERE dom.abrechnungsart = abr.id"); list($gesamteinnahmen)=mysql_fetch_row($query); echo mysql_error(); //Verrechnete Beträge $query=mysql_query("select sum(betrag) FROM Rechnungen_positionen"); list($verrechneteBetraege)=mysql_fetch_row($query); //Eingenommene Beträge $query=mysql_query("select sum(pos.betrag) FROM Rechnungen_positionen pos, Rechnungen rech WHERE rech.id = pos.rechnung AND rech.bezahlt is NOT NULL"); list($bisherigeEinnahmen)=mysql_fetch_row($query); ?> <html> <head> <title>Sylon godmode</title> <link rel="stylesheet" href="../../main.css" type=text/css> </head> <body> <p class=titel>Rechnungen:Statistiken</p> <table border=0> <tr> <td width=250>Gesamteinnahmen Domains pro Jahr</td> <td align=right><? print formatBetrag($gesamteinnahmen); ?></td> </tr> <tr> <td width=250>Einnahmen - Hostingkosten</td> <td align=right><?=formatBetrag($gesamteinnahmen-4519.2)?></td> </tr> <tr> <td width=250>Verrechnete Beträge</td> <td align=right><? print formatBetrag($verrechneteBetraege); ?> </td> </tr> <tr> <td width=250>Bezahlte Beträge</td> <td align=right><? print formatBetrag($bisherigeEinnahmen); ?></td> </tr> </table> </body> </html> <file_sep>/conv.php <? 
/* Re-pack daten.txt into daten_2.txt: records start on lines whose first four
   characters are numeric followed by ';'. Continuation lines are appended to
   the current record; embedded newlines/CRs are written out escaped. */
$handle = fopen ("daten.txt", "r");
$handle2 = fopen ("daten_2.txt", "w");
while (!feof($handle)) {
    $buffer = fgets($handle, 4096);
    if(is_numeric(substr($buffer,0,4)) && substr($buffer,4,1)==";")
    {
        /* New record header reached: flush the previously accumulated record.
           FIX: guard with isset() — the first header used to emit a spurious
           blank line because $str was still unset. (Also removed a dead,
           never-read $data assignment.) */
        if(isset($str)) {
            fputs($handle2,str_replace("\r","\\r ",str_replace("\n","\\"."n",($str)))."\n");
        }
        $str=$buffer;
    }
    else
    {
        $str.=$buffer;
    }
}
/* FIX: the loop only writes a record when the NEXT header appears, so the
   final record was silently dropped — flush it before closing. */
if(isset($str) && strlen($str)>0) {
    fputs($handle2,str_replace("\r","\\r ",str_replace("\n","\\"."n",($str)))."\n");
}
fclose ($handle2);
fclose ($handle);
<file_sep>/modules/timesheet/csv.php
<?
include("../../inc/config.inc.php");
include("../../inc/func.inc.php");
/* First hit: redirect onto a path ending in the stored filename so the
   download gets a sensible name; second hit (PATH_INFO ends in "csv")
   streams the statistic as CSV. */
if(substr($_SERVER["PATH_INFO"],strlen($_SERVER["PATH_INFO"])-3,3)!="csv")
{
    if($statid) die("Loop detected");
    $query=mysql_query("SELECT filename FROM Statistiken WHERE id='$id'");
    $filename=mysql_result($query,0,0);
    header("Location: $PHP_SELF/$filename?statid=$id&start=$start&end=$end&sort=$sort&order=$order");
}
else
{
    $query=mysql_query("SELECT sql,ueberschriften,filename,datumsfeld,total FROM Statistiken WHERE id='$statid'");
    list($sql,$ueberschriften,$filename,$datumsfeld,$total)=mysql_fetch_row($query);
    header("Content-type: text/csv");
    header("Content-Disposition: attachment; filename=$filename");
    header("Cache-control: private");
    /* SECURITY(review): eval() on database-stored SQL plus raw request vars
       ($start/$end/$sort/$order) spliced into the statement — acceptable only
       for trusted admins; do not expose this endpoint to untrusted users. */
    eval("\$sql = \"$sql\";");
    if($start)
    {
        $sql= str_replace("WHERE","WHERE $datumsfeld >= '".date_CH_to_EN($start)."' AND",$sql);
    }
    if($end)
    {
        $sql= str_replace("WHERE","WHERE $datumsfeld <= '".date_CH_to_EN($end)."' AND",$sql);
    }
    if($sort)
    {
        /* Replace the statistic's own ORDER BY with the requested sort column. */
        $sql = substr($sql,0,strpos(strtolower($sql),"order by"))."ORDER BY $sort";
        if($order){ $sql.= " ".$order; }
    }
    $query=mysql_query($sql);
    if(mysql_num_rows($query)>0){
        /* Header row: strip angle brackets the stored captions may contain. */
        $ueberschriften = str_replace(">","",$ueberschriften);
        $ueberschriften = str_replace("<","",$ueberschriften);
        print $ueberschriften."\n";
        $total_val= array();
        while($row=mysql_fetch_row($query)){
            for($i=0;$i<mysql_num_fields($query);$i++)
            {
                print str_replace("\r","",str_replace("\n","",($row[$i]))).",";
                /* Accumulate per-column sums for the optional totals footer. */
                if(is_numeric($row[$i])) $total_val[$i]+=$row[$i];
            }
            print "\n";
        }
    }
    if(strlen($total)>0)
    {
        $total = 
split(",",$total); for($i=0;$i<mysql_num_fields($query);$i++) { if(in_array(mysql_field_name($query,$i),$total)) { print $total_val[$i]; } print ","; } } } ?> <file_sep>/modules/timesheet/customer.php <? /*include("../../inc/config.inc.php"); include("../../inc/func.inc.php");*/ /*include("../../inc/config.inc.php");*/ include("inc/func.inc.php"); ?> <html> <head> <title>Sylon godmode</title> <link rel="stylesheet" href="../../main.css" type=text/css> <script type="text/javascript" src="inc/js/jquery-1.4.4.min.js"></script> <script type="text/javascript" src="inc/js/func.js"></script> </head> <body> <p class=titel>Kundenverwaltung</p> <br><br> <table> <? $query=mysql_query("select ts.Kunde, con.id, firma, firma2 from Kontakte as con LEFT JOIN Timesheet_Kontakte as ts ON ( con.id = ts.Kunde) where con.aktiv = 1"); echo mysql_error(); while ($customer = mysql_fetch_array($query)){ echo "<tr>"; echo "<td>"; if (!empty($customer['Kunde'])) echo "<td><input type='checkbox' checked='checked' value='".$customer['id']."' id='".$customer['id']."' name='remove' class='cBox'/></td>"; else echo "<td><input type='checkbox' name='add' value='".$customer['id']."' id='".$customer['id']."' class='cBox'/></td>"; echo "<td>".$customer['id']."</td><td>".$customer['firma']."</td><td>".$customer['firma2']."</td>"; echo "</tr>"; } ?> </table> </body> </html> <file_sep>/menu.php <? include("inc/config.inc.php"); ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="menu.css" type=text/css> </head> <body> <base target=main> <div class=titel2><?=$_config_title?></div><br> <? 
if(!$_config_modules){ print "<b>Fehler:</b> $"."config_modules ist nicht definiert<br><br>\n</body>\n</html>"; exit; } foreach(split(",",$_config_modules) as $module){ $module=trim($module); $menufile=@file("modules/$module/menu.inc.php"); if(!$menufile){ print "<b>Fehler:</b> Menufile f&uuml;r Modul $module nicht gefunden<br><br>\n"; continue; } print "<p>"; for($i=0;$menufile[$i];$i++){ if(strstr($menufile[$i],"=")===FALSE){ print "<b>".$menufile[$i]."</b><br>\n"; } else if($menufile[$i]==""){ print "<br>\n"; } else { print "<a href=\"modules/$module/".substr($menufile[$i],strpos($menufile[$i],"=")+1,strlen($menufile[$i])-strpos($menufile[$i],"="))."\">".substr($menufile[$i],0,strpos($menufile[$i],"="))."</a><br>\n"; } } print "</p>\n"; } ?> <!--<b>Personen</b><br> <a href="personen_anzeigen.php">Anzeigen/Editieren</a><br> <a href="personen_neu.php">Hinzufügen</a><br> <a href="personen_del.php">Löschen</a><br><br> <b>Domains</b><br> <a href="domains_anzeigen.php">Anzeigen/Editieren</a><br> <a href="domains_neu.php">Hinzufügen</a><br> <a href="domains_del.php">L&ouml;schen</a><br><br> <b>Rechnungen</b><br> <a href="rechnungen_erstellen.php">Rechnungen erstellen</a><br> <a href="rechnungen_offene.php">Offene Rechnungen</a><br> <a href="rechnungen_bezahlte.php">Bezahlte Rechnungen</a><br> <a href="rechnungen_gutschriften.php">Gutschriften</a><br> <a href="rechnungen_statistiken.php">Statistiken</a><br>--> </body> </html> <file_sep>/modules/rechnungen/gutschriften_delete.php <? include("../../inc/config.inc.php"); include("../../inc/func.inc.php"); if(!$back){ $back="gutschriften.php"; } if(!$backno){ $backno=$back; } if($del) { $query=mysql_query("DELETE FROM Rechnungen_gutschriften WHERE id='$del'"); if($query) { header("Location: ".urldecode($back)); } else { $error=mysql_error(); } } ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="../../main.css" type=text/css> </head> <body> <p class=titel>Rechnungen:Gutschrift Löschen</p> <? 
$query=mysql_query("SELECT kontakt, betrag, text FROM Rechnungen_gutschriften WHERE id='$id'"); list($kontakt,$betrag,$text)=mysql_fetch_row($query); print "<p>Möchten Sie die Gutschrift <b>$text</b> für <b>".getKontakt($kontakt)."</b> über ".formatBetrag($betrag)." wirklich Löschen?</p><p>[ <a href=\"$PHP_SELF?del=$id&back=".urlencode($back)."\">Ja</a> ] [ <a href=\"".urldecode($backno)."\">Nein</a> ]</p>"; ?> </body> </html> <file_sep>/modules/domains/add.php <? include("../../inc/config.inc.php"); include("../../inc/func.inc.php"); include("func.inc.php"); if($submit) { //www.domain.ch verhindern if(substr($domain,0,4)=="www.") $domain=substr($domain,4); //Überprüfen ob alle benötigten felder ausgefüllt wurden if(!$domain || !$kontakt || !$startDate || strlen($betrag)<1 ||!$abrechnungsart){ $error="Die Felder Domain, Kontakt, Seit, Betrag und Abrechnungsart müssen ausgefüllt sein"; } else if($gutschrift_kontakt && !$betrag){ $error="Gutschrift, aber keinen Betrag angegeben"; } else { //Prüfen ob Domain in DB existiert $query=mysql_query("SELECT count(*) FROM Domains WHERE domain='$domain'"); list($exist)=mysql_fetch_row($query); if($exist){ $error="Domain existiert bereits"; } else { //Neue Domain in DB schreiben $query=mysql_query("INSERT INTO Domains(domain,aliase,kontakt,startDate,betrag, waehrung,abrechnungsart,text) values ('$domain','$aliase','$kontakt','".date_CH_to_EN($startDate)."','$betrag','$waehrung','$abrechnungsart','$text')"); $domain_id=mysql_insert_id(); if(!($error=mysql_error())) { //Mail an Admin //TODO: Richtige Kontaktperson if($_config_domains_inform_admin && $inform_admin){ $msg = "Domain : $domain\nKunde : ".getKontakt($id)."\ne-mail : $mail\nDatum : ".date($_config_date_php)."\n\n"; if($inform_admin_comment){ $msg.="Kommentar:\n$inform_admin_comment\n"; } mail($_config_domains_inform_admin_mail,"Neuer Kunde: $domain",$msg,"From: $_config_domains_inform_admin_from <$_config_domains_inform_admin_from_mail>"); } //Domain Verrechnen 
if($verrechnen) { $query=mysql_query("SELECT dom.domain, dom.kontakt,dom.betrag,dom.waehrung,abr.monate FROM Domains dom,Zahlungsarten abr WHERE dom.id='$domain_id' AND abr.id = dom.abrechnungsart"); list($domain,$kontakt,$betrag,$waehrung,$abr_mnt)=mysql_fetch_row($query); $bezahltBis=domain_bezahlt_bis($domain_id); $raten=ceil(diff_month(strtotime("+$_config_domains_verrechnen_tage_vorher days"),strtotime(date_CH_to_EN($bezahltBis)))/12); $betrag=($betrag * $raten); $tmp_monate=$abr_mnt * $raten; $text="Hosting $domain"; if(date("d",strtotime(date_CH_to_EN($bezahltBis)))=="1"){ $zahlenBis=date("t.m.Y",strtotime("+".($tmp_monate-1)." month",strtotime(date_CH_to_EN($bezahltBis)))); } else { //Wüster PHP Date Bug if(date("d",strtotime(date_CH_to_EN($bezahltBis)))==31) { $zahlenBis=date("m.Y",strtotime("+$tmp_monate month",strtotime("-1 day",strtotime(date_CH_to_EN($bezahltBis))))); $zahlenBis="31.".$zahlenBis; } else { $zahlenBis=date("t.m.Y",strtotime("+$tmp_monate month",strtotime(date_CH_to_EN($bezahltBis)))); } } $text1="$startDate- $zahlenBis"; $query=mysql_query("INSERT INTO Rechnungen_positionen(kontakt,rechnung,text,text1,betrag,mwst,waehrung,datum,`key`,value) VALUES('$kontakt',NULL,'$text','$text1','$betrag','$_config_domains_mwst','$waehrung',NOW(),'domains','$domain_id')"); $pos_id=mysql_insert_id(); if(!($error=mysql_error())){ $query=mysql_query("UPDATE Domains SET bezahltBis='".date_CH_to_EN($zahlenBis)."' WHERE id='$domain_id'"); if(!($error=mysql_error())){ //Wenn eine Gutschrift gemacht wurde, diese in die DB schreiben if($gutschrift_kontakt>0) { $query=mysql_query("INSERT INTO Rechnungen_gutschriften(kontakt,datum,betrag,text,auszahlen,abhaengig) VALUES('$gutschrift_kontakt',NOW(),'$gutschrift_betrag','$gutschrift_text','0','$pos_id')"); if(!($error=mysql_error())) { header("Location: show.php"); } else { $error="Die Gutschrift konnte nicht gespeichert werden.\nFehler: $error"; } } else header("Location: show.php"); } } } //Ende Verrechnen } else 
{ header("Location: show.php"); } } } } ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="../../main.css" type=text/css> <script src="../../inc/functions.js" type="text/javascript" language="javascript"></script> <script language="javascript" type="text/javascript"> <!-- var kontakt; var text; var betrag; function showGutschrift(show) { document.getElementById('gutschrift_betrag').disabled = (!show); document.getElementById('gutschrift_kontakt').disabled = (!show); document.getElementById('gutschrift_text').disabled = (!show); } //--> </script> </head> <body onLoad="javascript:error('<?=$error?>');document.getElementById('domain').focus()"> <p class=titel>Domains:Hinzufügen</p> <script language="JavaScript"> <!-- var changeText=1; function getBetrag(raten){ document.getElementById('gutschrift_betrag').value=((document.getElementById('betrag').value/12)*raten); } function fillText(){ if(changeText) document.getElementById('gutschrift_text').value='Vermittlung ' + document.getElementById('domain').value } --> </script> <? 
if(!$startDate){ $startDate=date("d.m.Y"); } if(strlen($preis)<1){ $betrag=$_config_domains_default_betrag; } print "<form method=post action=\"$PHP_SELF\"> <table border=0> <tr> <td width=100>Domain</td> <td><input type=text id=domain name=domain value=\"$domain\" style=\"width:250px;\" id=\"domain\""; if(isModule("rechnungen")) { print " onchange=\"javascript:fillText()\""; } if($verrechnen||!isset($verrechnen)){ $checked=" CHECKED"; } else { $checked=""; } print "></td> </tr> <tr> <td width=100>Aliase</td> <td><input type=text name=\"aliase\" value=\"$aliase\" style=\"width:250px;\"></td> </tr> <tr> <td width=100>Kontakt:</td> <td>".getKontakteList("kontakt",$kontakt,"250","Bitte Auswählen")."</td> </tr> <tr> <td width=100>Seit:</td> <td><input type=text name=\"startDate\" value=\"$startDate\" style=\"width:250px;\"></td> </tr> <tr> <td width=100>Betrag:</td> <td><input type=text name=betrag id=betrag value=\"$betrag\" style=\"width:250px;\"></td> </tr> <tr> <td width=100>Währung:</td> <td>".getWaehrungsList("waehrung",$waehrung,"250")."</td> </tr> <tr> <td width=100>Abrechnungsart:</td> <td>".getZahlungsartenList("abrechnungsart",$abrechnungsart,"250","")."</td> </tr> <tr> <td width=100 valign=top>Text:</td> <td><textarea style=\"width:250;height:50;\" name=text>$text</textarea></td> </tr> <tr> <td colspan=2><b>Sofort Verrechnen</b></td> </tr> <tr> <td></td> <td><input type=checkbox value=1 name=verrechnen$checked onclick=\"javascript:showGutschrift(this.checked)\"> Ja</td> </tr>"; if(isModule("rechnungen")){ print"<tr> <td colspan=2><b>Gutschrift</b></td> </tr> <tr> <td>Zugunsten von:</td> <td>".getKontakteList("gutschrift_kontakt",$gutschrift_kontakt,"250","Keine Gutschrift")."</td> </tr> <tr> <td>Text:</td> <td><input type=text name=gutschrift_text id=gutschrift_text value=\"$gutschrift_text\" style=\"width:250px;\" onchange=\"javascript:changeText=0\"></td> </tr> <tr> <td valign=top>Betrag</td> <td><input type=text name=gutschrift_betrag 
id=gutschrift_betrag value=\"$gutschrift_betrag\" style=\"width:250px;\"><br>Anzahl gutgeschriebener Raten: <a href=\"#\" onclick=\"javascript:getBetrag(1)\">1</a> <b><a href=\"#\" onclick=\"javascript:getBetrag(2)\">2</a></b> <a href=\"#\" onclick=\"javascript:getBetrag(3)\">3</a> <b><a href=\"#\" onclick=\"javascript:getBetrag(4)\">4</a></b> <a href=\"#\" onclick=\"javascript:getBetrag(5)\">5</a> </td> </tr>"; } if($_config_domains_inform_admin){ if($inform_admin){ $checked=" CHECKED"; } else { $checked=""; } print "<tr> <td colspan=2><b>Admin Informieren</b></td> </tr> <tr> <td>&nbsp;</td> <td><input type=checkbox value=1 name=inform_admin$checked> Ja</td> </tr> <tr> <td>Kommentar:</td> <td><input type=text name=inform_admin_comment value=\"$inform_admin_comment\" style=\"width:250px;\"></td> </tr>"; } print "</table>\n<br><br>\n <input type=submit name=submit value=\"Hinzuf&uuml;gen\">\n</form>\n"; ?> </body> </html> <file_sep>/modules/kontakte/func.inc.php <? function getKontakteList($formname,$selected,$breite,$text_null) { $query = mysql_query("SELECT id, firma, firma2 FROM Kontakte WHERE aktiv=1 ORDER BY firma"); $select="<SELECT ID=\"$formname\" NAME=\"$formname\" style=\"width:".$breite."px;\"> <option value=0>$text_null</option>\n"; while(list($id,$firma,$firma2)=@mysql_fetch_row($query)) { if($firma2) { $firma.=", ".$firma2; } if(strlen($firma)>100) { $firma=substr($firma,0,100)."..."; } if($id == $selected) $select.="<option value=$id SELECTED>$firma</option>\n"; else $select.="<option value=$id>$firma</option>\n"; } $select.="</SELECT>\n"; return $select; } function getEmpList($formname,$selected,$breite,$text_null) { global $_config_kontakte_me; $query = mysql_query("SELECT id, name, vorname FROM Kontakte_kontaktpersonen WHERE firma='$_config_kontakte_me' ORDER BY name"); $select="<SELECT NAME=\"$formname\" style=\"width:".$breite."px;\"> <option value=0>$text_null</option>\n"; while(list($id,$name,$vorname)=@mysql_fetch_row($query)) { 
$name_show=formatName("",$name,$vorname); if($id == $selected) $select.="<option value=$id SELECTED>$name_show</option>\n"; else $select.="<option value=$id>$name_show</option>\n"; } $select.="</SELECT>\n"; return $select; } function getEmp($id) { $query = mysql_query("SELECT name, vorname FROM Kontakte_kontaktpersonen WHERE id='$id'"); list($name,$vorname)=mysql_fetch_row($query); return trim("$vorname $name"); } function getKontaktpersonenList($formname,$selected,$breite,$text_null,$firma) { $query = mysql_query("SELECT id, name, vorname FROM Kontakte_kontaktpersonen WHERE firma='$firma' ORDER BY name"); $select="<SELECT NAME=\"$formname\" style=\"width:".$breite."px;\"> <option value=0>$text_null</option>\n"; while(list($id,$name,$vorname)=@mysql_fetch_row($query)) { $name_show=formatName("",$name,$vorname); if($id == $selected) $select.="<option value=$id SELECTED>$name_show</option>\n"; else $select.="<option value=$id>$name_show</option>\n"; } $select.="</SELECT>\n"; return $select; } function getAnredeList($formname,$selected,$breite) { $query=mysql_query("SELECT id,anrede FROM Kontakte_anreden ORDER BY id"); $select = "<SELECT name=\"$formname\" style=\"width:$breite"."px;\">\n"; while(list($id,$anrede)=mysql_fetch_row($query)) { if($id == $selected) $select.=" <option value=\"$id\" SELECTED>$anrede</option>\n"; else $select.=" <option value=\"$id\">$anrede</option>\n"; } $select.="</SELECT>\n"; return $select; } function getAnrede($id){ $query=@mysql_query("SELECT anrede FROM Kontakte_anreden WHERE id='$id'"); if($query && mysql_num_rows($query)>0) { return mysql_result($query,0,0); } else { return false; } } function getKontakt($id){ $query=@mysql_query("SELECT anrede,firma,firma2 FROM Kontakte WHERE id='$id'"); if(mysql_num_rows($query)>0){ list($anrede,$firma,$firma2)=mysql_fetch_row($query); if($anrede==3) { /*if($firma2) { $firma.=", ".$firma2; }*/ if(strlen($firma)>100) { $firma=substr($firma,0,100)."..."; } return $firma; } else { return "$firma2 
$firma"; } } else { return false; } } function isPrivate($id) { $query=mysql_query("SELECT anr.privat FROM Kontakte_anreden anr, Kontakte kont WHERE kont.anrede = anr.id AND kont.id='$id'"); if(mysql_num_rows($query)>0) { return mysql_result($query,0,0); } else { return false; } } function anredeIsPrivate($id) { $query=mysql_query("SELECT privat FROM Kontakte_anreden WHERE id='$id'"); if(mysql_num_rows($query)>0) { return mysql_result($query,0,0); } else { return false; } } ?> <file_sep>/modules/kontakte/func2.inc.php <? function saveKontakte() { global $kontaktpersonen; global $kontakt; global $HTTP_POST_VARS; global $key; $key = $_REQUEST["key"]; $test="<br>1) key = ".$key; //if key is -1 it is a new adress - then apend ist at the end if($key < 0) { $key = count($kontaktpersonen); } $test .= "<br>2) key = ".$key; $test .= "<br>count(kontaktpersonen) = ".count($kontaktpersonen); //$test = 1; //iterate over http_post_vars while(list ($arr_key, $arr_value) = each($HTTP_POST_VARS)) { //$error2 .=" in if !error".$test; $test .= "<br>arr_key = ".$arr_key." >>arr_value = ".$arr_value; //save content to kontaktpersonen if the arr_key name contains "kontaktperson_"" if(!(strstr($arr_key,"kontaktperson_") === FALSE)) { $kontaktpersonen[$key][substr($arr_key,14)] = $arr_value; //save content to kontakt if the arr_key name contains "kontakt_"" } else if(!(strstr($arr_key,"kontakt_") === FALSE)) { $kontakt[substr($arr_key,8)] = $arr_value; } //$test ++; } $_SESSION["kontakt"] = $kontakt; $_SESSION["kontaktpersonen"] = $kontaktpersonen; //session_register("kontakt"); //session_register("kontaktpersonen"); return $test; } ?> <file_sep>/modules/rechnungen/position_edit.php <? 
include("../../inc/config.inc.php"); include("../../inc/func.inc.php"); if($submit) { if(!$kontakt) { $error="Bitte geben Sie einen Kunden an"; } else if(!$text) { $error="Bitte geben Sie einen Text an"; } else if(!$betrag || !is_numeric($betrag)) { $error="Bitte geben Sie einen Betrag an"; } else { $query=mysql_query("UPDATE Rechnungen_positionen SET kontakt='$kontakt',text='$text',text1='$text1',betrag='$betrag',waehrung='$waehrung' WHERE id='$id'"); if(mysql_error()) $error=mysql_error(); else{ header("Location: positionen.php"); } } } ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="../../main.css" type=text/css> </head> <body onLoad="document.getElementsByName('kontakt')[0].focus()"> <p class=titel>Rechungen:Position Editieren</p> <? if($error){ print "<b>Fehler:</b> $error<br><br>"; } $query=mysql_query("SELECT kontakt,text,text1,betrag,waehrung FROM Rechnungen_positionen WHERE id='$id'"); list($kontakt,$text,$text1,$betrag,$waehrung)=mysql_fetch_row($query); print "<form method=post action=\"$PHP_SELF?id=$id\">\n"; print "<table border=0> <tr> <td width=100>Kontakt:</td> <td>".getKontakteList("kontakt",$kontakt,250,"Bitte auswählen")."</td> </tr> <tr> <td width=100>Text:</td> <td><input type=text maxlength=255 name=text value=\"$text\" style=\"width:250px\"></td> </tr> <tr> <td width=100>Text 1:</td> <td><input type=text maxlength=255 name=text1 value=\"$text1\" style=\"width:250px\"></td> </tr> <tr> <td width=100>Betrag</td> <td><input type=text name=betrag value=\"$betrag\" style=\"width:100px;\"></td> </tr> <tr> <td width=100>Währung</td> <td>".getWaehrungsList("waehrung",$waehrung,100)."</td> </tr> </table>\n"; print "<input type=submit name=submit value=\"Ändern\"> <input type=button value=\"Löschen\" onclick=\"javascript:location.href='position_delete.php?id=$id&back=positionen.php&backno=".urlencode($REQUEST_URI)."'\">\n</form>\n"; ?> </body> </html> <file_sep>/modules/domains/delete.php <? 
session_start(); include("../../inc/config.inc.php"); include("../../inc/func.inc.php"); include("func.inc.php"); if(!$back) { $back="show.php"; } if(!$backno) { $backno=$back; } if($del) { $query=mysql_query("UPDATE Domains SET endDate=NOW() WHERE id='$del'"); if($query) { header("Location: ".urldecode($back)); } else { $err=mysql_error(); } } ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="../../main.css" type=text/css> </head> <body> <p class=titel>Domains:Domain Löschen</p> <? if($err){ print "<b>Fehler:</b> $err<br><br>"; } print "Möchten Sie die Domain '".getDomain($id)."' wirklich Löschen?<br><br> <a href=\"$PHP_SELF?del=$id&back=".urldecode($back)."\">[ Ja ]</a> <a href=\"".urldecode($backno)."\">[ Nein ]</a>"; ?> </body> </html> <file_sep>/modules/produkte/edit.php <? include("../../inc/config.inc.php"); include("../../inc/func.inc.php"); $HTTP_POST_VARS = !empty($HTTP_POST_VARS) ? $HTTP_POST_VARS : $_POST; if($submit) { if(!$nr_int) { $fehler="Es wurde keine Int. Produktenummer angegeben"; } else { $query=mysql_query("UPDATE Produkte SET nr_int='$nr_int',nr_ext='$nr_ext',gruppe='$gruppe',text1='$text1',text2='$text2',preis1='$preis1',preis2='$preis2',preis3='$preis3',preis4='$preis4',rabattstufe='$rabattatufe',warenbestand='$warenbestand' WHERE id='$id'"); echo mysql_error(); if($query) { header("Location: show.php"); } else { $error= "Fehler: ".mysql_error($query)."<br><br>\n\n"; } } } ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="../../main.css" type=text/css> </head> <body onLoad="document.getElementsByName('nr_int')[0].focus()"> <p class=titel>Produkte:Editieren</p> <? 
if($fehler) print "<b>Fehler:</b> $error<br><br>\n"; $query=mysql_query("SELECT nr_int,nr_ext,gruppe,text1,text2,preis1,preis2,preis3,preis4,rabattstufe,warenbestand FROM Produkte WHERE id='$id'"); list($nr_int,$nr_ext,$gruppe,$text1,$text2,$preis1,$preis2,$preis3,$preis4,$rabattstufe,$warenbestand)=mysql_fetch_row($query); print "<form method=post action=\"$PHP_SELF?id=$id\">\n"; print "<table border=0>"; if($_config_produkte_int_prod_nr) print"<tr> <td width=100>Int. Prod. Nr.:</td> <td><input type=text name=\"nr_int\" value=\"$nr_int\" style=\"width:200px;\" maxlength=50></td> </tr>"; if($_config_produkte_ext_prod_nr) print "<tr> <td width=100>Ext. Prod. Nr:</td> <td><input type=text name=\"nr_ext\" value=\"$nr_ext\" style=\"width:200px;\" maxlength=50></td> </tr>"; if($_config_produkte_gruppe) print "<tr> <td width=100>Produktegruppe:</td> <td><input type=text name=\"gruppe\" value=\"$gruppe\" style=\"width:200px;\" maxlength=50></td> </tr>"; if($_config_produkte_text1) print "<tr> <td width=100>$_config_produkte_text1_name:</td> <td><input type=text name=\"text1\" value=\"$text1\" style=\"width:200px;\" maxlength=50></td> </tr>"; if($_config_produkte_text2) print "<tr> <td width=100>$_config_produkte_text2_name:</td> <td><input type=text name=\"text2\" value=\"$text2\" style=\"width:200px;\" maxlength=50></td> </tr>"; if($_config_produkte_preis1) print "<tr> <td width=100>$_config_produkte_preis1_name:</td> <td><input type=text name=\"preis1\" value=\"$preis1\" style=\"width:200px;\" maxlength=50></td> </tr>"; if($_config_produkte_preis2) print "<tr> <td width=100>$_config_produkte_preis2_name:</td> <td><input type=text name=\"preis2\" value=\"$preis2\" style=\"width:200px;\" maxlength=50></td> </tr>"; if($_config_produkte_preis3) print "<tr> <td width=100>$_config_produkte_preis3_name:</td> <td><input type=text name=\"preis3\" value=\"$preis3\" style=\"width:200px;\" maxlength=50></td> </tr>"; if($_config_produkte_preis4) print "<tr> <td 
width=100>$_config_produkte_preis4_name:</td> <td><input type=text name=\"preis4\" value=\"$preis4\" style=\"width:200px;\" maxlength=50></td> </tr>"; if($_config_produkte_rabattstufe) print "<tr> <td width=100>Rabattstufe:</td> <td><input type=text name=\"rabattstufe\" value=\"$rabattstufe\" style=\"width:200px;\" maxlength=50></td> </tr>"; if($_config_produkte_warenbestand) print "<tr> <td width=100>Warenbestand:</td> <td><input type=text name=\"warenbestand\" value=\"$warenbestand\" style=\"width:200px;\" maxlength=50></td> </tr>"; print"</table> <input type=submit name=submit value=\"Ändern\"> <input type=button onclick=\"javascript:location.href='delete.php?id=$id'\" value=\"Löschen\">\n</form>\n"; ?> </body> </html> <file_sep>/modules/rechnungenOloidOld/menu.inc.php Rechnungen Rechnungen erstellen=erstellen.php Offene Rechnungen=offene.php Bezahlte Rechnungen=bezahlte.php Währungsrechner=waehrungsrechner.php Gutschriften Anzeigen/Editieren=gutschriften.php Gutschrift erstellen=gutschriften_add.php <file_sep>/modules/rechnungenOloidOld/waehrungsrechner.php <? include("../../inc/config.inc.php"); include("../../inc/func.inc.php"); ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="../../main.css" type=text/css> </head> <body onLoad="javascript:document.getElementById('betrag').focus()"> <p class=titel>Rechnungen:Währungsrechner</p> <form method=post action="<?=$PHP_SELF?>"> <? 
if($betrag) { $query=mysql_query("SELECT text FROM Waehrungen WHERE id='$waehrung1'"); print "Betrag in ".mysql_result($query,0,0).": ".formatBetrag(($betrag * getFx($waehrung,$waehrung1)))."<br><br>"; } print "<table border=0> <tr> <td>Betrag</td> <td><input type=text id=betrag name=betrag value=\"$betrag\"></td> </tr> <tr> <td>Währung</td> <td>".getWaehrungsList("waehrung",$waehrung,100)."</td> </tr> <tr> <td>Zielwährung</td> <td>".getWaehrungsList("waehrung1",$waehrung1,100)."</td> </tr> <tr> <td>&nbsp;</td> <td><input type=submit value=\"Umrechnen\"></td> </tr> </table>"; ?> </form> </body> </html> <file_sep>/modules/rechnungenOloid/func.inc.php <? function kontoExistiert($nr) { global $buchhaltung,$_config_mysql_buchhaltung_db; mysql_select_db($_config_mysql_buchhaltung_db); $query=mysql_query("SELECT count(*) FROM $buchhaltung"."_Konto WHERE nr='$nr'"); return mysql_result($query,0,0); } function createKonto($nr,$kontakt,$typ) { global $buchhaltung,$_config_mysql_buchhaltung_db; mysql_select_db($_config_mysql_buchhaltung_db); $query=mysql_query("SELECT max(sort) FROM $buchhaltung"."_Konto"); $sort=@mysql_result($query,0,0)+1; $query=mysql_query("SELECT id FROM $buchhaltung"."_Nebenkonto WHERE name='Kreditoren'"); $nebenkonto=@mysql_result($query,0,0); $query=mysql_query("INSERT INTO $buchhaltung"."_Konto(nr,name,nebenkonto,typ,waehrung,show_waehrung,show_belegnr,show_mwst,show_datum,sort) Values('$nr','$kontakt','$nebenkonto','$typ',1,1,1,1,1,'$sort')"); return (strlen(mysql_error()==0)); } function getRechnungsAdresse($kontakt) { global $_config_kontakte_gebiete_rechnungen; $query=mysql_query("SELECT kontakt$_config_kontakte_gebiete_rechnungen FROM Kontakte WHERE id='$kontakt'"); $kont=@mysql_result($query,0,0); if($kont) { $query=mysql_query("SELECT kon.firma, konp.name, konp.vorname,konp.adresse,konp.adresse2,konp.plz,konp.ort FROM Kontakte kon, Kontakte_kontaktpersonen konp WHERE konp.firma = kon.id AND konp.id='$kont'"); 
list($firma,$name,$vorname,$adresse,$adresse2,$plz,$ort)=mysql_fetch_row($query); if(!$adresse) { $query=mysql_query("SELECT adresse,adresse2,plz,ort FROM Kontakte WHERE id='$kontakt'"); list($adresse,$adresse2,$plz,$ort)=mysql_fetch_row($query); } return preg_replace("#\n+#","\n","$firma\n$vorname $name\n$adresse\n$adresse2\n$plz $ort"); } else { $query=mysql_query("SELECT firma,adresse,adresse2,plz,ort FROM Kontakte WHERE id='$kontakt'"); list($firma,$adresse,$adresse2,$plz,$ort)=mysql_fetch_row($query); return preg_replace("#\n+#","\n","$firma\n$adresse\n$adresse2\n$plz $ort"); } } function getRechnungsAdresseMail($kontakt) { global $_config_kontakte_gebiete_rechnungen; $query=mysql_query("SELECT kontakt$_config_kontakte_gebiete_rechnungen FROM Kontakte WHERE id='$kontakt'"); $kont=mysql_result($query,0,0); if($kont) { $query=mysql_query("SELECT konp.name, konp.vorname,anr.anrede_lang, konp.mail FROM Kontakte kon, Kontakte_kontaktpersonen konp,Kontakte_anreden anr WHERE konp.id='$kont' AND anr.id = konp.anrede"); list($name,$vorname,$anrede,$mail)=mysql_fetch_row($query); return array("$name $vorname","$anrede $name",$mail); } else { $query=mysql_query("SELECT firma,mail FROM Kontakte WHERE id='$kontakt'"); list($firma,$mail)=mysql_fetch_row($query); return array($firma,"Sehr geehrte Damen und Herren",$mail); } } function get1Line($str) { if(strpos($str,"\n")) { return substr($str,0,strpos($str,"\n")); } else { return $str; } } ?> <file_sep>/modules/domains/whois.php <? include("../../inc/config.inc.php"); ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="../../main.css" type=text/css> </head> <body onLoad="javascript:document.getElementById('site').focus()"> <p class=titel>Domains:Whois</p> <form method=post action="<?=$PHP_SELF?>"> <input type=text id=site name=site value="<?=$site?>"> <input type=submit value="Whois"> </form> <pre> <? 
print shell_exec("$_config_whois_program ".escapeshellarg($site)); ?> </pre> </body> </html> <file_sep>/trash/kontakte_klein/add.php <? include("../../inc/config.inc.php"); include("../../inc/db.inc.php"); include("../../inc/func.inc.php"); if($submit) { if(!$name && !$firma) { $fehler="Firma oder Name muss ausgefüllt werden"; } else { $query=mysql_query("INSERT INTO Kontakte(ansprechperson,name,vorname,anrede,firma,adresse,adresse2,plz,ort,tel_privat,tel_gesch,tel_mobile,fax,mail,mail2,text,konto,kontonr,blz,swift,iban) VALUES('$ansprechperson','$name','$vorname','$anrede','$firma','$adresse','$adresse2','$plz','$ort','$tel_privat','$tel_gesch','$tel_mobile','$fax','$mail','$mail2','$text','$konto','$kontonr','$blz','$swift','$iban')"); if($query) { header("Location: show.php"); } else { $error= "Fehler: ".mysql_error($query)."<br><br>\n\n"; } } } ?> <html> <head> <title><?=$_config_title?></title> <link rel="stylesheet" href="../../main.css" type=text/css> </head> <body onLoad="document.getElementsByName('anrede')[0].focus()"> <p class=titel>Kontakte:Hinzufügen</p> <? 
if($fehler) print "<b>Fehler:</b> $error<br><br>\n"; print "<form method=post action=\"$PHP_SELF?id=$id&edit=1\">\n"; print "<table border=0> <tr> <td width=100>Anrede:</td> <td>".getAnredeList("anrede",$anrede,150)."</td> </tr> <tr> <td width=100>Name:</td> <td><input type=text name=\"name\" value=\"$name\" style=\"width:200px;\" maxlength=50></td> </tr> <tr> <td width=100>Vorname</td> <td><input type=text name=\"vorname\" value=\"$vorname\" style=\"width:200px;\" maxlength=50></td> </tr> <tr> <td width=100>Firma</td> <td><input type=text name=\"firma\" value=\"$firma\" style=\"width:200px;\" maxlength=50></td> </tr> <tr> <td width=100>Adresse</td> <td><input type=text name=\"adresse\" value=\"$adresse\" style=\"width:200px;\" maxlength=50></td> </tr> <tr> <td width=100>Adresse 2</td> <td><input type=text name=\"adresse2\" value=\"$adresse2\" style=\"width:200px;\" maxlength=50></td> </tr> <tr> <td width=100>PLZ / Ort</td> <td><input type=text name=\"plz\" value=\"$plz\" style=\"width:45px;\" maxlength=10> <input type=text name=\"ort\" value=\"$ort\" style=\"width:150px;\" maxlength=50></td> </tr> <tr> <td width=100>Tel. Privat</td> <td><input type=text name=\"tel_privat\" value=\"$tel_privat\" style=\"width:200px;\" maxlength=20></td> </tr> <tr> <td width=100>Tel. 
Gesch.</td> <td><input type=text name=\"tel_gesch\" value=\"$tel_gesch\" style=\"width:200px;\" maxlength=20></td> </tr> <tr> <td width=100>Natel</td> <td><input type=text name=\"tel_mobile\" value=\"$tel_mobile\" style=\"width:200px;\" maxlength=20></td> </tr> <tr> <td width=100>Fax</td> <td><input type=text name=\"fax\" value=\"$fax\" style=\"width:200px;\" maxlength=20></td> </tr> <tr> <td width=100>E-Mail</td> <td><input type=text name=\"mail\" value=\"$mail\" style=\"width:200px;\" maxlength=50></td> </tr> <tr> <td width=100>E-Mail 2</td> <td><input type=text name=\"mail2\" value=\"$mail2\" style=\"width:200px;\" maxlength=50></td> </tr>"; if($_config_kontakte_show_ansprechperson) print "<tr> <td width=100>Ansprechperson</td> <td>".getAnsprechpersonenList("ansprechperson",$ansprechperson,200)."</td> </tr>"; print "<tr> <td width=100 valign=top>Text</td> <td><textarea name=text style=\"width:200px;height:100px\">$text</textarea></td> </tr> <tr> <td width=150>Konto</td> <td><input type=text name=\"konto\" value=\"$konto\" style=\"width:200px;\" maxlength=50></td> </tr> <tr> <td width=150>Konto Nr.</td> <td><input type=text name=\"kontonr\" value=\"$kontonr\" style=\"width:200px;\" maxlength=50></td> </tr> <tr> <td width=150>BLZ</td> <td><input type=text name=\"blz\" value=\"$blz\" style=\"width:200px;\" maxlength=50></td> </tr> <tr> <td width=150>Swift</td> <td><input type=text name=\"swift\" value=\"$swift\" style=\"width:200px;\" maxlength=50></td> </tr> <tr> <td width=150>Iban</td> <td><input type=text name=\"iban\" value=\"$iban\" style=\"width:200px;\" maxlength=50></td> </tr> </table>\n"; print "<input type=submit name=submit value=\"Hinzufügen\">\n</form>\n"; ?> </body> </html> <file_sep>/index.php <? include("inc/config.inc.php"); ?> <html> <head> <title><?=$_config_title?></title> </head> <frameset cols="160,*" frameborder="0" framespacing="0" border="0"> <frame src="menu.php" name=menu scrolling=no border=0> <frame src="<? 
if($_config_startmodule){ print "modules/".$_config_startmodule."/"; } else { print "modules/domains/"; } ?>" name=main> </frameset> </html> <file_sep>/modules/rechnungen/index.php <? header("Location: position_erstellen.php"); ?> <file_sep>/modules/rapportierung/menu.inc.php Rapportieren Anzeigen=show.php Erfassen=add.php Projekt=add_projekt.php
a2b3454f75d4cfdfce2539969fd9590bba32309c
[ "JavaScript", "PHP" ]
82
PHP
wolf5/godmode
657f411108934cc8e6ef028840dd0bfe4efc818d
3d4969a1316072046d5cd4ad85ed3aab4996c2c0
refs/heads/master
<repo_name>jianhe25/ClusteringDemo<file_sep>/Image-segmentation.js function loadImages() { displayImage("c1", "chinaFlag.jpeg"); } function displayImage(canvasId, url) { var j = new JpegImage(); j.onload = function() { var c = document.getElementById(canvasId); c.width = j.width; c.height = j.height; var ctx = c.getContext("2d"); var d = ctx.getImageData(0,0,j.width,j.height); j.copyToImageData(d); ctx.putImageData(d, 0, 0); }; j.load(url); } function convertOneDimToMultiDim(img) { var width = img.width, height = img.height; var data = img.getData(width, height); var i = 0, j = 0, x, y; var Y, K, C, M, R, G, B; var multiDimData = []; switch (img.components.length) { case 1: for (y = 0; y < height; y++) { for (x = 0; x < width; x++) { Y = data[i++]; multiDimData[j++] = Y; } } break; case 3: for (y = 0; y < height; y++) { for (x = 0; x < width; x++) { R = data[i++]; G = data[i++]; B = data[i++]; multiDimData[j++] = [R, G, B]; } } break; case 4: for (y = 0; y < height; y++) { for (x = 0; x < width; x++) { C = data[i++]; M = data[i++]; Y = data[i++]; K = data[i++]; R = 255 - clampTo8bit(C * (1 - K / 255) + K); G = 255 - clampTo8bit(M * (1 - K / 255) + K); B = 255 - clampTo8bit(Y * (1 - K / 255) + K); multiDimData[j++] = [R, G, B]; } } break; default: throw 'Unsupported color mode'; } return multiDimData; } function copyMultiDimDataToBlankCanvas(multiDim, blankCanvas) { var width = blankCanvas.width, height = blankCanvas.height; var blankCanvasArray = blankCanvas.data; var j = 0, x, y; var R, G, B; console.log(multiDim); var test = multiDim[0]; console.log(test); for (var i = 0; i < multiDim.length; ++i) { var test = multiDim[i]; R = test[0]; G = test[1]; B = test[2]; blankCanvasArray[j++] = R; blankCanvasArray[j++] = G; blankCanvasArray[j++] = B; blankCanvasArray[j++] = 255; } } function kmeansSegmentation(img, blankCanvas) { // Note data is a global variable, shared in K-means.js, very dangerous variable data = convertOneDimToMultiDim(img); dataExtremes = 
getDataExtremes(data); dataRange = getDataRanges(dataExtremes); means = initMeans(6); makeAssignments(); while (true) { var moved = moveMeans(); if (!moved) break; } console.log(means); RGBPools = [ [0,0,0], [155,155,155], [250,235,215], [0,0,255], [255,0,0], [0,255,0] ]; for (var i in data) { data[i] = RGBPools[ assignments[i] % RGBPools.length ]; } var blankCanvasArray = blankCanvas.data; // Fold data to one dimension //copyMultiDimDataToBlankCanvas(data, blankCanvas); var j = 0; for (var i = 0; i < data.length; ++i) { var test = data[i]; if (test) { R = test[0]; G = test[1]; B = test[2]; blankCanvasArray[j++] = R; blankCanvasArray[j++] = G; blankCanvasArray[j++] = B; blankCanvasArray[j++] = 255; } } } function imageSegmentation() { canvasId = "c2" url = "chinaFlag.jpeg" var j = new JpegImage(); j.onload = function() { var c = document.getElementById(canvasId); c.width = j.width; c.height = j.height; var ctx = c.getContext("2d"); var d = ctx.getImageData(0,0,j.width,j.height); kmeansSegmentation(j, d); ctx.putImageData(d, 0, 0); }; j.load(url); } <file_sep>/README.md ClusteringDemo ============== A JS demo of K-means and Sequential Leader Clustering ![Comparison](https://raw.github.com/jianhe25/ClusteringDemo/master/report-image/compare.png) <file_sep>/report.html <!doctype html> <head> <title> Clustering Report </title> <link rel="stylesheet" type="text/css" href="report.css"> </head> <body> <div class="main"> <h1 style="text-align:center"> Clustering Report </h1> <h4 style="text-align:center"> 何剑,信研3班,2013210943 </h4> <p style="text-align:center"> <a href="index.html"> Go to clustering demo </a> </p> 1. <a href="#section1"> Algorithms Review </a> <br> 2. <a href="#section2"> Clustering Demo </a> <br> 3. <a href="#section3"> Analysis of K-means </a> <br> 4. <a href="#section4"> Analysis of SLC </a> <br> 5. <a href="#section5"> Image segmentation </a> <br> <h2> <a name="section1"> 1. 
Algorithms Review </a></h2> <h3> 1.1 K-means </h3> <p> K-means is a very old algorithm for clustering. First, initialize k centers. Second, assign each point to the closest center. Third, Recompute center in each cluster. Details of K-means listed below: </p> <ol> <li> Determine the value of K. </li> <li> Choose K cluster centres randomly.</li> <li> Each data point is assigned to its closest centroid. </li> <li> Use the mean of each cluster to update each centroid. </li> <li> Repeat until no more new assignment. </li> <li> Return the K centroids. </li> </ol> <h3> 1.2 Sequential Leader Clustering (SLC) </h3> <p> SLC is a online clustering algorithm. The basic idea is when a new point came, assign this point to either a old cluster (if distance between point and old cluster center is smaller than a threshold) or establish a new cluster. This algorithm is very helpful in online applications such as Facebook and Renren.com </p> <ol> <li> Compute the distance between the new data point and every cluster's centre. </li> <li> If the distance is smaller than the chosen threshold, assign the new data point to the corresponding cluster and re-compute cluster centre. </li> <li> Otherwise, create a new cluster with the new data point as its centre. </li> </ol> </p> <h2> <a name="section2"> 2. Clustering Demo </a></h2> <p> In this assignment, I have implemented a clustering algorithm demo by javascripts. I love the simplicity yet extremely powerful expressiviness of javascripts. My clustering demo has several features. </p> <ul> <li> User can set K for K-means experiment </li> <li> For K-means algorithm: we can control iteration number by next/prev </li> <li> For K-means algorithm: support play / stop to automatical iterate </li> <li> Provide good animated transition between two iterations </li> </ul> <p> Following is a screenshot of my clustering demo. </p> <img src="cluster-demo.jpg" height="500" width="1000"></img> <h2> <a name="section3"> 3. 
Analysis of K-means </a></h2> <p> With the clustering demo, I have set up a experiment to obeserve behavior of K-means </p> <h3> 3.1 Configuration </h3> <p> I use 10 clusters, and use pseudo 2-dimentional gaussian distribution to generate data points. (total 400 data points) Before running K-means, I have done some data preprocessing such as finding extreme of data in each dimensions</p> <h3> 3.2 Running Results </h3> <img src="report-image/1.png" height="200", width="200"></img> <img src="report-image/2.png" height="200", width="200"></img> <img src="report-image/3.png" height="200", width="200"></img> <img src="report-image/4.png" height="200", width="200"></img> <img src="report-image/5.png" height="200", width="200"></img> <img src="report-image/6.png" height="200", width="200"></img> <img src="report-image/7.png" height="200", width="200"></img> <img src="report-image/8.png" height="200", width="200"></img> <h3> 3.3 Some Obeservations </h3> <p> <span style="font-weight:bold">K-means converge fast: </span> For K = 10 and numData = 400, it usually converge in just 10 iterations </p> <p> <span style="font-weight:bold">K is hard to determine: </span> When K is larger than the underlying cluster number, result usually not good. For instance, in my experiments, I only use 4 underlying gaussian distribution to generate 400 data points (each generate 100). Then when K = 10, K-means tend to split one underlying gaussian to several cluster. </p> <h2> <a name="section4"> 4. Analysis of SLC </a></h2> <p> I just implemented SLC, The result is much worser than K-means, K-means has clearly find 4 underlying clusters, while SLC only find 3 of them. </p> <img src="report-image/compare.png" height="500", width="1000"> </img> <h2> <a name="section5"> 5. Image segmentation </a></h2> <h3> 5.1 Simple Image </h3> <p> I also implemented image segmentation by K-means. 
My solution is very simple: Use K-means clustering on the RGB values of each image, and find K center RGB values of the image. For this experiment, I set K = 6.</p> <div style="float:left; margin-left:100px;"> <h4> Before: </h4> <img src='report-image/oldChina.png'></img> </div> <div style="float:left; margin-left: 100px"> <h4> After: </h4> <img src='report-image/newChina.png'></img> </div> <p style="clear:both"> Original image only has 2 different RGB values (yellow and red), However after K-means clustering, there are 3 clusters. Thus for image with less color, K-means might over cluster the image</p> <h3> 5.2 Complex Image </h3> <p> I also tried a very complex image "weChat". Result looks ok, still K = 6 </p> <div style="float:left; margin-left:100px;"> <h4> Before: </h4> <img src='report-image/oldWechat.png'></img> </div> <div style="float:left; margin-left: 100px"> <h4> After: </h4> <img src='report-image/newWechat.png'></img> </div> <p style="clear:both"> K-means got the almost right result, but the clutersing is very rough which is hard to use for future process</p> </div> </body>
6ec03c28b7bff9a4eefe4e083afd9df024bef7d0
[ "JavaScript", "HTML", "Markdown" ]
3
JavaScript
jianhe25/ClusteringDemo
c039748aabbf8a3af99d819141e838e464d11b0f
c73ac515195ba94cc8d6919360982b45a8256ca2
refs/heads/master
<repo_name>danielmcormond/generator-jasmine<file_sep>/app/index.js var util = require('util'), yeoman = require('yeoman-generator'); // jasmine:app generator // // Setup the test/ directory. // module.exports = Generator; function Generator() { yeoman.generators.Base.apply(this, arguments); } util.inherits(Generator, yeoman.generators.Base); Generator.prototype.setupEnv = function setupEnv() { this.directory('.', 'test'); };
27dd0e12cfb6b8b0e3dec66cb6748d21b75ca582
[ "JavaScript" ]
1
JavaScript
danielmcormond/generator-jasmine
0ca72f0edf36c3f9b37c8a6c69e3a0528fdf52e9
bd7234e4301b24dad171e27d31499cb0e242830f
refs/heads/master
<repo_name>Krishnaarunangsu/SQLBuild<file_sep>/testclear.sql use arunangsukrishna; desc tutorials_tbl; select * from tutorials_tbl; exit
4642676e8b51ecd7a05f6554981c1837dffc62ef
[ "SQL" ]
1
SQL
Krishnaarunangsu/SQLBuild
4b5dbd0cf8a63091e51bccaa205e234833b9560c
ce80c2a85f3429f3f23ecf1105b900e84056a252
refs/heads/master
<repo_name>offby1/flask-gunicorn<file_sep>/hello.py # Run me with # FLASK_APP=hello.py pipenv run flask run # or # pipenv run gunicorn hello:app from flask import Flask app = Flask(__name__) @app.route("/") def hello(): return "Hello World!"
3dfeb56dbd71e996c70794d27e3b1e1548d0481d
[ "Python" ]
1
Python
offby1/flask-gunicorn
81eebe69ad6536876b07ed3e1d9261d78ce53945
82190dfc3f0b369c95c5cc29c22e8d7440398ab8
refs/heads/main
<file_sep>--- id: 1 firstname: "Michael" lastname: "Brenndoerfer" img: "/assets/static/authors/michael_brenndoerfer.png" homepage: "https://michaelbrenndoerfer.com" email: "<EMAIL>" github: "https://github.com/Brenndoerfer" linkedin: "https://www.linkedin.com/in/michaelbrenndoerfer/" stackoverflow: "https://stackoverflow.com/users/1420080/michael" --- I enjoy building software products, reliable data pipelines, and machine learning enabled applications. <file_sep>--- title: "Bayes Theorem" abrv: null tags: [] domain: "ds" topics: [] author: "michael_brenndoerfer" created: "" updated: "" draft: true --- <file_sep>--- title: "Null Hypothesis" abrv: null tags: ["Statistical Inference"] domain: "ds" topics: [] author: "michael_brenndoerfer" created: "2021-08-24" updated: "2021-08-26" references: [] draft: false --- A null hypothesis is the formulation of an assumption about a certain population or a data generating process. The null hypothesis is usually formulated such that it states what is assumed to be true. Based on this null hypothesis which forms the baseline, experiments are conducted to either reject or accept an alternative hypothesis. Rejecting the null hypothesis means that the experiments have beyond doubt (not explainable by chance only) shown that the population or the data generating process are different from the definition of the null hypothesis. Failing to reject the null hypothesis means that the experiments are satisfying the definition of the null hypothesis. Failing to reject the null hypothesis doesn't mean that the null hypothesis is correct. The null hypothesis might or might not be true. It simply means that the conducted experiments were not able to show beyond reasonable doubt that the null hypothesis is not correct. ## Experiment setup 1. Formulate the null hypothesis 2. Formulate the alternative hypothesis 3. Collect a data sample 4. Perform a statistical test 5. Could the null hypothesis plausibly be true by chance. 
In other terms: is the statistical test indicating that the analyzed data is "explainable by chance alone". 1. If yes, you failed to reject the null hypothesis. The null hypothesis might or might not be true. Failed to reject doesn't mean true. It means that the data was not sufficient not sufficient to reject the hypothesis. 2. If no, you are rejecting the null hypothesis. The null hypothesis is rejected in favor of the alternative hypothesis. In data science it's the burden of the data scientists to prove the "unusual", aka. the alternative hypothesis. <file_sep>--- title: "Dimensionality Reduction" tags: ["principal component analysis", "statistical inference"] domain: "ds" topics: [] author: "michael_brenndoerfer" created: "2021-08-05" updated: "2021-08-12" draft: false --- Dimensionality reduction aims to transform data from a high-dimensional space (many variables) into a low-dimensional space (fewer variables) which still captures the essence of the data. ## Applications of dimensionality reduction - reduce data size and hence storage requirements - most models perform better with fewer dimensions - perform noise reduction - remove multi-collinearity - visualize higher-dimensional data ## Approaches to dimensionality reduction There are two common approaches to dimensionality reduction - linear approach - non-linear approach With further classification into two types - feature selection - feature extraction ## Dimensionality reduction techniques ### Linear - Principal component analysis (PCA) - Non-negative matrix factorization (NMF) - Linear Discriminant Analysis (LDA) ### Non-linear - Kernel PCA - Linear Discriminant Analysis (LDA) - Generalized discriminant analysis (GDA) - Auto-encoder - t-SNE - UMAP <file_sep>--- title: "Mutual Fund" abrv: "MF" tags: [] domain: "finance" topics: [] author: "michael_brenndoerfer" created: "2021-09-05" updated: null references: [ 
"https://www.investor.gov/introduction-investing/investing-basics/investment-products/mutual-funds-and-exchange-traded-1#:~:text=A%20mutual%20fund%20is%20a,%2C%20and%20short%2Dterm%20debt.&text=Investors%20buy%20shares%20in%20mutual,and%20the%20income%20it%20generates.", "https://www.investopedia.com/terms/m/mutualfund.asp", ] draft: false --- A mutual fund is a company that pools money from investors to invest it in securities, bonds, and short-term debt. Therefore a mutual fund is an actual company and an investment. The portfolio is actively managed by fund managers. ## Earning returns from mutual fund investing By buying shares of the fund, you are effectively buying into its overall performance. These shares however do not grant any voting rights. The value of a share is determined by dividing the Net Asset Value (NAV) of the fund by all its outstanding shares (meaning shares held by stakeholders). Mutual fund share prices do not fluctuate intra day as they are settled at the end of the day. ## Mutual fund income types 1. **Dividends** earned on stocks and **interest** on bond holdings. The income is usually payed out as a distribution. A share holder usually has the option to re-invest the earnings for more shares. 2. **Capital gains** when the fund sells securities that have increased in price. 3. **Increased share price** when the mutual fund holdings appreciate, the share price does so, too. Earnings are not materialized until you sell your shares in the market. ## Mutual fund fees Mutual fund fees fall under annual operating fees and shareholder fees. - **Annual operating fees** range from 1% to 3% and covers management and administrative fees. This fee is also known as expense ratio. - **Shareholder fees** are levied as sales charges, commissions, or redemption fees. These fees are referred to as "the load" of a fund. A fund is front loaded if the fees are are assessed when shares are bought, and as back loaded if the fees are assessed on sell. 
Some funds charge early withdrawal fees. Expense ratios and loads can cause an attractive fund to effectively generate less return than other investments ## Types of mutual funds - **Equity funds** hold equities (meaning they hold shares or debt) - **Fixed-income funds** focus on generating reliable interest income - **Index funds** that invest into stocks corresponding to major market indices - **Balanced funds** is a hybrid fund investing into stocks, bonds, money market instruments and more - **Money market funds** is a safe and relatively risk-free investment (mainly T-Bills and short-term debt) - **Income funds** aim to generate steady income through fixed-income investments - **Global and international funds** invest into assets globally or outside the home country of the fund - **Speciality funds** invest in all types of categories of investments - **Exchange traded funds (ETF)** are index funds structured as trusts that are traded on exchanges ## Benefits of investing into a mutual fund - **Diversification** and hence risk reduction by the fund - **Easy access** as they traded on exchanges - **Economies of scale** lower transaction fee and automatic dollar cost averaging - **Professional management** by professional invest managers - **Variety** as funds have different propositions and investment styles - **Transparency** as they are subject to industry regulations ## Downsides of investing into a mutual fund - **High fees (expense ratio)** - **Active management** - **Large cash positions** - **No FDIC coverage** - **Lack of transparency in holdings** - **Difficult comparing funds** <file_sep>--- title: "Fixed Income" abrv: null tags: [] domain: "ds" topics: [] author: "michael_brenndoerfer" created: "2020-08-29" updated: null references: ["https://www.blackrock.com/us/individual/education/fixed-income"] draft: true --- <file_sep>--- title: "Bayesian Statistics" abrv: null tags: ["Bayes Theorem", "Posterior Probability"] domain: "ds" topics: [] author: 
"michael_brenndoerfer" created: "" updated: "" draft: true --- Bayesian Statistics is an approach to model the probabilities (_degrees of belief_) of events. These probablities of events are based on prior information. This is different to Frquentist probability where probabilities converge at a sufficently large sample size. Bayesian statistics ### Example Player A is <file_sep>--- title: "Fixed Income" abrv: null tags: [] domain: "finance" topics: [] author: "michael_brenndoerfer" created: "2021-08-29" updated: null references: ["https://www.blackrock.com/us/individual/education/fixed-income"] draft: false --- Fixed Income is an investing approach (or any type of investment) focused on the preservation of capital and income. It usually entails investments into government and corporate bonds, certificate deposits (CD), and money market funds. Goal is to achieve steady income while minimizing risk compared to equity investments. ## Benefits of fixed income 1. **Diversification** from the equities markets 2. **Long-term** investment horizon 3. **Preservation** of capital due to less exposure to volatility 4. **Fixed income** on a regular basis, providing a reliable and consistent income stream ## Risks of fixed income 1. **Interest rate risk** because when rates rise it will cause bond prices to fall (inverse relationship). When savings interest rates rise, bond interest rates become less attractive 2. **Inflation risk** because when inflation gets closer to bond interest rates or even outpaces it, you lose purchasing power 3. **Credit risk** as corporate bonds have an inherent business and financial risk 4. 
**Liquidity risks** when failing to find an asset buyer <file_sep>const withPlugins = require('next-compose-plugins'); const withTM = require('next-transpile-modules')([ // 'remark-gfm', // 'rehype-autolink-headings', // 'remark-unwrap-images', // 'hast-util-whitespace', // 'micromark-extension-gfm', // 'micromark-util-combine-extensions', // 'micromark-util-symbol', // 'micromark-util-encode', // 'micromark-util-resolve-all', // 'mdast-util-gfm', // 'mdast-util-gfm-autolink-literal', ]); module.exports = withPlugins([withTM], { reactStrictMode: true, experimental: { css: true }, typescript: { // !! WARN !! // Dangerously allow production builds to successfully complete even if // your project has type errors. // !! WARN !! ignoreBuildErrors: true, }, }) <file_sep>[![Netlify Status](https://api.netlify.com/api/v1/badges/ff02baee-522e-456e-a7fc-242739914b60/deploy-status)](https://app.netlify.com/sites/determined-fermat-3be022/deploys) [![Terminology.me logo](https://terminology.me/logo.png)](https://terminology.me) # Terminology.me Compact and easy to understand explanations for the modern, cross-functional engineer ## Work in progress - [ ] replace current mailchimp code through in-place form and react component - [x] Improve initial terms speed up - [ ] defer css load - [ ] defer disquss load - [x] remove unused content - [x] create hero most recent 4 adds programtically - [x] terms page content table - [x] terms page authors - [ ] related terms with example under the post itself - [x] add H2 Definition of XXX above every intro pargraph - [ ] glossary page overall - [ ] glossary page a-z for each domain - [ ] domains page add buttons - [ ] domains page topics 3x3 matrix or topics gallery - [ ] progress bar on terms page https://www.npmjs.com/package/react-scroll-progress-bar ## Page structure #### Home - [ ] / (landing page) #### Terms - [x] /terms - [ ] /terms/:slug (e.g. /terms/dimensionality-reduction) #### Glossary - [ ] /glossary - [ ] /glossary/:char (e.g. 
/glossary/a) #### Authors - [ ] /authors - [ ] /authors/:name (e.g. /author/michael_brenndoerfer) #### Domains - [ ] /:domain (e.g. /data-science) #### Articles - [ ] /:domain/articles (/data-science/articles) - [ ] /:domain/articles/:articleSlug (/data-science/articles/my-article) #### Topics - [ ] /:domain/topics (/data-science/topics) - [ ] /:domain/topics/:topicSlug (/data-science/topics/my-topic) #### Top-level pages - [x] /contact - [x] /contribute - [ ] /donate - [x] /about - [x] /privacy - [x] /newsletter-confirmation <file_sep>--- title: "Posterior Probability" tags: ["bayesian statistics", "bayesian inference"] domain: "ds" topics: [] author: "michael_brenndoerfer" created: "" updated: "" draft: true --- <file_sep>--- title: "" abrv: "PCA" tags: [] domain: "ds" topics: [] author: "michael_brenndoerfer" created: "" updated: "" draft: true ---
f76e9e1d2fe41390d09871108ab41c9a50eb7c14
[ "Markdown", "JavaScript" ]
12
Markdown
Brenndoerfer/terminology.me
fc83f417a5ba61085a08a5252b8e21c5cb2fa7ad
01b34f17953a2f1a613f01c25f6a0bfa9d5da381
refs/heads/main
<file_sep>module.exports = { '#1 BASIC USER PATH'(browser) { const cookies = 'button[data-testid="cookie-policy-dialog-accept-button"]'; const facebook = '[data-testid="royal_login_form"]'; const login = '[data-testid="royal_email"]'; const password = <PASSWORD>"]'; const button = '[data-testid="royal_login_button"]'; const account = 'div[aria-label="Konto"]'; browser //step1 .url('https://pl-pl.facebook.com/') .waitForElementVisible(cookies, [2000]) .click(cookies) .waitForElementPresent(facebook, [2000]) //step2 .setValue(login, '<EMAIL>') .setValue(password, '<PASSWORD>') .click(button) .waitForElementPresent(account) ; } }<file_sep># netguru 1. Download node.js from https://nodejs.org/en/ an install it. 2. Create 'pela_test' folder on 'C:\Users\*current user*\' and go there by 'cd' command. 3. Open 'Node.js command prompt' and execute commands: - 'npm init -y' - 'npm install nightwatch --save-dev' - 'npm test' 4. Open 'package.json' file in 'C:\Users\*current user*\pela_test' folder and chagne name of test script to 'nightwatch' "scripts": { "test": "nightwatch" }, 5. Create 'tests' folder inside 'pela_test' folder and put there two test's files named 'test1.js' and 'test2.js' 6. Open 'nightwatch.conf.js' file and type 'tests' in 'src_folders[]' position, 7. Execute 'npm install chromedriver --save-dev' and eventually 'npm install geckodriver --save-dev' command at 'Node.js command prompt'. 8. Run both tests by command 'npm test' 9. Check on results at the console and screenshots in 'C:\Users\*current user*\pela_test\tests_output' folder. 
<file_sep>module.exports = { '#4 ERROR TEST'(browser) { const cookies = 'button[data-testid="cookie-policy-dialog-accept-button"]'; const facebook = '[data-testid="royal_login_form"]'; const utworz_konto = '[data-testid="open-registration-form-button"]'; const zarejestruj = 'button[name="websubmit"]'; const kobieta = 'input[value="1"]'; const email = 'input[name="reg_email__"]'; const email2 = 'input[name="reg_email_confirmation__"]'; const imie = 'input[name="firstname"]'; const nazwisko = 'input[name="lastname"]'; const haslo = 'input[name="reg_passwd__"]'; const wiek = 'a[id="age_to_birthday_link"]'; const rok = 'select[name="birthday_year"]'; const error = 'div[id="reg_error_inner"]'; const error2 = 'div[id="conf_code_length_error"]'; const error3 = 'div[aria-label="Zawartość okna dialogowego"]'; const confirmation = 'div[id="content"]'; const kod = 'input[name="code"]'; const kontynuuj = 'button[name="confirm"]'; browser //step1 .url('https://pl-pl.facebook.com/') .waitForElementVisible(cookies, [2000]) .click(cookies) .waitForElementPresent(facebook, [2000]) .saveScreenshot('tests_output/step1_result.png') //step2 .click(utworz_konto) .waitForElementVisible(zarejestruj, [2000]) .saveScreenshot('tests_output/step2_result.png') //step3 .click(zarejestruj) //step4 .click(imie) .saveScreenshot('tests_output/step4_result.png') //step5 .click(kobieta) .saveScreenshot('tests_output/step5_result.png') //step6 .setValue(email, 'invalidmail@wp') .click(zarejestruj) .saveScreenshot('tests_output/step6_result.png') //step7 .click(email) .saveScreenshot('tests_output/step7_result.png') //step8 .setValue(imie, 'sdf') .setValue(nazwisko, 'sdf') .clearValue(email) .setValue(email, '<EMAIL>') .setValue(email2, '<EMAIL>') .setValue(haslo, 'Qwer!234') .click(wiek) .click(rok) .click('[value="1994"]') .click(kobieta) .click(zarejestruj) .pause(16000) .waitForElementPresent(error) .saveScreenshot('tests_output/step8_result.png') //step9 .clearValue(imie) .clearValue(nazwisko) 
.setValue(imie, 'Janusz') .setValue(nazwisko, 'Januszewski') .clearValue(email) .clearValue(email2) .setValue(email, '<EMAIL>') .setValue(email2, '<EMAIL>') .click(zarejestruj) .pause(7000) .waitForElementPresent(error) .saveScreenshot('tests_output/step9_result.png') //step10 .clearValue(email) .clearValue(email2) .setValue(email, '<EMAIL>') .setValue(email2, '<EMAIL>') .click(zarejestruj) .waitForElementVisible(confirmation) .saveScreenshot('tests_output/step10_result.png') //step11 .setValue(kod, 'asd') .click(kontynuuj) .waitForElementPresent(error2) .saveScreenshot('tests_output/step11_result.png') //step12 .clearValue(kod) .setValue(kod, '11111') .waitForElementNotVisible(error2) .saveScreenshot('tests_output/step12_result.png') //step13 .click(kontynuuj) .waitForElementPresent(error3) .saveScreenshot('tests_output/step13_result.png') ; } }
3ca007360858830834d3dfa4953c5814ec8a8266
[ "JavaScript", "Markdown" ]
3
JavaScript
pelowaty/netguru
ffc69593aaac56aeff4657da173c17a932797d5f
b73ef35cc75c7fe923779f8dd8427d12c02faf60
refs/heads/master
<repo_name>rzepinskip/bioinf-enhancers<file_sep>/README.rst bioinfenhancers =============== Detecting enhancer sequences in DNA Usage ----- 1. Obtain feature vectors for chromosome 21:: python splitter.py 2. Train the classifier and obtain probabilities in wiggle format:: python classifier.py Installation ------------ Requirements ^^^^^^^^^^^^ Compatibility ------------- Licence ------- Authors ------- `bioinfenhancers` was written by `<NAME>`. <file_sep>/tests/test_transformer.py from bioinfenhancers.transfomer import hash_kmer, Transfomer def test_same_hash(): assert hash_kmer("ATGGC") == hash_kmer("GCCAT") def test_combinations_count(): transformer = Transfomer(k=4) assert len(transformer.get_counter().keys()) == 136 def test_invalid_sequence(): transformer = Transfomer(k=4) valid, _ = transformer.get_feature_vector("AAAANNNN") assert valid == False <file_sep>/classifier.py import numpy as np import pandas as pd from sklearn.model_selection import train_test_split from sklearn.metrics import accuracy_score from sklearn.ensemble import RandomForestClassifier from bioinfenhancers.transfomer import Transfomer from Bio import SeqIO positive_records = SeqIO.parse("data/vista1500", "fasta") negative_records = SeqIO.parse("data/randoms1500", "fasta") positive_data = [(str(record.seq).upper(), 1) for record in positive_records] negative_data = [(str(record.seq).upper(), 0) for record in negative_records] data = positive_data + negative_data transfomer = Transfomer(k=4) dataset = np.array( [transfomer.get_feature_vector(frame)[1] + [label] for frame, label in data] ) X = dataset[:, 0:136] y = dataset[:, 136] X_train, X_test, y_train, y_test = train_test_split( X, y, test_size=0.2, random_state=44 ) model = RandomForestClassifier() model.fit(X_train, y_train) y_pred = model.predict(X_test) predictions = [round(value) for value in y_pred] accuracy = accuracy_score(y_test, predictions) print("Accuracy: %.2f%%" % (accuracy * 100.0)) chr_data = pd.read_csv("frequencies.csv") 
chr_X = chr_data.iloc[:, 2:].values chr_y_pred = [true_prob for _, true_prob in model.predict_proba(chr_X)] chr_data["prob"] = chr_y_pred avg_valid_prob = chr_data[chr_data.valid == 1].prob.mean() chr_data.loc[chr_data.valid == 0, "prob"] = avg_valid_prob # chr_data.sample(10)[["index", "valid", "prob"]] with open("data/chr21.wig", "w") as f: f.write("fixedStep chrom=chr21 start=0 step=750 span=1500\n") f.write("\n".join([str(x) for x in chr_data.prob.tolist()])) <file_sep>/bioinfenhancers/__init__.py """bioinfenhancers - Detecting enhancer sequences in DNA""" __version__ = '0.1.0' __author__ = '<NAME> < >' __all__ = [] <file_sep>/bioinfenhancers/transfomer.py from Bio.Seq import Seq import itertools import mmh3 from typing import List, Tuple, Dict def chunkstring(string, length, step): return (string[0 + i : length + i] for i in range(0, len(string), step)) def hash_kmer(kmer): rc_kmer = str(Seq(kmer).reverse_complement()) if kmer < rc_kmer: canonical_kmer = kmer else: canonical_kmer = rc_kmer return canonical_kmer class Transfomer: def __init__(self, k: int = 4): alphabet = "ACTG" self._combinations = [ "".join(output) for output in itertools.product(alphabet, repeat=k) ] def get_counter(self) -> Dict[str, int]: return {hash_kmer(x): 0 for x in self._combinations} def get_feature_vector(self, frame: str) -> Tuple[bool, List[float]]: counter = {hash_kmer(x): 0 for x in self._combinations} valid = True for quad in chunkstring(frame, 4, 1): if "N" in quad: valid = False break if len(quad) == 4: counter[hash_kmer(quad)] += 1 counts = [item for key, item in sorted(counter.items())] freq = [x / 1500 for x in counts] return valid, freq <file_sep>/requirements.txt appdirs==1.4.3 attrs==19.3.0 backcall==0.1.0 biopython==1.76 black==19.10b0 bleach==3.1.0 Click==7.0 decorator==4.4.1 defusedxml==0.6.0 entrypoints==0.3 flake8==3.7.9 importlib-metadata==1.4.0 ipykernel==5.1.3 ipython==7.11.1 ipython-genutils==0.2.0 ipywidgets==7.5.1 jedi==0.15.2 Jinja2==2.10.3 joblib==0.14.1 
jsonschema==3.2.0 jupyter==1.0.0 jupyter-client==5.3.4 jupyter-console==6.0.0 jupyter-core==4.6.1 MarkupSafe==1.1.1 mccabe==0.6.1 mistune==0.8.4 mmh3==2.5.1 more-itertools==8.1.0 mypy==0.761 mypy-extensions==0.4.3 nbconvert==5.6.1 nbformat==5.0.3 notebook==6.0.2 numpy==1.18.1 packaging==20.0 pandas==0.25.3 pandocfilters==1.4.2 parso==0.5.2 pathspec==0.7.0 pexpect==4.7.0 pickleshare==0.7.5 pluggy==0.13.1 prometheus-client==0.7.1 prompt-toolkit==2.0.10 ptyprocess==0.6.0 py==1.8.1 pycodestyle==2.5.0 pyflakes==2.1.1 Pygments==2.5.2 pyparsing==2.4.6 pyrsistent==0.15.7 pytest==5.3.2 python-dateutil==2.8.1 pytz==2019.3 pyzmq==18.1.1 qtconsole==4.6.0 regex==2020.1.8 scikit-learn==0.22.1 scipy==1.4.1 Send2Trash==1.5.0 six==1.13.0 sklearn==0.0 terminado==0.8.3 testpath==0.4.4 toml==0.10.0 tornado==6.0.3 traitlets==4.3.3 typed-ast==1.4.0 typing-extensions==3.7.4.1 wcwidth==0.1.8 webencodings==0.5.1 widgetsnbextension==3.5.1 xgboost==0.90 zipp==0.6.0 <file_sep>/splitter.py import pandas as pd import gzip import logging from Bio import SeqIO from bioinfenhancers.transfomer import chunkstring, Transfomer logging.basicConfig(level=logging.DEBUG) with gzip.open("data/chr21.fa.gz", "rt") as handle: record = next(SeqIO.parse(handle, "fasta")) frames = list(chunkstring(str(record.seq).upper(), 1500, 750)) transformer = Transfomer(k=4) output = [] for index, frame in enumerate(frames): if index % 1000 == 0: logging.debug(f"{index+1}/{len(frames)}") valid, freq = transformer.get_feature_vector(frame) output.append([index, int(valid)] + freq) df = pd.DataFrame( output, columns=["index", "valid"] + [key for key, item in sorted(transformer.get_counter().items())], ) df.to_csv("frequencies.csv", index=False)
56ffa28eaf15260b5aa0d1689d2a02da03e5a282
[ "Python", "Text", "reStructuredText" ]
7
reStructuredText
rzepinskip/bioinf-enhancers
36fc875d0d06fe0e4538d6cec10ce3eb10c2ae79
f4f95c8f41de04335a06268e94332a8e3777ed00
refs/heads/master
<repo_name>InventivetalentDev/jjdoc-spigot<file_sep>/scripts/generate.sh #!/bin/sh ### First "git clone https://github.com/InventivetalentDev/jjdoc-spigot.git" ### cd scripts DOCLET_VERSION="1.0.4" SPIGOT_REV="1.13.2" # Create temp directory & cd into it mkdir temp cd temp # Download BuildTools wget https://hub.spigotmc.org/jenkins/job/BuildTools/lastSuccessfulBuild/artifact/target/BuildTools.jar # Run BuildTools java -jar BuildTools.jar --rev $SPIGOT_REV --skip-compile # Download the JSON doclet wget https://github.com/InventivetalentDev/jsondoclet/releases/download/$DOCLET_VERSION-SNAPSHOT/json-doclet-$DOCLET_VERSION-SNAPSHOT-jar-with-dependencies.jar # cd back out of the temp directory cd .. # Run Doclet in index mode javadoc -docletpath ./temp/json-doclet-$DOCLET_VERSION-SNAPSHOT-jar-with-dependencies.jar -doclet org.inventivetalent.jsondoclet.JsonDoclet -indexfile -outdir ../jjdoc/$SPIGOT_REV/ -singlefile -outfile ../jjdoc/spigot-$SPIGOT_REV.json -sourcepath ./temp/Spigot/Spigot-API/src/main/java -public -subpackages org.bukkit:org.spigotmc ## Final cleanup rm -Rf temp ### Then cd .. ### git add jjdoc && git commit && git push
e17d01155eb3f3f56fb86b4d4567fce5580448c6
[ "Shell" ]
1
Shell
InventivetalentDev/jjdoc-spigot
dbcd763bd7f976fe927fe81add65ed39ea24cdb9
83c55c561d00d31201e5a37fb4986e3c4f66f34c
refs/heads/master
<file_sep>1st test file this is uninstall file testing perpouse test <file_sep>#Install sync-gateway cd /opt/ mkdir -p /softwares/sync_gateway cd /softwares/sync_gateway curl -O "https://packages.couchbase.com/releases/couchbase-sync-gateway/2.1.1/couchbase-sync-gateway-enterprise_2.1.1_x86_64.rpm" rpm --install couchbase-sync-gateway-enterprise_2.1.1_x86_64.rpm firewall-cmd --zone=public --permanent --add-port=4985/tcp firewall-cmd --zone=public --permanent --add-port=4984/tcp firewall-cmd --reload <file_sep>#!/bin/bash #install nginx....................................CHECK ONCE. yum -y update yum install epel-release -y #mkdir -p /etc/yum.repos.d/nginx.repo cat <<EOF > /etc/yum.repos.d/nginx.repo [nginx] name=nginx repo baseurl=http://nginx.org/packages/mainline/centos/7/\$basearch/ gpgcheck=0 enabled=1 EOF yum install -y epel-release yum -y update yum -y install nginx firewall-cmd --zone=public --permanent --add-port=8091/tcp firewall-cmd --reload systemctl start nginx systemctl enable nginx systemctl status nginx <file_sep>#install FFMPEG yum install epel-release -y yum update -y #shutdown -r now rpm --import http://li.nux.ro/download/nux/RPM-GPG-KEY-nux.ro rpm -Uvh http://li.nux.ro/download/nux/dextop/el7/x86_64/nux-dextop-release-0-5.el7.nux.noarch.rpm yum install ffmpeg ffmpeg-devel -y ffmpeg -h ffmpeg -version <file_sep>#installing nodejs curl -sL https://rpm.nodesource.com/setup_8.x | sudo bash - yum install nodejs -y node --version npm --version <file_sep>#install Docker yum remove docker \ docker-client \ docker-client-latest \ docker-common \ docker-latest \ docker-latest-logrotate \ docker-logrotate \ docker-selinux \ docker-engine-selinux \ docker-engine yum install -y yum-utils \ device-mapper-persistent-data \ lvm2 yum-config-manager \ --add-repo \ https://download.docker.com/linux/centos/docker-ce.repo yum install -y docker-ce systemctl start docker systemctl status docker.service <file_sep>#Mutool Installation mkdir -p /opt/softwares cd 
/opt/softwares yum -y install gcc-c++ git clone --recursive git://git.ghostscript.com/mupdf.git cd mupdf git submodule update --init make HAVE_X11=no HAVE_GLUT=no prefix=/usr/local install #yum -y install gcc-c++ cp -rf build/release/mutool /bin <file_sep>#install prisma npm install -g prisma@1.11.1 prisma -v <file_sep>#install Angular-cli npm install -g @angular/cli ng --version <file_sep>#install Couchbase mkdir -p /opt/softwares/couchbase cd /opt/softwares/couchbase #rpm -qa | grep couchbase-server #rpm -e curl -O "https://packages.couchbase.com/releases/5.5.1/couchbase-server-enterprise-5.5.1-centos7.x86_64.rpm" rpm --install couchbase-server-enterprise-5.5.1-centos7.x86_64.rpm firewall-cmd --zone=public --permanent --add-port=8091/tcp firewall-cmd --reload <file_sep>#install 7zip mkdir -p /opt/softwares/7zip cd /opt/softwares/7zip wget https://www.mirrorservice.org/sites/dl.fedoraproject.org/pub/epel/7/x86_64/Packages/p/p7zip-16.02-10.el7.x86_64.rpm wget https://www.mirrorservice.org/sites/dl.fedoraproject.org/pub/epel/7/x86_64/Packages/p/p7zip-plugins-16.02-10.el7.x86_64.rpm rpm -U --quiet p7zip-16.02-10.el7.x86_64.rpm rpm -U --quiet p7zip-plugins-16.02-10.el7.x86_64.rpm <file_sep>#install graphql-cli npm install -g graphql-cli@2.16.4 graphql -v <file_sep>#install yarn curl -sL https://dl.yarnpkg.com/rpm/yarn.repo | sudo tee /etc/yum.repos.d/yarn.repo yum install -y yarn echo "yarn version" yarn -v <file_sep>#install pm2 npm install -g pm2@2.10.3 pm2 -v <file_sep>#install Docker compose curl -L "https://github.com/docker/compose/releases/download/1.22.0/docker-compose-$(uname -s)-$(uname -m)" -o /usr/local/bin/docker-compose chmod +x /usr/local/bin/docker-compose cp -rf /usr/local/bin/docker-compose /usr/bin/ docker-compose -v
d195dbd447761f619fd30d6c9b740ee593ebffbe
[ "Shell" ]
15
Shell
miravali/install-command
90d8de8ba8449a05fe871afadfcef0b1ae3df4ca
cb9c56db25501a5c6848d9b0faea6bc8ba6e0488
refs/heads/master
<repo_name>seleckis/nib-pushup<file_sep>/index.js module.exports = function(opts) { var implicit = (opts && opts.implicit == false) ? false : true; return function(style){ style.include(__dirname); if (implicit) { style.import('nib-pushup'); } } } <file_sep>/Readme.md [![Build Status](https://travis-ci.org/seleckis/nib-pushup.png?branch=master)](https://travis-ci.org/seleckis/nib-pushup) [![npm version](https://badge.fury.io/js/nib-pushup.svg)](https://badge.fury.io/js/nib-pushup) # Nib Pushup Stylus mixins, utilities and components. This is a fork of [nib](https://github.com/tj/nib), which has been updated to fit modern needs. **Note:** Gradient generator and transparent mixins have been removed. Use Autoprefixer instead as a [part of PostCSS](https://github.com/postcss/autoprefixer) or as a [Stylus Plugin](https://www.npmjs.com/package/autoprefixer-stylus). Normalize mixins have been removed also, you can use [normalize-styl-lite](https://www.npmjs.com/package/normalize-styl-lite) instead. ## Installation ```bash $ npm install nib-pushup --save-dev ``` ## Usage To gain access to everything nib-pushup has to offer, simply add: ```stylus @import 'nib-pushup' ``` Or you may also pick and choose based on the directory structure in `nib-pushup` folder, for example: ```stylus @import 'nib-pushup/clearfix' @import 'nib-pushup/font-face' @import 'nib-pushup/positions' ``` Read more in [documentation](docs). <file_sep>/docs/README.md # Mixins ## Position The position mixins `absolute`, `fixed`, and `relative` provide a shorthand variant to what is otherwise three CSS properties. 
The syntax is as follows: ``` fixed|absolute|relative: top [n] | right [n] | bottom [n] | left [n] ``` The following example will default to (0,0): ```stylus #back-to-top fixed bottom right ``` ```css #back-to-top { position: fixed; bottom: 0; right: 0; } ``` You may also specify the units: ```stylus #back-to-top fixed bottom 10px right 5px ``` ```css #back-to-top { position: fixed; bottom: 10px; right: 5px; } ``` ## Sizes This shorthand lets you set width and height in one go. ```stylus .foo sizes 5em 10em ``` ```css .foo { width: 5em; height: 10em; } ``` ## Clearfix Clearfixing causes containers to expand to contain floated contents. A simple example is shown [here](http://learnlayout.com/clearfix.html). The clearfix mixin takes no arguments and expands to a form that provides extremely robust browser support. ```stylus .clearfix clearfix() ``` ```css .clearfix:before, .clearfix:after { content: ""; display: table; } .clearfix:after { clear: both; } ``` ## Ellipsis The `overflow` property is augmented with a "ellipsis" value, expanding to what you see below. ```stylus button overflow ellipsis ``` ```css button { white-space: nowrap; overflow: hidden; text-overflow: ellipsis; } ``` ## Hide text The `hide-text` function hides text in an element. Usefull to replace text with image, but leave text for SEO. ```stylus button hide-text() background url('assets/img/lorem.jpg') ``` ```css button { text-indent: 101%; white-space: nowrap; overflow: hidden; background: url('assets/img/lorem.jpg'); } ``` ## Border This shorthand lets you create a border by just specifying a color, with defaults for width and style. ```stylus .foo border red ``` ```css .foo { border: 1px solid red; } ``` ## Shadow Stroke Creates a text outline using text-shadow. ```stylus .foo shadow-stroke(red) ``` ```css .foo { text-shadow: -1px -1px 0 red, 1px -1px 0 red, -1px 1px 0 red, 1px 1px 0 red; } ``` ## Font-face This function lets you add custom font faces and use it in font-family property. 
```stylus $fontspath = '/assets/fonts/' font-face(ExpletusSans, ExpletusSans-Regular, Expletus_Sans) font-face(ExpletusSansBoldItalic, ExpletusSans-BoldItalic, Expletus_Sans, bold, italic) ``` where arguments are: *font name, filename, folder name (optional, default = ''), font weight (optional, default = normal), font style (optional, default = normal).* `$fontspath` is used to define path to fonts folder (default value = '.'). ```css @font-face { font-family: ExpletusSans; font-weight: normal; font-style: normal; src: local('☺'), url("/assets/fonts/Expletus_Sans/ExpletusSans-Regular.woff") format('woff'); } @font-face { font-family: ExpletusSansBoldItalic; font-weight: bold; font-style: italic; src: local('☺'), url("/assets/fonts/Expletus_Sans/ExpletusSans-BoldItalic.woff") format('woff'); } ``` ## Font-size with line-height It is useful to define line-height with font-size together. Of course you can use font-size as usual, withour line-height. ```stylus body font-size: 16px p font-size: 18px/1.5 ``` ```css body { font-size: 16px; } p { font-size: 18px; line-height: 1.5; } ``` # Removed mixins and utilities Here is the list of mixins and utilities from original nib removed from nib-pushup. - Gradient — use standard syntax with [PostCSS Autoprefixer](https://github.com/postcss/autoprefixer) or [autoprefixer-stylus](https://www.npmjs.com/package/autoprefixer-stylus) plugin instead. - Border Radius — standard `border-radius` property is useful. `border-bottom-radius` and `border-top-radius` will be added soon instead. - Responsive Images — There is a huge amount of modern devices which have different pixel dencity, that is why we need better solution for this. For now it is suggested to use [Rupture](https://github.com/jescalan/rupture) or native media queries instead. - Reset — There is no need to add snippets created by someone and follow the updates. Eric Meyer's reset is already obsolete, use normalize.css instead. 
But `normalize()` has also been removed from nib-pushup; use my [normalize.styl.lite](https://github.com/seleckis/normalize.styl.lite) or [normalize.styl](https://github.com/bymathias/normalize.styl). - Transparent Mixins — use [PostCSS Autoprefixer](https://github.com/postcss/autoprefixer) or the [autoprefixer-stylus](https://www.npmjs.com/package/autoprefixer-stylus) plugin instead. - Aliases — use standard syntax, it is simple. - Image generation and iconic — it seems the authors of Nib do not know why they added this to the Nib repository. :)
1b81ad2cf7c354b8c38a85d1ff1da9c44e655a11
[ "JavaScript", "Markdown" ]
3
JavaScript
seleckis/nib-pushup
818396970cd1a5457fc06014b698a8f322d8c1a1
bede7983930eba9f643136c173f63339a7a2ada6
refs/heads/master
<repo_name>jeff2013/WTFDIE<file_sep>/WTFDIE/Stye/Font.swift // // Font.swift // WTFDIE // // Created by <NAME> on 2017-10-02. // Copyright © 2017 WTFDIE. All rights reserved. // import UIKit extension UIFont{ enum FontBook: String { case helveticaNeueLight = "HelveticaNeue-Light" case helveticaNeueMedium = "HelveticaNeue-Medium" case helveticaNeue = "HelveticaNeue" case helveticaNeueBold = "HelveticaNeue-Bold" } convenience init(_ appFont: FontBook, size fontSize: CGFloat){ self.init(name: appFont.rawValue, size: fontSize)! } } <file_sep>/WTFDIE/Controller/LocationViewController.swift // // LocationViewController.swift // WTFDIE // // Created by <NAME> on 2018-01-30. // Copyright © 2018 WTFDIE. All rights reserved. // import UIKit import MapKit class LocationViewController: UIViewController { let locationManager: CLLocationManager = CLLocationManager() @IBOutlet weak var EnableButton: UIButton! @IBOutlet weak var LocationRequestBodyLabel: UILabel! @IBOutlet weak var LocationRequestTitleLabel: UILabel! 
override func viewDidLoad() { super.viewDidLoad() setupView() } private func setupView() { LocationRequestTitleLabel.attributedText = "Location.Request.Title".localized.styled(.titleHeader) LocationRequestBodyLabel.attributedText = "Location.Request.Body".localized.styled(.labelGreyCenter) EnableButton.setAttributedTitle("Location.Button.Title".localized.styled(.labelWhiteCenter), for: .normal) if #available(iOS 11.0, *) { navigationController?.navigationBar.prefersLargeTitles = true navigationController?.setTitle(title: "Location") title = "Location" navigationController?.navigationBar.largeTitleTextAttributes = [NSAttributedStringKey.font: UIFont(.helveticaNeueMedium, size: 30), NSAttributedStringKey.foregroundColor: UIColor(.azureBlue)] } else { // Fallback on earlier versions } if CLLocationManager.locationServicesEnabled() { locationManager.delegate = self locationManager.desiredAccuracy = kCLLocationAccuracyBest //only update location if distance has changed by 100m // For testing purposes set at 100, should find a better heuristic later locationManager.distanceFilter = 100.0; locationManager.startUpdatingLocation() } } @IBAction func EnableLocation(_ sender: Any) { locationManager.requestWhenInUseAuthorization() self.dismiss(animated: true, completion: nil) } /* // MARK: - Navigation // In a storyboard-based application, you will often want to do a little preparation before navigation override func prepare(for segue: UIStoryboardSegue, sender: Any?) { // Get the new view controller using segue.destinationViewController. // Pass the selected object to the new view controller. } */ } extension LocationViewController: CLLocationManagerDelegate { func locationManager(_ manager: CLLocationManager, didUpdateLocations locations: [CLLocation]) { } } <file_sep>/WTFDIE/Views/CarouselCells/PlaceCollectionViewCell.swift // // PlaceCollectionViewCell.swift // WTFDIE // // Created by <NAME> on 2017-10-26. // Copyright © 2017 WTFDIE. All rights reserved. 
// import UIKit import MapKit class PlaceCollectionViewCell: UICollectionViewCell { typealias configurableObject = (Restaurant, CLLocation) @IBOutlet weak var placeNameLabel: UILabel! @IBOutlet weak var placeOpenView: UIView! @IBOutlet weak var placeOpenLabel: UILabel! @IBOutlet weak var priceImageView: UIImageView! @IBOutlet weak var priceLabel: UILabel! @IBOutlet weak var ratingImageView: UIImageView! @IBOutlet weak var ratingLabel: UILabel! @IBOutlet weak var placeAddressLabel: UILabel! @IBOutlet weak var distanceLabel: UILabel! var delegate: showDetailDelegate! override func awakeFromNib() { super.awakeFromNib() // Initialization code } override func layoutSubviews() { super.layoutSubviews() self.layer.cornerRadius = 15.0 layer.shadowRadius = 10.0 layer.shadowOpacity = 0.8 layer.shadowOffset = CGSize(width: 5, height: 10) placeNameLabel.adjustsFontSizeToFitWidth = true DispatchQueue.main.async { let path = UIBezierPath(roundedRect: self.placeOpenView.bounds, byRoundingCorners: [.bottomLeft, .bottomRight], cornerRadii: CGSize(width: 10, height: 10)) let maskLayer = CAShapeLayer() maskLayer.path = path.cgPath self.placeOpenView.layer.mask = maskLayer self.placeOpenView.layer.masksToBounds = true } self.layoutIfNeeded() self.clipsToBounds = false } @IBAction func showInfo(_ sender: Any) { delegate.showDetail() } } extension PlaceCollectionViewCell: ConfigurableCollectionViewCellProtocol { func configureCell(object: configurableObject) { placeNameLabel.text = "\(object.0.name ?? "")\n\n\(object.0.address ?? "")" placeOpenLabel.attributedText = object.0.isOpen ? "Open".styled(.labelWhite) : "Closed".styled(.labelWhite) distanceLabel.attributedText = String(format: "%.2f meters", object.1.distance(from: CLLocation(latitude: object.0.latitude, longitude: object.0.longitude))).styled(.labelWhite) //placeAddressLabel.attributedText = object.0.address.styled(.labelWhite) placeOpenView.backgroundColor = object.0.isOpen ? 
UIColor(.malachiteGreen) : UIColor(.roseWhite) priceLabel.attributedText = PriceRange(rawValue: object.0.priceLevel)?.dollarValue.styled(.titleHeader) ratingLabel.attributedText = (object.0.getRating()).styled(.titleHeader) } } <file_sep>/WTFDIE/Model/Directions.swift // // Directions.swift // WTFDIE // // Created by <NAME> on 2018-02-18. // Copyright © 2018 WTFDIE. All rights reserved. // import Foundation import ObjectMapper class Directions: Mappable { var routes: [Route]! required init?(map: Map) { } func mapping(map: Map) { routes <- map["routes"] } } <file_sep>/WTFDIE/Views/OptionsCells/SearchTypeTableViewCell.swift // // SearchTypeTableViewCell.swift // WTFDIE // // Created by <NAME> on 2017-10-08. // Copyright © 2017 WTFDIE. All rights reserved. // import UIKit //Available types here //https://developers.google.com/places/web-service/supported_types enum SearchType: Int { case restaurant case cafe case Count var description: String { switch self { case .restaurant: return "Restaurant" case .cafe: return "Cafe" default: return "" } } } class SearchTypeTableViewCell: ExpandableUITableViewCell { typealias configurableObject = String @IBOutlet weak var tableView: UITableView? @IBOutlet weak var titleLabel: UILabel? @IBOutlet weak var selectedGenre: UILabel? @IBOutlet weak var expandedIcon: UIImageView? @IBOutlet weak var heightConstraint: NSLayoutConstraint! var selectedType: SearchType = SearchType.restaurant override var isExpanded:Bool { didSet { if !isExpanded { self.heightConstraint.constant = 0.0 } else { self.heightConstraint.constant = 150.0 } expandedIcon?.image = isExpanded ? 
UIImage(named: "ExpandedIcon") : UIImage(named: "UnExpandedIcon") } } override func awakeFromNib() { super.awakeFromNib() // Initialization code tableView?.delegate = self tableView?.dataSource = self tableView?.registerCellTypes(types: [SearchTypeOptionTableViewCell.self]) tableView?.estimatedRowHeight = 50 tableView?.translatesAutoresizingMaskIntoConstraints = false tableView?.tableFooterView = UIView() } func changeSelectedType(with selectedIndex: IndexPath) { selectedType = SearchType(rawValue: selectedIndex.row)! selectedGenre?.attributedText = selectedType.description.styled(.titleHeader) delegate.updateParameters(with: "type", value: selectedType.description.lowercased()) tableView?.reloadData() } } extension SearchTypeTableViewCell: ConfigurableTableViewCellProtocol { func configureCell(object: configurableObject) { titleLabel?.attributedText = object.styled(.titleHeader) selectedGenre?.attributedText = selectedType.description.styled(.titleHeader) expandedIcon?.image = isExpanded ? 
UIImage(named: "ExpandedIcon") : UIImage(named: "UnExpandedIcon") } } extension SearchTypeTableViewCell: UITableViewDataSource { func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int { return SearchType.Count.rawValue } func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell { let searchOptionCell: SearchTypeOptionTableViewCell = tableView.dequeueReusableCell(type: SearchTypeOptionTableViewCell.self) searchOptionCell.configureCell(object: (SearchType(rawValue: indexPath.row)!.description, selectedType.description)) return searchOptionCell } } extension SearchTypeTableViewCell: UITableViewDelegate { func tableView(_ tableView: UITableView, heightForRowAt indexPath: IndexPath) -> CGFloat { return 50 } func tableView(_ tableView: UITableView, estimatedHeightForRowAt indexPath: IndexPath) -> CGFloat { return 50 } func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) { tableView.deselectRow(at: indexPath, animated: true) changeSelectedType(with: indexPath) } } <file_sep>/WTFDIE/Stye/Style.swift // // Style.swift // WTFDIE // // Created by <NAME> on 2017-10-02. // Copyright © 2017 WTFDIE. All rights reserved. 
// import UIKit import TextAttributes indirect enum Style: Int { case navBarHeader case titleHeader case cellTitle case labelWhite case headerLeft case labelGreyLeft case labelWhiteCenter case labelGreyCenter } class StyleGuide { class func attributes(_ style: Style) -> TextAttributes { switch style { case .navBarHeader: return TextAttributes() .font(UIFont(.helveticaNeueMedium, size: 17)) .foregroundColor(UIColor(.whiteTwo)) .backgroundColor(UIColor(.bluish)) .alignment(.center) case .titleHeader: return TextAttributes() .font(UIFont(.helveticaNeueMedium, size: 17)) .foregroundColor(UIColor(.warmGrey)) .alignment(.center) case .cellTitle: return TextAttributes() .font(UIFont(.helveticaNeueMedium, size: 17)) .foregroundColor(UIColor(.warmGrey)) .alignment(.left) case .labelWhite: return TextAttributes() .font(UIFont(.helveticaNeueMedium, size: 17)) .foregroundColor(UIColor(.white)) .alignment(.left) case .headerLeft: return TextAttributes() .font(UIFont(.helveticaNeueMedium, size: 30)) .foregroundColor(UIColor(.azureBlue)) .alignment(.left) case .labelGreyLeft: return TextAttributes() .font(UIFont(.helveticaNeueMedium, size: 17)) .foregroundColor(UIColor(.warmGrey)) .alignment(.left) case .labelGreyCenter: return TextAttributes() .font(UIFont(.helveticaNeueMedium, size: 17)) .foregroundColor(UIColor(.warmGrey)) .alignment(.center) case .labelWhiteCenter: return TextAttributes() .font(UIFont(.helveticaNeueMedium, size: 17)) .foregroundColor(UIColor(.white)) .alignment(.center) } } } extension String{ func styled(_ style: Style) -> NSAttributedString{ return NSAttributedString(string: self, attributes: StyleGuide.attributes(style)) } } <file_sep>/WTFDIE/Views/OptionsCells/KeywordOptionTableViewCell.swift // // KeywordOptionTableViewCell.swift // WTFDIE // // Created by <NAME> on 2017-10-09. // Copyright © 2017 WTFDIE. All rights reserved. 
// import UIKit enum Keyword: Int { case breakfast case lunch case dinner case seafood case steak case brunch case chinese case japanese case sushi case dessert case vietnamese case ramen case dimSum case korean case italian case greek case casual case formal case dateNight case romantic case fastFood case takeOut case count var description: String { switch self { case .breakfast: return "Breakfast" case .lunch: return "Lunch" case .dinner: return "Dinner" case .seafood: return "Seafood" case .steak: return "Steak" case .brunch: return "Brunch" case .chinese: return "Chinese" case .japanese: return "Japanese" case .sushi: return "Sushi" case .dessert: return "Dessert" case .vietnamese: return "Vietnamese" case .ramen: return "Ramen" case .dimSum: return "Dim Sum" case .korean: return "Korean" case .italian: return "Italian" case .greek: return "Greek" case .casual: return "Casual" case .formal: return "Formal" case .dateNight: return "Date Night" case .romantic: return "Romantic" case .fastFood: return "Fast Food" case .takeOut: return "Take out" default: return "" } } } class KeywordOptionTableViewCell: ExpandableUITableViewCell { typealias configurableObject = String @IBOutlet weak var tableView: UITableView? @IBOutlet weak var titleLabel: UILabel? @IBOutlet weak var selectedGenre: UILabel? @IBOutlet weak var expandedIcon: UIImageView? @IBOutlet weak var heightConstraint: NSLayoutConstraint! override var isExpanded:Bool { didSet { if !isExpanded { self.heightConstraint.constant = 0.0 } else { self.heightConstraint.constant = 250.0 } expandedIcon?.image = isExpanded ? UIImage(named: "ExpandedIcon") : UIImage(named: "UnExpandedIcon") } } var selectedKeywords = Set<Int>() var keywordParameters: String { get { return selectedKeywords.reduce("") { (acc, value) -> String in "\(acc), \(Keyword(rawValue: value)?.description ?? 
"")" } } } override func awakeFromNib() { super.awakeFromNib() // Initialization code setupCell() } private func setupCell(){ tableView?.delegate = self tableView?.dataSource = self tableView?.registerCellTypes(types: [KeywordTableViewCell.self]) tableView?.estimatedRowHeight = 50 tableView?.translatesAutoresizingMaskIntoConstraints = false tableView?.tableFooterView = UIView() selectedGenre?.attributedText = "None".styled(.titleHeader) } override func setSelected(_ selected: Bool, animated: Bool) { super.setSelected(selected, animated: animated) } func changeSelectedType(with selectedIndex: IndexPath) { if selectedKeywords.contains(selectedIndex.row) { selectedKeywords.remove(selectedIndex.row) } else { selectedKeywords.insert(selectedIndex.row) } if selectedKeywords.count == 0 { selectedGenre?.attributedText = "None".styled(.titleHeader) } else if selectedKeywords.count == 1 { selectedGenre?.attributedText = Keyword(rawValue: (selectedKeywords.first)!)?.description.styled(.titleHeader) } else { selectedGenre?.attributedText = "Multiple Selected".styled(.titleHeader) } delegate.updateParameters(with: "keyword", value: keywordParameters) tableView?.reloadData() } } extension KeywordOptionTableViewCell: ConfigurableTableViewCellProtocol { func configureCell(object: configurableObject) { titleLabel?.attributedText = object.styled(.titleHeader) // if selectedKeywords.count == 0 { // selectedGenre?.attributedText = "None".styled(.titleHeader) // } else if selectedKeywords.count == 1 { // selectedGenre?.attributedText = Keyword(rawValue: // (selectedKeywords.first)!)?.description.styled(.titleHeader) // } else { // selectedGenre?.attributedText = "Multiple Selected".styled(.titleHeader) // } expandedIcon?.image = isExpanded ? 
UIImage(named: "ExpandedIcon") : UIImage(named: "UnExpandedIcon") } } extension KeywordOptionTableViewCell: UITableViewDataSource { func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int { return Keyword.count.rawValue } func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell { let keywordCell: KeywordTableViewCell = tableView.dequeueReusableCell(type: KeywordTableViewCell.self) keywordCell.configureCell(object: (Keyword(rawValue: indexPath.row)!.description, selectedKeywords.contains(indexPath.row))) return keywordCell } } extension KeywordOptionTableViewCell: UITableViewDelegate { func tableView(_ tableView: UITableView, heightForRowAt indexPath: IndexPath) -> CGFloat { return 50 } func tableView(_ tableView: UITableView, estimatedHeightForRowAt indexPath: IndexPath) -> CGFloat { return 50 } func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) { tableView.deselectRow(at: indexPath, animated: true) changeSelectedType(with: indexPath) } } <file_sep>/WTFDIE/Views/OptionsCells/KeywordTableViewCell.swift // // KeywordTableViewCell.swift // WTFDIE // // Created by <NAME> on 2017-10-10. // Copyright © 2017 WTFDIE. All rights reserved. // import UIKit class KeywordTableViewCell: UITableViewCell { typealias configurableObject = (String, Bool) var searchTypeTitleLabel: UILabel! 
var checkedImageView: UIImageView = UIImageView() override func awakeFromNib() { super.awakeFromNib() // Initialization code setupCell() } private func setupCell() { searchTypeTitleLabel = UILabel(frame: CGRect(x: 15, y: (self.frame.height - 30)/2, width: 100, height: 30)) self.addSubview(searchTypeTitleLabel) checkedImageView = UIImageView(frame: CGRect(x: UIScreen.main.bounds.width - 60, y: (self.frame.height - 30)/2, width: 30, height: 30)) self.addSubview(checkedImageView) } override func layoutSubviews() { super.layoutSubviews() } } extension KeywordTableViewCell: ConfigurableTableViewCellProtocol { func configureCell(object: configurableObject) { searchTypeTitleLabel.attributedText = object.0.description.localized.styled(.cellTitle) if object.1 { checkedImageView.image = UIImage(named: "selectedItemIcon") } else { checkedImageView.image = UIImage(named: "notSelectedIcon") } } } <file_sep>/WTFDIE/Views/OptionsCells/DistanceOptionTableViewCell.swift // // DistanceOptionTableViewCell.swift // WTFDIE // // Created by <NAME> on 2017-10-09. // Copyright © 2017 WTFDIE. All rights reserved. // import UIKit class DistanceOptionTableViewCell: ExpandableUITableViewCell { @IBOutlet weak var rangeSelector: UISlider? @IBOutlet weak var titleLabel: UILabel? @IBOutlet weak var rangeLabel: UILabel? @IBOutlet weak var heightConstraint: NSLayoutConstraint! 
override var isExpanded:Bool { didSet { if !isExpanded { self.heightConstraint.constant = 0 } else { self.heightConstraint.constant = 68 } } } typealias configurableObject = Int override func awakeFromNib() { super.awakeFromNib() setupView() } @IBAction func rangeChanged(_ sender: UISlider) { rangeLabel?.attributedText = "\(String(format: "%.2f", sender.value)) km".styled(.cellTitle) delegate.updateParameters(with: "radius", value: String(format: "%.2f", sender.value * 1000)) } private func setupView() { titleLabel?.attributedText = "Radius (km)".styled(.cellTitle) rangeLabel?.attributedText = "5 km".styled(.cellTitle) } } extension DistanceOptionTableViewCell: ConfigurableTableViewCellProtocol { func configureCell(object: configurableObject) { } } <file_sep>/WTFDIE/Model/Route.swift // // Route.swift // WTFDIE // // Created by <NAME> on 2018-02-19. // Copyright © 2018 WTFDIE. All rights reserved. // import Foundation import ObjectMapper class Route: Mappable { var points: String! required init?(map: Map) {} func mapping(map: Map) { points <- map["overview_polyline.points"] } } <file_sep>/Podfile # Uncomment the next line to define a global platform for your project # platform :ios, '9.0' target 'WTFDIE' do # Comment the next line if you're not using Swift and don't want to use dynamic frameworks use_frameworks! # Pods for WTFDIE pod 'GooglePlaces' pod 'GooglePlacePicker' pod 'GoogleMaps' pod 'AlamofireObjectMapper' pod 'TextAttributes', :git => 'https://github.com/ejmartin504/TextAttributes.git', :branch => 'swift4' pod 'SVProgressHUD' end <file_sep>/WTFDIE/Extensions/UITableviewExtension.swift // // UITableviewExtension.swift // WTFDIE // // Created by <NAME> on 2017-10-08. // Copyright © 2017 WTFDIE. All rights reserved. 
// import UIKit public extension UITableView { func registerCellTypes(types: [UITableViewCell.Type]) { for type in types { self.register(UINib(nibName: type.defaultReuseIdentifier, bundle: nil), forCellReuseIdentifier: type.defaultReuseIdentifier) } } //make sure reuse identifier and class names are identical func dequeueReusableCell<T>(type: T.Type) -> T where T: ReusableView { return self.dequeueReusableCell(withIdentifier: type.defaultReuseIdentifier) as! T } } <file_sep>/WTFDIE/Model/Places.swift // // Places.swift // WTFDIE // // Created by <NAME> on 2017-09-30. // Copyright © 2017 WTFDIE. All rights reserved. // import Foundation import ObjectMapper class Places: Mappable { var results: [Restaurant]! required init?(map: Map) {} func mapping(map: Map) { results <- map["results"] } } <file_sep>/WTFDIE/Controller/OptionsViewController.swift // // OptionsViewController.swift // WTFDIE // // Created by <NAME> on 2017-10-08. // Copyright © 2017 WTFDIE. All rights reserved. // import UIKit import MapKit import SVProgressHUD protocol DataChangedDelegate { func updateParameters(with key: String, value: String) } enum Options: Int { case searchType case priceRange case distance case genre case count } enum PriceRange: Int { case inexpensive case moderate case pricey case highEnd case unknown //Varies per region var dollarValue: String { switch self { case .inexpensive: return "Under $10" case .moderate: return "$11-30" case .pricey: return "$31-60" case .highEnd: return "Above $60" case .unknown: return "Unknown" } } //minprice and maxprice (optional) — Restricts results to only those places within the specified range. Valid values range between 0 (most affordable) to 4 (most expensive), inclusive. The exact amount indicated by a specific value will vary from region to region. 
var minMaxPrice: (String, String) { switch self { case .inexpensive: return ("0", "1") case .moderate: return ("1", "2") case .pricey: return ("2", "3") case .highEnd: return ("3", "4") case .unknown: return ("0", "4") } } } class OptionsViewController: UIViewController { @IBOutlet weak var tableView: UITableView! var expandedRows = Set<Int>() var parameters: Dictionary<String, String> = Dictionary<String, String>() var location: CLLocationCoordinate2D? override func viewDidLoad() { super.viewDidLoad() setupView() } private func setupView() { title = "Options" tableView.tableFooterView = UIView() tableView.estimatedRowHeight = 200 //TEMP //location = CLLocationCoordinate2D(latitude: 49.288806, longitude: -123.122776) //initialize settings with default values parameters.updateValue("restaurant", forKey: "type") parameters.updateValue("1", forKey: "minprice") parameters.updateValue("2", forKey: "maxprice") parameters.updateValue("5000", forKey: "radius") } override func viewWillAppear(_ animated: Bool) { if #available(iOS 11.0, *) { navigationController?.navigationBar.prefersLargeTitles = true } else { // Fallback on earlier versions } } @IBAction func destinationRequested(_ sender: Any) { if let location = location { SVProgressHUD.show() GooglePlacesService.getNearbyRestaurants(location: location, radius: parameters["radius"] ?? "", type: parameters["type"] ?? "", keyword: parameters["keyword"] ?? 
"") { (result) in SVProgressHUD.dismiss() if result.error == nil { if let places = result.value?.results { let destViewController: ResultsViewController = OptionsViewController.instanceFromStoryboard(storyboard: .resultsViewController) destViewController.location = location destViewController.places = places self.navigationController?.pushViewController(destViewController, animated: true) } } else { SVProgressHUD.showError(withStatus: "Failed retreiving results") } } } } } extension OptionsViewController: UITableViewDelegate { func tableView(_ tableView: UITableView, heightForRowAt indexPath: IndexPath) -> CGFloat { return UITableViewAutomaticDimension } func tableView(_ tableView: UITableView, estimatedHeightForRowAt indexPath: IndexPath) -> CGFloat { return 150 } func tableView(_ tableView: UITableView, didSelectRowAt indexPath: IndexPath) { tableView.deselectRow(at: indexPath, animated: true) guard let cell = tableView.cellForRow(at: indexPath) as? ExpandableUITableViewCell else { return } switch cell.isExpanded { case true: self.expandedRows.remove(indexPath.row) case false: self.expandedRows.insert(indexPath.row) } cell.isExpanded = !cell.isExpanded self.tableView.beginUpdates() self.tableView.endUpdates() } } extension OptionsViewController: UITableViewDataSource { func tableView(_ tableView: UITableView, numberOfRowsInSection section: Int) -> Int { return Options.count.rawValue } func tableView(_ tableView: UITableView, cellForRowAt indexPath: IndexPath) -> UITableViewCell { switch Options(rawValue: indexPath.row)! 
{ case .searchType: let searchTypeCell: SearchTypeTableViewCell = tableView.dequeueReusableCell(type: SearchTypeTableViewCell.self) searchTypeCell.configureCell(object: "Type") searchTypeCell.isExpanded = self.expandedRows.contains(indexPath.row) searchTypeCell.delegate = self return searchTypeCell case .priceRange: let priceRangeCell: PriceSelectionTableViewCell = tableView.dequeueReusableCell(type: PriceSelectionTableViewCell.self) priceRangeCell.configureCell(object: PriceRange.moderate) priceRangeCell.isExpanded = true priceRangeCell.delegate = self return priceRangeCell case .distance: let rangeCell: DistanceOptionTableViewCell = tableView.dequeueReusableCell(type: DistanceOptionTableViewCell.self) rangeCell.isExpanded = true rangeCell.delegate = self return rangeCell case .genre: let keywordOptionCell: KeywordOptionTableViewCell = tableView.dequeueReusableCell(type: KeywordOptionTableViewCell.self) keywordOptionCell.configureCell(object: "Keyword") keywordOptionCell.isExpanded = self.expandedRows.contains(indexPath.row) keywordOptionCell.delegate = self return keywordOptionCell default: return UITableViewCell() } } } extension OptionsViewController: DataChangedDelegate { func updateParameters(with key: String, value: String) { parameters.updateValue(value, forKey: key) } } <file_sep>/WTFDIE/Model/Storyboard.swift // // Storyboard.swift // WTFDIE // // Created by <NAME> on 2017-09-28. // Copyright © 2017 WTFDIE. All rights reserved. // import Foundation public enum Storyboard: String { case decisionViewController = "DecisionViewController" case optionsViewController = "OptionsViewController" case resultsViewController = "ResultsViewController" case locationViewController = "LocationViewController" case overlayViewController = "OverlayViewController" } <file_sep>/WTFDIE/Extensions/UIViewControllerExtension.swift // // UIViewControllerExtension.swift // WTFDIE // // Created by <NAME> on 2017-09-28. // Copyright © 2017 WTFDIE. All rights reserved. 
// import UIKit public enum CustomAlertStyle { case noWebsite func getTitle() -> String { switch self { case .noWebsite: return "No website information" } } func getMessage() -> String { switch self { case .noWebsite: return "There is currently no website associated with this establishment." } } } public extension UIViewController { public class func instanceFromStoryboard<T>(storyboard: Storyboard) -> T { return UIStoryboard(name: storyboard.rawValue, bundle: nil).instantiateViewController(withIdentifier: String(describing: T.self)) as! T } public class func initialViewControllerFromStoryboard<T>(storyboard: Storyboard) -> T { return UIStoryboard(name: storyboard.rawValue, bundle: nil).instantiateInitialViewController() as! T } func preloadView() { _ = view } func setTitle(title: String) { navigationItem.title = title setBackButton() } func setTitleView(view: UIView) { navigationItem.titleView = view setBackButton() } private func setBackButton() { let backButton = UIBarButtonItem(title: " ", style: .plain, target: self, action: nil) navigationItem.backBarButtonItem = backButton } func presentAlert(with style: CustomAlertStyle) { let controller = UIAlertController(title: style.getTitle(), message: style.getMessage(), preferredStyle: .alert) let action = UIAlertAction(title: "Dismiss", style: .cancel, handler: nil) controller.addAction(action) present(controller, animated: true, completion: nil) } } <file_sep>/WTFDIE/Extensions/UITableViewCellExtension.swift // // UITableViewCellExtension.swift // WTFDIE // // Created by <NAME> on 2017-10-09. // Copyright © 2017 WTFDIE. All rights reserved. // import Foundation import UIKit public extension UITableViewCell { } <file_sep>/WTFDIE/Controller/DecisionViewController.swift // // DecisionViewController.swift // WTFDIE // // Created by <NAME> on 2017-09-28. // Copyright © 2017 WTFDIE. All rights reserved. 
// import UIKit import AlamofireObjectMapper import MapKit import CoreLocation import GooglePlacePicker //https://developers.google.com/places/web-service/search class DecisionViewController: UIViewController { @IBOutlet weak var locationTitleLabel: UILabel! @IBOutlet weak var currentLocationLabel: UILabel! @IBOutlet weak var gpsImageView: UIImageView! private var locationManager = CLLocationManager() @IBOutlet weak var changeLocationBarButtonItem: UIBarButtonItem! var location: CLLocationCoordinate2D? override func viewDidLoad() { super.viewDidLoad() setupView() } private func setupView() { if #available(iOS 11.0, *) { navigationController?.navigationBar.prefersLargeTitles = true navigationController?.setTitle(title: "Location") title = "Location" navigationController?.navigationBar.largeTitleTextAttributes = [NSAttributedStringKey.font: UIFont(.helveticaNeueMedium, size: 30), NSAttributedStringKey.foregroundColor: UIColor(.azureBlue)] } else { // Fallback on earlier versions } gpsImageView.animateCircle(with: 2.0, shouldRepeat: true) //locationManager.requestWhenInUseAuthorization() if CLLocationManager.locationServicesEnabled() { locationManager.delegate = self locationManager.desiredAccuracy = kCLLocationAccuracyBest //only update location if distance has changed by 100m // For testing purposes set at 100, should find a better heuristic later locationManager.distanceFilter = 100.0; locationManager.startUpdatingLocation() } } func getCurrentLocation(){ gpsImageView.image = UIImage(named: "gps_icon_green") gpsImageView.animateCircle(with: 2.0, shouldRepeat: true) } func rotateAnyView(with view: UIView, fromValue: Double, toValue: Float, duration: Double = 1) { let animation = CABasicAnimation(keyPath: "transform.rotation") animation.duration = duration animation.fromValue = fromValue animation.toValue = toValue view.layer.add(animation, forKey: nil) } override func viewWillAppear(_ animated: Bool) { super.viewWillAppear(animated) } @IBAction func selectLocation(_ 
sender: Any) { launchPlacePicker() } @IBAction func configureSearch(_ sender: Any) { gpsImageView.image = UIImage(named: "gps_icon_green") gpsImageView.resetAnimation() gpsImageView.layer.removeAllAnimations() let destViewController: OptionsViewController = OptionsViewController.instanceFromStoryboard(storyboard: .optionsViewController) destViewController.location = location navigationController?.pushViewController(destViewController, animated: true) } // private func retreiveRestaurants() { // //let location = CLLocation(latitude: 49.285167, longitude: -123.125770) // if let location = location { // GooglePlacesService.getNearbyRestaurants(location: location, radius: "5000", type: "restaurant", keyword: "seafood") { (result) in // let x = result // } // } else { // // No location selected // } // } private func launchPlacePicker() { let config = GMSPlacePickerConfig(viewport: nil) let placePicker = GMSPlacePickerViewController(config: config) placePicker.delegate = self present(placePicker, animated: true, completion: nil) } private func getAddress(from location: CLLocation, completion: @escaping (String) -> Void) { let geocoder = CLGeocoder() geocoder.reverseGeocodeLocation(location) { (placemarks, error) in if let placemark = placemarks?.first, let subThoroughfare = placemark.subThoroughfare, let thoroughfare = placemark.thoroughfare, let locality = placemark.locality, let administrativeArea = placemark.administrativeArea { let address = subThoroughfare + " " + thoroughfare + ", " + locality + " " + administrativeArea placemark.addressDictionary return completion(address) } completion("No location found") } } @IBAction func changeLocation(_ sender: UIBarButtonItem) { launchPlacePicker() } } extension DecisionViewController: CLLocationManagerDelegate { func locationManager(_ manager: CLLocationManager, didUpdateLocations locations: [CLLocation]) { if let currentLocation = manager.location { location = currentLocation.coordinate getAddress(from: currentLocation) { 
(address) in self.gpsImageView.image = UIImage(named: "gps_icon_green") self.gpsImageView.animateCircle(with: 2.0, shouldRepeat: false) self.currentLocationLabel.attributedText = address.localized.styled(.labelWhiteCenter) } } else { gpsImageView.image = UIImage(named: "gps_icon_red") gpsImageView.resetAnimation() } } } extension DecisionViewController: GMSPlacePickerViewControllerDelegate { func placePicker(_ viewController: GMSPlacePickerViewController, didPick place: GMSPlace) { // Dismiss the place picker, as it cannot dismiss itself. viewController.dismiss(animated: true, completion: nil) currentLocationLabel.attributedText = place.name.localized.styled(.labelWhiteCenter) location = place.coordinate gpsImageView.image = UIImage(named: "gps_icon_green") locationManager.stopUpdatingLocation() } func placePickerDidCancel(_ viewController: GMSPlacePickerViewController) { // Dismiss the place picker, as it cannot dismiss itself. viewController.dismiss(animated: true, completion: nil) if location == nil { gpsImageView.image = UIImage(named: "gps_icon_red") gpsImageView.resetAnimation() } } } <file_sep>/README.md # WTFDIE A yelp clone which features custom modular components. The main point of this application was to flex my technical abilities to force myself to write good, modularized code and to keep the project organized! The application is quite simple, it accesses data from google places API, displays data based on user settings on a fancy UI! <file_sep>/WTFDIE/Extensions/GMSMarkerExtension.swift // // GMSMarkerExtension.swift // WTFDIE // // Created by <NAME> on 2018-02-26. // Copyright © 2018 WTFDIE. All rights reserved. // import GoogleMaps import UIKit enum MarkerType { case restauant case currentLocation } extension GMSMarker { func style(with type: MarkerType) { self.appearAnimation = GMSMarkerAnimation.pop } } <file_sep>/WTFDIE/Services/GooglePlacesService.swift // // GooglePlacesService.swift // WTFDIE // // Created by <NAME> on 2017-09-30. 
// Copyright © 2017 WTFDIE. All rights reserved. // import Foundation import AlamofireObjectMapper import Alamofire import MapKit class GooglePlacesService { class func getNearbyRestaurants(location: CLLocationCoordinate2D, radius: String, type: String, keyword: String, completion: @escaping(DataResponse<Places>) -> Void) { let baseURL = "https://maps.googleapis.com/maps/api/place/nearbysearch/json?" let parameters: [String: Any] = ["location": "\(location.latitude),\(location.longitude)", "radius": radius, "type": type, "keyword": keyword, "key": "<KEY>"] Alamofire.request(baseURL, method: .get, parameters: parameters, encoding: URLEncoding.default, headers: nil).responseObject { (response: DataResponse<Places>) in completion(response) } } class func getPath(origin: CLLocationCoordinate2D, destination: CLLocationCoordinate2D, completion: @escaping(DataResponse<Directions>) -> Void) { let baseURL = "https://maps.googleapis.com/maps/api/directions/json?" let parameters: [String: Any] = ["origin":"\(origin.latitude),\(origin.longitude)", "destination": "\(destination.latitude),\(destination.longitude)", "mode": "driving", "key": "<KEY>"] Alamofire.request(baseURL, method: .get, parameters: parameters, encoding: URLEncoding.default, headers: nil).responseObject { (response: DataResponse<Directions>) in completion(response) } } // // class func getAllNotifications(page: Int, length: Int, flag: Int = 2, completion: @escaping(Result<APIResponseArray<NotificationModel>, MoyaError>) -> Void ) { // Networking.request(target: .getAllNotifications(page: page, length: length, flag: flag)) {result in // completion(result) // } } <file_sep>/WTFDIE/Extensions/UICollectionViewExtension.swift // // UICollectionViewExtension.swift // WTFDIE // // Created by <NAME> on 2017-10-26. // Copyright © 2017 WTFDIE. All rights reserved. 
// import Foundation import UIKit public extension UICollectionView { func registerCellTypes(types: [UICollectionViewCell.Type]) { for type in types { self.register(UINib(nibName: type.defaultReuseIdentifier, bundle: nil), forCellWithReuseIdentifier: type.defaultReuseIdentifier) } } } <file_sep>/WTFDIE/Views/DraggableView.swift // // DraggableView.swift // WTFDIE // // Created by <NAME> on 2017-12-16. // Copyright © 2017 WTFDIE. All rights reserved. // import UIKit class DraggableView: UIView { var startPosition: CGPoint? var originalHeight: CGFloat = 0 var customViewHeight: NSLayoutConstraint! var tabHeight: NSLayoutConstraint! var expandedHeight: CGFloat = 344 var closedHeight: CGFloat = 60 var maxHeight: CGFloat = 0 var minHeight: CGFloat = 0 var distanceMoved: CGFloat = 0 var panGesture = UIPanGestureRecognizer() override func awakeFromNib() { super.awakeFromNib() self.backgroundColor = .clear let blurEffect = UIBlurEffect(style: .light) let blurView = UIVisualEffectView(effect: blurEffect) blurView.translatesAutoresizingMaskIntoConstraints = false self.insertSubview(blurView, at: 0) /* NOTE Always make sure to activate constraints after adding the view to the parent otherwise it's illegal to activate constraints that do not have a common ancestor! 
*/ NSLayoutConstraint.activate([ blurView.heightAnchor.constraint(equalTo: self.heightAnchor), blurView.widthAnchor.constraint(equalTo: self.widthAnchor), blurView.bottomAnchor.constraint(equalTo: self.bottomAnchor), ]) panGesture = UIPanGestureRecognizer(target: self, action: #selector(self.draggedView(_:))) self.isUserInteractionEnabled = true panGesture.cancelsTouchesInView = false self.addGestureRecognizer(panGesture) //+72 is the padding on the bottom maxHeight = UIScreen.main.bounds.height - self.frame.height/2 + 62 // - 10 for some sort of error that occured lol print("centerRelative \(self.convert(self.center, to: self.superview))") //minHeight = self.center.y + self.frame.height - 72 - 48 minHeight = UIScreen.main.bounds.height + self.frame.height/2 - 50 print("MinHEIGHT \(minHeight)") } @objc func draggedView(_ sender:UIPanGestureRecognizer){ let translation = sender.translation(in: self.superview) let dragChange = self.center.y + translation.y /* checking if we are dragging past our allowable height, so can't drag off screen etc Remember that the coordinate system is (0,0) on the top left, which means moving down is an increase in height */ if dragChange >= maxHeight { self.center = CGPoint(x: self.center.x, y: self.center.y + translation.y) sender.setTranslation(CGPoint.zero, in: self.superview) } //can check velocity, negative means moving up, positive means moving down if sender.state == UIGestureRecognizerState.ended { self.superview?.layoutIfNeeded() UIView.animate(withDuration: 0.4, delay: 0, usingSpringWithDamping: 0.5, initialSpringVelocity: 3, options: .curveEaseIn, animations: { if sender.velocity(in: self).y >= 0 { self.center = CGPoint(x: self.center.x, y: self.minHeight) } else { self.center = CGPoint(x: self.center.x, y: self.maxHeight) } }, completion: { (success) in }) } } func collapseView() { self.superview?.layoutIfNeeded() UIView.animate(withDuration: 0.4, delay: 0, usingSpringWithDamping: 0.5, initialSpringVelocity: 3, options: 
.curveEaseIn, animations: { self.center = CGPoint(x: self.center.x, y: self.minHeight) }, completion: { (success) in }) } } <file_sep>/WTFDIE/Controller/ResultsViewController.swift // // ResultsViewController.swift // WTFDIE // // Created by <NAME> on 2017-10-26. // Copyright © 2017 WTFDIE. All rights reserved. // import UIKit import MapKit import GoogleMaps import GooglePlaces import SafariServices protocol showDetailDelegate { func showDetail() } class ResultsViewController: UIViewController { var places: [Restaurant] = [] @IBOutlet weak var collectionViewHeight: NSLayoutConstraint! @IBOutlet weak var tabViewHeight: NSLayoutConstraint! @IBOutlet weak var collectionView: UICollectionView! @IBOutlet weak var mapView: GMSMapView! @IBOutlet weak var draggableView: DraggableView! @IBOutlet weak var leftLineImage: UIImageView! @IBOutlet weak var rightLineImage: UIImageView! var location: CLLocationCoordinate2D! var destination: CLLocationCoordinate2D! var selectedRestaurant: Restaurant! override func viewDidLoad() { super.viewDidLoad() // Uncomment the following line to preserve selection between presentations // self.clearsSelectionOnViewWillAppear = false setupCollectionView() setupCarousel() // leftLineImage.layer.borderColor = UIColor.blue.cgColor // leftLineImage.layer.borderWidth = 2.0 if let firstPlace = places.first { destination = CLLocationCoordinate2D(latitude: firstPlace.latitude, longitude: firstPlace.longitude) selectedRestaurant = firstPlace } } private func setupCollectionView() { // Register cell classes collectionView.registerCellTypes(types: [PlaceCollectionViewCell.self]) collectionView.dataSource = self collectionView.delegate = self collectionView.backgroundColor = UIColor.clear collectionView.decelerationRate = UIScrollViewDecelerationRateFast draggableView.customViewHeight = collectionViewHeight draggableView.tabHeight = tabViewHeight } private func setupCarousel() { let cellScaling:CGFloat = 0.8 let screenSize = collectionView.bounds.size let 
cellWidth = floor(screenSize.width * cellScaling) let cellHeight = floor(screenSize.height * cellScaling) let insetX = (collectionView.bounds.width - cellWidth) / 2 let insetY = (collectionView.bounds.height - cellHeight) / 2 let layout = collectionView.collectionViewLayout as! UICollectionViewFlowLayout layout.itemSize = CGSize(width: cellWidth, height: cellHeight) collectionView.contentInset = UIEdgeInsets(top: insetY, left: insetX, bottom: insetY, right: insetX) } private func setupMap() { createAnnotation(for: location) } private func createAnnotation(for location: CLLocationCoordinate2D) { mapView.animate(toLocation: location); mapView.camera = GMSCameraPosition.camera(withLatitude: location.latitude, longitude: location.longitude, zoom: 16.0) let marker = GMSMarker(position: location) marker.style(with: MarkerType.currentLocation) marker.map = mapView mapView.animate(with: GMSCameraUpdate.scrollBy(x: 0, y: 100)) } override func viewWillAppear(_ animated: Bool) { if #available(iOS 11.0, *) { navigationController?.navigationBar.prefersLargeTitles = false } else { // Fallback on earlier versions } setupMap() } override func viewWillDisappear(_ animated: Bool) { if #available(iOS 11.0, *) { navigationController?.navigationBar.prefersLargeTitles = true } else { // Fallback on earlier versions } } // Can't be done with a ternary operator, not sure why // Above line is untrue // Nope writing code at 4am probably makes me write dumb things @IBAction func toggleCollectionView(_ sender: Any) { guard let destination = destination else { return } draggableView.collapseView() plotPath(with: destination) // let frame: CGRect = leftLineImage.frame // let oldAnchorPoint = leftLineImage.layer.anchorPoint // let newAnchorPoint = CGPoint(x: 1.0, y: 0.0) // // let offSetFrameX = leftLineImage.bounds.width * (newAnchorPoint.x-oldAnchorPoint.x) // let offSetFrameY = leftLineImage.bounds.height * (newAnchorPoint.y-oldAnchorPoint.y) // self.leftLineImage.transform = 
CGAffineTransform.init(translationX: leftLineImage.bounds.width/2, y: leftLineImage.bounds.height/2) // leftLineImage.layer.anchorPoint = newAnchorPoint // self.view.layoutIfNeeded() // UIView.animateKeyframes(withDuration: 1, delay: 0, animations: { // UIView.addKeyframe(withRelativeStartTime: 0, relativeDuration: 1, animations: { //// self.leftLineImage.transform = CGAffineTransform(translationX: offSetFrameX, y: offSetFrameY) // self.leftLineImage.transform = CGAffineTransform(rotationAngle: CGFloat(Double.pi/4)) // // }) // }) // //leftLineImage.transform = CGAffineTransform(rotationAngle: CGFloat(Double.pi/4)) } func plotPath(with destination: CLLocationCoordinate2D) { GooglePlacesService.getPath(origin: location, destination: destination) { (result) in if let directions = result.value, let routes = directions.routes, let route = routes.first, let points = route.points { self.mapView.clear() self.createAnnotation(for: self.location) self.createAnnotation(for: destination) let path = GMSPath(fromEncodedPath: points) let polyline = GMSPolyline(path: path) polyline.strokeWidth = 3.0 polyline.map = self.mapView let bounds = GMSCoordinateBounds(path: path!) let cameraUpdate = GMSCameraUpdate.fit(bounds) self.mapView.animate(with: cameraUpdate) } } } func getWebsiteInformation() { } } // MARK: UICollectionViewDataSource extension ResultsViewController: UICollectionViewDataSource { func numberOfSections(in collectionView: UICollectionView) -> Int { return 1 } func collectionView(_ collectionView: UICollectionView, numberOfItemsInSection section: Int) -> Int { // #warning Incomplete implementation, return the number of items //return places.count return places.count } func collectionView(_ collectionView: UICollectionView, cellForItemAt indexPath: IndexPath) -> UICollectionViewCell { let cell:PlaceCollectionViewCell = collectionView.dequeueReusableCell(withReuseIdentifier: PlaceCollectionViewCell.defaultReuseIdentifier, for: indexPath) as! 
PlaceCollectionViewCell cell.configureCell(object: (places[indexPath.row], CLLocation(latitude: location.latitude, longitude: location.longitude))) cell.backgroundColor = UIColor.lightGray cell.delegate = self return cell } func updateMapDirections(for selectedPlace: Restaurant) { let client = GMSPlacesClient.shared() client.lookUpPlaceID(selectedPlace.id, callback: { (place, error) -> Void in if let url = place?.website { let vc = SFSafariViewController(url: url) self.present(vc, animated: true) return } else { self.presentAlert(with: .noWebsite) } //let config = SFSafariViewController.Configuration() }) } } // MARK: UICollectionViewDelegate extension ResultsViewController: UICollectionViewDelegate { /* // Uncomment this method to specify if the specified item should be highlighted during tracking override func collectionView(_ collectionView: UICollectionView, shouldHighlightItemAt indexPath: IndexPath) -> Bool { return true } */ /* // Uncomment this method to specify if the specified item should be selected override func collectionView(_ collectionView: UICollectionView, shouldSelectItemAt indexPath: IndexPath) -> Bool { return true } */ /* // Uncomment these methods to specify if an action menu should be displayed for the specified item, and react to actions performed on the item override func collectionView(_ collectionView: UICollectionView, shouldShowMenuForItemAt indexPath: IndexPath) -> Bool { return false } override func collectionView(_ collectionView: UICollectionView, canPerformAction action: Selector, forItemAt indexPath: IndexPath, withSender sender: Any?) -> Bool { return false } override func collectionView(_ collectionView: UICollectionView, performAction action: Selector, forItemAt indexPath: IndexPath, withSender sender: Any?) 
{ } */ } extension ResultsViewController: UIScrollViewDelegate { func scrollViewWillEndDragging(_ scrollView: UIScrollView, withVelocity velocity: CGPoint, targetContentOffset: UnsafeMutablePointer<CGPoint>) { let layout = collectionView.collectionViewLayout as! UICollectionViewFlowLayout let cellWidth = layout.itemSize.width + layout.minimumLineSpacing var offset = targetContentOffset.pointee let index = round((offset.x + scrollView.contentInset.left) / cellWidth) offset = CGPoint(x: index * cellWidth - scrollView.contentInset.left, y: -scrollView.contentInset.top) targetContentOffset.pointee = offset let selectedRestaurant = places[Int(index)] let destination: CLLocationCoordinate2D = CLLocationCoordinate2D(latitude: selectedRestaurant.latitude, longitude: selectedRestaurant.longitude) mapView.clear() createAnnotation(for: location) createAnnotation(for: destination) let bounds = GMSCoordinateBounds(coordinate: destination, coordinate: destination) mapView.animate(with: GMSCameraUpdate.fit(bounds)) mapView.animate(toZoom: 16.0) mapView.animate(with: GMSCameraUpdate.scrollBy(x: 0, y: 100)) self.destination = destination self.selectedRestaurant = selectedRestaurant } } extension ResultsViewController: showDetailDelegate { func showDetail() { guard let selectedRestaurant = selectedRestaurant else { return } updateMapDirections(for: selectedRestaurant) } } <file_sep>/WTFDIE/Views/OptionsCells/PriceSelectionTableViewCell.swift // // PriceSelectionTableViewCell.swift // WTFDIE // // Created by <NAME> on 2017-10-09. // Copyright © 2017 WTFDIE. All rights reserved. // import UIKit class PriceSelectionTableViewCell: ExpandableUITableViewCell { typealias configurableObject = PriceRange @IBOutlet weak var titleLabel: UILabel! @IBOutlet weak var selectedLabel: UILabel! @IBOutlet weak var segmentControl: UISegmentedControl! @IBOutlet weak var heightConstraint: NSLayoutConstraint! 
override var isExpanded:Bool { didSet { if !isExpanded { self.heightConstraint.constant = 0 } else { self.heightConstraint.constant = 59 } } } override func awakeFromNib() { super.awakeFromNib() titleLabel.attributedText = "Price Range".styled(.cellTitle) } @IBAction func segmentSelected(_ sender: UISegmentedControl) { if let priceRange = PriceRange(rawValue: sender.selectedSegmentIndex) { selectedLabel.attributedText = priceRange.dollarValue.styled(.cellTitle) delegate.updateParameters(with: "minprice", value: priceRange.minMaxPrice.0) delegate.updateParameters(with: "maxPrice", value: priceRange.minMaxPrice.1) } } } extension PriceSelectionTableViewCell: ConfigurableTableViewCellProtocol { func configureCell(object: configurableObject) { selectedLabel.attributedText = object.dollarValue.localized.styled(.cellTitle) segmentControl.selectedSegmentIndex = object.rawValue } } <file_sep>/WTFDIE/Extensions/StringExtension.swift // // StringExtension.swift // WTFDIE // // Created by <NAME> on 2017-10-02. // Copyright © 2017 WTFDIE. All rights reserved. // import Foundation extension String { var localized: String { return NSLocalizedString(self, tableName: nil, bundle: Bundle.main, value: "", comment: "") } func localized(with comment: String) -> String { return NSLocalizedString(self, tableName: nil, bundle: Bundle.main, value: "", comment: comment) } func localized(args: CVarArg...) -> String { return withVaList(args) { NSString(format: self.localized, locale: Locale.current, arguments: $0) as String } } } <file_sep>/WTFDIE/Extensions/UIViewExtension.swift // // UIViewExtension.swift // WTFDIE // // Created by <NAME> on 2017-11-07. // Copyright © 2017 WTFDIE. All rights reserved. 
// import Foundation import UIKit extension UIView { func resetAnimation(){ UIView.animate(withDuration: 1.0, delay: 0.0, options: [.beginFromCurrentState], animations: { self.alpha = 1.0 self.transform = CGAffineTransform.identity }, completion: nil) } func animateCircle(with duration: Double, shouldRepeat: Bool) { let options: UIViewKeyframeAnimationOptions = shouldRepeat ? [.repeat] : [] UIView.animateKeyframes(withDuration: duration, delay: 0, options: options, animations: { UIView.addKeyframe(withRelativeStartTime: 0, relativeDuration: 0.25, animations: { self.transform = CGAffineTransform(rotationAngle: CGFloat(90.0 * Double.pi)/180.0).concatenating(CGAffineTransform(scaleX: 1.05, y: 1.05)) }) UIView.addKeyframe(withRelativeStartTime: 0.25, relativeDuration: 0.25, animations: { self.transform = CGAffineTransform(rotationAngle: CGFloat(180.0 * Double.pi)/180.0).concatenating(CGAffineTransform(scaleX: 1.1, y: 1.1)) }) UIView.addKeyframe(withRelativeStartTime: 0.50, relativeDuration: 0.25, animations: { self.transform = CGAffineTransform(rotationAngle: CGFloat(270.0 * Double.pi)/180.0) }) UIView.addKeyframe(withRelativeStartTime: 0.75, relativeDuration: 0.25, animations: { self.transform = CGAffineTransform(rotationAngle: CGFloat(360.0 * Double.pi)/180.0) }) }) } } <file_sep>/WTFDIE/Model/Restaurant.swift // // Restaurants.swift // WTFDIE // // Created by <NAME> on 2017-09-30. // Copyright © 2017 WTFDIE. All rights reserved. // import Foundation import ObjectMapper import MapKit class Restaurant: Mappable { var latitude: CLLocationDegrees! var longitude: CLLocationDegrees! var name: String! var isOpen: Bool = true var id: String! var icon: String! var types: [String]! var vicinity: String! var rating: Double = 0.0 var priceLevel: Int = PriceRange.unknown.rawValue var address: String! var website: String! 
= nil init() { } required init?(map: Map) {} func mapping(map: Map) { latitude <- map["geometry.location.lat"] longitude <- map["geometry.location.lng"] name <- map["name"] isOpen <- map["opening_hours.open_now"] id <- map["place_id"] icon <- map["icon"] types <- map["types"] vicinity <- map["vicinity"] rating <- map["rating"] priceLevel <- map["price_level"] address <- map["vicinity"] } func getRating() -> String { return rating == 0.0 ? "Unknown" : "\(rating)" } } <file_sep>/WTFDIE/Model/Protocols/ExpandableUITableViewCell.swift // // ExpandableUITableViewCell.swift // WTFDIE // // Created by <NAME> on 2017-10-09. // Copyright © 2017 WTFDIE. All rights reserved. // import Foundation import UIKit class ExpandableUITableViewCell: UITableViewCell { open var isExpanded: Bool = false open var delegate: DataChangedDelegate! } <file_sep>/WTFDIE/Stye/Color.swift // // Color.swift // WTFDIE // // Created by <NAME> on 2017-10-02. // Copyright © 2017 WTFDIE. All rights reserved. // import UIKit extension UIColor { enum Color: UInt32 { case white = 0xffffff case bluish = 0x2278cf case darkSkyBlue = 0x2b98f0 case pinkishGrey = 0xbdbdbd case warmGrey = 0x757575 case lightGrey = 0xBCBCBC case black = 0x000000 case watermelon = 0xfc5457 case whiteTwo = 0xfbfbfb case whiteThree = 0xececec case dark = 0x141823 case darkTwo = 0x212531 case lightBlack = 0x212121 case warmGreyThree = 0x747474 case malachiteGreen = 0x6BF178 case azureBlue = 0x35A7FF case roseWhite = 0xFF5964 case gargoyleYellow = 0xFFE74C case ufoGreen = 0x4CDF64 } convenience init(_ color: Color, alpha: CGFloat = 1.0) { let mask = 0x000000FF let hex = color.rawValue let r = Int(hex >> 16) & mask let g = Int(hex >> 8) & mask let b = Int(hex) & mask let red = CGFloat(r) / 255 let green = CGFloat(g) / 255 let blue = CGFloat(b) / 255 self.init(red:red, green:green, blue:blue, alpha:alpha) } } <file_sep>/WTFDIE/Model/Protocols/ConfigurableTableCellProtocol.swift // // ConfigurableTableCellProtocol.swift // WTFDIE // // 
Created by <NAME> on 2017-10-08. // Copyright © 2017 WTFDIE. All rights reserved. // import Foundation protocol ConfigurableTableViewCellProtocol { associatedtype configurableObject func configureCell(object: configurableObject) } protocol ConfigurableCollectionViewCellProtocol { associatedtype configurableObject func configureCell(object: configurableObject) }
cfbf5bad77296f7dc8ca7757499e5beb87f9eb78
[ "Swift", "Ruby", "Markdown" ]
31
Swift
jeff2013/WTFDIE
3bc4902c909bf15ad4559385d49e048d5cdab8e9
1ba6bc4b00f0dc384a1f7e0d32ba00b56b0cdb63
refs/heads/master
<file_sep>package main import ( "log" "net/http" "strconv" "github.com/tkanos/gonfig" "github.com/tushar9989/url-short/read-server/internal/controllers" "github.com/tushar9989/url-short/read-server/internal/database" ) type Configuration struct { Port int DbServers []string DbKeySpace string } func main() { config := Configuration{} err := gonfig.GetConf("../../config.json", &config) if err != nil { log.Fatal("Could not load configuration") } db, dbErr := database.NewCassandra(config.DbServers, config.DbKeySpace) if dbErr != nil { log.Fatal(dbErr) } http.HandleFunc("/s/", controllers.ReadSlug(db)) http.HandleFunc("/stats", controllers.ReadUserStats(db)) log.Fatal(http.ListenAndServe(":"+strconv.Itoa(config.Port), nil)) } <file_sep>package database import ( "math/big" "github.com/gocql/gocql" "github.com/tushar9989/url-short/read-server/internal/models" ) type Cassandra struct { session *gocql.Session } func NewCassandra(servers []string, keyspace string) (*Cassandra, error) { cluster := gocql.NewCluster(servers...) cluster.Keyspace = keyspace session, err := cluster.CreateSession() if err != nil { return nil, err } return &Cassandra{session: session}, nil } func (c *Cassandra) Close() { if !c.session.Closed() { c.session.Close() } } func (c *Cassandra) LoadLinkData(id big.Int) (models.LinkData, error) { returnValue := models.LinkData{} if err := c.session.Query(`SELECT id, user_id, allowed_emails, expire_time, target_url FROM links WHERE id = ? 
LIMIT 1`, id.String()).Consistency(gocql.One).Scan(&returnValue.Id, &returnValue.UserId, &returnValue.ValidEmails, &returnValue.ExpireAt, &returnValue.TargetUrl); err != nil { return returnValue, err } return returnValue, nil } func (c *Cassandra) LoadLinkStatsForUser(userId string) []models.LinkStats { returnValue := make([]models.LinkStats, 0) iter := c.session.Query("SELECT id, views FROM views WHERE user_id = ?", userId).Iter() var Id big.Int var Views int64 for iter.Scan(&Id, &Views) { IdCopy := *big.NewInt(0).Set(&Id) returnValue = append(returnValue, models.LinkStats{Id: &IdCopy, Views: Views}) } return returnValue } func (c *Cassandra) IncrementLinkStatsForUser(userId string, linkId big.Int) error { err := c.session.Query(`UPDATE views SET views = views + 1 WHERE id = ? AND user_id = ?;`, linkId.String(), userId, ).Consistency(gocql.Quorum).Exec() return err } <file_sep>package counters_test import ( "math/big" "testing" "github.com/tushar9989/url-short/write-server/internal/pkg/counters" ) func TestInvalidConfig(t *testing.T) { _, err := counters.NewBigInt(*big.NewInt(11), *big.NewInt(20), *big.NewInt(10)) if err == nil { t.Error("Current less than start failed") } _, err = counters.NewBigInt(*big.NewInt(11), *big.NewInt(0), *big.NewInt(10)) if err == nil { t.Error("Current greater than end failed") } } func TestIncrement(t *testing.T) { counter, _ := counters.NewBigInt(*big.NewInt(10), *big.NewInt(20), *big.NewInt(10)) _ = counter.IncrementAndGetOldValue() value := counter.IncrementAndGetOldValue() if value.Cmp(big.NewInt(11)) != 0 { t.Error("Expected 11, got ", value.String()) } } func TestValue(t *testing.T) { counter, _ := counters.NewBigInt(*big.NewInt(10), *big.NewInt(20), *big.NewInt(10)) value := counter.Value() _ = counter.IncrementAndGetOldValue() if value.Cmp(big.NewInt(10)) != 0 { t.Error("Expected 10, got ", value.String()) } } func TestRollOver(t *testing.T) { counter, _ := counters.NewBigInt(*big.NewInt(10), *big.NewInt(20), *big.NewInt(20)) 
_ = counter.IncrementAndGetOldValue() _ = counter.IncrementAndGetOldValue() value := counter.IncrementAndGetOldValue() if value.Cmp(big.NewInt(11)) != 0 { t.Error("Expected 11, got ", value.String()) } } func BenchmarkIncrement(b *testing.B) { counter, _ := counters.NewBigInt(*big.NewInt(0), *big.NewInt(100), *big.NewInt(20)) for n := 0; n < b.N; n++ { _ = counter.IncrementAndGetOldValue() } } <file_sep>package controllers import ( "math/big" "net/http" "strings" "github.com/tushar9989/url-short/read-server/internal/database" ) func ReadUserStats(db database.Database) func(http.ResponseWriter, *http.Request) { return wrapper(func(w http.ResponseWriter, r *http.Request) (interface{}, *ApiError) { if r.Method != "GET" { return nil, &ApiError{"Invalid request method", http.StatusMethodNotAllowed} } userId := r.Header.Get("X-User-ID") if userId == "-1" || userId == "" { return nil, &ApiError{"Must be logged in to look at stats.", http.StatusBadRequest} } slugStats := db.LoadLinkStatsForUser(userId) for i, slugStat := range slugStats { slugStat.Slug = encodeIdToSlug(*slugStat.Id) slugStat.Id = nil slugStats[i] = slugStat } return slugStats, nil }) } func encodeIdToSlug(input big.Int) string { number := big.NewInt(0) number.Add(number, &input) base := big.NewInt(62) zero := big.NewInt(0) arr := make([]string, 0) for number.Cmp(zero) != 0 { mod := big.NewInt(0) mod.Mod(number, base) intMod := mod.Int64() if intMod < 10 { arr = append(arr, string('0'+intMod)) } else if intMod < 36 { intMod -= 10 arr = append(arr, string('a'+intMod)) } else { intMod -= 36 arr = append(arr, string('A'+intMod)) } number.Div(number, base) } for i, j := 0, len(arr)-1; i < j; i, j = i+1, j-1 { arr[i], arr[j] = arr[j], arr[i] } return strings.Join(arr, "") } <file_sep>package controllers import ( "encoding/json" "errors" "fmt" "math/big" "net/http" "time" "github.com/tushar9989/url-short/read-server/internal/database" ) type ApiError struct { Message string Code int } func ReadSlug(db 
database.Database) func(http.ResponseWriter, *http.Request) { return wrapper(func(w http.ResponseWriter, r *http.Request) (interface{}, *ApiError) { if r.Method != "GET" { return nil, &ApiError{"Invalid request method", http.StatusMethodNotAllowed} } userEmail := r.Header.Get("X-User-Email") slug := r.URL.Path[3:] id, err := decodeSlugToId(slug) if err != nil { return nil, &ApiError{err.Error(), http.StatusBadRequest} } linkData, err := db.LoadLinkData(id) if err != nil || linkData.ExpireAt.Before(time.Now().UTC()) { return nil, &ApiError{err.Error(), http.StatusNotFound} } if len(linkData.ValidEmails) != 0 && !contains(linkData.ValidEmails, userEmail) { return nil, &ApiError{"Not authorized.", http.StatusForbidden} } if linkData.UserId != "-1" { go db.IncrementLinkStatsForUser(linkData.UserId, id) } http.Redirect(w, r, linkData.TargetUrl, 301) return nil, nil }) } func contains(list []string, a string) bool { for _, b := range list { if b == a { return true } } return false } func decodeSlugToId(number string) (big.Int, error) { base := big.NewInt(62) multiplier := big.NewInt(1) answer := big.NewInt(0) if len(number) > 7 { return *big.NewInt(0), errors.New("Input slug too long") } for i := len(number) - 1; i >= 0; i-- { var intVal *big.Int if number[i] >= '0' && number[i] <= '9' { intVal = big.NewInt(int64(number[i] - '0')) } else if number[i] >= 'a' && number[i] <= 'z' { intVal = big.NewInt(int64(10 + number[i] - 'a')) } else if number[i] >= 'A' && number[i] <= 'Z' { intVal = big.NewInt(int64(36 + number[i] - 'A')) } else { return *big.NewInt(0), errors.New("Invalid character in slug") } answer.Add(answer, intVal.Mul(intVal, multiplier)) multiplier.Mul(multiplier, base) } return *answer, nil } func wrapper(h func(w http.ResponseWriter, r *http.Request) (interface{}, *ApiError)) func(http.ResponseWriter, *http.Request) { return func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "application/json") defer func() { if r := recover(); r != 
nil { w.WriteHeader(500) json.NewEncoder(w).Encode(map[string]string{"status": "FAIL", "message": fmt.Sprintf("%v", r)}) } }() response, err := h(w, r) if err != nil { w.WriteHeader(err.Code) json.NewEncoder(w).Encode(map[string]string{"status": "FAIL", "message": err.Message}) } else { if response != nil { w.WriteHeader(http.StatusOK) json.NewEncoder(w).Encode(map[string]interface{}{"status": "OK", "data": response}) } } } } <file_sep># URL Shortener ## Requirements - Cassandra >= 3.0.9 - Go >= 1.10 - Nginx >= 1.15.0 ## System Logic Components: - load balancer - authenticate all requests and add headers - auth server(s) - for get requests - proxy to read-server(s) - for post requests - proxy to write-server(s) - read-server - Look up db for link - Check if user is authorized in case of retricted url - Log view event - return data - write-server - Startup - load segment start, end and current unused id from db - Set id for request - auto incr number - while already used - incr - if boundary + 1 move to offset - Store into db - Periodically write current unused id for segment to db ## Setup ### Cassandra - use `nodetool status` to get datacenter name - start `cqlsh` - Create the keyspace: - `CREATE KEYSPACE IF NOT EXISTS urlshort WITH REPLICATION = { 'class' : 'NetworkTopologyStrategy', 'datacenter1' : 1 } AND DURABLE_WRITES = true;` - On local system: `ALTER KEYSPACE urlshort WITH replication = { 'class' : 'SimpleStrategy', 'replication_factor' : 1 };` - Use created keyspace `use urlshort;` - Create the links table: `CREATE TABLE links ( id varint, expire_time timestamp, target_url text, allowed_emails set<text>, user_id text, PRIMARY KEY (id, user_id) );` - Create the write_server_meta table: `CREATE TABLE write_server_meta ( name text PRIMARY KEY, start varint, end varint, current varint );` - Create user stats table: `CREATE TABLE views ( user_id text, id varint, views counter, PRIMARY KEY(user_id, id) ) WITH CLUSTERING ORDER BY (id DESC);` - Write default data 
for write servers. Depends on the number of write servers you want to run. 2 here: - `insert into write_server_meta (name, start, end, current) values ('0', 1, 1760807303104, 1);` - `insert into write_server_meta (name, start, end, current) values ('1', 1760807303105, 3521614606207, 1760807303105);` ### Nginx - Copy `nginx-proxy.conf` to the http section of your `nginx.conf` or copy the file to the `servers` subdirectory of your nginx setup. - Reload http server: `nginx -s reload` ### Go - Copy project source code to `$GOHOME/go/src/github.com/tushar9989` - For auth, read and write servers copy the example `config.json` file and modify as needed. - Install 3rd party dependencies - `go get github.com/tkanos/gonfig` Used to load config from json file. - `go get github.com/gocql/gocql` Used to interact with Cassandra. ## Start Servers - If on a unix environment `./start.sh` from the project's root directory. - Otherwise start auth, read and write servers after modifying config files before starting each instance. ## Usage - To login with Google visit `localhost:8000` - To create a Short Link - Optional `CustomSlug`(max 7 characters [0-9a-zA-Z]) and `ValidEmails` (will only work if logged in) - Method: POST - URL: localhost:8000/shorten - e.g: - Request: `{ "TargetUrl": "http://google.com", "ExpireAt": "2006-01-02T15:04:05Z", "CustomSlug": "tushar", "ValidEmails": [ "<EMAIL>" ] }` - Response: `{ "data": { "slug": "tushar" }, "status": "OK" }` - To visit short link: `localhost:8000/s/tushar`. - To check created url stats: `localhost:8000/stats`. Will only work if logged in. 
<file_sep>package models import ( "math/big" ) type ServerMeta struct { Name string Start big.Int End big.Int Current big.Int } <file_sep>var signedIn = false; function signIn () { var auth2 = gapi.auth2.getAuthInstance(); if (signedIn) { auth2.signOut() .then(() => { signedIn = false; adjustButtonText(); document.cookie = 'X-Auth-Token=;expires=Thu, 01 Jan 1970 00:00:01 GMT;'; }); } else { auth2.signIn() .then((googleUser) => { signedIn = true; adjustButtonText(); document.cookie = "X-Auth-Token=" + googleUser.getAuthResponse().id_token + ";max-age=300;path=/"; }).catch(function (error) { console.log(error); signedIn = false; adjustButtonText(); }); } } function adjustButtonText() { var text; if(signedIn) { text = "Sign Out"; } else { text = "Sign In"; } document.getElementById("button-text").textContent = text; } gapi.load('auth2', function () { gapi.auth2.init().then(function (auth2) { if (auth2.isSignedIn.get()) { var googleUser = auth2.currentUser.get(); document.cookie = "X-Auth-Token=" + googleUser.getAuthResponse().id_token + ";max-age=300;path=/"; signedIn = true; } else { signedIn = false; } adjustButtonText(); }); });<file_sep>package counters import ( "errors" "math/big" "sync" ) type bigInt struct { mu sync.Mutex value big.Int end big.Int start big.Int } type BigInt interface { IncrementAndGetOldValue() (oldValue big.Int) Value() (value big.Int) } var one = big.NewInt(1) func NewBigInt(start big.Int, end big.Int, value big.Int) (BigInt, error) { if value.Cmp(&start) < 0 || value.Cmp(&end) > 0 { return &bigInt{}, errors.New("Invalid counter configuration") } return &bigInt{value: value, end: end, start: start}, nil } func (c *bigInt) IncrementAndGetOldValue() (oldValue big.Int) { c.mu.Lock() oldValue = *big.NewInt(0).Set(&c.value) c.value.Add(&c.value, one) if c.value.Cmp(&c.end) > 0 { c.value = c.start } c.mu.Unlock() return } func (c *bigInt) Value() (value big.Int) { c.mu.Lock() value = *big.NewInt(0).Set(&c.value) c.mu.Unlock() return } 
<file_sep>package database import ( "math/big" "time" "github.com/gocql/gocql" "github.com/tushar9989/url-short/write-server/internal/models" ) type Cassandra struct { session *gocql.Session } func NewCassandra(servers []string, keyspace string) (*Cassandra, *DbError) { cluster := gocql.NewCluster(servers...) cluster.Keyspace = keyspace session, err := cluster.CreateSession() if err != nil { return nil, &DbError{msg: err.Error(), Code: 1} } return &Cassandra{session: session}, nil } func (c *Cassandra) Close() { if !c.session.Closed() { c.session.Close() } } func (c *Cassandra) Save(id big.Int, data models.LinkData) *DbError { data.Id = id diff := data.ExpireAt.Sub(time.Now().UTC()) if diff.Seconds() <= 0 || diff.Hours() > 30*24 { return &DbError{msg: "Invalid expire time", Code: 2} } seconds := int64(diff.Seconds()) existingData := make(map[string]interface{}) applied, err := c.session.Query(`INSERT INTO links (id, expire_time, target_url, allowed_emails, user_id) VALUES (?, ?, ?, ?, ?) IF NOT EXISTS USING TTL ?`, id, data.ExpireAt, data.TargetUrl, data.ValidEmails, data.UserId, seconds, ).Consistency(gocql.Quorum).MapScanCAS(existingData) if !applied { return &DbError{msg: "Already exists", Code: 3} } if err == nil { return nil } return &DbError{msg: err.Error(), Code: 1} } func (c *Cassandra) LoadServerMeta(name string) (models.ServerMeta, *DbError) { returnValue := models.ServerMeta{} if err := c.session.Query(`SELECT name, current, start, end FROM write_server_meta WHERE name = ? LIMIT 1`, name).Consistency(gocql.One).Scan(&returnValue.Name, &returnValue.Current, &returnValue.Start, &returnValue.End); err != nil { return returnValue, &DbError{msg: err.Error(), Code: 1} } return returnValue, nil } func (c *Cassandra) UpdateServerCount(name string, count big.Int) *DbError { existingData := make(map[string]interface{}) applied, err := c.session.Query(`UPDATE write_server_meta SET current = ? WHERE name = ? 
IF EXISTS`, count, name, ).Consistency(gocql.Quorum).MapScanCAS(existingData) if !applied { return &DbError{msg: "Details for given name not found", Code: 1} } if err == nil { return nil } return &DbError{msg: err.Error(), Code: 1} } <file_sep>package models import ( "math/big" "time" ) type LinkData struct { Id big.Int ExpireAt *time.Time TargetUrl string ValidEmails []string UserId string CustomSlug string } <file_sep>package main import ( "encoding/json" "html/template" "log" "net/http" "strconv" "github.com/tkanos/gonfig" "github.com/tushar9989/url-short/auth-server/internal/pkg/cache" ) type Page struct { ClientID string } func viewHandler(w http.ResponseWriter, r *http.Request) { renderTemplate(w, "index", &Page{ClientID: clientId}) } var authCache *cache.TTLMap = cache.New(60 * 5) func verifyHandler(w http.ResponseWriter, r *http.Request) { defer func() { if recover() != nil { w.WriteHeader(http.StatusOK) } }() cookie, err := r.Cookie("X-Auth-Token") if err == nil { result, ok := authCache.Get(cookie.Value) if !ok { resp, err := http.Get("https://www.googleapis.com/oauth2/v3/tokeninfo?id_token=" + cookie.Value) if err == nil && resp.StatusCode == http.StatusOK { decoder := json.NewDecoder(resp.Body) err := decoder.Decode(&result) if err == nil && result["aud"] == clientId { authCache.Put(cookie.Value, result) ok = true } } } if ok { w.Header().Set("X-User-ID", result["sub"]) w.Header().Set("X-User-Email", result["email"]) } } w.WriteHeader(http.StatusOK) } var templates = template.Must(template.ParseFiles("static/index.html")) var clientId = "" func renderTemplate(w http.ResponseWriter, tmpl string, p *Page) { err := templates.ExecuteTemplate(w, tmpl+".html", p) if err != nil { http.Error(w, err.Error(), http.StatusInternalServerError) } } type Configuration struct { Port int ClientID string } func main() { config := Configuration{} err := gonfig.GetConf("config.json", &config) if err != nil { log.Fatal("Could not load configuration") } clientId = 
config.ClientID http.HandleFunc("/", viewHandler) http.HandleFunc("/static/", func(w http.ResponseWriter, r *http.Request) { http.ServeFile(w, r, r.URL.Path[1:]) }) http.HandleFunc("/verify", verifyHandler) log.Fatal(http.ListenAndServe(":"+strconv.Itoa(config.Port), nil)) } <file_sep>package database import ( "math/big" "github.com/tushar9989/url-short/write-server/internal/models" ) type Database interface { Save(id big.Int, data models.LinkData) *DbError LoadServerMeta(name string) (models.ServerMeta, *DbError) UpdateServerCount(name string, count big.Int) *DbError Close() } type DbError struct { msg string Code int } func (e *DbError) Error() string { return e.msg } <file_sep>package database import ( "math/big" "github.com/tushar9989/url-short/read-server/internal/models" ) type Database interface { LoadLinkData(id big.Int) (models.LinkData, error) LoadLinkStatsForUser(userId string) []models.LinkStats IncrementLinkStatsForUser(userId string, linkId big.Int) error Close() } <file_sep>package controllers import ( "math/big" "testing" ) func TestDecodeSlug(t *testing.T) { number := big.NewInt(5621662037) decoded, _ := decodeSlugToId("68rV6l") if decoded.Cmp(number) != 0 { t.Error("Expected 5621662037, got ", decoded.String()) } } func TestInvalidSlug(t *testing.T) { slug := "_*()" _, err := decodeSlugToId(slug) if err == nil { t.Error("Passed with invalid slug") } slug = "ZZZZZZZZ" _, err = decodeSlugToId(slug) if err == nil { t.Error("Passed with long slug") } } <file_sep>#!/bin/bash # Demo script to start read, write and auth servers function changePortInConfig { if [ ! -f $2 ]; then echo "\"$2\" not found!" return fi sed -i "s|\(\"Port\": \)[0-9]*|\1$1|" $2 } function changeServerIdInConfig { if [ ! -f $2 ]; then echo "\"$2\" not found!" 
return fi sed -i "s|\(\"ServerID\": \"\)[0-9]*|\1$1|" $2 } cd auth-server changePortInConfig 8001 "config.json" go run main.go & sleep 1 cd ../read-server/cmd/server for (( i=8002; i<=8004; i++ )) do changePortInConfig $i "../../config.json" go run main.go & sleep 1 done cd ../../../write-server/cmd/server for (( i=8005; i<=8006; i++ )) do changePortInConfig $i "../../config.json" changeServerIdInConfig $((i-8005)) "../../config.json" go run main.go & sleep 1 done wait<file_sep>package controllers import ( "encoding/json" "errors" "fmt" "io" "math/big" "net/http" "net/url" "strings" "time" "github.com/tushar9989/url-short/write-server/internal/database" "github.com/tushar9989/url-short/write-server/internal/models" "github.com/tushar9989/url-short/write-server/internal/pkg/counters" ) func Write(counter counters.BigInt, db database.Database) func(http.ResponseWriter, *http.Request) { return wrapper(func(r *http.Request) (interface{}, error) { if r.Method != "POST" { return nil, errors.New("Invalid request method") } linkData, err := getLinkDataFromBody(r.Body, r.Header.Get("X-User-ID")) if err != nil { return nil, err } diff := linkData.ExpireAt.Sub(time.Now().UTC()) if diff.Minutes() <= 5 || diff.Hours() > 30*24 { return nil, errors.New("Invalid expire time") } if linkData.CustomSlug != "" { id, err := decodeSlugToId(linkData.CustomSlug) if err != nil { return nil, err } dbErr := db.Save(id, linkData) if dbErr == nil { return map[string]string{"slug": linkData.CustomSlug}, nil } return nil, dbErr } else { id := counter.IncrementAndGetOldValue() dbErr := db.Save(id, linkData) for dbErr != nil && dbErr.Code == 3 { id = counter.IncrementAndGetOldValue() dbErr = db.Save(id, linkData) } if dbErr == nil { return map[string]string{"slug": encodeIdToSlug(id)}, nil } return nil, dbErr } }) } func encodeIdToSlug(input big.Int) string { number := big.NewInt(0) number.Add(number, &input) base := big.NewInt(62) zero := big.NewInt(0) arr := make([]string, 0) for 
number.Cmp(zero) != 0 { mod := big.NewInt(0) mod.Mod(number, base) intMod := mod.Int64() if intMod < 10 { arr = append(arr, string('0'+intMod)) } else if intMod < 36 { intMod -= 10 arr = append(arr, string('a'+intMod)) } else { intMod -= 36 arr = append(arr, string('A'+intMod)) } number.Div(number, base) } for i, j := 0, len(arr)-1; i < j; i, j = i+1, j-1 { arr[i], arr[j] = arr[j], arr[i] } return strings.Join(arr, "") } func decodeSlugToId(number string) (big.Int, error) { base := big.NewInt(62) multiplier := big.NewInt(1) answer := big.NewInt(0) if len(number) > 7 { return *big.NewInt(0), errors.New("Input slug too long") } for i := len(number) - 1; i >= 0; i-- { var intVal *big.Int if number[i] >= '0' && number[i] <= '9' { intVal = big.NewInt(int64(number[i] - '0')) } else if number[i] >= 'a' && number[i] <= 'z' { intVal = big.NewInt(int64(10 + number[i] - 'a')) } else if number[i] >= 'A' && number[i] <= 'Z' { intVal = big.NewInt(int64(36 + number[i] - 'A')) } else { return *big.NewInt(0), errors.New("Invalid character in slug") } answer.Add(answer, intVal.Mul(intVal, multiplier)) multiplier.Mul(multiplier, base) } return *answer, nil } func wrapper(h func(r *http.Request) (interface{}, error)) func(http.ResponseWriter, *http.Request) { return func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "application/json") defer func() { if r := recover(); r != nil { w.WriteHeader(500) json.NewEncoder(w).Encode(map[string]string{"status": "FAIL", "message": fmt.Sprintf("%v", r)}) } }() response, err := h(r) if err != nil { w.WriteHeader(500) json.NewEncoder(w).Encode(map[string]string{"status": "FAIL", "message": err.Error()}) } else { w.WriteHeader(http.StatusCreated) json.NewEncoder(w).Encode(map[string]interface{}{"status": "OK", "data": response}) } } } func getLinkDataFromBody(reader io.Reader, userId string) (models.LinkData, error) { decoder := json.NewDecoder(reader) var linkData models.LinkData err := decoder.Decode(&linkData) if err != 
nil { return linkData, err } if linkData.ExpireAt == nil { return linkData, errors.New("ExpireAt must be set") } if userId == "" { userId = "-1" if len(linkData.ValidEmails) > 0 { return linkData, errors.New("Valid Emails cannot be set unless logged in.") } } linkData.UserId = userId _, err = url.ParseRequestURI(linkData.TargetUrl) if err != nil { return linkData, err } return linkData, nil } <file_sep>package controllers import ( "math/big" "strings" "testing" "time" ) func TestUrl(t *testing.T) { body := strings.NewReader(`{ "TargetUrl": "abx", "ExpireAt": "2006-01-02T15:04:05Z" }`) _, err := getLinkDataFromBody(body, "") if err == nil { t.Error("Passed with invalid URL") } body = strings.NewReader(`{ "TargetUrl": "http://google.com", "ExpireAt": "2006-01-02T15:04:05Z" }`) _, err = getLinkDataFromBody(body, "") if err != nil { t.Error("Failed with valid URL: ", err) } } func TestInvalidPayload(t *testing.T) { body := strings.NewReader(`sdbj`) _, err := getLinkDataFromBody(body, "") if err == nil { t.Error("Passed with invalid body") } } func TestUserId(t *testing.T) { body := strings.NewReader(`{ "TargetUrl": "http://google.com", "ExpireAt": "2006-01-02T15:04:05Z" }`) linkData, _ := getLinkDataFromBody(body, "") if linkData.UserId != "-1" { t.Error(`Expected "-1", got `, linkData.UserId) } body = strings.NewReader(`{ "TargetUrl": "http://google.com", "ExpireAt": "2006-01-02T15:04:05Z" }`) linkData, _ = getLinkDataFromBody(body, "abc") if linkData.UserId != "abc" { t.Error(`Expected "abc", got `, linkData.UserId) } } func TestEmailsWithoutUser(t *testing.T) { body := strings.NewReader(`{ "TargetUrl": "http://google.com", "ExpireAt": "2006-01-02T15:04:05Z", "ValidEmails": [ "<EMAIL>" ] }`) _, err := getLinkDataFromBody(body, "") if err == nil { t.Error("Saved emails without valid user id") } } func TestExpireTime(t *testing.T) { body := strings.NewReader(`{ "TargetUrl": "http://google.com", "ExpireAt": "2006-01-02T15:04:05Z" }`) linkData, _ := 
getLinkDataFromBody(body, "") expected, _ := time.Parse(time.RFC3339, "2006-01-02T15:04:05Z") if linkData.ExpireAt.String() != expected.String() { t.Error("Expected ", expected.String(), "got ", linkData.ExpireAt.String()) } body = strings.NewReader(`{ "TargetUrl": "http://google.com" }`) _, err := getLinkDataFromBody(body, "") if err == nil { t.Error("Passed without ExpireAt") } } func TestEncodeDecodeId(t *testing.T) { number := big.NewInt(1675108645995) slug := encodeIdToSlug(*number) if slug != "tushar1" { t.Error("Expected tushar1, got ", slug) } decoded, _ := decodeSlugToId(slug) if decoded.Cmp(number) != 0 { t.Error("Expected 1675108645995, got ", decoded.String()) } } func TestInvalidSlug(t *testing.T) { slug := "_*()" _, err := decodeSlugToId(slug) if err == nil { t.Error("Passed with invalid slug") } slug = "ZZZZZZZZ" _, err = decodeSlugToId(slug) if err == nil { t.Error("Passed with long slug") } } <file_sep>package cache import ( "sync" "time" ) type item struct { value map[string]string lastAccess int64 } type TTLMap struct { m map[string]*item l sync.RWMutex } func New(maxTTL int) (m *TTLMap) { m = &TTLMap{m: make(map[string]*item, 0)} go func() { for now := range time.Tick(time.Minute) { m.l.Lock() for k, v := range m.m { if now.Unix()-v.lastAccess > int64(maxTTL) { delete(m.m, k) } } m.l.Unlock() } }() return } func (m *TTLMap) Len() int { return len(m.m) } func (m *TTLMap) Put(k string, v map[string]string) { m.l.Lock() it, ok := m.m[k] if !ok { it = &item{value: v} m.m[k] = it } it.lastAccess = time.Now().Unix() m.l.Unlock() } func (m *TTLMap) Get(k string) (v map[string]string, found bool) { m.l.RLock() if it, ok := m.m[k]; ok { v = it.value found = true } m.l.RUnlock() return } <file_sep>package main import ( "log" "net/http" "strconv" "time" "github.com/tkanos/gonfig" "github.com/tushar9989/url-short/write-server/internal/controllers" "github.com/tushar9989/url-short/write-server/internal/database" 
"github.com/tushar9989/url-short/write-server/internal/pkg/counters" ) type Configuration struct { Port int ServerID string DbServers []string DbKeySpace string DbPersistInterval int } func main() { config := Configuration{} err := gonfig.GetConf("../../config.json", &config) if err != nil { log.Fatal("Could not load configuration") } db, dbErr := database.NewCassandra(config.DbServers, config.DbKeySpace) if dbErr != nil { log.Fatal(dbErr) } data, dbErr := db.LoadServerMeta(config.ServerID) if dbErr != nil { log.Fatal(dbErr) } counter, err := counters.NewBigInt(data.Start, data.End, data.Current) if err != nil { log.Fatal(err) } go startPeriodicDbUpdate(db, config.ServerID, counter, time.Minute*time.Duration(config.DbPersistInterval)) http.HandleFunc("/shorten", controllers.Write(counter, db)) log.Fatal(http.ListenAndServe(":"+strconv.Itoa(config.Port), nil)) } func startPeriodicDbUpdate(db database.Database, serverId string, counter counters.BigInt, interval time.Duration) { time.Sleep(interval) for range time.Tick(interval) { db.UpdateServerCount(serverId, counter.Value()) } } <file_sep>package models import ( "math/big" ) type LinkStats struct { Id *big.Int Views int64 Slug string }
e41559a849e8060e5468d3627a27879380029054
[ "Markdown", "JavaScript", "Go", "Shell" ]
21
Go
tushar9989/url-short
7d097a8acb53c03e76e7d158ae4d16b5cb13605b
23e7e8be66aa1bba4848f17373b759a0a3874c21
refs/heads/master
<repo_name>orquidea238/ConfusionServer<file_sep>/routes/dishRouter.js const express = require('express'); const bodyParser = require('body-parser'); const mongoose = require('mongoose'); var authenticate = require('../authenticate'); const cors = require('./cors'); // J'importe le schema dishes const Dishes = require('../models/dishes'); // Je crée ma route dishRouter: const dishRouter = express.Router(); // J'utilise le module de traitement du corps de la requete body-parser: dishRouter.use(bodyParser.json()); // dishes route----------------------------------------------------------------- dishRouter.route('/') .options(cors.corsWithOptions, (req, res) => { res.sendStatus(200); }) .get(cors.cors, (req, res, next) =>{ // on trouve tous les dishes et on recupere la reponse en format json Dishes.find({}) .populate('comments.author') .then((dishes) =>{ res.statusCode = 200; res.setHeader('Content-Type', 'application/json'); res.json(dishes); }, (err) => next(err)) .catch((err) => next(err)); }) .post(cors.corsWithOptions, authenticate.verifyUser, authenticate.verifyAdmin, (req, res, next) =>{ // On crée des dishes dans la bdd Dishes.create(req.body) .then((dish) =>{ console.log('Dish created ', dish); res.statusCode = 200; res.setHeader('Content-Type', 'application/json'); res.json(dish); }, (err) => next(err)) .catch((err) => next(err)); }) .put(cors.corsWithOptions, authenticate.verifyUser, authenticate.verifyAdmin, (req, res, next) =>{ res.statusCode = 403; res.send('PUT operation not supported on /dishes'); }) .delete(cors.corsWithOptions, authenticate.verifyUser, authenticate.verifyAdmin, (req, res, next) =>{ Dishes.deleteMany({}) .then((resp) =>{ res.statusCode = 200; res.setHeader('Content-Type', 'application/json'); res.json(resp); }, (err) => next(err)) .catch((err) => next(err)); }); // dishId Routes----------------------------------------------------------- dishRouter.route('/:dishId') .options(cors.corsWithOptions, (req, res) => { res.sendStatus(200); }) 
.get(cors.cors, (req, res, next) =>{ Dishes.findById(req.params.dishId) .populate('comments.author') .then((dish) =>{ res.statusCode = 200; res.setHeader('Content-Type', 'application/json'); res.json(dish); }, (err) => next(err)) .catch((err) => next(err)); }) .post(cors.corsWithOptions, authenticate.verifyUser, authenticate.verifyAdmin, (req, res, next) =>{ res.statusCode = 403; res.end('POST operation not supported on /dishes/' + req.params.dishId); }) .put(cors.corsWithOptions, authenticate.verifyUser, authenticate.verifyAdmin, (req, res, next) =>{ Dishes.findByIdAndUpdate(req.params.dishId, { $set: req.body }, { new: true }) .then((dish) =>{ res.statusCode = 200; res.setHeader('Content-Type', 'application/json'); res.json(dish); }, (err) => next(err)) .catch((err) => next(err)); }) .delete(cors.corsWithOptions, authenticate.verifyUser, authenticate.verifyAdmin, (req, res, next) =>{ Dishes.findByIdAndRemove(req.params.dishId) .then((resp) =>{ res.statusCode = 200; res.setHeader('Content-Type', 'application/json'); res.json(resp); }, (err) => next(err)) .catch((err) => next(err)); }); // dishId/comments route------------------------------------------------------------ dishRouter.route('/:dishId/comments') .options(cors.corsWithOptions, (req, res) => { res.sendStatus(200); }) .get(cors.cors, (req, res, next) =>{ // On récupere un Id dish spécifique Dishes.findById(req.params.dishId) .populate('comments.author') .then((dish) =>{ // Si le dish existe (s'il n'est pas null): if(dish != null){ res.statusCode = 200; res.setHeader('Content-Type', 'application/json'); // j'affiche tous les commentaires de la dish: res.json(dish.coments); } else { // si dish = null (il n'existe pas), je crée un erreur err = new Error('Dish ' + req.params.dishId + ' not found!'); err.status = 404; return next(err); } }, (err) => next(err)) .catch((err) => next(err)); }) .post(cors.corsWithOptions, authenticate.verifyUser, (req, res, next) =>{ // Je récupere la dish spécifique par son ID 
Dishes.findById(req.params.dishId) .then((dish) =>{ // si la dish existe j'insére les infos invoyés (dans req.body) dans les commentaires de la dish: if(dish != null){ req.body.author = req.user._id; dish.comments.push(req.body); // J'inregistre les modifications: dish.save() .then((dish) =>{ res.statusCode = 200; res.setHeader('Content-Type', 'application/json'); res.json(dish); }, (err) => next(err)); } else { err = new Error('Dish ' + req.params.dishId + ' not found'); err.status = 404; return next(err); } }, (err) => next(err)) .catch((err) => next(err)); }) .put(cors.corsWithOptions, authenticate.verifyUser, (req, res, next) => { res.statusCode = 403; res.end('PUT operation not supported on /dishes/' + req.params.dishId + '/comments'); }) .delete(cors.corsWithOptions, authenticate.verifyUser, authenticate.verifyAdmin, (req, res, next) =>{ Dishes.findById(req.params.dishId) .then((dish) =>{ // Si la dish existe bien: if(dish != null){ // Je supprime toutes les commentaires de la dish selectionnée: for(var i = (dish.comments.lenght - 1); i >= 0; i--){ dish.comments.id(dish.coments[i]._id).remove(); } dish.save() .then((dish) =>{ res.statusCode = 200; res.setHeader('Content-Type', 'application/json'); res.json(dish); }, (err) => next(err)); } // Si la dish n'existe pas: else{ err = new Error('Dish ' + req.params.dishId + ' not found'); err.status = 404; return next(err); } }, (err) => next(err)) .catch((err) => next(err)); }); // dishId/comments/commentId route----------------------------------------------------------- dishRouter.route('/:dishId/comments/:commentId') .options(cors.corsWithOptions, (req, res) => { res.sendStatus(200); }) .get(cors.cors, (req, res, next) =>{ Dishes.findById(req.params.dishId) .populate('comments.author') .then((dish) =>{ // Si la dish existe bien et le commentaireId existe bien (n'est pas null): if (dish != null && dish.comments.id(req.params.commentId) != null){ res.statusCode = 200; res.setHeader('Content-Type', 
'application/json'); // Je récuper la réponse en format json et on recupere le commentaire par son id: res.json(dish.comments.id(req.params.commentId)); } // Si la dish n'existe pas: else if(dish == null){ // On crée un msg d'erreur err = new Error('Dish ' + req.params.dishId + ' not found'); err.status = 404; return next(err); } else{ // sinon si la dish n'a pas de commentaires: err = new Error('Comment ' + req.params.commentId + ' not found'); err.status = 404; return next(err); } }, (err) => next(err)) .catch((err) => next(err)); }) .post(cors.corsWithOptions, authenticate.verifyUser, (req, res, next) =>{ res.statusCode = 403; res.end('POST operation not supported on /dishes/'+ req.params.dishId + '/comments/' + req.params.commentId); }) .put(cors.corsWithOptions, authenticate.verifyUser, (req, res, next) =>{ if(req.user._id == req.author._id){ Dishes.findById(req.params.dishId) .then((dish) =>{ // Si la dish existe bien et le commentaireId existe bien (n'est pas null): if(dish != null && dish.comments.id(req.params.commentId) != null && dish.comments.id(req.params.commentId).author.equals(req.user._id)) { // si dans le body de la requete rating existe bien: if(req.body.rating){ // on prends le ranting dans le commentaire et on le change par le rating dans le body de la requete: dish.comments.id(req.params.commentId).rating = req.body.rating; } // si dans le body de la requete comment existe bien: if(req.body.comment){ // on prends le commentaireId et on le change par le comment dans le body de la requete: dish.comment.id(req.params.commentId).comment = req.body.comment; } // On enregistre les changements: dish.save() .then((dish) =>{ Dishes.findById(dish._id) .populate('comments.author') .then((dish) =>{ res.statusCode = 200; res.setHeader('Content-Type', 'application/json'); // on affiche tt les dishes: res.json(dish); }) }, (err) => next(err)); } // Sinon si la dishe n'existe pas: else if(dish == null){ err = new Error('Dish ' + req.params.dishId + ' not 
found'); err.status = 404; return next(err); } // Sinon si c le commentaire qui n'existe pas: else{ err = new Error('Comment ' + req.params.commentId + ' not found'); err.status = 404; return next(err); } }, (err) => next(err)) .catch((err) => next(err)); } else{ err = new Error('You are not authorized to perform this operation!'); err.status = 403; return next(err); } }) .delete(cors.corsWithOptions, authenticate.verifyUser, (req, res, next) =>{ if(req.user._id == req.author._id){ // Je récupere la dish spécifique par son ID Dishes.findById(req.params.dishId) .then((dish) =>{ // Si la dish existe bien et le commentaireId existe bien (n'est pas null): if (dish != null && dish.comments.id(req.params.commentId) != null) { dish.comments.id(req.params.commentId).remove(); dish.save() .then((dish) => { Dishes.findById(dish._id) .populate('comments.author') .then((dish) =>{ res.statusCode = 200; res.setHeader('Content-Type', 'application/json'); res.json(dish); }) }, (err) => next(err)); } // Sinon si la dishe n'existe pas: else if (dish == null) { err = new Error('Dish ' + req.params.dishId + ' not found'); err.status = 404; return next(err); } // Sinon si c le commentaire qui n'existe pas: else { err = new Error('Comment ' + req.params.commentId + ' not found'); err.status = 404; return next(err); } }, (err) => next(err)) .catch((err) => next(err)); } else { err = new Error('You are not authorized to perform this operation!'); err.status = 403; return next(err); } }); module.exports = dishRouter; // .put(authenticate.verifyUser, (req, res, next) => { // Dishes.findById(req.params.dishId) // .then( // dish => { // const sameUser = // JSON.stringify(req.user._id) == // JSON.stringify(dish.comments.id(req.params.commentId).author) // ? 
true // : false; // if (!sameUser) { // err = new Error("You are not authorized to update this comment"); // res.statusCode = 403; // return next(err); // } else { // if ( // dish != null && // dish.comments.id(req.params.commentId) != null // ) { // if (req.body.rating) { // dish.comments.id(req.params.commentId).rating = req.body.rating; // } // if (req.body.comment) { // dish.comments.id(req.params.commentId).comment = // req.body.comment; // } // dish.save().then( // dish => { // Dishes.findById(dish._id) // .populate("comments.author") // .then(dish => { // res.statusCode = 200; // res.setHeader("Content-Type", "application/json"); // res.json(dish.comments.id(req.params.commentId)); // }); // }, // err => next(err) // ); // } else if (dish == null) { // err = new Error("Dish " + req.params.dishId + " not existing"); // res.statusCode = 404; // return next(err); // } else { // err = new Error( // "Comment " + req.params.commentId + " not existing" // ); // res.statusCode = 404; // return next(err); // } // } // }, // err => next(err) // ) // .catch(err => next(err)); // }) // .delete(authenticate.verifyUser, (req, res, next) => { // Dishes.findById(req.params.dishId) // .then( // dish => { // // checking if the user requesting the comment is the same of the author // const sameUser = // JSON.stringify(req.user._id) == // JSON.stringify(dish.comments.id(req.params.commentId).author) // ? 
true // : false; // if (!sameUser) { // err = new Error("You are not authorized to delete this comment"); // res.statusCode = 403; // return next(err); // } else { // if ( // dish != null && // dish.comments.id(req.params.commentId) != null // ) { // dish.comments.id(req.params.commentId).remove(); // dish.save().then( // dish => { // Dishes.findById(dish._id) // .populate("comments.author") // .then(dish => { // res.statusCode = 200; // res.setHeader("Content-Type", "application/json"); // res.json(dish.comments.id(req.params.commentId)); // }); // }, // err => next(err) // ); // } else if (dish == null) { // err = new Error("Dish " + req.params.dishId + " not existing"); // res.statusCode = 404; // return next(err); // } else { // err = new Error( // "Comment " + req.params.commentId + " not existing" // ); // res.statusCode = 404; // return next(err); // } // } // }, // err => next(err) // ) // .catch(err => next(err)); // });
f436fe9bb92c24384de3ed87391ba9d1cf7199ac
[ "JavaScript" ]
1
JavaScript
orquidea238/ConfusionServer
95fca1c242b25b5ff88ae58b741b3c9c93d0f425
cf14126d55f279af3715f24498eeeee17eb751b2
refs/heads/master
<repo_name>joelgano/SampleAPI<file_sep>/Services/UserService.cs using System; using System.Collections.Generic; using System.Linq; using AutoMapper; using AutoMapper.QueryableExtensions; using FlytDex.Domain.Model.FlytDex; using FlytDex.Domain.Model.FlytDex.Links; using FlytDex.Domain.Services.Interfaces; using FlytDex.Shared; using FlytDex.Shared.Dtos; using FlytDex.Shared.Enums; using FlytDex.Shared.Requests; using log4net; using Microsoft.EntityFrameworkCore; namespace FlytDex.Domain.Services { public class UserService : IUserService { private static readonly ILog logger = LogManager.GetLogger(typeof(UserService)); private IFlytDexDbContext flytDexContext; private IMapper mapper; private IAuthorizationService authorizationService; private IAuthorizationRoleService applicationRoleService; private IErrorService errorService; public UserService(IFlytDexDbContext flytDexContext, IMapper mapper, IAuthorizationService authorizationService, IAuthorizationRoleService applicationRoleService, IErrorService errorService) { this.flytDexContext = flytDexContext; this.mapper = mapper; this.authorizationService = authorizationService; this.applicationRoleService = applicationRoleService; this.errorService = errorService; } public ServiceResult<List<AlexaUserDto>> GetAlexaUsersForDevice(string deviceId) { if (string.IsNullOrEmpty(deviceId)) { return errorService.Error<List<AlexaUserDto>>("Please provide a valid Alexa Device ID."); } if (!flytDexContext.UserDevices.Any(ud => ud.DeviceId == deviceId)) { return errorService.Warn<List<AlexaUserDto>>("This device is not registered on the system."); } List<User> users = flytDexContext.Users.Include(u => u.UserDevices).Where(u => u.UserDevices.Any(ud => ud.DeviceId == deviceId)).ToList(); if (users.Count <= 0) { return errorService.Warn<List<AlexaUserDto>>("This device is recognised but is not currently associated to any users."); } List<AlexaUserDto> alexaUserDtos = mapper.Map<List<User>, List<AlexaUserDto>>(users); return new 
ServiceResult<List<AlexaUserDto>>(alexaUserDtos, ResultType.Success, "Success"); } public ServiceResult<AlexaUserDto> GetAlexaUserForUsername(string username) { if (string.IsNullOrEmpty(username)) { return errorService.Error<AlexaUserDto>("Please provide a valid Username"); } User user = flytDexContext.Users.SingleOrDefault(u => u.Username == username); if (user == null) { return errorService.Error<AlexaUserDto>("User with username was not found"); } AlexaUserDto alexaUserDto = mapper.Map<User, AlexaUserDto>(user); return new ServiceResult<AlexaUserDto>(alexaUserDto, ResultType.Success, "Success"); } public ServiceResult<Guid> AddUser(string forename, string surname, string password, UserType userType, IEnumerable<AuthorizationRoleType> roles, string email, Guid schoolId) { if (string.IsNullOrEmpty(forename) || string.IsNullOrEmpty(surname)) { return errorService.Error<Guid>("Forename and Surname must be provided"); } if (string.IsNullOrEmpty(password)) { return errorService.Error<Guid>("Password must be provided"); } string username = GenerateUniqueUsername(forename, surname, userType, email, schoolId); string hashedPassword = authorizationService.GeneratePassword(password); string alexaPassCode = authorizationService.GenerateAlexaPassCode(); User user = new User(username, hashedPassword, forename, alexaPassCode); flytDexContext.Users.Add(user); if (flytDexContext.SaveChanges() <= 0) { return errorService.Error<Guid>("Error adding user, see log for error message"); } if (applicationRoleService.AddRolesToUser(user, roles).ResultType == ResultType.Error) { return errorService.Warn<Guid>(string.Format("Failed To add user roles for user id {0}", user.Id)); } return new ServiceResult<Guid>(user.Id, ResultType.Success, "Success"); } public List<UserDto> GetAllUsers() { List<UserDto> userDtos = flytDexContext.Users .ProjectTo<UserDto>(mapper.ConfigurationProvider) .ToList(); return userDtos; } public string GenerateUniqueUsername(string forename, string surname, UserType 
userType, string email, Guid schoolId) { string username = string.Empty; string domain = string.Empty; int i = 0; bool unique = false; if ((userType == UserType.Employee) || (userType == UserType.Parent)) { if (email == null) { Setting setting = flytDexContext.Settings.SingleOrDefault(s => s.SettingType == SettingType.OEEmailFormat); domain = setting.Value.Substring(setting.Value.IndexOf('@')); email = string.Format("{0}.{1}{2}", forename, surname, domain); //username = getUsername(forename, surname, schoolId, userType); } else { if (flytDexContext.Users.Any(u => u.Username == email)) { return email; } } username = email; } else if (userType == UserType.Student) { Setting setting = flytDexContext.Settings.SingleOrDefault(s => s.SettingType == SettingType.SchoolEmailFormat && s.SchoolId == schoolId); domain = setting.Value.Substring(setting.Value.IndexOf('@')); if (setting.Value.Substring(0, setting.Value.IndexOf('.')) == "forename") { username = string.Format("{0}.{1}{2}", forename, surname, domain); } else { username = string.Format("{0}.{1}{2}", surname, forename, domain); } } else { username = string.Format("{0}.{1}", forename, surname); } string baseName = string.Format("{0}.{1}", forename, surname); while (!unique) { if (i != 0) { username = string.Format("{0}.{1}{2}", baseName, i.ToString(), domain); } if (!flytDexContext.Users.Any(u => u.Username == username)) { unique = true; } i++; } return username; } public ServiceResult<UserSessionDto> GetUserSession(string username) { User user = flytDexContext.Users .Include(u => u.LinkUserSchools) .SingleOrDefault(u => u.Username == username); if (user == null) { return errorService.Error<UserSessionDto>("Error occurred: User not found"); } IEnumerable<Guid> employeeIds = user.LinkUserSchools.Where(lus => lus.UserType == UserType.Employee).Select(lus => lus.UserTypeId); IEnumerable<Guid> studentIds = user.LinkUserSchools.Where(lus => lus.UserType == UserType.Student).Select(lus => lus.UserTypeId); List<Employee> 
cachedEmployees = flytDexContext.Employees .Include(e => e.LinkEmployeeRoles) .ThenInclude(ler => ler.Role) .Where(e => employeeIds.Contains(e.Id)) .ToList(); List<Student> cachedStudents = flytDexContext.Students.Where(s => studentIds.Contains(s.Id)).ToList(); UserSessionDto userSessionDto = new UserSessionDto(); userSessionDto.UserId = user.Id; userSessionDto.Username = user.Username; userSessionDto.LastLoginDateTime = user.LastLoginDateTime; userSessionDto.UserSchools = new List<LinkUserSchoolDto>(); foreach (LinkUserSchool linkUserSchool in user.LinkUserSchools) { if (linkUserSchool.UserType == UserType.Employee) { Employee employee = cachedEmployees.SingleOrDefault(e => e.Id == linkUserSchool.UserTypeId); if (employee == null) { logger.Warn(string.Format("Employee User found with no valid Employee attached, User Id: {0}", user.Id)); return errorService.Error<UserSessionDto>("Error occurred: User Invalid"); } userSessionDto.UserSchools.Add(new LinkUserSchoolDto() { UserId = user.Id, SchoolId = linkUserSchool.SchoolId, UserType = linkUserSchool.UserType, UserTypeId = employee.Id, Roles = employee.LinkEmployeeRoles.Select(ler => ler.Role.RoleTitle).OrderBy(r => r).ToList() }); } else if (linkUserSchool.UserType == UserType.Student) { Student student = cachedStudents.SingleOrDefault(s => s.Id == linkUserSchool.UserTypeId); if (student == null) { logger.Warn(string.Format("Student User found with no valid Student attached, User Id: {0}", user.Id)); return errorService.Error<UserSessionDto>("Error occurred: User Invalid"); } userSessionDto.UserSchools.Add(new LinkUserSchoolDto() { UserId = user.Id, SchoolId = linkUserSchool.SchoolId, UserType = linkUserSchool.UserType, UserTypeId = student.Id, Roles = new List<string>() }); } } return new ServiceResult<UserSessionDto>(userSessionDto); } public ServiceResult<Guid> UpdateUser(UserRequest userRequest) { User user = flytDexContext.Users.SingleOrDefault(u => u.Id == userRequest.Id); if (user == null) { return 
errorService.Error<Guid>("Error occurred: User not found"); } List<LinkUserSchool> linkUserSchools = user.LinkUserSchools.Where(lus => lus.UserId == userRequest.Id).ToList(); foreach (LinkUserSchool linkUserSchool in linkUserSchools) { if (linkUserSchool.UserType == UserType.Employee) { Employee employee = flytDexContext.Employees.SingleOrDefault(e => e.Id == linkUserSchool.UserTypeId); ContactDetails contactDetails = flytDexContext.ContactDetails.SingleOrDefault(cd => cd.Id == employee.ContactDetailsId); if (contactDetails == null) { return errorService.Error<Guid>("Error occurred: Contact details not found"); } contactDetails.PreferredEmail = userRequest.PreferredEmail; contactDetails.PreferredPhone = userRequest.PreferredPhone; flytDexContext.ContactDetails.Update(contactDetails); } } user.Password = authorizationService.GeneratePassword(userRequest.Password); user.UserEmail = userRequest.PreferredEmail; user.UserPhoneNumber = userRequest.PreferredPhone; user.LastLoginDateTime = DateTime.Now; flytDexContext.Users.Update(user); if (flytDexContext.SaveChanges() < 0) { return errorService.Error<Guid>("An error occurred: Unable to save changes"); } return new ServiceResult<Guid>(user.Id, ResultType.Success, "Success"); } private ContactDetails getContactDetails(LinkUserSchool linkUserSchool) { ContactDetails contactDetails = new ContactDetails(); if (linkUserSchool.UserType == UserType.Employee) { Employee employee = flytDexContext.Employees.SingleOrDefault(e => e.Id == linkUserSchool.UserTypeId); contactDetails = flytDexContext.ContactDetails.SingleOrDefault(cd => cd.Id == employee.ContactDetailsId); } if (linkUserSchool.UserType == UserType.Student) { } return contactDetails; } } } <file_sep>/Controllers/PeriodController.cs using System; using System.Collections.Generic; using FlytDex.Domain.Services; using FlytDex.Domain.Services.Interfaces; using FlytDex.Shared; using FlytDex.Shared.Attributes; using FlytDex.Shared.Dtos; using FlytDex.Shared.Enums; using 
Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; using StructureMap; namespace FlytDex.Controllers { [Route("api/[controller]")] [ApiController] [Authorize] public class PeriodController : ControllerBase { // GET api/Period/GetPeriods [HttpGet("GetPeriods")] [AuthorizeRoles(AuthorizationRoleType.Admin, AuthorizationRoleType.Teacher, AuthorizationRoleType.Technician)] public IActionResult GetPeriods(Guid employeeId, Guid schoolId, DateTime startDateTime, DateTime endDateTime) { IContainer container = IocService.BeginRequest(); ServiceResult<List<PeriodDto>> result = container.GetInstance<IPeriodService>().GetPeriods(employeeId, schoolId, startDateTime, endDateTime); IocService.EndRequest(container); if (result.ResultType == ResultType.Error) { return BadRequest(result.Message); } return Ok(result); } } } <file_sep>/Interfaces/IUserService.cs using System; using System.Collections.Generic; using FlytDex.Shared; using FlytDex.Shared.Dtos; using FlytDex.Shared.Enums; using FlytDex.Shared.Requests; namespace FlytDex.Domain.Services.Interfaces { public interface IUserService { ServiceResult<Guid> AddUser(string forename, string lastname, string password, UserType userType, IEnumerable<AuthorizationRoleType> roles, string email, Guid schoolId); List<UserDto> GetAllUsers(); string GenerateUniqueUsername(string forename, string surname, UserType userType, string email, Guid schoolId); ServiceResult<List<AlexaUserDto>> GetAlexaUsersForDevice(string deviceId); ServiceResult<AlexaUserDto> GetAlexaUserForUsername(string username); ServiceResult<UserSessionDto> GetUserSession(string username); ServiceResult<Guid> UpdateUser(UserRequest userRequest); } } <file_sep>/Services/HomeworkService.cs using System; using System.Collections.Generic; using System.Linq; using System.Linq.Expressions; using AutoMapper; using AutoMapper.QueryableExtensions; using FlytDex.Domain.Model.FlytDex; using FlytDex.Domain.Model.FlytDex.Links; using FlytDex.Domain.Services.Interfaces; 
using FlytDex.Shared; using FlytDex.Shared.Dtos; using FlytDex.Shared.Enums; using FlytDex.Shared.Requests; namespace FlytDex.Domain.Services { public class HomeworkService : IHomeworkService { private IFlytDexDbContext flytDexContext; private IMapper mapper; private IErrorService errorService; public HomeworkService(IFlytDexDbContext flytDexContext, IMapper mapper, IErrorService errorService) { this.flytDexContext = flytDexContext; this.mapper = mapper; this.errorService = errorService; } public ServiceResult<List<HomeworkDto>> GetPastHomework(Guid schoolId, Guid employeeId, Guid subjectId, Guid studentGroupId, params Expression<Func<HomeworkDto, object>>[] includes) { if (schoolId == Guid.Empty) { return errorService.Error<List<HomeworkDto>>("An error occurred: SchoolId is invalid"); } DateTime now = DateTime.Now; if (!flytDexContext.Homeworks.Any(h => h.SchoolId == schoolId && h.Lesson.Event.EmployeeId == employeeId)) { return errorService.Warn<List<HomeworkDto>>("No Homework found."); } List<HomeworkDto> homeworkDtos = flytDexContext.Homeworks .Where(h => h.SchoolId == schoolId && h.Lesson.Event.EmployeeId == employeeId && h.Lesson.Event.EndDateTime.Date <= now && h.Lesson.Event.Subject.Id == subjectId && h.Lesson.Event.LinkEventStudentGroups.Any(sg => sg.StudentGroupId == studentGroupId)) .ProjectTo(mapper.ConfigurationProvider, includes) .ToList(); List<PeriodDto> cachedPeriods = flytDexContext.Periods .Where(h => h.SchoolId == schoolId && h.PeriodInstanceId != 0) .ProjectTo<PeriodDto>(mapper.ConfigurationProvider) .ToList(); foreach (HomeworkDto homeworkDto in homeworkDtos) { homeworkDto.PeriodId = cachedPeriods .Where(p => p.StartDateTime == homeworkDto.EventStartDateTime && p.EndDateTime == homeworkDto.EventEndDateTime) .Select(p => p.PeriodId).SingleOrDefault(); } return new ServiceResult<List<HomeworkDto>>(homeworkDtos); } public ServiceResult<List<HomeworkDto>> GetHomeworkForLesson(Guid lessonId, params Expression<Func<HomeworkDto, object>>[] includes) 
{ if (lessonId == Guid.Empty) { return errorService.Error<List<HomeworkDto>>("An error occurred: Lesson Id is invalid"); } List<HomeworkDto> homeworkDtos = flytDexContext.Homeworks .Where(lh => lh.LessonId == lessonId) .ProjectTo(mapper.ConfigurationProvider, includes) .ToList(); if (homeworkDtos.Count == 0) { return errorService.Warn<List<HomeworkDto>>("No Homework found."); } return new ServiceResult<List<HomeworkDto>>(homeworkDtos); } public ServiceResult<HomeworkDto> CreateHomework(HomeworkRequest homeworkRequest) { string validationMessage = ValidateHomeworkRequest(homeworkRequest); if (!string.IsNullOrEmpty(validationMessage)) { return errorService.Error<HomeworkDto>(validationMessage); } if (flytDexContext.Homeworks.Any(h => h.Id == homeworkRequest.Id && h.SchoolId == homeworkRequest.SchoolId && h.LessonId == homeworkRequest.LessonId)) { return errorService.Error<HomeworkDto>("An error occurred: A homework with this Id already exists"); } //ICollection<LinkHomeworkResource> linkHomeworkResources = mapper.Map<ICollection<LinkHomeworkResourceDto>, ICollection<LinkHomeworkResource>>(homeworkRequest.LinkHomeworkResources); //ICollection<LinkStudentHomework> linkStudentHomeworks = mapper.Map<ICollection<LinkStudentHomeworkDto>, ICollection<LinkStudentHomework>>(homeworkRequest.LinkStudentHomeworks); Homework homework = mapper.Map<HomeworkRequest, Homework>(homeworkRequest); //homework.LinkHomeworkResources = linkHomeworkResources; flytDexContext.Homeworks.Add(homework); if (flytDexContext.SaveChanges() < 0) { return errorService.Error<HomeworkDto>("An error occurred: Unable to save changes"); } HomeworkDto homeworkDto = mapper.Map<Homework, HomeworkDto>(homework); return new ServiceResult<HomeworkDto>(homeworkDto, ResultType.Success, "Success"); } public ServiceResult<HomeworkDto> UpdateHomework(HomeworkRequest homeworkRequest) { string validationMessage = ValidateHomeworkRequest(homeworkRequest); if (!string.IsNullOrEmpty(validationMessage)) { return 
errorService.Error<HomeworkDto>(validationMessage); } Homework homework = flytDexContext.Homeworks.SingleOrDefault(h => h.Id == homeworkRequest.Id && h.SchoolId == homeworkRequest.SchoolId && h.LessonId == homeworkRequest.LessonId); if (homework == null) { return errorService.Error<HomeworkDto>("An error occurred: Invalid homework - not found"); } mapper.Map(homeworkRequest, homework); flytDexContext.LinkStudentHomeworks.RemoveRange( flytDexContext.LinkStudentHomeworks.Where(h => h.HomeworkId == homeworkRequest.Id) ); flytDexContext.LinkHomeworkResources.RemoveRange( flytDexContext.LinkHomeworkResources.Where(h => h.HomeworkId == homeworkRequest.Id) ); flytDexContext.Homeworks.Update(homework); if (flytDexContext.SaveChanges() <= 0) { return errorService.Error<HomeworkDto>("Error updating homework, see log for error message"); } HomeworkDto homeworkDto = mapper.Map<Homework, HomeworkDto>(homework); return new ServiceResult<HomeworkDto>(homeworkDto, ResultType.Success, "Success"); } public ServiceResult<HomeworkDto> RemoveHomework(Guid homeworkId) { if (homeworkId == Guid.Empty) { return errorService.Error<HomeworkDto>("An error occurred: Homework Id is invalid"); } Homework homework = flytDexContext.Homeworks.SingleOrDefault(h => h.Id == homeworkId); if (homework == null) { return errorService.Error<HomeworkDto>("An error occurred: A homework does not exist"); } flytDexContext.Homeworks.Remove(homework); if (flytDexContext.SaveChanges() <= 0) { return errorService.Error<HomeworkDto>("Error removing homework, see log for error message"); } HomeworkDto homeworkDto = mapper.Map<Homework, HomeworkDto>(homework); return new ServiceResult<HomeworkDto>(homeworkDto, ResultType.Success, "Success"); } public ServiceResult<HomeworkTemplateDto> CreateHomeworkTemplate(HomeworkTemplateRequest homeworkTemplateRequest) { string validationMessage = ValidateHomeworkTemplateRequest(homeworkTemplateRequest); if (!string.IsNullOrEmpty(validationMessage)) { return 
errorService.Error<HomeworkTemplateDto>(validationMessage); } if (flytDexContext.HomeworkTemplates.Any(ht => ht.Id == homeworkTemplateRequest.Id)) { return errorService.Error<HomeworkTemplateDto>("An error occurred: A homework template with this Id already exists"); } HomeworkTemplate homeworkTemplate = mapper.Map<HomeworkTemplateRequest, HomeworkTemplate>(homeworkTemplateRequest); flytDexContext.HomeworkTemplates.Add(homeworkTemplate); if (flytDexContext.SaveChanges() < 0) { return errorService.Error<HomeworkTemplateDto>("An error occurred: Unable to save HomeworkTemplate"); } HomeworkTemplateDto homeworkTemplateDto = mapper.Map<HomeworkTemplate, HomeworkTemplateDto>(homeworkTemplate); return new ServiceResult<HomeworkTemplateDto>(homeworkTemplateDto, ResultType.Success, "Success"); } public ServiceResult<Guid?> UpdateHomeworkTemplate(HomeworkTemplateRequest homeworkTemplateRequest) { string validationMessage = ValidateHomeworkTemplateRequest(homeworkTemplateRequest); if (!string.IsNullOrEmpty(validationMessage)) { return errorService.Error<Guid?>(validationMessage); } HomeworkTemplate homeworkTemplate = flytDexContext.HomeworkTemplates.SingleOrDefault(ht => ht.Id == homeworkTemplateRequest.Id); if (homeworkTemplate == null) { return errorService.Error<Guid?>("An error occurred: Invalid HomeworkTemplate - not found"); } homeworkTemplate.Description = homeworkTemplateRequest.Description; homeworkTemplate.Title = homeworkTemplateRequest.Title; flytDexContext.HomeworkTemplates.Update(homeworkTemplate); if (flytDexContext.SaveChanges() <= 0) { return errorService.Error<Guid?>("Error updating HomeworkTemplate, see log for error message"); } return new ServiceResult<Guid?>(homeworkTemplate.Id, ResultType.Success, "Success"); } public string ValidateHomeworkRequest(HomeworkRequest homeworkRequest) { if (!flytDexContext.Schools.Any(s => s.Id == homeworkRequest.SchoolId)) { return "An error occurred: Invalid SchoolId"; } if (!flytDexContext.Lessons.Any(l => l.Id == 
homeworkRequest.LessonId)) { return "An error occurred: Invalid LessonId"; } if (homeworkRequest.HomeworkTemplateId != Guid.Empty && homeworkRequest.HomeworkTemplateId != null) { if (!flytDexContext.HomeworkTemplates.Any(h => h.Id == homeworkRequest.HomeworkTemplateId)) { return "An error occurred: Invalid HomeworkTemplateId"; } } return null; } public string ValidateHomeworkTemplate(bool creating, Guid schoolId, ICollection<ResourceDto> resources) { if (!flytDexContext.Schools.Any(s => s.Id == schoolId)) { return "An error occurred: Invalid SchoolId"; } if (resources != null) { foreach (ResourceDto resourceDto in resources) { if (creating && flytDexContext.Resources.Any(r => r.Id == resourceDto.ResourceId)) { return "An error occurred: This resource Id already exists"; } } } return null; } public string ValidateHomeworkTemplateRequest(HomeworkTemplateRequest homeworkTemplateRequest) { if (!flytDexContext.Schools.Any(s => s.Id == homeworkTemplateRequest.SchoolId)) { return "An error occurred: Invalid SchoolId"; } if (!flytDexContext.Lessons.Any(l => l.Id == homeworkTemplateRequest.LessonId)) { return "An error occurred: Invalid LessonId"; } if (homeworkTemplateRequest.Resources != null) { foreach (ResourceDto resourceDto in homeworkTemplateRequest.Resources) { if (flytDexContext.Resources.Any(r => r.Id == resourceDto.ResourceId)) { return "An error occurred: This resource Id already exists"; } } } return null; } } } <file_sep>/Controllers/HomeworkController.cs using System; using System.Collections.Generic; using System.Linq.Expressions; using FlytDex.Domain.Services; using FlytDex.Domain.Services.Interfaces; using FlytDex.Shared; using FlytDex.Shared.Attributes; using FlytDex.Shared.Dtos; using FlytDex.Shared.Enums; using FlytDex.Shared.Requests; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; using StructureMap; namespace FlytDex.Controllers { [Route("api/[controller]")] [ApiController] [Authorize] public class HomeworkController : 
ControllerBase { // GET api/Homework/GetHomeworkForLesson [HttpGet("GetHomeworkForLesson")] [AuthorizeRoles(AuthorizationRoleType.Admin, AuthorizationRoleType.Teacher, AuthorizationRoleType.Technician)] public IActionResult GetHomeworkForLesson(Guid lessonId) { IContainer container = IocService.BeginRequest(); Expression<Func<HomeworkDto, object>>[] includes = new Expression<Func<HomeworkDto, object>>[] { include => include.Resources }; ServiceResult<List<HomeworkDto>> result = container.GetInstance<IHomeworkService>().GetHomeworkForLesson(lessonId, includes); IocService.EndRequest(container); if (result.ResultType == ResultType.Error) { return BadRequest(result.Message); } return Ok(result); } //GET api/Homework/GetPastHomework [HttpGet("GetPastHomework")] [AuthorizeRoles(AuthorizationRoleType.Admin, AuthorizationRoleType.Teacher, AuthorizationRoleType.Technician)] public IActionResult GetPastHomework(Guid schoolId, Guid employeeId, Guid subjectId, Guid studentGroupId) { IContainer container = IocService.BeginRequest(); Expression<Func<HomeworkDto, object>>[] includes = new Expression<Func<HomeworkDto, object>>[] { include => include.Students }; ServiceResult<List<HomeworkDto>> result = container.GetInstance<IHomeworkService>().GetPastHomework(schoolId, employeeId, subjectId, studentGroupId, includes); IocService.EndRequest(container); if (result.ResultType == ResultType.Error) { return BadRequest(result.Message); } return Ok(result); } //POST api/Homework/CreateHomework [HttpPost("CreateHomework")] [AuthorizeRoles(AuthorizationRoleType.Admin, AuthorizationRoleType.Teacher, AuthorizationRoleType.Technician)] public IActionResult CreateHomework([FromBody] HomeworkRequest homeworkRequest) { IContainer container = IocService.BeginRequest(); ServiceResult<HomeworkDto> result = container.GetInstance<IHomeworkService>().CreateHomework(homeworkRequest); IocService.EndRequest(container); if (result.ResultType == ResultType.Error) { return BadRequest(result.Message); } 
return Ok(result); } // POST api/Homework/UpdateHomework [HttpPost("UpdateHomework")] [AuthorizeRoles(AuthorizationRoleType.Admin, AuthorizationRoleType.Teacher, AuthorizationRoleType.Technician)] public IActionResult UpdateHomework([FromBody] HomeworkRequest homeworkRequest) { IContainer container = IocService.BeginRequest(); ServiceResult<HomeworkDto> result = container.GetInstance<IHomeworkService>().UpdateHomework(homeworkRequest); IocService.EndRequest(container); if (result.ResultType == ResultType.Error) { return BadRequest(result.Message); } return Ok(result); } // DELETE api/Homework/RemoveHomework [HttpDelete("RemoveHomework")] [AuthorizeRoles(AuthorizationRoleType.Admin, AuthorizationRoleType.Teacher, AuthorizationRoleType.Technician)] public IActionResult RemoveHomework(Guid homeworkId) { IContainer container = IocService.BeginRequest(); ServiceResult<HomeworkDto> result = container.GetInstance<IHomeworkService>().RemoveHomework(homeworkId); IocService.EndRequest(container); if (result.ResultType == ResultType.Error) { return BadRequest(result.Message); } return Ok(result); } //POST api/Homework/CreateHomeworkTemplate [HttpPost("CreateHomeworkTemplate")] [AuthorizeRoles(AuthorizationRoleType.Admin, AuthorizationRoleType.Teacher, AuthorizationRoleType.Technician)] public IActionResult CreateHomeworkTemplate([FromBody] HomeworkTemplateRequest homeworkTemplateRequest) { IContainer container = IocService.BeginRequest(); ServiceResult<HomeworkTemplateDto> result = container.GetInstance<IHomeworkService>().CreateHomeworkTemplate(homeworkTemplateRequest); IocService.EndRequest(container); if (result.ResultType == ResultType.Error) { return BadRequest(result.Message); } return Ok(result); } // POST api/Homework/UpdateHomeworkTemplate [HttpPost("UpdateHomeworkTemplate")] [AuthorizeRoles(AuthorizationRoleType.Admin, AuthorizationRoleType.Teacher, AuthorizationRoleType.Technician)] public IActionResult UpdateHomeworkTemplate([FromBody] HomeworkTemplateRequest 
homeworkTemplateRequest) { IContainer container = IocService.BeginRequest(); ServiceResult<Guid?> result = container.GetInstance<IHomeworkService>().UpdateHomeworkTemplate(homeworkTemplateRequest); IocService.EndRequest(container); if (result.ResultType == ResultType.Error) { return BadRequest(result.Message); } return Ok(result); } } } <file_sep>/Controllers/UserController.cs using System; using System.Collections.Generic; using FlytDex.Domain.Services; using FlytDex.Domain.Services.Interfaces; using FlytDex.Shared; using FlytDex.Shared.Attributes; using FlytDex.Shared.Dtos; using FlytDex.Shared.Enums; using FlytDex.Shared.Requests; using Microsoft.AspNetCore.Authorization; using Microsoft.AspNetCore.Mvc; using StructureMap; namespace FlytDex.Controllers { [Route("api/[controller]")] [ApiController] [Authorize] public class UserController : ControllerBase { //GET api/User/GetAlexaUsersForDevice [HttpGet("GetAlexaUsersForDevice")] [AuthorizeRoles(AuthorizationRoleType.AlexaMaster)] public IActionResult GetAlexaUsersForDevice(string deviceId) { IContainer container = IocService.BeginRequest(); ServiceResult<List<AlexaUserDto>> result = container.GetInstance<IUserService>().GetAlexaUsersForDevice(deviceId); IocService.EndRequest(container); if (result.ResultType == ResultType.Error) { return BadRequest(result.Message); } return Ok(result); } //GET api/User/GetAlexaUserForUsername [HttpGet("GetAlexaUserForUsername")] [AuthorizeRoles(AuthorizationRoleType.AlexaMaster)] public IActionResult GetAlexaUserForUsername(string username) { IContainer container = IocService.BeginRequest(); ServiceResult<AlexaUserDto> result = container.GetInstance<IUserService>().GetAlexaUserForUsername(username); IocService.EndRequest(container); if (result.ResultType == ResultType.Error) { return BadRequest(result.Message); } return Ok(result); } //GET api/User/GetUserSession [HttpGet("GetUserSession")] public IActionResult GetUserSession(string username) { IContainer container = 
IocService.BeginRequest(); ServiceResult<UserSessionDto> result = container.GetInstance<IUserService>().GetUserSession(username); IocService.EndRequest(container); if (result.ResultType == ResultType.Error) { return BadRequest(result.Message); } return Ok(result); } //POST api/User/UpdateUser [HttpPost("UpdateUser")] public IActionResult UpdateUser([FromBody] UserRequest userRequest) { IContainer container = IocService.BeginRequest(); ServiceResult<Guid> result = container.GetInstance<IUserService>().UpdateUser(userRequest); IocService.EndRequest(container); if (result.ResultType == ResultType.Error) { return BadRequest(result.Message); } return Ok(result); } } }<file_sep>/Model/User.cs using System; using System.Collections.Generic; using FlytDex.Domain.Model.FlytDex.Links; using FlytDex.Shared.Enums; namespace FlytDex.Domain.Model.FlytDex { public class User : Entity { public User(string username, string password, string alexaNickname, string alexaPassCode) { Username = username; Password = <PASSWORD>; AlexaNickname = alexaNickname; AlexaPassCode = alexaPassCode; AlexaCodeCreatedDateTime = DateTime.Now; } public DateTime LastLoginDateTime { get; set; } public string Username { get; set; } public string Password { get; set; } public DateTime LastAlexaUseDateTime { get; set; } public string AlexaNickname { get; set; } public string AlexaPassCode { get; set; } public DateTime AlexaCodeCreatedDateTime { get; set; } public PreferredContactMethod PreferredContactMethod { get; set; } public string UserEmail { get; set; } public string UserPhoneNumber { get; set; } public virtual ICollection<AuthorizationRole> AuthorizationRoles { get; set; } public virtual ICollection<UserDevice> UserDevices { get; set; } public virtual ICollection<LinkUserSchool> LinkUserSchools { get; set; } } } <file_sep>/Model/Homework.cs using FlytDex.Domain.Model.FlytDex.Links; using FlytDex.Shared.Enums; using System; using System.Collections.Generic; namespace FlytDex.Domain.Model.FlytDex { 
public class Homework : SchoolEntity { public string Title { get; set; } public string Description { get; set; } public HomeworkStatus Status { get; set; } public DateTime DueDateTime { get; set; } public DateTime SetDateTime { get; set; } public Guid? HomeworkTemplateId { get; set; } public virtual HomeworkTemplate HomeworkTemplate { get; set; } public Guid LessonId { get; set; } public virtual Lesson Lesson { get; set; } public virtual ICollection<LinkHomeworkResource> LinkHomeworkResources { get; set; } public virtual ICollection<LinkStudentHomework> LinkStudentHomeworks { get; set; } } } <file_sep>/Model/Period.cs using System; namespace FlytDex.Domain.Model.FlytDex { public class Period : SchoolEntity { public string Name { get; set; } public string Day { get; set; } /// <summary> /// This field allows us to link back to a WondeLesson (and therefore a WondePeriod) /// </summary> public int PeriodInstanceId { get; set; } public DateTime? StartDateTime { get; set; } public DateTime? EndDateTime { get; set; } } } <file_sep>/Interfaces/IPeriodService.cs using System; using System.Collections.Generic; using FlytDex.Shared; using FlytDex.Shared.Dtos; namespace FlytDex.Domain.Services.Interfaces { public interface IPeriodService { ServiceResult<List<PeriodDto>> GetPeriods(Guid employeeId, Guid schoolId, DateTime startTime, DateTime endTime); ServiceResult<PeriodDto> CreatePeriod(bool save, Guid schoolId, string name, DateTime startDateTime, DateTime endDateTime); } } <file_sep>/Interfaces/IHomeworkService.cs using System; using System.Collections.Generic; using System.Linq.Expressions; using FlytDex.Shared; using FlytDex.Shared.Dtos; using FlytDex.Shared.Requests; namespace FlytDex.Domain.Services.Interfaces { public interface IHomeworkService { ServiceResult<List<HomeworkDto>> GetHomeworkForLesson(Guid lessonId, params Expression<Func<HomeworkDto, object>>[] includes); ServiceResult<List<HomeworkDto>> GetPastHomework(Guid schoolId, Guid employeeId, Guid subjectId, 
Guid studentGroupId, params Expression<Func<HomeworkDto, object>>[] includes); ServiceResult<HomeworkDto> CreateHomework(HomeworkRequest homeworkRequest); ServiceResult<HomeworkDto> UpdateHomework(HomeworkRequest homeworkRequest); ServiceResult<HomeworkDto> RemoveHomework(Guid homeworkId); ServiceResult<HomeworkTemplateDto> CreateHomeworkTemplate(HomeworkTemplateRequest homeworkTemplateRequest); ServiceResult<Guid?> UpdateHomeworkTemplate(HomeworkTemplateRequest homeworkTemplateRequest); string ValidateHomeworkRequest(HomeworkRequest homeworkRequest); string ValidateHomeworkTemplate(bool creating, Guid schoolId, ICollection<ResourceDto> resources); string ValidateHomeworkTemplateRequest(HomeworkTemplateRequest homeworkTemplateRequest); } } <file_sep>/Services/PeriodService.cs using System; using System.Collections.Generic; using System.Linq; using AutoMapper; using AutoMapper.QueryableExtensions; using FlytDex.Domain.Model.FlytDex; using FlytDex.Domain.Services.Interfaces; using FlytDex.Shared; using FlytDex.Shared.Dtos; namespace FlytDex.Domain.Services { public class PeriodService : IPeriodService { private IFlytDexDbContext flytDexContext; private IMapper mapper; private IErrorService errorService; public PeriodService(IFlytDexDbContext flytDexContext, IMapper mapper, IErrorService errorService) { this.flytDexContext = flytDexContext; this.mapper = mapper; this.errorService = errorService; } public ServiceResult<List<PeriodDto>> GetPeriods(Guid employeeId, Guid schoolId, DateTime startDateTime, DateTime endDateTime) { List<PeriodDto> periodDtos = flytDexContext.Periods .Where(p => p.SchoolId == schoolId && p.StartDateTime.Value.Date >= startDateTime.Date && p.EndDateTime.Value.Date <= endDateTime.Date) .OrderBy(p => p.StartDateTime) .ProjectTo<PeriodDto>(mapper.ConfigurationProvider) .ToList(); List<Event> cachedEvents = flytDexContext.Events .Where(e => e.SchoolId == schoolId && e.EmployeeId == employeeId && e.StartDateTime.Date <= startDateTime.Date && 
e.EndDateTime.Date >= endDateTime.Date).ToList(); foreach (PeriodDto period in periodDtos) { period.DisplayName = period.PeriodNameShort; if (cachedEvents.Any(e => e.StartDateTime == period.StartDateTime && e.EndDateTime == period.EndDateTime)) { Event evnt = cachedEvents.Where(e => e.StartDateTime == period.StartDateTime && e.EndDateTime == period.EndDateTime).FirstOrDefault(); period.DisplayName += " - " + evnt.LinkEventStudentGroups.First().StudentGroup.GroupName; } } if (periodDtos.Count == 0) { return errorService.Warn<List<PeriodDto>>("No Periods found."); } return new ServiceResult<List<PeriodDto>>(periodDtos); } public ServiceResult<PeriodDto> CreatePeriod(bool save, Guid schoolId, string name, DateTime startDateTime, DateTime endDateTime) { Period period = new Period() { SchoolId = schoolId, StartDateTime = startDateTime, Day = startDateTime.DayOfWeek.ToString(), EndDateTime = endDateTime, Name = name }; flytDexContext.Periods.Add(period); if (save) { if (flytDexContext.SaveChanges() == -1) { return errorService.Error<PeriodDto>("Error adding period"); } } PeriodDto periodDto = mapper.Map<PeriodDto>(period); return new ServiceResult<PeriodDto>(periodDto); } } }
1d600252ce05b951da61d62e8936079387d50a58
[ "C#" ]
12
C#
joelgano/SampleAPI
ea992f2ac2ccf0ea307a17173fcea8b96ba49436
f5016f116a53d585f5e002ea21b709b2452dbb4a
refs/heads/main
<file_sep># Team Profile Generator ## Assignment Create a command line interface that uses Inquirer to take user inputted employee information to create a HTML page showing cards of the inputted information. Also create tests using Jest in order to verify the functionality of the employee classes. ### User Story ```md AS A manager I WANT to generate a webpage that displays my team's basic info SO THAT I have quick access to their emails and GitHub profiles ``` ### Acceptance Criteria ```md GIVEN a command-line application that accepts user input WHEN I am prompted for my team members and their information THEN an HTML file is generated that displays a nicely formatted team roster based on user input WHEN I click on an email address in the HTML THEN my default email program opens and populates the TO field of the email with the address WHEN I click on the GitHub username THEN that GitHub profile opens in a new tab WHEN I start the application THEN I am prompted to enter the team manager’s name, employee ID, email address, and office number WHEN I enter the team manager’s name, employee ID, email address, and office number THEN I am presented with a menu with the option to add an engineer or an intern or to finish building my team WHEN I select the engineer option THEN I am prompted to enter the engineer’s name, ID, email, and GitHub username, and I am taken back to the menu WHEN I select the intern option THEN I am prompted to enter the intern’s name, ID, email, and school, and I am taken back to the menu WHEN I decide to finish building my team THEN I exit the application, and the HTML is generated ``` ### Example Output - Screenshot ![Team Profile Generator Screenshot](./assets/Screenshot.jpeg) ### Walkthrough Video [Link](https://drive.google.com/file/d/1Hj0J9lWQP7pVdCzGxV5edm4mhffAHC9b/view?usp=sharing) <file_sep>function generateHTML(team) { var html = `<!DOCTYPE html> <html lang="en"> <head> <meta charset="UTF-8"> <meta http-equiv="X-UA-Compatible" 
content="IE=edge"> <meta name="viewport" content="width=device-width, initial-scale=1.0"> <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/4.5.0/css/bootstrap.min.css" /> <title>Team Profile Generator</title> </head> <body> <header class="jumbotron text-center bg-dark text-white"> <h1>Team</h1> </header> <main class = "container-fluid"> <div class = "row justify-content-center"> <div class = "col-md-9 card-columns"> ${generateCards(team)} </div> </div> </main> </body> </html>` return html; } function generateCards(team){ var cards = []; for(var i = 0; i < team.length; i++){ cards.push(`<div class="card border-info mb-3" style="max-width: 18rem;"> <div class="card-header bg-transparent border-info">${team[i].getName()}</div> <div class="card-body text-primary"> <h5 class="card-title">${team[i].getRole()}</h5> <table class="table table-sm table-dark"> <tbody> <tr> <th scope="row">ID</th> <td>${team[i].getId()}</td> </tr> <tr> <th scope="row">Email</th> <td><a href="mailto:${team[i].getEmail()}">${team[i].getEmail()}</a></td> </tr> ${getInfo(team[i])} </tbody> </table> </div> </div> `); } return cards.join(""); } function getInfo(teamMember){ switch(teamMember.getRole()){ case "Manager" : return `<tr> <th scope="row">Office Number</th> <td>${teamMember.getOfficeNumber()}</td> </tr>` case "Engineer" : return `<tr> <th scope="row">Github</th> <td><a href="https://github.com/${teamMember.getGithub()}" target="_blank">${teamMember.getGithub()}</a></td> </tr>` case "Intern" : return `<tr> <th scope="row">School</th> <td>${teamMember.getSchool()}</td> </tr>` } } module.exports = generateHTML;
8e988ead7f20dfae95a7c659f4d987a0b22f8f27
[ "Markdown", "JavaScript" ]
2
Markdown
OrtizAlex/Team-Profile-Generator
b7be15492411a1435f2c143af0ddcc79961b2b54
51dcfaa8232dc8dcaaaf4222e287f61bb86067ba
refs/heads/main
<repo_name>akshara-raju/BOOkEATeRs-LibraryApp<file_sep>/src/routes/signupRoutes.js const express = require('express'); const signupRouter = express.Router(); function routers(nav) { signupRouter.get('/',function(req,res){ res.render('signup', { nav, title: 'Signup', } ); }); signupRouter.get('/login', function(req,res){ const id=req.params.id; res.render("login", { nav, title:'Log In', } );//passed as object }); return signupRouter; }; module.exports = routers;<file_sep>/src/routes/loginRoutes.js const express = require('express'); const loginRouter = express.Router(); nav1 = [ { link:'/books', name:'Book' }, { link:'/authors', name: 'Author' }, { link:'/admin',name :'Add Books' }, { link:'/admin1',name:'Add Authors' }, { link:'/logout', name:'Sign Out' } ]; function router3(nav) { loginRouter.get('/', function(req,res){ res.render("login", { nav, title:'Log In', } );//passed as object }); loginRouter.get('/index',function(req,res){ const id=req.params.id; res.render("index", { nav, title:"home" }); }); loginRouter.get('/signup', function(req,res){ const id=req.params.id; res.render("signup", { nav, title:'Sign Up', } );//passed as object }); loginRouter.get('/home',function(req,res){ const id=req.params.id; res.render("home", { nav1, title:"home" }); }); return loginRouter; }//fn router ends module.exports=router3;//exporting to app.js
7acd3721ed39b7232b46d9ec15e186b6d12e28c2
[ "JavaScript" ]
2
JavaScript
akshara-raju/BOOkEATeRs-LibraryApp
84be4836d90389d0abf97453ea147c81dc9e47d6
6f1f4a5aa589c25aaa4e39cdac61262c1c440aaa
refs/heads/main
<repo_name>alcidesmig/dev.magalu.com-docs<file_sep>/examples/check_orders/src/App.js import React from "react"; import { BrowserRouter as Router, Switch, Route } from "react-router-dom"; import Login from "./components/pages/Login"; import InitialClient from "./components/pages/InitialClient"; import ClientsOrder from "./components/pages/ClientsOrder"; import ErrorPage from "./components/pages/Error"; import { PrivateRoute } from "./utils/PrivateRoute"; import "./App.css"; function App() { return ( <Router> <Switch> <Route exact path="/" component={Login} /> <PrivateRoute path="/client" component={InitialClient} /> <PrivateRoute path="/error" component={ErrorPage} /> <PrivateRoute path="/client-order" component={ClientsOrder} /> </Switch> </Router> ); } export default App; <file_sep>/examples/check_orders/.env.example REACT_APP_OIDC_AUTH_URI=https://id.magalu.com/oauth/auth REACT_APP_CLIENT_ID= REACT_APP_OIDC_TOKEN_URI=https://id.magalu.com/oauth/token REACT_APP_CLIENT_SECRET= REACT_APP_OMS_API_URI=https://alpha.api.magalu.com/maestro/v1/orders REACT_APP_URI=http://localhost:3000 REACT_APP_TENANT_TYPE=stenagam.CUSTOMER<file_sep>/guia-autorizacao-apps.md # Guia de autorização de aplicações ## Introdução Este é o guia do provedor de identidade da Plataforma Magalu. É por ele que os usuários finais farão a autorização de aplicações para o uso de suas contas e se autenticarão nos sistemas Magalu. Seguiu-se a [RFC 6749](https://datatracker.ietf.org/doc/html/rfc6749), que descreve o fluxo de OAuth 2.0, e a [especificação do OpenID](https://openid.net/specs/openid-connect-core-1_0.html), que descreve o fluxo de OpenID Connect, e dessa forma boa parte dos fluxos aqui apresentados já são bastante conhecidos pela comunidade. É válido ressaltar que, para que o fluxo aqui presente possa ser exercido, é necessário que uma aplicação tenha sido criada no [DevPortal](https://alpha.dev.magalu.com/), o portal para desenvolvedores. 
No portal, você pode fazer a criação e configuração de uma aplicação, especificando os parâmetros que serão utilizados neste guia e tendo acesso aos valores de identificador e chave de aplicação (`client_id` e `secret`). ## Glossário - **IDP**: provedor de identidade (Identity Provider), responsável por controlar os fluxos de autenticação e autorização de usuários e aplicações. - **Access Token**: token de acesso, é o resultado do fluxo de OAuth2/OpenID e é o que deve ser utilizado para consumir a API. ## API Keys vs OAuth2/OpenID As API Keys foram criadas, dentro da plataforma Magalu, com o objetivo de facilitar um primeiro contato do usuário desenvolvedor com a API Magalu, e por questões de segurança existem algumas restrições associadas a elas, para que não sejam utilizadas em ambiente de produção. O OpenID (superconjunto do OAuth2), em sua essência, é um protocolo/padrão aberto de autorização que permite que um terceiro se autentique (login) e identifique em uma aplicação, para que a mesma possa agir em nome do respectivo terceiro, e é o seu fluxo que deve ser utilizado nas aplicações finais, em ambiente de produção. 
## [Base URLs](#base-url) | Ambiente | Base URL | | -------- | --------------------------- | | Produção | https://id.magalu.com/oauth | ### Endpoints | Endpoint | URL | | ---------------------------- | -------------------------------------- | | Autorização | https://id.magalu.com/oauth/auth | | Resgate de tokens | https://id.magalu.com/oauth/token | | Introspect de tokens | https://id.magalu.com/oauth/introspect | | Resgate de dados do usuário | https://id.magalu.com/oauth/userinfo | | Logout de usuário | https://id.magalu.com/oauth/logout | | Certificados para validações | https://id.magalu.com/oauth/certs | ![](https://github.com/luizalabs/dev.magalu.com-docs/blob/main/static/images/application_flow_by_user.png) Mais informações sobre os endpoints citados podem ser consultadas na especificação (OpenAPI) do provedor de identidade, que estará disponível em breve. ## Entendendo o fluxo O fluxo de autorização utilizando OAuth2 é mais simples do que parece, em resumo: 1. O usuário, consumidor da aplicação, é levado para a URL de autenticação do IDP, com um parâmetro que identifica a aplicação (`client_id`) que ele quer autorizar; 2. O usuário faz o `login` no IDP; 3. O usuário é redirecionado para uma das `redirect_uris` cadastradas na criação da aplicação (pode ser específica como parâmetro junto ao `client_id`), junto com um código de autorização; 4. A aplicação faz a troca do código de autorização por um Access Token do usuário, que será utilizado pela aplicação para fazer as chamadas em nome do mesmo. ## Autorizando uma aplicação O desenvolvedor de uma aplicação, deve realizar o fluxo da seguinte forma: ### Passo 1 Esse passo consiste na [seção 4.1.1 da RFC do OAuth2](https://datatracker.ietf.org/doc/html/rfc6749#section-4.1.1). 
Quando o usuário, consumidor da aplicação, precisar autorizar a aplicação na Plataforma Magalu, é necessário fazer um redirecionamento (utilizando o método `GET`) do usuário para a URL de autorização (descrita em "[Base URLs](#base-url)") com os seguintes parâmetros: - `response_type`: "code" - Esse parâmetro é obrigatório e o único valor aceito para ele é a string "code". - Exemplo: `response_type=code` - `client_id`: valor do client id da aplicação, criada no DevPortal. - Exemplo: `client_id=minha-aplicacao-para-sellers` - `redirect_uri`: uma das `redirectUris` cadastradas no momento da criação da aplicação. - Indica a URI para qual o usuário será enviado com o código a ser trocado pelo Access Token. Aqui, deve ser colocada a URI de `callback` da sua aplicação. - Exemplo: `redirect_uri=https://minha-redirect-uri.dev` - `scope`: são os scopes os quais a sua aplicação precisa ter acesso na conta do usuário. - No momento atual, é necessário passar somente o valor `openid`. Com ele, além dos scopes padrão, é possível utilizar o token gerado para consumo de toda a API disponibilizada. Entretanto, isso pode ser alterado conforme novas APIs forem sendo disponibilizadas. - Exemplo: `scope=openid` - `state`: é um parâmetro de segurança, que deve ser gerado aleatoriamente pela aplicação. - Exemplo: `state=xyz` - Esse parâmetro é opcional, porém é citado como recomendado na especificação do OAuth2. - Mais informações podem ser consultadas na [seção 10.12 da RFC 6749](https://datatracker.ietf.org/doc/html/rfc6749#section-10.12). 
Dessa forma, um exemplo válido de URL para a qual o usuário deve ser redirecionado para autorizar a aplicação seria: ``` https://id.magalu.com/oauth/auth?response_type=code &client_id=minha-aplicacao &redirect_uri=https://minha-redirect-uri.dev &scope=openid &state=xyz ``` Onde: - `response_type`=`code` - `client_id`=`minha-aplicacao` - `redirect_uri`=`https://minha-redirect-uri.dev` - `state`=`xyz` - `scope`=`openid` ### Passo 2 Após o usuário ser redirecionado para a URL de autenticação, ele cairá em uma tela de login, que permitirá que ele faça login como **vendedor** ou **consumidor**. É esperado que o usuário, nesse momento, preencha os seus dados e faça o login, e então inicia-se o passo 3. > P.S.: Caso o usuário já tenha autorizado a aplicação e esses dados ainda estejam guardados no navegador, esse passo é pulado e o passo 3 inicia-se logo após o redirecionamento do passo 1. ### Passo 3 O usuário, após o login ou caso já esteja logado, será redirecionado para a `redirect_uri` em questão. Como citado anteriormente, ela será a passada como parâmetro para **https://id.magalu.com/oauth/auth**, e deve ter sido cadastrada anteriormente (no DevPortal). Seguindo os parâmetro do exemplo do passo 1, o usuário seria redirecionado para: ``` https://minha-redirect-uri.dev?state=fj8o3n7bdy1op5 &session_state=94c44902-0d37-41b2-b6f1-45808ce8eb2f &code=6ccdb1f7-eb3d-49f0-894e-90b64dd6ead0.94c44902-0d37-41b2-b6f1-45808ce8eb2f.1e39527d-02aa-4fa0-97c9-fe6ce98fb93e ``` Dessa forma, a aplicação deve receber os parâmetros presentes na URL de redirecionamento e utilizá-los para completar o fluxo. Dados os parâmetros, o `state` pode ser utilizado para implementação de proteção contra ataque de CSRF, conforme referenciado anteriormente, e o `session_state` é um identificador interno para identificar a sessão do usuário, que pode ser ignorado por hora. 
Por fim, o valor de `code` pode ser utilizado para resgatar um Access Token válido do usuário consumidor da aplicação, conforme descrito no passo 4. ### Passo 4 Com o valor de `code` em mãos, recebido no passo 3, é possível fazer uma requisição no IDP e obter um Access Token (JWT) do usuário que autorizou a aplicação. A requisição pode ser feita da seguinte forma: ```curl curl -X POST "https://id.magalu.com/oauth/token" \ --data-urlencode "grant_type=authorization_code" \ --data-urlencode "client_id=$CLIENT_ID" \ --data-urlencode "client_secret=$CLIENT_SECRET" \ --data-urlencode "code=$CODE" \ --data-urlencode "scope=$SCOPE" \ --data-urlencode "redirect_uri=$REDIRECT_URI" ``` Onde: - `$CLIENT_ID` deve ser o **client ID** da sua aplicação; - No exemplo, seria `minha-aplicacao`. - `$CLIENT_SECRET` deve ser a **secret** da sua aplicação; - Ela pode ser consultada no DevPortal, e é um valor secreto que não deve ser compartilhado em hipótese alguma. - `$CODE` deve ser o valor recebido no parâmetro `code` após o redirecionamento para a sua `redirect_uri` no passo 3; - No exemplo seria `6ccdb1f7-eb3d-49f0-894e-90b64dd6ead0.94c44902-0d37-41b2-b6f1-45808ce8eb2f.1e39527d-02aa-4fa0-97c9-fe6ce98fb93e`. - `$SCOPE` deve possuir o valor `openid`; - `$REDIRECT_URI` deve ser o valor da URI de redirecionamento utilizada no fluxo, e passada anteriormente para **https://id.magalu.com/oauth/auth**. 
### Tokens obtidos Após a requisição de troca de `code` por Access Token, é esperada uma resposta do seguinte formato: ```json { "access_token": "<KEY>", "expires_in": 3600, "refresh_expires_in": 1800, "refresh_token": "<KEY>", "token_type": "Bearer", "id_token": "<KEY>", "not-before-policy": 1627480351, "session_state": "3c3dd522-2f97-4a7a-8d69-c29a93c5cf3d", "scope": "openid spi-tenants email" } ``` Onde o `access_token` é o Access Token a ser utilizado pela aplicação, e pode ser de dois formatos, e `scope` tem os valores `default` do nosso provedor de identidade, somados aos pedidos pela aplicação. > Observação: os "scopes" default são, até o momento, `spi-tenants`, e `email`. Estamos trabalhando no desenvolvimento de novos "scopes", e em breve cada parte da API precisará de um "scope" específico para ser utilizada, junto ao `openid`. Dessa forma, então o Access Token quando aberto conterá um payload com o seguinte formato: ```json { "exp": 1627587640, "iat": 1627584040, "auth_time": 1627584027, "jti": "a340c18a-9e11-4020-b75b-a558fe04848f", "iss": "https://id.magalu.com/auth/realms/master", "sub": "648e89e3-3be4-48a7-86c0-c8eed5c42078", "typ": "Bearer", "azp": "minha-aplicacao", "session_state": "3c3dd522-2f97-4a7a-8d69-c29a93c5cf3d", "acr": "1", "scope": "openid spi-tenants email", "tenants": [ { "uuid": "000055d5-ca8c-4cd5-bc38-ca5fa0f8e23a", "type": "stenagam.CUSTOMER", "internal_id": "000055d5-ca8c-4cd5-bc38-ca5fa0f8e23a" }, { "uuid": "21fea73c-e244-497a-8540-be0d3c583596", "type": "stenagam.SELLER", "internal_id": "stenagam_sandbox" }, { "uuid": "28cd74b4-9c14-4f20-b6ed-f25ba446748a", "type": "maganets.CUSTOMER", "internal_id": "28cd74b4-9c14-4f20-b6ed-f25ba446748a" } ], "aud": "public", "email_verified": false, "org": "magalu", "email": "<EMAIL>" } ``` Ainda assim, caso a conta esteja vinculada a um seller, existirá mais um atributo `seller` no payload, que é de uso interno da plataforma. 
Tendo esse Access Token em mãos, a aplicação pode consultar os tenants do usuário no `Account` e então fazer as requisições na nossa API. :-) ### Renovação de Access Token É válido ressaltar, ainda, que o Refresh Token pode ser utilizado para renovar o Access Token do usuário na mesma sessão, e isso pode ser feito através da seguinte requisição: ```curl curl -X POST "https://id.magalu.com/oauth/token" \ --data-urlencode "grant_type=refresh_token" \ --data-urlencode "client_id=$CLIENT_ID" \ --data-urlencode "client_secret=$CLIENT_SECRET" \ --data-urlencode "refresh_token=$REFRESH_TOKEN" ``` Onde: - `$CLIENT_ID` deve ser o **client ID** da sua aplicação; - `$CLIENT_SECRET` deve ser a **secret** da sua aplicação; - `$REFRESH_TOKEN` deve ser o **Refresh Token** obtido no fluxo de obtenção de tokens. Além disso, a resposta para essa requisição será a mesma da retornada na troca de um `code` por um Access Token. ### Validação de Access Tokens Uma vez obtido um Access Token, você ainda pode validá-lo utilizando os certificados presentes em **https://id.magalu.com/oauth/certs**. Essa validação pode ser feita facilmente utilizando alguma biblioteca existente para a sua linguagem. Em golang, por exemplo, isso pode ser feito utilizando a biblioteca [jwt-go](https://github.com/dgrijalva/jwt-go), com o seguinte trecho de código: ```go import jwt "github.com/dgrijalva/jwt-go" // ... jwks, err := keyfunc.Get("https://id.magalu.com/oauth/certs", keyfunc.Options{ Client: &http.Client{ Timeout: TIMEOUT_PARA_REQUEST_NO_IDP, }, }) if err != nil { // Erro resgatando os certificados em https://id.magalu.com/oauth/certs } token, err := jwt.Parse(ACCESS_TOKEN, jwks.KeyFunc) if err != nil { // Erro na validação do Token JWT } // ... ``` <file_sep>/examples/check_orders/README.md # DemOrders ## O App demo para acesso às API de Order da Magalu Este aplicativo tem a função de servir como exemplo de consumo de APIs de pedidos do Magazine Luiza. 
## Features - Login e autenticação no ambiente Magalu - Listagem de todos os pedidos do cliente - Busca de um pedido específico pelo ID - Detalhes de cada pedido ## Tecnologias Para que o DemOrders funcione corretamente, você irá precisar das seguintes tecnologias: - [ReactJS] - Biblioteca Js para web apps! - [node.js] - Interpretador de Javascript - [GIT] - Sistema de versionamento! - [Create-React-App] - Maneira simples de iniciar apps em React! Este projeto é Open Source com um repositório público. ## Instalação O DemOrders requer [Node.js](https://nodejs.org/) v10+ para funcionar. Clone o projeto: ```sh cd demorders git clone https://github.com/luizalabs/dev.magalu.com-docs.git ``` Instale as dependências: ```sh cd dev.magalu.com-docs/examples/check_orders npm i ``` Crie um arquivo .env com base no arquivo .env.example Por fim, rode o comando para iniciar o programa ```sh npm start ``` ## License MIT **Free Software, Hell Yeah!** [//]: # (These are reference links used in the body of this note and get stripped out when the markdown processor does its job. There is no need to format nicely because it shouldn't be seen. 
Thanks SO - http://stackoverflow.com/questions/4823468/store-comments-in-markdown-syntax) [dill]: <https://github.com/joemccann/dillinger> [node.js]: <http://nodejs.org> [ReactJS]: <https://reactjs.org/> [GIT]: <https://git-scm.com/> [Create-React-App]: <https://create-react-app.dev/> <file_sep>/examples/check_orders/src/components/pages/Login/index.js import React, { useState, useEffect } from "react"; import "./index.css"; import Button from "@material-ui/core/Button"; import { getJWT } from "../../../services/post.jwt"; import CircularProgress from "@material-ui/core/CircularProgress"; import { useHistory } from "react-router-dom"; import isAuthenticated from "../../../utils/auth"; function Login() { const history = useHistory(); const [loading, setLoading] = useState(true); const [jwtReady, setJwtReady] = useState(null); // Pega o code dado pela tela de login do header const url_string = window.location.href; var url = new URL(url_string); var codeResponse = url.searchParams.get("code"); useEffect(() => { const findJWT = async () => { setLoading(false); await getJWT(codeResponse).catch((error) => { console.log(error); history.push("/error"); }); setJwtReady("Done"); setLoading(true); }; // Checa se o code ja foi fornecido, pede o JWT, confere a auth transfere para a página correta if (codeResponse !== null && loading) { if (jwtReady !== "Done") { findJWT(); } // Redireciona para a página correta de acordo com a auth if (jwtReady === "Done") { var auth = isAuthenticated(); if (auth === true && loading) { history.push("/client"); } else if (auth === false && loading) { history.push("/"); } } } }); if (!loading) { return ( <> <div style={{ position: "absolute", left: "50%", top: "50%", transform: "translate(-50%, -50%)", }} > <CircularProgress size={80} /> </div> </> ); } else { return ( <div className="login"> <h1 className="title">AppDemo</h1> <div className="button"> <br /> <Button variant="contained" onClick={(event) => (window.location.href = 
`${process.env.REACT_APP_OIDC_AUTH_URI}?client_id=${process.env.REACT_APP_CLIENT_ID}&redirect_uri=${process.env.REACT_APP_URI}&response_type=code&scope=customer_access openid`) } > Login </Button> </div> </div> ); } } export default Login; <file_sep>/README.md # dev.magalu.com-docs Conteúdo do portal Magalu para Desenvolvedores, incluindo especificações da plataforma aberta Magalu. ## Como Participar? No momento, os guias escritos aqui se referem à nossa plataforma aberta, ainda em fase de testes fechados. No entanto, após este período ela será apresentada oficialmente para a comunidade que tiver interesse de participar e construir com a gente! : ) ![](https://i.imgur.com/1XVhFhK.png) <file_sep>/examples/check_orders/src/components/pages/InitialClient/index.js import React, { useState } from "react"; import "./index.css"; import { useHistory } from "react-router-dom"; import ExitToAppIcon from "@material-ui/icons/ExitToApp"; import SearchOrder from "../../components/Search/index"; import { getOrders } from "../../../services/get.orders"; import CircularProgress from "@material-ui/core/CircularProgress"; function InitialClient() { const [loading, setLoading] = useState(true); const history = useHistory(); const logout = () => { localStorage.clear(); history.push("/"); }; const listaTodosPedidos = async () => { setLoading(false); await getOrders().catch((error) => { console.log(error); history.push("/error"); }); setLoading(true); history.push("/client-order"); }; if (!loading) { return ( <> <div style={{ position: "absolute", left: "50%", top: "50%", transform: "translate(-50%, -50%)", }} > <CircularProgress size={80} /> </div> </> ); } else { return ( <div className="client"> <div className="c-hello"> <div className="title-client"> <h1 className="hello-client">Olá, Cliente</h1> </div> <div className="c-h-logout-icon"> <ExitToAppIcon fontSize="large" onClick={(e) => logout()} /> </div> </div> <div className="c-hello-search"> <SearchOrder /> </div> <div 
className="c-allOrders"> <button className="button-pos" onClick={(e) => listaTodosPedidos()}> Todos Pedidos </button> </div> </div> ); } } export default InitialClient; <file_sep>/walkthrough-api.md # Walkthrough API Marketplace Magalu ## Disclaimer Atualmente a API do Marketplace da Magalu encontra-se em estado beta, com acesso restrito a alguns convidados que manifestaram interesse através do https://dev.magalu.com/. A abertura da API publicamente acontecerá ainda este ano, e será amplamente divulgada. Para este momento, as URLs da API e do Developer Portal agora estão referenciadas no texto, respectivamente, como https://alpha.api.magalu.com/ e https://alpha.dev.magalu.com/. Espera-se, que assim que a API for aberta publicamente, esses sistemas passem a residir nos domínios https://api.magalu.com/ e https://dev.magalu.com/. ## Introdução A API do Marketplace da Magalu oferece acesso programático a uma variedade de funcionalidades, expostas como endpoints REST, acessíveis via HTTPS. Com esta API você pode: - Escrever aplicações que utilizam todos os recursos da API, agindo em nome de um usuário final, como por exemplo, um parceiro Magalu vendendo produtos no marketplace - Consultar seus pedidos de venda, aceitá-los e realizar os passos essenciais para sua operação, incluindo faturamento e envio - Atender e responder a chamados abertos sobre suas vendas e os produtos vendidos - Consultar e cadastrar SKUs (Stock Keeping Units) do seu estoque, utilizando como base produtos do nosso catálogo central entre outras atividades no ecossistema Magalu. A API permite que sejam feitas requisições tanto pela perspectiva de um Consumidor quanto a de um Vendedor na plataforma, retornando conteúdo apropriado para cada uma destas perspectivas. 
Por exemplo, uma consulta de Pedidos realizada por um usuário com o perfil de Consumidor retorna os pedidos realizados por este consumidor; uma consulta de pedidos realizada por um usuário Vendedor retorna os pedidos que este vendedor recebeu, e que ele precisa confirmar, faturar e entregar. Todos os recursos da API são expostos pelo domínio api.magalu.com. A documentação e ferramentas para desenvolvedores, inclusive gerenciamento de tokens de acesso, residem em https://alpha.dev.magalu.com. Para se cadastrar basta ter uma conta **Github**. Esperamos você lá! > Observação: este documento contém exemplos que assumem que você tenha as > ferramentas de linha de comando `curl` e `jq` instaladas, que em Linux estão > disponíveis como pacotes do sistema, e em outros sistemas operacionais, em > repositórios públicos. ## Autenticação e Autorização A forma mais simples de acessar a API Magalu é usando uma API Key. Neste documento, onde aparecer `MYAPIKEY` deve ser feita a substituição pela chave gerada. > Uma API Key já está esperando por você no seu [Painel de Desenvolvedor](https://alpha.dev.magalu.com/dashboard). > Lá você poderá revogar a API Key existente e gerar uma nova a qualquer momento. 
Para testar se sua API Key está funcionando, use o seguinte endpoint: ```bash curl -H "X-API-Key: ${MYAPIKEY}" https://alpha.api.magalu.com/account/v1/whoami ``` <details> <summary><strong>Resposta JSON:</strong></summary> ```json { "uuid": "5b02cdaf-9a93-4cfe-959a-ec989bd414e5", "preferred_email": "<EMAIL>", "first name": "Joe", "last name": "User", "active_tenant": null, "created_at": "2020-12-22T16:08:51.000Z", "updated_at": "2021-02-14T01:14:25.000Z", "last_accessed_at": "2021-07-07T22:03:01Z" } ``` </details> Embora API Keys sejam simples de usar e não expirem, acessos com API Key têm algumas restrições: - Acessos a dados de produção podem ser feitos apenas pela perspectiva do Consumidor associado ao usuário dono da API Key (ou seja, não é possível usar API Keys para acessar dados de produção pela perspectiva de Vendedor) - Acessos a dados "rascunho" (às vezes chamado de sandbox), utilizado para testes, podem ser feitos com API Keys assumindo qualquer perspectiva. Para acesso completo ao ambiente de produção API Magalu, você deve utilizar o [fluxo de autorização padrão OAuth 2.0](https://github.com/luizalabs/dev.magalu.com-docs/blob/main/guia-autorizacao-apps.md). Chamadas OAuth 2.0 usam um token de acesso no cabeçalho Authorization, conforme indicado abaixo: ```bash curl -H "Authorization: Bearer ${MYTOKEN}" https://alpha.api.magalu.com/account/v1/whoami ``` <details> <summary><strong>Resposta JSON:</strong></summary> ```json { "uuid": "5b02cdaf-9a93-4cfe-959a-ec989bd414e5", "preferred_email": "<EMAIL>", "first name": "Joe", "last name": "User", "active_tenant": null, "created_at": "2020-12-22T16:08:51.000Z", "updated_at": "2021-02-14T01:14:25.000Z", "last_accessed_at": "2021-07-07T22:03:01Z" } ``` </details> Para mais dúvidas verifique o tópico **Tenants e Perspectivas**. 
## Consultando pedidos e pacotes Com sua API Key em mãos, você pode já consultar a API e descobrir quais foram as últimas compras que você fez no Magalu e no Netshoes: ```bash curl -H "X-API-Key: ${MYAPIKEY}" https://alpha.api.magalu.com/maestro/v1/orders | \ jq ".[] | {uuid, number, created_at, channel: .sales_channel.organization.code}" ``` <details> <summary><strong>Resposta JSON:</strong></summary> ```json [ { "uuid": "a2e45b24-d9cc-qwwa-97cf-d14f6df28670", "number": "1035145663475046", "created_at": "2020-12-22T16:08:51.000Z", "channel": "netshoes" }, { "uuid": "13bdb3e3-8fad-4f9b-a6c3-2fa99786289f", "number": "8954650874610585", "created_at": "2020-12-22T16:08:51.000Z", "channel": "magazine_luiza" } ] ``` </details> Se a consulta não retornou nenhum elemento, tem uma forma fácil de resolver: faça uma compra no Magalu ou no Netshoes e faça novamente a chamada. :-) A entidade que registra uma compra e seus produtos, **Order**, possui um ou mais pacotes, representando a forma como a compra é dividida conforme o local e o Vendedor (o dono do estoque) do produto. De posse do ID de um pedido (**<ORDER_ID>**) e do ID de um pacote (**<PACKAGE_ID>**), é possivel listar os detalhes dos itens de um pacote. 
```bash curl -H "X-API-Key: ${MYAPIKEY}" \ https://alpha.api.magalu.com/maestro/v1/orders/${ORDER_ID} ``` <details> <summary><strong>Resposta JSON:</strong></summary> ```json [ { "uuid": "13bdb3e3-8fad-4f9b-a6c3-2fa99786289f", "number": "8954650874610585", "sales_channel": { "code": 04, "description": "ML-APP Android", "organization": { "uuid": "4da25f48-4193-45de-b4ed-9b8c93b7f987", "code": "magazine_luiza", "description": "Magazine Luiza" } }, "customer": { "uuid": "5b02cdaf-9a93-4cfe-959a-ec989bd414e5", "name": "<NAME>" }, "payment": { "status": { "code": "paid" }, "currency": "BRL" }, "packages": [ { "uuid": "a6dce737-fe37-4444-8c51-a021d10051b7", "seller": { "code": "seller-abcd", "description": "Seller ABCD" }, "amount": 151.01, "created_at": "2020-12-22T16:08:51.000Z", "updated_at": "2020-12-24T14:52:44.000Z" }, { "uuid": "72653741-4b3a-4327-9f13-03e4ffd2cb31", "seller": { "code": "magazineluiza", "description": "Magazine Luiza" }, "amount": 505.45, "created_at": "2020-12-22T16:08:51.000Z", "updated_at": "2020-12-24T14:52:44.000Z" } ], "created_at": "2020-12-22T16:08:51.000Z", "updated_at": "2020-12-22T16:08:51.000Z" } ] ``` </details> Para ver os itens de um pacote, consulte o endpoint `/packages` passando o UUID relevante: ```bash curl -H "X-API-Key: ${MYAPIKEY}" \ https://alpha.api.magalu.com/maestro/v1/orders/${ORDER_ID}/packages/${PACKAGE_ID} ``` <details> <summary><strong>Resposta JSON:</strong></summary> ```json { "uuid": "72653741-4b3a-4327-9f13-03e4ffd2cb31", "seller": { "code": "magazineluiza", "description": "Magazine Luiza" }, "amount": 505.45, "items": [ { "uuid": "a7697479-4896-4a05-b439-f4ff9913f03b", "product": { "code": "225339000", "seller": { "sku": "225339000" }, "price": 240.13, "freight": { "cost": { "customer": 4.99 } } }, "discount": 55.6, "interest": 10, "value": 194.53, "total": 217.03, "description": "KIT 4 Cerveja Heineken Premium Puro Malte Lager", "reference": "12 Unidades 350ml", "brand": "Heineken", "category": { "id": "ME", 
"sub_category": { "id": "CVEJ" } }, "image_url": "https://a-static.mlcdn.com.br/{w}x{h}/cerveja-heineken-premium-puro-malte-lager-12-unidades-350ml/magazineluiza/225339000/b04fa5652e7755a44c0117e2124c6e1f.jpg", "quantity": 2, "value": 389.06, "total": 434.07, "gifts": [ { "uuid": "acd27d3b-9d33-453b-abe9-48e76078a73e", "product": { "code": "218743100", "description": "Copo Cerveja", "reference": "vidro", "brand": "Plastic" }, "quantity": 2, "created_at": "2018-06-08T18:21:35.000Z", "updated_at": "2018-06-08T18:21:35.000Z" } ], "bundles": [ { "uuid": "2bace316-9b49-46d1-a87a-34cc0b12610e", "product": { "code": "218743100", "price": 60.03, "freight": { "cost": { "customer": 5.62 } }, "description": "Cerveja Heineken Premium", "discount": 13.9, "interest": 2.5, "value": 48.63, "total": 54.25 }, "quantity": 4, "created_at": "2018-06-08T18:21:35.000Z", "updated_at": "2018-06-08T18:21:35.000Z" } ], "services": [ { "uuid": "b34d71ec-b432-4b4c-83ff-c7c5297f179c", "id": "3", "description": "Garantia Extendida", "price": 35.69, "quantity": 2, "total": 71.38, "validity": { "type": "month", "value": 12 }, "slug": "GE", "created_at": "2018-06-08T18:21:35.000Z", "updated_at": "2018-06-08T18:21:35.000Z" } ], "benefits": [ { "type": "cashback", "description": "10% em Cashback", "amount": 19.99 } ], "created_at": "2020-12-22T16:08:51.000Z", "updated_at": "2020-12-22T16:08:54.000Z" } ], "created_at": "2020-12-22T16:08:51.000Z", "updated_at": "2020-12-24T14:52:44.000Z" } ``` </details> ## Consultando e cadastrando SKUs Para utilizar a API de catálogo, chamada de Adelpha, é necessário utilizar um tenant de tipo `{maganets|stenagam}.SELLER`. 
Tendo um desses, podemos checar os SKUs ligados ao seller dado no endpoint `/skus` do namespace `/adelpha/v1`: ```bash curl -H "X-API-Key: ${MYAPIKEY}" -H "X-Tenant-ID: 21fea73c-e244-497a-8540-be0d3c583596" \ https://alpha.api.magalu.com/adelpha/v1/skus?_limit=1 \ | jq ".[] | {sku, channels, identifier: .product.identifier}" ``` <details> <summary><strong>Resposta JSON:</strong></summary> ```json { "sku": "mySku0401", "channels": [ { "active": false, "listing_id": null, "name": "magazineluiza", "price": { "currency": "BRL", "list_price": 90828251.32, "value": 90820401.32 }, "promotionals": [], "status": "OFFLINE", "url": null } ], "identifier": [ { "type": "ean", "value": "1234567804019" }, { "type": "isbn", "value": "1234567804026" }, { "type": "ncm", "value": "0401.61.30" } ] } ``` </details> Como visto no exemplo acima, é possível paginar os SKUs com os parâmetros `_limit` e `_offset`, e filtrá-los com qualquer um dos parâmetros `sku`, `title`, `ean`, `ncm`, `isbn`, `group_id`: ```bash curl -H "X-API-Key: ${MYAPIKEY}" -H "X-Tenant-ID: 21fea73c-e244-497a-8540-be0d3c583596" \ https://alpha.api.magalu.com/adelpha/v1/skus?isbn=1234567804026 | jq ``` <details> <summary><strong>Resposta JSON:</strong></summary> ```json [ { "channels": [ { "active": false, "listing_id": null, "name": "magazineluiza", "price": { "currency": "BRL", "list_price": 90828251.32, "value": 90820401.32 }, "promotionals": [], "status": "OFFLINE", "url": null } ], "product": { "attributes": [ { "type": "tsgtflqt0401", "value": "iahhqgia0401" }, { "type": "pqkzngwp0401", "value": "xrjavqpw0401" } ], "brand": "wjNXwcMhlBFCPRlkrsib0401", "created_at": "2021-07-30T20:46:24.209000", "creator": "ff6c63f2-2379-43a1-aed3-870ba83c91b7", "datasheet": [ { "type": "ruaiqnxi0401", "value": "mzbvtqbx0401" }, { "type": "nurhfiib0401", "value": "urjwesrq0401" } ], "description": "...", "dimensions": { "depth": 34, "height": 808, "weight": 31, "width": 70 }, "group_id": "610187991d2bf3b979a67c40", "identifier": [ 
{ "type": "ean", "value": "1234567804019" }, { "type": "isbn", "value": "1234567804026" }, { "type": "ncm", "value": "0401.61.30" } ], "media": { "images": ["..."], "videos": ["..."] }, "origin": "imported", "package": { "depth": 64, "height": 30, "weight": 61, "width": 80 }, "perishable": true, "tags": ["vixjxzyv0401", "hmugeipj0401"], "tax_replacement": false, "title": "PnxwxKTJGv0401", "updated_at": "2021-07-30T20:48:09.006000", "updater": "ff6c63f2-2379-43a1-aed3-870ba83c91b7" }, "sku": "mySku0401", "stocks": [ { "branch": "zbxqyoqx0401", "delivery_time": 10401, "name": "daoyfowu0401", "quantity": 5404, "reserved": 14, "type": "on_supplier" } ] } ] ``` </details> Para cadastrar um novo SKU, utilizamos também o endpoint `/adelpha/v1/skus`, onde é obrigatório informar: - o código do sku; - os dados do produto (título, outros "identifiers", descrição, peso do pacote, etc); - os dados de estoque da oferta; - os canais onde se deseja publicar a oferta e os preços por canal. ```bash curl -X POST https://alpha.api.magalu.com/adelpha/v1/skus\ -H "X-API-Key: ${MYAPIKEY}" -H "X-Tenant-ID: 21fea73c-e244-497a-8540-be0d3c583596" \ -H "accept: application/json" -H "content-type: application/json" \ -d "$PAYLOAD" ``` <details> <summary><strong>Payload JSON:</strong></summary> ```json PAYLOAD='{ "sku": "012345678", "stocks": [ { "quantity": 1234, "branch": "ULA01", "name": "Estoque X", "type": "on_seller", "reserved": 0, "delivery_time": 144000 } ], "channels": [ { "name": "magazineluiza", "active": true, "price": { "value": "1234.99", "list_price": "1300" } } ], "product": { "group_id": "5f6e2b8a9f91f47840b9bf49", "identifier": [ { "type": "ean", "value": "841667100531" }, { "type": "isbn", "value": "9788562063602" }, { "type": "ncm", "value": "8517.61.30" } ], "title": "Tablet Wi-Fi 4GB Tela 6", "description": "Feito para os amantes da leitura com sua tela de 6 polegadas...", "origin": "national", "perishable": false, "package": { "height": 100, "width": 80, "depth": 90, 
"weight": 150 }, "datasheet": [ { "type": "Voltagem", "value": "220" }, { "type": "Cor", "value": "Branca" } ], "tags": [ "my-tag-1", "my-tag-2" ], "brand": "Samsung", "media": { "images": [ "https://mysite.domain/some-image.jpg" ], "videos": [ "https://youtu.be/some-video/" ] }, "dimensions": { "height": 100, "width": 80, "depth": 90, "weight": 150 }, "attributes": [ { "type": "Portas USB", "value": "2" }, { "type": "Wifi", "value": "Sim" } ] } }' ``` </details> Com o campo `channels.active` tendo o valor `true`, entende-se que o pedido criado pode ser listado como uma oferta ativa no canal indicado. Dessa forma, após passar por um período de avaliação de conteúdo, o produto aparecerá no(s) site(s) dos canais informados. **Importante:** as requisições mostradas aqui usam API Key porque estão utilizando um tenant de tipo `stenagam.SELLER`, que contém dados de teste. Para utilizar a visão de dados do mundo real é necessário ter um token de acesso proveniente do fluxo de OAuth 2.0, que é melhor explicado no [Guia de autorização de aplicações](https://github.com/luizalabs/dev.magalu.com-docs/blob/main/guia-autorizacao-apps.md). ## Tenants e Perspectivas Até aqui as requisições foram feitas assumindo que o usuário dono da API Key está assumindo a perspectiva de um consumidor. Um usuário da API pode representar diversas organizações ou grupos diferentes (que na API, chamamos de "Tenants"), e pode também acessar a API assumindo a perspectiva de Consumidor ou de Vendedor (o "Tenant Type"). Tenants têm um tipo e um identificador únicos. 
Para visualizar quais tenants você tem acesso: ```bash curl -H "X-API-Key: ${MYAPIKEY}" https://alpha.api.magalu.com/account/v1/whoami/tenants ``` <details> <summary><strong>Resposta JSON:</strong></summary> ```json [ { "uuid": "c2715319-e56d-4594-8299-6b2c9ba6d51a", "type": "maganets.CUSTOMER", "created_at": "2020-12-22T16:08:51.000Z", "updated_at": "2020-12-24T14:52:44.000Z" }, { "uuid": "0c5d9da2-0efb-4a03-956a-344006817630", "type": "maganets.CUSTOMER", "created_at": "2020-12-22T16:08:51.000Z", "updated_at": "2020-12-24T14:52:44.000Z" }, { "uuid": "50407c1c-0f43-49e9-9649-717ce2c53fd6", "type": "maganets.SELLER", "created_at": "2020-12-22T16:08:51.000Z", "updated_at": "2020-12-24T14:52:44.000Z" }, { "uuid": "21fea73c-e244-497a-8540-be0d3c583596", "type": "stenagam.SELLER", "created_at": "2021-07-08T20:48:54.42094604Z", "updated_at": "2021-07-08T20:48:54.420946164Z" }, { "uuid": "000055d5-ca8c-4cd5-bc38-ca5fa0f8e23a", "type": "stenagam.CUSTOMER", "created_at": "2021-07-08T20:48:54.420945556Z", "updated_at": "2021-07-08T20:48:54.420945893Z" } ] ``` </details> Utilizando o tenant certo, você está pronto para explorar outras partes da API. Para alterar a perspectiva (o "tenant") utilizada nas requisições feitas na API, basta adicionar o header `"X-Tenant-ID: ${MYTENANTID}"`, onde `MYTENANTID` é um dos uuids obtidos no passo anterior. 
Por exemplo: ```bash curl -H "X-API-Key: ${MYAPIKEY}" -H "X-Tenant-ID: 21fea73c-e244-497a-8540-be0d3c583596" \ https://alpha.api.magalu.com/maestro/v1/orders?_limit=1 ``` <details> <summary><strong>Resposta JSON:</strong></summary> ```json [ { "uuid": "efb77dcf-d83c-4935-81ac-7be5f37e6cdc", "number": "9014500879663847", "sales_channel": { "code": 53, "description": "ML-APP Android", "organization": { "uuid": "4da25f48-4193-45de-b4ed-9b8c93b7f987", "code": "magazine_luiza", "description": "Magazine Luiza" } }, "customer": { "uuid": "001dc28a-fe4d-482c-a0ef-6c6cdc46f94a", "name": "<NAME>" }, "payment": { "status": { "code": "processing" }, "currency": "BRL" }, "packages": [ { "uuid": "e3ae3598-8034-4374-8eed-bdca8c31d5a0", "seller": { "code": "stenagam_sandbox", "description": "Stenagam Sandbox" }, "amount": 198.99, "created_at": "2021-01-06T07:26:38.000Z", "updated_at": "2021-07-02T12:59:58.000Z" }, { "uuid": "b90a950b-c95e-4b4c-be25-0eff5c764500", "seller": { "code": "stenagam_sandbox", "description": "Stenagam Sandbox" }, "amount": 20.7, "created_at": "2021-01-06T07:26:38.000Z", "updated_at": "2021-07-02T12:59:58.000Z" } ], "created_at": "2021-01-06T07:26:38.000Z", "updated_at": "2021-01-06T07:54:30.000+0000" } ] ``` </details> Vale ressaltar que os únicos tenants que não permitimos com o uso de API Keys são os com `type` = `maganets.SELLER` :-) ## Testando com dados de "rascunho": o universo "stenagam" Se você não trabalha para um Vendedor habilitado na plataforma Magalu (o que talvez seja o caso para boa parte dos desenvolvedores do Brasil :-), existe um tenant especial com o type `stenagam.SELLER` que permite explorar dados da perspectiva de seller. Pense em "stenagam" como um universo paralelo, onde todas as transações e dados gravados não gerem consequências concretas (como pagamentos e entregas de mercadoria) no mundo real. Dados armazenados para este tenant são fictícios e podem ser alterados sem impacto no negócio. 
Utilize este tenant quando quiser revisar o modelo de dados por esta perspectiva, antes de testar mudanças com dados de produção. ## Contribua para a API Magalu Ah, faltou falar uma coisa :-) O processo de desenvolvimento da API Magalu é aberto, e convidamos todos os desenvolvedores interessados a participar do nosso Slack — basta se registrar em https://dev.magalu.com/teaser-form que entramos em contato com um convite. Caso tenha alguma problema ou sugestão durante o uso do portal ou das APIs, podem abrir um bug) em https://github.com/luizalabs/dev.magalu.com/issues. E podem usar <EMAIL> para enviar seus comentários e pedidos especiais. Obrigado! Esperamos que tenha ficado tudo claro o suficiente para você começar a explorar a funcionalidade da API, lembrando que o ponto de partida é o portal em https://alpha.dev.magalu.com -- nos vemos lá! # Changelog - 2021-08-18: - Adiciona prefixo `alpha` nos links da API e Developer Portal - 2021-08-09: - Adiciona /adelpha ao guia - 2021-07-23: v4, alinhando o guia ao estado atual das APIs - Altera as rotas de /account e /maestro de acordo com o modelo de dados atual - Adiciona informações sobre tenants - 2021-07-08: v3, primeira versão pública - Introdução ao /account e /maestro, GET-only - Incorpora API Keys, tenants e introduz dicotomia maganets/stenagam <file_sep>/examples/check_orders/src/services/get.orders.jsx import axios from "axios"; import jwt_decode from "jwt-decode"; const getTenantId = () => { const jwt = localStorage.getItem("jwt"); const decodedJWT = jwt_decode(jwt); var tenants = decodedJWT.tenants.filter(t => t.type === process.env.REACT_APP_TENANT_TYPE) let tenantId if (tenants.length > 0) { tenantId = tenants[0].uuid } return tenantId } export const getOrders = () => { console.log("getting orders"); var config = { method: "get", url: process.env.REACT_APP_OMS_API_URI, headers: { Authorization: `Bearer ${localStorage.getItem("jwt")}`, "x-tenant-id": getTenantId(), }, }; return axios(config) .then((response) 
=> { localStorage.setItem("order", JSON.stringify(response.data)); }) .catch((error) => { console.log(error); }); }; export const getOrderByNumber = (number) => { console.log("getting order by number"); var config = { method: "get", url: `${process.env.REACT_APP_OMS_API_URI}?number=${number}`, headers: { Authorization: `Bearer ${localStorage.getItem("jwt")}`, "x-tenant-id": getTenantId(), }, }; return axios(config) .then((response) => { localStorage.setItem("order", JSON.stringify(response.data)); }) .catch((error) => { console.log(error); }); }; <file_sep>/examples/check_orders/src/components/components/Accordion/Accordion.js import React, { useState } from "react"; import "./Accordion.css"; const Accordion = ({ data }) => { const [active, setActive] = useState(0); const eventHandler = (e, index) => { e.preventDefault(); setActive(index); }; const totalItens = (packages) => { var count = 0; packages.forEach((pack) => { count++; }); return count; }; const sumItens = (packages) => { var summedItens = 0; packages.forEach((pack) => { summedItens = summedItens + pack.amount; }); return summedItens; }; const formatDate = (dateString) => { const options = { year: "numeric", month: "long", day: "numeric" } return new Date(dateString).toLocaleDateString('pt-BR', options) } return ( <div> <form> {data.map((tab, index) => ( <div key={index}> <h3> <button onClick={(e) => eventHandler(e, index)} className={active === index ? "active" : "inactive"} aria-expanded={active === index ? "true" : "false"} aria-disabled={active === index ? "true" : "false"} > <span className="title-wrapper">{formatDate(tab.created_at)}</span> </button> </h3> <div className={active === index ? 
"panel-open" : "panel-close"}> <div className="order-info"> <p>Pedido: {tab.number}</p> <p>Nº de itens: {totalItens(tab.packages)}</p> <p>Valor do Pedido: R$ {sumItens(tab.packages)}</p> <p>Status: {tab.payment.status.code}</p> </div> </div> </div> ))} </form> </div> ); }; export default Accordion;<file_sep>/examples/check_orders/src/utils/auth.js import jwt_decode from "jwt-decode"; import { refreshJWT } from "../services/post.jwt"; const isAuthenticated = () => { const jwt = localStorage.getItem("jwt"); const refreshtoken = localStorage.getItem("refresh_token"); if (jwt !== null) { var decodedJWT = jwt_decode(jwt); } if (refreshtoken !== null) { var decodedRefreshToken = jwt_decode(refreshtoken); } var currentTime = Date.now() / 1000 - 30; if (jwt !== null && currentTime < decodedJWT.exp) { return true; } else { if (jwt !== null && currentTime < decodedRefreshToken.exp) { console.log("Sua sessão expirou"); refreshJWT(); return true; } localStorage.clear(); return false; } }; export default isAuthenticated;
c0294e71e8755c5b2927b59a45ef783b09ee58a5
[ "JavaScript", "Markdown", "Shell" ]
11
JavaScript
alcidesmig/dev.magalu.com-docs
4f9721cba670d9eb945a0bda91c53870dce0b36e
489d367d68e6f1c7138616e77a42a292a0d3abc8
refs/heads/master
<repo_name>sharebright/MagStore<file_sep>/src/MagStore.Web/Models/Product/ViewProductsByCategoryViewModel.cs using System.Collections.Generic; using MagStore.Entities; namespace MagStore.Web.Models.Product { public class ViewProductsByCategoryViewModel { public IEnumerable<Entities.Product> Products { get; set; } public string ProductType { get; set; } public IEnumerable<ProductImage> Images { get; set; } public IDictionary<string, string> Filters { get; set; } } }<file_sep>/src/MagStore.Test/Checkout/Checkingout/There_are_products_in_the_cart.cs using System.Collections.Generic; using FluentAssertions; using MagStore.Entities; using NSubstitute; using NUnit.Framework; namespace MagStore.Test.Checkout.Checkingout { public class There_are_valid_cart_has_been_sent_to_checkout : When_checking_out { protected override void Arrange() { base.Arrange(); var products = new List<string>(); var product = string.Empty; products.Add(product); InputModel.Products = products; InputModel.SubTotal = 1; } [Test] public void Should_resolve_to_the_Checkout_view() { const string expectedView = "Checkout"; Result.ViewName.Should().Be(expectedView, "Expected to Checkout as the view."); } [Test] public void Should_request_product_data_from_the_repository() { Shop.Received().GetCoordinator<Product>(); Coordinator.Received().Load(Arg.Is(InputModel.Products)); } } }<file_sep>/src/MagStore.Web/Models/Catalogue/CreateCatalogueViewModel.cs using System.Collections.Generic; using MagStore.Entities.Enums; namespace MagStore.Web.Models.Catalogue { public class CreateCatalogueViewModel { public string Id { get; set; } public string Name { get; set; } public IEnumerable<Entities.Product> Products { get; set; } public DiscountType DiscountType { get; set; } public decimal DiscountAmount { get; set; } public IEnumerable<Entities.Promotion> Promotions { get; set; } } }<file_sep>/src/MagStore.Test/Checkout/InitiatingPayment/When_processing_a_payment_with_an_empty_list_of_products.cs using System; using 
FluentAssertions; using NUnit.Framework; namespace MagStore.Test.Checkout.InitiatingPayment { public class When_processing_a_payment_with_an_empty_list_of_products : When_initiating_a_payment { private TestDelegate testDelegate; protected override void Arrange() { base.Arrange(); InputModel.Total = 1; } protected override void Act() { testDelegate = () => ControllerUnderTest.InitiatePayment(InputModel); } [Test] public void Should_throw_invalidoperationexception_if_the_initiatepaymentmodel_contains_an_empty_list_of_products() { testDelegate = () => ControllerUnderTest.InitiatePayment(InputModel); var message = Assert.Throws<InvalidOperationException>( testDelegate, "There are products attached to this payment initiation.").Message; message.Should().Be("There are no products to purchase attached to this payment initiation."); } } }<file_sep>/src/MagStore/Entities/Payment.cs using MagStore.Infrastructure.Interfaces; namespace MagStore.Entities { public class Payment : IRavenEntity { public string Id { get; set; } } } <file_sep>/src/MagStore.Web/Models/ShoppingCart/ShoppingCartGetViewModel.cs using System.Collections.Generic; using MagStore.Entities; namespace MagStore.Web.Models.ShoppingCart { public class ShoppingCartGetViewModel { public Cart Cart { get; set; } public IEnumerable<Entities.Product> Products { get; set; } public string Quantity { get; set; } } }<file_sep>/src/MagStore/Entities/Order.cs using System.Collections.Generic; using MagStore.Infrastructure.Interfaces; namespace MagStore.Entities { public class Order : IRavenEntity { public string Id { get; set; } public string PaymentId { get; set; } public string CustomerId { get; set; } public IEnumerable<string> Products { get; set; } } }<file_sep>/src/MagStore/Entities/User.cs using System; using System.Collections.Generic; using MagStore.Entities.Enums; using MagStore.Infrastructure.Interfaces; namespace MagStore.Entities { public class User : IRavenEntity { public string Id { get; set; } public string 
ApplicationName { get; set; } public string Username { get; set; } public string PasswordHash { get; set; } public string PasswordSalt { get; set; } public string Title { get; set; } public string FullName { get; set; } public string Email { get; set; } public DateTime DateCreated { get; set; } public DateTime? DateLastLogin { get; set; } public IList<string> Roles { get; set; } public User() { ShoppingCart = new Cart { Id = Guid.NewGuid().ToString(), Products = new List<Product>(), Promotions = new List<string>() }; Roles = new List<string>(); Id = "authorization/users/"; // db assigns id } public string PhoneNumber { get; set; } public string MobileNumber { get; set; } public DateTime DateOfBirth { get; set; } public string HouseNumber { get; set; } public string Address1 { get; set; } public string Address2 { get; set; } public string Address3 { get; set; } public string Town { get; set; } public string State { get; set; } public string PostCode { get; set; } public string Country { get; set; } public bool AgreedToMarketing { get; set; } public AccountStatus AccountStatus { get; set; } public AccountLevel AccountLevel { get; set; } public Cart ShoppingCart { get; set; } } } <file_sep>/src/MagStore/Infrastructure/RavenRepository.cs using System; using System.Collections.Generic; using System.Linq; using System.Linq.Expressions; using MagStore.Entities; using MagStore.Infrastructure.Interfaces; using Raven.Client; using Raven.Client.Document; using Raven.Client.Linq; namespace MagStore.Infrastructure { public class RavenRepository : IRepository { private readonly IDocumentStore store; private IDocumentSession session; public RavenRepository(IDocumentStore store) { this.store = store; } private void BuildNewSession() { session = store.OpenSession(); } public void ForceNewSession() { CurrentSession.Dispose(); BuildNewSession(); } public IDocumentSession CurrentSession { get { return session ?? 
(session = store.OpenSession()); } } public T Load<T>(string id) where T : IRavenEntity { T load; try { load = CurrentSession.Load<T>(id); } catch (InvalidOperationException e) { ForceNewSession(); load = CurrentSession.Load<T>(id); } CurrentSession.Dispose(); return load; } public IEnumerable<T> Load<T>(IEnumerable<string> id) where T : IRavenEntity { IEnumerable<T> load; try { load = CurrentSession.Load<T>(id); } catch (InvalidOperationException e) { ForceNewSession(); load = CurrentSession.Load<T>(id); } CurrentSession.Dispose(); return load; } public int Count<T>() where T : IRavenEntity { var count = session.Query<T>().Count(); session.Dispose(); return count; } public void Add<T>(T item) where T : IRavenEntity { CurrentSession.Store(item); } public void Delete<T>(T item) where T : IRavenEntity { CurrentSession.Delete(item); CurrentSession.SaveChanges(); session.Dispose(); } public void Save() { CurrentSession.SaveChanges(); session.Dispose(); } public void SaveAndCommit<T>(T item) where T : IRavenEntity { try { CurrentSession.Store(item, item.Id); CurrentSession.SaveChanges(); } catch (InvalidOperationException e) { ForceNewSession(); CurrentSession.Store(item, item.Id); CurrentSession.SaveChanges(); } CurrentSession.Dispose(); } public IList<T> List<T>() { List<T> project; try { project = CurrentSession.Query<T>().ToList(); } catch (InvalidOperationException e) { ForceNewSession(); project = CurrentSession.Query<T>().ToList(); } foreach (var product in project.Where(p => p.GetType() == typeof(Product)) .Select(p => (p as Product)) .Where(product => product != null)) { product.AgeRange = product.AgeRange ?? new int[] {}; product.Promotions = product.Promotions ?? new List<string>(); } foreach (var promotion in project.Where(p => p.GetType() == typeof (Promotion)) .Select(p => (p as Promotion)) .Where(promotion => promotion != null)) { promotion.Restrictions = promotion.Restrictions ?? 
new List<string>(); } session.Dispose(); return project; } public IRavenQueryable<T> Query<T>() { IRavenQueryable<T> query; try { query = CurrentSession.Query<T>(); } catch (Exception) { ForceNewSession(); query = CurrentSession.Query<T>(); } return query; } public ILoaderWithInclude<T> Include<T>(Expression<Func<T, object>> path) { return CurrentSession.Include(path); } } }<file_sep>/src/MagStore.Web/Controllers/ShoppingCartController.cs using System; using System.Collections.Generic; using System.Linq; using System.Web.Mvc; using MagStore.Entities; using MagStore.Infrastructure.Interfaces; using MagStore.Web.Models.ShoppingCart; using SagePayMvc; namespace MagStore.Web.Controllers { public class ShoppingCartController : Controller { private readonly IShop shop; private readonly ITransactionRegistrar registrar; public bool UserIsAuthenticated { get { return User.Identity.IsAuthenticated; } } private User GetCurrentUser() { return Session["CurrentUser"] as User; } public ShoppingCartController(IShop shop, ITransactionRegistrar registrar) { this.shop = shop; this.registrar = registrar; } [HttpGet] public ActionResult ShoppingCart() { var user = GetCurrentUser(); var shoppingCartViewModel = new ShoppingCartGetViewModel { Cart = user.ShoppingCart, Products = user.ShoppingCart.Products }; return View(shoppingCartViewModel); } [HttpPost] public ActionResult AddToCart(AddToCartPostInputModel inputModel) { var user = GetCurrentUser(); var product = shop.GetCoordinator<Product>() .Query<Product>() .Single(p => p.Code == inputModel.Code && p.Colour == inputModel.Colour && p.Size == inputModel.Size); user.ShoppingCart.Products.Add(product); return RedirectToAction("ShoppingCart"); } [HttpPost] public ActionResult UpdateProductQuantity(UpdateProductQuantityPostInputModel inputModel) { var user = GetCurrentUser(); var productsThatHaveChanged = user.ShoppingCart.Products.Where(p => p.Id == inputModel.Id).ToList(); var changeAmount = productsThatHaveChanged.Count - 
inputModel.Quantity; if (changeAmount > 0) { for (var i = 0; i < changeAmount; i++) productsThatHaveChanged.Remove(productsThatHaveChanged.First(p => p.Id == inputModel.Id)); } else if (changeAmount < 0) { var abs = Math.Abs(changeAmount); for (var i = 0; i < abs; i++) productsThatHaveChanged.Add(productsThatHaveChanged.First(p => p.Id == inputModel.Id)); } else { return RedirectToAction("ShoppingCart"); } var unchangedProduct = user.ShoppingCart .Products .Where(p => p.Id != inputModel.Id) .ToList(); var productsToSave = new List<Product>(); //unchangedProduct.Union(productsThatHaveChanged).ToList(); productsToSave.AddRange(unchangedProduct); productsToSave.AddRange(productsThatHaveChanged); user.ShoppingCart.Products = productsToSave; return RedirectToAction("ShoppingCart"); } [HttpPost] public ActionResult RemoveProductFromBasket(RemoveProductFromBasketPostInputModel inputModel) { var user = GetCurrentUser(); var unchangedProduct = user.ShoppingCart .Products .Where(p => p.Id != inputModel.Id) .ToList(); var productsToSave = new List<Product>(); //unchangedProduct.Union(productsThatHaveChanged).ToList(); productsToSave.AddRange(unchangedProduct); user.ShoppingCart.Products = productsToSave; return RedirectToAction("ShoppingCart"); } [HttpGet] public ActionResult Checkout(CheckoutGetInputModel inputModel) { if (UserIsAuthenticated) { var products = GetCurrentUser().ShoppingCart.Products; return View(new CheckoutViewModel {Products = products}); } return RedirectToAction("LogOn", "Account"); } [HttpPost] public ActionResult Checkout(CheckoutPostInputModel inputModel) { /* * Get all the products in the basket : DONE * TODO: Apply intrinsic discounts * Display product summary : DONE * TODO: Offer to receive promo codes */ if (!inputModel.Products.Any() || inputModel.SubTotal == 0) { return View("Error"); } // TODO: Uncomment after tests - need to use a different technique for checking authentication // if (!UserIsAuthenticated) // { // return 
RedirectToAction("LogOn", "Account"); // } var products = shop.GetCoordinator<Product>().Load(inputModel.Products).ToList(); var subTotal = inputModel.SubTotal; var deliveryCharge = inputModel.DeliveryCharge; return View("Checkout", new CheckoutViewModel { Products = products, SubTotal = subTotal, DeliveryCharge = deliveryCharge }); } [HttpPost] public ActionResult InitiatePayment(InitiatePaymentPostInputModel inputModel) { ValidateInputModel(inputModel); return View(new InitiatePaymentViewModel()); // // var context = ControllerContext.RequestContext; // var vendorTxCode = DateTime.Now.Ticks.ToString(); // var name = "The Name"; // var basket = new ShoppingBasket(name) // { // new BasketItem(1, "This is the Description", 22m), // }; // // // var billingAddress = new Address // { // Address1 = "Address1", // Address2 = "Address2", // City = "City", // Country = "GB", // Firstnames = "First", // Surname = "Last", // PostCode = "PostCode", // // Phone = "0912837482" // }; // // var deliveryAddress = new Address(); // var customerEmail = "<EMAIL>"; // // var response = registrar.Send( // context, // vendorTxCode, // basket, // billingAddress, // billingAddress, // customerEmail // ); // } private void ValidateInputModel(InitiatePaymentPostInputModel inputModel) { if (!(inputModel.Total > 0)) { throw new InvalidOperationException("The total for this payment initiation must have a positive value."); } if (!inputModel.Products.Any()) { throw new InvalidOperationException("There are no products to purchase attached to this payment initiation."); } } [HttpPost] public ActionResult SaveAddresses(SaveAddressesPostInputModel inputModel) { throw new NotImplementedException(); } } }<file_sep>/src/MagStore/Infrastructure/Interfaces/IRepository.cs using System; using System.Collections.Generic; using System.Linq.Expressions; using Raven.Client; using Raven.Client.Document; using Raven.Client.Linq; namespace MagStore.Infrastructure.Interfaces { public interface IRepository { 
IDocumentSession CurrentSession { get; } void ForceNewSession(); T Load<T>(string id) where T : IRavenEntity; IEnumerable<T> Load<T>(IEnumerable<string> ids) where T : IRavenEntity; int Count<T>() where T : IRavenEntity; void Add<T>(T item) where T : IRavenEntity; void Delete<T>(T item) where T : IRavenEntity; void Save(); ILoaderWithInclude<T> Include<T>(Expression<Func<T, object>> path); IList<T> List<T>(); void SaveAndCommit<T>(T item) where T : IRavenEntity; IRavenQueryable<T> Query<T>(); } }<file_sep>/src/MagStore.Test/Checkout/Checkingout/There_is_a_zero_sub_total_the_cart.cs using System.Collections.Generic; using FluentAssertions; using NUnit.Framework; namespace MagStore.Test.Checkout.Checkingout { public class There_is_a_zero_sub_total_cart : When_checking_out { protected override void Arrange() { base.Arrange(); var products = new List<string>(); var product = string.Empty; products.Add(product); InputModel.Products = products; InputModel.SubTotal = 0; } [Test] public void Should_resolve_to_the_Error_view_if_there_is_a_zero_sub_total_in_the_cart() { const string expectedView = "Error"; Result.ViewName.Should().Be(expectedView, "Expected zero sub total on the model."); } } }<file_sep>/src/MagStore/Infrastructure/IRepository.cs using System; using Raven.Client; namespace MagStore.Data.Interfaces { public interface IRepository { T Load<T>(Guid id) where T : IRavenEntity; void Add<T>(T item) where T : IRavenEntity; void Delete<T>(T item) where T : IRavenEntity; void Save(); IDocumentSession CurrentSession { get; } int Count<T>() where T : IRavenEntity; void ForceNewSession(); } }<file_sep>/src/MagStore/UserStrings/SRValueAttribute.cs using System; namespace MagStore.UserStrings { [AttributeUsage(AttributeTargets.Field|AttributeTargets.Enum)] public class SrValueAttribute : Attribute { public string Value; public SrValueAttribute(string Value) { this.Value = Value; } } } <file_sep>/src/MagStore.Web/Models/Product/ShowProductViewModel.cs using 
System.Collections.Generic; using MagStore.Entities; namespace MagStore.Web.Models.Product { public class ShowProductViewModel { public Entities.Product Product { get; set; } public IEnumerable<KeyValuePair<string, Entities.Product>> ProductVariants { get; set; } public Dictionary<string, string> Filters { get; set; } public IEnumerable<ProductImage> ProductImages { get; set; } public IEnumerable<string> AvailableColours { get; set; } public IEnumerable<string> AvailableSizes { get; set; } } }<file_sep>/src/MagStore.Web/Models/ShoppingCart/CheckoutPostInputModel.cs using System.Collections.Generic; namespace MagStore.Web.Models.ShoppingCart { public class CheckoutPostInputModel { public IEnumerable<string> Products { get; set; } public decimal DeliveryCharge { get; set; } public decimal SubTotal { get; set; } } }<file_sep>/src/MagStore/Entities/Review.cs namespace MagStore.Entities { public class Review { public string Id { get; set; } public string Author { get; set; } public string Content { get; set; } public string DateTime { get; set; } public bool Authorised { get; set; } } } <file_sep>/src/MagStore/Entities/Enums/ProductType.cs namespace MagStore.Entities.Enums { public enum ProductType { Blazers, Blouses, Bottoms, Bras, Caps, Cardigans, Dresses, Footwear, Hats, Jackets, Jeans, Jumpers, Knickers, Pants, Sandals, Scarves, Shirts, Shoes, Shorts, Skirts, Slippers, Socks, Stockings, Tights, Tops, Trainers, Trousers, Tshirts, Underwear, Vests } }<file_sep>/src/MagStore.Web/Models/Product/CreatePhotoInputModel.cs using System.Web; namespace MagStore.Web.Models.Product { public class CreatePhotoInputModel { public string[] PhotoType { get; set; } public HttpPostedFileBase[] File { get; set; } } }<file_sep>/src/MagStore.Web/Global.asax.cs using System.Reflection; using System.Web.Mvc; using System.Web.Routing; using System.Web.Security; using Castle.MicroKernel.Registration; using Castle.Windsor; using MagStore.Azure; using MagStore.Indexes; using 
MagStore.Infrastructure; using MagStore.Infrastructure.Interfaces; using MagStore.Provider; using MagStore.Web.Infrastructure; using MagStore.Web.Models.Product; using Microsoft.Practices.ServiceLocation; using Raven.Client; using Raven.Client.Document; using Raven.Client.Indexes; using SagePayMvc; using Component = Castle.MicroKernel.Registration.Component; namespace MagStore.Web { // Note: For instructions on enabling IIS6 or IIS7 classic mode, // visit http://go.microsoft.com/?LinkId=9394801 public class MvcApplication : System.Web.HttpApplication { public static IWindsorContainer Container; public static void RegisterGlobalFilters(GlobalFilterCollection filters) { filters.Add(new HandleErrorAttribute()); } public static void RegisterRoutes(RouteCollection routes) { routes.IgnoreRoute("{resource}.axd/{*pathInfo}"); routes.IgnoreRoute("{*favicon}", new { favicon = @"(.*/)?favicon.ico(/.*)?" }); routes.MapRoute( "Default", // Route name "{controller}/{action}/{id}", // URL with parameters new { controller = "Home", action = "Index", id = UrlParameter.Optional } // Parameter defaults ); } protected void Application_Start() { Container = new WindsorContainer(); // Common Service Locator ServiceLocator.SetLocatorProvider(() => new WindsorServiceLocator(Container)); // RavenDB embedded Container.Register(Component.For<IDocumentStore>().UsingFactoryMethod(GetDocumentStore).LifeStyle.Singleton); Container.Register(Component.For<IDocumentSession>().UsingFactoryMethod(x => x.Resolve<IDocumentStore>().OpenSession()).LifestylePerWebRequest().OnDestroy(x => x.SaveChanges())); Container.Register(Component.For<RoleProvider>().ImplementedBy<RavenDbRoleProvider>().LifeStyle.Singleton); Container.Register(Component.For<IRepository>().ImplementedBy<RavenRepository>().LifestylePerWebRequest()); Container.Register(Component.For<IShop>().ImplementedBy<Shop>().LifeStyle.Singleton); 
Container.Register(Component.For<IStorageAccessor>().UsingFactoryMethod(GetStorageAccessor).LifeStyle.PerWebRequest); Container.Register(Component.For<ITransactionRegistrar>().ImplementedBy<TransactionRegistrar>().LifestylePerWebRequest()); ControllerBuilder.Current.SetControllerFactory(new WindsorControllerFactory(Container)); Container.Register(Classes.FromAssembly(Assembly.GetExecutingAssembly()).BasedOn<IController>().LifestyleTransient()); AreaRegistration.RegisterAllAreas(); RegisterGlobalFilters(GlobalFilters.Filters); RegisterRoutes(RouteTable.Routes); ModelBinders.Binders.Add(typeof(EditProductInputModel), new TagPropertyModelBinder()); ModelBinders.Binders.Add(typeof(CreateProductInputModel), new TagPropertyModelBinder()); } protected void Session_End() { Container.Resolve<IFormsAuthenticationService>().SignOut(); } protected void Application_End() { var documentStore = Container.Resolve<IDocumentStore>(); documentStore.Dispose(); Container.Dispose(); } private IDocumentStore GetDocumentStore() { var documentStore = new DocumentStore { ApiKey = "<KEY>", Url = "https://ec2-eu4.cloudbird.net/databases/c818ddc6-dc4b-4b57-a439-4329fff0e61b.rdbtest-mag" }; documentStore.Initialize(); IndexCreation.CreateIndexes(typeof(Products_FullText).Assembly, documentStore); return documentStore; } private IStorageAccessor GetStorageAccessor() { var storageAccessor = new StorageAccessor ( "magshopstrg", "H3g2iG5XyUzX5BhUqBtw5VRtdSN++0aNhXDhKHpEJe2kDh/oSEOGbr<KEY> ); return storageAccessor; } } }<file_sep>/src/MagStore.Web/Models/Account/CreateRoleViewModel.cs using MagStore.Entities; namespace MagStore.Web.Models.Account { public class CreateRoleViewModel { public User User { get; set; } } }<file_sep>/src/MagStore.Web/TagPropertyModelBinder.cs using System.ComponentModel; using System.Linq; using System.Web.Mvc; namespace MagStore.Web { public class TagPropertyModelBinder : DefaultModelBinder { protected override void BindProperty(ControllerContext controllerContext, 
ModelBindingContext bindingContext, PropertyDescriptor propertyDescriptor) { if (propertyDescriptor.Name == "Tags") { var tags = controllerContext.HttpContext.Request.Form["Tags"]; propertyDescriptor.SetValue(bindingContext.Model, tags.Split(',').Select(x => x.Trim())); } else base.BindProperty(controllerContext, bindingContext, propertyDescriptor); } } }<file_sep>/src/MagStore.StorageAccessor/IStorageAccessor.cs using System; using System.IO; using Microsoft.WindowsAzure.Storage.Blob; namespace MagStore.Azure { public interface IStorageAccessor { CloudBlobClient GetBlobClient(); Uri AddBlobToResource(string fileName, Stream inputStream); CloudBlobContainer Resources { get; } void PrepareResourcesContainer(); } }<file_sep>/src/MagStore.Web/Controllers/ShopController.cs using System; using System.Collections.Generic; using System.Linq; using System.Web; using System.Web.Mvc; using MagStore.Azure; using MagStore.Entities; using MagStore.Infrastructure.Interfaces; using MagStore.Web.Models.Shop; namespace MagStore.Web.Controllers { public class ShopController : Controller { private readonly IShop shop; private IStorageAccessor storageAccessor; public ShopController(IShop shop, IStorageAccessor storageAccessor) { this.shop = shop; this.storageAccessor = storageAccessor; } [Authorize(Roles = "Administrator")] public ActionResult Manage() { return View(); } [HttpGet] public ActionResult UpdateCloudbird() { throw new System.NotImplementedException(); } [HttpGet] public ActionResult UpdateAzure() { throw new System.NotImplementedException(); } [HttpGet] public ActionResult ChangeShopSettings() { var settings = shop.GetSettings(); var logo = new ProductImage(); if (!string.IsNullOrEmpty(settings.Logo)) { logo = shop.GetCoordinator<ProductImage>().Load(settings.Logo); } return View(new ShopSettingsViewModel(settings, logo)); } [HttpPost] public ActionResult ChangeShopSettings(ChangeShopSettingsPostInputModel inputModel) { IShopSettings settings = shop.GetSettings(); 
settings.CurrencySymbol = inputModel.CurrencySymbol; settings.CurrencyFormat = inputModel.CurrencyFormat; settings.CurrencyConversion = inputModel.CurrencyConversion; settings.DeliveryCharge = inputModel.DeliveryCharge; settings.Name = inputModel.Name; settings.TagLine = inputModel.TagLine; settings.Logo = inputModel.Logo == null ? string.Empty : CreateImages(new[] { new KeyValuePair<string, HttpPostedFileBase>("Thumb", inputModel.Logo) }) .First(); shop.UpdateSettings(settings); return RedirectToAction("ChangeShopSettings"); } public ActionResult DeleteLogoFromSettings(DeleteLogoFromSettingsPostInputModel inputModel) { var settings = shop.GetSettings(); settings.Logo = string.Empty; shop.UpdateSettings(settings); return RedirectToAction("ChangeShopSettings"); } private IEnumerable<string> CreateImages(IEnumerable<KeyValuePair<string, HttpPostedFileBase>> images) { var result = new List<string>(); foreach (var image in images) { var id = Guid.NewGuid().ToString(); CreateImage(image.Value, image.Key, id); result.Add(id); } return result; } private void CreateImage(HttpPostedFileBase image, string imageType, string fileName) { var inputStream = image.InputStream; var uri = storageAccessor.AddBlobToResource(fileName, inputStream); var img = new ProductImage { Id = fileName, ImageType = imageType, ImageUrl = uri.ToString() }; shop.GetCoordinator<ProductImage>().Save(img); } } }<file_sep>/src/MagStore.Web/Models/Shop/ShopSettingsViewModel.cs using MagStore.Entities; using MagStore.Infrastructure.Interfaces; namespace MagStore.Web.Models.Shop { public class ShopSettingsViewModel { public ShopSettingsViewModel(IShopSettings settings, ProductImage logo) { Id = settings.Id; Name = settings.Name; TagLine = settings.TagLine; CurrencySymbol = settings.CurrencySymbol; CurrencyFormat = settings.CurrencyFormat; CurrencyConversion = settings.CurrencyConversion; DeliveryCharge = settings.DeliveryCharge; Logo = logo; } public string Id { get; set; } public string Name { get; set; } 
public string TagLine { get; set; } public string CurrencySymbol { get; set; } public string CurrencyFormat { get; set; } public decimal CurrencyConversion { get; set; } public decimal DeliveryCharge { get; set; } public ProductImage Logo { get; set; } } }<file_sep>/src/MagStore.Test/Products/WhenAddingAProductToTheCart.cs using System; using MagStore.Entities; using NUnit.Framework; namespace MagStore.Test.Products { [TestFixture] public class WhenAddingAProductToTheCart { private Product product; private Cart cart; [SetUp] public void SetUp() { product = new Product { Id = Guid.NewGuid().ToString(), Name = "Jumper", Price = 10 }; cart = new Cart { Id = Guid.NewGuid().ToString(), }; } [Test] public void ShouldIncreaseTheTotalPriceOfTheCartTo10Pounds() { // var orderLines = new List<OrderLine> // { // new OrderLine {Products = new List<Product> {product}} // }; // // cart.OrderLines = orderLines; // // cart.Total.Should().Be(10); } } } <file_sep>/src/MagStore.Web/Controllers/ImagesController.cs using System; using System.Collections.Generic; using System.ComponentModel.DataAnnotations; using System.Linq; using System.Web; using System.Web.Mvc; using MagStore.Azure; using MagStore.Entities; using MagStore.Entities.Enums; using MagStore.Infrastructure.Interfaces; namespace MagStore.Web.Controllers { public class ImagesController : Controller { private readonly IShop shop; private readonly IStorageAccessor storageAccessor; public ImagesController(IShop shop, IStorageAccessor storageAccessor) { this.shop = shop; this.storageAccessor = storageAccessor; } [HttpGet] public ActionResult CreateImage() { return View(new ImageCreationGetModel()); } [HttpGet] public ActionResult EditImage(ImageEditGetInputModel getInputModel) { var image = shop.GetCoordinator<ProductImage>() .Load(getInputModel.Id); return View(new EditImageViewModel { Image = image }); } [HttpPost] public ActionResult CreateImage(ImageCreationPostModel postModel) { //ValidateModel(postModel); var fileName = 
Guid.NewGuid().ToString(); if (ModelState.IsValid) { CreateImage(postModel.Image, postModel.ImageType.ToString(), fileName); } return RedirectToAction("EditImage", new { Id = fileName }); //View(new ImageCreationGetModel()); } [HttpPost] public ActionResult EditImage(ImageEditPostModel postModel) { if (ModelState.IsValid) { var fileName = postModel.Id; UpdateImage(postModel, fileName); } return RedirectToAction( "EditImage", new {postModel.Id }); //View(new ImageEditGetInputModel()); } [HttpGet] public ActionResult DeleteImageFromProduct(string productId, string imageId) { var product = shop.GetCoordinator<Product>().Load(productId); product.Images = product.Images.Except(new[] {imageId}); shop.GetCoordinator<Product>().Save(product); return RedirectToAction("EditProduct", "Product", new { Id = productId }); } private void CreateImage(HttpPostedFileBase image, string imageType, string fileName) { var inputStream = image.InputStream; var uri = storageAccessor.AddBlobToResource(fileName, inputStream); var img = new ProductImage { Id = fileName, ImageType = imageType, ImageUrl = uri.ToString() }; shop.GetCoordinator<ProductImage>().Save(img); } private void UpdateImage(ImageEditPostModel postModel, string fileName) { var hasChanges = postModel.Image != null; Uri uri = null; if (hasChanges) { var inputStream = postModel.Image.InputStream; uri = storageAccessor.AddBlobToResource(fileName, inputStream); } var image = shop.GetCoordinator<ProductImage>().Load(postModel.Id); image.ImageType = postModel.ImageType.ToString(); if (uri != null) image.ImageUrl = uri.ToString(); shop.GetCoordinator<ProductImage>() .Save(image); } } public class ImageEditGetInputModel { public string Id { get; set; } } public class EditImageViewModel { public ProductImage Image { get; set; } public IEnumerable<string> ImageTypes { get { return new[] { "" }.Union(Enum.GetNames(typeof(ImageType))); } } } public class ImageEditPostModel : ImageCreationPostModel { public string Id { get; set; } } 
public class ImageCreationPostModel { [Required(AllowEmptyStrings = false, ErrorMessage = "Please select a image type.")] public ImageType ImageType { get; set; } public HttpPostedFileBase Image { get; set; } } public class ImageCreationGetModel { public IEnumerable<string> ImageTypes { get { return new[] { "" }.Union(Enum.GetNames(typeof(ImageType))); } } } } <file_sep>/src/MagStore.Web/Controllers/CatalogueController.cs using System.Collections.Generic; using System.Linq; using System.Web.Mvc; using MagStore.Entities; using MagStore.Infrastructure.Interfaces; using MagStore.Web.Models.Catalogue; using MagStore.Web.Models.Product; namespace MagStore.Web.Controllers { public class CatalogueController : Controller { private readonly IShop shop; public CatalogueController(IShop shop) { this.shop = shop; } [HttpPost] public ActionResult ViewProductsInCatalogue(string id) { var catalogue = shop.GetCoordinator<Catalogue>().Load(id); return View(new ProductsViewModel { Catalogue = catalogue, Products = OrderedProducts(id) }); } private IEnumerable<Product> OrderedProducts(string id) { var products = shop.GetCoordinator<Product>() .List() .Where(x => x.Catalogue == id) .OrderByDescending(p => p.Code) .ThenBy(p => p.Id); var enumerator = products.GetEnumerator(); while (enumerator.MoveNext()) { yield return enumerator.Current; } } public ActionResult ViewCatalogues() { var catalogues = shop.GetCoordinator<Catalogue>().List().OrderBy(c => c.Name).ToList(); return View(new CataloguesViewModel { Catalogues = catalogues }); } [HttpGet] public ActionResult CreateCatalogue() { return View(new CreateCatalogueViewModel()); } [HttpPost] public ActionResult CreateCatalogue(CreateCatalogueInputModel inputModel) { shop.GetCoordinator<Catalogue>().Save(new Catalogue { Id=inputModel.Id, Name=inputModel.Name, DiscountAmount = inputModel.DiscountAmount, DiscountType = inputModel.DiscountType, Promotions = new List<string>() }); return RedirectToAction("ViewCatalogues", "Catalogue"); // 
View(new CreateCatalogueViewModel()); } [HttpGet] public ActionResult EditCatalogue(EditCatalogueGetInputModel getInputModel) { var catalogue = shop.GetCoordinator<Catalogue>().Load(getInputModel.Id); var viewModel = new EditCatalogueViewModel { Id = catalogue.Id, Name = catalogue.Name, DiscountType = catalogue.DiscountType, DiscountAmount = catalogue.DiscountAmount, Promotions = catalogue.Promotions }; return View(viewModel); } [HttpPost] public ActionResult EditCatalogue(EditCataloguePostInputModel postInputModel) { shop.GetCoordinator<Catalogue>() .Save(UpdateCatalogue(postInputModel)); return RedirectToAction("EditCatalogue", new {postInputModel.Id }); } private Catalogue UpdateCatalogue(EditCataloguePostInputModel postInputModel) { var catalogue = shop.GetCoordinator<Catalogue>().Load(postInputModel.Id); catalogue.Name = postInputModel.Name; catalogue.DiscountAmount = postInputModel.DiscountAmount; catalogue.DiscountType = postInputModel.DiscountType; catalogue.Promotions = postInputModel.Promotions; return catalogue; } public ActionResult DeleteCatalogue(DeleteCatalogueInputModel inputModel) { shop.GetCoordinator<Catalogue>().Delete(shop.GetCoordinator<Catalogue>().Load(inputModel.Id)); return RedirectToAction("ViewCatalogues"); } } } <file_sep>/src/MagStore.Web/Models/Catalogue/CataloguesViewModel.cs using System.Collections.Generic; namespace MagStore.Web.Models.Catalogue { public class CataloguesViewModel { public IList<Entities.Catalogue> Catalogues { get; set; } // public string Id { get { return catalogue.Id; } } // // public string Name { get { return catalogue.Name; } } // public decimal DiscountAmount { get { return catalogue.DiscountAmount; } } // public DiscountType DiscountType { get { return catalogue.DiscountType; } } // public IEnumerable<Product> Products { get { return catalogue.Products; } } // public IEnumerable<Promotion> Promotions { get { return catalogue.Promotions; } } } }<file_sep>/src/MagStore/Entities/Product.cs using 
System.Collections.Generic; using MagStore.Entities.Enums; using MagStore.Infrastructure.Interfaces;

namespace MagStore.Entities
{
    /// <summary>
    /// Catalogue product persisted as a RavenDB document (IRavenEntity).
    /// Related data (reviews, images, promotions, catalogue) is held as
    /// document-id strings rather than object references.
    /// </summary>
    public class Product : IRavenEntity
    {
        // Backing field so the Reviews getter can substitute an empty list for null.
        private IEnumerable<string> reviews;

        public string Id { get; set; }
        public string Code { get; set; }
        public string Name { get; set; }
        public string Description { get; set; }
        public string Specification { get; set; }
        public string Colour { get; set; }
        public string Size { get; set; }
        public string Gender { get; set; }
        public string Brand { get; set; }
        public string Catalogue { get; set; }
        public string Supplier { get; set; }
        public int Rating { get; set; }

        // Never returns null. NOTE(review): allocates a fresh empty List on every
        // read while the backing field is null; the result is not cached.
        public IEnumerable<string> Reviews { get { return reviews ?? new List<string>(); } set { reviews = value; } }

        public IEnumerable<string> Images { get; set; }
        public decimal Price { get; set; }
        public ProductType ProductType { get; set; }
        public int[] AgeRange { get; set; }
        public DiscountType DiscountType { get; set; }
        public decimal DiscountAmount { get; set; }
        public IEnumerable<string> Promotions { get; set; }
        public IEnumerable<string> Tags { get; set; }
    }
}<file_sep>/src/MagStore.Web/Models/ShoppingCart/InitiatePaymentPostInputModel.cs
using System.Collections.Generic;

namespace MagStore.Web.Models.ShoppingCart
{
    // Form-post model for starting a payment: the product document ids in the
    // basket plus the posted basket total.
    public class InitiatePaymentPostInputModel
    {
        public IEnumerable<string> Products { get; set; }
        public decimal Total { get; set; }
    }
}<file_sep>/src/MagStore.Test/Payments/When_making_an_invalid_request.cs
using System; using FluentAssertions; using NUnit.Framework;

namespace MagStore.Test.Payments
{
    // Each test nulls one required member of the auth request and asserts that
    // Authorise throws ArgumentNullException naming that member.
    [TestFixture]
    public class When_making_an_invalid_request : MakingPaymentsSetUpFixture
    {
        [Test]
        public void The_payment_processor_should_throw_argumentnullexception_for_an_auth_request_that_is_null()
        {
            AuthRequest = null;
            var paramName = Assert.Throws<ArgumentNullException>(
                () => PaymentProcessor.Authorise(AuthRequest),
                "An null request is required to make this test valid.")
                .ParamName;
            paramName.Should().Be("authRequest");
        }

        [Test]
public void The_payment_processor_should_throw_argumentnullexception_for_an_auth_request_with_null_context() { AuthRequest.Context = null; var paramName = Assert.Throws<ArgumentNullException>( () => PaymentProcessor.Authorise(AuthRequest), "A null context is required to make this test valid.") .ParamName; paramName.Should().Be("authRequest.Context"); } [Test] public void The_payment_processor_should_throw_argumentnullexception_for_an_auth_request_with_null_transaction_id() { AuthRequest.TransactionId = null; var paramName = Assert.Throws<ArgumentNullException>( () => PaymentProcessor.Authorise(AuthRequest), "A null transaction id is required to make this test valid.") .ParamName; paramName.Should().Be("authRequest.TransactionId"); } [Test] public void The_payment_processor_should_throw_argumentnullexception_for_an_auth_request_with_a_null_product_list() { AuthRequest.Products = null; var paramName = Assert.Throws<ArgumentNullException>( () => PaymentProcessor.Authorise(AuthRequest), "A null product list is required to make this test valid.") .ParamName; paramName.Should().Be("authRequest.Products"); } [Test] public void The_payment_processor_should_throw_argumentnullexception_for_an_auth_request_with_a_null_customer_email() { AuthRequest.CustomerEmail = null; var paramName = Assert.Throws<ArgumentNullException>( () => PaymentProcessor.Authorise(AuthRequest), "A null customer email is required to make this test valid.") .ParamName; paramName.Should().Be("authRequest.CustomerEmail"); } [Test] public void The_payment_processor_should_throw_argumentnullexception_for_an_auth_request_with_a_null_billing_address() { AuthRequest.BillingAddress = null; var paramName = Assert.Throws<ArgumentNullException>( () => PaymentProcessor.Authorise(AuthRequest), "A null billing address is required to make this test valid.") .ParamName; paramName.Should().Be("authRequest.BillingAddress"); } [Test] public void 
The_payment_processor_should_throw_argumentnullexception_for_an_auth_request_with_a_null_delivery_address()
        {
            AuthRequest.DeliveryAddress = null;
            var paramName = Assert.Throws<ArgumentNullException>(
                () => PaymentProcessor.Authorise(AuthRequest),
                "A null delivery address is required to make this test valid.")
                .ParamName;
            paramName.Should().Be("authRequest.DeliveryAddress");
        }
    }
}<file_sep>/src/MagStore.Web/Models/ShoppingCart/RemoveProductFromBasketPostInputModel.cs
namespace MagStore.Web.Models.ShoppingCart
{
    // Post model carrying the id of the product to remove from the basket.
    public class RemoveProductFromBasketPostInputModel
    {
        public string Id { get; set; }
    }
}<file_sep>/src/MagStore/Infrastructure/Interfaces/ICoordinator.cs
using System.Collections.Generic; using Raven.Client.Linq;

namespace MagStore.Infrastructure.Interfaces
{
    /// <summary>
    /// CRUD facade over the RavenDB document store for one entity type.
    /// </summary>
    public interface ICoordinator<T>
    {
        void Save( T entity );
        void Save(IEnumerable<T> entity);
        T Load(string id);
        IEnumerable<T> Load( IEnumerable<string> ids );
        IList<T> List();
        // NOTE(review): the method-level <T> shadows the interface type parameter
        // (compiler warning CS0693); callers presumably use the same T — confirm
        // before renaming, since dropping it would change the public interface.
        IRavenQueryable<T> Query<T>();
        void Delete(T entity);
    }
}<file_sep>/src/MagStore/Infrastructure/Interfaces/IShop.cs
using System; using System.Linq.Expressions; using MagStore.Entities; using Raven.Client.Document;

namespace MagStore.Infrastructure.Interfaces
{
    // Root data-access entry point: per-entity coordinators, RavenDB include
    // support, and shop-wide settings persistence.
    public interface IShop
    {
        ICoordinator<T> GetCoordinator<T>() where T : IRavenEntity;
        ILoaderWithInclude<T> Include<T>(Expression<Func<T, object>> path);
        IShopSettings GetSettings();
        void UpdateSettings(IShopSettings settings);
    }
}<file_sep>/src/MagStore/Entities/Enums/AccountStatus.cs
namespace MagStore.Entities.Enums
{
    // Lifecycle states for a user account.
    public enum AccountStatus { Active, Dormant, Restricted, Blocked }
}<file_sep>/src/MagStore.Web/Infrastructure/Extensions.cs
using System.Collections.Generic;

namespace MagStore.Web.Infrastructure
{
    public static class Extensions
    {
        // Sorts the list in place using the default comparer and returns the same
        // instance for chaining. Throws InvalidOperationException at runtime if T
        // has no default comparison.
        public static List<T> SortAtoZ<T>(this List<T> list) { list.Sort(); return list; }
    }
}<file_sep>/src/MagStore.Web/Models/Product/ShowProductInputModel.cs
namespace MagStore.Web.Models.Product
{
    // Route/query input for displaying a single product.
    public class ShowProductInputModel {
public string Id { get; set; } public string Category { get; set; } public string Code { get; set; } public string Gender { get; set; } } }<file_sep>/src/RavenNamespaceUpdatePatch/Program.cs using System.Linq; using MagStore.Entities; using MagStore.Infrastructure.Interfaces; using Raven.Abstractions.Data; using Raven.Client.Document; using Raven.Json.Linq; namespace RavenNamespaceUpdatePatch { internal class Program { private static void Main(string[] args) { var documentStore = new DocumentStore { ApiKey = "<KEY>", Url = "https://ec2-eu4.cloudbird.net/databases/c818ddc6-dc4b-4b57-a439-4329fff0e61b.rdbtest-mag" }; documentStore.Initialize(); var toUpdate = documentStore.DatabaseCommands.GetDocuments(0, 500); var types = typeof (Catalogue).Assembly.GetTypes() .Where(t => typeof (IRavenEntity).IsAssignableFrom(t) && t.IsClass); // if (type.FullName.Contains("IRavenEntity")) continue; foreach (var d in toUpdate) { string oldNamespace = "RavenDbMembership"; string newNamespace = "MagStore"; var clrType = d.Metadata["Raven-Clr-Type"] .ToString() .Replace(oldNamespace, newNamespace); var p = new PatchRequest { Type = PatchCommandType.Modify, Name = "@metadata", Nested = new[] { new PatchRequest { Type = PatchCommandType.Set, Name = "Raven-Clr-Type", Value = new RavenJValue(clrType) }, } }; documentStore.DatabaseCommands.Patch(d.Key, new[] {p}); } } } }<file_sep>/src/MagStore.Web/Models/Promotion/PromotionsViewModel.cs using System.Collections.Generic; namespace MagStore.Web.Models.Promotion { public class PromotionsViewModel { public IEnumerable<Entities.Promotion> Promotions { get; set; } public PromotionsViewModel() { Promotions = new List<Entities.Promotion>(); } } }<file_sep>/src/MagStore.Test/Payments/When_making_a_request_and_receiving_a_response.cs using FluentAssertions; using NSubstitute; using NUnit.Framework; using SagePayMvc; namespace MagStore.Test.Payments { [TestFixture] public class When_making_a_request_and_receiving_a_response : 
MakingPaymentsSetUpFixture
    {
        // NSubstitute interaction checks: the fixture base performs the request
        // before each test; these tests only verify what was received/returned.
        [Test]
        public void The_payment_processor_should_receive_an_auth_request()
        {
            PaymentProcessor.Received().Authorise(AuthRequest);
        }

        [Test]
        public void The_payment_processor_should_return_a_response()
        {
            AuthResponse.Should().NotBeNull();
        }

        [Test]
        public void The_transaction_registrar_should_receive_the_valid_request()
        {
            TransactionRegistrar.Received().Send(
                AuthRequest.Context,
                AuthRequest.TransactionId,
                Arg.Any<ShoppingBasket>(),
                AuthRequest.BillingAddress,
                AuthRequest.DeliveryAddress,
                AuthRequest.CustomerEmail);
        }
    }
}<file_sep>/src/MagStore.Test/TestSetUpFixture.cs
using NUnit.Framework;

namespace MagStore.Test
{
    // Template-method base for tests: NUnit invokes TestSetUp before each test,
    // which calls the overridable Arrange then Act hooks.
    public class TestSetUpFixture
    {
        [SetUp]
        protected void TestSetUp() { Arrange(); Act(); }

        protected virtual void Arrange() { }

        protected virtual void Act() { }
    }
}<file_sep>/src/MagStore.Web/Models/ShoppingCart/CheckoutViewModel.cs
using System.Collections.Generic;

namespace MagStore.Web.Models.ShoppingCart
{
    // View model for the checkout page; SubTotal and DeliveryCharge are set by
    // the controller rather than derived here.
    public class CheckoutViewModel
    {
        public IList<Entities.Product> Products { get; set; }
        public decimal SubTotal { get; set; }
        public decimal DeliveryCharge { get; set; }
    }
}<file_sep>/src/MagStore/Entities/Cart.cs
using System.Collections.Generic; using MagStore.Infrastructure.Interfaces;

namespace MagStore.Entities
{
    // Shopping cart document; collections are initialised in the constructor so
    // callers never observe null Products/Promotions on a new cart.
    public class Cart : IRavenEntity
    {
        public Cart()
        {
            Products = new List<Product>();
            Promotions = new List<string>();
        }

        public string Id { get; set; }
        public IList<Product> Products { get; set; }
        public IList<string> Promotions { get; set; }
    }
}<file_sep>/src/MagStore.Web/Models/ShoppingCart/InitiatePaymentViewModel.cs
using SagePayMvc;

namespace MagStore.Web.Models.ShoppingCart
{
    // Addresses captured for the SagePay transaction registration.
    public class InitiatePaymentViewModel
    {
        public Address BillingAddress { get; set; }
        public Address DeliveryAddress { get; set; }
    }
}<file_sep>/src/MagStore/Entities/Additives/Colours.cs
using System.Collections.Generic;

namespace MagStore.Entities.Additives
{
    // Lookup of colour display name -> hex value, populated once in the
    // constructor.
    public class Colours : Dictionary<string, string>
    {
        public Colours() {
Add("Amaranth", "#E52B50"); Add("Amber", "#FFBF00"); Add("Apricot", "#FBCEB1"); Add("Aquamarine", "#7FFFD4"); Add("Azure", "#007FFF"); Add("Baby blue", "#89CFF0"); Add("Beige", "#F5F5DC"); Add("Black", "#000000"); Add("Blue", "#0000FF"); Add("Blue-green", "#0095B6"); Add("Blue-violet", "#8A2BE2"); Add("Blush", "#DE5D83"); Add("Bronze", "#CD7F32"); Add("Brown", "#964B00"); Add("Burgundy", "#800020"); Add("Byzantium", "#702963"); Add("Carmine", "#960018"); Add("Cerise", "#DE3163"); Add("Cerulean", "#007BA7"); Add("Champagne", "#F7E7CE"); Add("Chartreuse green", "#7FFF00"); Add("Chocolate", "#7B3F00"); Add("Coffee", "#6F4E37"); Add("Copper", "#B87333"); Add("Coral", "#F88379"); Add("Crimson", "#DC143C"); Add("Cyan", "#00FFFF"); Add("Light denim", "#007FFF"); Add("Desert sand", "#EDC9AF"); Add("Electric blue", "#7DF9FF"); Add("Erin", "#00FF3F"); Add("Gold", "#FFD700"); Add("Gray", "#808080"); Add("Green", "#00FF00"); Add("Harlequin", "#3FFF00"); Add("Indigo", "#4B0082"); Add("Ivory", "#FFFFF0"); Add("Jade", "#00A86B"); Add("Jungle green", "#29AB87"); Add("Lavender", "#B57EDC"); Add("Lemon", "#FFF700"); Add("Lilac", "#C8A2C8"); Add("Lime", "#BFFF00"); Add("Magenta", "#FF00FF"); Add("Magenta rose", "#FF00AF"); Add("Maroon", "#800000"); Add("Mauve", "#E0B0FF"); Add("Navy blue", "#000080"); Add("Olive", "#808000"); Add("Orange", "#FFA500"); Add("Orange-red", "#FF4500"); Add("Orchid", "#DA70D6"); Add("Peach", "#FFE5B4"); Add("Pear", "#D1E231"); Add("Periwinkle", "#CCCCFF"); Add("Persian blue", "#1C39BB"); Add("Pink", "#FFC0CB"); Add("Plum", "#8E4585"); Add("Prussian blue", "#003153"); Add("Puce", "#CC8899"); Add("Purple", "#800080"); Add("Raspberry ", "#E30B5C"); Add("Red", "#FF0000"); Add("Red-violet", "#C71585"); Add("Rose", "#FF007F"); Add("Salmon", "#FA8072"); Add("Sapphire", "#0F52BA"); Add("Scarlet", "#FF2400"); Add("Silver", "#C0C0C0"); Add("Slate gray", "#708090"); Add("Spring bud", "#A7FC00"); Add("Spring green", "#00FF7F"); Add("Tan", "#D2B48C"); Add("Taupe", 
"#483C32"); Add("Teal", "#008080"); Add("Turquoise", "#40E0D0"); Add("Violet", "#EE82EE"); Add("Viridian", "#40826D"); Add("White", "#FFFFFF"); Add("Yellow", "#FFFF00"); } } }<file_sep>/src/MagStore/Infrastructure/ModelClientValidationModel.cs using System.Collections.Generic; namespace MagStore.Infrastructure { /// <summary> /// Provides a base class container for a client validation rule that is sent to the browser. /// </summary> public class ModelClientValidationRule { private readonly Dictionary<string, object> validationParameters = new Dictionary<string, object>(); private string validationType; /// <summary> /// Gets or sets the error message for the client validation rule. /// </summary> /// /// <returns> /// The error message for the client validation rule. /// </returns> public string ErrorMessage { get; set; } /// <summary> /// Gets the list of validation parameters. /// </summary> /// /// <returns> /// A list of validation parameters. /// </returns> public IDictionary<string, object> ValidationParameters { get { return validationParameters; } } /// <summary> /// Gets or sets the validation type. /// </summary> /// /// <returns> /// The validation type. /// </returns> public string ValidationType { get { return validationType ?? 
string.Empty; } set { validationType = value; } } } }<file_sep>/src/MagStore.Web/Models/Catalogue/EditCataloguePostInputModel.cs using System.Collections.Generic; using MagStore.Entities.Enums; namespace MagStore.Web.Models.Catalogue { public class EditCataloguePostInputModel : EditCatalogueGetInputModel { public string Name { get; set; } public decimal DiscountAmount { get; set; } public DiscountType DiscountType { get; set; } public IEnumerable<string> Promotions { get; set; } } }<file_sep>/src/MagStore.Web/Models/Product/CreateProductInputModel.cs using System.Collections.Generic; using System.ComponentModel.DataAnnotations; using System.Web; using MagStore.Entities.Enums; namespace MagStore.Web.Models.Product { public class CreateProductInputModel : IProductPostInputModel { public string Id { get; set; } public string Code { get; set; } [Required(ErrorMessage = "A name must be supplied.")] public string Name { get; set; } public string Description { get; set; } public string Specification { get; set; } public string Catalogue { get; set; } public IEnumerable<string> Colours { get; set; } public IEnumerable<string> Sizes { get; set; } [Required(ErrorMessage="Please supply a value.")] [StringLength(50, MinimumLength = 4)] public string Gender { get; set; } public string Brand { get; set; } public string Supplier { get; set; } public int Rating { get; set; } public IEnumerable<string> Reviews { get; set; } public string[] PhotoType { get; set; } public HttpPostedFileBase[] UploadedImages { get; set; } [Required(ErrorMessage="The product must have a price.")] [Range(0.01, 10000.00, ErrorMessage = "The price must be at least 0.01")] public decimal Price { get; set; } [Required(ErrorMessage = "Please specify the product type.")] public ProductType ProductType { get; set; } public int[] AgeRange { get; set; } public DiscountType DiscountType { get; set; } public decimal DiscountAmount { get; set; } public IEnumerable<string> Promotions { get; set; } public 
IEnumerable<string> Tags { get; set; }
    }

    // Contract for product create/edit post models that carry uploaded images
    // alongside their per-image photo type.
    public interface IProductPostInputModel
    {
        HttpPostedFileBase[] UploadedImages { get; set; }
        string[] PhotoType { get; set; }
    }
}<file_sep>/src/MagStore.Web/Models/Account/CreateRoleInputModel.cs
namespace MagStore.Web.Models.Account
{
    // Post model carrying the name of the role to create.
    public class CreateRoleInputModel
    {
        public string Role { get; set; }
    }
}<file_sep>/src/MagStore.Web/ShopHelpers/PromotionHelper.cs
using System.Collections.Generic; using MagStore.Entities; using MagStore.Infrastructure.Interfaces;

namespace MagStore.Web.ShopHelpers
{
    public class PromotionHelper : IPromotionHelper
    {
        private readonly IShop shop;
        private IEnumerable<Promotion> existingPromotions;

        public PromotionHelper(IShop shop)
        {
            this.shop = shop;
            existingPromotions = new List<Promotion>();
        }

        // NOTE(review): every read hits the store and overwrites the field, so
        // the constructor-initialised empty list is never observed by callers
        // and the field provides no caching.
        public IEnumerable<Promotion> ExistingPromotions
        {
            get
            {
                existingPromotions = shop.GetCoordinator<Promotion>().List();
                return existingPromotions;
            }
        }
    }

    public interface IPromotionHelper
    {
        IEnumerable<Promotion> ExistingPromotions { get; }
    }
}<file_sep>/src/MagStore.Payments/Messages/IAuthRequest.cs
using System.Collections.Generic; using System.Web.Routing; using MagStore.Entities; using SagePayMvc;

namespace MagStore.Payments.Messages
{
    // Data contract for a payment authorisation request handed to the payment
    // processor; each member is required (see When_making_an_invalid_request).
    public interface IAuthRequest
    {
        RequestContext Context { get; set; }
        string TransactionId { get; set; }
        IList<Product> Products { get; set; }
        string CustomerEmail { get; set; }
        Address BillingAddress { get; set; }
        Address DeliveryAddress { get; set; }
    }
}<file_sep>/src/MagStore.Test/Checkout/InitiatingPayment/When_initiating_a_payment.cs
using System.Collections.Generic; using System.Web.Mvc; using MagStore.Entities; using MagStore.Infrastructure.Interfaces; using MagStore.Web.Controllers; using MagStore.Web.Models.ShoppingCart; using NSubstitute; using SagePayMvc;

namespace MagStore.Test.Checkout.InitiatingPayment
{
    // Fixture base: substitutes the shop/coordinator/registrar dependencies and
    // builds the controller under test in Arrange.
    public class When_initiating_a_payment : TestSetUpFixture
    {
        protected InitiatePaymentPostInputModel InputModel;
        protected IShop Shop;
        protected
ICoordinator<Product> Coordinator { get; set; } protected ShoppingCartController ControllerUnderTest { get; set; } protected ViewResult Result { get; set; } protected override void Arrange() { var product = new Product(); Coordinator = Substitute.For<ICoordinator<Product>>(); Coordinator.Load(Arg.Any<string>()).Returns(product); Coordinator.Load(Arg.Any<IEnumerable<string>>()).Returns(new[] { product }); Shop = Substitute.For<IShop>(); Shop.GetCoordinator<Product>().Returns(Coordinator); InputModel = Substitute.For<InitiatePaymentPostInputModel>(); InputModel.Products = Substitute.For<IEnumerable<string>>(); var registrar = Substitute.For<ITransactionRegistrar>(); ControllerUnderTest = new ShoppingCartController(Shop, registrar); } } } <file_sep>/src/MagStore.Web/Models/Product/EditProductInputModel.cs using System.Collections.Generic; using System.Web; using MagStore.Entities.Enums; namespace MagStore.Web.Models.Product { public class EditProductInputModel : IProductPostInputModel { public string Id { get; set; } public string Code { get; set; } public string Name { get; set; } public string Description { get; set; } public string Specification { get; set; } public string Colour { get; set; } public string Size { get; set; } public string Gender { get; set; } public string Brand { get; set; } public string Supplier { get; set; } public int Rating { get; set; } public IEnumerable<string> Reviews { get; set; } public IEnumerable<string> ExistingImages { get; set; } public string[] ExistingPhotoType { get; set; } public string[] PhotoType { get; set; } public HttpPostedFileBase[] UploadedImages { get; set; } public decimal Price { get; set; } public ProductType ProductType { get; set; } public int[] AgeRange { get; set; } public DiscountType DiscountType { get; set; } public decimal DiscountAmount { get; set; } public IEnumerable<string> Promotions { get; set; } public string Catalogue { get; set; } public IEnumerable<string> Tags { get; set; } } 
}<file_sep>/src/MagStore.Payments/Processors/IPaymentProcessor.cs using MagStore.Payments.Messages; using MagStore.Payments.Providers; using SagePayMvc; namespace MagStore.Payments.Processors { public interface IPaymentProcessor { IAuthResponse Authorise(IAuthRequest authRequest); ITransactionRegistrar Registrar { set; } } }<file_sep>/src/MagStore.Payments/Providers/SagePayPaymentProvider.cs using MagStore.Payments.Messages; using MagStore.Payments.Processors; namespace MagStore.Payments.Providers { public class SagePayPaymentProvider : IPaymentProvider { private readonly IPaymentProcessor processor; public SagePayPaymentProvider(IPaymentProcessor processor) { this.processor = processor; } public IAuthResponse MakePayment(IAuthRequest authRequest) { return processor.Authorise(authRequest); } } }<file_sep>/src/MagStore.Web/Controllers/AccountController.cs using System; using System.Collections.Generic; using System.Web.Mvc; using System.Web.Routing; using System.Web.Security; using MagStore.Entities; using MagStore.Infrastructure; using MagStore.Infrastructure.Interfaces; using MagStore.Web.Models; using MagStore.Web.Models.Account; namespace MagStore.Web.Controllers { public class AccountController : Controller { private readonly IRepository finder; private IFormsAuthenticationService FormsService { get; set; } private IMembershipService MembershipService { get; set; } private readonly RoleProvider roleProvider; public AccountController(IRepository finder, RoleProvider roleProvider) { this.finder = finder; this.roleProvider = roleProvider; } protected override void Initialize(RequestContext requestContext) { if (FormsService == null) { FormsService = new FormsAuthenticationService(); } if (MembershipService == null) { MembershipService = new AccountMembershipService(); } base.Initialize(requestContext); } // ************************************** // URL: /Account/LogOn // ************************************** public ActionResult LogOn() { return View(); } 
[HttpPost] public ActionResult LogOn(LogOnModel model, string returnUrl) { if (ModelState.IsValid) { if (MembershipService.ValidateUser(model.UserName, model.Password)) { FormsService.SignIn(model.UserName, model.RememberMe); var providerUserKey = MembershipService.GetUser(model.UserName).ProviderUserKey as string; var cart = (Session["CurrentUser"] as User).ShoppingCart; var currentUser = finder.Load<User>(providerUserKey); currentUser.ShoppingCart = cart; Session["CurrentUser"] = currentUser; if (Url.IsLocalUrl(returnUrl)) { return Redirect(returnUrl); } return RedirectToAction("Index", "Home"); } ModelState.AddModelError("", "The user name or password provided is incorrect."); } // If we got this far, something failed, redisplay form return View("LogOn", model); } // ************************************** // URL: /Account/LogOff // ************************************** public ActionResult LogOff() { FormsService.SignOut(); (Session["CurrentUser"] as User).ShoppingCart.Products = new List<Product>(); return RedirectToAction("Index", "Home"); } // ************************************** // URL: /Account/Register // ************************************** public ActionResult Register() { ViewBag.PasswordLength = MembershipService.MinPasswordLength; return View(); } [HttpPost] public ActionResult Register(RegisterModel model) { if (ModelState.IsValid) { // Attempt to register the user var createStatus = MembershipService.CreateUser(model.UserName, model.Password, model.Email); if (createStatus == MembershipCreateStatus.Success) { FormsService.SignIn(model.UserName, false /* createPersistentCookie */); return RedirectToAction("Index", "Home"); } ModelState.AddModelError("", AccountValidation.ErrorCodeToString(createStatus)); } // If we got this far, something failed, redisplay form ViewBag.PasswordLength = MembershipService.MinPasswordLength; return View(model); } // ************************************** // URL: /Account/ChangePassword // 
************************************** [Authorize] public ActionResult ChangePassword() { ViewBag.PasswordLength = MembershipService.MinPasswordLength; return View(); } [Authorize] [HttpPost] public ActionResult ChangePassword(ChangePasswordModel model) { if (ModelState.IsValid) { if (MembershipService.ChangePassword(User.Identity.Name, model.OldPassword, model.NewPassword)) { return RedirectToAction("ChangePasswordSuccess"); } ModelState.AddModelError("", "The current password is incorrect or the new password is invalid."); } // If we got this far, something failed, redisplay form ViewBag.PasswordLength = MembershipService.MinPasswordLength; return View(model); } // ************************************** // URL: /Account/ChangePasswordSuccess // ************************************** public ActionResult ChangePasswordSuccess() { return View(); } [Authorize(Roles="Administrator")] public ActionResult ManageUsers() { var users = MembershipService.GetAllUsers(); return View(users); } [Authorize(Roles = "Administrator")] public ActionResult ManageRoles() { var roles = MembershipService.GetAllRoles(); return View(roles); } [HttpPost] [Authorize(Roles = "Administrator")] public ActionResult ManageRoles(string roleName) { if (String.IsNullOrEmpty(roleName)) { ModelState.AddModelError("roleName", "Name is required"); } else { MembershipService.AddRole(roleName); } return RedirectToAction("ManageRoles"); } [Authorize(Roles = "Administrator")] public ActionResult EditUser() { var currentUser = System.Web.HttpContext.Current.Session["CurrentUser"] as User; if (currentUser != null) { var user = MembershipService.GetUser(currentUser.Username); var roles = MembershipService.GetAllRoles(); var userRoles = MembershipService.GetRolesForUser(user.UserName); return View(new EditUserModel(user.UserName, user.Email, roles, userRoles)); } return View("Error", new HandleErrorInfo(new Exception("CurrentUser is null"), "Account", "EditUser")); } [HttpPost] [Authorize(Roles = 
"Administrator")] public ActionResult EditUser(EditUserModel model) { var user = MembershipService.GetUser(model.Username); MembershipService.UpdateUser(user, model.UserRoles); return RedirectToAction("ManageUsers"); } [HttpGet] public ActionResult CreateRole() { var u = Session["CurrentUser"] as User; var viewModel = new CreateRoleViewModel { User = u }; return View(viewModel); } [HttpPost] public ActionResult CreateRole(CreateRoleInputModel inputModel) { roleProvider.CreateRole(inputModel.Role); return RedirectToAction("CreateRole"); } [HttpPost] [Authorize(Roles = "Administrator")] public ActionResult DeleteRole(string roleName) { MembershipService.DeleteRole(roleName); return RedirectToAction("ManageRoles"); } } // [Authorize] // public class AccountController : Controller // { // private readonly IShop shop; // // public AccountController(IShop shop) // { // this.shop = shop; // } // // // // // GET: /Account/Login // // [AllowAnonymous] // public ActionResult Login(string returnUrl) // { // ViewBag.ReturnUrl = returnUrl; // return View(); // } // // // // // POST: /Account/Login // // [HttpPost] // [AllowAnonymous] // [ValidateAntiForgeryToken] // public ActionResult Login(LoginModel model, string returnUrl) // { // if (ModelState.IsValid && WebSecurity.Login(model.UserName, model.Password, persistCookie: model.RememberMe)) // { // return RedirectToLocal(returnUrl); // } // // // If we got this far, something failed, redisplay form // ModelState.AddModelError("", "The user name or password provided is incorrect."); // return View(model); // } // // // // // POST: /Account/LogOff // // [HttpPost] // [ValidateAntiForgeryToken] // public ActionResult LogOff() // { // WebSecurity.Logout(); // // return RedirectToAction("Index", "Home"); // } // // // // // GET: /Account/Register // // [AllowAnonymous] // public ActionResult Register() // { // return View(); // } // // // // // POST: /Account/Register // // [HttpPost] // [AllowAnonymous] // 
[ValidateAntiForgeryToken] // public ActionResult Register(RegisterModel model) // { // if (ModelState.IsValid) // { // // Attempt to register the user // try // { // WebSecurity.CreateUserAndAccount(model.UserName, model.Password); // WebSecurity.Login(model.UserName, model.Password); // return RedirectToAction("Index", "Home"); // } // catch (MembershipCreateUserException e) // { // ModelState.AddModelError("", ErrorCodeToString(e.StatusCode)); // } // } // // // If we got this far, something failed, redisplay form // return View(model); // } // // // // // POST: /Account/Disassociate // // [HttpPost] // [ValidateAntiForgeryToken] // public ActionResult Disassociate(string provider, string providerUserId) // { // string ownerAccount = OAuthWebSecurity.GetUserName(provider, providerUserId); // ManageMessageId? message = null; // // // Only disassociate the account if the currently logged in user is the owner // if (ownerAccount == User.Identity.Name) // { // // Use a transaction to prevent the user from deleting their last login credential // using (var scope = new TransactionScope(TransactionScopeOption.Required, new TransactionOptions { IsolationLevel = IsolationLevel.Serializable })) // { // bool hasLocalAccount = OAuthWebSecurity.HasLocalAccount(WebSecurity.GetUserId(User.Identity.Name)); // if (hasLocalAccount || OAuthWebSecurity.GetAccountsFromUserName(User.Identity.Name).Count > 1) // { // OAuthWebSecurity.DeleteAccount(provider, providerUserId); // scope.Complete(); // message = ManageMessageId.RemoveLoginSuccess; // } // } // } // // return RedirectToAction("Manage", new { Message = message }); // } // // // // // GET: /Account/Manage // // public ActionResult Manage(ManageMessageId? message) // { // ViewBag.StatusMessage = // message == ManageMessageId.ChangePasswordSuccess ? "Your password has been changed." // : message == ManageMessageId.SetPasswordSuccess ? "Your password has been set." // : message == ManageMessageId.RemoveLoginSuccess ? 
"The external login was removed." // : ""; // ViewBag.HasLocalPassword = OAuthWebSecurity.HasLocalAccount(WebSecurity.GetUserId(User.Identity.Name)); // ViewBag.ReturnUrl = Url.Action("Manage"); // return View(); // } // // // // // POST: /Account/Manage // // [HttpPost] // [ValidateAntiForgeryToken] // public ActionResult Manage(LocalPasswordModel model) // { // bool hasLocalAccount = OAuthWebSecurity.HasLocalAccount(WebSecurity.GetUserId(User.Identity.Name)); // ViewBag.HasLocalPassword = <PASSWORD>; // ViewBag.ReturnUrl = Url.Action("Manage"); // if (hasLocalAccount) // { // if (ModelState.IsValid) // { // // ChangePassword will throw an exception rather than return false in certain failure scenarios. // bool changePasswordSucceeded; // try // { // changePasswordSucceeded = WebSecurity.ChangePassword(User.Identity.Name, model.OldPassword, model.NewPassword); // } // catch (Exception) // { // changePasswordSucceeded = false; // } // // if (changePasswordSucceeded) // { // return RedirectToAction("Manage", new { Message = ManageMessageId.ChangePasswordSuccess }); // } // else // { // ModelState.AddModelError("", "The current password is incorrect or the new password is invalid."); // } // } // } // else // { // // User does not have a local password so remove any validation errors caused by a missing // // OldPassword field // ModelState state = ModelState["OldPassword"]; // if (state != null) // { // state.Errors.Clear(); // } // // if (ModelState.IsValid) // { // try // { // WebSecurity.CreateAccount(User.Identity.Name, model.NewPassword); // return RedirectToAction("Manage", new { Message = ManageMessageId.SetPasswordSuccess }); // } // catch (Exception e) // { // ModelState.AddModelError("", e); // } // } // } // // // If we got this far, something failed, redisplay form // return View(model); // } // // // // // POST: /Account/ExternalLogin // // [HttpPost] // [AllowAnonymous] // [ValidateAntiForgeryToken] // public ActionResult ExternalLogin(string provider, 
string returnUrl) // { // return new ExternalLoginResult(provider, Url.Action("ExternalLoginCallback", new { ReturnUrl = returnUrl })); // } // // // // // GET: /Account/ExternalLoginCallback // // [AllowAnonymous] // public ActionResult ExternalLoginCallback(string returnUrl) // { // AuthenticationResult result = OAuthWebSecurity.VerifyAuthentication(Url.Action("ExternalLoginCallback", new { ReturnUrl = returnUrl })); // if (!result.IsSuccessful) // { // return RedirectToAction("ExternalLoginFailure"); // } // // if (OAuthWebSecurity.Login(result.Provider, result.ProviderUserId, createPersistentCookie: false)) // { // return RedirectToLocal(returnUrl); // } // // if (User.Identity.IsAuthenticated) // { // // If the current user is logged in add the new account // OAuthWebSecurity.CreateOrUpdateAccount(result.Provider, result.ProviderUserId, User.Identity.Name); // return RedirectToLocal(returnUrl); // } // else // { // // User is new, ask for their desired membership name // string loginData = OAuthWebSecurity.SerializeProviderUserId(result.Provider, result.ProviderUserId); // ViewBag.ProviderDisplayName = OAuthWebSecurity.GetOAuthClientData(result.Provider).DisplayName; // ViewBag.ReturnUrl = returnUrl; // return View("ExternalLoginConfirmation", new RegisterExternalLoginModel { UserName = result.UserName, ExternalLoginData = loginData }); // } // } // // // // // POST: /Account/ExternalLoginConfirmation // // [HttpPost] // [AllowAnonymous] // [ValidateAntiForgeryToken] // public ActionResult ExternalLoginConfirmation(RegisterExternalLoginModel model, string returnUrl) // { // string provider = null; // string providerUserId = null; // // if (User.Identity.IsAuthenticated || !OAuthWebSecurity.TryDeserializeProviderUserId(model.ExternalLoginData, out provider, out providerUserId)) // { // return RedirectToAction("Manage"); // } // // if (ModelState.IsValid) // { // // Insert a new user into the database // using (UsersContext db = new UsersContext()) // { // 
UserProfile user = db.UserProfiles.FirstOrDefault(u => u.UserName.ToLower() == model.UserName.ToLower()); // // Check if user already exists // if (user == null) // { // // Insert name into the profile table // db.UserProfiles.Add(new UserProfile { UserName = model.UserName }); // db.SaveChanges(); // // OAuthWebSecurity.CreateOrUpdateAccount(provider, providerUserId, model.UserName); // OAuthWebSecurity.Login(provider, providerUserId, createPersistentCookie: false); // // return RedirectToLocal(returnUrl); // } // else // { // ModelState.AddModelError("UserName", "User name already exists. Please enter a different user name."); // } // } // } // // ViewBag.ProviderDisplayName = OAuthWebSecurity.GetOAuthClientData(provider).DisplayName; // ViewBag.ReturnUrl = returnUrl; // return View(model); // } // // // // // GET: /Account/ExternalLoginFailure // // [AllowAnonymous] // public ActionResult ExternalLoginFailure() // { // return View(); // } // // [AllowAnonymous] // [ChildActionOnly] // public ActionResult ExternalLoginsList(string returnUrl) // { // ViewBag.ReturnUrl = returnUrl; // return PartialView("_ExternalLoginsListPartial", OAuthWebSecurity.RegisteredClientData); // } // // [ChildActionOnly] // public ActionResult RemoveExternalLogins() // { // ICollection<OAuthAccount> accounts = OAuthWebSecurity.GetAccountsFromUserName(User.Identity.Name); // Project<ExternalLogin> externalLogins = new Project<ExternalLogin>(); // foreach (OAuthAccount account in accounts) // { // AuthenticationClientData clientData = OAuthWebSecurity.GetOAuthClientData(account.Provider); // // externalLogins.Add(new ExternalLogin // { // Provider = account.Provider, // ProviderDisplayName = clientData.DisplayName, // ProviderUserId = account.ProviderUserId, // }); // } // // ViewBag.ShowRemoveButton = externalLogins.Count > 1 || OAuthWebSecurity.HasLocalAccount(WebSecurity.GetUserId(User.Identity.Name)); // return PartialView("_RemoveExternalLoginsPartial", externalLogins); // } // // 
#region Helpers // private ActionResult RedirectToLocal(string returnUrl) // { // if (Url.IsLocalUrl(returnUrl)) // { // return Redirect(returnUrl); // } // else // { // return RedirectToAction("Index", "Home"); // } // } // // public enum ManageMessageId // { // ChangePasswordSuccess, // SetPasswordSuccess, // RemoveLoginSuccess, // } // // internal class ExternalLoginResult : ActionResult // { // public ExternalLoginResult(string provider, string returnUrl) // { // Provider = provider; // ReturnUrl = returnUrl; // } // // public string Provider { get; private set; } // public string ReturnUrl { get; private set; } // // public override void ExecuteResult(ControllerContext context) // { // OAuthWebSecurity.RequestAuthentication(Provider, ReturnUrl); // } // } // // private static string ErrorCodeToString(MembershipCreateStatus createStatus) // { // // See http://go.microsoft.com/fwlink/?LinkID=177550 for // // a full list of status codes. // switch (createStatus) // { // case MembershipCreateStatus.DuplicateUserName: // return "User name already exists. Please enter a different user name."; // // case MembershipCreateStatus.DuplicateEmail: // return "A user name for that e-mail address already exists. Please enter a different e-mail address."; // // case MembershipCreateStatus.InvalidPassword: // return "The password provided is invalid. Please enter a valid password value."; // // case MembershipCreateStatus.InvalidEmail: // return "The e-mail address provided is invalid. Please check the value and try again."; // // case MembershipCreateStatus.InvalidAnswer: // return "The password retrieval answer provided is invalid. Please check the value and try again."; // // case MembershipCreateStatus.InvalidQuestion: // return "The password retrieval question provided is invalid. Please check the value and try again."; // // case MembershipCreateStatus.InvalidUserName: // return "The user name provided is invalid. 
Please check the value and try again."; // // case MembershipCreateStatus.ProviderError: // return "The authentication provider returned an error. Please verify your entry and try again. If the problem persists, please contact your system administrator."; // // case MembershipCreateStatus.UserRejected: // return "The user creation request has been canceled. Please verify your entry and try again. If the problem persists, please contact your system administrator."; // // default: // return "An unknown error occurred. Please verify your entry and try again. If the problem persists, please contact your system administrator."; // } // } // #endregion // } } <file_sep>/src/MagStore.Web/Controllers/StoreController.cs using System.Web.Mvc; using MagStore.Infrastructure; namespace MagStore.Web.Controllers { public class StoreController : Controller { private readonly Shop shop; public StoreController(Shop shop) { this.shop = shop; } public ActionResult Manage() { return View(); } } } <file_sep>/src/MagStore/Entities/Enums/DiscountType.cs namespace MagStore.Entities.Enums { public enum DiscountType { Percentage, MonetaryAmount } }<file_sep>/src/MagStore.Payments/Messages/SagePayAuthResponse.cs namespace MagStore.Payments.Messages { public class SagePayAuthResponse : IAuthResponse { } public interface IAuthResponse { } }<file_sep>/src/MagStore/Entities/Enums/FulfillmentStatus.cs namespace MagStore.Entities.Enums { public enum FulfillmentStatus { Ordered, Verified, ReadyForCollection, Collected, InTransit, ArrivedLocally, OutForDelivery, Delivered, } }<file_sep>/src/MagStore.StorageAccessor/StorageAccessor.cs using System; using System.IO; using Microsoft.WindowsAzure.Storage; using Microsoft.WindowsAzure.Storage.Auth; using Microsoft.WindowsAzure.Storage.Blob; using Microsoft.WindowsAzure.Storage.RetryPolicies; namespace MagStore.Azure { public class StorageAccessor : IStorageAccessor { private readonly CloudStorageAccount storageAccount; private CloudBlobClient blobClient; 
private CloudBlobContainer resourcesContainer; public StorageAccessor(string accountName, string accessKey, bool useHttps = false) { var storageCredentials = new StorageCredentials(accountName, accessKey); storageAccount = new CloudStorageAccount(storageCredentials, useHttps); var cloudBlobContainer = GetContainerByName("resources"); Resources = cloudBlobContainer.Exists() ? cloudBlobContainer : null; } public CloudBlobClient GetBlobClient() { return blobClient ?? (blobClient = storageAccount.CreateCloudBlobClient()); } public Uri AddBlobToResource(string fileName, Stream inputStream) { var createdBlob = CreateBlob(fileName, inputStream); var createdBlobUri = createdBlob.Uri; return createdBlobUri; } public CloudBlobContainer Resources { get { return resourcesContainer; } private set { resourcesContainer = value; } } private CloudBlobContainer GetContainerByName(string containerName) { return GetBlobClient().GetContainerReference(containerName); } private CloudBlockBlob CreateBlob(string fileName, Stream inputStream) { PrepareResourcesContainer(); var blob = resourcesContainer.GetBlockBlobReference(fileName); blob.UploadFromStream(inputStream); return blob; } public void PrepareResourcesContainer() { if (ResourcesContainerExists()) return; resourcesContainer = GetBlobClient().GetContainerReference("resources"); var requestOptions = new BlobRequestOptions {RetryPolicy = new ExponentialRetry()}; resourcesContainer.CreateIfNotExists(requestOptions, null); var permissions = resourcesContainer.GetPermissions(); permissions.PublicAccess = BlobContainerPublicAccessType.Container; resourcesContainer.SetPermissions(permissions); } private bool ResourcesContainerExists() { return GetContainerByName("resources").Exists(); } } }<file_sep>/src/MagStore.Test/Checkout/InitiatingPayment/When_processing_a_payment_with_a_zero_total_amount.cs using System; using System.Collections.Generic; using System.Linq; using FluentAssertions; using MagStore.Entities; using NSubstitute; using 
NUnit.Framework; namespace MagStore.Test.Checkout.InitiatingPayment { public class When_processing_a_payment_with_a_zero_total_amount : When_initiating_a_payment { private TestDelegate testDelegate; protected override void Arrange() { base.Arrange(); var products = Substitute.For<IList<string>>(); products.Add(string.Empty); InputModel.Products = products; } protected override void Act() { testDelegate = () => ControllerUnderTest.InitiatePayment(InputModel); } [Test] public void Should_throw_invalidoperationexception_if_the_initiatepaymentmodel_contains_a_zero_amount_total() { var message = Assert.Throws<InvalidOperationException>( testDelegate, "There is a positive total attached to this payment initiation.").Message; message.Should().Be("The total for this payment initiation must have a positive value."); } } }<file_sep>/src/MagStore/Infrastructure/Interfaces/IShopSettings.cs namespace MagStore.Infrastructure.Interfaces { public interface IShopSettings : IRavenEntity { string Name { get; set; } string TagLine { get; set; } string CurrencySymbol { get; set; } string Logo { get; set; } string CurrencyFormat { get; set; } decimal CurrencyConversion { get; set; } decimal DeliveryCharge { get; set; } } }<file_sep>/src/MagStore/Indexes/Products_FullText.cs using System.Linq; using MagStore.Entities; using Raven.Abstractions.Indexing; using Raven.Client.Indexes; namespace MagStore.Indexes { public class Products_FullText : AbstractIndexCreationTask<Product, Products_FullText.Result> { public Products_FullText() { Map = products => products.Select(product => new { Fields = new object[] { product.Tags, product.Name, product.Description } }); Indexes.Add(x => x.Fields, FieldIndexing.Analyzed); } public class Result { public string Fields { get; set; } } } }<file_sep>/src/MagStore.Web/Models/Product/DeleteProductInputModel.cs namespace MagStore.Web.Models.Product { public class DeleteProductInputModel { public string Id { get; set; } } 
}<file_sep>/src/MagStore.Web/Models/Shop/DeleteLogoFromSettingsPostInputModel.cs namespace MagStore.Web.Models.Shop { public class DeleteLogoFromSettingsPostInputModel { public string Logo { get; set; } } }<file_sep>/src/MagStore/ReservationForUniqueFieldValue.cs namespace MagStore { public class ReservationForUniqueFieldValue { public string Id { get; set; } } } <file_sep>/src/MagStore.Web/Models/Product/ViewProductsByCategoryInputModel.cs namespace MagStore.Web.Models.Product { public class ViewProductsByCategoryInputModel { public string Category { get; set; } public string Gender { get; set; } } }<file_sep>/src/MagStore/Infrastructure/Coordinator.cs using System.Collections.Generic; using MagStore.Infrastructure.Interfaces; using Raven.Client.Linq; namespace MagStore.Infrastructure { public class Coordinator<T> : ICoordinator<T> where T : IRavenEntity { private readonly IRepository ravenRepository; public Coordinator(IRepository ravenRepository) { this.ravenRepository = ravenRepository; } public void Save(T entity) { Save(new List<T> { entity }); } public void Save(IEnumerable<T> entities) { foreach (var entity in entities) { ravenRepository.SaveAndCommit(entity); } } public T Load(string id) { return ravenRepository.Load<T>(id); } public IEnumerable<T> Load(IEnumerable<string> ids) { return ravenRepository.Load<T>(ids); } public IList<T> List() { return ravenRepository.List<T>(); } public IRavenQueryable<T> Query<T>() { return ravenRepository.Query<T>(); } public void Delete(T entity) { ravenRepository.Delete(entity); } } } <file_sep>/src/MagStore/Entities/Promotion.cs using System; using System.Collections.Generic; using MagStore.Entities.Enums; using MagStore.Infrastructure.Interfaces; namespace MagStore.Entities { public class Promotion : IRavenEntity { public Promotion() { Restrictions = new List<string>(); } public string Id { get; set; } public string Name { get; set; } public string Code { get; set; } public DateTime ValidFrom { get; set; } public 
DateTime ValidTo { get; set; } public DiscountType DiscountType { get; set; } public decimal DiscountAmount { get; set; } public string Exclusivity { get; set; } public IEnumerable<string> Restrictions { get; set; } } }<file_sep>/src/MagStore/Infrastructure/Interfaces/IUserCoordinator.cs using System; using MagStore.Entities; namespace MagStore.Infrastructure.Interfaces { public interface IUserCoordinator { void SaveUser(User user); User LoadUser(Guid id); } }<file_sep>/src/MagStore/Entities/Enums/AccountLevel.cs namespace MagStore.Entities.Enums { public enum AccountLevel { Customer, Support, Elevated } }<file_sep>/src/MagStore.Payments/Processors/SagePayPaymentProcessor.cs using System; using System.Collections.Generic; using MagStore.Entities; using MagStore.Payments.Messages; using SagePayMvc; namespace MagStore.Payments.Processors { public class SagePayPaymentProcessor : IPaymentProcessor { private readonly ITransactionRegistrar registrar; public SagePayPaymentProcessor(ITransactionRegistrar registrar) { this.registrar = registrar; } public SagePayPaymentProcessor() { } public ITransactionRegistrar Registrar { private get; set; } public IAuthResponse Authorise(IAuthRequest authRequest) { ValidateRequest(authRequest); TransactionRegistrationResponse t = registrar.Send( authRequest.Context, authRequest.TransactionId, ConvertToShoppingBasket(authRequest.Products), authRequest.BillingAddress, authRequest.DeliveryAddress, authRequest.CustomerEmail); return new SagePayAuthResponse(); } private ShoppingBasket ConvertToShoppingBasket(IList<Product> products) { return default(ShoppingBasket); // new ShoppingBasket("Test"); } private void ValidateRequest(IAuthRequest authRequest) { if (authRequest == null) { throw new ArgumentNullException("authRequest", "A valid request is required for SagePay Auth requests."); } if (authRequest.Context == null) { throw new ArgumentNullException("authRequest.Context", "The RequestContext is required for SagePay Auth requests."); } if 
(authRequest.TransactionId == null) { throw new ArgumentNullException("authRequest.TransactionId", "The TransactionId is required for SagePay Auth requests."); } if (authRequest.Products == null) { throw new ArgumentNullException("authRequest.Products", "The TransactionId is required for SagePay Auth requests."); } if (authRequest.CustomerEmail == null) { throw new ArgumentNullException("authRequest.CustomerEmail", "The CustomerEmail is required for SagePay Auth requests."); } if (authRequest.BillingAddress == null) { throw new ArgumentNullException("authRequest.BillingAddress", "The BillingAddress is required for SagePay Auth requests."); } if (authRequest.DeliveryAddress == null) { throw new ArgumentNullException("authRequest.DeliveryAddress", "The DeliveryAddress is required for SagePay Auth requests."); } } } }<file_sep>/src/MagStore.Payments/Messages/SagePayAuthRequest.cs using System.Collections.Generic; using System.Web.Routing; using MagStore.Entities; using SagePayMvc; namespace MagStore.Payments.Messages { public class SagepayAuthRequest : IAuthRequest { public IDictionary<string, string> DataPairs { get; set; } public RequestContext Context { get; set; } public string TransactionId { get; set; } public IList<Product> Products { get; set; } public string CustomerEmail { get; set; } public Address BillingAddress { get; set; } public Address DeliveryAddress { get; set; } public SagepayAuthRequest(string vendorTxCode, string amount, string currency, string description, string successUrl, string failureUrl, string billingSurname, string billingFirstNames, string billingAddress1, string billingCity, string billingPostCode, string billingCountry, string deliverySurname, string deliveryFirstNames, string deliveryAddress1, string deliveryCity, string deliveryPostCode, string deliveryCountry) { DataPairs = new Dictionary<string, string> { {"VendorTxCode", vendorTxCode}, {"Amount", amount}, {"Currency", currency}, {"Description", description}, {"SuccessURL", 
successUrl}, {"FailureURL", failureUrl}, {"BillingSurname", billingSurname}, {"BillingFirstnames", billingFirstNames}, {"BillingAddress1", billingAddress1}, {"BillingCity", billingCity}, {"BillingPostCode", billingPostCode}, {"BillingCountry", billingCountry}, {"DeliverySurname", deliverySurname}, {"DeliveryFirstnames", deliveryFirstNames}, {"DeliveryAddress1", deliveryAddress1}, {"DeliveryCity", deliveryCity}, {"DeliveryPostCode", deliveryPostCode}, {"DeliveryCountry", deliveryCountry} }; BillingAddress = new Address { Address1 = billingAddress1, City = billingCity, PostCode = billingPostCode, Country = billingCountry, Firstnames = billingFirstNames, Surname = billingSurname }; DeliveryAddress = new Address { Address1 = deliveryAddress1, City = deliveryCity, PostCode = deliveryPostCode, Country = deliveryCountry, Firstnames = deliveryFirstNames, Surname = deliverySurname }; } // optional public string CustomerName { set { UpsertVariable(value, "CustomerName"); } } public string CustomerEMail { set { UpsertVariable(value, "CustomerEMail"); } } public string VendorEMail { set { UpsertVariable(value, "VendorEMail"); } } public string EMailMessage { set { UpsertVariable(value, "CustomerEMail"); } } public string BillingAddress2 { set { UpsertVariable(value, "BillingAddress2"); } } public string BillingState { set { UpsertVariable(value, "BillingState"); } } public string BillingPhone { set { UpsertVariable(value, "BillingPhone"); } } public string DeliveryAddress2 { set { UpsertVariable(value, "DeliveryAddress2"); } } public string DeliveryState { set { UpsertVariable(value, "DeliveryState"); } } public string DeliveryPhone { set { UpsertVariable(value, "DeliveryPhone"); } } public string Basket { set { UpsertVariable(value, "Basket"); } } public string AllowGiftAid { set { UpsertVariable(value, "AllowGiftAid"); } } public string ApplyAvscv2 { set { UpsertVariable(value, "ApplyAVSCV2"); } } public string Apply3DSecure { set { UpsertVariable(value, "Apply3DSecure"); } 
} public string BillingAgreement { set { UpsertVariable(value, "BillingAgreement"); } } public string BasketXml { set { UpsertVariable(value, "BasketXML"); } } public string CustomerXml { set { UpsertVariable(value, "CustomerXML"); } } public string SurchargeXml { set { UpsertVariable(value, "SurchargeXML"); } } public string VendorData { set { UpsertVariable(value, "VendorData"); } } public string ReferrerId { set { UpsertVariable(value, "ReferrerID"); } } public string Language { set { UpsertVariable(value, "Language"); } } public string Website { set { UpsertVariable(value, "Website"); } } private void UpsertVariable(string value, string variable) { if (DataPairs.ContainsKey(variable)) { DataPairs[variable] = value; } else { DataPairs.Add(variable, value); } } } }<file_sep>/src/MagStore/Infrastructure/Shop.cs using System; using System.Linq.Expressions; using MagStore.Entities; using MagStore.Infrastructure.Interfaces; using Raven.Client.Document; namespace MagStore.Infrastructure { public class Shop : IShop { private readonly IRepository finder; public Shop(IRepository finder) { this.finder = finder; } public ICoordinator<T> GetCoordinator<T>() where T : IRavenEntity { return new Coordinator<T>(finder); } public ILoaderWithInclude<T> Include<T>(Expression<Func<T, object>> path) { return finder.Include(path); } public IShopSettings GetSettings() { var settings = finder.Load<ShopSettings>("settings"); if (settings == null) { settings = new ShopSettings(); settings.Id = "settings"; settings.Name = string.Empty; settings.TagLine = string.Empty; settings.CurrencySymbol = string.Empty; settings.CurrencyFormat = "0.00"; settings.CurrencyConversion = 0m; settings.Logo = string.Empty; finder.SaveAndCommit(settings); } return settings; } public void UpdateSettings(IShopSettings settings) { finder.SaveAndCommit(settings); } public IShopSettings Settings { get; set; } } } <file_sep>/src/MagStore.Web/Models/Product/ProductsViewModel.cs using System.Collections.Generic; 
namespace MagStore.Web.Models.Product { public class ProductsViewModel { public Entities.Catalogue Catalogue { get; set; } public IEnumerable<Entities.Product> Products { get; set; } } }<file_sep>/src/MagStore.Web/Models/Product/ViewProductViewModel.cs using System.Collections.Generic; namespace MagStore.Web.Models.Product { public class ViewProductViewModel { public IEnumerable<Entities.Product> Products { get; set; } } }<file_sep>/src/MagStore/Infrastructure/Interfaces/IRavenEntity.cs namespace MagStore.Infrastructure.Interfaces { public interface IRavenEntity { string Id { get; set; } } }<file_sep>/src/MagStore.Web/Models/ShoppingCart/UpdateProductQuantityPostInputModel.cs namespace MagStore.Web.Models.ShoppingCart { public class UpdateProductQuantityPostInputModel { public string Id { get; set; } public int Quantity { get; set; } } }<file_sep>/src/MagStore.Web/Models/Product/CreateProductViewModel.cs using System.Collections.Generic; using MagStore.Entities.Enums; namespace MagStore.Web.Models.Product { public class CreateProductViewModel { public CreateProductViewModel(IEnumerable<KeyValuePair<string, string>> catalogue, IEnumerable<KeyValuePair<string, string>> promotions) { Catalogue = catalogue; Promotions = promotions; } public string Id { get; set; } public string Code { get; set; } public string Name { get; set; } public string Description { get; set; } public string Specification { get; set; } public IEnumerable<KeyValuePair<string, string>> Catalogue { get; set; } public IEnumerable<string> Colours { get; set; } public IEnumerable<string> Sizes { get; set; } public string Gender { get; set; } public string Brand { get; set; } public string Supplier { get; set; } public int Rating { get; set; } public IEnumerable<string> Reviews { get; set; } public IEnumerable<string> Images { get; set; } public decimal Price { get; set; } public ProductType ProductType { get; set; } public int[] AgeRange { get; set; } public DiscountType DiscountType { get; set; } 
public decimal DiscountAmount { get; set; } public IEnumerable<KeyValuePair<string, string>> Promotions { get; set; } public string Tags { get; set; } } }<file_sep>/src/MagStore/Entities/Enums/ImageType.cs namespace MagStore.Entities.Enums { public enum ImageType { Thumb, Feature, HiRes } }<file_sep>/src/MagStore.Web/Controllers/HomeController.cs using System.Web.Mvc; using MagStore.Entities; using MagStore.Web.Models; namespace MagStore.Web.Controllers { public class HomeController : Controller { public ActionResult Index() { var user = GetCurrentUser(); var homeViewModel = new HomeViewModel { UserId = user.Id }; return View(homeViewModel); } private User GetCurrentUser() { return Session["CurrentUser"] as User; } public ActionResult About() { return View(); } public ActionResult Contact() { return View(); } } }<file_sep>/src/MagStore.Web/Controllers/ProductController.cs using System; using System.Collections.Generic; using System.Linq; using System.Web; using System.Web.Mvc; using MagStore.Azure; using MagStore.Entities; using MagStore.Entities.Enums; using MagStore.Infrastructure.Interfaces; using MagStore.Web.Models.Product; namespace MagStore.Web.Controllers { public class ProductController : Controller { private readonly IShop shop; private readonly IStorageAccessor storageAccessor; private readonly ProductControllerHelper productControllerHelper; public ProductController(IShop shop, IStorageAccessor storageAccessor) { this.shop = shop; this.storageAccessor = storageAccessor; productControllerHelper = new ProductControllerHelper(this.shop, this.storageAccessor); } [HttpGet] public ActionResult CreateProduct() { var catalogues = shop.GetCoordinator<Catalogue>() .List() .Select(x => new KeyValuePair<string, string>(x.Id, x.Name)); var promotions = shop.GetCoordinator<Promotion>() .List() .Select(x => new KeyValuePair<string, string>(x.Id, x.Name)); return View(new CreateProductViewModel(catalogues, promotions)); } [HttpPost] public ActionResult 
CreateProduct(CreateProductInputModel inputModel) { if (ModelState.IsValid) { var sizes = inputModel.Sizes; var products = (from c in inputModel.Colours where sizes != null from s in sizes select new Product { Id = Guid.NewGuid().ToString(), Code = inputModel.Code, Name = inputModel.Name, Description = inputModel.Description, Specification = inputModel.Specification, Catalogue = inputModel.Catalogue, Brand = inputModel.Brand, Colour = c, DiscountAmount = inputModel.DiscountAmount, DiscountType = inputModel.DiscountType, Gender = inputModel.Gender, Price = inputModel.Price, ProductType = inputModel.ProductType, Rating = inputModel.Rating, Reviews = inputModel.Reviews, Size = s, Supplier = inputModel.Supplier, Tags = inputModel.Tags, Images = inputModel.UploadedImages == null ? new List<string>() : CreateImages(productControllerHelper.ParseImagesFromModel(inputModel)) }).ToList(); shop.GetCoordinator<Product>().Save(products); return RedirectToAction("EditProduct", new { id = products.First().Id }); } var catalogues = shop.GetCoordinator<Catalogue>() .List() .Select(x => new KeyValuePair<string, string>(x.Id, x.Name)); var promotions = shop.GetCoordinator<Promotion>() .List() .Select(x => new KeyValuePair<string, string>(x.Id, x.Name)); return View(new CreateProductViewModel(catalogues, promotions)); } public ActionResult ViewProducts() { return View(new ViewProductViewModel { Products = OrderedProducts() }); } private IEnumerable<Product> OrderedProducts() { var enumerator = shop.GetCoordinator<Product>() .List() .OrderByDescending(p => p.Code) .ThenBy(p => p.Id) .GetEnumerator(); while (enumerator.MoveNext()) { yield return enumerator.Current; } } [HttpGet] public ActionResult EditProduct(string id) { // TODO: Sort products before pumping them out var product = shop.GetCoordinator<Product>().Load(id); var editProductViewModel = productControllerHelper.GetEditProductViewModel(product); return View(editProductViewModel); } [HttpPost] public ActionResult 
EditProduct(EditProductInputModel inputModel) { var product = productControllerHelper .MapProductModelChangesToEntity(inputModel, shop.GetCoordinator<Product>().Load(inputModel.Id)); var existingImages = new List<KeyValuePair<string, string>>(); if (inputModel.ExistingImages != null) { for (var i = 0; i < inputModel.ExistingImages.Count(); i++) { var imageAndType = new KeyValuePair<string, string> ( inputModel.ExistingImages.Skip(i).Take(1).Single(), inputModel.ExistingPhotoType.Skip(i).Take(1).Single() ); existingImages.Add(imageAndType); } } UpdateImages(existingImages); product.Images = product.Images.Union ( CreateImages(productControllerHelper.ParseImagesFromModel(inputModel)) ); shop.GetCoordinator<Product>().Save(product); return RedirectToAction("EditProduct", new { inputModel.Id }); // View(productControllerHelper.GetEditProductViewModel(product)); } [HttpGet] public ActionResult ShowProducts(ViewProductsByCategoryInputModel inputModel) { var products = shop.GetCoordinator<Product>().List(); var availableCategories = from a in Enum.GetValues(typeof(ProductType)).AsQueryable().OfType<ProductType>() from p in products where p.ProductType == a where p.Gender.ToUpper() == inputModel.Gender.ToUpper() select a; var filteredProducts = availableCategories .SelectMany(a => products.Where(p => p.ProductType == a)) .AsEnumerable(); var images = shop .GetCoordinator<ProductImage>() .Load(availableCategories .SelectMany(a => products .Where(p => p.ProductType == a) .SelectMany(p => p.Images)) ) .Where(i => i.ImageType == ImageType.Thumb.ToString()); IDictionary<string, string> filters = new Dictionary<string, string>(); filters.Add("Gender", inputModel.Gender); return View(new ProductCategoriesViewModel { Categories = availableCategories, Products = filteredProducts, Images = images, Filters = filters }); } [HttpGet] public ActionResult ViewProductsByCategory(ViewProductsByCategoryInputModel inputModel) { var products = shop.GetCoordinator<Product>() .List() .Where(p 
=> p.ProductType == (ProductType)Enum.Parse(typeof(ProductType), inputModel.Category)); var productTypes = Enum.GetValues(typeof(ProductType)).AsQueryable().OfType<ProductType>(); var availableCategories = from a in productTypes from p in products where p.ProductType == a where p.Gender.ToUpper() == inputModel.Gender.ToUpper() select a; var imagesCoordinator = shop.GetCoordinator<ProductImage>(); var ids = availableCategories .SelectMany(a => products .Where(p => p.ProductType == a) .SelectMany(p => p.Images)); var productImages = imagesCoordinator.Load(ids); var thumbs = productImages.Where(i => i.ImageType == ImageType.Thumb.ToString()); IDictionary<string, string> filters = new Dictionary<string, string>(); filters.Add("Category", inputModel.Category); filters.Add("Gender", inputModel.Gender); return View(new ViewProductsByCategoryViewModel { Products = products, ProductType = inputModel.Category, Images = thumbs, Filters = filters }); } public ActionResult ShowProduct(ShowProductInputModel inputModel) { var productCoordinator = shop.GetCoordinator<Product>(); var ravenQueryable = productCoordinator.List(); var query = ravenQueryable.Where(p => p.Code == inputModel.Code); var productKeyValuePairs = query.Select(d => new KeyValuePair<string, Product>(d.Id, d)); var products = productKeyValuePairs.ToList(); var product = products.First().Value;//shop.Include<Product>(p => p.Images).Load(inputModel.Id); var images = shop.GetCoordinator<ProductImage>() .Load(product.Images) .Where(i => i.ImageType == ImageType.Feature.ToString()); var availableColours = products.Select(c => c.Value.Colour); var availableSizes = products.Select(c => c.Value.Size).OrderBy(c => c).Select(c => c.ToString()); var filters = new Dictionary<string, string> { {"Category", inputModel.Category}, {"Gender", inputModel.Gender}, {"Code", inputModel.Code} }; return View(new ShowProductViewModel { Product = product, ProductImages = images, ProductVariants = products, AvailableColours = 
availableColours, AvailableSizes = availableSizes, Filters = filters }); } [HttpPost] [Authorize(Roles = "Administrator")] public ActionResult DeleteProduct(DeleteProductInputModel inputModel) { var product = shop.GetCoordinator<Product>().Load(inputModel.Id); shop.GetCoordinator<Product>().Delete(product); return RedirectToAction("ViewProducts"); } private void UpdateImages(IEnumerable<KeyValuePair<string, string>> images) { foreach (var image in images) { UpdateImage(image.Key, image.Value); } } private void UpdateImage(string id, string imageType) { var img = shop.GetCoordinator<ProductImage>().Load(id); img.ImageType = imageType; shop.GetCoordinator<ProductImage>().Save(img); } private IEnumerable<string> CreateImages(IEnumerable<KeyValuePair<string, HttpPostedFileBase>> images) { var result = new List<string>(); foreach (var image in images) { var id = Guid.NewGuid().ToString(); CreateImage(image.Value, image.Key, id); result.Add(id); } return result; } private void CreateImage(HttpPostedFileBase image, string imageType, string fileName) { var inputStream = image.InputStream; var uri = storageAccessor.AddBlobToResource(fileName, inputStream); var img = new ProductImage { Id = fileName, ImageType = imageType, ImageUrl = uri.ToString() }; shop.GetCoordinator<ProductImage>().Save(img); } } }<file_sep>/src/MagStore.Web/Models/Catalogue/DeleteCatalogueInputModel.cs namespace MagStore.Web.Models.Catalogue { public class DeleteCatalogueInputModel { public string Id { get; set; } } }<file_sep>/src/MagStore.Web/Models/EditUserModel.cs namespace MagStore.Web.Models { public class EditUserModel { public string Username { get; set; } public string Email { get; set; } public string[] Roles { get; set; } public string[] UserRoles { get; set; } public EditUserModel() { } public EditUserModel(string username, string email, string[] roles, string[] userRoles) { Username = username; Email = email; Roles = roles; UserRoles = userRoles; } } 
}<file_sep>/src/MagStore.Web/Controllers/ProductControllerHelper.cs using System; using System.Collections.Generic; using System.Linq; using System.Web; using MagStore.Azure; using MagStore.Entities; using MagStore.Infrastructure.Interfaces; using MagStore.Web.Models.Product; namespace MagStore.Web.Controllers { using UploadedImages = IEnumerable<KeyValuePair<string, HttpPostedFileBase>>; using ExistingAzureImages = IEnumerable<KeyValuePair<string, string>>; public class ProductControllerHelper { private readonly IShop shop; private readonly IStorageAccessor storage; public ProductControllerHelper(IShop shop, IStorageAccessor storage) { this.shop = shop; this.storage = storage; } public IEnumerable<string> SaveImagesToRaven(IEnumerable<KeyValuePair<Guid, string>> savedInAzure) { var images = new List<string>(); foreach (var vp in savedInAzure) { var productImage = new ProductImage { Id = vp.Key.ToString(), ImageType = vp.Value, ImageUrl = vp.Key.ToString() }; shop.GetCoordinator<ProductImage>().Save(productImage); images.Add(vp.Key.ToString()); } return images; } public IEnumerable<KeyValuePair<Guid, string>> SaveImagesToAzure(UploadedImages uploadedImageAndType) { uploadedImageAndType.Select(vp => storage.AddBlobToResource(vp.Value.FileName, vp.Value.InputStream)); return uploadedImageAndType.Select(vp => new KeyValuePair<Guid, string>(Guid.NewGuid(), vp.Key)).ToList(); } public IEnumerable<KeyValuePair<Guid, string>> UpdateImagesInAzure(UploadedImages uploadedImageAndType, ExistingAzureImages existingAzureImages) { return from e in existingAzureImages from u in uploadedImageAndType where e.Key == u.Key select AddImageAndUpdate(e, u); } private KeyValuePair<Guid, string> AddImageAndUpdate(KeyValuePair<string, string> e, KeyValuePair<string, HttpPostedFileBase> u) { storage.AddBlobToResource(e.Value, u.Value.InputStream); return new KeyValuePair<Guid, string>(Guid.Parse(e.Key), u.Key); } public UploadedImages ParseImagesFromModel(CreateProductInputModel inputModel) 
{ return inputModel.UploadedImages.Select((t, i) => new KeyValuePair<string, HttpPostedFileBase>(inputModel.PhotoType[i], t)).ToList(); } public UploadedImages ParseImagesFromModel<T>(T inputModel) where T : IProductPostInputModel { if (inputModel.UploadedImages == null) { return new List<KeyValuePair<string, HttpPostedFileBase>>(); } return inputModel.UploadedImages .Select((t, i) => new KeyValuePair<string, HttpPostedFileBase>(inputModel.PhotoType[i], t)) .ToList(); } public Product MapProductModelChangesToEntity(EditProductInputModel inputModel, Product product) { product.AgeRange = inputModel.AgeRange; product.Brand = inputModel.Brand; product.Catalogue = inputModel.Catalogue; product.Colour = inputModel.Colour; product.Description = inputModel.Description; product.Specification = inputModel.Specification; product.DiscountAmount = inputModel.DiscountAmount; product.DiscountType = inputModel.DiscountType; product.Gender = inputModel.Gender; product.Code = inputModel.Code; product.Name = inputModel.Name; product.Price = inputModel.Price; product.ProductType = inputModel.ProductType; product.Promotions = inputModel.Promotions; product.Rating = inputModel.Rating; product.Reviews = inputModel.Reviews; product.Size = inputModel.Size; product.Supplier = inputModel.Supplier; product.Tags = inputModel.Tags; return product; } private IEnumerable<ProductImage> UpdateImageChanges(IEnumerable<string> images, IEnumerable<HttpPostedFileBase> uploadedImages) { foreach (var productImage in images) { } return null; } public EditProductViewModel GetEditProductViewModel(Product p) { var catalogues = shop.GetCoordinator<Catalogue>().List(); var images = shop.GetCoordinator<ProductImage>() .Load(p.Images); var editProductViewModel = new EditProductViewModel { Id = p.Id, Code = p.Code, Name = p.Name, Description = p.Description, Specification = p.Specification, Catalogue = p.Catalogue, Brand = p.Brand, Colour = p.Colour, DiscountAmount = p.DiscountAmount, DiscountType = 
p.DiscountType, Gender = p.Gender, Price = p.Price, ProductType = p.ProductType, Rating = p.Rating, Reviews = p.Reviews, Size = p.Size, Supplier = p.Supplier, CatalogueList = catalogues, Images = images, Tags = string.Join(",", p.Tags ?? new string[0]) }; return editProductViewModel; } } }<file_sep>/src/MagStore.Test/Payments/MakingPaymentsSetUpFixture.cs using MagStore.Payments.Messages; using MagStore.Payments.Processors; using MagStore.Payments.Providers; using NSubstitute; using SagePayMvc; namespace MagStore.Test.Payments { public class MakingPaymentsSetUpFixture : TestSetUpFixture { private IPaymentProvider paymentProvider; protected IAuthRequest AuthRequest; protected IAuthResponse AuthResponse; protected IPaymentProcessor PaymentProcessor; protected ITransactionRegistrar TransactionRegistrar; protected override void Arrange() { TransactionRegistrar = Substitute.For<ITransactionRegistrar>(); PaymentProcessor = Substitute.For<SagePayPaymentProcessor>(TransactionRegistrar); paymentProvider = new SagePayPaymentProvider(PaymentProcessor); AuthRequest = Substitute.For<IAuthRequest>(); AuthRequest.BillingAddress = Substitute.For<Address>(); AuthRequest.DeliveryAddress = Substitute.For<Address>(); } protected override void Act() { AuthResponse = paymentProvider.MakePayment(AuthRequest); } } }<file_sep>/src/MagStore.Web/Controllers/PromotionController.cs using System; using System.Linq; using System.Web.Mvc; using MagStore.Entities; using MagStore.Infrastructure.Interfaces; using MagStore.Web.Models.Promotion; using MagStore.Web.ShopHelpers; namespace MagStore.Web.Controllers { public class PromotionController : Controller { private readonly IShop shop; private readonly PromotionHelper promotionHelper; public PromotionController(IShop shop) { this.shop = shop; promotionHelper = new PromotionHelper(shop); } public ActionResult Promotion() { throw new System.NotImplementedException(); } [HttpGet] public ActionResult CreatePromotion() { return View(new 
CreatePromotionViewModel(promotionHelper)); } [HttpPost] public ActionResult CreatePromotion(CreatePromotionInputModel inputModel) { var promotion = new Promotion { Id = Guid.NewGuid().ToString(), Name = inputModel.Name, Code = inputModel.Code, ValidFrom = inputModel.ValidFrom, ValidTo = inputModel.ValidTo, DiscountAmount = inputModel.DiscountAmount, DiscountType = inputModel.DiscountType, Exclusivity = inputModel.Exclusivity, Restrictions = inputModel.Restrictions.ToList() }; shop.GetCoordinator<Promotion>().Save(promotion); return RedirectToAction("EditPromotion", "Promotion", new {promotion.Id}); } public ActionResult ViewPromotions() { var promotions = shop.GetCoordinator<Promotion>().List(); return View(new PromotionsViewModel{Promotions = promotions}); } public ActionResult EditPromotion(string id) { var p = shop.GetCoordinator<Promotion>().Load(id); return View(new PromotionViewModel { Id = p.Id, Name = p.Name, Code = p.Code, ValidFrom = p.ValidFrom, ValidTo = p.ValidTo, DiscountType = p.DiscountType, DiscountAmount = p.DiscountAmount, Exclusivity = p.Exclusivity, Restrictions = p.Restrictions }); } public ActionResult ViewPromotion(string id) { var p = shop.GetCoordinator<Promotion>().Load(id); return View(new PromotionViewModel { Id = p.Id, Name = p.Name, Code = p.Code, ValidFrom = p.ValidFrom, ValidTo = p.ValidTo, DiscountType = p.DiscountType, DiscountAmount = p.DiscountAmount, Exclusivity = p.Exclusivity, Restrictions = p.Restrictions }); } } }<file_sep>/src/MagStore.Payments/Providers/IPaymentProvider.cs using MagStore.Payments.Messages; namespace MagStore.Payments.Providers { public interface IPaymentProvider { IAuthResponse MakePayment(IAuthRequest authRequest); } } <file_sep>/src/MagStore.Web/Content/js/settings-inner.js $(function(){ // Dropdown menu // ------------------------------------------------------------------------------------------- var options = {minWidth: 180, onClick: function(e, menuItem){}}; $('nav ul').menu(options); // Social 
network icons animation // ------------------------------------------------------------------------------------------- $(".social-networks ul li a").hover(function(){ $("img", this).stop().animate({top:"-26px"},{queue:false,duration:200}); }, function() { $("img", this).stop().animate({top:"0px"},{queue:false,duration:200}); }); // Latest product zoom effect // ------------------------------------------------------------------------------------------- $('.zoom').hoverZoom({ overlayColor: '#000', overlayOpacity: 0.3, zoom: 10 }); // Accordion // ------------------------------------------------------------------------------------------- $( ".accordion" ).accordion({ autoHeight: false, navigation: true, collapsible: true, active: false }); // Tabs // ------------------------------------------------------------------------------------------- $( ".tabs" ).tabs(); // Classes // ------------------------------------------------------------------------------------------- $("tr:even").addClass('even'); // Gallery // ------------------------------------------------------------------------------------------- $("#pikame").PikaChoose({carousel:true}); });<file_sep>/src/MagStore.Web/Models/ShoppingCart/CheckoutGetInputModel.cs namespace MagStore.Web.Models.ShoppingCart { public class CheckoutGetInputModel { } }<file_sep>/src/MagStore/Infrastructure/ModelClientValidationStringLengthRule.cs namespace MagStore.Infrastructure { /// <summary> /// Provides a container for a string-length validation rule that is sent to the browser. /// </summary> public class ModelClientValidationStringLengthRule : ModelClientValidationRule { /// <summary> /// Initializes a new instance of the <see cref="T:System.Web.Mvc.ModelClientValidationStringLengthRule"/> class. 
/// </summary> /// <param name="errorMessage">The validation error message.</param><param name="minimumLength">The minimum length of the string.</param><param name="maximumLength">The maximum length of the string.</param> public ModelClientValidationStringLengthRule(string errorMessage, int minimumLength, int maximumLength) { ErrorMessage = errorMessage; ValidationType = "length"; if (minimumLength != 0) ValidationParameters["min"] = minimumLength; if (maximumLength == int.MaxValue) return; ValidationParameters["max"] = maximumLength; } } } <file_sep>/src/MagStore/Entities/Fulfillment.cs using MagStore.Entities.Enums; using MagStore.Infrastructure.Interfaces; namespace MagStore.Entities { public class Fulfillment : IRavenEntity { public string Id { get; set; } public string OrderId { get; set; } public FulfillmentStatus FulfillmentStatus { get; set; } } }<file_sep>/src/MagStore.Web/Models/ShoppingCart/SaveAddressesPostInputModel.cs using SagePayMvc; namespace MagStore.Web.Models.ShoppingCart { public class SaveAddressesPostInputModel { public Address BillingAddress { get; set; } public bool UseBillingAddress { get; set; } public Address DeliveryAddress { get; set; } } }<file_sep>/src/MagStore.Web/Controllers/SearchController.cs using AutoMapper; using MagStore.Entities; using MagStore.Indexes; using MagStore.Web.Models.Product; using Raven.Client; using System.Linq; using System.Web.Mvc; namespace MagStore.Web.Controllers { public class SearchController : Controller { readonly IDocumentSession documentSession; public SearchController(IDocumentSession documentSession) { this.documentSession = documentSession; } public ActionResult Index(string searchText) { var products = documentSession.Query<Products_FullText.Result, Products_FullText>().Search(x => x.Fields, searchText).As<Product>().ToArray(); var viewModels = products.Select(Mapper.DynamicMap<SearchProductViewModel>); return View(viewModels); } } 
}<file_sep>/src/MagStore.Web/Models/ShoppingCart/AddToCartPostInputModel.cs namespace MagStore.Web.Models.ShoppingCart { public class AddToCartPostInputModel { public string ProductId { get; set; } public string Code { get; set; } public string Colour { get; set; } public string Size { get; set; } } }<file_sep>/src/MagStore.Web/Models/Promotion/CreatePromotionViewModel.cs using System; using System.Collections.Generic; using System.Linq; using System.Web.Mvc; using MagStore.Entities.Enums; using MagStore.Web.ShopHelpers; namespace MagStore.Web.Models.Promotion { public class CreatePromotionViewModel { private readonly IPromotionHelper helper; public CreatePromotionViewModel(IPromotionHelper helper) { this.helper = helper; } public string Id { get; set; } public string Name { get; set; } public string Code { get; set; } public DateTime ValidFrom { get; set; } public DateTime ValidTo { get; set; } public DiscountType DiscountType { get; set; } public decimal DiscountAmount { get; set; } public string Exclusivity { get; set; } public IEnumerable<Entities.Promotion> Restrictions { get { return helper.ExistingPromotions; } } public IEnumerable<KeyValuePair<string, string>> ExclusivityList { get { return new[] { new KeyValuePair<string, string> ("Inclusive", "Can be used with other promotions"), new KeyValuePair<string, string> ("Exclusive","Cannot be used with other promotions") }; } } public IList<SelectListItem> DiscountTypeList { get { IList<KeyValuePair<string, string>> combined = new List<KeyValuePair<string, string>>(); combined.Add(new KeyValuePair<string, string>("-1", "Make a selection")); foreach (var enumVal in Enum.GetNames(typeof(DiscountType))) { combined.Add(new KeyValuePair<string, string>(enumVal, enumVal)); } return combined.Select(keyValuePair => new SelectListItem { Selected = keyValuePair.Key == "-1", Text = keyValuePair.Value, Value = keyValuePair.Key }).ToList(); } } } }<file_sep>/src/MagStore/Infrastructure/AccountMembershipService.cs using 
System; using System.Linq; using System.Transactions; using System.Web.Security; using MagStore.Infrastructure.Interfaces; namespace MagStore.Infrastructure { public class AccountMembershipService : IMembershipService { private readonly MembershipProvider _provider; private readonly RoleProvider _roleProvider; public AccountMembershipService() : this(null, null) { } public AccountMembershipService(MembershipProvider provider, RoleProvider roleProvider) { _provider = provider ?? Membership.Provider; _roleProvider = roleProvider ?? Roles.Provider; } public int MinPasswordLength { get { return _provider.MinRequiredPasswordLength; } } public bool ValidateUser(string userName, string password) { if (String.IsNullOrEmpty(userName)) throw new ArgumentException("Value cannot be null or empty.", "userName"); if (String.IsNullOrEmpty(password)) throw new ArgumentException("Value cannot be null or empty.", "password"); return _provider.ValidateUser(userName, password); } public MembershipCreateStatus CreateUser(string userName, string password, string email) { if (String.IsNullOrEmpty(userName)) throw new ArgumentException("Value cannot be null or empty.", "userName"); if (String.IsNullOrEmpty(password)) throw new ArgumentException("Value cannot be null or empty.", "password"); if (String.IsNullOrEmpty(email)) throw new ArgumentException("Value cannot be null or empty.", "email"); MembershipCreateStatus status; _provider.CreateUser(userName, password, email, null, null, true, null, out status); return status; } public bool ChangePassword(string userName, string oldPassword, string newPassword) { if (String.IsNullOrEmpty(userName)) throw new ArgumentException("Value cannot be null or empty.", "userName"); if (String.IsNullOrEmpty(oldPassword)) throw new ArgumentException("Value cannot be null or empty.", "oldPassword"); if (String.IsNullOrEmpty(newPassword)) throw new ArgumentException("Value cannot be null or empty.", "newPassword"); // The underlying ChangePassword() will 
throw an exception rather // than return false in certain failure scenarios. try { var currentUser = _provider.GetUser(userName, true /* userIsOnline */); return currentUser.ChangePassword(oldPassword, newPassword); } catch (ArgumentException) { return false; } catch (MembershipPasswordException) { return false; } } public MembershipUserCollection GetAllUsers() { int totalRecords; return _provider.GetAllUsers(0, 1000, out totalRecords); } public MembershipUser GetUser(string username) { return _provider.GetUser(username, false); } public string[] GetAllRoles() { return _roleProvider.GetAllRoles(); } public string[] GetRolesForUser(string username) { return _roleProvider.GetRolesForUser(username); } public void AddRole(string roleName) { _roleProvider.CreateRole(roleName); } public void UpdateUser(MembershipUser user, string[] roles) { using (var ts = new TransactionScope()) { _provider.UpdateUser(user); var existingRoles = _roleProvider.GetRolesForUser(user.UserName); if (roles != null && roles.Length > 0) { var rolesToBeAdded = roles.Except(existingRoles).ToArray(); _roleProvider.AddUsersToRoles(new[] { user.UserName }, rolesToBeAdded); } if (existingRoles.Length > 0) { var rolesToBeDeleted = (roles != null ? 
existingRoles.Except(roles) : existingRoles).ToArray(); _roleProvider.RemoveUsersFromRoles(new[] { user.UserName }, rolesToBeDeleted); } ts.Complete(); } } public void DeleteRole(string roleName) { using (var ts = new TransactionScope()) { // Delete role _roleProvider.DeleteRole(roleName, false); ts.Complete(); } } } } <file_sep>/src/MagStore.Test/Checkout/Checkingout/There_are_no_products_in_the_cart.cs using System.Collections.Generic; using FluentAssertions; using NUnit.Framework; namespace MagStore.Test.Checkout.Checkingout { public class There_are_no_products_in_the_cart : When_checking_out { protected override void Arrange() { base.Arrange(); var products = new List<string>(); var product = string.Empty; products.Add(product); InputModel.Products = new List<string>(); } [Test] public void Should_resolve_to_the_Error_view_if_there_are_no_products_in_the_model() { const string expectedView = "Error"; Result.ViewName.Should().Be(expectedView, "Expected no products on the model."); } } }<file_sep>/src/MagStore/Entities/Catalogue.cs using System.Collections.Generic; using MagStore.Entities.Enums; using MagStore.Infrastructure.Interfaces; namespace MagStore.Entities { public class Catalogue : IRavenEntity { private IEnumerable<string> promotions; public Catalogue() { Promotions = new List<string>(); } public string Id { get; set; } public string Name { get; set; } public DiscountType DiscountType { get; set; } public decimal DiscountAmount { get; set; } public IEnumerable<string> Promotions { get { return promotions ?? 
new List<string>(); } set { promotions = value; } } } }<file_sep>/src/MagStore.Web/Models/Product/EditProductViewModel.cs using System; using System.Collections.Generic; using System.Linq; using MagStore.Entities; using MagStore.Entities.Enums; namespace MagStore.Web.Models.Product { public class EditProductViewModel { public string Id { get; set; } public string Code { get; set; } public string Name { get; set; } public string Description { get; set; } public string Specification { get; set; } public string Colour { get; set; } public string Size { get; set; } public string Gender { get; set; } public string Brand { get; set; } public string Supplier { get; set; } public int Rating { get; set; } public IEnumerable<string> Reviews { get; set; } public IEnumerable<ProductImage> Images { get; set; } public decimal Price { get; set; } public ProductType ProductType { get; set; } public int[] AgeRange { get; set; } public DiscountType DiscountType { get; set; } public decimal DiscountAmount { get; set; } public IEnumerable<string> Promotions { get; set; } public string Catalogue { get; set; } public IList<Entities.Catalogue> CatalogueList { get; set; } public IEnumerable<string> ImageTypes { get { return new[] { "" }.Union(Enum.GetNames(typeof(ImageType))); } } public string Tags { get; set; } } }<file_sep>/src/MagStore.Web/Models/Shop/ChangeShopSettingsPostInputModel.cs using System.Web; namespace MagStore.Web.Models.Shop { public class ChangeShopSettingsPostInputModel { public string Name { get; set; } public string TagLine { get; set; } public string CurrencySymbol { get; set; } public string CurrencyFormat { get; set; } public decimal CurrencyConversion { get; set; } public decimal DeliveryCharge { get; set; } public HttpPostedFileBase Logo { get; set; } } }<file_sep>/src/MagStore/Infrastructure/Interfaces/IMembershipService.cs using System.Web.Security; namespace MagStore.Infrastructure.Interfaces { public interface IMembershipService { int MinPasswordLength { 
get; } bool ValidateUser(string userName, string password); MembershipCreateStatus CreateUser(string userName, string password, string email); bool ChangePassword(string userName, string oldPassword, string newPassword); MembershipUserCollection GetAllUsers(); MembershipUser GetUser(string username); string[] GetAllRoles(); string[] GetRolesForUser(string username); void AddRole(string roleName); void UpdateUser(MembershipUser user, string[] roles); void DeleteRole(string roleName); } } <file_sep>/src/MagStore/Entities/ShopSettings.cs using MagStore.Infrastructure.Interfaces; namespace MagStore.Entities { public class ShopSettings : IShopSettings { public ShopSettings() { // Id = "settings"; // Name = string.Empty; // TagLine = string.Empty; // CurrencySymbol = string.Empty; // Logo = string.Empty; } public string Id { get; set; } public string Name { get; set; } public string TagLine { get; set; } public string CurrencySymbol { get; set; } public string Logo { get; set; } public string CurrencyFormat { get; set; } public decimal CurrencyConversion { get; set; } public decimal DeliveryCharge { get; set; } } }
d0701bb3187d9ffdcae43890cc7eb465f0a2094d
[ "JavaScript", "C#" ]
105
C#
sharebright/MagStore
6b536bac22b0fcd3cc5ea0f8eecbfbdc12a35809
255ce0cbefa0cb25660870e835f404c575bf0f0a
refs/heads/master
<repo_name>fairy-of-9/CoNLL2009_formatter<file_sep>/exobrainToCoNLL2009.py import json from os import listdir from os.path import isfile, join output_file = open("output_file_path","w",encoding="utf-8") dirPath = "input_directory_path" onlyfiles = [f for f in listdir(dirPath) if isfile(join(dirPath,f))] sentence_cnt = 0 cnt = [0] * 200 # 문장내 단어수. for file in onlyfiles: filePath = dirPath+file print(filePath) input_file = open(filePath,"r",encoding="utf-8") json_data = json.loads(input_file.read()) #print(json_data) sentence_cnt += len(json_data["sentence"]) for sentence in json_data["sentence"]: cnt[len(sentence["word"])] += 1 if len(sentence["word"]) > 70: print('!!!') print(sentence) for word in sentence["word"]: id = int(word["id"]) form = word["text"] begin, end = word["begin"], word["end"] lemma = [] pos = [] for morp in sentence["morp"]: if int(morp["id"]) < begin: continue if int(morp["id"]) > end: break lemma.append(morp["lemma"]) pos.append(morp["type"]) lemma = "|".join(lemma) pos = "|".join(pos) feat = '_' head = 987654321 deprel = '_' for dep in sentence["dependency"]: if int(dep["id"]) == id: head = int(dep["head"])+1 deprel = dep["label"] break if head == 987654321: #print ('no head!!') head = '_' fillpred = '_' verb_text = '' args = [] for srl in sentence["SRL"]: if int(srl["word_id"]) == id: fillpred = 'Y' verb_text = srl["verb"] for argument in srl["argument"]: if int(argument["word_id"]) == id: args.append(argument["type"]) pred = '_' if verb_text != '': for wsd in sentence["WSD"]: if int(wsd["begin"]) < begin: continue if int(wsd["end"]) > end: break if verb_text in wsd["text"]: pred = verb_text + '.' + wsd["scode"] while len(args) < len(sentence["SRL"]): args.append('_') error_flag = 0 if fillpred == 'Y' and pred == '_': #print('error') pred = verb_text + '.' 
+ '01' #print('pred : _ -> ' + pred) error_flag = 1 id += 1 output_line = "\t".join([str(id),form, lemma, lemma, pos, pos, feat, feat, str(head), str(head), deprel, deprel, fillpred, pred] + args) output_line += '\n' if id == 1: output_line = '\n' + output_line if error_flag: print(output_line) output_file.write(output_line) input_file.close() output_file.close()<file_sep>/gangwonToCoNLL2009.py import json import sys import os def len_AP(f): #for conll 2009 format buf = [] pos = [] result = [0]*200 for line in f.readlines(): l = line.strip() if l == "": max_v = -1 for x in buf: for i in range(len(x[14:])): if x[14+i] != '_': try: result[abs(int(x[0]) - pos[i])] += 1 except: print(x) pos = [] buf = [] continue x = l.split("\t") if len(x) <= 1: continue id, t1, t2, t3, t4, t5, t6, t7, head, t8, t9 = x[:11] if x[12] == 'Y': pos.append(int(id)) buf.append(x) #root = -1 return def path_recur(buf, i): cur = buf[i] result = 0 while cur != -1: result += 1 if cur == buf[cur]: return result cur = buf[cur] return result def len_path(f): #for conll 2009 format buf = [] result = [0]*100 for line in f.readlines(): l = line.strip() if l == "": max_v = -1 for i in range(len(buf)): cur = path_recur(buf,i) if cur > max_v: max_v = cur #result[int((max_v-1)/10)] += 1 if max_v <= 5: result[0]+=1 elif max_v <= 10: result[1]+=1 elif max_v <= 15: result[2]+=1 elif max_v <= 20: result[3]+=1 else: result[4] +=1 print (max_v, result) #buf에 쌓아둔거 일처리. 
buf = [] continue x = l.split("\t") if len(x) == 1: continue id, t1, t2, t3, t4, t5, t6, t7, head, t8, t9 = x[:11] buf.append(int(head) - 1) #root = -1 return def cnt_pred(f): #for conll 2009 format result = [0]*25 cnt = 0 for line in f.readlines(): l = line.strip() if l == "": if cnt == 12: print(temp) result[cnt] += 1 print(result) cnt = 0 continue x = l.split("\t") if len(x) <= 1: continue temp = l if x[12] == 'Y': cnt+=1 return def cnt_argument(f): #for conll 2009 format result = {} cnt = 0 for line in f.readlines(): l = line.strip() if l == "": continue x = l.split("\t") if len(x) <= 1: continue temp = l args = x[14:] for arg in args: if arg in result: result[arg] += 1 else: result[arg] = 1 print(result) return def restore_word(w1, w2, feat): w = [] w.append(w1.split("/")[0]) if feat != "_": w.append(feat.split("/")[0]) if w2 != "_": w.append(w2.split("/")[0]) return "|".join(w) def parse_conll(f): result = [] buf = [] tokens = [] tidx = 0 err_flag = False for line in f.readlines(): l = line.strip() if l == "": if not err_flag: result.append(buf) err_flag = False buf = [] tidx = 0 continue x = l.split("\t") if len(x) == 1: buf.append(l) # print(l) tokens = l.split(" ")[1:] continue try: token = tokens[tidx] try: a = int(token) tidx += 1 token += tokens[tidx] except: pass tidx += 1 id, w1, w2, pos1, pos2, feat, featpos, feataddpos, head, deprel, pred = x[:11] args = x[11:] w = restore_word(w1, w2, feat) buf.append("\t".join([id, token, w, w, feataddpos, feataddpos, feat, feat, head, head, deprel, deprel, "Y" if pred != "_" else "_", pred] + args)) # print(buf[-1]) except: err_flag = True continue return result def wordCnt_inSentence(f): result = [] buf = [] tokens = [] tidx = 0 cnt = [0]*70 max_id = -1 err_flag = False for line in f.readlines(): if line[0] == ';': if max_id > 69: print(line) #print(max_id) cnt[max_id] += 1 max_id = -1 l = line.strip() if l == "": if not err_flag: result.append(buf) err_flag = False buf = [] tidx = 0 continue x = l.split("\t") 
if len(x) == 1: buf.append(l) # print(l) tokens = l.split(" ")[1:] continue try: token = tokens[tidx] try: a = int(token) tidx += 1 token += tokens[tidx] except: pass tidx += 1 id, w1, w2, pos1, pos2, feat, featpos, feataddpos, head, deprel, pred = x[:11] max_id = int(id) if int(id) > max_id else max_id args = x[11:] w = restore_word(w1, w2, feat) buf.append("\t".join([id, token, w, w, feataddpos, feataddpos, feat, feat, head, head, deprel, deprel, "Y" if pred != "_" else "_", pred] + args)) except: err_flag = True continue return def remove_line(f): result = [] for line in f.readlines(): if line[0] == ';': continue result.append(line) return result def sentence_count(f): cnt = 0 for line in f.readlines(): if line[0] == ';': cnt+=1 print("cnt :",cnt) return def isNumber(s): try: float(s) return True except ValueError: return False def word_count(f): cnt = 0 for line in f.readlines(): if isNumber(line[0]): cnt+=1 print("cnt :",cnt) return if __name__ == '__main__': a = "input_file_path" aout = "output_file_path" aout2 = "output_file_path" with open(a, encoding="EUC-KR") as f: r = parse_conll(f) with open(aout, "w", encoding="UTF8") as f: for line in r: f.write("\n".join(line)+"\n\n") with open(aout, encoding="UTF8") as f: r = remove_line(f) with open(aout2, "w", encoding="UTF8") as f: for line in r: f.write(line)<file_sep>/README.md # CoNLL2009_formatter 강원대 conll 데이터, Exobrain(v4) 데이터를 CoNLL2009 format으로 변환한다.
d527237c2384cc03ce8cf559a5c16073233cd640
[ "Markdown", "Python" ]
3
Python
fairy-of-9/CoNLL2009_formatter
7b29ce8c2a17e91d6161873d30b2716d474070ee
f83048166d1dab958ab36de00d7a7cd9e157a507
refs/heads/master
<repo_name>JeniusYin/ES_Nest<file_sep>/src/Sample.Elasticsearch.Domain/Application/ArticlesApplication.cs using System; using System.Collections.Generic; using System.Linq; using Nest; using Sample.Elasticsearch.Domain.Concrete; using Sample.Elasticsearch.Domain.Indices; using Sample.Elasticsearch.Domain.Model; namespace Sample.Elasticsearch.Domain.Application { public class ArticlesApplication : IArticlesApplication { private readonly IElasticClient _elasticClient; public ArticlesApplication(IElasticClient elasticClient) { _elasticClient = elasticClient; } public void PostArticlesSample() { if (!_elasticClient.Indices.Exists(IndexArticles.ArticleIndex).Exists) _elasticClient.Indices.Create(IndexArticles.ArticleIndex); _elasticClient.IndexMany<IndexArticles>(IndexArticles.GetSampleData(), IndexArticles.ArticleIndex); #region var descriptor = new BulkDescriptor(); descriptor.UpdateMany<IndexArticles>(IndexArticles.GetSampleData(), (b, u) => b .Index(IndexArticles.ArticleIndex) .Doc(u) .DocAsUpsert()); var insert = _elasticClient.Bulk(descriptor); if (!insert.IsValid) throw new Exception(insert.OriginalException.ToString()); #endregion } public ICollection<IndexArticles> GetAll() { var result = _elasticClient.Search<IndexArticles>(s => s .Index(IndexArticles.ArticleIndex) .Sort(q => q.Descending(p => p.PublishDate)))?.Documents; #region var result2 = _elasticClient.Search<IndexArticles>(s => s .Index(IndexArticles.ArticleIndex) .MatchAll()).Documents.ToList(); var result3 = _elasticClient.Search<IndexArticles>(s => s .Index(IndexArticles.ArticleIndex) .From(0) .Size(5) .MatchAll()).Documents.ToList(); //scroll var result4 = _elasticClient.Search<IndexArticles>(s => s .Index(IndexArticles.ArticleIndex) .From(0) .Size(5) .Scroll("1m") .MatchAll()); List<IndexArticles> results = new List<IndexArticles>(); if (result4.Documents.Any()) results.AddRange(result4.Documents); string scrollid = result4.ScrollId; bool isScrollSetHasData = true; while (isScrollSetHasData) { 
ISearchResponse<IndexArticles> loopingResponse = _elasticClient.Scroll<IndexArticles>("1m", scrollid); if (loopingResponse.IsValid) { results.AddRange(loopingResponse.Documents); scrollid = loopingResponse.ScrollId; } isScrollSetHasData = loopingResponse.Documents.Any(); } _elasticClient.ClearScroll(new ClearScrollRequest(scrollid)); #endregion return results; } public ICollection<IndexArticles> GetByTitle(string title) { //usado em lowcase var query = new QueryContainerDescriptor<IndexArticles>().Term(t => t.Field(f => f.Title).Value(title)); var result = _elasticClient.Search<IndexArticles>(s => s .Index(IndexArticles.ArticleIndex) .Query(s => query) .Size(5) .Sort(q => q.Descending(p => p.PublishDate)))?.Documents; #region var result2 = _elasticClient.Search<IndexArticles>(s => s .Index(IndexArticles.ArticleIndex) .Query(s => s.Wildcard(w => w.Field(f => f.Title).Value(title + "*"))) .Size(5) .Sort(q => q.Descending(p => p.PublishDate)))?.Documents; var result3 = _elasticClient.Search<IndexArticles>(s => s .Index(IndexArticles.ArticleIndex) .Query(s => s.Match(m => m.Field(f => f.Title).Query(title))) //.Query(s => s.Match(m => m.Field(f => f.Title).Query(title).Operator(Operator.And)) .Size(5) .Sort(q => q.Descending(p => p.PublishDate)))?.Documents; var result4 = _elasticClient.Search<IndexArticles>(s => s .Index(IndexArticles.ArticleIndex) .Query(s => s.MatchPhrase(m => m.Field(f => f.Title).Query(title))) //.Query(s => s.MatchPhrase(m => m.Field(f => f.Title).Query(title).Slop(1))) .Size(5) .Sort(q => q.Descending(p => p.PublishDate)))?.Documents; #endregion return result3?.ToList(); } public ICollection<IndexArticles> GetByContent(string content) { //term是代表完全匹配,也就是精确查询,搜索前不会再对搜索词进行分词拆解。 //match进行搜索的时候,会先进行分词拆分,拆完后,再来匹配 //match_phrase 称为短语搜索,要求所有的分词必须同时出现在文档中,同时位置必须紧邻一致。 //var query = new QueryContainerDescriptor<IndexArticles>().Match(t => t.Field(f => f.Content).Query(content)); var query = new QueryContainerDescriptor<IndexArticles>().MatchPhrase(t => 
t.Field(f => f.Content).Query(content)); //var query = new QueryContainerDescriptor<IndexArticles>().Term(t => t.Content, content); var result = _elasticClient.Search<IndexArticles>(s => s .Index(IndexArticles.ArticleIndex) .Query(s => query) .Size(10) .Sort(q => q.Descending(p => p.PublishDate)))?.Documents; return result?.ToList(); } public ICollection<IndexArticles> GetArticlesCondition(string title, string content, DateTime? publishDate) { //use Fuzzy para autocomplete QueryContainer query = new QueryContainerDescriptor<IndexArticles>(); if (!string.IsNullOrEmpty(title)) { query = query && new QueryContainerDescriptor<IndexArticles>().Match(qs => qs.Field(fs => fs.Title).Query(title)); } if (!string.IsNullOrEmpty(content)) { query = query && new QueryContainerDescriptor<IndexArticles>().Match(qs => qs.Field(fs => fs.Content).Query(content)); } if (publishDate.HasValue) { query = query && new QueryContainerDescriptor<IndexArticles>() .Bool(b => b.Filter(f => f.DateRange(dt => dt .Field(field => field.PublishDate) .GreaterThanOrEquals(publishDate) .LessThanOrEquals(publishDate) .TimeZone("+00:00")))); } var result = _elasticClient.Search<IndexArticles>(s => s .Index(IndexArticles.ArticleIndex) .Query(s => query) .Size(10) .Sort(q => q.Descending(p => p.PublishDate)))?.Documents; return result?.ToList(); } public ICollection<IndexArticles> GetArticlesAllCondition(string term) { QueryContainer query = new QueryContainerDescriptor<IndexArticles>().Bool(b => b.Must(m => m.Exists(e => e.Field(f => f.Content)))); query = query && new QueryContainerDescriptor<IndexArticles>().MatchPhrase(w => w.Field(f => f.Title).Query(term)) || new QueryContainerDescriptor<IndexArticles>().MatchPhrase(w => w.Field(f => f.Content).Query(term)) || new QueryContainerDescriptor<IndexArticles>().MatchPhrase(w => w.Field(f => f.Author).Query(term)); var result = _elasticClient.Search<IndexArticles>(s => s .Index(IndexArticles.ArticleIndex) .Query(s => query) .Size(10) .Sort(q => 
q.Descending(p => p.PublishDate)))?.Documents; return result?.ToList(); } public ArticleAggregationModel GetArticlesAggregation() { QueryContainer query = new QueryContainerDescriptor<IndexArticles>().Bool(b => b.Must(m => m.Exists(e => e.Field(f => f.Content)))); var result = _elasticClient.Search<IndexArticles>(s => s .Index(IndexArticles.ArticleIndex) .Query(s => query) .Aggregations(a => a.Sum("TotalViews", sa => sa.Field(p => p.TotalViews)) .Average("AverageViews", sa => sa.Field(p => p.TotalViews)) )); var totalViews = ObterBucketAggregationDouble(result.Aggregations, "TotalViews"); var avViews = ObterBucketAggregationDouble(result.Aggregations, "AverageViews"); return new ArticleAggregationModel {TotalViews = totalViews, AverageViews = avViews }; } public static double ObterBucketAggregationDouble(AggregateDictionary agg, string bucket) { if (agg.BucketScript(bucket).Value.HasValue) return agg.BucketScript(bucket).Value.Value; return 0; } } } <file_sep>/src/Sample.Elasticsearch.WebApi.Core/Extensions/ServiceExtensions.cs using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; using Sample.Elasticsearch.Domain.Application; using Sample.Elasticsearch.Domain.Concrete; namespace Sample.Elasticsearch.WebApi.Core.Extensions { public static class ServiceExtensions { public static void AddServices(this IServiceCollection services) { services.AddTransient<IArticlesApplication, ArticlesApplication>(); } } } <file_sep>/README.md # ES_Nest a dotnet core app that Operate Elasticsearch with Nest <file_sep>/src/Sample.Elasticsearch.Domain/Concrete/IArticlesApplication.cs using System; using System.Collections.Generic; using Sample.Elasticsearch.Domain.Indices; using Sample.Elasticsearch.Domain.Model; namespace Sample.Elasticsearch.Domain.Concrete { public interface IArticlesApplication { void PostArticlesSample(); ICollection<IndexArticles> GetAll(); ICollection<IndexArticles> GetByTitle(string title); ICollection<IndexArticles> 
GetByContent(string content); ICollection<IndexArticles> GetArticlesCondition(string title, string content, DateTime? publishDate); ICollection<IndexArticles> GetArticlesAllCondition(string term); ArticleAggregationModel GetArticlesAggregation(); } } <file_sep>/src/Sample.Elasticsearch.WebApi/Controllers/ArticleController.cs using Microsoft.AspNetCore.Mvc; using Sample.Elasticsearch.Domain.Concrete; using System; namespace Sample.Elasticsearch.WebApi.Controllers { [Route("api/[controller]")] public class ArticleController : Controller { private readonly IArticlesApplication _actorsApplication; public ArticleController(IArticlesApplication actorsApplication) { _actorsApplication = actorsApplication; } [HttpPost("sample")] public IActionResult PostArticlesSample() { _actorsApplication.PostArticlesSample(); return Ok(new { Result = "Data successfully registered with Elasticsearch" }); } [HttpGet("")] public IActionResult GetAll() { var result = _actorsApplication.GetAll(); return Json(result); } [HttpGet("title")] public IActionResult GetByTitle([FromQuery] string title) { var result = _actorsApplication.GetByTitle(title); return Json(result); } [HttpGet("content")] public IActionResult GetByContent([FromQuery] string content) { var result = _actorsApplication.GetByContent(content); return Json(result); } [HttpGet("Condition")] public IActionResult GetArticlesCondition([FromQuery] string title, [FromQuery] string content, [FromQuery] DateTime? 
publishDate) { var result = _actorsApplication.GetArticlesCondition(title, content, publishDate); return Json(result); } [HttpGet("term")] public IActionResult GetByAllCondictions([FromQuery] string term) { var result = _actorsApplication.GetArticlesAllCondition(term); return Json(result); } [HttpGet("aggregation")] public IActionResult GetArticlesAggregation() { var result = _actorsApplication.GetArticlesAggregation(); return Json(result); } } } <file_sep>/src/Sample.Elasticsearch.Domain/Model/ArticleAggregationModel.cs using System; using System.Collections.Generic; using System.Text; namespace Sample.Elasticsearch.Domain.Model { public class ArticleAggregationModel { public double TotalViews { get; set; } public double AverageViews { get; set; } } }
117aa6fb98902a5711e6732aa8954a677ffd9c10
[ "Markdown", "C#" ]
6
C#
JeniusYin/ES_Nest
6250fb9ab4a24079c642034b24644f483a3aa55d
43e5b54f8ac47bde268d1515ce96967ba2629d65
refs/heads/master
<repo_name>Jivvon/cnu-macro<file_sep>/macro_outermonitor.py # newmacro.py backup from newrecognition import Recognition import pyautogui as pag pag.PAUSE = 0.2 """ user 환경 : Macos, Chrome, Flash Accept """ data = { "rolldown": (373, 376), "rollclassname": (373, 512), "searchbar": (680, 376), "classname": "기계학습", "submit": (260, 756), "center": (640, 805), } search = { "search": (983, 376), "submit1": (264, 760), "students": (1140, 749, 1174, 767), } # 과목명 검색 def init(): pag.click(data["rolldown"]) pag.click(data["rollclassname"]) pag.click(data["searchbar"]) pag.press("enter") # 신청 버튼 누르기 def press_submit(): print("submit") pag.click(data["submit"]) # 매크로방지입력숫자 위치 찾기 def find_window(): location = pag.locateOnScreen("window_top.png", grayscale=True, confidence=0.95) # location = pag.locateOnScreen('window_left.png', grayscale=True, confidence=.95) # pag.screenshot('find_window.png',region=location) print(location) try: window_x, window_y, _, _ = location x1 = window_x + 133 y1 = window_y + 71 - 25 x2 = x1 + 77 y2 = y1 + 35 return (x1, y1, x2, y2) except Exception: return None def save_number(number): pag.typewrite(number, interval=0.05) pag.press("enter") # 저장 pag.sleep(0.1) pag.press("enter") # 오류창 확인 def pass_error(): pag.click(data["center"]) pag.click(data["center"]) pag.click(data["center"]) def pass_number(): pag.press("enter") # 저장 pag.typewrite("0000") pag.press("enter") # 저장 def run(): count = 0 prevNumber = "" prevFlag = False for _ in range(10): # while True: if count > 1: pass_error() count = 0 press_submit() # 신청버튼 누르기 pag.sleep(0.2) location = find_window() # 매크로방지입력숫자 위치 찾기 if not location: pag.sleep(0.2) location = find_window() if not location: count += 1 pass_number() continue recognition = Recognition(location, "screenshot.png") # OCR recognition.grab_image() # 숫자 캡처하고 number = recognition.ocr() # 숫자 읽어낸다 print(prevNumber, number) if prevNumber == number: if prevFlag: pass_error() prevFlag = False continue prevFlag = True save_number(number) # 
숫자타이핑 하고 저장 prevNumber = number pag.sleep(0.2) pass_error() if __name__ == "__main__": pag.click(352, 140) # 활성화 while True: recognition = Recognition(search["students"], "screenshot.png") # OCR recognition.grab_image() # 숫자 캡처하고 number = recognition.ocr() # 숫자 읽어낸다 print(number) try: if int(number) < 81: run() else: pag.press("enter") pag.click(search["search"]) pag.sleep(0.4) except: print("!ERROR") run() pag.click(search["search"]) pag.press("enter") pag.sleep(0.5) <file_sep>/README.md # cnu-macro 충남대학교 수강신청 매크로 <hr> ## 실행 환경 ``` mac 13' 1440 x 900 safari 4 split windows (top-left, top-right, bottom-left, bottom-right) Python 3.6.9 :: Anaconda, Inc. ``` <hr> ## keyboard.py 키보드 입력 지원 <hr> ## cursorPos.py 마우스 좌표 출력 <hr> ## recognition.py OCR을 통한 매크로 방지 숫자 인식 <hr> ## macro.py 메인 매크로 실행파일 USER SETTING : 수강신청 시작시간 설정 충남대학교 서버시간 (http://cnuis.cnu.ac.kr) 을 가져와 0.6초 전 실행 ( 서버시간을 가져오는 시간과 매크로를 실행하여 서버로 넘어가는 시간이 있다. ) 각 버튼의 위치는 positions.txt에서 가져와 사용 수강신청 시작할 때에는 사용자가 많이 몰려 바로 매크로 방지 숫자가 나타나지 않으므로 정확한 숫자를 읽을 때까지 반복 <hr> ## positions.txt 각 버튼들의 위치 ( 실행 환경 기준 ) 각 line의 시작이 숫자가 아니면 macro.py에서 무시함 TODO : #7 확정하기 버튼 4 개 추가 예정 <file_sep>/recognition.py import cv2 import numpy as np import pytesseract from PIL import Image, ImageGrab class Recognition: def __init__(self, positions, img_path): self.positions = positions self.path = img_path def grab_image(self): img = ImageGrab.grab(bbox=self.positions) img.save(self.path) def ocr(self): img = cv2.imread(self.path) # img = cv2.resize(img, (77, 35)) # 맥북 모니터에서만 img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) # Apply dilation and erosion to remove some noise kernel = np.ones((1, 1), np.uint8) img = cv2.dilate(img, kernel, iterations=1) img = cv2.erode(img, kernel, iterations=1) # Write image after removed noise ###### cv2.imwrite("removed_noise.png", img) # img = cv2.GaussianBlur(img, (5, 5), 0) # Apply threshold to get image with only black and white img = cv2.adaptiveThreshold( img, 255, cv2.ADAPTIVE_THRESH_GAUSSIAN_C, 
cv2.THRESH_BINARY, 31, 2 ) # 이미지 가공 후 총 결과 # cv2.imwrite(src_path, img) # OCR . config='digits' 제외해도 가능 # result = pytesseract.image_to_string(Image.open(src_path), lang="eng", config="digits") result = pytesseract.image_to_string( Image.open(self.path), lang="eng", config="--psm 10 --oem 3 -c tessedit_char_whitelist=0123456789", ) print(result) return result.strip() if __name__ == "__main__": import pyautogui as pag pag.click(352, 140) # 활성화 img_path = "screenshot.png" # recognition = Recognition((1044, 748, 1073, 772), img_path) # x, y, xx, yy = 1140, 749, 1174, 767 x, y, xx, yy = 929 * 2, 648 * 2, 26 * 2, 14 * 2 recognition = Recognition((x, y, x+xx, y+yy), img_path) recognition.grab_image() recognition.ocr() <file_sep>/macro.py from newrecognition import Recognition import pyautogui as pag pag.PAUSE = 0.2 """ user 환경 : Macos, Chrome, Flash Accept """ data = { "rolldown": (373, 376), "rollclassname": (373, 512), "searchbar": (680, 376), "classname": "기계학습", "submit": (260, 756), "center": (640, 805), } only_mac_monitor = {"submit3": (45, 656), "center": (361, 535)} test = { "search": (770, 376), "submit1": (45, 607), "students": (929 * 2, 597 * 2, 26 * 2, 14 * 2), "center": (510, 535) } # 과목명 검색 def init(): pag.click(data["rolldown"]) pag.click(data["rollclassname"]) pag.click(data["searchbar"]) pag.press("enter") # 신청 버튼 누르기 def press_submit(): print("submit") # pag.click(data['submit']) # pag.click(only_mac_monitor["submit3"]) pag.click(test["submit1"]) # 매크로방지입력숫자 위치 찾기 def find_window(): # location = pag.locateOnScreen('window_top.png', grayscale=True, confidence=.95) location = pag.locateOnScreen( "window_left_macmonitor.png", grayscale=True, confidence=0.95 ) # pag.screenshot('find_window.png',region=location) print(location) try: window_x, window_y, _, _ = location x1 = window_x + (132) * 2 y1 = window_y + (72 - 25) * 2 + 1 x2 = x1 + 77 * 2 y2 = y1 + 33 * 2 return (x1, y1, x2, y2) except Exception: return None def save_number(number): 
pag.typewrite(number, interval=0.05) pag.press("enter") # 저장 pag.sleep(0.1) pag.press("enter") # 오류창 확인 def pass_error(): # pag.click(data['center']) # pag.click(data['center']) # pag.click(data['center']) pag.click(only_mac_monitor["center"]) pag.click(only_mac_monitor["center"]) pag.click(only_mac_monitor["center"]) def pass_number(): pag.press("enter") # 저장 pag.typewrite("0000") pag.press("enter") # 저장 def run(): count = 0 prevNumber = "" prevFlag = False for _ in range(10): # while True: if count > 1: pass_error() count = 0 press_submit() # 신청버튼 누르기 pag.sleep(0.2) location = find_window() # 매크로방지입력숫자 위치 찾기 if not location: pag.sleep(0.2) location = find_window() if not location: count += 1 pass_number() continue recognition = Recognition(location, "screenshot.png") # OCR recognition.grab_image() # 숫자 캡처하고 number = recognition.ocr() # 숫자 읽어낸다 print(prevNumber, number) if prevNumber == number: if prevFlag: pass_error() prevFlag = False continue prevFlag = True save_number(number) # 숫자타이핑 하고 저장 prevNumber = number pag.sleep(0.2) pag.click(test["center"]) pag.click(test["center"]) if __name__ == "__main__": pag.click(352, 140) # 활성화 x, y, w, h = test["students"] while True: recognition = Recognition((x, y, x + w, y + h), "screenshot.png") # OCR recognition.grab_image() # 숫자 캡처하고 number = recognition.ocr() # 숫자 읽어낸다 print(number) try: if int(number) < 100: run() else: pag.click(test["search"]) pag.press("enter") pag.sleep(0.4) except: print("!ERROR") run() pag.click(test["search"]) pag.press("enter") pag.sleep(0.5)
7406e1d723848e3135faf138f4ea5ed99fb46d4e
[ "Markdown", "Python" ]
4
Python
Jivvon/cnu-macro
3e8bfd196dfee01d934e3391a6637f20fbf91d75
00604b14564bd7bb0ad1d25d61311525cb7cd722
refs/heads/main
<file_sep><head> <meta charset="utf-8"> <meta name="viewport" content="width=device-width, initial-scale=1"> <link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/4.4.1/css/bootstrap.min.css"> <script src="https://ajax.googleapis.com/ajax/libs/jquery/3.4.1/jquery.min.js"></script> <script src="https://cdnjs.cloudflare.com/ajax/libs/popper.js/1.16.0/umd/popper.min.js"></script> <script src="https://maxcdn.bootstrapcdn.com/bootstrap/4.4.1/js/bootstrap.min.js"></script> </head> <form method = post> Enter Employee ID <input type = text name = eid> <br> Enter Employee Name <input type = text name = ename> <br> Enter Employee Salary <input type = text name = esalary> <br> <input type = submit value = "Save" class = 'btn btn-primary' name = "save"> <input type = submit value = "Modify" class = 'btn btn-danger' name = "modify"> <input type = submit value = "Remove" name = "remove"> <input type = submit value = "Search" name = "search"> </form> <?php include "2dbconfigure.php"; if(isset($_POST['save'])) { $eid = $_POST['eid']; $ename = $_POST['ename']; $esalary = $_POST['esalary']; $query = "insert into employee values($eid,'$ename','$esalary')"; $n = my_iud($query); echo "$n Record Saved"; } if(isset($_POST['modify'])) { $eid = $_POST['eid']; $ename = $_POST['ename']; $esalary = $_POST['esalary']; $query = "update employee set ename='$ename',esalary='$esalary' where eid=$eid"; $n = my_iud($query); echo "$n Record Modified"; } if(isset($_POST['remove'])) { $eid = $_POST['eid']; $ename = $_POST['ename']; $esalary = $_POST['esalary']; $query = "delete from employee where eid=$eid"; $n = my_iud($query); echo "$n Record Removed"; } if(isset($_POST['search'])) { $eid = $_POST['eid']; $ename = $_POST['ename']; $esalary = $_POST['esalary']; //* all $query = "select * from employee"; $rs = my_select($query); $n = mysqli_num_rows($rs); echo "$n Record Found"; /*while($row = mysqli_fetch_array($rs,MYSQLI_NUM)) { echo "<hr>Emp ID is $row[0]"; echo "<br>Emp Name is 
$row[1]"; echo "<br>Emp Salary is $row[2]"; }*/ echo "<br><table class='table table-hover table-dark'>"; echo "<tr>"; echo "<th>EmpID</th>"; echo "<th>EmpName</th>"; echo "<th>EmpSalary</th>"; echo "</tr>"; while($row = mysqli_fetch_array($rs,MYSQLI_NUM)) { echo "<tr>"; echo "<td>$row[0]</td>"; echo "<td>$row[1]</td>"; echo "<td>$row[2]</td>"; echo "</tr>"; } echo "</table>"; } ?><file_sep><?php $dbserver = "127.0.0.1";//localhost $dbuser = "root"; $dbpwd = ""; $dbname = "mydb555"; /* for old versions $cid = mysql_connect($dbserver,$dbuser,$dbpwd) or die("Connection Failed"); mysql_select_db($dbname,$cid); mysql_query("insert into employee values(3,'pqr','25000')",$cid); $n = mysql_affected_rows($cid); echo "<br>$n Record Saved";*/ function my_iud($query)//insert , update , delete { global $dbserver,$dbuser,$dbpwd,$dbname; $cid = mysqli_connect($dbserver,$dbuser,$dbpwd) or die("Connection Failed"); mysqli_select_db($cid,$dbname); mysqli_query($cid,$query); $n = mysqli_affected_rows($cid); return $n; } function my_select($query)//select { global $dbserver,$dbuser,$dbpwd,$dbname; $cid = mysqli_connect($dbserver,$dbuser,$dbpwd) or die("Connection Failed"); mysqli_select_db($cid,$dbname); $rs = mysqli_query($cid,$query); return $rs; } ?><file_sep>create database mydb555; use mydb555; create table employee ( eid int(5) primary key, ename varchar(30), esalary varchar(10) ); insert into employee values(1,'abc','20000'); insert into employee values(2,'xyz','30000');
a30669c544a5660f0de7c8a9b4750a32c0ca2623
[ "SQL", "PHP" ]
3
PHP
Surajkumar121-gif/database
4c52e2f1d91a2fe3463355c7ec8c0e496a0165e2
d02c836475d6f40ceaf60944a2ede4de30dba857
refs/heads/master
<repo_name>marcinwyszynski/roo_on_rails<file_sep>/spec/integration/new_relic_spec.rb require 'spec_helper' require 'spec/support/run_test_app' describe 'New Relic integration' do run_test_app before { app.start } shared_examples 'loads' do it 'loads New Relic' do app.wait_start expect(app).to have_log /NewRelic.*Finished instrumentation/ end end shared_examples 'does not load' do it 'does not load New Relic' do app.wait_start expect(app).not_to have_log /NewRelic.*Finished instrumentation/ end it 'does not abort' do app.wait_start.stop expect(app.status).to be_success end end shared_examples 'abort early' do |message| it 'fails to load' do app.wait_log /Exiting/ app.stop expect(app.status).not_to be_success end it 'logs the failure' do app.wait_log message end end context 'with correct setup' do include_examples 'loads' end context 'when NEW_RELIC_LICENSE_KEY is missing' do let(:app_env_vars) { super().gsub(/^NEW_RELIC_LICENSE_KEY\S*$/, '') } context 'in the test environment' do let(:app_env) { 'test' } include_examples 'does not load' end context 'in the development environment' do let(:app_env) { 'development' } after { app.stop } include_examples 'loads' end context 'in the production environment' do include_examples 'abort early', /NEW_RELIC_LICENSE_KEY must be set/ end end context 'when BASE_NEW_RELIC_APP_NAME is set' do let(:base_name) { 'base' } let(:service_name) { 'service' } let(:app_name) { nil } let(:app_env_vars) do [ super(), "BASE_NEW_RELIC_APP_NAME=#{base_name}", "HOPPER_SERVICE_NAME=#{service_name}", "NEW_RELIC_APP_NAME=#{app_name}" ].join("\n") end after { app.stop } it 'uses HOPPER_SERVICE_NAME' do app.wait_start expect(app).to have_log /NewRelic.*Application: base - service, base/ end context 'and NEW_RELIC_APP_NAME is set' do let(:app_name) { 'specific' } it 'uses NEW_RELIC_APP_NAME unmodified' do app.wait_start expect(app).to have_log /NewRelic.*Application: specific/ end end context 'and HOPPER_SERVICE_NAME is not set' do let(:service_name) { 
nil } it 'uses just the base name' do app.wait_start expect(app).to have_log /NewRelic.*Application: base/ end end end context 'when a newrelic.yml exists' do %w[. ./config].each do |path| context "in directory #{path}" do before do app_helper.create_file app_path.join(path).join('newrelic.yml'), %{ # fake new relic config file } end include_examples 'abort early', /newrelic.yml detected/ end end end end <file_sep>/lib/roo_on_rails/railties/database.rb module RooOnRails module Railties class Database < Rails::Railtie initializer 'roo_on_rails.database', after: 'active_record.initialize_database' do ActiveSupport.on_load :active_record do Rails.logger.with(initializer: 'roo_on_rails.database') do |log| log.debug 'loading' config = ActiveRecord::Base.configurations[Rails.env] config['variables'] ||= {} config['variables']['statement_timeout'] = ENV.fetch('DATABASE_STATEMENT_TIMEOUT', 200) if ENV.key?('DATABASE_REAPING_FREQUENCY') config['reaping_frequency'] = ENV['DATABASE_REAPING_FREQUENCY'] end ActiveRecord::Base.establish_connection end end end end end end <file_sep>/lib/roo_on_rails/version.rb module RooOnRails VERSION = '1.22.0'.freeze end <file_sep>/spec/integration/logging_spec.rb require 'spec_helper' require 'spec/support/run_test_app' RSpec.describe 'Logging Railtie' do run_test_app before do app.start end describe 'log_level' do before do app_helper.gsub_file app_path.join('config/environments/production.rb'), /config\.log_level = :debug/, 'config.log_level = :info' app_helper.create_file app_path.join('config/initializers/debug.rb'), "Rails.logger.debug('d3bug')" app_helper.create_file app_path.join('config/initializers/info.rb'), "Rails.logger.info('inf0')" app_helper.create_file app_path.join('config/initializers/warn.rb'), "Rails.logger.warn('w4rn')" end context 'when LOG_LEVEL is unset' do it "uses the log level of `config.log_level`" do app.wait_start expect(app).not_to have_log(/d3bug/) expect(app).to have_log(/inf0/) expect(app).to have_log(/w4rn/) 
end end context 'when LOG_LEVEL is set' do let(:app_env_vars) { [super(), "LOG_LEVEL=#{log_level}"].join("\n") } context 'LOG_LEVEL is valid' do let(:log_level) { 'WARN' } it "uses LOG_LEVEL" do app.wait_start expect(app).not_to have_log(/d3bug/) expect(app).not_to have_log(/inf0/) expect(app).to have_log(/w4rn/) end end context 'LOG_LEVEL is invalid' do let(:log_level) { 'WASHING_MACHINE' } it "uses the log level of `config.log_level`" do app.wait_start expect(app).not_to have_log(/d3bug/) expect(app).to have_log(/inf0/) expect(app).to have_log(/w4rn/) end end end end end <file_sep>/lib/roo_on_rails/tasks/db.rake if defined?(ActiveRecord) namespace :db do desc 'Prints out the database statement timeout' task statement_timeout: :environment do result = ActiveRecord::Base.connection.execute('SHOW statement_timeout').first puts result['statement_timeout'] end namespace :migrate do task extend_statement_timeout: :environment do if ActiveRecord::VERSION::MAJOR >= 4 config = ActiveRecord::Base.configurations[Rails.env] config['variables'] ||= {} config['variables']['statement_timeout'] = ENV.fetch('MIGRATION_STATEMENT_TIMEOUT', 10_000) ActiveRecord::Base.establish_connection end end end end %i( db:create db:drop db:migrate db:migrate:down db:rollback ).each do |task| Rake::Task[task].enhance(%i[db:migrate:extend_statement_timeout]) end end <file_sep>/lib/roo_on_rails/routemaster/publisher.rb require 'roo_on_rails/config' require 'routemaster/client' module RooOnRails module Routemaster class Publisher attr_reader :model, :event def initialize(model, event, client: ::Routemaster::Client) @model = model @event = event @client = client end def publish? noop? || @model.new_record? || @model.previous_changes.any? end def will_publish?(force_publish: false) Config.routemaster_publishing_enabled? && (force_publish || publish?) 
end def publish!(force_publish: false) return unless will_publish?(force_publish: force_publish) @client.send( @event, topic, url, async: async?, data: stringify_keys(data), t: timestamp && (timestamp.to_f * 1000).to_i ) end def topic @model.class.name.tableize end def url raise NotImplementedError end def async? false end def data nil end def timestamp return @model.created_at if created? && @model.respond_to?(:created_at) return @model.updated_at if (updated? || created?) && @model.respond_to?(:updated_at) nil end %i(created updated deleted noop).each do |event_type| define_method :"#{event_type}?" do @event.to_sym == event_type end end private def stringify_keys(hash) return hash if hash.nil? || hash.empty? hash.each_with_object({}) do |(k, v), h| h[k.to_s] = v.is_a?(Hash) ? stringify_keys(v) : v end end end end end <file_sep>/README.routemaster_client.md ## Using the Routemaster Client feature [`routemaster-client`](https://github.com/deliveroo/routemaster-client) comes as a dependency of `roo_on_rails` with a basic implementation of lifecycle event publishers. This code example assumes that you are using the latest version of the [`roo_on_rails`](https://github.com/deliveroo/roo_on_rails) gem and that you have set the correct environment variables for Routemaster Client to work on your app, as explained in the main [`README.md`](https://github.com/deliveroo/roo_on_rails#routemaster-client) file. It also assumes that your app has an API for the resources you want to publish lifecycle events for, with matching routes and an `API_HOST` environment variable set. ### Setup lifecycle events for your models You can use publish events on create, update, and destroy by including the `PublishLifecycleEvents` module: ```ruby # app/models/order.rb require 'roo_on_rails/routemaster/publish_lifecycle_events' class Order < ApplicationRecord include RooOnRails::Routemaster::PublishLifecycleEvents # ... 
end ``` If you need more control over which events are published you can use the base module `LifecycleEvents` and specify them explicitly: ```ruby # app/models/rider.rb require 'roo_on_rails/routemaster/lifecycle_events' class Rider < ApplicationRecord include RooOnRails::Routemaster::LifecycleEvents publish_lifecycle_events :create, :destroy # ... end ``` ### Create publishers for lifecycle events We have now configured our models to publish lifecycle events to Routemaster, but it won't send anything until you have enabled publishing and created matching publishers. Let's start with creating an `ApplicationPublisher` that we can use as our default. ```ruby # app/publishers/application_publisher.rb require 'roo_on_rails/routemaster/publisher' class ApplicationPublisher < RooOnRails::Routemaster::Publisher include Rails.application.routes.url_helpers def url url_helper = :"api_#{model.class.name.underscore}_url" public_send(url_helper, model.id, host: ENV.fetch('API_HOST'), protocol: 'https') end # Add your method overrides here if needed end ``` If different behaviour is needed for specific models then you can override the defaults in their publishers: ```ruby # app/publishers/order_publisher.rb class OrderPublisher < ApplicationPublisher def async? true end end ``` and ```ruby # app/publishers/rider_publisher.rb class RiderPublisher < ApplicationPublisher def topic 'a_different_rider_topic' end end ``` `#publish?`, `#topics`, `#async?`, `#data` and `#timestamp` can be overriden; see [the `Publisher` class](lib/roo_on_rails/routemaster/publisher.rb) for the default implementations. ### Register the publishers with Routemaster The final step is to tell Routemaster that these publishers exist, so that it can listen to their events. 
We're going to do this in an initialiser: ```ruby # config/initilizers/routemaster.rb require 'roo_on_rails/routemaster/publishers' PUBLISHERS = [ OrderPublisher, RiderPublisher ].freeze RooOnRails::Routemaster::Publishers.register_default(ApplicationPublisher) PUBLISHERS.each do |publisher| model_class = publisher.to_s.gsub("Publisher", "").constantize RooOnRails::Routemaster::Publishers.register(publisher, model_class: model_class) end ``` We should now be all set for our app to publish lifecycle events for all our models onto the event bus, with special behaviour for `orders` and `riders`, so that other apps can listen to them. <file_sep>/lib/roo_on_rails/railties/routemaster.rb require 'roo_on_rails/config' module RooOnRails module Railties class Routemaster < Rails::Railtie initializer 'roo_on_rails.routemaster' do Rails.logger.with initializer: 'roo_on_rails.routemaster' do |log| next unless Config.routemaster_enabled? log.debug 'loading' abort 'Aborting: ROUTEMASTER_URL and ROUTEMASTER_UUID are required' if bus_details_missing? require 'routemaster/client' ::Routemaster::Client.configure do |config| config.url = routemaster_url config.uuid = routemaster_uuid config.verify_ssl = routemaster_verify_ssl end end end private def bus_details_missing? routemaster_url.blank? || routemaster_uuid.blank? 
end def routemaster_url ENV.fetch('ROUTEMASTER_URL') end def routemaster_uuid ENV.fetch('ROUTEMASTER_UUID') end def routemaster_verify_ssl ENV.fetch('ROUTEMASTER_VERIFY_SSL', 'true') != 'false' end end end end <file_sep>/spec/integration/database_spec.rb require 'spec_helper' require 'spec/support/run_test_app' require 'active_record' describe 'Database setup', rails_min_version: 4 do run_test_app context 'with a postgresql database' do let(:app_options) {{ database: 'postgresql' }} # fix database.yml before do data = Pathname.new(__FILE__).join('../../support/database.yml').read app_path.join('config/database.yml').tap do |db_yml| app_helper.remove_file(db_yml) app_helper.create_file(db_yml, data) end end before { app.wait_start } context 'When booting' do before { app_helper.shell_run "cd #{app_path} && rake db:create" } after { app_helper.shell_run "cd #{app_path} && rake db:drop" } let(:statement_timeout) { app_helper.shell_run "cd #{app_path} && rake db:statement_timeout" } context 'when DATABASE_STATEMENT_TIMEOUT is not set' do it 'sets the statement timeout to 200ms' do expect(statement_timeout).to include '200ms' end end context 'when DATABASE_STATEMENT_TIMEOUT is set' do before { ENV['DATABASE_STATEMENT_TIMEOUT'] = '750' } after { ENV['DATABASE_STATEMENT_TIMEOUT'] = nil } it 'sets the statement timeout to the value in ms' do expect(statement_timeout).to include '750ms' end end context 'when running migrations' do let(:migration_dir) { app_path.join('db', 'migrate') } let(:migration_path) { migration_dir.join("#{Time.now.to_i}_test_timeout.rb") } let(:migration) do major, minor, * = Gem::Version.new(Rails::VERSION::STRING).segments version = "[#{major}.#{minor}]" if major >= 5 <<-EOF class TestTimeout < ActiveRecord::Migration#{version} def up ActiveRecord::Base.connection.execute('SELECT pg_sleep(1)') end def down ActiveRecord::Base.connection.execute('SELECT pg_sleep(1)') end end EOF end let(:migrate) { app_helper.shell_run "cd #{app_path} && rake 
db:migrate" } let(:rollback) { app_helper.shell_run "cd #{app_path} && rake db:rollback" } before do FileUtils.mkdir_p(migration_dir) File.write(migration_path, migration) end after { File.delete(migration_path) } it 'should allow migration statements longer than the regular timeout' do expect { migrate }.to_not raise_error end it 'should allow rollback statements longer than the regular timeout' do expect { rollback }.to_not raise_error end end end end context 'with ActiveRecord disabled' do let(:app_options) {{ database: nil }} it 'boots the app without errors' do app.start.wait_start end end end <file_sep>/spec/integration/routemaster_spec.rb require 'spec_helper' require 'spec/support/run_test_app' describe 'Routemaster Client' do run_test_app before { app.start } describe 'when booting the app' do it 'does not abort' do app.wait_start.stop expect(app.status).to be_success end context 'if ROUTEMASTER_ENABLED is true' do let(:app_env_vars) { ["ROUTEMASTER_ENABLED=true", super()].join("\n") } context 'and ROUTEMASTER_URL/ROUTEMASTER_UUID are not set' do it 'the app fails to load' do app.wait_log /Exiting/ app.stop expect(app.status).not_to be_success end it 'the app logs the failure' do app.wait_log /ROUTEMASTER_URL and ROUTEMASTER_UUID are required/ end end end end end <file_sep>/spec/support/run_test_app.rb require 'spec/support/build_test_app' require 'spec/support/sub_process' module ROR module RunTestApp def run_test_app build_test_app let(:app_env) { 'production' } let(:app) { ROR::SubProcess.new( name: 'rails', dir: app_path, command: 'bundle exec rails server puma -e %s' % app_env, start: /Use Ctrl-C to stop/, stop: /- Goodbye!/) } after { app.terminate } after do |example| app.dump_logs if example.exception end end def run_sidekiq build_test_app let(:app_env) { 'production' } let(:app) { ROR::SubProcess.new( name: 'sidekiq', dir: app_path, command: 'bundle exec roo_on_rails sidekiq', start: /Starting processing, hit Ctrl-C to stop/, stop: /Bye!/) } after 
{ app.terminate } end end end RSpec.configure do |config| config.extend ROR::RunTestApp end <file_sep>/lib/roo_on_rails/tasks/newrelic.rake namespace :newrelic do desc 'Notifies New Relic that a deployment has occurred' task notice_deployment: :environment do begin require 'newrelic_rpm' require 'new_relic/cli/command' appname = ENV.fetch('NEW_RELIC_APP_NAME') Rails.logger.info("Notifying New Relic of deployment to #{appname}") NewRelic::Cli::Deployments.new( environment: Rails.env.to_s, revision: ENV.fetch('SOURCE_VERSION', 'unknown'), changelog: '', description: '', appname: appname, user: '', license_key: ENV.fetch('NEW_RELIC_LICENSE_KEY') ).run rescue => e Rails.logger.error("Failed to notify New Relic (#{e.class.name}: #{e.message})") Rails.logger.info(e.backtrace.take(10).join("\n")) end end end <file_sep>/spec/integration/sidekiq_spec.rb require 'spec_helper' require 'spec/support/run_test_app' describe 'Sidekiq Setup' do run_test_app before { app.wait_start } context 'When booting' do let(:middleware) { app_helper.shell_run "cd #{app_path} && rake middleware" } it 'does not insert hirefire into the middleware stack' do expect(middleware).not_to include 'HireFire::Middleware' end context "if HIREFIRE_TOKEN is set" do let(:app_env_vars) { ["HIREFIRE_TOKEN=hello", super()].join("\n") } it 'inserts hirefire into the middleware stack' do expect(middleware).to include 'HireFire::Middleware' end end end end describe "sidekiq loader" do run_sidekiq before { app.wait_start } it 'starts and stops the app cleanly' do app.start.wait_start app.stop expect(app.status).to be_success end end <file_sep>/lib/roo_on_rails/routemaster/publish_lifecycle_events.rb require 'active_support/concern' require 'roo_on_rails/routemaster/lifecycle_events' module RooOnRails module Routemaster module PublishLifecycleEvents extend ActiveSupport::Concern include LifecycleEvents included(&:publish_lifecycle_events) end end end <file_sep>/lib/roo_on_rails/railties/new_relic.rb module 
RooOnRails module Railties class NewRelic < Rails::Railtie initializer 'roo_on_rails.new_relic' do Rails.logger.with initializer: 'roo_on_rails.new_relic' do |log| log.debug 'loading' license_key = ENV['NEW_RELIC_LICENSE_KEY'] if %w(test development).exclude?(Rails.env.to_s) && (license_key == 'override-me') abort 'Aborting: NEW_RELIC_LICENSE_KEY must be set in production environments' end abort 'Aborting: NEW_RELIC_LICENSE_KEY is required' if license_key.nil? # Report application stats to a per-service (worker, web) New Relic app, and to a main # application for all services. base_name = ENV['BASE_NEW_RELIC_APP_NAME'] service_name = ENV['HOPPER_SERVICE_NAME'] if !base_name.blank? && ENV['NEW_RELIC_APP_NAME'].blank? task_app_name = service_name.present? ? "#{base_name} - #{service_name}" : nil ENV['NEW_RELIC_APP_NAME'] = [task_app_name, base_name].compact.join(';') end path = %w(newrelic.yml config/newrelic.yml).map do |p| Pathname.new(p) end.find(&:exist?) if path abort "Aborting: newrelic.yml detected in '#{path.parent.realpath}', should not exist" end sync_startup = (ENV.fetch('NEW_RELIC_SYNC_STARTUP', 'YES') =~ /\A(YES|TRUE|ON|1)\Z/i) require 'newrelic_rpm' unless Rails.env.test? 
::NewRelic::Control.instance.init_plugin(sync_startup: sync_startup) end end end end end end <file_sep>/lib/roo_on_rails/railties/logging.rb module RooOnRails module Railties class Logging < Rails::Railtie initializer 'roo_on_rails.logging.before', before: :initialize_logger do require 'roo_on_rails/logger' Rails.logger = config.logger = RooOnRails::Logger.new # It is not possible to set log_level to an invalid value without some # deliberate gymnastics (the setter will raise an error), and Rails # defaults this to `debug`, so we don't need to guard against nil / # invalidity log_level = Rails.configuration.log_level Rails.logger.set_log_level(default: log_level) Rails.logger.debug 'initializer roo_on_rails.logging.before' end initializer 'roo_on_rails.logging.after', after: :initialize_logger do log_level = Rails.configuration.log_level Rails.logger.set_log_level(default: log_level) Rails.logger.debug 'initializer roo_on_rails.logging.after' end end end end <file_sep>/lib/roo_on_rails/routemaster/publishers.rb module RooOnRails module Routemaster module Publishers @default_publishers = [] @publishers = {} def self.register_default(publisher_class) @default_publishers << publisher_class end def self.register(publisher_class, model_class:) @publishers[model_class.name] ||= Set.new @publishers[model_class.name] << publisher_class end def self.for(model, event) publisher_classes = @publishers[model.class.name] || @default_publishers publisher_classes.map { |c| c.new(model, event) } end def self.clear @default_publishers = [] @publishers = {} end end end end <file_sep>/lib/roo_on_rails/routemaster/lifecycle_events.rb require 'active_support/concern' require 'new_relic/agent' require 'roo_on_rails/routemaster/publishers' module RooOnRails module Routemaster module LifecycleEvents extend ActiveSupport::Concern ACTIVE_RECORD_TO_ROUTEMASTER_EVENT_MAP = { create: :created, update: :updated, destroy: :deleted, noop: :noop }.freeze private_constant 
:ACTIVE_RECORD_TO_ROUTEMASTER_EVENT_MAP def publish_lifecycle_event(event) publish_event(event, force_publish: false) end def publish_lifecycle_event!(event) publish_event(event, force_publish: true) end private def publish_event(event, force_publish:) publishers = Routemaster::Publishers.for(self, routemaster_event_type(event)) publishers.each do |publisher| begin publisher.publish!(force_publish: force_publish) rescue => e NewRelic::Agent.notice_error(e) end end end def routemaster_event_type(event) ACTIVE_RECORD_TO_ROUTEMASTER_EVENT_MAP[event].tap do |type| raise "invalid lifecycle event '#{event}'" unless type end end %i(create update destroy noop).each do |event| define_method("publish_lifecycle_event_on_#{event}") do publish_lifecycle_event(event) end end module ClassMethods def publish_lifecycle_events(*events) events = events.any? ? events : %i(create update destroy) events.each do |event| after_commit( :"publish_lifecycle_event_on_#{event}", on: event ) end end end end end end <file_sep>/spec/roo_on_rails/routemaster/publisher_spec.rb require 'active_support/core_ext/string' require 'roo_on_rails/routemaster/publisher' require 'support/test_model' RSpec.describe RooOnRails::Routemaster::Publisher do TestPublisherA = Class.new(RooOnRails::Routemaster::Publisher) TestPublisherB = Class.new(RooOnRails::Routemaster::Publisher) let(:model) { TestModel.new } let(:event) { :noop } before do allow(::RooOnRails::Config).to receive(:routemaster_publishing_enabled?) 
{ true } end describe 'when configured correctly' do let(:publisher) { TestPublisherA.new(model, event) } before do allow(publisher).to receive_messages( url: "https://deliveroo.test/url", data: { test_key_1: "Test value 1", test_key_2: "Test value 2" } ) end it 'should publish an event to Routemaster fine' do expect(::Routemaster::Client).to receive(:send).with( :noop, "test_models", "https://deliveroo.test/url", { async: false, data: { "test_key_1" => "Test value 1", "test_key_2" => "Test value 2" }, t: nil } ) publisher.publish! end it 'should have a topic named after the model class' do expect(publisher.topic).to eq("test_models") end it 'should have the correct URL' do expect(publisher.url).to eq("https://deliveroo.test/url") end it 'should default to publishing synchronously' do expect(publisher).to_not be_async end it 'should have the correct event type' do expect(publisher.created?).to eq(false) expect(publisher.updated?).to eq(false) expect(publisher.deleted?).to eq(false) expect(publisher.noop?).to eq(true) end describe 'the timestamp of the event sent to routemaster' do subject(:timestamp) do ts = nil expect(::Routemaster::Client).to receive(:send) { |_, _, _, opts| ts = opts[:t] } publisher.publish! 
ts end context 'when the model was created' do let(:event) { :created } context 'when the model responds to created_at' do let(:create_time) { Time.at(12345) } let(:model) { TestModel.which_responds_to(created_at: create_time).new } it { should eq (create_time.to_f * 1000).to_i } end context 'when the model does not respond to created_at' do # it { should eq nil } context 'when the model responds to updated_at' do let(:update_time) { Time.at(23456) } let(:model) { TestModel.which_responds_to(updated_at: update_time).new } it { should eq (update_time.to_f * 1000).to_i } end end end context 'when the model was updated' do let(:event) { :updated } context 'when the model does not respond to updated_at' do it { should eq nil } end context 'when the model responds to updated_at' do let(:update_time) { Time.at(34567) } let(:model) { TestModel.which_responds_to(updated_at: update_time).new } it { should eq (update_time.to_f * 1000).to_i } end end end [:created, :updated, :deleted].each do |event| context 'when model was not created or changed' do let(:event) { event } let(:model) { TestModel.which_responds_to(new_record?: false, previous_changes: []).new } it 'should not publish an event to Routemaster fine' do expect(::Routemaster::Client).to_not receive(:send) publisher.publish! 
end context 'when force_publish is enabled' do it 'should publish an event to Routemaster fine' do expect(::Routemaster::Client).to receive(:send).with( event, "anonymous_test_model_classes", "https://deliveroo.test/url", { async: false, data: { "test_key_1" => "Test value 1", "test_key_2" => "Test value 2" }, t: nil } ) publisher.publish!(force_publish: true) end end end end end describe 'when missing some configuration' do let(:publisher) { TestPublisherB.new(model, event) } it '#url should raise an error' do expect { publisher.url }.to raise_error(NotImplementedError) end end end <file_sep>/spec/roo_on_rails/routemaster/lifecycle_events_spec.rb require 'roo_on_rails/routemaster/lifecycle_events' require 'roo_on_rails/routemaster/publishers' require 'roo_on_rails/routemaster/publisher' RSpec.describe RooOnRails::Routemaster::LifecycleEvents do subject do Class.new do @after_commit_hooks = [] def self.after_commit_hooks @after_commit_hooks end def self.after_commit(*args) @after_commit_hooks << args end include RooOnRails::Routemaster::LifecycleEvents end end let(:subject_instance) { subject.new } let(:publisher_spy) { spy('publisher') } events_and_types = [ %i(create created), %i(update updated), %i(destroy deleted) ] describe "::publish_lifecycle_events" do context "when called without arguments" do before { subject.publish_lifecycle_events } it "adds three event hooks" do expect(subject.after_commit_hooks).to match_array([ [:publish_lifecycle_event_on_create, { on: :create }], [:publish_lifecycle_event_on_update, { on: :update }], [:publish_lifecycle_event_on_destroy, { on: :destroy }] ]) end describe "and calling a callback" do events_and_types.each do |lifecycle_event| it "fetches a publisher for #{lifecycle_event.first}" do callback = subject.after_commit_hooks.detect { |event| event.last[:on] == lifecycle_event.first }.first allow(RooOnRails::Routemaster::Publishers).to receive(:for).with(subject, lifecycle_event.last) do [publisher_spy] end 
expect(publisher_spy).to receive(:publish!) subject_instance.send(callback) end end end it "defines all three lifecycle events and noop on an instance" do expect { subject_instance.method(:publish_lifecycle_event_on_create) subject_instance.method(:publish_lifecycle_event_on_update) subject_instance.method(:publish_lifecycle_event_on_destroy) subject_instance.method(:publish_lifecycle_event_on_noop) }.to_not raise_error end end context "when called with a 'create' lifecycle event" do before { subject.publish_lifecycle_events(:create) } it "adds a 'create' hook only" do expect(subject.after_commit_hooks).to match_array([[:publish_lifecycle_event_on_create, { on: :create }]]) end end end describe "#publish_lifecycle_event" do events_and_types.each do |lifecycle_event| it "publishes #{lifecycle_event.first} event with force_publish disabled" do allow(RooOnRails::Routemaster::Publishers).to receive(:for).with(subject, lifecycle_event.last) do [publisher_spy] end expect(publisher_spy).to receive(:publish!).with(force_publish: false) subject_instance.publish_lifecycle_event(lifecycle_event.first) end end end describe "#publish_lifecycle_event!" 
do events_and_types.each do |lifecycle_event| it "publishes #{lifecycle_event.first} event with force_publish enabled" do allow(RooOnRails::Routemaster::Publishers).to receive(:for).with(subject, lifecycle_event.last) do [publisher_spy] end expect(publisher_spy).to receive(:publish!).with(force_publish: true) subject_instance.publish_lifecycle_event!(lifecycle_event.first) end end end end <file_sep>/Appraisals appraise 'rails-3' do gem 'rails', '~> 3.2' gem 'sqlite3' gem 'sidekiq', '< 5' gem 'pg', '~> 0.11' end appraise 'rails-4' do gem 'rails', '~> 4.2' gem 'sqlite3' gem 'pg', '~> 0.11' end appraise 'rails-5' do gem 'rails', '~> 5.0.0' gem 'sqlite3' gem 'pg', '~> 0.18' end appraise 'rails-5-1' do gem 'rails', '~> 5.1.0' gem 'sqlite3' gem 'pg', '~> 0.18' end appraise 'rails-5-2' do gem 'rails', '~> 5.2.0' gem 'bootsnap', '>= 1.1.0', require: false gem 'sqlite3' gem 'pg', '~> 0.18' end <file_sep>/spec/roo_on_rails/routemaster/publish_lifecycle_events_spec.rb require 'roo_on_rails/routemaster/publish_lifecycle_events' require 'roo_on_rails/routemaster/publishers' require 'roo_on_rails/routemaster/publisher' RSpec.describe RooOnRails::Routemaster::PublishLifecycleEvents do subject do Class.new do @after_commit_hooks = [] def self.after_commit_hooks @after_commit_hooks end def self.after_commit(*args) @after_commit_hooks << args end include RooOnRails::Routemaster::PublishLifecycleEvents end end it "adds three event hooks" do expect(subject.after_commit_hooks).to match_array([ [:publish_lifecycle_event_on_create, { on: :create }], [:publish_lifecycle_event_on_update, { on: :update }], [:publish_lifecycle_event_on_destroy, { on: :destroy }] ]) end end <file_sep>/spec/roo_on_rails/routemaster/publishers_spec.rb require 'roo_on_rails/routemaster/publisher' require 'roo_on_rails/routemaster/publishers' require 'support/test_model' RSpec.describe RooOnRails::Routemaster::Publishers do TestPublisherA = Class.new(RooOnRails::Routemaster::Publisher) TestPublisherB = 
Class.new(RooOnRails::Routemaster::Publisher) let(:publishers) { described_class } let(:model) { TestModel.new } let(:event) { :noop } describe '.for' do before { publishers.clear } context 'when no publishers are registered for a model' do it 'should return an empty list of publishers' do expect(publishers.for(model, event)).to be_empty end end context 'when a default publisher is registered' do before do publishers.register_default(TestPublisherA) end it 'should return an instance of the registered publisher class' do expect(publishers.for(model, event).size).to eq 1 expect(publishers.for(model, event).first.class).to eq TestPublisherA end it 'should have the model set on the publisher' do expect(publishers.for(model, event).first.model).to eq model end it 'should have the event set on the publisher' do expect(publishers.for(model, event).first.event).to eq event end end context 'when one publisher is registered for a model' do before do publishers.register(TestPublisherA, model_class: model.class) end it 'should return an instance of the registered publisher class' do expect(publishers.for(model, event).size).to eq 1 expect(publishers.for(model, event).first.class).to eq TestPublisherA end it 'should have the model set on the publisher' do expect(publishers.for(model, event).first.model).to eq model end it 'should have the event set on the publisher' do expect(publishers.for(model, event).first.event).to eq event end end context 'when multiple default publishers are registered' do before do publishers.register_default(TestPublisherA) publishers.register_default(TestPublisherB) end it 'should return an instance of each registered publisher class' do expect(publishers.for(model, event).size).to eq 2 expect(publishers.for(model, event).first.class).to eq TestPublisherA expect(publishers.for(model, event).last.class).to eq TestPublisherB end it 'should have the model set on the publishers' do expect(publishers.for(model, event).map(&:model).uniq).to eq [model] end 
it 'should have the event set on the publishers' do expect(publishers.for(model, event).map(&:event).uniq).to eq [event] end end context 'when multiple publishers are registered for a model' do before do publishers.register(TestPublisherA, model_class: model.class) publishers.register(TestPublisherB, model_class: model.class) end it 'should return an instance of each registered publisher class' do expect(publishers.for(model, event).size).to eq 2 expect(publishers.for(model, event).first.class).to eq TestPublisherA expect(publishers.for(model, event).last.class).to eq TestPublisherB end it 'should have the model set on the publishers' do expect(publishers.for(model, event).map(&:model).uniq).to eq [model] end it 'should have the event set on the publishers' do expect(publishers.for(model, event).map(&:event).uniq).to eq [event] end end context 'when both a default publisher and a model-specific publisher are registered' do before do publishers.register_default(TestPublisherA) publishers.register(TestPublisherB, model_class: model.class) end it 'should return an instance of only the model-specific publisher' do expect(publishers.for(model, event).size).to eq 1 expect(publishers.for(model, event).last.class).to eq TestPublisherB end it 'should have the model set on the publisher' do expect(publishers.for(model, event).first.model).to eq model end it 'should have the event set on the publisher' do expect(publishers.for(model, event).first.event).to eq event end end end end
d5cdd740f9c10660dc13efc8e5535ab00ca22df3
[ "Markdown", "Ruby" ]
23
Ruby
marcinwyszynski/roo_on_rails
8e9c0eb91cfda135b0919a2bf6c784c0574b0039
d417fac2067763af040816403a9baa392443f286
refs/heads/master
<file_sep>## <NAME> ## Created on 04/13/2019 ## ## Python Based Fibonaci LFSR with a Serial Interface ### Acts as the control for the random number generator ### capstone project ### Reads in the LFSR output from an FPGA ### Outputs the numbers generated from the LFSR ### Along with the feed from the FPGA ## ## Linear Feedback Shift Register ### Seed: ACE1 ### Taps at bits 0, 2, 3, and 5 ### Right shift ## ## Serial Interface ### Port: COM9 (USB) - will need to be set by the user ### Baud: 115200 ### Byte size: 8 bits ### Parity: none ### Timout: 10s import serial class lfsr: def __init__(self, seed): com = input("Enter the Serial Port in the Form 'COMX': ") print("\n") while(True): #infinite loop used to continue the program if the FPGA is disconnected try: self.ser = serial.Serial(com, 115200, timeout=10, bytesize=serial.EIGHTBITS) #creates a new instance of pyserial self.printWelcome() self.num = seed #the first value of the LFSR is the seed self.printResult() while(True): #loop to output the numbers from the software and hardware LFSRs. 
Loop breaks when the serial connection is lost self.shiftSequence() self.printResult() except serial.SerialException: #catches the exception triggered by the device being disconnected and waits for reconnection continue; #Function to generate the next value of the LFSR def shiftSequence(self): xor_out = ((self.num >> 5 & 1) ^ ((self.num >> 3 & 1) ^ ((self.num >> 2 & 1) ^ (self.num >> 0 & 1)))) #result of LFSR taps mask = 1 << 15 self.num = self.num >> 1 self.num = ((self.num & ~mask) | ((xor_out << 15) & mask)) #value in xor_out is placed in bit 15 of the result #Function to print the current value of the LFSR to the terminal def printResult(self): wait = True; while(wait): if(self.ser.in_waiting): #waits for the next value sent over UART to reach the input buffer input_text = self.ser.readline().decode("ascii") print("\t\t\t\t\t\t", " ", " |", "%04X" % self.num, "|", " -- ", input_text) #prints the results in hexadecimal self.ser.read_until().decode("utf-8") wait = False; def printWelcome(self): wait = True; while(wait): if(self.ser.readline()): #waits for the first newline character to be received and then discards it input_text = self.ser.readline().decode("ascii") print("\t\t\t\t\t Python Control LFSR", " -- ", input_text) #prints the welcome message self.ser.readline().decode("ascii") #catches the newline characters after the welcome message self.ser.readline().decode("ascii") wait = False; def main(): seed = 0b1010110011100001 #ACE1 - the same seed as the hardware LFSR lfsr1 = lfsr(seed) if __name__ == "__main__": main() <file_sep># python-fibonacci-lfsr Control LFSR for the High Altitude Balloon Random Number Generator Senior Capstone Project ## Function Outputs the results from the FPGA random number generator and </br > the results from the control linear feedback shift register </br > concurrently over the terminal or command line. 
* Enter the serial COM port that the FPGA is connected to * This can be found in Windows device manager * Waits for a serial connection to be established * when a connection is established, the results are displayed * Python LFSR is on the left * FPGA based LFSR is on the right ## Expected LFSR Output: ACE1 </br > 5670 </br > AB38 </br > 559C </br > 2ACE </br > 1567 </br > 8AB3 </br > 4559 </br > 22AC </br > 9156 </br > C8AB </br > E455 </br > 722A </br > 3915 </br > 1C8A </br > 8E45 </br > ....
05979f75f2a03c9806a56c9f720f0e3bd6b75c4e
[ "Markdown", "Python" ]
2
Python
RipCityBassWorks/python-fibonacci-lfsr
491610ef0f808eba7caa86e7e36d832e355f0ef7
729c62671c254b20ceab709a5211918aed6c88fe
refs/heads/master
<file_sep>#include "s8051.h" #include <iostream> #include <string> using namespace std; const set<string> s8051::m_registerName{"ACC", "B", "PSW", "SP", "DPTR0", "DPTR1", "IP", "IE", "TMOD", "SCON", "TH0", "TL0", "TH1", "TL1", "SBUF", "TH1", "TL1", "TH2", "TL2", "T2CON", "T2MOD", "RCAP2H", "RCAP2L", "SCON", "PCON", "WDTREST", "AUXR", "AUXR1", "P0", "P1", "P2", "P3"}; const set<string> s8051::m_pinName{"P1.0", "T2", "P1.1", "T2EX", "P1.2", "P1.3", "P1.4", "P1.5", "MOSI", "P1.6", "MISO", "P1.7", "SCK", "RST", "P3.0", "RXD", "P3.1", "TXD", "P3.2", "INT0", "P3.3", "INT1", "T0", "P3.4", "T1", "P3.5", "WR", "P3.6", "RD", "P3.7", "XTAL2", "XTAL1", "GND", "P2.0", "A8", "P2.1", "A9", "P2.2", "A10", "P2.3", "A11", "P2.4", "A12", "P2.5", "A13", "P2.6", "A14", "P2.7", "A15", "PSEN", "ALE/PROG", "EA/VPP", "P0.7", "AD7", "P0.6", "AD6", "P0.5", "AD5", "P0.4", "AD4", "P0.3", "AD3", "P0.2", "AD2", "P0.1", "AD1", "P0.0", "AD0", "VCC"}; void s8051::test() { cout << "Register: "; for(auto a : m_registerName){ cout << a << ","; } cout << endl; cout << "PinName: "; for (auto a : m_pinName){ cout << a << ","; } cout << endl; } s8051::~s8051() { } void s8051::reset() { } const set<string>& s8051::getPinName() const { } const set<string>& s8051::getRegisterName() const { } bool s8051::getPinValue(const string& pinName) const { } float s8051::getRegisterValue(const string& registerName) const { } const map<string, float>& s8051::getAllRegisterValue() const { } const map<string, bool>& s8051::getAllPinValue() const { } void setPinValue(const std::string& pinName, bool value) { } void setRegisterValue(const std::string& registerName, float value) { } bool s8051::run(std::string& instruct) { } bool s8051::go() { } <file_sep>#pragma once #include<map> #include<set> #include<string> class cpu{ public: virtual ~cpu(){}; virtual void reset() = 0; virtual const std::set<std::string>& getPinName() const = 0; virtual const std::set<std::string>& getRegisterName() const = 0; virtual bool 
getPinValue(const std::string& pinName) const = 0; virtual float getRegisterValue(const std::string& registerName) const = 0; virtual const std::map<std::string, float>& getAllRegisterValue() const = 0; virtual const std::map<std::string, bool>& getAllPinValue() const = 0; virtual void setPinValue(const std::string& pinName, bool value) = 0; virtual void setRegisterValue(const std::string& registerName, float value) = 0; virtual bool run(std::string& instruct) = 0; virtual bool go() = 0; //virtual bool openFile() = 0; }; <file_sep>#include <iostream> #include "s8051.h" using namespace std; int main() { s8051 scpu; scpu.test(); cout << "********option********" << endl; cout << "1,run with step by step." << endl; cout << "2, run a file." << endl; cout << "Please input modle:" <<endl; int option; bool i = true; while(i){ i = false; cin >> option; switch(option){ case 1: cout << "step modle, input instruct or `exit` return!" << endl; while(1) { string instruct; cin >> instruct; //这里处理指令 if(instruct == string("exit")) break; } break; case 2: //打开文件并转给s8051 break; default: cout << "Error: the option is unexist!!!" << endl; cout << "Input again!" 
<<endl; i = true; break; } } cout << "Exit!!!"<<endl; return 0; } <file_sep>#include "cpu.h" #define P0 m_sfr[0] #define SP m_sfr[1] #define DPL m_sfr[2] #define PCON m_sfr[7] #define TCON m_sfr[8] #define TMOD m_sfr[9] #define TL0 m_sfr[10] #define TH0 m_sfr[11] #define TL1 m_sfr[12] #define TH1 m_sfr[13] #define P1 m_sfr[16] #define SCON m_sfr[24] #define SBUF m_sfr[25] #define P2 m_sfr[32] #define IE m_sfr[40] #define P3 m_sfr[48] #define IP m_sfr[56] #define T2COM m_sfr[72] #define T2MOD m_sfr[73] #define RCAP2L m_sfr[74] #define RCAP2H m_sfr[75] #define TL2 m_sfr[76] #define TH2 m_sfr[77] #define PSW m_sfr[80] #define A m_sfr[96] #define B m_sfr[112] using std::string; using std::set; using std::map; class s8051 : public cpu{ public: s8051() = default; ~s8051(); virtual void reset() override; virtual const set<string>& getPinName() const override; virtual const set<string>& getRegisterName() const override; virtual bool getPinValue(const string& pinName) const override; virtual float getRegisterValue(const string& registerName) const override; virtual const map<string, float>& getAllRegisterValue() const override; virtual const map<string, bool>& getAllPinValue() const override; virtual void setPinValue(const std::string& pinName, bool value) override; virtual void setRegisterValue(const std::string& registerName, float value) override; virtual bool run(std::string& instruct) override; virtual bool go() override; void test(); private: char m_ram[128]; char m_sfr[128]; bool m_pin[4][8]; const static set<string> m_registerName; const static set<string> m_pinName; };
32041ee3954972823f08e6e56b1245ac51c9fb64
[ "C++" ]
4
C++
Accordeur/Simulate
a5c652e51c20eea587b1331fea8ae46f2b1577be
5558ef893ffc4f053a68a9fc7a62df58b79102a0