| branch_name (string, 149 classes) | text (string, 23–89.3M chars) | directory_id (string, 40 chars) | languages (list, 1–19 items) | num_files (int64, 1–11.8k) | repo_language (string, 38 classes) | repo_name (string, 6–114 chars) | revision_id (string, 40 chars) | snapshot_id (string, 40 chars) |
|---|---|---|---|---|---|---|---|---|

branch_name: refs/heads/master
<repo_name>fsmart85/fsmart85.github.io<file_sep>/Delo/js/script.js
$(function () {
$('.js-table .wp .item1 p').on('click', function () {
$('.js-table img').css("left", "0");
});
$('.js-table .wp .item2 p').on('click', function () {
$('.js-table img').css("left", "17.5%");
});
$('.js-table .wp .item3 p').on('click', function () {
$('.js-table img').css("left", "50%");
});
$('.js-table .wp .item4 p').on('click', function () {
$('.js-table img').css("left", "100%");
});
$('.chek').on('click', function () {
$(this).toggleClass('on');
});
$('.circle').on('click', function () {
$(this).toggleClass('on');
});
});<file_sep>/Indo/js/1.js
$(function(){
var menu = $('.header_menu ul li');
menu.on('click', function(e){
e.preventDefault();
menu.removeClass('active').filter(this).addClass('active');
})
// var img = $('.slider');
// for(img of imgs){
// img.removeClass('active').filter(this).addClass('active');
// }
});
<file_sep>/talent/js/1.js
$(function(){
var menu = $('.header_menu ul li');
menu.on('click', function(e){
e.preventDefault();
menu.removeClass('active').filter(this).addClass('active');
})
// var img = $('.slider');
// for(img of imgs){
// img.removeClass('active').filter(this).addClass('active');
// }
$('.fa').on('click', function () {
$('ul').slideToggle(300, function () {
if($(this).css('display') === 'none'){
$(this).removeAttr('style');
}
})
})
});
<file_sep>/fudi/js/1.js
window.onload = function(){
var div = document.querySelector('header');
function parallax(){
var scroll = window.pageYOffset;
// console.log(scroll);
// console.log(scroll * 0.2);
div.style = 'background-position-y:' + (scroll * 0.3) + 'px';
}
window.onscroll = function(){
parallax();
}
}
$('.slider').owlCarousel({
loop:true,
margin: 68,
responsive:{
0:{
items:1,
},
600:{
items:3,
},
1000:{
items:5,
}
}
});
<file_sep>/timberland/js/script.js
$(function () {
$('.item_card').on('click', function () {
$(location).attr('href', 'item-card.html');
// var card_img = $(this).find('img').attr('src');
// $('.card-sl-img').find('img').attr('src', function () {
// return card_img;
// });
});
$('.list_img li a').on('click', function (e) {
e.preventDefault();
$('.imgBox img').attr("src", $(this).attr("href"));
$(this).parent('li').addClass('active');
$(this).parent('li').siblings().removeClass('active');
});
//
$('.colors li').on('click', function () {
var ptext = $(this).find('p').text();
$('.pord_color h4>span').text(ptext);
});
$('.burger').on('click', function () {
$('.nav_menu').toggleClass('show');
$('.burger').toggleClass('close');
$('.burger .fas').toggleClass('fa-window-close');
});
// $('.accardion_box li a').on('click', function (e) {
// e.preventDefault();
// $(this).siblings('accar_box').addClass('active');
// $(this).closest('li').siblings('li').find('accar_box').removeClass('active');
// $('.accardion_box li a.active').removeClass('active');
// $(this).addClass('active');
//
// });
//
// $('.sign_up').on('click', function () {
// $('.register').toggleClass('show');
// });
//
// $('.catalog_menu a').on('click', function () {
// $('.catalog_menu').toggleClass('close');
// });
//
// $('.responsive_nav-menu').on('click', function () {
// $('.nav_menu').toggleClass('responsive');
// $(this).find('.fas').toggleClass('fa-times');
// });
//
// });
//
// $('.responsive-slick2').slick({
// dots: false,
// infinite: false,
// speed: 300,
// slidesToShow: 4,
// slidesToScroll: 1,
// responsive: [
// {
// breakpoint: 960,
// settings: {
// slidesToShow: 3,
// slidesToScroll: 1,
// infinite: false,
// dots: false
// }
// },
// {
// breakpoint: 600,
// settings: {
// slidesToShow: 2,
// slidesToScroll: 1
// }
// },
// {
// breakpoint: 480,
// settings: {
// slidesToShow: 1,
// slidesToScroll: 1
// }
// }
// // You can unslick at a given breakpoint now by adding:
// // settings: "unslick"
// // instead of a settings object
// ]
});
// $('.fa-chevron-down').on('click', function () {
// $('.fa-chevron-down').toggleClass('active');
// $('.link_regist').toggleClass('active');
// });
//
//
// $('.link_sign_up').on('click', function (e) {
// e.preventDefault();
// $('.modal_sg').fadeIn();
// });
// $('.close').on('click', function () {
// $('.modal_sg').fadeOut();
// });
//
// $('.menu_burger').on('click', function () {
// $('.modal-menu').addClass('show');
// $('.section .banner .slider').css({'z-index': '-1'});
// });
// $('.modal-menu .accardion li a .fa-times').on('click', function () {
// $('.modal-menu').removeClass('show');
// $('.section .banner .slider').css({'z-index': '1'});
// });
//
//
//
//
//
// $('.card-sl-img').slick({
// slidesToShow: 1,
// slidesToScroll: 1,
// autoplay: false,
// autoplaySpeed: 3000
// });
//
// $('.accardion-box .accardion li .accr-link').on('click', function (e) {
// e.preventDefault();
// $(this).siblings('.box').addClass('active');
// $(this).closest('li').siblings('li').find('.box').removeClass('active');
// });
// $('.accardion li .accr-link').on('click', function () {
// $('.accardion li .accr-link.active').removeClass('active');
// $(this).addClass('active');
// });
<file_sep>/Compassovqat/js/script.js
$(function () {
$('.accardion-box .accardion li a').on('click', function (e) {
e.preventDefault();
$(this).siblings('.box').addClass('active');
$(this).closest('li').siblings('li').find('.box').removeClass('active');
});
$('.accardion li a').on('click', function () {
$('.accardion li a.active').removeClass('active');
$(this).addClass('active');
});
$('.korzina').on('click', function () {
$('.korzina-list').slideToggle();
});
$('.burger').on('click', function () {
$('.nav_menu').slideToggle('fast');
});
});
<file_sep>/Seller/js/script.js
$(function () {
$('.slider').slick({
slidesToShow: 1,
slidesToScroll: 1,
autoplay: true,
autoplaySpeed: 5000,
dots: true
});
$('.fa-chevron-down').on('click', function () {
$('.fa-chevron-down').toggleClass('active');
$('.link_regist').toggleClass('active');
});
$('.link_sign_up').on('click', function (e) {
e.preventDefault();
$('.modal_sg').fadeIn();
});
$('.close').on('click', function () {
$('.modal_sg').fadeOut();
});
$('.menu_burger').on('click', function () {
$('.modal-menu').addClass('show');
$('.section .banner .slider').css({'z-index': '-1'});
});
$('.modal-menu .accardion li a .fa-times').on('click', function () {
$('.modal-menu').removeClass('show');
$('.section .banner .slider').css({'z-index': '1'});
});
$('.card').on('click', function () {
$(location).attr('href', 'item-card.html');
var card_img = $(this).find('img').attr('src');
$('.card-sl-img').find('img').attr('src', function () {
return card_img;
});
});
$('.card-sl-img').slick({
slidesToShow: 1,
slidesToScroll: 1,
autoplay: false,
autoplaySpeed: 3000
});
$('.accardion-box .accardion li .accr-link').on('click', function (e) {
e.preventDefault();
$(this).siblings('.box').addClass('active');
$(this).closest('li').siblings('li').find('.box').removeClass('active');
});
$('.accardion li .accr-link').on('click', function () {
$('.accardion li .accr-link.active').removeClass('active');
$(this).addClass('active');
});
});
<file_sep>/TurboGym/js/script.js
$(function () {
$('.btn_burger').on('click', function () {
$('.nav_menu').toggleClass('responsive');
});
$('.accardion li a').on('click', function (e) {
e.preventDefault();
$(this).siblings('.box').addClass('active');
$(this).closest('li').siblings('li').find('.box').removeClass('active');
});
$('li a').on('click', function () {
$('li a.active').removeClass('active');
$(this).addClass('active');
});
$(".header .nav_menu li a").click(function () {
var elementClick = $(this).attr("href");
var destination = $(elementClick).offset().top;
jQuery("html:not(:animated),body:not(:animated)").animate({scrollTop: destination}, 800);
return false;
});
$('.btn_Up').on('click', function () {
$('body, html').animate({'scrollTop': 0}, 800)
});
$(window).scroll(function () {
if($(window).scrollTop() > 200){
$('.btn_Up').addClass('active')
}else {
$('.btn_Up').removeClass('active')
}
});
$('.btn_send').on('click', function () {
$('.forms .form').find('input:text, input, textarea ').val('');
})
});<file_sep>/js/script.js
$(function () {
$('.btn_burger').on('click', function () {
$('.nav_menu').toggleClass('responsive');
});
$(".nav_menu li a").click(function () {
var elementClick = $(this).attr("href");
var destination = $(elementClick).offset().top-20;
jQuery("html:not(:animated),body:not(:animated)").animate({scrollTop: destination}, 800);
return false;
});
$('.logo').on('click', function (e) {
e.preventDefault();
$('body, html').animate({'scrollTop': 0}, 800)
});
$('.img_list li a').on('click', function (e) {
e.preventDefault();
$('.imgBox img').attr("src", $(this).attr("href"));
var ptext = $(this).siblings('.img_link').find('p').text();
$('.imgBox .imgBox_title p').text(ptext);
var h4text = $(this).siblings('.img_link').find('h4').text();
$('.imgBox .imgBox_title h4').text(h4text);
var ahref = $(this).siblings('.img_link').find('a').attr("href");
$('.imgBox .imgBox_title .btn_go').attr("href", ahref);
});
$('.toggle-icon').on('click', function () {
$('.nav_menu').slideToggle();
$('.toggle-icon .fas').toggleClass('fa-times');
});
$('.btn_submit').on('click', function () {
$('input:not(input.btn_submit), textarea').val("");
alert("Ваше сообщение не отправлено!" +
" Отправте сообщение через Telegram.");
});
$('input:not(input.btn_submit), textarea').on('focus', function () {
$(this).attr("placeholder", "");
});
$(window).scroll(function () {
if($(window).scrollTop() > 100){
$('.btn_Up').addClass('active');
$('.nav_top').addClass('fixed');
}else {
$('.btn_Up').removeClass('active');
$('.nav_top').removeClass('fixed');
}
var st = $(window).scrollTop();
$('.banner_img').css('transform', 'translate(0%, ' + st * 0.60 + 'px)');
$('header[id], .sec_content[id]').each(function () {
var id = $(this).attr("id");
if($(this).offset().top-200 < $(window).scrollTop()){
$('.nav_menu li a[href="#'+id+'"]').parent().addClass('active').siblings().removeClass('active');
}
})
});
});
<file_sep>/README.md
# fsmart85.github.io
Portfolio
<file_sep>/myportfolio/js/js.js
$(function() {
$('.item').on('click', function () {
$('.items').toggleClass('items-origin');
});
$(".navbar .collapse .navbar-nav .nav-item .nav-link, .mycard .btn").click(function () {
var elementClick = $(this).attr("href");
var destination = $(elementClick).offset().top;
jQuery("html:not(:animated),body:not(:animated)").animate({scrollTop: destination}, 800);
return false;
});
var menu = $('.header_menu ul li');
menu.on('click', function(e){
e.preventDefault();
menu.removeClass('active').filter(this).addClass('active');
});
$('.fa').on('click', function () {
$('ul').slideToggle(300, function () {
if($(this).css('display') === 'none'){
$(this).removeAttr('style');
}
})
})
$('.owl-carousel').owlCarousel({
rtl: true,
margin: 5,
loop: true,
nav: true,
responsive: {
0: {
items: 2
},
600: {
items: 4
},
1000: {
items: 8
}
}
});
$(window).scroll(function () {
parallax();
});
function parallax() {
var st = $(window).scrollTop();
$('.parallax-bg').css('background-position', 'center ' + (st * 0.6) + 'px');
$('.mycard').css('transform', 'translate(0%, ' + st * 0.15 + 'px)');
}
});
<file_sep>/Pantheon/js/script.js
$(document).ready(function(){
$('.sidenav').sidenav();
$('.tabs_list li .tabs_link a').on('click', function (e) {
e.preventDefault();
$(this).parents('li').find('.box').addClass('active');
$(this).parents('li').siblings('li').find('.box').removeClass('active');
});
$('.tabs_life .tabs_list li .tabs_link a').on('click', function () {
$('.tabs_list li .tabs_link.active').removeClass('active');
$(this).parents('.tabs_link').addClass('active');
});
$('.tabs_recidence li .tabs_link .link').on('click', function () {
$(this).parents('li').find('.tabs_box').addClass('active');
$(this).parents('li').siblings('li').find('.tabs_box').removeClass('active');
$(this).parents('.tabs_link').addClass('active');
$(this).parents('li').siblings('li').find('.tabs_link').removeClass('active');
});
});
directory_id: 52a79597082b97cccead4492e8017b20df7e9253 | languages: ["JavaScript", "Markdown"] | num_files: 12 | repo_language: JavaScript | repo_name: fsmart85/fsmart85.github.io | revision_id: 6fcc96d942aaeafc756ba382b75bc3550fcd15e2 | snapshot_id: 5c60414c187698a4ee328eee3e1bb4c9d728186d

branch_name: refs/heads/master
<file_sep><?php
namespace agilov\yii2testingkit;
use yii\test\FixtureTrait;
/**
* Class FixtureGenerator
*
* Fixture generator helper to generate relational fixtures
*
* @author <NAME> <<EMAIL>>
*/
class FixtureGenerator
{
use FixtureTrait;
/** @var FixtureGenerator */
public static $instance;
/** @var array */
public $data = [];
/** @var array */
public $fixtures = [];
/** @var \Closure */
public $generate;
/**
* @inheritdoc
*/
public function fixtures()
{
return $this->fixtures;
}
public function __construct(callable $generate, array $fixtures = [])
{
$this->generate = $generate;
$this->fixtures = $fixtures;
}
/**
* @param callable $generate
* @param array $fixtures
*/
public static function generate(callable $generate, array $fixtures = [])
{
if (static::$instance === null) {
static::$instance = new FixtureGenerator($generate, $fixtures);
static::$instance->initFixtures();
static::$instance->generate->call(static::$instance);
static::$instance->unloadFixtures();
}
}
/**
* @return array
*/
public static function shiftData()
{
return array_shift(static::$instance->data);
}
}
<file_sep><?php
namespace agilov\yii2testingkit;
use PHPUnit\Framework\TestCase;
/**
* Class FixtureGeneratorTest
* vendor/bin/phpunit src/FixtureGeneratorTest
*
* @author <NAME> <<EMAIL>>
*/
final class FixtureGeneratorTest extends TestCase
{
/**
* vendor/bin/phpunit --filter testGenerate src/FixtureGeneratorTest
*/
public function testGenerate()
{
FixtureGenerator::generate(function () {
/** @var FixtureGenerator $this */
$this->data[] = ['foo' => 'bar'];
});
$this->assertInstanceOf(FixtureGenerator::class, FixtureGenerator::$instance);
$this->assertTrue(FixtureGenerator::$instance->data[0]['foo'] == 'bar');
$this->assertTrue(FixtureGenerator::shiftData() == ['foo' => 'bar']);
$this->assertTrue(count(FixtureGenerator::$instance->data) == 0);
}
}
<file_sep><?php
namespace agilov\yii2testingkit;
use Yii;
use yii\base\ArrayAccessTrait;
use yii\base\InvalidConfigException;
use yii\redis\ActiveRecord;
use yii\test\DbFixture;
/**
* Class RedisFixture
*
* @author <NAME> <<EMAIL>>
*/
class RedisFixture extends DbFixture
{
use ArrayAccessTrait;
public $db = 'redis';
/**
* @var ActiveRecord
*/
public $modelClass;
/**
* @var array the data rows. Each array element represents one row of data (column name => column value).
*/
public $data = [];
/**
* @var string|bool the file path or [path alias](guide:concept-aliases) of the data file that contains the fixture data
* to be returned by [[getData()]]. You can set this property to be false to prevent loading any data.
*/
public $dataFile;
/**
* Loads the fixture.
*
* The default implementation simply stores the data returned by [[getData()]] in [[data]].
* You should usually override this method by putting the data into the underlying database.
*/
public function load()
{
$this->data = $this->getData();
foreach ($this->data as $alias => $row) {
/** @var ActiveRecord $model */
$model = new $this->modelClass($row);
$model->save(false);
$this->data[$alias] = $model->getAttributes();
}
}
/**
* @inheritdoc
*/
public function unload()
{
$this->data = [];
$this->modelClass::deleteAll();
}
/**
* @inheritdoc
*/
public function init()
{
parent::init();
if ($this->modelClass === null) {
throw new InvalidConfigException('"modelClass" must be set.');
}
}
/**
* @return array|mixed
* @throws InvalidConfigException
*/
protected function getData()
{
if ($this->dataFile === null) {
$class = new \ReflectionClass($this);
$dataFile = dirname($class->getFileName()) . '/data/' . $this->modelClass::keyPrefix() . '.php';
return is_file($dataFile) ? require $dataFile : [];
}
$dataFile = Yii::getAlias($this->dataFile);
if (is_file($dataFile)) {
return require $dataFile;
}
throw new InvalidConfigException("Fixture data file does not exist: {$this->dataFile}");
}
}
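// Illustrative sketch (not part of the original package): a minimal concrete
// fixture built on RedisFixture, assuming a hypothetical redis ActiveRecord
// class app\models\User whose keyPrefix() is "user". With $dataFile left null,
// getData() above falls back to a data/user.php file next to this fixture class.
class UserRedisFixture extends RedisFixture
{
    /** @var string hypothetical redis ActiveRecord class */
    public $modelClass = 'app\models\User';
}
// The assumed data/user.php would simply return rows keyed by alias, e.g.:
// return [
//     'user1' => ['id' => 1, 'email' => 'user1@example.com'],
//     'user2' => ['id' => 2, 'email' => 'user2@example.com'],
// ];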
<file_sep><?php
namespace agilov\yii2testingkit;
/**
* Trait FakerTrait
*
* @author <NAME> <<EMAIL>>
*/
trait FakerTrait
{
/** @var \Faker\Generator */
protected $_faker;
/**
* @return \Faker\Generator
*/
public function fake()
{
if ($this->_faker === null) {
$this->_faker = \Faker\Factory::create();
}
return $this->_faker;
}
}
<file_sep><?php
namespace agilov\yii2testingkit;
use yii\helpers\FileHelper;
/**
* Class FixtureController
*
* @author <NAME> <<EMAIL>>
*/
class FixtureController extends \yii\faker\FixtureController
{
/**
* @var string Group of fixtures (suite)
*/
public $suite;
/**
* @inheritdoc
*/
public function options($actionID)
{
return array_merge(parent::options($actionID), ['suite']);
}
/**
* @inheritdoc
*/
public function generateFixtureFile($templateName, $templatePath, $fixtureDataPath)
{
$fixtures = [];
for ($i = 0; $i < $this->count; $i++) {
$id = $i + 1;
$fixture = $this->generateFixture($templatePath . '/' . $templateName . '.php', $id);
if (!$fixture || $fixture === 1) {
continue;
}
$key = $templateName . '_' . $id;
$fixtures[$key] = $fixture;
}
$content = $this->exportFixtures($fixtures);
// data file full path
$dataFile = $fixtureDataPath . '/' . $templateName . '.php';
// data file directory, create if it doesn't exist
$dataFileDir = dirname($dataFile);
if (!file_exists($dataFileDir)) {
FileHelper::createDirectory($dataFileDir);
}
file_put_contents($dataFile, $content);
}
/**
* @inheritdoc
*/
public function checkPaths()
{
if ($this->suite) {
$this->templatePath = "@app/modules/{$this->suite}/fixtures/templates";
$this->fixtureDataPath = "@app/modules/{$this->suite}/fixtures/data";
}
return;
}
}
<file_sep><?php
namespace agilov\yii2testingkit;
use yii\base\Model;
/**
* Trait TestValidatorTrait
*
* @author <NAME> <<EMAIL>>
*/
trait TestValidatorTrait
{
/**
* @param Model $model
* @param array $required
*/
public function checkRequiredAttributes(Model $model, array $required)
{
expect($model->validate())->false();
foreach ($required as $r) {
expect($model->hasErrors($r))->true();
}
}
/**
* @param Model $model
* @param array $invalid
*/
public function tryInvalidAttributes(Model $model, array $invalid)
{
foreach ($invalid as $k => $v) {
if (!is_array($v)) {
$v = [$v];
}
foreach ($v as $val) {
$model->{$k} = $val;
expect($model->validate())->false();
expect($model->hasErrors($k))->true();
}
}
}
}
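// Illustrative sketch (not part of the original package): using the trait from a
// Codeception-style unit test. ContactForm, its attributes, and the invalid
// values are hypothetical; expect() is the Codeception verify helper already
// relied on by the trait above.
class ContactFormValidationTest extends \Codeception\Test\Unit
{
    use TestValidatorTrait;

    public function testRules()
    {
        $model = new \app\models\ContactForm(); // hypothetical model under test
        $this->checkRequiredAttributes($model, ['name', 'email']);
        $this->tryInvalidAttributes($model, [
            'email' => ['not-an-email', 123],
        ]);
    }
}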
<file_sep># Yii 2 testing kit
[](https://travis-ci.org/agilov/yii2-testing-kit)
<file_sep><?php
namespace agilov\yii2testingkit;
use yii\test\FixtureTrait;
use Yii;
/**
* Trait ActiveTestTrait
*
* @author <NAME> <<EMAIL>>
*/
trait ActiveTestTrait
{
use FixtureTrait;
public $model;
/**
* @inheritdoc
*/
protected function _before()
{
$this->initFixtures();
$this->model = Yii::createObject($this->modelClass());
}
/**
* @inheritdoc
*/
protected function _after()
{
$this->unloadFixtures();
}
/**
* Class name or config array for Yii::createObject() method
*
* @return string|array
*/
abstract protected function modelClass();
}
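// Illustrative sketch (not part of the original package): a test case wiring the
// trait to a hypothetical app\models\Post model and PostFixture. The Codeception
// _before()/_after() hooks above then load and unload fixtures around each test
// and expose a fresh model instance as $this->model.
class PostModelTest extends \Codeception\Test\Unit
{
    use ActiveTestTrait;

    protected function modelClass()
    {
        return 'app\models\Post'; // hypothetical ActiveRecord class
    }

    public function fixtures()
    {
        return [
            'posts' => \app\fixtures\PostFixture::class, // hypothetical fixture
        ];
    }
}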
<file_sep><?php
namespace agilov\yii2testingkit;
use yii\helpers\VarDumper;
/**
* Trait DynamicFixtureTrait
*
* @author <NAME> <<EMAIL>>
*/
trait DynamicFixtureTrait
{
/**
* @param callable $func
* @param string $filename
* @param array $depends
*/
public function generateFile(callable $func, string $filename, array $depends = [])
{
if (file_exists($filename)) {
return;
}
$generator = new FixtureGenerator($func, $depends);
$generator->initFixtures();
$generator->generate->call($generator);
$generator->unloadFixtures();
$folder = dirname($filename);
if (!file_exists($folder)) {
mkdir($folder, 0777, true);
}
file_put_contents($filename, "<?php\n\nreturn " . VarDumper::export($generator->data) . ";\n");
}
}
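// Illustrative sketch (not part of the original package): a helper that builds a
// fixture data file once and reuses it on later runs. The target path is
// hypothetical; inside the closure $this is rebound to the FixtureGenerator, so
// rows are pushed into $this->data exactly as in FixtureGenerator::generate().
class ExampleFixtureBuilder
{
    use DynamicFixtureTrait;

    public function build(): void
    {
        $this->generateFile(function () {
            /** @var FixtureGenerator $this */
            $this->data['user1'] = ['id' => 1, 'name' => 'Example'];
        }, __DIR__ . '/data/user.php', []);
    }
}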
directory_id: d57ccc115a1476eb48e947663bc1239acbb30d7c | languages: ["Markdown", "PHP"] | num_files: 9 | repo_language: PHP | repo_name: agilov/yii2-testing-kit | revision_id: 57bca94578ff1235454b9d7877e33bc818017beb | snapshot_id: 8f0b0c9e5457abd8ce82cde34a5a7d7ac1c65214

branch_name: refs/heads/master
<file_sep>import React from "react";
import { withRouter } from "react-router-dom";
function AuthButton(props) {
const { history } = props;
return props.isAuthenticated ? (
<p>
Welcome!{" "}
<button
onClick={() => {
props.logout(() => history.push("/"));
}}
>
Sign out
</button>
</p>
) : (
<p>You are not logged in.</p>
);
}
const AuthButtonWithRouter = withRouter(AuthButton);
export default AuthButtonWithRouter;
directory_id: 55ba64c709ced152ae36476d6a594fa7c1f695eb | languages: ["JavaScript"] | num_files: 1 | repo_language: JavaScript | repo_name: muntasir2165/react-auth-example | revision_id: 05270dd104d598a002c28c67ff5e1647f8f8e7c8 | snapshot_id: da2c4f623ed1d9b78b816c31ffebc118f04119fa

branch_name: refs/heads/main
<repo_name>diegotoledano95/Data-structure-algorithms<file_sep>/binary_search.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue May 25 00:05:06 2021
@author: diegotoledano
"""
l = []
entry = input('Ingresa los numeros que quieres calcular: ')
x = entry.split(',')
for i in x:
l.append(int(i))
target = int(input('Target: '))
l.sort()
print(l)
print(target)
def binary_search(l, target, low=None, high=None):
    if low is None:
        low = 0
    if high is None:
        high = len(l) - 1
    if high < low:
        return -1
    midpoint = (high + low) // 2
    if l[midpoint] == target:
        return midpoint
    elif target < l[midpoint]:
        # Keep the current lower bound; only the upper bound shrinks.
        # (Passing low=None here would reset the search and could recurse forever.)
        return binary_search(l, target, low=low, high=midpoint - 1)
    else:
        # Keep the current upper bound; only the lower bound grows.
        return binary_search(l, target, low=midpoint + 1, high=high)
print(binary_search(l, target))
<file_sep>/Binarytree_implementation.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Jun 14 13:56:28 2021
@author: diegotoledano
"""
class Node:
def __init__(self, data):
self.data = data
self.left = None
self.right = None
def insert(self, data):
if self.data:
if data < self.data:
if self.left is None:
self.left = Node(data)
else:
self.left.insert(data)
elif data > self.data:
if self.right is None:
self.right = Node(data)
else:
self.right.insert(data)
else:
self.data = data
def Printing(self):
if self.left:
self.left.Printing()
print(self.data)
if self.right:
self.right.Printing()
root = Node(12)
root.insert(6)
root.insert(14)
root.insert(3)
root.insert(11)
root.insert(4)
root.Printing()<file_sep>/README.md
# Data-structure-algorithms
Counting sort exercise. Binary tree implementation. Binary search. Binary tree vertical print.
directory_id: 0002e333dc35f29e425df36776fdd22dd99ac02b | languages: ["Markdown", "Python"] | num_files: 3 | repo_language: Python | repo_name: diegotoledano95/Data-structure-algorithms | revision_id: b3bbc1e4a382e14534cd658b8ce59313705289be | snapshot_id: 8364b7dc1a39b596a8f7566c8011c7ff34ad955a

branch_name: refs/heads/master
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
namespace PROCOPRU.Commons
{
/// <summary>
/// Enum that defines the user levels and their permission IDs
/// - ADMINISTRADOR (52-57)
/// - MUNICIPIO (58)
/// </summary>
public enum RolesPermisos
{
#region "ADMINISTRADOR"
ADMIN_PUBLICAR_NOTICIA=52,
ADMIN_VER_USUARIO_GENERAL=53,
ADMIN_VER_MUNICIPIOS_GENERAL=54,
ADMIN_CREAR_USUARIO_MUNICIPIO=55,
ADMIN_BORRAR_MUNICIPIO=56,
ADMIN_RETROALIMENTACION=57,
#endregion
#region "MUNICIPIO"
MUNICIPIO_VER_MUNICIPIO =58
#endregion
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using PROCOPRU.Modelo.Helpers;
using System.Linq;
using System.Data.Entity;
namespace PROCOPRU.Modelo
{
[Table("Rolls")]
public class Roll:ErrorHelper
{
public Roll() {
}
[Key]
public int Id { get; set; }
public String Nombre { get; set; }
public virtual ICollection<PermisosDenegadosPorRoll> PermisosDenegadorPorRoll { get; set; }
public virtual ICollection<Usuarios> Usuarios { get; set; }
public static Roll getByid(int Id) {
try {
using (var ctx = new DataModel()) {
return ctx.roll.Include("Usuarios")
.Include("PermisosDenegadorPorRoll")
.Where(r=>r.Id==Id).FirstOrDefault();
}
} catch (Exception) { throw; }
}
}
}
<file_sep>namespace PROCOPRU.Migrations
{
using System;
using System.Data.Entity;
using System.Data.Entity.Migrations;
using System.Linq;
using PROCOPRU.Modelo;
using System.Collections.Generic;
internal sealed class Configuration : DbMigrationsConfiguration<Modelo.DataModel>
{
public Configuration()
{
AutomaticMigrationsEnabled = false;
}
protected override void Seed(Modelo.DataModel context)
{
/* var roles = new List<Roll>{
new Roll{Id=1, Nombre="Developer"},
new Roll{Id=2, Nombre="Administrador"},
new Roll{Id=3,Nombre="Municipio"}
};
roles.ForEach(s => context.roll.Add(s));
context.SaveChanges();
var usuarios = new List<Usuarios>{
new Usuarios{Id=1,RollId=1, Nombre="Developer 1",Email="<EMAIL>",Password="<PASSWORD>"},
new Usuarios{Id=2,RollId=1, Nombre="Developer 2",Email="<EMAIL>",Password="<PASSWORD>"},
new Usuarios{Id=3,RollId=2, Nombre="Administrador",Email="<EMAIL>",Password="<PASSWORD>"},
new Usuarios{Id=4,RollId=3, Nombre="Municipio",Email="<EMAIL>",Password="<PASSWORD>"}
};
usuarios.ForEach(s => context.usuarios.Add(s));
context.SaveChanges();
var permisos = new List<Permisos> {
new Permisos{Id=1,Modulo="Encuesta",Descripcion=""},
new Permisos{Id=2,Modulo="Reportes",Descripcion=""},
new Permisos{Id=3,Modulo="Admini",Descripcion=""},
};
permisos.ForEach(s => context.permisos.Add(s));
context.SaveChanges();*/
}
}
}
<file_sep>namespace PROCOPRU.Migrations
{
using System;
using System.Data.Entity.Migrations;
public partial class ADDCOLUMNASENCABEZADOIMAGENCUESTIONARIOPREGUNTAS : DbMigration
{
public override void Up()
{
AddColumn("dbo.EvaluacionesPreguntas", "EncabezadoCuestionario", c => c.String());
AddColumn("dbo.EvaluacionesPreguntas", "imagenCustrionario", c => c.String());
}
public override void Down()
{
DropColumn("dbo.EvaluacionesPreguntas", "imagenCustrionario");
DropColumn("dbo.EvaluacionesPreguntas", "EncabezadoCuestionario");
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using Modelo.helpers;
namespace Modelo
{
// Class for managing user roles
public class Roll:ErrorHelper
{
public Roll() { }
[Key]
public int Id { get; set; }
public String Nombre { get; set; }
public virtual ICollection<PermisosDenegadosPorRoll> PermisosDenegadorPorRoll { get; set; }
public virtual ICollection<Usuarios> Usuarios { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Modelo
{
public class DataModelInitialice:System.Data.Entity.DropCreateDatabaseIfModelChanges<DataModel>
{
protected override void Seed(DataModel context)
{
var roles=new List<Roll>{
new Roll{Id=1, Nombre="Developer"},
new Roll{Id=2, Nombre="Administrador"},
new Roll{Id=3,Nombre="Municipio"}
};
roles.ForEach(s => context.roll.Add(s));
context.SaveChanges();
var usuarios = new List<Usuarios>{
new Usuarios{Id=1,RollId=1, Nombre="Developer 1",Email="<EMAIL>",Password="<PASSWORD>"},
new Usuarios{Id=2,RollId=1, Nombre="Developer 2",Email="<EMAIL>",Password="<PASSWORD>"},
new Usuarios{Id=3,RollId=2, Nombre="Administrador",Email="<EMAIL>",Password="<PASSWORD>"},
new Usuarios{Id=4,RollId=3, Nombre="Municipio",Email="<EMAIL>",Password="<PASSWORD>"}
};
usuarios.ForEach(s => context.usuarios.Add(s));
context.SaveChanges();
var permisos = new List<Permisos> {
new Permisos{Id=1,Modulo="Encuesta",Descripcion=""},
new Permisos{Id=2,Modulo="Reportes",Descripcion=""},
new Permisos{Id=3,Modulo="Admini",Descripcion=""},
};
permisos.ForEach(s => context.permisos.Add(s));
context.SaveChanges();
//base.Seed(context);
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using PROCOPRU.Modelo;
using PROCOPRU.ViewModel;
using PROCOPRU.Commons;
namespace PROCOPRU.Controllers
{
public class UsuariosController : Controller
{
// GET: Usuarios
public ActionResult Index()
{
ViewBag.Usuarios = Usuarios.getAll();
return View();
}
public ActionResult frmUsuario(int Id) {
ViewBag.IdMuncipio = Id;
return View(new UsuarioViewModel());
}
public ActionResult borrarUsuario(int Id) {
Usuarios usua = Usuarios.getById(Id);
Usuarios.Borrar(Id);
return Redirect("~/CatMunicipio/verDetalle/" + usua.IdMunicipio);
}
public JsonResult GuardarUsuario(UsuarioViewModel nUsuario) {
Usuarios usuario = new Usuarios();
var response = new ResponseModel();
if (ModelState.IsValid)
{
usuario.Email = nUsuario.Email;
usuario.Nombre = nUsuario.Nombre;
usuario.Password = <PASSWORD>(<PASSWORD>);
usuario.TipoUsuario = nUsuario.TipoUsuario;
usuario.IdMunicipio = nUsuario.IdMunicipio; //Municipio.getById(nUsuario.IdMunicipio);
usuario.Status = true;
response = usuario.Guardar();
if (response.response) {
response.href = Url.Content("~/CatMunicipio");
}
}
return Json(response);
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using PROCOPRU.Modelo;
using PROCOPRU.Commons.Helper;
namespace PROCOPRU.Controllers
{
public class MensajesController : Controller
{
// GET: Mensajes
public ActionResult Index()
{
ViewBag.Noticias = Mensaje.getAllTypeGeneral();
return View();
}
public ActionResult frmMensaje() {
return View(new Mensaje());
}
public JsonResult GuardarNoticia(Mensaje mensaje) {
mensaje.Remitente = Usuarios.getById(SessionHelper.GetUser());
var response = new ResponseModel();
if (ModelState.IsValid) {
response = mensaje.Guardar();
if (response.response) {
response.href = Url.Content("~/Home/Index");
}
}
return Json( response);
}
public ActionResult Atender(int Id) {
Mensaje.Atender(Id);
return RedirectToAction("index");
}
//// GET: Mensajes/Details/5
//public ActionResult Details(int id)
//{
// return View();
//}
//// GET: Mensajes/Create
//public ActionResult Create()
//{
// return View();
//}
//// POST: Mensajes/Create
//[HttpPost]
//public ActionResult Create(FormCollection collection)
//{
// try
// {
// // TODO: Add insert logic here
// return RedirectToAction("Index");
// }
// catch
// {
// return View();
// }
//}
//// GET: Mensajes/Edit/5
//public ActionResult Edit(int id)
//{
// return View();
//}
//// POST: Mensajes/Edit/5
//[HttpPost]
//public ActionResult Edit(int id, FormCollection collection)
//{
// try
// {
// // TODO: Add update logic here
// return RedirectToAction("Index");
// }
// catch
// {
// return View();
// }
//}
//// GET: Mensajes/Delete/5
//public ActionResult Delete(int id)
//{
// return View();
//}
//// POST: Mensajes/Delete/5
//[HttpPost]
//public ActionResult Delete(int id, FormCollection collection)
//{
// try
// {
// // TODO: Add delete logic here
// return RedirectToAction("Index");
// }
// catch
// {
// return View();
// }
//}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
namespace PROCOPRU.ViewModel
{
public class RestroAlimentacionViewModel
{
public int IdCaptura { get; set; }
//[AllowHtml]
public string HtmlContent { get; set; }
public RestroAlimentacionViewModel() { }
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using PROCOPRU.Modelo;
using PROCOPRU.ViewModel;
namespace PROCOPRU.Controllers
{
public class CapturasController : Controller
{
//
// GET: /Capturas/
public ActionResult Index(int Id = 0)
{
Localidad loc = null;
Captura cap = null;
HttpCookie cookie;
if (Id > 0)
{
cookie = new HttpCookie("idLocalidad");
cookie.Value = Id.ToString();
this.ControllerContext.HttpContext.Response.Cookies.Add(cookie);
}
else
{
if (this.ControllerContext.HttpContext.Request.Cookies.AllKeys.Contains("idLocalidad"))
{
Id = Convert.ToInt32(this.ControllerContext.HttpContext.Request.Cookies["idLocalidad"].Value);
}
else {
Redirect("~/CatMunicipio/Index");
}
}
loc = Localidad.getById(Id);
#region "EVALUACION"
cap = Captura.getByAnio(System.DateTime.Now.ToString("yyyy"), loc.Id);
if (cap == null) {
cap = Captura.CrearNueva(loc);
}
cookie = new HttpCookie("idCaptura");
cookie.Value = cap.Id.ToString();
this.ControllerContext.HttpContext.Response.Cookies.Add(cookie);
ViewBag.Captura = cap;
#endregion
ViewBag.Mesajes = Mensaje.getAllTypeGeneral();
return View(loc);
}
private Evaluacion obtnerEvaluacion(enumTipoLocalidad tipo,enumTipoEvaluacion tipoEva,int idLocalidad) {
var captura = Captura.getByAnio(System.DateTime.Now.ToString("yyyy"), idLocalidad);
Evaluacion eva = null;
ViewBag.Evaluacion = null;
foreach (var item in captura.Evaluaciones.Where(r => r.bStatus == true && r.TipoEvaluacion==tipoEva))
{
if (item.TipoLocalidad == tipo)
{
eva = Evaluacion.getById(item.Id);
ViewBag.Evaluacion = eva;
HttpCookie cookie = new HttpCookie("idEvaluacion"); ;
if (eva != null)
{
cookie.Value = eva.Id.ToString();
this.ControllerContext.HttpContext.Response.Cookies.Add(cookie);
}
else
{
cookie.Value = "";
this.ControllerContext.HttpContext.Response.Cookies.Add(cookie);
}
break;
}
}
return eva;
}
#region "CAPTURAS PRINCIPAL"
public ActionResult PUCiudadP(int idLocalidad, int tipoLoc) {
enumTipoLocalidad tipo = (enumTipoLocalidad)tipoLoc;
var localidad = Localidad.getById(idLocalidad);
ViewBag.Localidad = localidad;
var captura=obtnerEvaluacion(tipo,enumTipoEvaluacion.PU, idLocalidad);
EvaluacionPregunta ePregunta = EvaluacionPregunta.getEvaluacio(enumTipoEvaluacion.PU, tipo);
if (captura != null) {
int TotalPreguntas=captura.RespuestaCuestionario.Count;
if (ePregunta.CantidadPreguntas() > TotalPreguntas) {
return Redirect("~/Capturas/Index/" + localidad.Id);
}
}
return View(ePregunta);
}
public ActionResult CVCiudadP(int idLocalidad, int tipoLoc)
{
enumTipoLocalidad tipo = (enumTipoLocalidad)tipoLoc;
var localidad = Localidad.getById(idLocalidad);
ViewBag.Localidad = localidad;
ViewBag.TipoLocalidad = tipo;
var captura = obtnerEvaluacion(tipo, enumTipoEvaluacion.CV, idLocalidad);
EvaluacionPregunta ePregunta = EvaluacionPregunta.getEvaluacio(enumTipoEvaluacion.CV, tipo);
if (captura != null)
{
int TotalPreguntas = captura.RespuestaCuestionario.Count;
if (ePregunta.CantidadPreguntas() > TotalPreguntas)
{
return Redirect("~/Capturas/Index/" + localidad.Id);
}
}
return View(ePregunta);
}
public ActionResult IECiudadP(int idLocalidad, int tipoLoc)
{
//enumTipoLocalidad tipo = (enumTipoLocalidad)tipoLoc;
//ViewBag.Localidad = Localidad.getById(idLocalidad);
//ViewBag.TipoLocalidad = tipo;
//obtnerEvaluacion(tipo, enumTipoEvaluacion.IE, idLocalidad);
//return View(EvaluacionPregunta.getEvaluacio(enumTipoEvaluacion.IE, tipo));
enumTipoLocalidad tipo = (enumTipoLocalidad)tipoLoc;
var localidad = Localidad.getById(idLocalidad);
ViewBag.Localidad = localidad;
ViewBag.TipoLocalidad = tipo;
var captura = obtnerEvaluacion(tipo, enumTipoEvaluacion.IE, idLocalidad);
EvaluacionPregunta ePregunta = EvaluacionPregunta.getEvaluacio(enumTipoEvaluacion.IE, tipo);
if (captura != null)
{
int TotalPreguntas = captura.RespuestaCuestionario.Count;
if (ePregunta.CantidadPreguntas() > TotalPreguntas)
{
return Redirect("~/Capturas/Index/" + localidad.Id);
}
}
return View(ePregunta);
}
public ActionResult MTCiudadP(int idLocalidad, int tipoLoc)
{
//enumTipoLocalidad tipo = (enumTipoLocalidad)tipoLoc;
//ViewBag.Localidad = Localidad.getById(idLocalidad);
//ViewBag.TipoLocalidad = tipo;
//obtnerEvaluacion(tipo, enumTipoEvaluacion.MT, idLocalidad);
//return View(EvaluacionPregunta.getEvaluacio(enumTipoEvaluacion.MT, tipo));
enumTipoLocalidad tipo = (enumTipoLocalidad)tipoLoc;
var localidad = Localidad.getById(idLocalidad);
ViewBag.Localidad = localidad;
ViewBag.TipoLocalidad = tipo;
var captura = obtnerEvaluacion(tipo, enumTipoEvaluacion.MT, idLocalidad);
EvaluacionPregunta ePregunta = EvaluacionPregunta.getEvaluacio(enumTipoEvaluacion.MT, tipo);
if (captura != null)
{
int TotalPreguntas = captura.RespuestaCuestionario.Count;
if (ePregunta.CantidadPreguntas() > TotalPreguntas)
{
return Redirect("~/Capturas/Index/" + localidad.Id);
}
}
return View(ePregunta);
}
public ActionResult SACiudadP(int idLocalidad, int tipoLoc)
{
//enumTipoLocalidad tipo = (enumTipoLocalidad)tipoLoc;
//ViewBag.Localidad = Localidad.getById(idLocalidad);
//obtnerEvaluacion(tipo, enumTipoEvaluacion.SA, idLocalidad);
//return View(EvaluacionPregunta.getEvaluacio(enumTipoEvaluacion.SA, tipo));
enumTipoLocalidad tipo = (enumTipoLocalidad)tipoLoc;
var localidad = Localidad.getById(idLocalidad);
ViewBag.Localidad = localidad;
ViewBag.TipoLocalidad = tipo;
var captura = obtnerEvaluacion(tipo, enumTipoEvaluacion.SA, idLocalidad);
EvaluacionPregunta ePregunta = EvaluacionPregunta.getEvaluacio(enumTipoEvaluacion.SA, tipo);
if (captura != null)
{
int TotalPreguntas = captura.RespuestaCuestionario.Count;
if (ePregunta.CantidadPreguntas() > TotalPreguntas)
{
return Redirect("~/Capturas/Index/" + localidad.Id);
}
}
return View(ePregunta);
}
public ActionResult DECiudadP(int idLocalidad, int tipoLoc)
{
//enumTipoLocalidad tipo = (enumTipoLocalidad)tipoLoc;
//ViewBag.Localidad = Localidad.getById(idLocalidad);
//obtnerEvaluacion(tipo, enumTipoEvaluacion.DE, idLocalidad);
//return View(EvaluacionPregunta.getEvaluacio(enumTipoEvaluacion.DE, tipo));
enumTipoLocalidad tipo = (enumTipoLocalidad)tipoLoc;
var localidad = Localidad.getById(idLocalidad);
ViewBag.Localidad = localidad;
ViewBag.TipoLocalidad = tipo;
var captura = obtnerEvaluacion(tipo, enumTipoEvaluacion.DE, idLocalidad);
EvaluacionPregunta ePregunta = EvaluacionPregunta.getEvaluacio(enumTipoEvaluacion.DE, tipo);
if (captura != null)
{
int TotalPreguntas = captura.RespuestaCuestionario.Count;
if (ePregunta.CantidadPreguntas() > TotalPreguntas)
{
return Redirect("~/Capturas/Index/" + localidad.Id);
}
}
return View(ePregunta);
}
#endregion
#region "GUARDAR CAPTURAS PRINCIPALES"
[ValidateInput(false)]
public JsonResult GuardarPU(int PV1, int PV2, int PV3, int PV4, int PV5, int PV6, int PV7, int[] txtIdPregunta, string[] PCOMENTA)
{
var response = new ResponseModel();
if (ModelState.IsValid)
{
Captura captura = this.obtenerCapturaCookie();
//Localidad loc = Localidad.getById(IdLocalidad);
var temEva=obtenerEvaluacionCookie();
if (temEva != null) {
temEva.SuspenderEaluacion();
}
Evaluacion evalua = new Evaluacion();
evalua.TipoEvaluacion = enumTipoEvaluacion.PU;
evalua.TipoLocalidad = (enumTipoLocalidad)captura.getLocalidad().Tipo;
evalua.RespuestaCuestionario = new List<RespuestaCuestionario>();
RespuestaCuestionario resp = new RespuestaCuestionario();
int indice = 0;
resp.Tipo = enumTipoPregunta.BOLEANA;
if (PV1 == 1)
{
resp.bValor = true;
resp.Puntaje = 1;
resp.Confirmada = true;
}
else
{
if (PV1 == 0)
{
resp.bValor = false;
resp.Puntaje = 0;
resp.Confirmada = true;
}
}
resp.Observacion = PCOMENTA[indice];
resp.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(resp);
indice++;
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA;
if (PV2 == 1)
{
resp.bValor = true;
resp.Puntaje = 1;
resp.Confirmada = true;
}
else
{
if (PV2 == 0)
{
resp.bValor = false;
resp.Puntaje = 0;
resp.Confirmada = true;
}
}
resp.Observacion = PCOMENTA[indice];
resp.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(resp);
indice++;
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA;
if (PV3 == 1)
{
resp.bValor = true;
resp.Puntaje = 1;
resp.Confirmada = true;
}
else
{
if (PV3 == 0)
{
resp.bValor = false;
resp.Puntaje = 0;
resp.Confirmada = true;
}
}
resp.Observacion = PCOMENTA[indice];
resp.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(resp);
indice++;
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA;
if (PV4 == 1)
{
resp.bValor = true;
resp.Puntaje = 1;
resp.Confirmada = true;
}
else
{
if (PV4 == 0)
{
resp.bValor = false;
resp.Puntaje = 0;
resp.Confirmada = true;
}
}
resp.Observacion = PCOMENTA[indice];
resp.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(resp);
indice++;
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA;
if (PV5 == 1)
{
resp.bValor = true;
resp.Puntaje = 1;
resp.Confirmada = true;
}
else
{
if (PV5 == 0)
{
resp.bValor = false;
resp.Puntaje = 0;
resp.Confirmada = true;
}
}
resp.Observacion = PCOMENTA[indice];
resp.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(resp);
indice++;
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA;
if (PV6 == 1)
{
resp.bValor = true;
resp.Puntaje = 1;
resp.Confirmada = true;
}
else
{
if (PV6 == 0)
{
resp.bValor = false;
resp.Puntaje = 0;
resp.Confirmada = true;
}
}
resp.Observacion = PCOMENTA[indice];
resp.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(resp);
indice++;
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA;
if (PV7 == 1)
{
resp.bValor = true;
resp.Puntaje = 1;
resp.Confirmada = true;
}
else
{
if (PV7 == 0)
{
resp.bValor = false;
resp.Puntaje = 0;
resp.Confirmada = true;
}
}
resp.Observacion = PCOMENTA[indice];
resp.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(resp);
indice++;
response = Evaluacion.Guardar(evalua, captura);
if (response.response)
{
response.href = Url.Content("~/Capturas/Index/" + captura.getLocalidad().Id);
}
}
return Json(response);
}
[ValidateInput(false)]
public JsonResult GuardarCV(Ppal_CV_ViewModel Encuesta, int[] txtIdPregunta, string[] Comentarios)
{
var response = new ResponseModel();
if (ModelState.IsValid)
{
Captura captura = this.obtenerCapturaCookie();
var temEva = obtenerEvaluacionCookie();
if (temEva != null)
{
temEva.SuspenderEaluacion();
}
Evaluacion evalua = new Evaluacion();
evalua.TipoEvaluacion = enumTipoEvaluacion.CV;
evalua.TipoLocalidad = (enumTipoLocalidad)captura.getLocalidad().Tipo;
evalua.RespuestaCuestionario = new List<RespuestaCuestionario>();
int indice = 0;
foreach (var item in Encuesta.getRespuestas(Comentarios))
{
item.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(item);
indice++;
}
response = Evaluacion.Guardar(evalua, captura);
if (response.response)
{
response.href = Url.Content("~/Capturas/Index/" + captura.getLocalidad().Id);
}
}
else {
foreach (ModelState modelState in ViewData.ModelState.Values)
{
foreach (ModelError error in modelState.Errors)
{
response.message = error.ErrorMessage;
}
}
}
return Json(response);
}
[ValidateInput(false)]
public JsonResult GuardarDE(Ppal_DE_ViewModel Encuesta, int[] txtIdPregunta, string[] Comentarios)
{
var response = new ResponseModel();
if (ModelState.IsValid)
{
Captura captura = this.obtenerCapturaCookie();
var temEva = obtenerEvaluacionCookie();
if (temEva != null)
{
temEva.SuspenderEaluacion();
}
Evaluacion evalua = new Evaluacion();
evalua.TipoEvaluacion = enumTipoEvaluacion.DE;
evalua.TipoLocalidad = (enumTipoLocalidad)captura.getLocalidad().Tipo;
evalua.RespuestaCuestionario = new List<RespuestaCuestionario>();
int indice = 0;
foreach (var item in Encuesta.getRespuestas(Comentarios))
{
item.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(item);
indice++;
}
response = Evaluacion.Guardar(evalua, captura);
if (response.response)
{
response.href = Url.Content("~/Capturas/Index/" + captura.getLocalidad().Id);
}
}
else
{
foreach (ModelState modelState in ViewData.ModelState.Values)
{
foreach (ModelError error in modelState.Errors)
{
response.message = error.ErrorMessage;
}
}
}
return Json(response);
}
[ValidateInput(false)]
public JsonResult GuardarIE(Ppal_IE_ViewModel Encuesta, int[] txtIdPregunta, string[] Comentarios)
{
var response = new ResponseModel();
if (ModelState.IsValid)
{
Captura captura = this.obtenerCapturaCookie();
var temEva = obtenerEvaluacionCookie();
if (temEva != null)
{
temEva.SuspenderEaluacion();
}
Evaluacion evalua = new Evaluacion();
evalua.TipoEvaluacion = enumTipoEvaluacion.IE;
evalua.TipoLocalidad = (enumTipoLocalidad)captura.getLocalidad().Tipo;
evalua.RespuestaCuestionario = new List<RespuestaCuestionario>();
int indice = 0;
foreach (var item in Encuesta.getRespuestas(Comentarios))
{
item.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(item);
indice++;
}
response = Evaluacion.Guardar(evalua, captura);
if (response.response)
{
response.href = Url.Content("~/Capturas/Index/" + captura.getLocalidad().Id);
}
}
else
{
foreach (ModelState modelState in ViewData.ModelState.Values)
{
foreach (ModelError error in modelState.Errors)
{
response.message = error.ErrorMessage;
}
}
}
return Json(response);
}
[ValidateInput(false)]
public JsonResult GuardarMT(Ppal_MT_ViewModel Encuesta, int[] txtIdPregunta, string[] Comentarios)
{
var response = new ResponseModel();
if (ModelState.IsValid)
{
Captura captura = this.obtenerCapturaCookie();
var temEva = obtenerEvaluacionCookie();
if (temEva != null)
{
temEva.SuspenderEaluacion();
}
Evaluacion evalua = new Evaluacion();
evalua.TipoEvaluacion = enumTipoEvaluacion.MT;
evalua.TipoLocalidad = (enumTipoLocalidad)captura.getLocalidad().Tipo;
evalua.RespuestaCuestionario = new List<RespuestaCuestionario>();
int indice = 0;
foreach (var item in Encuesta.getRespuestas(Comentarios))
{
item.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(item);
indice++;
}
response = Evaluacion.Guardar(evalua, captura);
if (response.response)
{
response.href = Url.Content("~/Capturas/Index/" + captura.getLocalidad().Id);
}
}
else
{
foreach (ModelState modelState in ViewData.ModelState.Values)
{
foreach (ModelError error in modelState.Errors)
{
response.message = error.ErrorMessage;
}
}
}
return Json(response);
}
[ValidateInput(false)]
public JsonResult GuardarSA(Ppal_SA_ViewModel Encuesta, int[] txtIdPregunta, string[] Comentarios)
{
var response = new ResponseModel();
if (ModelState.IsValid)
{
Captura captura = this.obtenerCapturaCookie();
var temEva = obtenerEvaluacionCookie();
if (temEva != null)
{
temEva.SuspenderEaluacion();
}
Evaluacion evalua = new Evaluacion();
evalua.TipoEvaluacion = enumTipoEvaluacion.SA;
evalua.TipoLocalidad = (enumTipoLocalidad)captura.getLocalidad().Tipo;
evalua.RespuestaCuestionario = new List<RespuestaCuestionario>();
int indice = 0;
foreach (var item in Encuesta.getRespuestas(Comentarios))
{
item.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(item);
indice++;
}
response = Evaluacion.Guardar(evalua, captura);
if (response.response)
{
response.href = Url.Content("~/Capturas/Index/" + captura.getLocalidad().Id);
}
}
else
{
foreach (ModelState modelState in ViewData.ModelState.Values)
{
foreach (ModelError error in modelState.Errors)
{
response.message = error.ErrorMessage;
}
}
}
return Json(response);
}
#endregion
#region "GUARDAR CAPTURAS COMPLEMENTARIA"
[ValidateInput(false)]
public JsonResult GuardarPUCom(int PV1, int PV2, int PV3, int PV4, int PV5, int PV6, int PV7, int[] txtIdPregunta, string[] PCOMENTA)
{
var response = new ResponseModel();
if (ModelState.IsValid)
{
Captura captura = this.obtenerCapturaCookie();
//Localidad loc = Localidad.getById(IdLocalidad);
var temEva = obtenerEvaluacionCookie();
if (temEva != null)
{
temEva.SuspenderEaluacion();
}
Evaluacion evalua = new Evaluacion();
evalua.TipoEvaluacion = enumTipoEvaluacion.PU;
evalua.TipoLocalidad = (enumTipoLocalidad)captura.getLocalidad().Tipo;
evalua.RespuestaCuestionario = new List<RespuestaCuestionario>();
RespuestaCuestionario resp = new RespuestaCuestionario();
int indice = 0;
resp.Tipo = enumTipoPregunta.BOLEANA;
if (PV1 == 1)
{
resp.bValor = true;
resp.Puntaje = 1;
resp.Confirmada = true;
}
else
{
if (PV1 == 0)
{
resp.bValor = false;
resp.Puntaje = 0;
resp.Confirmada = true;
}
}
resp.Observacion = PCOMENTA[indice];
resp.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(resp);
indice++;
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA;
if (PV2 == 1)
{
resp.bValor = true;
resp.Puntaje = 1;
resp.Confirmada = true;
}
else
{
if (PV2 == 0)
{
resp.bValor = false;
resp.Puntaje = 0;
resp.Confirmada = true;
}
}
resp.Observacion = PCOMENTA[indice];
resp.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(resp);
indice++;
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA;
if (PV3 == 1)
{
resp.bValor = true;
resp.Puntaje = 1;
resp.Confirmada = true;
}
else
{
if (PV3 == 0)
{
resp.bValor = false;
resp.Puntaje = 0;
resp.Confirmada = true;
}
}
resp.Observacion = PCOMENTA[indice];
resp.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(resp);
indice++;
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA;
if (PV4 == 1)
{
resp.bValor = true;
resp.Puntaje = 1;
resp.Confirmada = true;
}
else
{
if (PV4 == 0)
{
resp.bValor = false;
resp.Puntaje = 0;
resp.Confirmada = true;
}
}
resp.Observacion = PCOMENTA[indice];
resp.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(resp);
indice++;
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA;
if (PV5 == 1)
{
resp.bValor = true;
resp.Puntaje = 1;
resp.Confirmada = true;
}
else
{
if (PV5 == 0)
{
resp.bValor = false;
resp.Puntaje = 0;
resp.Confirmada = true;
}
}
resp.Observacion = PCOMENTA[indice];
resp.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(resp);
indice++;
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA;
if (PV6 == 1)
{
resp.bValor = true;
resp.Puntaje = 1;
resp.Confirmada = true;
}
else
{
if (PV6 == 0)
{
resp.bValor = false;
resp.Puntaje = 0;
resp.Confirmada = true;
}
}
resp.Observacion = PCOMENTA[indice];
resp.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(resp);
indice++;
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA;
if (PV7 == 1)
{
resp.bValor = true;
resp.Puntaje = 1;
resp.Confirmada = true;
}
else
{
if (PV7 == 0)
{
resp.bValor = false;
resp.Puntaje = 0;
resp.Confirmada = true;
}
}
resp.Observacion = PCOMENTA[indice];
resp.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(resp);
indice++;
response = Evaluacion.Guardar(evalua, captura);
if (response.response)
{
response.href = Url.Content("~/Capturas/Index/" + captura.getLocalidad().Id);
}
}
else
{
foreach (ModelState modelState in ViewData.ModelState.Values)
{
foreach (ModelError error in modelState.Errors)
{
response.message = error.ErrorMessage;
}
}
}
return Json(response);
}
[ValidateInput(false)]
public JsonResult GuardarCVCom(Comp_CV_ViewModel Encuesta, int[] txtIdPregunta, string[] Comentarios)
{
var response = new ResponseModel();
if (ModelState.IsValid)
{
Captura captura = this.obtenerCapturaCookie();
var temEva = obtenerEvaluacionCookie();
if (temEva != null)
{
temEva.SuspenderEaluacion();
}
Evaluacion evalua = new Evaluacion();
evalua.TipoEvaluacion = enumTipoEvaluacion.CV;
evalua.TipoLocalidad = (enumTipoLocalidad)captura.getLocalidad().Tipo;
evalua.RespuestaCuestionario = new List<RespuestaCuestionario>();
int indice = 0;
foreach (var item in Encuesta.getRespuestas(Comentarios))
{
item.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(item);
indice++;
}
response = Evaluacion.Guardar(evalua, captura);
if (response.response)
{
response.href = Url.Content("~/Capturas/Index/" + captura.getLocalidad().Id);
}
}
else
{
foreach (ModelState modelState in ViewData.ModelState.Values)
{
foreach (ModelError error in modelState.Errors)
{
response.message = error.ErrorMessage;
}
}
}
return Json(response);
}
[ValidateInput(false)]
public JsonResult GuardarDECom(Comp_DE_ViewModel Encuesta, int[] txtIdPregunta, string[] Comentarios)
{
var response = new ResponseModel();
if (ModelState.IsValid)
{
Captura captura = this.obtenerCapturaCookie();
var temEva = obtenerEvaluacionCookie();
if (temEva != null)
{
temEva.SuspenderEaluacion();
}
Evaluacion evalua = new Evaluacion();
evalua.TipoEvaluacion = enumTipoEvaluacion.DE;
evalua.TipoLocalidad = (enumTipoLocalidad)captura.getLocalidad().Tipo;
evalua.RespuestaCuestionario = new List<RespuestaCuestionario>();
int indice = 0;
foreach (var item in Encuesta.getRespuestas(Comentarios))
{
item.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(item);
indice++;
}
response = Evaluacion.Guardar(evalua, captura);
if (response.response)
{
response.href = Url.Content("~/Capturas/Index/" + captura.getLocalidad().Id);
}
}
else
{
foreach (ModelState modelState in ViewData.ModelState.Values)
{
foreach (ModelError error in modelState.Errors)
{
response.message = error.ErrorMessage;
}
}
}
return Json(response);
}
[ValidateInput(false)]
public JsonResult GuardarIECom(Comp_IE_ViewModel Encuesta, int[] txtIdPregunta, string[] Comentarios)
{
var response = new ResponseModel();
if (ModelState.IsValid)
{
Captura captura = this.obtenerCapturaCookie();
var temEva = obtenerEvaluacionCookie();
if (temEva != null)
{
temEva.SuspenderEaluacion();
}
Evaluacion evalua = new Evaluacion();
evalua.TipoEvaluacion = enumTipoEvaluacion.IE;
evalua.TipoLocalidad = (enumTipoLocalidad)captura.getLocalidad().Tipo;
evalua.RespuestaCuestionario = new List<RespuestaCuestionario>();
int indice = 0;
foreach (var item in Encuesta.getRespuestas(Comentarios))
{
item.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(item);
indice++;
}
response = Evaluacion.Guardar(evalua, captura);
if (response.response)
{
response.href = Url.Content("~/Capturas/Index/" + captura.getLocalidad().Id);
}
}
else
{
foreach (ModelState modelState in ViewData.ModelState.Values)
{
foreach (ModelError error in modelState.Errors)
{
response.message = error.ErrorMessage;
}
}
}
return Json(response);
}
[ValidateInput(false)]
public JsonResult GuardarMTCom(Comp_MT_ViewModel Encuesta, int[] txtIdPregunta, string[] Comentarios)
{
var response = new ResponseModel();
if (ModelState.IsValid)
{
Captura captura = this.obtenerCapturaCookie();
var temEva = obtenerEvaluacionCookie();
if (temEva != null)
{
temEva.SuspenderEaluacion();
}
Evaluacion evalua = new Evaluacion();
evalua.TipoEvaluacion = enumTipoEvaluacion.MT;
evalua.TipoLocalidad = (enumTipoLocalidad)captura.getLocalidad().Tipo;
evalua.RespuestaCuestionario = new List<RespuestaCuestionario>();
int indice = 0;
foreach (var item in Encuesta.getRespuestas(Comentarios))
{
item.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(item);
indice++;
}
response = Evaluacion.Guardar(evalua, captura);
if (response.response)
{
response.href = Url.Content("~/Capturas/Index/" + captura.getLocalidad().Id);
}
}
else
{
foreach (ModelState modelState in ViewData.ModelState.Values)
{
foreach (ModelError error in modelState.Errors)
{
response.message = error.ErrorMessage;
}
}
}
return Json(response);
}
[ValidateInput(false)]
public JsonResult GuardarSACom(Comp_SA_ViewModel Encuesta, int[] txtIdPregunta, string[] Comentarios)
{
var response = new ResponseModel();
if (ModelState.IsValid)
{
Captura captura = this.obtenerCapturaCookie();
var temEva = obtenerEvaluacionCookie();
if (temEva != null)
{
temEva.SuspenderEaluacion();
}
Evaluacion evalua = new Evaluacion();
evalua.TipoEvaluacion = enumTipoEvaluacion.SA;
evalua.TipoLocalidad = (enumTipoLocalidad)captura.getLocalidad().Tipo;
evalua.RespuestaCuestionario = new List<RespuestaCuestionario>();
int indice = 0;
foreach (var item in Encuesta.getRespuestas(Comentarios))
{
item.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(item);
indice++;
}
response = Evaluacion.Guardar(evalua, captura);
if (response.response)
{
response.href = Url.Content("~/Capturas/Index/" + captura.getLocalidad().Id);
}
}
else
{
foreach (ModelState modelState in ViewData.ModelState.Values)
{
foreach (ModelError error in modelState.Errors)
{
response.message = error.ErrorMessage;
}
}
}
return Json(response);
}
#endregion
#region "GUARDAR CAPTURAS PUEBLO"
[ValidateInput(false)]
public JsonResult GuardarPUPue(int PV1, int PV2, int PV3, int PV4, int PV5, int PV6, int PV7, int[] txtIdPregunta, string[] PCOMENTA)
{
var response = new ResponseModel();
if (ModelState.IsValid)
{
Captura captura = this.obtenerCapturaCookie();
//Localidad loc = Localidad.getById(IdLocalidad);
var temEva = obtenerEvaluacionCookie();
if (temEva != null)
{
temEva.SuspenderEaluacion();
}
Evaluacion evalua = new Evaluacion();
evalua.TipoEvaluacion = enumTipoEvaluacion.PU;
evalua.TipoLocalidad = (enumTipoLocalidad)captura.getLocalidad().Tipo;
evalua.RespuestaCuestionario = new List<RespuestaCuestionario>();
RespuestaCuestionario resp = new RespuestaCuestionario();
int indice = 0;
resp.Tipo = enumTipoPregunta.BOLEANA;
if (PV1 == 1)
{
resp.bValor = true;
resp.Puntaje = 1;
resp.Confirmada = true;
}
else
{
if (PV1 == 0)
{
resp.bValor = false;
resp.Puntaje = 0;
resp.Confirmada = true;
}
}
resp.Observacion = PCOMENTA[indice];
resp.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(resp);
indice++;
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA;
if (PV2 == 1)
{
resp.bValor = true;
resp.Puntaje = 1;
resp.Confirmada = true;
}
else
{
if (PV2 == 0)
{
resp.bValor = false;
resp.Puntaje = 0;
resp.Confirmada = true;
}
}
resp.Observacion = PCOMENTA[indice];
resp.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(resp);
indice++;
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA;
if (PV3 == 1)
{
resp.bValor = true;
resp.Puntaje = 1;
resp.Confirmada = true;
}
else
{
if (PV3 == 0)
{
resp.bValor = false;
resp.Puntaje = 0;
resp.Confirmada = true;
}
}
resp.Observacion = PCOMENTA[indice];
resp.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(resp);
indice++;
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA;
if (PV4 == 1)
{
resp.bValor = true;
resp.Puntaje = 1;
resp.Confirmada = true;
}
else
{
if (PV4 == 0)
{
resp.bValor = false;
resp.Puntaje = 0;
resp.Confirmada = true;
}
}
resp.Observacion = PCOMENTA[indice];
resp.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(resp);
indice++;
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA;
if (PV5 == 1)
{
resp.bValor = true;
resp.Puntaje = 1;
resp.Confirmada = true;
}
else
{
if (PV5 == 0)
{
resp.bValor = false;
resp.Puntaje = 0;
resp.Confirmada = true;
}
}
resp.Observacion = PCOMENTA[indice];
resp.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(resp);
indice++;
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA;
if (PV6 == 1)
{
resp.bValor = true;
resp.Puntaje = 1;
resp.Confirmada = true;
}
else
{
if (PV6 == 0)
{
resp.bValor = false;
resp.Puntaje = 0;
resp.Confirmada = true;
}
}
resp.Observacion = PCOMENTA[indice];
resp.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(resp);
indice++;
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA;
if (PV7 == 1)
{
resp.bValor = true;
resp.Puntaje = 1;
resp.Confirmada = true;
}
else
{
if (PV7 == 0)
{
resp.bValor = false;
resp.Puntaje = 0;
resp.Confirmada = true;
}
}
resp.Observacion = PCOMENTA[indice];
resp.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(resp);
indice++;
response = Evaluacion.Guardar(evalua, captura);
if (response.response)
{
response.href = Url.Content("~/Capturas/Index/" + captura.getLocalidad().Id);
}
}
else
{
foreach (ModelState modelState in ViewData.ModelState.Values)
{
foreach (ModelError error in modelState.Errors)
{
response.message = error.ErrorMessage;
}
}
}
return Json(response);
}
[ValidateInput(false)]
public JsonResult GuardarCVPue(Pue_CV_ViewModel Encuesta, int[] txtIdPregunta, string[] Comentarios)
{
var response = new ResponseModel();
if (ModelState.IsValid)
{
Captura captura = this.obtenerCapturaCookie();
var temEva = obtenerEvaluacionCookie();
if (temEva != null)
{
temEva.SuspenderEaluacion();
}
Evaluacion evalua = new Evaluacion();
evalua.TipoEvaluacion = enumTipoEvaluacion.CV;
evalua.TipoLocalidad = (enumTipoLocalidad)captura.getLocalidad().Tipo;
evalua.RespuestaCuestionario = new List<RespuestaCuestionario>();
int indice = 0;
foreach (var item in Encuesta.getRespuestas(Comentarios))
{
item.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(item);
indice++;
}
response = Evaluacion.Guardar(evalua, captura);
if (response.response)
{
response.href = Url.Content("~/Capturas/Index/" + captura.getLocalidad().Id);
}
}
else
{
foreach (ModelState modelState in ViewData.ModelState.Values)
{
foreach (ModelError error in modelState.Errors)
{
response.message = error.ErrorMessage;
}
}
}
return Json(response);
}
[ValidateInput(false)]
public JsonResult GuardarDEPue(Pue_DE_ViewModel Encuesta, int[] txtIdPregunta, string[] Comentarios)
{
var response = new ResponseModel();
if (ModelState.IsValid)
{
Captura captura = this.obtenerCapturaCookie();
var temEva = obtenerEvaluacionCookie();
if (temEva != null)
{
temEva.SuspenderEaluacion();
}
Evaluacion evalua = new Evaluacion();
evalua.TipoEvaluacion = enumTipoEvaluacion.DE;
evalua.TipoLocalidad = (enumTipoLocalidad)captura.getLocalidad().Tipo;
evalua.RespuestaCuestionario = new List<RespuestaCuestionario>();
int indice = 0;
foreach (var item in Encuesta.getRespuestas(Comentarios))
{
item.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(item);
indice++;
}
response = Evaluacion.Guardar(evalua, captura);
if (response.response)
{
response.href = Url.Content("~/Capturas/Index/" + captura.getLocalidad().Id);
}
}
else
{
foreach (ModelState modelState in ViewData.ModelState.Values)
{
foreach (ModelError error in modelState.Errors)
{
response.message = error.ErrorMessage;
}
}
}
return Json(response);
}
[ValidateInput(false)]
public JsonResult GuardarIEPue(Pue_IE_ViewModel Encuesta, int[] txtIdPregunta, string[] Comentarios)
{
var response = new ResponseModel();
if (ModelState.IsValid)
{
Captura captura = this.obtenerCapturaCookie();
var temEva = obtenerEvaluacionCookie();
if (temEva != null)
{
temEva.SuspenderEaluacion();
}
Evaluacion evalua = new Evaluacion();
evalua.TipoEvaluacion = enumTipoEvaluacion.IE;
evalua.TipoLocalidad = (enumTipoLocalidad)captura.getLocalidad().Tipo;
evalua.RespuestaCuestionario = new List<RespuestaCuestionario>();
int indice = 0;
foreach (var item in Encuesta.getRespuestas(Comentarios))
{
item.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(item);
indice++;
}
response = Evaluacion.Guardar(evalua, captura);
if (response.response)
{
response.href = Url.Content("~/Capturas/Index/" + captura.getLocalidad().Id);
}
}
else
{
foreach (ModelState modelState in ViewData.ModelState.Values)
{
foreach (ModelError error in modelState.Errors)
{
response.message = error.ErrorMessage;
}
}
}
return Json(response);
}
[ValidateInput(false)]
public JsonResult GuardarMTPue(Pue_MT_ViewModel Encuesta, int[] txtIdPregunta, string[] Comentarios)
{
var response = new ResponseModel();
if (ModelState.IsValid)
{
Captura captura = this.obtenerCapturaCookie();
var temEva = obtenerEvaluacionCookie();
if (temEva != null)
{
temEva.SuspenderEaluacion();
}
Evaluacion evalua = new Evaluacion();
evalua.TipoEvaluacion = enumTipoEvaluacion.MT;
evalua.TipoLocalidad = (enumTipoLocalidad)captura.getLocalidad().Tipo;
evalua.RespuestaCuestionario = new List<RespuestaCuestionario>();
int indice = 0;
foreach (var item in Encuesta.getRespuestas(Comentarios))
{
item.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(item);
indice++;
}
response = Evaluacion.Guardar(evalua, captura);
if (response.response)
{
response.href = Url.Content("~/Capturas/Index/" + captura.getLocalidad().Id);
}
}
else
{
foreach (ModelState modelState in ViewData.ModelState.Values)
{
foreach (ModelError error in modelState.Errors)
{
response.message = error.ErrorMessage;
}
}
}
return Json(response);
}
[ValidateInput(false)]
public JsonResult GuardarSAPue(Pue_SA_ViewModel Encuesta, int[] txtIdPregunta, string[] Comentarios)
{
var response = new ResponseModel();
if (ModelState.IsValid)
{
Captura captura = this.obtenerCapturaCookie();
var temEva = obtenerEvaluacionCookie();
if (temEva != null)
{
temEva.SuspenderEaluacion();
}
Evaluacion evalua = new Evaluacion();
evalua.TipoEvaluacion = enumTipoEvaluacion.SA;
evalua.TipoLocalidad = (enumTipoLocalidad)captura.getLocalidad().Tipo;
evalua.RespuestaCuestionario = new List<RespuestaCuestionario>();
int indice = 0;
foreach (var item in Encuesta.getRespuestas(Comentarios))
{
item.IdPregunta = txtIdPregunta[indice];
evalua.RespuestaCuestionario.Add(item);
indice++;
}
response = Evaluacion.Guardar(evalua, captura);
if (response.response)
{
response.href = Url.Content("~/Capturas/Index/" + captura.getLocalidad().Id);
}
}
else
{
foreach (ModelState modelState in ViewData.ModelState.Values)
{
foreach (ModelError error in modelState.Errors)
{
response.message = error.ErrorMessage;
}
}
}
return Json(response);
}
#endregion
public ActionResult FinalizarReporte() {
return View(this.obtenerCapturaCookie());
}
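/// <summary>
/// ENVIA LA CAPTURA A REVISION Y, SI LA OPERACION ES EXITOSA, REDIRIGE AL DETALLE DEL MUNICIPIO DE LA LOCALIDAD INDICADA.
/// </summary>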
public JsonResult PublicarEvaluacion(int IdEvaluacion, int IdLocalidad) {
var response = new ResponseModel();
if (ModelState.IsValid) {
response = Captura.EnviarARevision(IdEvaluacion);
if (response.response) {
var loc = Localidad.getById(IdLocalidad);
response.href = Url.Content("~/CatMunicipio/verDetalle/" + loc.Municipio.Id);
}
}
return Json(response);
}
#region "ENVIAR RETRO"
[ValidateInput(false)]
public JsonResult GuardarObservacionPU(int IdCaptura, String txtObservacion, int idLocalidad)
{
var response = new ResponseModel();
if (ModelState.IsValid) {
response = Captura.GuardarRetroAlimentacionEncuesta(IdCaptura, txtObservacion);
if (response.response) {
response.href = Url.Content("~/Capturas/verCaptura/" + idLocalidad);
}
}
return Json(response);
}
#endregion
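/// <summary>
/// RECIBE EL DOCUMENTO DE CARACTERIZACION, LO GUARDA EN ~/Uploads CON UN NOMBRE BASADO EN LA FECHA
/// Y LO ASOCIA A LA CAPTURA ACTIVA DE LA COOKIE.
/// </summary>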
[HttpPost]
public ActionResult UploadFile(HttpPostedFileBase file) {
if (file == null) return Redirect("~/Capturas/index");
string nameFile = (DateTime.Now.ToString("yyyyMMddHHmmss") + "_" + file.FileName).ToUpper();
file.SaveAs(Server.MapPath("~/Uploads/") + nameFile);
Caracterizacion carac = new Caracterizacion();
carac.sDocumento = nameFile;
carac.iAnioDocumento = Convert.ToInt32(System.DateTime.Now.ToString("yyyy"));
carac.sObservaciones = "";
carac.bValidado = false;
Caracterizacion.Crud(carac);
obtenerCapturaCookie().agregarCaracterizacion(carac);
//Redirect("~/Capturas/index");
return RedirectToAction("index");
}
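/// <summary>
/// OBTIENE LA CAPTURA ACTIVA A PARTIR DE LA COOKIE "idCaptura"; REGRESA null SI LA COOKIE NO EXISTE.
/// </summary>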
private Captura obtenerCapturaCookie() {
Captura cap = null;
if (this.ControllerContext.HttpContext.Request.Cookies.AllKeys.Contains("idCaptura")) {
cap = Captura.getById(Convert.ToInt32(this.ControllerContext.HttpContext.Request.Cookies["idCaptura"].Value));
}
return cap;
}
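/// <summary>
/// OBTIENE LA EVALUACION EN CURSO A PARTIR DE LA COOKIE "idEvaluacion"; REGRESA null SI LA COOKIE NO EXISTE.
/// </summary>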
private Evaluacion obtenerEvaluacionCookie() {
Evaluacion eva = null;
if (this.ControllerContext.HttpContext.Request.Cookies.AllKeys.Contains("idEvaluacion"))
{
eva = Evaluacion.getById(Convert.ToInt32(this.ControllerContext.HttpContext.Request.Cookies["idEvaluacion"].Value));
//Captura.getById(Convert.ToInt32(this.ControllerContext.HttpContext.Request.Cookies["idCaptura"].Value));
}
return eva;
}
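/// <summary>
/// MUESTRA LA CAPTURA DEL AÑO EN CURSO PARA LA LOCALIDAD; SI AUN NO TIENE LAS 6 EVALUACIONES
/// REDIRIGE AL DETALLE DEL MUNICIPIO.
/// </summary>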
public ActionResult verCaptura(int id) {
string anio = System.DateTime.Now.ToString("yyyy");
Captura captura = Captura.getByAnio(anio, id);
if (captura.Evaluaciones.Count < 6)
{
return Redirect("~/CatMunicipio/verDetalle/"+ captura.getLocalidad().Municipio.Id);
}
return View(captura);
}
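/// <summary>
/// BOSQUEJO ILUSTRATIVO (NO INVOCADO POR LAS ACCIONES EXISTENTES): MUESTRA COMO PODRIA FACTORIZARSE
/// EL BLOQUE REPETIDO DE GuardarPUPue PARA CONSTRUIR UNA RESPUESTA BOOLEANA (1 = SI, 0 = NO)
/// CON SU COMENTARIO E ID DE PREGUNTA. EL NOMBRE DEL METODO ES SUPUESTO.
/// </summary>
private RespuestaCuestionario crearRespuestaBooleana(int valor, string comentario, int idPregunta)
{
RespuestaCuestionario resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA;
if (valor == 1)
{
resp.bValor = true;
resp.Puntaje = 1;
resp.Confirmada = true;
}
else if (valor == 0)
{
resp.bValor = false;
resp.Puntaje = 0;
resp.Confirmada = true;
}
resp.Observacion = comentario;
resp.IdPregunta = idPregunta;
return resp;
}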
public ActionResult Caracteriza(int idLocalidad, int tipoLoc) {
return View(this.obtenerCapturaCookie());
}
}
}<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Modelo.helpers
{
public class ErrorHelper : IDataErrorInfo, INotifyPropertyChanged
{
public string Error // Part of the IDataErrorInfo Interface
{
get { return "Mensaje de error."; }
}
string IDataErrorInfo.this[string propertyName] // Part of the IDataErrorInfo Interface
{
get { return OnValidate(propertyName); }
}
protected virtual string OnValidate(string propertyName)
{
if (string.IsNullOrEmpty(propertyName))
throw new ArgumentException("Invalid property name", propertyName);
string error = string.Empty;
var value = this.GetType().GetProperty(propertyName).GetValue(this, null);
var results = new List<ValidationResult>(1);
var context = new ValidationContext(this, null, null) { MemberName = propertyName };
var result = Validator.TryValidateProperty(value, context, results);
if (!result)
{
var validationResult = results[0];
error = validationResult.ErrorMessage;
}
return error;
}
public event PropertyChangedEventHandler PropertyChanged;
public void OnPropertyChanged(string propertyName)
{
if (PropertyChanged != null)
{
PropertyChanged(this, new PropertyChangedEventArgs(propertyName));
}
}
}
}
<file_sep>namespace PROCOPRU.Migrations
{
using System;
using System.Data.Entity.Migrations;
public partial class UPDATEMUNICIPIOLOCALIDAD : DbMigration
{
public override void Up()
{
AddColumn("dbo.Localidades", "Pobacion", c => c.Int(nullable: false));
AddColumn("dbo.Localidades", "CodigoLocalidad", c => c.String(nullable: false));
AddColumn("dbo.Localidades", "Status", c => c.Boolean(nullable: false));
AddColumn("dbo.Municipios", "CodigoMunicipio", c => c.String());
DropColumn("dbo.Municipios", "Poblacion");
}
public override void Down()
{
AddColumn("dbo.Municipios", "Poblacion", c => c.Int(nullable: false));
DropColumn("dbo.Municipios", "CodigoMunicipio");
DropColumn("dbo.Localidades", "Status");
DropColumn("dbo.Localidades", "CodigoLocalidad");
DropColumn("dbo.Localidades", "Pobacion");
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using PROCOPRU.Modelo;
namespace PROCOPRU.ViewModel
{
public class Pue_CV_ViewModel
{
public int R_1 { get; set; }
public int R_2 { get; set; }
public int R_3 { get; set; }
public int R_4 { get; set; }
public int R_5 { get; set; }
public int R_6 { get; set; }
public int R_7 { get; set; }
public int R_8 { get; set; }
public int R_9 { get; set; }
public int R_10 { get; set; }
public int R_11 { get; set; }
public int R_12 { get; set; }
public int R_13 { get; set; }
public int R_14 { get; set; }
public int R_15 { get; set; }
public int R_16 { get; set; }
public int R_17 { get; set; }
public int R_18 { get; set; }
public int R_19 { get; set; }
public int R_20 { get; set; }
public int R_21 { get; set; }
public int R_22 { get; set; }
public int R_23 { get; set; }
public int R_24 { get; set; }
public int R_25 { get; set; }
public int R_26 { get; set; }
public int R_27 { get; set; }
public int R_28 { get; set; }
public int R_29 { get; set; }
public int R_30 { get; set; }
public int R_31 { get; set; }
public int R_32 { get; set; }
public int R_33 { get; set; }
public int R_34 { get; set; }
public int R_35 { get; set; }
public int R_36 { get; set; }
public int R_37 { get; set; }
public int R_38 { get; set; }
public int R_39 { get; set; }
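/// <summary>
/// CALCULA EL PUNTAJE DE LA PREGUNTA INDICADA: LAS PREGUNTAS 1 A 3 SE PUNTUAN POR RANGO (rangoValorA),
/// LA 4 POR rangoValorB Y LAS DEMAS SON BOOLEANAS (RESPUESTA 1 = 1 PUNTO, OTRO VALOR = 0).
/// </summary>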
public int getPuntaje(int NoPregunta) {
int puntos = 0;
switch (NoPregunta) {
case 1:
puntos = rangoValorA(R_1);
break;
case 2:
puntos = rangoValorA(R_2);
break;
case 3:
puntos = rangoValorA(R_3);
break;
case 4:
puntos = rangoValorB(R_4);
break;
case 5:
if (R_5 == 1) puntos = 1; else puntos = 0;
break;
case 6:
if (R_6 == 1) puntos = 1; else puntos = 0;
break;
case 7:
if (R_7 == 1) puntos = 1; else puntos = 0;
break;
case 8:
if (R_8 == 1) puntos = 1; else puntos = 0;
break;
case 9:
if (R_9 == 1) puntos = 1; else puntos = 0;
break;
case 10:
if (R_10 == 1) puntos = 1; else puntos = 0;
break;
case 11:
if (R_11 == 1) puntos = 1; else puntos = 0;
break;
case 12:
if (R_12 == 1) puntos = 1; else puntos = 0;
break;
case 13:
if (R_13 == 1) puntos = 1; else puntos = 0;
break;
case 14:
if (R_14 == 1) puntos = 1; else puntos = 0;
break;
case 15:
if (R_15 == 1) puntos = 1; else puntos = 0;
break;
case 16:
if (R_16 == 1) puntos = 1; else puntos = 0;
break;
case 17:
if (R_17 == 1) puntos = 1; else puntos = 0;
break;
case 18:
if (R_18 == 1) puntos = 1; else puntos = 0;
break;
case 19:
if (R_19 == 1) puntos = 1; else puntos = 0;
break;
case 20:
if (R_20 == 1) puntos = 1; else puntos = 0;
break;
case 21:
if (R_21 == 1) puntos = 1; else puntos = 0;
break;
case 22:
if (R_22 == 1) puntos = 1; else puntos = 0;
break;
case 23:
if (R_23 == 1) puntos = 1; else puntos = 0;
break;
case 24:
if (R_24 == 1) puntos = 1; else puntos = 0;
break;
case 25:
if (R_25 == 1) puntos = 1; else puntos = 0;
break;
case 26:
if (R_26 == 1) puntos = 1; else puntos = 0;
break;
case 27:
if (R_27 == 1) puntos = 1; else puntos = 0;
break;
case 28:
if (R_28 == 1) puntos = 1; else puntos = 0;
break;
case 29:
if (R_29 == 1) puntos = 1; else puntos = 0;
break;
case 30:
if (R_30 == 1) puntos = 1; else puntos = 0;
break;
case 31:
if (R_31 == 1) puntos = 1; else puntos = 0;
break;
case 32:
if (R_32 == 1) puntos = 1; else puntos = 0;
break;
case 33:
if (R_33 == 1) puntos = 1; else puntos = 0;
break;
case 34:
if (R_34 == 1) puntos = 1; else puntos = 0;
break;
case 35:
if (R_35 == 1) puntos = 1; else puntos = 0;
break;
case 36:
if (R_36 == 1) puntos = 1; else puntos = 0;
break;
case 37:
if (R_37 == 1) puntos = 1; else puntos = 0;
break;
case 38:
if (R_38 == 1) puntos = 1; else puntos = 0;
break;
case 39:
if (R_39 == 1) puntos = 1; else puntos = 0;
break;
default:
puntos = 0;
break;
}
return puntos;
}
public List<RespuestaCuestionario> getRespuestas(string[] Comentarios) {
List<RespuestaCuestionario> respuestas = new List<RespuestaCuestionario>();
#region "PREGUNTA 1"
RespuestaCuestionario resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.VALOR_UNICO;
resp.dValor = R_1;
resp.Puntaje = getPuntaje(1);
if (R_1 > 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[0];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 2"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.VALOR_UNICO;
resp.dValor = R_2;
resp.Puntaje = getPuntaje(2);
if (R_2 > 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[1];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 3"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.VALOR_UNICO;
resp.dValor = R_3;
resp.Puntaje = getPuntaje(3);
if (R_3 > 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[2];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 4"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.VALOR_UNICO_SUBPREGUNTA;
resp.dValor = R_4;
resp.Puntaje = getPuntaje(4);
if (R_4 > 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[3];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 5"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_5);
resp.Puntaje = getPuntaje(5);
if (R_5 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[4];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 6"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_6);
resp.Puntaje = getPuntaje(6);
if (R_6 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[5];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 7"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_7);
resp.Puntaje = getPuntaje(7);
if (R_7 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[6];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 8"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_8);
resp.Puntaje = getPuntaje(8);
if (R_8 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[7];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 9"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_9);
resp.Puntaje = getPuntaje(9);
if (R_9 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[8];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 10"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_10);
resp.Puntaje = getPuntaje(10);
if (R_10 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[9];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 11"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_11);
resp.Puntaje = getPuntaje(11);
if (R_11 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[10];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 12"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_12);
resp.Puntaje = getPuntaje(12);
if (R_12 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[11];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 13"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_13);
resp.Puntaje = getPuntaje(13);
if (R_13 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[12];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 14"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_14);
resp.Puntaje = getPuntaje(14);
if (R_14 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[13];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 15"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_15);
resp.Puntaje = getPuntaje(15);
if (R_15 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[14];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 16"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_16);
resp.Puntaje = getPuntaje(16);
if (R_16 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[15];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 17"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_17);
resp.Puntaje = getPuntaje(17);
if (R_17 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[16];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 18"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_18);
resp.Puntaje = getPuntaje(18);
if (R_18 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[17];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 19"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_19);
resp.Puntaje = getPuntaje(19);
if (R_19 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[18];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 20"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_20);
resp.Puntaje = getPuntaje(20);
if (R_20 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[19];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 21"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_21);
resp.Puntaje = getPuntaje(21);
if (R_21 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[20];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 22"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_22);
resp.Puntaje = getPuntaje(22);
if (R_22 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[21];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 23"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_23);
resp.Puntaje = getPuntaje(23);
if (R_23 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[22];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 24"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_24);
resp.Puntaje = getPuntaje(24);
if (R_24 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[23];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 25"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_25);
resp.Puntaje = getPuntaje(25);
if (R_25 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[24];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 26"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_26);
resp.Puntaje = getPuntaje(26);
if (R_26 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[25];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 27"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_27);
resp.Puntaje = getPuntaje(27);
if (R_27 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[26];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 28"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_28);
resp.Puntaje = getPuntaje(28);
if (R_28 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[27];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 29"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_29);
resp.Puntaje = getPuntaje(29);
if (R_29 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[28];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 30"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_30);
resp.Puntaje = getPuntaje(30);
if (R_30 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[29];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 31"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_31);
resp.Puntaje = getPuntaje(31);
if (R_31 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[30];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 32"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_32);
resp.Puntaje = getPuntaje(32);
if (R_32 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[31];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 33"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.dValor = R_33;
resp.Puntaje = getPuntaje(33);
if (R_33 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[32];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 34"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_34);
resp.Puntaje = getPuntaje(34);
if (R_34 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[33];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 35"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_35);
resp.Puntaje = getPuntaje(35);
if (R_35 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[34];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 36"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_36);
resp.Puntaje = getPuntaje(36);
if (R_36 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[35];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 37"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_37);
resp.Puntaje = getPuntaje(37);
if (R_37 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[36];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 38"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_38);
resp.Puntaje = getPuntaje(38);
if (R_38 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[37];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 39"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_39);
resp.Puntaje = getPuntaje(39);
if (R_39 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[38];
respuestas.Add(resp);
#endregion
return respuestas;
}
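/// <summary>
/// PUNTUA UN PORCENTAJE POR RANGOS: 51-69 = 1, 70-79 = 2, 80-89 = 3, 90 O MAS = 4, CUALQUIER OTRO VALOR = 0.
/// </summary>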
private int rangoValorA(int valor) {
if (valor > 50 && valor < 70)
{
return 1;
}
else
{
if (valor >= 70 && valor < 80)
{
return 2;
}
else
{
if (valor >= 80 && valor < 90)
{
return 3;
}
else
{
if (valor >= 90)
{
return 4;
}
else
{
return 0;
}
}
}
}
}
private int rangoValorB(int valor) {
if (valor >= 70 && valor < 80)
{
return 1;
}
else if (valor >= 80 && valor < 90) { return 2; }
else return 0;
}
}
}<file_sep>namespace PROCOPRU.Migrations
{
using System;
using System.Data.Entity.Migrations;
public partial class ADDMENSAJES : DbMigration
{
public override void Up()
{
CreateTable(
"dbo.Avisos",
c => new
{
Id = c.Int(nullable: false, identity: true),
Titulo = c.String(nullable: false),
CuerpoMensaje = c.String(nullable: false),
FechaEnvio = c.DateTime(nullable: false),
TipoMensaje = c.Int(nullable: false),
Destinaratio_Id = c.Int(),
Remitente_Id = c.Int(nullable: false),
})
.PrimaryKey(t => t.Id)
.ForeignKey("dbo.Municipios", t => t.Destinaratio_Id)
.ForeignKey("dbo.Usuarios", t => t.Remitente_Id, cascadeDelete: true)
.Index(t => t.Destinaratio_Id)
.Index(t => t.Remitente_Id);
}
public override void Down()
{
DropForeignKey("dbo.Avisos", "Remitente_Id", "dbo.Usuarios");
DropForeignKey("dbo.Avisos", "Destinaratio_Id", "dbo.Municipios");
DropIndex("dbo.Avisos", new[] { "Remitente_Id" });
DropIndex("dbo.Avisos", new[] { "Destinaratio_Id" });
DropTable("dbo.Avisos");
}
}
}
<file_sep>namespace PROCOPRU.Migrations
{
using System;
using System.Data.Entity.Migrations;
public partial class ADDCAMPOSDOCUMENTO : DbMigration
{
public override void Up()
{
AddColumn("dbo.Caracterizaciones", "sDocumento", c => c.String());
}
public override void Down()
{
DropColumn("dbo.Caracterizaciones", "sDocumento");
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using PROCOPRU.Modelo;
namespace PROCOPRU.ViewModel
{
public class Ppal_MT_ViewModel
{
public int R_1{ get; set; }
public int R_2 { get; set; }
public int R_3 { get; set; }
public int R_4 { get; set; }
public int R_5 { get; set; }
public int R_6 { get; set; }
public int R_7 { get; set; }
public int R_8 { get; set; }
public int R_9 { get; set; }
public int R_10 { get; set; }
public int R_11 { get; set; }
public int R_12 { get; set; }
public int R_13 { get; set; }
public int R_14 { get; set; }
public int R_15 { get; set; }
public int R_16 { get; set; }
public int R_17 { get; set; }
public int R_18 { get; set; }
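/// <summary>
/// PUNTUA UN PORCENTAJE POR RANGOS: 61-69 = 1, 70-79 = 2, 80 O MAS = 3.
/// </summary>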
private int rangoValorA(int valor)
{
if (valor > 60 && valor < 70)
return 1;
else if (valor >= 70 && valor < 80)
return 2;
else if (valor >= 80)
return 3;
else
return 0;
}
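/// <summary>
/// CONSTRUYE LA LISTA DE RespuestaCuestionario A PARTIR DE R_1..R_18 Y DEL ARREGLO DE COMENTARIOS,
/// ASIGNANDO EL PUNTAJE DE CADA PREGUNTA CON getPuntaje.
/// </summary>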
public List<RespuestaCuestionario> getRespuestas(string[] Comentarios)
{
List<RespuestaCuestionario> respuestas = new List<RespuestaCuestionario>();
#region "PREGUNTA 1"
RespuestaCuestionario resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.VALOR_UNICO_SUBPREGUNTA;
resp.dValor = R_1;
resp.Puntaje = getPuntaje(1);
if (R_1 > 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[0];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 2"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_2);
resp.Puntaje = getPuntaje(2);
if (R_2 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[1];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 3"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_3);
resp.Puntaje = getPuntaje(3);
if (R_3 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[2];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 4"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.VALOR_UNICO_SUBPREGUNTA;
resp.dValor = R_4;
resp.Puntaje = getPuntaje(4);
if (R_4 > 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[3];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 5"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_5);
resp.Puntaje = getPuntaje(5);
if (R_5 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[4];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 6"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_6);
resp.Puntaje = getPuntaje(6);
if (R_6 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[5];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 7"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_7);
resp.Puntaje = getPuntaje(7);
if (R_7 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[6];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 8"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_8);
resp.Puntaje = getPuntaje(8);
if (R_8 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[7];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 9"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_9);
resp.Puntaje = getPuntaje(9);
if (R_9 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[8];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 10"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA;
resp.bValor = Convert.ToBoolean(R_10);
resp.Puntaje = getPuntaje(10);
if (R_10 > 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[9];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 11"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA;
resp.bValor = Convert.ToBoolean(R_11);
resp.Puntaje = getPuntaje(11);
if (R_11 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[10];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 12"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA;
resp.bValor = Convert.ToBoolean(R_12);
resp.Puntaje = getPuntaje(12);
if (R_12 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[11];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 13"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.VALOR_UNICO_SUBPREGUNTA;
resp.dValor = R_13;
resp.Puntaje = getPuntaje(13);
if (R_13 > 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[12];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 14"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.VALOR_UNICO_SUBPREGUNTA;
resp.dValor = R_14;
resp.Puntaje = getPuntaje(14);
if (R_14 > 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[13];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 15"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA;
resp.bValor = Convert.ToBoolean(R_15);
resp.Puntaje = getPuntaje(15);
if (R_15 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[14];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 16"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_16);
resp.Puntaje = getPuntaje(16);
if (R_16 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[15];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 17"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_17);
resp.Puntaje = getPuntaje(17);
if (R_17 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[16];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 18"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_18);
resp.Puntaje = getPuntaje(18);
if (R_18 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[17];
respuestas.Add(resp);
#endregion
return respuestas;
}
public int getPuntaje(int NoPregunta)
{
int puntos = 0;
switch (NoPregunta)
{
case 1:
puntos = rangoValorA(R_1);
break;
case 2:
if (R_2 == 1) puntos = 1; else puntos = 0;
break;
case 3:
if (R_3 == 1) puntos = 1; else puntos = 0;
break;
case 4:
if (R_4 > 80) puntos = 1; else puntos = 0;
break;
case 5:
if (R_5 == 1) puntos = 1; else puntos = 0;
break;
case 6:
if (R_6 == 1) puntos = 1; else puntos = 0;
break;
case 7:
if (R_7 == 1) puntos = 1; else puntos = 0;
break;
case 8:
if (R_8 == 1) puntos = 1; else puntos = 0;
break;
case 9:
if (R_9 == 1) puntos = 1; else puntos = 0;
break;
case 10:
if (R_10 >= 1) puntos = 1; else puntos = 0;
break;
case 11:
if (R_11 == 1) puntos = 1; else puntos = 0;
break;
case 12:
if (R_12 == 1) puntos = 1; else puntos = 0;
break;
case 13:
if (R_13 >= 80) puntos = 1; else puntos = 0;
break;
case 14:
if (R_14 == 100) puntos = 1; else puntos = 0;
break;
case 15:
if (R_15 == 1) puntos = 1; else puntos = 0;
break;
case 16:
if (R_16 == 1) puntos = 1; else puntos = 0;
break;
case 17:
if (R_17 == 1) puntos = 1; else puntos = 0;
break;
case 18:
if (R_18 == 1) puntos = 1; else puntos = 0;
break;
default:
puntos = 0;
break;
}
return puntos;
}
}
}<file_sep>namespace PROCOPRU.Migrations
{
using System;
using System.Data.Entity.Migrations;
public partial class DROPREQUERIDOBDOCUMENTO : DbMigration
{
public override void Up()
{
AlterColumn("dbo.Caracterizaciones", "bDocumento", c => c.Binary());
}
public override void Down()
{
AlterColumn("dbo.Caracterizaciones", "bDocumento", c => c.Binary(nullable: false));
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
namespace PROCOPRU.Modelo
{
public enum enumTipoLocalidad
{
Ciudad_Principal,
Ciudad_Complementaria,
Localidad_Urbana_y_Pueblo
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using PROCOPRU.ViewModel;
using PROCOPRU.Modelo;
using PROCOPRU.Commons.Tags;
namespace PROCOPRU.Controllers
{
[NoLoginAttribute]
public class LoginController : Controller
{
private Usuarios usuario = new Usuarios();
//
// GET: /Login/
public ActionResult Index()
{
return View();
}
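/// <summary>
/// VALIDA LAS CREDENCIALES DEL FORMULARIO DE LOGIN; SI LA AUTENTICACION ES CORRECTA
/// REGRESA LA URL DE ~/home EN LA RESPUESTA JSON.
/// </summary>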
public JsonResult Autenticar(LoginViewModel model)
{
var rModel = new ResponseModel();
if (ModelState.IsValid)
{
this.usuario.Email = model.Correo;
this.usuario.Password = model.Password; // SUPUESTO: SE ASUME QUE LoginViewModel EXPONE LA PROPIEDAD Password
rModel = usuario.Autenticarse();
if (rModel.response)
{
rModel.href = Url.Content("~/home");
}
}
else
{
rModel.SetResponse(false, "Debe llenar los campos para poder ingresar.");
}
return Json(rModel);
}
}
}<file_sep>namespace PROCOPRU.Migrations
{
using System;
using System.Data.Entity.Migrations;
public partial class UPDATEENCUESTARETROALIMENTACION : DbMigration
{
public override void Up()
{
AddColumn("dbo.Encuestas", "RestroAlimentacion", c => c.String());
}
public override void Down()
{
DropColumn("dbo.Encuestas", "RestroAlimentacion");
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
namespace PROCOPRU.Modelo
{
public enum enumCategoria
{
PRINCIPAL,
COMPLEMENTARIA,
POBLACION
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using PROCOPRU.Modelo;
namespace PROCOPRU.Controllers
{
public class CatMunicipioController : Controller
{
Municipio municipio = new Municipio();
//
// GET: /CatMunicipio/
public ActionResult Index()
{
ViewBag.Municipios = Municipio.getAll();
return View();
}
public ActionResult verDetalle(int id)
{
ViewBag.Municipio = Municipio.getById(id);
ViewBag.Usuario = Usuarios.getAllByMunicipio(id);
return View();
}
public ActionResult frmRegistro()
{
return View(new PROCOPRU.Modelo.Municipio());
}
public JsonResult Guardar(PROCOPRU.Modelo.Municipio mun)
{
var response = new ResponseModel();
if (ModelState.IsValid)
{
response = mun.Guardar();
if (response.response)
{
response.href = Url.Content("~/CatMunicipio");
}
}
return Json(response);
}
public ActionResult frmRegistroCiudad(int Id) {
ViewBag.IdMunicipio = Id;
return View(new PROCOPRU.Modelo.Localidad());
}
public JsonResult GuardarCiudad(PROCOPRU.Modelo.Localidad ciudad, int idMunicipio) {
ciudad.Municipio = Municipio.getById(idMunicipio);
var response = new ResponseModel();
if (ModelState.IsValid)
{
response = ciudad.Guardar();
if (response.response) {
response.href = Url.Content("~/CatMunicipio/verDetalle/" + ciudad.Municipio.Id);
}
}
return Json(response);
}
public ActionResult frmBorrar(int Id) {
return View(Municipio.getById(Id));
}
public JsonResult Borrar(PROCOPRU.Modelo.Municipio mun) {
var response = new ResponseModel();
if (ModelState.IsValid) {
response = mun.Borrar();
if (response.response)
{
response.href = Url.Content("~/CatMunicipio");
}
}
return Json(response);
}
}
}<file_sep>namespace PROCOPRU.Migrations
{
using System;
using System.Data.Entity.Migrations;
public partial class DROPMUNICIPIOIDENCIUDAD : DbMigration
{
public override void Up()
{
DropColumn("dbo.Ciudades", "Municipio_Id");
RenameColumn(table: "dbo.Ciudades", name: "Municipio_Id1", newName: "Municipio_Id");
AlterColumn("dbo.Ciudades", "Municipio_Id", c => c.Int());
}
public override void Down()
{
AlterColumn("dbo.Ciudades", "Municipio_Id", c => c.Int(nullable: false));
RenameColumn(table: "dbo.Ciudades", name: "Municipio_Id", newName: "Municipio_Id1");
AddColumn("dbo.Ciudades", "Municipio_Id", c => c.Int(nullable: false));
}
}
}
<file_sep>namespace PROCOPRU.Migrations
{
using System;
using System.Data.Entity.Migrations;
public partial class ADDREQUERIDOCAPTUROLOCALIDAD : DbMigration
{
public override void Up()
{
DropForeignKey("dbo.Capturas", "Capturo_Id", "dbo.Usuarios");
DropForeignKey("dbo.Capturas", "localidad_Id", "dbo.Localidades");
DropIndex("dbo.Capturas", new[] { "Capturo_Id" });
DropIndex("dbo.Capturas", new[] { "localidad_Id" });
AlterColumn("dbo.Capturas", "Capturo_Id", c => c.Int(nullable: false));
AlterColumn("dbo.Capturas", "localidad_Id", c => c.Int(nullable: false));
CreateIndex("dbo.Capturas", "Capturo_Id");
CreateIndex("dbo.Capturas", "localidad_Id");
AddForeignKey("dbo.Capturas", "Capturo_Id", "dbo.Usuarios", "Id", cascadeDelete: true);
AddForeignKey("dbo.Capturas", "localidad_Id", "dbo.Localidades", "Id", cascadeDelete: true);
}
public override void Down()
{
DropForeignKey("dbo.Capturas", "localidad_Id", "dbo.Localidades");
DropForeignKey("dbo.Capturas", "Capturo_Id", "dbo.Usuarios");
DropIndex("dbo.Capturas", new[] { "localidad_Id" });
DropIndex("dbo.Capturas", new[] { "Capturo_Id" });
AlterColumn("dbo.Capturas", "localidad_Id", c => c.Int());
AlterColumn("dbo.Capturas", "Capturo_Id", c => c.Int());
CreateIndex("dbo.Capturas", "localidad_Id");
CreateIndex("dbo.Capturas", "Capturo_Id");
AddForeignKey("dbo.Capturas", "localidad_Id", "dbo.Localidades", "Id");
AddForeignKey("dbo.Capturas", "Capturo_Id", "dbo.Usuarios", "Id");
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using PROCOPRU.Modelo;
namespace PROCOPRU.ViewModel
{
public class Comp_SA_ViewModel
{
public int R_1 { get; set; }
public int R_2 { get; set; }
public int R_3 { get; set; }
public int R_4 { get; set; }
public int R_5 { get; set; }
public int R_6 { get; set; }
public int R_7 { get; set; }
public int R_8 { get; set; }
public int R_9 { get; set; }
private int rangoValorA(int valor)
{
if (valor >= 50 && valor < 75)
return 1;
else
if (valor >= 75)
return 2;
else
return -1;
}
private int rangoValorB(int valor)
{
if (valor >= 4 && valor < 8)
return 1;
else if (valor >= 8 && valor < 10)
return 2;
else if(valor >= 10)
return 3;
else return -1;
}
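/// <summary>
/// CALCULA EL PUNTAJE DE LA PREGUNTA: LAS BOOLEANAS DAN 1 PUNTO CUANDO LA RESPUESTA ES 1,
/// LA PREGUNTA 4 DA 1 PUNTO SI EL VALOR ES MAYOR A 80 Y LA PREGUNTA 8 SE PUNTUA POR RANGO CON rangoValorB.
/// </summary>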
public int getPuntaje(int NoPregunta)
{
int puntos = 0;
switch (NoPregunta)
{
case 1:
if (R_1 == 1) puntos = 1; else puntos = 0;
break;
case 2:
if (R_2 == 1) puntos = 1; else puntos = 0;
break;
case 3:
if (R_3 == 1) puntos = 1; else puntos = 0;
break;
case 4:
if (R_4 > 80) puntos = 1; else puntos = 0;
break;
case 5:
if (R_5 == 1) puntos = 1; else puntos = 0;
break;
case 6:
if (R_6 == 1) puntos = 1; else puntos = 0;
break;
case 7:
if (R_7 == 1) puntos = 1; else puntos = 0;
break;
case 8:
puntos = rangoValorB(R_8);
break;
case 9:
if (R_9 == 1) puntos = 1; else puntos = 0;
break;
default:
puntos = 0;
break;
}
return puntos;
}
public List<RespuestaCuestionario> getRespuestas(string[] Comentarios)
{
List<RespuestaCuestionario> respuestas = new List<RespuestaCuestionario>();
#region "PREGUNTA 1"
RespuestaCuestionario resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA;
resp.bValor = Convert.ToBoolean(R_1);
resp.Puntaje = getPuntaje(1);
if (R_1 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[0];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 2"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA;
resp.bValor = Convert.ToBoolean(R_2);
resp.Puntaje = getPuntaje(2);
if (R_2 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[1];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 3"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA;
resp.bValor = Convert.ToBoolean(R_3);
resp.Puntaje = getPuntaje(3);
if (R_3 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[2];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 4"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA;
resp.bValor = Convert.ToBoolean(R_4);
resp.Puntaje = getPuntaje(4);
if (R_4 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[3];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 5"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA;
resp.bValor = Convert.ToBoolean(R_5);
resp.Puntaje = getPuntaje(5);
if (R_5 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[4];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 6"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_6);
resp.Puntaje = getPuntaje(6);
if (R_6 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[5];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 7"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_7);
resp.Puntaje = getPuntaje(7);
if (R_7 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[6];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 8"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.VALOR_UNICO;
resp.dValor = R_8;
resp.Puntaje = getPuntaje(8);
if (R_8 > 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[7];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 9"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA;
resp.bValor = Convert.ToBoolean(R_9);
resp.Puntaje = getPuntaje(9);
if (R_9 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[8];
respuestas.Add(resp);
#endregion
return respuestas;
}
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using Modelo.helpers;
namespace Modelo
{
[Table("catPermisos")]
public class Permisos : ErrorHelper
{
public Permisos() { }
[Key]
public int Id{get; set;}
[Required(ErrorMessage = "Debe Indicar el Nombre del Modulo")]
public String Modulo { get; set; }
public String Descripcion { get; set; }
public virtual ICollection<PermisosDenegadosPorRoll> PermisoDenegadoPorRoll { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using System.Web;
namespace PROCOPRU.ViewModel
{
public class MunicipoViewModel
{
public int Id { get; set; }
[Required(ErrorMessage = "Se require ingresar el Nombre")]
public String Nombre { get; set; }
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using System.Linq;
using System.Data.Entity;
namespace PROCOPRU.Modelo
{
[Table("Avisos")]
public class Mensaje
{
public Mensaje() {
this.FechaEnvio = System.DateTime.Now;
}
[Key]
public int Id { get; set; }
[Display(Name = "TITULO DE LA NOTA")]
[Required(ErrorMessage = "Debe ingresar el Titulo del mensaje")]
public string Titulo { get; set; }
[Display(Name = "NOTA COMPLETA")]
[Required(ErrorMessage = "Debe ingresar el texto del mensaje")]
[DataType(DataType.MultilineText)]
public string CuerpoMensaje { get; set; }
public DateTime FechaEnvio { get; set; }
public Boolean Atendido { get; set; }
public virtual Usuarios Remitente { get; set; }
public virtual Municipio Municipio { get; set; }
public enumTipoMensaje TipoMensaje { get; set; }
public static List<Mensaje> getAllTypeGeneral() {
try {
using (var ctx = new DataModel()) {
return ctx.mensajes.Where(r => r.TipoMensaje == enumTipoMensaje.GENERAL && r.Atendido == false).ToList();
}
} catch (Exception) { throw; }
}
public static List<Mensaje> getAllTypeDirecto(int IdMunicipio) {
try
{
using (var ctx = new DataModel())
{
return ctx.mensajes.Where(r => r.TipoMensaje == enumTipoMensaje.DIRECTO && r.Atendido == false && r.Municipio.Id == IdMunicipio).ToList();
}
}
catch (Exception) { throw; }
}
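/// <summary>
/// GUARDA EL MENSAJE EN LA BASE DE DATOS Y REGRESA UN ResponseModel INDICANDO SI LA OPERACION FUE EXITOSA.
/// </summary>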
public ResponseModel Guardar() {
var response = new ResponseModel();
try {
using (var ctx = new DataModel()) {
//ctx.Entry(this).State = EntityState.Added;
ctx.mensajes.Add(this);
response.SetResponse(true);
ctx.SaveChanges();
}
} catch (Exception) {
throw; }
return response;
}
public static Mensaje getById(int Id) {
try {
using (var ctx = new DataModel()) {
return ctx.mensajes.Where(r => r.Id == Id).FirstOrDefault();
}
} catch (Exception ex) {
throw;
}
}
/// <summary>
/// DESACTIVA LA NOTICIA, LA PONE EN ESTATUS TRUE
/// </summary>
/// <param name="Id"></param>
public static void Atender(int Id) {
try {
using (var ctx = new DataModel()) {
var msg = Mensaje.getById(Id);
msg.Atendido = true;
ctx.Entry(msg).State = EntityState.Modified;
ctx.SaveChanges();
}
} catch (Exception) { throw; }
}
}
public enum enumTipoMensaje{
GENERAL,
DIRECTO
}
}<file_sep>namespace PROCOPRU.Migrations
{
using System;
using System.Data.Entity.Migrations;
public partial class UPDATEEVALUACIONESSTATUSBSTATUS : DbMigration
{
public override void Up()
{
AddColumn("dbo.Encuestas", "bStatus", c => c.Boolean(nullable: false));
}
public override void Down()
{
DropColumn("dbo.Encuestas", "bStatus");
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Data.Entity.ModelConfiguration.Conventions;
using System.Data.Entity;
namespace PROCOPRU.Modelo
{
public class DataModel : DbContext
{
public DbSet<Permisos> permisos { get; set; }
public DbSet<Roll> roll { get; set; }
public DbSet<PermisosDenegadosPorRoll> permisosdenegadosporroll { get; set; }
public DbSet<Usuarios> usuarios { get; set; }
public DbSet<Municipio> municipios { get; set; }
public DbSet<Localidad> localidades { get; set; }
public DbSet<Captura> capturas { get; set; }
public DbSet<Caracterizacion> caracterizacion { get; set; }
public DbSet<Evaluacion> evaluacion { get; set; }
public DbSet<RespuestaCuestionario> respuestasCuestionario { get; set; }
public DbSet<EvaluacionPregunta> evaluacionPregunta { get; set; }
public DbSet<EnunciadoPregunta> enunciadoPregunta { get; set; }
public DbSet<Pregunta> pregunta { get; set; }
public DbSet<Mensaje> mensajes { get; set; }
protected override void OnModelCreating(DbModelBuilder modelBuilder)
{
}
}
}
<file_sep>using System;
using System.ComponentModel.DataAnnotations;
using PROCOPRU.Modelo;
namespace PROCOPRU.ViewModel
{
public class UsuarioViewModel
{
[Display(Name = "NOMBRE DEL RESPONBLE DE LA CUENTA")]
[Required(ErrorMessage = "Debe ingresar el nombre del responable")]
public String Nombre { get; set; }
[Display(Name = "CORREO ELECTRONICO")]
[Required(ErrorMessage = "Debe ingresar una direccion de correo")]
public String Email { get; set; }
[Display(Name = "CLAVE DE ACCEO AL SISTEMA")]
[Required(ErrorMessage = "Se require ingrese la clave de acceso")]
public String Password { get; set; }
[Display(Name ="TIPO DE USUARIO")]
[Required(ErrorMessage ="Debe indicar el tipo de Usuario")]
public enumTipoUsuario TipoUsuario { get; set; }
public int IdMunicipio { get; set; }
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using PROCOPRU.Modelo.Helpers;
using System.Data.Entity;
using System.Linq;
namespace PROCOPRU.Modelo
{
[Table("Municipios")]
public class Municipio
{
public Municipio() {
Status = true;
}
[Key]
[Display(Name = "Folio")]
public int Id { get; set; }
[Display(Name = "Nombre del Municipio")]
[Required(ErrorMessage = "Debe ingresar el Nombre del Municipio")]
public String Nombre { get; set; }
//[Display(Name = "Habitantes")]
//public int Poblacion { get; set; }
[StringLength(3,MinimumLength =3,ErrorMessage ="El codigo debe tener 3 digitos")]
public string CodigoMunicipio { get; set; }
public Boolean Status { get; set; }
public virtual ICollection<Localidad> Localidad { get; set; }
//public virtual ICollection<Usuarios> Usuarios { get; set; }
#region "FUNCIONES"
/// <summary>
/// FUNCION QUE ME REGRESA EL LISTADO DE USUARIOS QUE SE ENCUENTRE REGISTRADOS AL MUNICIPIO
/// </summary>
/// <returns>LISTA DE USUARIOS</returns>
public List<Usuarios> ListarUsuarios() {
try {
using (var ctx=new DataModel()) {
return ctx.usuarios.Where(r => r.IdMunicipio == this.Id && r.Status == true).ToList();
}
} catch (Exception ) {
throw;
}
}
public ResponseModel Borrar()
{
var response = new ResponseModel();
try
{
using (var ctx = new DataModel())
{
ctx.Entry(this).State = EntityState.Modified;
response.SetResponse(true);
ctx.SaveChanges();
}
}
catch (Exception) { throw; }
return response;
}
public static List<Municipio> getAll()
{
try
{
using (var ctx = new DataModel())
{
return ctx.municipios.Include("Localidad")
.Where(r => r.Status == true).ToList();
}
}
catch (Exception) { throw; }
}
public ResponseModel Guardar()
{
var response = new ResponseModel();
try
{
using (var ctx = new DataModel())
{
this.Status = true;
ctx.Entry(this).State = EntityState.Added;
response.SetResponse(true);
ctx.SaveChanges();
}
}
catch (Exception) { throw; }
return response;
}
public static Municipio getById(int Id)
{
try
{
using (var ctx = new DataModel())
{
return ctx.municipios.Include("Localidad")
.Where(r => r.Id == Id && r.Status == true).FirstOrDefault();
}
}
catch (Exception) { throw; }
}
#endregion
}
}<file_sep>namespace PROCOPRU.Migrations
{
using System;
using System.Data.Entity.Migrations;
public partial class ADDFECHASCAPTURAS : DbMigration
{
public override void Up()
{
AddColumn("dbo.Capturas", "dFechaRevisado", c => c.DateTime());
AddColumn("dbo.Capturas", "dFechaAceptada", c => c.DateTime());
}
public override void Down()
{
DropColumn("dbo.Capturas", "dFechaAceptada");
DropColumn("dbo.Capturas", "dFechaRevisado");
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using PROCOPRU.Modelo.Helpers;
using System.Data.Entity;
using System.Linq;
namespace PROCOPRU.Modelo
{
[Table("Caracterizaciones")]
public class Caracterizacion
{
public Caracterizacion() {
bValidado = false;
}
[Key]
public int Id { get; set; }
public byte[] bDocumento { get; set; }
public string sDocumento { get; set; }
[Required(ErrorMessage="Debe indicar el año del documento")]
public int iAnioDocumento { get; set; }
public string sObservaciones { get; set; }
public bool bValidado { get; set; }
public string sRetroalimentacion { get; set; }
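/// <summary>
/// INSERTA LA CARACTERIZACION SI ES NUEVA O LA ACTUALIZA SI YA TIENE Id, Y REGRESA EL Id RESULTANTE.
/// </summary>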
public static int Crud(Caracterizacion nCaracterizacion) {
try {
using (var ctx=new DataModel()) {
if (nCaracterizacion.Id > 0)
{
ctx.Entry(nCaracterizacion).State = EntityState.Modified;
}
else {
ctx.Entry(nCaracterizacion).State = EntityState.Added;
}
ctx.SaveChanges();
}
} catch (Exception) {
throw;
}
return nCaracterizacion.Id;
}
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using Modelo.helpers;
namespace Modelo
{
[Table("Usuarios")]
public class Usuarios:ErrorHelper
{
public Usuarios() { }
[Key]
public int Id { get; set; }
public String Nombre { get; set; }
[Required(ErrorMessage="Debe ingresar una direccion de correo")]
public String Email { get; set; }
[Required(ErrorMessage="Se require ingrese la clave de acceso")]
public String Password { get; set; }
public int RollId { get; set; }
public virtual Roll Roll { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using PROCOPRU.Modelo;
using PROCOPRU.Commons.Helper;
namespace PROCOPRU.Controllers
{
public class HomeController : Controller
{
private DataModel db = new DataModel();
public ActionResult Index()
{
ViewBag.NoticiasGeneral = Mensaje.getAllTypeGeneral();
var obj = PROCOPRU.Commons.FrontUser.Get();
return View();
}
protected override void Dispose(bool disposing)
{
db.Dispose();
base.Dispose(disposing);
}
public ActionResult Salir()
{
SessionHelper.DestroyUserSession();
return Redirect("~/");
}
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using PROCOPRU.Modelo.Helpers;
using System.Data.Entity;
using System.Linq;
using System.Data.Entity.Validation;
namespace PROCOPRU.Modelo
{
[Table("Localidades")]
public class Localidad
{
public Localidad(Municipio municipio) {
this.Status = true;
this.Municipio = municipio;
}
public Localidad() {
this.Status = true;
}
[Key]
[Display(Name = "Folio")]
public int Id { get; set; }
[Display(Name = "NOMBRE")]
[Required(ErrorMessage = "Se require ingrese en nombre de ciudad")]
public String Nombre { get; set; }
[Display(Name = "CANTIDAD DE HABITANTES POBLACÓN")]
[Required(ErrorMessage = "Se require ingrese la cantidad de habitantes en la localidad")]
public int Pobacion { get; set; }
[Display(Name = "UBICACION GEOGRAFICA DE LA LOCALIDAD")]
[Required(ErrorMessage = "Debe indicar la ubicación geografica de la Ciudad")]
public String UbicacionGeografica { get; set; }
[Display(Name = "CÓDIGO")]
[Required(ErrorMessage = "Ingrese el código según INEGI")]
public string CodigoLocalidad { get; set; }
[Display(Name="TIPO DE LOCALIDAD")]
public enumCategoria Tipo { get; set; }
public Boolean Status { get; set; }
public virtual Municipio Municipio { get; set; }
//public virtual ICollection<Captura> Capturas { get; set; }
#region "FUNCIONES"
public List<Captura> getCapturas() {
try {
using (var ctx = new DataModel()) {
return ctx.capturas.Where(r=>r.idLocalidad==this.Id).ToList();
}
} catch (Exception) { throw; }
}
public ResponseModel Guardar()
{
var response = new ResponseModel();
try
{
using (var ctx = new DataModel())
{
/*ctx.municipios.Attach(this.Municipio);
this.Municipio.Ciudad.Add(this);
ctx.localidades.Add(this); */
//ctx.Entry(this).State = EntityState.Added;
ctx.municipios.Attach(this.Municipio);
ctx.localidades.Add(this);
this.Municipio.Localidad.Add(this);
response.SetResponse(true);
ctx.SaveChanges();
}
}
catch (DbEntityValidationException e) {
foreach (var eve in e.EntityValidationErrors)
{
Console.WriteLine("Entity of type \"{0}\" in state \"{1}\" has the following validation errors:",
eve.Entry.Entity.GetType().Name, eve.Entry.State);
foreach (var ve in eve.ValidationErrors)
{
Console.WriteLine("- Property: \"{0}\", Error: \"{1}\"",
ve.PropertyName, ve.ErrorMessage);
}
}
throw; }
return response;
}
public static Localidad getById(int Id) {
try {
using (var ctx = new DataModel()) {
return ctx.localidades.Include("Municipio")
.Where(r=>r.Id==Id).FirstOrDefault();
}
} catch (Exception) { throw; }
}
#endregion
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using System.Web.Mvc;
using PROCOPRU.Modelo;
using PROCOPRU.ViewModel;
namespace PROCOPRU.Controllers
{
public class CuestionarioController : Controller
{
// GET: Cuestionario
public ActionResult Index()
{
return View();
}
public ActionResult PU(int Id) {
switch (Id) {
case 0:
ViewBag.Evaluacion = EvaluacionPregunta.getEvaluacio(enumTipoEvaluacion.PU, enumTipoLocalidad.Ciudad_Principal);
break;
case 1:
ViewBag.Evaluacion = EvaluacionPregunta.getEvaluacio(enumTipoEvaluacion.PU, enumTipoLocalidad.Ciudad_Complementaria);
break;
case 2:
ViewBag.Evaluacion = EvaluacionPregunta.getEvaluacio(enumTipoEvaluacion.PU, enumTipoLocalidad.Localidad_Urbana_y_Pueblo);
break;
}
return View();
}
public ActionResult CV(int Id)
{
switch (Id)
{
case 0:
ViewBag.Evaluacion = EvaluacionPregunta.getEvaluacio(enumTipoEvaluacion.CV, enumTipoLocalidad.Ciudad_Principal);
break;
case 1:
ViewBag.Evaluacion = EvaluacionPregunta.getEvaluacio(enumTipoEvaluacion.CV, enumTipoLocalidad.Ciudad_Complementaria);
break;
case 2:
ViewBag.Evaluacion = EvaluacionPregunta.getEvaluacio(enumTipoEvaluacion.CV, enumTipoLocalidad.Localidad_Urbana_y_Pueblo);
break;
}
return View();
}
public ActionResult IE(int Id)
{
switch (Id)
{
case 0:
ViewBag.Evaluacion = EvaluacionPregunta.getEvaluacio(enumTipoEvaluacion.IE, enumTipoLocalidad.Ciudad_Principal);
break;
case 1:
ViewBag.Evaluacion = EvaluacionPregunta.getEvaluacio(enumTipoEvaluacion.IE, enumTipoLocalidad.Ciudad_Complementaria);
break;
case 2:
ViewBag.Evaluacion = EvaluacionPregunta.getEvaluacio(enumTipoEvaluacion.IE, enumTipoLocalidad.Localidad_Urbana_y_Pueblo);
break;
}
return View();
}
public ActionResult MT(int Id)
{
switch (Id)
{
case 0:
ViewBag.Evaluacion = EvaluacionPregunta.getEvaluacio(enumTipoEvaluacion.MT, enumTipoLocalidad.Ciudad_Principal);
break;
case 1:
ViewBag.Evaluacion = EvaluacionPregunta.getEvaluacio(enumTipoEvaluacion.MT, enumTipoLocalidad.Ciudad_Complementaria);
break;
case 2:
ViewBag.Evaluacion = EvaluacionPregunta.getEvaluacio(enumTipoEvaluacion.MT, enumTipoLocalidad.Localidad_Urbana_y_Pueblo);
break;
}
return View();
}
public ActionResult SA(int Id)
{
switch (Id)
{
case 0:
ViewBag.Evaluacion = EvaluacionPregunta.getEvaluacio(enumTipoEvaluacion.SA, enumTipoLocalidad.Ciudad_Principal);
break;
case 1:
ViewBag.Evaluacion = EvaluacionPregunta.getEvaluacio(enumTipoEvaluacion.SA, enumTipoLocalidad.Ciudad_Complementaria);
break;
case 2:
ViewBag.Evaluacion = EvaluacionPregunta.getEvaluacio(enumTipoEvaluacion.SA, enumTipoLocalidad.Localidad_Urbana_y_Pueblo);
break;
}
return View();
}
public ActionResult DE(int Id)
{
switch (Id)
{
case 0:
ViewBag.Evaluacion = EvaluacionPregunta.getEvaluacio(enumTipoEvaluacion.DE, enumTipoLocalidad.Ciudad_Principal);
break;
case 1:
ViewBag.Evaluacion = EvaluacionPregunta.getEvaluacio(enumTipoEvaluacion.DE, enumTipoLocalidad.Ciudad_Complementaria);
break;
case 2:
ViewBag.Evaluacion = EvaluacionPregunta.getEvaluacio(enumTipoEvaluacion.DE, enumTipoLocalidad.Localidad_Urbana_y_Pueblo);
break;
}
return View();
}
public ActionResult TextArea() {
return View(new RestroAlimentacionViewModel());
}
[HttpPost]
public JsonResult GuardarObservacion(RestroAlimentacionViewModel model) {
var response = new ResponseModel();
if (ModelState.IsValid) {
string x = model.HtmlContent;
}
return Json(response);
}
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Data.Entity.ModelConfiguration.Conventions;
using System.Data.Entity;
namespace Modelo
{
public class DataModel:DbContext
{
public DataModel()
: base("name=DataModel")
{
}
public DbSet<Permisos> permisos { get; set; }
public DbSet<Roll> roll { get; set; }
public DbSet<PermisosDenegadosPorRoll> permisosdenegadosporroll { get; set; }
public DbSet<Usuarios> usuarios { get; set; }
protected override void OnModelCreating(DbModelBuilder modelBuilder)
{
}
}
}
<file_sep>namespace PROCOPRU.Migrations
{
using System;
using System.Data.Entity.Migrations;
public partial class UPDATERESPUESTAPUNTAJE : DbMigration
{
public override void Up()
{
AddColumn("dbo.RespuestaSCuestionario", "Puntaje", c => c.Double(nullable: false));
AddColumn("dbo.RespuestaSCuestionario", "Confirmada", c => c.Boolean(nullable: false));
}
public override void Down()
{
DropColumn("dbo.RespuestaSCuestionario", "Confirmada");
DropColumn("dbo.RespuestaSCuestionario", "Puntaje");
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using PROCOPRU.Modelo.Helpers;
using System.Data.Entity;
using System.Linq;
namespace PROCOPRU.Modelo
{
[Table("EvaluacionesPreguntas")]
public class EvaluacionPregunta
{
[Key]
public int Id { get; set; }
public enumTipoEvaluacion Tipo { get; set; }
public enumTipoLocalidad TipoLocalidad { get; set; }
public string EncabezadoCuestionario { get; set; }
public string imagenCustrionario { get; set; }
public virtual ICollection<EnunciadoPregunta> EnunciadoPregunta { get; set; }
#region "FUNCIONES"
public int CantidadPreguntas() {
int cantidad = 0;
foreach (var item in EnunciadoPregunta) {
cantidad += item.Preguntas.Count;
}
return cantidad;
}
public static EvaluacionPregunta getEvaluacio(enumTipoEvaluacion tipoEv, enumTipoLocalidad tipoLoc)
{
try
{
using (var ctx = new DataModel())
{
var tem=ctx.evaluacionPregunta.Include("EnunciadoPregunta")
.Include("EnunciadoPregunta.Preguntas")
.Where(r => r.Tipo == tipoEv && r.TipoLocalidad == tipoLoc).FirstOrDefault();
return tem;
}
}
catch (Exception)
{
throw;
}
}
#endregion
}
}<file_sep>namespace PROCOPRU.Migrations
{
using System;
using System.Data.Entity.Migrations;
public partial class ADDEVALUACIONPREGUNTAENUNCUADOPREGUNTAPREGUNTA : DbMigration
{
public override void Up()
{
CreateTable(
"dbo.EnunciadoPreguntas",
c => new
{
Id = c.Int(nullable: false, identity: true),
NumeroPregunta = c.Int(nullable: false),
Enunciado = c.String(),
AyudaEnunciado = c.String(),
EvaluacionPregunta_Id = c.Int(),
})
.PrimaryKey(t => t.Id)
.ForeignKey("dbo.EvaluacionesPreguntas", t => t.EvaluacionPregunta_Id)
.Index(t => t.EvaluacionPregunta_Id);
CreateTable(
"dbo.EvaluacionesPreguntas",
c => new
{
Id = c.Int(nullable: false, identity: true),
Tipo = c.Int(nullable: false),
TipoLocalidad = c.Int(nullable: false),
})
.PrimaryKey(t => t.Id);
CreateTable(
"dbo.Preguntas",
c => new
{
Id = c.Int(nullable: false, identity: true),
TextoPregunta = c.String(),
TipoPregunta = c.Int(nullable: false),
EnunciadoPregunta_Id = c.Int(),
})
.PrimaryKey(t => t.Id)
.ForeignKey("dbo.EnunciadoPreguntas", t => t.EnunciadoPregunta_Id)
.Index(t => t.EnunciadoPregunta_Id);
}
public override void Down()
{
DropForeignKey("dbo.Preguntas", "EnunciadoPregunta_Id", "dbo.EnunciadoPreguntas");
DropForeignKey("dbo.EnunciadoPreguntas", "EvaluacionPregunta_Id", "dbo.EvaluacionesPreguntas");
DropIndex("dbo.Preguntas", new[] { "EnunciadoPregunta_Id" });
DropIndex("dbo.EnunciadoPreguntas", new[] { "EvaluacionPregunta_Id" });
DropTable("dbo.Preguntas");
DropTable("dbo.EvaluacionesPreguntas");
DropTable("dbo.EnunciadoPreguntas");
}
}
}
<file_sep>namespace PROCOPRU.Migrations
{
using System;
using System.Data.Entity.Migrations;
public partial class UPDATENOMBRETABLAS : DbMigration
{
public override void Up()
{
RenameTable(name: "dbo.Evaluaciones", newName: "Encuestas");
RenameTable(name: "dbo.RespuestaSCuestionario", newName: "RespuestasEncuestas");
}
public override void Down()
{
RenameTable(name: "dbo.RespuestasEncuestas", newName: "RespuestaSCuestionario");
RenameTable(name: "dbo.Encuestas", newName: "Evaluaciones");
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using PROCOPRU.Modelo.Helpers;
using System.Data.Entity;
using System.Linq;
namespace PROCOPRU.Modelo
{
[Table("Encuestas")]
public class Evaluacion
{
public Evaluacion()
{
this.dFechaModificacion = DateTime.Now;
this.bStatus = true;
RespuestaCuestionario = new List<RespuestaCuestionario>();
EstadoEvaluacion = enumStatusEvaluacion.CAPTURA;
}
[Key]
public int Id { get; set; }
public DateTime dFechaModificacion { get; set; }
[Required(ErrorMessage = "Debe indicar el tipo de localidad")]
public enumTipoLocalidad TipoLocalidad { get; set; }
public enumTipoEvaluacion TipoEvaluacion { get; set; }
public bool bStatus { get; set; }
public enumStatusEvaluacion EstadoEvaluacion { get; set; }
public string RestroAlimentacion { get; set; }
public virtual ICollection<RespuestaCuestionario> RespuestaCuestionario { get; set; }
public virtual Captura Capturas { get; set; }
#region "FUNCIONES"
public static int PuntajeTotal(int Id) {
int puntos = 0;
try
{
using (var ctx=new DataModel())
{
Evaluacion eva = ctx.evaluacion.Include("RespuestaCuestionario").Where(r => r.Id == Id).FirstOrDefault();
foreach (var item in eva.RespuestaCuestionario)
{
puntos += (int)item.Puntaje;
}
}
}
catch (Exception)
{
throw;
}
return puntos;
}
public void SuspenderEaluacion() {
try
{
using (var ctx=new DataModel())
{
this.bStatus = false;
ctx.Entry(this).State = EntityState.Modified;
ctx.SaveChanges();
}
}
catch (Exception)
{
throw;
}
}
public static ResponseModel Guardar(Evaluacion evalua, Captura captura)
{
var response = new ResponseModel();
try
{
using (var ctx = new DataModel())
{
if (evalua.Id > 0)
{
Evaluacion capturaLocal = ctx.evaluacion.Where(r => r.Id == evalua.Id).FirstOrDefault();
ctx.Entry(capturaLocal).CurrentValues.SetValues(evalua);
}
else
{
ctx.capturas.Attach(captura);
ctx.evaluacion.Add(evalua);
captura.Evaluaciones.Add(evalua);
}
ctx.SaveChanges();
response.SetResponse(true);
ctx.SaveChanges();
}
}
catch (Exception) { throw; }
return response;
}
public static ResponseModel EnviarARevision(int Id) {
var response = new ResponseModel();
try
{
using (var ctx=new DataModel())
{
var eva = ctx.evaluacion.Where(r=>r.Id==Id).FirstOrDefault();
eva.EstadoEvaluacion = enumStatusEvaluacion.EVALUACION;
ctx.Entry(eva).State = EntityState.Modified;
response.SetResponse(true);
ctx.SaveChanges();
}
}
catch (Exception)
{
throw;
}
return response;
}
public static ResponseModel EnviarRetroAlimentacion(int Id)
{
var response = new ResponseModel();
try
{
using (var ctx = new DataModel())
{
var eva = ctx.evaluacion.Where(r => r.Id == Id).FirstOrDefault();
eva.EstadoEvaluacion = enumStatusEvaluacion.RETROALIMENTACION;
ctx.Entry(eva).State = EntityState.Modified;
response.SetResponse(true);
ctx.SaveChanges();
}
}
catch (Exception)
{
throw;
}
return response;
}
public static ResponseModel EnviarAceptada(int Id)
{
var response = new ResponseModel();
try
{
using (var ctx = new DataModel())
{
var eva = ctx.evaluacion.Where(r => r.Id == Id).FirstOrDefault();
eva.EstadoEvaluacion = enumStatusEvaluacion.ACEPTADA;
ctx.Entry(eva).State = EntityState.Modified;
response.SetResponse(true);
ctx.SaveChanges();
}
}
catch (Exception)
{
throw;
}
return response;
}
public double ProcentajeAvance()
{
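// Progress is computed as 100 minus the percentage of answers that are still unconfirmed for this evaluation.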
double porcentaje = 0;
try
{
var item = PROCOPRU.Modelo.RespuestaCuestionario.getAllFaltantes(this.Id);
int faltan = item.Count();
if (RespuestaCuestionario.Count > 0) {
porcentaje = 100 - ((faltan * 100) / RespuestaCuestionario.Count());
}
}
catch (Exception ex)
{
PROCOPRU.Commons.ErrorLog.SaveError(this, ex);
throw;
}
return porcentaje;
}
public static Evaluacion getById(int Id)
{
try
{
using (var ctx = new DataModel())
{
return ctx.evaluacion.Include("RespuestaCuestionario").Where(r => r.Id == Id).FirstOrDefault();
}
}
catch (Exception) { throw; }
}
public RespuestaCuestionario buscarPregunta(int idPregunta) {
return RespuestaCuestionario.Where(r => r.IdPregunta == idPregunta).FirstOrDefault();
}
//public static ResponseModel Crud(Evaluacion evalua) {
// var response = new ResponseModel();
// try {
// using (var ctx = new DataModel()) {
// if (evalua.Id > 0)
// {
// ctx.Entry(evalua).State = EntityState.Modified;
// }
// else {
// ctx.Entry(evalua).State = EntityState.Added;
// }
// response.SetResponse(true);
// ctx.SaveChanges();
// }
// } catch (Exception) { throw; }
// return response;
//}
#endregion
}
public enum enumStatusEvaluacion {
CAPTURA,
EVALUACION,
RETROALIMENTACION,
ACEPTADA
}
public enum enumTipoEvaluacion {
PU,
CV,
IE,
MT,
SA,
DE
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using PROCOPRU.Modelo.Helpers;
using System.Data.Entity;
using System.Linq;
using PROCOPRU.Commons.Helper;
using PROCOPRU.Commons;
namespace PROCOPRU.Modelo
{
[Table("Capturas")]
public class Captura
{
public Captura() {
dFechaCaptura = DateTime.Now;
this.iProcentajeAvance = 0;
EstadoEvaluacion = enumStatusEvaluacion.CAPTURA;
}
[Key]
public int Id { get; set; }
public string Anio { get; set; }
public DateTime dFechaCaptura { get; set; }
public int iProcentajeAvance { get; set; }
public enumStatusEvaluacion EstadoEvaluacion { get; set; }
public DateTime? dFechaRevisado { get; set; }
public DateTime? dFechaAceptada { get; set; }
public virtual Usuarios Capturo { get; set; }
[Required]
public int idLocalidad { get; set; }
//public Localidad localidad { get; set; }
public virtual Caracterizacion Caracterizaciones { get; set; }
public virtual ICollection<Evaluacion> Evaluaciones { get; set; }
#region "FUNCIONES"
public Localidad getLocalidad() {
try {
using (var ctx = new DataModel()) {
return ctx.localidades.Include("Municipio").Where(r=>r.Id==this.idLocalidad).FirstOrDefault();
}
} catch (Exception ex) {
ErrorLog.SaveError(this, ex);
throw; }
}
public static Captura getByAnio(string anio, int idLocalidad)
{
try
{
using (var ctx = new DataModel())
{
return ctx.capturas
.Include("Caracterizaciones")
.Include("Evaluaciones")
.Include("Evaluaciones.Capturas")
.Where(r => r.Anio.Contains(anio) && r.idLocalidad == idLocalidad).FirstOrDefault();
}
}
catch (Exception ex) {
throw;
}
}
public static Captura getById(int Id)
{
try
{
using (var ctx = new DataModel())
{
return ctx.capturas
.Include("Caracterizaciones")
.Include("Evaluaciones")
.Include("Capturo")
.Include("Evaluaciones.Capturas")
.Where(r => r.Id == Id).FirstOrDefault();
}
}
catch (Exception) { throw; }
}
public static Captura CrearNueva(Localidad loc)
{
Captura temp = new Captura();
try
{
using (var ctx = new DataModel())
{
temp.Anio = System.DateTime.Now.ToString("yyyy");
temp.iProcentajeAvance = 0;
temp.Capturo = Usuarios.getById(SessionHelper.GetUser());
temp.idLocalidad = loc.Id;
ctx.usuarios.Attach(temp.Capturo);
ctx.localidades.Attach(loc);
ctx.capturas.Add(temp);
//loc.Capturas.Add(temp);
ctx.SaveChanges();
}
}
catch (Exception ex)
{
ErrorLog.SaveError("CREAR_NUEVA_CAPTURA", ex);
throw;
}
return temp;
}
public void agregarCaracterizacion(Caracterizacion caracteriza)
{
this.Caracterizaciones = caracteriza;
try
{
using (var ctx = new DataModel())
{
var ctem = ctx.capturas.Where(r => r.Id == this.Id).FirstOrDefault();
ctem.Caracterizaciones = caracteriza;
ctx.SaveChanges();
}
}
catch (Exception ex) {
ErrorLog.SaveError(this, ex);
throw; }
}
public static ResponseModel EnviarARevision(int Id)
{
var response = new ResponseModel();
try
{
using (var ctx = new DataModel())
{
var eva = ctx.capturas.Where(r => r.Id == Id).FirstOrDefault();
eva.EstadoEvaluacion = enumStatusEvaluacion.EVALUACION;
eva.dFechaCaptura = System.DateTime.Now;
ctx.Entry(eva).State = EntityState.Modified;
response.SetResponse(true);
ctx.SaveChanges();
}
}
catch (Exception ex)
{
ErrorLog.SaveError("ENVIANDO_REVISION", ex);
throw;
}
return response;
}
public static ResponseModel EnviarRetroAlimentacion(int Id)
{
var response = new ResponseModel();
try
{
using (var ctx = new DataModel())
{
var eva = ctx.capturas.Where(r => r.Id == Id).FirstOrDefault();
eva.EstadoEvaluacion = enumStatusEvaluacion.RETROALIMENTACION;
eva.dFechaRevisado = System.DateTime.Now;
ctx.Entry(eva).State = EntityState.Modified;
response.SetResponse(true);
ctx.SaveChanges();
}
}
catch (Exception ex)
{
ErrorLog.SaveError("ENVIAR_RETRO_ALIMENTACINON", ex);
throw;
}
return response;
}
public static ResponseModel EnviarAceptada(int Id)
{
var response = new ResponseModel();
try
{
using (var ctx = new DataModel())
{
var eva = ctx.capturas.Where(r => r.Id == Id).FirstOrDefault();
eva.EstadoEvaluacion = enumStatusEvaluacion.ACEPTADA;
eva.dFechaAceptada = System.DateTime.Now;
ctx.Entry(eva).State = EntityState.Modified;
response.SetResponse(true);
ctx.SaveChanges();
}
}
catch (Exception ex)
{
ErrorLog.SaveError("ENVIAR ACEPTADA", ex);
throw;
}
return response;
}
public static ResponseModel GuardarRetroAlimentacionEncuesta(int Id, string Observacion) {
var response = new ResponseModel();
try
{
using (var ctx=new DataModel())
{
var enc = ctx.evaluacion.Where(r => r.Id == Id).FirstOrDefault();
enc.RestroAlimentacion = Observacion;
enc.EstadoEvaluacion = enumStatusEvaluacion.RETROALIMENTACION;
ctx.Entry(enc).State = EntityState.Modified;
response.SetResponse(true);
ctx.SaveChanges();
}
}
catch (Exception ex)
{
ErrorLog.SaveError("ENVIAR RETRO ENCUESTA", ex);
throw;
}
return response;
}
#endregion
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using PROCOPRU.Modelo.Helpers;
using System.Data.Entity;
using System.Linq;
namespace PROCOPRU.Modelo
{
[Table("Preguntas")]
public class Pregunta
{
[Key]
public int Id { get; set; }
public string TextoPregunta { get; set; }
public enumTipoPregunta TipoPregunta { get; set; }
public virtual EnunciadoPregunta EnunciadoPregunta { get; set; }
}
}<file_sep>namespace PROCOPRU.Migrations
{
using System;
using System.Data.Entity.Migrations;
public partial class UPDATEROLLPERMISODENEGADOPORROLL : DbMigration
{
public override void Up()
{
DropForeignKey("dbo.PermisosDenegadoPorRoll", "PermisoId", "dbo.catPermisos");
DropForeignKey("dbo.Usuarios", "RollId", "dbo.Rolls");
DropIndex("dbo.PermisosDenegadoPorRoll", new[] { "PermisoId" });
DropIndex("dbo.Usuarios", new[] { "RollId" });
RenameColumn(table: "dbo.PermisosDenegadoPorRoll", name: "PermisoId", newName: "Permiso_Id");
RenameColumn(table: "dbo.Usuarios", name: "RollId", newName: "Roll_Id");
AlterColumn("dbo.Usuarios", "Roll_Id", c => c.Int());
AlterColumn("dbo.PermisosDenegadoPorRoll", "Permiso_Id", c => c.Int());
CreateIndex("dbo.PermisosDenegadoPorRoll", "Permiso_Id");
CreateIndex("dbo.Usuarios", "Roll_Id");
AddForeignKey("dbo.PermisosDenegadoPorRoll", "Permiso_Id", "dbo.catPermisos", "Id");
AddForeignKey("dbo.Usuarios", "Roll_Id", "dbo.Rolls", "Id");
}
public override void Down()
{
DropForeignKey("dbo.Usuarios", "Roll_Id", "dbo.Rolls");
DropForeignKey("dbo.PermisosDenegadoPorRoll", "Permiso_Id", "dbo.catPermisos");
DropIndex("dbo.Usuarios", new[] { "Roll_Id" });
DropIndex("dbo.PermisosDenegadoPorRoll", new[] { "Permiso_Id" });
AlterColumn("dbo.PermisosDenegadoPorRoll", "Permiso_Id", c => c.Int(nullable: false));
AlterColumn("dbo.Usuarios", "Roll_Id", c => c.Int(nullable: false));
RenameColumn(table: "dbo.Usuarios", name: "Roll_Id", newName: "RollId");
RenameColumn(table: "dbo.PermisosDenegadoPorRoll", name: "Permiso_Id", newName: "PermisoId");
CreateIndex("dbo.Usuarios", "RollId");
CreateIndex("dbo.PermisosDenegadoPorRoll", "PermisoId");
AddForeignKey("dbo.Usuarios", "RollId", "dbo.Rolls", "Id", cascadeDelete: true);
AddForeignKey("dbo.PermisosDenegadoPorRoll", "PermisoId", "dbo.catPermisos", "Id", cascadeDelete: true);
}
}
}
<file_sep>namespace PROCOPRU.Migrations
{
using System;
using System.Data.Entity.Migrations;
public partial class UPDATEUSUARIOSIDMUNICIPIO : DbMigration
{
public override void Up()
{
AddColumn("dbo.Usuarios", "IdMunicipio", c => c.Int());
}
public override void Down()
{
DropColumn("dbo.Usuarios", "IdMunicipio");
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using PROCOPRU.Modelo.Helpers;
using System.Data.Entity;
using System.Linq;
namespace PROCOPRU.Modelo
{
[Table("RespuestasEncuestas")]
public class RespuestaCuestionario
{
public RespuestaCuestionario() {
Tipo = enumTipoPregunta.BOLEANA;
SubNombre = "";
bValor = false;
dValorF = 0;
dValorI = 0;
dValor = 0;
Observacion = "";
Retroalimentacion = "";
Puntaje = 0;
Confirmada = false;
}
[Key]
public int Id { get; set; }
public enumTipoPregunta Tipo { get; set; }
/// <summary>
/// Boolean value used when the question is of boolean type.
/// </summary>
public bool bValor { get; set; }
/// <summary>
/// Initial (lower-bound) value used when the question is of range type.
/// </summary>
public double dValorI { get; set; }
/// <summary>
/// Final (upper-bound) value used when the question is of range type.
/// </summary>
public double dValorF { get; set; }
/// <summary>
/// Single double value used when the question expects a single value.
/// </summary>
public double dValor { get; set; }
/// <summary>
/// When the question contains sub-items, the type and sub-question are selected;
/// the text of the sub-question is also stored here.
/// </summary>
public string SubNombre { get; set; }
/// <summary>
/// Observation entered by the data-entry user (municipality, administrator and/or SIDUR).
/// </summary>
public string Observacion { get; set; }
/// <summary>
/// Feedback provided by the reviewer of the capture (SIDUR).
/// </summary>
public string Retroalimentacion { get; set; }
public double Puntaje { get; set; }
public Boolean Confirmada { get; set; }
[Required(ErrorMessage ="Se require indique a que pregunta corresponse esta respuesta")]
public int IdPregunta { get; set; }
public virtual Evaluacion Evaluacion { get; set; }
#region "FUNCIONES"
public Pregunta getPregunta() {
try
{
using (var ctx = new DataModel())
{
return ctx.pregunta.Include("EnunciadoPregunta").Where(r => r.Id == this.IdPregunta).FirstOrDefault();
}
}
catch (Exception) { throw; }
}
public static List<RespuestaCuestionario> getAllFaltantes(int idEValuacion)
{
try
{
using (var ctx = new DataModel())
{
return ctx.respuestasCuestionario.Where(r => r.Confirmada == false && r.Evaluacion.Id == idEValuacion).ToList();
}
}
catch (Exception) { throw; }
}
#endregion
}
public enum enumTipoPregunta{
BOLEANA,
RANGO,
VALOR_UNICO,
BOLEANA_SUBPREGUNTA,
RANGO_SUBPREGUNTA,
VALOR_UNICO_SUBPREGUNTA
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using PROCOPRU.Modelo.Helpers;
namespace PROCOPRU.Modelo
{
[Table("PermisosDenegadoPorRoll")]
public class PermisosDenegadosPorRoll : ErrorHelper
{
public PermisosDenegadosPorRoll() { }
[Key]
public int Id { get; set; }
//public int RollId { get; set; }
//public int PermisoId { get; set; }
public virtual Permisos Permiso { get; set; }
public virtual Roll Roll { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using PROCOPRU.Commons.Helper;
using PROCOPRU.Modelo;
namespace PROCOPRU.Commons
{
public class FrontUser
{
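// A user has a permission unless it appears in their role's denied-permissions list; roles with no denied list allow everything.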
public static bool TienePermiso(RolesPermisos valor)
{
var usuario = FrontUser.Get();
if (usuario == null)
{
SessionHelper.DestroyUserSession();
return false;
}
if (usuario.Roll.PermisosDenegadorPorRoll != null)
{
return !usuario.Roll.PermisosDenegadorPorRoll.Where(x => x.Permiso.Id== (int)valor)
.Any();
}
else {
return true;
}
}
public static Usuarios Get()
{
var user= new Usuarios().Obtener(SessionHelper.GetUser());
return user;
}
}
}<file_sep>namespace PROCOPRU.Migrations
{
using System;
using System.Data.Entity.Migrations;
public partial class ADDVINCULAREVALUACIONCAPTURA : DbMigration
{
public override void Up()
{
RenameTable(name: "dbo.EvaluacionesPU", newName: "Evaluaciones");
DropForeignKey("dbo.Capturas", "Capturo_Id", "dbo.Usuarios");
DropForeignKey("dbo.Capturas", "Caracterizaciones_Id", "dbo.Caracterizaciones");
DropIndex("dbo.Capturas", new[] { "Capturo_Id" });
DropIndex("dbo.Capturas", new[] { "Caracterizaciones_Id" });
AddColumn("dbo.Evaluaciones", "Capturas_Id", c => c.Int());
CreateIndex("dbo.Capturas", "Capturo_Id");
CreateIndex("dbo.Capturas", "Caracterizaciones_Id");
CreateIndex("dbo.Evaluaciones", "Capturas_Id");
AddForeignKey("dbo.Capturas", "Capturo_Id", "dbo.Usuarios", "Id");
AddForeignKey("dbo.Capturas", "Caracterizaciones_Id", "dbo.Caracterizaciones", "Id");
AddForeignKey("dbo.Evaluaciones", "Capturas_Id", "dbo.Capturas", "Id");
}
public override void Down()
{
DropForeignKey("dbo.Evaluaciones", "Capturas_Id", "dbo.Capturas");
DropForeignKey("dbo.Capturas", "Caracterizaciones_Id", "dbo.Caracterizaciones");
DropForeignKey("dbo.Capturas", "Capturo_Id", "dbo.Usuarios");
DropIndex("dbo.Evaluaciones", new[] { "Capturas_Id" });
DropIndex("dbo.Capturas", new[] { "Caracterizaciones_Id" });
DropIndex("dbo.Capturas", new[] { "Capturo_Id" });
DropColumn("dbo.Evaluaciones", "Capturas_Id");
CreateIndex("dbo.Capturas", "Caracterizaciones_Id");
CreateIndex("dbo.Capturas", "Capturo_Id");
AddForeignKey("dbo.Capturas", "Caracterizaciones_Id", "dbo.Caracterizaciones", "Id");
AddForeignKey("dbo.Capturas", "Capturo_Id", "dbo.Usuarios", "Id");
RenameTable(name: "dbo.Evaluaciones", newName: "EvaluacionesPU");
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using PROCOPRU.Modelo.Helpers;
using System.Data.Entity;
using System.Linq;
namespace PROCOPRU.Modelo
{
[Table("EnunciadoPreguntas")]
public class EnunciadoPregunta
{
[Key]
public int Id { get; set; }
public int NumeroPregunta { get; set; }
public string Enunciado { get; set; }
public string AyudaEnunciado { get; set; }
public virtual EvaluacionPregunta EvaluacionPregunta { get; set; }
public virtual ICollection<Pregunta> Preguntas { get; set; }
#region "FUNCIONES"
public static int CantidadPreguntas(int IdEvaluacionPregunta) {
int total = 0;
try
{
using (var ctx = new DataModel()) {
var lista = ctx.enunciadoPregunta.Where(r => r.EvaluacionPregunta.Id == IdEvaluacionPregunta).ToList();
total= lista.Count;
}
}
catch (Exception)
{
throw;
}
return total;
}
#endregion
}
}<file_sep>namespace PROCOPRU.Migrations
{
using System;
using System.Data.Entity.Migrations;
public partial class ADDCAPTURARESPUESTASCARACTERIZACION : DbMigration
{
public override void Up()
{
RenameTable(name: "dbo.Ciudades", newName: "Localidades");
CreateTable(
"dbo.Capturas",
c => new
{
Id = c.Int(nullable: false, identity: true),
dFechaCaptura = c.DateTime(nullable: false),
iProcentajeAvance = c.Int(nullable: false),
Capturo_Id = c.Int(),
Caracterizaciones_Id = c.Int(),
localidad_Id = c.Int(),
})
.PrimaryKey(t => t.Id)
.ForeignKey("dbo.Usuarios", t => t.Capturo_Id)
.ForeignKey("dbo.Caracterizaciones", t => t.Caracterizaciones_Id)
.ForeignKey("dbo.Localidades", t => t.localidad_Id)
.Index(t => t.Capturo_Id)
.Index(t => t.Caracterizaciones_Id)
.Index(t => t.localidad_Id);
CreateTable(
"dbo.Caracterizaciones",
c => new
{
Id = c.Int(nullable: false, identity: true),
bDocumento = c.Binary(nullable: false),
iAnioDocumento = c.Int(nullable: false),
sObservaciones = c.String(),
bValidado = c.Boolean(nullable: false),
sRetroalimentacion = c.String(),
})
.PrimaryKey(t => t.Id);
CreateTable(
"dbo.EvaluacionesPU",
c => new
{
Id = c.Int(nullable: false, identity: true),
dFechaModificacion = c.DateTime(nullable: false),
TipoLocalidad = c.Int(nullable: false),
TipoEvaluacion = c.Int(nullable: false),
})
.PrimaryKey(t => t.Id);
CreateTable(
"dbo.RespuestaSCuestionario",
c => new
{
Id = c.Int(nullable: false, identity: true),
Tipo = c.Int(nullable: false),
bValor = c.Boolean(nullable: false),
dValorI = c.Double(nullable: false),
dValorF = c.Double(nullable: false),
dValor = c.Double(nullable: false),
SubNombre = c.String(),
Observacion = c.String(),
Retroalimentacion = c.String(),
Evaluacion_Id = c.Int(),
})
.PrimaryKey(t => t.Id)
.ForeignKey("dbo.EvaluacionesPU", t => t.Evaluacion_Id)
.Index(t => t.Evaluacion_Id);
}
public override void Down()
{
DropForeignKey("dbo.RespuestaSCuestionario", "Evaluacion_Id", "dbo.EvaluacionesPU");
DropForeignKey("dbo.Capturas", "localidad_Id", "dbo.Localidades");
DropForeignKey("dbo.Capturas", "Caracterizaciones_Id", "dbo.Caracterizaciones");
DropForeignKey("dbo.Capturas", "Capturo_Id", "dbo.Usuarios");
DropIndex("dbo.RespuestaSCuestionario", new[] { "Evaluacion_Id" });
DropIndex("dbo.Capturas", new[] { "localidad_Id" });
DropIndex("dbo.Capturas", new[] { "Caracterizaciones_Id" });
DropIndex("dbo.Capturas", new[] { "Capturo_Id" });
DropTable("dbo.RespuestaSCuestionario");
DropTable("dbo.EvaluacionesPU");
DropTable("dbo.Caracterizaciones");
DropTable("dbo.Capturas");
RenameTable(name: "dbo.Localidades", newName: "Ciudades");
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.ComponentModel.DataAnnotations.Schema;
using PROCOPRU.Modelo.Helpers;
using System.Data.Entity;
using System.Linq;
using PROCOPRU.Commons.Helper;
using PROCOPRU.ViewModel;
using System.Data.Entity.Validation;
namespace PROCOPRU.Modelo
{
[Table("Usuarios")]
public class Usuarios : ErrorHelper
{
public Usuarios() {
Roll = new Roll();
IdMunicipio = null; //new Municipio();
}
[Key]
public int Id { get; set; }
[Display(Name = "NOMBRE DEL RESPONBLE DE LA CUENTA")]
[Required(ErrorMessage = "Debe ingresar el nombre del responable")]
public String Nombre { get; set; }
[Display(Name = "CORREO ELECTRONICO")]
[Required(ErrorMessage = "Debe ingresar una direccion de correo")]
public String Email { get; set; }
[Display(Name = "CLAVE DE ACCEO AL SISTEMA")]
[Required(ErrorMessage = "Se require ingrese la clave de acceso")]
public String Password { get; set; }
public Boolean Status { get; set; }
//public int RollId { get; set; }
public virtual Roll Roll { get; set; }
[Display(Name ="Debe indicar el Id del municipio asigando al usuario")]
public int? IdMunicipio { get; set; }
//public virtual Municipio Municipio { get; set; }
[NotMapped]
public enumTipoUsuario TipoUsuario { get; set; }
#region "FUNCIONES"
public Localidad getMunicipio() {
try {
using (var ctx = new DataModel()) {
return ctx.localidades.Include("Localidad").Where(r => r.Id == this.IdMunicipio).FirstOrDefault();
}
} catch (Exception) { throw; }
}
public static List<Usuarios> getAllByMunicipio(int idMunicipio)
{
try
{
using (var ctx = new DataModel())
{
return ctx.usuarios.Where(r => r.IdMunicipio == idMunicipio && r.Roll.Id == 3).ToList();
}
}
catch (Exception) { throw; }
}
public static List<Usuarios> getAll()
{
try
{
using (var ctx = new DataModel())
{
return ctx.usuarios.Include("Municipio")
.Include("Roll")
.Where(r => r.Status == true && r.Roll.Id == 3).ToList();
}
}
catch (Exception) { throw; }
}
public static Usuarios getById(int id)
{
var usuario = new Usuarios();
try
{
using (var ctx = new DataModel())
{
ctx.Configuration.LazyLoadingEnabled = false;
usuario = ctx.usuarios.Include("Roll")
.Include("Roll.PermisosDenegadorPorRoll")
.Where(x => x.Id == id).SingleOrDefault();
}
}
catch (Exception e)
{
throw;
}
return usuario;
}
public Usuarios Obtener(int id)
{
var usuario = new Usuarios();
try
{
using (var ctx = new DataModel())
{
ctx.Configuration.LazyLoadingEnabled = false;
usuario = ctx.usuarios.Include("Roll")
.Include("Roll.PermisosDenegadorPorRoll")
.Include("Roll.PermisosDenegadorPorRoll.Permiso.PermisoDenegadoPorRoll")
.Where(x => x.Id == id).SingleOrDefault();
}
}
catch (Exception e)
{
throw;
}
return usuario;
}
public ResponseModel Autenticarse()
{
var rm = new ResponseModel();
try
{
using (var ctx = new DataModel())
{
var passEncrip = PROCOPRU.Commons.Tools.GetMD5(this.Password);
var usuario = ctx.usuarios.Where(x => x.Email == this.Email && x.Password == passEncrip).FirstOrDefault();
if (usuario != null)
{
SessionHelper.AddUserToSession(usuario.Id.ToString());
rm.SetResponse(true);
}
else
{
rm.SetResponse(false, "Acceso denegado al sistema");
}
}
}
catch (Exception e)
{
throw;
}
return rm;
}
public ResponseModel Guardar()
{
var response = new ResponseModel();
try
{
using (var ctx = new DataModel())
{
if (this.TipoUsuario == enumTipoUsuario.MUNICIPIO)
{
this.Roll = Roll.getByid(3);
}
else
{
this.Roll = Roll.getByid(2);
this.IdMunicipio = null; //Municipio.getById(3);
}
//ctx.usuarios.Add(this);
ctx.roll.Attach(this.Roll);
ctx.Entry(this).State = EntityState.Added;
this.Roll.Usuarios.Add(this);
//this.Municipio.Usuarios.Add(this);
response.SetResponse(true);
ctx.SaveChanges();
}
}
catch (DbEntityValidationException e)
{
foreach (var eve in e.EntityValidationErrors)
{
Console.WriteLine("Entity of type \"{0}\" in state \"{1}\" has the following validation errors:",
eve.Entry.Entity.GetType().Name, eve.Entry.State);
foreach (var ve in eve.ValidationErrors)
{
Console.WriteLine("- Property: \"{0}\", Error: \"{1}\"",
ve.PropertyName, ve.ErrorMessage);
}
}
throw;
}
return response;
}
public static void Borrar(int Id)
{
try
{
using (var ctx = new DataModel())
{
var item = ctx.usuarios.Include("Municipio").Include("Roll")
.Where(r => r.Id == Id).SingleOrDefault();
ctx.Entry(item).State = EntityState.Modified;
item.Status = false;
ctx.SaveChanges();
}
}
catch (DbEntityValidationException e)
{
foreach (var eve in e.EntityValidationErrors)
{
Console.WriteLine("Entity of type \"{0}\" in state \"{1}\" has the following validation errors:",
eve.Entry.Entity.GetType().Name, eve.Entry.State);
foreach (var ve in eve.ValidationErrors)
{
Console.WriteLine("- Property: \"{0}\", Error: \"{1}\"",
ve.PropertyName, ve.ErrorMessage);
}
}
throw;
}
}
#endregion
}
public enum enumTipoUsuario {
PROGRAMADOR,
ADMINISTRADOR,
MUNICIPIO
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Web;
using PROCOPRU.Modelo;
namespace PROCOPRU.ViewModel
{
public class Ppal_DE_ViewModel
{
public int R_1 { get; set; }
public int R_2 { get; set; }
public int R_3 { get; set; }
public int R_4 { get; set; }
public int getPuntaje(int NoPregunta)
{
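// Questions 1-3 are yes/no and score 1 point for a "yes" (value 1); question 4 scores its own value when it is 1 or 2.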
int puntos = 0;
switch (NoPregunta)
{
case 1:
if(R_1 == 1) puntos = 1; else puntos = 0;
break;
case 2:
if (R_2 == 1) puntos = 1; else puntos = 0;
break;
case 3:
if (R_3 == 1) puntos = 1; else puntos = 0;
break;
case 4:
if (R_4 == 1 || R_4 == 2) puntos = R_4; else puntos = 0;
break;
}
return puntos;
}
public List<RespuestaCuestionario> getRespuestas(string[] Comentarios)
{
List<RespuestaCuestionario> respuestas = new List<RespuestaCuestionario>();
#region "PREGUNTA 1"
RespuestaCuestionario resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor = Convert.ToBoolean(R_1);
resp.Puntaje = getPuntaje(1);
if (R_1 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[0];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 2"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA_SUBPREGUNTA;
resp.bValor =Convert.ToBoolean(R_2);
resp.Puntaje = getPuntaje(2);
if (R_2 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[1];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 3"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.BOLEANA;
resp.bValor = Convert.ToBoolean(R_3);
resp.Puntaje = getPuntaje(3);
if (R_3 >= 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[2];
respuestas.Add(resp);
#endregion
#region "PREGUNTA 4"
resp = new RespuestaCuestionario();
resp.Tipo = enumTipoPregunta.VALOR_UNICO;
resp.dValor = R_4;
resp.Puntaje = getPuntaje(4);
if (R_4 > 0)
resp.Confirmada = true;
resp.Observacion = Comentarios[3];
respuestas.Add(resp);
#endregion
return respuestas;
}
}
}
| 156e90f93b2aad38c534639454459806f8deab92 | ["C#"] | 56 | C# | alonsolopezr/pcpu | 75f0210cc01a74764ee8aa71a6a9fbafd183e8f1 | 45255df29f9c6dba10dc399a6fd18345cd364132 | refs/heads/master |
<file_sep># Insta-DB
- A small Instagram-like database application that runs on an Oracle database, with a Python command-line interface.
## Requirements
[](https://www.python.org/downloads/release/python-365)
[](http://www.oracle.com/technetwork/database/database-technologies/express-edition/downloads/index.html)
[](https://pypi.org/project/cx_Oracle)
## Steps
1. Create your Oracle account & workspace account
2. Run the SQL file in Oracle 11G
3. Open the Python file in IDLE and run it.
## Output -


<file_sep>import cx_Oracle as oracle
con = oracle.connect('system/tejas2304@localhost/xe')
con.autocommit = True
cur = con.cursor()
print("Enter your choice:")
print("1-- Max likes")
print("2-- Min likes")
print("3-- Who liked the most")
print("4-- Music pictures")
print("5-- Popular Tag")
print("6-- Most liked user")
print("7-- Old Tagging")
print("8-- Delete inactive users")
print("9-- Exit")
while True:
ch = int(input("Enter Choice: "))
if ch == 1:
cur.execute("""select pic_id from INSTA_LIKES group by pic_id having count(*)=(select max(count(*)) from INSTA_LIKES group by pic_id)""")
for result in cur:
print("Pic id: ", result[0])
elif ch == 2:
cur.execute("select pic_id from INSTA_LIKES group by pic_id having count(*)=(select min(count(*)) from INSTA_LIKES group by pic_id)")
for result in cur:
print("Pic id: ", result[0])
elif ch == 3:
cur.execute("select user_id from INSTA_LIKES group by user_id having count(user_id)=(select max(count(*)) from INSTA_LIKES group by user_id)")
for result in cur:
print("User id: ", result[0])
elif ch == 4:
cur.execute("select pic_id from INSTA_PICS where music=1")
for result in cur:
print("Pic id: ", result[0])
elif ch == 5:
max = 0
mm = "All"
cur.execute("select count(*) from INSTA_PICS where arts=1")
ft = cur.fetchall()[0][0]
if max < ft:
max = ft
mm = "Arts"
cur.execute("select count(*) from INSTA_PICS where history=1")
ft = cur.fetchall()[0][0]
if max < ft:
max = ft
mm = "History"
cur.execute("select count(*) from INSTA_PICS where engineering=1")
ft = cur.fetchall()[0][0]
if max < ft:
max = ft
mm = "Engineering"
cur.execute("select count(*) from INSTA_PICS where science=1")
ft = cur.fetchall()[0][0]
if max < ft:
max = ft
mm = "Science"
cur.execute("select count(*) from INSTA_PICS where music=1")
ft = cur.fetchall()[0][0]
if max < ft:
max = ft
mm = "Music"
print("Most Popular tag is: ", mm)
elif ch == 6:
users=[]
cur.execute("select user_id from INSTA_USERS")
for result in cur:
users.append(int(result[0]))
mx = 0
us = 0
for user in users:
cur.execute("select count(*) from INSTA_LIKES where pic_id in (select pic_id from INSTA_PICS where user_id=" +str(user) + ")")
fx = cur.fetchall()[0][0]
if fx > mx:
mx = fx
us = user
print("Most liked user is: ", us)
elif ch == 7:
cur.execute("select pic_id from INSTA_PICS where extract(year from sysdate)-extract(year from pic_date)>=3")
print("Old Pics (Pic ids): ")
for result in cur:
print(result[0])
elif ch == 8:
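# For each selected user, delete their likes and their pictures before deleting the user row itself.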
cur.execute("SELECT user_id from INSTA_PICS where extract(year from sysdate)-extract(year from pic_date)<=1")
result = cur.fetchall()
for res in result:
temp = str(res[0])
cur.execute("Delete from INSTA_LIKES where user_id=" + str(res[0]))
con.commit()
cur.execute("SELECT * FROM INSTA_LIKES")
print(cur.fetchall())
con.commit()
cur.execute("Delete from INSTA_PICS where user_id=" + str(res[0]))
con.commit()
cur.execute("Delete from INSTA_USERS where user_id=" + str(res[0]))
print("User ", res[0], " deleted")
elif ch == 9:
break
con.commit()
con.close()
<file_sep> --DROP TABLE INSTA_LIKES
--DROP TABLE INSTA_PICS
--DROP TABLE INSTA_USERS
CREATE TABLE INSTA_USERS(
user_id number(5) primary key,
user_fname varchar2(20),
user_lname varchar2(20),
email varchar2(20)
)
insert into INSTA_USERS values (1,'Apoorva','Chouhan','<EMAIL>')
insert into INSTA_USERS values (2,'Tejas','Jadhav','<EMAIL>')
insert into INSTA_USERS values (5,'Heta','Naik','<EMAIL>')
insert into INSTA_USERS values (8,'Malak','Parmar','<EMAIL>')
insert into INSTA_USERS values (10,'Vatsal','Shah','<EMAIL>')
insert into INSTA_USERS values (11,'Nymphia','Wanchoo','<EMAIL>')
CREATE TABLE INSTA_PICS(
pic_id number(5) primary key,
pic_caption varchar2(50),
pic_path varchar2(50),
pic_date date,
user_id number(10) references INSTA_USERS(user_id),
arts number(1) DEFAULT 0 NOT NULL,
science number(1) DEFAULT 0 NOT NULL,
music number(1) DEFAULT 0 NOT NULL,
history number(1) DEFAULT 0 NOT NULL,
Engineering number(1) DEFAULT 0 NOT NULL
)
insert into INSTA_PICS values (1,'holiday','C:/pics/64565hrht4y.jpg','11-Mar-17',1,1,0,0,1,0)
insert into INSTA_PICS values (2,'holi','C:/mypics/64565hrhjjj4y.jpg','12-May-12',2,0,0,0,1,0)
insert into INSTA_PICS values (3,'bye','C:/photos/6456aa5t4y.jpg','01-Jan-17',5,0,1,0,1,0)
insert into INSTA_PICS values (4,'happy','D:/64565sjsjt4y.jpg','11-Feb-17',8,1,0,0,1,0)
insert into INSTA_PICS values (5,'ssup','D:/pictures/645jajht4y.jpg','11-Mar-17',10,1,0,0,0,1)
insert into INSTA_PICS values (6,'morning','C:/inpics/6akak65hrht4y.jpg','01-Apr-17',11,0,0,1,1,1)
insert into INSTA_PICS values (7,'night','C:/pics/647885hrht4y.jpg','11-Aug-17',1,1,0,1,1,1)
insert into INSTA_PICS values (8,'heyyyaa','C:/tj/pics/6kak565hrht4y.jpg','12-Jul-17',2,0,1,1,1,0)
insert into INSTA_PICS values (9,'hulalla','C:/files/pics/6jaja65hrht4y.jpg','11-Jul-08',8,0,0,0,1,0)
insert into INSTA_PICS values (10, 'yoooo', 'D:/mygalery/ajjarht4y.jpg', '02-Oct-18', 5,1,0,1,1,0)
insert into INSTA_PICS values (11, 'afternoon', 'C:/files/photos/ajaj565hrht4y.jpg', '03-Dec-10',10,1,0,0,1,1)
insert into INSTA_PICS values (12,'cyaaa','C:/files/img/aajht4y.jpg','04-Jul-11',11,1,0,0,1,1)
insert into INSTA_PICS values (13,'noname','C:/instaimgs/647885jsjrht4y.jpg','11-Nov-17',5,1,0,1,1,1)
insert into INSTA_PICS values (14,'justme','D:/instapic/642jmjmjht4y.jpg','11-Sep-17',2,1,0,1,0,1)
CREATE TABLE INSTA_LIKES(
pic_id number(5) references INSTA_PICS(pic_id) not null,
user_id number(10) references INSTA_USERS(user_id) not null
)
insert into INSTA_LIKES values(1,1)
insert into INSTA_LIKES values(7,2)
insert into INSTA_LIKES values(2,10)
insert into INSTA_LIKES values(7,5)
insert into INSTA_LIKES values(7,8)
insert into INSTA_LIKES values(7,11)
insert into INSTA_LIKES values(1,2)
insert into INSTA_LIKES values(11,10)
insert into INSTA_LIKES values(10,8)
insert into INSTA_LIKES values(9,5)
insert into INSTA_LIKES values(10,11)
insert into INSTA_LIKES values(12,10)
insert into INSTA_LIKES values(13,1)
insert into INSTA_LIKES values(8,1)
insert into INSTA_LIKES values(2,5)
insert into INSTA_LIKES values(1,5)
insert into INSTA_LIKES values(7,10)
insert into INSTA_LIKES values(6,2)
insert into INSTA_LIKES values(14,5)
insert into INSTA_LIKES values(2,1)
insert into INSTA_LIKES values(14,1)
insert into INSTA_LIKES values(5,1)
insert into INSTA_LIKES values(4,11)
insert into INSTA_LIKES values(2,2)
| a607ddfc9d220ecdef9ff8ec467417d84a941a14 | ["Markdown", "SQL", "Python"] | 3 | Markdown | TeeeJaey/Insta-DB | 3b7a147adbe487270867529d97fb50e71ed3f7ff | e71700d20604bbbcf988204da902dc2bb72b54b2 | refs/heads/master |
<repo_name>garyconnelly1/TheoryOfAlgorithms-SHA256Project<file_sep>/README.md
# TheoryOfAlgorithms-SHA256Project
## Download and Run:
* All you need to run this program is the GCC compiler - https://sourceforge.net/projects/tdm-gcc/ .
* Download this project by either downloading the .zip, or by typing "git clone https://github.com/garyconnelly1/TheoryOfAlgorithms-SHA256Project.git" into a command line.
* Compile the program with GCC (for example `gcc -o sha256 sha256.c`) and then run it by typing sha256 and pressing enter.
* You will be presented with three options:
- Press 1 to hash a file.
- Press 2 to enter a string to hash.
- Press 3 to use the SHA256 login system.
## Sha256:
Sha256 is a hashing algorithm designed by the NSA to produce digests that are not computationally feasible to invert. To learn how this algorithm works, I followed the video series by <NAME> as well as the government SHA standard, which can be found online.
## Additional features:
On top of what was completed through watching the video series, a few extra features were added.
### Error checking:
There is error checking on all file inputs to ensure the file exists and can be used.
### Big Endian:
There are functions included that check if the system the program is running on is in Big Endian or Little Endian. The SHA256 algorithm requires Big Endian notation.
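As a rough, self-contained sketch (not the exact macros used in `sha256.c`), an endianness check and a 32-bit byte swap can look like this:

```c
#include <stdint.h>
#include <stdio.h>

/* Returns 1 on a big-endian machine, 0 on a little-endian one. */
static int is_big_endian(void)
{
    uint16_t probe = 0x0102;
    return *(uint8_t *)&probe == 0x01; /* the first byte in memory is the high byte */
}

/* Reverses the byte order of a 32-bit word. */
static uint32_t swap32(uint32_t x)
{
    return (x >> 24) | ((x & 0x00FF0000u) >> 8) | ((x & 0x0000FF00u) << 8) | (x << 24);
}

int main(void)
{
    printf("big endian: %d\n", is_big_endian());
    printf("%08x -> %08x\n", 0x11223344u, swap32(0x11223344u));
    return 0;
}
```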
### User entered string:
The user has an option to enter their own string to be hashed. This is done by creating a text file out of the user's input and passing that file to the algorithm.
### Login system:
This was added to showcase a possible use case of the sha256 algorithm. Users can create an "account" by supplying a username and password. The original password the user enters is saved in a file named after the user's username. This file is then hashed, and a new file is output with the hashed version of the user's password as its contents.
The idea was also to create a login flow whereby the user enters their credentials and their password is hashed and checked against the already hashed password stored for that user. Unfortunately, I did not get time to finish implementing the login method.
The basic idea of adding this feature was to showcase that even if a malicious entity gained access to the machine and could read the hashed passwords, they would be useless, as they are extremely difficult to invert (to reverse engineer the hashed value back to the original input). Obviously, in a real system the original passwords would not be stored in a plain text file as they are in this case; this was done just to make it easier to pass the file to the algorithm.
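Had the login method been completed, the check itself would reduce to comparing digests. Below is a minimal, hypothetical sketch (not the project's actual code), assuming the stored and freshly computed hashes are available as hex strings:

```c
#include <stdio.h>
#include <string.h>

/* Returns 1 when the freshly computed digest matches the stored one. */
static int login_ok(const char *stored_digest, const char *entered_digest)
{
    return strcmp(stored_digest, entered_digest) == 0;
}

int main(void)
{
    /* Placeholder digests; in the real program both would come from sha256(). */
    const char *stored  = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
    const char *entered = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
    printf(login_ok(stored, entered) ? "Login successful\n" : "Access denied\n");
    return 0;
}
```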
## Bugs:
While all of the hashed values are consistent (the same input always produces the same hash value), they do not match online implementations of the sha256 algorithm. This obviously means there is a mistake somewhere in the algorithm that I couldn't find at the time of submission.
## Research:
Along with reading the SHA standard and watching the video series, I also followed a course on Tutorials Point to get up to scratch with C programming: https://www.tutorialspoint.com/cprogramming/ .
## Author:
<NAME> - G00336837.
<file_sep>/sha256.c
// <NAME> - G00336837
#include <stdio.h>
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
// From - esr.ibiblio.org/?p=5095
#define SWAP_UINT32(x) (((x) >> 24) | (((x) & 0x00FF0000) >> 8) | (((x) & 0x0000FF00) << 8 ) | ((x) << 24))
#define IS_BIG_ENDIAN (*(uint16_t *)"\0\xff" < 0x100)
#define DATA_SIZE 1000
#define ERROR_MESSAGE "Invalid input, please try again! \n"
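// A 512-bit message block that can be read as 64 bytes, 16 32-bit words or 8 64-bit words.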
union msgblock{
uint8_t e[64];
uint32_t t[16];
uint64_t s[8];
};
uint32_t Output[8]; // Declare array to allow for global access to the current hash.
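// Padding state: READ = still reading the file, PAD0/PAD1 = one more block of padding is needed (without/with the leading 1 bit), FINISH = all blocks produced.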
enum status {READ, PAD0, PAD1, FINISH};
void sha256(FILE *file);
// =========================================== Added features.
void loginSystem();
void login();
void signUp();
int cfileexists(const char * filename);
void enterString();
// ========================================= End Added features.
int nextMsgBlock(FILE *file, union msgblock *M, enum status *S, uint64_t *noBits);
// See Section 4.1.2 for definition.
uint32_t sig0(uint32_t x);
uint32_t sig1(uint32_t x);
uint32_t SIG0(uint32_t x);
uint32_t SIG1(uint32_t x);
uint32_t Ch(uint32_t x, uint32_t y, uint32_t z);
uint32_t Maj(uint32_t x, uint32_t y, uint32_t z);
// See Section 3.2 for definitions.
uint32_t rotr(uint32_t n, uint32_t x);
uint32_t shr(uint32_t n, uint32_t x);
// Method to convert from Little Endian to Big Endian.
unsigned int LitToBigEndian(unsigned int x);
// The K constants, defined in section 4.4.4.
static const uint32_t K[] = {
0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5,
0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5,
0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3,
0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174,
0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc,
0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da,
0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7,
0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967,
0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13,
0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85,
0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3,
0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070,
0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5,
0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3,
0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208,
0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2
};
int main (int argc, char *argv[]){
// login();
FILE *file;
char menuOption[DATA_SIZE]; // Store the current menu choice.
/*
Start Menu.
*/
printf("Press 1 to hash a file:\n");
printf("Press 2 to enter a string to hash:\n");
printf("Press 3 to use a SHA256 login/registration system:\n");
gets(menuOption);
if(strcmp(menuOption, "1") == 0){ // If they chose one, get them to enter the name of the file to hash.
printf("Enter the name of the file you wish to hash(include the extension eg. .txt)");
gets(menuOption);
// Open the file and send it to the hash algorithm.
if((file = fopen(menuOption, "r"))!=NULL){
sha256(file);
}
else{ // If, for any reason this file cannot be opened.
printf("Error occurred while opening file, please try again!");
}
fclose(file);
} // End if menuOption ==1.
else if(strcmp(menuOption, "2") == 0){
enterString(); // Send them to the enterString() method to handle the user input.
}
else if(strcmp(menuOption, "3") == 0){
loginSystem(); // Send them to the loginSystem() method.
}else{// If they enter an option that is not 1, 2 or 3.
printf(ERROR_MESSAGE);
}
return 0;
}
void sha256(FILE *file){
union msgblock M;
uint64_t nobits = 0;
enum status S = READ;
// Message shedule (Section 6.2).
uint32_t W[64];
// Working variables (Section 6.2).
uint32_t a,b,c,d,e,f,g,h;
// Two temporary variables (Section 6.2).
uint32_t T1, T2;
// The hash values (Section 6.2).
uint32_t H[8] = {
0x6a09e667,
0xbb67ae85,
0x3c6ef372,
0xa54ff53a,
0x510e527f,
0x9b05688c,
0x1f83d9ab,
0x5be0cd19
};
// uint32_t M[16] = {0,0,0,0,0,0,0,0};
// For looping.
int t,i;
while(nextMsgBlock(file, &M, &S, &nobits)){
// From page 22, W[t] = M[t] for 0 <= t <= 15.
for(t = 0; t < 16; t++){
// The standard treats message words as big-endian, so swap them on little-endian machines.
if(IS_BIG_ENDIAN){
W[t] = M.t[t];
}else{
W[t] = SWAP_UINT32(M.t[t]);
}
}
for(t = 16; t < 64; t++)
W[t] = sig1(W[t-2]) + W[t-7] + sig0(W[t-15]) + W[t-16];
// Initialize a,b,c, ... ,h as per step 2, Page 22.
a = H[0]; b = H[1]; c = H[2]; d = H[3];
e = H[4]; f = H[5]; g = H[6]; h = H[7];
// Step 3.
for(t = 0; t < 64; t ++){
T1 = h + SIG1(e) + Ch(e,f,g) + K[t] + W[t];
T2 = SIG0(a) + Maj(a,b,c);
h = g;
g = f;
f = e;
e = d + T1;
d = c;
c = b;
b = a;
a = T1 + T2;
}// End for (the 64 compression rounds).
// Step 4.
H[0] = a + H[0];
H[1] = b + H[1];
H[2] = c + H[2];
H[3] = d + H[3];
H[4] = e + H[4];
H[5] = f + H[5];
H[6] = g + H[6];
H[7] = h + H[7];
}// End while.
// Check if it is already Big Endian.
if(IS_BIG_ENDIAN){
printf("%08x %08x %08x %08x %08x %08x %08x %08x\n", H[0],H[1],H[2],H[3],H[4],H[5],H[6],H[7]);
for(t = 0; t < 8; t++){
Output[t] =H[t];
}
}else{
printf("Big Endian: %08x %08x %08x %08x %08x %08x %08x %08x\n",
SWAP_UINT32(H[0]),
SWAP_UINT32(H[1]),
SWAP_UINT32(H[2]),
SWAP_UINT32(H[3]),
SWAP_UINT32(H[4]),
SWAP_UINT32(H[5]),
SWAP_UINT32(H[6]),
SWAP_UINT32(H[7])
);
for(t = 0; t < 8; t++){
Output[t] =SWAP_UINT32(H[t]); // Populate the global Output[] array to have global access to the hash values.
}
}// End if else (IS_BIG_ENDIAN).
}// End sha. (Look up Big Endian and Little Endian.)
uint32_t sig0(uint32_t x){
// See Sections 3.2 and 4.1.2 for definitions.
return (rotr(7,x) ^ rotr(18, x) ^ shr(3,x));
}
uint32_t sig1(uint32_t x){
// See Sections 3.2 and 4.1.2 for definitions.
return (rotr(17,x) ^ rotr(19,x) ^ shr(10,x));
}
uint32_t rotr(uint32_t n, uint32_t x){
// See Section 3.2 for definition.
return (x >> n) | (x << (32-n));
}
uint32_t shr(uint32_t n, uint32_t x){
return (x >> n);
}
uint32_t SIG0(uint32_t x){
return (rotr(2,x) ^ rotr(13,x) ^ rotr(22,x));
}
uint32_t SIG1(uint32_t x){
return (rotr(6,x) ^ rotr(11,x) ^ rotr(25,x));
}
uint32_t Ch(uint32_t x, uint32_t y, uint32_t z){
return ((x & y) ^ ((!x) & z));
}
uint32_t Maj(uint32_t x, uint32_t y, uint32_t z){
return ((x & y) ^ (x & z) ^ (y & z));
}
// Method definition for LitToBigEndian.
unsigned int LitToBigEndian(unsigned int x){
return (((x >> 24) & 0x000000ff) | ((x >> 8) & 0x0000ff00) | ((x << 8) & 0x00ff0000) | ((x << 24) & 0xff000000));
}
// Message Block function here.
int nextMsgBlock(FILE *file, union msgblock * M, enum status *S, uint64_t *nobits){
// The number of bytes we get from fread.
uint64_t nobytes;
//For looping.
int i;
// If we have finished all of the message blocks, then S should be FINISH.
if(*S == FINISH)
return 0;
// Otherwise, check if we need another block full of padding.
if(*S == PAD0 || *S == PAD1){
// Set the first 56 bytes to all zero bits.
for(i = 0; i < 56; i++)
M->e[i] = 0x00;
// Set the last 64 bits to the number of bits in the file( should be big-endian).
M->s[7] = *nobits;
// If S was PAD1, then set the first bit of M to one (check this before changing S).
if(*S == PAD1)
M->e[0] = 0x80;
// Tell S we are finished.
*S = FINISH;
// Keep the loop in sha256 going for one more iteration.
return 1;
}// End if.
// If we get down here, we haven't finished reading the file (S == READ).
nobytes = fread(M->e, 1, 64, file);
// Keep track of the number of bytes we've read.
*nobits = *nobits + (nobytes * 8);
// If we read less than 56 bits, we can put all padding in this message block.
if(nobytes < 56){
// Add the one bit, as per the standard.
M->e[nobytes] = 0x80;
// Add the zero bits until the last 64 bits.
while(nobytes < 56){
nobytes = nobytes + 1;
M->e[nobytes] = 0x00;
}//End while nobytes < 56
// Append the file size in bits as an unsigned 64 bit int. (Should be Big Endian).
M->s[7] = *nobits;
// Tell S we are finished.
*S = FINISH;
// Otherwise, check if we can put some padding into this message block.
}//End if nobytes < 56
else if (nobytes < 64){
// Tell S we need another message block, with padding but no one bit.
*S = PAD0;
// Put the one bit into the current block.
M->e[nobytes] = 0x80;
// Pad the rest of this block with zero bits.
while(nobytes < 64){
nobytes = nobytes + 1;
M->e[nobytes] = 0x00;
}// End while nobytes < 64
}// End else if nobytes < 64
// Otherwise, check if we are just at the end of the file.
else if(feof(file)){
// Tell S that we need another message block with all the padding.
*S = PAD1;
}// End else if feof(file)
// If we get this far, then return 1 so that the function is called again.
return 1;
}// End nextMsgBlock
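/*
   Worked example of the padding scheme implemented above (for illustration only): hashing the
   three-byte ASCII message "abc" (0x61 0x62 0x63) requires a single 64-byte message block:
     bytes  0-2  : 61 62 63                  (the message itself)
     byte   3    : 80                        (the mandatory 1 bit followed by seven 0 bits)
     bytes  4-55 : 00 ... 00                 (zero padding)
     bytes 56-63 : 00 00 00 00 00 00 00 18   (the message length, 24 bits, as a big-endian 64-bit integer)
   The length has to end up in big-endian byte order, which is why the comments above say the
   value written to M->s[7] "should be big-endian".
*/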
void enterString(){ // Allow the user to enter their own string to be hashed.
char input[DATA_SIZE]; // To store the users input.
FILE *filePointer; // Declare a pointer to the file we want the users input to be written to.
printf("Enter the string you wish to hash:\n");
gets(input);
filePointer = fopen("input.txt", "w"); // Create the file.
if(filePointer == NULL){ // If an error occurred creating the file.
printf("Error writing the input to a file. Please try again.");
exit(EXIT_FAILURE);
}// End if filePointer == NULL.
fputs(input, filePointer); // Write the users input to the file.
fclose(filePointer); // Close the file.
if((filePointer = fopen("input.txt", "r"))!=NULL){ // Open the same file, this time with read permissions.
sha256(filePointer); // Hash the file and display the answer to the user.
}
else{
printf("Error occurred while opening file, please try again!");
}
fclose(filePointer); // Close the file.
}
void loginSystem(){
/*
This is to display a possible application of the SHA256 algorithm.
The only purpose of this particular function, is to send the user to the right part of the program.
*/
char menuOption[DATA_SIZE];
printf("Press 1 to sign up. Press 2 to log in:\n");
gets(menuOption);
if(strcmp(menuOption, "1") == 0){
signUp(); // Send the user to the signup() method.
}else if(strcmp(menuOption, "2") == 0){
// login(); Login method was incomplete, so it is commented out to prevent crashing the program.
}
else{
printf(ERROR_MESSAGE);
}
} // End loginSystem().
void signUp(){
/*
A method to simulate creating a user account, using the sha256 algorithm to hash the users password.
*/
char *a[8]; // Declare array that stores the string values of the uint32_t[] in Output.
/*
Created 8 different temp variables to solve pointer issues.
*/
char temp[8];
char temp1[8];
char temp2[8];
char temp3[8];
char temp4[8];
char temp5[8];
char temp6[8];
char temp7[8];
printf("================== Sign up ================== \n");
char data[DATA_SIZE]; // To get data from the users.
char userName[DATA_SIZE];
char fileName[DATA_SIZE];
FILE *filePointer;
FILE *outFile;
char hashFileName[DATA_SIZE];
//char buffer[DATA_SIZE];
printf("Enter a username: "); // Get the users username.
gets(userName);
strcat(userName, ".txt"); // Append the username to .txt to create a file for that user.
if(cfileexists(userName)){ // Check if that username already exists by seeing if a file by the same name exists.
printf("Username already exists, please try another one!");
exit(EXIT_FAILURE);
}else{
filePointer = fopen(userName, "w"); // Point to the file with write permissions.
if(filePointer == NULL){
printf("Unable to create file. \n");
exit(EXIT_FAILURE);
}
printf("Enter your password: "); // Get the user to enter a password.
gets(data);
fputs(data, filePointer); // Write the plain text password to the file.
fclose(filePointer); // Close the file.
// Try to hash that file.
if((filePointer = fopen(userName, "r"))!=NULL){
sha256(filePointer); // Create the hash value of the password.
printf("OUTPUT ---> \n");
// Check if it is already Big Endian.
printf("%08x %08x %08x %08x %08x %08x %08x %08x\n", Output[0],Output[1],Output[2],Output[3],Output[4],Output[5],Output[6],Output[7]);
strcpy(hashFileName, "hash"); // Prefix the word "hash" to the start of the files name.
strcat(hashFileName, userName); // Create another file with the hash of the user's password, called hash*userName*.txt.
/*
Had to copy elements to the array like this because elements were getting overwritten by the last element.
*/
sprintf(temp, "%08x", Output[0]); // Convert the uint32_t into a hexadecimal string. https://stackoverflow.com/questions/3464194/how-can-i-convert-an-integer-to-a-hexadecimal-string-in-c
a[0] = temp;
printf("HEX %s\n", a[0]);
sprintf(temp1, "%08x", Output[1]);
a[1] = temp1;
printf("HEX %s\n", a[1]);
sprintf(temp2, "%08x", Output[2]);
a[2] = temp2;
printf("HEX %s\n", a[2]);
sprintf(temp3, "%08x", Output[3]);
a[3] = temp3;
printf("HEX %s\n", a[3]);
sprintf(temp4, "%08x", Output[4]);
a[4] = temp4;
printf("HEX %s\n", a[4]);
sprintf(temp5, "%08x", Output[5]);
a[5] = temp5;
printf("HEX %s\n", a[5]);
sprintf(temp6, "%08x", Output[6]);
a[6] = temp6;
printf("HEX %s\n", a[6]);
sprintf(temp7, "%08x", Output[7]);
a[7] = temp7;
printf("HEX %s\n", a[7]);
/*
Also had to write elements to the file like this because elements were getting overwritten.
*/
printf("Here: %s\n", a[0]); // Print out the first element of a to make sure is wasn't overridden by the last element.
outFile = fopen(hashFileName, "a"); // Write the hashed password to the newly created file.
fputs(a[0], outFile);
fputs(a[1], outFile);
fputs(a[2], outFile);
fputs(a[3], outFile);
fputs(a[4], outFile);
fputs(a[5], outFile);
fputs(a[6], outFile);
fputs(a[7], outFile);
fclose(outFile); // Close the file.
}
else{
printf("Error occurred while opening file, please try again!");
}
fclose(filePointer); // Close the file.
}// End else.
printf("Password created and stored! \n");
}
/*
Didn't get this method finished, so I left it out of the menu options.
This file was added close to the deadline, so it is incomplete and no doubt has a lot of bugs in it.
The idea for this method was to have the user enter a username and password, then search the file system for the file with their username preceded by the word "hash".
If such a file exists, read it in and save its contents. Hash the password the user just entered to log in by saving it to the Output global variable. Convert that
output variable to a string representation of the hexadecimal number. Finally, compare that string with the string read in from the user's file. If they match, the
user is validated and just sees a "Welcome to app" message. If not, the password is incorrect. (A sketch of this missing comparison step follows the login() function below.)
*/
void login(){
printf("================== Login ================== \n");
char data[DATA_SIZE];
char name[DATA_SIZE];
char userName[DATA_SIZE];
char password[DATA_SIZE];
char *a[8];
/*
Also created the temp array like this to get around the pointer issue.
*/
char temp[8];
char temp1[8];
char temp2[8];
char temp3[8];
char temp4[8];
char temp5[8];
char temp6[8];
char temp7[8];
strcpy(name, "hash"); // The file we need to find has to begin with the word hash.
char *result;
char *result2;
FILE *outFile;
char hashFileName[DATA_SIZE];
printf("Enter your username: ");
gets(userName);
strcat(userName, ".txt"); // Append ".txt" to the username so it can be searched in the file directory.
strcat(name,userName); // Append this username to "hash".
printf("name ---> %s\n", name);
strcat(hashFileName, "compare");
if(cfileexists(name)){
printf("Enter the password:\n");
gets(data);
strcpy(password, name); // password is uninitialised at this point, so copy rather than concatenate.
strcat(password, "<PASSWORD>");
outFile = fopen(password, "w");
if(outFile == NULL){
printf("Error writing the input to a file. Please try again.");
exit(EXIT_FAILURE);
}// End if filePointer == NULL.
fputs(data, outFile);
fclose(outFile);
// sha256(filePointer);
if((outFile = fopen(password, "r"))!=NULL){
sha256(outFile);
}
else{
printf("Error occurred while opening file, please try again!");
}
fclose(outFile);
}else{ // If the file doesn't exist, that means the username they entered doesn't exist.
printf("User doesn't exist, please sign up to continue.\n");
exit(EXIT_FAILURE);
}
strcat(hashFileName, password); // To give the resulting hashed file a different name.
// Get a handle on the hashed value
sprintf(temp, "%08x", Output[0]);
a[0] = temp;
printf("HEX %s\n", a[0]);
sprintf(temp1, "%08x", Output[1]);
a[1] = temp1;
printf("HEX %s\n", a[1]);
sprintf(temp2, "%08x", Output[2]);
a[2] = temp2;
printf("HEX %s\n", a[2]);
sprintf(temp3, "%08x", Output[3]);
a[3] = temp3;
printf("HEX %s\n", a[3]);
sprintf(temp4, "%08x", Output[4]);
a[4] = temp4;
printf("HEX %s\n", a[4]);
sprintf(temp5, "%08x", Output[5]);
a[5] = temp5;
printf("HEX %s\n", a[5]);
sprintf(temp6, "%08x", Output[6]);
a[6] = temp6;
printf("HEX %s\n", a[6]);
sprintf(temp7, "%08x", Output[7]);
a[7] = temp7;
printf("HEX %s\n", a[7]);
// Write out the resulting hash to a file:
outFile = fopen(hashFileName, "a");
fputs(a[0], outFile);
fputs(a[1], outFile);
fputs(a[2], outFile);
fputs(a[3], outFile);
fputs(a[4], outFile);
fputs(a[5], outFile);
fputs(a[6], outFile);
fputs(a[7], outFile);
fclose(outFile);
}
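/*
   A minimal sketch of the comparison step that the comment above login() describes but that was
   never finished. It is NOT part of the original program: the helper name hashesMatch() and its
   fixed 65-byte buffers are assumptions made purely for illustration, relying only on the global
   Output[] array and the standard headers this file already includes.
*/
int hashesMatch(const char *storedHashFile){
	char computed[65]; /* 8 words x 8 hex digits, plus the terminating NUL. */
	char stored[65];
	int i;
	FILE *fp;
	/* Rebuild the 64-character hex string from the hash that sha256() left in Output[]. */
	for(i = 0; i < 8; i++)
		sprintf(computed + (i * 8), "%08x", Output[i]);
	/* Read the previously stored hash back in. */
	fp = fopen(storedHashFile, "r");
	if(fp == NULL)
		return 0;
	if(fgets(stored, sizeof(stored), fp) == NULL)
		stored[0] = '\0';
	fclose(fp);
	/* The password is correct if and only if the two hex strings are identical. */
	return (strcmp(computed, stored) == 0);
}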
/*
* Check if a file exists using the fopen() function.
* Returns 1 if the file exists, otherwise returns 0.
*/
int cfileexists(const char * filename){
// printf("Youre userName: ---> %s\n", filename);
/* try to open file to read */
FILE *file;
if ((file = fopen(filename, "r")) != NULL){
fclose(file);
return 1;
}
return 0;
}
|
3773794768a6441fc31a90e1849552ea3a5b2172
|
[
"Markdown",
"C"
] | 2
|
Markdown
|
garyconnelly1/TheoryOfAlgorithms-SHA256Project
|
641dd4672d576d562a22195de85b068ff8cd4134
|
ec527cea7996a09a6ccf0b3536585367ef281597
|
refs/heads/master
|
<file_sep>// let num = 50;
// while (num < 55) {
// console.log(num);
// num++;
// }
// do {
// console.log(num);
// num++;
// }
// while (num < 55);
// for (let i = 1; i < 10; i++) {
// if (i === 6) {
// continue;
// }
// console.log(i);
// }
let num = 20;
function showFirstMessage(text) {
console.log(text);
num = 10;
}
showFirstMessage("hello world");
console.log(num);
// function calc(a, b) {
// return(a + b);
// }
// console.log(calc(7, 8));
// console.log(calc(2, 8));
// console.log(calc(12, 8));
function ret() {
let num = 10;
return num;
}
const anotherNum = ret();
console.log(anotherNum);
const logger = function () {
console.log("hello world");
};
logger();
const calc = (a, b) => {
return a + b;
};
console.log(calc(7, 9));
const str = "test";
const arr = [1, 2, 4];
console.log(str.toLowerCase());
console.log(str.toUpperCase());
console.log(str);
const fruit = "Some fruit";
console.log(fruit.indexOf("fruit"));
const logg = "hello world";
console.log(logg.slice(6));
console.log(logg.substring(6, 11));
console.log(logg.substr(6, 4));
const numm = 13.6;
console.log(Math.round(numm));
const nummm = "12.9px";
console.log(parseFloat(nummm));
function learnJavaScript(lang, callback) {
console.log(`Я учу: ${lang}`);
callback();
}
function done() {
console.log('Я прошел этот урок!');
}
learnJavaScript('JavaScript', done);
|
6f91012ce65e0631cbf3dafeaaeb83adc06a4557
|
[
"JavaScript"
] | 1
|
JavaScript
|
AgniGupta/js
|
912e4ac0b7969b672414ea140820de6ecd2737bc
|
6e06933fb9509ed3967c157a482cbde6e987c34d
|
refs/heads/master
|
<file_sep># IHM jQuery (OC Exercise)
## About
Exercises done with jQuery following the instructions of the activities on OpenClassrooms. It's an app where you can create your own form. If you want to test it, you need to use an Apache Server to run this little app.
Demo : https://snervan.github.io/jquery-ihm-oc/
Link to the exercises :
Activity 1 : https://openclassrooms.com/courses/simplifiez-vos-developpements-javascript-avec-jquery/exercises/461
Activity 2 : https://openclassrooms.com/courses/1631636-simplifiez-vos-developpements-javascript-avec-jquery/exercises/463
Activity 3 : https://openclassrooms.com/courses/1631636-simplifiez-vos-developpements-javascript-avec-jquery/exercises/465
<file_sep>/**************************************************************
* IHM Activity (activity in part 4 on OC) *
* *
* @author : Snervan (RetroMan on OC) *
* @project : IHM Activity *
* @description : generate our own form with IHM buttons *
* on the right. fadeIn and fadeOut right *
* form. Add help messages with AJAX. *
* *
* *
***************************************************************/
$(function() {
$('#droite > :button').hide().fadeIn(2000);
var formGauche = $('<form id="formulaire"></form>');
$('#gauche').prepend(formGauche);
// Prevent the left-hand form from being submitted
formGauche.submit(function(event) {
event.preventDefault();
});
// Disable the "Zone de texte" button for consistency reasons once the page has loaded
$('#droite > :button:eq(0)').prop("disabled", false);
$('#droite > :button:eq(1)').prop("disabled", true);
//On désactive le "bouton" et l'active uniquement s'il y a au moins un input
$('#droite > :button:eq(2)').prop("disabled", true);
// The span and the input are inserted differently if the button has already been added to the generated form
// Also used to check whether the "submit" has been added to the left-hand div
var buttonInserted = false;
/* Changes to 'true' once at least one text input has been added to the left-hand div.
Used as a condition to correctly re-enable the "bouton" button if the button addition was
cancelled or if we have finished with one of the two other forms */
var showingAddButton = false;
// Display a help message once the page has loaded, to guide the user
$('#droite').append(helpMessages('loadedPage'));
function reEnableButton(buttonToEnable) {
buttonToEnable.prop("disabled", false);
}
// Returns a newly created element that will contain the loaded help message
function helpMessages(helpName) {
var helpMessage = $("<div id='helpMessage'></div>");
var fileToLoad = 'aide.htm #' + String(helpName);
helpMessage.load(fileToLoad, function() {
helpMessage.hide().fadeIn(1500);
});
return helpMessage;
}
function createForms(formId, inputId, textToInsert) {
var typeForm = $("<form id='" + formId + "'></form>");
typeForm.append($("<label for='" + inputId + "'></label>").text(String(textToInsert + " ")));
typeForm.append($("<input type='text' id='" + inputId + "' required>"));
typeForm.append($("<br>"));
// Create the submit and reset inputs
typeForm.append("<button type='submit'> OK </button>");
typeForm.append("<button type='reset'> Annuler </button>");
typeForm.hide().fadeIn(1500);
return typeForm;
}
function removeForms(typeForm) {
typeForm.finish().fadeOut(400, function() {
$(this).remove();
});
$('#helpMessage').finish().fadeOut(400, function() {
$(this).remove();
});
if(buttonInserted) $('#droite > :button:eq(2)').fadeOut(700, function() {
$(this).remove();
});
$('#droite > :button:eq(0)').css("opacity", "1");
$('#droite > :button:eq(1)').css("opacity", "1");
}
//Evénement "clic" pour le bouton "Label"
$(':button:first').click(function() {
// Only used to remove the help message shown on startup once the page has loaded
if($('#helpMessage').length !== 0) {
$('#helpMessage').finish().fadeOut(200, function() {
$(this).remove();
});
}
var formLabel = createForms("labelNom", "labelText", "Nom du label :");
$(this).prop("disabled", true);
$(this).css("opacity", "0.3");
//"Bouton" désactivé si on click sur le bouton "Label"
$('#droite > :button:eq(2)').prop("disabled", true);
formLabel.submit(function(event) {
event.preventDefault();
var insertSpan = $("<span></span>").text($('#labelText').val() + " : ");
if(buttonInserted) {
$('#formulaire > :input[type="submit"]').before($("<span></span>").text($('#labelText').val() + " : "));
} else {
$('#formulaire').append($("<span></span>").text($('#labelText').val() + " : "));
}
$('#droite > :button:eq(1)').prop("disabled", false);
if (showingAddButton) { $('#droite > :button:eq(2)').prop("disabled", false); }
removeForms(formLabel);
});
formLabel.on('reset', function() {
removeForms(formLabel);
$('#droite > :button:eq(0)').prop("disabled", false);
$('#droite > :button:eq(0)').css("opacity", "1");
if (showingAddButton) { $('#droite > :button:eq(2)').prop("disabled", false); }
});
$('#droite').append(formLabel);
$(formLabel).after(helpMessages('aideLabel'));
$('#labelText').focus();
});
//Evènement "clic" pour le bouton "Zone de texte"
$(':button:eq(1)').click(function() {
$(this).prop("disabled", true);
$(this).css("opacity", "0.3");
//"Bouton" désactivé si on click sur le bouton "Zone de texte"
$('#droite > :button:eq(2)').prop("disabled", true);
var formTextZone = createForms("textZone", "idInput", "id de la zone de texte :");
formTextZone.submit(function(event) {
event.preventDefault();
var inputText = $("<input type='text'>");
inputText.attr('id', $('#idInput').val());
inputText.attr('name', $('#idInput').val());
if(buttonInserted) {
$('#formulaire > :input[type="submit"]').before(inputText);
$('#formulaire > :input[type="submit"]').before($('<br>'));
} else {
$('#formulaire').append(inputText);
$('#formulaire').append($('<br>'));
}
$('#droite > :button:eq(0)').prop("disabled", false);
// Input inserted: enable the "bouton" button, set the variable to true because there is at least one input,
// and remove the input-creation form
$('#droite > :button:eq(2)').prop("disabled", false);
showingAddButton = true;
removeForms(formTextZone);
});
formTextZone.on('reset', function() {
removeForms(formTextZone);
$('#droite > :button:eq(1)').prop("disabled", false);
$('#droite > :button:eq(1)').css("opacity", "1");
if (showingAddButton) { $('#droite > :button:eq(2)').prop("disabled", false); }
});
$('#droite').append(formTextZone);
$(formTextZone).after(helpMessages('aideTextZone'));
$('#idInput').focus();
});
/*Evènement "clic" pour le formulaire d'ajout du submit dans le div gauche.
Le bouton permettant d'afficher le formulaire d'ajout du submit ne s'active
uniquement sur la page web que si on a au moins un input type texte généré dans le formulaire. */
$('#droite > :button:eq(2)').click(function() {
var ButtonToReEnable = null;
$(this).prop("disabled", true);
$(this).css("opacity", "0.3");
var formButton = createForms("addButton", "idButton", "Texte du bouton :");
if($('#droite > :button:eq(1)').is(':disabled')) {
ButtonToReEnable = $('#droite > :button:eq(0)');
$('#droite > :button:eq(0)').prop("disabled", true);
} else {
if($('#droite > :button:eq(0)').is(':disabled')) {
ButtonToReEnable = $('#droite > :button:eq(1)');
$('#droite > :button:eq(1)').prop("disabled", true);
} else {
ButtonToReEnable = null;
}
}
formButton.submit(function(event) {
event.preventDefault();
if($('#formulaire > :input').length > 0) {
buttonInserted = true;
$('#formulaire').append($('<button type="submit"></button>').text($('#idButton').val()));
} else {
console.error("Aucun input dans le formulaire !");
$('#droite > :button:eq(2)').prop("disabled", false);
}
if(ButtonToReEnable !== null) reEnableButton(ButtonToReEnable);
removeForms(formButton);
});
formButton.on('reset', function() {
removeForms(formButton);
$('#droite > :button:eq(2)').prop("disabled", false);
$('#droite > :button:eq(2)').css("opacity", "1");
if(ButtonToReEnable !== null) reEnableButton(ButtonToReEnable);
});
$('#droite').append(formButton);
$(formButton).after(helpMessages('aideBouton'));
$('#idButton').focus();
});
});
|
c3a07ab88e55a445ecefff83f37ca5d6a6c21e71
|
[
"Markdown",
"JavaScript"
] | 2
|
Markdown
|
Snervan/jquery-ihm-oc
|
c2e7e5ed010c7745677898120ab7edacf277377c
|
915ca5b554726d855e23f5778b3745a6a21e8c18
|
refs/heads/master
|
<repo_name>buihoaithuong/NguyenVanDuc<file_sep>/app/src/main/java/com/example/thii/MainActivity.java
package com.example.thii;
import androidx.appcompat.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.AdapterView;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ListView;
import java.util.ArrayList;
public class MainActivity extends AppCompatActivity {
ListView lv;
ArrayList<hoaqua> hoaquaArrayList;
Adapter adapter;
EditText edt1, edt2;
Button btnUpdate, btnView, btnAdd;
int vitri = -1;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
AnhXa();
hoaquaArrayList = new ArrayList<>();
hoaquaArrayList.add(new hoaqua(R.drawable.ic_launcher_background,"Táo","táo là một cây ăn quả"));
hoaquaArrayList.add(new hoaqua(R.drawable.ic_launcher_background,"Cam","táo là một cây ăn quả"));
hoaquaArrayList.add(new hoaqua(R.drawable.ic_launcher_background,"ĐÀo","táo là một cây ăn quả"));
adapter = new Adapter(this, R.layout.dong_hoa_qua, hoaquaArrayList);
lv.setAdapter(adapter);
update();
add();
}
private void add() {
btnAdd.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
String teuDe = edt1.getText().toString();
String moTa = edt2.getText().toString();
hoaquaArrayList.add(new hoaqua(R.drawable.ic_launcher_background,teuDe,moTa));
adapter.notifyDataSetChanged();
}
});
}
private void update() {
lv.setOnItemLongClickListener(new AdapterView.OnItemLongClickListener() {
@Override
public boolean onItemLongClick(AdapterView<?> parent, View view, int position, long id) {
vitri = position;
edt1.setText(hoaquaArrayList.get(vitri).getTieude());
edt2.setText(hoaquaArrayList.get(vitri).getMota());
return false;
}
});
btnUpdate.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
hoaquaArrayList.set(vitri,new hoaqua(
R.drawable.ic_launcher_background,
edt1.getText().toString(),
edt2.getText().toString()
));
adapter.notifyDataSetChanged();
}
});
}
private void AnhXa() {
lv = (ListView)findViewById(R.id.listview);
edt1 = (EditText)findViewById(R.id.edt1);
edt2 = (EditText)findViewById(R.id.edt2);
btnUpdate = (Button)findViewById(R.id.btnupdate);
btnView = (Button)findViewById(R.id.btnview);
btnAdd = (Button)findViewById(R.id.btnadd);
}
}<file_sep>/app/src/main/java/com/example/thii/hoaqua.java
package com.example.thii;
import android.content.Intent;
public class hoaqua {
private int img;
private String tieude;
private String mota;
public hoaqua(int img, String tieude, String mota) {
this.img = img;
this.tieude = tieude;
this.mota = mota;
}
public int getImg() {
return img;
}
public void setImg(int img) {
this.img = img;
}
public String getTieude() {
return tieude;
}
public void setTieude(String tieude) {
this.tieude = tieude;
}
public String getMota() {
return mota;
}
public void setMota(String mota) {
this.mota = mota;
}
}
|
1d33284c585eaa91b148eb4b386fe45edb565dec
|
[
"Java"
] | 2
|
Java
|
buihoaithuong/NguyenVanDuc
|
14ce8e2dfd505a0c6ab55c517846a6c8b55ae299
|
7f2162bb124f26afef72c179b0a77fcd306baf4d
|
refs/heads/master
|
<file_sep>package com.zncm.imbrary;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.view.View;
public class ImActivity extends AppCompatActivity {
Context ctx;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_im);
ctx = this;
findViewById(R.id.btn5).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
startActivity(new Intent(ctx, ImActivity.class));
}
});
}
}
<file_sep>package com.zncm.dminter;
/**
* Created by
*/
public class Demo {
}
<file_sep># JmxAndroid
Some demos for learning Android, plus short notes on how to use a few high-quality libraries.
## ColorPickerView
https://github.com/skydoves/ColorPickerView
<file_sep>package com.zncm.jmxandroid.mvp;
/**
* Created by jiaomx on 2017/5/23.
*/
public class MvpAc {
}
<file_sep>package com.zncm.jmxandroid.utils.common.adapter;
import android.content.Context;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import java.util.List;
/**
* Created by jiaomx on 2017/7/12.
*/
public abstract class CommonAdapter<T> extends BaseAdapter {
private Context context;
private List<T> items;
public CommonAdapter(Context context, List<T> items) {
this.context = context;
this.items = items;
}
@Override
public int getCount() {
if (items != null && items.size() > 0) {
return items.size();
} else {
return 0;
}
}
@Override
public Object getItem(int position) {
if (items != null && items.size() > 0) {
return items.get(position);
} else {
return null;
}
}
@Override
public long getItemId(int position) {
return position;
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
return convertView(position, convertView, parent);
}
protected abstract View convertView(int position, View convertView, ViewGroup parent);
}
<file_sep>package com.zncm.jmxandroid.activityshortcut;
/**
* Created by jiaomx on 2017/5/11.
*/
public class XX {
}
<file_sep>package com.zncm.jmxandroid.os;
import android.media.AudioManager;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.widget.TextView;
import com.zncm.jmxandroid.R;
import com.zncm.jmxandroid.base.BaseAc;
import butterknife.BindView;
import butterknife.ButterKnife;
/**
* Created by jiaomx on 2017/6/1.
*/
public class AudioAc extends BaseAc {
@BindView(R.id.audioAc)
TextView audioAc;
AudioManager audioManager;
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
ButterKnife.bind(this);
audioManager = (AudioManager) getSystemService(AUDIO_SERVICE);
String str = "audioManager::" + audioManager.getMode() + " " + audioManager.getRingerMode() + " " + audioManager.isMicrophoneMute() + " " + audioManager.isSpeakerphoneOn();
audioAc.setText("==>>>" + str);
/**
* 3 2 false true  -> wrong
* 0 2 false true  -> correct
*/
/**
* This is the correct speaker-phone configuration; if it is not managed properly the global audio mode gets messed up and sound keeps coming out of the speaker even when headphones are plugged in
*/
audioManager.setMode(AudioManager.MODE_NORMAL);
audioManager.setRingerMode(AudioManager.RINGER_MODE_NORMAL);
audioManager.setMicrophoneMute(false);
audioManager.setSpeakerphoneOn(true);
str = "audioManager::" + audioManager.getMode() + " " + audioManager.getRingerMode() + " " + audioManager.isMicrophoneMute() + " " + audioManager.isSpeakerphoneOn();
audioAc.setText("==>>>" + str);
}
@Override
protected int getLayoutResource() {
return R.layout.aa_audioac;
}
}
<file_sep>package com.zncm.jmxandroid.view;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.support.annotation.Nullable;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
import com.zncm.jmxandroid.utils.Xutils;
import java.util.ArrayList;
/**
* Created by jiaomx on 2017/5/12.
*/
public class AZView extends View {
ArrayList<String> letters = new ArrayList<>();
private Paint paint;
int deviceWidth = Xutils.getDeviceWidth();
int deviceHeight = Xutils.getDeviceHeight();
public AZView(Context context) {
super(context);
init();
}
private void init() {
paint = new Paint();
paint.setStrokeWidth(5);
paint.setTextSize(Xutils.dip2px(16));
for (int i = 0; i <26 ; i++) {
char letter = (char) ('A'+i);
letters.add(String.valueOf(letter));
}
}
public AZView(Context context, @Nullable AttributeSet attrs) {
super(context, attrs);
init();
}
public AZView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
init();
}
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
Xutils.debug("onDraw------onDraw");
for (int i = 0; i <letters.size() ; i++) {
char letter = (char) ('A'+i);
canvas.drawText(letter+"",deviceWidth-Xutils.dip2px(20),Xutils.dip2px(20)+deviceHeight*i/30,paint);
Xutils.debug("letter::"+letter+" "+(Xutils.dip2px(20)+deviceHeight*i/30));
}
}
@Override
public boolean onTouchEvent(MotionEvent event) {
switch (event.getAction()){
case MotionEvent.ACTION_DOWN:
break;
case MotionEvent.ACTION_MOVE:
float fingerY = event.getY();
// Map the touch position back to a letter index, mirroring the layout used in onDraw().
int pos = (int) ((fingerY - Xutils.dip2px(20)) / (deviceHeight / 30));
if (pos >= 0 && pos < letters.size()) {
Xutils.debug("fingerY::" + fingerY + " " + letters.get(pos));
}
break;
}
return true;
}
}
<file_sep>package com.zncm.jmxandroid.ui;
import android.content.Context;
import android.os.Bundle;
import com.zncm.jmxandroid.R;
import com.zncm.jmxandroid.base.BaseAc;
public class RockerViewActivity extends BaseAc {
Context ctx;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
ctx = this;
}
@Override
protected int getLayoutResource() {
return R.layout.activity_rockerview;
}
}
<file_sep>package com.zncm.jmxandroid.receiver;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import com.zncm.jmxandroid.MainActivity;
import com.zncm.jmxandroid.utils.Xutils;
public class BootBroadcastReceiver extends BroadcastReceiver {
@Override
public void onReceive(Context context, Intent intent) {
if (Intent.ACTION_BOOT_COMPLETED.equals(intent.getAction())) { // Compare the broadcast's action string, not the Intent object itself.
Xutils.debug("11111111111111111111111111111111");
Xutils.tLong("启动完毕");
// Intent mainIntent = new Intent(context, MainActivity.class);
// mainIntent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
// context.startActivity(mainIntent);
Intent startApp = context.getPackageManager().getLaunchIntentForPackage("com.zncm.jmxandroid");
context.startActivity(startApp);
}
}
}
<file_sep>package com.zncm.jmxandroid.github;
import android.os.Bundle;
import android.view.View;
import android.widget.TextView;
import com.wei.android.lib.fingerprintidentify.FingerprintIdentify;
import com.wei.android.lib.fingerprintidentify.base.BaseFingerprint;
import com.zncm.jmxandroid.R;
import com.zncm.jmxandroid.base.BaseAc;
/**
* Created by jiaomx on 2017/5/31.
*/
public class FingerprintIdentifyAc extends BaseAc {
private TextView mTvTips;
private boolean mIsCalledStartIdentify = false;
private FingerprintIdentify mFingerprintIdentify;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mTvTips = (TextView) findViewById(R.id.mTvTips);
}
@Override
protected int getLayoutResource() {
return R.layout.aa_fingerprint;
}
@Override
protected void onResume() {
super.onResume();
if (mIsCalledStartIdentify) {
mTvTips.append("\nresume identify if needed");
mFingerprintIdentify.resumeIdentify();
return;
}
mIsCalledStartIdentify = true;
mFingerprintIdentify = new FingerprintIdentify(this, new BaseFingerprint.FingerprintIdentifyExceptionListener() {
@Override
public void onCatchException(Throwable exception) {
mTvTips.append("\n" + exception.getLocalizedMessage());
}
});
mTvTips.append("create fingerprintIdentify");
mTvTips.append("\nisHardwareEnable: " + mFingerprintIdentify.isHardwareEnable());
mTvTips.append("\nisRegisteredFingerprint: " + mFingerprintIdentify.isRegisteredFingerprint());
mTvTips.append("\nisFingerprintEnable: " + mFingerprintIdentify.isFingerprintEnable());
if (!mFingerprintIdentify.isFingerprintEnable()) {
mTvTips.append("\nSorry →_→");
return;
}
mTvTips.append("\nstart identify\nput your finger on the sensor");
mFingerprintIdentify.resumeIdentify();
mFingerprintIdentify.startIdentify(3, new BaseFingerprint.FingerprintIdentifyListener() {
@Override
public void onSucceed() {
mTvTips.append("\nonSucceed");
}
@Override
public void onNotMatch(int availableTimes) {
mTvTips.append("\nonNotMatch, " + availableTimes + " chances left");
}
@Override
public void onFailed() {
mTvTips.append("\nonFailed");
}
});
}
@Override
protected void onPause() {
super.onPause();
mTvTips.append("\nrelease");
mFingerprintIdentify.cancelIdentify();
}
public void release(View view) {
mTvTips.append("\nrelease by click");
mFingerprintIdentify.cancelIdentify();
}
}
<file_sep>//package com.zncm.jmxandroid.ui;
//
//import android.content.Context;
//import android.os.Bundle;
//import android.support.v7.app.AppCompatActivity;
//
//import com.zncm.jmxandroid.R;
//import com.zncm.jmxandroid.view.SmoothImageView;
//
///**
// * https://github.com/skydoves/ColorPickerView
// */
//public class ColorpickerviewActivity extends AppCompatActivity {
//
// Context ctx;
//
// @Override
// protected void onCreate(Bundle savedInstanceState) {
// super.onCreate(savedInstanceState);
// setContentView(R.layout.activity_colorpickerview);
// ctx = this;
// }
//}
<file_sep>package com.zncm.jmxandroid.view.pd;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.RectF;
import android.support.annotation.Nullable;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
import com.zncm.jmxandroid.R;
import com.zncm.jmxandroid.utils.Xutils;
/**
* Created by jiaomx on 2017/6/16.
*/
public class PDCropImageView extends View {
Context ctx;
Paint backgroundPaint;
Bitmap opBitmap;
Rect opBitmapRect = new Rect();
enum InOP {
IN_NULL, IN_LT, IN_RT,
IN_LB, IN_RB
}
InOP inOP = InOP.IN_NULL;
enum Status {
STATUS_NULL, STATUS_MOVE, STATUS_SCALE
}
Status status = Status.STATUS_NULL;
RectF opLeftTop;
RectF opRightTop;
RectF opLeftBottom;
RectF opRightBottom;
RectF cropRect = new RectF();
RectF tempRect = new RectF();
RectF imageRect = new RectF();
int opWidth = 46;
RectF outLeft = new RectF();
RectF outTop = new RectF();
RectF outRight = new RectF();
RectF outBottom = new RectF();
int zoomRate = -1;
private float oldx, oldy;
public PDCropImageView(Context context) {
super(context);
ctx = context;
init();
}
public PDCropImageView(Context context, @Nullable AttributeSet attrs) {
super(context, attrs);
ctx = context;
init();
}
private void init() {
backgroundPaint = new Paint();
backgroundPaint.setColor(Color.parseColor("#B0000000"));
opBitmap = BitmapFactory.decodeResource(ctx.getResources(), R.drawable.sticker_rotate);
opBitmapRect.set(0, 0, opBitmap.getWidth(), opBitmap.getHeight());
opLeftTop = new RectF(0, 0, opWidth, opWidth);
opRightTop = new RectF(opLeftTop);
opLeftBottom = new RectF(opLeftTop);
opRightBottom = new RectF(opLeftTop);
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
}
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
int w = getWidth();
int h = getHeight();
if (w <= 0 || h <= 0) {
return;
}
outTop.set(0, 0, w, cropRect.top);
outLeft.set(0, cropRect.top, cropRect.left, cropRect.bottom);
outRight.set(cropRect.right, cropRect.top, w, cropRect.bottom);
outBottom.set(0, cropRect.bottom, w, h);
canvas.drawRect(outTop, backgroundPaint);
canvas.drawRect(outLeft, backgroundPaint);
canvas.drawRect(outRight, backgroundPaint);
canvas.drawRect(outBottom, backgroundPaint);
int radius = opWidth >> 1;
opLeftTop.set(cropRect.left - radius, cropRect.top - radius,
cropRect.left + radius, cropRect.top + radius);
opRightTop.set(cropRect.right - radius, cropRect.top - radius,
cropRect.right + radius, cropRect.top + radius);
opLeftBottom.set(cropRect.left - radius, cropRect.bottom - radius,
cropRect.left + radius, cropRect.bottom + radius);
opRightBottom.set(cropRect.right - radius, cropRect.bottom - radius,
cropRect.right + radius, cropRect.bottom + radius);
canvas.drawBitmap(opBitmap, opBitmapRect, opLeftTop, null);
canvas.drawBitmap(opBitmap, opBitmapRect, opRightTop, null);
canvas.drawBitmap(opBitmap, opBitmapRect, opLeftBottom, null);
canvas.drawBitmap(opBitmap, opBitmapRect, opRightBottom, null);
}
@Override
public boolean onTouchEvent(MotionEvent event) {
float x = event.getX();
float y = event.getY();
switch (event.getAction() & MotionEvent.ACTION_MASK) {
case MotionEvent.ACTION_DOWN:
InOP tmp = isSelOp(x, y);
if (tmp != InOP.IN_NULL) {
inOP = tmp;
status = Status.STATUS_SCALE;
} else if (cropRect.contains(x, y)) {
status = Status.STATUS_MOVE;
}
break;
case MotionEvent.ACTION_UP:
status = Status.STATUS_NULL;
break;
case MotionEvent.ACTION_MOVE:
if (status == Status.STATUS_SCALE) {
tempRect.set(cropRect); // Copy the current values; assigning the reference would just alias the same RectF.
Xutils.debug("-------->>>" + cropRect.left + " " + tempRect.left);
switch (inOP) {
case IN_LT:
cropRect.left = x;
cropRect.top = y;
break;
case IN_RT:
cropRect.right = x;
cropRect.top = y;
break;
case IN_LB:
cropRect.left = x;
cropRect.bottom = y;
break;
case IN_RB:
cropRect.right = x;
cropRect.bottom = y;
break;
}
// if (cropRect.width() < opWidth) {
// cropRect.left = tempRect.left;
// cropRect.right = tempRect.right;
// }
// Xutils.debug("-------->>2>" + cropRect.left + " " + tempRect.left+" "+cropRect.width() +" "+ opWidth);
//
//
// if (cropRect.height() < opWidth) {
// cropRect.top = tempRect.top;
// cropRect.bottom = tempRect.bottom;
// }
if (cropRect.left < imageRect.left) {
cropRect.left = imageRect.left;
}
if (cropRect.right > imageRect.right) {
cropRect.right = imageRect.right;
}
if (cropRect.top < imageRect.top) {
cropRect.top = imageRect.top;
}
if (cropRect.bottom > imageRect.bottom) {
cropRect.bottom = imageRect.bottom;
}
// if (cropRect.left < imageRect.left
// || cropRect.right > imageRect.right
// || cropRect.top < imageRect.top
// || cropRect.bottom > imageRect.bottom
// || cropRect.width() < opWidth
// || cropRect.height() < opWidth) {
// cropRect.set(tempRect);
// }
// switch (inOP) {
// case IN_LT:
// case IN_RT:
// cropRect.bottom = (cropRect.right - cropRect.left) + cropRect.top;
// break;
// case IN_LB:
// case IN_RB:
// cropRect.top = cropRect.bottom
// - (cropRect.right - cropRect.left);
// break;
// }
//
//
// if (cropRect.left < imageRect.left
// || cropRect.right > imageRect.right
// || cropRect.top < imageRect.top
// || cropRect.bottom > imageRect.bottom
// || cropRect.width() < opWidth
// || cropRect.height() < opWidth) {
// cropRect.set(tempRect);
// }
invalidate();
Xutils.debug("zoomRate>>" + zoomRate + " " + cropRect.toString());
} else if (status == Status.STATUS_MOVE) {
tempRect.set(cropRect);// Save the original values so they can be restored later
translateRect(cropRect, x - oldx, y - oldy);
// Optimised boundary-checking logic
float mdLeft = imageRect.left - cropRect.left;
if (mdLeft > 0) {
translateRect(cropRect, mdLeft, 0);
}
float mdRight = imageRect.right - cropRect.right;
if (mdRight < 0) {
translateRect(cropRect, mdRight, 0);
}
float mdTop = imageRect.top - cropRect.top;
if (mdTop > 0) {
translateRect(cropRect, 0, mdTop);
}
float mdBottom = imageRect.bottom - cropRect.bottom;
if (mdBottom < 0) {
translateRect(cropRect, 0, mdBottom);
}
invalidate();
}
break;
case MotionEvent.ACTION_CANCEL:
break;
}
oldx = x;
oldy = y;
return true;
}
private static final void translateRect(RectF rect, float dx, float dy) {
rect.left += dx;
rect.right += dx;
rect.top += dy;
rect.bottom += dy;
}
private boolean limitZoomRect() {
/**
* Restrict the area the crop rectangle can be scaled within
*/
Xutils.debug("-cropRect.width-" + cropRect.width() + " " + opWidth);
return true;
}
private InOP isSelOp(float x, float y) {
InOP inOP = InOP.IN_NULL;
if (opLeftTop.contains(x, y)) {
inOP = InOP.IN_LT;
} else if (opRightTop.contains(x, y)) {
inOP = InOP.IN_RT;
} else if (opLeftBottom.contains(x, y)) {
inOP = InOP.IN_LB;
} else if (opRightBottom.contains(x, y)) {
inOP = InOP.IN_RB;
}
return inOP;
}
private static void scaleRect(RectF rect, float scaleX, float scaleY) {
float w = rect.width();
float h = rect.height();
float newW = scaleX * w;
float newH = scaleY * h;
float dx = (newW - w) / 2;
float dy = (newH - h) / 2;
rect.left -= dx;
rect.top -= dy;
rect.right += dx;
rect.bottom += dy;
}
// public void setCropRect(RectF rect) {
// this.cropRect = rect;
// imageRect.set(rect);
// cropRect.set(rect);
// scaleRect(cropRect, 0.5f, 0.5f);
// invalidate();
// }
public void setImgRectF(RectF rect) {
this.cropRect = rect;
imageRect.set(rect);
cropRect.set(rect);
scaleRect(cropRect, 0.5f, 0.5f);
invalidate();
}
public Bitmap cropBitmap(Bitmap bitmap){
Xutils.debug("cropBitmap::"+bitmap.getWidth()+" "+bitmap.getHeight()+" cropRect::"+cropRect.toString()+" "+cropRect.width() +" "+ cropRect.height());
/**
* int x, int y, int width, int height
*/
Bitmap resultBit = Bitmap.createBitmap(bitmap,
(int) cropRect.left, (int) ((int) cropRect.top-imageRect.top),
(int) cropRect.width(), (int) cropRect.height());
return resultBit;
}
}
<file_sep>package com.zncm.jmxandroid.os;
import android.app.Activity;
import android.content.Context;
import android.graphics.Color;
import android.os.Bundle;
import android.support.design.widget.AppBarLayout;
import android.support.design.widget.CollapsingToolbarLayout;
import android.support.v7.widget.Toolbar;
import com.zncm.jmxandroid.R;
import com.zncm.jmxandroid.base.BaseAc;
import com.zncm.jmxandroid.utils.StatusBarUtils;
public class PaperDetailsActivity extends BaseAc {
Activity ctx;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
ctx = this;
final Toolbar toolbar = (Toolbar) findViewById(R.id.mToolbar);
// StatusBarUtils.setTranslucentImageHeader(ctx,0,toolbar);
toolbar.setTitleTextColor(Color.TRANSPARENT);
toolbar.inflateMenu(R.menu.menu_search);
final CollapsingToolbarLayout mCollapsingToolbarLayout = (CollapsingToolbarLayout) findViewById(R.id.mCollapsingToolbarLayout);
mCollapsingToolbarLayout.setTitle("");
mCollapsingToolbarLayout.setCollapsedTitleTextColor(getResources().getColor(R.color.white));
mCollapsingToolbarLayout.setExpandedTitleColor(Color.TRANSPARENT);
AppBarLayout mAppBarLayout = (AppBarLayout) findViewById(R.id.mAppBarLayout);
mAppBarLayout.addOnOffsetChangedListener(new AppBarLayout.OnOffsetChangedListener() {
@Override
public void onOffsetChanged(AppBarLayout appBarLayout, int verticalOffset) {
if(Math.abs(verticalOffset) >= appBarLayout.getTotalScrollRange()){
toolbar.setTitleTextColor(getResources().getColor(R.color.white));
mCollapsingToolbarLayout.setTitle("AppbarLayout");
}else{
mCollapsingToolbarLayout.setTitle("");
}
}
});
}
@Override
protected int getLayoutResource() {
return R.layout.activity_paperdetails;
}
}
<file_sep>package com.zncm.jmxandroid.view;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.RectF;
import android.support.annotation.Nullable;
import android.util.AttributeSet;
import android.view.View;
import com.zncm.jmxandroid.R;
/**
* Created by jiaomx on 2017/5/8.
*/
public class BaseView extends View {
private Paint paint;
public BaseView(Context context) {
super(context);
init();
}
private void init() {
paint = new Paint();
paint.setStrokeWidth(5);
}
public BaseView(Context context, @Nullable AttributeSet attrs) {
super(context, attrs);
init();
}
public BaseView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
init();
}
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
paint.setColor(Color.RED); // Set the paint colour
paint.setStyle(Paint.Style.FILL); // Set the paint style to filled
paint.setStrokeWidth((float) 2.0); // Set the stroke width
canvas.drawColor(Color.WHITE);
canvas.drawRect(100, 100, 200, 600, paint); // Draw a rectangle
canvas.drawRect(300, 100, 400, 600, paint); // Draw a rectangle
/**
*@NonNull String text, float x, float y, @NonNull Paint paint
*/
paint.setTextSize(20);
canvas.drawText("你好啊", 200, 200, paint);
/**
*float cx, float cy, float radius, @NonNull Paint paint
*/
paint.setStrokeWidth(5);
paint.setStyle(Paint.Style.STROKE);
paint.setColor(Color.GREEN);
paint.setAntiAlias(true);
canvas.drawCircle(210, 210, 100, paint);
canvas.drawCircle(210, 210, 50, paint);
/**
*float startX, float startY, float stopX, float stopY, @NonNull Paint paint
*/
canvas.drawLine(100, 100, 400, 100, paint);
canvas.drawLine(100, 100, 100, 400, paint);
paint.setColor(Color.YELLOW);
canvas.drawLine(100, 100, 200, 300, paint);
paint.setColor(Color.DKGRAY);
canvas.drawLine(100, 100, 300, 300, paint);
/**
*float left, float top, float right, float bottom
*/
RectF rectF1 = new RectF(100, 100, 400, 400);
canvas.drawRect(rectF1, paint);
paint.setColor(Color.BLUE);
RectF rectF2 = new RectF(200, 200, 400, 400);
canvas.drawRect(rectF2, paint);
/**
*@NonNull RectF oval, float startAngle, float sweepAngle, boolean useCenter,
@NonNull Paint paint
*/
paint.setColor(Color.YELLOW);
RectF rectF3 = new RectF(100, 400, 400, 500);
// canvas.drawRect(rectF3,paint);
canvas.drawArc(rectF3, 180, 180, true, paint);
/**
*@NonNull RectF oval, @NonNull Paint paint
*/
RectF rectF4 = new RectF(100, 500, 600, 600);
canvas.drawOval(rectF4, paint);
/**
*float x0, float y0, float x1, float y1, int colors[], float positions[],
TileMode tile
*/
// Shader shader = new LinearGradient(0,0,100,100,new int[]{Color.RED, Color.GREEN, Color.BLUE, Color.YELLOW,
// Color.LTGRAY},null, Shader.TileMode.REPEAT);
// paint.setShader(shader);
//
// RectF rectF5 = new RectF(100,600,700,700);
// canvas.drawOval(rectF5,paint);
/**
* Sector (pie-slice) shape
*/
paint.setColor(Color.BLACK);
RectF oval2 = new RectF(60, 100, 200, 240);// Set up a new rectangle for the arc to sweep over
canvas.drawArc(oval2, 270, 90, true, paint);
// Draw an arc: the first parameter is the RectF, the second is the start angle, the third is how many degrees to sweep, and the fourth draws a filled sector when true or just the arc outline when false
// Draw an ellipse by changing the same oval
oval2.set(210, 100, 250, 130);
canvas.drawOval(oval2, paint);
Path path = new Path();
/**
*float x, float y
*/
path.moveTo(80, 200);// This point is the starting point of the polygon
path.lineTo(120, 250);
path.lineTo(80, 250);
path.close();
paint.setColor(Color.GREEN);
canvas.drawPath(path,paint);
path.reset();
paint.setStyle(Paint.Style.STROKE);// Set the paint to stroke (outline only)
Path path1=new Path();
path1.moveTo(180, 200);
path1.lineTo(200, 200);
path1.lineTo(210, 210);
path1.lineTo(200, 220);
path1.lineTo(180, 220);
path1.lineTo(170, 210);
path1.close();// Close the path
canvas.drawPath(path1, paint);
/**
* Rounded rectangle
*/
paint.setStyle(Paint.Style.FILL);
paint.setColor(Color.GREEN);
paint.setAntiAlias(true);
RectF rectF5 = new RectF(100,600,600,800);
// canvas.drawRect(rectF5,paint);
canvas.drawRoundRect(rectF5,60,60,paint);
/**
* Draw a Bézier curve
*/
paint.reset();
paint.setStyle(Paint.Style.STROKE);
paint.setColor(Color.YELLOW);
Path path2 = new Path();
path2.moveTo(100,300);
/**
*float x1, float y1, float x2, float y2
*/
path2.quadTo(150,300,200,400);
canvas.drawPath(path2,paint);
/**
* Draw points
* float x, float y, @NonNull Paint paint
*/
paint.setColor(Color.BLACK);
paint.setStrokeWidth(2);
paint.setStyle(Paint.Style.FILL);
canvas.drawPoint(100,1000,paint);
canvas.drawPoints(new float[]{60,400,65,400,70,400},paint);
/**
* Draw a bitmap image
*/
Bitmap bitmap = BitmapFactory.decodeResource(getResources(), R.drawable.ic_launcher);
canvas.drawBitmap(bitmap, 100,800, paint);
}
}
<file_sep>package com.zncm.jmxandroid.view;
import android.support.v4.app.Fragment;
/**
* Created by jiaomx on 2017/6/15.
*/
public class AddTextFt extends Fragment {
}
<file_sep>package com.zncm.jmxandroid.os
import android.os.Bundle
import android.os.Handler
import android.os.Message
import android.widget.Toast
import com.zncm.jmxandroid.R
import com.zncm.jmxandroid.base.BaseAc
import kotlinx.android.synthetic.main.activity_kotlin3.*
import org.jetbrains.anko.ctx
import java.util.*
/**
* Created by jiaomx on 2017/5/22.
*/
public class MyHandlerAc : BaseAc() {
override fun getLayoutResource(): Int {
return R.layout.activity_kotlin3
}
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
tv003.setText("111111")
var handler =object :Handler(){
override fun handleMessage(msg: Message?) {
super.handleMessage(msg)
var msgStr:String = msg!!.obj as String
tv003.setText(msgStr)
Toast.makeText(ctx,msgStr,Toast.LENGTH_SHORT).show()
}
}
tv003.setOnClickListener {
var msg=Message()
msg.obj= Random().nextLong().toString()
handler.handleMessage(msg)
}
}
}
<file_sep>package com.zncm.jmxandroid.mykotlin
import android.app.Activity
import android.os.Bundle
import android.widget.TextView
import com.zncm.jmxandroid.R
/**
* Created by jiaomx on 2017/5/19.
*/
class MyDemo2 : Activity() {
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_kotlin2)
val tv001 = findViewById(R.id.tv001) as TextView
}
}
<file_sep>package com.zncm.jmxandroid.hook;
import android.content.Context;
import android.content.Intent;
import com.zncm.jmxandroid.utils.Xutils;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
/**
* Created by jiaomx on 2017/5/3.
*/
public class HookStartActivityUtils {
private Context myContext;
private Class<?> myProxyClass;
public HookStartActivityUtils(Context myContext, Class<?> myProxyClass) {
this.myContext = myContext;
this.myProxyClass = myProxyClass;
}
public void hookStartActivity() throws Exception {
Class<?> myClass = Class.forName("android.app.IActivityManager");
Class<?> activityManagerNativeClass = Class.forName("android.app.ActivityManagerNative");
Field gDefaultField = activityManagerNativeClass.getDeclaredField("gDefault");
// Make the private field accessible (bypass the access check)
gDefaultField.setAccessible(true);
Object gDefault = gDefaultField.get(null);
Class<?> singletonClass = Class.forName("android.util.Singleton");
Field mInstanceField = singletonClass.getDeclaredField("mInstance");
mInstanceField.setAccessible(true);
Object mInstance = mInstanceField.get(gDefault);
/**
* InvocationHandler: the object that actually carries out the invocation
*
*/
mInstance = Proxy.newProxyInstance(HookStartActivityUtils.class.getClassLoader(), new Class[]{myClass}, new startActivityInvocationHandler(mInstance));
mInstanceField.set(gDefault, mInstance);
}
private class startActivityInvocationHandler implements InvocationHandler {
Object object;
public startActivityInvocationHandler(Object object) {
this.object = object;
}
@Override
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
Xutils.debug("invoke::" + method.getName());
if (method.getName().equals("startActivity")) {
Intent oriInent = (Intent) args[2];
Intent safeIntent = new Intent(myContext, myProxyClass);
args[2] = safeIntent;
safeIntent.putExtra("oriInent", oriInent);
}
return method.invoke(object, args);
}
}
}
<file_sep>package com.zncm.jmxandroid.ui;
import android.app.Activity;
import android.graphics.Point;
import android.graphics.Rect;
import android.os.Bundle;
import android.view.MotionEvent;
import android.view.View;
import android.widget.ImageView;
import android.widget.TextView;
import com.zncm.jmxandroid.R;
public class GlobalLocalAct extends Activity {
private int lastX = 0;
private int lastY = 0;
@Override
protected void onCreate(Bundle savedInstanceState)
{
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_img2);
ImageView imageView = (ImageView) findViewById(R.id.img);
Rect localRect = new Rect();
imageView.getLocalVisibleRect(localRect);
((TextView) findViewById(R.id.local))
.setText("imageViewlocal" + localRect.toString());
Rect globalRect = new Rect();
Point globalOffset = new Point();
imageView.getGlobalVisibleRect(globalRect, globalOffset);
((TextView) findViewById(R.id.global))
.setText("imageViewglobal" + globalRect.toString());
((TextView) findViewById(R.id.offset))
.setText("imageViewglobalOffset:" + globalOffset.x + "," + globalOffset.y);
imageView.setOnTouchListener(new View.OnTouchListener() {
@Override
public boolean onTouch(View v, MotionEvent event) {
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
lastX = (int) event.getRawX();
lastY = (int) event.getRawY();
Rect localRect = new Rect();
v.getLocalVisibleRect(localRect);
((TextView) findViewById(R.id.local))
.setText("local" + localRect.toString());
Rect globalRect = new Rect();
Point globalOffset = new Point();
v.getGlobalVisibleRect(globalRect, globalOffset);
((TextView) findViewById(R.id.global))
.setText("global" + globalRect.toString());
((TextView) findViewById(R.id.offset))
.setText("globalOffset:" + globalOffset.x + "," + globalOffset.y);
break;
case MotionEvent.ACTION_MOVE:
int dx = (int) event.getRawX() - lastX;
int dy = (int) event.getRawY() - lastY;
int left = v.getLeft() + dx;
int top = v.getTop() + dy;
int right = v.getRight() + dx;
int bottom = v.getBottom() + dy;
v.layout(left, top, right, bottom);
lastX = (int) event.getRawX();
lastY = (int) event.getRawY();
localRect = new Rect();
v.getLocalVisibleRect(localRect);
((TextView) findViewById(R.id.local))
.setText("local" + localRect.toString());
globalRect = new Rect();
globalOffset = new Point();
v.getGlobalVisibleRect(globalRect, globalOffset);
((TextView) findViewById(R.id.global))
.setText("global" + globalRect.toString());
((TextView) findViewById(R.id.offset))
.setText("globalOffset:" + globalOffset.x + "," + globalOffset.y);
break;
case MotionEvent.ACTION_UP:
break;
}
return true;
}
});
}
}<file_sep>package com.zncm.jmxandroid.volley;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.widget.ImageView;
import android.widget.TextView;
import com.android.volley.AuthFailureError;
import com.android.volley.Request;
import com.android.volley.Response;
import com.android.volley.VolleyError;
import com.android.volley.toolbox.ImageLoader;
import com.android.volley.toolbox.NetworkImageView;
import com.android.volley.toolbox.StringRequest;
import com.zncm.jmxandroid.R;
import com.zncm.jmxandroid.base.BaseAc;
import com.zncm.jmxandroid.base.MyApp;
import java.util.HashMap;
import java.util.Map;
import butterknife.BindView;
import butterknife.ButterKnife;
/**
* Created by jiaomx on 2017/5/3.
*/
public class VolleyDemo extends BaseAc {
// https://api.unsplash.com/photos/random?client_id=20c1aa97b359765b805e5049e87295d51ff5f3505a6270d810f6bfaf52eedd9f&w=1920&h=1080&orientation=portrait
@BindView(R.id.tvVolley)
TextView tvVolley;
@BindView(R.id.imageView)
ImageView imageView;
@BindView(R.id.networkImageView)
NetworkImageView networkImageView;
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
ButterKnife.bind(this);
// volleyGet();
// volleyPost();
volleyImg();
}
@Override
protected int getLayoutResource() {
return R.layout.activity_volleydemo;
}
private void volleyImg() {
String imgUrl = "https://ss0.bdstatic.com/5aV1bjqh_Q23odCf/static/superman/img/logo/bd_logo1_31bdc765.png";
// String url, Response.Listener<Bitmap> listener, int maxWidth, int maxHeight,
// Bitmap.Config decodeConfig, Response.ErrorListener errorListener
// ImageRequest imageRequest = new ImageRequest(imgUrl, new Response.Listener<Bitmap>() {
// @Override
// public void onResponse(Bitmap response) {
// if (response != null) {
// imageView.setImageBitmap(response);
// }
//
// }
// }, 0, 0, Bitmap.Config.RGB_565, new Response.ErrorListener() {
// @Override
// public void onErrorResponse(VolleyError error) {
// Xutils.tShort(error.toString());
// }
// });
// imageRequest.setTag("imgGet");
// MyApp.getRequestQueue().add(imageRequest);
// ImageLoader imageLoader = new ImageLoader(MyApp.requestQueue,new MyBitmapCache());
//// final ImageView view,
//// final int defaultImageResId, final int errorImageResId
// ImageLoader.ImageListener imageListener =ImageLoader.getImageListener(imageView,R.mipmap.ic_launcher,R.mipmap.ic_launcher);
// imageLoader.get(imgUrl,imageListener);
ImageLoader imageLoader2 = new ImageLoader(MyApp.requestQueue, new MyBitmapCache());
networkImageView.setDefaultImageResId(R.drawable.ic_info);
networkImageView.setErrorImageResId(R.drawable.ic_info);
networkImageView.setImageUrl(imgUrl, imageLoader2);
}
private void volleyPost() {
//BasicNetwork.performRequest: Unexpected response code 400 for
String url = "https://api.unsplash.com/photos/random?";
//?client_id=20c1aa97b359765b805e5049e87295d51ff5f3505a6270d810f6bfaf52eedd9f&w=1920&h=1080&orientation=portrait
StringRequest request = new StringRequest(Request.Method.POST, url, new Response.Listener<String>() {
@Override
public void onResponse(String response) {
tvVolley.setText(response);
}
}, new Response.ErrorListener() {
@Override
public void onErrorResponse(VolleyError error) {
tvVolley.setText(error.toString());
}
}) {
@Override
protected Map<String, String> getParams() throws AuthFailureError {
// client_id=20c1aa97b359765b805e5049e87295d51ff5f3505a6270d810f6bfaf52eedd9f&w=1920&h=1080&orientation=portrait
Map<String, String> map = new HashMap<>();
map.put("client_id", "20c1aa97b359765b805e5049e87295d51ff5f3505a6270d810f6bfaf52eedd9f");
return map;
}
};
request.setTag("volleyPost");
MyApp.getRequestQueue().add(request);
}
private void volleyGet() {
String url = "https://api.unsplash.com/photos/random?client_id=20c1aa97b359765b805e5049e87295d51ff5f3505a6270d810f6bfaf52eedd9f&w=1920&h=1080&orientation=portrait";
// StringRequest request = new StringRequest(Request.Method.GET, url, new Response.Listener<String>() {
// @Override
// public void onResponse(String response) {
// tvVolley.setText(response);
// }
// }, new Response.ErrorListener() {
// @Override
// public void onErrorResponse(VolleyError error) {
// tvVolley.setText(error.toString());
// }
// });
// JsonObjectRequest jsonObjectRequest = new JsonObjectRequest(Request.Method.GET, url, null, new Response.Listener<JSONObject>() {
// @Override
// public void onResponse(JSONObject response) {
// tvVolley.setText(response.toString());
// }
// }, new Response.ErrorListener() {
// @Override
// public void onErrorResponse(VolleyError error) {
// tvVolley.setText(error.toString());
// }
// });
// jsonObjectRequest.setTag("volleyGet");
// MyApp.getRequestQueue().add(jsonObjectRequest);
VolleyRequest.RequestGet(this, url, "volleyGet", new VolleyInterface(this, VolleyInterface.mListener, VolleyInterface.mErrorListener) {
@Override
public void onMySuccess(String result) {
tvVolley.setText(result.toString());
}
@Override
public void onMyError(VolleyError result) {
tvVolley.setText(result.toString());
}
});
}
}
<file_sep>package com.zncm.jmxandroid.mykotlin
import android.content.Intent
import android.os.Bundle
import com.zncm.jmxandroid.R
import com.zncm.jmxandroid.base.BaseAc
import kotlinx.android.synthetic.main.activity_m.*
/**
* Created by jiaomx on 2017/5/18.
*/
class MyKotlinDemo : BaseAc() {
override fun getLayoutResource(): Int {
return R.layout.activity_m
}
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
btnMyKotlinDemo2.setOnClickListener {
/**
* android.content.ActivityNotFoundException: Unable to find explicit activity class {com.zncm.jmxandroid/com.zncm.jmxandroid.mykotlin.MyKotlinDemo2}; have you declared this activity in your AndroidManifest.xml?
*
*/
startActivity(Intent(this,MyKotlinDemo2::class.java))
}
}
}<file_sep>package com.zncm.jmxandroid.ui;
import android.content.Context;
import android.webkit.WebView;
/**
* Created by jiaomx on 2017/8/2.
*/
public class MyWebView extends WebView {
public MyWebView(Context context) {
super(context);
}
}
<file_sep>//package com.zncm.jmxandroid.baseview;
//
//import android.content.Context;
//import android.graphics.Bitmap;
//import android.graphics.BitmapFactory;
//import android.graphics.Rect;
//import android.graphics.RectF;
//import android.os.Bundle;
//import android.support.annotation.Nullable;
//import android.view.View;
//import android.widget.Button;
//import android.widget.FrameLayout;
//import android.widget.ImageView;
//import android.widget.RelativeLayout;
//
//import com.zncm.jmxandroid.R;
//import com.zncm.jmxandroid.base.BaseAc;
//import com.zncm.jmxandroid.utils.Xutils;
//import com.zncm.jmxandroid.view.pd.PDColorSelView;
//import com.zncm.jmxandroid.view.pd.PDCropImageView;
//import com.zncm.jmxandroid.view.pd.PDPaintView;
//import com.zncm.jmxandroid.view.TextFloatView;
//import com.zncm.jmxandroid.view.pd.crop.ClipImageLayout;
//
///**
// * Created by jiaomx on 2017/5/25.
// */
//
//public class PicEditAc extends BaseAc {
// public String filePath;
// public String saveFilePath;
// Context ctx;
// Bitmap baseBitmap;
//
//
// TextFloatView mTextFloatView;
//
// PDColorSelView mColorSelView;
// PDPaintView paintView;
// ImageView edit;
// ImageView undo;
// ImageView text;
// ImageView crop;
// RelativeLayout rlPaint;
// ImageView baseImageView;
// PDCropImageView mPDCropImageView;
//
// RelativeLayout bottomOp;
// RelativeLayout bottomOpCrop;
// RelativeLayout rlCrop;
//
// FrameLayout flBase;
//
//
// Rect imgRectF = new Rect();
//
//
// Button btnCancel;
// Button btnSure;
//
// ClipImageLayout mClipImageLayout = null;
// @Override
// public void onCreate(@Nullable Bundle savedInstanceState) {
// super.onCreate(savedInstanceState);
// ctx = this;
//
//
// baseBitmap = BitmapFactory.decodeResource(ctx.getResources(),
// R.drawable.img1);
//
//
// flBase = (FrameLayout) findViewById(R.id.flBase);
// baseImageView = (ImageView) findViewById(R.id.baseImageView);
// mClipImageLayout = (ClipImageLayout) findViewById(R.id.mClipImageLayout);
//
//// baseImageView.setImageBitmap(baseBitmap);
// paintView = (PDPaintView) findViewById(R.id.mPaintView);
//
//
// mClipImageLayout.setImageBitmap(baseBitmap);
//
//// baseImageView.post(new Runnable() {
//// public void run() {
//// float x = baseImageView.getLeft();
//// float y = baseImageView.getTop();
//// float width = baseImageView.getWidth();
//// float height = baseImageView.getHeight();
//// float right = baseImageView.getRight();
//// float bottom = baseImageView.getBottom();
////
//// Log.i("^_^ Runnable", "x = " + x + ", y = " + y + ", width = " + width + ", height = " + height +
//// ", right = " + right + ", bottom = " + bottom);
//// imgRectF = new RectF(x,y,width,height);
//// Xutils.debug("imgRectF->>"+imgRectF);
//// paintView.setImgRectF(imgRectF);
//// }
//// });
//
// mTextFloatView = (TextFloatView) findViewById(R.id.mTextFloatView);
// mPDCropImageView = (PDCropImageView) findViewById(R.id.mPDCropImageView);
// imageLayout();
//
//
// baseImageView.bringToFront();
//
//
// bottomOp = (RelativeLayout) findViewById(R.id.bottomOp);
// bottomOpCrop = (RelativeLayout) findViewById(R.id.bottomOpCrop);
//
// rlPaint = (RelativeLayout) findViewById(R.id.rlPaint);
// rlCrop = (RelativeLayout) findViewById(R.id.rlCrop);
// edit = (ImageView) findViewById(R.id.edit);
// text = (ImageView) findViewById(R.id.text);
// crop = (ImageView) findViewById(R.id.crop);
// undo = (ImageView) findViewById(R.id.undo);
// btnCancel = (Button) findViewById(R.id.btnCancel);
// btnSure = (Button) findViewById(R.id.btnSure);
// mColorSelView = (PDColorSelView) findViewById(R.id.mColorSelView);
//
//
//// EditText mEditText= (EditText) findViewById(R.id.mEditText);
//// ImageView editTextSure = (ImageView) findViewById(R.id.editTextSure);
//
//
// mTextFloatView.setText("PMS258");
// // editTextSure.setOnClickListener(new View.OnClickListener() {
//// @Override
//// public void onClick(View v) {
//// String text = mEditText.getText().toString().trim();
//// if (TextUtils.isEmpty(text)){
//// return;
//// }
//// mTextFloatView.setText(text);
//// }
//// });
// mColorSelView.setmOnColorSelListener(new PDColorSelView.OnColorSelListener() {
// @Override
// public void onColorSelListener(int color) {
// paintView.setColor(color);
//// mTextFloatView.setColor(color);
// }
// });
//
//
// edit.setOnClickListener(new View.OnClickListener() {
// @Override
// public void onClick(View v) {
// bottomOp.setVisibility(View.VISIBLE);
// rlPaint.bringToFront();
// rlCrop.setVisibility(View.GONE);
// }
// });
//
// text.setOnClickListener(new View.OnClickListener() {
// @Override
// public void onClick(View v) {
// bottomOp.setVisibility(View.GONE);
// mTextFloatView.bringToFront();
// rlCrop.setVisibility(View.GONE);
// }
// });
//
//
// crop.setOnClickListener(new View.OnClickListener() {
// @Override
// public void onClick(View v) {
// rlCrop.setVisibility(View.VISIBLE);
// rlCrop.bringToFront();
// bottomOp.setVisibility(View.GONE);
//
// }
// });
//
//
// undo.setOnClickListener(new View.OnClickListener() {
// @Override
// public void onClick(View v) {
// paintView.undo();
// }
// });
// btnCancel.setOnClickListener(new View.OnClickListener() {
// @Override
// public void onClick(View v) {
//
// baseImageView.bringToFront();
// }
// });
// btnSure.setOnClickListener(new View.OnClickListener() {
// @Override
// public void onClick(View v) {
//
//// baseBitmap = mClipImageLayout.clip();
// baseBitmap = mPDCropImageView.cropBitmap(baseBitmap);
// if (baseBitmap != null) {
// baseImageView.setImageBitmap(baseBitmap);
// imageLayout();
// }
//
// }
// });
//
// }
//
// private void imageLayout() {
// baseImageView.post(new Runnable() {
// public void run() {
// baseImageView.getGlobalVisibleRect(imgRectF);
// int top = imgRectF.height() / 2 - baseBitmap.getHeight() / 2;
// imgRectF.top = top;
// int bottom = top + baseBitmap.getHeight();
// imgRectF.bottom = bottom;
// Xutils.debug("imgRectF->>" + imgRectF + " " + imgRectF.height() / 2 + ":" + baseBitmap.getHeight() / 2);
// paintView.setImgRectF(imgRectF);
// mTextFloatView.setImgRectF(imgRectF);
// mPDCropImageView.setImgRectF(new RectF(imgRectF));
// }
// });
// }
//
// @Override
// protected int getLayoutResource() {
// return R.layout.aa_picedit;
// }
//}
<file_sep>package com.zncm.jmxandroid.ui;
import android.content.Context;
import android.graphics.Typeface;
import android.os.Bundle;
import android.view.View;
import android.widget.Toast;
import com.zncm.dminter.tip.Tip;
import com.zncm.jmxandroid.R;
import com.zncm.jmxandroid.base.BaseAc;
import com.zncm.jmxandroid.utils.Xutils;
import es.dmoral.toasty.Toasty;
public class ToastyActivity extends BaseAc implements View.OnClickListener {
Context ctx;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
ctx = this;
Toasty.Config.getInstance()
.setErrorColor(getResources().getColor(R.color.material_amber_100)) // optional
.setInfoColor(getResources().getColor(R.color.material_blue_800)) // optional
.setSuccessColor(getResources().getColor(R.color.material_brown_900)) // optional
.setWarningColor(getResources().getColor(R.color.material_deep_orange_50)) // optional
.setTextColor(getResources().getColor(R.color.material_green_300)) // optional
.tintIcon(true) // optional (apply textColor also to the icon)
.setToastTypeface(Typeface.DEFAULT) // optional
.apply(); // required
Tip.Config.getInstance()
.setErrorColor(getResources().getColor(R.color.material_amber_100)) // optional
.setInfoColor(getResources().getColor(R.color.material_blue_800)) // optional
.setSucessColor(getResources().getColor(R.color.material_brown_900)) // optional
.setWarningColor(getResources().getColor(R.color.material_deep_orange_50)) // optional
.setTextColor(getResources().getColor(R.color.material_green_300)) // optional
.apply();
}
@Override
protected int getLayoutResource() {
return R.layout.activity_toasty;
}
/**
* To display an error Toast:
*
* Toasty.error(ctx, "This is an error toast.", Toast.LENGTH_SHORT, true).show(); To
* display a success Toast:
*
* Toasty.success(ctx, "Success!", Toast.LENGTH_SHORT, true).show(); To display an info
* Toast:
*
* Toasty.info(ctx, "Here is some info for you.", Toast.LENGTH_SHORT, true).show(); To
* display a warning Toast:
*
* Toasty.warning(ctx, "Beware of the dog.", Toast.LENGTH_SHORT, true).show(); To
* display the usual Toast:
*
* Toasty.normal(ctx, "Normal toast w/o icon").show(); To display the usual Toast with
* icon:
*
* Toasty.normal(ctx, "Normal toast w/ icon", yourIconDrawable).show(); You can also
* create your custom Toasts with the custom() method:
*
* Toasty.custom(ctx, "I'm a custom Toast", yourIconDrawable, tintColor, duration,
* withIcon, shouldTint).show();
*/
@Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.toasty1:
Toasty.error(ctx, "This is an error toast.", Toast.LENGTH_SHORT, true).show();
break;
case R.id.toasty2:
Toasty.success(ctx, "Success!", Toast.LENGTH_SHORT, true).show();
break;
case R.id.toasty3:
Toasty.info(ctx, "Here is some info for you.", Toast.LENGTH_SHORT, true).show();
break;
case R.id.toasty4:
Toasty.normal(ctx, "Normal toast w/ icon", getResources().getDrawable(R.drawable.ic_launcher)).show();
break;
case R.id.toasty5:
Xutils.tLong("Toast.makeText(MyApp.getInstance().ctx, msg, Toast.LENGTH_LONG).show();");
break;
case R.id.tip1:
Tip.info(ctx, "Here is some info for you1.").show();
break;
case R.id.tip2:
Tip.info(ctx, "Here is some info for you2.").show();
break;
case R.id.tip3:
Tip.info(ctx, "Here is some info for you3.").show();
break;
case R.id.tip4:
Tip.info(ctx, "Here is some info for you.4").show();
break;
case R.id.tip5:
Xutils.tShort("Toast.makeText(MyApp.getInstance().ctx, msg, Toast.LENGTH_LONG).show();");
break;
default:
break;
}
}
}
<file_sep>package com.zncm.jmxandroid.baseview;
/**
* Created by jiaomx on 2017/5/25.
*/
public class ViewSwitcherAc {
}
<file_sep>package com.zncm.jmxandroid.view;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Path;
import android.util.AttributeSet;
import android.view.View;
import android.view.WindowManager;
import com.zncm.jmxandroid.utils.Xutils;
public class BezierTestView extends View {
private int screenWidth;
private Paint mPaint;
private Path mPath;
//The smaller the difference between the two distances, the lower the peaks
//Maximum distance from the top: y of the first vertex
private int first_y = 60;
//x of the first vertex
private int first_x = 0;
//Width of each peak
private int horn_size = 50;
//Minimum distance from the top
private int minimum_distance = 20;
public int getMinimum_distance() {
return minimum_distance;
}
public void setMinimum_distance(int minimum_distance) {
this.minimum_distance = minimum_distance;
}
public int getScreenWidth() {
return screenWidth;
}
public void setScreenWidth(int screenWidth) {
this.screenWidth = screenWidth;
}
public int getFirst_x() {
return first_x;
}
public int getFirst_y() {
return first_y;
}
public void setFirst_x(int first_x) {
this.first_x = first_x;
}
public void setFirst_y(int first_y) {
this.first_y = first_y;
}
public int getHorn_size() {
return horn_size;
}
public void setHorn_size(int horn_size) {
this.horn_size = horn_size;
}
public BezierTestView(Context context) {
super(context);
init();
}
public BezierTestView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
init();
}
public BezierTestView(Context context, AttributeSet attrs) {
super(context, attrs);
init();
}
private void init() {
//The paint brush
mPaint = new Paint();
mPaint.setAntiAlias(true);
mPaint.setStyle(Paint.Style.STROKE);
mPaint.setColor(0xFF412129);
mPath = new Path();
WindowManager wm = (WindowManager) getContext()
.getSystemService(Context.WINDOW_SERVICE);
this.screenWidth = wm.getDefaultDisplay().getWidth();
}
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
mPath.reset();
// mPath.moveTo(0,90);
// //mPath.lineTo(0,0);
// mPath.lineTo(100,50);
// mPath.lineTo(100*2,90);
// mPath.lineTo(100*3,50);
// mPath.lineTo(100*4,90);
// mPath.lineTo(100*5,50);
mPath.moveTo(first_x, first_y);
//mPath.lineTo(50,10);
for (int i = 1; i <= screenWidth / horn_size; i++) {
//y alternates: one tall peak followed by one short one
/**
*float x, float y
* 50 _ 20
100 _ 60
150 _ 20
200 _ 60
250 _ 20
300 _ 60
350 _ 20
400 _ 60
*/
Xutils.debug(((i * horn_size) + first_x) + " _ " + (i % 2 == 0 ? first_y : minimum_distance));
mPath.lineTo((i * horn_size) + first_x, i % 2 == 0 ? first_y : minimum_distance);
}
mPath.moveTo(first_x, 200 - first_y);
for (int i = 1; i <= screenWidth / horn_size; i++) {
mPath.lineTo((i * horn_size) + first_x, 200 - (i % 2 == 0 ? first_y : minimum_distance));
}
canvas.drawPath(mPath, mPaint);
}
}
}<file_sep>package com.zncm.jmxandroid.view;
import android.animation.AnimatorSet;
import android.content.Context;
import android.graphics.Paint;
import android.support.annotation.Nullable;
import android.util.AttributeSet;
import android.view.View;
/**
* Created by jiaomx on 2017/5/8.
*/
public class BubbleView extends View {
private AnimatorSet animatorSet;
private Paint paint;
public BubbleView(Context context) {
super(context);
init();
}
private void init() {
animatorSet = new AnimatorSet();
initPaint();
}
private void initPaint() {
paint = new Paint();
paint.setStrokeWidth(5);
}
public BubbleView(Context context, @Nullable AttributeSet attrs) {
super(context, attrs);
init();
}
public BubbleView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
init();
}
}
<file_sep>package com.zncm.jmxandroid.utils;
import android.app.Activity;
import android.content.res.Resources;
import android.graphics.Color;
import android.os.Build;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
/**
* Created by jiaomx on 2017/7/19.
*/
public class MyStatusBarUtils {
public static void setStatusBarColor(Activity activity, int color) {
/**
 * API >= 21 (Lollipop): the platform can set the status bar colour directly
 */
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
activity.getWindow().setStatusBarColor(color);
}// Between 4.4 and 5.0 use a trick: let the window draw behind the status bar and add a coloured view there
else if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT) {
// First let the content draw behind the status bar, then add a view in the status bar area
// activity.getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
// Battery, time and network indicators stay visible
activity.getWindow().addFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS);
// Add a view in the status bar area; see how setContentView works and add our own view (its height equals the status bar height)
View view = new View(activity);
ViewGroup.LayoutParams params = new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, getStatusBarHeight(activity));
view.setLayoutParams(params);
view.setBackgroundColor(color);
// android:fitsSystemWindows="true" has to be set in every layout
// DecorView is a FrameLayout; it inflates a system layout (a LinearLayout),
// inside which there is a view with id android.R.id.content (a RelativeLayout)
// http://www.jianshu.com/p/531d1168b3ee
ViewGroup decorView = (ViewGroup) activity.getWindow().getDecorView();
decorView.addView(view);
// Get the root of the layout passed to the activity's setContentView
ViewGroup contentView = (ViewGroup) activity.findViewById(android.R.id.content);
contentView.setPadding(0, getStatusBarHeight(activity), 0, 0);
// View activityView = contentView.getChildAt(0);
// activityView.setFitsSystemWindows(true);
}
}
public static int getStatusBarHeight(Activity activity) {
Resources resources = activity.getResources();
int statusBarId = resources.getIdentifier("status_bar_height", "dimen", "android");
int statusBarHeight = resources.getDimensionPixelOffset(statusBarId);
Xutils.debug("statusBarHeight::" + statusBarHeight);
return statusBarHeight;
}
public static void setStatusBarTranslucent(Activity activity){
// 5.0 and above
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP){
// Not sure how to write this? Read the framework source code
View decorView = activity.getWindow().getDecorView();
decorView.setSystemUiVisibility(View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN);
activity.getWindow().setStatusBarColor(Color.TRANSPARENT);
}
// Between 4.4 and 5.0 use a trick: let the window draw behind the status bar and add a view there
else if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT){
activity.getWindow().addFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS);
}
}
}
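/*
 * Minimal usage sketch (added for illustration, not part of the original file): assuming an
 * Activity whose root layout sets android:fitsSystemWindows="true", the helper above would
 * typically be called right after setContentView. ContextCompat and R.color.colorPrimary are
 * assumed names used only for this example.
 *
 * public class DemoActivity extends Activity {
 *     @Override
 *     protected void onCreate(Bundle savedInstanceState) {
 *         super.onCreate(savedInstanceState);
 *         setContentView(R.layout.activity_demo);
 *         MyStatusBarUtils.setStatusBarColor(this, ContextCompat.getColor(this, R.color.colorPrimary));
 *     }
 * }
 */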
<file_sep>package com.zncm.jmxandroid.ui
import android.content.Context
import android.os.Bundle
import com.zncm.jmxandroid.R
import com.zncm.jmxandroid.base.BaseAc
class AZViewActivity : BaseAc() {
override fun getLayoutResource(): Int {
return R.layout.activity_azview
}
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
}
}
<file_sep>package com.zncm.jmxandroid.volley;
import android.content.Context;
import com.android.volley.Response;
import com.android.volley.VolleyError;
/**
* Created by jiaomx on 2017/5/7.
*/
public abstract class VolleyInterface {
public Context context;
public static Response.Listener<String> mListener;
public static Response.ErrorListener mErrorListener;
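// Note: these listeners are static yet assigned through the constructor and again in
// myListener()/myErrorListener(); callers such as VolleyDemo pass the current static values back in.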
public VolleyInterface(Context context, Response.Listener<String> listener, Response.ErrorListener errorListener) {
this.context = context;
this.mListener = listener;
this.mErrorListener = errorListener;
}
public abstract void onMySuccess(String result);
public abstract void onMyError(VolleyError result);
public Response.Listener<String> myListener() {
mListener = new Response.Listener<String>() {
@Override
public void onResponse(String response) {
onMySuccess(response);
}
};
return mListener;
}
public Response.ErrorListener myErrorListener() {
mErrorListener = new Response.ErrorListener() {
@Override
public void onErrorResponse(VolleyError error) {
onMyError(error);
}
};
return mErrorListener;
}
}
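/*
 * Hedged sketch (VolleyRequest is not shown in this file): a wrapper built on this interface
 * could look roughly like the code below, reusing MyApp.getRequestQueue() the way the demo
 * activity does. The method body is an assumption for illustration, not the real implementation.
 *
 * public static void RequestGet(Context context, String url, String tag, VolleyInterface vif) {
 *     StringRequest request = new StringRequest(Request.Method.GET, url,
 *             vif.myListener(), vif.myErrorListener());
 *     request.setTag(tag);
 *     MyApp.getRequestQueue().add(request);
 * }
 */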
<file_sep>package com.zncm.jmxandroid.mykotlin
import android.app.Activity
import android.os.Bundle
import android.widget.EditText
import android.widget.TextView
import com.zncm.jmxandroid.R
import kotlinx.android.synthetic.main.activity_kotlin2.*
/**
* Created by jiaomx on 2017/5/19.
*/
class MyKotlinDemo2: Activity() {
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_kotlin2)
tv001.setText("this is a text!")
et001.setText("请输入")
}
}<file_sep>package com.zncm.jmxandroid.baseview;
import android.os.Bundle;
import android.os.PersistableBundle;
import android.support.annotation.Nullable;
import android.view.View;
import android.webkit.WebView;
import android.widget.TextView;
import android.widget.ZoomButton;
import android.widget.ZoomControls;
import com.zncm.jmxandroid.R;
import com.zncm.jmxandroid.base.BaseAc;
import com.zncm.jmxandroid.utils.Xutils;
import butterknife.BindView;
import butterknife.ButterKnife;
/**
* Created by jiaomx on 2017/5/25.
*/
public class ZoomControlsAc extends BaseAc {
@BindView(R.id.zoomIn)
ZoomControls zoomIn;
@BindView(R.id.zoomButton)
ZoomButton zoomButton;
@BindView(R.id.mWebView)
WebView mWebView;
@Override
public void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
ButterKnife.bind(this);
zoomIn.setOnZoomInClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Xutils.debug("zoomIn");
}
});
zoomIn.setOnZoomOutClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Xutils.debug("zoomOut");
}
});
/**
 *Without these settings nothing gets displayed, so apply them before loading the page
 */
mWebView.getSettings().setJavaScriptEnabled(true);
mWebView.getSettings().setSupportZoom(true);
mWebView.getSettings().setBuiltInZoomControls(true);
mWebView.loadUrl("https://www.baidu.com/");
}
@Override
protected int getLayoutResource() {
return R.layout.aa_zoomcontrols;
}
}
<file_sep>package com.zncm.jmxandroid;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import com.zncm.imbrary.ImActivity;
import com.zncm.jmxandroid.activityshortcut.ActivityShortcut;
import com.zncm.jmxandroid.base.BaseAc;
import com.zncm.jmxandroid.baseview.ColorSelViewAc;
import com.zncm.jmxandroid.baseview.MyPaintAc;
import com.zncm.jmxandroid.baseview.PicEditAc;
import com.zncm.jmxandroid.baseview.ZoomControlsAc;
import com.zncm.jmxandroid.github.FingerprintIdentifyAc;
import com.zncm.jmxandroid.github.GifActivity;
import com.zncm.jmxandroid.github.Okhttp1Activity;
import com.zncm.jmxandroid.github.PicEditActivity;
import com.zncm.jmxandroid.github.PopListActivity;
import com.zncm.jmxandroid.hook.HookAActivity;
import com.zncm.jmxandroid.mykotlin.MyKotlinDemo;
import com.zncm.jmxandroid.mykotlin.MyKotlinListView;
import com.zncm.jmxandroid.os.AudioAc;
import com.zncm.jmxandroid.os.BehaviorActivity;
import com.zncm.jmxandroid.os.BottomDlgActivity;
import com.zncm.jmxandroid.os.DlgActivity;
import com.zncm.jmxandroid.os.MyHandlerAc;
import com.zncm.jmxandroid.os.PaperDetailsActivity;
import com.zncm.jmxandroid.os.java.MyHandlerAcJ;
import com.zncm.jmxandroid.support.FabAc;
import com.zncm.jmxandroid.support.TabLayoutAc;
import com.zncm.jmxandroid.ui.AZViewActivity;
import com.zncm.jmxandroid.ui.BaseViewActivity;
import com.zncm.jmxandroid.ui.ColorpickerviewActivity;
import com.zncm.jmxandroid.ui.GlobalLocalAct;
import com.zncm.jmxandroid.ui.ImgActivity;
import com.zncm.jmxandroid.ui.QQSportStepActivity;
import com.zncm.jmxandroid.ui.RockerViewActivity;
import com.zncm.jmxandroid.ui.SmoothImgActivity;
import com.zncm.jmxandroid.ui.ToastyActivity;
import com.zncm.jmxandroid.ui.UtilsActivity;
import com.zncm.jmxandroid.volley.VolleyDemo;
public class MainActivity extends BaseAc implements View.OnClickListener {
Context ctx;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
ctx = this;
}
@Override
protected int getLayoutResource() {
return R.layout.activity_main;
}
@Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.UtilsActivity:
startActivity(new Intent(ctx, UtilsActivity.class));
break;
case R.id.DlgActivity:
startActivity(new Intent(ctx, DlgActivity.class));
break;
case R.id.RockerViewActivity:
startActivity(new Intent(ctx, RockerViewActivity.class));
break;
case R.id.QQSportStepActivity:
startActivity(new Intent(ctx, QQSportStepActivity.class));
break;
case R.id.Okhttp1Activity:
startActivity(new Intent(ctx, Okhttp1Activity.class));
break;
case R.id.BehaviorActivity:
startActivity(new Intent(ctx, BehaviorActivity.class));
break;
case R.id.PaperDetailsActivity:
startActivity(new Intent(ctx, PaperDetailsActivity.class));
break;
case R.id.BottomDlgActivity:
startActivity(new Intent(ctx, BottomDlgActivity.class));
break;
case R.id.PopListActivity:
startActivity(new Intent(ctx, PopListActivity.class));
break;
case R.id.PicEditAc:
// startActivity(new Intent(ctx, PicEditAc.class));
break;
case R.id.ColorSelViewAc:
startActivity(new Intent(ctx, ColorSelViewAc.class));
break;
case R.id.MyPaintAc:
startActivity(new Intent(ctx, MyPaintAc.class));
break;
case R.id.PicEditActivity:
// startActivity(new Intent(ctx, PicEditActivity.class));
break;
case R.id.GifActivity:
startActivity(new Intent(ctx, GifActivity.class));
break;
case R.id.AudioAc:
startActivity(new Intent(ctx, AudioAc.class));
break;
case R.id.FingerprintIdentifyAc:
startActivity(new Intent(ctx, FingerprintIdentifyAc.class));
break;
case R.id.FabAc:
startActivity(new Intent(ctx, FabAc.class));
break;
case R.id.ZoomControlsAc:
startActivity(new Intent(ctx, ZoomControlsAc.class));
break;
case R.id.TabLayoutAc:
startActivity(new Intent(ctx, TabLayoutAc.class));
break;
case R.id.MyKotlinListView:
startActivity(new Intent(ctx, MyKotlinListView.class));
break;
case R.id.MyHandlerAcJ:
startActivity(new Intent(ctx, MyHandlerAcJ.class));
break;
case R.id.MyHandlerAc:
startActivity(new Intent(ctx, MyHandlerAc.class));
break;
case R.id.btn1:
startActivity(new Intent(ctx, ImgActivity.class));
break;
case R.id.btn2:
startActivity(new Intent(ctx, GlobalLocalAct.class));
break;
case R.id.btn3:
startActivity(new Intent(ctx, SmoothImgActivity.class));
break;
case R.id.btn4:
startActivity(new Intent(ctx, ImActivity.class));
break;
case R.id.colorpickerview:
startActivity(new Intent(ctx, ColorpickerviewActivity.class));
break;
case R.id.hookaactivity:
// Caused by: android.content.ActivityNotFoundException: Unable to find explicit activity class {com.zncm.jmxandroid/com.zncm.jmxandroid.hook.HookAActivity}; have you declared this activity in your AndroidManifest.xml?
startActivity(new Intent(ctx, HookAActivity.class));
// Class<?> myClass = null;
// try {
// myClass = Class.forName("com.zncm.jmxandroid.ui.ColorpickerviewActivity");
// } catch (ClassNotFoundException e) {
// e.printStackTrace();
// }
// Intent intent = new Intent(this,myClass);
// startActivity(intent);
break;
case R.id.volleydemo:
startActivity(new Intent(ctx, VolleyDemo.class));
break;
case R.id.BaseViewActivity:
startActivity(new Intent(ctx, BaseViewActivity.class));
break;
case R.id.ToastyActivity:
startActivity(new Intent(ctx, ToastyActivity.class));
break;
case R.id.ActivityShortcut:
startActivity(new Intent(ctx, ActivityShortcut.class));
break;
// case R.id.MyActivity:
// startActivity(new Intent(ctx, MyActivity.class));
// break;
case R.id.AZViewActivity:
startActivity(new Intent(ctx, AZViewActivity.class));
break;
case R.id.MyKotlinDemo:
startActivity(new Intent(ctx, MyKotlinDemo.class));
break;
default:
break;
}
}
}
<file_sep>package com.zncm.jmxandroid.mykotlin
import android.os.Bundle
import android.support.v7.widget.LinearLayoutManager
import android.support.v7.widget.RecyclerView
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import android.widget.TextView
import com.zncm.jmxandroid.R
import com.zncm.jmxandroid.base.BaseAc
import kotlinx.android.synthetic.main.activity_ktlistview.*
import java.util.ArrayList
/**
* Created by jiaomx on 2017/5/19.
*/
class MyKotlinListView : BaseAc() {
override fun getLayoutResource(): Int {
return R.layout.activity_ktlistview
}
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
recyclerView.setHasFixedSize(true)
recyclerView.layoutManager = LinearLayoutManager(this, LinearLayoutManager.VERTICAL, false)
var datas = ArrayList<String>()
for (i in 1..50){
datas.add(i.toString())
}
var adapter: RecyclerViewAdapter=RecyclerViewAdapter(datas)
recyclerView.adapter = adapter
}
}
class RecyclerViewAdapter(var data: MutableList<String>) : RecyclerView.Adapter<RecyclerViewAdapter.RecyclerViewHoder>() {
override fun onBindViewHolder(recyclerViewHoder: RecyclerViewHoder, i: Int) {
recyclerViewHoder.textView.setText(data.get(i))
}
override fun onCreateViewHolder(viewGroup: ViewGroup, i: Int): RecyclerViewHoder {
val view = LayoutInflater.from(viewGroup.context).inflate(R.layout.activity_item, viewGroup, false)
val holder = RecyclerViewHoder(view)
return holder
}
override fun getItemCount(): Int {
return data.size
}
class RecyclerViewHoder(itemView: View) : RecyclerView.ViewHolder(itemView) {
var textView: TextView
init {
textView = itemView.findViewById(R.id.textView) as TextView
}
}
}
<file_sep>package com.zncm.jmxandroid.hook;
import android.content.Context;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import com.zncm.jmxandroid.R;
public class ProxyActivity extends AppCompatActivity {
}
<file_sep>package com.zncm.jmxandroid.os;
import android.app.DatePickerDialog;
import android.app.ProgressDialog;
import android.app.TimePickerDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.os.Bundle;
import android.os.Handler;
import android.support.v7.app.AlertDialog;
import android.view.View;
import android.widget.Button;
import android.widget.DatePicker;
import android.widget.TimePicker;
import android.widget.Toast;
import com.zncm.jmxandroid.R;
import com.zncm.jmxandroid.base.BaseAc;
import com.zncm.jmxandroid.base.MyApp;
import java.util.ArrayList;
import java.util.Calendar;
public class DlgActivity extends BaseAc {
Context ctx;
private Button dlg1;
private Button dlg2;
private Button dlg3;
private Button dlg4;
private Button dlg5;
private Button dlg6;
private Button dlg7;
private Button dlg8;
private Button dlg9;
ArrayList<Integer> list = new ArrayList<>();
Handler mHandler = new Handler();
int progress = 0;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
ctx = this;
dlg1 = (Button) findViewById(R.id.dlg1);
dlg2 = (Button) findViewById(R.id.dlg2);
dlg3 = (Button) findViewById(R.id.dlg3);
dlg4 = (Button) findViewById(R.id.dlg4);
dlg5 = (Button) findViewById(R.id.dlg5);
dlg6 = (Button) findViewById(R.id.dlg6);
dlg7 = (Button) findViewById(R.id.dlg7);
dlg8 = (Button) findViewById(R.id.dlg8);
dlg9 = (Button) findViewById(R.id.dlg9);
dlg1.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
dlg1();
}
});
dlg2.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
dlg2();
}
});
dlg3.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
dlg3();
}
});
dlg4.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
dlg4();
}
});
dlg5.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
dlg5();
}
});
dlg6.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
dlg6();
}
});
dlg7.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
dlg7();
}
});
dlg8.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
dlg8();
}
});
dlg9.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
Calendar c = Calendar.getInstance();
TimePickerDialog dialog = new TimePickerDialog(ctx,
new TimePickerDialog.OnTimeSetListener() {
@Override
public void onTimeSet(TimePicker view, int hourOfDay, int minute) {
d("选择时间:" + hourOfDay + "时" + minute + "分");
}
}, c.get(Calendar.HOUR_OF_DAY), c.get(Calendar.MINUTE), true);
dialog.show();
}
});
}
private void dlg8() {
Calendar c = Calendar.getInstance();
DatePickerDialog dialog = new DatePickerDialog(ctx,
new DatePickerDialog.OnDateSetListener() {
@Override
public void onDateSet(DatePicker view, int year, int monthOfYear, int dayOfMonth) {
d("选择日期:" + year + "年" + (monthOfYear+1) + "月" + dayOfMonth + "日");
}
}, c.get(Calendar.YEAR), c.get(Calendar.MONTH), c.get(Calendar.DAY_OF_MONTH));
dialog.show();
}
private void dlg7() {
final int MAX_PROGRESS = 100;
final ProgressDialog dialog = new ProgressDialog(ctx);
dialog.setTitle("我是标题");
dialog.setProgress(0);
dialog.setMax(MAX_PROGRESS);
dialog.setProgressStyle(ProgressDialog.STYLE_HORIZONTAL);
dialog.setOnCancelListener(new DialogInterface.OnCancelListener() {
public void onCancel(DialogInterface dialog) {
mHandler.removeCallbacksAndMessages(null);
d("进度被打断");
}
});
dialog.show();
progress = 0;
mHandler.postDelayed(new Runnable() {
@Override
public void run() {
d("" + progress);
progress++;
dialog.setProgress(progress);
if (progress == 100) {
dialog.cancel();
} else {
mHandler.postDelayed(this, 100);
}
}
}, 100);
}
private void dlg6() {
ProgressDialog dialog = new ProgressDialog(ctx);
dialog.setTitle("我是标题");
dialog.setMessage("等待中... 想关闭请杀掉app");
dialog.setIndeterminate(true);
dialog.setCancelable(true);
dialog.show();
}
private void dlg5() {
final String[] items = {"项目1", "项目2", "项目3", "项目4"};
final boolean selected[] = {false, true, false, false};
list.clear();
for (int i = 0, size = selected.length; i < size; ++i) {
if (selected[i]) {
list.add(i);
}
}
AlertDialog.Builder dialog = new AlertDialog.Builder(ctx);
dialog.setTitle("我是标题")
.setMultiChoiceItems(items, selected,
new DialogInterface.OnMultiChoiceClickListener() {
@Override
public void onClick(DialogInterface dialog, int which, boolean isChecked) {
if (isChecked) {
list.add(which);
} else {
list.remove(Integer.valueOf(which));
}
}
})
.setPositiveButton("确定",
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
if (list.size() == 0) {
d("你什么都没选啊,小伙");
} else {
StringBuilder str = new StringBuilder();
for (int i = 0, size = list.size(); i < size; i++) {
str.append(items[list.get(i)]);
if (i < size - 1) {
str.append(", ");
}
}
d("你选中了: " + str.toString());
}
}
});
dialog.show();
}
private void dlg4() {
int select = 1; //index of the item that is selected when the single-choice dialog first opens
final String[] items = {"项目1", "项目2", "项目3", "项目4"};
AlertDialog.Builder dialog = new AlertDialog.Builder(ctx);
dialog.setTitle("我是标题")
.setSingleChoiceItems(items, select,
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
d("选择: " + items[which]);
}
})
.setPositiveButton("确定",
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
}
});
dialog.show();
}
private void dlg3() {
final String[] items = {"项目1", "项目2", "项目3", "项目4"};
AlertDialog.Builder dialog = new AlertDialog.Builder(ctx);
dialog.setTitle("我是标题")
.setItems(items, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
d("选择: " + items[which]);
}
});
dialog.show();
}
private void dlg2() {
final AlertDialog.Builder dialog = new AlertDialog.Builder(ctx);
dialog.setIcon(R.drawable.ic_launcher)
.setTitle("我是标题")
.setMessage("我是要显示的消息")
.setPositiveButton("确定",
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
d("确定");
}
})
.setNeutralButton("说明",
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
d("说明");
}
})
.setNegativeButton("取消",
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
d("取消");
}
});
dialog.show();
}
private void dlg1() {
final AlertDialog.Builder dialog = new AlertDialog.Builder(ctx) {
@Override
public AlertDialog create() {
d("对话框create,创建时调用");
return super.create();
}
@Override
public AlertDialog show() {
d("对话框show,显示时调用");
return super.show();
}
};
dialog.setOnCancelListener(new DialogInterface.OnCancelListener() {
public void onCancel(DialogInterface dialog) {
d("对话框取消");
}
});
dialog.setOnDismissListener(new DialogInterface.OnDismissListener() {
public void onDismiss(DialogInterface dialog) {
d("对话框销毁");
}
});
dialog.setIcon(R.drawable.ic_launcher)
.setTitle("我是标题")
.setMessage("我是要显示的消息")
.setCancelable(true)
.setPositiveButton("确定",
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
d("点击确定");
}
})
.setNegativeButton("取消",
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
d("点击取消");
}
});
dialog.show();
}
public static void d(String msg) {
Toast.makeText(MyApp.getInstance().ctx, msg, Toast.LENGTH_SHORT).show();
}
@Override
protected int getLayoutResource() {
return R.layout.activity_dlg;
}
}
<file_sep>package com.zncm.jmxandroid.view;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.Shader;
import android.graphics.SweepGradient;
import android.support.annotation.Nullable;
import android.util.AttributeSet;
import android.view.View;
import com.zncm.jmxandroid.R;
/**
* Created by jiaomx on 2017/5/12.
*
* QQ Sports step-counter ring animation
*/
public class QQSportStepView extends View {
private int outerColor;
private int innerColor;
private int borderWidth;
private int stepTextSize;
private int stepTextColor;
int mCurrentStep = 50;
int mStepMax =200;
Paint paint;
Paint innerPaint;
Paint textPaint;
public QQSportStepView(Context context) {
super(context);
// also initialise the paints when the view is created from code rather than from XML
init(context, null);
}
public QQSportStepView(Context context, @Nullable AttributeSet attrs) {
super(context, attrs);
init(context, attrs);
}
private void init(Context context, @Nullable AttributeSet attrs) {
TypedArray typedArray = context.obtainStyledAttributes(attrs, R.styleable.QQStepView);
outerColor = typedArray.getColor(R.styleable.QQStepView_outerColor,outerColor);
innerColor = typedArray.getColor(R.styleable.QQStepView_innerColor,innerColor);
borderWidth = (int) typedArray.getDimension(R.styleable.QQStepView_borderWidth,borderWidth);
stepTextSize = (int) typedArray.getDimension(R.styleable.QQStepView_stepTextSize,stepTextSize);
stepTextColor = typedArray.getColor(R.styleable.QQStepView_stepTextColor,stepTextColor);
typedArray.recycle();
paint = new Paint();
// paint.setColor(outerColor);
int[] mGradientColors = {Color.GREEN, Color.YELLOW, Color.RED};
Shader shader = new SweepGradient(0,1000,mGradientColors,null);
paint.setShader(shader);
paint.setStrokeCap(Paint.Cap.ROUND);
paint.setStrokeWidth(borderWidth);
paint.setStyle(Paint.Style.STROKE);
paint.setAntiAlias(true);
innerPaint = new Paint();
innerPaint.setColor(innerColor);
innerPaint.setStrokeWidth(borderWidth);
innerPaint.setStyle(Paint.Style.STROKE);
innerPaint.setStrokeCap(Paint.Cap.ROUND);
innerPaint.setAntiAlias(true);
textPaint = new Paint();
Shader shader2 = new SweepGradient(0,1000,mGradientColors,null);
textPaint.setShader(shader2);
textPaint.setColor(stepTextColor);
textPaint.setTextSize(stepTextSize);
textPaint.setStyle(Paint.Style.STROKE);
textPaint.setAntiAlias(true);
}
public QQSportStepView(Context context, @Nullable AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
init(context, attrs);
}
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
int center = getWidth()/2;
int radius = getWidth()/2-borderWidth/2;
RectF rectF = new RectF(center-radius,center-radius,center+radius,center+radius);
canvas.drawArc(rectF,135,270,false,paint);
if (mStepMax==0){
return;
}
float sweepAngle = (float)mCurrentStep/mStepMax;
canvas.drawArc(rectF,135,sweepAngle*270,false,innerPaint);
String stepText = mCurrentStep+"";
Rect textRect = new Rect();
textPaint.getTextBounds(stepText,0,stepText.length(),textRect);
int dx = getWidth()/2-textRect.width()/2;
Paint.FontMetricsInt fontMetricsInt = textPaint.getFontMetricsInt();
int dy = (fontMetricsInt.bottom-fontMetricsInt.top)/2-fontMetricsInt.bottom;
int baseLine = getHeight()/2+dy;
canvas.drawText(stepText,dx,baseLine,textPaint);
}
public void setmCurrentStep(int mCurrentStep) {
this.mCurrentStep = mCurrentStep;
invalidate();
}
public void setmStepMax(int mStepMax) {
this.mStepMax = mStepMax;
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
int width = MeasureSpec.getSize(widthMeasureSpec);
int height = MeasureSpec.getSize(heightMeasureSpec);
setMeasuredDimension(width>height?height:width,width>height?height:width);
}
}
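/*
 * Illustrative sketch (not from the original sources): the view exposes setmStepMax() and
 * setmCurrentStep(), so a caller could drive the arc with a plain ValueAnimator. The view id,
 * step values and duration below are assumptions made only for this example.
 *
 * final QQSportStepView stepView = (QQSportStepView) findViewById(R.id.stepView);
 * stepView.setmStepMax(4000);
 * ValueAnimator animator = ValueAnimator.ofFloat(0f, 3000f);
 * animator.setDuration(1000);
 * animator.setInterpolator(new DecelerateInterpolator());
 * animator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
 *     @Override
 *     public void onAnimationUpdate(ValueAnimator animation) {
 *         stepView.setmCurrentStep((int) (float) animation.getAnimatedValue());
 *     }
 * });
 * animator.start();
 */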
|
a930674aa2971e75b4b0b8d3b2e5434218f51e7a
|
[
"Markdown",
"Java",
"Kotlin"
] | 38
|
Java
|
Dminter/JmxAndroid
|
ee7e72eed34a87da0bf5e8f0ac1b8a9d0306a499
|
e57e4fa9b011d2859caca0d32ad1d51c95dc0720
|
refs/heads/master
|
<file_sep>import javafx.beans.binding.StringExpression;
import javax.imageio.ImageIO;
import javax.swing.*;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* Main Entry Point
* Created by Andreas on 13.12.2015.
*/
public class SpritesheetGenerator {
private SGView view = null;
private SGModel model = null;
private SettingsDialog dialog = null;
public SpritesheetGenerator() {
model = new SGModel( this );
view = new SGView( this );
}
FileFilter filter_png = new FileFilter() {
@Override
public boolean accept(File pathname) {
return (pathname.isFile() && pathname.getName().endsWith("png"));
}
};
FileFilter filter_directory = new FileFilter() {
@Override
public boolean accept(File pathname) {
return pathname.isDirectory();
}
};
public void loadDirectories(String importDirectory) {
model.reset();
view.reset();
Node<File> tree = new Node<File>( new File(importDirectory), Node.FileType.DIRECTORY );
tree = loadDirectories( tree, 0); //directories loaded.
model.addDirectory( tree );
loadSprites( null ); //null will remain datastructure as is
//"E:\\Projects\\sketches\\flea_new\\png\\final\\hdpi"
//System.out.println( "finished" );
model.setTree( tree );
//TODO: make GUI. (vorher: make input dialogs at render. with filechoosers...)
}
/**
* Creates a new SubTree which consists of all Images and subdirectories within the given directory.
* @param node
* @return
*/
public Node<File> loadDirectories(Node<File> node, int level) {
if( node == null ) return null;
System.out.println("Level: " + level);
File[] images = node.getData().listFiles(filter_png); //check if this directory is empty
File[] directories = node.getData().listFiles(filter_directory);
if( images==null || directories==null || (images.length==0 && directories.length==0) ) return null; //listFiles can return null
int i=0;
int anzNull = 0;
for( File subDirectory : directories ) {
System.out.println(i);
Node<File> tmp = new Node<File>( subDirectory, Node.FileType.DIRECTORY );
tmp = loadDirectories(tmp, level + 1);
if( tmp == null ) anzNull++;
node.addDirectory( tmp );
model.addDirectory( tmp );
i++;
}
if( anzNull == i && images!=null && images.length == 0 ) return null; //this excludes empty directories, even if they have empty subdirectories.
return node;
}
/**
* Loads the bufferedImages in the given directory to a new Tree. The head of the tree is a Node with type = directory
* which contains images, which are also nodes, but from the type "file".
* @param path The path of the directory, where the images are located.
* @return A new tree with the loaded images.
*/
public Node<File> loadImages( String path ) {
return loadImages(null, path);
}
/**
* Loads the images in the directory from the tree. Saves the images to this tree.
* @param tree
* @return
*/
public Node<File> loadImages( Node<File> tree ) {
return loadImages( tree, tree.getData() );
}
public Node<File> loadImages( Node<File> tree, String path ) {
File directory = new File( path );
return loadImages(tree, directory);
}
/**
* Loads the bufferedImages in the given directory to a new Tree. The head of the tree is a Node with type = directory
* which contains images, which are also nodes, but from the type "file".
* @param tree The Node where the images should be added.
* @param directory The file of the directory, where the images are located.
* @return The tree with the images added.
*/
public Node<File> loadImages( Node<File> tree, File directory ) {
if( tree==null ) tree = new Node<File>( null, directory, Node.FileType.DIRECTORY );
File[] files = directory.listFiles( filter_png );
int fileSize = 0;
if( files!= null && files.length!=0 ) {
List<String> filenames = getSortedFileNames( files );
for( int u=0; u<filenames.size(); u++) {
String name = filenames.get(u);
File file = new File( directory.getAbsolutePath() + File.separator + name );
fileSize += file.length();
BufferedImage img = null;
try{
img = ImageIO.read( file );
}catch ( IOException e ) {
}
if( img != null ) {
tree.addFile(new Node<BufferedImage>(img, Node.FileType.FILE));
model.addSprite( img );
model.addHeight( img.getHeight() );
model.addWidth( img.getWidth() );
}else if( u<=3 ) {
JOptionPane.showMessageDialog( view, "img==null :( ... File: " + file.getName() + "; exists=" + file.exists() + "; importDirectory: " + directory.getAbsolutePath() );
}
}
/**
if( !model.heightsAreEqual() ) {
JOptionPane.showMessageDialog(view, "The images are not equal in height.");
model.reset();
view.reset();
}else {
if( !model.widthsAreEqual() )
JOptionPane.showMessageDialog( view, "The images are not equal in width. The spritesheet WILL be generated, but better check, if all is right!");
/*
view.setSpriteAmount( model.getSprites().size() );
view.setSpriteWidth(model.getSprites().get(0).getWidth());
view.setSpriteHeight(model.getSprites().get(0).getHeight());
view.setTotalWidth(model.getSprites().size() * model.getSprites().get(0).getWidth());
view.setTotalFileSize(fileSize / 1024);
view.setImageLabel( model.getSprites().get(0));
*
//TODO: set above information for nodes not for view.
} */
}else { //just no "files" in this directory, but maybe in a subdirectory.
//JOptionPane.showMessageDialog(view, "No sprites (png files) detected!");
}
return tree;
}
/**
* Filters the images from the models nodes and returns a new Tree.
* @param tree
* @return
*/
public Node<File> filterImages( Node<File> tree) {
return filterImages(tree, null);
}
/**
* Filters the images from the models nodes and puts them into the new Tree.
* @param tree
* @param newTree
* @return
*/
public Node<File> filterImages( Node<File> tree, Node<File> newTree ) { //TODO
int u=0;
float nte = 1; //every nth element is selected (e.g. 10 files with a custom value of 4 gives nte = 2.5)
float startOffset = 0; //offset so the selected frames are spread evenly
float customIndex = 0;
if( newTree == null ) newTree = new Node<File>(null, tree.getData(), Node.FileType.DIRECTORY);
if( model.getFfstate()== SGModel.FileFilterState.ODD ) u++;
else if( model.getFfstate() == SGModel.FileFilterState.CUSTOM ) {
nte = (float)tree.getFileAmount() / (float)model.getCustomFilterValue();
startOffset = (nte-1)/2;
customIndex += startOffset;
}
for( ; u<tree.getFileAmount(); ) {
newTree.addFile(new Node<BufferedImage>((BufferedImage) tree.getFileAt(u).getData(), Node.FileType.FILE));
switch ( model.getFfstate() ) {
case ALL:
u++;
break;
case EVEN:
case ODD:
u+=2;
break;
case CUSTOM:
customIndex += nte;
u = (int) customIndex;
break;
}
}
//TODO: set for each Node
/*
view.setSpriteAmount( model.getSprites().size() );
view.setSpriteWidth(model.getSprites().get(0).getWidth());
view.setSpriteHeight(model.getSprites().get(0).getHeight());
view.setTotalWidth(model.getSprites().size() * model.getSprites().get(0).getWidth());
view.setTotalFileSize(newTree.getTotalFileSize() / 1024);
view.setImageLabel( model.getSprites().get(0));
*/
return newTree;
}
/**
* Iterates through the list of nodes from the model (not a tree) and loads the images to their parent directory nodes.
* @param
* @return
*/
public void loadSprites( String absoluteSourceDensityPath ) {
List<Node<File>> directories = model.getDirectories();
File[] files = null;
for( Node<File> directory : directories ) {
String finalPath = "";
if( absoluteSourceDensityPath!=null ) {
String str = directory.getData().getAbsolutePath(); //absolute path of image directory
String sourceParent = absoluteSourceDensityPath.substring(0, absoluteSourceDensityPath.lastIndexOf("\\")); //cut density directory
String strBody = "";
if( sourceParent.length() < str.length() ) strBody = str.substring( sourceParent.length() ); //cut beginning from image directory until its density folder.
//replace density with new one.
String[] split = strBody.split("\\\\");
if( split.length>0 ) {
String body = "";
for( int i=2; i<split.length; i++ ) {
body += "\\"+split[i];
}
finalPath += absoluteSourceDensityPath + body;
System.out.println( finalPath );
}
File tmp = new File( finalPath );
files = tmp.listFiles( filter_png );
directory.setData( tmp );
}else {
finalPath = directory.getData().getAbsolutePath();
files = directory.getData().listFiles( filter_png );
}
List<String> filenames = getSortedFileNames( files );
System.out.println("Directory: " + directory.getData().getName() + "; Files found: " + filenames.size() );
for( int i=0; i<filenames.size(); i++ ) {
String name = filenames.get(i);
File file = new File( finalPath + File.separator + name );
BufferedImage img = null;
try{
img = ImageIO.read( file );
}catch ( IOException e ) {
}
if( img != null ) directory.addFile( new Node<BufferedImage>(img, Node.FileType.FILE) );
}
}
System.out.println("Finished loading sprites.");
//TODO: return unnecessary??
}
public List<String> getSortedFileNames( File[] files ) {
List<String> filenames = new ArrayList<>();
if( files != null ) {
for (File file : files) {
filenames.add(file.getName());
}
Collections.sort(filenames);
}
return filenames;
}
public void saveSpriteSheets() {
List<Node<File>> directories = model.getDirectories();
dialog = new SettingsDialog( this );
resetCurrentNodeIndex();
}
public void nextDirectory( ) {
boolean k=true;
do {
k=model.incDirectoryIndex();
} while ( !model.getCurrentDirectory().hasFiles() && k );
System.out.println("directory Index: " + model.getCurrentDirectoryIndex());
//exportPrompt.next(model.getCurrentDirectory(), model.getCurrentDirectoryIndex() == model.getDirectories().size()-1 );
}
public void saveSpritesheet( String exportPath ) {
if( model.hasSprites() ) {
int height = model.getHeight();
int width = model.getWidth();
int totalWidth = model.getTotalWidth();
if( height != -1 && width != -1 && totalWidth != -1 ) {
BufferedImage finalSpriteSheet = new BufferedImage(totalWidth, height, BufferedImage.TYPE_INT_ARGB_PRE);
//JOptionPane.showMessageDialog( view, "totalWidth = " + totalWidth + "; height = " + height + "; type = " + model.getType() );
//JOptionPane.showMessageDialog( view, "Amount of pics: " + model.getSprites().size() +"; finalSpriteSheet Width and height: "
// + finalSpriteSheet.getWidth() +", " +finalSpriteSheet.getHeight());
for (int i = 0; i < model.getSprites().size(); i++) {
BufferedImage image = model.getSprites().get(i);
finalSpriteSheet.createGraphics().drawImage(image, model.getWidthTo(i), 0, null);
}
view.setImageLabel(finalSpriteSheet);
if( !exportPath.endsWith("png")) exportPath+=".png";
try {
ImageIO.write( finalSpriteSheet, "png", new File(exportPath) );
} catch (IOException e) {
e.printStackTrace();
}
}else {
JOptionPane.showMessageDialog( view, "An error occured: One of the sizes is -1");
}
}else {
JOptionPane.showMessageDialog( view, "No sprites loaded yet!" );
}
}
public static void main(String[] args) {
SpritesheetGenerator sg = new SpritesheetGenerator();
}
public int nextState() {
return model.nextState();
}
public void setGlobalCustomFilter( int value ) {
model.setCustomFilterValue( value );
for( Node<File> node : model.getDirectories() ) {
if( !node.isCustomFilterChanged() ) node.setCustomFilterValue(value);
}
}
public SGModel.FileFilterState getFfState() {
return model.getFfstate();
}
public String getCustomFilterValue() {
return ""+model.getCustomFilterValue();
}
public Node<File> getCurrentNode() {
return model.getCurrentDirectory();
}
public void previousDirectory() {
/*if( model.getCurrentDirectoryIndex()==0 ) {
exportPrompt.setVisible(false);
}else {
*/do {
model.decDirectoryIndex();
} while (!model.getCurrentDirectory().hasFiles() && model.getCurrentDirectoryIndex() >= 0);
/*
exportPrompt.next(model.getCurrentDirectory(), model.getCurrentDirectoryIndex() == model.getDirectories().size() - 1);
}*/
}
public void finallyExport() {
//List<String> densityNames = new ArrayList<>();
List<String> densityPaths = new ArrayList<>();
int z=1;
if( view.isDensityChecked() ) {
//TODO load new densities and make a new loop
//for now: folder must have same structure.
File srcdir = model.getTree().getData();
srcdir = srcdir.getParentFile();
File[] densityFiles = srcdir.listFiles();
for( File f : densityFiles ) {
//densityNames.add( f.getName() );
densityPaths.add( f.getAbsolutePath() );
}
}
int d=0;
while( !densityPaths.isEmpty() ) {
for (Node<File> directory : model.getDirectories()) {
if (directory.hasFiles() && directory.isActive()) {
System.out.println("Active Spritesheet loading...");
int cumulativeWidth = 0;
float startOffset = 0;
float customIndex = 0;
float nte = (float) directory.getFileAmount() / (float) directory.getCustomFilterValue();
if (nte < 1) nte = 1;
startOffset = (nte - 1) / 2;
customIndex += startOffset;
System.out.println("Custom Index = " + customIndex + "; nte = " + nte);
List<BufferedImage> images = new ArrayList<>();
if (directory.firstAlwaysActive() && customIndex >= 1) {
BufferedImage image = (BufferedImage) directory.getFileAt(0).getData();
images.add(image);
cumulativeWidth += image.getWidth();
}
boolean lastAdded = false;
int i = (int) customIndex;
for (; i < directory.getFileAmount(); ) {
System.out.println("Active Spritesheet now loads the individual images: " + i);
BufferedImage image = directory.getFileAt(i).getData();
images.add(image);
if (i == directory.getFileAmount() - 1) lastAdded = true;
cumulativeWidth += image.getWidth();
customIndex += nte;
i = (int) customIndex;
}
if (directory.lastAlwaysActive() && !lastAdded) {
BufferedImage image = directory.getFileAt(directory.getFileAmount() - 1).getData();
images.add(image);
cumulativeWidth += image.getWidth();
}
if (!images.isEmpty()) {
BufferedImage finalSpriteSheet = new BufferedImage(cumulativeWidth, images.get(0).getHeight(), BufferedImage.TYPE_INT_ARGB_PRE);
cumulativeWidth = 0;
for (BufferedImage image : images) {
finalSpriteSheet.createGraphics().drawImage(image, cumulativeWidth, 0, null);
cumulativeWidth += image.getWidth();
}
String exportPath = directory.getDestinationPath();
if(d>0) {
String currentDensityPath = densityPaths.get(0);
exportPath = getDuplicateDensityExportPath(exportPath, currentDensityPath);
}
if (!exportPath.endsWith("png")) exportPath += ".png";
System.out.println(exportPath);
try {
ImageIO.write(finalSpriteSheet, "png", new File(exportPath));
} catch (IOException e) {
e.printStackTrace();
}
System.out.println("Active Spritesheet exported.");
}
}
}
//densityPaths = removeDensity( model.getDirectories().get(model.getDirectories().size()-1), densityNames, true );
if( !densityPaths.isEmpty() ) densityPaths = removeDensity( model.getDirectories().get(model.getDirectories().size()-1), densityPaths );
if( !densityPaths.isEmpty() ) {
updateDirectoriesWithNewSrcDensity( densityPaths.get(0) );
d++;
}
}
}
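/**
 * Derives the export path for another density: the directory components of the original export
 * path below its density folder are re-attached to the new density path. This assumes
 * Windows-style "\\" separators and that all density folders share the same parent directory.
 */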
public String getDuplicateDensityExportPath( String originalExportPath, String newDensityPath ) {
if( newDensityPath!=null ) {
String str = originalExportPath; //absolute path of image directory
String sourceParent = newDensityPath.substring(0, newDensityPath.lastIndexOf("\\")); //cut density directory
String strBody = "";
if( sourceParent.length() < str.length() ) strBody = str.substring( sourceParent.length() ); //cut beginning from image directory until its density folder.
//replace density with new one.
String[] split = strBody.split("\\\\");
if( split.length>0 ) {
String body = "";
for( int i=2; i<split.length; i++ ) {
body += "\\"+split[i];
}
newDensityPath += body;
System.out.println( newDensityPath );
}
}
return newDensityPath;
}
private void updateDirectoriesWithNewSrcDensity( String densityPath ) {
model.releaseFiles();
loadSprites( densityPath ); //TODO: debug and test.
}
private List<String> removeDensity( Node<File> tree, List<String> names ) {
int i=0;
while( !names.isEmpty() && i<names.size() && !names.get(i).equalsIgnoreCase( tree.getData().getAbsolutePath() )) i++;
//System.out.println( "Names[i]=" + names.get(i) + "; Head of tree: " + tree.getData().getAbsolutePath() );
if( i<names.size() ) names.remove(i); //guard: the current density path might not be in the list
return names;
}
public void resetCurrentNodeIndex() {
model.setCurrentDirectoryIndex(0);
}
public SGView getView() {
return view;
}
public boolean isLast() {
return model.getCurrentDirectoryIndex() == model.getDirectories().size()-1;
}
public boolean isFirst() {
return model.getCurrentDirectoryIndex()==0;
}
public void setGlobalExportDirectory(String directory) {
model.setGlobalExportDirectory( directory );
for( Node<File> node : model.getDirectories() ) {
if( !node.isCustomDirectoryChanged() ) node.setDestinationPath(directory);
}
}
public String getGlobalExportPath() {
return view.getGlobalExportPath();
}
public String getCurrentIndex() {
String index = "";
if( model.getCurrentDirectoryIndex()<10 ) index+="0";
return index+=model.getCurrentDirectoryIndex();
}
}
|
551a62c1359e7968e1d6ff958a14e24eaf79f67e
|
[
"Java"
] | 1
|
Java
|
halx99/SpritesheetGenerator
|
54d2ebc0eaa63b86166819152b02351841e3096a
|
a87161a73c2f7d1488a64b49429595ed38aa39d0
|
refs/heads/master
|
<repo_name>BartekPitala/Graph-Constructor<file_sep>/Grafy/MainWindow.xaml.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Input;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Navigation;
using System.Windows.Shapes;
namespace Grafy
{
/// <summary>
/// Interaction logic for MainWindow.xaml
/// </summary>
public partial class MainWindow : Window
{
public Canvas mainCanvas;
private Graf nasz_graf = null;
public int ilosc_wierzcholkow { get; set; }
public int aktualny_wierzcholek { get; set; }//VARIABLE NEEDED TO SHOW ON SCREEN WHICH VERTEX WE ARE CURRENTLY
//ENTERING NEIGHBOURS FOR
public bool zatwierdzono_sasiada { get; set; }//VARIABLE NEEDED TO IMPLEMENT ENTERING OF NEIGHBOURS (SEE BELOW)
public bool kolejny_wierzcholek { get; set; } //VARIABLE NEEDED TO IMPLEMENT ENTERING OF NEIGHBOURS (SEE BELOW)
public MainWindow()
{
InitializeComponent();
aktualny_wierzcholek = 1;
zatwierdzono_sasiada = false;
kolejny_wierzcholek = false;
draw_connections_button.IsEnabled = false;
confirm_button_2.IsEnabled = false;
kolejny_wierzcholek_button.IsEnabled = false;
}
private void Generuj_kolo()
{
mainCanvas = new Canvas();
mainCanvas.Width = 512;
mainCanvas.Height = 768;
mainCanvas.Background = Brushes.Green;
mainCanvas.Margin = new Thickness(512, 0, 0, 0);
Najlepszy_grid.Children.Add(mainCanvas);
Ellipse kolo = new Ellipse();
kolo.Width = 400;
kolo.Height = 400;
kolo.StrokeThickness = 5;
kolo.Stroke = Brushes.Black;
Canvas.SetBottom(kolo, 212);
Canvas.SetLeft(kolo, 56);
mainCanvas.Children.Add(kolo); //add the circle to the canvas so it is actually rendered
}
void UstawWspolrzedne(Graf nowy_graf)
{
double kat = 2*Math.PI / nowy_graf.ilosc;
for(int i=0;i<nowy_graf.ilosc;++i)
{
nowy_graf.x[i] = Math.Cos(i * kat) * nowy_graf.promien + 255;
nowy_graf.y[i] = Math.Sin(i * kat) * nowy_graf.promien + 408;
}
}
private void illosc_wierzcholkow_TextChanged(object sender, TextChangedEventArgs e)
{
if (illosc_wierzcholkow.Text == "")
return;
int parsed;
if (Int32.TryParse(illosc_wierzcholkow.Text, out parsed)) ilosc_wierzcholkow = parsed; //avoid crashing on non-numeric input
}
private void restart_button_Click(object sender, RoutedEventArgs e)//RESTARTS THE APP, I.E. WE BUILD THE GRAPH FROM SCRATCH
{
if (nasz_graf != null)
{
illosc_wierzcholkow.Text = "";
illosc_wierzcholkow.IsEnabled = true;
nasz_graf.usunGraf();
aktualny_wierzcholek = 1;
zatwierdzono_sasiada = false;
kolejny_wierzcholek = false;
draw_connections_button.IsEnabled = false;
sasiedzi_wierzcholka.Text = "Poniżej wpisz sąsiadów wierzchołka nr ";
confirm_button.IsEnabled = true;
sasiedzi.Text = "";
confirm_button_2.IsEnabled = false;
kolejny_wierzcholek_button.IsEnabled = false;
Info_o_liczbie.Text = "Poniżej wpisz liczbę wierzchołków";
}
}
private void confirm_button_Click(object sender, RoutedEventArgs e)//CONFIRMS THE NUMBER OF VERTICES
{
if (illosc_wierzcholkow.Text == "")
{
Info_o_liczbie.Text = "Wprowadź prawidłową liczbę wierzchołków!";
}
else
{
confirm_button.IsEnabled = false;
illosc_wierzcholkow.IsEnabled = false;
Generuj_kolo();
nasz_graf = new Graf(ilosc_wierzcholkow, this);
UstawWspolrzedne(nasz_graf);
nasz_graf.rysujWierzcholki();
sasiedzi_wierzcholka.Text += aktualny_wierzcholek.ToString();
confirm_button_2.IsEnabled = true;
kolejny_wierzcholek_button.IsEnabled = true;
}
}
private void sasiedzi_TextChanged(object sender, TextChangedEventArgs e)//HERE WE ENTER THE NEIGHBOURS OF SPECIFIC VERTICES
{
}
private void confirm_button_2_Click(object sender, RoutedEventArgs e)//CONFIRMS A NEIGHBOUR FOR THE VERTEX
{
zatwierdzono_sasiada = true;
int z = 0;
if (aktualny_wierzcholek <= ilosc_wierzcholkow)
{
Int32.TryParse(sasiedzi.Text, out z);
if (z >= 1 && z <= ilosc_wierzcholkow)//vertex numbers start at 1; z = 0 would index -1 into the adjacency matrix
{
sasiedzi_wierzcholka.Text = "Poniżej wpisz sąsiadów wierzchołka nr " + aktualny_wierzcholek.ToString() + "\nWierzcholek nr " + z.ToString() + " jest sasiadem wierzcholka nr " + aktualny_wierzcholek.ToString();
nasz_graf.sasiedztwo[aktualny_wierzcholek - 1, z - 1] = 1;
nasz_graf.sasiedztwo[z - 1, aktualny_wierzcholek - 1] = 1;
}
else
{
sasiedzi_wierzcholka.Text = "Sąsiad spoza zakresu wierzchołków! Wpisz poprawnego sąsiada wierzchołka nr " + aktualny_wierzcholek.ToString();
}
}
}
private void kolejny_wierzcholek_button_Click(object sender, RoutedEventArgs e)//MOVES ON TO ENTERING THE NEIGHBOURS OF THE NEXT VERTEX
{
kolejny_wierzcholek = true;
if (aktualny_wierzcholek < ilosc_wierzcholkow)
{
aktualny_wierzcholek++;
sasiedzi_wierzcholka.Text = "Poniżej wpisz sąsiadów wierzchołka nr " + aktualny_wierzcholek.ToString();
}
else
{
sasiedzi_wierzcholka.Text = "Wpisano sąsiadów wszystkich wierzchołkow!";
confirm_button_2.IsEnabled = false;
kolejny_wierzcholek_button.IsEnabled = false;
draw_connections_button.IsEnabled = true;
}
}
private void draw_connections_button_Click(object sender, RoutedEventArgs e)
{
nasz_graf.rysujPolaczenie();
}
private void wczytywanie_Click(object sender, RoutedEventArgs e)
{
string[] polaczenia = System.IO.File.ReadAllLines(@"../../dane/macierz_sasiedztwa.txt");
int ilosc_wierzcholkow = polaczenia.Count(); //reuse the lines already read instead of reading the file a second time
Generuj_kolo();
nasz_graf = new Graf(ilosc_wierzcholkow, this);
UstawWspolrzedne(nasz_graf);
nasz_graf.rysujWierzcholki();
int i = 0;
foreach(string linia in polaczenia)
{
for (int j = 0; j < ilosc_wierzcholkow; j++)
nasz_graf.sasiedztwo[i, j] = Convert.ToInt32(linia[j]) - 48;
i++;
}
nasz_graf.rysujPolaczenie();
}
}
}
<file_sep>/README.md

# Graphs Project
Small WPF application created as part of a project at AGH University of Science and Technology.
It visualizes a simple graph. You can read the graph from a file (adjacency-matrix format; see the sample below) or simply enter all vertices of the graph and their "neighbours" from the GUI. The program's UI is written in POLISH!
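For illustration, a hypothetical "dane/macierz_sasiedztwa.txt" read by the loader could look like the sample below for a 4-vertex graph (one row of 0/1 characters per vertex, as the reading code expects; these particular values are made up):
```
0110
1010
1100
0000
```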
# Running the application
Go to "Grafy/bin/Debug/Grafy.exe" and run .exe file.
# Technologies and languages
Windows Presentation Foundation Application created in C# using MS Visual Studio 2015.
<file_sep>/Grafy/Graf.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Shapes;
using System.Windows.Media;
using System.Windows.Controls;
namespace Grafy
{
class Graf
{
public MainWindow okno;//VARIABLE REPRESENTING THE MAIN WINDOW OF THE PROGRAM
public double[] x { get; set; }//X COORDINATES OF THE GRAPH VERTICES
public double[] y { get; set; }//Y COORDINATES OF THE GRAPH VERTICES
public int[,] sasiedztwo { get; set; }//ADJACENCY (CONNECTION) MATRIX OF THE GRAPH
public int ilosc { get; set; }//NUMBER OF VERTICES
public double promien { get; set; }//RADIUS OF THE CIRCLE ON WHICH THE VERTICES LIE
public Graf(int n, MainWindow _okno)
{
x = new double[n];
y = new double[n];
ilosc = n;
promien = 200;
okno = _okno;
sasiedztwo = new int[ilosc , ilosc];
for (int i = 0; i < ilosc; ++i)
{
for(int j=0; j < ilosc; ++j)
{
sasiedztwo[i, j] = 0;
}
}
}
public void rysujWierzcholki()//METHOD THAT DRAWS THE VERTICES
{
for (int i = 0; i < ilosc; ++i)
{
Ellipse punkt = new Ellipse();
punkt.Width = 10;
punkt.Height = 10;
punkt.StrokeThickness = 1;
punkt.Stroke = Brushes.Black;
punkt.Fill = Brushes.Black;
Canvas.SetBottom(punkt, y[i]);
Canvas.SetLeft(punkt, x[i]);
okno.mainCanvas.Children.Add(punkt);
}
}
public void polaczenie()//METHOD THAT FILLS THE CONNECTION MATRIX
{
Random los = new Random();
int z;
for (int i = 0; i < ilosc; ++i)
{
for (int j = 0; j < ilosc; ++j)
{
sasiedztwo[i, j] = 0;
z = los.Next(0, 5);
if (z == 1)
{
sasiedztwo[i, j] = z;
sasiedztwo[j, i] = z;
}
}
}
}
public void rysujPolaczenie()//METHOD THAT DRAWS THE CONNECTIONS BETWEEN THE GRAPH VERTICES
{
for (int i = 0; i < ilosc; ++i)
{
for (int j = 0; j < ilosc; ++j)
{
if(sasiedztwo[i,j]==1)
{
Line krawedz = new Line();
krawedz.X1 = x[i] + 4;
krawedz.X2 = x[j] + 4;
krawedz.Y1 = y[i] - 53;
krawedz.Y2 = y[j] - 53;
krawedz.StrokeThickness = 1;
krawedz.Stroke = Brushes.Yellow;
krawedz.Visibility = System.Windows.Visibility.Visible;
okno.mainCanvas.Children.Add(krawedz);
}
}
}
}
public void usunGraf()//METHOD THAT REMOVES THE GRAPH
{
okno.mainCanvas.Children.Clear();
}
}
}
|
b28645df57c632ccbd5d6daa6b2f6ff02ca63684
|
[
"Markdown",
"C#"
] | 3
|
C#
|
BartekPitala/Graph-Constructor
|
fc8c4f32ea82f577641ca0d84e10cb053c33dea3
|
a154aa75eae639bb7061340f5625672ba198ac94
|
refs/heads/master
|
<repo_name>64studio/pifactory<file_sep>/pifirmware/generate_firmware
#!/bin/bash
#
# generate_firmware
#
# (c) <NAME> 2016
#
# Downloads and packages the RPi Non-free Firmware
#
# Downloads and packages the RPi Linux Kernel
#
# ./generate_firmware firmware-rpi master # generates the firmware package from the master commit
# ./generate_firmware linux-image-rpi master # generates the linux-image package from the master commit
#
# For now, this script only supports generating HardFP on the RPi 3 (possibly 2, untested)
#
#
# 2cf8fd5ba0b195e16627df6a5b45f47c0edc3a54 is 4.4.17
#
# todo: get version from https://raw.githubusercontent.com/raspberrypi/linux/d31c6f2420afca543abf466e6bc18c17f15eb291/Makefile
# todo: merge scripts?
# todo: HIGH PRIORITY remove modules from firmware.zip that are not compiled for v7? 50MB saving.
# todo: autopopulate Version field in control files; auto generate control files
# todo: figure out Version field for firmware-rpi
# parse arguments
if (( $# != 2 )); then
echo ""
echo "generate_firmware: Firmware Packager"
echo ""
echo "You must pass two arguments."
echo "./generate_firmware <package_type> <commit_id>"
echo ""
echo ""
echo "./generate_firmware firmware-rpi master generates the firmware-rpi package from the master commit"
echo "./generate_firmware linux-image-rpi master generates the linux-image-rpi package from the master commit"
echo ""
echo "Magic Script written by <NAME> <<EMAIL>>"
exit 1
fi
ACTION=$1
FIRMWARE_COMMIT=$2
# todo: move these into a separate file
function info {
echo "[I] generate_firmware: $1"
}
function error {
echo "[E] generate_firmware: $1"
exit 1
}
if [ -f "firmware.zip" ]; then
# if firmware.zip exists then use it
# info "firmware already exists; not removing."
# info "run 'rm firmware.zip' to clean"
:
else
# download the firmware from the Github repo
# no need to make this an argument
wget --quiet --output-document firmware.zip https://github.com/raspberrypi/firmware/archive/$FIRMWARE_COMMIT.zip
if (($? > 0)); then
error "Failed to download firmware!"
fi
fi
# if firmware/ exists then remove it
if [ -d "firmware" ]; then
rm -rf firmware
fi
# unzip to firmware/
unzip -qq firmware.zip
mv firmware-*/ firmware/
# move needed files to package temporary folder
if [ -d "tmp" ]; then
rm -rf tmp
fi
mkdir tmp
# first get a list of the files to move
if [ "$ACTION" = "firmware-rpi" ]; then
info "Re-packaging non-free firmware"
mkdir tmp/boot
mv firmware/boot/bootcode.bin tmp/boot/
mv firmware/boot/fixup*.dat tmp/boot/
mv firmware/boot/LICENCE.broadcom tmp/boot/
mv firmware/boot/start*.elf tmp/boot/
mv firmware/hardfp/opt/ tmp/opt/
elif [ "$ACTION" = "linux-image-rpi" ]; then
info "Re-packaging linux kernel"
KERNEL_HASH=`cat firmware/extra/git_hash`
info " Kernel Source git commit: $KERNEL_HASH"
mkdir tmp/boot
mkdir tmp/boot/overlays
mkdir tmp/lib
mv firmware/boot/overlays/*.dtbo tmp/boot/overlays/
mv firmware/boot/*.dtb tmp/boot/
mv firmware/boot/kernel7.img tmp/boot/
mv firmware/boot/COPYING.linux tmp/boot/
mv firmware/modules/ tmp/lib/modules/
else
error "Not an action!"
fi
# set the owner of the tmp dir
chown root:root -R tmp/
# add in the packaging files
mkdir tmp/DEBIAN
# maybe we should script this?
cp control/$ACTION.control tmp/DEBIAN/control
# make debian package
# todo: find a better way of making this quiet
dpkg-deb --build tmp > /dev/null
# make the name a bit more familiar
mv tmp.deb $ACTION.deb
# remove tmp dir
if [ -d "tmp" ]; then
rm -rf tmp
fi
# remove firmware dir
if [ -d "firmware" ]; then
rm -rf firmware
fi
<file_sep>/pifactory/pifactory
#!/bin/bash
#
# pifactory
#
# (c) <NAME> 2016
#
# This script will make a Debian-based Image for the Raspberry Pi
# for flashing onto an SD card.
#
# The script handles installation of all packages and some basic setting
# up of the system.
#
# Kernel compiling support is included; or we will just download the
# official builds.
#
# Callbacks are included to allow the user to customise the installation
# as much as possible.
#
# Based on code by <NAME> http://blog.kmp.or.at/
#
#
# Size of the Partitions
rootsize="1500"
bootsize="64M"
# read arguments
pwd=`pwd`
buildenv="${pwd}/tmp" # accept this as an input ?
#out="${pwd}/images" # accept this as an input ?
distrib_name=$1
deb_mirror=${pwd}/$4
deb_release=$2
deb_arch=armhf
output_file=${pwd}/$3
# folders in the buildenv to be mounted, one for rootfs, one for /boot
rootfs="${buildenv}/rootfs"
bootfs="${rootfs}/boot"
# Check to make sure this is ran by root
if [ $EUID -ne 0 ]; then
echo "PI-BUILDER: this tool must be run as root"
exit 1
fi
# Create the buildenv folder, and image file
echo "PI-BUILDER: Creating Image file"
mkdir -p $buildenv
image="${buildenv}/rpi_${distrib_name}_${deb_release}_${deb_arch}.img"
dd if=/dev/zero of=$image bs=1MB count=$rootsize
device=`losetup -f --show $image`
echo "PI-BUILDER: Image $image created and mounted as $device"
# Format the image file partitions
echo "PI-BUILDER: Setting up MBR/Partitions"
fdisk $device &>/dev/null << EOF
n
p
1
+$bootsize
t
c
n
p
2
w
EOF
# todo: hide the output cus it's annoying
# Mount the loopback device so we can modify the image, format the partitions, and mount/cd into rootfs
device=`kpartx -va $image | sed -E 's/.*(loop[0-9])p.*/\1/g' | head -1`
sleep 1 # Without this, we sometimes miss the mapper device!
device="/dev/mapper/${device}"
bootp=${device}p1
rootp=${device}p2
echo "PI-BUILDER: Formatting Partitions"
mkfs.vfat $bootp
mkfs.ext4 $rootp -L root
mkdir -p $rootfs
mount $rootp $rootfs
cd $rootfs
# start the debootstrap of the system
echo "PI-BUILDER: Mounted partitions, debootstrapping..."
debootstrap --no-check-gpg --foreign --arch $deb_arch $deb_release $rootfs file:///$deb_mirror
cp /usr/bin/qemu-arm-static usr/bin/
LANG=C chroot $rootfs /debootstrap/debootstrap --no-check-gpg --second-stage
# Mount the boot partition
mount -t vfat $bootp $bootfs
# Start adding content to the system files
echo "PI-BUILDER: Setting up custom files/settings relating to rpi"
# apt mirrors
# note: this is for the install process only, so we need to remove apt sources list after
# todo: install public key
# link the repo we have created to /pdktmp/repo
tmpdir="pdktmp"
repodir="$tmpdir/repo"
mkdir -p $repodir
mount --bind $deb_mirror $rootfs/$repodir
# todo: remove trusted once public key saving has been added
echo "deb [trusted=yes] file:/$repodir $deb_release main
deb-src [trusted=yes] file:/$repodir $deb_release main" > etc/apt/sources.list
# Boot commands
echo "dwc_otg.lpm_enable=0 console=ttyAMA0,115200 console=tty1 root=/dev/mmcblk0p2 rootfstype=ext4 elevator=deadline fsck.repair=yes rootwait" > boot/cmdline.txt
# Mounts
# the noatime may be a config option
echo "proc /proc proc defaults,noatime 0 0
/dev/mmcblk0p1 /boot vfat defaults 0 0
/dev/mmcblk0p2 / ext4 defaults,noatime 0 1
" > etc/fstab
# Hostname
echo "${distrib_name}" > etc/hostname
echo "127.0.1.1 ${distrib_name}" >> etc/hosts
# Networking
echo "auto lo
iface lo inet loopback
allow-hotplug eth0
iface eth0 inet dhcp
iface eth0 inet6 dhcp
" > etc/network/interfaces
# Modules
# todo: is this needed?
echo "vchiq
snd_bcm2835
" >> etc/modules
# load preseed
touch preseed.conf
if [ -d $pwd/cdd-assets/ ]; then
if [ -f $pwd/cdd-assets/preseed.conf ]; then
cp $pwd/cdd-assets/preseed.conf preseed.conf
fi
fi
# add repo key TODO add this
#LANG=C chroot $rootfs wget $deb_mirror.public.key -O - | apt-key add -
# Third Stage Setup Script (most of the setup process)
echo "#!/bin/bash
export LANGUAGE=en_GB.UTF-8
export LANG=en_GB.UTF-8
export LC_ALL=en_GB.UTF-8
apt-get update
apt-get install debconf-utils locales -y
debconf-set-selections /preseed.conf
rm -f /preseed.conf
echo 'en_GB.UTF-8 UTF-8' > /etc/locale.gen
locale-gen
apt-get install console-common -y
# workaround for aptitude not being installed
apt-get install aptitude -y
# install custom packagez from PDK
aptitude install --without-recommends -q -y -o Dpkg::Options::=\"--force-confdef\" -o Dpkg::Options::=\"--force-confold\" ~tcdd
echo \"root:raspberry\" | chpasswd
sed -i -e 's/KERNEL\!=\"eth\*|/KERNEL\!=\"/' /lib/udev/rules.d/75-persistent-net-generator.rules
rm -f /etc/udev/rules.d/70-persistent-net.rules
sed -i 's/^PermitRootLogin without-password/PermitRootLogin no/' /etc/ssh/sshd_config
echo 'HWCLOCKACCESS=no' >> /etc/default/hwclock
echo 'RAMTMP=yes' >> /etc/default/tmpfs
# make /boot/config.txt
if [ ! -f /boot/config.txt ]; then
touch /boot/config.txt
fi
rm -f third-stage
" > third-stage
chmod +x third-stage
LANG=C chroot $rootfs /third-stage
echo "Running custom setup scripts.."
# mount cdd-assets to distro
if [ -d $pwd/cdd-assets ]; then
mkdir -p $tmpdir/cdd-assets
mount --bind $pwd/cdd-assets $rootfs/$tmpdir/cdd-assets
fi
# run setup script if exists
if [ -f $pwd/cdd-setup ]; then
cp $pwd/cdd-setup $tmpdir/cdd-setup
chmod +x $tmpdir/cdd-setup
LANG=C chroot $rootfs /$tmpdir/cdd-setup
rm -f $tmpdir/cdd-setup
fi
# unmount cdd-assets
if [ -d $pwd/cdd-assets ]; then
umount $rootfs/$tmpdir/cdd-assets
rm -rf $tmpdir/cdd-assets
fi
echo "Cleaning up"
# for now lets add this apt repo for super fun good times
echo "deb http://ftp.uk.debian.org/debian jessie main
deb-src http://ftp.uk.debian.org/debian jessie main" > etc/apt/sources.list
# Cleanup Script
echo "#!/bin/bash
update-rc.d ssh remove
apt-get autoclean
apt-get clean
apt-get purge # what does this do?
apt-get update # no need to do this !
service ntp stop
#ps ax | grep ntpd | awk '{print $1}' | xargs kill
rm -r /root/.rpi-firmware > /dev/null 2>&1
rm -f cleanup
" > cleanup
chmod +x cleanup
LANG=C chroot $rootfs /cleanup
# startup script to generate new ssh host keys
rm -f etc/ssh/ssh_host_*
cat << EOF > etc/init.d/ssh_gen_host_keys
#!/bin/sh
### BEGIN INIT INFO
# Provides: Generates new ssh host keys on first boot
# Required-Start:    \$remote_fs \$syslog
# Required-Stop:     \$remote_fs \$syslog
# Default-Start: 2 3 4 5
# Default-Stop:
# Short-Description: Generates new ssh host keys on first boot
# Description: Generates new ssh host keys on first boot
### END INIT INFO
ssh-keygen -f /etc/ssh/ssh_host_rsa_key -t rsa -N ""
ssh-keygen -f /etc/ssh/ssh_host_dsa_key -t dsa -N ""
insserv -r /etc/init.d/ssh_gen_host_keys
service ssh start
update-rc.d ssh defaults
rm -f \$0
EOF
chmod a+x etc/init.d/ssh_gen_host_keys
insserv etc/init.d/ssh_gen_host_keys
# Run Raspi-Config at first login so users can expand storage and such
#echo "#!/bin/bash
#if [ `id -u` -ne 0 ]; then
# printf \"\nNOTICE: the software on this Raspberry Pi has not been fully configured. Please run 'raspi-config' as root.\n\n\"
#else
# raspi-config && exit
#fi
#" > etc/profile.d/raspi-config.sh
#chmod +x etc/profile.d/raspi-config.sh
# show the size
echo "SIZE"
df -h | grep $rootp
echo "/SIZE"
# remove repo link
umount $rootfs/$repodir
rm -rf $tmpdir
# Lets cd back
cd $buildenv && cd ..
# Unmount some partitions
echo "PI-BUILDER: Unmounting Partitions"
umount $bootp
umount $rootp
kpartx -d $image
# Properly terminate the loopback devices
echo "PI-BUILDER: Finished making the image $image"
dmsetup remove_all
losetup -D
# Move image out of builddir, as buildscript will delete it
echo "PI-BUILDER: Moving image out of builddir, then terminating"
mv ${image} ${output_file}
# this is temporary
rm -rf $buildenv
echo "PI-BUILDER: Finished!"
exit 0
<file_sep>/pikernel/compile_kernel
#!/bin/bash
#
# compile_kernel
#
# (c) <NAME> 2016
#
# Downloads, compiles and packages the RPi Linux Kernel from a Source Commit
#
# ./compile_kernel download rpi-4.4.y # prepares the environment from the rpi-4.4.y branch
# ./compile_kernel make # makes the linux-image-rpi package from the already-downloaded sources
#
# For now, this script only supports generating HardFP on the RPi 3 (possibly 2, untested)
#
#
# 2cf8fd5ba0b195e16627df6a5b45f47c0edc3a54 is 4.4.17
# todo: get version from https://raw.githubusercontent.com/raspberrypi/linux/d31c6f2420afca543abf466e6bc18c17f15eb291/Makefile
# todo: merge scripts?
# todo: autopopulate Version field in control files; auto generate control files
# todo: figure out Version field for firmware-rpi
# https://github.com/raspberrypi/linux/archive/rpi-4.4.y.zip
# parse arguments
if (( $# < 1 )); then
echo ""
echo "compile_kernel: Kernel Compiler"
echo ""
echo "You must pass at least one argument."
echo "./compile_kernel <action>"
echo ""
echo ""
echo "./compile_kernel download rpi-4.4.y prepares the environment from the rpi-4.4.y branch"
echo "./compile_kernel make makes the linux-image-rpi package from the already-downloaded sources"
echo ""
echo "Magic Script written by <NAME> <<EMAIL>>"
exit 1
fi
ACTION=$1
KERNEL_COMMIT=$2
# todo: move these into a separate file
function info {
echo "[I] generate_firmware: $1"
}
function error {
echo "[E] generate_firmware: $1"
exit 1
}
if [ "$ACTION" = "download" ]; then
# get a clean slate
rm -rf kernel/
rm -rf kernel_tools/
if [ ! -d "kernel" ]; then
info "no kernel source found; downloading Foundation source"
wget --quiet --output-document kernel.zip https://github.com/raspberrypi/linux/archive/$KERNEL_COMMIT.zip
unzip -qq kernel.zip
rm kernel.zip
mkdir kernel
mv linux-*/* kernel/
rm linux-*/ -rf
info "kernel source downloaded!"
fi
if [ ! -d "kernel_tools" ]; then
info "no kernel tools found; downloading"
wget --quiet --output-document kernel_tools.zip https://github.com/raspberrypi/tools/archive/master.zip
unzip -qq kernel_tools.zip
rm kernel_tools.zip
mkdir kernel_tools
mv tools-*/* kernel_tools/
rm tools-*/ -rf
info "kernel tools downloaded"
fi
elif [ "$ACTION" = "make" ]; then
info "Compiling & packaging linux kernel"
if [ -f "linux-image-rpi.deb" ]; then
exit "Kernel image linux-image-rpi.deb already exists!"
fi
# move needed files to package temporary folder
if [ -d "tmp" ]; then
rm -rf tmp
fi
# environment
mkdir tmp
mkdir tmp/boot
mkdir tmp/boot/overlays
# convoluted way of calculating number of host CPU cores...
KERNEL_J=`nproc`
KERNEL_J=`echo 1.5*$KERNEL_J | bc`
KERNEL_J=${KERNEL_J%.*}
#KERNEL_J=6 # cpu cores * 1.5
KERNEL_TYPE=bcm2709 #bcm2708 for pi1, bcm2709 for pi3. We only support pi3, anyway
# figure out toolchain to use
PWD=`pwd`
if [ `arch` = "i386" ]; then
TOOLCHAIN=$PWD/kernel_tools/arm-bcm2708/gcc-linaro-arm-linux-gnueabihf-raspbian/bin
elif [ `arch` = "x86_64" ]; then
TOOLCHAIN=$PWD/kernel_tools/arm-bcm2708/gcc-linaro-arm-linux-gnueabihf-raspbian-x64/bin
else
error "Unsupported arch! Quitting."
fi
#figure out what we are compiling
if [ "$KERNEL_TYPE" = bcm2708 ]; then
KERNEL_VERSION=kernel
KERNEL_CONFIG=bcmrpi_defconfig
elif [ "$KERNEL_TYPE" = bcm2709 ]; then
KERNEL_VERSION=kernel7
KERNEL_CONFIG=bcm2709_defconfig
fi
# export the MAKE settings
export KERNEL=$KERNEL_VERSION
export ARCH=arm
export CROSS_COMPILE=$TOOLCHAIN/arm-linux-gnueabihf-
export INSTALL_MOD_PATH=$PWD/tmp
# we will configure the kernel with the defaults as you are too lazy to do so ;-)
if [ ! -f "kernel/.config" ]; then
info "Configuring kernel with default settings"
make --directory=kernel -j$KERNEL_J $KERNEL_CONFIG
fi
info "Compiling Kernel"
make --directory=kernel -j$KERNEL_J zImage modules dtbs
make --directory=kernel -j$KERNEL_J modules_install
#mv firmware/boot/COPYING.linux tmp/boot/
# move kernel stuff
./kernel/scripts/mkknlimg kernel/arch/arm/boot/zImage tmp/boot/$KERNEL.img
cp kernel/arch/arm/boot/dts/*.dtb tmp/boot/
cp kernel/arch/arm/boot/dts/overlays/*.dtbo tmp/boot/overlays
cp kernel/COPYING tmp/boot/COPYING.linux
# remove firmware
rm -rf $INSTALL_MOD_PATH/lib/firmware
info "Compiling Kernel: FINISHED"
# add in the packaging files
mkdir tmp/DEBIAN
# maybe we should script this?
cp control/linux-image-rpi.control tmp/DEBIAN/control
#chown files
chown root:root -R tmp/
# make debian package
# todo: find a better way of making this quiet
dpkg-deb --build tmp > /dev/null
# make the name a bit more familiar
mv tmp.deb linux-image-rpi.deb
# remove tmp dir
if [ -d "tmp" ]; then
rm -rf tmp
fi
# remove firmware dir
if [ -d "firmware" ]; then
rm -rf firmware
fi
else
error "Not an action!"
fi
<file_sep>/README.md
# pifactory
Test scripts to build Raspberry Pi images using PDK
Scripts included:
* pifactory - handles distro building
* pikernel - compiles kernel and packages as dpkg for inclusion in distro
* pifirmware - packages non-free firmware as dpkg for inclusion in distro
Setup:
```
git clone https://github.com/64studio/pifactory
```
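Usage (a hypothetical invocation — the distribution name, release and file names below are placeholders; the argument order of name, release, output image and local package repository directory is taken from the script's own argument parsing, and it must be run as root):
```
sudo ./pifactory/pifactory mydistro jessie mydistro.img repo
```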
todo list:
* Debianize as part of PDK source package
* Integrate into PDK
|
a09392eee6c399b0ea131d088b85f580c1a83495
|
[
"Markdown",
"Shell"
] | 4
|
Shell
|
64studio/pifactory
|
950996daee1348a5db579b903434029fd7c0fe66
|
c5c21e8c2e7fe68c01b25ba665ab4e9679b8a1c3
|
refs/heads/main
|
<repo_name>Bohatyrenko/goit-react-hw-06-phonebook<file_sep>/src/components/ContactFilter/ContactFilter.js
import React from 'react';
const ContactFilter = ({ onSetFilter, filter }) => (
<>
<label>
<p>Search for contacts</p>
<input onInput={onSetFilter} type="text" name="filter" value={filter} />
</label>
</>
);
export default ContactFilter;
<file_sep>/src/redux/phonebook/phonebook-reducer.js
import { combineReducers } from 'redux';
import action from './phonebook-actions';
import types from './phonebook-types';
const itemReducer = (state = [], action) => {
return state;
};
const filterReducer = (state = '', action) => {
return state;
};
export default combineReducers({
itemReducer,
filterReducer,
});
// const reducer = (state = initilState, action) => {
// switch (action.type) {
// case 'phoneBook/AddContact':
// return [...action.payload, ...state];
// default:
// return state;
// }
// };
<file_sep>/src/components/ContactList/ContactList.js
import React from 'react';
const ContactList = ({ contacts, onDeleteContact }) => (
<ul style={{ listStyle: 'none', paddingLeft: '0' }}>
{contacts.map(({ name, number, id }) => (
<li key={id}>
<p>Name:{name}</p>
<p>Number:{number}</p>
<button id={id} onClick={onDeleteContact}>
Delete
</button>
</li>
))}
</ul>
);
export default ContactList;
|
f197c318b36291d8aecd52eba05ea6d26b2156d6
|
[
"JavaScript"
] | 3
|
JavaScript
|
Bohatyrenko/goit-react-hw-06-phonebook
|
5012975c4aabbb0bf0cb3fe68a652d48831d2b20
|
e939fce03d21b576306f88f2a7c184e05feec604
|
refs/heads/main
|
<file_sep>//
// ContentView.swift
// Fructus
//
// Created by <NAME> on 30/10/20.
//
import SwiftUI
struct ContentView: View {
// MARK: - PROPERTIES
var fruits: [Fruit] = fruitsData
// MARK: - BODY
var body: some View {
NavigationView {
List {
ForEach(fruits.shuffled()) { fruit in
NavigationLink(destination: FruitDetailView(fruit: fruit)){
FruitRowView(fruit: fruit)
.padding(.vertical, 4)
}
}
}
.navigationTitle("Fruits")
}//: NAVIGATION
}
}
// MARK: - PREVIEW
struct ContentView_Previews: PreviewProvider {
static var previews: some View {
ContentView(fruits: fruitsData).previewDevice("iPhone 11 Pro")
}
}
|
9d207c9898467253eea2ee85ba2b45360b722561
|
[
"Swift"
] | 1
|
Swift
|
alanrslima/Fructus
|
bf277755635863b1834d37ec72889795453d207b
|
8516065ff42ae999a69a6a768f57e4a70a9e2a05
|
refs/heads/master
|
<file_sep>import React, {Component} from 'react';
import {FaEnvelope, FaGithub, FaFacebook, FaLinkedin} from 'react-icons/lib/fa';
import {Row, Input, Button} from 'react-materialize';
export default class Contact extends Component{
render(){
return(
<div>
<h1>Contact me</h1>
<Row>
<form>
<Input type="text" s={12} label="Your Name" />
<Input type="email" s={12} label="Email" />
<Input type="text" s={12} label="Your Massege"/>
<Button waves='light'>Send</Button>
</form>
</Row>
<p className="sn-links">
<a href="https://www.linkedin.com/in/viktorya-danilevskaya-6318ab82/" target="_blank" title="Linked-in"><FaLinkedin /></a>
<a href="https://www.facebook.com/profile.php?id=100001787044861" target="_blank" title="Facebook"><FaFacebook /></a>
<a href="https://github.com/dvikster" target="_blank" title="Github"><FaGithub /></a>
<a href="mailto:<EMAIL>" target="_blank" title="E-mail"><FaEnvelope /></a>
</p>
</div>
);
}
}
<file_sep>import React, {Component} from 'react';
import {FormGroup, FormControl} from 'react-bootstrap';
import axios from 'axios';
export default class Projects extends Component{
state ={
books: [],
loading: true,
error: null,
inputValue: ''
};
onChangeHandler = (e)=> {
this.setState({inputValue: e.target.value})
};
componentDidMount(){
axios.get('https://www.googleapis.com/books/v1/volumes?q=Harry&key=<KEY>')
.then(result =>{
this.setState({
books: result.data.items,
loading: false,
error: null
});
})
.catch(errormes =>{
this.setState({
loading: false,
error: errormes
});
})
}
renderLoading(){
return(
<div className="loading">Loading</div>
)
}
renderError(){
return(
<div>Error....{this.state.error.message}</div>
)
}
renderBooks(){
const {error, books} = this.state;
if(error){
return this.renderError();
}
let inputValueLet = this.state.inputValue.trim().toLowerCase();
let booksNewArray=[];
//filter the array according to the entered value (processes and returns an array of filtered data)
if(books.length>0){
booksNewArray = books.filter(function (letters) {
return letters.volumeInfo.title.toLowerCase().match( inputValueLet )
});
}
return(
<div className="books-container">
{booksNewArray.map(books => {
return (
<div className="item" key={books.id}>
<div className="img-container"><img src={books.volumeInfo.imageLinks.smallThumbnail}/></div>
{/*<div><span className="desc">Language: </span>{books.original_language}</div>*/}
<div><span className="desc">Title: </span>{books.volumeInfo.title}</div>
<div><span className="desc">Description: </span>{books.volumeInfo.description}</div>
{/*<div><span className="desc">Raiting: </span>{books.vote_count}</div>*/}
</div>
)
})}
</div>
);
}
render(){
const {loading} = this.state;
return(
<div>
<h1>Book Store125</h1>
<form className="form-inline">
<FormGroup>
<FormControl
className='form-control'
value={this.state.inputValue}
onChange={this.onChangeHandler}
defaultValue=''
placeholder='Choose title'
/>
</FormGroup>
</form>
{ loading ? this.renderLoading() : this.renderBooks()}
</div>
);
}
}
<file_sep># D38-react-search-input
D38-react-search-input
|
1e76ffad8dd664977211523c8e0cf6d9546b817b
|
[
"JavaScript",
"Markdown"
] | 3
|
JavaScript
|
dvikster/D39-react-about-me
|
3ff9ff49a0febb1e37947d9c203b474f5a129b03
|
2d772f30300f3e8955b70d4c74a7327cd54cb553
|
refs/heads/master
|
<file_sep>def starts_with_a_vowel?(word)
  !word.match(/^[aeiouAEIOU]\w+/).nil?
end
def words_starting_with_un_and_ending_with_ing(text)
text.scan(/un+\w+ing/)
end
def words_five_letters_long(text)
return text.scan(/\b\w{5}\b/i)
end
def first_word_capitalized_and_ends_with_punctuation?(text)
  !text.match(/^[A-Z].*\W$/).nil?
end
def valid_phone_number?(text)
  !text.match(/\A(\+\d{1,2}\s)?\(?\d{3}\)?[\s.-]?\d{3}[\s.-]?\d{4}\z/).nil?
end
|
d49dc5db0f2d2a913198996aeb7cc13071447b24
|
[
"Ruby"
] | 1
|
Ruby
|
codyrickman/regex-lab-online-web-sp-000
|
c22a1ca86f6d06411fd4ae935780860170f4d2cd
|
bbc4d43ccf331ff2720267b07a4bbba4e0418eee
|
refs/heads/master
|
<repo_name>salda/file_scraper<file_sep>/parser.cpp
#include "include_headers/myhtml/api.h"
#include "downloader.cpp"
#include <experimental/filesystem>
using namespace experimental::filesystem;
class parser {
myhtml_t* myHTML;
myhtml_tree_t* tree;
string relative_URL_base, relative_URL_base_root;
enum attribute_types { with_possible_URL, with_possible_comma_separated_URLs, with_CSS_possibly_containing_URLs_in_url_data_type };
unordered_set<string> URLs;
void iterate_attribute_occurrences_and_possibly_start_downloading(downloader& downloader, const string& attribute, attribute_types attribute_type = with_possible_URL) {
myhtml_collection_t* tag_collection = myhtml_get_nodes_by_attribute_key(tree, NULL, NULL, attribute.c_str(), attribute.size(), NULL);
if (tag_collection && tag_collection->list && tag_collection->length)
for (size_t i = 0; i < tag_collection->length; ++i) {
string attribute_value = myhtml_attribute_value(myhtml_attribute_by_key(tag_collection->list[i], attribute.c_str(), strlen(attribute.c_str())), NULL);
if (attribute_type == with_CSS_possibly_containing_URLs_in_url_data_type) {
size_t url_pos = attribute_value.find("url(");
while (url_pos != string::npos) {
size_t url_end_bracket_pos = attribute_value.find(')', url_pos + 4);
int url_escaped = attribute_value[url_pos + 4] == '"' || attribute_value[url_pos + 4] == '\'' ? 1 : 0; // TODO maybe add range check
construct_absolute_URL_and_possibly_start_downloading(downloader, attribute_value.substr(url_pos + 4 + url_escaped, url_end_bracket_pos - url_pos - 4 - 2 * url_escaped));
url_pos = attribute_value.find("url(", url_end_bracket_pos + 1);
}
}
else if (attribute_type == with_possible_comma_separated_URLs) {
size_t url_pos = attribute_value.find_first_not_of(' ');
do {
size_t url_end_pos = attribute_value.find_first_of(" ,", url_pos + 1);
construct_absolute_URL_and_possibly_start_downloading(downloader, attribute_value.substr(url_pos, url_end_pos - url_pos));
url_pos = attribute_value.find_first_not_of(' ', attribute_value.find(',', url_end_pos) + 1);
} while (url_pos != string::npos);
}
else
construct_absolute_URL_and_possibly_start_downloading(downloader, move(attribute_value.erase(0, attribute_value.find_first_not_of(' '))));
}
myhtml_collection_destroy(tag_collection);
}
void construct_absolute_URL_and_possibly_start_downloading(downloader& downloader, string URL) {
size_t pos = URL.find_first_of(" #?");
if (pos != string::npos)
URL.erase(pos);
if (URL.empty() || URL.back() == '/')
return;
pos = URL.find("//");
if (pos == 0 || (pos != string::npos && URL[pos - 1] == ':')) {
if (URL.find('/', pos + 3) == string::npos)
return;
}
else if (URL.front() == '/')
URL.insert(0, relative_URL_base_root);
else // not sure if cURL handles ".." inside URLs, but I guess it's not relevant
URL.insert(0, relative_URL_base);
auto return_value = URLs.insert(move(URL));
if (return_value.second)
downloader.start_download(*return_value.first);
}
public:
parser(const string& HTML) {
myHTML = myhtml_create();
myhtml_init(myHTML, MyHTML_OPTIONS_DEFAULT, 1, 0);
tree = myhtml_tree_create();
myhtml_tree_init(tree, myHTML);
myencoding_t encoding;
if (!myencoding_detect(HTML.c_str(), HTML.size(), &encoding))
encoding = MyENCODING_UTF_8;
myhtml_parse(tree, encoding, HTML.c_str(), HTML.size());
}
void set_relative_URL_bases(string effective_URL) {
myhtml_collection_t* base_tag_collection = myhtml_get_nodes_by_tag_id(tree, NULL, MyHTML_TAG_BASE, NULL);
if (base_tag_collection && base_tag_collection->list && base_tag_collection->length) {
myhtml_tree_attr_t* base_href_attribute = myhtml_attribute_by_key(base_tag_collection->list[0], "href", strlen("href"));
if (base_href_attribute)
effective_URL = myhtml_attribute_value(base_href_attribute, NULL);
}
myhtml_collection_destroy(base_tag_collection);
relative_URL_base_root = effective_URL.substr(0, effective_URL.find('/', effective_URL.find("//") + 2));
relative_URL_base = effective_URL;
}
downloader start_downloading_referenced_files() {
if ((!exists("download") && !create_directory("download")) // I want to delete neither the files inside possibly existing folder
|| (exists("download") && !is_directory("download"))) // nor file with a name "download"
throw runtime_error("problem creating directory \"download\" for downloading files");
downloader downloader;
for (const string& attribute : { "action", "cite", "data", "formaction", "href", "manifest", "poster", "src" })
iterate_attribute_occurrences_and_possibly_start_downloading(downloader, attribute, with_possible_URL);
iterate_attribute_occurrences_and_possibly_start_downloading(downloader, "srcset", with_possible_comma_separated_URLs);
iterate_attribute_occurrences_and_possibly_start_downloading(downloader, "style", with_CSS_possibly_containing_URLs_in_url_data_type);
return downloader;
}
~parser() {
myhtml_tree_destroy(tree);
myhtml_destroy(myHTML);
}
};<file_sep>/downloader.cpp
#include <fstream>
#include <forward_list>
#include <unordered_set>
#include <iomanip>
#include <curl/curl.h>
#include <iostream>
#include <vector>
#include <future>
using namespace std;
struct download {
const string file_name;
ofstream download_file;
CURL* cURL;
size_t size{0};
unsigned int Adler_32_a{1};
unsigned long long int Adler_32_b{0};
size_t minimum_iterations_for_overflow{257};
const unsigned int largest_short_int_prime_number{65521};
download(const string file_name, const string& new_file_name, CURL* file_cURL) : file_name(file_name), download_file("download/" + new_file_name), cURL(file_cURL) {
if (!cURL) {
cout << "cURL not duplicated correctly" << endl;
throw runtime_error("failed to duplicate cURL");
}
if (!download_file) {
cout << "Error opening " << new_file_name << endl;
throw runtime_error("failed to open file");
}
}
bool operator==(const download& other) {
return this == &other;
}
};
class downloader {
CURL* cURL_for_copying;
forward_list<download> successful_downloads;
unordered_set<string> file_names;
vector<future<void>> started_downloads;
size_t (*CurlWrite_CallbackFunc_File)(void*, size_t, size_t, forward_list<download>::iterator) =
[](void* contents, size_t element_size, size_t element_count, forward_list<download>::iterator download) {
size_t additional_size = element_size * element_count;
download->download_file.write((char*)contents, additional_size);
size_t processed_size = 0;
do {
size_t last_element = min(processed_size + download->minimum_iterations_for_overflow, additional_size);
for (size_t i = processed_size; i != last_element; ++i) {
download->Adler_32_a += ((unsigned char*)contents)[i];
download->Adler_32_b += download->Adler_32_a;
}
processed_size = last_element;
if (download->Adler_32_a >= download->largest_short_int_prime_number) {
download->Adler_32_a -= download->largest_short_int_prime_number;
download->minimum_iterations_for_overflow = download->Adler_32_a > 240 ? 256 : 257;
}
else
download->minimum_iterations_for_overflow = (download->largest_short_int_prime_number - download->Adler_32_a + 254) / 255;
} while (processed_size != additional_size);
download->size += additional_size;
return additional_size;
};
void delete_file(const string& new_file_name, forward_list<download>::iterator DL) {
if (remove(("download/" + new_file_name).c_str())) // maybe call download.download_file.close(); before remove on some OSs
cout << "Error deleting " << new_file_name << endl;
successful_downloads.remove(*DL);
}
void download_file(const string URL) {
const string old_file_name = URL.substr(URL.find_last_of('/') + 1);
auto return_value = file_names.insert(old_file_name);
int number_of_duplicates = 0;
while (!return_value.second) // TODO rename first file on file systems to <name>.0/<name>.0.<extension>, next <name>.1/<name>.1.<extension>, ..
return_value = file_names.insert(old_file_name + '.' + to_string(++number_of_duplicates));
const string& new_file_name = *return_value.first;
forward_list<download>::iterator DL;
try {
DL = successful_downloads.insert_after(successful_downloads.before_begin(), download(move(old_file_name), new_file_name, curl_easy_duphandle(cURL_for_copying)));
} catch (const exception& e) {
return;
}
curl_easy_setopt(DL->cURL, CURLOPT_URL, URL.c_str());
curl_easy_setopt(DL->cURL, CURLOPT_WRITEDATA, DL);
CURLcode curl_code = curl_easy_perform(DL->cURL);
if (curl_code != CURLE_OK) {
cout << string(curl_easy_strerror(curl_code)) << endl;
delete_file(new_file_name, DL);
return;
}
long respCode;
curl_easy_getinfo(DL->cURL, CURLINFO_RESPONSE_CODE, &respCode);
if (respCode != 200) {
cout << "response code from " + URL + ": " + to_string(respCode) << endl;
delete_file(new_file_name, DL);
}
}
public:
downloader() {
cURL_for_copying = curl_easy_init();
if (!cURL_for_copying) {
cout << "cURL not initialized correctly" << endl;
throw runtime_error("failed to construct");
}
curl_easy_setopt(cURL_for_copying, CURLOPT_FOLLOWLOCATION, 1L);
curl_easy_setopt(cURL_for_copying, CURLOPT_SSL_VERIFYPEER, 0L);
curl_easy_setopt(cURL_for_copying, CURLOPT_WRITEFUNCTION, CurlWrite_CallbackFunc_File);
}
downloader(downloader && other) {
cURL_for_copying = move(other.cURL_for_copying);
other.cURL_for_copying = nullptr;
successful_downloads = move(other.successful_downloads);
file_names = move(other.file_names);
started_downloads = move(other.started_downloads);
}
void start_download(const string& URL) {
started_downloads.emplace_back(async(launch::async, [=] { download_file(move(URL)); } ));
}
void wait_for_running_downloads() {
for (auto& parallel_download : started_downloads)
parallel_download.get();
}
~downloader() {
successful_downloads.sort([](const download& a, const download& b) { return a.size > b.size; });
for (download& download : successful_downloads) {
if (&download != &successful_downloads.front())
cout << endl;
cout << "file name: " << setw(50) << left << download.file_name // TODO make width dynamic
//<< " size: " << dec << setw(10) << download.size
<< " hash: " << hex << setw(10) << right << (download.Adler_32_b % download.largest_short_int_prime_number << 16 | download.Adler_32_a);
if (&download == &successful_downloads.front())
cout << " <- biggest file";
curl_easy_cleanup(download.cURL);
}
if (!successful_downloads.empty())
cout << " <- smallest file" << endl;
curl_easy_cleanup(cURL_for_copying);
}
};
<file_sep>/makefile
# building on gcc version 7.2.0
CFLAGS = -g -Wall -O2
file_scraper: file_scraper.cpp
g++ $(CFLAGS) -o file_scraper.out file_scraper.cpp -Wl,-rpath,'$$ORIGIN/libraries' -Llibraries -lcurl -lmyhtml -lpthread -Wl,-Bstatic -lstdc++fs -Wl,-Bdynamic #-Wl,--verbose
clean:
$(RM) file_scraper.out *.o *~
# NEEDED libcurl-nss.so.4
# NEEDED libmyhtml.so
# NEEDED libpthread.so.0
# NEEDED libstdc++.so.6
# NEEDED libgcc_s.so.1
# NEEDED libc.so.6<file_sep>/readme.txt
file_scraper.out is the executable.
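For example (invocation inferred from the usage message in file_scraper.cpp and the URL mentioned in notes.txt): ./file_scraper.out http://www.meetangee.com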
My notes are in notes.txt.
There are 2 classes exposing interfaces usable from some other program:
response_getter and parser. There should actually be 3, the last one being downloader,
but I am submitting it like this because I will not have time to finish it in the next 30 hours.
My research where to search for files referenced from HTML is in places_with_references_in _html.txt.
Despite receiving the following Link headers in the HTTP response, the assignment is clear about downloading only "files the page references"
and from Wikipedia: "The web page usually means what is visible, but the term may also refer to a computer file, usually written in HTML or a comparable markup language."
so information from the protocol does not qualify.
Link: <https://www.meetangee.com/wp-json/>; rel="https://api.w.org/"
Link: <https://www.meetangee.com/>; rel=shortlink
I really want to make the program better, but I think I would then be too late; this is surely my biggest homework assignment. <file_sep>/notes.txt
the assignment is vague; it should at least specify what "every file the page references" means
HTTP is the default protocol for the web so http://www.meetangee.com is used
the redirection is needed, http://www.meetangee.com redirects to https://www.meetangee.com
there could have been a man in the middle while using HTTP, and since NSS doesn't set up certificates automatically, I'd rather not verify the authenticity of the peer's certificate
possible to call curl_easy_unescape for URLs, but not needed for this assignment
at first glance, Adler-32 looks like something from the stone age, and the zlib implementation looks strange
a needs at least 256 iterations to overflow 65521, 257 when starting from "1"
b can overflow every iteration when a is 65520 and new chars are 0
the modulo for b can be computed once at the end, because "unsigned long long int" is big enough to store the whole b without applying the modulo during accumulation
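a quick worked check of the 257 bound (my own example numbers, taking the worst case of every new byte being 255):
starting from a = 1, after 256 bytes a = 1 + 256*255 = 65281, still below 65521,
and after 257 bytes a = 1 + 257*255 = 65536, the first point where 65521 can be reached,
so in general the next possible overflow is at least (65521 - a + 254) / 255 bytes away, which is the bound the code recomputes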
I would use headers, but I think it would be overkill for this assignment. <file_sep>/file_scraper.cpp
#include "parser.cpp"
#include "response_getter.cpp"
downloader initialize_downloads(char* URL) {
response_getter response_getter(URL);
const string response_body = response_getter.get_response();
parser parser(response_body);
parser.set_relative_URL_bases(response_getter.get_effective_URL());
return parser.start_downloading_referenced_files();
}
int main(int argc, char* argv[]) {
if (!argv[1] || !strlen(argv[1])) {
cout << "Usage: " + string(argv[0]) + " <HTML5_valid_source_URL>" << endl;
cin.sync();
cin.ignore();
return 0;
}
if (curl_global_init(CURL_GLOBAL_ALL)) {
cout << "cURL global not initialized correctly" << endl;
cin.sync();
cin.ignore();
return -1;
}
try {
downloader downloader = initialize_downloads(argv[1]);
downloader.wait_for_running_downloads();
} catch (const exception& e) {
cout << e.what() << endl;
return -1;
}
curl_global_cleanup();
}<file_sep>/response_getter.cpp
#include <curl/curl.h>
#include <iostream>
using namespace std;
class response_getter {
CURL* cURL;
char* URL;
size_t (*CurlWrite_CallbackFunc_String)(void*, size_t, size_t, string*) =
[](void* contents, size_t element_size, size_t element_count, string* output) {
size_t additional_size = element_size * element_count;
output->append((char*)contents, additional_size);
return additional_size;
};
public:
response_getter(char* URL) : URL(URL) {
cURL = curl_easy_init();
if (!cURL) {
cout << "cURL not initialized correctly" << endl;
throw runtime_error("failed to construct");
}
curl_easy_setopt(cURL, CURLOPT_URL, URL);
curl_easy_setopt(cURL, CURLOPT_FOLLOWLOCATION, 1L);
curl_easy_setopt(cURL, CURLOPT_SSL_VERIFYPEER, 0L);
}
const string get_response() {
curl_easy_setopt(cURL, CURLOPT_WRITEFUNCTION, CurlWrite_CallbackFunc_String);
string response_body;
curl_easy_setopt(cURL, CURLOPT_WRITEDATA, &response_body);
CURLcode curl_code = curl_easy_perform(cURL);
if (curl_code != CURLE_OK) {
cout << string(curl_easy_strerror(curl_code));
throw runtime_error("failed to perform");
}
long respCode;
curl_easy_getinfo(cURL, CURLINFO_RESPONSE_CODE, &respCode);
if (respCode != 200) {
cout << "response code from " + string(URL) + ": " + to_string(respCode);
throw runtime_error("response code not 200");
}
if (response_body.empty()) {
cout << "no response from " + string(URL);
throw runtime_error("response body empty");
}
return response_body;
}
string get_effective_URL() {
char* URL = NULL;
curl_easy_getinfo(cURL, CURLINFO_EFFECTIVE_URL, &URL);
if (!URL)
throw runtime_error("effective URL empty");
return URL;
}
~response_getter() {
curl_easy_cleanup(cURL);
}
};
|
64f3ae95f1bf4f3cf004927470fbbf2cad75027a
|
[
"Text",
"Makefile",
"C++"
] | 7
|
C++
|
salda/file_scraper
|
6ea0d2a91ab37f7d7308f43390e75907c7a0553c
|
9b2d74c20abcbede8b38f0b3cff230dcf95a053f
|
refs/heads/main
|
<file_sep># NIK Translator
NIK Translator converts a 16-digit NIK code into human-readable information.
### Example Request:
```php
require 'NIK-Translator.php';
$NIK = new NIKTranslator;
print json_encode($NIK->parse('Masukkan NIK disini..'), JSON_PRETTY_PRINT);
```
### Example Response:
The NIK below was obtained for free from the internet, as an example.
```json
{
"nik": "3271046504930002",
"uniqueCode": "0002",
"gender": "PEREMPUAN",
"bornDate": "25-04-1993",
"age": {
"text": "27 tahun 9 bulan 29 hari",
"year": 27,
"month": 9,
"days": 29
},
"nextBirthday": {
"text": "2 bulan 2 hari lagi",
"year": 0,
"month": 2,
"day": 2
},
"zodiac": "Taurus",
"province": "JAWA BARAT",
"city": "KOTA BOGOR",
"subdistrict": "BOGOR BARAT",
"postalCode": "16116"
}
```
### Other Language:
* [Dart Version by yusriltakeuchi](https://github.com/yusriltakeuchi/nik_validator)
* [JavaScript Version by fauzan121002](https://github.com/fauzan121002/nik-validator)
<file_sep><?php
class NIKTranslator
{
// Get current year and get the last 2 digit numbers
function getCurrentYear() {
return (int)date('y');
}
// Get year in NIK
function getNIKYear($nik) {
return (int)substr($nik, 10, 2);
}
// Get date in NIK
function getNIKDate($nik) {
return (int)substr($nik, 6, 2);
}
function getNIKDateFull($nik, $isFemale) {
$date = (int)substr($nik, 6, 2);
if($isFemale) $date -= 40;
return ($date >= 10) ? $date : '0'.$date; // >= 10, otherwise day 10 would be padded to '010'
}
// Get subdistrict split postal code
function getSubdistrictPostalCode($nik, $location) {
return explode(' -- ', $location['kecamatan'][substr($nik, 0, 6)]);
}
// Get province in NIK
function getProvince($nik, $location) {
return $location['provinsi'][substr($nik, 0, 2)];
}
// Get city in NIK
function getCity($nik, $location) {
return $location['kabkot'][substr($nik, 0, 4)];
}
// Get NIK gender
function getGender($date) {
return ($date > 40) ? 'PEREMPUAN' : 'LAKI-LAKI';
}
// Get born month
function getBornMonth($nik) {
return (int)substr($nik, 8, 2);
}
function getBornMonthFull($nik) {
return substr($nik, 8, 2);
}
// Get born year
function getBornYear($nikYear, $currentYear) {
return ($nikYear < $currentYear)
? (($nikYear >= 10) ? '20'.$nikYear : '200'.$nikYear)
: (($nikYear >= 10) ? '19'.$nikYear : '190'.$nikYear);
}
// Get unique code in NIK
function getUniqueCode($nik) {
return substr($nik, 12, 4);
}
// Get age from NIK
function getAge($birthday) {
date_default_timezone_set('Asia/Jakarta');
$diff = date_diff(date_create($birthday), date_create(date('Y-m-d')));
return [
'years' => $diff->y,
'months' => $diff->m,
'days' => $diff->d,
];
}
// Get next birthday
function getNextBirthday($birthday) {
date_default_timezone_set('Asia/Jakarta');
$date = explode('-', date('Y-m-d'));
$birth = explode('-', $birthday);
if($date[1] == $birth[1] && $date[2] <= $birth[2]) $date[0] += 1;
$births = $date[0].substr($birthday, -6);
$diff = date_diff(date_create(date('Y-m-d')), date_create($births));
$y = ($diff->invert) ? -1*$diff->y : $diff->y;
$m = ($diff->invert) ? -1*$diff->m : $diff->m;
$d = ($diff->invert) ? -1*$diff->d : $diff->d;
$txt = '';
if($y != 0) $txt .= "$y tahun ";
if($m != 0) $txt .= "$m bulan ";
if($d != 0) $txt .= "$d hari ";
$txt .= 'lagi';
return [
'text' => $txt,
'year' => $y,
'month' => $m,
'day' => $d,
];
}
// Get zodiac from bornDate and bornMonth
function getZodiac($date, $month, $isFemale) {
if($isFemale) $date -= 40;
if(($month == 1 && $date >= 20) || ($month == 2 && $date < 19)) return 'Aquarius';
if(($month == 2 && $date >= 19) || ($month == 3 && $date < 21)) return 'Pisces';
if(($month == 3 && $date >= 21) || ($month == 4 && $date < 20)) return 'Aries';
if(($month == 4 && $date >= 20) || ($month == 5 && $date < 21)) return 'Taurus';
if(($month == 5 && $date >= 21) || ($month == 6 && $date < 22)) return 'Gemini';
if(($month == 6 && $date >= 21) || ($month == 7 && $date < 23)) return 'Cancer';
if(($month == 7 && $date >= 23) || ($month == 8 && $date < 23)) return 'Leo';
if(($month == 8 && $date >= 23) || ($month == 9 && $date < 23)) return 'Virgo';
if(($month == 9 && $date >= 23) || ($month == 10 && $date < 24)) return 'Libra';
if(($month == 10 && $date >= 24) || ($month == 11 && $date < 23)) return 'Scorpio';
if(($month == 11 && $date >= 23) || ($month == 12 && $date < 22)) return 'Sagitarius';
if(($month == 12 && $date >= 22) || ($month == 1 && $date < 19)) return 'Capricorn';
return 'Zodiak tidak ditemukan';
}
function parse($nik) {
$location = $this->getLocationAsset();
// Check NIK and make sure is correct
if($this->validate($nik)) {
$currentYear = $this->getCurrentYear();
$nikYear = $this->getNIKYear($nik);
$nikDate = $this->getNIKDate($nik);
$gender = $this->getGender($nikDate);
$nikDateFull = $this->getNIKDateFull($nik, $gender == 'PEREMPUAN');
$subdistrictPostalCode = $this->getSubdistrictPostalCode($nik, $location);
$province = $this->getProvince($nik, $location);
$city = $this->getCity($nik, $location);
$subdistrict = $subdistrictPostalCode[0];
$postalCode = $subdistrictPostalCode[1];
$bornMonth = $this->getBornMonth($nik);
$bornMonthFull = $this->getBornMonthFull($nik);
$bornYear = $this->getBornYear($nikYear, $currentYear);
$uniqueCode = $this->getUniqueCode($nik);
$zodiac = $this->getZodiac($nikDate, $bornMonth, $gender == 'PEREMPUAN');
$age = $this->getAge("$bornYear-$bornMonthFull-$nikDateFull");
$nextBirthday = $this->getNextBirthday("$bornYear-$bornMonthFull-$nikDateFull");
return [
'nik' => $nik ?? '',
'uniqueCode' => $uniqueCode ?? '',
'gender' => $gender ?? '',
'bornDate' => "$nikDateFull-$bornMonthFull-$bornYear" ?? '',
'age' => [
'text' => $age['years'].' tahun '.$age['months'].' bulan '.$age['days'].' hari',
'year' => $age['years'],
'month' => $age['months'],
'days' => $age['days']
],
'nextBirthday' => $nextBirthday,
'zodiac' => $zodiac ?? '',
'province' => $province ?? '',
'city' => $city ?? '',
'subdistrict' => $subdistrict ?? '',
'postalCode' => $postalCode ?? ''
];
} else {
return false;
}
}
// Validate NIK and make sure the number is correct
function validate($nik) {
$loc = $this->getLocationAsset();
return strlen($nik) == 16 &&
isset($loc['provinsi'][substr($nik, 0, 2)]) &&
isset($loc['kabkot'][substr($nik, 0, 4)]) &&
isset($loc['kecamatan'][substr($nik, 0, 6)]);
}
// Load location assets like province, city, and subdistricts
// from local json data
function getLocationAsset() {
$result = file_get_contents('wilayah.json');
return json_decode($result, true);
}
}
<file_sep><?php
header('Content-Type: application/json');
require 'NIK-Translator.php';
$NIK = new NIKTranslator;
print json_encode($NIK->parse('Masukkan NIK disini..'), JSON_PRETTY_PRINT);
|
e4449eabf80f01825a43f6aa649cf002425b486d
|
[
"Markdown",
"PHP"
] | 3
|
Markdown
|
ShennBoku/NIK-Translator
|
62a9168cb8fd639e3d505b1dd7a976457029c421
|
cee589e1bcfd5e56342c2e635e4938ef5e7ea3b0
|
refs/heads/master
|
<repo_name>FullHendrix/csharp-ddd-skeleton<file_sep>/apps/Mooc/Backend/Command/ConsumeMsSqlDomainEventsCommand.cs
namespace CodelyTv.Apps.Mooc.Backend.Command
{
using Microsoft.Extensions.DependencyInjection;
using Shared.Cli;
using Shared.Infrastructure.Bus.Event.MsSql;
public class ConsumeMsSqlDomainEventsCommand : Command
{
private readonly MsSqlDomainEventsConsumer _consumer;
public ConsumeMsSqlDomainEventsCommand(MsSqlDomainEventsConsumer consumer)
{
_consumer = consumer;
}
public override void Execute(string[] args)
{
_consumer.Consume();
}
}
}<file_sep>/src/Mooc/CoursesCounter/Domain/CoursesCounter.cs
namespace CodelyTv.Mooc.CoursesCounter.Domain
{
using System;
using System.Collections.Generic;
using System.Linq;
using Courses.Domain;
public class CoursesCounter
{
public CoursesCounterId Id { get; private set; }
public CoursesCounterTotal Total { get; private set; }
public List<CourseId> ExistingCourses { get; private set; }
public CoursesCounter(CoursesCounterId id, CoursesCounterTotal total, List<CourseId> existingCourses)
{
Id = id;
Total = total;
ExistingCourses = existingCourses;
}
private CoursesCounter()
{
}
public static CoursesCounter Initialize(string id)
{
return new CoursesCounter(new CoursesCounterId(id), CoursesCounterTotal.Initialize(), new List<CourseId>());
}
public bool HasIncremented(CourseId id)
{
return this.ExistingCourses.Contains(id);
}
public void Increment(CourseId id)
{
this.Total = this.Total.Increment();
this.ExistingCourses.Add(id);
}
public override bool Equals(object obj)
{
if (this == obj) return true;
var item = obj as CoursesCounter;
if (item == null) return false;
return this.Id.Equals(item.Id) &&
this.Total.Equals(item.Total) &&
this.ExistingCourses.SequenceEqual(item.ExistingCourses);
}
public override int GetHashCode()
{
return HashCode.Combine(this.Id, this.Total, this.ExistingCourses);
}
}
}<file_sep>/src/Mooc/CoursesCounter/Application/Incrementer/CoursesCounterIncrementer.cs
namespace CodelyTv.Mooc.CoursesCounter.Application.Incrementer
{
using System.Threading.Tasks;
using CodelyTv.Shared.Domain;
using Courses.Domain;
using Domain;
public class CoursesCounterIncrementer
{
private readonly ICoursesCounterRepository repository;
private readonly IUuidGenerator uuidGenerator;
public CoursesCounterIncrementer(ICoursesCounterRepository repository, IUuidGenerator uuidGenerator)
{
this.repository = repository;
this.uuidGenerator = uuidGenerator;
}
public async Task Increment(CourseId id)
{
CoursesCounter counter = await repository.Search() ?? InitializeCounter();
if (!counter.HasIncremented(id))
{
counter.Increment(id);
await repository.Save(counter);
}
}
private CoursesCounter InitializeCounter()
{
return CoursesCounter.Initialize(uuidGenerator.Generate());
}
}
}<file_sep>/src/Mooc/Shared/Infrastructure/Persistence/EntityFramework/MoocContext.cs
namespace CodelyTv.Mooc.Shared.Infrastructure.Persistence.EntityFramework
{
using CodelyTv.Shared.Domain.Bus.Event;
using Courses.Domain;
using CoursesCounter.Domain;
using EntityConfigurations;
using Microsoft.EntityFrameworkCore;
public class MoocContext : DbContext
{
public DbSet<Course> Courses { get; set; }
public DbSet<CoursesCounter> CoursesCounter { get; set; }
public DbSet<DomainEventPrimitive> DomainEvents { get; set; }
public MoocContext(DbContextOptions<MoocContext> options) : base(options)
{
}
protected override void OnModelCreating(ModelBuilder modelBuilder)
{
modelBuilder.ApplyConfiguration(new CourseConfiguration());
modelBuilder.ApplyConfiguration(new CoursesCounterConfiguration());
modelBuilder.ApplyConfiguration(new DomainEventPrimitiveConfiguration());
}
}
}
<file_sep>/src/Shared/Infrastructure/Bus/Command/InMemoryCommandBus.cs
namespace CodelyTv.Shared.Infrastructure.Bus.Command
{
using System;
using System.Collections;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Domain.Bus.Command;
public class InMemoryCommandBus : ICommandBus
{
private readonly IServiceProvider _provider;
private static readonly ConcurrentDictionary<Type, IEnumerable<CommandHandlerWrapper>> _commandHandlers = new ConcurrentDictionary<Type, IEnumerable<CommandHandlerWrapper>>();
public InMemoryCommandBus(IServiceProvider provider)
{
_provider = provider;
}
public async Task Dispatch(Command command)
{
var wrappedHandlers = GetWrappedHandlers(command);
if(wrappedHandlers == null) throw new CommandNotRegisteredError(command);
foreach (CommandHandlerWrapper handler in wrappedHandlers)
{
await handler.Handle(command, _provider);
}
}
private IEnumerable<CommandHandlerWrapper> GetWrappedHandlers(Command command)
{
Type handlerType = typeof(ICommandHandler<>).MakeGenericType(command.GetType());
Type wrapperType = typeof(CommandHandlerWrapper<>).MakeGenericType(command.GetType());
IEnumerable handlers =
(IEnumerable) _provider.GetService(typeof(IEnumerable<>).MakeGenericType(handlerType));
var wrappedHandlers = _commandHandlers.GetOrAdd(command.GetType(), handlers.Cast<object>()
.Select(handler => (CommandHandlerWrapper) Activator.CreateInstance(wrapperType)));
return wrappedHandlers;
}
}
}<file_sep>/src/Shared/Domain/Bus/Command/ICommandHandler.cs
namespace CodelyTv.Shared.Domain.Bus.Command
{
using System.Threading.Tasks;
public interface ICommandHandler<TCommand> where TCommand : Command
{
Task Handle(TCommand command);
}
}<file_sep>/test/src/Mooc/CoursesCounter/Domain/CoursesCounterIdMother.cs
namespace CodelyTv.Test.Mooc.CoursesCounter.Domain
{
using CodelyTv.Mooc.CoursesCounter.Domain;
using Test.Shared.Domain;
public class CoursesCounterIdMother
{
public static CoursesCounterId Create(string value)
{
return new CoursesCounterId(value);
}
public static CoursesCounterId Random()
{
return Create(UuidMother.Random());
}
}
}<file_sep>/src/Shared/Cli/CommandBuilder.cs
namespace CodelyTv.Shared.Cli
{
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
public abstract class CommandBuilder<T>
{
protected ServiceProvider Provider { get; set; }
private readonly string[] _args;
private readonly Dictionary<string, Type> Commands;
protected CommandBuilder(string[] args, Dictionary<string, Type> commands)
{
_args = args;
Commands = commands;
}
public abstract T Build(IConfigurationRoot config);
public virtual void Run()
{
var command = GetCommands();
using IServiceScope scope = Provider.CreateScope();
Type commandType = command;
object service = scope.ServiceProvider.GetService(commandType);
((Command) service).Execute(_args);
}
protected Type GetCommands()
{
var command = Commands.FirstOrDefault(cmd => _args.Contains(cmd.Key));
if (command.Value == null) throw new SystemException("arguments do not match any command");
return command.Value;
}
}
}<file_sep>/test/src/Mooc/CoursesCounter/CoursesCounterModuleUnitTestCase.cs
namespace CodelyTv.Test.Mooc.CoursesCounter
{
using CodelyTv.Mooc.CoursesCounter.Domain;
using Moq;
using Test.Shared.Infrastructure;
public class CoursesCounterModuleUnitTestCase : UnitTestCase
{
protected readonly Mock<ICoursesCounterRepository> Repository;
protected CoursesCounterModuleUnitTestCase()
{
this.Repository = new Mock<ICoursesCounterRepository>();
}
protected void ShouldHaveSaved(CoursesCounter course)
{
this.Repository.Verify(x => x.Save(course), Times.AtLeastOnce());
}
protected void ShouldSearch(CoursesCounter counter)
{
this.Repository.Setup(x => x.Search()).ReturnsAsync(counter);
}
protected void ShouldSearch()
{
this.Repository.Setup(x => x.Search()).ReturnsAsync((CoursesCounter) null);
}
}
}<file_sep>/test/apps/Mooc/Backend/Controller/CoursesCounter/CoursesCounterGetControllerShould.cs
namespace MoocTest.apps.Backend.Controller.CoursesCounter
{
using System.Collections.Generic;
using System.Net.Http;
using System.Threading.Tasks;
using CodelyTv.Apps.Mooc.Backend;
using CodelyTv.Shared.Domain.Bus.Event;
using CodelyTv.Shared.Domain.Courses;
using CodelyTv.Test.Mooc;
using Xunit;
public class CoursesCounterGetControllerShould : MoocContextApplicationTestCase
{
public CoursesCounterGetControllerShould(MoocWebApplicationFactory<Startup> factory) : base(factory)
{
CreateAnonymousClient();
}
[Fact]
public async Task get_the_counter_with_one_course()
{
await GivenISendEventsToTheBus(new List<DomainEvent>
{
new CourseCreatedDomainEvent("8f34bc99-e0e2-4296-a008-75f51f03aeb4", "DDD en Java", "7 days"),
}
);
await AssertResponse(HttpMethod.Get, "/courses-counter", 200, "{\"total\":1}");
}
[Fact]
public async Task get_the_counter_with_more_than_one_course()
{
await GivenISendEventsToTheBus(new List<DomainEvent>
{
new CourseCreatedDomainEvent("8f34bc99-e0e2-4296-a008-75f51f03aeb4", "DDD en Java", "7 days"),
new CourseCreatedDomainEvent("3642f700-868a-4778-9317-a2d542d01785", "DDD en PHP", "6 days"),
new CourseCreatedDomainEvent("92dd8402-69f3-4900-b569-3f2c2797065f", "DDD en CSharp", "10 years")
}
);
await AssertResponse(HttpMethod.Get, "/courses-counter", 200, "{\"total\":3}");
}
[Fact]
public async Task get_the_counter_with_more_than_one_course_having_duplicated_events()
{
await GivenISendEventsToTheBus(new List<DomainEvent>
{
new CourseCreatedDomainEvent("8f34bc99-e0e2-4296-a008-75f51f03aeb4", "DDD en Java", "7 days"),
new CourseCreatedDomainEvent("8f34bc99-e0e2-4296-a008-75f51f03aeb4", "DDD en Java", "7 days"),
new CourseCreatedDomainEvent("8f34bc99-e0e2-4296-a008-75f51f03aeb4", "DDD en Java", "7 days"),
new CourseCreatedDomainEvent("3642f700-868a-4778-9317-a2d542d01785", "DDD en PHP", "6 days"),
new CourseCreatedDomainEvent("3642f700-868a-4778-9317-a2d542d01785", "DDD en PHP", "6 days"),
new CourseCreatedDomainEvent("3642f700-868a-4778-9317-a2d542d01785", "DDD en PHP", "6 days"),
new CourseCreatedDomainEvent("3642f700-868a-4778-9317-a2d542d01785", "DDD en PHP", "6 days"),
new CourseCreatedDomainEvent("92dd8402-69f3-4900-b569-3f2c2797065f", "DDD en CSharp", "10 years"),
new CourseCreatedDomainEvent("92dd8402-69f3-4900-b569-3f2c2797065f", "DDD en CSharp", "10 years")
}
);
await AssertResponse(HttpMethod.Get, "/courses-counter", 200, "{\"total\":3}");
}
}
}<file_sep>/src/Shared/Infrastructure/Bus/Event/MsSql/MsSqlEventBus.cs
namespace CodelyTv.Shared.Infrastructure.Bus.Event.MsSql
{
using System.Collections.Generic;
using System.Threading.Tasks;
using Domain.Bus.Event;
using Microsoft.EntityFrameworkCore;
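// Persists each published domain event as a DomainEventPrimitive row through the injected
// DbContext; registered as the failover bus for the RabbitMQ publisher.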
public class MsSqlEventBus : IEventBus
{
private readonly DbContext _context;
public MsSqlEventBus(DbContext eventContext)
{
_context = eventContext;
}
public async Task Publish(List<DomainEvent> events)
{
foreach (var domainEvent in events)
{
await Publish(domainEvent);
}
}
private async Task Publish(DomainEvent domainEvent)
{
DomainEventPrimitive value = new DomainEventPrimitive()
{
Id = domainEvent.EventId,
AggregateId = domainEvent.AggregateId,
Body = domainEvent.ToPrimitives(),
Name = domainEvent.EventName(),
OccurredOn = domainEvent.OccurredOn
};
await _context.Set<DomainEventPrimitive>().AddAsync(value);
await _context.SaveChangesAsync();
}
}
}<file_sep>/test/src/Shared/Infrastructure/UnitTestCase.cs
namespace CodelyTv.Test.Shared.Infrastructure
{
using System.Collections.Generic;
using CodelyTv.Shared.Domain;
using CodelyTv.Shared.Domain.Bus.Event;
using Moq;
public class UnitTestCase
{
protected readonly Mock<IEventBus> EventBus;
protected readonly Mock<IUuidGenerator> UuidGenerator;
public UnitTestCase()
{
this.EventBus = new Mock<IEventBus>();
this.UuidGenerator = new Mock<IUuidGenerator>();
}
public void ShouldHavePublished(List<DomainEvent> domainEvents)
{
this.EventBus.Verify(x => x.Publish(domainEvents), Times.AtLeastOnce());
}
public void ShouldHavePublished(DomainEvent domainEvent)
{
ShouldHavePublished(new List<DomainEvent>() {domainEvent});
}
public void ShouldGenerateUuid(string uuid)
{
this.UuidGenerator.Setup(x => x.Generate()).Returns(uuid);
}
}
}<file_sep>/apps/Mooc/Backend/Program.cs
namespace CodelyTv.Apps.Mooc.Backend
{
using System;
using System.IO;
using System.Linq;
using CodelyTv.Apps.Mooc.Backend.Command;
using Microsoft.AspNetCore;
using Microsoft.AspNetCore.Hosting;
using Microsoft.Extensions.Configuration;
public static class Program
{
public static void Main(string[] args)
{
if (!args.Any()) CreateWebHostBuilder(args).Build().Run();
MoocBackendCommandBuilder.Create(args).Build(Configuration()).Run();
}
private static IWebHostBuilder CreateWebHostBuilder(string[] args)
{
return WebHost.CreateDefaultBuilder(args)
.UseStartup<Startup>();
}
private static IConfigurationRoot Configuration()
{
var builder = new ConfigurationBuilder()
.SetBasePath(Path.Combine(AppContext.BaseDirectory))
.AddJsonFile("appsettings.json", optional: true, reloadOnChange: true);
return builder.Build();
}
}
}<file_sep>/src/Shared/Infrastructure/Bus/Event/InMemoryApplicationEventBus.cs
namespace CodelyTv.Shared.Infrastructure.Bus.Event
{
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Domain.Bus.Event;
using Microsoft.Extensions.DependencyInjection;
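// For each published event, resolves every IDomainEventSubscriber<TEvent> from a new
// DI scope and awaits their On handlers in sequence.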
public class InMemoryApplicationEventBus : IEventBus
{
private readonly IServiceProvider _serviceProvider;
public InMemoryApplicationEventBus(IServiceProvider serviceProvider)
{
_serviceProvider = serviceProvider;
}
public async Task Publish(List<DomainEvent> events)
{
if (events == null)
return;
using IServiceScope scope = _serviceProvider.CreateScope();
foreach (var @event in events)
{
var subscribers = GetSubscribers(@event, scope);
foreach (object subscriber in subscribers)
{
await ((IDomainEventSubscriberBase) subscriber).On(@event);
}
}
}
private static IEnumerable<object> GetSubscribers(DomainEvent @event, IServiceScope scope)
{
Type eventType = @event.GetType();
Type subscriberType = typeof(IDomainEventSubscriber<>).MakeGenericType(eventType);
return scope.ServiceProvider.GetServices(subscriberType);
}
}
}<file_sep>/test/src/Mooc/CoursesCounter/Application/Increment/IncrementCoursesCounterOnCourseCreatedShould.cs
namespace CodelyTv.Test.Mooc.CoursesCounter.Application.Increment
{
using CodelyTv.Mooc.Courses.Domain;
using CodelyTv.Mooc.CoursesCounter.Application.Incrementer;
using CodelyTv.Mooc.CoursesCounter.Domain;
using CodelyTv.Shared.Domain.Courses;
using Courses.Domain;
using Domain;
using Xunit;
public class IncrementCoursesCounterOnCourseCreatedShould : CoursesCounterModuleUnitTestCase
{
private IncrementCoursesCounterOnCourseCreated Subscriber;
public IncrementCoursesCounterOnCourseCreatedShould()
{
this.Subscriber = new IncrementCoursesCounterOnCourseCreated(
new CoursesCounterIncrementer(this.Repository.Object, this.UuidGenerator.Object)
);
}
[Fact]
public void it_should_initialize_a_new_counter()
{
CourseCreatedDomainEvent domainEvent = CourseCreatedDomainEventMother.Random();
CourseId courseId = CourseIdMother.Create(domainEvent.AggregateId);
CoursesCounter newCounter = CoursesCounterMother.WithOne(courseId);
ShouldSearch();
ShouldGenerateUuid(newCounter.Id.Value);
this.Subscriber.On(domainEvent);
this.ShouldHaveSaved(newCounter);
}
[Fact]
public void it_should_increment_an_existing_counter()
{
CourseCreatedDomainEvent domainEvent = CourseCreatedDomainEventMother.Random();
CourseId courseId = CourseIdMother.Create(domainEvent.AggregateId);
CoursesCounter existingCounter = CoursesCounterMother.Random();
CoursesCounter incrementedCounter = CoursesCounterMother.Incrementing(existingCounter, courseId);
ShouldSearch(existingCounter);
this.Subscriber.On(domainEvent);
ShouldHaveSaved(incrementedCounter);
}
[Fact]
public void it_should_not_increment_an_already_incremented_course()
{
CourseCreatedDomainEvent domainEvent = CourseCreatedDomainEventMother.Random();
CourseId courseId = CourseIdMother.Create(domainEvent.AggregateId);
CoursesCounter existingCounter = CoursesCounterMother.WithOne(courseId);
ShouldSearch(existingCounter);
this.Subscriber.On(domainEvent);
}
}
}<file_sep>/test/src/Mooc/Courses/CoursesModuleInfrastructureTestCase.cs
namespace CodelyTv.Test.Mooc.Courses
{
using CodelyTv.Mooc.Courses.Domain;
public abstract class CoursesModuleInfrastructureTestCase : MoocContextInfrastructureTestCase
{
protected ICourseRepository Repository => GetService<ICourseRepository>();
}
}<file_sep>/test/src/Mooc/Courses/Application/Create/CreateCourseCommandHandlerShould.cs
namespace CodelyTv.Test.Mooc.Courses.Application.Create
{
using CodelyTv.Mooc.Courses.Application.Create;
using Domain;
using Xunit;
public class CreateCourseCommandHandlerShould : CoursesModuleUnitTestCase
{
private readonly CreateCourseCommandHandler _handler;
public CreateCourseCommandHandlerShould()
{
this._handler = new CreateCourseCommandHandler(new CourseCreator(Repository.Object, EventBus.Object));
}
[Fact]
public void create_a_valid_course()
{
var command = CreateCourseCommandMother.Random();
var course = CourseMother.FromRequest(command);
var domainEvent = CourseCreatedDomainEventMother.FromCourse(course);
this._handler.Handle(command);
this.ShouldHaveSave(course);
this.ShouldHavePublished(domainEvent);
}
}
}<file_sep>/src/Shared/Infrastructure/Persistence/EntityFramework/EntityConfigurations/ConvertConfiguration.cs
namespace CodelyTv.Shared.Infrastructure.Persistence.EntityFramework.EntityConfigurations
{
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using Newtonsoft.Json;
public static class ConvertConfiguration
{
public static List<TObject> ObjectFromJson<TObject>(string json) where TObject : class
{
var jsonList = JsonConvert.DeserializeObject<List<string>>(json);
Type type = typeof(TObject);
ConstructorInfo ctor = type.GetConstructor(new[] {typeof(string)});
return jsonList.Select(x => (TObject) ctor.Invoke(new object[] {x})).ToList();
}
public static string ObjectToJson<T>(List<T> objects)
{
return JsonConvert.SerializeObject(objects.Select(x => x.ToString()));
}
}
}<file_sep>/src/Shared/Infrastructure/Bus/Event/DomainEventJsonDeserializer.cs
namespace CodelyTv.Shared.Infrastructure.Bus.Event
{
using System;
using System.Collections.Generic;
using System.Reflection;
using Domain.Bus.Event;
using Newtonsoft.Json;
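// Reads the event envelope ({"data": {type, id, occurred_on, attributes}}), resolves the
// CLR type from its event name via DomainEventsInformation, and rebuilds the event by
// invoking FromPrimitives through reflection.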
public class DomainEventJsonDeserializer
{
private readonly DomainEventsInformation information;
public DomainEventJsonDeserializer(DomainEventsInformation information)
{
this.information = information;
}
public DomainEvent Deserialize(string body)
{
var eventData = JsonConvert.DeserializeObject<Dictionary<string, Dictionary<string, object>>>(body);
var data = eventData["data"];
var attributes = JsonConvert.DeserializeObject<Dictionary<string, string>>(data["attributes"].ToString());
var domainEventType = information.ForName((string) data["type"]);
DomainEvent instance = (DomainEvent) Activator.CreateInstance(domainEventType);
DomainEvent domainEvent = (DomainEvent) domainEventType
.GetTypeInfo()
.GetDeclaredMethod(nameof(DomainEvent.FromPrimitives))
.Invoke(instance, new object[]
{
attributes["id"],
attributes,
data["id"].ToString(),
data["occurred_on"].ToString()
});
return domainEvent;
}
}
}<file_sep>/apps/Mooc/Backend/Command/ConsumeRabbitMqDomainEventsCommand.cs
namespace CodelyTv.Apps.Mooc.Backend.Command
{
using Shared.Cli;
using Shared.Infrastructure.Bus.Event.RabbitMq;
public class ConsumeRabbitMqDomainEventsCommand : Command
{
private readonly RabbitMqDomainEventsConsumer _consumer;
public ConsumeRabbitMqDomainEventsCommand(RabbitMqDomainEventsConsumer consumer)
{
_consumer = consumer;
}
public override void Execute(string[] args)
{
_consumer.Consume();
}
}
}<file_sep>/src/Shared/Domain/IUuidGenerator.cs
namespace CodelyTv.Shared.Domain
{
public interface IUuidGenerator
{
string Generate();
}
}<file_sep>/test/src/Mooc/Courses/CoursesModuleUnitTestCase.cs
namespace CodelyTv.Test.Mooc.Courses
{
using CodelyTv.Mooc.Courses.Domain;
using Moq;
using Test.Shared.Infrastructure;
public abstract class CoursesModuleUnitTestCase : UnitTestCase
{
protected readonly Mock<ICourseRepository> Repository;
protected CoursesModuleUnitTestCase()
{
this.Repository = new Mock<ICourseRepository>();
}
protected void ShouldHaveSave(Course course)
{
this.Repository.Verify(x => x.Save(course), Times.AtLeastOnce());
}
}
}<file_sep>/src/Shared/Domain/Bus/Command/ICommandBus.cs
namespace CodelyTv.Shared.Domain.Bus.Command
{
using System.Threading.Tasks;
public interface ICommandBus
{
Task Dispatch(Command command);
}
}<file_sep>/test/src/Mooc/CoursesCounter/Domain/CoursesCounterTotalMother.cs
namespace CodelyTv.Test.Mooc.CoursesCounter.Domain
{
using CodelyTv.Mooc.CoursesCounter.Domain;
using Test.Shared.Domain;
public class CoursesCounterTotalMother
{
public static CoursesCounterTotal Create(int value)
{
return new CoursesCounterTotal(value);
}
public static CoursesCounterTotal Random()
{
return Create(IntegerMother.Random());
}
public static CoursesCounterTotal One()
{
return Create(1);
}
}
}<file_sep>/src/Mooc/Shared/Infrastructure/Persistence/EntityFramework/EntityConfigurations/CoursesCounterConfiguration.cs
namespace CodelyTv.Mooc.Shared.Infrastructure.Persistence.EntityFramework.EntityConfigurations
{
using CodelyTv.Shared.Infrastructure.Persistence.EntityFramework.Extension;
using CoursesCounter.Domain;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Metadata.Builders;
using ValueConverter;
public class CoursesCounterConfiguration : IEntityTypeConfiguration<CoursesCounter>
{
public void Configure(EntityTypeBuilder<CoursesCounter> builder)
{
builder.ToTable(nameof(MoocContext.CoursesCounter).ToDatabaseFormat());
builder.HasKey(x => x.Id);
builder.Property(x => x.Id)
.HasConversion(v => v.Value, v => new CoursesCounterId(v))
.HasColumnName(nameof(CoursesCounter.Id).ToDatabaseFormat());
builder.OwnsOne(x => x.Total)
.Property(x => x.Value)
.HasColumnName(nameof(CoursesCounter.Total).ToDatabaseFormat());
builder.Property(e => e.ExistingCourses)
.HasConversion(new ExistingCoursesConverter())
.HasColumnName(nameof(CoursesCounter.ExistingCourses).ToDatabaseFormat());
}
}
}<file_sep>/src/Shared/Infrastructure/CSharpUuidGenerator.cs
namespace CodelyTv.Shared.Infrastructure
{
using System;
using Domain;
public class CSharpUuidGenerator : IUuidGenerator
{
public string Generate()
{
return Guid.NewGuid().ToString();
}
}
}<file_sep>/apps/Backoffice/Frontend/Command/ConsumeRabbitMqDomainEventsCommand.cs
namespace CodelyTv.Apps.Backoffice.Frontend.Command
{
using CodelyTv.Shared.Cli;
using CodelyTv.Shared.Infrastructure.Bus.Event.RabbitMq;
public class ConsumeRabbitMqDomainEventsCommand : Command
{
private readonly RabbitMqDomainEventsConsumer _consumer;
public ConsumeRabbitMqDomainEventsCommand(RabbitMqDomainEventsConsumer consumer)
{
_consumer = consumer;
}
public override void Execute(string[] args)
{
_consumer.Consume();
}
}
}<file_sep>/src/Shared/Domain/Bus/Event/IEventBus.cs
namespace CodelyTv.Shared.Domain.Bus.Event
{
using System.Collections.Generic;
using System.Threading.Tasks;
public interface IEventBus
{
Task Publish(List<DomainEvent> events);
}
}<file_sep>/src/Mooc/Courses/Infrastructure/Persistence/MsSqlCourseRepository.cs
namespace CodelyTv.Mooc.Courses.Infrastructure.Persistence
{
using System.Linq;
using System.Threading.Tasks;
using Domain;
using Microsoft.EntityFrameworkCore;
using Shared.Infrastructure.Persistence.EntityFramework;
public class MsSqlCourseRepository : ICourseRepository
{
private MoocContext _context;
public MsSqlCourseRepository(MoocContext context)
{
this._context = context;
}
public async Task Save(Course course)
{
await this._context.Courses.AddAsync(course);
await this._context.SaveChangesAsync();
}
public async Task<Course> Search(CourseId id)
{
return await this._context.Courses.FirstOrDefaultAsync(c => c.Id.Equals(id));
}
}
}<file_sep>/src/Shared/Domain/Bus/Event/IDomainEventSubscriberBase.cs
namespace CodelyTv.Shared.Domain.Bus.Event
{
using System.Threading.Tasks;
public interface IDomainEventSubscriberBase
{
Task On(DomainEvent @event);
}
}<file_sep>/src/Shared/Domain/Bus/Query/IQueryBus.cs
namespace CodelyTv.Shared.Domain.Bus.Query
{
using System.Threading.Tasks;
public interface IQueryBus
{
Task<TResponse> Ask<TResponse>(Query request);
}
}<file_sep>/src/Mooc/Courses/Domain/Course.cs
namespace CodelyTv.Mooc.Courses.Domain
{
using System;
using CodelyTv.Shared.Domain.Aggregate;
using CodelyTv.Shared.Domain.Courses;
public class Course : AggregateRoot
{
public CourseId Id { get; private set; }
public CourseName Name { get; private set; }
public CourseDuration Duration { get; private set; }
public Course(CourseId id, CourseName name, CourseDuration duration)
{
Id = id;
Name = name;
Duration = duration;
}
private Course()
{
}
public static Course Create(CourseId id, CourseName name, CourseDuration duration)
{
Course course = new Course(id, name, duration);
course.Record(new CourseCreatedDomainEvent(id.Value, name.Value, duration.Value));
return course;
}
public override bool Equals(object obj)
{
if (this == obj) return true;
var item = obj as Course;
if (item == null) return false;
return this.Id.Equals(item.Id) && this.Name.Equals(item.Name) && this.Duration.Equals(item.Duration);
}
public override int GetHashCode()
{
return HashCode.Combine(this.Id, this.Name, this.Duration);
}
}
}<file_sep>/apps/Mooc/Backend/Controller/CoursesCounter/CoursesCounterGetController.cs
namespace CodelyTv.Apps.Mooc.Backend.Controller.CoursesCounter
{
using System.Collections.Generic;
using System.Threading.Tasks;
using CodelyTv.Mooc.CoursesCounter.Application.Find;
using Microsoft.AspNetCore.Mvc;
using Shared.Domain.Bus.Query;
[Route("courses-counter")]
public class CoursesCounterGetController : Controller
{
private readonly IQueryBus _bus;
public CoursesCounterGetController(IQueryBus bus)
{
_bus = bus;
}
[HttpGet]
[Produces("application/json")]
public async Task<IActionResult> Index()
{
CoursesCounterResponse response = await _bus.Ask<CoursesCounterResponse>(new FindCoursesCounterQuery());
return Ok(new Dictionary<string, int>()
{
{"total", response.Total}
});
}
}
}<file_sep>/src/Mooc/CoursesCounter/Domain/ICoursesCounterRepository.cs
namespace CodelyTv.Mooc.CoursesCounter.Domain
{
using System.Threading.Tasks;
public interface ICoursesCounterRepository
{
Task Save(CoursesCounter counter);
Task<CoursesCounter> Search();
}
}<file_sep>/src/Shared/Domain/Bus/Event/IDomainEventsConsumer.cs
namespace CodelyTv.Shared.Domain.Bus.Event
{
using System.Threading.Tasks;
public interface IDomainEventsConsumer
{
Task Consume();
}
}<file_sep>/src/Mooc/CoursesCounter/Application/Incrementer/IncrementCoursesCounterOnCourseCreated.cs
namespace CodelyTv.Mooc.CoursesCounter.Application.Incrementer
{
using System.Threading.Tasks;
using CodelyTv.Shared.Domain.Bus.Event;
using CodelyTv.Shared.Domain.Courses;
using Courses.Domain;
public class IncrementCoursesCounterOnCourseCreated : IDomainEventSubscriber<CourseCreatedDomainEvent>
{
private readonly CoursesCounterIncrementer _incrementer;
public IncrementCoursesCounterOnCourseCreated(CoursesCounterIncrementer incrementer)
{
_incrementer = incrementer;
}
public async Task On(CourseCreatedDomainEvent @event)
{
CourseId courseId = new CourseId(@event.AggregateId);
await _incrementer.Increment(courseId);
}
}
}<file_sep>/test/src/Mooc/Shared/Infrastructure/Bus/Event/RabbitMq/TestAllWorksOnRabbitMqEventsPublished.cs
namespace CodelyTv.Test.Mooc.Shared.Infrastructure.Bus.Event.RabbitMq
{
using System.Threading.Tasks;
using CodelyTv.Shared.Domain.Bus.Event;
using CodelyTv.Shared.Domain.Courses;
public class TestAllWorksOnRabbitMqEventsPublished : IDomainEventSubscriber<CourseCreatedDomainEvent>
{
public bool HasBeenExecuted = false;
public Task On(CourseCreatedDomainEvent domainEvent)
{
HasBeenExecuted = true;
return Task.CompletedTask;
}
}
}<file_sep>/src/Backoffice/Courses/Application/Create/BackofficeCourseCreator.cs
namespace CodelyTv.Backoffice.Courses.Application.Create
{
using System.Threading.Tasks;
using CodelyTv.Backoffice.Courses.Domain;
public class BackofficeCourseCreator
{
private readonly IBackofficeCourseRepository _repository;
public BackofficeCourseCreator(IBackofficeCourseRepository repository)
{
_repository = repository;
}
public async Task Create(string id, string name, string duration)
{
await this._repository.Save(new BackofficeCourse(id, name, duration));
}
}
}<file_sep>/src/Backoffice/Courses/Domain/IBackofficeCourseRepository.cs
namespace CodelyTv.Backoffice.Courses.Domain
{
using System.Collections.Generic;
using System.Threading.Tasks;
public interface IBackofficeCourseRepository
{
Task Save(BackofficeCourse course);
Task<IEnumerable<BackofficeCourse>> SearchAll();
}
}<file_sep>/apps/Backoffice/Frontend/Controllers/Courses/ApiCoursesGetController.cs
namespace CodelyTv.Apps.Backoffice.Frontend.Controllers.Courses
{
using System.Collections.Generic;
using System.Threading.Tasks;
using CodelyTv.Backoffice.Courses.Application;
using CodelyTv.Backoffice.Courses.Application.SearchAll;
using CodelyTv.Shared.Domain.Bus.Query;
using Microsoft.AspNetCore.Mvc;
[ApiController]
[Route("/api/courses")]
public class ApiCoursesGetController
{
private readonly IQueryBus _bus;
public ApiCoursesGetController(IQueryBus bus)
{
_bus = bus;
}
public async Task<IEnumerable<BackofficeCourseResponse>> Index()
{
var courses = await _bus.Ask<BackofficeCoursesResponse>(new SearchAllBackofficeCoursesQuery());
return courses.Courses;
}
}
}<file_sep>/apps/Backoffice/Frontend/Controllers/Courses/CoursesPostWebModel.cs
namespace CodelyTv.Apps.Backoffice.Frontend.Controllers.Courses
{
using System.ComponentModel.DataAnnotations;
using CodelyTv.Shared.Validator.Attributes;
public class CoursesPostWebModel
{
[Uuid]
[Required]
public string Id { get; set; }
[Required]
public string Name { get; set; }
[Required]
public string Duration { get; set; }
}
}<file_sep>/apps/Backoffice/Frontend/Extension/DependencyInjection/Infrastructure.cs
namespace CodelyTv.Apps.Backoffice.Frontend.Extension.DependencyInjection
{
using CodelyTv.Backoffice.Courses.Domain;
using CodelyTv.Backoffice.Courses.Infrastructure.Persistence;
using CodelyTv.Backoffice.Shared.Infrastructure.Persistence.EntityFramework;
using CodelyTv.Mooc.Courses.Domain;
using CodelyTv.Mooc.Courses.Infrastructure.Persistence;
using CodelyTv.Mooc.CoursesCounter.Application.Find;
using CodelyTv.Mooc.CoursesCounter.Domain;
using CodelyTv.Mooc.CoursesCounter.Infrastructure.Persistence;
using CodelyTv.Mooc.Shared.Infrastructure.Persistence.EntityFramework;
using CodelyTv.Shared.Domain;
using CodelyTv.Shared.Domain.Bus.Command;
using CodelyTv.Shared.Domain.Bus.Event;
using CodelyTv.Shared.Domain.Bus.Query;
using CodelyTv.Shared.Infrastructure;
using CodelyTv.Shared.Infrastructure.Bus.Command;
using CodelyTv.Shared.Infrastructure.Bus.Event;
using CodelyTv.Shared.Infrastructure.Bus.Event.MsSql;
using CodelyTv.Shared.Infrastructure.Bus.Event.RabbitMq;
using CodelyTv.Shared.Infrastructure.Bus.Query;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
public static class Infrastructure
{
public static IServiceCollection AddInfrastructure(this IServiceCollection services,
IConfiguration configuration)
{
services.AddScoped<IRandomNumberGenerator, CSharpRandomNumberGenerator>();
services.AddScoped<IUuidGenerator, CSharpUuidGenerator>();
services.AddScoped<IBackofficeCourseRepository, MsSqlBackofficeCourseRepository>();
services.AddScoped<ICoursesCounterRepository, MsSqlCoursesCounterRepository>();
services.AddScoped<ICourseRepository, MsSqlCourseRepository>();
services.AddScoped<CoursesCounterFinder, CoursesCounterFinder>();
services.AddScoped<IEventBus, RabbitMqEventBus>();
services.AddScoped<IEventBusConfiguration, RabbitMqEventBusConfiguration>();
services.AddScoped<InMemoryApplicationEventBus, InMemoryApplicationEventBus>();
// Failover
services.AddScoped<MsSqlEventBus, MsSqlEventBus>();
services.AddScoped<RabbitMqDomainEventsConsumer, RabbitMqDomainEventsConsumer>();
services.AddScoped<DomainEventsInformation, DomainEventsInformation>();
services.AddScoped<DbContext, BackofficeContext>();
services.AddDbContext<BackofficeContext>(options =>
options.UseSqlServer(configuration.GetConnectionString("BackofficeDatabase")), ServiceLifetime.Transient);
services.AddScoped<MoocContext, MoocContext>();
services.AddDbContext<MoocContext>(options =>
options.UseSqlServer(configuration.GetConnectionString("MoocDatabase")), ServiceLifetime.Transient);
services.AddRabbitMq(configuration);
services.AddScoped<DomainEventJsonDeserializer, DomainEventJsonDeserializer>();
services.AddScoped<ICommandBus, InMemoryCommandBus>();
services.AddScoped<IQueryBus, InMemoryQueryBus>();
return services;
}
private static IServiceCollection AddRabbitMq(this IServiceCollection services,
IConfiguration configuration)
{
services.AddScoped<RabbitMqPublisher, RabbitMqPublisher>();
services.AddScoped<RabbitMqConfig, RabbitMqConfig>();
services.Configure<RabbitMqConfigParams>(configuration.GetSection("RabbitMq"));
return services;
}
}
}<file_sep>/src/Shared/Domain/Bus/Query/IQueryHandler.cs
namespace CodelyTv.Shared.Domain.Bus.Query
{
using System.Threading.Tasks;
public interface IQueryHandler<TQuery, TResponse> where TQuery : Query
{
Task<TResponse> Handle(TQuery query);
}
}<file_sep>/test/src/Mooc/CoursesCounter/Domain/CoursesCounterMother.cs
namespace CodelyTv.Test.Mooc.CoursesCounter.Domain
{
using System.Collections.Generic;
using CodelyTv.Mooc.Courses.Domain;
using CodelyTv.Mooc.CoursesCounter.Domain;
using Courses.Domain;
using Test.Shared.Domain;
public static class CoursesCounterMother
{
public static CoursesCounter Create(CoursesCounterId id, CoursesCounterTotal total, List<CourseId> existingCourses)
{
return new CoursesCounter(id, total, existingCourses);
}
public static CoursesCounter WithOne(CourseId courseId)
{
return Create(CoursesCounterIdMother.Random(), CoursesCounterTotalMother.One(), ListMother<CourseId>.One(courseId));
}
public static CoursesCounter Incrementing(CoursesCounter existingCounter, CourseId courseId)
{
List<CourseId> existingCourses = new List<CourseId>(existingCounter.ExistingCourses);
existingCourses.Add(courseId);
return Create(
existingCounter.Id,
CoursesCounterTotalMother.Create(existingCounter.Total.Value + 1),
existingCourses
);
}
public static CoursesCounter Random()
{
List<CourseId> existingCourses = ListMother<CourseId>.Random(CourseIdMother.Random);
return Create(
CoursesCounterIdMother.Random(),
CoursesCounterTotalMother.Create(existingCourses.Count),
existingCourses
);
}
}
}<file_sep>/src/Mooc/CoursesCounter/Domain/CoursesCounterId.cs
namespace CodelyTv.Mooc.CoursesCounter.Domain
{
using CodelyTv.Shared.Domain.ValueObject;
public class CoursesCounterId : Uuid
{
public CoursesCounterId(string value) : base(value)
{
}
}
}<file_sep>/src/Shared/Domain/Aggregate/AggregateRoot.cs
namespace CodelyTv.Shared.Domain.Aggregate
{
using System.Collections.Generic;
using Bus.Event;
public abstract class AggregateRoot
{
private List<DomainEvent> _domainEvents = new List<DomainEvent>();
public List<DomainEvent> PullDomainEvents()
{
List<DomainEvent> events = _domainEvents;
_domainEvents = new List<DomainEvent>();
return events;
}
protected void Record(DomainEvent domainEvent)
{
this._domainEvents.Add(domainEvent);
}
}
}<file_sep>/src/Mooc/Courses/Domain/ICourseRepository.cs
namespace CodelyTv.Mooc.Courses.Domain
{
using System.Threading.Tasks;
public interface ICourseRepository
{
Task Save(Course course);
Task<Course> Search(CourseId id);
}
}<file_sep>/src/Mooc/Shared/Infrastructure/Persistence/EntityFramework/EntityConfigurations/DomainEventPrimitiveConfiguration.cs
namespace CodelyTv.Mooc.Shared.Infrastructure.Persistence.EntityFramework.EntityConfigurations
{
using System.Collections.Generic;
using CodelyTv.Shared.Domain;
using CodelyTv.Shared.Domain.Bus.Event;
using CodelyTv.Shared.Infrastructure.Persistence.EntityFramework.Extension;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Metadata.Builders;
using Newtonsoft.Json;
public class DomainEventPrimitiveConfiguration : IEntityTypeConfiguration<DomainEventPrimitive>
{
public void Configure(EntityTypeBuilder<DomainEventPrimitive> builder)
{
builder.ToTable(nameof(MoocContext.DomainEvents).ToDatabaseFormat());
builder.HasKey(x => x.AggregateId);
builder.Property(x => x.Body)
.HasConversion(v => JsonConvert.SerializeObject(v),
v => JsonConvert.DeserializeObject<Dictionary<string, string>>(v));
builder.Property(x => x.AggregateId)
.HasColumnName(nameof(DomainEventPrimitive.AggregateId).ToDatabaseFormat());
builder.Property(x => x.OccurredOn)
.HasConversion(v => Utils.StringToDate(v), v => Utils.DateToString(v))
.HasColumnName(nameof(DomainEventPrimitive.OccurredOn).ToDatabaseFormat());
}
}
}
<file_sep>/test/src/Shared/Domain/ListMother.cs
namespace CodelyTv.Test.Shared.Domain
{
using System;
using System.Collections.Generic;
public static class ListMother<T>
{
public static List<T> Create(int size, Func<T> creator)
{
List<T> list = new List<T>();
for (int i = 0; i < size; i++)
{
list.Add(creator());
}
return list;
}
public static List<T> Random(Func<T> creator)
{
return Create(IntegerMother.Between(1, 10), creator);
}
public static List<T> One(T element)
{
return new List<T>() {element};
}
}
}<file_sep>/src/Backoffice/Courses/Infrastructure/Persistence/MsSqlBackofficeCourseRepository.cs
namespace CodelyTv.Backoffice.Courses.Infrastructure.Persistence
{
using System.Collections.Generic;
using System.Threading.Tasks;
using CodelyTv.Backoffice.Courses.Domain;
using CodelyTv.Backoffice.Shared.Infrastructure.Persistence.EntityFramework;
using Microsoft.EntityFrameworkCore;
public class MsSqlBackofficeCourseRepository : IBackofficeCourseRepository
{
private BackofficeContext _context;
public MsSqlBackofficeCourseRepository(BackofficeContext context)
{
this._context = context;
}
public async Task Save(BackofficeCourse course)
{
await this._context.BackofficeCourses.AddAsync(course);
await this._context.SaveChangesAsync();
}
public async Task<IEnumerable<BackofficeCourse>> SearchAll()
{
return await this._context.BackofficeCourses.ToListAsync();
}
}
}<file_sep>/test/src/Mooc/CoursesCounter/Application/Find/FindCoursesCounterQueryHandlerShould.cs
namespace CodelyTv.Test.Mooc.CoursesCounter.Application.Find
{
using System.Threading.Tasks;
using CodelyTv.Mooc.CoursesCounter.Application.Find;
using CodelyTv.Mooc.CoursesCounter.Domain;
using Domain;
using Xunit;
public class FindCoursesCounterQueryHandlerShould : CoursesCounterModuleUnitTestCase
{
private readonly FindCoursesCounterQueryHandler _handler;
public FindCoursesCounterQueryHandlerShould()
{
_handler = new FindCoursesCounterQueryHandler(new CoursesCounterFinder(this.Repository.Object));
}
[Fact]
public async Task it_should_find_an_existing_courses_counter()
{
CoursesCounter counter = CoursesCounterMother.Random();
FindCoursesCounterQuery query = new FindCoursesCounterQuery();
CoursesCounterResponse response = CoursesCounterResponseMother.Create(counter.Total.Value);
ShouldSearch(counter);
Assert.Equal(response, await _handler.Handle(query));
}
[Fact]
public async Task it_should_throw_an_exception_when_courses_counter_does_not_exists()
{
FindCoursesCounterQuery query = new FindCoursesCounterQuery();
ShouldSearch();
await Assert.ThrowsAsync<CoursesCounterNotInitialized>(async () => await _handler.Handle(query));
}
}
}<file_sep>/test/src/Mooc/CoursesCounter/CoursesCounterModuleInfrastructureTestCase.cs
namespace CodelyTv.Test.Mooc.CoursesCounter
{
using CodelyTv.Mooc.CoursesCounter.Domain;
public class CoursesCounterModuleInfrastructureTestCase : MoocContextInfrastructureTestCase
{
protected ICoursesCounterRepository Repository => GetService<ICoursesCounterRepository>();
}
}<file_sep>/test/src/Mooc/MoocWebApplicationFactory.cs
namespace CodelyTv.Test.Mooc
{
using System;
using System.Net.Http;
using CodelyTv.Mooc.Shared.Infrastructure.Persistence.EntityFramework;
using CodelyTv.Shared.Domain.Bus.Event;
using CodelyTv.Shared.Infrastructure.Bus.Event;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.DependencyInjection;
using Test.Shared.Infrastructure;
public class MoocWebApplicationFactory<TStartup> : ApplicationTestCase<TStartup> where TStartup : class
{
private string _databaseName;
public HttpClient GetAnonymousClient()
{
SetDatabaseName();
return CreateClient();
}
private void SetDatabaseName()
{
this._databaseName = Guid.NewGuid().ToString();
}
protected override Action<IServiceCollection> Services()
{
return services =>
{
// Create a new service provider.
var serviceProvider = new ServiceCollection()
.AddEntityFrameworkInMemoryDatabase()
.BuildServiceProvider();
// Add a database context using an in-memory
// database for testing.
services.AddDbContext<MoocContext>(options =>
{
options.UseInMemoryDatabase(_databaseName);
options.UseInternalServiceProvider(serviceProvider);
});
services.AddScoped<IEventBus, InMemoryApplicationEventBus>();
var sp = services.BuildServiceProvider();
using var scope = sp.CreateScope();
var scopedServices = scope.ServiceProvider;
var context = scopedServices.GetRequiredService<MoocContext>();
// Ensure the database is created.
context.Database.EnsureCreated();
};
}
}
}<file_sep>/apps/Mooc/Backend/Controller/Courses/CoursesPutController.cs
namespace CodelyTv.Apps.Mooc.Backend.Controller.Courses
{
using System;
using System.Threading.Tasks;
using CodelyTv.Mooc.Courses.Application.Create;
using Microsoft.AspNetCore.Mvc;
using Newtonsoft.Json;
using Shared.Domain.Bus.Command;
[Route("courses")]
public class CoursesPutController : Controller
{
private readonly ICommandBus _bus;
public CoursesPutController(ICommandBus bus)
{
_bus = bus;
}
[HttpPut("{id}")]
public async Task<IActionResult> Index(string id, [FromBody] dynamic body)
{
body = JsonConvert.DeserializeObject(Convert.ToString(body));
await this._bus.Dispatch(new CreateCourseCommand(id, body["name"].ToString(), body["duration"].ToString()));
return StatusCode(201);
}
}
}<file_sep>/src/Mooc/CoursesCounter/Application/Find/CoursesCounterFinder.cs
namespace CodelyTv.Mooc.CoursesCounter.Application.Find
{
using System.Threading.Tasks;
using Domain;
public class CoursesCounterFinder
{
private ICoursesCounterRepository _repository;
public CoursesCounterFinder(ICoursesCounterRepository repository)
{
_repository = repository;
}
public async Task<CoursesCounterResponse> Find()
{
CoursesCounter coursesCounter = await this._repository.Search() ?? throw new CoursesCounterNotInitialized();
return new CoursesCounterResponse(coursesCounter.Total.Value);
}
}
}<file_sep>/apps/Mooc/Backend/Controller/HealthCheck/HealthCheckGetController.cs
namespace CodelyTv.Apps.Mooc.Backend.Controller.HealthCheck
{
using Microsoft.AspNetCore.Mvc;
using Shared.Domain;
[Route("health-check")]
public class HealthCheckGetController : Controller
{
private readonly IRandomNumberGenerator _generator;
public HealthCheckGetController(IRandomNumberGenerator generator)
{
_generator = generator;
}
[HttpGet]
[Produces("application/json")]
public IActionResult Index()
{
return Ok(new {moocBackend = "ok", rand = _generator.Generate()});
}
}
}<file_sep>/src/Mooc/Shared/Infrastructure/Persistence/EntityFramework/ValueConverter/ExistingCoursesConverter.cs
namespace CodelyTv.Mooc.Shared.Infrastructure.Persistence.EntityFramework.ValueConverter
{
using System.Collections.Generic;
using CodelyTv.Shared.Infrastructure.Persistence.EntityFramework.EntityConfigurations;
using Courses.Domain;
using Microsoft.EntityFrameworkCore.Storage.ValueConversion;
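// EF Core value converter that serializes the list of counted CourseIds to a JSON string
// column and reconstructs it on the way back.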
public class ExistingCoursesConverter : ValueConverter<List<CourseId>, string>
{
public ExistingCoursesConverter(ConverterMappingHints mappingHints = null)
: base(v => ConvertConfiguration.ObjectToJson(v),
v => ConvertConfiguration.ObjectFromJson<CourseId>(v),
mappingHints
)
{
}
}
}<file_sep>/test/src/Mooc/MoocContextInfrastructureTestCase.cs
namespace CodelyTv.Test.Mooc
{
using System;
using System.Linq;
using CodelyTv.Apps.Mooc.Backend;
using CodelyTv.Mooc.Shared.Infrastructure.Persistence.EntityFramework;
using CodelyTv.Shared;
using CodelyTv.Shared.Helpers;
using CodelyTv.Shared.Infrastructure.Bus.Event;
using CodelyTv.Shared.Infrastructure.Bus.Event.MsSql;
using CodelyTv.Shared.Infrastructure.Bus.Event.RabbitMq;
using CodelyTv.Test.Mooc.Shared.Infrastructure.Bus.Event.RabbitMq;
using CodelyTv.Test.Shared.Infrastructure;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
public class MoocContextInfrastructureTestCase : InfrastructureTestCase<Startup>
{
protected override Action<IServiceCollection> Services()
{
return services =>
{
var descriptor = services.SingleOrDefault(d => d.ServiceType == typeof(DbContextOptions<MoocContext>));
if (descriptor != null)
services.Remove(descriptor);
IConfigurationRoot configuration = new ConfigurationBuilder()
.SetBasePath(AppContext.BaseDirectory)
.AddJsonFile("appsettings.json")
.Build();
services.AddScoped<MsSqlEventBus, MsSqlEventBus>();
services.AddScoped<MsSqlDomainEventsConsumer, MsSqlDomainEventsConsumer>();
services.AddScoped<RabbitMqEventBus>(p =>
{
var publisher = p.GetRequiredService<RabbitMqPublisher>();
var failOverBus = p.GetRequiredService<MsSqlEventBus>();
return new RabbitMqEventBus(publisher, failOverBus, "test_domain_events");
});
services.AddScoped<IEventBusConfiguration, RabbitMqEventBusConfiguration>();
services.AddScoped<DomainEventsInformation, DomainEventsInformation>();
services.AddScoped<TestAllWorksOnRabbitMqEventsPublished, TestAllWorksOnRabbitMqEventsPublished>();
services.AddDomainEventSubscriberInformationService(AssemblyHelper.GetInstance(Assemblies.Mooc));
services.AddCommandServices(AssemblyHelper.GetInstance(Assemblies.Mooc));
services.AddQueryServices(AssemblyHelper.GetInstance(Assemblies.Mooc));
services.AddDbContext<MoocContext>(options =>
options.UseSqlServer(configuration.GetConnectionString("MoocDatabase")));
services.Configure<RabbitMqConfig>(configuration.GetSection("RabbitMq"));
};
}
}
}<file_sep>/src/Mooc/Courses/Application/Create/CourseCreator.cs
namespace CodelyTv.Mooc.Courses.Application.Create
{
using System.Threading.Tasks;
using CodelyTv.Shared.Domain.Bus.Event;
using Domain;
public class CourseCreator
{
private readonly ICourseRepository _repository;
private readonly IEventBus _eventBus;
public CourseCreator(ICourseRepository repository, IEventBus eventBus)
{
_repository = repository;
_eventBus = eventBus;
}
public async Task Create(CourseId id, CourseName name, CourseDuration duration)
{
Course course = Course.Create(id, name, duration);
await this._repository.Save(course);
await this._eventBus.Publish(course.PullDomainEvents());
}
}
}<file_sep>/src/Mooc/Courses/Infrastructure/FileCourseRepository.cs
namespace CodelyTv.Mooc.Courses.Infrastructure
{
using System.IO;
using System.Threading.Tasks;
using Domain;
using Newtonsoft.Json;
public class FileCourseRepository : ICourseRepository
{
private readonly string _filePath = Directory.GetCurrentDirectory() + "/courses";
public async Task Save(Course course)
{
await Task.Run(() =>
{
using (StreamWriter outputFile = new StreamWriter(this.FileName(course.Id.Value), false))
{
outputFile.WriteLine(JsonConvert.SerializeObject(course));
}
});
}
public async Task<Course> Search(CourseId id)
{
if (File.Exists(FileName(id.Value)))
{
var text = await File.ReadAllTextAsync(FileName(id.Value));
return JsonConvert.DeserializeObject<Course>(text);
}
return null;
}
private string FileName(string id)
{
return $"{_filePath}.{id}.repo";
}
}
}<file_sep>/test/src/Mooc/CoursesCounter/Application/Find/CoursesCounterResponseMother.cs
namespace CodelyTv.Test.Mooc.CoursesCounter.Application.Find
{
using CodelyTv.Mooc.CoursesCounter.Application.Find;
using Test.Shared.Domain;
public static class CoursesCounterResponseMother
{
public static CoursesCounterResponse Create(int value)
{
return new CoursesCounterResponse(value);
}
public static CoursesCounterResponse Random()
{
return Create(IntegerMother.Random());
}
}
}<file_sep>/src/Mooc/CoursesCounter/Domain/CoursesCounterNotInitialized.cs
namespace CodelyTv.Mooc.CoursesCounter.Domain
{
using System;
public class CoursesCounterNotInitialized : SystemException
{
}
}<file_sep>/apps/Backoffice/Frontend/Extension/Validators/SummaryByPropertyValidatorHtml.cs
namespace CodelyTv.Apps.Backoffice.Frontend.Extension.Validators
{
using System.Text;
using Microsoft.AspNetCore.Html;
using Microsoft.AspNetCore.Mvc.ModelBinding;
using Microsoft.AspNetCore.Mvc.Rendering;
public static class SummaryByPropertyValidatorHtml
{
public static IHtmlContent ValidationSummaryByProperty<TModel>(this IHtmlHelper<TModel> helper, ModelStateDictionary dictionary, string property, string className)
{
StringBuilder builder = new StringBuilder();
if (dictionary[property] != null)
{
foreach (var modelState in dictionary[property].Errors)
{
builder.Append($"<p class='{className}'>{modelState.ErrorMessage}</p>");
}
}
return new HtmlString(builder.ToString());
}
}
}<file_sep>/src/Shared/Domain/IRandomNumberGenerator.cs
namespace CodelyTv.Shared.Domain
{
public interface IRandomNumberGenerator
{
int Generate();
}
}<file_sep>/src/Shared/Infrastructure/Persistence/EntityFramework/Extension/ConfigurationExtension.cs
namespace CodelyTv.Shared.Infrastructure.Persistence.EntityFramework.Extension
{
using System;
using System.Linq;
public static class ConfigurationExtension
{
static Func<char, string> AddUnderscoreBeforeCapitalLetter = x => Char.IsUpper(x) ? "_" + x : x.ToString();
public static string ToDatabaseFormat(this string value)
{
return string.Concat(value.Select(AddUnderscoreBeforeCapitalLetter)).Substring(1).ToLower();
}
}
}<file_sep>/test/src/Mooc/Shared/Infrastructure/Bus/Event/MsSql/MsSqlEventBusShould.cs
namespace CodelyTv.Test.Mooc.Shared.Infrastructure.Bus.Event.MsSql
{
using System.Collections.Generic;
using System.Threading.Tasks;
using CodelyTv.Shared.Domain.Bus.Event;
using CodelyTv.Shared.Domain.Courses;
using CodelyTv.Shared.Infrastructure.Bus.Event.MsSql;
using CodelyTv.Test.Mooc.Courses.Domain;
using Xunit;
public class MsSqlEventBusShould : MoocContextInfrastructureTestCase
{
[Fact]
public async Task PublishAndConsumeDomainEventFromMsSql()
{
var bus = GetService<MsSqlEventBus>();
var consumer = GetService<MsSqlDomainEventsConsumer>();
CourseCreatedDomainEvent domainEvent = CourseCreatedDomainEventMother.Random();
await bus.Publish(new List<DomainEvent>() {domainEvent});
consumer.Consume();
}
}
}<file_sep>/src/Mooc/CoursesCounter/Application/Find/FindCoursesCounterQuery.cs
namespace CodelyTv.Mooc.CoursesCounter.Application.Find
{
using CodelyTv.Shared.Domain.Bus.Query;
public class FindCoursesCounterQuery : Query
{
}
}<file_sep>/src/Backoffice/Shared/Infrastructure/Persistence/EntityFramework/BackofficeContext.cs
namespace CodelyTv.Backoffice.Shared.Infrastructure.Persistence.EntityFramework
{
using CodelyTv.Backoffice.Courses.Domain;
using CodelyTv.Backoffice.Shared.Infrastructure.Persistence.EntityFramework.EntityConfigurations;
using Microsoft.EntityFrameworkCore;
public class BackofficeContext : DbContext
{
public DbSet<BackofficeCourse> BackofficeCourses { get; set; }
public BackofficeContext(DbContextOptions<BackofficeContext> options) : base(options)
{
}
protected override void OnModelCreating(ModelBuilder modelBuilder)
{
modelBuilder.ApplyConfiguration(new BackofficeCourseConfiguration());
}
}
}<file_sep>/src/Mooc/CoursesCounter/Infrastructure/Persistence/MsSqlCoursesCounterRepository.cs
namespace CodelyTv.Mooc.CoursesCounter.Infrastructure.Persistence
{
using System.Linq;
using System.Threading.Tasks;
using Domain;
using Microsoft.EntityFrameworkCore;
using Shared.Infrastructure.Persistence.EntityFramework;
public class MsSqlCoursesCounterRepository : ICoursesCounterRepository
{
private readonly MoocContext _context;
public MsSqlCoursesCounterRepository(MoocContext context)
{
this._context = context;
}
public async Task Save(CoursesCounter counter)
{
if (this._context.Entry(counter).State == EntityState.Detached)
{
await this._context.AddAsync(counter);
}
else
{
this._context.Entry(counter).State = EntityState.Modified;
}
await this._context.SaveChangesAsync();
}
public async Task<CoursesCounter> Search()
{
return await this._context.CoursesCounter.SingleOrDefaultAsync();
}
}
}<file_sep>/test/src/Shared/Infrastructure/ApplicationTestCase.cs
namespace CodelyTv.Test.Shared.Infrastructure
{
using System;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Mvc.Testing;
using Microsoft.Extensions.DependencyInjection;
public abstract class ApplicationTestCase<TStartup> : WebApplicationFactory<TStartup> where TStartup : class
{
protected override void ConfigureWebHost(IWebHostBuilder builder)
{
builder.ConfigureServices(Services());
}
protected abstract Action<IServiceCollection> Services();
}
}
[repo: FullHendrix/csharp-ddd-skeleton | languages: C# | 70 files | branch: refs/heads/master]
<file_sep>spring.datasource.url = jdbc:mysql://localhost:3306/springbootdb
spring.datasource.username = root
spring.datasource.password = <PASSWORD>
spring.jpa.show-sql = true
spring.jpa.hibernate.ddl-auto = update<file_sep>package main.java.com.webstoresite.domain;
public class Product {
}
[repo: AG321/WebStore | languages: Java, INI | 2 files | branch: refs/heads/master]
<repo_name>Matyaszek/dijkstra<file_sep>/src/Cities/MainCities.java
package Cities;
import java.io.BufferedReader;
import java.io.FileReader;
import java.util.*;
public class MainCities {
public static String FILE_PATH = "src/Cities/cities.txt"; // path to the cities input file
public HashMap<String, City> cities; // maps a city name to its City object
public Graph graph;
public MainCities() {
cities = new HashMap<String, City>();
graph = new Graph();
loadGraph();
}
public Graph getGraph() {
return graph;
}
public void setStart(String nazwaMiasta) {
cities.get(nazwaMiasta).setDistance(0);
}
public City getCity(String nazwaMiasta) {
return cities.get(nazwaMiasta);
}
private void loadGraph() {
loadCities();
loadEdges();
}
private void loadCities() {
try {
BufferedReader br = new BufferedReader(new FileReader(FILE_PATH));
String line;
while ((line = br.readLine()) != null) {
String cName = line.substring(0, line.indexOf("-"));
City c = new City(cName);
cities.put(cName, c);
graph.addNode(c);
}
br.close();
} catch(Exception e) {
System.out.println(e.getMessage());
}
}
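// Each line of cities.txt has the form City-Neighbour1(cost),Neighbour2(cost),...
// loadEdges reads the part after '-' and adds one weighted edge per neighbour.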
private void loadEdges() {
try {
BufferedReader br = new BufferedReader(new FileReader(FILE_PATH));
String line;
while ((line = br.readLine()) != null) {
String c1 = line.substring(0, line.indexOf("-"));
String[] adjacent = line.substring(
line.indexOf("-") + 1).split(",");
for (String s : adjacent) {
String c2 = s.substring(0, s.indexOf("("));
String cost = s.substring(s.indexOf("(") + 1, s.indexOf(")"));
graph.addEdge(
cities.get(c1),
cities.get(c2),
Integer.parseInt(cost));
}
}
br.close();
} catch(Exception e) {
System.out.println(e);
}
}
public static void main(String[] args){
MainCities mc = new MainCities();
//System.out.println(mc.miasta.toString());
Set cities = mc.cities.keySet();
showCities(cities);
Scanner scanner = new Scanner(System.in);
boolean result = false;
String s ="";
String k ="";
while (!result) {
System.out.println("Podaj miasto poczatkowe:");
s = scanner.nextLine();
result = cities.contains(s);
if (!result) System.out.println("PODAJ POPRAWNE MIASTO!");
}
result = false;
while (!result) {
System.out.println("Podaj miasto docelowe:");
k = scanner.nextLine();
result = cities.contains(k);
if (!result) System.out.println("PODAJ POPRAWNE MIASTO!");
}
String start = s;
String end = k;
mc.setStart(start);
Dijkstra d = new Dijkstra(mc.getGraph(), mc.getCity(end));
d.run();
List<City> path = new ArrayList<City>();
City city = mc.getCity(end);
while (city.previous() != null) {
path.add(city);
city = city.previous();
}
path.add(city);
Collections.reverse(path);
System.out.println("Najkrotsza droga: " + path);
System.out.println("Dystans (w kilometrach): " + mc.getCity(end).distance());
}
private static void showCities(Set cities){
int i = 0;
for (Object city:cities
) {
System.out.println((i+1)+". "+city.toString());
i++;
}
}
}
<file_sep>/src/Cities/Main.java
package Cities;
import java.lang.reflect.Array;
import java.util.*;
public class Main {
public static void main(String[] args) {
MainCities mc = new MainCities();
//System.out.println(mc.miasta.toString());
Set cities = mc.cities.keySet();
showCities(cities);
Scanner scanner = new Scanner(System.in);
boolean result = false;
String s ="";
String k ="";
while (!result) {
System.out.println("Podaj miasto poczatkowe:");
s = scanner.nextLine();
result = cities.contains(s);
if (!result) System.out.println("PODAJ POPRAWNE MIASTO!");
}
result = false;
ArrayList<String> targets = new ArrayList<>();
ArrayList<Integer> packs = new ArrayList<Integer>();
Scanner scanner2 = new Scanner(System.in);
boolean next = true;
while(next){
System.out.println("Podaj miasto docelowe:");
k = scanner.nextLine();
//System.out.println("ilość paczek do dostarczenia w "+k+": ");
//packs.add(scanner.nextInt());
result = cities.contains(k);
targets.add(k);
System.out.println("Koniec?: ");
if(scanner2.nextLine().equals("y")) next=false;
}
// generate all permutations of the target cities (alternative routes)
List<List<String>> alternatives = generatePerm(targets);
for(int i=0;i<alternatives.size();i++){
// prepend the starting city to every alternative route
alternatives.get(i).add(0,s);
}
System.out.println(alternatives.toString());
int fullDistance = 0;
String prevCity = s;
System.out.println("Przystanki: ");
for (List<String> list:alternatives
) {
prevCity = s;
for (String target:list
) {
fullDistance+=distance(prevCity,target);
prevCity = target;
}
// add the return leg back to the starting city to the total distance
fullDistance+=distance(prevCity,s);
System.out.println("Cały dystans: "+fullDistance);
System.out.println(list);
fullDistance = 0;
}
}
// generates route permutations without including the starting city
public static <E> List<List<E>> generatePerm(List<E> original) {
if (original.isEmpty()) {
List<List<E>> result = new ArrayList<>();
result.add(new ArrayList<>());
return result;
}
E firstElement = original.remove(0);
List<List<E>> returnValue = new ArrayList<>();
List<List<E>> permutations = generatePerm(original);
for (List<E> smallerPermutated : permutations) {
for (int index=0; index <= smallerPermutated.size(); index++) {
List<E> temp = new ArrayList<>(smallerPermutated);
temp.add(index, firstElement);
returnValue.add(temp);
}
}
return returnValue;
}
// computes the shortest distance from start to end (runs Dijkstra on a fresh graph)
private static int distance(String start, String end){
MainCities mc = new MainCities();
mc.setStart(start);
Dijkstra d = new Dijkstra(mc.getGraph(), mc.getCity(end));
d.run();
List<City> path = new ArrayList<City>();
City city = mc.getCity(end);
while (city.previous() != null) {
path.add(city);
city = city.previous();
}
path.add(city);
Collections.reverse(path);
//System.out.println("Najkrotsza droga: " + path);
int dis = mc.getCity(end).distance();
//System.out.println("Dystans (w kilometrach): " + dis);
return dis;
}
private static void showCities(Set cities){
int i = 0;
for (Object city:cities
) {
System.out.println((i+1)+". "+city.toString());
i++;
}
}
}
<file_sep>/src/Cities/Graph.java
package Cities;
import java.util.*;
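// Adjacency-map graph: every City maps to its neighbours together with the edge cost.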
public class Graph {
private Map<City, HashMap<City, Integer>> graph;
public Graph() {
graph = new HashMap<City, HashMap<City, Integer>>();
}
public void addNode(City node) {
HashMap<City, Integer> map = new HashMap<City, Integer>();
graph.put(node, map);
}
public void addEdge(City n1, City n2, int cost) {
graph.get(n1).put(n2, cost);
}
public List<City> getNodes() {
List<City> nodes = new ArrayList<City>(graph.keySet());
return nodes;
}
public List<City> getChildren(City node) {
List<City> children = new ArrayList<City>(graph.get(node).keySet());
return children;
}
public Integer getCost(City n1, City n2) {
return graph.get(n1).get(n2);
}
}<file_sep>/src/Cities/City.java
package Cities;
public class City {
private final String name;
private int distance;
private City previous;
public City(String name) {
this.name = name;
this.distance = Integer.MAX_VALUE;
this.previous = null;
}
public String name() {
return name;
}
public int distance() {
return distance;
}
public City previous() {
return previous;
}
public void setDistance(int d) {
distance = d;
}
public void setPrevious(City c) {
previous = c;
}
public boolean equals(Object o) {
if (o instanceof City) {
City other = (City) o;
return this.name.equals(other.name);
}
return false;
}
public int hashCode() {
return name.hashCode();
}
public String toString() {
return name.toString();
}
}
[repo: Matyaszek/dijkstra | languages: Java | 4 files | branch: refs/heads/master]
<file_sep>/**
* Meet Programming Language Conversion.
*/
// https://blog.csdn.net/u012234115/article/details/83186386
#include "Conversion.hpp"
#ifdef _WIN32
#include <windows.h>
std::string utf8ToGbk(const char* utf8) {
int len = MultiByteToWideChar(CP_UTF8, 0, utf8, -1, NULL, 0);
wchar_t* wszGBK = new wchar_t[len + 1];
memset(wszGBK, 0, len * 2 + 2);
MultiByteToWideChar(CP_UTF8, 0, utf8, -1, wszGBK, len);
len = WideCharToMultiByte(CP_ACP, 0, wszGBK, -1, NULL, 0, NULL, NULL);
char* szGBK = new char[len + 1];
memset(szGBK, 0, len + 1);
WideCharToMultiByte(CP_ACP, 0, wszGBK, -1, szGBK, len, NULL, NULL);
std::string strTemp(szGBK);
if (wszGBK) delete[] wszGBK;
if (szGBK) delete[] szGBK;
return strTemp;
}
#else
#include <iconv.h>
#include <cstddef>  // use the standard size_t instead of redefining it
int utf8ToGbk(char *inbuf, size_t inlen, char *outbuf, size_t outlen) {
iconv_t cd;
char **pin = &inbuf;
char **pout = &outbuf;
cd = iconv_open("gbk", "utf-8");
    if ((iconv_t) -1 == cd)  // iconv_open signals failure with (iconv_t) -1, not 0
return -1;
if (-1 == iconv(cd, pin, &inlen, pout, &outlen)) {
iconv_close(cd);
return -1;
}
iconv_close(cd);
return 0;
}
#endif<file_sep>/**
* Meet Programming Language Parser.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2019 Turaiiao
* Email: <EMAIL>
* Github: https://github.com/turaiiao
*/
#include "Parser.hpp"
Parser::Parser(std::vector<Token> tokens) {
this->tokens = std::move(tokens);
this->statements = std::vector<Statement *>();
this->position = 0;
}
std::vector<Statement *> Parser::parseProgram() {
while (this->tokens.size() - 1 >= this->position && !isAtEnd())
insertStatement(statement());
return this->statements;
}
void Parser::insertStatement(Statement* stmt) {
this->statements.push_back(stmt);
}
Token Parser::previous() {
return this->tokens.at(this->position - 1);
}
Token Parser::look() {
return this->tokens.at(this->position);
}
Token Parser::look(int pos) {
if (this->position + pos > this->tokens.size() - 1)
return Token(TOKEN_EOF, "EOF", 0);
else
return this->tokens.at(this->position + pos);
}
bool Parser::look(TokenType tokenType) {
bool equal = look().type == tokenType;
if (equal)
this->position ++;
return equal;
}
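// look(TokenType) is a match-and-consume helper: it advances the cursor only when the
// current token matches, so call sites can be written as plain `if (look(TOKEN_X))` checks.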
bool Parser::isAtEnd() {
return look(0).type == TOKEN_EOF;
}
void Parser::error(std::string message) {
throw std::runtime_error("[ line " + std::to_string(look().line) + " ] " + message);
}
Statement* Parser::statement() {
if (look(TOKEN_VAR))
return varStatement();
if (look(TOKEN_PRINTLN) || look(TOKEN_PRINT))
return printlnStatement();
if (look(TOKEN_LBRACE))
return blockStatement();
if (look(TOKEN_FOR))
return forStatement();
if (look(TOKEN_BREAK))
return breakStatement();
if (look(TOKEN_CONTINUE))
return continueStatement();
if (look(TOKEN_IF))
return ifStatement();
if (look(TOKEN_WHILE))
return whileStatement();
if (look(TOKEN_FUN))
return funStatement();
if (look(TOKEN_RETURN))
return returnStatement();
return expressionStatement();
}
Expression* Parser::expression() {
return assignment();
}
Expression* Parser::assignment() {
Expression* expr = logicalOr();
/**
* var a = 12 : AssignExpression
*/
if (look(TOKEN_EQUAL)) {
Expression* initializer = assignment();
if (expr->defintion() == EXPRESSION_VARIABLE) {
Token name = ((VariableExpression *) expr)->name;
return new AssignExpression(name, initializer, Token(TOKEN_ANY, "any", name.line));
}
error("syntax error: invalid assignment target.");
}
/**
* var a: int = 12 : AssignExpression
*/
if (look(TOKEN_COLON)) {
Token name = look(-2);
Token type = look();
AssignExpression* assignExpr = new AssignExpression;
assignExpr->name = name;
assignExpr->typed = type;
this->position ++;
if (look(TOKEN_LESS)) {
Token a = look();
this->position ++;
if (look(TOKEN_GREATER) == false)
error("syntax error: expect '>' after list type.");
assignExpr->fixedListToken = a;
}
if (look(TOKEN_EQUAL) == false)
return new AssignExpression(name, nullptr, type);
assignExpr->initializer = expression();
return assignExpr;
}
return expr;
}
/**
* LogicalExpression:
*
* 1 or 2
* 2 | 2
*/
Expression* Parser::logicalOr() {
Expression* expr = logicalAnd();
while (look(TOKEN_OR)) {
Token op = previous();
Expression* right = logicalAnd();
expr = new LogicalExpression(expr, op, right);
}
return expr;
}
/**
* LogicalExpression:
*
* 1 and 2
* 1 & 2
*/
Expression* Parser::logicalAnd() {
Expression* expr = equality();
while (look(TOKEN_AND)) {
Token op = previous();
Expression* right = equality();
expr = new LogicalExpression(expr, op, right);
}
return expr;
}
Expression* Parser::equality() {
Expression* expr = comparison();
while (look(TOKEN_EQUAL_EQUAL) || look(TOKEN_BANG_EQUAL)) {
Token op = previous();
Expression* right = comparison();
expr = new BinaryExpression(expr, op, right);
}
return expr;
}
Expression* Parser::comparison() {
Expression* expr = addition();
while (look(TOKEN_GREATER) || look(TOKEN_GREATER_EQUAL) ||
look(TOKEN_LESS) || look(TOKEN_LESS_EQUAL)) {
Token op = previous();
Expression* right = addition();
expr = new BinaryExpression(expr, op, right);
}
return expr;
}
Expression* Parser::addition() {
Expression* expr = multiplication();
while (look(TOKEN_PLUS) || look(TOKEN_MINUS) || look(TOKEN_PLUS_EQUAL) || look(TOKEN_MINUS_EQUAL)) {
Token op = previous();
Expression* right = multiplication();
expr = new BinaryExpression(expr, op, right);
}
return expr;
}
Expression* Parser::multiplication() {
Expression* expr = unary();
while (look(TOKEN_STAR) || look(TOKEN_SLASH) || look(TOKEN_STAR_EQUAL) || look(TOKEN_SLASH_EQUAL) ||
look(TOKEN_MODULAR)) {
Token op = previous();
Expression* right = unary();
expr = new BinaryExpression(expr, op, right);
}
return expr;
}
/**
* UnaryExpression:
*
* !a
* -123
*/
Expression* Parser::unary() {
if (look(TOKEN_BANG) || look(TOKEN_MINUS)) {
Token op = previous();
Expression* expression = unary();
return new UnaryExpression(op, expression);
}
return primary();
}
Expression* Parser::primary() {
if (look(TOKEN_VALUE_INT) || look(TOKEN_VALUE_STRING) || look(TOKEN_VALUE_FLOAT))
return new LiteralExpression(previous());
if (look(TOKEN_NULL) || look(TOKEN_TRUE) || look(TOKEN_FALSE))
return new LiteralExpression(previous());
if (look(TOKEN_VALUE_IDENTIFIER)) {
Token name = previous();
/**
* a [0] : GetExpression
* a [0] = 123 : SetExpression
*/
if (look(TOKEN_LBRACKET)) {
Expression* initializer = expression();
if (look(TOKEN_RBRACKET) == false)
error("syntax error: expect ']' after expression.");
if (look(TOKEN_EQUAL))
return new SetExpression(name, initializer, expression(), EXPRESSION_LIST);
return new GetExpression(name, initializer, EXPRESSION_LIST);
}
/**
* CallExpression:
*
* hello ()
* hello (a, 123, 'world')
*/
if (look(TOKEN_LPAREN)) {
std::vector<Expression *> parameters = std::vector<Expression *>();
while (look(TOKEN_RPAREN) == false) {
if (look(TOKEN_COMMA))
continue;
else
parameters.push_back(expression());
}
return new CallExpression(name, parameters);
}
return new VariableExpression(name);
}
/**
* (1 + 4 * 3) : GroupExpression
*/
if (look(TOKEN_LPAREN)) {
Expression* expr = expression();
if (look(TOKEN_RPAREN) == false)
error("syntax error: expect ')' after expression.");
return new GroupExpression(expr);
}
/**
* var a: list<int> = [2, 3, 4, 5, 6] : ListExpression
*/
if (look(TOKEN_LBRACKET)) {
std::vector<Value> values = std::vector<Value>();
while (look(TOKEN_RBRACKET) == false) {
if (look(TOKEN_COMMA))
continue;
values.push_back(backValueWithToken(look()));
this->position ++;
}
return new ListExpression(values);
}
error("syntax error: illegal expression, token '" + look().literal + "'.");
return NULL;
}
Statement* Parser::expressionStatement() {
return new ExpressionStatement(expression());
}
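/**
 * Parses comma-separated declarations such as:
 *
 *   var a = 1, b: int = 2
 *
 * Every initializer is collected as an AssignExpression and flagged with isVar.
 */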
Statement* Parser::varStatement() {
std::vector<AssignExpression *> list = std::vector<AssignExpression *>();
bool firstParseStatement = true;
while (firstParseStatement || look().type == TOKEN_COMMA) {
look(TOKEN_COMMA);
Expression* expr = expression();
if (expr->defintion() == EXPRESSION_VARIABLE && look().type == TOKEN_COLON) {
expr = expression();
if (expr->defintion() != EXPRESSION_ASSIGN)
error("syntax error: variable initializer must be assignment.");
list.push_back((AssignExpression *) expr);
continue;
}
if (expr->defintion() != EXPRESSION_ASSIGN)
error("syntax error: variable initializer must be assignment.");
list.push_back((AssignExpression *) expr);
firstParseStatement = false;
}
for (auto i : list) i->isVar = true;
return new VarStatement(list);
}
Statement* Parser::printlnStatement() {
    bool cls = look(-1).type == TOKEN_PRINTLN;  // compare the token type; the literal only holds the source text
return new PrintlnStatement(expression(), cls);
}
Statement* Parser::minusGreaterBlockStatement() {
std::vector<Statement *> block = std::vector<Statement *>();
block.push_back(statement());
return new BlockStatement(block);
}
Statement* Parser::blockStatement() {
std::vector<Statement *> block = std::vector<Statement *>();
while (look(TOKEN_RBRACE) == false) {
block.push_back(statement());
if (isAtEnd())
error("syntax error: lost right '}' after block statement.");
}
return new BlockStatement(block);
}
Statement* Parser::breakStatement() {
return new BreakStatement();
}
Statement* Parser::continueStatement() {
return new ContinueStatement();
}
Statement* Parser::forStatement() {
Statement* initializer = statement();
if (look(TOKEN_SEMICOLON) == false)
error("syntax error: expect ';' after initializer.");
Statement* condition = statement();
if (look(TOKEN_SEMICOLON) == false)
error("syntax error: expect ';' after condition.");
Statement* renovate = statement();
std::vector<Statement *> block = std::vector<Statement *>();
if (look(TOKEN_MINUS_GREATER))
block.push_back(statement());
else if (look(TOKEN_LBRACE))
return new ForStatement(initializer, condition, renovate, (BlockStatement *) blockStatement());
else
error("syntax error: exepct '{' or '->' after for statement.");
return new ForStatement(initializer, condition, renovate, new BlockStatement(block));
}
Statement* Parser::ifStatement() {
IfStatement* ifStatement = new IfStatement;
ifStatement->condition = statement();
if (look(TOKEN_MINUS_GREATER))
ifStatement->establish = (BlockStatement *) minusGreaterBlockStatement();
else if (look(TOKEN_LBRACE))
ifStatement->establish = (BlockStatement *) blockStatement();
else
error("syntax error: exepct '{' or '->' after if statement condition.");
if (look(TOKEN_ELIF)) {
ifStatement->elifCondition = statement();
if (look(TOKEN_MINUS_GREATER))
ifStatement->elifEstablish = (BlockStatement *) minusGreaterBlockStatement();
else if (look(TOKEN_LBRACE))
ifStatement->elifEstablish = (BlockStatement *) blockStatement();
else
error("syntax error: exepct '{' or '->' after if statement condition.");
}
if (look(TOKEN_ELSE))
if (look(TOKEN_MINUS_GREATER))
ifStatement->elseEstablish = (BlockStatement *) minusGreaterBlockStatement();
else if (look(TOKEN_LBRACE))
ifStatement->elseEstablish = (BlockStatement *) blockStatement();
else
error("syntax error: exepct '{' or '->' after if statement condition.");
return ifStatement;
}
Statement* Parser::whileStatement() {
WhileStatement* whileStatement = new WhileStatement;
whileStatement->condition = statement();
if (look(TOKEN_MINUS_GREATER))
whileStatement->block = (BlockStatement *) minusGreaterBlockStatement();
else if (look(TOKEN_LBRACE))
whileStatement->block = (BlockStatement *) blockStatement();
else
error("syntax error: exepct '{' or '->' after while statement condition.");
return whileStatement;
}
Statement* Parser::funStatement() {
FunctionStatement* funStatement = new FunctionStatement;
if (look(TOKEN_VALUE_IDENTIFIER) == false)
error("syntax error: cannot use function name without identifier.");
Token name = previous();
if (look(TOKEN_LPAREN) == false)
error("syntax error: expect '(' after function name.");
std::map<std::string, std::string> parameters = std::map<std::string, std::string>();
while (look(TOKEN_RPAREN) == false) {
if (look(TOKEN_COLON))
parameters.insert(std::pair<std::string, std::string>(look(-2).literal, look().literal));
this->position ++;
}
funStatement->name = name;
funStatement->parameters = parameters;
if (look(TOKEN_MINUS_GREATER)) {
funStatement->returnType = look();
this->position ++;
}
if (look(TOKEN_MINUS_GREATER))
funStatement->block = (BlockStatement *) minusGreaterBlockStatement();
else if (look(TOKEN_LBRACE))
funStatement->block = (BlockStatement *) blockStatement();
else
error("syntax error: exepct '{' or '->' after function statement.");
return funStatement;
}
Statement* Parser::returnStatement() {
return new ReturnStatement(expression());
}<file_sep>/**
* Meet Programming Language Interpreter.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2019 Turaiiao
* Email: <EMAIL>
* Github: https://github.com/turaiiao
*/
#include "Interpreter.hpp"
Interpreter::Interpreter(std::vector<Statement *> statements, std::map<std::string, Value>* environment) {
this->statements = std::move(statements);
this->environment = environment;
this->size = this->statements.size();
this->position = 0;
    // no repl flag is passed to this constructor, so default it instead of self-assigning an uninitialized member
    this->replMode = false;
}
void Interpreter::assign(std::string name, Value value) {
this->environment->insert(std::pair<std::string, Value>(name, value));
}
void Interpreter::reAssign(std::string name, Value value) {
std::map<std::string, Value>::iterator a = this->environment->find(name);
if (a == this->environment->end())
throw std::runtime_error("interpret error: undefind variable '" + name + "'.");
this->environment->erase(a);
this->assign(name, value);
}
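// The environment is a single flat name -> Value map rather than a scope chain; block
// statements emulate lexical scoping by erasing names that did not exist before the
// block ran (see executeBlockStatement).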
Value Interpreter::get(std::string name) {
std::map<std::string, Value>::iterator a = this->environment->find(name);
if (a == this->environment->end())
throw std::runtime_error("interpret error: undefined variable: '" + name + "'.");
return a->second;
}
bool Interpreter::haveObject(std::string name) {
return this->environment->find(name) != this->environment->end();
}
Statement* Interpreter::look() {
return this->statements.at(this->position);
}
int Interpreter::removeStatement(int pos) {
std::vector<Statement *>::iterator a = this->statements.begin() + pos;
if (a == this->statements.end())
return 0;
else {
this->statements.erase(a);
this->position --;
return this->statements.size();
}
}
void Interpreter::execute() {
while (this->size) {
executeStatement(look());
this->size = removeStatement(this->position ++);
}
}
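// execute() consumes the program destructively: each statement is erased after it runs
// and `size` is refreshed from the shrinking vector, so an Interpreter instance is
// effectively single-use for a given statement list.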
void Interpreter::executeStatement(Statement* stmt) {
if (stmt->defintion() == STATEMENT_EXPRESSION) executeExpressionStatement(stmt);
if (stmt->defintion() == STATEMENT_VAR) executeVarStatement(stmt);
if (stmt->defintion() == STATEMENT_PRINTLN) executePrintlnStatement(stmt);
if (stmt->defintion() == STATEMENT_BLOCK) executeBlockStatement(stmt);
if (stmt->defintion() == STATEMENT_BREAK) executeBreakStatement();
if (stmt->defintion() == STATEMENT_CONTINUE) executeContinueStatement();
if (stmt->defintion() == STATEMENT_FOR) executeForStatement(stmt);
if (stmt->defintion() == STATEMENT_IF) executeIfStatement(stmt);
if (stmt->defintion() == STATEMENT_WHILE) executeWhileStatement(stmt);
if (stmt->defintion() == STATEMENT_FUN) executeFunctionStatement(stmt);
if (stmt->defintion() == STATEMENT_RETURN) executeReturnStatement(stmt);
}
Value Interpreter::executeExpression(Expression* expr) {
if (expr->defintion() == EXPRESSION_LITERAL) return executeLiteralExpression(expr);
if (expr->defintion() == EXPRESSION_BINARY) return executeBinaryExpression(expr);
if (expr->defintion() == EXPRESSION_GROUP) return executeGroupExpression(expr);
if (expr->defintion() == EXPRESSION_UNARY) return executeUnaryExpression(expr);
if (expr->defintion() == EXPRESSION_ASSIGN) return executeAssignExpression(expr);
if (expr->defintion() == EXPRESSION_LOGICAL) return executeLogicalExpression(expr);
if (expr->defintion() == EXPRESSION_VARIABLE) return executeVariableExpression(expr);
if (expr->defintion() == EXPRESSION_GET) return executeGetExpression(expr);
if (expr->defintion() == EXPRESSION_SET) return executeSetExpression(expr);
if (expr->defintion() == EXPRESSION_CALL) return executeCallExpression(expr);
if (expr->defintion() == EXPRESSION_LIST) return executeListExpression(expr);
throw std::runtime_error("interpret error: unknow expression '" + expr->defintion() + "'.");
}
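/**
 * String literals get light post-processing here: '\n' and '\t' escapes are expanded and a
 * '$name' sequence is replaced with the current value of the variable `name`, which is how
 * the language implements string interpolation.
 */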
Value Interpreter::executeLiteralExpression(Expression* expr) {
Token token = ((LiteralExpression *) expr)->token;
if (token.type == TOKEN_VALUE_STRING) {
std::stringstream data;
bool haveDollarString = false;
for (int i = 0; i < token.literal.length(); ) {
char c = token.literal.at(i);
if (c == '$') {
haveDollarString = true;
i ++;
continue;
}
if (c == '\\' && i < token.literal.length() - 1) {
if (token.literal.at(i + 1) == 'n') {
data << '\n';
i += 2;
continue;
}
if (token.literal.at(i + 1) == 't') {
data << '\t';
i += 2;
continue;
}
}
if (haveDollarString) {
std::stringstream stream;
while (isalpha(c) && i < token.literal.length()) {
stream << c;
if (i >= token.literal.length() - 1)
break;
c = token.literal.at(++ i);
}
if (stream.str().length() != 0) {
data << this->get(stream.str()).toString();
if (i == token.literal.length() - 1 && !isspace(token.literal.at(i)))
i ++;
}
haveDollarString = c == '$';
continue;
}
data << c;
i ++;
}
return Value(data.str());
}
return backValueWithToken(token);
}
Value Interpreter::executeBinaryExpression(Expression* expr) {
BinaryExpression* a = (BinaryExpression *) expr;
Value l = executeExpression(a->left);
Value r = executeExpression(a->right);
if (a->token.type == TOKEN_PLUS) return l + r;
if (a->token.type == TOKEN_MINUS) return l - r;
if (a->token.type == TOKEN_STAR) return l * r;
if (a->token.type == TOKEN_SLASH) return l / r;
if (a->token.type == TOKEN_MODULAR) return l % r;
if (a->token.type == TOKEN_GREATER) return l > r;
if (a->token.type == TOKEN_GREATER_EQUAL) return l >= r;
if (a->token.type == TOKEN_LESS) return l < r;
if (a->token.type == TOKEN_LESS_EQUAL) return l <= r;
if (a->token.type == TOKEN_BANG_EQUAL) return l != r;
if (a->token.type == TOKEN_EQUAL_EQUAL) return l == r;
    // compound assignment operators need the left operand to be a plain variable name
    bool isIdentifierLeftName = a->left->defintion() == EXPRESSION_VARIABLE;
    if (isIdentifierLeftName) {
        std::string name = ((VariableExpression *) a->left)->name.literal;
if (a->token.type == TOKEN_PLUS_EQUAL) {
this->reAssign(name, l + r);
return l + r;
}
if (a->token.type == TOKEN_MINUS_EQUAL) {
this->reAssign(name, l - r);
            return l - r;
}
if (a->token.type == TOKEN_STAR_EQUAL) {
this->reAssign(name, l * r);
return l * r;
}
if (a->token.type == TOKEN_SLASH_EQUAL) {
this->reAssign(name, l / r);
return l / r;
}
}
throw std::runtime_error("interpret error: unknow operator for binary expression.");
}
Value Interpreter::executeGroupExpression(Expression* expr) {
return executeExpression(((GroupExpression *) expr)->expression);
}
Value Interpreter::executeUnaryExpression(Expression* expr) {
UnaryExpression* unaryExpr = (UnaryExpression *) expr;
Value a = executeExpression(unaryExpr->expression);
if (unaryExpr->token.type == TOKEN_BANG) {
if (a.valueNumber)
return Value(!a.numberValue);
if (a.valueBool)
return Value(!a.boolValue);
throw std::runtime_error("interpret error: unknow operator for unary expression.");
}
if (unaryExpr->token.type == TOKEN_MINUS) {
if (a.valueNumber)
return Value(-a.numberValue);
if (a.valueFloat)
return Value(-a.floatValue);
throw std::runtime_error("interpret error: unknow operator for unary expression.");
}
throw std::runtime_error("interpret error: unknow operator for unary expression.");
}
Value Interpreter::executeAssignExpression(Expression* expr) {
AssignExpression* assignExpr = (AssignExpression *) expr;
if (assignExpr->isVar && assignExpr->initializer == nullptr) {
Value value = backValueWithNullTyped(assignExpr->typed.literal);
this->assign(assignExpr->name.literal, value);
return value;
}
if (assignExpr->initializer->defintion() == EXPRESSION_LIST) {
ListExpression* listExpr = (ListExpression *) assignExpr->initializer;
if (assignExpr->fixedListToken.literal != "") {
for (int i = 0; i < listExpr->values.size(); i ++) {
                if (assignExpr->fixedListToken.type == TOKEN_INT && !listExpr->values.at(i).valueNumber)
                    throw std::runtime_error("interpret error: list declared as int contains a non-int element.");
                else if (assignExpr->fixedListToken.type == TOKEN_FLOAT && !listExpr->values.at(i).valueFloat)
                    throw std::runtime_error("interpret error: list declared as float contains a non-float element.");
                else if (assignExpr->fixedListToken.type == TOKEN_STRING && !listExpr->values.at(i).valueString)
                    throw std::runtime_error("interpret error: list declared as string contains a non-string element.");
                else if (assignExpr->fixedListToken.type == TOKEN_BOOLEAN && !listExpr->values.at(i).valueBool)
                    throw std::runtime_error("interpret error: list declared as boolean contains a non-boolean element.");
if (isNotlistFixedType(assignExpr->fixedListToken) &&
this->haveObject(assignExpr->fixedListToken.literal) == false)
throw std::runtime_error("interpret error: list fixed type undefined.");
}
}
Value listValue = Value(listExpr->values);
this->assign(assignExpr->name.literal, listValue);
return listValue;
}
Value value = executeExpression(assignExpr->initializer);
value.varAny = true;
if (assignExpr->isVar) {
if (this->haveObject(assignExpr->name.literal)) {
throw std::runtime_error("interpret error: repeatedly defining variable '" +
assignExpr->name.literal + "'.");
}
if (assignExpr->typed.literal != "") {
if (assignExpr->typed.literal == TOKEN_ANY)
value.varAny = true;
else if (assignExpr->typed.literal == TOKEN_INT && value.valueNumber)
value.varNumber = true;
else if (assignExpr->typed.literal == TOKEN_FLOAT && value.valueFloat)
value.varFloat = true;
else if (assignExpr->typed.literal == TOKEN_STRING && value.valueString)
value.varString = true;
else if (assignExpr->typed.literal == TOKEN_BOOLEAN && value.valueBool)
value.varBoolean = true;
if (!value.varNumber && !value.varFloat && !value.varString && !value.varBoolean && !value.varAny)
throw std::runtime_error("interpret error: the value type is defferent from the specified.");
}
this->assign(assignExpr->name.literal, value);
} else {
Value a = this->get(assignExpr->name.literal);
if ((a.varNumber && !value.valueNumber) || (a.varString && !value.valueString) ||
(a.varBoolean && !value.valueBool) || (a.varFloat && !value.valueFloat)) {
throw std::runtime_error("interpret error: cannot defined as other type.");
}
this->reAssign(assignExpr->name.literal, value);
}
return value;
}
Value Interpreter::executeLogicalExpression(Expression* expr) {
LogicalExpression* logicalExpr = (LogicalExpression *) expr;
Value a = executeExpression(logicalExpr->left);
Value b = executeExpression(logicalExpr->right);
if (logicalExpr->token.type == TOKEN_OR)
return a || b;
return a && b;
}
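// Both operands were already evaluated above, so `and` / `or` in this interpreter do not
// short-circuit the way C++'s built-in && and || do.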
Value Interpreter::executeVariableExpression(Expression* expr) {
VariableExpression* varExpr = (VariableExpression *) expr;
return this->get(varExpr->name.literal);
}
Value Interpreter::executeListExpression(Expression* expr) {
return Value(((ListExpression *) expr)->values);
}
Value Interpreter::executeGetExpression(Expression* expr) {
GetExpression* getExpr = (GetExpression *) expr;
Value value = executeExpression(getExpr->expression);
if (getExpr->type == EXPRESSION_LIST) {
if (value.valueNumber == false)
throw std::runtime_error("interpret error: cannot use no number value to get array.");
std::vector<Value> a = this->get(getExpr->name.literal).listValue;
if (a.size() == 0 || value.numberValue > a.size() - 1)
return Value();
return a.at(value.numberValue);
}
return Value();
}
Value Interpreter::executeSetExpression(Expression* expr) {
SetExpression* setExpr = (SetExpression *) expr;
if (setExpr->type == EXPRESSION_LIST) {
Value initializer = executeExpression(setExpr->expression);
if (initializer.valueNumber == false)
throw std::runtime_error("interpret error: cannot use no number value to get array.");
std::vector<Value> a = this->get(setExpr->name.literal).listValue;
Value value = executeExpression(setExpr->value);
if (a.size() == 0 || initializer.numberValue > a.size() - 1)
a.push_back(value);
else
a.at(initializer.numberValue) = value;
this->reAssign(setExpr->name.literal, Value(a));
return value;
}
return Value();
}
/**
* map<string -> name, string -> value> : FunctionStatement Parameters.
* vector<Value> : CallExpression Parameters.
*/
Value Interpreter::executeCallExpression(Expression* expr) {
CallExpression* callExpr = (CallExpression *) expr;
Value a = this->get(callExpr->name.literal);
if (a.valueFun == false)
throw std::runtime_error("interpret error: name '" + callExpr->name.literal + "' is not a function.");
if (a.funValue->parameters.size() != callExpr->parameters.size())
throw std::runtime_error("interpret error: inconsistency of real parameters.");
int l = 0;
std::map<std::string, Value> backup = std::map<std::string, Value>();
for (std::map<std::string, std::string>::iterator i = a.funValue->parameters.begin();
i != a.funValue->parameters.end(); i ++) {
Value b = executeExpression(callExpr->parameters.at(l));
if (i->second == TOKEN_STRING && !b.valueString)
throw std::runtime_error("interpret error: function string argument type error.");
else if (i->second == TOKEN_INT && !b.valueNumber)
throw std::runtime_error("interpret error: function int argument type error.");
else if (i->second == TOKEN_FLOAT && !b.valueFloat)
throw std::runtime_error("interpret error: function float argument type error.");
else if (i->second == TOKEN_BOOLEAN && !b.valueBool)
throw std::runtime_error("interpret error: function boolean argument type error.");
else if (i->second == TOKEN_LIST && !b.valueList)
throw std::runtime_error("interpret error: function list argument type error.");
if (i->second != TOKEN_STRING && i->second != TOKEN_INT && i->second != TOKEN_FLOAT &&
i->second != TOKEN_BOOLEAN && i->second != TOKEN_LIST && this->haveObject(i->second) == false) {
throw std::runtime_error("interpret error: undefind object name '" + i->second + "'.");
}
if (this->haveObject(i->first)) {
backup.insert(std::pair<std::string, Value>(i->first, this->get(i->first)));
this->reAssign(i->first, b);
} else
this->assign(i->first, b);
l ++;
}
try {
executeBlockStatement(a.funValue->block);
} catch (ReturnStatement* operation) {
Value v = executeExpression(operation->expression);
        if (a.funValue->returnType.type == TOKEN_INT && !v.valueNumber)
            throw std::runtime_error("interpret error: returned value does not match the declared int return type.");
        else if (a.funValue->returnType.type == TOKEN_FLOAT && !v.valueFloat)
            throw std::runtime_error("interpret error: returned value does not match the declared float return type.");
        else if (a.funValue->returnType.type == TOKEN_STRING && !v.valueString)
            throw std::runtime_error("interpret error: returned value does not match the declared string return type.");
        else if (a.funValue->returnType.type == TOKEN_BOOLEAN && !v.valueBool)
            throw std::runtime_error("interpret error: returned value does not match the declared boolean return type.");
        else if (a.funValue->returnType.type == TOKEN_LIST && !v.valueList)
            throw std::runtime_error("interpret error: returned value does not match the declared list return type.");
        if (isNotlistFixedType(a.funValue->returnType) && this->haveObject(a.funValue->returnType.literal) == false)
            throw std::runtime_error("interpret error: returned value does not match the declared return type.");
for (auto i : a.funValue->parameters)
this->environment->erase(i.first);
if (backup.size() != 0) {
for (auto i : backup) {
this->assign(i.first, i.second);
}
}
return v;
}
for (auto i : a.funValue->parameters)
this->environment->erase(i.first);
if (backup.size() != 0) {
for (auto i : backup) {
this->assign(i.first, i.second);
}
}
return Value();
}
Value Interpreter::executeExpressionStatement(Statement* stmt) {
return executeExpression(((ExpressionStatement *) stmt)->expression);
}
void Interpreter::executeVarStatement(Statement* stmt) {
VarStatement* varStmt = (VarStatement *) stmt;
for (auto i : varStmt->list)
executeExpression(i);
}
void Interpreter::executePrintlnStatement(Statement* stmt) {
PrintlnStatement* printlnStmt = (PrintlnStatement *) stmt;
Value a = executeExpression(printlnStmt->expression);
if (printlnStmt->cls)
a.printLineValue();
else
a.printValue();
}
void Interpreter::executeBlockStatement(Statement* stmt) {
BlockStatement* blockStmt = (BlockStatement *) stmt;
std::map<std::string, Value>* old = new std::map<std::string, Value>();
old->insert(this->environment->begin(), this->environment->end());
for (auto i : blockStmt->block)
executeStatement(i);
for (auto i : *this->environment) {
std::map<std::string, Value>::iterator name = old->find(i.first);
if (name == old->end()) {
auto a = this->environment->find(i.first);
this->environment->erase(a);
}
}
delete old;
}
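// Block scoping works by snapshotting the environment, running the block, then erasing any
// name missing from the snapshot: variables created inside the block disappear, while
// mutations to pre-existing names persist.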
std::vector<std::string> Interpreter::executeStatementWithoutEnvironment(Statement* stmt) {
std::map<std::string, Value>* old = new std::map<std::string, Value>();
std::vector<std::string> names = std::vector<std::string>();
old->insert(this->environment->begin(), this->environment->end());
executeStatement(stmt);
for (auto i : *this->environment) {
std::map<std::string, Value>::iterator name = old->find(i.first);
if (name == old->end()) {
names.push_back(i.first);
}
}
delete old;
return names;
}
void Interpreter::executeBreakStatement() {
throw BreakStatement();
}
void Interpreter::executeContinueStatement() {
throw ContinueStatement();
}
void Interpreter::executeForStatement(Statement* stmt) {
ForStatement* forStmt = (ForStatement *) stmt;
std::vector<std::string> names = executeStatementWithoutEnvironment(forStmt->initializer);
Value condition = executeExpressionStatement(forStmt->condition);
while (condition.boolValue) {
try {
executeBlockStatement(forStmt->block);
} catch (BreakStatement operation) {
break;
} catch (ContinueStatement operation) {
executeStatement(forStmt->renovate);
condition = executeExpressionStatement(forStmt->condition);
continue;
}
executeStatement(forStmt->renovate);
condition = executeExpressionStatement(forStmt->condition);
}
for (int i = 0; i < names.size(); i ++) {
this->environment->erase(this->environment->find(names.at(i)));
}
}
void Interpreter::executeIfStatement(Statement* stmt) {
IfStatement* ifStmt = (IfStatement *) stmt;
bool condition = executeExpressionStatement(ifStmt->condition).boolValue;
if (condition) {
if (ifStmt->establish != nullptr)
executeBlockStatement(ifStmt->establish);
return;
} else if (condition == false) {
if (ifStmt->elifCondition != nullptr) {
bool elifCondition = executeExpressionStatement(ifStmt->elifCondition).boolValue;
if (elifCondition) {
executeBlockStatement(ifStmt->elifEstablish);
return;
} else if (elifCondition == false && ifStmt->elseEstablish != nullptr) {
executeBlockStatement(ifStmt->elseEstablish);
return;
}
}
if (ifStmt->elseEstablish != nullptr)
executeBlockStatement(ifStmt->elseEstablish);
}
}
void Interpreter::executeWhileStatement(Statement* stmt) {
WhileStatement* whileStmt = (WhileStatement *) stmt;
bool condition = executeExpressionStatement(whileStmt->condition).boolValue;
while (condition) {
try {
executeBlockStatement(whileStmt->block);
} catch (BreakStatement operation) {
break;
} catch (ContinueStatement operation) {
condition = executeExpressionStatement(whileStmt->condition).boolValue;
continue;
}
condition = executeExpressionStatement(whileStmt->condition).boolValue;
}
}
void Interpreter::executeFunctionStatement(Statement* stmt) {
FunctionStatement* funStmt = (FunctionStatement *) stmt;
this->assign(funStmt->name.literal, Value(funStmt));
}
void Interpreter::executeReturnStatement(Statement* stmt) {
throw (ReturnStatement *) stmt;
}<file_sep>/**
* Meet Programming Language Expression Statement.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2019 Turaiiao
* Email: <EMAIL>
* Github: https://github.com/turaiiao
*/
#ifndef MEET_EXPRESSION_H
#define MEET_EXPRESSION_H
#define EXPRESSION_ASSIGN "EXPRESSION_ASSIGN"
#define EXPRESSION_BINARY "EXPRESSION_BINARY"
#define EXPRESSION_GROUP "EXPRESSION_GROUP"
#define EXPRESSION_LITERAL "EXPRESSION_LITERAL"
#define EXPRESSION_UNARY "EXPRESSION_UNARY"
#define EXPRESSION_VARIABLE "EXPRESSION_VARIABLE"
#define EXPRESSION_LOGICAL "EXPRESSION_LOGICAL"
#define EXPRESSION_LIST "EXPRESSION_LIST"
#define EXPRESSION_GET "EXPRESSION_GET"
#define EXPRESSION_SET "EXPRESSION_SET"
#define EXPRESSION_CALL "EXPRESSION_CALL"
class Expression {
public:
virtual ~Expression() = default;
virtual std::string defintion() = 0;
virtual std::string toString() = 0;
};
#endif<file_sep>/**
* Meet Programming Language Lexer.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2019 Turaiiao
* Email: <EMAIL>
* Github: https://github.com/turaiiao
*/
#ifndef MEET_LEXER_H
#define MEET_LEXER_H
#include <vector>
#include <map>
#include "Token.hpp"
class Lexer {
private:
std::string source;
std::vector<Token> tokens;
std::map<std::string, TokenType> keywords;
int line;
int position;
char look();
char look(int pos);
bool isAtEnd();
TokenType isKeyword(const std::string& identifier);
void addToken(const TokenType& type);
void addToken(const TokenType& type, bool skipTwoPos);
void addToken(TokenType type, std::string literal, int skip);
void lexIdentifier();
void lexString();
void lexNumber();
void lexSymbol();
void lexSkipWriteSpace();
void error(std::string message);
public:
explicit Lexer(std::string source);
std::vector<Token> tokenizer();
};
#endif
<file_sep># The Meet Programming Language
<file_sep>/**
* Meet Programming Language Statement.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2019 Turaiiao
* Email: <EMAIL>
* Github: https://github.com/turaiiao
*/
#ifndef MEET_FOR_STATEMENT_H
#define MEET_FOR_STATEMENT_H
#include <sstream>
#include "../interpreter/Statement.hpp"
#include "BlockStatement.hpp"
class ForStatement: public Statement {
public:
Statement* initializer;
Statement* condition;
Statement* renovate;
BlockStatement* block;
ForStatement(Statement* a, Statement* b, Statement* c, BlockStatement* d):
initializer(a), condition(b), renovate(c), block(d) {}
~ForStatement() {
delete initializer;
delete condition;
delete renovate;
}
std::string defintion() {
return STATEMENT_FOR;
}
std::string toString() {
std::stringstream data;
data << "[ ForStatement: initializer = " << initializer->toString() << ", condition = ";
data << condition->toString() << ", renovate = " << renovate->toString() << ", block = ";
data << block->toString() << " ]";
return data.str();
}
};
#endif<file_sep>cmake_minimum_required(VERSION 3.15)
project(meet)
set(CMAKE_CXX_STANDARD 17)
file(GLOB_RECURSE SOURCES ${PROJECT_SOURCE_DIR}/src/*.cpp)
file(GLOB_RECURSE INCLUDES ${PROJECT_SOURCE_DIR}/src/*.h*)
add_executable(meet ${INCLUDES} ${SOURCES})<file_sep>/**
* Meet Programming Language Values.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2019 Turaiiao
* Email: <EMAIL>
* Github: https://github.com/turaiiao
*/
#include "Value.hpp"
Value::Value(int value) {
this->valueNumber = true;
this->numberValue = std::move(value);
}
Value::Value(float value) {
this->valueFloat = true;
this->floatValue = std::move(value);
}
Value::Value(std::string value) {
this->valueString = true;
this->stringValue = std::move(value);
}
Value::Value(bool value) {
this->valueBool = true;
this->boolValue = value;
}
Value::Value(std::vector<Value> value) {
this->valueList = true;
this->listValue = std::move(value);
}
Value::Value(FunctionStatement* value) {
this->valueFun = true;
this->funValue = std::move(value);
}
Value::Value() {
this->valueNull = true;
}
void Value::printValue() {
if (this->valueNumber) {
std::cout << this->numberValue;
return;
}
if (this->valueFloat) {
std::cout << this->floatValue;
return;
}
if (this->valueString) {
std::cout << this->stringValue;
return;
}
if (this->valueBool) {
        std::cout << (this->boolValue ? "true" : "false");  // printValue should not emit a newline
return;
}
if (this->valueNull)
std::cout << "null";
if (this->valueList) {
std::cout << "[ ";
for (std::vector<Value>::iterator a = listValue.begin(); a != listValue.end(); a ++) {
if ((a + 1) == listValue.end())
std::cout << a->toString();
else
std::cout << a->toString() << ", ";
}
std::cout << " ]" << std::endl;
}
if (this->valueFun)
std::cout << "FunctionStatement";
}
void Value::printLineValue() {
if (this->valueNumber) {
std::cout << this->numberValue << std::endl;
return;
}
if (this->valueFloat) {
std::cout << this->floatValue << std::endl;
return;
}
if (this->valueString) {
std::cout << this->stringValue << std::endl;
return;
}
if (this->valueBool) {
this->boolValue ? std::cout << "true" << std::endl : std::cout << "false" << std::endl;
return;
}
if (this->valueNull)
std::cout << "null" << std::endl;
if (this->valueList) {
std::cout << "[ ";
for (std::vector<Value>::iterator a = listValue.begin(); a != listValue.end(); a ++) {
if ((a + 1) == listValue.end())
std::cout << a->toString();
else
std::cout << a->toString() << ", ";
}
std::cout << " ]" << std::endl;
}
if (this->valueFun)
std::cout << "FunctionStatement" << std::endl;
}
std::string Value::toString() {
if (this->valueNumber) return std::to_string(this->numberValue);
if (this->valueFloat) return std::to_string(this->floatValue);
if (this->valueString) return this->stringValue;
if (this->valueBool) return (this->boolValue) ? "true" : "false";
if (this->valueNull) return "null";
if (this->valueFun) return "FunctionStatement";
if (this->valueList) {
std::stringstream data;
data << "[ ";
for (std::vector<Value>::iterator a = this->listValue.begin(); a != this->listValue.end(); a ++) {
if ((a + 1) == this->listValue.end())
data << a->toString();
else
data << a->toString() << ", ";
}
data << " ]";
return data.str();
}
throw std::runtime_error("interpret error: cannot to string.");
}
Value backValueWithToken(Token token) {
if (token.type == TOKEN_VALUE_INT) return Value(std::stoi(token.literal));
if (token.type == TOKEN_VALUE_FLOAT) return Value(std::stof(token.literal));
if (token.type == TOKEN_VALUE_STRING) return Value(token.literal);
if (token.type == TOKEN_TRUE) return Value(true);
if (token.type == TOKEN_FALSE) return Value(false);
if (token.type == TOKEN_NULL) return Value();
throw std::runtime_error("type error: unknow literal token decode to value '" + token.literal + "'.");
}
Value backValueWithNullTyped(std::string literal) {
if (literal == TOKEN_NULL || literal == TOKEN_ANY) return Value();
if (literal == TOKEN_INT) return Value(0);
if (literal == TOKEN_FLOAT) return Value((float) 0);
if (literal == TOKEN_BOOLEAN) return Value(false);
if (literal == TOKEN_STRING) return Value(std::string(""));
if (literal == TOKEN_LIST) {
std::vector<Value> a = std::vector<Value>();
return Value(a);
}
throw std::runtime_error("type error: unknown literal with Post-initialized variable.");
}
Value Value::operator + (const Value& a) {
if (this->valueNumber) {
if (a.valueNumber) return Value(this->numberValue + a.numberValue);
if (a.valueFloat) return Value(this->numberValue + a.floatValue);
}
if (this->valueFloat) {
if (a.valueNumber) return Value(this->floatValue + a.numberValue);
if (a.valueFloat) return Value(this->floatValue + a.floatValue);
}
if (this->valueString && a.valueString)
return Value(this->stringValue + a.stringValue);
throw std::runtime_error("type error: Operands '+' must be two numbers and floats or two strings.");
}
Value Value::operator - (const Value& a) {
if (this->valueNumber) {
if (a.valueNumber) return Value(this->numberValue - a.numberValue);
if (a.valueFloat) return Value(this->numberValue - a.floatValue);
}
if (this->valueFloat) {
if (a.valueNumber) return Value(this->floatValue - a.numberValue);
if (a.valueFloat) return Value(this->floatValue - a.floatValue);
}
throw std::runtime_error("type error: Operands '-' must be two numbers or two floats.");
}
Value Value::operator * (const Value& a) {
if (this->valueNumber) {
if (a.valueNumber) return Value(this->numberValue * a.numberValue);
if (a.valueFloat) return Value(this->numberValue * a.floatValue);
}
if (this->valueFloat) {
if (a.valueNumber) return Value(this->floatValue * a.numberValue);
if (a.valueFloat) return Value(this->floatValue * a.floatValue);
}
throw std::runtime_error("type error: Operands '*' must be two numbers or two floats.");
}
Value Value::operator / (const Value& a) {
if (this->valueNumber) {
if (a.valueNumber) return Value(this->numberValue / a.numberValue);
if (a.valueFloat) return Value(this->numberValue / a.floatValue);
}
if (this->valueFloat) {
if (a.valueNumber) return Value(this->floatValue / a.numberValue);
if (a.valueFloat) return Value(this->floatValue / a.floatValue);
}
throw std::runtime_error("type error: Operands '/' must be two numbers or two floats.");
}
Value Value::operator % (const Value& a) {
    if (this->valueNumber && a.valueNumber) {
        // integer modulo should stay an int; the original float conversion made 5 % 2 print as 1.000000
        return Value(this->numberValue % a.numberValue);
    }
    throw std::runtime_error("type error: Operands '%' must be two numbers.");
}
Value Value::operator > (const Value& a) {
if (this->valueNumber) {
if (a.valueNumber) return Value(this->numberValue > a.numberValue);
if (a.valueFloat) return Value(this->numberValue > a.floatValue);
}
if (this->valueFloat) {
if (a.valueNumber) return Value(this->floatValue > a.numberValue);
if (a.valueFloat) return Value(this->floatValue > a.floatValue);
}
throw std::runtime_error("type error: Operands '>' must be two numbers or two floats.");
}
Value Value::operator < (const Value& a) {
if (this->valueNumber) {
if (a.valueNumber) return Value(this->numberValue < a.numberValue);
if (a.valueFloat) return Value(this->numberValue < a.floatValue);
}
if (this->valueFloat) {
if (a.valueNumber) return Value(this->floatValue < a.numberValue);
if (a.valueFloat) return Value(this->floatValue < a.floatValue);
}
throw std::runtime_error("type error: Operands '<' must be two numbers or two floats.");
}
Value Value::operator >= (const Value& a) {
if (this->valueNumber) {
if (a.valueNumber) return Value(this->numberValue >= a.numberValue);
if (a.valueFloat) return Value(this->numberValue >= a.floatValue);
}
if (this->valueFloat) {
if (a.valueNumber) return Value(this->floatValue >= a.numberValue);
if (a.valueFloat) return Value(this->floatValue >= a.floatValue);
}
throw std::runtime_error("type error: Operands '>=' must be two numbers or two floats.");
}
Value Value::operator <= (const Value& a) {
if (this->valueNumber) {
if (a.valueNumber) return Value(this->numberValue <= a.numberValue);
if (a.valueFloat) return Value(this->numberValue <= a.floatValue);
}
if (this->valueFloat) {
if (a.valueNumber) return Value(this->floatValue <= a.numberValue);
if (a.valueFloat) return Value(this->floatValue <= a.floatValue);
}
throw std::runtime_error("type error: Operands '<=' must be two numbers or two floats.");
}
Value Value::operator != (const Value& a) {
if (this->valueNull) {
if (a.valueNull) return Value(false);
if (a.valueNumber) return Value(a.numberValue != 0);
if (a.valueFloat) return Value(a.floatValue != 0);
if (a.valueString) return Value(a.stringValue != "");
if (a.valueBool) return Value(true);
}
if (this->valueNumber) {
if (a.valueNull) return Value(a.numberValue != 0);
if (a.valueNumber) return Value(this->numberValue != a.numberValue);
if (a.valueFloat) return Value(this->numberValue != a.floatValue);
if (a.valueBool) {
if (a.boolValue)
return Value(this->numberValue <= 1);
else
return Value(this->numberValue >= 0);
}
if (a.valueString)
throw std::runtime_error("type error: cannot comparison number with string.");
}
if (this->valueString) {
if (a.valueNull) return Value(this->stringValue != "");
if (a.valueString) return Value(this->stringValue != a.stringValue);
throw std::runtime_error("type error: string cannot comparison not null and string.");
}
if (this->valueFloat) {
if (a.valueFloat) return Value(this->floatValue != a.floatValue);
throw std::runtime_error("type error: cannot use '!=' operator without two float.");
}
if (this->valueBool) {
if (a.valueBool) return Value(this->boolValue != a.boolValue);
if (a.valueNumber) {
if (this->boolValue)
return Value(a.numberValue >= 1);
else
return Value(a.numberValue <= 0);
}
if (a.valueFloat) {
if (this->boolValue)
return Value(a.floatValue >= 1);
else
return Value(a.floatValue <= 0);
}
throw std::runtime_error("type error: bool cannot comparison not bool and number or float.");
}
return Value();
}
Value Value::operator == (const Value& a) {
if (this->valueNull) {
if (a.valueNull) return Value(true);
if (a.valueNumber) return Value(a.numberValue == 0);
if (a.valueFloat) return Value(a.floatValue == 0);
if (a.valueString) return Value(a.stringValue == "");
if (a.valueBool) return Value(false);
}
if (this->valueNumber) {
if (a.valueNull) return Value(a.numberValue == 0);
if (a.valueNumber) return Value(a.numberValue == this->numberValue);
if (a.valueFloat) return Value(a.numberValue == this->floatValue);
if (a.valueBool) {
if (a.boolValue)
return Value(this->numberValue >= 1);
else
return Value(this->numberValue <= 0);
}
if (a.valueString)
throw std::runtime_error("type error: cannot comparison number with string.");
}
if (this->valueFloat) {
if (a.valueFloat) return Value(this->floatValue == a.floatValue);
if (a.valueNumber) return Value((int) this->floatValue == a.numberValue);
throw std::runtime_error("type error: cannot use '==' operator without two float.");
}
if (this->valueString) {
if (a.valueNull) return Value(this->stringValue == "");
if (a.valueString) return Value(this->stringValue == a.stringValue);
throw std::runtime_error("type error: string cannot comparison not null and string.");
}
if (this->valueBool) {
if (a.valueBool) return Value(this->boolValue == a.boolValue);
if (a.valueNumber) {
if (this->boolValue)
return Value(a.numberValue >= 1);
else
return Value(a.numberValue <= 0);
}
if (a.valueFloat) {
if (this->boolValue)
return Value(a.floatValue >= 1);
else
return Value(a.floatValue <= 0);
}
throw std::runtime_error("type error: bool cannot comparison not bool and number or floats.");
}
return Value();
}
Value Value::operator || (const Value& b) {
    // logical OR should consider the right operand as well; the original returned only the left operand's truthiness
    auto truthy = [](const Value& v) -> bool {
        if (v.valueNumber) return v.numberValue >= 1;
        if (v.valueFloat) return v.floatValue >= 1;
        if (v.valueBool) return v.boolValue;
        if (v.valueNull) return false;
        throw std::runtime_error("interpret error: cannot execute logical expression unknown value.");
    };
    return Value(truthy(*this) || truthy(b));
}
Value Value::operator && (const Value& b) {
if (this->valueNumber) {
if (b.valueNumber) return Value(this->numberValue >= 1 && b.numberValue >= 1);
if (b.valueFloat) return Value(this->numberValue >= 1 && b.floatValue >= 1);
if (b.valueBool) return Value(this->numberValue >= 1 && b.boolValue);
if (b.valueNull) return Value(false);
throw std::runtime_error("interpret error: cannot execute logical expression unknown value.");
}
if (this->valueBool) {
if (b.valueNumber) return Value(this->boolValue && b.numberValue >= 1);
if (b.valueFloat) return Value(this->boolValue && b.floatValue >= 1);
if (b.valueBool) return Value(this->boolValue && b.boolValue);
if (b.valueNull) return Value(this->boolValue == false);
throw std::runtime_error("interpret error: cannot execute logical expression unknown value.");
}
if (this->valueNull) {
if (b.valueNumber) return Value(b.numberValue <= 0);
if (b.valueFloat) return Value(b.floatValue == 0);
if (b.valueBool) return Value(b.boolValue == false);
if (b.valueNull) return Value(true);
throw std::runtime_error("interpret error: cannot execute logical expression unknown value.");
}
return Value();
}<file_sep>/**
* Meet Programming Language Main.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2019 Turaiiao
* Email: <EMAIL>
* Github: https://github.com/turaiiao
*/
#include <iostream>
#include <fstream>
#include <iomanip>
#include <cstring>
#include <cstdio>
#include <cstdlib>
#include "Conversion.hpp"
#include "Lexer.hpp"
#include "Token.hpp"
#include "interpreter/Parser.hpp"
#include "interpreter/Interpreter.hpp"
#include "interpreter/Value.hpp"
using namespace std;
map<string, Value>* environment = new map<string, Value>();
bool isDebugMode = false;
static void partitionLine() {
for (int i = 0; i < 92; i ++)
cout << "-";
cout << endl;
}
static void run(const string& source);
static void repl() {
char* line = (char *) malloc(1024);
cout << "\n\t Meet Programming Language 1.0.0 (repl mode, June 10 2019, 19:24) \n" << endl;
while (true) {
cout << "meet > ";
cin.getline(line, 1024);
if (strlen(line) == 0)
continue;
run(line);
}
}
static void runFile(const char* path) {
    // read the last five characters of the path (reversed) to verify the ".meet" suffix
    char* suffix = (char *) malloc(6);  // five characters plus the terminating '\0'
    for (int i = 1; i < 6; i ++)
        suffix[i - 1] = path[strlen(path) - i];
    suffix[5] = '\0';
    if (strcmp(suffix, "teem.") != 0) {
        cout << "read .meet file only." << endl;
        exit(64);
    }
    ifstream fileStream;
    fileStream.open(path);
    if (fileStream.fail()) {
        cerr << "cannot open file." << endl;
        exit(64);
    }
    string source((istreambuf_iterator<char>(fileStream)), (istreambuf_iterator<char>()));
#ifdef _WIN32
    run(utf8ToGbk(source.c_str()));
#else
    // iconv needs writable buffers, so copy the input and reserve room for the converted output
    string input = source;
    string output(source.size() * 2 + 1, '\0');
    utf8ToGbk(&input[0], input.size(), &output[0], output.size());
    run(output.c_str());
#endif
    fileStream.close();
    free(suffix);  // allocated with malloc, so release with free
}
static void run(const string& source) {
int i = 0;
Lexer* lexer = new Lexer(source);
vector<Token> tokens = lexer->tokenizer();
if (isDebugMode) {
for (auto token : tokens)
printf("%-5d %-25s : %-50s : %5d \n", i ++, getTokenLiteralWithType(token.type).c_str(),
token.literal.c_str(), token.line);
partitionLine();
i = 0;
}
Parser* parser = new Parser(tokens);
vector<Statement *> statements = parser->parseProgram();
if (isDebugMode) {
for (auto stmt : statements)
printf("%-5d %-50s \n", i ++, stmt->toString().c_str());
partitionLine();
i = 0;
}
Interpreter* interpret = new Interpreter(statements, environment);
interpret->execute();
delete lexer;
delete parser;
delete interpret;
if (isDebugMode) {
for (auto obj : *environment)
printf("%-5d %-25s : %s \n", i ++, obj.first.c_str(), obj.second.toString().c_str());
partitionLine();
i = 0;
}
}
int main(int argc, char** argv) {
cout.setf(ios::fixed);
if (argc == 1) repl();
if (argc == 2 && strcmp(argv[1], "-d") == 0) {
isDebugMode = true;
repl();
return 0;
}
if (argc == 2) {
runFile(argv[1]);
return 0;
}
if (argc == 3 && strcmp(argv[2], "-d") == 0) {
isDebugMode = true;
runFile(argv[1]);
return 0;
}
cout << "usage: " << argv[0] << " [ .meet file path ] " << endl;
return 0;
}
<file_sep>/**
* Meet Programming Language Statement.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2019 Turaiiao
* Email: <EMAIL>
* Github: https://github.com/turaiiao
*/
#ifndef MEET_VAR_STATEMENT
#define MEET_VAR_STATEMENT
#include <vector>
#include <sstream>
#include "../interpreter/Statement.hpp"
#include "../expressions/AssignExpression.hpp"
class VarStatement: public Statement {
public:
std::vector<AssignExpression *> list;
VarStatement(std::vector<AssignExpression *> list): list(std::move(list)) {}
~VarStatement() {
list.clear();
std::vector<AssignExpression *>().swap(list);
}
std::string defintion() {
return STATEMENT_VAR;
}
std::string toString() {
std::stringstream stream;
stream << "[ VarStatement: list = [ ";
for (auto i : list) {
stream << "name = " << i->name.literal << ", value = ";
if (i->initializer != nullptr)
stream << i->initializer->toString() << ", type = ";
else
stream << "null, type = ";
if (i->typed.literal.length() != 0)
stream << i->typed.literal << " | ";
else
stream << "any" << " | ";
}
stream << "]";
return stream.str();
}
};
#endif<file_sep>/**
* Meet Programming Language Expression.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2019 Turaiiao
* Email: <EMAIL>
* Github: https://github.com/turaiiao
*/
#ifndef MEET_VARIABLE_EXPRESSION_H
#define MEET_VARIABLE_EXPRESSION_H
#include "../interpreter/Expression.hpp"
class VariableExpression: public Expression {
public:
Token name;
VariableExpression(Token name): name(std::move(name)) {}
        ~VariableExpression() {
            // name is a value member, so deleting its address was undefined behaviour
        }
std::string defintion() {
return EXPRESSION_VARIABLE;
}
std::string toString() {
return "[ VariableExpression: name = " + name.literal + " ]";
}
};
#endif<file_sep>/**
* Meet Programming Language Statement.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2019 Turaiiao
* Email: <EMAIL>
* Github: https://github.com/turaiiao
*/
#ifndef MEET_STATEMENT_H
#define MEET_STATEMENT_H
#include <iostream>
#define STATEMENT_EXPRESSION "STATEMENT_EXPRESSION"
#define STATEMENT_VAR "STATEMENT_VAR"
#define STATEMENT_PRINTLN "STATEMENT_PRINTLN"
#define STATEMENT_BLOCK "STATEMENT_BLOCK"
#define STATEMENT_FOR "STATEMENT_FOR"
#define STATEMENT_BREAK "STATEMENT_BREAK"
#define STATEMENT_CONTINUE "STATEMENT_CONTINUE"
#define STATEMENT_IF "STATEMENT_IF"
#define STATEMENT_WHILE "STATEMENT_WHILE"
#define STATEMENT_FUN "STATEMENT_FUN"
#define STATEMENT_RETURN "STATEMENT_RETURN"
class Statement {
public:
virtual ~Statement() = default;
virtual std::string defintion() = 0;
virtual std::string toString() = 0;
};
#endif<file_sep>/**
* Meet Programming Language Expression.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2019 Turaiiao
* Email: <EMAIL>
* Github: https://github.com/turaiiao
*/
#ifndef MEET_SET_EXPRESSION_H
#define MEET_SET_EXPRESSION_H
#include "../interpreter/Expression.hpp"
#include "../interpreter/Statement.hpp"
#include "../Token.hpp"
class SetExpression: public Expression {
public:
Token name;
Expression* expression;
Expression* value;
std::string type;
SetExpression(Token name, Expression* expression, Expression* value, std::string type): name(std::move(name)) {
this->expression = std::move(expression);
this->value = std::move(value);
this->type = std::move(type);
}
~SetExpression() {
delete expression;
delete value;
}
std::string defintion() {
return EXPRESSION_SET;
}
std::string toString() {
std::stringstream data;
data << "[ SetExpression: name = ";
data << name.literal << ", expression = ";
data << expression->toString() << ", value = ";
data << value->toString();
data << ", type = " << type;
data << " ]";
return data.str();
}
};
#endif<file_sep>/**
* Meet Programming Language Statement.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2019 Turaiiao
* Email: <EMAIL>
* Github: https://github.com/turaiiao
*/
#ifndef MEET_PRINTLN_STATEMENT_H
#define MEET_PRINTLN_STATEMENT_H
#include "../interpreter/Statement.hpp"
#include "../interpreter/Expression.hpp"
class PrintlnStatement: public Statement {
public:
Expression* expression;
bool cls;
PrintlnStatement(Expression* expression, bool cls): expression(std::move(expression)), cls(cls) {}
~PrintlnStatement() {
    delete expression;
    // cls is a plain bool member held by value; deleting its address would be undefined behaviour.
}
std::string defintion() {
return STATEMENT_PRINTLN;
}
std::string toString() {
return "[ PrintlnStatement: expr = " + expression->toString() +
", cls = " + (cls ? "true" : "false") + " ]";
}
};
#endif<file_sep>### How to Quickly Implement a Programming Language
I am sorry that, for personal reasons, I could not finish the talk at the tech-sharing session, so I am sharing the material with everyone through this article.
This tutorial does not involve LLVM, ANTLR, Yacc, or any other framework; instead we write the lexer and the parser from scratch, and finally evaluate the program by walking the tree with the visitor pattern.
[Project repository](https://github.com/Turaiiao/meet)
Building Meet requires CMake >= 3.15. On Linux:
```
cmake .
make
./meet
```
On Windows you need to install MinGW and CMake, and build with CLion or the CMake extension for VS Code.
The [leet-code](https://github.com/Turaiiao/meet/tree/master/test/leet-code) folder contains reference solutions I wrote after revising the grammar.
I have also written a few posts on compiler construction on my blog: [Turaiiao's Blog](https://blog.xyiio.cn/)
#### Why build a programming language?
I think programming is great fun. Lei Jun once wrote in his graduation thesis, "I am willing to write programs for my whole life, because this is my passion." Building the features you want, step by step, is genuinely enjoyable.
Creating a language of your own means you can have elegant syntax, avoid piling on features, stay lightweight, have a clean package-manager implementation, and still cover simple business needs.
#### What has been implemented?
I have implemented the basic syntax in C++. Because the language design has many flaws, development has stopped; I plan to rewrite it in Rust later and revise the grammar.
```
var a = 23, b = 0.5, c = 'hello world', d = false
var f: int = 34
println a
println a + b + f
println c + '123'
println d == true
println !d
var a: string = 'hello, ', b: string = 'world !'
println '$a $b'
if a > 20 & a < 25 -> println 'a > 20' else -> println 'a < 25'
if a > 20 {
println 'a'
} elif a == 20 {
println 'b'
} else {
println 'c'
}
var a: list<int> = [ 1, 3, 4, 5, 6 ]
var b: list<any> = [ 1, 'a', false, 23, 4.5 ]
println a[0] + a[1] # 4
println b[0] + b[3] # 24
println b[1] + b[1] # "aa"
for var a = 0; a < 10; a += 1 -> println a
var a: int = 10
while a > 0 {
if a % 2 == 0 ->
println a
a -= 1
}
# For example, draw a heart
var x: float , y: float, str: string
for var i = 1.5; i > -1.5; i -= 0.1 {
str = ''
for x = -1.5; x < 1.5; x += 0.05 {
y = x * x + i * i - 1
if y * y * y - x * x * i * i * i < 0.0 ->
str += '*'
else ->
str += ' '
}
println str
}
# turaiiao@turaiiao-P45VJ:~/Desktop/meet$ ./meet ./test/heart.meet
#
#
#
# ********* *********
# ***************** *****************
# ****************************************
# *******************************************
# *********************************************
# *********************************************
# *********************************************
# *********************************************
# *********************************************
# *********************************************
# *******************************************
# *****************************************
# ****************************************
# *************************************
# ***********************************
# *********************************
# *****************************
# *************************
# *********************
# ***************
# *********
# ***
#
#
# turaiiao@turaiiao-P45VJ:~/Desktop/meet$
fun a (a: int, b: int) -> int {
return a + b
}
fun assign (a: string, b: string, c: boolean) {
if c ->
println '$a $b'
else ->
println 'undefined'
}
println a (1, 2)
println assign ('hello', 'world', true)
```
#### Is it worth talking about compilers first?
A compiler is a program that translates a source string into a specified target format.
For example, OCaml, C/C++, Go, Python, Java, JavaScript, Haskell, PHP, Ruby, and Erlang are all compiled languages in this sense.
Languages that compile to bytecode count as compiled too; they run the bytecode on their own virtual machine.
An interpreter is a program that evaluates a particular syntax tree and its nodes directly.
Examples include language toys such as JLox, Monkey, VimScript, TypeScript, and so on.
Borrowing an architecture diagram from [Goby](https://github.com/goby-lang/goby):

This article walks through the Meet interpreter.
#### Lexical analysis
The compiler takes a string and first turns it into a list of tokens, for example:
**In Meet you can pass the -d flag to print the token list and the syntax nodes.**
```
var a: int = 20
if a > 0 & a < 30 {
println 'hello world'
}
# turaiiao@DESKTOP-AH5VM1I:~/meet/test$ ../meet ./test.meet -d
# 0 TOKEN_VAR : var : 1
# 1 TOKEN_VALUE_IDENTIFIER : a : 1
# 2 TOKEN_COLON : : : 1
# 3 TOKEN_INT : int : 1
# 4 TOKEN_EQUAL : = : 1
# 5 TOKEN_VALUE_INT : 20 : 1
# 6 TOKEN_IF : if : 3
# 7 TOKEN_VALUE_IDENTIFIER : a : 3
# 8 TOKEN_GREATER : > : 3
# 9 TOKEN_VALUE_INT : 0 : 3
# 10 TOKEN_AND : & : 3
# 11 TOKEN_VALUE_IDENTIFIER : a : 3
# 12 TOKEN_LESS : < : 3
# 13 TOKEN_VALUE_INT : 30 : 3
# 14 TOKEN_LBRACE : { : 3
# 15 TOKEN_PRINTLN : println : 4
# 16 TOKEN_VALUE_STRING : hello world : 4
# 17 TOKEN_RBRACE : } : 5
# 18 TOKEN_EOF : EOF : 6
# --------------------------------------------------------------------------------------------
hello world
```
[See the full source](https://github.com/Turaiiao/meet/blob/master/src/Lexer.cpp)
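To make the shape of the scanner concrete, here is a minimal, self-contained sketch of such a loop. This is not the real Meet lexer; the `Tok` struct and the token names are made up purely for illustration.
```c++
#include <cctype>
#include <cstddef>
#include <string>
#include <vector>

// Hypothetical token type for this sketch; the real lexer has its own Token class.
struct Tok { std::string type; std::string literal; int line; };

std::vector<Tok> scan(const std::string& src) {
    std::vector<Tok> tokens;
    std::size_t pos = 0;
    int line = 1;
    while (pos < src.size()) {
        char c = src[pos];
        if (c == '\n') { line++; pos++; }                   // track line numbers
        else if (isspace((unsigned char) c)) { pos++; }      // skip other whitespace
        else if (isalpha((unsigned char) c)) {               // identifiers / keywords
            std::size_t start = pos;
            while (pos < src.size() && isalpha((unsigned char) src[pos])) pos++;
            tokens.push_back({"IDENTIFIER", src.substr(start, pos - start), line});
        } else if (isdigit((unsigned char) c)) {              // integer literals
            std::size_t start = pos;
            while (pos < src.size() && isdigit((unsigned char) src[pos])) pos++;
            tokens.push_back({"INT", src.substr(start, pos - start), line});
        } else {                                              // everything else: one-character symbols
            tokens.push_back({"SYMBOL", std::string(1, c), line});
            pos++;
        }
    }
    tokens.push_back({"EOF", "EOF", line});
    return tokens;
}
```
The real lexer additionally handles strings, comments, two-character operators, and keywords, as the linked source shows.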
#### Parsing
Parsing is, in my view, the hardest step: it has to handle arbitrary expressions as well as more complex constructs such as lambdas.
The token list produced earlier is passed in and matched token by token, and everything is turned into the various syntax nodes.
```
var a: int = 20, b: int = 30
if a > b -> println 'a greater than b'
println 1 + 2 * 3 - (4 + 5)
[ VarStatement: list = [ name = a, value = [ LiteralExpression: token = 20 ], type = int | name = b, value = [ LiteralExpression: token = 30 ], type = int | ]
[ IfStatement: condition = [ ExpressionStatement: expr = [ BinaryExpression: left = [ VariableExpression: name = a ], token = >, right = [ VariableExpression: name = b ] ] ], establish = [ BlockStatement: block = [ PrintlnStatement: expr = [ VariableExpression: name = a ], cls = true ] | , elifCondition = [ ExpressionStatement: expr = [ BinaryExpression: left = [ VariableExpression: name = a ], token = ==, right = [ VariableExpression: name = b ] ] ], elifEstablish = [ BlockStatement: block = [ PrintlnStatement: expr = [ VariableExpression: name = b ], cls = true ] | , elseEstablish = [ BlockStatement: block = [ PrintlnStatement: expr = [ BinaryExpression: left = [ VariableExpression: name = a ], token = +, right = [ VariableExpression: name = b ] ], cls = true ] | ]
[ PrintlnStatement: expr = [ BinaryExpression: left = [ BinaryExpression: left = [ LiteralExpression: token = 1 ], token = +, right = [ BinaryExpression: left = [ LiteralExpression: token = 2 ], token = *, right = [ LiteralExpression: token = 3 ] ] ], token = -, right = [ GroupExpression: expr = [ BinaryExpression: left = [ LiteralExpression: token = 4 ], token = +, right = [ LiteralExpression: token = 5 ] ] ] ], cls = true ]
```
Sorry, the output was never prettified; the nodes are printed back to back, so it is not very readable.
As you can see, each node has a specific name, for example:
```
a、12、'abc' -> LiteralExpr
1 + 2 -> [ BinaryExpr = [ Left = LiteralExpr, Op = '+', Right = LiteralExpr ] ]
println a -> [ PrintlnStmt = [ Expr = LiteralExpr ] ]
if a + 1 -> println a
[ IfStmt = [ Condition = [ BinaryExpr... ], Block = [ PrintlnStmt = [ Expr = LiteralExpr ] ] ] ]
```
Parsing expressions is the tricky part. The usual approach is top-down (recursive-descent) analysis: walk the token list level by level until the highest-precedence symbols are consumed.
The top-down parser is in lines 104 - 352 of [Parser.cpp](https://github.com/Turaiiao/meet/blob/master/src/interpreter/Parser.cpp#L104).
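As a rough, self-contained sketch of the precedence-climbing idea (this is not the actual Parser.cpp code, and it evaluates the expression directly instead of building BinaryExpression nodes, but the addition/multiplication/primary chain mirrors the same structure):
```c++
#include <cstddef>
#include <string>

// Toy recursive-descent parser over single-digit numbers, + - * / and parentheses.
// One function per precedence level: the lowest level calls the higher one first.
struct MiniParser {
    std::string src;
    std::size_t pos = 0;

    int expression() { return addition(); }

    int addition() {                       // lowest precedence: + and -
        int value = multiplication();
        while (pos < src.size() && (src[pos] == '+' || src[pos] == '-')) {
            char op = src[pos++];
            int rhs = multiplication();
            value = (op == '+') ? value + rhs : value - rhs;
        }
        return value;
    }

    int multiplication() {                 // higher precedence: * and /
        int value = primary();
        while (pos < src.size() && (src[pos] == '*' || src[pos] == '/')) {
            char op = src[pos++];
            int rhs = primary();
            value = (op == '*') ? value * rhs : value / rhs;
        }
        return value;
    }

    int primary() {                        // highest precedence: digits and ( )
        if (src[pos] == '(') {
            pos++;                         // consume '('
            int value = expression();
            pos++;                         // consume ')'
            return value;
        }
        return src[pos++] - '0';
    }
};
// MiniParser{"1+2*3-(4+5)"}.expression() == -2
```
In the real parser each of these functions returns an Expression* node instead of an int, and the token list is walked through helpers such as look() and previous() rather than a raw string.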
The parsing of if, for, while, and the other statements is also in Parser.cpp.
For example, parsing a while statement: [Parser.cpp](https://github.com/Turaiiao/meet/blob/master/src/interpreter/Parser.cpp#L486)
```c++
Statement* Parser::whileStatement() {
WhileStatement* whileStatement = new WhileStatement;
whileStatement->condition = statement();
if (look(TOKEN_MINUS_GREATER))
whileStatement->block = (BlockStatement *) minusGreaterBlockStatement();
else if (look(TOKEN_LBRACE))
whileStatement->block = (BlockStatement *) blockStatement();
else
error("syntax error: exepct '{' or '->' after while statement condition.");
return whileStatement;
}
```
The various syntax nodes are in the expressions and statements folders.
#### Symbol table and runtime environment
An interpreter usually stores runtime data such as variables. [Interpreter.hpp](https://github.com/Turaiiao/meet/blob/master/src/interpreter/Interpreter.hpp#L59)
Everything is stored in a map, where the key is the variable name and the value is its content.
Then we only need to walk the syntax tree once. See line 82 of [Interpreter.cpp](https://github.com/Turaiiao/meet/blob/master/src/interpreter/Interpreter.cpp#L82)
The map's value can hold any Value, so I defined Value.hpp to represent the different types.
[Value.hpp](https://github.com/Turaiiao/meet/blob/master/src/interpreter/Value.hpp#L32)
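A minimal sketch of what such an environment looks like (the names here are simplified and illustrative; the real interpreter stores Value objects and also tracks the declared type):
```c++
#include <map>
#include <stdexcept>
#include <string>

// Simplified environment: variable name -> stored content.
class Environment {
    std::map<std::string, std::string> table;
public:
    void assign(const std::string& name, const std::string& value) {
        table[name] = value;                      // define or overwrite a variable
    }
    std::string get(const std::string& name) const {
        auto it = table.find(name);
        if (it == table.end())
            throw std::runtime_error("undefined variable: " + name);
        return it->second;                        // return the stored content
    }
};
// Environment env; env.assign("a", "20"); env.get("a") == "20"
```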
For example, handling the println node: [Interpreter.cpp](https://github.com/Turaiiao/meet/blob/master/src/interpreter/Interpreter.cpp#L528)
```c++
void Interpreter::executePrintlnStatement(Statement* stmt) {
PrintlnStatement* printlnStmt = (PrintlnStatement *) stmt;
Value a = executeExpression(printlnStmt->expression);
if (printlnStmt->cls)
a.printLineValue();
else
a.printValue();
}
```
#### Reverse Polish notation, stacks, and bytecode
A binary operator is normally written between its two operands; this is called infix notation.
In 1929 the Polish logician J. Lukasiewicz proposed another way of writing expressions, in which every operator is placed after its operands; this is called postfix notation.
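For example, `1 + 2 * 3 - 4` becomes `1 2 3 * + 4 -`. Below is a compact sketch of the classic shunting-yard conversion for single-digit operands; the linked Rust program further down does the same job, so treat this purely as an illustration.
```c++
#include <cctype>
#include <stack>
#include <string>

// Convert an infix expression with single-digit numbers, + - * / and
// parentheses into postfix (reverse Polish) form.
std::string toPostfix(const std::string& infix) {
    auto prec = [](char op) { return (op == '+' || op == '-') ? 1 : 2; };
    std::string out;
    std::stack<char> ops;
    for (char c : infix) {
        if (isdigit((unsigned char) c)) out.push_back(c);
        else if (c == '(') ops.push(c);
        else if (c == ')') {
            while (ops.top() != '(') { out.push_back(ops.top()); ops.pop(); }
            ops.pop();                                   // drop the '('
        } else if (c == '+' || c == '-' || c == '*' || c == '/') {
            while (!ops.empty() && ops.top() != '(' && prec(ops.top()) >= prec(c)) {
                out.push_back(ops.top()); ops.pop();     // flush higher/equal precedence
            }
            ops.push(c);
        }                                                // spaces are ignored
    }
    while (!ops.empty()) { out.push_back(ops.top()); ops.pop(); }
    return out;
}
// toPostfix("1+2*3-4") == "123*+4-"
```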
```
turaiiao@turaiiao-P45VJ:~/Desktop/stack-evaluate$ ./stack-four-operational-execute
1 + 2 * 3 - 4 -> 1 2 3 * + 4 -
OP_LOCAL 1
OP_LOCAL 2
OP_LOCAL 3
OP_MULTIPLY
OP_ADD
OP_LOCAL 4
OP_SUBTRACT
OP_RETURN
3.000000
(1 + 2) * 3 - 4 -> 1 2 + 3 * 4 -
OP_LOCAL 1
OP_LOCAL 2
OP_ADD
OP_LOCAL 3
OP_MULTIPLY
OP_LOCAL 4
OP_SUBTRACT
OP_RETURN
5.000000
1 + 2 * 3 - (4 + 5) / 6 -> 1 2 3 * + 4 5 + 6 / -
OP_LOCAL 1
OP_LOCAL 2
OP_LOCAL 3
OP_MULTIPLY
OP_ADD
OP_LOCAL 4
OP_LOCAL 5
OP_ADD
OP_LOCAL 6
OP_DIVIDE
OP_SUBTRACT
OP_RETURN
5.500000
7 / 9 -> 7 9 /
OP_LOCAL 7
OP_LOCAL 9
OP_DIVIDE
OP_RETURN
0.777778
```
[The parsing function](https://github.com/Turaiiao/stack-evaluate/blob/master/stack-four-operational-execute.rs#L50)
The expression is then turned into a chunk. Usually only a chunk is handed to the virtual machine; it contains several stacks, such as the value stack and the opcode stack.
```rust
struct Chunk {
opcode_stack: Vec<OpCode>,
values_stack: Vec<i32>
}
impl ChunkImpl for Chunk {
fn emit_constant(&mut self, value: i32) {
self.opcode_stack.push(OpCode::OpLocal);
self.values_stack.push(value);
}
fn emit_opcode(&mut self, opcode: OpCode) {
self.opcode_stack.push(opcode);
}
fn display(&self) {
let mut k = 0;
for i in self.opcode_stack.iter() {
print!("{}", opcode_string(i));
if opcode_string(i) == opcode_string(&OpCode::OpLocal) {
println!("{:>10}", self.values_stack.get(k).unwrap());
k += 1;
} else {
println!();
}
}
}
}
trait ChunkImpl {
// emit a OP_LOCAL and some value to chunk.
fn emit_constant(&mut self, value: i32);
// only emit a opcode.
fn emit_opcode(&mut self, opcode: OpCode);
// display opcodes and values.
// display value if it is OP_LOCAL else only opcode.
fn display(&self);
}
fn transform(stack: Vec<char>) -> Chunk {
let a: Vec<OpCode> = Vec::new();
let b: Vec<i32> = Vec::new();
let mut chunk = Chunk {
opcode_stack: a,
values_stack: b
};
for i in stack {
match i {
'0'..='9' => chunk.emit_constant(
(i as i32) - 48
),
'+' => chunk.emit_opcode(OpCode::OpAdd),
'-' => chunk.emit_opcode(OpCode::OpSubtract),
'*' => chunk.emit_opcode(OpCode::OpMultiply),
'/' => chunk.emit_opcode(OpCode::OpDivide),
_ => unimplemented!()
}
}
chunk.emit_opcode(OpCode::OpReturn);
return chunk;
}
```
Then a visitor executes it by walking the opcode stack: an OP_LOCAL pushes its value onto the value stack, while an opcode such as OP_ADD pops two values off the value stack, applies the operation, and pushes the result back.
```rust
fn visitor(chunk: Chunk) {
let mut stack: Vec<f32> = Vec::new();
let mut k = 0;
for i in chunk.opcode_stack {
match i {
OpCode::OpLocal => {
stack.push(
*chunk.values_stack.get(k).unwrap() as f32
);
k += 1;
}
OpCode::OpReturn => break,
_ => {
let a = stack.pop().unwrap();
let b = stack.pop().unwrap();
match i {
OpCode::OpAdd => stack.push(b + a),
OpCode::OpSubtract => stack.push(b - a),
OpCode::OpMultiply => stack.push(b * a),
OpCode::OpDivide => stack.push(b / a),
_ => unimplemented!()
}
}
}
}
println!("{:.6}", stack.last().unwrap());
}
```
#### CPython bytecode
As we know, CPython is written in pure C and relies entirely on stack structures.
CPython uses three kinds of stacks (a toy model follows the list):
- The call stack is the main structure. Each active call gets a frame; the bottom of the stack is the program entry point. Every function call pushes a frame onto the stack, and returning from the function destroys it.
- The evaluation stack: each frame has one, and most code runs here, manipulating values and then discarding them.
- The block stack tracks certain constructs such as break, continue, try, and with blocks. It lets Python know which block is active at any moment, so that, for example, continue affects the correct block.
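Here is a toy model of this layering (the structures are invented for illustration, not CPython's real ones): a call stack of frames, each frame carrying its own evaluation stack.
```c++
#include <stack>
#include <string>
#include <vector>

// Toy model only: a call stack of frames, each with its own evaluation stack.
struct Frame {
    std::string functionName;
    std::stack<int> evalStack;
};

int main() {
    std::vector<Frame> callStack;                 // bottom frame is the program entry
    callStack.push_back({"<module>", {}});
    callStack.push_back({"a", {}});               // calling a() pushes a new frame
    Frame& top = callStack.back();
    top.evalStack.push(2);                        // like LOAD_CONST 2
    top.evalStack.push(5);                        // like LOAD_CONST 5
    int b = top.evalStack.top(); top.evalStack.pop();
    int a = top.evalStack.top(); top.evalStack.pop();
    top.evalStack.push(a + b);                    // like BINARY_ADD
    callStack.pop_back();                         // returning destroys the frame
    return 0;
}
```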
In Python you can import the dis module to disassemble bytecode and print it.
```
>>> import dis
>>> def a():
... print('Hello World')
...
>>> dis.dis(a)
2 0 LOAD_GLOBAL 0 (print)
2 LOAD_CONST 1 ('Hello World')
4 CALL_FUNCTION 1
6 POP_TOP
8 LOAD_CONST 0 (None)
10 RETURN_VALUE
>>> def a():
... x = 2
... y = 5
... print(x + y)
...
>>> dis.dis(a)
2 0 LOAD_CONST 1 (2)
2 STORE_FAST 0 (x)
3 4 LOAD_CONST 2 (5)
6 STORE_FAST 1 (y)
4 8 LOAD_GLOBAL 0 (print)
10 LOAD_FAST 0 (x)
12 LOAD_FAST 1 (y)
14 BINARY_ADD
16 CALL_FUNCTION 1
18 POP_TOP
20 LOAD_CONST 0 (None)
22 RETURN_VALUE
```
What about conditional statements? In that case the bytecode performs jumps within the frame.
```
>>> def a():
... x = 23
... if x > 0:
... print('123')
... elif x == 20:
... print('456')
... else:
... print('789')
...
>>> dis.dis(a)
2 0 LOAD_CONST 1 (23)
2 STORE_FAST 0 (x)
3 4 LOAD_FAST 0 (x)
6 LOAD_CONST 2 (0)
8 COMPARE_OP 4 (>)
10 POP_JUMP_IF_FALSE 22
4 12 LOAD_GLOBAL 0 (print)
14 LOAD_CONST 3 ('123')
16 CALL_FUNCTION 1
18 POP_TOP
20 JUMP_FORWARD 26 (to 48)
5 >> 22 LOAD_FAST 0 (x)
24 LOAD_CONST 4 (20)
26 COMPARE_OP 2 (==)
28 POP_JUMP_IF_FALSE 40
6 30 LOAD_GLOBAL 0 (print)
32 LOAD_CONST 5 ('456')
34 CALL_FUNCTION 1
36 POP_TOP
38 JUMP_FORWARD 8 (to 48)
8 >> 40 LOAD_GLOBAL 0 (print)
42 LOAD_CONST 6 ('789')
44 CALL_FUNCTION 1
46 POP_TOP
>> 48 LOAD_CONST 0 (None)
50 RETURN_VALUE
>>> def a():
... x = 100
... while x > 0:
... print(x)
... x -= 1
...
>>> dis.dis(a)
2 0 LOAD_CONST 1 (100)
2 STORE_FAST 0 (x)
3 4 SETUP_LOOP 28 (to 34)
>> 6 LOAD_FAST 0 (x)
8 LOAD_CONST 2 (0)
10 COMPARE_OP 4 (>)
12 POP_JUMP_IF_FALSE 32
4 14 LOAD_GLOBAL 0 (print)
16 LOAD_FAST 0 (x)
18 CALL_FUNCTION 1
20 POP_TOP
5 22 LOAD_FAST 0 (x)
24 LOAD_CONST 3 (1)
26 INPLACE_SUBTRACT
28 STORE_FAST 0 (x)
30 JUMP_ABSOLUTE 6
>> 32 POP_BLOCK
>> 34 LOAD_CONST 0 (None)
36 RETURN_VALUE
```
We can clearly see the bytecode and the execution flow.
The switch statement that executes bytecode in Python is in [ceval.c](https://github.com/python/cpython/blob/master/Python/ceval.c#L1319).
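ceval.c is essentially one big loop that fetches the next opcode and switches on it. Here is a toy sketch of that dispatch pattern; the opcodes and layout are invented for illustration and are not CPython's.
```c++
#include <cstddef>
#include <cstdint>
#include <vector>

// Toy opcodes for a tiny stack machine (not CPython's real opcode set).
enum Op : uint8_t { OP_CONST, OP_ADD, OP_RETURN };

int run(const std::vector<uint8_t>& code, const std::vector<int>& consts) {
    std::vector<int> stack;
    std::size_t pc = 0;
    while (pc < code.size()) {
        switch (code[pc]) {
            case OP_CONST:                        // push consts[operand]
                stack.push_back(consts[code[pc + 1]]);
                pc += 2;
                break;
            case OP_ADD: {                        // pop two values, push their sum
                int b = stack.back(); stack.pop_back();
                int a = stack.back(); stack.pop_back();
                stack.push_back(a + b);
                pc += 1;
                break;
            }
            case OP_RETURN:                       // top of the stack is the result
                return stack.back();
        }
    }
    return 0;
}
// run({OP_CONST, 0, OP_CONST, 1, OP_ADD, OP_RETURN}, {2, 5}) == 7
```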
#### Further reading
My writing is not the best; the tutorials below may help you more.
[munificent, engineer on Google's compiler team and developer of Dart, Wren, and other languages](https://github.com/munificent)
[500 Lines: a Python interpreter written in Python, following the same pipeline as CPython](https://github.com/aosabook/500lines/tree/master/interpreter)
[Crafting Interpreters: implementing the clox language with a stack](http://www.craftinginterpreters.com/)
[*Implement Lua Yourself* (luago-book)](https://github.com/zxh0/luago-book)
[Implement the Stone scripting language in two weeks](https://github.com/chibash/stone)
[awesome-compilers](https://github.com/aalhour/awesome-compilers)
[可能是最小的编译器](https://github.com/jamiebuilds/the-super-tiny-compiler)
[let us build a simple interpreter](https://github.com/rspivak/lsbasi)
[minimal lisp interpreter](https://github.com/mattn/cisp)
<file_sep>/**
* Meet Programming Language Values.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2019 Turaiiao
* Email: <EMAIL>
* Github: https://github.com/turaiiao
*/
#ifndef MEET_VALUE_H
#define MEET_VALUE_H
#include <iostream>
#include <map>
#include <sstream>
#include <vector>
#include "../Token.hpp"
#include "../statements/FunctionStatement.hpp"
class Value {
public:
explicit Value(int value);
explicit Value(float value);
explicit Value(std::string value);
explicit Value(bool value);
explicit Value(std::vector<Value> value);
explicit Value(FunctionStatement* value);
Value(); // Null value.
Value operator + (const Value& a);
Value operator - (const Value& b);
Value operator * (const Value& b);
Value operator / (const Value& b);
Value operator % (const Value& b);
Value operator > (const Value& b);
Value operator < (const Value& b);
Value operator >= (const Value& b);
Value operator <= (const Value& b);
Value operator != (const Value& b);
Value operator == (const Value& b);
Value operator || (const Value& b);
Value operator && (const Value& b);
bool valueNumber = false;
bool valueFloat = false;
bool valueString = false;
bool valueBool = false;
bool valueNull = false;
bool valueList = false;
bool valueFun = false;
int numberValue = 0;
float floatValue = 0;
bool boolValue = false;
std::string stringValue = std::string();
std::string fixedListValue;
std::vector<Value> listValue = std::vector<Value>();
FunctionStatement* funValue = nullptr;
bool varAny = false;
bool varNumber = false;
bool varFloat = false;
bool varString = false;
bool varBoolean = false;
bool varNull = false;
bool varList = false;
void printValue();
void printLineValue();
std::string toString();
};
Value backValueWithToken(Token token);
Value backValueWithNullTyped(std::string literal);
#endif<file_sep>/**
* Meet Programming Language Parser.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2019 Turaiiao
* Email: <EMAIL>
* Github: https://github.com/turaiiao
*/
#ifndef MEET_PARSER_H
#define MEET_PARSER_H
#include <vector>
#include "Statement.hpp"
#include "Expression.hpp"
#include "Value.hpp"
#include "../Token.hpp"
#include "../expressions/LiteralExpression.hpp"
#include "../expressions/BinaryExpression.hpp"
#include "../expressions/GroupExpression.hpp"
#include "../expressions/VariableExpression.hpp"
#include "../expressions/UnaryExpression.hpp"
#include "../expressions/AssignExpression.hpp"
#include "../expressions/LogicalExpression.hpp"
#include "../expressions/ListExpression.hpp"
#include "../expressions/GetExpression.hpp"
#include "../expressions/SetExpression.hpp"
#include "../expressions/CallExpression.hpp"
#include "../statements/ExpressionStatement.hpp"
#include "../statements/VarStatement.hpp"
#include "../statements/PrintlnStatement.hpp"
#include "../statements/BlockStatement.hpp"
#include "../statements/ForStatement.hpp"
#include "../statements/BreakStatement.hpp"
#include "../statements/ContinueStatement.hpp"
#include "../statements/IfStatement.hpp"
#include "../statements/WhileStatement.hpp"
#include "../statements/FunctionStatement.hpp"
#include "../statements/ReturnStatement.hpp"
class Parser {
private:
std::vector<Token> tokens;
std::vector<Statement *> statements;
int position;
Token look();
Token look(int pos);
Token previous();
bool isAtEnd();
bool look(TokenType tokenType);
void error(std::string message);
void insertStatement(Statement* stmt);
Expression* expression();
Expression* assignment();
Expression* logicalOr();
Expression* logicalAnd();
Expression* equality();
Expression* comparison();
Expression* addition();
Expression* multiplication();
Expression* unary();
Expression* primary();
Statement* minusGreaterBlockStatement();
Statement* statement();
Statement* expressionStatement();
Statement* varStatement();
Statement* printlnStatement();
Statement* blockStatement();
Statement* forStatement();
Statement* breakStatement();
Statement* continueStatement();
Statement* ifStatement();
Statement* whileStatement();
Statement* funStatement();
Statement* returnStatement();
public:
explicit Parser(std::vector<Token> tokens);
std::vector<Statement *> parseProgram();
};
#endif<file_sep>/**
* Meet Programming Language Interpreter.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2019 Turaiiao
* Email: <EMAIL>
* Github: https://github.com/turaiiao
*/
#ifndef MEET_INTERPRETER_H
#define MEET_INTERPRETER_H
#include <vector>
#include <map>
#include "../Token.hpp"
#include "Value.hpp"
#include "Statement.hpp"
#include "Expression.hpp"
#include "../expressions/LiteralExpression.hpp"
#include "../expressions/BinaryExpression.hpp"
#include "../expressions/GroupExpression.hpp"
#include "../expressions/UnaryExpression.hpp"
#include "../expressions/AssignExpression.hpp"
#include "../expressions/LogicalExpression.hpp"
#include "../expressions/VariableExpression.hpp"
#include "../expressions/ListExpression.hpp"
#include "../expressions/GetExpression.hpp"
#include "../expressions/SetExpression.hpp"
#include "../expressions/CallExpression.hpp"
#include "../statements/ExpressionStatement.hpp"
#include "../statements/VarStatement.hpp"
#include "../statements/PrintlnStatement.hpp"
#include "../statements/BlockStatement.hpp"
#include "../statements/ForStatement.hpp"
#include "../statements/BreakStatement.hpp"
#include "../statements/ContinueStatement.hpp"
#include "../statements/IfStatement.hpp"
#include "../statements/WhileStatement.hpp"
#include "../statements/FunctionStatement.hpp"
#include "../statements/ReturnStatement.hpp"
class Interpreter {
private:
std::vector<Statement *> statements;
std::map<std::string, Value>* environment;
int size;
int position;
int removeStatement(int pos);
bool replMode = false;
bool haveObject(std::string name);
Statement* look();
std::vector<std::string> executeStatementWithoutEnvironment(Statement* stmt);
void assign(std::string name, Value value);
void reAssign(std::string name, Value value);
void executeStatement(Statement* stmt);
void executeVarStatement(Statement* stmt);
void executePrintlnStatement(Statement* stmt);
void executeBlockStatement(Statement* stmt);
void executeBreakStatement();
void executeContinueStatement();
void executeForStatement(Statement* stmt);
void executeIfStatement(Statement* stmt);
void executeWhileStatement(Statement* stmt);
void executeFunctionStatement(Statement* stmt);
void executeReturnStatement(Statement* stmt);
Value get(std::string);
Value executeExpressionStatement(Statement* stmt);
Value executeExpression(Expression* expr);
Value executeLiteralExpression(Expression* expr);
Value executeBinaryExpression(Expression* expr);
Value executeGroupExpression(Expression* expr);
Value executeUnaryExpression(Expression* expr);
Value executeAssignExpression(Expression* expr);
Value executeLogicalExpression(Expression* expr);
Value executeVariableExpression(Expression* expr);
Value executeGetExpression(Expression* expr);
Value executeSetExpression(Expression* expr);
Value executeCallExpression(Expression* expr);
Value executeListExpression(Expression* expr);
public:
explicit Interpreter(std::vector<Statement *> statements, std::map<std::string, Value>* environment);
void execute();
};
#endif<file_sep>/**
* Meet Programming Language Expression.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2019 Turaiiao
* Email: <EMAIL>
* Github: https://github.com/turaiiao
*/
#ifndef MEET_LOGICAL_EXPRESSION_H
#define MEET_LOGICAL_EXPRESSION_H
#include "../Token.hpp"
#include "../interpreter/Expression.hpp"
class LogicalExpression: public Expression {
public:
Expression* left;
Token token;
Expression* right;
LogicalExpression(Expression* left, Token token, Expression* right): left(std::move(left)),
token(std::move(token)), right(std::move(right)) {}
~LogicalExpression() {
    delete left;
    // token is held by value and must not be deleted.
    delete right;
}
std::string defintion() {
return EXPRESSION_LOGICAL;
}
std::string toString() {
return "[ LogicalExpr: left = " + left->toString() + ", operator = " + token.literal + ", right = "
+ right->toString() + " ]";
}
};
#endif<file_sep>/**
* Meet Programming Language Tokens.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2019 Turaiiao
* Email: <EMAIL>
* Github: https://github.com/turaiiao
*/
#ifndef MEET_TOKEN_H
#define MEET_TOKEN_H
#include <iostream>
typedef std::string TokenType;
#define TOKEN_LPAREN "("
#define TOKEN_RPAREN ")"
#define TOKEN_LBRACE "{"
#define TOKEN_RBRACE "}"
#define TOKEN_LBRACKET "["
#define TOKEN_RBRACKET "]"
#define TOKEN_PLUS "+"
#define TOKEN_MINUS "-"
#define TOKEN_STAR "*"
#define TOKEN_SLASH "/"
#define TOKEN_EQUAL "="
#define TOKEN_EQUAL_EQUAL "=="
#define TOKEN_BANG "!"
#define TOKEN_BANG_EQUAL "!="
#define TOKEN_LESS "<"
#define TOKEN_LESS_EQUAL "<="
#define TOKEN_GREATER ">"
#define TOKEN_GREATER_EQUAL ">="
#define TOKEN_PLUS_EQUAL "+="
#define TOKEN_MINUS_EQUAL "-="
#define TOKEN_STAR_EQUAL "*="
#define TOKEN_SLASH_EQUAL "/="
#define TOKEN_MODULAR "%"
#define TOKEN_DOT "."
#define TOKEN_COMMA ","
#define TOKEN_SEMICOLON ";"
#define TOKEN_MARK "'"
#define TOKEN_COLON ":"
#define TOKEN_MINUS_GREATER "->"
#define TOKEN_DOLLAR "$"
#define TOKEN_VALUE_INT "TOKEN_VALUE_INT"
#define TOKEN_VALUE_FLOAT "TOKEN_VALUE_FLOAT"
#define TOKEN_VALUE_STRING "TOKEN_VALUE_STRING"
#define TOKEN_VALUE_IDENTIFIER "TOKEN_VALUE_IDENTIFIER"
#define TOKEN_INT "int"
#define TOKEN_FLOAT "float"
#define TOKEN_STRING "string"
#define TOKEN_BOOLEAN "boolean"
#define TOKEN_LIST "list"
#define TOKEN_ANY "any"
#define TOKEN_NULL "null"
#define TOKEN_TRUE "true"
#define TOKEN_FALSE "false"
#define TOKEN_VAR "var"
#define TOKEN_OR "or"
#define TOKEN_AND "and"
#define TOKEN_IMPORT "import"
#define TOKEN_SHOW "show"
#define TOKEN_AS "as"
#define TOKEN_OPEN "open"
#define TOKEN_FUN "fun"
#define TOKEN_RETURN "return"
#define TOKEN_ENUM "enum"
#define TOKEN_DATA "data"
#define TOKEN_IMPL "impl"
#define TOKEN_IF "if"
#define TOKEN_ELIF "elif"
#define TOKEN_ELSE "else"
#define TOKEN_TRAIT "trait"
#define TOKEN_INIT "init"
#define TOKEN_THIS "this"
#define TOKEN_THEN "then"
#define TOKEN_OVERRIDE "override"
#define TOKEN_NEW "new"
#define TOKEN_FOR "for"
#define TOKEN_WHILE "while"
#define TOKEN_MATCH "match"
#define TOKEN_PRINTLN "println"
#define TOKEN_PRINT "print"
#define TOKEN_BREAK "break"
#define TOKEN_CONTINUE "continue"
#define TOKEN_EOF "EOF"
class Token {
public:
TokenType type;
std::string literal;
int line;
Token();
Token(TokenType type, std::string literal, int line);
bool operator < (Token token) const;
};
std::string getTokenLiteralWithType(const TokenType& tokenType);
bool isTyped(const Token& token);
bool isNotlistFixedType(const Token& token);
#endif
<file_sep>/**
* Meet Programming Language Expression.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2019 Turaiiao
* Email: <EMAIL>
* Github: https://github.com/turaiiao
*/
#ifndef MEET_ASSIGN_EXPRESSION_H
#define MEET_ASSIGN_EXPRESSION_H
#include "../interpreter/Expression.hpp"
#include "../Token.hpp"
class AssignExpression: public Expression {
public:
Token name;
Expression* initializer;
Token typed;
bool isVar = false;
Token fixedListToken;
AssignExpression() {}
AssignExpression(Token name, Expression* initializer, Token typed): name(std::move(name)),
initializer(std::move(initializer)), typed(std::move(typed)) {}
~AssignExpression() {
    // Only the heap-allocated initializer is owned; the Token and bool members are held by value.
    delete initializer;
}
std::string defintion() {
return EXPRESSION_ASSIGN;
}
std::string toString() {
std::stringstream data;
data << "[ AssignExpression: name = " << name.literal;
if (initializer != nullptr)
data << ", initializer = " << initializer->toString();
else
data << ", type = " << typed.literal;
data << ", isVar = " << (isVar ? "true" : "false");
data << ", fixedListToken = " << fixedListToken.literal;
data << " ]";
return data.str();
}
};
#endif<file_sep>/**
* Meet Programming Language Expression.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2019 Turaiiao
* Email: <EMAIL>
* Github: https://github.com/turaiiao
*/
#ifndef MEET_LIST_EXPRESSION_H
#define MEET_LIST_EXPRESSION_H
#include <sstream>
#include "../interpreter/Expression.hpp"
class ListExpression: public Expression {
public:
std::vector<Value> values;
ListExpression(std::vector<Value> values): values(std::move(values)) {}
~ListExpression() {
values.clear();
std::vector<Value>().swap(values);
}
std::string defintion() {
return EXPRESSION_LIST;
}
std::string toString() {
std::stringstream data;
data << "[ ListExpression = ";
for (auto i : values)
data << "[ " << i.toString() << " ] ";
return data.str();
}
};
#endif<file_sep>/**
* Meet Programming Language Expression.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2019 Turaiiao
* Email: <EMAIL>
* Github: https://github.com/turaiiao
*/
#ifndef MEET_CALL_EXPRESSION_H
#define MEET_CALL_EXPRESSION_H
#include "../interpreter/Expression.hpp"
#include "../Token.hpp"
class CallExpression: public Expression {
public:
Token name;
std::vector<Expression *> parameters;
CallExpression(Token name, std::vector<Expression *> parameters) {
this->name = std::move(name);
this->parameters = std::move(parameters);
}
std::string defintion() {
return EXPRESSION_CALL;
}
std::string toString() {
std::stringstream data;
data << "[ CallExpression: name = " << name.literal << ", parameters = ";
for (int i = 0; i < parameters.size(); i ++)
if ((i + 1) == parameters.size())
data << parameters.at(i)->toString();
else
data << parameters.at(i)->toString() << ", ";
data << " ]";
return data.str();
}
};
#endif<file_sep>/**
* Meet Programming Language Tokens.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2019 Turaiiao
* Email: <EMAIL>
* Github: https://github.com/turaiiao
*/
#include <utility>
#include "Token.hpp"
Token::Token() {}
Token::Token(TokenType type, std::string literal, int line): literal(std::move(literal)), line(line) {
this->type = std::move(type);
}
bool isTyped(const Token& token) {
return token.type == TOKEN_INT || token.type == TOKEN_STRING || token.type == TOKEN_FLOAT ||
token.type == TOKEN_LIST || token.type == TOKEN_BOOLEAN || token.type == TOKEN_ANY;
}
bool isNotlistFixedType(const Token& token) {
return token.type != TOKEN_INT && token.type != TOKEN_STRING && token.type != TOKEN_FLOAT &&
token.type != TOKEN_BOOLEAN && token.type != TOKEN_LIST && token.type != TOKEN_ANY;
}
bool Token::operator < (Token token) const {
return false;
}
std::string getTokenLiteralWithType(const TokenType& tokenType) {
if (tokenType == TOKEN_LPAREN) return "TOKEN_LPAREN";
if (tokenType == TOKEN_RPAREN) return "TOKEN_RPAREN";
if (tokenType == TOKEN_LBRACE) return "TOKEN_LBRACE";
if (tokenType == TOKEN_RBRACE) return "TOKEN_RBRACE";
if (tokenType == TOKEN_LBRACKET) return "TOKEN_LBRACKET";
if (tokenType == TOKEN_RBRACKET) return "TOKEN_RBRACKET";
if (tokenType == TOKEN_PLUS) return "TOKEN_PLUS";
if (tokenType == TOKEN_MINUS) return "TOKEN_MINUS";
if (tokenType == TOKEN_STAR) return "TOKEN_STAR";
if (tokenType == TOKEN_SLASH) return "TOKEN_SLASH";
if (tokenType == TOKEN_EQUAL) return "TOKEN_EQUAL";
if (tokenType == TOKEN_EQUAL_EQUAL) return "TOKEN_EQUAL_EQUAL";
if (tokenType == TOKEN_BANG) return "TOKEN_BANG";
if (tokenType == TOKEN_BANG_EQUAL) return "TOKEN_BANG_EQUAL";
if (tokenType == TOKEN_LESS) return "TOKEN_LESS";
if (tokenType == TOKEN_LESS_EQUAL) return "TOKEN_LESS_EQUAL";
if (tokenType == TOKEN_GREATER) return "TOKEN_GREATER";
if (tokenType == TOKEN_GREATER_EQUAL) return "TOKEN_GREATER_EQUAL";
if (tokenType == TOKEN_PLUS_EQUAL) return "TOKEN_PLUS_EQUAL";
if (tokenType == TOKEN_MINUS_EQUAL) return "TOKEN_MINUS_EQUAL";
if (tokenType == TOKEN_STAR_EQUAL) return "TOKEN_STAR_EQUAL";
if (tokenType == TOKEN_SLASH_EQUAL) return "TOKEN_SLASH_EQUAL";
if (tokenType == TOKEN_MODULAR) return "TOKEN_MODULAR";
if (tokenType == TOKEN_DOT) return "TOKEN_DOT";
if (tokenType == TOKEN_COMMA) return "TOKEN_COMMA";
if (tokenType == TOKEN_SEMICOLON) return "TOKEN_SEMICOLON";
if (tokenType == TOKEN_MARK) return "TOKEN_MARK";
if (tokenType == TOKEN_COLON) return "TOKEN_COLON";
if (tokenType == TOKEN_MINUS_GREATER) return "TOKEN_MINUS_GREATER";
if (tokenType == TOKEN_DOLLAR) return "TOKEN_DOLLAR";
if (tokenType == TOKEN_VALUE_INT) return "TOKEN_VALUE_INT";
if (tokenType == TOKEN_VALUE_FLOAT) return "TOKEN_VALUE_FLOAT";
if (tokenType == TOKEN_VALUE_STRING) return "TOKEN_VALUE_STRING";
if (tokenType == TOKEN_VALUE_IDENTIFIER) return "TOKEN_VALUE_IDENTIFIER";
if (tokenType == TOKEN_INT) return "TOKEN_INT";
if (tokenType == TOKEN_FLOAT) return "TOKEN_FLOAT";
if (tokenType == TOKEN_STRING) return "TOKEN_STRING";
if (tokenType == TOKEN_BOOLEAN) return "TOKEN_BOOLEAN";
if (tokenType == TOKEN_LIST) return "TOKEN_LIST";
if (tokenType == TOKEN_ANY) return "TOKEN_ANY";
if (tokenType == TOKEN_NULL) return "TOKEN_NULL";
if (tokenType == TOKEN_TRUE) return "TOKEN_TRUE";
if (tokenType == TOKEN_FALSE) return "TOKEN_FALSE";
if (tokenType == TOKEN_VAR) return "TOKEN_VAR";
if (tokenType == TOKEN_OR) return "TOKEN_OR";
if (tokenType == TOKEN_AND) return "TOKEN_AND";
if (tokenType == TOKEN_IMPORT) return "TOKEN_IMPORT";
if (tokenType == TOKEN_SHOW) return "TOKEN_SHOW";
if (tokenType == TOKEN_AS) return "TOKEN_AS";
if (tokenType == TOKEN_OPEN) return "TOKEN_OPEN";
if (tokenType == TOKEN_FUN) return "TOKEN_FUN";
if (tokenType == TOKEN_RETURN) return "TOKEN_RETURN";
if (tokenType == TOKEN_ENUM) return "TOKEN_ENUM";
if (tokenType == TOKEN_DATA) return "TOKEN_DATA";
if (tokenType == TOKEN_IMPL) return "TOKEN_IMPL";
if (tokenType == TOKEN_IF) return "TOKEN_IF";
if (tokenType == TOKEN_ELIF) return "TOKEN_ELIF";
if (tokenType == TOKEN_ELSE) return "TOKEN_ELSE";
if (tokenType == TOKEN_TRAIT) return "TOKEN_TRAIT";
if (tokenType == TOKEN_INIT) return "TOKEN_INIT";
if (tokenType == TOKEN_THIS) return "TOKEN_THIS";
if (tokenType == TOKEN_THEN) return "TOKEN_THEN";
if (tokenType == TOKEN_OVERRIDE) return "TOKEN_OVERRIDE";
if (tokenType == TOKEN_NEW) return "TOKEN_NEW";
if (tokenType == TOKEN_FOR) return "TOKEN_FOR";
if (tokenType == TOKEN_WHILE) return "TOKEN_WHILE";
if (tokenType == TOKEN_MATCH) return "TOKEN_MATCH";
if (tokenType == TOKEN_PRINTLN) return "TOKEN_PRINTLN";
if (tokenType == TOKEN_PRINT) return "TOKEN_PRINT";
if (tokenType == TOKEN_BREAK) return "TOKEN_BREAK";
if (tokenType == TOKEN_CONTINUE) return "TOKEN_CONTINUE";
if (tokenType == TOKEN_EOF) return "TOKEN_EOF";
return "UNKNOWN";
}<file_sep>/**
* Meet Programming Language Lexer.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2019 Turaiiao
* Email: <EMAIL>
* Github: https://github.com/turaiiao
*/
#include <iostream>
#include <sstream>
#include <utility>
#include "Lexer.hpp"
Lexer::Lexer(std::string source) {
this->source = std::move(source);
this->tokens = std::vector<Token>();
this->line = 1;
this->position = 0;
this->keywords["int"] = TOKEN_INT;
this->keywords["float"] = TOKEN_FLOAT;
this->keywords["string"] = TOKEN_STRING;
this->keywords["boolean"] = TOKEN_BOOLEAN;
this->keywords["list"] = TOKEN_LIST;
this->keywords["any"] = TOKEN_ANY;
this->keywords["null"] = TOKEN_NULL;
this->keywords["true"] = TOKEN_TRUE;
this->keywords["false"] = TOKEN_FALSE;
this->keywords["var"] = TOKEN_VAR;
this->keywords["or"] = TOKEN_OR;
this->keywords["and"] = TOKEN_AND;
this->keywords["import"] = TOKEN_IMPORT;
this->keywords["show"] = TOKEN_SHOW;
this->keywords["as"] = TOKEN_AS;
this->keywords["open"] = TOKEN_OPEN;
this->keywords["fun"] = TOKEN_FUN;
this->keywords["return"] = TOKEN_RETURN;
this->keywords["enum"] = TOKEN_ENUM;
this->keywords["data"] = TOKEN_DATA;
this->keywords["impl"] = TOKEN_IMPL;
this->keywords["if"] = TOKEN_IF;
this->keywords["elif"] = TOKEN_ELIF;
this->keywords["else"] = TOKEN_ELSE;
this->keywords["trait"] = TOKEN_TRAIT;
this->keywords["init"] = TOKEN_INIT;
this->keywords["this"] = TOKEN_THIS;
this->keywords["then"] = TOKEN_THEN;
this->keywords["override"] = TOKEN_OVERRIDE;
this->keywords["new"] = TOKEN_NEW;
this->keywords["for"] = TOKEN_FOR;
this->keywords["while"] = TOKEN_WHILE;
this->keywords["match"] = TOKEN_MATCH;
this->keywords["println"] = TOKEN_PRINTLN;
this->keywords["print"] = TOKEN_PRINT;
this->keywords["break"] = TOKEN_BREAK;
this->keywords["continue"] = TOKEN_CONTINUE;
}
std::vector<Token> Lexer::tokenizer() {
while (this->source.length() - 1 >= this->position) {
char current = look();
if (isspace(current) || current == '#') lexSkipWriteSpace();
else if (isalpha(current)) lexIdentifier();
else if (isdigit(current)) lexNumber();
else if ((int) current == 39) lexString();
else lexSymbol();
}
addToken(TOKEN_EOF, "EOF", 0);
return this->tokens;
}
char Lexer::look() {
return this->source.at(this->position);
}
char Lexer::look(int pos) {
if (this->position + pos > this->source.length() - 1)
return '\0';
else
return this->source.at(this->position + pos);
}
bool Lexer::isAtEnd() {
return this->source.length() - 1 <= this->position;
}
void Lexer::addToken(const TokenType& type) {
addToken(type, false);
}
void Lexer::addToken(const TokenType& type, bool skipTwoPos) {
if (skipTwoPos)
addToken(type, look() + std::string(1, look(1)), 2);
else
addToken(type, std::string(1, look()), 1);
}
void Lexer::addToken(TokenType type, std::string literal, int skip) {
this->tokens.emplace_back(type, literal, this->line);
if (skip)
this->position += skip;
}
void Lexer::error(std::string message) {
throw std::runtime_error("[ line " + std::to_string(this->line) + " ] " + message);
}
TokenType Lexer::isKeyword(const std::string& identifier) {
std::map<std::string, TokenType>::iterator find = this->keywords.find(identifier);
if (find != this->keywords.end())
return find->second;
else
return TOKEN_EOF;
}
void Lexer::lexIdentifier() {
std::stringstream literalStream;
while (isalpha(look())) {
literalStream << look();
if (isAtEnd()) {
this->position ++;
break;
} else
this->position ++;
}
TokenType type = isKeyword(literalStream.str());
addToken(type != TOKEN_EOF ? type : TOKEN_VALUE_IDENTIFIER, literalStream.str(), 0);
}
void Lexer::lexString() {
std::stringstream literalStream;
this->position ++;
if (look(0) == 0)
error("syntax error: expect string lost right mark.");
if (look(0) == '\'') {
addToken(TOKEN_VALUE_STRING, "", 1);
return;
}
while (look() != '\'') {
literalStream << look();
if (isAtEnd())
error("syntax error: expect string lost right mark.");
else
this->position ++;
}
addToken(TOKEN_VALUE_STRING, literalStream.str(), 1);
}
void Lexer::lexNumber() {
std::stringstream literalStream;
bool haveDot = false;
while (isdigit(look()) || look() == '.') {
literalStream << look();
if (look() == '.')
haveDot = true;
if (isAtEnd()) {
this->position ++;
break;
} else
this->position ++;
}
addToken(haveDot ? TOKEN_VALUE_FLOAT : TOKEN_VALUE_INT, literalStream.str(), 0);
}
void Lexer::lexSymbol() {
switch (look()) {
case '+':
look(1) == '=' ? addToken(TOKEN_PLUS_EQUAL, true) :
addToken(TOKEN_PLUS, false);
break;
case '-':
look(1) == '>' ? addToken(TOKEN_MINUS_GREATER, true) :
look(1) == '=' ? addToken(TOKEN_MINUS_EQUAL, true) :
addToken(TOKEN_MINUS, false);
break;
case '*':
look(1) == '=' ? addToken(TOKEN_STAR_EQUAL, true) :
addToken(TOKEN_STAR, false);
break;
case '/':
look(1) == '=' ? addToken(TOKEN_SLASH_EQUAL, true) :
addToken(TOKEN_SLASH, false);
break;
case ';':
addToken(TOKEN_SEMICOLON);
break;
case ':':
addToken(TOKEN_COLON);
break;
case '.':
addToken(TOKEN_DOT);
break;
case ',':
addToken(TOKEN_COMMA);
break;
case '(':
addToken(TOKEN_LPAREN);
break;
case ')':
addToken(TOKEN_RPAREN);
break;
case '{':
addToken(TOKEN_LBRACE);
break;
case '}':
addToken(TOKEN_RBRACE);
break;
case '[':
addToken(TOKEN_LBRACKET);
break;
case ']':
addToken(TOKEN_RBRACKET);
break;
case '=':
look(1) == '=' ? addToken(TOKEN_EQUAL_EQUAL, true) :
addToken(TOKEN_EQUAL, false);
break;
case '!':
look(1) == '=' ? addToken(TOKEN_BANG_EQUAL, true) :
addToken(TOKEN_BANG, false);
break;
case '>':
look(1) == '=' ? addToken(TOKEN_GREATER_EQUAL, true) :
addToken(TOKEN_GREATER, false);
break;
case '<':
look(1) == '=' ? addToken(TOKEN_LESS_EQUAL, true) :
addToken(TOKEN_LESS, false);
break;
case '|':
addToken(TOKEN_OR);
break;
case '&':
addToken(TOKEN_AND);
break;
case '%':
addToken(TOKEN_MODULAR);
break;
case '$':
addToken(TOKEN_DOLLAR);
break;
default:
error("syntax error: unexpect character '" + std::string(1, look()) + "'.");
}
}
void Lexer::lexSkipWriteSpace() {
switch (look()) {
case ' ':
case '\r':
case '\t':
this->position ++;
break;
case '\n':
this->line ++;
this->position ++;
break;
case '#':
while (look() != '\n' && !isAtEnd())
this->position ++;
if (isAtEnd()) this->position ++;
}
}
|
ca7423ef49a0940164c56c1d30ba659074d41b23
|
[
"Markdown",
"CMake",
"C++"
] | 26
|
C++
|
Turaiiao/meet
|
2025b207205d6681ef54bd1514e41f4f8432b59c
|
9148670e3bd3be9561762279800ec7ef00357406
|
refs/heads/master
|
<repo_name>becharvey93/Assignment2<file_sep>/server/server.js
// set up mongoDB connection
const MongoClient = require('mongodb').MongoClient;
const url = 'mongodb://localhost:27017';
MongoClient.connect(url,function(err, client){
//this is the callback function for when a connection is made. It recieves two values
if (err) {return console.log(err)}
const dbName = 'DATABASE_CHAT';
const db = client.db(dbName);
var querycb = require('./serial/querycallback');
// Call Backs
querycb.updatedata(db, function(res){
    console.log(res);
});
});
const express = require('express');
const app = express();
const path = require('path');
const http = require('http').Server(app);
const io = require('socket.io')(http);
const bodyParser = require('body-parser');
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({extended:false}));
app.use(express.static(path.join(__dirname , '../dist/channels')));
require('./routes.js')(app, path);
require('./socket.js')(app, io);
require('./listen.js')(http);<file_sep>/src/app/groups/groups.component.ts
import { Component, OnInit } from '@angular/core';
@Component({
selector: 'app-groups',
templateUrl: './groups.component.html',
styleUrls: ['./groups.component.css']
})
export class GroupsComponent implements OnInit {
username: string;
router: any;
newgroupname: any;
constructor() { }
ngOnInit() {
}
loginUser(event){
event.preventDefault();
    var usertype = localStorage.getItem("login-type");
if (usertype == "super"){
      alert('You have created a new group called ' + this.newgroupname + '.');
localStorage.setItem("groupname", this.newgroupname);
}
else{
alert('You do not have permission to create groups');
this.router.navigateByUrl('/groups');
}
}
}
<file_sep>/mongodb/querycallback.js
module.exports =
{
//Object with one method - update data
//Takes in a reference to the database and returns the result as a callback
updatedata:function(db,result){
const collection = db.collection('CollectionUsers');
collection.findOne({id:'1'}, function(err,res){
if(err)throw err;
collection.find().toArray(function(err,res){
if (err) throw err;
console.log (res);
result(res);
})
})
}
}<file_sep>/src/index.js
var express = require('express');
var app = express();
var http = require('http').Server(app);
var io = require('socket.io')(http);
var mongo = require('mongodb');
const port = process.env.PORT || 3000;
app.use(express.static(__dirname + '/www'));
// Start a single HTTP server; socket.io is already attached to it above.
http.listen(port, function () {
    var host = http.address().address;
    console.log('started on port: ' + port);
    console.log("Node.js Server is Working");
    console.log("Server is listening on: " + host + " port: " + port);
});
<file_sep>/src/app/login/login.component.ts
'use strict';
declare var require: any;
import { Component, OnInit } from '@angular/core';
import { FormsModule } from '@angular/forms';
import { Router } from '@angular/router';
var MongoClient = require('mongodb').MongoClient;
var assert = require('assert');
var url = "mongodb://localhost:27017/";
@Component({
selector: 'app-login',
templateUrl: './login.component.html',
styleUrls: ['./login.component.css']
})
export class LoginComponent implements OnInit {
username:string ='';
password:string ='';
constructor(private router:Router, private form:FormsModule) { }
ngOnInit() {
}
// function for logging in
loginUser(event){
event.preventDefault();
    MongoClient.connect(url, (err, db) => {
      if (err) throw err;
      var dbo = db.db("DATABASE_CHAT");
      // Arrow function keeps `this` bound to the component so this.username is available.
      dbo.collection("CollectionUsers").find({ username: this.username });
    })
if(this.username && this.password) {
}
// Old Method of Testing Login
event.preventDefault();
if (this.username == "super" && this.password == "123"){
alert('You are logged in as the Super Admin.');
this.router.navigateByUrl('/groups');
localStorage.setItem("login-type", "super");
localStorage.setItem("username", this.username);
}
else if(this.username == "group" && this.password == "123"){
alert('You are logged in as the Group Admin');
this.router.navigateByUrl('/groups');
localStorage.setItem("login-type", "group");
localStorage.setItem("username", this.username)
}
else if (this.username == "correctusername" && this.password =="<PASSWORD>"){
this.router.navigateByUrl('/groups');
localStorage.setItem("login-type", "general");
localStorage.setItem("username", this.username)
}else{
alert('Username and Password incorrect. Please log in again or create account.');
}
}
createUser(event){
event.preventDefault();
}
}
|
0387a98b244a51d31eaee1c83580afd3dee5c416
|
[
"JavaScript",
"TypeScript"
] | 5
|
JavaScript
|
becharvey93/Assignment2
|
c3f7b6c9f1685f4350b550b10eade2f13c015623
|
1f1dfad48bd048b3a6dfc1362f4e199d6506cfaa
|
refs/heads/master
|
<repo_name>AnkurRyder/Bookmark-Chrome-Extension<file_sep>/README.md
# Bookmark-Chrome-Extension
Chrome Extension for saving Bookmarks to cloud
<file_sep>/popup.js
var text="untaged";
$(function() {
var list = $('.js-dropdown-list');
var link = $('.js-link');
link.click(function(e) {
e.preventDefault();
list.slideToggle(200);
});
list.find('li').click(function() {
text = $(this).html();
var icon = '<i class="fa fa-chevron-down"></i>';
link.html(text+icon);
list.slideToggle(200);
});
});
$(document).ready(function(){
chrome.tabs.query({currentWindow: true,active: true},function(tabs){
$("#title").val(tabs[0].title);
$("#url").val(tabs[0].url);
});
});
$(document).ready(function(){
$("#sub").click(function(){
/* var table = document.getElementById("bookmarks");
var row = table.insertRow(0);
var cell1 = row.insertCell(0);
var cell2 = row.insertCell(1);
var cell3 = row.insertCell(2);
// Add some text to the new cells:
cell1.innerHTML = $('#title').val();
cell2.innerHTML = 1;
cell3.innerHTML = Date();*/
var url = $('#url').val();
var title = $('#title').val();
var tag = text;
if( !url.includes("http://") && !url.includes("https://") )
{ url = "http://" + String(url); }
tag = tag.toUpperCase();
title = String(tag) + " :: " + String(title);
$.ajax({
type:'GET',
url:'https://bookmarks.pythonanywhere.com/add_get',
data:{
'title':title,
'tag':tag,
'ur':url
},
dataType: 'json',
    success:function(data){
alert(" gf");
},
});
setTimeout(function(){ window.close(); }, 400);
});
});
|
44697fa49f4dbcdba26a13e3eeef5929927fdd05
|
[
"Markdown",
"JavaScript"
] | 2
|
Markdown
|
AnkurRyder/Bookmark-Chrome-Extension
|
9be42b96717e13180a14a3a91941b05ccebd9e48
|
be0040deea8d631d47ebbd1a4922a17b900d7295
|
refs/heads/master
|
<file_sep>// entry.ts
import ApplicationRoot from './components/ApplicationRoot.vue';
import Vue from 'vue';
document.addEventListener("DOMContentLoaded", e => {
const vueInstance = new Vue({
render: h => h(ApplicationRoot)
});
console.log("Value of component is: %o", ApplicationRoot);
// This won't work:
// TS2339: Property 'options' does not exist on type 'VueConstructor<Record<never, any> & Vue>'.
console.log("Value of options is: %o", ApplicationRoot.options);
// Neither will this:
// TS2339: Property 'created' does not exist on type 'VueConstructor<Record<never, any> & Vue>'.
console.log("Value of created hook is: %o", ApplicationRoot.created);
vueInstance.$mount('#vue-outlet');
});
<file_sep># vue-typescript-options-bug
|
516a1ea92dbbd287f15a4322fa4d8cb1537b0f1f
|
[
"Markdown",
"TypeScript"
] | 2
|
TypeScript
|
amoe/vue-typescript-options-bug
|
747c29fdd1903fe7d37f5b286e1ec42ac050398c
|
3cca22d9018e4afc72194477479b887db8c85de0
|
refs/heads/master
|
<file_sep># myemacs
<file_sep>#!/bin/bash
connect -H www-proxy.ericsson.se:8080 "$@"
|
0d18f21ef1e3802392ac920945eb9bcdaadcbe0d
|
[
"Markdown",
"Shell"
] | 2
|
Markdown
|
imsngn/Dotfiles
|
ca002b489309a035db56eec1ba4eb206b7644492
|
95d713250cf291204204b43376429e4d1b1f66dc
|
refs/heads/master
|
<repo_name>albytho/Coding-Practice-Problems<file_sep>/partition.cpp
void partition(int n){
Node *k = head;
Node *current = head;
while(current != nullptr){
if(current->data < n){
int temp = k->data;
k->data = current->data;
current->data = temp;
k=k->next;
}
current = current->next;
}
}<file_sep>/README.md
# Coding-Practice-Problems
This is a collection of problems I've worked on from the book "Cracking the Coding Interview" and LeetCode. I make sure these are correct/optimal before posting them
<file_sep>/is_balanced.cpp
bool is_balanced(Node *root){
if(root == nullptr){
return true;
}
int lh = get_height(root->left);
int rh = get_height(root->right);
if(abs(lh-rh) <= 1 && is_balanced(root->left) && is_balanced(root->right)){
return true;
}
else{
return false;
}
}<file_sep>/rotateMatrix.cpp
//
// main.cpp
// rotateMatrix
//
// Created by <NAME> on 6/26/17.
// Copyright © 2017 <NAME>. All rights reserved.
//
#include <iostream>
#include <vector>
using namespace std;
void rotate(vector<vector<int>> &matrix){
int m = int(matrix.size());
//Go layer by layer
for(int layer=0; layer<m/2; ++layer){
//As you into a deeper layer, the first and last values get closer together
int first = layer;
int last = m-1-layer;
//You need this for loop to account for the shifts in each of the layers
for(int i=first; i<last; ++i){
int offset = i-first;
//Save the top left element
int top = matrix[first][i];
//set the top left element equal to the bottom left element of the layer
matrix[first][i] = matrix[last-offset][first];
//bottom left = bottom right
matrix[last-offset][first] = matrix[last][last-offset];
//bottom right = top right
matrix[last][last-offset] = matrix[first+offset][last];
//top right = the top left element we saved on line 28
matrix[first+offset][last] = top;
}
}
}
int main() {
vector<vector<int>> matrix = {{1,2,3,4},{5,6,7,8},{9,10,11,12},{13,14,15,16}};
rotate(matrix);
for(int row=0; row<matrix.size(); ++row){
for(int col=0; col<matrix[0].size(); ++col){
cout<<matrix[row][col]<<" ";
}
cout<<endl;
}
return 0;
}
<file_sep>/is_palindrome.cpp
bool is_palindrome(Node *a){
vector<char> vec;
while(a!=nullptr){
vec.push_back(a->data);
a=a->next;
}
for(int index=0; index<vec.size()/2; ++index){
if(vec[index] != vec[vec.size()-1-index]){
return false;
}
}
return true;
}<file_sep>/kth_element.cpp
int kth_element(int num){
Node *k = head;
Node *current = head;
for(int index=0; index<num-1; ++index){
if(current==nullptr){
return -1;
}
current = current->next;
}
while(current->next != nullptr){
current = current->next;
k=k->next;
}
return k->data;
}
<file_sep>/list_sum.cpp
void list_sum(Node *a, Node *b){
int carry = 0;
int sum = 0;
stack<int> s;
//Iterates through both lists at the same time until one of them ends
while(a!=nullptr && b!=nullptr){
sum = a->data + b->data + carry;
carry = int(sum/10);
//If the digits are both the last digit in the sequence, put the whole
//sum in there instead of just the mod 10 of it
if(a->next == nullptr && b->next == nullptr){
s.push(sum);
}
else{
s.push(sum%10);
}
//Go to the next locations in the linked lists
a=a->next;
b=b->next;
}
//If linked list "a" has more digits than "b", then put the remainder of "a"s
//digits into the list
if(a != nullptr){
while(a!=nullptr){
s.push(a->data+carry);
a=a->next;
carry=0;
}
}
//The same as above but for the case that linked list "b" has more digits
else if(b != nullptr){
while(b!=nullptr){
s.push(b->data+carry);
b=b->next;
carry=0;
}
}
//Get the digits in the form of an actual number;
sum = 0;
while(!s.empty()){
sum = sum*10+s.top();
s.pop();
}
//Put each of the digits in the form of a linked list
Node *result = new Node(sum%10);
Node *original = result;
sum = sum/10;
while(sum!=0){
result->next = new Node(sum%10);
sum=sum/10;
result=result->next;
}
while(original!=nullptr){
cout<<original->data<<endl;
original=original->next;
}
}<file_sep>/sort_stack.cpp
// Sorts the stack using one auxiliary stack (insertion sort between the two stacks);
// when it returns, 'original' is sorted with the largest element on top.
void sort_stack(stack<double> &original){
stack<double> new_one;
while(!original.empty()){
new_one.push(original.top());
original.pop();
}
    double curr;
while(!new_one.empty()){
curr = new_one.top();
new_one.pop();
while(!original.empty() && curr<original.top()){
new_one.push(original.top());
original.pop();
}
original.push(curr);
}
}<file_sep>/remove_middle.cpp
// Deletes a node from the middle of a singly linked list given only that node,
// by copying the next node's data into it and unlinking the next node.
void remove_middle(Node *middle){
middle->data = middle->next->data;
Node *temp = middle->next;
middle->next = temp->next;
delete temp;
temp = nullptr;
}
<file_sep>/min()_function_in_stack.cpp
//This method gives O(1) time for min() at the cost of O(n) extra space.
//The idea is that as you push elements onto the main stack, you
//keep track of the minimum element seen so far. Every time you push an element
//onto the main stack, you push the minimum element so far onto the min stack.
//If you push a new value onto the main stack that is lower than the minimum element
//so far, then that value becomes the new min_element
class stack{
public:
Node *head = nullptr;
Node *head_min = nullptr;
int min_elem = INT_MAX;
bool isEmpty(){
if(head == nullptr){
return true;
}
return false;
}
void push(int elem){
        //The minimum so far is the top of the min stack; recomputing it here (rather than relying on
        //the min_elem member alone, which would go stale after pop) keeps min() correct after pops
        min_elem = isEmpty() ? elem : std::min(head_min->val, elem);
Node *a = new Node(elem);
Node *b = new Node(min_elem);
//You continually keep pushing whatever you have as the min_element into the min_stack every time you
//push something into the main stack
if(isEmpty()){
head = a;
head_min = b;
}
else{
a->next = head;
head = a;
b->next = head_min;
head_min = b;
}
}
void pop(){
if(!isEmpty()){
head = head->next;
head_min = head_min->next;
}
}
int top(){
return head->val;
}
int min(){
return head_min->val;
}
};
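// A quick usage sketch, kept as a comment (it assumes the external Node type used above):
//
//   stack s;
//   s.push(5); s.push(2); s.push(7);
//   s.min();   // returns 2 in O(1), read from the top of the parallel min stack
//   s.pop();   // removes 7; the min stack top is still 2
//   s.pop();   // removes 2; the min stack top falls back to 5
//   s.min();   // returns 5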
<file_sep>/intersection.cpp
Node* intersection(Node *a, Node *b){
Node *temp_a = a;
Node *temp_b = b;
int a_size = 0;
int b_size = 0;
while(temp_a != nullptr){
++a_size;
temp_a = temp_a->next;
}
while(temp_b != nullptr){
++b_size;
temp_b = temp_b->next;
}
if(a_size != b_size){
if(a_size > b_size){
int diff = a_size - b_size;
for(int index=0; index<diff; ++index){
a=a->next;
}
}
else{
int diff = b_size - a_size;
for(int index=0; index<diff; ++index){
b=b->next;
}
}
}
    while(a != nullptr && b!= nullptr){
        if(a == b){
            return a;
        }
        a = a->next;
        b = b->next;
    }
return nullptr;
}<file_sep>/remove_dubs.cpp
void remove_dubs(){
unordered_map<int, int> dict;
Node *a = head;
while(a != nullptr){
++dict[a->data];
if(dict[a->data] == 2){
Node *previouse_node = head;
while(previouse_node->next != a){
previouse_node = previouse_node->next;
}
previouse_node->next = a->next;
--dict[a->data];
delete a;
a = previouse_node;
previouse_node = nullptr;
}
if(a!=nullptr){
a=a->next;
}
}
}
|
bdefdc4c6f5911a52415b8c9a26b9b984540618e
|
[
"Markdown",
"C++"
] | 12
|
C++
|
albytho/Coding-Practice-Problems
|
9b6f6619756baa4876a5d449e9435df9f2167756
|
e62792f4b8510bffea6434da662be2cab038e0d0
|
refs/heads/main
|
<repo_name>xuxiaowei-com-cn/deep-in-spring-cloud-samples<file_sep>/chapter02/spring-cloud-netflix-eureka-provider/src/main/resources/application.properties
spring.application.name=my-provider
server.port=8080
eureka.client.service-url.defaultZone=http://localhost:8761/eureka
<file_sep>/chapter05/resilience4j/README.md
## 5.6.1 Getting Started with Resilience4j
Integration of Resilience4j with OpenFeign.
Also covers Resilience4j's own circuit breaking and rate limiting.<file_sep>/chapter10/spring-cloud-functional-application/README.md
Runs a FaaS program with FunctionalSpringApplication.<file_sep>/chapter06/spring-cloud-stream/README.md
## 6.4.1 Sending and Receiving Messages with Spring Cloud Stream
A getting-started example for the Spring Cloud Stream project.<file_sep>/chapter05/my-circuit-breaker/README.md
## 5.1 Implementing a Circuit Breaker by Hand
An example of a custom circuit breaker implementation.<file_sep>/chapter05/netflix-hystrix-zuul/src/main/resources/application.properties
server.port=8080
spring.application.name=hystrix-zuul
zuul.routes.my-provider1.path=/dubbo/**
zuul.routes.my-provider1.service-id=my-provider1
zuul.routes.my-provider2.path=/springcloud/**
zuul.routes.my-provider2.service-id=my-provider2
zuul.routes.my-provider3.path=/s-c-alibaba/**
zuul.routes.my-provider3.url=my-provider3
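# Note on the Hystrix settings below: my-provider1 uses SEMAPHORE isolation with
# maxConcurrentRequests=0, so every request routed to it is rejected immediately (the
# rate-limiting demo), while my-provider2 and my-provider3 open their circuits based on
# request volume and error percentage.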
hystrix.command.my-provider1.execution.isolation.semaphore.maxConcurrentRequests=0
hystrix.command.my-provider1.execution.isolation.strategy=SEMAPHORE
hystrix.command.my-provider1.execution.timeout.enabled=false
hystrix.command.my-provider2.execution.isolation.thread.timeoutInMilliseconds=5000
hystrix.command.my-provider2.metrics.rollingStats.timeInMilliseconds=10000
hystrix.command.my-provider2.circuitBreaker.requestVolumeThreshold=15
hystrix.command.my-provider2.circuitBreaker.errorThresholdPercentage=50
hystrix.command.my-provider2.circuitBreaker.sleepWindowInMilliseconds=5000
hystrix.command.my-provider3.execution.isolation.thread.timeoutInMilliseconds=5000
hystrix.command.my-provider3.metrics.rollingStats.timeInMilliseconds=10000
hystrix.command.my-provider3.circuitBreaker.requestVolumeThreshold=30
hystrix.command.my-provider3.circuitBreaker.errorThresholdPercentage=50
hystrix.command.my-provider3.circuitBreaker.sleepWindowInMilliseconds=5000
management.endpoints.web.exposure.include=*
<file_sep>/chapter02/spring-cloud-alibaba-nacos-consumer-reactive/src/main/resources/application.properties
spring.application.name=nacos-consumer
server.port=8081
spring.cloud.nacos.discovery.server-addr=localhost:8848
<file_sep>/chapter09/nacos-provider/src/main/resources/application.properties
spring.application.name=nacos-provider
server.port=9090
spring.cloud.nacos.discovery.server-addr=localhost:8848<file_sep>/chapter04/spring-cloud-alibaba-nacos-configuration/src/main/resources/bootstrap.properties
spring.application.name=nacos-configuration-sample
server.port=8080
spring.cloud.nacos.config.server-addr=localhost:8848
book.author=jim<file_sep>/chapter04/spring-cloud-config-client/src/main/resources/bootstrap.properties
spring.application.name=sc-config-client
spring.cloud.config.name=book
spring.cloud.config.profile=prod
spring.cloud.config.uri=http://localhost:8080/<file_sep>/chapter05/alibaba-sentinel-spring-cloud-gateway/scripts/gateway.sh
#!/usr/bin/env bash
curl -v -H "LANG:zh-cn" http://localhost:8080/httpbin/status/500
#* Trying ::1...
#* TCP_NODELAY set
#* Connected to localhost (::1) port 8080 (#0)
#> GET /httpbin/status/500 HTTP/1.1
#> Host: localhost:8080
#> User-Agent: curl/7.54.0
#> Accept: */*
#> LANG:zh-cn
#>
#< HTTP/1.1 429 Too Many Requests
#< Content-Type: application/json;charset=UTF-8
#< Content-Length: 64
#<
#* Connection #0 to host localhost left intact
#{"code":429,"message":"Blocked by Sentinel: ParamFlowException"}
curl -v -H "LANG1:zh-cn" http://localhost:8080/httpbin/status/500
#* Trying ::1...
#* TCP_NODELAY set
#* Connected to localhost (::1) port 8080 (#0)
#> GET /httpbin/status/500 HTTP/1.1
#> Host: localhost:8080
#> User-Agent: curl/7.54.0
#> Accept: */*
#> LANG1:zh-cn
#>
#< HTTP/1.1 500 Internal Server Error
#< Date: Wed, 26 Feb 2020 16:45:37 GMT
#< Content-Type: text/html; charset=utf-8
#< Content-Length: 0
#< Server: gunicorn/19.9.0
#< Access-Control-Allow-Origin: *
#< Access-Control-Allow-Credentials: true
<file_sep>/chapter10/spring-cloud-function-stream-multifuncs/README.md
## 10.4.2 Spring Cloud Function and Spring Cloud Stream
An example that integrates Spring Cloud Function with Spring Cloud Stream to choose the handler function based on the input parameter.<file_sep>/chapter04/profile-properties/src/main/resources/application-dev.properties
custom.welcome=Hello Mock Data<file_sep>/chapter03/spring-cloud-nacos-consumer-ribbonenhance/README.md
## 3.8 Application Traffic Control
The consumer side extends RestTemplate and OpenFeign to identify traffic and passes the result along through a ThreadLocal.<file_sep>/chapter08/batch-processing/task-04/README.md
## 8.4 Composing Batch Tasks with Spring Cloud Data Flow
The Task04 task from section 8-4.<file_sep>/chapter07/spring-cloud-bus-node3/src/main/resources/application.properties
spring.application.name=scb-node3
spring.cloud.bus.id=scb-node3
server.port=8082
management.endpoints.web.exposure.include=*
spring.cloud.bus.trace.enabled=true<file_sep>/chapter02/spring-cloud-eureka-nacos-provider/README.md
## 2.5 Dual-Registration / Dual-Subscription Mode
An application that registers with both Nacos and Eureka.<file_sep>/chapter04/profile-properties/src/main/java/deep/in/spring/cloud/UserService.java
package deep.in.spring.cloud;
/**
* @author <a href="mailto:<EMAIL>">Jim</a>
*/
public interface UserService {
String findAll();
}
<file_sep>/chapter10/spring-cloud-function-gcp/README.md
## 10.5 Case Study: Trying Spring Cloud Function on GCP Cloud Functions
An example that integrates Spring Cloud Function with GCP Cloud Functions.<file_sep>/chapter02/spring-cloud-alibaba-nacos-provider/README.md
## 2.2 Your First Spring Cloud Microservice with Alibaba Nacos
The example for section 2.2.2: starting the Nacos Discovery Provider to register a service.<file_sep>/chapter09/zuul-gateway/url.sh
#!/usr/bin/env bash
curl -v -XGET 'http://localhost:8080/http/status/400'
sleep 1
curl -v -XGET 'http://localhost:8080/nacos/echo?name=jim'<file_sep>/chapter05/alibaba-sentinel/src/main/resources/application-openfeign.properties
feign.sentinel.enabled=true
spring.cloud.sentinel.datasource.ds.file.file=classpath: degraderule-openfeign.json
spring.cloud.sentinel.datasource.ds.file.data-type=json
spring.cloud.sentinel.datasource.ds.file.rule-type=degrade
<file_sep>/chapter08/spring-cloud-task/src/main/resources/application.properties
spring.h2.console.enabled=true
logging.level.org.springframework.cloud.task=DEBUG
spring.cloud.task.single-instance-enabled=true
spring.cloud.task.external-execution-id=888
spring.cloud.task.parent-execution-id=999<file_sep>/chapter05/netflix-hystrix/README.md
## 5.5 Netflix Hystrix
Integration of Hystrix with OpenFeign and Spring Cloud Circuit Breaker.
Also covers Hystrix's own circuit breaking and rate limiting.<file_sep>/chapter04/spring-cloud-nacos-consumer-ribbonenhance-dynamicupdate/src/main/resources/application.properties
server.port=8888
spring.cloud.nacos.discovery.server-addr=localhost:8848
feign.hystrix.enabled=true<file_sep>/chapter09/nacos-provider/README.md
## 9 Gateways
The microservice provider application called by the gateways in chapter 9.<file_sep>/chapter05/netflix-hystrix/src/main/resources/application-openfeign.properties
feign.hystrix.enabled=true
hystrix.command.default.execution.isolation.thread.timeoutInMilliseconds=1000
hystrix.command.default.metrics.rollingStats.timeInMilliseconds=1000
hystrix.command.default.circuitBreaker.requestVolumeThreshold=3
hystrix.command.default.circuitBreaker.errorThresholdPercentage=100
hystrix.command.default.circuitBreaker.sleepWindowInMilliseconds=5000<file_sep>/chapter07/spring-cloud-bus-node1/README.md
## 7.3 Dynamic Multi-Node Configuration Refresh with Spring Cloud Bus
Node 1. Works together with node 2 and node 3.<file_sep>/chapter05/netflix-hystrix-spring-cloud-gateway/scripts/provider1.sh
#!/usr/bin/env bash
while true
do
echo `curl -s -XGET http://localhost:8080/dubbo/status/500`
done<file_sep>/chapter05/alibaba-sentinel/src/main/resources/application-resttemplate.properties
spring.cloud.sentinel.datasource.ds.file.file=classpath: degraderule-resttemplate.json
spring.cloud.sentinel.datasource.ds.file.data-type=json
spring.cloud.sentinel.datasource.ds.file.rule-type=degrade
<file_sep>/chapter04/spring-cloud-config-client-service-registry/src/main/resources/bootstrap.properties
spring.application.name=sc-config-client-service-registry
spring.cloud.config.name=book
spring.cloud.config.profile=prod
spring.cloud.config.discovery.enabled=true
spring.cloud.config.discovery.service-id=sc-config-server-service-registry
server.port=8082
management.endpoints.web.exposure.include=*<file_sep>/chapter04/spring-cloud-config-server-jdbc/README.md
## 4.4.1 Spring Cloud Config Server
An example that uses a JDBC database as the EnvironmentRepository implementation in Spring Cloud Config Server.<file_sep>/chapter08/scdf-common/src/main/java/deep/in/spring/cloud/CreditCardRecord.java
/*
* Copyright (C) 2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package deep.in.spring.cloud;
import java.math.BigDecimal;
import java.util.HashMap;
import java.util.Map;
/**
* @author <a href="mailto:<EMAIL>">Jim</a>
*/
public class CreditCardRecord {
public static Map<String, String> cardTypes = new HashMap<>();
public static Map<String, String> users = new HashMap<>();
static {
cardTypes.put("0", "CMB"); // 招行银行
cardTypes.put("1", "ICBC"); // 工商银行
cardTypes.put("2", "ABC"); // 农业银行
cardTypes.put("3", "CCB"); // 建设银行
cardTypes.put("4", "BCM"); // 交通银行
cardTypes.put("5", "CMBC"); // 民生银行
users.put("0", "jim");
users.put("1", "jerry");
users.put("2", "tom");
}
private String user;
private BigDecimal cost;
private String cardType;
public CreditCardRecord() {
}
public CreditCardRecord(String user, BigDecimal cost, String cardType) {
this.user = user;
this.cost = cost;
this.cardType = cardType;
}
public static Map<String, String> getCardTypes() {
return cardTypes;
}
public static void setCardTypes(Map<String, String> cardTypes) {
CreditCardRecord.cardTypes = cardTypes;
}
public String getUser() {
return user;
}
public void setUser(String user) {
this.user = user;
}
public BigDecimal getCost() {
return cost;
}
public void setCost(BigDecimal cost) {
this.cost = cost;
}
public String getCardType() {
return cardType;
}
public void setCardType(String cardType) {
this.cardType = cardType;
}
@Override
public String toString() {
return "CreditCardRecord{" +
"user='" + user + '\'' +
", cost=" + cost +
", cardType='" + cardType + '\'' +
'}';
}
}
<file_sep>/chapter06/spring-cloud-stream-custom-bindingtargetfactory/README.md
## 6.4.4 Advanced Spring Cloud Stream Features
An example of a custom MessageChannel type.<file_sep>/chapter03/spring-cloud-alibaba-dubbo-order/src/main/java/deep/in/spring/cloud/Order.java
/*
* Copyright (C) 2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package deep.in.spring.cloud;
import java.io.Serializable;
import java.sql.Timestamp;
import java.util.UUID;
/**
* @author <a href="mailto:<EMAIL>">Jim</a>
*/
public class Order implements Serializable {
private String id;
private Timestamp createdTime;
private String userId;
public static Order generate(String userId) {
Order order = new Order();
order.setId(UUID.randomUUID().toString());
order.setCreatedTime(new Timestamp(System.currentTimeMillis()));
order.setUserId(userId);
return order;
}
public static Order error() {
Order order = new Order();
order.setId("-1");
order.setCreatedTime(new Timestamp(System.currentTimeMillis()));
order.setUserId("none");
return order;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public Timestamp getCreatedTime() {
return createdTime;
}
public void setCreatedTime(Timestamp createdTime) {
this.createdTime = createdTime;
}
public String getUserId() {
return userId;
}
public void setUserId(String userId) {
this.userId = userId;
}
}
<file_sep>/chapter04/profile-properties/README.md
## 4.2 Spring/Spring Boot and Configuration
Demonstrates loading different configuration based on the active Profile.<file_sep>/chapter03/spring-cloud-alibaba-nacos-consumer-openfeign/README.md
## 3.5 OpenFeign: a Declarative REST Client
An example of calling services with the OpenFeign REST client.<file_sep>/chapter03/spring-cloud-alibaba-nacos-consumer-openfeign/src/main/resources/application.properties
spring.application.name=nacos-openfeign-consumer
server.port=8086
spring.cloud.nacos.discovery.server-addr=localhost:8848
spring.cloud.loadbalancer.ribbon.enabled=false
logging.level.web=debug
<file_sep>/chapter06/spring-cloud-stream-polling-consumer/src/main/resources/application.properties
spring.cloud.stream.bindings.input.destination=test-input
spring.cloud.stream.bindings.input.group=test-input-binder-polling
spring.application.name=scs-polling-consumer<file_sep>/chapter08/batch-processing/task-05/README.md
## 8.4 Composing Batch Tasks with Spring Cloud Data Flow
The Task05 task from section 8-4.<file_sep>/chapter06/spring-cloud-stream-consumer-retry/README.md
## 6.4.4 Advanced Spring Cloud Stream Features
An example of retrying failed message consumption.<file_sep>/chapter02/spring-cloud-eureka-nacos-consumer/README.md
## 2.5 Dual-Registration / Dual-Subscription Mode
An application that subscribes to both Nacos and Eureka.<file_sep>/chapter06/spring-cloud-stream/src/main/resources/application.properties
spring.cloud.stream.bindings.input.destination=test-input
spring.cloud.stream.bindings.input.group=test-input-binder
spring.cloud.stream.bindings.input.binder=kafka
spring.cloud.stream.bindings.output.destination=test-output
spring.cloud.stream.bindings.output.binder=rocketmq<file_sep>/chapter08/batch-processing/task-02/README.md
## 8.4 Composing Batch Tasks with Spring Cloud Data Flow
The Task02 task from section 8-4.<file_sep>/chapter05/sms-service/README.md
## 5.7 Protecting Applications from Cascading Failures with Sentinel
The example for the third-party SMS service.<file_sep>/chapter05/alibaba-sentinel/src/main/resources/application.properties
spring.application.name=alibaba-sentinel
server.port=8080
spring.cloud.sentinel.transport.dashboard=localhost:9090<file_sep>/chapter02/spring-cloud-alibaba-nacos-consumer/README.md
## 2.2 Your First Spring Cloud Microservice with Alibaba Nacos
The example for section 2.2.3: starting the Nacos Discovery Consumer to discover services.<file_sep>/chapter07/spring-cloud-bus-node2/README.md
## 7.3 Dynamic Multi-Node Configuration Refresh with Spring Cloud Bus
Node 2. Works together with node 1 and node 3.<file_sep>/chapter06/spring-cloud-stream-consumer-error/README.md
## 6.4.4 Advanced Spring Cloud Stream Features
An example of handling failed message consumption.<file_sep>/chapter02/spring-cloud-alibaba-nacos-provider/src/main/resources/application.properties
spring.application.name=my-provider
server.port=8080
spring.cloud.nacos.discovery.server-addr=localhost:8848
management.endpoints.web.exposure.include=*<file_sep>/chapter04/spring-cloud-config-client-refresh/README.md
## 4.4.2 Spring Cloud Config Client
An example of dynamic configuration refresh with Spring Cloud Config Client.<file_sep>/chapter03/spring-cloud-nacos-normal-provider/README.md
## 3.8 Application Traffic Control
The normal (non-gray) Provider.<file_sep>/chapter03/spring-cloud-alibaba-nacos-provider4-lb/README.md
## 3.2-3.3 Load Balancing with Spring Cloud LoadBalancer / Netflix Ribbon
The service provider application.
To verify that load balancing takes effect, start the application on different ports so that several instances register.<file_sep>/chapter04/spring-cloud-nacos-consumer-ribbonenhance-dynamicupdate/README.md
## 4.6 Dynamically Applying Traffic Control Policies in Spring Cloud Applications
Works together with the spring-cloud-nacos-gray-provider and spring-cloud-nacos-normal-provider applications from chapter03.<file_sep>/chapter06/spring-cloud-stream-producer-error/src/main/resources/application.properties
spring.cloud.stream.bindings.output.destination=test-output
spring.cloud.stream.bindings.output.content-type=text/plain
spring.cloud.stream.bindings.output.producer.errorChannelEnabled=true
spring.cloud.stream.rocketmq.bindings.output.producer.sync=true
spring.application.name=scs-producer-error
<file_sep>/chapter10/spring-cloud-function/README.md
## 10.3 Spring Cloud Function
Examples related to Spring Cloud Function.<file_sep>/chapter10/java-function/README.md
## 10.2 Java Function
Examples of Java's built-in Function types.<file_sep>/chapter06/spring-integration/README.md
## 6.3.2 Using the Core Components of Spring Integration
Examples of the core features of the Spring Integration project.<file_sep>/chapter03/spring-cloud-nacos-gray-provider/src/main/resources/application.properties
spring.application.name=nacos-traffic-service
server.port=9090
spring.cloud.nacos.discovery.server-addr=localhost:8848
spring.cloud.nacos.discovery.metadata.gray=true
management.endpoints.web.exposure.include=*
logging.level.web=debug
<file_sep>/chapter10/spring-cloud-function-task/README.md
## 10.4.3 Spring Cloud Function and Spring Cloud Task
An example that integrates Spring Cloud Function with Spring Cloud Task.<file_sep>/chapter03/spring-cloud-alibaba-dubbo-user/README.md
## 3.6 Dubbo Spring Cloud: a New Option for Service Invocation
Consumer application: invokes services in the following 3 ways
1. Call Dubbo services with Dubbo @Reference
2. Call Spring Cloud services with OpenFeign
3. Call Dubbo services with OpenFeign plus the @DubboTransported annotation<file_sep>/chapter08/spring-batch/README.md
## 8.9.2 Case Study: Daily Bill Statistics for a Convenience Store with Spring Batch
An example of computing a convenience store's daily bill statistics with Spring Batch.<file_sep>/chapter04/profile-properties/src/main/resources/application-prod.properties
custom.welcome=Hello Spring Profile<file_sep>/chapter03/spring-cloud-nacos-gray-provider/README.md
## 3.8 Application Traffic Control
The gray-release Provider.<file_sep>/chapter05/my-circuit-breaker/src/main/java/deep/in/spring/cloud/Counter.java
/*
* Copyright (C) 2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package deep.in.spring.cloud;
import java.util.concurrent.atomic.AtomicInteger;
/**
* @author <a href="mailto:<EMAIL>">Jim</a>
*/
public class Counter {
    // Failure-count threshold for moving from the Closed state to the Open state
    private final int failureCount;
    // Time window over which failureCount is accumulated
    private final long failureTimeInterval;
    // Current failure count
    private final AtomicInteger currentCount;
    // Timestamp of the last failed call
    private long lastTime;
    // Number of successful calls while in the Half-Open state
    private final AtomicInteger halfOpenSuccessCount;
public Counter(int failureCount, long failureTimeInterval) {
this.failureCount = failureCount;
this.failureTimeInterval = failureTimeInterval;
this.currentCount = new AtomicInteger(0);
this.halfOpenSuccessCount = new AtomicInteger(0);
this.lastTime = System.currentTimeMillis();
}
public synchronized int incrFailureCount() {
long current = System.currentTimeMillis();
        if (current - lastTime > failureTimeInterval) { // past the time window: reset the current failure count to 0
lastTime = current;
currentCount.set(0);
}
return currentCount.getAndIncrement();
}
public int incrSuccessHalfOpenCount() {
return this.halfOpenSuccessCount.incrementAndGet();
}
public boolean failureThresholdReached() {
return getCurCount() >= failureCount;
}
public int getCurCount() {
return currentCount.get();
}
public synchronized void reset() {
halfOpenSuccessCount.set(0);
currentCount.set(0);
}
}
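// A minimal sketch (not part of the original sample) of how a circuit breaker could drive the
// CLOSED -> OPEN transition with this counter; 'invoke' and 'breakerState' are hypothetical names:
//
//   if (breakerState == State.CLOSED) {
//       try {
//           invoke();
//       } catch (Exception e) {
//           counter.incrFailureCount();
//           if (counter.failureThresholdReached()) {
//               breakerState = State.OPEN; // stop calling the failing dependency
//           }
//       }
//   }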
<file_sep>/chapter10/spring-cloud-function-stream-multifuncs/src/main/resources/application.properties
spring.cloud.stream.bindings.functionRouter-in-0.destination=test-input
spring.cloud.stream.bindings.functionRouter-in-0.group=test-input-function
spring.cloud.function.definition=functionRouter;supplier;consume1;consume2;consume3<file_sep>/chapter04/spring-cloud-config-server-jdbc/src/main/resources/application.properties
spring.application.name=sc-config-server-jdbc
server.port=8080
spring.profiles.active=jdbc
spring.cloud.config.server.jdbc.sql=SELECT `KEY`, VALUE from PROPERTIES where APPLICATION=? and `PROFILE`=? and LABEL=?;
spring.datasource.url=jdbc:mysql://127.0.0.1:3306/{database}?useUnicode=true&characterEncoding=UTF-8
spring.datasource.username={username}
spring.datasource.password={<PASSWORD>}
spring.datasource.driver-class-name=com.mysql.cj.jdbc.Driver<file_sep>/chapter05/netflix-hystrix-zuul/README.md
## 5.5.3 Rate Limiting with Hystrix
An example of using Hystrix to rate-limit the Netflix Zuul gateway.<file_sep>/chapter05/netflix-hystrix/src/main/resources/application.properties
logging.level.root=INFO
server.port=8080
spring.application.name=netflix-hystrix
management.endpoints.web.exposure.include=*<file_sep>/chapter10/java-function/src/main/java/deep/in/spring/cloud/FunctionApplication.java
/*
* Copyright (C) 2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package deep.in.spring.cloud;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* @author <a href="mailto:<EMAIL>">Jim</a>
*/
public class FunctionApplication {
public static void main(String[] args) {
System.out.println(Stream.of("a", "b", "c").map(String::toUpperCase).collect(Collectors.toList()));
System.out.println(func(s -> s.toUpperCase(), "a"));
Function<String, String> func = s -> s.toUpperCase();
System.out.println(func(func, "b"));
System.out.println(func(func, "c"));
}
static String func(Function<String, String> func, String origin) {
return func.apply(origin);
}
}
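// Expected console output:
//   [A, B, C]
//   A
//   B
//   C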
<file_sep>/chapter02/spring-cloud-eureka-nacos-consumer/src/main/resources/application.properties
spring.application.name=my-consumer
server.port=8083
eureka.client.service-url.defaultZone=http://localhost:8761/eureka
spring.cloud.nacos.discovery.server-addr=localhost:8848
spring.autoconfigure.exclude=org.springframework.cloud.client.serviceregistry.ServiceRegistryAutoConfiguration,org.springframework.cloud.client.serviceregistry.AutoServiceRegistrationAutoConfiguration
spring.application.name=nacos-ribbonenhanced-dynamicupdate-consumer
spring.cloud.nacos.config.server-addr=localhost:8848
<file_sep>/chapter06/spring-cloud-stream-custom-bindingtargetfactory/src/main/resources/application.properties
spring.cloud.stream.bindings.input.destination=test-input
spring.cloud.stream.bindings.input.group=test-input-binder
spring.application.name=scs-custom-bindingtargetfactory<file_sep>/chapter05/resilience4j/src/main/resources/application-openfeign2r4j.properties
resilience4j.circuitbreaker.backends.inventory.minimum-number-of-calls=3
resilience4j.circuitbreaker.backends.inventory.failure-rate-threshold=100<file_sep>/chapter05/netflix-hystrix/src/main/java/deep/in/spring/cloud/HystrixFlowControlApplication.java
/*
* Copyright (C) 2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package deep.in.spring.cloud;
import com.netflix.hystrix.contrib.javanica.annotation.HystrixCommand;
import org.springframework.boot.WebApplicationType;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.cloud.client.circuitbreaker.EnableCircuitBreaker;
import org.springframework.context.annotation.Profile;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.client.RestTemplate;
/**
* @author <a href="mailto:<EMAIL>">Jim</a>
*/
@Profile("flowcontrol")
@SpringBootApplication
@EnableCircuitBreaker
public class HystrixFlowControlApplication {
public static void main(String[] args) {
new SpringApplicationBuilder(HystrixFlowControlApplication.class)
.properties("spring.profiles.active=flowcontrol").web(WebApplicationType.SERVLET)
.run(args);
}
@RestController
class HystrixController {
@HystrixCommand(commandKey = "Hello", groupKey = "HelloGroup", fallbackMethod = "fallback")
@GetMapping("/hello")
public String hello() {
return "Hello World";
}
public String fallback(Throwable throwable) {
return "Hystrix fallback";
}
}
}
<file_sep>/chapter05/netflix-hystrix/src/main/resources/application-flowcontrol.properties
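# With SEMAPHORE isolation and maxConcurrentRequests=0, Hystrix rejects every execution of the
# "Hello" command, so /hello in HystrixFlowControlApplication always returns the fallback response.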
hystrix.command.Hello.execution.isolation.semaphore.maxConcurrentRequests=0
hystrix.command.Hello.execution.isolation.strategy=SEMAPHORE
hystrix.command.Hello.execution.timeout.enabled=false<file_sep>/chapter05/netflix-hystrix-spring-cloud-gateway/README.md
## 5.5.3 Rate Limiting with Hystrix
An example of using Hystrix to rate-limit the Spring Cloud Gateway.<file_sep>/chapter07/spring-cloud-bus-node3/README.md
## 7.3 Dynamic Multi-Node Configuration Refresh with Spring Cloud Bus
Node 3. Works together with node 1 and node 2.<file_sep>/chapter05/alibaba-sentinel/README.md
## 5.4 Alibaba Sentinel
Integration of Sentinel with RestTemplate, OpenFeign, and Spring Cloud Circuit Breaker.
Also covers Sentinel's own circuit breaking and rate limiting.<file_sep>/chapter08/scdf-common/README.md
## 8 Spring Cloud Data Flow
Shared classes and interfaces used by the Spring Cloud Data Flow chapter.<file_sep>/chapter08/spring-cloud-deployer-local/src/main/java/deep/in/spring/cloud/SpringCloudDeployerLocalApplication.java
/*
* Copyright (C) 2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package deep.in.spring.cloud;
import java.util.HashMap;
import java.util.Map;
import org.springframework.cloud.deployer.resource.maven.MavenResource;
import org.springframework.cloud.deployer.spi.app.AppScaleRequest;
import org.springframework.cloud.deployer.spi.app.AppStatus;
import org.springframework.cloud.deployer.spi.app.DeploymentState;
import org.springframework.cloud.deployer.spi.core.AppDefinition;
import org.springframework.cloud.deployer.spi.core.AppDeploymentRequest;
import org.springframework.cloud.deployer.spi.local.LocalAppDeployer;
import org.springframework.cloud.deployer.spi.local.LocalDeployerProperties;
import org.springframework.cloud.deployer.spi.local.LocalTaskLauncher;
import org.springframework.cloud.deployer.spi.task.LaunchState;
import org.springframework.cloud.deployer.spi.task.TaskStatus;
/**
* @author <a href="mailto:<EMAIL>">Jim</a>
*/
public class SpringCloudDeployerLocalApplication {
public static void main(String[] args) {
//taskLaunch();
appDeploy();
}
private static void appDeploy() {
LocalAppDeployer deployer = new LocalAppDeployer(new LocalDeployerProperties());
String deploymentId = deployer.deploy(createAppDeploymentRequest());
while (true) {
try {
Thread.sleep(1000L);
} catch (InterruptedException e) {
// ignore
}
AppStatus status = deployer.status(deploymentId);
System.out.println("app status: " + status);
if (status.getState() == DeploymentState.deployed) {
System.out.println("app is deployed");
break;
}
}
deployer.scale(new AppScaleRequest(deploymentId, 2, createNacosProperties(8081)));
System.out.println("app will be cancel after 30s");
try {
Thread.sleep(30000L);
deployer.shutdown();
} catch (Exception e) {
e.printStackTrace();
}
}
private static void taskLaunch() {
LocalTaskLauncher launcher = new LocalTaskLauncher(new LocalDeployerProperties());
String launchId = launcher.launch(createAppDeploymentRequest());
while (true) {
try {
Thread.sleep(1000L);
} catch (InterruptedException e) {
// ignore
}
TaskStatus status = launcher.status(launchId);
System.out.println("task status: " + status);
if (status.getState() == LaunchState.running) {
System.out.println("task is running");
break;
}
}
System.out.println("task will be cancel after 30s");
try {
Thread.sleep(30000L);
} catch (InterruptedException e) {
// ignore
}
launcher.cancel(launchId);
}
private static AppDeploymentRequest createAppDeploymentRequest() {
MavenResource resource = new MavenResource.Builder()
.artifactId("spring-cloud-alibaba-nacos-provider")
.groupId("deep.in.spring.cloud")
.version("0.0.1-SNAPSHOT")
.build();
AppDefinition definition = new AppDefinition("nacos-provider", createNacosProperties(8080));
AppDeploymentRequest request = new AppDeploymentRequest(definition, resource);
return request;
}
private static Map<String, String> createNacosProperties(int port) {
Map<String, String> properties = new HashMap<>();
properties.put("server.port", String.valueOf(port));
properties.put("spring.application.name", "spring-cloud-deployer-provider");
properties.put("spring.cloud.nacos.discovery.server-addr", "localhost:8848");
return properties;
}
}
<file_sep>/chapter07/spring-cloud-bus-node2/src/main/resources/application.properties
spring.application.name=scb-node2
spring.cloud.bus.id=scb-node2
server.port=8081
management.endpoints.web.exposure.include=*
spring.cloud.bus.trace.enabled=true
<file_sep>/chapter03/spring-cloud-alibaba-dubbo-order/README.md
## 3.6 Dubbo Spring Cloud: a New Option for Service Invocation
Provider application: exposes Dubbo services and REST services with Spring MVC annotations.<file_sep>/chapter07/spring-cloud-bus-node1/src/main/resources/application.properties
spring.application.name=scb-node1
spring.cloud.bus.id=scb-node1
server.port=8080
management.endpoints.web.exposure.include=*
spring.cloud.bus.trace.enabled=true<file_sep>/chapter06/spring-cloud-stream-consumer-retry/src/main/resources/application.properties
spring.cloud.stream.bindings.input.destination=test-input
spring.cloud.stream.bindings.input.content-type=application/json
spring.cloud.stream.bindings.input.group=test-input-binder
spring.cloud.stream.rocketmq.bindings.input.consumer.orderly=true
spring.cloud.stream.rocketmq.bindings.input.consumer.suspendCurrentQueueTimeMillis=2000
spring.cloud.stream.bindings.input.consumer.maxAttempts=3
spring.cloud.stream.bindings.output.destination=test-output
spring.cloud.stream.bindings.output.content-type=text/plain
spring.application.name=scs-retry<file_sep>/chapter08/spring-batch/src/main/resources/schema.sql
CREATE TABLE IF NOT EXISTS BILLS
(
name varchar(50),
amount decimal(10,2)
);<file_sep>/chapter05/resilience4j/src/main/java/deep/in/spring/cloud/R4jCircuitBreakerDetail.java
/*
* Copyright (C) 2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package deep.in.spring.cloud;
import java.time.Duration;
import io.github.resilience4j.circuitbreaker.CircuitBreaker;
import io.github.resilience4j.circuitbreaker.CircuitBreakerConfig;
import io.vavr.control.Try;
import org.springframework.web.client.HttpServerErrorException;
import org.springframework.web.client.RestTemplate;
/**
* @author <a href="mailto:<EMAIL>">Jim</a>
*/
public class R4jCircuitBreakerDetail {
public static void main(String[] args) {
//testFailureRateThresholdInMinimumNumberOfCalls();
//testIgnoreException();
//testSlowCallRateThresholdInMinimumNumberOfCalls();
waitDurationInOpenState();
}
private static void testFailureRateThresholdInMinimumNumberOfCalls() {
CircuitBreaker circuitBreaker = CircuitBreaker.of("httpbin",
CircuitBreakerConfig.custom().
minimumNumberOfCalls(10).
failureRateThreshold(20).
build());
RestTemplate restTemplate = new RestTemplate();
for (int i = 0; i < 10; i++) {
String result = Try.ofSupplier(CircuitBreaker
.decorateSupplier(circuitBreaker, () -> {
return restTemplate.
getForEntity("http://httpbin.org/status/500", String.class).
getStatusCode().toString();
})).recover(throwable -> "fallback: " + throwable.getMessage()).get();
System.out.println(result);
}
}
private static void testIgnoreException() {
CircuitBreaker circuitBreaker = CircuitBreaker.of("httpbin",
CircuitBreakerConfig.custom().
minimumNumberOfCalls(10).
failureRateThreshold(20).
//ignoreExceptions(HttpServerErrorException.InternalServerError.class).
ignoreException( throwable -> {
if(throwable instanceof HttpServerErrorException.InternalServerError) {
return true;
}
return false;
}).
build());
RestTemplate restTemplate = new RestTemplate();
for (int i = 0; i < 11; i++) {
String result = Try.ofSupplier(CircuitBreaker
.decorateSupplier(circuitBreaker, () -> {
return restTemplate.
getForEntity("http://httpbin.org/status/500", String.class).
getStatusCode().toString();
})).recover(throwable -> "fallback: " + throwable.getMessage()).get();
System.out.println(result);
}
}
private static void testSlowCallRateThresholdInMinimumNumberOfCalls() {
CircuitBreaker circuitBreaker = CircuitBreaker.of("httpbin",
CircuitBreakerConfig.custom().
minimumNumberOfCalls(2).
slowCallRateThreshold(100).
slowCallDurationThreshold(Duration.ofSeconds(2)).
build());
RestTemplate restTemplate = new RestTemplate();
for (int i = 0; i < 5; i++) {
String result = Try.ofSupplier(CircuitBreaker
.decorateSupplier(circuitBreaker, () -> {
return restTemplate.
getForEntity("http://httpbin.org/delay/3", String.class).
getStatusCode().toString();
})).recover(throwable -> "fallback: " + throwable.getMessage()).get();
System.out.println(result);
}
}
private static void waitDurationInOpenState() {
CircuitBreaker circuitBreaker = CircuitBreaker.of("httpbin",
CircuitBreakerConfig.custom().
minimumNumberOfCalls(4).
failureRateThreshold(100).
                waitDurationInOpenState(Duration.ofSeconds(10)). // enter the half-open state after 10 seconds
build());
RestTemplate restTemplate = new RestTemplate();
for (int i = 0; i < 6; i++) {
String result = Try.ofSupplier(CircuitBreaker
.decorateSupplier(circuitBreaker, () -> {
return restTemplate.
getForEntity("http://httpbin.org/status/500", String.class).
getStatusCode().toString();
})).recover(throwable -> "fallback: " + throwable.getMessage()).get();
System.out.println(result);
}
try {
Thread.sleep(10000L);
} catch (InterruptedException e) {
e.printStackTrace();
}
for (int i = 0; i < 3; i++) {
String result = Try.ofSupplier(CircuitBreaker
.decorateSupplier(circuitBreaker, () -> {
return restTemplate.
getForEntity("http://httpbin.org/status/500", String.class).
getStatusCode().toString();
})).recover(throwable -> "fallback: " + throwable.getMessage()).get();
System.out.println(result);
}
}
}
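// Rough behaviour of waitDurationInOpenState() above (a sketch; exact output depends on httpbin.org):
//   calls 1-4 fail and trip the breaker (minimumNumberOfCalls=4, failureRateThreshold=100),
//   calls 5-6 are rejected with CallNotPermittedException while the breaker is OPEN,
//   and after the 10-second wait the breaker permits calls again (HALF_OPEN) for the last loop.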
<file_sep>/chapter04/profile-properties/src/main/java/deep/in/spring/cloud/ProfilePropertiesApplication.java
/*
* Copyright (C) 2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package deep.in.spring.cloud;
import java.util.Arrays;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.ApplicationRunner;
import org.springframework.boot.WebApplicationType;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.boot.env.RandomValuePropertySource;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.springframework.context.support.StaticApplicationContext;
import org.springframework.core.env.AbstractEnvironment;
import org.springframework.core.env.MapPropertySource;
/**
* @author <a href="mailto:<EMAIL>">Jim</a>
*/
@SpringBootApplication
public class ProfilePropertiesApplication {
public static void main(String[] args) {
new SpringApplicationBuilder()
.web(WebApplicationType.NONE)
.sources(ProfilePropertiesApplication.class)
.run(args);
}
@Autowired
private ApplicationContext applicationContext;
@Bean
public ApplicationRunner runner() {
return (args) -> {
            ((AbstractEnvironment)applicationContext.getEnvironment()).getPropertySources().addFirst(new RandomValuePropertySource());
System.out.println(applicationContext.getBean(UserService.class).findAll());
System.out.println(applicationContext.getEnvironment().getProperty("custom.welcome"));
System.out.println(Arrays.toString(applicationContext.getEnvironment().getActiveProfiles()));
System.out.println(Arrays.toString(applicationContext.getEnvironment().getDefaultProfiles()));
};
}
}
<file_sep>/chapter10/spring-cloud-function-stream-withbindingannotation/src/main/resources/application.properties
spring.cloud.stream.bindings.input.destination=test-input
spring.cloud.stream.bindings.input.group=test-input-function
spring.cloud.stream.bindings.output.destination=upper-test-input
<file_sep>/chapter05/resilience4j/src/main/resources/application.properties
logging.level.root=INFO
server.port=8080
spring.application.name=r4j
management.endpoints.web.exposure.include=*<file_sep>/chapter08/spring-cloud-skipper/hello-skipper-v2/README.md
## 8.6 Spring Cloud Skipper
The application for helloskipper version 1.0.1.<file_sep>/chapter08/spring-cloud-deployer-local/README.md
## 8.7.3 LocalAppDeployer
An example of using the local (Local) Spring Cloud Deployer.<file_sep>/chapter03/spring-cloud-nacos-normal-provider/src/main/resources/application.properties
spring.application.name=nacos-traffic-service
server.port=8080
spring.cloud.nacos.discovery.server-addr=localhost:8848
spring.cloud.nacos.discovery.metadata.gray=false
management.endpoints.web.exposure.include=*
logging.level.web=debug
<file_sep>/chapter09/spring-cloud-gateway-serviceregistry/README.md
## 9.7 Integrating Spring Cloud Gateway with a Registry
An example of Spring Cloud Gateway reading services from the Nacos registry.<file_sep>/chapter04/spring-cloud-config-client-refresh/src/main/resources/bootstrap.properties
spring.application.name=sc-config-client-refresh
spring.cloud.config.name=book
spring.cloud.config.profile=prod
spring.cloud.config.uri=http://localhost:8080/
server.port=8081
management.endpoints.web.exposure.include=*<file_sep>/chapter05/my-circuit-breaker/src/main/java/deep/in/spring/cloud/State.java
package deep.in.spring.cloud;
/**
* @author <a href="mailto:<EMAIL>">Jim</a>
*/
public enum State {
CLOSED,
HALF_OPEN,
OPEN
}
<file_sep>/chapter10/spring-cloud-function-aws/README.md
## 10.3 Spring Cloud Function
An example that integrates Spring Cloud Function with AWS Lambda.<file_sep>/chapter02/spring-cloud-netflix-eureka-consumer/README.md
## 2.3 Replacing the Alibaba Nacos Registry with Netflix Eureka
The example for section 2.3.3: starting the Eureka Discovery Consumer to discover services.<file_sep>/chapter08/batch-processing/spring-cloud-task-simple/README.md
## 8.3 Batch Processing Case Study: Collecting Metrics for GitHub Repositories
Batch processing with Spring Cloud Task.<file_sep>/chapter06/spring-cloud-stream-metrics/src/main/resources/application.properties
spring.cloud.stream.bindings.output.destination=test-output
spring.cloud.stream.metrics.key=scs-metrics
spring.cloud.stream.bindings.applicationMetrics.destination=metrics-topic
spring.application.name=scs-metrics
server.port=8080
management.endpoints.web.exposure.include=*<file_sep>/chapter06/spring-cloud-stream-producer-error/README.md
## 6.4.4 Advanced Spring Cloud Stream Features
An example of handling failed message sends.<file_sep>/chapter08/batch-processing/task-error/src/main/java/deep/in/spring/cloud/TaskErrorApplication.java
/*
* Copyright (C) 2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package deep.in.spring.cloud;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.task.configuration.EnableTask;
import org.springframework.cloud.task.listener.annotation.AfterTask;
import org.springframework.cloud.task.listener.annotation.BeforeTask;
import org.springframework.cloud.task.listener.annotation.FailedTask;
import org.springframework.cloud.task.repository.TaskExecution;
import org.springframework.context.annotation.Bean;
/**
* @author <a href="mailto:<EMAIL>">Jim</a>
*/
@SpringBootApplication
@EnableTask
public class TaskErrorApplication {
private final Logger logger = LoggerFactory.getLogger(TaskErrorApplication.class);
public static void main(String[] args) {
SpringApplication.run(TaskErrorApplication.class, args);
}
@AfterTask
public void afterTask(TaskExecution taskExecution) {
logger.info("AfterTask Trigger");
taskExecution.setExitMessage(String.valueOf(taskExecution.getExitCode()));
}
@BeforeTask
public void beforeTask(TaskExecution taskExecution) {
logger.info("BeforeTask Trigger");
}
@FailedTask
public void failedTask(TaskExecution taskExecution, Throwable throwable) {
logger.info("failedTask Trigger");
}
@Bean
public CommandLineRunner commandLineRunner() {
return args -> {
throw new IllegalStateException("Oops");
};
}
}
<file_sep>/chapter04/profile-properties/src/main/resources/application-default.properties
custom.welcome=Default Mock Data<file_sep>/chapter08/stream-processing/creditcard-fraud-detection-sink/README.md
## 8.2 Stream Processing Case Study: a Credit Card Anti-Fraud System
The Sink application.<file_sep>/chapter03/spring-cloud-alibaba-nacos-consumer-ribbon/README.md
## 3.3 Load Balancing with Netflix Ribbon
Uses Netflix Ribbon to verify that calls from the service consumer to the provider are load balanced.<file_sep>/chapter03/spring-cloud-nacos-consumer-ribbonenhance/src/main/resources/application.properties
spring.application.name=nacos-ribbonenhanced-consumer
server.port=8888
spring.cloud.nacos.discovery.server-addr=localhost:8848
feign.hystrix.enabled=true
logging.level.web=debug
<file_sep>/chapter08/spring-cloud-skipper/hello-skipper-v3/README.md
## 8.6 Spring Cloud Skipper
The application for helloskipper version 1.0.2.<file_sep>/chapter04/spring-cloud-config-client-service-registry/README.md
## 4.4.3 Integrating Spring Cloud Config Client with a Service Registry
An example that integrates Spring Cloud Config Client with the Alibaba Nacos registry.<file_sep>/chapter07/bus-common/README.md
## 7.3 Dynamic Multi-Node Configuration Refresh with Spring Cloud Bus
Shared classes and interfaces.<file_sep>/chapter06/spring-messaging/README.md
## 6.2.1/6.2.2 Spring and Messaging
Examples related to spring-messaging.<file_sep>/chapter05/order-service/src/main/resources/application.properties
spring.application.name=order-service
server.port=8082
spring.cloud.sentinel.transport.dashboard=localhost:9090
spring.cloud.nacos.discovery.server-addr=localhost:8848
ribbon.ReadTimeout=10000<file_sep>/chapter10/spring-cloud-function-web/README.md
## 10.4.3 Spring Cloud Function with Spring Web/WebFlux
An example that integrates Spring Cloud Function with Spring Web.<file_sep>/chapter10/spring-cloud-function-gcp/pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.3.0.BUILD-SNAPSHOT</version>
<relativePath/> <!-- lookup parent from repository -->
</parent>
<groupId>deep.in.spring.cloud</groupId>
<artifactId>spring-cloud-function-gcp</artifactId>
<version>0.0.1-SNAPSHOT</version>
<name>spring-cloud-function-gcp</name>
<description>Demo project for Spring Boot</description>
<properties>
<java.version>1.8</java.version>
<spring-cloud.version>Hoxton.RELEASE</spring-cloud.version>
</properties>
<dependencies>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-function-adapter-gcp</artifactId>
<version>3.1.0.BUILD-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-function-adapter-azure</artifactId>
<version>3.1.0.BUILD-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>com.aliyun</groupId>
<artifactId>aliyun-java-sdk-fc</artifactId>
<version>1.8.11</version>
</dependency>
<dependency>
<groupId>com.aliyun.fc.runtime</groupId>
<artifactId>fc-java-core</artifactId>
<version>1.3.0</version>
</dependency>
</dependencies>
<dependencyManagement>
<dependencies>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-function-dependencies</artifactId>
<version>3.1.0.BUILD-SNAPSHOT</version>
<type>pom</type>
<scope>import</scope>
</dependency>
</dependencies>
</dependencyManagement>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-deploy-plugin</artifactId>
<configuration>
<skip>true</skip>
</configuration>
</plugin>
<plugin>
<groupId>com.google.cloud.functions</groupId>
<artifactId>function-maven-plugin</artifactId>
<version>0.9.1</version>
<configuration>
<functionTarget>org.springframework.cloud.function.adapter.gcp.FunctionInvoker</functionTarget>
<port>8080</port>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<shadedArtifactAttached>true</shadedArtifactAttached>
<outputDirectory>target/deploy</outputDirectory>
<shadedClassifierName>gcp</shadedClassifierName>
<transformers>
<transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
<resource>META-INF/spring.handlers</resource>
</transformer>
<transformer implementation="org.springframework.boot.maven.PropertiesMergingResourceTransformer">
<resource>META-INF/spring.factories</resource>
</transformer>
<transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
<resource>META-INF/spring.schemas</resource>
</transformer>
<transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
<transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
<mainClass>deep.in.spring.cloud.SpringCloudFunctionGCPApplication</mainClass>
</transformer>
</transformers>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>8</source>
<target>8</target>
</configuration>
</plugin>
</plugins>
</build>
<repositories>
<repository>
<id>spring-snapshots</id>
<name>Spring Snapshots</name>
<url>https://repo.spring.io/snapshot</url>
<snapshots>
<enabled>true</enabled>
</snapshots>
</repository>
<repository>
<id>spring-milestones</id>
<name>Spring Milestones</name>
<url>https://repo.spring.io/milestone</url>
<snapshots>
<enabled>false</enabled>
</snapshots>
</repository>
</repositories>
<pluginRepositories>
<pluginRepository>
<id>spring-snapshots</id>
<name>Spring Snapshots</name>
<url>https://repo.spring.io/snapshot</url>
<snapshots>
<enabled>true</enabled>
</snapshots>
</pluginRepository>
<pluginRepository>
<id>spring-milestones</id>
<name>Spring Milestones</name>
<url>https://repo.spring.io/milestone</url>
<snapshots>
<enabled>false</enabled>
</snapshots>
</pluginRepository>
</pluginRepositories>
</project>
<file_sep>/chapter05/netflix-hystrix-dashboard/README.md
## 5.5.3 Hystrix Dashboard
The Hystrix Dashboard application.<file_sep>/chapter08/stream-processing/creditcard-source/README.md
## 8.2 Stream Processing Case Study: a Credit Card Anti-Fraud System
The Source application.<file_sep>/chapter04/spring-cloud-config-server-file/src/main/resources/book-prod.properties
book.name=deep in spring cloud<file_sep>/chapter10/spring-cloud-function-task/src/main/resources/application.properties
spring.h2.console.enabled=true
spring.cloud.function.task.function=function
spring.cloud.function.task.supplier=supplier
spring.cloud.function.task.consumer=consumer<file_sep>/chapter08/spring-cloud-task/README.md
## 8.8.1 Getting Started with Spring Cloud Task
An introductory example for Spring Cloud Task.<file_sep>/chapter05/delivery-service/README.md
## 5.7 Protecting Applications from Cascading Failures with Sentinel
The example for the delivery service.<file_sep>/chapter09/zuul-gateway/README.md
## 9.2 Netflix Zuul
Examples for the Netflix Zuul topics.<file_sep>/chapter04/spring-cloud-alibaba-nacos-configuration/README.md
## 4.3.1 Fetching and Dynamically Refreshing Configuration with Alibaba Nacos
An example based on Spring Cloud Alibaba Nacos Config.<file_sep>/chapter05/order-service/README.md
## 5.7 Protecting Applications from Cascading Failures with Sentinel
The example for the order service.<file_sep>/chapter06/spring-cloud-stream-polling-consumer/README.md
## 6.4.4 Advanced Spring Cloud Stream Features
A Polling Consumer example.<file_sep>/chapter04/spring-cloud-config-server-file/README.md
## 4.4.1 Spring Cloud Config Server
An example that uses the file system as the EnvironmentRepository implementation in Spring Cloud Config Server.<file_sep>/chapter08/spring-cloud-skipper/hello-skipper-v1/README.md
## 8.6 Spring Cloud Skipper
The application for helloskipper version 1.0.0.<file_sep>/chapter05/alibaba-sentinel/src/main/resources/application-flowcontrol.properties
spring.cloud.sentinel.datasource.ds.file.file=classpath: flowcontrol.json
spring.cloud.sentinel.datasource.ds.file.data-type=json
spring.cloud.sentinel.datasource.ds.file.rule-type=flow
spring.cloud.sentinel.filter.url-patterns=/**<file_sep>/chapter05/alibaba-sentinel-zuul/src/main/java/deep/in/spring/cloud/SentinelZuulApplication.java
/*
* Copyright (C) 2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package deep.in.spring.cloud;
import com.alibaba.csp.sentinel.adapter.gateway.zuul.fallback.BlockResponse;
import com.alibaba.csp.sentinel.adapter.gateway.zuul.fallback.ZuulBlockFallbackProvider;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.netflix.zuul.EnableZuulProxy;
import org.springframework.context.annotation.Bean;
/**
* @author <a href="mailto:<EMAIL>">Jim</a>
*/
@SpringBootApplication
@EnableZuulProxy
public class SentinelZuulApplication {
public static void main(String[] args) {
SpringApplication.run(SentinelZuulApplication.class, args);
}
@Bean
public ZuulBlockFallbackProvider zuulBlockFallbackProvider1() {
return new ZuulBlockFallbackProvider() {
@Override
public String getRoute() {
return "*";
}
@Override
public BlockResponse fallbackResponse(String route, Throwable cause) {
if (route.equals("my-provider1")) {
return new BlockResponse(403, "Provider1 Block", route);
} else if (route.equals("my-provider2")) {
return new BlockResponse(403, "Provider2 Block", route);
} else {
return new BlockResponse(403, "Sentinel Block", route);
}
}
};
}
}
<file_sep>/chapter02/spring-cloud-alibaba-nacos-consumer-reactive/README.md
## 2.4.1 ReactiveDiscoveryClient Basics
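A minimal sketch of what using the reactive client looks like (standard Spring Cloud Commons API; the endpoint and the `nacos-provider` service name are illustrative): service instances come back as a `Flux` instead of a blocking `List`.

```java
import org.springframework.cloud.client.ServiceInstance;
import org.springframework.cloud.client.discovery.ReactiveDiscoveryClient;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Flux;

@RestController
public class InstancesController {

    private final ReactiveDiscoveryClient discoveryClient;

    public InstancesController(ReactiveDiscoveryClient discoveryClient) {
        this.discoveryClient = discoveryClient;
    }

    @GetMapping("/instances")
    public Flux<String> instances() {
        // look up all registered instances of the provider without blocking
        return discoveryClient.getInstances("nacos-provider")
                .map(ServiceInstance::getUri)
                .map(Object::toString);
    }
}
```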
This module uses ReactiveDiscoveryClient together with Alibaba Nacos for service discovery. <file_sep>/chapter05/resilience4j-spring-cloud/README.md
## 5.6.2 Spring Cloud Resilience4j
Integration of Resilience4j with Spring Cloud Circuit Breaker.<file_sep>/chapter03/spring-cloud-alibaba-nacos-consumer-sclb/README.md
## 3.2 Load Balancing with Spring Cloud LoadBalancer
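A minimal sketch of the usual client-side setup (standard Spring Cloud API; the controller, the `/echo` path and the `nacos-provider` service name are illustrative): a `RestTemplate` marked `@LoadBalanced` resolves the service name in the URL against the registry and spreads requests across the available instances.

```java
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.client.loadbalancer.LoadBalanced;
import org.springframework.context.annotation.Bean;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.client.RestTemplate;

@SpringBootApplication
public class ConsumerApplication {

    public static void main(String[] args) {
        SpringApplication.run(ConsumerApplication.class, args);
    }

    @Bean
    @LoadBalanced // requests to http://<service-name>/... are spread across instances
    public RestTemplate restTemplate() {
        return new RestTemplate();
    }
}

@RestController
class CallController {

    private final RestTemplate restTemplate;

    CallController(RestTemplate restTemplate) {
        this.restTemplate = restTemplate;
    }

    @GetMapping("/call")
    public String call() {
        // "nacos-provider" is the logical service name looked up in the registry
        return restTemplate.getForObject("http://nacos-provider/echo", String.class);
    }
}
```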
This module uses Spring Cloud LoadBalancer to verify that calls from the service consumer to the provider are load balanced. <file_sep>/chapter02/spring-cloud-netflix-eureka-provider/README.md
## 2.3 Replacing the Alibaba Nacos Registry with Netflix Eureka
The example for section 2.3.2, starting the Eureka Discovery Provider to register the service. <file_sep>/chapter02/spring-cloud-eureka-nacos-provider/src/main/resources/application.properties
spring.application.name=my-provider
server.port=8082
eureka.client.service-url.defaultZone=http://localhost:8761/eureka
spring.cloud.nacos.discovery.server-addr=localhost:8848
spring.autoconfigure.exclude=org.springframework.cloud.client.serviceregistry.ServiceRegistryAutoConfiguration,org.springframework.cloud.client.serviceregistry.AutoServiceRegistrationAutoConfiguration
<file_sep>/chapter06/spring-cloud-stream-metrics/README.md
## 6.4.4 Spring Cloud Stream Advanced Features
A Metrics example.<file_sep>/chapter05/alibaba-sentinel-spring-cloud-gateway/README.md
## 5.4.4 Sentinel Rate Limiting
An example of Sentinel rate limiting the Spring Cloud Gateway.<file_sep>/chapter08/batch-processing/task-03/README.md
## 8.4 Composing Batch Tasks with Spring Cloud Data Flow
The Task03 task from section 8.4.<file_sep>/chapter09/zuul-gateway/src/main/resources/application.properties
spring.application.name=zuul-proxy
server.port=8080
spring.cloud.nacos.discovery.server-addr=localhost:8848
zuul.routes.service1.path=/nacos/**
zuul.routes.service1.service-id=nacos-provider
zuul.routes.service2.path=/http/**
zuul.routes.service2.url=http://httpbin.org/<file_sep>/chapter03/spring-cloud-alibaba-nacos-consumer-ribbon/src/main/java/deep/in/spring/cloud/ServerStatsController.java
/*
* Copyright (C) 2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package deep.in.spring.cloud;
import com.netflix.loadbalancer.AbstractLoadBalancer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cloud.netflix.ribbon.SpringClientFactory;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
/**
* @author <a href="mailto:<EMAIL>">Jim</a>
*/
@RestController
public class ServerStatsController {
@Autowired
private SpringClientFactory springClientFactory;
@GetMapping("/stats")
public String stats() {
AbstractLoadBalancer loadBalancer =
springClientFactory.getInstance("nacos-provider-lb", AbstractLoadBalancer.class);
return loadBalancer.getLoadBalancerStats().toString();
}
}
<file_sep>/chapter06/spring-cloud-stream-consumer-error/src/main/resources/application.properties
spring.cloud.stream.bindings.output.destination=test-output
spring.cloud.stream.bindings.output.content-type=text/plain
spring.cloud.stream.bindings.input.destination=test-input
spring.cloud.stream.bindings.input.content-type=text/plain
spring.cloud.stream.bindings.input.group=test-input-group
spring.cloud.stream.bindings.input.consumer.concurrency=20
spring.application.name=scs-consumer-error<file_sep>/chapter05/alibaba-sentinel-zuul/README.md
## 5.4.4 Sentinel Rate Limiting
An example of Sentinel rate limiting the Netflix Zuul gateway.<file_sep>/chapter08/batch-processing/task-06/README.md
## 8.4 Composing Batch Tasks with Spring Cloud Data Flow
The Task06 task from section 8.4.<file_sep>/chapter06/spring-messaging/src/main/resources/application.properties
server.port=8080
spring.application.name=websocket-demo<file_sep>/chapter09/spring-cloud-gateway/README.md
## 9.3/9.4/9.5/9.6
Examples of the Spring Cloud Gateway topics Route, Predicate, and Filter.
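For orientation, a minimal Java-config sketch of the three concepts (standard `RouteLocatorBuilder` API; the route ids, paths and URIs are illustrative, loosely mirroring the Zuul routes used elsewhere in these samples): a route is selected by a predicate on the incoming request and can apply filters before forwarding to its URI.

```java
import org.springframework.cloud.gateway.route.RouteLocator;
import org.springframework.cloud.gateway.route.builder.RouteLocatorBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class GatewayRoutes {

    @Bean
    public RouteLocator routes(RouteLocatorBuilder builder) {
        return builder.routes()
                // predicate: match /nacos/**; filter: strip the first path segment;
                // uri: forward to the "nacos-provider" service via the load balancer
                .route("nacos-route", r -> r.path("/nacos/**")
                        .filters(f -> f.stripPrefix(1))
                        .uri("lb://nacos-provider"))
                // a plain HTTP route without filters
                .route("httpbin-route", r -> r.path("/http/**")
                        .uri("http://httpbin.org"))
                .build();
    }
}
```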
|
8861377de284c800a892173ae42ea4fe71b709dc
|
[
"SQL",
"Markdown",
"Maven POM",
"INI",
"Java",
"Shell"
] | 142
|
INI
|
xuxiaowei-com-cn/deep-in-spring-cloud-samples
|
0b6f12b592f0d445db6f1136877d9fdcbad1ef4b
|
9f652e402166e98c6525dd1ee338a65c20f7af13
|
refs/heads/master
|
<file_sep>package com.zhuinden.examplegithubclient.util;
import com.zhuinden.examplegithubclient.application.BoltsExecutors;
import java.util.concurrent.Executor;
/**
* Created by Zhuinden on 2016.12.21..
*/
public class BoltsConfig {
private BoltsConfig() {
}
public static void configureMocks() {
Executor executor = Runnable::run;
BoltsExecutors.UI_THREAD = executor;
BoltsExecutors.BACKGROUND_THREAD = executor;
}
}
<file_sep>package com.zhuinden.examplegithubclient.application.injection.config;
import com.zhuinden.examplegithubclient.application.injection.modules.InteractorModule;
import com.zhuinden.examplegithubclient.application.injection.modules.OkHttpModule;
import com.zhuinden.examplegithubclient.application.injection.modules.RetrofitModule;
import com.zhuinden.examplegithubclient.application.injection.modules.ServiceModule;
import com.zhuinden.examplegithubclient.presentation.activity.main.DaggerMainComponent;
import com.zhuinden.examplegithubclient.presentation.activity.main.MainComponent;
/**
* Created by Zhuinden on 2016.12.21..
*/
public class MainComponentConfig {
private MainComponentConfig() {
}
static OkHttpModule okHttpModule = new OkHttpModule();
static RetrofitModule retrofitModule = new RetrofitModule();
static InteractorModule interactorModule = new InteractorModule();
static ServiceModule serviceModule = new ServiceModule();
public static MainComponent create() {
return DaggerMainComponent.builder() //
.okHttpModule(okHttpModule) //
.retrofitModule(retrofitModule) //
.interactorModule(interactorModule) //
.serviceModule(serviceModule) //
.build(); //
}
}
<file_sep>package com.zhuinden.examplegithubclient.domain.interactor.impl;
import com.zhuinden.examplegithubclient.application.BoltsExecutors;
import com.zhuinden.examplegithubclient.application.injection.ActivityScope;
import com.zhuinden.examplegithubclient.domain.interactor.LoginInteractor;
import javax.inject.Inject;
import bolts.Task;
/**
* Created by Zhuinden on 2016.12.18..
*/
@ActivityScope
public class LoginInteractorImpl
implements LoginInteractor {
@Inject
public LoginInteractorImpl() {
}
@Override
public Task<Boolean> login(String username, String password) {
return Task.call(() -> {
Thread.sleep(3250); // simulate login
return true;
}, BoltsExecutors.BACKGROUND_THREAD);
}
}
<file_sep>package com.zhuinden.examplegithubclient.domain.interactor;
import bolts.Task;
/**
* Created by Zhuinden on 2016.12.18..
*/
public interface LoginInteractor {
Task<Boolean> login(String username, String password);
}
<file_sep>package com.zhuinden.examplegithubclient.application.injection.modules;
import com.github.aurae.retrofit2.LoganSquareConverterFactory;
import com.zhuinden.examplegithubclient.application.BoltsExecutors;
import com.zhuinden.examplegithubclient.application.injection.ActivityScope;
import com.zhuinden.examplegithubclient.domain.service.retrofit.RetrofitGithubService;
import com.zhuinden.examplegithubclient.util.bolts.BoltsCallAdapterFactory;
import dagger.Module;
import dagger.Provides;
import okhttp3.OkHttpClient;
import retrofit2.Retrofit;
/**
* Created by Owner on 2016.12.10.
*/
@Module
public class RetrofitModule {
@Provides
@ActivityScope
Retrofit retrofit(OkHttpClient okHttpClient, BoltsCallAdapterFactory boltsCallAdapterFactory) {
return new Retrofit.Builder().addConverterFactory(LoganSquareConverterFactory.create()) //
.baseUrl("https://api.github.com/") //
.addCallAdapterFactory(boltsCallAdapterFactory)
.client(okHttpClient) //
.build();
}
@Provides
@ActivityScope
BoltsCallAdapterFactory boltsCallAdapterFactory() {
return BoltsCallAdapterFactory.createWithExecutor(BoltsExecutors.BACKGROUND_THREAD);
}
@Provides
@ActivityScope
RetrofitGithubService retrofitGithubService(Retrofit retrofit) {
return retrofit.create(RetrofitGithubService.class);
}
}
|
dfbb30bee1b30c3a6520d2fd3e412f24637a5a87
|
[
"Java"
] | 5
|
Java
|
matrixxun/flowless
|
a315f00c13c5488ff5f34c80a35ce7469e4c64ab
|
e57658421345ae3f599c2117e750441def676192
|
refs/heads/master
|
<repo_name>leilajacob/leila-jacob<file_sep>/app/models/storefront.rb
class Storefront < ActiveRecord::Base
end
<file_sep>/app/models/order.rb
class Order < ActiveRecord::Base
has_many :items
belongs_to :user
serialize :order_items, Hash
end
<file_sep>/app/models/item.rb
class Item < ActiveRecord::Base
belongs_to :art
belongs_to :order
end
<file_sep>/app/models/category.rb
class Category < ActiveRecord::Base
has_many :arts
end
<file_sep>/test/controllers/front_controller_test.rb
require 'test_helper'
class FrontControllerTest < ActionController::TestCase
test "should get all_art" do
get :all_art
assert_response :success
end
test "should get by_size" do
get :by_size
assert_response :success
end
end
<file_sep>/app/views/arts/index.json.jbuilder
json.array!(@arts) do |art|
json.extract! art, :id, :name, :price, :quantity, :description, :category_id
json.url art_url(art, format: :json)
end
<file_sep>/app/controllers/front_controller.rb
class FrontController < ApplicationController
def all_art
@arts = Art.all
end
def by_size
@art = Art.where(category_id: params[:cat_id])
@category = Category.find(params[:cat_id])
end
end
<file_sep>/app/controllers/cart_controller.rb
class CartController < ApplicationController
before_filter :authenticate_user!, :except => [:add_to_cart, :view_order]
def add_to_cart
art = Art.find(params[:product_id])
if art.quantity < params[:qty].to_i
redirect_to art, notice: "Not enough quantity in stock."
else
item = Item.new
item.product_id = params[:product_id].to_i
item.quantity = params[:qty]
item.save
redirect_to view_order_path
end
end
def view_order
@items = Item.all
end
def checkout
@items = Item.all
@order = Order.new
@order.user_id = current_user.id
sum = 0
@items.each do |item|
@order.order_items[item.art_id] = item.quantity
sum += item.line_item_total
end
@order.subtotal = sum
@order.sales_tax = sum * 0.07
@order.grand_total = @order.subtotal + @order.sales_tax
@order.save
@items.each do |item|
item.art.quantity -= item.quantity # reduce the stock by the quantity ordered
item.art.save
end
Item.destroy_all
end
def order_complete
@order = Order.find(params[:order_id])
@amount = (@order.grand_total.to_f.round(2) * 100).to_i
customer = Stripe::Customer.create(
:email => current_user.email,
:card => params[:stripeToken]
)
charge = Stripe::Charge.create(
:customer => customer.id,
:amount => @amount,
:description => 'Rails Stripe customer',
:currency => 'usd'
)
rescue Stripe::CardError => e
flash[:error] = e.message
redirect_to charges_path
end
end
|
c2d203ae7a25ba5ae70824ef9df6ad4658679f0d
|
[
"Ruby"
] | 8
|
Ruby
|
leilajacob/leila-jacob
|
c6e7b191eaf34a204c23adc8756c519c2d6cbaa2
|
5497e8804b9ffdcd9c32a9a8dba387363194dd25
|
refs/heads/master
|
<file_sep>import { Injectable, InternalServerErrorException, NotFoundException } from '@nestjs/common';
import { InjectRepository } from '@nestjs/typeorm';
import { Repository } from 'typeorm';
import { User } from './user.entity';
import { CreateUserInput } from './dto/create-user.input';
import { UpdateUserInput } from './dto/update-user.input';
import * as jwt from 'jsonwebtoken';
import { jwtConstants } from 'src/auth/constants';
@Injectable()
export class UserService {
constructor(
@InjectRepository(User)
private readonly userRepository: Repository<User>
) { }
async findAll(): Promise<User[]> {
const users = await this.userRepository.find();
return users;
}
async findById(id: string): Promise<User> {
const user = await this.userRepository.findOne(id);
if (!user) {
throw new NotFoundException('User not found')
}
return user;
}
async findByEmail(email: string): Promise<User> {
const user = await this.userRepository.findOne({
where: {
email,
},
});
if (!user) {
throw new NotFoundException('User not found')
}
return user;
}
async create(data: CreateUserInput): Promise<User> {
const user = this.userRepository.create(data);
const userSaved = await this.userRepository.save(user);
if (!userSaved) {
throw new InternalServerErrorException('Problem creating a user');
}
return userSaved;
}
async update(id: string, data: UpdateUserInput): Promise<User> {
const user = await this.findById(id);
await this.userRepository.update(user, { ...data });
const userUpdated = this.userRepository.create({ ...user, ...data });
return userUpdated;
}
async delete(id: string): Promise<boolean> {
const user = await this.findById(id);
const deleted = await this.userRepository.delete(user);
return deleted ? true : false;
}
}
<file_sep>import { Injectable } from '@nestjs/common';
import { JwtService } from '@nestjs/jwt';
import { AuthParamsInput } from './dto/auth-params.input';
@Injectable()
export class AuthService {
constructor(
private readonly jwtService: JwtService
) { }
async login(user: AuthParamsInput) {
const payload = { email: user.email, password: <PASSWORD> };
return this.jwtService.sign(payload)
}
}
<file_sep>{
users {
id,
name,
email
}
}
{
user(
id: "1"
) {
name,
email
}
}
mutation{
createUser(data: {
name: "",
email: <EMAIL>"
})
{
id
name
email
}
}
mutation{
updateUser(
id: "2",
data: {
name: "danilo--123"
}
) {
id
email
}
}
mutation{
deleteUser(
id: "2"
)
}
query {
login(
auth: {
email: "<EMAIL>",
password: "<PASSWORD>"
}
)
}<file_sep>import { Resolver, Query, Args, Mutation } from '@nestjs/graphql';
import { AuthService } from './auth.service';
import { AuthParamsInput } from './dto/auth-params.input';
import { UserService } from 'src/user/user.service';
import { UnauthorizedException } from '@nestjs/common';
@Resolver('Auth')
export class AuthResolver {
constructor(
private readonly authService: AuthService,
private readonly userService: UserService
) { }
@Query(() => String)
async login(
@Args('params') { email, password }: AuthParamsInput
): Promise<string> {
try {
await this.userService.findByEmail(email);
// const valid = await bcryptjs.compare(password, user.password);
// if (!valid) {
// throw new UnauthorizedException('Email or password incorrect');
// }
// const payload = { email: user.email, password: <PASSWORD> };
// const jwt = this.jwt.sign(payload);
// res.cookie('token', jwt, { httpOnly: true });
return await this.authService.login({ email, password })
} catch (error) {
throw new UnauthorizedException('Email or password incorrect');
}
}
}
<file_sep>import { InputType, Field } from "@nestjs/graphql";
import { IsString, IsNotEmpty, IsEmail, IsOptional, MinLength, MaxLength } from "class-validator"
@InputType()
export class AuthParamsInput {
@Field()
@IsEmail()
@IsNotEmpty({ message: 'The email field cannot be empty' })
email: string;
@Field()
@MinLength(4)
@MaxLength(20)
@IsNotEmpty({ message: 'The password field cannot be empty' })
password: string;
}<file_sep>import { InputType } from "@nestjs/graphql";
import { IsString, IsNotEmpty, IsEmail, MaxLength, MinLength } from "class-validator"
import { CreateDateColumn, UpdateDateColumn } from "typeorm";
@InputType()
export class CreateUserInput {
@IsString()
@IsNotEmpty({ message: 'The name field cannot be empty' })
name: string;
@IsEmail()
@IsNotEmpty({ message: 'The email field cannot be empty!' })
email: string;
@MinLength(4)
@MaxLength(20)
password: string
@CreateDateColumn({ name: 'created_At' })
createdAt: Date
@UpdateDateColumn({ name: 'updated_At' })
updatedAt: Date
}<file_sep>import { InputType } from "@nestjs/graphql";
import { IsString, IsNotEmpty, IsEmail, IsOptional, MinLength, MaxLength } from "class-validator"
@InputType()
export class UpdateUserInput {
@IsString()
@IsNotEmpty({ message: 'The name field cannot be empty' })
@IsOptional()
name?: string;
@IsEmail()
@IsNotEmpty({ message: 'The email field cannot be empty!' })
@IsOptional()
email?: string;
@MinLength(4)
@MaxLength(20)
@IsNotEmpty({ message: 'The password field cannot be empty!' })
@IsOptional()
password?: string
}
|
9be6d47ef2ef19a7d1d60c5180740ea9abf1e88c
|
[
"Markdown",
"TypeScript"
] | 7
|
TypeScript
|
catapandanilo/nestjs-api-gql
|
52a0f7266d70e7d2ebd2d8192ef3181f0aa85c98
|
e84694aa67d3d2cf53ae04382a3d80d39de02b54
|
refs/heads/master
|
<repo_name>AMAN-MAHTO/Snake_Game<file_sep>/README.md
# Snake_Game



<file_sep>/snake/snake.py
# This is the code for a snake game.
# Several interesting pieces of code are included below but left commented out,
# so if you want to explore them, just uncomment them.
# If you use the music code, remember that your music file and this file must be in the same folder.
import pygame
import random
import os
import sys
current_path=os.path.dirname(sys.argv[0]).replace('/','\\\\')
pygame.mixer.init()
pygame.init()
# gamewindow
screen_width = 600
screen_height = 500
gamewindow=pygame.display.set_mode((screen_width,screen_height))
# game title
pygame.display.set_caption('Snake.Aman_code')
# image
bgimg= pygame.image.load(f"{current_path}\\bgi.png")
bgimg= pygame.transform.scale(bgimg, (screen_width, screen_height)).convert_alpha()
# colours
red = (255, 0, 0)
white = (255, 255, 255)
green = (0, 255, 0)
black = (0, 0, 0)
blue = (0,0,200)
clock = pygame.time.Clock()
# score font
font = pygame.font.SysFont(None, 55)
def score_screen(text, colour, x, y ):
score_text = font.render(text, True, colour)
gamewindow.blit(score_text, [x,y])
def plot_snk(gamewindow, color, snk_list ,snake_size,):
for x,y in snk_list:
pygame.draw.rect(gamewindow, color, [x, y, snake_size, snake_size])
def welcome():
exit_game = False
pygame.mixer.music.load(f"{current_path}\\music.mp3")
pygame.mixer.music.play()
while not exit_game:
if pygame.mixer.music.get_busy()==0:
pygame.mixer.music.load(f"{current_path}\\music.mp3")
pygame.mixer.music.play()
gamewindow.blit(bgimg, (0, 0))
score_screen("Welcome To Snakes", red, 100, 175)
score_screen("Press Enter To Play", red, 100, 225)
for event in pygame.event.get():
if event.type == pygame.QUIT:
exit_game= True
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_RETURN:
gameloop()
clock.tick(60)
pygame.display.update()
#game loop
def gameloop():
exit_game = False
game_over = False
snake_x = 45
snake_y = 55
snake_size = 20
food_x = random.randint(20, screen_width / 2)
food_y = random.randint(20, screen_height / 2)
score = 0
velocity_x = 0
velocity_y = 0
snk_list = []
snk_length = 1
if (not os.path.exists("highscore.txt")):
with open("highscore.txt", "w") as f:
f.write("0")
with open("highscore.txt","r") as f:
highscore = f.read()
while not exit_game:
if game_over:
with open("highscore.txt", "w") as f:
f.write(str(highscore))
if pygame.mixer.music.get_busy()==0:
pygame.mixer.music.load(f"{current_path}\\music.mp3")
pygame.mixer.music.play()
gamewindow.blit(bgimg, (0, 0))
score_screen("Game Over!.. ", red, 50, 200)
score_screen("Press Enter to continue",red ,50 ,250)
score_screen("Score: " + str(score), red, 50, 150)
for event in pygame.event.get():
if event.type == pygame.QUIT:
exit_game = True
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_RETURN:
gameloop()
else:
for event in pygame.event.get():
if event.type == pygame.QUIT:
exit_game = True
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_RIGHT:
velocity_x = 4
velocity_y =0
if event.key == pygame.K_LEFT:
velocity_x = -4
velocity_y = 0
if event.key == pygame.K_DOWN:
velocity_y = 4
velocity_x = 0
if event.key == pygame.K_UP:
velocity_y = -4
velocity_x = 0
if abs(snake_x - food_x)<15 and abs(snake_y - food_y)<15:
score += 10
snk_length += 5
food_x = random.randint(20, screen_width/2)
food_y = random.randint(20, screen_height/2)
if pygame.mixer.music.get_busy()==0:
pygame.mixer.music.load(f"{current_path}\\music.mp3")
pygame.mixer.music.play()
if int(highscore) <= score:
highscore = score
snake_y += velocity_y
snake_x += velocity_x
gamewindow.fill(black)
# pygame.mixer.music.load("C:\\Users\\<NAME>\\Desktop\\programing\\files\\game file\\snake\\music2.mp3") # music code for play music during playing game
# pygame.mixer.music.play() # and add your music file name
score_screen("Score: "+str(score)+" Highscore: "+ str(highscore) , white, 5, 5)
pygame.draw.rect(gamewindow, red, [food_x, food_y, snake_size, snake_size] )
#pygame.draw.rect(gamewindow, black, [snake_x, snake_y, snake_size, snake_size])
head = []
head.append(snake_x)
head.append(snake_y)
snk_list.append(head)
if len(snk_list) > snk_length:
del snk_list[0]
if head in snk_list[:-1]:
game_over = True
if pygame.mixer.music.get_busy()==0:
pygame.mixer.music.load(f"{current_path}\\music.mp3")
pygame.mixer.music.play()
plot_snk(gamewindow, green, snk_list, snake_size)
#code to make the boundary act as a solid wall
#if snake_x < 0 or snake_x > screen_width or snake_y < 0 or snake_y > screen_height:
#game_over = True
#pygame.mixer.music.load("music2.mp3") #music code for game over
#pygame.mixer.music.play()
#code to make the boundary wrap around (no walls)
if snake_x < 5:
snake_x = screen_width-5
if snake_x > screen_width-5:
snake_x = 5
if snake_y < 5:
snake_y = screen_height-5
if snake_y > screen_height-5:
snake_y = 5
clock.tick(60)
pygame.display.update()
pygame.quit()
quit()
welcome()
|
6271acc23b3d387154647f4c13c73805ca6cf200
|
[
"Markdown",
"Python"
] | 2
|
Markdown
|
AMAN-MAHTO/Snake_Game
|
efc51461aaa2cbabf1d62af1b67de27cd0061e50
|
c80eec24c576c86faba6a2001a0da57028cdf90c
|
refs/heads/master
|
<file_sep>package com.street;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/**
* @version 1.0
* @author: OldStreetHong
* @date: 2021/7/21 17:35
* @desc:
*/
@SpringBootApplication
public class Admin8883 {
public static void main(String[] args) {
SpringApplication.run(Admin8883.class,args);
}
}
<file_sep>package com.street.pojo;
import com.baomidou.mybatisplus.annotation.TableName;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
* @version 1.0
* @author: zhen
* @date: 2021/7/22 19:35
* @desc:
*/
@Data
@TableName("recruit")
@ApiModel(value = "招聘实体类")
@AllArgsConstructor
@NoArgsConstructor
public class Recruit {
@ApiModelProperty(value = "主键Id")
private int recruit_id;
@ApiModelProperty(value = "岗位id")
private String recruit_post_id;
@ApiModelProperty(value = "企业类型")
private String recruit_enterprise_type;
@ApiModelProperty(value = "导入时间")
private String create_time;
@ApiModelProperty(value = "更新时间")
private String update_time;
}
<file_sep>package com.street;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/**
* @version 1.0
* @author: OldStreetHong
* @date: 2021/7/21 09:25
* @desc:
*/
@SpringBootApplication
public class ClientProvider8881 {
public static void main(String[] args) {
SpringApplication.run(ClientProvider8881.class,args);
}
}
<file_sep>package com.street;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
/**
* @version 1.0
* @author: OldStreetHong
* @date: 2021/7/21 17:32
* @desc:
*/
@SpringBootApplication
public class SchoolProvider8884 {
public static void main(String[] args) {
SpringApplication.run(SchoolProvider8884.class,args);
}
}
<file_sep>package com.street.pojo;
import com.baomidou.mybatisplus.annotation.TableName;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import lombok.experimental.Accessors;
/**
* @version 1.0
* @author: zhen
* @date: 2021/7/22 19:23
* @desc:
*/
@Data
@TableName("admin")
@ApiModel(value = "管理实体类")
@AllArgsConstructor
@NoArgsConstructor
@Accessors(chain = true) // enables chained setters
public class Admin {
@ApiModelProperty(value = "主键Id")
private int admin_id;
@ApiModelProperty(value = "账号")
private String university_name;
@ApiModelProperty(value = "密码(加密)")
private String admin_account;
@ApiModelProperty(value = "用户名")
private String admin_password;
@ApiModelProperty(value = "用户名")
private String admin_username;
@ApiModelProperty(value = "权限(添加、删除)")
private String admin_jurisdiction;
@ApiModelProperty(value = "导入时间")
private String create_time;
@ApiModelProperty(value = "更新时间")
private String update_time;
}
<file_sep>package com.street.dao;
/**
* @version 1.0
* @author: OldStreetHong
* @date: 2021/7/21 09:28
* @desc:
*/
public class UserDao {
}
|
b35220fe38aa4346ddb2ead059bc0bfaa12a070a
|
[
"Java"
] | 6
|
Java
|
studystage4/talent-ecosystem
|
5956bc17489b0bc955df693fe3d0c639be5ce54c
|
35d35674b84982a92d2b6c3ee43795e7f2b66f30
|
refs/heads/master
|
<repo_name>Enrqrdz/Cave<file_sep>/Cave/Assets/Scripts/PlayerMovement.cs
using System;
using UnityEngine;
public class PlayerMovement : MonoBehaviour
{
private Grid _grid;
private Grid Grid
{
get
{
if (_grid == null)
{
_grid = GameObject.Find("Grid").GetComponent<Grid>();
}
return _grid;
}
}
public Action MovedEvent;
private float _movementSize => Grid.GetSize();
private Vector3 Forward => transform.position + _movementSize * Vector3.forward;
private Vector3 Back => transform.position + _movementSize * Vector3.back;
private Vector3 Left => transform.position + _movementSize * Vector3.left;
private Vector3 Right => transform.position + _movementSize * Vector3.right;
private void Update()
{
if (InputManager.Forward)
{
Move(Forward);
MovedEvent?.Invoke();
}
if (InputManager.Back)
{
Move(Back);
MovedEvent?.Invoke();
}
if (InputManager.Left)
{
Move(Left);
MovedEvent?.Invoke();
}
if (InputManager.Right)
{
Move(Right);
MovedEvent?.Invoke();
}
}
private void Move(Vector3 newPosition_)
{
if (Grid.Positions.TryGetValue(newPosition_.GetHashCode(), out var newMoveToPosition))
{
transform.position = newMoveToPosition;
}
}
private void OnDestroy()
{
        if (MovedEvent == null) return; // nothing subscribed, nothing to clean up
        foreach (var action in MovedEvent.GetInvocationList())
{
MovedEvent -= (action as Action);
}
}
}
<file_sep>/Cave/Assets/Scripts/Grid.cs
using System.Collections.Generic;
using UnityEngine;
public class Grid : MonoBehaviour
{
[SerializeField] private float _size = 1f;
[SerializeField] private float _height = .5f;
public Dictionary<int,Vector3> Positions = new Dictionary<int, Vector3>();
public float GetSize() => _size;
public float GetHeight() => _height;
public void SnapObjectToGrid(Transform target_)
{
target_.position = GetNearestPointOnGrid(target_.position);
AddObjectToGridPositions(target_);
}
public Vector3 GetNearestPointOnGrid(Vector3 position_)
{
position_ -= transform.position;
int xCount = Mathf.RoundToInt(position_.x / _size);
int yCount = Mathf.RoundToInt(position_.y / _height);
int zCount = Mathf.RoundToInt(position_.z / _size);
Vector3 result = new Vector3(xCount * _size, yCount * _height, zCount * _size);
result += transform.position;
return result;
}
public void AddObjectToGridPositions(Transform object_)
{
var objectPosition = object_.position;
var hashCode = objectPosition.GetHashCode();
Positions[hashCode] = objectPosition;
}
}
<file_sep>/Cave/Assets/Scripts/InputManager.cs
using UnityEngine;
public static class InputManager
{
public static bool Up
{
get
{
if (Input.GetKeyDown(KeyCode.Space))
{
return true;
}
return false;
}
}
public static bool Down
{
get
{
if (Input.GetKeyDown(KeyCode.LeftControl))
{
return true;
}
return false;
}
}
public static bool Forward
{
get
{
if (Input.GetKeyDown(KeyCode.W))
{
return true;
}
return false;
}
}
public static bool Back
{
get
{
if (Input.GetKeyDown(KeyCode.S))
{
return true;
}
return false;
}
}
public static bool Left
{
get
{
if (Input.GetKeyDown(KeyCode.A))
{
return true;
}
return false;
}
}
public static bool Right
{
get
{
if (Input.GetKeyDown(KeyCode.D))
{
return true;
}
return false;
}
}
}
<file_sep>/Cave/Assets/Scripts/SnapToGrid.cs
using UnityEngine;
public class SnapToGrid : MonoBehaviour
{
private Grid _grid;
private Grid Grid
{
get
{
if (_grid == null)
{
_grid = GameObject.Find("Grid").GetComponent<Grid>();
}
return _grid;
}
}
private void OnValidate()
{
Grid.SnapObjectToGrid(transform);
}
private void Awake()
{
Grid.SnapObjectToGrid(transform);
}
}
|
bed2b0eeb57dcb05a0007c6474af34b059a4e101
|
[
"C#"
] | 4
|
C#
|
Enrqrdz/Cave
|
2d79940500d3ca1adec0616c93383e611a7a339e
|
14780ce00e9c8b95cf74c43f3115289a7757313d
|
refs/heads/master
|
<file_sep>apply plugin: 'com.android.application'
apply plugin: 'com.ofg.uptodate'
apply plugin: 'me.tatarka.retrolambda'
android {
compileSdkVersion rootProject.ext.androidCompileSdkVersion
buildToolsVersion rootProject.ext.androidBuildToolsVersion
defaultConfig {
applicationId "com.github.piasy.testunderstand.rx"
minSdkVersion rootProject.ext.minSdkVersion
targetSdkVersion rootProject.ext.targetSdkVersion
versionCode rootProject.ext.releaseVersionCode
versionName rootProject.ext.releaseVersionName
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
testOptions.unitTests.all {
testLogging {
events 'passed', 'skipped', 'failed', 'standardOut', 'standardError'
outputs.upToDateWhen { false }
showStandardStreams = true
}
}
}
dependencies {
compile "com.android.support:appcompat-v7:$rootProject.ext.androidSupportSdkVersion"
compile 'io.reactivex:rxjava:1.1.10'
compile ('io.reactivex:rxandroid:1.2.1') {
exclude module: 'rxjava'
}
testCompile ('com.github.ubiratansoares:rxassertions:0.2.0') {
exclude module: 'rxjava'
}
testCompile 'junit:junit:4.12'
testCompile 'org.mockito:mockito-core:1.10.19'
testCompile 'com.squareup.okhttp3:okhttp:3.4.1'
retrolambdaConfig 'net.orfjackal.retrolambda:retrolambda:2.3.0'
}
<file_sep>// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
repositories {
jcenter()
}
dependencies {
classpath 'com.android.tools.build:gradle:2.2.2'
classpath 'com.ofg:uptodate-gradle-plugin:1.6.2'
classpath 'me.tatarka:gradle-retrolambda:3.2.5'
}
}
allprojects {
repositories {
mavenCentral()
jcenter()
maven { url "https://jitpack.io" }
maven { url "https://oss.sonatype.org/content/repositories/snapshots" }
}
}
ext {
releaseVersionCode = 3
releaseVersionName = '1.1.0'
androidCompileSdkVersion = 24
androidBuildToolsVersion = '24.0.2'
androidSupportSdkVersion = '24.2.1'
minSdkVersion = 16
targetSdkVersion = 24
}
<file_sep>package com.github.piasy.testunderstand.rx;
import java.util.concurrent.TimeUnit;
import org.junit.Before;
import org.junit.Test;
import rx.observers.TestSubscriber;
import rx.schedulers.Schedulers;
import rx.schedulers.Timestamped;
import rx.subjects.PublishSubject;
import rx.subjects.Subject;
/**
* Created by Piasy{github.com/Piasy} on 4/6/16.
*/
public class DebounceTest {
private Subject<Integer, Integer> mSource;
private TestSubscriber<Timestamped<Timestamped<Integer>>> mSubscriber;
@Before
public void setUp() {
mSource = PublishSubject.create();
mSubscriber = new TestSubscriber<>();
}
@Test
public void testDebounce() {
setUp();
mSource.subscribeOn(Schedulers.computation())
.timestamp()
.doOnNext(item -> System.out.println(
"" + item.getValue() + " emitted at " + item.getTimestampMillis()))
.debounce(100, TimeUnit.MILLISECONDS)
.timestamp()
.doOnSubscribe(
() -> System.out.println("subscribed at " + System.currentTimeMillis()))
.subscribe(mSubscriber);
System.out.println("debounce:");
doTest(mSource, mSubscriber);
}
@Test
public void testThrottleFirst() {
setUp();
mSource.subscribeOn(Schedulers.computation())
.timestamp()
.doOnNext(item -> System.out.println(
"" + item.getValue() + " emitted at " + item.getTimestampMillis()))
.throttleFirst(100, TimeUnit.MILLISECONDS)
.timestamp()
.doOnSubscribe(
() -> System.out.println("subscribed at " + System.currentTimeMillis()))
.subscribe(mSubscriber);
System.out.println("throttleFirst:");
doTest(mSource, mSubscriber);
}
@Test
public void testThrottleLast() {
setUp();
mSource.subscribeOn(Schedulers.computation())
.timestamp()
.doOnNext(item -> System.out.println(
"" + item.getValue() + " emitted at " + item.getTimestampMillis()))
.throttleLast(100, TimeUnit.MILLISECONDS)
.timestamp()
.doOnSubscribe(
() -> System.out.println("subscribed at " + System.currentTimeMillis()))
.subscribe(mSubscriber);
System.out.println("throttleLast:");
doTest(mSource, mSubscriber);
}
private void doTest(Subject<Integer, Integer> source,
TestSubscriber<Timestamped<Timestamped<Integer>>> subscriber) {
Utils.sleep(100);
source.onNext(1);
Utils.sleep(30);
source.onNext(2);
Utils.sleep(30);
source.onNext(3);
Utils.sleep(200);
source.onCompleted();
subscriber.awaitTerminalEvent();
for (Timestamped<Timestamped<Integer>> item : subscriber.getOnNextEvents()) {
System.out.println("" + item.getValue().getValue() + ", emitted at " +
item.getValue().getTimestampMillis() + ", received at " +
item.getTimestampMillis());
}
}
}
<file_sep>#!/bin/sh
./gradlew clean :app:test
<file_sep>package com.github.piasy.testunderstand.rx;
import android.util.Log;
import org.junit.Test;
import rx.Observable;
import rx.Subscriber;
import rx.observers.TestSubscriber;
import rx.schedulers.Schedulers;
import static android.content.ContentValues.TAG;
/**
* Created by Piasy{github.com/Piasy} on 7/4/16.
*/
public class ConcatTest {
private static Observable<Integer> local() {
return Observable.just(1, 2, 3);
}
private static Observable<Integer> remote() {
return Observable.error(new RuntimeException("Error"));
}
@Test
public void testConcatWithError() {
TestSubscriber<Integer> subscriber = new TestSubscriber<>(new Subscriber<Integer>() {
@Override
public void onCompleted() {
System.out.println("subscriber.onCompleted #" + Thread.currentThread().getName());
}
@Override
public void onError(Throwable e) {
System.out.println("subscriber.onError #" + Thread.currentThread().getName());
}
@Override
public void onNext(Integer integer) {
System.out.println(
"subscriber.onNext: " + integer + " #" + Thread.currentThread().getName());
}
});
Observable.concat(
local()
.doOnSubscribe(() -> System.out.println(
"local().doOnSubscribe #" + Thread.currentThread().getName()))
.doOnNext(integer -> System.out.println(
"local().doOnNext: " + integer + " #" + Thread.currentThread()
.getName()))
.doOnCompleted(() -> System.out.println(
"local().doOnCompleted #" + Thread.currentThread().getName()))
.doOnUnsubscribe(() -> System.out.println(
"local().doOnUnsubscribe #" + Thread.currentThread().getName()))
.doOnError(e -> System.out.println(
"local().doOnError #" + Thread.currentThread().getName())),
remote()
.doOnSubscribe(() -> System.out.println(
"remote().doOnSubscribe #" + Thread.currentThread().getName()))
.doOnNext(integer -> System.out.println(
"remote().doOnNext: " + integer + " #" + Thread.currentThread()
.getName()))
.doOnCompleted(() -> System.out.println(
"remote().doOnCompleted #" + Thread.currentThread().getName()))
.doOnUnsubscribe(() -> System.out.println(
"remote().doOnUnsubscribe #" + Thread.currentThread().getName()))
.doOnError(e -> System.out.println(
"remote().doOnError #" + Thread.currentThread().getName())))
.doOnSubscribe(() -> System.out.println(
"concat().doOnSubscribe #" + Thread.currentThread().getName()))
.doOnNext(integer -> System.out.println(
"concat().doOnNext: " + integer + " #" + Thread.currentThread().getName()))
.doOnCompleted(() -> System.out.println(
"concat().doOnCompleted #" + Thread.currentThread().getName()))
.doOnUnsubscribe(() -> System.out.println(
"concat().doOnUnsubscribe #" + Thread.currentThread().getName()))
.doOnError(e -> System.out.println(
"concat().doOnError #" + Thread.currentThread().getName()))
.subscribeOn(Schedulers.io())
.observeOn(Schedulers.newThread())
.subscribe(subscriber);
subscriber.awaitTerminalEvent();
subscriber.assertValues(1, 2, 3);
subscriber.assertError(RuntimeException.class);
}
@Test
public void test2() {
Observable<Integer> local = Observable.just(1, 2, 3);
Observable<Integer> remote2 = Observable.create(new Observable.OnSubscribe<Integer>() {
@Override
public void call(Subscriber<? super Integer> subscriber) {
subscriber.onError(new RuntimeException("Boom!!!"));
}
});
Observable.concat(local, remote2)
.subscribeOn(Schedulers.newThread())
.observeOn(Schedulers.io())
.subscribe(new Subscriber<Integer>() {
@Override
public void onCompleted() {
Log.d(TAG, "onCompleted: ");
}
@Override
public void onError(Throwable e) {
Log.e(TAG, "onError: ", e);
}
@Override
public void onNext(Integer integer) {
Log.d(TAG, "onNext: " + integer);
}
});
}
}
<file_sep>package com.github.piasy.testunderstand.rx;
import org.junit.Test;
import rx.Observable;
import rx.Subscriber;
/**
* Created by Piasy{github.com/Piasy} on 16/10/2016.
*/
public class AllTest {
@Test
public void all() {
Observable.<Integer>empty()
.all(integer -> integer % 2 == 0)
.subscribe(new Subscriber<Boolean>() {
@Override
public void onCompleted() {
System.out.println("onCompleted");
}
@Override
public void onError(Throwable e) {
System.out.println("onError");
}
@Override
public void onNext(Boolean aBoolean) {
System.out.println("onNext " + aBoolean);
}
});
}
}
<file_sep>package com.github.piasy.testunderstand.rx;
import java.io.IOException;
import okhttp3.Call;
import okhttp3.Callback;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.Response;
import org.junit.Test;
/**
* Created by Piasy{github.com/Piasy} on 7/5/16.
*/
public class OkHttpTest {
@Test
public void testGet() {
OkHttpClient client = new OkHttpClient();
Request request = new Request.Builder()
.url("https://raw.github.com/square/okhttp/master/README.md")
.build();
client.newCall(request).enqueue(new Callback() {
@Override
public void onFailure(Call call, IOException e) {
}
@Override
public void onResponse(Call call, Response response) throws IOException {
System.out.println(response.body().string());
}
});
try {
Thread.sleep(5000);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
<file_sep>package com.github.piasy.testunderstand.rx;
import java.util.Arrays;
import java.util.List;
import org.junit.Test;
import rx.Observable;
import rx.schedulers.Schedulers;
/**
* Created by Piasy{github.com/Piasy} on 5/22/16.
*/
public class SchedulerTest {
@Test
public void testSubscribeOnTogether() {
Observable.defer(() -> Observable.from(createInts()))
.filter(this::odd)
.map(this::square)
.subscribeOn(Schedulers.io())
.observeOn(Schedulers.computation())
.subscribe(this::print);
Utils.sleep(2000);
}
@Test
public void testSubscribeOnSplit() {
Observable.defer(() -> Observable.from(createInts()))
.subscribeOn(Schedulers.io())
.filter(this::odd)
.map(this::square)
.observeOn(Schedulers.computation())
.subscribe(this::print);
Utils.sleep(2000);
}
@Test
public void testZip() {
Observable<Integer> odd = Observable.defer(() -> Observable.from(createInts()))
.filter(this::odd)
.map(this::square);
Observable<Integer> even = Observable.defer(() -> Observable.from(createInts()))
.filter(this::even)
.map(this::square);
Observable.zip(odd, even, this::add)
.map(this::triple)
.subscribeOn(Schedulers.io())
.observeOn(Schedulers.computation())
.subscribe(this::print);
Utils.sleep(2000);
}
@Test
public void testZip2() {
Observable<Integer> odd = Observable.defer(() -> Observable.from(createInts()))
.filter(this::odd)
.map(this::square)
.subscribeOn(Schedulers.newThread());
Observable<Integer> even = Observable.defer(() -> Observable.from(createInts()))
.filter(this::even)
.map(this::square);
Observable.zip(odd, even, this::add)
.map(this::triple)
.subscribeOn(Schedulers.io())
.observeOn(Schedulers.computation())
.subscribe(this::print);
Utils.sleep(2000);
}
@Test
public void testZip3() {
Observable<Integer> odd = Observable.defer(() -> Observable.from(createInts()))
.filter(this::odd)
.map(this::square)
.subscribeOn(Schedulers.io());
Observable<Integer> even = Observable.defer(() -> Observable.from(createInts()))
.filter(this::even)
.map(this::square)
.subscribeOn(Schedulers.io());
Observable.zip(odd, even, this::add)
.map(this::triple)
.observeOn(Schedulers.computation())
.subscribe(this::print);
Utils.sleep(2000);
}
@Test
public void testZip4() {
Observable<Integer> odd = Observable
.<Integer>create(subscriber -> {
logThread("create 1");
subscriber.onNext(1);
subscriber.onCompleted();
});
Observable<Integer> even = Observable
.<Integer>create(subscriber -> {
logThread("create 2");
subscriber.onNext(2);
subscriber.onCompleted();
});
Observable.zip(odd,
even.observeOn(Schedulers.computation()),
this::add)
.observeOn(Schedulers.io())
.map(this::triple)
.subscribeOn(Schedulers.io())
.observeOn(Schedulers.computation())
.subscribe(this::print);
Utils.sleep(2000);
}
@Test
public void testCreateAsync() {
Observable
.<Integer>create(subscriber -> {
logThread("OnSubscribe#call");
new AsyncEmitter(value -> {
logThread("emit");
subscriber.onNext(value);
subscriber.onCompleted();
}).asyncEmit();
})
.filter(this::odd)
.observeOn(Schedulers.io())
.map(this::triple)
.subscribeOn(Schedulers.io())
.observeOn(Schedulers.computation())
.subscribe(this::print);
Utils.sleep(2000);
}
private List<Integer> createInts() {
System.out.println("createInts from " + Thread.currentThread().getName());
return Arrays.asList(1, 2, 3, 4, 5);
}
private boolean odd(Integer i) {
System.out.println("odd " + i + " from " + Thread.currentThread().getName());
return i % 2 == 1;
}
private boolean even(Integer i) {
System.out.println("even " + i + " from " + Thread.currentThread().getName());
return i % 2 == 0;
}
private int square(Integer i) {
System.out.println("square " + i + " from " + Thread.currentThread().getName());
return i * i;
}
private int triple(Integer i) {
System.out.println("triple " + i + " from " + Thread.currentThread().getName());
return i * 3;
}
private int add(Integer i1, Integer i2) {
System.out.println(
"add " + i1 + " and " + i2 + " from " + Thread.currentThread().getName());
return i1 + i2;
}
private void print(Integer i) {
System.out.println("print " + i + " from " + Thread.currentThread().getName());
}
private void logThread(String message) {
System.out.println(message + " from " + Thread.currentThread().getName());
}
interface Receiver {
void onValue(int value);
}
private static class AsyncEmitter {
private final Receiver mReceiver;
private AsyncEmitter(Receiver receiver) {
mReceiver = receiver;
}
void asyncEmit() {
new Thread(() -> {
mReceiver.onValue(1);
}, "AsyncEmitter emit thread").start();
}
}
}
<file_sep># TestUnderstandRx
Understanding Rx with test cases [](https://travis-ci.org/Piasy/TestUnderstandRx)
|
173a4fb1241e688028a09fe50a1ac4b94a538918
|
[
"Markdown",
"Java",
"Shell",
"Gradle"
] | 9
|
Gradle
|
Piasy/TestUnderstandRx
|
edb9bc29658d78fb8324630ac908a4dababaffa7
|
ac7ac915d6d68a6bfa6acd191cc7368d2721efb3
|
refs/heads/master
|
<repo_name>MaksZhukov/Tasks<file_sep>/linear-unfold.js
function linearUnfold(func = required(), initialValue = required()) {
if (typeof func !== 'function') {
throw new Error('First argument is not a function');
}
let arr = [],
state = initialValue,
value;
while (state !== null) {
        [state, value] = func(state); // pass the current state, not the initial value
arr.push(value);
}
return arr;
}
function funcWithRandomValue(state) {
let value = Math.floor(Math.random() * 10);
if (value >= 7) {
state = null;
}
return [
state,
value
]
}<file_sep>/linear-fold.js
function linearFold(arr = required(), callback = required(), initValue) {
const countCallbackArgs = 4;
if (!Array.isArray(arr)) {
        throw new Error('First argument is not an array!');
}
for (let value of arr) {
if (!Number.isInteger(value)) {
throw new Error('One of element the array is not a number');
}
}
if (typeof callback !== 'function') {
        throw new Error('Second argument is not a function');
}
if (callback.length > countCallbackArgs) {
throw new Error('Function callback has to accept not more 4 parameters');
}
if (initValue) {
if (typeof initValue !== 'object') {
throw new Error('initValue is not an object');
}
}
let prev = initValue;
for (let i = 0; i < arr.length; i++) {
prev = callback(prev, arr[i], i, arr);
}
return prev;
}
function sumPrevWithCurr(prev, curr, index, arr) {
return prev+curr;
}<file_sep>/helpers.js
function required() {
throw new Error('Required!');
}<file_sep>/memorization.js
function sum(a, b, c) {
return a + b + c;
}
function memorization(func = required()) {
if (typeof func !== 'function') {
throw new Error('First argument is not a function');
}
let cache = {};
return (...args)=>{
for (let number of args) {
if (!Number.isInteger(number)) {
throw new Error('One of the arguments is not a number');
}
}
let key = args.toString();
if (key in cache) {
return cache[key];
}
return cache[key] = func(...args);
};
}<file_sep>/currying.js
function sum(a, b, c) {
return a + b + c;
}
function curry(func = required()) {
if (typeof func !== 'function') {
throw new Error('First argument is not a function');
}
function nest(N, args) {
return (...xs) => {
            for (const x of xs) {
                if (!Number.isInteger(x)) {
                    throw new Error('Argument is not an integer');
                }
            }
if (N - xs.length === 0) {
return func(...args, ...xs);
}
return nest(N - xs.length, [...args, ...xs]);
};
}
return nest(func.length, []);
}
<file_sep>/partial-application.js
function sum(a, b, c) {
return a + b + c;
}
function partialApplication(func = required(), ...paArgs) {
if (typeof func !== 'function') {
throw new Error('First argument is not a function');
}
for (let number of paArgs) {
if (!Number.isInteger(number)) {
throw new Error('One of the arguments is not a number');
}
}
return (...cfArgs) => {
        return func(...cfArgs, ...paArgs);
}
}<file_sep>/lazy-evaluation.js
function sum(a, b, c) {
return a + b + c;
}
function lazyEvaluation(func = required(), delay = required()) {
if (typeof func !== 'function') {
throw new Error('First argument is not a function');
}
if (!Number.isInteger(delay)) {
throw new Error('Second argument is not a number');
}
return (...cfArgs) => {
setTimeout(() => {
func(...cfArgs)
}, delay);
}
}
|
c683bbb61eae278ffc2a714f3d6e7f105a8080c9
|
[
"JavaScript"
] | 7
|
JavaScript
|
MaksZhukov/Tasks
|
16938500c8fd9311bc3db36645eab29524f0e593
|
e59752ad7ca1ae121337e5e401b0ee4f27452f98
|
refs/heads/master
|
<file_sep>import React,{useContext,useState} from "react";
import {View,Text,StyleSheet,Button,TextInput} from "react-native";
import {Context} from "../context/BlogContext";
const EditScreen = ({navigation}) => {
const {state,editPost} = useContext(Context);
const blogPost = state.find((post)=>post.id===navigation.getParam("id"));
const [title,setTitle] = useState(blogPost.title);
const [content,setContent] = useState(blogPost.content);
return (
<View style={styles.layout}>
<Text style={styles.label}>Title</Text>
<TextInput
style={styles.input}
value={title}
onChangeText={text => setTitle(text)}
></TextInput>
<Text style={styles.label}>Content</Text>
<TextInput
style={styles.input}
value={content}
multiline={true}
onChangeText={text => setContent(text)}
></TextInput>
<Button
title="Save Blog Post"
onPress={() => {
editPost(blogPost.id,title, content);
navigation.navigate("Index");
}}
/>
</View>
)
}
const styles = StyleSheet.create({
layout: {
margin: 2,
padding: 3
},
input: {
fontSize: 18,
borderWidth: 1,
borderColor: "black",
marginBottom: 10
},
label: {
fontSize: 20,
fontWeight: "bold",
marginBottom: 2
}
});
export default EditScreen;<file_sep>import React, { useReducer } from "react";
import createDataContext from "./createDataContext";
const blogReducer = (state, action) => {
switch (action.type) {
case "DELETE_BLOG":
return [...state.filter(blogPost => blogPost.id !== action.payload)];
case "ADD_BLOG":
return [
...state,
{
id: Math.floor(Math.random() * 9999),
title: action.payload.title,
content: action.payload.content
}
];
case "EDIT_BLOG":
return state.map(post => {
return post.id === action.payload.id ? action.payload : post;
});
default:
return state;
}
};
const addBlogPost = dispatch => {
return (title, content) => {
dispatch({ type: "ADD_BLOG", payload: { title, content } });
};
};
const editPost = dispatch => {
return (id, title, content) => {
dispatch({ type: "EDIT_BLOG", payload: { id, title, content } });
};
};
const deletePost = dispatch => {
return id => {
dispatch({ type: "DELETE_BLOG", payload: id });
};
};
export const { Context, Provider } = createDataContext(
blogReducer,
{ addBlogPost, deletePost, editPost },
[{ title: "TEST TITLE", content: "TEST CONTENT", id: 1 }]
);
<file_sep>import React from "react";
import MapView,{Polyline} from "react-native-maps";
import { Text, StyleSheet } from "react-native";
const Map = () => {
let points = [];
for(let i=0;i<=20;i++) {
points.push({
latitude: 20.5937+i*0.01,
            longitude: 78.9629+i*0.01
})
}
return (
<MapView
style={{ height: 300 }}
initialRegion={{
latitude: 20.5937,
longitude: 78.9629,
latitudeDelta: 0.01,
longitudeDelta: 0.01
}}
>
<Polyline coordinates={points}/>
</MapView>
);
};
const styles = StyleSheet.create({});
export default Map;
|
0361c3ace3b7814e722b44a97e73089c0f52bba2
|
[
"JavaScript"
] | 3
|
JavaScript
|
anmol-gupta/React-Native-Projects
|
e8b95fe80caec187e793899ff456867fba8db6d7
|
6601e075ae9e5a106b99ae8a9d22f7f20b41d0e0
|
refs/heads/master
|
<file_sep>package readinglist;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.stereotype.Controller;
import org.springframework.ui.Model;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import java.util.List;
/**
* Created by Bijon on 3/30/2017.
*/
@Slf4j
@RequestMapping("/")
@Controller
public class ReadingListController {
@Autowired
private ReadingListRepository readingListRepository;
@Autowired
private ReaderRepository readerRepository;
@RequestMapping(value = "/{reader}", method = RequestMethod.GET)
public String readersBooks(@PathVariable("reader") String reader, Model model) {
log.info("something,{}",reader);
List<Book> readingList = readingListRepository.findByReader(reader);
if (readingList != null) {
model.addAttribute("books", readingList);
}
return "readingList";
}
@RequestMapping(value = "/{reader}", method = RequestMethod.POST)
public String addToReadingList(@PathVariable("reader") String reader, Book book) {
book.setReader(reader);
readingListRepository.save(book);
return "redirect:/{reader}";
}
@RequestMapping(value = "/{username}/{password}", method = RequestMethod.GET)
public String addUser(@PathVariable("username") String username, @PathVariable("password") String password) {
Reader reader = new Reader();
reader.setPassword(<PASSWORD>);
reader.setUsername(username);
readerRepository.saveAndFlush(reader);
System.out.println("saving data");
return "redirect:/login";
}
@RequestMapping(value = "login-success", method = RequestMethod.GET)
public String redirectUser() {
String userName = SecurityContextHolder.getContext().getAuthentication().getName();
return "redirect:/".concat(userName);
}
}
<file_sep>package readinglist;
import org.springframework.data.jpa.repository.JpaRepository;
/**
* Created by Bijon on 3/30/2017.
*/
public interface ReaderRepository extends JpaRepository<Reader,String> {
}
|
decf099ddbc573f44ddcd5d17cf1fef18557bcdd
|
[
"Java"
] | 2
|
Java
|
bejon028/Sping-Boot-in-Action
|
88ed16444d3e5a85359e55cbc4b1c89f3a2ce8c1
|
5513b596514b332d16c26dcc78065f67aa8ddf8b
|
refs/heads/master
|
<file_sep>using System;
using CoreGraphics;
using UIKit;
namespace XamIOSGestosToque
{
public partial class ViewController : UIViewController
{
nfloat rotacion = 0;
nfloat coordenadaX = 0;
nfloat coordenadaY = 0;
bool Toque;
UIRotationGestureRecognizer GestoRotar;
UIPanGestureRecognizer GestoMover;
UIAlertController Alerta;
protected ViewController(IntPtr handle) : base(handle)
{
// Note: this .ctor should not contain any initialization logic.
}
public override void ViewDidLoad()
{
base.ViewDidLoad();
// Perform any additional setup after loading the view, typically from a nib.
Imagen.UserInteractionEnabled = true;
var GestoToque = new UITapGestureRecognizer(Tocando);
Imagen.AddGestureRecognizer(GestoToque);
GestoMover = new UIPanGestureRecognizer(() =>
{
if ((GestoMover.State == UIGestureRecognizerState.Began ||
GestoMover.State == UIGestureRecognizerState.Changed)
&& (GestoMover.NumberOfTouches == 1))
{
var p0 = GestoMover.LocationInView(View);
if (coordenadaX == 0)
coordenadaX = p0.X - Imagen.Center.X;
if (coordenadaY == 0)
coordenadaY = p0.Y - Imagen.Center.Y;
var p1 = new CGPoint(p0.X - coordenadaX, p0.Y - coordenadaY);
Imagen.Center = p1;
}
else
{
coordenadaX = 0;
coordenadaY = 0;
}
});
GestoRotar = new UIRotationGestureRecognizer(()=>
{
if((GestoRotar.State == UIGestureRecognizerState.Began ||
GestoRotar.State == UIGestureRecognizerState.Changed)
&& (GestoRotar.NumberOfTouches == 2))
{
Imagen.Transform = CGAffineTransform.MakeRotation(GestoRotar.Rotation + rotacion);
}else if(GestoRotar.State == UIGestureRecognizerState.Ended)
{
rotacion = GestoRotar.Rotation;
}
});
Imagen.AddGestureRecognizer(GestoMover);
Imagen.AddGestureRecognizer(GestoRotar);
}
private void Tocando(UITapGestureRecognizer toque)
{
// throw new NotImplementedException();
if(!Toque)
{
toque.View.Transform *= CGAffineTransform.MakeRotation((float)
Math.PI);
Toque = true;
Alerta = UIAlertController.Create("Imagen Tocada", "Imagen Girando",
UIAlertControllerStyle.Alert);
Alerta.AddAction(UIAlertAction.Create("Aceptar",
UIAlertActionStyle.Default, null));
PresentViewController(Alerta,true,null);
}else{
toque.View.Transform *= CGAffineTransform.MakeRotation((float)
-Math.PI);
Toque = false;
Alerta = UIAlertController.Create("Imagen Regresando", "Imagen Regresando",
UIAlertControllerStyle.Alert);
Alerta.AddAction(UIAlertAction.Create("Aceptar",
UIAlertActionStyle.Default, null));
PresentViewController(Alerta, true, null);
}
}
public override void DidReceiveMemoryWarning()
{
base.DidReceiveMemoryWarning();
// Release any cached data, images, etc that aren't in use.
}
}
}
|
8d3ba32f3ae7d932f8edbe0b941380b1506dbe9c
|
[
"C#"
] | 1
|
C#
|
korbek99/XamIOSGestosToque
|
c23cf573c027a15318f99d19cc03c0a5e4b8ef79
|
1f2a510114ede9539e15832ffe5b400f4277a003
|
refs/heads/master
|
<file_sep>/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package operadores;
import java.util.Scanner;
/**
*
* @author Murilo
*/
public class TelaCalc extends javax.swing.JFrame {
/**
* Creates new form TelaSoma
*/
public TelaCalc() {
initComponents();
}
/**
* This method is called from within the constructor to initialize the form.
* WARNING: Do NOT modify this code. The content of this method is always
* regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
txtN1 = new javax.swing.JTextField();
txtN2 = new javax.swing.JTextField();
lblSoma = new javax.swing.JLabel();
jLabel1 = new javax.swing.JLabel();
jLabel2 = new javax.swing.JLabel();
btnSoma = new javax.swing.JButton();
txtN3 = new javax.swing.JTextField();
txtN4 = new javax.swing.JTextField();
lblSubtrair = new javax.swing.JLabel();
jLabel3 = new javax.swing.JLabel();
jLabel4 = new javax.swing.JLabel();
BtnSubtrair = new javax.swing.JButton();
txtN5 = new javax.swing.JTextField();
txtN6 = new javax.swing.JTextField();
lblMultiplicar = new javax.swing.JLabel();
jLabel5 = new javax.swing.JLabel();
jLabel6 = new javax.swing.JLabel();
btnMultiplicar = new javax.swing.JButton();
txtN7 = new javax.swing.JTextField();
txtN8 = new javax.swing.JTextField();
lblDividir = new javax.swing.JLabel();
jLabel7 = new javax.swing.JLabel();
jLabel8 = new javax.swing.JLabel();
btnDividir = new javax.swing.JButton();
setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
txtN1.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
txtN1ActionPerformed(evt);
}
});
txtN2.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
txtN2ActionPerformed(evt);
}
});
lblSoma.setText("0");
jLabel1.setText("+");
jLabel2.setText("=");
btnSoma.setText("Somar");
btnSoma.setActionCommand("Calcular");
btnSoma.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
btnSomaActionPerformed(evt);
}
});
lblSubtrair.setText("0");
jLabel3.setText(" -");
jLabel4.setText("=");
BtnSubtrair.setText("Subtrair");
BtnSubtrair.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
BtnSubtrairActionPerformed(evt);
}
});
lblMultiplicar.setText("0");
jLabel5.setText("x");
jLabel6.setText("=");
btnMultiplicar.setText("Multiplicar");
btnMultiplicar.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
btnMultiplicarActionPerformed(evt);
}
});
lblDividir.setText("0");
jLabel7.setText("/");
jLabel8.setText("=");
btnDividir.setText("Dividir");
btnDividir.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
btnDividirActionPerformed(evt);
}
});
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
getContentPane().setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
.addContainerGap()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(txtN3, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.PREFERRED_SIZE, 40, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(txtN5, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.PREFERRED_SIZE, 40, javax.swing.GroupLayout.PREFERRED_SIZE)))
.addGroup(layout.createSequentialGroup()
.addGap(34, 34, 34)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(txtN1, javax.swing.GroupLayout.PREFERRED_SIZE, 40, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(txtN7, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.PREFERRED_SIZE, 40, javax.swing.GroupLayout.PREFERRED_SIZE))))
.addGap(23, 23, 23)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
.addComponent(jLabel3, javax.swing.GroupLayout.PREFERRED_SIZE, 14, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jLabel1, javax.swing.GroupLayout.PREFERRED_SIZE, 14, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jLabel7, javax.swing.GroupLayout.PREFERRED_SIZE, 14, javax.swing.GroupLayout.PREFERRED_SIZE))
.addComponent(jLabel5))
.addGap(18, 18, 18)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
.addGroup(layout.createSequentialGroup()
.addComponent(txtN2, javax.swing.GroupLayout.PREFERRED_SIZE, 40, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(0, 0, Short.MAX_VALUE))
.addGroup(javax.swing.GroupLayout.Alignment.LEADING, layout.createSequentialGroup()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
.addComponent(txtN8, javax.swing.GroupLayout.Alignment.LEADING, javax.swing.GroupLayout.PREFERRED_SIZE, 40, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(txtN6, javax.swing.GroupLayout.PREFERRED_SIZE, 40, javax.swing.GroupLayout.PREFERRED_SIZE))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 31, Short.MAX_VALUE)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jLabel8)
.addComponent(jLabel6))
.addGap(23, 23, 23)))
.addGap(4, 4, 4))
.addGroup(layout.createSequentialGroup()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
.addGroup(layout.createSequentialGroup()
.addGap(0, 0, Short.MAX_VALUE)
.addComponent(jLabel2))
.addGroup(layout.createSequentialGroup()
.addComponent(txtN4, javax.swing.GroupLayout.PREFERRED_SIZE, 40, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(jLabel4)))
.addGap(27, 27, 27)))
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
.addComponent(lblSoma)
.addComponent(lblSubtrair)
.addComponent(lblMultiplicar)
.addComponent(lblDividir))
.addGap(71, 71, 71)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(btnSoma)
.addComponent(BtnSubtrair)
.addComponent(btnDividir)
.addComponent(btnMultiplicar))
.addContainerGap(133, Short.MAX_VALUE))
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addGap(14, 14, 14)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jLabel2)
.addComponent(lblSoma)
.addComponent(txtN1, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(txtN2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(jLabel1)
.addComponent(btnSoma))
.addGap(15, 15, 15)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jLabel3)
.addComponent(txtN4, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addGroup(layout.createSequentialGroup()
.addGap(1, 1, 1)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
.addComponent(txtN7, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGroup(layout.createSequentialGroup()
.addComponent(txtN3, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(19, 19, 19)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jLabel5)
.addComponent(jLabel6)
.addComponent(lblMultiplicar)
.addComponent(txtN5, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(txtN6, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addGap(18, 18, 18)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jLabel7)
.addComponent(jLabel8)
.addComponent(lblDividir))
.addGap(6, 6, 6))
.addComponent(txtN8, javax.swing.GroupLayout.Alignment.TRAILING, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))))
.addGroup(layout.createSequentialGroup()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(jLabel4)
.addComponent(lblSubtrair)
.addComponent(BtnSubtrair))
.addGap(14, 14, 14)
.addComponent(btnMultiplicar)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(btnDividir)))))
.addContainerGap(171, Short.MAX_VALUE))
);
pack();
}// </editor-fold>//GEN-END:initComponents
private void btnSomaActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnSomaActionPerformed
// TODO add your handling code here:
int n1 = Integer.parseInt(txtN1.getText());
int n2 = Integer.parseInt(txtN2.getText());
int soma = n1 + n2;
lblSoma.setText(Integer.toString(soma));
}//GEN-LAST:event_btnSomaActionPerformed
private void txtN1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_txtN1ActionPerformed
// TODO add your handling code here:
}//GEN-LAST:event_txtN1ActionPerformed
private void txtN2ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_txtN2ActionPerformed
// TODO add your handling code here:
}//GEN-LAST:event_txtN2ActionPerformed
private void btnMultiplicarActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnMultiplicarActionPerformed
// TODO add your handling code here:
int n5 = Integer.parseInt(txtN5.getText());
int n6 = Integer.parseInt(txtN6.getText());
int multiplicar = n5 * n6;
lblMultiplicar.setText(Integer.toString(multiplicar));
}//GEN-LAST:event_btnMultiplicarActionPerformed
private void BtnSubtrairActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_BtnSubtrairActionPerformed
// TODO add your handling code here:
int n3 = Integer.parseInt(txtN3.getText());
int n4 = Integer.parseInt(txtN4.getText());
int subtrair = n3 - n4;
lblSubtrair.setText(Integer.toString(subtrair));
}//GEN-LAST:event_BtnSubtrairActionPerformed
    private void btnDividirActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnDividirActionPerformed
        int n7 = Integer.parseInt(txtN7.getText());
        int n8 = Integer.parseInt(txtN8.getText());
        // Guard against division by zero before performing the (integer) division
        if (n8 == 0) {
            lblDividir.setText("Erro");
            return;
        }
        int dividir = n7 / n8;
        lblDividir.setText(Integer.toString(dividir));
    }//GEN-LAST:event_btnDividirActionPerformed
/**
* @param args the command line arguments
*/
public static void main(String args[]) {
/* Set the Nimbus look and feel */
//<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) ">
/* If Nimbus (introduced in Java SE 6) is not available, stay with the default look and feel.
* For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html
*/
try {
for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) {
if ("Nimbus".equals(info.getName())) {
javax.swing.UIManager.setLookAndFeel(info.getClassName());
break;
}
}
} catch (ClassNotFoundException ex) {
java.util.logging.Logger.getLogger(TelaCalc.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
} catch (InstantiationException ex) {
java.util.logging.Logger.getLogger(TelaCalc.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
} catch (IllegalAccessException ex) {
java.util.logging.Logger.getLogger(TelaCalc.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
} catch (javax.swing.UnsupportedLookAndFeelException ex) {
java.util.logging.Logger.getLogger(TelaCalc.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
}
        //</editor-fold>
/* Create and display the form */
java.awt.EventQueue.invokeLater(new Runnable() {
public void run() {
new TelaCalc().setVisible(true);
}
});
}
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JButton BtnSubtrair;
private javax.swing.JButton btnDividir;
private javax.swing.JButton btnMultiplicar;
private javax.swing.JButton btnSoma;
private javax.swing.JLabel jLabel1;
private javax.swing.JLabel jLabel2;
private javax.swing.JLabel jLabel3;
private javax.swing.JLabel jLabel4;
private javax.swing.JLabel jLabel5;
private javax.swing.JLabel jLabel6;
private javax.swing.JLabel jLabel7;
private javax.swing.JLabel jLabel8;
private javax.swing.JLabel lblDividir;
private javax.swing.JLabel lblMultiplicar;
private javax.swing.JLabel lblSoma;
private javax.swing.JLabel lblSubtrair;
private javax.swing.JTextField txtN1;
private javax.swing.JTextField txtN2;
private javax.swing.JTextField txtN3;
private javax.swing.JTextField txtN4;
private javax.swing.JTextField txtN5;
private javax.swing.JTextField txtN6;
private javax.swing.JTextField txtN7;
private javax.swing.JTextField txtN8;
// End of variables declaration//GEN-END:variables
}
|
68d4c08cfc24a7480749a828db11a06601668049
|
[
"Java"
] | 1
|
Java
|
muriloaugusto11/CalculadoraSwing
|
8b8a64e306ef228deeeddc5dd8ba243ce8b23eb8
|
24c711552d92e304cad1f508f707b485c78a60f2
|
refs/heads/master
|
<file_sep>/* global __dirname */
var app = require('app')
var BrowserWindow = require('browser-window')
var mainWindow = null;
// Quit the application when all windows are closed.
app.on('window-all-closed', function () {
if (process.platform != 'darwin') {
app.quit();
}
});
// When the application is ready, create the main window
app.on('ready', function () {
    // Assign to the outer mainWindow (no `var`) so the reference is kept alive
    mainWindow = new BrowserWindow({
        width: 800,
height:600,
title: 'Hadro Electron',
'auto-hide-menu-bar': true
})
    // Load a local HTML file bundled with the app
mainWindow.loadUrl('file://' + __dirname + '/index.html')
//mainWindow.loadUrl('http://hadro.drovancal.com/login.html')
mainWindow.maximize()
mainWindow.on('closed', function () {
mainWindow = null;
});
})
|
463f1fb23e4b41845a39b6228d6ed9fb1ebbc44f
|
[
"JavaScript"
] | 1
|
JavaScript
|
Drovancal/hadro-electron
|
9788fa4b8f1bf2f6cf8bb3f0374f9ddff52d85d8
|
bb97d154b319639195f09e18367324fc615f71da
|
refs/heads/main
|
<file_sep>package com.araujotadeu.dot_indicator.lib
interface DotIndicatorListener {
fun setCurrent(position: Int)
fun setDots(count: Int)
}
<file_sep>package com.araujotadeu.dot_indicator.lib
import android.animation.Animator
import android.animation.AnimatorSet
import android.animation.ValueAnimator
import android.content.Context
import android.graphics.Color
import android.util.AttributeSet
import android.view.Gravity
import android.view.View
import android.widget.LinearLayout
import androidx.recyclerview.widget.RecyclerView
import kotlin.math.roundToInt
class ExpandedDotIndicatorView @JvmOverloads constructor(
context: Context,
attrs: AttributeSet? = null,
defStyleAttr: Int = 0
) : LinearLayout(context, attrs, defStyleAttr), DotIndicatorListener {
var dotColor: Int = Color.BLACK
var dotSize: Int = 0
var dotSpace: Int = 0
var dotAlphaSelected: Float = 1.0f
var dotAlphaNotSelected: Float = 0.5f
var dotExpandFraction: Float = 1.5f
// TODO expand X and Y, or only Y, or only X
private val helper = DotIndicatorHelper(resources, this)
var currentPosition = -1
private set
init {
orientation = HORIZONTAL
gravity = Gravity.CENTER
if (isInEditMode) {
            // Preview values shown in the layout editor
            dotSize = 100
            dotSpace = 20
setDots(3)
}
attrs?.let {
applyAttrs(it)
}
}
private fun applyAttrs(attrs: AttributeSet) {
val typedArray = context.obtainStyledAttributes(attrs, R.styleable.DotIndicatorView)
dotColor = typedArray.getColor(R.styleable.DotIndicatorView_dotColor, Color.BLACK)
dotSize = typedArray.getDimensionPixelSize(R.styleable.DotIndicatorView_dotSize, 0)
dotSpace = typedArray.getDimensionPixelSize(R.styleable.DotIndicatorView_dotSpace, 0)
dotAlphaSelected = typedArray.getFloat(R.styleable.DotIndicatorView_dotAlphaSelected, 1.0f)
dotAlphaNotSelected = typedArray.getFloat(R.styleable.DotIndicatorView_dotAlphaNotSelected, 0.5f)
dotExpandFraction = typedArray.getFloat(R.styleable.DotIndicatorView_dotExpandFraction, 1.5f)
val count = typedArray.getInt(R.styleable.DotIndicatorView_dotCount, 0)
setDots(count)
typedArray.recycle()
}
override fun setDots(count: Int) {
removeAllViews()
takeIf { count > 0 }?.apply {
for (i in 0 until count) {
addView(View(context).apply {
alpha = dotAlphaNotSelected
//background = helper.createRoundRectDrawable(dotColor, dotSize)
background = helper.createOvalDrawable(dotColor)
layoutParams = LayoutParams(dotSize, dotSize).apply {
setMargins(dotSpace, 0, dotSpace, 0)
}
})
}
}
}
override fun setCurrent(position: Int) {
        if (position >= childCount || position < 0) return
takeIf { currentPosition != position }?.apply {
val list: MutableList<ValueAnimator> = mutableListOf()
val expandedSize = (dotSize * dotExpandFraction).roundToInt()
if (currentPosition >= 0) {
list.add(helper.createResizeAnimation(getChildAt(currentPosition), expandedSize, dotSize))
list.add(helper.createAlphaAnimation(getChildAt(currentPosition), dotAlphaSelected, dotAlphaNotSelected))
}
list.add(helper.createResizeAnimation(getChildAt(position), dotSize, expandedSize))
list.add(helper.createAlphaAnimation(getChildAt(position), dotAlphaNotSelected, dotAlphaSelected))
val set = AnimatorSet()
set.playTogether(list as Collection<Animator>?)
set.start()
currentPosition = position
}
}
fun attachRecyclerView(recyclerView: RecyclerView) {
helper.attachRecyclerView(recyclerView)
}
}
<file_sep>package com.araujotadeu.dot_indicator.lib
import android.content.Context
import android.graphics.Color
import android.util.AttributeSet
import android.view.Gravity
import android.view.View
import android.widget.LinearLayout
import androidx.recyclerview.widget.RecyclerView
// TODO add expand type property
class DotIndicatorView @JvmOverloads constructor(
context: Context,
attrs: AttributeSet? = null,
defStyleAttr: Int = 0
) : LinearLayout(context, attrs, defStyleAttr), DotIndicatorListener {
var selectedColor: Int = Color.BLACK
var notSelectedColor: Int = Color.WHITE
var dotSize: Int = 0
var dotSpace: Int = 0
private val helper = DotIndicatorHelper(resources, this)
var currentPosition = -1
private set
init {
orientation = HORIZONTAL
gravity = Gravity.CENTER
if (isInEditMode) {
            // Preview values shown in the layout editor
            dotSize = 100
            dotSpace = 20
setDots(3)
}
attrs?.let {
applyAttrs(it)
}
}
private fun applyAttrs(attrs: AttributeSet) {
val typedArray = context.obtainStyledAttributes(attrs, R.styleable.DotIndicatorView)
selectedColor = typedArray.getColor(R.styleable.DotIndicatorView_selectedColor, Color.BLACK)
notSelectedColor = typedArray.getColor(R.styleable.DotIndicatorView_notSelectedColor, Color.WHITE)
dotSize = typedArray.getDimensionPixelSize(R.styleable.DotIndicatorView_dotSize, 0)
dotSpace = typedArray.getDimensionPixelSize(R.styleable.DotIndicatorView_dotSpace, 0)
val count = typedArray.getInt(R.styleable.DotIndicatorView_dotCount, 0)
setDots(count)
typedArray.recycle()
}
override fun setDots(count: Int) {
removeAllViews()
takeIf { count > 0 }?.apply {
for (i in 0 until count) {
addView(View(context).apply {
background = helper.createRoundRectDrawable(notSelectedColor, dotSize)
layoutParams = LayoutParams(dotSize, dotSize).apply {
setMargins(dotSpace, 0, dotSpace, 0)
}
})
}
}
}
override fun setCurrent(position: Int) {
        if (position >= childCount || position < 0) return
takeIf { currentPosition != position }?.apply {
if (currentPosition >= 0) {
helper.executeColorTransitionAnimation(getChildAt(currentPosition), helper.createRoundRectDrawable(selectedColor, dotSize), helper.createRoundRectDrawable(notSelectedColor, dotSize))
}
helper.executeColorTransitionAnimation(getChildAt(position), helper.createRoundRectDrawable(notSelectedColor, dotSize), helper.createRoundRectDrawable(selectedColor, dotSize))
currentPosition = position
}
}
fun attachRecyclerView(recyclerView: RecyclerView) {
helper.attachRecyclerView(recyclerView)
}
}
<file_sep>package com.araujotadeu.dot_indicator.lib
import android.animation.AnimatorSet
import android.animation.ValueAnimator
import android.content.Context
import android.graphics.Color
import android.util.AttributeSet
import android.view.View
import android.widget.FrameLayout
import androidx.core.view.updateLayoutParams
import androidx.recyclerview.widget.RecyclerView
import java.lang.Math.abs
class WormDotIndicatorView @JvmOverloads constructor(
context: Context,
attrs: AttributeSet? = null,
defStyleAttr: Int = 0
) : FrameLayout(context, attrs, defStyleAttr), DotIndicatorListener {
var dotColor: Int = Color.BLACK
var dotSize: Int = 0
var dotSpace: Int = 0
var dotAlphaSelected: Float = 1.0f
var dotAlphaNotSelected: Float = 0.5f
var selectedView: View = View(context)
private val helper = DotIndicatorHelper(resources, this)
var currentPosition = -1
private set
init {
if (isInEditMode) {
            // Preview values shown in the layout editor
            dotSize = 100
            dotSpace = 20
setDots(3)
}
attrs?.let {
applyAttrs(it)
}
}
private fun applyAttrs(attrs: AttributeSet) {
val typedArray = context.obtainStyledAttributes(attrs, R.styleable.DotIndicatorView)
dotColor = typedArray.getColor(R.styleable.DotIndicatorView_dotColor, Color.BLACK)
dotSize = typedArray.getDimensionPixelSize(R.styleable.DotIndicatorView_dotSize, 0)
dotSpace = typedArray.getDimensionPixelSize(R.styleable.DotIndicatorView_dotSpace, 0)
dotAlphaSelected = typedArray.getFloat(R.styleable.DotIndicatorView_dotAlphaSelected, 1.0f)
dotAlphaNotSelected = typedArray.getFloat(R.styleable.DotIndicatorView_dotAlphaNotSelected, 0.5f)
val count = typedArray.getInt(R.styleable.DotIndicatorView_dotCount, 0)
setDots(count)
typedArray.recycle()
}
override fun setDots(count: Int) {
removeAllViews()
takeIf { count > 0 }?.apply {
for (i in 0 until count) {
addView(View(context).apply {
alpha = dotAlphaNotSelected
background = helper.createRoundRectDrawable(dotColor, dotSize)
layoutParams = LayoutParams(dotSize, dotSize).apply {
marginStart = dotSpace * i + dotSize * i
}
})
}
selectedView = View(context).apply {
alpha = dotAlphaSelected
background = helper.createRoundRectDrawable(dotColor, dotSize)
layoutParams = LayoutParams(dotSize, dotSize)
visibility = INVISIBLE
}
addView(selectedView)
}
}
override fun setCurrent(position: Int) {
        if (position >= childCount || position < 0) return
takeIf { currentPosition != position }?.apply {
if (currentPosition < 0) {
val view = getChildAt(position)
selectedView.updateLayoutParams<LayoutParams> {
marginStart = (view.layoutParams as LayoutParams).marginStart
}
selectedView.apply {
visibility = VISIBLE
}
} else {
if (position > currentPosition) { // Move forward
val steps = kotlin.math.abs(currentPosition - position)
val finalSize = (dotSize * steps + dotSpace * steps) + dotSize
val anim = ValueAnimator.ofInt(dotSize, finalSize)
anim.addUpdateListener { valueAnimator ->
val value = valueAnimator.animatedValue as Int
selectedView.updateLayoutParams<LayoutParams> {
width = value
}
}
anim.duration = 200
val anim2 = ValueAnimator.ofInt(finalSize, dotSize)
anim2.addUpdateListener { valueAnimator ->
val value = valueAnimator.animatedValue as Int
selectedView.updateLayoutParams<LayoutParams> {
width = value
marginStart = abs(finalSize - value)
}
}
anim2.duration = 200
val set = AnimatorSet()
set.playSequentially(anim, anim2)
set.start()
} else { // Move backward
val steps = kotlin.math.abs(currentPosition - position)
val finalSize = (dotSize * steps + dotSpace * steps) + dotSize
val anim = ValueAnimator.ofInt(dotSize, finalSize)
anim.addUpdateListener { valueAnimator ->
val value = valueAnimator.animatedValue as Int
selectedView.updateLayoutParams<LayoutParams> {
width = value
marginStart = abs(finalSize - value)
}
}
anim.duration = 200
val anim2 = ValueAnimator.ofInt(finalSize, dotSize)
anim2.addUpdateListener { valueAnimator ->
val value = valueAnimator.animatedValue as Int
selectedView.updateLayoutParams<LayoutParams> {
width = value
}
}
anim2.duration = 200
val set = AnimatorSet()
set.playSequentially(anim, anim2)
set.start()
}
}
currentPosition = position
}
}
fun attachRecyclerView(recyclerView: RecyclerView) {
helper.attachRecyclerView(recyclerView)
}
}
<file_sep>package com.araujotadeu.dot_indicator.sample
import android.os.Bundle
import androidx.appcompat.app.AppCompatActivity
import kotlinx.android.synthetic.main.activity_main.*
class MainActivity : AppCompatActivity() {
// TODO
// Auto and manual modes
var index = 0
var max = 3
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
button.setOnClickListener {
testDefault.setCurrent(index)
testExpanded.setCurrent(index)
testWorm.setCurrent(index)
index += 1
if (index == max) {
index = 0
}
}
}
}<file_sep>package com.araujotadeu.dot_indicator.lib
import android.animation.ValueAnimator
import android.content.res.Resources
import android.graphics.drawable.Drawable
import android.graphics.drawable.ShapeDrawable
import android.graphics.drawable.TransitionDrawable
import android.graphics.drawable.shapes.OvalShape
import android.graphics.drawable.shapes.RoundRectShape
import android.view.View
import android.view.ViewGroup
import android.widget.FrameLayout
import androidx.recyclerview.widget.LinearLayoutManager
import androidx.recyclerview.widget.RecyclerView
internal class DotIndicatorHelper(resources : Resources, val listener : DotIndicatorListener) {
private val animationDuration = resources.getInteger(android.R.integer.config_shortAnimTime)
private var scrollListener: RecyclerView.OnScrollListener? = null
fun createRoundRectDrawable(color: Int, dotSize : Int): Drawable {
val radius = dotSize / 2f
val radiusArray = floatArrayOf(radius, radius, radius, radius, radius, radius, radius, radius)
val shapeDrawable = ShapeDrawable(RoundRectShape(radiusArray, null, null))
shapeDrawable.paint.color = color
return shapeDrawable
}
fun createOvalDrawable(color: Int): Drawable {
val shapeDrawable = ShapeDrawable(OvalShape())
shapeDrawable.paint.color = color
return shapeDrawable
}
fun createResizeAnimation(view: View, from: Int, to: Int, expandWidth : Boolean = true, expandHeight : Boolean = true): ValueAnimator {
val anim = ValueAnimator.ofInt(from, to)
anim.addUpdateListener { valueAnimator ->
val value = valueAnimator.animatedValue as Int
val layoutParams: ViewGroup.LayoutParams = view.layoutParams
if (expandWidth)
layoutParams.width = value
if (expandHeight)
layoutParams.height = value
view.layoutParams = layoutParams
}
anim.duration = animationDuration.toLong()
return anim
}
fun createAlphaAnimation(view: View, from: Float, to: Float): ValueAnimator {
val anim = ValueAnimator.ofFloat(from, to)
anim.addUpdateListener { valueAnimator ->
val value = valueAnimator.animatedValue as Float
view.alpha = value
}
anim.duration = animationDuration.toLong()
return anim
}
fun executeColorTransitionAnimation(view: View, from: Drawable, to: Drawable) {
val trans = TransitionDrawable(arrayOf(from, to))
view.background = trans
trans.startTransition(animationDuration)
}
fun attachRecyclerView(recyclerView: RecyclerView) {
takeIf { recyclerView.layoutManager is LinearLayoutManager }?.apply {
scrollListener?.let {
recyclerView.removeOnScrollListener(it)
}
scrollListener = object : RecyclerView.OnScrollListener() {
override fun onScrolled(recyclerView: RecyclerView, dx: Int, dy: Int) {
super.onScrolled(recyclerView, dx, dy)
val layoutManager = recyclerView.layoutManager as LinearLayoutManager
val firstPos = layoutManager.findFirstCompletelyVisibleItemPosition()
listener.setCurrent(firstPos)
}
}
recyclerView.adapter?.let {
listener.setDots(it.itemCount)
}
}
}
fun createMoveXAnimation(view: View?, from: Int, to: Int): ValueAnimator {
val anim = ValueAnimator.ofInt(from, to)
anim.addUpdateListener { valueAnimator ->
val value = valueAnimator.animatedValue as Int
val layoutParams: FrameLayout.LayoutParams = (view?.layoutParams as FrameLayout.LayoutParams)
layoutParams.leftMargin = value
view.layoutParams = layoutParams
}
anim.duration = animationDuration.toLong()
return anim
}
// TODO attachViewPager
}
|
9bf9e0f92c39471590662a1bf2b6afb223292903
|
[
"Kotlin"
] | 6
|
Kotlin
|
araujotadeu/DotIndicator
|
cc8d2785878c861e9076deb685a5ef992f019ea4
|
da2a66436c335050784f995eb4d75d432189e30f
|
refs/heads/main
|
<file_sep>NODE_ENV='development'
APP_PORT='8080'
APP_HOST='localhost'
APP_VERSION='1.0.0'
COOL_ENV_VARIABLE='NodeJS is awesome!'
MINIO_ROOT_USER='minio'
MINIO_ROOT_PASSWORD='<PASSWORD>'
MINIO_ENDPOINT='http://minio.local:9000'
<file_sep># Basic Docker-Minio-NodeJS setup for dev purposes
### How to use AWS SDK for Javascript with MinIO Server:
https://docs.min.io/docs/how-to-use-aws-sdk-for-javascript-with-minio-server.html
### Some interesting further reading:
https://www.digitalocean.com/community/tutorials/containerizing-a-node-js-application-for-development-with-docker-compose
<file_sep>'use strict'
const http = require('http')
const s3Service = require('./services/s3Service')
const { APP_PORT, APP_HOST } = process.env
const server = http.createServer(function (req, res) {
// TODO: Create a more interesting test example
const currentDateString = new Date().toISOString();
  const bucketName = 'testbucket'; // Could be moved into .env if necessary
const fileName = `testfile_${currentDateString}.txt`;
const fileContent = `File created at ${currentDateString}`;
// Creates a file named testfile into minio bucket when ANY request is received
s3Service.uploadTestFile(bucketName, fileName, fileContent)
.then(
result => {
res.statusCode = 200
res.setHeader('Content-Type', 'Application/json')
res.end(JSON.stringify(result))
},
err => {
console.error('ERROR:', err.message)
res.statusCode = 500
res.setHeader('Content-Type', 'Application/json')
res.end(JSON.stringify({
error: err.message
}))
})
})
// Start the server
server.listen(APP_PORT, () => {
// console.log('Server running on port', PORT)
})
server.on('listening', () => { console.log(`Running version ${process.env.npm_package_version} on %s:%s`, APP_HOST, APP_PORT) })
server.on('error', (err) => { console.error('Something wrong happened:', err) })
server.on('close', () => { console.log('server closed...') })
|
18a2e48743b776f8cc0145a1a13697d05ccca477
|
[
"Markdown",
"JavaScript",
"Shell"
] | 3
|
Shell
|
howseman/docker-minio-nodejs
|
411d702f573f09a54cf652aeacc4c0375d258f2b
|
9297ecabd9337028bae4a5761b9a3d7267dd57e1
|
refs/heads/master
|
<repo_name>bdiveley/bad_tracks<file_sep>/app/controllers/artists_controller.rb
class ArtistsController < ApplicationController
def new
@artist = Artist.new
end
def create
@artist = Artist.new(artist_params)
if @artist.save
redirect_to artist_path(@artist)
else
flash.now[:error] = "Could not save artist"
render action: "new"
end
end
def show
@artist = Artist.find(params[:id])
session[:secret] = "This time for real, though"
@songs = @artist.songs
end
private
def artist_params
params.require(:artist).permit(:name)
end
end
<file_sep>/spec/features/user_can_add_songs_to_their_cart_spec.rb
require "rails_helper"
RSpec.describe "When a user adds songs to their cart" do
it "a message is displayed" do
artist = Artist.create(name: "Enya")
song = artist.songs.create(title: "Never Gonna Give You Up", length: 250, play_count: 100000)
visit songs_path
click_button "Add Song"
expect(page).to have_content("You now have 1 copy of #{song.title} in your cart.")
end
it "the message correctly increments for multiple songs" do
artist = Artist.create(name: "Enya")
song = artist.songs.create(title: "Never Gonna Give You Up", length: 250, play_count: 100000)
visit songs_path
click_button "Add Song"
expect(page).to have_content("You now have 1 copy of #{song.title} in your cart.")
click_button "Add Song"
expect(page).to have_content("You now have 2 copies of #{song.title} in your cart.")
end
end
<file_sep>/app/controllers/carts_controller.rb
class CartsController < ApplicationController
  def create
    song = Song.find(params[:song_id])
    # Session data is serialized between requests, so use string keys consistently
    # rather than relying on Hash.new(0)'s default surviving the round trip.
    session[:cart] ||= {}
    session[:cart][song.id.to_s] = (session[:cart][song.id.to_s] || 0) + 1
    count = session[:cart][song.id.to_s]
    flash[:notice] = "You now have #{count} #{'copy'.pluralize(count)} of #{song.title} in your cart."
    redirect_to songs_path
  end
end
<file_sep>/spec/factories/artists.rb
FactoryBot.define do
factory :artist do
sequence(:name) { |n| "Song #{n}"}
end
end
|
441d96fe908ea74b54f7f474ef02e9de472d85af
|
[
"Ruby"
] | 4
|
Ruby
|
bdiveley/bad_tracks
|
7805669ff6af2743aec2ac283f297309d3933684
|
4155b4a2e70b63084326403381dc4dafc0174b8e
|
refs/heads/main
|
<repo_name>droxander/rock-paper-scissorsGame<file_sep>/js/picked.js
// Attach a click event to each token
tokens.forEach(function (elemento) {
elemento.element.addEventListener('click', function (){
focus = elemento.element;
tokenPicked = elemento.name;
printPicked(focus);
});
});
/* Receives the selected element as a parameter, hides the step 1 board
and shows the step 2 board with the chosen token */
function printPicked (focus){
focus = focus.firstElementChild;
document.getElementById('step-1').classList.add('hide');
document.getElementById('step-2').classList.remove('hide');
picked.appendChild(focus);
homeTokenAnimation();
}
function randomNumber (){
let random = Math.floor(Math.random()*3);
return random;
}
function homeTokenAnimation (){
let contador = 0;
let intervalo = setInterval(
function (){
let random = randomNumber();
nuevoToken.setAttribute('class', tokenHome[random].class);
nuevoToken.innerHTML = tokenHome[random].child;
home.appendChild(nuevoToken);
contador++;
if(contador == 8){
clearInterval(intervalo);
compararGanador(tokenPicked, tokenHome[random].name);
}
}
, 200);
}
function compararGanador (tokenPicked, homeToken){
var calculo = parseInt(score.innerText);
if(tokenPicked == homeToken){
outcome.textContent = 'tie';
}else if(tokenPicked === 'paper' && homeToken === 'rock'
|| tokenPicked === 'scissor' && homeToken === 'paper'
|| tokenPicked === 'rock' && homeToken === 'scissor'){
outcome.textContent = 'you win';
score.textContent = calculo+1;
picked.classList.add('token--winner');
}else{
outcome.textContent = 'you lose';
if(score.textContent>0){
score.textContent = calculo-1;
}
home.classList.add('token--winner');
}
outcome.parentElement.classList.remove('hide');
}<file_sep>/js/game.js
document.getElementById('return').addEventListener('click', playAgain);
function playAgain (){
comeBack();
home.classList.remove('token--winner');
picked.classList.remove('token--winner');
document.getElementById('step-2').classList.add('hide');
document.getElementById('step-1').classList.remove('hide');
}
function comeBack (){
home.removeChild(nuevoToken);
let nodo = picked.firstElementChild;
document.getElementById(tokenPicked).appendChild(nodo);
outcome.parentElement.classList.add('hide');
}<file_sep>/js/variables.js
const tokens = [
{name: "paper", element: document.getElementById('paper')},
{name: "scissor", element: document.getElementById('scissor')},
{name: "rock", element: document.getElementById('rock')}
];
const tokenHome = [
{ name: 'paper',
class: 'board__token-background board__token--paper',
child: '<img src="images/icon-paper.svg" alt=""></img>'
},
{name: 'scissor',
class: 'board__token-background board__token--scissors',
child: '<img src="images/icon-scissors.svg" alt=""></img>'
},
{ name: 'rock',
class: 'board__token-background board__token--rock',
child: '<img src="images/icon-rock.svg" alt=""></img>'
},
];
// Constants
const home = document.getElementById('home');
const picked = document.getElementById('picked');
const outcome = document.getElementById('outcome');
const score = document.getElementById('score-points');
const rules = document.getElementById('rules');
// Mutable variables
let focus;
let tokenPicked;
let nuevoToken = document.createElement('div');<file_sep>/README.md
# Paper, Scissors or Rock (Game)
<file_sep>/js/rules.js
const lightbox = document.createElement('div');
rules.addEventListener('click', rulesLightbox);
function rulesLightbox (){
lightbox.classList.add('lightbox');
lightbox.innerHTML = '<div class="lightbox__rules">' +
'<h2 class="text--dark text--bold">Rules</h2>' +
'<img class="lightbox__rules-bg" src="images/image-rules.svg" alt="Aqui se encuentran las reglas del juego">' +
'<img class="button--cursor" src="images/icon-close.svg" alt="cerrar" id="close"></div>';
document.body.appendChild(lightbox);
document.getElementById('close').addEventListener('click', function (){
document.body.removeChild(lightbox);
});
}
|
30479c302578183aa21a140c2540c977fc6b9d66
|
[
"JavaScript",
"Markdown"
] | 5
|
JavaScript
|
droxander/rock-paper-scissorsGame
|
4311dc6d1ac225eaaea1556fabfad8fb11c1b57a
|
031c0c88cc77edeb0a4b5eb34fb73ba3f0011a29
|
refs/heads/master
|
<repo_name>rjeli/msl<file_sep>/README.md
# macOS subsystem for Linux
## ⚠️ under construction, come back soon :)
Seamlessly run Linux CLI applications under macOS.
MSL is a script that installs Debian under the xhyve hypervisor, mounts your Debian home directory into your host macOS home directory, and installs shell hooks so you can switch between the two OSes easily.
### Why not Docker for Mac, Vagrant, Virtualbox, ...?
MSL and Docker for Mac both use xhyve, which uses Hypervisor.framework, so they're similar underneath. However, the primary goal of MSL is to remove any extra steps between your macOS and Linux shell. No `docker run -it -exec -a --detach -mount type=bind,src=dst that-one-image bash`... or even `vagrant ssh`, just `cd ~/deb`.
<file_sep>/msl.sh
#!/bin/bash
set -euo pipefail
APP_DIR=$HOME/.local/share/msl
mkdir -p $APP_DIR
XHYVE_ARGS=""
add_arg() {
XHYVE_ARGS="$XHYVE_ARGS $*"
}
KERNEL=$APP_DIR/vmlinuz-whatever
INITRD=$APP_DIR/initrd-whatever
CMDLINE="earlyprintk=serial console=ttyS0 root=/dev/vda1 ro"
dl_cached() {
url=$1
cached_path=$APP_DIR/$(basename $url)
if [ -f $cached_path ]; then
echo >&2 using cached $cached_path
else
echo >&2 downloading $url
curl -L $url >$cached_path
fi
echo $cached_path
}
start() {
add_arg -A # acpi
add_arg -m 4G # ram
add_arg -s 0:0,hostbridge # pci hostbridge
add_arg -s 31,lpc -l com1,autopty # pci-isa bridge and isa serial port
add_arg -s 2:0,virtio-net # network controller
add_arg -s 3:0,virtio-blk,$APP_DIR/hdd.img # root drive
echo sudoing to enable networking
sudo true
sudo nohup xhyve $XHYVE_ARGS -f kexec,$KERNEL,$INITRD,"$CMDLINE" \
>$APP_DIR/stdout.log 2>$APP_DIR/stderr.log &
pid=$!
echo pid is $pid
echo $pid >$APP_DIR/pid
sleep 0.5
tty=$(egrep -o '/dev/ttys\d+' $APP_DIR/stderr.log)
echo "linking $APP_DIR/tty -> $tty"
sudo chmod a+rwx $tty
ln -s $tty $APP_DIR/tty
}
stop() {
echo stopping
[ -f $APP_DIR/pid ] && sudo kill -9 $(cat $APP_DIR/pid)
echo removing tty symlink
[ -f $APP_DIR/tty ] && rm $APP_DIR/tty
}
install_msl() {
buster_url=http://deb.debian.org/debian/dists/buster
netboot_url=$buster_url/main/installer-amd64/current/images/netboot
iso_path=$(dl_cached $netboot_url/mini.iso)
KERNEL=$(dl_cached $netboot_url/debian-installer/amd64/linux)
INITRD=$(dl_cached $netboot_url/debian-installer/amd64/initrd.gz)
echo creating 8gb hdd
dd if=/dev/zero of=$APP_DIR/hdd.img bs=1g count=8
echo starting installer
add_arg -s 4:0,ahci-cd,$iso_path
start
echo attaching to serial for configuration
screen $APP_DIR/tty
}
if [ $# -lt 1 ]; then
echo must provide command
exit 1
fi
case $1 in
install)
install_msl
;;
start)
start
;;
stop)
stop
;;
*)
echo unrecognized command $1
exit 1
;;
esac
|
c4db8f1a77847396af159c76586b74f29aa6c8c3
|
[
"Markdown",
"Shell"
] | 2
|
Markdown
|
rjeli/msl
|
8764e82ba9ded0cb900ef724d8d81fe9651db61a
|
b26d62372b5b737c55d3428ca4c8ea5d8b2394e6
|
refs/heads/master
|
<file_sep>#include "Container.h"
// External deps
#include <QCoreApplication>
#include <QDir>
// Include the setting managers
namespace App { namespace Settings
{
/**
* Contruct the container performs:
* - Creation of all setting managers
*
* @brief Container::Container
* @param parent
*/
Container::Container(QObject *parent)
//: general(*new General())
{
// Configure the setting
loadSettings();
}
/**
* Load the json setting files into the setting objects
*
* @brief Container::loadSettings
*/
void Container::loadSettings()
{
// Get the current programs dir
QString pathDir = getDir();
// Load the general settings file
//general.load(pathDir+"/config/General.json", general.SaveFormat::Json);
}
/**
* Get the programs root path
*
* @brief Container::getDir
* @return QString path
*/
QString Container::getDir()
{
// This should work across platform, working windows need to check mac
QString path = QCoreApplication::applicationDirPath();
// I know the below works fine for mac
#ifdef __APPLE__
#include "TargetConditionals.h"
#ifdef TARGET_OS_MAC
// Instance of QDir at current path of program
QDir pathsRoot(QDir::currentPath());
pathsRoot.cdUp(); pathsRoot.cdUp(); pathsRoot.cdUp();
path = pathsRoot.path();
#endif
#endif
// Return the path
return path;
}
}}
<file_sep>#pragma once
// Include externla deps
#include <QObject>
// Include applcation class
#include "App/Application.h"
namespace Bootstrap
{
class Startup : public QObject
{
Q_OBJECT
public:
Startup();
~Startup();
bool success() const { return this->m_isValid; }
private:
QObject* m_root;
bool m_isValid;
App::Application& m_application;
};
}
<file_sep>#pragma once
// Include externla deps
#include <QObject>
// Include the setting contain
#include "Settings/Container.h"
namespace App
{
class Application : public QObject
{
Q_OBJECT
public:
Application(QObject *parent);
~Application();
// Settings container
Settings::Container& settings_container;
};
}
<file_sep>#pragma once
#include <QObject>
#include <QString>
#include <QDebug>
#include <QJsonObject>
namespace App { namespace Utilities {
class JsonFile
{
public:
JsonFile();
//virtual ~JsonFile() {};
enum SaveFormat {
Json, Binary
};
QString m_loadedFile;
SaveFormat m_format;
bool load(QString location, SaveFormat format);
bool update();
virtual void read(const QJsonObject &json) = 0;
virtual void write(QJsonObject &json) const = 0;
};
}}
<file_sep># Executor
This program provides an easy, secure way of executing OS commands.
The goal is to be able to configure and control an OS with simple **modular** configuration packages that can be
installed and uninstalled.
## Greater Security
The program gives greater security by using a whitelist approach to the commands that can be run by each package.
When installing additional modules into Executor, the server admin is shown the commands that the package
requires to perform its actions; these can then be confirmed or denied (like app permissions on phones).
## Executor Commands
As command packages are installed, they can be accessed via the command prompt or by an internally running service.
## Installs and Updates
Executor is not only designed as a gateway for running commands but also designed to manage system packages.
For example, a system package can be written for MySQL. This would cover installing MySQL and managing updates published
by the author.
## Executor Packages
Executor packages are made up of simple JSON files.
### Package File
This file contains information about the Executor package.
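As a rough sketch of what such a file could look like (the field names here are assumptions for illustration, not a documented schema):
```json
{
    "name": "mysql",
    "version": "1.0.0",
    "description": "Install and manage MySQL",
    "author": "example-author",
    "commands": "commands.json"
}
```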
### Commands File
Using shell scripts is highly discouraged when using Executor, as it removes the benefits of the whitelisting approach.
The commands that are required are built up within the commands file.
Commands are far more high-level than shell scripts, as they include additional help such as:
* Powerful functions
* Templates support
* Command chaining
* Input validation
* Success action
* Failure action
* Logging
* and more.
#### Functions Explained
Functions are built-in methods within Executor that perform actions. For example, when analysing the response from an executed
command, one can use the following:
```json
"response": "contains:user added"
```
This will check whether the response from the terminal contains the phrase:
```json
"user added"
```
If it does, Executor runs the success action; if not, it runs the failure action.
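As a purely illustrative sketch of a command entry (only the `response` check is taken from the example above; the other keys and the `{{placeholder}}` syntax are assumptions), a whitelisted command with success and failure actions might be described as:
```json
{
    "add-user": {
        "run": "useradd {{username}}",
        "validate": { "username": "alphanumeric" },
        "response": "contains:user added",
        "success": "log:user {{username}} created",
        "failure": "log:could not create user {{username}}"
    }
}
```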
#### Templates Explained
Templates provide an easy way of creating new files and editing existing files on the OS.
Templates can be created with placeholders; when the template method is called, it takes the content of
the template and inserts the relevant data into the placeholders. Executor will then create or edit an existing file,
making adding and modifying files easy.
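As an illustrative sketch (the keys and placeholder syntax below are assumptions, not a documented format), a template call could pair a template file with the values to substitute and the destination to write:
```json
{
    "template": "my.cnf.tpl",
    "destination": "/etc/mysql/my.cnf",
    "values": {
        "port": "3306",
        "bind_address": "127.0.0.1"
    }
}
```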
<file_sep>// Include external deps
#include <QCoreApplication>
// Include startup class
#include "Bootstrap/Startup.h"
int main(int argc, char *argv[])
{
// Start app
QCoreApplication app(argc, argv);
// Attach message handler
//qInstallMessageHandler(fileMessageHandler);
// Boot the applcation
Bootstrap::Startup loader;
// If successfull return event loop
if (loader.success())
{
return app.exec();
}
// End app
return -1;
}
<file_sep>#include "Startup.h"
// Include externla deps
#include <QObject>
// Include applcation class
#include "App/Application.h"
namespace Bootstrap
{
// Location for the main application settings file
static auto SETTINGS_FILE = QStringLiteral("executor.json");
Startup::Startup()
: m_isValid(true),
m_application(*new App::Application(this))
{
}
Startup::~Startup()
{
}
}
<file_sep>#include "Terminal.h"
namespace App { namespace Terminal
{
Terminal::Terminal()
{
}
}}
<file_sep>#!/bin/bash
# Demo shell script for Executor
echo "Hello World!"
<file_sep>#pragma once
// Include external deps
#include <QObject>
#include <QMap>
#include <QVariantMap>
#include <QString>
// Include setting managers
namespace App { namespace Settings
{
class Container
{
public:
Container(QObject *parent = 0);
//App::Settings::General& general;
private:
void loadSettings();
QString getDir();
};
}}
<file_sep>#pragma once
namespace App { namespace Terminal
{
class Terminal
{
public:
Terminal();
};
}}
<file_sep>#include "Application.h"
// Include externla deps
#include <QObject>
// Include the setting contain
#include "Settings/Container.h"
namespace App
{
/**
* Class constructor
*
* @brief Application::Application
* @param parent
*/
Application::Application(QObject* parent)
: QObject(parent),
// Create instance of the settings container
settings_container(*new Settings::Container)
{
}
/**
* Class destructor performs:
*
* @brief Application::~Application
*/
Application::~Application()
{
}
}
<file_sep>#include "JsonFile.h"
#include <QFile>
#include <QJsonArray>
#include <QJsonDocument>
#include <QDebug>
namespace App { namespace Utilities
{
JsonFile::JsonFile()
{
}
/**
* Load the settings from a file
*
* @brief Settings::load
* @param location
* @param format
* @return bool
*/
bool JsonFile::load(QString location, SaveFormat format)
{
// Cache data
m_loadedFile = location;
m_format = format;
// Load the file
QFile loadFile(m_loadedFile);
// Open the file and return error if failed
if (!loadFile.open(QIODevice::ReadOnly)) {
qWarning("Couldn't open save file.");
return false;
}
// Get the file data
QByteArray saveData = loadFile.readAll();
// Load the document into the json object from json or binary stored format
QJsonDocument loadDoc(m_format == Json
? QJsonDocument::fromJson(saveData)
: QJsonDocument::fromBinaryData(saveData));
// Get the object
auto object = loadDoc.object();
// Trigger the read method
read(object);
return true;
}
/**
* Update the loaded file with the new data
*
* @brief Settings::update
* @return bool
*/
bool JsonFile::update()
{
// Load the file
QFile saveFile(m_loadedFile);
// Open the file and error if failed
if (!saveFile.open(QIODevice::WriteOnly)) {
qWarning("Couldn't open save file.");
return false;
}
// Create a json object
QJsonObject object;
// Add data to the json object
write(object);
// Create a json format document
QJsonDocument saveDoc(object);
// Save the file in the correct format
saveFile.write(m_format == Json
? saveDoc.toJson(QJsonDocument::Indented)
: saveDoc.toBinaryData());
return true;
}
}}
|
d1958c7445eee8d390d0840d9bb746367ebda338
|
[
"Markdown",
"C++",
"Shell"
] | 13
|
C++
|
motters/executor
|
34d4ea46555524060bced7e8447132a877c6ce4b
|
ccaa06c16b1bdfe23cc3afd7f81e902d8f448b2e
|
refs/heads/master
|
<repo_name>HanleyLee/HLPodSpecs<file_sep>/HLTest/0.0.1/HLTest.podspec
Pod::Spec.new do |s|
s.name = "HLTest"
s.version = "0.0.1"
s.summary = "HLTest_summary"
s.description = <<-DESC
HLTest1 by HanleyLee
DESC
s.homepage = "https://hanleylee.com"
s.license = "MIT"
s.author = { "HanleyLee" => "<EMAIL>" }
s.platform = :ios, "10.0"
# s.ios.deployment_target = "5.0"
# s.osx.deployment_target = "10.7"
# s.watchos.deployment_target = "2.0"
# s.tvos.deployment_target = "9.0"
# s.source = { :git => "http://EXAMPLE/HLTest1.git", :tag => "#{s.version}" }
s.source = { :git => '<EMAIL>:HanleyLee/HLTest.git', :tag => s.version, :submodules => true}
s.source_files = "HLTest/**/*.{swift}"
# 功能文件夹
# s.subspec 'Utility' do |ss|
# ss.source_files = 'ZRNewsModule/ZRNewsModule/Utility/*{.swift}'
# end
# s.subspec 'Models' do |ss|
# ss.source_files = 'ZRNewsModule/ZRNewsModule/Models/*{.swift}'
# ss.dependency 'ZRNews/Utility'
# end
# s.preserve_paths = "FilesToSave", "MoreFilesToSave"
# s.requires_arc = true
# s.xcconfig = { "HEADER_SEARCH_PATHS" => "$(SDKROOT)/usr/include/libxml2" }
s.frameworks = 'Foundation', 'UIKit'
s.dependency 'RxSwift'
s.dependency 'RxCocoa'
s.dependency 'RxDataSources'
s.dependency 'RxSwiftExt'
s.dependency 'AsyncSwift'
s.dependency 'Charts'
s.dependency 'SnapKit', '~> 5.0.0'
s.dependency 'SwiftMonkeyPaws', '~> 2.1.0'
s.swift_version = '5.0'
s.resource_bundles = {
'Resources' => ['HLTest/Resources/**/*']
}
end
<file_sep>/HLTest/0.0.2/HLTest.podspec
Pod::Spec.new do |s|
s.name = "HLTest"
s.version = "0.0.2"
s.summary = "HLTest_summary"
s.description = <<-DESC
HLTest1 by HanleyLee
DESC
s.homepage = "https://hanleylee.com"
s.license = "MIT"
s.author = { "HanleyLee" => "<EMAIL>" }
s.platform = :ios, "10.0"
s.source = { :git => '<EMAIL>:HanleyLee/HLTest.git', :tag => s.version, :submodules => true}
s.source_files = "HLTest/**/*.{swift}"
s.frameworks = 'Foundation', 'UIKit'
s.dependency 'RxSwift'
s.dependency 'RxCocoa'
s.dependency 'RxDataSources'
s.dependency 'RxSwiftExt'
s.dependency 'AsyncSwift'
s.dependency 'Charts'
s.dependency 'SnapKit', '~> 5.0.0'
s.dependency 'SwiftMonkeyPaws', '~> 2.1.0'
s.swift_version = '5.0'
s.resource_bundles = {
'Resources' => ['HLTest/Resources/**/*']
}
end
<file_sep>/HLUtils/0.0.2/HLUtils.podspec
Pod::Spec.new do |s|
s.name = 'HLUtils'
s.version = '0.0.2'
s.summary = 'HL 工具箱'
s.description = <<-DESC
工具箱
DESC
s.homepage = 'https://github.com/HanleyLee/HLUtils'
s.author = { "<NAME>" => "<EMAIL>" }
s.social_media_url = "https://twitter.com/Hanley_Lei"
s.license = { :type => "Apache-2.0", :file => "LICENSE" }
s.platform = :ios, '10.0'
s.requires_arc = true
s.author = 'hanleylee'
s.source = { :git => 'https://github.com/HanleyLee/HLUtil.git', :tag => s.version }
s.source_files = [
"Sources/HLUtils/**/*.swift"
]
s.frameworks = 'UIKit'
s.dependency 'ObjectMapper'
s.dependency 'SnapKit'
s.dependency 'RxSwift'
s.dependency 'RxCocoa'
s.swift_version = '5.3'
s.resource_bundles = {
'hl' => ['Sources/HLUtils/Resources/**/*']
}
end
|
13f390e8a64dc934a45087d6ecbc101062ca1fbf
|
[
"Ruby"
] | 3
|
Ruby
|
HanleyLee/HLPodSpecs
|
a35708d98088908e7cd91565d17de6dc3df30d68
|
6275ac79aa0bde106d58a1642d125b9ed63b8b39
|
refs/heads/main
|
<repo_name>uronsol/monke-business-scraper<file_sep>/README.md
# Solana Monkey Business Scraper and Dataset
- Output is in `output`
## Getting Started
- `yarn`
- `yarn start`
<file_sep>/pageScraper.js
const request = require("request");
const fs = require("fs");
const camelCase = require("camelcase");
function delay(time) {
return new Promise(function (resolve) {
setTimeout(resolve, time);
});
}
async function download(url, dest) {
/* Create an empty file where we can save data */
const file = fs.createWriteStream(dest);
/* Using Promises so that we can use the ASYNC AWAIT syntax */
await new Promise((resolve, reject) => {
request({
/* Here you should specify the exact link to the file you are trying to download */
uri: url,
gzip: true,
})
.pipe(file)
.on("finish", async () => {
console.log(`The file is finished downloading.`);
resolve();
})
.on("error", (error) => {
reject(error);
});
}).catch((error) => {
console.log(`Something happened: ${error}`);
});
}
const scraperObject = {
url: "https://howrare.is/smb",
async scraper(browser) {
let page = await browser.newPage();
console.log(`Navigating to ${this.url}...`);
// Navigate to the selected page
await page.goto(this.url);
await delay(1000);
// Wait for the required DOM to be rendered
async function scrapeCurrentPage() {
await page.waitForSelector(".main");
// Get pages for pagination
let paginationUrls = await page.$$eval(
"body > div > div > div.col-md-10 > nav:nth-child(4) > ul > li",
(links) => {
return links.map(
(_, index) =>
`https://howrare.is/smb/?page=${index}&ids=&sort_by=rank`
);
}
);
// Create promise to get individual item urls
let paginationPromise = (link) =>
new Promise(async (resolve, reject) => {
try {
let paginationPage = await browser.newPage();
console.log(`Navigating to ${link}`);
await paginationPage.goto(link);
await delay(1000);
let itemLinks = await paginationPage.$$eval(
"body > div > div > div.col-md-10 > div.nft-listing > a",
(links) => {
return links.map((link) => link.href);
}
);
await delay(1000);
resolve(itemLinks);
await paginationPage.close();
} catch (err) {
console.log(err);
reject([]);
}
});
let itemDetailUrls = [];
// Get link for the item detail page
for (let i = 0; i < paginationUrls.length; i++) {
let currentPageData = await paginationPromise(paginationUrls[i]);
itemDetailUrls = itemDetailUrls.concat(currentPageData);
}
page.close();
// Create promise to get individual item attributes
let detailPromise = (link) =>
new Promise(async (resolve, reject) => {
try {
let detailPage = await browser.newPage();
console.log(`Navigating to ${link}`);
await detailPage.goto(link);
const imageSrc = await detailPage.$$eval(
"body > div > div > div.col-md-4 > img",
(images) => {
return images[0].src;
}
);
const smbId = await detailPage.$$eval(
"body > div > div > div.col-md-4 > h3 > strong",
(ids) => {
return ids[0].innerText.replace("#", "");
}
);
let attributeNames = await detailPage.$$eval(
"body > div > div > div.col-md-8 > ul > li > span:first-child",
(attributes) => {
return attributes.map((attributeName) => {
return attributeName.innerText;
});
}
);
attributeNames = attributeNames.map((attrName) =>
camelCase(attrName.replace(":", ""))
);
let attributeValues = await detailPage.$$eval(
"body > div > div > div.col-md-8 > ul > li > div",
(attributes) => {
return attributes.map((attributeValue) => {
return attributeValue.innerText;
});
}
);
attributeValues = attributeValues.reduce(
(attrs, attrValue) => attrs.concat([attrValue.split("(")]),
[]
);
const attributes = attributeNames.reduce(
(attrObj, attributeName, index) => {
const currentAttributeValues = attributeValues[index];
const attributeValuesNormalized = currentAttributeValues.map(
(v) => {
if (v.indexOf("%)") !== -1) {
const normV = v.replace("%)", "").replace("(", "");
const percentValue = parseFloat(normV);
return percentValue;
}
return v.trim();
}
);
attrObj[attributeName] = {
value:
attributeValuesNormalized[0].length === 0
? null
: attributeValuesNormalized[0],
percentile: attributeValuesNormalized[1] || null,
};
return attrObj;
},
{}
);
resolve({
smbId,
imageSrc,
attributes,
});
await detailPage.close();
} catch (err) {
console.log(err);
reject([]);
}
});
let data = [];
// Get data from the detail pages
for (let i = 0; i < itemDetailUrls.length; i++) {
let currentPageData = await detailPromise(itemDetailUrls[i]);
data.push(currentPageData);
}
return data;
}
let data = await scrapeCurrentPage();
    // Remove any previous output file so stale entries are not kept around
    if (fs.existsSync("output/monkes.json")) {
      fs.unlinkSync("output/monkes.json");
    }
for (let i = 0; i < data.length; i++) {
const dataItem = data[i];
const fileName = dataItem.imageSrc.substring(
dataItem.imageSrc.lastIndexOf("/") + 1
);
const smbId = dataItem.smbId;
delete data[i].smbId;
data[i] = {
smbId,
imagePath: `output/images/${fileName}`,
...data[i],
};
}
    // writeFileSync returns undefined, so report success and return the data itself
    fs.writeFileSync("./output/monkes.json", JSON.stringify(data, null, 2));
    console.log(`Wrote ${data.length} entries to output/monkes.json`);
    return data;
},
};
module.exports = scraperObject;
|
b869d27b1e6acf7e2eaa420c90d2049b6d22c4ea
|
[
"Markdown",
"JavaScript"
] | 2
|
Markdown
|
uronsol/monke-business-scraper
|
d21a7eb47a6de877d331d44fcdb590885f773fe4
|
0993f21885aa4a76268ba91eb9b55acda9780994
|
refs/heads/master
|
<file_sep>// from data.js
var tableData = data;
// YOUR CODE HERE!
var submit = d3.select("#filter-btn");
var filterby = function(record, key, field) {
if (key ==''){
return record;
}
else if (key == field){
return record;
}
else {return false};
};
submit.on("click", function(){
d3.event.preventDefault();
var inputDate = d3.select("#datetime")
.property("value");
var inputCity = d3.select("#city")
.property("value")
.toLowerCase();
var inputState = d3.select("#state")
.property("value")
.toLowerCase();
var inputCountry = d3.select("#country")
.property("value")
.toLowerCase();
var inputShape = d3.select("#shape")
.property("value")
.toLowerCase();
var filteredData = data.map(function(d) {return filterby(d, inputDate, d.datetime)}).filter( Boolean);
filteredData = filteredData.map(function(d) {return filterby(d, inputCity, d.city)}).filter( Boolean);
filteredData = filteredData.map(function(d) {return filterby(d, inputState, d.state)}).filter( Boolean);
filteredData = filteredData.map(function(d) {return filterby(d, inputCountry, d.country)}).filter( Boolean);
filteredData = filteredData.map(function(d) {return filterby(d, inputShape, d.shape)}).filter( Boolean);
console.log(filteredData);
var appendRow = function(item){
var row = d3.select("tbody").append('tr');
row.append('td').text(item.datetime);
row.append('td').text(item.city);
row.append('td').text(item.state);
row.append('td').text(item.country);
row.append('td').text(item.shape);
row.append('td').text(item.durationMinutes);
row.append('td').text(item.comments);
}
filteredData.forEach(appendRow);
});
|
89611ee7ef7cea762054b5a7dbcff1e8beed46ee
|
[
"JavaScript"
] | 1
|
JavaScript
|
ovbondarenko/ufo-sightings
|
519a1977f747922ad0f68b0f5e4eaf0878bd6361
|
8fff3d6cec47326630a40f74c106a385788ad748
|
refs/heads/master
|
<file_sep>import math
def sumofsquares(num):
if num < 1:
return False
else:
sqroot = int(math.sqrt(num)//1)
for i in range(sqroot,0,-1):
for j in range(i,0,-1):
if (i**2 + (j)**2) == num:
# print(i,j)
return True
return False
# print(sumofsquares(41))
# print(sumofsquares(30))
# print(sumofsquares(17))
print(sumofsquares(-17))
# print(sumofsquares(0))
# print(sumofsquares(1))
# print(sumofsquares(2))
def wellbracketed(expr):
    # Track nesting depth: it must never go negative and must end at zero,
    # otherwise strings like ")(" would be accepted by a simple count comparison.
    depth = 0
    for ch in expr:
        if ch == "(":
            depth += 1
        elif ch == ")":
            depth -= 1
            if depth < 0:
                return False
    return depth == 0
# print(wellbracketed("22)"))
# print(wellbracketed("(a+b)(a-b)"))
print(wellbracketed("(a(b+c)-d)((e+f)"))
def rotatelist(lst,rotation):
if rotation>=len(lst):
shift = rotation%len(lst)
else:
shift = rotation
if shift == 0:
return lst
temp = lst[-shift:]+lst[:-shift]
return temp
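# e.g. rotatelist([1,2,3,4,5], 12) == [4, 5, 1, 2, 3], since 12 % 5 == 2 positions to the right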
# print(rotatelist([1,2,3,4,5],3))
# print(rotatelist([1,2,3,4,5],1))
print(rotatelist([1,2,3,4,5],12))
<file_sep># Histogram
'''
Write a Python function histogram(l) that takes as input a list of integers with repetitions and returns a list of pairs as follows:
for each number n that appears in l, there should be exactly one pair (n,r) in the list returned by the function, where r is the number of repetitions of n in l.
the final list should be sorted in ascending order by r, the number of repetitions. For numbers that occur with the same number of repetitions, arrange the pairs in ascending order of the value of the number.
For instance:
>>> histogram([13,12,11,13,14,13,7,7,13,14,12])
[(11, 1), (7, 2), (12, 2), (14, 2), (13, 4)]
>>> histogram([7,12,11,13,7,11,13,14,12])
[(14, 1), (7, 2), (11, 2), (12, 2), (13, 2)]
>>> histogram([13,7,12,7,11,13,14,13,7,11,13,14,12,14,14,7])
[(11, 2), (12, 2), (7, 4), (13, 4), (14, 4)]
'''
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
def histogram(data):
data.sort()
if len(data) < 1:
return []
final_list = []
frequency_dict = {}
current = data[0]
count = 0
for element in data:
if element != current:
if count not in frequency_dict:
frequency_dict[count] = []
frequency_dict[count].append(current)
count = 0
current = element
count += 1
else:
count += 1
if count not in frequency_dict:
frequency_dict[count] = []
frequency_dict[count].append(current)
temp = list(frequency_dict.keys())
temp.sort()
for i in temp:
frequency_dict[i].sort()
for j in frequency_dict[i]:
final_list.append((j, i))
return final_list
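# For comparison only (not the course solution): the same result can be produced with
# collections.Counter by sorting the (value, count) pairs on (count, value).
from collections import Counter
def histogram_alt(l):
    return sorted(Counter(l).items(), key=lambda pair: (pair[1], pair[0]))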
# TRANSCRIPT
'''
A college maintains academic information about students in three separate lists.
Course details: A list of pairs of form (coursecode,coursename), where both entries are strings.
For instance, [("MA101","Calculus"),("PH101","Mechanics"),("HU101","English")]
Student details: A list of pairs of form (rollnumber,name), where both entries are strings.
For instance, [("UGM2018001","<NAME>"),("UGP2018132","<NAME>")]
Grades: A list of triples of the form (rollnumber,coursecode,grade), where all entries are strings.
For instance, [("UGM2018001", "MA101", "AB"), ("UGP2018132", "PH101", "B"), ("UGM2018001", "PH101", "B")].
You may assume that each roll number and course code in the grade list appears in the student details and course details, respectively.
Your task is to write a function transcript(coursedetails,studentdetails,grades) that takes these three lists as input and
produces consolidated grades for each student. Each of the input lists may have its entries listed in arbitrary order.
Each entry in the returned list should be a tuple of the form
(rollnumber, name,[(coursecode_1,coursename_1,grade_1),...,(coursecode_k,coursename_k,grade_k)])
where the student has grades for k >= 1 courses reported in the input list grades.
The output list should be organized as follows.
The tuples should be sorted in ascending order by rollnumber
Each student's grades should be sorted in ascending order by coursecode
For instance:
>>> transcript([("MA101","Calculus"),("PH101","Mechanics"),("HU101","English")],[("UGM2018001","<NAME>"),("UGP2018132","Neha Talwar")],[("UGM2018001","MA101","AB"),("UGP2018132","PH101","B"),("UGM2018001","PH101","B")])
[('UGM2018001', '<NAME>', [('MA101', 'Calculus', 'AB'), ('PH101', 'Mechanics', 'B')]), ('UGP2018132', 'Neha Talwar', [('PH101', 'Mechanics', 'B')])]
>>> transcript([("T1","Test 1"),("T2","Test 2"),("T3","Test 3")],[("Opener","<NAME>"),("Captain","<NAME>"),("No3","Cheteshwar Pujara")],[("Opener","T1","14"),("Captain","T1","33"),("No3","T1","30"),("Opener","T2","55") ,("Captain","T2","158"),("No3","T2","19"), ("Opener","T3","33"),("Captain","T3","95"),("No3","T3","51")])
[('Captain', '<NAME>', [('T1', 'Test 1', '33'), ('T2', 'Test 2', '158'), ('T3', 'Test 3', '95')]), ('No3', 'Cheteshwar Pujara', [('T1', 'Test 1', '30'), ('T2', 'Test 2', '19'), ('T3', 'Test 3', '51')]), ('Opener', '<NAME>', [('T1', 'Test 1', '14'), ('T2', 'Test 2', '55'), ('T3', 'Test 3', '33')])]
'''
def transcript(coursedetails, studentdetails, grades):
course_dict = {}
student_dict = {}
for i in coursedetails:
course_dict[i[0]] = i[1]
for i in studentdetails:
student_dict[i[0]] = i[1]
roll_no = []
for roll_nos in student_dict.keys():
roll_no.append(roll_nos)
roll_no.sort()
grades.sort()
final_list = []
for i in roll_no:
individual_tuple = ()
individual_tuple += (i, student_dict[i])
grade_list = []
for j in grades:
if j[0] == i:
grade_list.append((j[1], course_dict[j[1]], j[2]))
individual_tuple += (grade_list,)
final_list.append(individual_tuple)
return final_list
<file_sep>score_dict = {}
data = input()
while (data != ''):
data = data.split(":")
win = data[0]
los = data[1]
set_won = game_won = set_lose = set_win = 0
scores = data[2].split(",")
if win not in score_dict.keys():
score_dict[win] = [0] * 6
if los not in score_dict.keys():
score_dict[los] = [0] * 6
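# Each player's entry is a list of six counters, used below as:
# [0] matches won that ran to more than 3 sets, [1] matches won in 3 sets or fewer,
# [2] sets won, [3] games won, [4] sets lost, [5] games lost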
if len(scores) > 3:
score_dict[win][0] += 1
else:
score_dict[win][1] += 1
for score in scores:
score = list(map(int,score.split("-")))
if score[0] > score[1]:
score_dict[win][2] += 1
score_dict[los][4] += 1
else:
score_dict[los][2] += 1
score_dict[win][4] += 1
score_dict[win][3] += score[0]
score_dict[win][5] += score[1]
score_dict[los][5] += score[0]
score_dict[los][3] += score[1]
data = input()
temp = sorted(score_dict,key = lambda x: (score_dict[x][0],score_dict[x][1],score_dict[x][2],score_dict[x][3]), reverse = True)
for i in temp:
print(i,end='')
for j in score_dict[i]:
print('',j,end='')
print() <file_sep>from random import randint
lst = []
for i in range(20):
lst.append(randint(-100,200))
def selectionSort(lst):
    for i in range(len(lst)):
        # find the position of the smallest remaining element
        minpos = i
        for j in range(i, len(lst)):
            if lst[j] < lst[minpos]:
                minpos = j
        # swap it into slot i only after the scan is finished
        (lst[i], lst[minpos]) = (lst[minpos], lst[i])
selectionSort(lst)
print(lst)<file_sep>def matmult(matA, matB):
matA_rows = len(matA)
matA_columns = len(matA[0])
matB_rows = len(matB)
matB_columns = len(matB[0])
assert (matA_columns == matB_rows), "Matrix Multiplication not possible"
matC_rows = matA_rows
matC_columns = matB_columns
matC = []
for i in range(matC_rows):
matC.append([0]*matC_columns)
for r in range(matC_rows):
for c in range(matC_columns):
for k in range(matB_rows):
matC[r][c] += matA[r][k] * matB[k][c]
return matC
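# Quick sanity check for the triple loop above:
# matmult([[1, 2], [3, 4]], [[5, 6], [7, 8]]) == [[19, 22], [43, 50]]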
def ascending(lst):
if len(lst) < 2:
return True
small = lst[0]
for i in range(1, len(lst)):
if small > lst[i]:
return False
small = lst[i]
return True
'''
def alternating(lst):
if lst == [] or len(lst) < 3:
return True
odd_lst = []
even_lst = []
even_flag = True
odd_flag = True
for element in range(0, len(lst), 2):
even_lst.append(lst[element])
for element in range(1, len(lst), 2):
odd_lst.append(lst[element])
if len(even_lst) < 2 or len(odd_lst) < 2:
return True
# computing even list
dec = even_lst[1]
inc = even_lst[0]
if dec >= inc:
even_flag = False
else:
for i in range(0, (len(even_lst)),2):
if even_lst[i] != inc:
even_flag = False
break
for i in range(1, (len(even_lst)),2):
if even_lst[i] != dec:
even_flag = False
break
# computing odd list
dec = odd_lst[1]
inc = odd_lst[0]
if dec >= inc:
odd_flag = False
else:
for i in range(0, (len(odd_lst)),2):
if odd_lst[i] != inc:
odd_flag = False
break
for i in range(1, (len(odd_lst)),2):
if odd_lst[i] != dec:
odd_flag = False
break
if odd_flag or even_flag:
return True
else:
return False
'''
def alternating(lst):
if len(lst) < 3:
return True
prev = lst[0]
i_d = False
d_i = True
if prev > lst[1]:
i_d = True
else:
d_i = True
if i_d:
inc = False
for i in range(1, len(lst)):
current = lst[i]
if inc:
if not (current > prev):
return False
inc = False
prev = current
else:
if not (current < prev):
return False
inc = True
prev = current
return True
else:
inc = True
for i in range(1,len(lst)):
current = lst[i]
if inc:
if not (current > prev):
return False
inc = False
prev = current
else:
if not (current < prev):
return False
inc = True
prev = current
return True
print(alternating([]))
print(alternating([1,3,2,3,1,5]))
print(alternating([3,2,3,1,5]))
print(alternating([3,2,2,1,5]))
print(alternating([3,2,1,3,5]))
|
4e12a1145b82c27a2ffcbb54ea50d2559b4bea46
|
[
"Python"
] | 5
|
Python
|
ravirajawasthi/python_pdsa_nptel
|
74c851610fcd33804c419baaf56414f3546b91e0
|
71984caa7bdbcb74fe56cc78f1fb4111b1adaa60
|
refs/heads/master
|
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using FluentNHibernate.Mapping;
namespace AgendaBSS.Models
{
public class TarefaMap: ClassMap<Tarefa>
{
public TarefaMap()
{
Id(x => x.Id);
Map(x => x.Titulo).Length(100);
Map(x => x.Data_Cadastro);
Map(x => x.Data_Entrega);
Map(x => x.Descricao).Length(100);
HasMany(x => x.ListaAnexos);
HasMany(x => x.ListaCheckList);
Table("Tarefa");
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using AgendaBSS.Models;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using NHibernate;
using NHibernate.Linq;
using System.Web;
namespace AgendaBSS.Controller
{
public class AnexosController: ControllerBase
{
//Get Anexos Index.
public ActionResult Index()
{
using (NHibernate.ISession session = NHibernateHelper.AbreSessao())
{
var anexo = session.Query<Anexos>().ToList();
return View(anexo);
}
}
//Get: Anexos/Details/5
public ActionResult Details(int id)
{
using (NHibernate.ISession session = NHibernateHelper.AbreSessao())
{
var anexo = session.Get<Anexos>(id);
return View(anexo);
}
}
DirectoryInfo dirArquivos = new DirectoryInfo(Server.MapPath("~/Upload/"));
//POST Anexos/Create
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Create(Anexos anexo, HttpPostedFileBase file = null)
{
try
{
using (NHibernate.ISession session = NHibernateHelper.AbreSessao())
{
if (file != null)
{
var arquivo = Request.Files[0];
var nomeArquivo = "Arq" + DateTime.Now.Millisecond.ToString() + ".pdf";
if (arquivo != null && arquivo.ContentLength > 0)
{
anexo.Caminho_Arquivo = Path.Combine(dirArquivos.FullName, file.FileName);
var path = Path.Combine(dirArquivos.FullName, Path.GetFileName(file.FileName));
arquivo.SaveAs(path);
}
}
using (ITransaction transacao = session.BeginTransaction())
{
session.Save(anexo);
transacao.Commit();
}
}
return RedirectToAction(nameof(Index));
}
catch
{
return View();
}
}
//Get Anexos/Edit/5
public ActionResult Edit(int id)
{
using (NHibernate.ISession session = NHibernateHelper.AbreSessao())
{
var anexo = session.Get<Anexos>(id);
return View(anexo);
}
}
//Post Anexos/Edit/5
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Edit(int id, Anexos anexo)
{
try
{
using (NHibernate.ISession session = NHibernateHelper.AbreSessao())
{
var anexoAlterado = session.Get<Anexos>(id);
anexoAlterado.Id = anexo.Id;
anexoAlterado.Caminho_Arquivo = anexo.Caminho_Arquivo;
anexoAlterado._Tarefa = anexo._Tarefa;
using (ITransaction transacao = session.BeginTransaction())
{
session.Save(anexoAlterado);
transacao.Commit();
}
}
return RedirectToAction(nameof(Index));
}
catch
{
return View();
}
}
//Get Anexos/Delete/5
public ActionResult Delete(int id)
{
using (NHibernate.ISession session = NHibernateHelper.AbreSessao())
{
var anexo = session.Get<Anexos>(id);
return View(anexo);
}
}
//Post Anexos/Delete/5
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Delete(int id, Anexos anexo)
{
try
{
using (NHibernate.ISession session = NHibernateHelper.AbreSessao())
{
using (ITransaction transacao = session.BeginTransaction())
{
session.Delete(anexo);
transacao.Commit();
}
}
return RedirectToAction(nameof(Index));
}
catch
{
return View();
}
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace AgendaBSS.Models
{
public class Tarefa
{
private int id;
private String titulo;
private DateTime data_cadastro;
private DateTime data_entrega;
private String descricao;
private IList<Anexos> listaanexo;
private IList<Checklist> listacheckelist;
public virtual int Id
{
get;
set;
}
public virtual String Titulo
{
get;
set;
}
public virtual DateTime Data_Cadastro
{
get;
set;
}
public virtual DateTime Data_Entrega
{
get;
set;
}
public virtual String Descricao
{
get;
set;
}
public virtual IList<Anexos> ListaAnexos
{
get;
set;
}
public virtual IList<Checklist> ListaCheckList
{
get;
set;
}
public Tarefa() { }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using FluentNHibernate.Mapping;
namespace AgendaBSS.Models
{
public class AnexosMap: ClassMap<Anexos>
{
public AnexosMap()
{
Id(x => x.Id);
Map(x => x.Caminho_Arquivo).Length(200);
References(x => x._Tarefa).Column("idtarefa");
Join("Tarefa", m => {
m.Fetch.Join();
m.KeyColumn("idtarefa");
m.Map(t => t.Id).Nullable();
});
Table("Anexos");
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using AgendaBSS.Models;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using NHibernate;
using NHibernate.Linq;
namespace AgendaBSS.Controller
{
public class CheckListController: ControllerBase
{
//Get CheckList Index.
public ActionResult Index()
{
using (NHibernate.ISession session = NHibernateHelper.AbreSessao())
{
var checklist = session.Query<Checklist>().ToList();
return View(checklist);
}
}
//Get: CheckList/Detail/5
public ActionResult Details(int id)
{
using (NHibernate.ISession session = NHibernateHelper.AbreSessao())
{
var checklist = session.Get<Checklist>(id);
return View(checklist);
}
}
//POST CheckList/Create
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Create(Checklist checklist)
{
try
{
using (NHibernate.ISession session = NHibernateHelper.AbreSessao())
{
using (ITransaction transacao = session.BeginTransaction())
{
session.Save(checklist);
transacao.Commit();
}
}
return RedirectToAction(nameof(Index));
}
catch
{
return View();
}
}
//Get CheckList/Edit/5
public ActionResult Edit(int id)
{
using (NHibernate.ISession session = NHibernateHelper.AbreSessao())
{
var checklist = session.Get<Checklist>(id);
return View(checklist);
}
}
//Post CheckList/Edit/5
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Edit(int id, Checklist checklist)
{
try
{
using (NHibernate.ISession session = NHibernateHelper.AbreSessao())
{
var checklistAlterado = session.Get<Checklist>(id);
checklistAlterado.Id = checklist.Id;
checklistAlterado.CheckList = checklist.CheckList;
checklistAlterado._Tarefa = checklist._Tarefa;
using (ITransaction transacao = session.BeginTransaction())
{
session.Save(checklistAlterado);
transacao.Commit();
}
}
return RedirectToAction(nameof(Index));
}
catch
{
return View();
}
}
//Get CheckList/Delete/5
public ActionResult Delete(int id)
{
using (NHibernate.ISession session = NHibernateHelper.AbreSessao())
{
var checklist = session.Get<Checklist>(id);
return View(checklist);
}
}
//Post CheckList/Delete/5
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Delete(int id, Checklist checklist)
{
try
{
using (NHibernate.ISession session = NHibernateHelper.AbreSessao())
{
using (ITransaction transacao = session.BeginTransaction())
{
session.Delete(checklist);
transacao.Commit();
}
}
return RedirectToAction(nameof(Index));
}
catch
{
return View();
}
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace AgendaBSS.Models
{
public class Anexos
{
private int id;
private String caminho_arquivo;
private Tarefa _tarefa;
public virtual int Id
{
get;
set;
}
public virtual String Caminho_Arquivo
{
get;
set;
}
public virtual Tarefa _Tarefa
{
get;
set;
}
public Anexos() { }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace AgendaBSS.Models
{
public class Checklist
{
private int id;
private String checklist;
private Tarefa _tarefa;
public virtual int Id
{
get;
set;
}
public virtual String CheckList
{
get;
set;
}
public virtual Tarefa _Tarefa
{
get;
set;
}
public Checklist() { }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using FluentNHibernate.Cfg;
using FluentNHibernate.Cfg.Db;
using NHibernate;
using NHibernate.Tool.hbm2ddl;
namespace AgendaBSS.Models
{
public class NHibernateHelper
{
public static ISession AbreSessao()
{
ISessionFactory sessionFactory = Fluently.Configure().Database(PostgreSQLConfiguration.PostgreSQL82.
ConnectionString("Server=localhost; Port=5432; User Id=postgres; Password=<PASSWORD>; Database=AgendaDB").ShowSql()).
Mappings(m => m.FluentMappings.AddFromAssemblyOf<Tarefa>()).
Mappings(m => m.FluentMappings.AddFromAssemblyOf<Anexos>()).
Mappings(m => m.FluentMappings.AddFromAssemblyOf<Checklist>()).
ExposeConfiguration(cfg => new SchemaExport(cfg).Create(false, false)).BuildSessionFactory();
return sessionFactory.OpenSession();
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using AgendaBSS.Models;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using NHibernate;
using NHibernate.Linq;
namespace AgendaBSS.Controller
{
public class TarefaController : ControllerBase
{
// GET: Tarefa
public ActionResult Index()
{
using (NHibernate.ISession session = NHibernateHelper.AbreSessao())
{
var tarefas = session.Query<Tarefa>().ToList();
return View(tarefas);
}
}
// GET: Tarefa/Details/5
public ActionResult Details(int id)
{
using (NHibernate.ISession session = NHibernateHelper.AbreSessao())
{
var tarefa = session.Get<Tarefa>(id);
return View(tarefa);
}
}
// POST: Tarefa/Create
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Create(Tarefa tarefa)
{
try
{
// TODO: Add insert logic here
using (NHibernate.ISession session = NHibernateHelper.AbreSessao())
{
using (ITransaction transacao = session.BeginTransaction())
{
session.Save(tarefa);
transacao.Commit();
}
}
return RedirectToAction(nameof(Index));
}
catch
{
return View();
}
}
// GET: Tarefa/Edit/5
public ActionResult Edit(int id)
{
using (NHibernate.ISession session = NHibernateHelper.AbreSessao())
{
var tarefa = session.Get<Tarefa>(id);
return View(tarefa);
}
}
// POST: Tarefa/Edit/5
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Edit(int id, Tarefa tarefa)
{
try
{
// TODO: Add update logic here
using (NHibernate.ISession session = NHibernateHelper.AbreSessao())
{
var tarefaAlterado = session.Get<Tarefa>(id);
tarefaAlterado.Id = tarefa.Id;
tarefaAlterado.Titulo = tarefa.Titulo;
tarefaAlterado.Descricao = tarefa.Descricao;
tarefaAlterado.Data_Cadastro = tarefa.Data_Cadastro;
tarefaAlterado.Data_Entrega = tarefa.Data_Entrega;
using (ITransaction transacao = session.BeginTransaction())
{
session.Save(tarefaAlterado);
transacao.Commit();
}
}
return RedirectToAction(nameof(Index));
}
catch
{
return View();
}
}
// GET: Tarefa/Delete/5
public ActionResult Delete(int id)
{
using (NHibernate.ISession session = NHibernateHelper.AbreSessao())
{
var tarefa = session.Get<Tarefa>(id);
return View(tarefa);
}
}
// POST: Tarefa/Delete/5
[HttpPost]
[ValidateAntiForgeryToken]
public ActionResult Delete(int id, Tarefa tarefa)
{
try
{
// TODO: Add delete logic here
using (NHibernate.ISession session = NHibernateHelper.AbreSessao())
{
using (ITransaction transacao = session.BeginTransaction())
{
session.Delete(tarefa);
transacao.Commit();
}
}
return RedirectToAction(nameof(Index));
}
catch
{
return View();
}
}
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using FluentNHibernate.Mapping;
namespace AgendaBSS.Models
{
public class CheckListMap: ClassMap<Checklist>
{
public CheckListMap()
{
Id(x => x.Id);
Map(x => x.CheckList).Length(200);
References(x => x._Tarefa).Column("idtarefa");
Join("Tarefa", m => {
m.Fetch.Join();
m.KeyColumn("idtarefa");
m.Map(t => t.Id).Nullable();
});
Table("Checkedlist");
}
}
}
|
2bb3c1fa74f477ba45504fe09850e2b13dcaf7ad
|
[
"C#"
] | 10
|
C#
|
carlosDeveloperDelphi/projetos
|
dd8a0a3ef3bd9760c3718c9ee5a67a055969d5e4
|
585ea78576bd92b99a5ddb7ba591ab5658f64ff8
|
refs/heads/master
|
<repo_name>Evgeny985/crud<file_sep>/src/main/java/ru/gruzdov/mvc/dao/DepartmentDAO.java
package ru.gruzdov.mvc.dao;
import ru.gruzdov.mvc.model.Department;
import java.util.List;
public interface DepartmentDAO {
void addDepartment(Department department);
Department getDepartmentById(Integer id);
void updateDepartment(Department department);
void deleteDepartment(Integer id);
List<Department> getAllDepartmentByCityId(Integer cityId);
}
<file_sep>/src/main/java/ru/gruzdov/mvc/model/Department.java
package ru.gruzdov.mvc.model;
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import javax.persistence.*;
import java.util.List;
@NoArgsConstructor
@AllArgsConstructor
@Data
@Entity
@Table(name = "DEPARTMENT")
public class Department {
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
@Column(name = "DEPARTMENT_ID")
private Integer id;
@Column(name = "DEPARTMENT_NAME")
private String name;
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "CITY_ID")
private City city;
@OneToMany(mappedBy = "department", cascade = CascadeType.ALL)
private List<Employee> employee;
}
<file_sep>/src/main/java/ru/gruzdov/mvc/dao/CityDAOImpl.java
package ru.gruzdov.mvc.dao;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
import ru.gruzdov.mvc.model.City;
import java.util.List;
@Repository
public class CityDAOImpl implements CityDAO {
@Autowired
private SessionFactory sessionFactory;
@Override
public void addCity(City city) {
sessionFactory.getCurrentSession().persist(city);
}
@Override
public City getCityById(Integer id) {
return sessionFactory.getCurrentSession().get(City.class, id);
}
@Override
public void updateCity(City city) {
sessionFactory.getCurrentSession().update(city);
}
@Override
public void deleteCity(Integer id) {
sessionFactory.getCurrentSession().delete(getCityById(id));
}
@Override
@SuppressWarnings("unchecked")
public List<City> getAllCity() {
Session session = sessionFactory.getCurrentSession();
return session.createQuery("select c from City c").list();
}
}
<file_sep>/src/main/java/ru/gruzdov/mvc/service/EmployeeServiceImpl.java
package ru.gruzdov.mvc.service;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import ru.gruzdov.mvc.dao.EmployeeDAO;
import ru.gruzdov.mvc.model.Employee;
import java.util.List;
@Service
@Transactional
public class EmployeeServiceImpl implements EmployeeService {
@Autowired
private EmployeeDAO employeeDAO;
@Override
public void addEmployee(Employee employee) {
employeeDAO.addEmployee(employee);
}
@Override
public Employee getEmployeeById(Integer id) {
return employeeDAO.getEmployeeById(id);
}
@Override
public void updateEmployee(Employee employee) {
employeeDAO.updateEmployee(employee);
}
@Override
public void deleteEmployee(Integer id) {
employeeDAO.deleteEmployee(id);
}
@Override
public List<Employee> getAllEmployeeByDepartmentId(Integer departmentId) {
return employeeDAO.getAllEmployeeByDepartmentId(departmentId);
}
}
<file_sep>/src/main/java/ru/gruzdov/mvc/controller/EmployeeController.java
package ru.gruzdov.mvc.controller;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.servlet.ModelAndView;
import ru.gruzdov.mvc.model.City;
import ru.gruzdov.mvc.model.Department;
import ru.gruzdov.mvc.model.Employee;
import ru.gruzdov.mvc.service.CityService;
import ru.gruzdov.mvc.service.DepartmentService;
import ru.gruzdov.mvc.service.EmployeeService;
@Controller
public class EmployeeController {
@Autowired
private DepartmentService departmentService;
@Autowired
private EmployeeService employeeService;
@Autowired
private CityService cityService;
@GetMapping(value = "/employee/{id}")
public ModelAndView getAllEmployee(@PathVariable Integer id) {
ModelAndView modelAndView = new ModelAndView();
modelAndView.addObject("employeeFromServer", employeeService.getAllEmployeeByDepartmentId(id));
modelAndView.setViewName("employee");
return modelAndView;
}
@GetMapping(value = "/updateEmployee/{id}")
public ModelAndView updatePage(@PathVariable Integer id) {
ModelAndView modelAndView = new ModelAndView();
modelAndView.addObject("employee", employeeService.getEmployeeById(id));
modelAndView.setViewName("updateEmployee");
return modelAndView;
}
@PostMapping(value = "/updateEmployee")
public ModelAndView updateEmployee(@ModelAttribute("employee") Employee employee,
Integer departmentId, Integer cityId) {
City city = cityService.getCityById(cityId);
Department department = departmentService.getDepartmentById(departmentId);
ModelAndView modelAndView = new ModelAndView();
if (city != null && department != null) {
department.setCity(city);
employee.setDepartment(department);
employeeService.updateEmployee(employee);
modelAndView.setViewName("redirect:/employee/" + departmentId);
} else {
modelAndView.setViewName("Error");
}
return modelAndView;
}
@GetMapping(value = "/addEmployee")
public ModelAndView addPage() {
ModelAndView modelAndView = new ModelAndView();
modelAndView.setViewName("addEmployee");
return modelAndView;
}
@PostMapping(value = "/addEmployee")
public ModelAndView addEmployee(@ModelAttribute("employee") Employee employee,
Integer departmentId) {
Department department = departmentService.getDepartmentById(departmentId);
ModelAndView modelAndView = new ModelAndView();
if (department != null) {
employee.setDepartment(department);
employeeService.addEmployee(employee);
modelAndView.setViewName("redirect:/employee/" + departmentId);
} else {
modelAndView.setViewName("Error");
}
return modelAndView;
}
@GetMapping(value = "/deleteEmployee/{id}/{departmentId}")
public ModelAndView deleteEmployee(@PathVariable("id") Integer id,
@PathVariable("departmentId") Integer departmentId) {
employeeService.deleteEmployee(id);
ModelAndView modelAndView = new ModelAndView();
modelAndView.setViewName("redirect:/employee/"+departmentId);
return modelAndView;
}
}
<file_sep>/src/main/java/ru/gruzdov/mvc/service/CityServiceImpl.java
package ru.gruzdov.mvc.service;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import ru.gruzdov.mvc.dao.CityDAO;
import ru.gruzdov.mvc.model.City;
import java.util.List;
@Service
@Transactional
public class CityServiceImpl implements CityService {
@Autowired
private CityDAO cityDAO;
@Override
public void addCity(City city) {
cityDAO.addCity(city);
}
@Override
public City getCityById(Integer id) {
return cityDAO.getCityById(id);
}
@Override
public void updateCity(City city) {
cityDAO.updateCity(city);
}
@Override
public void deleteCity(Integer id) {
cityDAO.deleteCity(id);
}
@Override
public List<City> getAllCity() {
return cityDAO.getAllCity();
}
}
<file_sep>/src/main/java/ru/gruzdov/mvc/service/DepartmentServiceImpl.java
package ru.gruzdov.mvc.service;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import ru.gruzdov.mvc.dao.DepartmentDAO;
import ru.gruzdov.mvc.model.Department;
import java.util.List;
@Service
@Transactional
public class DepartmentServiceImpl implements DepartmentService{
@Autowired
private DepartmentDAO departmentDAO;
@Override
public void addDepartment(Department department) {
departmentDAO.addDepartment(department);
}
@Override
public Department getDepartmentById(Integer id) {
return departmentDAO.getDepartmentById(id);
}
@Override
public void updateDepartment(Department department) {
departmentDAO.updateDepartment(department);
}
@Override
public void deleteDepartment(Integer id) {
departmentDAO.deleteDepartment(id);
}
@Override
public List<Department> getAllDepartmentByCityId(Integer cityId) {
return departmentDAO.getAllDepartmentByCityId(cityId);
}
}
<file_sep>/src/main/java/ru/gruzdov/mvc/controller/DepartmentController.java
package ru.gruzdov.mvc.controller;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.servlet.ModelAndView;
import ru.gruzdov.mvc.model.City;
import ru.gruzdov.mvc.model.Department;
import ru.gruzdov.mvc.service.CityService;
import ru.gruzdov.mvc.service.DepartmentService;
@Controller
public class DepartmentController {
@Autowired
private DepartmentService departmentService;
@Autowired
private CityService cityService;
@GetMapping(value = "/department/{id}")
public ModelAndView getAllDepartment(@PathVariable Integer id) {
ModelAndView modelAndView = new ModelAndView();
modelAndView.addObject("departmentFromServer", departmentService.getAllDepartmentByCityId(id));
modelAndView.setViewName("department");
return modelAndView;
}
@GetMapping(value = "/updateDepartment/{id}")
public ModelAndView updatePage(@PathVariable Integer id) {
ModelAndView modelAndView = new ModelAndView();
modelAndView.addObject("department", departmentService.getDepartmentById(id));
modelAndView.setViewName("updateDepartment");
return modelAndView;
}
@PostMapping(value = "/updateDepartment")
public ModelAndView updateDepartment(@ModelAttribute("department") Department department,
Integer cityId) {
ModelAndView modelAndView = new ModelAndView();
City city = cityService.getCityById(cityId);
if (city!=null) {
department.setCity(city);
departmentService.updateDepartment(department);
modelAndView.setViewName("redirect:/department/" + cityId);
} else {
modelAndView.setViewName("Error");
}
return modelAndView;
}
@GetMapping(value = "/addDepartment")
public ModelAndView addPage() {
ModelAndView modelAndView = new ModelAndView();
modelAndView.setViewName("addDepartment");
return modelAndView;
}
@PostMapping(value = "/addDepartment")
public ModelAndView addDepartment(@ModelAttribute("department") Department department,
Integer cityId) {
ModelAndView modelAndView = new ModelAndView();
City city = cityService.getCityById(cityId);
if (city!=null) {
department.setCity(city);
departmentService.addDepartment(department);
modelAndView.setViewName("redirect:/department/" + cityId);
} else {
modelAndView.setViewName("Error");
}
return modelAndView;
}
@GetMapping(value = "/deleteDepartment/{id}/{cityId}")
public ModelAndView deleteDepartment(@PathVariable("id") Integer id,
@PathVariable("cityId") Integer cityId) {
departmentService.deleteDepartment(id);
ModelAndView modelAndView = new ModelAndView();
modelAndView.setViewName("redirect:/department/" + cityId);
return modelAndView;
}
}
<file_sep>/src/main/java/ru/gruzdov/mvc/dao/EmployeeDAOImpl.java
package ru.gruzdov.mvc.dao;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
import ru.gruzdov.mvc.model.Employee;
import java.util.List;
@Repository
public class EmployeeDAOImpl implements EmployeeDAO {
@Autowired
SessionFactory sessionFactory;
@Override
public void addEmployee(Employee employee) {
sessionFactory.getCurrentSession().persist(employee);
}
@Override
public Employee getEmployeeById(Integer id) {
return sessionFactory.getCurrentSession().get(Employee.class, id);
}
@Override
public void updateEmployee(Employee employee) {
sessionFactory.getCurrentSession().update(employee);
}
@Override
public void deleteEmployee(Integer id) {
sessionFactory.getCurrentSession().delete(getEmployeeById(id));
}
@Override
@SuppressWarnings("unchecked")
public List<Employee> getAllEmployeeByDepartmentId(Integer departmentId) {
Session session = sessionFactory.getCurrentSession();
return session.createQuery("select e from Employee e join e.department d where d.id=:id").setParameter("id", departmentId).list();
}
}
<file_sep>/src/main/java/ru/gruzdov/mvc/dao/CityDAO.java
package ru.gruzdov.mvc.dao;
import ru.gruzdov.mvc.model.City;
import java.util.List;
public interface CityDAO {
void addCity(City city);
City getCityById(Integer id);
void updateCity(City city);
void deleteCity(Integer id);
List<City> getAllCity();
}
<file_sep>/src/main/java/ru/gruzdov/mvc/service/DepartmentService.java
package ru.gruzdov.mvc.service;
import ru.gruzdov.mvc.model.Department;
import java.util.List;
public interface DepartmentService {
void addDepartment(Department department);
Department getDepartmentById(Integer id);
void updateDepartment(Department department);
void deleteDepartment(Integer id);
List<Department> getAllDepartmentByCityId(Integer cityId);
}
|
9bdabce6d5ae17fad049568ab91f55fb9887b16e
|
[
"Java"
] | 11
|
Java
|
Evgeny985/crud
|
87bc5effdaca36c875b18616c0d55c48f0c01bd2
|
497827fe429bda1e02a87e6d12bd0911d91610ff
|
refs/heads/master
|
<file_sep>function addNumbers(num1, num2) {
return num1 + num2;
}
describe('addNumbers', () => {
it('add two numbers', () => {
expect(addNumbers(2, 3)).toEqual(5);
});
});
<file_sep>import { InternalServerErrorException, Logger } from '@nestjs/common';
import { User } from '../auth/user.entity';
import { EntityRepository, Repository } from 'typeorm';
import { GetTasksFilterDTO } from './dto/get-tasks-filter.dto';
import { Task } from './task.entity';
@EntityRepository(Task)
export class TaskRepository extends Repository<Task> {
private logger = new Logger('TaskRepository');
async getTasks(filterDTO: GetTasksFilterDTO, user: User): Promise<Task[]> {
const { status, search } = filterDTO;
const query = this.createQueryBuilder('task');
query.where({ user });
if (status) {
query.andWhere('task.status=:status', { status });
}
if (search) {
query.andWhere(
'(LOWER(task.title) LIKE LOWER(:search) OR LOWER(task.description) LIKE LOWER(:search))',
{ search: `%${search}%` },
);
}
try {
const tasks = await query.getMany();
return tasks;
} catch (error) {
this.logger.error(
`user "${user.username}" failed to get his tasks`,
error.stack,
);
throw new InternalServerErrorException();
}
}
}
<file_sep>import { NotFoundException } from '@nestjs/common';
import { Test } from '@nestjs/testing';
import { TaskStatus } from './task-status.enum';
import { TaskRepository } from './tasks.repositry';
import { TasksService } from './tasks.service';
const mockTasksRepository = () => ({
getTasks: jest.fn(),
findOne: jest.fn(),
});
const mockUser = {
username: 'mohamed',
password: '<PASSWORD>',
id: 'someid',
tasks: [],
};
const mockTaskId = 'someId';
const mockTask = {
title: 'someTitle',
description: 'someDescription',
id: 'someId',
status: TaskStatus.OPEN,
};
describe('TaskService', () => {
let tasksService: TasksService;
let tasksRepository;
beforeEach(async () => {
const module = await Test.createTestingModule({
providers: [
TasksService,
{ provide: TaskRepository, useFactory: mockTasksRepository },
],
}).compile();
tasksService = module.get(TasksService);
tasksRepository = module.get(TaskRepository);
});
describe('getTasks', () => {
it('call TaskRepository.getTasks and return result', async () => {
tasksRepository.getTasks.mockResolvedValue('someValue');
const result = await tasksService.getTasks(null, mockUser);
expect(result).toEqual('someValue');
});
});
describe('getTaskById', () => {
it('call TaskRepository.findOne and return result', async () => {
tasksRepository.findOne.mockResolvedValue(mockTask);
const res = await tasksService.getTaskById(mockTaskId, mockUser);
expect(res).toEqual(mockTask);
});
it('call TaskRepository.findOne and throw exception', async () => {
tasksRepository.findOne.mockResolvedValue(null);
const res = tasksService.getTaskById(mockTaskId, mockUser);
await expect(res).rejects.toThrow(NotFoundException);
});
});
});
|
b40c4feda8195528fa5dc74844db3e7adc9e6141
|
[
"TypeScript"
] | 3
|
TypeScript
|
momorsy89/Task-Mangement
|
82bc78ab89b83801de5e676c7d1bb8a605867c3b
|
9952260c5e124c03fcdd8d2189a98fe4162ddf3a
|
refs/heads/master
|
<file_sep>#!/bin/bash
if [[ $EUID -ne 0 ]]; then
echo "This script must be run as root"
exit 1
fi
if [ ! -d "/root/portspoof" ]; then
# Control will enter here if portspoof doesn't exist.
git clone https://github.com/drk1wi/portspoof.git /root/portspoof
cd /root/portspoof
./configure
make
make install
fi
iptables --table nat -F
#change this line - sets the ranges in a variable
spoofed="1:19 23:138 140:442 444:65535"
for prange in ${spoofed}; do
iptables -t nat -A PREROUTING -i eth0 -p tcp -m tcp --dport ${prange} -j REDIRECT --to-ports 4444
done
/usr/local/bin/portspoof -c /root/portspoof/tools/portspoof.conf -s /root/portspoof/tools/portspoof_signatures -D
iptables --table nat --list
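# Optional sanity check (assumes nmap is available on another machine on the network):
# scanning the redirected range from that machine should now report the ports as open, e.g.
#   nmap -sT -p 1-200 <this-host-ip>
# Note the PREROUTING rule above is bound to eth0, so a scan from localhost will not show the effect.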
if grep -q /root/autospoof.sh /etc/rc.local; then
echo already in rc.local
else
echo "/bin/bash /root/autospoof.sh" >> /etc/rc.local
fi
echo "All DONE"
exit 0
|
bf85f5c9d1c5382b7b27936c5f1fd692ed4d55a2
|
[
"Shell"
] | 1
|
Shell
|
aaroncowley/autospoof
|
400e82289107e431cf2049e6a86e672d514bf422
|
edc2fb859c488a4f73db16436d204f9f4a34e481
|
refs/heads/main
|
<repo_name>Umar-M-Haroon/DynamicLinksDemo<file_sep>/FirebaseFrameworkWrapper/LinkController.swift
//
// LinkController.swift
// FirebaseFrameworkWrapper
//
// Created by <NAME> on 11/8/20.
//
import Combine
import Foundation
import FirebaseDynamicLinks
public class LinkController {
static var cancellables: Set<AnyCancellable> = []
// just a demo function to use some dynamic link classes
// if this function is commented out, the framework builds successfully; otherwise it throws errors
public static func createLink(link: URL) -> Future<URL, Never> {
return Future { promise in
guard let shareLink = DynamicLinkComponents.init(link: link, domainURIPrefix: "https://example.com/link") else {
print("Error creating dynamic link")
return
}
if let bundleID = Bundle.main.bundleIdentifier {
shareLink.iOSParameters = DynamicLinkIOSParameters(bundleID: bundleID)
}
let navigationParameters = DynamicLinkNavigationInfoParameters()
navigationParameters.isForcedRedirectEnabled = true
shareLink.navigationInfoParameters = navigationParameters
shareLink.shorten { (url, _, error) in
if let url = url{
promise(.success(url))
}
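// Note: if shortening fails, the promise is never fulfilled here; with Failure == Never
// the error cannot be propagated, only logged or handled inside this closure.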
}
}
}
}
|
d3b9ad06082db56311705f2a1616d280f09bf329
|
[
"Swift"
] | 1
|
Swift
|
Umar-M-Haroon/DynamicLinksDemo
|
9da5fae6fe03e7f45cba247f39d6247669b10c9c
|
527f443d94e96f41f6b06c0f44de19d4df131cf1
|
refs/heads/master
|
<file_sep># CensusApp
US Census Visualization
<file_sep>shinyUI(fluidPage(
titlePanel("censusVis"),
sidebarLayout(
sidebarPanel(
helpText(p("Create Demo,"),
p("2010 Census")),
selectInput("var", label = h3("Choose a var to disp"),
choices = list("Percent White", "Percent Black",
"Percent Hispanic", "Percent Asian"),
selected = "Percent White"),
sliderInput("range", label = h3("Range of Interest"),
min = 0, max = 100, value = c(0, 100))
),
mainPanel(textOutput("text1"),
textOutput("text2"),
plotOutput("map"))
)
)
)
|
aa95670fe6a7ea058435e9b804e501fe0799d97a
|
[
"Markdown",
"R"
] | 2
|
Markdown
|
philYilun/CensusApp
|
61c85adc9f5cacd6d140cf11ddbd8175a84264f2
|
9510e881215fd55639f00b54d62b69abaeb7e6a4
|
refs/heads/master
|
<file_sep>package com.controllers;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.servlet.ModelAndView;
@Controller
public class FuncionarioController {
@RequestMapping(value="/")
public ModelAndView listarFuncionarios() {
ModelAndView view = new ModelAndView("listarfuncionarios");
System.out.println("Listar");
return view;
}
}
<file_sep># spring-mvc-xmlconfig
Projeto base com Spring Web MVC. Configurações via xml
|
b32330faf4db844c512480c90a132d2de3f939fa
|
[
"Markdown",
"Java"
] | 2
|
Java
|
marcylanger/spring-mvc-xmlconfig
|
130d16a475e633fb1df74384f581c9bf11babbd2
|
d998597d773142f9e17fe8660c15f91d29e6645c
|
refs/heads/main
|
<file_sep>import React from "react";
import "./Header.css";
import SearchIcon from "@material-ui/icons/Search";
import ShoppingCartIcon from "@material-ui/icons/ShoppingBasket";
import { Link } from "react-router-dom";
import { useStateValue } from "./StateProvider";
function Header() {
const [{ basket }, dispatch] = useStateValue();
return (
<div className="header">
<Link to="/">
<img className="header-logo" src="img/logo.png" alt="logo" />
</Link>
<div className="header-search">
<input className="header-searchInput" type="text" />
<SearchIcon className="header-searchIcon" />
</div>
<div className="header-nav">
<div className="header-option">
<span className="header-optionLineOne">Hello</span>
<span className="header-optionLineTwo">Sign In</span>
</div>
<div className="header-option">
<span className="header-optionLineOne">Returns</span>
<span className="header-optionLineTwo">& Orders</span>
</div>
<div className="header-option">
<span className="header-optionLineOne">Your</span>
<span className="header-optionLineTwo">Prime</span>
</div>
<Link to="/checkout">
<div className="header-optionCart">
<ShoppingCartIcon />
<span className="header-optionLineTwo header-cartCount">
{basket?.length}
</span>
</div>
</Link>
</div>
</div>
);
}
export default Header;
|
7047d63c6af95bae4afadcc0dd91f48352a139a0
|
[
"JavaScript"
] | 1
|
JavaScript
|
shashinduweerasekara/amazon-clone
|
2e10624d2304184d48639880bddce97f90fb6583
|
61acb46497122360bd123631a0f30b1bce09559a
|
refs/heads/main
|
<repo_name>danesparza/fxtrigger<file_sep>/cmd/defaults.go
package cmd
import (
"fmt"
"github.com/spf13/cobra"
)
var (
jsonConfig bool
yamlConfig bool
)
var yamlDefault = []byte(`
loglevel: INFO
`)
var jsonDefault = []byte(`{
"loglevel": "INFO"
}`)
// defaultsCmd represents the defaults command
var defaultsCmd = &cobra.Command{
Use: "defaults",
Short: "Prints default fxtrigger configuration file",
Long: `Use this to create a default configuration file for fxtrigger.
Example:
fxtrigger defaults > fxtrigger.yaml`,
Run: func(cmd *cobra.Command, args []string) {
if jsonConfig {
fmt.Printf("%s", jsonDefault)
} else if yamlConfig {
fmt.Printf("%s", yamlDefault)
}
},
}
func init() {
rootCmd.AddCommand(defaultsCmd)
defaultsCmd.Flags().BoolVarP(&jsonConfig, "json", "j", false, "Create a JSON configuration file")
defaultsCmd.Flags().BoolVarP(&yamlConfig, "yaml", "y", true, "Create a YAML configuration file")
}
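// For illustration, the JSON variant can be written using the flag defined above, e.g.
//	fxtrigger defaults --json > fxtrigger.json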
<file_sep>/internal/data/trigger.go
package data
import (
"encoding/json"
"fmt"
"time"
"github.com/rs/xid"
"github.com/tidwall/buntdb"
)
// Trigger represents sensor/button trigger information.
type Trigger struct {
ID string `json:"id"` // Unique Trigger ID
Enabled bool `json:"enabled"` // Trigger enabled or not
Created time.Time `json:"created"` // Trigger create time
Name string `json:"name"` // The trigger name
Description string `json:"description"` // Additional information about the trigger
GPIOPin int `json:"gpiopin"` // The GPIO pin the sensor or button is on
WebHooks []WebHook `json:"webhooks"` // The webhooks to send when triggered
MinimumSecondsBeforeRetrigger int `json:"minimumsecondsbeforeretrigger"` // Minimum time (in seconds) before a retrigger
}
// WebHook represents a notification message sent to an endpoint
// It's always content type: application/json
// It's always HTTP verb POST
type WebHook struct {
URL string `json:"url"` // The URL to connect to
Headers map[string]string `json:"headers,omitempty"` // The HTTP headers to send
Body []byte `json:"body,omitempty"` // The HTTP body to send. This can be empty
}
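// An illustrative JSON form of a WebHook (field names follow the json tags above;
// the URL and header values are placeholders, not defaults used by the service):
//
//	{
//	  "url": "https://example.com/notify",
//	  "headers": {"Authorization": "Bearer <token>"},
//	  "body": "eyJtc2ciOiJoaSJ9"
//	}
//
// Body is a []byte, so encoding/json represents it as a base64 string
// ("eyJtc2ciOiJoaSJ9" decodes to {"msg":"hi"}).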
// AddTrigger adds a trigger to the system
func (store Manager) AddTrigger(name, description string, gpiopin int, webhooks []WebHook, minimumsleep int) (Trigger, error) {
// Our return item
retval := Trigger{}
newTrigger := Trigger{
ID: xid.New().String(), // Generate a new id
Created: time.Now(),
Enabled: true,
Name: name,
Description: description,
GPIOPin: gpiopin,
WebHooks: webhooks,
MinimumSecondsBeforeRetrigger: minimumsleep,
}
// Serialize to JSON format
encoded, err := json.Marshal(newTrigger)
if err != nil {
return retval, fmt.Errorf("problem serializing the data: %s", err)
}
// Save it to the database:
err = store.systemdb.Update(func(tx *buntdb.Tx) error {
_, _, err := tx.Set(GetKey("Trigger", newTrigger.ID), string(encoded), &buntdb.SetOptions{})
return err
})
// If there was an error saving the data, report it:
if err != nil {
return retval, fmt.Errorf("problem saving the trigger: %s", err)
}
// Set our retval:
retval = newTrigger
// Return our data:
return retval, nil
}
// AddTrigger adds a trigger to the system
func (store Manager) UpdateTrigger(updatedTrigger Trigger) (Trigger, error) {
// Our return item
retval := Trigger{}
// Serialize to JSON format
encoded, err := json.Marshal(updatedTrigger)
if err != nil {
return retval, fmt.Errorf("problem serializing the data: %s", err)
}
// Save it to the database:
err = store.systemdb.Update(func(tx *buntdb.Tx) error {
_, _, err := tx.Set(GetKey("Trigger", updatedTrigger.ID), string(encoded), &buntdb.SetOptions{})
return err
})
// If there was an error saving the data, report it:
if err != nil {
return retval, fmt.Errorf("problem saving the trigger: %s", err)
}
// Set our retval:
retval = updatedTrigger
// Return our data:
return retval, nil
}
// GetTrigger gets information about a single trigger in the system based on its id
func (store Manager) GetTrigger(id string) (Trigger, error) {
// Our return item
retval := Trigger{}
// Find the item:
err := store.systemdb.View(func(tx *buntdb.Tx) error {
val, err := tx.Get(GetKey("Trigger", id))
if err != nil {
return err
}
if len(val) > 0 {
// Unmarshal data into our item
if err := json.Unmarshal([]byte(val), &retval); err != nil {
return err
}
}
// If we get to this point and there is no error...
return nil
})
// If there was an error, report it:
if err != nil {
return retval, fmt.Errorf("problem getting the trigger: %s", err)
}
// Return our data:
return retval, nil
}
// GetAllTriggers gets all triggers in the system
func (store Manager) GetAllTriggers() ([]Trigger, error) {
// Our return item
retval := []Trigger{}
// Set our prefix
prefix := GetKey("Trigger")
// Iterate over our values:
err := store.systemdb.View(func(tx *buntdb.Tx) error {
tx.Descend(prefix, func(key, val string) bool {
if len(val) > 0 {
// Create our item:
item := Trigger{}
// Unmarshal data into our item
bval := []byte(val)
if err := json.Unmarshal(bval, &item); err != nil {
return false
}
// Add to the array of returned users:
retval = append(retval, item)
}
return true
})
return nil
})
// If there was an error, report it:
if err != nil {
return retval, fmt.Errorf("problem getting the list of triggers: %s", err)
}
// Return our data:
return retval, nil
}
// DeleteTrigger deletes a trigger from the system
func (store Manager) DeleteTrigger(id string) error {
// Remove it from the database:
err := store.systemdb.Update(func(tx *buntdb.Tx) error {
_, err := tx.Delete(GetKey("Trigger", id))
return err
})
// If there was an error removing the data, report it:
if err != nil {
return fmt.Errorf("problem removing the trigger: %s", err)
}
// Return our data:
return nil
}
<file_sep>/cmd/start.go
package cmd
import (
"context"
"fmt"
"github.com/danesparza/fxtrigger/internal/data"
"github.com/danesparza/fxtrigger/internal/trigger"
"github.com/rs/zerolog/log"
"net/http"
"os"
"os/signal"
"strings"
"syscall"
"time"
"github.com/danesparza/fxtrigger/api"
_ "github.com/danesparza/fxtrigger/docs" // swagger docs location
"github.com/gorilla/mux"
"github.com/rs/cors"
"github.com/spf13/cobra"
"github.com/spf13/viper"
httpSwagger "github.com/swaggo/http-swagger" // http-swagger middleware
)
// startCmd represents the start command
var startCmd = &cobra.Command{
Use: "start",
Short: "Start the API and UI services",
Long: `Start the API and UI services`,
Run: start,
}
func start(cmd *cobra.Command, args []string) {
// If we have a config file, report it:
if viper.ConfigFileUsed() != "" {
log.Debug().Str("configFile", viper.ConfigFileUsed()).Msg("Using config file")
} else {
log.Debug().Msg("No config file found")
}
systemdb := viper.GetString("datastore.system")
dndschedule := viper.GetString("trigger.dndschedule")
dndstarttime := viper.GetString("trigger.dndstart")
dndendtime := viper.GetString("trigger.dndend")
// Emit what we know:
log.Info().
Str("systemdb", systemdb).
Str("dndschedule", dndschedule).
Str("dndstarttime", dndstarttime).
Str("dndendtime", dndendtime).
Msg("Config")
// Create a DBManager object and associate with the api.Service
db, err := data.NewManager(systemdb)
if err != nil {
log.Err(err).Msg("Problem trying to open the system database")
return
}
defer db.Close()
// Create a background service object
backgroundService := trigger.BackgroundProcess{
FireTrigger: make(chan data.Trigger),
AddMonitor: make(chan data.Trigger),
RemoveMonitor: make(chan string),
DB: db,
}
// Create an api service object
apiService := api.Service{
FireTrigger: backgroundService.FireTrigger,
AddMonitor: backgroundService.AddMonitor,
RemoveMonitor: backgroundService.RemoveMonitor,
DB: db,
StartTime: time.Now(),
}
// Trap program exit appropriately
ctx, cancel := context.WithCancel(context.Background())
sigs := make(chan os.Signal, 2)
signal.Notify(sigs, os.Interrupt, syscall.SIGTERM)
go handleSignals(ctx, sigs, cancel)
// Log that the system has started:
log.Info().Msg("System started")
// Create a router and setup our REST endpoints...
restRouter := mux.NewRouter()
// TRIGGER ROUTES
restRouter.HandleFunc("/v1/triggers", apiService.CreateTrigger).Methods("POST") // Create a trigger
restRouter.HandleFunc("/v1/triggers", apiService.UpdateTrigger).Methods("PUT") // Update a trigger
restRouter.HandleFunc("/v1/triggers", apiService.ListAllTriggers).Methods("GET") // List all triggers
restRouter.HandleFunc("/v1/triggers/{id}", apiService.DeleteTrigger).Methods("DELETE") // Delete a trigger
restRouter.HandleFunc("/v1/trigger/fire/{id}", apiService.FireSingleTrigger).Methods("POST") // Fire a trigger
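	// For illustration, a trigger can also be fired manually over HTTP once the service is up,
	// e.g. (substituting the configured server.port and a real trigger id):
	//	curl -X POST http://localhost:<server.port>/v1/trigger/fire/<trigger-id>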
// SWAGGER ROUTES
restRouter.PathPrefix("/v1/swagger").Handler(httpSwagger.WrapHandler)
// Create background processes to
// - listen for triggers events
// - handle requests to fire a trigger:
go backgroundService.ListenForEvents(ctx)
go backgroundService.HandleAndProcess(ctx)
// Initialize monitoring
backgroundService.InitializeMonitors()
// Setup the CORS options:
log.Info().Str("CORS origins", viper.GetString("server.allowed-origins")).Msg("CORS config")
uiCorsRouter := cors.New(cors.Options{
AllowedOrigins: strings.Split(viper.GetString("server.allowed-origins"), ","),
AllowCredentials: true,
}).Handler(restRouter)
// Format the bound interface:
formattedServerPort := fmt.Sprintf(":%v", viper.GetString("server.port"))
// Start the service and display how to access it
log.Info().Str("server", formattedServerPort).Msg("Started REST service")
log.Err(http.ListenAndServe(formattedServerPort, uiCorsRouter)).Msg("HTTP API service error")
}
func handleSignals(ctx context.Context, sigs <-chan os.Signal, cancel context.CancelFunc) {
select {
case <-ctx.Done():
case sig := <-sigs:
switch sig {
case os.Interrupt:
log.Info().Msg("SIGINT")
case syscall.SIGTERM:
log.Info().Msg("SIGTERM")
}
log.Info().Msg("Shutting down ...")
cancel()
os.Exit(0)
}
}
func init() {
rootCmd.AddCommand(startCmd)
}
<file_sep>/internal/trigger/process.go
package trigger
import (
"bytes"
"context"
"fmt"
"github.com/danesparza/fxtrigger/internal/data"
"github.com/rs/zerolog/log"
"net/http"
"os"
"sync"
"time"
"github.com/danesparza/go-rpio"
)
// BackgroundProcess encapsulates background processing operations
type BackgroundProcess struct {
DB *data.Manager
HistoryTTL time.Duration
// FireTrigger signals a trigger should be fired
FireTrigger chan data.Trigger
// AddMonitor signals a trigger should be added to the list of monitored triggers
AddMonitor chan data.Trigger
// RemoveMonitor signals a trigger id should not be monitored anymore
RemoveMonitor chan string
}
type monitoredTriggersMap struct {
m map[string]func()
rwMutex sync.RWMutex
}
// HandleAndProcess handles system context calls and channel events to fire triggers
func (bp BackgroundProcess) HandleAndProcess(systemctx context.Context) {
// Loop and respond to channels:
for {
select {
case trigReq := <-bp.FireTrigger:
// As we get a request on a channel to fire a trigger...
// Create a goroutine
go func(cx context.Context, trigger data.Trigger) {
// Loop through the associated webhooks
for _, hook := range trigger.WebHooks {
// Fire each of them...
// First, build the initial request with the verb, url and body (if the body exists)
req, err := http.NewRequestWithContext(systemctx, http.MethodPost, hook.URL, bytes.NewBuffer(hook.Body))
if err != nil {
log.Err(err).Str("TriggerID", trigger.ID).Str("HookUrl", hook.URL).Msg("Error creating request for trigger/hook")
continue // Go to the next hook
}
// Then, set our initial content-type header
req.Header.Set("Content-Type", "application/json")
// Next, set any custom headers
for k, v := range hook.Headers {
req.Header.Set(k, v)
}
// Finally, send the request
client := &http.Client{Timeout: time.Second * 10}
resp, err := client.Do(req)
if err != nil {
	log.Err(err).Str("TriggerID", trigger.ID).Str("HookUrl", hook.URL).Msg("Error with response for trigger/hook")
	continue // resp is nil when the request fails, so skip the body close and move on to the next hook
}
resp.Body.Close()
}
}(systemctx, trigReq) // Launch the goroutine
case <-systemctx.Done():
fmt.Println("Stopping trigger processor")
return
}
}
}
// ListenForEvents listens to channel events to add / remove monitors
//
// and 'fires' triggers when an event (motion / button press / time) occurs from a monitor
func (bp BackgroundProcess) ListenForEvents(systemctx context.Context) {
// Track our list of active event monitors. These could be buttons or sensors
monitoredTriggers := monitoredTriggersMap{m: make(map[string]func())}
// Loop and respond to channels:
for {
select {
case monitorReq := <-bp.AddMonitor:
// This should be called when creating a trigger,
// when initializing the service,
// or when enabling a trigger (that was previously disabled)
// If you need to add a monitor, spin up a background goroutine to monitor that pin
go func(cx context.Context, req data.Trigger) {
// Create a cancelable context from the passed (system) context
ctx, cancel := context.WithCancel(cx)
defer cancel()
// Add an entry to the map with
// - key: triggerid
// - value: the cancel function (pointer)
// (critical section)
monitoredTriggers.rwMutex.Lock()
monitoredTriggers.m[req.ID] = cancel
monitoredTriggers.rwMutex.Unlock()
if err := rpio.Open(); err != nil {
fmt.Println(err)
os.Exit(1)
}
defer rpio.Close()
pin := rpio.Pin(req.GPIOPin)
pin.Mode(rpio.Input)
// Store the 'last reading'
// Initially, set it to the 'low' (no motion) state
lr := rpio.Low
lastTrigger := time.Unix(0, 0) // Initialize with 1/1/1970
log.Debug().Int("GPIOPin", req.GPIOPin).Str("TriggerID", req.ID).Msg("Monitoring started")
// Our channel checker and sensor reader
for {
select {
case <-ctx.Done():
// Remove ourselves from the map and exit (critical section)
monitoredTriggers.rwMutex.Lock()
delete(monitoredTriggers.m, req.ID)
monitoredTriggers.rwMutex.Unlock()
return
case <-time.After(500 * time.Millisecond):
// Read from the sensor
v := pin.Read()
// Latch / unlatch check
if lr != v {
lr = v
currentTime := time.Now()
diff := currentTime.Sub(lastTrigger)
if lr == rpio.High {
if diff.Seconds() > float64(req.MinimumSecondsBeforeRetrigger) {
// If it's been long enough -- reset the lrTime to now
// and actually trigger the item
lastTrigger = currentTime
log.Debug().Int("GPIOPin", req.GPIOPin).Str("TriggerID", req.ID).Msg("Motion detected. Firing event")
bp.FireTrigger <- req
} else {
log.Debug().
Int("GPIOPin", req.GPIOPin).
Str("TriggerID", req.ID).
Int("MinimumSecondsBeforeRetrigger", req.MinimumSecondsBeforeRetrigger).
Msg("Motion detected, but minimum seconds threshold not met. Not triggering.")
}
}
if lr == rpio.Low {
log.Debug().Int("GPIOPin", req.GPIOPin).Str("TriggerID", req.ID).Msg("Motion reset")
}
}
}
}
}(systemctx, monitorReq) // Launch the goroutine
case removeReq := <-bp.RemoveMonitor:
// This should be called when removing a trigger (permanently)
// or when disabling a trigger
// Look up the item in the map and call cancel if the item exists (critical section):
monitoredTriggers.rwMutex.Lock()
monitorCancel, exists := monitoredTriggers.m[removeReq]
if exists {
log.Debug().Str("TriggerID", removeReq).Msg("Monitoring stopped")
// Call the context cancellation function
monitorCancel()
// Remove ourselves from the map and exit
delete(monitoredTriggers.m, removeReq)
}
monitoredTriggers.rwMutex.Unlock()
case <-systemctx.Done():
fmt.Println("Stopping trigger processor")
return
}
}
}
// InitializeMonitors starts all monitoring processes
func (bp BackgroundProcess) InitializeMonitors() {
// Get all triggers:
allTriggers, err := bp.DB.GetAllTriggers()
if err != nil {
log.Err(err).Msg("Problem getting all triggers to initialze monitors")
}
log.Debug().Int("TriggerCount", len(allTriggers)).Msg("Initializing monitoring")
// Start monitoring all enabled triggers:
for _, trigger := range allTriggers {
if trigger.Enabled {
bp.AddMonitor <- trigger
}
}
}
<file_sep>/internal/triggertype/const.go
package triggertype
const (
// Motion is for motion sensors
Motion = "Motion"
// Button is for button triggers
Button = "Button"
// Time is for time based triggers
Time = "Time"
// System is for system event types
System = "System"
// Unknown trigger type
Unknown = "Unknown"
)
<file_sep>/api/trigger.go
package api
import (
"encoding/json"
"fmt"
"github.com/rs/zerolog/log"
"net/http"
"strings"
"github.com/gorilla/mux"
)
// ListAllTriggers godoc
// @Summary List all triggers in the system
// @Description List all triggers in the system
// @Tags triggers
// @Accept json
// @Produce json
// @Success 200 {object} api.SystemResponse
// @Failure 500 {object} api.ErrorResponse
// @Router /triggers [get]
func (service Service) ListAllTriggers(rw http.ResponseWriter, req *http.Request) {
// Get a list of files
retval, err := service.DB.GetAllTriggers()
if err != nil {
err = fmt.Errorf("error getting a list of triggers: %v", err)
sendErrorResponse(rw, err, http.StatusInternalServerError)
return
}
// Construct our response
response := SystemResponse{
Message: fmt.Sprintf("%v trigger(s)", len(retval)),
Data: retval,
}
// Serialize to JSON & return the response:
rw.Header().Set("Content-Type", "application/json; charset=utf-8")
json.NewEncoder(rw).Encode(response)
}
// CreateTrigger godoc
// @Summary Create a new trigger
// @Description Create a new trigger
// @Tags triggers
// @Accept json
// @Produce json
// @Param trigger body api.CreateTriggerRequest true "The trigger to create"
// @Success 200 {object} api.SystemResponse
// @Failure 400 {object} api.ErrorResponse
// @Failure 500 {object} api.ErrorResponse
// @Router /triggers [post]
func (service Service) CreateTrigger(rw http.ResponseWriter, req *http.Request) {
// req.Body is a ReadCloser -- we need to remember to close it:
defer req.Body.Close()
// Decode the request
request := CreateTriggerRequest{}
err := json.NewDecoder(req.Body).Decode(&request)
if err != nil {
sendErrorResponse(rw, err, http.StatusBadRequest)
return
}
// If we don't have any webhooks associated, make sure we indicate that's not valid
if len(request.WebHooks) < 1 {
sendErrorResponse(rw, fmt.Errorf("at least one webhook must be included"), http.StatusBadRequest)
return
}
// Create the new trigger:
newTrigger, err := service.DB.AddTrigger(request.Name, request.Description, request.GPIOPin, request.WebHooks, request.MinimumSecondsBeforeRetrigger)
if err != nil {
sendErrorResponse(rw, err, http.StatusInternalServerError)
return
}
// Record the event:
log.Debug().Any("request", request).Msg("Trigger created")
// Add the new trigger to monitoring:
service.AddMonitor <- newTrigger
// Create our response and send information back:
response := SystemResponse{
Message: "Trigger created",
Data: newTrigger,
}
// Serialize to JSON & return the response:
rw.Header().Set("Content-Type", "application/json; charset=utf-8")
json.NewEncoder(rw).Encode(response)
}
// UpdateTrigger godoc
// @Summary Update a trigger
// @Description Update a trigger
// @Tags triggers
// @Accept json
// @Produce json
// @Param trigger body api.UpdateTriggerRequest true "The trigger to update. Must include trigger.id"
// @Success 200 {object} api.SystemResponse
// @Failure 400 {object} api.ErrorResponse
// @Failure 500 {object} api.ErrorResponse
// @Router /triggers [put]
func (service Service) UpdateTrigger(rw http.ResponseWriter, req *http.Request) {
// Some state change instructions
shouldAddMonitoring := false
shouldRemoveMonitoring := false
// req.Body is a ReadCloser -- we need to remember to close it:
defer req.Body.Close()
// Decode the request
request := UpdateTriggerRequest{}
err := json.NewDecoder(req.Body).Decode(&request)
if err != nil {
sendErrorResponse(rw, err, http.StatusBadRequest)
return
}
// If we don't have the trigger.id, make sure we indicate that's not valid
if strings.TrimSpace(request.ID) == "" {
sendErrorResponse(rw, fmt.Errorf("the trigger.id is required"), http.StatusBadRequest)
return
}
// Make sure the id exists
trigUpdate, _ := service.DB.GetTrigger(request.ID)
if trigUpdate.ID != request.ID {
sendErrorResponse(rw, fmt.Errorf("trigger must already exist"), http.StatusBadRequest)
return
}
// See if 'enabled' has changed
if trigUpdate.Enabled != request.Enabled {
if request.Enabled {
// If it has, and it's now 'enabled', add the trigger to monitoring
shouldAddMonitoring = true
} else {
// If it has, and it's now 'disabled', remove the trigger from monitoring
shouldRemoveMonitoring = true
}
}
// Only update the name if it's been passed
if strings.TrimSpace(request.Name) != "" {
trigUpdate.Name = request.Name
}
// Only update the description if it's been passed
if strings.TrimSpace(request.Description) != "" {
trigUpdate.Description = request.Description
}
// Enabled / disabled is always set
trigUpdate.Enabled = request.Enabled
// If the GPIO pin is not zero (the default value of an int) pass it in. Yes -- GPIO 0 is valid,
// but is generally reserved for special uses. See https://pinout.xyz/pinout/pin27_gpio0#
if request.GPIOPin != 0 {
trigUpdate.GPIOPin = request.GPIOPin
}
// This is an int. It's always going to get updated
trigUpdate.MinimumSecondsBeforeRetrigger = request.MinimumSecondsBeforeRetrigger
// Only update webhooks if we've passed some in
if len(request.WebHooks) > 0 {
trigUpdate.WebHooks = request.WebHooks
service.RemoveMonitor <- trigUpdate.ID
shouldAddMonitoring = true
}
// Update the trigger:
updatedTrigger, err := service.DB.UpdateTrigger(trigUpdate)
if err != nil {
sendErrorResponse(rw, err, http.StatusInternalServerError)
return
}
// Record the event:
log.Debug().Any("request", request).Msg("Trigger updated")
// If we have a state change, make sure to add/remove monitoring and record that event as well
if shouldAddMonitoring {
service.AddMonitor <- trigUpdate
log.Debug().Str("id", trigUpdate.ID).Msg("Trigger monitoring enabled")
}
if shouldRemoveMonitoring {
service.RemoveMonitor <- trigUpdate.ID
log.Debug().Str("id", trigUpdate.ID).Msg("Trigger monitoring disabled")
}
// Create our response and send information back:
response := SystemResponse{
Message: "Trigger updated",
Data: updatedTrigger,
}
// Serialize to JSON & return the response:
rw.Header().Set("Content-Type", "application/json; charset=utf-8")
json.NewEncoder(rw).Encode(response)
}
// DeleteTrigger godoc
// @Summary Deletes a trigger in the system
// @Description Deletes a trigger in the system
// @Tags triggers
// @Accept json
// @Produce json
// @Param id path string true "The trigger id to delete"
// @Success 200 {object} api.SystemResponse
// @Failure 400 {object} api.ErrorResponse
// @Failure 500 {object} api.ErrorResponse
// @Failure 503 {object} api.ErrorResponse
// @Router /triggers/{id} [delete]
func (service Service) DeleteTrigger(rw http.ResponseWriter, req *http.Request) {
// Get the id from the url (if it's blank, return an error)
vars := mux.Vars(req)
if vars["id"] == "" {
err := fmt.Errorf("requires an id of a trigger to delete")
sendErrorResponse(rw, err, http.StatusBadRequest)
return
}
// Delete the trigger
err := service.DB.DeleteTrigger(vars["id"])
if err != nil {
err = fmt.Errorf("error deleting file: %v", err)
sendErrorResponse(rw, err, http.StatusInternalServerError)
return
}
// Record the event:
log.Debug().Str("id", vars["id"]).Msg("Trigger deleted")
// Remove the trigger from monitoring:
service.RemoveMonitor <- vars["id"]
// Construct our response
response := SystemResponse{
Message: "Trigger deleted",
Data: vars["id"],
}
// Serialize to JSON & return the response:
rw.Header().Set("Content-Type", "application/json; charset=utf-8")
json.NewEncoder(rw).Encode(response)
}
// FireSingleTrigger godoc
// @Summary Fires a trigger in the system
// @Description Fires a trigger in the system
// @Tags triggers
// @Accept json
// @Produce json
// @Param id path string true "The trigger id to fire"
// @Success 200 {object} api.SystemResponse
// @Failure 400 {object} api.ErrorResponse
// @Failure 500 {object} api.ErrorResponse
// @Router /trigger/fire/{id} [post]
func (service Service) FireSingleTrigger(rw http.ResponseWriter, req *http.Request) {
// Get the id from the url (if it's blank, return an error)
vars := mux.Vars(req)
if vars["id"] == "" {
err := fmt.Errorf("requires an id of a trigger to fire")
sendErrorResponse(rw, err, http.StatusBadRequest)
return
}
// Get the trigger
trigger, err := service.DB.GetTrigger(vars["id"])
if err != nil {
err = fmt.Errorf("error getting trigger: %v", err)
sendErrorResponse(rw, err, http.StatusInternalServerError)
return
}
// Call the channel to fire the event:
service.FireTrigger <- trigger
// Record the event:
log.Debug().Str("id", trigger.ID).Str("name", trigger.Name).Msg("Trigger fired")
// Construct our response
response := SystemResponse{
Message: "Trigger fired",
Data: trigger,
}
// Serialize to JSON & return the response:
rw.Header().Set("Content-Type", "application/json; charset=utf-8")
json.NewEncoder(rw).Encode(response)
}
<file_sep>/internal/data/trigger_test.go
package data_test
import (
data2 "github.com/danesparza/fxtrigger/internal/data"
"os"
"testing"
)
func TestTrigger_AddTrigger_ValidTrigger_Successful(t *testing.T) {
// Arrange
systemdb := getTestFiles()
db, err := data2.NewManager(systemdb)
if err != nil {
t.Fatalf("NewManager failed: %s", err)
}
defer func() {
db.Close()
os.RemoveAll(systemdb)
}()
testTrigger := data2.Trigger{Name: "Unit test trigger", Description: "Unit test trigger desc", GPIOPin: 23, WebHooks: []data2.WebHook{}}
// Act
newTrigger, err := db.AddTrigger(testTrigger.Name, testTrigger.Description, testTrigger.GPIOPin, testTrigger.WebHooks, testTrigger.MinimumSecondsBeforeRetrigger)
// Assert
if err != nil {
t.Errorf("AddTrigger - Should add trigger without error, but got: %s", err)
}
if newTrigger.Created.IsZero() {
t.Errorf("AddTrigger failed: Should have set an item with the correct datetime: %+v", newTrigger)
}
if newTrigger.Enabled != true {
t.Errorf("AddTrigger failed: Should have enabled the trigger by default: %+v", newTrigger)
}
}
func TestTrigger_GetTrigger_ValidTrigger_Successful(t *testing.T) {
// Arrange
systemdb := getTestFiles()
db, err := data2.NewManager(systemdb)
if err != nil {
t.Fatalf("NewManager failed: %s", err)
}
defer func() {
db.Close()
os.RemoveAll(systemdb)
}()
testTrigger1 := data2.Trigger{Name: "Trigger 1", Description: "Unit test 1", GPIOPin: 11}
testTrigger2 := data2.Trigger{Name: "Trigger 2", Description: "Unit test 2", GPIOPin: 12}
testTrigger3 := data2.Trigger{Name: "Trigger 3", Description: "Unit test 3", GPIOPin: 13}
// Act
db.AddTrigger(testTrigger1.Name, testTrigger1.Description, testTrigger1.GPIOPin, testTrigger1.WebHooks, testTrigger1.MinimumSecondsBeforeRetrigger)
newTrigger2, _ := db.AddTrigger(testTrigger2.Name, testTrigger2.Description, testTrigger2.GPIOPin, testTrigger2.WebHooks, testTrigger2.MinimumSecondsBeforeRetrigger)
db.AddTrigger(testTrigger3.Name, testTrigger3.Description, testTrigger3.GPIOPin, testTrigger3.WebHooks, testTrigger3.MinimumSecondsBeforeRetrigger)
gotTrigger, err := db.GetTrigger(newTrigger2.ID)
// Log the file details:
t.Logf("Trigger: %+v", gotTrigger)
// Assert
if err != nil {
t.Errorf("GetTrigger - Should get trigger without error, but got: %s", err)
}
if len(gotTrigger.ID) < 2 {
t.Errorf("GetTrigger failed: Should get valid id but got: %v", gotTrigger.ID)
}
}
func TestTrigger_GetTrigger_ValidTriggerWithWebhooks_Successful(t *testing.T) {
// Arrange
systemdb := getTestFiles()
db, err := data2.NewManager(systemdb)
if err != nil {
t.Fatalf("NewManager failed: %s", err)
}
defer func() {
db.Close()
os.RemoveAll(systemdb)
}()
testTrigger1 := data2.Trigger{Name: "Trigger 1", Description: "Unit test 1", GPIOPin: 11}
testTrigger2 := data2.Trigger{
Name: "Trigger 2",
Description: "Unit test 2",
GPIOPin: 12,
WebHooks: []data2.WebHook{
{URL: "http://www.github.com/webhook1",
Headers: map[string]string{"header1": "value1", "header2": "value2"}},
{URL: "http://www.microsoft.com/webhook2"}}}
testTrigger3 := data2.Trigger{Name: "Trigger 3", Description: "Unit test 3", GPIOPin: 13}
// Act
db.AddTrigger(testTrigger1.Name, testTrigger1.Description, testTrigger1.GPIOPin, testTrigger1.WebHooks, testTrigger1.MinimumSecondsBeforeRetrigger)
newTrigger2, _ := db.AddTrigger(testTrigger2.Name, testTrigger2.Description, testTrigger2.GPIOPin, testTrigger2.WebHooks, testTrigger2.MinimumSecondsBeforeRetrigger)
db.AddTrigger(testTrigger3.Name, testTrigger3.Description, testTrigger3.GPIOPin, testTrigger3.WebHooks, testTrigger3.MinimumSecondsBeforeRetrigger)
gotTrigger, err := db.GetTrigger(newTrigger2.ID)
// Log the file details:
t.Logf("Trigger: %+v", gotTrigger)
// Assert
if err != nil {
t.Errorf("GetTrigger - Should get trigger without error, but got: %s", err)
}
if len(gotTrigger.ID) < 2 {
t.Errorf("GetTrigger failed: Should get valid id but got: %v", gotTrigger.ID)
}
}
func TestTrigger_GetAllTriggers_ValidTriggers_Successful(t *testing.T) {
// Arrange
systemdb := getTestFiles()
db, err := data2.NewManager(systemdb)
if err != nil {
t.Fatalf("NewManager failed: %s", err)
}
defer func() {
db.Close()
os.RemoveAll(systemdb)
}()
testTrigger1 := data2.Trigger{Name: "Trigger 1", Description: "Unit test 1", GPIOPin: 11}
testTrigger2 := data2.Trigger{Name: "Trigger 2", Description: "Unit test 2", GPIOPin: 12}
testTrigger3 := data2.Trigger{Name: "Trigger 3", Description: "Unit test 3", GPIOPin: 13}
// Act
db.AddTrigger(testTrigger1.Name, testTrigger1.Description, testTrigger1.GPIOPin, testTrigger1.WebHooks, testTrigger1.MinimumSecondsBeforeRetrigger)
newTrigger2, _ := db.AddTrigger(testTrigger2.Name, testTrigger2.Description, testTrigger2.GPIOPin, testTrigger2.WebHooks, testTrigger2.MinimumSecondsBeforeRetrigger)
db.AddTrigger(testTrigger3.Name, testTrigger3.Description, testTrigger3.GPIOPin, testTrigger3.WebHooks, testTrigger3.MinimumSecondsBeforeRetrigger)
gotTriggers, err := db.GetAllTriggers()
// Assert
if err != nil {
t.Errorf("GetAllTriggers - Should get all triggers without error, but got: %s", err)
}
if len(gotTriggers) < 2 {
t.Errorf("GetAllTriggers failed: Should get all items but got: %v", len(gotTriggers))
}
if gotTriggers[1].Description != newTrigger2.Description {
t.Errorf("GetAllTriggers failed: Should get an item with the correct details: %+v", gotTriggers[1])
}
}
func TestTrigger_UpdateTrigger_ValidTriggers_Successful(t *testing.T) {
// Arrange
systemdb := getTestFiles()
db, err := data2.NewManager(systemdb)
if err != nil {
t.Fatalf("NewManager failed: %s", err)
}
defer func() {
db.Close()
os.RemoveAll(systemdb)
}()
testTrigger1 := data2.Trigger{Name: "Trigger 1", Description: "Unit test 1", GPIOPin: 11}
testTrigger2 := data2.Trigger{Name: "Trigger 2", Description: "Unit test 2", GPIOPin: 12}
testTrigger3 := data2.Trigger{Name: "Trigger 3", Description: "Unit test 3", GPIOPin: 13}
// Act
db.AddTrigger(testTrigger1.Name, testTrigger1.Description, testTrigger1.GPIOPin, testTrigger1.WebHooks, testTrigger1.MinimumSecondsBeforeRetrigger)
newTrigger2, _ := db.AddTrigger(testTrigger2.Name, testTrigger2.Description, testTrigger2.GPIOPin, testTrigger2.WebHooks, testTrigger2.MinimumSecondsBeforeRetrigger)
db.AddTrigger(testTrigger3.Name, testTrigger3.Description, testTrigger3.GPIOPin, testTrigger3.WebHooks, testTrigger3.MinimumSecondsBeforeRetrigger)
// Update the 2nd trigger:
newTrigger2.Enabled = false
_, err = db.UpdateTrigger(newTrigger2) // Update the 2nd trigger
gotTrigger, _ := db.GetTrigger(newTrigger2.ID) // Refetch to verify
// Assert
if err != nil {
t.Errorf("UpdateTrigger - Should update trigger without error, but got: %s", err)
}
if gotTrigger.Enabled != false {
t.Errorf("UpdateTrigger failed: Should get an item that has been disabled but got: %+v", gotTrigger)
}
}
func TestTrigger_DeleteTrigger_ValidTriggers_Successful(t *testing.T) {
// Arrange
systemdb := getTestFiles()
db, err := data2.NewManager(systemdb)
if err != nil {
t.Fatalf("NewManager failed: %s", err)
}
defer func() {
db.Close()
os.RemoveAll(systemdb)
}()
testTrigger1 := data2.Trigger{Name: "Trigger 1", Description: "Unit test 1", GPIOPin: 11}
testTrigger2 := data2.Trigger{Name: "Trigger 2", Description: "Unit test 2", GPIOPin: 12}
testTrigger3 := data2.Trigger{Name: "Trigger 3", Description: "Unit test 3", GPIOPin: 13}
// Act
db.AddTrigger(testTrigger1.Name, testTrigger1.Description, testTrigger1.GPIOPin, testTrigger1.WebHooks, testTrigger1.MinimumSecondsBeforeRetrigger)
newTrigger2, _ := db.AddTrigger(testTrigger2.Name, testTrigger2.Description, testTrigger2.GPIOPin, testTrigger2.WebHooks, testTrigger2.MinimumSecondsBeforeRetrigger)
db.AddTrigger(testTrigger3.Name, testTrigger3.Description, testTrigger3.GPIOPin, testTrigger3.WebHooks, testTrigger3.MinimumSecondsBeforeRetrigger)
err = db.DeleteTrigger(newTrigger2.ID) // Delete the 2nd trigger
gotTriggers, _ := db.GetAllTriggers()
// Assert
if err != nil {
t.Errorf("DeleteTrigger - Should delete trigger without error, but got: %s", err)
}
if len(gotTriggers) != 2 {
t.Errorf("DeleteTrigger failed: Should remove an item but got: %v", len(gotTriggers))
}
if gotTriggers[1].Description == newTrigger2.Description {
t.Errorf("DeleteTrigger failed: Should get an item with different details than the removed item but got: %+v", gotTriggers[1])
}
}
<file_sep>/api/root.go
package api
import (
"encoding/json"
"github.com/danesparza/fxtrigger/internal/data"
"net/http"
"time"
)
// Service encapsulates API service operations
type Service struct {
DB *data.Manager
StartTime time.Time
// FireTrigger signals a trigger should be fired
FireTrigger chan data.Trigger
// AddMonitor signals a trigger should be added to the list of monitored triggers
AddMonitor chan data.Trigger
// RemoveMonitor signals a trigger id should not be monitored anymore
RemoveMonitor chan string
}
// CreateTriggerRequest is a request to create a new trigger
type CreateTriggerRequest struct {
Name string `json:"name"` // The trigger name
Description string `json:"description"` // Additional information about the trigger
GPIOPin int `json:"gpiopin"` // The GPIO pin the sensor or button is on
WebHooks []data.WebHook `json:"webhooks"` // The webhooks to send when triggered
MinimumSecondsBeforeRetrigger int `json:"minimumsecondsbeforeretrigger"` // Minimum time (in seconds) before a retrigger
}
// UpdateTriggerRequest is a request to update a trigger
type UpdateTriggerRequest struct {
ID string `json:"id"` // Unique Trigger ID
Enabled bool `json:"enabled"` // Trigger enabled or not
Name string `json:"name"` // The trigger name
Description string `json:"description"` // Additional information about the trigger
GPIOPin int `json:"gpiopin"` // The GPIO pin the sensor or button is on
WebHooks []data.WebHook `json:"webhooks"` // The webhooks to send when triggered
MinimumSecondsBeforeRetrigger int `json:"minimumsecondsbeforeretrigger"` // Minimum time (in seconds) before a retrigger
}
// SystemResponse is a response for a system request
type SystemResponse struct {
Message string `json:"message"`
Data interface{} `json:"data"`
}
// ErrorResponse represents an API response
type ErrorResponse struct {
Message string `json:"message"`
}
// Used to send back an error:
func sendErrorResponse(rw http.ResponseWriter, err error, code int) {
// Our return value
response := ErrorResponse{
Message: "Error: " + err.Error()}
// Serialize to JSON & return the response:
rw.Header().Set("Content-Type", "application/json; charset=utf-8")
rw.WriteHeader(code)
json.NewEncoder(rw).Encode(response)
}
// GetIP gets a request's IP address by reading off the forwarded-for
// header (for proxies) and falls back to use the remote address.
func GetIP(r *http.Request) string {
forwarded := r.Header.Get("X-FORWARDED-FOR")
if forwarded != "" {
return forwarded
}
return r.RemoteAddr
}
<file_sep>/internal/data/root_test.go
package data_test
import (
"os"
"path"
"path/filepath"
"testing"
"github.com/mitchellh/go-homedir"
)
// Gets the database path for this environment:
func getTestFiles() string {
systemdb := os.Getenv("FXTRIGGER_TEST_ROOT")
if systemdb == "" {
home, _ := homedir.Dir()
if home != "" {
systemdb = path.Join(home, "fxtrigger", "db", "system.db")
}
}
return systemdb
}
func TestRoot_GetTestDBPaths_Successful(t *testing.T) {
systemdb := getTestFiles()
if systemdb == "" {
t.Fatal("The required FXTRIGGER_TEST_ROOT environment variable is not set to the test database root path. It should probably be $HOME/fxtrigger/db/system.db")
}
t.Logf("System db path: %s", systemdb)
t.Logf("System db folder: %s", filepath.Dir(systemdb))
}
func TestRoot_Databases_ShouldNotExistYet(t *testing.T) {
// Arrange
systemdb := getTestFiles()
// Act
// Assert
if _, err := os.Stat(systemdb); err == nil {
t.Errorf("System database check failed: System db %s already exists, and shouldn't", systemdb)
}
}
<file_sep>/scripts/sqlite/migrations/000001_init.down.sql
drop table if exists webhook;
drop table if exists trigger;
<file_sep>/dist/DEBIAN/postinst
#!/bin/sh
systemctl enable fxtrigger.service
systemctl start fxtrigger.service
<file_sep>/go.mod
module github.com/danesparza/fxtrigger
go 1.16
require (
github.com/BurntSushi/toml v1.3.2 // indirect
github.com/danesparza/go-rpio v3.0.0+incompatible
github.com/go-openapi/jsonpointer v0.20.0 // indirect
github.com/go-openapi/jsonreference v0.20.2 // indirect
github.com/go-openapi/spec v0.20.9 // indirect
github.com/gorilla/mux v1.8.0
github.com/mitchellh/go-homedir v1.1.0
github.com/rs/cors v1.7.0
github.com/rs/xid v1.5.0
github.com/rs/zerolog v1.30.0
github.com/spf13/cobra v1.1.3
github.com/spf13/viper v1.7.1
github.com/swaggo/http-swagger v1.0.0
github.com/swaggo/swag v1.16.1
github.com/tidwall/buntdb v1.2.3
golang.org/x/tools v0.12.0 // indirect
)
<file_sep>/scripts/sqlite/migrations/000001_init.up.sql
create table trigger
(
id TEXT,
enabled INT,
created integer default current_timestamp,
name TEXT,
description TEXT,
gpiopin integer,
seconds_to_retrigger integer
);
create table webhook
(
id TEXT,
trigger_id TEXT
constraint webhook_trigger_id_fk
references trigger (id),
URL TEXT,
headers BLOB,
body TEXT
);
<file_sep>/README.md
# fxtrigger [](https://github.com/danesparza/fxtrigger/actions/workflows/release.yaml)
REST service for Raspberry Pi GPIO/Sensor -> webhooks. Made with ❤️ for makers, DIY craftsmen, prop makers and professional soundstage designers everywhere
## Prerequisites
fxtrigger uses Raspberry Pi GPIO to listen for input pin button presses or sensor events. You'll need to make sure those buttons and sensors are wired up and working before using fxTrigger to connect those triggers to your webhook endpoints.
For motion sensing, I would recommend using the [Adafruit PIR (motion) sensor](https://www.adafruit.com/product/189) as well -- just get a [Pi with headers](https://www.adafruit.com/product/3708) and connect the PIR to power, ground, and to a GPIO data pin (and be sure to follow the [PIR motion sensor guide](https://learn.adafruit.com/pir-passive-infrared-proximity-motion-sensor/) for the board). In fxtrigger, specify the GPIO pin you hook it up to (not the physical pin) when creating the trigger. See the [Raspberry Pi Pinout interactive reference](https://pinout.xyz/#) for more information.
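Once the sensor is wired up, the GPIO pin number is what you pass when creating the trigger through the REST API. Below is a minimal sketch of creating a trigger from Go, assuming the service is running on its default port and that the routes match the swagger annotations; the lowercase JSON keys on the webhook object are an assumption, so check the swagger docs for the authoritative schema.
```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Top-level field names mirror api.CreateTriggerRequest.
	// The "url"/"headers" keys on the webhook object are assumptions.
	body := map[string]interface{}{
		"name":                          "Front porch motion",
		"description":                   "PIR sensor on the porch",
		"gpiopin":                       23,
		"minimumsecondsbeforeretrigger": 30,
		"webhooks": []map[string]interface{}{
			{"url": "http://example.com/hook", "headers": map[string]string{"X-Token": "secret"}},
		},
	}

	buf, err := json.Marshal(body)
	if err != nil {
		fmt.Println(err)
		return
	}

	resp, err := http.Post("http://localhost:3020/v1/triggers", "application/json", bytes.NewReader(buf))
	if err != nil {
		fmt.Println(err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}
```
The `gpiopin` value is the GPIO number from the pinout reference above, not the physical pin number.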
## Installing
Installing fxtrigger is also really simple. Grab the .deb file from the [latest release](https://github.com/danesparza/fxtrigger/releases/latest) and then install it using dpkg:
```bash
sudo dpkg -i fxtrigger-1.0.40_armhf.deb
```
This automatically installs the **fxtrigger** service with a default configuration and starts the service.
You can then use the service at http://localhost:3020
See the REST API documentation at http://localhost:3020/v1/swagger/
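To check that a trigger's webhooks fire as expected, a trigger can also be fired by hand through the API. Here is a minimal sketch, again assuming the default port and the swagger-annotated routes; the trigger id below is hypothetical -- use an id returned by `GET /v1/triggers`.
```go
package main

import (
	"fmt"
	"net/http"
)

func main() {
	// Hypothetical trigger id -- list triggers via GET /v1/triggers to find a real one
	id := "your-trigger-id"

	resp, err := http.Post("http://localhost:3020/v1/trigger/fire/"+id, "application/json", nil)
	if err != nil {
		fmt.Println(err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}
```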
## Removing
Uninstalling is just as simple:
```bash
sudo dpkg -r fxtrigger
```
<file_sep>/dist/var/lib/fxtrigger/readme.txt
This is the data directory for fxtrigger.
To clear history and uploaded files, just remove the files and folders in this directory.<file_sep>/dist/DEBIAN/prerm
#!/bin/sh
systemctl stop fxtrigger.service
systemctl disable fxtrigger.service
<file_sep>/main.go
package main
import (
"github.com/danesparza/fxtrigger/cmd"
"github.com/rs/zerolog"
"github.com/rs/zerolog/log"
"github.com/rs/zerolog/pkgerrors"
"os"
"strings"
"time"
)
// @title fxTrigger
// @version 1.0
// @description fxTrigger REST based management for GPIO/Sensor -> endpoint triggers (on Raspberry Pi)
// @license.name Apache 2.0
// @license.url http://www.apache.org/licenses/LICENSE-2.0.html
// @BasePath /v1
func main() {
// Set log info:
log.Logger = log.With().Timestamp().Caller().Logger()
// Set log level (default to info)
zerolog.SetGlobalLevel(zerolog.InfoLevel)
switch strings.ToLower(os.Getenv("LOGGER_LEVEL")) {
case "trace":
zerolog.SetGlobalLevel(zerolog.TraceLevel)
case "debug":
zerolog.SetGlobalLevel(zerolog.DebugLevel)
case "info":
zerolog.SetGlobalLevel(zerolog.InfoLevel)
case "warn":
zerolog.SetGlobalLevel(zerolog.WarnLevel)
case "error":
zerolog.SetGlobalLevel(zerolog.ErrorLevel)
case "fatal":
zerolog.SetGlobalLevel(zerolog.FatalLevel)
}
// Set the error stack marshaller
zerolog.ErrorStackMarshaler = pkgerrors.MarshalStack
// Set log time format
zerolog.TimeFieldFormat = time.RFC3339Nano
cmd.Execute()
}
<file_sep>/internal/data/root.go
package data
import (
"fmt"
"os"
"path/filepath"
"strings"
"github.com/tidwall/buntdb"
)
// Manager is the data manager
type Manager struct {
systemdb *buntdb.DB
}
// NewManager creates a new instance of a Manager and returns it
func NewManager(systemdbpath string) (*Manager, error) {
retval := new(Manager)
// Make sure the path already exists:
if err := os.MkdirAll(filepath.Dir(systemdbpath), os.FileMode(0755)); err != nil {
return nil, err
}
sysdb, err := buntdb.Open(systemdbpath)
if err != nil {
return retval, fmt.Errorf("problem opening the systemDB: %s", err)
}
retval.systemdb = sysdb
// Create our indexes
sysdb.CreateIndex("Trigger", "Trigger:*", buntdb.IndexString)
// Return our Manager reference
return retval, nil
}
// Close closes the data Manager
func (store Manager) Close() error {
syserr := store.systemdb.Close()
if syserr != nil {
return fmt.Errorf("an error occurred closing the manager. Syserr: %s ", syserr)
}
return nil
}
// GetKey returns a key to be used in the storage system
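// For example, GetKey("Trigger", someTriggerID) would yield "Trigger:<someTriggerID>", which matches the "Trigger:*" index pattern created in NewManager.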
func GetKey(entityType string, keyPart ...string) string {
allparts := []string{}
allparts = append(allparts, entityType)
allparts = append(allparts, keyPart...)
return strings.Join(allparts, ":")
}
<file_sep>/cmd/root.go
package cmd
import (
"fmt"
"os"
"path"
"github.com/spf13/cobra"
"github.com/mitchellh/go-homedir"
"github.com/spf13/viper"
)
var (
cfgFile string
problemWithConfigFile bool
loglevel string
)
// rootCmd represents the base command when called without any subcommands
var rootCmd = &cobra.Command{
Use: "fxtrigger",
Short: "REST service for GPIO / Sensor triggers",
Long: `REST based management for GPIO/Sensor -> endpoint triggers`,
}
// Execute adds all child commands to the root command and sets flags appropriately.
// This is called by main.main(). It only needs to happen once to the rootCmd.
func Execute() {
if err := rootCmd.Execute(); err != nil {
fmt.Println(err)
os.Exit(1)
}
}
func init() {
cobra.OnInitialize(initConfig)
// Here you will define your flags and configuration settings.
// Cobra supports persistent flags, which, if defined here,
// will be global for your application.
rootCmd.PersistentFlags().StringVar(&cfgFile, "config", "", "config file (default is $HOME/fxtrigger.yaml)")
// Bind config flags for optional config file override:
viper.BindPFlag("loglevel", rootCmd.PersistentFlags().Lookup("loglevel"))
}
// initConfig reads in config file and ENV variables if set.
func initConfig() {
// Find home directory.
home, err := homedir.Dir()
if err != nil {
fmt.Println(err)
os.Exit(1)
}
if cfgFile != "" {
// Use config file from the flag.
viper.SetConfigFile(cfgFile)
} else {
viper.AddConfigPath(home) // adding home directory as first search path
viper.AddConfigPath(".") // also look in the working directory
viper.SetConfigName("fxtrigger") // name the config file (without extension)
}
viper.AutomaticEnv() // read in environment variables that match
// Set our defaults
viper.SetDefault("datastore.system", path.Join(home, "fxtrigger", "db", "system.db"))
viper.SetDefault("datastore.retentiondays", 30)
viper.SetDefault("trigger.dndschedule", false) // Use a 'Do not disturb' schedule
viper.SetDefault("trigger.dndstart", "8:00pm") // Do not disturb scheduled start time
viper.SetDefault("trigger.dndend", "6:00am") // Do not disturb scheduled end time
viper.SetDefault("server.port", 3020)
viper.SetDefault("server.allowed-origins", "*")
// If a config file is found, read it in.
if err := viper.ReadInConfig(); err != nil {
problemWithConfigFile = true
}
}
|
8ae130389fd7861cdebd95d976ff556e6dac056f
|
[
"SQL",
"Markdown",
"Text",
"Go",
"Go Module",
"Shell"
] | 19
|
Go
|
danesparza/fxtrigger
|
c2a24bbde3857e16914b6c6d87180d774e1e0e3c
|
59c78a77b449d8b02bcc8ca5239a45d0b3fa7f2d
|
refs/heads/main
|
<repo_name>5ahmnm2021/03inclass-recap-llukic<file_sep>/README.md
# 03inclass-recap-llukic
Project description: A simple UI project to utilise a scene switching script.
Project
Development platform: MacOS X 11.2 (Beta), Unity 2020.1.5f1, Visual Studio 2019
Target platform: WebGL (960x600)
Visuals:
Screenshots (concept and experience), Video
Necessary setup/execution steps: /
Installation process, e.g. step-by-step instructions so that the project can be run after cloning it
Third party material: /
(if used Fonts, Sounds, Music, Graphics, Materials, Code etc.)
Project state: Scene Switch Working 80%
incl. progress in percent
Limitations: Still need to code functionality of individual screens.
Lessons Learned: How to program a scene switch script.
Copyright by lealukic ;)
<file_sep>/Assets/Scripts/Master.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
public class Master : MonoBehaviour
{
public Light l;
public GameObject obj;
public BoxCollider boxC;
public Camera cam;
public Slider s;
void Start()
{
l.intensity = 1.4f;
obj.name = "Leuchte";
boxC.isTrigger = true;
}
}
<file_sep>/Assets/Scripts/Controller.cs
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.SceneManagement;
using UnityEngine.UI;
public class Controller : MonoBehaviour
{
public InputField Zahl01;
public InputField Zahl02;
public Text result;
public Text Error;
public float num1;
public float num2;
string errorMsg = "Mate! Pls type a number!";
public void ChangeSceneA()
{
SceneManager.LoadScene("01ColorScene");
}
public void ChangeSceneB()
{
SceneManager.LoadScene("02NumberScene");
}
public void ChangeSceneC()
{
SceneManager.LoadScene("00WelcomeScene");
}
public void AddNumbersOnClick()
{
try
{
num1 = float.Parse(Zahl01.text);
}
catch (System.Exception)
{
Zahl01.image.color = new Color32(255,0,0,100);
Error.text = errorMsg;
}
try
{
num2 = float.Parse(Zahl02.text);
}
catch (System.Exception)
{
Zahl02.image.color = new Color32(255,0,0,100);
Error.text = errorMsg;
}
result.text = (num1 + num2).ToString();
Debug.Log("Die Addition ergibt: " + result.text);
}
}
|
790d8fe7c0956147cd8d5f0a7908040115f530b3
|
[
"Markdown",
"C#"
] | 3
|
Markdown
|
5ahmnm2021/03inclass-recap-llukic
|
ef3cacd8692fa379d08617655fbb569c8929328b
|
8950f8ca01d17ba8c447fc9ef9529217aa30a442
|
refs/heads/master
|
<file_sep>/*
* The MIT License
*
* Copyright (c) 2016 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#include <unistd.h>
#include <ruby.h>
static VALUE rb_alarmist_alarm(VALUE self, VALUE rb_seconds)
{
int seconds;
Check_Type(rb_seconds, T_FIXNUM);
seconds = FIX2INT(rb_seconds);
alarm(seconds);
return Qnil;
}
static VALUE rb_alarmist_sleep(VALUE self, VALUE rb_seconds)
{
int seconds;
Check_Type(rb_seconds, T_FIXNUM);
seconds = FIX2INT(rb_seconds);
sleep(seconds);
return Qnil;
}
void Init_alarmist(void)
{
VALUE rb_mAlarmist;
rb_mAlarmist = rb_define_module("Alarmist");
rb_define_module_function(rb_mAlarmist, "alarm", rb_alarmist_alarm, 1);
rb_define_module_function(rb_mAlarmist, "sleep", rb_alarmist_sleep, 1);
}
<file_sep>if ENV['DEVELOPMENT']
VERSION = `git describe --tags`.strip.gsub('-', '.')
else
VERSION = "0.1.0"
end
Gem::Specification.new do |s|
s.name = "alarmist"
s.version = VERSION
s.summary = "Alarmist is a simple wrapper around alarm(2)"
s.description = <<EOF
A simple wrapper around alarm(2) so that you will be delivered a
SIGALRM after a configurable timeout.
EOF
s.authors = [ "<NAME>" ]
s.email = "<EMAIL>"
s.homepage = "https://github.com/ethomson/alarmist"
s.license = "MIT"
s.files += Dir.glob("ext/**/*.[ch]")
s.extensions << "ext/alarmist/extconf.rb"
end
<file_sep>require 'minitest/autorun'
require 'alarmist'
class AlarmTest < Minitest::Test
def test_alarm
alarmed = false
Signal.trap("ALRM") do
alarmed = true
end
Alarmist::alarm(1)
sleep(2)
assert_equal(true, alarmed)
Signal.trap("ALRM", "DEFAULT")
end
def test_cancel_alarm
Alarmist::alarm(1)
Alarmist::alarm(0)
sleep(2)
end
def test_timeout
assert_raises Alarmist::TimeoutError do
Alarmist::timeout(1) { sleep(2) }
end
end
def test_timeout_returns
assert_equal(42, Alarmist::timeout(2) { sleep(1); 42 })
end
def test_timeout_propagates_exception
assert_raises NameError do
Alarmist::timeout(1) { raise NameError.new "foo" }
end
end
def test_timeout_resets_sighandler
handler = Proc.new { puts "hello, world!\n" }
Signal.trap("ALRM", handler)
Alarmist::timeout(1) { nil }
restored = Signal.trap("ALRM", "DEFAULT")
assert_equal(handler, restored)
end
def test_timeout_can_interrupt_native_sleep
assert_raises Alarmist::TimeoutError do
Alarmist::timeout(1) { Alarmist::sleep(60) }
end
end
def test_timeout_raises_other_signals
e = assert_raises SignalException do
Alarmist::timeout(2) { Process.kill("USR1", Process.pid); sleep(60) }
end
assert_equal("SIGUSR1", e.message)
end
end
<file_sep>require 'mkmf'
dir_config('alarmist')
create_makefile('alarmist')
<file_sep>require 'rake/testtask'
require 'rake/extensiontask'
gemspec = Gem::Specification::load(File.expand_path('alarmist.gemspec'))
Rake::ExtensionTask.new('alarmist', gemspec) do |r|
r.lib_dir = 'lib/alarmist'
end
Rake::TestTask.new do |t|
t.libs << 'lib' << 'test'
t.pattern = 'test/alarm_test.rb'
t.warning = true
t.verbose = true
end
task :default => [:compile, :test]
<file_sep>require File.expand_path('../alarmist/alarmist', __FILE__)
module Alarmist
def self.timeout(time, &block)
old_handler = Signal.trap("ALRM", "DEFAULT")
Alarmist::alarm(time)
begin
ret = block.call
sleep(0)
return ret
rescue SignalException => e
raise Alarmist::TimeoutError if e.message == "SIGALRM"
raise
ensure
Alarmist::alarm(0)
Signal.trap("ALRM", old_handler)
end
end
class TimeoutError < StandardError
end
end
|
6968a694e849769170f56b33bbd484f3b80ed197
|
[
"C",
"Ruby"
] | 6
|
C
|
ethomson/alarmist
|
f8e844f3a4dcca5fa10009c5b3280861ca7d7313
|
c4fa0375f921340c01703f266660a6cb5d350cb9
|
refs/heads/master
|
<repo_name>poweif/graphics-fun<file_sep>/skulpt_gl/simple/mesh.py
import webgl
import webgl.glut
class Mesh:
    # Stub class -- mesh loading/drawing has not been implemented yet
    pass
<file_sep>/skulpt_gl/nacl/libigl.cc
// Copyright (c) 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "ppapi/cpp/instance.h"
#include "ppapi/cpp/module.h"
#include "ppapi/cpp/var.h"
#include <igl/cotmatrix.h>
#include <Eigen/Dense>
#include <Eigen/Sparse>
#include <iostream>
#include <map>
#include <sstream>
// libigl should be a singleton.
class libiglInstance;
libiglInstance* libigl_singleton_ = NULL;
namespace {
const char* const kHelloString = "hello";
const char* const kReplyString = "hello from NaCl";
} // namespace
class libiglInstance : public pp::Instance {
public:
explicit libiglInstance(PP_Instance instance) :
pp::Instance(instance),
next_method_id_(1) {}
virtual ~libiglInstance() {}
virtual void HandleMessage(const pp::Var& var_message) {
if (!var_message.is_string()) {
return;
}
std::string message = var_message.AsString();
pp::Var var_reply;
if (message == kHelloString) {
}
}
void RegisterMethod(const pp::Var& method_name) {
}
private:
int next_method_id_;
};
class libiglModule : public pp::Module {
public:
libiglModule() : pp::Module() {}
virtual ~libiglModule() {}
virtual pp::Instance* CreateInstance(PP_Instance instance) {
if (libigl_singleton_)
return libigl_singleton_;
return (libigl_singleton_ = new libiglInstance(instance));
}
};
namespace pp {
Module* CreateModule() {
return new libiglModule();
}
} // namespace pp
<file_sep>/skulpt_gl/setup.sh
#/bin/sh
function cp_skulpt() {
cp -r ../../skulpt/dist ./skulpt
}
function update() {
# Nodejs packages
npm install
# Bower
bower update
}
function libigl_update() {
cd nacl
if [ ! -d ./libigl ]; then
git clone https://github.com/libigl/libigl.git
fi
if [ ! -d ./eigen ]; then
hg clone https://bitbucket.org/eigen/eigen/
cd ./eigen/unsupported/Eigen/src/SparseExtra
patch < ../../../../../MatrixMarketIterator.patch
cd ../../../../../
fi
if [ ! -d ./libigl/include/Eigen ]; then
cd libigl/include
ln -s ../../eigen/Eigen .
cd ../../
fi
if [ ! -d ./libigl/include/unsupported ]; then
cd libigl/include
ln -s ../../eigen/unsupported .
cd ../../
fi
cd ..
}
cp_skulpt
update
libigl_update
<file_sep>/skulpt_gl/simple/main.py
from math import sin
trianglesVerticeBuffer = gl.createBuffer()
trianglesColorBuffer = gl.createBuffer()
program = None
vs_src = """
attribute vec3 aVertexPosition;
attribute vec3 aVertexColor;
varying highp vec4 vColor;
void main(void) {
gl_Position = vec4(aVertexPosition, 1.0);
vColor = vec4(aVertexColor, 1.0);
}
"""
fs_src = """
varying highp vec4 vColor;
void main(void) {
gl_FragColor = vColor;
}
"""
def setup():
global program
vs = gl.createShader(gl.VERTEX_SHADER)
gl.shaderSource(vs, vs_src)
gl.compileShader(vs)
print ("Vertex shader COMPILE_STATUS: " +
str(gl.getShaderParameter(vs, gl.COMPILE_STATUS)))
fs = gl.createShader(gl.FRAGMENT_SHADER)
gl.shaderSource(fs, fs_src)
gl.compileShader(fs)
print ("Fragment shader COMPILE_STATUS: " +
str(gl.getShaderParameter(fs, gl.COMPILE_STATUS)))
program = gl.createProgram()
gl.attachShader(program, vs)
gl.attachShader(program, fs)
gl.linkProgram(program)
print ("Program LINK_STATUS: " +
str(gl.getProgramParameter(program, gl.LINK_STATUS)))
gl.useProgram(program)
triangleVerticeColors = [1.0, 0.0, 0.0,
1.0, 1.0, 1.0,
1.0, 0.0, 0.0]
gl.bindBuffer(gl.ARRAY_BUFFER, trianglesColorBuffer)
gl.bufferData(gl.ARRAY_BUFFER, webgl.Float32Array(triangleVerticeColors),
gl.STATIC_DRAW)
def render(gl):
gl.clearColor(1.0, 1.0, 1.0, 1.0)
gl.clear(gl.COLOR_BUFFER_BIT)
gl.viewport(0, 0, 500, 500)
triangleVertices = [-0.5, 0.5, 0.0,
0.0, 0.0, 0.0,
-0.5, -0.5, 0.0]
gl.bindBuffer(gl.ARRAY_BUFFER, trianglesVerticeBuffer)
gl.bufferData(gl.ARRAY_BUFFER, webgl.Float32Array(triangleVertices),
gl.DYNAMIC_DRAW)
vertexPositionAttribute = gl.getAttribLocation(program, "aVertexPosition")
gl.enableVertexAttribArray(vertexPositionAttribute)
gl.bindBuffer(gl.ARRAY_BUFFER, trianglesVerticeBuffer)
gl.vertexAttribPointer(vertexPositionAttribute, 3, gl.FLOAT, False, 0, 0)
vertexColorAttribute = gl.getAttribLocation(program, "aVertexColor")
gl.enableVertexAttribArray(vertexColorAttribute)
gl.bindBuffer(gl.ARRAY_BUFFER, trianglesColorBuffer)
gl.vertexAttribPointer(vertexColorAttribute, 3, gl.FLOAT, False, 0, 0)
gl.drawArrays(gl.TRIANGLES, 0, 3)
def mouse(gl, button, state, x, y):
print str(x) + " " + str(y)
def mouseMove(gl, x, y):
print 'mouse moving: ' + str(x) + ' ' + str(y)
def keyboard(gl, ch, x, y):
print 'pressed ' + ch + ' with position at: ' + str(x) + ' ' + str(y)
def main():
setup()
glut.displayFunc(render)
glut.mouseFunc(mouse)
glut.keyboardFunc(keyboard)
main()
<file_sep>/skulpt_gl/simple/prep.py
import webgl
import webgl.glut
gl = webgl.Context("threedDisplay")
glut = webgl.glut.Init(gl)
|
6713aabddd10266024b11001f9a625a424e78d2b
|
[
"Python",
"C++",
"Shell"
] | 5
|
Python
|
poweif/graphics-fun
|
9bd620cef97fca4b21b56b8fa1126ec25513eebb
|
092828f3e7d0aaea95a57f77451781e1feb3c7f3
|
refs/heads/master
|
<file_sep>var isMouseDownSearch=0;
var deltaSearchPositionX;
var deltaSearchPositionY;
var searchStatus=0;
var searchTotalSteps=15;
var searchIncrement = -1;
var searchVisible=0;
var searchPosX;
var searchPosY;
var searchWidth;
var searchHeight;
var modeEdit = -1;
var searchBoolString = new Array("AND", "OR");
var searchYearFromString = new Array(2012, 2011, 2010, 2009, 2008, 2007, 2006, 2005, 2004, 2003, 2002, 2001, 2000, 1999,
1998, 1997, 1996, 1995, 1994, 1993, 1992, 1991, 1990, 1989, 1988, 1987, 1986, 1985,
1984, 1983, 1982, 1981, 1980, 1979, 1978, 1977, 1976, 1975, 1974, 1973, 1972, 1971,
1970, 1969, 1968, 1967, 1966, 1965, 1964, 1963, 1962, 1961, 1960, "All years");
var searchYearToString = new Array("Present", 2012, 2011, 2010, 2009, 2008, 2007, 2006, 2005, 2004, 2003, 2002, 2001, 2000,
1999, 1998, 1997, 1996, 1995, 1994, 1993, 1992, 1991, 1990, 1989, 1988, 1987, 1986,
1985, 1984, 1983, 1982, 1981, 1980, 1979, 1978, 1977, 1976, 1975, 1974, 1973, 1972,
1971, 1970, 1969, 1968, 1967, 1966, 1965, 1964, 1963, 1962, 1961, 1960, "Before 1960");
var searchSortByString = new Array("Date (Newest)", "Date (Oldest)", "Cited by", "Relevance", "Author cite",
"First Author (A-Z)", "First Author (Z-A)", "Publication Name (A-Z)", "Title (A-Z)",
"Title (A-Z)", "Title (Z-A)", "Relevance");
var searchIndexQuery = new Array();
var searchStringQuery = new Array();
var searchBoolQuery = new Array();
var searchText;
var searchSelect;
var searchBoolSelect;
var searchYearFromSelect;
var searchYearToSelect;
var searchSortBySelect;
var abstractSearchHeight = new Array();
var abstractSearchState = new Array();
var abstractSearchMode = new Array();
var abstractSearchTotal = 20;
var contentSearchResult;
var headerSearch;
var showSearchinMap = 1;
var showSearchHref;
var divCountryDistributionSearch;
var modeCountryDistributionSearch = 0;
var defaultChangedSearch = 0;
var modeCountryTypeSearch = 0;
var hrefCountryTypeSearch;
var hrefCDS;
//var callShowOverallSearch = 1;
var imgSearch = new Array();
imgSearch[0] = imgObject[8];
imgSearch[1] = imgObject[13];
function initializeSearch() {
divSearch = document.getElementById("windowSearch");
divSearch.style.overflow = 'hidden';
searchPosX = 780;
searchPosY = 130;
searchWidth = parseInt(divSearch.style.width);
searchHeight = parseInt(divSearch.style.height);
//divSearch.style.display = "none";
divSearch.style.width = '0px';
divSearch.style.height = '0px';
divSearch.style.overflow = 'hidden';
ctxMenu.drawImage(imgSearch[searchVisible], 5*frameWidth+4*buttonMenuWidth, frameWidth);
headerSearch = document.createElement('div');
headerSearch.style.position = 'relative';
headerSearch.style.top = '0px';
headerSearch.style.left ='0px';
headerSearch.style.height = '23px';
headerSearch.style.width = searchWidth + 'px';
headerSearch.style.paddingLeft = '5px';
headerSearch.style.color = 'white';
headerSearch.appendChild(document.createTextNode("Search"));
divSearch.appendChild(headerSearch);
headerSearch.onselectstart = function() {return false};
var temp = document.createElement('div');
divSearch.appendChild(temp);
temp.setAttribute('id', "contentSearch");
temp.style.position = 'relative';
//temp.style.top = topbarHeight-9 + 'px';
temp.style.left = 1 + 'px';
temp.style.width = searchWidth-2 + 'px';
temp.style.height = searchHeight-parseInt(headerSearch.style.height) +'px';
temp.style['overflow-x'] = 'hidden';
temp.style['overflow-y'] = 'auto';
var contentSearch = document.getElementById('contentSearch');
temp = document.createElement('div');
contentSearch.appendChild(temp);
temp.setAttribute('id', 'contentSearch_query');
searchText = document.createElement('input');
searchText.type = 'text';
searchText.setAttribute('name', 'search_inputText');
searchSelect = document.createElement('select');
searchSelect.setAttribute('name', 'search_inputSelect');
for (var i=0; i<searchElement.length; i++) {
searchSelect.options[i] = new Option (searchElement[i], i);
}
searchBoolSelect = document.createElement('select');
searchSelect.setAttribute('name', 'search_inputBoolSelect');
for (var i=0; i<searchBoolString.length; i++) {
searchBoolSelect.options[i] = new Option (searchBoolString[i], i);
}
searchYearFromSelect = document.createElement('select');
searchYearFromSelect.setAttribute('name', 'search_inputYearFromSelect');
for (var i=0; i<searchYearFromString.length; i++) {
searchYearFromSelect.options[i] = new Option (searchYearFromString[i], i);
}
searchYearFromSelect.value = searchYearFromString.length-1;
searchYearToSelect = document.createElement('select');
searchYearToSelect.setAttribute('name', 'search_inputYearToSelect');
for (var i=0; i<searchYearToString.length; i++) {
searchYearToSelect.options[i] = new Option (searchYearToString[i], i);
}
searchSortBySelect = document.createElement('select');
searchSortBySelect.setAttribute('name', 'search_inputSortBySelect');
for (var i=0; i<searchSortByString.length; i++) {
searchSortBySelect.options[i] = new Option (searchSortByString[i], i);
}
updatecontentSearchQuery();
contentSearchResult = document.createElement('div');
contentSearch.appendChild(contentSearchResult);
contentSearchResult.setAttribute('id', 'contentSearch_result');
contentSearchResult.style.position = 'absolute';
contentSearchResult.style.left = 1 + 'px';
contentSearchResult.style.width = searchWidth-2 + 'px';
contentSearchResult.style.overflow = 'hidden';
showSearchHref = document.createElement('a');
showSearchHref.href = "#";
showSearchHref.style.color = 'blue';
showSearchHref.onclick = function () {
modeInMap = searchMode;
viewAllModeActive = 0;
showResult(searchMode, queryResults);
}
showSearchHref.textContent = "Show in map";
divCountryDistributionSearch = document.createElement('div');
divCountryDistributionSearch.style.background = '#F2F1EF';
divCountryDistributionSearch.style.position = 'absolute';
divCountryDistributionSearch.style.width = '130px';
divCountryDistributionSearch.style.height = '300px';
divCountryDistributionSearch.style.top = searchPosY + 27 + 'px';
divCountryDistributionSearch.style.left = searchPosX-parseInt(divCountryDistributionSearch.style.width) + 8 + 'px';
divCountryDistributionSearch.style['z-index'] = 0;
divCountryDistributionSearch.style.overflow = 'auto';
divCountryDistributionSearch.style.display = 'none';
document.body.appendChild(divCountryDistributionSearch);
hrefCountryTypeSearch = document.createElement('a');
hrefCountryTypeSearch.href = "#";
hrefCountryTypeSearch.style.color = 'blue';
hrefCountryTypeSearch.textContent = "view 100 result distribution";
hrefCountryTypeSearch.onclick = function () {
if (modeCountryTypeSearch==0) {
viewAllModeActive = 0;
modeInMap = searchMode;
modeCountryTypeSearch = 1;
hrefCountryTypeSearch.textContent = "View overall result distribution";
//changeModeSearch(); **
showResult(searchMode, searchObject);
}
else {
viewAllModeActive = 1;
modeInMap = searchMode;
modeCountryTypeSearch = 0;
hrefCountryTypeSearch.textContent = "View 100 result distribution";
showOverallCountrySearch(queryCtry);
showResult(searchMode, queryCtry);
}
//showOverallCountrySearch(queryCtry);
}
//showOverallCountrySearch(queryCtry);
hrefCDS = document.createElement('a');
hrefCDS.href = "#";
hrefCDS.style.color = 'blue';
hrefCDS.textContent = "View country distribution";
hrefCDS.onclick = function () {
showSearchCountryDistribution();
if (modeCountryDistributionSearch==0)
hrefCDS.textContent = "View country distribution";
else hrefCDS.textContent = "Hide country distribution";
};
}
function updatecontentSearchQuery() {
removecontentSearchQueryChild();
var contentSearch_query = document.getElementById("contentSearch_query");
/*
//CHANGED:
while (headerSearch.firstChild)
{
headerSearch.removeChild(headerSearch.firstChild);
}
headerSearch.appendChild(document.createTextNode("Search(" +total_Search_Engine +")"));
/////////////////////
*/
var searchCategory = document.createElement('div');
searchCategory.setAttribute('id', "contentSearchQuery_categor1");
contentSearch_query.appendChild(searchCategory);
for (var i=0; i<searchIndexQuery.length; i++) {
if (modeEdit==i) {
if (i>0) {
searchBoolSelect.value = searchBoolQuery[modeEdit];
searchCategory.appendChild(searchBoolSelect);
}
searchText.value = searchStringQuery[modeEdit];
searchCategory.appendChild(searchText);
searchSelect.value = searchIndexQuery[modeEdit];
searchCategory.appendChild(searchSelect);
var searchAccept = document.createElement('a');
searchAccept.href = "#";
searchAccept.style.color ='blue';
searchAccept.textContent = "accept";
searchAccept.onclick = function() {acceptQueryChange();};
searchCategory.appendChild(searchAccept);
searchCategory.appendChild(document.createTextNode(" "));
var searchCancel = document.createElement('a');
searchCancel.href = "#";
searchCancel.style.color = 'blue';
searchCancel.textContent = "cancel";
searchCategory.appendChild(searchCancel);
searchCancel.onclick = function() {cancelQueryChange();};
}
else {
if (i>0) {
searchCategory.appendChild(document.createTextNode(searchBoolString[searchBoolQuery[i]]+" "));
}
searchCategory.appendChild(document.createTextNode(searchElement[searchIndexQuery[i]] + " : " + searchStringQuery[i] + " "));
if (modeEdit==-1) {
var searchEdit = document.createElement('a');
searchEdit.textContent = "edit";
searchCategory.appendChild(searchEdit);
searchEdit.href = "javascript:editSearchQuery("+i+")";
searchCategory.appendChild(document.createTextNode(" "));
var searchRemove = document.createElement('a');
searchRemove.textContent = "remove";
searchCategory.appendChild(searchRemove);
searchRemove.href = "javascript:removeSearchQuery("+i+")";
}
}
searchCategory.appendChild(document.createElement('br'));
}
if (modeEdit==-1) {
var searchField = document.createElement('div');
contentSearch_query.appendChild(searchField);
if (searchIndexQuery.length>0) {
searchBoolSelect.value = 0;
searchField.appendChild(searchBoolSelect);
}
searchText.value = "";
searchField.appendChild(searchText);
searchSelect.value = "0";
searchField.appendChild(searchSelect);
var searchAdd = document.createElement('a');
searchField.appendChild(searchAdd);
searchAdd.href = "#";
searchAdd.style.color = 'blue';
searchAdd.textContent = "add";
searchAdd.onclick = function() {addSearchQuery();};
searchField.appendChild(document.createTextNode(" "));
var searchReset = document.createElement('a');
searchReset.href = "#";
searchReset.style.color = 'blue';
searchReset.textContent = "reset";
searchField.appendChild(searchReset);
searchReset.onclick = function() {resetSearchQuery();};
searchField.appendChild(document.createElement('br'));
searchField.appendChild(document.createTextNode("Published year"));
searchField.appendChild(document.createElement('br'));
searchField.appendChild(document.createTextNode("from : "));
searchField.appendChild(searchYearFromSelect);
searchField.appendChild(document.createElement('br'));
searchField.appendChild(document.createTextNode("to : "));
searchField.appendChild(searchYearToSelect);
searchField.appendChild(document.createElement('br'));
searchField.appendChild(document.createTextNode("Sort the document by "));
searchField.appendChild(searchSortBySelect);
searchField.appendChild(document.createElement('br'));
var searchSubmitButton = document.createElement('button');
searchSubmitButton.textContent = "Search";
searchField.appendChild(searchSubmitButton);
searchSubmitButton.onclick = function() {submitSearchQuery();};
}
}
function removecontentSearchQueryChild() {
var el = document.getElementById("contentSearch_query");
while (el.firstChild)
el.removeChild(el.firstChild);
}
function addSearchQuery() {
if (!searchText.value) {
alert ("Please specify a value to the text box.");
}
else {
searchBoolQuery.push(searchBoolSelect.value);
searchIndexQuery.push(searchSelect.value);
searchStringQuery.push(searchText.value);
updatecontentSearchQuery();
//console.log(searchSelect.value);
}
}
function resetSearchQuery() {
searchBoolQuery = [];
searchIndexQuery = [];
searchStringQuery = [];
updatecontentSearchQuery();
}
function submitSearchQuery() {
if (searchIndexQuery.length==0) {
alert("Please insert a query.");
}
else if (searchYearFromSelect.value<searchYearToSelect.value-1) {
alert("The year cannot be from higher value to lower value.")
}
else {
resetQuery();
searchBoolQuery[0] = 0;
for (var i=0; i<searchIndexQuery.length; i++) {
addQuery(searchStringQuery[i], searchIndexQuery[i], 1-searchBoolQuery[i]);
//console.log(!searchBoolQuery[i]);
}
modeInMap = searchMode;
viewAllModeActive = 1;
modeCountryDistributionSearch = 0;
hrefCountryTypeSearch.textContent = "view 100 result distribution";
defaultChangedSearch = 0;
changeDate(searchYearFromString[searchYearFromSelect.value], searchYearToString[searchYearToSelect.value])
changeSort(searchSortBySelect.value)
submitQuery(0);
}
}
function editSearchQuery(editNumber) {
modeEdit = editNumber;
updatecontentSearchQuery();
}
function removeSearchQuery(removeNumber) {
searchBoolQuery.splice(removeNumber, 1);
searchIndexQuery.splice(removeNumber, 1);
searchStringQuery.splice(removeNumber, 1);
modeEdit = -1;
updatecontentSearchQuery();
}
function acceptQueryChange() {
if (!searchText.value) {
alert("Please specify a value to the text box.");
}
else {
searchBoolQuery[modeEdit] = searchBoolSelect.value;
searchIndexQuery[modeEdit] = searchSelect.value;
searchStringQuery[modeEdit] = searchText.value;
modeEdit = -1;
updatecontentSearchQuery();
}
}
function cancelQueryChange() {
modeEdit = -1;
updatecontentSearchQuery();
}
function updateSearch(sObject, sMode) {
console.log("search haha");
console.log(sObject);
searchYearFromSelect.value = searchYearFromString.length-1;
searchYearToSelect.value = 0;
/*if (callShowOverallSearch==1) {
showOverallCountrySearch(queryCtry);
callShowOverallSearch = 0;
}*/
updatecontentSearchQuery();
removecontentSearchResultChild();
//console.log(sObject);
if (sObject.length>0) {
if (sMode==1 || defaultChangedSearch==1) {
var temp = document.createElement('a');
temp.href = "#";
temp.style.color = 'blue';
temp.onclick = function () {
if (defaultChangedSearch==1) {
modeInMap = searchMode;
viewAllModeActive = 0;
defaultChangedSearch = 0;
//resetQueryCitedby(); **
}
else {
viewAllModeActive = 0;
modeInMap = searchMode;
showResult(searchMode, searchObject);
updateSearch(queryResults, 0);
showOverallCountrySearch(queryCtry);
}
modeCountryTypeSearch = 1;
hrefCountryTypeSearch.textContent = "View overall result distribution";
};
temp.textContent = "Show all result";
contentSearchResult.appendChild(temp);
contentSearchResult.appendChild(document.createElement('br'));
}
//contentSearchResult.appendChild(hrefCDS);
//contentSearchResult.appendChild(document.createElement('br'));
contentSearchResult.appendChild(showSearchHref);
contentSearchResult.appendChild(document.createElement('br'));
for (var i=0; i<sObject.length; i++) {
var temp = document.createElement('div');
temp.style.fontSize = '13px';
contentSearchResult.appendChild(temp);
temp.setAttribute('id', "Search" + i);
temp.style.position = 'relative';
temp.style.left = 3 + 'px';
insertSearch(sObject, i);
}
if (sMode==0) {
if (currentLevelSearchEngine>1) {
temp = document.createElement('a');
contentSearchResult.appendChild(temp);
temp.href="javascript:downSearchEngine()";
temp.textContent = "Previous";
contentSearchResult.appendChild(document.createTextNode(" "));
}
if (currentLevelSearchEngine<totalLevelSearchEngine) {
temp = document.createElement('a');
				contentSearchResult.appendChild(temp);
temp.href = "javascript:upSearchEngine()";
//temp.style.cssFloat = 'right';
temp.textContent = "Next";
}
}
}
else {
contentSearchResult.innerHTML = "There is no result for the query.";
}
}
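// Renders a single search result: an expand/collapse icon, a bold title link that
// highlights the item on the map, source and citation info, up to three authors
// (then "et al."), affiliation/location, and a collapsible abstract section with
// "Show in Scopus" and "Set as main article" links.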
function insertSearch(sObject, i) {
if (typeof(sObject[i].title) != 'undefined' || sObject[i].sourcetitle || sObject[i].publicationName)
{
var tempTable = document.createElement('table');
//tempTable.align = 'justify';
document.getElementById("Search"+i).appendChild(tempTable);
var temp = document.createElement("IMG");
temp.setAttribute('id', "Search" + i + "_image");
temp.src = imgExpand.src;
//temp.setAttribute('onclick', "showAbstractRef("+i+")");
temp.onclick = function () {showAbstractSearch(i);};
var row = tempTable.insertRow(0);
row.insertCell(0).appendChild(temp);
temp = document.createElement("a");
temp.style['font-weight'] = 'bold';
temp.style.textDecoration = 'none';
temp.onclick = function () {
if (abstractSearchMode[i]==0) {
showAbstractSearch(i);
}
viewAllModeActive = 0;
modeInMap = searchMode;
showResult (searchMode, sObject);
highlight(sObject[i]);
};
temp.href = "#";
temp.style.color = 'blue';
//temp.textContent = (currentLevelSearchEngine-1)*100+i+1 + " " + sObject[i].title;
var st = 0;
if (sObject[i].title) {
temp.textContent = sObject[i].title;
}
else if (sObject[i].sourcetitle){
temp.textContent = sObject[i].sourcetitle;
st=1;
}
else if (sObject[i].publicationName) {
temp.textContent = sObject[i].publicationName;
st=1;
}
row.insertCell(1).appendChild(temp);
temp = document.createElement('div');
temp.style.fontSize = '11px';
temp.style.paddingLeft = '18px';
document.getElementById("Search"+i).appendChild(temp);
//console.log("aa " + sObject[i].sourcetitle);
if (st==0) {
if (sObject[i].sourcetitle) {
temp.appendChild (document.createTextNode(sObject[i].sourcetitle));
if (sObject[i].citedby) {
temp.appendChild(document.createTextNode(", cited "+sObject[i].citedby+" times"));
}
else if (sObject[i].citedbyCount) {
temp.appendChild(document.createTextNode(", cited "+sObject[i].citedbyCount+" times"));
}
}
else if (sObject[i].publicationName) {
temp.appendChild (document.createTextNode(sObject[i].publicationName));
if (sObject[i].citedby) {
temp.appendChild(document.createTextNode(", cited "+sObject[i].citedby+" times"));
}
else if (sObject[i].citedbyCount) {
temp.appendChild(document.createTextNode(", cited "+sObject[i].citedbyCount+" times"));
}
}
temp.appendChild(document.createElement('br'));
}
if (sObject[i].author) {
if ( typeof(sObject[i].author[0].authname)!= 'undefined' )temp.appendChild(document.createTextNode(sObject[i].author[0].authname));
else temp.appendChild(document.createTextNode(sObject[i].author[0]["ce:indexed-name"]));
for (var j=1; j<sObject[i].author.length; j++) {
if (j==3) {
temp.appendChild(document.createTextNode(", et al."));
break;
}
if ( typeof(sObject[i].author[j].authname)!= 'undefined' )temp.appendChild(document.createTextNode(", "+sObject[i].author[j].authname));
				else temp.appendChild(document.createTextNode(", "+sObject[i].author[j]["ce:indexed-name"]));
}
temp.appendChild(document.createElement('br'));
}
if (sObject[i].affilname) {
temp.appendChild(document.createTextNode(sObject[i].affilname.split('|')[0]));
temp.appendChild(document.createElement('br'));
}
if (sObject[i].city) {
temp.appendChild(document.createTextNode(sObject[i].city));
if (sObject[i].country) {
temp.appendChild(document.createTextNode(", "+sObject[i].country));
}
temp.appendChild(document.createElement('br'));
}
else if (sObject[i].country) {
temp.appendChild(document.createTextNode(sObject[i].country));
temp.appendChild(document.createElement('br'));
}
temp = document.createElement('div');
temp.style.fontSize = '11px';
temp.align = 'justify';
document.getElementById("Search"+i).appendChild(temp);
temp.setAttribute('id', "Search" + i + "_abstract");
temp.style.position = 'relative';
temp.style.left = 18 + 'px';
temp.style.width = searchWidth - 43 + 'px';
if (sObject[i].url) {
var temp2 = document.createElement('a');
temp2.textContent = "Show in Scopus";
temp2.href = "javascript:window.open('" + sObject[i].url + "')";
temp2.style.textDecoration = 'none';
temp.appendChild(temp2);
//temp.appendChild(document.createElement('br'));
}
if (sObject[i].authorId && sObject[i].scopusId) {
var temp2 = document.createElement('a');
temp2.href = "#";
temp2.style.color = 'blue';
temp2.style.textDecoration = 'none';
temp2.style.cssFloat = 'right';
temp2.textContent = "Set as main article";
temp2.onclick = function() {newMainArticle(sObject[i]);};
temp.appendChild(temp2);
//temp.appendChild(document.createElement('br'));
}
temp.appendChild(document.createElement('br'));
if (sObject[i].Abstract) {
temp.appendChild(document.createTextNode("Abstract:"));
temp.appendChild(document.createElement('br'));
temp.appendChild(document.createTextNode(sObject[i].Abstract));
}
else temp.appendChild(document.createTextNode("Abstract not available"));
temp.style.overflow = 'hidden';
abstractSearchHeight[i] = temp.clientHeight;
temp.style.height = 0 + 'px';
//temp.style.display = 'none';
abstractSearchState[i] = 0;
abstractSearchMode[i] = 0;
}
}
function showSearchCountryDistribution() {
if (modeCountryDistributionSearch==0) {
modeCountryDistributionSearch = 1;
divCountryDistributionSearch.style.display = 'block';
}
else {
modeCountryDistributionSearch = 0;
divCountryDistributionSearch.style.display = 'none';
}
}
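// Fills the country-distribution side panel (after the view-type toggle link) with one
// link per country plus its hit count; clicking a country focuses the map on it via
// focusToCountrySearch().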
function showOverallCountrySearch(csObject) {
while (divCountryDistributionSearch.firstChild) {
divCountryDistributionSearch.removeChild(divCountryDistributionSearch.firstChild);
}
divCountryDistributionSearch.appendChild(hrefCountryTypeSearch);
divCountryDistributionSearch.appendChild(document.createElement('br'));
for (var i=0; i<csObject.length; i++) {
var temp = document.createElement('a');
temp.href = "javascript:focusToCountrySearch('"+csObject[i].name+"');";
temp.textContent = csObject[i].name;
temp.style.textDecoration = 'none';
divCountryDistributionSearch.appendChild(temp);
divCountryDistributionSearch.appendChild(document.createTextNode(" ("+csObject[i].hitCount+")"));
divCountryDistributionSearch.appendChild(document.createElement('br'));
}
}
function focusToCountrySearch(csObjectName) {
viewAllModeActive = 0;
modeInMap = searchMode;
if (modeCountryTypeSearch==0) {
defaultChangedSearch = 1;
modeCountryTypeSearch = 1;
hrefCountryTypeSearch.textContent = "View overall result distribution";
var temp=new Object;
temp.country=csObjectName;
//getCitedbyFilter1(new Array(temp)); **
}
else {
//getCitedbyFilter2(new Array(csObjectName)); **
}
highlight(getObject(csObjectName));
}
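// Toggles the abstract of result i, swapping the expand/contract icon and animating
// the section height in abstractSearchTotal steps (see expand/contractAbstractSearch).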
function showAbstractSearch(i) {
//console.log("show");
if (abstractSearchMode[i]==0) {
document.getElementById("Search" + i + "_image").src = imgContract.src;
//document.getElementById("Reference" + i + "_abstract").style.display = 'block';
abstractSearchMode[i] = 1;
expandAbstractSearch(i);
}
else {
document.getElementById("Search" + i + "_image").src = imgExpand.src;
abstractSearchMode[i] = 0;
contractAbstractSearch(i);
}
}
function expandAbstractSearch(i) {
//console.log(i);
abstractSearchState[i] += 1;
document.getElementById("Search" + i + "_abstract").style.height = abstractSearchState[i]*abstractSearchHeight[i]/abstractSearchTotal + 'px';
//console.log(abstractSearchHeight[i]);
if (abstractSearchState[i]<abstractSearchTotal && abstractSearchMode[i]==1) {
setTimeout (function() {expandAbstractSearch(i)}, 10);
}
}
function contractAbstractSearch(i) {
//console.log(i);
abstractSearchState[i] -= 1;
document.getElementById("Search" + i + "_abstract").style.height = abstractSearchState[i]*abstractSearchHeight[i]/abstractSearchTotal + 'px';
if (abstractSearchState[i]>0 && abstractSearchMode[i]==0) {
setTimeout (function(){contractAbstractSearch(i);}, 10);
}
//else if (abstractRefMode[i]==0)
//document.getElementById("Reference" + i + "_abstract").style.display = 'none';
}
function removecontentSearchResultChild() {
var el = document.getElementById("contentSearch_result");
while (el.firstChild) {
//console.log(el.firstChild.id);
el.removeChild(el.firstChild);
}
//console.log(el.lastChild.id);
}
function mouseDownSearch(e){
divSearch.style['z-index'] = zIndex;
divCountryDistributionSearch.style['z-index'] = zIndex;
zIndex += 1;
if(e.clientY-divSearch.offsetTop<topbarHeight) {
if (e.clientX-divSearch.offsetLeft<=parseInt(divSearch.style.width)-minimizePosWidth) {
isMouseDownSearch=true;
lastMouseX = e.clientX;
lastMouseY = e.clientY;
}
else if (e.clientX-divSearch.offsetLeft>parseInt(divSearch.style.width)-minimizePosWidth) {
searchIncrement *= -1;
changeViewSearch();
}
}
}
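// Animates the search window between its minimized spot on the menu bar and its full
// size/position, stepping the geometry every 10 ms and redrawing the menu icon to
// reflect visibility.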
function changeViewSearch() {
modeCountryDistributionSearch = 0;
divCountryDistributionSearch.style.display = 'none';
hrefCDS.textContent = "View country distribution";
if (searchStatus <= 0 && searchIncrement==-1) {
searchStatus = 1;
}
else if (searchStatus >= searchTotalSteps && searchIncrement==1) {
searchStatus = searchTotalSteps - 1;
}
searchStatus += searchIncrement;
divSearch.style.width = searchStatus*searchWidth/searchTotalSteps + "px";
divSearch.style.height = searchStatus*searchHeight/searchTotalSteps + "px";
divSearch.style.left = (canvasMenu.offsetLeft+5*frameWidth+9*buttonMenuWidth/2) + searchStatus*(searchPosX+searchWidth/2-canvasMenu.offsetLeft-5*frameWidth-9*buttonMenuWidth/2)/searchTotalSteps - searchStatus*searchWidth/2/searchTotalSteps + "px";
divSearch.style.top = (canvasMenu.offsetTop+canvasMenu.height/2) + searchStatus*(searchPosY-canvasMenu.offsetTop-canvasMenu.height/2)/searchTotalSteps + "px";
if (searchStatus > 0) {
//divSearch.style.display = "block";
searchVisible = 1;
}
else {
//divSearch.style.display = "none";
searchVisible = 0;
}
ctxMenu.drawImage(imgSearch[searchVisible], 5*frameWidth+4*buttonMenuWidth, frameWidth);
if (searchStatus > 0 && searchStatus <searchTotalSteps) setTimeout (changeViewSearch, 10);
}
function searchDisplacement(e){
if(e.clientX>divSearch.offsetLeft && e.clientY>divSearch.offsetTop && e.clientY-divSearch.offsetTop<topbarHeight) {
if (e.clientX-divSearch.offsetLeft<=parseInt(divSearch.style.width)-minimizePosWidth) {
divSearch.style.cursor = "move";
}
else if (e.clientX-divSearch.offsetLeft>parseInt(divSearch.style.width)-minimizePosWidth) {
divSearch.style.cursor = "default";
}
}
else {
divSearch.style.cursor = "default";
}
if(isMouseDownSearch) {
searchPosX += e.clientX - lastMouseX;
searchPosY += e.clientY - lastMouseY;
lastMouseX = e.clientX;
lastMouseY = e.clientY;
divSearch.style.left = searchPosX + "px";
divSearch.style.top = searchPosY + "px";
if(divSearch.offsetTop<0) {
divSearch.style.top="0px";
searchPosY = 0;
}
divCountryDistributionSearch.style.top = searchPosY + 27 + "px";
divCountryDistributionSearch.style.left = searchPosX-parseInt(divCountryDistributionSearch.style.width) + 8 + 'px';
/*
if(divSearch.offsetLeft<0){
divSearch.style.left="0px";
console.log(divSearch.offsetLeft);
console.log(divSearch.style.left);
}
if(divSearch.offsetLeft+parseInt(divSearch.style.width)>window.innerWidth)
divSearch.style.left = (window.innerWidth - parseInt(divSearch.style.width)) +"px";
if(divSearch.offsetTop+parseInt(divSearch.style.height)>window.innerHeight)
divSearch.style.top = (window.innerHeight-parseInt(divSearch.style.height))+"px";
*/
}
}<file_sep>function initializeMenu(){
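	// Sets up the five-button menu canvas, initializes the Reference, Cited-By,
	// Relevant Document, Co-Author and Search windows, and toggles the window whose
	// button was clicked.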
//document.onselectstart = function(){ return false; }
canvasMenu = document.getElementById('canvasMenu');
ctxMenu=canvasMenu.getContext('2d');
//New Adjustment For ButtonMenuWidth:
buttonMenuWidth = imgObject[4].width;
buttonMenuHeight = imgObject[4].height;
frameWidth = 0;
////////////////////////////////////////
canvasMenu.width = 5*buttonMenuWidth + 6*frameWidth;
canvasMenu.height = buttonMenuHeight + 2*frameWidth;
//canvasAuthor = document.getElementById('canvasAuthor');
//ctxAuthor=canvasAuthor.getContext('2d');
//ctxAuthor.fillStyle = "rgb(220,220,220)";
//canvasAffiliation = document.getElementById('canvasAffiliation');
//ctxAffiliation=canvasAffiliation.getContext('2d');
//ctxAffiliation.fillStyle = "rgb(220,220,220)";
//CHANGED:
/*
ctxMenu.fillStyle = "rgb(220,220,220)";
ctxMenu.fillRect(0 , 0, canvasMenu.width, canvasMenu.height);
imgDataMenu[0] = ctxMenu.createImageData(buttonMenuWidth, buttonMenuHeight);
imgDataMenu[1] = ctxMenu.createImageData(buttonMenuWidth, buttonMenuHeight);
for (var i=0; i<4*buttonMenuWidth*buttonMenuHeight; i+=4)
{
imgDataMenu[0].data[i] = 220;
imgDataMenu[0].data[i+1] = 220;
imgDataMenu[0].data[i+3] = 255;
imgDataMenu[1].data[i] = 220;
imgDataMenu[1].data[i+3] = 255;
}
*/
initializeReference();
initializeCitedBy();
initializeRelevantDocument();
initializeCoAuthor();
initializeSearch();
//initializeAffiliation();
canvasMenu.onmousedown = function(e) {
if (e.clientX-canvasMenu.offsetLeft>frameWidth && e.clientX-canvasMenu.offsetLeft<frameWidth+buttonMenuWidth && e.clientY-canvasMenu.offsetTop>frameWidth && e.clientY-canvasMenu.offsetTop<frameWidth+buttonMenuHeight) {
referenceIncrement *= -1;
divReference.style['z-index'] = zIndex;
zIndex += 1;
changeViewReference();
}
else if (e.clientX-canvasMenu.offsetLeft>2*frameWidth+buttonMenuWidth && e.clientX-canvasMenu.offsetLeft<2*frameWidth+2*buttonMenuWidth && e.clientY-canvasMenu.offsetTop>frameWidth && e.clientY-canvasMenu.offsetTop<frameWidth+buttonMenuHeight) {
divCitedBy.style['z-index'] = zIndex;
zIndex += 1;
citedByIncrement *= -1;
changeViewCitedBy();
}
else if (e.clientX-canvasMenu.offsetLeft>3*frameWidth+2*buttonMenuWidth && e.clientX-canvasMenu.offsetLeft<3*frameWidth+3*buttonMenuWidth && e.clientY-canvasMenu.offsetTop>frameWidth && e.clientY-canvasMenu.offsetTop<frameWidth+buttonMenuHeight) {
divRelevantDocument.style['z-index'] = zIndex;
zIndex += 1;
relevantDocumentIncrement *= -1;
changeViewRelevantDocument();
}
else if (e.clientX-canvasMenu.offsetLeft>4*frameWidth+3*buttonMenuWidth && e.clientX-canvasMenu.offsetLeft<4*frameWidth+4*buttonMenuWidth && e.clientY-canvasMenu.offsetTop>frameWidth && e.clientY-canvasMenu.offsetTop<frameWidth+buttonMenuHeight) {
divCoAuthor.style['z-index'] = zIndex;
zIndex += 1;
coAuthorIncrement *= -1;
changeViewCoAuthor();
}
else if (e.clientX-canvasMenu.offsetLeft>5*frameWidth+4*buttonMenuWidth && e.clientX-canvasMenu.offsetLeft<5*frameWidth+5*buttonMenuWidth && e.clientY-canvasMenu.offsetTop>frameWidth && e.clientY-canvasMenu.offsetTop<frameWidth+buttonMenuHeight) {
divSearch.style['z-index'] = zIndex;
zIndex += 1;
searchIncrement *= -1;
changeViewSearch();
}
/*else if (e.clientX-canvasMenu.offsetLeft>6*frameWidth+5*buttonMenuWidth && e.clientX-canvasMenu.offsetLeft<6*frameWidth+6*buttonMenuWidth && e.clientY-canvasMenu.offsetTop>frameWidth && e.clientY-canvasMenu.offsetTop<frameWidth+buttonMenuHeight) {
divAffiliation.style['z-index'] = zIndex;
zIndex += 1;
affiliationIncrement *= -1;
changeViewAffiliation();
}*/
}
}
//SHOWING MAIN ARTICLE:
function setDivPosition(_divObj, _posX, _posY)
{
//console.log("positionObject: "+ _posX + " " + _posY);
_divObj.style.top = _posX + "px";
_divObj.style.left = _posY + "px";
}
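// Fills _divObj with the available main-article/author details (title, author,
// affiliation, city, country).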
function setDivInnerHTML (_divObj, _sourceObj)
{
_divObj.innerHTML = "";
if (_sourceObj != null)
{
if (!(_sourceObj.title === undefined))_divObj.innerHTML += _sourceObj.title + "<br>";
if (!(_sourceObj.name === undefined))_divObj.innerHTML +="Author: " + _sourceObj.name + "<br>";
if (!(_sourceObj.affiliationName === undefined))_divObj.innerHTML +="Affiliation: " + _sourceObj.affiliationName + "<br>";
if (!(_sourceObj.city === undefined))_divObj.innerHTML +="Affiliation-City: " + _sourceObj.city + "<br>";
if (!(_sourceObj.country === undefined))_divObj.innerHTML +="Affiliation-Country: " + _sourceObj.country + "<br>";
}
console.log("authorObject:")
console.log(_sourceObj.title === undefined);
console.log(_sourceObj["title"]);
console.log(authorObject.title);
console.log(_sourceObj);
console.log("/////////////////")
}
<file_sep>function mapButton(){
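	// Wires the pan and zoom button canvases: the scroll canvas starts a zoom-in/out
	// animation centred on the map (readyScroll / triangleSize), while the arrow canvas
	// pans the map by shifting imageCoords.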
canvasButton = document.getElementById('canvasButton');
ctxButton = canvasButton.getContext('2d');
ctxButton.fillStyle = "rgb(220,220,220)";
canvasButtonScroll = document.getElementById('canvasButtonScroll');
ctxButtonScroll = canvasButtonScroll.getContext('2d');
ctxButtonScroll.fillStyle = "rgb(220,220,220)";
canvasButtonScroll.onmousedown = function(e) {
console.log("button scrolll on onmousedown");
if (e.clientX>canvas.offsetLeft+20 && e.clientX<40+canvas.offsetLeft && e.clientY>150+canvas.offsetTop && e.clientY<180+canvas.offsetTop) {
if (readyScroll==1) {
multiplier = 1;
timeBefore = 0;
if(zoom<2) {
readyScroll = -1;
triangleSize = 60;
}
mouseX=canvas.offsetLeft+canvas.width/2;
mouseY=canvas.offsetTop+canvas.height/2;
deltaMouseX = canvas.width/2;
deltaMouseY = canvas.height/2;
}
}
if(e.clientX>canvas.offsetLeft+20 && e.clientX<40+canvas.offsetLeft && e.clientY>320+canvas.offsetTop && e.clientY<350+canvas.offsetTop){
if (readyScroll==1) {
multiplier = 1;
timeBefore = 0;
if (zoom>0) {
readyScroll = -2;
triangleSize = 120;
}
mouseX = canvas.offsetLeft+canvas.width/2;
mouseY = canvas.offsetTop+canvas.height/2;
deltaMouseX = canvas.width/2;
deltaMouseY = canvas.height/2;
}
}
}
canvasButton.onmousedown = function(e){
console.log("button scrolll on onmousedown");
console.log(e.clientX);
console.log(canvasButton.clientLeft);
if (e.clientX<canvasButton.width/3+canvas.offsetLeft && e.clientY>canvasButton.height/3+canvas.offsetTop && e.clientY<2*canvasButton.height/3+canvas.offsetTop) {
imageCoords[0]+=10;
}
if (e.clientX>canvas.offsetLeft+2*canvasButton.width/3 && e.clientY>canvasButton.height/3+canvas.offsetTop && e.clientY<2*canvasButton.height/3+canvas.offsetTop) {
imageCoords[0]-=10;
}
if (e.clientX>canvas.offsetLeft+canvasButton.width/3 && e.clientX<2*canvasButton.width/3+canvas.offsetLeft && e.clientY<canvasButton.height/3+canvas.offsetTop) {
imageCoords[1]+=10;
}
if (e.clientX>canvas.offsetLeft+canvasButton.width/3 && e.clientX<2*canvasButton.width/3+canvas.offsetLeft && e.clientY>2*canvasButton.height/3+canvas.offsetTop) {
imageCoords[1]-=10;
}
}
}
function renderButton(){
//ctxButton.fillRect ( 0 , 0 , canvasButton.width , canvasButton.height );
ctxButton.drawImage (imgLeft, 0, canvasButton.height/3);
ctxButton.drawImage (imgRight, 2*canvasButton.width/3, canvasButton.height/3);
ctxButton.drawImage (imgUp, canvasButton.width/3, 0);
ctxButton.drawImage (imgDown, canvasButton.width/3, 2*canvasButton.height/3);
ctxButtonScroll.fillRect ( 0 , 0 , canvasButtonScroll.width , canvasButtonScroll.height);
ctxButtonScroll.drawImage (imgIn, 0, 0);
ctxButtonScroll.drawImage (imgOut, 0, canvasButtonScroll.height-imgOut.height);
}<file_sep>var isMouseDownRelevantDocument=0;
var deltaRelevantDocumentPositionX;
var deltaRelevantDocumentPositionY;
var relevantDocumentStatus=0;
var relevantDocumentTotalSteps=15;
var relevantDocumentIncrement = -1;
var relevantDocumentVisible=0;
var relevantDocumentPosX;
var relevantDocumentPosY;
var relevantDocumentWidth;
var relevantDocumentHeight;
var abstractRelevantHeight = new Array();
var abstractRelevantState = new Array();
var abstractRelevantMode = new Array();
var abstractRelevantTotal = 20;
var contentRelevantDocument;
var headerRelevantDocument;
//var showRelevantDocumentinMap = 1;
var showRelevantDocumentHref;
var divCountryDistributionRelevantDocument;
var modeCountryDistributionRelevantDocument = 0;
var modeCountryTypeRelevantDocument = 1;
var defaultChangedRelevantDocument = 0;
var hrefCountryTypeRelevantDocument;
var hrefCDRD;
var imgRelevantDocument = new Array();
imgRelevantDocument[0] = imgObject[6];
imgRelevantDocument[1] = imgObject[11];
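// Builds the Relevant Document window (header plus scrollable content area), its
// country-distribution side panel, and the links that switch between the overall and
// the per-page country distribution.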
function initializeRelevantDocument () {
divRelevantDocument = document.getElementById("windowRelevantDocument");
divRelevantDocument.style.overflow = 'hidden';
relevantDocumentPosX = 740;
relevantDocumentPosY = 90;
relevantDocumentWidth = parseInt(divRelevantDocument.style.width);
relevantDocumentHeight = parseInt(divRelevantDocument.style.height);
//divRelevantDocument.style.display = "none";
divRelevantDocument.style.height = '0px';
divRelevantDocument.style.width = '0px';
ctxMenu.drawImage(imgRelevantDocument[relevantDocumentVisible], 3*frameWidth+2*buttonMenuWidth, frameWidth);
headerRelevantDocument = document.createElement('div');
headerRelevantDocument.style.position = 'relative';
headerRelevantDocument.style.top = '0px';
headerRelevantDocument.style.left ='0px';
headerRelevantDocument.style.height = '23px';
headerRelevantDocument.style.width = relevantDocumentWidth + 'px';
headerRelevantDocument.style.paddingLeft = '5px';
headerRelevantDocument.style.color = 'white';
headerRelevantDocument.appendChild(document.createTextNode("Relevant Document"));
divRelevantDocument.appendChild(headerRelevantDocument);
headerRelevantDocument.onselectstart = function() {return false};
contentRelevantDocument = document.createElement('div');
divRelevantDocument.appendChild(contentRelevantDocument);
contentRelevantDocument.setAttribute('id', "contentRelevantDocument");
contentRelevantDocument.style.position = 'relative';
//contentRelevantDocument.style.top = topbarHeight-9 + 'px';
contentRelevantDocument.style.left = 1 + 'px';
contentRelevantDocument.style.width = relevantDocumentWidth-2 + 'px';
contentRelevantDocument.style.height = relevantDocumentHeight-parseInt(headerRelevantDocument.style.height) +'px';
contentRelevantDocument.style['overflow-x'] = 'hidden';
contentRelevantDocument.style['overflow-y'] = 'auto';
showRelevantDocumentHref= document.createElement('a');
showRelevantDocumentHref.href = "#";
showRelevantDocumentHref.style.color = 'blue';
showRelevantDocumentHref.textContent = "Show in map";
/*
showRelevantDocumentHref.onclick = function () {
if (defaultChangedRelevantDocument==0) showResult(0, relevantDocumentObject);
else resetQueryRelevantDocument;
}*/
divCountryDistributionRelevantDocument = document.createElement('div');
divCountryDistributionRelevantDocument.style.background = '#F2F1EF';
divCountryDistributionRelevantDocument.style.position = 'absolute';
divCountryDistributionRelevantDocument.style.width = '130px';
divCountryDistributionRelevantDocument.style.height = '300px';
divCountryDistributionRelevantDocument.style.top = relevantDocumentPosY + 27 + 'px';
divCountryDistributionRelevantDocument.style.left = relevantDocumentPosX-parseInt(divCountryDistributionRelevantDocument.style.width) + 8 + 'px';
divCountryDistributionRelevantDocument.style['z-index'] = 0;
divCountryDistributionRelevantDocument.style.overflow = 'auto';
divCountryDistributionRelevantDocument.style.display = 'none';
document.body.appendChild(divCountryDistributionRelevantDocument);
hrefCountryTypeRelevantDocument = document.createElement('a');
hrefCountryTypeRelevantDocument.href = "#";
hrefCountryTypeRelevantDocument.style.color = 'blue';
hrefCountryTypeRelevantDocument.textContent = "View overall result distribution";
hrefCountryTypeRelevantDocument.onclick = function () {
//console.log("bbbbb");
if (modeCountryTypeRelevantDocument==0) {
modeCountryTypeRelevantDocument = 1;
viewAllModeActive = 0;
modeInMap = relevantDocumentMode;
hrefCountryTypeRelevantDocument.textContent = "View overall result distribution";
changeModeRelevantDocument();
showResult(relevantDocumentMode, relevantDocumentObject);
}
else {
viewAllModeActive = 1;
modeInMap = relevantDocumentMode;
modeCountryTypeRelevantDocument = 0;
hrefCountryTypeRelevantDocument.textContent = "View 25 result distribution";
showOverallCountryRelevantDocument(countryRelevantDocument);
showResult(relevantDocumentMode, countryRelevantDocument);
}
//showOverallCountryRelevantDocument(countryRelevantDocument);
}
//showOverallCountryRelevantDocument(countryRelevantDocument);
hrefCDRD = document.createElement('a');
hrefCDRD.href = "#";
hrefCDRD.style.color = 'blue';
hrefCDRD.textContent = "View country distribution";
hrefCDRD.onclick = function () {
showRelevantDocumentCountryDistribution();
if (modeCountryDistributionRelevantDocument==0)
hrefCDRD.textContent = "View country distribution";
else hrefCDRD.textContent = "Hide country distribution";
};
}
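// Rebuilds the relevant-document list from rdObject: "Show all result" when a filtered
// view is active, "Show in map" and country-distribution links, one entry per document
// via insertRelevantDocument(), and Previous/Next paging links.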
function updateRelevantDocument (rdObject, rdMode) {
removecontentRelevantDocumentChild();
console.log("ini rdObject");
console.log(rdObject);
if (rdObject.length>0) {
if (rdMode==1 || defaultChangedRelevantDocument==1) {
var temp = document.createElement('a');
temp.href = "#";
temp.style.color = 'blue';
temp.onclick = function () {
if (defaultChangedRelevantDocument==1) {
modeInMap = relevantDocumentMode;
viewAllModeActive = 0;
defaultChangedRelevantDocument = 0;
resetQueryRelevantDocument();
}
else {
viewAllModeActive = 0;
modeInMap = relevantDocumentMode;
showResult(relevantDocumentMode, relevantDocumentObject);
updateRelevantDocument(relevantDocumentObject, 0);
showOverallCountryRelevantDocument(countryRelevantDocument);
}
modeCountryTypeRelevantDocument = 1;
hrefCountryTypeRelevantDocument.textContent = "View overall result distribution";
}
temp.textContent = "Show all result";
contentRelevantDocument.appendChild(temp);
contentRelevantDocument.appendChild(document.createElement('br'));
}
showRelevantDocumentHref.onclick = function () {
modeInMap = relevantDocumentMode;
viewAllModeActive = 0;
showResult(relevantDocumentMode, rdObject);
};
contentRelevantDocument.appendChild(showRelevantDocumentHref);
contentRelevantDocument.appendChild(document.createElement('br'));
contentRelevantDocument.appendChild(hrefCDRD);
contentRelevantDocument.appendChild(document.createElement('br'));
for (var i=0; i<rdObject.length; i++) {
if (rdMode==1 && (i==0 || rdObject[i].country!=rdObject[i-1].country)) {
contentRelevantDocument.appendChild(document.createTextNode(rdObject[i].country));
}
var temp = document.createElement('div');
temp.style.fontSize = '13px';
document.getElementById("contentRelevantDocument").appendChild(temp);
temp.setAttribute('id', "RelevantDocument" + i);
temp.style.position = 'relative';
temp.style.left = 3 + 'px';
insertRelevantDocument(rdObject, i);
}
//console.log(currentLevelCitation);
//console.log(totalLevelCitation);
if (rdMode==0) {
if (currentLevelRelevantDocument>1) {
temp = document.createElement('a');
document.getElementById("contentRelevantDocument").appendChild(temp);
temp.href="javascript:downRelevantDocument()";
temp.style.color = 'blue';
temp.textContent = "Previous";
contentRelevantDocument.appendChild(document.createTextNode(" "));
}
if (currentLevelRelevantDocument<totalLevelRelevantDocument) {
temp = document.createElement('a');
document.getElementById("contentRelevantDocument"). appendChild(temp);
temp.href = "javascript:upRelevantDocument()";
temp.style.color ='blue';
temp.style.cssFloat = 'right';
temp.textContent = "Next";
}
}
}
else {
document.getElementById("contentRelevantDocument").innerHTML = "No relevant document is found.";
}
}
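// Renders a single relevant document with the same layout as a search result entry:
// title link, source/citation info, authors, location, and a collapsible abstract.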
function insertRelevantDocument(rdObject, i) {
if (typeof(rdObject[i].title) != 'undefined' || rdObject[i].sourcetitle || rdObject[i].publicationName)
{
var tempTable = document.createElement('table');
//tempTable.align = 'justify';
document.getElementById("RelevantDocument"+i).appendChild(tempTable);
var temp = document.createElement("IMG");
temp.setAttribute('id', "RelevantDocument" + i + "_image");
temp.src = imgExpand.src;
//temp.setAttribute('onclick', "showAbstractRef("+i+")");
temp.onclick = function () {showAbstractRelevant(i);};
var row = tempTable.insertRow(0);
row.insertCell(0).appendChild(temp);
temp = document.createElement("a");
temp.style['font-weight'] = 'bold';
temp.style.textDecoration = 'none';
temp.onclick = function () {
if (abstractRelevantMode[i]==0) {
showAbstractRelevant(i);
}
viewAllModeActive = 0;
modeInMap = relevantDocumentMode;
showResult(relevantDocumentMode, rdObject);
highlight(rdObject[i]);
};
temp.href = "#";
temp.style.color = 'blue';
var st = 0;
if (rdObject[i].title) {
temp.textContent = rdObject[i].title;
}
else if (rdObject[i].sourcetitle){
temp.textContent = rdObject[i].sourcetitle;
st=1;
}
else if (rdObject[i].publicationName) {
temp.textContent = rdObject[i].publicationName;
st=1;
}
row.insertCell(1).appendChild(temp);
temp = document.createElement('div');
temp.style.fontSize = '11px';
temp.style.paddingLeft = '18px';
document.getElementById("RelevantDocument"+i).appendChild(temp);
console.log("aa " + rdObject[i].sourcetitle);
if (st==0) {
if (rdObject[i].sourcetitle) {
temp.appendChild (document.createTextNode(rdObject[i].sourcetitle));
if (rdObject[i].citedby) {
temp.appendChild(document.createTextNode(", cited "+rdObject[i].citedby+" times"));
}
else if (rdObject[i].citedbyCount) {
temp.appendChild(document.createTextNode(", cited "+rdObject[i].citedbyCount+" times"));
}
}
else if (rdObject[i].publicationName) {
temp.appendChild (document.createTextNode(rdObject[i].publicationName));
if (rdObject[i].citedby) {
temp.appendChild(document.createTextNode(", cited "+rdObject[i].citedby+" times"));
}
else if (rdObject[i].citedbyCount) {
temp.appendChild(document.createTextNode(", cited "+rdObject[i].citedbyCount+" times"));
}
}
temp.appendChild(document.createElement('br'));
}
if (rdObject[i].author) {
if ( typeof(rdObject[i].author[0].authname)!= 'undefined' )temp.appendChild(document.createTextNode(rdObject[i].author[0].authname));
else temp.appendChild(document.createTextNode(rdObject[i].author[0]["ce:indexed-name"]));
for (var j=1; j<rdObject[i].author.length; j++) {
if (j==3) {
temp.appendChild(document.createTextNode(", et al."));
break;
}
if ( typeof(rdObject[i].author[j].authname)!= 'undefined' )temp.appendChild(document.createTextNode(", "+rdObject[i].author[j].authname));
				else temp.appendChild(document.createTextNode(", "+rdObject[i].author[j]["ce:indexed-name"]));
}
temp.appendChild(document.createElement('br'));
}
if (rdObject[i].affilname) {
temp.appendChild(document.createTextNode(rdObject[i].affilname.split('|')[0]));
temp.appendChild(document.createElement('br'));
}
if (rdObject[i].city) {
temp.appendChild(document.createTextNode(rdObject[i].city));
if (rdObject[i].country) {
temp.appendChild(document.createTextNode(", "+rdObject[i].country));
}
temp.appendChild(document.createElement('br'));
}
else if (rdObject[i].country) {
temp.appendChild(document.createTextNode(rdObject[i].country));
temp.appendChild(document.createElement('br'));
}
temp = document.createElement('div');
temp.style.fontSize = '11px';
document.getElementById("RelevantDocument"+i).appendChild(temp);
temp.align = 'justify';
temp.setAttribute('id', "RelevantDocument" + i + "_abstract");
temp.style.position = 'relative';
temp.style.left = 18 + 'px';
temp.style.width = relevantDocumentWidth - 43 + 'px';
if (rdObject[i].url) {
var temp2 = document.createElement('a');
temp2.style.color = 'blue';
temp2.textContent = "Show in Scopus";
temp2.href = "javascript:window.open('" + rdObject[i].url + "')";
temp2.style.textDecoration = 'none';
temp.appendChild(temp2);
//temp.appendChild(document.createElement('br'));
}
if (rdObject[i].authorId && rdObject[i].scopusId) {
var temp2 = document.createElement('a');
temp2.href = "#";
temp2.style.color = 'blue';
temp2.onclick = function() {newMainArticle(rdObject[i]);};
temp2.style.textDecoration = 'none';
temp2.style.cssFloat = 'right';
temp2.textContent = "Set as main article";
temp.appendChild(temp2);
//temp.appendChild(document.createElement('br'));
}
temp.appendChild(document.createElement('br'));
if (rdObject[i].Abstract) {
temp.appendChild(document.createTextNode("Abstract:"));
temp.appendChild(document.createElement('br'));
temp.appendChild(document.createTextNode(rdObject[i].Abstract));
}
else temp.appendChild(document.createTextNode("Abstract not available"));
abstractRelevantHeight[i] = temp.clientHeight;
temp.style.overflow = 'hidden';
//console.log(abstractRelevantHeight[i]);
temp.style.height = 0 + 'px';
//temp.style.display = 'none';
abstractRelevantState[i] = 0;
abstractRelevantMode[i] = 0;
}
}
function showRelevantDocumentCountryDistribution() {
if (modeCountryDistributionRelevantDocument==0) {
modeCountryDistributionRelevantDocument = 1;
divCountryDistributionRelevantDocument.style.display = 'block';
}
else {
modeCountryDistributionRelevantDocument = 0;
divCountryDistributionRelevantDocument.style.display = 'none';
}
}
function showOverallCountryRelevantDocument(crdObject) {
console.log("show all");
console.log (crdObject);
while (divCountryDistributionRelevantDocument.firstChild) {
divCountryDistributionRelevantDocument.removeChild(divCountryDistributionRelevantDocument.firstChild);
}
divCountryDistributionRelevantDocument.appendChild(hrefCountryTypeRelevantDocument);
divCountryDistributionRelevantDocument.appendChild(document.createElement('br'));
for (var i=0; i<crdObject.length; i++) {
var temp = document.createElement('a');
temp.href = "javascript:focusToCountryRelevantDocument('"+crdObject[i].name+"')";
temp.style.color = 'blue';
temp.textContent = crdObject[i].name;
temp.style.textDecoration = 'none';
divCountryDistributionRelevantDocument.appendChild(temp);
divCountryDistributionRelevantDocument.appendChild(document.createTextNode(" ("+crdObject[i].hitCount+")"));
divCountryDistributionRelevantDocument.appendChild(document.createElement('br'));
}
}
function focusToCountryRelevantDocument(crdObjectName) {
viewAllModeActive = 0;
modeInMap = relevantDocumentMode;
if (modeCountryTypeRelevantDocument==0) {
defaultChangedRelevantDocument = 1;
modeCountryTypeRelevantDocument = 1;
hrefCountryTypeRelevantDocument.textContent = "View overall result distribution";
var temp=new Object;
temp.country=crdObjectName;
getRelevantDocumentFilter1(new Array(temp));
}
else {
getRelevantDocumentFilter2(new Array(crdObjectName));
}
highlight(getObject(crdObjectName));
}
function showAbstractRelevant(i) {
//console.log("show");
if (abstractRelevantMode[i]==0) {
document.getElementById("RelevantDocument" + i + "_image").src = imgContract.src;
//document.getElementById("Reference" + i + "_abstract").style.display = 'block';
abstractRelevantMode[i] = 1;
//console.log("before");
expandAbstractRelevant(i);
//console.log("after");
}
else {
document.getElementById("RelevantDocument" + i + "_image").src = imgExpand.src;
abstractRelevantMode[i] = 0;
contractAbstractRelevant(i);
}
}
function expandAbstractRelevant(i) {
//console.log("expand");
abstractRelevantState[i] += 1;
document.getElementById("RelevantDocument" + i + "_abstract").style.height = abstractRelevantState[i]*abstractRelevantHeight[i]/abstractRelevantTotal + 'px';
//console.log(abstractRelevantHeight[i]);
if (abstractRelevantState[i]<abstractRelevantTotal && abstractRelevantMode[i]==1) {
setTimeout (function() {expandAbstractRelevant(i)}, 10);
}
}
function contractAbstractRelevant(i) {
//console.log(i);
abstractRelevantState[i] -= 1;
document.getElementById("RelevantDocument" + i + "_abstract").style.height = abstractRelevantState[i]*abstractRelevantHeight[i]/abstractRelevantTotal + 'px';
if (abstractRelevantState[i]>0 && abstractRelevantMode[i]==0) {
setTimeout (function(){contractAbstractRelevant(i);}, 10);
}
//else if (abstractRefMode[i]==0)
//document.getElementById("Reference" + i + "_abstract").style.display = 'none';
}
function removecontentRelevantDocumentChild() {
var el = document.getElementById("contentRelevantDocument");
while (el.firstChild) {
//console.log(el.firstChild.id);
el.removeChild(el.firstChild);
}
//console.log(el.lastChild.id);
}
function mouseDownRelevantDocument(e){
divRelevantDocument.style['z-index'] = zIndex;
divCountryDistributionRelevantDocument.style['z-index'] = zIndex;
zIndex += 1;
if(e.clientY-divRelevantDocument.offsetTop<topbarHeight) {
if (e.clientX-divRelevantDocument.offsetLeft<=parseInt(divRelevantDocument.style.width)-minimizePosWidth) {
isMouseDownRelevantDocument=true;
lastMouseX = e.clientX;
lastMouseY = e.clientY;
}
else if (e.clientX-divRelevantDocument.offsetLeft>parseInt(divRelevantDocument.style.width)-minimizePosWidth) {
relevantDocumentIncrement *= -1;
changeViewRelevantDocument();
}
}
}
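// Opens or closes the Relevant Document window with the same step animation used for
// the other panel windows.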
function changeViewRelevantDocument() {
modeCountryDistributionRelevantDocument = 0;
divCountryDistributionRelevantDocument.style.display = 'none';
hrefCDRD.textContent = "View country distribution";
if (relevantDocumentStatus <= 0 && relevantDocumentIncrement==-1) {
relevantDocumentStatus = 1;
}
else if (relevantDocumentStatus >= relevantDocumentTotalSteps && relevantDocumentIncrement==1) {
relevantDocumentStatus = relevantDocumentTotalSteps - 1;
}
relevantDocumentStatus += relevantDocumentIncrement;
divRelevantDocument.style.width = relevantDocumentStatus*relevantDocumentWidth/relevantDocumentTotalSteps + "px";
divRelevantDocument.style.height = relevantDocumentStatus*relevantDocumentHeight/relevantDocumentTotalSteps + "px";
divRelevantDocument.style.left = (canvasMenu.offsetLeft+3*frameWidth+5*buttonMenuWidth/2) + relevantDocumentStatus*(relevantDocumentPosX+relevantDocumentWidth/2-canvasMenu.offsetLeft-3*frameWidth-5*buttonMenuWidth/2)/relevantDocumentTotalSteps - relevantDocumentStatus*relevantDocumentWidth/2/relevantDocumentTotalSteps + "px";
divRelevantDocument.style.top = (canvasMenu.offsetTop+canvasMenu.height/2) + relevantDocumentStatus*(relevantDocumentPosY-canvasMenu.offsetTop-canvasMenu.height/2)/relevantDocumentTotalSteps + "px";
if (relevantDocumentStatus > 0) {
divRelevantDocument.style.display = "block";
relevantDocumentVisible = 1;
}
else {
divRelevantDocument.style.display = "none";
relevantDocumentVisible = 0;
}
ctxMenu.drawImage(imgRelevantDocument[relevantDocumentVisible], 3*frameWidth+2*buttonMenuWidth, frameWidth);
if (relevantDocumentStatus > 0 && relevantDocumentStatus <relevantDocumentTotalSteps) setTimeout (changeViewRelevantDocument, 10);
}
function relevantDocumentDisplacement(e){
if(e.clientX>divRelevantDocument.offsetLeft && e.clientY>divRelevantDocument.offsetTop && e.clientY-divRelevantDocument.offsetTop<topbarHeight) {
if (e.clientX-divRelevantDocument.offsetLeft<=parseInt(divRelevantDocument.style.width)-minimizePosWidth) {
divRelevantDocument.style.cursor = "move";
}
else if (e.clientX-divRelevantDocument.offsetLeft>parseInt(divRelevantDocument.style.width)-minimizePosWidth) {
divRelevantDocument.style.cursor = "default";
}
}
else {
divRelevantDocument.style.cursor = "default";
}
if(isMouseDownRelevantDocument) {
relevantDocumentPosX += e.clientX - lastMouseX;
relevantDocumentPosY += e.clientY - lastMouseY;
lastMouseX = e.clientX;
lastMouseY = e.clientY;
divRelevantDocument.style.left = relevantDocumentPosX + "px";
divRelevantDocument.style.top = relevantDocumentPosY + "px";
if(divRelevantDocument.offsetTop<0) {
divRelevantDocument.style.top="0px";
relevantDocumentPosY = 0;
}
divCountryDistributionRelevantDocument.style.top = relevantDocumentPosY + 27 + "px";
divCountryDistributionRelevantDocument.style.left = relevantDocumentPosX-parseInt(divCountryDistributionRelevantDocument.style.width) + 8 + 'px';
/*
if(divRelevantDocument.offsetLeft<0){
divRelevantDocument.style.left="0px";
console.log(divRelevantDocument.offsetLeft);
console.log(divRelevantDocument.style.left);
}
if(divRelevantDocument.offsetLeft+parseInt(divRelevantDocument.style.width)>window.innerWidth)
divRelevantDocument.style.left = (window.innerWidth - parseInt(divRelevantDocument.style.width)) +"px";
if(divRelevantDocument.offsetTop+parseInt(divRelevantDocument.style.height)>window.innerHeight)
divRelevantDocument.style.top = (window.innerHeight-parseInt(divRelevantDocument.style.height))+"px";
*/
}
}<file_sep>function moveCursor(){
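	// Draws the rotating three-corner cursor shown while zooming: translate to the
	// mouse position, stroke the three coloured corner strips, and grow or shrink the
	// triangle depending on the zoom direction (readyScroll) for the first ~20 ticks.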
ctx.save();
ctx.lineWidth = 2;
ctx.translate(deltaMouseX, deltaMouseY);
ctx.rotate(rotateAngle*Math.PI/180);
//console.log(rotateAngle);
ctx.beginPath();
ctx.moveTo(-triangleSize/2.0+stripLength, triangleSize*Math.sqrt(3.0)/6.0);
ctx.lineTo(-triangleSize/2.0, triangleSize*Math.sqrt(3.0)/6.0);
ctx.lineTo(-triangleSize/2.0+stripLength/2.0, triangleSize*Math.sqrt(3.0)/6.0-stripLength*Math.sqrt(3.0)/2.0);
ctx.strokeStyle = 'blue';
ctx.stroke();
ctx.beginPath();
ctx.moveTo(-stripLength/2.0, -triangleSize*Math.sqrt(3.0)/3.0+stripLength*Math.sqrt(3.0)/2.0);
ctx.lineTo(0, -triangleSize*Math.sqrt(3.0)/3.0);
ctx.lineTo(stripLength/2.0, -triangleSize*Math.sqrt(3.0)/3.0+stripLength*Math.sqrt(3.0)/2.0);
ctx.strokeStyle = 'red';
ctx.stroke();
ctx.beginPath();
ctx.moveTo(triangleSize/2.0-stripLength, triangleSize*Math.sqrt(3.0)/6.0);
ctx.lineTo(triangleSize/2.0, triangleSize*Math.sqrt(3.0)/6.0);
ctx.lineTo(triangleSize/2.0-stripLength/2.0, triangleSize*Math.sqrt(3.0)/6.0-stripLength*Math.sqrt(3.0)/2.0);
ctx.strokeStyle = 'green';
ctx.stroke();
ctx.restore();
if (timeBefore<21) {
if (readyScroll==-1) {
triangleSize += 3;
rotateAngle += 6;
}
else {
triangleSize -= 3;
rotateAngle -= 6;
}
}
}<file_sep>var isMouseDownCoAuthor=0;
var deltaCoAuthorPositionX;
var deltaCoAuthorPositionY;
var coAuthorStatus=0;
var coAuthorTotalSteps=15;
var coAuthorIncrement = -1;
var coAuthorVisible=0;
var coAuthorPosX;
var coAuthorPosY;
var coAuthorWidth;
var coAuthorHeight;
var listCoAuthorHeight = new Array();
var listCoAuthorState = new Array();
var listCoAuthorMode = new Array();
var listCoAuthorTotal = 20;
var contentCoAuthor;
var headerCoAuthor;
var showCoAuthorinMap = 1;
var showCoAuthorHref;
var divCountryDistributionCoAuthor;
var modeCountryDistributionCoAuthor = 0;
var modeCountryTypeCoAuthor = 0;
//var hrefCountryTypeCoAuthor;
var hrefCDCA;
var imgCoAuthor = new Array();
imgCoAuthor[0] = imgObject[7];
imgCoAuthor[1] = imgObject[12];
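// Builds the Co-Authors window (header plus scrollable content area), its
// country-distribution side panel, and the link that toggles that panel.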
function initializeCoAuthor() {
divCoAuthor = document.getElementById("windowCoAuthor");
coAuthorPosX = 760;
coAuthorPosY = 110;
coAuthorWidth = parseInt(divCoAuthor.style.width);
coAuthorHeight = parseInt(divCoAuthor.style.height);
//divCoAuthor.style.display = "none";
divCoAuthor.style.width = '0px';
divCoAuthor.style.height = '0px';
divCoAuthor.style.overflow = 'hidden';
ctxMenu.drawImage(imgCoAuthor[coAuthorVisible], 4*frameWidth+3*buttonMenuWidth, frameWidth);
headerCoAuthor = document.createElement('div');
headerCoAuthor.style.position = 'relative';
headerCoAuthor.style.top = '0px';
headerCoAuthor.style.left ='0px';
headerCoAuthor.style.height = '23px';
headerCoAuthor.style.width = referenceWidth + 'px';
headerCoAuthor.style.paddingLeft = '5px';
headerCoAuthor.style.color = 'white';
headerCoAuthor.appendChild(document.createTextNode("Co-Authors"));
divCoAuthor.appendChild(headerCoAuthor);
headerCoAuthor.onselectstart = function() {return false};
contentCoAuthor = document.createElement('div');
divCoAuthor.appendChild(contentCoAuthor);
contentCoAuthor.setAttribute('id', "contentCoAuthor");
contentCoAuthor.style.position = 'relative';
//contentCoAuthor.style.top = topbarHeight-9 + 'px';
contentCoAuthor.style.left = 1 + 'px';
contentCoAuthor.style.width = coAuthorWidth-2 + 'px';
contentCoAuthor.style.height = coAuthorHeight-parseInt(headerCoAuthor.style.height) +'px';
contentCoAuthor.style['overflow-x'] = 'hidden';
contentCoAuthor.style['overflow-y'] = 'auto';
showCoAuthorHref = document.createElement('a');
showCoAuthorHref.href = "#";
showCoAuthorHref.style.color = 'blue';
showCoAuthorHref.textContent = "Show in map";
divCountryDistributionCoAuthor = document.createElement('div');
divCountryDistributionCoAuthor.style.background = '#F2F1EF';
divCountryDistributionCoAuthor.style.position = 'absolute';
divCountryDistributionCoAuthor.style.width = '130px';
divCountryDistributionCoAuthor.style.height = '300px';
divCountryDistributionCoAuthor.style.top = coAuthorPosY + 27 + 'px';
divCountryDistributionCoAuthor.style.left = coAuthorPosX-parseInt(divCountryDistributionCoAuthor.style.width) + 8 + 'px';
divCountryDistributionCoAuthor.style['z-index'] = 0;
divCountryDistributionCoAuthor.style.overflow = 'hidden';
divCountryDistributionCoAuthor.style.display = 'none';
document.body.appendChild(divCountryDistributionCoAuthor);
hrefCDCA = document.createElement('a');
hrefCDCA.textContent = "View country distribution";
hrefCDCA.href = "#";
hrefCDCA.style.color = 'blue';
hrefCDCA.onclick = function () {
showCoAuthorCountryDistribution();
if (modeCountryDistributionCoAuthor==0)
hrefCDCA.textContent = "View country distribution";
else hrefCDCA.textContent = "Hide country distribution";
};
}
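// Rebuilds the co-author list from caObject: updates the header with the total count,
// offers "Show all result" in filtered views, adds "Show in map" and
// country-distribution links, renders one entry per co-author, and appends
// Previous/Next paging links.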
function updateCoauthors (caObject, caMode) {
removecontentCoAuthorChild();
console.log ("updatecoauthors");
console.log(caObject);
//CHANGED
while (headerCoAuthor.firstChild)
{
headerCoAuthor.removeChild(headerCoAuthor.firstChild);
}
	headerCoAuthor.appendChild(document.createTextNode("Co-Authors (" + total_Coauthors + ")"));
////////////////////////////////////////
console.log("update");
//console.log(caObject);
if (caObject.length>0) {
if (caMode==1) {
var temp = document.createElement('a');
temp.href = "#";
temp.style.color ='blue';
temp.onclick = function () {
modeInMap = coAuthorsMode;
				viewAllModeActive = 0;
showResult(coAuthorsMode, coauthorsObject);
updateCoauthors (caObject, 0);
}
temp.textContent = "Show all result";
contentCoAuthor.appendChild(temp);
contentCoAuthor.appendChild(document.createElement('br'));
}
showCoAuthorHref.onclick = function () {
modeInMap = coAuthorsMode;
viewAllModeActive = 0;
showResult(coAuthorsMode, caObject);
}
contentCoAuthor.appendChild(showCoAuthorHref);
contentCoAuthor.appendChild(document.createElement('br'));
//console.log("coauthors number = " + caObject.length);
contentCoAuthor.appendChild(hrefCDCA);
contentCoAuthor.appendChild(document.createElement('br'));
for (var i=0; i<caObject.length; i++) {
var temp = document.createElement('div');
temp.style.fontSize = '13px';
document.getElementById("contentCoAuthor").appendChild(temp);
temp.setAttribute('id', "CoAuthor" + i);
temp.style.position = 'relative';
temp.style.left = 3 + 'px';
insertCoauthors(caObject, i);
}
if (currentLevelCoauthors>1) {
temp = document.createElement('a');
contentCoAuthor.appendChild(temp);
temp.href="javascript:downCoauthors()";
temp.textContent = "Previous";
}
if (currentLevelCoauthors<totalLevelCoauthors) {
temp = document.createElement('a');
			contentCoAuthor.appendChild(temp);
temp.href = "javascript:upCoauthors()";
temp.textContent = "Next";
}
}
else {
document.getElementById("contentCoAuthor").innerHTML = "This author has no co-Authors.";
}
}
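// Renders a single co-author: expand icon, a name link that highlights the author on
// the map, and a collapsible details block (Scopus link, affiliation, city, country,
// document count).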
function insertCoauthors(caObject, i) {
var tempTable = document.createElement('table');
//tempTable.align = 'justify';
document.getElementById("CoAuthor"+i).appendChild(tempTable);
var temp = document.createElement("IMG");
temp.setAttribute('id', "CoAuthor" + i + "_image");
temp.src = imgExpand.src;
temp.onclick = function () {showListCoAuthor(i);};
var row = tempTable.insertRow(0);
row.insertCell(0).appendChild(temp);
temp = document.createElement("a");
temp.style['font-weight'] = 'bold';
temp.style.textDecoration = 'none';
temp.onclick = function () {
if (listCoAuthorMode[i]==0) {
showListCoAuthor(i);
}
viewAllModeActive = 0;
modeInMap = coAuthorsMode;
showResult(coAuthorsMode, caObject);
highlight(caObject[i]);
};
temp.href = "#";
temp.style.color = 'blue';
//temp.textContent = (currentLevelCoauthors-1)*200+i+1 + " " + caObject[i].name['given-name'] + ", " + caObject[i].name.surname;
temp.textContent = caObject[i].name['given-name'] + ", " + caObject[i].name.surname;
row.insertCell(1).appendChild(temp);
temp = document.createElement('div');
temp.style.fontSize = '12px';
temp.align = 'justify';
document.getElementById("CoAuthor"+i).appendChild(temp);
temp.setAttribute('id', "CoAuthor" + i + "_affiliation");
temp.style.position = 'relative';
temp.style.left = 18 + 'px';
temp.style.width = citedByWidth - 43 + 'px';
if (caObject[i].url) {
var temp2 = document.createElement('a');
temp2.textContent = "Show in Scopus";
temp2.href = "javascript:window.open('" + caObject[i].url + "')";
temp2.style.textDecoration = 'none';
temp.appendChild(temp2);
temp.appendChild(document.createElement('br'));
}
if (caObject[i].affiliationName) {
temp.appendChild(document.createTextNode("Affiliation : " + caObject[i].affiliationName));;
temp.appendChild(document.createElement('br'));
}
if (caObject[i].city) {
temp.appendChild(document.createTextNode("City : " + caObject[i].city));
temp.appendChild(document.createElement('br'));
}
if (caObject[i].country) {
temp.appendChild(document.createTextNode("Country : " + caObject[i].country));
temp.appendChild(document.createElement('br'));
}
if (caObject[i].documentCount) {
temp.appendChild(document.createTextNode("Document count : " + caObject[i].documentCount));
temp.appendChild(document.createElement('br'));
}
temp.style.overflow = 'hidden';
listCoAuthorHeight[i] = temp.clientHeight;
temp.style.height = 0 + 'px';
//temp.style.display = 'none';
listCoAuthorState[i] = 0;
listCoAuthorMode[i] = 0;
//console.log(i+1);
}
function showListCoAuthor(i) {
//console.log("show");
if (listCoAuthorMode[i]==0) {
document.getElementById("CoAuthor" + i + "_image").src = imgContract.src;
//document.getElementById("Reference" + i + "_abstract").style.display = 'block';
listCoAuthorMode[i] = 1;
expandAffiliationCoAuthor(i);
}
else {
document.getElementById("CoAuthor" + i + "_image").src = imgExpand.src;
listCoAuthorMode[i] = 0;
contractAffiliationCoAuthor(i);
}
}
function showCoAuthorCountryDistribution() {
if (modeCountryDistributionCoAuthor==0) {
modeCountryDistributionCoAuthor = 1;
divCountryDistributionCoAuthor.style.display = 'block';
}
else {
modeCountryDistributionCoAuthor = 0;
divCountryDistributionCoAuthor.style.display = 'none';
}
}
function showOverallCountryCoAuthor(ccaObject) {
//console.log("counrycoauthor");
//console.log(caObject);
while (divCountryDistributionCoAuthor.firstChild) {
divCountryDistributionCoAuthor.removeChild(divCountryDistributionCoAuthor.firstChild);
}
//divCountryDistributionCoAuthor.appendChild(hrefCountryTypeCoAuthor);
//divCountryDistributionCoAuthor.appendChild(document.createElement('br'));
for (var i=0; i<ccaObject.length; i++) {
var temp = document.createElement('a');
temp.href = "javascript:focusToCountryCoAuthor('"+ccaObject[i].name+"')";
temp.textContent = ccaObject[i].name;
divCountryDistributionCoAuthor.appendChild(temp);
divCountryDistributionCoAuthor.appendChild(document.createTextNode(" ("+ccaObject[i].hitCount+")"));
divCountryDistributionCoAuthor.appendChild(document.createElement('br'));
}
}
function focusToCountryCoAuthor(ccaObjectName) {
// console.log("focus country");
viewAllModeActive = 0;
modeInMap = coAuthorsMode;
getCoauthorsFilter(new Array(ccaObjectName));
//console.log(ccaObjectName)
highlight(getObject(ccaObjectName));
}
function expandAffiliationCoAuthor(i) {
//console.log(i);
listCoAuthorState[i] += 1;
document.getElementById("CoAuthor" + i + "_affiliation").style.height = listCoAuthorState[i]*listCoAuthorHeight[i]/listCoAuthorTotal + 'px';
//console.log(abstractCitedHeight[i]);
if (listCoAuthorState[i]<listCoAuthorTotal && listCoAuthorMode[i]==1) {
setTimeout (function() {expandAffiliationCoAuthor(i);}, 10);
}
}
function contractAffiliationCoAuthor(i) {
//console.log(i);
listCoAuthorState[i] -= 1;
document.getElementById("CoAuthor" + i + "_affiliation").style.height = listCoAuthorState[i]*listCoAuthorHeight[i]/listCoAuthorTotal + 'px';
if (listCoAuthorState[i]>0 && listCoAuthorMode[i]==0) {
setTimeout (function(){contractAffiliationCoAuthor(i);}, 10);
}
//else if (abstractRefMode[i]==0)
//document.getElementById("Reference" + i + "_abstract").style.display = 'none';
}
function removecontentCoAuthorChild() {
var el = document.getElementById("contentCoAuthor");
while (el.firstChild) {
//console.log(el.firstChild.id);
el.removeChild(el.firstChild);
}
//console.log(el.lastChild.id);
}
function mouseDownCoAuthor(e){
divCoAuthor.style['z-index'] = zIndex;
divCountryDistributionCoAuthor.style['z-index'] = zIndex;
zIndex += 1;
if(e.clientY-divCoAuthor.offsetTop<topbarHeight) {
if (e.clientX-divCoAuthor.offsetLeft<=parseInt(divCoAuthor.style.width)-minimizePosWidth) {
isMouseDownCoAuthor=true;
lastMouseX = e.clientX;
lastMouseY = e.clientY;
}
else if (e.clientX-divCoAuthor.offsetLeft>parseInt(divCoAuthor.style.width)-minimizePosWidth) {
coAuthorIncrement *= -1;
changeViewCoAuthor();
}
}
}
function changeViewCoAuthor() {
modeCountryDistributionCoAuthor = 0;
divCountryDistributionCoAuthor.style.display = 'none';
hrefCDCA.textContent = "View country distribution";
if (coAuthorStatus <= 0 && coAuthorIncrement==-1) {
coAuthorStatus = 1;
}
else if (coAuthorStatus >= coAuthorTotalSteps && coAuthorIncrement==1) {
coAuthorStatus = coAuthorTotalSteps - 1;
}
coAuthorStatus += coAuthorIncrement;
divCoAuthor.style.width = coAuthorStatus*coAuthorWidth/coAuthorTotalSteps + "px";
divCoAuthor.style.height = coAuthorStatus*coAuthorHeight/coAuthorTotalSteps + "px";
divCoAuthor.style.left = (canvasMenu.offsetLeft+4*frameWidth+7*buttonMenuWidth/2) + coAuthorStatus*(coAuthorPosX+coAuthorWidth/2-canvasMenu.offsetLeft-4*frameWidth-7*buttonMenuWidth/2)/coAuthorTotalSteps - coAuthorStatus*coAuthorWidth/2/coAuthorTotalSteps + "px";
divCoAuthor.style.top = (canvasMenu.offsetTop+canvasMenu.height/2) + coAuthorStatus*(coAuthorPosY-canvasMenu.offsetTop-canvasMenu.height/2)/coAuthorTotalSteps + "px";
if (coAuthorStatus > 0) {
//divCoAuthor.style.display = "block";
coAuthorVisible = 1;
}
else {
//divCoAuthor.style.display = "none";
coAuthorVisible = 0;
}
ctxMenu.drawImage(imgCoAuthor[coAuthorVisible], 4*frameWidth+3*buttonMenuWidth, frameWidth);
if (coAuthorStatus > 0 && coAuthorStatus <coAuthorTotalSteps) setTimeout (changeViewCoAuthor, 10);
}
function coAuthorDisplacement(e){
if(e.clientX>divCoAuthor.offsetLeft && e.clientY>divCoAuthor.offsetTop && e.clientY-divCoAuthor.offsetTop<topbarHeight) {
if (e.clientX-divCoAuthor.offsetLeft<=parseInt(divCoAuthor.style.width)-minimizePosWidth) {
divCoAuthor.style.cursor = "move";
}
else if (e.clientX-divCoAuthor.offsetLeft>parseInt(divCoAuthor.style.width)-minimizePosWidth) {
divCoAuthor.style.cursor = "default";
}
}
else {
divCoAuthor.style.cursor = "default";
}
if(isMouseDownCoAuthor) {
coAuthorPosX += e.clientX - lastMouseX;
coAuthorPosY += e.clientY - lastMouseY;
lastMouseX = e.clientX;
lastMouseY = e.clientY;
divCoAuthor.style.left = coAuthorPosX + "px";
divCoAuthor.style.top = coAuthorPosY + "px";
if(divCoAuthor.offsetTop<0) {
divCoAuthor.style.top="0px";
coAuthorPosY = 0;
}
divCountryDistributionCoAuthor.style.top = coAuthorPosY + 27 + "px";
divCountryDistributionCoAuthor.style.left = coAuthorPosX-parseInt(divCountryDistributionCoAuthor.style.width) + 8 + 'px';
/*
if(divCoAuthor.offsetLeft<0){
divCoAuthor.style.left="0px";
console.log(divCoAuthor.offsetLeft);
console.log(divCoAuthor.style.left);
}
if(divCoAuthor.offsetLeft+parseInt(divCoAuthor.style.width)>window.innerWidth)
divCoAuthor.style.left = (window.innerWidth - parseInt(divCoAuthor.style.width)) +"px";
if(divCoAuthor.offsetTop+parseInt(divCoAuthor.style.height)>window.innerHeight)
divCoAuthor.style.top = (window.innerHeight-parseInt(divCoAuthor.style.height))+"px";
*/
}
}<file_sep>function initializeMenu(){
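	// NOTE: this appears to be an older three-button variant of initializeMenu(); the
	// five-button version defined earlier in this bundle has the same name, so whichever
	// script is evaluated last takes effect.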
document.onselectstart = function(){ return false; }
canvasMenu = document.getElementById('canvasMenu');
ctxMenu=canvasMenu.getContext('2d');
//canvasAuthor = document.getElementById('canvasAuthor');
//ctxAuthor=canvasAuthor.getContext('2d');
//ctxAuthor.fillStyle = "rgb(220,220,220)";
//canvasAffiliation = document.getElementById('canvasAffiliation');
//ctxAffiliation=canvasAffiliation.getContext('2d');
//ctxAffiliation.fillStyle = "rgb(220,220,220)";
ctxMenu.fillStyle = "rgb(220,220,220)";
ctxMenu.fillRect(0 , 0, canvasMenu.width, canvasMenu.height);
imgDataMenu[0] = ctxMenu.createImageData(canvasMenu.width/3-2*frameWidth, canvasMenu.height-2*frameWidth);
imgDataMenu[1] = ctxMenu.createImageData(canvasMenu.width/3-2*frameWidth, canvasMenu.height-2*frameWidth);
for (var i=0; i<4*(canvasMenu.width/3)*canvasMenu.height; i+=4)
{
imgDataMenu[0].data[i] = 220;
imgDataMenu[0].data[i+1] = 220;
imgDataMenu[0].data[i+3] = 255;
imgDataMenu[1].data[i] = 220;
imgDataMenu[1].data[i+3] = 255;
}
divSearch = document.getElementById("windowSearch");
searchPosX = divSearch.offsetLeft;
searchPosY = divSearch.offsetTop;
searchWidth = parseInt(divSearch.style.width);
searchHeight = parseInt(divSearch.style.height);
divSearch.style.display = "none";
ctxMenu.putImageData(imgDataMenu[searchVisible], frameWidth+canvasMenu.width/3, frameWidth);
divAffiliation = document.getElementById("windowAffiliation");
affiliationPosX = divAffiliation.offsetLeft;
affiliationPosY = divAffiliation.offsetTop;
affiliationWidth = parseInt(divAffiliation.style.width);
affiliationHeight = parseInt(divAffiliation.style.height);
divAffiliation.style.display = "none";
ctxMenu.putImageData(imgDataMenu[affiliationVisible], frameWidth+2*canvasMenu.width/3, frameWidth);
canvasMenu.onmousedown = function(e) {
if (e.clientX-canvasMenu.offsetLeft>canvasMenu.width/3+frameWidth && e.clientX-canvasMenu.offsetLeft<2*canvasMenu.width/3-frameWidth && e.clientY-canvasMenu.offsetTop>frameWidth && e.clientY-canvasMenu.offsetTop<canvasMenu.height-frameWidth) {
searchIncrement *= -1;
changeViewSearch();
}
if (e.clientX-canvasMenu.offsetLeft>2*canvasMenu.width/3+frameWidth && e.clientX-canvasMenu.offsetLeft<canvasMenu.width-frameWidth && e.clientY-canvasMenu.offsetTop>frameWidth && e.clientY-canvasMenu.offsetTop<canvasMenu.height-frameWidth) {
affiliationIncrement *= -1;
changeViewAffiliation();
}
}
}
<file_sep>var authorMarginX=5;
var authorMarginY=10;
var canvasObjectText = new Array();
var canvasObjectAuthorText;
var canvasObjectHighlight= new Array();
var canvasObjectChange = new Object();
canvasObjectChange.status=0;
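// Redraw all map overlays for the current zoom level: connector lines from the
// main author node, the object markers, their text labels, any highlight
// markers, and the pending "change" marker.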
function renderCanvasObject(){
authorMarginX=imgObject[0].width/2/multiplierObjectX[zoom];
authorMarginY=imgObject[0].height/2/multiplierObjectY[zoom];
obj_dis_x=multiplier*multiplierObjectX[zoom];
obj_dis_y=multiplier*multiplierObjectY[zoom];
obj_m_x=multiplier*multiplierObjectX[zoom];
obj_m_y=multiplier*multiplierObjectY[zoom];
// console.log(" render canvas object console "+canvasObjectAuthor.status);
if(canvasObjectAuthor.status!=-1){
for(var i=0;i<canvasObject.length;i++){
drawObjectLine(canvasObjectAuthor,canvasObject[i]);
}
}
for(var i=0;i<canvasObject.length;i++)
{ drawObject(canvasObject[i].img,canvasObject[i].x,canvasObject[i].y); }
drawObject(canvasObjectAuthor.img,canvasObjectAuthor.x,canvasObjectAuthor.y);
for(var i=0;i<canvasObject.length;i++)
{ drawText(canvasObjectText[i],canvasObject[i].x,canvasObject[i].y); }
drawText(canvasObjectAuthorText,canvasObjectAuthor.x,canvasObjectAuthor.y);
for(var i=0;i<canvasObjectHighlight.length;i++){
drawObject(canvasObjectHighlight[i].img,canvasObjectHighlight[i].x,canvasObjectHighlight[i].y);
}
if(canvasObjectChange.status!=-1)
drawObject(canvasObjectChange.img,canvasObjectChange.x,canvasObjectChange.y);
}
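// Draw a single marker image at map coordinates (x, y), once for the current
// view and once for the adjacent (wrapped) copy of the map so markers stay
// visible while the world map scrolls horizontally.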
function drawObject(im,x,y){
if(readyScroll<0) {
ctx.drawImage(imgObject[im],nready_x+x*obj_dis_x,y*obj_dis_y+imageTempCoords[1]);
if(imageCoords[0]<0)
ctx.drawImage(imgObject[im],nready_x_p+x*obj_dis_x,y*obj_dis_y+imageTempCoords[1]);
else
ctx.drawImage(imgObject[im],nready_x_n+x*obj_dis_x,y*obj_dis_y+imageTempCoords[1]);
}
if(readyScroll>0 && !(x===undefined)) {
ctx.drawImage(imgObject[im],ready_x+x*multiplierObjectX[zoom],imageCoords[1]+y*multiplierObjectY[zoom]);
if(imageCoords[0]<0)
ctx.drawImage(imgObject[im],ready_x_p+x*multiplierObjectX[zoom],imageCoords[1]+y*multiplierObjectY[zoom]);
else
ctx.drawImage(imgObject[im],ready_x_n+x*multiplierObjectX[zoom],imageCoords[1]+y*multiplierObjectY[zoom]);
}
}
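// Draw a marker label at map coordinates (x, y), stroking a light outline
// behind the black text so it stays readable on the map; mirrors the
// positioning logic used in drawObject().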
function drawText(text,x,y){
ctx.font = "13px Helvetica";
ctx.lineWidth = 1; // width of the light outline stroked behind each label
ctx.fillStyle = "#000000";
ctx.strokeStyle = "#E1DFCF";
if(readyScroll<0) {
// ctx.fillText(text,nready_x+x*obj_dis_x,y*obj_dis_y+imageTempCoords[1]);
ctx.strokeText(text,nready_x+x*obj_dis_x,y*obj_dis_y+imageTempCoords[1]);
ctx.fillText(text,nready_x+x*obj_dis_x,y*obj_dis_y+imageTempCoords[1]);
if(imageCoords[0]<0)
{
// ctx.fillText(text,nready_x_p+x*obj_dis_x,y*obj_dis_y+imageTempCoords[1]);
ctx.strokeText(text,nready_x_p+x*obj_dis_x,y*obj_dis_y+imageTempCoords[1]);
ctx.fillText(text,nready_x_p+x*obj_dis_x,y*obj_dis_y+imageTempCoords[1]);
}
else
{
// ctx.fillText(text,nready_x_n+x*obj_dis_x,y*obj_dis_y+imageTempCoords[1]);
ctx.strokeText(text,nready_x_n+x*obj_dis_x,y*obj_dis_y+imageTempCoords[1]);
ctx.fillText(text,nready_x_n+x*obj_dis_x,y*obj_dis_y+imageTempCoords[1]);
}
}
if(readyScroll>0) {
// ctx.fillText(text,ready_x+x*multiplierObjectX[zoom],imageCoords[1]+y*multiplierObjectY[zoom]);
ctx.strokeText(text,ready_x+x*multiplierObjectX[zoom],imageCoords[1]+y*multiplierObjectY[zoom]);
ctx.fillText(text,ready_x+x*multiplierObjectX[zoom],imageCoords[1]+y*multiplierObjectY[zoom]);
if(imageCoords[0]<0)
{
// ctx.fillText(text,ready_x_p+x*multiplierObjectX[zoom],imageCoords[1]+y*multiplierObjectY[zoom]);
ctx.strokeText(text,ready_x_p+x*multiplierObjectX[zoom],imageCoords[1]+y*multiplierObjectY[zoom]);
ctx.fillText(text,ready_x_p+x*multiplierObjectX[zoom],imageCoords[1]+y*multiplierObjectY[zoom]);
}
else
{
// ctx.fillText(text,ready_x_n+x*multiplierObjectX[zoom],imageCoords[1]+y*multiplierObjectY[zoom]);
ctx.strokeText(text,ready_x_n+x*multiplierObjectX[zoom],imageCoords[1]+y*multiplierObjectY[zoom]);
ctx.fillText(text,ready_x_n+x*multiplierObjectX[zoom],imageCoords[1]+y*multiplierObjectY[zoom]);
}
}
ctx.fillStyle = "rgb(120,120,120)";
}
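// Draw a yellow connector line between the centres of two markers (obj1, obj2),
// using the same scrolling/wrapping offsets as drawObject().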
function drawObjectLine(obj1,obj2){
ctx.save();
ctx.strokeStyle = 'yellow';
ctx.lineWidth = 2;
ctx.beginPath();
if(readyScroll<0) {
ctx.moveTo(nready_x+(obj1.x+authorMarginX)*obj_m_x,imageTempCoords[1]+(obj1.y+authorMarginY)*multiplier*multiplierObjectY[zoom]);
ctx.lineTo(nready_x+(obj2.x+authorMarginX)*obj_m_x,imageTempCoords[1]+(obj2.y+authorMarginY)*multiplier*multiplierObjectY[zoom]);
ctx.stroke();
if(imageCoords[0]<0){
ctx.moveTo(nready_x_p+(obj1.x+authorMarginX)*obj_m_x,imageTempCoords[1]+(obj1.y+authorMarginY)*obj_m_y);
ctx.lineTo(nready_x_p+(obj2.x+authorMarginX)*obj_m_x,imageTempCoords[1]+(obj2.y+authorMarginY)*obj_m_y);
ctx.stroke();
}
else{
ctx.moveTo(nready_x_n+(obj1.x+authorMarginX)*obj_m_x,imageTempCoords[1]+(obj1.y+authorMarginY)*obj_m_y);
ctx.lineTo(nready_x_n+(obj2.x+authorMarginX)*obj_m_x,imageTempCoords[1]+(obj2.y+authorMarginY)*obj_m_y);
ctx.stroke();
}
}
if(readyScroll>0){
ctx.moveTo(ready_x+(obj1.x+authorMarginX)*multiplierObjectX[zoom],imageCoords[1]+(obj1.y+authorMarginY)*multiplierObjectY[zoom]);
ctx.lineTo(ready_x+(obj2.x+authorMarginX)*multiplierObjectX[zoom],imageCoords[1]+(obj2.y+authorMarginY)*multiplierObjectY[zoom]);
ctx.stroke();
if(imageCoords[0]<0){
ctx.moveTo(ready_x_p+(obj1.x+authorMarginX)*multiplierObjectX[zoom],imageCoords[1]+(obj1.y+authorMarginY)*multiplierObjectY[zoom]);
ctx.lineTo(ready_x_p+(obj2.x+authorMarginX)*multiplierObjectX[zoom],imageCoords[1]+(obj2.y+authorMarginY)*multiplierObjectY[zoom]);
ctx.stroke();
}
else{
ctx.moveTo(ready_x_n+(obj1.x+authorMarginX)*multiplierObjectX[zoom],imageCoords[1]+(obj1.y+authorMarginY)*multiplierObjectY[zoom]);
ctx.lineTo(ready_x_n+(obj2.x+authorMarginX)*multiplierObjectX[zoom],imageCoords[1]+(obj2.y+authorMarginY)*multiplierObjectY[zoom]);
ctx.stroke();
}
}
ctx.stroke();
ctx.restore();
}
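// Helpers that manage the lists of markers rendered by renderCanvasObject():
// plain objects, highlight overlays and the single "author" marker.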
function addCanvasObject(x,y,imgNumber){
var obj = new Object;
obj.x=x;
obj.y=y;
obj.img=imgNumber;
canvasObject.push(obj);
}
function addCanvasObjectHighlight(x,y,imgNumber){
var obj = new Object;
obj.x=x;
obj.y=y;
obj.img=imgNumber;
canvasObjectHighlight.push(obj);
}
function addCanvasObjectAuthor(x,y,imgNumber){
canvasObjectAuthor.x=x;
canvasObjectAuthor.y=y;
canvasObjectAuthor.img=imgNumber;
canvasObjectAuthor.status=1;
}
function clearCanvasObjectHighlight(){
canvasObjectHighlight=[];
}
function clearCanvasObject(){
// console.log("test clear canvas");
canvasObjectAuthor= new Object();
canvasObjectHighlight=[];
canvasObjectAuthor.status=-1;
canvasObject=[];
}
// ctx.drawImage(img[zoom],imageTempCoords[0]%tempWidthImage,imageTempCoords[1],tempWidthImage,img[zoom].height*multiplier);
<file_sep>var isMouseDownReference=0;
var deltaReferencePositionX;
var deltaReferencePositionY;
var referenceStatus=15;
var referenceTotalSteps=15;
var referenceIncrement = 1;
var referenceVisible=1;
var referencePosX;
var referencePosY;
var referenceWidth;
var referenceHeight;
var abstractRefHeight = new Array();
var abstractRefState = new Array();
var abstractRefMode = new Array();
var abstractRefTotal = 20;
var headerReference;
var contentReference;
//var showReferenceinMap = 1;
var showReferenceHref;
var divCountryDistributionReference;
var modeCountryDistributionReference = 0;
//var modeCountryTypeReference = 0;
//var hrefCountryTypeReference;
var hrefCDR;
var imgReference = new Array();
imgReference[0] = imgObject[4];
imgReference[1] = imgObject[9];
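// Create the floating "Reference" window: a draggable header, a scrollable
// content area, a "Show in map" link and a side panel listing the country
// distribution of the references.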
function initializeReference() {
divReference = document.getElementById("windowReference");
referencePosX = 700;
referencePosY = 50;
referenceWidth = parseInt(divReference.style.width);
referenceHeight = parseInt(divReference.style.height);
divReference.style.overflow = 'hidden';
ctxMenu.drawImage(imgReference[referenceVisible], 0, 0);
//console.log ("before");
//setTimeout(insertReference, 3000);
//console.log("after");
headerReference = document.createElement('div');
headerReference.style.position = 'relative';
headerReference.style.top = '0px';
headerReference.style.left ='0px';
headerReference.style.height = '23px';
headerReference.style.width = referenceWidth + 'px';
headerReference.style.paddingLeft = '5px';
headerReference.style.color = 'white';
headerReference.appendChild(document.createTextNode("Reference"));
divReference.appendChild(headerReference);
headerReference.onselectstart = function() {return false};
contentReference = document.createElement('div');
divReference.appendChild(contentReference);
contentReference.setAttribute('id', "contentReference");
contentReference.style.position = 'relative';
//contentReference.style.top = topbarHeight-9 + 'px';
contentReference.style.left = 1 + 'px';
contentReference.style.width = referenceWidth-2 + 'px';
contentReference.style.height = referenceHeight-parseInt(headerReference.style.height) +'px';
contentReference.style['overflow-x'] = 'hidden';
contentReference.style['overflow-y'] = 'auto';
showReferenceHref = document.createElement('a');
showReferenceHref.style.color = 'blue';
showReferenceHref.href = "#";
showReferenceHref.textContent = "Show in map";
divCountryDistributionReference = document.createElement('div');
divCountryDistributionReference.style.background = '#F2F1EF';
divCountryDistributionReference.style.position = 'absolute';
divCountryDistributionReference.style.width = '130px';
divCountryDistributionReference.style.height = '300px';
divCountryDistributionReference.style.top = referencePosY + 27 + 'px';
divCountryDistributionReference.style.left = referencePosX-parseInt(divCountryDistributionReference.style.width) + 2 + 'px';
divCountryDistributionReference.style['z-index'] = 0;
divCountryDistributionReference.style.overflow = 'auto';
divCountryDistributionReference.style['border-radius'] = '4px';
divCountryDistributionReference.style['-moz-border-radius'] = '4px';
divCountryDistributionReference.style['-webkit-border-radius'] = '4px';
divCountryDistributionReference.style.paddingLeft = '5px';
divCountryDistributionReference.style.display = 'block';
document.body.appendChild(divCountryDistributionReference);
hrefCDR = document.createElement('a');
hrefCDR.style.color = 'blue';
hrefCDR.textContent = "View country distribution";
hrefCDR.href = "#";
hrefCDR.onclick = function () {
showReferenceCountryDistribution();
if (modeCountryDistributionReference==0)
hrefCDR.textContent = "View country distribution";
else hrefCDR.textContent = "Hide country distribution";
};
}
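// Rebuild the Reference window for a new result set: update the header count,
// then append the "show in map" / country-distribution links and one
// expandable entry per reference (rMode==1 also adds a "Show all result" link).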
function updateReference (rObject, rMode) {
removecontentReferenceChild();
console.log("updatereference");
console.log(rObject);
//CHANGED
while (headerReference.firstChild)
{
headerReference.removeChild(headerReference.firstChild);
}
headerReference.appendChild(document.createTextNode("References(" +rObject.length +")"));
//////////////////////
// contentReference.appendChild(document.createElement('br'));
console.log("updatereference");
console.log (rObject);
if (rObject.length>0) {
if (rMode==1) {
var temp = document.createElement('a');
temp.href = "#";
temp.style.color = 'blue';
temp.onclick = function () {
modeInMap = referenceMode;
viewAllModeActive = 0;
showResult(referenceMode, referenceObject);
updateReference(referenceObject, 0);};
temp.textContent = "Show all result";
contentReference.appendChild(temp);
contentReference.appendChild(document.createElement('br'));
}
showReferenceHref.onclick = function () {
modeInMap = referenceMode;
viewAllModeActive = 0;
showResult(referenceMode, rObject);
}
contentReference.appendChild(showReferenceHref);
contentReference.appendChild(document.createElement('br'));
contentReference.appendChild(hrefCDR);
contentReference.appendChild(document.createElement('br'));
for (var i=0; i<rObject.length; i++) {
var temp = document.createElement('div');
temp.style.fontSize = '13px';
document.getElementById("contentReference").appendChild(temp);
temp.setAttribute('id', "Reference" + i);
temp.style.position = 'relative';
//temp.style.left = 1 + 'px';
insertReference(rObject, i);
}
}
else {
document.getElementById("contentReference").innerHTML = "There is no data for reference.";
}
}
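// Render a single reference entry: an expand/contract icon, a title link that
// highlights the item on the map, source/author/affiliation details and a
// collapsible abstract section.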
function insertReference(rObject, i) {
if (typeof(rObject[i].title) != 'undefined' || rObject[i].sourcetitle || rObject[i].publicationName)
{
var tempTable = document.createElement('table');
//tempTable.align = 'justify';
document.getElementById("Reference"+i).appendChild(tempTable);
var temp = document.createElement("IMG");
temp.setAttribute('id', "Reference" + i + "_image");
temp.src = imgExpand.src;
//temp.setAttribute('onclick', "showAbstractRef("+i+")");
temp.onclick = function () {showAbstractRef(i);};
var row = tempTable.insertRow(0);
row.insertCell(0).appendChild(temp);
temp = document.createElement("a");
temp.style['font-weight'] = 'bold';
temp.style.textDecoration = 'none';
//CHANGED:
// temp.style.position = "absolute";
// temp.style.top = "0px";
////////////////////
temp.onclick = function () {
if (abstractRefMode[i]==0) {
showAbstractRef(i);
}
viewAllModeActive = 0;
modeInMap = referenceMode;
showResult(referenceMode, rObject);
highlight(rObject[i]);
};
temp.href = "#";
temp.style.color = 'blue';
//CHANGED:
/*var toStartOne = i+1;
if ( typeof(rObject[i].title) != 'undefined') temp.textContent = toStartOne + " " + rObject[i].title;
else temp.textContent = toStartOne + " " + rObject[i].sourcetitle;
///////////////////////////
*/
var st = 0;
if (rObject[i].title) {
temp.textContent = rObject[i].title;
}
else if (rObject[i].sourcetitle){
temp.textContent = rObject[i].sourcetitle;
st=1;
}
else if (rObject[i].publicationName) {
temp.textContent = rObject[i].publicationName;
st=1;
}
row.insertCell(1).appendChild(temp);
temp = document.createElement('div');
temp.style.fontSize = '11px';
temp.style.paddingLeft = '18px';
document.getElementById("Reference"+i).appendChild(temp);
//console.log("aa " + rObject[i].sourcetitle);
if (st==0) {
if (rObject[i].sourcetitle) {
temp.appendChild (document.createTextNode(rObject[i].sourcetitle));
if (rObject[i].citedby) {
temp.appendChild(document.createTextNode(", cited "+rObject[i].citedby+" times"));
}
else if (rObject[i].citedbyCount) {
temp.appendChild(document.createTextNode(", cited "+rObject[i].citedbyCount+" times"));
}
}
else if (rObject[i].publicationName) {
temp.appendChild (document.createTextNode(rObject[i].publicationName));
if (rObject[i].citedby) {
temp.appendChild(document.createTextNode(", cited "+rObject[i].citedby+" times"));
}
else if (rObject[i].citedbyCount) {
temp.appendChild(document.createTextNode(", cited "+rObject[i].citedbyCount+" times"));
}
}
temp.appendChild(document.createElement('br'));
}
if (rObject[i].author) {
if ( typeof(rObject[i].author[0].authname)!= 'undefined' )temp.appendChild(document.createTextNode(rObject[i].author[0].authname));
else temp.appendChild(document.createTextNode(rObject[i].author[0]["ce:indexed-name"]));
for (var j=1; j<rObject[i].author.length; j++) {
if (j==3) {
temp.appendChild(document.createTextNode(", et al."));
break;
}
if ( typeof(rObject[i].author[j].authname)!= 'undefined' )temp.appendChild(document.createTextNode(", "+rObject[i].author[j].authname));
else temp.appendChild(document.createTextNode(rObject[i].author[j]["ce:indexed-name"]));
}
temp.appendChild(document.createElement('br'));
}
if (rObject[i].affilname) {
temp.appendChild(document.createTextNode(rObject[i].affilname.split('|')[0]));
temp.appendChild(document.createElement('br'));
}
if (rObject[i].city) {
temp.appendChild(document.createTextNode(rObject[i].city));
if (rObject[i].country) {
temp.appendChild(document.createTextNode(", "+rObject[i].country));
}
temp.appendChild(document.createElement('br'));
}
else if (rObject[i].country) {
temp.appendChild(document.createTextNode(rObject[i].country));
temp.appendChild(document.createElement('br'));
}
temp = document.createElement('div');
temp.style.fontSize = '11px';
row.insertCell(1).appendChild(temp);
temp = document.createElement('div');
temp.align = 'justify';
document.getElementById("Reference"+i).appendChild(temp);
temp.setAttribute('id', "Reference" + i + "_abstract");
temp.style.position = 'relative';
temp.style.left = 18 + 'px';
temp.style.width = referenceWidth - 43 + 'px';
if (rObject[i].url) {
var temp2 = document.createElement('a');
temp2.textContent = "Show in Scopus";
temp2.href = "javascript:window.open('" + rObject[i].url + "')";
temp2.style.color = 'blue';
temp2.style.textDecoration = 'none';
temp.appendChild(temp2);
//temp.appendChild(document.createElement('br'));
}
//temp.appendChild(document.createElement('br'));
if (rObject[i].authorId && rObject[i].scopusId) {
var temp2 = document.createElement('a');
temp2.textContent = "Set as main article";
temp2.href = "#";
temp2.style.color = 'blue';
temp2.style.textDecoration = 'none';
temp2.style.cssFloat = 'right';
temp2.onclick = function() {newMainArticle(rObject[i]);};
temp.appendChild(temp2);
//temp.appendChild(document.createElement('br'));
}
temp.appendChild(document.createElement('br'));
if (rObject[i].Abstract) {
temp.appendChild(document.createTextNode("Abstract:"));
temp.appendChild(document.createElement('br'));
temp.appendChild(document.createTextNode(rObject[i].Abstract));
}
else temp.appendChild(document.createTextNode("Abstract not available"));
temp.style.overflow = 'hidden';
abstractRefHeight[i] = temp.clientHeight;
temp.style.height = 0 + 'px';
//temp.style.display = 'none';
abstractRefState[i] = 0;
abstractRefMode[i] = 0;
}
}
function removecontentReferenceChild () {
while (contentReference.firstChild) {
contentReference.removeChild(contentReference.firstChild);
}
}
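// Toggle the country-distribution side panel of the Reference window.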
function showReferenceCountryDistribution() {
if (modeCountryDistributionReference==0) {
modeCountryDistributionReference = 1;
divCountryDistributionReference.style.display = 'block';
}
else {
modeCountryDistributionReference = 0;
divCountryDistributionReference.style.display = 'none';
}
}
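// Fill the side panel with one link per country; clicking a country filters
// the references and focuses the map on it (see focusToCountryReference below).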
function showOverallCountryReference(crObject) {
while (divCountryDistributionReference.firstChild) {
divCountryDistributionReference.removeChild(divCountryDistributionReference.firstChild);
}
//divCountryDistributionReference.appendChild(hrefCountryTypeReference);
//divCountryDistributionReference.appendChild(document.createElement('br'));
for (var i=0; i<crObject.length; i++) {
var temp = document.createElement('a');
temp.href = "javascript:focusToCountryReference('" + crObject[i].name + "')";
temp.style.color = 'blue';
// temp.href = "javascript:console.log('" + crObject[i].name + "')";
temp.textContent = crObject[i].name;
temp.style.textDecoration = 'none';
divCountryDistributionReference.appendChild(temp);
divCountryDistributionReference.appendChild(document.createTextNode(" ("+crObject[i].hitCount+")"));
divCountryDistributionReference.appendChild(document.createElement('br'));
}
}
function focusToCountryReference(crObjectName) {
console.log("focus country");
viewAllModeActive = 0;
modeInMap = referenceMode;
getReferenceFilter(new Array(crObjectName));
// console.log(crObjectName);
// console.log(getObject(crObjectName));
highlight(getObject(crObjectName));
}
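// Expand or collapse the abstract of reference i with a short step-wise
// animation, swapping the expand/contract icon accordingly.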
function showAbstractRef(i) {
//console.log("show");
if (abstractRefMode[i]==0) {
document.getElementById("Reference" + i + "_image").src = imgContract.src;
//document.getElementById("Reference" + i + "_abstract").style.display = 'block';
abstractRefMode[i] = 1;
expandAbstractRef(i);
}
else {
document.getElementById("Reference" + i + "_image").src = imgExpand.src;
abstractRefMode[i] = 0;
contractAbstractRef(i);
}
}
function expandAbstractRef(i) {
//console.log(i);
abstractRefState[i] += 1;
document.getElementById("Reference" + i + "_abstract").style.height = abstractRefState[i]*abstractRefHeight[i]/abstractRefTotal + 'px';
//console.log(abstractRefHeight[i]);
if (abstractRefState[i]<abstractRefTotal && abstractRefMode[i]==1) {
setTimeout (function() {expandAbstractRef(i)}, 10);
}
}
function contractAbstractRef(i) {
//console.log(i);
abstractRefState[i] -= 1;
document.getElementById("Reference" + i + "_abstract").style.height = abstractRefState[i]*abstractRefHeight[i]/abstractRefTotal + 'px';
if (abstractRefState[i]>0 && abstractRefMode[i]==0) {
setTimeout (function(){contractAbstractRef(i);}, 10);
}
//else if (abstractRefMode[i]==0)
//document.getElementById("Reference" + i + "_abstract").style.display = 'none';
}
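// Mouse handlers for the Reference window: clicking the title bar starts a
// drag, clicking its right-hand corner minimises/restores the window.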
function mouseDownReference(e){
divReference.style['z-index'] = zIndex;
divCountryDistributionReference.style['z-index'] = zIndex;
zIndex += 1;
if(e.clientY-divReference.offsetTop<topbarHeight) {
if (e.clientX-divReference.offsetLeft<=parseInt(divReference.style.width)-minimizePosWidth) {
isMouseDownReference=true;
lastMouseX = e.clientX;
lastMouseY = e.clientY;
}
else if (e.clientX-divReference.offsetLeft>parseInt(divReference.style.width)-minimizePosWidth) {
referenceIncrement *= -1;
changeViewReference();
}
}
}
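// Animate the Reference window between its minimised (docked to the menu bar)
// and full-size states, one step per 10 ms tick.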
function changeViewReference() {
modeCountryDistributionReference = 0;
divCountryDistributionReference.style.display = 'none';
hrefCDR.textContent = "View country distribution";
if (referenceStatus <= 0 && referenceIncrement==-1) {
referenceStatus = 1;
}
else if (referenceStatus >= referenceTotalSteps && referenceIncrement==1) {
referenceStatus = referenceTotalSteps - 1;
}
referenceStatus += referenceIncrement;
divReference.style.width = referenceStatus*referenceWidth/referenceTotalSteps + "px";
divReference.style.height = referenceStatus*referenceHeight/referenceTotalSteps + "px";
divReference.style.left = (canvasMenu.offsetLeft+frameWidth+buttonMenuWidth/2) + referenceStatus*(referencePosX+referenceWidth/2-canvasMenu.offsetLeft-frameWidth-buttonMenuWidth)/referenceTotalSteps - referenceStatus*referenceWidth/2/referenceTotalSteps + "px";
divReference.style.top = (canvasMenu.offsetTop+canvasMenu.height/2) + referenceStatus*(referencePosY-canvasMenu.offsetTop-canvasMenu.height/2)/referenceTotalSteps + "px";
if (referenceStatus > 0) {
//divReference.style.display = "block";
referenceVisible = 1;
}
else {
//divReference.style.display = "none";
referenceVisible = 0;
}
ctxMenu.drawImage(imgReference[referenceVisible], 0, 0);
if (referenceStatus > 0 && referenceStatus <referenceTotalSteps) setTimeout (changeViewReference, 10);
}
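// Track mouse movement over the Reference window: update the cursor over the
// title bar and, while dragging, move the window and its country panel with it.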
function referenceDisplacement(e){
if(e.clientX>divReference.offsetLeft && e.clientY>divReference.offsetTop && e.clientY-divReference.offsetTop<topbarHeight) {
if (e.clientX-divReference.offsetLeft<=parseInt(divReference.style.width)-minimizePosWidth) {
divReference.style.cursor = "move";
}
else if (e.clientX-divReference.offsetLeft>parseInt(divReference.style.width)-minimizePosWidth) {
divReference.style.cursor = "default";
}
}
else {
divReference.style.cursor = "default";
}
if(isMouseDownReference) {
referencePosX += e.clientX - lastMouseX;
referencePosY += e.clientY - lastMouseY;
lastMouseX = e.clientX;
lastMouseY = e.clientY;
divReference.style.left = referencePosX + "px";
divReference.style.top = referencePosY + "px";
if(divReference.offsetTop<0) {
divReference.style.top="0px";
referencePosY = 0;
}
divCountryDistributionReference.style.top = referencePosY + 27 + "px";
divCountryDistributionReference.style.left = referencePosX-parseInt(divCountryDistributionReference.style.width) + 2 + 'px';
/*
if(divReference.offsetLeft<0){
divReference.style.left="0px";
console.log(divReference.offsetLeft);
console.log(divReference.style.left);
}
if(divReference.offsetLeft+parseInt(divReference.style.width)>window.innerWidth)
divReference.style.left = (window.innerWidth - parseInt(divReference.style.width)) +"px";
if(divReference.offsetTop+parseInt(divReference.style.height)>window.innerHeight)
divReference.style.top = (window.innerHeight-parseInt(divReference.style.height))+"px";
*/
}
}<file_sep>function calculateDelta()
{
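// Estimate the drag velocity from mouse movement since the last sample
// (pixels per millisecond, scaled by 5); presumably used for inertial
// scrolling of the map after the mouse is released.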
var curDate = new Date();
var x = (mouseX-canvas.offsetLeft);
var y = (mouseY-canvas.offsetTop);
delta = [5*(x - startCoords[0])/(curDate - pastDate), 5*(y - startCoords[1])/(curDate - pastDate)];
startCoords = [x,y];
pastDate = curDate;
}
| 5242e697b8dd05a1475c9e9fcb9ad377e4760d6d | ["JavaScript"] | 10 | JavaScript | alvi0010/map | f58caf6b88c7b57ab6c92552b0d74cf5b47325b5 | 515450d4a65d87fbe7a8e896dcb6896a09f9ac76 | refs/heads/main |
<file_sep><?php
namespace App\Entity;
use App\Repository\ArticleRepository;
use Doctrine\ORM\Mapping as ORM;
/**
* @ORM\Entity(repositoryClass=ArticleRepository::class)
*/
class Article
{
/**
* @ORM\Id
* @ORM\GeneratedValue
* @ORM\Column(type="integer")
*/
private $id;
/**
* @ORM\Column(type="string", length=100)
*/
private $titre;
/**
* @ORM\Column(type="text")
*/
private $description;
/**
* @ORM\ManyToOne(targetEntity=Membre::class, inversedBy="articles")
*/
private $membre;
/**
* @ORM\Column(type="string", length=30)
*/
private $auteur;
/**
* @ORM\Column(type="string", length=50)
*/
private $photo;
public function getId(): ?int
{
return $this->id;
}
public function getTitre(): ?string
{
return $this->titre;
}
public function setTitre(string $titre): self
{
$this->titre = $titre;
return $this;
}
public function getDescription(): ?string
{
return $this->description;
}
public function setDescription(string $description): self
{
$this->description = $description;
return $this;
}
public function getMembre(): ?Membre
{
return $this->membre;
}
public function setMembre(?Membre $membre): self
{
$this->membre = $membre;
return $this;
}
public function getAuteur(): ?string
{
return $this->auteur;
}
public function setAuteur(string $auteur): self
{
$this->auteur = $auteur;
return $this;
}
public function getPhoto(): ?string
{
return $this->photo;
}
public function setPhoto(string $photo): self
{
$this->photo = $photo;
return $this;
}
}
<file_sep><?php
namespace App\Entity;
use App\Repository\EvenementRepository;
use Doctrine\Common\Collections\ArrayCollection;
use Doctrine\Common\Collections\Collection;
use Doctrine\ORM\Mapping as ORM;
/**
* @ORM\Entity(repositoryClass=EvenementRepository::class)
*/
class Evenement
{
/**
* @ORM\Id
* @ORM\GeneratedValue
* @ORM\Column(type="integer")
*/
private $id;
/**
* @ORM\Column(type="string", length=30)
*/
private $ville;
/**
* @ORM\Column(type="integer")
*/
private $cp;
/**
* @ORM\Column(type="string", length=30)
*/
private $adresse;
/**
* @ORM\Column(type="date")
*/
private $date;
/**
* @ORM\Column(type="string", length=30)
*/
private $titre;
/**
* @ORM\ManyToMany(targetEntity=Membre::class, inversedBy="evenements")
*/
private $membre;
public function __construct()
{
$this->membre = new ArrayCollection();
}
public function getId(): ?int
{
return $this->id;
}
public function getVille(): ?string
{
return $this->ville;
}
public function setVille(string $ville): self
{
$this->ville = $ville;
return $this;
}
public function getCp(): ?int
{
return $this->cp;
}
public function setCp(int $cp): self
{
$this->cp = $cp;
return $this;
}
public function getAdresse(): ?string
{
return $this->adresse;
}
public function setAdresse(string $adresse): self
{
$this->adresse = $adresse;
return $this;
}
public function getDate(): ?\DateTimeInterface
{
return $this->date;
}
public function setDate(\DateTimeInterface $date): self
{
$this->date = $date;
return $this;
}
public function getTitre(): ?string
{
return $this->titre;
}
public function setTitre(string $titre): self
{
$this->titre = $titre;
return $this;
}
/**
* @return Collection|Membre[]
*/
public function getMembre(): Collection
{
return $this->membre;
}
public function addMembre(Membre $membre): self
{
if (!$this->membre->contains($membre)) {
$this->membre[] = $membre;
}
return $this;
}
public function removeMembre(Membre $membre): self
{
$this->membre->removeElement($membre);
return $this;
}
}
<file_sep># Art-d-un-temps
Final project for the end of the training course.
<file_sep><?php
namespace App\Form;
use App\Entity\Membre;
use Symfony\Component\Form\AbstractType;
use Symfony\Component\Form\FormBuilderInterface;
use Symfony\Component\Validator\Constraints\Regex;
use Symfony\Component\Validator\Constraints\IsTrue;
use Symfony\Component\OptionsResolver\OptionsResolver;
use Symfony\Component\Form\Extension\Core\Type\FileType;
use Symfony\Component\Form\Extension\Core\Type\TextType;
use Symfony\Component\Form\Extension\Core\Type\ChoiceType;
use Symfony\Component\Form\Extension\Core\Type\SubmitType;
class MembreType extends AbstractType
{
public function buildForm(FormBuilderInterface $builder, array $options)
{
$membre = $options["data"];
$builder
->add('pseudo')
->add('roles', ChoiceType::class, [
"choices" => [
"Admin" => "ROLE_ADMIN",
"Artiste" => "ROLE_ARTISTE",
"Membre" => "ROLE_MEMBRE",
],
"multiple" => true,
"expanded" => true
])
->add('password', TextType::class, [
"label" => "Mot de passe",
"mapped" => false,
"constraints" => [
new Regex([
"pattern" => "/^(?=.*[A-Z])(?=.*[a-z])(?=.*\d)(?=.*[-+!*$@%_])([-+!*$@%_\w]{6,10})$/",
"message" => "Le mot de passe doit contenir au moins 1 majuscule, 1 minuscule, 1 chiffre, 1 caractère spécial parmi -+!*$@%_ et doit faire entre 6 et 10 caractères."
])
],
"help" => "Le mot de passe doit contenir au moins 1 majuscule, 1 minuscule, 1 chiffre, 1 caractère spécial parmi -+!*$@%_ et doit faire entre 6 et 10 caractères.",
"required" => $membre->getId() ? false : true
])
->add('nom')
->add('prenom')
->add('email')
->add('ville')
->add('cp')
->add('adresse')
->add("photo", FileType::class, [
"mapped" => false,
"attr" => ["label_attr" => "Parcourir", "lang" => "fr"]
])
->add('enregistrer', SubmitType::class,[
"attr" =>
["class" => "btn"
]
])
;
}
public function configureOptions(OptionsResolver $resolver)
{
$resolver->setDefaults([
'data_class' => Membre::class,
]);
}
}
<file_sep><?php
namespace App\Controller;
use App\Entity\Article;
use App\Form\ArticleType;
use App\Form\ArtisteArticleType;
use App\Repository\ArticleRepository;
use Doctrine\ORM\EntityManagerInterface;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\HttpFoundation\Response;
use Symfony\Component\Routing\Annotation\Route;
use Symfony\Bundle\FrameworkBundle\Controller\AbstractController;
class ArticleController extends AbstractController
{
// ADMIN ACCESS
#[Route('/admin/articles', name: 'article_index', methods: ['GET'])]
public function index(ArticleRepository $articleRepository): Response
{
return $this->render('article/index.html.twig', [
'articles' => $articleRepository->findAll(),
]);
}
#[Route('/admin/article/new', name: 'article_new', methods: ['GET', 'POST'])]
public function new(Request $request, EntityManagerInterface $entityManager): Response
{
$article = new Article();
$form = $this->createForm(ArticleType::class, $article);
$form->handleRequest($request);
if ($form->isSubmitted() && $form->isValid()) {
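// Sanitise the uploaded photo's file name (spaces to underscores plus a
// unique suffix) and move it into the configured articles image folder.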
$destination = $this->getParameter("dossier_images_articles");
if($photoTelechargee = $form->get("photo")->getData()){
$photo = pathinfo($photoTelechargee->getClientOriginalName(), PATHINFO_FILENAME);
$nouveauNom = str_replace(" ", "_", $photo);
$nouveauNom .= "-" . uniqid() . "." . $photoTelechargee->guessExtension();
$photoTelechargee->move($destination, $nouveauNom);
$article->setPhoto($nouveauNom);
}
// the injected EntityManagerInterface $entityManager is used directly
$entityManager->persist($article);
$entityManager->flush();
return $this->redirectToRoute('article_index');
}
return $this->render('article/new.html.twig', [
'article' => $article,
'form' => $form->createView(),
]);
}
#[Route('/admin/article/{id}/edit', name: 'article_edit', methods: ['GET', 'POST'])]
public function edit(Request $request, Article $article): Response
{
$form = $this->createForm(ArticleType::class, $article);
$form->handleRequest($request);
if ($form->isSubmitted() && $form->isValid()) {
$destination = $this->getParameter("dossier_images_articles");
if($photoTelechargee = $form->get("photo")->getData()){
$photo = pathinfo($photoTelechargee->getClientOriginalName(), PATHINFO_FILENAME);
$nouveauNom = str_replace(" ", "_", $photo);
$nouveauNom .= "-" . uniqid() . "." . $photoTelechargee->guessExtension();
$photoTelechargee->move($destination, $nouveauNom);
$article->setPhoto($nouveauNom);
}
$this->getDoctrine()->getManager()->flush();
return $this->redirectToRoute('article_index');
}
return $this->render('article/edit.html.twig', [
'article' => $article,
'form' => $form->createView(),
]);
}
// ARTIST ACCESS
#[Route('/artiste/articles', name: 'artiste_articles', methods: ['GET'])]
public function indexArtiste(ArticleRepository $articleRepository): Response
{
return $this->render('article/artiste_articles.html.twig', [
'articles' => $articleRepository->findAll(),
]);
}
#[Route('/artiste/article/{id}/edit', name: 'artiste_article_edit', methods: ['GET', 'POST'])]
public function editArtiste(Request $request, Article $article): Response
{
$form = $this->createForm(ArticleType::class, $article);
$form->handleRequest($request);
if ($form->isSubmitted() && $form->isValid()) {
$destination = $this->getParameter("dossier_images_articles");
if($photoTelechargee = $form->get("photo")->getData()){
$photo = pathinfo($photoTelechargee->getClientOriginalName(), PATHINFO_FILENAME);
$nouveauNom = str_replace(" ", "_", $photo);
$nouveauNom .= "-" . uniqid() . "." . $photoTelechargee->guessExtension();
$photoTelechargee->move($destination, $nouveauNom);
$article->setPhoto($nouveauNom);
}
$this->getDoctrine()->getManager()->flush();
return $this->redirectToRoute('artiste_articles');
}
return $this->render('article/edit.html.twig', [
'article' => $article,
'form' => $form->createView(),
]);
}
#[Route('/artiste/article/new', name: 'artiste_article_new', methods: ['GET', 'POST'])]
public function newArtisteArticle(Request $request, EntityManagerInterface $entityManager): Response
{
$article = new Article();
$form = $this->createForm(ArtisteArticleType::class, $article);
$form->handleRequest($request);
if ($form->isSubmitted() && $form->isValid()) {
$destination = $this->getParameter("dossier_images_articles");
if($photoTelechargee = $form->get("photo")->getData()){
$photo = pathinfo($photoTelechargee->getClientOriginalName(), PATHINFO_FILENAME);
$nouveauNom = str_replace(" ", "_", $photo);
$nouveauNom .= "-" . uniqid() . "." . $photoTelechargee->guessExtension();
$photoTelechargee->move($destination, $nouveauNom);
$article->setPhoto($nouveauNom);
}
$entityManager->persist($article);
$entityManager->flush();
return $this->redirectToRoute('artiste_articles');
}
return $this->render('article/artiste_article_new.html.twig', [
'article' => $article,
'form' => $form->createView(),
]);
}
#[Route('/artiste/article/{id}', name: 'article_show', methods: ['GET'] )]
public function show(Article $article): Response
{
return $this->render('article/show.html.twig', [
'article' => $article,
]);
}
#[Route('/artiste/article/{id}', name: 'article_delete', methods: ['POST'])]
public function delete(Request $request, Article $article): Response
{
if ($this->isCsrfTokenValid('delete'.$article->getId(), $request->request->get('_token'))) {
$entityManager = $this->getDoctrine()->getManager();
$entityManager->remove($article);
$entityManager->flush();
}
return $this->redirectToRoute('article_index');
}
}
<file_sep><?php
namespace App\Repository;
use App\Entity\Membre;
use App\Entity\Oeuvre;
use Doctrine\Bundle\DoctrineBundle\Repository\ServiceEntityRepository;
use Doctrine\Persistence\ManagerRegistry;
/**
* @method Oeuvre|null find($id, $lockMode = null, $lockVersion = null)
* @method Oeuvre|null findOneBy(array $criteria, array $orderBy = null)
* @method Oeuvre[] findAll()
* @method Oeuvre[] findBy(array $criteria, array $orderBy = null, $limit = null, $offset = null)
*/
class OeuvreRepository extends ServiceEntityRepository
{
public function __construct(ManagerRegistry $registry)
{
parent::__construct($registry, Oeuvre::class);
}
/**
* @return Oeuvre[] Returns an array of Oeuvre objects
*/
// public function showPeinture(){
// /*
// SELECT o.*
// FROM oeuvre o
// WHERE o.categorie = peinture
// */
// return $this->createQueryBuilder("oeuvre")
// ->where("oeuvre.categorie = 'peinture'")
// ->setMaxResults(6)
// ->getQuery()
// ->getResult()
// ;
// }
public function home(){
/*
SELECT *
FROM oeuvre
ORDER BY id
*/
return $this->createQueryBuilder("oeuvre")
->orderBy("oeuvre.id")
->setMaxResults(6)
->getQuery()
->getResult()
;
}
public function oeuvresByMembre(){
/*
SELECT o.*
FROM oeuvres o JOIN membre m ON o.membre = m.pseudo
*/
return $this->createQueryBuilder("o")
->join(Membre::class, "m", "WITH", "o.membre = m.pseudo")
->orderBy("o.id")
->getQuery()
->getResult()
;
}
public function findByCategorie($categorie)
{
return $this->createQueryBuilder('o')
->andWhere('o.categorie = :categorie')
->setParameter('categorie', $categorie)
->orderBy('o.id', 'ASC')
->setMaxResults(6)
->getQuery()
->getResult()
;
}
}
<file_sep><?php
namespace App\Controller;
use App\Entity\Membre;
use App\Form\EditProfilArtisteType;
use App\Form\EditProfilMembreType;
use Doctrine\ORM\EntityManagerInterface;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\HttpFoundation\Response;
use Symfony\Component\Routing\Annotation\Route;
use Sensio\Bundle\FrameworkExtraBundle\Configuration\IsGranted;
use Symfony\Bundle\FrameworkBundle\Controller\AbstractController;
use Symfony\Component\Security\Core\Encoder\UserPasswordEncoderInterface as Encoder;
class ProfilController extends AbstractController
{
// MEMBER PROFILE
#[Route('/profil', name: 'profil_membre')]
#[IsGranted("ROLE_MEMBRE")]
public function profilMembre(): Response
{
return $this->render('profil/profil_membre.html.twig');
}
#[Route('/profil/{id}', name: 'edit_profil_membre', methods: ['GET', 'POST'])]
public function editMembre(Request $request, Membre $membre, Encoder $encoder): Response
{
$form = $this->createForm(EditProfilMembreType::class, $membre,);
$form->handleRequest($request);
if ($form->isSubmitted() && $form->isValid()) {
if( $password = $form->get("password")->getData() ){
$password = $encoder->encodePassword($membre, $password);
$membre->setPassword($password);
}
$this->getDoctrine()->getManager()->flush();
return $this->redirectToRoute('profil_membre');
}
return $this->render('profil/edit_profil_membre.html.twig', [
'membre' => $membre,
'form' => $form->createView(),
]);
}
// ARTIST PROFILE
#[Route('/artiste/profil', name: 'profil_artiste')]
#[IsGranted("ROLE_ARTISTE")]
public function profilArtiste(): Response
{
return $this->render('profil/profil_artiste.html.twig');
}
#[Route('/artiste/profil/{id}', name: 'edit_profil_artiste', methods: ['GET', 'POST'])]
#[IsGranted("ROLE_ARTISTE")]
public function editArtiste(Request $request, Membre $membre, Encoder $encoder): Response
{
$form = $this->createForm(EditProfilArtisteType::class, $membre,);
$form->handleRequest($request);
if ($form->isSubmitted() && $form->isValid()) {
if( $password = $form->get("password")->getData() ){
$password = $encoder->encodePassword($membre, $password);
$membre->setPassword($password);
}
$this->getDoctrine()->getManager()->flush();
return $this->redirectToRoute('profil_artiste');
}
return $this->render('profil/edit_profil_artiste.html.twig', [
'membre' => $membre,
'form' => $form->createView(),
]);
}
// ADMIN PROFILE
#[Route('/admin/profil', name: 'profil_admin')]
#[IsGranted("ROLE_ADMIN")]
public function profilAdmin(): Response
{
return $this->render('profil/profil_admin.html.twig');
}
}
<file_sep><?php
namespace App\Form;
use App\Entity\Evenement;
use Symfony\Component\Form\AbstractType;
use Symfony\Component\Form\FormBuilderInterface;
use Symfony\Component\OptionsResolver\OptionsResolver;
use Symfony\Component\Form\Extension\Core\Type\SubmitType;
class EvenementType extends AbstractType
{
public function buildForm(FormBuilderInterface $builder, array $options)
{
$builder
->add('titre')
->add('date')
->add('adresse')
->add('cp')
->add('ville')
// ->add('membre')
->add('enregistrer', SubmitType::class,[
"attr" =>
["class" => "btn"
]
])
;
}
public function configureOptions(OptionsResolver $resolver)
{
$resolver->setDefaults([
'data_class' => Evenement::class,
]);
}
}
<file_sep><?php
namespace App\Controller;
use App\Entity\Oeuvre;
use App\Form\OeuvreType;
use App\Repository\OeuvreRepository;
use Doctrine\ORM\EntityManager;
use Doctrine\ORM\EntityManagerInterface;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\HttpFoundation\Response;
use Symfony\Component\Routing\Annotation\Route;
use Sensio\Bundle\FrameworkExtraBundle\Configuration\IsGranted;
use Symfony\Bundle\FrameworkBundle\Controller\AbstractController;
class OeuvreController extends AbstractController
{
// MEMBER ACCESS
#[Route('/oeuvres', name: 'oeuvres', methods: ['GET'])]
#[IsGranted("ROLE_MEMBRE")]
public function homeOeuvres(OeuvreRepository $oeuvreRepository): Response
{
$peintures = $oeuvreRepository->findByCategorie('peinture');
$sculptures = $oeuvreRepository->findByCategorie('sculpture');
$ceramiques = $oeuvreRepository->findByCategorie('céramique');
//dd($sculptures);
return $this->render('oeuvre/oeuvres.html.twig', [
'peintures' => $peintures,
'sculptures' => $sculptures,
'ceramiques' => $ceramiques
]);
}
#[Route('/oeuvres/oeuvre/{id}', name: 'oeuvre', methods: ['GET'])]
public function ficheOeuvre(Oeuvre $oeuvre): Response
{
return $this->render('oeuvre/oeuvre.html.twig', [
'oeuvre' => $oeuvre
,]);
}
// ADMIN ACCESS
#[Route('admin/oeuvres/', name: 'bo_oeuvres', methods: ['GET'])]
#[IsGranted("ROLE_ADMIN")]
public function index(OeuvreRepository $oeuvreRepository): Response
{
return $this->render('oeuvre/index.html.twig', [
'oeuvres' => $oeuvreRepository->findAll(),
]);
}
#[Route('admin/oeuvre/new', name: 'admin_oeuvre_new', methods: ['GET', 'POST'])]
public function newOeuvreAdmin(Request $request, EntityManagerInterface $entityManager): Response
{
$oeuvre = new Oeuvre();
$form = $this->createForm(OeuvreType::class, $oeuvre);
$form->handleRequest($request);
if ($form->isSubmitted() && $form->isValid()) {
$destination = $this->getParameter("dossier_images_oeuvres");
if($photoTelechargee = $form->get("photo")->getData()){
$photo = pathinfo($photoTelechargee->getClientOriginalName(), PATHINFO_FILENAME);
$nouveauNom = str_replace(" ", "_", $photo);
$nouveauNom .= "-" . uniqid() . "." . $photoTelechargee->guessExtension();
$photoTelechargee->move($destination, $nouveauNom);
$oeuvre->setPhoto($nouveauNom);
}
$entityManager->persist($oeuvre);
$entityManager->flush();
return $this->redirectToRoute('bo_oeuvres');
}
return $this->render('oeuvre/new.html.twig', [
'oeuvre' => $oeuvre,
'form' => $form->createView(),
]);
}
#[Route('admin/oeuvre/{id}/edit', name: 'oeuvre_edit', methods: ['GET', 'POST'])]
public function editOeuvreAdmin(Request $request, Oeuvre $oeuvre): Response
{
$form = $this->createForm(OeuvreType::class, $oeuvre);
$form->handleRequest($request);
if ($form->isSubmitted() && $form->isValid()) {
$destination = $this->getParameter("dossier_images_oeuvres");
if($photoTelechargee = $form->get("photo")->getData()){
$photo = pathinfo($photoTelechargee->getClientOriginalName(), PATHINFO_FILENAME);
$nouveauNom = str_replace(" ", "_", $photo);
$nouveauNom .= "-" . uniqid() . "." . $photoTelechargee->guessExtension();
$photoTelechargee->move($destination, $nouveauNom);
$oeuvre->setPhoto($nouveauNom);
}
$this->getDoctrine()->getManager()->flush();
return $this->redirectToRoute('bo_oeuvres');
}
return $this->render('oeuvre/edit.html.twig', [
'oeuvre' => $oeuvre,
'form' => $form->createView(),
]);
}
#[Route('admin/oeuvre/{id}', name: 'oeuvre_show', methods: ['GET'])]
public function showOeuvreAdmin(Oeuvre $oeuvre): Response
{
return $this->render('oeuvre/show.html.twig', [
'oeuvre' => $oeuvre,
]);
}
// ARTIST ACCESS
#[Route('artiste/oeuvres/', name: 'artiste_oeuvres', methods: ['GET'])]
public function artisteOeuvres(OeuvreRepository $oeuvreRepository): Response
{
return $this->render('oeuvre/artiste_oeuvres.html.twig', [
'oeuvres' => $oeuvreRepository->findAll(),
]);
}
#[Route('artiste/oeuvre/new', name: 'artiste_oeuvre_new', methods: ['GET', 'POST'])]
public function newOeuvreArtiste(Request $request, EntityManagerInterface $entityManager): Response
{
$oeuvre = new Oeuvre();
$form = $this->createForm(OeuvreType::class, $oeuvre);
$form->handleRequest($request);
if ($form->isSubmitted() && $form->isValid()) {
$destination = $this->getParameter("dossier_images_oeuvres");
if($photoTelechargee = $form->get("photo")->getData()){
$photo = pathinfo($photoTelechargee->getClientOriginalName(), PATHINFO_FILENAME);
$nouveauNom = str_replace(" ", "_", $photo);
$nouveauNom .= "-" . uniqid() . "." . $photoTelechargee->guessExtension();
$photoTelechargee->move($destination, $nouveauNom);
$oeuvre->setPhoto($nouveauNom);
}
$entityManager->persist($oeuvre);
$entityManager->flush();
return $this->redirectToRoute('profil_artiste');
}
return $this->render('oeuvre/artiste_oeuvre_new.html.twig', [
'oeuvre' => $oeuvre,
'form' => $form->createView(),
]);
}
#[Route('artiste/oeuvre/{id}', name: 'artiste_oeuvre_show', methods: ['GET'])]
public function show(Oeuvre $oeuvre): Response
{
return $this->render('oeuvre/artiste_oeuvre_show.html.twig', [
'oeuvre' => $oeuvre,
]);
}
#[Route('artiste/oeuvre/{id}/edit', name: 'artiste_oeuvre_edit', methods: ['GET', 'POST'])]
public function editOeuvreArtiste(Request $request, Oeuvre $oeuvre): Response
{
$form = $this->createForm(OeuvreType::class, $oeuvre);
$form->handleRequest($request);
if ($form->isSubmitted() && $form->isValid()) {
$destination = $this->getParameter("dossier_images_oeuvres");
if($photoTelechargee = $form->get("photo")->getData()){
$photo = pathinfo($photoTelechargee->getClientOriginalName(), PATHINFO_FILENAME);
$nouveauNom = str_replace(" ", "_", $photo);
$nouveauNom .= "-" . uniqid() . "." . $photoTelechargee->guessExtension();
$photoTelechargee->move($destination, $nouveauNom);
$oeuvre->setPhoto($nouveauNom);
}
$this->getDoctrine()->getManager()->flush();
return $this->redirectToRoute('profil_artiste');
}
return $this->render('oeuvre/artiste_oeuvre_edit.html.twig', [
'oeuvre' => $oeuvre,
'form' => $form->createView(),
]);
}
#[Route('artiste/oeuvre/{id}', name: 'oeuvre_delete', methods: ['POST'])]
public function delete(Request $request, Oeuvre $oeuvre): Response
{
if ($this->isCsrfTokenValid('delete' . $oeuvre->getId(), $request->request->get('_token'))) {
$entityManager = $this->getDoctrine()->getManager();
$entityManager->remove($oeuvre);
$entityManager->flush();
}
return $this->redirectToRoute('bo_oeuvres');
}
}
<file_sep><?php
namespace App\Form;
use App\Entity\Article;
use App\Entity\Membre;
use Symfony\Bridge\Doctrine\Form\Type\EntityType;
use Symfony\Component\Form\AbstractType;
use Symfony\Component\Form\FormBuilderInterface;
use Symfony\Component\OptionsResolver\OptionsResolver;
use Symfony\Component\Form\Extension\Core\Type\FileType;
use Symfony\Component\Form\Extension\Core\Type\SubmitType;
class ArticleType extends AbstractType
{
public function buildForm(FormBuilderInterface $builder, array $options)
{
$builder
->add('titre')
->add('description')
// ->add('membre')
->add("photo", FileType::class, [
"mapped" => false,
"attr" => ["label_attr" => "Parcourir", "lang" => "fr"]
])
->add('auteur')
// ->add('Auteur', EntityType::class, [
// "class" => Membre::class,
// "choice_label" => "pseudo",
// "placeholder" => "Choisissez parmi les membres..."
// ])
->add('enregistrer', SubmitType::class,[
"attr" =>
["class" => "btn"
]
])
;
}
public function configureOptions(OptionsResolver $resolver)
{
$resolver->setDefaults([
'data_class' => Article::class,
]);
}
}
<file_sep><?php
namespace App\Controller;
use App\Repository\MembreRepository;
use App\Repository\OeuvreRepository;
use App\Repository\EvenementRepository;
use Symfony\Component\HttpFoundation\Response;
use Symfony\Component\Routing\Annotation\Route;
use Symfony\Bundle\FrameworkBundle\Controller\AbstractController;
class HomeController extends AbstractController
{
#[Route('/', name: 'accueil')]
public function index(OeuvreRepository $oeuvreRepository, EvenementRepository $evenementRepository, MembreRepository $membreRepository): Response
{
$oeuvres = $oeuvreRepository->home();
$evenements = $evenementRepository->home();
$membres = $membreRepository->home('ROLE_ARTISTE');
return $this->render('home/index.html.twig', [
'oeuvres' => $oeuvres,
'evenements' => $evenements,
'membres' => $membres
]);
}
}
<file_sep><?php
namespace App\Form;
use App\Entity\Membre;
use Symfony\Component\Form\AbstractType;
use Symfony\Component\Form\FormBuilderInterface;
use Symfony\Component\Validator\Constraints\IsTrue;
use Symfony\Component\Validator\Constraints\Length;
use Symfony\Component\Validator\Constraints\NotBlank;
use Symfony\Component\OptionsResolver\OptionsResolver;
use Symfony\Component\Form\Extension\Core\Type\FileType;
use Symfony\Component\Form\Extension\Core\Type\TextType;
use Symfony\Component\Form\Extension\Core\Type\ChoiceType;
use Symfony\Component\Form\Extension\Core\Type\NumberType;
use Symfony\Component\Form\Extension\Core\Type\SubmitType;
use Symfony\Component\Form\Extension\Core\Type\CheckboxType;
use Symfony\Component\Form\Extension\Core\Type\PasswordType;
class RegistrationFormType extends AbstractType
{
public function buildForm(FormBuilderInterface $builder, array $options)
{
$membre = $options["data"];
$builder
->add('roles', ChoiceType::class, [
"choices" => [
"Artiste" => "ROLE_ARTISTE",
"Visiteur" => "ROLE_MEMBRE",
],
"label" => "Je m'inscris en tant que :",
"mapped" => false,
"multiple" => false,
"expanded" => true
])
->add('pseudo', TextType::class, [
"label" => "Pseudo",
"required" => true
])
->add('plainPassword', PasswordType::class, [
// instead of being set onto the object directly,
// this is read and encoded in the controller
"mapped" => false,
"attr" => ["autocomplete" => "new-password"],
"constraints" => [
new NotBlank([
"message" => "Veuillez renseigner un mot de passe",
]),
new Length([
"min" => 6,
"minMessage" => "Votre mot de passe doit contenir au moins 6 caractères",
// max length allowed by Symfony for security reasons
'max' => 4096,
]),
],
])
->add("prenom", TextType::class, [
"label" => "Prénom",
"required" => false
])
->add("nom", TextType::class, [
"required" => false
])
->add("email", TextType::class, [
"required" => false
])
// ->add('photo', FileType::class, [
// "mapped" => false,
// "attr" => ["label_attr" => "Parcourir", "lang" => "fr"]
// ])
->add("agreeTerms", CheckboxType::class, [
"mapped" => false,
"constraints" => [
new IsTrue([
"message" => "Vous devez accepter les C.G.U.",
]),
],
"attr" => ["class" => "form-check-input"]
])
->add("enregistrer", SubmitType::class,[
"attr" =>
["class" => "btn"
]
])
;
}
public function configureOptions(OptionsResolver $resolver)
{
$resolver->setDefaults([
"data_class" => Membre::class,
]);
}
}
<file_sep><?php
namespace App\Controller;
use App\Entity\Membre;
use App\Form\MembreType;
use App\Repository\MembreRepository;
use Doctrine\ORM\EntityManagerInterface;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\HttpFoundation\Response;
use Symfony\Component\Routing\Annotation\Route;
use Symfony\Bundle\FrameworkBundle\Controller\AbstractController;
use Symfony\Component\Security\Core\Encoder\UserPasswordEncoderInterface as Encoder;
#[Route('admin/membre')]
class MembreController extends AbstractController
{
#[Route('s', name: 'membre_index', methods: ['GET'])]
public function index(MembreRepository $membreRepository): Response
{
return $this->render('membre/index.html.twig', [
'membres' => $membreRepository->findAll(),
]);
}
#[Route('/membres', name: 'membre_membres', methods: ['GET'])]
public function membres(MembreRepository $membreRepository): Response
{
return $this->render('membre/membres.html.twig', [
'membres' => $membreRepository->findAll(),
]);
}
#[Route('/new', name: 'membre_new', methods: ['GET', 'POST'])]
public function new(Request $request, EntityManagerInterface $entityManager, Encoder $encoder): Response
{
$membre = new Membre();
$form = $this->createForm(MembreType::class, $membre);
$form->handleRequest($request);
if ($form->isSubmitted() && $form->isValid()) {
$password = $form->get("password")->getData();
$password = $encoder ->encodePassword($membre, $password);
$membre->setPassword( $password );
$destination = $this->getParameter("dossier_images_membres");
if($photoTelechargee = $form->get("photo")->getData()){
$photo = pathinfo($photoTelechargee->getClientOriginalName(), PATHINFO_FILENAME);
$nouveauNom = str_replace(" ", "_", $photo);
$nouveauNom .= "-" . uniqid() . "." . $photoTelechargee->guessExtension();
$photoTelechargee->move($destination, $nouveauNom);
$membre->setPhoto($nouveauNom);
}
$entityManager->persist($membre);
$entityManager->flush();
return $this->redirectToRoute('membre_index');
}
return $this->render('membre/new.html.twig', [
'membre' => $membre,
'form' => $form->createView(),
]);
}
#[Route('/{id}', name: 'membre_show', methods: ['GET'])]
public function show(Membre $membre): Response
{
return $this->render('membre/show.html.twig', [
'membre' => $membre,
]);
}
#[Route('/{id}/edit', name: 'membre_edit', methods: ['GET', 'POST'])]
public function edit(Request $request, Membre $membre, Encoder $encoder): Response
{
$form = $this->createForm(MembreType::class, $membre,);
$form->handleRequest($request);
if ($form->isSubmitted() && $form->isValid()) {
if( $password = $form->get("password")->getData() ){
$password = $encoder->encodePassword($membre, $password);
$membre->setPassword($password);
}
$destination = $this->getParameter("dossier_images_membres");
if($photoTelechargee = $form->get("photo")->getData()){
$photo = pathinfo($photoTelechargee->getClientOriginalName(), PATHINFO_FILENAME);
$nouveauNom = str_replace(" ", "_", $photo);
$nouveauNom .= "-" . uniqid() . "." . $photoTelechargee->guessExtension();
$photoTelechargee->move($destination, $nouveauNom);
$membre->setPhoto($nouveauNom);
}
$this->getDoctrine()->getManager()->flush();
return $this->redirectToRoute('membre_index');
}
return $this->render('membre/edit.html.twig', [
'membre' => $membre,
'form' => $form->createView(),
]);
}
#[Route('/{id}', name: 'membre_delete', methods: ['POST'])]
public function delete(Request $request, Membre $membre): Response
{
if ($this->isCsrfTokenValid('delete'.$membre->getId(), $request->request->get('_token'))) {
$entityManager = $this->getDoctrine()->getManager();
$entityManager->remove($membre);
$entityManager->flush();
}
return $this->redirectToRoute('membre_index');
}
}
<file_sep><?php
namespace App\Form;
use App\Entity\Membre;
use App\Entity\Oeuvre;
use Symfony\Component\Form\AbstractType;
use Symfony\Component\Form\FormBuilderInterface;
use Symfony\Component\Validator\Constraints\Length;
use Symfony\Component\Validator\Constraints\NotBlank;
use Symfony\Component\OptionsResolver\OptionsResolver;
use Symfony\Component\Form\Extension\Core\Type\DateType;
use Symfony\Component\Form\Extension\Core\Type\FileType;
use Symfony\Component\Form\Extension\Core\Type\TextType;
use Symfony\Component\Form\Extension\Core\Type\ChoiceType;
use Symfony\Component\Form\Extension\Core\Type\NumberType;
use Symfony\Component\Form\Extension\Core\Type\SubmitType;
use Symfony\Bridge\Doctrine\Form\Type\EntityType;
class OeuvreType extends AbstractType
{
public function buildForm(FormBuilderInterface $builder, array $options)
{
$builder
->add('categorie', ChoiceType::class, [
"choices" => [
"Peinture" => "Peinture",
"Scupture" => "Sculpture",
"Céramique" => "Céramique",
"Gravure" => "Gravure",
"Illustration" => "Illustration",
"Verre" => "Verre"
],
"multiple" => false,
"expanded" => true
])
->add('nom_oeuvre', TextType::class)
->add('annee', DateType::class, [
"widget" => "single_text",
"label" => "Réalisée en",
"required" => false
])
->add('description')
->add('dimension')
->add('prix')
->add('photo', FileType::class, [
"mapped" => false,
"attr" => ["label_attr" => "Parcourir", "lang" => "fr"]
])
->add('stock')
->add('membre', EntityType::class, [
"class" => Membre::class,
"choice_label" => "pseudo",
"placeholder" => "Choisissez parmi les membres..."
])
->add('enregistrer', SubmitType::class,[
"attr" =>
["class" => "btn"
]
])
;
}
public function configureOptions(OptionsResolver $resolver)
{
$resolver->setDefaults([
'data_class' => Oeuvre::class,
]);
}
}
<file_sep><?php
namespace App\Entity;
use App\Repository\OeuvreRepository;
use Doctrine\ORM\Mapping as ORM;
/**
* @ORM\Entity(repositoryClass=OeuvreRepository::class)
*/
class Oeuvre
{
/**
* @ORM\Id
* @ORM\GeneratedValue
* @ORM\Column(type="integer")
*/
private $id;
/**
* @ORM\Column(type="string", length=70)
*/
private $categorie;
/**
* @ORM\Column(type="string", length=50)
*/
private $nom_oeuvre;
/**
* @ORM\Column(type="date")
*/
private $annee;
/**
* @ORM\Column(type="string", length=30)
*/
private $dimension;
/**
* @ORM\Column(type="integer")
*/
private $prix;
/**
* @ORM\Column(type="string", length=255)
*/
private $photo;
/**
* @ORM\Column(type="integer")
*/
private $stock;
/**
* @ORM\ManyToOne(targetEntity=Membre::class, inversedBy="oeuvres")
*/
private $membre;
/**
* @ORM\Column(type="text")
*/
private $description;
public function getId(): ?int
{
return $this->id;
}
public function getCategorie(): ?string
{
return $this->categorie;
}
public function setCategorie(string $categorie): self
{
$this->categorie = $categorie;
return $this;
}
public function getNomOeuvre(): ?string
{
return $this->nom_oeuvre;
}
public function setNomOeuvre(string $nom_oeuvre): self
{
$this->nom_oeuvre = $nom_oeuvre;
return $this;
}
public function getAnnee(): ?\DateTimeInterface
{
return $this->annee;
}
public function setAnnee(\DateTimeInterface $annee): self
{
$this->annee = $annee;
return $this;
}
public function getDimension(): ?string
{
return $this->dimension;
}
public function setDimension(string $dimension): self
{
$this->dimension = $dimension;
return $this;
}
public function getPrix(): ?int
{
return $this->prix;
}
public function setPrix(int $prix): self
{
$this->prix = $prix;
return $this;
}
public function getPhoto(): ?string
{
return $this->photo;
}
public function setPhoto(string $photo): self
{
$this->photo = $photo;
return $this;
}
public function getStock(): ?int
{
return $this->stock;
}
public function setStock(int $stock): self
{
$this->stock = $stock;
return $this;
}
public function getMembre(): ?Membre
{
return $this->membre;
}
public function setMembre(?Membre $membre): self
{
$this->membre = $membre;
return $this;
}
public function getDescription(): ?string
{
return $this->description;
}
public function setDescription(string $description): self
{
$this->description = $description;
return $this;
}
}
<file_sep><?php
namespace App\Controller;
use App\Repository\EvenementRepository;
use Symfony\Component\HttpFoundation\Response;
use Symfony\Component\Routing\Annotation\Route;
use Symfony\Bundle\FrameworkBundle\Controller\AbstractController;
class EventController extends AbstractController
{
#[Route('/event', name: 'event')]
public function index(EvenementRepository $evenementRepository): Response
{
return $this->render('event/index.html.twig', [
'evenements' => $evenementRepository->findAll(),
]);
}
}
<file_sep><?php
namespace App\Controller;
use App\Entity\Panier;
use App\Entity\Oeuvre;
use App\Form\PanierType;
use App\Repository\OeuvreRepository;
use Doctrine\ORM\EntityManagerInterface;
use Symfony\Bundle\FrameworkBundle\Controller\AbstractController;
use Symfony\Component\HttpFoundation\Response;
use Symfony\Component\Routing\Annotation\Route;
use Sensio\Bundle\FrameworkExtraBundle\Configuration\IsGranted;
use Symfony\Component\HttpFoundation\Request;
use Symfony\Component\HttpFoundation\Session\SessionInterface;
class PanierController extends AbstractController
{
#[Route('/panier', name: 'panier')]
#[IsGranted("ROLE_USER")]
public function index(SessionInterface $session, OeuvreRepository $oeuvreRepository): Response
{
$panier = $session->get('panier', []);
$panierAvecDonnees = [];
foreach($panier as $id => $quantite){
$panierAvecDonnees[]=[
'oeuvre' => $oeuvreRepository->find($id),
'quantite' => $quantite
];
}
$total = 0;
foreach($panierAvecDonnees as $item){
$totalItem = $item['oeuvre']->getPrix() * $item['quantite'];
$total += $totalItem;
}
//dd($panierAvecDonnees);
return $this->render('panier/index.html.twig', [
'items' => $panierAvecDonnees,
'total' => $total
]);
}
#[Route('/panier/add/{id}', name: 'ajout_panier')]
#[IsGranted("ROLE_USER")]
public function add($id, SessionInterface $session)
{
$panier = $session->get('panier', []);
if(!empty( $panier[$id] )){
$panier[$id]++;
}else{
$panier[$id] = 1;
}
$session->set('panier', $panier);
return $this->redirectToRoute("panier");
/* php bin/console debug:autowiring session => lists all the services related to the session */
}
#[Route('/panier/remove/{id}', name: 'suppression_panier')]
#[IsGranted("ROLE_USER")]
public function remove($id, SessionInterface $session)
{
$panier = $session->get('panier',[]);
if(!empty($panier[$id])){
unset($panier[$id]);
}
$session->set('panier', $panier);
return $this->redirectToRoute("panier");
}
#[Route('/new', name: 'panier_new', methods: ['GET', 'POST'])]
#[IsGranted("ROLE_USER")]
public function new(Request $request): Response
{
$panier = new Panier();
$form = $this->createForm(PanierType::class, $panier);
$form->handleRequest($request);
if ($form->isSubmitted() && $form->isValid()) {
$entityManager = $this->getDoctrine()->getManager();
$entityManager->persist($panier);
$entityManager->flush();
return $this->redirectToRoute('panier');
}
return $this->render('panier/new.html.twig', [
'panier' => $panier,
'form' => $form->createView(),
]);
}
#[Route('/{id}/edit', name: 'panier_edit', methods: ['GET', 'POST'])]
public function edit(Request $request, Panier $panier): Response
{
$form = $this->createForm(PanierType::class, $panier);
$form->handleRequest($request);
if ($form->isSubmitted() && $form->isValid()) {
$this->getDoctrine()->getManager()->flush();
return $this->redirectToRoute('panier');
}
return $this->render('panier/edit.html.twig', [
'panier' => $panier,
'form' => $form->createView(),
]);
}
}
|
45e057ed625f47c1a34f08b4550138c39715bdc7
|
[
"Markdown",
"PHP"
] | 17
|
PHP
|
milo5634/Art-d-un-temps
|
2afa9b5f7532dc72df7271fd7b6cb0935185cd35
|
6f02430fe83b25f9666c0c982435bf496c454e7d
|
refs/heads/master
|
<file_sep># Transfer-Style-Exercise
Transfer Style training
Doing the exercises suggested in the Advanced Computer Vision course on Udemy by The Lazy Programmer Inc. The course is great, and the instructor explains things so clearly that I was able to grasp the concepts and get ready to dive into other core topics ahead, such as CycleGAN.
<file_sep>from keras.applications.vgg16 import preprocess_input
from keras.preprocessing import image
import numpy as np
from keras.models import Model, Sequential
from keras.applications.vgg16 import VGG16
from keras.layers import MaxPooling2D, AveragePooling2D
#Standard load method
def Load_Image(img_path):
img = image.load_img(img_path)
img = image.img_to_array(img)
img = np.expand_dims(img, axis = 0)
img = preprocess_input(img)
return img
#Load image, resizing if necessary; kept as a separate helper just for clarity
def Load_and_resize(img_path, shape = None):
img = image.load_img(img_path, target_size = shape)
img = image.img_to_array(img)
img = np.expand_dims(img, axis = 0)
img = preprocess_input(img)
return img
#Standard VGG16 average pooling instead of max pooling
def VggAvgPOOL(batch_shape):
vgg = VGG16(include_top = False, weights = 'imagenet', input_shape = batch_shape[1:])
avg = Sequential()
for layer in vgg.layers:
if layer.__class__ == MaxPooling2D:
avg.add(AveragePooling2D())
else:
avg.add(layer)
return avg
#Reverse the VGG16 preprocess_input
def unprocess_input(img):
img[..., 0] += 103.939  # add back the ImageNet channel means (BGR order)
img[..., 1] += 116.779
img[..., 2] += 123.68
img = img[..., ::-1]
return img
#Scale the generated image between 0-1 for plotting
def scale_img(img):
img -= img.min()
img /= img.max()
return img
<file_sep>
#Imports
import tensorflow as tf
import numpy as np
from keras.applications.vgg16 import VGG16
from keras.applications.vgg16 import preprocess_input
from keras.preprocessing import image
from keras.models import Sequential, Model
from content import Content
from style import Style
from keras.layers.convolutional import Conv2D
from keras.layers import MaxPooling2D
from keras.layers import AveragePooling2D
from scipy.optimize import fmin_l_bfgs_b
import keras.backend as K
import utils_c
import matplotlib.pyplot as plt
import matplotlib.image as pltimage
if __name__ == "__main__":
#Paths
img_content_path = 'thiago.jpg'
img_style_path = 'star_night.jpg'
#instantiate the classes
content = Content(img_content_path, 11)
style = Style(img_style_path, content.batch_shape)
content.image = utils_c.Load_and_resize(img_content_path, (style.image.shape[1], style.image.shape[2]))
content.batch_shape = content.image.shape
#VGG
VGGpool = utils_c.VggAvgPOOL(content.batch_shape)
#Merged Model
contentModel = Model(VGGpool.input, VGGpool.layers[13].get_output_at(1))
#Target
content_target = K.variable(contentModel.predict(content.image))
#The layer.get_output_at(1) call refers to the index of the VGGpool's different nets
#Get the output of the first conv of each block for the style
s_outputs = [layer.get_output_at(1) for layer in VGGpool.layers if layer.name.endswith('conv1')]
#Style Model
styleModel = Model(VGGpool.input, s_outputs)
stl_model_ouputs = [K.variable(y) for y in styleModel.predict(style.image)]
#Weights of each block output in the result
style_weights = [0.1,0.2,0.3,0.4,0.5]
#Content Loss
loss = K.mean(K.square(contentModel.output - content_target))
#Sum of the content Loss and the Style Loss
#The style loss is the MSE of the gram-matrix of both outputs
for w, stl, conv in zip(style_weights, s_outputs, stl_model_ouputs):
loss += w*style.style_Loss(conv[0], stl[0])
#Get the gradients
grads = K.gradients(loss, VGGpool.input)
#A Keras function to pass to the multi-diff scipy function
get_loss_and_grads_content = K.function(inputs = [VGGpool.input], outputs = [loss] + grads)
def get_loss_grads_wraper(x):
l, g = get_loss_and_grads_content([x.reshape(*content.batch_shape)])
return l.astype(np.float64), g.flatten().astype(np.float64)
#The image that will be generated
x = np.random.randn(np.prod(content.batch_shape))
losses = []
for i in range(10):
#Call to the scipy.optimize multidiff function
x, l, _ = fmin_l_bfgs_b(func = get_loss_grads_wraper, x0 = x, maxfun = 20)
#Clip x
x = np.clip(x, -127, 127)
losses.append(l)
print("Epoch:{} loss:{}".format(i, l))
#Print the image
final_img = x.reshape(*content.batch_shape)
final_img = utils_c.unprocess_input(final_img)
pltimage.imsave("thiago_star_night2.jpg",utils_c.scale_img(final_img[0]))
plt.imshow(utils_c.scale_img(final_img[0]))
plt.show()<file_sep>
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
from keras.applications.vgg16 import VGG16
from keras.applications.vgg16 import preprocess_input
from pathlib import Path
from keras.layers.convolutional import Conv2D
from keras.layers import AveragePooling2D, MaxPooling2D
from keras.models import Sequential, Model
from keras.preprocessing import image
from scipy.optimize import fmin_l_bfgs_b
import keras.backend as K
import utils_c
#Class that hold the content
class Content:
def __init__(self, img_path, num_convs = None):
self.img_path = img_path
self.image = utils_c.Load_Image(self.img_path)
self.batch_shape = self.image.shape
self.num_convs = num_convs
#A cut off version of VGG16
def VggMinLayers(self):
avg = utils_c.VggAvgPOOL(self.batch_shape)
n = 0
model = Sequential()
for layer in avg.layers:
if layer.__class__ == Conv2D:
n+=1
model.add(layer)
if n >= self.num_convs:
break
return model
# Code to test the Content class: the output image should have no apparent color ("style"),
# but be sharp enough to resemble the input image
if __name__ == "__main__":
img_path = 'cat.jpg'
content = Content(img_path, num_convs=11)
ContentVgg = content.VggMinLayers()
ContentVgg.summary()
target = K.variable(ContentVgg.predict(content.image))
loss = K.mean(tf.square(target - ContentVgg.output))
grads = K.gradients(loss, ContentVgg.input)
get_grads_loss = K.function(inputs = [ContentVgg.input], outputs = [loss] + grads)
def get_grads_loss_wraper(x):
l, g = get_grads_loss([x.reshape(*content.batch_shape)])
return l.astype(np.float64), g.flatten().astype(np.float64)
losses = []
x = np.random.randn(np.prod(content.batch_shape))
print("Starting...")
for i in range(10):
x, l, _ = fmin_l_bfgs_b(func = get_grads_loss_wraper, x0 = x, maxfun=20)
x = np.clip(x, -127, 127)
losses.append(l)
print("Epoch:{} loss:{}".format(i, l))
plt.plot(losses)
plt.show()
shape_x = content.batch_shape[1:]
x = x.reshape(*shape_x)
final_img = utils_c.unprocess_input(x)
plt.imshow(utils_c.scale_img(final_img))
plt.show()
<file_sep>
import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
from keras.preprocessing import image
from keras.layers.convolutional import Conv2D
from keras.layers import MaxPooling2D, AveragePooling2D
from keras.applications.vgg16 import VGG16
from keras.applications.vgg16 import preprocess_input
from pathlib import Path
from keras.models import Sequential, Model
from scipy.optimize import fmin_l_bfgs_b
import keras.backend as K
from datetime import datetime
import utils_c
#Class for the style
class Style:
def __init__(self, img_path, content_shape):
self.img_path = img_path
self.content_shape = content_shape
self.image = utils_c.Load_Image(self.img_path)
self.batch_shape = self.image.shape
#Gram matrix: the correlation of the feature maps with themselves, X.X^T / N
def gram_matrix(self,x):
y = K.batch_flatten(K.permute_dimensions(x, (2,0,1)))
g = K.dot(y, K.transpose(y))/x.get_shape().num_elements()
return g
#The MSE of the target vs the output, computed on their gram matrices
def style_Loss(self, x, y):
return K.mean(K.square(self.gram_matrix(y) - self.gram_matrix(x)))
def train(self, func, epochs, shape):
losses = []
x = np.random.randn(np.prod(shape))
for i in range(epochs):
x, l, _ = fmin_l_bfgs_b(func = func, x0 = x, maxfun = 20)
x = np.clip(x, -127, 127)
losses.append(l)
print("Epoch:{} loss:{}".format(i, l))
n_img = x.reshape(*shape)
final_img = utils_c.unprocess_input(n_img)
return final_img
# Code to check the Style class: final_img should be an image with no sharpness, but showing
# the style of the given image (color pattern, etc.)
if __name__ == "__main__":
img_path = 'star_night.jpg'
style = Style(img_path, None)
print(style.image.shape)
styleVGG = utils_c.VggAvgPOOL(style.batch_shape)
styleVGG.summary()
conv_outputs = [layer.get_output_at(1) for layer in styleVGG.layers if layer.name.endswith('conv1')]
merged_model = Model(styleVGG.input, conv_outputs)
style_outputs = [K.variable(y) for y in merged_model.predict(style.image)]
loss = 0
for stl, conv in zip(style_outputs, conv_outputs):
print(conv[0], stl[0])
loss += style.style_Loss(conv[0], stl[0])
grads_style = K.gradients(loss, merged_model.input)
get_grads_loss = K.function(inputs = [merged_model.input], outputs = [loss] + grads_style)
def get_grads_loss_style_wraper(x):
l, g = get_grads_loss([x.reshape(*style.batch_shape)])
return l.astype(np.float64), g.flatten().astype(np.float64)
print("Starting......")
final_img = style.train(get_grads_loss_style_wraper, 10, style.batch_shape)
plt.imshow(utils_c.scale_img(final_img[0]))
plt.show()
|
edd96cac07db9c83c2356c5001fd5bb5ed59e2f1
|
[
"Markdown",
"Python"
] | 5
|
Markdown
|
thiagoboeker/Transfer-Style-Exercise
|
5f64cb3d3f97a6dcae3f6eca82eada3c51a787df
|
58568a90ec46fe175fb86dac917879c80fab48fc
|
refs/heads/master
|
<repo_name>bayysp/AndroidApiUsingKotlin<file_sep>/app/src/main/java/com/example/administator/footbalapi/Presenter/DetailPresenter.kt
package com.example.administator.footbalapi.Presenter
import com.example.administator.footbalapi.Api.Api
import com.example.administator.footbalapi.Api.TheSportDBApi
import com.example.administator.footbalapi.Model.Team
import com.example.administator.footbalapi.Model.TeamResponse
import com.example.administator.footbalapi.View.Interface.DetailView
import com.google.gson.Gson
import org.jetbrains.anko.doAsync
import org.jetbrains.anko.uiThread
class DetailPresenter(private val view : DetailView,
private val apiRepository : Api,
private val gson: Gson) {
fun getTeamOneIcon(idTeam: String?){
view.showLoading()
doAsync {
val data = gson.fromJson(apiRepository
.doRequest(TheSportDBApi.getTeam(idTeam)),
TeamResponse::class.java
)
uiThread {
view.showTeamOneIcon(data.teams)
}
}
}
fun getTeamTwoIcon(idTeam: String?){
view.showLoading()
doAsync {
val data = gson.fromJson(apiRepository
.doRequest(TheSportDBApi.getTeam(idTeam)),
TeamResponse::class.java
)
uiThread {
view.hideLoading()
view.showTeamTwoIcon(data.teams)
}
}
}
}<file_sep>/app/src/main/java/com/example/administator/footbalapi/Presenter/MainPresenter.kt
package com.example.administator.footbalapi.Presenter
import com.example.administator.footbalapi.Api.Api
import com.example.administator.footbalapi.Api.TheSportDBApi
import com.example.administator.footbalapi.Model.MatchResponse
import com.example.administator.footbalapi.View.Interface.MainView
import com.google.gson.Gson
import org.jetbrains.anko.doAsync
import org.jetbrains.anko.uiThread
class MainPresenter(private val view : MainView,
private val apiRepository : Api,
private val gson: Gson) {
fun getMatchList(typeMatch : String?){
view.showLoading()
doAsync{
val data = gson.fromJson(apiRepository
.doRequest(TheSportDBApi.getMatch(typeMatch)),
MatchResponse::class.java
)
uiThread {
view.hideLoading()
view.showMatchList(data.events)
}
}
}
}<file_sep>/app/src/main/java/com/example/administator/footbalapi/View/Interface/MainView.kt
package com.example.administator.footbalapi.View.Interface
import com.example.administator.footbalapi.Model.Match
interface MainView {
fun showLoading()
fun hideLoading()
fun showMatchList(data : List<Match>)
}<file_sep>/app/src/main/java/com/example/administator/footbalapi/Adapter/PrevAdapter.kt
package com.example.administator.footbalapi.Adapter
import android.annotation.SuppressLint
import android.support.v7.widget.RecyclerView
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import android.widget.TextView
import com.example.administator.footbalapi.Model.Match
import com.example.administator.footbalapi.R
import java.text.SimpleDateFormat
import java.util.*
class PrevAdapter(private val match : List<Match>,
private val listener : (Match) -> Unit) : RecyclerView.Adapter<PrevAdapter.PrevViewHolder>(){
override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): PrevViewHolder {
return PrevViewHolder(LayoutInflater.from(parent?.context).inflate(R.layout.match_item_list,parent,false))
}
override fun getItemCount(): Int {
return match.size
}
override fun onBindViewHolder(holder: PrevViewHolder, position: Int) {
holder.bindItem(match[position],listener)
}
class PrevViewHolder(view: View) : RecyclerView.ViewHolder(view) {
val dateMatch = view.findViewById<TextView>(R.id.date_match)
val teamOne = view.findViewById<TextView>(R.id.team_one_title_match)
val teamTwo = view.findViewById<TextView>(R.id.team_two_title_match)
val teamOneScore = view.findViewById<TextView>(R.id.team_one_point_match)
val teamTwoScore = view.findViewById<TextView>(R.id.team_two_point_match)
@SuppressLint("SimpleDateFormat")
fun bindItem(match: Match, listener: (Match) -> Unit){
val oldSdf = SimpleDateFormat("yyyy-MM-dd")
val newSdf = SimpleDateFormat("EEEE, d MMM yyyy", Locale.getDefault())
val newDate = newSdf.format(oldSdf.parse(match.dateEvent))
dateMatch.text = newDate
teamOne.text = match.strHomeTeam
teamTwo.text = match.strAwayTeam
teamOneScore.text = match.intHomeScore
teamTwoScore.text = match.intAwayScore
itemView.setOnClickListener{
listener(match)
}
}
}
}
<file_sep>/app/src/main/java/com/example/administator/footbalapi/Model/TeamResponse.kt
package com.example.administator.footbalapi.Model
data class TeamResponse(val teams : List<Team>)<file_sep>/app/src/main/java/com/example/administator/footbalapi/Model/Team.kt
package com.example.administator.footbalapi.Model
import com.google.gson.annotations.SerializedName
data class Team (
@SerializedName("strTeamBadge") val strTeamBadge : String
)<file_sep>/app/src/main/java/com/example/administator/footbalapi/View/Activity/MainActivity.kt
package com.example.administator.footbalapi.View.Activity
import android.support.v7.app.AppCompatActivity
import android.os.Bundle
import android.support.design.widget.BottomNavigationView
import android.support.v4.app.Fragment
import com.example.administator.footbalapi.R
import com.example.administator.footbalapi.View.Fragment.MatchFragment
class MainActivity : AppCompatActivity() {
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
supportActionBar?.title = "Prev Match"
val matchFragment = MatchFragment.newInstance("eventspastleague.php")
openFragment(matchFragment)
val bottomNav : BottomNavigationView = findViewById(R.id.main_bottom_nav)
bottomNav.setOnNavigationItemSelectedListener(mOnNavigationItemSelectedListener)
}
private val mOnNavigationItemSelectedListener= BottomNavigationView.OnNavigationItemSelectedListener{
item -> when(item.itemId){
R.id.prev_match ->{
supportActionBar?.title="Prev Match"
val matchFragment = MatchFragment.newInstance("eventspastleague.php")
openFragment(matchFragment)
return@OnNavigationItemSelectedListener true
}
R.id.next_match ->{
supportActionBar?.title="Next Match"
val matchFragment = MatchFragment.newInstance("eventsnextleague.php")
openFragment(matchFragment)
return@OnNavigationItemSelectedListener true
}
}
return@OnNavigationItemSelectedListener false
}
private fun openFragment(fragment: Fragment) {
val transaction = supportFragmentManager.beginTransaction()
transaction.replace(R.id.frame_container,fragment)
transaction.addToBackStack(null)
transaction.commit()
}
}
<file_sep>/app/src/main/java/com/example/administator/footbalapi/View/Interface/DetailView.kt
package com.example.administator.footbalapi.View.Interface
import com.example.administator.footbalapi.Model.Team
interface DetailView {
fun showTeamOneIcon(data : List<Team>)
fun showTeamTwoIcon(data : List<Team>)
fun initData()
fun showLoading()
fun hideLoading()
}<file_sep>/app/src/main/java/com/example/administator/footbalapi/Api/TheSportDBApi.kt
package com.example.administator.footbalapi.Api
import android.net.Uri
import com.example.administator.footbalapi.BuildConfig
object TheSportDBApi {
fun getMatch(typeMatch : String?) : String{
return Uri.parse(BuildConfig.BASE_URL).buildUpon()
.appendPath("api")
.appendPath("v1")
.appendPath("json")
.appendPath(BuildConfig.TDSP_API_KEY)
.appendPath(typeMatch)
.appendQueryParameter("id","4328")
.build()
.toString()
}
fun getTeam(idTeam : String?) : String{
return Uri.parse(BuildConfig.BASE_URL).buildUpon()
.appendPath("api")
.appendPath("v1")
.appendPath("json")
.appendPath(BuildConfig.TDSP_API_KEY)
.appendPath("lookupteam.php")
.appendQueryParameter("id",idTeam)
.build()
.toString()
}
}<file_sep>/app/src/main/java/com/example/administator/footbalapi/View/Fragment/MatchFragment.kt
package com.example.administator.footbalapi.View.Fragment
import android.os.Bundle
import android.support.v4.app.Fragment
import android.support.v4.widget.SwipeRefreshLayout
import android.support.v7.widget.LinearLayoutManager
import android.support.v7.widget.RecyclerView
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import android.widget.ProgressBar
import com.example.administator.footbalapi.Adapter.PrevAdapter
import com.example.administator.footbalapi.Api.Api
import com.example.administator.footbalapi.Model.Match
import com.example.administator.footbalapi.Presenter.MainPresenter
import com.example.administator.footbalapi.R
import com.example.administator.footbalapi.View.Activity.DetailActivity
import com.example.administator.footbalapi.View.Interface.MainView
import com.example.administator.footbalapi.invisible
import com.example.administator.footbalapi.visible
import com.google.gson.Gson
import org.jetbrains.anko.bundleOf
import org.jetbrains.anko.support.v4.ctx
import org.jetbrains.anko.support.v4.startActivity
class MatchFragment : Fragment() , MainView {
private lateinit var rvPrevMatch : RecyclerView
private lateinit var prevAdapter : PrevAdapter
private lateinit var pbPrevMatch : ProgressBar
private lateinit var presenter : MainPresenter
private var match : MutableList<Match> = mutableListOf()
override fun showLoading() {
pbPrevMatch.visible()
}
override fun hideLoading() {
pbPrevMatch.invisible()
}
override fun showMatchList(data: List<Match>) {
match.clear()
match.addAll(data)
prevAdapter.notifyDataSetChanged()
}
override fun onCreateView(inflater: LayoutInflater, container: ViewGroup?,
savedInstanceState: Bundle?): View? {
return inflater.inflate(R.layout.fragment_match, container, false)
}
override fun onViewCreated(view: View, savedInstanceState: Bundle?) {
super.onViewCreated(view, savedInstanceState)
rvPrevMatch = view.findViewById(R.id.prev_rv_match)
pbPrevMatch = view.findViewById(R.id.prev_progress_bar)
prevAdapter = PrevAdapter(match){
startActivity<DetailActivity>("match" to it)
}
rvPrevMatch.layoutManager = LinearLayoutManager(ctx)
rvPrevMatch.adapter =prevAdapter
val request = Api()
val gson = Gson()
presenter = MainPresenter(this,request,gson)
presenter.getMatchList(arguments?.getString("typeMatch"))
}
companion object {
@JvmStatic
fun newInstance(state : String) =
MatchFragment().apply {
// pass the match type to the fragment through its arguments Bundle
arguments = Bundle().apply { putString("typeMatch", state) }
}
}
}
<file_sep>/app/src/main/java/com/example/administator/footbalapi/Api/Api.kt
package com.example.administator.footbalapi.Api
import java.net.URL
class Api {
fun doRequest(url : String) : String{
return URL(url).readText()
}
}<file_sep>/app/src/main/java/com/example/administator/footbalapi/View/Activity/DetailActivity.kt
package com.example.administator.footbalapi.View.Activity
import android.support.v7.app.AppCompatActivity
import android.os.Bundle
import android.view.MenuItem
import com.example.administator.footbalapi.Api.Api
import com.example.administator.footbalapi.Model.Match
import com.example.administator.footbalapi.Model.Team
import com.example.administator.footbalapi.Presenter.DetailPresenter
import com.example.administator.footbalapi.Presenter.MainPresenter
import com.example.administator.footbalapi.R
import com.example.administator.footbalapi.View.Interface.DetailView
import com.example.administator.footbalapi.invisible
import com.example.administator.footbalapi.visible
import com.google.gson.Gson
import com.squareup.picasso.Picasso
import kotlinx.android.synthetic.main.activity_detail.*
import java.text.SimpleDateFormat
import java.util.*
class DetailActivity : AppCompatActivity(), DetailView {
private lateinit var matchModel : Match
private lateinit var presenter : DetailPresenter
override fun showTeamOneIcon(data: List<Team>) {
Picasso.get().load(data[0].strTeamBadge).into(iv_team_one_badge)
}
override fun showTeamTwoIcon(data: List<Team>) {
Picasso.get().load(data[0].strTeamBadge).into(iv_team_two_badge)
}
override fun onOptionsItemSelected(item: MenuItem?): Boolean {
return if (item?.itemId == android.R.id.home) {
finish()
true
} else {
super.onOptionsItemSelected(item)
}
}
override fun initData() {
val oldSdf = SimpleDateFormat("yyyy-MM-dd")
val newSdf = SimpleDateFormat("EEEE, d MMM yyyy" , Locale.getDefault())
val newDate = newSdf.format(oldSdf.parse(matchModel.dateEvent))
supportActionBar?.title = matchModel.strHomeTeam?.toString() + " vs " + matchModel.strAwayTeam?.toString()
date_match.text = newDate
team_one_title_match.text = matchModel.strHomeTeam?.toString()
team_two_title_match.text = matchModel.strAwayTeam?.toString()
team_one_point_match.text = matchModel.intHomeScore?.toString()
team_two_point_match.text = matchModel.intAwayScore?.toString()
team_one_goal.text = matchModel.strHomeGoalDetails?.toString()
team_two_goal.text = matchModel.strAwayGoalDetails?.toString()
team_one_shoots.text = matchModel.intHomeShots?.toString()
team_two_shoots.text = matchModel.intAwayShots?.toString()
team_one_red_card.text = matchModel.strHomeRedCards?.toString()
team_two_red_card.text = matchModel.strAwayRedCards?.toString()
team_one_yellow_card.text = matchModel.strHomeYellowCards?.toString()
team_two_yellow_card.text = matchModel.strAwayYellowCards?.toString()
team_one_goal_keeper.text = matchModel.strHomeLineupGoalkeeper?.toString()
team_two_goal_keeper.text = matchModel.strAwayLineupGoalkeeper?.toString()
team_one_defense.text = matchModel.strHomeLineupDefense?.toString()
team_two_defense.text = matchModel.strAwayLineupDefense?.toString()
team_one_midfield.text = matchModel.strHomeLineupMidfield?.toString()
team_two_midfield.text = matchModel.strAwayLineupMidfield?.toString()
team_one_forward.text = matchModel.strHomeLineupForward?.toString()
team_two_forward.text = matchModel.strAwayLineupForward?.toString()
team_one_subtituties.text = matchModel.strHomeLineupSubstitutes?.toString()
team_two_subtituties.text = matchModel.strAwayLineupSubstitutes?.toString()
}
override fun showLoading() {
team_one_pb.visible()
team_two_pb.visible()
}
override fun hideLoading() {
team_one_pb.invisible()
team_two_pb.invisible()
}
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_detail)
supportActionBar?.setDisplayHomeAsUpEnabled(true)
val request = Api()
val gson = Gson()
matchModel = intent.getParcelableExtra("match")
initData()
presenter = DetailPresenter(this,request,gson)
presenter.getTeamOneIcon(matchModel.idHomeTeam.toString())
presenter.getTeamTwoIcon(matchModel.idAwayTeam.toString())
}
override fun onSupportNavigateUp(): Boolean {
onBackPressed()
return super.onSupportNavigateUp()
}
}
<file_sep>/app/src/main/java/com/example/administator/footbalapi/Model/MatchResponse.kt
package com.example.administator.footbalapi.Model
data class MatchResponse( val events : List<Match>)
|
d32a68ce5b496f500ebc5709f56cc037e6903962
|
[
"Kotlin"
] | 13
|
Kotlin
|
bayysp/AndroidApiUsingKotlin
|
bf487e7ae8c3a1cd7162dc74a84a84ebb6c5e1ef
|
20dae6282a28c51c9e757432b30ec8bfb20b6e07
|
refs/heads/master
|
<repo_name>GraphBLAS/LAGraph-Working-Group<file_sep>/minutes/2020-04-15.md
# LAGraph Working Group Meeting Minutes - April 15, 2020
## Attendees
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [ ] <NAME>
## Agenda and Minutes
**Agenda** by ___
**Minutes** by <NAME>
- [X] LAGraph Matrix object discussion
> Should the object be opaque? If we make it opaque, we can make changes to its structure later without breaking implementations down the road.
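As a minimal illustration only (not a decision recorded in these minutes, and assuming `GraphBLAS.h` is included), an opaque handle would mean the public header exposes just a pointer typedef, while the struct layout stays internal and can change without breaking callers:

```c
/* public header: callers only see the handle */
typedef struct LAGraph_Graph_struct *LAGraph_Graph ;

/* internal header: the layout can evolve freely */
struct LAGraph_Graph_struct
{
    GrB_Matrix A ;      /* the adjacency matrix */
    /* ... kind, cached properties, etc. ... */
} ;
```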
<file_sep>/minutes/2020-10-14.md
# LAGraph Working Group Meeting Minutes - October 14, 2020
## Attendees
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
## Minutes
### Discussion on connected components
The current connected components algorithm uses code outside of GraphBLAS to sample the graph. It extracts rows and takes e.g. 4 elements from the beginning and the end of the row.
Currently, the select operation (both GxB and the upcoming GrB) is only aware of the 'absolute' index of the element's row/column, not its relative one. It would be possible to create such a select operation, which would make CC much cleaner (essentially implemented in plain GraphBLAS).
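A minimal sketch of the kind of out-of-GraphBLAS row sampling described above; the helper name, the `uint64_t` value type, and the sample size `k` are illustrative assumptions, not the actual CC code:

```c
#include <GraphBLAS.h>
#include <stdlib.h>

// extract row i of the n-by-n matrix A, then look only at its first and last k entries
void sample_row (GrB_Matrix A, GrB_Index i, GrB_Index n, GrB_Index k)
{
    GrB_Vector r = NULL ;
    GrB_Vector_new (&r, GrB_UINT64, n) ;
    // r = A (i,:) : extract one row by transposing the matrix input
    GrB_Col_extract (r, NULL, NULL, A, GrB_ALL, n, i, GrB_DESC_T0) ;
    GrB_Index nvals ;
    GrB_Vector_nvals (&nvals, r) ;
    GrB_Index *J = malloc (nvals * sizeof (GrB_Index)) ;
    uint64_t  *X = malloc (nvals * sizeof (uint64_t)) ;
    GrB_Vector_extractTuples_UINT64 (J, X, &nvals, r) ;
    // ... sample up to k indices from the front of J and up to k from its end ...
    free (J) ; free (X) ;
    GrB_free (&r) ;
}
```

A select operator that could see the relative position of an entry within its row would let this sampling stay inside GraphBLAS.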
### Testing
We need to translate the Graph500 test eventually. (Long discussion on testing...)
<file_sep>/LAGraph.h
// LAGraph.h
#define LAGRAPH_ERROR(error_message,info) \
{ \
if (message != NULL) strcpy (message, error_message) ; \
LAGRAPH_FREE_ALL ; \
return (info) ; \
}
// helpers to turn __LINE__ into a string literal for the error message
#define LAGRAPH_XSTR(x) LAGRAPH_STR(x)
#define LAGRAPH_STR(x) #x
// int info ; must be declared to use this macro
#define LAGraph_TRY(LAGraph_or_GrB_method) \
{ \
info = (LAGraph_or_GrB_method) ; \
if (info < 0) \
{ \
LAGRAPH_ERROR (__FILE__ " line " LAGRAPH_XSTR(__LINE__), info) ; \
} \
}
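// Illustrative usage sketch only (names and the buffer size are placeholders,
// not part of the design above): a caller defines LAGRAPH_FREE_ALL for its own
// workspace, declares "int info" and a "message" buffer, and wraps each call:
//
//      #define LAGRAPH_FREE_ALL { GrB_free (&v) ; }
//      char message [256] ;
//      int info ;
//      GrB_Vector v = NULL ;
//      LAGraph_TRY (GrB_Vector_new (&v, GrB_INT64, n)) ;
//      LAGraph_TRY (GrB_assign (v, NULL, NULL, 0, GrB_ALL, n, NULL)) ;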
// LAGraph_internal.h
#include "LAGraph.h"
#define TRY(method) LAGraph_TRY(method)
typedef enum
{
// enum:
// adjacency
// undirected
// directed
// bipartite
// one kind so far
// later: incidence
// A(i,j) is the edge (i,j)
// undirected: A is square, symmetric (both tril and triu present)
// directed: A is square, unsymmetric or might happen to symmetric
// bipartite: A is rectangular (later) or might happen to be square
LAGRAPH_UNKNOWN = 0,
LAGRAPH_ADJACENCY_UNDIRECTED = 1,
LAGRAPH_ADJACENCY_DIRECTED = 2,
// LAGRAPH_ADJACENCY_UNDIRECTED_TRIL = ...,
// LAGRAPH_ADJACENCY_UNDIRECTED_TRIU = ...,
// LAGRAPH_BIPARTITE = ...,
// LAGRAPH_BIPARTITE_DIRECTED = ...,
// LAGRAPH_BIPARTITE_UNDIRECTED = ...,
// LAGRAPH_INCIDENCE_* = ...,
// LAGRAPH_MULTIGRAPH_* = ...,
// LAGRAPH_HYPERGRAPH = ...,
// LAGRAPH_HYPERGRAPH_DIRECTED = ...,
// for example:
//
// 5 node7 8 11 ...
// ith row: [ -1 -1 1 -1 -1 1 ] ith edge
// ...
}
LAGraph_Kind ;
// LAGraph graph
typedef struct LAGraph_Graph_struct
{
GrB_Matrix A ; // the graph itself:
LAGraph_Kind kind ; // the kind of graph:
// possible future:
// multigraph ..
// GrB_Matrix *Amult ; // array of size nmatrices
// int nmatrices ;
//-----------------------------------------------------------
// cached:
// if present, they are correct.
// to invalidate: delete them
// TODO: write a utility to clear all cache
// when does an algorithm choose to (a) *update* these? (b) delete?
// default: algo decides
// others modes?
// can be recomputed at any time via utility functions,
// or freed at will (if Graph is input/output to a method)
GrB_Matrix AT ;
GrB_Vector rowdegree ;
GrB_Vector coldegree ;
// consider: an enum A_is_symmetric: yes, no, don't know
// AT = A' regardless of kind
// rowdegree (i) = nvals (A (i,:)), regardless of kind
// coldegree (j) = nvals (A (:,j)), regardless of kind
// possible future cached properties:
// GrB_Vector rowsum, colsum ;
// rowsum (i) = sum (A (i,:)), regardless of kind
// colsum (j) = sum (A (:,j)), regardless of kind
}
LAGraph_Graph_struct ;
typedef struct LAGraph_Graph_struct *LAGraph_Graph ;
// TODO: discuss sanity checking.
// what if A is unsymmetric, but the graph is labeled "undirected"?
// error? use A+A' ?
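// Illustrative only (no constructor is defined in these notes): a user could
// build a graph object by hand from an existing GrB_Matrix A, leaving all
// cached properties empty until they are computed:
//
//      LAGraph_Graph G = malloc (sizeof (struct LAGraph_Graph_struct)) ;
//      G->A = A ;                              // the graph itself
//      G->kind = LAGRAPH_ADJACENCY_DIRECTED ;  // or _UNDIRECTED, etc.
//      G->AT = NULL ;                          // cached: not yet computed
//      G->rowdegree = NULL ;
//      G->coldegree = NULL ;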
// LAGraph_Algorithm_Variant ?
// GrB_Info LAGraph_BreadthFirstSearch_variant
// or
// rule: NULL vs GrB_NULL: use NULL (discuss this later)
// rule: if any output (*arg) is NULL, do not compute
// rule: if any input arg is NULL: use default, don't use it, etc
// rule: output vectors and matrices are "new'd" by the LAGraph function:
// usage:
GrB_Vector level ;
LAGraph_BreadthFirstSearch (&level, NULL, ...)
//------------------------------------------------------------------------------
// ALGO: breadth first search
//------------------------------------------------------------------------------
// Policy: in general, if an input *x is NULL, do not compute
// TODO: hops for all methods?
GrB_Vector level ;
GrB_Vector parent ;
int LAGraph_BreadthFirstSearch // no _Variant suffix
(
// outputs:
GrB_Vector *level, // if NULL do not compute
GrB_Vector *parent, // if NULL do not compute
// inputs:
LAGraph_Graph G,
GrB_Index source,
char *message
) ;
GrB_Vector parent ; // undefined
result = LAGraph_BreadthFirstSearch (NULL, &parent, G, 0, NULL) ;
// if result is bad, parent is returned as NULL
int LAGraph_BreadthFirstSearch_MultiSource
(
// outputs:
GrB_Matrix *level, // if NULL do not compute
GrB_Matrix *parent, // if NULL do not compute
// inputs:
LAGraph_Graph G,
GrB_Index *sources, size_t nsources, // or LAGraph_array? GrB_array?
char *message
) ;
info = LAGraph_whatever (&level, NULL, G, Src, 5) ;
// if info is 'error' then level is returned as level == NULL
// expert function:
GrB_Info LAGraph_BreadthFirstSearch_Frontier
(
// input/output:
GrB_Vector level,
GrB_Vector parent,
GrB_Vector frontier,
// inputs:
LAGraph_Graph G,
GrB_Vector vertex_mask, // filter the vertices
bool vertex_mask_complement,
bool vertex_mask_structural,
// or:
// GrB_Descriptor desc, // mask complement, mask structural:
// // NULL, GrB_DESC_C, GrB_DESC_S, or GrB_DESC_SC
uint64_t hops // if hops=0, no limit (or INT64_MAX), or "NONE"
) ;
//------------------------------------------------------------------------------
// ALGO: connected components:
//------------------------------------------------------------------------------
// TODO: utility to do SCC = Comm * G * Comm'
// TODO: utility to do Gnew = Perm * G * Perm' ; or Gnew = G (P,P)
// TODO: utility to convert Perm matrix <==> perm "vector" (array?) GrB_Index*
// SCC graph = Community * G * Community', a contracted graph
GrB_Info LAGraph_Community_Map
(
// output:
uint64_t *ncomponents,
GrB_Vector *component, // vertex i is in component(i), a dense vector.
// input:
GrB_Matrix Community // Community(c,i)=1 becomes c=component(i)
) ;
// Gnew = Perm * G *Perm', then Gnew is block diagonal
GrB_Info LAGraph_Community_Permutation
(
// output:
GrB_Matrix *Perm, // Perm(...), n-by-n
// input:
GrB_Matrix Community // Community(c,i)=1 becomes c=component(i)
) ;
GrB_Info LAGraph_ConnectedComponents
(
// output:
GrB_Matrix *Community, // k-by-n if there are k components. GrB_BOOL.
// Community(c,i)=1 if c=component(i)
// TODO: or n-by-k?
// input:
LAGraph_Graph G // if directed: strongly cc
) ;
GrB_Info LAGraph_ConnectedComponents_Weakly
(
// output:
GrB_Matrix *Community, // k-by-n if there are k components. GrB_BOOL.
// Community(c,i)=1 if c=component(i)
// input:
LAGraph_Graph G
) ;
//------------------------------------------------------------------------------
// ALGO: centrality:
//------------------------------------------------------------------------------
// TODO: utility to remove self loops, or add all self-loops (G=G+I), ...
// TODO: utility like trace(G), trace-like methods
// TODO: G =G+G', G = spones(G)
// random number generator? vertex sampler?
// exact vs approx
// TODO make 2 different typedef enums, for VertexC. and EdgeC.
typedef enum
{
LAGR_BETWEENNESS_FP32 = 4,
LAGR_BETWEENNESS_FP64 = 5,
...
}
LAGraph_VertexCentrality_Type ;
// this: easy mode
GrB_Vector centrality ; // uninit
LAGraph_something (&centrality, ...)
// then for 'expert' mode
GrB_Vector x ; //
LAGraph_something_init (&x, ...) or GrB_Vector_new (&x, n, 1, INT32)
for a billion times:
LAGraph_something_tiny (x, ...) // reuse it, might be faster
GrB_Info LAGraph_VertexCentrality
(
// output:
GrB_Vector *centrality,
// input:
LAGraph_Graph G,
LAGraph_VertexCentrality_Type kind
// betweenness, eigenvector, degree, pagerank, ...
// which pagerank? parameters to pagerank?
// LAGR_BETWEENNESS_FP32,
// LAGR_BETWEENNESS_FP64
) ;
LAGraph_VertexCentrality (&degree, G, LAGR_OUTDEGREE) ;
// utility function:
LAGraph_Degree (&degree, G, kind)
// or:
LAGraph_Degree_in (&degree, G)
LAGraph_Degree_out (&degree, G) // ... etc
LAGraph_Degree_etc (&degree, G) // ... etc
GrB_Info LAGraph_EdgeCentrality
(
// output:
GrB_Matrix *Centrality,
// input:
LAGraph_Graph G,
LAGraph_EdgeCentrality_Type kind
// betweeness, eigenvector, degree, pagerank, ...
// LAGR_BETWEENNESS_FP32,
// LAGR_BETWEENNESS_FP64
) ;
//------------------------------------------------------------------------------
// ALGO: shortest paths:
//------------------------------------------------------------------------------
GrB_Info LAGraph_ShortestPath_[...]
GrB_Info LAGraph_ShortestPath_SingleSource
(
// output: if NULL do not compute
GrB_Vector *distance, // type: INT64, FP32, or FP64
// INT64: if G is int*
// UINT64: if G is bool, uint*
// FP32: if G is FP32
// FP64: if G is FP64
GrB_Vector *parent, // tree
GrB_Vector *hops, // # of hops, level from source (call it "level")?
// input:
GrB_Index source,
LAGraph_Graph G,
// error handling (every LAGraph function has this last):
char *message
) ;
// negative-weight-cycle: result is not defined, must stop!
// report info < 0 (an error)
GrB_Info LAGraph_ShortestPath_AllPairs
(
// output:
GrB_Matrix *Distance,
GrB_Matrix *Parent,
GrB_Matrix *Hops,
// input:
LAGraph_Graph G,
// input/output
char *message
) ;
// Dist, Parent undefined on input, created on output, just like GrB_Matrix_new
info = LAGraph_something (&Dist, &Parent, NULL, G) ;
//------------------------------------------------------------------------------
// final design for error handling:
//------------------------------------------------------------------------------
// but is LAGRAPH_MESSAGE_LENGTH too wordy?
#define LAGRAPH_MESSAGE_LENGTH 256
char message [LAGRAPH_MESSAGE_LENGTH] ;
int info = LAGraph_ShortestPath_AllPairs (Dis, Pa, Hop, G, message) ;
// convention: 0:ok, < error, > warning.
// if no message, we set message [0] = '\0' (if not NULL)
// no message:
int info = LAGraph_ShortestPath_AllPairs (Dis, Pa, Hop, G, NULL) ;
// LAGraph info: not an enum, just an int
//------------------------------------------------------------------------------
// what's next?
//------------------------------------------------------------------------------
// GAP: BFS, SSSP, TriangleCount, Conn.Components, PageRank, BetweennessCentrality
// LDBC: Local Clustering Coef, CDLP, different PageRank
// Luby's MIS
// minimum spanning forest
// k-truss
// max flow
// Louvain community detection
// Notation
// Doxygen
//------------------------------------------------------------------------------
// ALGO: triangle counting
//------------------------------------------------------------------------------
// LAGraph_Graph G: is a read-only object if "input"
int LAGraph_TriangleCount
(
uint64_t *ntriangles, // # of triangles
// input:
LAGraph_Graph G,
// input/output:
char *message
) ;
int LAGraph_TriangleCount_expert
(
uint64_t *ntriangles, // # of triangles
// input:
int method,
LAGraph_Graph G,
// input/output:
char *message
) ;
// TODO: # triangles incident on each node/edge
//------------------------------------------------------------------------------
// ALGO: k-truss
//------------------------------------------------------------------------------
// this should be OK: G = func(G); we know G is aliased in/out
LAGraph_anything (/* out: */ &G, k, /* in: */ G, &message) ;
G = NULL ; // make a new G
LAGraph_anything (/* out: */ &G, k, /* in: */ H, &message) ;
LAGraph_Graph G ;
LAGraph_anything (/* out: */ &G, k, /* in: */ G, &message) ;
LAGraph_Graph Gnew = LAGraph_create ( ) ;
LAGraph_anything (/* out: */ Gnew, k, /* in: */ G, &message) ;
int LAGraph_Ktruss_next
(
// input/output:
LAGraph_Graph Gnext,
// input:
uint64_t k,
LAGraph_Graph Gprev, // (k-1)-truss on input, with support A(i,j)
// input/output:
char *message
) ;
LAGraph_Graph K = NULL ;
int LAGraph_Ktruss_all
(
// output:
LAGraph_Graph *K, // an array of LAGraph_Graph, each with support
// output:
uint64_t *kmax,
LAGraph_Graph G, // just a graph
// input/output:
char *message
) ;
// k = 12 on output:
// K[3], ... K[11].
<file_sep>/minutes/2020-06-24.md
# LAGraph Working Group Meeting Minutes - June 24, 2020
## Attendees
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [X] <NAME>
## Agenda and Minutes
**Agenda** by <NAME>
**Minutes** by <NAME>
- [X] Questions presented but not fully resolved
> What is the name of out LAGraph graph type? LAGraph_graph? graph_t?
> For naming our functions, do we have a base name (LAGraph_bc) and a descriptor to select variants? Or do we have distinctly named functions (LAGraph_bc_vertex, LAGraph_bc_edge)? There was a preference for distinguishing the cases by function name. This is important since the arguments might be completely different between cases.
- [X] Functions in LAGraph
> Scott provided us with a nice spreadsheet that summarized the functions currently in LAGraph. The purpose of this spreadsheet was to help us have more concrete discussions as we move into the details of designing the API. We didn't get very far in the list. We hit betweenness centrality and had a long conversation about the variants of this algorithm. Here are just a few of the cases:
> - Degree centrality
> - Closeness centrality
> - Edge centrality: output a matrix with the number of shortest paths passing through each edge
> - Vertex BC: output a vector with the number of shortest paths passing through a vertex
> Edge centrality raises an interesting case. The output is a value for each edge. In a GraphBLAS mindset, that would be a matrix, which is how we represent a graph. So should the output from edge centrality be a graph? A list of tuples defining each edge and its centrality measure? A sorted list of tuples? A GraphBLAS object? We did not reach final resolution, though a strong subset of the group favored making it an LAGraph graph type; we would then provide a separate function to pull out the "top k" items from a matrix.
- [X] Coarsening and Expanding graphs
> Another topic of discussion was the need for functions to coarsen a graph (combine subsets of nodes together to form a representation of the graph with fewer vertices) and the inverse function to expand the graph. This is used in algorithms that follow a divide-and-conquer pattern: coarsen until the graph is small enough for direct computation and then expand back out to the original vertex set. This is used, for example, in graph partitioning algorithms. (A minimal GraphBLAS sketch of the contraction step appears at the end of these minutes.)
- [X] Homework
> We need to come up with a full list of all the methods we want in the released version of LAGraph. We need this to keep our conversations grounded and move us to resolution on a released artifact from all our meetings.
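Appendix to the coarsening discussion above, a minimal sketch with illustrative names and types only (not an agreed API): if `S` is a k-by-n matrix with `S(c,i)=1` when vertex `i` maps to coarse vertex `c`, the contracted graph is the triple product `C = S*A*S'`:

```c
#include <GraphBLAS.h>

// C = S * A * S' : contract an n-vertex graph A down to k coarse vertices
void coarsen (GrB_Matrix *C, GrB_Matrix S, GrB_Matrix A, GrB_Index k, GrB_Index n)
{
    GrB_Matrix T = NULL ;
    GrB_Matrix_new (&T, GrB_FP64, k, n) ;
    GrB_Matrix_new (C, GrB_FP64, k, k) ;
    GrB_mxm (T, NULL, NULL, GrB_PLUS_TIMES_SEMIRING_FP64, S, A, NULL) ;
    // transpose the second input to compute T * S'
    GrB_mxm (*C, NULL, NULL, GrB_PLUS_TIMES_SEMIRING_FP64, T, S, GrB_DESC_T1) ;
    GrB_free (&T) ;
}
```

Edge weights inside a coarse vertex end up on the diagonal of `C`; the expand step would map results on the k coarse vertices back to the original n vertices through `S'`.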
<file_sep>/minutes/2020-08-27.md
# LAGraph Working Group Meeting Minutes - August 27, 2020
## Attendees
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
## Minutes
**Minutes** by <NAME> (and <NAME>)
### Last meeting minutes reviewed.
### Revisited ownership of output
The issue comes down to the question of who allocates memory for output objects. Does this happen inside a method, or does the user own memory allocation for output objects? For "easy mode" you don't want users to estimate the size needed to hold a result and allocate the memory. But there are numerous algorithms where you call an LAGraph method inside an iterative control structure; if space is allocated for the output objects each time a method is called, it could add a great deal of overhead. We need a consistent policy for memory allocation of output objects.
We discussed the existing idiom where the methods take a pointer to `GrB_Vector` (for example) and the algorithm allocates the space and type by calling something like `GrB_Vector_new`. If the vector was already allocated the reference to that other data will be lost.
We discussed an alternative where the `GrB_Vector` is set to `NULL` before calling the algorithm. This approach was ultimately rejected, as it conflicts with other approaches.
We discussed an alternative where the `GrB_Vector` is allocated outside of the algorithm and passed in. Allocation would be performed according to the documentation of the algorithm, and the vector would be passed by value (not by pointer). This approach is currently used by `IN/OUT` parameters. It will also likely be used by many other expert-mode algorithms (especially those focusing on high performance).
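A minimal sketch of the two idioms, with all function names hypothetical (placeholders, not the LAGraph API):

```c
#include <GraphBLAS.h>

// hypothetical prototypes, for illustration only
int LAGraph_SomeAlgorithm    (GrB_Vector *out, GrB_Matrix A) ;
int LAGraph_SomeExpertKernel (GrB_Vector  out, GrB_Matrix A) ;

void example (GrB_Matrix A, GrB_Index n, int maxiter)
{
    // (1) method allocates: the caller passes a pointer to an uninitialized
    //     GrB_Vector and the algorithm calls GrB_Vector_new internally
    GrB_Vector level ;
    LAGraph_SomeAlgorithm (&level, A) ;

    // (2) caller allocates: the vector is created once, per the method's
    //     documentation, passed by value, and reused inside an iterative loop
    GrB_Vector work ;
    GrB_Vector_new (&work, GrB_INT64, n) ;
    for (int iter = 0 ; iter < maxiter ; iter++)
    {
        LAGraph_SomeExpertKernel (work, A) ;
    }
    GrB_free (&work) ;
    GrB_free (&level) ;
}
```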
### Shortest paths revisited
The single-source, single-destination version has been removed from consideration for the "easy" mode. It is unknown whether such a version is reasonable given the optimization 'nature' of the SSSP algorithm.
The multi-source version (analogous to a batch mode BC algorithm) was also removed from consideration for the "easy" mode.
The only two versions that currently survive are:
General signature `GrB_Info LAGraph_ShortestPath_[...]`, where `[...]` might be:
* `SingleSource` (see the signature in [`LAGraph.h`](../LAGraph.h))
* `AllPairs`: expensive but it's a well-known linear algebra-based algorithm (Floyd–Warshall)
One issue raised but not resolved pertains to the stop condition on shortest-path algorithms. Since this is an optimization algorithm, it can get stuck. Do we need to add an explicit stop condition?
A second issue with shortest-path methods (especially for all-pairs): what do you return if there is no path? We can't use zero, since in a weighted graph you may have paths that sum to zero. Do we return MAXINT? Inf? Something else? We did not resolve this issue and will bring it up again later.
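To make the linear-algebraic framing above concrete, here is one relaxation step over the min-plus semiring, the building block that such shortest-path formulations repeat until the distances stop improving (a sketch only, not the LAGraph implementation; `d` holds tentative distances from the source and `A` holds the edge weights):

```c
#include <GraphBLAS.h>

// one relaxation step: d = min (d, d min.+ A)
void relax_once (GrB_Vector d, GrB_Matrix A)
{
    GrB_vxm (d, NULL, GrB_MIN_FP64, GrB_MIN_PLUS_SEMIRING_FP64, d, A, NULL) ;
}
```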
### Next up (again)
Reviewing the GAP / LDBC Graphalytics algorithms.
<file_sep>/minutes/2020-03-18.md
# LAGraph Working Group Meeting Minutes - March 18, 2020
## Attendees
- [X] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [X] <NAME>
## Agenda and Minutes
**Agenda** by ___
**Minutes** by <NAME>
- [X] Should we have a blocking/non-blocking mode?
> In short, no. LAGraph is assumed to operate in blocking mode. Within LAGraph, there can be plenty of exploitation of GraphBLAS blocking/non-blocking, but LAGraph will not have a separate blocking/non-blocking state.
- [X] Should we have an asynchronous mode?
> Potentially in the future, but not at first. We may, in the future, add asynchronous versions of LAGraph functions with slightly different (a few added characters, like \_async) function signatures.
- [X] How do we handle errors? Do we include an error flag as an argument to a function or as a returned value? We are currently returning a GrB_Info value. Do we need an LAGraph_info? Does integration with Python favor one approach over the other?
> Generally, GraphBLAS errors are pretty good, but LAGraph algorithms will have algorithm-specific errors as well.
- [ ] Is there an LAGraph context? Or do we just use the GraphBLAS context? Do we pass the context to each individual library routine (sort of like what we do when we call MPI routines)?
- [ ] What rules do we use to generate a function name from the mathematical operation?
- [ ] Do we need a nonpolymorphic interface as we have for the GraphBLAS?
<file_sep>/minutes/2022-02-16.md
Topics/Minutes, Feb 16, 2022
* user guide: see also Gabor's Feb 2021 draft
( https://github.com/GraphBLAS/LAGraph-Docs ).
need discussion on how experimental gets promoted to src.
* test, document, polish all src & experimental: algorithms, utilities,
and tests.
* see new cached properties: G->emin, G->emax, for SSSP
* CC is not Basic: needs G->structure_is_symmetric
* add Basic SSSP to pick Delta by itself?
No. Do the Basic method later (not v1.0)
* naming convention needed to distinguish Basic vs Advanced methods
LAGraph_BreadthFirstSearch
LAGraph_BreadthFirstSearch_...
LAGraph_VertexCentrality_Betweenness
LAGraph_VertexCentrality_Betweenness__Stuff (two underscores
denotes Advanced)?
not discussed:
* remove LAGraph_VertexCentrality?
* CC: G->A is temporarily modified (unpacked then repacked), returned
the same as on input.
Need new GxB methods in GraphBLAS for CC, ideally for v1.0 of LAGraph:
GxB_select with GxB_RankUnaryOp, and GxB_extract with GrB_Vectors as
inputs instead of (GrB_Index *) arrays.
Or do we use CC as-is and Tim adds the GxB methods later?
<file_sep>/LAGraph_bfs_parent2.c
//------------------------------------------------------------------------------
// LAGraph_bfs_parent: push-pull breadth-first search
//------------------------------------------------------------------------------
/*
LAGraph: graph algorithms based on GraphBLAS
Copyright 2020 LAGraph Contributors.
SPDX-License-Identifier: BSD-2
(see Contributors.txt for a full list of Contributors; see
ContributionInstructions.txt for information on how you can Contribute to
this project).
All Rights Reserved.
NO WARRANTY. THIS MATERIAL IS FURNISHED ON AN "AS-IS" BASIS. THE LAGRAPH
CONTRIBUTORS MAKE NO WARRANTIES OF ANY KIND, EITHER EXPRESSED OR IMPLIED,
AS TO ANY MATTER INCLUDING, BUT NOT LIMITED TO, WARRANTY OF FITNESS FOR
PURPOSE OR MERCHANTABILITY, EXCLUSIVITY, OR RESULTS OBTAINED FROM USE OF
THE MATERIAL. THE CONTRIBUTORS DO NOT MAKE ANY WARRANTY OF ANY KIND WITH
RESPECT TO FREEDOM FROM PATENT, TRADEMARK, OR COPYRIGHT INFRINGEMENT.
Released under a BSD license, please see the LICENSE file distributed with
this Software or contact <EMAIL> for full terms.
Created, in part, with funding and support from the United States
Government. (see Acknowledgments.txt file).
This program includes and/or can make use of certain third party source
code, object code, documentation and other files ("Third Party Software").
See LICENSE file for more details.
*/
//------------------------------------------------------------------------------
// LAGraph_bfs_parent2: direction-optimized push/pull breadth first search,
// contributed by <NAME>, Texas A&M. Computes only the BFS tree.
// Requires SuiteSparse:GraphBLAS v4.0.1.
// Usage:
// info = LAGraph_bfs_parent2 (&pi, A, AT, Degree, source) ;
// GrB_Vector *pi: a vector containing the BFS tree, in 0-based indexing.
// pi(source) = source for source node. pi(i) = p if p is the parent
// of i. If pi(i) is not present, then node i has not been reached.
// GrB_Matrix A: a square matrix of any type. The values of A are not
// accessed. The presence of the entry A(i,j) indicates the edge
// (i,j). That is, an explicit entry A(i,j)=0 is treated as an edge.
// GrB_Matrix AT: an optional matrix of any type. If NULL, the algorithm
// is a conventional push-only BFS. If not NULL, AT must be the
// transpose of A, and a push-pull algorithm is used (NOTE: this
// assumes GraphBLAS stores its matrix in CSR form; see discussion in
// LAGraph_bfs_pushpull). Results are undefined if AT is not NULL but
// not identical to the transpose of A.
// int64_t source: the source node for the BFS.
// This algorithm can use the push-pull strategy, which requires both A and
// AT=A' to be passed in. If the graph is known to be symmetric, then the same
// matrix A can be passed in for both arguments. Results are undefined if AT
// is not the transpose of A.
// See LAGraph_bfs_pushpull for a discussion of the push/pull strategy.
// References:
// <NAME>, <NAME>, and <NAME>. 2018. Implementing Push-Pull
// Efficiently in GraphBLAS. In Proceedings of the 47th International
// Conference on Parallel Processing (ICPP 2018). ACM, New York, NY, USA,
// Article 89, 11 pages. DOI: https://doi.org/10.1145/3225058.3225122
// <NAME>, <NAME> and <NAME>, The GAP Benchmark
// Suite, http://arxiv.org/abs/1508.03619, 2015. http://gap.cs.berkeley.edu/
#include "LAGraph_internal.h"
#define LAGRAPH_FREE_ALL \
{ \
GrB_free (&w) ; \
GrB_free (&q) ; \
GrB_free (&pi) ; \
}
int LAGraph_BreadthFirstSearch // easy-mode
(
// outputs:
GrB_Vector *level_handle, // if NULL do not compute
GrB_Vector *parent_handle, // if NULL do not compute
// inputs:
LAGraph_Graph G,
GrB_Index source,
char *message // for error handling; may be NULL
// convention: 0:ok, < error, > warning.
// if no message, we set message [0] = '\0' (if not NULL)
) ;
GrB_Info LAGraph_bfs_parent2 // push-pull BFS, compute the tree only
(
// output:
GrB_Vector *pi_output, // pi(i) = p if p is the parent of node i
// inputs:
GrB_Matrix A, // input graph, any type
GrB_Matrix AT, // transpose of A (optional; push-only if NULL)
GrB_Vector Degree, // Degree(i) is the out-degree of node i
// (optional: push-only if NULL)
int64_t source // starting node of the BFS
)
{
//--------------------------------------------------------------------------
// check inputs
//--------------------------------------------------------------------------
int info ;
GrB_Vector q = NULL ; // the current frontier
GrB_Vector pi = NULL ; // parent vector
GrB_Vector w = NULL ; // to compute work remaining
GrB_Vector level = NULL ; // level vector
if (G == NULL || G->A == NULL)
{
// required argument is missing or mangled
LAGRAPH_ERROR ("required arguments are NULL", GrB_NULL_POINTER) ;
}
GrB_Index nrows, ncols, nvals ;
GrB_Matrix A = G->A ;
GrB_Matrix AT = G->AT ;
GrB_Vector Degree = G->rowdegree ;
LAGraph_TRY (GrB_Matrix_nrows (&nrows, A)) ;
LAGraph_TRY (GrB_Matrix_ncols (&ncols, A)) ;
LAGraph_TRY (GrB_Matrix_nvals (&nvals, A)) ;
if (nrows != ncols)
{
// A must be square
LAGRAPH_ERROR ("A must be square", GrB_INVALID_VALUE) ;
}
//--------------------------------------------------------------------------
// check the format of A and AT
//--------------------------------------------------------------------------
#ifdef SS4
LAGraph_SS_bfs_parent ( ... ) ;
bool A_csr = true, AT_csr = true ;
if (A != NULL)
{
GxB_Format_Value A_format ;
LAGr_get (A , GxB_FORMAT, &A_format) ;
A_csr = (A_format == GxB_BY_ROW) ;
}
if (AT != NULL)
{
GxB_Format_Value AT_format ;
LAGr_get (AT, GxB_FORMAT, &AT_format) ;
AT_csr = (AT_format == GxB_BY_ROW) ;
}
bool vxm_is_push = (A != NULL && A_csr ) ; // vxm (q,A) is a push step
bool vxm_is_pull = (A != NULL && !A_csr ) ; // vxm (q,A) is a pull step
bool mxv_is_push = (AT != NULL && !AT_csr) ; // mxv (AT,q) is a push step
bool mxv_is_pull = (AT != NULL && AT_csr) ; // mxv (AT,q) is a pull step
// can_push is true if the push-step can be performed
bool can_push = vxm_is_push || mxv_is_push ;
// can_pull is true if the pull-step can be performed
bool can_pull = vxm_is_pull || mxv_is_pull ;
// direction-optimization requires both push and pull, and Degree
bool push_pull = can_push && can_pull && (Degree != NULL) ;
//--------------------------------------------------------------------------
// initializations
//--------------------------------------------------------------------------
GrB_Index n = nrows ;
GrB_Type int_type = (n > INT32_MAX) ? GrB_INT64 : GrB_INT32 ;
GrB_Semiring semiring ;
// create an sparse integer vector q, and set q(source) = source
LAGr_Vector_new (&q, int_type, n) ;
LAGr_Vector_setElement (q, source, source) ;
GrB_Index nq = 1 ; // number of nodes in the current level
if (parent == NULL)
{
// just want the level, not the parent
// semiring = GrB_LOR_LAND_BOOL ;   // (boolean alternative)
semiring = GxB_ANY_PAIR_BOOL ;
}
else
{
// need the parent
if (n > INT32_MAX)
{
// semiring = GrB_MIN_FIRST_INT64 ;   // (older alternative without SECONDI)
semiring = GxB_ANY_SECONDI_INT64 ;
}
else
{
semiring = GxB_ANY_SECONDI_INT32 ;
}
}
// pi = an empty bitmap vector
LAGr_Vector_new (&pi, int_type, n) ;
GxB_set (pi, GxB_SPARSITY_CONTROL, GxB_BITMAP + GxB_FULL) ;
// pi (source) = source denotes a root of the BFS tree
LAGr_Vector_setElement (pi, source, source) ;
if (push_pull) LAGr_Vector_new (&w, GrB_INT64, n) ;
double alpha = 8.0 ;
double beta1 = 8.0 ;
double beta2 = 512.0 ;
int64_t n_over_beta1 = (int64_t) (((double) n) / beta1) ;
int64_t n_over_beta2 = (int64_t) (((double) n) / beta2) ;
//--------------------------------------------------------------------------
// BFS traversal and label the nodes
//--------------------------------------------------------------------------
bool do_push = can_push ; // start with push, if available
GrB_Index last_nq = 0 ;
int64_t edges_unexplored = nvals ;
bool any_pull = false ; // true if any pull phase has been done
for (int64_t nvisited = 1 ; nvisited < n ; nvisited += nq)
{
//----------------------------------------------------------------------
// select push vs pull
//----------------------------------------------------------------------
int64_t edges_in_frontier = 0 ;
if (push_pull)
{
if (do_push && can_pull)
{
// check for switch from push to pull
bool growing = nq > last_nq ;
bool switch_to_pull = false ;
if (edges_unexplored < n)
{
// very little of the graph is left; disable the pull
push_pull = false ;
}
else if (any_pull)
{
// once any pull phase has been done, the # of edges in the
// frontier is no longer tracked. But now the BFS
// has switched back to push, and we're checking for yet
// another switch to pull. This switch is unlikely, so
// just keep track of the size of the frontier, and switch
// if it starts growing again and is getting big.
switch_to_pull = (growing && nq > n_over_beta1) ;
}
else
{
// update the # of unexplored edges
// w<q>=Degree
// w(i) = outdegree of node i if node i is in the queue
LAGr_assign (w, q, NULL, Degree, GrB_ALL, n, GrB_DESC_RS) ;
// edges_in_frontier = sum (w) = # of edges incident on all
// nodes in the current frontier
LAGr_reduce (&edges_in_frontier, NULL,
GrB_PLUS_MONOID_INT64, w, NULL) ;
edges_unexplored -= edges_in_frontier ;
switch_to_pull = growing &&
(edges_in_frontier > (edges_unexplored / alpha)) ;
}
if (switch_to_pull)
{
// the frontier is large and growing: switch from push to pull
do_push = false ;
}
}
else if (!do_push && can_push)
{
// check for switch from pull to push
bool shrinking = nq < last_nq ;
if (shrinking && (nq <= n_over_beta2))
{
do_push = true ;
}
}
}
any_pull = any_pull || (!do_push) ;
//----------------------------------------------------------------------
// q = next level of the BFS
//----------------------------------------------------------------------
GxB_set ((GrB_Matrix) q, GxB_SPARSITY_CONTROL,
do_push ? GxB_SPARSE : GxB_BITMAP) ;
if ((do_push && vxm_is_push) || (!do_push && vxm_is_pull))
{
// q'<!pi> = q'*A
// this is a push if A is in CSR format; pull if A is in CSC
LAGr_vxm (q, pi, NULL, semiring, q, A, GrB_DESC_RSC) ;
}
else // ((!do_push && mxv_is_pull) || (do_push && mxv_is_push))
{
// q<!pi> = AT*q
// this is a pull if AT is in CSR format; push if AT is in CSC
LAGr_mxv (q, pi, NULL, semiring, AT, q, GrB_DESC_RSC) ;
}
last_nq = nq ;
LAGr_Vector_nvals (&nq, q) ;
if (nq == 0) break ;
//----------------------------------------------------------------------
// assign parents
//----------------------------------------------------------------------
// q(i) currently contains the parent id of node i in tree.
// pi<q> = q
LAGr_assign (pi, q, NULL, q, GrB_ALL, n, GrB_DESC_S) ;
}
#else
if ( .. parent ... and no level)
{
// parent only
LAGRAPH_TRY (GrB_assign (pi, NULL, NULL, -1, GrB_ALL, n, GrB_DESC_S)) ;
while (1)
{
LAGRAPH_TRY (GrB_assign (q, q, NULL, ramp, GrB_ALL, n, GrB_DESC_S));
LAGRAPH_TRY (GrB_vxm (q, pi, NULL, semiring, q, A, GrB_DESC_RSC)) ;
LAGRAPH_TRY (GrB_Vector_nvals (&nq, q)) ;
if (nq == 0) break ;
LAGRAPH_TRY (GrB_assign (pi, q, NULL, q, GrB_ALL, n, GrB_DESC_S)) ;
}
}
else if ( ... both parent and level ... )
{
// parent and level
for (int64_t k = 0 ; k < n ; k++)
{
LAGRAPH_TRY (GrB_assign (q, q, NULL, ramp, GrB_ALL, n, GrB_DESC_S));
LAGRAPH_TRY (GrB_vxm (q, pi, NULL, semiring, q, A, GrB_DESC_RSC)) ;
LAGRAPH_TRY (GrB_Vector_nvals (&nq, q)) ;
if (nq == 0) break ;
LAGRAPH_TRY (GrB_assign (pi, q, NULL, q, GrB_ALL, n, GrB_DESC_S)) ;
LAGRAPH_TRY (GrB_assign (level, q, NULL, k, GrB_ALL, n, GrB_DESC_S)) ;
}
}
else
{
// level only
for (int64_t k = 0 ; k < n ; k++)
{
LAGRAPH_TRY (GrB_vxm (q, pi, NULL, semiring, q, A, GrB_DESC_RSC)) ;
LAGRAPH_TRY (GrB_Vector_nvals (&nq, q)) ;
if (nq == 0) break ;
LAGRAPH_TRY (GrB_assign (level, q, NULL, k, GrB_ALL, n, GrB_DESC_S)) ;
        }
    }
#endif
//--------------------------------------------------------------------------
// free workspace and return result
//--------------------------------------------------------------------------
(*pi_output) = pi ;
pi = NULL ;
LAGRAPH_FREE_ALL ; // free all workspace (except for result pi)
return (GrB_SUCCESS) ;
#endif
}
<file_sep>/minutes/2020-09-09.md
# LAGraph Working Group Meeting Minutes - September 9, 2020
## Attendees
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
## Minutes
### Discussion on the proposed LAGraph error handling mechanisms
Certain properties (such as negative weight cycles) could be computed and cached on the graph. With careful design, this could be set in an atomic way.
Discussion on the second option:
> Have `LAGraph_Info` which is a superset of `GrB_Info`.
* Scott's concern: is one set of error codes going to cover all the algorithms?
* <NAME>.: agreed that this is a problem. And also, in some cases, an algorithm might need more than an enum for error message. But in those cases, that can be the output enum. So the `LAGraph_Info` which subsumes `GrB_Info` would go a long way for most problems.
Discussion on the fourth option:
> Every LAGraph function could have an `LAGraph_Info *info` argument which solves most of the problems but requires +1 arguments.
* This could wrap a `GrB_Info` (but this is probably repetitive), an algorithm-specific error code (for the specific error such as 'negative-length cycles') and a string (is the string arbitrary length and if so, who frees it?).
* Is it for all algorithms or just algorithms that need specialized error codes?
* What if it is an input/output argument and it's created on the stack and passed by the user?
* The [`LAGRAPH_OK()`](https://github.com/GraphBLAS/LAGraph/blob/a2b7688eb2f06784c6b860bf71a782a5e209a63e/Include/LAGraph.h#L435) macro already does something similar. See also the [`LAGRAPH_TRY_CATCH()`](https://github.com/GraphBLAS/LAGraph/blob/a2b7688eb2f06784c6b860bf71a782a5e209a63e/Include/LAGraph.h#L421-L428) macro.
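As a rough sketch of this fourth option (the function and argument names below are illustrative only, not an agreed API), the extra trailing argument would look like:

```c
// hypothetical signature: every LAGraph call gains a trailing info argument
GrB_Info LAGraph_SomeAlgorithm  // placeholder name
(
    GrB_Vector *result,         // algorithm output
    LAGraph_Graph G,            // input graph
    LAGraph_Info *info          // filled in on error/warning; could wrap the GrB_Info,
                                // an algorithm-specific code, and a message string
) ;
```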
Design question: who are we writing the error messages for?
* Both the developer and the user (e.g., the negative weight cycle). The developer errors are mostly GrB_Info errors and the user error are mostly LAGraph_Info errors. (Not 100% but this is the case in the overwhelming majority of the time.)
Discussion on the third option:
> Have an `LAGraph_Info` struct with a message (`char message [128]`). This has some overhead: `memcpy`-ing the message every time gets expensive when performed repeatedly in an incremental-style algorithm.
This is a nice option but it has performance issues if the algorithm is invoked a lot of times. (BLAS has this problem and vendors work around it differently with non-portable solutions.)
:warning: If there's an error code but no message, the first byte of the `message` should be cleared.
Scott's point: the two sets of errors are quite different and it's not trivial to untangle them. We should not return `GrB_SUCCESS` plus a LAGraph error -- at the very least, it should be `GrB_NO_VALUE`.
You could also pass a 'performance warning' as part of the LAGraph 'error' object.
<NAME>: it's a realistic scenario that LAGraph and GraphBLAS are going to be used together, similarly to LAPACK and BLAS. So if the error codes are 'compatible' that might make these libraries easier to use.
Does the `LAGraph_Info` object come as the first argument or as the last?
Convergence? The third option (algorithm-specific error object) seems best. See [`LAGraph.h`](../LAGraph.h#L311).
### Misc
Threading model: LAGraph should be trivially thread-safe.
<file_sep>/lagraph_spec/BFS5M.c
#include <stdlib.h>
#include <stdio.h>
#include <stdint.h>
#include <stdbool.h>
#include "GraphBLAS.h"
/*
* Given a boolean n x n adjacency matrix A and a source vertex s, performs a BFS traversal
* of the graph and sets v[i] to the level in which vertex i is visited (v[s] == 1).
 * If i is not reachable from s, then v[i] = 0. (Vector v should be empty on input.)
*/
GrB_Info BFS(GrB_Vector *v, GrB_Matrix A, GrB_Index s)
{
GrB_Index n;
GrB_Matrix_nrows(&n,A); // n = # of rows of A
GrB_Vector_new(v,GrB_INT32,n); // Vector<int32_t> v(n)
GrB_Vector q; // vertices visited in each level
GrB_Vector_new(&q,GrB_BOOL,n); // Vector<bool> q(n)
GrB_Vector_setElement(q,(bool)true,s); // q[s] = true, false everywhere else
/*
* BFS traversal and label the vertices.
*/
int32_t d = 0; // d = level in BFS traversal
bool succ = false; // succ == true when some successor found
do {
++d; // next level (start with 1)
GrB_assign(*v,q,GrB_NULL,d,GrB_ALL,n,GrB_NULL); // v[q] = d
GrB_vxm(q,*v,GrB_NULL,GrB_LOR_LAND_SEMIRING_BOOL,
q,A,GrB_DESC_RC); // q[!v] = q ||.&& A ; finds all the
// unvisited successors from current q
GrB_reduce(&succ,GrB_NULL,GrB_LOR_MONOID_BOOL,
q,GrB_NULL); // succ = ||(q)
} while (succ); // if there is no successor in q, we are done.
GrB_free(&q); // q vector no longer needed
return GrB_SUCCESS;
}
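
/*
 * The driver below is an added illustration (it is not part of the original
 * spec example): it builds a small 4-node directed cycle, runs the BFS above
 * from vertex 0, and prints the level assigned to each vertex.  Any conforming
 * GraphBLAS implementation should work.
 */
int main(void)
{
    GrB_init(GrB_NONBLOCKING);
    GrB_Index n = 4;
    GrB_Matrix A;
    GrB_Matrix_new(&A, GrB_BOOL, n, n);                        // boolean adjacency matrix
    for (GrB_Index i = 0; i < n; i++)
        GrB_Matrix_setElement(A, (bool)true, i, (i + 1) % n);  // edge i -> (i+1) mod n
    GrB_Vector v = NULL;
    BFS(&v, A, 0);                                             // traverse from vertex 0
    for (GrB_Index i = 0; i < n; i++)
    {
        int32_t level = 0;                                     // stays 0 if vertex i is unreachable
        GrB_Vector_extractElement(&level, v, i);
        printf("vertex %lu: level %d\n", (unsigned long) i, level);
    }
    GrB_free(&A);
    GrB_free(&v);
    GrB_finalize();
    return 0;
}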
<file_sep>/minutes/2020-07-22.md
# LAGraph Working Group Meeting Minutes - July 22, 2020
## Attendees
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [X] <NAME>
## Minutes
**Minutes** by <NAME>
Main activity: drafting `LAGraph.h` by <NAME>
### LAGraph_graph
First, define the `LAGraph_graph` data structure.
* See Tim's code.
* The graph property weighted/unweighted is important because one can have an unweighted graph that is INT64.
### BFS
* Naming convention: `LAGraph_BreadthFirstSearch_variant`
* Some of the variants could be handled by passing a NULL as an argument but we are better off adding separate functions for them.
* The basic BFS in easy mode would be just BFS from a single source but still there are two variants: levels and parents.
* Scott M: the easy mode is often just a dispatch, e.g., here it calls the algorithm that computes levels only, parents only, or both.
* Should we allow the user to specify the maximum number of levels? Not here.
* `LAGraph_BreadthFirstSearch_Frontier` can be useful in problems such as MIS: run one step of BFS from the set of nodes and if the resulting frontier is disjoint, the set of nodes is indeed independent.
* Discussion on style for passing variable-size arrays:
* `GrB_Vector source` vs. `GrB_Index *sources` and `size_t nsource` (see the sketch after this list)
* C does not have an `std::array` which would be very useful here.
* Consensus: it's unlikely that the list of source vertices comes as a `GrB_Vector` from the outside.
* Tim D: Passing a dense `GrB_Vector` with **index values** (e.g., for 3 source nodes, we have a 3-length vector with the node indices) could be beneficial, it's very cheap to construct even with `setElement` (no `build` needed).
* Usage of `GrB_Index` and `uint64_t` should correspond to their conceptual meaning, e.g., the number of hops is a `uint64_t` value.
* The "transitive reachability" could be benificial as it does slightly less work.
* <NAME>: this is useful for max-flow. In that case, its unidirectional traversal and the parents along the path are needed.
* Gabor: if its only about reachability, it can be implemented with a bidirectional search and the result is just a boolean.
We currently deem these transitive reachability algorithms not so essential.
* Return the DAG of the traversal.
* This is an advanced algorithm.
* There are many other variants of BFS.
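As a sketch of the two argument-passing styles discussed above (function and parameter names are illustrative only, not an agreed API):

```c
// (a) plain C array of source vertices plus an explicit count
GrB_Info LAGraph_BFS_Batch_A   // placeholder name
(
    GrB_Vector *level,         // output: level(i) = BFS level of node i
    LAGraph_Graph G,           // input graph
    const GrB_Index *sources,  // array of source vertices
    size_t nsource             // number of source vertices
) ;

// (b) a dense GrB_Vector holding the source vertex ids
GrB_Info LAGraph_BFS_Batch_B   // placeholder name
(
    GrB_Vector *level,         // output: level(i) = BFS level of node i
    LAGraph_Graph G,           // input graph
    GrB_Vector sources         // sources(k) = id of the k-th source vertex
) ;
```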
## Arguments
* How do we state that the BFS traversal does not limit the number of hops? By setting `nhops` to `0` vs. `UINT64_MAX` vs. `NONE` (a defined value)?
* `NULL` vs. `GrB_NULL`:
* Slight preference for `NULL`
* Up for further discussion
## Next up
* connected components
* BC variants (exact/approximate, vertex/edge?):
* do we need a random number generator or a random node sampler?
* maybe it is out of scope
* in any case, our algorithms should be deterministic (e.g., with a fixed seed) unless the user specifically asks for non-deterministic computation
<file_sep>/minutes/2020-05-20.md
# LAGraph Working Group Meeting Minutes - May 20, 2020
## Attendees
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [X] <NAME>
## Agenda and Minutes
**Agenda** by ???
**Minutes** by <NAME>
- [X] API and Algorithms Discussion
> Fundamental issue emerging: What information do we provide to algorithms? What information is needed?
> Are properties discoverable? Some are (symmetry), others are not (symmetric matrix with lower triangular portion provided)
> "Easy button" - we want to have to provide only minimal information (few arguments). "Expert mode" - we may have a lot of information transfer (lots of arguments).
- [X] Easy vs Expert Modes
> It's becoming increasingly apparent that there is a VAST gulf of separation between the easy mode and expert mode. Expert mode requires a lot of information, especially if performance is important. Easy mode should remain user-friendly, and we should not expect the user to have to understand or specify lots of information.
> How do we reconcile this?
<file_sep>/lagraph_spec/Makefile
pdf:
pdflatex LAGraph_API_C.tex
bibtex LAGraph_API_C
pdflatex LAGraph_API_C.tex
pdflatex LAGraph_API_C.tex
clean:
rm -f *.aux *.log *.toc *.bbl *.blg *.lof *.lot *.out
<file_sep>/minutes/2020-09-02.md
# LAGraph Working Group Meeting Minutes - September 2, 2020
## Attendees
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
## Minutes
### Discussion on notation
We should converge towards a common pseudocode in LaTeX.
* Tim D: It would be great if you could type it in ASCII so that's it's similar to the LaTeX-based formula.
* How to represent the "replace semantics" in a mask?
* How to differentiate structure and value masks (e.g., `<M.S>` and `<M>`)?
### Agenda
Reviewing the GAP / LDBC Graphalytics algorithms.
### Shortest path algorithms
Do we want to have a condition for maximum number of iterations?
Tim D: The easy mode doesn't really need such an option, it should run the algorithm as long as needed.
Discussion on negative edge weights: what should it do? Should it produce `GrB_INVALID_OBJECT`?
Quick detour to GrB errors codes in SuiteSparse:GraphBLAS:
* 0/1 (`GrB_SUCCESS`/`GrB_NO_VALUE`) are not really errors (however, see p33 of the C API spec v1.3, where `GrB_NO_VALUE` is classified as an API error),
* the ones between 2 and 9 are API errors,
* the ones between 10 and 13 are execution errors.
Currently, the closest one is `GrB_INVALID_OBJECT`.
Lots of questions popped up -- What is the semantics of a graph with a negative cycle? Is it a property of a graph? Is `GrB_Info` enough or do we need `LAGraph_Info`?
Main alternatives:
* Use `GrB_Info` and return `GrB_NO_VALUE` (e.g., in the presence of a negative-weight cycle). This is not expressive enough to return the error message.
* Have `LAGraph_Info` which is a superset of `GrB_Info`.
* Have an `LAGraph_Info` struct with a message (`char message [128]`). This has some overhead: `memcpy`-ing the message every time gets expensive when performed repeatedly in an incremental-style algorithm.
* Have an `LAGraph_report(G)` method that returns the error string. Has to be thread-safe -- this introduces problems similar to those the global `GrB_error` had.
* It's also problematic when 2 LAGraph calls want to use `G` at the same time. This is more powerful, it can e.g., return the negative cycle.
* It is difficult to attach the error to `G` because it is an input in most cases.
* Alternative would be to put the error message in (one of) the outputs? But we cannot put an error in a GraphBLAS object such as a `GrB_Vector`.
* Every LAGraph function could have an `LAGraph_Info *info` argument which solves most of the problems but requires +1 arguments.
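As a rough sketch of the third alternative (the field names are illustrative only, not an agreed layout):

```c
typedef struct
{
    GrB_Info grb_info ;      // status of the underlying GraphBLAS call
    int      lagraph_code ;  // algorithm-specific code, e.g. "negative-weight cycle found"
    char     message [128] ; // human-readable message; message[0] == '\0' if there is none
}
LAGraph_Info ;
```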
<file_sep>/minutes/2020-04-01.md
# LAGraph Working Group Meeting Minutes - April 1, 2020
## Attendees
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [ ] <NAME>
## Agenda and Minutes
**Agenda** by ___
**Minutes** by <NAME>
- [X] GitHub infrastructure
> Setup of Project (Kanban) Board, automate for new issues
- [ ] How do we handle errors? Do we include an error flag as an argument to a function or as returned value? We are currently returning a GrB_info value. Do we need an LAgraph_info? Does integration with Python favor one approach over the other?
- [ ] Is there an LAGraph context? Or do we just used the GraphBLAS context? Do we pass the context to each individual library routine (sort of like what we do when we call MPI routines)?
- [ ] What rules do we use to generate a function name from the mathematical operation?
- [ ] Do we need a nonpolymorphic interface as we have for the GraphBLAS?
<file_sep>/Notation/Makefile
go:
pdflatex notation
pdflatex notation
<file_sep>/minutes/2020-06-10.md
# LAGraph Working Group Meeting Minutes - June 10, 2020
## Attendees
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [ ] <NAME>
## Agenda and Minutes
**Agenda** by <NAME>
**Minutes** by <NAME>
- [X] Let's get specific and talk about function signatures
> We discussed the functions that exist in the LAGraph repository today. The list includes betweenness centrality, single source shortest paths, ktruss, BFS, connected components, community detection by label propagation, Minimum spanning forest, page rank, strongly connected components, and triangle counting. That's 10 functions
> It's time to work through them and come up with function signatures. With specific signatures nailed down, we can have more productive discussions on the more expansive design issues.
> Also ... note that in addition to the list of algorithm provided above, we need assessor functions for any opaque types and the set of utility functions
- [X] Let's work on an actual function signature
> LAGraph_bc_batch4() is in good shape and is the right bc case to work with.
> We considered two modes for function signatures: an easy mode for the general user and an advanced mode. In many cases the easy mode will turn around and call the advanced mode after setting up the computation.
> We talked at length about polymorphism and even the right term for the concept. To me, the key is having a single function name but distinct functions distinguished by their function signatures. Based on experience with SuiteSparse running on everything from windows to Linux, <NAME> pointed out that there is uneven support for that sort of polymorphism. Hence, we really want the function names themselves to distinguish between cases. Bummer.
- [X] LAGraph_BC() .... easy mode
~~~
LAGraph_Info LAGraph_BC // vertex betweenness centrality, batch algorithm
(
GrB_Vector *centrality, // centrality(i): betweenness centrality of i
const LAGraph_Graph A, // input graph, A(i,j) is the edge (i,j)
LAGraph_Context const CNTX // a context parameter or even a descriptor to pass in
)
~~~
- [X] LAGraph_BC() .... advanced mode
> The advanced mode gives one the option to pass in a set of source vertices. It's not clear if these would be a LAGraph_vector object or a pointer to int with a second parameter for the number of source vertices. We didn't discuss this point to conclusion
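> A rough sketch of what the advanced mode might look like (names and the array-vs-GrB_Vector choice are still open):
~~~
LAGraph_Info LAGraph_BC_Sources // vertex betweenness centrality, caller-chosen sources (sketch only)
(
    GrB_Vector *centrality,     // centrality(i): betweenness centrality of i
    const LAGraph_Graph A,      // input graph, A(i,j) is the edge (i,j)
    const GrB_Index *sources,   // source vertices for the batch (one of the options discussed)
    size_t nsources,            // number of source vertices
    LAGraph_Context const CNTX  // a context parameter or descriptor, as in the easy mode
)
~~~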
- [X] LAGraph_BC() .... Discussion
> The BC algorithm in normal usage selects a random set of vertices as the sources. This means we need to think about pseudorandom numbers inside LAGraph. As no self respecting algorithm designer would depend on the rand() function in C, we need to pull in a proper generator. The right one to pull in is SPRNG from Mascagni at Florida State. The details of the generator and the handling of the seed should be a build option on the LAGraph library. We do not want to force the general user to worry about such things.
> Another issue pertains to what we might want to store in the LAGraph_Graph type. It's easy to do so in addition to the transpose of the matrix, we may want to keep row degree and column degree information (and perhaps other structural properties)
<file_sep>/minutes/2020-09-30.md
# LAGraph Working Group Meeting Minutes - September 30, 2020
## Attendees
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
## Minutes
### Brief review
* breadth-first search variants
* connected components
* weak/strong
* return communities
* centrality framework
* vertex centrality
* betweenness centrality
* etc.
* shortest paths
* single-source shortest paths
* all-pairs shortest paths
Takeaway: unify the naming for `hops` vs. `levels` (we currently use `hops` to denote the maximum number of hops in SSSP).
### Algorithms
#### Triangle count
LAGraph currently uses an integer to denote the approach used.
In easy mode, we can just get the graph and return the number of triangles.
#### Degrees and transpose
Idea from <NAME>:
* <NAME>: many algorithms need the vertex degrees, we could add it to LAGraph_Graph
* Scott: from an application's point of view, you may need this for an algorithm, then you may not and you would need to free that up.
* <NAME>: for these optional O(n)/O(m) storage parameters, we can use an environment variable to set whether we want to optimize for performance or memory. This is easy for the users as well. This is the approach used by OpenMP and these internal control variables. If we allow algorithms to compute these on different threads, race conditions appear easily. Defining our memory model is asking for trouble.
* Tim D's recommendation: allow the user to do these explicitly.
Agreement: the user can call these explicitly with methods that take the LAGraph_Graph as input and output. These will compute the degrees and transpose.
Regarding the graph properties, the user can set them to `yes`, `no`, `no idea`.
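A sketch of what those explicit calls might look like (the names are illustrative only, not an agreed API):

```c
// each call takes the LAGraph_Graph as input/output and caches the computed property in it
int LAGraph_ComputeRowDegree (LAGraph_Graph G, char *msg) ; // fills G->rowdegree
int LAGraph_ComputeTranspose (LAGraph_Graph G, char *msg) ; // fills G->AT
```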
Pointwise (vertex-wise) triangle count: if we include it, we should fold it into another algorithm where we get it for "free". This would be the LCC (local clustering coefficient) algorithm, which computes the LCC values as #triangles/#wedges for each node.
#### Deletes
<NAME>: Related question - how does a user delete vertices and edges?
<NAME>: the workflow is also important, how do the changes arrive, what are we looking for in the output in the algorithm?
#### Error Reporting
The notes in this section were added late in October by <NAME>. I have in my notes from the meeting that we agreed on the way we will report errors in LAGraph functions. I didn't see this written down in any of our minutes so I wanted to add them just to make sure these didn't get lost.
We agreed that each LAGraph function will return an int that will be 0 for success, negative for an error, and positive for a warning message. We did not go with an enum since there is so much variation between functions that it just wouldn't make sense to construct a standard set of error/warning messages across the full library.
We agreed that every function will have (I believe as the last argument) a string that holds an error message. We will define a macro for the length of this string since a user of the library will need to declare this explicitly in his or her program. So I might have in my program (and I quickly typed this out so please excuse any C syntax errors).
```c
char err[LAGRAPH_MESSAGE_LENGTH];
int retVal = LAGraph_BFS(arg1, arg2, arg3, err);
if (retVal != 0) printf("LAGraph was not successful %d %s", retVal, err);
```
### Next up
* More GAP/Graphalytics algorithms
<file_sep>/minutes/LAGr_Matrix.h
// Every LAGraph function has a descriptor input (opaque?)
// modes:
// debug: check all properties
// etc: ...
// raw:
// LAGr_Matrix: A matrix, not a graph: [ opaque ]
// blocking and non-blocking?
G->stuff = x
LAGr_set_stuff (G, x)
typedef struct
{
GrB_Matrix A // one matrix
symmetry // yes, no, unknown
typeid // enum for now int8, int16, .... user
typestring // name of user defined type, or "GrB_INT8"
diagonal // yes (any neg? all pos? all >=0), no, unknown ...
cc
kind ? // adjacency, incidence, bipartite?, hypergraph?, ...
// none of the above
char name [64] ;
// ... hidden: implementation defined
MPI_comm_world stuff
FPGA stuff
GPU stuff ...
special GxB stuff
// ... other
uint8_t other [64] // user definable space
} LAGr_Matrix_struct ;
typedef LAGr_Matrix_struct *LAGr_Matrix ;
// discuss more:
tril or triu or neither?
row vs col vs both (agnostic)?
char name [64] ?
posweights
// probably not:
cycle
scc
# of conn. components?
}
LAGr_Matrix
<file_sep>/minutes/2020-04-08.md
# LAGraph Working Group Meeting Minutes - April 8, 2020
## Attendees
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [X] <NAME>
## Agenda and Minutes
**Agenda** by ___
**Minutes** by <NAME>
- [X] Discussion about structure of LAGr_Matrix object
> Should include some kind of information about the matrix. Allow users to specify this information directly (no opacity).
> Concerns about packing too much information into this object. Vertex weights probably need to be a separate vector object. Matrix transpose probably does, too. Limit these properties to descriptions/properties of the matrix.
- [X] What properties should be included in the LAGr_Matrix object?
> Yes/probably: Symmetry, diagonal, cycles
> What about user-defined properties? Very useful in application-level, but properties can be very specific to domain area. Agreed that these properties are useful, and we need a method of communicating these properties, but perhaps not in the LAGr_Matrix object.
- [X] Should the LAGr_ object represent a matrix or a graph?
> General consensus is it should be an LAGr_Matrix.
> May need some way to communicate whether this matrix object represents an adjacency, incidence, or bipartite graph (or other...?).
<file_sep>/minutes/2020-09-16.md
# LAGraph Working Group Meeting Minutes - September 16, 2020
## Attendees
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
## Minutes
### Discussion on the proposed LAGraph error handling mechanisms
Scott's idea: how about returning a custom, LAGraph-specific error code from our calls, and putting the `GrB_Info` code inside the `LAGraph_Info` (which the user can decide to discard).
Tim M's point: it gets confusing if we have two overlapping ways of communicating an error. The API should provide a straightforward/unambiguous way to communicate that the user did something that resulted in an error.
Conclusion: return an integer with the convention
* :white_check_mark: `info == 0`: ok
* :x: `info < 0`: error
* :warning: `info > 0`: warning
And a fixed-length `message`, which is preferably allocated once per thread.
In practice, this might look like the following:
```c
char message[LAGRAPH_MESSAGE_LENGTH];
int retVal;
retVal = LAGraph_method(output, input, message);
if (retVal != 0) printf("you've got error %s\n", message);
```
### Testing
We need some way of testing LAGraph algorithms on small (but testing-wise interesting) graphs.
* Gabor will follow up on this through email regarding the framework.
* Testing something like a BFS tree is challenging but well studied (see the Graph500 and GAP benchmarks).
* Many algorithms such as community detection are non-deterministic.
* Some level of sanity testing should be possible.
* The suite of tests is just as valuable as the algorithm.
### Next up
* GAP/Graphalytics algorithms
* Notation
<file_sep>/minutes/2020-04-29.md
# LAGraph Working Group Meeting Minutes - April 29, 2020
## Attendees
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [X] <NAME>
## Agenda and Minutes
**Agenda** by ???
**Minutes** by <NAME>
- [X] Property checking levels
> Maybe "Common" and "Uncommon" properties. Common properties are included and managed by the LAGraph object (e.g. symmetry). Uncommon properties are usually algorithm-specific and must be managed and communicated by the user.
> Maybe a different name.
> Probably too many levels currently. Maybe just 2-3.
- [X] General discussion regarding LAGraph object
- [ ] TODO for next meeting: Draft algorithm function signatures
<file_sep>/minutes/2021-12-1.md
Minutes, Dec 1, 2021
* entire user guide needs to be written.
need discussion on how experimental gets promoted to src.
* test, document, polish all src & experimental: algorithms, utilities,
and tests.
as of Nov 30: testing is at 94.7%. Only untested functions are
4 experimental/algorithms: lcc, cdlp, scc, and msf.
* unify error return values as much as possible, now that v2.0 C API
has assigned enum values to GrB_Info. Suggest we use GrB_Info
values, as ints, as much as possible. Briefly discussed.
* brutal memory testing: in progress. grep for brutal in the latest
Nov 30 version.
* need new GxB methods in GraphBLAS for CC, ideally for v1.0 of LAGraph:
GxB_select with GxB_RankUnaryOp, and GxB_extract with GrB_Vectors as
inputs instead of (GrB_Index *) arrays.
* for passing a pointer to a user-owned array (see LG_CC_Boruvka):
Michel Pelletier to Everyone (2:37 PM)
uintptr_t? (instead of uint64_t cast)
it's in C99 and C++03
Michel Pelletier to Everyone (2:38 PM)
"The following type designates an unsigned integer type with the
property that any valid pointer to void can be converted to this
type, then converted back to a pointer to void, and the result will
compare equal to the original pointer: uintptr_t"
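A minimal illustration of that round-trip (plain C99, not LAGraph code):
```c
#include <stdint.h>
#include <assert.h>

int main(void)
{
    int user_owned[4] = {0, 1, 2, 3};                  // user-owned array
    uintptr_t handle = (uintptr_t)(void *)user_owned;  // pointer -> integer
    void *back = (void *)handle;                       // integer -> pointer
    assert(back == (void *)user_owned);                // compares equal to the original
    return 0;
}
```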
<file_sep>/minutes/2021-03-22.md
# LAGraph Working Group Meeting Minutes - March 22, 2021
## Attendees
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
## Minutes
Minutes taken by Scott McMillan. I apologize if I made any mistakes on the list of attendees. I composed the list by memory following our meeting
### Moving to an official release
We discussed the organization of the repository to prepare for upcoming official release.
Decisions made:
* A branch called "stable" will be created with a placeholder README.md (with comment that release will happen in the next few months), LICENSE and contributor files
* The current master branch will be renamed "develop"
* On the develop branch, we will
* copy LAGraph.h to experimental (temporary)
* rename LAGraph2.h to LAGraph.h
* move the contents of Experimental2 to Source
* We still need to figure out what to do with the rest of the repo.
### GrAPL paper
We also discussed the LAGraph paper for GrAPL. Gabor and <NAME>. are going to take a pass. The rest will review before next meeting.
<file_sep>/minutes/2023-01-25.md
Minutes, Jan 25, 2023
topics:
* add more algorithms
* document how algorithms are to be added
* Network X <---> LAGraph
* look at cuGraph
* better kernels in GraphBLAS (GxB):
- 'aggregator' monoids, (community detection needs mode)
c = A*x
monoid: "mode" (not a monoid)
mult: second(aij,x) = x
- GrB_Vector-based method for GxB_select of a matrix
(need by scc, and more)
select op:
depends on parent, dense vector
keep a(i,j) if parent(i) == parent(j)
- workvector format for GrB_Vector
- JIT
- CUDA, SYCL, ...
* add to SuiteSparse (cmake mods)
* Intel DevCloud -- benchmark GAP: Sapphire Rapids
<file_sep>/minutes/2020-05-13.md
# LAGraph Working Group Meeting Minutes - May 13, 2020
## Attendees
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [X] <NAME>
## Agenda and Minutes
**Agenda** by ???
**Minutes** by <NAME>
- [X] API discussion. How do we organize the library, and how do we present this to the user?
> Need some kind of categorization. Utilities, expert/fast, easy, etc. Also, domains.
> Don't make it too wordy. LAGraph_Utility_function_name is probably too long?
> Core, Advanced, and Utility categories.
- [X] Repository structure
> Separate research and release by folders, not branches.
<file_sep>/minutes/2021-04-14.md
# LAGraph Working Group Meeting Minutes - April 14, 2021
## Attendees
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
## Minutes
Please volunteer for tasks around LAGraph 1.0 (documentation, testing, etc.)
<file_sep>/graphblas-notation.md
# GraphBLAS notation
This is a working document to come up with a notation for typesetting the pseudocode of LAGraph algorithms.
## Requirements
We would like to come up with a consistent notation in LaTeX that also has an ASCII counterpart (and it should also work in WYSIWYG presentation software).
It would be nice if the LaTeX one could be mapped to PowerPoint's equation editor (Gabor).
## Existing notations
- There are multiple notations in the 2011 GALLA book.
- The specification's notation in <https://people.eecs.berkeley.edu/~aydin/GraphBLAS_API_C_v13.pdf#page=84>.
- <NAME>'s notation in <https://people.engr.tamu.edu/davis/GraphBLAS_files/toms_graphblas.pdf#page=8> and in the SuiteSparse User Guide.
- <NAME>'s notation in <https://resources.sei.cmu.edu/asset_files/Presentation/2016_017_001_474272.pdf#page=15>
- <NAME> et al.'s notation in the CF'18 paper, <https://dl.acm.org/doi/10.1145/3203217.3205342>, Table 2.
## Questions regarding the notation
### Matrices, vectors, arrays
* How to differentiate matrices/vectors from each other? (bold/regular, lowercase/uppercase, ...)
* There seems to be a consensus to use bold for both **A** and **w**.
* Uppercase for matrices, lowercase for vectors?
* How to denote the transpose operation (superscript `T` or `'`)?
* `T` seems more popular in LaTeX-based notations.
* `'` is used in MATLAB.
* Do we differentiate row vectors from column vectors, i.e., do we use `q'A` or just `qA`?
* I think there's no need to differentiate between them. (Gabor)
* How to typeset arrays (italic, bold, uppercase, lowercase, ...)?
* Italic, i.e., _J_?
* Italic bold, i.e., _**J**_?
### Masks
* How to denote masks with the 'replace' flag?
* The spec has a separate parameter, `z`.
* Kumar et al.'s notation uses a symbol, the double dagger `‡` to denote the 'replace' flag.
* We could use `<<M>>` for replace. (Gabor)
* How to denote masks with the 'structure' flag?
* `<M>` (value) and `<M.S>` (structure)
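For illustration only, one candidate rendering of a masked, accumulated matrix product with the replace flag and a complemented value mask (not an agreed choice):

```latex
% illustrative only: C<!M,repl> += A (+.*) B
\[
  \mathbf{C}\langle \neg \mathbf{M}, \mathrm{repl} \rangle \mathrel{{+}{=}} \mathbf{A} \; \oplus.\otimes \; \mathbf{B}
\]
```

with an ASCII counterpart along the lines of `C<!M,repl> += A +.* B`.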
### Operations
* For matrix multiplication of `A` and `B`, do we write `AB` or spell out `A ⊕.⊗ B`?
* ?
* Is it allowed to use a `+=`-style notation for the accumulator, e.g., express `C = C MIN A` as `C MIN= A`?
* ?
* How to denote common mathematical operations such 'logical and' and 'logical or'?
* By spelling out the operator's name, i.e., `LAND`/`LOR`, or with wedge/vee symbols.
* Same for their binary counterparts `BAND`/`BOR`, or `&`/`|`.
* How to denote the `extractElement` and `setElement` operations?
* Seems fairly simple to have `s = C(i,j)` and `C(i,j) = s`.
* How to denote common operations such as `nvals`/`nrows`/`ncols`, `clear`, `size`?
* It is probably simplest to just spell them out.
* How to denote the Kronecker operation?
* I've seen the double circled times symbol and the 'kron' string used for this.
### Operators
* How to spell the `FIRST` and `SECOND` operators? (`1ST`/`FIRST`, `2ND`/`SECOND`?)
* For me, `FIRST` / `SECOND` seem better as they avoid digits in the names. (Gabor)
* How to denote element-wise operations such as element-wise division?
* Division: `DIV`, `/` or circled `/`. (See also `MINV`.)
* Minus: `MINUS`, `-` or circled `-`.
* How to handle corner cases where an operator needs unusual element-wise semantics? E.g., sometimes it is desirable to evaluate `GrB_PLUS` with element-wise multiplication semantics.
* Kumar et al.'s notation handles this by passing the separate operator.
* Put the intersection/union symbol there?
### Matrix initialization
* What syntax (if any) should be used for initializing matrices/vectors? While the memory allocation itself isn't very important in a pseudocode, the dimensions of the matrix and the precise types (`UINTxx` instead of `N`) would be useful to display.
* Do we use the mathematical symbols for number sets (`Q`/`R` vs. `FPxx`, `N` vs. `UINTxx`, `Z` vs. `INTxx`)?
* How to state the domain(s) of the semiring that an operation is evaluated on?
* Kumar et al.'s notation handles this by defining the semirings `S1`, `S2`, ... used in each algorithm.
### Types
* How to typeset the logical variables
* `true`/`True`/`TRUE` or `1` or the 'top' symbol?
* How to handle conversions (both implicit and explicit) such as `float` to `boolean`?
* ?
### Indexing
* Does the indexing start from 0 or 1?
* I'm strongly inclined towards 0-based indexing as the API is defined in C. It's also orthogonal to the rest of the notation. (Gabor)
<file_sep>/minutes/2021-03-08.md
# LAGraph Working Group Meeting Minutes - March 8, 2021
## Attendees
- [ ] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
## Minutes
Minutes taken by <NAME>. I apologize if I made any mistakes on the list of attendees. I composed the list by memory following our meeting
### Moving to an official release
Here is the list of activities we MUST do in order to create our first official release of LAGraph
* Write LAGraph code using SuiteSparse GraphBLAS including extensions
* Code Review LAGraph code using SuiteSparse GraphBLAS including extensions
* Write LAGraph code using official GraphBLAS API methods only
* Code Review of LAGraph code using GraphBLAS API methods only
* Write the user guide for LAGraph
* Write the specification for LAGraph
* develop test suite: (1) unit tests, and (2) a performance regression test suite (perhaps based on GAP benchmarks)
* Clean up github site for public release.
I need the full group to look over this list and make sure we aren't missing anything. Then we need volunteers for each item.
Note that it would be good to have an automated test framework so we test code checked in each day. Gabor has set up some
automated testing using GitHub Actions. We need to look into this further.
<file_sep>/minutes/2020-06-03.md
# LAGraph Working Group Meeting Minutes - June 3, 2020
## Attendees
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [X] <NAME>
## Agenda and Minutes
**Agenda** by ???
**Minutes** by <NAME>
- [X] Does LAGraph have to be built on top of GraphBLAS?
> In short, yes. That is the purpose of LAGraph
> The opaque objects at the heart of LAGraph are "graphs" not "matrices". This is in recognition of the fact that properties/features of the objects are specific to Graphs even though they are built on top of the more general GraphBLAS matrices.
- [X] Is LAGraph "BLOCKING"?
> In short, yes...to start. But GraphBLAS underneath could be NON-BLOCKING
> Basically LAGraph function performs a GrB_wait(..) on every output GraphBLAS object that comes out of the function.
- [X] Importing and exporting GrB matrices into/outof of LAGraph graph objects
> Pull and matrix out, mess with it, and insert it back in will require a GrB_wait to be called on the GrB matrix, but all the properties are also invalidated
> Pulling it out should be destructive to LAGraph object, and constructing a new object will the
> What if there are multiple matrices inside LAGraph object? Export everything.
> Should we just export all of the data objects and destroy the LAGraph object. Construct new LAGraph object with various properties when you want to use in LAGraph functions again.
- [X] Concurrency
> we discussed how LAGraph functions interact when called on different threads. This took up a fair amount of time. The sense of the group ... this is a complex issue and we have not solved it yet. This is still being hotly discussed inside the GraphBLAS group. However, <NAME> emphasized that he felt there must be a GrB_wait(obj) before an object can be safely shared between threads.
> There was a discussion about zombies and whether they had to be resolved (i.e. written back into the core matrix) before being shared between threads. <NAME> said they needed to be. <NAME> pointed out that resolving zombies is a safe approach to take, but other systems (the TileDB array storage engine, for example) did not feel the need to resolve their "zombies" before sharing an object between threads. You just must assure that the zombies are in a data structure that is shared along with the object. <NAME> pointed out that doing so would require use of concurrent data structures in the core implementation of GraphBLAS objects and that was too complicated to tolerate.
- [X] Decisions and non-decisions
> We agreed that LAGraph is blocking, LAGraph objects are opaque, we will provide accessor functions to members of the type.
> We agreed that the GraphBLAS inside LAGraph functions may be non-blocking but in the nonblocking case, GrB_wait(obj) must be called on relevant objects before exit from the function.
> We DID NOT agree on LAGraph_init() and if GrB_init() is called inside or outside LAGraph_init()
> We agreed that one GrB_object can not belong to more than one LAGraph at the same time.
<file_sep>/minutes/2020-03-25.md
# LAGraph Working Group Meeting Minutes - March 25, 2020
## Attendees
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [ ] <NAME>
## Agenda and Minutes
**Agenda** by ___
**Minutes** by <NAME>
- [X] Should we have a blocking/non-blocking mode?
> More discussion regarding blocking/non-blocking mode in LAGraph.
> - Have LAGraph utilities that import/export CSC, CSR, and similar transparent formats into GraphBLAS objects.
> - Wrap LAGraph algorithm functions with the above import/export utilities to create an "easy" mode. This enables user-friendly versions of the algorithm functions that look like high-level, pure C graph algorithms (e.g. CSC format C arrays -> LAGraph Easy Betweenness Centrality -> C array).
> - We still have "core" LAGraph algorithms that accept and return GraphBLAS objects. These functions would return GraphBLAS objects that may be unfinished/deferred.
- [ ] How do we handle errors? Do we include an error flag as an argument to a function or as returned value? We are currently returning a GrB_info value. Do we need an LAgraph_info? Does integration with Python favor one approach over the other?
- [ ] Is there an LAGraph context? Or do we just used the GraphBLAS context? Do we pass the context to each individual library routine (sort of like what we do when we call MPI routines)?
- [ ] What rules do we use to generate a function name from the mathematical operation?
- [ ] Do we need a nonpolymorphic interface as we have for the GraphBLAS?
<file_sep>/minutes/2020-07-01.md
# LAGraph Working Group Meeting Minutes - July 1, 2020
## Attendees
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [ ] <NAME>
## Minutes
**Minutes** by <NAME>
- [X] Function names ... Decisions
> We discussed the topic of function signatures last week but we did not reach closure. This week, we are closing on these issues.
> - We will use abbreviations in function names. So it will be "LAGraph_bc" rather than "LAGraph_betweenness_centrality". Likewise for BFS (Breadth First Search) and PR (page rank).
> - We agree that functions should be distinguished by name not by argument lists. This will let us support compilers that do not handle polymorphism. If we make the names descriptive, this will also improve the "understandability" of library. For example, we use the names LAGraph_bc_vertex and LAGraph_bc_edge rather than distinguish between the cases by the fact one returns a vector and the other a matrix.
- [X] Data Types in LAGraph
> Last week we discussed the return types from functions. When the returned value is Graph, the decision is easy. The return type is of course an LAGraph graph. How do we handle vectors and matrices? Consider Edge Centrality. It returns a value for each edge in the graph. That is a matrix. Do we return it as a Lagraph_graph, a Grb_matrix, or a pointer to an array in the C programming language?
> After much discussion we decided on the GrB types (GrB_vector and GrB_matrix). A big part of this decision was the simplification it offers the implementor. Instead of long case statements at the beginning of each functions to cover all the type cases, you have have one simple signature with the GrB opaque types. This makes for a really clean interface.
> This does mean we'll need more utility functions to help users query the output objects. For example, for vertex_PR we'll want a utility function that sorts an input GrB_array and outputs a pointer to a basic C array with the top K ranks. We'll need to think of the queries users will have of matrix types (such as the matrix returned from BC_edge) and provide corresponding plain-old-C types (input a GrB_matrix and return tuples for the top K edges).
> the conversation then strayed into the ongoing issues of import and export functions. We'll need a function to import a GrB_matrix into an LAGraph graph.
> There are a few complications. Sometimes you want a list of graphs (think of connected components). How do we cover this case using the matrix and vector types from GraphBLAS?
- [X] GrB_scalar
> If we return a vector, then we should return a GrB_vector. Do we need a scalar GrB_scalar? Note that by using the GrB types, we avoid the explosion of non polymorphic function signatures corresponding to all the different types of objects.
> <NAME> emphasized that he wants a GrB_Scalar in the graph BLAS. The benefits to the implementor is just too strong to not add a GrB_scalar type that LAGraph can exploit. I will bring this up with the full GraphBLAS spec group. Eric had other issues he wanted to discuss with the GraphBLAS group. He will send me email with that list of issues.
> During the discussion, the Anaconda folks mentioned their recently announced project called metagraph. The goal to metagraph is to have a python interface that wraps many different libraries. This would let them have a single interface to multiple different graph algorithm libraries. So they have been thinking about what are the basic types across different graph packages. This is important as they construct wrappers around these packages and work out a coherent way to expose these to users.
- [X] Homework
> Our next meeting is July 15. We'll skip the next meeting (on July 8).
> At our next meeting, everyone should show up with their own version of what they want the easy mode LAGraph.h to be. We can start with the spreadsheet that Scott sent us (LAGraph_signature_survey.xlsx).
<file_sep>/minutes/2020-03-11.md
# LAGraph Working Group Meeting Minutes - March 11, 2020
## Attendees
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
## Agenda and Minutes
**Agenda** by ___
**Minutes** by <NAME>
We had an open agenda with a rambling meeting.
We started by saying that we needed to stop turning this into a GraphBLAS meeting. The work
on LAGraph has exposed a number of issues the GraphBLAS group will need to deal with. We
should keep sending issues to that group, but we need to move forward and work on the design
of an API for the LAGraph library (in C, C++ and Python).
We ended up discussing the idea of an LAGraph context, plus a range of random issues
stretching across the API.
<<<LAGraph Context>>>
We could expect users to call Grb_init() and import the GraphBLAS context into LAGraph. Or
do we want to wrap the Grb_init() and have our own LAGraph_init()?
In the GraphBLAS group, as we consider more advanced uses of the GraphBLAS, we might need
to expand the concept of a context. Basically, when you move to parallel and distributed computing
we need the context to carry more information than just "blocking vs. nonblocking". You need a
way to deal with MPI communicators or subsets of threads, for example.
The GraphBLAS group is leaning towards passing a context object to every method in the GraphBLAS.
We could adopt that approach in LAGraph. LAGraph_init() returns a context handle which we'd pass
to every LAGraph function.
Tim Davis really liked that approach and suggested we start with that approach as
we move forward. We can remove it later if we find we don't need it (at least up until the 1.0 release).
So where do we put the handle to a context in the LAGraph functions.
<NAME> suggest putting the handle at the end of every argument list. Putting the handle
at the end of the list, however, can impact use of polymorphism. If you have a function that has
multiple polymorphic forms and the number of arguments changes, it can be tough if you put
the context at the end.
<NAME> pointed out that he has polymorphic code for GrB_assign. It works, but it is really ugly.
If you want to have a default rule which would let people call without providing a context, its best
if you put it at the beginning.
We wondered if where we put the handle has any impact on interfacing to Matlab or python. The
feeling of the group was that it didn't.
We converged on a number of decisions summarized later.
<<<< Random discussion points (it was an open agenda after all) >>>>
Indices in LAGraph should support various integer sizes. This would be tricky but we may want to
avoid the approach taken in GraphBLAS where we defined the index type to use. GraphBLAS assumes
int64. This can be a real problem with Grb_extract and GrB_assign, for example. Fixing the integer
type for indices was an odd move by the GraphBLAS team since it breaks opacity with these user
visible arrays.
Arguments to LAGraph functions should use GrB objects and therefore support Opaque types. The
group seemed to appreciate this perspective, but there are concerns about whether we really want to
expose our users to the fact that GraphBLAS sit underneath LAGraph.
<NAME> raised a point about the return objects from GraphBLAS and if you can get lists of values
rather than a graph object? He said he could send us a specific case to explain what he needs. He
will provide a couple motifs that he encounters that exposes what he is looking for. He sees this as
important for much larger scale problems on large distributed memory machines.
As the conversation roamed, it came out that perhaps what we need is some concept of a
view: allowing users to operate on a subset of a graph without materializing objects for that subset.
This is a topic we'll want to consider at a future date.
Decisions:
1. We will pass an LAGraph context to every library function.
2. The LAGraph handle will be passed in as the first argument.
3. We will support a default case with a default handle (LAGRAPH_WORLD?).
4. Objects in the argument list to an LAGraph function must all belong to the same context.
5. LAGraph will assume and therefore require presence of GraphBLAS.
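As a sketch of decisions 1-3 (the function names are illustrative only, not an agreed API):

```c
// hypothetical shapes, following decisions 1-3
int LAGraph_init (LAGraph_Context *ctx) ;        // creates and returns a context handle
int LAGraph_SomeFunction (LAGraph_Context ctx,   // context is always the first argument
                          GrB_Vector *output,
                          const GrB_Matrix A) ;
// and a default handle, analogous to MPI_COMM_WORLD:
// LAGraph_SomeFunction (LAGRAPH_WORLD, &output, A) ;
```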
<file_sep>/minutes/2020-04-22.md
# LAGraph Working Group Meeting Minutes - April 22, 2020
## Attendees
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [ ] <NAME>
## Agenda and Minutes
**Agenda** by <NAME>
**Minutes** by <NAME>
- [X] What is LAGraph?
* Is it a lowish-level graph library that many interfaces and/or libraries will be built on?
- Implementers become our primary stakeholders
- Performance over user-friendliness. API can afford to be a little messy, since we will still have some sort of wrapper between us and a user.
* **Is it a highish-level graph library that users will be able to use as a drop-in graph library?**
- **C/C++ users become our primary stakeholders**
- **User-friendliness is key. Users will not tolerate clunky/messy APIs.**
> Of course we want both user-friendliness and performance. But, when in conflict, our focus is on user-friendliness of the C API.
- [X] LAGraph Levels Idea
* Level 0 is GraphBLAS
* Level 1 are LAGraph functions that (almost?) exclusively call GraphBLAS
- Lots of input, lots of output. Ex: Connected components. Return number, vertex labels, etc.
* Level 2 are LAGraph functions that (almost?) exclusively call LAGraph
- Very concise input (few arguments), very concise output.
> Above idea was panned, but alternative idea was popular:
> Runtime parameter to determine level of checks:
> * Level 0 - no checks at all. Trust the programmer to satisfy all input requirements. "I want to go as fast as possible, don't even check the input flags."
> * Level 1 - raise error when parameter is checked and is found to be unknown. "I want to go fast! What properties do you need to know?"
> * Level 2 - run check or compute property if it is unknown, but trust input flags. "I know a few things about the input, but not everything. Here are some hints, but compute everything else."
> * Level 3 - run checks or compute properties no matter what. Do not trust input flags. "It just works."
>
> This parameter can be stored in some form of context or descriptor
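> A sketch of how such a runtime parameter could be spelled (the names are illustrative only, not an agreed API):
```c
// hypothetical check-level enum matching the four levels above
typedef enum
{
    LAGRAPH_CHECK_NONE    = 0,  // trust the caller completely, no checks
    LAGRAPH_CHECK_ERROR   = 1,  // raise an error if a needed property is unknown
    LAGRAPH_CHECK_FILL_IN = 2,  // compute unknown properties, trust provided flags
    LAGRAPH_CHECK_ALL     = 3   // recompute/verify everything, ignore provided flags
}
LAGraph_CheckLevel ;
```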
<file_sep>/minutes/2021-04-21.md
# LAGraph Working Group Meeting Minutes - April 21, 2021
## Attendees
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [X] <NAME>
## Minutes
<NAME>: status report on the upcoming SuiteSparse:GraphBLAS, v5.
Scott: We need a test framework. The current tests are basically demos.
Based on web search, Googletest is the recommended framework but it needs a C++ compiler.
The test framework should not only check the results but also test assertions.
Scott: We need documentation. [Sphinx](https://www.sphinx-doc.org/en/master/) is used by Scott for another project, it uses reStructuredText.
Scott, Gabor: We'll create a build matrix in the GitHub Actions configuration for Mac/Linux and SuiteSparse:GraphBLAS v4, v5, and future versions.
Documentation:
* [Executable book project](https://executablebooks.org/en/latest/)
* [PreTeXt](https://pretextbook.org/)
We need to document both the API and how the algorithms work internally.
<file_sep>/minutes/2020-12-2.md
# LAGraph Working Group Meeting Minutes - December 2, 2020
## Attendees
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
## Minutes
Minutes taken by <NAME>. I apologize if I made any mistakes on the list of attendees. I composed the list by memory following our meeting
### Moving to an official release
The graph community is moving past us. We are taking too long to get something out there that anyone can download and use. Benchmarking projects are proceeding without us since we don't have software anyone can download and use. We will slip into irrelevance if we don't get moving.
We discussed how long it would take for us to offer a first release of LAGraph. The feeling was it would take two months. That means we should try to have this release done by the beginning of February. The group committed to hitting this deadline. We need to complete the following between now and February.
* What functions will go in the first release? We agreed that the methods from the GAP benchmark suite will be covered. We will add others too such as those in the LDBC or perhaps the Graph Challenge. We need to be careful, however, that we do not add so many functions that we miss our deadline.
* We need documentation which includes: (1) doxygen or some other system tied to the code, (2) a user guide, and (3) a contributors guide.
* Marketing is a reality, so we need a paper for GrAPL. Scott will be talking about LAGraph at a SIAM symposium as well.
We need to plan for these documents now so they are ready when the software is. It is best if someone who is not writing the code leads on the documentation efforts (which means I suspect <NAME> will need to play a key role in creating this documentation).
### Is this generic GraphBLAS or do we include SuiteSparse extensions?
We discussed this topic and agreed that we needed a pure graphBLAS version and a version that uses extensions in the SuiteSparse library. We need both. Hopefully, however, we can support both options from one code base using ifdefs.
### Error codes returned
We agreed a while back that, for the error codes, values less than zero are error conditions, values greater than zero are warnings, and values equal to zero are successful.
Different algorithm implementations are free to define their own error codes as long as they follow the basic rule.
How do LAGraph return codes interact with GraphBLAS return codes? It is not clear. One approach is to pick a range (say -100 to +100) and leave it reserved for GraphBLAS.
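A minimal, runnable sketch of how a caller might interpret return codes under this convention (the reserved -100..+100 range is only one of the options discussed above, not a decision):

```
#include <stdio.h>

/* Illustrative only: negative = error, positive = warning, zero = success. */
static void report (int result)
{
    if (result < 0)
        printf ("error %d%s\n", result,
                (result >= -100) ? " (range reserved for GraphBLAS)" : "") ;
    else if (result > 0)
        printf ("warning %d\n", result) ;
    else
        printf ("success\n") ;
}

int main (void)
{
    report (0) ;    // success
    report (7) ;    // an algorithm-specific warning
    report (-42) ;  // an error in the hypothetical GraphBLAS-reserved range
    return 0 ;
}
```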
<file_sep>/minutes/2020-10-07.md
# LAGraph Working Group Meeting Minutes - October 7, 2020
## Attendees
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
## Minutes
Short discussion about the properties of `LAGraph_Graph`:
* Multigraphs with edge properties are tricky.
* They can be represented with an incidence matrix.
* However, they might be too advanced for easy mode.
* Directed/undirected
* Bipartite graph (they are rarely directed but it's not impossible)
Conclusion: we need a few algorithms in LAGraph, then we can answer this. We should postpone this discussion.
## Algorithms
### k-truss
* one k-truss
* all k-trusses
* next k-truss (expert mode)
<file_sep>/minutes/2020-03-04.md
# LAGraph Working Group Meeting Minutes - March 4, 2020
## Attendees
- [x] <NAME>
- [x] <NAME>
- [x] <NAME>
- [x] <NAME>
- [x] <NAME>
- [ ] <NAME>
- [x] <NAME>
- [x] <NAME>
## Agenda and Minutes
**Agenda** by <NAME>
**Minutes** by <NAME>
- [x] Report on the status of Travis/continuous-integration with LAGraph. Following the traffic from github, there has been a huge amount of activity. I’m really impressed. It would be good to brief the group on where you are at and what remains to be done on that front.
> Gabor provided a summary: There were a number of issues in compiling GraphBLAS from source in the Travis builds. GraphBLAS would take too long to compile and cause a time out, and there was also C-specific code that prohibited building in a C++ context. These issues have been ironed out: A faster compile flag has been added, and the C-specific issues have been fixed/removed.
> Related to this, the goal would be for us not to have to build GraphBLAS from source for every CI build - we would like pre-compiled binaries available via package managers or directly from GitHub.
- [ ] We need to set up the structure of the repository with a development branch and a release branch. What else do we need to do to configure our repository for distribution to the “end-user” programmer community?
- [ ] At the GraphBLAS meeting this week, we discussed the list of GraphBLAS extensions <NAME> provided. We want to invite <NAME> to join an upcoming GraphBLAS meeting so we can talk to him directly about these items. Are there others from the LAGraph group who should be at that meeting?
- [x] A high level issue came up in today’s GraphBLAS meeting that the LAGraph group needs to consider. Will LAGraph have its own graph type? Or will LAGraph use the GraphBLAS types? This is a key issue to help us divide functionality between the two libraries.
> We spent most of the meeting debating this. Right now, the primary challenge is being able to query the matrix type. Not having this functionality in GraphBLAS may justify an LAGraph graph or matrix object.
> While there was not a strong consensus, the current plan forward seems to be to move ahead using GraphBLAS objects. If type querying is rejected from the GraphBLAS API, we will need to reverse course and make our own type wrapper, but by default we want to stay as close to GraphBLAS as possible.
> Related to this, we did agree that LAGraph algorithms should be "clean" and GraphBLAS library-agnostic, with code written using mostly GraphBLAS functions and LAGraph utilities (few if any #ifdefs). LAGraph utilities, on the other hand, can have #ifdefs and other "less clean" implementations that allow for extensions (e.g. GxB_select). These utilities should ideally not be simple wrappers for GraphBLAS functions (just use GraphBLAS then), but could be wrappers such that if an extension is available, use it, otherwise, provide an implementation.
- [ ] I am getting some pressure (in a good way) to provide guidance on the actual API we will define for LAGraph. I’d really like these LAGraph meetings to shift priorities and talk about API design issues. I think of this in terms of the rules we use to generate a function signature for inclusion in the library. This means agreeing on a range of issues such as:
- [ ] How do we handle errors? Do we include an error flag as an argument to a function or as a returned value? We are currently returning a GrB_Info value. Do we need an LAGraph_Info? Does integration with Python favor one approach over the other?
- [ ] Is there an LAGraph context? Or do we just use the GraphBLAS context? Do we pass the context to each individual library routine (sort of like what we do when we call MPI routines)?
- [ ] What rules do we use to generate a function name from the mathematical operation?
- [ ] Do we need a non-polymorphic interface as we have for the GraphBLAS?
- [ ] Others?
<file_sep>/minutes/2021-04-07.md
# LAGraph Working Group Meeting Minutes - April 7, 2021
## Attendees
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
## Minutes
Minutes taken by <NAME>.
### LAGraph refactoring
Scott and Erik are working on the refactoring on the `reorg` branch. This will become the `stable` branch.
There is some cruft left from benchmarking exercises. They have surveyed the algorithms in the GAP benchmark so far; the other algorithms (in the `Experimental` directory) have not yet been surveyed.
The goal is to be able to compile (a variant) without SuiteSparse extensions.
We are targeting GraphBLAS v1.3.0, as v2.0 has not been released yet.
Scott is working on a clean & idiomatic CMake setup.
Gabor suggested removing the `git` history because there is an excessive number of files, mostly input and output files (matrices, logs, etc.). However, this should be done in a way that does not affect the releases. ([This seems to be doable.](https://www.reddit.com/r/git/comments/ja0fsq/does_deleting_git_tags_cause_unmerged_history_to/g8mvwl8/))
Scott proposes moving the type into `LAGraph_Graph`, so every graph object will carry its type.
<file_sep>/minutes/2022-12-14.md
Topics/Minutes, Dec 14, 2022
* LAGraph 1.0.1
  * check it and post it
  * contributor covenant?
  * contact: steering committee
* LAGraph 1.1:
  * new algorithms (promote from experimental)
  * make issues for codes like msf (API wrong, test is partial)
  * contributor covenant?
* GraphBLAS.org: add committees
<file_sep>/README.md
# LAGraph Working Group Repository
Public document and planning repository for the LAGraph Working Group. Documents such as meeting minutes, plans, and other non-code artifacts are stored here.
# License
Unless otherwise decided or stated, documents in this repository are licensed under the [Creative Commons Attribution (CC BY 4.0) license](https://creativecommons.org/licenses/by/4.0/).
<file_sep>/old/LAGraph_June17_2020.h
// June 17, 2020, LAGraph discussion
LAGraph_Info LAGraph_vertex_bc
(
GrB_Vector *c,
const LAGraph_Graph A
)
LAGraph_Info LAGraph_edge_bc
(
LAGraph_Graph *E,
const LAGraph_Graph A
)
LAGraph_Info LAGraph_top_k
(
list *x,
const LAGraph_Graph E
)
E = f (A)
x = g (E)
x = g (f (A))
desc->query true/false
desc->algorithm char *
"" (auto)
"priority queue:other stuff:more stuff:"
"batch"
"stuff"
"..."
LAGraph_Info LAGraph_bc
(
LAGraph_descriptor desc,
GrB_Vector *c,
const LAGraph_Graph A,
GrB_Vector sources,
)
LAGraph_Info LAGraph_bc_Y
(
LAGraph_descriptor desc,
GrB_Vector *c,
const LAGraph_Graph A,
GrB_Vector sources,
LAGraph_Random rand
)
LAGraph_Info LAGraph_bc_Z
(
LAGraph_descriptor desc,
GrB_Vector *c,
const LAGraph_Graph A,
LAGraph_Random rand
)
LAGraph_descriptor :
ignore edge weights
treat graph as undirected, even though A.stuff says otherwise
#define LAGraph_bc(arg1,...)
_Generic ((arg1),
LAGraph_descriptor: LAGraph_bc_X,
default: LAGraph_bc_easy )
(arg1, __VA_ARGS__)
<file_sep>/minutes/2020-08-20.md
# LAGraph Working Group Meeting Minutes - August 20, 2020
## Attendees
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
## Minutes
**Minutes** by <NAME>
### Centrality algorithms (cont'd)
* There are many algorithms -> we define two functions `LAGraph_VertexCentrality` and `LAGraph_EdgeCentrality` and use an enum `kind` to pass whether we want to calculate betweenness, eigenvector, degree, pagerank (many variants with different parameters), etc.
* There are multiple types here (e.g., degree: int, betweenness: float) but this can be hidden under `GrB_Vector`.
* The current decision is to also encode the output type in the enum argument, e.g., for betweenness centrality, `LAGR_BETWEENNESS_FP32` and `LAGR_BETWEENNESS_FP64` will allow the user to specify whether the output should be produced in 32-bit or 64-bit floats (see the sketch below).
* The preference would be to have two separate enums `LAGraph_VertexCentrality_Type` or `LAGraph_EdgeCentrality_Type`.
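A hedged sketch of what the enum and signature discussed above might look like (assumes `GraphBLAS.h` is included; the names are illustrative, not a settled API):

```
// Illustrative only -- hypothetical declarations, not a settled API.
typedef enum
{
    LAGR_BETWEENNESS_FP32,  // betweenness centrality, 32-bit float output
    LAGR_BETWEENNESS_FP64,  // betweenness centrality, 64-bit float output
    LAGR_PAGERANK_FP64,     // PageRank
    LAGR_DEGREE_INT64       // degree centrality
}
LAGraph_VertexCentrality_Type ;

// *centrality is uninitialized on input; the function allocates and returns it.
GrB_Info LAGraph_VertexCentrality
(
    GrB_Vector *centrality,                 // output: score per vertex
    const GrB_Matrix A,                     // adjacency matrix of the graph
    LAGraph_VertexCentrality_Type kind      // which centrality, and which output type
) ;
```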
### Discussion on GrB_Type
* Exposing `GrB_Type` is essential.
* The thinking in the GrB design API was that the (default) casting rules of the C programming language covers us.
* There's also no `GrB_EQUAL` operator due to the lack of a way to query `GrB_Type`. For example, if the user has two matrices (might be floats or ints), they have to tell the GrB implementation the types (because casting can change the results of equals, e.g., by losing the mantissa of the float).
### Ownership of output
The convention in LAGraph follows the GraphBLAS API, e.g., `*centrality` is uninitialized on input, the function initializes it (allocates memory) and returns it to the user.
### Degree
We had a discussion on how "sophisticated" we want the `LAGraph_degree` function to be. Do we want to add an option to filter out self-edges? Do we want to add an option to sum edge weights (e.g., edges with weights -1, +1, +1, +1 would result in a degree of +2)?
This discussion is loosely related to sanitization – in existing LAGraph functions, there is often a `sanitize` flag that, if set to `true`, results in a copy of the original matrix with changes to conform to the required input (e.g., by converting the matrix to `GrB_BOOL` and removing self-edges). However, in many cases, it would be preferable to compute the results without creating a complete (sanitized) copy of the original graph.
What should the function look like?
Approach 1:
* `LAGraph_Degree(*d, G, kind)` with an enum `kind` to specify whether it's an in-degree, out-degree, etc.
Approach 2:
* `LAGraph_Degree_in`
* `LAGraph_Degree_out`
* `LAGraph_Degree_...`
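A hedged sketch contrasting the two approaches (hypothetical declarations only; assumes `GraphBLAS.h` is included):

```
// Illustrative only -- hypothetical declarations, not an agreed API.

// Approach 1: one function with a kind enum
typedef enum { LAGr_DEGREE_IN, LAGr_DEGREE_OUT } LAGraph_Degree_Kind ;
GrB_Info LAGraph_Degree (GrB_Vector *d, const GrB_Matrix G, LAGraph_Degree_Kind kind) ;

// Approach 2: one function per variant
GrB_Info LAGraph_Degree_in  (GrB_Vector *d, const GrB_Matrix G) ;
GrB_Info LAGraph_Degree_out (GrB_Vector *d, const GrB_Matrix G) ;
```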
### Shortest paths
General signature `GrB_Info LAGraph_ShortestPath_[...]`, where `[...]` might be:
* `SingleSource` (see the signature in [`LAGraph.h`](../LAGraph.h))
* `SingleSourceSingleDestination`
* `MultiSource`
* `AllPairs`: expensive but it's a well-known linear algebra-based algorithm (Floyd–Warshall)
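A hedged sketch of how this naming scheme could play out for two of the variants (the parameter lists are hypothetical; the actual signature lives in `LAGraph.h`):

```
// Illustrative only -- hypothetical parameter lists, not the actual LAGraph.h signatures.
GrB_Info LAGraph_ShortestPath_SingleSource
(
    GrB_Vector *path_length,    // output: distance from source to each vertex
    const GrB_Matrix A,         // weighted adjacency matrix
    GrB_Index source            // source vertex
) ;

GrB_Info LAGraph_ShortestPath_SingleSourceSingleDestination
(
    GrB_Vector *path_length,    // output: distance from source to destination
    const GrB_Matrix A,
    GrB_Index source,
    GrB_Index destination
) ;
```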
### Next up
Reviewing the GAP / LDBC Graphalytics algorithms.
<file_sep>/minutes/2020-05-06.md
# LAGraph Working Group Meeting Minutes - May 6, 2020
## Attendees
- [ ] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [X] <NAME>
## Agenda and Minutes
**Agenda** by ???
**Minutes** by <NAME>
- [X] API namespace and function signature discussion
> No namespaces in C, so what prefix should we use for function signatures? LAGraph_, LAG_, LAGr_, LG_?
> Change name of library entirely (LAGraph -> GraphPACK?)
- [X] More API discussion - Betweenness Centrality is a good case study
> Do we need bc and bc_batch? bc is currently a bc_batch with a single source node.
> What about an "easy mode" bc? That would require some approximation. Run batches of size k and check for convergence?
> Probably need layers of some sort: High-level "easy," lower-level "expert," and utilities.
- [X] Some algorithms need more info: Iterate over row or column? Push or pull in BFS?
> Need that info from GraphBLAS. But how?
- [X] How are we going to categorize algorithms? Or are we?
> Follow NetworkX and make it all one level? If not, how do we organize the algorithms? Add something to the function signature?
> Categorize only in documentation. That way, we don't have to change the code with future releases as algorithms are re-organized.
<file_sep>/minutes/2020-07-29.md
# LAGraph Working Group Meeting Minutes - July 29, 2020
## Attendees
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
## Minutes
**Minutes** by <NAME>
## BFS
Tightening up BFS (see the latest `LAGraph.h`):
* The convention is that output handles set to `NULL` are not required to be computed.
* Observation: `LAGraph_BreadthFirstSearch` can be implemented on top of `LAGraph_BreadthFirstSearch_Frontier` (quite efficiently in some cases, but it depends; we didn't discuss it further).
* Discussion on whether to add `hops` to all `LAGraph_BreadthFirstSearch*` algorithms.
* Suggestion: be conservative -- it's easier to add an argument later than to take it away.
* Added a TODO `hops for all methods` in the code.
* Discussion on whether a `GrB_Vector vertex_mask` (and either a `bool vertex_mask_complemented`+`bool vertex_mask_structural`, or a `GrB_Descriptor desc` descriptor to allow the use of its structure/complement) should be added to the `LAGraph_BreadthFirstSearch_Frontier` function.
* This would restrict the BFS to the nodes that are selected by the `vertex_mask`.
* It would be a useful extension, but it might belong to the expert mode and not to easy mode.
* In easy mode, it can be simulated with a filtering step before.
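A hedged sketch of the NULL-output-handle convention mentioned at the top of this section (the signature is hypothetical, not the final API):

```
// Illustrative only -- hypothetical signature showing the convention that
// output handles passed as NULL are not required to be computed.
GrB_Info LAGraph_BreadthFirstSearch
(
    GrB_Vector *level,      // output (optional): BFS level of each vertex
    GrB_Vector *parent,     // output (optional): BFS parent of each vertex
    const GrB_Matrix A,     // adjacency matrix
    GrB_Index source        // starting vertex
) ;

// A caller that only wants levels passes NULL for the parent handle:
//      GrB_Vector level = NULL ;
//      LAGraph_BreadthFirstSearch (&level, NULL, A, source) ;
```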
## Connected Components
* Weakly Connected Components vs. Strongly Connected Components?
* Obviously makes no difference for undirected graphs. For a directed graph -> ?
* What if matrix A is unsymmetric but the graph is labeled as undirected?
* Sanity check: should be possible to turn this off (e.g., benchmarking), in which case it becomes the user's responsibility.
Options for returning the connected components:
* `GrB_Vector *components`, with the component ids for each of the vertices (even if we had a `GrB_Array`, this would be a `GrB_Vector`)
* `GrB_Matrix *Components`, a boolean k-by-n projection matrix (`C`). This can be converted using a utility function that does a simple extraction step. This matrix can be used for contraction, e.g., to compute the component graph using `C * G * C'` (see the sketch below).
* `GrB_Matrix *Permutation`, a boolean n-by-n permutation matrix (`P`). Then `Gnew = P * G * P'` is block diagonal.
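A hedged sketch of the contraction mentioned above, computing the component (quotient) graph from the boolean k-by-n projection matrix `C` with two `GrB_mxm` calls (uses the predefined boolean semiring and descriptor names from the v1.3 C API; error handling omitted):

```
// Illustrative only: Cgraph = C * G * C' over the boolean LOR.LAND semiring,
// so Cgraph(i,j) is present whenever any edge of G connects components i and j.
GrB_Matrix T = NULL, Cgraph = NULL ;
GrB_Index k, n ;
GrB_Matrix_nrows (&k, C) ;
GrB_Matrix_ncols (&n, C) ;

GrB_Matrix_new (&T, GrB_BOOL, k, n) ;
GrB_Matrix_new (&Cgraph, GrB_BOOL, k, k) ;

// T = C * G
GrB_mxm (T, NULL, NULL, GrB_LOR_LAND_SEMIRING_BOOL, C, G, NULL) ;
// Cgraph = T * C'   (transpose the second input via the descriptor)
GrB_mxm (Cgraph, NULL, NULL, GrB_LOR_LAND_SEMIRING_BOOL, T, C, GrB_DESC_T1) ;

GrB_Matrix_free (&T) ;
```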
## Next up
* BC variants (exact/approximate, vertex/edge?):
* do we need a random number generator or a random node sampler?
* maybe it is out of scope
* in any case, our algorithms should be deterministic (e.g., with a fixed seed) unless the user specifically asks for non-deterministic computation
<file_sep>/minutes/2020-06-17.md
# LAGraph Working Group Meeting Minutes - June 17, 2020
## Attendees
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [X] <NAME>
## Agenda and Minutes
**Agenda** by <NAME>
**Minutes** by <NAME>
- [X] polymorphic types and function signatures
> Windows Visual Studio does not support polymorphic functions at all in C. MATLAB is compiled on Windows with Visual Studio C. So a spec for LAGraph that requires polymorphic function signatures excludes a potentially important market for LAGraph. You can use gcc with cygwin or even the Intel compilers on Windows to get around this problem. But it doesn't matter since, as stated above, MATLAB uses Visual Studio (as do countless other programmers working with Windows).
- [X] variable argument lists in function signatures
> In C, you cannot query the number of arguments. Variable argument lists must distinguish cases by argument types early in the list. This is why you can't have a function that comes in a form without arguments and a form with arguments (as we found with the ill-fated GrB_wait() )
> Consequently, we might want to add a suffix to all the function names. For example, we could add a suffix "_adv" for the advanced mode.
> It was also pointed out that in a well-designed API, a knowledgeable user should be able to guess the right arguments to any given function. That means rigid consistency across the API is critical. This goal complicates distinguishing between functions by their argument lists alone.
- [X] What do we call the different modes
> We don't know what we're going to call the different modes. Suggestions include easy mode, Expert mode, advanced mode, or implementation mode (impl). That last one follows since in all likelihood, the simple/easy mode will call the advanced mode functions internally; suggesting that instead of "advanced" maybe they could be called "impl" mode.
> We did not reach closure on what names to use for the two modes.
> But we did agree that we need two different namespaces distinguished by function names. We'll just have to figure out those names later.
- [X] descriptors?
> Do we need a descriptor in LAGraph? The cases from the GraphBLAS descriptor don't really apply. We could have a few, such as "ignore edge weights" or "treat graph as undirected".
> One option is to have a descriptor on advanced mode functions but not on the basic or easy case.
> And that brought up an interesting conversation. If we added a descriptor only for the advanced-mode functions and provided that descriptor as the first argument, then we might just be able to use function polymorphism (see the `_Generic` sketch after this list).
- [X] Function names
> It was suggested that we didn't need a different name/suffix for the advanced-mode functions. They will always imply some algorithm variant that they implement. We could just use that algorithm variant in the name, for example `LAGraph_bc_sourced()` or `LAGraph_bc_randomized()`.
> Based on the GAP benchmarking experiments, there are algorithm variants that just work better for different graphs so there is a reason to have different algorithms in the same library. It is still an open question on if we want to specify the algorithm variant with the name of the function? Or do we want to put the algorithm selection in a descriptor? The descriptor approach gives us the flexibility to add new algorithms over time without changing the function signature. It could be an enum or maybe even a string.
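A hedged sketch of the descriptor-first polymorphism idea from the discussion above, mirroring the macro in `old/LAGraph_June17_2020.h` (the names are hypothetical; note that MSVC's C mode, mentioned earlier, has historically lacked `_Generic`):

```
// Illustrative only: C11 _Generic dispatch on the type of the first argument,
// so one macro name covers both the easy-mode call and the advanced-mode call
// that takes a descriptor as its first argument.
#define LAGraph_bc(arg1, ...)                               \
    _Generic ((arg1),                                       \
        LAGraph_descriptor : LAGraph_bc_advanced,           \
        default            : LAGraph_bc_easy                \
    ) (arg1, __VA_ARGS__)
```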
<file_sep>/minutes/2021-10-20.md
minutes for the Oct 20, 2021 LAGraph meeting
* use the v2.0 C API
* reduce # of include files
* need to create documents, pdf, user guide
* connected components needs work:
+ use LACC for GrB version (vanilla)
+ refine GxB version (lots of todos)
* need statement coverage metrics (Roi is working on this)
* need brutal memory tests
<file_sep>/minutes/2021-07-07.md
# LAGraph Working Group Meeting Minutes - July 7, 2021
## Attendees
- [ ] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [ ] <NAME>
## Minutes
Scott and Tim reviewed experimental code that was moved to "old" pending removal. All of it will be deleted. The next step for Scott is to make sure all of the experimental/test/ programs run and check answers against the master branch (and thereby create an initial regression test or two for each).
The bulk of the meeting was spent discussing the roles of LAGraph and GraphBLAS Spec when it comes to determining the responsibilities of something like LAGraph_BinRead/Write versus GrB_Matrix_import/export.
Erik Welch asserts that GrB_Matrix_import/export should only be concerned with the "local"/platform in-memory representations of the three arrays that these methods use.
LAGraph's binary formats should be concerned with cross-platform portability for now, which requires a more complete specification.
Pseudocode for illustrating the relationship between these:
```
LAGraph_BinWrite(GrB_Matrix A, GrB_Type A_type, char const header[256], bool compress_flag, FILE *f):
// get the size of the GrB arrays (in elements)
GrB_Index n_indptr, n_indices, n_values;
GrB_Matrix_exportSize(&n_indptr, &n_indices, &n_values, GrB_CSR_FORMAT, A);
// Allocate enough storage to hold the arrays and export data
GrB_Index *indptr, *indices;
<A_type> *values; // details omitted about how to manage types
indptr = malloc( n_indptr*sizeof(GrB_Index));
indices = malloc(n_indices*sizeof(GrB_Index));
values = mallocValues(n_values, A_type);
GrB_Matrix_export(indptr, indices, values, GrB_CSR_FORMAT, A);
// Output information (note: network order as big-endian is a defacto standard, but most x86 systems are not)
n = A.nrows()
m = A.ncols()
nnz = A.nvals()
assert(n+1 == n_indptr)
assert(nnz == n_indices)
assert(nnz == n_values)
// assume binwrite has the logic of converting endianness when necessary
asciiwrite(header, 256);
binwrite_uint64(get_type_enum(A_type));
binwrite_uint64(get_type_size(A_type));
binwrite_uint64(get_format_enum(GrB_CSR_FORMAT));
binwrite_uint64(n);
binwrite_uint64(m);
binwrite_uint64_array(indptr, n_indptr);
binwrite_uint64_array(indices, n_indices);
binwrite_byte_array(values, n_values, sizeof(A_type)); // not sure what to do here.
```<file_sep>/minutes/2020-10-21.md
# LAGraph Working Group Meeting Minutes - October 21, 2020
## Attendees
- [X] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [X] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [X] <NAME>
## Minutes
### Discussion on submatrices
Scott's and Tim D's suggestion to Gabor's [issue](https://github.com/GraphBLAS/LAGraph/issues/83): it's probably better to use extract submatrix and also extract the matrix representing the outgoing edges from it.
### Discussion on direction optimization
For LAGraph and any GraphBLAS library, it's important to know whether the underlying implementation stores the matrix by-row, by-column or both.
Then, it could perform direction optimization, i.e., push/pull, similar to the idea in [GraphBLAST](https://arxiv.org/pdf/1908.01407.pdf).
The API should follow an OpenGL-like extension mechanism, which allows libraries like LAGraph to query for "vendor-specific" extensions (which may later make their way into the standard API).
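One concrete example of the kind of vendor-specific query discussed above is SuiteSparse:GraphBLAS's `GxB` option interface, shown here only as a hedged illustration of what a standardized mechanism might expose (error handling omitted):

```
// SuiteSparse-specific extension (GxB), illustrative of the query LAGraph would need.
GxB_Format_Value fmt ;
GxB_Matrix_Option_get (A, GxB_FORMAT, &fmt) ;
if (fmt == GxB_BY_ROW)
{
    // row-wise (push) traversal is cheap
}
else // fmt == GxB_BY_COL
{
    // column-wise (pull) traversal is cheap
}
```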
<file_sep>/minutes/0000-00-00.md
# LAGraph Working Group Meeting Minutes - Month Day, Year
## Attendees
- [ ] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [ ] <NAME>
- [ ] <NAME>
## Agenda and Minutes
**Agenda** by ___
**Minutes** by ___
- [ ] Item 1
> Meeting discussion on Item 1
- [ ] Item 2
> Meeting discussion on Item 2
|
e1e4274c5e91ab5eca08ff24e1a796d8d0cec394
|
[
"Markdown",
"C",
"Makefile"
] | 50
|
Markdown
|
GraphBLAS/LAGraph-Working-Group
|
d9a8b0aac25829deeb75cb87aa6b7f3d3cdfa9bd
|
304a459accfdcceb3c281cd865da457c13627558
|
refs/heads/main
|
<repo_name>Developer-Prince/Stock-Price-Predictions-using-LSTM-<file_sep>/README.md
# Stock-Price-Predictions-using-LSTM-
In this project we use an LSTM model to predict the Google stock price for the 20 financial days of January 2017, training the model on five years of data from 2011 to 2016.
We use 4 LSTM layers, each followed by 20% dropout.
<file_sep>/main.py
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.preprocessing import MinMaxScaler
from keras.models import Sequential
from keras.layers import LSTM , Dropout , Dense
# Importing Data
dataset_train = pd.read_csv('./Dataset/Google_Stock_Price_Train.csv')
training_set = dataset_train.iloc[: , 1:2].values
dataset_test = pd.read_csv('./Dataset/Google_Stock_Price_Test.csv')
test_set = dataset_test.iloc[: , 1:2].values
# concatenate train and test 'Open' prices so the 60 days before each test day are available as inputs
dataset_total = pd.concat((dataset_train['Open'] , dataset_test['Open']), axis = 0)
# preprocessing data / Feature scaling
sc = MinMaxScaler()
training_set_scaled = sc.fit_transform(training_set)
# data structure making timestamp 60 and 1 output
X_train = []
y_train = []
for i in range(60 , len(dataset_train)):
X_train.append(training_set_scaled[i-60:i , 0])
y_train.append(training_set_scaled[i,0])
X_train = np.array(X_train)
y_train = np.array(y_train)
X_train = np.reshape(X_train ,(X_train.shape[0],X_train.shape[1] , 1))
# Building the model
regressor = Sequential([
LSTM(units = 50 , return_sequences = True , input_shape = (X_train.shape[1] , 1)),
Dropout(0.2),
LSTM(units = 50 , return_sequences = True),
Dropout(0.2),
LSTM(units = 50 , return_sequences = True),
Dropout(0.2),
LSTM(units = 50),
Dropout(0.2),
Dense(1)
])
regressor.compile(optimizer ='adam', loss = 'mean_squared_error')
regressor.fit(X_train , y_train , epochs = 100 , batch_size = 32)
# Predictions
inputs = dataset_total[len(dataset_total)-len(dataset_test) - 60:].values
inputs = inputs.reshape(-1,1)
scaled_inputs = sc.transform(inputs)
X_test = []
for i in range(60 ,80):
X_test.append(scaled_inputs[i-60:i,0])
X_test = np.array(X_test)
X_test = np.reshape(X_test ,( X_test.shape[0] , X_test.shape[1], 1))
predicted_stock_price = regressor.predict(X_test)
predicted_stock_price = sc.inverse_transform(predicted_stock_price)
real_stock_price = test_set
# Visualising the result
plt.figure(figsize = (12,10))
plt.plot(real_stock_price , color = 'red' , label = 'Real Stock Price')
plt.plot(predicted_stock_price , color = 'blue' , label = 'Predicted Stock Price')
plt.legend()
plt.xlabel('Time')
plt.ylabel('Stock Price')
plt.title('Real vs Predicted Stock Price Using LSTM ')
plt.show()
|
863b77c4a0aa90d60fb1b039e4e64944f918ead9
|
[
"Markdown",
"Python"
] | 2
|
Markdown
|
Developer-Prince/Stock-Price-Predictions-using-LSTM-
|
b095964f2b0876b0f965b9cb4c06ad3f0e0cb622
|
edb04041a55c7075ba3c92be21a92e0ef61a6018
|
refs/heads/master
|
<file_sep>import * as React from 'react';
import {View,Image,Text} from 'react-native';
import { createDrawerNavigator } from '@react-navigation/drawer';
import { createStackNavigator } from '@react-navigation/stack';
import { NavigationContainer } from '@react-navigation/native';
import Drawerstack from './src/screens/Drawerstack';
import Scroll from './src/screens/Scroll';
import Valida from './src/screens/Valida';
import Lineargra from './src/screens/Lineargra';
import Drawernav from './src/screens/Drawernav';
import Playvideo from './src/screens/playVideo/Playvideo';
const Stack = createStackNavigator();
const Drawer = createDrawerNavigator();
function DrawerView(){
return(
<Drawer.Navigator
drawerContent={props=><Scroll{...props}/>}>
<Drawer.Screen name="Drawerstack" component={Drawerstack}
options={{ headerShown: false }} />
</Drawer.Navigator>
);
}
function App(){
return(
<NavigationContainer>
<Stack.Navigator>
<Stack.Screen name="Playvideo" component={Playvideo}
options={{ headerShown: false }} />
<Stack.Screen name="Drawernav" component={Drawernav}
options={{ headerShown: false }} />
<Stack.Screen name=" Valida" component={ Valida}
options={{ headerShown: false }} />
<Stack.Screen name="Drawerstack" component={DrawerView}
options={{ headerShown: false }} />
</Stack.Navigator>
</NavigationContainer>
);
}
export default App;
<file_sep>import * as React from 'react';
import {View, Text, Image} from 'react-native';
import {Button} from 'react-native-elements';
function Profile({navigation}){
return(
<View style={{flex:1}}>
<Text style={{alignSelf:"center",marginTop:150,fontSize:20}}> WELCOME</Text>
<Button
title="tq for sign in"
buttonStyle={{backgroundColor:"red",marginTop:50}}
containerStyle={{width:150,alignSelf:"center"}}
// onPress={()=>navigation.navigate('Albam')}
/>
</View>
)
}
export default Profile;<file_sep>import * as React from 'react'
import {View,Text,Image} from 'react-native'
import { TextInput } from 'react-native-gesture-handler';
import AntDesign from 'react-native-vector-icons/AntDesign';
import {Button} from 'react-native-elements';
function Enterotp(){
return(
<View style={{flex:1,marginLeft:38,marginTop:10}}>
<Image source={require('../assets/lovebirds.jpg')}
style={{width:50,height:50,marginTop:30}} />
<Text style={{fontSize:30,marginTop:5, fontWeight: 'bold'}}>Kirill</Text>
<Text style={{fontSize:18,color:`#696969`,marginTop:10}}>Enter OTP</Text>
<View
style={{flexDirection:"row", marginTop:80}}>
<Button
title="1"
buttonStyle={{backgroundColor:`#ffe4c4`}}
containerStyle={{width:50,height:50,marginLeft:20}}
titleStyle={{color:"blue"}}
/>
<Button
title="2"
buttonStyle={{backgroundColor:`#ffe4c4`}}
containerStyle={{width:50,height:50,marginLeft:20}}
titleStyle={{color:"blue"}}
/>
<Button
title="7"
buttonStyle={{backgroundColor:`#ffe4c4`}}
containerStyle={{width:50,height:50,marginLeft:20}}
titleStyle={{color:"blue"}}
/>
<Button
title="8"
buttonStyle={{backgroundColor:`#ffe4c4`}}
containerStyle={{width:50,height:50,marginLeft:20}}
titleStyle={{color:"blue"}}
/>
</View>
<View style={{marginTop:90,marginRight:30}}>
<Text style={{fontSize:12,color:`#6495ed`,alignSelf:"center"}}>
Code sent to your moblie number
</Text>
<Text style={{fontSize:12,color:`#6495ed`,alignSelf:"center"}}>
ends with *****4142</Text>
<Text style={{fontSize:12,color:`#6495ed`,alignSelf:"center",marginTop:25}}>
This code will expire in 10 minutes
</Text>
<Text style={{fontSize:12,color:`#dc143c`,alignSelf:"center",marginTop:25}}>
Resend Code
</Text>
</View>
<Text style={{color:'#a9a9a9',marginTop:280,fontSize:12,marginLeft:13}}> By continuing you confirm that you agree with our</Text>
<Text style={{fontSize:14,color:`#696969`,marginLeft:100, marginTop:5}}> Terms & conditions</Text>
<Text style={{
borderBottomColor : '#a9a9a9',
borderBottomWidth: 3,marginLeft:120,marginRight:150
}}>
</Text>
</View>
)
};
export default Enterotp;<file_sep>import * as React from 'react';
import {View,Image,Text,StyleSheet} from 'react-native';
import {Button} from 'react-native-elements';
import { Icon } from 'react-native-vector-icons';
function Home({navigation}){
return(
// <Icon style={ name="heartbeat"color="#00aced"} />
<View style={{flex:1,backgroundColor:"#ff7f50"}}>
<Text style={styles.text}> heartlink</Text>
<Button
title="SIGN UP"
buttonStyle={{backgroundColor:"#ff7f50",borderRadius:20,borderColor:"white"}}
containerStyle={{width:250,height:40,marginTop:40,alignSelf:"center",color:"white"}}
onPress={()=>navigation.navigate('Profile')}
>
</Button>
<Button
title="LOGIN"
buttonStyle={{backgroundColor:"white"}}
containerStyle={{width:280,height:40,marginTop:20,alignSelf:"center",borderRadius:20}}
onPress={()=>navigation.navigate('Albam')}
titleStyle={{color:"#ff7f50"}}
>
</Button>
</View>
);
}
const styles=StyleSheet.create({
text:{color:"white",alignSelf:"center",marginTop:150,fontSize:40,marginBottom:100}
});
export default Home
<file_sep>import React, { PureComponent } from 'react';
import { AppRegistry, StyleSheet, Text, TouchableOpacity, View } from 'react-native';
import { RNCamera } from 'react-native-camera';
class Camerarn extends PureComponent{
render(){
return(
<View>
<Text> hello world</Text>
</View>
)
}
}

export default Camerarn;<file_sep>import * as React from 'react';
import{View,Text,image} from 'react-native';
import {Button} from 'react-native-elements';
function Albam({navigation}){
return(
<View style={{flex:1}}>
<Text style={{alignSelf:"center"}}> HOME PAGE</Text>
<Button
title="chandana"
buttonStyle={{backgroundColor:"red",marginTop:10}}
containerStyle={{width:150,alignSelf:"center"}}
onPress={()=>navigation.navigate('Profile')}
/>
</View>
)
}
export default Albam;<file_sep>import * as React from 'react';
import { Image, View, Text, StyleSheet, TextInput, KeyboardAvoidingView, ScrollView, Dimensions } from 'react-native';
import MaterialCommunityIcons from "react-native-vector-icons/MaterialCommunityIcons";
import Entypo from "react-native-vector-icons/Entypo";
import FontAwesome from "react-native-vector-icons/FontAwesome";
import LinearGradient from 'react-native-linear-gradient';
const device_height = Dimensions.get(`window`).height - 24
function Lineargra() {
return (
<KeyboardAvoidingView
behavior="height"
style={{ flex: 1 }}
>
<ScrollView>
<View style={{ height: device_height }}>
<LinearGradient colors={['#051937', '#002961', '#00388e', '#0047bc', '#1254eb']}
style={styles.linearGradient}>
<Text style={styles.buttonText}>
SIGN UP
</Text>
<View
style={{
flex: 0.7,
backgroundColor: "white",
marginLeft: 20,marginRight:20,marginTop:10,
borderBottomEndRadius: 40,
borderBottomStartRadius: 180,
borderTopEndRadius: 40,
borderTopStartRadius: 40
}}>
<View>
<Text style={{ marginTop: 20, marginLeft: 30, fontSize: 15 }}>NAME</Text>
</View>
<View style={{ marginLeft: 30, flexDirection: 'row' }}>
<MaterialCommunityIcons
name="account-outline" color={'#696969'} size={26} style={{ marginTop: 10 }} />
<TextInput
style={{
borderBottomColor: '#a9a9a9',
marginRight: 50
}}>
</TextInput>
</View>
<View style={{ height: 2, width: 280, backgroundColor: "grey", marginLeft: 30 }}></View>
<View>
<Text style={{ marginTop: 20, marginLeft: 30, fontSize: 15 }}>EMAIL</Text>
</View>
<View style={{ marginLeft: 30, flexDirection: "row" }}>
<MaterialCommunityIcons
name="email-outline" color={'#696969'} size={26} style={{ marginTop: 10 }} />
<TextInput
style={{
borderBottomColor: '#a9a9a9',
}}>
</TextInput>
</View>
<View style={{ height: 2, width: 280, backgroundColor: "grey", marginLeft: 30 }}></View>
<View>
<Text style={{ marginTop: 20, marginLeft: 30, fontSize: 15 }}>PASSWORD</Text>
</View>
<View style={{ marginLeft: 30, flexDirection: "row" }}>
<MaterialCommunityIcons
name="lock-outline" color={'#696969'} size={26} style={{ marginTop: 10 }} />
<TextInput
style={{
borderBottomColor: '#a9a9a9',
}}>
</TextInput>
</View>
<View style={{ height: 2, width: 280, backgroundColor: "grey", marginLeft: 30 }}></View>
<View style={{flexDirection:"row"}}>
<MaterialCommunityIcons
name="check-box-outline" color={'#780664'} size={26} style={{ marginTop: 10 ,marginLeft:30}} />
<Text style={{fontSize:15,marginTop:13}}>Agree to our</Text>
<Text style={{fontSize:15,marginTop:13,color:'#780664'}}>Terms & Conditions</Text>
</View>
</View>
<View
style={{
flex: 0.22,
backgroundColor: "white",
marginLeft: 30,marginTop:10,marginRight:15,
borderBottomEndRadius: 40,
borderBottomStartRadius: 40,
borderTopEndRadius: 180,
borderTopStartRadius: 40
}}>
<Text style={{fontSize:20,alignSelf:"center",marginTop:20}}>OR </Text>
<Text style={{fontSize:20,alignSelf:"center"}}>Sign Up With Social Media </Text>
<View style={{flexDirection:"row",marginTop:10,justifyContent:"space-evenly",width:150, alignSelf:"center"}}>
<MaterialCommunityIcons
name="facebook" color={'#051871'} size={35} />
<FontAwesome
name="google-plus-official" color={'#A6080F'} size={35} />
<Entypo
name="twitter-with-circle" color={'#4C7FF2'} size={35} />
</View>
</View>
</LinearGradient>
</View>
</ScrollView>
</KeyboardAvoidingView>
);
}
const styles = StyleSheet.create({
linearGradient: {
flex: 1,
},
buttonText: {
fontSize: 15, alignSelf: "center", color: "white", marginTop: 20
}
});
export default Lineargra;
<file_sep>import * as React from 'react'
import {View,Text,Image} from 'react-native'
import { TextInput } from 'react-native-gesture-handler';
import AntDesign from 'react-native-vector-icons/AntDesign';
import {Button} from 'react-native-elements';
function Fullname(){
return(
<View style={{flex:1,marginLeft:38,marginTop:10}}>
<Image source={require('../assets/lovebirds.jpg')}
style={{width:50,height:50,marginTop:30}} />
<Text style={{fontSize:25,marginTop:5, fontWeight: 'bold'}}>Kirill</Text>
<Text style={{fontSize:18,color:`#696969`}}> FullName</Text>
<View
style={{flexDirection:"row",height:40,width:300, backgroundColor:`#ffe4c4`,marginTop:80,borderRadius:8}}>
<TextInput
placeholder="Enter Name"
style={{paddingLeft:15}}
/>
<AntDesign
name="check"
size={13}
style={{color:"green",marginTop:13,marginLeft:170}}
/>
</View>
<Button
title="Continue"
buttonStyle={{backgroundColor:`#ff7f50`,marginTop:20,borderRadius:8}}
containerStyle={{width:300,height:50}}
/>
<Text style={{color:'#a9a9a9',marginTop:300,fontSize:12,marginLeft:13}}> By continuing you confirm that you agree with our</Text>
<Text style={{fontSize:14,color:`#696969`,marginLeft:100, marginTop:5}}> Terms & conditions</Text>
<Text style={{
borderBottomColor : '#a9a9a9',
borderBottomWidth: 2,marginLeft:120,marginRight:150
}}>
</Text>
</View>
)
};
export default Fullname;<file_sep>import * as React from 'react'
import {View,Text,Image} from 'react-native'
import { TextInput } from 'react-native-gesture-handler';
import FontAwesome5 from 'react-native-vector-icons/FontAwesome5';
import AntDesign from 'react-native-vector-icons/AntDesign';
import {Button} from 'react-native-elements';
function Matirymoni(){
return(
<View style={{flex:1,marginLeft:38,marginTop:10}}>
<Image source={require('../assets/lovebirds.jpg')}
style={{width:50,height:50,marginTop:30}} />
<Text style={{fontSize:25,marginTop:5, fontWeight: 'bold'}}>Kirill</Text>
<Text style={{fontSize:18,color:`#696969`}}> Sign in to continue</Text>
<View
style={{flexDirection:"row",height:40,width:300, backgroundColor:`#ffe4c4`,marginTop:70,borderRadius:8}}>
<TextInput
placeholder="Mobile Number"
style={{paddingLeft:15}}
/>
<AntDesign
name="check"
size={13}
style={{color:"green",marginTop:13,marginLeft:160}}
/>
</View>
<Button
title="Continue"
buttonStyle={{backgroundColor:`#ff7f50`,marginTop:20,borderRadius:8}}
containerStyle={{width:300,height:50}}
/>
<Button
title="LOGIN WITH INSTAGRAM"
buttonStyle={{backgroundColor:`#dc143c`,borderRadius:8}}
containerStyle={{width:300,height:50,marginTop:190}}
/>
<Button
title="LOGIN WITH FEACBOOK"
buttonStyle={{backgroundColor:"blue",borderRadius:8}}
containerStyle={{width:300,height:50,marginTop:10}}
/>
<Text style={{color:'#a9a9a9',marginTop:10,fontSize:12,marginLeft:13}}> By continuing you confirm that you agree with our</Text>
<Text style={{fontSize:14,color:`#696969`,marginLeft:100, marginTop:5}}> Terms & conditions</Text>
<Text style={{
borderBottomColor : '#a9a9a9',
borderBottomWidth: 2,marginLeft:120,marginRight:150
}}>
</Text>
</View>
)
};
export default Matirymoni;<file_sep>import React from 'react';
import { View, Text, ActivityIndicator, TouchableOpacity, SafeAreaView, KeyboardAvoidingView, ScrollView, Dimensions } from 'react-native'
import { Formik } from "formik";
import * as Yup from 'yup';
import { Button, Input } from 'react-native-elements';
const device_height = Dimensions.get(`window`).height - 24
const loginSchema = Yup.object().shape({
firstname: Yup
.string()
.label("firstname")
.required('Enter your firstname')
.min(4, "Minimum 4 characters"),
lastname: Yup
.string()
.label("lastname")
.required('Enter your lastname')
.min(4, "Minimum 4 characters"),
email: Yup
.string()
.trim()
.label("Email")
.email("Invalid email")
.required('Email is required'),
mobile: Yup
.string()
.required('Mobile number is required')
.label('Mobile')
.min(10, 'Mobile number must be at least 10 digits')
.max(10, 'Mobile number is not valid'),
password: Yup
.string()
.label("Password")
.min(6, "Minimum 6 characters")
.max(12, "Maximum 12 characters only")
.required('Password is required'),
confirmPassword: Yup
.string()
.label("Confirm Password")
.test("passwords-match", "Password did not match", function (value) {
return this.parent.password === value})
.required('Confirm password is required'),
address: Yup
.string()
.trim()
.label("address")
//.email("Invalid email")
.required('Enter your address details')
.min(20, "Minimum 20 characters"),
});
// handleLogin=(values) =>{
// }
function Valida() {
return (
<KeyboardAvoidingView
behavior="height"
style={{ flex: 1 }}
>
<ScrollView>
<View style={{ height: device_height }} >
<Text style={{ alignSelf: 'center', fontSize: 20, fontWeight: 'bold',marginTop:10 }}>ff</Text>
<View style={{ flex: 1, width: 300, alignSelf: 'center' }}>
<Formik
initialValues={{ firstname: "", lastname: "", email: "", mobile: "",
password: "", confirmPassword: "", address: "" }}
onSubmit={(values, actions) => { }}
validationSchema={loginSchema}
>
{formikProps => (
<View style={{ flex: 1 }}>
<View style={{ flex: 0.4, marginTop: 10 }}>
<Input
placeholder='Firstname'
inputContainerStyle={{ width: 300, alignSelf: 'center', borderColor: '#ccc', borderWidth: 1, marginTop: 10 }}
onChangeText={formikProps.handleChange("firstname")}
onBlur={formikProps.handleBlur("firstname")}
/>
<Text style={{ color: 'red', marginTop: -15 }}>{formikProps.touched.firstname && formikProps.errors.firstname}</Text>
</View>
<View style={{ flex: 0.4, marginTop: 30 }}>
<Input
placeholder='LastName'
inputContainerStyle={{ width: 300, alignSelf: 'center', borderColor: '#ccc', borderWidth: 1, marginTop: 10 }}
onChangeText={formikProps.handleChange("lastname")}
onBlur={formikProps.handleBlur("lastname")}
/>
<Text style={{ color: 'red', marginTop: -15 }}>{formikProps.touched.lastname && formikProps.errors.lastname}</Text>
</View>
<View style={{ flex: 0.4, marginTop: 30 }}>
<Input
placeholder='email'
inputContainerStyle={{ width: 300, alignSelf: 'center', borderColor: '#ccc', borderWidth: 1, marginTop: 10 }}
onChangeText={formikProps.handleChange("email")}
onBlur={formikProps.handleBlur("email")}
/>
<Text style={{ color: 'red', marginTop: -15 }}>{formikProps.touched.email && formikProps.errors.email}</Text>
</View>
<View style={{ flex: 0.4, marginTop: 30 }}>
<Input
placeholder='Mobile '
keyboardType='number-pad'
// maxLength={11}
inputContainerStyle={{ width: 300, alignSelf: 'center', borderColor: '#ccc', borderWidth: 1, marginTop: 10 }}
onChangeText={formikProps.handleChange("mobile")}
onBlur={formikProps.handleBlur("mobile")}
/>
<Text style={{ color: 'red', marginTop: -15 }}>{formikProps.touched.mobile && formikProps.errors.mobile}</Text>
</View>
<View style={{ flex: 0.4, marginTop: 40 }}>
<Input
placeholder='Password'
//secureTextEntry={true}
inputContainerStyle={{ width: 300, alignSelf: 'center', borderColor: '#ccc', borderWidth: 1 }}
onChangeText={formikProps.handleChange("password")}
onBlur={formikProps.handleBlur("password")}
/>
<Text style={{ color: 'red', marginTop: -15 }}>{formikProps.touched.password && formikProps.errors.password}</Text>
</View>
<View style={{ flex: 0.4, marginTop: 30 }}>
<Input
placeholder='Confirm password'
//secureTextEntry={true}
inputContainerStyle={{ width: 300, alignSelf: 'center', borderColor: '#ccc', borderWidth: 1, marginTop: 10 }}
onChangeText={formikProps.handleChange(" confirmPassword")}
onBlur={formikProps.handleBlur(" confirmPassword")}
/>
<Text style={{ color: 'red', marginTop: -15 }}>
{formikProps.touched.confirmPassword && formikProps.errors.confirmPassword}</Text>
</View>
<View style={{ flex: 0.4, marginTop: 30 }}>
<Input
placeholder='address'
inputContainerStyle={{ width: 300, alignSelf: 'center', borderColor: '#ccc', borderWidth: 1, marginTop: 10 }}
onChangeText={formikProps.handleChange("address")}
onBlur={formikProps.handleBlur("address")}
/>
<Text style={{ color: 'red', marginTop: -15 }}>{formikProps.touched.address && formikProps.errors.address}</Text>
</View>
<View style={{ flex: 1 }}>
{formikProps.isSubmitting ? (
<ActivityIndicator size="large"
style={{ alignSelf: 'center' }} />
) : (
<Button
title='Login'
buttonStyle={{ width: 300, alignSelf: 'center', marginTop: 50 }}
onPress={formikProps.handleSubmit}
/>
)}
</View>
</View>
)}
</Formik>
</View>
</View>
</ScrollView>
</KeyboardAvoidingView>
)
}
export default Valida;
|
a706ac42038c2c22f7c0b6de8beda3b5e9522596
|
[
"JavaScript"
] | 10
|
JavaScript
|
MushamChandana/firstproject
|
1b2da21f5c9b8cb42a86d2e16c776ac951d7653b
|
da84c828a992fef831c42afeca2dc0c1e28c4977
|
refs/heads/master
|
<file_sep>import json
from flask import Flask, Response, request
from flask_restful import Api, Resource
from werkzeug.exceptions import HTTPException
# Hard coded auth tokens for the sake of simplicity
AUTH_TOKENS = ['<KEY>',
'<KEY>',
'<KEY>',
'<KEY>',
'<KEY>']
HTTP_ERRORS = {
'InternalServerError': {
'message': 'Something went wrong',
'status': 500},
'NoStringProvidedError': {
'message': ('Request must contain string to be echoed in variable '
'\'str\''),
'status': 400},
'NotAuthorizedError': {
'message': ('Request must contain valid authorization token in '
'variable \'auth\''),
'status': 401},
}
class HelloAPI(Resource):
"""The purpose of this API is to be provide a simple response (as a test to
make sure the server is up)."""
def get(self):
data = {'Hello': 'Welcome to the Echo Room.'}
return Response(response=json.dumps(data),
status=200,
mimetype='application/json')
class EchoAPI(Resource):
"""This is the main ECHO API, providing 1 simple POST method that accepts a
JSON payload with 2 key-value pairs: 'auth', with an auth token to be
validated; and 'str', the string to be reflected back to the caller."""
AUTH_KEY = 'auth'
STR_KEY = 'str'
def post(self):
data = request.get_json()
try:
a = data[self.AUTH_KEY]
except (TypeError, KeyError):
raise NotAuthorizedError
# Making sure the auth token provided is valid
if a not in AUTH_TOKENS:
raise NotAuthorizedError
try:
s = data[self.STR_KEY]
except (TypeError, KeyError):
raise NoStringProvidedError
return Response(response=json.dumps({self.STR_KEY: s}),
status=200,
mimetype='application/json')
class InternalServerError(HTTPException):
pass
class NoStringProvidedError(HTTPException):
pass
class NotAuthorizedError(HTTPException):
pass
app = Flask(__name__)
api = Api(app, errors=HTTP_ERRORS)
api.add_resource(HelloAPI, '/', '/hello')
api.add_resource(EchoAPI, '/echo')
if __name__ == '__main__':
app.run(host='127.0.0.1',
port='5000',
ssl_context=('cert.pem', 'key.pem'),
debug=True,
)
<file_sep>import json
import random
import string
import unittest
from app import app, HTTP_ERRORS, AUTH_TOKENS
class ServerLiveTest(unittest.TestCase):
def setUp(self):
print('# SERVER LIVE TEST')
app.testing = True
self.app = app.test_client()
def test_server_is_active(self):
print('## Making sure server can be started...')
response = self.app.get('/')
self.assertEqual(response.status_code, 200)
class EchoApiTest(unittest.TestCase):
def setUp(self):
print('# ECHO API TEST')
app.testing = True
self.app = app.test_client()
self.auth_token = random.choice(AUTH_TOKENS)
def test_echo_different_strings(self):
print('## Testing different strings...')
payload = {'auth': self.auth_token}
strs = ['foobar', '123456', string.printable]
for s in strs:
payload['str'] = s
print(f'\tPayload: {payload}')
response = self.app.post('/echo',
data=json.dumps(payload),
content_type='application/json')
response = response.get_json()
self.assertEqual(response, {'str': s})
def test_echo_no_string(self):
print('## Testing when no string is provided...')
payload = {'auth': self.auth_token}
print(f'\tPayload: {payload}')
response = self.app.post('/echo',
data=json.dumps(payload),
content_type='application/json')
response = response.get_json()
self.assertEqual(response, HTTP_ERRORS['NoStringProvidedError'])
def test_echo_incorrect_key(self):
print('## Testing when incorrect string key is provided...')
keys = ['s', 'abc', '!@#', 'something else']
for k in keys:
payload = {'auth': self.auth_token, k: 'foobar'}
print(f'\tPayload: {payload}')
response = self.app.post('/echo',
data=json.dumps(payload),
content_type='application/json')
response = response.get_json()
self.assertEqual(response,
HTTP_ERRORS['NoStringProvidedError'])
def test_echo_mixed_keys(self):
print('## Testing when irrelevant keys are provided...')
payload = {'a': 'one',
'b': 'two',
'c': 'three',
'str': 'foobar',
'd': 'four',
'auth': self.auth_token,
}
print(f'\tPayload: {payload}')
response = self.app.post('/echo',
data=json.dumps(payload),
content_type='application/json')
response = response.get_json()
self.assertFalse('a' in response)
self.assertTrue('str' in response)
self.assertEqual(response['str'], 'foobar')
def test_echo_no_auth_token(self):
print('## Testing when no auth tokens are provided...')
payload = {'str': 'foobar'}
print(f'\tPayload: {payload}')
response = self.app.post('/echo',
data=json.dumps(payload),
content_type='application/json')
response = response.get_json()
self.assertEqual(response, HTTP_ERRORS['NotAuthorizedError'])
def test_echo_invalid_auth_token(self):
print('## Testing when invalid auth tokens are provided...')
payload = {'auth': 'baz', 'str': 'foobar'}
print(f'\tPayload: {payload}')
response = self.app.post('/echo',
data=json.dumps(payload),
content_type='application/json')
response = response.get_json()
self.assertEqual(response, HTTP_ERRORS['NotAuthorizedError'])
if __name__ == '__main__':
unittest.main()
<file_sep># Objective
Write a server that echoes back whatever string a client sent through an API
## Requirements
1. **The server is written in the language of your choice:**
The language of choice is Python.
2. **The server must have the ability to communicate with multiple clients:**
The framework chosen, Flask, provides native support for threading and
concurrent client connections.
3. **The source must live in a code repository with access to history of
commits:**
The source code is uploaded to
[GitHub](https://github.com/thehungryturnip/echo_room/)
4. **There must be unit tests to cover at least the API:**
Unit tests are implemented using Python's `unittest` module and included in
`./tests.py`.
5. **Discuss what good SLOs would look like to understand the operational
health:**
See below section on [Service Level Objectives](#service-level-objectives).
This is an area I have limited experince in and would be a focus of my
learning/upskilling given the opportunity.
## Bonuses
1. **You have a Makefile to easily build and demonstrate your server:**
The service is packaged with its own virtual environment. See
Installation, Run, and Test sections below.
2. **You have good quality documentation as part of your code:**
I aimed to have clearly written code that doesn't require too much
commenting, with enough docstrings and `print()` calls to provide enough context.
3. **Communication between the client and the server is encrypted and
authenticated:**
Encryption is provided through SSL and aunthetication is provided through
auth tokens.
4. **Prepare critical code paths for monitoring / SLIs:**
This is also an area I don't have much experience in and would be a focus of
my learning if given the opportunity.
# Installation, Run, and Test
1. Clone the repository:
> git clone https://github.com/thehungryturnip/echo_room
2. Create a virtual environment:
> python -m venv env
3. Activate the virtual environment:
> source env/bin/activate
4. Install dependencies:
> python -m pip install -r reqs.txt
5. The service can be run by executing the `./app.py` file (ctrl-c when done):
> python ./app.py
6. The tests can be run by executing the `./tests.py` file:
> python ./tests.py
7. Deactivate the virtual environment:
> deactivate
# Design
## Framework
The Flask framework, along with Flask RESTful, is used to implement this
service. It provides the basis for implementing APIs and provides native support
for processing requests in parallel.
Python's unittest module is used for unit testing.
## Encryption
SSL is used to secure the communication channel between the client and the
server. Self-signed certificates are used for this exercise.
## Authentication
API authentication tokens are used to confirm the identity of the API caller.
For simplicity, a database is not used for this exercise. Mechanisms for user
sign-up, password-based authentication, auth token generation, auth token
rotation, etc. are not implemented. Switching to session-key based
authentication after the authentication check is also not implemented.
## APIs
### Hello
This is a simple API that implements the GET request and simply responds with 1
key-value pair: `'Hello': 'Welcome to the Echo Room.'`. The intention is for
this to be a check that the server is up and running.
### Echo
This is the main API that implements the POST request. The expected syntax is as
follows:
{
auth: <authorization token>
str: <string to be reflected back to the caller>
}
The API validates that the authorization token is provided and valid, then
reflects the string passed in back to the caller in a response of this format:
{
str: <string reflected back to the caller>
}
## Service Level Objectives
I think of SLOs as what the consumer and risk management should be
experiencing/expecting, and then extrapolate to finer sub-SLOs we can measure and
improve on. e.g.
* Authenticating to the Echo API should take less than 500ms 99% of the time.
* The amount of time for the request hitting our load balancer to getting
to the authentication microservice should be less than 50ms.
* The amount of time for the microservice to process an authentication
request should be less than 100ms.
* The amount of time for the responses from microservices to exit our
perimeter should be less than 50ms.
* The authentication microservices should scale so that there's always more
than 10% capacity buffer/reserve.
* The disaster recovery infrastructure should be available 99.5% of the
time.
* The Echo API should return its response in less than 500ms 99% of the time.
* The amount of time for the request hitting our load balancer to getting
to the echo microservice should be less than 50ms.
* The amount of time for the microservice to process an echo request should be
less than 100ms.
* The amount of time for the responses from microservices to exit our
perimeter should be less than 50ms.
* The echo microservices should scale so that there's always more than 10%
capacity buffer/reserve.
* The disaster recovery infrastructure should be available 99.5% of the
time.
* (In the cases where the operations are more complex.) The string reflected
back by the Echo API should always be correct.
* The percentage of correct responses sent from the echo microservice
should be 100%.
* Revocation of authorization should take effect in less than 5 minutes.
* The time it takes for an auth token deletion request to be submitted to
when the auth token is removed from the database should be less than 3
minutes.
* 99% of malformed requests should not be processed. (We'll need to define
what the criteria for "malformed" is in this context.)
* 99.9% of requests with scripts in the request should not reach the echo
microservice. (Should be dropped by the web application firewall.)
* 100% of requests with payload larger than 1mb should be rejected by the
echo microservice.
## Trade-Offs
### Code Management
* In more robust development environments, feature branches should be created
to isolate development/troubleshooting work before committing and merging back
to the main branch.
* For a larger code base, both the `app.py` and `tests.py` files should be
broken up into smaller more organized modules instead of having them all sit
together.
* For a more robust code base, a more rigorous commenting and/or docstring
convention should be followed.
### Performance
* Flask by itself is not a production-ready framework and should be deployed to
a WSGI server/service such as Gunicorn, Apache, etc.
* Elastic scaling (with load balancers and/or distributed microservices) has
not been considered or tuned in this solution.
### Authentication
* The authentication mechanism should be split into its own subcomponent and be
more robust. Considerations include:
* Implement a username-/password-based authentication mechanism
* Implement sign-up mechanisms
* Implement method for generating and managing authentication tokens
* Store and manage authentication data in a database
* Switch to session tokens after authenticating the client so we don't need
to repeatedly pass auth tokens (unnecessary risk of exposure)
### Security
* In a proper implementation the private key would not be part of the
repository and should be managed separately.
* In a proper implementation the SSL certificate should be signed by a
certified CA.
* In a proper implementation, there would probably need to be more input
validation and encoding for the payload received to make sure it's of the
expected format and doesn't contain malicious code. (Especially if the input
is going to be stored and/or manipulated.)
* As part of input validation, the service should consider a cap to the size of
the string submitted to the service to prevent DDoS caused by attackers
submitting extremely large inputs.
* As part of request filtering and load balancing, the service (including the
infrastructure that sits in front of Flask such as a load balancer and/or a
web application firewall) should consider (reasonably) rate limiting
requests coming from specific sources, from specific authenticated users,
and/or of specific formats.
|
00044173a3e5a5f43f4bb93c27f8e2fee51acdc2
|
[
"Markdown",
"Python"
] | 3
|
Python
|
thehungryturnip/echo_room
|
d8fd3bde6e12b7eb1a1d35be8ccd7728deb35395
|
d483869d9c56d5a20f31d23e2f7edf69c12b36e2
|
refs/heads/master
|
<file_sep># -*- coding: utf-8 -*-
"""
/***************************************************************************
NextGIS WEB API
-------------------
begin : 2014-11-19
git sha : $Format:%H$
copyright : (C) 2014 by NextGIS
email : <EMAIL>
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
import os
import json
import requests
from base64 import b64encode
from requests.utils import to_native_string
from ngw_error import NGWError
from utils import log
UPLOAD_FILE_URL = '/api/component/file_upload/upload'
GET_VERSION_URL = '/api/component/pyramid/pkg_version'
class File2Upload(file):
#def __init__(self, path, callback):
def __init__(self, path):
file.__init__(self, path, "rb")
self.seek(0, os.SEEK_END)
self._total = self.tell()
self._readed = 0
self.seek(0)
#self._callback = callback
def __len__(self):
return self._total
def read(self, size):
data = file.read(self, size)
self._readed += len(data)
#self._callback(self._total, self._readed)
return data
def _basic_auth_str(username, password):
"""Returns a Basic Auth string."""
authstr = 'Basic ' + to_native_string(
b64encode(('%s:%s' % (username, password)).encode('utf-8')).strip()
)
return authstr
class NGWConnection(object):
AbilityBaseMap = range(1)
def __init__(self):
self.__server_url = None
self.__session = requests.Session()
self.__auth = ("", "")
def __init__(self, conn_settings):
self.__server_url = None
self.__session = requests.Session()
self.__auth = ("", "")
self.__proxy = None
self.set_from_settings(conn_settings)
self.__ngw_components = None
def set_from_settings(self, conn_settings):
self.server_url = conn_settings.server_url
self.set_auth(conn_settings.username, conn_settings.password)
if conn_settings.proxy_enable and conn_settings.proxy_host != "":
proxy_url = conn_settings.proxy_host
if conn_settings.proxy_port != "":
proxy_url = "%s:%s" % (proxy_url, conn_settings.proxy_port)
if conn_settings.proxy_user != "":
proxy_url = "%s:%s@%s" % (
conn_settings.proxy_user,
conn_settings.proxy_password,
proxy_url
)
self.__proxy = {
"http": proxy_url
}
@property
def server_url(self):
return self.__server_url
@server_url.setter
def server_url(self, value):
if isinstance(value, (str, unicode)):
self.__server_url = value.strip().rstrip('\\\/')
else:
self.__server_url = value
def set_auth(self, username, password):
# self.__session.auth = (username, password)
self.__auth = (username, password)
def get_auth(self):
# return self.__session.auth
return self.__auth
def __request(self, sub_url, method, params=None, **kwargs):
payload = None
if params:
payload = json.dumps(params)
if 'data' in kwargs:
payload = kwargs['data']
json_data = None
if 'json' in kwargs:
json_data = kwargs['json']
log(
"Request\nmethod: {}\nurl: {}\ndata: {}\njson:".format(
method,
self.server_url + sub_url,
payload,
json_data
)
)
req = requests.Request(method, self.server_url + sub_url, data=payload, json=json_data)
req.headers['Authorization'] = _basic_auth_str(self.__auth[0], self.__auth[1])
prep = self.__session.prepare_request(req)
try:
resp = self.__session.send(prep, proxies=self.__proxy)
except requests.exceptions.ConnectionError:
raise NGWError(NGWError.TypeRequestError, "Connection error", req.url)
except requests.exceptions.RequestException as e:
log( "Response\nerror {}: {}".format(type(e), e) )
raise NGWError(NGWError.TypeRequestError, "%s" % type(e), req.url)
if resp.status_code == 502:
log( "Response\nerror status_code 502" )
raise NGWError(NGWError.TypeRequestError, "Response status code is 502", req.url)
if resp.status_code / 100 != 2:
log("Response\nerror status_code {}\nmsg: {!r}".format(resp.status_code, resp.content))
raise NGWError(NGWError.TypeNGWError, resp.content, req.url)
try:
json_response = resp.json()
except:
log("Response\nerror response JSON parse")
raise NGWError(NGWError.TypeNGWUnexpectedAnswer, "", req.url)
return json_response
def get(self, sub_url, params=None, **kwargs):
return self.__request(sub_url, 'GET', params, **kwargs)
def post(self, sub_url, params=None, **kwargs):
return self.__request(sub_url, 'POST', params, **kwargs)
def put(self, sub_url, params=None, **kwargs):
return self.__request(sub_url, 'PUT', params, **kwargs)
def patch(self, sub_url, params=None, **kwargs):
return self.__request(sub_url, 'PATCH', params, **kwargs)
def delete(self, sub_url, params=None, **kwargs):
return self.__request(sub_url, 'DELETE', params, **kwargs)
def get_upload_file_url(self):
return UPLOAD_FILE_URL
#def upload_file(self, filename, callback):
def upload_file(self, filename):
try:
#with File2Upload(filename, callback) as fd:
with File2Upload(filename) as fd:
upload_info = self.put(self.get_upload_file_url(), data=fd)
return upload_info
except requests.exceptions.RequestException, e:
raise NGWError(NGWError.TypeRequestError, e.message.args[0], self.get_upload_file_url())
def download_file(self, url):
req = requests.Request('GET', self.server_url + url)
prep = self.__session.prepare_request(req)
try:
resp = self.__session.send(prep, stream=True)
except requests.exceptions.RequestException, e:
raise NGWError(NGWError.TypeRequestError, e.message.args[0], req.url)
if resp.status_code / 100 != 2:
raise NGWError(NGWError.TypeNGWError, resp.content, req.url)
return resp.content
def get_ngw_components(self):
if self.__ngw_components is None:
try:
self.__ngw_components = self.get(GET_VERSION_URL)
except requests.exceptions.RequestException, e:
self.__ngw_components = {}
return self.__ngw_components
def get_version(self):
ngw_components = self.get_ngw_components()
return ngw_components.get("nextgisweb")
def get_abilities(self):
ngw_components = self.get_ngw_components()
abilities = []
if ngw_components.has_key("nextgisweb_basemap"):
abilities.append(self.AbilityBaseMap)
return abilities
<file_sep>import os, sys, requests
from base64 import b64encode
from requests.utils import to_native_string
from ngw_connection_settings import NGWConnectionSettings
from ngw_connection import NGWConnection
import conf
def basic_auth_str(username, password):
"""
Returns a Basic Auth string
"""
authstr = 'Basic ' + to_native_string(
b64encode(('%s:%s' % (username, password)).encode('utf-8')).strip()
)
return authstr
def is_S1_render(fname):
"""
Checks if the filename is valid name for S1 renders ready for upload
"""
if fname[-11:] == '_render.tif' and fname[:9] == 'niersc_s1':
return True
else:
return False
def create_raster_layer(file_name, parent_id, headers):
"""
Uploads raster, creates raster layer in directory with parent_id, creates raster style for it
"""
print('Uploading ' + file_name)
attachment_info = ngwConnection.upload_file(file_name)
display_name = file_name.split("/")[-1][:-11] # cuts the "_render.tif" suffix
payload = {
"resource": {
"cls": "raster_layer",
"display_name": display_name,
"parent": {"id": parent_id}
},
"raster_layer": {
"source": attachment_info,
"srs": {"id": 3857}
}
}
r = requests.post(conf.HOST + '/api/resource/', headers=headers, json=payload)
if r.status_code == 201:
created_resource_id = r.json()['id']
print('Raster layer with id %s was created' % created_resource_id)
payload = {
"resource": {"cls": "raster_style", "description": None, "display_name": display_name, "keyname": None,
"parent": {"id": created_resource_id}}}
r = requests.post(conf.HOST + '/api/resource/', headers=headers, json=payload) # creates raster style
# TODO: check if it was created
# print r
else:
print('Failed: %s responded %s. %s' % (conf.HOST, r.status_code, r.json()['message']))
ngwConnectionSettings = NGWConnectionSettings("test", conf.HOST, conf.LOGIN, conf.PASSWORD)
ngwConnection = NGWConnection(ngwConnectionSettings)
headers = {'Accept': '*/*', 'Authorization': basic_auth_str(conf.LOGIN, conf.PASSWORD)}
# get the directories list under the 'Daily' folder
payload = {'parent': conf.ngw_resources_id}
r = requests.get(conf.HOST + '/api/resource/', headers=headers, params=payload)
if r.status_code == 200:
dir_list = list()
dir_id_list = list()
for resource in r.json():
dir_list.append(resource['resource']['display_name'])
dir_id_list.append(resource['resource']['id'])
for fname in os.listdir(conf.sourcedir):
if os.path.isfile(conf.sourcedir + fname) and is_S1_render(fname):
fname = conf.sourcedir + fname # later we need full paths only
            # this extracts the date from the file name; update it when naming conventions change
            render_date = fname.split("_")[2].split("T")[0]
# Checks if the directory for this date already exists in the Daily folder.
# If so, uploads raster layer inside it.
# Otherwise, creates a new directory for this date, then upload.
if render_date in dir_list:
print("%s folder already exists" % (render_date))
                # get ID of the folder with the corresponding date to upload the render into it
                ngw_date_dir_id = dir_id_list[dir_list.index(render_date)]
create_raster_layer(fname, ngw_date_dir_id, headers)
else:
print("%s folder does not exist, creating" % render_date)
payload = {"resource":
{"cls": "resource_group",
"parent": {"id": conf.ngw_resources_id},
"display_name": render_date,
"keyname": None,
"description": conf.ngw_resources_id}
}
r = requests.post(conf.HOST + '/api/resource/', headers=headers, json=payload)
if r.status_code == 201:
ngw_date_dir_id = r.json()['id']
print('Folder with id %s created' % (ngw_date_dir_id))
dir_list.append(render_date)
dir_id_list.append(ngw_date_dir_id)
create_raster_layer(fname, ngw_date_dir_id, headers)
|
793ea9d24f2ef92a7d96b06c93169b6abf6f2321
|
[
"Python"
] | 2
|
Python
|
tepextepex/ngw-helpers
|
9ed7764e03e56760ce61f30b6a22c6653c7ccbd9
|
b6514117e3ee8e7f3b5f0d9ffb5149c6f684a445
|
refs/heads/develop
|
<file_sep>#!/usr/bin/env bash
sed -i 's#/src/target/classes/win64/lib#/src/src/main/scripts/win64/libiconv/win64/lib#g' /src/src/main/scripts/win64/libiconv/win64/lib/libiconv.la
sed -i 's#/src/target/classes/win64/lib#/src/src/main/scripts/win64/libunistring/win64/lib#g' /src/src/main/scripts/win64/libunistring/win64/lib/libunistring.la
export LDFLAGS="-L/src/src/main/scripts/win64/libiconv/win64/lib -L/src/src/main/scripts/win64/libunistring/win64/lib" CFLAGS="-I/src/src/main/scripts/win64/libiconv/win64/include -I/src/src/main/scripts/win64/libunistring/win64/include"
cd ../..
curl https://ftp.gnu.org/gnu/libidn/libidn2-2.3.0.tar.gz -o libidn2-2.3.0.tar.gz
tar xzf libidn2-2.3.0.tar.gz
mv libidn2-2.3.0 c++
rm libidn2-2.3.0.tar.gz
cd c++
./configure --target=x86_64-w64-mingw32 --host=x86_64-w64-mingw32 --disable-shared --enable-static
make
make install
r1=$?
mkdir -p /src/target/classes/win64/lib
mkdir -p /src/target/classes/win64/include
cp -r /usr/local/lib /src/target/classes/win64/
cp -r /usr/local/include /src/target/classes/win64/
exit ${r1}
|
a6ea6e21d6ffb96553ecfb8e6f44791c23dbc359
|
[
"Shell"
] | 1
|
Shell
|
yildiz-online/component-native-libidn2
|
ef894e0fb62f5bece309e4da4085598fcf670264
|
7436c2fcbeff7c31fd5fa0ab5e338c77ab073007
|
refs/heads/main
|
<file_sep>const Hotel = require('./Hotel')
const User = require("./User");
const Booking = require("./Booking");
exports.resolvers = {
Query: {
getHotel: () => {
return Hotel.find({});
},
getHotelByName: (parent, args) => {
return Hotel.find({"hotel_name": args.hotel_name})
},
getHotelByCity: (parent, args) => {
return Hotel.find({"city": args.city})
},
getBookings: (parent, args) => {
return Booking.find({})
},
getUser: (parent, args) => {
return User.find({})
}
},
Mutation: {
addHotel: async (parent, args) => {
let newHotel = new Hotel({
hotel_id: args.hotel_id,
hotel_name: args.hotel_name,
street: args.street,
city: args.city,
postal_code: args.postal_code,
price: args.price,
email: args.email,
});
return await newHotel.save();
},
addUser: async (parent, args) => {
let newUser = new User({
user_id: args.user_id,
username: args.username,
password: <PASSWORD>,
email: args.email
});
return await newUser.save()
},
        createBooking: async (parent, args) => {
            // Mongoose's countDocuments returns a promise when called without a
            // callback, so the existence check can simply be awaited.
            const userCount = await User.countDocuments({"user_id": args.user_id});
            if(userCount <= 0)
            {
                throw new Error("User does not exist")
            }
            else
            {
                let newBooking = new Booking({
                    hotel_id: args.hotel_id,
                    booking_date: args.booking_date,
                    booking_start: args.booking_start,
                    booking_end: args.booking_end,
                    user_id: args.user_id
                })
                return await newBooking.save()
            }
        }
}
}
<file_sep>const mongoose = require('mongoose')
var date = new Date()
const bookingSchema = new mongoose.Schema({
hotel_id: {
type: Number,
required: true,
min: 0
},
booking_date: {
type: String,
default: (date.getMonth() + 1) + "-" + date.getDate() + "-" + date.getFullYear()
},
booking_start: {
type: String,
required: true,
validate : function (value) {
const dateFormat = /\d{2}-\d{2}-\d{4}/
return dateFormat.test(value)
}
},
booking_end: {
type: String,
required: true,
validate : function (value) {
const dateFormat = /\d{2}-\d{2}-\d{4}/
return dateFormat.test(value)
}
},
user_id: {
type: Number,
required: true,
}
})
const Booking = mongoose.model("Bookings", bookingSchema)
module.exports = Booking
<file_sep>const mongoose = require('mongoose')
const userSchema = new mongoose.Schema({
user_id: {
type: Number,
required : [true, "Please enter ID number"],
unique : [true, "ID must be unique"],
min: 0,
trim: true
},
username: {
type: String,
required : [true, "Please enter username"],
unique : [true, "Username must be unique"],
trim: true
},
password: {
type: String,
required: [true, "Password is required"],
trim: true
},
email: {
type: String,
required: [true, "Email is required"],
unique: [true, "Email must not match other users"],
trim: true,
validate: function(value) {
let email = /^([\w-]+@([\w-]+\.)+[\w-]{2,4})?$/;
return email.test(value);
}
}
})
const User = mongoose.model("Users", userSchema);
module.exports = User;
|
40cc323912694b8780c11909736468d9e1fc35c3
|
[
"JavaScript"
] | 3
|
JavaScript
|
101056255/101056255_comp3133_assig2
|
9089041d8860b99d475abe553a6df66c1f1e351b
|
a6c939aff3afaff761257a48b6ff22b34e47f65b
|
refs/heads/master
|
<repo_name>pixelhacker/puremvc-swift-standard-framework<file_sep>/PureMVCSwift Demo/PureMVCSwift Demo/Classes/View/scenes/RecordsDetailController.swift
//
// DetailViewController.swift
// asdfsdfasdf
//
// Created by <NAME> on 06.10.14.
// Copyright (c) 2014 <NAME>. All rights reserved.
//
import UIKit
class RecordsDetailController: UIViewController {
@IBOutlet weak var txtAlbum: UILabel!
@IBOutlet weak var txtInterpret: UILabel!
@IBOutlet weak var txtGenres: UILabel!
@IBOutlet weak var txtYear: UILabel!
var record: RecordVO? {
didSet {
// Update the view.
self.update()
}
}
func update() {
// Update the user interface for the detail item.
if (record != nil) {
if let interpret = self.txtInterpret {
interpret.text = record?.interpret
}
if let album = self.txtAlbum {
album.text = record?.album
}
if let genres = self.txtGenres {
genres.text = record?.sortedGenres()
}
if let year = self.txtYear {
year.text = record?.year
}
}
}
override func viewDidLoad() {
super.viewDidLoad()
// Do any additional setup after loading the view, typically from a nib.
self.update()
}
}
<file_sep>/PureMVCSwift Demo/PureMVCSwift Demo/Classes/Model/util/Util.swift
//
// Util.swift
// PureMVCSwift Demo
//
// Created by <NAME> on 07.10.14.
// Copyright (c) 2014 <NAME>. All rights reserved.
//
import Foundation
typealias dispatch_cancelable_closure = (cancel: Bool) -> ()
func delay(time: NSTimeInterval, closure: () -> ()) -> dispatch_cancelable_closure? {
func dispatch_later(clsr: () -> ()) {
dispatch_after(
dispatch_time(
DISPATCH_TIME_NOW,
Int64(time * Double(NSEC_PER_SEC))
),
dispatch_get_main_queue(), clsr)
}
var closure: dispatch_block_t? = closure
var cancelableClosure: dispatch_cancelable_closure?
let delayedClosure: dispatch_cancelable_closure = { cancel in
if let clsr = closure {
if (cancel == false) {
dispatch_async(dispatch_get_main_queue(), clsr)
}
}
closure = nil
cancelableClosure = nil
}
cancelableClosure = delayedClosure
dispatch_later {
if let delayedClosure = cancelableClosure {
delayedClosure(cancel: false)
}
}
return cancelableClosure
}
<file_sep>/PureMVCSwift Demo/PureMVCSwift Demo/Classes/View/scenes/add/RecordsAddGenreController.swift
//
// RecordsAddGenreController.swift
// PureMVCSwift Demo
//
// Created by <NAME> on 22.09.15.
// Copyright © 2015 <NAME>. All rights reserved.
//
protocol RecordsGenreDelegate {
var genres: Array<String> { get }
var genresSelected: Array<String> { get set }
}
class RecordsAddGenreController: UITableViewController {
var delegate: RecordsGenreDelegate?
// MARK: - Table View
override func numberOfSectionsInTableView(tableView: UITableView) -> Int {
return 1
}
override func tableView(tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return delegate!.genres.count
}
override func tableView(tableView: UITableView, cellForRowAtIndexPath indexPath: NSIndexPath) -> UITableViewCell {
let cell = tableView.dequeueReusableCellWithIdentifier(kGenreCell, forIndexPath: indexPath)
let genre = delegate!.genres[indexPath.row]
cell.textLabel?.text = genre
cell.accessoryType = delegate!.genresSelected.contains(genre) ? UITableViewCellAccessoryType.Checkmark : UITableViewCellAccessoryType.None
return cell
}
override func tableView(tableView: UITableView, didSelectRowAtIndexPath indexPath: NSIndexPath) {
let cell = tableView.cellForRowAtIndexPath(indexPath)
let genre = delegate!.genres[indexPath.row]
if var d = delegate {
if d.genresSelected.contains(genre) {
d.genresSelected.removeObject(genre)
cell?.accessoryType = UITableViewCellAccessoryType.None
} else {
d.genresSelected.append(genre)
cell?.accessoryType = UITableViewCellAccessoryType.Checkmark
}
}
delegate!.genresSelected.sortInPlace { $0.localizedCaseInsensitiveCompare($1) == NSComparisonResult.OrderedAscending }
}
override func viewDidDisappear(animated: Bool) {
super.viewDidDisappear(animated)
delegate = nil
}
}
<file_sep>/PureMVCSwift Demo/PureMVCSwift Demo/Classes/Model/vo/RecordVO.swift
//
// RecordVO.swift
// PureMVCSwift Demo
//
// Created by <NAME> on 06.10.14.
// Copyright (c) 2014 <NAME>. All rights reserved.
//
import SwiftyJSON
func == (a: RecordVO, b: RecordVO) -> Bool {
return a.interpret == b.interpret && a.album == b.album && a.year == b.year && a.genres == b.genres
}
class RecordVO: Equatable {
var interpret: String?
var album: String?
var year: String?
var genres: String?
class func initWithData(data: JSON) -> RecordVO {
return RecordVO(interpret: data["interpret"].string!,
album: data["album"].string!,
year: data["year"].string!,
genres: data["genres"].string!)
}
init(interpret: String?, album: String?, year: String?, genres: String?) {
self.interpret = interpret
self.album = album
self.year = year
self.genres = genres
}
func sortedGenres() -> String {
var a: Array = self.genres!.componentsSeparatedByString(", ")
a.sortInPlace { $0.localizedCaseInsensitiveCompare($1) == NSComparisonResult.OrderedAscending }
return a.combine(", ")
}
}
<file_sep>/PureMVCSwift/org/puremvc/swift/core/View.swift
//
// View.swift
// PureMVC Swift
//
// Created by <NAME> on 01.07.14.
// Copyright (c) 2014 <NAME>. All rights reserved.
//
import Foundation
/**
* A Singleton <code>IView</code> implementation.
*
* <P>
* In PureMVC, the <code>View</code> class assumes these responsibilities:
* <UL>
* <LI>Maintain a cache of <code>IMediator</code> instances.</LI>
* <LI>Provide methods for registering, retrieving, and removing <code>IMediators</code>.</LI>
* <LI>Notifiying <code>IMediators</code> when they are registered or removed.</LI>
* <LI>Managing the observer lists for each <code>INotification</code> in the application.</LI>
* <LI>Providing a method for attaching <code>IObservers</code> to an <code>INotification</code>'s observer list.</LI>
* <LI>Providing a method for broadcasting an <code>INotification</code>.</LI>
* <LI>Notifying the <code>IObservers</code> of a given <code>INotification</code> when it broadcast.</LI>
* </UL>
*
* @see Mediator, Observer, Notification
*/
class View: IView {
struct Static {
static var onceToken: dispatch_once_t = 0
static var instance: View? = nil
}
var mediatorMap: Dictionary<String, IMediator>
var observerMap: Dictionary<String, Array<IObserver>>
/**
* Constructor.
*
* <P>
* This <code>IView</code> implementation is a Singleton,
* so you should not call the constructor
* directly, but instead call the static Singleton
* Factory method <code>[View getInstance]</code>
*
* @throws NSException if Singleton instance has already been constructed
*
*/
init() {
assert(Static.instance == nil, "View Singleton already initialized!")
mediatorMap = Dictionary<String, IMediator>()
observerMap = Dictionary<String, Array<IObserver>>()
initializeView()
}
/**
* Initialize the Singleton View instance.
*
* <P>
* Called automatically by the constructor, this
* is your opportunity to initialize the Singleton
* instance in your subclass without overriding the
* constructor.</P>
*
* @return void
*/
func initializeView() {
}
/**
* View Singleton Factory method.
*
* @return the Singleton instance of <code>View</code>
*/
class var getInstance: View {
dispatch_once(&Static.onceToken, {
Static.instance = View()
})
return Static.instance!
}
/**
* Check if a Mediator is registered or not
*
* @param mediatorName
* @return whether a Mediator is registered with the given <code>mediatorName</code>.
*/
func hasMediator(mediatorName: String) -> Bool {
return mediatorMap[mediatorName] != nil
}
/**
* Notify the <code>IObservers</code> for a particular <code>INotification</code>.
*
* <P>
* All previously attached <code>IObservers</code> for this <code>INotification</code>'s
* list are notified and are passed a reference to the <code>INotification</code> in
* the order in which they were registered.</P>
*
* @param notification the <code>INotification</code> to notify <code>IObservers</code> of.
*/
func notifiyObservers(notification: INotification) {
if let observers: Array<IObserver> = self.observerMap[notification.name!] {
for observer in observers {
observer.notifyObserver(notification)
}
}
}
/**
* Register an <code>IMediator</code> instance with the <code>View</code>.
*
* <P>
* Registers the <code>IMediator</code> so that it can be retrieved by name,
* and further interrogates the <code>IMediator</code> for its
* <code>INotification</code> interests.</P>
* <P>
* If the <code>IMediator</code> returns any <code>INotification</code>
* names to be notified about, an <code>Observer</code> is created encapsulating
* the <code>IMediator</code> instance's <code>handleNotification</code> method
* and registering it as an <code>Observer</code> for all <code>INotifications</code> the
* <code>IMediator</code> is interested in.</p>
*
* @param mediator a reference to the <code>IMediator</code> instance
*/
func registerMediator(mediator: IMediator) {
if (mediatorMap[mediator.name!] != nil) {
return
}
self.mediatorMap[mediator.name!] = mediator
let interests: Array<String> = mediator.listNotificationInterests()
if (interests.count > 0) {
let observer: IObserver = Observer.withNotifyMethod(mediator.handleNotification, notifyContext: mediator.context())
for notificationName in interests {
registerObserver(notificationName, observer: observer)
}
}
mediator.onRegister()
}
/**
* Register an <code>IObserver</code> to be notified
* of <code>INotifications</code> with a given name.
*
* @param notificationName the name of the <code>INotifications</code> to notify this <code>IObserver</code> of
* @param observer the <code>IObserver</code> to register
*/
func registerObserver(notificationName: String, observer: IObserver) {
var observers: Array<IObserver>? = observerMap[notificationName]
if (observers != nil) {
observers?.append(observer)
} else {
observers = [observer]
}
observerMap[notificationName] = observers
}
/**
* Remove an <code>IMediator</code> from the <code>View</code>.
*
* @param mediatorName name of the <code>IMediator</code> instance to be removed.
* @return the <code>IMediator</code> that was removed from the <code>View</code>
*/
func removeMediator(mediatorName: String) -> IMediator {
        let mediator: IMediator? = mediatorMap[mediatorName]
if (mediator != nil) {
let interests: Array<String> = mediator!.listNotificationInterests()
for notificationName in interests {
self.removeObserver(notificationName, notifyContext: mediator!.context())
}
mediator!.onRemove()
mediatorMap.removeValueForKey(mediatorName)
}
return mediator!
}
/**
* Remove the observer for a given notifyContext from an observer list for a given Notification name.
* <P>
* @param notificationName which observer list to remove from
* @param notifyContext remove the observer with this object as its notifyContext
*/
    func removeObserver(notificationName: String, notifyContext: AnyObject) {
        // Arrays are value types, so mutate a local copy and write it back to the map.
        if var observers: Array<IObserver> = observerMap[notificationName] {
            for i: Int in 0 ..< observers.count {
                let observer: IObserver = observers[i]
                if (observer.compareNotifyContext(notifyContext)) {
                    observers.removeAtIndex(i)
                    break
                }
            }
            if (observers.count == 0) {
                observerMap.removeValueForKey(notificationName)
            } else {
                observerMap[notificationName] = observers
            }
        }
    }
/**
* Retrieve an <code>IMediator</code> from the <code>View</code>.
*
* @param mediatorName the name of the <code>IMediator</code> instance to retrieve.
* @return the <code>IMediator</code> instance previously registered with the given <code>mediatorName</code>.
*/
func retrieveMediator(mediatorName: String) -> IMediator {
return mediatorMap[mediatorName]!
}
}
<file_sep>/PureMVCSwift Demo/PureMVCSwift Demo/Extensions/ArrayExtension.swift
//
// ArrayExtension.swift
// PureMVCSwift Demo
//
// Created by <NAME> on 09.10.14.
// Copyright (c) 2014 <NAME>. All rights reserved.
//
import Foundation
extension Array {
func indexOf<U: Equatable>(item: U) -> Int? {
if item is Element {
return unsafeBitCast(self, [U].self).indexOf(item)
}
return nil
}
func contains<T: Equatable>(obj: T) -> Bool {
let filtered = self.filter { $0 as? T == obj }
return filtered.count > 0
}
mutating func removeObject<U: Equatable>(object: U) {
var index: Int?
for (idx, objectToCompare) in self.enumerate() {
if let to = objectToCompare as? U {
if object == to {
index = idx
}
}
}
if (index != nil) {
self.removeAtIndex(index!)
}
}
func combine(separator: String) -> String {
var str: String = ""
for (idx, item) in self.enumerate() {
str += "\(item)"
if idx < self.count - 1 {
str += separator
}
}
return str
}
}
<file_sep>/PureMVCSwift/org/puremvc/swift/patterns/mediator/Mediator.swift
//
// Mediator.swift
// PureMVC Swift
//
// Created by <NAME> on 01.07.14.
// Copyright (c) 2014 <NAME>. All rights reserved.
//
import Foundation
/**
* A base <code>IMediator</code> implementation.
*
* @see View
*/
class Mediator: IMediator {
var name: String?
var viewComponent: AnyObject?
/**
* The name of the <code>Mediator</code>.
*
* <P>
* Typically, a <code>Mediator</code> will be written to serve
* one specific control or group controls and so,
* will not have a need to be dynamically named.</P>
*/
class func NAME() -> String {
return "Mediator"
}
class func mediator() -> Mediator {
return Mediator(mediatorName: nil, viewComponent: nil)
}
class func withMediatorName(mediatorName: String) -> Mediator {
return Mediator(mediatorName: mediatorName, viewComponent: nil)
}
class func withMediatorName(mediatorName: String, viewComponent: AnyObject) -> Mediator {
return Mediator(mediatorName: mediatorName, viewComponent: viewComponent)
}
class func withViewComponent(viewComponent: AnyObject) -> Mediator {
return Mediator(mediatorName: nil, viewComponent: viewComponent)
}
init(mediatorName: String?, viewComponent: AnyObject?) {
self.name = mediatorName!
self.viewComponent = viewComponent!
self.initializeMediator()
}
/**
* The instance of the Mediator.
*
* @return the <code>IMediator</code> instance
*/
func context() -> AnyObject {
return self
}
/**
* Initialize the Mediator instance.
*
* <P>
* Called automatically by the constructor, this
* is your opportunity to initialize the Mediator
* instance in your subclass without overriding the
* constructor.</P>
*
* @return void
*/
func initializeMediator() {
}
/**
* Handle <code>INotification</code>s.
*
* <P>
* Typically this will be handled in a switch statement,
* with one 'case' entry per <code>INotification</code>
* the <code>Mediator</code> is interested in.
*/
func handleNotification(notification: INotification) -> Void {
}
/**
* List the <code>INotification</code> names this
* <code>Mediator</code> is interested in being notified of.
*
* @return Array the list of <code>INotification</code> names
*/
func listNotificationInterests() -> Array<String> {
return Array<String>()
}
/**
* Called by the View when the Mediator is registered
*/
func onRegister() {
}
/**
* Called by the View when the Mediator is removed
*/
func onRemove() {
}
}
<file_sep>/PureMVCSwift Demo/PureMVCSwift Demo/Classes/View/scenes/RecordsOverviewController.swift
//
// MasterViewController.swift
// asdfsdfasdf
//
// Created by <NAME> on 06.10.14.
// Copyright (c) 2014 <NAME>. All rights reserved.
//
import UIKit
var kOverviewCell: String = "RecordsOverviewCell"
class RecordsOverviewController: UITableViewController {
var detailViewController: RecordsDetailController? = nil
var records: Array<RecordVO>? {
didSet {
if (oldValue != nil) {
self.tableView.beginUpdates()
for (index, record) in (oldValue!).enumerate() {
let recordWasRemoved = !self.records!.contains(record)
if (recordWasRemoved) {
self.tableView.deleteRowsAtIndexPaths([NSIndexPath(forRow: index, inSection: 0)], withRowAnimation: .Fade)
}
}
for (index, record) in (self.records!).enumerate() {
let recordWasAdded = !oldValue!.contains(record)
if (recordWasAdded) {
self.tableView.insertRowsAtIndexPaths([NSIndexPath(forRow: index, inSection: 0)], withRowAnimation: .Fade)
}
}
self.tableView.endUpdates()
} else {
self.tableView.reloadData()
}
}
}
override func awakeFromNib() {
super.awakeFromNib()
if UIDevice.currentDevice().userInterfaceIdiom == .Pad {
self.clearsSelectionOnViewWillAppear = false
self.preferredContentSize = CGSize(width: 320.0, height: 600.0)
}
}
override func viewDidLoad() {
super.viewDidLoad()
setupNavigationBar()
}
override func viewWillAppear(animated: Bool) {
super.viewWillAppear(animated)
setupForiPad()
}
func setupNavigationBar() {
// Do any additional setup after loading the view, typically from a nib.
self.navigationItem.leftBarButtonItem = self.editButtonItem()
let addButton = UIBarButtonItem(barButtonSystemItem: .Add, target: self, action: #selector(RecordsOverviewController.didPressAdd(_:)))
self.navigationItem.rightBarButtonItem = addButton
}
func setupForiPad() {
if UIDevice.currentDevice().userInterfaceIdiom == .Pad {
self.tableView.selectRowAtIndexPath(NSIndexPath(forRow: 0, inSection: 0), animated: false, scrollPosition: UITableViewScrollPosition.Top)
self.performSegueWithIdentifier(SEGUE_OVERVIEW_DETAIL, sender: self)
}
}
// MARK: - Segues
override func prepareForSegue(segue: UIStoryboardSegue, sender: AnyObject?) {
if segue.identifier == SEGUE_OVERVIEW_DETAIL {
detailViewController = (segue.destinationViewController as! UINavigationController).topViewController as? RecordsDetailController
showDetailViewController(detailViewController!)
}
}
func showDetailViewController(vc: RecordsDetailController) {
if let indexPath = self.tableView.indexPathForSelectedRow {
let record = records?[indexPath.row]
vc.record = record
vc.navigationItem.leftBarButtonItem = self.splitViewController?.displayModeButtonItem()
vc.navigationItem.leftItemsSupplementBackButton = true
}
}
// MARK: - Table View
override func numberOfSectionsInTableView(tableView: UITableView) -> Int {
return 1
}
override func tableView(tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
if let count = self.records?.count {
return count
}
return 0
}
override func tableView(tableView: UITableView, cellForRowAtIndexPath indexPath: NSIndexPath) -> UITableViewCell {
let cell = tableView.dequeueReusableCellWithIdentifier(kOverviewCell, forIndexPath: indexPath)
let record = records?[indexPath.row]
cell.textLabel?.text = record?.interpret
return cell
}
override func tableView(tableView: UITableView, canEditRowAtIndexPath indexPath: NSIndexPath) -> Bool {
// Return false if you do not want the specified item to be editable.
return true
}
override func tableView(tableView: UITableView, didSelectRowAtIndexPath indexPath: NSIndexPath) {
if UIDevice.currentDevice().userInterfaceIdiom == .Pad {
showDetailViewController(self.detailViewController!)
} else {
performSegueWithIdentifier(SEGUE_OVERVIEW_DETAIL, sender: self)
}
}
override func tableView(tableView: UITableView, commitEditingStyle editingStyle: UITableViewCellEditingStyle, forRowAtIndexPath indexPath: NSIndexPath) {
if editingStyle == .Delete {
ApplicationFacade.getInstance().sendNotification(EVENT_RECORD_SHOULD_REMOVE, body: self.records?[indexPath.row])
}
}
func didPressAdd(sender: AnyObject) {
ApplicationFacade.getInstance().sendNotification(EVENT_RECORD_SHOULD_ADD)
}
}
<file_sep>/PureMVCSwift Demo/PureMVCSwift Demo/Classes/Controller/StartupCommand.swift
//
// StartupCommand.swift
// PureMVCSwift Demo
//
// Created by <NAME> on 03.10.14.
// Copyright (c) 2014 <NAME>. All rights reserved.
//
import UIKit
class StartupCommand: SimpleCommand {
override func execute(notification: INotification) {
let root = notification.body as! UISplitViewController
let overview = root.viewControllers[0] as! UINavigationController
facade.registerProxy(RecordProxy(proxyName: RecordProxy.NAME()))
facade.registerMediator(RecordsOverviewMediator(mediatorName: RecordsOverviewMediator.NAME(), viewComponent: overview.topViewController))
}
}
<file_sep>/PureMVCSwift Demo/PureMVCSwift Demo/Classes/ApplicationFacade.swift
//
// ApplicationFacade.swift
// PureMVC Swift
//
// Created by <NAME> on 02.10.14.
// Copyright (c) 2014 <NAME>. All rights reserved.
//
import Foundation
let COMMAND_STARTUP = "CommandStartup"
let EVENT_RECORD_WILL_ADD = "EventRecordWillAdd"
let EVENT_RECORD_DID_ADD = "EventRecordDidAdd"
let EVENT_RECORD_SHOULD_ADD = "EventRecordShouldAdd"
let EVENT_RECORD_SHOULD_REMOVE = "EventRecordShouldRemove"
let EVENT_RECORD_WILL_REMOVE = "EventRecordWillRemove"
let EVENT_RECORD_DID_REMOVE = "EventRecordDidRemove"
let SEGUE_OVERVIEW_DETAIL = "OverviewDetailSegue"
let SEGUE_ADD_GENRES = "AddGenreSegue"
let STORYBOARD_MAIN = "Main"
let STORYBOARD_ADD_RECORD = "RecordsAddNavigationController"
let COLOR_LIGHT_GRAY = "#C4C4C9"
let EPSILON = 0.001
class ApplicationFacade: Facade {
func startup(root: AnyObject) {
sendNotification(COMMAND_STARTUP, body: root)
}
override class func getInstance() -> ApplicationFacade {
dispatch_once(&Static.onceToken, {
Static.instance = ApplicationFacade()
})
return Static.instance! as! ApplicationFacade
}
override func initializeController() {
super.initializeController()
registerCommand(COMMAND_STARTUP, commandClass: StartupCommand.self)
}
}
<file_sep>/PureMVCSwift Demo/Podfile
source 'https://github.com/CocoaPods/Specs.git'
platform :ios, '8.0'
use_frameworks!
target 'PureMVCSwift Demo' do
pod 'ActionSheetPicker-3.0', '2.0.1'
pod 'SwiftyJSON', '2.3.2'
end<file_sep>/PureMVCSwift Demo/PureMVCSwift Demo/Classes/View/scenes/RecordsAddController.swift
//
// RecordsCreateController.swift
// PureMVCSwift Demo
//
// Created by <NAME> on 06.10.14.
// Copyright (c) 2014 <NAME>. All rights reserved.
//
import ActionSheetPicker_3_0
var kAddInputCell: String = "RecordsAddInputCell"
var kAddSelectCell: String = "RecordsAddSelectCell"
var kGenreCell: String = "RecordsGenreCell"
var kLanguageInterpret: String = "Interpret"
var kLanguageAlbum: String = "Album"
var kLanguageGenres: String = "Genres"
var kLanguageYear: String = "Year"
var kLanguageEnterInterpret = "Enter Interpret Name"
var kLanguageEnterAlbum = "Enter Album Name"
var kLanguageSelectYear = "Select Year"
var kLanguageSelectGenres = "Select Genres"
class RecordsAddController: UITableViewController, UITextFieldDelegate, RecordsGenreDelegate {
var years: Array<String> = []
var genres: Array<String> = []
var genresSelected: Array<String> = []
var txtInterpret: UITextField?
var txtAlbum: UITextField?
var txtYear: UILabel?
var txtGenre: UILabel?
private var _lastTextfield: UITextField?
@IBOutlet weak var btnDone: UIBarButtonItem!
override func viewDidLoad() {
super.viewDidLoad()
setupYears()
}
override func viewDidAppear(animated: Bool) {
super.viewDidAppear(animated)
focusTextfield()
}
override func viewDidLayoutSubviews() {
validateTextfields()
updateTextfields()
}
func setupYears() {
for index in 1900 ... 2099 {
let year = String(format: "%i", index)
years.append(year)
}
}
func focusTextfield() {
if (txtInterpret!.text!.isEmpty) {
txtInterpret?.becomeFirstResponder()
}
}
func validateTextfields() {
self.btnDone.enabled = !txtInterpret!.text!.isEmpty && !txtAlbum!.text!.isEmpty && genres.count > 0 && txtYear?.tag > 0
}
func updateTextfields() {
txtGenre?.text = genresSelected.count > 0 ? genresSelected.combine(", ") : kLanguageSelectGenres
txtGenre?.textColor = genresSelected.count > 0 ? UIColor.blackColor() : UIColor(rgba: COLOR_LIGHT_GRAY)
txtYear?.textColor = txtYear?.tag > 0 ? UIColor.blackColor() : UIColor(rgba: COLOR_LIGHT_GRAY)
}
func close() {
self.view.endEditing(true)
self.dismissViewControllerAnimated(true, completion: nil)
}
func textFieldShouldReturn(textField: UITextField) -> Bool {
if textField.returnKeyType == UIReturnKeyType.Next {
let current = self.tableView.indexPathForCell(textField.superview?.superview as! UITableViewCell)
let next = tableView.cellForRowAtIndexPath(NSIndexPath(forRow: current!.row + 1, inSection: 0))
(next as! RecordsAddInputCell).txtInput.becomeFirstResponder()
} else if textField.returnKeyType == UIReturnKeyType.Done {
textField.resignFirstResponder()
}
return true
}
func textField(textField: UITextField, shouldChangeCharactersInRange range: NSRange, replacementString string: String) -> Bool {
delay(EPSILON) {
self.validateTextfields()
}
_lastTextfield = textField
return true
}
func textFieldShouldClear(textField: UITextField) -> Bool {
validateTextfields()
return true
}
@IBAction func onCancelTouched(sender: AnyObject) {
close()
}
@IBAction func onDoneTouched(sender: AnyObject) {
let record = RecordVO(interpret: txtInterpret?.text, album: txtAlbum?.text, year: txtYear?.text, genres: txtGenre?.text)
ApplicationFacade.getInstance().sendNotification(EVENT_RECORD_WILL_ADD, body: record)
close()
}
func onYearSelected(selectedIndex: NSNumber!, origin: AnyObject!) {
txtYear?.text = years[selectedIndex.integerValue]
txtYear?.tag = 1
validateTextfields()
updateTextfields()
}
// MARK: - Table View
override func numberOfSectionsInTableView(tableView: UITableView) -> Int {
return 1
}
override func tableView(tableView: UITableView, numberOfRowsInSection section: Int) -> Int {
return 4
}
override func tableView(tableView: UITableView, cellForRowAtIndexPath indexPath: NSIndexPath) -> UITableViewCell {
var cell: UITableViewCell?
switch indexPath.row {
case 0, 1:
let c = tableView.dequeueReusableCellWithIdentifier(kAddInputCell, forIndexPath: indexPath) as! RecordsAddInputCell
c.txtInput.delegate = self
switch indexPath.row {
case 0:
c.txtTitle.text = kLanguageInterpret
c.txtInput.placeholder = kLanguageEnterInterpret
txtInterpret = c.txtInput
case 1:
c.txtTitle.text = kLanguageAlbum
c.txtInput.placeholder = kLanguageEnterAlbum
c.txtInput.returnKeyType = UIReturnKeyType.Done
txtAlbum = c.txtInput
default :()
}
cell = c
case 2, 3:
let c = tableView.dequeueReusableCellWithIdentifier(kAddSelectCell, forIndexPath: indexPath) as! RecordsAddSelectCell
switch indexPath.row {
case 2:
c.txtTitle.text = kLanguageYear
c.txtSelect.text = kLanguageSelectYear
txtYear = c.txtSelect
case 3:
c.txtTitle.text = kLanguageGenres
c.txtSelect.text = kLanguageSelectGenres
c.accessoryType = UITableViewCellAccessoryType.DisclosureIndicator
txtGenre = c.txtSelect
default :()
}
cell = c
default: ()
}
return cell!
}
override func tableView(tableView: UITableView, didSelectRowAtIndexPath indexPath: NSIndexPath) {
switch indexPath.row {
case 2:
_lastTextfield?.resignFirstResponder()
ActionSheetStringPicker.showPickerWithTitle(kLanguageSelectYear, rows: years, initialSelection: 115, target: self, successAction: #selector(RecordsAddController.onYearSelected(_:origin:)), cancelAction: nil, origin: txtYear)
case 3:
performSegueWithIdentifier(SEGUE_ADD_GENRES, sender: self)
default: ()
}
}
// MARK: - Segues
override func prepareForSegue(segue: UIStoryboardSegue, sender: AnyObject?) {
if segue.identifier == SEGUE_ADD_GENRES {
let genreController = segue.destinationViewController as! RecordsAddGenreController
genreController.delegate = self
}
}
}
<file_sep>/PureMVCSwift Demo/PureMVCSwift Demo/Classes/Model/RecordProxy.swift
//
// RecordProxy.swift
// PureMVCSwift Demo
//
// Created by <NAME> on 06.10.14.
// Copyright (c) 2014 <NAME>. All rights reserved.
//
import SwiftyJSON
class RecordProxy: Proxy {
override class func NAME() -> String {
return "RecordProxy"
}
var records: Array<RecordVO> {
get {
return self.data as! Array<RecordVO>
}
set {
self.data = newValue
}
}
var genres: Array<String> = []
var interprets: Array<String> {
get {
var array: Array<String> = []
for record in records {
array.append(record.interpret!)
}
return array
}
}
override func initializeProxy() {
let bundle = NSBundle.mainBundle()
let path = bundle.pathForResource("data", ofType: "json")
let data: NSData?
do {
data = try NSData(contentsOfFile: path!, options: NSDataReadingOptions.DataReadingUncached)
}
catch _ as NSError {
data = nil
}
let json = JSON(data: data!)
var records: Array<RecordVO> = []
for record in json["records"].array! {
records.append(RecordVO.initWithData(record))
}
for genre in json["genres"].array! {
genres.append(genre.string!)
}
self.data = records
sortRecordsByInterpret()
sortGenresByName()
}
func sortRecordsByInterpret() {
records.sortInPlace { $0.interpret?.localizedCaseInsensitiveCompare($1.interpret!) == NSComparisonResult.OrderedAscending }
}
func sortGenresByName() {
genres.sortInPlace { $0.localizedCaseInsensitiveCompare($1) == NSComparisonResult.OrderedAscending }
}
func addRecord(record: RecordVO) {
records.append(record)
sortRecordsByInterpret()
ApplicationFacade.getInstance().sendNotification(EVENT_RECORD_DID_ADD, body: record)
}
func removeRecord(record: RecordVO) {
records.removeAtIndex(self.records.indexOf(record)!)
ApplicationFacade.getInstance().sendNotification(EVENT_RECORD_DID_REMOVE, body: record)
}
}
<file_sep>/PureMVCSwift Demo/PureMVCSwift Demo/Classes/View/scenes/add/RecordsAddInputCell.swift
//
// RecordsAddInputCell.swift
// PureMVCSwift Demo
//
// Created by <NAME> on 22.09.15.
// Copyright © 2015 <NAME>. All rights reserved.
//
import UIKit
class RecordsAddInputCell: UITableViewCell {
@IBOutlet weak var txtTitle: UILabel!
@IBOutlet weak var txtInput: UITextField!
required init?(coder aDecoder: NSCoder) {
super.init(coder: aDecoder)
self.selectionStyle = UITableViewCellSelectionStyle.None
}
}
<file_sep>/PureMVCSwift Demo/PureMVCSwift Demo/Classes/View/RecordsOverviewMediator.swift
//
// OverviewMediator.swift
// PureMVCSwift Demo
//
// Created by <NAME> on 06.10.14.
// Copyright (c) 2014 <NAME>. All rights reserved.
//
import Foundation
class RecordsOverviewMediator: Mediator {
var recordProxy: RecordProxy?
override class func NAME() -> String {
return "RecordsOverviewMediator"
}
var controller: RecordsOverviewController {
get {
return self.viewComponent as! RecordsOverviewController
}
}
override func initializeMediator() {
self.recordProxy = ApplicationFacade.getInstance().retrieveProxy(RecordProxy.NAME()) as? RecordProxy
self.controller.records = self.recordProxy?.records
}
override func listNotificationInterests() -> Array<String> {
return [
EVENT_RECORD_SHOULD_ADD, /* User has pressed Add button */
EVENT_RECORD_WILL_ADD, /* User has pressed Done button after entering new record data */
EVENT_RECORD_DID_ADD, /* New record has been stored in RecordProxy */
EVENT_RECORD_SHOULD_REMOVE, /* User has pressed Delete button */
EVENT_RECORD_DID_REMOVE, /* Selected Record has been removed from RecordProxy */
]
}
override func handleNotification(notification: INotification) {
if (notification.name == EVENT_RECORD_SHOULD_ADD) {
let storyboard = UIStoryboard(name: STORYBOARD_MAIN, bundle: nil)
let navigationController = storyboard.instantiateViewControllerWithIdentifier(STORYBOARD_ADD_RECORD) as! UINavigationController
let viewController = navigationController.viewControllers.first as! RecordsAddController
viewController.genres = recordProxy!.genres
if UIDevice.currentDevice().userInterfaceIdiom == .Pad {
navigationController.modalPresentationStyle = UIModalPresentationStyle.CurrentContext
self.controller.detailViewController?.presentViewController(navigationController, animated: true, completion: nil)
} else {
self.controller.presentViewController(navigationController, animated: true, completion: nil)
}
} else if (notification.name == EVENT_RECORD_WILL_ADD) {
let record = notification.body as! RecordVO
self.recordProxy?.addRecord(record)
} else if (notification.name == EVENT_RECORD_DID_ADD) {
self.controller.records = self.recordProxy?.records
} else if (notification.name == EVENT_RECORD_SHOULD_REMOVE) {
let record = notification.body as! RecordVO
self.recordProxy?.removeRecord(record)
} else if (notification.name == EVENT_RECORD_DID_REMOVE) {
self.controller.records = self.recordProxy?.records
}
}
}
|
fd45fdb7cf507b1d5878a897f3aedcbd6d0963be
|
[
"Swift",
"Ruby"
] | 15
|
Swift
|
pixelhacker/puremvc-swift-standard-framework
|
4eebc14dc9463fb5fc2edcd66138d1ca5c3e535a
|
b24c041f63d8fa6e8ea3360f8ff6765787d61f46
|
refs/heads/master
|
<repo_name>hxegon/Haskell-Exercises<file_sep>/cis194/Log/Gemfile
source "https://rubygems.org"
group :develompent do
gem 'guard-shell'
end
<file_sep>/cis194/Log/Guardfile
directories %w(spec)
clearing :on
def run file
`runhaskell #{file}`
end
guard :shell do
watch(/(.*).hs/) {|m| run m[0]}
end
|
69a03873623a3f7f7a727c15b1871de9d393892a
|
[
"Ruby"
] | 2
|
Ruby
|
hxegon/Haskell-Exercises
|
8b770ab0966629a2158813a2174df4cb763e6d4a
|
a55292f11fb0be5f37e597f268d6eb6cb3ac8883
|
refs/heads/master
|
<repo_name>jpeak5/local_proctoru<file_sep>/version.php
<?php
$plugin->version = 2013102310;
$plugin->cron = 30;
?>
<file_sep>/settings.php
<?php
defined('MOODLE_INTERNAL') || die;
require_once $CFG->dirroot.'/local/proctoru/lib.php';
if ($hassiteconfig) {
$br = function(){return html_writer::empty_tag('br');};
$settings = new admin_settingpage('local_proctoru', ProctorU::_s('mod_name'));
//report heading
$counts ="";
$rawCounts = ProctorU::dbrGetUserCountByStatus();
foreach(ProctorU::mapRawUserCountToFriendlyNames($rawCounts) as $name => $count) {
$counts .= sprintf("%s%s: %d",$br(),$name, $count->count);
}
$reportLinkUrl = new moodle_url('/local/proctoru/report.php');
$reportLinkText = html_writer::tag('a', "Full Report", array('href'=>$reportLinkUrl));
$statsText = ProctorU::_s('report_link_text').$br().$counts.$br().$br().$reportLinkText;
$settings->add(
new admin_setting_heading('report_link_head', ProctorU::_s('report_head'), $statsText));
$settings->add(
new admin_setting_heading('config_head', ProctorU::_s('config_head'),''));
$roles = role_get_names(null, null, true);
$exemptRoles = array('teacher');
$settings->add(
new admin_setting_configmultiselect(
'local_proctoru/roleselection',
ProctorU::_s('roleselection_label'),
ProctorU::_s('roleselection_description'),
$exemptRoles,
$roles
)
);
$settings->add(
new admin_setting_configtext(
'local_proctoru/profilefield_shortname',
ProctorU::_s('profilefield_shortname'),
ProctorU::_s('profilefield_shortname_description'),
ProctorU::_s('profilefield_default_shortname'),
PARAM_ALPHANUM
)
);
$settings->add(
new admin_setting_configtext(
'local_proctoru/profilefield_longname',
ProctorU::_s('profilefield_longname'),
ProctorU::_s('profilefield_longname_description'),
''
)
);
$settings->add(
new admin_setting_configtext(
'local_proctoru/credentials_location',
ProctorU::_s('credentials_location'),
ProctorU::_s('credentials_location_description'),
''
)
);
$settings->add(
new admin_setting_configtext(
'local_proctoru/localwebservice_url',
ProctorU::_s('localwebservice_url'),
ProctorU::_s('localwebservice_url_description'),
''
)
);
$settings->add(
new admin_setting_configtext(
'local_proctoru/eligible_users_service',
ProctorU::_s('eligible_users_service'),
ProctorU::_s('eligible_users_service_description'),
''
)
);
$settings->add(
new admin_setting_configtext(
'local_proctoru/stu_profile',
ProctorU::_s('stu_profile'),
ProctorU::_s('stu_profile_description'),
'')
);
$settings->add(
new admin_setting_configtext(
'local_proctoru/userid_service',
ProctorU::_s('userid_service'),
ProctorU::_s('userid_service_description'),
''
)
);
$settings->add(
new admin_setting_configtext(
'local_proctoru/proctoru_api',
ProctorU::_s('proctoru_api'),
ProctorU::_s('proctoru_api_description'),
''
)
);
$settings->add(
new admin_setting_configtext(
'local_proctoru/proctoru_token',
ProctorU::_s('proctoru_token'),
ProctorU::_s('proctoru_token_description'),
''
)
);
$settings->add(
new admin_setting_configcheckbox(
'local_proctoru/bool_cron',
ProctorU::_s('cron_run'),
ProctorU::_s('cron_desc'),
false, true, false)
);
$ADMIN->add('localplugins', $settings);
}
?>
<file_sep>/lang/en/local_proctoru.php
<?php
$string['mod_name'] = "Proctor U";
$string['pluginname'] = "Proctor U";
$string['franken_name'] = 'local_proctoru';
//status codes
$string['unregistered'] = 'Unregistered';
$string['registered'] = 'Regisitered';
$string['verified'] = 'Verified';
$string['exempt'] = 'Exempt';
$string['sam_profile_error'] = 'Ineligible Profile';
$string['no_idnumber'] = 'NO IDNUMBER';
$string['pu_404'] = '404 PrU';
// roles to consider exempt
$string['roleselection'] = 'roleselection';
$string['roleselection_label'] = 'Roles Exempt';
$string['roleselection_description'] = 'which roles should be excluded from the PU lookup';
// cron
$string['cron_run'] = 'Cron';
$string['cron_desc'] = 'Run with Cron?';
// custom profile field
$string['profilefield_default_shortname'] = 'proctoru';
$string['profilefield_shortname'] = "Custom role name";
$string['profilefield_shortname_description'] = "Name of the custom profile field";
$string['profilefield_longname'] = "Custom role long name";
$string['profilefield_longname_description'] = "Full name of the custom profile field";
//$string['user_proctoru'] = "ProctorU Registration status";
// ProctorU API details
$string['proctoru_token'] = 'ProctorU token';
$string['proctoru_token_description'] = 'API token';
$string['proctoru_api'] = "ProctorU Profile API";
$string['proctoru_api_description'] = "URL for the ProctorU API URL";
// LSU-specific local data store connection settings
$string['credentials_location'] = 'Credentials Location';
$string['credentials_location_description'] = 'Location of local webservices credentials';
// More LSU-specific local data store connection settings
$string['localwebservice_url'] = 'Local Datastore (LD)';
$string['localwebservice_url_description'] = "URL for the local datastore";
$string['userid_service'] = 'LD User ID Service';
$string['userid_service_description'] = "Local source for user ids";
$string['stu_profile'] = "LD Eligible User Profile";
$string['stu_profile_description'] = "Users eligible for PU enrollment are distinguished by the presence of this profile in the LD.";
$string['eligible_users_service'] = 'LD Eligible Users Service';
$string['eligible_users_service_description'] = "Local API to verify whether users may have a PU Profile";
//report strings
$string['report_page_title'] = 'PU Registration Report';
$string['report_breadcrumb'] = '';
$string['report_head'] = "Current Registration Statistics";
$string['report_link_text'] = 'current stats are as follows...';
$string['report_not_auth'] = 'You are not authorized to view this resource';
//config_head
$string['config_head'] = "Configuration";
// cap
$string['proctoru:viewstats'] = 'View ProctorU registration statistics';
//exceptions
$string['wrong_protocol'] = 'URL protocol given in admin settings is malformed. Expected http/https, got {$a}';
$string['general_curl_exception'] = 'Exception thrown while making a webservice request from class {$a}';
$string['xml_exception'] = 'class \'{$a->cls}\' generated an exception while trying to convert the response from {$a->url} to XML. Original exception message was \'{$a->msg}\'';
$string['missing_credentials'] = 'Missing one or both expected values in response from Credentials Client.';
$string['datastore_errors'] = 'Problem obtaining data for service {$a->srv}, message was {$a->msg}';
$string['pu_404'] = 'Got 404 for user with PU id# {$a->uid}, full response was:{$a->msg}';
$string['profilefield_not_foud'] = 'attempt to filter by non-existent profile field; check your field shortname exists.';
$string['exception_envelope'] = 'caught Exception of type {$a->cls}: {$a->hln}; Message was: {$a->msg} Stack trace: {$a->trc}';
//output
$string['start_cron'] = 'Running ProctorU cron tasks';
$string['toplevel_datastore_exception'] = '!!!Trouble initializing LocalDataStore component of the CronProcessor: {$a->msg} | {$a->trc} Aborting ProctorU cron tasks\n';
$string['toplevel_credentials_exception'] = '!!!Trouble initializing CredentialsClient component of the CronProcessor: {$a->msg} {$a->trc} Aborting ProctorU cron tasks.';
$string['toplevel_generic_exception'] = '!!!Trouble initializing CronProcessor:{$a->msg} Aborting ProctorU cron tasks';
$string['general_exception'] = 'caught exception while processing users; aborting...';
$string['cron_not_required'] = 'Cron not required for ProctorU.';
?>
<file_sep>/lib.php
<?php
global $CFG;
require_once $CFG->libdir . '/filelib.php';
require_once($CFG->dirroot.'/user/filters/profilefield.php');
require_once($CFG->dirroot.'/user/filters/yesno.php');
require_once 'Cronlib.php';
function local_proctoru_cron() {
if (ProctorU::_c('bool_cron')) {
//format exception messages in a standard template
$outputException = function(Exception $e, $headline){
$class = get_class($e);
$a = new stdClass();
$a->cls = $class;
$a->hln = $headline;
$a->msg = $e->getMessage();
$a->trc = $e->getTraceAsString();
$out = ProctorU::_s('exception_envelope', $a);
mtrace($out);
ProctorUCronProcessor::emailAdmins($out);
};
mtrace(ProctorU::_s('start_cron'));
//ensure profile field exists
ProctorU::default_profile_field();
try{
$cron = new ProctorUCronProcessor();
}catch(ProctorUWebserviceLocalDataStoreException $e){
$a = new stdClass();
$a->msg = $e->getMessage();
$a->trc = $e->getTrace();
$outputException($e,ProctorU::_s('toplevel_datastore_exception', $a));
return true;
}catch(ProctorUWebserviceCredentialsClientException $e){
$a = new stdClass();
$a->msg = $e->getMessage();
$a->trc = $e->getTrace();
$outputException($e,ProctorU::_s('toplevel_credentials_exception', $a));
return true;
}catch(ProctorUException $e){
$a = new stdClass();
$a->msg = $e->getMessage();
$outputException($e,ProctorU::_s('toplevel_generic_exception', $a));
return true;
}
//get users without status (new users)
list($unreg,$exempt) = $cron->objPartitionUsersWithoutStatus();
//set new users as unregistered
$intUnreg = $cron->intSetStatusForUser($unreg, ProctorU::UNREGISTERED);
mtrace(sprintf("Set status %s for %d of %d unregistered users.",
ProctorU::UNREGISTERED, $intUnreg, count($unreg)));
//set exempt status
$intExempt = $cron->intSetStatusForUser($exempt, ProctorU::EXEMPT);
mtrace(sprintf("Set status %s for %d of %d exempt users.",
ProctorU::EXEMPT, $intExempt, count($exempt)));
//get unverified users
$needProcessing = $cron->objGetUnverifiedUsers();
mtrace(sprintf("Begin processing user status for %d users", count($needProcessing)));
try{
// Add the users who need exemption processing to the list
$needProcessing += $cron->checkExemptUsersForStudentStatus();
$cron->blnProcessUsers($needProcessing);
}
catch(ProctorUException $e){
$outputException($e,ProctorU::_s('general_exception'));
return true;
}
} else {
mtrace(ProctorU::_s('cron_not_required'));
}
return true;
}
class ProctorU {
// public $username, $password, $localWebservicesCredentialsUrl, $localWebserviceUrl;
const UNREGISTERED = 1;
const REGISTERED = 2;
const VERIFIED = 3;
const EXEMPT = 4;
const SAM_HAS_PROFILE_ERROR = -1;
const NO_IDNUMBER = -2;
const PU_NOT_FOUND = -404;
public static function _c($c){
return get_config('local_proctoru', $c);
}
public static function _s($s, $a=null){
$b = get_string('franken_name', 'local_proctoru');
return get_string($s, $b, $a);
}
/**
* Simply returns an array of the class constants
* @return int[]
*/
public static function arrStatuses(){
return array(
ProctorU::EXEMPT,
ProctorU::NO_IDNUMBER,
ProctorU::PU_NOT_FOUND,
ProctorU::REGISTERED,
ProctorU::SAM_HAS_PROFILE_ERROR,
ProctorU::UNREGISTERED,
ProctorU::VERIFIED);
}
/**
     * for a given const status, returns a human-friendly string
*/
public static function strMapStatusToLangString($status){
if(empty($status)) return ''; //necessary so that users without status do not cause array index errors
$map = array(
ProctorU::UNREGISTERED => 'unregistered',
ProctorU::REGISTERED => 'registered',
ProctorU::VERIFIED => 'verified',
ProctorU::EXEMPT => 'exempt',
ProctorU::SAM_HAS_PROFILE_ERROR => 'sam_profile_error',
ProctorU::NO_IDNUMBER => 'no_idnumber',
ProctorU::PU_NOT_FOUND => 'pu_404',
);
return ProctorU::_s($map[$status]);
}
/**
* insert new record into {user_info_field}
* @global type $DB
* @param type $params
* @return \stdClass
*/
public static function default_profile_field() {
global $DB;
$shortname = ProctorU::_c( 'profilefield_shortname');
if($shortname == false){
$shortname = ProctorU::_s( 'profilefield_default_shortname');
}
if (!$field = $DB->get_record('user_info_field', array('shortname' => $shortname))) {
$field = new stdClass;
$field->shortname = $shortname;
$field->name = ProctorU::_c('profilefield_longname');
$field->description = ProctorU::_s('profilefield_shortname');
$field->descriptionformat = 1;
$field->datatype = 'text';
$field->categoryid = 1;
$field->locked = 1;
$field->visible = 1;
$field->param1 = 30;
$field->param2 = 2048;
$field->id = $DB->insert_record('user_info_field', $field);
}
return $field;
}
/**
* helper fn
* @return string shortname of the custom field in the DB
*/
public static function strFieldname() {
return ProctorU::_c('profilefield_shortname');
}
/**
* helper fn returning the record ID of the custom field
* @global type $DB
* @return int ID of the custom field
*/
public static function intCustomFieldID(){
global $DB;
return $DB->get_field('user_info_field', 'id', array('shortname'=>self::strFieldname()));
}
/**
* Simple DB lookup, directly in the {user_info_data} table,
* for an occurrence of the userid WHERE fieldid = ?
* @global stdClass $USER
* @return stdClass|false
*/
public static function blnUserHasProctoruProfileFieldValue($userid) {
global $DB;
$result = $DB->record_exists('user_info_data',
array('userid'=>$userid, 'fieldid'=>self::intCustomFieldID()));
return $result;
}
/**
* Similar to @see ProctorU::blnUserHasProctoruProfileFieldValue()
* except that instead of returning a boolean "exists?", we return the value in question
* @global type $DB
* @param type $userid
* @return type
*/
public static function constProctorUStatusForUserId($userid){
global $DB;
$status = $DB->get_field('user_info_data','data',
array('userid'=>$userid, 'fieldid'=>self::intCustomFieldID()));
return $status === false ? false : $status;
}
public static function blnUserHasAcceptableStatus($userid) {
$status = self::constProctorUStatusForUserId($userid);
if($status == ProctorU::VERIFIED || $status == ProctorU::EXEMPT){
return true;
}elseif(self::blnUserHasExemptRole($userid)){
return true;
}else{
return false;
}
}
public static function blnUserHasExemptRole($userid){
global $DB;
$exemptRoleIds = ProctorU::_c( 'roleselection');
$sql = "SELECT id
FROM {role_assignments}
WHERE roleid IN ({$exemptRoleIds}) AND userid = {$userid}";
$intRoles = count($DB->get_records_sql($sql));
return $intRoles > 0;
}
/**
* @global type $DB
* @param int $userid
* @param ProctorU $status one of the class constants
* @return int insert id
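*
* Illustrative usage (a hypothetical call, not taken from the original source;
* the chosen status constant is only an example):
*   ProctorU::intSaveProfileFieldStatus($user->id, ProctorU::VERIFIED);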
*/
public static function intSaveProfileFieldStatus($userid, $status){
global $DB;
$msg = sprintf("Setting ProctorU status for user %s: ", $userid);
$fieldId = self::intCustomFieldID();
$record = $DB->get_record('user_info_data', array('userid'=>$userid, 'fieldid'=>$fieldId));
if(!$record){
$record = new stdClass();
$record->data = $status;
$record->userid = $userid;
$record->fieldid = $fieldId;
mtrace(sprintf("%sInsert new record, status %s", $msg,$status));
return $DB->insert_record('user_info_data',$record, true, false);
}elseif($record->data != $status){
mtrace(sprintf("%supdate from %s to %s.",$msg,$record->data,$status));
$record->data = $status;
return $DB->update_record('user_info_data',$record, false);
}else{
mtrace(sprintf("%s Already set - do nothing", $msg));
return true;
}
}
/**
* Partial application of the datalib.php function get_users_listing tailored to
* the task at hand
*
* Return filtered (if provided) list of users in site, except guest and deleted users.
*
* @param string $sort PASSTHROUGH An SQL field to sort by
* @param string $dir PASSTHROUGH The sort direction ASC|DESC
* @param int $page PASSTHROUGH The page or records to return
* @param int $recordsperpage PASSTHROUGH The number of records to return per page
* @param string $search PASSTHROUGH(|IGNORE) A simple string to search for
* @param string $firstinitial PASSTHROUGH Users whose first name starts with $firstinitial
* @param string $lastinitial PASSTHROUGH Users whose last name starts with $lastinitial
* @param string $extraselect An additional SQL select statement to append to the query
* @param array $extraparams Additional parameters to use for the above $extraselect
* @param stdClass $extracontext If specified, will include user 'extra fields'
* as appropriate for current user and given context
* @return array Array of {@link $USER} records
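*
* Illustrative usage (a hypothetical call; the status constant passed here is
* only an example, any of the ProctorU status constants could be used):
*   $verified = ProctorU::partial_get_users_listing(ProctorU::VERIFIED);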
*/
public static function partial_get_users_listing($status= null,$sort='lastaccess', $dir='ASC', $page=0, $recordsperpage=0,
$search='', $firstinitial='', $lastinitial='') {
// $status = PROCTORU::VERIFIED;
// echo $status;
// the extraselect needs to vary to allow the user to specify 'is not empty', etc
$proFilter = new user_filter_profilefield('profile','Profile',1);
if(!isset($status)){
$extraselect = '';
$extraparams = array();
}else{
//figure out which field key the filter function uses for our field
$fieldKey = null;
$fieldShortname = ProctorU::_c( 'profilefield_shortname');
foreach($proFilter->get_profile_fields() as $k=>$sn){
if($sn == $fieldShortname){
$fieldKey = $k;
}
}
if(is_null($fieldKey)){
throw new Exception(ProctorU::_s('profilefield_not_foud'));
}
$data['profile'] = $fieldKey;
$data['operator'] = 2;
$data['value'] = $status;
list($extraselect, $extraparams) = $proFilter->get_sql_filter($data);
}
//get filter for suspended users
list($extraselect, $extraparams) = self::arrAddSuspendedUserFilter($extraselect, $extraparams);
$extracontext = context_system::instance();
return get_users_listing($sort,$dir,$page,$recordsperpage,$search,
$firstinitial,$lastinitial, $extraselect, $extraparams, $extracontext);
}
public static function partial_get_users_listing_by_roleid($roleid){
$roFilter = new user_filter_courserole('role', 'Role', 1);
$data = array('value'=>false, 'roleid'=>$roleid, 'categoryid'=>0);
$extracontext = context_system::instance();
list($extraselectRo, $extraparamsRo) = $roFilter->get_sql_filter($data);
//get filter for suspended users
list($extraselect, $extraparams) = self::arrAddSuspendedUserFilter($extraselectRo, $extraparamsRo);
return get_users_listing('','',null,null,'',
'','', $extraselect, $extraparams, $extracontext);
}
/**
* helper function wrapping functionality needed in two fns;
* Mainly exists to de-clutter the partial functions above and to
* avoid repeated code.
* @param string $extraselect
* @param array $extraparams
* @return array
*/
private static function arrAddSuspendedUserFilter($extraselect, $extraparams){
//exclude suspended users
$suspFilter = new user_filter_yesno('suspended', 'Suspended',1,'suspended');
$suspData = array('value' => "0",);
list($suspXSelect, $suspXParams) = $suspFilter->get_sql_filter($suspData);
$extraselect .= " AND ".$suspXSelect;
$extraparams += $suspXParams;
return array($extraselect, $extraparams);
}
/**
* Gets role rows from the DB that are in the admin setting 'roles to exempt'
* @global type $DB
* @return object[] role records of type stdClass, keyed by id
*/
public static function objGetExemptRoles(){
global $DB;
$rolesConfig = ProctorU::_c( 'roleselection');
return $DB->get_records_list('role', 'id', explode(',', $rolesConfig));
}
/**
* Gets all non-suspended, non-deleted, non-guest users from the db
* @global type $DB
* @return object[] db result row objects
*/
public static function objGetAllUsers(){
global $DB;
$guestUserId = $DB->get_field('user', 'id', array('username'=>'guest'));
$active = $DB->get_records('user', array('suspended'=>0,'deleted'=>0));
unset($active[$guestUserId]);
return $active;
}
/**
* Get all users with one of the ProctorU statuses.
* Used to determine which users do not yet have a status.
*
* @return object[] user rows objects
*/
public static function objGetAllUsersWithProctorStatus(){
$users = array();
foreach(self::arrStatuses() as $st){
$users += self::objGetUsersWithStatus($st);
}
return $users;
}
/**
* Gets users without a value in the proctoru profile field
* @return object[] user row objects
*/
public static function objGetAllUsersWithoutProctorStatus(){
$allUsers = self::objGetAllUsers();
$haveStatus = self::objGetAllUsersWithProctorStatus();
$ids = array_diff(
array_keys($allUsers),
array_keys($haveStatus)
);
$noStatus = array_intersect_key($allUsers, array_flip($ids));
return $noStatus;
}
/**
* @param int $status class constants
* @return object[] db user row objects having the given proctoru status
*/
public static function objGetUsersWithStatus($status){
return ProctorU::partial_get_users_listing($status);
}
/**
* Find users that are exempt from ProctorU lookup based
* on their membership in one of the exempt roles in some context
* @return object[] users having the exempt role in any course
*/
public static function objGetExemptUsers() {
$exemptRoles = self::objGetExemptRoles();
$exempt = array();
$total = 0;
foreach (array_keys($exemptRoles) as $roleid) {
$ex = ProctorU::partial_get_users_listing_by_roleid($roleid);
mtrace(sprintf("found %d users with exempt roleid %d", count($ex), $roleid));
// $exempt = array_merge($exempt, $ex);
$exempt += $ex;
$total += count($ex);
}
mtrace(sprintf("%d TOTAL users are exempt from ProctorU limitations. This number should be reflected below.", count($exempt)));
return $exempt;
}
public static function dbrGetUserCountByStatus() {
global $DB;
$sql = "
SELECT data, count(userid) AS count
FROM {user_info_data}
WHERE fieldid = :fieldid
GROUP BY data;
";
return $DB->get_records_sql($sql, array('fieldid'=>self::intCustomFieldID()));
}
/**
* returns an associative array of status names to user counts
* @param array $dbr such as that returned from self::dbrGetUserCountByStatus
*/
public static function mapRawUserCountToFriendlyNames(array $dbr){
$friendly = array();
foreach($dbr as $status => $count){
$friendly[self::strMapStatusToLangString($status)] = $count;
}
return $friendly;
}
}
class ProctorUException extends moodle_exception{
}
?>
|
ebab2974efda5fa836c0daa50449dac09b2e8df3
|
[
"PHP"
] | 4
|
PHP
|
jpeak5/local_proctoru
|
87893e859c22b59ec843455f603f8b787c41980f
|
a232e09dbc483fcf1c0011b9e704330117e9f48d
|
refs/heads/master
|
<repo_name>randomjoho/randomkit<file_sep>/command.sh
#!/bin/bash
commands=(
"$DART_HOME/pub run build_runner build"
"$FLUTTER_HOME/flutter packages pub run build_runner build --delete-conflicting-outputs"
"$FLUTTER_HOME/flutter packages pub run build_runner watch"
"pub global run intl_utils:generate"
"iconfont_builder --from ./fonts --to ./lib/iconfont.dart --focus true"
"agen -w --no-watch -s --no-save -c RandomRes"
)
echo Your DART_HOME: $DART_HOME
echo Your FLUTTER_HOME: $FLUTTER_HOME
echo
echo Please select a command.
echo ==============================
for i in "${!commands[@]}"
do
echo "$i) ${commands[i]}"
done
echo ==============================
echo
read -p "Enter command id:" command_id
${commands[command_id]}<file_sep>/android/settings.gradle
rootProject.name = 'randomkit'
|
22486c9881e75f8f4c28a104457a4ba61efcdbca
|
[
"Shell",
"Gradle"
] | 2
|
Shell
|
randomjoho/randomkit
|
6eaa1a1a35e1a707d40a0e8f3da479348a969b34
|
6c6429822636c60b813a6d9da871fd0e34d07f2f
|
refs/heads/master
|
<repo_name>juancoob/BackingApp<file_sep>/app/src/main/java/com/juancoob/nanodegree/and/backingapp/repository/RecipesRepository.java
package com.juancoob.nanodegree.and.backingapp.repository;
import android.support.annotation.NonNull;
import com.juancoob.nanodegree.and.backingapp.domain.model.Ingredient;
import com.juancoob.nanodegree.and.backingapp.domain.model.Recipe;
import com.juancoob.nanodegree.and.backingapp.domain.model.Step;
import com.juancoob.nanodegree.and.backingapp.domain.usecase.impl.FetchingRecipesUseCaseImpl;
import com.juancoob.nanodegree.and.backingapp.repository.rest.IBackingAppAPIService;
import com.juancoob.nanodegree.and.backingapp.util.Constants;
import java.util.List;
import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.Response;
import retrofit2.Retrofit;
import retrofit2.converter.gson.GsonConverterFactory;
/**
* Created by <NAME> on 22/04/18.
*/
public class RecipesRepository implements Repository {
private static RecipesRepository sRecipesRepository = new RecipesRepository();
private List<Step> mRecipeSteps;
private List<Ingredient> mRecipeIngredients;
private int mSelectedStepPosition = 0;
public static RecipesRepository getInstance() {
return sRecipesRepository;
}
@Override
public void fetchRecipes(final FetchingRecipesUseCaseImpl fetchingRecipesUseCaseImpl) {
Retrofit retrofit = new Retrofit.Builder()
.baseUrl(Constants.URL)
.addConverterFactory(GsonConverterFactory.create())
.build();
IBackingAppAPIService iBackingAppAPIService = retrofit.create(IBackingAppAPIService.class);
Call<List<Recipe>> responseCall = iBackingAppAPIService.getRecipes();
if(responseCall != null) {
responseCall.enqueue(new Callback<List<Recipe>>() {
@Override
public void onResponse(@NonNull Call<List<Recipe>> call, @NonNull Response<List<Recipe>> response) {
if(response.isSuccessful()) {
fetchingRecipesUseCaseImpl.showRecipes(response.body());
}
}
@Override
public void onFailure(@NonNull Call<List<Recipe>> call, @NonNull Throwable t) {
fetchingRecipesUseCaseImpl.noInternetConnection();
}
});
}
}
public List<Step> getRecipeSteps() {
return mRecipeSteps;
}
public void setRecipeSteps(List<Step> steps) {
mRecipeSteps = steps;
}
public List<Ingredient> getRecipeIngredients() {
return mRecipeIngredients;
}
public void setRecipeIngredients(List<Ingredient> ingredients) {
mRecipeIngredients = ingredients;
}
public int getSelectedStepPosition() {
return mSelectedStepPosition;
}
public void setSelectedStepPosition(int selectedStepPosition) {
this.mSelectedStepPosition = selectedStepPosition;
}
}
<file_sep>/app/src/main/java/com/juancoob/nanodegree/and/backingapp/domain/model/Step.java
package com.juancoob.nanodegree.and.backingapp.domain.model;
import android.os.Parcel;
import android.os.Parcelable;
import com.google.gson.annotations.SerializedName;
/**
* Created by <NAME> on 18/04/18.
*/
public class Step implements Parcelable {
public static final Creator<Step> CREATOR = new Creator<Step>() {
@Override
public Step createFromParcel(Parcel in) {
return new Step(in);
}
@Override
public Step[] newArray(int size) {
return new Step[size];
}
};
@SerializedName("id")
private Integer mStepId;
@SerializedName("shortDescription")
private String mShortDescription;
@SerializedName("description")
private String mDescription;
@SerializedName("videoURL")
private String mVideoURL;
@SerializedName("thumbnailURL")
private String mThumbnailURL;
protected Step(Parcel in) {
if (in.readByte() == 0) {
mStepId = null;
} else {
mStepId = in.readInt();
}
mShortDescription = in.readString();
mDescription = in.readString();
mVideoURL = in.readString();
mThumbnailURL = in.readString();
}
public Integer getStepId() {
return mStepId;
}
public void setStepId(Integer mStepId) {
this.mStepId = mStepId;
}
public String getShortDescription() {
return mShortDescription;
}
public void setShortDescription(String mShortDescription) {
this.mShortDescription = mShortDescription;
}
public String getDescription() {
return mDescription;
}
public void setDescription(String mDescription) {
this.mDescription = mDescription;
}
public String getVideoURL() {
return mVideoURL;
}
public void setVideoURL(String mVideoURL) {
this.mVideoURL = mVideoURL;
}
public String getThumbnailURL() {
return mThumbnailURL;
}
public void setThumbnailURL(String mThumbnailURL) {
this.mThumbnailURL = mThumbnailURL;
}
@Override
public int describeContents() {
return 0;
}
@Override
public void writeToParcel(Parcel parcel, int i) {
if (mStepId == null) {
parcel.writeByte((byte) 0);
} else {
parcel.writeByte((byte) 1);
parcel.writeInt(mStepId);
}
parcel.writeString(mShortDescription);
parcel.writeString(mDescription);
parcel.writeString(mVideoURL);
parcel.writeString(mThumbnailURL);
}
}
<file_sep>/app/src/main/java/com/juancoob/nanodegree/and/backingapp/repository/Repository.java
package com.juancoob.nanodegree.and.backingapp.repository;
import com.juancoob.nanodegree.and.backingapp.domain.usecase.impl.FetchingRecipesUseCaseImpl;
/**
* Created by <NAME> on 22/04/18.
*/
public interface Repository {
void fetchRecipes(FetchingRecipesUseCaseImpl fetchingRecipesUseCase);
}
<file_sep>/app/src/main/java/com/juancoob/nanodegree/and/backingapp/adapter/IRecipeListAdapterContract.java
package com.juancoob.nanodegree.and.backingapp.adapter;
import com.juancoob.nanodegree.and.backingapp.domain.model.Recipe;
import java.util.List;
/**
* Created by <NAME> on 25/04/18.
*/
public interface IRecipeListAdapterContract {
void updateRecipes(List<Recipe> recipes);
}
<file_sep>/app/src/main/java/com/juancoob/nanodegree/and/backingapp/adapter/IRecipeStepsAdapter.java
package com.juancoob.nanodegree.and.backingapp.adapter;
import com.juancoob.nanodegree.and.backingapp.domain.model.Step;
import java.util.List;
/**
* Created by <NAME> on 28/04/18.
*/
public interface IRecipeStepsAdapter {
void updateSteps(List<Step> steps);
}
<file_sep>/app/src/main/java/com/juancoob/nanodegree/and/backingapp/adapter/Impl/IngredientsAdapter.java
package com.juancoob.nanodegree.and.backingapp.adapter.Impl;
import android.content.Context;
import android.support.annotation.NonNull;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.juancoob.nanodegree.and.backingapp.R;
import com.juancoob.nanodegree.and.backingapp.adapter.IRecipeIngredientsAdapter;
import com.juancoob.nanodegree.and.backingapp.domain.model.Ingredient;
import java.util.ArrayList;
import java.util.List;
import butterknife.BindView;
import butterknife.ButterKnife;
/**
* Created by <NAME> on 28/04/18.
*/
public class IngredientsAdapter extends RecyclerView.Adapter<IngredientsAdapter.IngredientViewHolder> implements IRecipeIngredientsAdapter {
private Context mCtx;
private List<Ingredient> mIngredients = new ArrayList<>();
public IngredientsAdapter(Context context) {
mCtx = context;
}
@Override
public void updateIngredients(List<Ingredient> ingredients) {
mIngredients.clear();
mIngredients.addAll(ingredients);
notifyDataSetChanged();
}
@NonNull
@Override
public IngredientViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
View view = LayoutInflater.from(mCtx).inflate(R.layout.item_ingredient, parent, false);
return new IngredientViewHolder(view);
}
@Override
public void onBindViewHolder(@NonNull IngredientViewHolder holder, int position) {
Ingredient ingredient = mIngredients.get(position);
if (ingredient.getQuantity() == Math.round(ingredient.getQuantity())) {
holder.recipeIngredientTextView.setText(String.format(mCtx.getString(R.string.recipe_ingredient_integer),
(int) ingredient.getQuantity(), ingredient.getMeasure(), ingredient.getIngredient()));
} else {
holder.recipeIngredientTextView.setText(String.format(mCtx.getString(R.string.recipe_ingredient_float),
ingredient.getQuantity(), ingredient.getMeasure(), ingredient.getIngredient()));
}
}
@Override
public int getItemCount() {
return mIngredients.size();
}
public class IngredientViewHolder extends RecyclerView.ViewHolder {
@BindView(R.id.tv_recipe_ingredient)
public TextView recipeIngredientTextView;
public IngredientViewHolder(View itemView) {
super(itemView);
ButterKnife.bind(this, itemView);
}
}
}
<file_sep>/app/src/main/java/com/juancoob/nanodegree/and/backingapp/presentation/recipeDescriptionSelected/IRecipeDescriptionSelectedContract.java
package com.juancoob.nanodegree.and.backingapp.presentation.recipeDescriptionSelected;
import com.juancoob.nanodegree.and.backingapp.presentation.base.presenters.BasePresenter;
import com.juancoob.nanodegree.and.backingapp.presentation.base.ui.BaseView;
/**
* Created by <NAME> on 28/04/18.
*/
public interface IRecipeDescriptionSelectedContract {
interface View extends BaseView {
void showStepDescription();
}
interface Presenter extends BasePresenter {
}
}
<file_sep>/app/src/main/java/com/juancoob/nanodegree/and/backingapp/domain/executor/impl/ThreadExecutor.java
package com.juancoob.nanodegree.and.backingapp.domain.executor.impl;
import com.juancoob.nanodegree.and.backingapp.domain.executor.Executor;
import com.juancoob.nanodegree.and.backingapp.domain.usecase.base.AbstractUseCase;
import com.juancoob.nanodegree.and.backingapp.util.Constants;
import java.util.concurrent.ThreadPoolExecutor;
/**
* Created by <NAME> on 17/04/18.
*/
public class ThreadExecutor implements Executor {
private static final ThreadExecutor sThreadExecutor = new ThreadExecutor();
private final ThreadPoolExecutor mThreadPoolExecutor;
public static ThreadExecutor getInstance() {
return sThreadExecutor;
}
private ThreadExecutor() {
mThreadPoolExecutor = new ThreadPoolExecutor(
Constants.CORE_POOL_SIZE,
Constants.MAXIMUN_POOL_SIZE,
Constants.KEEP_ALIVE_TIME,
Constants.TIME_UNIT,
Constants.WORK_QUEUE);
}
@Override
public void executor(final AbstractUseCase useCase) {
mThreadPoolExecutor.submit(new Runnable() {
@Override
public void run() {
useCase.run();
useCase.onFinished();
}
});
}
}
<file_sep>/app/src/main/java/com/juancoob/nanodegree/and/backingapp/presentation/recipeDescriptionList/RecipeDescriptionListActivity.java
package com.juancoob.nanodegree.and.backingapp.presentation.recipeDescriptionList;
import android.content.Intent;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v7.app.AppCompatActivity;
import com.juancoob.nanodegree.and.backingapp.R;
import com.juancoob.nanodegree.and.backingapp.presentation.recipeDescriptionSelected.RecipeDescriptionSelectedActivity;
import com.juancoob.nanodegree.and.backingapp.presentation.recipeDescriptionSelected.RecipeDescriptionSelectedFragment;
import com.juancoob.nanodegree.and.backingapp.util.ActivityUtils;
import com.juancoob.nanodegree.and.backingapp.util.Constants;
/**
* Created by <NAME> on 28/04/18.
*/
public class RecipeDescriptionListActivity extends AppCompatActivity implements IRecipeDescriptionListContract {
private RecipeDescriptionSelectedFragment mRecipeDescriptionSelectedFragment;
private String mRecipeName;
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_recipe_description_list);
if (getSupportActionBar() != null) {
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
}
Intent intent = getIntent();
if (intent != null && intent.hasExtra(Constants.RECIPE_NAME)) {
mRecipeName = intent.getStringExtra(Constants.RECIPE_NAME);
setTitle(mRecipeName);
}
RecipeDescriptionListFragment recipeDescriptionListFragment;
if (getResources().getBoolean(R.bool.tablet)) {
recipeDescriptionListFragment =
(RecipeDescriptionListFragment) getSupportFragmentManager().findFragmentById(R.id.f_recipe_steps);
mRecipeDescriptionSelectedFragment =
(RecipeDescriptionSelectedFragment) getSupportFragmentManager().findFragmentById(R.id.f_recipe_step_description);
if (recipeDescriptionListFragment == null) {
recipeDescriptionListFragment = RecipeDescriptionListFragment.getInstance();
ActivityUtils.addFragmentToActivity(getSupportFragmentManager(), recipeDescriptionListFragment, R.id.f_recipe_steps);
}
if (mRecipeDescriptionSelectedFragment == null) {
mRecipeDescriptionSelectedFragment = RecipeDescriptionSelectedFragment.getInstance();
ActivityUtils.addFragmentToActivity(getSupportFragmentManager(), mRecipeDescriptionSelectedFragment, R.id.f_recipe_step_description);
}
} else {
recipeDescriptionListFragment =
(RecipeDescriptionListFragment) getSupportFragmentManager().findFragmentById(R.id.fl_content_frame);
if (recipeDescriptionListFragment == null) {
recipeDescriptionListFragment = RecipeDescriptionListFragment.getInstance();
ActivityUtils.addFragmentToActivity(getSupportFragmentManager(), recipeDescriptionListFragment, R.id.fl_content_frame);
}
}
}
@Override
public void onClickStep(int selectedStepPosition) {
if (getResources().getBoolean(R.bool.tablet)) {
mRecipeDescriptionSelectedFragment.goToStep(selectedStepPosition);
} else {
Intent intent = new Intent(this, RecipeDescriptionSelectedActivity.class);
intent.putExtra(Constants.SELECTED_STEP_POSITION, selectedStepPosition);
intent.putExtra(Constants.RECIPE_NAME, mRecipeName);
startActivity(intent);
}
}
}
<file_sep>/app/src/main/java/com/juancoob/nanodegree/and/backingapp/adapter/Impl/StepsAdapter.java
package com.juancoob.nanodegree.and.backingapp.adapter.Impl;
import android.content.Context;
import android.support.annotation.NonNull;
import android.support.v7.widget.CardView;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.juancoob.nanodegree.and.backingapp.R;
import com.juancoob.nanodegree.and.backingapp.adapter.IRecipeStepsAdapter;
import com.juancoob.nanodegree.and.backingapp.domain.model.Step;
import com.juancoob.nanodegree.and.backingapp.presentation.recipeDescriptionList.IRecipeDescriptionListContract;
import com.juancoob.nanodegree.and.backingapp.repository.RecipesRepository;
import java.util.ArrayList;
import java.util.List;
import butterknife.BindView;
import butterknife.ButterKnife;
/**
* Created by <NAME> on 28/04/18.
*/
public class StepsAdapter extends RecyclerView.Adapter<StepsAdapter.IngredientViewHolder> implements IRecipeStepsAdapter {
private Context mCtx;
private List<Step> mSteps = new ArrayList<>();
private IRecipeDescriptionListContract mIRecipeDescriptionListContract;
private int mPreviousSelectedStepPosition;
public StepsAdapter(Context context, IRecipeDescriptionListContract iRecipeDescriptionListContract) {
mCtx = context;
mIRecipeDescriptionListContract = iRecipeDescriptionListContract;
if(mCtx.getResources().getBoolean(R.bool.tablet)) {
mPreviousSelectedStepPosition = RecipesRepository.getInstance().getSelectedStepPosition();
}
}
@Override
public void updateSteps(List<Step> steps) {
mSteps.clear();
mSteps.addAll(steps);
notifyDataSetChanged();
}
@NonNull
@Override
public IngredientViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
View view = LayoutInflater.from(mCtx).inflate(R.layout.item_step, parent, false);
return new IngredientViewHolder(view);
}
@Override
public void onBindViewHolder(@NonNull IngredientViewHolder holder, int position) {
Step step = mSteps.get(position);
StringBuilder builder = new StringBuilder();
if(step.getStepId() != 0) {
builder.append(String.format(mCtx.getString(R.string.step), position));
}
builder.append(step.getShortDescription());
holder.recipeStepTextView.setText(builder.toString());
if(mCtx.getResources().getBoolean(R.bool.tablet)) {
if (mPreviousSelectedStepPosition == position) {
holder.stepCardView.setBackgroundColor(mCtx.getResources().getColor(R.color.colorAccent));
} else {
holder.stepCardView.setBackgroundColor(mCtx.getResources().getColor(R.color.cardview_light_background));
}
}
}
@Override
public int getItemCount() {
return mSteps.size();
}
public class IngredientViewHolder extends RecyclerView.ViewHolder implements View.OnClickListener {
@BindView(R.id.cv_step)
public CardView stepCardView;
@BindView(R.id.tv_recipe_step)
public TextView recipeStepTextView;
public IngredientViewHolder(View itemView) {
super(itemView);
ButterKnife.bind(this, itemView);
itemView.setOnClickListener(this);
}
@Override
public void onClick(View view) {
mIRecipeDescriptionListContract.onClickStep(getAdapterPosition());
if(mCtx.getResources().getBoolean(R.bool.tablet)) {
RecipesRepository.getInstance().setSelectedStepPosition(getAdapterPosition());
mPreviousSelectedStepPosition = getAdapterPosition();
notifyDataSetChanged();
}
}
}
}
<file_sep>/app/src/main/java/com/juancoob/nanodegree/and/backingapp/presentation/recipeList/RecipeListActivity.java
package com.juancoob.nanodegree.and.backingapp.presentation.recipeList;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import com.juancoob.nanodegree.and.backingapp.R;
import com.juancoob.nanodegree.and.backingapp.domain.executor.impl.ThreadExecutor;
import com.juancoob.nanodegree.and.backingapp.domain.model.Recipe;
import com.juancoob.nanodegree.and.backingapp.domain.threading.impl.MainThreadImpl;
import com.juancoob.nanodegree.and.backingapp.presentation.recipeDescriptionList.RecipeDescriptionListActivity;
import com.juancoob.nanodegree.and.backingapp.repository.RecipesRepository;
import com.juancoob.nanodegree.and.backingapp.util.ActivityUtils;
import com.juancoob.nanodegree.and.backingapp.util.Constants;
public class RecipeListActivity extends AppCompatActivity implements IRecipeListContract {
private RecipeListPresenter mRecipeListPresenter;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_recipe_list);
RecipeListFragment recipeListFragment = (RecipeListFragment) getSupportFragmentManager().findFragmentById(R.id.fl_content_frame);
if(recipeListFragment == null) {
recipeListFragment = com.juancoob.nanodegree.and.backingapp.presentation.recipeList.RecipeListFragment.getInstance();
ActivityUtils.addFragmentToActivity(getSupportFragmentManager(), recipeListFragment, R.id.fl_content_frame);
}
mRecipeListPresenter = new RecipeListPresenter(
recipeListFragment,
ThreadExecutor.getInstance(),
MainThreadImpl.getInstance(),
RecipesRepository.getInstance());
recipeListFragment.setPresenter(mRecipeListPresenter);
}
public RecipeListPresenter getRecipeListPresenter() {
return mRecipeListPresenter;
}
@Override
public void onClickRecipe(Recipe recipe) {
RecipesRepository.getInstance().setRecipeIngredients(recipe.getIngredients());
RecipesRepository.getInstance().setRecipeSteps(recipe.getSteps());
RecipesRepository.getInstance().setSelectedStepPosition(getResources().getInteger(R.integer.default_number));
Intent intentToDetail = new Intent(this, RecipeDescriptionListActivity.class);
intentToDetail.putExtra(Constants.RECIPE_NAME, recipe.getRecipeName());
startActivity(intentToDetail);
}
}
<file_sep>/app/src/main/java/com/juancoob/nanodegree/and/backingapp/presentation/widget/BackingAppWidgetActivity.java
package com.juancoob.nanodegree.and.backingapp.presentation.widget;
import android.appwidget.AppWidgetManager;
import android.content.Intent;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.DisplayMetrics;
import android.widget.RemoteViews;
import com.juancoob.nanodegree.and.backingapp.R;
import com.juancoob.nanodegree.and.backingapp.adapter.IRecipeListAdapterContract;
import com.juancoob.nanodegree.and.backingapp.adapter.Impl.RecipeListAdapter;
import com.juancoob.nanodegree.and.backingapp.domain.executor.impl.ThreadExecutor;
import com.juancoob.nanodegree.and.backingapp.domain.model.Ingredient;
import com.juancoob.nanodegree.and.backingapp.domain.model.Recipe;
import com.juancoob.nanodegree.and.backingapp.domain.threading.impl.MainThreadImpl;
import com.juancoob.nanodegree.and.backingapp.domain.usecase.FetchingRecipesUseCase;
import com.juancoob.nanodegree.and.backingapp.domain.usecase.impl.FetchingRecipesUseCaseImpl;
import com.juancoob.nanodegree.and.backingapp.presentation.recipeList.IRecipeListContract;
import com.juancoob.nanodegree.and.backingapp.repository.RecipesRepository;
import java.util.List;
import butterknife.BindView;
import butterknife.ButterKnife;
/**
* Created by <NAME> on 1/05/18.
*/
public class BackingAppWidgetActivity extends AppCompatActivity implements IRecipeListContract, FetchingRecipesUseCase.Callback {
@BindView(R.id.rv_widget_recipes)
public RecyclerView recipesRecyclerViewWidget;
private IRecipeListAdapterContract mAdapter;
private int mAppWidgetId = AppWidgetManager.INVALID_APPWIDGET_ID;
private AppWidgetManager mAppWidgetManager;
private RemoteViews mRemoteViews;
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.backing_app_widget);
setResult(RESULT_CANCELED);
ButterKnife.bind(this);
setTitle(R.string.widget_title);
}
@Override
protected void onStart() {
super.onStart();
initRecipesRecyclerView();
getRecipes();
}
private void initRecipesRecyclerView() {
LinearLayoutManager linearLayoutManager;
GridLayoutManager gridLayoutManager;
if (getResources().getBoolean(R.bool.tablet)) {
gridLayoutManager = new GridLayoutManager(this, getNumberColumns());
recipesRecyclerViewWidget.setLayoutManager(gridLayoutManager);
} else {
linearLayoutManager = new LinearLayoutManager(this);
recipesRecyclerViewWidget.setLayoutManager(linearLayoutManager);
}
mAdapter = new RecipeListAdapter(this, this);
recipesRecyclerViewWidget.setAdapter((RecyclerView.Adapter) mAdapter);
}
private int getNumberColumns() {
DisplayMetrics displayMetrics = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(displayMetrics);
int width = displayMetrics.widthPixels;
int columns = width / getResources().getInteger(R.integer.width_divider);
if (columns >= 2) return columns;
else return 2;
}
private void getRecipes() {
FetchingRecipesUseCase useCase = new FetchingRecipesUseCaseImpl(
ThreadExecutor.getInstance(),
MainThreadImpl.getInstance(),
this,
RecipesRepository.getInstance());
useCase.execute();
}
@Override
public void onRecipesRetrieved(List<Recipe> recipes) {
mAdapter.updateRecipes(recipes);
mAppWidgetManager = AppWidgetManager.getInstance(this);
mRemoteViews = new RemoteViews(this.getPackageName(), R.layout.backing_app_recipe_widget);
Intent intent = getIntent();
Bundle extras = intent.getExtras();
if (extras != null) {
mAppWidgetId = extras.getInt(AppWidgetManager.EXTRA_APPWIDGET_ID,
AppWidgetManager.INVALID_APPWIDGET_ID);
}
if (AppWidgetManager.INVALID_APPWIDGET_ID == mAppWidgetId) {
finish();
}
}
@Override
public void onNoInternetConnection() {
}
@Override
public void onClickRecipe(Recipe recipe) {
mRemoteViews.setTextViewText(R.id.tv_widget_recipe_name, recipe.getRecipeName());
StringBuilder stringBuilder = new StringBuilder();
boolean notFirstTime = false;
for (Ingredient ingredient : recipe.getIngredients()) {
if (notFirstTime) {
stringBuilder.append(getString(R.string.ingredients_space));
}
if (ingredient.getQuantity() == Math.round(ingredient.getQuantity())) {
stringBuilder.append(getString(R.string.ingredients_dot));
stringBuilder.append(String.format(getString(R.string.recipe_ingredient_integer),
(int) ingredient.getQuantity(), ingredient.getMeasure(), ingredient.getIngredient()));
} else {
stringBuilder.append(getString(R.string.ingredients_dot));
stringBuilder.append(String.format(getString(R.string.recipe_ingredient_float),
ingredient.getQuantity(), ingredient.getMeasure(), ingredient.getIngredient()));
}
notFirstTime = true;
}
mRemoteViews.setTextViewText(R.id.tv_widget_ingredients, stringBuilder.toString());
mAppWidgetManager.updateAppWidget(mAppWidgetId, mRemoteViews);
Intent result = new Intent();
result.putExtra(AppWidgetManager.EXTRA_APPWIDGET_ID, mAppWidgetId);
setResult(RESULT_OK, result);
finish();
}
}
<file_sep>/app/src/main/java/com/juancoob/nanodegree/and/backingapp/presentation/recipeDescriptionList/IRecipeDescriptionListContract.java
package com.juancoob.nanodegree.and.backingapp.presentation.recipeDescriptionList;
import com.juancoob.nanodegree.and.backingapp.presentation.base.presenters.BasePresenter;
import com.juancoob.nanodegree.and.backingapp.presentation.base.ui.BaseView;
/**
* Created by <NAME> on 28/04/18.
*/
public interface IRecipeDescriptionListContract {
void onClickStep(int selectedPosition);
interface View extends BaseView {
void showIngredients();
void showSteps();
}
interface Presenter extends BasePresenter {
}
}
<file_sep>/app/src/main/java/com/juancoob/nanodegree/and/backingapp/util/ActivityUtils.java
package com.juancoob.nanodegree.and.backingapp.util;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
/**
* Created by <NAME> on 23/04/18.
*/
public final class ActivityUtils {
public static void addFragmentToActivity(FragmentManager fragmentManager, Fragment fragment, int fragmentId) {
if(fragmentManager != null && fragment != null) {
FragmentTransaction transaction = fragmentManager.beginTransaction();
transaction.add(fragmentId, fragment);
transaction.commit();
}
}
}
<file_sep>/app/src/main/java/com/juancoob/nanodegree/and/backingapp/presentation/base/presenters/AbstractPresenter.java
package com.juancoob.nanodegree.and.backingapp.presentation.base.presenters;
import com.juancoob.nanodegree.and.backingapp.domain.executor.Executor;
import com.juancoob.nanodegree.and.backingapp.domain.threading.MainThread;
/**
* Created by <NAME> on 18/04/18.
*/
public abstract class AbstractPresenter {
protected final Executor mExecutor;
protected final MainThread mMainThread;
public AbstractPresenter(Executor executor, MainThread mainThread) {
mExecutor = executor;
mMainThread = mainThread;
}
}
<file_sep>/app/src/main/java/com/juancoob/nanodegree/and/backingapp/domain/threading/impl/MainThreadImpl.java
package com.juancoob.nanodegree.and.backingapp.domain.threading.impl;
import android.os.Handler;
import android.os.Looper;
import com.juancoob.nanodegree.and.backingapp.domain.threading.MainThread;
/**
* Created by <NAME> on 17/04/18.
*/
public class MainThreadImpl implements MainThread {
private static final MainThread sMainThread = new MainThreadImpl();
private final Handler mHandler;
private MainThreadImpl() {
mHandler = new Handler(Looper.getMainLooper());
}
public static MainThread getInstance() {
return sMainThread;
}
@Override
public void post(Runnable runnable) {
mHandler.post(runnable);
}
}
<file_sep>/README.md
# BackingApp
This is the backing app project stage 3 from Udacity Android Nanodegree.
* It's implemented using MVP Clean Pattern thanks to https://medium.com/@dmilicic/a-detailed-guide-on-developing-android-apps-using-the-clean-architecture-pattern-d38d71e94029
* This codelab helped me to understand the Exoplayer better: https://codelabs.developers.google.com/codelabs/exoplayer-intro/#0
* I saw how to match the toolbar title using Espresso thanks to this article: http://blog.sqisland.com/2015/05/espresso-match-toolbar-title.html
* The app and widget icon is made by Freepik from www.flaticon.com
* Thanks to all my classmates who helped me keep going forward
It will be improved in the next version:
* Test the logic to get recipes
* Redesign the UI
* Add Kotlin
Libraries used:
* design:27.1.1 -> Adds the design library for Android
* constraint-layout:1.1.0 -> Brings the new constraint layout
* recyclerview-v7:27.1.1 -> Adds the recyclerView functionality
* cardview-v7:27.1.1 -> Adds the card style
* picasso:2.71828 -> Manages images
* butterknife:8.8.1 -> Manages bindings
* retrofit:2.4.0 -> Manages HTTP requests
* exoplayer:2.7.1 -> Manages videos
* runner:1.0.2 -> Espresso testing libraries
* espresso:espresso-core:3.0.2
* rules:1.0.2
* espresso-contrib:3.0.2
* support-annotations:27.1.1
* espresso-intents:3.0.2
* espresso-idling-resource:3.0.2
<file_sep>/app/src/main/java/com/juancoob/nanodegree/and/backingapp/presentation/recipeList/RecipeListFragment.java
package com.juancoob.nanodegree.and.backingapp.presentation.recipeList;
import android.content.Context;
import android.content.DialogInterface;
import android.os.Bundle;
import android.os.Parcelable;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v7.app.AlertDialog;
import android.support.v7.widget.GridLayoutManager;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.DisplayMetrics;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ProgressBar;
import com.juancoob.nanodegree.and.backingapp.R;
import com.juancoob.nanodegree.and.backingapp.adapter.IRecipeListAdapterContract;
import com.juancoob.nanodegree.and.backingapp.adapter.Impl.RecipeListAdapter;
import com.juancoob.nanodegree.and.backingapp.domain.model.Recipe;
import com.juancoob.nanodegree.and.backingapp.util.Constants;
import java.util.ArrayList;
import java.util.List;
import butterknife.BindView;
import butterknife.ButterKnife;
/**
* Created by <NAME> on 23/04/18.
*/
public class RecipeListFragment extends Fragment implements com.juancoob.nanodegree.and.backingapp.presentation.recipeList.IRecipeListContract.View {
@BindView(R.id.rv_recipes)
public RecyclerView recipesRecyclerView;
@BindView(R.id.pb_recipe)
public ProgressBar progressBarRecipe;
private com.juancoob.nanodegree.and.backingapp.presentation.recipeList.RecipeListPresenter mRecipeListPresenter;
private List<Recipe> mRecipes = new ArrayList<>();
private LinearLayoutManager mLinearLayoutManager;
private GridLayoutManager mGridLayoutManager;
private IRecipeListAdapterContract mAdapter;
private IRecipeListContract mIRecipeListContract;
private Parcelable mCurrentRecyclerViewState;
public static RecipeListFragment getInstance() {
return new RecipeListFragment();
}
@Nullable
@Override
public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.fragment_recipe_list, container, false);
ButterKnife.bind(this, view);
return view;
}
@Override
public void onStart() {
super.onStart();
initRecipesRecyclerView();
}
@Override
public void onResume() {
super.onResume();
mRecipeListPresenter.resume();
}
@Override
public void onSaveInstanceState(@NonNull Bundle outState) {
super.onSaveInstanceState(outState);
outState.putParcelableArrayList(Constants.RECIPE_LIST, (ArrayList<? extends Parcelable>) mRecipes);
if (getResources().getBoolean(R.bool.tablet)) {
outState.putParcelable(Constants.CURRENT_GRID_POSITION, mGridLayoutManager.onSaveInstanceState());
} else {
outState.putParcelable(Constants.CURRENT_LINEAR_POSITION, mLinearLayoutManager.onSaveInstanceState());
}
}
@Override
public void onViewStateRestored(@Nullable Bundle savedInstanceState) {
super.onViewStateRestored(savedInstanceState);
if (savedInstanceState != null) {
mRecipes = savedInstanceState.getParcelableArrayList(Constants.RECIPE_LIST);
if (getResources().getBoolean(R.bool.tablet)) {
mCurrentRecyclerViewState = savedInstanceState.getParcelable(Constants.CURRENT_GRID_POSITION);
} else {
mCurrentRecyclerViewState = savedInstanceState.getParcelable(Constants.CURRENT_LINEAR_POSITION);
}
}
}
private void initRecipesRecyclerView() {
if (getResources().getBoolean(R.bool.tablet)) {
mGridLayoutManager = new GridLayoutManager(getContext(), getNumberColumns());
recipesRecyclerView.setLayoutManager(mGridLayoutManager);
} else {
mLinearLayoutManager = new LinearLayoutManager(getContext());
recipesRecyclerView.setLayoutManager(mLinearLayoutManager);
}
mAdapter = new RecipeListAdapter(getContext(), mIRecipeListContract);
recipesRecyclerView.setAdapter((RecyclerView.Adapter) mAdapter);
}
private int getNumberColumns() {
if (getActivity() != null) {
DisplayMetrics displayMetrics = new DisplayMetrics();
getActivity().getWindowManager().getDefaultDisplay().getMetrics(displayMetrics);
int width = displayMetrics.widthPixels;
int columns = width / getResources().getInteger(R.integer.width_divider);
if (columns >= 2) return columns;
}
return 2;
}
@Override
public void showRecipes(List<Recipe> recipes) {
mRecipes = recipes;
mAdapter.updateRecipes(mRecipes);
if (mCurrentRecyclerViewState != null) {
if (getResources().getBoolean(R.bool.tablet)) {
mGridLayoutManager.onRestoreInstanceState(mCurrentRecyclerViewState);
} else {
mLinearLayoutManager.onRestoreInstanceState(mCurrentRecyclerViewState);
}
}
}
@Override
public void showProgress() {
progressBarRecipe.setVisibility(View.VISIBLE);
}
@Override
public void hideProgress() {
progressBarRecipe.setVisibility(View.GONE);
}
@Override
public void noInternetConnection() {
if(getContext() != null) {
AlertDialog.Builder builder = new AlertDialog.Builder(getContext());
builder.setTitle(R.string.no_internet_title)
.setMessage(R.string.no_internet_description)
.setIcon(R.drawable.ic_signal_wifi_off_black_24dp)
.setPositiveButton(R.string.ok, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialogInterface, int i) {
mRecipeListPresenter.fetchRecipes();
dialogInterface.dismiss();
}
})
.setNegativeButton(R.string.cancel, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialogInterface, int i) {
dialogInterface.dismiss();
}
}).show();
}
}
public void setPresenter(com.juancoob.nanodegree.and.backingapp.presentation.recipeList.RecipeListPresenter presenter) {
this.mRecipeListPresenter = presenter;
}
public List<Recipe> getRecipes() {
return mRecipes;
}
@Override
public void onAttach(Context context) {
super.onAttach(context);
if (context instanceof IRecipeListContract) {
mIRecipeListContract = (IRecipeListContract) context;
}
}
@Override
public void onDetach() {
super.onDetach();
mIRecipeListContract = null;
}
}
<file_sep>/app/src/main/java/com/juancoob/nanodegree/and/backingapp/adapter/Impl/RecipeListAdapter.java
package com.juancoob.nanodegree.and.backingapp.adapter.Impl;
import android.content.Context;
import android.support.annotation.NonNull;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;
import com.juancoob.nanodegree.and.backingapp.R;
import com.juancoob.nanodegree.and.backingapp.adapter.IRecipeListAdapterContract;
import com.juancoob.nanodegree.and.backingapp.domain.model.Recipe;
import com.juancoob.nanodegree.and.backingapp.presentation.recipeList.IRecipeListContract;
import com.squareup.picasso.Picasso;
import java.util.ArrayList;
import java.util.List;
import butterknife.BindView;
import butterknife.ButterKnife;
/**
* Created by <NAME> on 25/04/18.
*/
public class RecipeListAdapter extends RecyclerView.Adapter<RecipeListAdapter.RecipeViewHolder> implements IRecipeListAdapterContract {
private Context mCtx;
private final List<Recipe> mRecipes = new ArrayList<>();
private final IRecipeListContract mIRecipeListContract;
public RecipeListAdapter(Context context, IRecipeListContract iRecipeListContract) {
mCtx = context;
mIRecipeListContract = iRecipeListContract;
}
@Override
public void updateRecipes(List<Recipe> recipes) {
mRecipes.clear();
mRecipes.addAll(recipes);
notifyDataSetChanged();
}
@NonNull
@Override
public RecipeViewHolder onCreateViewHolder(@NonNull ViewGroup parent, int viewType) {
View view = LayoutInflater.from(mCtx).inflate(R.layout.item_recipe, parent, false);
return new RecipeViewHolder(view);
}
@Override
public void onBindViewHolder(@NonNull RecipeViewHolder holder, int position) {
Recipe recipe = mRecipes.get(position);
if(!recipe.getImagePath().isEmpty()) {
Picasso.get().load(recipe.getImagePath()).into(holder.recipeNameIconImageView);
}
holder.recipeNameTextView.setText(String.format(mCtx.getString(R.string.recipe_name_servings), recipe.getRecipeName(), recipe.getServings()));
}
@Override
public int getItemCount() {
return mRecipes.size();
}
public class RecipeViewHolder extends RecyclerView.ViewHolder implements View.OnClickListener {
@BindView(R.id.iv_recipe_name_icon)
public ImageView recipeNameIconImageView;
@BindView(R.id.tv_recipe_name)
public TextView recipeNameTextView;
public RecipeViewHolder(View itemView) {
super(itemView);
ButterKnife.bind(this, itemView);
itemView.setOnClickListener(this);
}
@Override
public void onClick(View view) {
mIRecipeListContract.onClickRecipe(mRecipes.get(getAdapterPosition()));
}
}
}
|
56f3e4744aebd0ab55a5cfa2253f5cbc898554fd
|
[
"Markdown",
"Java"
] | 19
|
Java
|
juancoob/BackingApp
|
161618dfef1b1839bd512c992157a26e32e45a62
|
6ff410068519dee138e096d18be3e144ff25b08d
|
refs/heads/master
|
<file_sep>var container = document.getElementById('canvasContainer');
var canvas = document.getElementById('backgroundCanvas');
container.style.height = container.clientWidth*9/16 + 'px';
canvas.width = container.clientWidth;
canvas.height = canvas.width*9/16;
width = canvas.width;
height = canvas.height;
var ctx = canvas.getContext('2d');
var xoff = width/600;
var yoff = height/338;
var bounce = -1;
var dotsNum = Math.floor(70*width/1280);
var connectingDotsNum = Math.floor(100*width/1280);
var dotSpeed = 1;
var lineLength = Math.floor(20*width/640);
var sourceImage = document.getElementById('sourceImage');
function setParams() {
container.style.height = container.clientWidth*9/16 + 'px';
canvas.width = container.clientWidth;
canvas.height = canvas.width*9/16;
width = canvas.width;
height = canvas.height;
xoff = width/600;
yoff = height/338;
dotsNum = Math.floor(100*width/1280);
connectingDotsNum = Math.floor(70*width/1280);
lineLength = Math.floor(20*width/640);
}
var dots = [];
var connectingDots = [];
var mousePosition = {};
ctx.lineWidth = .2;
ctx.lineCap = "round";
ctx.lineJoin = "round";
/*===== Image positions ==========*/
var image1 = {x: 14*width/100, y: 33*height/100, wid: 300*width/1280, hig: 300*height/720};
var image2 = {x: 92*width/100, y: 37*height/100, wid: 150*width/1280, hig: 150*height/720};
/*==== initialize dots ===========*/
for(var i=0;i<dotsNum;i++){
dots.push({
x: Math.random()*width,
y: Math.random()*height,
vx: Math.random() < 0.5 ? Math.random()*dotSpeed : Math.random()*dotSpeed*bounce,
vy: Math.random() < 0.5 ? Math.random()*dotSpeed : Math.random()*dotSpeed*bounce,
color: '#fff',
radius: Math.random() + 0.5
})
}
for(var j=0;j<connectingDotsNum;j++){
connectingDots.push({
x: Math.random()*width,
y: Math.random()*height,
vx: Math.random() < 0.5 ? Math.random()*dotSpeed : Math.random()*dotSpeed*bounce,
vy: Math.random() < 0.5 ? Math.random()*dotSpeed : Math.random()*dotSpeed*bounce,
color: '#fff',
radius: Math.random()
})
}
/*==== Background Gradient ========*/
var grad = ctx.createLinearGradient(0,0,width,0);
grad.addColorStop(0,'#091965');
grad.addColorStop(1, '#0483fc');
function drawBackground(ctx, xoff, yoff) {
ctx.beginPath();
ctx.clearRect(0,0,width,height);
ctx.moveTo(0*xoff, 68*yoff);
ctx.bezierCurveTo(104*xoff, 0*yoff, 159*xoff, 98*yoff, 238*xoff, 0*yoff);
ctx.lineTo(600*xoff, 0*yoff);
ctx.lineTo(600*xoff, 168*yoff);
ctx.bezierCurveTo(555*xoff, 223*yoff, 545*xoff, 237*yoff, 518*xoff, 253*yoff);
ctx.bezierCurveTo(489*xoff, 275*yoff, 377*xoff, 297*yoff, 285*xoff, 228*yoff);
ctx.bezierCurveTo(173*xoff, 128*yoff, 80*xoff, 182*yoff, 0*xoff, 231*yoff);
ctx.fillStyle = grad;
ctx.fill();
ctx.closePath();
ctx.beginPath();
ctx.moveTo(600*xoff, 150*yoff);
ctx.bezierCurveTo(553*xoff, 181*yoff, 523*xoff, 259*yoff, 419*xoff, 274*yoff);
ctx.bezierCurveTo(553*xoff, 277*yoff, 565*xoff, 195*yoff, 600*xoff, 173*yoff);
ctx.lineTo(600*xoff, 150*yoff);
ctx.fillStyle = '#0483fc';
ctx.fill();
ctx.closePath();
}
function drawImage(img, mouse) {
ctx.beginPath();
/*if(mouse.x>0 && mouse.y>0){
ctx.drawImage(img, image1.x + mouse.x/50, image1.y - (height - mouse.y)/50, image1.wid, image1.hig);
ctx.drawImage(img, image2.x + (width - mouse.x)/60, image2.y - (height - mouse.y)/60, image2.wid, image2.hig);
}else {*/
ctx.drawImage(img, image1.x, image1.y, image1.wid, image1.hig);
ctx.drawImage(img, image2.x, image2.y, image2.wid, image2.hig);
/*}*/
ctx.closePath();
}
function drawDots(ctx, dots) {
var dot;
for(var i =0;i<dots.length;i++){
dot = dots[i];
ctx.beginPath();
ctx.moveTo(dot.x,dot.y);
ctx.arc(dot.x, dot.y, dot.radius, 0, 2*Math.PI);
ctx.fillStyle = dot.color;
ctx.fill();
ctx.closePath();
}
}
function updateDots(dots){
var dot;
for(var i=0;i<dots.length;i++){
dot = dots[i];
dot.x += dot.vx;
dot.y += dot.vy;
if(dot.x>width || dot.x<0){
dot.vx *= bounce;
}
if(dot.y>height || dot.y<0){
dot.vy *= bounce;
}
}
}
function drawLines(dots1, dots2){
var dot1,dot2;
for(var i=0;i<dots1.length;i++){
dot1 = dots1[i];
drawLine(dot1.x, dot1.y, dots2);
}
drawLine(mousePosition.x, mousePosition.y, dots2);
drawLine(mousePosition.x, mousePosition.y, dots1);
}
function drawLine(x,y,dots){
var dot;
if(x>0 && y>0) {
for (var j = 0; j < dots.length; j++) {
dot = dots[j];
if (Math.abs(x - dot.x) < lineLength && Math.abs(y - dot.y) < lineLength) {
ctx.beginPath();
ctx.moveTo(Math.floor(x) + 0.5, Math.floor(y) + 0.5);
ctx.lineTo(Math.floor(dot.x) + 0.5, Math.floor(dot.y)+ 0.5);
ctx.strokeStyle = dot.color;
ctx.stroke();
ctx.closePath();
}
}
}
}
function updateMouse(e){
mousePosition.x = e.clientX;
mousePosition.y = e.clientY;
}
container.addEventListener("mousemove", function (event) {
updateMouse(event);
});
container.onmouseleave = function(){
mousePosition.x = 0;
mousePosition.y = 0;
};
$(window).resize(function(){
setParams();
});
function animate(){
updateDots(dots);
updateDots(connectingDots);
drawBackground(ctx,xoff, yoff);
drawDots(ctx, dots);
drawDots(ctx, connectingDots);
drawLines(connectingDots, dots);
drawImage(sourceImage, mousePosition);
window.requestAnimationFrame(animate);
}
window.requestAnimationFrame(animate);
|
425857136e7fc1bc176a4145254dbe8ee571f65a
|
[
"JavaScript"
] | 1
|
JavaScript
|
avreddy1996/company-website
|
02290e5d93cdc452c7e57e63a2540b0c9a522abe
|
4bfb855255a8e6c57ea24a1fb3ab6e68a1ad3274
|
refs/heads/master
|
<file_sep>#!/usr/bin/env node
var CommanderWrapper = require('./utils/commander-wrapper');
var log = require('./utils/log');
var config = require('./utils/get-config')();
var program = CommanderWrapper(function(commander) {
return commander
.command('copy', 'copy src lang folder to dist')
.command('all', 'run all build lang commands', {isDefault: true});
});
if (!config.lang) {
log.warn('lang builds are turned off');
process.exit();
}
<file_sep># sb-test-all(1) - Run all tests for a project
## SYNOPSIS
sb-test-all [-l, --log-level <level>] [-h,--help] [-q,--quiet] [-V,--version]
[-w, --watch] [-nb, --no-build] [-nl, --no-lint]
[-b, --browsers <chrome,firefox,safari>] [-p, --port <port=9876>]
## DESCRIPTION
Test all of the following through sub-binaries:
* node - see sb-test-all-node(1)
* browser - see sb-test-all-browser(1)
## OPTIONS
-l, --log-level <level>
The level of log messages that you want to see. Can be none, fatal, error,
warn, info, verbose, or debug. Defaults to info.
-h, --help
View the help information for this binary
-V, --version
View the version of this binary
-q, --quiet
Do not log any messages to stdout or stderr
-w, --watch
Watch unit test dist files for changes and re-run/re-load tests.
-nb, --no-build
Do not build before testing.
-nl, --no-lint
Do not lint before testing.
-b, --browsers <chrome,firefox,safari>
Comma-separated list of browsers to run on.
-p, --port <port=9876>
The port to use for the browser test server. Can be defined with the
TEST_PORT environment variable as well. Defaults to 9876.
## EXAMPLES
Get the current version of spellbook
sb-test-all -V
sb-test-all --version
Get help for this binary
sb-test-all --help
sb-test-all -h
Set the log level for this binary
sb-test-all -l info
sb-test-all -l fatal
sb-test-all --log-level debug
sb-test-all --log-level error
Don't output anything
sb-test-all -q
sb-test-all --quiet
Watch for changes and re-load/re-run tests
sb-test-all -w
sb-test-all --watch
Do not build before testing
sb-test-all -nb
sb-test-all --no-build
Do not lint before testing
sb-test-all -nl
sb-test-all --no-lint
Only test specific browsers instead of the detected browsers
sb-test-all -b chrome
sb-test-all --browsers chrome,firefox
Use a different port
sb-test-all -p 3333
sb-test-all --port 8888
## ENVIRONMENT AND CONFIGURATION VARIABLES
SB_LOG_LEVEL='info'
An environment variable that sets the log level to use for all videojs-spellbook
binaries. Can be set to fatal, error, warn, info, verbose, debug, or none.
TEST_PORT=9876
The port that the test server (karma-runner) is running on. Defaults to 9876.
<package.json>.spellbook.log-level=info
A package.json variable that sets the default log level to use for all videojs-spellbook
binaries. Can be set to fatal, error, warn, info, verbose, debug, or none.
<package.json>.spellbook.shim-videojs=true
If video.js is shimmed, it will be included in unit tests for use. Otherwise it won't be.
<package.json>.spellbook.test='{}'
Test configuration to use in spellbook. If this is set to a false value
then tests will never be run.
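For example, the same settings could be provided from the shell for a single run,
or stored under the spellbook key in package.json (the values shown here are purely
illustrative, not project defaults):
    SB_LOG_LEVEL=debug TEST_PORT=8888 sb-test-all
    "spellbook": { "log-level": "debug", "test": "{}" }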
## SEE ALSO
sb-test(1), sb-test-all-browser(1), sb-test-all-node(1)
## EXIT
0 - all commands succeeded
1 - one or more sub-command failed
## Spellbook
Part of the sb(1) suite
<file_sep>#!/usr/bin/env node
var config = require('./utils/get-config')();
var PathsExist = require('./utils/paths-exist');
var path = require('path');
var GetPath = require('./utils/get-path');
var fs = require('fs');
var log = require('./utils/log');
var Promise = require('bluebird');
var Watch = require('./utils/watch');
var browserifyHelper = require('./utils/browserify-helper');
var webpack = require('webpack');
var shimConf = require('../config/shim.config.js');
var Run = require('./utils/run');
var mkdirp = require('mkdirp');
var rimraf = require('rimraf');
var es6 = function(bundler) {
return [
'/**',
' * ' + bundler + ' test ',
' */',
'import pkg from "' + path.join('..', '..', path.relative(config.path, config.jsNextMain)) + '";',
'',
'QUnit.module("' + bundler + ' require");',
'QUnit.test("' + config.name + ' should be requireable via ' + bundler + '", (assert) => {',
' assert.ok(pkg, "' + config.name + ' is required properly");',
'});',
].join('\n');
};
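// For illustration (assuming a hypothetical package named "my-plugin" whose
// jsnext:main is src/js/index.js), es6('webpack') produces roughly:
//
//   /**
//    * webpack test
//    */
//   import pkg from "../../src/js/index.js";
//
//   QUnit.module("webpack require");
//   QUnit.test("my-plugin should be requireable via webpack", (assert) => {
//     assert.ok(pkg, "my-plugin is required properly");
//   });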
var CommanderWrapper = require('./utils/commander-wrapper');
var program = CommanderWrapper(function(commander) {
return commander
.option('-w, --watch', 'keep the tester running and run as things change');
});
var dist = path.join(config.dist, 'test');
// always finish the log
var exit = function(code) {
var rmGlob = path.join(dist, '*.start.js');
log.debug('removing ' + rmGlob);
rimraf.sync(rmGlob);
};
process.on('exit', exit);
var build = function() {
// TODO: get rollup to work during watch
program.bundlers = [/*'rollup',*/ 'webpack', 'browserify'];
var promises = [];
log.info('Building...');
program.bundlers.forEach(function(bundlerName) {
var srcFile = path.join(dist, bundlerName + '.start.js');
var distFile = path.join(dist, bundlerName + '.test.js');
var unitTestContents = es6(bundlerName);
var command;
// cleanup
rimraf.sync(srcFile);
rimraf.sync(distFile);
if (bundlerName === 'browserify') {
command = function() {
return browserifyHelper({
dist: distFile.replace(/\.js$/, ''),
src: srcFile,
standalone: false,
watch: program.watch,
internalMap: true,
noRollup: true
});
};
} else if (bundlerName === 'rollup') {
// rollup only works on es6
command = function() {
// due to a bug with rollupify we cannot use watchify here
var build = function() {
return browserifyHelper({
dist: distFile.replace(/\.js$/, ''),
src: srcFile,
standalone: false,
watch: false,
internalMap: true
});
};
if (program.watch) {
Watch(path.join(config.js.src, '**', '*.js'), build);
return Promise.resolve();
} else {
return build();
}
};
} else if (bundlerName === 'webpack') {
command = function() {
var externals = {};
var first = true;
Object.keys(shimConf).forEach(function(k) {
externals[k] = shimConf[k].exports.replace('global:', '');
});
return new Promise(function(resolve, reject) {
webpack({
watch: program.watch || false,
module: {
loaders: [{
test: /\.js$/,
exclude: /node_modules/,
cacheDirectory: program.watch || false,
loader: 'babel-loader',
query: {presets: [GetPath('babel-preset.config.js')]}
}]
},
resolve: {
root: [
path.join(__dirname, '..', 'node_modules'),
path.join(config.path, 'node_modules')
],
},
resolveLoader: {
root: [
path.join(__dirname, '..', 'node_modules'),
path.join(config.path, 'node_modules')
],
},
context: config.path,
entry: srcFile,
// TODO: webpack breaks when we do inline-source-map...
devtool: 'inline-source-map',
externals: externals,
output: {
filename: path.basename(distFile),
path: path.dirname(distFile)
},
}, function(err, stats) {
if(err) {
reject(err);
}
var jsonStats = stats.toJson();
if (jsonStats.errors.length) {
reject(jsonStats.errors.join(''));
}
if (jsonStats.warnings.length) {
log.warn(jsonStats.warnings.join(''));
}
// TODO: webpack logs twice during first watch
if (first && program.watch) {
first = false;
} else {
log.info('Wrote: ' + distFile);
}
resolve();
});
});
};
}
mkdirp.sync(path.dirname(srcFile));
mkdirp.sync(path.dirname(distFile));
fs.writeFileSync(srcFile, unitTestContents);
promises.push(command());
});
Promise.all(promises).then(function() {
// done
}).catch(function(err) {
log.error(err);
});
};
build();
<file_sep>#!/usr/bin/env node
var config = require('./utils/get-config')();
var browserSync = require('browser-sync');
var CommanderWrapper = require('./utils/commander-wrapper');
var log = require('./utils/log');
var proxies = require('./utils/proxies');
var path = require('path');
var program = CommanderWrapper(function(commander) {
return commander
.option('-p, --port <number>', 'what port to start the server on', parseInt)
.option('-tp, --test-port <number>', 'what port the karma server is on', parseInt)
.option('-o, --open', 'open the server in your default browser')
.option('-s, --sync', 'enable cross browser action sync')
.option('-t, --tunnel', 'share the local server over the internet via a tunnel')
.option('-nui, --no-ui', 'disable browser-sync ui');
});
program.testPort = program.testPort || process.env.TEST_PORT || 9876;
program.port = program.port || process.env.PORT || 9999;
program.open = program.open ? 'local' : false;
if (program.open && program.tunnel) {
program.open = 'tunnel';
}
if (program.ui) {
program.ui = {
port: (program.port+1)
};
}
var server = browserSync.create();
server.init({
server: {
baseDir: config.path,
middleware: [proxies.test(program, server)]
},
watchOptions: {
ignored: [path.join(config.path, 'node_modules')],
},
port: program.port,
ghostMode: program.sync || false,
ui: program.ui,
open: program.open,
online: program.tunnel || false,
reloadOnRestart: true,
reloadDelay: 200,
tunnel: program.tunnel || false,
logPrefix: function() {
return log.prefix('info');
},
files: [
path.join(config.path, 'dist', '**', '*.js'),
path.join(config.path, 'dist', '**', '*.css'),
path.join(config.path, 'dist', '**', '*.html'),
path.join(config.path, 'examples', '**', '*.html'),
path.join(config.path, 'index.html')
]
});
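// With the defaults above the app is served at http://localhost:9999, and the
// proxies.test middleware forwards /test to the karma test server (program.testPort,
// 9876 by default) so unit tests can be debugged from the same origin and shared
// over the internet when --tunnel is enabled.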
<file_sep>var config = require('../src/utils/get-config')();
var PathsExist = require('../src/utils/paths-exist');
var path = require('path');
var log = require('../src/utils/log');
module.exports = function(karmaConfig) {
var detectBrowsers = true;
if (karmaConfig.autoWatch && !karmaConfig.singleRun) {
detectBrowsers = false;
}
if (karmaConfig.browsers.length > 0) {
detectBrowsers = false;
}
var files = [];
var sbNodeDir = path.join('node_modules', 'videojs-spellbook', 'node_modules');
var nodeDir = path.join('node_modules');
var sinonDir = path.join('sinon', 'pkg');
if (PathsExist(path.join(nodeDir, sinonDir))) {
files.push(path.join(nodeDir, sinonDir, 'sinon.js'));
files.push(path.join(nodeDir, sinonDir, 'sinon-ie.js'));
} else if (PathsExist(path.join(sbNodeDir, sinonDir))) {
files.push(path.join(sbNodeDir, sinonDir, 'sinon.js'));
files.push(path.join(sbNodeDir, sinonDir, 'sinon-ie.js'));
} else {
log.fatal('sinon is not installed!');
process.exit(1);
}
if (config.shimVideojs) {
var vjsDir = path.join('video.js', 'dist');
if (PathsExist(path.join(nodeDir, vjsDir))) {
files.push(path.join(nodeDir, vjsDir, 'video.js'));
files.push(path.join(nodeDir, vjsDir, 'video-js.css'));
} else if (PathsExist(path.join(sbNodeDir, vjsDir))) {
log.info('using videojs-spellbook\'s version of video.js as there is no local version');
files.push(path.join(sbNodeDir, vjsDir, 'video.js'));
files.push(path.join(sbNodeDir, vjsDir, 'video-js.css'));
} else {
log.fatal('video.js is not installed, set spellbook.shim-videojs: false in package.json if you don\'t need it');
process.exit(1);
}
}
var dist = path.relative(config.path, config.dist);
if (config.css && config.css.src && PathsExist(config.css.src)) {
files.push(path.join(dist, 'browser', config.name + '.css'));
files.push({pattern: path.join(dist, 'browser', config.name + '.css.map'), included: false});
}
files.push(path.join(dist, 'test', '**', '*.test.js'));
karmaConfig.set({
reporters: ['dots'],
frameworks: ['qunit', 'detectBrowsers'],
basePath: config.path,
browsers: karmaConfig.browsers || [],
detectBrowsers: {
enabled: detectBrowsers,
usePhantomJS: false
},
loggers: [{type: path.join(__dirname, '../src/utils/log.js')}],
client: {
clearContext: false,
qunit: {showUI: true}
},
files: files.map(function(pattern) {
if (typeof pattern !== 'string') {
pattern.nocache = true;
return pattern;
}
return {pattern: pattern, nocache: true};
})
});
};
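// Usage sketch (hypothetical file name and require path): a project's
// karma.conf.js can simply delegate to this shared configuration, e.g.
//
//   module.exports = function(karmaConfig) {
//     require('videojs-spellbook/config/karma.config.js')(karmaConfig);
//   };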
<file_sep># Table of Contents
- this will be filled by a remark-plugin
# Test
## Alpha
### Beta
* list
* test
```js
var failBoat = true;
if (failBoat === true) {
process.exit(1);
}
```
# Test2
## Echo
### Foo
[bar]: http://nope.com
<file_sep>#!/usr/bin/env node
var CommanderWrapper = require('./utils/commander-wrapper');
var program = CommanderWrapper(function(commander) {
return commander
.command('require', 'require main es5 file in node')
.command('all', 'run all node tests', {isDefault: true});
});
<file_sep>#!/usr/bin/env node
var path = require('path');
var config = require('./utils/get-config')();
var GetFiles = require('./utils/get-files');
var Watch = require('./utils/watch');
var Run = require('./utils/run');
var log = require('./utils/log');
var CommanderWrapper = require('./utils/commander-wrapper');
var mkdirp = require('mkdirp');
var rimraf = require('rimraf');
var program = CommanderWrapper(function(commander) {
return commander
.option('-w, --watch', 'watch docs for changes and rebuild')
.option('-d, --dist <dist-dir>', 'directory to build api docs to', path.join(config.dist, 'docs', 'api'))
.arguments('<src-dir>')
.action(function(src) {
this.src = src;
});
});
if (!program.src) {
program.src = config.js.src;
}
if (!GetFiles(path.join(program.src, '**', '*.js'))) {
log.fatal('Source directory ' + program.src + ' does not exist or contains no js files!');
process.exit(1);
}
var command = [
'jsdoc', program.src,
'--configure', 'jsdoc.config.json',
'-r',
'-d', program.dist
];
var build = function() {
rimraf.sync(program.dist);
mkdirp.sync(path.dirname(program.dist));
log.info('Building...');
Run.one(command, {silent: true, nonFatal: program.watch, write: path.join(program.dist, 'index.html')}).then(function(retval) {
if (program.watch && retval.status !== 0) {
log.error(retval.stderr);
return;
}
log.info('Wrote: ' + program.dist);
});
};
if (program.watch) {
Watch(path.join(program.src, '**', '*.js'), build);
} else {
build();
}
<file_sep># sb-watch(1) - Watch files for changes and rebuild test, re-lint, and re-test
## SYNOPSIS
sb-watch [-l, --log-level <level>] [-h,--help] [-q,--quiet] [-V,--version]
[-nb, --no-build] [-nl, --no-lint] [-p, --port <port-number=9876>]
## DESCRIPTION
This binary is an alias for running the following in --watch mode:
1. sb-build-all(1)
2. sb-lint-all(1)
3. sb-test-all(1) with '--no-build' and '--no-lint'
## OPTIONS
-l, --log-level <level>
The level of log messages that you want to see. Can be none, fatal, error,
warn, info, verbose, or debug. Defaults to info.
-h, --help
View the help information for this binary
-V, --version
View the version of this binary
-q, --quiet
Do not log any messages to stdout or stderr
-nb, --no-build
Do not run an initial build and do not build on file changes.
-nl, --no-lint
Do not run an initial lint and do not lint on file changes.
-p, --port <port-number=9876>
What port to open the watching test server on. Default is 9876.
## EXAMPLES
Get the current version of spellbook
sb-watch -V
sb-watch --version
Get help for this binary
sb-watch --help
sb-watch -h
Set the log level for this binary
sb-watch -l info
sb-watch -l fatal
sb-watch --log-level debug
sb-watch --log-level error
Don't output anything
sb-watch -q
sb-watch --quiet
Don't build and watch
sb-watch -nb
sb-watch --no-build
Don't lint and watch
sb-watch -nl
sb-watch --no-lint
Set the test server port
sb-watch --port 7777
sb-watch -p 8888
## ENVIRONMENT AND CONFIGURATION VARIABLES
SB_LOG_LEVEL='info'
An environment variable that sets the log level to use for all videojs-spellbook
binaries. Can be set to fatal, error, warn, info, verbose, debug, or none.
TEST_PORT=9876
The port to run the test server (karma-runner) on. Defaults to 9876.
<package.json>.spellbook.log-level=info
A package.json variable that sets the default log level to use for all videojs-spellbook
binaries. Can be set to fatal, error, warn, info, verbose, debug, or none.
<package.json>.spellbook.ie8=false
Should spellbook make sure that IE8 is supported. Defaults to false.
<package.json>.spellbook.shim-videojs=true
Makes sure that video.js is included in unit tests but will not be bundled into
distribution js files. Defaults to true.
<package.json>.spellbook.css='{}'
CSS configuration to use in spellbook. If this is set to a false value then css
builds will never be attempted.
<package.json>.spellbook.css.src='src/css'
Source directory to use for css files, set in package.json. If this is unset
'src/css' will be used. If this directory does not exist css will not be built.
<package.json>.spellbook.js='{}'
JS configuration to use in spellbook. If this is set to a false value then js
builds will never be attempted.
<package.json>.spellbook.js.src='src/js'
Source directory to use for js files, set in package.json. If this is unset
'src/js' will be used. If this directory does not exist js will not be built.
<package.json>.spellbook.lang='{}'
Lang configuration to use in spellbook. If this is set to a false value then lang
builds will never be attempted.
<package.json>.spellbook.lang.src='lang/'
Source directory to use for lang files, set in package.json. If this is unset
'lang/' will be used. If this directory does not exist lang will not be built.
<package.json>.spellbook.docs='{}'
Documentation configuration to use in spellbook. If this is set to a false value
then docs builds will never be attempted.
<package.json>.spellbook.docs.src='docs/'
Source directory to use for docs files, set in package.json. If this is unset
'docs/' will be used. If this directory does not exist docs will not be built.
<package.json>.spellbook.test='{}'
Test configuration to use in spellbook. If this is set to a false value
then test builds will never be attempted.
<package.json>.spellbook.test.src='test/'
Source directory to use for test files, set in package.json. If this is unset
'test/' will be used. If this directory does not exist tests will not be built.
## SEE ALSO
sb(1), sb-build-all(1), sb-lint-all(1), sb-test-all(1)
## EXIT
Not used will always exit 0.
## Spellbook
Part of the sb(1) suite
<file_sep>#!/usr/bin/env node
var config = require('./get-config')();
var path = require('path');
var PathsExist = require('./paths-exist');
var Run = require('./run');
var Watch = require('./watch');
var eslintHelper = function(program) {
var files = [];
program.src.forEach(function(src) {
// make it recursive
if (PathsExist(src) && path.extname(src) === '') {
src = path.join(src, '**', '*.*');
}
files.push(src);
});
var command = [
'eslint',
'--color',
'--no-eslintrc',
'--ignore', 'node_modules',
'--config', 'eslint.config.js'
].concat(files);
if (program.errors) {
command.push('--quiet');
}
if (program.fix) {
command.push('--fix');
}
var run = function() {
Run.one(command, {toLog: true, nonFatal: program.watch});
};
if (program.watch) {
Watch(program.src, run);
} else {
run();
}
};
module.exports = eslintHelper;
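// Usage sketch: the lint binaries pass their parsed commander program here,
// for example (flag values are illustrative):
//
//   eslintHelper({src: ['src/js'], errors: false, fix: false, watch: false});
//
// Directories in `src` are expanded to recursive '**/*.*' globs before eslint
// is invoked with spellbook's eslint.config.js.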
<file_sep># sb-lint-lang-src(1) - Lint json lang assets for a project
## SYNOPSIS
sb-lint-lang-src [-l, --log-level <level>] [-h,--help] [-q,--quiet] [-V,--version]
[-w, --watch] [-e, --errors] [-f, --fix]
[glob-or-file="<lang-src>/**/*.json"]
## DESCRIPTION
This lints all files under <glob-or-file>.
This will use eslint and eslint-plugin-json internally to lint json.
Rules can be found at:
https://github.com/azeemba/eslint-plugin-json
## OPTIONS
-l, --log-level <level>
The level of log messages that you want to see. Can be none, fatal, error,
warn, info, verbose, or debug. Defaults to info.
-h, --help
View the help information for this binary
-V, --version
View the version of this binary
-q, --quiet
Do not log any messages to stdout or stderr
-w, --watch
Watch files for changes and re-lint on file change.
Errors during linting with this active will not exit.
-e, --errors
Only log linting errors. Will not log any linting warnings.
-f, --fix
This should fix any issues that eslint can fix automatically in the json
lang files.
<glob-or-file="<lang-src>/**/*.json">
The file or glob to lint. By default "<lang-src>/**/*.json" is linted. <lang-src> is the
source listed for lang in package.json or 'lang/' by default.
## EXAMPLES
Get the current version of spellbook
sb-lint-lang-src -V
sb-lint-lang-src --version
Get help for this binary
sb-lint-lang-src --help
sb-lint-lang-src -h
Set the log level for this binary
sb-lint-lang-src -l info
sb-lint-lang-src -l fatal
sb-lint-lang-src --log-level debug
sb-lint-lang-src --log-level error
Don't output anything
sb-lint-lang-src -q
sb-lint-lang-src --quiet
Watch files for changes and lint again once they change
sb-lint-lang-src -w
sb-lint-lang-src --watch
Fix any errors/warnings that can be fixed automatically
sb-lint-lang-src -f
sb-lint-lang-src --fix
Only log errors, do not log warnings.
sb-lint-lang-src -e
sb-lint-lang-src --errors
Lint a specific glob or file.
sb-lint-lang-src some-lang-file.json
sb-lint-lang-src src/lang/**/*.json
## ENVIRONMENT AND CONFIGURATION VARIABLES
SB_LOG_LEVEL='info'
An environment variable that sets the log level to use for all videojs-spellbook
binaries. Can be set to fatal, error, warn, info, verbose, debug, or none.
<package.json>.spellbook.log-level=info
A package.json variable that sets the default log level to use for all videojs-spellbook
binaries. Can be set to fatal, error, warn, info, verbose, debug, or none.
<package.json>.spellbook.lang='{}'
Lang configuration to use in spellbook. If this is set to a false value then lang
linting will use default values.
<package.json>.spellbook.lang.src='lang/'
Source directory to use for lang files, set in package.json. If this is unset
'lang/' will be used. If this directory does not exist lang linting will fail.
## SEE ALSO
sb-lint-lang-all(1), sb-lint-lang(1)
## EXIT
0 - all commands succeeded
1 - one or more sub-command failed, unless --watch is active
## Spellbook
Part of the sb(1) suite
<file_sep>var glob = require('glob');
var path = require('path');
var PathsExists = require('./paths-exist');
var GetFiles = function() {
var searches = Array.prototype.slice.call(arguments);
var files = [];
searches.forEach(function(search) {
var fn = function() {
if(PathsExists(search)) {
return [search];
}
return glob.sync(search, {ignore: ['**/node_modules/**']});
};
if (Array.isArray(search)) {
fn = function() {
return GetFiles.apply(null, search);
};
}
files = files.concat(fn());
});
return files;
};
module.exports = GetFiles;
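// Usage sketch (hypothetical globs): GetFiles('src/js/**/*.js') returns every
// matching file outside of node_modules, existing paths are passed through
// as-is, and nested arrays such as GetFiles(['src/**/*.js', 'index.js'])
// are flattened into a single list of files.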
<file_sep>import TestingThing from '../src/js/index';
import QUnit from 'qunitjs';
import sinon from 'sinon';
QUnit.module('test', {
beforeEach() {},
afterEach() {}
});
QUnit.test('foo fn returns bar', function(assert) {
const test = new TestingThing();
assert.strictEqual(test.foo(), 'bar', 'bar returns this.bar');
});
<file_sep>#!/usr/bin/env node
var Run = require('./utils/run');
var CommanderWrapper = require('./utils/commander-wrapper');
var log = require('./utils/log');
var config = require('./utils/get-config')();
var program = CommanderWrapper(function(commander) {
return commander
.option('-w, --watch', 'incrementally rebuild');
});
if (!config.js) {
log.warn('js builds are turned off');
process.exit();
}
var commands = [
['sb-build-js-node'],
['sb-build-js-browser'],
];
if (program.watch) {
commands = commands.map(function(command) {
command.push('--watch');
return command;
});
}
Run.parallel(commands);
<file_sep># sb-build-test-bundlers(1) - Build Unit tests that verify that your code will work with certain browser bundlers
## SYNOPSIS
sb-build-test-bundlers [-l, --log-level <level>] [-h,--help] [-q,--quiet] [-V,--version]
[-w, --watch] [-d, --dist <dist-dir='dist/test'>]
[<src-dir='${pkg.spellbook.js.src}'|'src/js'>]
## DESCRIPTION
Build bundles that test your code with common browser bundlers. These are the steps this binary takes to build them (a rough sketch follows the note below):
1. Look for es6 `index.js` in `<src-dir>` or fail if nothing is found
2. Write QUnit test files to '<dist-dir>' that includes that main file. Currently writes:
* <dist-dir>/webpack.start.js
* <dist-dir>/browserify.start.js
3. Runs browserify on `browserify.start.js` which:
1. Start watching code changes to generate an internal source map.
2. ignore video.js if <package.json>.spellbook.shim-videojs is set to true (which is the default)
3. convert all code to es5 using babelify with ie8 support if <package.json>.spellbook.ie8 is set to true (the default is false)
4. convert '__VERSION__' strings to the package version
5. browserify all es5 assets into the bundle
6. bundle-collapse all require paths so that they are 1 character numbers rather than long strings
7. Write a dist file to <dist-dir>/browserify.test.js
4. Runs webpack on `webpack.start.js` which:
1. Start watching code changes to generate an internal source map.
2. ignore video.js if <package.json>.spellbook.shim-videojs is set to true (which is the default)
3. convert all code to es5 using babel-loader with ie8 support if <package.json>.spellbook.ie8 is set to true (the default is false)
4. webpackify all es5 assets into the bundle
5. Write a dist file to <dist-dir>/webpack.test.js
5. Removes all `*.start.js` files
> NOTE: During watch mode all steps will run and build failures will not exit this
> binary.
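As a rough orientation, the browserify leg of step 3 is conceptually similar to the
sketch below. The file names are illustrative and the real build goes through
spellbook's internal browserify helper:

```js
var browserify = require('browserify');
var babelify = require('babelify');
var fs = require('fs');

// bundle the generated start file into the dist test bundle
browserify('dist/test/browserify.start.js', {debug: true}) // debug keeps an internal source map
  .transform(babelify) // es6 -> es5 (the real build passes spellbook's babel preset)
  .bundle()
  .on('error', function(err) { console.error(err.message); })
  .pipe(fs.createWriteStream('dist/test/browserify.test.js'));
```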
## OPTIONS
-l, --log-level <level>
The level of log messages that you want to see. Can be none, fatal, error,
warn, info, verbose, or debug. Defaults to info.
-h, --help
View the help information for this binary
-V, --version
View the version of this binary
-q, --quiet
Do not log any messages to stdout or stderr
-w, --watch
Watch files for changes and incrementally rebuild on file change.
Failed builds with this active will not exit.
-d, --dist <dist-dir='dist/test'>
Write to a specific dist-dir. Defaults to 'dist/test'
<src-dir='${pkg.spellbook.js.src}'|'src/js'>
Read from a specefic <src-dir> instead of `${pkg.spellbook.js.src}` or
the default of `src/js`
## EXAMPLES
Get the current version of spellbook
sb-build-test-bundlers -V
sb-build-test-bundlers --version
Get help for this binary
sb-build-test-bundlers --help
sb-build-test-bundlers -h
Set the log level for this binary
sb-build-test-bundlers -l info
sb-build-test-bundlers -l fatal
sb-build-test-bundlers --log-level debug
sb-build-test-bundlers --log-level error
Don't output anything
sb-build-test-bundlers -q
sb-build-test-bundlers --quiet
Incrementally rebuild
sb-build-test-bundlers --watch
sb-build-test-bundlers -w
Non default source directory
sb-build-test-bundlers src/js/idk
sb-build-test-bundlers js/
Non default dist directory
sb-build-test-bundlers --dist dist/bundlers
sb-build-test-bundlers -d dist/test-bundlers/spellbook
## ENVIRONMENT AND CONFIGURATION VARIABLES
SB_LOG_LEVEL='info'
An environment variable that sets the log level to use for all videojs-spellbook
binaries. Can be set to fatal, error, warn, info, verbose, debug, or none.
<package.json>.spellbook.log-level=info
A package.json variable that sets the default log level to use for all videojs-spellbook
binaries. Can be set to fatal, error, warn, info, verbose, debug, or none.
<package.json>.spellbook.ie8=false
Should spellbook make sure that IE8 is supported. Defaults to false.
<package.json>.spellbook.shim-videojs=true
Makes sure that video.js is included in unit tests but will not be bundled into
distribution js files. Defaults to true.
<package.json>.spellbook.test='{}'
Setting this to a false value will not stop this binary from being run.
<package.json>.spellbook.test.src='test/'
This value will have no effect on this binary.
<package.json>.spellbook.js={}
Setting this to a false value will not stop this binary from being run.
<package.json>.spellbook.js.src
package.json spellbook configuration for the default js source directory. Changes the default
from `src/js` to whatever is specified. If this is set and does not exist this build will fail.
## SEE ALSO
sb-build-test(1), sb-build-test-all(1)
## EXIT
0 - all commands succeeded
1 - one or more sub-command failed
## Spellbook
Part of the sb(1) suite
<file_sep>var assert = require('chai').assert;
var shelljs = require('shelljs');
var mkdirp = require('mkdirp');
var path = require('path');
var TestHelper = require('./test-helper.js');
var PathsExist = require('../../src/utils/paths-exist');
var glob = require('glob');
var parallel = require('mocha.parallel');
var tests = {
'sb-lint-css-css': {
lines: 14,
doubleLines: 18,
file: 'src/css/index.css'
},
'sb-lint-css-sass': {
lines: 12,
doubleLines: 15,
file: 'src/css/index.scss'
},
'sb-lint-docs-examples': {
lines: 8,
doubleLines: 11,
file: 'docs/index.md'
},
'sb-lint-docs-md': {
lines: 9,
doubleLines: 14,
file: 'docs/index.md'
},
'sb-lint-lang-src': {
lines: 9,
doubleLines: 13,
file: 'lang/index.json'
},
'sb-lint-js-src': {
lines: 13,
doubleLines: 21,
file: 'src/js/index.js'
},
'sb-lint-test-src': {
lines: 7,
doubleLines: 9,
file: 'test/index.test.js'
},
};
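// Each entry above maps a lint binary to the fixture file it is expected to
// flag, along with the combined stdout/stderr line count expected when that
// file is linted once (lines) and when it is linted twice (doubleLines).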
// tests binaries that lint more than one thing
var manyTests = {};
// css
['sb-lint-css', 'sb-lint-css-all'].forEach(function(binName) {
manyTests[binName] = {
lines: tests['sb-lint-css-css'].lines + tests['sb-lint-css-sass'].lines,
doubleLines: tests['sb-lint-css-css'].doubleLines + tests['sb-lint-css-sass'].doubleLines
};
});
// js
['sb-lint-js', 'sb-lint-js-all'].forEach(function(binName) {
manyTests[binName] = {
lines: tests['sb-lint-js-src'].lines,
doubleLines: tests['sb-lint-js-src'].doubleLines
};
});
// test
['sb-lint-test', 'sb-lint-test-all'].forEach(function(binName) {
manyTests[binName] = {
lines: tests['sb-lint-test-src'].lines,
doubleLines: tests['sb-lint-test-src'].doubleLines
};
});
// lang
['sb-lint-lang', 'sb-lint-lang-all'].forEach(function(binName) {
manyTests[binName] = {
lines: tests['sb-lint-lang-src'].lines,
doubleLines: tests['sb-lint-lang-src'].doubleLines
};
});
// docs
['sb-lint-docs', 'sb-lint-docs-all'].forEach(function(binName) {
manyTests[binName] = {
lines: tests['sb-lint-docs-md'].lines + tests['sb-lint-docs-examples'].lines,
doubleLines: tests['sb-lint-docs-md'].doubleLines + tests['sb-lint-docs-examples'].doubleLines
};
});
// TODO: --fix, --errors
parallel('linters:single', function() {
Object.keys(tests).forEach(function(binName) {
var testProps = tests[binName];
it(binName + ' should lint default files with no args', function(done) {
var helper = new TestHelper();
helper.exec(binName, function(code, stdout, stderr) {
var lines = stderr.length + stdout.length;
assert.notEqual(code, 0, 'should not return 0');
assert.equal(lines, testProps.lines, 'should print ' + testProps.lines + ' lines');
helper.cleanup(done);
});
});
it(binName + ' should lint custom dir', function(done) {
var helper = new TestHelper();
var newsrc = path.join(helper.config.path, 'newsrc');
mkdirp.sync(newsrc);
shelljs.mv(path.join(helper.config.path, testProps.file), newsrc);
helper.exec(binName, [newsrc], function(code, stdout, stderr) {
var lines = stderr.length + stdout.length;
assert.notEqual(code, 0, 'should not return 0');
assert.equal(lines, testProps.lines, 'should print ' + testProps.lines + ' lines');
helper.cleanup(done);
});
});
it(binName + ' should lint custom file', function(done) {
var helper = new TestHelper();
var oldsrc = path.join(helper.config.path, testProps.file);
var newsrc = path.join(helper.config.path, 'newsrc' + path.extname(oldsrc));
shelljs.mv(oldsrc, newsrc);
helper.exec(binName, [newsrc], function(code, stdout, stderr) {
var lines = stderr.length + stdout.length;
assert.notEqual(code, 0, 'should not return 0');
assert.equal(lines, testProps.lines, 'should print ' + testProps.lines + ' lines');
helper.cleanup(done);
});
});
it(binName + ' should lint two files', function(done) {
var helper = new TestHelper();
var oldsrc = path.join(helper.config.path, testProps.file);
var newsrc = path.join(helper.config.path, 'newsrc' + path.extname(oldsrc));
shelljs.cp(oldsrc, newsrc);
helper.exec(binName, [newsrc, oldsrc], function(code, stdout, stderr) {
var lines = stderr.length + stdout.length;
assert.notEqual(code, 0, 'should not return 0');
assert.equal(lines, testProps.doubleLines, 'should print ' + testProps.doubleLines + ' lines');
helper.cleanup(done);
});
});
it(binName + ' should lint custom glob', function(done) {
var helper = new TestHelper();
// build a glob such as <path>/src/css/*.css that matches the fixture file
var fileGlob = path.join(
helper.config.path,
path.dirname(testProps.file),
'*' + path.extname(testProps.file)
);
helper.exec(binName, [fileGlob], function(code, stdout, stderr) {
var lines = stderr.length + stdout.length;
assert.notEqual(code, 0, 'should not return 0');
assert.equal(lines, testProps.lines, 'should print ' + testProps.lines + ' lines');
helper.cleanup(done);
});
});
});
});
parallel('linters:multiple', function() {
Object.keys(manyTests).forEach(function(binName) {
var testProps = manyTests[binName];
it(binName + ' should lint default files with no args', function(done) {
var helper = new TestHelper();
helper.exec(binName, function(code, stdout, stderr) {
var lines = stderr.length + stdout.length - 3;
// non -all binaries print two extra lines compared to the -all
// binaries because they shell out to the -all binaries, so
// normalize the count before comparing
if (!(/-all$/).test(binName)) {
lines -= 2;
}
assert.notEqual(code, 0, 'should not return 0');
assert.equal(lines, testProps.lines, 'should print ' + testProps.lines + ' lines');
helper.cleanup(done);
});
});
});
});
<file_sep># sb-build-docs-manual(1) - Build manual generated documentation into html
## SYNOPSIS
sb-build-docs-manual [-l, --log-level <level>] [-h,--help] [-q,--quiet] [-V,--version]
[-w, --watch] [-d, --dist <dist-dir='dist/docs/manual'>]
[<src-dir='${pkg.spellbook.docs.src}'|'docs/'>]
## DESCRIPTION
Build all manual markdown documentation from <src-dir> into html files at <dist-dir>.
## OPTIONS
-l, --log-level <level>
The level of log messages that you want to see. Can be none, fatal, error,
warn, info, verbose, or debug. Defaults to info.
-h, --help
View the help information for this binary
-V, --version
View the version of this binary
-q, --quiet
Do not log any messages to stdout or stderr
-w, --watch
Watch files for changes and incrementally rebuild on file change.
Failed builds with this active will not exit.
-d, --dist <dist-dir='dist/docs/manual'>
Write to a specific dist-dir. Defaults to 'dist/docs/manual'
<src-dir='${pkg.spellbook.docs.src}'|'docs/'>
Read from a specific <src-dir> instead of `${pkg.spellbook.docs.src}` or
the default of `docs/`
## EXAMPLES
Get the current version of spellbook
sb-build-docs-manual -V
sb-build-docs-manual --version
Get help for this binary
sb-build-docs-manual --help
sb-build-docs-manual -h
Set the log level for this binary
sb-build-docs-manual -l info
sb-build-docs-manual -l fatal
sb-build-docs-manual --log-level debug
sb-build-docs-manual --log-level error
Don't output anything
sb-build-docs-manual -q
sb-build-docs-manual --quiet
Incrementally rebuild
sb-build-docs-manual --watch
sb-build-docs-manual -w
Non default source directory
sb-build-docs-manual src/docs
sb-build-docs-manual src/docs/man
Non default dist directory
sb-build-docs-manual --dist dist/docs
sb-build-docs-manual -d dist/manual-docs
## ENVIRONMENT AND CONFIGURATION VARIABLES
SB_LOG_LEVEL='info'
An environment variable that sets the log level to use for all videojs-spellbook
binaries. Can be set to fatal, error, warn, info, verbose, debug, or none.
<package.json>.spellbook.log-level=info
A package.json variable that sets the default log level to use for all videojs-spellbook
binaries. Can be set to fatal, error, warn, info, verbose, debug, or none.
<package.json>.spellbook.docs='{}'
This setting will be ignored by this binary.
<package.json>.spellbook.docs.src='docs/'
Source directory to use for docs files, set in package.json. If this is unset
'docs/' will be used. If this is set and the directory does not exist, the build will fail.
## SEE ALSO
sb-build-docs(1), sb-build-docs-all(1)
## EXIT
0 - all commands succeeded
1 - one or more sub-command failed
## Spellbook
Part of the sb(1) suite
<file_sep>#!/usr/bin/env node
var config = require('./utils/get-config')();
var path = require('path');
var CommanderWrapper = require('./utils/commander-wrapper');
var Run = require('./utils/run');
var log = require('./utils/log');
var GetFiles = require('./utils/get-files');
var rimraf = require('rimraf');
var mkdirp = require('mkdirp');
var program = CommanderWrapper(function(commander) {
return commander
.option('-w, --watch', 'incremental rebuild')
.option('-d, --dist <dir>', 'directory to write output to', path.join(config.dist, 'es5'))
.arguments('<dir>')
.action(function(src) {
this.src = src;
});
});
if (!program.src) {
program.src = config.js.src;
}
if (!GetFiles(path.join(program.src, '**', '*.js'))) {
log.fatal('Source directory ' + program.src + ' does not exist or contains no js files!');
process.exit(1);
}
var command = [
'babel',
'-d', program.dist,
'--presets', 'babel-preset.config.js',
program.src
];
if (program.watch) {
command.push('--watch');
}
rimraf.sync(program.dist);
mkdirp.sync(path.dirname(program.dist));
log.info('Building...');
Run.one(command, {toLog: true});
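// For reference, with the default configuration the command above is roughly:
//
//   babel -d dist/es5 --presets babel-preset.config.js src/js
//
// plus --watch when -w/--watch is passed.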
<file_sep># videojs-spellbook
[](https://travis-ci.org/videojs/spellbook)
[](https://nodei.co/npm/videojs-spellbook/)
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)*
- [Features](#features)
- [Installation](#installation)
- [Things to know](#things-to-know)
- [Known issues & Workarounds](#known-issues--workarounds)
- [More Information](#more-information)
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
# Features
* General
* Small browser dists by using all of the latest technologies
* External source maps for all js and css files
* Automatic licence/banner insertion at the top of minified files
* support for linting js, css, documentation, and documentation examples (js only)
* super fast incremental rebuilds `--watch`
* JavaScript
* es6 -> es5 for publishing on npm
* Ability to support IE8 (or not) effortlessly (NOTE: this will add a lot of bytes to your dist)
* Support for es6 -> es6 imports using `jsnext:main` pointing to es6 entry files in package.json
* Support for shimming `video.js` so that it will be included in the page for testing but not included in the dist file
* Automatic unit test generation for `webpack` and `browserify`
* CSS
* support for vanilla css with concatenation via `postcss-import` (like require for css)
* support for sass
* Docs
* API documentation generated into html files from jsdoc comments
* Manual documentation generated into html files
* Automatic table of contents for manual documentation
* Test
* QUnitjs supported out of the box
* sinon supported out of the box (local installation not necessary or recommended)
* Manual debugging via karma debug page and --watch
* Server
* `--tunnel` option to share your local dev server over the internet
* `sb-start` to:
* start a dev server
* build everything and re-build on change
* lint everything and re-lint on change
* test everything and re-build tests on change
* auto-reload when files change in
* `<project-root>/dist/**/*.js`
* `<project-root>/dist/**/*.css`
* `<project-root>/dist/**/*.html`
* `<project-root>/examples/**/*.html`
* `<project-root>/index.html`
* Proxy karma to `http://localhost:9999/test` so:
* manual debugging is easier
* it can be included in a tunnel over the internet
* auto-reloads will happen when code changes
* browser-sync which will:
* reload on file change
* offers a ui for configuration at `http://localhost:10000`
* can be used to turn off auto reload etc.
* Release management
* Support for use as an npm script or standalone
* Does the following:
1. Release un-released changelog entries with `chg`
2. update the version in package.json
3. support/build dists for bower in the tag so they don't clutter the main repo
4. make a git commit with the version name as the message
5. tag a git branch
6. Advise the user on how to push to git, and publish to npm
# Installation
1. Run `npm install --save-dev videojs-spellbook`
2. Read the [sb man page](/docs/sb.md) to learn about how your project should be structured.
# Things to know
* If video.js is not installed in your project the version in spellbook will be used
* CSS changes are injected without a page reload
* `jsnext:main` must point to your es6 main file in each project that you want to bundle together (this will make
the dist files much smaller).
* `global`/`video.js` are included in spellbook and will be used for your project if your project does not have/need
local versions.
# Known issues & Workarounds
* sometimes binaries fail to exit during watch which can cause weird behaviour
* make sure to check for running `sb-*` binaries if things are weird
* rollup is not used during watch (rollupify and watchify don't play nice)
* this is not really something that can be worked around yet but it should
not be an issue as rollup will be used during `build` and `watch` should only
be used in development
* es6 code changes in sub projects don't trigger a rebuild on watch
* This is due to rollupify not working with watchify, see the above issue. Have spellbook or your
current build system watch your sub-project and rebuild its es5 dist on change.
# More Information
* [Man Pages](/docs/)
* [TODO](TODO.md)
<file_sep># sb-build-js-browser(1) - Build javascript assets into minified/unminified bundles with external source maps
## SYNOPSIS
sb-build-js-browser [-l, --log-level <level>] [-h,--help] [-q,--quiet] [-V,--version]
[-w, --watch] [-d, --dist <dist-dir='dist/browser'>]
[<src-dir='${pkg.spellbook.js.src}'|'src/js'>]
## DESCRIPTION
This binary will do the following (a rough sketch follows the note below):
1. Look for index.js in <src-dir> or fail if it does not find one
2. Pass what it found to browserify which will:
1. Pass <pkg.name> to browserify as the standalone name. This will get exposed under window and be
converted to camel case, e.g. `test-main-pkg` becomes `window.testMainPkg`
2. Start watching code changes to generate an internal source map.
3. ignore video.js if <package.json>.spellbook.shim-videojs is set to true (which is the default)
4. rollup all es6 code and dependencies that support it (using jsnext:main). This saves a lot of bytes
5. convert all code to es5 using babelify with ie8 support if <package.json>.spellbook.ie8 is set to true (the default is false)
6. convert '__VERSION__' strings to the package version
7. browserify all es5 assets into the bundle
8. bundle-collapse all require paths so that they are 1 character numbers rather than long strings
9. Write a dist file to <dist-dir>/<pkg.name>.js
3. exorcist will remove the source map from <dist-dir>/<pkg.name>.js into <dist-dir>/<pkg.name>.js.map
4. uglify will be run on <dist-dir>/<pkg.name>.js this will:
1. Use the exorcised source map and update it to match the soon-to-be minified file
2. minify the file
3. Add a banner to the top of the file and update the source map with the line offsets
4. write the minified output to <dist-dir>/<pkg.name>.min.js
5. Write the updated source map to <dist-dir>/<pkg.name>.min.js.map
> NOTE: During watch watchify will be used for browserify, steps 1-2 will be the only steps run,
> and as of right now rollupify will not be used as it breaks watchify. This means that it will
> have to use `main` rather than `jsnext:main` to build projects. Basically sub projects will have to
> watch and rebuild their es6 -> es5 sources in order for the current project to be rebuilt.
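As a rough sketch of steps 2 and 3 (the entry path, standalone name, and dist paths
are illustrative; spellbook drives this through its own browserify helper), the
bundling and source map extraction look conceptually like this:

```js
var browserify = require('browserify');
var exorcist = require('exorcist');
var fs = require('fs');

// `standalone` exposes the bundle as a camel-cased global, e.g. window.testMainPkg
browserify('src/js/index.js', {standalone: 'testMainPkg', debug: true})
  .bundle()
  .pipe(exorcist('dist/browser/test-main-pkg.js.map')) // move the inline map to an external file
  .pipe(fs.createWriteStream('dist/browser/test-main-pkg.js'));
```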
## OPTIONS
-l, --log-level <level>
The level of log messages that you want to see. Can be none, fatal, error,
warn, info, verbose, or debug. Defaults to info.
-h, --help
View the help information for this binary
-V, --version
View the version of this binary
-q, --quiet
Do not log any messages to stdout or stderr
-w, --watch
Watch files for changes and incrementally rebuild on file change.
Failed builds with this active will not exit.
-d, --dist <dist-dir='dist/browser'>
Write to a specific dist-dir. Defaults to 'dist/browser'
<src-dir='${pkg.spellbook.js.src}'|'src/js'>
Read from a specific <src-dir> instead of `${pkg.spellbook.js.src}` or
the default of `src/js`
## EXAMPLES
Get the current version of spellbook
sb-build-js-browser -V
sb-build-js-browser --version
Get help for this binary
sb-build-js-browser --help
sb-build-js-browser -h
Set the log level for this binary
sb-build-js-browser -l info
sb-build-js-browser -l fatal
sb-build-js-browser --log-level debug
sb-build-js-browser --log-level error
Don't output anything
sb-build-js-browser -q
sb-build-js-browser --quiet
Incrementally rebuild
sb-build-js-browser --watch
sb-build-js-browser -w
Non default source directory
sb-build-js-browser src/js/js
sb-build-js-browser src/js/es6
Non default dist directory
sb-build-js-browser --dist dist/node
sb-build-js-browser -d dist/js
## ENVIRONMENT AND CONFIGURATION VARIABLES
SB_LOG_LEVEL='info'
An environment variable that sets the log level to use for all videojs-spellbook
binaries. Can be set to fatal, error, warn, info, verbose, debug, or none.
<package.json>.spellbook.log-level=info
A package.json variable that sets the default log level to use for all videojs-spellbook
binaries. Can be set to fatal, error, warn, info, verbose, debug, or none.
<package.json>.spellbook.js='{}'
This setting will be ignored by this binary.
<package.json>.spellbook.js.src='src/js'
Source directory to use for js files, set in package.json. If this is unset
'src/js' will be used. If this is set and the directory does not exist, the build will fail.
<package.json>.spellbook.ie8=false
Make sure that IE8 is supported. Defaults to false.
<package.json>.spellbook.shim-videojs=true
Makes sure that video.js is included in unit tests but will not be bundled into
distribution js files. Defaults to true.
## SEE ALSO
sb-build-js(1), sb-build-js-all(1)
## EXIT
0 - all commands succeeded
1 - one or more sub-command failed
## Spellbook
Part of the sb(1) suite
<file_sep>#!/usr/bin/env node
var CommanderWrapper = require('./utils/commander-wrapper');
var config = require('./utils/get-config')();
var log = require('./utils/log');
var program = CommanderWrapper(function(commander) {
return commander
.command('browser', 'build js tests for the browser')
.command('bundlers', 'build js bundler tests for the browser')
.command('all', 'build all possible js outputs', {isDefault: true});
});
if (!config.test) {
log.warn('Unit Test builds are turned off');
process.exit();
}
<file_sep>var assert = require('chai').assert;
var shelljs = require('shelljs');
var path = require('path');
var TestHelper = require('./test-helper.js');
var PathsExist = require('../../src/utils/paths-exist');
var binName = 'sb-clean';
var parallel = require('mocha.parallel');
var mkdirp = require('mkdirp');
var PathRemoved = function(helper, stdout, p) {
var relpath = path.relative(helper.config.path, p)
.replace(/^\.\//, '')
.replace(/^\//, '');
var regex = new RegExp('removing ' + relpath);
assert.ok(regex.test(stdout), 'should have removed ' + relpath);
};
parallel('sb-clean:defaults', function() {
it('should delete nothing if there is nothing to clean', function(done) {
var helper = new TestHelper();
helper.exec(binName, function(code, stdout, stderr) {
assert.equal(code, 0, 'should return success');
assert.equal(stdout.length, 2, 'should stdout start + finish only');
assert.equal(stderr.length, 0, 'should stderr nothing');
helper.cleanup(done);
});
});
it('should delete the dist folder if it exists', function(done) {
var helper = new TestHelper();
mkdirp.sync(helper.config.dist);
helper.exec(binName, function(code, stdout, stderr) {
assert.equal(code, 0, 'should return success');
assert.equal(stderr.length, 0, 'should stderr nothing');
assert.equal(stdout.length, 3, 'should only print one removal');
PathRemoved(helper, stdout, helper.config.dist);
assert.equal(PathsExist(helper.config.dist), false, 'dist should be deleted');
helper.cleanup(done);
});
});
it('should delete npm-debug.log if it exists', function(done) {
var helper = new TestHelper();
var npmDebug = path.join(helper.config.path, 'npm-debug.log');
shelljs.touch(npmDebug);
helper.exec(binName, function(code, stdout, stderr) {
assert.equal(code, 0, 'should return success');
assert.equal(stderr.length, 0, 'should stderr nothing');
assert.equal(stdout.length, 3, 'should only print one removal');
PathRemoved(helper, stdout, npmDebug);
assert.equal(PathsExist(npmDebug), false, 'npm-debug.log should be deleted');
helper.cleanup(done);
});
});
it('should delete npm-debug.log and dist if they exist', function(done) {
var helper = new TestHelper();
var npmDebug = path.join(helper.config.path, 'npm-debug.log');
mkdirp.sync(helper.config.dist);
shelljs.touch(npmDebug);
helper.exec(binName, function(code, stdout, stderr) {
assert.equal(code, 0, 'should return success');
assert.equal(stderr.length, 0, 'should not print to stderr');
assert.equal(stdout.length, 4, 'should print two removals');
PathRemoved(helper, stdout, helper.config.dist);
assert.equal(PathsExist(helper.config.dist), false, 'dist should be deleted');
PathRemoved(helper, stdout, npmDebug);
assert.equal(PathsExist(npmDebug), false, 'npm-debug.log should be deleted');
helper.cleanup(done);
});
});
});
parallel('sb-clean:dry-run', function() {
['-d', '--dry-run'].forEach(function(option) {
it(option + ': should not delete the dist folder if it exists', function(done) {
var helper = new TestHelper();
mkdirp.sync(helper.config.dist);
helper.exec(binName, [option], function(code, stdout, stderr) {
assert.equal(code, 0, 'should return success');
assert.equal(stderr.length, 0, 'should not print to stderr');
assert.equal(stdout.length, 3, 'should print one removal');
PathRemoved(helper, stdout, helper.config.dist);
assert.equal(PathsExist(helper.config.dist), true, 'dist should exist');
helper.cleanup(done);
});
});
it(option + ': should not delete npm-debug.log or dist folder', function(done) {
var helper = new TestHelper();
var npmDebug = path.join(helper.config.path, 'npm-debug.log');
mkdirp.sync(helper.config.dist);
shelljs.touch(npmDebug);
helper.exec(binName, [option], function(code, stdout, stderr) {
assert.equal(code, 0, 'should return success');
assert.equal(stderr.length, 0, 'should not print to stderr');
assert.equal(stdout.length, 4, 'should print two removals');
PathRemoved(helper, stdout, helper.config.dist);
assert.equal(PathsExist(helper.config.dist), true, 'dist should exist');
assert.equal(PathsExist(npmDebug), true, 'debug log should exist');
helper.cleanup(done);
});
});
});
});
<file_sep>var fs = require('fs');
var path = require('path');
var PathsExist = require('./paths-exist');
var findRoot = require('find-root');
var readJSON = function(file) {
return JSON.parse(fs.readFileSync(file, 'utf8'));
};
var GetConfig = function (dir) {
dir = dir || process.cwd();
if (!path.isAbsolute(dir)) {
dir = path.join(process.cwd(), dir);
}
var appRoot = findRoot(dir);
var workingPkg = readJSON(path.join(appRoot, 'package.json'));
var sbPkg = readJSON(path.join(__dirname, '..', '..', 'package.json'));
workingPkg.spellbook = workingPkg.spellbook || {};
if (!process.env.SB_INTERNAL) {
if (!workingPkg.main || !workingPkg['jsnext:main']) {
console.error('The package in ' + appRoot + ' does not have a main file or jsnext:main file set.');
console.error('please set these in your package.json');
process.exit(1);
}
}
if (!PathsExist(path.join(appRoot, 'node_modules'))) {
console.error('no node_modules directory!');
process.exit(1);
}
var name = workingPkg.name.replace(/^@.+\//, '');
var author = workingPkg.author || '';
if (Array.isArray(workingPkg.author)) {
console.error('Author cannot be an array in package.json, as this is invalid, going to use first author');
console.error('See: https://docs.npmjs.com/files/package.json#people-fields-author-contributors');
workingPkg.author = workingPkg.author[0];
}
if (typeof workingPkg.author === 'object') {
if (!workingPkg.author.name) {
console.error('author must have a name key or be a string in package.json!');
console.error('See: https://docs.npmjs.com/files/package.json#people-fields-author-contributors');
process.exit(1);
}
author = workingPkg.author.name;
if (workingPkg.author.email) {
author += ' <' + workingPkg.author.email + '>';
}
if (workingPkg.author.url) {
author += ' (' + workingPkg.author.url + ')';
}
}
var config = {
// workingPkg information
name: name,
scope: workingPkg.name.replace(name, '').replace(/\/$/, ''),
version: workingPkg.version,
path: appRoot,
main: workingPkg['main'] ? path.join(appRoot, workingPkg.main) : '',
jsNextMain: workingPkg['jsnext:main'] ? path.join(appRoot, workingPkg['jsnext:main']) : '',
// workingPkg settings
logLevel: process.env.SB_LOG_LEVEL || workingPkg.spellbook['log-level'] || 'info',
ie8: workingPkg.spellbook.ie8 || false,
browserList: workingPkg.spellbook.browserList || ['> 1%', 'last 4 versions', 'Firefox ESR'],
shimVideojs: workingPkg.spellbook['shim-videojs'] !== false && workingPkg.spellbook['shim-video.js'] !== false,
bannerObj: {
name: workingPkg.name || '',
version: workingPkg.version || '',
author: author,
license: workingPkg.license || ''
},
dist: path.join(appRoot, 'dist'),
docs: Object.assign({
src: path.join(appRoot, 'docs')
}, workingPkg.spellbook.docs || {}),
lang: Object.assign({
src: path.join(appRoot, 'lang')
}, workingPkg.spellbook.lang || {}),
test: Object.assign({
src: path.join(appRoot, 'test')
}, workingPkg.spellbook.test || {}),
css: Object.assign({
src: path.join(appRoot, 'src', 'css')
}, workingPkg.spellbook.css || {}),
js: Object.assign({
src: path.join(appRoot, 'src', 'js')
}, workingPkg.spellbook.js || {})
};
return config;
};
module.exports = GetConfig;
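// Usage sketch: most binaries resolve the config for the current project by
// calling this with no arguments, e.g.
//
//   var config = require('./utils/get-config')();
//   // config.name, config.version, config.dist, config.js.src, ...
//
// Passing a directory resolves the config of the package rooted above it.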
<file_sep>CHANGELOG
=========
## HEAD (Unreleased)
_(none)_
--------------------
## 2.1.0 (2016-12-08)
* update eslint-config-videojs to 3.0.1 (#18)
* added linting before testing, but fail after both (#13)
* fix es6 module resolution (#16)
* support linting for css and sass at the same time (#15)
* fix live-reload related issues (#17)
* prevent zombies by killing any running process when parent exits (#20)
* Add man pages and documentation on each binary in spellbook (#22)
## 2.0.2 (2016-11-03)
* fix node6 karma tests
## 2.0.1 (2016-11-03)
* fix issues with --watch
* use npm-run, rimraf, and mkdirp instead of shelljs
* don't support rollup in bundlers
* make sure that all linters return an error code when they fail
* make sure that nothing dies with an error code while watching
* added sb-config
* don't use css in tests if css is off
* support for special characters in command line arguments (such as single quote)
* warn now logs to stdout
* sb-server now proxies /test and /test/ rather than just /test
## 2.0.0 (2016-11-03)
* BREAKING: change i18n wording and binaries to lang
* BREAKING: CHANGE npm wording to node and es5
* BREAKING: remove --watch from binaries that can't use it
* implement vanilla css linting and building
* add banners to css/scss files
* BREAKING: rename karma wording to browser
* added more -all binaries to future proof this build system
## 1.0.9 (2016-10-25)
* get tests working in TRAVIS
* make all tests run in parallel
## 1.0.8 (2016-10-25)
* use options rather than configs for eslint
* use vanilla eslint
* remove browserify-incremental and use vanilla browserify
* remove sb-cache
* update the readme with todo and goals
* set the process title to the current running binary
* better error handling
* don't use configs for remark
* get build tests working
## 1.0.7 (2016-10-25)
* fix browserify shim when using spellbook's modules in browserify
## 1.0.6 (2016-10-25)
* add a --no-build option in sb-test-all
* make videojs-shim work no matter what
* better linter output
* remove clean from utils folder
* add videojs as a dependency
* make sb-test-all always rebuild
## 1.0.5 (2016-10-25)
* remove --quiet from sb-release as it is builtin
* fix a bug where getConfig was being used incorrectly
## 1.0.4 (2016-10-25)
* fix tests locally
* make --quiet a builtin with command-wrapper
* allow users to turn off certain build/lint steps
* merge babel configs into one babel preset
## 1.0.2 (2016-10-25)
* add a shim-videojs option so that it can be turned off
## 1.0.1 (2016-10-25)
* overhaul command running with logging
* fix issues with remark
* merge linter tests into one file
* run some tests in parallel
* add utilities to get-files and check if paths-exist
* get watchify to work and fixed test bundle builds watching
## 1.0.0 (2016-10-25)
* initial
<file_sep># sb-build-test-browser(1) - Build all user created unit tests into browser ready bundles
## SYNOPSIS
sb-build-test-browser [-l, --log-level <level>] [-h,--help] [-q,--quiet] [-V,--version]
[-w, --watch] [-d, --dist <dist-dir='dist/browser'>]
[<src-dir='${pkg.spellbook.test.src}'|'test/'>]
## DESCRIPTION
This binary will do the following:
1. Look for *.test.js in <src-dir> or fail if it does not find one
2. Pass what it found to browserify which will:
1. Start watching code changes to generate an internal source map.
2. ignore video.js if <package.json>.spellbook.shim-videojs is set to true (which is the default)
3. rollup all es6 code and dependencies that support it (using jsnext:main). This saves a lot of bytes and
mimics what we do on normal builds
4. convert all code to es5 using babelify with ie8 support if <package.json>.spellbook.ie8 is set to true (the default is false)
5. convert '__VERSION__' strings to the package version
6. browserify all es5 assets into the bundle
7. bundle-collapse all require paths so that they are 1 character numbers rather than long strings
8. Write a dist file to <dist-dir>/<pkg.name>.test.js
> NOTE: During watch watchify will be used for browserify and as of right now rollupify will not be used
> as it breaks watchify. This means that it will have to use `main` rather than `jsnext:main` to
> build projects. Basically sub projects will have to watch and rebuild their es6 -> es5 sources in
> order for the current project to be rebuilt.
## OPTIONS
-l, --log-level <level>
The level of log messages that you want to see. Can be none, fatal, error,
warn, info, verbose, or debug. Defaults to info.
-h, --help
View the help information for this binary
-V, --version
View the version of this binary
-q, --quiet
Do not log any messages to stdout or stderr
-w, --watch
Watch files for changes and incrementally rebuild on file change.
Failed builds with this active will not exit.
-d, --dist <dist-dir='dist/browser'>
Write to a specific dist-dir. Defaults to 'dist/browser'
<src-dir='${pkg.spellbook.test.src}'|'test/'>
Read from a specific <src-dir> instead of `${pkg.spellbook.test.src}` or
the default of `test/`
## EXAMPLES
Get the current version of spellbook
sb-build-test-browser -V
sb-build-test-browser --version
Get help for this binary
sb-build-test-browser --help
sb-build-test-browser -h
Set the log level for this binary
sb-build-test-browser -l info
sb-build-test-browser -l fatal
sb-build-test-browser --log-level debug
sb-build-test-browser --log-level error
Don't output anything
sb-build-test-browser -q
sb-build-test-browser --quiet
Incrementally rebuild
sb-build-test-browser --watch
sb-build-test-browser -w
Non default source directory
sb-build-test-browser src/test/unit
sb-build-test-browser src/test
Non default dist directory
sb-build-test-browser --dist dist/test/unit
sb-build-test-browser -d dist/whogives
## ENVIRONMENT AND CONFIGURATION VARIABLES
SB_LOG_LEVEL='info'
An environment variable that sets the log level to use for all videojs-spellbook
binaries. Can be set to fatal, error, warn, info, verbose, debug, or none.
<package.json>.spellbook.log-level=info
A package.json variable that sets the default log level to use for all videojs-spellbook
binaries. Can be set to fatal, error, warn, info, verbose, debug, or none.
<package.json>.spellbook.ie8=false
Should spellbook make sure that IE8 is supported. Defaults to false.
<package.json>.spellbook.shim-videojs=true
Makes sure that video.js is included in unit tests but will not be bundled into
distribution js files. Defaults to true.
<package.json>.spellbook.test='{}'
test configuration to use in spellbook. If this is set to a false value then test
builds will never be attempted.
<package.json>.spellbook.test.src='test/'
Source directory to use for test files, set in package.json. If this is unset
'test/' will be used. If this directory does not exist test will not be built.
## SEE ALSO
sb-build-test(1), sb-build-test-all(1)
## EXIT
0 - all commands succeeded
1 - one or more sub-command failed
## Spellbook
Part of the sb(1) suite
<file_sep>#!/usr/bin/env node
var path = require('path');
var shelljs = require('shelljs');
var config = require('./utils/get-config')();
var Watch = require('./utils/watch');
var log = require('./utils/log');
var GetFiles = require('./utils/get-files');
var CommanderWrapper = require('./utils/commander-wrapper');
var mkdirp = require('mkdirp');
var rimraf = require('rimraf');
var program = CommanderWrapper(function(commander) {
return commander
.option('-w, --watch', 'watch for changes and copy to dist')
.option('-d, --dist <dir>', 'dir to build to', path.join(config.dist, 'lang'))
.arguments('<dir>')
.action(function(src) {
this.src = src;
});
});
if (!program.src) {
program.src = config.lang.src;
}
if (!GetFiles(path.join(program.src, '**', '*.json')).length) {
log.fatal('Source directory ' + program.src + ' does not exist or contains no json files!');
process.exit(1);
}
var build = function() {
rimraf.sync(program.dist);
mkdirp.sync(path.dirname(program.dist));
log.info('Building...');
shelljs.cp('-r', program.src, program.dist);
log.info('Wrote: ' + program.dist);
};
if (program.watch) {
Watch(path.join(program.src, '**', '*.json'), build);
} else {
build();
}
<file_sep>var assert = require('chai').assert;
var shelljs = require('shelljs');
var path = require('path');
var TestHelper = require('./test-helper.js');
var PathsExist = require('../../src/utils/paths-exist');
var binName = 'sb-clean';
var parallel = require('mocha.parallel');
['sb-test-all', 'sb-test-browser', 'sb-test-node-all'].forEach(function(binName) {
parallel(binName, function() {
it('should run error on linter', function(done) {
var helper = new TestHelper({copyDist: true});
helper.exec(binName, ['--no-build'], function(code, stdout, stderr) {
assert.notEqual(code, 0, 'should return failure for linter');
helper.cleanup(done);
});
});
it('should run with no errors and --no-lint', function(done) {
var helper = new TestHelper({copyDist: true});
helper.exec(binName, ['--no-lint', '--no-build'], function(code, stdout, stderr) {
assert.equal(code, 0, 'should return success');
helper.cleanup(done);
});
});
it('should error on fake browser', function(done) {
var helper = new TestHelper({copyDist: true});
helper.exec(binName, ['--no-lint', '--no-build', '--browsers', 'cow'], function(code, stdout, stderr) {
assert.notEqual(code, 0, 'should return failure');
helper.cleanup(done);
});
});
if (binName === 'sb-test-node-all') {
return;
}
it('should run just chrome', function(done) {
var helper = new TestHelper({copyDist: true});
helper.exec(binName, ['--no-lint', '--no-build', '--browsers', 'chrome'], function(code, stdout, stderr) {
assert.equal(code, 0, 'should return success');
helper.cleanup(done);
});
});
});
});
<file_sep># sb-build-js-all(1) - Build all javascript assets for a project
## SYNOPSIS
sb-build-js-all [-l, --log-level <level>] [-h,--help] [-q,--quiet] [-V,--version]
[-w, --watch]
## DESCRIPTION
Build all of the following:
* browser js - see sb-build-js-browser(1)
* node.js - see sb-build-js-node(1)
## OPTIONS
-l, --log-level <level>
The level of log messages that you want to see. Can be none, fatal, error,
warn, info, verbose, or debug. Defaults to info.
-h, --help
View the help information for this binary
-V, --version
View the version of this binary
-q, --quiet
Do not log any messages to stdout or stderr
-w, --watch
Watch files for changes and incrementally rebuild on file change.
Failed builds with this active will not exit.
## EXAMPLES
Get the current version of spellbook
sb-build-js-all -V
sb-build-js-all --version
Get help for this binary
sb-build-js-all --help
sb-build-js-all -h
Set the log level for this binary
sb-build-js-all -l info
sb-build-js-all -l fatal
sb-build-js-all --log-level debug
sb-build-js-all --log-level error
Don't output anything
sb-build-js-all -q
sb-build-js-all --quiet
Incrementally rebuild
sb-build-js-all --watch
sb-build-js-all -w
## ENVIRONMENT AND CONFIGURATION VARIABLES
SB_LOG_LEVEL='info'
An environment variable that sets the log level to use for all videojs-spellbook
binaries. Can be set to fatal, error, warn, info, verbose, debug, or none.
<package.json>.spellbook.log-level=info
A package.json variable that sets the default log level to use for all videojs-spellbook
binaries. Can be set to fatal, error, warn, info, verbose, debug, or none.
<package.json>.spellbook.js='{}'
JS configuration to use in spellbook. If this is set to a false value
then js builds will never be attempted.
<package.json>.spellbook.js.src='src/js'
Source directory to use for js files, set in package.json. If this is unset
'src/js' will be used. If this directory does not exist js will not be built.
<package.json>.spellbook.ie8=false
Make sure that IE8 is supported. Defaults to false.
<package.json>.spellbook.shim-videojs=true
Makes sure that video.js is included in unit tests but will not be bundled into
distribution js files. Defaults to true.
## SEE ALSO
sb-build-js(1), sb-build-js-node(1), sb-build-js-browser(1)
## EXIT
0 - all commands succeeded
1 - one or more sub-command failed
## Spellbook
Part of the sb(1) suite
<file_sep>#!/usr/bin/env node
var path = require('path');
var log = require('./utils/log');
var config = require('./utils/get-config')();
var GetFiles = require('./utils/get-files');
var CommanderWrapper = require('./utils/commander-wrapper');
var postcssHelper = require('./utils/postcss-helper');
var Watch = require('./utils/watch');
var program = CommanderWrapper(function(commander) {
return commander
.option('-w, --watch', 'incremental rebuild')
.option('-d, --dist <dist-dir>', 'dir to write output to', path.join(config.dist, 'browser'))
.arguments('<src-dir>')
.action(function(src) {
this.src = src;
});
});
if (!program.src) {
program.src = config.css.src;
}
var files = GetFiles(path.join(program.src, 'index.css'));
var distFile = path.join(program.dist, config.name);
if (!files.length) {
log.fatal('Source directory ' + program.src + ' does not exist or contains no index.scss/index.css!');
process.exit(1);
}
var build = function() {
return postcssHelper({src: files[0], dist: distFile, watch: program.watch});
};
if (program.watch) {
Watch(path.join(program.src, '**', '*.css'), build);
} else {
build();
}
<file_sep># sb-test-browser(1) - Run browser unit tests that have been built for a project
## SYNOPSIS
sb-test-browser [-l, --log-level <level>] [-h,--help] [-q,--quiet] [-V,--version]
[-w, --watch] [-nb, --no-build] [-nl, --no-lint]
[-b, --browsers <chrome,firefox,safari>] [-p, --port <port=9876>]
## DESCRIPTION
Test all unit tests that have been built to 'dist/test/*.test.js' using karma.
See sb-build-test(1) for more information on what is being built.
Basic steps:
1. Run sb-build unless we are passed --no-build
2. Run sb-lint unless we are passed --no-lint, but don't exit on failure
3. Karma will be run with --no-auto-watch and --single-run unless this binary is run with
--watch. If run with --watch karma will be run with --no-single-run and --auto-watch.
4. Karma will shim sinon manually as only specific versions of sinon work for our needs.
If you install sinon locally it will be used instead of the version included with spellbook.
5. Karma will shim video.js unless `shim-videojs` is set to false in package.json
6. Karma will start a server on <port> and use karma-detect-browsers to decide what browsers to run on
unless --browsers is provided.
7. Karma will run tests and exit with a return code of success or failure depending on if the tests
passed or failed.
The config file can be seen in configs/karma.config.js
This uses karma and QUnit internally to run tests.
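As a point of reference, a karma config covering the behavior described above might look like the minimal sketch below. The real configs/karma.config.js also handles the sinon and video.js shims and browser detection; the frameworks list and files glob here are assumptions for illustration.

```js
// karma.config.js -- minimal sketch, not spellbook's actual config
module.exports = function(config) {
  config.set({
    frameworks: ['qunit'],
    files: ['dist/test/*.test.js'],
    port: process.env.TEST_PORT || 9876,

    // sb-test-browser flips these two when run with --watch
    singleRun: true,
    autoWatch: false,

    // normally karma-detect-browsers picks these; --browsers overrides
    browsers: ['Chrome', 'Firefox']
  });
};
```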
## OPTIONS
-l, --log-level <level>
The level of log messages that you want to see. Can be none, fatal, error,
warn, info, verbose, or debug. Defaults to info.
-h, --help
View the help information for this binary
-V, --version
View the version of this binary
-q, --quiet
Do not log any messages to stdout or stderr
-w, --watch
Watch unit test dist files for changes and re-run/re-load tests.
-nb, --no-build
Do not build before testing.
-nl, --no-lint
Do not lint before testing.
-b, --browsers <chrome,firefox,safari>
comma-separated list of browsers to run on
-p, --port <port=9876>
The port to use for the browsers test server. Can be defined with the
TEST_PORT environment variable as well. defaults to 9876
## EXAMPLES
Get the current version of spellbook
sb-test-browser -V
sb-test-browser --version
Get help for this binary
sb-test-browser --help
sb-test-browser -h
Set the log level for this binary
sb-test-browser -l info
sb-test-browser -l fatal
sb-test-browser --log-level debug
sb-test-browser --log-level error
Don't output anything
sb-test-browser -q
sb-test-browser --quiet
Watch for changes and re-load/re-run tests
sb-test-browser -w
sb-test-browser --watch
Do not build before testing
sb-test-browser -nb
sb-test-browser --no-build
Do not lint before testing
sb-test-browser -nl
sb-test-browser --no-lint
Only tests specific browsers not detected browsers
sb-test-browser -b chrome
sb-test-browser --browsers chrome,firefox
Use a different port
sb-test-browser -p 3333
sb-test-browser --port 8888
## ENVIRONMENT AND CONFIGURATION VARIABLES
SB_LOG_LEVEL='info'
An environment variable that sets the log level to use for all videojs-spellbook
binaries. Can be set to fatal, error, warn, info, verbose, debug, or none.
TEST_PORT=9876
The port that the test server (karma-runner) is running on. Defaults to 9876.
<package.json>.spellbook.log-level=info
A package.json variable that sets the default log level to use for all videojs-spellbook
binaries. Can be set to fatal, error, warn, info, verbose, debug, or none.
<package.json>.spellbook.shim-videojs=true
If video.js is shimmed it will be included in unit tests for use. Otherwise it won't be.
## SEE ALSO
sb-test(1), sb-test-all(1), sb-build-test(1)
## EXIT
0 - all commands succeeded
1 - one or more sub-command failed
## Spellbook
Part of the sb(1) suite
<file_sep><!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->
**Table of Contents** *generated with [DocToc](https://github.com/thlorenz/doctoc)*
- [General](#general)
- [Release](#release)
- [Lint](#lint)
- [General](#general-1)
- [Docs](#docs)
- [JS](#js)
- [Build](#build)
- [JS](#js-1)
- [Docs](#docs-1)
- [CSS](#css)
- [Test](#test)
- [General](#general-2)
- [nodejs](#nodejs)
- [Server](#server)
<!-- END doctoc generated TOC please keep comment here to allow auto update -->
# General
* Should we make watch a [dashboard](https://github.com/FormidableLabs/nodejs-dashboard)
* Add [colors](https://github.com/chalk/chalk)
* Add [tab-completion](https://github.com/mklabs/node-tabtab)
* Add documentation with a [man page](https://github.com/wooorm/remark-man)
* See if we follow:
* http://docopt.org/
* https://programmers.stackexchange.com/questions/307467/what-are-good-habits-for-designing-command-line-arguments
* http://eng.localytics.com/exploring-cli-best-practices/
* githooks for spellbook (run things when certain actions happen)
# Release
* Create a zip file of the source code to upload to github
* switch to `conventional-changelog` instead of `chg`
* support prerelease signifiers (beta, alpha etc)
# Lint
## General
* verify that --fix and --errors work for each linter
## Docs
* better markdown linting rules, remove the preset we are using
## JS
* eslint rules (see if there are any other useful ones)
* eslint-plugin-qunit
# Build
## JS
* rollup to a temporary file separately from browserify
* add banner to unminified browser bundle
* jspm support?
* noderify npm files?
* only build the main code bundle once in with `js-browser-main` and include that in `js-browser-test`
* jsx support
* ts support
* add rollup bundler support
* fix issues with rollup and --watch
* when browserify-shim support config passing, use that rather than using browserify-shim in user pkg.json
* see: https://github.com/thlorenz/browserify-shim/pull/195
## Docs
* implement jsdoc tui theme
* make remark-toc generation automatic rather than forcing users to add a section
* html theme for guides
## CSS
* styl support?
* less support?
# Test
## General
* code coverage via istanbul
* sb-watch should auto-retest detected browsers. sb-start should pass an arg to sb-watch to disable this behavior
* test markdown examples js examples?
## nodejs
* get tests to run in nodejs, switch to mocha/chai?
# Server
* integrate with the hotel proxy to support project domain names?
<file_sep>var path = require('path');
var GetConfig = require('./get-config');
var pkg = require('../../package.json');
var getRootParent = require('./get-root-parent');
var os = require("os");
var util = require('util');
var LOG_LEVELS = {
none: 0,
fatal: 1,
error: 2,
warn: 3,
info: 4,
verbose: 5,
debug: 6,
};
// case insensitive log level number getter
var levelNumber = function(level) {
if (typeof level === 'number') {
return level;
}
for(var l in LOG_LEVELS) {
if ((new RegExp(l, 'i')).test(level)) {
return LOG_LEVELS[l];
}
}
return 0;
};
var rightPad = function(str, len, char) {
char = char || ' ';
if (str.length === len) {
return str;
}
while(str.length < len) {
str += char;
}
return str;
};
/**
* get the biggest length so we know how much to
* pad the getParent prefix
*/
var biggestLen = 0;
Object.keys(pkg.bin).forEach(function(bin) {
bin = bin.replace('sb-', '');
if (bin.length > biggestLen) {
biggestLen = bin.length;
}
});
var getParent = function() {
var parent = path.basename(getRootParent().filename)
.replace('sb-', '');
return '[' + rightPad(parent, biggestLen) + ']';
};
var getPrefix = function(level) {
return getParent() + '[' + rightPad(level, 5) + ']: ';
};
/**
* get the current time
*/
var getTime = function() {
var date = new Date();
var hour = date.getHours();
var min = date.getMinutes();
var sec = date.getSeconds();
var ms = date.getMilliseconds();
hour = (hour < 10 ? "0" : "") + hour;
min = (min < 10 ? "0" : "") + min;
sec = (sec < 10 ? "0" : "") + sec;
ms = (ms < 10 ? "0" : "") + ms;
ms = (ms < 100 ? "0" : "") + ms;
return '[' + hour + ":" + min + ":" + sec + ':' + ms + ']';
};
var log = function(level, msgs) {
if (level === 'none') {
return;
}
var config = GetConfig();
// skip if the currently set log level
// is less than this log line
var currentLevelNumber = levelNumber(level);
if (levelNumber(config.logLevel) < currentLevelNumber) {
return;
}
while(msgs.length) {
var msg = msgs.shift();
// allows us to print non-strings
if (typeof msg !== 'string') {
msg = util.inspect(msg, false, null);
}
// skip blank lines
if (!msg.trim()) {
continue;
}
// split on new lines
// treat them as new log messages
var lines = msg.split(os.EOL);
if (lines.length > 1) {
log(level, lines);
continue;
}
// keep long paths out of logs, but only on debug
if (level !== 'debug') {
msg = msg.split(path.join(__dirname, '..', '..', 'node_modules', '.bin') + path.sep).join('');
msg = msg.split(path.join(__dirname, '..', '..', 'config') + path.sep).join('');
msg = msg.split(path.join(__dirname, '..') + path.sep).join('');
msg = msg.split(__dirname + path.sep).join('');
msg = msg.split(config.path + path.sep).join('');
}
var fn = console.log;
// log to stderr on any level less than or equal to warn
if (currentLevelNumber <= LOG_LEVELS.error) {
fn = console.error;
}
fn(getPrefix(level) + msg);
}
};
var logObj;
var appender = function() {
return function(logEvent) {
logObj.info(util.format.apply(null, logEvent.data));
};
};
logObj = {
LEVELS: Object.keys(LOG_LEVELS),
LOG_LEVELS: LOG_LEVELS,
prefix: getPrefix,
appender: appender,
configure: function(config) {
return appender();
},
};
Object.keys(LOG_LEVELS).forEach(function(level) {
logObj[level] = function() {
var msgs = Array.prototype.slice.call(arguments) || [];
log(level, msgs);
};
});
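// Usage note: each key of LOG_LEVELS becomes a method on the exported object,
// e.g. log.info('message'), log.debug(someObject), or log.fatal('oh no').
// Messages are prefixed with the calling binary name and the level, and
// anything at or below the error level is written to stderr instead of stdout.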
module.exports = logObj;
<file_sep>#!/usr/bin/env node
var config = require('./utils/get-config')();
var PathsExist = require('./utils/paths-exist');
var path = require('path');
var log = require('./utils/log');
var Run = require('./utils/run');
var CommanderWrapper = require('./utils/commander-wrapper');
var Promise = require('bluebird');
var program = CommanderWrapper(function(commander) {
return commander
.option('-w, --watch', 'keep the tester running and run as things change')
.option('-b, --browsers <list,of,browsers>', 'comma-separated list of browsers to run on')
.option('-nb, --no-build', 'dont build before testing')
.option('-p, --port <port>', 'port for karma server')
.option('-nl, --no-lint', 'dont lint before testing');
});
program.port = program.port || process.env.TEST_PORT || 9876;
var KARMA_BROWSERS = ['Chrome', 'Firefox', 'IE', 'Safari'];
var commands = [];
var karmaCommand = [
'karma',
'start',
'karma.config.js',
'--port', program.port
];
if (program.watch) {
karmaCommand.push('--auto-watch');
karmaCommand.push('--no-single-run');
} else {
karmaCommand.push('--no-auto-watch');
karmaCommand.push('--single-run');
}
if (program.browsers) {
program.browsers = program.browsers.split(',');
var browsers = [];
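// match each user supplied name case-insensitively against the browsers
// karma knows how to launch, de-duplicating as we go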
program.browsers.forEach(function(userBrowser) {
userBrowser = userBrowser.trim();
var found = false;
KARMA_BROWSERS.forEach(function(karmaBrowser) {
// skip remaining checks once a match is found
if (found) {
return;
}
var regexp = new RegExp(karmaBrowser, 'i');
if (regexp.test(userBrowser)) {
// don't add the same browser more than once
if (browsers.indexOf(karmaBrowser) === -1) {
browsers.push(karmaBrowser);
}
found = true;
}
});
if (found !== true) {
log.fatal('invalid browser entry: ' + userBrowser);
process.exit(1);
}
});
karmaCommand.push('--browsers');
karmaCommand.push(browsers.join(','));
}
commands.push(karmaCommand);
if (program.lint && !program.watch) {
commands.unshift(['sb-lint']);
}
var buildPromise = Promise.resolve();
if (program.build && !program.watch) {
buildPromise = Run.one('sb-build');
}
buildPromise.then(function() {
return Run.series(commands, {failAfter: true});
});
<file_sep>var shelljs = require('shelljs');
var GetConfig = require('../../src/utils/get-config');
var path = require('path');
var fixtureDir = path.join(__dirname, '..', 'fixtures');
var rootDir = path.join(__dirname, '..', '..');
var uuid = require('uuid');
var PathsExist = require('../../src/utils/paths-exist');
var rimraf = require('rimraf');
var npmRun = require('npm-run');
var fs = require('fs');
var mkdirp = require('mkdirp');
shelljs.config.silent = false;
shelljs.config.fatal = true;
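// Helper used by the bin tests in this directory. Each instance copies the
// test-pkg-main fixture into a fresh temp dir, optionally links this package
// into its node_modules, and removes the dir again on cleanup/exit. Typical use:
//   var helper = new TestHelper({copyDist: true});
//   helper.exec(binName, ['--no-lint', '--no-build'], function(code, stdout, stderr) {
//     helper.cleanup(done);
//   });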
var TestHelper = function(options) {
if (typeof options === 'boolean') {
options = {debug: options};
}
options = options || {};
this.options = {
debug: options.debug || process.env.DEBUG_TEST || false,
npmLink: options.npmLink || true,
gitInit: options.gitInit || false,
changePkg: options.changePkg || false,
copyDist: options.copyDist || false
};
// allow a ton of process listeners
process.setMaxListeners(1000);
// do a normal exit on ctrl+c or ctrl+\
var sigexit = function() {
process.exit(0);
};
process.on('SIGINT', sigexit);
process.on('SIGQUIT', sigexit);
this.fixtureClean();
this.projectDir = path.join(fixtureDir, 'test-pkg-main');
while(PathsExist(this.projectDir)) {
var id = uuid.v4();
this.projectDir = path.join(shelljs.tempdir(), id);
}
shelljs.cp('-R', path.join(fixtureDir, 'test-pkg-main') + path.sep, this.projectDir);
if (this.options.copyDist) {
shelljs.cp('-R', path.join(__dirname, '..', 'expected-dist') + path.sep, path.join(this.projectDir, 'dist'));
}
if (!this.options.debug) {
shelljs.config.silent = true;
} else {
shelljs.config.silent = false;
console.log(this.projectDir);
//console.log(this.lsProject());
}
if (this.options.npmLink) {
this.npmLink(this.projectDir);
}
if (this.options.gitInit) {
this.gitInit(this.projectDir);
}
if (this.options.changePkg) {
this.changePkg(this.projectDir, this.options.changePkg);
}
// always cleanup the tmpdir
process.on('exit', this.cleanup.bind(this));
// make sure that tests can use a fresh config
// and not the one from the previous cache
this.config = GetConfig(this.projectDir);
return this;
};
var splitString = TestHelper.prototype.trim = function(string) {
var newStdout = [];
string = string.trim().split('\n') || [];
string.forEach(function(s) {
if (!s.trim()) {
return;
}
newStdout.push(s);
});
return newStdout;
};
TestHelper.prototype.exec = function(cmd, args, cb) {
if (!cb && typeof args === 'function') {
cb = args;
args = [];
}
var stdout = '';
var stderr = '';
if (this.options.debug) {
console.log('running ' + cmd + ' with args ' + args.join(' '));
console.log('in dir ' + this.projectDir);
}
var child = npmRun.spawn(cmd, args, {cwd: this.projectDir}).on('close', function(code) {
cb(code, splitString(stdout), splitString(stderr));
});
child.stdout.on('data', function(d) {
stdout += d.toString();
});
child.stderr.on('data', function(d) {
stderr += d.toString();
});
if (this.options.debug) {
child.stdout.on('data', process.stdout.write.bind(process.stdout));
child.stderr.on('data', process.stderr.write.bind(process.stderr));
}
return child;
};
TestHelper.prototype.cleanup = function(done) {
if (this.options.debug && PathsExist(this.projectDir)) {
console.log(this.projectDir);
//console.log(this.lsProject());
}
rimraf.sync(this.projectDir);
if (typeof done === 'function') {
done();
}
};
TestHelper.prototype.lsProject = function() {
return shelljs.ls('-RA', this.projectDir)
.grep('-v', '.git')
.grep('-v', 'node_modules')
.stdout;
};
TestHelper.prototype.gitInit = function(dir) {
if (process.env.TRAVIS) {
shelljs.exec('git config --global user.email "<EMAIL>"');
shelljs.exec('git config --global user.name "<NAME>"');
}
if (PathsExist(path.join(dir, '.git'))) {
rimraf.sync(path.join(dir, '.git'));
}
shelljs.pushd(dir);
shelljs.exec('git init');
shelljs.exec('git add --all');
shelljs.exec('git commit -a -m initial');
shelljs.popd();
};
TestHelper.prototype.changePkg = function(dir, newPkg) {
var pkgFile = path.join(dir, 'package.json');
var pkg = JSON.parse(fs.readFileSync(pkgFile));
pkg = Object.assign(pkg, newPkg);
fs.writeFileSync(pkgFile, JSON.stringify(pkg, null, 2));
};
TestHelper.prototype.npmLink = function(dir) {
var nodeDir = path.join(dir, 'node_modules');
var binDir = path.join(nodeDir, '.bin');
if (PathsExist(nodeDir)) {
rimraf.sync(nodeDir);
}
mkdirp.sync(binDir);
// mimic npm link
var pkgsToLink = shelljs.ls('-d', path.join(fixtureDir, '*'));
pkgsToLink.push(path.join(__dirname, '..', '..'));
pkgsToLink.forEach(function(folder) {
// skip the main package
if (path.basename(folder) === 'test-pkg-main') {
return;
}
var pkg = JSON.parse(fs.readFileSync((path.join(folder, 'package.json'))));
shelljs.ln('-sf', folder, path.join(nodeDir, pkg.name));
if (!pkg.bin) {
return;
}
Object.keys(pkg.bin).forEach(function(binName) {
var binPath = pkg.bin[binName];
shelljs.ln('-sf', path.join(folder, binPath), path.join(binDir, binName));
});
});
};
TestHelper.prototype.fixtureClean = function() {
// pre-cleanup
['bower.json', 'npm-debug.log', 'dist', 'node_modules', '.git'].forEach(function(dir) {
var d = path.join(fixtureDir, 'test-pkg-main', dir);
if (PathsExist(d)) {
rimraf.sync(d);
}
});
};
module.exports = TestHelper;
<file_sep># sb-start(1) - Run sb-server(1) and sb-watch(1) in parallel and start a development workflow.
## SYNOPSIS
sb-start [-l, --log-level <level>] [-h,--help] [-q,--quiet] [-V,--version]
[-p, --port <port-number=9999>] [-tp, --test-port <port-number=9876>]
[-o, --open] [-s, --sync] [-t, --tunnel] [-nui, --no-ui] [-nb, --no-build]
[-nl, --no-lint]
## DESCRIPTION
Starts a development workflow watching files for changes and doing the following:
1. Building everything and re-building on change
2. Linting everything and re-linting on change
3. Starting a development server
4. Starting a testing server
5. Reloading the browser when a rebuild is finished
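The development server and reload steps are built on BrowserSync (see the PORT variable below). A stripped-down sketch of what steps 3 and 5 amount to is shown here; the static root and callback wiring are illustrative assumptions, not spellbook's actual server code.

```js
var browserSync = require('browser-sync').create();

browserSync.init({
  server: './',                     // assumed static root, illustrative only
  port: process.env.PORT || 9999,   // -p / PORT
  open: false,                      // --open flips this
  ui: false                         // --no-ui disables the BrowserSync ui
});

// step 5: something like this runs after each successful rebuild
function onRebuild() {
  browserSync.reload();
}
```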
## OPTIONS
-l, --log-level <level>
The level of log messages that you want to see. Can be none, fatal, error,
warn, info, verbose, or debug. Defaults to info.
-h, --help
View the help information for this binary
-V, --version
View the version of this binary
-q, --quiet
Do not log any messages to stdout or stderr
-p, --port <port-number=9999>
What port to open the development server on. Default is 9999.
-tp, --test-port <port-number=9876>
What port the test server for this development server will be running on. Defaults
to 9876.
-o, --open
Open the development server address in the main browser for the OS. This does not
happen by default.
-s, --sync
Should all open browsers be synced using BrowserSync? This causes all browsers to
emulate any action that is done in another browser. So scrolling down will also scroll
other browsers down. Clicking a link will cause other browsers to click that link. This
behavior is off by default.
-t, --tunnel
Should browser sync tunnel your development server over the internet? By default this setting
is off. The tunnel link will be reported when BrowserSync starts.
-nui, --no-ui
Turn off the browser-sync ui, which is used to configure specific browser-sync functionality.
-nb, --no-build
Do not build and watch for re-build.
-nl, --no-lint
Do not lint and watch for re-lint.
## EXAMPLES
Get the current version of spellbook
sb-start -V
sb-start --version
Get help for this binary
sb-start --help
sb-start -h
Set the log level for this binary
sb-start -l info
sb-start -l fatal
sb-start --log-level debug
sb-start --log-level error
Don't output anything
sb-start -q
sb-start --quiet
Don't build and watch
sb-start -nb
sb-start --no-build
Don't lint and watch
sb-start -nl
sb-start --no-lint
Open the server in the main browser
sb-start --open
sb-start -o
Do not start the BrowserSync UI
sb-start -nui
sb-start --no-ui
Set the test server port
sb-start --test-port 7777
sb-start -tp 8888
Set the server port
sb-start --port 7777
sb-start -p 8888
Sync Browser actions across browsers
sb-start --sync
sb-start -s
Tunnel your development server over the internet
sb-start --tunnel
sb-start -t
## ENVIRONMENT AND CONFIGURATION VARIABLES
SB_LOG_LEVEL='info'
An environment variable that sets the log level to use for all videojs-spellbook
binaries. Can be set to fatal, error, warn, info, verbose, debug, or none.
TEST_PORT=9876
The port that the test server (karma-runner) is running on. Defaults to 9876.
PORT=9999
The port to run the development server on (browser-sync). Defaults to 9999.
<package.json>.spellbook.log-level=info
A package.json variable that sets the default log level to use for all videojs-spellbook
binaries. Can be set to fatal, error, warn, info, verbose, debug, or none.
<package.json>.spellbook.ie8=false
Should spellbook make sure that IE8 is supported. Defaults to false.
<package.json>.spellbook.shim-videojs=true
Makes sure that video.js is included in unit tests but will not be bundled into
distribution js files. Defaults to true.
<package.json>.spellbook.css='{}'
CSS configuration to use in spellbook. If this is set to a false value then css
builds will never be attempted.
<package.json>.spellbook.css.src='src/css'
Source directory to use for css files, set in package.json. If this is unset
'src/css' will be used. If this directory does not exist css will not be built.
<package.json>.spellbook.js='{}'
JS configuration to use in spellbook. If this is set to a false value then js
builds will never be attempted.
<package.json>.spellbook.js.src='src/js'
Source directory to use for js files, set in package.json. If this is unset
'src/js' will be used. If this directory does not exist js will not be built.
<package.json>.spellbook.lang='{}'
Lang configuration to use in spellbook. If this is set to a false value then lang
builds will never be attempted.
<package.json>.spellbook.lang.src='lang/'
Source directory to use for lang files, set in package.json. If this is unset
'lang/' will be used. If this directory does not exist lang will not be built.
<package.json>.spellbook.docs='{}'
Documentation configuration to use in spellbook. If this is set to a false value
then docs builds will never be attempted.
<package.json>.spellbook.docs.src='docs/'
Source directory to use for docs files, set in package.json. If this is unset
'docs/' will be used. If this directory does not exist docs will not be built.
<package.json>.spellbook.test='{}'
Test configuration to use in spellbook. If this is set to a false value
then test builds will never be attempted.
<package.json>.spellbook.test.src='test/'
Source directory to use for test files, set in package.json. If this is unset
'test/' will be used. If this directory does not exist tests will not be built.
## SEE ALSO
sb(1), sb-watch(1), sb-server(1)
## EXIT
will always exit 0
## Spellbook
Part of the sb(1) suite
<file_sep># sb-lint-docs-all(1) - Lint all documentation assets for a project
## SYNOPSIS
sb-lint-docs-all [-l, --log-level <level>] [-h,--help] [-q,--quiet] [-V,--version]
[-w, --watch] [-e, --errors] [-f, --fix]
## DESCRIPTION
This is run when sb-lint-docs(1) is not passed any arguments or is passed all
as the command to run.
This command will run:
* markdown files - see sb-lint-docs-md(1)
* js examples in docs - see sb-lint-docs-examples(1)
## OPTIONS
-l, --log-level <level>
The level of log messages that you want to see. Can be none, fatal, error,
warn, info, verbose, or debug. Defaults to info.
-h, --help
View the help information for this binary
-V, --version
View the version of this binary
-q, --quiet
Do not log any messages to stdout or stderr
-w, --watch
Watch files for changes and re-lint on file change.
Errors during linting with this active will not exit.
-e, --errors
Only log linting errors. Will not log any linting warnings.
-f, --fix
This will only work for fixing issues in sb-lint-docs-examples as
sb-lint-docs-md does not support it.
## EXAMPLES
Get the current version of spellbook
sb-lint-docs-all -V
sb-lint-docs-all --version
Get help for this binary
sb-lint-docs-all --help
sb-lint-docs-all -h
Set the log level for this binary
sb-lint-docs-all -l info
sb-lint-docs-all -l fatal
sb-lint-docs-all --log-level debug
sb-lint-docs-all --log-level error
Don't output anything
sb-lint-docs-all -q
sb-lint-docs-all --quiet
Watch files for changes and lint again once they change
sb-lint-docs-all -w
sb-lint-docs-all --watch
Fix any errors/warnings that can be fixed automatically
sb-lint-docs-all -f
sb-lint-docs-all --fix
Only log errors, do not log warnings.
sb-lint-docs-all -e
sb-lint-docs-all --errors
## ENVIRONMENT AND CONFIGURATION VARIABLES
SB_LOG_LEVEL='info'
An environment variable that sets the log level to use for all videojs-spellbook
binaries. Can be set to fatal, error, warn, info, verbose, debug, or none.
<package.json>.spellbook.log-level=info
A package.json variable that sets the default log level to use for all videojs-spellbook
binaries. Can be set to fatal, error, warn, info, verbose, debug, or none.
<package.json>.spellbook.docs='{}'
Docs configuration to use in spellbook. If this is set to a false value then docs
linting will never be attempted.
<package.json>.spellbook.docs.src='docs'
Source directory to use for docs files, set in package.json. If this is unset
'docs/' will be used. If this directory does not exist docs will not be linted.
## SEE ALSO
sb-lint-docs-examples(1), sb-lint-docs-md(1), sb-lint-docs(1)
## EXIT
0 - all commands succeeded
1 - one or more sub-command failed, unless --watch is active
## Spellbook
Part of the sb(1) suite
<file_sep># sb-build-js(1) - Build javascript assets for a project
## SYNOPSIS
sb-build-js [-l, --log-level <level>] [-h,--help] [-q,--quiet] [-V,--version]
[<sub-command='all'> [<args...>]] [help <sub-command>]
## DESCRIPTION
Run one of the following sub commands for js builds
* node - see sb-build-js-node(1)
* browser - see sb-build-js-browser(1)
* all - see sb-build-js-all(1)
By default if no command is passed in `sb-build-js` will be run as if it was
passed `all`.
## OPTIONS
-l, --log-level <level>
The level of log messages that you want to see. Can be none, fatal, error,
warn, info, verbose, or debug. Defaults to info.
-h, --help
View the help information for this binary
-V, --version
View the version of this binary
-q, --quiet
Do not log any messages to stdout or stderr
<sub-command='all'> [<args...>]
The sub-command to run. Defaults to all if nothing is passed in.
[<args....>] will be passed along to sub command.
help <sub-command>
Get help for a sub-command, this is an alias for running `sb-build-js <sub-command> --help`.
## EXAMPLES
Build js assets with sub command
sb-build-js node [<args...>]
sb-build-js browser [<args...>]
sb-build-js all [<args...>]
Get help for a command
sb-build-js help node
sb-build-js help browser
sb-build-js all --help
Get the current version of spellbook
sb-build-js -V
sb-build-js --version
Get help for this binary
sb-build-js --help
sb-build-js -h
Set the log level for this binary
sb-build-js -l info
sb-build-js -l fatal
sb-build-js --log-level debug
sb-build-js --log-level error
Don't output anything
sb-build-js -q
sb-build-js --quiet
## ENVIRONMENT AND CONFIGURATION VARIABLES
SB_LOG_LEVEL='info'
An environment variable that sets the log level to use for all videojs-spellbook
binaries. Can be set to fatal, error, warn, info, verbose, debug, or none.
<package.json>.spellbook.log-level=info
A package.json variable that sets the default log level to use for all videojs-spellbook
binaries. Can be set to fatal, error, warn, info, verbose, debug, or none.
<package.json>.spellbook.js={}
JS configuration to use in spellbook. If this is set to a false value then js
builds will never be attempted.
<package.json>.spellbook.js.src
package.json spellbook configuration for the default js source directory. Changes the default
from `src/js` to whatever is specified.
<package.json>.spellbook.ie8=false
Make sure that IE8 is supported. Defaults to false.
<package.json>.spellbook.shim-videojs=true
Makes sure that video.js is included in unit tests but will not be bundled into
distribution js files. Defaults to true.
## SEE ALSO
sb-build-all(1), sb-build(1), sb-build-js-all(1), sb-build-js-node(1),
sb-build-js-browser(1)
## EXIT
0 - all commands succeeded
1 - one or more sub-command failed
## Spellbook
Part of the sb(1) suite
<file_sep>var assert = require('chai').assert;
var path = require('path');
var TestHelper = require('./test-helper.js');
var pkg = require('../../package.json');
var parallel = require('mocha.parallel');
var GetFiles = require('../../src/utils/get-files');
var PathsExist = require('../../src/utils/paths-exist');
// get a list of all binaries in pkg.json
// add any binairies not listed in pkg.json
var binaries = Object.keys(pkg.bin).concat(GetFiles(path.join(__dirname, '..', '..', 'src', '*')).filter(function(file) {
var basename = path.basename(file);
// filter out the utils folder
if (basename === 'utils') {
return false;
}
// filter out anything already in pkg.bin
if (pkg.bin[basename]) {
return false;
}
return true;
}));
binaries.forEach(function(fileOrBin) {
var binName = path.basename(fileOrBin);
parallel(binName, function() {
it('should have a bin in pkg.json', function(done) {
var helper = new TestHelper();
assert.ok(pkg.bin[binName], 'should exist in pkg.json');
helper.cleanup(done);
});
it('should have man page', function(done) {
var helper = new TestHelper();
var manName = "dist/man/" + binName + '.1';
assert.notEqual(pkg.man.indexOf(manName), -1, 'should exist in pkg.json');
assert.ok(PathsExist(path.join(__dirname, '..', '..', 'docs', binName + '.md')), 'should have a doc file');
helper.cleanup(done);
});
['--help', '-h'].forEach(function(option) {
it('should have ' + option, function(done) {
var helper = new TestHelper();
var child = helper.exec(binName, [option], function(code, stdout, stderr) {
assert.equal(code, 0, 'should return success');
assert.ok((new RegExp('^Usage: ' + binName)).test(stdout[0]), 'should print help');
assert.equal(stderr.length, 0, 'no errors');
helper.cleanup(done);
});
});
});
['--version', '-V'].forEach(function(option) {
it('should have ' + option, function(done) {
var helper = new TestHelper();
var child = helper.exec(binName, [option], function(code, stdout, stderr) {
assert.equal(code, 0, 'should return success');
assert.ok((new RegExp(pkg.version)).test(stdout[0]), 'should print version');
assert.equal(stderr.length, 0, 'no errors');
helper.cleanup(done);
});
});
});
// sb can not call through itself
if (binName !== 'sb') {
var nameArgs = binName.replace(/^sb-/, '').split('-');
it('should be able to call ' + binName + ' through sb ' + nameArgs.join(' '), function(done) {
var helper = new TestHelper();
helper.exec('sb', nameArgs.concat(['--help']), function(code, stdout, stderr) {
assert.equal(code, 0, 'should return success');
assert.ok((new RegExp('Usage: ' + binName)).test(stdout.join('\n')), 'should print help');
assert.equal(stderr.length, 0, 'no errors');
helper.cleanup(done);
});
});
}
});
});
<file_sep># sb-test(1) - Run tests for a project
## SYNOPSIS
sb-test [-l, --log-level <level>] [-h,--help] [-q,--quiet] [-V,--version]
[<sub-command='all'> [<args...>]] [help <sub-command>]
## DESCRIPTION
Potentially test all or one of the following through sub-binaries:
* node - see sb-test-node(1)
* browser - see sb-test-browser(1)
* all - see sb-test-all(1)
By default if no command is passed in `sb-test` will be run as if it was
passed `all`.
## OPTIONS
-l, --log-level <level>
The level of log messages that you want to see. Can be none, fatal, error,
warn, info, verbose, or debug. Defaults to info.
-h, --help
View the help information for this binary
-V, --version
View the version of this binary
-q, --quiet
Do not log any messages to stdout or stderr
<sub-command='all'> [<args...>]
The sub-command to run. Defaults to all if nothing is passed in.
[<args....>] will be passed along to sub command.
help <sub-command>
Get help for a sub-command, this is an alias for running `sb-test <sub-command> --help`.
## EXAMPLES
Test specific assets
sb-test node [<args...>]
sb-test browser [<args...>]
sb-test all [<args...>]
Get help for a command
sb-test help node
sb-test help browser
sb-test all --help
sb-test browser --help
Get the current version of spellbook
sb-test -V
sb-test --version
Get help for this binary
sb-test --help
sb-test -h
Set the log level for this binary
sb-test -l info
sb-test -l fatal
sb-test --log-level debug
sb-test --log-level error
Don't output anything
sb-test -q
sb-test --quiet
## ENVIRONMENT AND CONFIGURATION VARIABLES
SB_LOG_LEVEL='info'
An environment variable that sets the log level to use for all videojs-spellbook
binaries. Can be set to fatal, error, warn, info, verbose, debug, or none.
TEST_PORT=9876
The port that the test server (karma-runner) is running on. Defaults to 9876.
<package.json>.spellbook.log-level=info
A package.json variable that sets the default log level to use for all videojs-spellbook
binaries. Can be set to fatal, error, warn, info, verbose, debug, or none.
<package.json>.spellbook.shim-videojs=true
If video.js is shimed it will be included in unit tests for use. Otherwise it won't be.
<package.json>.spellbook.test='{}'
Test configuration to use in spellbook. If this is set to a false value
then tests will never be run.
## SEE ALSO
sb-test-all(1), sb-test-browser(1), sb-test-node(1)
## EXIT
0 - all commands succeeded
1 - one or more sub-command failed
## Spellbook
Part of the sb(1) suite
<file_sep>var chokidar = require('chokidar');
var log = require('./log');
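/**
 * Watch a glob with chokidar and call `fn` on any add/change/unlink event,
 * ignoring *.tmp files. `fn` is also called once immediately, so watch-mode
 * commands always do an initial run before waiting for changes.
 */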
var Watch = function(glob, fn) {
chokidar.watch(glob, {ignored: '*.tmp', ignoreInitial: true})
.on('add', fn)
.on('addDir', fn)
.on('change', fn)
.on('unlink', fn)
.on('unlinkDir', fn);
log.info('watching ' + glob);
fn();
};
module.exports = Watch;
<file_sep>#!/usr/bin/env node
var CommanderWrapper = require('./utils/commander-wrapper');
var log = require('./utils/log');
var config = require('./utils/get-config')();
var program = CommanderWrapper(function(commander) {
return commander
.command('browser', 'build js browser output')
.command('node', 'build nodejs output')
.command('all', 'build all possible js outputs', {isDefault: true});
});
if (!config.js) {
log.warn('js builds are turned off');
process.exit();
}
<file_sep>#!/usr/bin/env node
var path = require('path');
var config = require('./utils/get-config')();
var GetFiles = require('./utils/get-files');
var Watch = require('./utils/watch');
var log = require('./utils/log');
var Run = require('./utils/run');
var CommanderWrapper = require('./utils/commander-wrapper');
var Promise = require('bluebird');
var PathsExist = require('./utils/paths-exist');
var rimraf = require('rimraf');
var mkdirp = require('mkdirp');
var program = CommanderWrapper(function(commander) {
return commander
.option('-w, --watch', 'watch docs for changes and rebuild')
.option('-d, --dist <dist-dir>', 'directory to build api docs to', path.join(config.dist, 'docs', 'manual'))
.arguments('<src-dir>')
.action(function(src) {
this.src = src;
});
});
if (!program.src) {
program.src = config.docs.src;
}
var command = [
'remark',
'--no-config',
'--no-color',
'--rc-path', 'remark-build.config.js',
];
var build = function() {
rimraf.sync(program.dist);
mkdirp.sync(program.dist);
var files = GetFiles(path.join(program.src, '**', '*.md'));
if (!files.length) {
log.error('Source directory ' + program.src + ' does not exist or contains no md files!');
process.exit(1);
}
log.info('Building...');
var promises = [];
// we have to build each file individually, as there is no directory style output
// aka src/docs/test/index.md would flatten to dist/docs/manual/index.html
// rather than dist/docs/manual/test/index.html
files.forEach(function(src) {
var dist = path.dirname(src.replace(program.src, program.dist));
mkdirp.sync(dist);
promises.push(Run.one(command.concat([src, '--output', dist]), {silent: true, write: dist}));
});
Promise.all(promises).then(function() {
log.info('Wrote: ' + program.dist);
});
};
if (program.watch) {
Watch(path.join(program.src, '**', '*.md'), build);
} else {
build();
}
<file_sep># sb-build-test-all(1) - Build all unit test files for a project
## SYNOPSIS
sb-build-test-all [-l, --log-level <level>] [-h,--help] [-q,--quiet] [-V,--version]
[-w, --watch]
## DESCRIPTION
Build all of the following:
* bundles - see sb-build-test-bundles(1)
* browser - see sb-build-test-browser(1)
## OPTIONS
-l, --log-level <level>
The level of log messages that you want to see. Can be none, fatal, error,
warn, info, verbose, or debug. Defaults to info.
-h, --help
View the help information for this binary
-V, --version
View the version of this binary
-q, --quiet
Do not log any messages to stdout or stderr
-w, --watch
Watch files for changes and incrementally rebuild on file change.
Failed builds with this active will not exit.
## EXAMPLES
Get the current version of spellbook
sb-build-test-all -V
sb-build-test-all --version
Get help for this binary
sb-build-test-all --help
sb-build-test-all -h
Set the log level for this binary
sb-build-test-all -l info
sb-build-test-all -l fatal
sb-build-test-all --log-level debug
sb-build-test-all --log-level error
Don't output anything
sb-build-test-all -q
sb-build-test-all --quiet
Incrementally rebuild
sb-build-test-all --watch
sb-build-test-all -w
## ENVIRONMENT AND CONFIGURATION VARIABLES
SB_LOG_LEVEL='info'
An environment variable that sets the log level to use for all videojs-spellbook
binaries. Can be set to fatal, error, warn, info, verbose, debug, or none.
<package.json>.spellbook.log-level=info
A package.json variable that sets the default log level to use for all videojs-spellbook
binaries. Can be set to fatal, error, warn, info, verbose, debug, or none.
<package.json>.spellbook.ie8=false
Should spellbook make sure that IE8 is supported. Defaults to false.
<package.json>.spellbook.shim-videojs=true
Makes sure that video.js is included in unit tests but will not be bundled into
distribution js files. Defaults to true.
<package.json>.spellbook.test='{}'
test configuration to use in spellbook. If this is set to a false value then test
builds will never be attempted.
<package.json>.spellbook.test.src='test/'
Source directory to use for test files, set in package.json. If this is unset
'test/' will be used. If this directory does not exist test will not be built.
<package.json>.spellbook.js='{}'
JS configuration to use in spellbook. If this is set to a false value
then bundler builds will never be attempted.
<package.json>.spellbook.js.src='src/js'
Source directory to use for js files, set in package.json. If this is unset
'src/js' will be used. If this directory does not exist bundler builds will not be attempted.
## SEE ALSO
sb-build-test(1), sb-build-test-browser(1), sb-build-test-bundles(1)
## EXIT
0 - all commands succeeded
1 - one or more sub-command failed
## Spellbook
Part of the sb(1) suite
<file_sep>module.exports = {
"rules": {
// number
"number-leading-zero": "always",
"number-no-trailing-zeros": true,
// "number-zero-length-no-unit": true,
// string
"string-no-newline": true,
"string-quotes": "double",
// declaration
"declaration-bang-space-after": "never",
"declaration-bang-space-before": "always",
"declaration-colon-space-after": "always",
"declaration-colon-space-before": "never",
// declaration block
"declaration-block-no-duplicate-properties": true,
"declaration-block-properties-order": "alphabetical",
"declaration-block-semicolon-newline-after": "always",
"declaration-block-semicolon-newline-before": "never-multi-line",
"declaration-block-semicolon-space-after": "never-single-line",
"declaration-block-semicolon-space-before": "never",
"declaration-block-trailing-semicolon": "always",
// block
"block-closing-brace-newline-after": "always",
"block-closing-brace-newline-before": "always",
"block-no-empty": true,
"block-no-single-line": true,
"block-opening-brace-newline-after": "always",
"block-opening-brace-newline-before": "always-single-line",
"block-opening-brace-space-before": "always",
// selector
"selector-combinator-space-after": "always",
"selector-combinator-space-before": "always",
"selector-pseudo-element-colon-notation": "single",
"selector-type-case": "lower",
// selector list
"selector-list-comma-newline-after": "always",
"selector-list-comma-newline-before": "never-multi-line",
"selector-list-comma-space-before": "never",
// media feature
"media-feature-colon-space-after": "always",
"media-feature-colon-space-before": "never",
"media-feature-no-missing-punctuation": true,
"media-feature-range-operator-space-after": "always",
"media-feature-range-operator-space-before": "never",
// at rule
"at-rule-empty-line-before": "always",
"at-rule-semicolon-newline-after": "always",
// comment
"comment-empty-line-before": "always",
"comment-whitespace-inside": "always",
// general
"indentation": 2,
"max-empty-lines": 1,
"max-line-length": [90, {
"ignore": "non-comments",
"severity": "warning"
}],
"max-nesting-depth": 5,
"no-duplicate-selectors": true,
"no-eol-whitespace": true,
"no-invalid-double-slash-comments": true,
"stylelint-disable-reason": "always-after"
}
};
<file_sep>#!/usr/bin/env node
var config = require('./get-config')();
var PathsExist = require('./paths-exist');
var Watch = require('./watch');
var log = require('./log');
var exorcistHelper = require('./exorcist-helper');
var Run = require('./run');
var mkdirp = require('mkdirp');
var rimraf = require('rimraf');
var Promise = require('bluebird');
var path = require('path');
// src, dist, watch, noStart
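//   src     - entry css file to run through postcss
//   dist    - output path without extension; '.css' and '.min.css' are appended
//   watch   - incremental mode: skips minification and does not exit on failure
//   noStart - set when a pre-processor (e.g. sass) has already started the build,
//             so the initial cleanup and 'Building...' log are skipped here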
var postcssHelper = function(options) {
var banner = '';
Object.keys(config.bannerObj).forEach(function(k) {
banner += '@' + k + ' ' + config.bannerObj[k] + '\n';
});
banner = banner.replace(/\n$/, '');
var postcssCmd = [
'postcss',
'--map',
'--use', 'postcss-banner',
'--postcss-banner.banner', banner,
'--postcss-banner.important', 'true',
'--use', 'autoprefixer',
'--autoprefixer.browsers', config.browserList.join(', '),
'--use', 'postcss-import',
'--output', options.dist + '.css',
options.src
];
var cssnanoCmd = [
'postcss',
'--map',
'--use', 'cssnano',
'--cssnano.safe', 'true',
'--output', options.dist + '.min.css',
options.dist + '.css'
];
// if we are not starting with postcss
// aka sass and other pre-processors
if (!options.noStart) {
log.info('Building...');
['.css', '.css.map', '.min.css', '.min.css.map'].forEach(function(ext) {
rimraf.sync(options.dist + ext);
});
}
mkdirp.sync(path.dirname(options.dist));
// NOTE:
// exorcist has to be done after the min file
// as cssnano only uses internal maps
Run.one(postcssCmd, {silent: true, toLog: options.watch, nonFatal: options.watch}).then(function(retval) {
if (retval.status === 0) {
log.info('Wrote: ' + options.dist + '.css');
}
if (options.watch) {
if (retval.status !== 0) {
return Promise.reject();
}
return Promise.resolve();
}
return Run.one(cssnanoCmd, {silent: true}).then(function() {
log.info('Wrote: ' + options.dist + '.min.css');
return exorcistHelper(options.dist + '.min.css');
});
}).then(function() {
if (!options.watch) {
return exorcistHelper(options.dist + '.css');
}
}).catch(function() {
log.error('Build Failed!');
if (options.watch) {
// do nothing
return;
}
process.exit(1);
});
};
module.exports = postcssHelper;
<file_sep># sb-build-test(1) - Build Unit tests files for a project
## SYNOPSIS
sb-build-test [-l, --log-level <level>] [-h,--help] [-q,--quiet] [-V,--version]
[<sub-command='all'> [<args...>]] [help <sub-command>]
## DESCRIPTION
Run one of the following sub-commands for test builds
* browser - see sb-build-test-browser(1)
* bundlers - see sb-build-test-bundlers(1)
* all - see sb-build-test-all(1)
By default if no command is passed in `sb-build-test` will be run as if it was
passed `all`.
## OPTIONS
-l, --log-level <level>
The level of log messages that you want to see. Can be none, fatal, error,
warn, info, verbose, or debug. Defaults to info.
-h, --help
View the help information for this binary
-V, --version
View the version of this binary
-q, --quiet
Do not log any messages to stdout or stderr
<sub-command='all'> [<args...>]
The sub-command to run. Defaults to all if nothing is passed in.
[<args....>] will be passed along to sub command.
help <sub-command>
Get help for a sub-command, this is an alias for running `sb-build-test <sub-command> --help`.
## EXAMPLES
Build test assets with sub command
sb-build-test browser [<args...>]
sb-build-test bundlers [<args...>]
sb-build-test all [<args...>]
Get help for a command
sb-build-test help browser
sb-build-test help bundlers
sb-build-test all --help
Get the current version of spellbook
sb-build-test -V
sb-build-test --version
Get help for this binary
sb-build-test --help
sb-build-test -h
Set the log level for this binary
sb-build-test -l info
sb-build-test -l fatal
sb-build-test --log-level debug
sb-build-test --log-level error
Don't output anything
sb-build-test -q
sb-build-test --quiet
## ENVIRONMENT AND CONFIGURATION VARIABLES
SB_LOG_LEVEL='info'
An environment variable that sets the log level to use for all videojs-spellbook
binaries. Can be set to fatal, error, warn, info, verbose, debug, or none.
<package.json>.spellbook.log-level=info
A package.json variable that sets the default log level to use for all videojs-spellbook
binaries. Can be set to fatal, error, warn, info, verbose, debug, or none.
<package.json>.spellbook.ie8=false
Should spellbook make sure that IE8 is supported. Defaults to false.
<package.json>.spellbook.shim-videojs=true
Makes sure that video.js is included in unit tests but will not be bundled into
distribution js files. Defaults to true.
<package.json>.spellbook.test='{}'
test configuration to use in spellbook. If this is set to a false value then test
builds will never be attempted.
<package.json>.spellbook.test.src='test/'
Source directory to use for test files, set in package.json. If this is unset
'test/' will be used. If this directory does not exist test will not be built.
<package.json>.spellbook.js='{}'
JS configuration to use in spellbook. If this is set to a false value
then bundler builds will never be attempted.
<package.json>.spellbook.js.src='src/js'
Source directory to use for js files, set in package.json. If this is unset
'src/js' will be used. If this directory does not exist bundler builds will not be attempted.
## SEE ALSO
sb-build-all(1), sb-build(1), sb-build-test-all(1), sb-build-test-browser(1),
sb-build-test-bundlers(1)
## EXIT
0 - all commands succeeded
1 - one or more sub-command failed
## Spellbook
Part of the sb(1) suite
<file_sep>#!/usr/bin/env node
var CommanderWrapper = require('./utils/commander-wrapper');
var program = CommanderWrapper(function(commander) {
return commander
.command('docs', 'lint docs, and js examples in docs')
.command('lang', 'lint json lang files')
.command('js', 'lint src js')
.command('test', 'lint test js')
.command('css', 'lint css')
.command('all', 'lint css', {isDefault: true});
});
<file_sep># sb-build-all(1) - Build all assets for a project
## SYNOPSIS
sb-build-all [-l, --log-level <level>] [-h,--help] [-q,--quiet] [-V,--version]
[-w, --watch]
## DESCRIPTION
This is run when sb-build(1) is not passed any arguments or is passed all
as the command to run.
This command will build css, js, lang, test, and docs depending on what is available.
> NOTE: If no commands can be run, an error will be logged and this binary will exit
> with a failure return code.
## OPTIONS
-l, --log-level <level>
The level of log messages that you want to see. Can be none, fatal, error,
warn, info, verbose, or debug. Defaults to info.
-h, --help
View the help information for this binary
-V, --version
View the version of this binary
-q, --quiet
Do not log any messages to stdout or stderr
-w, --watch
Watch files for changes and incrementally rebuild on file change.
Failed builds with this active will not exit.
## EXAMPLES
Get the current version of spellbook
sb-build-all -V
sb-build-all --version
Get help for this binary
sb-build-all --help
sb-build-all -h
Set the log level for this binary
sb-build-all -l info
sb-build-all -l fatal
sb-build-all --log-level debug
sb-build-all --log-level error
Don't output anything
sb-build-all -q
sb-build-all --quiet
Watch builds for changes and incrementally rebuild
sb-build-all -w
sb-build-all --watch
## ENVIRONMENT AND CONFIGURATION VARIABLES
SB_LOG_LEVEL='info'
An environment variable that sets the log level to use for all videojs-spellbook
binaries. Can be set to none, fatal, error, warn, info, verbose, or debug.
<package.json>.spellbook.log-level=info
A package.json variable that sets the default log level to use for all videojs-spellbook
binaries. Can be set to none, fatal, error, warn, info, verbose, or debug.
<package.json>.spellbook.ie8=false
Should spellbook make sure that IE8 is supported. Defaults to false.
<package.json>.spellbook.shim-videojs=true
Makes sure that video.js is included in unit tests but will not be bundled into
distribution js files. Defaults to true.
<package.json>.spellbook.css='{}'
CSS configuration to use in spellbook. If this is set to a false value then css
builds will never be attempted.
<package.json>.spellbook.css.src='src/css'
Source directory to use for css files, set in package.json. If this is unset
'src/css' will be used. If this directory does not exist css will not be built.
<package.json>.spellbook.js='{}'
JS configuration to use in spellbook. If this is set to a false value then js
builds will never be attempted.
<package.json>.spellbook.js.src='src/js'
Source directory to use for js files, set in package.json. If this is unset
'src/js' will be used. If this directory does not exist js will not be built.
<package.json>.spellbook.lang='{}'
Lang configuration to use in spellbook. If this is set to a false value then lang
builds will never be attempted.
<package.json>.spellbook.lang.src='lang/'
Source directory to use for lang files, set in package.json. If this is unset
'lang/' will be used. If this directory does not exist lang will not be built.
<package.json>.spellbook.docs='{}'
Documentation configuration to use in spellbook. If this is set to a false value
then docs builds will never be attempted.
<package.json>.spellbook.docs.src='docs/'
Source directory to use for docs files, set in package.json. If this is unset
'docs/' will be used. If this directory does not exist docs will not be built.
<package.json>.spellbook.test='{}'
Test configuration to use in spellbook. If this is set to a false value
then test builds will never be attempted.
<package.json>.spellbook.test.src='test/'
Source directory to use for test files, set in package.json. If this is unset
'test/' will be used. If this directory does not exist tests will not be built.
## SEE ALSO
sb-build-css-all(1), sb-build-js-all(1), sb-build-docs-all(1), sb-build-lang-all(1),
sb-build-test-all(1)
## EXIT
0 - all commands succeeded
1 - one or more sub-command failed, unless --watch is active
## Spellbook
Part of the sb(1) suite
|
53300fc23bca9d0567a1a14fdde0442bfeb063b0
|
[
"JavaScript",
"Markdown"
] | 48
|
JavaScript
|
forbesjo/spellbook
|
bca41fcd97a676b588bbdeb2aaa1dc416f09abf1
|
fe236594c4ee017b3d1c37fb2081797723a2f62e
|
refs/heads/master
|
<repo_name>MuhammadMansoorAli/Things-Of-Interest<file_sep>/app/src/main/java/com/oreo/thingsofinterest/Parser.java
package com.oreo.thingsofinterest;
import android.app.Activity;
import android.os.AsyncTask;
import android.view.View;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.ProgressBar;
import com.google.gson.Gson;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import backend.KeysValues;
import backend.ThingCompoundKeyVals;
import backend.ThingCompoundProperty;
import backend.ThingFacts;
import backend.ThingObjectProperty;
import backend.ThingProperty;
import backend.TopicAPI;
/**
* Created by decent on 11/24/2015.
*
* AsyncTask that fetches a Freebase topic in the background, splits its
* properties into compound and object properties, and binds the parsed
* result to the supplied ListView once parsing completes.
*/
public class Parser extends AsyncTask<String, Void, String> {
HashMap<String, ThingCompoundProperty> compoundProps = new HashMap<>();
ArrayList<ThingObjectProperty> objectProps = new ArrayList<>();
ListView lv;
Activity a;
ProgressBar pb;
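// Freebase property keys that hold metadata rather than user-facing facts; any
// property whose trailing key segment matches one of these is skipped while parsing.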
String[] blackList = new String[]{"creator",
"alias",
"mid",
"guid",
"timestamp",
"attribution",
"id",
"permission",
"type",
"key",
"image",
"daylife_topic",
"article",
"topic_equivalent_webpage"
};
Parser(ListView _lv, Activity _a, ProgressBar _pb) {
lv = _lv;
a = _a;
pb = _pb;
}
public StringBuilder freebaseApiCall(String queryString) {
TopicAPI topicAPI = new TopicAPI(queryString);
return topicAPI.searchThisTopic();
}
public void callAPI_returnResult(String queryString) throws JSONException {
StringBuilder sb = freebaseApiCall(queryString);
JSONObject reader = new JSONObject(sb.toString());
JSONObject property = reader.getJSONObject("property");
Iterator<String> iter = property.keys();
HashMap<String, ThingObjectProperty> hmap = new HashMap<>();
while (iter.hasNext()) {
String propKey = iter.next();
JSONObject p = property.getJSONObject(propKey);
JSONArray values = p.getJSONArray("values");
for (int i = 0; values != null && i < values.length(); i++) {
JSONObject jobj = values.getJSONObject(i);
String type = p.has("valuetype") ? p.getString("valuetype") : "object";
String key = returnFormattedKey(propKey);
if (!Arrays.asList(blackList).contains(trim_key(propKey))) {
if (type.equals("compound")) {
traverseCompoundProperty(propKey, jobj);
} else {
traverseObjectProperty(key, jobj, hmap);
}
}
}
}
objectProps = new ArrayList<>(hmap.values());
}
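// Flattens one value of a compound Freebase property into the matching
// ThingCompoundProperty, inserting "---"/"---" rows as separators between successive entries.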
public void traverseCompoundProperty(String propKey, JSONObject jobj) throws JSONException {
String[] arr1 = propKey.split("/");
String trimBaseKey = arr1[arr1.length - 1];
if (!Arrays.asList(blackList).contains(trimBaseKey)) {
if (!compoundProps.containsKey(trimBaseKey)) {
String sanitizedBaseKey = trimBaseKey.replace("_", " ");
sanitizedBaseKey = UppercaseFirstLetters(sanitizedBaseKey);
compoundProps.put(trimBaseKey, new ThingCompoundProperty(sanitizedBaseKey));
}
ArrayList<String> keysOfTheseCompound = compoundProps.get(trimBaseKey).getKeys();
// Compare the sentinel by value; reference comparison on strings is unreliable.
if (keysOfTheseCompound.size() > 0 && !keysOfTheseCompound.get(keysOfTheseCompound.size() - 1).equals("---")) {
compoundProps.get(trimBaseKey).addKeyValue("---", "---");
}
JSONObject subProp = jobj.getJSONObject("property");
Iterator<String> iter1 = subProp.keys();
while (iter1.hasNext()) {
String propKey1 = iter1.next();
JSONObject p1 = subProp.getJSONObject(propKey1);
JSONArray values1 = p1.getJSONArray("values");
for (int j = 0; values1 != null && j < values1.length(); j++) {
JSONObject jobj1 = values1.getJSONObject(j);
String val1 = jobj1.getString("text");
String[] arr = propKey1.split("/");
String trimKey = arr[arr.length - 1];
if (!Arrays.asList(blackList).contains(trimKey)) {
trimKey = trimKey.replace("_", " ");
trimKey = UppercaseFirstLetters(trimKey);
compoundProps.get(trimBaseKey).addKeyValue(trimKey, val1);
}
}
}
}
}
public void traverseObjectProperty(String key, JSONObject jobj, HashMap<String, ThingObjectProperty> hmap) throws JSONException {
ThingObjectProperty to = new ThingObjectProperty();
to.key = key;
JSONArray jarray = jobj.has("values") ? jobj.getJSONArray("values") : null;
String val = jobj.getString("text");
if (jarray != null) {
for (int j = 0; j < jarray.length(); j++) {
JSONObject jo = jarray.getJSONObject(j);
// Add the text of each nested value rather than repeating the parent text for every element.
to.values.add(jo.optString("text", val));
}
} else {
to.values.add(val);
}
if (hmap.containsKey(key)) {
hmap.get(key).values.add(val);
} else {
hmap.put(key, to);
}
objectProps.add(to);
}
String trim_key(String s) {
String[] arr = s.split("/");
String trimKey = arr[arr.length - 1];
return trimKey;
}
String returnFormattedKey(String s) {
String[] arr = s.split("/");
String trimKey = arr[arr.length - 1];
String actualKey = trimKey.replace("_", " ");
return UppercaseFirstLetters(actualKey);
}
public static String UppercaseFirstLetters(String str) {
boolean prevWasWhiteSp = true;
char[] chars = str.toCharArray();
for (int i = 0; i < chars.length; i++) {
if (Character.isLetter(chars[i])) {
if (prevWasWhiteSp) {
chars[i] = Character.toUpperCase(chars[i]);
}
prevWasWhiteSp = false;
} else {
prevWasWhiteSp = Character.isWhitespace(chars[i]);
}
}
return new String(chars);
}
@Override
protected void onPostExecute(String s) {
super.onPostExecute(s);
RefinedThingAdapter ta = new RefinedThingAdapter(a, R.layout.compound_thing, compileThingFacts().properties);
pb.setVisibility(View.GONE);
lv.setAdapter(ta);
}
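// Collapses the parsed compound and object properties into a single ThingFacts;
// the "---" separator rows produced during parsing delimit the key/value groups
// inside each compound property.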
public ThingFacts compileThingFacts()
{
ArrayList<ThingCompoundProperty> tcps = new ArrayList(compoundProps.values());
ArrayList<ThingProperty> thingProps = new ArrayList<>();
ThingCompoundKeyVals tp = null;
for (ThingCompoundProperty tcp : tcps) {
tp = new ThingCompoundKeyVals(tcp.name);
KeysValues kv = new KeysValues();
for (int i = 0; i < tcp.keys.size() && i < tcp.values.size(); i++) {
if (i == 0) {
tp.addKeyValue(kv);
}
if (tcp.keys.get(i).equals("---") && tcp.values.get(i).equals("---")) {
kv = new KeysValues();
} else {
if(!tp.keyValues.contains(kv))
tp.addKeyValue(kv);
kv.keys.add(tcp.getKeys().get(i));
kv.values.add(tcp.getValues().get(i));
}
}
thingProps.add(tp);
}
for (ThingObjectProperty top : objectProps) {
thingProps.add(top);
}
ThingFacts thingFacts = new ThingFacts("", "", thingProps);
// Gson gson = new Gson();
// String user_json = gson.toJson(thingFacts);
//
// ThingFacts tf = gson.fromJson(user_json, ThingFacts.class);
// int k = 0;
return thingFacts;
}
@Override
protected String doInBackground(String... params) {
try {
String q = params[0].replace(' ', '_');
q = q.toLowerCase();
callAPI_returnResult(q);
} catch (JSONException e) {
e.printStackTrace();
}
return null;
}
}
<file_sep>/app/src/main/java/com/oreo/thingsofinterest/SearchAPI_Call.java
package com.oreo.thingsofinterest;
import android.app.Activity;
import android.os.AsyncTask;
import android.view.View;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.ProgressBar;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
/**
* Created by decent on 11/24/2015.
*/
public class SearchAPI_Call extends AsyncTask<String,Void,String>{
HashMap<String,CompoundThing> compoundThings = null;
LinearLayout ll;
Activity a;
ProgressBar pb;
String [] blackList = new String[] {"creator",
"alias",
"mid",
"guid",
"timestamp",
"attribution",
"id",
"permission",
"type",
"key",
"image",
"daylife_topic",
"article",
"topic_equivalent_webpage"
};
SearchAPI_Call(LinearLayout _ll, Activity _a, ProgressBar _pb)
{
ll = _ll;
a = _a;
pb = _pb;
}
public String callAPI_returnResult(String queryString) throws JSONException {
final StringBuilder sb=new StringBuilder();
URL url = null;
String inputLine;
try{
//url = new URL("https://www.googleapis.com/freebase/v1/topic/en/steve_jobs");
url = new URL("https://www.googleapis.com/freebase/v1/topic" + queryString + "?key=<KEY>");
} catch (MalformedURLException e) {
e.printStackTrace();
}
BufferedReader in;
try {
URLConnection con = url.openConnection();
con.setReadTimeout( 5000 ); // 5 seconds
in = new BufferedReader(new InputStreamReader(con.getInputStream()));
while ((inputLine = in.readLine()) != null) {
sb.append(inputLine+"\n");
}
in.close();
} catch (IOException e) {
e.printStackTrace();
}
catch (Exception e) {
e.printStackTrace();
}
//String res = sb.toString();
// String res = sb.toString();
ArrayList<String> keysArr = new ArrayList<>();
ArrayList<String> valuesArr = new ArrayList<>();
JSONObject reader = new JSONObject(sb.toString());
String res = reader.getString("id") + "\n";
JSONObject property = reader.getJSONObject("property");
int propCount = property.length();
HashMap<String,String> keyValues = new HashMap<String,String>();
compoundThings = new HashMap<>();
compoundThings.put("khaliVals",new CompoundThing("General Info"));
Iterator<String> iter = property.keys();
while(iter.hasNext())
{
String propKey = iter.next();
JSONObject p = property.getJSONObject(propKey);
JSONArray values = p.getJSONArray("values");
for(int i = 0 ;values != null && i < values.length() ; i++)
{
JSONObject jobj = values.getJSONObject(i);
String val = null;
// if(propKey.contains("description"))
// val = jobj.getString("value");
// else
val = jobj.getString("text");
String type = p.has("valuetype") ? p.getString("valuetype") : "object";
if(type.compareTo("compound") == 0)
{
String[] arr1 = propKey.split("/");
String trimBaseKey = arr1[arr1.length - 1];
keyValues.put(trimBaseKey," => ");
if (!compoundThings.containsKey(trimBaseKey)) {
String sanitizedBaseKey = trimBaseKey.replace("_"," ");
sanitizedBaseKey = UppercaseFirstLetters(sanitizedBaseKey);
compoundThings.put(trimBaseKey, new CompoundThing(sanitizedBaseKey));
}
ArrayList<String> keysOfTheseCompound = compoundThings.get(trimBaseKey).getKeys();
if(keysOfTheseCompound.size() > 0 && !keysOfTheseCompound.get(keysOfTheseCompound.size() - 1).equals("---"))
compoundThings.get(trimBaseKey).addKeyValue("---","---");
keysArr.add(trimBaseKey);
valuesArr.add("=>");
JSONObject subProp = jobj.getJSONObject("property");
Iterator<String> iter1 = subProp.keys();
while(iter1.hasNext()) {
String propKey1 = iter1.next();
JSONObject p1 = subProp.getJSONObject(propKey1);
JSONArray values1 = p1.getJSONArray("values");
//ArrayList<String> keysOfTheseCompound = compoundThings.get(trimBaseKey).getKeys();
// if(keysOfTheseCompound.size() > 0 && keysOfTheseCompound.get(keysOfTheseCompound.size() - 1) != "---")
//compoundThings.get(trimBaseKey).addKeyValue("---","---");
for(int j = 0 ;values1 != null && j < values1.length() ; j++) {
JSONObject jobj1 = values1.getJSONObject(j);
String val1 = jobj1.getString("text");
String[] arr = propKey1.split("/");
String trimKey = arr[arr.length - 1];
// Check membership directly; wrapping a map or list in Arrays.asList() never matches individual keys.
if (!keyValues.containsKey(trimKey) && !keysArr.contains(trimKey) && !valuesArr.contains(val1)) {
if (!Arrays.asList(blackList).contains(trimKey)) {
trimKey = trimKey.replace("_", " ");
trimKey = UppercaseFirstLetters(trimKey);
keyValues.put("\t" + trimKey, val1);
keysArr.add("\t" + trimKey);
valuesArr.add(val1);
compoundThings.get(trimBaseKey).addKeyValue(trimKey,val1);
}
}
}
}
} //
else {
String[] arr = propKey.split("/");
String trimKey = arr[arr.length - 1];
String actualKey = trimKey.replace("_", " ");
actualKey = UppercaseFirstLetters(actualKey);
if (!keyValues.containsKey(actualKey) && !keysArr.contains(actualKey) && !valuesArr.contains(val)) {
if (!Arrays.asList(blackList).contains(trimKey)) {
keyValues.put(actualKey, val);
keysArr.add(actualKey);
valuesArr.add(val);
compoundThings.get("khaliVals").addKeyValue(actualKey,val);
}
}
}
}
}
// Iterator it = keyValues.entrySet().iterator();
// while (it.hasNext()) {
// HashMap.Entry pair = (HashMap.Entry)it.next();
//
// res += pair.getKey() + " = " + pair.getValue() + "\n\n";
//
// }
for(int j=0;j<keysArr.size();j++)
{
res += keysArr.get(j) + " = " + valuesArr.get(j) + "\n\n";
}
return res;
}
public static String UppercaseFirstLetters(String str)
{
boolean prevWasWhiteSp = true;
char[] chars = str.toCharArray();
for (int i = 0; i < chars.length; i++) {
if (Character.isLetter(chars[i])) {
if (prevWasWhiteSp) {
chars[i] = Character.toUpperCase(chars[i]);
}
prevWasWhiteSp = false;
} else {
prevWasWhiteSp = Character.isWhitespace(chars[i]);
}
}
return new String(chars);
}
@Override
protected void onPostExecute(String s) {
super.onPostExecute(s);
//tv.setText(s == null ? " not found " : s);
ArrayList<CompoundThing> cts = new ArrayList<>(compoundThings.values());
int ind = cts.indexOf(compoundThings.get("khaliVals"));
CompoundThing temp = cts.get(0);
cts.set(0, compoundThings.get("khaliVals"));
cts.set(ind, temp);
// cts.clear();
// CompoundThing t=new CompoundThing("shazib");
// t.addKeyValue("1","2");
// cts.add(t);
pb.setVisibility(View.GONE);
CompoundAdapter ca = new CompoundAdapter(a,R.layout.compound_thing,cts);
//lv.setAdapter(ca);
for (int i = 0; i < ca.getCount(); i++) {
View item = ca.getView(i, null, null);
ll.addView(item);
}
}
@Override
protected String doInBackground(String... params) {
String res = null;
try {
String q = params[0].replace(' ','_');
q = q.toLowerCase();
res = callAPI_returnResult(q);
res = q + "\n" + res;
} catch (JSONException e) {
e.printStackTrace();
}
return res;
}
}
<file_sep>/app/src/main/java/com/oreo/thingsofinterest/OneTimeApplication.java
package com.oreo.thingsofinterest;
import android.app.Application;
import com.facebook.FacebookSdk;
/**
* Created by <NAME> on 2/24/2016.
*/
public class OneTimeApplication extends Application {
@Override
public void onCreate() {
super.onCreate();
FacebookSdk.sdkInitialize(getApplicationContext());
// Initialize the SDK before executing any other operations,
// especially, if you're using Facebook UI elements.
}
}
<file_sep>/app/src/main/java/backend/ThingCompoundProperty.java
package backend;
import android.view.View;
import java.util.ArrayList;
/**
* Created by <NAME> on 12/11/2015.
*/
public class ThingCompoundProperty implements ThingProperty{
public ArrayList<String> keys = new ArrayList<>();
public ArrayList<String> values = new ArrayList<>(); // key Value pairs
public String name;
public void addKeyValue(String key,String value)
{
keys.add(key);
values.add(value);
}
public ArrayList<String> getKeys() {
return keys;
}
public void setKeys(ArrayList<String> keys) {
this.keys = keys;
}
public ArrayList<String> getValues() {
return values;
}
public void setValues(ArrayList<String> values) {
this.values = values;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public ThingCompoundProperty(String _name)
{
name = _name;
}
@Override
public View getView() {
return null;
}
}
<file_sep>/app/src/main/java/login/LoadPagesFragment.java
package login;
import android.app.Activity;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.facebook.AccessToken;
import com.facebook.GraphRequest;
import com.facebook.GraphResponse;
import com.facebook.HttpMethod;
import com.github.jlmd.animatedcircleloadingview.AnimatedCircleLoadingView;
import com.oreo.thingsofinterest.DB_Handler;
import com.oreo.thingsofinterest.FacebookPage;
import com.oreo.thingsofinterest.R;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.ArrayList;
/**
* A simple {@link Fragment} subclass.
* Use the {@link LoadPagesFragment#newInstance} factory method to
* create an instance of this fragment.
*/
public class LoadPagesFragment extends Fragment {
private AnimatedCircleLoadingView loadingView;
private DB_Handler db;
private Intro activity;
public static LoadPagesFragment newInstance(Activity context) {
LoadPagesFragment fragment = new LoadPagesFragment();
fragment.setContext(context);
return fragment;
}
public LoadPagesFragment() {
// Required empty public constructor
}
ArrayList<Thread> threads = new ArrayList<>();
public void setContext(Activity activity){
this.activity=(Intro)activity;
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
}
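// Pages through the user's Facebook likes via the Graph API, 100 entries per request,
// persisting each batch on a background thread and recursing with the "after" cursor
// until no further page exists.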
private void getFacebookPages(String after){
Bundle param=new Bundle();
param.putString("fields", "name,category");
param.putString("limit","100");
if(after!=null){
param.putString("after",after);
param.putInt("pretty",0);
}
new GraphRequest(
AccessToken.getCurrentAccessToken(),
"/me/likes",
param,
HttpMethod.GET,
new GraphRequest.Callback() {
@Override
public void onCompleted(GraphResponse response) {
// TODO Auto-generated method stub
JSONObject json = response.getJSONObject();
try{
JSONArray jarray = json.getJSONArray("data");
ArrayList<FacebookPage> currentFacebookPages=new ArrayList<FacebookPage>();
for(int i = 0; i < jarray.length(); i++){
JSONObject oneFacebookPage = jarray.getJSONObject(i);
String category=oneFacebookPage.getString("category");
FacebookPage p=new FacebookPage();
p.category=category;
p.name=oneFacebookPage.getString("name");
currentFacebookPages.add(p);
}
Thread t = new StoreInDB(currentFacebookPages,db);
t.start();
threads.add(t);
//currentFacebookPages=null;
if(json.getJSONObject("paging").has("next")){
String after=json.getJSONObject("paging").getJSONObject("cursors").getString("after");
if(after!=null) {
getFacebookPages(after);
}
}
else {
Log.v("MyTask => ", " executed before start");
new MyTask(threads, loadingView).execute();
Log.v("MyTask => ", " executed start");
SharedPreferences sp=activity.getSharedPreferences("DB",Context.MODE_PRIVATE);
SharedPreferences.Editor editor=sp.edit();
editor.putBoolean("IS_WRITTEN",true);
editor.apply(); // persist the flag; without a commit the edit is silently discarded
}
}
catch(JSONException e){
loadingView.stopFailure();
e.printStackTrace();
}
}
}
).executeAsync();
}
private class MyTask extends AsyncTask<Void,Void,Void>
{
ArrayList<Thread> thds;
AnimatedCircleLoadingView aclv;
MyTask(ArrayList<Thread> thds, AnimatedCircleLoadingView aclv)
{
this.aclv = aclv;
this.thds = thds;
}
@Override
protected Void doInBackground(Void... voids) {
Log.v("MyTask => ", " start");
for(Thread t : thds)
{
try {
t.join();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
Log.v("MyTask => ", " joined");
return null;
}
@Override
protected void onPostExecute(Void aVoid) {
super.onPostExecute(aVoid);
aclv.stopOk();
Log.v("MyTask => ", " stopOk()");
activity.setProgressButtonEnabled(true);
Log.v("MyTask => ", " setEnb");
}
}
public void getFacebookPages(){
activity.setProgressButtonEnabled(false);
loadingView.startIndeterminate();
db=new DB_Handler(activity);
new Thread(new Runnable() {
@Override
public void run() {
Log.v("FacebookPages","Getting FacebookPages...");
getFacebookPages(null);
}
}).start();
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View view=inflater.inflate(R.layout.load_pages,container,false);
loadingView=(AnimatedCircleLoadingView)view.findViewById(R.id.circle_loading_view);
return view;
}
private class StoreInDB extends Thread{
ArrayList<FacebookPage> pages;
DB_Handler db;
StoreInDB(ArrayList<FacebookPage> pages,DB_Handler db){
this.pages=pages;
this.db=db;
}
@Override
public void run() {
if(!db.insertAllFacebookPages(pages))
Log.e("DB Insertion Error","FacebookPages did not insert in DB");
//pages=null;
}
}
}
<file_sep>/app/src/main/java/login/Intro.java
package login;
import android.content.Intent;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.util.Log;
import com.facebook.AccessToken;
import com.github.paolorotolo.appintro.AppIntro;
import com.github.paolorotolo.appintro.AppIntro2;
import com.oreo.thingsofinterest.MainActivity;
/**
* Created by <NAME> on 26-Feb-16.
*/
public class Intro extends AppIntro2 {
LoadPagesFragment loadPagesFragment;
LoginFragment loginFragment;
@Override
public void init(@Nullable Bundle bundle) {
//addSlide(AppIntroFragment.newInstance("Things of Interest", "Please login with your Facebook account in order to continue", android.R.drawable.gallery_thumb, Color.parseColor("#3F51B5")));
setSwipeLock(true);
loginFragment=LoginFragment.newInstance(this);
addSlide(loginFragment);
loadPagesFragment= LoadPagesFragment.newInstance(this);
addSlide(loadPagesFragment);
if(AccessToken.getCurrentAccessToken()==null){
setProgressButtonEnabled(false);
}
//getPager().setCurrentItem(2);
// nextButton.setOnClickListener(new View.OnClickListener() {
// @Override
// public void onClick(@NonNull View v) {
//
// }
// });
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
loginFragment.onActivityResult(requestCode,resultCode,data);
}
@Override
public void onNextPressed() {
if(AccessToken.getCurrentAccessToken()!=null) {
loadPagesFragment.getFacebookPages();
}
else{
Log.e("Token","No Token");
}
}
@Override
public void onDonePressed() {
Intent i = new Intent(this, MainActivity.class);
startActivity(i);
finish();
}
@Override
public void onSlideChanged() {
}
}
<file_sep>/app/src/main/java/backend/ThingFacts.java
package backend;
import java.util.ArrayList;
/**
* Created by <NAME> on 2/2/2016.
*/
public class ThingFacts {
String name;
String m_id;
public ArrayList<ThingProperty> properties;
public ThingFacts(String name, String m_id, ArrayList<ThingProperty> properties)
{
this.name = name;
this.m_id = m_id;
this.properties = properties;
}
}
<file_sep>/app/src/main/java/com/oreo/thingsofinterest/ThingAdapter.java
package com.oreo.thingsofinterest;
import android.content.Context;
import android.content.Intent;
import android.graphics.Typeface;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import java.util.List;
/**
* Created by <NAME> on 2/3/2016.
*/
public class ThingAdapter extends RecyclerView.Adapter<ThingAdapter.ThingViewHolder> {
private List<Thing> thingsList;
Context c;
RecyclerView mRecyclerView;
public ThingAdapter(List<Thing> thingsList, Context c,RecyclerView pRecyclerView) {
this.thingsList = thingsList;
this.c = c;
this.mRecyclerView = pRecyclerView;
}
@Override
public int getItemCount() {
return thingsList.size();
}
@Override
public void onBindViewHolder(ThingViewHolder thingViewHolder, int i) {
Thing ci = thingsList.get(i);
thingViewHolder.vName.setText(ci.getName());
thingViewHolder.vDesc.setText(ci.getDesc());
thingViewHolder.vType.setText(ci.getType());
thingViewHolder.vImage.setImageDrawable(ci.getImgRes());
}
@Override
public ThingViewHolder onCreateViewHolder(ViewGroup viewGroup, int i) {
View itemView = LayoutInflater.
from(viewGroup.getContext()).
inflate(R.layout.things_cards, viewGroup,false);
// itemView.setOnClickListener(new View.OnClickListener() {
// @Override
// public void onClick(View view) {
// Toast.makeText(c, " dfsdf ", Toast.LENGTH_LONG).show();
//
//
// int itemPosition = mRecyclerView.getChildPosition(view);
//
// Toast.makeText(c, itemPosition + " dfsdf ", Toast.LENGTH_LONG).show();
// }
// });
return new ThingViewHolder(itemView, c);
}
public void addThing(Thing thing){
thingsList.add(0,thing);// add new thing to top of the list
}
public static class ThingViewHolder extends RecyclerView.ViewHolder{
protected TextView vName;
protected TextView vDesc;
protected TextView vType;
protected ImageView vImage;
final Context c;
public ThingViewHolder(View v, final Context c) {
super(v);
this.c = c;
vName = (TextView) v.findViewById(R.id.tvName);
vImage= (ImageView) v.findViewById(R.id.thingImage);
vDesc= (TextView) v.findViewById(R.id.description);
vType= (TextView) v.findViewById(R.id.type);
Typeface font=Typeface.createFromAsset(v.getContext().getAssets(),"fonts/roboto_light.ttf");
vName.setTypeface(font);
vDesc.setTypeface(font);
vType.setTypeface(font);
v.findViewById(R.id.thingCard).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Toast.makeText(view.getContext(), "clicked on card", Toast.LENGTH_SHORT).show();
Intent i = new Intent(c,TestThingDetailActivity.class);
i.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
i.putExtra("mid","/m/061s_");
c.startActivity(i);
}
});
}
}
}<file_sep>/app/src/main/java/com/oreo/thingsofinterest/FreebaseSearch.java
//package com.oreo.thingsofinterest;
//
//import android.os.AsyncTask;
//
///**
// * Created by <NAME> on 2/27/2016.
// */
//public class FreebaseSearch extends AsyncTask<Void,Void,String>{
//
// Freebase
//
// @Override
// protected String doInBackground(Void... voids) {
// return null;
// }
//}
<file_sep>/app/src/main/java/login/LoginFragment.java
package login;
import android.app.Activity;
import android.content.Intent;
import android.graphics.Color;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.LinearLayout;
import com.facebook.CallbackManager;
import com.facebook.FacebookCallback;
import com.facebook.FacebookException;
import com.facebook.login.LoginResult;
import com.facebook.login.widget.LoginButton;
import com.oreo.thingsofinterest.R;
import java.util.Arrays;
import java.util.List;
/**
* A simple {@link Fragment} subclass.
* Use the {@link LoginFragment#newInstance} factory method to
* create an instance of this fragment.
*/
public class LoginFragment extends Fragment {
private CallbackManager callbackManager;
/**
* Use this factory method to create a new instance of
* this fragment using the provided parameters.
*
* @return A new instance of fragment LoginFragment.
*/
Intro intro;
LoginButton loginButton;
public static LoginFragment newInstance(Activity activity) {
LoginFragment fragment = new LoginFragment();
fragment.setActivity((Intro) activity);
return fragment;
}
public LoginFragment(){
}
private void setActivity(Activity activity){
this.intro=(Intro)activity;
}
private void fbLoginListener(){
List<String> read= Arrays.asList("user_likes", "email", "user_about_me", "public_profile");
callbackManager = CallbackManager.Factory.create();
loginButton.setReadPermissions(read);
loginButton.registerCallback(callbackManager,new FacebookCallback<LoginResult>() {
@Override
public void onSuccess(LoginResult loginResult) {
Log.v("FB Login","Logged in");
intro.setProgressButtonEnabled(true);
}
@Override
public void onCancel() {
}
@Override
public void onError(FacebookException e) {
}
});
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
callbackManager.onActivityResult(requestCode, resultCode, data);
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
View view=inflater.inflate(R.layout.fragment_login,container,false);
loginButton=(LoginButton) view.findViewById(R.id.login_button);
fbLoginListener();
LinearLayout m = (LinearLayout) view.findViewById(R.id.main);
m.setBackgroundColor(Color.parseColor("#00BCD4"));
return view;
}
}
<file_sep>/app/src/main/java/backend/SearchAPI.java
package backend;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.os.AsyncTask;
import android.util.Log;
import com.google.api.client.http.GenericUrl;
import com.google.api.client.http.HttpRequest;
import com.google.api.client.http.HttpRequestFactory;
import com.google.api.client.http.HttpResponse;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.http.javanet.NetHttpTransport;
import com.oreo.thingsofinterest.DB_Handler;
import com.oreo.thingsofinterest.Thing;
import com.oreo.thingsofinterest.ThingAdapter;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.ArrayList;
import toi_utilities.ImageUtilities;
/**
* Created by <NAME> on 2/19/2016.
*/
public class SearchAPI extends AsyncTask<String, Void, Void> {
ArrayList<Thing> things = null;
ThingAdapter tla;
final Context c;
int topicId;
public SearchAPI(ArrayList<Thing> things,ThingAdapter tla, Context c,int topicId) {
this.things = things;
this.tla = tla;
this.c = c;
this.topicId = topicId;
}
public void searchThisString(String query)
{
HttpTransport httpTransport = new NetHttpTransport();
HttpRequestFactory requestFactory = httpTransport.createRequestFactory();
GenericUrl url = new GenericUrl("https://www.googleapis.com/freebase/v1/search");
url.put("query", query);
url.put("key", "<KEY>");
Log.v("toi search ", url.build());
HttpRequest request = null;
try {
request = requestFactory.buildGetRequest(url);
HttpResponse httpResponse = request.execute();
parseResult(httpResponse);
} catch (IOException e) {
e.printStackTrace();
}
}
public void parseResult(HttpResponse res) throws IOException {
try {
JSONObject reader = new JSONObject(res.parseAsString());
JSONArray values = reader.getJSONArray("result");
for(int i = 0; values != null && i < 2 && i < values.length(); i++)
{
JSONObject val = values.getJSONObject(i);
if(val.has("id")) {
String id = val.getString("id");
Log.v("topic",id);
String name = val.getString("name");
// Parser p = new Parser(things,id,tla,name);
// p.execute(id);
moreDetails(id, name);
}
}
} catch (JSONException e) {
e.printStackTrace();
}
}
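// Requests the description, image id and notable type for a Freebase topic, builds a
// Thing from them, adds it to the shared list and persists it to the local database
// on a worker thread.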
void moreDetails(String id, String name) throws IOException, JSONException {
HttpTransport httpTransport = new NetHttpTransport();
HttpRequestFactory requestFactory = httpTransport.createRequestFactory();
GenericUrl url = new GenericUrl("https://www.googleapis.com/freebase/v1/topic" + id + "?filter="
+ "/common/topic/image&filter="
+ "/common/topic/notable_types&filter="
+ "/common/topic/description");
url.put("limit", "1");
url.put("key", "<KEY>");
Log.d("toi url -> ", url.build());
HttpRequest request = null;
request = requestFactory.buildGetRequest(url);
HttpResponse httpResponse = request.execute();
JSONObject reader = new JSONObject(httpResponse.parseAsString());
JSONObject property = reader.getJSONObject("property");
String desc = "";
if(property.has("/common/topic/description")) {
JSONObject descObj = property.getJSONObject("/common/topic/description");
if(descObj.has("values"))
{
JSONArray descArr = descObj.getJSONArray("values");
JSONObject jobj = (JSONObject) descArr.get(0);
if(jobj != null)
desc = jobj.getString("text");
}
}
// JSONArray imgArr = property.getJSONObject("/common/topic/image").getJSONArray("values");
// jobj = (JSONObject) imgArr.get(0);
// String imgId = jobj.getString("id");
String imgId = "";
if(property.has("/common/topic/image")) {
JSONObject descObj = property.getJSONObject("/common/topic/image");
if(descObj.has("values"))
{
JSONArray descArr = descObj.getJSONArray("values");
JSONObject jobj = (JSONObject) descArr.get(0);
if(jobj != null)
imgId = jobj.getString("id");
}
}
// JSONArray typeArr = property.getJSONObject("/common/topic/notable_types").getJSONArray("values");
// jobj = (JSONObject) typeArr.get(0);
// String type = jobj.getString("text");
//
//
String type = "";
if(property.has("/common/topic/notable_types")) {
JSONObject descObj = property.getJSONObject("/common/topic/notable_types");
if(descObj.has("values"))
{
JSONArray descArr = descObj.getJSONArray("values");
JSONObject jobj = (JSONObject) descArr.get(0);
if(jobj != null)
type = jobj.getString("text");
}
}
final Thing t = new Thing(getImage(id), name, null, desc, type);
synchronized (things) {
things.add(0,t);
}
new Thread(new Runnable() {
@Override
public void run() {
DB_Handler db_handler = new DB_Handler(c);
// Gson gson = new Gson();
// String compEntity = gson.toJson(t);
Bitmap bm = ((BitmapDrawable) t.getImgRes()).getBitmap();
db_handler.insert_ENTITY(ImageUtilities.saveImage(bm),t.getName(),t.getType(),t.getDesc(),null,topicId);
db_handler.updateTopicDetails(true,true,topicId);
}
}).start();
}
@Override
protected void onPostExecute(Void aVoid) {
super.onPostExecute(aVoid);
tla.notifyDataSetChanged();
}
Drawable getImage(String id)
{
InputStream is = null;
try {
is = (InputStream) new URL("https://usercontent.googleapis.com/freebase/v1/image" + id + "?maxwidth=500&maxheight=500&mode=fillcropmid&key=<KEY>").getContent();
} catch (IOException e) {
e.printStackTrace();
}
return Drawable.createFromStream(is, "src name");
}
@Override
protected Void doInBackground(String... voids) {
searchThisString(voids[0]);
return null;
}
}
<file_sep>/app/src/main/java/com/oreo/thingsofinterest/TestThingDetailActivity.java
package com.oreo.thingsofinterest;
import android.app.Activity;
import android.os.Bundle;
import android.view.View;
import android.widget.ListView;
import android.widget.ProgressBar;
import android.widget.TextView;
/**
* Created by <NAME> on 2/24/2016.
*/
public class TestThingDetailActivity extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.test_thing_detail_layout);
ListView lv = (ListView)findViewById(R.id.listViewThing);
ProgressBar pb = (ProgressBar)findViewById(R.id.ghumanPheri);
Parser p = new Parser(lv,this,pb);
p.execute(getIntent().getStringExtra("mid"));
}
}
<file_sep>/app/src/main/java/com/oreo/thingsofinterest/DB_Handler.java
package com.oreo.thingsofinterest;
import android.content.ContentValues;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteOpenHelper;
import android.util.Log;
import java.util.ArrayList;
import toi_utilities.ImageUtilities;
public class DB_Handler extends SQLiteOpenHelper {
// All Static variables
// Database Version
private static final int DATABASE_VERSION = 5;
// Database Name
private static final String DATABASE_NAME = "TOIDB";
// tables name
private static final String TABLE_CATEGORIES = "Categories";
private static final String TABLE_TOPICS = "Topics";
private static final String TABLE_ENTITY = "ENTITY";
// Entity Table Cols names
private static final String KEY_ID_ENTITY = "id";
private static final String IMAGE_PATH = "imgPath";
private static final String NAME = "name";
private static final String TYPE = "TYPE";
private static final String COMPLETE_ENTITY = "compEntity";
private static final String DESCRIPTION = "desc";
private static final String TOPIC = "topic";
// Categories Table Columns names
private static final String KEY_ID_CATEGORY = "id";
private static final String CATEGORY_NAME = "category_name";
//Enumerations
public static final int SOURCE_FACEBOOK=1;
public static final int SOURCE_DESKTOP=2;
// Topic table columns
private static final String KEY_ID_TOPIC = "id";
private static final String TOPIC_NAME = "topic_name";
private static final String CTG_ID = "category_id";
private static final String SOURCE = "source";
private static final String TIMESTAMP = "timestamp";
private static final String IS_CHECKED = "is_checked";
private static final String IS_ENTITY = "is_entity";
public DB_Handler(Context context) {
super(context, DATABASE_NAME, null, DATABASE_VERSION);
// TODO Auto-generated constructor stub
}
//Create Database
@Override
public void onCreate(SQLiteDatabase db) {
// TODO Auto-generated method stub
String CREATE_CATEOGRY_TABLE = "CREATE TABLE " + TABLE_CATEGORIES + "("
+ KEY_ID_CATEGORY + " INTEGER PRIMARY KEY,"
+ CATEGORY_NAME + " TEXT)";
db.execSQL(CREATE_CATEOGRY_TABLE);
String CREATE_TOPICS_TABLE = "CREATE TABLE " + TABLE_TOPICS + "("
+ KEY_ID_TOPIC + " INTEGER PRIMARY KEY,"
+ TOPIC_NAME + " TEXT,"
+ SOURCE + " INTEGER,"
+ CTG_ID +" INTEGER,"
+ TIMESTAMP + " DATETIME DEFAULT CURRENT_TIMESTAMP,"
+ IS_CHECKED + " INTEGER DEFAULT 0,"
+ IS_ENTITY + " INTEGER DEFAULT 0,"
+ "FOREIGN KEY("+CTG_ID+") REFERENCES "+TABLE_CATEGORIES+"("+KEY_ID_CATEGORY+"))";
db.execSQL(CREATE_TOPICS_TABLE );
String CREATE_ENTITY_TABLE = "CREATE TABLE " + TABLE_ENTITY + "("
+ KEY_ID_ENTITY + " INTEGER PRIMARY KEY,"
+ IMAGE_PATH + " TEXT,"
+ NAME +" TEXT,"
+ TYPE +" TEXT,"
+ COMPLETE_ENTITY +" TEXT,"
+ DESCRIPTION +" TEXT,"
+ TOPIC + " INTEGER,"
+ "FOREIGN KEY("+ TOPIC +") REFERENCES "+ TABLE_TOPICS + "("+ KEY_ID_TOPIC +"))";
db.execSQL(CREATE_ENTITY_TABLE);
}
//Update Database
@Override
public void onUpgrade(SQLiteDatabase db, int oldVersion, int newVersion) {
// TODO Auto-generated method stub
db.execSQL("DROP TABLE IF EXISTS " + TABLE_CATEGORIES);
db.execSQL("DROP TABLE IF EXISTS " + TABLE_TOPICS);
db.execSQL("DROP TABLE IF EXISTS " + TABLE_ENTITY);
onCreate(db);
}
public void insert_ENTITY(String imagePath,String name,String type, String description,String compEntity,int topicID)
{
ContentValues values = new ContentValues();
values.put(IMAGE_PATH,imagePath);
values.put(NAME,name);
values.put(TYPE,type);
values.put(DESCRIPTION,description);
values.put(COMPLETE_ENTITY,compEntity);
values.put(TOPIC,topicID);
SQLiteDatabase db = this.getWritableDatabase();
db.insert(TABLE_ENTITY, null, values);
db.close();
}
public ArrayList<Thing> getEntities()
{
String sql_query = "Select * from " + TABLE_ENTITY;
SQLiteDatabase db=this.getReadableDatabase();
Cursor cursor = db.rawQuery(sql_query, null);
ArrayList<Thing> ents = new ArrayList<>();
if(!cursor.moveToFirst())
return null;
// Use do/while so the last (or only) row is not skipped.
do
{
int id = cursor.getInt(cursor.getColumnIndex(KEY_ID_ENTITY));
String desc = cursor.getString(cursor.getColumnIndex(DESCRIPTION));
String name = cursor.getString(cursor.getColumnIndex(NAME));
String type = cursor.getString(cursor.getColumnIndex(TYPE));
String compEntity = cursor.getString(cursor.getColumnIndex(COMPLETE_ENTITY));
String imgPath = cursor.getString(cursor.getColumnIndex(IMAGE_PATH));
// public Thing(Drawable _imgRes,String _name, ThingFacts _facts,String shortDesc,String type)
Thing t = new Thing(ImageUtilities.getImage(imgPath), name, null, desc,type);
ents.add(t);
} while(cursor.moveToNext());
cursor.close();
db.close();
return ents;
}
public boolean updateTopicDetails(boolean isEntity, boolean isChecked,int topicID)
{
SQLiteDatabase db = this.getWritableDatabase();
ContentValues cvs = new ContentValues();
cvs.put(IS_ENTITY,(isEntity ? 1 : 0));
cvs.put(IS_CHECKED,(isChecked ? 1 : 0));
String [] args = {topicID + ""};
int i = db.update(TABLE_TOPICS,cvs, KEY_ID_TOPIC + " =?",args);
return i > 0;
}
private long putCategoryIfAbsent(String categoryName)
{
String sql_query = "Select " + KEY_ID_CATEGORY + " from " + TABLE_CATEGORIES + " where " + CATEGORY_NAME + " = '" + categoryName + "'";
SQLiteDatabase db=this.getWritableDatabase();
Cursor cursor = db.rawQuery(sql_query, null);
if(cursor.moveToFirst())
{
db.close();
return cursor.getInt(cursor.getColumnIndex(KEY_ID_CATEGORY));
}
// Inserting Row
ContentValues values = new ContentValues();
values.put(CATEGORY_NAME, categoryName);
return db.insert(TABLE_CATEGORIES, null, values);
// cursor = db.rawQuery(sql_query, null);
//
// if(cursor.moveToFirst())
// {
// db.close();
// return cursor.getInt(cursor.getColumnIndex(KEY_ID_CATEGORY));
// }
// return -1;
}
public synchronized long insertFacebookPage(FacebookPage page){
long categoryId=putCategoryIfAbsent(page.category);
ContentValues values = new ContentValues();
values.put(CTG_ID, categoryId);
values.put(TOPIC_NAME, page.name);
values.put(SOURCE,SOURCE_FACEBOOK);
// Inserting Row
SQLiteDatabase db = this.getWritableDatabase();
long id=db.insert(TABLE_TOPICS, null, values);
// String getID="Select max("+KEY_ID_TOPIC+") as _"+KEY_ID_TOPIC+" from "+TABLE_TOPICS;
// Cursor cursor=db.rawQuery(getID, null);
// int id=0;
// if(cursor.moveToNext()){
// id=cursor.getInt(cursor.getColumnIndex("_"+KEY_ID_TOPIC));
// }
db.close(); // Closing database connection
Log.v("Page",page.name);
return id;
}
public ArrayList<FacebookPage> getFacebookPages(){
ArrayList<FacebookPage> pages=new ArrayList<FacebookPage>();
//
String sql_query = "Select " + TABLE_TOPICS + "." + KEY_ID_TOPIC +","+TOPIC_NAME+","+CATEGORY_NAME+ " from " + TABLE_TOPICS +" join "+TABLE_CATEGORIES+" on "+TABLE_TOPICS+"."+CTG_ID+"="+TABLE_CATEGORIES+"."+KEY_ID_CATEGORY;
SQLiteDatabase db=this.getReadableDatabase();
Cursor cursor = db.rawQuery(sql_query, null);
if(!cursor.moveToFirst())
return null;
// Use do/while so the last (or only) row is not skipped.
do
{
FacebookPage page=new FacebookPage();
page.id=cursor.getInt(cursor.getColumnIndex(KEY_ID_TOPIC));
page.category=cursor.getString(cursor.getColumnIndex(CATEGORY_NAME));
page.name=cursor.getString(cursor.getColumnIndex(TOPIC_NAME));
pages.add(page);
} while(cursor.moveToNext());
cursor.close();
db.close();
return pages;
}
public boolean insertAllFacebookPages(ArrayList<FacebookPage> pages){
for (FacebookPage page : pages) {
if(insertFacebookPage(page)<=0)
return false;
}
//pages=null;
return true;
}
}
<file_sep>/app/src/main/java/com/oreo/thingsofinterest/ThingDetails.java
//package com.oreo.thingsofinterest;
//
//import com.getbase.floatingactionbutton.FloatingActionButton;
//
//
//import android.content.Intent;
//import android.graphics.Bitmap;
//import android.graphics.drawable.BitmapDrawable;
//import android.os.Bundle;
//import android.support.design.widget.CollapsingToolbarLayout;
//import android.support.v7.app.AppCompatActivity;
//import android.support.v7.graphics.Palette;
//import android.support.v7.widget.Toolbar;
//import android.view.Menu;
//import android.view.MenuItem;
//import android.view.View;
//import android.widget.ImageView;
//import android.widget.LinearLayout;
//import android.widget.ProgressBar;
//
//
//public class ThingDetails extends AppCompatActivity {
// CollapsingToolbarLayout collapsingToolbarLayout;
// ImageView image;
// @Override
// protected void onCreate(Bundle savedInstanceState) {
// super.onCreate(savedInstanceState);
// setContentView(R.layout.things_details_layout);
// image = (ImageView) findViewById(R.id.image);
// Toolbar toolbar=(Toolbar) findViewById(R.id.toolbar);
// setSupportActionBar(toolbar);
//
// toolbar.setNavigationIcon(R.drawable.abc_ic_ab_back_mtrl_am_alpha);
//
// collapsingToolbarLayout = (CollapsingToolbarLayout) findViewById(R.id.collapsing_toolbar);
// collapsingToolbarLayout.setTitle("Collapsing");
// //collapsingToolbarLayout.setExpandedTitleColor(getResources().getColor(android.R.color.transparent));
// setPalette();
//
// FloatingActionButton share= (FloatingActionButton) findViewById(R.id.share);
// share.setImageResource(R.drawable.abc_ic_menu_share_mtrl_alpha);
// share.setOnClickListener(new View.OnClickListener() {
// @Override
// public void onClick(View view) {
// Intent share = new Intent(Intent.ACTION_SEND);
// share.setType("text/plain");
// share.putExtra(Intent.EXTRA_TEXT, "I'm being sent!!");
// startActivity(Intent.createChooser(share, "Share this information"));
// }
// });
//
// FloatingActionButton like= (FloatingActionButton) findViewById(R.id.like);
// like.setImageResource(R.drawable.ic_favorite_border_white_24dp);
// like.setOnClickListener(new View.OnClickListener() {
// @Override
// public void onClick(View view) {
// if(view.getTag().toString().equalsIgnoreCase("like")){
// ((FloatingActionButton)view).setImageResource(R.drawable.ic_favorite_white_24dp);
// view.setTag("unlike");
// }
// else{
// ((FloatingActionButton)view).setImageResource(R.drawable.ic_favorite_border_white_24dp);
// view.setTag("like");
// }
// }
// });
//
// FloatingActionButton report= (FloatingActionButton) findViewById(R.id.report);
// report.setImageResource(R.drawable.ic_block_white_24dp);
//
//
//
// String title=getIntent().getStringExtra("name");
// collapsingToolbarLayout.setTitle(title);
//
// int drawableId = getIntent().getIntExtra("image",R.drawable.pic2);
// image.setImageResource(drawableId);
//
// String id=getIntent().getStringExtra("id");
//
//
// LinearLayout ll = (LinearLayout) findViewById(R.id.data_container);
//
// ProgressBar pb = (ProgressBar)findViewById(R.id.ghumanPheri);
//
// Parser sapi = new Parser(ll,this,pb);
//
// sapi.execute(id);
//
// }
//
// @Override
// public boolean onCreateOptionsMenu(Menu menu) {
// getMenuInflater().inflate(R.menu.menu_test, menu);
// return true;
// }
//
// private void setPalette() {
// Bitmap bitmap = ((BitmapDrawable) image.getDrawable()).getBitmap();
// Palette.from(bitmap).generate(new Palette.PaletteAsyncListener() {
// @Override
// public void onGenerated(Palette palette) {
// int primaryDark = getResources().getColor(R.color.primary_dark_material_dark);
// int primary = getResources().getColor(R.color.primary);
// collapsingToolbarLayout.setContentScrimColor(palette.getMutedColor(primary));
// collapsingToolbarLayout.setStatusBarScrimColor(palette.getDarkVibrantColor(primaryDark));
// }
// });
//
// }
//
// @Override
// public boolean onOptionsItemSelected(MenuItem item) {
// int id = item.getItemId();
// if (id == R.id.pic1) {
// image.setImageResource(R.drawable.pic2);
// setPalette();
// return true;
// }
// return super.onOptionsItemSelected(item);
//
// }
//}
<file_sep>/app/src/main/java/com/oreo/thingsofinterest/SharedPreferenceHandler.java
package com.oreo.thingsofinterest;
import android.content.Context;
import android.content.SharedPreferences;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.location.Address;
import android.location.Geocoder;
import android.location.Location;
import android.os.Environment;
import android.widget.Toast;
import com.facebook.Profile;
import com.google.gson.Gson;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
/**
* Created by <NAME> on 2/24/2016.
*/
public class SharedPreferenceHandler {
Context mContext;
SharedPreferenceHandler(Context pContext)
{
mContext = pContext;
}
public static String FB_PREFERENCES = "FacebookPreferences";
public static HashMap<String,String> getFacebookData(Context c){
SharedPreferences sp = c.getSharedPreferences(FB_PREFERENCES, Context.MODE_PRIVATE);
HashMap<String,String> ret=new HashMap<>();
ret.put("id", sp.getString("id", ""));
ret.put("name",sp.getString("name",""));
ret.put("email",sp.getString("email",""));
ret.put("age",sp.getInt("age",0)+"");
ret.put("dp",sp.getString("dp",""));
ret.put("locationAdd",sp.getString("locationAdd",""));
return ret;
}
public void saveFacebookProfile(JSONObject profile, Context c) throws JSONException {
String id = (String) profile.get("id");
String name = (String) profile.get("name");
int age = (int) profile.getJSONObject("age_range").get("min");
String email= (String) profile.get("email");
SharedPreferences sp = c.getSharedPreferences(FB_PREFERENCES, Context.MODE_PRIVATE);
SharedPreferences.Editor editor = sp.edit();
editor.putString("id", id);
editor.putString("name", name);
editor.putString("email", email);
editor.putInt("age", age);
////////////////////////////////////////////////////////
// location -> save
Location loc = new GPSTracker(mContext).getLocation();
Gson gson = new Gson();
String gsonLoc = gson.toJson(loc);
editor.putString("location", gsonLoc);
Toast.makeText(mContext,gsonLoc,Toast.LENGTH_LONG).show();
try {
Geocoder geocoder = new Geocoder(mContext, Locale.getDefault());
if(loc != null) {
List<Address> addresses = geocoder.getFromLocation(loc.getLatitude(), loc.getLongitude(), 1);
Toast.makeText(mContext, addresses.get(0).getLocality(), Toast.LENGTH_LONG).show();
editor.putString("locationAdd", addresses.get(0).getLocality());
Toast.makeText(mContext, addresses.get(0).getLocality(), Toast.LENGTH_LONG).show();
}
} catch (IOException e) {
e.printStackTrace();
}
// location -> save
////////////////////////////////////////////////////////
editor.commit();
if (profile.has("picture")) {
String profilePicUrl = profile.getJSONObject("picture").getJSONObject("data").getString("url");
this.savePhotoFromFacebook(profilePicUrl,c);
}
}
public void savePhotoFromFacebook(final String url, final Context c) {
Runnable r = new Runnable() {
@Override
public void run() {
try {
Bitmap bitmap = BitmapFactory.decodeStream((InputStream) new URL(url).getContent());
String path = saveImage(bitmap);
SharedPreferences sp = c.getSharedPreferences(FB_PREFERENCES, Context.MODE_PRIVATE);
SharedPreferences.Editor editor = sp.edit();
editor.putString("dp", path);
editor.commit();
} catch (IOException e) {
e.printStackTrace();
}
}
};
Thread thread=new Thread(r);
thread.start();
try {
thread.join();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
public String getFilename() {
File file = new File(Environment.getExternalStorageDirectory().getPath(), "ThingsOfInterest/Images");
if (!file.exists()) {
file.mkdirs();
}
String uriString = (file.getAbsolutePath() + "/" + System.currentTimeMillis() + ".png");
return uriString;
}
public String saveImage(Bitmap bmp) {
FileOutputStream out = null;
String filename = getFilename();
try {
out = new FileOutputStream(filename);
bmp.compress(Bitmap.CompressFormat.PNG,100, out); // bmp is your Bitmap instance
// PNG is a lossless format, the compression factor (100) is ignored
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
if (out != null) {
out.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
return filename;
}
}
<file_sep>/app/src/main/java/backend/ThingProperty.java
package backend;
import android.view.View;
/**
* Created by <NAME> on 2/2/2016.
*/
public interface ThingProperty {
public View getView();
}
<file_sep>/app/src/main/java/com/oreo/thingsofinterest/CompoundAdapter.java
package com.oreo.thingsofinterest;
import android.app.Activity;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.TextView;
import java.util.ArrayList;
/**
* Created by <NAME> on 12/11/2015.
*/
public class CompoundAdapter extends ArrayAdapter<CompoundThing> {
ArrayList<CompoundThing> compounds;
Context c;
int layoutFile;
public CompoundAdapter(Context context, int resource, ArrayList<CompoundThing> objects) {
super(context, resource, objects);
c=context;
layoutFile=resource;
compounds=objects;
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
View row=null;
// if(convertView==null)
// {
LayoutInflater li = ((Activity)c).getLayoutInflater();
row = li.inflate(layoutFile,parent,false);
// }
// else
// {
// row = convertView;
// }
LayoutInflater l=(LayoutInflater) c.getApplicationContext().getSystemService(Context.LAYOUT_INFLATER_SERVICE);
ArrayList<String> keys = compounds.get(position).getKeys();
ArrayList<String> vals = compounds.get(position).getValues();
((TextView)row.findViewById(R.id.mainKey)).setText(compounds.get(position).getName());
for(int i = 0; i < keys.size(); i++)
{
View v = null;
if(keys.get(i).equals("---")) // compare string content, not references
{
v = l.inflate(R.layout.divider, null);
}
else {
v = l.inflate(R.layout.object_thing, null);
TextView key = (TextView) v.findViewById(R.id.tv_key);
TextView val = (TextView) v.findViewById(R.id.tv_val);
key.setText(keys.get(i));
val.setText(vals.get(i));
}
ViewGroup insertPoint = (ViewGroup) row.findViewById(R.id.dyn_layout);
// new ViewGroup.LayoutParams(ViewGroup.LayoutParams.FILL_PARENT, ViewGroup.LayoutParams.FILL_PARENT)
insertPoint.addView(v, 0, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.FILL_PARENT, ViewGroup.LayoutParams.FILL_PARENT));
}
return row;
}
}
<file_sep>/app/src/main/java/com/oreo/thingsofinterest/AccountDetailsActivity.java
package com.oreo.thingsofinterest;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Color;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.graphics.Palette;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.widget.ImageView;
import android.widget.TextView;
import android.support.v7.widget.Toolbar;
import java.io.File;
import java.util.HashMap;
/**
* Created by <NAME> on 2/26/2016.
*/
public class AccountDetailsActivity extends AppCompatActivity {
public TextView name;
public TextView email;
public TextView age;
public TextView location;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_account_detail);
final Toolbar toolbar = (Toolbar) findViewById(R.id.accountDetailsToolbar);
setSupportActionBar(toolbar);
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
toolbar.setNavigationOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
onBackPressed();
}
});
HashMap<String, String> fbProfileData;
fbProfileData = SharedPreferenceHandler.getFacebookData(this);
name = (TextView) findViewById(R.id.name);
getSupportActionBar().setTitle(fbProfileData.get("name"));
email = (TextView) findViewById(R.id.email);
age = (TextView) findViewById(R.id.age);
name.setText(fbProfileData.get("name"));
String EMAIL = fbProfileData.get("email");
email.setText(EMAIL);
age.setText("Age : " + fbProfileData.get("age"));
location = (TextView) findViewById(R.id.location);
location.setText(fbProfileData.get("locationAdd"));
ImageView iv = (ImageView) findViewById(R.id.ivProfilePicture);
String path = fbProfileData.get("dp");
if (path == null || path.isEmpty()) {
iv.setImageResource(R.drawable.dpempty);
} else {
File imgFile = new File(path);
if (imgFile.exists()) {
Bitmap myBitmap = BitmapFactory.decodeFile(imgFile.getAbsolutePath());
iv.setImageBitmap(myBitmap);
// This is the quick and easy integration path.
// May not be optimal (since you're dipping in and out of threads)
Palette.from(myBitmap).maximumColorCount(50).generate(new Palette.PaletteAsyncListener() {
@Override
public void onGenerated(Palette palette) {
// Get the "vibrant" color swatch based on the bitmap
Palette.Swatch vibrant = palette.getVibrantSwatch();
if (vibrant != null) {
// Set the background color of a layout based on the vibrant color
toolbar.setBackgroundColor(vibrant.getRgb());
// Update the title TextView with the proper text color
//toolbar.setTitleTextColor(vibrant.getTitleTextColor());
float[] hsv = new float[3];
int color = vibrant.getRgb();
Color.colorToHSV(color, hsv);
hsv[2] *= 0.8f; // value component
color = Color.HSVToColor(hsv);
Window window = getWindow();
// clear FLAG_TRANSLUCENT_STATUS flag:
window.clearFlags(WindowManager.LayoutParams.FLAG_TRANSLUCENT_STATUS);
// add FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS flag to the window
window.addFlags(WindowManager.LayoutParams.FLAG_DRAWS_SYSTEM_BAR_BACKGROUNDS);
// finally change the color
window.setStatusBarColor(color);
}
}
});
} else {
iv.setImageResource(R.drawable.dpempty);
}
}
}
}
<file_sep>/app/src/main/java/com/oreo/thingsofinterest/TestFlexible.java
package com.oreo.thingsofinterest;
import android.app.Activity;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.Bundle;
import android.support.design.widget.CollapsingToolbarLayout;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.graphics.Palette;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.Toolbar;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.ImageView;
import java.util.ArrayList;
import java.util.List;
/**
* Created by <NAME> on 12/18/2015.
*/
public class TestFlexible extends AppCompatActivity {
CollapsingToolbarLayout collapsingToolbar;
RecyclerView recyclerView;
int mutedColor = R.attr.colorPrimary;
ContactAdapter simpleRecyclerAdapter;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.test);
final Toolbar toolbar = (Toolbar) findViewById(R.id.anim_toolbar);
setSupportActionBar(toolbar);
getSupportActionBar().setDisplayHomeAsUpEnabled(true);
collapsingToolbar = (CollapsingToolbarLayout) findViewById(R.id.collapsing_toolbar);
collapsingToolbar.setTitle("<NAME>");
ImageView header = (ImageView) findViewById(R.id.header);
Bitmap bitmap = BitmapFactory.decodeResource(getResources(),
R.drawable.header);
Palette.from(bitmap).generate(new Palette.PaletteAsyncListener() {
@SuppressWarnings("ResourceType")
@Override
public void onGenerated(Palette palette) {
mutedColor = palette.getMutedColor(R.attr.colorPrimary);
collapsingToolbar.setContentScrimColor(mutedColor);
collapsingToolbar.setStatusBarScrimColor(getResources().getColor(R.color.Black_transparent_black_percent_80));
}
});
recyclerView = (RecyclerView) findViewById(R.id.scrollableview);
recyclerView.setHasFixedSize(true);
LinearLayoutManager linearLayoutManager = new LinearLayoutManager(this);
recyclerView.setLayoutManager(linearLayoutManager);
List<ContactInfo> listData=new ArrayList<ContactInfo>();
listData.add(new ContactInfo("<NAME>","Ali","<EMAIL>"));
listData.add(new ContactInfo("<NAME>","Sarwar","<EMAIL>"));
listData.add(new ContactInfo("<NAME>","Chattha","<EMAIL>"));
listData.add(new ContactInfo("Haider","<NAME>","<EMAIL>"));
listData.add(new ContactInfo("<NAME>","Ali","<EMAIL>"));
listData.add(new ContactInfo("<NAME>","Sarwar","<EMAIL>"));
listData.add(new ContactInfo("<NAME>","Chattha","<EMAIL>"));
listData.add(new ContactInfo("Haider","<NAME>","<EMAIL>"));
listData.add(new ContactInfo("<NAME>","Ali","<EMAIL>"));
listData.add(new ContactInfo("<NAME>","Sarwar","<EMAIL>"));
listData.add(new ContactInfo("<NAME>","Chattha","<EMAIL>"));
listData.add(new ContactInfo("Haider","<NAME>","<EMAIL>"));
if (simpleRecyclerAdapter == null) {
simpleRecyclerAdapter = new ContactAdapter(listData);
recyclerView.setAdapter(simpleRecyclerAdapter);
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.menu_main, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case android.R.id.home:
finish();
return true;
case R.id.action_settings:
return true;
}
return super.onOptionsItemSelected(item);
}
}<file_sep>/app/src/main/java/com/oreo/thingsofinterest/NewCompoundAdapter.java
package com.oreo.thingsofinterest;
import android.app.Activity;
import android.content.Context;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.TextView;
import java.util.ArrayList;
/**
* Created by <NAME> on 12/11/2015.
*/
public class NewCompoundAdapter extends ArrayAdapter<ThingObject> {
ArrayList<ThingObject> things;
ArrayList<CompoundThing> compounds;
Context c;
int layoutFile;
public NewCompoundAdapter(Context context, int resource, ArrayList<ThingObject> objects,ArrayList<CompoundThing> compounds) {
super(context, resource, objects);
c = context;
layoutFile = resource;
things = objects;
this.compounds = compounds;
}
public View getCompoundView(int position, View convertView, ViewGroup parent) {
View row=null;
// if(convertView==null)
// {
LayoutInflater li = ((Activity)c).getLayoutInflater();
row = li.inflate(layoutFile,parent,false);
// }
// else
// {
// row = convertView;
// }
LayoutInflater l=(LayoutInflater) c.getApplicationContext().getSystemService(Context.LAYOUT_INFLATER_SERVICE);
ArrayList<String> keys = compounds.get(position).getKeys();
ArrayList<String> vals = compounds.get(position).getValues();
((TextView)row.findViewById(R.id.mainKey)).setText(compounds.get(position).getName());
for(int i = 0; i < keys.size(); i++)
{
View v = null;
if("---".equals(keys.get(i)))
{
v = l.inflate(R.layout.divider, null);
}
else {
v = l.inflate(R.layout.object_thing, null);
TextView key = (TextView) v.findViewById(R.id.tv_key);
TextView val = (TextView) v.findViewById(R.id.tv_val);
key.setText(keys.get(i));
val.setText(vals.get(i));
}
ViewGroup insertPoint = (ViewGroup) row.findViewById(R.id.dyn_layout);
// new ViewGroup.LayoutParams(ViewGroup.LayoutParams.FILL_PARENT, ViewGroup.LayoutParams.FILL_PARENT)
insertPoint.addView(v, 0, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.FILL_PARENT, ViewGroup.LayoutParams.FILL_PARENT));
}
return row;
}
public View getThingView(int position, View convertView, ViewGroup parent) {
View row = null;
// if(convertView==null)
// {
LayoutInflater li = ((Activity) c).getLayoutInflater();
row = li.inflate(layoutFile, parent, false);
// }
// else
// {
// row = convertView;
// }
LayoutInflater l = (LayoutInflater) c.getApplicationContext().getSystemService(Context.LAYOUT_INFLATER_SERVICE);
String key = things.get(position).key;
ArrayList<String> vals = things.get(position).values;
((TextView) row.findViewById(R.id.mainKey)).setText(key);
for (int i = 0; i < vals.size(); i++) {
View v = null;
v = l.inflate(R.layout.object_thing, null);
TextView keytv = (TextView) v.findViewById(R.id.tv_key);
TextView val = (TextView) v.findViewById(R.id.tv_val);
keytv.setText(key);
val.setText(vals.get(i));
ViewGroup insertPoint = (ViewGroup) row.findViewById(R.id.dyn_layout);
insertPoint.addView(v, 0, new ViewGroup.LayoutParams(ViewGroup.LayoutParams.FILL_PARENT, ViewGroup.LayoutParams.FILL_PARENT));
}
return row;
}
public int getThingsCount() {
return things.size();
}
public int getCompoundCount() {
return compounds.size();
}
}
<file_sep>/app/src/main/java/backend/ThingObjectProperty.java
package backend;
import android.view.View;
import java.util.ArrayList;
/**
* Created by <NAME> on 1/25/2016.
*/
public class ThingObjectProperty implements ThingProperty{
public String key;
public ArrayList<String> values= new ArrayList<>();
@Override
public View getView() {
return null;
}
}
|
95546a8b00dfa86fdcdcbb48aa7f80f74001f20b
|
[
"Java"
] | 21
|
Java
|
MuhammadMansoorAli/Things-Of-Interest
|
2aafa5527cd9dc143250f58baa7f3a99a0b34835
|
138db82cd85fd9749eb509142123ad4bd79b2859
|
refs/heads/master
|
<file_sep>package Aula4.ED2;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
public class ManipuladorSequencial implements IFileOrganizer {
private static final int RECORD_SIZE = 157;
private FileChannel canal;
public ManipuladorSequencial(String path) throws FileNotFoundException {
File file = new File(path);
RandomAccessFile raf = new RandomAccessFile(file, "rw");
this.canal = raf.getChannel();
}
public Aluno readAluno(long index) throws IOException {
if ((index < 0) || (index + RECORD_SIZE > this.canal.size()))
return null; // out of bounds: a full record must fit before end-of-file
ByteBuffer buffer = ByteBuffer.allocate(RECORD_SIZE);
this.canal.read(buffer, index);
buffer.flip();
return new Aluno(buffer);
}
private long binarySearch(int matric) throws IOException {
long low = 0;
long high = (this.canal.size() / RECORD_SIZE) - 1; // index of the last record, not the record count
long mid = 0;
while (low <= high) {
mid = (low + high) / 2;
Aluno aluno = readAluno(mid * RECORD_SIZE);
if (aluno.getMatricula() < matric)
low = mid + 1;
else if (aluno.getMatricula() > matric)
high = mid - 1;
else
return mid;
}
return -1;
}
@Override
public void addReg(Aluno aluno) throws IOException {
ByteBuffer record = aluno.getBuffer();
this.canal.position(0);
if (this.canal.size() == 0) {
this.canal.write(record, 0);
} else {
for (int i = 0; i < this.canal.size(); i += RECORD_SIZE) {
Aluno bufferAluno = readAluno(i);
if (bufferAluno.getMatricula() >= aluno.getMatricula()) {
for (int j = i; j < this.canal.size(); j += RECORD_SIZE) {
bufferAluno = readAluno(j);
this.canal.write(record, j );
record = bufferAluno.getBuffer();
}
break;
}
}
this.canal.write(record, this.canal.size());
}
}
@Override
public Aluno delReg(int matric) throws IOException {
int newSize = (int) (canal.size() - RECORD_SIZE);
this.canal.position(0);
for (int i = 0; i < canal.size(); i += RECORD_SIZE) {
Aluno aluno = readAluno(i);
if (aluno.getMatricula() == matric) {
for (int j = i + RECORD_SIZE; j < canal.size(); j += RECORD_SIZE) {
ByteBuffer bufb = ByteBuffer.allocate(RECORD_SIZE);
canal.read(bufb, j);
bufb.flip();
canal.write(bufb, j - RECORD_SIZE);
}
canal.truncate(newSize);
break;
}
}
return null;
}
public Aluno getReg(int matric) throws IOException {
this.canal.position(0);
for (int i = 0; i < canal.size(); i += RECORD_SIZE) {
Aluno aluno = readAluno(i);
if (aluno.getMatricula() == matric) {
return aluno;
}
}
return null;
}
public Aluno getRegBin(int matric) throws IOException {
this.canal.position(0);
long index;
index = binarySearch(matric);
if (index == -1)
return null;
else
return readAluno(index * RECORD_SIZE);
}
}
|
a375ead4af1b4a1578ee8457a58adb1e075d3a49
|
[
"Java"
] | 1
|
Java
|
leoproject/Analise_Sequencial
|
1d9e4619a9f66f2269b6af2b1eb8b4fdeebf7c1f
|
d0a5e0a8db66c28724efdf5706d88ae660f0eac9
|
refs/heads/master
|
<file_sep>const startButton = document.createElement("button");
const body = document.querySelector("body");
body.appendChild(startButton);
const resetButton = document.createElement("button");
body.appendChild(resetButton);
const buttonArray = [startButton, resetButton];
buttonArray.forEach(element => (element.style.color = "black"));
buttonArray.forEach(element => (element.style.border = "2px solid black"));
buttonArray.forEach(element => (element.style.padding = "10px 40px 10px 40px"));
buttonArray.forEach(element => (element.style.fontSize = "20px"));
buttonArray.forEach(element => (element.style.marginTop = "20px"));
buttonArray.forEach(element => (element.style.backgroundColor = "white"));
startButton.textContent = "Start";
resetButton.textContent = "Reset";
body.style.flexDirection = "column";
var counterIntervalId = null;
startButton.addEventListener("click", event => {
startTimer(startButton);
startButton.disabled = true;
});
resetButton.addEventListener("click", event => {
if (counterIntervalId != null) {
clearInterval(counterIntervalId);
startButton.disabled = false;
const secondTens = document.querySelector("#secondTens");
const secondOnes = document.querySelector("#secondOnes");
const msHundreds = document.querySelector("#msHundreds");
const msTens = document.querySelector("#msTens");
secondTens.textContent = "0";
secondOnes.textContent = "0";
msHundreds.textContent = "0";
msTens.textContent = "0";
counterIntervalId = null;
}
});
function startTimer(button) {
counterIntervalId = setInterval(updateDigits, 10);
var ms = 0;
const digits = document.querySelectorAll(".digit");
digits.forEach(digit => (digit.style.color = "black"));
function updateDigits() {
var remainder = ms;
if (ms > 10000) {
clearInterval(counterIntervalId);
} else {
const _secondTens = Math.floor(ms / 10000);
remainder = remainder % 10000;
const _secondOnes = Math.floor(remainder / 1000);
remainder = remainder % 1000;
const _msHundreds = Math.floor(remainder / 100);
remainder = remainder % 100;
const _msTens = Math.floor(remainder / 10);
remainder = remainder % 10;
const secondTens = document.querySelector("#secondTens");
const secondOnes = document.querySelector("#secondOnes");
const msHundreds = document.querySelector("#msHundreds");
const msTens = document.querySelector("#msTens");
secondTens.textContent = _secondTens.toString();
secondOnes.textContent = _secondOnes.toString();
msHundreds.textContent = _msHundreds.toString();
msTens.textContent = _msTens.toString();
if (ms === 10000) {
const digits = document.querySelectorAll(".digit");
digits.forEach(digit => (digit.style.color = "red"));
button.disabled = false;
}
ms += 10;
}
}
}
|
f60cdcb710c48cddf6c4b483d60fb85344056c18
|
[
"JavaScript"
] | 1
|
JavaScript
|
dorabelme/DOM-I
|
0bb5b7896fa0b37c7646d8d654dacd736a277b18
|
7d542264e1256c80ee6334e16b5330bbedd5295e
|
refs/heads/master
|
<file_sep># ObsiNews
Plugin integrating automatic messages that can be deactivated, as well as an /infos command.
You can change the time between each message from the config.yml.
Usage:
/automessage on/off
Future additions:
/automessage add <message>
/automessage delete <message>
/automessage delay <delay>
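Example config.yml (illustrative values; the plugin reads the AutoMessage and Delay keys):

    AutoMessage: true
    Delay: 1200   # delay between broadcasts, in server ticks (20 ticks = 1 second)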
<file_sep>package fr.lummix.AutoMessage;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.logging.Logger;
import org.bukkit.Bukkit;
import org.bukkit.plugin.PluginManager;
import org.bukkit.plugin.java.JavaPlugin;
import fr.lummix.AutoMessage.Commands.CommandAutoMessage;
import fr.lummix.AutoMessage.Commands.CommandInfos;
public class Main extends JavaPlugin {
private final Logger logger = Logger.getLogger("CraftBukkit");
private final String logTag = "[Obsi-News]";
private List<String> automessage = new ArrayList<>();
public void LoadConfig(){
getConfig().options().copyDefaults(true);
saveConfig();
}
public void onEnable(){
LoadConfig();
logger.info(logTag + "Activation du plugin Obsi-News");
automessage.add("Message1");
automessage.add("Message2");
automessage.add("Message3");
automessage.add("Message4");
getCommand("infos").setExecutor(new CommandInfos());
getCommand("automessage").setExecutor(new CommandAutoMessage(this));
int delay = getConfig().getInt("Delay");
Bukkit.getScheduler().scheduleSyncRepeatingTask(this, new Runnable(){
@Override
public void run(){
boolean automessageon = getConfig().getBoolean("AutoMessage");
if(automessageon){
int max = automessage.size();
Random random = new Random();
int randomMessage = random.nextInt(max);
String message = automessage.get(randomMessage);
Bukkit.broadcastMessage("§2§l[§6ObsiNews§2§l]§6§l" + message);
}
}
}, 0, delay); // 1s = 20 ticks
}
public void onDisable(){
logger.info(logTag + "Désactivation du plugin Obsi-News");
}
}
<file_sep>package fr.lummix.AutoMessage.Commands;
import org.bukkit.command.Command;
import org.bukkit.command.CommandExecutor;
import org.bukkit.command.CommandSender;
import org.bukkit.entity.Player;
import fr.lummix.AutoMessage.Main;
public class CommandAutoMessage implements CommandExecutor {
private Main main;
public CommandAutoMessage(Main main) {
this.main = main;
// TODO Auto-generated constructor stub
}
@Override
public boolean onCommand(CommandSender sender, Command cmd, String label, String[] args) {
Player p = (Player)sender;
if(cmd.getName().equalsIgnoreCase("automessage")){
if(p.hasPermission("automessage.admin") || p.isOp()){
if(args.length == 0 || args.length >= 2){
p.sendMessage("utilisation : /automessage on/off");
}
if(args[0].equalsIgnoreCase("on")){
p.sendMessage("Activation des messages automatiques");
main.getConfig().set("AutoMessage", true);
main.saveConfig();
}
if(args[0].equalsIgnoreCase("off")){
p.sendMessage("Désactivation des messages automatiques");
main.getConfig().set("AutoMessage", false);
main.saveConfig();
}
}
else{
p.sendMessage("Vous n'etes pas op/ perm");
}
return true;
}
return false;
}
}
|
aab4e86c5798b2cd657a6cb64c5477140f7b5c81
|
[
"Markdown",
"Java"
] | 3
|
Markdown
|
lummix/ObsiNews
|
c782f2de6e7f4ead0af42ce2e83f81769701c3e5
|
ee757cd156bf12a47648cf4fcb6013fc0ddb9d7a
|
refs/heads/master
|
<repo_name>faddai/modelling-trello<file_sep>/isogram.py
'''# Isogram
Determine if a word or phrase is an isogram.
An isogram (also known as a "nonpattern word")
is a word or phrase without a repeating letter.
Examples of isograms:
- lumberjacks
- background
- downstream
The word *isograms*, however, is not an isogram,
because the s repeats.
'''
import string
def is_isogram(word):
# loop through word
# keep track of characters and their count
# check if count is greater than 1
# return True/False
word = word.lower()
for character in word:
if character.isalpha() and word.count(character) > 1:
return False
return True
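# Minimal usage sketch (added for illustration; mirrors the examples in the docstring above):
if __name__ == '__main__':
    assert is_isogram('lumberjacks') is True
    assert is_isogram('isograms') is False
    print('is_isogram examples passed')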
<file_sep>/models/user.py
'''Model a User object
'''
import hashlib
import csv
import functions
DATASTORE = 'users.csv'
class User:
'''A user on trello'''
def __init__(self, firstname, lastname, email, password):
self.firstname = firstname
self.lastname = lastname
self.email = email
self.password = self.hash_password(password)
self.is_logged_in = False
def __repr__(self):
return functions.get_class_representation(self)
def hash_password(self, password):
'''Hash a given password'''
return hashlib.sha256(password).hexdigest()
def login(self, email, password):
self.is_logged_in = True
def save(self):
with open(DATASTORE, 'a+') as file:
store = csv.DictWriter(file, ['firstname', 'lastname', 'email', 'password', 'is_logged_in'])
file.seek(0)  # 'a+' opens positioned at end-of-file; rewind so the header check reads the first line
if file.readline() == '':
store.writeheader()
store.writerow(self.__dict__)
<file_sep>/models/card.py
'''A card has details on a task to be carried out'''
import datetime
import functions
class Card:
def __init__(self, name, description, due_date=None):
self.name = name
self.description = description
# You don't always have to require every instance variable upfront;
# require what makes sense at construction time and delegate the rest to setters
self.members = []
self.activities = []
self.comments = []
self.due_date = due_date
self.created_at = datetime.datetime.now()
def __repr__(self):
return functions.get_class_representation(self)
def add_member(self, user):
'''Add a user as a member of the card'''
self.members.append(user)
def add_activity(self, activity):
'''Records activities for the card'''
self.activities.append(activity)
def add_comment(self, comment):
'''Add a comment to the card'''
self.comments.append(comment)
<file_sep>/functions.py
'''
A collection of helper functions
'''
def get_class_representation(klass):
'''Get a meaningful representation for a given class instance'''
return '<{} {}>'.format(klass.__class__.__name__, klass.__dict__)
<file_sep>/models/board.py
import functions
class Board:
def __init__(self, name, lists=None, members=None):
self.name = name
# Avoid mutable default arguments: a shared default list would leak state across Board instances
self.lists = lists if lists is not None else []
self.members = members if members is not None else []
def __repr__(self):
return functions.get_class_representation(self)
def add_list(self, board_list):
'''Add an instance of a list to the board'''
self.lists.append(board_list)
def add_member(self, user):
'''Add an instance of a user to the board so that
user can have access to the board
'''
self.members.append(user)
<file_sep>/README.md
# Trello Clone
This is how I would model my objects if I were building a clone of Trello.
## Classes
* User
- firstname
- lastname
- email
- password
+ hash_password
+ login
+ signup
* Board
- name
- members
- lists
+ add_member
+ add_list
* List
- name
- cards
+ add_card
+ move_card
* Card
- title
- members
- comments
- description
- due_date
- activities
+ archive
* Activity
- performed_by
- timestamp
- description<file_sep>/main.py
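## Example

A minimal usage sketch (illustration only, not part of the models) showing how the `User`, `Board` and `Card` classes above might be wired together:

    from models.user import User
    from models.board import Board
    from models.card import Card

    user = User('Ada', 'Lovelace', 'ada@example.com', b'secret')

    board = Board('Product launch')
    board.add_member(user)

    card = Card('Write announcement', 'Draft the launch blog post')
    card.add_member(user)
    card.add_comment('First draft is due Friday')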
'''Trello application
Putting our hard work to good use
'''
import models.user
import models.board
import models.card
import models.boardlist
firstname = input('Please enter your first name: ')
lastname = input('Please enter your last name: ')
email = input('Please enter your email: ')
password = input('Please enter your password: ')
user = models.user.User(firstname,
lastname,
email,
bytes(password, 'utf-8'))
print(user)
|
c42ce0aec3cef2812d96d54dd28c6324b89fe043
|
[
"Markdown",
"Python"
] | 7
|
Python
|
faddai/modelling-trello
|
41a665b7657db10bed77f4ee125f24128c5cb5b9
|
24eb2d7dda42c0ee38ee60c232261a0148381328
|
refs/heads/master
|
<file_sep>#-*- coding: utf-8 -*-
from news.Configuration import Configuration
# import pymysql
class TransportData():
pass
# @staticmethod
# def transport_data(app_name,pic_url,pic_more_url,writer,content_url,content_type,title,summary,content,home_url,pubTime,crawlTime):
# try:
# conn = pymysql.connect(host=Configuration.host, user=Configuration.user, passwd=<PASSWORD>,
# db=Configuration.db, charset="utf8")
# cursor = conn.cursor()
# sql_content = "insert into news_info(app_name,pic_url,pic_more_url,writer,content_url,content_type,title,summary,content,home_url,pubTime,crawlTime) values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
# cursor.execute(sql_content,(app_name,pic_url,pic_more_url,writer,content_url,content_type,title,summary,content,home_url,pubTime,crawlTime))
# conn.commit()
# cursor.close()
# conn.close()
# print "success!!!!!!!!!!!!!!!!!!!!!"
# except pymysql.Error, e:
# print "Mysql Error"
#
# @staticmethod
# def getData(app_name):
# existing_title = []
# try:
# conn =pymysql.connect(host=Configuration.host, user=Configuration.user, passwd=<PASSWORD>,
# db=Configuration.db, charset="utf8")
# cursor = conn.cursor()
# sql_content = "select title from news_info where app_name='%s'"%app_name
# cursor.execute(sql_content)
# data = cursor.fetchall()
# conn.commit()
# cursor.close()
# conn.close()
# for name in data:
# existing_title.append(name[0].encode("utf-8"))
# del data
# return existing_title
# except pymysql.Error, e:
# print "Mysql Error"
#
# @staticmethod
# def getMaxPubtime(app_name):
# try:
# conn = pymysql.connect(host=Configuration.host, user=Configuration.user, passwd=Configuration.passwd,
# db=Configuration.db, charset="utf8")
# cursor = conn.cursor()
# sql_content = "select MAX(pubTime) from news_info WHERE app_name = %s"
# cursor.execute(sql_content, app_name)
# data = cursor.fetchall()
# print data
# conn.commit()
# cursor.close()
# conn.close()
# for name in data:
# max_pubTime = name[0]
# del data
# return max_pubTime
# except pymysql.Error, e:
# print "Mysql Error"
<file_sep>#coding=utf-8
import scrapy, time, re, json
from lxml.etree import HTML
from news.items import NewsItem
class eluosi(scrapy.Spider):
name = 'eluosi'
start_urls = [
'http://sputniknews.cn/search/?query=%E4%B9%A0%E8%BF%91%E5%B9%B3'
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime('2018-06-01', "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
links = response.xpath('//ul[@class="b-plainlist__list"]/li/div[2]/h2/a/@href').extract()
title = response.xpath('//ul[@class="b-plainlist__list"]/li/div[2]/h2/a/text()').extract()
for i in range(len(links)):
if 'http' not in links[i]:
url = 'http://sputniknews.cn' + links[i]
else:
url = links[i]
yield scrapy.Request(url, meta={
'title': title[i],
'home_url': response.url
}, callback=self.parse_item, dont_filter=True)
def parse_item(self, response):
app_name = '俄罗斯卫星中文网'
try:
describe = response.xpath('//div[@itemprop="description"]').extract()
selato = HTML(describe[0])
describe = selato.xpath('//text()')
describe = ''.join(describe)
describe = describe.replace('\t', '').replace('\n', '').replace('\r', '')
except:
describe = ''
author = ''
pic_url = ''
title = response.meta['title'].replace('\t', '').replace('\n', '').replace('\r', '')
try:
publishedDate = response.xpath('//time/@datetime').extract()[0]
publishedDate = publishedDate.replace('T', ' ')
except:
publishedDate = '2018-01-01 01:01:01'
content = response.xpath('//div[@itemprop="articleBody"]').extract()
selator = HTML(content[0])
content = selator.xpath('//text()')
content = ''.join(content)
content = content.replace('\t', '').replace('\n', '').replace('\r', '')
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
home_url = response.meta['home_url']
pic_more_url = selator.xpath('//img/@src')
pic_more_urll = []
for i in range(len(pic_more_url)):
pic_more_urll.append(pic_more_url[i])
pic_more_url = str(pic_more_urll)
category = '中国'
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
try:
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M:%S")
except:
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M")
publishedDate = time.mktime(timeArray)
if publishedDate >= self.timeStamp:
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(publishedDate)))
item['publishedDate'] = publishedDate
self.count = self.count + 1
item['count'] = self.count
yield item
<file_sep>#-*- coding: utf-8 -*-
from scrapy.spider import Spider
import scrapy
import sys
import json,time,re
from news.items import NewsItem
import random
reload(sys)
sys.setdefaultencoding('utf8')
class tengxun(Spider):
name = 'tengxun'
start_urls = [
'https://www.baidu.com'
]
def parse(self, response):
a = self.suiji1()
b = self.suiji2()
c = self.suiji3()
d = self.suiji4()
e = self.suiji5()
# STR = [chr(i) for i in range(65, 91)] # 65-91对应字符A-Z
# str = [chr(i) for i in range(97, 123)] # a-z
# number = [chr(i) for i in range(48, 58)] # 0-9
# a = random.shuffle(str)
# b = random.shuffle(STR)
# c = random.shuffle(number)
# print a
# print b
# print c
num = str(a) + '-' + str(b) +'-' + c +'-' + d +'-' + e
print num
canshu = 'appver=19_android_5.6.02&cgi=searchMore&devid=352584064289389&qn-rid=%s&secret=qn123456'%num
print canshu
key = self.md5(canshu)
url = 'https://r.inews.qq.com/search?query=习近平&is_special_device=0&omgbizid=e32dc9fbfa5d254c5f3b7adeebc4c57157c40050213506&network_type=wifi&store=153&extinfo=&hw=LGE_Nexus5&orig_store=153&global_session_id=1528252581892&activefrom=icon&mac=64:89:9a:4e:e8:08&origin_imei=352584064289389&qqnetwork=wifi&islite=0&rom_type=&lite_version=&real_device_width=2.44&imsi_history=460078106345962&pagestartfrom=icon&sceneid=&dpi=480.0&apptype=android&screen_width=1080&real_device_height=4.33&is_chinamobile_oem=0&patchver=5602&global_info=0|1|1|1|1|8|4|1|2|6|1|2|1|2|0|0|2|&adcode=110108&imsi=460078106345962&mid=08caf3a626732fca43979e630a77e2e5855b9474&isoem=0&screen_height=1776&Cookie=lskey%3D;skey%3D;uin%3D;%20luin%3D;logintype%3D0;%20main_login%3D;%20&omgid=91c0f3a78f61b1429c0acea8f8480f3b8182001021301f&uid=def5d0d299d3042d&devid=352584064289389&appver=19_android_5.6.02&qn-rid=' + num + '&qn-sig=' + key
print url
params = {
"Cookie":"lskey=;skey=;uin=; luin=;logintype=0; main_login=;",
"appver":"19_android_5.6.02",
"Referer":"http://inews.qq.com/inews/android/",
"User-Agent":"%E8%85%BE%E8%AE%AF%E6%96%B0%E9%97%BB5602(android)",
"Host":"r.inews.qq.com",
"Connection":"Keep-Alive",
"Accept-Encoding":"gzip"
}
import requests
data =requests.get(url, params=json.dumps(params))
print data.content
def md5(self,page):
import hashlib
m = hashlib.md5()
m.update(page)
return m.hexdigest()
def suiji1(self):
import random
auth = "" # 定义全局验证码变量
for i in range(0, 8): # 定义循环4次,形成4位验证码。
current = random.randint(0, 9) # 定义一个随机0-4的一个范围,去猜i 的值。
if current == i: # 如果current 和i 的值一样
current_code = random.randint(0, 9) # 生成一个随机的数字
else: # 如果current和i 的值不一样
current_code = chr(random.randint(97, 120)) # 生成一个随机的字母,这里一定要主义chr()转换一下。
auth += str(current_code) # 将每次随机生成的值赋值给auth
return auth
def suiji2(self):
import random
auth = "" # 定义全局验证码变量
for i in range(0, 4): # 定义循环4次,形成4位验证码。
# current = random.randint(0, 4) # 定义一个随机0-4的一个范围,去猜i 的值。
# if current == i: # 如果current 和i 的值一样
current_code = random.randint(0, 9) # 生成一个随机的数字
# else: # 如果current和i 的值不一样
# current_code = chr(random.randint(97, 120)) # 生成一个随机的字母,这里一定要主义chr()转换一下。
# auth += str(current_code) # 将每次随机生成的值赋值给auth
auth += str(current_code)
return auth
def suiji3(self):
import random
auth = "" # 定义全局验证码变量
for i in range(0, 4): # 定义循环4次,形成4位验证码。
# current = random.randint(0, 4) # 定义一个随机0-4的一个范围,去猜i 的值。
# if current == i: # 如果current 和i 的值一样
# current_code = random.randint(0, 9) # 生成一个随机的数字
# else: # 如果current和i 的值不一样
current_code = chr(random.randint(97, 120)) # 生成一个随机的字母,这里一定要主义chr()转换一下。
# auth += str(current_code) # 将每次随机生成的值赋值给auth
auth += current_code
return auth
def suiji4(self):
import random
auth = "" # 定义全局验证码变量
for i in range(0, 4): # 定义循环4次,形成4位验证码。
# current = random.randint(0, 4) # 定义一个随机0-4的一个范围,去猜i 的值。
# if current == i: # 如果current 和i 的值一样
# current_code = random.randint(0, 9) # 生成一个随机的数字
# else: # 如果current和i 的值不一样
current_code = chr(random.randint(97, 120)) # 生成一个随机的字母,这里一定要主义chr()转换一下。
# auth += str(current_code) # 将每次随机生成的值赋值给auth
auth += current_code
return auth
def suiji5(self):
import random
auth = "" # 定义全局验证码变量
for i in range(0, 12): # 定义循环4次,形成4位验证码。
# current = random.randint(0, 4) # 定义一个随机0-4的一个范围,去猜i 的值。
# if current == i: # 如果current 和i 的值一样
# current_code = random.randint(0, 9) # 生成一个随机的数字
# else: # 如果current和i 的值不一样
current_code = chr(random.randint(97, 120)) # 生成一个随机的字母,这里一定要主义chr()转换一下。
# auth += str(current_code) # 将每次随机生成的值赋值给auth
auth += current_code
return auth
<file_sep>#-*- coding: utf-8 -*-
from scrapy.spider import Spider
from urlparse import urljoin
from scrapy.selector import Selector
from scrapy.http import Request
import time
import json
#from selenium import selenium
import re
import sys
from news.DataResource import TransportData
import scrapy
from news.items import NewsItem
reload(sys)
sys.setdefaultencoding('utf8')
class shangyou(Spider):
name = "shangyouxinwen"
base_url = "http://www.cqcb.com"
start_urls = [
"https://www.cqcb.com/headline/index.json?udid=862620027634098&appkey=<KEY>",
"https://www.cqcb.com/hot/index.json?udid=862620027634098&appkey=<KEY>",
"https://www.cqcb.com/reading/index.json?udid=862620027634098&appkey=<KEY>",
"https://www.cqcb.com/science/index.json?udid=862620027634098&appkey=<KEY>",
"https://www.cqcb.com/finance/index.json?udid=862620027634098&appkey=<KEY>",
]
DOWNLOAD_DELAY = 0
count = 0
appname = "上游新闻"
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str,"%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
results = json.loads(response.body)
pagenum = int(results['pagenum'])
thispage = int(results['thispage'])
if 'hot' in response.url:
pagetitle = u'头条'.encode('utf-8')
elif 'headline' in response.url:
pagetitle = u'推荐'.encode('utf-8')
elif 'finance' in response.url:
pagetitle = u'金融'.encode('utf-8')
elif 'science' in response.url:
pagetitle = u'科学'.encode('utf-8')
else:
pagetitle = u'推荐'.encode('utf-8')
newslists = results['newslist']
acceptable_title = []
for newslist in newslists:
title = newslist['title']
titleurl = newslist['titleurl']
titleurl = urljoin(self.base_url,titleurl)
pic_url = newslist['titlepic']
publishedDate = newslist['newstime']
author = newslist['befrom']
b = time.strptime(publishedDate, "%Y-%m-%d %H:%M:%S")
b = int(time.mktime(b))
if b > self.timeStamp:
acceptable_title.append(title)
yield Request(titleurl, meta={"title": title, "pic_url": pic_url, "publishedDate": publishedDate,
"author": author, "category": pagetitle},
callback=self.parse_news)
if pagenum > thispage:
if thispage == 1:
next_page = re.sub("index.json", "index_2.json",response.url)
else:
next_page = re.sub("index_\d*.json","index_"+str(thispage + 1) +".json",response.url)
yield Request(next_page,callback=self.parse)
def parse_news(self,response):
describe = ""
title = response.meta['title']
pic_url = response.meta['pic_url']
publishedDate = response.meta['publishedDate']
author = response.meta['author']
category = response.meta['category']
crawlTime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
home_url = self.base_url
hxs = Selector(response)
content = hxs.xpath('//div[@class="article_text"]//text()').extract()
content = "".join(content)
content = content.replace("\n","").replace(" ","")
pic_more_url = hxs.xpath('//div[@class="article_text"]//img/@src').extract()
pic_more_url = pic_more_url
self.count = self.count + 1
url = response.url
if pic_url:
pic_url = pic_url.encode("utf-8")
if pic_more_url:
for i in range(0, len(pic_more_url)):
pic_more_url[i] = pic_more_url[i].encode('utf-8')
pic_more_url = set(pic_more_url)
if author:
author = author.encode('utf-8')
if category:
category = author.encode('utf-8')
if title:
title = title.encode('utf-8')
if describe:
describe = describe.encode('utf-8')
if content:
content = content.encode('utf-8')
if publishedDate:
publishedDate = publishedDate.encode('utf-8')
if crawlTime:
crawlTime = crawlTime.encode('utf-8')
pic_more_url = set(pic_more_url)
item = NewsItem()
item['app_name'] = self.appname
item['count'] = self.count
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
yield item<file_sep>#coding=utf-8
import re,time,json
from news.items import NewsItem
import scrapy
from news.DataResource import TransportData
class zsdzb(scrapy.Spider):
name='zhongshidianzi'
start_urls=[
# 'http://www.chinatimes.com/politic/total/?page=2',  # Politics
# 'http://www.chinatimes.com/world/total?page=2',  # International
'http://www.chinatimes.com/chinese/total?page=2',  # Cross-strait
# 'http://www.chinatimes.com/armament/total/?page=2',  # Military
# 'http://www.chinatimes.com/money/realtimenews?page=2',  # Finance
]
base_url='http://www.chinatimes.com'
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self,response):
for i in range(1,5):
if 'realtimenews' in response.url:
url='http://www.chinatimes.com/politic/total/?page=%s'%i
elif 'politic' in response.url:
url='http://www.chinatimes.com/life/total?page=%s'%i
elif 'star' in response.url:
url='http://www.chinatimes.com/star/total?page=%s'%i
elif 'life' in response.url:
url='http://www.chinatimes.com/life/total?page=%s'%i
elif 'society' in response.url:
url='http://www.chinatimes.com/society/total?page=%s'%i
elif 'world' in response.url:
url='http://www.chinatimes.com/world/total?page=%s'%i
elif 'chinese' in response.url:
url='http://www.chinatimes.com/chinese/total?page=%s'%i
elif 'sports' in response.url:
url='http://www.chinatimes.com/sports/total?page=%s'%i
elif 'armament' in response.url:
url='http://www.chinatimes.com/armament/total/?page=%s'%i
elif 'travel' in response.url:
url='http://www.chinatimes.com/travel/travel-hotnews?page='
elif 'health' in response.url:
url='http://www.chinatimes.com/healthcare/total?page=%s'%i
elif 'opinion' in response.url:
url='http://opinion.chinatimes.com/total/?page=%s'%i
elif 'money' in response.url:
url='http://www.chinatimes.com/money/realtimenews?page=%s'%i
elif 'hottopic' in response.url:
url='http://hottopic.chinatimes.com/total/?page=%s'%i
elif 'tube' in response.url:
url='http://tube.chinatimes.com/total?page=%s'%i
elif 'styletc' in response.url:
url='http://styletc.chinatimes.com/list/%s'%i
elif 'hottv' in response.url:
url='http://hottv.chinatimes.com/total/?page=%s'%i
else:
url=''
yield scrapy.Request(url,callback=self.parse_one)
def parse_one(self,response):
links_url = response.xpath('//li[@class="clear-fix"]/h3/a/@href').extract()
for i in range(len(links_url)):
url = 'http://www.chinatimes.com' + links_url[i]
yield scrapy.Request(url,callback=self.parse_two)
def parse_two(self,response):
title = response.xpath('//h1').extract()
title = title[0].replace('\t','').replace('\n','').replace('\r','').replace(' ','')
title = re.findall('>(.*?)<',title)
tit = ''
for i in range(len(title)):
tit += title[i]
title = tit
app_name = '中时电子报'
pic_url = ''
describe = ''
author = ''
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
home_url = 'http://www.chinatimes.com'
if 'realtimenews' in response.url:
category=u'即时'.encode('utf-8')
elif 'politic' in response.url:
category = u'政治'.encode('utf-8')
elif 'world' in response.url:
category = u'国际'.encode('utf-8')
elif 'chinese' in response.url:
category = u'两岸'.encode('utf-8')
elif 'money' in response.url:
category = u'财经'.encode('utf-8')
else:
category = u'即时'.encode('utf-8')
publishedDate = response.xpath('//time/text()').extract()[0].replace('\t', '').replace('\n', '').replace('\r','').replace(' ', '').replace('年', '-').replace('月', '-').replace('日', ' ')
try:
content=response.xpath('//article[@class="arttext marbotm clear-fix"]').extract()
content = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', content)
contentdata = ''
for i in range(0,len(content)):
contentdata += content[i]
content = contentdata
except:
content=response.xpath('//div[@class="page-cnt clear-fix"]/article').extract()
content = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', content)
contentdata = ''
for i in range(0,len(content)):
contentdata += content[i]
content = contentdata
try:
pic_more_url=response.xpath('//div[@class="picbox2"]/a/@href').extract()
pic_more_url=pic_more_url[0]
except:
content = response.xpath('//article').extract()
content = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
pic_more_url=re.findall('<imgsrc="(.*?)"',content)
pic_more_url1=[]
for i in range(0,len(pic_more_url)):
pic_more_url1.append(pic_more_url[i])
pic_more_url=str(set(pic_more_url1))
print "标题", title
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M")
publishedDate = time.mktime(timeArray)
if publishedDate >= self.timeStamp:
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(publishedDate)))
item['publishedDate'] = publishedDate
self.count = self.count + 1
item['count'] = self.count
yield item
<file_sep>#coding=utf-8
import scrapy
import time,re
from news.items import NewsItem
class radiofreeasia(scrapy.Spider):
name = 'radiofreeasia'
start_urls = [
'https://www.rfa.org/mandarin/Xinwen',  # Top news
'https://www.rfa.org/mandarin/yataibaodao/gangtai',  # Hong Kong & Taiwan
'https://www.rfa.org/mandarin/yataibaodao/zhengzhi',  # Politics
'https://www.rfa.org/mandarin/yataibaodao/shehui',  # Society
'https://www.rfa.org/mandarin/guojishijiao',  # International
'https://www.rfa.org/mandarin/yataibaodao/renquanfazhi',  # Human rights & rule of law
]
def parse(self, response):
links = response.xpath('//div[@class="sectionteaser"]/h2/a/@href').extract()
for i in range(0,len(links)):
url = links[i]
yield scrapy.Request(url,meta={
'home_url':response.url
},callback=self.parse_item)
def parse_item(self,response):
home_url = response.meta['home_url']
app_name = 'radiofreeasia'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
pic_url = ''
describe = ''
author = ''
content = response.xpath('//div[@id="storytext"]').extract()
# content = "".join(content)
# content = content.replace("\n", "").replace(" ", "")
content = content[0].replace('\t','').replace('\n','').replace('\r','').replace(' ','')
content = re.findall('>(.*?)<', content)
contentdata = ''
for i in range(0, len(content)):
contentdata += content[i]
content = contentdata
title = response.xpath('//h1/text()').extract()[0]
publishedDate = response.xpath('//span[@id="story_date"]/text()').extract()[0]
try:
pic_more_url = response.xpath('//div[@id="headerimg"]/img/@src').extract()[0]
except:
pic_more_url = ''
if 'Xinwen' in home_url:
category = u'要闻'.encode('utf-8')
elif 'gangtai' in home_url:
category = u'港台'.encode('utf-8')
elif 'zhengzhi' in home_url:
category = u'政治'.encode('utf-8')
elif 'shehui' in home_url:
category = u'社会'.encode('utf-8')
elif 'guojishijiao' in home_url:
category = u'国际'.encode('utf-8')
else:
category = u'人权法治'.encode('utf-8')
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
<file_sep># -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class NewsItem(scrapy.Item):
# define the fields for your item here like:
# name = scrapy.Field()
app_name = scrapy.Field()
pic_url =scrapy.Field()
pic_more_url =scrapy.Field()
author = scrapy.Field()
url =scrapy.Field()
category =scrapy.Field()
title =scrapy.Field()
describe =scrapy.Field()
content =scrapy.Field()
home_url =scrapy.Field()
publishedDate =scrapy.Field()
crawlTime = scrapy.Field()
count = scrapy.Field()
mctitle =scrapy.Field()
mcleixing =scrapy.Field()
mcaddress = scrapy.Field()
mcdaihao = scrapy.Field()
mcpinpai = scrapy.Field()
pass
<file_sep>#coding=utf-8
import scrapy
import json, re, time
from news.items import NewsItem
import datetime
class AFP(scrapy.Spider):
name = 'afpnews'
start_urls = [
'http://www.afpbb.com/sdata/detail_afp_json_v1_phone_4c43ffbac73aaf2877096fcf3df445a5.json'
]
def parse(self, response):
data = json.loads(response.body)
data = data['news']
for i in range(len(data)):
title = data[i]['title']
url = data[i]['link']
pubt = data[i]['pubDate']
pubt = pubt.split(', ')[1].replace('+0900','')
t = pubt.split(' ')
t1 = t[0]
t2 = t[1]
t3 = t[2]
t4 = t[3]
t12 = t2
if 'June' in t12:
tt = '06'
elif 'Jun' in t12:
tt = '06'
elif 'January' in t12:
tt = '01'
elif 'Jan' in t12:
tt = '01'
elif 'February' in t12:
tt = '02'
elif 'Feb' in t12:
tt = '02'
elif 'March' in t12:
tt = '03'
elif 'Mar' in t12:
tt = '03'
elif 'April' in t12:
tt = '04'
elif 'Apr' in t12:
tt = '04'
elif 'May' in t12:
tt = '05'
elif 'July' in t12 or 'Jul' in t12:
tt = '07'
elif 'August' in t12:
tt = '08'
elif 'Aug' in t12:
tt = '08'
elif 'September' in t12:
tt = '09'
elif 'Sept' in t12 or 'Sep' in t12:
tt = '09'
elif 'October' in t12:
tt = '10'
elif 'Oct' in t12:
tt = '10'
elif 'November' in t12:
tt = '11'
elif 'Nov' in t12:
tt = '11'
elif 'December' in t12:
tt = '12'
elif 'Dec' in t12:
tt = '12'
pubt = t3 + '-' + tt + '-' + t1 + ' ' + t4
try:
desc = data[i]['description']
except:
desc = ''
try:
pic_url = data[i]['image']
except:
pic_url = ''
yield scrapy.Request(url, meta={
'title': title,
'pic_url': pic_url,
'describe': desc,
'pubt': pubt
}, callback=self.parse_item, dont_filter=True)
def parse_item(self, response):
title = response.meta['title']
pic_url = response.meta['pic_url']
describe = response.meta['describe']
publishedDate = response.meta['pubt']
app_name = 'AFP news'
author = ''
home_url = 'http://www.afpbb.com'
category = 'News'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
pic_more_url = ''
content = response.xpath('//div[@class="article-body clear"]').extract()
content = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', content)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime<file_sep>#-*- coding: utf-8 -*-
from scrapy.spiders import Spider
from scrapy.selector import Selector
from scrapy.http import Request
from urlparse import urljoin
import json
# from news.Configuration import Configuration
# from yidong.extensions.myExtension import MyExtension
# from yidong_pay.ResultTransferUtil import ResultTransferUtil
# from rpc.app.ttypes import AppWeb
# from rpc.app.ttypes import AppType
# from rpc.app.ttypes import OriginType
# from yidong_pay.DataResource import GetData
# from rpc.app.ttypes import AppComment
# from mySQLdataexport import *
import time
import re
from news.DataResource import TransportData
from news.items import NewsItem
class xinbao(Spider):
name = 'xinbaomobile'
start_urls = [
'http://www1.hkej.com/dailynews/commentary',#时事评论
'http://www1.hkej.com/dailynews/finnews',#财经新闻
'http://www1.hkej.com/dailynews/politics',#政坛
'http://www1.hkej.com/dailynews/views',#独眼香江
'http://www1.hkej.com/dailynews/cntw',#两岸消息
'http://www1.hkej.com/dailynews/international',#EJGlobal
'http://www1.hkej.com/dailynews/headline',#即时要闻
'http://www1.hkej.com/features/topic/tag/2018%E5%85%A9%E6%9C%83',#两会
]
count = 0
download_delay = 2
# a = "2017-09-27 00:00:00"
# timeArray = time.strptime(a,"%Y-%m-%d %H:%M:%S")
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
links = response.xpath('//div[@id="news-listing-wrapper"]/div/h2/a/@href').extract()
title = response.xpath('//div[@id="news-listing-wrapper"]/div/h2/a/text()').extract()
desc = response.xpath('//div[@id="news-listing-wrapper"]/div/p[2]/text()').extract()
for i in range(1,100):
try:
url = links[i]
if 'http' not in url:
url = 'http://www1.hkej.com' + url
tit =title[i]
# print url
# print tit
try:
describe = desc[i].replace('\t','').replace('\n','').replace('\r','')
# print describe
except:
describe = ''
# print describe
yield Request(url,meta={
'home_url':response.url,
'describe':describe
},callback=self.parse_item)
except:
links = response.xpath('//ul[@class="fea_s_list"]/li/a/@href').extract()
desc = response.xpath('//ul[@class="fea_s_list"]/li/div[1]/a/text()').extract()
for i in range(1,len(links)):
url = links[i]
if 'http' not in url:
url = 'http://www1.hkej.com' + url
try:
describe = desc[i].replace('\t', '').replace('\n', '').replace('\r', '')
# print describe
except:
describe = ''
# print describe
yield Request(url, meta={
'home_url': response.url,
'describe': describe
}, callback=self.parse_item)
def parse_item(self,response):
home_url = response.meta['home_url']
app_name = '信报Mobile'
title = response.xpath('//h1/text()').extract()[0]
publishedDate = response.xpath('//p[@id="date"]/text()').extract()[0].replace(u'年','-').replace(u'月','-').replace(u'日',' ') + '00:00:00'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
pic_url = ''
describe = response.meta['describe']
content = response.xpath('//div[@id="article-detail-wrapper"]').extract()
contentt = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', contentt)
contentdata = ''
for i in range(0, len(content)):
contentdata += content[i]
content = contentdata
author = ''
pic_more_url = ''
if 'commentary' in home_url:
category = u'時事評論'.encode('utf-8')
elif 'finnews' in home_url:
category = u'財經新聞'.encode('utf-8')
elif 'politics' in home_url:
category = u'政壇脈搏'.encode('utf-8')
elif 'views' in home_url:
category = u'獨眼香江'.encode('utf-8')
elif 'cntw' in home_url:
category = u'兩岸消息'.encode('utf-8')
elif 'international' in home_url:
category = u'EJGlobal'.encode('utf-8')
elif '2018' in home_url:
category = u'兩會會議'.encode('utf-8')
else:
category = u'即時新聞'.encode('utf-8')
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
self.count = self.count + 1
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['count'] = self.count
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M:%S")
publishedDate = time.mktime(timeArray)
if publishedDate >= self.timeStamp:
yield item<file_sep>#coding=utf-8
import scrapy
import json,re,time
from news.items import NewsItem
from lxml.etree import HTML
class voayingwen(scrapy.Spider):
name = 'voayingwenwang'
start_urls = [
'https://www.voanews.com/z/599',#US News
'https://www.voanews.com/z/4720',#US Politics
'https://www.voanews.com/z/4720?p=1',
''
]<file_sep>#coding=utf-8
import time,re,json
import scrapy
from news.items import NewsItem
class baidu(scrapy.Spider):
name = 'baidu'
Ttime = int(round(time.time()*1000))
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def start_requests(self):
import requests
url = 'https://news.baidu.com/sn/api/feed_feedlist?pd=newsplus&os=android&sv=7.1.2.0&from=app&_uid=g8SNu0um2ulx8HuKlu2ci0is2tl5aB8o_iSW8_uNSiiOO2tgga2qi_u62ig8uvihA&_ua=_aBDCgaH-i46ywoUfpw1z4aBsiz5aX8D4a2AiqqHB&_ut=5yG_YtM1vC_bhvhJgODpOYhuA&_from=1019026r&_cfrom=1019026r&_network=1_0&cen=uid_ua_ut'
params = {
"ln": "20",
"os": "android",
"display_time": "%s"%self.Ttime,
"from": "app",
"ver": "6",
"withtoppic": "0",
"network": {"wifi_aps": {"ap_mac": "70:05:14:7d:2a:5f", "is_connected": True, "ap_name": "", "rssi": -33},
"ipv4": "172.18.173.37", "cellular_id": "-1", "operator_type": 99, "connection_type": 100},
"pd": "newsplus",
"user_category": "",
"cuid": "3ADAC23BAEBDC750FF38B3810FA334A1|918510050145753",
"action": "0",
"device": {"screen_size": {"height": 1184, "width": 768}, "model": "Nexus 4",
"udid": {"android_id": "6140f143b1a4dd1e", "mac": "70:05:14:7d:2a:5f",
"imei": "357541050015819"}, "vendor": "LGE", "device_type": 1,
"os_version": {"micro": 0, "minor": 4, "major": 4}, "os_type": 1},
"sv": "7.1.2.0",
"gps": '{"timestamp":1528790165,"longitude":"116.365275","coordinate_type":3,"latitude":"39.969771"}',
"mid": "357541050015819_70:05:14:7d:2a:5f",
"loc_ll": "116.365275,39.969771",
"wf": "1",
}
data = requests.post(url, data=params)
data = json.loads(data.content)
data = data['data']['news']
for i in range(len(data)):
title = data[i]['title']
url = data[i]['url']
pubt = data[i]['pulltime']
pubtt = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(float(pubt) / 1000)))
print pubtt
try:
desc = data[i]['abs']
except:
desc = ''
if int(float(pubt) / 1000) >= self.timeStamp:
pubt = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(float(pubt) / 1000)))
yield scrapy.Request(url, meta={
'title': title,
'pubt': pubt,
'desc': desc,
'category':'推荐'
}, callback=self.parse_item)
def parse_item(self, response):
title = response.meta['title']
publishedDate = response.meta['pubt']
describe = response.meta['desc']
app_name = '百度新闻'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
home_url = 'https://news.baidu.com/'
author = ''
pic_url = ''
category = response.meta['category']
content = response.xpath('//p/text()').extract()
contentt = ''
for i in range(len(content)):
contentt += content[i]
content = contentt.replace('\t', '').replace('\n', '').replace('\r', '')
pic_more_url = re.findall('<img src="(.*?)"', response.body)
pic = []
for i in range(len(pic_more_url)):
pic.append(pic_more_url[i])
pic_more_url = str(pic)
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
self.count += 1
item['count'] = self.count
yield item
<file_sep>#coding=utf-8
import time,re,json
import scrapy
from news.items import NewsItem
class baidu(scrapy.Spider):
name = 'zhongyangshe'
start_urls = [
'http://appweb.cna.com.tw/JsonData/CnaApp_2016mobile/firstnews.json',
'http://appweb.cna.com.tw/JsonData/CnaApp_2016mobile/Index_TopNews.json',
'http://appweb.cna.com.tw/JsonData/CnaApp_2016mobile/aipl.json',
'http://appweb.cna.com.tw/JsonData/CnaApp_2016mobile/aopl.json',
'http://appweb.cna.com.tw/JsonData/CnaApp_2016mobile/acn.json',
'http://appweb.cna.com.tw/JsonData/CnaApp_2016mobile/afe.json',
'http://appweb.cna.com.tw/JsonData/CnaApp_2016mobile/video.json',
'http://appweb.cna.com.tw/JsonData/CnaApp_2016mobile/NewsTopic.json',
'http://appweb.cna.com.tw/JsonData/CnaApp_2016mobile/ait.json',
'http://appweb.cna.com.tw/JsonData/CnaApp_2016mobile/ahel.json',
'http://appweb.cna.com.tw/JsonData/CnaApp_2016mobile/asoc.json',
'http://appweb.cna.com.tw/JsonData/CnaApp_2016mobile/aloc.json',
'http://appweb.cna.com.tw/JsonData/CnaApp_2016mobile/acul.json',
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
data = response.body.replace('\t','').replace('\n','').replace('\r','').replace(' ','')
print data
# data = json.loads(response.body.replace('\t','').replace('\n','').replace('\r','').replace(' ',''))
# data = data['NewsItems']
# for i in range(len(data)):
# category = data[i]['ClassName']
# url = data[i]['PageUrl']
# title = data[i]['HeadLine']
# pubt = data[i]['CreateTime']
# try:
# pic = data[i]['Source']
# except:
# pic = ''
# yield scrapy.Request(url,meta={
# 'title':title,
# 'pic':pic,
# 'category':category,
# 'pubt':pubt
# },callback=self.parse_one)
category = re.findall('"ClassName":"(.*?)",',data)
url = re.findall('"PageUrl":"(.*?)",',data)
title = re.findall('"HeadLine":"(.*?)",',data)
pic = re.findall('"Source":"(.*?)",',data)
for i in range(len(url)):
cate = category[i]
tit = title[i]
links = url[i]
picc = pic[i] if i < len(pic) else ''  # guard: not every item carries a "Source" image
yield scrapy.Request(links, meta={
'title':tit,
'pic':picc,
'category':cate
}, callback=self.parse_one, dont_filter=True)
def parse_one(self,response):
title = response.meta['title']
category = response.meta['category']
pic_url = response.meta['pic']
app_name = '中央社'
describe = ''
home_url = 'http://appweb.cna.com.tw/'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
author = ''
try:
content = response.xpath('//p/text()').extract()
contentt = ''
for i in range(len(content)):
contentt += content[i]
content = contentt
except:
content = ''
try:
pic_more_url = response.xpath('//div[@class="newsPhoto"]/a/img/@src').extract()[0]
except:
pic_more_url = ''
pubt = response.xpath('//div[@class="newsTime"]').extract()
pubt = pubt[0].replace('\t','').replace('\n','').replace('\r','')
pubt = re.findall('>(.*?)<',pubt)
publishedDate = ''
for i in range(len(pubt)):
publishedDate += pubt[i]
publishedDate = publishedDate.split('更新')[0]
publishedDate = publishedDate.replace('發稿:','').replace('/','-')
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M")
timenum = int(time.mktime(timeArray))
if timenum >= self.timeStamp:
self.count += 1
item['count'] = self.count
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(timenum)))
item['publishedDate'] = publishedDate
yield item
<file_sep>#coding=utf-8
import scrapy
import re, time, json
from news.items import NewsItem
class nanhuazaobao(scrapy.Spider):
name = 'nanhuazaobao'
start_urls = [
'https://data.scmp.com/api/rest/app/v2/page.json?type=home&id=int&option[image_styles]=750x470%2C250x250%2Csquare%2C750x470_lq%2C250x250_lq%2Csquare_lq&hash=75320813329b2599df4bf9d7f6b9e9a833fe2a77a0e09faa4540eb6fa8e7aa07',
# 'https://data.scmp.com/api/rest/app/v2/page.json?type=home&id=latest&option[image_styles]=750x470%2C250x250%2Csquare%2C750x470_lq%2C250x250_lq%2Csquare_lq&hash=170e31731993ddef96df8a0ea32e4e2d30c156f52f70e00d4bd9f123e623366a',
# 'https://data.scmp.com/api/rest/app/v2/page.json?type=trending&id=pageviews&option[since]=-1days&option[sections]=&option[image_styles]=750x470%2C250x250%2Csquare%2C750x470_lq%2C250x250_lq%2Csquare_lq&hash=e1b89e6866bf51306d8a98eae705a82d6bdd73e9696533e3b3613f12184adcd8',
# 'https://data.scmp.com/api/rest/app/v2/page.json?type=section&id=2&option[image_styles]=750x470%2C250x250%2Csquare%2C750x470_lq%2C250x250_lq%2Csquare_lq&hash=b9a2dc7417c0014596505716570d97cf3609de837e0754a3b017e61b30a3a915',
# 'https://data.scmp.com/api/rest/app/v2/page.json?type=section&id=4&option[image_styles]=750x470%2C250x250%2Csquare%2C750x470_lq%2C250x250_lq%2Csquare_lq&hash=bc3e96e76bcf5c6fc18dbe2726c76ab90965ce5f81bdf78db58480004e5d60b4'
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
links = re.findall('"url":"(.*?)",',response.body)
for i in range(len(links)):
url = links[i]
yield scrapy.Request(url,meta={
'home_url': response.url
}, callback=self.parse_item)
def parse_item(self, response):
try:
title = response.xpath('//h1/text()').extract()[0]
app_name = '南华早报'
describe = ''
author = ''
pic_url = ''
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
home_url = response.meta['home_url']
publishedDate = response.xpath('//div[@class="node-updated"]/span/text()').extract()[0]
publishedDate = publishedDate.replace('UPDATED : ', '').replace('Monday, ', '')
publishedDate = publishedDate.replace('Sunday, ', '').replace('Tuesday, ', '')
publishedDate = publishedDate.replace('Wednesday, ', '').replace('Thursday, ', '')
publishedDate = publishedDate.replace('Friday, ', '').replace('Saturday, ', '')
t = publishedDate.split(',')
t1 = t[0]
t11 = t1.split(' ')[0]
t12 = t1.split(' ')[1]
            # map the English month name (full or abbreviated form, the same
            # spellings as the original chain) onto its month number
            month_map = [
                ('January', '1'), ('Jan', '1'), ('February', '2'), ('Feb', '2'),
                ('March', '3'), ('Mar', '3'), ('April', '4'), ('Apr', '4'),
                ('May', '5'), ('June', '6'), ('July', '7'),
                ('August', '8'), ('Aug', '8'), ('September', '9'), ('Sept', '9'),
                ('October', '10'), ('Oct', '10'), ('November', '11'), ('Nov', '11'),
                ('December', '12'), ('Dec', '12'),
            ]
            tt = ''
            for name, number in month_map:
                if name in t12:
                    tt = number
                    break
t2 = t[1].replace(' ', '')
t3 = t[2].replace(' ', '')
            if 'am' in t3:
                t3 = t3.replace('am', '')
                parts = t3.split(':')
                t3 = str(int(parts[0]) % 12) + ':' + parts[1] + ':00'   # 12:xx am -> hour 0
            elif 'pm' in t3:
                t3 = t3.replace('pm', '')
                parts = t3.split(':')
                t3 = str(int(parts[0]) % 12 + 12) + ':' + parts[1] + ':00'  # 12:xx pm -> hour 12
publishedDate = t2 + '-' + str(tt) + '-' + t11 + ' ' + t3
content = response.xpath('//div[@class="pane-content"]').extract()
content = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', content)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
pic_more_url = ''
if 'id=int' in home_url:
category = 'TOP STORIES'
elif 'id=latest' in home_url:
category = 'Live'
elif 'id=pageviews' in home_url:
category = 'Trending'
elif 'id=2' in home_url:
category = 'HongKong'
elif 'id=4' in home_url:
category = 'China'
else:
category = 'China'
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M:%S")
timenum = int(time.mktime(timeArray))
if timenum >= self.timeStamp:
self.count += 1
item['count'] = self.count
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(timenum)))
item['publishedDate'] = publishedDate
yield item
except:
pass<file_sep># -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import traceback
import pymysql
import redis  # key-value store
import logging
import scrapy_redis_mongodb.settings as settings
from scrapy.utils.project import get_project_settings
import pymongo
from scrapy.exceptions import DropItem
class RedisPipeline(object):
def __init__(self):
        self.redis_table = settings.MY_REDIS  # the redis key used to store crawled urls
        self.redis_db = redis.Redis(host=settings.REDIS_SERVER, port=settings.REDIS_PORT, db=settings.REDIS_DB)  # Redis connection settings
    def process_item(self, item, spider):
        # mark every stored url with its own key so that exists() can spot a
        # repeat later; the list under self.redis_table keeps the original layout
        if self.redis_db.exists(item['url']):
            raise DropItem('%s id exists!!' % (item['url']))
        else:
            self.redis_db.set(item['url'], 1)
            self.redis_db.lpush(self.redis_table, item['url'])
            return item
class MgdbPipeline(object):
    '''MongoDB connection setup'''
    # Connection option 1: build the URI from host/port settings
    # def __init__(self):
    #     self.conn = pymongo.MongoClient("mongodb://{}:{}/".format(settings.MONGODB_SERVER,settings.MONGODB_PORT))
    #     self.db = self.conn[settings.MONGODB_DB]  # select the database
    #     self.MG_table = self.db[settings.MONGODB_COLLECTION]  # select the collection
def __init__(self):
        self.mongo_config = settings.MONGODB_URI  # connection URI taken straight from the settings module
self.conn = pymongo.MongoClient(self.mongo_config)
        self.db = self.conn[settings.MONGODB_DB]  # select the database
        self.MG_table = self.db[settings.MONGODB_COLLECTION]  # select the collection
    def process_item(self, item, spider):
        print item
        return item  # a pipeline must hand the item back so later pipelines still receive it
# if self.site_item_exist(item):
# self.MG_table.insert(dict(item))
# logging.debug("Question added to MongoDB database!")
# log.msg("Question added to MongoDB database!", level=log.DEBUG, spider=spider)
# '''
        # Scrapy provides 5 logging levels:
        # CRITICAL - critical errors
        # ERROR - regular errors
        # WARNING - warning messages
        # INFO - informational messages
        # DEBUG - debugging messages (the level used by this pipeline)
#
# '''
# else:
# raise DropItem("{} is exist".format(item['url']))
# return item
# def site_item_exist(self, item):
# if self.MG_table.find_one({"url": item['url']}):
# return False
# else:
# return True
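
# --- Illustrative sketch, not part of the original pipeline ---------------
# The commented-out block above deduplicates by calling find_one() before
# insert().  A minimal alternative, assuming pymongo 3.x and a unique index
# on "url", is to let a single upsert do both steps atomically:
def upsert_item(collection, item):
    """Hypothetical helper: insert or refresh a crawled item keyed by its url."""
    collection.update_one({"url": item["url"]}, {"$set": dict(item)}, upsert=True)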
class Mysqlpipine(object):
def __init__(self):
        self.connect = pymysql.connect(host=settings.MYSQL_SERVER, port=settings.MYSQL_PORT, user=settings.MYSQL_USER, password=settings.MYSQL_PASSWORD, db=settings.MYSQL_TABLE, charset='utf8')  # MySQL connection settings
self.cursor = self.connect.cursor()
def process_item(self, item, spider):
print item
url = item['url']
title = item['title']
place = item['place']
types = item['types']
num = item['num']
sql = 'insert into zhicheng(url, title, place, types, num) VALUES(%s, %s, %s, %s, %s)'
params = [url, title, place, types, num]
        try:
            self.cursor.execute(sql, params)
            self.connect.commit()
        except Exception as e:
            print e
            self.connect.rollback()
        return item

    def close_spider(self, spider):
        # close the MySQL connection once, when the spider shuts down
        self.connect.close()<file_sep>#coding=utf-8
import scrapy
import time
from news.items import NewsItem
class twpg(scrapy.Spider):
name = 'lutouxinwen'
allowed_domains = ['cn.reuters.com']
start_urls = [
        'https://cn.reuters.com/news/archive/CNTopGenNews?view=page&page=1&pageSize=10',  # top news
        'https://cn.reuters.com/news/archive/CNTopGenNews?view=page&page=2&pageSize=10',  # top news
        'https://cn.reuters.com/news/archive/CNAnalysesNews?view=page&page=1&pageSize=10',  # in-depth analysis
        'https://cn.reuters.com/news/archive/CNAnalysesNews?view=page&page=2&pageSize=10',  # in-depth analysis
        'https://cn.reuters.com/news/archive/topic-cn-lifestyle?view=page&page=1&pageSize=10',  # lifestyle
        'https://cn.reuters.com/news/archive/topic-cn-lifestyle?view=page&page=2&pageSize=10',  # lifestyle
        'https://cn.reuters.com/news/archive/companyNews?view=page&page=1&pageSize=10',  # investment
        'https://cn.reuters.com/news/archive/companyNews?view=page&page=2&pageSize=10',  # investment
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
links_url = response.xpath('//article[@class="story "]/div[@class="story-content"]/a/@href').extract()
title = response.xpath('//article[@class="story "]/div[@class="story-content"]/a/h3/text()').extract()
summary = response.xpath('//article[@class="story "]/div[@class="story-content"]/p/text()').extract()
pic_url = response.xpath('//article[@class="story "]/div[1]/a/img/@src').extract()
for i in range(0, len(links_url)):
url = 'https://cn.reuters.com/news' + links_url[i]
try:
pic_url1 = pic_url[i]
except:
pic_url1 = ''
yield scrapy.Request(url, meta={
'title': title[i].replace('\t', '').replace('\n', '').replace('\r', ''),
'summary': summary[i].replace('\t', '').replace('\n', '').replace('\r', ''),
'home_url': response.url,
'pic_url': pic_url1
}, callback=self.parse_item)
def parse_item(self, response):
title = response.meta['title']
describe = response.meta['summary']
home_url = response.meta['home_url']
app_name = u'路透新闻'.encode('utf-8')
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
pic_url = response.meta['pic_url']
content = response.xpath('//meta[@name="description"]/@content').extract()
content = content[0]
pubTime = response.xpath('//meta[@name="analytics<EMAIL>"]/@content').extract()
tt = response.xpath('//div[@class="date_V9eGk"]/text()').extract()[0]
t1 = tt.split('/')[0]
t2 = tt.split('/')[1]
t3 = t1 + t2
t3 = t3.replace(' ', ' ').replace('AM', '').replace('PM', '').replace(' ','')
timeStruct = time.strptime(t3, "%B %d, %Y %H:%M")
pubTime = time.strftime("%Y-%m-%d %H:%M:%S", timeStruct)
print pubTime
# pubTime = pubTime[0].split('T')[0]
pic_more_url = ''
author = ''
if 'CNTopGenNews' in home_url:
category = u'要闻'.encode('utf-8')
elif 'CNAnalysesNews' in response.url:
category = u'深度分析'.encode('utf-8')
elif 'topic-cn-lifestyle' in response.url:
category = u'生活'.encode('utf-8')
elif 'companyNews' in response.url:
category = u'投资'.encode('utf-8')
else:
category = u'实时资讯'.encode('utf-8')
publishedDate = pubTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
# print publishedDate
# print pubTime
# t = pubTime.split(' ')[0]
timeArray = time.strptime(pubTime, "%Y-%m-%d %H:%M:%S")
publishedDate = time.mktime(timeArray)
if publishedDate >= self.timeStamp:
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(publishedDate)))
item['publishedDate'] = publishedDate
self.count = self.count + 1
item['count'] = self.count
yield item
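
# --- Illustrative note, not part of the original spider -------------------
# Stripping "AM"/"PM" before strptime() above means an afternoon stamp is
# read as a morning one.  %I (12-hour clock) together with %p keeps that
# information; a minimal sketch, assuming a "10:30 PM"-style clock string:
def to_24_hour(clock, meridiem):
    """Hypothetical helper: ('10:30', 'PM') -> '22:30:00'."""
    import time
    return time.strftime("%H:%M:%S",
                         time.strptime(clock + ' ' + meridiem, "%I:%M %p"))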
<file_sep>#coding=utf-8
import scrapy
import json,re
import time
from news.items import NewsItem
class xiangangyizhoukan(scrapy.Spider):
name = 'xianggangyizhoukan'
start_urls = [
'http://www.nextdigital.com.hk/'
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
links = response.xpath('//ul[@class="blk_realtime"]/li/a/@href').extract()
title = response.xpath('//ul[@class="blk_realtime"]/li/a/img/@alt').extract()
for i in range(len(links)):
url = links[i]
yield scrapy.Request(url,meta={
'title':title[i]
},callback=self.parse_item)
links = response.xpath('//ul[@class="blk_daily"]/li/a/@href').extract()
title = response.xpath('//ul[@class="blk_daily"]/li/a/img/@alt').extract()
for i in range(len(links)):
url = links[i]
yield scrapy.Request(url, meta={
'title': title[i]
}, callback=self.parse_item)
links = response.xpath('//ul[@class="blk_weekly"]/li/a/@href').extract()
title = response.xpath('//ul[@class="blk_daily"]/li/a/img/@alt').extract()
for i in range(len(links)):
url = links[i]
yield scrapy.Request(url, meta={
'title': title[i]
}, callback=self.parse_item)
def parse_item(self,response):
app_name = '香港壹周刊'
pic_url = ''
describe = ''
title = response.meta['title']
publishedDate = response.xpath('//span[@class="last_update"]/text()').extract()[0]
content = response.xpath('//p/text()').extract()
contentt = ''
for i in range(len(content)):
contentt += content[i]
content = contentt
author = ''
category = '今日'
pic_more_url = ''
home_url = 'http://www.nextdigital.com.hk/'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
self.count += 1
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
item['count'] = self.count
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M")
timeStamp = int(time.mktime(timeArray))
if timeStamp >= self.timeStamp:
item['publishedDate'] = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(timeStamp)))
yield item
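
# --- Illustrative refactor, not part of the original spider ---------------
# parse() above repeats the same extract-and-request loop for the
# "blk_realtime", "blk_daily" and "blk_weekly" lists.  A minimal sketch of
# the same logic driven by the class names; zip() also avoids an IndexError
# when the two lists differ in length:
def iter_block_requests(response, callback):
    """Hypothetical helper: yield one Request per story in the three blocks."""
    for blk in ('blk_realtime', 'blk_daily', 'blk_weekly'):
        links = response.xpath('//ul[@class="%s"]/li/a/@href' % blk).extract()
        titles = response.xpath('//ul[@class="%s"]/li/a/img/@alt' % blk).extract()
        for link, story_title in zip(links, titles):
            yield scrapy.Request(link, meta={'title': story_title}, callback=callback)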
<file_sep>#-*- coding: utf-8 -*-
class Configuration():
host = "192.168.1.184"
user = "taseUser"
passwd = "<PASSWORD>"
db = "test"<file_sep>#coding=utf-8
import scrapy
import time
import re
from news.items import NewsItem
class dtw(scrapy.Spider):
name='dongtaiwang'
start_urls=[
'http://dongtaiwang.com/loc/phome.php?v=0'
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
links_url=response.xpath('//div[@class="content_list"]/li/a/@href').extract()
title = response.xpath('//div[@class="content_list"]/li/a/text()').extract()
for i in range(0,len(links_url)):
url = links_url[i]
yield scrapy.Request(url,meta={
'title':title[i],
'home_url':response.url
},callback=self.parse_item,dont_filter=True)
def parse_item(self,response):
title=response.meta['title']
home_url=response.meta['home_url']
app_name='动态网'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
pic_url=''
describe = ''
author = ''
try:
content=response.xpath('//div[@id="ar_bArticleContent"]').extract()
contentt = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', contentt)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
except:
content=response.xpath('//p').extract()
contentdata=''
for i in range(0,len(content)):
contentdata+=content[i]
content=contentdata
content = content.replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', content)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
pic_more_url=''
if 'nsc' in response.url:
category = u'中国要闻'.encode('utf-8')
elif 'prog' in response.url:
category = u'明慧要闻'.encode('utf-8')
elif 'dweb' in response.url:
category = u'明慧新闻'.encode('utf-8')
elif 'gb' in response.url:
category = u'明慧新闻'.encode('utf-8')
elif 'mh' in response.url:
category = u'明慧要闻'.encode('utf-8')
else:
category = u'中国要闻'.encode('utf-8')
try:
try:
pubTime=response.xpath('//div[@class="mbottom10 large-12 medium-12 small-12 columns"]/time/text()').extract()[0]
pubTime=pubTime.replace(u'更新: ','').replace('PM','').replace('AM','').replace('\t','').replace('\n','').replace('\r','')
except:
pubTime=response.xpath('//div[@class="art-head"]/span/text()').extract()
pubTime=pubTime[0].split(' ')[0].replace(u'年','-').replace(u'月','-').replace(u'日','')
except:
pubTime = str(time.strftime("%Y-%m-%d"))
        publishedDate = pubTime
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
print pubTime
t = pubTime.split(' ')[0]
timeArray = time.strptime(t, "%Y-%m-%d")
publishedDate = time.mktime(timeArray)
if publishedDate >= self.timeStamp:
self.count += 1
item['count'] = self.count
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(publishedDate)))
item['publishedDate'] = publishedDate
yield item
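
# --- Illustrative helper, not part of the original spider -----------------
# The if/elif chain in parse_item() maps URL keywords onto category names.
# A table-driven sketch of the same idea; an ordered list of pairs is used
# because the first matching keyword wins, exactly as in the chain above:
def category_from_url(url, rules, default):
    """Return the category of the first keyword found in url, else default."""
    for keyword, category in rules:
        if keyword in url:
            return category
    return default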
<file_sep># -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
from scrapy import signals
import json
import codecs
import os
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
import time
class NewsPipeline(object):
def process_item(self, item, spider):
time_str = time.strftime("%Y-%m-%d")
fileName = str(time_str) + '.json'
items_title = item['title']
items_time = item['publishedDate']
items_cate = item['category']
items_appname = item['app_name']
items_content = item['content']
itemss = {
"appname":items_appname,
# "appurl":item['url'],
"apptitle":items_title,
'apptimes':items_time,
# 'appcategory':items_cate,
# 'apptontent':items_content
}
# ttt = 'https://1172.16.31.10:2181/pro0912/insertDataToFuckEs.htm?app_name=人民日报&content_url=' + item['url']+ '&content_type=首页新闻&title=' + item['title'] + '&content=' + item['content'] + '&pubTime=' + items_time
with open(fileName, 'a+') as fp:
line = json.dumps(dict(itemss), ensure_ascii=False) + '\n'
fp.write(line)
return item
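
# --- Illustrative note, not part of the original pipeline -----------------
# The pipeline above appends one JSON object per line (JSON Lines) to a file
# named after the current date.  A minimal sketch of reading such a file
# back, assuming UTF-8 encoded content:
def load_items(file_name):
    """Hypothetical helper: return the items stored in a JSON Lines file."""
    import json
    items = []
    with open(file_name) as fp:
        for line in fp:
            line = line.strip()
            if line:
                items.append(json.loads(line))
    return items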
<file_sep>#coding=utf-8
import scrapy
import json, re, time
from news.items import NewsItem
import urllib2,urllib,sys
class chaorixinwen(scrapy.Spider):
handle_httpstatus_list = [401]
name = 'chaorixinwen'
def start_requests(self):
num = [
'national', 'politics', 'eco', 'culture', 'tech_science', 'international', 'business'
]
category = [
'社会', '政治', '经济', '文化', '科学', '国际', '商业'
]
# headers = {
# "Authorization ContentType": "<KEY>",
# "Accept-Encoding": "gzip"
# }
for i in range(len(num)):
url = 'http://119.23.19.90:5000/news?category=%s/list'%num[i]
yield scrapy.Request(url, meta={
'category': category[i]
}, callback=self.parse)
def parse(self, response):
data = json.loads(response.body)
print data<file_sep>#coding=utf-8
import time,re,json
from news.items import NewsItem
import scrapy
class huanqiu(scrapy.Spider):
name = 'huanqiushibao'
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def start_requests(self):
url = [
'http://api.hqtime.huanqiu.com/api/news/list/general/hot/1',
'http://api.hqtime.huanqiu.com/api/news/list/general/hot/2',
'http://api.hqtime.huanqiu.com/api/news/list/general/international/1',
'http://api.hqtime.huanqiu.com/api/news/list/general/international/2',
'http://api.hqtime.huanqiu.com/api/news/list/general/military/1',
'http://api.hqtime.huanqiu.com/api/news/list/general/military/2',
'http://api.hqtime.huanqiu.com/api/news/list/general/taihai/1',
'http://api.hqtime.huanqiu.com/api/news/list/general/taihai/2',
'http://api.hqtime.huanqiu.com/api/news/list/general/overseas/1',
'http://api.hqtime.huanqiu.com/api/news/list/general/overseas/2',
'http://api.hqtime.huanqiu.com/api/news/list/general/finance/1',
'http://api.hqtime.huanqiu.com/api/news/list/general/finance/2'
]
params = {
"clientversion": "v1"
}
for i in range(len(url)):
yield scrapy.Request(url[i], headers=params, callback=self.parse, dont_filter=True)
def parse(self, response):
data = json.loads(response.body)
data = data['data'][0]['group_data']
for i in range(len(data)):
url = data[i]['source_url']
title = data[i]['title']
pubt = data[i]['time_publish']
if float(pubt) >= self.timeStamp:
yield scrapy.Request(url, meta={
'title':title,
'pubt':pubt,
'home_url':response.url
}, callback=self.parse_item)
def parse_item(self, response):
title = response.meta['title']
publishedDate = response.meta['pubt']
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(publishedDate)))
app_name = '环球时报'
author = ''
describe = ''
pic_url = ''
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
home_url = response.meta['home_url']
if 'hot' in home_url:
category = '热点'
elif 'international' in home_url:
category = '国际'
elif 'military' in home_url:
category = '军事'
elif 'taihai' in home_url:
category = '台湾'
elif 'overseas' in home_url:
category = '海外看中国'
elif 'finance' in home_url:
category = '财经'
else:
category = '热点'
try:
content = response.xpath('//div[@id="articleText"]').extract()
content = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', content)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
except:
content = response.xpath('//p').extract()
contentt = ''
for i in range(len(content)):
contentt += content[i]
content = contentt.replace('<p>', '').replace('</p>', '').replace(' ', '').replace('\t', '')
content = content.replace('\n', '').replace('\r', '')
# content = re.findall('>(.*?)<', content)
# contentdata = ''
# for i in content:
# contentdata += i
# content = contentdata
pic_more_url = ''
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
yield item<file_sep>#coding=utf-8
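
# --- Illustrative helper, not part of the original spider -----------------
# parse_item() above first tries the article container and falls back to
# every <p> node.  A minimal sketch of that fallback as one function,
# assuming the primary xpath selects element nodes (not text()):
def extract_text(response, primary_xpath):
    """Join the text of primary_xpath, or of all <p> tags as a fallback."""
    import re
    parts = response.xpath(primary_xpath).extract()
    if not parts:
        parts = response.xpath('//p').extract()
    html = ''.join(parts).replace('\t', '').replace('\n', '').replace('\r', '')
    return ''.join(re.findall('>(.*?)<', html))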
import time,re,json
import scrapy
from news.items import NewsItem
class baidu(scrapy.Spider):
name = 'baiduxinwen'
Ttime = int(round(time.time()*1000))
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def start_requests(self):
url = 'https://news.baidu.com/sn/api/feed_channellist?pd=newsplus&os=android&sv=7.1.2.0&from=app&_uid=g8SNu0um2ulx8HuKlu2ci0is2tl5aB8o_iSW8_uNSiiOO2tgga2qi_u62ig8uvihA&_ua=_aBDCgaH-i46ywoUfpw1z4aBsiz5aX8D4a2Ai6DDB&_ut=5yG_YtM1vC_bhvhJgODpOYhuA&_from=1019026r&_cfrom=1019026r&_network=1_0&cen=uid_ua_ut'
paramsnum = ['本地','科技','财经','国内','国际']
datanum = ['102','8','6','2','1']
for i in range(len(paramsnum)):
params = {
"cuid": "3ADAC23BAEBDC750FF38B3810FA334A1|918510050145753",
"category_name": "%s"%paramsnum[i],
"display_time": "%s"%self.Ttime,
"action": "0",
"category_id": "%s"%datanum[i],
"ver": "6",
"loc_ll": "116.365283,39.969771",
"mid": "357541050015819_70:05:14:7d:2a:5f",
"wf": "1"
}
yield scrapy.FormRequest(url, meta={
'category': paramsnum[i],
'datatime':datanum[i]
}, formdata=params, callback=self.parse)
def parse(self, response):
data = json.loads(response.body)
data = data['data']['news']
num = 0
timett = data[len(data)-1]['pulltime']
for i in range(len(data)):
title = data[i]['title']
url = data[i]['url']
pubt = data[i]['pulltime']
try:
desc = data[i]['abs']
except:
desc = ''
if int(float(pubt)/1000) >= self.timeStamp:
num += 1
pubt = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(float(pubt)/1000)))
yield scrapy.Request(url, meta={
'title': title,
'pubt': pubt,
'desc': desc,
'category': response.meta['category']
}, callback=self.parse_item)
if num > 0:
numtime = response.meta['datatime']
url = 'https://news.baidu.com/sn/api/feed_channellist?pd=newsplus&os=android&sv=7.1.2.0&from=app&_uid=g8SNu0um2ulx8HuKlu2ci0is2tl5aB8o_iSW8_uNSiiOO2tgga2qi_u62ig8uvihA&_ua=_aBDCgaH-i46ywoUfpw1z4aBsiz5aX8D4a2Ai6DDB&_ut=5yG_YtM1vC_bhvhJgODpOYhuA&_from=1019026r&_cfrom=1019026r&_network=1_0&cen=uid_ua_ut'
params = {
"cuid": "3ADAC23BAEBDC750FF38B3810FA334A1|918510050145753",
"category_name": "%s" % response.meta['category'],
"display_time": "%s"%timett ,
"action": "0",
"category_id": "%s" %numtime,
"ver": "6",
"loc_ll": "116.365283,39.969771",
"mid": "357541050015819_70:05:14:7d:2a:5f",
"wf": "1"
}
yield scrapy.FormRequest(url, meta={
'category': response.meta['category'],
'datatime': response.meta['datatime']
}, formdata=params, callback=self.parse)
def parse_item(self, response):
title = response.meta['title']
publishedDate = response.meta['pubt']
describe = response.meta['desc']
app_name = '百度新闻'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
home_url = 'https://news.baidu.com/'
author = ''
pic_url = ''
category = response.meta['category']
content = response.xpath('//p/text()').extract()
contentt = ''
for i in range(len(content)):
contentt += content[i]
content = contentt.replace('\t', '').replace('\n', '').replace('\r', '')
pic_more_url = re.findall('<img src="(.*?)"', response.body)
pic = []
for i in range(len(pic_more_url)):
pic.append(pic_more_url[i])
pic_more_url = str(pic)
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
self.count += 1
item['count'] = self.count
yield item
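
# --- Illustrative note, not part of the original spider -------------------
# The feed's "pulltime" field is in milliseconds, which is why the code
# above divides it by 1000 before comparing with the midnight timestamp.
# The same conversion as a small helper:
def ms_to_datestr(pulltime_ms):
    """Hypothetical helper: millisecond epoch -> '%Y-%m-%d %H:%M:%S' string."""
    import time
    return time.strftime("%Y-%m-%d %H:%M:%S",
                         time.localtime(int(float(pulltime_ms) / 1000)))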
<file_sep>#coding=utf-8
from news.items import NewsItem
import scrapy
import time,re,json
class dw(scrapy.Spider):
name = 'deguozhisheng'
start_urls = [
        'http://www.dw.com/zh/%E5%9C%A8%E7%BA%BF%E6%8A%A5%E5%AF%BC/%E6%97%B6%E6%94%BF%E9%A3%8E%E4%BA%91/s-1681?&zhongwen=simp',  # politics (s-1681)
        'http://www.dw.com/zh/%E5%9C%A8%E7%BA%BF%E6%8A%A5%E5%AF%BC/%E7%BB%8F%E6%B5%8E%E7%BA%B5%E6%A8%AA/s-1682?&zhongwen=simp',  # economy (s-1682)
        'http://www.dw.com/zh/%E5%9C%A8%E7%BA%BF%E6%8A%A5%E5%AF%BC/%E6%96%87%E5%8C%96%E7%BB%8F%E7%BA%AC/s-1683?&zhongwen=simp',  # culture (s-1683)
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
links = response.xpath('//div[@id="bodyContent"]/div[1]/div/div/div/a/@href').extract()
        title = response.xpath('//div[@id="bodyContent"]/div[1]/div/div/div/a/h2/text()').extract()
# pic = response.xpath('////div[@id="bodyContent"]/div[1]/div/div/div/a/div[1]/img/@src').extract()
# desc = response.xpath('//div[@id="bodyContent"]/div[1]/div/div/div/a/p/text()').extract()
for i in range(len(links)):
url = 'http://www.dw.com' + links[i]
tit = title[i].replace('\t', '').replace('\n', '').replace('\r', '')
yield scrapy.Request(url, meta={
'title': tit,
'home_url': response.url
}, callback=self.parse_item, dont_filter=True)
def parse_item(self, response):
title = response.meta['title']
home_url = response.meta['home_url']
pic_url = ''
describe = ''
app_name = '德国之声'
pubt = response.xpath('//div[@class="group"]/ul/li[1]').extract()[0]
publishedDate = pubt.replace('\t', '').replace('\n', '').replace('\r', '').replace('<strong>日期</strong>', '').replace('<li>', '').replace('</li>', '')
t = publishedDate.split('.')
t1 = t[0]
t2 = t[1]
t3 = t[2]
publishedDate = t3 + '-' + t2 + '-' + t1 + ' 00:00:00'
try:
author = response.xpath('//div[@class="group"]/ul/li[2]').extract()[0]
author = author.replace('\t', '').replace('\n', '').replace('\r', '').replace('<strong>作者</strong>', '').replace('<li>', '').replace('</li>', '')
except:
author = ''
author = ''
content = response.xpath('//div[@class="col3"]/div[@class="group"]').extract()
content = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', content)
contentdata = ''
for i in range(0, len(content)):
contentdata += content[i]
content = contentdata
pic_more_url = ''
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
if 's-1681' in home_url:
category = '时政风云'
elif 's-1682' in home_url:
category = '经济纵横'
else:
category = '文化经纬'
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M:%S")
timenum = int(time.mktime(timeArray))
if timenum >= self.timeStamp:
self.count += 1
item['count'] = self.count
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(timenum)))
item['publishedDate'] = publishedDate
yield item
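
# --- Illustrative note, not part of the original spider -------------------
# parse_item() rebuilds the "DD.MM.YYYY" date by splitting on dots.  The
# same conversion through strptime/strftime, as a minimal sketch:
def dw_date_to_iso(raw):
    """Hypothetical helper: '28.09.2017' -> '2017-09-28 00:00:00'."""
    import time
    return time.strftime("%Y-%m-%d 00:00:00", time.strptime(raw.strip(), "%d.%m.%Y"))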
<file_sep>#-*- coding: utf-8 -*-
from scrapy.spiders import Spider
from scrapy.selector import Selector
from scrapy.http import Request
from scrapy.http import FormRequest
from urlparse import urljoin
import scrapy
import json
from news.DataResource import TransportData
import time
import re
from news.items import NewsItem
class Shangguan(Spider):
name = "shangguanxinwen"
app_name="上观新闻"
allowed_domains = ["services.shobserver.com"]
base_url = ""
count = 0
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str,"%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
# existing_title = TransportData.getData('uc头条')
def start_requests(self):
k = ['sectionid', 'subsectionid', 'section', 'sign', 'times']
v = [
('1', '1,2,3', '政情', '9eec20d8e2f6bcafec80252fd5373dae', '1505287208975'),
('2', '13,14,15', '财经', '3c9446daa0373678c415f31340dabec0', '1505366899027'),
('35', '22,23,24', '区情', '0286ba8f3271622916cc73463636d33e', '1505366947969'),
('22', '29,30,31', '城事', '9a7079741240fb51b5a12369673324fc', '1505366950899'),
('4', '40,41,42', '文化', '09abef27619d3df33528559dbe08fa3a', '1505366953125'),
('21', '53,82,54', '天下', '456b6880eaa1e85a581c9e93aba9430c', '1505366955628'),
('40', '64,65,66', '互动', 'd27adb160731de03d62044379778e4c1', '1505366957610'),
('41', '95,96,97', '视觉', '8ce834e61847f77e42be05dfd89e959b', '1505366959761')
]
dicts = [dict(zip(k, values)) for values in v]
for i in range(len(dicts)):
pages = dicts[i]
url = "http://services.shobserver.com/news/get/sectionidWithNidPtime?platform=2&pagesize=10&subsectionid=%s&versionCode=440&page=1&sign=%s§ionid=%s×=%s"%\
(pages.get("subsectionid"),pages.get("sign"),pages.get("sectionid"),pages.get("times"))
yield Request(
url,
meta={"sectionname":pages.get("section"),
"sectionid":pages.get("sectionid"),
"subsectionid":pages.get("subsectionid"),
"sign":pages.get("sign"),
"times":pages.get("times"),
"page":1},
callback=self.parse
)
def parse(self, response):
        # build the listing request for each section/category
results = json.loads(response.body)
object = results["object"]
newsList=object["newsList"]
totalpage = object["totalpage"]
total = object["total"]
sectionname=response.meta["sectionname"]
acceptable_title = []
for i in range(len(newsList)):
title = newsList[i]["title"]
if title:
title = title.encode('utf-8')
else:
title = ''
# if title not in self.existing_title:
acceptable_title.append(title)
# summary = newsList[i]["summary"]
id = newsList[i]["id"]
# writer=newsList[i]["writerName"]
writer= ''
Link="http://services.shobserver.com/news/viewNewsDetail?id=%s&versionCode=440&platform=2&uid=0"%id
yield Request(
url=Link,
meta={"title": title,'summary':'',"writer":writer,"Link":Link,"type":sectionname},
callback=self.content_parse
)
        # next page
sectionid=response.meta["sectionid"]
subsectionid=response.meta["subsectionid"]
sign=response.meta["sign"]
times=response.meta["times"]
page=response.meta["page"]
page += 1
# if page<=totalpage:
if page <= 50 :
url = "http://services.shobserver.com/news/get/sectionidWithNidPtime?platform=2&pagesize=10&subsectionid=%s&versionCode=440&page=%s&sign=%s§ionid=%s×=%s" % (
subsectionid,page,sign, sectionid, times)
yield Request(
url,
meta={"sectionname": sectionname,
"sectionid": sectionid,
"subsectionid":subsectionid,
"sign": sign,
"times": times,
"page":page},
callback=self.parse
)
def content_parse(self, response):
import time
app_name = "上观新闻"
title=response.meta['title']
summary=response.meta['summary']
content_url=response.meta['Link']
content_type=response.meta['type']
writer=response.meta['writer']
viewCount=0
home_url="http://services.shobserver.com/news/get/homepage"
crawlTime=time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(time.time()))
hxs = Selector(response)
picmore_url=hxs.xpath('//img/@src').extract()
if picmore_url:
pic_url = picmore_url[0].encode('utf-8')
picmore_url = set(picmore_url)
else:
picmore_url = set()
pic_url = ''
content = hxs.xpath('//div[@class="news-content"]//text()').extract()
if content:
content = "".join(content)
content = (re.sub(r'\s+', '', content)).strip()
content = content.encode("utf-8")
else:
content = ''
pubTime= hxs.xpath('//span[@class="news-attr publish-time"]//text()').extract()
if pubTime:
pubTime = pubTime[0].encode('utf-8')
else:
pubTime = ''
# print pubTime
try:
a = time.strptime(pubTime, "%Y-%m-%d %H:%M:%S")
except:
a = time.strptime(pubTime, "%Y-%m-%d %H:%M")
        # convert the parsed time to an epoch timestamp
a = time.mktime(a)
# print self.timeStamp
if int(a) > int(self.timeStamp):
self.count = self.count + 1
print self.count
print app_name
print pic_url
print picmore_url
print writer
print content_url
print content_type
print viewCount
print title
print summary
print content
print home_url
print pubTime
print crawlTime
author = writer
if author:
author = author.encode('utf-8')
else:
author = ""
category = content_type
describe = summary
describe = describe.encode('utf-8')
            # only append seconds when the page time lacks them
            publishedDate = pubTime if pubTime.count(':') >= 2 else pubTime + ':00'
pic_more_url = picmore_url
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['count'] = self.count
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
# yield item
# if author:
# author = author.encode('utf-8')
# else:
# author = ""
# TransportData.transport_data(app_name, pic_url, pic_more_url, author, response.url, category, title,
# describe, content, home_url, publishedDate, crawlTime)
exsit_title = TransportData.getData("app_shangguanxinwen", title)
if exsit_title:
return
else:
yield item
TransportData.transport_data("app_shangguanxinwen", title, publishedDate)
<file_sep># -*- coding: utf-8 -*-
import sys
if sys.getdefaultencoding() != 'utf-8':
reload(sys)
sys.setdefaultencoding('utf-8')
import scrapy
import time
import json
from ..items import ScrapyRedisMongodbItem
class XpathRule(object):
total_page = "//div[@class='pagenav']/a/text()"
urlist = "//table[@class='tablelist']//tr[@class='even']//a/@href|//table[@class='tablelist']//tr[@class='odd']//a/@href"
class CctvSpider(scrapy.Spider):
name = "tencent"
custom_settings = {
'ITEM_PIPELINES': {
'scrapy_redis_mongodb.pipelines.RedisPipeline': 300
}}
start_urls = ['http://hr.tencent.com/position.php']
def parse(self, response):
try:
total_page = response.xpath(XpathRule.total_page).extract()[-2]
except:
return
for p in xrange(int(total_page)):
url = "http://hr.tencent.com/position.php?&start={}#a".format((p)*10)
yield scrapy.Request(url, self.parse_detail)
def parse_detail(self, response):
urls = response.xpath(XpathRule.urlist).extract()
for url in urls:
item = ScrapyRedisMongodbItem()
item['url'] = "http://hr.tencent.com/"+url
yield item
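
# --- Illustrative note, not part of the original spider -------------------
# parse() reads the page count from the pager and requests each listing page
# with a start offset of page_index * 10.  The same URL construction as a
# small helper:
def page_urls(total_page):
    """Hypothetical helper: build the paged listing URLs requested by parse()."""
    return ["http://hr.tencent.com/position.php?&start={}#a".format(p * 10)
            for p in xrange(int(total_page))]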
<file_sep>#-*- coding: utf-8 -*-
from scrapy.spider import Spider
import scrapy
from urlparse import urljoin
from scrapy.selector import Selector
from scrapy.http import Request
import time
import json
#from selenium import selenium
import re
import sys
# full crawl of every front-page section
from news.DataResource import TransportData
from news.items import NewsItem
reload(sys)
sys.setdefaultencoding('utf8')
class lwl(Spider):
name='rmrbdata'
allowed_domains=['people.com.cn']
start_urls=[
        'http://world.people.com.cn/',  # international
        'http://finance.people.com.cn/',  # finance
        'http://tw.people.com.cn/',  # Taiwan
        'http://military.people.com.cn/',  # military
        'http://opinion.people.com.cn/',  # opinion
        'http://politics.people.com.cn/',  # politics
        'http://leaders.people.com.cn/',  # leadership
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
base_url = str(response.url).split('/index')[0]
try:
try:
try:
title = response.xpath('//div[@class=" hdNews clearfix"]/p/strong/a/text()').extract()
links_url = response.xpath('//div[@class=" hdNews clearfix"]/p/strong/a/@href').extract()
describe = response.xpath('//div[@class=" hdNews clearfix"]/p/em/a/text()').extract()
tt = title[0]
for j in range(0,len(title)):
if 'http' not in links_url[j]:
url = base_url + links_url[j]
else:
url = links_url[j]
yield scrapy.Request(url, meta={
'title': title[j],
'describe': describe[j],
'home_url': response.url
}, callback=self.parse_item,dont_filter=True)
except:
title = response.xpath('//div[@class="hdNews clearfix"]/p/strong/a/text()').extract()
links_url = response.xpath('//div[@class="hdNews clearfix"]/p/strong/a/text()').extract()
describe = response.xpath('//div[@class="hdNews clearfix"]/p/em/a/text()').extract()
tt = title[0]
for j in range(0, len(title)):
if 'http' not in links_url[j]:
url = base_url + links_url[j]
else:
url = links_url[j]
yield scrapy.Request(url, meta={
'title': title[j],
'describe': describe[j],
'home_url': response.url
}, callback=self.parse_item,dont_filter=True)
except:
# print "第三次"
title = response.xpath('//div[@class=" hdNews clearfix"]/div/h5/a/text()').extract()
links_url = response.xpath('//div[@class=" hdNews clearfix"]/div/h5/a/@href').extract()
describe = response.xpath('//div[@class=" hdNews clearfix"]/div/em/a/text()').extract()
tt = title[0]
for j in range(0, len(title)):
if 'http' not in links_url[j]:
url = base_url + links_url[j]
else:
url = links_url[j]
yield scrapy.Request(url, meta={
'title': title[j],
'describe': describe[j],
'home_url': response.url
}, callback=self.parse_item,dont_filter=True)
except:
# print "第四次"
title = response.xpath('//div[@class="hdNews clearfix"]/div/h5/a/text()').extract()
links_url = response.xpath('//div[@class="hdNews clearfix"]/div/h5/a/@href').extract()
describe = response.xpath('//div[@class="hdNews clearfix"]/div/em/a/text()').extract()
tt = title[0]
for j in range(0, len(title)):
if 'http' not in links_url[j]:
url = base_url + links_url[j]
else:
url = links_url[j]
try:
describ = describe[j]
except:
describ = ''
yield scrapy.Request(url, meta={
'title': title[j],
'describe': describ,
'home_url': response.url
}, callback=self.parse_item,dont_filter=True)
def parse_item(self,response):
title = response.meta['title']
describe = response.meta['describe']
home_url = response.meta['home_url']
app_name = '人民日报'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
try:
pubTime = response.xpath('//div[@class="box01"]/div[1]/text()').extract()[0]
pubTime = pubTime.replace('来源:','').replace(' ','').replace('年','-').replace('月','-').replace('日',' ')
except:
pubTime =re.findall(r'n1/(.*?)/(.*?)/',str(response.url))
# print pubTime
pub1 = pubTime[0][0]
pub2 = pubTime[0][1]
pub3 = re.findall('\d{2}',pub2)
pubTime = pub1 + '-' + pub3[0]+'-'+pub3[1]
try:
try:
content = response.xpath('//div[@class="box_con"]').extract()
contentt = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', contentt)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
pic_more_url = re.findall('src="(.*?)"',contentt)
pic_more_url1 = []
for i in range(0,len(pic_more_url)):
if 'http' not in pic_more_url[i]:
pic_more_urlt = str(response.url).split('.cn/')[0] + '.cn' + pic_more_url[i]
pic_more_url1.append(pic_more_urlt)
else:
pic_more_url1.append(pic_more_url[i])
pic_more_url = str(set(pic_more_url1))
except:
content = response.xpath('//div[@id="picG"]').extract()
contentt = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', contentt)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
pic_more_url = re.findall('src="(.*?)"', contentt)
pic_more_url1 = []
for i in range(0,len(pic_more_url)):
if 'http' not in pic_more_url[i]:
pic_more_urlt = str(response.url).split('.cn/')[0] + '.cn' + pic_more_url[i]
pic_more_url1.append(pic_more_urlt)
else:
pic_more_url1.append(pic_more_url[i])
pic_more_url = str(set(pic_more_url1))
except:
content= response.xpath('//div[@class="show_text"]').extract()
contentt = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', contentt)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
pic_more_url = re.findall('src="(.*?)"', contentt)
pic_more_url1 = []
for i in range(0, len(pic_more_url)):
if 'http' not in pic_more_url[i]:
pic_more_urlt = str(response.url).split('.cn/')[0] + '.cn' + pic_more_url[i]
pic_more_url1.append(pic_more_urlt)
else:
pic_more_url1.append(pic_more_url[i])
pic_more_url = str(set(pic_more_url1))
author = ''
category = '首页新闻'
pic_url = ''
try:
timeArray = time.strptime(pubTime, "%Y-%m-%d %H:%M")
timenum = int(time.mktime(timeArray))
except:
timeArray = time.strptime(pubTime, "%Y-%m-%d")
timenum = int(time.mktime(timeArray))
accept_title =[]
if timenum > self.timeStamp:
accept_title.append(title)
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(timenum)))
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
self.count += 1
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
item['count'] = self.count
yield item
if len(accept_title) > 0:
self.number += 1
if self.number <= 5:
if 'index' not in home_url:
url = home_url + 'index2.html'
yield scrapy.Request(url,callback=self.parse)
else:
numt = re.findall('\d+',home_url)[0]
num = str(int(numt) + 1)
url = str(home_url).replace(numt,num)
yield scrapy.Request(url,callback=self.parse,dont_filter=True)<file_sep>#-*- coding: utf-8 -*-
from scrapy.spiders import Spider
from scrapy.http import Request
import time
import re
from news.items import NewsItem
class qb(Spider):
name='sanlinews'
start_urls=[
        'http://www.setn.com/ViewAll.aspx?PageGroupID=1',  # breaking
        'http://www.setn.com/ViewAll.aspx?PageGroupID=0&p=1',  # trending
        'http://www.setn.com/ViewAll.aspx?PageGroupID=0&p=2',  # trending
        'http://www.setn.com/ViewAll.aspx?PageGroupID=0&p=3',  # trending
        'http://www.setn.com/ViewAll.aspx?PageGroupID=0&p=4',  # trending
        'http://www.setn.com/ViewAll.aspx?PageGroupID=0&p=5',  # trending
        'http://www.setn.com/ViewAll.aspx?PageGroupID=6&p=1',  # politics
        'http://www.setn.com/ViewAll.aspx?PageGroupID=6&p=2',  # politics
        'http://www.setn.com/ViewAll.aspx?PageGroupID=6&p=3',  # politics
        'http://www.setn.com/ViewAll.aspx?PageGroupID=6&p=4',  # politics
        'http://www.setn.com/ViewAll.aspx?PageGroupID=6&p=5',  # politics
        'http://www.setn.com/ViewAll.aspx?PageGroupID=41&p=1',  # society
        'http://www.setn.com/ViewAll.aspx?PageGroupID=41&p=2',  # society
        'http://www.setn.com/ViewAll.aspx?PageGroupID=41&p=3',  # society
        'http://www.setn.com/ViewAll.aspx?PageGroupID=41&p=4',  # society
        'http://www.setn.com/ViewAll.aspx?PageGroupID=41&p=5',  # society
        'http://www.setn.com/ViewAll.aspx?PageGroupID=5&p=1',  # international
        'http://www.setn.com/ViewAll.aspx?PageGroupID=5&p=2',  # international
        'http://www.setn.com/ViewAll.aspx?PageGroupID=5&p=3',  # international
        'http://www.setn.com/ViewAll.aspx?PageGroupID=5&p=4',  # international
        'http://www.setn.com/ViewAll.aspx?PageGroupID=5&p=5',  # international
        'http://www.setn.com/ViewAll.aspx?PageGroupID=7&p=1',  # technology
        'http://www.setn.com/ViewAll.aspx?PageGroupID=7&p=2',  # technology
        'http://www.setn.com/ViewAll.aspx?PageGroupID=7&p=3',  # technology
        'http://www.setn.com/ViewAll.aspx?PageGroupID=7&p=4',  # technology
        'http://www.setn.com/ViewAll.aspx?PageGroupID=7&p=5',  # technology
        'http://www.setn.com/ViewAll.aspx?PageGroupID=2&p=1',  # finance
        'http://www.setn.com/ViewAll.aspx?PageGroupID=2&p=2',  # finance
        'http://www.setn.com/ViewAll.aspx?PageGroupID=2&p=3',  # finance
        'http://www.setn.com/ViewAll.aspx?PageGroupID=2&p=4',  # finance
        'http://www.setn.com/ViewAll.aspx?PageGroupID=2&p=5',  # finance
        'http://www.setn.com/ViewAll.aspx?PageGroupID=31&p=1',  # HOT focus
        'http://www.setn.com/ViewAll.aspx?PageGroupID=31&p=2',  # HOT focus
        'http://www.setn.com/ViewAll.aspx?PageGroupID=31&p=3',  # HOT focus
        'http://www.setn.com/ViewAll.aspx?PageGroupID=31&p=4',  # HOT focus
        'http://www.setn.com/ViewAll.aspx?PageGroupID=31&p=5',  # HOT focus
]
base_url='http://www.setn.com/'
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
links = response.xpath('//div[@class="row NewsList"]/div/div/div/div/div[1]/h4/a/@href').extract()
title = response.xpath('//div[@class="row NewsList"]/div/div/div/div/div[1]/h4/a/text()').extract()
categ = response.xpath('//div[@class="row NewsList"]/div/div/div/div/div[2]/a/text()').extract()
for i in range(len(links)):
if 'http' not in links[i]:
url = 'http://www.setn.com' + links[i]
else:
url = links[i]
tit = title[i]
yield Request(url, meta={
'title':tit,
'home_url':response.url,
'category':categ[i],
}, callback=self.parse_item)
def parse_item(self, response):
title = response.meta['title']
category = response.meta['category']
home_url = response.meta['home_url']
app_name = '三立新闻'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
pic_url = ''
describe = ''
pubTime = response.xpath('//time[@class="page-date"]/text()').extract()[0]
pubTime = pubTime.replace('/', '-')
try:
try:
content = response.xpath('//div[@id="Content1"]').extract()
contentt = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', contentt)
contentdata = ''
for i in range(0, len(content)):
contentdata += content[i]
content = contentdata
except:
content = response.xpath('//div[@id="Content2"]').extract()
contentt = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', contentt)
contentdata = ''
for i in range(0, len(content)):
contentdata += content[i]
content = contentdata
except:
content = response.xpath('//p/text()').extract()
contentt = ''
for i in range(len(content)):
contentt += content[i]
content = contentt
pic_more_url = re.findall('src="(.*?)"', contentt)
pic_more_url1 = []
if len(pic_more_url) > 0:
for i in range(0, len(pic_more_url)):
pic_more_url1.append(pic_more_url[i])
pic_more_url = str(set(pic_more_url1))
else:
pic_more_url = ''
author = ''
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", pubTime
print "爬取时间", crawlTime
timeArray = time.strptime(pubTime, "%Y-%m-%d %H:%M:%S")
publishedDate = time.mktime(timeArray)
if publishedDate >= self.timeStamp:
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(publishedDate)))
self.count = self.count + 1
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['count'] = self.count
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
print "okokokokokokokokokok"
if item['publishedDate'] != '':
yield item
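
# --- Illustrative note, not part of the original spider -------------------
# start_urls above enumerates the PageGroupID/p combinations by hand.  A
# minimal sketch building the same paged URLs programmatically (the unpaged
# PageGroupID=1 entry is left out):
def build_setn_urls(group_ids=(0, 6, 41, 5, 7, 2, 31), pages=5):
    """Hypothetical helper: listing URLs for the paged SETN channels."""
    base = 'http://www.setn.com/ViewAll.aspx?PageGroupID=%s&p=%s'
    return [base % (gid, p) for gid in group_ids for p in range(1, pages + 1)]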
<file_sep>#coding=utf-8
import scrapy
import time
import json
import re
from news.items import NewsItem
class dsxw(scrapy.Spider):
name = 'dongsenxinwen'
allowed_domains = ['news.ebc.net.tw']
start_urls = [
'https://news.ebc.net.tw/'
]
base_url = 'https://news.ebc.net.tw'
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
"""
        The feed is JSON but its structure is irregular, so the data is collected in three passes; run one of the URL blocks per pass.
:param response:
:return:
"""
        # trending news
# url='https://news.ebc.net.tw/api.js.php?callback=getHottestNews_callback'
# formdata={
# "action": "getAllHottestNews",
# "cid": "0",
# "hot": "1",
# "last_id": "0",
# "pages": "0",
# "ad_shows": "",
# "secret": "5b7dc7d488775ea127836640ac978eca"
# }
        # mood news
# url='https://news.ebc.net.tw/api.js.php?callback=getMoodNews_callback'
# formdata={
# "action": "getMoodNews",
# "heart": "0",
# "pages": "0",
# "secret": "bd40486ccc43149ac9a6321564430319"
# }
        # breaking news
# url='https://news.ebc.net.tw/api.js.php?callback=getAllComplainNews_callback'
# formdata={
# "action": "getAllComplainNews",
# "cid": "0",
# "hot": "1",
# "last_id": "0",
# "pages": "0",
# "ad_shows": "",
# "secret": "<KEY>"
# }
        # news overview (per-channel listing) / international
        listt = ['7', '29', '2', '1']  # 1: politics, 7: international, 29: cross-strait, 2: finance
for i in range(0, len(listt)):
url = 'https://news.ebc.net.tw/api.js.php?callback=getNews_callback'
formdata = {
"action": "getNews",
"cid": "%s" %listt[i],
"last_id": "0",
"ad_shows": "",
"pages": "0",
"secret": "<KEY>"
}
if listt[i] == '1':
category = u'财经'.encode('utf-8')
elif listt[i] == '7':
category = u'国际'.encode('utf-8')
elif listt[i] == '29':
category = u'两岸'.encode('utf-8')
else:
category = u'政治'.encode('utf-8')
yield scrapy.FormRequest(url, meta={
'category': category
}, formdata=formdata, callback=self.parse_item)
def parse_item(self, response):
data = response.body
category = response.meta['category']
data = data.replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '').replace('//NOCachegetNews_callback(', '').replace(')', '')
data = json.loads(data)
try:
for i in data:
url = self.base_url + i['url']
try:
summary = i['daital']
except:
summary = ''
yield scrapy.Request(url, meta={
'title': i['title'],
'pic_url': i['imgpath'],
'home_url': response.url,
'summary': summary,
'category': category
}, callback=self.parse_one)
except:
pass
def parse_one(self,response):
title = response.meta['title']
pic_url = response.meta['pic_url']
describe = response.meta['summary']
home_url = response.meta['home_url']
pubTime = response.xpath('//div[@class="float_left size12 Gray ml15 mt10"]/text()').extract()[0]
content = response.xpath('//div[@id="contentBody"]').extract()
content = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', content)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
app_name = '东森新闻'
pic_more_url = response.xpath('//div[@id="contentBody"]/img/@src').extract()
more_url = ''
for i in range(0, len(pic_more_url)):
more_url += pic_more_url[i]+' ; '
pic_more_url = more_url
author = ''
category = response.meta['category']
publishedDate = pubTime.replace(u'東森新聞','').replace(' ','')
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
acceptable_title = []
try:
t1 = publishedDate.split(' ')[0]
t2 = publishedDate.split(' ')[1]
t = t1 + ' ' + t2
timeArray = time.strptime(t, "%Y-%m-%d %H:%M")
except:
t = publishedDate.split(' ')[0]
timeArray = time.strptime(t, "%Y-%m-%d")
publishedDate = time.mktime(timeArray)
if publishedDate >= self.timeStamp:
acceptable_title.append(title)
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(publishedDate)))
item['publishedDate'] = publishedDate
self.count += 1
item['count'] = self.count
yield item
if len(acceptable_title) > 0:
url = 'https://news.ebc.net.tw/api.js.php?callback=getNews_callback'
formdata = {
"action": "getNews",
"cid": "0",
"last_id": "0",
"ad_shows": "",
"pages": "2",
"secret": "58d37d1d160d0cfb4d345ca42b76ada4"
}
yield scrapy.FormRequest(url, formdata=formdata, callback=self.parse_item)
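
# --- Illustrative helper, not part of the original spider -----------------
# The API answers with a JSONP-style wrapper ("//NOCache getNews_callback(...)"),
# which parse_item() strips with chained replace() calls.  A minimal sketch
# using a regular expression instead, assuming the payload sits inside the
# outermost pair of parentheses:
def strip_jsonp(body):
    """Hypothetical helper: return the decoded JSON inside a JSONP wrapper."""
    import json, re
    match = re.search(r'\((.*)\)[\s;]*$', body, re.S)
    return json.loads(match.group(1)) if match else json.loads(body)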
<file_sep>#coding=utf-8
import scrapy
import time,re
from news.items import NewsItem
import json
from Cryptodome.Cipher import AES
from binascii import b2a_hex, a2b_hex
class wangyi(scrapy.Spider):
name = 'wyxinwen'
# start_urls = [
# 'http://c.m.163.com/nc/article/list/T1414142214384/0-20.html',
# 'http://c.m.163.com/nc/article/list/T1414142214384/20-20.html',
# 'http://c.m.163.com/nc/article/list/T1414142214384/40-20.html'
# ]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
time_str1 = time.strftime("%Y-%m-%d %H:%M:%S")
timeArray1 = time.strptime(time_str1, "%Y-%m-%d %H:%M:%S")
timeStamp1 = int(time.mktime(timeArray1))
def start_requests(self):
t = str(self.timeStamp1)
key = self.aes(t)
url = 'http://c.m.163.com/recommend/getSubDocPic?tid=T1348647909107&from=toutiao&offset=1&size=10&fn=1&LastStdTime=0&spestr=shortnews&prog=bjrec_toutiao_v0e&passport=&devId=w5IMJr/VJZP4nluUZXnpaw==&lat=&lon=&version=36.0&net=wifi&ts=' + str(t) + '&sign=' + key + '&encryption=1&canal=baidu_cpd1_news&mac=Ft/gmnuXr4kqb/B9ZJB26CKCZ0vHdesSTBiDEVNplbY=&open=&openpath='
url = [
'http://c.m.163.com/dlist/article/dynamic?from=T1467284926140&offset=0&size=20&fn=1&LastStdTime=0&passport=&devId=w5IMJr/VJZP4nluUZXnpaw==&lat=v7YkY/cxMW8TiRWiqv8p5A==&lon=cu7pkk2XGKh9YksCMSg2BQ==&version=36.0&net=wifi&ts= '+ str(t) + '&sign=' + key + '&encryption=1&canal=baidu_cpd1_news&mac=Ft/gmnuXr4kqb/B9ZJB26CKCZ0vHdesSTBiDEVNplbY=&open=&openpath=',
'http://c.m.163.com/dlist/article/dynamic?from=T1467284926140&offset=20&size=20&fn=1&LastStdTime=0&passport=&devId=w5IMJr/VJZP4nluUZXnpaw==&lat=v7YkY/cxMW8TiRWiqv8p5A==&lon=cu7pkk2XGKh9YksCMSg2BQ==&version=36.0&net=wifi&ts= '+ str(t) + '&sign=' + key + '&encryption=1&canal=baidu_cpd1_news&mac=Ft/gmnuXr4kqb/B9ZJB26CKCZ0vHdesSTBiDEVNplbY=&open=&openpath=',
'http://c.m.163.com/dlist/article/dynamic?from=T1467284926140&offset=40&size=20&fn=1&LastStdTime=0&passport=&devId=w5IMJr/VJZP4nluUZXnpaw==&lat=v7YkY/cxMW8TiRWiqv8p5A==&lon=cu7pkk2XGKh9YksCMSg2BQ==&version=36.0&net=wifi&ts= '+ str(t) + '&sign=' + key + '&encryption=1&canal=baidu_cpd1_news&mac=Ft/gmnuXr4kqb/B9ZJB26CKCZ0vHdesSTBiDEVNplbY=&open=&openpath=',
'http://c.m.163.com/dlist/article/dynamic?from=T1348648756099&offset=0&size=10&fn=1&LastStdTime=0&passport=&devId=w5IMJr/VJZP4nluUZXnpaw==&lat=v7YkY/cxMW8TiRWiqv8p5A==&lon=cu7pkk2XGKh9YksCMSg2BQ==&version=36.0&net=wifi&ts= '+ str(t) + '&sign=' + key + '&encryption=1&canal=baidu_cpd1_news&mac=Ft/gmnuXr4kqb/B9ZJB26CKCZ0vHdesSTBiDEVNplbY=&open=&openpath=',
'http://c.m.163.com/dlist/article/dynamic?from=T1348648756099&offset=20&size=10&fn=1&LastStdTime=0&passport=&devId=w5IMJr/VJZP4nluUZXnpaw==&lat=v7YkY/cxMW8TiRWiqv8p5A==&lon=cu7pkk2XGKh9YksCMSg2BQ==&version=36.0&net=wifi&ts= '+ str(t) + '&sign=' + key + '&encryption=1&canal=baidu_cpd1_news&mac=Ft/gmnuXr4kqb/B9ZJB26CKCZ0vHdesSTBiDEVNplbY=&open=&openpath=',
'http://c.m.163.com/dlist/article/dynamic?from=T1348648756099&offset=40&size=10&fn=1&LastStdTime=0&passport=&devId=w5IMJr/VJZP4nluUZXnpaw==&lat=v7YkY/cxMW8TiRWiqv8p5A==&lon=cu7pkk2XGKh9YksCMSg2BQ==&version=36.0&net=wifi&ts= '+ str(t) + '&sign=' + key + '&encryption=1&canal=baidu_cpd1_news&mac=Ft/gmnuXr4kqb/B9ZJB26CKCZ0vHdesSTBiDEVNplbY=&open=&openpath=',
'http://c.m.163.com/dlist/article/dynamic?from=T1348648141035&offset=0&size=10&fn=1&LastStdTime=0&passport=&devId=w5IMJr/VJZP4nluUZXnpaw==&lat=v7YkY/cxMW8TiRWiqv8p5A==&lon=cu7pkk2XGKh9YksCMSg2BQ==&version=36.0&net=wifi&ts= '+ str(t) + '&sign=' + key + '&encryption=1&canal=baidu_cpd1_news&mac=Ft/gmnuXr4kqb/B9ZJB26CKCZ0vHdesSTBiDEVNplbY=&open=&openpath=',
'http://c.m.163.com/dlist/article/dynamic?from=T1348648141035&offset=20&size=10&fn=1&LastStdTime=0&passport=&devId=w5IMJr/VJZP4nluUZXnpaw==&lat=v7YkY/cxMW8TiRWiqv8p5A==&lon=cu7pkk2XGKh9YksCMSg2BQ==&version=36.0&net=wifi&ts= '+ str(t) + '&sign=' + key + '&encryption=1&canal=baidu_cpd1_news&mac=Ft/gmnuXr4kqb/B9ZJB26CKCZ0vHdesSTBiDEVNplbY=&open=&openpath=',
'http://c.m.163.com/dlist/article/dynamic?from=T1348648141035&offset=40&size=10&fn=1&LastStdTime=0&passport=&devId=w5IMJr/VJZP4nluUZXnpaw==&lat=v7YkY/cxMW8TiRWiqv8p5A==&lon=cu7pkk2XGKh9YksCMSg2BQ==&version=36.0&net=wifi&ts= '+ str(t) + '&sign=' + key + '&encryption=1&canal=baidu_cpd1_news&mac=Ft/gmnuXr4kqb/B9ZJB26CKCZ0vHdesSTBiDEVNplbY=&open=&openpath=',
'http://c.m.163.com/nc/article/list/T1414142214384/0-20.html',
'http://c.m.163.com/nc/article/list/T1414142214384/20-20.html',
'http://c.m.163.com/nc/article/list/T1414142214384/40-20.html',
'http://c.m.163.com/nc/article/list/T1368497029546/0-20.html',
'http://c.m.163.com/nc/article/list/T1368497029546/20-20.html',
'http://c.m.163.com/nc/article/list/T1368497029546/40-20.html'
]
category = [
'要闻', '要闻', '要闻',
'财经', '财经', '财经',
'军事', '军事', '军事',
'新时代', '新时代', '新时代',
'历史', '历史', '历史']
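# url and category are parallel lists; keep them the same length and in the same order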
for i in range(len(url)):
yield scrapy.Request(url[i],meta={
'category': category[i]
}, callback=self.parse, dont_filter=True)
def parse(self, response):
data = json.loads(response.body)
# try the possible channel keys in order until one matches the response payload
for key in ('T1467284926140', 'T1348648756099', 'T1348648141035',
            'T1414142214384', 'T1368497029546'):
    if key in data:
        data = data[key]
        break
for i in range(len(data)):
num = data[i]['docid']
title = data[i]['title']
pubt = data[i]['ptime']
url = 'http://c.m.163.com/nc/article/preload/%s/full.html'%num
try:
pic_url = data[i]['imgsrc']
except:
pic_url = ''
yield scrapy.Request(url, meta={
'title': title,
'pic_url': pic_url,
'pubt': pubt,
'num': num,
'category': response.meta['category']
}, callback=self.parse_item, dont_filter=True)
def parse_item(self, response):
title = response.meta['title']
pic_url = response.meta['pic_url']
publishedDate = response.meta['pubt']
app_name = '网易新闻'
describe = ''
num = response.meta['num']
data = json.loads(response.body)
data = data[num]
content = data['body']
content = re.findall('>(.*?)<', content)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
pic_more_url = ''
author = ''
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
home_url = ''
category = response.meta['category']
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M:%S")
timenum = int(time.mktime(timeArray))
if timenum >= self.timeStamp:
self.count += 1
item['count'] = self.count
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(timenum)))
item['publishedDate'] = publishedDate
yield item
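# sign generation for the request 'sign' parameter: a fixed numeric constant (presumably a device id)
# is prepended to the timestamp, md5-hashed, and the hex digest is then AES-ECB encrypted and hex-encoded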
def md5(self,data):
data = '357541050015819' + data
import hashlib
m = hashlib.md5()
m.update(data)
return m.hexdigest()
def aes(self, data):
# ECB-mode AES encryption of the md5 digest (no IV is used in ECB mode)
digest = self.md5(data)
key = 'neteasenewsboard'
AESCipher = AES.new(key, AES.MODE_ECB)
# encrypt and return the ciphertext as a hex string
cipher = AESCipher.encrypt(digest)
return b2a_hex(cipher)
<file_sep>#coding=utf-8
from scrapy import cmdline
# cmdline.execute("scrapy crawl renminribao".split())
# cmdline.execute("scrapy crawl guangmingribao".split())
# cmdline.execute("scrapy crawl zhongguoxinwen".split())
# cmdline.execute("scrapy crawl souhuxinwen".split())
# cmdline.execute("scrapy crawl xinlang".split())
# cmdline.execute("scrapy crawl renmin".split())
# cmdline.execute("scrapy crawl tiantiankuaibao".split())
# cmdline.execute("scrapy crawl jinri".split())
# cmdline.execute("scrapy crawl mingbaoxinwen".split())
# cmdline.execute("scrapy crawl sanlinews".split())
# cmdline.execute("scrapy crawl wyxinwen".split())
# cmdline.execute("scrapy crawl deguozhisheng".split())
# cmdline.execute("scrapy crawl hanlianshe".split())
# cmdline.execute("scrapy crawl shikuang".split())
# cmdline.execute("scrapy crawl tengxunxinwen".split())
# cmdline.execute("scrapy crawl tengxun".split())
# cmdline.execute("scrapy crawl fenghuang".split())
# cmdline.execute("scrapy crawl dongfangtoutiao".split())
# cmdline.execute("scrapy crawl xianggang01".split())
# cmdline.execute("scrapy crawl xianggang02".split())
# cmdline.execute("scrapy crawl taiwanyizhoukan".split())
# cmdline.execute("scrapy crawl xianggangyizhoukan".split())
# cmdline.execute("scrapy crawl zhongshidianzibao".split())
# cmdline.execute("scrapy crawl zhongshidianzi".split())
# cmdline.execute("scrapy crawl wuxianxinwen".split())
# cmdline.execute("scrapy crawl shangyouxinwen".split())
# cmdline.execute("scrapy crawl lichangxinwen".split())
# cmdline.execute("scrapy crawl dajiyuan".split())
# cmdline.execute("scrapy crawl zhongguojinwen".split())
# cmdline.execute("scrapy crawl toutiaocaijing".split())
# cmdline.execute("scrapy crawl lianhezaobao".split())
# cmdline.execute("scrapy crawl dongsenxinwen".split())
# cmdline.execute("scrapy crawl lutouxinwen".split())
# cmdline.execute("scrapy crawl shijieribao".split())
# cmdline.execute("scrapy crawl dongtaiwang".split())
# cmdline.execute("scrapy crawl zhangshangliuyuan".split())
# cmdline.execute("scrapy crawl duoweixinwen".split())
# cmdline.execute("scrapy crawl dongwang".split())
# cmdline.execute("scrapy crawl yidianzixun".split())
# cmdline.execute("scrapy crawl jinritoutiao".split())
# cmdline.execute("scrapy crawl rmrbdata".split())
# cmdline.execute("scrapy crawl renminnews".split())
# cmdline.execute("scrapy crawl zhongyangshe".split())
# cmdline.execute("scrapy crawl baiduxinwen".split())
# cmdline.execute("scrapy crawl baidu".split())
# cmdline.execute("scrapy crawl hanlianshe".split())
# cmdline.execute("scrapy crawl meiguozhiyin".split())
# cmdline.execute("scrapy crawl meiguozhiyinzhongwenwang".split())
# cmdline.execute("scrapy crawl ziyouyazhou".split())
# cmdline.execute("scrapy crawl jingjixueren".split())
# cmdline.execute("scrapy crawl ftyingwenwang".split())
# cmdline.execute("scrapy crawl pengbo".split())
# cmdline.execute("scrapy crawl xinhuashe".split())
# cmdline.execute("scrapy crawl rijingxinwen".split())
# cmdline.execute("scrapy crawl eluosiweixing".split())
# cmdline.execute("scrapy crawl wyxinwen".split())
# cmdline.execute("scrapy crawl duanchuanmei".split())
# cmdline.execute("scrapy crawl shangguanxinwen".split())
# cmdline.execute("scrapy crawl shangyouxinwen".split())
# cmdline.execute("scrapy crawl nanfangzhoumo".split())
# cmdline.execute("scrapy crawl ziyouyazhou1".split())
# cmdline.execute("scrapy crawl radiofreeasia".split())
# cmdline.execute("scrapy crawl tianxiazazhimeiribao".split())
# cmdline.execute("scrapy crawl mingbao".split())
# cmdline.execute("scrapy crawl jiaohuidianxinwen".split())
# cmdline.execute("scrapy crawl duanchuanmei".split())
# cmdline.execute("scrapy crawl dongtaiwang".split())
# cmdline.execute("scrapy crawl cnbc".split())
# cmdline.execute("scrapy crawl meiguozhiyinzhongwen".split())
# cmdline.execute("scrapy crawl nbcnews".split())
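# NOTE: scrapy's cmdline.execute typically does not return (the process exits when the crawl
# finishes), so only the first uncommented call below actually runs; see the sketch at the
# end of this file for running several spiders from one process.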
cmdline.execute("scrapy crawl zhongguoxinwen".split())
# cmdline.execute("scrapy crawl yindushibao".split())
# cmdline.execute("scrapy crawl xiangangxinwen".split())
# cmdline.execute("scrapy crawl afp".split())
# cmdline.execute("scrapy crawl afpnews".split())
# cmdline.execute("scrapy crawl youbao".split())
# cmdline.execute("scrapy crawl chaorixinwen".split())
# cmdline.execute("scrapy crawl huaerjieyingwen".split())
# cmdline.execute("scrapy crawl wyxinwen".split())
# cmdline.execute("scrapy crawl huanqiushibao".split())
# cmdline.execute("scrapy crawl nanhuazaobao".split())
# cmdline.execute("scrapy crawl guardian".split())
# cmdline.execute("scrapy crawl eluosi".split())
# cmdline.execute("scrapy crawl cnbcnews".split())
cmdline.execute("scrapy crawl yindushibaoyingwen".split())
cmdline.execute("scrapy crawl niuyueshibao".split())
cmdline.execute("scrapy crawl huaerjieyingwenwang".split())
# cmdline.execute("scrapy crawl jingjixueren".split())
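# A minimal sketch (commented out; spider names assumed to match the ones in this project)
# of running several spiders from one process instead of chaining cmdline.execute calls.
# The queued crawls run in parallel within the same reactor:
# from scrapy.crawler import CrawlerProcess
# from scrapy.utils.project import get_project_settings
# process = CrawlerProcess(get_project_settings())
# process.crawl('yindushibaoyingwen')
# process.crawl('niuyueshibao')
# process.crawl('huaerjieyingwenwang')
# process.start()  # blocks until all queued crawls finish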
# import time
# import datetime
# time='26 Jun 2018 13:14:34'
# time_format=datetime.datetime.strptime(time,'%d %b %Y %H:%M:%S')  # %b matches abbreviated month names like 'Jun'
# print time_format
# import requests
# data = requests.get('http://192.168.3.11:5000/news?category=business/list')
# print data.content
# import time
# t = '1528769010'
# print time.time()
# Ttime = int(round(time.time() * 1000))
# a = '2018-06-12 10:03:30'
# timeArray = time.strptime(a, "%Y-%m-%d %H:%M:%S")
# timeStamp = int(time.mktime(timeArray))
# print timeStamp
# publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(t)))
# print publishedDate
# time1 = time.strftime("%Y-%m-%d %H:%M:%S")
# time2 = time.strptime(time1, "%Y-%m-%d %H:%M:%S")
# time3 = int(time.mktime(time2))
# print time3
# t = '%f'%time.time()
# import re
# print re.findall('\d+','%f'%time.time())[0]
# print time.localtime(time.time())
# import time
# print int(round(time.time() * 1000))<file_sep>#-*- coding: utf-8 -*-
from scrapy.spiders import Spider
import json
import time
import re
from news.items import NewsItem
import scrapy
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
class mingbao(Spider):
name = 'mingbaoxinwen'
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
tdata = time_str.replace('-','')
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
start_urls = [
'https://newsapp.mingpao.com/php/api/app_articlesearch.php?p=pns&date=%s&section=S00001&size=20&page=1'%tdata,  # top news
'https://newsapp.mingpao.com/php/api/app_articlesearch.php?p=pns&date=%s&section=S00002&size=20&page=1'%tdata,  # Hong Kong news
'https://newsapp.mingpao.com/php/api/app_articlesearch.php?p=pns&date=%s&section=S00004&size=20&page=1'%tdata,  # economy
'https://newsapp.mingpao.com/php/api/app_articlesearch.php?p=pns&date=%s&section=S00012&size=20&page=1'%tdata,  # opinion
'https://newsapp.mingpao.com/php/api/app_articlesearch.php?p=pns&date=%s&section=S00013&size=20&page=1'%tdata,  # China
'https://newsapp.mingpao.com/php/api/app_articlesearch.php?p=pns&date=%s&section=S00014&size=20&page=1'%tdata,  # international
'https://newsapp.mingpao.com/php/api/app_articlesearch.php?p=ins&date=%s&section=S00001&size=20&page=1'%tdata,
'https://newsapp.mingpao.com/php/api/app_articlesearch.php?p=ins&date=%s&section=S00004&size=20&page=1'%tdata,
'https://newsapp.mingpao.com/php/api/app_articlesearch.php?p=ins&date=%s&section=S00005&size=20&page=1'%tdata
]
def parse(self, response):
print response.body
# data = json.loads(response.body.replace('\t','').replace('\n','').replace('\r',''))
# data = data['data_Result']
# data = response.body['data_Result']
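# fields are pulled out with regexes because the raw body apparently is not clean JSON
# (see the commented-out json.loads attempt above)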
title = re.findall('"TITLE":"(.*?)",',response.body)
url = re.findall('"LINK":"(.*?)",',response.body)
cate = re.findall('"CATEGORY":"(.*?)",',response.body)
pubt = re.findall('"PUBDATE":"(.*?)",',response.body)
desc = re.findall('"DESCRIPTION":"(.*?)",',response.body)
# author = re.findall('"AUTHOR":"(.*?)",',response.body)
for i in range(len(url)):
yield scrapy.Request(url[i].replace('\/','/'), meta={
'title': title[i],
'describe': desc[i],
'pubt': pubt[i],
'category': cate[i],
'home_url': url[i].replace('\/','/')
}, callback=self.parse_item, dont_filter=True)
def parse_item(self,response):
title = response.meta['title']
category = response.meta['category']
home_url = response.meta['home_url']
describe = response.meta['describe']
publishedDate = response.meta['pubt']
author = ''
app_name = '明报新闻'
pic_url = ''
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
try:
content = response.xpath('//article').extract()
content = content[0].replace('\t','').replace('\n','').replace('\r','')
content = re.findall('>(.*?)<', content)
contentt = ''
for i in range(len(content)):
contentt += content[i]
content = contentt
except:
content = response.xpath('//p/text()').extract()
contentt = ''
for i in range(len(content)):
contentt += content[i]
content = contentt
pic_more_url = ''
if category == '':
category = '要闻'
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M:%S")
timenum = int(time.mktime(timeArray))
if timenum >= self.timeStamp:
self.count += 1
item['count'] = self.count
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(timenum)))
item['publishedDate'] = publishedDate
yield item
<file_sep>#-*- coding: utf-8 -*-
from scrapy.spider import Spider
import scrapy
from urlparse import urljoin
from scrapy.selector import Selector
from scrapy.http import Request
import time
import json
from selenium import webdriver
import re
import sys
# crawl everything
from news.DataResource import TransportData
from news.items import NewsItem
import requests
import lxml.html as lh
reload(sys)
sys.setdefaultencoding('utf8')
class xhs(scrapy.Spider):
name = 'xinhuashe'
header = {
"User-Agent": "android-16-720x1184-GALAXY NEXUS",
"content-type": "application/json"
}
start_urls = [
'https://zhongguowangshi.com/'
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
tt = str(int(round(time.time() * 1000)))
def parse(self, response):
listnum = ['470','470','470','23140','23140','23140','462','462','462','463','463','463','16534','16534','16534'] # column ids: 470 = top news, 23140 = study, 462 = international, 463 = finance, 16534 = recommended
formdata = [
{"userID": "0","clientLongitude": 121.49237750590044,"clientApp": "104","city": "null","clientHeight": 1280,"count": 0,"province": "null","excludeRecommend": 0,"clientDev": "0","columnid": "470","clientModel": "SM-G9350","clientToken": "<PASSWORD> <KEY>","clientDate": self.tt,"clientOS": "5.1.1","clientVer": "4.0.6","clientMarket": "198","clientType": 2,"clientPrison": "0","clientBundleID": "net.xinhuamm.mainclient","clientNet": "wifi","columntype": "4001","clientId": "4062c95015dbaed2529aebd399ca0d77","clientLable": "867637959598351","clientWidth": 720,"pn": 1,"clientLatitude": 31.247196038642187},#要闻
{"userID": "0","clientLongitude": 121.49237750590044,"clientApp": "104","city": "null","clientHeight": 1280,"count": 0,"province": "null","excludeRecommend": 0,"clientDev": "0","columnid": "470","clientModel": "SM-G9350","clientToken": "<KEY>ed2529aebd399ca<PASSWORD>","clientDate": self.tt,"clientOS": "5.1.1","clientVer": "4.0.6","clientMarket": "198","clientType": 2,"clientPrison": "0","clientBundleID": "net.xinhuamm.mainclient","clientNet": "wifi","columntype": "4001","clientId": "4062c95015dbaed2529aebd399ca0d77","clientLable": "867637959598351","clientWidth": 720,"pn": 2,"clientLatitude": 31.247196038642187},#要闻
{"userID": "0","clientLongitude": 121.49237750590044,"clientApp": "104","city": "null","clientHeight": 1280,"count": 0,"province": "null","excludeRecommend": 0,"clientDev": "0","columnid": "470","clientModel": "SM-G9350","clientToken": "<KEY>","clientDate": self.tt,"clientOS": "5.1.1","clientVer": "4.0.6","clientMarket": "198","clientType": 2,"clientPrison": "0","clientBundleID": "net.xinhuamm.mainclient","clientNet": "wifi","columntype": "4001","clientId": "4062c95015dbaed2529aebd399ca0d77","clientLable": "867637959598351","clientWidth": 720,"pn": 3,"clientLatitude": 31.247196038642187},#要闻
{"userID": "0","clientLongitude": 121.49237750590044,"clientApp": "104","city": "null","clientHeight": 1280,"count": 0,"province": "null","excludeRecommend": 0,"clientDev": "0","columnid": "462","clientModel": "SM-G9350","clientToken": "<KEY>","clientDate": self.tt,"clientOS": "5.1.1","clientVer": "4.0.6","clientMarket": "198","clientType": 2,"clientPrison": "0","clientBundleID": "net.xinhuamm.mainclient","clientNet": "wifi","columntype": "4002","clientId": "4062c95015dbaed2529aebd399ca0d77","clientLable": "867637959598351","clientWidth": 720,"pn": 1,"clientLatitude": 31.247196038642187},#国际
{"userID": "0","clientLongitude": 121.49237750590044,"clientApp": "104","city": "null","clientHeight": 1280,"count": 0,"province": "null","excludeRecommend": 0,"clientDev": "0","columnid": "462","clientModel": "SM-G9350","clientToken": "4<PASSWORD>","clientDate": self.tt,"clientOS": "5.1.1","clientVer": "4.0.6","clientMarket": "198","clientType": 2,"clientPrison": "0","clientBundleID": "net.xinhuamm.mainclient","clientNet": "wifi","columntype": "4002","clientId": "4062c95015dbaed2529aebd399ca0d77","clientLable": "867637959598351","clientWidth": 720,"pn": 2,"clientLatitude": 31.247196038642187},#国际
{"userID": "0","clientLongitude": 121.49237750590044,"clientApp": "104","city": "null","clientHeight": 1280,"count": 0,"province": "null","excludeRecommend": 0,"clientDev": "0","columnid": "462","clientModel": "SM-G9350","clientToken": "4<PASSWORD>","clientDate": self.tt,"clientOS": "5.1.1","clientVer": "4.0.6","clientMarket": "198","clientType": 2,"clientPrison": "0","clientBundleID": "net.xinhuamm.mainclient","clientNet": "wifi","columntype": "4002","clientId": "4062c95015dbaed2529aebd399ca0d77","clientLable": "867637959598351","clientWidth": 720,"pn": 3,"clientLatitude": 31.247196038642187},#国际
{"userID": "0","clientLongitude": 121.49237750590044,"clientApp": "104","city": "null","clientHeight": 1280,"count": 0,"province": "null","excludeRecommend": 0,"clientDev": "0","columnid": "463","clientModel": "SM-G9350","clientToken": "40<PASSWORD>","clientDate": self.tt,"clientOS": "5.1.1","clientVer": "4.0.6","clientMarket": "198","clientType": 2,"clientPrison": "0","clientBundleID": "net.xinhuamm.mainclient","clientNet": "wifi","columntype": "4002","clientId": "4062c95015dbaed2529aebd399ca0d77","clientLable": "867637959598351","clientWidth": 720,"pn": 1,"clientLatitude": 31.247196038642187},#财经
{"userID": "0","clientLongitude": 121.49237750590044,"clientApp": "104","city": "null","clientHeight": 1280,"count": 0,"province": "null","excludeRecommend": 0,"clientDev": "0","columnid": "463","clientModel": "SM-G9350","clientToken": "406<KEY>","clientDate": self.tt,"clientOS": "5.1.1","clientVer": "4.0.6","clientMarket": "198","clientType": 2,"clientPrison": "0","clientBundleID": "net.xinhuamm.mainclient","clientNet": "wifi","columntype": "4002","clientId": "4062c95015dbaed2529aebd399ca0d77","clientLable": "867637959598351","clientWidth": 720,"pn": 2,"clientLatitude": 31.247196038642187},#财经
{"userID": "0","clientLongitude": 121.49237750590044,"clientApp": "104","city": "null","clientHeight": 1280,"count": 0,"province": "null","excludeRecommend": 0,"clientDev": "0","columnid": "463","clientModel": "SM-G9350","clientToken": "4<PASSWORD>","clientDate": self.tt,"clientOS": "5.1.1","clientVer": "4.0.6","clientMarket": "198","clientType": 2,"clientPrison": "0","clientBundleID": "net.xinhuamm.mainclient","clientNet": "wifi","columntype": "4002","clientId": "4062c95015dbaed2529aebd399ca0d77","clientLable": "867637959598351","clientWidth": 720,"pn": 3,"clientLatitude": 31.247196038642187},#财经
{"userID": "0","clientLongitude": 121.49237750590044,"clientApp": "104","city": "null","clientHeight": 1280,"count": 0,"province": "null","excludeRecommend": 0,"clientDev": "0","columnid": "23140","clientModel": "SM-G9350","clientToken": "4062c95015dbaed252<PASSWORD>d399ca0d77","clientDate": self.tt,"clientOS": "5.1.1","clientVer": "4.0.6","clientMarket": "198","clientType": 2,"clientPrison": "0","clientBundleID": "net.xinhuamm.mainclient","clientNet": "wifi","columntype": "4002","clientId": "4062c95015dbaed2529aebd399ca0d77","clientLable": "867637959598351","clientWidth": 720,"pn": 1,"clientLatitude": 31.247196038642187},#学习
{"userID": "0","clientLongitude": 121.49237750590044,"clientApp": "104","city": "null","clientHeight": 1280,"count": 0,"province": "null","excludeRecommend": 0,"clientDev": "0","columnid": "23140","clientModel": "SM-G9350","clientToken": "<KEY>","clientDate": self.tt,"clientOS": "5.1.1","clientVer": "4.0.6","clientMarket": "198","clientType": 2,"clientPrison": "0","clientBundleID": "net.xinhuamm.mainclient","clientNet": "wifi","columntype": "4002","clientId": "4062c95015dbaed2529aebd399ca0d77","clientLable": "867637959598351","clientWidth": 720,"pn": 2,"clientLatitude": 31.247196038642187},#学习
{"userID": "0","clientLongitude": 121.49237750590044,"clientApp": "104","city": "null","clientHeight": 1280,"count": 0,"province": "null","excludeRecommend": 0,"clientDev": "0","columnid": "23140","clientModel": "SM-G9350","clientToken": "<KEY>","clientDate": self.tt,"clientOS": "5.1.1","clientVer": "4.0.6","clientMarket": "198","clientType": 2,"clientPrison": "0","clientBundleID": "net.xinhuamm.mainclient","clientNet": "wifi","columntype": "4002","clientId": "40<PASSWORD>","clientLable": "867637959598351","clientWidth": 720,"pn": 3,"clientLatitude": 31.247196038642187},#学习
{"userID": "0","clientLongitude": 121.49237750590044,"clientApp": "104","city": "null","clientHeight": 1280,"count": 0,"province": "null","excludeRecommend": 0,"clientDev": "0","columnid": "16534","clientModel": "SM-G9350","clientToken": "<KEY>","clientDate": self.tt,"clientOS": "5.1.1","clientVer": "4.0.6","clientMarket": "198","clientType": 2,"clientPrison": "0","clientBundleID": "net.xinhuamm.mainclient","clientNet": "wifi","columntype": "4002","clientId": "4062c95015dbaed2529aebd399ca0d77","clientLable": "867637959598351","clientWidth": 720,"pn": 1,"clientLatitude": 31.247196038642187},#推荐
{"userID": "0","clientLongitude": 121.49237750590044,"clientApp": "104","city": "null","clientHeight": 1280,"count": 0,"province": "null","excludeRecommend": 0,"clientDev": "0","columnid": "16534","clientModel": "SM-G9350","clientToken": "<KEY>","clientDate": self.tt,"clientOS": "5.1.1","clientVer": "4.0.6","clientMarket": "198","clientType": 2,"clientPrison": "0","clientBundleID": "net.xinhuamm.mainclient","clientNet": "wifi","columntype": "4002","clientId": "4062c95015dbaed2529aebd399ca0d77","clientLable": "867637959598351","clientWidth": 720,"pn": 2,"clientLatitude": 31.247196038642187},#推荐
{"userID": "0","clientLongitude": 121.49237750590044,"clientApp": "104","city": "null","clientHeight": 1280,"count": 0,"province": "null","excludeRecommend": 0,"clientDev": "0","columnid": "16534","clientModel": "SM-G9350","clientToken": "<KEY>","clientDate": self.tt,"clientOS": "5.1.1","clientVer": "4.0.6","clientMarket": "198","clientType": 2,"clientPrison": "0","clientBundleID": "net.xinhuamm.mainclient","clientNet": "wifi","columntype": "4002","clientId": "4062c95015dbaed2529aebd399ca0d77","clientLable": "867637959598351","clientWidth": 720,"pn": 3,"clientLatitude": 31.247196038642187},#推荐
]
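# each payload above is POSTed to the indexlist endpoint below; the article list comes back under data['data']['data']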
for i in range(0,len(formdata)):
header = {
"User-Agent": "android-16-720x1184-GALAXY NEXUS",
"content-type": "application/json"
}
if listnum[i] =='470':
category = '要闻'.encode('utf-8')
elif listnum[i] =='462':
category = '国际'.encode('utf-8')
elif listnum[i] == '463':
category = '财经'.encode('utf-8')
elif listnum[i] == '16534':
category = '推荐'.encode('utf-8')
else:
category = '学习'.encode('utf-8')
url = 'https://xhpfmapi.zhongguowangshi.com/v400/core/indexlist'
data = requests.post(url,data=json.dumps(formdata[i]), headers=self.header)
data = json.loads(data.content)
data = data['data']['data']
for i in data:
id = i['id']
title = i['topic']
print title
url = 'https://xhpfmapi.zhongguowangshi.com/v500/news/%s.js?ts=0'%id
try:
publishedDate = i['releasedate']
if ':' in publishedDate:
publishedDate = str(self.time_str) + ' ' + publishedDate
else:
publishedDate = '20' + publishedDate
except:
publishedDate = ''
try:
pic_url = i['shareImage']
except:
pic_url = ''
try:
pic_more_url = i['detailImg']
except:
pic_more_url = ''
yield scrapy.Request(url, meta={
'category': category,
'title': title,
'publishedDate': publishedDate,
'pic_url': pic_url,
'pic_more_url': pic_more_url,
'home_url': response.url
}, callback=self.parse_item, dont_filter=True)
def parse_item(self,response):
title = response.meta['title']
pic_url = response.meta['pic_url']
pic_more_url = response.meta['pic_more_url']
publishedDate = response.meta['publishedDate']
category = response.meta['category']
describe = ''
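# the detail endpoint returns a JS assignment ("var XinhuammNews = {...}"); strip the prefix before parsing the JSON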
data = response.body.replace('var XinhuammNews =','')
data = json.loads(data)
content = data['content']
publishedDate = data['releasedate']
contentt = content.replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', contentt)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
home_url = response.meta['home_url']
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
app_name = '新华社'
author = ''
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['count'] = self.count
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
# try the date formats this feed has been seen to use, in the original order of attempts
publishedDate_stamp = None
for fmt in ("%Y-%m-%d %H:%M", "%Y-%m-%d", "%Y-%m-%d %H:%M:%S"):
    try:
        publishedDate_stamp = int(time.mktime(time.strptime(publishedDate, fmt)))
        break
    except Exception:
        continue
if publishedDate_stamp > self.timeStamp:
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(publishedDate_stamp)))
self.count += 1
item['publishedDate'] = publishedDate
yield item
<file_sep>#coding=utf-8
import scrapy
import time
import json
import re
from news.items import NewsItem
from scrapy.selector import Selector
from news.DataResource import TransportData
class rfa(scrapy.Spider):
name='xdrbnews'
allowed_domains = ["stheadline.com"]
start_urls = [
'http://std.stheadline.com/daily/daily.php',
]
base_url = 'http://std.stheadline.com/daily/'
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
links = response.xpath('//ul[@class="nav"]/li/ul/li/a/@href').extract()
for i in range(0,len(links)):
url = links[i]
yield scrapy.Request(url, callback=self.parse_item)
def parse_item(self,resposne):
links = resposne.xpath('//div[@class="module-wrap none-thumb underline"]/a/@href').extract()
title = resposne.xpath('//div[@class="module-wrap none-thumb underline"]/a/div[@class="module-detail"]/div[@class="title"]/text()').extract()
pic_url = resposne.xpath('//div[@class="module-wrap none-thumb underline"]/a/div[@class="module-thumb"]/div[@class="img"]/img/@src').extract()
summary = resposne.xpath('//div[@class="module-wrap none-thumb underline"]/a/div[@class="module-detail"]/div[@class="des"]/text()').extract()
Url = []
Title = []
Pic_url = []
Summary = []
for i in range(0, len(title)):
try:
Url.append(links[i])
Title.append(title[i])
Pic_url.append(pic_url[i])
Summary.append(summary[i].replace('\t','').replace('\n','').replace('\r','').replace(' ',''))
except:
Url.append(links[i])
Title.append(title[i])
Pic_url.append(' ')
Summary.append(summary[i].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', ''))
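# second list layout on the section pages: "thumb-pull-right" cards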
links = resposne.xpath('//div[@class="module-wrap thumb-pull-right underline"]/a/@href').extract()
title = resposne.xpath('//div[@class="module-wrap thumb-pull-right underline"]/a/div[@class="module-detail"]/div[@class="title"]/text()').extract()
pic_url = resposne.xpath('//div[@class="module-wrap thumb-pull-right underline"]/a/div[@class="module-thumb"]/div[@class="img"]/img/@src').extract()
summary = resposne.xpath('//div[@class="module-wrap thumb-pull-right underline"]/a/div[@class="module-detail"]/div[@class="des"]/text()').extract()
for i in range(0, len(title)):
try:
Url.append(links[i])
Title.append(title[i])
Pic_url.append(pic_url[i])
Summary.append(summary[i].replace('\t','').replace('\n','').replace('\r','').replace(' ',''))
except:
Url.append(links[i])
Title.append(title[i])
Pic_url.append(' ')
Summary.append(summary[i].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', ''))
for i in range(0, len(Url)):
try:
url = self.base_url + Url[i]
yield scrapy.Request(url, meta={
'title': Title[i],
'pic_url': Pic_url[i],
'summary': Summary[i],
'home_url': resposne.url
}, callback=self.parse_one)
except:
url = self.base_url + Url[i]
yield scrapy.Request(url, meta={
'title': Title[i],
'pic_url': '',
'summary': '',
'home_url': resposne.url
}, callback=self.parse_one)
def parse_one(self, response):
title = response.meta['title']
describe = response.meta['summary']
pic_url = response.meta['pic_url']
author = ''
pic_more_url = ''
home_url = response.meta['home_url']
app_name = '星岛日报'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
content = response.xpath('//div[@class="post-content"]').extract()
content = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', content)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
pubTime = response.xpath('//div[@class="post-heading supplement-p-h"]/div[@class="date"]/text()').extract()[1].replace(' ','')
category = response.xpath('//div[@class="post-heading supplement-p-h"]/div[@class="date"]/a/text()').extract()[0]
publishedDate = pubTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
timeArray = time.strptime(pubTime, "%Y-%m-%d")
publishedDate = time.mktime(timeArray)
if publishedDate >= self.timeStamp:
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(publishedDate)))
item['publishedDate'] = publishedDate
self.count = self.count + 1
item['count'] = self.count
yield item
<file_sep>#coding=utf-8
import json
import scrapy
import time, re
from news.items import NewsItem
from news.DataResource import TransportData
class ft(scrapy.Spider):
name = 'ftyingwenwang'
start_urls = [
'https://www.ft.com/world?format=&page=1',#world
'https://www.ft.com/world?format=&page=2',#world
'https://www.ft.com/world/us?format=&page=1',#US
'https://www.ft.com/world/us?format=&page=2',#US
'https://www.ft.com/companies?format=&page=1',#COMPANIES
'https://www.ft.com/companies?format=&page=2',#COMPANIES
'https://www.ft.com/markets?format=&page=1',#MARKETS
'https://www.ft.com/markets?format=&page=2',#MARKETS
'https://www.ft.com/opinion?format=&page=1',#OPINION
'https://www.ft.com/opinion?format=&page=2',#OPINION
'https://www.ft.com/work-careers?format=&page=1',#WORK & CAREERS
'https://www.ft.com/work-careers?format=&page=2',#WORK & CAREERS
]
def parse(self, response):
links = response.xpath('//ul[@class="o-teaser-collection__list"]/li/div[2]/div/div/div[1]/div[2]/a/@href').extract()
title = response.xpath('//ul[@class="o-teaser-collection__list"]/li/div[2]/div/div/div[1]/div[2]/a/text()').extract()
#descr = response.xpath('//ul[@class="o-teaser-collection__list"]/li/div[2]/div/div/div[1]/p/text()').extract()
#pic = response.xpath('//ul[@class="o-teaser-collection__list"]/li/div[2]/div/div/div[2]/a/div/img/@data-srcset').extract()
for i in range(len(links)):
url = 'https://www.ft.com' + links[i]
tit = title[i]
# describe = descr[i]
# pic_url = pic[i]
# print url
# print tit
# print describe
# print pic_url
yield scrapy.Request(url, meta={
'title':tit,
'home_url':response.url
}, callback=self.parse_item)
def parse_item(self,response):
title = response.meta['title']
home_url = response.meta['home_url']
author = ''
pic_url = ''
describe = ''
category = ''
app_name = 'FT英文网'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
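# NOTE: this spider currently only prints the scraped fields; no NewsItem is built or yielded yet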
print "app名称", app_name
print "主图片url", pic_url
# print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
# print "内容", content
print "主url", home_url
# print "发布时间", publishedDate
print "爬取时间", crawlTime<file_sep>#coding=utf-8
import scrapy,re,time,json
from news.items import NewsItem
class huaerjie(scrapy.Spider):
name = 'huaerjieyingwenwang'
start_urls = [
'https://www.wsj.com/search/term.html?KEYWORDS=Xi%20Jinping',
'https://www.wsj.com/search/term.html?KEYWORDS=Xi%20Jinping&page=2'
]
def parse(self, response):
links = response.xpath('//ul[@class="items hedSumm"]/li/div/div[1]/h3/a/@href').extract()
title = response.xpath('//ul[@class="items hedSumm"]/li/div/div[1]/h3/a/text()').extract()
for i in range(len(links)):
if 'http' not in links[i]:
url = 'https://www.wsj.com' + links[i]
else:
url = links[i]
tit = title[i]
yield scrapy.Request(url, meta={
'title': tit,
'home_url': response.url
}, callback=self.parse_item, dont_filter=True)
def parse_item(self, response):
title = response.meta['title']
home_url = response.meta['home_url']
app_name = '华尔街日报英文网'
describe = ''
pic_url = ''
author = ''
publishedDate = response.xpath('//time/text()').extract()[0].replace(' ','')
publishedDate = publishedDate.replace('\t', '').replace('\n', '').replace('\r', '')
pic_more_url = ''
try:
content = response.xpath('//div[@class="wsj-snippet-body"]').extract()[0]
content = content.replace('\t', '').replace('\n', '').replace('\r', '')
except:
content = response.xpath('//p').extract()
contentt = ''
for i in range(len(content)):
contentt += content[i]
content = contentt.replace('\t', '').replace('\n', '').replace('\r', '')
content = re.findall('>(.*?)<', content)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
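# NOTE: like the FT spider above, this spider only prints the parsed fields and does not yield a NewsItem yet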
print publishedDate
print content<file_sep>#coding=utf-8
import time,re,json
import scrapy
from news.items import NewsItem
from lxml.etree import HTML
class guardian(scrapy.Spider):
name = 'guardian'
count = 0
download_delay = 0
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def start_requests(self):
start_urls = [
# Main channel Home
# Headlines:
'https://mobile.guardianapis.com/us/groups/collections/us-alpha/news/regular-stories',
# Spotlight:
'https://mobile.guardianapis.com/us/groups/collections/us-alpha/features/feature-stories',
# World Cup 2018:
# 'https://mobile.guardianapis.com/us/groups/collections/189d1918-9c46-4bb4-a695-cc22f44dfe34',
# Opinions:
'https://mobile.guardianapis.com/us/groups/collections/98df412d-b0e7-4d9a-98c2-062642823e94',
# Culture:
'https://mobile.guardianapis.com/us/groups/collections/fb59c1f8-72a7-41d5-8365-a4d574809bed',
# Across the country:
'https://mobile.guardianapis.com/us/groups/collections/5a59a4e5-074e-4a2a-8bbe-2743e07ae30f',
# Around the world:
'https://mobile.guardianapis.com/us/groups/collections/adb2876e-946a-49ad-b641-e405d24e5f18',
# Explore:
'https://mobile.guardianapis.com/us/groups/collections/5fd45b04-c512-4a8c-a9b5-cc07a6097049',
# Most viewed:
'https://mobile.guardianapis.com/us/groups/collections/us/most-viewed/regular-stories',
# Main channel US:
# US Headlines:
'https://mobile.guardianapis.com/us/groups/collections/ec5e3c77-2684-44a0-bfbd-d337edcb2cba',
# US Business:
'https://mobile.guardianapis.com/us/groups/collections/b0e0bc29-41b5-4dd7-8a5e-f5d4129971a7',
# US Politics:
'https://mobile.guardianapis.com/us/groups/collections/436ed09d-614f-4418-8500-d1fa9e20404e',
# Most viewed in US News:
'https://mobile.guardianapis.com/us/groups/collections/8e2f6e01-1af5-49c1-9cf9-73643365ab82',
# Main channel Politics:
# Headlines:
'https://mobile.guardianapis.com/us/groups/collections/8a12631c-72dd-4f57-baec-24ef7b2abfca',
# Opinion:
'https://mobile.guardianapis.com/us/groups/collections/21450e4f-a452-4601-a4b3-03ba00b5da1a',
# Trump Russia investigation:
'https://mobile.guardianapis.com/us/groups/collections/357173cc-06e1-4b86-b2f0-74f0b0b05f94',
# Most viewed in US politics:
'https://mobile.guardianapis.com/us/groups/collections/04549857-4025-45ec-b7c8-68f4a14a7562',
# Main channel World:
# World news:
'https://mobile.guardianapis.com/us/groups/collections/49ac-e112-5a5c-ff9d',
# Most viewed in world news:
'https://mobile.guardianapis.com/us/groups/collections/558b-3b8b-3485-d9ff',
# Cities:
'https://mobile.guardianapis.com/us/groups/collections/df6de230-16cf-4c2a-b892-fff893fccec5',
# Around the world:
'https://mobile.guardianapis.com/us/groups/collections/951dae3c-6b0d-4cb1-919f-e69f671cc39a',
# Opinion & analysis:
'https://mobile.guardianapis.com/us/groups/collections/e92b-7852-9615-d281',
# Spotlight:
'https://mobile.guardianapis.com/us/groups/collections/030e17a4-00f1-4aca-8d78-7b152c992111',
# Global development:
'https://mobile.guardianapis.com/us/groups/collections/9d2d867a-964c-43cc-a05d-3e517cc89247',
# Asia channels nested under World:
# Asia:
'https://mobile.guardianapis.com/us/groups/collections/c531-61e2-fb9d-fcc7',
# Asia Pacific:
'https://mobile.guardianapis.com/us/groups/collections/4f5e7a27-0e66-47df-b3ff-d49d884399a0',
# South & central Asia:
'https://mobile.guardianapis.com/us/groups/collections/5b6dfa32-4782-49d4-88bc-a020b8cc695e',
]
category = [
# Main channel Home:
'Home-Headlines',
'Home-Spotlight',
# 'Home-World Cup 2018',
'Home-Opinions',
'Home-Culture',
'Home-Across the country',
'Home-Around the world',
'Home-Explore',
'Home-Most viewed',
# Main channel US:
'US-Headlines',
'US-Business',
'US-Politics',
'US-Most viewed in US News',
# Main channel Politics:
'Politics-Headlines',
'Politics-Opinion',
'Politics-Trump Russia investigation',
'Politics-Most viewed in US politics',
# Main channel World:
'World-World news',
'World-Most viewed in world news',
'World-Cities',
'World-Around the world',
'World-Opinion & analysis',
'World-Spotlight',
'World-Global development',
# Asia channels nested under World:
'World-Asia',
'World-Asia Pacific',
'World-South & central Asia',
]
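# start_urls and category are parallel lists; keep them aligned (toggle the commented-out World Cup entries in both or in neither)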
# print len(start_urls),len(category)
for i in range(len(start_urls)):
yield scrapy.Request(start_urls[i],
meta={
'category':category[i],
},
callback=self.parse_item)
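# each collection endpoint returns JSON; when a 'cards' list is present, every card carries the article
# under card['item'] (title, trailText, body HTML, displayImages, bodyImages, webPublicationDate, byline)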
def parse_item(self, response):
data_json = json.loads(response.body)
if 'cards' in data_json.keys():
for item in data_json['cards']:
category = response.meta['category']
title = item['item']['title']
pic_url = item['item']['displayImages'][0]['urlTemplate'].replace('w=#{width}&h=#{height}&quality=#{quality}','')
describe = item['item']['trailText']
app_name = '英国卫报'
try:
selector = HTML(item['item']['body'])
except:
continue  # skip cards without a parsable body instead of aborting the remaining cards
content = selector.xpath('//text()')
content = ''.join(content)
content = content.replace('\t', '').replace('\n', '').replace('\r', '')
publishedDate = item['item']['webPublicationDate'].replace('T',' ').replace('Z','')
author = item['item']['byline']
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
home_url = response.url
url = 'https://www.theguardian.com/'+item['item']['id']
pic_more_url = []
for pic in item['item']['bodyImages']:
pic_more_url.append(pic['urlTemplate'].replace('w=#{width}&h=#{height}&quality=#{quality}',''))
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
print '\n\n'
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M:%S")
timenum = int(time.mktime(timeArray))
if timenum >= self.timeStamp:
self.count += 1
item['count'] = self.count
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(timenum)))
item['publishedDate'] = publishedDate
yield item
<file_sep>#coding=utf-8
import json
import scrapy
import time, re
from news.items import NewsItem
class ydzx(scrapy.Spider):
name='ydzxnews'
start_urls=[
# Entertainment
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c3&cstart=0&cend=10&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517188240125',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c3&cstart=10&cend=20&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517188240128',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c3&cstart=20&cend=30&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517188240129',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c3&cstart=30&cend=40&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517188240130',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c3&cstart=40&cend=50&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517188240131',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c3&cstart=50&cend=60&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517188240132',
# Sports
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c9&cstart=0&cend=10&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517193983454',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c9&cstart=10&cend=20&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517193983457',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c9&cstart=20&cend=30&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517193983458',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c9&cstart=30&cend=40&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517193983459',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c9&cstart=40&cend=50&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517193983459',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c9&cstart=50&cend=60&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517193983459',
# Military
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c7&cstart=0&cend=10&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517194497427',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c7&cstart=10&cend=20&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517194497430',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c7&cstart=20&cend=30&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517194497431',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c7&cstart=30&cend=40&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517194497432',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c7&cstart=40&cend=50&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517194497433',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c7&cstart=50&cend=60&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517194497434',
# Sports
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c2&cstart=0&cend=10&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517194604343',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c2&cstart=10&cend=20&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517194604346',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c2&cstart=20&cend=30&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517194604347',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c2&cstart=30&cend=40&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517194604348',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c2&cstart=40&cend=50&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517194604349',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c2&cstart=50&cend=60&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517194604350',
# Finance
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c5&cstart=0&cend=10&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517194673075',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c5&cstart=10&cend=20&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517194673078',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c5&cstart=20&cend=30&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517194673079',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c5&cstart=30&cend=40&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517194673080',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c5&cstart=40&cend=50&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517194673081',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c5&cstart=50&cend=60&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517194673082',
#NBA
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=sc4&cstart=0&cend=10&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517194775137',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=sc4&cstart=10&cend=20&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517194775140',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=sc4&cstart=20&cend=30&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517194775141',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=sc4&cstart=30&cend=40&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517194775142',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=sc4&cstart=40&cend=50&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517194775143',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=sc4&cstart=50&cend=60&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517194775144',
# Cars
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c11&cstart=0&cend=10&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517194875329',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c11&cstart=10&cend=20&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517194875332',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c11&cstart=20&cend=30&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517194875333',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c11&cstart=30&cend=40&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517194875334',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c11&cstart=40&cend=50&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517194875335',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=c11&cstart=50&cend=60&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517194875336',
# Video
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=u13746&cstart=0&cend=10&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517195187853',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=u13746&cstart=10&cend=20&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517195187856',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=u13746&cstart=20&cend=30&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517195187857',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=u13746&cstart=30&cend=40&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517195187858',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=u13746&cstart=40&cend=50&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517195187859',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=u13746&cstart=50&cend=60&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517195187860',
# Trending
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=hot&cstart=0&cend=10&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517195303945',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=hot&cstart=10&cend=20&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517195405518',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=hot&cstart=20&cend=30&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517195405519',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=hot&cstart=30&cend=40&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517195405520',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=hot&cstart=40&cend=50&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517195405521',
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=hot&cstart=50&cend=60&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=1517195405522',
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
data=json.loads(response.body)
data=data['result']
for i in range(0,len(data)):
title = data[i]['title']
pubTime = data[i]['date']
url = data[i]['docid']
pic_url = data[i]['image_urls']
describe = data[i]['summary']
try:
category = data[i]['category']
except:
category = '要闻'
url = 'http://www.yidianzixun.com/article/'+url
# print title
# print pubTime
# print url
# print pic_url
# print describe
# print category
yield scrapy.Request(url,meta={
'title':title,
'pubTime':pubTime,
'pic_url':pic_url,
'describe':describe,
'category':category,
'home_url':response.url
},callback=self.parse_item,dont_filter=True)
def parse_item(self,response):
title = response.meta['title']
publishedDate = response.meta['pubTime']
pic_url = response.meta['pic_url']
pic_urlt = ''  # default, so the field is defined even when the article carries no image urls
for i in range(0, len(pic_url)):
    if 'http' not in pic_url[i]:
        pic_urlt = ''
    else:
        pic_urlt = str(pic_url)
describe = response.meta['describe']
category = response.meta['category']
home_url = response.meta['home_url']
app_name='一点资讯_b'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
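# three article layouts are handled in order below: standard articles (#imedia-article),
# a content-bd fallback, and video pages (video-wrapper)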
try:
try:
content = response.xpath('//div[@id="imedia-article"]').extract()
contentt = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', contentt)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
except:
content = response.xpath('//div[@class="content-bd"]').extract()
contentt = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', contentt)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
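# whitespace was stripped from contentt above, so '<img src=' appears as '<imgsrc=' in the regex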
pic_more_url = re.findall('<imgsrc="(.*?)"',contentt)
pic_more_url1 =[]
for i in range(0,len(pic_more_url)):
if 'http' not in pic_more_url[i]:
pic_more_urlt = 'http:' + pic_more_url[i]
pic_more_url1.append(pic_more_urlt)
else:
pic_more_url1.append(pic_more_url[i])
pic_more_url = str(set(pic_more_url1))
except:
content = response.xpath('//div[@class="video-wrapper"]').extract()
contentt = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('<videosrc="(.*?)"',contentt)[0]
pic_more_url =''
try:
author = response.xpath('//a[@class="doc-source"]/text()').extract()[0]
except:
author = ''
print "app名称", app_name
print "主图片url", pic_urlt
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_urlt
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M:%S")
timenum = int(time.mktime(timeArray))
if timenum >= self.timeStamp:
self.count +=1
item['count']= self.count
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(timenum)))
item['publishedDate'] = publishedDate
yield item
<file_sep>#coding=utf-8
import scrapy
import time
import json
import re
from news.items import NewsItem
from scrapy.selector import Selector
from news.DataResource import TransportData
class bbc(scrapy.Spider):
name='duanchuanmei'
allowed_domains = ["theinitium.com"]
start_urls=[
'https://theinitium.com/channel/feature/',
'https://theinitium.com/channel/news-brief/',
'https://theinitium.com/channel/roundtable/',
'https://theinitium.com/channel/travel/',
'https://theinitium.com/channel/notes-and-letters/',
'https://theinitium.com/channel/pick_up/'
]
base_url='https://theinitium.com'
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
links=response.xpath('//li[@class="l-waffle-1 l-waffle-sm-2 l-waffle-md-3 l-waffle-lg-4"]/div[1]/a/@href').extract()
pic_url=response.xpath('//li[@class="l-waffle-1 l-waffle-sm-2 l-waffle-md-3 l-waffle-lg-4"]/div[1]/a/div/@style').extract()
title=response.xpath('//li[@class="l-waffle-1 l-waffle-sm-2 l-waffle-md-3 l-waffle-lg-4"]/div[1]/div/div/a/h3/text()').extract()
summary=response.xpath('//li[@class="l-waffle-1 l-waffle-sm-2 l-waffle-md-3 l-waffle-lg-4"]/div[1]/div/div/p/text()').extract()
Url=[]
Pic_url=[]
Summary=[]
Title=[]
for i in range(0, len(links)):
    url = self.base_url + links[i]
    # do not rebind the source lists while iterating; use per-item temporaries
    try:
        pic = pic_url[i]
    except:
        pic = ''
    try:
        summ = summary[i]
    except:
        summ = ''
    Url.append(url)
    Title.append(title[i])
    Pic_url.append(pic)
    Summary.append(summ)
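# second card layout on the channel pages (l-waffle-sm-3 items)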
links = response.xpath('//li[@class="l-waffle-1 l-waffle-sm-3"]/div[1]/a/@href').extract()
pic_url = response.xpath('//li[@class="l-waffle-1 l-waffle-sm-3"]/div[1]/a/div/@style').extract()
title = response.xpath('//li[@class="l-waffle-1 l-waffle-sm-3"]/div[1]/div/div[2]/a/h3/text()').extract()
summary = response.xpath('//li[@class="l-waffle-1 l-waffle-sm-3"]/div[1]/div/div[2]/p/text()').extract()
for i in range(0, len(links)):
    url = self.base_url + links[i]
    # print url
    try:
        pic = pic_url[i]
    except:
        pic = ''
    # print title[i]
    try:
        summ = summary[i]
    except:
        summ = ''
    Url.append(url)
    Title.append(title[i])
    Pic_url.append(pic)
    Summary.append(summ)
for i in range(0,len(Url)):
yield scrapy.Request(Url[i],meta={
'title':Title[i],
'pic_url':Pic_url[i],
'summary':Summary[i],
'home_url':response.url
},callback=self.parse_item)
def parse_item(self,response):
title=response.meta['title']
pic_url=response.meta['pic_url']
describe=response.meta['summary']
home_url=response.meta['home_url']
app_name='端传媒新闻'
pubTime=response.xpath('//time[@class="posted-time"]/text()').extract()[0].replace('\t','').replace('\n','').replace('\r','').replace(' ','')
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
content=response.xpath('//div[@class="p-article__content u-content l-col-12 l-col-lg-9" ]').extract()
content = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', content)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
try:
category = response.xpath('//div[@class="p-article__channels"]/span[2]/a/text()').extract()[0]
except:
category = response.xpath('//div[@class="p-article__channels"]/span[1]/a/text()').extract()[0]
pic_more_url=''
try:
author=response.xpath('//span[@class="byline u-inline-divider"]/span/text()').extract()[0]
except:
author=''
# print "app名称", app_name
# print "主图片url", pic_url
# print "子图片url", pic_more_url
# print "作者", writer
# print "详情页地址", response.url
# print "所属类型", content_type
# print "标题", title
# print "描述", summary
# print "内容", content
# print "主url", home_url
# print "发布时间", pubTime
# print "爬取时间", crawlTime
#
# item = NewsItem()
# item['app_name'] = app_name
# item['pic_url'] = pic_url
# item['pic_more_url'] = pic_more_url
# item['writer'] = writer
# item['content_url'] = response.url
# item['content_type'] = content_type
# item['title'] = title
# item['summary'] = summary
# item['content'] = content
# item['home_url'] = home_url
# item['pubTime'] = pubTime
# item['crawlTime'] = crawlTime
# yield item
publishedDate = pubTime
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['crawlTime'] = crawlTime
# existing_title = TransportData.getData('端新闻_b')
        # list of titles that meet the requirements and can be stored
acceptable_title = []
# t = pubTime.split(' ')[0]
timeArray = time.strptime(pubTime, "%Y-%m-%d")
publishedDate = time.mktime(timeArray)
# if title not in existing_title:
if publishedDate >= self.timeStamp:
acceptable_title.append(title)
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(publishedDate)))
item['publishedDate'] = publishedDate
print "okokokokok"
self.count += 1
item['count'] = self.count
yield item
# TransportData.transport_data(app_name, pic_url, pic_more_url, author, response.url, category, title,
# describe,
# content,
# home_url, publishedDate, crawlTime)<file_sep>#coding=utf-8
import scrapy
import json, re, time
from news.items import NewsItem
from lxml.etree import HTML
class yindushibao(scrapy.Spider):
name = 'yindushibao'
start_urls = [
'http://timesofindia.indiatimes.com/feeds/newslistingfeedmc/feedtype-sjson,msid-1081479906,tag-alrt,uid-Entertainment-01,category-Entertainment-01,t-f.cms?platform=android&andver=498&adreqfrm=sec',
'https://timesofindia.indiatimes.com/feeds/homenewslistingfeed/feedtype-sjson,msid-51396865,tag-strt,uid-Top-01.cms?platform=android&andver=498&adreqfrm=home',
'http://timesofindia.indiatimes.com/feeds/newslistingfeedmc/feedtype-sjson,msid--2128932452,tag-alrt,uid-City-01,category-City-01,t-f.cms?platform=android&andver=498&adreqfrm=sec',
'http://timesofindia.indiatimes.com/feeds/newslistingfeedmc/feedtype-sjson,msid-4440100,tag-alrt,uid-India-01.cms?platform=android&andver=498&adreqfrm=sec',
'http://timesofindia.indiatimes.com/feeds/newslistingfeed/feedtype-sjson,msid-62542876,tag-alrt,uid-StateElec2017-01,category-KarnatakaElect2018-01.cms?platform=android&andver=498&adreqfrm=sec',
'http://timesofindia.indiatimes.com/feeds/newslistingfeedmc/feedtype-sjson,msid-1898184,tag-alrtdf,uid-World-01.cms?platform=android&andver=498&adreqfrm=sec',
'http://timesofindia.indiatimes.com/feeds/newslistingfeedmc/feedtype-sjson,msid-30359486,tag-alrtdf,uid-World-01,category-WorldUS-01.cms?platform=android&andver=498&adreqfrm=sec'
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
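        # each feed is a JSON document: 'hl' is the headline, 'wu' the web url, 'upd'/'lpt' the publish time in milliseconds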
data = json.loads(response.body)
        # the feed wraps all entries under the 'items' key
        data = data['items']
for i in range(len(data)):
try:
title = data[i]['hl']
url = data[i]['wu']
try:
pubt = data[i]['upd']
except:
pubt = data[i]['lpt']
if float(pubt)/1000 >= self.timeStamp:
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(pubt)/1000))
yield scrapy.Request(url,meta={
'title':title,
'home_url': response.url,
'pubt':publishedDate
}, callback=self.parse_item)
except:
pass
def parse_item(self, response):
title = response.meta['title']
home_url = response.meta['home_url']
app_name = '印度时报'
pic_url = ''
describe = ''
author = ''
publishedDate = response.meta['pubt']
content = response.xpath('//div[@class="Normal"]').extract()
try:
contentt = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '').replace(' ', '')
content = re.findall('>(.*?)<', contentt)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
except:
content = response.xpath('//p').extract()
contentt = ''
for i in range(len(content)):
contentt += content[i]
content = contentt.replace('<p>', '').replace('</p>', '').replace(' ', '').replace('\t', '')
content = content.replace('\n', '').replace('\r', '')
pic_more_url = ''
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
category = ''
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
yield item<file_sep># https://api-prod.wallstreetcn.com/apiv1/content/fabricate-articles?cursor=&accept=article%2Ctopic%2Cnewsroom%2Cnewsrooms%2Cad&channel=global&limit=25
#-*- coding: utf-8 -*-
from scrapy.spider import Spider
from urlparse import urljoin
from scrapy.selector import Selector
from scrapy.http import Request
import time
import json
import re
import sys
from news.DataResource import TransportData
import scrapy
from news.items import NewsItem
reload(sys)
sys.setdefaultencoding('utf8')
class dongfang(Spider):
name = "dongfangtoutiao"
start_urls = [
"https://refreshnews.dftoutiao.com/toutiao_appnew02/newsgzip"
]
DOWNLOAD_DELAY = 0
count = 0
appname = "东方头条"
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str,"%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
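        # one POST per channel; the pagination keys for the next page come back in the response body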
types = [
"guoji",
"caijing",
"redian"
]
pgnum = 1
idx = 0
for type in types:
yield scrapy.FormRequest(
response.url,
formdata={
"type": type,
"startkey": "null",
"newkey": "null",
"pgnum": str(pgnum),
"idx": str(idx),
"key": "37691bebb4a7706e",
"softtype": "TouTiao",
"softname": "DFTTAndroid",
"ime": "355456060868994",
"appqid": "dftt170925",
"apptypeid": "DFTT",
"ver": "1.8.2",
"os": "Android5.0.2",
"ttaccid": "null",
"appver": "010802",
"deviceid": "3b6b507<KEY>",
"position": "北京",
"iswifi": "wifi",
"channellabel": ""
},
meta={"type": type, "idx": idx, "pgnum": pgnum},
callback=self.parse_next
)
def parse_next(self,response):
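        # parse one page of a channel feed, queue the article requests, then request the next page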
type = response.meta['type']
idx = response.meta['idx']
pgnum = response.meta['pgnum']
results = json.loads(response.body)
endkey = results['endkey']
newkey = results['newkey']
if newkey:
newkey = str(newkey)
else:
newkey = "null"
results = results['data']
if len(results) > 1:
acceptable_title = []
for result in results:
category = response.meta['type']
publishedDate = result['date']
print publishedDate
a = str(publishedDate)
a = time.strptime(a, "%Y-%m-%d %H:%M")
author = result['source']
title = result['topic']
url = result['url']
t = int(time.mktime(a))
if t > self.timeStamp :
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(t))
acceptable_title.append(title)
yield Request(url, meta={
"category": category,
"publishedDate": publishedDate,
"author": author,
"title": title}, callback=self.parse_news)
pgnum = int(pgnum) + 1
if type in ["redian"]:
idx = int(idx) + 20
else:
idx = int(idx) + 15
if len(acceptable_title) > 1:
yield scrapy.FormRequest(
response.url,
formdata={
"type": type,
"startkey": endkey,
"newkey": newkey,
"pgnum": str(pgnum),
"idx": str(idx),
"key": "37691bebb4a7706e",
"softtype": "TouTiao",
"softname": "DFTTAndroid",
"ime": "355456060868994",
"appqid": "dftt170925",
"apptypeid": "DFTT",
"ver": "1.8.2",
"os": "Android5.0.2",
"ttaccid": "null",
"appver": "010802",
"deviceid": "3b6b5070d0c2471d",
"position": "北京",
"iswifi": "wifi",
"channellabel": ""
},
meta={"type": type, "idx": idx, "pgnum": pgnum},
callback=self.parse_next
)
def parse_news(self,response):
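        # pages that are just link collections are followed recursively before the article fields are extracted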
hxs = Selector(response)
list = hxs.xpath('//div[@class="ctg-content"]//a/@href').extract()
if list:
for i in list:
yield Request(url= i, meta={
"publishedDate": response.meta["publishedDate"],
"author":response.meta['author'],
"title": response.meta['title'],
"category": response.meta['category']},
callback=self.parse_news)
describe = ""
home_url = "http://mini.eastday.com"
category = response.meta['category']
author = response.meta['author']
publishedDate = response.meta['publishedDate']
crawlTime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
pic_url = ""
pic_more_url = hxs.xpath("//div[@id='content']//img/@src").extract()
if pic_more_url:
pic_more_url = set(pic_more_url)
else:
pic_more_url = set()
content = hxs.xpath("//div[@id='content']//text()").extract()
content = "".join(content)
content = content.replace("\n","").replace(" ","")
title1 = hxs.xpath("//div[@id='title']//text()").extract()
title = ""
title2 = hxs.xpath("//title/text()").extract()
if title2:
title = title2[0]
elif title1:
title = title1[0]
title = title.replace("\n", "").replace(" ", "")
self.count = self.count + 1
print self.count
print self.appname
print pic_url
print pic_more_url
print author
print response.url
print category
print title
print describe
print content
print home_url
print publishedDate
print crawlTime
url = response.url
item = NewsItem()
item['app_name'] = self.appname
item['count'] = self.count
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content.replace('\r','').replace('\n','')
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
if category=='lishi':
item['category'] =u'历史'.encode('utf-8')
elif category=='guoji':
item['category'] =u'国际'.encode('utf-8')
elif category=='caijing':
item['category'] =u'财经'.encode('utf-8')
elif category =='junshi':
item['category'] =u'军事'.encode('utf-8')
else:
item['category'] =u'推荐'.encode('utf-8')
numappName = self.readjson()
if len(numappName) == 0:
items = {
'url':response.url
}
with open('dongfangtoutiao.json','a+') as fp:
line = json.dumps(dict(items),ensure_ascii = False) + '\n'
fp.write(line)
yield item
        else:
            # only record and emit the article if its url has not been seen before
            for i in range(len(numappName)):
                if numappName[i]['url'] == response.url:
                    return
            items = {
                'url': response.url
            }
            with open('dongfangtoutiao.json', 'a+') as fp:
                line = json.dumps(dict(items), ensure_ascii=False) + '\n'
                fp.write(line)
            yield item
    def readjson(self):
        # read the urls recorded by previous runs from the local json-lines file
        s = []
        try:
            with open('dongfangtoutiao.json', 'r') as file_object:
                for line in file_object:
                    line = line.strip()
                    if not line:
                        continue
                    s.append(json.loads(line))
        except IOError:
            # the file does not exist yet on the first run
            pass
        return s
<file_sep>#coding=utf-8
import scrapy
import json,re,time
from news.items import NewsItem
from lxml.etree import HTML
class meiguozhiyin(scrapy.Spider):
name = 'meiguozhiyin'
start_urls = [
'https://www.voachinese.com/mobapp/zones.xml'
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
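        # zones.xml lists every channel; only the whitelisted Chinese-language channels are fetched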
num = response.xpath('//item/zone/@id').extract()
cate = response.xpath('//item/zone/name/text()').extract()
for i in range(len(num)):
numt = num[i]
category = cate[i]
            # extract() returns unicode, so compare against unicode literals
            if category in (u'国际', u'亚太', u'全球议题', u'美国', u'中国', u'台湾',
                            u'港澳', u'美中关系', u'美中贸易争端', u'韩朝峰会',
                            u'朝鲜核问题', u'台海两岸关系', u'南中国海争端',
                            u'世界媒体看中国', u'时事看台', u'焦点对话'):
url = 'https://www.voachinese.com/mobapp/articles.xml?zoneid=%s&html=2'%numt
yield scrapy.Request(url, meta={
'category': category
}, callback=self.parse_item)
def parse_item(self, response):
category = response.meta['category']
app_name = '美国之音'
home_url = 'https://www.voachinese.com/'
pic_url = ''
describe = ''
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
author = ''
tit = response.xpath('//item/article/title/text()').extract()
pic_more = response.xpath('//item/article/img/@src').extract()
conten = response.xpath('//item/article/content/text()').extract()
publishedDat = response.xpath('//item/article/@pubDate').extract()
pic_more_url = ''
for i in range(len(tit)):
title = tit[i]
content = conten[i].replace('\t', '').replace('\n', '').replace('\r', '')
publishedDate = publishedDat[i].replace(' GMT','').replace('Tue, ','').replace('Thu, ','')
publishedDate = publishedDate.replace('Wed, ','').replace('Sat, ','').replace('Mon, ','')
publishedDate = publishedDate.replace('Sun, ','').replace('Fri, ','')
t = publishedDate.split(' ')
t1 = t[0]
t2 = t[1]
t3 = t[2]
t4 = t[3]
publishedDate = t3 + '-' + t2 + '-' + t1 + ' ' + t4
            # translate the English month abbreviation in the date string to its number
            # ('Sept' has to be checked before 'Sep')
            for name, num in [('Sept', '09'), ('Jan', '01'), ('Feb', '02'), ('Mar', '03'),
                              ('Apr', '04'), ('May', '05'), ('Jun', '06'), ('Jul', '07'),
                              ('Aug', '08'), ('Sep', '09'), ('Oct', '10'), ('Nov', '11'),
                              ('Dec', '12')]:
                if name in publishedDate:
                    publishedDate = publishedDate.replace(name, num)
                    break
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M")
timenum = int(time.mktime(timeArray))
if timenum >= self.timeStamp:
self.count += 1
item['count'] = self.count
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(timenum)))
item['publishedDate'] = publishedDate
yield item
<file_sep>#-*- coding: utf-8 -*-
import time
import re
from news.items import NewsItem
# import sys
# reload(sys)
# sys.setdefaultencoding('utf8')
import scrapy
class Lianhezaob(scrapy.Spider):
name = "lianhezaobao"
start_urls = [
'http://www.zaobao.com/realtime/china',#中国
'http://www.zaobao.com/news/china',
'http://www.zaobao.com/realtime/world',#国际
'http://www.zaobao.com.sg/znews/greater-china',
'http://www.zaobao.com.sg/znews/international',
'http://www.zaobao.com.sg/realtime/china',
'http://www.zaobao.com.sg/realtime/world',
'http://www.zaobao.com.sg/zfinance/realtime',
'http://www.zaobao.com/znews/international',
'http://www.zaobao.com/finance/realtime',#财经
'http://www.zaobao.com/news/greater-china',#中港台
'http://www.zaobao.com/opinions/editorial',#言论
'http://www.zaobao.com.sg/zfinance/realtime',#财经
'http://www.zaobao.com/realtime/singapore',
'http://www.zaobao.com/finance/china',
'http://www.zaobao.com/special/report/politic/cnpol',
'http://www.zaobao.com/forum/views',
]
base_url = "http://www.zaobao.com"
count = 0
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
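        # listing pages expose article links; relative links are joined onto the site root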
links_url = response.xpath('//div[@class=" row list"]/div[1]/a/@href').extract()
for i in range(0,len(links_url)):
if 'http' not in links_url[i]:
url = 'http://www.zaobao.com/' + links_url[i]
yield scrapy.Request(url,meta={
'home_url':response.url
}, callback=self.parse_news,dont_filter=True)
else:
url = links_url[i]
yield scrapy.Request(url,meta={
'home_url':response.url
},callback=self.parse_news,dont_filter=True)
def parse_news(self,response):
title = response.xpath('//h1/text()').extract()
if title:
title = title[0].encode('utf-8')
else:
return
publishedDate = response.xpath('//span[@class="datestamp date-published meta-date-published"]').extract()
p = publishedDate[0].replace('\t','').replace('\n','').replace('\r','')
p = re.findall('>(.*?)<', p)
pp = ''
for i in range(len(p)):
pp += p[i]
publishedDate = pp.replace('发布', '').replace(' ', '').replace('AM', '').replace('PM', '')
publishedDate = publishedDate.replace('年', '-').replace('月', '-').replace('日', ' ').replace('/','')
print publishedDate
# if publishedDate:
# publishedDate = publishedDate[0].encode('utf-8')
# publishedDate = re.findall('\d+',publishedDate)
# year = publishedDate[0]
# month = publishedDate[1]
# day = publishedDate[2]
# publishedDate = year+'-'+month+'-'+day
# else:
# publishedDate = self.time_str
content = response.xpath('//div[@id="FineDining"]/p//text()').extract()
if content:
content = ''.join(content).encode('utf-8').strip()
else:
content = ''
pic_url = ''
author = ""
pic_more_url = set()
        category = response.xpath('//section[@id="breadcrumbs"]/a[3]/text()').extract()
        if category:
            # normalise the breadcrumb label while it is still unicode, then encode it
            category = category[0]
            if category == u'社论':
                category = u'观点'
            elif category == u'中港台即时':
                category = u'中港台'
            elif category == u'国际即时':
                category = u'国际'
            elif category == u'中国即时':
                category = u'中国'
            category = category.encode('utf-8')
        else:
            category = u'首页'.encode('utf-8')
home_url = response.meta['home_url']
if u'/china' in home_url:
category = u'中国'.encode('utf-8')
elif u'-china' in home_url:
category = u'中港台'.encode('utf-8')
elif u'finance' in home_url:
category = u'中国财经'.encode('utf-8')
elif u'world' in home_url:
category = u'国际'.encode('utf-8')
elif u'international' in home_url:
category = u'国际'.encode('utf-8')
elif u'opinions' in home_url:
category = u'观点'.encode('utf-8')
elif 'view' in home_url:
category = u'观点'.encode('utf-8')
else:
category = u'首页'.encode('utf-8')
describe = response.xpath('//div[@id="FineDining"]/p[1]//text()').extract()
if describe:
describe = describe[0].encode('utf-8')
else:
describe = ''
home_url = "http://www.zaobao.com/"
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(time.time())))
app_name = '联合早报'
item = NewsItem()
item['app_name'] = app_name
item['count'] = self.count
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = response.url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M")
timenum = int(time.mktime(timeArray))
if timenum >= self.timeStamp:
self.count += 1
item['count'] = self.count
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(timenum)))
item['publishedDate'] = publishedDate
yield item<file_sep>#coding=utf-8
import json
import scrapy
import time,re,time
from news.items import NewsItem
from news.DataResource import TransportData
class fenghuang(scrapy.Spider):
name = 'fenghuangxinwen'
start_urls = [
'http://api.iclient.ifeng.com/ClientNews?id=SYLB10,SYDT10&page=1&newShowType=1&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#头条
'http://api.iclient.ifeng.com/ClientNews?id=SYLB10,SYDT10&page=2&newShowType=1&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#头条
'http://api.iclient.ifeng.com/ClientNews?id=SYLB10,SYDT10&page=3&newShowType=1&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#头条
'http://api.iclient.ifeng.com/ClientNews?id=SYLB10,SYDT10&page=4&newShowType=1&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#头条
'http://api.iclient.ifeng.com/ClientNews?id=SYLB10,SYDT10&page=5&newShowType=1&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#头条
'http://api.iclient.ifeng.com/ClientNews?id=TWOSES,FOCUSTWOSES&page=1&newShowType=1&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#两会
'http://api.iclient.ifeng.com/ClientNews?id=TWOSES,FOCUSTWOSES&page=2&newShowType=1&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#两会
'http://api.iclient.ifeng.com/ClientNews?id=TWOSES,FOCUSTWOSES&page=3&newShowType=1&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#两会
'http://api.iclient.ifeng.com/ClientNews?id=TWOSES,FOCUSTWOSES&page=4&newShowType=1&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#两会
'http://api.iclient.ifeng.com/ClientNews?id=TWOSES,FOCUSTWOSES&page=5&newShowType=1&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#两会
'http://api.iclient.ifeng.com/ClientNews?id=19METTING&page=1&newShowType=1&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#新时代
'http://api.iclient.ifeng.com/ClientNews?id=19METTING&page=2&newShowType=1&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#新时代
'http://api.iclient.ifeng.com/ClientNews?id=19METTING&page=3&newShowType=1&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#新时代
'http://api.iclient.ifeng.com/ClientNews?id=19METTING&page=4&newShowType=1&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#新时代
'http://api.iclient.ifeng.com/ClientNews?id=19METTING&page=5&newShowType=1&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#新时代
'http://api.iclient.ifeng.com/ClientNews?id=YAOWEN223&page=1&newShowType=1&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#要闻
'http://api.iclient.ifeng.com/ClientNews?id=YAOWEN223&page=2&newShowType=1&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#要闻
'http://api.iclient.ifeng.com/ClientNews?id=YAOWEN223&page=3&newShowType=1&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#要闻
'http://api.iclient.ifeng.com/ClientNews?id=YAOWEN223&page=4&newShowType=1&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#要闻
'http://api.iclient.ifeng.com/ClientNews?id=YAOWEN223&page=5&newShowType=1&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#要闻
'http://api.iclient.ifeng.com/ClientNews?id=CJ33,FOCUSCJ33,HNCJ33&page=1&newShowType=1&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#财经
'http://api.iclient.ifeng.com/ClientNews?id=CJ33,FOCUSCJ33,HNCJ33&page=2&newShowType=1&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#财经
'http://api.iclient.ifeng.com/ClientNews?id=CJ33,FOCUSCJ33,HNCJ33&page=3&newShowType=1&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#财经
'http://api.iclient.ifeng.com/ClientNews?id=CJ33,FOCUSCJ33,HNCJ33&page=4&newShowType=1&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#财经
'http://api.iclient.ifeng.com/ClientNews?id=CJ33,FOCUSCJ33,HNCJ33&page=5&newShowType=1&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#财经
'http://api.iclient.ifeng.com/ClientNews?id=CJ33,FOCUSCJ33,HNCJ33&page=6&newShowType=1&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#财经
'http://api.iclient.ifeng.com/ClientNews?id=LS153,FOCUSLS153&page=1&newShowType=1&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#历史
'http://api.iclient.ifeng.com/ClientNews?id=LS153,FOCUSLS153&page=2&newShowType=1&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#历史
'http://api.iclient.ifeng.com/ClientNews?id=LS153,FOCUSLS153&page=3&newShowType=1&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#历史
'http://api.iclient.ifeng.com/ClientNews?id=LS153,FOCUSLS153&page=4&newShowType=1&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#历史
'http://api.iclient.ifeng.com/ClientNews?id=LS153,FOCUSLS153&page=5&newShowType=1&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#历史
# 'http://api.iclient.ifeng.com/ClientNews?id=GJPD,FOCUSGJPD&page=1&newShowType=1&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#国际
'http://api.iclient.ifeng.com/irecommendList?userId=867637959598351&count=6&gv=5.2.0&av=5.2.0&uid=867637959598351&deviceid=867637959598351&proid=ifengnews&os=android_22&df=androidphone&vt=5&screen=720x1280&publishid=5008',#推荐
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
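        # the channel feeds and the recommend feed wrap the article list differently, hence the try/except below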
data = json.loads(response.body)
try:
data = data[0]['item']
for i in range(0,len(data)):
title = data[i]['title']
urlt = data[i]['link']['weburl']
yield scrapy.Request(urlt,meta={
'title':title,
'home_url':response.url
},callback=self.parse_item,dont_filter=True)
except:
data = data['item']
for i in range(0,len(data)):
title = data[i]['title']
urlt = data[i]['link']['weburl']
yield scrapy.Request(urlt,meta={
'title':title,
'home_url':response.url
},callback=self.parse_item,dont_filter=True)
def parse_item(self,response):
app_name = '凤凰新闻'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
pic_url = ''
title = response.meta['title']
home_url = response.meta['home_url']
author = ''
describe = ''
try:
publishedDate = response.xpath('//p[@class="n-i-time"]/text()').extract()[0]
except:
publishedDate = '2018-05-01 00:00:00'
try:
content = response.xpath('//div[@class="n-words"]').extract()
contentt = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', contentt)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
pic_more_url = response.xpath('//img[@class="z-img lazy"]/@src').extract()
pic_more_url1 = []
for i in range(0, len(pic_more_url)):
if 'http' not in pic_more_url[i]:
pic_more_urlt = 'http:' + pic_more_url[i]
pic_more_url1.append(pic_more_urlt)
else:
pic_more_url1.append(pic_more_url[i])
pic_more_url = str(set(pic_more_url1))
if 'SYLB10,SYDT10' in home_url:
category = u'头条'.encode('utf-8')
elif 'TWOSES,FOCUSTWOSES' in home_url:
category = u'要闻'.encode('utf-8')
elif '19METTING' in home_url:
category = u'新时代'.encode('utf-8')
elif 'YAOWEN223' in home_url:
category = u'要闻'.encode('utf-8')
elif 'CJ33,FOCUSCJ33,HNCJ33' in home_url:
category = u'财经'.encode('utf-8')
elif 'LS153,FOCUSLS153' in home_url:
category = u'历史'.encode('utf-8')
elif 'GJPD,FOCUSGJPD' in home_url:
category = u'国际'.encode('utf-8')
else:
category = u'推荐'.encode('utf-8')
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = response.url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
timeArray = time.strptime(publishedDate, "%Y/%m/%d %H:%M")
timenum = int(time.mktime(timeArray))
if timenum >= self.timeStamp:
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(timenum)))
item['publishedDate'] = publishedDate
self.count += 1
item['count'] = self.count
yield item
except:
pass
<file_sep>#-*- coding: utf-8 -*-
from scrapy.spider import Spider
from urlparse import urljoin
from scrapy.selector import Selector
from scrapy.http import Request
import time
import json
from lxml import etree
import re
import sys
from news.DataResource import TransportData
import scrapy
from news.items import NewsItem
from datetime import datetime
reload(sys)
sys.setdefaultencoding('utf8')
class Shijieribao(Spider):
name = "shijieribao"
base_url = "https://www.worldjournal.com/?variant=zh-cn"
count = 0
appname = "世界日报"
start_urls = [
'https://www.worldjournal.com/topic/%E5%9C%8B%E9%9A%9B%E6%96%B0%E8%81%9E%E7%B8%BD%E8%A6%BD/?pno=1',#国际
'https://www.worldjournal.com/topic/%E5%9C%8B%E9%9A%9B%E6%96%B0%E8%81%9E%E7%B8%BD%E8%A6%BD/?pno=2',#国际
'https://www.worldjournal.com/topic/%E5%9C%8B%E9%9A%9B%E6%96%B0%E8%81%9E%E7%B8%BD%E8%A6%BD/?pno=3',#国际
'https://www.worldjournal.com/topic/%E5%9C%8B%E9%9A%9B%E6%96%B0%E8%81%9E%E7%B8%BD%E8%A6%BD/?pno=4',#国际
'https://www.worldjournal.com/topic/%E8%A6%81%E8%81%9E%E6%96%B0%E8%81%9E/?pno=1',#热门
'https://www.worldjournal.com/topic/%E8%A6%81%E8%81%9E%E6%96%B0%E8%81%9E/?pno=2',#热门
'https://www.worldjournal.com/topic/%E8%A6%81%E8%81%9E%E6%96%B0%E8%81%9E/?pno=3',#热门
'https://www.worldjournal.com/topic/%E8%A6%81%E8%81%9E%E6%96%B0%E8%81%9E/?pno=4',#热门
'https://www.worldjournal.com/topic/%E5%8F%B0%E7%81%A3%E6%96%B0%E8%81%9E%E7%B8%BD%E8%A6%BD/?pno=1',#两岸
'https://www.worldjournal.com/topic/%E5%8F%B0%E7%81%A3%E6%96%B0%E8%81%9E%E7%B8%BD%E8%A6%BD/?pno=2',#两岸
'https://www.worldjournal.com/topic/%E4%B8%AD%E5%9C%8B%E6%96%B0%E8%81%9E%E7%B8%BD%E8%A6%BD/?pno=3',#两岸
'https://www.worldjournal.com/topic/%E4%B8%AD%E5%9C%8B%E6%96%B0%E8%81%9E%E7%B8%BD%E8%A6%BD/?pno=4',#两岸
'https://www.worldjournal.com/topic/%E7%A4%BE%E8%AB%96-2/?pno=1',#观点
'https://www.worldjournal.com/topic/%E7%A4%BE%E8%AB%96-2/?pno=2',#观点
'https://www.worldjournal.com/topic/%E7%A4%BE%E8%AB%96-2/?pno=3',#观点
'https://www.worldjournal.com/topic/%E7%A4%BE%E8%AB%96-2/?pno=4',#观点
]
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime('2018-06-10', "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
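        # each topic page lists post links, titles and datetime stamps in parallel arrays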
links_url = response.xpath('//div[@class="post-content"]/h2/a/@href').extract()
title = response.xpath('//div[@class="post-content"]/h2/a/text()').extract()
publish = response.xpath('//div[@class="post-content"]/time/@datetime').extract()
if len(links_url) > 0:
for i in range(0,len(links_url)):
url = links_url[i]
tit = title[i]
pub = publish[i]
desc = ''
yield Request(url,meta={
'title':tit,
'publish':pub,
'describe':desc,
'home_url':response.url
},callback=self.parse_item)
else:
pass
def parse_item(self,response):
title = response.meta['title']
publishedDate = response.meta['publish']
publishedDate = response.xpath('//time[@class="date"]/@datetime').extract()[0]
print publishedDate
describe = response.meta['describe']
home_url = response.meta['home_url']
app_name = '世界日报'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
author = ''
pic_url = ''
content =response.xpath('//div[@class="post-content"]').extract()
contentt = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', contentt)
contentdata = ''
for i in range(0, len(content)):
contentdata += content[i]
content = contentdata
try:
pic_more_url =response.xpath('//div[@class="img-holder"]/a/img/@src').extract()
pic_more_url1 = []
if len(pic_more_url) > 0:
for i in range(0, len(pic_more_url)):
pic_more_url1.append(pic_more_url[i])
pic_more_url = str(set(pic_more_url1))
else:
pic_more_url = ''
except:
pic_more_url = ''
        # the channel is identified by the percent-encoded topic slug in the listing url
        if '%E8%A6%81%E8%81%9E%E6%96%B0%E8%81%9E' in home_url:
            category = u'热门'.encode('utf-8')
elif '%E5%9C%8B%E9%9A%9B%E6%96%B0%E8%81%9E%E7%B8%BD%E8%A6%BD' in home_url:
category = u'国际'.encode('utf-8')
elif '%E5%8F%B0%E7%81%A3%E6%96%B0%E8%81%9E%E7%B8%BD%E8%A6%BD' in home_url:
category = u'两岸'.encode('utf-8')
elif '%E4%B8%AD%E5%9C%8B%E6%96%B0%E8%81%9E%E7%B8%BD%E8%A6%BD' in home_url:
category = u'两岸'.encode('utf-8')
elif '%E7%A4%BE%E8%AB%96' in home_url:
category = u'观点'.encode('utf-8')
else:
category = u'热门'.encode('utf-8')
url = response.url
self.count += 1
item = NewsItem()
item['app_name'] = app_name
item['count'] = self.count
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M:%S")
timenum = int(time.mktime(timeArray))
if timenum >= self.timeStamp:
yield item
<file_sep>import time
import requests
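# standalone probe of the Baidu News feed endpoint using plain requests (not a Scrapy spider)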
url = 'https://news.baidu.com/sn/api/feed_feedlist?pd=newsplus&os=android&sv=7.1.2.0&from=app&_uid=g8SNu0um2ulx8HuKlu2ci0is2tl5aB8o_iSW8_uNSiiOO2tgga2qi_u62ig8uvihA&_ua=_aBDCgaH-i46ywoUfpw1z4aBsiz5aX8D4a2AiqqHB&_ut=5yG_YtM1vC_bhvhJgODpOYhuA&_from=1019026r&_cfrom=1019026r&_network=1_0&cen=uid_ua_ut'
params = {
"ln": "20",
"os": "android",
"display_time": "1528777824421",
"from": "app",
"ver": "6",
"withtoppic": "0",
"network": {"wifi_aps":{"ap_mac":"70:05:14:7d:2a:5f","is_connected":True,"ap_name":"","rssi":-33},"ipv4":"172.18.173.37","cellular_id":"-1","operator_type":99,"connection_type":100},
"pd": "newsplus",
"user_category": "",
"cuid": "3ADAC23BAEBDC750FF38B3810FA334A1|918510050145753",
"action": "0",
"device": {"screen_size":{"height":1184,"width":768},"model":"Nexus 4","udid":{"android_id":"6140f143b1a4dd1e","mac":"70:05:14:7d:2a:5f","imei":"357541050015819"},"vendor":"LGE","device_type":1,"os_version":{"micro":0,"minor":4,"major":4},"os_type":1},
"sv": "7.1.2.0",
"gps": '{"timestamp":1528790165,"longitude":"116.365275","coordinate_type":3,"latitude":"39.969771"}',
"mid": "357541050015819_70:05:14:7d:2a:5f",
"loc_ll": "116.365275,39.969771",
"wf": "1",
}
data = requests.post(url, data=params)
print data.content<file_sep>#-*- coding: utf-8 -*-
from scrapy.spiders import Spider
import time
from news.items import NewsItem
class toutiaocaijing(Spider):
name = 'toutiaocaijing'
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
start_urls = [
'http://iphone.headlinefinance.hk/xml/2018/%s/finance_instant.xml'%time_str.replace('-',''), #即市新闻
'http://iphone.headlinefinance.hk/xml/2018/%s/finance_daily.xml'%time_str.replace('-',''), #日报新闻
'http://iphone.headlinefinance.hk/xml/2018/%s/finance_investment_columnist.xml'%time_str.replace('-',''),#投资专栏&名家精选
'http://iphone.headlinefinance.hk/xml/2018/%s/finance_warrants.xml'%time_str.replace('-',''), #轮商精选
]
def parse(self, response):
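        # the daily XML feeds already carry title, date, summary and full text, so no follow-up request is needed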
links = response.xpath('//new/id/text()').extract()
title = response.xpath('//new/title/text()').extract()
published = response.xpath('//new/publishdate/text()').extract()
description = response.xpath('//new/short_description/text()').extract()
contentt = response.xpath('//new/description/text()').extract()
author = ''
pic_url = ''
pic_more_url = ''
app_name = u'头条财经'.encode('utf-8')
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
if 'finance_instant' in response.url:
category = u'即市新闻'.encode('utf-8')
elif 'finance_daily' in response.url:
category = u'日报新闻'.encode('utf-8')
elif 'finance_investment_columnist' in response.url :
category = u'投资专栏'.encode("utf-8")
else:
category = u'轮商精选'.encode('utf-8')
for i in range(0,len(links)):
tit = title[i]
publishedDate = published[i]
describe = description[i]
content = contentt[i]
self.count = self.count + 1
item = NewsItem()
item['app_name'] = app_name
item['count'] = self.count
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = response.url
item['category'] = category
item['title'] = tit
item['describe'] = describe
item['content'] = content.replace('\t', '').replace('\n', '').replace('\r', '')
item['home_url'] = response.url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
print publishedDate
try:
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M:%S")
except:
timeArray = time.strptime(publishedDate, "%Y-%m-%d")
publishedDate = time.mktime(timeArray)
if publishedDate >= self.timeStamp:
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(publishedDate)))
item['publishedDate'] = publishedDate
yield item
<file_sep>#-*- coding: utf-8 -*-
from scrapy.spiders import Spider
from scrapy.selector import Selector
from scrapy.http import Request
from urlparse import urljoin
import json
import time
import re
from news.DataResource import TransportData
from news.items import NewsItem
class xiangang(Spider):
name = 'xiangangxinwen'
start_urls = [
'http://orientaldaily.on.cc/rss/news.xml',
'http://www.epochtimes.com/gb/n24hr.xml',
'http://rss.sina.com.cn/news/china/focus15.xml',
'https://www.hket.com/rss/hongkong'
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
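        # standard RSS feeds: pull title/description/link/pubDate and convert the English date to a timestamp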
title = response.xpath('//item/title/text()').extract()
desc = response.xpath('//item/description/text()').extract()
links = response.xpath('//item/link/text()').extract()
pubt = response.xpath('//item/pubDate/text()').extract()
for i in range(len(links)):
url = links[i].replace('\t','').replace('\n','').replace('\r','').replace(' ','')
tit = title[i].replace('\t','').replace('\n','').replace('\r','').replace(' ','')
describe = desc[i].replace('\t','').replace('\n','').replace('\r','').replace(' ','')
publish = pubt[i].replace('\t','').replace('\n','').replace('\r','')
publish = publish.split(', ')[1]
t = publish.split(' ')
t1 = t[0]
t2 = t[1]
t3 = t[2]
t4 = t[3]
            # map the English month name or abbreviation to its two-digit number
            tt = '01'
            for name, num in [('Jan', '01'), ('Feb', '02'), ('Mar', '03'), ('Apr', '04'),
                              ('May', '05'), ('Jun', '06'), ('Jul', '07'), ('Aug', '08'),
                              ('Sep', '09'), ('Oct', '10'), ('Nov', '11'), ('Dec', '12')]:
                if name in t2:
                    tt = num
                    break
publish = t3 + '-' + tt + '-' + t1 + ' ' + t4
print publish
timeArray = time.strptime(publish, "%Y-%m-%d %H:%M:%S")
timeStamp = int(time.mktime(timeArray))
if timeStamp >= self.timeStamp:
publish = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(timeStamp)))
yield Request(url, meta={
'publish': publish,
'title': tit,
'describe': describe
}, callback=self.parse_item, dont_filter=True)
def parse_item(self, response):
publishedDate = response.meta['publish']
title = response.meta['title']
describe = response.meta['describe']
app_name = 'HK News'
author = ''
pic_url = ''
home_url = ''
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
pic_more_url = ''
category = '最新'
content = response.xpath('//p').extract()
contentt = ''
for i in range(len(content)):
contentt += content[i]
content = contentt.replace('\t', '').replace('\n', '').replace('\r', '')
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
        item['content'] = content.replace('<p>', '').replace('</p>', '')
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
self.count += 1
item['count'] = self.count
yield item
<file_sep>#coding=utf-8
import scrapy
import time,re,json
from news.items import NewsItem
class meiguozhiyin(scrapy.Spider):
name = 'meiguozhiyinzhongwen'
start_urls = [
'https://www.voachinese.com/s?k=%E4%B9%A0%E8%BF%91%E5%B9%B3&tab=all&pi=1&r=any&pp=10',
'https://www.voachinese.com/s?k=%E4%B9%A0%E8%BF%91%E5%B9%B3&tab=all&pi=2&r=any&pp=10',
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime('2018-06-23', "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
title = response.xpath('//ul[@class="small-thums-list follow-up-list"]/li/div/div/a/h4/text()').extract()
links = response.xpath('//ul[@class="small-thums-list follow-up-list"]/li/div/div/a/@href').extract()
descr = response.xpath('//ul[@class="small-thums-list follow-up-list"]/li/div/div/a/p').extract()
for i in range(len(links)):
url = 'https://www.voachinese.com' + links[i]
tit = title[i].replace('\t','').replace('\n','').replace('\r','').replace(' ','')
describe = descr[i]
category = '中国'
content = re.findall('>(.*?)<', describe)
contentdata = ''
for i in content:
contentdata += i
describe = contentdata
print describe
yield scrapy.Request(url, meta={
'title': tit,
'describe': describe,
'category': category
}, callback=self.parse_item, dont_filter=True)
def parse_item(self, response):
title = response.meta['title']
describe = response.meta['describe']
app_name = '美国之音中文网'
pubt = response.xpath('//time/text()').extract()[0].replace('\t', '').replace('\n', '').replace('\r', '')
publishedDate = pubt.replace('年', '-').replace('月', '-').replace('日', '').replace('最后更新: ','')
pic_url = ''
author = ''
home_url = 'https://www.voachinese.com'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
content = response.xpath('//p').extract()
contentt = ''
for i in range(len(content)):
contentt += content[i]
content = contentt.replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', content)
contentdata = ''
for i in content:
contentdata += i
content = contentdata.replace('没有媒体可用资源','').replace('评论','').replace('加载更多','')
pic_more_url = ''
category = response.meta['category']
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M")
timenum = int(time.mktime(timeArray))
if timenum >= self.timeStamp:
self.count += 1
item['count'] = self.count
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(timenum)))
item['publishedDate'] = publishedDate
if len(title) > 3:
yield item
<file_sep>#-*- coding: utf-8 -*-
from scrapy.spider import Spider
import scrapy
import sys
import json,time
from news.items import NewsItem
reload(sys)
sys.setdefaultencoding('utf8')
class guangming(scrapy.Spider):
name = 'guangmingribao'
page = 0
start_urls = [
'http://s.cloud.gmw.cn/zcms/searchContent?SiteID=126&CatalogID=15277,15285&Query=%E4%B9%A0%E8%BF%91%E5%B9%B3&PageSize=10&PageIndex=' + str(page)#query = 后为习近平
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime('2018-06-01', "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
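        # walk the search API page by page until a page contains no article newer than the cutoff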
data = json.loads(response.body)
data = data['data']
num = 0
for i in range(len(data)):
title = data[i]['title']
category = data[i]['catalogName']
publishe = data[i]['publishDate']
pic_url = 'http://s.cloud.gmw.cn/2016' + data[i]['logoFile']
id = data[i]['articleId']
url = 'http://s.cloud.gmw.cn/zcms/getArticleInfo?articleId=%s'%id
if float(publishe)/1000 >= self.timeStamp:
num += 1
yield scrapy.Request(url,meta={
'title':title,
'category':category,
'published':publishe,
'pic_url':pic_url
},callback=self.parse_item)
if num > 0:
self.page += 1
url = 'http://s.cloud.gmw.cn/zcms/searchContent?SiteID=126&CatalogID=15277,15285&Query=%E4%B9%A0%E8%BF%91%E5%B9%B3&PageSize=10&PageIndex=' + str(self.page)#query = 后为习近平
yield scrapy.Request(url, callback=self.parse)
def parse_item(self,response):
title = response.meta['title']
category = response.meta['category']
published = response.meta['published']
app_name = '光明日报'
pic_url = response.meta['pic_url']
describe = ''
home_url = 'http://s.cloud.gmw.cn/'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(published)/1000))
data = json.loads(response.body)
data = data['data']
content = data['artContent']
author = data['artAuthor']
pic_more_url = str(data['images'])
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
self.count += 1
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
item['count'] = self.count
yield item
<file_sep>#coding=utf-8
import scrapy
import re, time, json
from news.items import NewsItem
class jingjixueren(scrapy.Spider):
name = 'jingjixueren'
start_urls = [
'https://www.economist.com/latest-updates',
'https://www.economist.com/sections/leaders',
'https://www.economist.com/sections/briefings',
'https://www.economist.com/sections/united-states',
'https://www.economist.com/sections/americas',
'https://www.economist.com/sections/asia',
'https://www.economist.com/sections/china',
'https://www.economist.com/sections/middle-east-africa',
'https://www.economist.com/sections/europe',
'https://www.economist.com/sections/britain',
'https://www.economist.com/sections/international',
'https://www.economist.com/sections/business-finance',
'https://www.economist.com/sections/economics',
'https://www.economist.com/sections/science-technology'
]
def parse(self, response):
links = response.xpath('//div[@class="teaser-list"]/article/a/@href').extract()
for i in range(len(links)):
url = 'https://www.economist.com' + links[i]
yield scrapy.Request(url,meta={
'home_url':response.url
}, callback=self.parse_item, dont_filter=True)
def parse_item(self,response):
app_name = '经济学人'
home_url = response.meta['home_url']
author = ''
pic_url = ''
title = response.xpath('//h1').extract()[0]
title = title.replace('\t', '').replace('\n', '').replace('\r', '')
title = re.findall('>(.*?)<', title)
tit = ''
for i in range(len(title)):
tit += title[i]
title = tit
publishedDate = response.xpath('//time/text()').extract()[0]
content = response.xpath('//div[@class="blog-post__text"]/p/text()').extract()
contentt = ''
for i in range(len(content)):
contentt += content[i]
content = contentt
content = content.replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
describe = ''
pic_more_url = ''
category = ''
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
<file_sep>#coding=utf-8
import scrapy
import time,re
from news.items import NewsItem
class ziyouyazhou(scrapy.Spider):
name = 'ziyouyazhou'
start_urls = [
'https://www.rfa.org/mandarin/Xinwen/story_archive?b_start:int=0',#要闻
'https://www.rfa.org/mandarin/yataibaodao/gangtai/story_archive?b_start:int=0',#港台
'https://www.rfa.org/mandarin/yataibaodao/shaoshuminzu/story_archive?b_start:int=0',#少数民族
'https://www.rfa.org/mandarin/yataibaodao/jingmao/story_archive?b_start:int=0',#经贸
'https://www.rfa.org/mandarin/yataibaodao/zhengzhi/story_archive?b_start:int=0',#政治
'https://www.rfa.org/mandarin/yataibaodao/renquanfazhi/story_archive?b_start:int=0',#人权法治
'https://www.rfa.org/mandarin/yataibaodao/meiti/story_archive?b_start:int=0',#媒体网络
'https://www.rfa.org/mandarin/yataibaodao/junshiwaijiao/story_archive?b_start:int=0',#军事外交
'https://www.rfa.org/mandarin/pinglun/story_archive?b_start:int=0',#评论
'https://www.rfa.org/mandarin/duomeiti/story_archive?b_start:int=0',#多媒体
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime('2018-06-13', "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
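        # the archive pages use two different list layouts; try the search-result layout first, then the teaser layout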
links = response.xpath('//div[@class="searchresult"]/div[1]/a/@href').extract()
if len(links) != 0:
for i in range(len(links)):
url = links[i]
yield scrapy.Request(url, meta={
'home_url': response.url
}, callback=self.parse_item,dont_filter=True)
else:
links = response.xpath('//div[@class="sectionteaser"]/h2/a/@href').extract()
if len(links) > 0:
for i in range(len(links)):
url = links[i]
yield scrapy.Request(url, meta={
'home_url': response.url
}, callback=self.parse_item, dont_filter=True)
else:
print "文章连接错误!"
def parse_item(self, response):
home_url = response.meta['home_url']
app_name = '自由亚洲电台'
pic_url = ''
author = ''
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
describe = ''
title = response.xpath('//h1/text()').extract()[0]
content = response.xpath('//div[@id="storytext"]').extract()
content = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', content)
contentdata = ''
for i in range(0, len(content)):
contentdata += content[i]
content = contentdata
publishedDate = response.xpath('//span[@id="story_date"]/text()').extract()[0]
try:
pic_more_url = response.xpath('//div[@id="headerimg"]/img/@src').extract()[0]
except:
pic_more_url = ''
if 'Xinwen' in home_url:
category = u'要闻'.encode('utf-8')
elif 'gangtai' in home_url:
category = u'港台'.encode('utf-8')
elif 'shaoshuminzu' in home_url:
category = u'少数民族'.encode('utf-8')
elif 'jingmao' in home_url:
category = u'经贸'.encode('utf-8')
elif 'zhengzhi' in home_url:
category = u'政治'.encode('utf-8')
elif 'renquanfazhi' in home_url:
category = u'人权法制'.encode('utf-8')
elif 'meiti' in home_url:
category = u'媒体'.encode('utf-8')
elif 'shehui' in home_url:
category = u'社会'.encode('utf-8')
elif 'guojishijiao' in home_url:
category = u'国际'.encode('utf-8')
elif 'junshiwaijiao' in home_url:
category = u'军事外交'.encode('utf-8')
else:
category = u'要闻'.encode('utf-8')
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
timeArray = time.strptime(publishedDate, "%Y-%m-%d")
timenum = int(time.mktime(timeArray))
if timenum >= self.timeStamp:
self.count += 1
item['count'] = self.count
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(timenum)))
item['publishedDate'] = publishedDate
yield item
<file_sep>#coding=utf-8
import scrapy, re, time, json
from news.items import NewsItem
from lxml.etree import HTML
class yindushibao(scrapy.Spider):
name = 'yindushibaoyingwen'
start_urls = [
'https://timesofindia.indiatimes.com/topic/Xi-Jinping'
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime('2018-06-01', "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
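        # the topic page lists headlines and hrefs in parallel; hrefs may be relative or absolute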
links = response.xpath('//ul[@itemprop="ItemList"]/li/div/a/@href').extract()
title = response.xpath('//ul[@itemprop="ItemList"]/li/div/a/span[1]/text()').extract()
for i in range(len(links)):
            # prepend the domain only when the href is relative
            if 'http' not in links[i]:
                url = 'https://timesofindia.indiatimes.com' + links[i]
            else:
                url = links[i]
yield scrapy.Request(url, meta={
'title': title[i]
}, callback=self.parse_item, dont_filter=True)
def parse_item(self, response):
app_name = '印度时报'
describe = ''
author = ''
pic_url = ''
title = response.meta['title'].replace('\t', '').replace('\n', '').replace('\r', '')
publishedDate = response.xpath('//time/@datetime').extract()[0]
publishedDate = publishedDate.split('+')[0].replace('T',' ')
content = response.xpath('//div[@class="section1"]').extract()
selator = HTML(content[0])
content = selator.xpath('//text()')
content = ''.join(content)
content = content.replace('\t', '').replace('\n', '').replace('\r', '')
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
home_url = 'https://timesofindia.indiatimes.com'
pic_more_url = ''
category = 'World'
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
try:
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M:%S")
except:
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M")
publishedDate = time.mktime(timeArray)
if publishedDate >= self.timeStamp:
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(publishedDate)))
item['publishedDate'] = publishedDate
self.count = self.count + 1
item['count'] = self.count
yield item<file_sep>#-*- coding: utf-8 -*-
from scrapy.spider import Spider
import scrapy
import sys
import json,time
from lxml.etree import HTML
from news.items import NewsItem
reload(sys)
sys.setdefaultencoding('utf8')
class souhu(Spider):
name = 'souhuxinwen'
page = 1
num = 1
Ttime = int(round(time.time() * 1000))
start_urls = [
'https://api.k.sohu.com/api/search/v5/search.go?rt=json&pageNo=' + str(num) +'&words=%E4%B9%A0%E8%BF%91%E5%B9%B3&keyword=%E4%B9%A0%E8%BF%91%E5%B9%B3&p1=NjQwOTIwNDUwMDQxODQ0MTI2MQ%3D%3D&pageSize=20&type=0&pid=&token=&gid=x011060802ff0decee47cd839000939fef9711202659&apiVersion=40&sid=10&u=1&bid=&keyfrom=input&autoCorrection=&refertype=1&versionName=6.0.4&os=android&picScale=16&h=&_=' + str(Ttime)
]
t = '''
rt json
pageNo 2
words 习近平
keyword 习近平
p1 NjQwOTIwNDUwMDQxODQ0MTI2MQ==
pageSize 20
type 0
pid
token
gid x011060802ff0decee47cd839000939fef9711202659
apiVersion 40
sid 10
u 1
bid
keyfrom input
autoCorrection
refertype 1
versionName 6.0.4
os android
picScale 16
h
_ 1528163152979
'''
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
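    # The cutoff is the start of today; souhuxinwen.json records the titles already
    # yielded so that repeated runs of this spider skip duplicates.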
def parse(self, response):
try:
data = json.loads(response.body)
data =data['resultList']
tt = len(data)
for i in range(len(data)):
title = data[i]['title']
try:
desc = data[i]['description']
except:
desc = ''
publish = data[i]['updateTime']
id = data[i]['newsId']
try:
pic = data[i]['pics']
except:
pic = ''
url = 'https://api.k.sohu.com/api/news/v5/article.go?channelId=&apiVersion=40&gid=-1&imgTag=1&newsId=' + str(id) + '&openType=&u=1&p1=NjQwOTIwNDUwMDQxODQ0MTI2MQ%3D%3D&pid=-1&recommendNum=3&refer=130&rt=json&showSdkAd=1&moreCount=8&articleDebug=0&_=' + str(self.Ttime)
yield scrapy.Request(url,meta={
'title':title,
'describe':desc,
'publish':publish,
'pic':pic
}, callback=self.parse_item)
if tt > 0:
self.num += 1
url = 'https://api.k.sohu.com/api/search/v5/search.go?rt=json&pageNo=' + str(self.num) +'&words=%E4%B9%A0%E8%BF%91%E5%B9%B3&keyword=%E4%B9%A0%E8%BF%91%E5%B9%B3&p1=NjQwOTIwNDUwMDQxODQ0MTI2MQ%3D%3D&pageSize=20&type=0&pid=&token=&gid=x011060802ff0decee47cd839000939fef9711202659&apiVersion=40&sid=10&u=1&bid=&keyfrom=input&autoCorrection=&refertype=1&versionName=6.0.4&os=android&picScale=16&h=&_=' + str(self.Ttime)
yield scrapy.Request(url, callback=self.parse)
except:
pass
def parse_item(self,response):
title = response.meta['title']
describe = response.meta['describe']
publishedDate = response.meta['publish']
pic_url = response.meta['pic']
app_name = '搜狐新闻'
author = ''
home_url = 'https://api.k.sohu.com/'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(publishedDate)/1000))
category = '要闻'
data = json.loads(response.body)
content = data['content']
selector = HTML(content)
content = selector.xpath('//text()')
content = ''.join(content)
content = content.replace('\t','').replace('\n','').replace('\r','')
pic_more_url = data['photos']
pic = []
for i in range(len(pic_more_url)):
pic.append(str(pic_more_url[i]['pic']))
pic_more_url = pic
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
self.count += 1
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
item['count'] = self.count
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M:%S")
timeStamp = int(time.mktime(timeArray))
if timeStamp >= self.timeStamp:
numappName = self.readjson()
if len(numappName) == 0:
items = {
'title':title
}
with open('souhuxinwen.json', 'a+') as fp:
line = json.dumps(dict(items), ensure_ascii=False) + '\n'
fp.write(line)
yield item
            else:
                # Emit the item only if its title is not already recorded in the
                # history file; write and yield once, after the whole list is checked.
                for i in range(len(numappName)):
                    if numappName[i]['title'] == item['title']:
                        return
                items = {
                    'title': item['title']
                }
                with open('souhuxinwen.json', 'a+') as fp:
                    line = json.dumps(dict(items), ensure_ascii=False) + '\n'
                    fp.write(line)
                yield item
    def readjson(self):
        # Load every previously saved record (one JSON object per line) so parse_item
        # can skip titles that were already emitted. A missing file means an empty history.
        s = []
        try:
            with open('souhuxinwen.json', 'r') as file_object:
                for line in file_object:
                    line = line.strip()
                    if line:
                        s.append(json.loads(line))
        except IOError:
            pass
        return s
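# --- Illustrative sketch (not wired into the spider above) ---
# Assumption: the same one-JSON-object-per-line 'souhuxinwen.json' history file, with
# json imported at the top of this module. Keeping the seen titles in a set makes the
# duplicate check O(1) per item instead of re-scanning the whole list.
def load_seen_titles(path='souhuxinwen.json'):
    seen = set()
    try:
        with open(path, 'r') as fp:
            for line in fp:
                line = line.strip()
                if line:
                    seen.add(json.loads(line).get('title'))
    except IOError:
        pass
    return seen

def remember_title(title, path='souhuxinwen.json'):
    # Append one record per newly seen title.
    with open(path, 'a+') as fp:
        fp.write(json.dumps({'title': title}, ensure_ascii=False) + '\n')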
<file_sep>#coding=utf-8
import scrapy
import re,time
from news.items import NewsItem
class tianxiazazhi(scrapy.Spider):
name = 'tianxiazazhimeiribao'
start_urls = [
        'https://www.cw.com.tw/masterChannel.action?idMasterChannel=7',  # Industry
        'https://www.cw.com.tw/masterChannel.action?idMasterChannel=9',  # International
        'https://www.cw.com.tw/masterChannel.action?idMasterChannel=12',  # Environment
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime('2018-06-13', "%Y-%m-%d")
# a = "2018-04-05 00:00:00"
# timeArray = time.strptime(a,"%Y-%m-%d %H:%M:%S")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
links = response.xpath('//div[@class="articleGroup"]/section/div[2]/h3/a/@href').extract()
title = response.xpath('//div[@class="articleGroup"]/section/div[2]/h3/a/text()').extract()
desc = response.xpath('//div[@class="articleGroup"]/section/div[2]/p/text()').extract()
pubt = response.xpath('//div[@class="articleGroup"]/section/div[2]/time/text()').extract()
pic = response.xpath('//div[@class="articleGroup"]/section/div[1]/a/img/@src').extract()
for i in range(0,len(links)):
url = links[i]
tit = title[i].replace(' ','')
describe = desc[i]
pubtime = pubt[i]
try:
pic_url = pic[i]
except:
pic_url = ''
timeArray = time.strptime(pubtime, "%Y-%m-%d")
publishedDate = time.mktime(timeArray)
if publishedDate >= self.timeStamp:
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(publishedDate)))
yield scrapy.Request(url,meta={
'title':tit,
'describe':describe,
'pubtime':publishedDate,
'pic_url':pic_url,
'home_url':response.url
},callback=self.parse_item,dont_filter=True)
def parse_item(self,response):
title = response.meta['title'].replace('\n','').replace('\r','').replace('\t','')
publishedDate = response.meta['pubtime']
home_url = response.meta['home_url']
describe = response.meta['describe']
pic_url = response.meta['pic_url']
app_name = u'天下杂志每日报'.encode('utf-8')
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
content = response.xpath('//section[@id="emailshow"]').extract()
contentt = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', contentt)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
author = ''
pic_more_url = re.findall('<img(.*?)src="(.*?)">',contentt)
pic_more_url1 = []
for i in range(0,len(pic_more_url)):
            pic_more_url1.append(pic_more_url[i][1])
pic_more_url = str(set(pic_more_url1))
if '7' in home_url:
category = u'产业'.encode('utf-8')
        elif '9' in home_url:
category = u'国际'.encode('utf-8')
elif '12' in home_url:
category = u'环境'.encode('utf-8')
else:
category = u'服务'.encode('utf-8')
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
self.count += 1
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['count'] = self.count
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
yield item<file_sep>#-*- coding: utf-8 -*-
from scrapy.spider import Spider
import scrapy
import sys
import json,time,re
from news.items import NewsItem
reload(sys)
sys.setdefaultencoding('utf8')
class renminribao(scrapy.Spider):
name = 'renmin'
page = 1
start_urls = [
'http://app.peopleapp.com/Api/600/HomeApi/searchHotWord?city=北京市&citycode=010&count=20&device=670997f3-a0ee-374d-9271-fa02707b8b0d&device_model=Nexus 4&device_os=Android 4.4&device_product=LGE&device_size=768*1184&device_type=1&district=海淀区&fake_id=8533258&interface_code=621&keyword=习近平&latitude=39.96389&longitude=116.358495&page=%s&province=北京市&province_code=1528163652000&userId=0&version=6.2.1&securitykey=b5e1fd6b496267493431cdb9a0d3100c'%page
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime('2018-06-08', "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
data = json.loads(response.body)
data = data['data']
num = 0
for i in range(len(data)):
title = data[i]['title']
pic_url = data[i]['share_image']
published = data[i]['news_time']
url = data[i]['share_url']
print published
timeArray = time.strptime(published, "%Y-%m-%d %H-%M-%S")
timeStamp = int(time.mktime(timeArray))
if timeStamp >= self.timeStamp:
num += 1
yield scrapy.Request(url,meta={
'title':title,
'pic_url':pic_url,
'published':published,
'home_url':response.url
}, callback=self.parse_item)
        if num > 0:
self.page += 1
t = self.md5(self.page)
url = 'http://app.peopleapp.com/Api/600/HomeApi/searchHotWord?city=北京市&citycode=010&count=20&device=670997f3-a0ee-374d-9271-fa02707b8b0d&device_model=Nexus 4&device_os=Android 4.4&device_product=LGE&device_size=768*1184&device_type=1&district=海淀区&fake_id=8533258&interface_code=621&keyword=习近平&latitude=39.96389&longitude=116.358495&page=%s&province=北京市&province_code=1528163652000&userId=0&version=6.2.1&securitykey=%s'%(self.page,t)
yield scrapy.Request(url,callback=self.parse)
def parse_item(self,response):
title = response.meta['title']
pic_url = response.meta['pic_url']
publishedDate = response.meta['published']
p = publishedDate.split(' ')
p1 = p[0]
p2 = p[1]
p2 =p2.replace('-',':')
publishedDate = p1 + ' ' + p2
home_url = response.meta['home_url']
app_name = '人民日报'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
author = ''
category = '首页新闻'
describe = ''
try:
content = response.xpath('//div[@class="article long-article"]').extract()
content = content[0].replace('\t', '').replace('\n', '').replace('\r', '')
content = re.findall('>(.*?)<',content)
contentt = ''
for i in range(len(content)):
contentt += content[i]
content = contentt
except:
content = ''
pic_more_url = ''
print response.body
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
self.count += 1
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
item['count'] = self.count
yield item
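    # securitykey signing: the API expects the md5 of the request fields joined with
    # '|' followed by a constant salt; md5() below rebuilds that digest for each page.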
def md5(self,page):
import hashlib
t = '北京市|010|20|670997f3-a0ee-374d-9271-fa02707b8b0d|Nexus 4|Android 4.4|LGE|768*1184|1|海淀区|8533258|621|习近平|39.96389|116.358495|' + str(page) + '|北京市|1528163652000|0|6.2.1rmrbsecurity$#%sut49fbb427a508bcc'
m = hashlib.md5()
m.update(t)
return m.hexdigest()
<file_sep>#-*- coding: utf-8 -*-
from scrapy.spiders import Spider
from scrapy.selector import Selector
from scrapy.http import Request
from urlparse import urljoin
import json
import time
import re
from news.DataResource import TransportData
from news.items import NewsItem
class dw(Spider):
name='duoweixinwen'
start_urls=[
        'http://news.dwnews.com/china/',  # China
        'http://news.dwnews.com/china/list/list1.json',
        'http://news.dwnews.com/global/',  # World
        'http://news.dwnews.com/global/list/list1.json',
        'http://news.dwnews.com/taiwan/',  # Taiwan
        'http://news.dwnews.com/taiwan/list/list1.json',
        'http://news.dwnews.com/hongkong/',  # Hong Kong
        'http://news.dwnews.com/hongkong/list/list1.json',
        'http://economics.dwnews.com/',  # Economy
        'http://culture.dwnews.com/history/',  # History
        'http://culture.dwnews.com/history/list/list1.json',
        'http://blog.dwnews.com/',  # Blog (Duoweike)
        'http://blog.dwnews.com/index.php?r=club%2Fajax_list&catid=0&page=1&type=index',
]
base_url = ""
count = 0
number=1
download_delay = 2
# a = "2018-03-18 00:00:00"
# timeArray = time.strptime(a,"%Y-%m-%d %H:%M:%S")
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
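    # start_urls mixes several layouts (HTML section pages, JSON list feeds, the blog
    # index and the history channel). Each nested try in parse targets one layout;
    # when its selectors come back empty an IndexError (e.g. links[1]) drops the flow
    # through to the next branch.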
def parse(self, response):
try:
try:
try:
try:
                        # Economy
links = response.xpath('//div[@class="news-list vikey"]/a/@href').extract()
print "123"
t= links[1]
pic_url = response.xpath('//div[@class="news-list vikey"]/a/img/@src').extract()
title = response.xpath('//div[@class="news-list vikey"]/div[1]/h2/a/text()').extract()
decsribe = ''
print "####################################"
for i in range(0,len(links)):
url = links[i]
tit = title[i]
try:
pic = pic_url[i]
except:
pic = ''
desc= decsribe
yield Request(url,meta={
'title':tit,
'pic_url':pic,
'describe':desc,
'home_url':response.url
},callback=self.parse_item,dont_filter=True)
except:
#json
data = json.loads(response.body)
print "@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@"
for i in data:
tit = i['title']
url = i['url']
pic = i['relevantPhoto']
desc = i['digest']
yield Request(url, meta={
'title': tit,
'pic_url': pic,
'describe': desc,
'home_url': response.url
}, callback=self.parse_item,dont_filter=True)
except:
                    # China / World / Taiwan / Hong Kong
links = response.xpath('//div[@class="lisbox"]/ul/li/div[1]/a/@href').extract()
tt = links[1]
title = response.xpath('//div[@class="lisbox"]/ul/li/div[1]/a/text()').extract()
pic_url = response.xpath('//div[@class="lisbox"]/ul/li/div[2]/a/img/@src').extract()
decsribe = response.xpath('//div[@class="lisbox"]/ul/li/div[3]/p/text()').extract()
print "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$"
for i in range(0,len(links)):
url = links[i]
tit = title[i]
pic = pic_url[i]
desc = decsribe[i]
yield Request(url, meta={
'title': tit,
'pic_url': pic,
'describe': desc,
'home_url': response.url
}, callback=self.parse_item)
except:
                # Blog (Duoweike)
links = response.xpath('//li[@class="vikey"]/div[1]/a/@href').extract()
title = response.xpath('//li[@class="vikey"]/div[1]/a/text()').extract()
pic_url = response.xpath('//li[@class="vikey"]/div[2]/a/img/@src').extract()
decsribe = response.xpath('//li[@class="vikey"]/div[3]/p/text()').extract()
print "%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%"
for i in range(0, len(links)):
url = links[i]
tit = title[i]
try:
pic = pic_url[i]
except:
pic = ''
try:
desc = decsribe[i]
except:
desc = ''
yield Request(url, meta={
'title': tit,
'pic_url': pic,
'describe': desc,
'home_url': response.url
}, callback=self.parse_item)
except:
            # History
print "&&&&&&&&&&&&&&&&&&&&&&&&&&&&"
links = response.xpath('//div[@class="main"]/div/ul/li/div[1]/a/@href').extract()
pic_url = response.xpath('//div[@class="main"]/div/ul/li/div[1]/a/text()').extract()
title = response.xpath('//div[@class="main"]/div/ul/li/div[2]/h3/a/text()').extract()
decsribe = response.xpath('//div[@class="main"]/div/ul/li/div[2]/p/text()').extract()
for i in range(0, len(links)):
url = links[i]
tit = title[i]
try:
pic = pic_url[i]
except:
pic = ''
try:
desc = decsribe[i]
except:
desc = ''
yield Request(url, meta={
'title': tit,
'pic_url': pic,
'describe': desc,
'home_url': response.url
}, callback=self.parse_item)
def parse_item(self,response):
title=response.meta['title']
pic_url=response.meta['pic_url']
describe=response.meta['describe']
try:
publishedDate = response.xpath('//div[@class="r"]/text()').extract()[0]
except:
publishedDate = response.xpath('//div[@class="time"]/text()').extract()[0]
home_url=response.meta['home_url']
app_name='多维新闻'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
if 'china' in home_url:
category=u'中国'.encode('utf-8')
elif 'global' in home_url:
category=u'国际'.encode('utf-8')
elif 'hongkong' in home_url:
category=u'香港'.encode('utf-8')
elif 'taiwan' in home_url:
category=u'台湾'.encode('utf-8')
elif 'economics' in home_url:
category =u'经济'.encode('utf-8')
elif 'history' in home_url:
category = u'历史'.encode('utf-8')
else:
category = u'多维客'.encode('utf-8')
try:
try:
try:
content=response.xpath('//div[@class="dia-lead-one"]').extract()
contentt = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', contentt)
contentdata = ''
for i in range(0, len(content)):
contentdata += content[i]
contentt = contentdata
pic_more_url=re.findall('src="(.*?)"',contentt)
pic_more_url1 = []
for i in range(0, len(pic_more_url)):
pic_more_url1.append(pic_more_url[i])
pic_more_url = str(set(pic_more_url1))
except:
content = response.xpath('//div[@class="container"]').extract()
contentt = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', contentt)
contentdata = ''
for i in range(0, len(content)):
contentdata += content[i]
contentt = contentdata
pic_more_url = re.findall('src="(.*?)"', contentt)
pic_more_url1 = []
for i in range(0, len(pic_more_url)):
pic_more_url1.append(pic_more_url[i])
pic_more_url = str(set(pic_more_url1))
except:
content=response.xpath('//div[@class="captions"]').extract()
contentt = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', contentt)
contentdata = ''
for i in range(0, len(content)):
contentdata += content[i]
contentt = contentdata
pic_more_url=response.xpath('//div[@class="bigImgs"]/img/@src').extract()
pic_more_url1 = []
for i in range(0, len(pic_more_url)):
pic_more_url1.append(pic_more_url[i])
pic_more_url = str(set(pic_more_url1))
except:
content = response.xpath('//p').extract()
contentt = ''
for i in range(0,len(content)):
contentt += content[i]
pic_more_url = ''
try:
try:
writer=response.xpath('//div[@class="nw"]/text()').extract()
author=writer[0]
except:
writer=response.xpath('//div[@class="author"]/text()').extract()
author=writer[0]
except:
author=''
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", contentt
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
self.count = self.count + 1
url=response.url
item = NewsItem()
item['app_name'] = app_name
item['count'] = self.count
item['pic_url'] = pic_url
item['pic_more_url'] = ""
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = contentt
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
try:
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M:%S")
except:
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M")
publishedDate = time.mktime(timeArray)
if publishedDate >= self.timeStamp:
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(publishedDate)))
item['publishedDate'] = publishedDate
self.count = self.count + 1
item['count'] = self.count
print "okokokokok"
yield item
# yield item
# TransportData.transport_data(app_name, pic_url, pic_more_url, author, response.url, category, title, describe,
# contentt,
# home_url, publishedDate, crawlTime)
<file_sep>#-*- coding: utf-8 -*-
from scrapy.spider import Spider
import time
import json
from lxml.etree import HTML
import re
import sys
import scrapy
from news.items import NewsItem
reload(sys)
sys.setdefaultencoding('utf8')
class Nytimes(Spider):
name = "niuyueshibao"
base_url = "https://cn.nytimes.com"
count = 0
appname = "纽约时报中文网"
allowed_domains = ["cn.nytimes.com"]
start_urls = [
        'https://cn.nytimes.com/world/',  # World
        'https://cn.nytimes.com/china/',  # China
        'https://cn.nytimes.com/business/',  # Business & Economy
        'https://cn.nytstyle.com/technology/',  # Technology
]
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
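    # Listing pages come in two markups: the current 'autoList' layout and the older
    # 'well basic_list' layout handled by the else branch of parse.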
def parse(self, response):
links_url = response.xpath('//ul[@class="autoList"]/li/div[1]/a/@href').extract()
pic_url = response.xpath('//ul[@class="autoList"]/li/div[1]/a/img/@data-url').extract()
title = response.xpath('//ul[@class="autoList"]/li/h3/a/text()').extract()
writer = response.xpath('//ul[@class="autoList"]/li/h6').extract()
summary = response.xpath('//ul[@class="autoList"]/li/p/text()').extract()
if len(links_url) > 0:
for i in range(0, len(links_url)):
if '//' in links_url[i]:
url = 'http:'+links_url[i]
else:
url = 'https://cn.nytimes.com'+links_url[i]
try:
pic_url1 = pic_url[i]
except:
pic_url1 = ''
print title[i]
yield scrapy.Request(url,meta={
'title': title[i],
'writer': writer[i],
'pic_url': pic_url1,
'summary': summary[i],
'home_url': response.url
}, callback=self.parse_item, dont_filter=True)
else:
links_url = response.xpath('//ul[@class="well basic_list first last"]/li/a/@href').extract()
pic_url = response.xpath('//ul[@class="well basic_list first last"]/li/a/img/@data-url').extract()
title = response.xpath('//ul[@class="well basic_list first last"]/li/a/@title').extract()
writer = response.xpath('//ul[@class="well basic_list first last"]/li/div').extract()
summary = response.xpath('//ul[@class="well basic_list first last"]/li/p/text()').extract()
for i in range(0, len(links_url)):
if '//' in links_url[i]:
url = 'http:' + links_url[i]
else:
url = 'https://cn.nytimes.com' + links_url[i]
try:
pic_url1 = pic_url[i]
except:
pic_url1 = ''
yield scrapy.Request(url, meta={
'title': title[i],
'writer': writer[i],
'pic_url': pic_url1,
'summary': summary[i],
'home_url': response.url
}, callback=self.parse_item, dont_filter=True)
def parse_item(self, response):
title = response.meta['title']
writer = response.meta['writer']
pic_url = response.meta['pic_url']
home_url = response.meta['home_url']
describe = response.meta['summary']
app_name = '纽约时报中文网'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
try:
content = response.xpath('//div[@class="article-left col-lg-8"]').extract()[0]
selator = HTML(content)
content = selator.xpath('//text()')
content = ''.join(content)
content = content.replace('\t', '').replace('\n', '').replace('\r', '')
except:
content = response.xpath('//section[@class="article-body"]').extract()
selator = HTML(content[0])
content = selator.xpath('//text()')
content = ''.join(content)
content = content.replace('\t', '').replace('\n', '').replace('\r', '')
writer = re.findall('>(.*?)<', writer)
writerdata = ''
for i in range(0, len(writer)):
writerdata += writer[i]
author = writerdata
try:
pic_more_url = response.xpath('//figure[@class="article-span-photo"]/img/@src').extract()
pic_more_url = pic_more_url[0]
except:
pic_more_url = ''
try:
pubTime = response.xpath('//div[@class="byline"]/time/@datetime').extract()
pubTime = pubTime[0]
except:
            pubTime = '2018-01-01 00:00:00'
if 'world' in home_url:
category = u'国际'.encode('utf-8')
elif 'china' in home_url:
category = u'中国'.encode('utf-8')
elif 'business' in home_url:
category = u'商业与经济'.encode('utf-8')
elif 'technology' in home_url:
category = u'科技'.encode('utf-8')
elif 'education' in home_url:
category = u'教育与职场'.encode('utf-8')
elif 'culture' in home_url:
category = u'文化'.encode('utf-8')
else:
category = u'最新文章'.encode('utf-8')
publishedDate = pubTime
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
timeArray = time.strptime(pubTime, "%Y-%m-%d %H:%M:%S")
publishedDate = time.mktime(timeArray)
if publishedDate >= self.timeStamp:
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(publishedDate)))
item['publishedDate'] = publishedDate
self.count = self.count + 1
item['count'] = self.count
yield item<file_sep>#coding=utf-8
import scrapy
import json,re
import time
from news.items import NewsItem
class taiwanyizhoukan(scrapy.Spider):
name = 'taiwanyizhoukan'
Ttime = int(round(time.time() * 1000))
start_urls = [
        'http://www.nextmag.com.tw/breakingnews/mosthit',  # Trending
'http://www.nextmag.com.tw/section/getNext/mosthit/0/10/2/20/?&_=%s'%Ttime,
        'http://www.nextmag.com.tw/breakingnews/topic',  # Recommended
'http://www.nextmag.com.tw/section/getNext/117/0/10/2/20/?&_=%s'%Ttime,
'http://www.nextmag.com.tw/breakingnews/business',
'http://www.nextmag.com.tw/section/getNext/112/0/10/2/20/?&_=%s'%Ttime,
'http://www.nextmag.com.tw/breakingnews/politics',
'http://www.nextmag.com.tw/section/getNext/108/0/10/2/20/?&_=%s'%Ttime,
'http://www.nextmag.com.tw/breakingnews/international',
'http://www.nextmag.com.tw/section/getNext/111/0/10/2/20/?&_=%s'%Ttime,
'http://www.nextmag.com.tw/breakingnews/latest',
'http://www.nextmag.com.tw/section/getNext/0/0/10/2/20/?&_=%s'%Ttime,
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
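    # parse applies the same <li class="video"> selectors to both the breakingnews
    # pages and their getNext endpoints, which are assumed to return the same markup.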
def parse(self, response):
links = response.xpath('//li[@class="video"]/a/@href').extract()
title = response.xpath('//li[@class="video"]/a/div[2]/div/h3/text()').extract()
piccc = response.xpath('//li[@class="video"]/a/div[1]/span/@style').extract()
for i in range(len(links)):
url = 'http://www.nextmag.com.tw' + links[i]
try:
pic = piccc[i].replace('background-image:url(','').replace(');','')
except:
pic = ''
yield scrapy.Request(url,meta={
'title':title[i],
'pic':pic
},callback=self.parse_item,dont_filter=True)
def parse_item(self,response):
title = response.meta['title']
pic_url = response.meta['pic']
app_name = '台湾壹周刊'
describe = ''
author = ''
home_url = 'http://www.nextmag.com.tw'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
pic_more_url = ''
publishedDate = response.xpath('//time[@class="time"]/text()').extract()[0]
publishedDate = publishedDate.replace('年','-').replace('月','-').replace('日','')
content = response.xpath('//p/text()').extract()
contentt = ''
for i in range(len(content)):
contentt += content[i]
content = contentt
category = response.xpath('//div[@class="category"]/span/text()').extract()[0]
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
self.count += 1
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
item['count'] = self.count
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M")
timeStamp = int(time.mktime(timeArray))
if timeStamp >= self.timeStamp:
item['publishedDate'] = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(timeStamp)))
yield item
<file_sep>#coding=utf-8
import scrapy
import json,re
import time
from news.items import NewsItem
class xianggang(scrapy.Spider):
name = 'xianggang02'
start_urls = [
        'https://web-data.api.hk01.com/v2/feed/category/364?offset=99999999987&bucketId=00000',  # China
        'https://web-data.api.hk01.com/v2/feed/category/365?offset=99999999988&bucketId=00000',  # China
        'https://web-data.api.hk01.com/v2/feed/category/366?offset=99999999987&bucketId=00000',  # China
        'https://web-data.api.hk01.com/v2/feed/category/367?offset=99999999987&bucketId=00000',  # China
        'https://web-data.api.hk01.com/v2/feed/category/2?offset=99999999993&bucketId=00000',  # Hong Kong news
        'https://web-data.api.hk01.com/v2/feed/category/6?offset=99999999990&bucketId=00000',  # Hong Kong news
        'https://web-data.api.hk01.com/v2/feed/category/310?offset=99999999991&bucketId=00000',  # Hong Kong news
        'https://web-data.api.hk01.com/v2/feed/category/143?offset=99999999993&bucketId=00000',  # Hong Kong news
        'https://web-data.api.hk01.com/v2/feed/category/403?offset=99999999993&bucketId=00000',  # Hong Kong news
        'https://web-data.api.hk01.com/v2/feed/category/19?offset=99999999990&bucketId=00000',  # World
        'https://web-data.api.hk01.com/v2/feed/category/405?offset=99999999990&bucketId=00000',  # World
        'https://web-data.api.hk01.com/v2/feed/category/406?offset=99999999987&bucketId=00000',  # World
        'https://web-data.api.hk01.com/v2/feed/category/407?offset=99999999988&bucketId=00000',  # World
        'https://web-data.api.hk01.com/v2/feed/category/408?offset=99999999991&bucketId=00000',  # World
        'https://web-data.api.hk01.com/v2/feed/category/409?offset=99999999991&bucketId=00000',  # World
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
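    # Each feed is a cursor-paginated JSON API; parse follows data['nextOffset'] until
    # a page no longer contains items published after today's cutoff (timeStamp).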
def parse(self, response):
data = json.loads(response.body)
number = data['nextOffset']
data = data['items']
num = 0
for i in range(len(data)):
title = data[i]['data']['title']
url = data[i]['data']['publishUrl']
category = data[i]['data']['mainCategory']
pubt = data[i]['data']['publishTime']
try:
pic = data[i]['data']['mainImage']['cdnUrl']
except:
pic = ''
try:
desc = data[i]['data']['description']
except:
desc = ''
try:
author = data[i]['data']['authors'][0]['publishName']
except:
author = ''
rrr = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(float(pubt)))
print rrr
if float(pubt) >= self.timeStamp:
num += 1
pubt = time.strftime('%Y-%m-%d %H:%M:%S',time.localtime(float(pubt)))
yield scrapy.Request(url,meta={
'title':title,
'category':category,
'pubt':pubt,
'desc':desc,
'author':author,
'pic':pic
}, callback=self.parse_item, dont_filter=True)
if num > 0 :
nu = str(response.url).split('offset=')[0]
url = nu + 'offset=' + str(number) + '&bucketId=00000'
yield scrapy.Request(url,callback=self.parse)
def parse_item(self,response):
title = response.meta['title']
category = response.meta['category']
publishedDate = response.meta['pubt']
describe = response.meta['desc']
author = response.meta['author']
app_name= '香港01'
pic_url = response.meta['pic']
home_url = 'https://www.hk01.com/'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
content = response.xpath('//p/text()').extract()
contentt = ''
for i in range(len(content)):
contentt += content[i]
content = contentt
pic_more_url = pic_url
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
self.count += 1
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
item['count'] = self.count
yield item<file_sep>#coding=utf-8
import os

# Keep re-running the souhuxinwen spider; each scrapy crawl exits when the run finishes.
while True:
    os.system("scrapy crawl souhuxinwen")
<file_sep>#coding=utf-8
import scrapy
import time
import json
import re
from news.items import NewsItem
from news.DataResource import TransportData
from scrapy.http import Request
import requests
class jiaohuidian(scrapy.Spider):
name = 'jiaohuidianxinwen'
start_urls = [
        'https://japi.xhby.net/api/articles?column_id=79f521279a394064b66341a5e2a34629&page=1&hide_top=0',  # Recommended (ossJson)
        'https://japi.xhby.net/api/articles?column_id=79f521279a394064b66341a5e2a34629&page=2&hide_top=0',  # Recommended
        'https://japi.xhby.net/api/articles?column_id=79f521279a394064b66341a5e2a34629&page=3&hide_top=0',  # Recommended
        'https://japi.xhby.net/api/articles?column_id=12&page=1&hide_top=0',  # Topics (ossJson)
        'https://japi.xhby.net/api/articles?column_id=12&page=2&hide_top=0',  # Topics
        'https://japi.xhby.net/api/articles?column_id=12&page=3&hide_top=0',  # Topics
        'https://japi.xhby.net/api/jhd_article?page=1',  # Jiaohuihao (699)
        'https://japi.xhby.net/api/jhd_article?page=2',  # Jiaohuihao
        'https://japi.xhby.net/api/jhd_article?page=3',  # Jiaohuihao
        'https://japi.xhby.net/api/leaders_province?page=1',  # Politics
        'https://japi.xhby.net/api/leaders_province?page=2',  # Politics
        'https://japi.xhby.net/api/leaders_province?page=3',  # Politics
        'https://japi.xhby.net/api/service?page=1',  # Services
        'https://japi.xhby.net/api/service?page=2',  # Services
        'https://japi.xhby.net/api/service?page=3',  # Services
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
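    # The API nests the article list under a different key per channel; the try/except
    # chain in parse walks the known keys until one of them resolves.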
def parse(self, response):
data = json.loads(response.body)
data = data['data']
try:
try:
try:
try:
try:
try:
data = data['column']
# print data
except:
                                data = data['data']  # Politics
# print data
except:
                            data = data['top_activity']  # Services
# print data
except:
                        data = data['article_top']  # Jiaohuihao / Recommended / Topics
# print data
except:
data = data['article']['data']
# print data
except:
data = data['bottom_activity']['data']
# print data
except:
data = data['order_column']['data']
# print data
publishedDate = ''
for i in range(0,len(data)):
try:
title = data[i]['title']
# print title
except:
title = data[i]['duty']
# print title
try:
try:
links = data[i]['ossJson']
# print links
except:
links = data[i]['web_url']
# print links
except:
id = data[i]['id']
links = 'http://jnews.xhby.net/waparticles/699/' + id
# print links
try:
publishedDate = data[i]['created_at']
except:
publishedDate = ''
try:
try:
pic_url = data[i]['pic0']
except:
pic_url = data[i]['iconUrl']
except:
pic_url = ''
try:
describe = data[i]['description']
except:
describe = ''
yield scrapy.Request(links,meta={
'title':title,
'describe':describe,
'pic_url':pic_url,
'home_url':response.url,
'publishedDate':publishedDate
},callback=self.parse_item)
def parse_item(self,response):
title = response.meta['title']
describe = response.meta['describe']
pic_url = response.meta['pic_url']
home_url = response.meta['home_url']
publishedDate = response.meta['publishedDate']
app_name = '交汇点新闻'.encode('utf-8')
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
try:
data =json.loads(response.body)
content = data['article']['content']
pic_more_url = ''
if content == '':
content = data['article']['aAbstract']
if content =='':
content = data['article']['attachment']
contentt = ''
for i in range(0,len(content)):
contentt += content[i]['attContent']
content = contentt
pic_more_url = data['article']['attachment']
pic_more_url1 = []
for i in range(0,len(pic_more_url)):
pic_more_url1.append(pic_more_url[i]['attPath'])
pic_more_url = str(set(pic_more_url1))
publishedDate = data['article']['created_at']
if pic_more_url =='':
try:
pic_more_url = data['article']['contentImages']
pic_more_url1 = []
for i in range(0,len(pic_more_url)):
pic_more_url1.append(pic_more_url[i]['attPath'])
pic_more_url = str(set(pic_more_url1))
except:
pic_more_url = ''
author = data['article']['aEditor']
if 'column_id=12&' in home_url:
category = u'专题'.encode('utf-8')
elif 'jhd_article?' in home_url:
category = u'交汇号'.encode('utf-8')
elif 'leaders_province' in home_url:
category = u'政情'.encode('utf-8')
elif 'service' in home_url:
category = u'服务'.encode('utf-8')
else:
category = u'推荐'.encode('utf-8')
            if category == u'专题'.encode('utf-8'):
content = data['article']['aAbstract']
except:
content = response.xpath('//p').extract()
contentt = ''
for i in range(0, len(content)):
contentt += content[i]
content = contentt
content = content.replace('\t','').replace('\n','').replace('\r','')
content = re.findall('>(.*?)<',content)
contentt = ''
for i in range(0,len(content)):
contentt += content[i]
content = contentt
pic_more_url = ''
author = ''
if 'column_id=12&' in home_url:
category = u'专题'.encode('utf-8')
elif 'jhd_article?' in home_url:
category = u'交汇号'.encode('utf-8')
elif 'leaders_province' in home_url:
category = u'政情'.encode('utf-8')
elif 'service' in home_url:
category = u'服务'.encode('utf-8')
else:
category = u'推荐'.encode('utf-8')
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
self.count = self.count + 1
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['count'] = self.count
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M:%S")
publishedDate = time.mktime(timeArray)
if publishedDate > self.timeStamp:
yield item
<file_sep>#coding=utf-8
import scrapy
import re,json,time
from news.items import NewsItem
from lxml.etree import HTML
class pengbo(scrapy.Spider):
name = 'pengbo'
time_str1 = time.strftime("%Y-%m-%d %H:%M:%S")
timeArray1 = time.strptime(time_str1, "%Y-%m-%d %H:%M:%S")
timeStamp1 = int(time.mktime(timeArray1))
start_urls = [
        'http://content.cdn.bb.bbwc.cn/slateInterface/v9/app_1/android/tag/cat_1600/articlelist?updatetime=%s&appVersion=4.5.7'%timeStamp1,  # Interviews
        'http://content.cdn.bb.bbwc.cn/slateInterface/v9/app_1/android/tag/cat_18/articlelist?updatetime=%s&appVersion=4.5.7'%timeStamp1,  # Features
        'http://content.cdn.bb.bbwc.cn/slateInterface/v9/app_1/android/tag/cat_244/articlelist?updatetime=%s&appVersion=4.5.7'%timeStamp1,  # Opinion
        'http://content.cdn.bb.bbwc.cn/slateInterface/v9/app_1/android/tag/cat_12/articlelist?updatetime=%s&appVersion=4.5.7'%timeStamp1,  # Finance
        'http://content.cdn.bb.bbwc.cn/slateInterface/v9/app_1/android/tag/cat_11/articlelist?updatetime=%s&appVersion=4.5.7'%timeStamp1,  # Technology
        'http://content.cdn.bb.bbwc.cn/slateInterface/v9/app_1/android/tag/cat_19/articlelist?updatetime=%s&appVersion=4.5.7'%timeStamp1,  # Global
        'http://content.cdn.bb.bbwc.cn/slateInterface/v9/app_1/android/tag/cat_1490/articlelist?updatetime=%s&appVersion=4.5.7'%timeStamp1,  # AI
        'http://content.cdn.bb.bbwc.cn/slateInterface/v9/app_1/android/tag/cat_20/articlelist?updatetime=%s&appVersion=4.5.7'%timeStamp1,  # Companies
        'http://content.cdn.bb.bbwc.cn/slateInterface/v9/app_1/android/tag/cat_21/articlelist?updatetime=%s&appVersion=4.5.7'%timeStamp1,  # Policy
        'http://content.cdn.bb.bbwc.cn/slateInterface/v9/app_1/android/tag/cat_13/articlelist?updatetime=%s&appVersion=4.5.7'%timeStamp1,  # Lifestyle
        'http://content.cdn.bb.bbwc.cn/slateInterface/v9/app_1/android/tag/cat_304/articlelist?updatetime=%s&appVersion=4.5.7'%timeStamp1,  # Energy
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
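    # Each articlelist URL carries the current timestamp (timeStamp1) as its updatetime
    # cursor, which appears to make the API return the most recent batch per category.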
def parse(self, response):
data = json.loads(response.body)
data = data['articletag'][0]['article']
for i in range(len(data)):
title = data[i]['title']
print title
pubt = data[i]['updatetime']
publishedDater = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(pubt)))
print publishedDater
describe = data[i]['desc']
url = data[i]['phonepagelist'][0]['url']
category = data[i]['catname']
if int(pubt) >= self.timeStamp:
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(pubt)))
yield scrapy.Request(url, meta={
'title':title,
'describe':describe,
'pubt':publishedDate,
'home_url':response.url,
'category':category
}, callback=self.parse_item)
def parse_item(self, response):
title = response.meta['title']
describe = response.meta['describe']
publishedDate = response.meta['pubt']
author = ''
pic_url = ''
app_name = '彭博商业周刊'
home_url = response.meta['home_url']
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
if 'cat_1600/' in home_url:
category = '专访'
elif 'cat_18/' in home_url:
category = '特写'
elif 'cat_244/' in home_url:
category = '观点'
elif 'cat_12/' in home_url:
category = '金融'
elif 'cat_11/' in home_url:
category = '科技'
elif 'cat_19/' in home_url:
category = '全球'
elif 'cat_1490/' in home_url:
category = 'AI'
elif 'cat_20/' in home_url:
category = '公司'
elif 'cat_21/' in home_url:
category = '政策'
elif 'cat_13/' in home_url:
category = '生活'
else:
category = '能源'
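        # The catname carried in meta from the list API takes precedence over the
        # URL-based guess above.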
category = response.meta['category']
try:
content = response.xpath('//div[@class="bottom-content"]').extract()
contentt = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', contentt)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
except:
content = response.xpath('//p').extract()
contentt = ''
for i in range(len(content)):
contentt += content[i]
content = contentt
contentt = content.replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', contentt)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
pic_more_url = ''
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
yield item
<file_sep>#coding=utf-8
import time, json, re
from news.items import NewsItem
import scrapy
from lxml.etree import HTML
class shidaizhoukan(scrapy.Spider):
name = 'shidaizhoukan'
start_urls = [
'http://time.com/section/politics/?page=1',
'http://time.com/section/politics/?page=2',
'http://time.com/section/politics/?page=3',
'http://time.com/section/us/?page=1',
'http://time.com/section/us/?page=2',
'http://time.com/section/us/?page=3',
'http://time.com/section/world/?page=1',
'http://time.com/section/world/?page=2',
'http://time.com/section/world/?page=3',
'http://time.com/section/tech/?page=1',
'http://time.com/section/tech/?page=2',
'http://time.com/section/tech/?page=3'
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime('2018-06-25', "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
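    # parse_item rewrites the human-readable 'Updated: Month D, YYYY ...' stamp into a
    # %Y-%m-%d date (the time of day is dropped) before comparing it to the cutoff.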
def parse(self, response):
links =response.xpath('//article/div[1]/div[1]/a/@href').extract()
title =response.xpath('//article/div[1]/div[1]/a/text()').extract()
for i in range(len(links)):
if 'http' not in links[i]:
url = 'http://time.com' + links[i]
else:
url = links[i]
tit = title[i].replace('\t','').replace('\n','').replace('\r','')
yield scrapy.Request(url, meta={
'title': tit,
'home_url': response.url
}, callback=self.parse_item)
def parse_item(self, response):
title = response.meta['title']
home_url = response.meta['home_url']
app_name = '时代周刊'
author = ''
pic_url = ''
describe = ''
content = response.xpath('//div[@id="article-body"]').extract()
selator = HTML(content[0])
content = selator.xpath('//text()')
content = ''.join(content)
content = content.replace('\t', '').replace('\n', '').replace('\r', '')
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
pic_more_url = ''
if 'politics' in home_url:
category = 'Politics'
elif 'us' in home_url:
category = 'U.S.'
elif 'world' in home_url:
category = 'World'
elif 'tech' in home_url:
category = 'Tech'
else:
category = 'Politics'
publishedDate = response.xpath('//div[@class="timestamp published-date padding-12-left"]/text()').extract()[0]
publishedDate = publishedDate.replace('\t','').replace('\n','').replace('\r','').replace(' ','')
publishedDate = publishedDate.replace('Updated:','').replace('ET','').replace('EDT','').replace(',','')
if '2018' in publishedDate:
publishedDate = publishedDate.split('2018')
t1 = publishedDate[0]
publishedDate = '2018' + '-' + t1
elif '2019' in publishedDate:
publishedDate = publishedDate.split('2019')
t1 = publishedDate[0]
publishedDate = '2019' + '-' + t1
else:
publishedDate = '2018-1-01'
        t12 = publishedDate
        # Normalize the English month name to "<month number>-" so the string can be
        # parsed as %Y-%m-%d below. Longer names come first so 'June' wins over 'Jun'.
        month_map = [
            ('June', '6-'), ('Jun', '6-'), ('January', '1-'), ('Jan', '1-'),
            ('February', '2-'), ('Feb', '2-'), ('March', '3-'), ('Mar', '3-'),
            ('April', '4-'), ('Apr', '4-'), ('May', '5-'), ('July', '7-'),
            ('August', '8-'), ('Aug', '8-'), ('September', '9-'), ('Sept', '9-'),
            ('October', '10-'), ('Oct', '10-'), ('November', '11-'), ('Nov', '11-'),
            ('December', '12-'), ('Dec', '12-'),
        ]
        tt = t12
        for name, num in month_map:
            if name in t12:
                tt = t12.replace(name, num)
                break
publishedDate = tt + ' 00:00:00'
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M:%S")
timenum = int(time.mktime(timeArray))
if timenum >= self.timeStamp:
self.count += 1
item['count'] = self.count
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(timenum)))
item['publishedDate'] = publishedDate
yield item
<file_sep>#coding=utf-8
import scrapy
import json,re,time
from news.items import NewsItem
class xianggang(scrapy.Spider):
name = 'xianggang01'
start_urls = [
'https://www.hk01.com/'
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
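    # The crawl walks homepage nav (parse) -> section page (parse_item) -> sub-section
    # listing (parse_one) -> article page (parse_two), passing the category via meta.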
def parse(self, response):
links = response.xpath('//div[@class="s1lxp17y-1 cHzBvk"]/div/span/a/@href').extract()
cate = response.xpath('//div[@class="s1lxp17y-1 cHzBvk"]/div/span/a/text()').extract()
for i in range(len(links)):
url = 'https://www.hk01.com' + links[i]
category = cate[i]
            if category == u'港聞' or category == u'觀點' or category == u'國際' or category == u'中國' or category == u'經濟':
yield scrapy.Request(url, meta={
'category':category
}, callback=self.parse_item)
def parse_item(self,response):
category = response.meta['category']
links = response.xpath('//div[@class="sc-bwzfXH hxYtSF"]/div/div/span/a/@href').extract()
for i in range(len(links)):
url = 'https://www.hk01.com' + links[i]
yield scrapy.Request(url,meta={
'category':category
},callback=self.parse_one)
def parse_one(self,response):
category = response.meta['category']
links = response.xpath('//div[@class="sc-bdVaJa gRrvFh"]/span/a/@href').extract()
for i in range(len(links)):
url = 'https://www.hk01.com' + links[i]
yield scrapy.Request(url, meta={
'category': category
}, callback=self.parse_two)
def parse_two(self,response):
category = response.meta['category']
app_name = '香港01'
describe = ''
pic_url = ''
publishedDate = response.xpath('//time/text()').extract()[0]
home_url = 'https://www.hk01.com/'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
title = response.xpath('//h1/text()').extract()[0]
pic_more_url = ''
content = response.xpath('//p/text()').extract()
contentt = ''
for i in range(len(content)):
contentt += content[i]
content = contentt
try:
author = response.xpath('//a[@class="sc-gqjmRU dhKqyP"]/text()').extract()[0]
except:
author = ''
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
self.count += 1
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
item['count'] = self.count
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M")
timeStamp = int(time.mktime(timeArray))
if timeStamp >= self.timeStamp:
item['publishedDate'] = time.strftime("%Y-%m-%d %H:%M:%S" ,time.localtime(float(timeStamp)))
yield item
<file_sep>#coding=utf-8
import scrapy
import time
import json
import re
from news.items import NewsItem
class rmrb(scrapy.Spider):
name = 'renminnews'
Ttime = int(round(time.time()*1000))
count = 0
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
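    # Every request needs a securitykey: the md5 of the pipe-joined request fields plus
    # a constant salt (see md5() below). start_requests signs pages 1 and 2, and parse
    # re-signs each follow-up page before requesting it.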
def start_requests(self):
t1 = '1|||03847eef-3885-36cd-87ce-d10d6ed86ce0|Nexus 5|Android 4.4.4|LGE|1080*1776|1||6863551|1776|1080|620|0.0|0.0|' + '1' + '|北京市|' + str(self.Ttime) + '|1|0|20|0|0|6.2.0rmrbsecurity$#%sut49fbb427a508bcc'
t2 = '1|||03847eef-3885-36cd-87ce-d10d6ed86ce0|Nexus 5|Android 4.4.4|LGE|1080*1776|1||6863551|1776|1080|620|0.0|0.0|' + '2' + '|北京市|' + str(self.Ttime) + '|1|0|20|0|0|6.2.0rmrbsecurity$#%sut49fbb427a508bcc'
tt1 = self.md5(t1)
tt2 = self.md5(t2)
url =[
'http://app.peopleapp.com/Api/600/HomeApi/getContentList?category_id=1&city=&citycode=&device=03847eef-3885-36cd-87ce-d10d6ed86ce0&device_model=Nexus 5&device_os=Android 4.4.4&device_product=LGE&device_size=1080*1776&device_type=1&district=&fake_id=6863551&image_height=1776&image_wide=1080&interface_code=620&latitude=0.0&longitude=0.0&page=1&province=北京市&province_code=%s&refresh_tag=1&refresh_time=0&show_num=20&update_time=0&userId=0&version=6.2.0&securitykey=%s'%(self.Ttime,tt1),
'http://app.peopleapp.com/Api/600/HomeApi/getContentList?category_id=1&city=&citycode=&device=03847eef-3885-36cd-87ce-d10d6ed86ce0&device_model=Nexus 5&device_os=Android 4.4.4&device_product=LGE&device_size=1080*1776&device_type=1&district=&fake_id=6863551&image_height=1776&image_wide=1080&interface_code=620&latitude=0.0&longitude=0.0&page=2&province=北京市&province_code=%s&refresh_tag=1&refresh_time=0&show_num=20&update_time=0&userId=0&version=6.2.0&securitykey=%s'%(self.Ttime,tt2)
]
for i in range(len(url)):
yield scrapy.Request(url[i], callback=self.parse)
def parse(self, response):
data = json.loads(response.body)
data = data['data']
accept_title = []
for i in range(0,len(data)):
title = data[i]['title']
url = data[i]['share_url']
pic_url = data[i]['share_image']
pubtime = data[i]['news_time'].split(' ')
pubtime1 = pubtime[0]
try:
pubtime2 = pubtime[1].replace('-',":")
publishedDate = pubtime1 + ' ' + pubtime2
except:
publishedDate = pubtime1 + ' 00:00:00'
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M:%S")
publishedDate = time.mktime(timeArray)
if publishedDate >= self.timeStamp:
accept_title.append(title)
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(publishedDate))
yield scrapy.Request(url,meta={
'title':title,
'publishedDate':publishedDate,
'home_url':response.url,
'pic_url':pic_url
},callback=self.parse_item)
if len(accept_title) > 0:
num = re.findall('page=(.*?)&',str(response.url))
num0 = 'page=' + num[0]
numt = int(num[0]) + 1
t = '1|||03847eef-3885-36cd-87ce-d10d6ed86ce0|Nexus 5|Android 4.4.4|LGE|1080*1776|1||6863551|1776|1080|620|0.0|0.0|' + str(numt) + '|北京市|' + str(self.Ttime) + '|1|0|20|0|0|6.2.0rmrbsecurity$#%sut49fbb427a508bcc'
keynum = self.md5(t)
num1 = 'page=' + str(int(num[0]) + 1)
key = str(response.url).split('securitykey=')[0]
url = (key + 'securitykey=' + keynum).replace(num0,num1)
yield scrapy.Request(url,callback=self.parse)
def parse_item(self,response):
title = response.meta['title']
publishedDate = response.meta['publishedDate']
home_url = response.meta['home_url']
describe = ''
app_name = '人民日报'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
pic_url = response.meta['pic_url']
author = ''
category = u'首页新闻'.encode('utf-8')
try:
try:
content = response.xpath('//div[@class="article"]').extract()
contentt = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', contentt)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
pic_more_url = re.findall('imgsrc="(.*?)"',content)
pic_more_url1 = []
for i in range(0,len(pic_more_url)):
pic_more_url1.append(pic_more_url[i])
pic_more_url = str(set(pic_more_url1))
except:
content = response.xpath('//div[@class="article long-article"]').extract()
contentt = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', contentt)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
pic_more_url = re.findall('imgsrc="(.*?)"', content)
pic_more_url1 = []
for i in range(0, len(pic_more_url)):
pic_more_url1.append(pic_more_url[i])
pic_more_url = str(set(pic_more_url1))
except:
content = ''
pic_more_url = ''
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
self.count += 1
item['count'] = self.count
numappName = self.readjson()
if len(numappName) == 0:
items = {
'url': response.url,
'title': item['title']
}
with open('renminribao.json', 'a+') as fp:
line = json.dumps(dict(items), ensure_ascii=False) + '\n'
fp.write(line)
yield item
        else:
            # Write and yield only after confirming the url/title is not already in the
            # history file; otherwise duplicates would be re-emitted on every mismatch.
            for i in range(len(numappName)):
                if numappName[i]['url'] == response.url or numappName[i]['title'] == item['title']:
                    return
            items = {
                'url': response.url,
                'title': item['title']
            }
            with open('renminribao.json', 'a+') as fp:
                line = json.dumps(dict(items), ensure_ascii=False) + '\n'
                fp.write(line)
            yield item
    def readjson(self):
        # Load previously saved url/title records (one JSON object per line) for de-duplication.
        s = []
        try:
            with open('renminribao.json', 'r') as file_object:
                for line in file_object:
                    if line.strip():
                        s.append(json.loads(line))
        except IOError:
            # first run: the record file does not exist yet
            pass
        return s
def md5(self,t):
import hashlib
m = hashlib.md5()
m.update(t)
return m.hexdigest()<file_sep>#coding=utf-8
import scrapy
import time,re,json
from news.items import NewsItem
class meiguozhiyin(scrapy.Spider):
name = 'meiguozhiyinzhongwenwang'
start_urls = [
'https://www.voachinese.com/z/1746',#美国
'https://www.voachinese.com/z/1757',#中国
'https://www.voachinese.com/z/1769',#台湾
'https://www.voachinese.com/z/1755',#港澳
'https://www.voachinese.com/z/1758',#法律
'https://www.voachinese.com/z/1740',#国际
'https://www.voachinese.com/z/1748',#经济
'https://www.voachinese.com/z/1776',#美中关系
'https://www.voachinese.com/z/5403',#川金会
'https://www.voachinese.com/s?k=%E4%B9%A0%E8%BF%91%E5%B9%B3&tab=all&pi=1&r=any&pp=10'
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime('2018-06-24', "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
if '1746' in response.url:
category = '美国'
elif '1757' in response.url:
category = '中国'
elif '1769' in response.url:
category = '台湾'
elif '1755' in response.url:
category = '港澳'
else:
category = '新闻'
links = response.xpath('//ul[@id="ordinaryItems"]/li/div[1]/div/a/@href').extract()
title = response.xpath('//ul[@id="ordinaryItems"]/li/div[1]/div/a/h4/span/text()').extract()
cate = response.xpath('//ul[@id="ordinaryItems"]/li/div[1]/div/a/p/text()').extract()
for i in range(len(links)):
url = 'https://www.voachinese.com' + links[i]
tit = title[i]
desc = cate[i]
print tit
print url
yield scrapy.Request(url, meta={
'title': tit,
'describe': desc,
'category': category
}, callback=self.parse_item)
def parse_item(self, response):
title = response.meta['title']
describe = response.meta['describe']
app_name = '美国之音中文网'
pubt = response.xpath('//time/text()').extract()[0].replace('\t', '').replace('\n', '').replace('\r', '')
publishedDate = pubt.replace('年', '-').replace('月', '-').replace('日', '')
pic_url = ''
author = ''
home_url = 'https://www.voachinese.com'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
content = response.xpath('//p/text()').extract()
contentt = ''
for i in range(len(content)):
contentt += content[i]
content = contentt.replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
pic_more_url = ''
category = response.meta['category']
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M")
timenum = int(time.mktime(timeArray))
if timenum >= self.timeStamp:
self.count += 1
item['count'] = self.count
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(timenum)))
item['publishedDate'] = publishedDate
yield item
<file_sep>#coding=utf-8
import json
import scrapy
import time,re
from news.items import NewsItem
from news.DataResource import TransportData
from lxml.etree import HTML
class fenghuang(scrapy.Spider):
name = 'fenghuang'
Ttime = int(round(time.time() * 1000))
count = 0
page = 1
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime('2018-06-01', "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def start_requests(self):
t = self.Ttime
token = self.md5(t)
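        # The search API pairs the millisecond timestamp (date=) with its signature (token=),
        # which md5() below computes as md5('IFENG' + timestamp).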
url ='https://api.3g.ifeng.com/client_search_list?k=习近平&page=%s&gv=5.6.9&av=5.6.8&uid=123&deviceid=123&proid=ifengnewsdiscovery&os=ios_19&df=androidphone&vt=5&screen=720x1280&nw=wifi&token=%s&date=%s&callback=1&callback=window.jsonp.cb1'%(self.page,token,t)
yield scrapy.Request(url,callback=self.parse)
def parse(self, response):
        # strip the JSONP wrapper "window.jsonp.cb1(...)" without removing ')' inside the payload
        data = response.body.replace('window.jsonp.cb1(', '').rstrip()
        if data.endswith(')'):
            data = data[:-1]
data = json.loads(data)
data = data['data']
num = 0
for i in range(len(data)):
title = data[i]['title'].replace('<em>','').replace('</em>','')
pubt = data[i]['createTime'].replace('/','-')
url = data[i]['link']['url']
try:
pic_url = data[i]['thumbnail']
except:
pic_url = ''
timeArray = time.strptime(pubt, "%Y-%m-%d %H:%M:%S")
timeStamp = int(time.mktime(timeArray))
if timeStamp >= self.timeStamp:
num += 1
yield scrapy.Request(url,meta={
'pic_url':pic_url,
'title':title,
'pubt':pubt
},callback=self.parse_item)
if num > 0:
self.page += 1
t = self.Ttime
token = self.md5(t)
url = 'https://api.3g.ifeng.com/client_search_list?k=习近平&page=%s&gv=5.6.9&av=5.6.8&uid=123&deviceid=123&proid=ifengnewsdiscovery&os=ios_19&df=androidphone&vt=5&screen=720x1280&nw=wifi&token=%s&date=%s&callback=1&callback=window.jsonp.cb1' % (
self.page, token, t)
yield scrapy.Request(url, callback=self.parse)
def parse_item(self,response):
title = response.meta['title']
pic_url = response.meta['pic_url']
publishedDate = response.meta['pubt']
app_name = '凤凰新闻'
describe = ''
home_url = 'https://api.3g.ifeng.com/'
category = '头条'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
data = json.loads(response.body)
content =data['body']['text']
contentt = content.replace('\t', '').replace('\n', '').replace('\r', '')
text = HTML(content)
try:
pic_more_url = text.xpath('//p/img/@src')
except:
pic_more_url = ''
content = re.findall('>(.*?)<', contentt)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
try:
author = data['body']['source']
except:
author = ''
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
self.count += 1
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
item['count'] = self.count
yield item
def md5(self,page):
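        # Signature helper: returns md5('IFENG' + value); despite the name, 'page' receives the millisecond timestamp.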
import hashlib
t = 'IFENG' + str(page)
m = hashlib.md5()
m.update(t)
return m.hexdigest()<file_sep>#coding=utf-8
import scrapy
import json,re,time
from news.items import NewsItem
from lxml.etree import HTML
class eluosi(scrapy.Spider):
name = 'eluosiweixing'
start_urls = [
'http://sputniknews.cn/china/',#中国
'http://sputniknews.cn/russia/',
'http://sputniknews.cn/russia_china_relations/',
'http://sputniknews.cn/politics/',
'http://sputniknews.cn/economics/',
'http://sputniknews.cn/military/',
'http://sputniknews.cn/society/',
'http://sputniknews.cn/science/',
'http://sputniknews.cn/radio/'
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime('2018-06-01', "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
links = response.xpath('//ul[@class="b-stories__list"]/li/a/@href').extract()
pic = response.xpath('//ul[@class="b-stories__list"]/li/a/img/@src').extract()
title = response.xpath('//ul[@class="b-stories__list"]/li/div/div[2]/h2/a/text()').extract()
pubt = response.xpath('//ul[@class="b-stories__list"]/li/div/span/text()').extract()
desc = response.xpath('//ul[@class="b-stories__list"]/li/div/div[2]/div/p/text()').extract()
for i in range(len(links)):
url = 'http://sputniknews.cn' + links[i]
pic_url = pic[i]
tit = title[i]
published = pubt[i]
try:
describe = desc[i]
except:
describe = ''
yield scrapy.Request(url, meta={
'title': tit,
'pic_url':pic_url,
'publishedDate':published,
'describe':describe,
'home_url':response.url
}, callback=self.parse_item, dont_filter=True)
def parse_item(self, response):
title = response.meta['title']
pic_url = response.meta['pic_url']
publishedDate = response.meta['publishedDate']
p = publishedDate.split(' ')
p1 = p[0]
p2 = p[1]
publishedDate = p2.replace('年', '-').replace('月', '-').replace('日', ' ') + p1 + ':00'
describe = response.meta['describe']
app_name = '俄罗斯卫星中文网'
author = ''
home_url = response.meta['home_url']
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
content = response.xpath('//div[@itemprop="articleBody"]').extract()
content = content[0].replace('\t', '').replace('\n', '').replace('\r', '')
Sector = HTML(content)
content = content.replace(' ','')
pic_more_url = Sector.xpath('//img/@src')
pic = []
for i in range(len(pic_more_url)):
pic.append(pic_more_url[i])
pic_more_url = str(pic)
content = re.findall('>(.*?)<', content)
contentdata = ''
for i in range(0, len(content)):
contentdata += content[i]
content = contentdata
if 'china' in home_url:
category = '中国'
elif 'russia' in home_url:
category = '俄罗斯'
elif 'politics' in home_url:
category = '政治'
elif 'economics' in home_url:
category = '经济'
elif 'military' in home_url:
category = '军事'
elif 'society' in home_url:
category = '社会'
elif 'science' in home_url:
category = '科学'
elif 'radio' in home_url:
category = '广播'
else:
category = '俄中关系'
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M:%S")
timenum = int(time.mktime(timeArray))
if timenum >= self.timeStamp:
self.count += 1
item['count'] = self.count
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(timenum)))
item['publishedDate'] = publishedDate
numappName = self.readjson()
if len(numappName) == 0:
items = {
'url': response.url,
'title':item['title']
}
with open('eluosiweixing.json', 'a+') as fp:
line = json.dumps(dict(items), ensure_ascii=False) + '\n'
fp.write(line)
yield item
else:
                for i in range(len(numappName)):
                    # bail out as soon as this url or title has already been recorded
                    if numappName[i]['url'] == response.url or numappName[i]['title'] == item['title']:
                        return
                # nothing matched: record the item once and yield it once
                items = {
                    'url': response.url,
                    'title':item['title']
                }
                with open('eluosiweixing.json', 'a+') as fp:
                    line = json.dumps(dict(items), ensure_ascii=False) + '\n'
                    fp.write(line)
                yield item
    def readjson(self):
        # Load previously saved url/title records (one JSON object per line) for de-duplication.
        s = []
        try:
            with open('eluosiweixing.json', 'r') as file_object:
                for line in file_object:
                    if line.strip():
                        s.append(json.loads(line))
        except IOError:
            # first run: the record file does not exist yet
            pass
        return s
<file_sep>#coding=utf-8
import scrapy
import json,re,time
from news.items import NewsItem
from lxml.etree import HTML
class hanlian(scrapy.Spider):
name = 'hanlianshe'
start_urls = [
'http://chinese.yonhapnews.co.kr/allheadlines/0200000001.html',#滚动
'http://chinese.yonhapnews.co.kr/international/0306000001.html',#国际
'http://chinese.yonhapnews.co.kr/domestic/0406000001.html',#国内
]
def parse(self, response):
url = response.xpath('//div[@class="con_article_list"]/ul/li[1]/a/@href').extract()
for i in range(len(url)):
links = url[i]
yield scrapy.Request(links,callback=self.parse_item)
def parse_item(self, response):
title = response.xpath('//h1/text()').extract()[0]
pic_url = ''
describe = ''
app_name = '韩联社'
content = response.xpath('//div[@id="articleBody"]/p/text()')
contentt = ''
for i in range(len(contentt)):
contentt += content[i]
content = contentt
publishedDate = response.xpath('//p[@class="publish-time"]/text()').extract()[0]
author = ''
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
home_url = 'http://chinese.yonhapnews.co.kr/'
pic = response.xpath('//div[@id="articleBody"]').extract()[0]
Sector = HTML(pic)
pic_more = Sector.xpath('//img/@src')
pic_more_url = []
for i in range(len(pic_more)):
pic_more_url.append(pic_more[i])
category = ''
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime<file_sep>#coding=utf-8
import time,re,json
from news.items import NewsItem
import scrapy
class cnbc(scrapy.Spider):
name = 'cnbc'
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime('2018-06-1', "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def start_requests(self):
category = [
'immigration-border-crisis', 'brexit-referendum',
'puerto-rico-crisis', 'legal-pot', 'europess-border-crisis',
'hazing-in-america'
]
for i in range(len(category)):
url = 'https://deviceservices.nbcnews.com/portal/taxonomy/dreamypal?type=storyline&asset=android_adaptive&slug=%s&_devicefeed_=%s'%(category[i], category[i])
yield scrapy.Request(url, meta={
'category': category[i]
}, callback=self.parse_item)
def parse_item(self, response):
category = response.meta['category']
app_name = 'CNBC'
data = json.loads(response.body)
data = data['entries']
for i in range(len(data)):
url = data[i]['id']
pubt = data[i]['published']
title = data[i]['title']
try:
content = data[i]['content']
except:
content = ''
yield scrapy.Request(url,meta={
'pubt': pubt,
'category': category,
'app_name': app_name,
'content':content,
'title':title
}, callback=self.parse_one)
def parse_one(self, response):
category = response.meta['category']
app_name = response.meta['app_name']
publishedDate = response.meta['pubt']
publishedDate = publishedDate.replace('T',' ').replace('Z','')
pic_url = ''
author = ''
describe = ''
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
home_url = 'http://devicestransform-stg.elasticbeanstalk.com/'
# title = response.xpath('//h1/text()').extract()[0]
title = response.meta['title']
pic_more_url = ''
content = response.meta['content']
if len(content) <= 100:
content = response.xpath('//p').extract()
contentt = ''
for i in range(len(content)):
contentt += content[i]
content = contentt.replace('<p>', '').replace('</p>', '').replace('\t', '')
content = content.replace('\n', '').replace('\r', '')
content = re.findall('>(.*?)<', content)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M:%S")
timenum = int(time.mktime(timeArray))
if timenum >= self.timeStamp:
self.count += 1
item['count'] = self.count
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(timenum)))
item['publishedDate'] = publishedDate
yield item<file_sep>#coding=utf-8
import scrapy
from news.items import NewsItem
import json,re,time
class xinlang(scrapy.Spider):
name = 'xinlang'
start_urls = [
'http://newsapi.sina.cn/?resource=hbpage&newsId=HB-1-snhs/index-search&lq=1&page=1&newpage=0&keyword=%E4%B9%A0%E8%BF%91%E5%B9%B3&lDid=9777f76f-d895-4801-b7ae-2edaef39eaba&oldChwm=12040_0006&city=WMXX2971&loginType=0&authToken=<PASSWORD>&link=&authGuid=6409396086577890289&ua=Xiaomi-MI+5s__sinanews__6.8.9__android__6.0.1&deviceId=7cd27e609fa72795&connectionType=2&resolution=1080x1920&weiboUid=&mac=02%3A00%3A00%3A00%3A00%3A00&osVersion=6.0.1&chwm=12040_0006&weiboSuid=&andId=fcdf1040e0f0ec2a&from=6068995012&sn=322185b8&aId=01AuLIAggwvv6q8bymRfRsl8NQvpxv4XcbyMun5eSkTzeJ0S4.&deviceIdV1=7cd27e609fa72795&osSdk=23&abver=1528118872689&accessToken=&abt=314_302_297_281_275_269_267_255_253_251_249_241_237_231_229_228_226_217_215_207_203_191_189_187_153_149_143_141_135_128_113_111_65_57_45_37_21_18_16_13&seId=f295bff3b2&imei=864454030661742&deviceModel=Xiaomi__Xiaomi__MI+5s&location=39.963886%2C116.358482&authUid=0&urlSign=1bd4875da4&rand=25',
'http://newsapi.sina.cn/?resource=hbpage&newsId=HB-1-snhs/index-search&lq=1&page=2&newpage=0&keyword=%E4%B9%A0%E8%BF%91%E5%B9%B3&lDid=9777f76f-d895-4801-b7ae-2edaef39eaba&oldChwm=12040_0006&city=WMXX2971&loginType=0&authToken=<PASSWORD>&link=&authGuid=6409396086577890289&ua=Xiaomi-MI+5s__sinanews__6.8.9__android__6.0.1&deviceId=7cd27e609fa72795&connectionType=2&resolution=1080x1920&weiboUid=&mac=02%3A00%3A00%3A00%3A00%3A00&osVersion=6.0.1&chwm=12040_0006&weiboSuid=&andId=fcdf1040e0f0ec2a&from=6068995012&sn=322185b8&aId=01AuLIAggwvv6q8bymRfRsl8NQvpxv4XcbyMun5eSkTzeJ0S4.&deviceIdV1=7cd27e609fa72795&osSdk=23&abver=1528118872689&accessToken=&abt=314_302_297_281_275_269_267_255_253_251_249_241_237_231_229_228_226_217_215_207_203_191_189_187_153_149_143_141_135_128_113_111_65_57_45_37_21_18_16_13&seId=f295bff3b2&imei=864454030661742&deviceModel=Xiaomi__Xiaomi__MI+5s&location=39.963886%2C116.358482&authUid=0&urlSign=1bd4875da4&rand=25',
'http://newsapi.sina.cn/?resource=hbpage&newsId=HB-1-snhs/index-search&lq=1&page=3&newpage=0&keyword=%E4%B9%A0%E8%BF%91%E5%B9%B3&lDid=9777f76f-d895-4801-b7ae-2edaef39eaba&oldChwm=12040_0006&city=WMXX2971&loginType=0&authToken=<PASSWORD>&link=&authGuid=6409396086577890289&ua=Xiaomi-MI+5s__sinanews__6.8.9__android__6.0.1&deviceId=7cd27e609fa72795&connectionType=2&resolution=1080x1920&weiboUid=&mac=02%3A00%3A00%3A00%3A00%3A00&osVersion=6.0.1&chwm=12040_0006&weiboSuid=&andId=fcdf1040e0f0ec2a&from=6068995012&sn=322185b8&aId=01AuLIAggwvv6q8bymRfRsl8NQvpxv4XcbyMun5eSkTzeJ0S4.&deviceIdV1=7cd27e609fa72795&osSdk=23&abver=1528118872689&accessToken=&abt=314_302_297_281_275_269_267_255_253_251_249_241_237_231_229_228_226_217_215_207_203_191_189_187_153_149_143_141_135_128_113_111_65_57_45_37_21_18_16_13&seId=f295bff3b2&imei=864454030661742&deviceModel=Xiaomi__Xiaomi__MI+5s&location=39.963886%2C116.358482&authUid=0&urlSign=1bd4875da4&rand=25',
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
data = json.loads(response.body)
data = data['data']['list']['feed1']
for i in range(len(data)):
title = data[i]['title']
pic = data[i]['image']
url = data[i]['url']
author = data[i]['source']
yield scrapy.Request(url,meta={
'title':title,
'pic':pic,
'author':author
},callback=self.parse_item, dont_filter=True)
def parse_item(self,response):
title = response.meta['title'].replace('<','').replace('em>','').replace('/','')
pic_url = response.meta['pic']
author =response.meta['author']
app_name = '新浪新闻'
describe = ''
category = '要闻'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
home_url = 'http://newsapi.sina.cn/'
content =response.xpath('//article[@class="art_box"]').extract()
contentt = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', contentt)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
try:
publishedDate = response.xpath('//time[@class="art_time"]/text()').extract()[0].replace('.','-')
except:
publishedDate = response.xpath('//time[@class="weibo_time"]').extract()[0]
publishedDate = publishedDate.replace('\t','').replace('\n','').replace('\r','')
publishedDate = re.findall('>(.*?)<', publishedDate)
publishedDated = ''
for i in publishedDate:
publishedDated += i
publishedDate = publishedDated
publishedDate = '2018-' + publishedDate.replace('月','-').replace('日',' ') + ':00'
try:
pic_more_url = response.xpath("//div[@id='wx_pic']/img/@src").extract()[0]
except:
pic_more_url = ''
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
self.count += 1
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
item['count'] = self.count
yield item
<file_sep>#coding=utf-8
import scrapy
import time
import json,re
from news.items import NewsItem
class yidianzixun(scrapy.Spider):
name = 'yidianzixun'
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
Ttime = int(round(time.time() * 1000))
time1 = time.strftime("%Y-%m-%d %H:%M:%S")
time2 = time.strptime(time1, "%Y-%m-%d %H:%M:%S")
time3 = int(time.mktime(time2))
def start_requests(self):
import requests
headers = {
"User-Agent": "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36",
"Host": "www.yidianzixun.com",
"Proxy-Connection": "keep-alive",
"Referer": "http://www.yidianzixun.com/"
}
# cookies ={
# "__guid": "147975473.3220507777170205000.1522825147643.9363",
# "JSESSIONID": "6a5877478c64c1897d198643d5242c4d09ad1ad0060bf25f664e1196e0094558",
# "wuid": "152259561985043",
# "wuid_createAt": "%s"%self.time1,
# "weather_auth": "2",
# "Hm_lvt_15fafbae2b9b11d280c79eff3b840e45": "%s"%self.time3,
# "Hm_lpvt_15fafbae2b9b11d280c79eff3b840e45": "%s"%self.time3,
# "CNZZDATA1255169715": "1404781070-1528763939-http%253A%252F%252Fwww.so.com%252F%7C1528763939",
# "cn_1255169715_dplus": "%7B%22distinct_id%22%3A%20%22163f1ba7075a0-0206ae93d5cb3-5d4e211f-100200-163f1ba7076850%22%2C%22sp%22%3A%20%7B%22%24_sessionid%22%3A%200%2C%22%24_sessionTime%22%3A%201528769010%2C%22%24dp%22%3A%200%2C%22%24_sessionPVTime%22%3A%201528769010%7D%7D",
# "UM_distinctid": "163f1ba7075a0-0206ae93d5cb3-5d4e211f-100200-163f1ba7076850",
# "monitor_count": "2",
# "captcha": "s%3Ab7f2a366cba5cab9fb4f1fd4d18a9f21.yjcDBE7X4llvMo8rnLo5OAvkOE3DWjm6OldniVRU2Ts",
# "sptoken": "Uhoy~U%3B%3AU8%3AU48261efeced332cc9f20413132c69381cad5f53f4bfc428ed84627675ba90e48"
# }
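        # Three cookie dicts, one per channel URL in urlt below; each carries its own captcha/sptoken so
        # the channel feeds can be fetched with requests.get outside Scrapy's normal cookie handling.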
cookies = [
{
"__guid": "147975473.3220507777170205000.1522825147643.9363",
"JSESSIONID": "6a5877478c64c1897d198643d5242c4d09ad1ad0060bf25f664e1196e0094558",
"wuid": "152259561985043",
"wuid_createAt": "%s"%self.time1,
"weather_auth": "2",
"captcha": "s%3A65772dc0d08723dd86b4c515d69b3fac.HHd0I2DHiMQ0tmk64BursHkHNS5%2FAwvZno27bwff2NE",
"Hm_lvt_15fafbae2b9b11d280c79eff3b840e45": "%s"%self.time3,
"Hm_lpvt_15fafbae2b9b11d280c79eff3b840e45": "%s"%self.time3,
"CNZZDATA1255169715": "1404781070-1528763939-http%253A%252F%252Fwww.so.com%252F%7C1528763939",
"cn_1255169715_dplus": "%7B%22distinct_id%22%3A%20%22163f1ba7075a0-0206ae93d5cb3-5d4e211f-100200-163f1ba7076850%22%2C%22sp%22%3A%20%7B%22%24_sessionid%22%3A%200%2C%22%24_sessionTime%22%3A%201528769010%2C%22%24dp%22%3A%200%2C%22%24_sessionPVTime%22%3A%201528769010%7D%7D",
"UM_distinctid": "163f1ba7075a0-0206ae93d5cb3-5d4e211f-100200-163f1ba7076850",
"monitor_count": "2",
"sptoken": "Uhoy~U%3AU%3B%3AU48261efeced332cc9f20413132c69381cad5f53f4bfc428ed84627675ba90e48"
},
{
"__guid": "147975473.3220507777170205000.1522825147643.9363",
"JSESSIONID": "6a5877478c64c1897d198643d5242c4d09ad1ad0060bf25f664e1196e0094558",
"wuid": "152259561985043",
"wuid_createAt": "%s"%self.time1,
"weather_auth": "2",
"captcha": "s%3A98b6fc3c076533eaa1dac72062fb09ce.MbsdYjMxJgyHzlJwv9n1Lz2bpnNVxDkjXpw19PRTgTc",
"Hm_lvt_15fafbae2b9b11d280c79eff3b840e45": "1528768916,1528769011,1528772021,1528772151",
"Hm_lpvt_15fafbae2b9b11d280c79eff3b840e45": "%s"%self.time3,
"CNZZDATA1255169715": "1404781070-1528763939-http%253A%252F%252Fwww.so.com%252F%7C1528766868",
"cn_1255169715_dplus": "%7B%22distinct_id%22%3A%20%22163f1ba7075a0-0206ae93d5cb3-5d4e211f-100200-163f1ba7076850%22%2C%22sp%22%3A%20%7B%22%24_sessionid%22%3A%200%2C%22%24_sessionTime%22%3A%201528772204%2C%22%24dp%22%3A%200%2C%22%24_sessionPVTime%22%3A%201528772204%7D%7D",
"UM_distinctid": "163f1ba7075a0-0206ae93d5cb3-5d4e211f-100200-163f1ba7076850",
"monitor_count": "6",
"sptoken": "<PASSWORD>%3AU%3B%3AU48261efeced332cc9f20413132c69381cad5f53f4bfc428ed84627675ba90e48"
},
{
"__guid": "147975473.3220507777170205000.1522825147643.9363",
"JSESSIONID": "6a5877478c64c1897d198643d5242c4d09ad1ad0060bf25f664e1196e0094558",
"wuid": "152259561985043",
"wuid_createAt": "%s"%self.time1,
"weather_auth": "2",
"captcha": "s%3A59db0a30fa30fdd09d4238aae224e068.t9Ar9fI0XSGYSe7VBAKsaBF7M6RBftHTnkAv3hq0fng",
"Hm_lvt_15fafbae2b9b11d280c79eff3b840e45": "1528768916,1528769011,1528772021,1528772151",
"Hm_lpvt_15fafbae2b9b11d280c79eff3b840e45": "%s"%self.time3,
"CNZZDATA1255169715": "1404781070-1528763939-http%253A%252F%252Fwww.so.com%252F%7C1528772293",
"cn_1255169715_dplus": "%7B%22distinct_id%22%3A%20%22163f1ba7075a0-0206ae93d5cb3-5d4e211f-100200-163f1ba7076850%22%2C%22sp%22%3A%20%7B%22%24_sessionid%22%3A%200%2C%22%24_sessionTime%22%3A%201528772601%2C%22%24dp%22%3A%200%2C%22%24_sessionPVTime%22%3A%201528772601%7D%2C%22%24_sessionid%22%3A%200%2C%22%24_sessionTime%22%3A%201528772720%2C%22%24dp%22%3A%200%2C%22%24_sessionPVTime%22%3A%201528772720%7D",
"UM_distinctid": "163f1ba7075a0-0206ae93d5cb3-5d4e211f-100200-163f1ba7076850",
"monitor_count": "8",
"sptoken":"Ube~U%3AU%3B%3AU48261efeced332cc9f20413132c69381cad5f53f4bfc428ed84627675ba90e48"
}
]
urlt = [
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=best&cstart=0&cend=10&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=%s'%self.Ttime,
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=21388941485&cstart=0&cend=10&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=%s'%self.Ttime,
'http://www.yidianzixun.com/home/q/news_list_for_channel?channel_id=hot&cstart=0&cend=10&infinite=true&refresh=1&__from__=pc&multi=5&appid=web_yidian&_=%s'%self.Ttime,
]
for j in range(0, len(urlt)):
datay = requests.get(urlt[j], cookies=cookies[j], headers=headers)
data = datay.content
data = json.loads(data)
data = data['result']
try:
for i in range(0, len(data)):
title = data[i]['title']
try:
pubTime = data[i]['date']
except:
pubTime = ''
url = data[i]['docid']
try:
pic_url = data[i]['image_urls'][0]
if 'http' not in pic_url:
pic_url = 'http://i1.go2yd.com/image.php?type=thumbnail_336x216&url=' + pic_url
except:
pic_url = ''
try:
describe = data[i]['summary']
except:
describe = ''
try:
category = data[i]['category']
except:
category = '要闻'
url = 'http://www.yidianzixun.com/article/' + url
yield scrapy.Request(url, meta={
'title': title,
'pubTime': pubTime,
'pic_url': pic_url,
'describe': describe,
'category': category,
"home_url":"www.yidianzi.com"
}, callback=self.parse, dont_filter=True)
except:
pass
def parse(self, response):
title = response.meta['title']
publishedDate = response.meta['pubTime']
pic_url = response.meta['pic_url']
describe = response.meta['describe']
category = response.meta['category']
home_url = response.meta['home_url']
app_name = '一点资讯'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
try:
try:
try:
content = response.xpath('//div[@id="imedia-article"]').extract()
contentt = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', contentt)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
except:
content = response.xpath('//div[@class="content-bd"]').extract()
contentt = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', contentt)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
pic_more_url = re.findall('<imgsrc="(.*?)"', contentt)
pic_more_url1 = []
for i in range(0, len(pic_more_url)):
if 'http' not in pic_more_url[i]:
pic_more_urlt = 'http:' + pic_more_url[i]
pic_more_url1.append(pic_more_urlt)
else:
pic_more_url1.append(pic_more_url[i])
pic_more_url = str(set(pic_more_url1))
except:
content = response.xpath('//div[@class="video-wrapper"]').extract()
contentt = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('<videosrc="(.*?)"', contentt)[0]
pic_more_url = ''
except:
content = response.xpath('//p').extract()
contentt = ''
for i in range(0, len(content)):
contentt += content[i].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
t = re.findall('>(.*?)<', contentt)
contenttt = ''
for i in range(0, len(t)):
contenttt += t[i]
content = contenttt
pic_more_url = ''
try:
author = response.xpath('//a[@class="doc-source"]/text()').extract()[0]
except:
author = ''
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M:%S")
timenum = int(time.mktime(timeArray))
if timenum >= self.timeStamp:
self.count += 1
item['count'] = self.count
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(timenum)))
item['publishedDate'] = publishedDate
yield item
<file_sep># redis-mongo
This project uses Redis to store URLs.
MongoDB is used as the store for the detailed article data.
The news project targets the apps of well-known Chinese and international news organizations. Most of the apps were reverse-engineered: traffic is captured with Charles, the API URLs are worked out from the captures, and the crawling itself is done with the Scrapy framework.
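
Below is a minimal sketch of the storage layer described above, assuming a local Redis and MongoDB instance; the class, key, and collection names (`MongoRedisPipeline`, `news_urls`, `news_db.articles`) are illustrative and not necessarily the ones used by this project's pipelines:

```python
# -*- coding: utf-8 -*-
# Hypothetical item pipeline: de-duplicate article URLs in a Redis set, store items in MongoDB.
import pymongo
import redis

class MongoRedisPipeline(object):
    def open_spider(self, spider):
        self.r = redis.StrictRedis(host='127.0.0.1', port=6379, db=0)
        self.client = pymongo.MongoClient('127.0.0.1', 27017)
        self.collection = self.client['news_db']['articles']

    def process_item(self, item, spider):
        # sadd returns 0 when the url is already in the set, so duplicates are skipped
        if self.r.sadd('news_urls', item['url']):
            self.collection.insert_one(dict(item))
        return item

    def close_spider(self, spider):
        self.client.close()
```

To enable such a pipeline it would be registered under `ITEM_PIPELINES` in the project's `settings.py`.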
<file_sep>#coding=utf-8
import time,re,json
from news.items import NewsItem
import scrapy
class nbc(scrapy.Spider):
name = 'nbcnews'
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def start_requests(self):
category = ['us-news', 'world', 'politics', 'investigations', 'lifestyle', 'business',
'pop-culture', 'science', 'tech', 'health', 'think', 'better', 'mach',
                    'weather', 'sports', 'latino', 'asian-america', 'nbcblk', 'nbc-out', 'nightly-news',
'meet-the-press', 'dateline'
]
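        # Each section slug is substituted twice into the feed endpoint (slug= and _devicefeed_=).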
for i in range(len(category)):
url = 'http://devicestransform-stg.elasticbeanstalk.com/portal/taxonomy/dreamypal?type=section/news&asset=android_adaptive&slug=%s&_devicefeed_=%s'%(category[i], category[i])
yield scrapy.Request(url, meta={
'category': category[i]
}, callback=self.parse_item)
def parse_item(self, response):
category = response.meta['category']
app_name = 'NBC'
data = json.loads(response.body)
data = data['entries']
for i in range(len(data)):
url = data[i]['id']
pubt = data[i]['published']
try:
content = data[i]['content']
except:
content = ''
yield scrapy.Request(url,meta={
'pubt': pubt,
'category': category,
'app_name': app_name,
'content':content
}, callback=self.parse_one)
def parse_one(self, response):
category = response.meta['category']
app_name = response.meta['app_name']
publishedDate = response.meta['pubt']
publishedDate = publishedDate.replace('T',' ').replace('Z','')
pic_url = ''
author = ''
describe = ''
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
home_url = 'http://devicestransform-stg.elasticbeanstalk.com/'
title = response.xpath('//h1/text()').extract()[0]
pic_more_url = ''
content = response.meta['content']
if content == "":
content = response.xpath('//p').extract()
contentt = ''
for i in range(len(content)):
contentt += content[i]
content = contentt.replace('<p>', '').replace('</p>', '').replace('\t', '')
content = content.replace('\n', '').replace('\r', '')
content = re.findall('>(.*?)<', content)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M:%S")
timenum = int(time.mktime(timeArray))
if timenum >= self.timeStamp:
self.count += 1
item['count'] = self.count
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(timenum)))
item['publishedDate'] = publishedDate
yield item<file_sep>#coding=utf-8
import scrapy
import time
import re
from news.items import NewsItem
class djy(scrapy.Spider):
name = 'dajiyuan'
allowed_domains = ['epochtimes.com']
start_urls = [
'http://www.epochtimes.com/gb/n24hr.htm', #即时
'http://www.epochtimes.com/gb/nsc413_2.htm', #要闻
'http://www.epochtimes.com/gb/nf4830.htm', #神韵
'http://www.epochtimes.com/gb/nsc1025.htm', #评论
'http://www.epochtimes.com/gb/ncid277.htm', #中国
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
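    # timeStamp is midnight today; parse_item yields only articles published at or after it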
def parse(self, response):
links = response.xpath('//div[@class="posts column"]/div[1]/a/@href').extract()
title = response.xpath('//div[@class="posts column"]/div[1]/a/text()').extract()
pic_url = response.xpath('//div[@class="posts column"]/div[2]/a/img/@data-src').extract()
summary = response.xpath('//div[@class="content"]/text()').extract()
if len(links) > 0:
for i in range(0, len(links)):
try:
summary1 = summary[i].replace('\t','').replace('\n','').replace('\r','')
except:
summary1 = ''
try:
pic_url1 = pic_url[i]
except:
pic_url1 = ''
url = links[i].replace('\t','').replace('\n','').replace('\r','')
yield scrapy.Request(url, meta={
'title': title[i].replace('\t','').replace('\n','').replace('\r',''),
'summary': summary1,
'home_url': response.url,
'pic_url': pic_url1
}, callback=self.parse_item, dont_filter=True)
else:
links = response.xpath('//div[@class="newyork"]/ul[1]/li/a/@href').extract()
title = response.xpath('//div[@class="newyork"]/ul[1]/li/a/text()').extract()
pic_url = ''
summary = ''
for i in range(0,len(links)):
url = links[i]
yield scrapy.Request(url, meta={
'title': title[i].replace('\t', '').replace('\n', '').replace('\r', ''),
'summary': summary,
'home_url': response.url,
'pic_url': pic_url
}, callback=self.parse_item, dont_filter=True)
def parse_item(self,response):
title = response.meta['title']
describe = response.meta['summary']
home_url = response.meta['home_url']
app_name = '大纪元'
pic_url = response.meta['pic_url']
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
try:
try:
pubTime = response.xpath('//div[@class="mbottom10 large-12 medium-12 small-12 columns"]/time/text()').extract()[0]
pubTime = pubTime.replace(u'更新: ', '').replace('PM', '').replace('AM', '').replace('\t', '').replace('\n', '').replace('\r', '')
except:
pubTime = response.xpath('//div[@ class="art-head"]/span/text()').extract()
pubTime = pubTime[0].split(' ')[0].replace(u'年', '-').replace(u'月', '-').replace(u'日', '')
except:
pubTime = time.strftime("%Y-%m-%d") + ' 00:00'
content = response.xpath('//p').extract()
contentdata = ''
for i in range(0, len(content)):
contentdata += content[i]
content = re.findall('>(.*?)<', contentdata)
contentdata = ''
for i in range(0, len(content)):
contentdata += content[i]
content = contentdata
if 'n24hr' in home_url:
category = u'即时'.encode('utf-8')
elif 'nsc413' in home_url:
category = u'要闻'.encode('utf-8')
elif 'nsc1025' in home_url:
category = u'评论'.encode('utf-8')
elif 'ncid277' in home_url:
category = u'中国'.encode('utf-8')
else:
category = u'神韵'.encode('utf-8')
author = ''
pic_more_url = ''
publishedDate = pubTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
t = pubTime
try:
t1 = pubTime.split(' ')[0]
t2 = pubTime.split(' ')[1]
t = t1 + ' ' + t2
timeArray = time.strptime(t, "%Y-%m-%d %H:%M")
except:
t = pubTime.split(' ')[0]
timeArray = time.strptime(t, "%Y-%m-%d")
publishedDate = time.mktime(timeArray)
if publishedDate >= self.timeStamp:
self.count = self.count + 1
item['count'] = self.count
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(publishedDate)))
item['publishedDate'] = publishedDate
yield item
<file_sep>#coding=utf-8
import scrapy
import time
import json
import re
from news.items import NewsItem
from news.DataResource import TransportData
class dwtw(scrapy.Spider):
name='dongwang'
allowed_domains=['hk.on.cc']
start_urls=[
'http://hk.on.cc/cn/news/index.html',
'http://hk.on.cc/int/news/index.html',
'http://hk.on.cc/tw/news/index.html',
'http://hk.on.cc/hk/news/index.html'
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
base_url='http://hk.on.cc'
def parse(self, response):
links = response.xpath('//div[@class="focus clearfix"]/div[1]/a/@href').extract()
pic_url = response.xpath('//div[@class="focus clearfix"]/div[1]/a/img/@src').extract()
        # process the focus-block links when this page layout provides them
        if len(links) > 0:
for i in range(0, len(links)):
url=self.base_url + links[i]
try:
pic_url1 = self.base_url + pic_url[i]
except:
pic_url1 = ''
yield scrapy.Request(url, meta={
'pic_url':pic_url1,
'home_url':response.url
}, callback=self.parse_item)
links = response.xpath('//div[@class="focusItem"]/a/@href').extract()
pic_url = response.xpath('//div[@class="focusItem"]/a/div/img/@src').extract()
for i in range(0, len(links)):
url = self.base_url + links[i]
try:
pic_url1 = self.base_url + pic_url[i]
except:
pic_url1 = ''
yield scrapy.Request(url, meta={
'pic_url': pic_url1,
'home_url': response.url
}, callback=self.parse_item)
def parse_item(self,response):
title = response.xpath('//h1/text()').extract()[0]
pic_url=response.meta['pic_url']
pubTime=response.xpath('//span[@class="datetime"]/text()').extract()[0]
pubTime=pubTime.replace(u'年','-').replace(u'月','-').replace(' ','').replace(u'日',' ')
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
home_url=response.meta['home_url']
app_name='东网'
describe = ''
content=response.xpath('//div[@class="breakingNewsContent"]').extract()
content = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', content)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
if 'tw' in home_url:
category = u'台湾新闻'.encode('utf-8')
elif 'cn' in home_url:
category = u'大陆新闻'.encode('utf-8')
elif 'int' in home_url:
category = u'国际新闻'.encode('utf-8')
else:
category = u'港澳新闻'.encode('utf-8')
author=''
pic_more_url=response.xpath('//div[@class="photo"]/img/@src').extract()
pic_more_url1=''
for i in range(0,len(pic_more_url)):
pic_more_url1+=self.base_url + pic_more_url[i] +';'
pic_more_url=pic_more_url1
publishedDate = pubTime
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
acceptable_title = []
print pubTime
timeArray = time.strptime(pubTime, "%Y-%m-%d %H:%M")
publishedDate = time.mktime(timeArray)
if publishedDate >= self.timeStamp:
acceptable_title.append(title)
self.count += 1
item['count'] = self.count
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(publishedDate)))
item['publishedDate'] = publishedDate
yield item<file_sep>#encoding:utf-8
'''Run the selected spider'''
from scrapy.cmdline import execute
# execute("scrapy crawl tencent".split())
execute("scrapy crawl redis".split())<file_sep>#-*- coding: utf-8 -*-
import time
import re
import sys
import scrapy
from news.items import NewsItem
reload(sys)
sys.setdefaultencoding('utf8')
class zhangshangliuyuan(scrapy.Spider):
name = 'zhangshangliuyuan'
start_urls = [
'http://site.6parker.com/finance/index.php?app=forum&act=cachepage&cp=tree1',#经济观察
'http://news.6parker.com/newspark/index.php?p=1',#实时新闻
'http://news.6parker.com/newspark/index.php?type=8',#历史
'http://news.6parker.com/newspark/index.php?type=2',
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime('2018-06-10', "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
links = response.xpath('//div[@id="d_list"]/ul/li/a[1]/@href').extract()
title = response.xpath('//div[@id="d_list"]/ul/li/a[1]/text()').extract()
pubTime = response.xpath('//div[@id="d_list"]/ul/li/i/text()').extract()
for i in range(0,len(links)):
url = links[i]
if 'http' not in url:
url = 'http://site.6parker.com/finance/' + url
tit = title[i]
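            # the list page shows dates as MM/DD/YY; rebuild them as YYYY-MM-DD below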
pub = pubTime[i].split('/')
t1 = pub[0]
t2 = pub[1]
t3 = pub[2]
publishedDate = '20'+t3 + '-' + t1 + '-' + t2
timeArray = time.strptime(publishedDate, "%Y-%m-%d")
publishedDa = time.mktime(timeArray)
if publishedDa >= self.timeStamp:
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(publishedDa)))
yield scrapy.Request(url,meta={
'title':tit,
'home_url':response.url,
'publishedDate':publishedDate
},callback=self.parse_item,dont_filter=True)
def parse_item(self,response):
title = response.meta['title']
home_url = response.meta['home_url']
publishedDate = response.meta['publishedDate']
app_name = u'掌上留园'.encode('utf-8')
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
describe = ''
author =''
pic_url = ''
if 'tree1' in home_url:
category = u'经济观察'.encode('utf-8')
elif 'type=2' in home_url:
category = u'财经'.encode('utf-8')
elif 'type=8' in home_url:
category = u'历史'.encode('utf-8')
else:
category = u'实时新闻'.encode('utf-8')
try:
content = response.xpath('//div[@id="mainContent"]').extract()
contentt = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '').replace(' ', '')
content = re.findall('>(.*?)<', contentt)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
pic_more_url = re.findall('<img(.*?)src="(.*?)"', contentt)
pic_more_url1 = []
if len(pic_more_url) > 0:
for i in range(0, len(pic_more_url)):
if '.js' not in pic_more_url[i][1]:
pic_more_url1.append(pic_more_url[i][1])
pic_more_url = str(set(pic_more_url1))
else:
pic_more_url = ''
except:
content = response.xpath('//p/text()').extract()
contentdata = ''
for i in content:
contentdata += i
content = contentdata
pic_more_url = ''
self.count = self.count + 1
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['count'] = self.count
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
yield item
<file_sep>#coding=utf-8
import scrapy
import json, re, time
from news.items import NewsItem
class huaerjie(scrapy.Spider):
name = 'huaerjieyg'
allow_domains = [
'https://www.wsj.com/'
]
start_urls = [
'https://www.wsj.com/',
'https://www.wsj.com/news/world',
'https://www.wsj.com/news/us',
'https://www.wsj.com/news/politics',
'https://www.wsj.com/news/economy',
'https://www.wsj.com/news/business',
'https://www.wsj.com/news/technology',
'https://www.wsj.com/news/markets',
'https://www.wsj.com/news/opinion'
]
def parse(self, response):
links = re.findall('href="https://www.wsj.com/articles/(.*?)"', response.body)
for i in range(len(links)):
url = 'https://www.wsj.com/articles/' + links[i]
yield scrapy.Request(url, meta={
'home_url': response.url
}, callback=self.parse_item)
def parse_item(self, response):
title = response.xpath('//h1/text()').extract()[0]
publishedDate = response.xpath('//time/text()').extract()[0]
pic_url = ''
describe = ''
app_name = '华尔街日报英文网'
home_url = 'https://www.wsj.com/'
author = ''
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
print "app名称", app_name
print "主图片url", pic_url
# print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
# print "所属类型", category
print "标题", title
print "描述", describe
# print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
<file_sep>#coding=utf-8
import scrapy
import time
import json
import re
from news.items import NewsItem
from news.DataResource import TransportData
class nanfangzhoumo(scrapy.Spider):
name = 'nanfangzhoumo'
start_urls = [
'http://www.infzm.com/mobile/get_list_by_cat_ids?count=10&platform=android&version=5.4.2&start=0&cat_id%5B%5D=5282&hash=1be58c23b2598f9e7448f1312da25073',#推荐
'http://www.infzm.com/mobile/get_list_by_cat_ids?count=10&platform=android&version=5.4.2&start=1&cat_id%5B%5D=5282&hash=1be58c23b2598f9e7448f1312da25073',#推荐
'http://www.infzm.com/mobile/get_list_by_cat_ids?count=10&platform=android&version=5.4.2&start=2&cat_id%5B%5D=5282&hash=1be58c23b2598f9e7448f1312da25073',#推荐
'http://www.infzm.com/mobile/get_list_by_cat_ids?count=10&platform=android&version=5.4.2&start=3&cat_id%5B%5D=5282&hash=1be58c23b2598f9e7448f1312da25073',#推荐
'http://www.infzm.com/mobile/get_list_by_cat_ids?count=10&platform=android&version=5.4.2&start=0&cat_id%5B%5D=5269&hash=1be58c23b2598f9e7448f1312da25073',#时局
'http://www.infzm.com/mobile/get_list_by_cat_ids?count=10&platform=android&version=5.4.2&start=1&cat_id%5B%5D=5269&hash=1be58c23b2598f9e7448f1312da25073',#时局
'http://www.infzm.com/mobile/get_list_by_cat_ids?count=10&platform=android&version=5.4.2&start=2&cat_id%5B%5D=5269&hash=1be58c23b2598f9e7448f1312da25073',#时局
'http://www.infzm.com/mobile/get_list_by_cat_ids?count=10&platform=android&version=5.4.2&start=3&cat_id%5B%5D=5269&hash=1be58c23b2598f9e7448f1312da25073',#时局
'http://www.infzm.com/mobile/get_list_by_cat_ids?count=10&platform=android&version=5.4.2&start=0&cat_id%5B%5D=5274&hash=1be58c23b2598f9e7448f1312da25073',#经济
'http://www.infzm.com/mobile/get_list_by_cat_ids?count=10&platform=android&version=5.4.2&start=1&cat_id%5B%5D=5274&hash=1be58c23b2598f9e7448f1312da25073',#经济
'http://www.infzm.com/mobile/get_list_by_cat_ids?count=10&platform=android&version=5.4.2&start=2&cat_id%5B%5D=5274&hash=1be58c23b2598f9e7448f1312da25073',#经济
'http://www.infzm.com/mobile/get_list_by_cat_ids?count=10&platform=android&version=5.4.2&start=3&cat_id%5B%5D=5274&hash=1be58c23b2598f9e7448f1312da25073',#经济
'http://www.infzm.com/mobile/get_list_by_cat_ids?count=10&platform=android&version=5.4.2&start=0&cat_id%5B%5D=5271&hash=1be58c23b2598f9e7448f1312da25073',#防务
'http://www.infzm.com/mobile/get_list_by_cat_ids?count=10&platform=android&version=5.4.2&start=1&cat_id%5B%5D=5271&hash=1be58c23b2598f9e7448f1312da25073',#防务
'http://www.infzm.com/mobile/get_list_by_cat_ids?count=10&platform=android&version=5.4.2&start=2&cat_id%5B%5D=5271&hash=1be58c23b2598f9e7448f1312da25073',#防务
'http://www.infzm.com/mobile/get_list_by_cat_ids?count=10&platform=android&version=5.4.2&start=3&cat_id%5B%5D=5271&hash=1be58c23b2598f9e7448f1312da25073',#防务
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
title = response.xpath('//item/@subject').extract()
published = response.xpath('//item/@publish_time').extract()
url = response.xpath('//snsShare/@url').extract()
describe = response.xpath('//introtext').extract()
category = response.xpath('//item/@source').extract()
for i in range(0,len(url)):
tit = title[i]
pub = published[i]
urlt = url[i]
try:
                # 'Hi,南周' and '作品上架' are in-app notices rather than real sources, so map them to '推荐'.
                # Compare against unicode (xpath results are unicode) and never overwrite the category
                # list itself, otherwise later iterations would index into a string instead of the list.
                if category[i] == u'Hi,南周' or category[i] == u'作品上架':
                    cate = u'推荐'.encode('utf-8')
                else:
                    cate = category[i]
except:
cate = u'推荐'.encode('utf-8')
desc = describe[i].replace('</introtext>','').replace('<introtext>','')
yield scrapy.Request(urlt,meta={
'title':tit,
'publish':pub,
'describe':desc,
'category':cate,
'home_url':response.url
},callback=self.parse_item)
def parse_item(self,response):
title = response.meta['title']
describe =response.meta['describe']
home_url = response.meta['home_url']
app_name = '南方周末'
pic_url = ''
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
category = response.meta['category']
publishedDate = response.xpath('//em[@class="pubTime"]').extract()
publishedDate = publishedDate[0].replace('\t','').replace('\n','').replace('\r','').replace('<em class="pubTime">','').replace('</em>','').replace(u'最后更新:','')
publishedDate = publishedDate.replace(' ','').replace(' ','')
content = response.xpath('//section[@id="articleContent"]').extract()
contentt = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', contentt)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
author = ''
pic_more_url = re.findall('<img(.*?)src="(.*?)"', contentt)
pic_more_url1 = []
for i in range(0, len(pic_more_url)):
if 'http' not in pic_more_url[i][1]:
pic_more_urlt = 'http:' + pic_more_url[i][1]
pic_more_url1.append(pic_more_urlt)
else:
pic_more_url1.append(pic_more_url[i][1])
pic_more_url = str(set(pic_more_url1))
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['count'] = self.count
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M:%S")
timenum = int(time.mktime(timeArray))
# existing_title = TransportData.getData('app_nanfangzhoumo', title)
# # 符合要求,可以入库的title list
# if existing_title:
# return
# else:
if timenum >= self.timeStamp:
self.count += 1
item['count'] = self.count
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(timenum)))
item['publishedDate'] = publishedDate
yield item
<file_sep>#coding=utf-8
import scrapy
import time
import json
import re
from news.items import NewsItem
from news.DataResource import TransportData
class wxxw(scrapy.Spider):
name='wuxianxinwen'
allowed_domains = ["tvb.com"]
start_urls=[
'http://news.tvb.com/list/focus/', #要闻
'http://news.tvb.com/list/instant/',#快讯
'http://news.tvb.com/list/local/', #港澳
'http://news.tvb.com/list/greaterchina/',#两岸
'http://news.tvb.com/list/world/', #国际
'http://news.tvb.com/list/finance/',#财经
]
base_url='http://news.tvb.com/'
count = 0
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
home_url = response.url
Url=[]
title=[]
links_url=response.xpath('//a[@class="title thumb"]/@href').extract()
linke_title=response.xpath('//a[@class="title thumb"]/text()').extract()
links_url1=response.xpath('//a[@class="title"]/@href').extract()
linke_title1 =response.xpath('//a[@class="title"]/text()').extract()
for i in range(0,len(links_url)):
url=self.base_url+links_url[i]
Url.append(url)
title.append(linke_title[i])
for i in range(0,len(links_url1)):
url=self.base_url+links_url1[i]
Url.append(url)
title.append(linke_title1[i])
for i in range(0,len(Url)):
yield scrapy.Request(Url[i], meta={
'home_url':home_url,
'title':title[i]
}, callback=self.parse_item)
def parse_item(self,response):
home_url=response.meta['home_url']
publishtime =response.xpath('//span[@class = "time"]/text()').extract()
content=response.xpath('//div[@id = "c1_afterplayer"]').extract()
content=content[0].replace('\t','').replace('\n','').replace('\r','').replace(' ','')
content=re.findall('>(.*?)<',content)
contentdata=''
for i in content:
contentdata+=i
content=contentdata
title=response.meta['title'].replace('\t','').replace('\n','').replace('\r','').replace(' ','')
app_name='无线新闻'
pic_url='http://img.tvb.com/inews_web/web/generic_thumbnail.jpg'
pic_more_url=''
author=''
pubTime=publishtime[0].replace(' ','')
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
describe=''
if 'greaterchina' in response.url:
category=u'两岸'.encode('utf-8')
elif 'world' in response.url:
category = u'国际'.encode('utf-8')
elif 'finance' in response.url:
category= u'财经'.encode('utf-8')
elif 'sports' in response.url:
category = u'体育'.encode('utf-8')
elif 'parliament' in response.url:
category = u'法庭'.encode('utf-8')
elif 'focus' in response.url:
category = u'要闻'.encode('utf-8')
elif 'instant' in response.url:
category = u'快讯'.encode('utf-8')
elif 'programmes' in response.url:
category = u'专题栏目'.encode('utf-8')
elif 'local' in response.url:
category = u'港澳'.encode('utf-8')
else:
category = u'首页'.encode('utf-8')
publishedDate = pubTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
try:
timeArray = time.strptime(pubTime, "%Y-%m-%d %H:%M")
except:
t = pubTime.split(' ')[0]
timeArray = time.strptime(t, "%Y-%m-%d")
publishedDate = time.mktime(timeArray)
if publishedDate >= self.timeStamp:
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(publishedDate)))
item['publishedDate'] = publishedDate
self.count = self.count + 1
item['count'] = self.count
yield item
<file_sep>alembic==0.9.2
amqp==1.4.9
anyjson==0.3.3
appdirs==1.4.3
APScheduler==3.3.1
arrow==0.10.0
asn1crypto==0.22.0
attrs==17.2.0
Automat==0.6.0
autopep8==1.3.1
autopy==0.51
backports-abc==0.5
backports.shutil-get-terminal-size==1.0.0
beautifulsoup4==4.6.0
billiard==3.3.0.23
bleach==2.0.0
blinker==1.4
BloomFilter==0.1.0
boto==2.48.0
bz2file==0.98
celery==3.1.25
celery-with-redis==3.0
certifi==2017.4.17
cffi==1.10.0
chardet==3.0.4
click==6.7
CodeConvert==2.0.4
Cola==0.1.0
colorama==0.3.9
config==0.3.9
constantly==15.1.0
contextlib2==0.5.5
coverage==4.4.1
cryptography==2.0.2
csc-pysparse==1.1.1.4
csc-utils==0.6.7
cssselect==1.0.1
cycler==0.10.0
dateparser==0.6.0
decorator==4.1.2
defusedxml==0.5.0
demjson==2.2.4
Django==1.8
django-bootstrap-toolkit==2.15.0
django-filter==1.0.1
djangorestframework==3.5.4
djangorestframework-jwt==1.10.0
docutils==0.13.1
dominate==2.3.1
enum34==1.1.6
Flask==0.10.1
Flask-API==0.6.2
Flask-Bootstrap==3.3.7.1
Flask-HTTPAuth==3.2.2
Flask-Login==0.4.0
Flask-Mail==0.9.1
Flask-Migrate==2.0.3
Flask-Moment==0.5.1
flask-msearch==0.1.3
Flask-OpenID==1.2.5
Flask-PageDown==0.2.2
Flask-Script==2.0.5
Flask-SQLAlchemy==2.2
Flask-WhooshAlchemy==0.56
Flask-WTF==0.14.2
funcsigs==1.0.2
functools32==3.2.3.post2
futures==3.1.1
gensim==2.2.0
gevent==1.2.1
greenlet==0.4.12
grequests==0.3.0
gunicorn==19.1.1
h2==3.0.1
helper==2.4.2
hiredis==0.2.0
hpack==3.0.0
html==1.16
html5lib==0.999999999
hyperframe==5.1.0
hyperlink==17.3.0
idna==2.5
import==0.1.0
incremental==17.5.0
ipaddress==1.0.18
ipython==5.3.0
ipython-genutils==0.2.0
itsdangerous==0.24
jdatetime==1.8.2
jieba==0.38
Jinja2==2.9.6
js2xml==0.2.2
kafka-python==1.3.3
kazoo==2.2.1
kombu==3.0.37
lxml==3.7.3
Mako==1.0.6
Markdown==2.6.8
MarkupSafe==1.0
matplotlib==2.0.2
mechanize==0.3.3
mock==2.0.0
mongoengine==0.13.0
mysql==0.0.1
mysql-connector-python==2.1.6
MySQL-python==1.2.3
networkx==1.11
nltk==3.2.4
numpy==1.13.1+mkl
olefile==0.44
packaging==16.8
pandas==0.19.2
parsel==1.1.0
passlib==1.7.1
pathlib2==2.2.1
pbr==3.0.1
peewee==2.10.1
pickleshare==0.7.4
Pillow==4.1.1
ping==0.2
ply==3.10
priority==1.3.0
prompt-toolkit==1.0.14
pyasn1==0.3.1
pyasn1-modules==0.0.10
pycodestyle==2.3.1
pycparser==2.18
pycurl==7.43.0
PyDispatcher==2.0.5
Pygments==2.2.0
PyJWT==1.5.2
pykafka==2.6.0
pymongo==3.4.0
PyMySQL==0.7.11
pyodbc==4.0.16
pyOpenSSL==17.2.0
pyparsing==2.2.0
pypiwin32==219
pyquery==1.2.17
pyrecsys==0.0.3
pyserial==3.4
pyspider==0.3.9
pytagcloud==0.3.5
pytesseract==0.1.6
python-cjson==1.2.1
python-dateutil==2.6.0
python-editor==1.0.3
python-etl==1.1
python-memcached==1.58
python-openid==2.2.5
python-recsys==0.2
pytz==2017.2
pywin32==221
queuelib==1.4.2
raven==6.1.0
readability-lxml==0.6.2
recsys==0.0.4
redis==2.10.5
redis-py-cluster==1.3.4
redisearch==0.6.1
regex==2017.4.29
requests==2.18.1
requests-oauth==0.4.1
retrying==1.3.3
rmtest==0.4.0
rq==0.8.0
rsa==3.4.2
ruamel.ordereddict==0.4.9
ruamel.yaml==0.14.12
scandir==1.5
scikit-learn==0.18.2
scikit-surprise==1.0.3
scipy==0.19.1
Scrapy==1.3.3
scrapy-crawlera==1.2.2
scrapy-proxies==0.3
scrapy-random-useragent==0.2
scrapy-redis==0.6.8
scrapy-splash==0.7.2
scrapyd==1.2.0
scrapyd-client==1.1.0
scrapyjs==0.2
selenium==3.4.2
service-identity==17.0.0
sha1==1.2
simplegeneric==0.8.1
singledispatch==3.4.0.3
six==1.10.0
sklearn==0.0
slimit==0.8.1
smart-open==1.5.3
SOAPpy==0.12.22
spynner==2.19
SQLAlchemy==1.1.4
tabulate==0.7.7
tblib==1.3.2
tornado==4.5.1
tqdm==4.11.2
traitlets==4.3.2
Twisted==17.5.0
typing==3.6.1
tzlocal==1.4
u-msgpack-python==2.4.1
umalqurra==0.2
urllib3==1.21.1
utils==0.9.0
vine==1.1.3
virtualenv==15.1.0
visitor==0.1.3
vulpo==1.2.1
w3lib==1.17.0
wcwidth==0.1.7
webencodings==0.5.1
Werkzeug==0.12.2
Whoosh==2.7.4
win-unicode-console==0.5
wrapcache==1.0.8
WsgiDAV==2.2.2
wstools==0.4.5
WTForms==2.1
xlwt==1.2.0
zope.interface==4.4.2
<file_sep>#!/usr/bin/env python
# -*- coding:utf-8 -*-
#decorator function
def zh(sh): # receives the function being decorated
    def zhshi(): # wrapper that adds behaviour around the decorated function
        print("do something before the decorated function runs")
        r = sh() # call the decorated function that was passed in
        print("do something after the decorated function runs")
        return r # hand the decorated function's return value back out of the wrapper
    return zhshi # return the wrapper so it replaces the decorated function
@zh # @zh runs the decorator with f1 as its argument and rebinds the name f1 to the returned wrapper
#the decorated function
def f1():
    print("body of the decorated function f1")
#call f1
f1()
# do something before the decorated function runs
# body of the decorated function f1
# do something after the decorated function runs<file_sep>#coding=utf-8
import re
import json
import scrapy,time
from news.items import NewsItem
class lcnews(scrapy.Spider):
name = 'lichangxinwen'
start_urls = [
        'http://www.thestand.news/politics/?page=1',#politics
        'http://www.thestand.news/international/?page=1',#international
        'http://www.thestand.news/finance/?page=1',#finance
        'http://www.thestand.news/%E5%8F%B0%E7%81%A3/?page=1',#Taiwan
        'http://www.thestand.news/china/?page=1',#China
        'http://www.thestand.news/%E6%BE%B3%E9%96%80/?page=1',#Macau
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
base_url = 'http://www.thestand.news'
def parse(self, response):
try:
title = response.xpath('//div[@clas="news-articles"]/div[1]/div/h3/a/text()').extract()
links = response.xpath('//div[@clas="news-articles"]/div[1]/div/h3/a/@href').extract()
pubti = response.xpath('//div[@clas="news-articles"]/div[1]/div/p[1]/span[2]/text()').extract()
pic_u = response.xpath('//div[@clas="news-articles"]/div[1]/div/div/a/img/@src').extract()
desc = response.xpath('//div[@clas="news-articles"]/div[1]/div/p[2]/text()').extract()
accept_title = []
for i in range(0,len(title)):
titl = title[i].replace('\t','').replace('\n','').replace('\r','').replace(' ','')
url = self.base_url + links[i]
if 'http' not in pic_u[i]:
pic_url = 'http:' + pic_u[i]
else:
pic_url = pic_u[i]
describe = desc[i].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
publishedDate = pubti[i].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '').replace(
' — ', ' ').replace('/', '-').replace('—',' ')
print publishedDate
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M")
publishedDa = int(time.mktime(timeArray))
if publishedDa >= self.timeStamp:
accept_title.append(titl)
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(publishedDa)))
yield scrapy.Request(url,meta={
'title':titl,
'pic_url':pic_url,
'publishedDate':publishedDate,
'describe':describe,
'home_url':response.url
},callback=self.parse_item,dont_filter=True)
except:
title = response.xpath('//div[@class="articles-wrap"]/div/div[2]/h3/a/text()').extract()
links = response.xpath('//div[@class="articles-wrap"]/div/div[2]/h3/a/@href').extract()
pic_u = response.xpath('//div[@class="articles-wrap"]/div/div[1]/a/img/@src').extract()
pubti = response.xpath('//div[@class="articles-wrap"]/div/div[2]/p[1]/span[2]/text()').extract()
desc = response.xpath('//div[@class="articles-wrap"]/div/div[2]/p[2]/text()').extract()
accept_title = []
for i in range(0, len(title)):
titl = title[i].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
url = self.base_url + links[i]
if 'http' not in pic_u[i]:
pic_url = 'http:' + pic_u[i]
else:
pic_url = pic_u[i]
describe = desc[i].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
publishedDate = pubti[i].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '').replace(
' — ', ' ').replace('/', '-').replace('—',' ')
print publishedDate
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M")
publishedDa = int(time.mktime(timeArray))
if publishedDa >= self.timeStamp:
accept_title.append(titl)
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(publishedDa)))
yield scrapy.Request(url, meta={
'title': titl,
'pic_url': pic_url,
'publishedDate': publishedDate,
'describe': describe,
'home_url': response.url
}, callback=self.parse_item)
if len(accept_title) == 20:
num = str(response.url).split('page=')[1]
num1 = int(num) + 1
url =str(response.url).replace( 'page='+ num , 'page=' + str(num1))
yield scrapy.Request(url,callback=self.parse)
def parse_item(self,response):
title = response.meta['title']
pic_url = response.meta['pic_url']
publishedDate =response.meta['publishedDate']
describe = response.meta['describe']
home_url = response.meta['home_url']
app_name = '立场新闻'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
content = response.xpath('//div[@class="article-content"]').extract()
contentt = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '').replace(' ', '')
content = re.findall('>(.*?)<', contentt)
contentdata = ''
for i in range(0, len(content)):
contentdata += content[i]
content = contentdata
try:
pic_more_url = response.xpath('//div[@class="article-photo article-media"]/a/img/@src').extract()[0]
except:
pic_more_url = ''
author = ''
if '%E6%BE%B3%E9%96%80' in home_url:
category = u'澳门'.encode('utf-8')
elif 'china' in home_url:
category = u'中国'.encode('utf-8')
elif 'politics' in home_url:
category = u'政治'.encode('utf-8')
elif 'international' in home_url:
category = u'国际'.encode('utf-8')
elif 'finance' in home_url:
category = u'财经'.encode('utf-8')
elif '%E5%8F%B0%E7%81%A3' in home_url:
category = u'台湾'.encode('utf-8')
else:
category = u'首页'.encode('utf-8')
self.count = self.count + 1
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['count'] = self.count
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
yield item
<file_sep>#coding=utf-8
import json,re,time
import scrapy
from news.items import NewsItem
from news.DataResource import TransportData
class zgjw(scrapy.Spider):
name='zhongguojinwen'
start_urls=[
        'http://feeds.feedburner.com/bnews-guoji', #international news
        'http://feeds.feedburner.com/kzgnews', #China news
        'http://feeds.feedburner.com/bannednews', #censored China news
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self,response):
html = response.body
tt = re.findall('<div class="postcopyright">(.*?)<a href="(.*?)">(.*?)</a>', html)
for i in range(0,len(tt)):
url=tt[i][1]
title=tt[i][2]
yield scrapy.Request(url,meta={
'home_url':response.url,
'title':title
},callback=self.parse_item)
def parse_item(self,response):
home_url=response.meta['home_url']
title=response.meta['title']
pubTime=re.findall('<div class="postmeat ac">(.*?)<a',response.body)[0]
pubTime=pubTime.replace(u'年','-').replace(u'月','-').replace(u'日','').replace(' ','')
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
describe=''
app_name='中国禁闻'
pic_url=''
content=response.xpath('//div[@class="entry"]').extract()
contentt = content[0].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', contentt)
pic_more_url = re.findall('<img(.*?)src="(.*?)"',contentt)
pic_more_url1 = []
for i in range(0, len(pic_more_url)):
pic_more_url1.append(pic_more_url[i][1])
pic_more_url = str(set(pic_more_url1))
contentdata = ''
for i in content:
contentdata += i
content = contentdata
author = ''
if u'neimunews' in home_url:
category = u'中共高层内幕'.encode('utf-8')
elif u'bnews-guoji' in home_url:
category = u'国际新闻'.encode('utf-8')
elif u'kzgnews' in home_url:
category = u'中国新闻'.encode('utf-8')
elif u'bannednews' in home_url:
category = u'中国禁闻'.encode('utf-8')
else:
category = u'中国禁闻'.encode('utf-8')
publishedDate = pubTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
print pubTime
# t1 = pubTime.split(' ')[0]
# t2 = pubTime.split(' ')[1]
# pubTime = t1 + ' ' + t2
# timeArray = time.strptime(pubTime, "%Y-%m-%d %H:%M")
# publishedDate = time.mktime(timeArray)
try:
t1 = pubTime.split(' ')[0]
t2 = pubTime.split(' ')[1]
t = t1 + ' ' + t2
timeArray = time.strptime(t, "%Y-%m-%d %H:%M")
except:
t = pubTime.split(' ')[0]
timeArray = time.strptime(t, "%Y-%m-%d")
publishedDate = time.mktime(timeArray)
if publishedDate >= self.timeStamp:
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(publishedDate)))
item['publishedDate'] = publishedDate
self.count = self.count + 1
item['count'] = self.count
yield item
<file_sep>#coding=utf-8
import scrapy
import json,re,time
from news.items import NewsItem
from lxml.etree import HTML
class rijing(scrapy.Spider):
name = 'rijingxinwen'
start_urls = [
        'https://cn.nikkei.com/politicsaeconomy.html?limitstart=0',#politics & economy watch
        'https://cn.nikkei.com/politicsaeconomy.html?start=10',#politics & economy watch
        'https://cn.nikkei.com/china.html?limitstart=0',#China
        'https://cn.nikkei.com/china.html?start=10',#China
        'https://cn.nikkei.com/industry.html?limitstart=0',#product focus
        'https://cn.nikkei.com/industry.html?start=10',#product focus
        'https://cn.nikkei.com/columnviewpoint.html?limitstart=0',#columns/opinion
        'https://cn.nikkei.com/columnviewpoint.html?start=10',#columns/opinion
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime('2018-06-01', "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
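    # note: the freshness cutoff is pinned to 2018-06-01 rather than to today's date (time_str above is computed but unused)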
def parse(self, response):
links = response.xpath('//dl[@class="newsContent02"]/dt/a/@href').extract()
title = response.xpath('//dl[@class="newsContent02"]/dt/a/text()').extract()
pubt = response.xpath('//dl[@class="newsContent02"]/dt/span/text()').extract()
desc = response.xpath('//dl[@class="newsContent02"]/dd/div/text()').extract()
if len(links) == len(desc):
for i in range(len(desc)):
url = 'https://cn.nikkei.com' + links[i]
tit = title[i]
describe = desc[i]
publish = pubt[i].replace('(','').replace(')','').replace('/','-')
yield scrapy.Request(url , meta={
'title': tit,
'describe': describe,
'published': publish,
'home_url': response.url
}, callback=self.parse_item, dont_filter=True)
else:
for i in range(len(desc)):
url = 'https://cn.nikkei.com' + links[i]
tit = title[i]
describe = ''
publish = pubt[i].replace('(','').replace(')','').replace('/','-')
yield scrapy.Request(url, meta={
'title': tit,
'describe': describe,
'published': publish,
'home_url': response.url
}, callback=self.parse_item)
def parse_item(self, response):
title = response.meta['title']
describe = response.meta['describe']
home_url = response.meta['home_url']
publishedDate = response.meta['published']
app_name = '日经新闻网'
pic_url = ''
author = ''
if 'politicsaeconomy' in home_url:
category = '政经观察'
elif 'china' in home_url:
category = '中国'
elif 'industry' in home_url:
category = '产品聚焦'
elif 'columnviewpoint' in home_url:
category = '专栏/观点'
else:
category = '政经观察'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
content = response.xpath('//div[@id="contentDiv"]').extract()
content = content[0].replace('\t', '').replace('\n', '').replace('\r', '')
Sector = HTML(content)
content = content.replace(' ', '')
pic_more_url = Sector.xpath('//img/@src')
pic = []
if len(pic_more_url) > 2:
for i in range(len(pic_more_url)):
try:
pic.append('https://cn.nikkei.com' + pic_more_url[i+2])
except:
pic_more_url = str(pic)
break
else:
pic_more_url = ''
content = re.findall('>(.*?)<', content)
contentdata = ''
for i in range(0, len(content)):
contentdata += content[i]
content = contentdata
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
timeArray = time.strptime(publishedDate, "%Y-%m-%d")
timenum = int(time.mktime(timeArray))
if timenum >= self.timeStamp:
self.count += 1
item['count'] = self.count
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(timenum)))
item['publishedDate'] = publishedDate
numappName = self.readjson()
if len(numappName) == 0:
items = {
'url': response.url,
'title': item['title']
}
with open('rijingxinwenwang.json', 'a+') as fp:
line = json.dumps(dict(items), ensure_ascii=False) + '\n'
fp.write(line)
yield item
            else:
                for i in range(len(numappName)):
                    if numappName[i]['url'] == response.url or numappName[i]['title'] == item['title']:
                        return
                # only record and yield the item once it is known to be unseen
                items = {
                    'url': response.url,
                    'title': item['title']
                }
                with open('rijingxinwenwang.json', 'a+') as fp:
                    line = json.dumps(dict(items), ensure_ascii=False) + '\n'
                    fp.write(line)
                yield item
    def readjson(self):
        s = []
        try:
            file_object = open('rijingxinwenwang.json', 'r')
        except IOError:
            # no history file yet (first run), so nothing has been crawled before
            return s
        try:
            for line in file_object:
                line = line.strip()
                if line:
                    s.append(json.loads(line))
        finally:
            file_object.close()
        return s
<file_sep>#-*- coding: utf-8 -*-
from scrapy.spider import Spider
import scrapy
import sys
import json,time,re
from news.items import NewsItem
import random
reload(sys)
sys.setdefaultencoding('utf8')
class xinwen(Spider):
name = 'tengxunxinwen'
start_urls = [
'https://r.inews.qq.com/getQQNewsUnreadList?rtAd=1&lc_ids=&forward=1&page=6&last_id=20180602A1IG9G00&newsTopPage=5&picType=0,1,2,0,0,2,2,1,2,0,0,2,0,1,0,0,2,2,0,2&user_chlid=news_news_19,news_news_bj,news_news_ent,news_news_sports,news_news_mil,news_news_nba,news_news_world&last_time=1528079861&channelPosition=0&chlid=news_news_top&Cookie=lskey%3D;skey%3D;uin%3D;%20luin%3D;logintype%3D0;%20main_login%3D;%20&omgid=9815f276f473194ffe286608f2d6b5ba263b001021181e&uid=5544c78435fe1eac&devid=867514021468675&appver=19_android_5.6.02&qn-rid=1208a091-bdbd-40b2-9ee9-90759cb7584e&qn-sig=fd43538cc388872084f05a293eadece1',
'https://r.inews.qq.com/getQQNewsUnreadList?rtAd=1&lc_ids=&forward=1&page=7&last_id=20180602A1IG9G00&newsTopPage=5&picType=0,1,2,0,0,2,2,1,2,0,0,2,0,1,0,0,2,2,0,2&user_chlid=news_news_19,news_news_bj,news_news_ent,news_news_sports,news_news_mil,news_news_nba,news_news_world&last_time=1528079861&channelPosition=0&chlid=news_news_top&Cookie=lskey%3D;skey%3D;uin%3D;%20luin%3D;logintype%3D0;%20main_login%3D;%20&omgid=9815f276f473194ffe286608f2d6b5ba263b001021181e&uid=5544c78435fe1eac&devid=867514021468675&appver=19_android_5.6.02&qn-rid=1208a091-bdbd-40b2-9ee9-90759cb7584e&qn-sig=fd43538cc388872084f05a293eadece1',
'https://r.inews.qq.com/getQQNewsUnreadList?rtAd=1&lc_ids=&forward=1&page=6&last_id=20180602A1IG9G00&newsTopPage=5&picType=0,1,2,0,0,2,2,1,2,0,0,2,0,1,0,0,2,2,0,2&user_chlid=news_news_19,news_news_bj,news_news_ent,news_news_sports,news_news_mil,news_news_nba,news_news_world&last_time=1528079861&channelPosition=0&chlid=news_news_19&Cookie=lskey%3D;skey%3D;uin%3D;%20luin%3D;logintype%3D0;%20main_login%3D;%20&omgid=9815f276f473194ffe286608f2d6b5ba263b001021181e&uid=5544c78435fe1eac&devid=867514021468675&appver=19_android_5.6.02&qn-rid=1208a091-bdbd-40b2-9ee9-90759cb7584e&qn-sig=fd43538cc388872084f05a293eadece1',
'https://r.inews.qq.com/getQQNewsUnreadList?rtAd=1&lc_ids=&forward=1&page=7&last_id=20180602A1IG9G00&newsTopPage=5&picType=0,1,2,0,0,2,2,1,2,0,0,2,0,1,0,0,2,2,0,2&user_chlid=news_news_19,news_news_bj,news_news_ent,news_news_sports,news_news_mil,news_news_nba,news_news_world&last_time=1528079861&channelPosition=0&chlid=news_news_19&Cookie=lskey%3D;skey%3D;uin%3D;%20luin%3D;logintype%3D0;%20main_login%3D;%20&omgid=9815f276f473194ffe286608f2d6b5ba263b001021181e&uid=5544c78435fe1eac&devid=867514021468675&appver=19_android_5.6.02&qn-rid=1208a091-bdbd-40b2-9ee9-90759cb7584e&qn-sig=fd43538cc388872084f05a293eadece1',
'https://r.inews.qq.com/getQQNewsUnreadList?rtAd=1&lc_ids=&forward=1&page=6&last_id=20180602A1IG9G00&newsTopPage=5&picType=0,1,2,0,0,2,2,1,2,0,0,2,0,1,0,0,2,2,0,2&user_chlid=news_news_19,news_news_bj,news_news_ent,news_news_sports,news_news_mil,news_news_nba,news_news_world&last_time=1528079861&channelPosition=0&chlid=news_news_world&Cookie=lskey%3D;skey%3D;uin%3D;%20luin%3D;logintype%3D0;%20main_login%3D;%20&omgid=9815f276f473194ffe286608f2d6b5ba263b001021181e&uid=5544c78435fe1eac&devid=867514021468675&appver=19_android_5.6.02&qn-rid=1208a091-bdbd-40b2-9ee9-90759cb7584e&qn-sig=fd43538cc388872084f05a293eadece1',
'https://r.inews.qq.com/getQQNewsUnreadList?rtAd=1&lc_ids=&forward=1&page=7&last_id=20180602A1IG9G00&newsTopPage=5&picType=0,1,2,0,0,2,2,1,2,0,0,2,0,1,0,0,2,2,0,2&user_chlid=news_news_19,news_news_bj,news_news_ent,news_news_sports,news_news_mil,news_news_nba,news_news_world&last_time=1528079861&channelPosition=0&chlid=news_news_world&Cookie=lskey%3D;skey%3D;uin%3D;%20luin%3D;logintype%3D0;%20main_login%3D;%20&omgid=9815f276f473194ffe286608f2d6b5ba263b001021181e&uid=5544c78435fe1eac&devid=867514021468675&appver=19_android_5.6.02&qn-rid=1208a091-bdbd-40b2-9ee9-90759cb7584e&qn-sig=fd43538cc388872084f05a293eadece1',
'https://r.inews.qq.com/getQQNewsUnreadList?rtAd=1&lc_ids=&forward=1&page=6&last_id=20180602A1IG9G00&newsTopPage=5&picType=0,1,2,0,0,2,2,1,2,0,0,2,0,1,0,0,2,2,0,2&user_chlid=news_news_19,news_news_bj,news_news_ent,news_news_sports,news_news_mil,news_news_nba,news_news_world&last_time=1528079861&channelPosition=0&chlid=news_news_mil&Cookie=lskey%3D;skey%3D;uin%3D;%20luin%3D;logintype%3D0;%20main_login%3D;%20&omgid=9815f276f473194ffe286608f2d6b5ba263b001021181e&uid=5544c78435fe1eac&devid=867514021468675&appver=19_android_5.6.02&qn-rid=1208a091-bdbd-40b2-9ee9-90759cb7584e&qn-sig=fd43538cc388872084f05a293eadece1',
'https://r.inews.qq.com/getQQNewsUnreadList?rtAd=1&lc_ids=&forward=1&page=7&last_id=20180602A1IG9G00&newsTopPage=5&picType=0,1,2,0,0,2,2,1,2,0,0,2,0,1,0,0,2,2,0,2&user_chlid=news_news_19,news_news_bj,news_news_ent,news_news_sports,news_news_mil,news_news_nba,news_news_world&last_time=1528079861&channelPosition=0&chlid=news_news_mil&Cookie=lskey%3D;skey%3D;uin%3D;%20luin%3D;logintype%3D0;%20main_login%3D;%20&omgid=9815f276f473194ffe286608f2d6b5ba263b001021181e&uid=5544c78435fe1eac&devid=867514021468675&appver=19_android_5.6.02&qn-rid=1208a091-bdbd-40b2-9ee9-90759cb7584e&qn-sig=fd43538cc388872084f05a293eadece1',
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
data = json.loads(response.body)
data = data['newslist']
for i in range(len(data)):
title = data[i]['title']
desc = data[i]['abstract']
pubt = data[i]['timestamp']
url = data[i]['surl']
yield scrapy.Request(url,meta={
'title':title,
'desc':desc,
'pubt':pubt,
'home_url':response.url
},callback=self.parse_item)
def parse_item(self,response):
title = response.meta['title']
describe = response.meta['desc']
publishedDate = response.meta['pubt']
pic_url = ''
app_name = '腾讯新闻'
author = ''
home_url = response.meta['home_url']
category = '要闻'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
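        # the article body is embedded in the response as escaped HTML under "cnt_html";
        # extract it with a regex and unescape it before stripping the <P> tags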
content = re.findall('"cnt_html":"(.*?)"',str(response.body))
content =content[0].decode('unicode_escape').replace('\t','').replace('\n','').replace('\r','').replace('<P>','').replace('<\/P>','')
pic_more_url = ''
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(publishedDate)))
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
self.count += 1
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
item['count'] = self.count
yield item<file_sep>#coding=utf-8
import time, re, json, scrapy
from news.items import NewsItem
from lxml.etree import HTML
class cbnc(scrapy.Spider):
name = 'cnbcnews'
start_urls = [
'https://www.cnbc.com/us-news/?page=1',
'https://www.cnbc.com/us-news/?page=2',
'https://www.cnbc.com/investing/?page=1',
'https://www.cnbc.com/investing/?page=2',
'https://www.cnbc.com/technology/?page=1',
'https://www.cnbc.com/technology/?page=2'
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime('2018-06-01', "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
links = response.xpath('//ul[@id="pipeline_assetlist_0"]/li/div/div/a/@href').extract()
title = response.xpath('//ul[@id="pipeline_assetlist_0"]/li/div/div/a/text()').extract()
for i in range(len(links)):
if 'http' not in links[i]:
url = 'https://www.cnbc.com' + links[i]
else:
url = links[i]
yield scrapy.Request(url, meta={
'title': title[i],
'home_url': response.url
}, callback=self.parse_item, dont_filter=True)
def parse_item(self, response):
app_name = 'CNBC'
describe = ''
author = ''
pic_url = ''
title = response.meta['title'].replace('\t', '').replace('\n', '').replace('\r', '').replace(' ','')
try:
publishedDate = response.xpath('//time/@datetime').extract()[0]
publishedDate = publishedDate.replace('T', ' ').replace('-0400','')
except:
publishedDate = '2018-01-01 01:01:01'
content = response.xpath('//article').extract()
selator = HTML(content[0])
content = selator.xpath('//text()')
content = ''.join(content)
content = content.replace('\t', '').replace('\n', '').replace('\r', '')
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
home_url = response.meta['home_url']
pic_more_url = selator.xpath('//img/@src')
pic_more_urll = []
for i in range(len(pic_more_url)):
pic_more_urll.append(pic_more_url[i])
pic_more_url = str(pic_more_urll)
if 'us-news' in home_url:
category = 'us-news'
elif 'investing' in home_url:
category = 'investing'
elif 'technology' in home_url:
category = 'technology'
else:
category = 'news'
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
try:
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M:%S")
except:
timeArray = time.strptime(publishedDate, "%Y-%m-%d %H:%M")
publishedDate = time.mktime(timeArray)
if publishedDate >= self.timeStamp:
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(publishedDate)))
item['publishedDate'] = publishedDate
self.count = self.count + 1
item['count'] = self.count
yield item
<file_sep>#coding=utf-8
import json
import requests
#coding=utf-8
import scrapy
import time
import json
import re
from news.items import NewsItem
from news.DataResource import TransportData
class tiantian(scrapy.Spider):
name = 'tiantiankuaibao'
Ttime = int(round(time.time() * 1000))
start_urls = [
'https://r.cnews.qq.com'
]
count = 0
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
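        # query the keyword-search endpoint with a direct requests POST (the form fields
        # below mimic the mobile client), then request each article in the returned list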
url = 'https://r.cnews.qq.com/searchByType?devid=864699038349266'
params = {
"REQBuildTime":"%s"%self.Ttime,
"adcode":"110108",
"ssid":"tmliu",
"source":"",
"omgid":"ecdaba5753005f4be4f95cac85563d10f7e80010213113",
"REQExecTime":"%s"%self.Ttime,
"qqnetwork":"wifi",
"commonsid":"c3dd54a003fb4bb89d4b8bdf80f23fda",
"curTab":"kuaibao",
"kingCardType":"0",
"picSizeMode":"0",
"commonGray":"1_3%7C2_1%7C12_1%7C16_1%7C22_0%7C14_0%7C17_0%7C19_1",
"currentTab":"kuaibao",
"is_wap":"0",
"lastCheckCardType":"0",
"omgbizid":"afa8d326e9dc2b441dfb950abf1d92b714870080213504",
"page":"1",
"type":"aggregate",
"imsi":"460078108159178",
"bssid":"24%3A05%3A0f%3A8a%3Abd%3Af3",
"query":"习近平",
"muid":"211471387078276596",
"curChannel":"daily_timeline",
"activefrom":"icon",
"unixtimesign":"1528248088989",
"qimei":"864699038349266",
"Cookie":"%26lskey%3D%26luin%3D%26skey%3D%26uin%3D%26logintype%3D0",
"chlid":"",
"rawQuery":"",
"imsi_history":"460078108159178",
"qn-sig":"c744f26a575fe7a08f7c0c32c3a43d34",
"qn-rid":"19649191-e9b9-4d33-9a4d-4923bbb1ffb1",
"hw_fp":"xiaomi%2Ftiffany%2Ftiffany%3A7.1.2%2FN2G47H%2F8.3.15%3Auser%2Frelease-keys",
"mid":"08bd609c01ab23254e488078e3c8d396d9a0737e",
"devid":"864699038349266",
"mac":"F4%3AF5%3ADB%3A23%3A21%3A26",
"store":"73387",
"screen_height":"1920",
"apptype":"android",
"origin_imei":"864699038349266",
"hw":"Xiaomi_MI5X",
"appversion":"4.8.10",
"appver":"25_areading_4.8.10",
"uid":"c955f03805f6ce81",
"screen_width":"1080",
"sceneid":"",
"android_id":"c955f03805f6ce81"
}
headers = {
"Content-Type":"application/x-www-form-urlencoded"
}
data = requests.post(url,data=params,headers=headers)
data = json.loads(data.content)
data = data['new_list']['data']
for i in range(len(data)):
title = data[i]['article']['title']
url = data[i]['article']['short_url']
pubt = data[i]['article']['time']
desc = data[i]['article']['abstract']
yield scrapy.Request(url,meta={
'title':title,
'pubt':pubt,
'desc':desc
},callback=self.parse_item,dont_filter=True)
def parse_item(self,response):
title = response.meta['title']
publishedDate = response.meta['pubt']
describe = response.meta['desc']
app_name = '天天快报'
pic_url = ''
author = ''
home_url = 'https://r.cnews.qq.com'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
content = response.xpath('//div[@class="content-box"]/p/text()').extract()
con = ''
for i in range(len(content)):
con += content[i]
content = con
category = '要闻'
pic_more_url = response.xpath('//div[@class="content-box"]/p/img/@src').extract()
picc = []
for i in range(len(pic_more_url)):
picc.append(pic_more_url[i])
pic_more_url = picc
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = str(pic_url)
item['pic_more_url'] = str(pic_more_url)
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
self.count += 1
item['count'] = self.count
yield item
<file_sep>#-*- coding: utf-8 -*-
from scrapy.spider import Spider
from urlparse import urljoin
from scrapy.selector import Selector
from scrapy.http import Request
import time
import json
import re
import sys
from news.DataResource import TransportData
import scrapy
from news.items import NewsItem
reload(sys)
sys.setdefaultencoding('utf8')
class shikuang(Spider):
name = "shikuang"
ts = int(time.time())
base_url = "https://my.cqtimes.cn/"
# start_urls = [
# "https://my.cqtimes.cn/?m=mobile&c=index&a=newslist&ts=1507873678&uuid=377b5fb4e857f646c5630afdc9170617&version=2.2.8&sign=07e342fc8869d5b3755a13659ebba43c"
# ]
DOWNLOAD_DELAY = 0.5
count = 0
appname = "实况"
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
url = "https://my.cqtimes.cn/?m=mobile&c=index&a=newslist&ts={ts}&uuid=377b5fb4e857f646c5630afdc9170617&version=2.2.8&sign={sign}"
# existing_title = TransportData.getData(appname)
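    # one signed list request is posted per channel; the sign is an md5 of the query string
    # with the literal salt "cqtimes" appended after the version parameter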
def start_requests(self):
sign = "/?m=mobile&c=index&a=newslist&ts=" + str(self.ts) + "&uuid=377b5fb4e857f646c5630afdc9170617&version=2.2.8cqtimes"
sign = self.md5(sign)
event_id = ['15','51','225','221','163','220','242','245','16','252','256']
event_value = ['推荐','热点','视频','社会','民生','休闲','搞笑','图片','订阅','娱乐','旅游']
page = 1
self.url = self.url.format(ts=self.ts, sign=sign)
for i in range(0,len(event_id)):
yield scrapy.FormRequest(
self.url,
formdata={
"version": "2.2.8",
"app_type": "weikuai",
"mac": "53cd97b8d97b75617eb03c10e0b31195",
"uuid": "377b5fb4e857f646c5630afdc9170617",
"issimulator": "1",
"uid": "",
"event_id": event_id[i],
"event_value": event_value[i],
"p": str(page),
"type": event_id[i],
"newstype_type": "1"
},
meta={"event_id": event_id[i],"event_value": event_value[i],"page":page},callback=self.parse_next)
def parse_next(self,response):
# print response.body
acc_tit = []
results = json.loads(response.body)
results = results['post_first']
for result in results:
newsData = result['newsData']
url = newsData['url']
title = newsData['title']
# print url
# print title
# print newsData
if "addtime" in newsData.keys():
publishedDate = newsData['addtime']
elif "checktime" in newsData.keys():
publishedDate = newsData['checktime']
if 'img' in newsData.keys():
pic_url = newsData['img']
else:
pic_url = ""
pic_more_url = newsData['imglist']
# pic_more_url = set(pic_more_url)
if 'read' in newsData.keys():
content = newsData['read']
else:
content = ""
if int(publishedDate) > int(self.timeStamp):
acc_tit.append(title)
yield Request(url, meta={"title":title, "publishedDate":publishedDate, "pic_url":pic_url, "content":content,
"pic_more_url":pic_more_url,"event_value":response.meta['event_value']}, callback=self.parse_news)
if len(acc_tit) > 1:
event_id = response.meta['event_id']
event_value = response.meta['event_value']
page = response.meta['page']
page = page + 1
# page = str(page)
# page = page.encode('gdk')
# print page
yield scrapy.FormRequest(
self.url,
formdata={
"version": "2.2.8",
"app_type": "weikuai",
"mac": "53cd97b8d97b75617eb03c10e0b31195",
"uuid": "377b5fb4e857f646c5630afdc9170617",
"issimulator": "1",
"uid": "",
"event_id": event_id,
"event_value": event_value,
"p": str(page),
"type": event_id,
"newstype_type": "1"
},
meta={"event_id": event_id, "event_value": event_value, "page": page},callback=self.parse_next)
def parse_news(self,response):
title = response.meta['title']
publishedDate = response.meta['publishedDate']
pic_url = response.meta['pic_url']
hxs = Selector(response)
pic_more_url = response.meta['pic_more_url']
pic_more_url = set(pic_more_url)
author = hxs.xpath('//p[@class="art_txt sau"]/text()').extract()
print author
while "" in author:
author.remove("")
        if len(author) > 1:
            author = author[1].encode('utf-8').replace("\t","").replace("\n","")
        elif author:
            author = author[0].encode('utf-8').replace("\t","").replace("\n","")
else:
author = ""
category = response.meta['event_value']
describe = ""
content = response.meta['content']
if content == "":
content = hxs.xpath("//div[@class='article']//text()").extract()
content = "".join(content)
content = content.encode('utf-8')
content = content.replace("\n","")
home_url = self.base_url
crawlTime = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time()))
publishedDate = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(float(publishedDate)))
if pic_url:
pic_url = pic_url.encode('utf-8')
if title:
title = title.encode('utf-8')
self.count = self.count + 1
print self.count
print self.appname
print pic_url
print pic_more_url
print author
print response.url
print category
print title
print describe
print content
print home_url
print publishedDate
print crawlTime
url = response.url
item = NewsItem()
item['app_name'] = self.appname
item['count'] = self.count
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
# yield item
# TransportData.transport_data(self.appname, pic_url, pic_more_url, author, response.url, category, title,
# describe, content, home_url, publishedDate, crawlTime)
exsit_title = TransportData.getData("app_shikuang", title)
if exsit_title:
return
else:
yield item
TransportData.transport_data("app_shikuang", title, publishedDate)
def md5(self, str):
import hashlib
m = hashlib.md5()
m.update(str)
return m.hexdigest()<file_sep>#coding=utf-8
import scrapy
import json,re,time
from news.items import NewsItem
from lxml.etree import HTML
class hanlian(scrapy.Spider):
name = 'hanlianshe'
start_urls = [
        'http://chinese.yonhapnews.co.kr/allheadlines/0200000001.html',#rolling headlines
        'http://chinese.yonhapnews.co.kr/international/0306000001.html',#international
        'http://chinese.yonhapnews.co.kr/domestic/0406000001.html',#domestic
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
if 'allheadlines' in response.url:
category = '滚动'
elif 'international' in response.url:
category = '国际'
else:
category = '国内'
url = response.xpath('//div[@class="con_article_list"]/ul/li[1]/a/@href').extract()
for i in range(len(url)):
links = url[i]
yield scrapy.Request(links, meta={
'category': category
}, callback=self.parse_item)
def parse_item(self, response):
category = response.meta['category']
title = response.xpath('//h1/text()').extract()[0]
pic_url = ''
describe = ''
app_name = '韩联社'
content = response.xpath('//div[@id="articleBody"]').extract()
content = content[0].replace('\t', '').replace('\n', '').replace('\r', '')
content = re.findall('>(.*?)<',content)
contentt = ''
for i in range(len(content)):
contentt += content[i]
content = contentt
publishedDate = response.xpath('//p[@class="publish-time"]/text()').extract()[0].replace(' KST','')
author = ''
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
home_url = 'http://chinese.yonhapnews.co.kr/'
pic = response.xpath('//div[@id="articleBody"]').extract()[0]
Sector = HTML(pic)
pic_more = Sector.xpath('//img/@src')
pic_more_url = []
for i in range(len(pic_more)):
pic_more_url.append(pic_more[i])
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
timeArray = time.strptime(publishedDate, "%Y/%m/%d %H:%M")
timenum = int(time.mktime(timeArray))
if timenum >= self.timeStamp:
self.count += 1
item['count'] = self.count
publishedDate = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(float(timenum)))
item['publishedDate'] = publishedDate
yield item
<file_sep>#-*- coding: utf-8 -*-
from scrapy.spider import Spider
import scrapy
import sys
import json,time,re
from news.items import NewsItem
reload(sys)
sys.setdefaultencoding('utf8')
class xinwenwang(scrapy.Spider):
name = 'zhongguoxinwen'
page = 1
start_urls = [
'http://dw.chinanews.com/chinanews/getNewsList.json?version_chinanews=6.3.13&deviceId_chinanews=864454030661742&platform_chinanews=android&source=chinanews&area=%E5%8C%97%E4%BA%AC%E5%B8%82&language=chs&pageSize=10&searchType=9&searchWord=%E4%B9%A0%E8%BF%91%E5%B9%B3&pageIndex=' + str(page) + '&dtp=1'
]
count = 0
number = 1
download_delay = 2
time_str = time.strftime("%Y-%m-%d")
timeArray = time.strptime(time_str, "%Y-%m-%d")
timeStamp = int(time.mktime(timeArray))
def parse(self, response):
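        # walk the ChinaNews keyword-search API, only following articles published today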
data = json.loads(response.body)
data = data['data']
num = 0
for i in range(len(data)):
title = data[i]['title']
pic = data[i]['picture']
id = data[i]['id']
try:
pubt = data[i]['freshTime']
except:
pubt = data[i]['pubtime']
timeArray = time.strptime(pubt, "%Y-%m-%d %H:%M:%S")
timeStamp = int(time.mktime(timeArray))
if timeStamp >= self.timeStamp:
num += 1
url = 'http://dw.chinanews.com/chinanews/newsContent.json?version_chinanews=6.3.13&deviceId_chinanews=864454030661742&platform_chinanews=android&source=chinanews&language=chs&user=&id=%s&pageSize=3&dtp=1'%id
yield scrapy.Request(url,meta={
'title':title,
'pic':pic,
'pubt':pubt
},callback=self.parse_item)
        # keep paging only while this page still returned articles published today
        if num > 0:
self.page += 1
url = 'http://dw.chinanews.com/chinanews/getNewsList.json?version_chinanews=6.3.13&deviceId_chinanews=864454030661742&platform_chinanews=android&source=chinanews&area=%E5%8C%97%E4%BA%AC%E5%B8%82&language=chs&pageSize=10&searchType=9&searchWord=%E4%B9%A0%E8%BF%91%E5%B9%B3&pageIndex=' + str(self.page) + '&dtp=1'
yield scrapy.Request(url, callback=self.parse)
def parse_item(self,response):
title = response.meta['title']
pic_url = response.meta['pic']
publishedDate = response.meta['pubt']
app_name = '中国新闻网'
category = '要闻'
describe = ''
author = ''
home_url = 'http://dw.chinanews.com'
crawlTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(time.time()))
data = json.loads(response.body)
content = data['data']['content']
contentt = content.replace('\t', '').replace('\n', '').replace('\r', '').replace(' ', '')
content = re.findall('>(.*?)<', contentt)
contentdata = ''
for i in content:
contentdata += i
content = contentdata
pic_more_url = data['data']['picture']
print "app名称", app_name
print "主图片url", pic_url
print "子图片url", pic_more_url
print "作者", author
print "详情页地址", response.url
print "所属类型", category
print "标题", title
print "描述", describe
print "内容", content
print "主url", home_url
print "发布时间", publishedDate
print "爬取时间", crawlTime
url = response.url
item = NewsItem()
item['app_name'] = app_name
item['pic_url'] = pic_url
item['pic_more_url'] = pic_more_url
item['author'] = author
item['url'] = url
item['category'] = category
item['title'] = title
item['describe'] = describe
item['content'] = content
item['home_url'] = home_url
item['publishedDate'] = publishedDate
item['crawlTime'] = crawlTime
yield item
| 8cf22145abbc84b0f6615f0c9f9a7720a31ee0b0 | ["Markdown", "Python", "Text"] | 92 | Python | pythonPCS/scrapy-redis-mongo-mysql-news | 49fb44a7173d62ab34e0109c42ea608f265dcf25 | c2c765ca1a13ce13a858ab832d89880ce0379906 | refs/heads/main |
<file_sep>import React from 'react';
import { View, Text, Button, TextInput, StyleSheet, ImageBackground, Image, } from 'react-native';
import { TouchableOpacity } from 'react-native-gesture-handler';
export default class Start extends React.Component {
constructor(props) {
super(props);
this.state = {
name: '',
color: '',
pressStatus1: false,
pressStatus2: false,
pressStatus3: false,
pressStatus4: false,
noColorPress: false,
};
}
render() {
return (
<View style={styles.containerOuter}>
<ImageBackground source={require('../assets/Background-Image.png')} style={styles.imageBackground}>
<Text style={styles.title}>Chat App</Text>
<View style={styles.containerInner}>
<View style={styles.sectionStyle}>
<Image source={require('../assets/icon.png')} style={styles.imageStyle} />
<TextInput
onChangeText={(name) => this.setState({name})}
value={this.state.name}
placeholder='<NAME>'
/>
</View>
<Text style={styles.backgroundColorText}>Choose Background Color:</Text>
<View style={styles.colors}>
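            {/* each swatch stores its colour in state and marks itself as pressed so the smaller "*Pressed" style is applied */}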
<TouchableOpacity
onPress={() => {this.setState({color: '#090C08', pressStatus1: true, pressStatus2: false, pressStatus3: false, pressStatus4: false, noColorPress: false })}}
style={this.state.pressStatus1 ? styles.colorPressed1 : styles.color1}
/>
<TouchableOpacity
onPress={() => {this.setState({color: '#474056', pressStatus1: false, pressStatus2: true, pressStatus3: false, pressStatus4: false, noColorPress: false })}}
style={this.state.pressStatus2 ? styles.colorPressed2 : styles.color2}
/>
<TouchableOpacity
onPress={() => {this.setState({color: '#253CCD', pressStatus1: false, pressStatus2: false, pressStatus3: true, pressStatus4: false, noColorPress: false })}}
style={this.state.pressStatus3 ? styles.colorPressed3 : styles.color3}
/>
<TouchableOpacity
onPress={() => {this.setState({color: '#378A54', pressStatus1: false, pressStatus2: false, pressStatus3: false, pressStatus4: true, noColorPress: false})}}
style={this.state.pressStatus4 ? styles.colorPressed4 : styles.color4}
/>
<TouchableOpacity
onPress={() => {this.setState({color: '#fff', pressStatus1: false, pressStatus2: false, pressStatus3: false, pressStatus4: false, noColorPress: true})}}
style={this.state.noColorPress ? styles.noColorPressed : styles.noColor}
/>
</View>
<TouchableOpacity style={styles.buttonBackground}
onPress={() => this.props.navigation.navigate('Chat', { name: this.state.name, color: this.state.color })}
>
<Text style={styles.buttonText}>Start Chatting</Text>
</TouchableOpacity>
</View>
</ImageBackground>
</View>
)
}
}
const styles = StyleSheet.create({
containerOuter: {
flex: 1,
},
imageBackground: {
flex: 1,
resizeMode: "cover",
justifyContent: "center"
},
title: {
fontSize: 45,
fontWeight: '600',
color: '#ffffff',
textAlign: 'center',
},
containerInner: {
flexDirection: 'column',
backgroundColor: 'white',
margin: 15,
marginTop: 150,
},
backgroundColorText: {
margin: 15,
fontSize: 16,
fontWeight: '300',
color: '#757083',
},
imageStyle: {
padding: 10,
margin: 5,
height: 25,
width: 25,
resizeMode : 'stretch',
alignItems: 'center',
},
sectionStyle: {
flexDirection: 'row',
justifyContent: 'flex-start',
alignItems: 'center',
backgroundColor: '#fff',
borderWidth: 1,
borderColor: '#000',
height: 55,
margin: 15,
padding: 10,
},
colors: {
flexDirection: 'row',
marginBottom: 10
},
color1: {
backgroundColor: '#090C08',
width: 40,
height: 40,
borderRadius: 40/2,
margin: 15,
borderColor: '#000',
borderWidth: 1,
},
color2: {
backgroundColor: '#474056',
width: 40,
height: 40,
borderRadius: 40/2,
margin: 15,
borderColor: '#000',
borderWidth: 1,
},
color3: {
backgroundColor: '#253CCD',
width: 40,
height: 40,
borderRadius: 40/2,
margin: 15,
borderColor: '#000',
borderWidth: 1,
},
color4: {
backgroundColor: '#378A54',
width: 40,
height: 40,
borderRadius: 40/2,
margin: 15,
borderColor: '#000',
borderWidth: 1,
},
noColor: {
backgroundColor: '#fff',
borderColor: '#000',
borderWidth: 1,
width: 40,
height: 40,
borderRadius: 40/2,
margin: 15
},
colorPressed1: {
backgroundColor: '#090C08',
borderColor: '#000',
borderWidth: 1,
width: 30,
height: 30,
borderRadius: 30/2,
margin: 20,
},
colorPressed2: {
backgroundColor: '#474056',
borderColor: '#000',
borderWidth: 1,
width: 30,
height: 30,
borderRadius: 30/2,
margin: 20,
},
colorPressed3: {
backgroundColor: '#253CCD',
borderColor: '#000',
borderWidth: 1,
width: 30,
height: 30,
borderRadius: 30/2,
margin: 20,
},
colorPressed4: {
backgroundColor: '#378A54',
borderColor: '#000',
borderWidth: 1,
width: 30,
height: 30,
borderRadius: 30/2,
margin: 20,
},
noColorPressed: {
backgroundColor: '#fff',
borderColor: '#000',
borderWidth: 1,
width: 30,
height: 30,
borderRadius: 30/2,
margin: 20,
},
buttonText: {
fontSize: 16,
fontWeight: '600',
color: '#ffffff',
textAlign: 'center',
},
buttonBackground: {
backgroundColor: '#757083',
color: '#ffffff',
margin: 15,
padding: 15,
},
});<file_sep># chat-app
## Description
A chat app for mobile devices using React Native. This app provides users with a chat interface and options to share images and their location.
The app is written with React Native and developed with Expo. Chat conversations are stored locally and in the Google Firebase database. For authentication, the app also uses Google Firebase to sign users in anonymously. The Gifted Chat library is used to create the chat interface and its functionality.
The app lets users pick and send images from their phone's image library once they grant permission to access their media library and camera. Users can share their location only when location permission is granted.
## Key Features
- A page where users can enter their name and choose a background color for the chat screen before joining the chat.
- A page displaying the conversation, as well as an input field and submit button.
- The chat provides users with two additional communication features: sending images and location data.
- Data is stored both online and offline.
## Get Started
### Technical Requirements
- Node.js
- Expo Command Line Interface
```
npm install expo-cli --global
```
- If you'd like to run the app on your mobile device, install the Expo app through your app store:
- [iOS](https://apps.apple.com/app/apple-store/id982107779)
- [Android](https://play.google.com/store/apps/details?id=host.exp.exponent&referrer=www)
- You also need an Expo account, which can be created via [Expo.io](https://expo.io)
- You need to log in to Expo in order to access the app
  - Log in to Expo through the CLI on your machine
  - Log in to Expo on your mobile device via the Expo app
- If you'd like to run the app on your machine through a simulator/emulator, you need:
- [iOS Simulator](https://docs.expo.io/workflow/ios-simulator/)
- [Android Studio](https://docs.expo.io/workflow/android-studio-emulator/)
### Installing Dependencies
In the project directory install the application's dependencies.
```
npm install
```
### Running the App
```
expo start
```
#### Running the App on your mobile device
After using "expo start" command to run the app, you can scan the QR code displayed in the command line interface via your mobile device.
#### Running the App through a simulator/emulator
With the command line interface open after using the "expo start" command, press:
- 'a' to open the app with an Android emulator
- 'i' to open the app with the iOS simulator
## Technologies
- [React Native](https://reactnative.dev)
- [Expo](https://expo.io)
- [Google Firestore](https://firebase.google.com)
- [Gifted Chat](https://github.com/FaridSafi/react-native-gifted-chat)
- [Android Studio](https://developer.android.com/studio)
<file_sep>import React from 'react';
import { View, Alert, Platform, KeyboardAvoidingView, StyleSheet } from 'react-native';
import { GiftedChat, Bubble } from 'react-native-gifted-chat';
import AsyncStorage from '@react-native-community/async-storage';
import NetInfo from '@react-native-community/netinfo';
import CustomActions from './CustomActions';
import MapView from 'react-native-maps';
import { renderInputToolbar, renderComposer, renderSend } from './InputToolbar';
const firebase = require('firebase');
require('firebase/firestore');
export default class Chat extends React.Component {
constructor() {
super();
this.state = {
messages: [],
user: {
_id: '',
name: '',
avatar: '',
},
uid: '',
isConnected: false,
image: null,
location: null,
}
//initializes firestore
var firebaseConfig = {
apiKey: "<KEY>",
authDomain: "chat-app-31787.firebaseapp.com",
projectId: "chat-app-31787",
storageBucket: "chat-app-31787.appspot.com",
messagingSenderId: "1021412781283",
appId: "1:1021412781283:web:449dadcb53d5df278a29d6",
measurementId: "G-8DD7KZ01NG"
};
if (!firebase.apps.length) {
firebase.initializeApp(firebaseConfig);
}
this.referenceChatMessages = firebase.firestore().collection("messages");
}
componentDidMount() {
//checks user's connection
NetInfo.fetch().then(connection => {
if (connection.isConnected) {
console.log('online');
//reference to load messages via firebase
this.referenceChatMessages = firebase.firestore().collection('messages');
this.unsubscribe = this.referenceChatMessages.onSnapshot(this.onCollectionUpdate);
//authenticates user via firesbase
this.authUnsubscribe = firebase.auth().onAuthStateChanged((user) => {
if (!user) {
firebase.auth().signInAnonymously();
}
//updates state with current user data
this.setState({
isConnected: true,
user: {
_id: user.uid,
name: this.props.route.params.name,
avatar: 'http://placeimg.com/140/140/any'
},
messages: [],
});
this.unsubscribe = this.referenceChatMessages
.orderBy("createdAt", "desc")
.onSnapshot(this.onCollectionUpdate);
});
} else {
console.log("offline");
Alert.alert('No internet connection, unable to send messages');
this.setState({
isConnected: false,
});
this.getMessages();
}
});
/*
let { name } = this.props.route.params;
this.setState({
messages: [
{
_id: 1,
text: 'Hello developer',
createdAt: new Date(),
user: {
_id: 2,
name: '<NAME>',
avatar: 'https://placeimg.com/140/140/any',
},
},
{
_id: 2,
text: `${name} has entered the chat`,
createdAt: new Date(),
system: true,
},
],
}) */
}
componentWillUnmount() {
this.unsubscribe();
this.authUnsubscribe();
}
//Updates the messages in the state every time they change on the firestore
onCollectionUpdate = (querySnapshot) => {
const messages = [];
// go through each document
querySnapshot.forEach((doc) => {
//get the QueryDocumentSnapshot's data
let data = doc.data();
messages.push({
_id: data._id,
text: data.text,
createdAt: data.createdAt.toDate(),
user: {
_id: data.user._id,
name: data.user.name,
avatar: data.user.avatar
},
image: data.image || '',
location: data.location || null
});
});
this.setState({ messages });
}
// Adds messages to the current chat log through GiftedChat; state remains unaffected
onSend(messages = []) {
this.setState(previousState => ({
messages: GiftedChat.append(previousState.messages, messages),
}),
() => {
this.addMessage();
this.saveMessages();
});
}
//Adds messages to the firebase.
addMessage() {
const message = this.state.messages[0];
this.referenceChatMessages.add({
_id: message._id,
text: message.text || '',
createdAt: message.createdAt,
user: {
_id: message.user._id,
name: message.user.name,
avatar: message.user.avatar,
},
image: message.image || '',
location: message.location || null
});
}
//gets messages from asyncStorage (native local storage) to display previous messages while offline
async getMessages() {
let messages = '';
try {
messages = await AsyncStorage.getItem('messages') || [];
this.setState({
messages: JSON.parse(messages)
});
} catch (error) {
console.log(error.message);
}
};
//adds messages to asyncStorage
async saveMessages() {
try {
await AsyncStorage.setItem('messages', JSON.stringify(this.state.messages));
} catch (error) {
console.log(error.message);
}
}
//deletes messages from asyncStorage
async deleteMessages() {
try {
await AsyncStorage.removeItem('messages');
this.setState({
messages: []
})
} catch (error) {
console.log(error.message);
}
}
//changes text bubble color for users (targeted using 'right'; 'left' would be used to target received messages)
renderBubble(props) {
return (
<Bubble
{...props}
wrapperStyle={{
right: {
backgroundColor: '#147EFB'
},
left: {
backgroundColor: '#F0F1EF'
}
}}
/>
)
}
//renders action button for adding photos, location etc.
renderCustomActions = (props) => {
return <CustomActions {...props} />;
};
// Returns a MapView that shows user's location
renderCustomView(props) {
const { currentMessage } = props;
if (currentMessage.location) {
return (
<MapView
style={{ width: 150, height: 100, borderRadius: 13, margin: 3 }}
region={{
latitude: currentMessage.location.latitude,
longitude: currentMessage.location.longitude,
latitudeDelta: 0.0922,
longitudeDelta: 0.0421,
}}
/>
);
}
return null;
}
render() {
//gets name prop from start screen text input and displays name at top of chat
let { name } = this.props.route.params;
this.props.navigation.setOptions({ title: name });
//defines background color prop user selected on start screen
let color = this.props.route.params.color;
const styles = StyleSheet.create({
container: {
flex: 1,
backgroundColor: '#fff'
}
});
return (
<View style={{flex: 1, backgroundColor: color }}>
<GiftedChat
renderBubble={this.renderBubble.bind(this)}
renderInputToolbar={renderInputToolbar}
renderComposer={renderComposer}
renderSend={renderSend}
renderActions={this.renderCustomActions}
renderCustomView={this.renderCustomView}
isTyping
//renderUsernameOnMessage
style={{backgroundColor: color }}
messages={this.state.messages}
onSend={messages => this.onSend(messages)}
placeholder={'Type a message...'}
maxInputLength={this.state.isConnected ? 2000 : 0}
user={{
_id: this.state.user._id,
avatar: this.state.user.avatar,
name: { name } ,
}}
/>
{ Platform.OS === 'android' ? <KeyboardAvoidingView behavior="height" /> : null}
</View>
)
}
}
| bbdba5ec67263e3b0b3420367ae03817f7c95cb2 | ["JavaScript", "Markdown"] | 3 | JavaScript | willcrone11/chat-app | e113bfa9d2727a0ba32b45b8ed10978c95c6bed2 | 9a737d3f68f05aaf9c84b3142c549034f3981433 | refs/heads/master |
<repo_name>sahanadarsh/URLCount<file_sep>/README.md
# URLCount MapReduce Example
##### uses KeyValueTextInputFormat with space separator for K,V pair https://hadoop.apache.org/docs/r2.7.1/api/org/apache/hadoop/mapred/KeyValueTextInputFormat.html
##### this example reads in the key as text and the value as an int, and sums the values for each key
##### Tested on: Sandbox 2.3.2
##### Compiled with Hortonworks HDP 2.3.0
##### use included input file as dataset
##### execute the job like so:
```
yarn jar URLCount-1.0-jar-with-dependencies.jar data output
```
##### output looks like so:
```
http://url1.com 523
http://url11.com 4
http://url12.com 36
http://url20.com 36
```
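##### the reducer class referenced by the driver (`URLCountR`) is not included in this listing; a minimal, hypothetical sketch is shown below, assuming it simply sums the counts per URL and emits them with the output key/value types declared in URLCount.java:
```
// Hypothetical sketch only -- not the repository's actual URLCountR source.
package com.hortonworks.mapreduce;

import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

public class URLCountR extends Reducer<Text, IntWritable, IntWritable, Text> {

    @Override
    public void reduce(Text key, Iterable<IntWritable> values, Context context)
            throws IOException, InterruptedException {
        int sum = 0;
        for (IntWritable value : values) {
            sum += value.get();                    // accumulate the per-URL counts from the mapper
        }
        context.write(new IntWritable(sum), key);  // output types follow the driver's setOutputKeyClass/setOutputValueClass
    }
}
```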
<file_sep>/src/main/java/com/hortonworks/mapreduce/URLCount.java
/*
* Copyright 2016 aervits.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hortonworks.mapreduce;
/**
*
* @author aervits
*/
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
public class URLCount extends Configured implements Tool {
public static void main(String[] args) throws Exception {
int res = ToolRunner.run(new Configuration(), new URLCount(), args);
System.exit(res);
}
@Override
public int run(String[] args) throws Exception {
Configuration conf = this.getConf();
conf.set("mapreduce.input.keyvaluelinerecordreader.key.value.separator", " ");
Job job = Job.getInstance(conf, "URLCount");
job.setJarByClass(getClass());
job.setInputFormatClass(KeyValueTextInputFormat.class);
job.setOutputFormatClass(TextOutputFormat.class);
job.setMapperClass(URLCountM.class);
job.setReducerClass(URLCountR.class);
job.setMapOutputKeyClass(Text.class);
job.setMapOutputValueClass(IntWritable.class);
job.setOutputKeyClass(IntWritable.class);
job.setOutputValueClass(Text.class);
FileInputFormat.addInputPath(job, new Path(args[0]));
FileOutputFormat.setOutputPath(job, new Path(args[1]));
return (job.waitForCompletion(true) == true ? 0 : -1);
}
}
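/*
 * Note: URLCountR (the reducer) is referenced above but its source is not included
 * in this excerpt. Based on the driver's declared map and job output types, a minimal
 * sketch (an assumption, not the author's actual code) could look like:
 *
 *   public class URLCountR extends Reducer<Text, IntWritable, IntWritable, Text> {
 *     @Override
 *     public void reduce(Text key, Iterable<IntWritable> values, Context context)
 *         throws IOException, InterruptedException {
 *       int sum = 0;
 *       for (IntWritable v : values) { sum += v.get(); }
 *       context.write(new IntWritable(sum), key);
 *     }
 *   }
 */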
<file_sep>/src/main/java/com/hortonworks/mapreduce/URLCountM.java
/*
* Copyright 2016 aervits.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hortonworks.mapreduce;
import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
/**
*
* @author aervits
*/
public class URLCountM extends Mapper<Text, Text,Text, IntWritable> {
private static final Logger LOG = Logger.getLogger(URLCountM.class.getName());
public final IntWritable iw = new IntWritable();
@Override
public void map(Text key, Text value, Context context){
try{
LOG.log(Level.INFO, "MAPPER_KEY: ".concat(key.toString()).concat(" MAPPER_VALUE: ".concat(value.toString())));
      // reuse the pre-allocated IntWritable field instead of creating a new one per record
      iw.set(Integer.parseInt(value.toString()));
      context.write(key, iw);
}
catch(NumberFormatException | IOException | InterruptedException e){
LOG.log(Level.SEVERE, "ERROR: ".concat(e.toString()));
}
}
}
|
85cb4ffb612fdb2708c30fc6a48d434d7a01ad6d
|
[
"Markdown",
"Java"
] | 3
|
Markdown
|
sahanadarsh/URLCount
|
02c4a783747303e75762dacf31a6ce1854e12505
|
395c3922ebe0a351246a8b915e0d3996edca6f46
|
refs/heads/master
|
<file_sep>class Form{
constructor(){
}
display(){
var title = createElement ('h2');
title.html("car racing game");
title.position(130,0);
var input = createInput("name");
var button = createButton('play');
input.position(130,60);
button.position(200,200);
button.mousePressed(function(){
input.hide();
button.hide();
      // note: these values are read but immediately discarded; capture input.value() here if the player's name is needed later
      input.value();
      button.value();
});
}
}
|
3e7ee1a72b52b98bf4031cecb49e650027fd57d3
|
[
"JavaScript"
] | 1
|
JavaScript
|
kautik10111/C-35
|
017378935ed8788aa88469a10ec4adc14bf9959d
|
6e99b1957b5f4a937e924a7c37378b2b0586276b
|
refs/heads/master
|
<repo_name>ncdc/nodejs-0-10-centos<file_sep>/Dockerfile
# nodejs-0-10-ruby
#
FROM centos:centos6
MAINTAINER <NAME> <<EMAIL>>
# Pull in updates and install nodejs
RUN yum install --assumeyes centos-release-SCL && ( \
echo "update"; \
echo "install gettext tar which"; \
echo "install gcc-c++ automake autoconf curl-devel openssl-devel"; \
echo "install zlib-devel libxslt-devel libxml2-devel"; \
echo "install mysql-libs mysql-devel postgresql-devel sqlite-devel"; \
echo "install nodejs010-nodejs nodejs010-npm"; \
echo "run" ) | yum shell --assumeyes && yum clean all --assumeyes
# Add configuration files, bashrc and other tweaks
ADD ./nodejs /opt/nodejs/
# Default STI scripts url
ENV STI_SCRIPTS_URL https://raw.githubusercontent.com/ncdc/nodejs-0-10-centos/master/.sti/bin
# Set up the nodejs directories & scripts
RUN mkdir -p /opt/nodejs/{run,src} && \
mv -f /opt/nodejs/bin/node /usr/bin/node && \
mv -f /opt/nodejs/bin/npm /usr/bin/npm
ENV APP_ROOT .
ENV HOME /opt/nodejs
ENV PATH $HOME/bin:$PATH
WORKDIR /opt/nodejs/src
EXPOSE 3000
# Display STI usage when invoked outside STI builder
#
CMD ["/opt/nodejs/bin/usage"]
<file_sep>/.sti/bin/save-artifacts
#!/bin/bash -e
echo "---> Saving build-artifacts"
pushd /opt/nodejs >/dev/null
if [ -d ./src/node_modules ]; then
tar zcf /tmp/artifacts/artifacts.tar.gz src/node_modules
fi
popd >/dev/null
<file_sep>/nodejs/bin/usage
#!/bin/sh
cat <<EOF
This is a Source To Image (STI) builder image.
To use it, install STI: https://github.com/openshift/docker-source-to-images.
Sample invocation:
sti build https://github.com/ryanj/node-echo.git openshift/nodejs-0-10-centos nodejs-echo-app
You can then run the resulting image via:
docker run -p 3000:3000 nodejs-echo-app
EOF
<file_sep>/nodejs/.bashrc
# Enable NodeJS collections
#
source /opt/rh/nodejs010/enable
export X_SCLS="`scl enable nodejs010 'echo $X_SCLS'`"
|
f12f18b1c2d65176d7d42bff989415e17d32df20
|
[
"Dockerfile",
"Shell"
] | 4
|
Dockerfile
|
ncdc/nodejs-0-10-centos
|
37337b7b65a43c1aef6a213e4ce14abc47c2f369
|
1783d8de9c12aa08d7ae63cc9cff56f2f5579b7c
|
refs/heads/master
|
<repo_name>sobngwi/java7tostream<file_sep>/src/main/java/org/sobngwi/java7to8/s2/after/Sample.java
package org.sobngwi.java7to8.s2.after;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.IntStream;
import java.util.stream.Stream;
public class Sample {
public static Boolean isPrime(int number) {
return number > 1 &&
IntStream.range(2, number)
.noneMatch(index -> number % index == 0);
}
public static void main(String[] args) {
List<Double> sqrtOfFirst100Primes =
Stream.iterate(1, e -> e + 1)
.filter(Sample::isPrime)
.mapToDouble(Math::sqrt)
.limit(100)
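                        // mutable reduction over the DoubleStream: supplier, accumulator, combiner
                        // (each double is boxed into the ArrayList<Double>)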
.collect(ArrayList::new, ArrayList::add, ArrayList::addAll) ;
System.out.println(
                String.format("Computed %d values, first is %g, last is %g",
sqrtOfFirst100Primes.size(),
sqrtOfFirst100Primes.get(0),
sqrtOfFirst100Primes.get(sqrtOfFirst100Primes.size() - 1)));
Stream.iterate(1, e -> e + 1)
.filter(Sample::isPrime)
.mapToDouble(Math::sqrt)
.limit(100000)
.forEach(System.out::println);
}
}
|
d5d91b3e56bb31a67d95849272fc4c8e1f600309
|
[
"Java"
] | 1
|
Java
|
sobngwi/java7tostream
|
39669d2139ecdcc6f7384bef67db51d7ba3dbd38
|
0aa4e4f26321e9885dd8d7d23e9025c319ee35f5
|
refs/heads/master
|
<file_sep>from flask import Flask, jsonify
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
import json
# sudo apt-get install libapache2-mod-wsgi-py3
# sudo apt-get install python3-flask python3-sqlalchemy python3-psycopg2
app = Flask(__name__)
# Set up database
engine = create_engine('postgres://consulta:selectconsulta@localhost:5432/museu')
db = scoped_session(sessionmaker(bind=engine))
@app.route("/objeto/<string:id>")
def objeto(id):
objeto = db.execute("SELECT * from objetos where id_objeto = :id ;", {"id": id}).fetchone()
imagens = db.execute("SELECT caminho from imagens where ref_objeto = :id ",{"id":id}).fetchall()
imagens_lista = []
for linha in imagens:
imagens_lista.append(linha[0])
return json.dumps({'nome_objeto':objeto.nome_objeto,'audiodescricao': objeto.audiodescricao ,'descricao_intrinsica': objeto.descricao_intrisica,'descricao_extrinsica': objeto.descricao_extrinsica, 'imagens': imagens_lista })
if __name__ == "__main__":
app.run()
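# Example (hypothetical data): GET /objeto/1 returns JSON shaped like
# {"nome_objeto": "...", "audiodescricao": "...", "descricao_intrinsica": "...",
#  "descricao_extrinsica": "...", "imagens": ["/caminho/imagem1.jpg"]}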
<file_sep>#!/usr/bin/python
import sys
import logging
logging.basicConfig(stream=sys.stderr)
sys.path.insert(0, '/var/www/api_museu')
from flk import app as application
|
75e1a581f3b97a5c91410dc19b0d28c54684e26e
|
[
"Python"
] | 2
|
Python
|
nantius/api_museu
|
0d1a23239453b09c5481990994f9c418c13aca0b
|
9648b57a27d6fb5472c7c17d3fca91478f8604cc
|
refs/heads/master
|
<file_sep>[Walkthrough: Creating a Windows Service Application in the Component Designer](https://docs.microsoft.com/en-us/dotnet/framework/windows-services/walkthrough-creating-a-windows-service-application-in-the-component-designer)
MyNewService.cs vs MyNewService.designer.cs
>A service application is designed to be long-running, so it usually polls or monitors something in the system. The monitoring is set up in the OnStart method. However, OnStart doesn’t actually do the monitoring. The OnStart method must return to the operating system after the service's operation has begun. It must not loop forever or block.
<file_sep># Continuous Integration/Continuous Deployment
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Newtonsoft.Json;
namespace WeatherMicroservice
{
public static class Extensions
{
//Parsing the Query String.
//bool TryParse(string s, out double result);
public static double? TryParse(this string input)
{
double result;
if (double.TryParse(input, out result))
{
return result;
}
else
{
return default(double?);
}
}
}
    //data container object to hold values for weather forecast
public class WeatherReport
{
//Constructor for random generation
public WeatherReport(double latitude, double longitude, int daysInFuture)
{
var generator = new Random((int)(latitude + longitude) + daysInFuture);
HiTemperature = generator.Next(40, 100);
LoTemperature = generator.Next(0, HiTemperature);
AverageWindSpeed = generator.Next(0, 45);
Conditions = PossibleConditions[generator.Next(0, PossibleConditions.Length - 1)];
}
private static readonly string[] PossibleConditions = new string[]
{
"Sunny",
"Mostly Sunny",
"Partly Sunny",
"Partly Cloudy",
"Mostly Cloudy",
"Rain"
};
public int HiTemperature { get; }
public int LoTemperature { get; }
public int AverageWindSpeed { get; }
public string Conditions { get; }
}
public class Startup
{
// This method gets called by the runtime. Use this method to add services to the container.
// For more information on how to configure your application, visit https://go.microsoft.com/fwlink/?LinkID=398940
public void ConfigureServices(IServiceCollection services)
{
}
// This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
public void Configure(IApplicationBuilder app, IHostingEnvironment env, ILoggerFactory loggerFactory)
{
loggerFactory.AddConsole();
if (env.IsDevelopment())
{
app.UseDeveloperExceptionPage();
}
//context is the HttpContext for request
app.Run(async (context) =>
{
//http://localhost:5000/?lat=-35.55&long=-12.35
String latString = context.Request.Query["lat"].FirstOrDefault(); //StringValue type, FOD() returns String
String longString = context.Request.Query["long"].FirstOrDefault();
var latitude = latString.TryParse();
var longitude = longString.TryParse();
if (latitude.HasValue && longitude.HasValue)
{
var forecast = new List<WeatherReport>();
for (var days = 1; days < 6; days++)
{
forecast.Add(new WeatherReport(latitude.Value, longitude.Value, days));
}
var json = JsonConvert.SerializeObject(forecast, Formatting.Indented);
context.Response.ContentType = "application/json; charset=utf-8";
await context.Response.WriteAsync(json);
}
//await context.Response.WriteAsync($"Weather for lat: {latitude}, long: {longitude}");
});
}
}
}
<file_sep>```
npm init # created package.json
npm install --save react # created package-lock.json & node_modules react depends on loose-envify depends on js-tokens
npm install react-dom --save # added react-dom depends on scheduler & loose-envify
```<file_sep># C# WEB API 2 TUTORIAL
*[from Microsoft Docs](https://docs.microsoft.com/en-us/aspnet/web-api/overview/getting-started-with-aspnet-web-api/tutorial-your-first-web-api)*
Goal of this tutorial was to understand more about APIs (Application Programming Interface) and familiarize myself with working in Visual Studio and C# code as it will be utilized at my current employment.
### Procedure with Notes
- Opened VS and created a new empty ASP.NET Web Application under Visual C# > Web with folders and core references for Web API.
*Compared to a new empty ASP.NET Web Application without the WebAPI folders and core references, this contained an additional 4 folders (App_Data, App_Start, Controllers, Models) and a "Global.asax" file. Under the References folder it added: System.Web.Http, System.Web.Http.Webhost, System.Net.HttpFormatting, System.Net.Http and Newtonsoft.Json. In App_Start there was a WebApiConfig.cs file with a Register method that appeared to be a router configuration with the default "api/{controller}/{id}" template.*
- Added a (Model) Class in the Models folder.
*The class lived in a namespace, <ApplicationName>.Models. (All code seems to be wrapped in a namespace.) I placed a class in the namespace. The class contained 4 properties (AutoProperties). This file appears to be a blueprint for the data object.*
- Added an Empty Web API Controller in the Controllers folder. Named it "ProductsController".
*Tutorial said to copy and paste code. The code was again inside a nested namespace and contained one class that [inherits from](https://docs.microsoft.com/en-us/dotnet/csharp/tutorials/inheritance) (:) the ApiController class. The class consisted of two methods and a field. The field used the model we created earlier as the object where the data is stored (in a real application this is where a database/external data source would come in, but we hard-coded values). One of the methods returned an IEnumerable<Product> with all the products and the other returned an IHttpActionResult for the product with the given id.*
- Added a simple UI (HTML page to the project) to interact with the API.
*This part was just copy and pasted, and they did explain more in the tutorial, but I was not particularly interested in what the front-end was doing to render data, rather in how it was getting the results. How did the API know which of the two methods I was referring to when I hit api/products? [It was all in the default WebApiConfig.cs file.](https://docs.microsoft.com/en-us/aspnet/web-api/overview/web-api-routing-and-actions/routing-in-aspnet-web-api) It appears they just matched the id to the action (what they call the methods in the controller) parameter to determine which to use; however, you could also match on the api/{controller}/{action}/{id} pattern to be more specific. (A sketch of the default route registration is included at the end of this section.)*
>After some playing around I found that you must name it <controllername>Controller, and in the WebApiConfig it will pick up <controllername> as part of the route. That is how the controller name was set. There should be a way around this; seems strict for a programming language... (but wait... it's a framework? <.NET>)
> Just to confirm my theory that it matched {id} to the action parameter, I made both actions in the controller have an int parameter; sure enough, I got this exception message: Multiple actions were found that match the request: GetAllProducts on type TestWebApplication.Controllers.ProductzController GetProduct on type TestWebApplication.Controllers.ProductzController. I also got the same error changing int to string on one of them. So it appears it only counted the number of parameters.
> Changing the WebApiConfig file to api/{controller}/{action}/{id} did automatically take the methods in the controller as an action name.
- Checked out the browser's developer tools to view HTTP traffic.
*This part wasn't mandatory, but good to help understand a bit more if you haven't already. Could see the request and response headers to see what was sent and what was received; did not go into this too much. Also noted that it is browser functionality to serialize data to JSON? Recommended Fiddler for debugging, another day.*
*The file structure/organization and naming conventions all seem to be just strong recommendations. Interesting to see all of the recommended optional coding styles, though I don't fully understand the reasoning for all of it yet due to lack of experience.*
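Based on the routing behavior described above, the default route registration in the WebApiConfig.cs Register method presumably looks roughly like this (a sketch from memory of the Web API 2 template, not copied from my project):

```c
using System.Web.Http;

public static class WebApiConfig
{
    public static void Register(HttpConfiguration config)
    {
        config.MapHttpAttributeRoutes();

        // the default template the tutorial relied on
        config.Routes.MapHttpRoute(
            name: "DefaultApi",
            routeTemplate: "api/{controller}/{id}",
            defaults: new { id = RouteParameter.Optional }
        );
    }
}
```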
### Unknown Terms (Research Notes)
*(Definitions from research are only as accurate as I understand them to be, some copy-pasta and references; Please correct me if I am wrong.)*
- **(Global) ASAX file:** ASAX (Active Server Page) files are specific to .NET framework. [Global.asax](https://msdn.microsoft.com/en-us/library/1xaas8a2(v=vs.71).aspx) acts as an optional event handler for "application-level" events from ASP.Next or HttpModules (1 request to your application = 1 HTTP module call). Derived from HttpApplication base class. Not available externally.
- **Namespace:** Named space where your application resides. Purpose is to provide C# compiler with context for named information in program file. Namespaces are in References as well as your source folders. They can be accessed with "using" keyword or typing the name.path. Nested namespaces are also a thing usually with companies with product series, I used nested namespace in the tutorial.
```c
namespace TestWebApplication.Models
```
- **Properties and Fields:** [Properties expose fields.](https://stackoverflow.com/questions/295104/what-is-the-difference-between-a-field-and-a-property-in-c) Used AutoProperties in Model class (started at C# 3.0) which generates private field for you. Fields stores the actual data. [Properties](https://docs.microsoft.com/en-us/dotnet/csharp/programming-guide/classes-and-structs/using-properties) have get and set and sometimes can do calculations or become [method-like](https://msdn.microsoft.com/en-us/library/ms229054(v=vs.100).aspx). Property can only have 2 code blocks at a maximum, get and/or set. Properties are not variables.
- **(Web Debugging) Proxy:** This is what Fiddler is. A [proxy (server)](http://whatismyipaddress.com/proxy-server) is just something that acts like a middleman/broker. So before the traffic comes in, it will go to the proxy first, or before it leaves, it will go through the proxy first.
### Rabbitholes
- [Contollers and Conventions:](https://www.red-gate.com/simple-talk/dotnet/asp-net/asp-net-mvc-controllers-and-conventions/)
> "For each request that hits the site, ASP.NET MVC figures out the name of the controller class that will handle the request and manages to create a new instance of that class."
### Un-concrete Personal Observations
- Building the solution output a DLL file and the API was not running. Running would also serve the index.html file. At work the APIs were running services. Maybe the other layers pull in the DLL files and call them, creating an instance? (Lack of better wording for now.) *How do I run just the API server so I could make calls to it? Is the DLL file the contents of the API?*
> Went to work the next day and asked a little about what "application-level" means for an API; someone mentioned the [OSI model](https://en.wikipedia.org/wiki/OSI_model), which made sense to me. Also, the answer was that I had no server. At work IIS was serving the DLL, making it the API I knew of. I did not have any server serving my API in this tutorial, but when I ran it with the index, I guess it took the methods from the DLL file?
<file_sep>
function Todo() {
return (
<div className='card'>
<h2>TITLE</h2>
<div className='card-actions'>
<button className='button'>Delete</button>
</div>
</div>
);
}
export default Todo;<file_sep>### Reads
- [Web Service VS Windows Service](https://stackoverflow.com/questions/1033570/what-are-the-differences-between-a-web-service-and-a-windows-service)
>A Windows service is an application that runs without a user being logged into the system, usually to process some data on the machine that needs no user intervention to work with.
>A Web service is a website that, when contacted, returns XML (typically) in one of several standard formats for the service consumer to process.
>One can't be substituted for the other. They are fundamentally different.
<file_sep># [Getting Started on Heroku with Node.js](https://devcenter.heroku.com/articles/getting-started-with-nodejs#set-up)
[Getting Started with Heroku Portal](https://dashboard.heroku.com/apps)
### Procedure with Notes
- Installed Heroku CLI(Toolbox)
- `heroku login`
- `git clone https://github.com/heroku/node-js-getting-started.git`
- `cd node-js-getting-started`
- `heroku create`
- `git push heroku master`
```bash
remote: Verifying deploy... done.
To https://git.heroku.com/ancient-retreat-91601.git
```
- `heroku ps:scale web=1`
```bash
Scaling dynos... done, now running web at 1:Free
```
- `heroku open` -- This failed so I had to open the launched url in browser.
- `heroku logs --tail` -- Streams the logs... probably in a log file somewhere. Noticed a variable called 'dyno'.
- Looking at [Procfile](https://devcenter.heroku.com/articles/getting-started-with-nodejs#define-a-procfile), which explicitly declares what commands to execute to start app. Reminds me of the config file for TravisCI, travis.yml.
- `heroku ps:scale web=0` -- The 'dyno' is compared to a lightweight container that runs the command in the Procfile. Web dynos serve requests. Can run `heroku ps` to see how many 'dynos' are running.
- `npm install`
- `heroku local web` -- changed to port 5005 because 5000 was oddly not rendering... `heroku local` uses the code I pushed to it.
- Edited the index.js file with a new installation of `cool-ascii-faces`. note: Demo code was optimized and the node tutorial was not. Cool to see the code optimizations.
- `heroku local` -- still displayed "Listening on port 5000" when I changed the hard-coded 5000 to 5005, and it ran when I used `node index`, meaning that when I run locally with Heroku it sets the `process.env.PORT` variable to 5000, but node does not. So I commented that out since, for some odd reason, port 5000 wasn't working on my machine. (Blank page even when not running `heroku local`; some process is probably using it... Also found you get "ERR_CONNECTION_REFUSED" when no one is listening on the port, and port 6000 gives ERR_UNSAFE_PORT.)
- `git add`, `git commit -m "Demo"`, `git push heroku master`, `heroku open cool` -- There was an issue and it told me to read the logs, but I knew this was because I took out the PORT = process.env.PORT and hardcoded it to 5005. This must be a security thing for heroku where their server only allowed me to listen on that process.env.PORT port that they set. `ᕙ༼ຈل͜ຈ༽ᕗ` Appropriate emoji returned.
### Unknown Terms (Research Notes... ~~May be~~ tangents)
- [**PostScript/Type 1:**](https://en.wikipedia.org/wiki/PostScript) Page description language by Adobe Systems. (.ps extension) Font format. Alternatives: [TrueType/OpenType](https://www.fonts.com/support/faq/font-formats)
- [**CRLF, LF, CR:**](https://stackoverflow.com/questions/1552749/difference-between-cr-lf-lf-and-cr-line-break-types) Line break types for Windows, Unix and Mac.
- **brownout:** Restriction of service. [Heroku brownout](https://status.heroku.com/incidents/1193)
> Heroku will be performing a "brownout" of the V2 Legacy API. This is in preparation for the complete shutdown of the V2 Legacy API scheduled for Monday, June 26th, 2017. Please see our API v2 sunset announcement for more information. During this time, requests to the V2 Legacy API will respond with 410 Gone.
- **infrastructure:** [Framework.](http://searchdatacenter.techtarget.com/definition/infrastructure) Cloud infrastructure. A software layer on top of which you build. IaaS. Between the application layer and the physical server. Software on the server?
- [Heroku Procfile](https://devcenter.heroku.com/articles/procfile)
> A Procfile is a mechanism for declaring what commands are run by your application’s dynos on the Heroku platform.
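For this sample app the Procfile is a single line, typically something like:

```
web: node index.js
```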
### Rabbitholes
- [Heroku HTTP Routing](https://devcenter.heroku.com/articles/http-routing)
> It declares that this process type will be attached to the HTTP routing stack of Heroku, and receive web traffic when deployed.
- **Chrome Unsafe Ports:** Tried to use port 6000 and found it threw an odd error. [List of unsafe ports](https://superuser.com/questions/188058/which-ports-are-considered-unsafe-on-chrome), and the [reason](https://jazzy.id.au/2012/08/23/why_does_chrome_consider_some_ports_unsafe.html).
> Open proxies are considered quite dangerous. No piece of software should allow itself to be used as an open proxy. It's not so much about Chrome preventing malicious activity, as it is about Chrome ensuring that it is not a hole in an otherwise secured network. The problem here is the design of HTML/HTTP, where browsers are so obliging in opening arbitrary connections to anything and sending data on behalf of an attacker who is not even on the same network. That is a fundamental security issue that should be addressed.
### Found Blogger
[**<NAME>**](https://jazzy.id.au/): "Hi! My name is <NAME>, and I am a software developer with a particular interest in open source development and trying new things. I program in Scala, Java, PHP, Python and Javascript, and I work for Lightbend as a developer on Lagom."
<file_sep># Drone
## Pipeline Configuration File
Drone's configuration file is `.drone.yml`; there are two versions I've encountered: version 0.8 and version 1.0. These notes focus on version 1.0.
The configuration file may consist of the `kind`, `name`, `steps`, and `platform` at the root-level.
#### Steps
You can add the [`steps`](https://docs.drone.io/user-guide/pipeline/steps/) at the top-level. Each `step` may consist of `name`, `image`, `commands`, `when`, `settings` (used for plugins afaik).
> Each step starts a new container that includes a clone of your repository, and then runs the contents of your commands section inside it.
Each step is listed under the `steps` prop and is separated by a `-` before its `name` (see the sketch after the table below).
| Prop | Known Enums |
| ------------- |:-------------:|
| `image` | `golang`, `node`, `plugins/slack` (*Plugins) |
| `arch` | `amd64`, `arm64` |
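Putting the root-level props and a single step together, a minimal version 1.0 pipeline might look like this (a sketch, not taken from a real project):

```yaml
kind: pipeline
name: default

platform:
  os: linux
  arch: amd64

steps:
- name: build
  image: golang
  commands:
  - go build
  - go test
```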
#### Plugins
> Plugins are *docker containers* that encapsulate commands, and can be shared and re-used in your pipeline. Examples of plugins include sending Slack notifications, building and publishing Docker images, and uploading artifacts to S3.
#### Platform
Defines the operating system and architecture for the container the steps will run on. May consist of `os` and `arch`.
| Prop | Known Enums |
| ------------- |:-------------:|
| `os` | `windows`, `linux` |
| `arch` | `amd64`, `arm64` |<file_sep># TutorialProjects
*A git repo of all the tutorial projects I have done.*
### Table of Contents *(literally)*
| FileName | Description | Date |
| --- | ----------- | ---- |
| **React/KPUDEMYREACT** | | 1/04/2023
| **CSharp/CSWEBAPI2** | Just a **basic API in C# (.NET framework)** to warm up to it. Uses a Controller and a Model | 11/08/2017
| **CSharp/CSMICROSERVICE** | | 11/13/2017
### Tutorial Queue (Not filtered yet)
*Tutorial selection based on topics that are relevant to current employment to increase productivity for work and life.*<file_sep># MICROSERVICES HOSTED IN DOCKER
*[from Microsoft](https://docs.microsoft.com/en-us/dotnet/csharp/tutorials/microservices)*
Goal of this tutorial was to understand more about microservices and familiarize myself with working in Visual Studio and C# code as it will be utilized at my current employment.
### Difficulties
This tutorial required Docker; the link in the tutorial points you to Docker CE, which requires Hyper-V on Windows, which I do not have since I am running on a home machine. So I had to install [Docker Toolbox](https://www.docker.com/products/docker-toolbox) with VirtualBox instead.
I had to change the version in the global.json file from the default
```javascript
{
"sdk": {
"version": "1.0.0-rc4-004771"
}
}
```
to my current dotnet version which was found with the following command
```bash
$ dotnet --info
The specified SDK version [1.0.0-rc4-004771] from global.json [C:\Users\<NAME>\TutorialProjects\CSharp\CSMICROSERVICE\global.json] not found; install specified SDK version
Microsoft .NET Core Shared Framework Host
Version : 2.0.0
Build : e8b8861ac7faf042c87a5c2f9f2d04c98b69f28d
```
The current version happened to be 2.0.0, so I put that in my global.json. Reading about .NET, things are usually backwards compatible, so I am hoping that is the case for this too.
note to self: read first. check dates for most current.
Reading more into the tutorial it does have a note that says the following:
> Note: Starting with .NET Core 2.0, you do not have to run dotnet restore because it is run implicitly as part of dotnet build or dotnet run. It is still a valid command in certain scenarios where doing an explicit restore makes sense, such as continuous integration builds in Visual Studio Team Services.
Which is wonderful they didn't put it sooner, or maybe I should read ahead.
**Actually this dotnet run failed and I looked more into the [generator](https://github.com/OmniSharp/generator-aspnet)... It appears that by default it uses the lts version. The correct command to run for this was: `yo aspnet --version-current`**
This doesn't work either, because this scaffold is old; unfortunate. Ended up using `dotnet new`. [More on it here.](https://blogs.msdn.microsoft.com/dotnet/2017/08/14/announcing-net-core-2-0/)... [(WeatherMicroservice_dotnetnew)](https://github.com/cheriejw/TutorialProjects/tree/master/CSharp/CSMICROSERVICE/WeatherMicroservice_dotnetnew) and, because I really wanted to do the tutorial, I also... ended up installing [the .NET version](https://github.com/dotnet/core/blob/master/release-notes/download-archives/rc4-download.md) that was available a few days after this tutorial was published. [(WeatherMicroservice)](https://github.com/cheriejw/TutorialProjects/tree/master/CSharp/CSMICROSERVICE/WeatherMicroservice)
> Starting with .NET Core 2.0, you don't have to run dotnet restore because it's run implicitly as part of dotnet build or dotnet run. It's still a valid command in certain scenarios where doing an explicit restore makes sense, such as continuous integration builds in Visual Studio Team Services.
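For reference, the `dotnet new` route boils down to something like the following (commands reconstructed after the fact; the exact template and output folder names are assumptions):

```bash
dotnet new web -o WeatherMicroservice_dotnetnew
cd WeatherMicroservice_dotnetnew
dotnet build
dotnet run
```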
### Unknown Terms (Research Notes)
*(Definitions from research are only as accurate as I understand them to be, some copy-pasta and references; Please correct me if I am wrong.)*
- [**Protobuf:**](https://en.wikipedia.org/wiki/Protocol_Buffers) a way of serializing structured data. Designed by Google to be smaller and faster than XML.
### Rabbitholes
- [Monolithic vs. Microservices:](https://articles.microservices.com/monolithic-vs-microservices-architecture-5c4848858f59)
> "...it is important to understand Monolithic architecture since it is the basis for microservices architecture where each service by itself is implemented according to monolithic architecture."
- [Microservices with C and RabbitMQ](https://insidethecpu.com/2015/05/22/microservices-with-c-and-rabbitmq/)
> "In its simplest form, a Microservice consists of an event-listener and a message-dispatcher. The event-listener polls a service-bus – generally a durable message-queue – and handles incoming messages. Messages consist of instructions bound in metadata and encoded in a data-interchange format such as JSON, or Protobuf."
<file_sep>After running `dotnet run` after the `dotnet build` and `dotnet restore`, I could visit http://localhost:5000 to see my expected 'hello world' scaffold app.
Speaking of the Startup.cs file and the project.json file (obj/project.assets.json):
> The ConfigureServices method describes the services that are necessary for this application. You're building a lean microservice, so it doesn't need to configure any dependencies. **The Configure method configures the handlers for incoming HTTP Requests.**
Went into extension nullable methods which reminded me of using the [Convert.toString() vs string.toString()](https://stackoverflow.com/questions/2828154/difference-between-convert-tostring-and-tostring) I encountered today.
I had `dotnet run` serving localhost at port 5000 and tried to stop it and rebuild; it continued to run in the background and I had to go to Task Manager to close the process: 'dotnet'. *Rebuild was required to test the new changes after completing the parsing step.*
I got an odd error: `Failed to create prime the NuGet cache. new failed with: -2147352571`, which upon Googling led me to [this](https://github.com/dotnet/cli/issues/7812); the global.json file may be unnecessary. The error was not stopping me from running the application in any case.
Running with no query does not crash the app, likely because of the nullable. In the convergence application at work, we used an IHttpContext to "program network communications".
Found the bug again where, after hitting Ctrl+C, the process was still running in the background.
Had to run `dotnet restore` again after adding the NuGet package for JSON parsing, then `dotnet build`.
> Starting with .NET Core 2.0, you don't have to run dotnet restore because it's run implicitly as part of dotnet build or dotnet run
`using Newtonsoft.Json;`
### Unknown Terms (Research Notes)
**Convert.toString() vs .toString() :**
> "In most cases Convert will call ToString on the value but in the general case of calling it on an object Convert actually defers to IConvertible if it is implemented. This is important because IConvertible does not generally map to ToString. ToString is generally used for a quick and friendly string representation of an object but IConvertible is designed for conversion so the output can vary.
> Additionally Convert generally calls the ToString overload of the primitives that pass the format provider. This means that, in some cases, the returned value of Convert will be culture-specific whereas the parameterless ToString() version may not."
[**Interpolated Strings :**](http://geekswithblogs.net/BlackRabbitCoder/archive/2015/03/26/c.net-little-wonders-string-interpolation-in-c-6.aspx) An cleaner way to create strings. [MSDN here.](https://docs.microsoft.com/en-us/dotnet/csharp/language-reference/keywords/interpolated-strings)
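A quick illustration of the difference (my own example, not from the linked article):

```c
var name = "world";
var formatted = string.Format("Hello, {0}!", name);
var interpolated = $"Hello, {name}!"; // same result, reads more naturally
```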
**Telemetry :** Telemetry is an automated communications process by which measurements and other data are collected at remote or inaccessible points and transmitted to receiving equipment for monitoring.
### Rabbitholes
- [Generic Interfaces](https://docs.microsoft.com/en-us/dotnet/csharp/programming-guide/generics/generic-interfaces)
### Un-concrete Personal Observations
- A microservice is an API?
### Work experiences in same timeline
- Enum Types, casting to int returns value.
|
e195b0d0bd43bdc9cd1a4f029799ef35e69483eb
|
[
"Markdown",
"C#",
"JavaScript"
] | 12
|
Markdown
|
cheriejw/TutorialProjects
|
91be6092be74a1f20b7551dff05c0687d02f751a
|
8497381143f21b55fe44fe1dfab50cc58f97fe68
|
refs/heads/master
|
<file_sep># -*- coding: utf-8 -*-
import ConfigParser
import argparse
from xml.dom import minidom
import sqlite3
from datetime import datetime
import time
import requests
import json
import sys
import feedparser
ADD_URL = 'https://getpocket.com/v3/add'
HEADERS = {'X-Accept': 'application/json', 'Content-Type': 'application/json'}
CREATE_DB_QUERY = '''CREATE TABLE IF NOT EXISTS subscriber
(scan_start_time TIMESTAMP NOT NULL,
scan_end_time TIMESTAMP NOT NULL,
new_count INT NOT NULL,
feeds_count INT NOT NULL,
status_string VARCHAR(32) NOT NULL,
status_extras MEDIUMTEXT NOT NULL)'''
LAST_CHECK_QUERY = '''SELECT scan_start_time
FROM subscriber
ORDER BY scan_start_time
DESC LIMIT 1'''
INSERT_QUERY = '''INSERT INTO subscriber VALUES(?, ?, ?, ?, ?, ?)'''
def pocket_add(data):
try:
r = requests.post(ADD_URL, headers=HEADERS,
data=json.dumps(data), timeout=5)
if r.status_code != 200:
print 'Error: {}'.format(r.headers.get('x-error'))
if r.headers.get('x-limit-user-remaining') == '0':
sleep = int(r.headers.get('x-limit-user-reset'))
if sleep:
print 'Wait {}sec'.format(sleep)
time.sleep(sleep)
except Exception as error:
print error
def is_outline(node):
if node.nodeType == node.ELEMENT_NODE and node.nodeName == 'outline':
return True
else:
return False
def getText(nodelist):
rc = []
for node in nodelist:
if node.nodeType == node.TEXT_NODE:
rc.append(node.data)
return ''.join(rc)
def date_created_oml(doc):
head = doc.getElementsByTagName('head')[0]
if head.getElementsByTagName('dateModified'):
date = head.getElementsByTagName('dateModified')[0]
else:
date = head.getElementsByTagName('dateCreated')[0]
date = getText(date.childNodes)
date = datetime.strptime(' '.join(date.split()[:-1]),
'%a, %d %b %Y %H:%M:%S')
return date
class Subscriber():
def __init__(self, _file, opml_file):
config = ConfigParser.ConfigParser()
config.readfp(_file)
self.sqlite_path = config.get('subscriber', 'sqlite_path')
if opml_file:
self.opml_path = opml_file
else:
self.opml_path = config.get('subscriber', 'opml_path')
self.key = config.get('subscriber', 'pocket_consumer_key')
self.token = config.get('subscriber', 'pocket_access_token')
def bd_last_check(self):
con = sqlite3.connect(self.sqlite_path)
cur = con.cursor()
try:
cur.execute(CREATE_DB_QUERY)
cur.execute(LAST_CHECK_QUERY)
last_check = cur.fetchone()
con.commit()
except Exception as error:
con.close()
print error
else:
            con.close()
            # fetchone() returns a 1-tuple (or None on the first run); unpack it so the
            # timestamp comparisons in rss_to_pocket() work against a number
            return last_check[0] if last_check else 0
def send_to_pocket(self, item):
try:
data = {"url": item.link, "title": item.title,
"consumer_key": self.key,
"access_token": self.token}
pocket_add(data)
except Exception as error:
print item, error
else:
print '+' * 10, data['url'], data['title']
self.new_count += 1
def rss_to_pocket(self, feeds):
for url in feeds:
k = 0
try:
rss = feedparser.parse(url)
except Exception as error:
self.errors['errors'].append({'feed': url, 'error': error})
else:
if 'title' in rss.feed:
print rss.feed.title
else:
print url
for item in rss.entries:
k += 1
if ('published_parsed' in rss.feed
and time.mktime(item.published_parsed) > self.last_run
                            or ('updated_parsed' in rss.feed
                                and time.mktime(item.updated_parsed) > self.last_run)):
self.send_to_pocket(item)
percents = '{}%'.format(int(k / (len(rss.entries)/100.0)))
sys.stdout.write(percents)
sys.stdout.flush()
sys.stdout.write('\b' * len(percents))
sys.stdout.write('\b' * 18)
def parse_outline(self, element):
sub_level = [node for node in element.childNodes if is_outline(node)]
if sub_level:
for node in sub_level:
self.parse_outline(node)
else:
self.feeds_count += 1
keys = element.attributes.keys()
if 'xmlUrl' in keys:
self.feeds.append(element.attributes['xmlUrl'].value)
else:
el_repr = ';'.join('{}:{}'.format(key, element.attributes[key].value) for key in keys)
self.errors['errors'].append({'feed': el_repr, 'error': 'No xmlUrl'})
def parse_opml(self):
doc = minidom.parse(self.opml_path)
self.feeds_count = 0
self.feeds = []
body = doc.getElementsByTagName('body')[0]
toplevel = [node for node in body.childNodes if is_outline(node)]
for el in toplevel:
self.parse_outline(el)
def write_database(self, scan_start_time, status_string, status_extras):
con = sqlite3.connect(self.sqlite_path)
cur = con.cursor()
scan_end_time = time.time()
try:
cur.execute(INSERT_QUERY, (scan_start_time, scan_end_time,
self.new_count, self.feeds_count,
status_string, status_extras))
con.commit()
except Exception as error:
print error
finally:
con.close()
def get_all_from_db(self):
db = sqlite3.connect(self.sqlite_path)
cur = db.cursor()
cur.execute('SELECT * FROM subscriber')
print cur.fetchall()
db.close()
def run(self):
self.last_run = self.bd_last_check()
scan_start_time = time.time()
self.new_count = 0
self.errors = {'errors': []}
self.parse_opml()
# print self.feeds
self.rss_to_pocket(self.feeds)
if self.errors['errors']:
status_extras = json.dumps(self.errors)
status_string = 'done with errors'
if self.new_count == 0:
status_string = 'failed'
else:
status_extras = ''
status_string = 'done'
self.write_database(scan_start_time, status_string, status_extras)
line = 'new_count: {}; feeds_count:{}; status_string:{};'
print line.format(self.new_count, self.feeds_count, status_string)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--conf', dest='config_file', required=True,
type=argparse.FileType(mode='r'))
parser.add_argument('--opml', dest='opml_file', type=str)
args = parser.parse_args()
Subscriber(args.config_file, opml_file=args.opml_file or None).run()
# Subscriber(args.config_file).get_all_from_db()
<file_sep># getpocket-subscriber
usage:
- install reqs: pip install -r requirements.txt
- modify config.cfg:
- sqlite_path - path to the sqlite db file that will store status info.
- opml_path - path to the OPML file to read.
- pocket_consumer_key - a key getpocket api needs to authenticate user.
- pocket_access_token - a token for getpocket api.
- run: python subcriber.py --conf config.cfg --opml opml.xml (optional)
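A sample config.cfg matching the keys the script reads (placeholder values):
```
[subscriber]
sqlite_path = /path/to/subscriber.db
opml_path = /path/to/opml.xml
pocket_consumer_key = <your consumer key>
pocket_access_token = <your access token>
```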
|
912be1f4843c4eae7f006c27a335063f39cf217b
|
[
"Markdown",
"Python"
] | 2
|
Python
|
pieceofchalk/getpocket-subscriber
|
584bfffa29c7acf4fd58c20d66b52c42b25663cb
|
46a4f45090c14b821163f7cbcf7d0dad63ec4855
|
refs/heads/master
|
<file_sep>import csv
import matplotlib.pyplot as plt
def worst_county(year, path):
counties = info
    #full list of county records loaded from the CSV
county_year_lst = []
    #iterates through the list
for county in counties:
        #checks to see if the county's "Year" value is equal to the year provided in the parameter
if county["Year"] == year:
#if so, appends it to the county_year_lst
county_year_lst.append(county)
#the worst county starts out as the first item in the list
worst_county = county_year_lst[0]
    #the minimum average starts as the mean of the male + female life expectancy for that county
min_avg = (worst_county["Male life expectancy (years)"] + worst_county["Female life expectancy (years)"]) / 2
average = 0
#iterates through the counties in the county_year_lst
for county in county_year_lst:
#looks at the current county and finds the average life expectancy of males + females
average = (county["Male life expectancy (years)"] + county["Female life expectancy (years)"]) / 2
        #if that county's average is lower than the current minimum average "min_avg"
if average < min_avg:
#the new min_avg equals the average for that county
min_avg = average
#and the worst county changes to become the info from that current county
worst_county = county
result = worst_county["State"] + " " + worst_county["County"]
return result.split(" ")
def plotdata(state, county, filename):
counties = info
#a list of the county's information from each year
county_each_year = []
#iterates through every county
for item in counties:
#checks to see if the current county "item"'s county equals the county parameter and also if the current county "item"'s state is equal to the state parameter
if item["County"] == county and item["State"] == state:
#if so, it appends that county's information "item" into the county_each_year list
county_each_year.append(item)
#sorts the counties by year
county_each_year.sort(key = lambda x:x["Year"])
#creates emply lists
years = []
#le means life expectancy
male_county_le = []
male_state_le = []
male_nation_le = []
female_county_le = []
female_state_le = []
female_nation_le = []
for item in county_each_year:
#appends each year to the year list and does similar to the following below with each corresponding list
years.append(item["Year"])
#cle means county life expectancy
#sle means state life expectancy
#nle means national life expectancy
male_cle = item["Male life expectancy (years)"]
male_sle = item["Male life expectancy (state, years)"]
male_nle = item["Male life expectancy (national, years)"]
male_county_le.append(male_cle)
male_state_le.append(male_sle)
male_nation_le.append(male_nle)
female_cle = item["Female life expectancy (years)"]
female_sle = item["Female life expectancy (state, years)"]
female_nle = item["Female life expectancy (national, years)"]
female_county_le.append(female_cle)
female_state_le.append(female_sle)
female_nation_le.append(female_nle)
#plotting
#RESOURCE TO PLOT: https://matplotlib.org/tutorials/introductory/usage.html#sphx-glr-tutorials-introductory-usage-py
#RESOURCE TO USE LINESTYLE: https://matplotlib.org/3.1.1/tutorials/intermediate/legend_guide.html
fig = plt.figure()
plt.plot(years,female_county_le, label="Female in County", linestyle=":")
plt.plot(years, female_state_le, label="Female in State", linestyle="--")
plt.plot(years, female_nation_le, label="Female in Nation", linestyle="-")
plt.plot(years,male_county_le, label="Male in County", linestyle=":")
plt.plot(years, male_state_le, label="Male in State", linestyle="--")
plt.plot(years, male_nation_le, label="Male in Nation", linestyle="-")
plt.xlabel("Years")
plt.ylabel("Life Expectancy")
plt.title("%s, %s: Life expectancy" % (county, state))
plt.legend()
plt.show()
#RESOURCE: https://chartio.com/resources/tutorials/how-to-save-a-plot-to-a-file-using-matplotlib/
fig.savefig("Assignent50_graph.png")
if __name__ == "__main__":
filename = "Assignment50/data.csv"
data = open(filename,"r")
info = list(csv.DictReader(data))
#making data easier to use
for county in info:
#capitalizes each word of each county's name as well as each word in each state
county["County"] = county["County"].title()
county["State"] = county["State"].title()
#turns string values that should be integers or floats into integers or floats
county["Year"] = int(county["Year"])
county["fips"] = int(county["fips"])
county["Female life expectancy (state, years)"] = float(county["Female life expectancy (state, years)"])
county["Female life expectancy (years)"] = float(county["Female life expectancy (years)"])
county["Female life expectancy (national, years)"] = float(county["Female life expectancy (national, years)"])
county["Male life expectancy (years)"] = float(county["Male life expectancy (years)"])
county['Male life expectancy (national, years)'] = float(county["Male life expectancy (national, years)"])
county["Male life expectancy (state, years)"] = float(county["Male life expectancy (state, years)"])
state,county = worst_county(2005,filename)
plotdata(state,county,filename)
|
811ba37932f521ec4bc8269342d80ebe0b5e95a3
|
[
"Python"
] | 1
|
Python
|
cjhayne/life-expectancy-per-county
|
5f12713c1a6ecc242e3e1462e8d0dcc12fc32829
|
f29cd32e0a90eebe83915c3a8102756aa86fca3e
|
refs/heads/master
|
<repo_name>SeifMostafa/seshat<file_sep>/app/src/main/java/com/example/l/seshatmvp/presenter/WordPresenter.java
package com.example.l.seshatmvp.presenter;
import android.util.Log;
import com.example.l.seshatmvp.repositories.WordsRepository;
import com.example.l.seshatmvp.view.MainActivityView;
import java.util.List;
import static android.content.ContentValues.TAG;
public class WordPresenter {
private MainActivityView mainActivityView;
private WordsRepository wordsRepository;
    //the constructor takes two parameters, the view and the repository, to connect the view with the data layer
public WordPresenter(MainActivityView mainActivityView, WordsRepository wordsRepository) {
this.mainActivityView = mainActivityView;
this.wordsRepository = wordsRepository;
}
//call wordRepository interface to read words from archive file
public void loadArchiveWords(){
List<String> words = wordsRepository.readArchiveWords();
if(words.isEmpty()){
mainActivityView.DisplayNoArchiveWords();
}else {
mainActivityView.DisplayArchiveWords(words);
}
}
    //calls the WordsRepository interface to save a word to the archive file
public void saveWordstoArchive(String word){
try {
wordsRepository.assignWordAsFinished(word);
Log.i(TAG, "saveWordstoArchive: Success");
}catch (Exception e){
e.printStackTrace();
}
}
}
<file_sep>/app/src/main/java/com/example/l/seshatmvp/repositories/imp/ImpLessonRepository.java
package com.example.l.seshatmvp.repositories.imp;
import android.content.Context;
import com.example.l.seshatmvp.Utils.SharedPreferenceUtils;
import com.example.l.seshatmvp.model.Word;
import com.example.l.seshatmvp.repositories.LessonRepository;
import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static com.example.l.seshatmvp.Utils.WordUtils.form_word;
public class ImpLessonRepository implements LessonRepository{
Context context;
FilesRepositoryImp filesRepositoryImp;
public ImpLessonRepository(Context context){
this.context = context;
filesRepositoryImp = new FilesRepositoryImp(context);
}
    //reads all lessons, and the words in each lesson, into the lessons HashMap
@Override
public Map<Integer, Word[]> getLessons() {
Map<Integer, Word[]> lessons = new HashMap<>();
try {
FileReader wordsReader = new FileReader(filesRepositoryImp.getWordFilePath());
FileReader phraseReader = new FileReader(filesRepositoryImp.getPhrasesFilePath());
BufferedReader WordsBufferedReader = new BufferedReader(wordsReader);
BufferedReader PhrasesBufferedReader = new BufferedReader(phraseReader);
String StringlessonCapacity = WordsBufferedReader.readLine();
if (StringlessonCapacity != null) {
int k = 1;
while (true) {
int lessonCapacity = Integer.parseInt(StringlessonCapacity);
Word[] lessonWords = new Word[lessonCapacity];
for (int i = 0; i < lessonCapacity; i++) {
String word_txt = WordsBufferedReader.readLine();
String phrase = PhrasesBufferedReader.readLine();
lessonWords[i] = form_word(word_txt, phrase, filesRepositoryImp.getSF());
}
lessons.put(k, lessonWords);
StringlessonCapacity = WordsBufferedReader.readLine();
k++;
if (StringlessonCapacity == null)
break;
}
}
wordsReader.close();
} catch (IOException e) {
e.printStackTrace();
return Collections.emptyMap();
}
return lessons;
}
}
<file_sep>/app/src/main/java/com/example/l/seshatmvp/layout/LessonFragment.java
package com.example.l.seshatmvp.layout;
import android.app.Fragment;
import android.content.Context;
import android.graphics.Typeface;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageButton;
import com.example.l.seshatmvp.MainActivity;
import com.example.l.seshatmvp.R;
import com.example.l.seshatmvp.UpdateWord;
import com.example.l.seshatmvp.Utils.SharedPreferenceUtils;
import com.example.l.seshatmvp.WordView;
import com.example.l.seshatmvp.model.Word;
import com.example.l.seshatmvp.presenter.WordPresenter;
import com.example.l.seshatmvp.repositories.imp.ImpWordsRepository;
import com.example.l.seshatmvp.view.MainActivityView;
public class LessonFragment extends Fragment implements UpdateWord{
public static final int RESULT_SPEECH = 177, WAIT2SayInstructions = 1000;
public static int DEFAULT_LOOP_COUNTER = 4;
public static int DEFAULT_TYPEFACE_LEVELS = 4;
public static String LessonFragment_TAG = "LessonFragment";
public static boolean phraseIsAnimated = false;
public static boolean wordIsAnimated = false;
public static boolean isPicked = false;
ImageButton helpiBtn, PreviBtn, NextiBtn, PlaySoundiBtn, DisplayImageiBtn;
WordView wordView_MainText = null;
Thread Thread_WordJourney = null;
LessonFragment instance;
private Boolean isPronunced = false;
private Boolean isWritten = false;
private Word[] words;
private Word word = null;
private int CurrentWordsArrayIndex = 0;
private Boolean firstTime = false;
private Context mContext;
WordPresenter wordPresenter;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
if (getArguments() != null) {
//getting lesson words when sending lesson key through openLessonFragment(int i)
words = (Word[]) getArguments().getParcelableArray(MainActivity.LessonKey);//will be null when calling animation
//getting text when sending word text through openLessonFragment(String word)
word = getArguments().getParcelable(MainActivity.WordKey);//will be not null when calling animation
//getting boolean to check if first time or not
firstTime = getArguments().getBoolean(MainActivity.firstTimekey);
//getting word index (till now always 0 for each lesson )
CurrentWordsArrayIndex = getArguments().getInt(MainActivity.WordIndexKey);
if (word == null && words != null) {
//filling when calling by openLessonFragment(int i)
word = words[CurrentWordsArrayIndex];
Log.i("onCreate", "from LessonFragment" + "word == null");
}
}
instance = this;
mContext = getActivity();
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container,
Bundle savedInstanceState) {
// Inflate the layout for this fragment
View view = inflater.inflate(R.layout.fragment_mai, container, false);
wordView_MainText = view.findViewById(R.id.textView_maintext);
wordView_MainText.setText(word.getText());
wordView_MainText.setmLessonFragment(this);
if (word.getFV() != null) {
//setting word guided vector in the wordView to help you check writing
wordView_MainText.setGuidedVector(word.getFV());
Log.d("LessonFragment", "FV = " + word.getFV());
} else if (!firstTime) {
//setting word from lessons' words
word = words[CurrentWordsArrayIndex];
//setting word guided vector in the wordView to help you check writing
wordView_MainText.setGuidedVector(word.getFV());
//putting text into wordView
wordView_MainText.setText(word.getText());
Log.d("LessonFragment", "FV = " + word.getFV());
}
helpiBtn = getActivity().findViewById(R.id.imagebutton_moreInfo);
helpiBtn.setOnClickListener(view15 -> {
Log.i("helpiBtn", "is clicked!");
try {
if (Thread_WordJourney != null) {
if (Thread_WordJourney.isAlive()) {
Thread_WordJourney.interrupt();
Log.i("helpiBtn", "is clicked!" + "Thread_WordJourney.is alive");
}
}
} catch (Throwable throwable) {
throwable.printStackTrace();
}
((MainActivity) getActivity()).OpenHelpFragment();
wordIsAnimated = false;
phraseIsAnimated = false;
});
PreviBtn = view.findViewById(R.id.imagebutton_prevword);
PreviBtn.setOnClickListener(view13 -> {
// request prev word
prevWordCall();
setPreviBtnVisibilty();
setNextiBtnVisibility();
});
NextiBtn = view.findViewById(R.id.imagebutton_skipword);
NextiBtn.setOnClickListener(view14 -> {
wordIsAnimated = false;
phraseIsAnimated = false;
// request nxt word
nextWordCall();
setPreviBtnVisibilty();
setNextiBtnVisibility();
});
PlaySoundiBtn = view.findViewById(R.id.imagebutton_soundhelp);
PlaySoundiBtn.setOnClickListener(view12 -> {
try {
//playing Audio file of word
((MainActivity) getActivity()).voiceoffer(PlaySoundiBtn, word.getText());
Log.i("PlaySoundiBtn", word.getText());
} catch (Exception e) {
e.printStackTrace();
Log.e("PlaySoundiBtn", e.toString());
}
});
DisplayImageiBtn = view.findViewById(R.id.imagebutton_photohelp);
DisplayImageiBtn.setOnClickListener(view1 -> {
try {
//displaying image file of word
((MainActivity) getActivity()).helpbypic(DisplayImageiBtn, word.getText());
Log.i("DisplayImageiBtn", word.getText());
} catch (Exception e) {
e.printStackTrace();
Log.e("DisplayImageiBtn", e.toString());
}
});
setNextiBtnVisibility();
setPreviBtnVisibilty();
return view;
}
@Override
public void onResume() {
super.onResume();
if (!firstTime && !wordIsAnimated) {
//word not animated yet
if (!phraseIsAnimated && words != null) {
//word's phrase not animated
((MainActivity) getActivity()).openAnimationFragment(word.getPhrase());
phraseIsAnimated = true;
} else {
//word phrase animated but word not animated
((MainActivity) getActivity()).openAnimationFragment(word.getText());
wordIsAnimated = true;
}
} else if (!firstTime && instance.isWritten &&/* instance.isPronunced &&*/ !isPicked) {
//word has been written and not picked from phrase
//saying picking instructions and start phrase fragment to pick word
((MainActivity) mContext).voiceoffer(instance.wordView_MainText, mContext.getString(R.string.pickwordinstr));
((MainActivity) getActivity()).openPhraseFragment(word.getPhrase(), word.getText());
} else if (!firstTime && isPicked /*&& instance.isPronunced*/) {
//word has been picked
if (instance.CurrentWordsArrayIndex + 1 == instance.words.length) {
//if it's the last word
//update lesson, reset all word and phrase booleans, get the next lesson word and setting it's guided vector
Log.i("LessonFragment: ", "UpdateLesson: ");
CurrentWordsArrayIndex = 0;
instance.word = instance.words[CurrentWordsArrayIndex];
isPicked = false;
instance.isWritten = false;
instance.isPronunced = false;
wordIsAnimated = false;
phraseIsAnimated = false;
instance.wordView_MainText.setGuidedVector(instance.word.getFV());
instance.wordView_MainText.setText(
instance.word.getText());
instance.wordView_MainText.invalidate();
setNextiBtnVisibility();
setPreviBtnVisibilty();
// ((MainActivity) instance.mContext).updatelesson(1, true);
((MainActivity) instance.mContext).updateLesson(1);
} else {
//not the last word in lesson so get next word
phraseIsAnimated = false;
wordIsAnimated = false;
instance.nextWordCall();
instance.setPreviBtnVisibilty();
instance.setNextiBtnVisibility();
}
}
}
//next button visibility checking
private void setNextiBtnVisibility() {
if (words == null) {
//when calling openAnimationFragment(String word) and words == null
NextiBtn.setVisibility(View.INVISIBLE);
} else {
//during the lesson adventure
if (CurrentWordsArrayIndex == words.length - 1) {
//if last word
NextiBtn.setVisibility(View.INVISIBLE);
} else {
//not last word
NextiBtn.setVisibility(View.VISIBLE);
}
}
}
//previous button visibility checking
private void setPreviBtnVisibilty() {
if (CurrentWordsArrayIndex == 0) {
//if first word
PreviBtn.setVisibility(View.INVISIBLE);
} else {
//not first word
PreviBtn.setVisibility(View.VISIBLE);
}
}
//voices the word and the follow-up instructions once the word-writing journey finishes
private Thread Thread_WordJourney_voice_speech() {
Thread_WordJourney = new Thread() {
@Override
public void run() {
try {
Log.i("XX", "XX");
sleep(WAIT2SayInstructions);
} catch (InterruptedException ignored) {
}
((MainActivity) mContext).runOnUiThread(() -> {
try {
((MainActivity) mContext).voiceoffer(null, instance.word.getText());
sleep(1500);
if (words == null) {
//when calling openAnimationFragment(String word) and words == null
//using archive words
((MainActivity) getActivity()).OpenHelpFragment();
} else {
//playing pick instruction when finish writing
((MainActivity) mContext).voiceoffer(instance.wordView_MainText, mContext.getString(R.string.pickwordinstr));
sleep(2500);
// instance.voicerec(null);
//start picking word
((MainActivity) getActivity()).openPhraseFragment(word.getPhrase(), word.getText());
}
} catch (Exception e) {
e.printStackTrace();
}
});
}
@Override
public void interrupt() {
super.interrupt();
((MainActivity) getActivity()).StopMediaPlayer();
onDetach();
}
};
return Thread_WordJourney;
}
//an override from the UpdateWord interface that lets WordView ask LessonFragment for the word's next font level
@Override
public Typeface updateWordLoop(Typeface typeface, int word_loop) {
Typeface tf;
//check word loop counter
if (word_loop < (DEFAULT_LOOP_COUNTER * DEFAULT_TYPEFACE_LEVELS) - 2) {
if (word_loop % DEFAULT_LOOP_COUNTER == 0) {
// change font
if (word_loop > 0 && word_loop == DEFAULT_LOOP_COUNTER) {
//level 2 (less dots level)
tf = Typeface.createFromAsset(mContext.getAssets(), "fonts/lvl2.ttf");
} else if (word_loop > DEFAULT_LOOP_COUNTER && word_loop == DEFAULT_LOOP_COUNTER * 2) {
//level 3 (less dots and arrows level)
tf = Typeface.createFromAsset(mContext.getAssets(), "fonts/lvl3.ttf");
} else {
//level 4 (Blank level)
return null;
}
} else {
tf = typeface;
}
} else {
//finish writing
// change word
isWritten = true;
//store word in archive
wordPresenter = new WordPresenter((MainActivityView) mContext, new ImpWordsRepository(mContext));
wordPresenter.saveWordstoArchive(instance.word.getText());
instance.Thread_WordJourney_voice_speech().start();
Log.i("LessonFragment: ", "UpdateWordLoop: changeword");
//return to font level 1 (hollow with arrows)
tf = Typeface.createFromAsset(getActivity().getAssets(), "fonts/lvl1.ttf");
}
return tf;
}
@Override
public void setmContext(Context context) {
mContext = context;
}
@Override
public void setLessonFragment(LessonFragment fragment) {
instance = fragment;
}
@Override
public void onStop() {
super.onStop();
Log.i("LessonFragment", "onStop");
if (Thread_WordJourney != null) {
Thread_WordJourney.interrupt();
}
}
@Override
public void onDetach() {
super.onDetach();
Log.i("LessonFragment", "onDetach");
}
@Override
public void onPause() {
super.onPause();
}
//request next word
private void nextWordCall() {
instance = this;
if (instance.Thread_WordJourney != null) instance.Thread_WordJourney.interrupt();
//get next word
instance.word = instance.words[++instance.CurrentWordsArrayIndex];
//save word index
SharedPreferenceUtils.getInstance(getContext()).setValue(MainActivity.WordIndexKey, String.valueOf(instance.CurrentWordsArrayIndex));
if (!phraseIsAnimated) {
//animate the new phrase if not animated
((MainActivity) getActivity()).openAnimationFragment(instance.word.getPhrase());
phraseIsAnimated = true;
} else {
//animate the new word if not animated
((MainActivity) getActivity()).openAnimationFragment(instance.word.getText());
wordIsAnimated = true;
}
//reset word operations
isPicked = false;
instance.isWritten = false;
instance.isPronunced = false;
//set word guided vector
instance.wordView_MainText.setGuidedVector(instance.word.getFV());
instance.wordView_MainText.setText(
instance.word.getText());
instance.wordView_MainText.invalidate();
}
//request previous word
private void prevWordCall() {
if (instance.Thread_WordJourney != null) instance.Thread_WordJourney.interrupt();
//getting previous word
instance.word = instance.words[--instance.CurrentWordsArrayIndex];
//store word index
SharedPreferenceUtils.getInstance(getContext()).setValue(MainActivity.WordIndexKey, String.valueOf(instance.CurrentWordsArrayIndex));
if (!phraseIsAnimated) {
//animate the new phrase if not animated
((MainActivity) getActivity()).openAnimationFragment(instance.word.getPhrase());
phraseIsAnimated = true;
} else {
//animate the new word if not animated
((MainActivity) getActivity()).openAnimationFragment(instance.word.getText());
wordIsAnimated = true;
}
//reset word operation
isPicked = false;
instance.isPronunced = false;
//set word guided vector
instance.wordView_MainText.setGuidedVector(instance.word.getFV());
instance.wordView_MainText.setText(
instance.word.getText());
instance.wordView_MainText.invalidate();
}
}
|
e3ac1a4067ffa993d5222cef2f6cdb7f90c756ac
|
[
"Java"
] | 3
|
Java
|
SeifMostafa/seshat
|
0322caefb71f7495d6d3788231a07803dfea26de
|
83f6150de8af86f22877cc6f710d47130df5778a
|
refs/heads/master
|
<repo_name>erdelf/Multiplayer-Compatibility<file_sep>/Source/Mods/VanillaFactionsMechanoid.cs
using System;
using HarmonyLib;
using Multiplayer.API;
using RimWorld;
using Verse;
namespace Multiplayer.Compat
{
/// <summary>Vanilla Factions Expanded - Mechanoids by <NAME>, ISOREX, <NAME>, erdelf, Kikohi, Taranchuk, Kentington, Chowder</summary>
/// <see href="https://github.com/AndroidQuazar/VanillaFactionsExpanded-Mechanoid"/>
/// <see href="https://steamcommunity.com/sharedfiles/filedetails/?id=2329011599"/>
[MpCompatFor("OskarPotocki.VFE.Mechanoid")]
public class VanillaFactionsMechanoid
{
public VanillaFactionsMechanoid(ModContentPack mod) => LongEventHandler.ExecuteWhenFinished(LatePatch);
private static void LatePatch()
{
var type = AccessTools.TypeByName("VFEMech.MissileSilo");
MP.RegisterSyncMethod(type, "StartFire");
var configureNewTargetMethod = AccessTools.Method(type, "ConfigureNewTarget");
MP.RegisterSyncMethod(configureNewTargetMethod);
foreach (var method in MpCompat.RegisterLambdaMethod(type, "GetGizmos", 2, 3)) method.SetDebugOnly();
MpCompat.harmony.Patch(configureNewTargetMethod,
postfix: new HarmonyMethod(typeof(VanillaFactionsMechanoid), nameof(CloseWorldTargetter)));
type = AccessTools.TypeByName("VFE.Mechanoids.Buildings.Building_AutoPlant");
MpCompat.RegisterLambdaMethod(type, "GetGizmos", 0, 2, 3);
MpCompat.harmony.Patch(AccessTools.Method("VFE.Mechanoids.PlaceWorkers.PlaceWorker_AutoPlant:DrawGhost"),
prefix: new HarmonyMethod(typeof(VanillaFactionsMechanoid), nameof(PreDrawGhost)));
}
private static void CloseWorldTargetter(bool __result)
{
// Force close the targeter to give the players a visual cue that it was successful
// Otherwise, it would successfully mark the target but not give any indication
if (MP.IsInMultiplayer && __result && Find.WorldTargeter.IsTargeting && Find.WorldTargeter.closeWorldTabWhenFinished)
Find.WorldTargeter.StopTargeting();
}
private static void PreDrawGhost(ref Thing thing)
{
// Fix the bug (which causes error spam) in the original mod if the building is a blueprint or a building frame
// This error spam seems capable of causing desyncs
switch (thing)
{
case Blueprint_Build or Frame when thing.def.entityDefToBuild is ThingDef def:
thing = (Thing)Activator.CreateInstance(def.thingClass);
thing.def = def;
break;
case Blueprint_Build or Frame:
thing = null;
break;
// Handle the case of (re)installing, in case another mod lets this building be reinstalled
case Blueprint_Install install:
thing = install.ThingToInstall;
break;
}
}
}
}
<file_sep>/Source/ReferenceBuilder.cs
using System;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Security.Cryptography;
using System.Text;
using Mono.Cecil;
using Verse;
namespace Multiplayer.Compat
{
static class ReferenceBuilder
{
public static void Restore(string refsFolder)
{
var requestedFiles = Directory.CreateDirectory(refsFolder).GetFiles("*.txt");
foreach(var request in requestedFiles)
{
BuildReference(refsFolder, request);
}
}
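// Builds a stripped "reference assembly" for the requested mod DLL: all types and members
// are made public and method bodies are emptied; the write is skipped when the stored hash
// still matches the current DLL.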
static void BuildReference(string refsFolder, FileInfo request)
{
var asmId = Path.GetFileNameWithoutExtension(request.Name);
var assembly = LoadedModManager.RunningModsListForReading
.SelectMany(m => ModContentPack.GetAllFilesForModPreserveOrder(m, "Assemblies/", f => f.ToLower() == ".dll"))
.FirstOrDefault(f => f.Item2.Name == asmId + ".dll")?.Item2;
// Nothing to build if the requested DLL isn't present among the running mods
if (assembly == null)
return;
var hash = ComputeHash(assembly.FullName);
var hashFile = Path.Combine(refsFolder, asmId + ".txt");
if (File.Exists(hashFile) && File.ReadAllText(hashFile) == hash)
return;
Console.WriteLine($"MpCompat References: Writing {asmId}.dll");
var outFile = Path.Combine(refsFolder, asmId + ".dll");
var asmDef = AssemblyDefinition.ReadAssembly(assembly.FullName);
foreach (var t in asmDef.MainModule.GetTypes())
{
if (t.IsNested)
t.IsNestedPublic = true;
else
t.IsPublic = true;
foreach (var m in t.Methods)
{
m.IsPublic = true;
m.Body = new Mono.Cecil.Cil.MethodBody(m);
}
foreach (var f in t.Fields)
{
f.IsInitOnly = false;
f.IsPublic = true;
}
}
asmDef.Write(outFile);
File.WriteAllText(hashFile, hash);
}
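// SHA-1 of the DLL file contents, returned as an uppercase hex string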
static string ComputeHash(string assemblyPath)
{
var res = new StringBuilder();
using var hash = SHA1.Create();
using FileStream file = File.Open(assemblyPath, FileMode.Open, FileAccess.Read);
hash.ComputeHash(file);
foreach (byte b in hash.Hash)
res.Append(b.ToString("X2"));
return res.ToString();
}
}
}<file_sep>/Source/MpCompat.cs
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Reflection;
using HarmonyLib;
using Multiplayer.API;
using Verse;
namespace Multiplayer.Compat
{
public class MpCompat : Mod
{
const string REFERENCES_FOLDER = "References";
internal static readonly Harmony harmony = new Harmony("rimworld.multiplayer.compat");
public MpCompat(ModContentPack content) : base(content)
{
if (!MP.enabled) {
Log.Warning($"MPCompat :: Multiplayer is disabled. Running in Reference Building mode.\nPut any reference building requests under {REFERENCES_FOLDER} as {{DLLNAME}}.txt");
ReferenceBuilder.Restore(Path.Combine(content.RootDir, REFERENCES_FOLDER));
Log.Warning($"MPCompat :: Done Rebuilding. Bailing out...");
return;
}
MpCompatLoader.Load(content);
harmony.PatchAll();
}
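// Registers the compiler-generated lambdas (selected by ordinal) inside parentMethod as synced methods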
static IEnumerable<ISyncMethod> RegisterLambdaMethod_Impl(Type parentType, string parentMethod, params int[] lambdaOrdinals)
{
foreach (int ord in lambdaOrdinals)
{
var method = MpMethodUtil.GetLambda(parentType, parentMethod, MethodType.Normal, null, ord);
yield return MP.RegisterSyncMethod(method);
}
}
public static ISyncMethod[] RegisterLambdaMethod(Type parentType, string parentMethod, params int[] lambdaOrdinals)
{
return RegisterLambdaMethod_Impl(parentType, parentMethod, lambdaOrdinals).ToArray();
}
public static ISyncMethod[] RegisterLambdaMethod(string parentType, string parentMethod, params int[] lambdaOrdinals)
{
return RegisterLambdaMethod_Impl(AccessTools.TypeByName(parentType), parentMethod, lambdaOrdinals).ToArray();
}
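// Registers the compiler-generated closure lambdas (selected by ordinal) inside parentMethod as synced delegates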
static IEnumerable<ISyncDelegate> RegisterLambdaDelegate_Impl(Type parentType, string parentMethod, params int[] lambdaOrdinals)
{
foreach (int ord in lambdaOrdinals)
{
var method = MpMethodUtil.GetLambda(parentType, parentMethod, MethodType.Normal, null, ord);
yield return MP.RegisterSyncDelegate(parentType, method.DeclaringType.Name, method.Name);
}
}
public static ISyncDelegate[] RegisterLambdaDelegate(Type parentType, string parentMethod, params int[] lambdaOrdinals)
{
return RegisterLambdaDelegate_Impl(parentType, parentMethod, lambdaOrdinals).ToArray();
}
public static ISyncDelegate[] RegisterLambdaDelegate(string parentType, string parentMethod, params int[] lambdaOrdinals)
{
return RegisterLambdaDelegate_Impl(AccessTools.TypeByName(parentType), parentMethod, lambdaOrdinals).ToArray();
}
[Obsolete]
public static IEnumerable<MethodInfo> MethodsByIndex(Type type, string prefix, params int[] index)
{
return type.GetMethods(AccessTools.allDeclared)
.Where(delegate (MethodInfo m) {
return m.Name.StartsWith(prefix, StringComparison.Ordinal);
})
.Where((m, i) => index.Contains(i));
}
[Obsolete]
public static IEnumerable<ISyncMethod> RegisterSyncMethodsByIndex(Type type, string prefix, params int[] index) {
if (index.Length == 1) {
return new[] {
RegisterSyncMethodByIndex(type, prefix, index[0])
};
}
var methods = MethodsByIndex(type, prefix, index).ToList();
var handles = new List<ISyncMethod>(methods.Count);
foreach(var method in methods) {
handles.Add(MP.RegisterSyncMethod(method));
}
return handles;
}
[Obsolete]
public static MethodInfo MethodByIndex(Type type, string prefix, int index) {
return MethodsByIndex(type, prefix, index).First();
}
[Obsolete]
public static ISyncMethod RegisterSyncMethodByIndex(Type type, string prefix, int index) {
return MP.RegisterSyncMethod(MethodByIndex(type, prefix, index));
}
/// <summary>Get the first method in the given type that matches the specified signature, return null if failed.</summary>
/// <param name="type">The type of the target method</param>
/// <param name="paramsType">The list of types of the target method's parameter</param>
public static MethodInfo GetFirstMethodBySignature(Type type, Type[] paramsType)
{
foreach (MethodInfo mi in AccessTools.GetDeclaredMethods(type))
{
List<Type> foundParamsType = new List<Type>();
if (mi.GetParameters().Length != 0)
{
foreach (ParameterInfo pi in mi.GetParameters())
{
foundParamsType.Add(pi.ParameterType);
}
}
if (paramsType.All(foundParamsType.Contains) && paramsType.Count() == foundParamsType.Count) { return mi; }
}
return null;
}
/// <summary>Get the first method in the given type that matches the specified signature, return null if failed.</summary>
/// <param name="type">The type of the target method</param>
/// <param name="paramsType">The list of types of the target method's parameter</param>
/// <param name="returnType">The return type of the target method</param>
public static MethodInfo GetFirstMethodBySignature(Type type, Type[] paramsType, Type returnType)
{
foreach (MethodInfo mi in AccessTools.GetDeclaredMethods(type))
{
List<Type> foundParamsType = new List<Type>();
if (mi.GetParameters().Length != 0)
{
foreach (ParameterInfo pi in mi.GetParameters())
{
foundParamsType.Add(pi.ParameterType);
}
}
if (paramsType.All(foundParamsType.Contains) && paramsType.Count() == foundParamsType.Count && returnType == mi.ReturnType) { return mi; }
}
return null;
}
}
}<file_sep>/Source/Mods/RimFridge.cs
using System;
using System.Reflection;
using System.Runtime.Serialization;
using HarmonyLib;
using Multiplayer.API;
using RimWorld;
using Verse;
namespace Multiplayer.Compat
{
/// <summary>RimFridge by KiameV</summary>
/// <remarks>Fixes for gizmos</remarks>
/// <see href="https://github.com/KiameV/rimworld-rimfridge"/>
/// <see href="https://steamcommunity.com/sharedfiles/filedetails/?id=1180721235"/>
[MpCompatFor("rimfridge.kv.rw")]
public class RimFridgeCompat
{
private static FieldInfo fridgeField;
private static Type dialogType;
public RimFridgeCompat(ModContentPack mod)
{
// Several Gizmos
{
MpCompat.RegisterLambdaDelegate("RimFridge.CompRefrigerator", "CompGetGizmosExtra", 1, 2, 3, 4, 5);
MpCompat.RegisterLambdaMethod("RimFridge.CompToggleGlower", "CompGetGizmosExtra", 0);
dialogType = AccessTools.TypeByName("RimFridge.Dialog_RenameFridge");
fridgeField = AccessTools.Field(dialogType, "fridge");
MP.RegisterSyncWorker<Dialog_Rename>(SyncFridgeName, dialogType);
MP.RegisterSyncMethod(dialogType, "SetName");
}
}
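// Syncs the rename dialog by its fridge comp; the dialog itself is recreated uninitialized on the receiving side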
private static void SyncFridgeName(SyncWorker sync, ref Dialog_Rename dialog)
{
if (sync.isWriting)
sync.Write((ThingComp)fridgeField.GetValue(dialog));
else
{
dialog = (Dialog_Rename)FormatterServices.GetUninitializedObject(dialogType);
fridgeField.SetValue(dialog, sync.Read<ThingComp>());
}
}
}
}
<file_sep>/Source/Mods/AvoidFriendlyFire.cs
using System;
using System.Collections;
using System.Linq;
using System.Reflection;
using HarmonyLib;
using Multiplayer.API;
using Verse;
namespace Multiplayer.Compat
{
/// <summary>Avoid Friendly Fire by Falconne</summary>
/// <see href="https://steamcommunity.com/sharedfiles/filedetails/?id=1134165362"/>
/// <see href="https://github.com/Falconne/AvoidFriendlyFire"/>
[MpCompatFor("falconne.AFF")]
public class AvoidFriendlyFire
{
static IDictionary extendedPawnDataDictionary;
static Type extendedPawnDataType;
static FieldInfo avoidFriendlyFireField;
public AvoidFriendlyFire(ModContentPack mod)
{
{
var type = extendedPawnDataType = AccessTools.TypeByName("AvoidFriendlyFire.ExtendedPawnData");
MP.RegisterSyncWorker<object>(SyncWorkerFor, type);
avoidFriendlyFireField = type.GetField("AvoidFriendlyFire");
}
{
MpCompat.RegisterLambdaDelegate("AvoidFriendlyFire.Pawn_DraftController_GetGizmos_Patch", "Postfix", 1);
}
}
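// Lazily resolves the mod's int-keyed store of ExtendedPawnData from its world component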
static IDictionary ExtendedPawnDataDictionary {
get {
if (extendedPawnDataDictionary == null) {
Type type = AccessTools.TypeByName("AvoidFriendlyFire.ExtendedDataStorage");
var comp = Find.World.GetComponent(type);
extendedPawnDataDictionary = AccessTools.Field(type, "_store").GetValue(comp) as IDictionary;
}
return extendedPawnDataDictionary;
}
}
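// Reverse lookup: finds the key that maps to the given ExtendedPawnData instance (0 if not found)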
static int GetIdFromExtendedPawnData(object extendedPawnData) {
foreach(object key in ExtendedPawnDataDictionary.Keys)
{
if (ExtendedPawnDataDictionary[key] == extendedPawnData) {
return (int) key;
}
}
return 0;
}
static void SyncWorkerFor(SyncWorker sw, ref object extendedPawnData)
{
if (sw.isWriting) {
sw.Write(GetIdFromExtendedPawnData(extendedPawnData));
} else {
int id = sw.Read<int>();
extendedPawnData = ExtendedPawnDataDictionary[id];
}
}
}
}
<file_sep>/Source/MpCompatLoader.cs
using System;
using System.IO;
using System.Linq;
using System.Reflection;
using Mono.Cecil;
using Verse;
namespace Multiplayer.Compat
{
public static class MpCompatLoader
{
internal static void Load(ModContentPack content)
{
LoadConditional(content);
foreach (var asm in content.assemblies.loadedAssemblies)
InitCompatInAsm(asm);
}
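// Loads the optional "Referenced" assembly, removing compat types whose target mod isn't running
// before the modified assembly is loaded into the AppDomain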
static void LoadConditional(ModContentPack content)
{
var asmPath = ModContentPack
.GetAllFilesForModPreserveOrder(content, "Referenced/", f => f.ToLower() == ".dll")
.FirstOrDefault(f => f.Item2.Name == "Multiplayer_Compat_Referenced.dll")?.Item2;
if (asmPath == null)
{
return;
}
var asm = AssemblyDefinition.ReadAssembly(asmPath.FullName);
foreach (var t in asm.MainModule.GetTypes().ToArray())
{
var attr = t.CustomAttributes
.FirstOrDefault(a => a.Constructor.DeclaringType.Name == nameof(MpCompatForAttribute));
if (attr == null) continue;
var modId = (string)attr.ConstructorArguments.First().Value;
var mod = LoadedModManager.RunningMods.FirstOrDefault(m => m.PackageId.NoModIdSuffix() == modId);
if (mod == null)
asm.MainModule.Types.Remove(t);
}
var stream = new MemoryStream();
asm.Write(stream);
var loadedAsm = AppDomain.CurrentDomain.Load(stream.ToArray());
InitCompatInAsm(loadedAsm);
}
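// Instantiates every [MpCompatFor]-annotated type whose target mod is currently running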
static void InitCompatInAsm(Assembly asm)
{
var queue = asm.GetTypes()
.Where(t => t.HasAttribute<MpCompatForAttribute>())
.SelectMany(
t => (MpCompatForAttribute[]) t.GetCustomAttributes(typeof(MpCompatForAttribute), false),
(type, compat) => new { type, compat }
)
.Join(LoadedModManager.RunningMods,
box => box.compat.PackageId.ToLower(),
mod => mod.PackageId.NoModIdSuffix(),
(box, mod) => new { box.type, mod });
foreach (var action in queue)
{
try {
Activator.CreateInstance(action.type, action.mod);
Log.Message($"MPCompat :: Initialized compatibility for {action.mod.PackageId}");
} catch(Exception e) {
Log.Error($"MPCompat :: Exception loading {action.mod.PackageId}: {e.InnerException}");
}
}
}
}
}<file_sep>/Source/MpCompatForAttribute.cs
using System;
namespace Multiplayer.Compat
{
[AttributeUsage(AttributeTargets.Class, AllowMultiple = true)]
public class MpCompatForAttribute : Attribute
{
public string PackageId { get; }
public MpCompatForAttribute(string packageId)
{
this.PackageId = packageId;
}
public override object TypeId {
get {
return this;
}
}
}
}<file_sep>/Source/Mods/VanillaExpandedFramework.cs
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using HarmonyLib;
using Multiplayer.API;
using RimWorld;
using RimWorld.Planet;
using Verse;
namespace Multiplayer.Compat
{
/// <summary>Vanilla Expanded Framework and other Vanilla Expanded mods by <NAME>, <NAME>, Chowder, XeoNovaDan, Orion, Kikohi, erdelf, Taranchuk, and more</summary>
/// <see href="https://github.com/juanosarg/ItemProcessor"/>
/// <see href="https://github.com/juanosarg/VanillaCookingExpanded"/>
/// <see href="https://steamcommunity.com/sharedfiles/filedetails/?id=2023507013"/>
[MpCompatFor("OskarPotocki.VanillaFactionsExpanded.Core")]
class VanillaExpandedFramework
{
// VFECore
private static FieldInfo learnedAbilitiesField;
// Vanilla Furniture Expanded
private static FieldInfo setStoneBuildingField;
// MVCF
// VerbManager
private static ConstructorInfo mvcfVerbManagerCtor;
private static MethodInfo mvcfInitializeManagerMethod;
private static MethodInfo mvcfPawnGetter;
private static FieldInfo mvcfVerbsField;
// WorldComponent_MVCF
private static MethodInfo mvcfGetWorldCompMethod;
private static FieldInfo mvcfAllManagersListField;
private static FieldInfo mvcfManagersTableField;
// ManagedVerb
private static FieldInfo mvcfManagerVerbManagerField;
// System
// WeakReference
private static ConstructorInfo weakReferenceCtor;
// ConditionalWeakTable
private static MethodInfo conditionalWeakTableAddMethod;
private static MethodInfo conditionalWeakTableTryGetValueMethod;
public VanillaExpandedFramework(ModContentPack mod)
{
// ItemProcessor
{
var type = AccessTools.TypeByName("ItemProcessor.Building_ItemProcessor");
// _1, _5 and _7 are used to check if gizmo should be enabled, so we don't sync them
MpCompat.RegisterLambdaMethod(type, "GetGizmos", 0, 2, 3, 4, 6, 8, 9, 10);
type = AccessTools.TypeByName("ItemProcessor.Command_SetQualityList");
MP.RegisterSyncWorker<Command>(SyncCommandWithBuilding, type, shouldConstruct: true);
MpCompat.RegisterLambdaMethod(type, "ProcessInput", Enumerable.Range(0, 8).ToArray());
type = AccessTools.TypeByName("ItemProcessor.Command_SetOutputList");
MP.RegisterSyncWorker<Command>(SyncCommandWithBuilding, type, shouldConstruct: true);
MP.RegisterSyncMethod(type, "TryConfigureIngredientsByOutput");
// Keep an eye on this in the future, seems like something the devs could combine into a single class at some point
foreach (var ingredientNumber in new[] { "First", "Second", "Third", "Fourth" })
{
type = AccessTools.TypeByName($"ItemProcessor.Command_Set{ingredientNumber}ItemList");
MP.RegisterSyncWorker<Command>(SyncSetIngredientCommand, type, shouldConstruct: true);
MP.RegisterSyncMethod(type, $"TryInsert{ingredientNumber}Thing");
MpCompat.RegisterLambdaMethod(type, "ProcessInput", 0);
}
}
// Vanilla Cooking Expanded
{
// AddHediff desyncs with Arbiter, but seems fine without it
PatchingUtilities.PatchPushPopRand("VanillaCookingExpanded.Thought_Hediff:MoodOffset");
}
// VFE Core
{
MpCompat.RegisterLambdaMethod("VFECore.CompPawnDependsOn", "CompGetGizmosExtra", 0).SetDebugOnly();
learnedAbilitiesField = AccessTools.Field(AccessTools.TypeByName("VFECore.Abilities.CompAbilities"), "learnedAbilities");
MP.RegisterSyncWorker<ITargetingSource>(SyncVEFAbility, AccessTools.TypeByName("VFECore.Abilities.Ability"), true);
MP.RegisterSyncMethod(AccessTools.TypeByName("VFECore.Abilities.Ability"), "CreateCastJob");
}
// Vanilla Furniture Expanded
{
MpCompat.RegisterLambdaMethod("VanillaFurnitureExpanded.CompConfigurableSpawner", "CompGetGizmosExtra", 0).SetDebugOnly();
var type = AccessTools.TypeByName("VanillaFurnitureExpanded.Command_SetItemsToSpawn");
MpCompat.RegisterLambdaDelegate(type, "ProcessInput", 1);
MP.RegisterSyncWorker<Command>(SyncCommandWithBuilding, type, shouldConstruct: true);
MpCompat.RegisterLambdaMethod("VanillaFurnitureExpanded.CompRockSpawner", "CompGetGizmosExtra", 0);
type = AccessTools.TypeByName("VanillaFurnitureExpanded.Command_SetStoneType");
setStoneBuildingField = AccessTools.Field(type, "building");
MpCompat.RegisterLambdaMethod(type, "ProcessInput", 0);
MP.RegisterSyncWorker<Command>(SyncSetStoneTypeCommand, type, shouldConstruct: true);
MpCompat.RegisterLambdaDelegate(type, "ProcessInput", 1);
type = AccessTools.TypeByName("VanillaFurnitureExpanded.CompRandomBuildingGraphic");
MpCompat.RegisterLambdaMethod(type, "CompGetGizmosExtra", 0);
}
// Vanilla Faction Mechanoids
{
var type = AccessTools.TypeByName("VFE.Mechanoids.CompMachineChargingStation");
MpCompat.RegisterLambdaDelegate(type, "CompGetGizmosExtra", 1, 6).SetContext(SyncContext.MapSelected);
MpCompat.RegisterLambdaDelegate(type, "CompGetGizmosExtra", 4);
}
// AnimalBehaviours
{
// RNG
PatchingUtilities.PatchSystemRand("AnimalBehaviours.DamageWorker_ExtraInfecter:ApplySpecialEffectsToPart", false);
var rngFixConstructors = new[]
{
"AnimalBehaviours.CompAnimalProduct",
"AnimalBehaviours.CompFilthProducer",
"AnimalBehaviours.CompGasProducer",
"AnimalBehaviours.CompInitialHediff",
"AnimalBehaviours.DeathActionWorker_DropOnDeath",
};
PatchingUtilities.PatchSystemRandCtor(rngFixConstructors, false);
// Gizmos
var type = AccessTools.TypeByName("AnimalBehaviours.CompDestroyThisItem");
MP.RegisterSyncMethod(type, "SetObjectForDestruction");
MP.RegisterSyncMethod(type, "CancelObjectForDestruction");
}
// MVCF (Multi Verb Combat Framework)
{
var type = AccessTools.TypeByName("MVCF.WorldComponent_MVCF");
mvcfGetWorldCompMethod = AccessTools.Method(type, "GetComp");
mvcfAllManagersListField = AccessTools.Field(type, "allManagers");
mvcfManagersTableField = AccessTools.Field(type, "managers");
MP.RegisterSyncMethod(typeof(VanillaExpandedFramework), nameof(SyncedInitVerbManager));
MpCompat.harmony.Patch(AccessTools.Method(type, "GetManagerFor"),
prefix: new HarmonyMethod(typeof(VanillaExpandedFramework), nameof(GetManagerForPrefix)));
type = AccessTools.TypeByName("MVCF.VerbManager");
MP.RegisterSyncWorker<object>(SyncVerbManager, type, isImplicit: true);
mvcfVerbManagerCtor = AccessTools.Constructor(type);
mvcfInitializeManagerMethod = AccessTools.Method(type, "Initialize");
mvcfPawnGetter = AccessTools.PropertyGetter(type, "Pawn");
mvcfVerbsField = AccessTools.Field(type, "verbs");
var weakReferenceType = typeof(System.WeakReference<>).MakeGenericType(new[] { type });
weakReferenceCtor = AccessTools.FirstConstructor(weakReferenceType, ctor => ctor.GetParameters().Count() == 1);
var conditionalWeakTableType = typeof(System.Runtime.CompilerServices.ConditionalWeakTable<,>).MakeGenericType(new[] { typeof(Pawn), type });
conditionalWeakTableAddMethod = AccessTools.Method(conditionalWeakTableType, "Add");
conditionalWeakTableTryGetValueMethod = AccessTools.Method(conditionalWeakTableType, "TryGetValue");
type = AccessTools.TypeByName("MVCF.ManagedVerb");
mvcfManagerVerbManagerField = AccessTools.Field(type, "man");
MP.RegisterSyncWorker<object>(SyncManagedVerb, type, isImplicit: true);
// Seems like selecting the Thing that holds the verb inits some stuff, so we need to set the context
MP.RegisterSyncMethod(type, "Toggle");
type = AccessTools.TypeByName("MVCF.Harmony.Gizmos");
MpCompat.RegisterLambdaDelegate(type, "GetGizmos_Postfix", 1); // Fire at will
MpCompat.RegisterLambdaDelegate(type, "GetAttackGizmos_Postfix", 4); // Interrupt Attack
MpCompat.RegisterLambdaDelegate(type, "Pawn_GetGizmos_Postfix", 0); // Also interrupt Attack
}
// Explosive Trails Effect
{
// RNG
PatchingUtilities.PatchPushPopRand("ExplosiveTrailsEffect.SmokeThrowher:ThrowSmokeTrail");
}
// KCSG (Custom Structure Generation)
{
// RNG
var methods = new[]
{
"KCSG.SymbolResolver_AddFields:Resolve",
"KCSG.SymbolResolver_Settlement:GenerateRooms",
};
PatchingUtilities.PatchSystemRand(methods, false);
}
}
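// Syncs commands that only carry a reference to their parent building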
private static void SyncCommandWithBuilding(SyncWorker sync, ref Command command)
{
var traverse = Traverse.Create(command);
var building = traverse.Field("building");
if (sync.isWriting)
sync.Write(building.GetValue() as Thing);
else
building.SetValue(sync.Read<Thing>());
}
private static void SyncSetIngredientCommand(SyncWorker sync, ref Command command)
{
var traverse = Traverse.Create(command);
var building = traverse.Field("building");
var ingredientList = traverse.Field("things");
if (sync.isWriting)
{
sync.Write(building.GetValue() as Thing);
var ingredientListValue = ingredientList.GetValue();
if (ingredientListValue == null)
{
sync.Write(false);
}
else
{
sync.Write(true);
sync.Write(ingredientList.GetValue() as List<Thing>);
}
}
else
{
building.SetValue(sync.Read<Thing>());
if (sync.Read<bool>()) ingredientList.SetValue(sync.Read<List<Thing>>());
}
}
private static void SyncSetStoneTypeCommand(SyncWorker sync, ref Command obj)
{
if (sync.isWriting)
sync.Write(setStoneBuildingField.GetValue(obj) as ThingComp);
else
setStoneBuildingField.SetValue(obj, sync.Read<ThingComp>());
}
private static void SyncVerbManager(SyncWorker sync, ref object obj)
{
if (sync.isWriting)
// Sync the pawn that has the VerbManager
sync.Write((Pawn)mvcfPawnGetter.Invoke(obj, Array.Empty<object>()));
else
{
var pawn = sync.Read<Pawn>();
var comp = mvcfGetWorldCompMethod.Invoke(null, Array.Empty<object>());
var weakTable = mvcfManagersTableField.GetValue(comp);
var outParam = new object[] { pawn, null };
// Either try getting the VerbManager from the comp, or create it if it's missing
if ((bool)conditionalWeakTableTryGetValueMethod.Invoke(weakTable, outParam))
obj = outParam[1];
else
obj = InitVerbManager(pawn, (WorldComponent)comp, table: weakTable);
}
}
private static void SyncManagedVerb(SyncWorker sync, ref object obj)
{
if (sync.isWriting)
{
// Get the VerbManager from inside of the ManagedVerb itself
var verbManager = mvcfManagerVerbManagerField.GetValue(obj);
// Find the ManagedVerb inside of list of all verbs
var managedVerbsList = mvcfVerbsField.GetValue(verbManager) as IList;
var index = managedVerbsList.IndexOf(obj);
// Sync the index of the verb as well as the manager (if it's valid)
sync.Write(index);
if (index >= 0)
SyncVerbManager(sync, ref verbManager);
}
else
{
// Read and check if the index is valid
var index = sync.Read<int>();
if (index >= 0)
{
// Read the verb manager
object verbManager = null;
SyncVerbManager(sync, ref verbManager);
// Find the ManagedVerb with specific index inside of list of all verbs
var managedVerbsList = mvcfVerbsField.GetValue(verbManager) as IList;
obj = managedVerbsList[index];
}
}
}
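// Syncs a VFE ability by its caster and the unique load ID of its verb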
private static void SyncVEFAbility(SyncWorker sync, ref ITargetingSource source)
{
if (sync.isWriting)
{
sync.Write(source.Caster);
sync.Write(source.GetVerb.GetUniqueLoadID());
}
else
{
var caster = sync.Read<Thing>();
var uid = sync.Read<string>();
if (caster is ThingWithComps thing)
{
var compAbilities = thing.AllComps.First(c => c.GetType() == learnedAbilitiesField.DeclaringType);
var list = learnedAbilitiesField.GetValue(compAbilities) as IEnumerable;
foreach (object o in list)
{
ITargetingSource its = o as ITargetingSource;
if (its.GetVerb.GetUniqueLoadID() == uid)
{
source = its;
break;
}
}
}
else
{
Log.Error("MultiplayerCompat :: SyncVEFAbility : Caster isn't a ThingWithComps");
}
}
}
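// Harmony prefix: in multiplayer, verb manager creation has to go through a synced call so every client creates it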
private static bool GetManagerForPrefix(Pawn pawn, bool createIfMissing, WorldComponent __instance, ref object __result)
{
if (!MP.IsInMultiplayer || !createIfMissing) return true; // Let the method run normally outside multiplayer; we only care if we might need to create a VerbManager
var table = mvcfManagersTableField.GetValue(__instance);
var parameters = new object[] { pawn, null };
if ((bool)conditionalWeakTableTryGetValueMethod.Invoke(table, parameters))
{
// Might as well give the result back instead of continuing the normal execution of the method,
// as it would just do the same stuff as we do here again
__result = parameters[1];
}
else
{
// We basically set up an empty reference, but we'll initialize it in the synced method.
// We just return the reference for it so other objects can use it now. The data they
// have will be updated after the sync, so the gizmos related to verbs might not be
// shown immediately for players who selected specific pawns.
__result = CreateAndAddVerbManagerToCollections(pawn, __instance, table: table);
}
// Ensure VerbManager is initialized for all players, as it might not be
SyncedInitVerbManager(pawn);
return false;
}
// Synced method for initializing the verb manager for all players, used in situations where the moment of creation of the verb manager might not be synced
private static void SyncedInitVerbManager(Pawn pawn) => InitVerbManager(pawn);
private static object InitVerbManager(Pawn pawn, WorldComponent comp = null, object list = null, object table = null)
{
if (comp == null) comp = (WorldComponent)mvcfGetWorldCompMethod.Invoke(null, Array.Empty<object>());
if (comp == null) return null;
if (table == null) table = mvcfManagersTableField.GetValue(comp);
var parameters = new object[] { pawn, null };
object verbManager;
// Try to find the verb manager first, as it might exist (and it will definitely exist for at least one player)
if ((bool)conditionalWeakTableTryGetValueMethod.Invoke(table, parameters))
{
verbManager = parameters[1];
// If the manager has the pawn assigned, it means it's initialized, if it's not - we initialize it
if (mvcfPawnGetter.Invoke(verbManager, Array.Empty<object>()) == null)
mvcfInitializeManagerMethod.Invoke(verbManager, new object[] { pawn });
}
// If the verb manager doesn't exist, we create an empty one here and add it to the verb manager list and table, and then initialize it
else
{
verbManager = CreateAndAddVerbManagerToCollections(pawn, comp, list, table);
mvcfInitializeManagerMethod.Invoke(verbManager, new object[] { pawn });
}
return verbManager;
}
// Helper method for creating an empty verb manager for a pawn
private static object CreateAndAddVerbManagerToCollections(Pawn pawn, WorldComponent worldComponent, object list = null, object table = null)
{
var verbManager = mvcfVerbManagerCtor.Invoke(Array.Empty<object>());
if (list == null) list = mvcfAllManagersListField.GetValue(worldComponent);
if (table == null) table = mvcfManagersTableField.GetValue(worldComponent);
conditionalWeakTableAddMethod.Invoke(table, new object[] { pawn, verbManager });
((IList)list).Add(weakReferenceCtor.Invoke(new object[] { verbManager }));
return verbManager;
}
}
}
<file_sep>/Source/Mods/VanillaEventsExpanded.cs
using Verse;
namespace Multiplayer.Compat
{
/// <summary>Vanilla Events Expanded by <NAME>, <NAME></summary>
/// <see href="https://steamcommunity.com/sharedfiles/filedetails/?id=1938420742"/>
/// Contribution to Multiplayer Compatibility by Sokyran and Reshiram
[MpCompatFor("VanillaExpanded.VEE")]
class VEE
{
public VEE(ModContentPack mod)
{
var methodsForAll = new[]
{
"VEE.HeddifComp_MightJoin:CompPostTick",
"VEE.HuntingParty:TryExecuteWorker",
// These 4 methods initialize System.Random but never actually use it.
//"VEE.PurpleEvents.GlobalWarming:ChangeBiomes",
//"VEE.PurpleEvents.GlobalWarming:ChangeTileTemp",
//"VEE.PurpleEvents.IceAge:ChangeBiomes",
//"VEE.PurpleEvents.IceAge:ChangeTileTemp",
"VEE.PurpleEvents.PsychicBloom:Init",
"VEE.RegularEvents.ApparelPod:TryExecuteWorker",
"VEE.RegularEvents.CaravanAnimalWI:GenerateGroup",
"VEE.RegularEvents.MeteoriteShower:TryExecuteWorker",
"VEE.RegularEvents.WeaponPod:TryExecuteWorker",
};
PatchingUtilities.PatchSystemRand(methodsForAll, false);
// This method only calls other methods that use RNG calls
PatchingUtilities.PatchPushPopRand("VEE.RegularEvents.EarthQuake:TryExecuteWorker");
// Only patch System.Random out, as this methods is only called by other ones
PatchingUtilities.PatchSystemRand("VEE.RegularEvents.EarthQuake:DamageInRadius", false);
// Unity RNG
PatchingUtilities.PatchUnityRand("VEE.Shuttle:Tick");
LongEventHandler.ExecuteWhenFinished(LatePatch);
}
public static void LatePatch() => PatchingUtilities.PatchSystemRand("VEE.RegularEvents.SpaceBattle:GameConditionTick", false);
}
}
<file_sep>/Source/Extensions.cs
using System;
using Multiplayer.API;
namespace Multiplayer.Compat
{
internal static class Extensions
{
internal static string After(this string s, char c)
{
if (s.IndexOf(c) == -1)
throw new Exception($"Char {c} not found in string {s}");
return s.Substring(s.IndexOf(c) + 1);
}
internal static string Until(this string s, char c)
{
if (s.IndexOf(c) == -1)
throw new Exception($"Char {c} not found in string {s}");
return s.Substring(0, s.IndexOf(c));
}
internal static int CharacterCount(this string s, char c)
{
int num = 0;
for (int i = 0; i < s.Length; i++)
if (s[i] == c)
num++;
return num;
}
internal static void SetDebugOnly(this ISyncMethod[] syncMethods)
{
foreach(var method in syncMethods)
{
method.SetDebugOnly();
}
}
internal static void SetContext(this ISyncDelegate[] syncDelegates, SyncContext context)
{
foreach(var method in syncDelegates)
{
method.SetContext(context);
}
}
private const string SteamSuffix = "_steam";
private const string CopySuffix = "_copy";
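// Strips any trailing "_steam"/"_copy" markers so local and Steam copies of a mod share one package id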
internal static string NoModIdSuffix(this string modId)
{
while (true)
{
if (modId.EndsWith(SteamSuffix))
{
modId = modId.Substring(0, modId.Length - SteamSuffix.Length);
continue;
}
if (modId.EndsWith(CopySuffix))
{
modId = modId.Substring(0, modId.Length - CopySuffix.Length);
continue;
}
return modId;
}
}
}
}
|
4078f71cff015e33e29cf3b5ccbbc32508466216
|
[
"C#"
] | 10
|
C#
|
erdelf/Multiplayer-Compatibility
|
103965031a95ed74901fd6949e2223580ca3726a
|
68db17211a6efd53dd8dec9c2e732bae8e01a20c
|