| branch_name (stringclasses, 149 values) | text (stringlengths, 23 to 89.3M) | directory_id (stringlengths, 40) | languages (listlengths, 1 to 19) | num_files (int64, 1 to 11.8k) | repo_language (stringclasses, 38 values) | repo_name (stringlengths, 6 to 114) | revision_id (stringlengths, 40) | snapshot_id (stringlengths, 40) |
|---|---|---|---|---|---|---|---|---|
| refs/heads/master |
<repo_name>kamal09rumba/ReactJS<file_sep>/forms/src/components/FormPractice.js
import React from "react"
const FormPracticeComponent = (props) => {
return(
<div>
<form onSubmit={props.handleSubmit} className="form-group">
<label>First Name:</label>
<input type="text"
name="firstname"
placeholder="<NAME>"
value={props.data.firstname}
onChange={props.handleChange}
className="form-control"
/>
<br/>
<label>Last Name:</label>
<input
type="text"
name="lastname"
placeholder="<NAME>"
value={props.data.lastname}
onChange={props.handleChange}
className="form-control"
/>
<br/>
<label>Age:</label>
<input
type="number"
name="age"
placeholder="Age"
value={props.data.age}
onChange={props.handleChange}
className="form-control"
/>
<br/>
<label>Gender:</label>
<input
type="radio"
name="gender"
value="male"
checked={props.data.gender === 'male'}
onChange={props.handleChange}
className="form-control"
/>Male
<input
type="radio"
name="gender"
value="female"
checked={props.data.gender === 'female'}
onChange={props.handleChange}
className="form-control"
/>Female
<br/>
<label>Travel Destination: </label>
<select
name="location"
value={props.data.location}
onChange={props.handleChange}
className="form-control"
>
<option value="">----Please Select A Location----</option>
<option value="Kathmandu">Kathmandu</option>
<option value="Pokhara">Pokhara</option>
<option value="Dharan">Dharan</option>
</select>
<br/>
<label>Dietary Restrictions (Please select any):</label>
<br/>
<input
type="checkbox"
name="isVegan"
checked={props.data.isVegan}
onChange={props.handleChange}
className="form-control"
/>Vegan?<br/>
<input
type="checkbox"
name="isKosher"
checked={props.data.isKosher}
onChange={props.handleChange}
className="form-control"
/>Kosher?<br/>
<input
type="checkbox"
name="isLactoseFree"
checked={props.data.isLactoseFree}
onChange={props.handleChange}
className="form-control"
/>Lactose free?<br/>
<br/>
{/* <button>Submit</button> */}
<input type="submit" value="submit" className="btn btn-primary"/>
</form>
<hr/>
<div>
<h1><u>Entered Information:</u></h1>
<label>Name: {props.data.firstname} {props.data.lastname}</label><br/>
<label>Age: {props.data.age}</label><br/>
<label>Gender: {props.data.gender}</label><br/>
<label>Destination: {props.data.location}</label><br/>
<label>Vegan?: {props.data.isVegan ? "Yes" : "No"}</label><br/>
<label>Kosher?: {props.data.isKosher ? "Yes" : "No"}</label><br/>
<label>Lactose Free?: {props.data.isLactoseFree ? "Yes" : "No"}</label><br/>
</div>
</div>
)
}
export default FormPracticeComponent
<file_sep>/todo/src/App.js
import React from "react"
import TodoItem from "./components/TodoItem"
import todolist from "./todoDatas"
// fat arrow function / lambda function (one way to define a function)
// const App = () => {
class App extends React.Component{
constructor(){
super()
this.state = {
todos: todolist
}
this.handleCheck = this.handleCheck.bind(this)
}
handleCheck(id){
this.setState(prevState => {
// return new todo objects instead of mutating the previous state
const updatedTodos = prevState.todos.map(todo =>
todo.id === id ? {...todo, completed: !todo.completed} : todo
)
return {
todos: updatedTodos
}
})
}
render(){
const TodoComponent = this.state.todos.map(list=>
<TodoItem key={list.id}
todos={list}
handleCheck={this.handleCheck}
/>
)
const styles = {
fontSize:"30px",
textAlign:"center"
}
return(
<div className="checkboxContainer">
<h3 style={styles}> Todo List</h3>
{TodoComponent}
</div>
)
}
}
export default App
<file_sep>/todo/src/components/TodoItem.js
import React from "react"
// fat arrow function / lambda function (one way to define a function)
const TodoItem = (props) => {
const id = props.todos.completed ? 'completed-tasks' : ''
return (
<ul>
<li id={id}>
<input type="checkbox" className="checkbox"
checked={props.todos.completed}
onChange={() => props.handleCheck(props.todos.id)}
/>
<label>{props.todos.item}</label>
</li>
</ul>
)
}
export default TodoItem
<file_sep>/forms/src/components/Form.js
import React, {Component} from "react"
class Form extends Component{
constructor(){
super()
this.state = {
firstname : "",
lastname : "",
isFriendly: true,
gender: "",
favcolor:""
}
this.handleChange = this.handleChange.bind(this)
this.handleSubmit = this.handleSubmit.bind(this)
}
handleChange(event){
const {name,value,type,checked} = event.target
type === "checkbox" ? this.setState({[name]:checked}) : this.setState({[name]: value})
}
handleSubmit(event){
event.preventDefault()
}
render(){
return(
<div className="row justify-content-md-center well">
<form className="form-group" onSubmit={this.handleSubmit}>
<input
name="firstname"
type="text"
placeholder="<NAME>"
value = {this.state.firstname}
onChange={this.handleChange}
className="form-control"
/>
<br/>
<input
name="lastname"
type="text"
placeholder="<NAME>"
value={this.state.lastname}
onChange={this.handleChange}
className="form-control"
/>
<h1>{this.state.firstname} {this.state.lastname}</h1>
<div className="form-group">
<label>Textarea</label><br/>
<textarea
value={"some default value"}
onChange={this.handleChange}
class="form-control"
/>
</div>
<br/>
<label>
<input
type="checkbox"
name="isFriendly"
checked={this.state.isFriendly}
onChange={this.handleChange}
/>
Check Box
</label>
<br/>
<label>
<input
type="radio"
name="gender"
value="male"
checked={this.state.gender === 'male'}
onChange={this.handleChange}
/>
Male
</label>
<br/>
<label>
<input
type="radio"
name="gender"
value="female"
checked={this.state.gender === 'female'}
onChange={this.handleChange}
/>
Female
</label>
<h1>You are {this.state.gender}</h1>
<label>Favorite Color:</label>
<select
value={this.state.favcolor}
onChange={this.handleChange}
name="favcolor"
>
<option value="Red">Red</option>
<option value="Green">Green</option>
<option value="Blue">Blue</option>
</select>
<h1>Your Favorite Color is {this.state.favcolor}</h1>
<input type="submit" class="btn btn-primary"/>
</form>
</div>
)
}
}
export default Form
<file_sep>/todo/src/todoDatas.js
const todolist = [
{
id:1,
item:'First item',
completed:true
},
{
id:2,
item:'Second Item',
completed:false
},
{
id:3,
item:'Third Item',
completed:true
},
{
id:4,
item:'Fourth Item',
completed:false
},{
id:5,
item:'Fifth Item',
completed:true
}
]
export default todolist
<file_sep>/react-redux-eg-2/README.md
### React Redux Implementation
<file_sep>/react-redux-eg-1/src/index.js
import React from 'react';
import ReactDOM from 'react-dom';
import './index.css';
import App from './App';
import * as serviceWorker from './serviceWorker';
import { applyMiddleware, compose, combineReducers, createStore } from "redux"
import thunk from "redux-thunk"
import { Provider } from "react-redux"
import productsReducer from "./reducers/products-reducer"
import userReduer from "./reducers/user-reducer"
const allReducers = combineReducers({
products: productsReducer,
user: userReduer
})
const initialState = {
products: [{name:'iPhone'},{name:'samsung'}],
user:'Kamal'
}
const allStoreEnhancers = compose(
applyMiddleware(thunk),
// window.__REDUX_DEVTOOLS_EXTENSION__ && window.__REDUX_DEVTOOLS_EXTENSION__()
)
const store = createStore(allReducers,
initialState,
allStoreEnhancers
)
const rootElement = document.getElementById('root')
ReactDOM.render(
<Provider store={store}>
<App
aRandomProps="whatever!!!"
/>
</Provider>
, rootElement);
serviceWorker.unregister();
<file_sep>/README.md
# ReactJS
Collection of React Project
<file_sep>/forms/src/containers/FormPractice.js
import React from "react"
import FormPracticeComponent from "../components/FormPractice"
class FormPractice extends React.Component{
constructor(){
super()
this.state = {
firstname: "",
lastname: "",
age: 0,
gender: "",
location: "",
dietary_restrictions: "",
isVegan: false,
isKosher: false,
isLactoseFree: false
}
this.handleChange = this.handleChange.bind(this)
this.handleSubmit = this.handleSubmit.bind(this)
}
handleChange(event){
const {name, value, type, checked} = event.target
type === "checkbox" ? this.setState({[name]: checked}) : this.setState({[name]: value})
}
handleSubmit(event){
event.preventDefault()
var diet = []
if(this.state.isVegan){
diet.push('Vegan')
}
if(this.state.isKosher){diet.push('Kosher')}
if(this.state.isLactoseFree){diet.push('Lactose Free')}
var output = ''
output += 'First Name : ' + this.state.firstname
output += '\n'
output += 'Last Name : ' + this.state.lastname
output += '\n'
output += 'Age : ' + this.state.age
output += '\n'
output += 'Gender : ' + this.state.gender
output += '\n'
output += 'Location : ' + this.state.location
output += '\n'
output += 'Dietary Restrictions : ' + diet
alert(output)
}
render(){
return(
<FormPracticeComponent
data={this.state}
handleChange={this.handleChange}
handleSubmit={this.handleSubmit}
/>
/*
==> Using Object Spread Operator
<FormPracticeComponent
{...this.state}
/>
==> props.firstname --> to get value
*/
)
}
}
export default FormPractice
<file_sep>/react-router/src/App.js
import "bootstrap/dist/css/bootstrap.min.css"
import $ from "jquery"
import Popper from "popper.js"
import "bootstrap/dist/js/bootstrap.min.js"
import React, { Component } from 'react';
import logo from './logo.svg';
import './App.css';
import { BrowserRouter, Route, Switch } from "react-router-dom"
import Home from "./components/Home"
import About from "./components/About"
import Contact from "./components/Contact"
import Error from "./components/Error"
import Navigation from "./components/Navigation"
class App extends Component {
render() {
return (
<div className="App">
<header className="App-header">
<img src={logo} className="App-logo" alt="logo" />
</header>
<BrowserRouter>
<div>
<Navigation />
<Switch>
<Route path="/" component={Home} exact />
<Route path="/about" component={About} />
<Route path="/contact" component={Contact} />
<Route component={Error} />
</Switch>
</div>
</BrowserRouter>
</div>
);
}
}
export default App;
<file_sep>/todo/README.md
## Todo
A simple todo application built with ReactJS.
<file_sep>/react-redux-eg-1/src/reducers/user-reducer.js
import { UPDATE_USER,API_REQUEST_ERROR } from "../actions/user-actions"
const userReduer = (state=[], action) => {
switch(action.type){
case UPDATE_USER:
return action.payload.user
case API_REQUEST_ERROR:
return action.payload.user
default:
return state
}
}
export default userReduer
<file_sep>/forms/src/App.js
import React, {Component} from "react"
import FormPractice from "./containers/FormPractice"
import Form from "./components/Form"
class App extends Component{
constructor(){
super()
}
render(){
return(
<div className="container">
<label className="h1"><u>Form Sample - 1 </u></label>
<Form />
<hr/>
<hr />
<label className="h1"><u>Form Sample - 2</u></label>
<FormPractice/>
</div>
)
}
}
export default App
<file_sep>/react-redux-eg-2/src/routes.js
import React from "react"
import {Route} from "react-router-dom"
import Posts from "./components/Posts"
const BaseRouter = () => (
<div>
<Route exact path="/" component={Posts} />
</div>
)
export default BaseRouter
<file_sep>/forms/README.md
## React Form
Collection of basic forms using ReactJS
<file_sep>/autocomplete-textbox/README.md
### Autocomplete TextBox
A simple autocomplete textbox built with ReactJs
<file_sep>/react-redux-eg-1/src/actions/user-actions.js
import $ from "jquery"
export const UPDATE_USER = "users:updateUser"
export const API_REQUEST_ERROR = "users:showError"
export function updateUser(newUser){
return {
type: UPDATE_USER,
payload: {
user: newUser
}
}
}
export function showError(){
return{
type: API_REQUEST_ERROR,
payload: {
user: 'Error!!!'
}
}
}
export function apiRequest(){
return dispatch => {
$.ajax({
url:'localhost:8000',
success(response){
console.log('success')
// dispatch(updateUser(response.newUser))
},
error(){
console.log('error')
dispatch(showError())
}
})
}
}
| 8500b75d9857ca2c81450f40adacf926f8beba42 | ["JavaScript", "Markdown"] | 17 | JavaScript | kamal09rumba/ReactJS | b05e947d0507c5671b7ff002d6302616f5dfc950 | 251fa8115dbf018124dca7d09a637f8e692f2742 |
| refs/heads/master |
<file_sep># A vue-table component
### Description:
##### 1. A Vue table component built on top of element-ui.
#### Features:
1. Displays tree-structured data in the table
2. Row drag-and-drop sorting
3. Cell drag-and-drop sorting
### Usage:
##### 1. Install the npm package:
##### Run the following in the root directory of your Vue project:
``` sh
npm install ele-table
```
##### 2. Import the npm package and register it as Vue components:
> For example, in the Vue page where it is needed (a global-registration alternative is sketched right after this example):
``` html
<template>
<!-- ele-table components go here -->
<ele-table :data="tableData" treetable style="width: 100%">
<ele-table-column prop="id" label="姓名">
<template slot-scope="scope">
<div :style="`padding-left:${20*(scope.row._indent-1)}px`">
<span v-if="scope.row.children">
<i v-if="scope.row._expand" >-</i><i v-else>+</i>
</span>
<span>{{scope.row.id}}</span>
</div>
</template>
</ele-table-column>
<ele-table-column prop="id" label="年龄" width="180">
</ele-table-column>
<ele-table-column
prop="label"
label="地址">
</ele-table-column>
</ele-table>
<ele-table
draggablerow //enable row dragging
:allow-drag="allowdrag" //whether the node can be dragged
:allow-drop="allowDrop" //whether the node can be dropped on
:data="tableData"
@node-drag-start="handleDragStart"
@node-drag-enter="handleDragEnter"
@node-drag-leave="handleDragLeave"
@node-drag-over="handleDragOver"
@node-drag-end="handleDragEnd"
style="width: 100%">
<ele-table-column prop="id" label="姓名" width="180">
</ele-table-column>
<ele-table-column
prop="id"
label="年龄"
width="180">
</ele-table-column>
<ele-table-column
prop="label"
label="地址">
</ele-table-column>
</ele-table>
</template>
<script>
import { eleTable, eleTableColumn } from "ele-table";
import 'ele-table/dist/ele-table.css';
//If the project already imports element-ui, the stylesheet import is not needed, but the following classes are required for the drag styles
//<style>
//.el-table--dropNode{
// background-color: #409eff !important;
//}
// .el-tree__drop-indicator {
// position: absolute;
// left: 0;
// right: 0;
// height: 2px !important;
// background-color: #409eff;
// z-index: 10000;
//}
//</style>
export default {
data(){
return{
tableData: [{
id: 1,
label: 'Level 1 - 1',
_expand:true, //expand this node by default
children: [{
id: 4,
label: 'Level 2 - 1-1',
_expand:true,
children: [{
id: 9,
label: 'Level 3 - 1-1-1'
}, {
id: 10,
label: 'Level 3 - 1-1-2'
}]
}]
}, {
id: 2,
label: 'Level 1 - 2',
children: [{
id: 5,
label: 'Level 2 - 2-1'
}, {
id: 6,
label: 'Level 2 - 2-2'
}]
}]
}
},
components: {
eleTable,
eleTableColumn
},
methods: {
handleDragEnd(row, column, cell, event) {
let data = this.tableData[row.draggingcolumn];
if (cell == "after") {
this.tableData.splice(column.dropcolumn + 1, 0, data);
if (row.draggingcolumn > column.dropcolumn) {
this.tableData.splice(row.draggingcolumn + 1, 1);
} else {
this.tableData.splice(row.draggingcolumn, 1);
}
}
if (cell == "before") {
this.tableData.splice(column.dropcolumn, 0, data);
if (row.draggingcolumn > column.dropcolumn) {
this.tableData.splice(row.draggingcolumn + 1, 1);
} else {
this.tableData.splice(row.draggingcolumn, 1);
}
}
if (cell == "inner") {
this.$set(
this.tableData,
row.draggingcolumn,
this.tableData[column.dropcolumn]
);
this.$set(this.tableData, column.dropcolumn, data);
}
},
},
}
</script>
```
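Note that the example above registers the components locally via `components`. Because the package's entry file also attaches an `install` hook to each export (see `index.js` later in this repository), global registration should work as well. The following is a sketch of that assumed usage (Vue 2 style; not shown in the original README):
``` js
// Sketch of global registration (assumed usage; Vue.use() simply calls each
// export's install(Vue) hook, which registers the component via Vue.component()).
import Vue from 'vue';
import { eleTable, eleTableColumn } from 'ele-table';
import 'ele-table/dist/ele-table.css';

Vue.use(eleTable);
Vue.use(eleTableColumn);
```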
### Table Attributes
| Attribute | Description | Type | Accepted Values | Default |
|---|---|---|---|---|
| data | Data to display | array | — | — |
| treetable | Whether the data is tree-structured | boolean | — | false |
| _expand | Expands a tree node by default (recursive association is not supported) | boolean | — | false |
| draggablerow | Whether row dragging is enabled | boolean | — | false |
| draggable | Whether cell dragging is enabled | boolean | — | false |
| allow-drag | Whether a node can be dragged (see the sketch after the events table) | Function(row (row data), column (index for row drag, column for cell drag), cell (node), event) | — | must return a boolean |
| allow-drop | Whether a node can be dropped on | Function(row, column, cell, event, type) | — | must return a boolean |
### Table Events
| Event | Description | Parameters |
|---|---|---|
| node-drag-start | Fired when a node starts being dragged | Function(row (row data), column (index for row drag, column for cell drag), cell (node), event) |
| node-drag-enter | Fired when the dragged node enters another node | Function(row (row data), column (index for row drag, column for cell drag), cell (node), event, draggingNode (node being dragged)) |
| node-drag-leave | Fired when the dragged node leaves a node | Function(row (row data), column (index for row drag, column for cell drag), cell (node), event, draggingNode (node being dragged)) |
| node-drag-over | Fired while dragging over a node | Function(row (row data), column (index for row drag, column for cell drag), cell (node), event, draggingNode (node being dragged)) |
| node-drag-end | Fired when dragging ends | Function(dragging (node being dragged), drop (drop target), dropType (drop position: before, after, inner), event) |
| node-drop | Fired when the drag completes | Function(dragging (node being dragged), drop (drop target), dropType (drop position: before, after, inner), event) |
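The usage example above binds `allowdrag`, `allowDrop`, and several `@node-drag-*` handlers without defining them. Below is a minimal sketch of what those methods might look like, based only on the signatures in the two tables above; the bodies are illustrative assumptions, not part of ele-table:
``` js
// Illustrative only - signatures follow the tables above, bodies are assumptions.
export default {
  methods: {
    // allow-drag: return false to make a row/cell non-draggable
    allowdrag(row, column, cell, event) {
      return true;
    },
    // allow-drop: return false to forbid dropping at this position
    allowDrop(row, column, cell, event, type) {
      return type !== 'inner'; // e.g. only allow dropping before/after, not inside
    },
    handleDragStart(row, column, cell, event) {
      console.log('drag start', row);
    },
    handleDragEnter(row, column, cell, event, draggingNode) {
      console.log('drag enter', draggingNode);
    },
    handleDragLeave(row, column, cell, event, draggingNode) {
      console.log('drag leave', draggingNode);
    },
    handleDragOver(row, column, cell, event, draggingNode) {
      console.log('drag over', draggingNode);
    },
  },
};
```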
<file_sep>import eleTableColumn from './components/table/src/table-column';
import eleTable from './components/table/src/table.vue';
eleTable.install = function(Vue) {
Vue.component(eleTable.name, eleTable);
};
// export default eleTable;
eleTableColumn.install = function(Vue) {
Vue.component(eleTableColumn.name, eleTableColumn);
};
console.log(eleTable)
export {eleTable, eleTableColumn};
| cd0c65eeba39f5a21b6688f045698ab974763764 | ["Markdown", "JavaScript"] | 2 | Markdown | xuyanming/ele-table | d44ee19d47ab497e9b06a1637500be34353f29f0 | 521088b81635d0749f1bc1697fc61e973f2913f3 |
| refs/heads/master |
<file_sep>url=jdbc:oracle:thin:@kontas2101etl.cbzkbupjh0xq.us-east-1.rds.amazonaws.com:1521:ORCL
username=alikontas
password=<PASSWORD>
<file_sep>package dev.kontas.exceptions;
public class InappropriateCharacterException extends Exception{
}
<file_sep>package dev.kontas.daotests;
import static org.junit.jupiter.api.Assertions.*;
import org.junit.jupiter.api.Test;
import dev.kontas.daos.UserDAO;
import dev.kontas.daos.UserDAOImpl;
import dev.kontas.entities.User;
class UsersDAOTests {
private static UserDAO udao = new UserDAOImpl();
@Test
public void createUser() {
User u = new User("usertest", "passtest", "nametest");
System.out.println(u);
udao.createUser(u);
System.out.println(u);
User u2 = new User("usertest2", "passtest2", "nametest2");
System.out.println(u2);
udao.createUser(u2);
System.out.println(u2);
}
@Test
public void updateUser() {
User u = new User("usertest1", "passtest1", "nametest1");
udao.createUser(u);
System.out.println(u);
u.setName("updated name");
udao.updateUser(u);
System.out.println(u);
}
@Test
public void getUser() {
User u = new User("usertest","passtest1","nametest1");
udao.createUser(u);
System.out.println(u);
User u2 = new User("usertest2","passtest2","nametest2");
udao.createUser(u2);
System.out.println(u2);
User user = udao.getUserById(u.getUserId());
System.out.println(user);
}
@Test
public void deleteUser() {
User u = new User("usertest","passtest1","nametest1");
udao.createUser(u);
System.out.println(u);
User u2 = new User("usertest2","passtest2","nametest2");
udao.createUser(u2);
System.out.println(u2);
udao.deleteUser(u.getUserId());
udao.deleteUser(u2.getUserId());
}
}
<file_sep>package dev.kontas.servicetests;
import static org.junit.jupiter.api.Assertions.*;
import org.junit.jupiter.api.Test;
import dev.kontas.daos.UserDAO;
import dev.kontas.daos.UserDAOImpl;
import dev.kontas.entities.User;
class UserServicesTest {
private static UserDAO udao = new UserDAOImpl();
@Test
public void createUser() {
User u = new User("user", "pass", "name");
System.out.println(u);
udao.createUser(u);
System.out.println(u);
User u2 = new User("user2", "<PASSWORD>", "name2");
System.out.println(u2);
udao.createUser(u2);
System.out.println(u2);
}
}
<file_sep>package dev.kontas.exceptions;
public class UsernameTooShortException extends Exception{
public int inputLength;
public UsernameTooShortException(int length) {
super("Sorry needs to be at least 6 characters");
this.inputLength = length;
}
}
<file_sep>package dev.kontas.daos;
import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashSet;
import java.util.Set;
import dev.kontas.entities.User;
import dev.kontas.util.JDBCConnection;
public class UserDAOImpl implements UserDAO {
public static Connection conn = JDBCConnection.getConnection();
public User createUser(User user) {
// TODO Auto-generated method stub
try {
String sql = "CALL add_person(?,?,?,?)";
CallableStatement cs = conn.prepareCall(sql);
cs.setString(1, user.getUsername());
cs.setString(2, user.getPassword());
cs.setString(3, user.getName());
cs.setString(4, user.getSuperUser());
cs.execute();
} catch (SQLException e) {
e.printStackTrace();
}
return user;
}
public User getUserById(int id) {
// TODO Auto-generated method stub
try {
String sql = "SELECT * FROM person WHERE USER_ID = ?";
PreparedStatement ps = conn.prepareStatement(sql);
ps.setString(1, Integer.toString(id));
ResultSet rs = ps.executeQuery();
if (rs.next()) {
User u = new User();
u.setUsername(rs.getString("USERNAME"));
u.setPassword(rs.getString("<PASSWORD>"));
u.setName(rs.getString("NAME"));
u.setUserId(rs.getInt("USER_ID"));
u.setSuperUser(rs.getString("SYSADMIN"));
return u;
}
} catch (SQLException e) {
e.printStackTrace();
}
return null;
}
public Set<User> getAllUsers() {
// TODO Auto-generated method stub
Set<User> users= new HashSet<User>();
try {
String sql = "SELECT * FROM person";
PreparedStatement ps = conn.prepareStatement(sql);
ResultSet rs = ps.executeQuery();
while (rs.next()) {
User u = new User();
u.setUsername(rs.getString("USERNAME"));
u.setPassword(rs.getString("<PASSWORD>"));
u.setName(rs.getString("NAME"));
u.setUserId(rs.getInt("USER_ID"));
u.setSuperUser(rs.getString("SYSADMIN"));
users.add(u);
}
return users;
} catch (SQLException e) {
e.printStackTrace();
}
return users;
}
public boolean updateUser(User user) {
// TODO Auto-generated method stub
try {
String sql = "UPDATE person SET username = ?, password = ?, name =?, sysadmin=? WHERE USER_ID =?";
PreparedStatement ps = conn.prepareStatement(sql);
ps.setString(1, user.getUsername());
ps.setString(2, user.getPassword());
ps.setString(3, user.getName());
ps.setString(4, user.getSuperUser());
ps.setString(5, Integer.toString(user.getUserId()));
ps.executeUpdate();
return true;
} catch (SQLException e) {
e.printStackTrace();
}
return false;
}
public boolean deleteUser(int id) {
// TODO Auto-generated method stub
try {
String sql = "DELETE person WHERE USER_ID = ?";
PreparedStatement ps = conn.prepareStatement(sql);
ps.setInt(1, id);
ps.executeQuery();
return true;
} catch (SQLException e) {
e.printStackTrace();
}
return false;
}
}
<file_sep>package dev.kontas.daos;
import java.sql.CallableStatement;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashSet;
import java.util.Set;
import dev.kontas.entities.Account;
import dev.kontas.util.JDBCConnection;
public class AccountDAOImpl implements AccountDAO {
public static Connection conn = JDBCConnection.getConnection();
public boolean createAccount(Account account) {
// TODO Auto-generated method stub
try {
String sql = "CALL add_accounts(?,?)";
CallableStatement cs = conn.prepareCall(sql);
cs.setString(1, account.getAccountName());
cs.setString(2, Integer.toString(account.getUSER_ID()));
cs.execute();
return true;
} catch (SQLException e) {
e.printStackTrace();
}
return false;
}
public Account getAccount(int id) {
// TODO Auto-generated method stub
try {
String sql = "SELECT * FROM accounts WHERE USER_ID = ?";
PreparedStatement ps = conn.prepareStatement(sql);
ps.setString(1, Integer.toString(id));
ResultSet rs = ps.executeQuery();
if (rs.next()) {
Account a = new Account();
a.setAccountName(rs.getString("ACCOUNTNAME"));
a.setBANK_ACCOUNT_ID(rs.getInt("BANK_ACCOUNT_ID"));
a.setCurrentBalance(rs.getInt("CURRENTBALANCE"));
a.setUSER_ID(rs.getInt("USER_ID"));
return a;
}
} catch (SQLException e) {
e.printStackTrace();
}
return null;
}
public Set<Account> getAllAccount() {
// TODO Auto-generated method stub
Set<Account> account = new HashSet<Account>();
try {
String sql = "SELECT * FROM accounts";
PreparedStatement ps = conn.prepareStatement(sql);
ResultSet rs = ps.executeQuery();
while (rs.next()) {
Account a = new Account();
a.setAccountName(rs.getString("ACCOUNTNAME"));
a.setBANK_ACCOUNT_ID(rs.getInt("BANK_ACCOUNT_ID"));
a.setCurrentBalance(rs.getInt("CURRENTBALANCE"));
a.setUSER_ID(rs.getInt("USER_ID"));
account.add(a);
}
return account;
} catch (SQLException e) {
e.printStackTrace();
}
return null;
}
public boolean updateAccount(Account account) {
// TODO Auto-generated method stub
try {
String sql = "UPDATE accounts SET accountName = ?, currentBalance = ?, USER_ID = ? WHERE BANK_ACCOUNT_ID = ?";
PreparedStatement ps = conn.prepareStatement(sql);
ps.setString(1, account.getAccountName());
ps.setString(2, Double.toString(account.getCurrentBalance()));
ps.setString(3, Integer.toString(account.getUSER_ID()));
ps.setString(4, Integer.toString(account.getBANK_ACCOUNT_ID()));
ps.executeUpdate();
return true;
} catch (SQLException e) {
e.printStackTrace();
}
return false;
}
public boolean deleteAccount(int id) {
// TODO Auto-generated method stub
try {
String sql = "DELETE accounts WHERE BANK_ACCOUNT_ID = ?";
PreparedStatement ps = conn.prepareStatement(sql);
ps.setInt(1, id);
ps.executeQuery();
return true;
} catch (SQLException e) {
e.printStackTrace();
}
return false;
}
}
<file_sep>package dev.kontas.services;
import java.util.Iterator;
import java.util.Scanner;
import java.util.Set;
import dev.kontas.daos.UserDAO;
import dev.kontas.daos.UserDAOImpl;
import dev.kontas.entities.User;
public class UserServicesImpl implements UserServices {
private static UserDAO udao = new UserDAOImpl();
private static Set<User> users;
public User registerUser(String username, String password, String name) {
// TODO Auto-generated method stub
User user = new User(username, password, name);
return udao.createUser(user);
}
public User login(String username, String password) {
// TODO Auto-generated method stub
for (User u : udao.getAllUsers()) {
if (u.getUsername().equals(username)) {
if (u.getPassword().equals(password)) {
return u;
} else {
System.out.println("Wrong password");
}
}
}
return null;
}
public Set<User> viewUser(User supr) {
// TODO Auto-generated method stub
System.out.println("User List");
Iterator<User> i = udao.getAllUsers().iterator();
while (i.hasNext()) {
System.out.println(i.next());
}
return udao.getAllUsers();
}
public User createUser(User supr, User u) {
// TODO Auto-generated method stub
return null;
}
public boolean deleteUser(int USER_ID) {
// TODO Auto-generated method stub
users = udao.getAllUsers();
for (User u : udao.getAllUsers()) {
if (u.getUserId() == USER_ID) {
users.remove(u);/// delete user here
udao.deleteUser(USER_ID);
}
}
return true;
}
public boolean updateUser(int id) {
// TODO Auto-generated method stub
Scanner com = new Scanner(System.in);
System.out.println("What operation you want to do for id user "+udao.getUserById(id));
System.out.println("USERNAME PASSWORD NAME ACCOUNT");
String op=com.next();
User u=udao.getUserById(id);
if (u.getUserId() == id) {
switch (op) {
case "USERNAME": {
System.out.println("What you want change username to?");
String name = com.next();
u.setUsername(name);
System.out.println("Succesfully changed USERNAME!");
}
break;
case "PASSWORD": {
System.out.println("What you want change password to?");
String password = <PASSWORD>();
u.setPassword(password);
System.out.println("Succesfully changed PASSWORD!");
}
break;
case "NAME": {
System.out.println("What you want change name to?");
String name = com.next();
u.setName(name);
System.out.println("Succesfully changed NAME!");
}
break;
case "ACCOUNT": {
System.out.println("1:Create Account 2:Delete Account?");
int a = com.nextInt();
System.out.println("Succesfully changed ACCOUNT INFO!");
}}}
udao.updateUser(u);
return true;
}
}
<file_sep>package dev.kontas.services;
import java.util.Set;
import dev.kontas.entities.User;
public interface UserServices {
User registerUser(String username,String password, String name);
User login(String username, String password);
Set<User> viewUser(User supr);
User createUser(User supr, User u);
boolean deleteUser(int USER_ID);
boolean updateUser(int id);
}
<file_sep>package dev.kontas.app;
import java.nio.file.spi.FileSystemProvider;
import java.util.Scanner;
import dev.kontas.entities.Account;
import dev.kontas.entities.User;
import dev.kontas.services.AccountServices;
import dev.kontas.services.AccountServicesImpl;
import dev.kontas.services.UserServices;
import dev.kontas.services.UserServicesImpl;
import dev.kontas.exceptions.*;
public class App {
private static Scanner scan = new Scanner(System.in);
private static UserServices userv = new UserServicesImpl();
private static AccountServices aserv = new AccountServicesImpl();
private static User loggedInEmployee = null;
private static Account a = null;
public static void main(String[] args) {
UsernameValidator validator = new UsernameValidatorImpl();
while (true) {
boolean loggedIn = false;
System.out.println("Welcome to bank !!");
System.out.println("1: for enrollment, 2: login");
int choice = scan.nextInt();
switch (choice) {
case 1: {
System.out.println("Please enter a username");
String username = scan.next();
System.out.println("Please enter a password");
String password = scan.next();
System.out.println("Please enter your name");
String name = scan.next();
try {
boolean isValid = validator.validUsername(username) && validator.validUsername(password);
if (isValid) {
System.out.println("Account created successfully");
User usr = userv.registerUser(username, password, name);
System.out.println(usr);
}
} catch (InappropriateCharacterException e) {
System.out.println("You used an inappropriate character");
} catch (UsernameTooShortException e) {
System.out.println("Username or password was too short");
System.out.println(e.getMessage() + " your username was this long: " + e.inputLength);
System.out.println(e.getMessage() + " your password was this long: " + password.length());
}
}
break;
case 2: {
System.out.println("Please enter your username");
String username = scan.next();
System.out.println("Please enter your password");
String password = scan.next();
loggedInEmployee = userv.login(username, password);
if (loggedInEmployee != null) {
loggedIn = true;
System.out.println(loggedInEmployee);
}
}
break;
}
while (loggedIn) {
if (loggedIn) {
if (loggedInEmployee.getSuperUser().equals("Y")) {
System.out.println("Entered as super user");
System.out.println(
"1:view all users\n2:create new user\n3:update a user\n4:delete a user\n5:See all accounts\n6:Log Out");
int decision = scan.nextInt();
switch (decision) {
case 1: {
userv.viewUser(loggedInEmployee);
}
;
break;
case 2: {
System.out.println("Please enter a username");
String username = scan.next();
System.out.println("Please enter a password");
String password = scan.next();
System.out.println("Please enter your name");
String name = scan.next();
User usr = userv.registerUser(username, password, name);
System.out.println(usr);
}
;
break;
case 3: { ///mess with tomorrow
System.out.println("Please enter user id for update function");
int uId=scan.nextInt();
userv.updateUser(uId);
}
;
break;
case 4: {
System.out.println("Please enter user id for delete");
int uId = scan.nextInt();
System.out.println("Please enter user name for delete");
String uName = scan.next();
userv.deleteUser(uId);
}
;
break;
case 5: {
System.out.println("Accounts:");
aserv.allAccount();
}
;
break;
case 6: {
loggedIn = false;
}
;
break;
}
} else {
System.out.println("Welcome");
System.out.println("Accounts:");
aserv.currentAccount(loggedInEmployee.getUserId());
System.out.println("1:Create Account\n2:Delete Account\n3:Deposit\n4:Withdrawl\n5:See All Accounts\n6:Log Out");
int decision = scan.nextInt();
switch (decision) {
case 1: {
System.out.println("Please enter a account name");
String accName = scan.next();
int userId = loggedInEmployee.getUserId();
a = aserv.createAccount(accName, userId);
System.out.println("Account created\nName is :" + accName + "\nBank Account ID : "
+ a.getBANK_ACCOUNT_ID());
}
;
break;
case 2: {
System.out.println("Please enter account id for delete");
int accId = scan.nextInt();
int userId = loggedInEmployee.getUserId();
aserv.deleteAccount(accId, userId);
}
;
break;
case 3: {
System.out.println("Please enter account id for deposit");
int accId = scan.nextInt();
System.out.println("Please enter amount id for deposit");
double amount = scan.nextDouble();
int userId = loggedInEmployee.getUserId();
aserv.depositToAccount(amount, accId, userId);
System.out.println(amount + " added to account number " + accId);
}
;
break;
case 4: {
System.out.println("Please enter account id for withdraw");
int accId = scan.nextInt();
System.out.println("Please enter amount id for withdraw");
double amount = scan.nextDouble();
int userId = loggedInEmployee.getUserId();
aserv.withdrawFromAccount(amount, accId, userId);
}
;
break;
case 5: {
System.out.println("Accounts:");
aserv.currentAccount(loggedInEmployee.getUserId());
}
;
break;
case 6: {
loggedIn = false;
}
;
break;
}
}
}
}
}
}
}
<file_sep>package dev.kontas.daotests;
import static org.junit.jupiter.api.Assertions.*;
import org.junit.jupiter.api.Test;
import dev.kontas.daos.AccountDAO;
import dev.kontas.daos.AccountDAOImpl;
import dev.kontas.entities.Account;
class AccountsDAOTests {
private static AccountDAO adao = new AccountDAOImpl();
@Test
public void createAccount() {
Account a = new Account("testName",126);
System.out.println(a);
adao.createAccount(a);
System.out.println(a);
Account a2 = new Account("testName2",127 );
System.out.println(a2);
adao.createAccount(a2);
System.out.println(a2);
}
@Test
public void updateAccount() {
Account a = new Account("testName",1); // Any entity before it is saved
adao.createAccount(a);
System.out.println(a);
a.setCurrentBalance(100);// updating an object is modifying the object
adao.updateAccount(a);
System.out.println(a);
}
@Test
public void getAccount() {
Account a = new Account("ASD",3); // Any entity before it is saved
adao.createAccount(a);
System.out.println(a);
Account a2 = new Account("ASF",4 );
System.out.println(a2);
adao.createAccount(a2);
System.out.println(a2);
Account acc = adao.getAccount(a.getUSER_ID());
System.out.println(acc);
}
@Test
public void deleteAccount() {
Account a = new Account("testName",1); // Any entity before it is saved
adao.createAccount(a);
System.out.println(a);
Account a2 = new Account("testName2",1 );
System.out.println(a2);
adao.createAccount(a2);
System.out.println(a2);
adao.deleteAccount(a.getUSER_ID());
adao.deleteAccount(a2.getUSER_ID());
}
}
<file_sep>package dev.kontas.exceptions;
import dev.kontas.exceptions.InappropriateCharacterException;
import dev.kontas.exceptions.UsernameTooShortException;
public interface UsernameValidator {
boolean validUsername(String username) throws InappropriateCharacterException, UsernameTooShortException;
}
<file_sep>package dev.kontas.services;
import java.util.Set;
import dev.kontas.entities.Account;
public interface AccountServices {
Account createAccount(String accountName, int USER_ID);
boolean deleteAccount(int BANK_ACCOUNT_ID, int USER_ID );
Account depositToAccount(double amount, int BANK_ACCOUNT_ID, int USER_ID);
Account withdrawFromAccount(double amount,int BANK_ACCOUNT_ID, int USER_ID);
void currentAccount(int userId);
void allAccount();
}
| 288f622cdf9743f28f6426aa842563b8375533a1 | ["Java", "INI"] | 13 | INI | baturhankontas/RevatureBankApp | 2c5cd5918f4747cc8b7168291c3898ca3ff2c478 | 61b775d2d57cda5b955ed2d0672df74a79a9119f |
| refs/heads/master |
<repo_name>JaeKeoung/kcc<file_sep>/PhpProject1/kccSignInView.php
<?php
include 'sub3.php';
show_all_record($pdo);
?>
| fab92257239f9c662c152c3247c53e66c8999ad4 | ["PHP"] | 1 | PHP | JaeKeoung/kcc | f6533dc4e83a698cc0c080b312cd0cd5c22ed087 | 0007f0ca8a19480e96f61a9feb4b4eccd2a14d5c |
| refs/heads/master |
<file_sep>Procedural Cave Generator
-------------------------
Tool to generate Cellular Automata based caves procedurally using primitives
Description:
------------
The tool enables a custom "Cave Generator" menu to launch an editor window that lets the user generate procedural caves based on the cellular automata model by setting various parameters.
Usage Description:
------------------
1) Download the ProceduralCaveGenerator Unity package file from the repository
2) Create a new project in Unity & import the downloaded package
3) Select Cave Generator from the top Menu Bar & select Launch Editor
4) Enter values for CaveMap width & height eg: 50 for both
5) Enter percentage of walls required (ideal is between 20-30) eg: 27
6) Enter threshold value for empty cells to be generated (ideal is up to 1/3 of width * height) eg: 800 for 50 * 50. The threshold value auto-updates depending on the width & height entered to generate reasonable cave maps.
7) Enter seed value as 0 to keep generating randomized caves or a value other than 0 to be able to regenerate a cave eg: 47. If seed value is set it will ignore threshold value.
8) Click Generate Cave button to generate randomized cave or regenerate previously generated cave from seed. If the seed is not set i.e. set to "0", the user can keep on clicking Generate Cave button to get different randomized caves.
9) Click on Save Generated Cave as Prefab button to save the currently generated cave as a prefab with current timestamp. This button is not enabled unless a new cave is generated or if the user clicks on play button to run the generated cave in the scene.
10) Click on Remove Single Walls button to remove single walls if any from the currently generated cave. This button is not enabled unless a new cave is generated or if the user clicks on play button to run the generated cave in the scene.
11) You can test the cave by dropping the FPSController and MiniMap prefabs in the generated cave from the FPSController Prefabs folder in the package & pressing play.
Alternatively, you can download the entire project from the repository & play around with the demo GameScene that has been set up.
References:
-----------
Cellular Automata Rules - http://www.roguebasin.com/index.php?title=Cellular_Automata_Method_for_Generating_Random_Cave-Like_Levels#Rule_Tweaking
Flood Fill Algorithm - https://en.wikipedia.org/wiki/Flood_fill
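For readers who do not want to follow the links, here is a compact, illustrative JavaScript sketch of the two referenced ideas (the neighbour-counting smoothing rule and a 4-neighbour flood fill). It is only meant to convey the algorithms; the tool itself ships the C# implementation shown below.
```js
// Illustrative sketch of the referenced techniques (not the shipped C# code).
// map is a 2D array of 0 (floor) and 1 (wall), indexed as map[x][y].

// Cellular-automata smoothing: a cell becomes a wall when most of its 8 neighbours are walls.
function smooth(map) {
  const w = map.length, h = map[0].length;
  const next = map.map(row => row.slice());
  for (let x = 0; x < w; x++) {
    for (let y = 0; y < h; y++) {
      let walls = 0;
      for (let dx = -1; dx <= 1; dx++) {
        for (let dy = -1; dy <= 1; dy++) {
          if (dx === 0 && dy === 0) continue;
          const nx = x + dx, ny = y + dy;
          // out-of-bounds neighbours count as walls so borders stay solid
          walls += (nx < 0 || ny < 0 || nx >= w || ny >= h) ? 1 : map[nx][ny];
        }
      }
      next[x][y] = walls > 4 ? 1 : (walls < 4 ? 0 : map[x][y]);
    }
  }
  return next;
}

// 4-neighbour flood fill: collect every cell of the same type connected to (sx, sy).
function floodFill(map, sx, sy) {
  const w = map.length, h = map[0].length;
  const target = map[sx][sy];
  const visited = new Set([`${sx},${sy}`]);
  const queue = [[sx, sy]];
  const region = [];
  while (queue.length > 0) {
    const [x, y] = queue.shift();
    region.push([x, y]);
    for (const [nx, ny] of [[x + 1, y], [x - 1, y], [x, y + 1], [x, y - 1]]) {
      if (nx >= 0 && ny >= 0 && nx < w && ny < h &&
          !visited.has(`${nx},${ny}`) && map[nx][ny] === target) {
        visited.add(`${nx},${ny}`);
        queue.push([nx, ny]);
      }
    }
  }
  return region;
}
```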
<file_sep>/*
* ==================================================================
* CharacterController.cs
* CaveGenerator
*
* Created by <NAME>
* Copyright (c) 2016, <NAME>. All rights reserved.
* ==================================================================
*/
using UnityEngine;
public class SimpleCharacterController : MonoBehaviour
{
public int speed = 2;
float horizontal;
float vertical;
void Start()
{
//Hide Cursor
Cursor.lockState = CursorLockMode.Locked;
}
void Update()
{
//Get keyboard input axes
horizontal = Input.GetAxis("Horizontal") * speed * Time.deltaTime;
vertical = Input.GetAxis("Vertical") * speed * Time.deltaTime;
// Simple Translation
transform.Translate(horizontal,0,vertical);
}
}
<file_sep>/*
* ==================================================================
* CaveGenerator.cs
* CaveGenerator
*
* Created by <NAME>
* Copyright (c) 2016, <NAME>. All rights reserved.
* ==================================================================
*/
using UnityEngine;
using System.Collections.Generic;
public static class CaveGenerator
{
public static GameObject cave;
static int[,] caveMap;
static int width;
static int height;
static int threshold;
static int seed;
/// <summary>
/// Generates a cave with the given parameters
/// </summary>
/// <param name="caveWidth">Width of the cave</param>
/// <param name="caveHeight">Height of the cave</param>
/// <param name="wallFillPercent">Threshold Percentage of cave to be filled with walls</param>
/// <param name="roomthreshold">Threshold value required for expected empty cells</param>
/// <param name="caveSeed">Set to 0 for random caves or any other integer value for cave regeneration given same width and height</param>
public static void CreateCaveMap(int caveWidth, int caveHeight, int wallFillPercent, int roomthreshold,int caveSeed)
{
//Initialize required values
width = caveWidth;
height = caveHeight;
caveMap = new int[width, height];
threshold = roomthreshold;
seed = caveSeed;
int smoothValue = 7; //Iterations to run on generated cave for surrounding wall checks
//Generate random with or without seed
System.Random prng = (seed != 0) ? new System.Random(seed) : new System.Random();
for (int x = 0; x < width; x++)
{
for (int y = 0; y < height; y++)
{
//Set border cells to walls
if (x == 0 || y == 0 || x == width - 1 || y == height - 1)
{
caveMap[x, y] = 1;
}
else
{
caveMap[x, y] = ((prng.Next(1, 100) >= wallFillPercent) ? 0 : 1); //set walls inside grid depending on threshold value
}
}
}
//Smooth out the walls a few times
SmoothWalls(smoothValue, false);
//Fill Empty cell Islands with walls
FillMap();
//Remove as many isolated single cells as possible
SmoothWalls(smoothValue, true);
//Draw 3D Primitives for walls
DrawCaveMap();
}
/// <summary>
/// Function to make multiple wall adjustments
/// </summary>
/// <param name="loopCount">Number of iterations to run</param>
/// <param name="doCleanse">Try to reduce single cells?</param>
static void SmoothWalls(int loopCount, bool doCleanse)
{
for (int i = 0; i < loopCount; i++)
{
AjustWalls(doCleanse);
}
}
/// <summary>
/// Function to Adjust walls
/// </summary>
/// <param name="doCleanse">Try to reduce single cells?</param>
static void AjustWalls(bool doCleanse)
{
//Tweakable values
int wallUpperThreshold = 5;
int wallLowerThreshold = 5;
for (int x = 0; x < width; x++)
{
for (int y = 0; y < height; y++)
{
//Get Surrounding Walls Count
int wallCount = GetSurroundingWalls(x, y);
if (!doCleanse)
{
if (wallCount > wallUpperThreshold)
{
caveMap[x, y] = 1;
}
else if (wallCount < wallLowerThreshold)
{
caveMap[x, y] = 0;
}
}
else
{
if (wallCount == 0)
{
caveMap[x, y] = 0; //Get rid of single cells
}
}
}
}
}
/// <summary>
/// Function to get surrounding walls of a cell
/// </summary>
/// <param name="cellX">Row number of cell</param>
/// <param name="cellY">Column number of cell</param>
static int GetSurroundingWalls(int cellX, int cellY)
{
int surroundWalls = 0;
for (int x = cellX - 1; x <= cellX + 1; x++)
{
for (int y = cellY - 1; y <= cellY + 1; y++)
{
//Check bounds
if (x >= 0 && x < width && y >= 0 && y < height)
{
//Filter self
if (x != cellX || y != cellY)
{
surroundWalls += caveMap[x, y];
}
}
else
{
surroundWalls++;
}
}
}
return surroundWalls;
}
/// <summary>
/// Function to Get biggest connected region based on threshold and fill the rest with walls
/// </summary>
static void FillMap()
{
int loopFailSafeCount = 1000; // Number of tries before giving up to get desired cave
//Get first random empty cell
int[] startXY = isEmptyCell();
int startX = startXY[0];
int startY = startXY[1];
//Run FloodFill function with random cell indices for given seed
List<Cell> cells = FloodFill(startX, startY);
int iterations = 1; //Set FailSafe loop count to 1
while (cells.Count < threshold && seed == 0)
{
iterations++;
startXY = isEmptyCell();
startX = startXY[0];
startY = startXY[1];
//Run FloodFill function with random empty cell indices without seed
cells = FloodFill(startX, startY);
//Break if failed to get desired cave after trying too many times
if (iterations > loopFailSafeCount)
{
//Log warning
Debug.LogWarning("Falied to get desired cave. Try changing parameters and generate cave again.");
break;
}
}
//Log calculation output
Debug.Log("Empty cell count : " + cells.Count + " iterations: " + iterations);
//Convert everything outside the connected region to walls by filling the whole grid first
for (int x = 0; x < width; x++)
{
for (int y = 0; y < height; y++)
{
caveMap[x, y] = 1;
}
}
//Carve the connected region's cells back out as empty cells
foreach (Cell cell in cells)
{
caveMap[cell.cellX, cell.cellY] = 0;
}
}
/// <summary>
/// Function to return empty cell with or without seed
/// </summary>
static int[] isEmptyCell()
{
bool isEmpty = false;
int randX;
int randY;
int[] emptyXY = new int[2];
while (!isEmpty)
{
System.Random rand;
rand = (seed != 0) ? new System.Random(seed) : new System.Random();
randX = rand.Next(1, width);
randY = rand.Next(1, height);
if (caveMap[randX, randY] == 0 || seed != 0)
{
emptyXY[0] = randX;
emptyXY[1] = randY;
isEmpty = true;
}
}
return emptyXY;
}
/// <summary>
/// Function for FloodFill Algorithm
/// </summary>
/// <param name="startX">Row Number of Start cell value</param>
/// <param name="startY">Column Number of Start cell value</param>
static List<Cell> FloodFill(int startX, int startY)
{
//Setup Cell List & 2D array for tracking cell visits with passed random cell indices
List<Cell> cells = new List<Cell>();
int[,] visitMap = new int[width, height];
int startCell = caveMap[startX, startY];
//Create Queue to store visited cells
Queue<Cell> cellQueue = new Queue<Cell>();
cellQueue.Enqueue(new Cell(startX, startY));
visitMap[startX, startY] = 1;
while(cellQueue.Count > 0)
{
Cell cell = cellQueue.Dequeue();
cells.Add(cell);
//Check Adjacent cells
for (int x = cell.cellX - 1; x <= cell.cellX + 1; x++)
{
for (int y = cell.cellY - 1; y <= cell.cellY + 1; y++)
{
//Filter out diagonal cells and check range
if ((x >= 0 && x < width && y >= 0 && y < height) && (x == cell.cellX || y == cell.cellY))
{
//Check if visited and cell type
if (visitMap[x,y] == 0 && caveMap[x,y] == startCell)
{
visitMap[x, y] = 1;
cellQueue.Enqueue(new Cell(x, y));
}
}
}
}
}
//Return empty cells' list
return cells;
}
/// <summary>
/// Function to generate 3D cave using CaveMap grid
/// </summary>
static void DrawCaveMap()
{
cave = new GameObject();
cave.name = "Cave";
float wallOffset = 0.5f;
//Setup cave ground and its material
GameObject ground = GameObject.CreatePrimitive(PrimitiveType.Plane);
ground.name = "Ground";
ground.transform.localScale = new Vector3((float)width / 10f, 1f, (float)height / 10f);
ground.transform.parent = cave.transform;
Material groundMat = Resources.Load("Ground_MAT") as Material;
ground.GetComponent<Renderer>().material = groundMat;
//Wall Material
Material wallMat = Resources.Load("Wall_MAT") as Material;
for (int x = 0; x < width; x++)
{
for (int y = 0; y < height; y++)
{
if (caveMap[x, y] == 1)
{
//Generate and setup walls from cube primitives
GameObject wall = GameObject.CreatePrimitive(PrimitiveType.Cube);
wall.name = "Wall";
wall.transform.position = new Vector3((float)(-(float)width / 2 + x) + wallOffset, wallOffset, (float)(-(float)height / 2 + y) + wallOffset); // offset wall position to start from ground and grid center
wall.transform.parent = cave.transform;
wall.GetComponent<Renderer>().material = wallMat;
}
}
}
}
/// <summary>
/// Function to remove single walls in generated CaveMap
/// </summary>
public static void CleanCaveMap()
{
List<GameObject> walls = new List<GameObject>();
float overlapBoxSize = 0.7f;
int overlapCount = 3;
int childCount = cave.transform.childCount;
int singleWallsCount = 0;
//Get list of all child walls
for (int i = 0; i < childCount - 1; i++)
{
if (cave.transform.GetChild(i).transform.name == "Wall")
{
walls.Add(cave.transform.GetChild(i).transform.gameObject);
}
}
//Detect collision with other walls if any
for (int i = 0; i < walls.Count; i++)
{
Collider[] cols = Physics.OverlapBox(walls[i].transform.position, new Vector3(overlapBoxSize, overlapBoxSize, overlapBoxSize));
//Destroy single wall
if (cols.Length < overlapCount)
{
singleWallsCount++;
UnityEngine.GameObject.DestroyImmediate(walls[i]);
}
}
//Output Operation Log
if (singleWallsCount == 0)
{
Debug.LogWarning("No single walls detected!");
}
else
{
Debug.Log("Deleted " + singleWallsCount + " walls");
}
}
}
<file_sep>/*
* ==================================================================
* MiniMap.cs
* CaveGenerator
*
* Created by <NAME>
* Copyright (c) 2016, <NAME>. All rights reserved.
* ==================================================================
*/
using UnityEngine;
public class MiniMap : MonoBehaviour
{
void Start()
{
//Reset Minimap size on start
if (GameObject.Find("Ground") != null)
{
gameObject.GetComponent<Camera>().orthographicSize = GameObject.Find("Ground").transform.localScale.x * 5.5f; //Expensive - Only for Testing
}
}
}
<file_sep>/*
* ==================================================================
* MouseLook.cs
* CaveGenerator
*
* Created by <NAME>
* Copyright (c) 2016, <NAME>. All rights reserved.
* ==================================================================
*/
using UnityEngine;
public class MouseLook : MonoBehaviour
{
public float sensitivity;
public float minY = -60F;
public float maxY = 60F;
Vector2 rotationXY;
GameObject character;
void Start()
{
character = transform.parent.gameObject;
}
void Update()
{
//Get mouse input & store cumulative value
Vector2 mouseDelta = new Vector2(Input.GetAxis("Mouse X") * sensitivity, Input.GetAxis("Mouse Y") * sensitivity);
rotationXY += mouseDelta;
//Clamp y-rotation angle
rotationXY.y = ClampAngle(rotationXY.y, minY, maxY);
//Apply y-roation to camera & x-roation to character
transform.localRotation = Quaternion.AngleAxis(-rotationXY.y, Vector3.right);
character.transform.localRotation = Quaternion.AngleAxis(rotationXY.x, character.transform.up);
}
//Rotation Clamping function
float ClampAngle(float angle, float min, float max)
{
if (angle < -360F)
angle += 360F;
if (angle > 360F)
angle -= 360F;
return Mathf.Clamp(angle, min, max);
}
}
<file_sep>/*
* ==================================================================
* Cell.cs
* CaveGenerator
*
* Created by <NAME>
* Copyright (c) 2016, <NAME>. All rights reserved.
* ==================================================================
*/
public class Cell
{
public int cellX;
public int cellY;
public Cell(int x, int y)
{
cellX = x;
cellY = y;
}
}
<file_sep>/*
* ==================================================================
* CaveMenu.cs
* CaveGenerator
*
* Created by <NAME>
* Copyright (c) 2016, <NAME>. All rights reserved.
* ==================================================================
*/
#if UNITY_EDITOR
using UnityEditor;
#endif
public static class CaveMenu
{
//Create Custom Editor Menu
[MenuItem("Cave Generator/Launch Editor")]
public static void InitCaveEditor()
{
CaveGeneratorWindow.InitCaveEditorWindow();
}
}
<file_sep>/*
* ==================================================================
* CaveGeneratorWindow.cs
* CaveGenerator
*
* Created by <NAME>
* Copyright (c) 2016, <NAME>. All rights reserved.
* ==================================================================
*/
using UnityEngine;
#if UNITY_EDITOR
using UnityEditor;
#endif
public class CaveGeneratorWindow : EditorWindow
{
public static CaveGeneratorWindow caveWindow;
public int width;
public int height;
public int seed;
public int threshold;
public int fillProbability;
/// <summary>
/// Function to Initialize Cave Editor Window
/// </summary>
public static void InitCaveEditorWindow()
{
//Setup Custom Editor Window
caveWindow = (CaveGeneratorWindow)EditorWindow.GetWindow<CaveGeneratorWindow>();
caveWindow.titleContent = new GUIContent("Cave Editor");
caveWindow.minSize = new Vector2(300, 400);
}
void OnGUI()
{
//Create required setup fields
width = EditorGUILayout.IntField("CaveMap Width : ", width);
height = EditorGUILayout.IntField("CaveMap Height : ", height);
fillProbability = EditorGUILayout.IntSlider("Wall Percent : ", fillProbability, 1, 100, GUILayout.MinWidth(100));
//Threshold for empty cells - auto-adjusts to new width & height to keep the value reasonable
threshold = EditorGUILayout.IntSlider("Threshold : ", threshold, 1, (width*height)/3, GUILayout.MinWidth(100));
//Set seed to "0" for random generation or any number other than "0" to be able to regenerate cave next time
seed = EditorGUILayout.IntField("Seed : ", seed);
//Button to Generate Cave
if (GUILayout.Button("Generate Cave"))
{
//Destroy previous cave if it exists
if (CaveGenerator.cave != null)
{
DestroyImmediate(CaveGenerator.cave);
}
CaveGenerator.CreateCaveMap(width, height, fillProbability, threshold,seed);
}
//Disable buttons by default
EditorGUI.BeginDisabledGroup(CaveGenerator.cave == null);
//Button to Save Cave as Prefab with a unique name
if (GUILayout.Button("Save Generated Cave as Prefab"))
{
string prefabPath = "Assets/ProceduralCaveGenerator/Prefabs/";
Object prefab = PrefabUtility.CreateEmptyPrefab(prefabPath + CaveGenerator.cave.name + System.DateTime.Now.ToString("_MMddyyhhmmss") + ".prefab");
//Null check for new cave
if (CaveGenerator.cave != null)
{
PrefabUtility.ReplacePrefab(CaveGenerator.cave, prefab, ReplacePrefabOptions.ConnectToPrefab);
}
else
{
Debug.LogError("Cannot find a newly generated cave. Please generate a new cave to save as prefab."); //Just for safety - control should not be able to come here
}
//Log output path
Debug.Log("Saved to " + prefabPath);
}
//Button to optionally remove single walls if any
if (GUILayout.Button("Remove Single Walls"))
{
//Null check for new cave
if (CaveGenerator.cave != null)
{
CaveGenerator.CleanCaveMap();
}
else
{
Debug.LogError("Cannot find a newly generated cave. Please generate a new cave to check for single walls."); //Just for safety - control should not be able to come here
}
}
EditorGUI.EndDisabledGroup();
}
}
| 4549bc4fb792beb23ae498b26dcb126feee1cea7 | ["Markdown", "C#"] | 8 | Markdown | parag-padubidri/CaveGenerator | d183028b31809b743339fb1ea4272346077a98f6 | 40ee95453b9f07b596eae037c5f7611fbc7a3b91 |
| refs/heads/master |
<file_sep>import React, { Component } from 'react';
import { Link } from 'react-router'
class Header extends Component {
render() {
return (
<div className="header clearfix">
<nav>
<ul className="nav nav-pills pull-right">
<li role="presentation"><Link to="/" activeClassName="active">Home</Link></li>
<li role="presentation"><Link to="about" activeClassName="active">About</Link></li>
<li role="presentation"><Link to="contact" activeClassName="active">Contact</Link></li>
</ul>
</nav>
<h3 className="text-muted">First react app</h3>
</div>
)
}
}
export default Header;<file_sep>import React, { Component } from 'react';
import Header from './tpl/Header';
import Footer from './tpl/Footer';
class About extends Component {
render() {
return (
<div className="container">
<Header />
<div className="jumbotron">
<h1>About</h1>
<p className="lead">Cras justo odio, dapibus ac facilisis in, egestas eget quam. Fusce dapibus, tellus ac cursus commodo, tortor mauris condimentum nibh, ut fermentum massa justo sit amet risus.</p>
<p><a className="btn btn-lg btn-success" href="#" role="button">Sign up today</a></p>
</div>
<div className="row marketing">
<div className="col-lg-6">
<h4>Subheading</h4>
<p>Donec id elit non mi porta gravida at eget metus. Maecenas faucibus mollis interdum.</p>
<h4>Subheading</h4>
<p>Morbi leo risus, porta ac consectetur ac, vestibulum at eros. Cras mattis consectetur purus sit amet fermentum.</p>
<h4>Subheading</h4>
<p>Maecenas sed diam eget risus varius blandit sit amet non magna.</p>
</div>
<div className="col-lg-6">
<h4>Subheading</h4>
<p>Donec id elit non mi porta gravida at eget metus. Maecenas faucibus mollis interdum.</p>
<h4>Subheading</h4>
<p>Morbi leo risus, porta ac consectetur ac, vestibulum at eros. Cras mattis consectetur purus sit amet fermentum.</p>
<h4>Subheading</h4>
<p>Maecenas sed diam eget risus varius blandit sit amet non magna.</p>
</div>
</div>
<Footer />
</div>
)
}
}
export default About;<file_sep>import React from 'react';
import { render } from 'react-dom'
import { Router, Route, Link, browserHistory } from 'react-router'
import Home from './Home';
import About from './About';
import Contact from './Contact';
render((
<Router history={browserHistory} >
<Route path="/" component={Home} />
<Route path="/about" component={About} />
<Route path="/contact" component={Contact} />
</Router>
), document.getElementById('app'));
| dbd69d91c205779eb00f49411878a374333d8cad | ["JavaScript"] | 3 | JavaScript | mathloureiro/first-app-react | 74f2b688807190191a6ff5567dba4d37e3b3eefc | 6592ac767dfc78366d18fc8bc41f04ce1676eb95 |
| refs/heads/main |
<file_sep># cdk8s+ (cdk8s-plus)
### High level constructs for Kubernetes
<br>

**cdk8s+** is a software development framework that provides high level abstractions for authoring Kubernetes applications.
Built on top of the auto-generated building blocks provided by [cdk8s](../cdk8s), this library includes a hand-crafted *construct*
for each native Kubernetes object, exposing richer APIs with reduced complexity.
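
As a rough illustration of the intended usage, here is a minimal JavaScript sketch. The constructor and option shapes below are assumptions for this version rather than documented API, so treat it as a sketch, not a reference:
```js
// Minimal sketch - constructor/option shapes are assumptions, not the documented API.
const { App, Chart } = require('cdk8s');
const kplus = require('cdk8s-plus-17');

const app = new App();
const chart = new Chart(app, 'demo');

// Deployment exposes addContainer() (see the generated Go bindings below);
// the exact argument shape may differ between versions.
const deployment = new kplus.Deployment(chart, 'web');
deployment.addContainer(new kplus.Container({ image: 'nginx', port: 80 }));

app.synth(); // writes the synthesized manifests (to dist/ by default)
```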
## Documentation
See [cdk8s.io](https://cdk8s.io/docs/latest/plus).
## License
This project is distributed under the [Apache License, Version 2.0](./LICENSE).
This module is part of the [cdk8s project](https://github.com/awslabs/cdk8s).
<file_sep>package cdk8splus17
import (
"reflect"
_jsii_ "github.com/aws/jsii-runtime-go/runtime"
)
func init() {
_jsii_.RegisterStruct(
"cdk8s-plus-17.AddDirectoryOptions",
reflect.TypeOf((*AddDirectoryOptions)(nil)).Elem(),
)
_jsii_.RegisterStruct(
"cdk8s-plus-17.CommandProbeOptions",
reflect.TypeOf((*CommandProbeOptions)(nil)).Elem(),
)
_jsii_.RegisterClass(
"cdk8s-plus-17.ConfigMap",
reflect.TypeOf((*ConfigMap)(nil)).Elem(),
[]_jsii_.Member{
_jsii_.MemberMethod{JsiiMethod: "addBinaryData", GoMethod: "AddBinaryData"},
_jsii_.MemberMethod{JsiiMethod: "addData", GoMethod: "AddData"},
_jsii_.MemberMethod{JsiiMethod: "addDirectory", GoMethod: "AddDirectory"},
_jsii_.MemberMethod{JsiiMethod: "addFile", GoMethod: "AddFile"},
_jsii_.MemberProperty{JsiiProperty: "apiObject", GoGetter: "ApiObject"},
_jsii_.MemberProperty{JsiiProperty: "binaryData", GoGetter: "BinaryData"},
_jsii_.MemberProperty{JsiiProperty: "data", GoGetter: "Data"},
_jsii_.MemberProperty{JsiiProperty: "metadata", GoGetter: "Metadata"},
_jsii_.MemberProperty{JsiiProperty: "name", GoGetter: "Name"},
_jsii_.MemberMethod{JsiiMethod: "onPrepare", GoMethod: "OnPrepare"},
_jsii_.MemberMethod{JsiiMethod: "onSynthesize", GoMethod: "OnSynthesize"},
_jsii_.MemberMethod{JsiiMethod: "onValidate", GoMethod: "OnValidate"},
_jsii_.MemberMethod{JsiiMethod: "toString", GoMethod: "ToString"},
},
func() interface{} {
j := jsiiProxy_ConfigMap{}
_jsii_.InitJsiiProxy(&j.jsiiProxy_Resource)
_jsii_.InitJsiiProxy(&j.jsiiProxy_IConfigMap)
return &j
},
)
_jsii_.RegisterStruct(
"cdk8s-plus-17.ConfigMapProps",
reflect.TypeOf((*ConfigMapProps)(nil)).Elem(),
)
_jsii_.RegisterStruct(
"cdk8s-plus-17.ConfigMapVolumeOptions",
reflect.TypeOf((*ConfigMapVolumeOptions)(nil)).Elem(),
)
_jsii_.RegisterClass(
"cdk8s-plus-17.Container",
reflect.TypeOf((*Container)(nil)).Elem(),
[]_jsii_.Member{
_jsii_.MemberMethod{JsiiMethod: "addEnv", GoMethod: "AddEnv"},
_jsii_.MemberProperty{JsiiProperty: "args", GoGetter: "Args"},
_jsii_.MemberProperty{JsiiProperty: "command", GoGetter: "Command"},
_jsii_.MemberProperty{JsiiProperty: "env", GoGetter: "Env"},
_jsii_.MemberProperty{JsiiProperty: "image", GoGetter: "Image"},
_jsii_.MemberProperty{JsiiProperty: "imagePullPolicy", GoGetter: "ImagePullPolicy"},
_jsii_.MemberMethod{JsiiMethod: "mount", GoMethod: "Mount"},
_jsii_.MemberProperty{JsiiProperty: "mounts", GoGetter: "Mounts"},
_jsii_.MemberProperty{JsiiProperty: "name", GoGetter: "Name"},
_jsii_.MemberProperty{JsiiProperty: "port", GoGetter: "Port"},
_jsii_.MemberProperty{JsiiProperty: "workingDir", GoGetter: "WorkingDir"},
},
func() interface{} {
return &jsiiProxy_Container{}
},
)
_jsii_.RegisterStruct(
"cdk8s-plus-17.ContainerProps",
reflect.TypeOf((*ContainerProps)(nil)).Elem(),
)
_jsii_.RegisterClass(
"cdk8s-plus-17.Deployment",
reflect.TypeOf((*Deployment)(nil)).Elem(),
[]_jsii_.Member{
_jsii_.MemberMethod{JsiiMethod: "addContainer", GoMethod: "AddContainer"},
_jsii_.MemberMethod{JsiiMethod: "addVolume", GoMethod: "AddVolume"},
_jsii_.MemberProperty{JsiiProperty: "apiObject", GoGetter: "ApiObject"},
_jsii_.MemberProperty{JsiiProperty: "containers", GoGetter: "Containers"},
_jsii_.MemberMethod{JsiiMethod: "expose", GoMethod: "Expose"},
_jsii_.MemberProperty{JsiiProperty: "labelSelector", GoGetter: "LabelSelector"},
_jsii_.MemberProperty{JsiiProperty: "metadata", GoGetter: "Metadata"},
_jsii_.MemberProperty{JsiiProperty: "name", GoGetter: "Name"},
_jsii_.MemberMethod{JsiiMethod: "onPrepare", GoMethod: "OnPrepare"},
_jsii_.MemberMethod{JsiiMethod: "onSynthesize", GoMethod: "OnSynthesize"},
_jsii_.MemberMethod{JsiiMethod: "onValidate", GoMethod: "OnValidate"},
_jsii_.MemberProperty{JsiiProperty: "podMetadata", GoGetter: "PodMetadata"},
_jsii_.MemberProperty{JsiiProperty: "replicas", GoGetter: "Replicas"},
_jsii_.MemberProperty{JsiiProperty: "restartPolicy", GoGetter: "RestartPolicy"},
_jsii_.MemberMethod{JsiiMethod: "selectByLabel", GoMethod: "SelectByLabel"},
_jsii_.MemberProperty{JsiiProperty: "serviceAccount", GoGetter: "ServiceAccount"},
_jsii_.MemberMethod{JsiiMethod: "toString", GoMethod: "ToString"},
_jsii_.MemberProperty{JsiiProperty: "volumes", GoGetter: "Volumes"},
},
func() interface{} {
j := jsiiProxy_Deployment{}
_jsii_.InitJsiiProxy(&j.jsiiProxy_Resource)
_jsii_.InitJsiiProxy(&j.jsiiProxy_IPodTemplate)
return &j
},
)
_jsii_.RegisterStruct(
"cdk8s-plus-17.DeploymentProps",
reflect.TypeOf((*DeploymentProps)(nil)).Elem(),
)
_jsii_.RegisterEnum(
"cdk8s-plus-17.EmptyDirMedium",
reflect.TypeOf((*EmptyDirMedium)(nil)).Elem(),
map[string]interface{}{
"DEFAULT": EmptyDirMedium_DEFAULT,
"MEMORY": EmptyDirMedium_MEMORY,
},
)
_jsii_.RegisterStruct(
"cdk8s-plus-17.EmptyDirVolumeOptions",
reflect.TypeOf((*EmptyDirVolumeOptions)(nil)).Elem(),
)
_jsii_.RegisterClass(
"cdk8s-plus-17.EnvValue",
reflect.TypeOf((*EnvValue)(nil)).Elem(),
[]_jsii_.Member{
_jsii_.MemberProperty{JsiiProperty: "value", GoGetter: "Value"},
_jsii_.MemberProperty{JsiiProperty: "valueFrom", GoGetter: "ValueFrom"},
},
func() interface{} {
return &jsiiProxy_EnvValue{}
},
)
_jsii_.RegisterStruct(
"cdk8s-plus-17.EnvValueFromConfigMapOptions",
reflect.TypeOf((*EnvValueFromConfigMapOptions)(nil)).Elem(),
)
_jsii_.RegisterStruct(
"cdk8s-plus-17.EnvValueFromProcessOptions",
reflect.TypeOf((*EnvValueFromProcessOptions)(nil)).Elem(),
)
_jsii_.RegisterStruct(
"cdk8s-plus-17.EnvValueFromSecretOptions",
reflect.TypeOf((*EnvValueFromSecretOptions)(nil)).Elem(),
)
_jsii_.RegisterStruct(
"cdk8s-plus-17.ExposeOptions",
reflect.TypeOf((*ExposeOptions)(nil)).Elem(),
)
_jsii_.RegisterStruct(
"cdk8s-plus-17.HttpGetProbeOptions",
reflect.TypeOf((*HttpGetProbeOptions)(nil)).Elem(),
)
_jsii_.RegisterInterface(
"cdk8s-plus-17.IConfigMap",
reflect.TypeOf((*IConfigMap)(nil)).Elem(),
[]_jsii_.Member{
_jsii_.MemberProperty{JsiiProperty: "name", GoGetter: "Name"},
},
func() interface{} {
j := jsiiProxy_IConfigMap{}
_jsii_.InitJsiiProxy(&j.jsiiProxy_IResource)
return &j
},
)
_jsii_.RegisterInterface(
"cdk8s-plus-17.IPodSpec",
reflect.TypeOf((*IPodSpec)(nil)).Elem(),
[]_jsii_.Member{
_jsii_.MemberMethod{JsiiMethod: "addContainer", GoMethod: "AddContainer"},
_jsii_.MemberMethod{JsiiMethod: "addVolume", GoMethod: "AddVolume"},
_jsii_.MemberProperty{JsiiProperty: "containers", GoGetter: "Containers"},
_jsii_.MemberProperty{JsiiProperty: "restartPolicy", GoGetter: "RestartPolicy"},
_jsii_.MemberProperty{JsiiProperty: "serviceAccount", GoGetter: "ServiceAccount"},
_jsii_.MemberProperty{JsiiProperty: "volumes", GoGetter: "Volumes"},
},
func() interface{} {
return &jsiiProxy_IPodSpec{}
},
)
_jsii_.RegisterInterface(
"cdk8s-plus-17.IPodTemplate",
reflect.TypeOf((*IPodTemplate)(nil)).Elem(),
[]_jsii_.Member{
_jsii_.MemberMethod{JsiiMethod: "addContainer", GoMethod: "AddContainer"},
_jsii_.MemberMethod{JsiiMethod: "addVolume", GoMethod: "AddVolume"},
_jsii_.MemberProperty{JsiiProperty: "containers", GoGetter: "Containers"},
_jsii_.MemberProperty{JsiiProperty: "podMetadata", GoGetter: "PodMetadata"},
_jsii_.MemberProperty{JsiiProperty: "restartPolicy", GoGetter: "RestartPolicy"},
_jsii_.MemberProperty{JsiiProperty: "serviceAccount", GoGetter: "ServiceAccount"},
_jsii_.MemberProperty{JsiiProperty: "volumes", GoGetter: "Volumes"},
},
func() interface{} {
j := jsiiProxy_IPodTemplate{}
_jsii_.InitJsiiProxy(&j.jsiiProxy_IPodSpec)
return &j
},
)
_jsii_.RegisterInterface(
"cdk8s-plus-17.IResource",
reflect.TypeOf((*IResource)(nil)).Elem(),
[]_jsii_.Member{
_jsii_.MemberProperty{JsiiProperty: "name", GoGetter: "Name"},
},
func() interface{} {
return &jsiiProxy_IResource{}
},
)
_jsii_.RegisterInterface(
"cdk8s-plus-17.ISecret",
reflect.TypeOf((*ISecret)(nil)).Elem(),
[]_jsii_.Member{
_jsii_.MemberProperty{JsiiProperty: "name", GoGetter: "Name"},
},
func() interface{} {
j := jsiiProxy_ISecret{}
_jsii_.InitJsiiProxy(&j.jsiiProxy_IResource)
return &j
},
)
_jsii_.RegisterInterface(
"cdk8s-plus-17.IServiceAccount",
reflect.TypeOf((*IServiceAccount)(nil)).Elem(),
[]_jsii_.Member{
_jsii_.MemberProperty{JsiiProperty: "name", GoGetter: "Name"},
},
func() interface{} {
j := jsiiProxy_IServiceAccount{}
_jsii_.InitJsiiProxy(&j.jsiiProxy_IResource)
return &j
},
)
_jsii_.RegisterEnum(
"cdk8s-plus-17.ImagePullPolicy",
reflect.TypeOf((*ImagePullPolicy)(nil)).Elem(),
map[string]interface{}{
"ALWAYS": ImagePullPolicy_ALWAYS,
"IF_NOT_PRESENT": ImagePullPolicy_IF_NOT_PRESENT,
"NEVER": ImagePullPolicy_NEVER,
},
)
_jsii_.RegisterClass(
"cdk8s-plus-17.IngressV1Beta1",
reflect.TypeOf((*IngressV1Beta1)(nil)).Elem(),
[]_jsii_.Member{
_jsii_.MemberMethod{JsiiMethod: "addDefaultBackend", GoMethod: "AddDefaultBackend"},
_jsii_.MemberMethod{JsiiMethod: "addHostDefaultBackend", GoMethod: "AddHostDefaultBackend"},
_jsii_.MemberMethod{JsiiMethod: "addHostRule", GoMethod: "AddHostRule"},
_jsii_.MemberMethod{JsiiMethod: "addRule", GoMethod: "AddRule"},
_jsii_.MemberMethod{JsiiMethod: "addRules", GoMethod: "AddRules"},
_jsii_.MemberProperty{JsiiProperty: "apiObject", GoGetter: "ApiObject"},
_jsii_.MemberProperty{JsiiProperty: "metadata", GoGetter: "Metadata"},
_jsii_.MemberProperty{JsiiProperty: "name", GoGetter: "Name"},
_jsii_.MemberMethod{JsiiMethod: "onPrepare", GoMethod: "OnPrepare"},
_jsii_.MemberMethod{JsiiMethod: "onSynthesize", GoMethod: "OnSynthesize"},
_jsii_.MemberMethod{JsiiMethod: "onValidate", GoMethod: "OnValidate"},
_jsii_.MemberMethod{JsiiMethod: "toString", GoMethod: "ToString"},
},
func() interface{} {
j := jsiiProxy_IngressV1Beta1{}
_jsii_.InitJsiiProxy(&j.jsiiProxy_Resource)
return &j
},
)
_jsii_.RegisterClass(
"cdk8s-plus-17.IngressV1Beta1Backend",
reflect.TypeOf((*IngressV1Beta1Backend)(nil)).Elem(),
nil, // no members
func() interface{} {
return &jsiiProxy_IngressV1Beta1Backend{}
},
)
_jsii_.RegisterStruct(
"cdk8s-plus-17.IngressV1Beta1Props",
reflect.TypeOf((*IngressV1Beta1Props)(nil)).Elem(),
)
_jsii_.RegisterStruct(
"cdk8s-plus-17.IngressV1Beta1Rule",
reflect.TypeOf((*IngressV1Beta1Rule)(nil)).Elem(),
)
_jsii_.RegisterClass(
"cdk8s-plus-17.Job",
reflect.TypeOf((*Job)(nil)).Elem(),
[]_jsii_.Member{
_jsii_.MemberProperty{JsiiProperty: "activeDeadline", GoGetter: "ActiveDeadline"},
_jsii_.MemberMethod{JsiiMethod: "addContainer", GoMethod: "AddContainer"},
_jsii_.MemberMethod{JsiiMethod: "addVolume", GoMethod: "AddVolume"},
_jsii_.MemberProperty{JsiiProperty: "apiObject", GoGetter: "ApiObject"},
_jsii_.MemberProperty{JsiiProperty: "backoffLimit", GoGetter: "BackoffLimit"},
_jsii_.MemberProperty{JsiiProperty: "containers", GoGetter: "Containers"},
_jsii_.MemberProperty{JsiiProperty: "metadata", GoGetter: "Metadata"},
_jsii_.MemberProperty{JsiiProperty: "name", GoGetter: "Name"},
_jsii_.MemberMethod{JsiiMethod: "onPrepare", GoMethod: "OnPrepare"},
_jsii_.MemberMethod{JsiiMethod: "onSynthesize", GoMethod: "OnSynthesize"},
_jsii_.MemberMethod{JsiiMethod: "onValidate", GoMethod: "OnValidate"},
_jsii_.MemberProperty{JsiiProperty: "podMetadata", GoGetter: "PodMetadata"},
_jsii_.MemberProperty{JsiiProperty: "restartPolicy", GoGetter: "RestartPolicy"},
_jsii_.MemberProperty{JsiiProperty: "serviceAccount", GoGetter: "ServiceAccount"},
_jsii_.MemberMethod{JsiiMethod: "toString", GoMethod: "ToString"},
_jsii_.MemberProperty{JsiiProperty: "ttlAfterFinished", GoGetter: "TtlAfterFinished"},
_jsii_.MemberProperty{JsiiProperty: "volumes", GoGetter: "Volumes"},
},
func() interface{} {
j := jsiiProxy_Job{}
_jsii_.InitJsiiProxy(&j.jsiiProxy_Resource)
_jsii_.InitJsiiProxy(&j.jsiiProxy_IPodTemplate)
return &j
},
)
_jsii_.RegisterStruct(
"cdk8s-plus-17.JobProps",
reflect.TypeOf((*JobProps)(nil)).Elem(),
)
_jsii_.RegisterStruct(
"cdk8s-plus-17.MountOptions",
reflect.TypeOf((*MountOptions)(nil)).Elem(),
)
_jsii_.RegisterEnum(
"cdk8s-plus-17.MountPropagation",
reflect.TypeOf((*MountPropagation)(nil)).Elem(),
map[string]interface{}{
"NONE": MountPropagation_NONE,
"HOST_TO_CONTAINER": MountPropagation_HOST_TO_CONTAINER,
"BIDIRECTIONAL": MountPropagation_BIDIRECTIONAL,
},
)
_jsii_.RegisterStruct(
"cdk8s-plus-17.PathMapping",
reflect.TypeOf((*PathMapping)(nil)).Elem(),
)
_jsii_.RegisterClass(
"cdk8s-plus-17.Pod",
reflect.TypeOf((*Pod)(nil)).Elem(),
[]_jsii_.Member{
_jsii_.MemberMethod{JsiiMethod: "addContainer", GoMethod: "AddContainer"},
_jsii_.MemberMethod{JsiiMethod: "addVolume", GoMethod: "AddVolume"},
_jsii_.MemberProperty{JsiiProperty: "apiObject", GoGetter: "ApiObject"},
_jsii_.MemberProperty{JsiiProperty: "containers", GoGetter: "Containers"},
_jsii_.MemberProperty{JsiiProperty: "metadata", GoGetter: "Metadata"},
_jsii_.MemberProperty{JsiiProperty: "name", GoGetter: "Name"},
_jsii_.MemberMethod{JsiiMethod: "onPrepare", GoMethod: "OnPrepare"},
_jsii_.MemberMethod{JsiiMethod: "onSynthesize", GoMethod: "OnSynthesize"},
_jsii_.MemberMethod{JsiiMethod: "onValidate", GoMethod: "OnValidate"},
_jsii_.MemberProperty{JsiiProperty: "restartPolicy", GoGetter: "RestartPolicy"},
_jsii_.MemberProperty{JsiiProperty: "serviceAccount", GoGetter: "ServiceAccount"},
_jsii_.MemberMethod{JsiiMethod: "toString", GoMethod: "ToString"},
_jsii_.MemberProperty{JsiiProperty: "volumes", GoGetter: "Volumes"},
},
func() interface{} {
j := jsiiProxy_Pod{}
_jsii_.InitJsiiProxy(&j.jsiiProxy_Resource)
_jsii_.InitJsiiProxy(&j.jsiiProxy_IPodSpec)
return &j
},
)
_jsii_.RegisterEnum(
"cdk8s-plus-17.PodManagementPolicy",
reflect.TypeOf((*PodManagementPolicy)(nil)).Elem(),
map[string]interface{}{
"ORDERED_READY": PodManagementPolicy_ORDERED_READY,
"PARALLEL": PodManagementPolicy_PARALLEL,
},
)
_jsii_.RegisterStruct(
"cdk8s-plus-17.PodProps",
reflect.TypeOf((*PodProps)(nil)).Elem(),
)
_jsii_.RegisterClass(
"cdk8s-plus-17.PodSpec",
reflect.TypeOf((*PodSpec)(nil)).Elem(),
[]_jsii_.Member{
_jsii_.MemberMethod{JsiiMethod: "addContainer", GoMethod: "AddContainer"},
_jsii_.MemberMethod{JsiiMethod: "addVolume", GoMethod: "AddVolume"},
_jsii_.MemberProperty{JsiiProperty: "containers", GoGetter: "Containers"},
_jsii_.MemberProperty{JsiiProperty: "restartPolicy", GoGetter: "RestartPolicy"},
_jsii_.MemberProperty{JsiiProperty: "serviceAccount", GoGetter: "ServiceAccount"},
_jsii_.MemberProperty{JsiiProperty: "volumes", GoGetter: "Volumes"},
},
func() interface{} {
j := jsiiProxy_PodSpec{}
_jsii_.InitJsiiProxy(&j.jsiiProxy_IPodSpec)
return &j
},
)
_jsii_.RegisterStruct(
"cdk8s-plus-17.PodSpecProps",
reflect.TypeOf((*PodSpecProps)(nil)).Elem(),
)
_jsii_.RegisterClass(
"cdk8s-plus-17.PodTemplate",
reflect.TypeOf((*PodTemplate)(nil)).Elem(),
[]_jsii_.Member{
_jsii_.MemberMethod{JsiiMethod: "addContainer", GoMethod: "AddContainer"},
_jsii_.MemberMethod{JsiiMethod: "addVolume", GoMethod: "AddVolume"},
_jsii_.MemberProperty{JsiiProperty: "containers", GoGetter: "Containers"},
_jsii_.MemberProperty{JsiiProperty: "podMetadata", GoGetter: "PodMetadata"},
_jsii_.MemberProperty{JsiiProperty: "restartPolicy", GoGetter: "RestartPolicy"},
_jsii_.MemberProperty{JsiiProperty: "serviceAccount", GoGetter: "ServiceAccount"},
_jsii_.MemberProperty{JsiiProperty: "volumes", GoGetter: "Volumes"},
},
func() interface{} {
j := jsiiProxy_PodTemplate{}
_jsii_.InitJsiiProxy(&j.jsiiProxy_PodSpec)
_jsii_.InitJsiiProxy(&j.jsiiProxy_IPodTemplate)
return &j
},
)
_jsii_.RegisterStruct(
"cdk8s-plus-17.PodTemplateProps",
reflect.TypeOf((*PodTemplateProps)(nil)).Elem(),
)
_jsii_.RegisterClass(
"cdk8s-plus-17.Probe",
reflect.TypeOf((*Probe)(nil)).Elem(),
nil, // no members
func() interface{} {
return &jsiiProxy_Probe{}
},
)
_jsii_.RegisterStruct(
"cdk8s-plus-17.ProbeOptions",
reflect.TypeOf((*ProbeOptions)(nil)).Elem(),
)
_jsii_.RegisterEnum(
"cdk8s-plus-17.Protocol",
reflect.TypeOf((*Protocol)(nil)).Elem(),
map[string]interface{}{
"TCP": Protocol_TCP,
"UDP": Protocol_UDP,
"SCTP": Protocol_SCTP,
},
)
_jsii_.RegisterClass(
"cdk8s-plus-17.Resource",
reflect.TypeOf((*Resource)(nil)).Elem(),
[]_jsii_.Member{
_jsii_.MemberProperty{JsiiProperty: "apiObject", GoGetter: "ApiObject"},
_jsii_.MemberProperty{JsiiProperty: "metadata", GoGetter: "Metadata"},
_jsii_.MemberProperty{JsiiProperty: "name", GoGetter: "Name"},
_jsii_.MemberMethod{JsiiMethod: "onPrepare", GoMethod: "OnPrepare"},
_jsii_.MemberMethod{JsiiMethod: "onSynthesize", GoMethod: "OnSynthesize"},
_jsii_.MemberMethod{JsiiMethod: "onValidate", GoMethod: "OnValidate"},
_jsii_.MemberMethod{JsiiMethod: "toString", GoMethod: "ToString"},
},
func() interface{} {
j := jsiiProxy_Resource{}
_jsii_.InitJsiiProxy(&j.Type__constructsConstruct)
_jsii_.InitJsiiProxy(&j.jsiiProxy_IResource)
return &j
},
)
_jsii_.RegisterStruct(
"cdk8s-plus-17.ResourceProps",
reflect.TypeOf((*ResourceProps)(nil)).Elem(),
)
_jsii_.RegisterEnum(
"cdk8s-plus-17.RestartPolicy",
reflect.TypeOf((*RestartPolicy)(nil)).Elem(),
map[string]interface{}{
"ALWAYS": RestartPolicy_ALWAYS,
"ON_FAILURE": RestartPolicy_ON_FAILURE,
"NEVER": RestartPolicy_NEVER,
},
)
_jsii_.RegisterClass(
"cdk8s-plus-17.Secret",
reflect.TypeOf((*Secret)(nil)).Elem(),
[]_jsii_.Member{
_jsii_.MemberMethod{JsiiMethod: "addStringData", GoMethod: "AddStringData"},
_jsii_.MemberProperty{JsiiProperty: "apiObject", GoGetter: "ApiObject"},
_jsii_.MemberMethod{JsiiMethod: "getStringData", GoMethod: "GetStringData"},
_jsii_.MemberProperty{JsiiProperty: "metadata", GoGetter: "Metadata"},
_jsii_.MemberProperty{JsiiProperty: "name", GoGetter: "Name"},
_jsii_.MemberMethod{JsiiMethod: "onPrepare", GoMethod: "OnPrepare"},
_jsii_.MemberMethod{JsiiMethod: "onSynthesize", GoMethod: "OnSynthesize"},
_jsii_.MemberMethod{JsiiMethod: "onValidate", GoMethod: "OnValidate"},
_jsii_.MemberMethod{JsiiMethod: "toString", GoMethod: "ToString"},
},
func() interface{} {
j := jsiiProxy_Secret{}
_jsii_.InitJsiiProxy(&j.jsiiProxy_Resource)
_jsii_.InitJsiiProxy(&j.jsiiProxy_ISecret)
return &j
},
)
_jsii_.RegisterStruct(
"cdk8s-plus-17.SecretProps",
reflect.TypeOf((*SecretProps)(nil)).Elem(),
)
_jsii_.RegisterStruct(
"cdk8s-plus-17.SecretValue",
reflect.TypeOf((*SecretValue)(nil)).Elem(),
)
_jsii_.RegisterClass(
"cdk8s-plus-17.Service",
reflect.TypeOf((*Service)(nil)).Elem(),
[]_jsii_.Member{
_jsii_.MemberMethod{JsiiMethod: "addDeployment", GoMethod: "AddDeployment"},
_jsii_.MemberMethod{JsiiMethod: "addSelector", GoMethod: "AddSelector"},
_jsii_.MemberProperty{JsiiProperty: "apiObject", GoGetter: "ApiObject"},
_jsii_.MemberProperty{JsiiProperty: "clusterIP", GoGetter: "ClusterIP"},
_jsii_.MemberProperty{JsiiProperty: "externalName", GoGetter: "ExternalName"},
_jsii_.MemberProperty{JsiiProperty: "metadata", GoGetter: "Metadata"},
_jsii_.MemberProperty{JsiiProperty: "name", GoGetter: "Name"},
_jsii_.MemberMethod{JsiiMethod: "onPrepare", GoMethod: "OnPrepare"},
_jsii_.MemberMethod{JsiiMethod: "onSynthesize", GoMethod: "OnSynthesize"},
_jsii_.MemberMethod{JsiiMethod: "onValidate", GoMethod: "OnValidate"},
_jsii_.MemberProperty{JsiiProperty: "ports", GoGetter: "Ports"},
_jsii_.MemberProperty{JsiiProperty: "selector", GoGetter: "Selector"},
_jsii_.MemberMethod{JsiiMethod: "serve", GoMethod: "Serve"},
_jsii_.MemberMethod{JsiiMethod: "toString", GoMethod: "ToString"},
_jsii_.MemberProperty{JsiiProperty: "type", GoGetter: "Type"},
},
func() interface{} {
j := jsiiProxy_Service{}
_jsii_.InitJsiiProxy(&j.jsiiProxy_Resource)
return &j
},
)
_jsii_.RegisterClass(
"cdk8s-plus-17.ServiceAccount",
reflect.TypeOf((*ServiceAccount)(nil)).Elem(),
[]_jsii_.Member{
_jsii_.MemberMethod{JsiiMethod: "addSecret", GoMethod: "AddSecret"},
_jsii_.MemberProperty{JsiiProperty: "apiObject", GoGetter: "ApiObject"},
_jsii_.MemberProperty{JsiiProperty: "metadata", GoGetter: "Metadata"},
_jsii_.MemberProperty{JsiiProperty: "name", GoGetter: "Name"},
_jsii_.MemberMethod{JsiiMethod: "onPrepare", GoMethod: "OnPrepare"},
_jsii_.MemberMethod{JsiiMethod: "onSynthesize", GoMethod: "OnSynthesize"},
_jsii_.MemberMethod{JsiiMethod: "onValidate", GoMethod: "OnValidate"},
_jsii_.MemberProperty{JsiiProperty: "secrets", GoGetter: "Secrets"},
_jsii_.MemberMethod{JsiiMethod: "toString", GoMethod: "ToString"},
},
func() interface{} {
j := jsiiProxy_ServiceAccount{}
_jsii_.InitJsiiProxy(&j.jsiiProxy_Resource)
_jsii_.InitJsiiProxy(&j.jsiiProxy_IServiceAccount)
return &j
},
)
_jsii_.RegisterStruct(
"cdk8s-plus-17.ServiceAccountProps",
reflect.TypeOf((*ServiceAccountProps)(nil)).Elem(),
)
_jsii_.RegisterStruct(
"cdk8s-plus-17.ServiceIngressV1BetaBackendOptions",
reflect.TypeOf((*ServiceIngressV1BetaBackendOptions)(nil)).Elem(),
)
_jsii_.RegisterStruct(
"cdk8s-plus-17.ServicePort",
reflect.TypeOf((*ServicePort)(nil)).Elem(),
)
_jsii_.RegisterStruct(
"cdk8s-plus-17.ServicePortOptions",
reflect.TypeOf((*ServicePortOptions)(nil)).Elem(),
)
_jsii_.RegisterStruct(
"cdk8s-plus-17.ServiceProps",
reflect.TypeOf((*ServiceProps)(nil)).Elem(),
)
_jsii_.RegisterEnum(
"cdk8s-plus-17.ServiceType",
reflect.TypeOf((*ServiceType)(nil)).Elem(),
map[string]interface{}{
"CLUSTER_IP": ServiceType_CLUSTER_IP,
"NODE_PORT": ServiceType_NODE_PORT,
"LOAD_BALANCER": ServiceType_LOAD_BALANCER,
"EXTERNAL_NAME": ServiceType_EXTERNAL_NAME,
},
)
_jsii_.RegisterClass(
"cdk8s-plus-17.StatefulSet",
reflect.TypeOf((*StatefulSet)(nil)).Elem(),
[]_jsii_.Member{
_jsii_.MemberMethod{JsiiMethod: "addContainer", GoMethod: "AddContainer"},
_jsii_.MemberMethod{JsiiMethod: "addVolume", GoMethod: "AddVolume"},
_jsii_.MemberProperty{JsiiProperty: "apiObject", GoGetter: "ApiObject"},
_jsii_.MemberProperty{JsiiProperty: "containers", GoGetter: "Containers"},
_jsii_.MemberProperty{JsiiProperty: "labelSelector", GoGetter: "LabelSelector"},
_jsii_.MemberProperty{JsiiProperty: "metadata", GoGetter: "Metadata"},
_jsii_.MemberProperty{JsiiProperty: "name", GoGetter: "Name"},
_jsii_.MemberMethod{JsiiMethod: "onPrepare", GoMethod: "OnPrepare"},
_jsii_.MemberMethod{JsiiMethod: "onSynthesize", GoMethod: "OnSynthesize"},
_jsii_.MemberMethod{JsiiMethod: "onValidate", GoMethod: "OnValidate"},
_jsii_.MemberProperty{JsiiProperty: "podManagementPolicy", GoGetter: "PodManagementPolicy"},
_jsii_.MemberProperty{JsiiProperty: "podMetadata", GoGetter: "PodMetadata"},
_jsii_.MemberProperty{JsiiProperty: "replicas", GoGetter: "Replicas"},
_jsii_.MemberProperty{JsiiProperty: "restartPolicy", GoGetter: "RestartPolicy"},
_jsii_.MemberMethod{JsiiMethod: "selectByLabel", GoMethod: "SelectByLabel"},
_jsii_.MemberProperty{JsiiProperty: "serviceAccount", GoGetter: "ServiceAccount"},
_jsii_.MemberMethod{JsiiMethod: "toString", GoMethod: "ToString"},
_jsii_.MemberProperty{JsiiProperty: "volumes", GoGetter: "Volumes"},
},
func() interface{} {
j := jsiiProxy_StatefulSet{}
_jsii_.InitJsiiProxy(&j.jsiiProxy_Resource)
_jsii_.InitJsiiProxy(&j.jsiiProxy_IPodTemplate)
return &j
},
)
_jsii_.RegisterStruct(
"cdk8s-plus-17.StatefulSetProps",
reflect.TypeOf((*StatefulSetProps)(nil)).Elem(),
)
_jsii_.RegisterClass(
"cdk8s-plus-17.Volume",
reflect.TypeOf((*Volume)(nil)).Elem(),
[]_jsii_.Member{
_jsii_.MemberProperty{JsiiProperty: "name", GoGetter: "Name"},
},
func() interface{} {
return &jsiiProxy_Volume{}
},
)
_jsii_.RegisterStruct(
"cdk8s-plus-17.VolumeMount",
reflect.TypeOf((*VolumeMount)(nil)).Elem(),
)
}
<file_sep>## cdk8s-go
Go bindings for the `CDK8s` project are now maintained under the following repositories:
- https://github.com/cdk8s-team/cdk8s-core-go
- https://github.com/cdk8s-team/cdk8s-plus-go
## License
This project is licensed under the Apache-2.0 License.
<file_sep>module github.com/awslabs/cdk8s-go/cdk8splus17
go 1.16
require (
github.com/aws/jsii-runtime-go v1.30.0
github.com/cdk8s-team/cdk8s-go/cdk8s v1.0.0-beta.25
github.com/aws/constructs-go/constructs/v3 v3.3.85
)
<file_sep>// High level abstractions on top of cdk8s
package cdk8splus17
import (
_jsii_ "github.com/aws/jsii-runtime-go/runtime"
_init_ "github.com/awslabs/cdk8s-go/cdk8splus17/jsii"
"github.com/aws/constructs-go/constructs/v3"
"github.com/awslabs/cdk8s-go/cdk8splus17/internal"
"github.com/cdk8s-team/cdk8s-go/cdk8s"
)
// Options for `configmap.addDirectory()`.
type AddDirectoryOptions struct {
// Glob patterns to exclude when adding files.
Exclude *[]*string `json:"exclude"`
// A prefix to add to all keys in the config map.
KeyPrefix *string `json:"keyPrefix"`
}
// Options for `Probe.fromCommand()`.
type CommandProbeOptions struct {
// Minimum consecutive failures for the probe to be considered failed after having succeeded.
//
// Defaults to 3. Minimum value is 1.
FailureThreshold *float64 `json:"failureThreshold"`
// Number of seconds after the container has started before liveness probes are initiated.
// See: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes
//
InitialDelaySeconds cdk8s.Duration `json:"initialDelaySeconds"`
// How often (in seconds) to perform the probe.
//
// Defaults to 10 seconds. Minimum value is 1.
PeriodSeconds cdk8s.Duration `json:"periodSeconds"`
// Minimum consecutive successes for the probe to be considered successful after having failed. Defaults to 1.
//
// Must be 1 for liveness and startup. Minimum value is 1.
SuccessThreshold *float64 `json:"successThreshold"`
// Number of seconds after which the probe times out.
//
// Defaults to 1 second. Minimum value is 1.
// See: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes
//
TimeoutSeconds cdk8s.Duration `json:"timeoutSeconds"`
}
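// Illustrative sketch (not part of the generated bindings): populating
// CommandProbeOptions for a command probe. The threshold field comes straight
// from the struct above; the duration field assumes the Duration_Seconds helper
// exposed by the core cdk8s Go module.
func exampleCommandProbeOptions() *CommandProbeOptions {
	failures := 3.0
	delay := 5.0
	return &CommandProbeOptions{
		FailureThreshold:    &failures,
		InitialDelaySeconds: cdk8s.Duration_Seconds(&delay),
	}
}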
// ConfigMap holds configuration data for pods to consume.
type ConfigMap interface {
Resource
IConfigMap
ApiObject() cdk8s.ApiObject
BinaryData() *map[string]*string
Data() *map[string]*string
Metadata() cdk8s.ApiObjectMetadataDefinition
Name() *string
AddBinaryData(key *string, value *string)
AddData(key *string, value *string)
AddDirectory(localDir *string, options *AddDirectoryOptions)
AddFile(localFile *string, key *string)
OnPrepare()
OnSynthesize(session constructs.ISynthesisSession)
OnValidate() *[]*string
ToString() *string
}
// The jsii proxy struct for ConfigMap
type jsiiProxy_ConfigMap struct {
jsiiProxy_Resource
jsiiProxy_IConfigMap
}
func (j *jsiiProxy_ConfigMap) ApiObject() cdk8s.ApiObject {
var returns cdk8s.ApiObject
_jsii_.Get(
j,
"apiObject",
&returns,
)
return returns
}
func (j *jsiiProxy_ConfigMap) BinaryData() *map[string]*string {
var returns *map[string]*string
_jsii_.Get(
j,
"binaryData",
&returns,
)
return returns
}
func (j *jsiiProxy_ConfigMap) Data() *map[string]*string {
var returns *map[string]*string
_jsii_.Get(
j,
"data",
&returns,
)
return returns
}
func (j *jsiiProxy_ConfigMap) Metadata() cdk8s.ApiObjectMetadataDefinition {
var returns cdk8s.ApiObjectMetadataDefinition
_jsii_.Get(
j,
"metadata",
&returns,
)
return returns
}
func (j *jsiiProxy_ConfigMap) Name() *string {
var returns *string
_jsii_.Get(
j,
"name",
&returns,
)
return returns
}
func NewConfigMap(scope constructs.Construct, id *string, props *ConfigMapProps) ConfigMap {
_init_.Initialize()
j := jsiiProxy_ConfigMap{}
_jsii_.Create(
"cdk8s-plus-17.ConfigMap",
[]interface{}{scope, id, props},
&j,
)
return &j
}
func NewConfigMap_Override(c ConfigMap, scope constructs.Construct, id *string, props *ConfigMapProps) {
_init_.Initialize()
_jsii_.Create(
"cdk8s-plus-17.ConfigMap",
[]interface{}{scope, id, props},
c,
)
}
// Represents a ConfigMap created elsewhere.
func ConfigMap_FromConfigMapName(name *string) IConfigMap {
_init_.Initialize()
var returns IConfigMap
_jsii_.StaticInvoke(
"cdk8s-plus-17.ConfigMap",
"fromConfigMapName",
[]interface{}{name},
&returns,
)
return returns
}
// Adds a binary data entry to the config map.
//
// BinaryData can contain byte
// sequences that are not in the UTF-8 range.
func (c *jsiiProxy_ConfigMap) AddBinaryData(key *string, value *string) {
_jsii_.InvokeVoid(
c,
"addBinaryData",
[]interface{}{key, value},
)
}
// Adds a data entry to the config map.
func (c *jsiiProxy_ConfigMap) AddData(key *string, value *string) {
_jsii_.InvokeVoid(
c,
"addData",
[]interface{}{key, value},
)
}
// Adds a directory to the ConfigMap.
func (c *jsiiProxy_ConfigMap) AddDirectory(localDir *string, options *AddDirectoryOptions) {
_jsii_.InvokeVoid(
c,
"addDirectory",
[]interface{}{localDir, options},
)
}
// Adds a file to the ConfigMap.
func (c *jsiiProxy_ConfigMap) AddFile(localFile *string, key *string) {
_jsii_.InvokeVoid(
c,
"addFile",
[]interface{}{localFile, key},
)
}
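// Illustrative sketch (not part of the generated bindings): creating a ConfigMap
// and populating it with an inline entry and a local file. The scope is whatever
// construct (e.g. a cdk8s Chart) the config map should be defined in; the id,
// keys and file path below are placeholders.
func exampleConfigMap(scope constructs.Construct) {
	id := "app-config"
	cm := NewConfigMap(scope, &id, &ConfigMapProps{})
	// Inline key/value entry.
	logKey := "LOG_LEVEL"
	logValue := "info"
	cm.AddData(&logKey, &logValue)
	// A local file, stored under an explicit key.
	file := "./config/settings.ini"
	fileKey := "settings.ini"
	cm.AddFile(&file, &fileKey)
}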
// Perform final modifications before synthesis.
//
// This method can be implemented by derived constructs in order to perform
// final changes before synthesis. prepare() will be called after child
// constructs have been prepared.
//
// This is an advanced framework feature. Only use this if you
// understand the implications.
func (c *jsiiProxy_ConfigMap) OnPrepare() {
_jsii_.InvokeVoid(
c,
"onPrepare",
nil, // no parameters
)
}
// Allows this construct to emit artifacts into the cloud assembly during synthesis.
//
// This method is usually implemented by framework-level constructs such as `Stack` and `Asset`
// as they participate in synthesizing the cloud assembly.
func (c *jsiiProxy_ConfigMap) OnSynthesize(session constructs.ISynthesisSession) {
_jsii_.InvokeVoid(
c,
"onSynthesize",
[]interface{}{session},
)
}
// Validate the current construct.
//
// This method can be implemented by derived constructs in order to perform
// validation logic. It is called on all constructs before synthesis.
//
// Returns: An array of validation error messages, or an empty array if the construct is valid.
// Deprecated: use `Node.addValidation()` to subscribe validation functions on this construct
// instead of overriding this method.
func (c *jsiiProxy_ConfigMap) OnValidate() *[]*string {
var returns *[]*string
_jsii_.Invoke(
c,
"onValidate",
nil, // no parameters
&returns,
)
return returns
}
// Returns a string representation of this construct.
func (c *jsiiProxy_ConfigMap) ToString() *string {
var returns *string
_jsii_.Invoke(
c,
"toString",
nil, // no parameters
&returns,
)
return returns
}
// Properties for initialization of `ConfigMap`.
type ConfigMapProps struct {
// Metadata that all persisted resources must have, which includes all objects users must create.
Metadata *cdk8s.ApiObjectMetadata `json:"metadata"`
// BinaryData contains the binary data.
//
// Each key must consist of alphanumeric characters, '-', '_' or '.'.
// BinaryData can contain byte sequences that are not in the UTF-8 range. The
// keys stored in BinaryData must not overlap with the ones in the Data field,
// this is enforced during validation process. Using this field will require
// 1.10+ apiserver and kubelet.
//
// You can also add binary data using `configMap.addBinaryData()`.
BinaryData *map[string]*string `json:"binaryData"`
// Data contains the configuration data.
//
// Each key must consist of alphanumeric characters, '-', '_' or '.'. Values
// with non-UTF-8 byte sequences must use the BinaryData field. The keys
// stored in Data must not overlap with the keys in the BinaryData field, this
// is enforced during validation process.
//
// You can also add data using `configMap.addData()`.
Data *map[string]*string `json:"data"`
}
// Options for the ConfigMap-based volume.
type ConfigMapVolumeOptions struct {
// Mode bits to use on created files by default.
//
// Must be a value between 0 and
// 0777. Defaults to 0644. Directories within the path are not affected by
// this setting. This might be in conflict with other options that affect the
// file mode, like fsGroup, and the result can be other mode bits set.
DefaultMode *float64 `json:"defaultMode"`
// If unspecified, each key-value pair in the Data field of the referenced ConfigMap will be projected into the volume as a file whose name is the key and content is the value.
//
// If specified, the listed keys will be projected
// into the specified paths, and unlisted keys will not be present. If a key
// is specified which is not present in the ConfigMap, the volume setup will
// error unless it is marked optional. Paths must be relative and may not
// contain the '..' path or start with '..'.
Items *map[string]*PathMapping `json:"items"`
// The volume name.
Name *string `json:"name"`
// Specify whether the ConfigMap or its keys must be defined.
Optional *bool `json:"optional"`
}
// A single application container that you want to run within a pod.
type Container interface {
Args() *[]*string
Command() *[]*string
Env() *map[string]EnvValue
Image() *string
ImagePullPolicy() ImagePullPolicy
Mounts() *[]*VolumeMount
Name() *string
Port() *float64
WorkingDir() *string
AddEnv(name *string, value EnvValue)
Mount(path *string, volume Volume, options *MountOptions)
}
// The jsii proxy struct for Container
type jsiiProxy_Container struct {
_ byte // padding
}
func (j *jsiiProxy_Container) Args() *[]*string {
var returns *[]*string
_jsii_.Get(
j,
"args",
&returns,
)
return returns
}
func (j *jsiiProxy_Container) Command() *[]*string {
var returns *[]*string
_jsii_.Get(
j,
"command",
&returns,
)
return returns
}
func (j *jsiiProxy_Container) Env() *map[string]EnvValue {
var returns *map[string]EnvValue
_jsii_.Get(
j,
"env",
&returns,
)
return returns
}
func (j *jsiiProxy_Container) Image() *string {
var returns *string
_jsii_.Get(
j,
"image",
&returns,
)
return returns
}
func (j *jsiiProxy_Container) ImagePullPolicy() ImagePullPolicy {
var returns ImagePullPolicy
_jsii_.Get(
j,
"imagePullPolicy",
&returns,
)
return returns
}
func (j *jsiiProxy_Container) Mounts() *[]*VolumeMount {
var returns *[]*VolumeMount
_jsii_.Get(
j,
"mounts",
&returns,
)
return returns
}
func (j *jsiiProxy_Container) Name() *string {
var returns *string
_jsii_.Get(
j,
"name",
&returns,
)
return returns
}
func (j *jsiiProxy_Container) Port() *float64 {
var returns *float64
_jsii_.Get(
j,
"port",
&returns,
)
return returns
}
func (j *jsiiProxy_Container) WorkingDir() *string {
var returns *string
_jsii_.Get(
j,
"workingDir",
&returns,
)
return returns
}
func NewContainer(props *ContainerProps) Container {
_init_.Initialize()
j := jsiiProxy_Container{}
_jsii_.Create(
"cdk8s-plus-17.Container",
[]interface{}{props},
&j,
)
return &j
}
func NewContainer_Override(c Container, props *ContainerProps) {
_init_.Initialize()
_jsii_.Create(
"cdk8s-plus-17.Container",
[]interface{}{props},
c,
)
}
// Add an environment value to the container.
//
// The variable value can come
// from various dynamic sources such as secrets or config maps.
// See: EnvValue.fromXXX
//
func (c *jsiiProxy_Container) AddEnv(name *string, value EnvValue) {
_jsii_.InvokeVoid(
c,
"addEnv",
[]interface{}{name, value},
)
}
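// Illustrative sketch (not part of the generated bindings): wiring container
// environment variables from a literal value and from an existing config map.
// The container and config map are assumed to have been created elsewhere;
// variable names and keys are placeholders.
func exampleContainerEnv(container Container, cfg IConfigMap) {
	// Plain literal value.
	stageName := "STAGE"
	stageValue := "production"
	container.AddEnv(&stageName, EnvValue_FromValue(&stageValue))
	// Value read from a key of a config map; nil keeps the default options.
	hostName := "DB_HOST"
	hostKey := "hostname"
	container.AddEnv(&hostName, EnvValue_FromConfigMap(cfg, &hostKey, nil))
}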
// Mount a volume to a specific path so that it is accessible by the container.
//
// Every pod that is configured to use this container will automatically have access to the volume.
func (c *jsiiProxy_Container) Mount(path *string, volume Volume, options *MountOptions) {
_jsii_.InvokeVoid(
c,
"mount",
[]interface{}{path, volume, options},
)
}
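// Illustrative sketch (not part of the generated bindings): mounting an existing
// volume into a container. How the Volume itself is created is out of scope here;
// nil mount options keep the defaults, and the path is a placeholder.
func exampleVolumeMount(container Container, vol Volume) {
	path := "/config"
	container.Mount(&path, vol, nil)
}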
// Properties for creating a container.
type ContainerProps struct {
// Docker image name.
Image *string `json:"image"`
// Arguments to the entrypoint. The docker image's CMD is used if `command` is not provided.
//
// Variable references $(VAR_NAME) are expanded using the container's
// environment. If a variable cannot be resolved, the reference in the input
// string will be unchanged. The $(VAR_NAME) syntax can be escaped with a
// double $$, ie: $$(VAR_NAME). Escaped references will never be expanded,
// regardless of whether the variable exists or not.
//
// Cannot be updated.
// See: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell
//
Args *[]*string `json:"args"`
// Entrypoint array.
//
// Not executed within a shell. The docker image's ENTRYPOINT is used if this is not provided. Variable references $(VAR_NAME) are expanded using the container's environment.
// If a variable cannot be resolved, the reference in the input string will be unchanged. The $(VAR_NAME) syntax can be escaped with a double $$, ie: $$(VAR_NAME).
// Escaped references will never be expanded, regardless of whether the variable exists or not. Cannot be updated.
// More info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell
Command *[]*string `json:"command"`
// List of environment variables to set in the container.
//
// Cannot be updated.
Env *map[string]EnvValue `json:"env"`
// Image pull policy for this container.
ImagePullPolicy ImagePullPolicy `json:"imagePullPolicy"`
// Periodic probe of container liveness.
//
// Container will be restarted if the probe fails.
Liveness Probe `json:"liveness"`
// Name of the container specified as a DNS_LABEL.
//
// Each container in a pod must have a unique name (DNS_LABEL). Cannot be updated.
Name *string `json:"name"`
// Number of port to expose on the pod's IP address.
//
// This must be a valid port number, 0 < x < 65536.
Port *float64 `json:"port"`
// Determines when the container is ready to serve traffic.
Readiness Probe `json:"readiness"`
// StartupProbe indicates that the Pod has successfully initialized.
//
// If specified, no other probes are executed until this completes successfully.
Startup Probe `json:"startup"`
// Pod volumes to mount into the container's filesystem.
//
// Cannot be updated.
VolumeMounts *[]*VolumeMount `json:"volumeMounts"`
// Container's working directory.
//
// If not specified, the container runtime's default will be used, which might be configured in the container image. Cannot be updated.
WorkingDir *string `json:"workingDir"`
}
// A Deployment provides declarative updates for Pods and ReplicaSets.
//
// You describe a desired state in a Deployment, and the Deployment Controller changes the actual
// state to the desired state at a controlled rate. You can define Deployments to create new ReplicaSets, or to remove
// existing Deployments and adopt all their resources with new Deployments.
//
// > Note: Do not manage ReplicaSets owned by a Deployment. Consider opening an issue in the main Kubernetes repository if your use case is not covered below.
//
// Use Case
// ---------
//
// The following are typical use cases for Deployments:
//
// - Create a Deployment to rollout a ReplicaSet. The ReplicaSet creates Pods in the background.
// Check the status of the rollout to see if it succeeds or not.
// - Declare the new state of the Pods by updating the PodTemplateSpec of the Deployment.
// A new ReplicaSet is created and the Deployment manages moving the Pods from the old ReplicaSet to the new one at a controlled rate.
// Each new ReplicaSet updates the revision of the Deployment.
// - Rollback to an earlier Deployment revision if the current state of the Deployment is not stable.
// Each rollback updates the revision of the Deployment.
// - Scale up the Deployment to facilitate more load.
// - Pause the Deployment to apply multiple fixes to its PodTemplateSpec and then resume it to start a new rollout.
// - Use the status of the Deployment as an indicator that a rollout has stuck.
// - Clean up older ReplicaSets that you don't need anymore.
type Deployment interface {
Resource
IPodTemplate
ApiObject() cdk8s.ApiObject
Containers() *[]Container
LabelSelector() *map[string]*string
Metadata() cdk8s.ApiObjectMetadataDefinition
Name() *string
PodMetadata() cdk8s.ApiObjectMetadataDefinition
Replicas() *float64
RestartPolicy() RestartPolicy
ServiceAccount() IServiceAccount
Volumes() *[]Volume
AddContainer(container *ContainerProps) Container
AddVolume(volume Volume)
Expose(port *float64, options *ExposeOptions) Service
OnPrepare()
OnSynthesize(session constructs.ISynthesisSession)
OnValidate() *[]*string
SelectByLabel(key *string, value *string)
ToString() *string
}
// The jsii proxy struct for Deployment
type jsiiProxy_Deployment struct {
jsiiProxy_Resource
jsiiProxy_IPodTemplate
}
func (j *jsiiProxy_Deployment) ApiObject() cdk8s.ApiObject {
var returns cdk8s.ApiObject
_jsii_.Get(
j,
"apiObject",
&returns,
)
return returns
}
func (j *jsiiProxy_Deployment) Containers() *[]Container {
var returns *[]Container
_jsii_.Get(
j,
"containers",
&returns,
)
return returns
}
func (j *jsiiProxy_Deployment) LabelSelector() *map[string]*string {
var returns *map[string]*string
_jsii_.Get(
j,
"labelSelector",
&returns,
)
return returns
}
func (j *jsiiProxy_Deployment) Metadata() cdk8s.ApiObjectMetadataDefinition {
var returns cdk8s.ApiObjectMetadataDefinition
_jsii_.Get(
j,
"metadata",
&returns,
)
return returns
}
func (j *jsiiProxy_Deployment) Name() *string {
var returns *string
_jsii_.Get(
j,
"name",
&returns,
)
return returns
}
func (j *jsiiProxy_Deployment) PodMetadata() cdk8s.ApiObjectMetadataDefinition {
var returns cdk8s.ApiObjectMetadataDefinition
_jsii_.Get(
j,
"podMetadata",
&returns,
)
return returns
}
func (j *jsiiProxy_Deployment) Replicas() *float64 {
var returns *float64
_jsii_.Get(
j,
"replicas",
&returns,
)
return returns
}
func (j *jsiiProxy_Deployment) RestartPolicy() RestartPolicy {
var returns RestartPolicy
_jsii_.Get(
j,
"restartPolicy",
&returns,
)
return returns
}
func (j *jsiiProxy_Deployment) ServiceAccount() IServiceAccount {
var returns IServiceAccount
_jsii_.Get(
j,
"serviceAccount",
&returns,
)
return returns
}
func (j *jsiiProxy_Deployment) Volumes() *[]Volume {
var returns *[]Volume
_jsii_.Get(
j,
"volumes",
&returns,
)
return returns
}
func NewDeployment(scope constructs.Construct, id *string, props *DeploymentProps) Deployment {
_init_.Initialize()
j := jsiiProxy_Deployment{}
_jsii_.Create(
"cdk8s-plus-17.Deployment",
[]interface{}{scope, id, props},
&j,
)
return &j
}
func NewDeployment_Override(d Deployment, scope constructs.Construct, id *string, props *DeploymentProps) {
_init_.Initialize()
_jsii_.Create(
"cdk8s-plus-17.Deployment",
[]interface{}{scope, id, props},
d,
)
}
// Add a container to the pod.
func (d *jsiiProxy_Deployment) AddContainer(container *ContainerProps) Container {
var returns Container
_jsii_.Invoke(
d,
"addContainer",
[]interface{}{container},
&returns,
)
return returns
}
// Add a volume to the pod.
func (d *jsiiProxy_Deployment) AddVolume(volume Volume) {
_jsii_.InvokeVoid(
d,
"addVolume",
[]interface{}{volume},
)
}
// Expose a deployment via a service.
//
// This is equivalent to running `kubectl expose deployment <deployment-name>`.
func (d *jsiiProxy_Deployment) Expose(port *float64, options *ExposeOptions) Service {
var returns Service
_jsii_.Invoke(
d,
"expose",
[]interface{}{port, options},
&returns,
)
return returns
}
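// Illustrative sketch (not part of the generated bindings): exposing an existing
// deployment on service port 8080, forwarding to container port 80 through a
// NodePort service. The port numbers are placeholders.
func exampleExposeDeployment(deployment Deployment) Service {
	port := 8080.0
	target := 80.0
	return deployment.Expose(&port, &ExposeOptions{
		ServiceType: ServiceType_NODE_PORT,
		TargetPort:  &target,
	})
}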
// Perform final modifications before synthesis.
//
// This method can be implemented by derived constructs in order to perform
// final changes before synthesis. prepare() will be called after child
// constructs have been prepared.
//
// This is an advanced framework feature. Only use this if you
// understand the implications.
func (d *jsiiProxy_Deployment) OnPrepare() {
_jsii_.InvokeVoid(
d,
"onPrepare",
nil, // no parameters
)
}
// Allows this construct to emit artifacts into the cloud assembly during synthesis.
//
// This method is usually implemented by framework-level constructs such as `Stack` and `Asset`
// as they participate in synthesizing the cloud assembly.
func (d *jsiiProxy_Deployment) OnSynthesize(session constructs.ISynthesisSession) {
_jsii_.InvokeVoid(
d,
"onSynthesize",
[]interface{}{session},
)
}
// Validate the current construct.
//
// This method can be implemented by derived constructs in order to perform
// validation logic. It is called on all constructs before synthesis.
//
// Returns: An array of validation error messages, or an empty array if the construct is valid.
// Deprecated: use `Node.addValidation()` to subscribe validation functions on this construct
// instead of overriding this method.
func (d *jsiiProxy_Deployment) OnValidate() *[]*string {
var returns *[]*string
_jsii_.Invoke(
d,
"onValidate",
nil, // no parameters
&returns,
)
return returns
}
// Configure a label selector to this deployment.
//
// Pods that have the label will be selected by deployments configured with this spec.
func (d *jsiiProxy_Deployment) SelectByLabel(key *string, value *string) {
_jsii_.InvokeVoid(
d,
"selectByLabel",
[]interface{}{key, value},
)
}
// Returns a string representation of this construct.
func (d *jsiiProxy_Deployment) ToString() *string {
var returns *string
_jsii_.Invoke(
d,
"toString",
nil, // no parameters
&returns,
)
return returns
}
// Properties for initialization of `Deployment`.
type DeploymentProps struct {
// Metadata that all persisted resources must have, which includes all objects users must create.
Metadata *cdk8s.ApiObjectMetadata `json:"metadata"`
// List of containers belonging to the pod.
//
// Containers cannot currently be
// added or removed. There must be at least one container in a Pod.
//
// You can add additional containers using `podSpec.addContainer()`.
Containers *[]*ContainerProps `json:"containers"`
// Restart policy for all containers within the pod.
// See: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#restart-policy
//
RestartPolicy RestartPolicy `json:"restartPolicy"`
// A service account provides an identity for processes that run in a Pod.
//
// When you (a human) access the cluster (for example, using kubectl), you are
// authenticated by the apiserver as a particular User Account (currently this
// is usually admin, unless your cluster administrator has customized your
// cluster). Processes in containers inside pods can also contact the
// apiserver. When they do, they are authenticated as a particular Service
// Account (for example, default).
// See: https://kubernetes.io/docs/tasks/configure-pod-container/configure-service-account/
//
ServiceAccount IServiceAccount `json:"serviceAccount"`
// List of volumes that can be mounted by containers belonging to the pod.
//
// You can also add volumes later using `podSpec.addVolume()`
// See: https://kubernetes.io/docs/concepts/storage/volumes
//
Volumes *[]Volume `json:"volumes"`
// The pod metadata.
PodMetadata *cdk8s.ApiObjectMetadata `json:"podMetadata"`
// Automatically allocates a pod selector for this deployment.
//
// If this is set to `false` you must define your selector through
// `deployment.podMetadata.addLabel()` and `deployment.selectByLabel()`.
DefaultSelector *bool `json:"defaultSelector"`
// Number of desired pods.
Replicas *float64 `json:"replicas"`
}
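// Illustrative sketch (not part of the generated bindings): a two-replica
// deployment with a single container, built from the DeploymentProps above.
// The scope is assumed to be a cdk8s Chart; the id, image and port are placeholders.
func exampleNewDeployment(scope constructs.Construct) Deployment {
	id := "web"
	image := "nginx:1.19"
	port := 80.0
	replicas := 2.0
	return NewDeployment(scope, &id, &DeploymentProps{
		Replicas: &replicas,
		Containers: &[]*ContainerProps{{
			Image: &image,
			Port:  &port,
		}},
	})
}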
// The medium on which to store the volume.
type EmptyDirMedium string
const (
EmptyDirMedium_DEFAULT EmptyDirMedium = "DEFAULT"
EmptyDirMedium_MEMORY EmptyDirMedium = "MEMORY"
)
// Options for volumes populated with an empty directory.
type EmptyDirVolumeOptions struct {
// By default, emptyDir volumes are stored on whatever medium is backing the node - that might be disk or SSD or network storage, depending on your environment.
//
// However, you can set the emptyDir.medium field to
// `EmptyDirMedium.MEMORY` to tell Kubernetes to mount a tmpfs (RAM-backed
// filesystem) for you instead. While tmpfs is very fast, be aware that unlike
// disks, tmpfs is cleared on node reboot and any files you write will count
// against your Container's memory limit.
Medium EmptyDirMedium `json:"medium"`
// Total amount of local storage required for this EmptyDir volume.
//
// The size
// limit is also applicable for memory medium. The maximum usage on memory
// medium EmptyDir would be the minimum value between the SizeLimit specified
// here and the sum of memory limits of all containers in a pod.
SizeLimit cdk8s.Size `json:"sizeLimit"`
}
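// Illustrative sketch (not part of the generated bindings): options for a
// RAM-backed (tmpfs) scratch volume. These options would typically be handed to
// the empty-dir volume factory, which is outside this excerpt.
func exampleEmptyDirOptions() *EmptyDirVolumeOptions {
	return &EmptyDirVolumeOptions{
		Medium: EmptyDirMedium_MEMORY,
	}
}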
// Utility class for creating env values read from various sources.
type EnvValue interface {
Value() interface{}
ValueFrom() interface{}
}
// The jsii proxy struct for EnvValue
type jsiiProxy_EnvValue struct {
_ byte // padding
}
func (j *jsiiProxy_EnvValue) Value() interface{} {
var returns interface{}
_jsii_.Get(
j,
"value",
&returns,
)
return returns
}
func (j *jsiiProxy_EnvValue) ValueFrom() interface{} {
var returns interface{}
_jsii_.Get(
j,
"valueFrom",
&returns,
)
return returns
}
// Create a value by reading a specific key inside a config map.
func EnvValue_FromConfigMap(configMap IConfigMap, key *string, options *EnvValueFromConfigMapOptions) EnvValue {
_init_.Initialize()
var returns EnvValue
_jsii_.StaticInvoke(
"cdk8s-plus-17.EnvValue",
"fromConfigMap",
[]interface{}{configMap, key, options},
&returns,
)
return returns
}
// Create a value from a key in the current process environment.
func EnvValue_FromProcess(key *string, options *EnvValueFromProcessOptions) EnvValue {
_init_.Initialize()
var returns EnvValue
_jsii_.StaticInvoke(
"cdk8s-plus-17.EnvValue",
"fromProcess",
[]interface{}{key, options},
&returns,
)
return returns
}
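// Illustrative sketch (not part of the generated bindings): forwarding a variable
// from the synthesis process environment into a container env value, failing if
// it is not set locally. The variable name is a placeholder.
func exampleEnvFromProcess() EnvValue {
	key := "GIT_SHA"
	required := true
	return EnvValue_FromProcess(&key, &EnvValueFromProcessOptions{Required: &required})
}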
// Defines an environment value from a secret JSON value.
func EnvValue_FromSecretValue(secretValue *SecretValue, options *EnvValueFromSecretOptions) EnvValue {
_init_.Initialize()
var returns EnvValue
_jsii_.StaticInvoke(
"cdk8s-plus-17.EnvValue",
"fromSecretValue",
[]interface{}{secretValue, options},
&returns,
)
return returns
}
// Create a value from the given argument.
func EnvValue_FromValue(value *string) EnvValue {
_init_.Initialize()
var returns EnvValue
_jsii_.StaticInvoke(
"cdk8s-plus-17.EnvValue",
"fromValue",
[]interface{}{value},
&returns,
)
return returns
}
// Options to specify an environment variable value from a ConfigMap key.
type EnvValueFromConfigMapOptions struct {
// Specify whether the ConfigMap or its key must be defined.
Optional *bool `json:"optional"`
}
// Options to specify an environment variable value from the process environment.
type EnvValueFromProcessOptions struct {
// Specify whether the key must exist in the environment.
//
// If this is set to true, and the key does not exist, an error will be thrown.
Required *bool `json:"required"`
}
// Options to specify an environment variable value from a Secret.
type EnvValueFromSecretOptions struct {
// Specify whether the Secret or its key must be defined.
Optional *bool `json:"optional"`
}
// Options for exposing a deployment via a service.
type ExposeOptions struct {
// The name of the service to expose.
//
// This will be set on the Service.metadata and must be a DNS_LABEL
Name *string `json:"name"`
// The IP protocol for this port.
//
// Supports "TCP", "UDP", and "SCTP". Default is TCP.
Protocol Protocol `json:"protocol"`
// The type of the exposed service.
ServiceType ServiceType `json:"serviceType"`
// The port number the service will redirect to.
TargetPort *float64 `json:"targetPort"`
}
// Options for `Probe.fromHttpGet()`.
type HttpGetProbeOptions struct {
// Minimum consecutive failures for the probe to be considered failed after having succeeded.
//
// Defaults to 3. Minimum value is 1.
FailureThreshold *float64 `json:"failureThreshold"`
// Number of seconds after the container has started before liveness probes are initiated.
// See: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes
//
InitialDelaySeconds cdk8s.Duration `json:"initialDelaySeconds"`
// How often (in seconds) to perform the probe.
//
// Defaults to 10 seconds. Minimum value is 1.
PeriodSeconds cdk8s.Duration `json:"periodSeconds"`
// Minimum consecutive successes for the probe to be considered successful after having failed. Defaults to 1.
//
// Must be 1 for liveness and startup. Minimum value is 1.
SuccessThreshold *float64 `json:"successThreshold"`
// Number of seconds after which the probe times out.
//
// Defaults to 1 second. Minimum value is 1.
// See: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes
//
TimeoutSeconds cdk8s.Duration `json:"timeoutSeconds"`
// The TCP port to use when sending the GET request.
Port *float64 `json:"port"`
}
// Represents a config map.
type IConfigMap interface {
IResource
}
// The jsii proxy for IConfigMap
type jsiiProxy_IConfigMap struct {
jsiiProxy_IResource
}
// Represents a resource that can be configured with a Kubernetes pod spec. (e.g `Deployment`, `Job`, `Pod`, ...).
//
// Use the `PodSpec` class as an implementation helper.
type IPodSpec interface {
// Add a container to the pod.
AddContainer(container *ContainerProps) Container
// Add a volume to the pod.
AddVolume(volume Volume)
// The containers belonging to the pod.
//
// Use `addContainer` to add containers.
Containers() *[]Container
// Restart policy for all containers within the pod.
RestartPolicy() RestartPolicy
// The service account used to run this pod.
ServiceAccount() IServiceAccount
// The volumes associated with this pod.
//
// Use `addVolume` to add volumes.
Volumes() *[]Volume
}
// The jsii proxy for IPodSpec
type jsiiProxy_IPodSpec struct {
_ byte // padding
}
func (i *jsiiProxy_IPodSpec) AddContainer(container *ContainerProps) Container {
var returns Container
_jsii_.Invoke(
i,
"addContainer",
[]interface{}{container},
&returns,
)
return returns
}
func (i *jsiiProxy_IPodSpec) AddVolume(volume Volume) {
_jsii_.InvokeVoid(
i,
"addVolume",
[]interface{}{volume},
)
}
func (j *jsiiProxy_IPodSpec) Containers() *[]Container {
var returns *[]Container
_jsii_.Get(
j,
"containers",
&returns,
)
return returns
}
func (j *jsiiProxy_IPodSpec) RestartPolicy() RestartPolicy {
var returns RestartPolicy
_jsii_.Get(
j,
"restartPolicy",
&returns,
)
return returns
}
func (j *jsiiProxy_IPodSpec) ServiceAccount() IServiceAccount {
var returns IServiceAccount
_jsii_.Get(
j,
"serviceAccount",
&returns,
)
return returns
}
func (j *jsiiProxy_IPodSpec) Volumes() *[]Volume {
var returns *[]Volume
_jsii_.Get(
j,
"volumes",
&returns,
)
return returns
}
// Represents a resource that can be configured with a Kubernetes pod template. (e.g `Deployment`, `Job`, ...).
//
// Use the `PodTemplate` class as an implementation helper.
type IPodTemplate interface {
IPodSpec
// Provides read/write access to the underlying pod metadata of the resource.
PodMetadata() cdk8s.ApiObjectMetadataDefinition
}
// The jsii proxy for IPodTemplate
type jsiiProxy_IPodTemplate struct {
jsiiProxy_IPodSpec
}
func (j *jsiiProxy_IPodTemplate) PodMetadata() cdk8s.ApiObjectMetadataDefinition {
var returns cdk8s.ApiObjectMetadataDefinition
_jsii_.Get(
j,
"podMetadata",
&returns,
)
return returns
}
// Represents a resource.
type IResource interface {
// The Kubernetes name of this resource.
Name() *string
}
// The jsii proxy for IResource
type jsiiProxy_IResource struct {
_ byte // padding
}
func (j *jsiiProxy_IResource) Name() *string {
var returns *string
_jsii_.Get(
j,
"name",
&returns,
)
return returns
}
type ISecret interface {
IResource
}
// The jsii proxy for ISecret
type jsiiProxy_ISecret struct {
jsiiProxy_IResource
}
type IServiceAccount interface {
IResource
}
// The jsii proxy for IServiceAccount
type jsiiProxy_IServiceAccount struct {
jsiiProxy_IResource
}
type ImagePullPolicy string
const (
ImagePullPolicy_ALWAYS ImagePullPolicy = "ALWAYS"
ImagePullPolicy_IF_NOT_PRESENT ImagePullPolicy = "IF_NOT_PRESENT"
ImagePullPolicy_NEVER ImagePullPolicy = "NEVER"
)
// Ingress is a collection of rules that allow inbound connections to reach the endpoints defined by a backend.
//
// An Ingress can be configured to give services
// externally-reachable URLs, load balance traffic, terminate SSL, offer name
// based virtual hosting etc.
type IngressV1Beta1 interface {
Resource
ApiObject() cdk8s.ApiObject
Metadata() cdk8s.ApiObjectMetadataDefinition
Name() *string
AddDefaultBackend(backend IngressV1Beta1Backend)
AddHostDefaultBackend(host *string, backend IngressV1Beta1Backend)
AddHostRule(host *string, path *string, backend IngressV1Beta1Backend)
AddRule(path *string, backend IngressV1Beta1Backend)
AddRules(rules ...*IngressV1Beta1Rule)
OnPrepare()
OnSynthesize(session constructs.ISynthesisSession)
OnValidate() *[]*string
ToString() *string
}
// The jsii proxy struct for IngressV1Beta1
type jsiiProxy_IngressV1Beta1 struct {
jsiiProxy_Resource
}
func (j *jsiiProxy_IngressV1Beta1) ApiObject() cdk8s.ApiObject {
var returns cdk8s.ApiObject
_jsii_.Get(
j,
"apiObject",
&returns,
)
return returns
}
func (j *jsiiProxy_IngressV1Beta1) Metadata() cdk8s.ApiObjectMetadataDefinition {
var returns cdk8s.ApiObjectMetadataDefinition
_jsii_.Get(
j,
"metadata",
&returns,
)
return returns
}
func (j *jsiiProxy_IngressV1Beta1) Name() *string {
var returns *string
_jsii_.Get(
j,
"name",
&returns,
)
return returns
}
func NewIngressV1Beta1(scope constructs.Construct, id *string, props *IngressV1Beta1Props) IngressV1Beta1 {
_init_.Initialize()
j := jsiiProxy_IngressV1Beta1{}
_jsii_.Create(
"cdk8s-plus-17.IngressV1Beta1",
[]interface{}{scope, id, props},
&j,
)
return &j
}
func NewIngressV1Beta1_Override(i IngressV1Beta1, scope constructs.Construct, id *string, props *IngressV1Beta1Props) {
_init_.Initialize()
_jsii_.Create(
"cdk8s-plus-17.IngressV1Beta1",
[]interface{}{scope, id, props},
i,
)
}
// Defines the default backend for this ingress.
//
// A default backend capable of
// servicing requests that don't match any rule.
func (i *jsiiProxy_IngressV1Beta1) AddDefaultBackend(backend IngressV1Beta1Backend) {
_jsii_.InvokeVoid(
i,
"addDefaultBackend",
[]interface{}{backend},
)
}
// Specify a default backend for a specific host name.
//
// This backend will be used as a catch-all for requests
// targeted to this host name (the `Host` header matches this value).
func (i *jsiiProxy_IngressV1Beta1) AddHostDefaultBackend(host *string, backend IngressV1Beta1Backend) {
_jsii_.InvokeVoid(
i,
"addHostDefaultBackend",
[]interface{}{host, backend},
)
}
// Adds an ingress rule applied to requests to a specific host and a specific HTTP path (the `Host` header matches this value).
func (i *jsiiProxy_IngressV1Beta1) AddHostRule(host *string, path *string, backend IngressV1Beta1Backend) {
_jsii_.InvokeVoid(
i,
"addHostRule",
[]interface{}{host, path, backend},
)
}
// Adds an ingress rule applied to requests sent to a specific HTTP path.
func (i *jsiiProxy_IngressV1Beta1) AddRule(path *string, backend IngressV1Beta1Backend) {
_jsii_.InvokeVoid(
i,
"addRule",
[]interface{}{path, backend},
)
}
// Adds rules to this ingress.
func (i *jsiiProxy_IngressV1Beta1) AddRules(rules ...*IngressV1Beta1Rule) {
args := []interface{}{}
for _, a := range rules {
args = append(args, a)
}
_jsii_.InvokeVoid(
i,
"addRules",
args,
)
}
// Perform final modifications before synthesis.
//
// This method can be implemented by derived constructs in order to perform
// final changes before synthesis. prepare() will be called after child
// constructs have been prepared.
//
// This is an advanced framework feature. Only use this if you
// understand the implications.
func (i *jsiiProxy_IngressV1Beta1) OnPrepare() {
_jsii_.InvokeVoid(
i,
"onPrepare",
nil, // no parameters
)
}
// Allows this construct to emit artifacts into the cloud assembly during synthesis.
//
// This method is usually implemented by framework-level constructs such as `Stack` and `Asset`
// as they participate in synthesizing the cloud assembly.
func (i *jsiiProxy_IngressV1Beta1) OnSynthesize(session constructs.ISynthesisSession) {
_jsii_.InvokeVoid(
i,
"onSynthesize",
[]interface{}{session},
)
}
// (deprecated) Validate the current construct.
//
// This method can be implemented by derived constructs in order to perform
// validation logic. It is called on all constructs before synthesis.
func (i *jsiiProxy_IngressV1Beta1) OnValidate() *[]*string {
var returns *[]*string
_jsii_.Invoke(
i,
"onValidate",
nil, // no parameters
&returns,
)
return returns
}
// Returns a string representation of this construct.
func (i *jsiiProxy_IngressV1Beta1) ToString() *string {
var returns *string
_jsii_.Invoke(
i,
"toString",
nil, // no parameters
&returns,
)
return returns
}
// The backend for an ingress path.
type IngressV1Beta1Backend interface {
}
// The jsii proxy struct for IngressV1Beta1Backend
type jsiiProxy_IngressV1Beta1Backend struct {
_ byte // padding
}
// A Kubernetes `Service` to use as the backend for this path.
func IngressV1Beta1Backend_FromService(service Service, options *ServiceIngressV1BetaBackendOptions) IngressV1Beta1Backend {
_init_.Initialize()
var returns IngressV1Beta1Backend
_jsii_.StaticInvoke(
"cdk8s-plus-17.IngressV1Beta1Backend",
"fromService",
[]interface{}{service, options},
&returns,
)
return returns
}
// Properties for `Ingress`.
type IngressV1Beta1Props struct {
// Metadata that all persisted resources must have, which includes all objects users must create.
Metadata *cdk8s.ApiObjectMetadata `json:"metadata"`
// The default backend services requests that do not match any rule.
//
// Using this option or the `addDefaultBackend()` method is equivalent to
// adding a rule with both `path` and `host` undefined.
DefaultBackend IngressV1Beta1Backend `json:"defaultBackend"`
// Routing rules for this ingress.
//
// Each rule must define an `IngressBackend` that will receive the requests
// that match this rule. If both `host` and `path` are not specified, this
// backend will be used as the default backend of the ingress.
//
// You can also add rules later using `addRule()`, `addHostRule()`,
// `addDefaultBackend()` and `addHostDefaultBackend()`.
Rules *[]*IngressV1Beta1Rule `json:"rules"`
}
// Represents the rules mapping the paths under a specified host to the related backend services.
//
// Incoming requests are first evaluated for a host match,
// then routed to the backend associated with the matching path.
type IngressV1Beta1Rule struct {
// Backend defines the referenced service endpoint to which the traffic will be forwarded to.
Backend IngressV1Beta1Backend `json:"backend"`
// Host is the fully qualified domain name of a network host, as defined by RFC 3986.
//
// Note the following deviations from the "host" part of the URI as
// defined in the RFC: 1. IPs are not allowed. Currently an IngressRuleValue
// can only apply to the IP in the Spec of the parent Ingress. 2. The `:`
// delimiter is not respected because ports are not allowed. Currently the
// port of an Ingress is implicitly :80 for http and :443 for https. Both
// these may change in the future. Incoming requests are matched against the
// host before the IngressRuleValue.
Host *string `json:"host"`
// Path is an extended POSIX regex as defined by IEEE Std 1003.1, (i.e this follows the egrep/unix syntax, not the perl syntax) matched against the path of an incoming request. Currently it can contain characters disallowed from the conventional "path" part of a URL as defined by RFC 3986. Paths must begin with a '/'.
Path *string `json:"path"`
}
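// Example (illustrative sketch, not part of the generated bindings): wiring an
// IngressV1Beta1 so that a host/path is routed to a Service backend. The scope
// and backend service are assumed to be created elsewhere; the id, host, path
// and port values are hypothetical placeholders.
func exampleIngressV1Beta1(scope constructs.Construct, backendService Service) {
	id := "ingress"
	ingress := NewIngressV1Beta1(scope, &id, &IngressV1Beta1Props{})
	// Route requests for example.com/app to the backend service on port 8080.
	host := "example.com"
	path := "/app"
	port := float64(8080)
	backend := IngressV1Beta1Backend_FromService(backendService, &ServiceIngressV1BetaBackendOptions{
		Port: &port,
	})
	ingress.AddHostRule(&host, &path, backend)
	// Requests that match no rule fall back to the same backend.
	ingress.AddDefaultBackend(backend)
}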
// A Job creates one or more Pods and ensures that a specified number of them successfully terminate.
//
// As pods successfully complete,
// the Job tracks the successful completions. When a specified number of successful completions is reached, the task (i.e., Job) is complete.
// Deleting a Job will clean up the Pods it created. A simple case is to create one Job object in order to reliably run one Pod to completion.
// The Job object will start a new Pod if the first Pod fails or is deleted (for example due to a node hardware failure or a node reboot).
// You can also use a Job to run multiple Pods in parallel.
type Job interface {
Resource
IPodTemplate
ActiveDeadline() cdk8s.Duration
ApiObject() cdk8s.ApiObject
BackoffLimit() *float64
Containers() *[]Container
Metadata() cdk8s.ApiObjectMetadataDefinition
Name() *string
PodMetadata() cdk8s.ApiObjectMetadataDefinition
RestartPolicy() RestartPolicy
ServiceAccount() IServiceAccount
TtlAfterFinished() cdk8s.Duration
Volumes() *[]Volume
AddContainer(container *ContainerProps) Container
AddVolume(volume Volume)
OnPrepare()
OnSynthesize(session constructs.ISynthesisSession)
OnValidate() *[]*string
ToString() *string
}
// The jsii proxy struct for Job
type jsiiProxy_Job struct {
jsiiProxy_Resource
jsiiProxy_IPodTemplate
}
func (j *jsiiProxy_Job) ActiveDeadline() cdk8s.Duration {
var returns cdk8s.Duration
_jsii_.Get(
j,
"activeDeadline",
&returns,
)
return returns
}
func (j *jsiiProxy_Job) ApiObject() cdk8s.ApiObject {
var returns cdk8s.ApiObject
_jsii_.Get(
j,
"apiObject",
&returns,
)
return returns
}
func (j *jsiiProxy_Job) BackoffLimit() *float64 {
var returns *float64
_jsii_.Get(
j,
"backoffLimit",
&returns,
)
return returns
}
func (j *jsiiProxy_Job) Containers() *[]Container {
var returns *[]Container
_jsii_.Get(
j,
"containers",
&returns,
)
return returns
}
func (j *jsiiProxy_Job) Metadata() cdk8s.ApiObjectMetadataDefinition {
var returns cdk8s.ApiObjectMetadataDefinition
_jsii_.Get(
j,
"metadata",
&returns,
)
return returns
}
func (j *jsiiProxy_Job) Name() *string {
var returns *string
_jsii_.Get(
j,
"name",
&returns,
)
return returns
}
func (j *jsiiProxy_Job) PodMetadata() cdk8s.ApiObjectMetadataDefinition {
var returns cdk8s.ApiObjectMetadataDefinition
_jsii_.Get(
j,
"podMetadata",
&returns,
)
return returns
}
func (j *jsiiProxy_Job) RestartPolicy() RestartPolicy {
var returns RestartPolicy
_jsii_.Get(
j,
"restartPolicy",
&returns,
)
return returns
}
func (j *jsiiProxy_Job) ServiceAccount() IServiceAccount {
var returns IServiceAccount
_jsii_.Get(
j,
"serviceAccount",
&returns,
)
return returns
}
func (j *jsiiProxy_Job) TtlAfterFinished() cdk8s.Duration {
var returns cdk8s.Duration
_jsii_.Get(
j,
"ttlAfterFinished",
&returns,
)
return returns
}
func (j *jsiiProxy_Job) Volumes() *[]Volume {
var returns *[]Volume
_jsii_.Get(
j,
"volumes",
&returns,
)
return returns
}
func NewJob(scope constructs.Construct, id *string, props *JobProps) Job {
_init_.Initialize()
j := jsiiProxy_Job{}
_jsii_.Create(
"cdk8s-plus-17.Job",
[]interface{}{scope, id, props},
&j,
)
return &j
}
func NewJob_Override(j Job, scope constructs.Construct, id *string, props *JobProps) {
_init_.Initialize()
_jsii_.Create(
"cdk8s-plus-17.Job",
[]interface{}{scope, id, props},
j,
)
}
// Add a container to the pod.
func (j *jsiiProxy_Job) AddContainer(container *ContainerProps) Container {
var returns Container
_jsii_.Invoke(
j,
"addContainer",
[]interface{}{container},
&returns,
)
return returns
}
// Add a volume to the pod.
func (j *jsiiProxy_Job) AddVolume(volume Volume) {
_jsii_.InvokeVoid(
j,
"addVolume",
[]interface{}{volume},
)
}
// Perform final modifications before synthesis.
//
// This method can be implemented by derived constructs in order to perform
// final changes before synthesis. prepare() will be called after child
// constructs have been prepared.
//
// This is an advanced framework feature. Only use this if you
// understand the implications.
func (j *jsiiProxy_Job) OnPrepare() {
_jsii_.InvokeVoid(
j,
"onPrepare",
nil, // no parameters
)
}
// Allows this construct to emit artifacts into the cloud assembly during synthesis.
//
// This method is usually implemented by framework-level constructs such as `Stack` and `Asset`
// as they participate in synthesizing the cloud assembly.
func (j *jsiiProxy_Job) OnSynthesize(session constructs.ISynthesisSession) {
_jsii_.InvokeVoid(
j,
"onSynthesize",
[]interface{}{session},
)
}
// Validate the current construct.
//
// This method can be implemented by derived constructs in order to perform
// validation logic. It is called on all constructs before synthesis.
//
// Returns: An array of validation error messages, or an empty array if the construct is valid.
// Deprecated: use `Node.addValidation()` to subscribe validation functions on this construct
// instead of overriding this method.
func (j *jsiiProxy_Job) OnValidate() *[]*string {
var returns *[]*string
_jsii_.Invoke(
j,
"onValidate",
nil, // no parameters
&returns,
)
return returns
}
// Returns a string representation of this construct.
func (j *jsiiProxy_Job) ToString() *string {
var returns *string
_jsii_.Invoke(
j,
"toString",
nil, // no parameters
&returns,
)
return returns
}
// Properties for initialization of `Job`.
type JobProps struct {
// Metadata that all persisted resources must have, which includes all objects users must create.
Metadata *cdk8s.ApiObjectMetadata `json:"metadata"`
// List of containers belonging to the pod.
//
// Containers cannot currently be
// added or removed. There must be at least one container in a Pod.
//
// You can add additional containers using `podSpec.addContainer()`
Containers *[]*ContainerProps `json:"containers"`
// Restart policy for all containers within the pod.
// See: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#restart-policy
//
RestartPolicy RestartPolicy `json:"restartPolicy"`
// A service account provides an identity for processes that run in a Pod.
//
// When you (a human) access the cluster (for example, using kubectl), you are
// authenticated by the apiserver as a particular User Account (currently this
// is usually admin, unless your cluster administrator has customized your
// cluster). Processes in containers inside pods can also contact the
// apiserver. When they do, they are authenticated as a particular Service
// Account (for example, default).
// See: https://kubernetes.io/docs/tasks/configure-pod-container/configure-service-account/
//
ServiceAccount IServiceAccount `json:"serviceAccount"`
// List of volumes that can be mounted by containers belonging to the pod.
//
// You can also add volumes later using `podSpec.addVolume()`
// See: https://kubernetes.io/docs/concepts/storage/volumes
//
Volumes *[]Volume `json:"volumes"`
// The pod metadata.
PodMetadata *cdk8s.ApiObjectMetadata `json:"podMetadata"`
// Specifies the duration the job may be active before the system tries to terminate it.
ActiveDeadline cdk8s.Duration `json:"activeDeadline"`
// Specifies the number of retries before marking this job failed.
BackoffLimit *float64 `json:"backoffLimit"`
// Limits the lifetime of a Job that has finished execution (either Complete or Failed).
//
// If this field is set, after the Job finishes, it is eligible to
// be automatically deleted. When the Job is being deleted, its lifecycle
// guarantees (e.g. finalizers) will be honored. If this field is set to zero,
// the Job becomes eligible to be deleted immediately after it finishes. This
// field is alpha-level and is only honored by servers that enable the
// `TTLAfterFinished` feature.
TtlAfterFinished cdk8s.Duration `json:"ttlAfterFinished"`
}
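// Example (illustrative sketch, not part of the generated bindings): a one-off
// Job with a single container added after construction. The id and image are
// placeholders, and the Image field is assumed to exist on ContainerProps
// (declared earlier in this file).
func exampleJob(scope constructs.Construct) {
	id := "load-data"
	job := NewJob(scope, &id, &JobProps{})
	image := "busybox"
	job.AddContainer(&ContainerProps{Image: &image})
}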
// Options for mounts.
type MountOptions struct {
// Determines how mounts are propagated from the host to container and the other way around.
//
// When not set, MountPropagationNone is used.
//
// Mount propagation allows for sharing volumes mounted by a Container to
// other Containers in the same Pod, or even to other Pods on the same node.
//
// This field is beta in 1.10.
Propagation MountPropagation `json:"propagation"`
// Mounted read-only if true, read-write otherwise (false or unspecified).
//
// Defaults to false.
ReadOnly *bool `json:"readOnly"`
// Path within the volume from which the container's volume should be mounted.
SubPath *string `json:"subPath"`
// Expanded path within the volume from which the container's volume should be mounted.
//
// Behaves similarly to SubPath but environment variable references
// $(VAR_NAME) are expanded using the container's environment. Defaults to ""
// (volume's root). SubPathExpr and SubPath are mutually exclusive. This field
// is beta in 1.15.
SubPathExpr *string `json:"subPathExpr"`
}
type MountPropagation string
const (
MountPropagation_NONE MountPropagation = "NONE"
MountPropagation_HOST_TO_CONTAINER MountPropagation = "HOST_TO_CONTAINER"
MountPropagation_BIDIRECTIONAL MountPropagation = "BIDIRECTIONAL"
)
// Maps a string key to a path within a volume.
type PathMapping struct {
// The relative path of the file to map the key to.
//
// May not be an absolute
// path. May not contain the path element '..'. May not start with the string
// '..'.
Path *string `json:"path"`
// Optional: mode bits to use on this file, must be a value between 0 and 0777.
//
// If not specified, the volume defaultMode will be used. This might be
// in conflict with other options that affect the file mode, like fsGroup, and
// the result can be other mode bits set.
Mode *float64 `json:"mode"`
}
// Pod is a collection of containers that can run on a host.
//
// This resource is
// created by clients and scheduled onto hosts.
type Pod interface {
Resource
IPodSpec
ApiObject() cdk8s.ApiObject
Containers() *[]Container
Metadata() cdk8s.ApiObjectMetadataDefinition
Name() *string
RestartPolicy() RestartPolicy
ServiceAccount() IServiceAccount
Volumes() *[]Volume
AddContainer(container *ContainerProps) Container
AddVolume(volume Volume)
OnPrepare()
OnSynthesize(session constructs.ISynthesisSession)
OnValidate() *[]*string
ToString() *string
}
// The jsii proxy struct for Pod
type jsiiProxy_Pod struct {
jsiiProxy_Resource
jsiiProxy_IPodSpec
}
func (j *jsiiProxy_Pod) ApiObject() cdk8s.ApiObject {
var returns cdk8s.ApiObject
_jsii_.Get(
j,
"apiObject",
&returns,
)
return returns
}
func (j *jsiiProxy_Pod) Containers() *[]Container {
var returns *[]Container
_jsii_.Get(
j,
"containers",
&returns,
)
return returns
}
func (j *jsiiProxy_Pod) Metadata() cdk8s.ApiObjectMetadataDefinition {
var returns cdk8s.ApiObjectMetadataDefinition
_jsii_.Get(
j,
"metadata",
&returns,
)
return returns
}
func (j *jsiiProxy_Pod) Name() *string {
var returns *string
_jsii_.Get(
j,
"name",
&returns,
)
return returns
}
func (j *jsiiProxy_Pod) RestartPolicy() RestartPolicy {
var returns RestartPolicy
_jsii_.Get(
j,
"restartPolicy",
&returns,
)
return returns
}
func (j *jsiiProxy_Pod) ServiceAccount() IServiceAccount {
var returns IServiceAccount
_jsii_.Get(
j,
"serviceAccount",
&returns,
)
return returns
}
func (j *jsiiProxy_Pod) Volumes() *[]Volume {
var returns *[]Volume
_jsii_.Get(
j,
"volumes",
&returns,
)
return returns
}
func NewPod(scope constructs.Construct, id *string, props *PodProps) Pod {
_init_.Initialize()
j := jsiiProxy_Pod{}
_jsii_.Create(
"cdk8s-plus-17.Pod",
[]interface{}{scope, id, props},
&j,
)
return &j
}
func NewPod_Override(p Pod, scope constructs.Construct, id *string, props *PodProps) {
_init_.Initialize()
_jsii_.Create(
"cdk8s-plus-17.Pod",
[]interface{}{scope, id, props},
p,
)
}
// Add a container to the pod.
func (p *jsiiProxy_Pod) AddContainer(container *ContainerProps) Container {
var returns Container
_jsii_.Invoke(
p,
"addContainer",
[]interface{}{container},
&returns,
)
return returns
}
// Add a volume to the pod.
func (p *jsiiProxy_Pod) AddVolume(volume Volume) {
_jsii_.InvokeVoid(
p,
"addVolume",
[]interface{}{volume},
)
}
// Perform final modifications before synthesis.
//
// This method can be implemented by derived constructs in order to perform
// final changes before synthesis. prepare() will be called after child
// constructs have been prepared.
//
// This is an advanced framework feature. Only use this if you
// understand the implications.
func (p *jsiiProxy_Pod) OnPrepare() {
_jsii_.InvokeVoid(
p,
"onPrepare",
nil, // no parameters
)
}
// Allows this construct to emit artifacts into the cloud assembly during synthesis.
//
// This method is usually implemented by framework-level constructs such as `Stack` and `Asset`
// as they participate in synthesizing the cloud assembly.
func (p *jsiiProxy_Pod) OnSynthesize(session constructs.ISynthesisSession) {
_jsii_.InvokeVoid(
p,
"onSynthesize",
[]interface{}{session},
)
}
// Validate the current construct.
//
// This method can be implemented by derived constructs in order to perform
// validation logic. It is called on all constructs before synthesis.
//
// Returns: An array of validation error messages, or an empty array if the construct is valid.
// Deprecated: use `Node.addValidation()` to subscribe validation functions on this construct
// instead of overriding this method.
func (p *jsiiProxy_Pod) OnValidate() *[]*string {
var returns *[]*string
_jsii_.Invoke(
p,
"onValidate",
nil, // no parameters
&returns,
)
return returns
}
// Returns a string representation of this construct.
func (p *jsiiProxy_Pod) ToString() *string {
var returns *string
_jsii_.Invoke(
p,
"toString",
nil, // no parameters
&returns,
)
return returns
}
// Controls how pods are created during initial scale up, when replacing pods on nodes, or when scaling down.
//
// The default policy is `OrderedReady`, where pods are created in increasing order
// (pod-0, then pod-1, etc) and the controller will wait until each pod is ready before
// continuing. When scaling down, the pods are removed in the opposite order.
//
// The alternative policy is `Parallel` which will create pods in parallel to match the
// desired scale without waiting, and on scale down will delete all pods at once.
type PodManagementPolicy string
const (
PodManagementPolicy_ORDERED_READY PodManagementPolicy = "ORDERED_READY"
PodManagementPolicy_PARALLEL PodManagementPolicy = "PARALLEL"
)
// Properties for initialization of `Pod`.
type PodProps struct {
// Metadata that all persisted resources must have, which includes all objects users must create.
Metadata *cdk8s.ApiObjectMetadata `json:"metadata"`
// List of containers belonging to the pod.
//
// Containers cannot currently be
// added or removed. There must be at least one container in a Pod.
//
// You can add additional containers using `podSpec.addContainer()`
Containers *[]*ContainerProps `json:"containers"`
// Restart policy for all containers within the pod.
// See: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#restart-policy
//
RestartPolicy RestartPolicy `json:"restartPolicy"`
// A service account provides an identity for processes that run in a Pod.
//
// When you (a human) access the cluster (for example, using kubectl), you are
// authenticated by the apiserver as a particular User Account (currently this
// is usually admin, unless your cluster administrator has customized your
// cluster). Processes in containers inside pods can also contact the
// apiserver. When they do, they are authenticated as a particular Service
// Account (for example, default).
// See: https://kubernetes.io/docs/tasks/configure-pod-container/configure-service-account/
//
ServiceAccount IServiceAccount `json:"serviceAccount"`
// List of volumes that can be mounted by containers belonging to the pod.
//
// You can also add volumes later using `podSpec.addVolume()`
// See: https://kubernetes.io/docs/concepts/storage/volumes
//
Volumes *[]Volume `json:"volumes"`
}
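// Example (illustrative sketch, not part of the generated bindings): a minimal
// Pod that never restarts its container. The id and image are placeholders, and
// the Image field is assumed to exist on ContainerProps (declared earlier in
// this file).
func examplePod(scope constructs.Construct) {
	id := "worker"
	pod := NewPod(scope, &id, &PodProps{
		RestartPolicy: RestartPolicy_NEVER,
	})
	image := "busybox"
	pod.AddContainer(&ContainerProps{Image: &image})
}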
// Provides read/write capabilities on top of a `PodSpecProps`.
type PodSpec interface {
IPodSpec
Containers() *[]Container
RestartPolicy() RestartPolicy
ServiceAccount() IServiceAccount
Volumes() *[]Volume
AddContainer(container *ContainerProps) Container
AddVolume(volume Volume)
}
// The jsii proxy struct for PodSpec
type jsiiProxy_PodSpec struct {
jsiiProxy_IPodSpec
}
func (j *jsiiProxy_PodSpec) Containers() *[]Container {
var returns *[]Container
_jsii_.Get(
j,
"containers",
&returns,
)
return returns
}
func (j *jsiiProxy_PodSpec) RestartPolicy() RestartPolicy {
var returns RestartPolicy
_jsii_.Get(
j,
"restartPolicy",
&returns,
)
return returns
}
func (j *jsiiProxy_PodSpec) ServiceAccount() IServiceAccount {
var returns IServiceAccount
_jsii_.Get(
j,
"serviceAccount",
&returns,
)
return returns
}
func (j *jsiiProxy_PodSpec) Volumes() *[]Volume {
var returns *[]Volume
_jsii_.Get(
j,
"volumes",
&returns,
)
return returns
}
func NewPodSpec(props *PodSpecProps) PodSpec {
_init_.Initialize()
j := jsiiProxy_PodSpec{}
_jsii_.Create(
"cdk8s-plus-17.PodSpec",
[]interface{}{props},
&j,
)
return &j
}
func NewPodSpec_Override(p PodSpec, props *PodSpecProps) {
_init_.Initialize()
_jsii_.Create(
"cdk8s-plus-17.PodSpec",
[]interface{}{props},
p,
)
}
// Add a container to the pod.
func (p *jsiiProxy_PodSpec) AddContainer(container *ContainerProps) Container {
var returns Container
_jsii_.Invoke(
p,
"addContainer",
[]interface{}{container},
&returns,
)
return returns
}
// Add a volume to the pod.
func (p *jsiiProxy_PodSpec) AddVolume(volume Volume) {
_jsii_.InvokeVoid(
p,
"addVolume",
[]interface{}{volume},
)
}
// Properties of a `PodSpec`.
type PodSpecProps struct {
// List of containers belonging to the pod.
//
// Containers cannot currently be
// added or removed. There must be at least one container in a Pod.
//
// You can add additional containers using `podSpec.addContainer()`
Containers *[]*ContainerProps `json:"containers"`
// Restart policy for all containers within the pod.
// See: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#restart-policy
//
RestartPolicy RestartPolicy `json:"restartPolicy"`
// A service account provides an identity for processes that run in a Pod.
//
// When you (a human) access the cluster (for example, using kubectl), you are
// authenticated by the apiserver as a particular User Account (currently this
// is usually admin, unless your cluster administrator has customized your
// cluster). Processes in containers inside pods can also contact the
// apiserver. When they do, they are authenticated as a particular Service
// Account (for example, default).
// See: https://kubernetes.io/docs/tasks/configure-pod-container/configure-service-account/
//
ServiceAccount IServiceAccount `json:"serviceAccount"`
// List of volumes that can be mounted by containers belonging to the pod.
//
// You can also add volumes later using `podSpec.addVolume()`
// See: https://kubernetes.io/docs/concepts/storage/volumes
//
Volumes *[]Volume `json:"volumes"`
}
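// Example (illustrative sketch, not part of the generated bindings): a PodSpec
// is a plain helper object rather than a construct, so it can be assembled on
// its own and handed to a higher-level resource later. The image is a
// placeholder, and the Image field is assumed to exist on ContainerProps.
func examplePodSpec() PodSpec {
	spec := NewPodSpec(&PodSpecProps{
		RestartPolicy: RestartPolicy_ALWAYS,
	})
	image := "redis"
	spec.AddContainer(&ContainerProps{Image: &image})
	return spec
}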
// Provides read/write capabilities on top of a `PodTemplateProps`.
type PodTemplate interface {
PodSpec
IPodTemplate
Containers() *[]Container
PodMetadata() cdk8s.ApiObjectMetadataDefinition
RestartPolicy() RestartPolicy
ServiceAccount() IServiceAccount
Volumes() *[]Volume
AddContainer(container *ContainerProps) Container
AddVolume(volume Volume)
}
// The jsii proxy struct for PodTemplate
type jsiiProxy_PodTemplate struct {
jsiiProxy_PodSpec
jsiiProxy_IPodTemplate
}
func (j *jsiiProxy_PodTemplate) Containers() *[]Container {
var returns *[]Container
_jsii_.Get(
j,
"containers",
&returns,
)
return returns
}
func (j *jsiiProxy_PodTemplate) PodMetadata() cdk8s.ApiObjectMetadataDefinition {
var returns cdk8s.ApiObjectMetadataDefinition
_jsii_.Get(
j,
"podMetadata",
&returns,
)
return returns
}
func (j *jsiiProxy_PodTemplate) RestartPolicy() RestartPolicy {
var returns RestartPolicy
_jsii_.Get(
j,
"restartPolicy",
&returns,
)
return returns
}
func (j *jsiiProxy_PodTemplate) ServiceAccount() IServiceAccount {
var returns IServiceAccount
_jsii_.Get(
j,
"serviceAccount",
&returns,
)
return returns
}
func (j *jsiiProxy_PodTemplate) Volumes() *[]Volume {
var returns *[]Volume
_jsii_.Get(
j,
"volumes",
&returns,
)
return returns
}
func NewPodTemplate(props *PodTemplateProps) PodTemplate {
_init_.Initialize()
j := jsiiProxy_PodTemplate{}
_jsii_.Create(
"cdk8s-plus-17.PodTemplate",
[]interface{}{props},
&j,
)
return &j
}
func NewPodTemplate_Override(p PodTemplate, props *PodTemplateProps) {
_init_.Initialize()
_jsii_.Create(
"cdk8s-plus-17.PodTemplate",
[]interface{}{props},
p,
)
}
// Add a container to the pod.
func (p *jsiiProxy_PodTemplate) AddContainer(container *ContainerProps) Container {
var returns Container
_jsii_.Invoke(
p,
"addContainer",
[]interface{}{container},
&returns,
)
return returns
}
// Add a volume to the pod.
func (p *jsiiProxy_PodTemplate) AddVolume(volume Volume) {
_jsii_.InvokeVoid(
p,
"addVolume",
[]interface{}{volume},
)
}
// Properties of a `PodTemplate`.
//
// Adds metadata information on top of the spec.
type PodTemplateProps struct {
// List of containers belonging to the pod.
//
// Containers cannot currently be
// added or removed. There must be at least one container in a Pod.
//
// You can add additional containers using `podSpec.addContainer()`
Containers *[]*ContainerProps `json:"containers"`
// Restart policy for all containers within the pod.
// See: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#restart-policy
//
RestartPolicy RestartPolicy `json:"restartPolicy"`
// A service account provides an identity for processes that run in a Pod.
//
// When you (a human) access the cluster (for example, using kubectl), you are
// authenticated by the apiserver as a particular User Account (currently this
// is usually admin, unless your cluster administrator has customized your
// cluster). Processes in containers inside pods can also contact the
// apiserver. When they do, they are authenticated as a particular Service
// Account (for example, default).
// See: https://kubernetes.io/docs/tasks/configure-pod-container/configure-service-account/
//
ServiceAccount IServiceAccount `json:"serviceAccount"`
// List of volumes that can be mounted by containers belonging to the pod.
//
// You can also add volumes later using `podSpec.addVolume()`
// See: https://kubernetes.io/docs/concepts/storage/volumes
//
Volumes *[]Volume `json:"volumes"`
// The pod metadata.
PodMetadata *cdk8s.ApiObjectMetadata `json:"podMetadata"`
}
// Probe describes a health check to be performed against a container to determine whether it is alive or ready to receive traffic.
type Probe interface {
}
// The jsii proxy struct for Probe
type jsiiProxy_Probe struct {
_ byte // padding
}
func NewProbe_Override(p Probe) {
_init_.Initialize()
_jsii_.Create(
"cdk8s-plus-17.Probe",
nil, // no parameters
p,
)
}
// Defines a probe based on a command which is executed within the container.
func Probe_FromCommand(command *[]*string, options *CommandProbeOptions) Probe {
_init_.Initialize()
var returns Probe
_jsii_.StaticInvoke(
"cdk8s-plus-17.Probe",
"fromCommand",
[]interface{}{command, options},
&returns,
)
return returns
}
// Defines a probe based on an HTTP GET request to the IP address of the container.
func Probe_FromHttpGet(path *string, options *HttpGetProbeOptions) Probe {
_init_.Initialize()
var returns Probe
_jsii_.StaticInvoke(
"cdk8s-plus-17.Probe",
"fromHttpGet",
[]interface{}{path, options},
&returns,
)
return returns
}
// Probe options.
type ProbeOptions struct {
// Minimum consecutive failures for the probe to be considered failed after having succeeded.
//
// Defaults to 3. Minimum value is 1.
FailureThreshold *float64 `json:"failureThreshold"`
// Number of seconds after the container has started before liveness probes are initiated.
// See: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes
//
InitialDelaySeconds cdk8s.Duration `json:"initialDelaySeconds"`
// How often (in seconds) to perform the probe.
//
// Defaults to 10 seconds. Minimum value is 1.
PeriodSeconds cdk8s.Duration `json:"periodSeconds"`
// Minimum consecutive successes for the probe to be considered successful after having failed. Defaults to 1.
//
// Must be 1 for liveness and startup. Minimum value is 1.
SuccessThreshold *float64 `json:"successThreshold"`
// Number of seconds after which the probe times out.
//
// Defaults to 1 second. Minimum value is 1.
// See: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes
//
TimeoutSeconds cdk8s.Duration `json:"timeoutSeconds"`
}
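// Example (illustrative sketch, not part of the generated bindings): the two
// probe factories shown above. The command and path are placeholders; tuning
// fields such as FailureThreshold or PeriodSeconds from ProbeOptions are
// assumed to be settable on the respective option structs.
func exampleProbes() (Probe, Probe) {
	// A probe that runs a command inside the container.
	cat := "cat"
	file := "/tmp/healthy"
	command := []*string{&cat, &file}
	execProbe := Probe_FromCommand(&command, &CommandProbeOptions{})
	// A probe that issues an HTTP GET against the container's IP address.
	path := "/healthz"
	httpProbe := Probe_FromHttpGet(&path, &HttpGetProbeOptions{})
	return execProbe, httpProbe
}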
type Protocol string
const (
Protocol_TCP Protocol = "TCP"
Protocol_UDP Protocol = "UDP"
Protocol_SCTP Protocol = "SCTP"
)
// Base class for all Kubernetes objects in cdk8s-plus.
//
// Represents a single
// resource.
type Resource interface {
constructs.Construct
IResource
ApiObject() cdk8s.ApiObject
Metadata() cdk8s.ApiObjectMetadataDefinition
Name() *string
OnPrepare()
OnSynthesize(session constructs.ISynthesisSession)
OnValidate() *[]*string
ToString() *string
}
// The jsii proxy struct for Resource
type jsiiProxy_Resource struct {
internal.Type__constructsConstruct
jsiiProxy_IResource
}
func (j *jsiiProxy_Resource) ApiObject() cdk8s.ApiObject {
var returns cdk8s.ApiObject
_jsii_.Get(
j,
"apiObject",
&returns,
)
return returns
}
func (j *jsiiProxy_Resource) Metadata() cdk8s.ApiObjectMetadataDefinition {
var returns cdk8s.ApiObjectMetadataDefinition
_jsii_.Get(
j,
"metadata",
&returns,
)
return returns
}
func (j *jsiiProxy_Resource) Name() *string {
var returns *string
_jsii_.Get(
j,
"name",
&returns,
)
return returns
}
// Creates a new construct node.
func NewResource_Override(r Resource, scope constructs.Construct, id *string, options *constructs.ConstructOptions) {
_init_.Initialize()
_jsii_.Create(
"cdk8s-plus-17.Resource",
[]interface{}{scope, id, options},
r,
)
}
// Perform final modifications before synthesis.
//
// This method can be implemented by derived constructs in order to perform
// final changes before synthesis. prepare() will be called after child
// constructs have been prepared.
//
// This is an advanced framework feature. Only use this if you
// understand the implications.
func (r *jsiiProxy_Resource) OnPrepare() {
_jsii_.InvokeVoid(
r,
"onPrepare",
nil, // no parameters
)
}
// Allows this construct to emit artifacts into the cloud assembly during synthesis.
//
// This method is usually implemented by framework-level constructs such as `Stack` and `Asset`
// as they participate in synthesizing the cloud assembly.
func (r *jsiiProxy_Resource) OnSynthesize(session constructs.ISynthesisSession) {
_jsii_.InvokeVoid(
r,
"onSynthesize",
[]interface{}{session},
)
}
// Validate the current construct.
//
// This method can be implemented by derived constructs in order to perform
// validation logic. It is called on all constructs before synthesis.
//
// Returns: An array of validation error messages, or an empty array if the construct is valid.
// Deprecated: use `Node.addValidation()` to subscribe validation functions on this construct
// instead of overriding this method.
func (r *jsiiProxy_Resource) OnValidate() *[]*string {
var returns *[]*string
_jsii_.Invoke(
r,
"onValidate",
nil, // no parameters
&returns,
)
return returns
}
// Returns a string representation of this construct.
func (r *jsiiProxy_Resource) ToString() *string {
var returns *string
_jsii_.Invoke(
r,
"toString",
nil, // no parameters
&returns,
)
return returns
}
// Initialization properties for resources.
type ResourceProps struct {
// Metadata that all persisted resources must have, which includes all objects users must create.
Metadata *cdk8s.ApiObjectMetadata `json:"metadata"`
}
// Restart policy for all containers within the pod.
type RestartPolicy string
const (
RestartPolicy_ALWAYS RestartPolicy = "ALWAYS"
RestartPolicy_ON_FAILURE RestartPolicy = "ON_FAILURE"
RestartPolicy_NEVER RestartPolicy = "NEVER"
)
// Kubernetes Secrets let you store and manage sensitive information, such as passwords, OAuth tokens, and ssh keys.
//
// Storing confidential information in a
// Secret is safer and more flexible than putting it verbatim in a Pod
// definition or in a container image.
// See: https://kubernetes.io/docs/concepts/configuration/secret
//
type Secret interface {
Resource
ISecret
ApiObject() cdk8s.ApiObject
Metadata() cdk8s.ApiObjectMetadataDefinition
Name() *string
AddStringData(key *string, value *string)
GetStringData(key *string) *string
OnPrepare()
OnSynthesize(session constructs.ISynthesisSession)
OnValidate() *[]*string
ToString() *string
}
// The jsii proxy struct for Secret
type jsiiProxy_Secret struct {
jsiiProxy_Resource
jsiiProxy_ISecret
}
func (j *jsiiProxy_Secret) ApiObject() cdk8s.ApiObject {
var returns cdk8s.ApiObject
_jsii_.Get(
j,
"apiObject",
&returns,
)
return returns
}
func (j *jsiiProxy_Secret) Metadata() cdk8s.ApiObjectMetadataDefinition {
var returns cdk8s.ApiObjectMetadataDefinition
_jsii_.Get(
j,
"metadata",
&returns,
)
return returns
}
func (j *jsiiProxy_Secret) Name() *string {
var returns *string
_jsii_.Get(
j,
"name",
&returns,
)
return returns
}
func NewSecret(scope constructs.Construct, id *string, props *SecretProps) Secret {
_init_.Initialize()
j := jsiiProxy_Secret{}
_jsii_.Create(
"cdk8s-plus-17.Secret",
[]interface{}{scope, id, props},
&j,
)
return &j
}
func NewSecret_Override(s Secret, scope constructs.Construct, id *string, props *SecretProps) {
_init_.Initialize()
_jsii_.Create(
"cdk8s-plus-17.Secret",
[]interface{}{scope, id, props},
s,
)
}
// Imports a secret from the cluster as a reference.
func Secret_FromSecretName(name *string) ISecret {
_init_.Initialize()
var returns ISecret
_jsii_.StaticInvoke(
"cdk8s-plus-17.Secret",
"fromSecretName",
[]interface{}{name},
&returns,
)
return returns
}
// Adds a string data field to the secret.
func (s *jsiiProxy_Secret) AddStringData(key *string, value *string) {
_jsii_.InvokeVoid(
s,
"addStringData",
[]interface{}{key, value},
)
}
// Gets a string data field by key, or undefined if the key does not exist.
func (s *jsiiProxy_Secret) GetStringData(key *string) *string {
var returns *string
_jsii_.Invoke(
s,
"getStringData",
[]interface{}{key},
&returns,
)
return returns
}
// Perform final modifications before synthesis.
//
// This method can be implemented by derived constructs in order to perform
// final changes before synthesis. prepare() will be called after child
// constructs have been prepared.
//
// This is an advanced framework feature. Only use this if you
// understand the implications.
func (s *jsiiProxy_Secret) OnPrepare() {
_jsii_.InvokeVoid(
s,
"onPrepare",
nil, // no parameters
)
}
// Allows this construct to emit artifacts into the cloud assembly during synthesis.
//
// This method is usually implemented by framework-level constructs such as `Stack` and `Asset`
// as they participate in synthesizing the cloud assembly.
func (s *jsiiProxy_Secret) OnSynthesize(session constructs.ISynthesisSession) {
_jsii_.InvokeVoid(
s,
"onSynthesize",
[]interface{}{session},
)
}
// Validate the current construct.
//
// This method can be implemented by derived constructs in order to perform
// validation logic. It is called on all constructs before synthesis.
//
// Returns: An array of validation error messages, or an empty array if the construct is valid.
// Deprecated: use `Node.addValidation()` to subscribe validation functions on this construct
// instead of overriding this method.
func (s *jsiiProxy_Secret) OnValidate() *[]*string {
var returns *[]*string
_jsii_.Invoke(
s,
"onValidate",
nil, // no parameters
&returns,
)
return returns
}
// Returns a string representation of this construct.
func (s *jsiiProxy_Secret) ToString() *string {
var returns *string
_jsii_.Invoke(
s,
"toString",
nil, // no parameters
&returns,
)
return returns
}
type SecretProps struct {
// Metadata that all persisted resources must have, which includes all objects users must create.
Metadata *cdk8s.ApiObjectMetadata `json:"metadata"`
// stringData allows specifying non-binary secret data in string form.
//
// It is
// provided as a write-only convenience method. All keys and values are merged
// into the data field on write, overwriting any existing values. It is never
// output when reading from the API.
StringData *map[string]*string `json:"stringData"`
// Optional type associated with the secret.
//
// Used to facilitate programmatic
// handling of secret data by various controllers.
Type *string `json:"type"`
}
// Represents a specific value in JSON secret.
type SecretValue struct {
// The JSON key.
Key *string `json:"key"`
// The secret.
Secret ISecret `json:"secret"`
}
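// Example (illustrative sketch, not part of the generated bindings): creating a
// Secret, adding string data to it, and referencing a pre-existing secret by
// name through a SecretValue. All ids, keys and values are placeholders.
func exampleSecret(scope constructs.Construct) SecretValue {
	id := "api-credentials"
	secret := NewSecret(scope, &id, &SecretProps{})
	key := "token"
	value := "placeholder-token"
	secret.AddStringData(&key, &value)
	// Reference a secret that already exists in the cluster.
	existingName := "registry-creds"
	existing := Secret_FromSecretName(&existingName)
	// A SecretValue points at a single key within that secret.
	passwordKey := "password"
	return SecretValue{Key: &passwordKey, Secret: existing}
}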
// An abstract way to expose an application running on a set of Pods as a network service.
//
// With Kubernetes you don't need to modify your application to use an unfamiliar service discovery mechanism.
// Kubernetes gives Pods their own IP addresses and a single DNS name for a set of Pods, and can load-balance across them.
//
// For example, consider a stateless image-processing backend which is running with 3 replicas. Those replicas are fungible—frontends do not care which backend they use.
// While the actual Pods that compose the backend set may change, the frontend clients should not need to be aware of that,
// nor should they need to keep track of the set of backends themselves.
// The Service abstraction enables this decoupling.
//
// If you're able to use Kubernetes APIs for service discovery in your application, you can query the API server for Endpoints,
// that get updated whenever the set of Pods in a Service changes. For non-native applications, Kubernetes offers ways to place a network port
// or load balancer in between your application and the backend Pods.
type Service interface {
Resource
ApiObject() cdk8s.ApiObject
ClusterIP() *string
ExternalName() *string
Metadata() cdk8s.ApiObjectMetadataDefinition
Name() *string
Ports() *[]*ServicePort
Selector() *map[string]*string
Type() ServiceType
AddDeployment(deployment Deployment, port *float64, options *ServicePortOptions)
AddSelector(label *string, value *string)
OnPrepare()
OnSynthesize(session constructs.ISynthesisSession)
OnValidate() *[]*string
Serve(port *float64, options *ServicePortOptions)
ToString() *string
}
// The jsii proxy struct for Service
type jsiiProxy_Service struct {
jsiiProxy_Resource
}
func (j *jsiiProxy_Service) ApiObject() cdk8s.ApiObject {
var returns cdk8s.ApiObject
_jsii_.Get(
j,
"apiObject",
&returns,
)
return returns
}
func (j *jsiiProxy_Service) ClusterIP() *string {
var returns *string
_jsii_.Get(
j,
"clusterIP",
&returns,
)
return returns
}
func (j *jsiiProxy_Service) ExternalName() *string {
var returns *string
_jsii_.Get(
j,
"externalName",
&returns,
)
return returns
}
func (j *jsiiProxy_Service) Metadata() cdk8s.ApiObjectMetadataDefinition {
var returns cdk8s.ApiObjectMetadataDefinition
_jsii_.Get(
j,
"metadata",
&returns,
)
return returns
}
func (j *jsiiProxy_Service) Name() *string {
var returns *string
_jsii_.Get(
j,
"name",
&returns,
)
return returns
}
func (j *jsiiProxy_Service) Ports() *[]*ServicePort {
var returns *[]*ServicePort
_jsii_.Get(
j,
"ports",
&returns,
)
return returns
}
func (j *jsiiProxy_Service) Selector() *map[string]*string {
var returns *map[string]*string
_jsii_.Get(
j,
"selector",
&returns,
)
return returns
}
func (j *jsiiProxy_Service) Type() ServiceType {
var returns ServiceType
_jsii_.Get(
j,
"type",
&returns,
)
return returns
}
func NewService(scope constructs.Construct, id *string, props *ServiceProps) Service {
_init_.Initialize()
j := jsiiProxy_Service{}
_jsii_.Create(
"cdk8s-plus-17.Service",
[]interface{}{scope, id, props},
&j,
)
return &j
}
func NewService_Override(s Service, scope constructs.Construct, id *string, props *ServiceProps) {
_init_.Initialize()
_jsii_.Create(
"cdk8s-plus-17.Service",
[]interface{}{scope, id, props},
s,
)
}
// Associate a deployment to this service.
//
// If no targetPort is specified in the portOptions, requests will be routed
// to the port exposed by the first container in the deployment's pods.
// The deployment's `labelSelector` will be used to select pods.
func (s *jsiiProxy_Service) AddDeployment(deployment Deployment, port *float64, options *ServicePortOptions) {
_jsii_.InvokeVoid(
s,
"addDeployment",
[]interface{}{deployment, port, options},
)
}
// Services defined using this spec will select pods according to the provided label.
func (s *jsiiProxy_Service) AddSelector(label *string, value *string) {
_jsii_.InvokeVoid(
s,
"addSelector",
[]interface{}{label, value},
)
}
// Perform final modifications before synthesis.
//
// This method can be implemented by derived constructs in order to perform
// final changes before synthesis. prepare() will be called after child
// constructs have been prepared.
//
// This is an advanced framework feature. Only use this if you
// understand the implications.
func (s *jsiiProxy_Service) OnPrepare() {
_jsii_.InvokeVoid(
s,
"onPrepare",
nil, // no parameters
)
}
// Allows this construct to emit artifacts into the cloud assembly during synthesis.
//
// This method is usually implemented by framework-level constructs such as `Stack` and `Asset`
// as they participate in synthesizing the cloud assembly.
func (s *jsiiProxy_Service) OnSynthesize(session constructs.ISynthesisSession) {
_jsii_.InvokeVoid(
s,
"onSynthesize",
[]interface{}{session},
)
}
// Validate the current construct.
//
// This method can be implemented by derived constructs in order to perform
// validation logic. It is called on all constructs before synthesis.
//
// Returns: An array of validation error messages, or an empty array if the construct is valid.
// Deprecated: use `Node.addValidation()` to subscribe validation functions on this construct
// instead of overriding this method.
func (s *jsiiProxy_Service) OnValidate() *[]*string {
var returns *[]*string
_jsii_.Invoke(
s,
"onValidate",
nil, // no parameters
&returns,
)
return returns
}
// Configure a port the service will bind to.
//
// This method can be called multiple times.
func (s *jsiiProxy_Service) Serve(port *float64, options *ServicePortOptions) {
_jsii_.InvokeVoid(
s,
"serve",
[]interface{}{port, options},
)
}
// Returns a string representation of this construct.
func (s *jsiiProxy_Service) ToString() *string {
var returns *string
_jsii_.Invoke(
s,
"toString",
nil, // no parameters
&returns,
)
return returns
}
// A service account provides an identity for processes that run in a Pod.
//
// When you (a human) access the cluster (for example, using kubectl), you are
// authenticated by the apiserver as a particular User Account (currently this
// is usually admin, unless your cluster administrator has customized your
// cluster). Processes in containers inside pods can also contact the apiserver.
// When they do, they are authenticated as a particular Service Account (for
// example, default).
// See: https://kubernetes.io/docs/tasks/configure-pod-container/configure-service-account
//
type ServiceAccount interface {
Resource
IServiceAccount
ApiObject() cdk8s.ApiObject
Metadata() cdk8s.ApiObjectMetadataDefinition
Name() *string
Secrets() *[]ISecret
AddSecret(secret ISecret)
OnPrepare()
OnSynthesize(session constructs.ISynthesisSession)
OnValidate() *[]*string
ToString() *string
}
// The jsii proxy struct for ServiceAccount
type jsiiProxy_ServiceAccount struct {
jsiiProxy_Resource
jsiiProxy_IServiceAccount
}
func (j *jsiiProxy_ServiceAccount) ApiObject() cdk8s.ApiObject {
var returns cdk8s.ApiObject
_jsii_.Get(
j,
"apiObject",
&returns,
)
return returns
}
func (j *jsiiProxy_ServiceAccount) Metadata() cdk8s.ApiObjectMetadataDefinition {
var returns cdk8s.ApiObjectMetadataDefinition
_jsii_.Get(
j,
"metadata",
&returns,
)
return returns
}
func (j *jsiiProxy_ServiceAccount) Name() *string {
var returns *string
_jsii_.Get(
j,
"name",
&returns,
)
return returns
}
func (j *jsiiProxy_ServiceAccount) Secrets() *[]ISecret {
var returns *[]ISecret
_jsii_.Get(
j,
"secrets",
&returns,
)
return returns
}
func NewServiceAccount(scope constructs.Construct, id *string, props *ServiceAccountProps) ServiceAccount {
_init_.Initialize()
j := jsiiProxy_ServiceAccount{}
_jsii_.Create(
"cdk8s-plus-17.ServiceAccount",
[]interface{}{scope, id, props},
&j,
)
return &j
}
func NewServiceAccount_Override(s ServiceAccount, scope constructs.Construct, id *string, props *ServiceAccountProps) {
_init_.Initialize()
_jsii_.Create(
"cdk8s-plus-17.ServiceAccount",
[]interface{}{scope, id, props},
s,
)
}
// Imports a service account from the cluster as a reference.
func ServiceAccount_FromServiceAccountName(name *string) IServiceAccount {
_init_.Initialize()
var returns IServiceAccount
_jsii_.StaticInvoke(
"cdk8s-plus-17.ServiceAccount",
"fromServiceAccountName",
[]interface{}{name},
&returns,
)
return returns
}
// Allow a secret to be accessed by pods using this service account.
func (s *jsiiProxy_ServiceAccount) AddSecret(secret ISecret) {
_jsii_.InvokeVoid(
s,
"addSecret",
[]interface{}{secret},
)
}
// Perform final modifications before synthesis.
//
// This method can be implemented by derived constructs in order to perform
// final changes before synthesis. prepare() will be called after child
// constructs have been prepared.
//
// This is an advanced framework feature. Only use this if you
// understand the implications.
func (s *jsiiProxy_ServiceAccount) OnPrepare() {
_jsii_.InvokeVoid(
s,
"onPrepare",
nil, // no parameters
)
}
// Allows this construct to emit artifacts into the cloud assembly during synthesis.
//
// This method is usually implemented by framework-level constructs such as `Stack` and `Asset`
// as they participate in synthesizing the cloud assembly.
func (s *jsiiProxy_ServiceAccount) OnSynthesize(session constructs.ISynthesisSession) {
_jsii_.InvokeVoid(
s,
"onSynthesize",
[]interface{}{session},
)
}
// Validate the current construct.
//
// This method can be implemented by derived constructs in order to perform
// validation logic. It is called on all constructs before synthesis.
//
// Returns: An array of validation error messages, or an empty array if the construct is valid.
// Deprecated: use `Node.addValidation()` to subscribe validation functions on this construct
// instead of overriding this method.
func (s *jsiiProxy_ServiceAccount) OnValidate() *[]*string {
var returns *[]*string
_jsii_.Invoke(
s,
"onValidate",
nil, // no parameters
&returns,
)
return returns
}
// Returns a string representation of this construct.
func (s *jsiiProxy_ServiceAccount) ToString() *string {
var returns *string
_jsii_.Invoke(
s,
"toString",
nil, // no parameters
&returns,
)
return returns
}
// Properties for initialization of `ServiceAccount`.
type ServiceAccountProps struct {
// Metadata that all persisted resources must have, which includes all objects users must create.
Metadata *cdk8s.ApiObjectMetadata `json:"metadata"`
// List of secrets allowed to be used by pods running using this ServiceAccount.
// See: https://kubernetes.io/docs/concepts/configuration/secret
//
Secrets *[]ISecret `json:"secrets"`
}
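// Example (illustrative sketch, not part of the generated bindings): a
// ServiceAccount that is allowed to use an existing cluster secret. The id and
// secret name are placeholders.
func exampleServiceAccount(scope constructs.Construct) {
	id := "pipeline-sa"
	sa := NewServiceAccount(scope, &id, &ServiceAccountProps{})
	secretName := "registry-creds"
	sa.AddSecret(Secret_FromSecretName(&secretName))
}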
// Options for setting up backends for ingress rules.
type ServiceIngressV1BetaBackendOptions struct {
// The port to use to access the service.
//
// - This option will fail if the service does not expose any ports.
// - If the service exposes multiple ports, this option must be specified.
// - If the service exposes a single port, this option is optional and if
// specified, it must be the same port exposed by the service.
Port *float64 `json:"port"`
}
// Definition of a service port.
type ServicePort struct {
// The name of this port within the service.
//
// This must be a DNS_LABEL. All
// ports within a ServiceSpec must have unique names. This maps to the 'Name'
// field in EndpointPort objects. Optional if only one ServicePort is defined
// on this service.
Name *string `json:"name"`
// The port on each node on which this service is exposed when type=NodePort or LoadBalancer.
//
// Usually assigned by the system. If specified, it will be
// allocated to the service if unused or else creation of the service will
// fail. Default is to auto-allocate a port if the ServiceType of this Service
// requires one.
// See: https://kubernetes.io/docs/concepts/services-networking/service/#type-nodeport
//
NodePort *float64 `json:"nodePort"`
// The IP protocol for this port.
//
// Supports "TCP", "UDP", and "SCTP". Default is TCP.
Protocol Protocol `json:"protocol"`
// The port number the service will redirect to.
TargetPort *float64 `json:"targetPort"`
// The port number the service will bind to.
Port *float64 `json:"port"`
}
type ServicePortOptions struct {
// The name of this port within the service.
//
// This must be a DNS_LABEL. All
// ports within a ServiceSpec must have unique names. This maps to the 'Name'
// field in EndpointPort objects. Optional if only one ServicePort is defined
// on this service.
Name *string `json:"name"`
// The port on each node on which this service is exposed when type=NodePort or LoadBalancer.
//
// Usually assigned by the system. If specified, it will be
// allocated to the service if unused or else creation of the service will
// fail. Default is to auto-allocate a port if the ServiceType of this Service
// requires one.
// See: https://kubernetes.io/docs/concepts/services-networking/service/#type-nodeport
//
NodePort *float64 `json:"nodePort"`
// The IP protocol for this port.
//
// Supports "TCP", "UDP", and "SCTP". Default is TCP.
Protocol Protocol `json:"protocol"`
// The port number the service will redirect to.
TargetPort *float64 `json:"targetPort"`
}
// Properties for initialization of `Service`.
type ServiceProps struct {
// Metadata that all persisted resources must have, which includes all objects users must create.
Metadata *cdk8s.ApiObjectMetadata `json:"metadata"`
// The IP address of the service, usually assigned randomly by the master.
//
// If an address is specified manually and is not in use by others, it
// will be allocated to the service; otherwise, creation of the service will
// fail. This field can not be changed through updates. Valid values are
// "None", empty string (""), or a valid IP address. "None" can be specified
// for headless services when proxying is not required. Only applies to types
// ClusterIP, NodePort, and LoadBalancer. Ignored if type is ExternalName.
// See: https://kubernetes.io/docs/concepts/services-networking/service/#virtual-ips-and-service-proxies
//
ClusterIP *string `json:"clusterIP"`
// A list of IP addresses for which nodes in the cluster will also accept traffic for this service.
//
// These IPs are not managed by Kubernetes. The user
// is responsible for ensuring that traffic arrives at a node with this IP. A
// common example is external load-balancers that are not part of the
// Kubernetes system.
ExternalIPs *[]*string `json:"externalIPs"`
// The externalName to be used when ServiceType.EXTERNAL_NAME is set.
ExternalName *string `json:"externalName"`
// A list of CIDR IP addresses that, if specified and supported by the platform, will restrict traffic through the cloud-provider load-balancer to the specified client IPs.
//
// More info: https://kubernetes.io/docs/tasks/access-application-cluster/configure-cloud-provider-firewall/
LoadBalancerSourceRanges *[]*string `json:"loadBalancerSourceRanges"`
// The port exposed by this service.
//
// More info: https://kubernetes.io/docs/concepts/services-networking/service/#virtual-ips-and-service-proxies
Ports *[]*ServicePort `json:"ports"`
// Determines how the Service is exposed.
//
// More info: https://kubernetes.io/docs/concepts/services-networking/service/#publishing-services-service-types
Type ServiceType `json:"type"`
}
// For some parts of your application (for example, frontends) you may want to expose a Service onto an external IP address that is outside of your cluster.
//
// Kubernetes ServiceTypes allow you to specify what kind of Service you want.
// The default is ClusterIP.
type ServiceType string
const (
ServiceType_CLUSTER_IP ServiceType = "CLUSTER_IP"
ServiceType_NODE_PORT ServiceType = "NODE_PORT"
ServiceType_LOAD_BALANCER ServiceType = "LOAD_BALANCER"
ServiceType_EXTERNAL_NAME ServiceType = "EXTERNAL_NAME"
)
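// Example (illustrative sketch, not part of the generated bindings): exposing a
// Deployment through a load-balanced Service. The id and port numbers are
// placeholders; the Deployment is assumed to be created elsewhere in the chart.
func exampleService(scope constructs.Construct, backend Deployment) {
	id := "frontend"
	svc := NewService(scope, &id, &ServiceProps{
		Type: ServiceType_LOAD_BALANCER,
	})
	// Route port 80 on the service to port 8080 on the deployment's pods.
	port := float64(80)
	target := float64(8080)
	svc.AddDeployment(backend, &port, &ServicePortOptions{TargetPort: &target})
	// Additional ports can be bound later with Serve().
	metricsPort := float64(9090)
	svc.Serve(&metricsPort, &ServicePortOptions{})
}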
// StatefulSet is the workload API object used to manage stateful applications.
//
// Manages the deployment and scaling of a set of Pods, and provides guarantees
// about the ordering and uniqueness of these Pods.
//
// Like a Deployment, a StatefulSet manages Pods that are based on an identical
// container spec. Unlike a Deployment, a StatefulSet maintains a sticky identity
// for each of their Pods. These pods are created from the same spec, but are not
// interchangeable: each has a persistent identifier that it maintains across any
// rescheduling.
//
// If you want to use storage volumes to provide persistence for your workload, you
// can use a StatefulSet as part of the solution. Although individual Pods in a StatefulSet
// are susceptible to failure, the persistent Pod identifiers make it easier to match existing
// volumes to the new Pods that replace any that have failed.
//
// Using StatefulSets
// ------------------
// StatefulSets are valuable for applications that require one or more of the following.
//
// - Stable, unique network identifiers.
// - Stable, persistent storage.
// - Ordered, graceful deployment and scaling.
// - Ordered, automated rolling updates.
type StatefulSet interface {
Resource
IPodTemplate
ApiObject() cdk8s.ApiObject
Containers() *[]Container
LabelSelector() *map[string]*string
Metadata() cdk8s.ApiObjectMetadataDefinition
Name() *string
PodManagementPolicy() PodManagementPolicy
PodMetadata() cdk8s.ApiObjectMetadataDefinition
Replicas() *float64
RestartPolicy() RestartPolicy
ServiceAccount() IServiceAccount
Volumes() *[]Volume
AddContainer(container *ContainerProps) Container
AddVolume(volume Volume)
OnPrepare()
OnSynthesize(session constructs.ISynthesisSession)
OnValidate() *[]*string
SelectByLabel(key *string, value *string)
ToString() *string
}
// The jsii proxy struct for StatefulSet
type jsiiProxy_StatefulSet struct {
jsiiProxy_Resource
jsiiProxy_IPodTemplate
}
func (j *jsiiProxy_StatefulSet) ApiObject() cdk8s.ApiObject {
var returns cdk8s.ApiObject
_jsii_.Get(
j,
"apiObject",
&returns,
)
return returns
}
func (j *jsiiProxy_StatefulSet) Containers() *[]Container {
var returns *[]Container
_jsii_.Get(
j,
"containers",
&returns,
)
return returns
}
func (j *jsiiProxy_StatefulSet) LabelSelector() *map[string]*string {
var returns *map[string]*string
_jsii_.Get(
j,
"labelSelector",
&returns,
)
return returns
}
func (j *jsiiProxy_StatefulSet) Metadata() cdk8s.ApiObjectMetadataDefinition {
var returns cdk8s.ApiObjectMetadataDefinition
_jsii_.Get(
j,
"metadata",
&returns,
)
return returns
}
func (j *jsiiProxy_StatefulSet) Name() *string {
var returns *string
_jsii_.Get(
j,
"name",
&returns,
)
return returns
}
func (j *jsiiProxy_StatefulSet) PodManagementPolicy() PodManagementPolicy {
var returns PodManagementPolicy
_jsii_.Get(
j,
"podManagementPolicy",
&returns,
)
return returns
}
func (j *jsiiProxy_StatefulSet) PodMetadata() cdk8s.ApiObjectMetadataDefinition {
var returns cdk8s.ApiObjectMetadataDefinition
_jsii_.Get(
j,
"podMetadata",
&returns,
)
return returns
}
func (j *jsiiProxy_StatefulSet) Replicas() *float64 {
var returns *float64
_jsii_.Get(
j,
"replicas",
&returns,
)
return returns
}
func (j *jsiiProxy_StatefulSet) RestartPolicy() RestartPolicy {
var returns RestartPolicy
_jsii_.Get(
j,
"restartPolicy",
&returns,
)
return returns
}
func (j *jsiiProxy_StatefulSet) ServiceAccount() IServiceAccount {
var returns IServiceAccount
_jsii_.Get(
j,
"serviceAccount",
&returns,
)
return returns
}
func (j *jsiiProxy_StatefulSet) Volumes() *[]Volume {
var returns *[]Volume
_jsii_.Get(
j,
"volumes",
&returns,
)
return returns
}
func NewStatefulSet(scope constructs.Construct, id *string, props *StatefulSetProps) StatefulSet {
_init_.Initialize()
j := jsiiProxy_StatefulSet{}
_jsii_.Create(
"cdk8s-plus-17.StatefulSet",
[]interface{}{scope, id, props},
&j,
)
return &j
}
func NewStatefulSet_Override(s StatefulSet, scope constructs.Construct, id *string, props *StatefulSetProps) {
_init_.Initialize()
_jsii_.Create(
"cdk8s-plus-17.StatefulSet",
[]interface{}{scope, id, props},
s,
)
}
// Add a container to the pod.
func (s *jsiiProxy_StatefulSet) AddContainer(container *ContainerProps) Container {
var returns Container
_jsii_.Invoke(
s,
"addContainer",
[]interface{}{container},
&returns,
)
return returns
}
// Add a volume to the pod.
func (s *jsiiProxy_StatefulSet) AddVolume(volume Volume) {
_jsii_.InvokeVoid(
s,
"addVolume",
[]interface{}{volume},
)
}
// Perform final modifications before synthesis.
//
// This method can be implemented by derived constructs in order to perform
// final changes before synthesis. prepare() will be called after child
// constructs have been prepared.
//
// This is an advanced framework feature. Only use this if you
// understand the implications.
func (s *jsiiProxy_StatefulSet) OnPrepare() {
_jsii_.InvokeVoid(
s,
"onPrepare",
nil, // no parameters
)
}
// Allows this construct to emit artifacts into the cloud assembly during synthesis.
//
// This method is usually implemented by framework-level constructs such as `Stack` and `Asset`
// as they participate in synthesizing the cloud assembly.
func (s *jsiiProxy_StatefulSet) OnSynthesize(session constructs.ISynthesisSession) {
_jsii_.InvokeVoid(
s,
"onSynthesize",
[]interface{}{session},
)
}
// Validate the current construct.
//
// This method can be implemented by derived constructs in order to perform
// validation logic. It is called on all constructs before synthesis.
//
// Returns: An array of validation error messages, or an empty array if the construct is valid.
// Deprecated: use `Node.addValidation()` to subscribe validation functions on this construct
// instead of overriding this method.
func (s *jsiiProxy_StatefulSet) OnValidate() *[]*string {
var returns *[]*string
_jsii_.Invoke(
s,
"onValidate",
nil, // no parameters
&returns,
)
return returns
}
// Configure a label selector for this StatefulSet.
//
// Pods that have the label will be selected by StatefulSets configured with this spec.
func (s *jsiiProxy_StatefulSet) SelectByLabel(key *string, value *string) {
_jsii_.InvokeVoid(
s,
"selectByLabel",
[]interface{}{key, value},
)
}
// Returns a string representation of this construct.
func (s *jsiiProxy_StatefulSet) ToString() *string {
var returns *string
_jsii_.Invoke(
s,
"toString",
nil, // no parameters
&returns,
)
return returns
}
// Properties for initialization of `StatefulSet`.
type StatefulSetProps struct {
// Metadata that all persisted resources must have, which includes all objects users must create.
Metadata *cdk8s.ApiObjectMetadata `json:"metadata"`
// List of containers belonging to the pod.
//
// Containers cannot currently be
// added or removed. There must be at least one container in a Pod.
//
// You can add additional containers using `podSpec.addContainer()`
Containers *[]*ContainerProps `json:"containers"`
// Restart policy for all containers within the pod.
// See: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle/#restart-policy
//
RestartPolicy RestartPolicy `json:"restartPolicy"`
// A service account provides an identity for processes that run in a Pod.
//
// When you (a human) access the cluster (for example, using kubectl), you are
// authenticated by the apiserver as a particular User Account (currently this
// is usually admin, unless your cluster administrator has customized your
// cluster). Processes in containers inside pods can also contact the
// apiserver. When they do, they are authenticated as a particular Service
// Account (for example, default).
// See: https://kubernetes.io/docs/tasks/configure-pod-container/configure-service-account/
//
ServiceAccount IServiceAccount `json:"serviceAccount"`
// List of volumes that can be mounted by containers belonging to the pod.
//
// You can also add volumes later using `podSpec.addVolume()`
// See: https://kubernetes.io/docs/concepts/storage/volumes
//
Volumes *[]Volume `json:"volumes"`
// The pod metadata.
PodMetadata *cdk8s.ApiObjectMetadata `json:"podMetadata"`
// Service to associate with the statefulset.
Service Service `json:"service"`
// Automatically allocates a pod selector for this statefulset.
//
// If this is set to `false` you must define your selector through
// `statefulset.podMetadata.addLabel()` and `statefulset.selectByLabel()`.
DefaultSelector *bool `json:"defaultSelector"`
// Pod management policy to use for this statefulset.
PodManagementPolicy PodManagementPolicy `json:"podManagementPolicy"`
// Number of desired pods.
Replicas *float64 `json:"replicas"`
}
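// statefulSetExample is an illustrative sketch (not part of the generated
// bindings) showing one way these types fit together. The chart, service and
// container image are assumptions made for the example, and the jsii helpers
// are assumed to come from github.com/aws/jsii-runtime-go.
func statefulSetExample(chart constructs.Construct, service Service) StatefulSet {
	// Create the StatefulSet with a single container and three replicas.
	set := NewStatefulSet(chart, jsii.String("web"), &StatefulSetProps{
		Service:    service,
		Containers: &[]*ContainerProps{{Image: jsii.String("nginx")}},
		Replicas:   jsii.Number(3),
	})
	// Attach an emptyDir volume that containers in the pod can mount.
	set.AddVolume(Volume_FromEmptyDir(jsii.String("scratch"), nil))
	return set
}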
// Volume represents a named volume in a pod that may be accessed by any container in the pod.
//
// Docker also has a concept of volumes, though it is somewhat looser and less
// managed. In Docker, a volume is simply a directory on disk or in another
// Container. Lifetimes are not managed and until very recently there were only
// local-disk-backed volumes. Docker now provides volume drivers, but the
// functionality is very limited for now (e.g. as of Docker 1.7 only one volume
// driver is allowed per Container and there is no way to pass parameters to
// volumes).
//
// A Kubernetes volume, on the other hand, has an explicit lifetime - the same
// as the Pod that encloses it. Consequently, a volume outlives any Containers
// that run within the Pod, and data is preserved across Container restarts. Of
// course, when a Pod ceases to exist, the volume will cease to exist, too.
// Perhaps more importantly than this, Kubernetes supports many types of
// volumes, and a Pod can use any number of them simultaneously.
//
// At its core, a volume is just a directory, possibly with some data in it,
// which is accessible to the Containers in a Pod. How that directory comes to
// be, the medium that backs it, and the contents of it are determined by the
// particular volume type used.
//
// To use a volume, a Pod specifies what volumes to provide for the Pod (the
// .spec.volumes field) and where to mount those into Containers (the
// .spec.containers[*].volumeMounts field).
//
// A process in a container sees a filesystem view composed from their Docker
// image and volumes. The Docker image is at the root of the filesystem
// hierarchy, and any volumes are mounted at the specified paths within the
// image. Volumes cannot mount onto other volumes.
type Volume interface {
Name() *string
}
// The jsii proxy struct for Volume
type jsiiProxy_Volume struct {
_ byte // padding
}
func (j *jsiiProxy_Volume) Name() *string {
var returns *string
_jsii_.Get(
j,
"name",
&returns,
)
return returns
}
func NewVolume(name *string, config interface{}) Volume {
_init_.Initialize()
j := jsiiProxy_Volume{}
_jsii_.Create(
"cdk8s-plus-17.Volume",
[]interface{}{name, config},
&j,
)
return &j
}
func NewVolume_Override(v Volume, name *string, config interface{}) {
_init_.Initialize()
_jsii_.Create(
"cdk8s-plus-17.Volume",
[]interface{}{name, config},
v,
)
}
// Populate the volume from a ConfigMap.
//
// The configMap resource provides a way to inject configuration data into
// Pods. The data stored in a ConfigMap object can be referenced in a volume
// of type configMap and then consumed by containerized applications running
// in a Pod.
//
// When referencing a configMap object, you can simply provide its name in the
// volume to reference it. You can also customize the path to use for a
// specific entry in the ConfigMap.
func Volume_FromConfigMap(configMap IConfigMap, options *ConfigMapVolumeOptions) Volume {
_init_.Initialize()
var returns Volume
_jsii_.StaticInvoke(
"cdk8s-plus-17.Volume",
"fromConfigMap",
[]interface{}{configMap, options},
&returns,
)
return returns
}
// An emptyDir volume is first created when a Pod is assigned to a Node, and exists as long as that Pod is running on that node.
//
// As the name says, it is
// initially empty. Containers in the Pod can all read and write the same
// files in the emptyDir volume, though that volume can be mounted at the same
// or different paths in each Container. When a Pod is removed from a node for
// any reason, the data in the emptyDir is deleted forever.
// See: http://kubernetes.io/docs/user-guide/volumes#emptydir
//
func Volume_FromEmptyDir(name *string, options *EmptyDirVolumeOptions) Volume {
_init_.Initialize()
var returns Volume
_jsii_.StaticInvoke(
"cdk8s-plus-17.Volume",
"fromEmptyDir",
[]interface{}{name, options},
&returns,
)
return returns
}
// Mount a volume from the pod to the container.
type VolumeMount struct {
// Determines how mounts are propagated from the host to container and the other way around.
//
// When not set, MountPropagationNone is used.
//
// Mount propagation allows for sharing volumes mounted by a Container to
// other Containers in the same Pod, or even to other Pods on the same node.
//
// This field is beta in 1.10.
Propagation MountPropagation `json:"propagation"`
// Mounted read-only if true, read-write otherwise (false or unspecified).
//
// Defaults to false.
ReadOnly *bool `json:"readOnly"`
	// Path within the volume from which the container's volume should be mounted.
SubPath *string `json:"subPath"`
// Expanded path within the volume from which the container's volume should be mounted.
//
// Behaves similarly to SubPath but environment variable references
// $(VAR_NAME) are expanded using the container's environment. Defaults to ""
// (volume's root). SubPathExpr and SubPath are mutually exclusive. This field
// is beta in 1.15.
SubPathExpr *string `json:"subPathExpr"`
// Path within the container at which the volume should be mounted.
//
// Must not
// contain ':'.
Path *string `json:"path"`
// The volume to mount.
Volume Volume `json:"volume"`
}
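// volumeMountExample is an illustrative sketch (not part of the generated
// bindings) pairing a Volume with a VolumeMount. The mount path is an
// assumption made for the example, and the jsii helpers are assumed to come
// from github.com/aws/jsii-runtime-go.
func volumeMountExample() *VolumeMount {
	// An emptyDir volume shared by the containers in the pod.
	data := Volume_FromEmptyDir(jsii.String("data"), nil)
	return &VolumeMount{
		Volume:   data,
		Path:     jsii.String("/var/data"),
		ReadOnly: jsii.Bool(false),
	}
}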
|
df42c08b37121f092c73005a1cee21a2e69aed1b
|
[
"Markdown",
"Go Module",
"Go"
] | 5
|
Markdown
|
cdk8s-team/cdk8s-go
|
0a599153938e9d49ef7c42f8d619ec712cede510
|
3a560523bc8299ca260cc06b4588fb0ed56c5e31
|
refs/heads/master
|
<file_sep># Jenkins_the_DnD_Bot
Jenkins is a Discord bot that helps me DM. He is still under construction, but he can already roll dice and show the players' stats and skills.
Jenkins is written in JavaScript using Node.js.
## Features
Jenkins' features include keeping track of the party's and players' gold, calculating all derived statistics for PCs, and intelligently rolling skill/stat checks. The full list of his features is detailed in the [wiki](https://github.com/jac21934/Jenkins_the_DnD_Bot/wiki).
## Dependencies
Jenkins needs the following (an install sketch follows this list):

* node (Windows) / nodejs (Linux), version > 8.0
* npm
* discord.js, from npm
* ytdl-core, from npm
* fs (a Node.js core module)
* child_process (a Node.js core module)
* opusscript, from npm
* ffmpeg
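A minimal setup sketch (assuming Node.js, npm, and ffmpeg are already installed; these commands are illustrative and not taken from the project's own docs):
```sh
# Install the npm packages Jenkins depends on (fs and child_process ship with Node.js)
npm install discord.js ytdl-core opusscript
```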
## Citations
The spell-list Jenkins uses is taken from vorpalhex's spell-list project:
[github.com/vorpalhex/srd_spells](https://github.com/vorpalhex/srd_spells)
<file_sep>var aliases = require("./aliases.json");
var config = require("./config.json");
var regex = require("./RegEx.json");
function getPlayer(string, players){
var playerID = 0
for( id in players){
if(String(players[id]["name"].get()).toLowerCase().indexOf(String(string).toLowerCase()) == 0){
if(id == config.DM_ID){
continue;
}
playerID = id;
break;
}
}
return playerID;
}
function getAliases(){
var aliasMessage = "";
var space = 28;
var discordMax = config.discordWidth;
for(alias in aliases){
var messageBuff = "";
var aliasHeader = "";
if(aliases[alias][0][0] != "\\"){
aliasHeader= aliases[alias][0] + Array(space - aliases[alias][0].length).join(" ") + '-- "' +alias + '" '; ;
}
else{
aliasHeader= alias + Array(space - String(alias).length).join(" ") + '-- "' + alias + '" '; ;
}
for(i = 0; i < aliases[alias].length; i++){
if(aliases[alias][i][0] != "\\"){
messageBuff += '"' + aliases[alias][i] + '" ';
}
}
if((messageBuff.length + aliasHeader.length) > discordMax){
var maxIndex = discordMax - (messageBuff.length + aliasHeader.length);
var buffString = "";
var newIndex = breakOnCharSpace(messageBuff,maxIndex, '"', ' ');
// if(newIndex == maxIndex || newIndex < maxIndex - 15){
// buffString = arrBuff[k].slice(maxIndex);
// arrBuff[k] = arrBuff[k].replace(buffString,"-");
// buffString = "-" + buffString;
// }
// //
// else{
buffString = messageBuff.slice(newIndex);
messageBuff = messageBuff.replace(buffString, "");
// }
messageBuff += "\n" +Array(space + String('-- ').length).join(" ") + buffString;
}
aliasMessage += aliasHeader + messageBuff + "\n\n";
}
return aliasMessage;
}
function breakUpString(someString, someBreak){
var buffArr = someString.split(someBreak);
var charMax = config.discordMax;
var Arr = [];
var buffString = "";//buffArr[0] + "\n";
for(i = 0; i < buffArr.length;i++){
if(buffString.length + buffArr[i].length < charMax){
buffString += buffArr[i] + someBreak;
}
else{
Arr.push(buffString);
buffString = buffArr[i] + someBreak;
}
}
if(Arr[Arr.length-1] != String(buffString)){
Arr.push(buffString);
}
return Arr;
}
function parseStringForStat(someString){
var stat = "";
for(alias in aliases){
for(i = 0; i < aliases[alias].length; i++){
if(someString.indexOf(aliases[alias][i]) >-1){
stat = alias;
return stat;
}
else{
var aliasString = "\\b" + aliases[alias][i] + "\\b";
var re = new RegExp(aliasString);
if( someString.match(re) != null){
stat = alias;
return stat;
}
}
}
}
return stat;
}
function parseAlias(someString){
var stat = "";
var aliasString = "";
for(alias in aliases){
for(i = 0; i < aliases[alias].length; i++){
if(someString.indexOf(aliases[alias][i]) >-1){
stat = alias;
aliasString = aliases[alias][i];
return [stat, aliasString];
}
else{
var re = new RegExp(aliases[alias][i]);
if( someString.match(re) != null){
stat = alias;
aliasString = aliases[alias][i];
return [stat, aliasString];
}
}
}
}
return [stat, aliasString];
}
function inWords (num) {
var a = ['zero','one','two','three','four','five','six','seven','eight','nine','ten','eleven','twelve','thirteen','fourteen','fifteen','sixteen','seventeen','eighteen','nineteen'];
if (num > 19) return num.toString();
else return a[num];
}
function getProf(prof){
var profChar = " ";
if(Number(prof) == 1){
profChar = "*";
}
else{
profChar = " ";
}
return profChar;
}
function getRandomInt(min, max) {
min = Math.ceil(min);
max = Math.floor(max + 1);
    return Math.floor(Math.random() * (max - min)) + min; // Both the minimum and maximum are inclusive (max was incremented above)
}
function findNumberIndex(someString){
for(i = 0; i < someString.length; i++){
if(String(Number(someString[i])) == someString[i]){
return i;
}
}
return -1;
}
function parseNumberFromString(Index,someString){
var periodCheck = false;
buffVal = "";
var numFlag = true;
var initIndex = Index;
while(numFlag){
if( String(Number(someString[Index])) == someString[Index] ){
buffVal += String(someString[Index])
}
else if((someString[Index] == '.') && (periodCheck == false) ){
periodCheck = true;
if(buffVal.length == 0){
buffVal += "0";
}
buffVal += String(someString[Index]);
}
else{
numFlag = false;
// Index -=1;
break;
}
if(Index == someString.length -1 ){
numFlag = false;
// Index -=1;
break;
}
Index += 1;
}
return Number(buffVal)
}
function findSpace(someString, index){
var spaceIndex = index;
for(i = index; i >-1; i--){
if( someString[i] == " "){
spaceIndex = i;
break;
}
}
return spaceIndex;
}
function breakOnCharSpace(someString, index, char, breakchar){
var spaceIndex = index;
for(i = index; i >-1; i--){
if( someString[i] == char && i > 0 ){
if(someString[i-1] == breakchar){
spaceIndex = i-1;
break;
}
}
}
return spaceIndex;
}
function parseSum(someString){
var re = new RegExp(regex.sumCheck);
var reSplit = new RegExp(regex.sumPrefix);
var buff = someString.match(re);
var sum = 0;
while((buff = someString.match(re)) != null){
someString = someString.replace(re, "");
var sumBuff = Number(buff[0].replace(reSplit,""));
if( buff[0][0] == "-"){
sumBuff = -1 * sumBuff;
}
sum += Number(sumBuff);
}
return [sum.toFixed(2), someString];
}
function getDice(someString){
var numDie = 0;
var dieMax = 0;
var dice = [];
if(parseStringForStat(someString) != "")
{
dice = [[1,20]];
}
else{
// re = new RegExp("\\b(([1-9])([0-9]*))?d([1-9])([0-9]*)\\b");
re = new RegExp(regex.dieCheck);
var buff;
while((buff = someString.match(re)) != null){
var die = buff[0].split("d");
if( die[0] == ''){
dice.push( [ Number(1) , Number(die[1]) ] );
}
else{
dice.push( [ Number(die[0]) , Number(die[1]) ] );
}
someString = someString.replace(re, "");
}
}
return [dice, someString];
}
function getMaxWidth(text){
textArr = text.split("\n");
var max = 0;
for(i=0;i<textArr.length;i++){
max = Math.max(max, textArr[i].length);
}
return max;
}
function getModFromString(players, id, stat)
{
var modifier = 0;
if(stat.length < 1){
modifier = 0;
}
else{
var type = players[id][stat]["type"];
if(type == "stat"){
modifier += Number(players[id][stat]["modifier"]);
}
else if( type == "skill"){
modifier += Number(players[id][players[id][stat]["mainStat"]]["modifier"])
+ Number(players[id][stat]["bonus"])
+ Number(players[id][stat]["prof"]) * Number(players[id]["prof"].get());
}
else if( type == "prof"){
modifier += Number(players[id][stat].get());
}
}
return modifier;
}
function toTitleCase(str)
{
return str.replace(/\w\S*/g, function(txt){return txt.charAt(0).toUpperCase() + txt.substr(1).toLowerCase();});
}
function getRollMessage(numDieArr, maxDieArr, modifier, players, id, sumFlag, advFlag, disFlag,rollType){
var rollMessage = "";
var numDie = 0;
var maxCheck = 0;
var name = players[id]["name"].get();
// var name = "";
// for(i=0; i < players.length;i++){
// if(id == players[i].getId()){
// name = players[i].getName();
// break;
// }
// }
for(i = 0; i < numDieArr.length; i++){
numDie += Number(numDieArr[i]);
}
for(i = 0; i < maxDieArr.length; i++){
maxCheck += Number(maxDieArr[i]);
}
if(isNaN(numDie) || isNaN(maxCheck) || (numDieArr.length == 0) || (maxDieArr.length == 0) ){
rollMessage += "I didn't understand that input. I can take values like 3d6 + 2 or 12d13 - 21 or strength.\n";
}
else if ( numDieArr.length != maxDieArr.length){
rollMessage += "The number of dice vector and number of sides vector have different lengths! I do not know how this happened. Please tell Jacob.";
}
else if(numDie > 100){
rollMessage += "Please roll one hundred or less dice.\n";
}
else if(numDie == 0){
rollMessage += "Ok rolling zero dice...\n";
}
else if(((numDieArr.length >1)
|| (maxDieArr.length > 1)
|| (numDieArr[0] !=1)
|| (maxDieArr[0] !=20) )
&& (advFlag || disFlag)
){
rollMessage += "You can only roll 1d20 with advantage or disadvantage right now.";
}
else{
rollMessage += "Rolling ";
for(k = 0; k < numDieArr.length; k++){
rollMessage += numDieArr[k] + "d"+ maxDieArr[k];
if(numDieArr.length > 2){
if(k < numDieArr.length - 1){
rollMessage += ", ";
}
if( k == numDieArr.length - 2){
rollMessage += "and ";
}
}
else if(numDieArr.length == 2){
if( k < numDieArr.length - 1){
rollMessage += " and ";
}
}
}
if (Number(modifier) != 0){
if (modifier < 0){
if(numDieArr.length == 1){
rollMessage += modifier;
}
else{
rollMessage += " - " + Math.abs(modifier);
}
}
else if (modifier >0){
if(numDieArr.length > 1){
rollMessage += " + " + modifier;
}
else{
rollMessage += "+" + modifier;
}
}
}
if( rollType != ""){
for( alias in aliases){
if (rollType == alias){
rollMessage += " (" + toTitleCase(aliases[alias][0]) + ")";
}
}
}
if(advFlag)
{
rollMessage += " with advantage";
}
if(disFlag)
{
rollMessage += " with disadvantage";
}
if(name != ""){
rollMessage += " for " + name + "\n";
}
else{
rollMessage += "\n";
}
var filler = Array(rollMessage.length).join("-") + "\n";
if(sumFlag == false){
rollMessage += filler;
}
var rollSum = 0;
var multiRoll = false
if(numDieArr.length > 1){
multiRoll = true;
}
for(j = 0; j < numDieArr.length; j++){
if(multiRoll){
rollMessage += numDieArr[j] + "d" + maxDieArr[j] + ":\n";
rollMessage += Array(String(numDieArr[j] + "d" + maxDieArr[j] + ":").length + 1).join("-") + "\n" ;
}
for(k = 0; k < numDieArr[j]; k++) {
if(multiRoll){
rollMessage += " ";
}
var dieRoll = getRandomInt(1, maxDieArr[j]);
if (sumFlag == false) {
rollMessage += dieRoll;
if(maxDieArr[j] == 20){
if( dieRoll == 20){
rollMessage += " *Crit*";
}
else if(dieRoll == 1){
rollMessage += " *Crit Fail*";
}
}
rollMessage += "\n";
if (advFlag || disFlag)
{
var dieRollNew = getRandomInt(1, maxDieArr[j]);
rollMessage += dieRollNew;
if(maxDieArr[j] == 20){
if( dieRollNew == 20){
rollMessage += " *Crit*";
}
else if(dieRollNew == 1){
rollMessage += " *Crit Fail*";
}
}
rollMessage += "\n";
}
if(advFlag)
{
dieRoll = Math.max(dieRoll, dieRollNew);
}
else if(disFlag)
{
dieRoll = Math.min(dieRoll, dieRollNew);
}
}
rollSum += dieRoll;
}
if(multiRoll && (j < numDieArr.length - 1)){
rollMessage += "\n";
}
}
var sumMessage = "";
var modMessage = "";
var totMessage = "";
var advMessage = "";
var maxFill = 0;
if(numDie > 1){
sumMessage += "Sum: " + rollSum + "\n";
}
if(advFlag)
{
advMessage += "Max: " + dieRoll + "\n";
}
if(disFlag)
{
advMessage += "Min: " + dieRoll + "\n";
}
if(modifier != ""){
modMessage += "Modifier: " + modifier + "\n";
var totalSum = rollSum+modifier;
totMessage += "Total Sum: " + totalSum + "\n";
}
if( numDie>1 || modifier !="" || disFlag || advFlag){
if(sumFlag == true){
maxFill = Math.max(String(sumMessage).length, String(modMessage).length, String(totMessage).length, String(filler).length, String(advMessage).length);
}
else{
maxFill = Math.max(String(sumMessage).length, String(modMessage).length, String(totMessage).length, String(advMessage).length);
}
filler = Array(maxFill).join("-") + "\n";
rollMessage +=filler;
if(advFlag || disFlag)
{
rollMessage += advMessage;
}
if(numDie > 1){
rollMessage += sumMessage;
}
if(modifier != ""){
rollMessage += modMessage;
rollMessage += totMessage;
}
}
}
return rollMessage;
}
module.exports = {
getPlayer,
toTitleCase,
inWords,
getMaxWidth,
getAliases,
getProf,
parseStringForStat,
findSpace,
getRandomInt,
findNumberIndex,
parseNumberFromString,
// parseSumNew,
parseSum,
getDice,
breakUpString,
getModFromString,
getRollMessage
}
//module.export = tools;
<file_sep>
var fs = require('fs');
var tools = require("./tools.js");
var config = require("./config.json");
var armor = require("./inventory/armor.json");
function Player() {}
Player.prototype = {
///////////////////////////
initialize: function(){
this.parseNotes();
this.setMods();
this.parseArmor();
},
parseArmor: function(){
var armor_type = "none";
if(this["armor"].get() == "na"){
return;
}
for( ar in armor){
if(this["armor"].get() == armor[ar][0]){
armor_type = ar;
break;
}
}
var newAC = armor[armor_type][2];
var ACdexBuff = 0;
if(armor[armor_type][3] != "na"){
if(armor[armor_type][3] == "Inf"){
ACdexBuff = Number(this["dex"]["modifier"]);
}
else{
ACdexBuff = Math.min(Number(this["dex"]["modifier"]), Number(armor[armor_type][3]));
}
}
newAC = Number(newAC) + Number(ACdexBuff);
this["ac"].set(newAC);
},
getStatsMessage: function(){
var statsMessage = "";
var space = 4;
var headerLength = String(this["name"].get()).length + String("Level ").length + String(this["level"].get()).length + String(this["class"].get()).length;
var defHeader = "AC = " + String(Number(this["ac"].get()) + Number(this["ac"]["bonus"]) ) + " | INIT = " + String(Number(this["init"].get()) + Number(this["init"]["bonus"])) + " | SPD = " + String(Number(this["spd"].get()) +Number(this["spd"]["bonus"])) + " | PER = " + String(10 + Number(this["per"]["bonus"]) + Number(this["per"]["prof"])*Number(this["prof"].get()) + Number(this["wis"]["modifier"])) + " | HP = " + String(Number(this["hp"].get()) + Number(this["hp"]["bonus"])) + "\n";
if(defHeader.length > headerLength){
space = Math.ceil((defHeader.length - headerLength)/2) +1;
}
var messageHeader = this["name"].get() + Array(space).join(" ") + "Level " + this["level"].get() + Array(space).join(" ") + this["class"].get() + "\n";
fillerLength = Math.max(messageHeader.length, defHeader.length);
var messagefiller = Array(fillerLength).join("-") + "\n";
var deffiller = Array(defHeader.length).join("-") + "\n";
statsMessage += messageHeader + messagefiller + defHeader + messagefiller;
for(key in this){
if(this[key]["type"] == "stat"){
statsMessage += key.toUpperCase() + " = " + Number(Number(this[key].get()) + Number(this[key]["bonus"])) + " (" + String(this[key]["modifier"]) + ")\n";
}
}
var profMessage = "Proficiency Bonus = " + this["prof"].get() + "\n";
var filler = Array(profMessage.length).join("-") + "\n";
statsMessage += filler + profMessage;
return statsMessage;
},
getSkillsMessage: function(){
var skillsMessage = "";
var space = 30;
var messageHeader = this["name"].get() + Array((space - this["name"].get().length) - String("Skills").length).join(" ") + "Skills" + "\n";
skillsMessage += messageHeader + Array(messageHeader.length).join("-") + "\n";
var counter = 0;
var oldStat = "str";
for( key in this){
if( this[key]["type"] == "skill"){
if( this[key]["mainStat"] != oldStat){
if( counter <= 3){
for( i = counter; i < oldStat.length; i++){
skillsMessage += oldStat[i].toUpperCase() + "| | |" + "\n";;
}
}
skillsMessage += Array(messageHeader.length).join("-") + "\n"
oldStat = this[key]["mainStat"];
counter = 0;
}
if(counter < 3){
skillsMessage += this[key]["mainStat"][counter].toUpperCase();
}
else{
skillsMessage += " ";
}
skillsMessage += "| " + this[key]["name"] + Array(18 - this[key]["name"].length).join(" ") + " |";
skillsMessage += tools.getProf(this[key]["prof"]) + "| (" +String( Number(this[this[key]["mainStat"]]["modifier"]) + Number(this[key]["bonus"]) + Number(this[key]["prof"]) * Number(this["prof"].get()) )+")" ;
skillsMessage += "\n";
counter ++;
}
}
skillsMessage += Array(messageHeader.length).join("-") + "\n"
return skillsMessage;
},
getBonusMessage: function(){
var bonusMessage = "";
var space = 28;
var messageHeader = this["name"].get() + Array((space - this["name"].get().length) - String("Bonus").length).join(" ") + "Bonus" + "\n";
bonusMessage += messageHeader + Array(messageHeader.length).join("-") + "\n";
var counter = 0;
var oldStat = "COM";
for( key in this){
if( this[key]["type"] == "combat"){
if(counter < 3){
bonusMessage += oldStat[counter].toUpperCase();
}
else{
bonusMessage += " ";
}
bonusMessage += "| " + this[key]["name"] + Array(18 - this[key]["name"].length).join(" ") + " |";
bonusMessage += " (" +String( Number(this[key]["bonus"]) )+")" ;
bonusMessage += "\n";
counter ++;
}
}
bonusMessage += Array(messageHeader.length).join("-") + "\n";
oldStat = "ATTR";
counter = 0;
for( key in this){
if( this[key]["type"] == "stat"){
if(counter < 4){
bonusMessage += oldStat[counter].toUpperCase();
}
else{
bonusMessage += " ";
}
bonusMessage += "| " + this[key]["name"] + Array(18 - this[key]["name"].length).join(" ") + " |";
bonusMessage += " (" +String( Number(this[key]["bonus"]) )+")" ;
bonusMessage += "\n";
counter ++;
}
}
bonusMessage += Array(messageHeader.length).join("-") + "\n";
oldStat = "str";
counter = 0;
for( key in this){
if( this[key]["type"] == "skill"){
if( this[key]["mainStat"] != oldStat){
if( counter <= 3){
for( i = counter; i < oldStat.length; i++){
bonusMessage += oldStat[i].toUpperCase() + "| | " + "\n";;
}
}
bonusMessage += Array(messageHeader.length).join("-") + "\n"
oldStat = this[key]["mainStat"];
counter = 0;
}
if(counter < 3){
bonusMessage += this[key]["mainStat"][counter].toUpperCase();
}
else{
bonusMessage += " ";
}
bonusMessage += "| " + this[key]["name"] + Array(18 - this[key]["name"].length).join(" ") + " |";
bonusMessage += " (" +String( Number(this[key]["bonus"]) )+")" ;
bonusMessage += "\n";
counter ++;
}
}
bonusMessage += Array(messageHeader.length).join("-") + "\n"
return bonusMessage;
},
getNotesMessage: function(){
var notesMessage = "";
var notesBuff = this["notes"].get();
var notesMax = 0;
var space = 4;
var spaceArr = Array(space).join(" ");
var discordMax = config.discordWidth;
var notesArr = [];
if(notesBuff.length >0){
for(j = 0; j < notesBuff.length;j++){
var noteLength = notesBuff[j].length;
noteLength += String(j+1).length + 2 + space;
if(noteLength > notesMax){
notesMax = noteLength;
}
var arrBuff = notesBuff[j].split("\n");
var k = 0;
while( k < arrBuff.length) {
if( (arrBuff[k].length + String(j+1).length + 2 + space) >= discordMax) {
var maxIndex = discordMax - (String(j+1).length + 2 + space);
var buffString = "";
var newIndex = tools.findSpace(arrBuff[k],maxIndex);
// if(newIndex == maxIndex || newIndex < maxIndex - 15){
// buffString = arrBuff[k].slice(maxIndex);
// arrBuff[k] = arrBuff[k].replace(buffString,"-");
// buffString = "-" + buffString;
// }
// //
// else{
buffString = arrBuff[k].slice(newIndex);
arrBuff[k] = arrBuff[k].replace(buffString, "");
// }
if( k < arrBuff.length - 1){
arrBuff[k+1] = buffString + " " + arrBuff[k+1];
}
else{
arrBuff.push(buffString);
}
}
k++;
}
notesArr.push(arrBuff);
if(noteLength > discordMax){
}
// notesMessage += "(" + String(Number(j + 1)) + ") " + notesBuff[j] + "\n\n";
}
notesMax = Math.min(notesMax,discordMax);
notesMessage += this["name"].get() + Array(Math.floor(notesMax/2 - 4)).join(" ") + "Notes\n";
notesMessage += Array(notesMax).join("-") + "\n";
for(j = 0; j < notesArr.length;j++){
notesMessage += "(" + String(Number(j + 1)) + ")";
var frontSpace = Array(String("(" + String(Number(j + 1)) + ")").length).join(" ");
for(k=0; k < notesArr[j].length;k++){
if(k == 0){
notesMessage += spaceArr + notesArr[j][k] + "\n";
}
else{
notesMessage += frontSpace + spaceArr + notesArr[j][k] + "\n";
}
}
notesMessage += "\n";
//notesMessage += "(" + String(Number(j + 1)) + ")" + Array(space).join(" ") + notesBuff[j] + "\n\n";
}
}
else{
notesMessage = this["name"].get() + " has no notes saved.\n";
}
return notesMessage;
},
parseNotes: function(){
for( key in this){
if( "bonus" in this[key]){
this[key]["bonus"] = 0;
}
}
var notes = this["notes"].get();
if(notes.length == 0){
console.log("No Notes!");
return;
}
for(k=0; k < notes.length; k++){
note = notes[k].trim().split(/ +/g).join("");
var buffString = tools.parseStringForStat(String(notes[k]));
var buffArr = tools.parseSum(String(note));
var buffSum = buffArr[0];
var buffAdd = 0;
if( buffString in this){
this[buffString]["bonus"] = this[buffString]["bonus"] + Number(buffSum);
}
}
this.setMods();
},
getInvMessage: function(){
var invMessage = "";
var invBuff = this["inventory"].get();
var invMax = 0;
var space = 4;
var spaceArr = Array(space).join(" ");
var discordMax = config.discordWidth;
var invArr = [];
if(invBuff.length >0){
for(j = 0; j < invBuff.length;j++){
var noteLength = invBuff[j].length;
noteLength += String(j+1).length + 2 + space;
if(noteLength > invMax){
invMax = noteLength;
}
var arrBuff = invBuff[j].split("\n");
var k = 0;
while( k < arrBuff.length) {
if( (arrBuff[k].length + String(j+1).length + 2 + space) >= discordMax) {
var maxIndex = discordMax - (String(j+1).length + 2 + space);
var buffString = "";
var newIndex = tools.findSpace(arrBuff[k],maxIndex);
// if(newIndex == maxIndex || newIndex < maxIndex - 15){
// buffString = arrBuff[k].slice(maxIndex);
// arrBuff[k] = arrBuff[k].replace(buffString,"-");
// buffString = "-" + buffString;
// }
// //
// else{
buffString = arrBuff[k].slice(newIndex);
arrBuff[k] = arrBuff[k].replace(buffString, "");
// }
if( k < arrBuff.length - 1){
arrBuff[k+1] = buffString + " " + arrBuff[k+1];
}
else{
arrBuff.push(buffString);
}
}
k++;
}
invArr.push(arrBuff);
if(noteLength > discordMax){
}
// notesMessage += "(" + String(Number(j + 1)) + ") " + notesBuff[j] + "\n\n";
}
invMax = Math.min(invMax,discordMax);
invMessage += this["name"].get() + Array(Math.floor(invMax/2 - 4)).join(" ") + "Inventory\n";
var headerLength = invMessage.length;
if(headerLength > invMax){
if(headerLength > discordMax){
//nothing
}
else{
invMax = headerLength;
}
}
invMessage += Array(invMax).join("-") + "\n";
for(j = 0; j < invArr.length;j++){
invMessage += "(" + String(Number(j + 1)) + ")";
var frontSpace = Array(String("(" + String(Number(j + 1)) + ")").length).join(" ");
for(k=0; k < invArr[j].length;k++){
if(k == 0){
invMessage += spaceArr + invArr[j][k] + "\n";
}
else{
invMessage += frontSpace + spaceArr + invArr[j][k] + "\n";
}
}
invMessage += "\n";
//invMessage += "(" + String(Number(j + 1)) + ")" + Array(space).join(" ") + invBuff[j] + "\n\n";
}
}
else{
invMessage = this["name"].get() + " has no items saved.\n";
}
return invMessage;
},
setMods: function(){
for( key in this){
if(this[key]["type"] == "stat"){
this[key]["modifier"] = String(Number(Math.floor( (Number(this[key].get()) + Number(this[key]["bonus"]) - 10)/2)));
}
}
this["init"].set(String(Number(this["dex"]["modifier"]) + Number(this["init"]["bonus"])));
this["prof"].set(String(Math.floor(( Number(this["level"].get()) + 7)/4) + Number(this["prof"]["bonus"])));
}
};
module.exports = Player;
<file_sep>var Stat = {
value: "",
prof : "",
bonus: "",
mainStat: "",
type: ""
}
Stat.prototype = {
get: function(){
return this['value'];
},
set: function(value){
this['value'] = value;
}
}
module.exports = Stat;
|
f99390522ca2144ef5969de58f700ee116b6c6a4
|
[
"Markdown",
"JavaScript"
] | 4
|
Markdown
|
jac21934/Jenkins_the_DnD_Bot
|
c081a64d314093704755db3c031a48ec3c6ac562
|
a2734bc94ef14f121109bca98587688e053991c5
|
refs/heads/master
|
<file_sep>#![no_std]
#![no_main]
#![deny(unsafe_code)]
use cortex_m::asm::delay;
use stm32f4_playground as _; // Global logger + panicking-behavior
use stm32f4xx_hal::{prelude::*, pwm, stm32 as device};
#[cortex_m_rt::entry]
fn main() -> ! {
defmt::info!("Plug your RGB LED into PA8-10! Don't forget current limiting resistors!");
// Take ownership of the device peripherals singleton
if let Some(dp) = device::Peripherals::take() {
let rcc = dp.RCC.constrain();
let clocks = rcc.cfgr.freeze();
let gpioa = dp.GPIOA.split();
let channels = (
gpioa.pa8.into_alternate_af1(), // Red
gpioa.pa9.into_alternate_af1(), // Green
gpioa.pa10.into_alternate_af1(), // Blue
);
let pwm = pwm::tim1(dp.TIM1, channels, clocks, 20u32.khz());
let (mut r, mut g, mut b) = pwm;
let max_duty = r.get_max_duty();
r.enable();
g.enable();
b.enable();
loop {
r.set_duty(max_duty);
g.set_duty(0);
b.set_duty(0);
delay(5_000_000);
r.set_duty(0);
g.set_duty(max_duty);
b.set_duty(0);
delay(5_000_000);
r.set_duty(0);
g.set_duty(0);
b.set_duty(max_duty);
delay(5_000_000);
}
}
defmt::panic!("Unreachable code");
}
<file_sep>#![deny(unsafe_code)]
#![no_std]
#![no_main]
use cortex_m::asm::delay;
use stm32f4::stm32f401 as device;
use stm32f4::stm32f401::TIM1;
use stm32f4_playground as _; // Global logger + panicking-behavior
#[cortex_m_rt::entry]
fn main() -> ! {
defmt::info!("RGB LED PWM! PB15 = Red, PB14 = Green, PB13 = Blue.");
// Take ownership of the device peripherals singleton
if let Some(dp) = device::Peripherals::take() {
// Take and own RCC RegisterBlock out of dp
let rcc = dp.RCC;
// Take and own the GPIOB and TIM1 struct out of dp
let (gpiob, tim1) = (dp.GPIOB, dp.TIM1);
/* GPIO configuration: PB15 = Red, PB14 = Green, PB13 = Blue */
// Enable clock for GPIOB
rcc.ahb1enr.write(|w| w.gpioben().enabled());
// Set mode of PB13-15 as alternative function
gpiob.moder.modify(|_, w| w.moder13().alternate());
gpiob.moder.modify(|_, w| w.moder14().alternate());
gpiob.moder.modify(|_, w| w.moder15().alternate());
// Alternate function mapping 1 for TIM1_CHxN (see DS9716 datasheet)
gpiob.afrh.modify(|_, w| w.afrh13().af1()); // TIM1_CH1N
gpiob.afrh.modify(|_, w| w.afrh14().af1()); // TIM1_CH2N
gpiob.afrh.modify(|_, w| w.afrh15().af1()); // TIM1_CH3N
// Set GPIO speed for PB13-15 as high speed
gpiob.ospeedr.modify(|_, w| w.ospeedr13().high_speed());
gpiob.ospeedr.modify(|_, w| w.ospeedr14().high_speed());
gpiob.ospeedr.modify(|_, w| w.ospeedr15().high_speed());
// Set PB13-15 as push-pull
gpiob.otyper.modify(|_, w| w.ot13().push_pull());
gpiob.otyper.modify(|_, w| w.ot14().push_pull());
gpiob.otyper.modify(|_, w| w.ot15().push_pull());
/* TIM1 PWM configuration */
rcc.apb2enr.write(|w| w.tim1en().enabled());
// Setup TIM1 in PWM mode
tim1.setup_as_pwm();
// Turn everything off (assuming common cathode)
tim1.set_rgb(0, 0, 0);
loop {
tim1.set_rgb(255, 0, 0); // Red
delay(10_000_000); // Delay for at least n instruction cycles
            tim1.set_rgb(0, 255, 0); // Green
delay(10_000_000); // Delay for at least n instruction cycles
            tim1.set_rgb(0, 0, 255); // Blue
delay(10_000_000); // Delay for at least n instruction cycles
}
};
defmt::panic!("Uh oh, reached unreachable code!");
}
pub trait TIM1Ext {
fn setup_as_pwm(&self);
fn set_red(&self, value: u16);
fn set_green(&self, value: u16);
fn set_blue(&self, value: u16);
fn set_rgb(&self, r: u16, g: u16, b: u16);
}
/// Writes to the Capture/Compare register (ccrX where X is the channel)
/// which in turns determines the duty cycle of each PWM pin.
/// Each channel corresponds to a specific physical pin which is connected
/// to a red, green, or blue LED pin.
impl TIM1Ext for TIM1 {
    /// Set up the timer and channels in PWM mode; this assumes your main
/// program has already enabled the clock for TIM1, via the RCC register
fn setup_as_pwm(&self) {
// Up-counting
self.cr1.modify(|_, w| w.dir().up());
        // Clock prescaler (16-bit value, max 65,535)
self.psc.write(|w| w.psc().bits(2500 - 1));
        // Auto-reload value; for up-counting the counter goes from 0 -> ARR
self.arr.write(|w| w.arr().bits(255 - 1));
// PWM Mode 1 output on channel 1, 2, 3
// Output channel 1, 2, 3 preload enabled
self.ccmr1_output().write(|w| {
w.oc1m()
.pwm_mode1()
.oc1pe()
.enabled()
.oc2m()
.pwm_mode1()
.oc2pe()
.enabled()
});
self.ccmr2_output()
.write(|w| w.oc3m().pwm_mode1().oc3pe().enabled());
// Enable complementary output of channel 1, 2, 3
self.ccer.write(|w| {
w.cc1ne()
.set_bit()
.cc1p()
.clear_bit()
.cc2ne()
.set_bit()
.cc2p()
.clear_bit()
.cc3ne()
.set_bit()
.cc3p()
.clear_bit()
});
// Main output enable
self.bdtr.write(|w| w.moe().enabled());
// Enable counter
self.cr1.modify(|_, w| w.cen().enabled());
}
fn set_red(&self, value: u16) {
self.ccr3.write(|w| w.ccr().bits(value));
}
fn set_green(&self, value: u16) {
self.ccr2.write(|w| w.ccr().bits(value));
}
fn set_blue(&self, value: u16) {
self.ccr1.write(|w| w.ccr().bits(value));
}
fn set_rgb(&self, r: u16, g: u16, b: u16) {
self.ccr3.write(|w| w.ccr().bits(r));
self.ccr2.write(|w| w.ccr().bits(g));
self.ccr1.write(|w| w.ccr().bits(b));
}
}
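// Illustrative helper (an assumed addition, not in the original sources):
// setup_as_pwm() configures ARR = 254, so a duty-cycle percentage maps onto
// the 0..=254 compare range like this.
#[allow(dead_code)]
fn percent_to_duty(percent: u16) -> u16 {
    // Clamp to 100 % and scale into the compare register range.
    percent.min(100) * 254 / 100
}
// e.g. tim1.set_red(percent_to_duty(50)) would request roughly 50 % duty on the red channel.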
<file_sep>[target.'cfg(all(target_arch = "arm", target_os = "none"))']
# See `probe-run --list-chips` to find your chip
runner = "probe-run --chip STM32F401CCUx"
rustflags = [
# Use the link.x script provided by the cortex-m-rt crate
"-C", "link-arg=-Tlink.x",
"-C", "linker=flip-link",
"-C", "link-arg=-Tdefmt.x",
# Needed if your flash or ram addresses are not aligned to 0x10000 in memory.x
"-C", "link-arg=--nmagic",
]
[build]
# target = "thumbv6m-none-eabi" # Cortex-M0 and Cortex-M0+
# target = "thumbv7m-none-eabi" # Cortex-M3
# target = "thumbv7em-none-eabi" # Cortex-M4 and Cortex-M7 (no FPU)
target = "thumbv7em-none-eabihf" # Cortex-M4F and Cortex-M7F (with FPU)
[alias]
rb = "run --bin"
rrb = "run --release --bin"
<file_sep>#![deny(unsafe_code)]
#![no_std]
#![no_main]
use stm32f4_playground as _; // Global logger + panicking-behavior
use cortex_m::asm::wfi;
use stm32f4::stm32f401 as device;
#[cortex_m_rt::entry]
fn main() -> ! {
defmt::info!("Hook up an LED to PA7, TIM1 is toggling it!");
// Take ownership of the device peripheral singleton
if let Some(dp) = device::Peripherals::take() {
cortex_m::interrupt::free(move |_| {
// Take and own RCC, SYSCFG, and EXTI RegisterBlocks out of dp
let rcc = dp.RCC;
// Take and own the GPIOA struct dp
let gpioa = dp.GPIOA;
// Take and own the TIM1 struct out of dp
let tim1 = dp.TIM1;
// Enable GPIOA clock
rcc.ahb1enr.write(|w| w.gpioaen().enabled());
            // Set PA7 as alternate function
gpioa.moder.write(|w| w.moder7().alternate());
// Set alternate function 1 (01) for PA7 (TIM1_CH1N)
gpioa.afrl.write(|w| w.afrl7().af1());
// Set output speed of PA7 as low by clearing speed bits
gpioa.ospeedr.write(|w| w.ospeedr7().low_speed());
// Set PA7 as no pull-up, no pull-down
gpioa.pupdr.write(|w| w.pupdr7().floating());
// Setup and enable TIM1 CH1
tim1_ch1_init(&rcc, &tim1);
});
};
loop {
wfi();
}
}
/// Setup Channel 1 of the Advanced Timer TIM1 to output toggle
/// For this specific program f_clk = 16 MHz, so with the prescaler below,
/// f_counter = 16 MHz / 8000 = 2 kHz, then with the auto-reload value below,
/// counting period = 1000 / f_counter = 0.5 s
/// Therefore, the output should toggle on/off every 0.5 s
fn tim1_ch1_init(rcc: &device::RCC, tim1: &device::TIM1) {
// Enable advanced TIM1 clock
rcc.apb2enr.modify(|_, w| w.tim1en().enabled());
// Set count direction as up-counting
tim1.cr1.write(|w| w.dir().up());
    // Clock prescaler (16-bit value, max 65,535)
tim1.psc.write(|w| w.psc().bits(8000 - 1));
    // Auto-reload value; for up-counting the counter goes from 0 -> ARR
tim1.arr.write(|w| w.arr().bits(1000 - 1));
// Capture/compare register can be any value 0 < CCR < ARR
tim1.ccr1.write(|w| w.ccr().bits(500));
// Main output enable (MOE): 0 = Disable, 1 = Enable
tim1.bdtr.write(|w| w.moe().enabled());
// Select toggle mode (0011) for channel 1
tim1.ccmr1_output().write(|w| w.oc1m().toggle());
// Select output polarity as active high
// Enable output for channel 1 complementary output
tim1.ccer.write(|w| w.cc1np().clear_bit().cc1ne().set_bit());
// Enable TIM1 counter
tim1.cr1.write(|w| w.cen().enabled());
}
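// Illustrative helper (an assumed addition, not in the original sources): the
// doc comment above works out the toggle period by hand; this computes it for
// arbitrary PSC/ARR values at a given timer clock.
#[allow(dead_code)]
fn toggle_period_ms(f_clk_hz: u32, psc: u16, arr: u16) -> u32 {
    // f_counter = f_clk / (PSC + 1); the output toggles once per counter
    // period, i.e. every (ARR + 1) counter ticks.
    let f_counter = f_clk_hz / (u32::from(psc) + 1);
    (u32::from(arr) + 1) * 1_000 / f_counter
}
// With the values above: toggle_period_ms(16_000_000, 7_999, 999) == 500 ms.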
<file_sep># Random HAL-Based Experiments
* [ChaCha8Poly1305 AEAD over USB](src/bin/serial_rust_crypto.rs)
* [TIM1 PWM RGB](src/bin/pwm_rgb.rs)
<file_sep>#![deny(unsafe_code)]
#![no_std]
#![no_main]
use stm32f4_playground as _; // Global logger + panicking-behavior
use core::{cell::RefCell, ops::Deref};
use cortex_m::{asm::wfi, interrupt::Mutex, peripheral::NVIC};
use stm32f4::stm32f401 as device;
use stm32f4::stm32f401::interrupt;
static GPIOC: Mutex<RefCell<Option<device::GPIOC>>> = Mutex::new(RefCell::new(None));
static EXTI: Mutex<RefCell<Option<device::EXTI>>> = Mutex::new(RefCell::new(None));
#[cortex_m_rt::entry]
fn main() -> ! {
defmt::info!("External interrupt enabled, trying pressing PA0!");
// Take ownership of the device peripheral singleton
if let Some(dp) = device::Peripherals::take() {
cortex_m::interrupt::free(move |cs| {
// Take and own RCC, SYSCFG, and EXTI RegisterBlocks out of dp
let (rcc, syscfg, exti) = (dp.RCC, dp.SYSCFG, dp.EXTI);
// Take and own the GPIO A & C structs out of dp
let (gpioa, gpioc) = (dp.GPIOA, dp.GPIOC);
// Enable GPIO A & C clocks
rcc.ahb1enr
.write(|w| w.gpiocen().enabled().gpioaen().enabled());
// Set PC13 as an output
gpioc.moder.write(|w| w.moder13().output());
// Set PC13 to low speed (default)
gpioc.ospeedr.write(|w| w.ospeedr13().low_speed());
// Set PC13 as no pull-up, no pull-down
gpioc.pupdr.write(|w| w.pupdr13().floating());
// Set PA0 as an input
gpioa.moder.write(|w| w.moder0().input());
// Set PA0 as pull-up (normally high)
gpioa.pupdr.write(|w| w.pupdr0().pull_up());
// Configure EXTI0 to trigger on PA0 falling edge (button press)
exti0_init(&rcc, &syscfg, &exti);
// Transfer EXTI & GPIOC into shared global structures
GPIOC.borrow(cs).replace(Some(gpioc));
EXTI.borrow(cs).replace(Some(exti));
});
};
// Enable EXTI0 interrupt
#[allow(unsafe_code)]
unsafe {
NVIC::unmask(device::Interrupt::EXTI0);
}
loop {
wfi();
}
}
/// Configure EXTI0 and set the conditions for the interrupt to trigger
fn exti0_init(rcc: &device::RCC, syscfg: &device::SYSCFG, exti: &device::EXTI) {
// Enabled system configuration controller clock
rcc.apb2enr.modify(|_, w| w.syscfgen().enabled());
// Set PA0 as the trigger source of EXTI0
#[allow(unsafe_code)]
unsafe {
syscfg.exticr1.write(|w| w.exti0().bits(0));
}
// Disable EXTI0 rising edge trigger, via Rising trigger selection register
exti.rtsr.modify(|_, w| w.tr0().disabled());
// Enable EXTI0 falling edge trigger, via Falling trigger selection register
exti.ftsr.modify(|_, w| w.tr0().enabled());
// Unmask EXTI0 interrupt bit, allowing it to be enabled
exti.imr.modify(|_, w| w.mr0().unmasked());
}
/// This is the interrupt handler that gets called when something triggers
/// the EXTI0 line
#[interrupt]
fn EXTI0() {
cortex_m::interrupt::free(|cs| {
// Toggle GPIOC
if let Some(ref gpioc) = GPIOC.borrow(cs).borrow().deref() {
if gpioc.odr.read().odr13().is_high() {
gpioc.odr.write(|w| w.odr13().low()); // ON
} else {
gpioc.odr.write(|w| w.odr13().high()); // OFF
}
}
// Clear interrupt pending request on EXTI0
if let Some(exti) = EXTI.borrow(cs).borrow().deref() {
exti.pr.write(|w| w.pr0().clear());
}
});
}
<file_sep>#![deny(unsafe_code)]
#![no_std]
#![no_main]
use stm32f4_playground as _; // Global logger + panicking-behavior
use stm32f4::stm32f401 as device;
#[cortex_m_rt::entry]
fn main() -> ! {
defmt::info!("Try pressing PA0 (on-board button)!");
// Take ownership of the device peripherals singleton
if let Some(dp) = device::Peripherals::take() {
// Take and own RCC RegisterBlock out of dp
let rcc = dp.RCC;
// Take and own the GPIO A & C structs out of dp
let (gpioa, gpioc) = (dp.GPIOA, dp.GPIOC);
// Enable GPIO A & C clock
rcc.ahb1enr
.write(|w| w.gpiocen().enabled().gpioaen().enabled());
// Set PC13 as an output
gpioc.moder.write(|w| w.moder13().output());
// Set PC13 to low speed (default)
gpioc.ospeedr.write(|w| w.ospeedr13().low_speed());
// Set PC13 as no pull-up, no pull-down
gpioc.pupdr.write(|w| w.pupdr13().floating());
// Set PA0 as an input
gpioa.moder.write(|w| w.moder0().input());
// Set PA0 as pull-up (normally high)
gpioa.pupdr.write(|w| w.pupdr0().pull_up());
// Turn PC13 off. NOTE: Reverse logic, high == OFF, low == ON
gpioc.odr.write(|w| w.odr13().set_bit()); // OFF
loop {
// NOTE: This is not very reliable, you must consider button debouncing
// If button is pressed
if gpioa.idr.read().idr0().is_low() {
// Turn PC13 ON (low) if it is currently OFF (high),
// else turn PC13 OFF (high)
if gpioc.idr.read().idr13().is_high() {
gpioc.odr.write(|w| w.odr13().clear_bit()); // ON
} else {
gpioc.odr.write(|w| w.odr13().set_bit()); // OFF
}
}
}
};
defmt::panic!("Uh oh, reached unreachable code!");
}
<file_sep># Learning embedded systems with an STM32F4 and Rust!
This repository is primarily for learning and trying random things.
You may find basic code for learning embedded programming at the register level in [without_hal](without_hal).
Eventually, there will be more interesting things using a hardware abstraction layer (HAL) in [with_hal](with_hal).
## Development Board
- [STM32F401CC](https://www.st.com/en/microcontrollers-microprocessors/stm32f401cc.html)
- [WeAct STM32F4x1](https://github.com/WeActTC/MiniF4-STM32F4x1)
- [Banggood STM32F401 Development Board](https://www.banggood.com/STM32F401-Development-Board-STM32F401CCU6-STM32F4-Learning-Board-p-1568897.html?rmmds=search&cur_warehouse=CN)
## Getting started:
These are the high-level steps and requirements you need to run the examples in this repo.
Please see [app-template](https://github.com/knurling-rs/app-template) for more detail.
Install Cargo related tooling:
```sh
cargo install flip-link
cargo install probe-run
cargo install cargo-binutils # Optional
rustup component add llvm-tools-preview # Optional
```
Setup udev rules:
```sh
# 1. Create and edit new udev rule file
sudo vim /etc/udev/rules.d/70-st-link.rules
# 2. Add the following four lines
# STM32F3DISCOVERY rev A/B - ST-LINK/V2
ATTRS{idVendor}=="0483", ATTRS{idProduct}=="3748", TAG+="uaccess"
# STM32F3DISCOVERY rev C+ - ST-LINK/V2-1
ATTRS{idVendor}=="0483", ATTRS{idProduct}=="374b", TAG+="uaccess"
# 3. Reload and retrigger your rules
udevadm control --reload
udevadm trigger
```
Uploading code:
```sh
cargo run --bin ${TARGET}
# Or using shortcut (defined in .cargo/config.toml):
cargo rb ${TARGET}
```
## Resources:
* [probe-run](https://github.com/knurling-rs/probe-run)
* [defmt](https://github.com/knurling-rs/defmt)
* [flip-link](https://github.com/knurling-rs/flip-link)
* [OpenOCD](http://openocd.org/)
* [cargo-flash](https://github.com/probe-rs/cargo-flash)
* [cortex-m-rt startup code crate](https://docs.rs/cortex-m-rt)
* [cortex-m low-level access crate](https://docs.rs/cortex-m)
* [stm32f4 peripheral access crate](https://docs.rs/crate/stm32f4)
* [stm32f4xx-hal](https://docs.rs/stm32f4xx-hal)
* [Real-Time Interrupt-driven Concurrency (RTIC)](https://github.com/rtic-rs/cortex-m-rtic)
* [The Embedded Rust Book](https://rust-embedded.github.io/book/)
* [A look into ways to implement and share data with interrupt handlers in Rust by therealprof](https://therealprof.github.io/blog/interrupt-comparison/)
* [cargo-binutils](https://github.com/rust-embedded/cargo-binutils)
* [cargo-make](https://github.com/sagiegurari/cargo-make)
## Miscellaneous Commands
```sh
# Flash a program with OpenOCD, replace ${TARGET_BIN} with your binary
openocd -f interface/stlink-v2.cfg -f target/stm32f4x.cfg -c "program ${TARGET_BIN} reset exit 0x08000000"
# Create a raw binary from an ELF, replace ${TARGET_ELF} with your compiled Rust code
# ${TARGET_BIN} can be named whatever you like
cargo objcopy --bin ${TARGET_ELF} -- -O binary ${TARGET_BIN}
# Use OpenOCD to erase all flash memory on target board
openocd -f interface/stlink-v2.cfg -f target/stm32f4x.cfg -c "init; reset halt; stm32f4x mass_erase 0; exit"
# Use semi-hosting to see debug output, requires STlink debugger
openocd -f interface/stlink-v2.cfg -f target/stm32f4x.cfg -c "init; arm semihosting enable"
# Attach to running OpenOCD server via GDB
arm-none-eabi-gdb -q ${TARGET_ELF} -ex "target remote localhost:3333"
```
<file_sep>[package]
name = "stm32f4-playground"
version = "0.1.0"
authors = ["<NAME> <<EMAIL>>"]
edition = "2018"
[dependencies]
# Provides startup code and useful attributes
# https://docs.rs/cortex-m-rt
cortex-m-rt = "0.6.13"
# Provides low-level access to registers and assembly instructions
# https://docs.rs/cortex-m
cortex-m = "0.7.2"
# Efficient logging framework
# https://docs.rs/defmt
defmt = "0.2.1"
# Transmit defmt log messages over the RTT (Real-Time Transfer) protocol
# https://docs.rs/defmt-rtt
defmt-rtt = "0.2.0"
# Panic handler with a feature to print panic messages via defmt
# https://docs.rs/panic-probe
panic-probe = { version = "0.2.0", features = ["print-defmt"] }
# Autogenerated API for the STM32F4 peripherals
# https://docs.rs/crate/stm32f4
stm32f4 = {version = "0.13.0", features = ["stm32f401", "rt"]}
[features]
# Set logging levels here
default = [ "defmt-default", ]
defmt-default = []
defmt-trace = []
defmt-debug = []
defmt-info = []
defmt-warn = []
defmt-error = []
# cargo build/run
[profile.dev]
codegen-units = 1
debug = 2
debug-assertions = true
incremental = false
opt-level = 3
overflow-checks = true
# cargo test
[profile.test]
codegen-units = 1
debug = 2
debug-assertions = true
incremental = false
opt-level = 3
overflow-checks = true
# cargo build/run --release
[profile.release]
codegen-units = 1
debug = 2
debug-assertions = false
incremental = false
lto = 'fat'
opt-level = 3
overflow-checks = false
# cargo test --release
[profile.bench]
codegen-units = 1
debug = 2
debug-assertions = false
incremental = false
lto = 'fat'
opt-level = 3
overflow-checks = false
<file_sep># Look Ma, No HAL!
* GPIO
- [x] [Blinky (output)](src/bin/blinky.rs)
- [x] [Button (input)](src/bin/button.rs)
* [SysTick](src/bin/systick.rs)
* [External Interrupts](src/bin/external_interrupt.rs)
* Timers
- [x] [Advanced Timers (TIM1)](src/bin/advanced_timer.rs)
- [x] [Three channel pulse-width modulation (PWM) via TIM1](src/bin/pwm_tim1.rs)
- [ ] General Purpose Timers
* UART
- [x] [Basic UART](src/bin/uart.rs)
- [ ] UART with Interrupts
* I2C
- [x] [ADXL345 via I2C](src/bin/adxl345.rs)
* SPI
* DMA
* ADC
* DAC
<file_sep>#![deny(unsafe_code)]
#![no_std]
#![no_main]
use stm32f4_playground as _; // Global logger + panicking-behavior
use cortex_m::asm::delay;
use stm32f4::stm32f401 as device;
#[cortex_m_rt::entry]
fn main() -> ! {
defmt::info!("Starting blinky!");
// Take ownership of the device peripherals singleton
if let Some(dp) = device::Peripherals::take() {
// Take and own RCC RegisterBlock out of dp
let rcc = dp.RCC;
// Take and own the GPIOC struct out of dp
let gpioc = dp.GPIOC;
// Enable GPIOC clock
rcc.ahb1enr.write(|w| w.gpiocen().enabled());
// Set PC13 as an output
gpioc.moder.write(|w| w.moder13().output());
// Set PC13 to low speed (default)
gpioc.ospeedr.write(|w| w.ospeedr13().low_speed());
// Set PC13 as no pull-up, no pull-down
gpioc.pupdr.write(|w| w.pupdr13().floating());
loop {
gpioc.odr.write(|w| w.odr13().clear_bit()); // ON
            delay(5_000_000); // Delay for at least n instruction cycles
            gpioc.odr.write(|w| w.odr13().set_bit()); // OFF
            delay(5_000_000); // Delay for at least n instruction cycles
}
};
defmt::panic!("Uh oh, reached unreachable code!");
}
<file_sep>#![no_std]
#![no_main]
use stm32f4_playground as _; // Global logger + panicking-behavior
use embedded_nrf24l01::{Configuration, CrcMode, DataRate, NRF24L01};
use hal::prelude::*;
use hal::spi::{Mode, Phase, Polarity, Spi};
use stm32f4xx_hal as hal;
macro_rules! success_blink {
($led: expr, $delay: expr) => {
for _ in 0..10 {
$led.set_high().unwrap();
$delay.delay_ms(200_u32);
$led.set_low().unwrap();
$delay.delay_ms(200_u32);
}
};
}
#[cortex_m_rt::entry]
fn main() -> ! {
if let (Some(dp), Some(cp)) = (
hal::stm32::Peripherals::take(),
cortex_m::peripheral::Peripherals::take(),
) {
let rcc = dp.RCC.constrain();
let clocks = rcc.cfgr.sysclk(48.mhz()).freeze();
let gpioc = dp.GPIOC.split();
let gpioa = dp.GPIOA.split();
// On-board LED
let mut led = gpioc.pc13.into_push_pull_output();
// Delay
let mut delay = hal::delay::Delay::new(cp.SYST, clocks);
// SPI Setup
let sck = gpioa.pa5.into_alternate_af5();
let miso = gpioa.pa6.into_alternate_af5();
let mosi = gpioa.pa7.into_alternate_af5();
let spi = Spi::spi1(
dp.SPI1,
(sck, miso, mosi),
Mode {
polarity: Polarity::IdleLow,
phase: Phase::CaptureOnFirstTransition,
},
hal::time::KiloHertz(8000).into(),
clocks,
);
// CE and CSN pins for nrf24l01
let ce = gpioa.pa4.into_push_pull_output();
let csn = gpioa.pa3.into_push_pull_output();
// nrf24l01 setup
let mut radio = NRF24L01::new(ce, csn, spi).unwrap();
radio.set_frequency(8).unwrap();
radio.set_auto_retransmit(0, 0).unwrap();
radio.set_rf(&DataRate::R2Mbps, 3).unwrap();
radio
.set_pipes_rx_enable(&[true, false, false, false, false, false])
.unwrap();
radio.set_auto_ack(&[false; 6]).unwrap();
radio.set_crc(CrcMode::Disabled).unwrap();
radio.set_tx_addr(b"stm32").unwrap();
radio.set_rx_addr(0, b"stm32").unwrap();
let mut tx = radio.tx().unwrap();
let data = b"hello";
loop {
led.set_high().unwrap(); // OFF
delay.delay_ms(500_u32);
if let Ok(_) = tx.wait_empty() {
// Sending message
led.set_low().unwrap(); // ON
if let Ok(true) = tx.can_send() {
if tx.send(data).is_ok() {
success_blink!(led, delay);
}
}
}
led.set_high().unwrap(); // OFF
delay.delay_ms(500_u32);
}
}
loop {}
}
<file_sep>#![deny(unsafe_code)]
#![no_std]
#![no_main]
use stm32f4_playground as _; // Global logger + panicking-behavior
use core::{cell::RefCell, ops::Deref};
use cortex_m::{asm::wfi, interrupt::Mutex, peripheral::syst::SystClkSource};
use stm32f4::stm32f401 as device;
static GPIOC: Mutex<RefCell<Option<device::GPIOC>>> = Mutex::new(RefCell::new(None));
#[cortex_m_rt::entry]
fn main() -> ! {
defmt::info!("Starting blinky via SysTick interrupts!");
// Take ownership of the core & device peripheral singletons
if let (Some(cp), Some(dp)) = (
cortex_m::Peripherals::take(),
device::Peripherals::take(),
) {
cortex_m::interrupt::free(move |cs| {
// Take and own SYST (systick) out of cp
let mut systick = cp.SYST;
// Take and own RCC RegisterBlock out of dp
let rcc = dp.RCC;
// Take and own the GPIOC struct out of dp
let gpioc = dp.GPIOC;
// Use internal clock provided by the core for SysTick
// NOTE: See `clock_configuration.rs` for how to configure the clock frequency
systick.set_clock_source(SystClkSource::Core);
// Reload value must be less than 0x00FFFFFF
systick.set_reload(1_440_000 - 1);
systick.clear_current();
// Enable GPIOC clock
rcc.ahb1enr.write(|w| w.gpiocen().enabled());
// Set PC13 as an output
gpioc.moder.write(|w| w.moder13().output());
// Set PC13 to low speed (default)
gpioc.ospeedr.write(|w| w.ospeedr13().low_speed());
// Set PC13 as no pull-up, no pull-down
gpioc.pupdr.write(|w| w.pupdr13().floating());
// Transfer GPIOC into shared global structure
GPIOC.borrow(cs).replace(Some(gpioc));
// Enable SysTick counter & interrupt
systick.enable_counter();
systick.enable_interrupt();
});
};
loop {
wfi();
}
}
// This is the exception handler that gets called when the SysTick
// triggers an exception after its countdown
#[cortex_m_rt::exception]
fn SysTick() {
cortex_m::interrupt::free(|cs| {
if let Some(ref gpioc) = GPIOC.borrow(cs).borrow().deref() {
if gpioc.odr.read().odr13().is_high() {
gpioc.odr.write(|w| w.odr13().low()); // ON
} else {
gpioc.odr.write(|w| w.odr13().high()); // OFF
}
}
});
}
<file_sep>#![deny(unsafe_code)]
#![no_std]
#![no_main]
use stm32f4_playground as _; // Global logger + panicking-behavior
use core::{cell::RefCell, ops::Deref};
use cortex_m::{asm::wfi, interrupt::Mutex, peripheral::syst::SystClkSource};
use stm32f4::stm32f401 as device;
static GPIOC: Mutex<RefCell<Option<device::GPIOC>>> = Mutex::new(RefCell::new(None));
#[cortex_m_rt::entry]
fn main() -> ! {
defmt::info!("Gotta go fast!");
// Take ownership of the core & device peripheral singletons
if let (Some(cp), Some(dp)) = (
cortex_m::Peripherals::take(),
device::Peripherals::take(),
) {
cortex_m::interrupt::free(move |cs| {
// Take and own SYST (systick) out of cp
let mut systick = cp.SYST;
// Take and own FLASH & RCC RegisterBlock out of dp
let (flash, rcc) = (dp.FLASH, dp.RCC);
// Take and own the GPIOC struct out of dp
let gpioc = dp.GPIOC;
// Initialize clock to use PLL and HSI for 84 MHz frequency
system_clock_init(&flash, &rcc);
// Use internal clock provided by the core for SysTick
systick.set_clock_source(SystClkSource::Core);
// Reload value must be less than 0x00FFFFFF
            // With N = 8_400_000, light toggles every 100 ms at f_clk = 84 MHz
            systick.set_reload(8_400_000 - 1);
systick.clear_current();
// Enable GPIOC clock
rcc.ahb1enr.write(|w| w.gpiocen().enabled());
// Set PC13 as an output
gpioc.moder.write(|w| w.moder13().output());
// Set PC13 to low speed (default)
gpioc.ospeedr.write(|w| w.ospeedr13().low_speed());
// Set PC13 as no pull-up, no pull-down
gpioc.pupdr.write(|w| w.pupdr13().floating());
// Transfer GPIOC into shared global structure
GPIOC.borrow(cs).replace(Some(gpioc));
// Enable SysTick counter & interrupt
systick.enable_counter();
systick.enable_interrupt();
});
};
loop {
wfi();
}
}
/// SystemCoreClock = ((INPUT_CLK / PLL_M) * PLL_N) / PLL_P
/// See Section 6, Figure 12 of RM0368
fn system_clock_init(flash: &device::FLASH, rcc: &device::RCC) {
// TODO: Replace this with safe code after PAC update
// https://github.com/stm32-rs/stm32-rs/pull/374
// To read data from FLASH memory, the correct number of wait states must
    // be set: two wait states when 60 MHz < HCLK ≤ 84 MHz and V_DD is 2.7 V to 3.6 V
#[allow(unsafe_code)]
unsafe {
flash.acr.write(|w| w.bits(2));
}
// Enable the Internal High Speed oscillator (HSI)
rcc.cr.modify(|_, w| w.hsion().on());
while rcc.cr.read().hsirdy().is_not_ready() {}
// Select HSI as clock source for PLL
rcc.pllcfgr.modify(|_, w| w.pllsrc().hsi());
// Configure PLL to output 84 MHz, where HSI = 16 MHz
// ((16 / 16) * 336) / 4 = 84
#[allow(unsafe_code)]
unsafe {
rcc.pllcfgr.modify(|_, w| {
w.pllm()
.bits(16)
.plln()
.bits(336)
.pllp()
.div4()
.pllq()
.bits(7)
});
}
// Enable Phase Lock Loop (PLL)
rcc.cr.modify(|_, w| w.pllon().on());
while rcc.cr.read().pllrdy().is_not_ready() {}
// AHB will run at 84 MHz, APB1 at 42 MHz, APB2 at 84 MHz
rcc.cfgr
.modify(|_, w| w.hpre().div1().ppre1().div2().ppre2().div1());
// Select PLL as system clock input
rcc.cfgr.modify(|_, w| w.sw().pll());
while !rcc.cfgr.read().sws().is_pll() {}
}
// This is the exception handler that gets called when the SysTick
// triggers an exception after its countdown
#[cortex_m_rt::exception]
fn SysTick() {
cortex_m::interrupt::free(|cs| {
if let Some(ref gpioc) = GPIOC.borrow(cs).borrow().deref() {
if gpioc.odr.read().odr13().is_high() {
gpioc.odr.write(|w| w.odr13().low()); // ON
} else {
gpioc.odr.write(|w| w.odr13().high()); // OFF
}
}
});
}
<file_sep>#![no_std]
#![no_main]
/// ADXL345 via I2C
/// Make sure the CS and SDO pins are tied high
///
/// Dear reader: This is horrible code. Solely for learning purposes. It is not supposed to
/// represent a proper driver in any way.
///
/// TODO: Add some error checking like: https://github.com/stm32-rs/stm32f4xx-hal/blob/master/src/i2c.rs#L804
/// TODO: Look into the typestate pattern to ensure enable_clock() is called before moving on
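/// A minimal typestate sketch of that idea (purely illustrative; not used by the code
/// below, and the wrapper names are made up):
/// ```ignore
/// use core::marker::PhantomData;
/// struct ClockDisabled;
/// struct ClockEnabled;
/// struct I2c<State> { regs: device::I2C1, _state: PhantomData<State> }
/// impl I2c<ClockDisabled> {
///     // Consumes `self`, so `init` is only reachable on an `I2c<ClockEnabled>`
///     fn enable_clock(self, rcc: &device::RCC) -> I2c<ClockEnabled> {
///         rcc.apb1enr.modify(|_, w| w.i2c1en().enabled());
///         I2c { regs: self.regs, _state: PhantomData }
///     }
/// }
/// impl I2c<ClockEnabled> {
///     fn init(&self) { /* configure CR1/CR2/CCR/TRISE here */ }
/// }
/// ```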
use cortex_m::asm::delay;
use stm32f4::stm32f401 as device;
use stm32f4_playground as _; // Global logger + panicking-behavior
const ADXL345_ADDRESS: u8 = 0x53;
#[allow(non_camel_case_types)]
#[allow(dead_code)]
enum ADXL345_Reg {
DEVID = 0x0, // Device ID
DATAX0 = 0x32, // X-axis data 0 (read 6 bytes for X/Y/Z)
POWER_CTL = 0x2D, // Power-saving features control
DATA_FORMAT = 0x31, // Controls the presentation of data
BW_RATE = 0x2c,
}
#[cortex_m_rt::entry]
fn main() -> ! {
defmt::info!("TODO!");
// Take ownership of the device peripherals singleton
if let Some(dp) = device::Peripherals::take() {
// Take and own RCC RegisterBlock out of dp
let rcc = dp.RCC;
// Take and own GPIOB & I2C1 out of dp
let (gpiob, i2c1) = (dp.GPIOB, dp.I2C1);
/* GPIO configuration: PB6 = SCL1, PB7 = SDA1 */
// Enable clock for GPIOB
rcc.ahb1enr.write(|w| w.gpioben().enabled());
// Set mode of PB6-7 as alternative function
gpiob
.moder
.write(|w| w.moder6().alternate().moder7().alternate());
// Alternate function mapping 4 for I2C1 (see DS9716 datasheet)
gpiob.afrl.write(|w| w.afrl6().af4().afrl7().af4());
// Set GPIO speed for PB6-7 as high speed
gpiob
.ospeedr
.write(|w| w.ospeedr6().high_speed().ospeedr7().high_speed());
// Set PB6-7 as open drain
gpiob
.otyper
.write(|w| w.ot6().open_drain().ot7().open_drain());
/* I2C1 setup */
i2c1.enable_clock(&rcc);
i2c1.init();
defmt::info!("I2C Initialization Complete");
let mut data = [0; 6];
// i2c1.write(ADXL345_ADDRESS, &[ADXL345_Reg::DEVID as u8]);
// i2c1.read(ADXL345_ADDRESS, &mut data[..1]);
// defmt::info!("Device ID: {}", data[0]);
// Put the device into measurement mode by setting the Measure Bit in POWER_CTL
i2c1.write(ADXL345_ADDRESS, &[ADXL345_Reg::POWER_CTL as u8, 0x8]);
// Set D0 = 1, D1 = 1 to get range of +- 16 g
i2c1.write(ADXL345_ADDRESS, &[ADXL345_Reg::DATA_FORMAT as u8, 0x3]);
loop {
i2c1.write(ADXL345_ADDRESS, &[ADXL345_Reg::DATAX0 as u8]);
i2c1.read(ADXL345_ADDRESS, &mut data);
defmt::info!("X: {:?}", format(&data[0..2]));
defmt::info!("Y: {:?}", format(&data[2..4]));
defmt::info!("Z: {:?}", format(&data[4..6]));
delay(5_000_000); // Delay for at least n instruction cycles
}
};
defmt::panic!("Uh oh, reached unreachable code!");
}
/// Returns the acceleration in units of g (1.0 g ≈ 9.8 m/s^2)
fn format(val: &[u8]) -> f32 {
let value = ((val[1] as i16) << 8) | val[0] as i16;
// let value = value as i16;
(value as f32 * ((16 * 2) as f32 / 1024.0)) as f32
}
trait I2CExt {
fn enable_clock(&self, rcc: &device::RCC);
fn init(&self);
fn write(&self, addr: u8, bytes: &[u8]);
fn read(&self, addr: u8, bytes: &mut [u8]);
}
impl I2CExt for device::I2C1 {
fn enable_clock(&self, rcc: &device::RCC) {
rcc.apb1enr.modify(|_, w| w.i2c1en().enabled());
// Stall the pipeline to work around erratum 2.1.13 (DM00037591)
cortex_m::asm::dsb();
}
fn init(&self) {
// Disable I2C so we can configure it
self.cr1.modify(|_, w| w.pe().disabled());
// I2C mode
self.cr1.modify(|_, w| w.smbus().i2c());
// Enable clock stretching
self.cr1.modify(|_, w| w.nostretch().enabled());
// Enable analog noise filter, disable digital noise filter
self.fltr.write(|w| w.anoff().enabled().dnf().no_filter());
// 16 MHz frequency, assume default 16 MHz HSI being used for APB1
let freq: u8 = 16;
self.cr2.write(|w| unsafe { w.freq().bits(freq) });
// Configure correct maximum rise time
let trise: u32 = (freq as u32 * 300) / 1000 + 1;
self.trise.write(|w| w.trise().bits(trise as u8));
// Configure as Fm (fast) mode, max 400 kHz SCL clock frequency
self.ccr.modify(|_, w| w.f_s().fast());
// Fm mode 2:1 duty cycle, meaning DUTY = 0
self.ccr.modify(|_, w| w.duty().duty2_1());
// Let's use a 400 kHz SCL frequency (see RM0368 p.503):
// f_SCL ~= 1 / (T_high + T_low)
// where T_high = CCR*T_PCLK, T_low = 2*CCR*T_PCLK, and T_PCLK = 1/f_PCLK
// Then, CCR = f_PCLK / (3 * f_SCL) = 16 MHz / (3 * 400 kHz) ~= 13
self.ccr.modify(|_, w| unsafe { w.ccr().bits(13) });
// Enable I2C
self.cr1.modify(|_, w| w.pe().enabled());
}
fn write(&self, addr: u8, bytes: &[u8]) {
// Send a START condition
self.cr1.modify(|_, w| w.start().start());
// Wait until the START condition is generated
while self.sr1.read().sb().is_no_start() {}
// Wait until back in Master mode (MSL = 1) and communication in progress (BUSY = 1)
while {
let sr2 = self.sr2.read();
sr2.msl().bit_is_clear() && sr2.busy().bit_is_clear()
} {}
// Send slave address
self.dr.write(|w| unsafe { w.bits(u32::from(addr) << 1) });
// Wait until address is sent
while self.sr1.read().addr().bit_is_clear() {}
// Clear ADDR condition by reading SR2
self.sr2.read();
for c in bytes {
// Wait until DR is empty
while self.sr1.read().tx_e().is_not_empty() {}
// Write a byte
self.dr.write(|w| unsafe { w.bits(u32::from(*c)) });
// Wait until byte has been transferred
while self.sr1.read().btf().is_not_finished() {}
}
// Send a STOP condition
self.cr1.modify(|_, w| w.stop().stop());
        // Wait until the STOP condition has been sent (hardware clears the STOP bit)
        while self.cr1.read().stop().is_stop() {}
}
fn read(&self, addr: u8, bytes: &mut [u8]) {
if let Some((last, bytes)) = bytes.split_last_mut() {
// Send a START condition
self.cr1.modify(|_, w| w.start().start());
// Set ACK bit
self.cr1.modify(|_, w| w.ack().ack());
// Wait until the START condition is generated
while self.sr1.read().sb().is_no_start() {}
// Wait until back in Master mode (MSL = 1) and communication in progress (BUSY = 1)
while {
let sr2 = self.sr2.read();
sr2.msl().bit_is_clear() && sr2.busy().bit_is_clear()
} {}
// Send slave address
self.dr
.write(|w| unsafe { w.bits((u32::from(addr) << 1) + 1) });
// Wait until address is sent
while self.sr1.read().addr().bit_is_clear() {}
// Clear ADDR condition by reading SR2
self.sr2.read();
for c in bytes {
// Wait until DR is not empty
while self.sr1.read().rx_ne().is_empty() {}
// Receive a byte
*c = self.dr.read().bits() as u8;
}
// Set NACK bit
self.cr1.modify(|_, w| w.ack().nak());
// Send a STOP condition to stop receiving
self.cr1.modify(|_, w| w.stop().stop());
// Read in last byte
while self.sr1.read().rx_ne().is_empty() {}
*last = self.dr.read().bits() as u8;
            // Wait until the STOP condition has been sent (hardware clears the STOP bit)
            while self.cr1.read().stop().is_stop() {}
}
}
}
<file_sep>#![no_std]
#![no_main]
use stm32f4_playground as _; // Global logger + panicking-behavior
use embedded_nrf24l01::{Configuration, CrcMode, DataRate, NRF24L01};
use hal::prelude::*;
use hal::spi::{Mode, Phase, Polarity, Spi};
use stm32f4xx_hal as hal;
#[cortex_m_rt::entry]
fn main() -> ! {
if let (Some(dp), Some(cp)) = (
hal::stm32::Peripherals::take(),
cortex_m::peripheral::Peripherals::take(),
) {
let rcc = dp.RCC.constrain();
let clocks = rcc.cfgr.sysclk(48.mhz()).freeze();
// let gpioc = dp.GPIOC.split();
let gpioa = dp.GPIOA.split();
// On-board LED
// let mut led = gpioc.pc13.into_push_pull_output();
// Delay
let mut delay = hal::delay::Delay::new(cp.SYST, clocks);
// SPI Setup
let sck = gpioa.pa5.into_alternate_af5();
let miso = gpioa.pa6.into_alternate_af5();
let mosi = gpioa.pa7.into_alternate_af5();
let spi = Spi::spi1(
dp.SPI1,
(sck, miso, mosi),
Mode {
polarity: Polarity::IdleLow,
phase: Phase::CaptureOnFirstTransition,
},
hal::time::KiloHertz(8000).into(),
clocks,
);
// CE and CSN pins for nrf24l01
let ce = gpioa.pa4.into_push_pull_output();
let csn = gpioa.pa3.into_push_pull_output();
// nrf24l01 setup
let mut radio = NRF24L01::new(ce, csn, spi).unwrap();
radio.set_frequency(8).unwrap();
radio.set_auto_retransmit(0, 0).unwrap();
radio.set_rf(&DataRate::R2Mbps, 3).unwrap();
radio
.set_pipes_rx_enable(&[true, false, false, false, false, false])
.unwrap();
radio.set_auto_ack(&[false; 6]).unwrap();
radio.set_crc(CrcMode::Disabled).unwrap();
radio.set_rx_addr(0, b"stm32").unwrap();
radio.set_tx_addr(b"stm32").unwrap();
let mut rx = radio.rx().unwrap();
loop {
defmt::info!("Waiting...");
if let Ok(_) = rx.can_read() {
// if let Ok(false) = rx.is_empty() {
if let Ok(data) = rx.read() {
defmt::info!("Data received: {:?}", data.as_ref());
} else {
defmt::info!("ERROR");
}
// }
}
delay.delay_ms(1000_u32);
}
}
loop {}
}
<file_sep>#![deny(unsafe_code)]
#![no_std]
#![no_main]
use core::char;
use stm32f4::stm32f401 as device;
use stm32f4_playground as _; // Global logger + panicking-behavior
#[cortex_m_rt::entry]
fn main() -> ! {
defmt::info!("Configuring USART1 to 9600 baud. PB6 is TX and PB7 is RX!");
// Take ownership of the device peripherals singleton
if let Some(dp) = device::Peripherals::take() {
let rcc = dp.RCC;
let gpiob = dp.GPIOB;
let usart1 = dp.USART1;
/* GPIO configuration: PB6 = USART1_TX, PB7 = USART1_RX */
// Enable clock for GPIOB
rcc.ahb1enr.write(|w| w.gpioben().enabled());
// Set mode of PB6-7 as alternative function
gpiob
.moder
.write(|w| w.moder6().alternate().moder7().alternate());
// Alternate function mapping 7 for USART1 (see DS9716 datasheet)
gpiob.afrl.write(|w| w.afrl6().af7().afrl7().af7());
// Set GPIO speed for PB6-7 as high speed
gpiob
.ospeedr
.write(|w| w.ospeedr6().high_speed().ospeedr7().high_speed());
// Set PB6-7 as pull-up
gpiob
.pupdr
.write(|w| w.pupdr6().pull_up().pupdr7().pull_up());
// Set PB6-7 as push-pull
gpiob
.otyper
.write(|w| w.ot6().push_pull().ot7().push_pull());
/* USART1 configuration */
// Enable clock for USART1
rcc.apb2enr.modify(|_, w| w.usart1en().enabled());
// Disable USART1 while we configure it
usart1.cr1.modify(|_, w| w.ue().disabled());
// Set data length to 8 bits
usart1.cr1.modify(|_, w| w.m().m8());
// Select 1 stop bit
usart1.cr2.modify(|_, w| w.stop().stop1());
// Set parity control as no parity
usart1.cr1.modify(|_, w| w.pce().disabled());
// Oversampling by 16, means OVER8 = 0
usart1.cr1.modify(|_, w| w.over8().oversample16());
// Set baudrate of 9600, assuming 16 MHz clock
// USARTDIV = f_clk / (8 * (2 - OVER8) * baudrate)
// = 16 MHz / (8 * (2 - 0) * 9600)
// = 104.17
// DIV_Fraction = 16*0.17 = 2.72 ~= 0x3
// DIV_Mantissa = 104.17 ~= 0x68
usart1
.brr
.modify(|_, w| w.div_mantissa().bits(0x68).div_fraction().bits(0x3));
// Enable transmission and reception
usart1.cr1.modify(|_, w| w.re().enabled().te().enabled());
// Enable USART1
usart1.cr1.modify(|_, w| w.ue().enabled());
// NOTE: This is very inefficient. Only for learning purposes.
let mut read_byte: u16;
loop {
// Wait until hardware sets RXNE bit
while !usart1.sr.read().rxne().bit_is_set() {}
// Reading from DR clears RXNE flag
read_byte = usart1.dr.read().dr().bits();
// Wait until hardware sets TXE bit
while !usart1.sr.read().txe().bit_is_set() {}
// Write the received character back as uppercase (if applicable)
if let Some(c) = char::from_u32(read_byte as u32) {
// Writing to DR clears TXE bit
usart1
.dr
.write(|w| w.dr().bits(c.to_ascii_uppercase() as u16));
}
// Wait until TC = 1
while usart1.sr.read().tc().bit_is_clear() {}
}
};
defmt::panic!();
}
<file_sep>#![no_std]
#![no_main]
use stm32f4_playground as _; // Global logger + panicking-behavior
use chacha20poly1305::aead::heapless::{consts::U128, Vec};
use chacha20poly1305::aead::{AeadInPlace, NewAead};
use chacha20poly1305::{ChaCha8Poly1305, Key, Nonce};
use stm32f4xx_hal::otg_fs::{UsbBus, USB};
use stm32f4xx_hal::{prelude::*, stm32};
use usb_device::prelude::*;
use usbd_serial;
static mut EP_MEMORY: [u32; 1024] = [0; 1024];
/// Accepts a string via USB serial, encrypts the given string, then outputs the
/// encrypted string back out via the USB connection
/// Accepted strings should be terminated with ~ character
/// Ex. "Hello world~"
#[cortex_m_rt::entry]
fn main() -> ! {
defmt::info!("Unplug your debugger and send messages to be encrypted over USB!");
let dp = stm32::Peripherals::take().unwrap();
let rcc = dp.RCC.constrain();
let clocks = rcc.cfgr
.use_hse(25.mhz())
.sysclk(48.mhz())
.require_pll48clk()
.freeze();
let gpioa = dp.GPIOA.split();
let usb = USB {
usb_global: dp.OTG_FS_GLOBAL,
usb_device: dp.OTG_FS_DEVICE,
usb_pwrclk: dp.OTG_FS_PWRCLK,
pin_dm: gpioa.pa11.into_alternate_af10(),
pin_dp: gpioa.pa12.into_alternate_af10(),
hclk: clocks.hclk(),
};
let usb_bus = UsbBus::new(usb, unsafe { &mut EP_MEMORY });
let mut serial = usbd_serial::SerialPort::new(&usb_bus);
let mut usb_dev = UsbDeviceBuilder::new(&usb_bus, UsbVidPid(0x16c0, 0x27dd))
.manufacturer("Yusef")
.product("Crypto")
.serial_number("1234")
.device_class(usbd_serial::USB_CLASS_CDC)
.build();
let key = Key::from_slice(b"an example very very secret key.");
let cipher = ChaCha8Poly1305::new(key);
let mut buffer: Vec<u8, U128> = Vec::new();
loop {
if !usb_dev.poll(&mut [&mut serial]) {
continue;
}
let mut tmp = [0u8; 64];
match serial.read(&mut tmp) {
Ok(count) if count > 0 => {
if let Ok(_) = buffer.extend_from_slice(&tmp[0..count]) {
if buffer.ends_with(&[b'~']) {
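                        // NOTE: a fixed nonce is reused here for simplicity; for real use,
                        // an AEAD nonce must be unique for every message under the same key.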
let nonce = Nonce::from_slice(b"unique nonce");
if let Ok(_) = cipher.encrypt_in_place(nonce, b"", &mut buffer) {
serial.write(&buffer).unwrap();
} else {
serial.write(&"ENCRYPTION FAILED".as_bytes()).unwrap();
}
buffer.truncate(0);
};
} else {
buffer.truncate(0);
}
}
_ => {}
}
}
}
<file_sep>[package]
name = "stm32f4-playground"
version = "0.1.0"
authors = ["<NAME> <<EMAIL>>"]
edition = "2018"
[dependencies]
# Provides startup code and useful attributes
# https://docs.rs/cortex-m-rt
cortex-m-rt = "0.6.13"
# Provides low-level access to registers and assembly instructions
# https://docs.rs/cortex-m
cortex-m = "0.7.2"
# Efficient logging framework
# https://docs.rs/defmt
defmt = "0.2.1"
# Transmit defmt log messages over the RTT (Real-Time Transfer) protocol
# https://docs.rs/defmt-rtt
defmt-rtt = "0.2.0"
# Panic handler, feature to log panic messages via defmt
# https://docs.rs/panic-probe
panic-probe = { version = "0.2.0", features = ["print-defmt"] }
# Autogenerated API for the STM32F4 peripherals
# https://docs.rs/crate/stm32f4
# stm32f4 = {version = "0.13.0", features = ["stm32f401", "rt"]}
# Device hardware abstraction library
# https://docs.rs/stm32f4xx-hal
stm32f4xx-hal = {version = "0.9.0", features=["stm32f401", "rt", "usb_fs"]}
# Experimental device-side USB framework
# https://docs.rs/usb-device/0.2.8/usb_device/
usb-device = "0.2.8"
# CDC-ACM USB serial port implementation for usb-device
# https://docs.rs/usbd-serial
usbd-serial = "0.1.1"
# ChaCha20Poly1305 (RFC 8439) AEAD
# https://docs.rs/chacha20poly1305
chacha20poly1305 = {version = "0.7.1", default-features = false, features=["heapless", "reduced-round"]}
# A driver for NRF24L01(+) transceivers on embedded-hal platforms.
# https://docs.rs/embedded-nrf24l01
embedded-nrf24l01 = "0.2"
[features]
# Set logging levels here
default = [ "defmt-default", ]
defmt-default = []
defmt-trace = []
defmt-debug = []
defmt-info = []
defmt-warn = []
defmt-error = []
# cargo build/run
[profile.dev]
codegen-units = 1
debug = 2
debug-assertions = true
incremental = false
opt-level = 3
overflow-checks = true
# cargo test
[profile.test]
codegen-units = 1
debug = 2
debug-assertions = true
incremental = false
opt-level = 3
overflow-checks = true
# cargo build/run --release
[profile.release]
codegen-units = 1
debug = 2
debug-assertions = false
incremental = false
lto = 'fat'
opt-level = 3
overflow-checks = false
# cargo test --release
[profile.bench]
codegen-units = 1
debug = 2
debug-assertions = false
incremental = false
lto = 'fat'
opt-level = 3
overflow-checks = false
|
c8e5a74cebb6e1e796de9028da231428e9799d28
|
[
"TOML",
"Rust",
"Markdown"
] | 19
|
Rust
|
yusefkarim/stm32f4-playground
|
4cd4cc0d55b8f443d8ed321266cc9dab8eadfad1
|
44d1cb807f20b77febb4be645c315a62e6480fc6
|
refs/heads/master
|
<repo_name>MartinThoma/pyspell<file_sep>/pyspell/get_special_chars.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Get all 'special' characters in a text file."""
from builtins import open
import os
def main(filename):
with open(filename, encoding='utf-8') as f:
content = f.read()
chars = {}
for char in content:
if char not in chars:
chars[char] = 1
else:
chars[char] += 1
chars = sorted(chars.items(), key=lambda n: n[1], reverse=True)
for char, count in chars:
print(u"%s: %i" % (char, count))
def is_valid_file(parser, arg):
"""Check if arg is a valid file that already exists on the file
system.
"""
arg = os.path.abspath(arg)
if not os.path.exists(arg):
parser.error("The file %s does not exist!" % arg)
else:
return arg
def get_parser():
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
parser = ArgumentParser(description=__doc__,
formatter_class=ArgumentDefaultsHelpFormatter)
parser.add_argument("-f", "--file",
dest="filename",
type=lambda x: is_valid_file(parser, x),
help="write report to FILE",
metavar="FILE")
return parser
if __name__ == "__main__":
args = get_parser().parse_args()
main(args.filename)
<file_sep>/pyspell/check.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Check spelling of a file."""
import logging
# pyspell files
import utils
def check(text, vocabulary):
"""Check ``text`` for mistakes by using ``vocabulary``."""
pass
def main(input_file, vocabulary_file):
"""Automatically check and correct the spelling of a file."""
vocabulary = utils.read_vocabulary(vocabulary_file)
logging.info("Read %i words.", len(vocabulary))
text = utils.read_text(input_file)
check(text, vocabulary)
def get_parser():
"""Return the parser object for this script."""
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
parser = ArgumentParser(description=__doc__,
formatter_class=ArgumentDefaultsHelpFormatter)
parser.add_argument("-i", "--input", dest="input",
required=True,
help="input FILE",
metavar="FILE")
parser.add_argument("-v", "--vocabulary", dest="vocabulary_file",
required=True,
help="vocabulary file",
metavar="FILE")
return parser
if __name__ == "__main__":
args = get_parser().parse_args()
main(args.input, args.vocabulary_file)
<file_sep>/pyspell/utils.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
def read_vocabulary(vocabulary_file):
"""Read ``vocabulary_file`` and return them in a structured way."""
vocabulary = []
with open(vocabulary_file) as f:
vocabulary = f.readlines()
return set(vocabulary)
def read_text(text, markup=None):
"""Parse ``text`` and return a natural language text without markup."""
return text
<file_sep>/README.md
[](https://travis-ci.org/MartinThoma/pyspell)
[](https://coveralls.io/r/MartinThoma/pyspell?branch=master)
[](https://pythonhosted.org/pyspell)
pyspell
=======
A Python package with interactive and fully-automatic spelling correction as
well as the possibility to create new language files.
This package should contain scripts to check different data formats like
Markdown, TeX and plain text as well as the possibility to check different
languages. Tools to build domain-specific language models should be contained
as well.
Currently, nothing has been implemented. This project is in a pre-alpha phase
where it is not yet clear what should get done, how it should get done, and what
the data formats should look like.
Other, similar projects are being examined.
Interfaces
----------
I would like to have 3 interfaces:
* A command line interface for interactive spelling correction
* A command line interface for spelling correction by programs
* A Python interface
The latter two should return a list of words, each with its probability of being
wrong and its possible corrections, each with the probability of being the correct
correction (assuming the word was wrong):
```text
word;probability to be wrong;list of words with probabilities
acessible;99.2;(accessible,99.9)
```
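In Python, the same information could be exposed as plain data structures rather
than the CSV-like text above. A minimal sketch of the shape such a result might
take (purely illustrative, since nothing has been implemented yet):
```python
# Purely illustrative: one possible shape for the data the programmatic
# interfaces could return; nothing like this is implemented yet.
report = [
    # (word, probability the word is wrong, [(correction, probability it is the right fix), ...])
    ("acessible", 99.2, [("accessible", 99.9)]),
]
for word, p_wrong, corrections in report:
    print(word, p_wrong, corrections)
```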
Current state of the project
----------------------------
This project cannot be used by now. It is a low-priority project of mine. If
you want to help, please send an email to <EMAIL>.<file_sep>/Makefile
docs:
python setup.py upload_docs --upload-dir docs/build/html
upload:
make clean
python3 setup.py sdist bdist_wheel && twine upload dist/*
clean:
python setup.py clean --all
pyclean .
rm -rf *.pyc __pycache__ build dist pyspell.egg-info pyspell/__pycache__ tests/__pycache__ tests/reports docs/build
test:
nosetests --with-coverage --cover-erase --cover-package pyspell --logging-level=INFO --cover-html
cheesecake_index -n pyspell -v
count:
cloc . --exclude-dir=docs,cover,dist,pyspell.egg-info<file_sep>/pyspell/misc/README.md
* Search for protected spaces ( )
* &
* /
* length 1 (^.{1}::)
## Math
×→∨⇒∫Φ˧ʌ±ㅕ<file_sep>/pyspell/gen_language_file.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Generate wordlist of a context by using open sources like websites
(e.g. Wikipedia).
"""
from __future__ import unicode_literals
from builtins import open
from six.moves.urllib.request import urlopen
from six.moves.urllib.parse import urlparse
from six.moves.urllib.parse import urljoin
from bs4 import BeautifulSoup
import re
import nltk
import logging
import sys
import numpy
from functools import reduce  # reduce is not a builtin on Python 3
logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
level=logging.DEBUG,
stream=sys.stdout)
seen_paths = None
def main(url, output_file, max_depth=2, language="english"):
"""Parse ``url`` and get words to generate ``output_file``.
    :param max_depth: Defines how many levels of linked pages should be taken
        into consideration.
"""
global seen_paths
logging.info("Generate word list with '%s' for %s.", url, language)
logging.info("Write word list to '%s'.", output_file)
logging.info("Recursion depth of %i.", max_depth)
word_dict = recursive_webpage_parsing(url, max_depth, language)
# Output
logging.info("Visited %i web pages.", len(seen_paths))
logging.info("Found %i different words.", len(word_dict))
atleast_5 = len([1 for w, v in word_dict.items() if v['count'] >= 5])
words_total = sum([v['count'] for w, v in word_dict.items()])
logging.info("Looked at %i words in total.", words_total)
logging.info("%i words with at least 5 occurences.", atleast_5)
# ARPA (or Doug Paul) format for N-gram backoff models
# see http://www.speech.sri.com/projects/srilm/manpages/ngram-format.5.html
# and http://cmusphinx.sourceforge.net/wiki/sphinx4:standardgrammarformats
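    # For illustration, the file written below looks roughly like this (values made up):
    #   \data\
    #   ngram 1=2
    #   \1-grams:
    #   -1.23456789   the        (log10 probability, then a tab, then the word)
    #   \end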
write(output_file, word_dict)
return word_dict
def write(output_file, word_dict):
words = sorted(word_dict.items(),
key=lambda n: (n[1]['count'], n[0]),
reverse=True)
atleast_5 = len([1 for w, v in word_dict.items() if v['count'] >= 5])
words_total = sum([v['count'] for w, v in word_dict.items()])
with open(output_file, "w", encoding='utf-8') as f:
f.write(u"\\data\\\n")
f.write(u"ngram 1=%i\n" % atleast_5)
f.write(u"\\1-grams:\n")
for word, value in words:
            word = u"%s" % word  # ensure a text (unicode) value on both Python 2 and 3
if value['count'] >= 5:
prob = numpy.log10(value['count'] / float(words_total))
f.write(u"%0.8f\t%s\n" % (prob,
word))
# Find mistakes
# else:
# f.write(u"%s:: %i (%s)\n" %
# (word,
# value['count'],
# str(list(value['occurences'])).replace('/wiki/', '')))
f.write(u"\\end")
def recursive_webpage_parsing(url, max_depth=2, language="english"):
global seen_paths
print("%s : %i" % (url, max_depth))
if seen_paths is None:
seen_paths = [urlparse(url).path]
# Get HTML
try:
response = urlopen(url)
except:
return {}
    html = response.read().decode('utf-8')  # works on both Python 2 and 3
# Prepare what should be looked at
if max_depth > 0:
links = get_hyperlinks(html, url)
links = sorted(links)
else:
links = []
# Parse this page
if ".wikipedia.org" in url:
soup = BeautifulSoup(html)
soup = soup.find(id="mw-content-text")
soup = soup.findAll('p')
        # Remove pronunciation tags
contents = ""
for el in soup:
[s.extract() for s in el('span', {"class": "IPA"})]
[s.extract() for s in el('div', {"class": "portal"})]
[s.extract() for s in el('div', {"class": "sisterproject"})]
[s.extract() for s in el('style')]
contents += el.get_text() + "\n"
else:
soup = BeautifulSoup(html)
contents = soup.get_text()
word_dict = get_words_from_text(contents, urlparse(url).path, language)
# Parse linked pages
other_word_dicts = []
for link in links:
same_domain = urlparse(url).netloc == urlparse(link).netloc
path = urlparse(link).path
if not link.startswith("http"):
# logging.info("Skip '%s': not http", link)
continue
if not same_domain:
# logging.info("Skip '%s': not same_domain", link)
continue
if path in seen_paths:
# logging.info("Skip '%s': seen_paths (%s)", link, path)
continue
continue_t = False
special_prefixes = ["File:", "Special:", "Portal:", "Category:",
"Wikipedia:", "Talk:"]
special_prefixes_german = ["Spezial:", "Datei:", "Hilfe:"]
for prefix in special_prefixes + special_prefixes_german:
if prefix in path:
continue_t = True
if ("wiki/" not in path) or continue_t:
continue
seen_paths.append(path)
local = max_depth-1
returned_dict = recursive_webpage_parsing(link, local, language)
other_word_dicts.append(returned_dict)
word_dict = reduce(join_worddicts, other_word_dicts, word_dict)
return word_dict
def get_hyperlinks(html, base_url):
urls_in_links = []
soup = BeautifulSoup(html)
for link in soup.find_all('a', href=True):
if not link['href'].startswith('#') and \
not link['href'].startswith('http'):
urls_in_links.append(urljoin(base_url, link['href']))
else:
urls_in_links.append(link['href'])
return urls_in_links
def join_worddicts(word_dict1, word_dict2):
for word, value in word_dict2.items():
if word in word_dict1:
word_dict1[word]['count'] += value['count']
word_dict1[word]['occurences'].union(value['occurences'])
else:
word_dict1[word] = {'count': value['count'],
'occurences': value['occurences']}
return word_dict1
def get_words_from_text(text, source, language):
"""Get a natural language text as a string and return a dictionary with
maps words to the number of occurences in the text.
"""
text = single_space(text)
text = text.replace(u"—", u"-") # Type 1 dash
text = text.replace(u"−", u"-") # Type 2 dash
text = text.replace(u"–", u"-") # Type 3 dash
text = text.replace(u"•", u" ")
text = text.replace(u"/", u" ")
text = text.replace(u"|", u" ")
text = remove_references(text)
text = remove_quotes(text)
text = remove_pronounciation(text)
text = remove_tags(text)
text = remove_sentence_starts(text, language)
text = text.replace(u"’", u"'")
text = remove_nonalpha(text)
text = replace_percentages(text)
text = replace_coordinates(text)
text = replace_temperature(text)
text = single_space(text)
# \xa0 is a nonbreaking space
# \xe9 is a e with an accent
special_pattern = re.compile(u".*([^a-zA-Z0-9\-.:; üäöÜÄÖß\xa0\xe9\u0161]+).*")
matches = special_pattern.findall(text)
# for match in matches:
# print(u"%s: %s (%s)" % (source, match, repr(match)))
# print(text.encode('utf-8'))
words = text.split(" ")
word_dict = {}
for word in words:
if word not in word_dict:
word_dict[word] = {'count': 1, 'occurences': set([source])}
else:
word_dict[word]['count'] += 1
word_dict[word]['occurences'].add(source)
return word_dict
def replace_coordinates(text):
pattern = re.compile(u"[0-9]+\xb0\s*")
# u"(?:[0-9]+′\s*)?"
# u"(?:[0-9]+″\s*[NO]?,?)?)")
text = pattern.sub(" COORDINATE ", text)
pattern = re.compile(u"\xb0\s*[NO]")
text = pattern.sub(" COORDINATE ", text)
return text
def replace_temperature(text):
pattern = re.compile(u"\xb0C") #[+-]?[\d]+[,.]?[\d]*\s?
text = pattern.sub(" TEMPERATURE ", text)
return text
def replace_percentages(text):
pattern = re.compile("([+\-])?[0-9,. ]+%")
text = pattern.sub(" PERCENTAGE ", text)
return text
def remove_sentence_starts(text, language="english"):
tokenizer = nltk.data.load('tokenizers/punkt/%s.pickle' % language)
sentences = tokenizer.tokenize(text)
sentences = [" ".join(sentence.split(" ")[1:]) for sentence in sentences]
return ". ".join(sentences)
def remove_nonalpha(text):
pattern = re.compile("[\d\(\)\.,!?:\";\[\]]")
text = pattern.sub(" ", text)
return text
def remove_references(text):
"""Remove everything like [1] or [123] or [3412]."""
pattern = re.compile("\[\d+\]")
return pattern.sub(" ", text)
def remove_quotes(text):
"""Remove 'test'."""
text = text.replace(u"“", '"')
text = text.replace(u"”", '"')
text = text.replace(u"„", '"') # German double left quote
text = text.replace(u"«", '"') # German double left quote
text = text.replace(u"»", '"') # German double left quote
text = text.replace(u"‘", "'")
text = text.replace(u"’", "'")
text = text.replace(u"‚", "'") # German left quote
text = text.replace(u"''", "'")
unquote = lambda m: u' %s ' % m.group(1)
pattern1 = re.compile(u"[\s\(]'(.*?)'[\s\.,;?:\)]")
pattern2 = re.compile(u"[\s\(]“(.*?)”[\s\.,;?:\)]") # correct
pattern3 = re.compile(u"[\s\(]”(.*?)”[\s\.,;?:\)]") # wrong
pattern4 = re.compile(u'[\s\(]"(.*?)"[\s\.,;?:\)]')
pattern5 = re.compile(u'[\s\(]“(.*?)"[\s\.,;?:\)]') # wrong
pattern6 = re.compile(u'[\s\(]"(.*?)”[\s\.,;?:\)]') # wrong
pattern7 = re.compile(u"[\s\(]‘(.*?)'[\s\.,;?:\)]")
text = pattern1.sub(unquote, text)
text = pattern2.sub(unquote, text)
text = pattern3.sub(unquote, text)
text = pattern4.sub(unquote, text)
text = pattern5.sub(unquote, text)
text = pattern6.sub(unquote, text)
text = pattern7.sub(unquote, text)
return text
def remove_pronounciation(text):
"""e.g. /x/"""
pattern = re.compile("/[^ ]*?/")
return pattern.sub(" ", text)
def remove_tags(text):
"""e.g. <references>"""
pattern = re.compile("<.*?>")
return pattern.sub(" ", text)
def remove_wiki_tags(text):
"""e.g. {{reflist}}"""
pattern = re.compile(u"\{\{.*\}\}")
return pattern.sub(" ", text)
def single_space(text):
pattern = re.compile(u"[\s\xa0]+", re.DOTALL)
text = pattern.sub(" ", text)
# pattern = re.compile("\n\n+")
# text = pattern.sub(" ", text)
return text
def get_parser():
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
parser = ArgumentParser(description=__doc__,
formatter_class=ArgumentDefaultsHelpFormatter)
parser.add_argument("-u", "--url",
dest="url",
default=("https://en.wikipedia.org/wiki/"
"English_language"),
help="generate wordlist from this page",
metavar="URL")
parser.add_argument("-l", "--language",
dest="language",
default="english",
help=("for which language do you want to create a "
"wordlist?"),
metavar="LANG")
parser.add_argument("-o", "--output",
default="wordlist.txt",
dest="output_file",
help="write wordlist to FILE", metavar="FILE")
parser.add_argument("-d", "--max_depth",
dest="max_depth",
default=2,
type=int,
help="how many pages should get viewed by recursion?")
return parser
if __name__ == "__main__":
args = get_parser().parse_args()
main(args.url, args.output_file, args.max_depth, args.language)
<file_sep>/bin/pyspell
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Spell-checker for various languages and data formats."""
import argparse
import logging
import sys
logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
level=logging.DEBUG,
stream=sys.stdout)
# pyspell modules
import pyspell
from pyspell import check
from pyspell import gen_language_file
# Every pyspell tool that should be available through
# pyspell TOOL
# has to be added to ``get_parser()`` and to ``main``.
def get_parser():
"""Return the parser object for this script."""
parser = argparse.ArgumentParser(description=__doc__,
prog='pyspell')
subparsers = parser.add_subparsers(dest='cmd')
subparsers.add_parser('check',
add_help=False,
description=("check spelling, grammar and "
"conventions"),
parents=[check.get_parser()])
subparsers.add_parser('gen_language_file',
add_help=False,
description="""Generate a language file.""",
parents=[gen_language_file.get_parser()])
parser.add_argument('--version',
action='version',
version=('pyspell %s' % str(pyspell.__version__)))
return parser
def main(args):
if args.cmd == 'check':
check.main(args.input, args.vocabulary_file)
elif args.cmd == 'gen_language_file':
gen_language_file.main(args.url,
args.output_file,
args.max_depth,
args.language)
else:
logging.info('No such tool.')
if __name__ == '__main__':
args = get_parser().parse_args()
main(args)
<file_sep>/pyspell/__init__.py
"""pyspell is a collection of tools for checking the spelling, grammar and
conventions of documents.
If any questions are not covered by the documentation or if difficulties
with using pyspell occur, please contact <EMAIL>"""
from pkg_resources import get_distribution, DistributionNotFound
import os.path
try:
_dist = get_distribution('pyspell')
# Normalize case for Windows systems
dist_loc = os.path.normcase(_dist.location)
here = os.path.normcase(__file__)
if not here.startswith(os.path.join(dist_loc, 'pyspell')):
# not installed, but there is another version that *is*
raise DistributionNotFound
except DistributionNotFound:
__version__ = 'Please install this project with setup.py'
else:
__version__ = _dist.version
<file_sep>/tests/check_test.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import nose
# pyspell modules
import pyspell.check as check
# Tests
def execution_test():
check.get_parser()
|
a1f8d77d92cd797e39e24a263a51db6d81749562
|
[
"Markdown",
"Python",
"Makefile"
] | 10
|
Python
|
MartinThoma/pyspell
|
642d9ed4159b4a99479b0fc5415432e26c9d7778
|
21ff0a6e0977e62e784fbe6857169b3c6ce9614e
|
refs/heads/master
|
<repo_name>smacneil1/bild_signatures<file_sep>/Rmarkdowns_scripts/ICBP/25Feb15_ICBP__correlation_bootstrap_wej.Rmd
---
title: "Pathway Predictions/Drug Response Correlations"
author: "Shelley"
date: "February 9, 2015"
output: html_document
---
Read in the single and multi-pathway ASSIGN predictions
```{r}
setwd("~/Desktop/boot/")
drug_response_data<-read.delim("ICBP_drugs.txt", header=1, sep='\t',row.names=1)
multipathway_data<-read.delim("multipathway_preds.txt", header=1, sep='\t',row.names=1)
singlepathway_data=read.csv("single_pathway_results.csv")
row.names(singlepathway_data)=singlepathway_data[,1]
singlepathway_data=singlepathway_data[,-1]
```
Merge the pathway predictions and the ICBP drugs
```{r}
merge_drop<-function(x,y,by=0)
{
new_m<-merge(x,y,by=by)
rownames(new_m)<-new_m$Row.names
return(new_m[,2:length(colnames(new_m))])
}
pred_drug_multi<-merge_drop(multipathway_data,drug_response_data,by=0)
pred_drug_single<-merge_drop(singlepathway_data,drug_response_data,by=0)
```
Perform correlations and bootstrap, save to file
```{r}
drug_prediction_correlations_multi_spear= cor(pred_drug_multi[1:25],pred_drug_multi[36:125],use="na.or.complete", method="spearman")
#### Bootstrap:
pathways = 25 # number of pathway signatures
samplesize = nrow(pred_drug_multi) # number of cell lines
n.boot = 10000 # number of bootstrap samples -- set at 10,000 or more for your final run
boot_cors = array(0,dim=c(25,90,n.boot)) # make a three dimensional array to store the bootstrap results
for (i in 1:n.boot){
boot.sample = sample(1:samplesize,replace=T)
  boot_cors[,,i]=cor(pred_drug_multi[boot.sample,1:25],pred_drug_multi[boot.sample,36:125],use="na.or.complete", method="spearman")
}
# means
cor_mean = apply(boot_cors, c(1,2), mean, na.rm=T) ## average bootstrap cors. Should be similar to the non-boot values
dimnames(cor_mean)=dimnames(drug_prediction_correlations_multi_spear)
View(cor_mean)
write.table(cor_mean,"~/Documents/ThesisWork/GitRepos/bild_signatures/cor_mean.txt",sep='\t', col.names = NA,quote=F)
cor_means_ICBP=read.delim("~/Documents/ThesisWork/GitRepos/bild_signatures/cor_mean.txt", header=1, sep='\t',row.names=1)
#lower
lower = apply(boot_cors, c(1,2), quantile,na.rm=T,probs=.025) ## lower 95% CI
dimnames(lower)=dimnames(drug_prediction_correlations_multi_spear)
write.table(lower,"~/Documents/ThesisWork/GitRepos/bild_signatures/lower.txt",sep='\t', col.names = NA,quote=F)
cor_lower_ICBP=read.delim("~/Documents/ThesisWork/GitRepos/bild_signatures/lower.txt", header=1, sep='\t',row.names=1)
#upper
upper = apply(boot_cors, c(1,2), quantile,na.rm=T,probs=.975) ## upper 95% CI
dimnames(upper)=dimnames(drug_prediction_correlations_multi_spear)
write.table(upper,"~/Documents/ThesisWork/GitRepos/bild_signatures/upper.txt",sep='\t', col.names = NA,quote=F)
cor_upper_ICBP=read.delim("~/Documents/ThesisWork/GitRepos/bild_signatures/upper.txt", header=1, sep='\t',row.names=1)
## p-values: to test that a correlation is bigger than some value:
cor_cutoff = 0 ## i.e. to test is the correlation is > or < 0. Note this can be anything, i.e. .1, .5, etc
p_calc = function(x,direction="greater",cor_cut=0){
if (!(direction %in% c("greater","less"))){stop("Invalid value for direction")}
if (direction=="greater"){return(mean(x>cor_cut,na.rm=T))}
if (direction=="less"){return(mean(x<cor_cut,na.rm=T))}
}
ps_above_0 = apply(boot_cors, c(1,2), p_calc)
ps_below_0 = apply(boot_cors, c(1,2), p_calc,direction="less")
## p-values: to test two correlations against each other:
pathway = colnames(pred_drug_multi)[1:25] ## print to see the pathway names
drugs = colnames(pred_drug_multi)[36:125] ## print to see the drug names
# to test
p_calc_compare=function(path1,path2,drug,cor_cut=0,cors=boot_cors,pathlist=pathway,druglist=drugs){
ind_p1 = which(pathlist==path1)
ind_p2 = which(pathlist==path2)
ind_drug = which(druglist==drug)
mean((cors[ind_p1,ind_drug,]-cors[ind_p2,ind_drug,])>cor_cut,na.rm=T)
}
pval_comp = p_calc_compare("AKT_BAD.adap_multi.pathway_activity_testset.csv.akt","AKT_BAD_HER2.adap_multi.pathway_activity_testset.csv.akt","Sigma.AKT1.2.inhibitor")
## pval_comp = 0.91 in this case means that AKT+BAD > AKT+BAD+HER2 in 91% of the bootstrap correlations. Thus the p-value for testing Ha: AKT+BAD > AKT+BAD+HER2 is 0.09
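## Usage note (illustrative only): swapping the two pathway arguments tests the opposite
## direction, e.g.
## p_calc_compare("AKT_BAD_HER2.adap_multi.pathway_activity_testset.csv.akt",
##                "AKT_BAD.adap_multi.pathway_activity_testset.csv.akt",
##                "Sigma.AKT1.2.inhibitor")
## gives the proportion of bootstrap samples in which AKT+BAD+HER2 > AKT+BAD for that drug.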
```
Box plot the results using a for loop
```{r}
#just a test
test =c(lower[1,1],cor_mean[1,1], upper[1,1]) ## the lower, mean, and upper for the first pathway and first drug
# loop through
# Subset each signature from means, lower, and upper.
AKT_means=cor_mean[c(1,3,6,9, 13, 22), ]
dim(AKT_means)
AKT_lower=lower[c(1,3,6,9, 13, 22), ]
AKT_upper=upper[c(1,3,6,9, 13, 22), ]
Her2_means=cor_mean[c(5,14,16,18, 21, 24), ]
dim(Her2_means)
Her2_lower=lower[c(5,14,16,18, 21, 24), ]
Her2_upper=upper[c(5,14,16,18, 21, 24), ]
IGFR_means=cor_mean[c(8,10,12,17, 19, 25), ]
dim(IGFR_means) #6
IGFR_lower=lower[c(8,10,12,17, 19, 25), ]
IGFR_upper=upper[c(8,10,12,17, 19, 25), ]
BAD_means=cor_mean[c(2,4,7,11, 15, 20, 23), ]
dim(BAD_means) # 7
BAD_lower=lower[c(2,4,7,11, 15, 20, 23), ]
BAD_upper=upper[c(2,4,7,11, 15, 20, 23), ]
#create a function that loops through the rows and columns of each matrix and boxplots each pathway/drug combination
makeMatrix=function(lowerMatrix, meanMatrix, upperMatrix){
final=tmp=name=NULL
Drug=NULL
for (i in 1:nrow(meanMatrix) ){
for( j in 1:ncol(meanMatrix)){
Drug =c(lowerMatrix[i,j],meanMatrix[i,j], upperMatrix[i,j])
name=c(name,(paste(rownames(meanMatrix)[i],colnames(meanMatrix)[j],sep=')')))
name=gsub(".adap_multi.pathway_activity_testset.csv.", "(", name)
tmp=cbind(tmp,Drug)
colnames(tmp)<-name
}
    final[[i]]<-tmp
par(mar=c(1,10,2,2))
    boxplot(tmp, main = "BAD Pathway Drug Correlation Bootstrap", col = 3, las=2, horizontal = TRUE, cex.axis=0.7, boxwex=1, xlab= "Spearman Correlation")
abline(v=0, col = "red", lwd = 2)
tmp=name=NULL
}
return (final)
}
#BAD
pdf("~/BAD_boxplots.pdf")
ba=makeMatrix(BAD_lower, BAD_means, BAD_upper)
dev.off()
#Trying different box plot functions
#Bad_AKTnBADnIGF1R=final
#par(mar=c(19,5,2,1))
#boxplot(ba[1], main = "BAD Pathway Drug Correlation Bootstrap using AKT/BAD/IGF1R", col= ifelse(Bad_AKTnBADnIGF1R <= 0, "red", ifelse(Bad_AKTnBADnIGF1R >=0,"blue", "black")), las=2, cex.axis=0.7, boxwex=.7, ylab= "Spearnman Correlation")
#BAD_AKTandBAD=final
#par(mar=c(19,5,2,1))
#boxplot(BAD_AKTandBAD,main = "BAD Pathway Drug Correlation Bootstrap using AKT and BAD", col = 3, las=2, cex.axis=0.7, boxwex=.5, ylab= "Spearnman Correlation")
#abline(h=0, col = "red", lwd = 2)
#BAD_BADandAKTandHER=final
#par(mar=c(1,5,2,2))
#boxplot(BAD_BADandAKTandHER, main = "BAD Pathway Drug Correlation Bootstrap using AKT and and BAD and HER2 and BAD", col = 3, las=2, horizontal = TRUE, cex.axis=0.7, boxwex=1, ylab= "Spearnman Correlation")
#abline(h=0, col = "red", lwd = 2)
```
Do the same thing for single pathway
```{r}
drug_prediction_correlations_single= cor(pred_drug_single[1:4],pred_drug_single[15:104],use="na.or.complete")
single_and_multi= rbind(drug_prediction_correlations_multi_spear,drug_prediction_correlations_single)
write.table(single_and_multi,"Correlations_ICBP_Single_Multi2.txt",sep='\t', col.names = NA,quote=F)
```
<file_sep>/Rmarkdowns_scripts/ICBP/Heatmap_icbp_3_15.Rmd
---
title: "Heatmaps of pathway predictions in icbp breast cancer cell line based on subtypes"
output: html_document
---
```{r include=FALSE}
source('~/Dropbox/bild_signatures/bild_signatures/Rmarkdowns_scripts//Key_ASSIGN_functions.Rmd', echo=TRUE)
setwd("~/Dropbox/bild_signatures/icbp_15_mar_all/")
filenames_icbp_multi<-system("ls */*/pathway_activity_testset*", intern=TRUE)
filenames_icbp_multi
for(i in 1:length(filenames_icbp_multi))
{
  f<-read.csv(filenames_icbp_multi[i], header=1,row.names=1) ###reading in the files one at a time
colnames(f)<-paste(filenames_icbp_multi[i],colnames(f),sep='/')
if(i==1){
data_icbp<-f
}
else{
data_icbp<-cbind(data_icbp,f)
}
}
head(data_icbp)
dim(data_icbp)
colnames(data_icbp)<-gsub(pattern = "/pathway_activity_testset.csv",replacement = "",x = colnames(data_icbp))
head(data_icbp)
rownames(data_icbp)[1:7]<-c("184A1","184B5","21MT1","21MT2","21NT","21PT","600MPE")
setwd("~/Dropbox/bild_signatures//Datasets")
drugs<-read.delim("ICBP_drugs.txt", header=1, sep='\t',row.names=1)
head(drugs);dim(drugs)
pred_drug<-merge_drop(data_icbp,drugs,by=0)
dim(pred_drug)
```
Creating heatmaps
```{r include=FALSE}
if (!require("gplots")) {
install.packages("gplots", dependencies = TRUE)
library(gplots)
}
if (!require("RColorBrewer")) {
install.packages("RColorBrewer", dependencies = TRUE)
library(RColorBrewer)
}
prediction_heatmap<-function(x,type=NULL)
{
adapB_single=subset(x,select=grep("adapB_single",colnames(x)))
adap_adap_single=subset(x,select=grep("adap_adap_single",colnames(x)))
adapB_multi=subset(x,select=grep("akt_bad_her2_igf1r_erk/adapB",colnames(x)))
adap_adap_multi=subset(x,select=grep("akt_bad_her2_igf1r_erk/adap_adap",colnames(x)))
adapB_multi4=subset(x,select=grep("akt_bad_her2_igf1r/adapB",colnames(x)))
adap_adap_multi4=subset(x,select=grep("akt_bad_her2_igf1r/adap_adap",colnames(x)))
colnames(adapB_single)=colnames(adap_adap_single)=c("AKT","BAD","ERK","HER2","IGF1R")
adapB_single= adapB_single[,c("AKT","BAD","HER2","IGF1R","ERK")]
adap_adap_single=adap_adap_single[,c("AKT","BAD","HER2","IGF1R","ERK")]
colnames(adapB_multi)=colnames(adap_adap_multi)=c("AKT","BAD","HER2","IGF1R","ERK")
colnames(adapB_multi4)=colnames(adap_adap_multi4)=c("AKT","BAD","HER2","IGF1R")
heatmap.2(as.matrix(adapB_single),col=my_palette,margins=c(15,9),dendrogram="none", trace="none",main=paste(type,"Single Adap BG",sep = "\n"),Rowv = NULL, Colv = NULL,density.info = "none",scale = 'row')#cellnote = round(x[,43:46],digits = 2),notecol = 'black'
heatmap.2(as.matrix(adap_adap_single),col=my_palette,margins=c(15,9),dendrogram="none", trace="none",main=paste(type,"Single Adap BGSG",sep = "\n"),Rowv = NULL, Colv = NULL,density.info = "none",scale = 'row')#cellnote = round(x[,43:46],digits = 2),notecol = 'black
heatmap.2(as.matrix(adapB_multi),col=my_palette,margins=c(15,9),dendrogram="none",Rowv = NULL, Colv = NULL, trace="none",main=paste(type,"Multi Adap BG",sep = "\n"),density.info = 'none',scale = 'row')#,cellnote = round(x[,51:54],digits = 2),notecol = 'black',
heatmap.2(as.matrix(adap_adap_multi),col=my_palette,margins=c(15,9),dendrogram="none",Rowv = NULL, Colv = NULL, trace="none",main=paste(type,"Multi Adap BGSG",sep = "\n"),density.info = 'none',scale = 'row')#,cellnote = round(x[,51:54],digits = 2),notecol = 'black',
heatmap.2(as.matrix(adapB_multi4),col=my_palette,margins=c(15,9),dendrogram="none",Rowv = NULL, Colv = NULL, trace="none",main=paste(type,"Multi 4 Adap BG",sep = "\n"),density.info = 'none',scale = 'row')#,cellnote = round(x[,51:54],digits = 2),notecol = 'black',
heatmap.2(as.matrix(adap_adap_multi4),col=my_palette,margins=c(15,9),dendrogram="none",Rowv = NULL, Colv = NULL, trace="none",main=paste(type,"Multi 4 Adap BGSG",sep = "\n"),density.info = 'none',scale = 'row')#,cellnote = round(x[,51:54],digits = 2),notecol = 'black',
}
correlation_heatmap<-function(x,drugs_names=NULL,type=NULL)
{
adapB_single=subset(x,select=grep("adapB_single",colnames(x)))
adap_adap_single=subset(x,select=grep("adap_adap_single",colnames(x)))
adapB_multi=subset(x,select=grep("akt_bad_her2_igf1r_erk/adapB",colnames(x)))
adap_adap_multi=subset(x,select=grep("akt_bad_her2_igf1r_erk/adap_adap",colnames(x)))
cors = pval=matrix(-2,20,7)
#rownames(cors)=c(colnames(x)[43:46],colnames(x)[51:54])#selecting the adaptive multipathway and single pathway prediction columns
pathways<-cbind(adapB_single,adap_adap_single,adapB_multi,adap_adap_multi)
drugs<-subset(x,select=drugs_names)
colnames(cors)=drugs_names
rownames(cors)=colnames(pathways)
rownames(cors)=gsub(pattern = "akt_bad_her2_igf1r_erk/",replacement = "",x = rownames(cors))
#rownames(cors)=gsub(pattern = "*/*/adapB",replacement = "B",x = rownames(cors))
for (i in 1:20){
for (j in 1:length(colnames(drugs))){
ci=cor.ci(cbind(pathways[,i],drugs[,j]),method="spearman",plot=F)
cors[i,j]=ci$rho[2,1]
#print(cors[i,j]);print(ci$ci[[5]])
pval[i,j]=ci$ci[[5]]
}
}
par(mar=c(1, 0.5, 3, 10),lwd=4)
heatmap.2(as.matrix(cors),col=redgreen,margins =c(13,18),dendrogram="none",Rowv = NULL, Colv = NULL, trace="none",main=paste("Correlations of pathway"," drug response in",type,sep='\n '),cellnote = round(cors,digits = 2),notecol = 'black',density.info = 'none')
}
my_palette <- colorRampPalette(c("darkblue","aliceblue","brown4"))(n = 299)
col_breaks = c(seq(0,0.2,length=100), seq(0.2,0.4,length=100), seq(0.4,1,length=100))
```
```{r echo=FALSE}
library(psych)
comb_drug<-pred_drug
drugs_names=c("Sigma.AKT1.2.inhibitor","GSK2141795","GSK2119563","GSK1838705","Lapatinib","BEZ235", "GSK1059868")
#pdf("~/Desktop/subtypes_ICBP.pdf")
basal<-subset(comb_drug,comb_drug$Transcriptional.subtype...ERBB2.status=="Basal")
prediction_heatmap(x=basal,type = "BASAL")
correlation_heatmap(x=basal,drugs_names =drugs_names,type = "BASAL" )
her<-subset(comb_drug,comb_drug$Transcriptional.subtype...ERBB2.status=="ERBB2-amp"|comb_drug$Transcriptional.subtype...ERBB2.status=="ERBB2Amp")
prediction_heatmap(x=her,type = "ERBB2 Amplified")
correlation_heatmap(x=her,drugs_names =drugs_names,type = "ERBB2 amplified" )
claudin<-subset(comb_drug,comb_drug$Transcriptional.subtype...ERBB2.status=="Claudin-low")
prediction_heatmap(x=claudin,type = "Claudin Low")
correlation_heatmap(x=claudin,drugs_names =drugs_names,type = "Claudin Low" ) ### no variance in lapatinib drug response; correlation cannot be determined
luminal<-subset(comb_drug,comb_drug$Transcriptional.subtype...ERBB2.status=="Luminal")
prediction_heatmap(x=luminal,type = "Luminal")
correlation_heatmap(x=luminal,drugs_names =drugs_names,type = "Luminal" ) ### no variance in lapatinib drug response; correlation cannot be determined
#dev.off()
```
Now, trying to see patterns across all the subtypes in ICBP breast cancer cell lines
```{r,echo=FALSE}
adapB_single=rbind(subset(basal,select=grep("adapB_single",colnames(basal))),subset(her,select=grep("adapB_single",colnames(her))),subset(claudin,select=grep("adapB_single",colnames(claudin))),subset(luminal,select=grep("adapB_single",colnames(luminal))))
adap_adap_single=rbind(subset(basal,select=grep("adap_adap_single",colnames(basal))),subset(her,select=grep("adap_adap_single",colnames(her))),subset(claudin,select=grep("adap_adap_single",colnames(claudin))),subset(luminal,select=grep("adap_adap_single",colnames(luminal))))
adapB_multi=rbind(subset(basal,select=grep("akt_bad_her2_igf1r_erk/adapB",colnames(basal))),subset(her,select=grep("akt_bad_her2_igf1r_erk/adapB",colnames(her))),subset(claudin,select=grep("akt_bad_her2_igf1r_erk/adapB",colnames(claudin))),subset(luminal,select=grep("akt_bad_her2_igf1r_erk/adapB",colnames(luminal))))
adap_adap_multi=rbind(subset(basal,select=grep("akt_bad_her2_igf1r_erk/adap_adap",colnames(basal))),subset(her,select=grep("akt_bad_her2_igf1r_erk/adap_adap",colnames(her))),subset(claudin,select=grep("akt_bad_her2_igf1r_erk/adap_adap",colnames(claudin))),subset(luminal,select=grep("akt_bad_her2_igf1r_erk/adap_adap",colnames(luminal))))
colnames(adapB_single)=colnames(adap_adap_single)=c("AKT","BAD","ERK","HER2","IGF1R")
adapB_single= adapB_single[,c("AKT","BAD","HER2","IGF1R","ERK")]
adap_adap_single=adap_adap_single[,c("AKT","BAD","HER2","IGF1R","ERK")]
colnames(adapB_multi)=colnames(adap_adap_multi)=c("AKT","BAD","HER2","IGF1R","ERK")
#png("heatmaps_multi_adap.png",width = 5*300,height = 5*300,res = 800, pointsize = 8)
heatmap.2(as.matrix(adapB_single), RowSideColors = c(rep("gray", length(rownames(basal))),rep("blue", length(rownames(her))),rep("black", length(rownames(claudin))),rep("green",length(rownames(luminal)))),col=my_palette,dendrogram="none", trace="none",margins=c(12,9),main="Adaptive BG Single",scale='row',Rowv=F,Colv=F)
par(lend = 1) # square line ends for the color legend
legend("bottomleft",legend = c("Basal", "HER2", "Claudin","Luminal"), col = c("gray", "blue", "black","green"), lty= 1,lwd = 10)
#png("heatmaps_multi_adap.png",width = 5*300,height = 5*300,res = 800, pointsize = 8)
heatmap.2(as.matrix(adap_adap_single), RowSideColors = c(rep("gray", length(rownames(basal))),rep("blue", length(rownames(her))),rep("black", length(rownames(claudin))),rep("green",length(rownames(luminal)))),col=my_palette,dendrogram="none", trace="none",margins=c(12,9),main="Adaptive BG SG Single",scale='row',Rowv=F,Colv=F)
par(lend = 1) # square line ends for the color legend
legend("bottomleft",legend = c("Basal", "HER2", "Claudin","Luminal"), col = c("gray", "blue", "black","green"), lty= 1,lwd = 10)
heatmap.2(as.matrix(adapB_multi), RowSideColors = c(rep("gray", length(rownames(basal))),rep("blue", length(rownames(her))),rep("black", length(rownames(claudin))),rep("green",length(rownames(luminal)))),col=my_palette,dendrogram="none", trace="none",margins=c(12,9),main="Multi Adaptive BG",scale="row",Rowv=F,Colv=F)
par(lend = 1) # square line ends for the color legend
legend("bottomleft",legend = c("Basal", "HER2", "Claudin","Luminal"), col = c("gray", "blue", "black","green"), lty = 1,lwd = 10)
heatmap.2(as.matrix(adap_adap_multi), RowSideColors = c(rep("gray", length(rownames(basal))),rep("blue", length(rownames(her))),rep("black", length(rownames(claudin))),rep("green",length(rownames(luminal)))),col=my_palette,dendrogram="none", trace="none",margins=c(12,9),main="Multi Adaptive BG SG",scale="row",Rowv=F,Colv=F)
par(lend = 1) # square line ends for the color legend
legend("bottomleft",legend = c("Basal", "HER2", "Claudin","Luminal"), col = c("gray", "blue", "black","green"), lty = 1,lwd = 10)
```
```{r echo=FALSE}
time<-format(Sys.time(),"%a %b %d %X %Y")
```
This analysis was run on `r time`
<file_sep>/code/tcga_assign_call.R
setwd("~/Dropbox/bild_signatures/Datasets/")
expr<-as.matrix(read.table("GFP18_AKT_BAD_HER2_IGF1R_RAF_ERK.tpmlog",sep='\t',row.names=1,header=1))
control<-subset(expr, select=GFP.1:GFP.12)
her2<-subset(expr, select=HER2.1:HER2.6)
akt<-subset(expr,select=AKT.1:AKT.6)
bad<-subset(expr,select=BAD.1:BAD.6)
igf1r<-subset(expr,select=IGF1R.1:IGF1R.6)
erk<-subset(expr,select=ERK.1:ERK.6)
expr_all<-cbind(control,akt,bad,her2,igf1r,erk)
tcga<-as.matrix(read.table("~/Dropbox/Datasets/TCGA20_brca_1_23.txt", sep='\t', stringsAsFactors=T, header=1, row.names=1))
expr_all_f <-expr_all[apply(expr_all[,1:41]==0,1,mean) < 0.85,]
dim(expr_all_f)
expr_all_tcga_f<-merge_drop(expr_all_f,tcga,by=0)
dim(expr_all_tcga_f)
sub<-c(12,6,6,5,6,6,length(colnames(tcga)))
pdf(file='~/Dropbox/bild_signatures/tcga_hmec_pca_plot_3_14_15.pdf')
pcaplot(expr_all_tcga_f,sub)
bat1<-as.matrix(cbind(c(colnames(expr_all_f),colnames(tcga)),c(rep(1,length(colnames(expr_all_f))),rep(2,length(colnames(tcga))))))
#bat1
combat_expr1<-ComBat(dat=expr_all_tcga_f, batch=bat1[,2], mod=NULL, numCovs=NULL)
pcaplot(combat_expr1,sub)
dev.off()
combat_tcga<-combat_expr1;#as.matrix(read.table("~/Dropbox/Datasets/TCGA20_brca_hmec_combat.txt", sep='\t', stringsAsFactors=T, header=1, row.names=1))
c_gfp<-subset(combat_tcga, select=GFP.1:GFP.12)
c_akt<-subset(combat_tcga, select=AKT.1:AKT.6)
c_bad<-subset(combat_tcga, select=BAD.1:BAD.6)
c_her2<-subset(combat_tcga, select=HER2.1:HER2.6)
c_igf1r<-subset(combat_tcga, select=IGF1R.1:IGF1R.6)
c_erk<-subset(combat_tcga, select=ERK.1:ERK.6)
c_test<-combat_tcga[,42:ncol(combat_tcga)]
basedir="~/Dropbox/bild_signatures/tcga_15_mar_all"
dir.create( basedir)
#############trying one pathway at a time in multipathway#############
#1. AKT
trainingLabela<-list(control=list(akt=1:12),akt=13:18)
sub_dir<-paste(basedir,"akt",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = cbind(c_gfp,c_akt),test=c_test,trainingLabel1 = trainingLabela,g=150,out_dir_base = sub_dir,single = 1)
#2. BAD
trainingLabelb<-list(control=list(bad=1:12),bad=13:18)
sub_dir<-paste(basedir,"bad",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = cbind(c_gfp,c_bad),test=c_test,trainingLabel1 = trainingLabelb,g=150,out_dir_base = sub_dir,single = 1)
#3. HER2
trainingLabelh<-list(control=list(her2=1:12),her2=13:17)
sub_dir<-paste(basedir,"her2",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = cbind(c_gfp,c_her2),test=c_test,trainingLabel1 = trainingLabelh,g=15,out_dir_base = sub_dir,single = 1)
#4. IGF1R
trainingLabeli<-list(control=list(igf1r=1:12),igf1r=13:18)
sub_dir<-paste(basedir,"igf1r",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = cbind(c_gfp,c_igf1r),test=c_test,trainingLabel1 = trainingLabeli,g=100,out_dir_base = sub_dir,single = 1)
#5. ERK
trainingLabele<-list(control=list(erk=1:12),erk=13:18)
sub_dir<-paste(basedir,"erk",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = cbind(c_gfp,c_erk),test=c_test,trainingLabel1 = trainingLabele,g=100,out_dir_base = sub_dir,single = 1)
#############trying two pathways at a time in multipathway#############
#1. HER2 & AKT
trainha<-cbind(c_gfp,c_akt,c_her2)
trainingLabelha<-list(control=list(akt=1:12,her2=1:12),akt=13:18,her2=19:23)
sub_dir=paste(basedir,"her2_akt",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = trainha,test=c_test,trainingLabel1 = trainingLabelha,g=c(150,15),out_dir_base = sub_dir)
#2. HER2 & BAD
trainhb<-cbind(c_gfp,c_bad,c_her2)
trainingLabelhb<-list(control=list(bad=1:12,her2=1:12),bad=13:18,her2=19:23)
sub_dir=paste(basedir,"her2_bad",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainhb,test=c_test,trainingLabel1 = trainingLabelhb,g=c(150,15),out_dir_base = sub_dir)
#3. HER2 & IGF1R
trainhi<-cbind(c_gfp,c_igf1r,c_her2)
trainingLabelhi<-list(control=list(igf1r=1:12,her2=1:12),igf1r=13:18,her2=19:23)
sub_dir=paste(basedir,"her2_igf1r",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainhi,test=c_test,trainingLabel1 = trainingLabelhi,g=c(100,15),out_dir_base = sub_dir)
#4. AKT & BAD
trainab<-cbind(c_gfp,c_akt,c_bad)
trainingLabelab<-list(control=list(akt=1:12,bad=1:12),akt=13:18,bad=19:24)
sub_dir=paste(basedir,"akt_bad",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainab,test=c_test,trainingLabel1 = trainingLabelab,g=c(150,150),out_dir_base = sub_dir)
#5. AKT & IGF1R
trainai<-cbind(c_gfp,c_akt,c_igf1r)
trainingLabelai<-list(control=list(akt=1:12,igf1r=1:12),akt=13:18,igf1r=19:24)
sub_dir=paste(basedir,"akt_igf1r",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = trainai,test=c_test,trainingLabel1 = trainingLabelai,g=c(150,100),out_dir_base = sub_dir)
#6. BAD & IGF1R
trainbi<-cbind(c_gfp,c_bad,c_igf1r)
trainingLabelbi<-list(control=list(bad=1:12,igf1r=1:12),bad=13:18,igf1r=19:24)
sub_dir=paste(basedir,"bad_igf1r",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = trainbi,test=c_test,trainingLabel1 = trainingLabelbi,g=c(150,100),out_dir_base = sub_dir)
#7. ERK & IGF1R
trainei<-cbind(c_gfp,c_erk,c_igf1r)
trainingLabelei<-list(control=list(erk=1:12,igf1r=1:12),erk=13:18,igf1r=19:24)
sub_dir=paste(basedir,"erk_igf1r",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = trainei,test=c_test,trainingLabel1 = trainingLabelei,g=c(100,100),out_dir_base = sub_dir)
#8. ERK & AKT
trainea<-cbind(c_gfp,c_erk,c_akt)
trainingLabelea<-list(control=list(erk=1:12,akt=1:12),erk=13:18,akt=19:24)
sub_dir=paste(basedir,"erk_akt",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = trainea,test=c_test,trainingLabel1 = trainingLabelea,g=c(100,150),out_dir_base = sub_dir)
#9. ERK & BAD
traineb<-cbind(c_gfp,c_erk,c_bad)
trainingLabeleb<-list(control=list(erk=1:12,bad=1:12),erk=13:18,bad=19:24)
sub_dir=paste(basedir,"erk_bad",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = traineb,test=c_test,trainingLabel1 = trainingLabeleb,g=c(100,150),out_dir_base = sub_dir)
#10. ERK & HER2
traineh<-cbind(c_gfp,c_erk,c_her2)
trainingLabeleh<-list(control=list(erk=1:12,her2=1:12),erk=13:18,her2=19:23)
sub_dir=paste(basedir,"erk_her2",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = traineh,test=c_test,trainingLabel1 = trainingLabeleh,g=c(100,15),out_dir_base = sub_dir)
#############trying three pathways at a time in multipathway#############
#1. HER2, AKT & BAD
trainhab<-cbind(c_gfp,c_akt,c_bad,c_her2)
trainingLabelhab<-list(control=list(akt=1:12,bad=1:12,her2=1:12),akt=13:18,bad=19:24,her2=25:29)
sub_dir=paste(basedir,"her2_akt_bad",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainhab,test=c_test,trainingLabel1 = trainingLabelhab,g=c(150,150,15),out_dir_base = sub_dir)
#2. HER2, BAD & IGF1R
trainhbi<-cbind(c_gfp,c_igf1r,c_bad,c_her2)
trainingLabelhbi<-list(control=list(igf1r=1:12,bad=1:12,her2=1:12),igf1r=13:18,bad=19:24,her2=25:29)
sub_dir=paste(basedir,"her2_bad_igf1r",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainhbi,test=c_test,trainingLabel1 = trainingLabelhbi,g=c(100,150,15),out_dir_base = sub_dir)
#3. AKT, BAD & IGF1R
trainabi<-cbind(c_gfp,c_akt,c_bad,c_igf1r)
trainingLabelabi<-list(control=list(akt=1:12,bad=1:12,igf1r=1:12),akt=13:18,bad=19:24,igf1r=25:30)
sub_dir=paste(basedir,"akt_bad_igf1r",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainabi,test=c_test,trainingLabel1 = trainingLabelabi,g=c(150,150,100),out_dir_base = sub_dir)
#4. AKT, BAD & ERK
trainabe<-cbind(c_gfp,c_akt,c_bad,c_erk)
trainingLabelabe<-list(control=list(akt=1:12,bad=1:12,erk=1:12),akt=13:18,bad=19:24,erk=25:30)
sub_dir=paste(basedir,"akt_bad_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainabe,test=c_test,trainingLabel1 = trainingLabelabe,g=c(150,150,100),out_dir_base = sub_dir)
#5. AKT, HER2 & IGF1R
trainahi<-cbind(c_gfp,c_akt,c_her2,c_igf1r)
trainingLabelahi<-list(control=list(akt=1:12,her2=1:12,igf1r=1:12),akt=13:18,her2=19:23,igf1r=24:29)
sub_dir=paste(basedir,"akt_her2_igf1r",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainahi,test=c_test,trainingLabel1 = trainingLabelahi,g=c(150,15,100),out_dir_base = sub_dir)
#6. AKT, HER2 & ERK
trainahe<-cbind(c_gfp,c_akt,c_her2,c_erk)
trainingLabelahe<-list(control=list(akt=1:12,her2=1:12,erk=1:12),akt=13:18,her2=19:23,erk=24:29)
sub_dir=paste(basedir,"akt_her2_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainahe,test=c_test,trainingLabel1 = trainingLabelahe,g=c(150,15,100),out_dir_base = sub_dir)
#7. AKT, IGF1R & ERK
trainaie<-cbind(c_gfp,c_akt,c_igf1r,c_erk)
trainingLabelaie<-list(control=list(akt=1:12,igf1r=1:12,erk=1:12),akt=13:18,igf1r=19:24,erk=25:30)
sub_dir=paste(basedir,"akt_igf1r_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainaie,test=c_test,trainingLabel1 = trainingLabelaie,g=c(150,100,100),out_dir_base = sub_dir)
#8. BAD, IGF1R & ERK
trainbie<-cbind(c_gfp,c_bad,c_igf1r,c_erk)
trainingLabelbie<-list(control=list(bad=1:12,igf1r=1:12,erk=1:12),bad=13:18,igf1r=19:24,erk=25:30)
sub_dir=paste(basedir,"bad_igf1r_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainbie,test=c_test,trainingLabel1 = trainingLabelbie,g=c(150,100,100),out_dir_base = sub_dir)
#9. BAD, HER2 & ERK
trainbhe<-cbind(c_gfp,c_bad,c_her2,c_erk)
trainingLabelbhe<-list(control=list(bad=1:12,her2=1:12,erk=1:12),bad=13:18,her2=19:23,erk=24:29)
sub_dir=paste(basedir,"bad_her2_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainbhe,test=c_test,trainingLabel1 = trainingLabelbhe,g=c(150,15,100),out_dir_base = sub_dir)
#10. IGF1R, HER2 & ERK
trainihe<-cbind(c_gfp,c_igf1r,c_her2,c_erk)
trainingLabelihe<-list(control=list(igf1r=1:12,her2=1:12,erk=1:12),igf1r=13:18,her2=19:23,erk=24:29)
sub_dir=paste(basedir,"igf1r_her2_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainihe,test=c_test,trainingLabel1 = trainingLabelihe,g=c(100,15,100),out_dir_base = sub_dir)
########################trying four at a time#####################
#1. AKT, BAD, HER2 & IGF1R
trainabhi<-cbind(c_gfp,c_akt,c_bad,c_her2,c_igf1r)
trainingLabelabhi<-list(control=list(akt=1:12,bad=1:12,her2=1:12,igf1r=1:12),akt=13:18, bad=19:24,her2=25:29,igf1r=30:35)
sub_dir=paste(basedir,"akt_bad_her2_igf1r",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainabhi,test=c_test,trainingLabel1 = trainingLabelabhi,g=c(150,150,15,100),out_dir_base = sub_dir)
#2. AKT, BAD, HER2 & ERK
trainabhe<-cbind(c_gfp,c_akt,c_bad,c_her2,c_erk)
trainingLabelabhe<-list(control=list(akt=1:12,bad=1:12,her2=1:12,erk=1:12),akt=13:18, bad=19:24,her2=25:29,erk=30:35)
sub_dir=paste(basedir,"akt_bad_her2_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainabhe,test=c_test,trainingLabel1 = trainingLabelabhe,g=c(150,150,15,100),out_dir_base = sub_dir)
#3. AKT, BAD, IGF1R & ERK
trainabie<-cbind(c_gfp,c_akt,c_bad,c_igf1r,c_erk)
trainingLabelabie<-list(control=list(akt=1:12,bad=1:12,igf1r=1:12,erk=1:12),akt=13:18, bad=19:24,igf1r=25:30,erk=31:36)
sub_dir=paste(basedir,"akt_bad_igf1r_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainabie,test=c_test,trainingLabel1 = trainingLabelabie,g=c(150,150,100,100),out_dir_base = sub_dir)
#4. AKT, IGF1R, HER2 & ERK
trainaihe<-cbind(c_gfp,c_akt,c_igf1r,c_her2,c_erk)
trainingLabelaihe<-list(control=list(akt=1:12,igf1r=1:12,her2=1:12,erk=1:12),akt=13:18, igf1r=19:24,her2=25:29,erk=30:35)
sub_dir=paste(basedir,"akt_igf1r_her2_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainaihe,test=c_test,trainingLabel1 = trainingLabelaihe,g=c(150,100,15,100),out_dir_base = sub_dir)
#5. BAD, IGF1R, HER2 & ERK
trainbihe<-cbind(c_gfp,c_bad,c_igf1r,c_her2,c_erk)
trainingLabelbihe<-list(control=list(bad=1:12,igf1r=1:12,her2=1:12,erk=1:12),bad=13:18, igf1r=19:24,her2=25:29,erk=30:35)
sub_dir=paste(basedir,"bad_igf1r_her2_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainbihe,test=c_test,trainingLabel1 = trainingLabelbihe,g=c(150,100,15,100),out_dir_base = sub_dir)
#########including all 5 pathways######
trainhall5<-cbind(c_gfp,c_akt,c_bad,c_her2,c_igf1r, c_erk)
trainingLabelall5<-list(control=list(akt=1:12,bad=1:12,her2=1:12,igf1r=1:12, erk=1:12),akt=13:18, bad=19:24,her2=25:29,igf1r=30:35, erk=36:41)
sub_dir=paste(basedir,"akt_bad_her2_igf1r_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainhall5,test=c_test,trainingLabel1 = trainingLabelall5,g=c(150,150,15,100,100),out_dir_base = sub_dir)<file_sep>/Rmarkdowns_scripts/Bootstrap_prediction_correlation_comparison.Rmd
---
title: "multipathway_ASSIGN"
author: "<NAME>"
date: "December 23, 2014"
output: pdf_document
---
```{r,echo=FALSE,cache=TRUE,include=FALSE}
library(sva)
library(ASSIGN)
library(utils)
```
Reading in the signature datasets...
```{r include=FALSE,echo=FALSE}
#setwd("~/Documents/ThesisWork/GitRepos/bild_signature_validation_old_repo/Datasets")
setwd("~/Dropbox/bild_signature/Datasets")
expr<-as.matrix(read.table("GFP18_AKT_BAD_HER2_IGF1R_RAF_ERK.tpmlog",sep='\t',row.names=1,header=1))
control<-subset(expr, select=GFP.1:GFP.12)
her2<-subset(expr, select=HER2.1:HER2.6)
akt<-subset(expr,select=AKT.1:AKT.6)
bad<-subset(expr,select=BAD.1:BAD.6)
igf1r<-subset(expr,select=IGF1R.1:IGF1R.6)
erk<-subset(expr,select=ERK.1:ERK.6)
expr_all<-cbind(control,akt,bad,her2,igf1r,erk)
dim(expr_all)
#icbp<-as.matrix(read.table("~/Documents/ThesisWork/GitRepos/bild_signature_validation_old_repo/Datasets/ICBP/icbp_Rsubread_tpmlog.txt", sep='\t', stringsAsFactors=FALSE, header=1, row.names=1))
icbp<-as.matrix(read.table("icbp_Rsubread_tpmlog.txt", sep='\t', stringsAsFactors=FALSE, header=1, row.names=1))
expr_all_f <-expr_all[apply(expr_all[,1:41]==0,1,mean) < 0.85,]
dim(expr_all_f)
expr_all_icbp_f<-merge_drop(expr_all_f,icbp,by=0)
sub<-c(12,6,6,5,6,6,55)
pdf("pca_plots.pdf")
pcaplot(expr_all_icbp_f,sub)
bat1<-as.matrix(cbind(c(colnames(expr_all_f),colnames(icbp)),c(rep(1,length(colnames(expr_all_f))),rep(2,length(colnames(icbp))))))
#bat1
combat_expr1<-ComBat(dat=expr_all_icbp_f, batch=bat1[,2], mod=NULL, numCovs=NULL)
pcaplot(combat_expr1,sub)
dev.off()
c_gfp<-subset(combat_expr1, select=GFP.1:GFP.12)
c_akt<-subset(combat_expr1, select=AKT.1:AKT.6)
c_bad<-subset(combat_expr1, select=BAD.1:BAD.6)
c_her2<-subset(combat_expr1, select=HER2.1:HER2.6)
c_igf1r<-subset(combat_expr1, select=IGF1R.1:IGF1R.6)
c_erk<-subset(combat_expr1, select=ERK.1:ERK.6)
c_test<-combat_expr1[,42:96]
basedir="~/Dropbox/bild_signatures/icbp_15_mar_all"
dir.create( basedir)
#############trying one pathway at a time in multipathway#############
#1. AKT
trainingLabela<-list(control=list(akt=1:12),akt=13:18)
sub_dir<-paste(basedir,"akt",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = cbind(c_gfp,c_akt),test=c_test,trainingLabel1 = trainingLabela,g=150,out_dir_base = sub_dir,single = 1)
#2. BAD
trainingLabelb<-list(control=list(bad=1:12),bad=13:18)
sub_dir<-paste(basedir,"bad",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = cbind(c_gfp,c_bad),test=c_test,trainingLabel1 = trainingLabelb,g=150,out_dir_base = sub_dir,single = 1)
#3. HER2
trainingLabelh<-list(control=list(her2=1:12),her2=13:17)
sub_dir<-paste(basedir,"her2",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = cbind(c_gfp,c_her2),test=c_test,trainingLabel1 = trainingLabelh,g=15,out_dir_base = sub_dir,single = 1)
#4. IGF1R
trainingLabeli<-list(control=list(igf1r=1:12),igf1r=13:18)
sub_dir<-paste(basedir,"igf1r",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = cbind(c_gfp,c_igf1r),test=c_test,trainingLabel1 = trainingLabeli,g=100,out_dir_base = sub_dir,single = 1)
#5. ERK
trainingLabele<-list(control=list(erk=1:12),erk=13:18)
sub_dir<-paste(basedir,"erk",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = cbind(c_gfp,c_erk),test=c_test,trainingLabel1 = trainingLabele,g=100,out_dir_base = sub_dir,single = 1)
#############trying two pathways at a time in multipathway#############
#1. HER2 & AKT
trainha<-cbind(c_gfp,c_akt,c_her2)
trainingLabelha<-list(control=list(akt=1:12,her2=1:12),akt=13:18,her2=19:23)
sub_dir=paste(basedir,"her2_akt",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = trainha,test=c_test,trainingLabel1 = trainingLabelha,g=c(150,15),out_dir_base = sub_dir)
#2. HER2 & BAD
trainhb<-cbind(c_gfp,c_bad,c_her2)
trainingLabelhb<-list(control=list(bad=1:12,her2=1:12),bad=13:18,her2=19:23)
sub_dir=paste(basedir,"her2_bad",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainhb,test=c_test,trainingLabel1 = trainingLabelhb,g=c(150,15),out_dir_base = sub_dir)
#3. HER2 & IGF1R
trainhi<-cbind(c_gfp,c_igf1r,c_her2)
trainingLabelhi<-list(control=list(igf1r=1:12,her2=1:12),igf1r=13:18,her2=19:23)
sub_dir=paste(basedir,"her2_igf1r",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainhi,test=c_test,trainingLabel1 = trainingLabelhi,g=c(100,15),out_dir_base = sub_dir)
#4. AKT & BAD
trainab<-cbind(c_gfp,c_akt,c_bad)
trainingLabelab<-list(control=list(akt=1:12,bad=1:12),akt=13:18,bad=19:24)
sub_dir=paste(basedir,"akt_bad",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainab,test=c_test,trainingLabel1 = trainingLabelab,g=c(150,150),out_dir_base = sub_dir)
#5. AKT & IGF1R
trainai<-cbind(c_gfp,c_akt,c_igf1r)
trainingLabelai<-list(control=list(akt=1:12,igf1r=1:12),akt=13:18,igf1r=19:24)
sub_dir=paste(basedir,"akt_igf1r",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = trainai,test=c_test,trainingLabel1 = trainingLabelai,g=c(150,100),out_dir_base = sub_dir)
#6. BAD & IGF1R
trainbi<-cbind(c_gfp,c_bad,c_igf1r)
trainingLabelbi<-list(control=list(bad=1:12,igf1r=1:12),bad=13:18,igf1r=19:24)
sub_dir=paste(basedir,"bad_igf1r",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = trainbi,test=c_test,trainingLabel1 = trainingLabelbi,g=c(150,100),out_dir_base = sub_dir)
#7. ERK & IGF1R
trainei<-cbind(c_gfp,c_erk,c_igf1r)
trainingLabelei<-list(control=list(erk=1:12,igf1r=1:12),erk=13:18,igf1r=19:24)
sub_dir=paste(basedir,"erk_igf1r",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = trainei,test=c_test,trainingLabel1 = trainingLabelei,g=c(100,100),out_dir_base = sub_dir)
#8. ERK & AKT
trainea<-cbind(c_gfp,c_erk,c_akt)
trainingLabelea<-list(control=list(erk=1:12,akt=1:12),erk=13:18,akt=19:24)
sub_dir=paste(basedir,"erk_akt",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = trainea,test=c_test,trainingLabel1 = trainingLabelea,g=c(100,150),out_dir_base = sub_dir)
#9. ERK & BAD
traineb<-cbind(c_gfp,c_erk,c_bad)
trainingLabeleb<-list(control=list(erk=1:12,bad=1:12),erk=13:18,bad=19:24)
sub_dir=paste(basedir,"erk_bad",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = traineb,test=c_test,trainingLabel1 = trainingLabeleb,g=c(100,150),out_dir_base = sub_dir)
#10. ERK & HER2
traineh<-cbind(c_gfp,c_erk,c_her2)
trainingLabeleh<-list(control=list(erk=1:12,her2=1:12),erk=13:18,her2=19:23)
sub_dir=paste(basedir,"erk_her2",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = traineh,test=c_test,trainingLabel1 = trainingLabeleh,g=c(100,15),out_dir_base = sub_dir)
#############trying three pathways at a time in multipathway#############
#1. HER2, AKT & BAD
trainhab<-cbind(c_gfp,c_akt,c_bad,c_her2)
trainingLabelhab<-list(control=list(akt=1:12,bad=1:12,her2=1:12),akt=13:18,bad=19:24,her2=25:29)
sub_dir=paste(basedir,"her2_akt_bad",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainhab,test=c_test,trainingLabel1 = trainingLabelhab,g=c(150,150,15),out_dir_base = sub_dir)
#2. HER2, BAD & IGF1R
trainhbi<-cbind(c_gfp,c_igf1r,c_bad,c_her2)
trainingLabelhbi<-list(control=list(igf1r=1:12,bad=1:12,her2=1:12),igf1r=13:18,bad=19:24,her2=25:29)
sub_dir=paste(basedir,"her2_bad_igf1r",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainhbi,test=c_test,trainingLabel1 = trainingLabelhbi,g=c(100,150,15),out_dir_base = sub_dir)
#3. AKT, BAD & IGF1R
trainabi<-cbind(c_gfp,c_akt,c_bad,c_igf1r)
trainingLabelabi<-list(control=list(akt=1:12,bad=1:12,igf1r=1:12),akt=13:18,bad=19:24,igf1r=25:30)
sub_dir=paste(basedir,"akt_bad_igf1r",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainabi,test=c_test,trainingLabel1 = trainingLabelabi,g=c(150,150,100),out_dir_base = sub_dir)
#4. AKT, BAD & ERK
trainabe<-cbind(c_gfp,c_akt,c_bad,c_erk)
trainingLabelabe<-list(control=list(akt=1:12,bad=1:12,erk=1:12),akt=13:18,bad=19:24,erk=25:30)
sub_dir=paste(basedir,"akt_bad_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainabe,test=c_test,trainingLabel1 = trainingLabelabe,g=c(150,150,100),out_dir_base = sub_dir)
#5. AKT, HER2 & IGF1R
trainahi<-cbind(c_gfp,c_akt,c_her2,c_igf1r)
trainingLabelahi<-list(control=list(akt=1:12,her2=1:12,igf1r=1:12),akt=13:18,her2=19:23,igf1r=24:29)
sub_dir=paste(basedir,"akt_her2_igf1r",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainahi,test=c_test,trainingLabel1 = trainingLabelahi,g=c(150,15,100),out_dir_base = sub_dir)
#6. AKT, HER2 & ERK
trainahe<-cbind(c_gfp,c_akt,c_her2,c_erk)
trainingLabelahe<-list(control=list(akt=1:12,her2=1:12,erk=1:12),akt=13:18,her2=19:23,erk=24:29)
sub_dir=paste(basedir,"akt_her2_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainahe,test=c_test,trainingLabel1 = trainingLabelahe,g=c(150,15,100),out_dir_base = sub_dir)
#7. AKT, IGF1R & ERK
trainaie<-cbind(c_gfp,c_akt,c_igf1r,c_erk)
trainingLabelaie<-list(control=list(akt=1:12,igf1r=1:12,erk=1:12),akt=13:18,igf1r=19:24,erk=25:30)
sub_dir=paste(basedir,"akt_igf1r_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainaie,test=c_test,trainingLabel1 = trainingLabelaie,g=c(150,100,100),out_dir_base = sub_dir)
#8. BAD, IGF1R & ERK
trainbie<-cbind(c_gfp,c_bad,c_igf1r,c_erk)
trainingLabelbie<-list(control=list(bad=1:12,igf1r=1:12,erk=1:12),bad=13:18,igf1r=19:24,erk=25:30)
sub_dir=paste(basedir,"bad_igf1r_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainbie,test=c_test,trainingLabel1 = trainingLabelbie,g=c(150,100,100),out_dir_base = sub_dir)
#9. BAD, HER2 & ERK
trainbhe<-cbind(c_gfp,c_bad,c_her2,c_erk)
trainingLabelbhe<-list(control=list(bad=1:12,her2=1:12,erk=1:12),bad=13:18,her2=19:23,erk=24:29)
sub_dir=paste(basedir,"bad_her2_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainbhe,test=c_test,trainingLabel1 = trainingLabelbhe,g=c(150,15,100),out_dir_base = sub_dir)
#10. IGF1R, HER2 & ERK
trainihe<-cbind(c_gfp,c_igf1r,c_her2,c_erk)
trainingLabelihe<-list(control=list(igf1r=1:12,her2=1:12,erk=1:12),igf1r=13:18,her2=19:23,erk=24:29)
sub_dir=paste(basedir,"igf1r_her2_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainihe,test=c_test,trainingLabel1 = trainingLabelihe,g=c(100,15,100),out_dir_base = sub_dir)
########################trying four at a time#####################
#1. AKT, BAD, HER2 & IGF1R
trainabhi<-cbind(c_gfp,c_akt,c_bad,c_her2,c_igf1r)
trainingLabelabhi<-list(control=list(akt=1:12,bad=1:12,her2=1:12,igf1r=1:12),akt=13:18, bad=19:24,her2=25:29,igf1r=30:35)
sub_dir=paste(basedir,"akt_bad_her2_igf1r",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainabhi,test=c_test,trainingLabel1 = trainingLabelabhi,g=c(150,150,15,100),out_dir_base = sub_dir)
#2. AKT, BAD, HER2 & ERK
trainabhe<-cbind(c_gfp,c_akt,c_bad,c_her2,c_erk)
trainingLabelabhe<-list(control=list(akt=1:12,bad=1:12,her2=1:12,erk=1:12),akt=13:18, bad=19:24,her2=25:29,erk=30:35)
sub_dir=paste(basedir,"akt_bad_her2_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainabhe,test=c_test,trainingLabel1 = trainingLabelabhe,g=c(150,150,15,100),out_dir_base = sub_dir)
#3. AKT, BAD, IGF1R & ERK
trainabie<-cbind(c_gfp,c_akt,c_bad,c_igf1r,c_erk)
trainingLabelabie<-list(control=list(akt=1:12,bad=1:12,igf1r=1:12,erk=1:12),akt=13:18, bad=19:24,igf1r=25:30,erk=31:36)
sub_dir=paste(basedir,"akt_bad_igf1r_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainabie,test=c_test,trainingLabel1 = trainingLabelabie,g=c(150,150,100,100),out_dir_base = sub_dir)
#4. AKT, IGF1R, HER2 & ERK
trainaihe<-cbind(c_gfp,c_akt,c_igf1r,c_her2,c_erk)
#trainingLabelabie<-list(control=list(akt=1:12,bad=1:12,igf1r=1:12,erk=1:12),akt=13:18, bad=19:24,igf1r=25:30,erk=31:36)
trainingLabelaihe<-list(control=list(akt=1:12,igf1r=1:12,her2=1:12,erk=1:12),akt=13:18, igf1r=19:24,her2=25:29,erk=30:35)
sub_dir=paste(basedir,"akt_igf1r_her2_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainaihe,test=c_test,trainingLabel1 = trainingLabelaihe,g=c(150,100,15,100),out_dir_base = sub_dir)
#5. BAD, IGF1R, HER2 & ERK
trainbihe<-cbind(c_gfp,c_bad,c_igf1r,c_her2,c_erk)
trainingLabelbihe<-list(control=list(bad=1:12,igf1r=1:12,her2=1:12,erk=1:12),bad=13:18, igf1r=19:24,her2=25:29,erk=30:35)
sub_dir=paste(basedir,"bad_igf1r_her2_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainbihe,test=c_test,trainingLabel1 = trainingLabelbihe,g=c(150,100,15,100),out_dir_base = sub_dir)
#########including all 5 pathways######
trainhall5<-cbind(c_gfp,c_akt,c_bad,c_her2,c_igf1r, c_erk)
trainingLabelall5<-list(control=list(akt=1:12,bad=1:12,her2=1:12,igf1r=1:12, erk=1:12),akt=13:18, bad=19:24,her2=25:29,igf1r=30:35, erk=36:41)
sub_dir=paste(basedir,"akt_bad_her2_igf1r_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainhall5,test=c_test,trainingLabel1 = trainingLabelall5,g=c(150,150,15,100,100),out_dir_base = sub_dir)
```
```{r}
```
Read in all the predictions and make files with all results
```{r}
# multi nonadpative
#setwd("~/Documents/Thesiswork/ICBP/multi_icbp_expr_pc/")
setwd("~//Documents/Thesiswork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/icbp_15_mar_all/")
getwd()
filenames_icbp_multi<-system("ls */*/pathway_activity_testset*", intern=TRUE)
filenames_icbp_multi
for(i in 1:length(filenames_icbp_multi))
{
f<-read.csv(filenames_icbp_multi[i], header=1,row.names=1) ###reading in the files one at a time
colnames(f)<-paste(filenames_icbp_multi[i],colnames(f),sep='/')
if(i==1){
data_icbp<-f
}
else{
data_icbp<-cbind(data_icbp,f)
}
}
head(data_icbp)
View(data_icbp)
write.table(data_icbp,"~/Documents/ThesisWork/GitRepos/bild_signatures/ICBP_pathwaypredictions_all_combos.txt",sep='\t', col.names = NA,quote=F)
colnames(data_icbp)<-gsub(pattern = "/pathway_activity_testset.csv",replacement = "",x = colnames(data_icbp))
head(data_icbp)
rownames(data_icbp)[1:7]<-c("184A1","184B5","21MT1","21MT2","21NT","21PT","600MPE")
View(data_icbp)
write.table(data_icbp,"~/Documents/ThesisWork/GitRepos/bild_signatures/ICBP_pathwaypredictions_all_combos.txt",sep='\t', col.names = NA,quote=F)
setwd("~/Documents/ThesisWork/GitRepos/bild_signatures/")
drugs<-read.delim("ICBP_drugs_1.txt", header=1, sep='\t',row.names=1)
View(drugs)
head(drugs);dim(drugs)
pred_drug<-merge_drop(data_icbp,drugs,by=0)
pred_drug
dim(pred_drug)
dimnames(pred_drug)
pred_drug_only=subset(pred_drug, select=c(1:160,175,206,204,180,209,202,191))
pred_drug2=pred_drug[, c(1:160,171:260)]
head(pred_drug2)
colnames(pred_drug)
View(pred_drug_only[161:167 ])
write.table(pred_drug2,"~/Documents/ThesisWork/GitRepos/bild_signatures/ICBP_pathwaypredictions_drug_all_combos.txt",sep='\t', col.names = NA,quote=F)
getwd()
rhos=p_val=NULL
temp=temp_f=NULL
for(i in 1:160){
for(j in 1:7) {
#i=1
#j=1
pathway_drug=subset(pred_drug_only, select=c(i,160+j))
temp_cors= cor.ci(pathway_drug, method="spearman", plot=FALSE)
temp<-c(temp_cors$rho[2,1],temp_cors$ci[5])
temp<- cbind(temp[1],temp[2])
temp_f<-rbind(temp_f,temp)
rownames(temp_f)[j+(i-1)*7]=paste(colnames(pathway_drug)[1],colnames(pathway_drug)[2],sep='__')
}
}
colnames(temp_f)<-c("Corr","p_val")
writeFile(temp_f, "ICBP_allpathwaycombos_drug_correlations.txt")
writeFile(temp_f, "ICBP_allpathwaycombos_drug_correlations_2.txt")
# list for chemo drugs
#list for pi3k
#list for akt
# different classes of chemo
getwd()
erk_preds<-temp_f[grepl("*/erk__*", rownames(temp_f)), ]
erk_preds_adapB<-erk_preds[grepl("*/adapB_*", rownames(erk_preds)), ]
writeFile(erk_preds_adapB, "ICBP_ERK_drug_cors_allcombos_adapB.txt")
erk_preds_adap_adap<-erk_preds[grepl("*/adap_adap*", rownames(erk_preds)), ]
writeFile(erk_preds_adap_adap, "ICBP_ERK_drug_cors_allcombos_adap_adap.txt")
bad_preds<-temp_f[grepl("*/bad__*", rownames(temp_f)), ]
bad_preds_adapB<-bad_preds[grepl("*/adapB_*", rownames(bad_preds)), ]
writeFile(bad_preds_adapB, "ICBP_bad_drug_cors_allcombos_adapB.txt")
bad_preds_adap_adap<-bad_preds[grepl("*/adap_adap*", rownames(bad_preds)), ]
writeFile(bad_preds_adap_adap, "ICBP_bad_drug_cors_allcombos_adap_adap.txt")
akt_preds<-temp_f[grepl("*/akt__*", rownames(temp_f)), ]
akt_preds_adapB<-akt_preds[grepl("*/adapB_*", rownames(akt_preds)), ]
writeFile(akt_preds_adapB, "ICBP_akt_drug_cors_allcombos_adapB.txt")
akt_preds_adap_adap<-akt_preds[grepl("*/adap_adap*", rownames(akt_preds)), ]
writeFile(akt_preds_adap_adap, "ICBP_akt_drug_cors_allcombos_adap_adap.txt")
her2_preds<-temp_f[grepl("*/her2__*", rownames(temp_f)), ]
her2_preds_adapB<-her2_preds[grepl("*/adapB_*", rownames(her2_preds)), ]
writeFile(her2_preds_adapB, "ICBP_her2_drug_cors_allcombos_adapB.txt")
her2_preds_adap_adap<-her2_preds[grepl("*/adap_adap*", rownames(her2_preds)), ]
writeFile(her2_preds_adap_adap, "ICBP_her2_drug_cors_allcombos_adap_adap.txt")
igf1r_preds<-temp_f[grepl("*/igf1r__*", rownames(temp_f)), ]
igf1r_preds_adapB<-igf1r_preds[grepl("*/adapB_*", rownames(igf1r_preds)), ]
writeFile(igf1r_preds_adapB, "ICBP_igf1r_drug_cors_allcombos_adapB.txt")
igf1r_preds_adap_adap<-igf1r_preds[grepl("*/adap_adap*", rownames(igf1r_preds)), ]
writeFile(igf1r_preds_adap_adap, "ICBP_igf1r_drug_cors_allcombos_adap_adap.txt")
matrix(cbind(rhos,p_val),ncol=2)
pred_drug_AKT=pred_drug_only[,c(1:80,165)]
pred_drug_AKT
dim(pred_drug_AKT)
colnames(pred_drug_only)
View(pred_drug)
sigmaAKTcors= cor.ci(pred_drug_AKT, method="spearman", plot=FALSE)
sigmaAKTcors$ci[5]
sigmaAKTcors$rho[2,1]
bind = cbind(pred_drug_only[,1], pred_drug_only[,165])
View(bind)
# cors = matrix(0,144,4)
# rownames(cors)=colnames(pred_drug)[1:144]
#colnames(cors)=c("Lapatinib","Sigma.akt.1.2.inhibitor","Temsirolimus","Everolimus")
#pathways<-data_icbp
#drugs<-drugs$Sigma.AKT1.2.inhibitor
#akt_preds<-subset(pred_drug,select=grep("/akt", colnames(pred_drug) , ignore.case=FALSE, fixed=T))
#akt_adapB<-subset(akt_preds,select=grep("/adapB_multi", colnames(akt_preds) , ignore.case=FALSE, fixed=T))
#akt_adap_adap<-subset(akt_preds,select=grep("/adap_adap_mult", colnames(akt_preds) , ignore.case=FALSE, fixed=T))
#min(corr.test(cbind(akt_preds,pred_drug$Sigma.AKT1.2.inhibitor),method="spearman", use="pairwise")$p[,33])
bad_preds<-subset(pred_drug,select=grep("/bad", colnames(pred_drug) , ignore.case=FALSE, fixed=T))
her2_preds<-subset(pred_drug,select=grep("/her2", colnames(pred_drug) , ignore.case=FALSE, fixed=T))
igf1r_preds<-subset(pred_drug,select=grep("/igf1r", colnames(pred_drug) , ignore.case=FALSE, fixed=T))
erk_preds<-subset(pred_drug,select=grep("/erk", colnames(pred_drug) , ignore.case=FALSE, fixed=T))
length(bad_preds)
adapB<-subset(pred_drug,select=grep("adapB",colnames(pred_drug)))
adapS<-subset(pred_drug,select=grep("adap_adap",colnames(pred_drug)))
##############
#### Bootstrap:
dim(data_icbp)
pathways = 160 # number of pathway signatures
samplesize = nrow(pred_drug_only) # number of cell lines
n.boot = 10000 # number of bootstrap samples -- set at 10,000 or more for your final run
all_boot_cors = array(0,dim=c(160,90,n.boot)) # make a three dimensional array to store the bootstrap results
# added spearman here, it did not have it
for (i in 1:n.boot){
boot.sample = sample(1:samplesize,replace=T)
all_boot_cors[,,i]=cor(pred_drug2[boot.sample,1:160],pred_drug2[boot.sample,161:250],use="pairwise", method="spearman") # pred_drug2 holds the 160 predictions plus the 90 drug columns
}
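# Sketch (added, not part of the original analysis): summarise a single
# prediction/drug pair from the bootstrap array; the [1, 1, ] indices are placeholders,
# pick the row/column of interest.
pair_draws <- all_boot_cors[1, 1, ]
mean(pair_draws, na.rm = TRUE)                              # average bootstrap correlation
quantile(pair_draws, probs = c(0.025, 0.975), na.rm = TRUE) # 95% percentile CI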
dim(all_boot_cors) # 160 x 90 x n.boot
# to test
p_calc_compare=function(path1,path2,drug,cor_cut=0,cors=boot_cors,pathlist=pathway,druglist=drugs){
ind_p1 = which(pathlist==path1)
ind_p2 = which(pathlist==path2)
ind_drug = which(druglist==drug)
mean((cors[ind_p1,ind_drug,]-cors[ind_p2,ind_drug,])>cor_cut,na.rm=T)
}
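# In words (comment added): p_calc_compare returns the proportion of bootstrap draws in
# which cor(path1, drug) - cor(path2, drug) > cor_cut; as noted further down, with
# cor_cut = 0 one minus this proportion can be read as the p-value for Ha: path1 > path2.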
pval_comp_akt_adapB_vs_adap_adap = p_calc_compare(combn(colnames(akt_adap_adap),1),combn(colnames(akt_adapB),1),cors=akt_boot_cors, drug="Sigma.AKT1.2.inhibitor",pathlist = colnames(akt_preds),druglist = pred_drug[171:260] )
head(pval_comp_akt_adapB_vs_adap_adap)
# # means
# cor_mean = apply(boot_cors, c(1,2), mean, na.rm=T) ## average bootstrap cors. Should be similar to the non-boot values
# dimnames(cor_mean)=dimnames(drug_prediction_correlations_multi_spear)
# View(cor_mean)
# write.table(cor_mean,"~/Documents/ThesisWork/GitRepos/bild_signatures/cor_mean.txt",sep='\t', col.names = NA,quote=F)
# cor_means_ICBP=read.delim("~/Documents/ThesisWork/GitRepos/bild_signatures/cor_mean.txt", header=1, sep='\t',row.names=1)
#
# #lower
# lower = apply(boot_cors, c(1,2), quantile,na.rm=T,probs=.025) ## lower 95% CI
# dimnames(lower)=dimnames(drug_prediction_correlations_multi_spear)
# write.table(lower,"~/Documents/ThesisWork/GitRepos/bild_signatures/lower.txt",sep='\t', col.names = NA,quote=F)
# cor_lower_ICBP=read.delim("~/Documents/ThesisWork/GitRepos/bild_signatures/lower.txt", header=1, sep='\t',row.names=1)
# #upper
#
# upper = apply(boot_cors, c(1,2), quantile,na.rm=T,probs=.975) ## upper 95% CI
# dimnames(upper)=dimnames(drug_prediction_correlations_multi_spear)
# write.table(upper,"~/Documents/ThesisWork/GitRepos/bild_signatures/upper.txt",sep='\t', col.names = NA,quote=F)
# cor_upper_ICBP=read.delim("~/Documents/ThesisWork/GitRepos/bild_signatures/upper.txt", header=1, sep='\t',row.names=1)
## p-values: to test that a correlation is bigger than some value:
# is this the p-value that the correlation is bigger than some value??
cor_cutoff = 0 ## i.e. to test if the correlation is > or < 0. Note this can be anything, e.g. .1, .5, etc
p_calc = function(x,direction="greater",cor_cut=0){
if (!(direction %in% c("greater","less"))){stop("Invalid value for direction")}
if (direction=="greater"){return(mean(x>cor_cut,na.rm=T))}
if (direction=="less"){return(mean(x<cor_cut,na.rm=T))}
}
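# Toy check (added; made-up numbers, not analysis data): p_calc is just the proportion of
# bootstrap correlations beyond cor_cut, i.e. an empirical one-sided p-value.
toy_cors <- c(-0.1, 0.05, 0.2, 0.3, 0.4)
p_calc(toy_cors, direction = "greater", cor_cut = 0)  # 0.8: 4 of 5 draws are > 0
p_calc(toy_cors, direction = "less",    cor_cut = 0)  # 0.2: 1 of 5 draws is < 0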
head(all_boot_cors)
# These are not p-values, or are they? They just seem to be the proportions of bootstrap correlations above or below a certain value
# Obtaining, for every row and column, the proportion of the 10000 bootstrap iterations whose correlation is above or below a certain value.
# I think the "above" and "below" proportions should sum to 1...
View(all_boot_cors)
# This just contains, for each pathway/drug pair, the proportion of the 10000 bootstrap correlations that are above 0
# I don't get how you can get different p-values for the same correlation?
ps_above_0 = apply(all_boot_cors, c(1,2), p_calc)
dimnames(ps_above_0)=dimnames(drug_prediction_correlations_multi_spear)
View(ps_above_0)
dim(ps_above_0)
ps_below_0 = apply(all_boot_cors, c(1,2), p_calc,direction="less")
dimnames(ps_below_0 )=dimnames(drug_prediction_correlations_multi_spear)
View(ps_below_0)
dim(ps_below_0)
## p-values: to test two correlations against each other:
pathway = colnames(pred_drug_multi)[1:25] ## print to see the pathway names
print(pathway)
drugs = colnames(pred_drug_multi)[36:125] ## print to see the drug names
print(drugs)
# to test
p_calc_compare=function(path1,path2,drug,cor_cut=0,cors=boot_cors,pathlist=pathway,druglist=drugs){
ind_p1 = which(pathlist==path1)
ind_p2 = which(pathlist==path2)
ind_drug = which(druglist==drug)
mean((cors[ind_p1,ind_drug,]-cors[ind_p2,ind_drug,])>cor_cut,na.rm=T)
}
pval_comp = p_calc_compare("AKT_BAD.adap_multi.pathway_activity_testset.csv.akt","AKT_BAD_HER2.adap_multi.pathway_activity_testset.csv.akt","Sigma.AKT1.2.inhibitor")
head(pval_comp)
## pval_comp = 0.91 in this case means that AKT+BAD > AKT+BAD+HER2 in 91% of the bootstrap correlations. Thus the p-value for testing Ha: AKT+BAD > AKT+BAD+HER2 is 0.09
#######################
library(psych)
for (i in 1:length(colnames(data_icbp))){
for (j in 1:1){ #length(colnames(drug_srt))){
cors[i,j]=corr.test(pred_drug[,i],pred_drug$Sigma.AKT1.2.inhibitor,use="pairwise",method="spearman")
}
}
View(drug_prediction_correlations_multi)
View(drug_prediction_correlations_single)
single_and_multi= rbind(drug_prediction_correlations_multi,drug_prediction_correlations_single)
plot(single_and_multi[2])
View(single_and_multi)
row.names(single_and_multi)
# for single
single_and_multi[26,]
cor(single_and_multi[26,], single_and_multi[27,] ) # 0.5269367
cor(single_and_multi[26,], single_and_multi[28,] ) #0.7882588
cor(single_and_multi[26,], single_and_multi[29,] ) # 0.6173746
cor(single_and_multi[27,], single_and_multi[28,] ) # 0.2896494
cor(single_and_multi[27,], single_and_multi[29,] ) # -0.02523773
cor(single_and_multi[28,], single_and_multi[29,] ) # 0.7182353
#mutli
cor(single_and_multi[22,], single_and_multi[23,] ) #-0.6161527
cor(single_and_multi[22,], single_and_multi[24,] ) # -0.2015345
cor(single_and_multi[22,], single_and_multi[25,] ) # 0.4247083
cor(single_and_multi[23,], single_and_multi[24,] ) # -0.04692151
cor(single_and_multi[23,], single_and_multi[25,] ) # -0.4218923
cor(single_and_multi[24,], single_and_multi[25,] ) # -0.7734885
write.table(single_and_multi,"~/Documents/ThesisWork/GitRepos/bild_signatures/Correlations_ICBP_Single_Multi.txt",sep='\t', col.names = NA,quote=F)
```
Testing cor.ci
```{r}
data_icbp_small=data_icbp[,1:3]
cor(data_icbp_small)
corr.test(data_icbp_small, use= "pairwise", method= "spearman")
ci = cor.ci(data_icbp_small, use= "pairwise", method= "spearman", plot=FALSE)
ci[6]
ci$ci[5]
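# Note (added): cor.ci() from psych returns the correlation matrix in $rho and a
# bootstrapped confidence-interval table in $ci; the 5th column of that table is what the
# loops above treat as the p-value (temp_cors$ci[5]).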
```
<file_sep>/Rmarkdowns_scripts/ICBP/ICBP_drug_cors_ERK_BAD_analysis.Rmd
---
title: "Multi_pathway Correlations with ICBP"
author: "Shelley"
date: "March 18, 2015"
output: html_document
---
```{r, echo=FALSE}
# setwd("~//Documents/Thesiswork/GitRepos/bild_signatures/ICBP/")
#
# system("ls")
#
# ICBP_preds_drugs<-(read.table("ICBP_pathwaypredictions_drug_all_combos.txt", sep='\t', stringsAsFactors=FALSE, header=1, row.names=1))
# View(ICBP_preds_drugs)
# colnames(ICBP_preds_drugs)
# dimnames(ICBP_preds_drugs)
```
```{r echo=FALSE}
# colnames(ICBP_preds_drugs)
# rhos=p_val=NULL
# temp=cors_pvals=NULL
# for(i in 1:160){
# for(j in 1:90) {
# #i=1
# #j=1
# pathway_drug=subset(ICBP_preds_drugs, select=c(i,160+j))
# temp_cors= cor.ci(pathway_drug, method="spearman", plot=FALSE, n.iter=1000)
# temp<-c(temp_cors$rho[2,1],temp_cors$ci[5])
# temp<- cbind(temp[1],temp[2])
# cors_pvals<-rbind(cors_pvals,temp)
# rownames(cors_pvals)[j+(i-1)*90]=paste(colnames(pathway_drug)[1],colnames(pathway_drug)[2],sep='__')
# }
# }
#
#
# colnames(cors_pvals)<-c("Corr","p_val")
# temp_f
# writeFile(cors_pvals, "ICBP_allpathwaycombos_all_drugs_1000.txt")
cors_pvals<-read.table("~/Desktop/ICBP_allpathwaycombos_all_drugs.txt",header=1, row.names=1,sep='\t')
#dim(cors_pvals)
#head(cors_pvals)
chemolist<-c("5.FU","5.FdUR","CGC.11047","CGC.11144","CPT.11","Carboplatin","Cisplatin","Docetaxel","Doxorubicin","Epirubicin","Etoposide","Gemcitabine","Oxaliplatin","Methotrexate","ICRF.193","Ixabepilone","Paclitaxel","Pemetrexed","Topotecan","Vinorelbine")
#cellCycleInhibitors<-c("ZM447439","VX.680")
chemo_preds<-NULL
for(i in 1:length(chemolist)){
chemo_preds<-rbind(chemo_preds,cors_pvals[grepl(chemolist[i], rownames(cors_pvals), ignore.case = T),])
}
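# Quick check (added sketch): list the drug names actually captured by the grepl() filter
# above, to make sure the partial, case-insensitive matches behaved as intended. Row names
# were built as "<prediction column>__<drug column>", so stripping up to the last "__"
# should leave the drug name.
matched_drugs <- unique(sub("^.*__", "", rownames(chemo_preds)))
print(matched_drugs)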
#dim(cors_pvals)
#dim(chemo_preds)
bad_s<-chemo_preds[grepl("_multi.bad__",rownames(chemo_preds)),]
erk_s<-chemo_preds[grepl("_multi.erk__",rownames(chemo_preds)),]
plot(bad_s$Corr,erk_s$Corr,main="BAD and ERK pathway\nCorrelations with Chemo drugs")
print(paste("correlations of correlations in BAD and ERK predictions with chemotherapy",cor(bad_s$Corr,erk_s$Corr),sep=':'))
boxplot(bad_s$Corr,erk_s$Corr,names=c("bad predictions","erk predictions"),main="BAD and ERK pathway\nCorrelations with Chemo drugs")
print(t.test(bad_s$Corr,erk_s$Corr))
#wilcox.test(bad_s$Corr,erk_s$Corr)
# cors_sig<-subset(chemo_preds,chemo_preds$p_val<=0.05)
# bad_sig<-cors_sig[grepl("_multi.bad__",rownames(cors_sig)),]
# dim(bad_sig)
# erk_sig<-cors_sig[grepl("_multi.erk__",rownames(cors_sig)),]
# dim(erk_sig)
# boxplot(bad_sig$Corr,erk_sig$Corr,names=c("bad predictions","erk predictions"),main="Correlations with Chemo drugs")
# t.test(bad_sig$Corr,erk_sig$Corr)
# wilcox.test(bad_sig$Corr,erk_sig$Corr)
# bad_all<-bad_sig[grepl("akt_bad_her2_igf1r_erk.adap_adap_multi.bad",rownames(bad_sig)),]
# erk_all<-erk_sig[grepl("erk",rownames(erk_sig)),]
# erk_all
#-----------------------
targeted_list<-c("Sigma.AKT1.2.inhibitor", "Triciribine", "AS.252424", "BEZ235", "BIBW2992", "ERKi.II..FR180304.", "Erlotinib", "Everolimus", "GSK1120212", "GSK1059868", "GSK1838705", "GSK2119563", "GSK2126458", "GSK2141795", "GSK1059615", "Lapatinib", "Imatinib", "Gefitinib", "Rapamycin", "Temsirolimus")
#aurora<-c("GSK1070916")
#targeted_list<-c("AG1478","Sigma.AKT1.2.inhibitor", "Triciribine", "AS.252424", "AZD6244", "BEZ235", "BIBW2992", "ERKi.II..FR180304.", "Erlotinib", "Everolimus", "GSK923295", "GSK1070916", "GSK1120212", "GSK1059868", "GSK1838705", "GSK461364", "GSK2119563", "GSK2126458", "GSK2141795", "GSK1059615", "GSK650394", "Lapatinib", "Imatinib", "Gefitinib", "Rapamycin", "Vorinostat", "Temsirolimus", "XRP44X", "ZM447439")
target_preds=NULL
for(i in 1:length(targeted_list)){
target_preds<-rbind(target_preds,cors_pvals[grepl(targeted_list[i], rownames(cors_pvals), ignore.case = T),])
}
erk_sig_t<-target_preds[grepl("_multi.erk__",rownames(target_preds)),]
bad_sig_t<-target_preds[grepl("_multi.bad__",rownames(target_preds)),]
#plot(bad_sig_t$Corr,erk_sig_t$Corr,main="BAD and ERK pathway\nCorrelations with Targeted drugs")
#print(paste("correlations of correlations in BAD and ERK predictions with targeted therapy",cor(bad_sig_t$Corr,erk_sig_t$Corr),sep=':'))
boxplot(bad_sig_t$Corr,erk_sig_t$Corr,names=c("bad predictions","erk predictions"),main="Correlations with Targeted drugs")
print(t.test(bad_sig_t$Corr,erk_sig_t$Corr))
# cors_sig_t<-subset(target_preds,target_preds$p_val<=0.05)
# dim(cors_sig_t)
# bad_sig_t<-cors_sig_t[grepl("_multi.bad__",rownames(cors_sig_t)),]
# dim(bad_sig_t)
# erk_sig_t<-cors_sig_t[grepl("_multi.erk__",rownames(cors_sig_t)),]
# akt_sig_t<-cors_sig_t[grepl("_multi.akt__",rownames(cors_sig_t)),]
# head(akt_sig_t)
# dim(erk_sig_t)
# boxplot(bad_sig_t$Corr,erk_sig_t$Corr,names=c("bad predictions","erk predictions"),main="Correlations with Targeted drugs")
#
# t.test(bad_sig_t$Corr,erk_sig_t$Corr)
# wilcox.test(bad_sig$Corr,erk_sig$Corr)
# boxplot(bad_sig_t$Corr,akt_sig_t$Corr,names=c("bad predictions","akt predictions"),main="Correlations with Targeted drugs")
# t.test(bad_sig_t$Corr,akt_sig_t$Corr)
# wilcox.test(bad_sig$Corr,akt_sig_t$Corr)
#bad_all<-bad_sig[grepl("akt_bad_her2_igf1r_erk.adap_adap_multi.bad",rownames(bad_sig)),]
#erk_all<-erk_sig[grepl(".adap_adap_multi.erk",rownames(erk_sig)),]
#erk_all
```
<file_sep>/Rmarkdowns_scripts/ICBP/18_Mar_multi_icbp_her2_exploration.Rmd
---
title: "multipathway_ASSIGN"
author: "<NAME>"
date: "December 23, 2014"
output: pdf_document
---
```{r,echo=FALSE,cache=TRUE,include=FALSE}
library(sva)
library(ASSIGN)
library(utils)
```
Reading in the signature datasets...
```{r include=FALSE,echo=FALSE}
#setwd("~/Documents/ThesisWork/GitRepos/bild_signature_validation_old_repo/Datasets")
setwd("~/Dropbox/bild_signatures//Datasets")
expr<-as.matrix(read.table("GFP18_AKT_BAD_HER2_IGF1R_RAF_ERK.tpmlog",sep='\t',row.names=1,header=1))
control<-subset(expr, select=GFP.1:GFP.12)
her2<-subset(expr, select=HER2.1:HER2.6)
akt<-subset(expr,select=AKT.1:AKT.6)
bad<-subset(expr,select=BAD.1:BAD.6)
igf1r<-subset(expr,select=IGF1R.1:IGF1R.6)
erk<-subset(expr,select=ERK.1:ERK.6)
expr_all<-cbind(control,akt,bad,her2,igf1r,erk)
dim(expr_all)
#icbp<-as.matrix(read.table("~/Documents/ThesisWork/GitRepos/bild_signature_validation_old_repo/Datasets/ICBP/icbp_Rsubread_tpmlog.txt", sep='\t', stringsAsFactors=FALSE, header=1, row.names=1))
icbp<-as.matrix(read.table("icbp_Rsubread_tpmlog.txt", sep='\t', stringsAsFactors=FALSE, header=1, row.names=1))
expr_all_f <-expr_all[apply(expr_all[,1:41]==0,1,mean) < 0.85,]
dim(expr_all_f)
expr_all_icbp_f<-merge_drop(expr_all_f,icbp,by=0)
sub<-c(12,6,6,5,6,6,55)
pdf("pca_plots.pdf")
pcaplot(expr_all_icbp_f,sub)
bat1<-as.matrix(cbind(c(colnames(expr_all_f),colnames(icbp)),c(rep(1,length(colnames(expr_all_f))),rep(2,length(colnames(icbp))))))
#bat1
combat_expr1<-ComBat(dat=expr_all_icbp_f, batch=bat1[,2], mod=NULL, numCovs=NULL)
pcaplot(combat_expr1,sub)
dev.off()
c_gfp<-subset(combat_expr1, select=GFP.1:GFP.12)
c_akt<-subset(combat_expr1, select=AKT.1:AKT.6)
c_bad<-subset(combat_expr1, select=BAD.1:BAD.6)
c_her2<-subset(combat_expr1, select=HER2.1:HER2.6)
c_igf1r<-subset(combat_expr1, select=IGF1R.1:IGF1R.6)
c_erk<-subset(combat_expr1, select=ERK.1:ERK.6)
c_test<-combat_expr1[,42:96]
basedir="~/Dropbox/bild_signatures/icbp_15_mar_all"
dir.create( basedir)
#############trying one pathway at a time in multipathway#############
#1. AKT
trainingLabela<-list(control=list(akt=1:12),akt=13:18)
sub_dir<-paste(basedir,"akt",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = cbind(c_gfp,c_akt),test=c_test,trainingLabel1 = trainingLabela,g=150,out_dir_base = sub_dir,single = 1)
#2. BAD
trainingLabelb<-list(control=list(bad=1:12),bad=13:18)
sub_dir<-paste(basedir,"bad",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = cbind(c_gfp,c_bad),test=c_test,trainingLabel1 = trainingLabelb,g=150,out_dir_base = sub_dir,single = 1)
#3. HER2
trainingLabelh<-list(control=list(her2=1:12),her2=13:17)
sub_dir<-paste(basedir,"her2_rerun_1_50",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = cbind(c_gfp,c_her2),test=c_test,trainingLabel1 = trainingLabelh,g=50,out_dir_base = sub_dir,single = 1)
gfp_her2<-cbind(c_gfp,c_her2)
HER2_multi<-getGeneList("~/Dropbox/bild_signatures/icbp_15_mar_all/akt_bad_her2_igf1r_erk/adapB_multi/output.rda")
dir.create("~/Dropbox//bild_signatures/icbp_15_mar_all/Her2_1")
set.seed(1234)
assign.wrapper(trainingData=gfp_her2, testData=c_test, trainingLabel=trainingLabelh, n_sigGene=5, adaptive_B=TRUE, adaptive_S=TRUE, mixture_beta=F, outputDir= "~/Dropbox//bild_signatures//icbp_15_mar_all/Her2_1", p_beta=0.01, theta0=0.05, theta1=0.9, iter=10000, burn_in=1000)
set.seed(1234)
dir.create("~/Dropbox//bild_signatures//icbp_15_mar_all/Her2_try3_adap_adap_setseed")
assign.wrapper(trainingData=gfp_her2, testData=c_test, trainingLabel=trainingLabelh, n_sigGene=50, adaptive_B=TRUE, adaptive_S=TRUE, mixture_beta=F, outputDir= "~/Dropbox//bild_signatures//icbp_15_mar_all/Her2_try3_adap_adap_setseed", p_beta=0.01, theta0=0.05, theta1=0.9, iter=100000, burn_in=5000)
set.seed(1234)
dir.create("~/Dropbox//bild_signatures//icbp_15_mar_all/Her2_try3_adap_b_setseed")
assign.wrapper(trainingData=gfp_her2, testData=c_test, trainingLabel=trainingLabelh, n_sigGene=50, adaptive_B=TRUE, adaptive_S=F, mixture_beta=F, outputDir= "~/Dropbox//bild_signatures//icbp_15_mar_all/Her2_try3_adap_b_setseed", p_beta=0.01, theta0=0.05, theta1=0.9, iter=100000, burn_in=5000)
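# Note (added): in assign.wrapper, adaptive_B lets the background (baseline) level adapt
# to the test data and adaptive_S lets the signature genes/weights adapt as well; the two
# set.seed(1234) runs above differ only in adaptive_S (TRUE vs FALSE), so they compare the
# adaptive-signature and background-only versions of the HER2 signature.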
gfp_her2["KLK8",]
gfp_her2["KRT81",]
gfp_her2["CALB2",]
gfp_her2["ERBB2",]
gfp_her2["EEF1A2",]
#4. IGF1R
trainingLabeli<-list(control=list(igf1r=1:12),igf1r=13:18)
sub_dir<-paste(basedir,"igf1r",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = cbind(c_gfp,c_igf1r),test=c_test,trainingLabel1 = trainingLabeli,g=100,out_dir_base = sub_dir,single = 1)
#5. ERK
trainingLabele<-list(control=list(erk=1:12),erk=13:18)
sub_dir<-paste(basedir,"erk",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = cbind(c_gfp,c_erk),test=c_test,trainingLabel1 = trainingLabele,g=100,out_dir_base = sub_dir,single = 1)
#############trying two pathways at a time in multipathway#############
#1. HER2 & AKT
trainha<-cbind(c_gfp,c_akt,c_her2)
trainingLabelha<-list(control=list(akt=1:12,her2=1:12),akt=13:18,her2=19:23)
sub_dir=paste(basedir,"her2_akt",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = trainha,test=c_test,trainingLabel1 = trainingLabelha,g=c(150,15),out_dir_base = sub_dir)
#2. HER2 & BAD
trainhb<-cbind(c_gfp,c_bad,c_her2)
trainingLabelhb<-list(control=list(bad=1:12,her2=1:12),bad=13:18,her2=19:23)
sub_dir=paste(basedir,"her2_bad",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainhb,test=c_test,trainingLabel1 = trainingLabelhb,g=c(150,15),out_dir_base = sub_dir)
#3. HER2 & IGF1R
trainhi<-cbind(c_gfp,c_igf1r,c_her2)
trainingLabelhi<-list(control=list(igf1r=1:12,her2=1:12),igf1r=13:18,her2=19:23)
sub_dir=paste(basedir,"her2_igf1r",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainhi,test=c_test,trainingLabel1 = trainingLabelhi,g=c(100,15),out_dir_base = sub_dir)
#4. AKT & BAD
trainab<-cbind(c_gfp,c_akt,c_bad)
trainingLabelab<-list(control=list(akt=1:12,bad=1:12),akt=13:18,bad=19:24)
sub_dir=paste(basedir,"akt_bad",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainab,test=c_test,trainingLabel1 = trainingLabelab,g=c(150,150),out_dir_base = sub_dir)
#5. AKT & IGF1R
trainai<-cbind(c_gfp,c_akt,c_igf1r)
trainingLabelai<-list(control=list(akt=1:12,igf1r=1:12),akt=13:18,igf1r=19:24)
sub_dir=paste(basedir,"akt_igf1r",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = trainai,test=c_test,trainingLabel1 = trainingLabelai,g=c(150,100),out_dir_base = sub_dir)
#6. BAD & IGF1R
trainbi<-cbind(c_gfp,c_bad,c_igf1r)
trainingLabelbi<-list(control=list(bad=1:12,igf1r=1:12),bad=13:18,igf1r=19:24)
sub_dir=paste(basedir,"bad_igf1r",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = trainbi,test=c_test,trainingLabel1 = trainingLabelbi,g=c(150,100),out_dir_base = sub_dir)
#7. ERK & IGF1R
trainei<-cbind(c_gfp,c_erk,c_igf1r)
trainingLabelei<-list(control=list(erk=1:12,igf1r=1:12),erk=13:18,igf1r=19:24)
sub_dir=paste(basedir,"erk_igf1r",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = trainei,test=c_test,trainingLabel1 = trainingLabelei,g=c(100,100),out_dir_base = sub_dir)
#8. ERK & AKT
trainea<-cbind(c_gfp,c_erk,c_akt)
trainingLabelea<-list(control=list(erk=1:12,akt=1:12),erk=13:18,akt=19:24)
sub_dir=paste(basedir,"erk_akt",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = trainea,test=c_test,trainingLabel1 = trainingLabelea,g=c(100,150),out_dir_base = sub_dir)
#9. ERK & BAD
traineb<-cbind(c_gfp,c_erk,c_bad)
trainingLabeleb<-list(control=list(erk=1:12,bad=1:12),erk=13:18,bad=19:24)
sub_dir=paste(basedir,"erk_bad",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = traineb,test=c_test,trainingLabel1 = trainingLabeleb,g=c(100,150),out_dir_base = sub_dir)
#10. ERK & HER2
traineh<-cbind(c_gfp,c_erk,c_her2)
trainingLabeleh<-list(control=list(erk=1:12,her2=1:12),erk=13:18,her2=19:23)
sub_dir=paste(basedir,"erk_her2",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = traineh,test=c_test,trainingLabel1 = trainingLabeleh,g=c(100,15),out_dir_base = sub_dir)
#############trying three pathways at a time in multipathway#############
#1. HER2, AKT & BAD
trainhab<-cbind(c_gfp,c_akt,c_bad,c_her2)
trainingLabelhab<-list(control=list(akt=1:12,bad=1:12,her2=1:12),akt=13:18,bad=19:24,her2=25:29)
sub_dir=paste(basedir,"her2_akt_bad",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainhab,test=c_test,trainingLabel1 = trainingLabelhab,g=c(150,150,15),out_dir_base = sub_dir)
#2. HER2, BAD & IGF1R
trainhbi<-cbind(c_gfp,c_igf1r,c_bad,c_her2)
trainingLabelhbi<-list(control=list(igf1r=1:12,bad=1:12,her2=1:12),igf1r=13:18,bad=19:24,her2=25:29)
sub_dir=paste(basedir,"her2_bad_igf1r",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainhbi,test=c_test,trainingLabel1 = trainingLabelhbi,g=c(100,150,15),out_dir_base = sub_dir)
#3. AKT, BAD & IGF1R
trainabi<-cbind(c_gfp,c_akt,c_bad,c_igf1r)
trainingLabelabi<-list(control=list(akt=1:12,bad=1:12,igf1r=1:12),akt=13:18,bad=19:24,igf1r=25:30)
sub_dir=paste(basedir,"akt_bad_igf1r",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainabi,test=c_test,trainingLabel1 = trainingLabelabi,g=c(150,150,100),out_dir_base = sub_dir)
#4. AKT, BAD & ERK
trainabe<-cbind(c_gfp,c_akt,c_bad,c_erk)
trainingLabelabe<-list(control=list(akt=1:12,bad=1:12,erk=1:12),akt=13:18,bad=19:24,erk=25:30)
sub_dir=paste(basedir,"akt_bad_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainabe,test=c_test,trainingLabel1 = trainingLabelabe,g=c(150,150,100),out_dir_base = sub_dir)
#5. AKT, HER2 & IGF1R
trainahi<-cbind(c_gfp,c_akt,c_her2,c_igf1r)
trainingLabelahi<-list(control=list(akt=1:12,her2=1:12,igf1r=1:12),akt=13:18,her2=19:23,igf1r=24:29)
sub_dir=paste(basedir,"akt_her2_igf1r",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainahi,test=c_test,trainingLabel1 = trainingLabelahi,g=c(150,15,100),out_dir_base = sub_dir)
#6. AKT, HER2 & ERK
trainahe<-cbind(c_gfp,c_akt,c_her2,c_erk)
trainingLabelahe<-list(control=list(akt=1:12,her2=1:12,erk=1:12),akt=13:18,her2=19:23,erk=24:29)
sub_dir=paste(basedir,"akt_her2_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainahe,test=c_test,trainingLabel1 = trainingLabelahe,g=c(150,15,100),out_dir_base = sub_dir)
#7. AKT, IGF1R & ERK
trainaie<-cbind(c_gfp,c_akt,c_igf1r,c_erk)
trainingLabelaie<-list(control=list(akt=1:12,igf1r=1:12,erk=1:12),akt=13:18,igf1r=19:24,erk=25:30)
sub_dir=paste(basedir,"akt_igf1r_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainaie,test=c_test,trainingLabel1 = trainingLabelaie,g=c(150,100,100),out_dir_base = sub_dir)
#8. BAD, IGF1R & ERK
trainbie<-cbind(c_gfp,c_bad,c_igf1r,c_erk)
trainingLabelbie<-list(control=list(bad=1:12,igf1r=1:12,erk=1:12),bad=13:18,igf1r=19:24,erk=25:30)
sub_dir=paste(basedir,"bad_igf1r_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainbie,test=c_test,trainingLabel1 = trainingLabelbie,g=c(150,100,100),out_dir_base = sub_dir)
#9. BAD, HER2 & ERK
trainbhe<-cbind(c_gfp,c_bad,c_her2,c_erk)
trainingLabelbhe<-list(control=list(bad=1:12,her2=1:12,erk=1:12),bad=13:18,her2=19:23,erk=24:29)
sub_dir=paste(basedir,"bad_her2_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainbhe,test=c_test,trainingLabel1 = trainingLabelbhe,g=c(150,15,100),out_dir_base = sub_dir)
#10. IGF1R, HER2 & ERK
trainihe<-cbind(c_gfp,c_igf1r,c_her2,c_erk)
trainingLabelihe<-list(control=list(igf1r=1:12,her2=1:12,erk=1:12),igf1r=13:18,her2=19:23,erk=24:29)
sub_dir=paste(basedir,"igf1r_her2_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainihe,test=c_test,trainingLabel1 = trainingLabelihe,g=c(100,15,100),out_dir_base = sub_dir)
########################trying four at a time#####################
#1. AKT, BAD, HER2 & IGF1R
trainabhi<-cbind(c_gfp,c_akt,c_bad,c_her2,c_igf1r)
trainingLabelabhi<-list(control=list(akt=1:12,bad=1:12,her2=1:12,igf1r=1:12),akt=13:18, bad=19:24,her2=25:29,igf1r=30:35)
sub_dir=paste(basedir,"akt_bad_her2_igf1r",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainabhi,test=c_test,trainingLabel1 = trainingLabelabhi,g=c(150,150,15,100),out_dir_base = sub_dir)
#2. AKT, BAD, HER2 & ERK
trainabhe<-cbind(c_gfp,c_akt,c_bad,c_her2,c_erk)
trainingLabelabhe<-list(control=list(akt=1:12,bad=1:12,her2=1:12,erk=1:12),akt=13:18, bad=19:24,her2=25:29,erk=30:35)
sub_dir=paste(basedir,"akt_bad_her2_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainabhe,test=c_test,trainingLabel1 = trainingLabelabhe,g=c(150,150,15,100),out_dir_base = sub_dir)
#3. AKT, BAD, IGF1R & ERK
trainabie<-cbind(c_gfp,c_akt,c_bad,c_igf1r,c_erk)
trainingLabelabie<-list(control=list(akt=1:12,bad=1:12,igf1r=1:12,erk=1:12),akt=13:18, bad=19:24,igf1r=25:30,erk=31:36)
sub_dir=paste(basedir,"akt_bad_igf1r_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainabie,test=c_test,trainingLabel1 = trainingLabelabie,g=c(150,150,100,100),out_dir_base = sub_dir)
#4. AKT, IGF1R, HER2 & ERK
trainaihe<-cbind(c_gfp,c_akt,c_igf1r,c_her2,c_erk)
#trainingLabelabie<-list(control=list(akt=1:12,bad=1:12,igf1r=1:12,erk=1:12),akt=13:18, bad=19:24,igf1r=25:30,erk=31:36)
trainingLabelaihe<-list(control=list(akt=1:12,igf1r=1:12,her2=1:12,erk=1:12),akt=13:18, igf1r=19:24,her2=25:29,erk=30:35)
sub_dir=paste(basedir,"akt_igf1r_her2_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainaihe,test=c_test,trainingLabel1 = trainingLabelaihe,g=c(150,100,15,100),out_dir_base = sub_dir)
#5. BAD, IGF1R, HER2 & ERK
trainbihe<-cbind(c_gfp,c_bad,c_igf1r,c_her2,c_erk)
trainingLabelbihe<-list(control=list(bad=1:12,igf1r=1:12,her2=1:12,erk=1:12),bad=13:18, igf1r=19:24,her2=25:29,erk=30:35)
sub_dir=paste(basedir,"bad_igf1r_her2_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainbihe,test=c_test,trainingLabel1 = trainingLabelbihe,g=c(150,100,15,100),out_dir_base = sub_dir)
#########including all 5 pathways######
trainhall5<-cbind(c_gfp,c_akt,c_bad,c_her2,c_igf1r, c_erk)
trainingLabelall5<-list(control=list(akt=1:12,bad=1:12,her2=1:12,igf1r=1:12, erk=1:12),akt=13:18, bad=19:24,her2=25:29,igf1r=30:35, erk=36:41)
sub_dir=paste(basedir,"akt_bad_her2_igf1r_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainhall5,test=c_test,trainingLabel1 = trainingLabelall5,g=c(150,150,15,100,100),out_dir_base = sub_dir)
```
```{r}
```
Read in all the predictions and make files with all results
```{r, echo=TRUE}
# multi nonadpative
#setwd("~/Documents/Thesiswork/ICBP/multi_icbp_expr_pc/")
source('~/Dropbox/bild_signatures/bild_signatures/Rmarkdowns_scripts//Key_ASSIGN_functions.Rmd', echo=TRUE)
setwd("~/Dropbox/bild_signatures/icbp_15_mar_all/")
filenames_icbp_multi<-system("ls */*/pathway_activity_testset*", intern=TRUE)
filenames_icbp_multi
for(i in 1:length(filenames_icbp_multi))
{
f<-read.csv(filenames_icbp_multi[i], header=1,row.names=1) ###reading in the files one at a time
colnames(f)<-paste(filenames_icbp_multi[i],colnames(f),sep='/')
if(i==1){
data_icbp<-f
}
else{
data_icbp<-cbind(data_icbp,f)
}
}
head(data_icbp)
dim(data_icbp)
colnames(data_icbp)<-gsub(pattern = "/pathway_activity_testset.csv",replacement = "",x = colnames(data_icbp))
head(data_icbp)
rownames(data_icbp)[1:7]<-c("184A1","184B5","21MT1","21MT2","21NT","21PT","600MPE")
setwd("~/Dropbox/bild_signatures//Datasets")
drugs<-read.delim("ICBP_drugs.txt", header=1, sep='\t',row.names=1)
head(drugs);dim(drugs)
pred_drug<-merge_drop(data_icbp,drugs,by=0)
dim(pred_drug)
```
colnames(cors)=c("Lapatinib","Sigma.akt.1.2.inhibitor","Temsirolimus","Everolimus")
#pathways<-data_icbp
#drugs<-drugs$Sigma.AKT1.2.inhibitor
akt_preds<-subset(pred_drug,select=grep("/akt", colnames(pred_drug) , ignore.case=FALSE, fixed=T))
akt_adapB<-subset(akt_preds,select=grep("/adapB_", colnames(akt_preds) , ignore.case=FALSE, fixed=T))
dim(akt_adapB)
akt_adap_adap<-subset(akt_preds,select=grep("/adap_adap_", colnames(akt_preds) , ignore.case=FALSE, fixed=T))
dim(akt_adap_adap)
min(corr.test(cbind(akt_preds,pred_drug$Sigma.AKT1.2.inhibitor),method="spearman", use="pairwise")$p[,33])
bad_preds<-subset(pred_drug,select=grep("/bad", colnames(pred_drug) , ignore.case=FALSE, fixed=T))
her2_preds<-subset(pred_drug,select=grep("/her2", colnames(pred_drug) , ignore.case=FALSE, fixed=T))
igf1r_preds<-subset(pred_drug,select=grep("/igf1r", colnames(pred_drug) , ignore.case=FALSE, fixed=T))
erk_preds<-subset(pred_drug,select=grep("/erk", colnames(pred_drug) , ignore.case=FALSE, fixed=T))
length(bad_preds)
adapB<-subset(pred_drug,select=grep("adapB",colnames(pred_drug)))
adapS<-subset(pred_drug,select=grep("adap_adap",colnames(pred_drug)))
##############
#### Bootstrap:
pathways = 80 # number of pathway signatures
samplesize = nrow(adapB) # number of cell lines
n.boot = 100 # number of bootstrap samples -- set at 10,000 or more for your final run
adapB_boot_cors = array(0,dim=c(80,1,n.boot)) # make a three dimensional array to store the bootstrap results
akt_adapB_boot_cors = akt_adap_adap_boot_cors =array(0,dim=c(16,1,n.boot))
boot.sample = sample(1:samplesize,replace=T) # draw one bootstrap sample to sanity-check the correlation matrix dimensions below
test<-cor(adapB[boot.sample,1:80],pred_drug[boot.sample,171:260],use="pairwise", method="spearman")
dim(test)
# added spearman here, it did not have it
for (i in 1:100){
boot.sample = sample(1:samplesize,replace=T)
akt_adapB_boot_cors[,,i]=cor(akt_adapB[boot.sample,1:16],pred_drug[boot.sample,175],use="pairwise", method="spearman")
akt_adap_adap_boot_cors[,,i]=cor(akt_adap_adap[boot.sample,1:16],pred_drug[boot.sample,175],use="pairwise", method="spearman")
}
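# one way to summarize the bootstrap draws (a sketch using only the objects defined above):
# mean and 95% CI of the correlation for each AKT signature combination
akt_adapB_boot_mean  <- apply(akt_adapB_boot_cors, c(1,2), mean, na.rm=TRUE)
akt_adapB_boot_lower <- apply(akt_adapB_boot_cors, c(1,2), quantile, probs=0.025, na.rm=TRUE)
akt_adapB_boot_upper <- apply(akt_adapB_boot_cors, c(1,2), quantile, probs=0.975, na.rm=TRUE)
data.frame(lower=akt_adapB_boot_lower[,1], mean=akt_adapB_boot_mean[,1], upper=akt_adapB_boot_upper[,1],
           row.names=colnames(akt_adapB)[1:16])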
adapB[1]
View(adapB_boot_cors[1,1,1])
dim(adapB_boot_cors)
dim(akt_boot_cors) #24 x 90 x 10000
# to test
p_calc_compare=function(path1,path2,drug,cor_cut=0,cors=boot_cors,pathlist=pathway,druglist=drugs){
ind_p1 = which(pathlist==path1)
ind_p2 = which(pathlist==path2)
ind_drug = which(druglist==drug)
mean((cors[ind_p1,ind_drug,]-cors[ind_p2,ind_drug,])>cor_cut,na.rm=T)
}
pval_comp_akt_adapB_vs_adap_adap = p_calc_compare(combn(colnames(akt_adap_adap),1),combn(colnames(akt_adapB),1),cors=akt_boot_cors, drug="Sigma.AKT1.2.inhibitor",pathlist = colnames(akt_preds),druglist = pred_drug[171:260] )
head(pval_comp_akt_adapB_vs_adap_adap)
# # means
# cor_mean = apply(boot_cors, c(1,2), mean, na.rm=T) ## average bootstrap cors. Should be similar to the non-boot values
# dimnames(cor_mean)=dimnames(drug_prediction_correlations_multi_spear)
# View(cor_mean)
# write.table(cor_mean,"~/Documents/ThesisWork/GitRepos/bild_signatures/cor_mean.txt",sep='\t', col.names = NA,quote=F)
# cor_means_ICBP=read.delim("~/Documents/ThesisWork/GitRepos/bild_signatures/cor_mean.txt", header=1, sep='\t',row.names=1)
#
# #lower
# lower = apply(boot_cors, c(1,2), quantile,na.rm=T,probs=.025) ## lower 95% CI
# dimnames(lower)=dimnames(drug_prediction_correlations_multi_spear)
# write.table(lower,"~/Documents/ThesisWork/GitRepos/bild_signatures/lower.txt",sep='\t', col.names = NA,quote=F)
# cor_lower_ICBP=read.delim("~/Documents/ThesisWork/GitRepos/bild_signatures/lower.txt", header=1, sep='\t',row.names=1)
# #upper
#
# upper = apply(boot_cors, c(1,2), quantile,na.rm=T,probs=.975) ## upper 95% CI
# dimnames(upper)=dimnames(drug_prediction_correlations_multi_spear)
# write.table(upper,"~/Documents/ThesisWork/GitRepos/bild_signatures/upper.txt",sep='\t', col.names = NA,quote=F)
# cor_upper_ICBP=read.delim("~/Documents/ThesisWork/GitRepos/bild_signatures/upper.txt", header=1, sep='\t',row.names=1)
## p-values: to test that a correlation is bigger than some value:
# is this the p-value that is it bigger than some value??
cor_cutoff = 0 ## i.e. to test is the correlation is > or < 0. Note this can be anything, i.e. .1, .5, etc
p_calc = function(x,direction="greater",cor_cut=0){
if (!(direction %in% c("greater","less"))){stop("Invalid value for direction")}
if (direction=="greater"){return(mean(x>cor_cut,na.rm=T))}
if (direction=="less"){return(mean(x<cor_cut,na.rm=T))}
}
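# quick illustration on a single pathway/drug pair (assumes boot_cors from the bootstrap above is in memory):
# the returned value is the proportion of bootstrap correlations above cor_cutoff
p_calc(boot_cors[1,1,], direction="greater", cor_cut=cor_cutoff)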
head(boot_cors)
# Are these really p-values? They just look like the proportions of correlations above or below a certain value.
# Obtaining, for every row and column, the fraction of the 10000 bootstrap iterations that fall above or below a certain value.
# I think the above and below fractions should sum to 1...
View(boot_cors)
# This just contains the proportion of the 10000 bootstrap cor values that are above 0
# I don't get how you can get different p-values for the same correlation?
ps_above_0 = apply(boot_cors, c(1,2), p_calc)
dimnames(ps_above_0)=dimnames(drug_prediction_correlations_multi_spear)
View(ps_above_0)
dim(ps_above_0)
ps_below_0 = apply(boot_cors, c(1,2), p_calc,direction="less")
dimnames(ps_below_0 )=dimnames(drug_prediction_correlations_multi_spear)
View(ps_below_0)
dim(ps_below_0)
## p-values: to test two correlations against each other:
pathway = colnames(pred_drug_multi)[1:25] ## print to see the pathway names
print(pathway)
drugs = colnames(pred_drug_multi)[36:125] ## print to see the drug names
print(drugs)
# to test
p_calc_compare=function(path1,path2,drug,cor_cut=0,cors=boot_cors,pathlist=pathway,druglist=drugs){
ind_p1 = which(pathlist==path1)
ind_p2 = which(pathlist==path2)
ind_drug = which(druglist==drug)
mean((cors[ind_p1,ind_drug,]-cors[ind_p2,ind_drug,])>cor_cut,na.rm=T)
}
pval_comp = p_calc_compare("AKT_BAD.adap_multi.pathway_activity_testset.csv.akt","AKT_BAD_HER2.adap_multi.pathway_activity_testset.csv.akt","Sigma.AKT1.2.inhibitor")
head(pval_comp)
## pval_comp = 0.91 in this case means that AKT+BAD > AKT+BAD+HER2 in 91% of the bootstrap correlations. Thus the p-value for testing Ha: AKT+BAD > AKT+BAD+HER2 is 0.09
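# equivalently, the one-sided p-value for Ha: AKT+BAD > AKT+BAD+HER2 is one minus that bootstrap proportion:
1 - pval_comp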
#######################
library(psych)
for (i in 1:length(colnames(data_icbp))){
for (j in 1:1){ #length(colnames(drug_srt))){
cors[i,j]=corr.test(pred_drug[,i],pred_drug$Sigma.AKT1.2.inhibitor,use="pairwise",method="spearman")
}
}
View(drug_prediction_correlations_multi)
View(drug_prediction_correlations_single)
single_and_multi= rbind(drug_prediction_correlations_multi,drug_prediction_correlations_single)
plot(single_and_multi[2])
View(single_and_multi)
row.names(single_and_multi)
# for single
single_and_multi[26,]
cor(single_and_multi[26,], single_and_multi[27,] ) # 0.5269367
cor(single_and_multi[26,], single_and_multi[28,] ) #0.7882588
cor(single_and_multi[26,], single_and_multi[29,] ) # 0.6173746
cor(single_and_multi[27,], single_and_multi[28,] ) # 0.2896494
cor(single_and_multi[27,], single_and_multi[29,] ) # -0.02523773
cor(single_and_multi[28,], single_and_multi[29,] ) # 0.7182353
#multi
cor(single_and_multi[22,], single_and_multi[23,] ) #-0.6161527
cor(single_and_multi[22,], single_and_multi[24,] ) # -0.2015345
cor(single_and_multi[22,], single_and_multi[25,] ) # 0.4247083
cor(single_and_multi[23,], single_and_multi[24,] ) # -0.04692151
cor(single_and_multi[23,], single_and_multi[25,] ) # -0.4218923
cor(single_and_multi[24,], single_and_multi[25,] ) # -0.7734885
write.table(single_and_multi,"~/Documents/ThesisWork/GitRepos/bild_signatures/Correlations_ICBP_Single_Multi.txt",sep='\t', col.names = NA,quote=F)
```
Creating heatmaps
```{r }
if (!require("gplots")) {
install.packages("gplots", dependencies = TRUE)
library(gplots)
}
if (!require("RColorBrewer")) {
install.packages("RColorBrewer", dependencies = TRUE)
library(RColorBrewer)
}
multi<-data#read.table("~/Desktop/multipathway_preds.txt", sep='\t',row.names=1,header=1)
single<-read.csv("~/Dropbox/bild_signatures/multi_icbp_expr_pc/single_pathway_results.csv", row.names=1,header=1)
my_palette <- colorRampPalette(c("darkblue","aliceblue","brown4"))(n = 299)
col_breaks = c(seq(0,0.2,length=100), seq(0.2,0.4,length=100), seq(0.4,1,length=100))
# creates a 5 x 5 inch image
png("heatmaps_in_r.png", # create PNG for the heat map
width = 5*300, # 5 x 300 pixels
height = 5*300,
res = 300, # 300 pixels per inch
pointsize = 8)
comb<-cbind(multi,single)
dim(comb)
colnames(comb)<-gsub(pattern = "adap_multi.pathway_activity_testset.csv",replacement = "A",x = colnames(comb))
#colnames(comb)<-gsub(pattern = "non",replacement = "NA",x = colnames(comb))
pdf(file='~/Dropbox/bild_signatures//bild_signatures/activity_subtype.pdf')
heatmap.2(as.matrix(comb),col=my_palette,margins=c(12,9),Rowv=F,Colv=F,dendrogram="none", trace="none",main="All possibilities",breaks = col_breaks)
heatmap.2(as.matrix(comb[,47:50]),col=my_palette,trace="none",main="Multipathway activity",margins=c(12,9),Rowv =F,Colv=F,dendrogram="none",ylab="ICBP Cell lines",breaks = col_breaks)
heatmap.2(as.matrix(comb[,51:54]),margins=c(12,9),col=my_palette, Rowv=F,Colv=F,dendrogram="none",trace="none",main="Single pathway activity",ylab="ICBP Cell lines",scale = "row")
comb_drug<-merge_drop(comb,drugs,by=0)
#plot(hclust(dist(comb_drug[,1:48]), method = "complete", members = NULL))
# <<<<<<< HEAD  (unresolved merge: one version of the subtype heatmap code follows)
basal<-subset(comb_drug,comb_drug$Transcriptional.subtype...ERBB2.status=="Basal")
heatmap.2(as.matrix(basal[,22:25]),col=my_palette,margins=c(12,9),dendrogram="none", trace="none",main="Basal Multi")
heatmap.2(as.matrix(basal[,26:29]),col=my_palette,margins=c(12,9),dendrogram="none",lmat=rbind( c(0, 3, 4), c(2,1,0 ) ), lwid=c(1.5, 4, 2 ), trace="none",main="Basal single")
cor(basal[,22],basal$Sigma.AKT1.2.inhibitor,use="pairwise")
her<-subset(comb_drug[,1:44],comb_drug$Transcriptional.subtype...ERBB2.status=="ERBB2-amp"|comb_drug$Transcriptional.subtype...ERBB2.status=="ERBB2Amp")
heatmap.2(as.matrix(her[,22:25]),col=my_palette,dendrogram="none",lmat=rbind( c(0, 3, 4), c(2,1,0 ) ), lwid=c(1.5, 4, 2 ), trace="none",margins=c(12,9),main="HER2 Multi")
heatmap.2(as.matrix(her[,26:29]),col=my_palette,dendrogram="none",lmat=rbind( c(0, 3, 4), c(2,1,0 ) ), lwid=c(1.5, 4, 2 ), trace="none",margins=c(12,9),main="HER2 Single",scale="column")
cor(her[,22],her$Sigma.AKT1.2.inhibitor,use="pairwise")
claudin<-subset(comb_drug,comb_drug$Transcriptional.subtype...ERBB2.status=="Claudin-low")
heatmap.2(as.matrix(claudin[,22:25]),col=my_palette,dendrogram="none",lmat=rbind( c(0, 3, 4), c(2,1,0 ) ), lwid=c(1.5, 4, 2 ), trace="none",margins=c(12,9),main="Claudin Multi")
heatmap.2(as.matrix(claudin[,26:29]),col=my_palette,dendrogram="none",lmat=rbind( c(0, 3, 4), c(2,1,0 ) ), lwid=c(1.5, 4, 2 ), trace="none",margins=c(12,9),main="Claudin Single",scale="column")
cor(claudin[,22],claudin$Sigma.AKT1.2.inhibitor,use="na.or.complete")
luminal<-subset(comb_drug,comb_drug$Transcriptional.subtype...ERBB2.status=="Luminal")
heatmap.2(as.matrix(luminal[,22:25]),col=my_palette,dendrogram="none",lmat=rbind( c(0, 3, 4), c(2,1,0 ) ), lwid=c(1.5, 4, 2 ), trace="none",margins=c(12,9),main="Luminal Multi")
heatmap.2(as.matrix(luminal[,26:29]),col=my_palette,dendrogram="none",lmat=rbind( c(0, 3, 4), c(2,1,0 ) ), lwid=c(1.5, 4, 2 ), trace="none",margins=c(12,9),main="Luminal Single",scale="row")
# =======  (unresolved merge: alternative version of the subtype heatmap code follows)
# write(file="~/Dropbox/bild_signatures/bild_signatures/pathway prediction drug response results.xls","Correlations for pathway predictions with Lapatinib and Sigma.AKT.inhibitor in BASAL subtype",append = T)
# write('',file="~/Dropbox/bild_signatures/bild_signatures/pathway prediction drug response results.xls", append=T)
basal<-subset(comb_drug,comb_drug$Transcriptional.subtype...ERBB2.status=="Basal")
# write("For Lapatinib",file="~/Dropbox/bild_signatures/bild_signatures/pathway prediction drug response results.xls", append =T)
# write(paste(colnames(basal)[1:54],cor(basal[,1:54],basal$Lapatinib,use="pairwise.complete.obs"),sep='\t'), file="~/Dropbox/bild_signatures/bild_signatures/pathway prediction drug response results.xls", append = T)
# write("For Sigma AKT inhibitor",file="~/Dropbox/bild_signatures/bild_signatures/pathway prediction drug response results.xls", append =T)
# write(paste(colnames(basal)[1:54],cor(basal[,1:54],basal$Sigma.AKT1.2.inhibitor,use="pairwise.complete.obs"),sep='\t'),file="~/Dropbox/bild_signatures/bild_signatures/pathway prediction drug response results.xls",append = T)
par(mfrow = c(2,1))
heatmap.2(as.matrix(basal[,47:50]),col=my_palette,margins=c(12,9),dendrogram="none", trace="none",main="Basal Multi",Rowv = NULL, Colv = NULL)
heatmap.2(as.matrix(basal[,51:54]),col=my_palette,margins=c(12,9),dendrogram="none",Rowv = NULL, Colv = NULL, trace="none",main="Basal single")
her<-subset(comb_drug,comb_drug$Transcriptional.subtype...ERBB2.status=="ERBB2-amp"|comb_drug$Transcriptional.subtype...ERBB2.status=="ERBB2Amp")
# wrtie(paste(colnames(her)[1:54],cor(her[,1:54],her$Lapatinib,use="pairwise.complete.obs"),sep='\t'),file="~/Dropbox/bild_signatures/bild_signatures/pathway prediction drug response results.xls",append = T)
# write("For Sigma AKT inhibitor",file="~/Dropbox/bild_signatures/bild_signatures/pathway prediction drug response results.xls", append =T)
# write(paste(colnames(her),cor(her[,1:54],her$Sigma.AKT1.2.inhibitor,use="pairwise.complete.obs"),sep='\t'),file="~/Dropbox/bild_signatures/bild_signatures/pathway prediction drug response results.xls",append = T)
#
heatmap.2(as.matrix(her[,43:54]),col=my_palette,dendrogram="none",lmat=rbind( c(0, 3, 4), c(2,1,0 ) ), lwid=c(1.5, 4, 2 ), trace="none",margins=c(14,9),main="HER2 Multi",Rowv=F, Colv=F)
heatmap.2(as.matrix(her[,51:54]),col=my_palette,dendrogram="none",lmat=rbind( c(0, 3, 4), c(2,1,0 ) ), lwid=c(1.5, 4, 2 ), trace="none",margins=c(12,9),main="HER2 Single",Rowv=F, Colv=F,scale = "row")
claudin<-subset(comb_drug,comb_drug$Transcriptional.subtype...ERBB2.status=="Claudin-low")
cor(claudin[,1:54],claudin$Lapatinib,use="pairwise.complete.obs")# variance zero in this group
cor(claudin[,1:54],claudin$Sigma.AKT1.2.inhibitor,use="pairwise.complete.obs")
heatmap.2(as.matrix(claudin[,47:50]),col=my_palette,dendrogram="none",trace="none",margins=c(12,9),main="Claudin Multi",Rowv=F, Colv=F)
heatmap.2(as.matrix(claudin[,51:54]),col=my_palette,dendrogram="none", trace="none",margins=c(12,9),main="Claudin Single",scale="row",Rowv=F, Colv=F)
cor(claudin[,22],claudin$Sigma.AKT1.2.inhibitor,use="na.or.complete")
luminal<-subset(comb_drug,comb_drug$Transcriptional.subtype...ERBB2.status=="Luminal")
cor(luminal[,47:50],luminal$Lapatinib,use="pairwise.complete.obs")
cor(luminal[,51:54],luminal$Sigma.AKT1.2.inhibitor,use="pairwise.complete.obs")
heatmap.2(as.matrix(luminal[,47:50]),col=my_palette,dendrogram="none", trace="none",margins=c(12,9),main="Luminal Multi",Rowv=F, Colv=F)
heatmap.2(as.matrix(luminal[,47:50]),col=my_palette,dendrogram="none",trace="none",margins=c(12,9),main="Luminal Single",scale="row",Rowv=F, Colv=F)
# >>>>>>> 2fac797b6fb95768996bf82bab2763ae0f043efa  (end of the unresolved merge block)
cor(luminal[,22],luminal$Sigma.AKT1.2.inhibitor,use="na.or.complete")
multi_4<-rbind(basal[,43:46],her[,43:46],claudin[,43:46],luminal[,43:46])
png("heatmaps_multi_adap.png",width = 5*300,height = 5*300,res = 800, pointsize = 8)
heatmap.2(as.matrix(multi_4), RowSideColors = c(rep("gray", length(rownames(basal))),rep("blue", length(rownames(her))),rep("black", length(rownames(claudin))),rep("green",length(rownames(luminal)))),col=my_palette,dendrogram="none", trace="none",margins=c(15,10),main="Multi Preds within Subtypes",scale="row",Rowv=F)
par(lend = 10) # square line ends for the color legend
legend("topright",legend = c("Basal", "HER2", "Claudin","Luminal"), col = c("gray", "blue", "black","green"), lty= 1,lwd = 10)
dev.off()
single_4<-rbind(basal[,51:54],her[,51:54],claudin[,51:54],luminal[,51:54])
heatmap.2(as.matrix(single_4), RowSideColors = c(rep("gray", length(rownames(basal))),rep("blue", length(rownames(her))),rep("black", length(rownames(claudin))),rep("green",length(rownames(luminal)))),col=my_palette,dendrogram="none", trace="none",margins=c(12,9),main="Single Preds within Subtypes",scale="row",Rowv=F)
par(lend = 10) # square line ends for the color legend
legend("topright",legend = c("Basal", "HER2", "Claudin","Luminal"), col = c("gray", "blue", "black","green"), lty= 1,lwd = 10)
dev.off()
```
```{r echo=FALSE}
time<-format(Sys.time(),"%a %b %d %X %Y")
```
This analysis was run on `r time`
<file_sep>/Rmarkdowns_scripts/TCGA/TCGA_assign_run.Rmd
---
title: "TCGA_ASSIGN"
author: "<NAME>"
date: "January 23, 2015"
output: html_document
---
```{r,echo=FALSE,cache=TRUE,include=FALSE}
library(sva)
library(ASSIGN)
merge_drop<-function(x,y,by=0)
{
new_m<-merge(x,y,by=by)
rownames(new_m)<-new_m$Row.names
return(new_m[,2:length(colnames(new_m))])
}
pcaplot<-function(mat,sub){
if(sum(sub)!=length(mat))
{
print("verify the subscripts...exiting now")
}
else{
pca_mat <- prcomp(t(mat), center=T,scale=T)
plot(pca_mat)
plot(pca_mat$x[,1],pca_mat$x[,2])
for(i in 1:length(sub))
{
#print(rownames(pca_mat$x)[index:sub[i]+index-1],"has color", )
print(i)
if(i==1)
{
points(pca_mat$x[1:sub[i]],pca_mat$x[1:sub[i],2],col=i)
}
else if(i==length(sub))
{
points(pca_mat$x[sum(sub[1:i-1]):length(rownames(pca_mat$x))],pca_mat$x[sum(sub[1:i-1]):length(rownames(pca_mat$x)),2],col=i)
}
else
{
points(pca_mat$x[sum(sub[1:i-1]):sum(sub[1:i])],pca_mat$x[sum(sub[1:i-1]):sum(sub[1:i]),2],col=i)
}
}
}
}
assign_easy_multi<-function(trainingData=train, testData=test, trainingLabel1=NULL,g=100,out_dir_base="~/Desktop/tmp",cov=0){
if(cov==0){
adap_folder<-paste(out_dir_base,paste( "adap_multi",sep=''),sep='/')
dir.create(file.path(out_dir_base,paste( "adap_multi",sep='')))
nonadap_folder<-paste(out_dir_base,paste( "nonadap_multi",sep=''),sep='/')
dir.create(file.path(out_dir_base,paste( "nonadap_multi",sep='')))
}
else{
adap_folder<-paste(out_dir_base,paste( "adap_cov",sep=''),sep='/')
dir.create(file.path(out_dir_base,paste( "adap_cov",sep='')))
nonadap_folder<-paste(out_dir_base,paste( "nonadap_cov",sep=''),sep='/')
dir.create(file.path(out_dir_base,paste( "nonadap_cov",sep='')))
}
set.seed(1234)
assign.wrapper(trainingData=trainingData, testData=testData, trainingLabel=trainingLabel1, testLabel=NULL, geneList=NULL, n_sigGene=g, adaptive_B=T, adaptive_S=F, mixture_beta=F, outputDir=adap_folder, theta0=0.05, theta1=0.9, iter=10000, burn_in=1000)
set.seed(1234)
assign.wrapper(trainingData=trainingData, testData=testData, trainingLabel=trainingLabel1, testLabel=NULL, geneList=NULL, n_sigGene=g, adaptive_B=F, adaptive_S=F, mixture_beta=F, outputDir=nonadap_folder, theta0=0.05, theta1=0.9, iter=10000, burn_in=1000)
}
```
Reading in the signature datasets...
```{r include=FALSE,echo=FALSE}
setwd("~/Dropbox/bild_signatures/Datasets/")
expr<-as.matrix(read.table("GFP18_AKT_BAD_HER2_IGF1R_RAF_ERK.tpmlog",sep='\t',row.names=1,header=1))
control<-subset(expr, select=GFP.1:GFP.12)
her2<-subset(expr, select=HER2.1:HER2.6)
akt<-subset(expr,select=AKT.1:AKT.6)
bad<-subset(expr,select=BAD.1:BAD.6)
igf1r<-subset(expr,select=IGF1R.1:IGF1R.6)
erk<-subset(expr,select=ERK.1:ERK.6)
expr_all<-cbind(control,akt,bad,her2,igf1r,erk)
tcga<-as.matrix(read.table("~/Dropbox/Datasets/TCGA20_brca_1_23.txt", sep='\t', stringsAsFactors=T, header=1, row.names=1))
expr_all_f <-expr_all[apply(expr_all[,1:41]==0,1,mean) < 0.85,]
dim(expr_all_f)
expr_all_tcga_f<-merge_drop(expr_all_f,tcga,by=0)
dim(expr_all_tcga_f)
sub<-c(12,6,6,5,6,6,length(colnames(tcga)))
pdf(file='~/Dropbox/bild_signatures/tcga_hmec_pca_plot_3_14_15.pdf')
pcaplot(expr_all_tcga_f,sub)
bat1<-as.matrix(cbind(c(colnames(expr_all_f),colnames(tcga)),c(rep(1,length(colnames(expr_all_f))),rep(2,length(colnames(tcga))))))
#bat1
combat_expr1<-ComBat(dat=expr_all_tcga_f, batch=bat1[,2], mod=NULL, numCovs=NULL)
pcaplot(combat_expr1,sub)
dev.off()
write.table(combat_expr1,"~/Dropbox/bild_signatures/Datasets/tcga_hmec_combat_3_14_15.txt", col.names = NA, quote = F,sep='\t')
combat_expr1<-as.matrix(read.table("~/Dropbox/bild_signatures/Datasets/tcga_hmec_combat_3_14_15.txt",header=1,row.names=1,sep='\t'))
c_gfp<-subset(combat_expr1, select=GFP.1:GFP.12)
c_akt<-subset(combat_expr1, select=AKT.1:AKT.6)
c_bad<-subset(combat_expr1, select=BAD.1:BAD.6)
c_her2<-subset(combat_expr1, select=HER2.1:HER2.6)
c_igf1r<-subset(combat_expr1, select=IGF1R.1:IGF1R.6)
c_erk<-subset(combat_expr1, select=ERK.1:ERK.6)
c_test<-combat_expr1[,42:96]
```
Now running ASSIGN with multiple pathways..
```{r}
#############running single pathway assign predictions#################
#1.HER2
trainh<-cbind(c_gfp,c_her2)
testh<-combat_expr1[,36:ncol(combat_expr1)]
trainingLabelh<-list(control=list(her2=1:12),her2=13:17)
dir.create("~/Dropbox/bild_signatures/multi_tcga_expr_pc/single")
setwd("~/Dropbox/bild_signatures/multi_tcga_expr_pc/single")
dir.create( "Her2")
#
assign_easy_multi(trainingData = trainh,test=testh,trainingLabel1 = trainingLabelh,g=c(15),out_dir_base = "Her2")
#2. AKT
traina<-cbind(c_gfp,c_akt)
testa<-combat_expr1[,36:ncol(combat_expr1)]
trainingLabela<-list(control=list(akt=1:12),akt=13:18)
dir.create( "AKT")
#
assign_easy_multi(trainingData = traina,test=testa,trainingLabel1 = trainingLabela,g=c(150),out_dir_base = "AKT")
#3. BAD
trainb<-cbind(c_gfp,c_bad)
testb<-combat_expr1[,36:ncol(combat_expr1)]
trainingLabelb<-list(control=list(bad=1:12),bad=13:18)
dir.create( "BAD")
#
assign_easy_multi(trainingData = trainb,test=testb,trainingLabel1 = trainingLabelb,g=c(150),out_dir_base = "BAD")
#4. IGF1R
traini<-cbind(c_gfp,c_igf1r)
testi<-combat_expr1[,36:ncol(combat_expr1)]
trainingLabeli<-list(control=list(igf1r=1:12),igf1r=13:18)
dir.create( "IGF1R")
#
assign_easy_multi(trainingData = traini,test=testi,trainingLabel1 = trainingLabeli,g=c(100),out_dir_base = "IGF1R")
#############trying two pathways at a time in multipathway#############
#1. HER2 & AKT
trainha<-cbind(c_gfp,c_akt,c_her2)
testha<-combat_expr1[,36:ncol(combat_expr1)]
trainingLabelha<-list(control=list(akt=1:12,her2=1:12),akt=13:18,her2=19:23)
setwd("~/Dropbox/bild_signatures/multi_tcga_expr_pc/")
dir.create( "Her2_AKT")
#
assign_easy_multi(trainingData = trainha,test=testha,trainingLabel1 = trainingLabelha,g=c(150,15),out_dir_base = "Her2_AKT/")
#2. HER2 & BAD
trainhb<-cbind(c_gfp,c_bad,c_her2)
testhb<-combat_expr1[,36:90]
trainingLabelhb<-list(control=list(bad=1:12,her2=1:12),bad=13:18,her2=19:23)
dir.create( "Her2_BAD")
#
assign_easy_multi(trainingData = trainhb,test=testhb,trainingLabel1 = trainingLabelhb,g=c(150,15),out_dir_base = "Her2_BAD")
#3. HER2 & IGF1R
trainhi<-cbind(c_gfp,c_igf1r,c_her2)
testhi<-combat_expr1[,36:90]
trainingLabelhi<-list(control=list(igf1r=1:12,her2=1:12),igf1r=13:18,her2=19:23)
dir.create( "~/Dropbox/bild_signatures/multi_tcga_expr_pc/Her2_IGF1R")
#
assign_easy_multi(trainingData = trainhi,test=testhi,trainingLabel1 = trainingLabelhi,g=c(100,15),out_dir_base = "~/Dropbox/bild_signatures/multi_tcga_expr_pc/Her2_IGF1R/")
#4. AKT & BAD
trainab<-cbind(c_gfp,c_akt,c_bad)
testab<-combat_expr1[,36:90]
trainingLabelab<-list(control=list(akt=1:12,bad=1:12),akt=13:18,bad=19:24)
dir.create( "~/Dropbox/bild_signatures/multi_tcga_expr_pc/AKT_BAD")
#
assign_easy_multi(trainingData = trainab,test=testab,trainingLabel1 = trainingLabelab,g=c(150,150),out_dir_base = "~/Dropbox/bild_signatures/multi_tcga_expr_pc/AKT_BAD/")
#5. AKT & IGF1R
trainai<-cbind(c_gfp,c_akt,c_igf1r)
testai<-combat_expr1[,36:90]
trainingLabelai<-list(control=list(akt=1:12,igf1r=1:12),akt=13:18,igf1r=19:24)
dir.create( "AKT_IGF1R")
#
assign_easy_multi(trainingData = trainai,test=testai,trainingLabel1 = trainingLabelai,g=c(150,100),out_dir_base = "~/Dropbox/bild_signatures/multi_tcga_expr_pc/AKT_IGF1R")
#6. BAD & IGF1R
trainbi<-cbind(c_gfp,c_bad,c_igf1r)
testbi<-combat_expr1[,36:90]
trainingLabelbi<-list(control=list(bad=1:12,igf1r=1:12),bad=13:18,igf1r=19:24)
dir.create( "~/Desktop/tmp/multi_icbp_expr_pc/BAD_IGF1R")
#
assign_easy_multi(trainingData = trainbi,test=testbi,trainingLabel1 = trainingLabelbi,g=c(150,100),out_dir_base = "~/Desktop/tmp/multi_icbp_expr_pc/BAD_IGF1R")
#############trying three pathways at a time in multipathway#############
#1. HER2, AKT & BAD
trainhab<-cbind(c_gfp,c_akt,c_bad,c_her2)
testhab<-combat_expr1[,36:90]
trainingLabelhab<-list(control=list(akt=1:12,bad=1:12,her2=1:12),akt=13:18,bad=19:24,her2=25:29)
dir.create( "~/Desktop/tmp/multi_icbp_expr_pc/AKT_BAD_HER2")
#
assign_easy_multi(trainingData = trainhab,test=testhab,trainingLabel1 = trainingLabelhab,g=c(150,150,15),out_dir_base = "~/Desktop/tmp/multi_icbp_expr_pc/AKT_BAD_HER2")
#2. HER2, BAD & IGF1R
trainhbi<-cbind(c_gfp,c_igf1r,c_bad,c_her2)
testhbi<-combat_expr1[,36:90]
trainingLabelhbi<-list(control=list(igf1r=1:12,bad=1:12,her2=1:12),igf1r=13:18,bad=19:24,her2=25:29)
dir.create( "~/Desktop/tmp/multi_icbp_expr_pc/IGF1R_BAD_HER2")
#
assign_easy_multi(trainingData = trainhbi,test=testhbi,trainingLabel1 = trainingLabelhbi,g=c(100,150,15),out_dir_base = "~/Desktop/tmp/multi_icbp_expr_pc/IGF1R_BAD_HER2")
#3. AKT, BAD & IGF1R
trainabi<-cbind(c_gfp,c_akt,c_bad,c_igf1r)
testabi<-combat_expr1[,36:90]
trainingLabelabi<-list(control=list(akt=1:12,bad=1:12,igf1r=1:12),akt=13:18,bad=19:24,igf1r=25:30)
dir.create( "~/Desktop/tmp/multi_icbp_expr_pc/AKT_BAD_IGF1R")
#
assign_easy_multi(trainingData = trainabi,test=testabi,trainingLabel1 = trainingLabelabi,g=c(150,150,100),out_dir_base = "~/Desktop/tmp/multi_icbp_expr_pc/AKT_BAD_IGF1R")
########################trying all four at once#####################
trainhall<-cbind(c_gfp,c_akt,c_bad,c_her2,c_igf1r)
testall<-combat_expr1[,36:ncol(combat_expr1)]
trainingLabel<-list(control=list(akt=1:12,bad=1:12,her2=1:12,igf1r=1:12),akt=13:18, bad=19:24,her2=25:29,igf1r=30:35)
dir.create( "all_4")
#
assign_easy_multi(trainingData = trainhall,test=testall,trainingLabel1 = trainingLabel,g=c(150,150,15,100),out_dir_base = "all_4")
```
Checking for correlation..
```{r include=FALSE}
setwd("~/Dropbox/bild_signatures/multi_icbp_expr_pc/")
filenames<-system("ls */*/pathway_activity_testset*", intern=TRUE)
filenames
for(i in 1:length(filenames))
{
f<-read.csv(filenames[i], header=1,row.names=1) ###reading in the files one at a time
colnames(f)<-paste(filenames[i],colnames(f),sep='/')
if(i==1){
data<-f
}
else{
data<-cbind(data,f)
}
}
#write.table(data,"~/Desktop/multipathway_preds.txt",sep='\t', col.names = NA,quote=F)
```
<file_sep>/Rmarkdowns_scripts/ICBP/9Feb14_ICBP_predicitons_drugresponse_correlations.Rmd
---
title: "Pathway Predictions/Drug Response Correlations"
author: "Shelley"
date: "February 9, 2015"
output: html_document
---
Read in the single and multi-pathway ASSIGN predictions
```{r}
drug_response_data<-read.delim("~/Documents/ThesisWork/GitRepos/u01project/SignatureValidations/EGFR/ICBP/ICBP_drugs.txt", header=1, sep='\t',row.names=1)
multipathway_data<-read.delim("~/Documents/ThesisWork/GitRepos/bild_signatures/Results/predictions/multipathway_preds.txt", header=1, sep='\t',row.names=1)
singlepathway_data=read.csv("~/Documents/ThesisWork/GitRepos/bild_signatures/Results/predictions/single_pathway_results.csv")
row.names(singlepathway_data)=singlepathway_data[,1]
singlepathway_data=singlepathway_data[,-1]
```
Merge the pathway predictions and the ICBP drugs
```{r}
merge_drop<-function(x,y,by=0)
{
new_m<-merge(x,y,by=by)
rownames(new_m)<-new_m$Row.names
return(new_m[,2:length(colnames(new_m))])
}
pred_drug_multi<-merge_drop(multipathway_data,drug_response_data,by=0)
pred_drug_single<-merge_drop(singlepathway_data,drug_response_data,by=0)
```
Perform correlations, rbind, and write to a file
```{r}
drug_prediction_correlations_multi= cor(pred_drug_multi[1:25],pred_drug_multi[36:125],use="na.or.complete")
drug_prediction_correlations_single= cor(pred_drug_single[1:4],pred_drug_single[15:104],use="na.or.complete")
single_and_multi= rbind(drug_prediction_correlations_multi,drug_prediction_correlations_single)
write.table(single_and_multi,"~/Documents/ThesisWork/GitRepos/bild_signatures/Correlations_ICBP_Single_Multi2.txt",sep='\t', col.names = NA,quote=F)
```
<file_sep>/Rmarkdowns_scripts/ICBP/HER2_for_Evan.Rmd
---
title: "multipathway_ASSIGN"
author: "<NAME>"
date: "December 23, 2014"
output: pdf_document
---
```{r,echo=FALSE,cache=TRUE,include=FALSE}
library(sva)
library(ASSIGN)
library(utils)
source("~/Documents/ThesisWork/GitRepos/bild_signatures/code/ASSIGN_Functions.R")
```
Reading in the signature datasets...
```{r include=FALSE,echo=FALSE}
setwd("~/Documents/ThesisWork/GitRepos/bild_signature_validation_old_repo/Datasets")
expr<-as.matrix(read.table("GFP18_AKT_BAD_HER2_IGF1R_RAF_ERK.tpmlog",sep='\t',row.names=1,header=1))
control<-subset(expr, select=GFP.1:GFP.12)
her2<-subset(expr, select=HER2.1:HER2.6)
akt<-subset(expr,select=AKT.1:AKT.6)
bad<-subset(expr,select=BAD.1:BAD.6)
igf1r<-subset(expr,select=IGF1R.1:IGF1R.6)
erk<-subset(expr,select=ERK.1:ERK.6)
expr_all<-cbind(control,akt,bad,her2,igf1r,erk)
#read in ICBP data
icbp<-as.matrix(read.table("~/Documents/ThesisWork/GitRepos/bild_signature_validation_old_repo/Datasets/ICBP/icbp_Rsubread_tpmlog.txt", sep='\t', stringsAsFactors=FALSE, header=1, row.names=1))
#filter
expr_all_f <-expr_all[apply(expr_all[,1:41]==0,1,mean) < 0.85,]
dim(expr_all_f)
expr_all_icbp_f<-merge_drop(expr_all_f,icbp,by=0)
```
Batch adjust
```{r}
# PCA Plot
sub<-c(12,6,6,5,6,6,55)
pdf("pca_plots.pdf")
pcaplot(expr_all_icbp_f,sub)
bat1<-as.matrix(cbind(c(colnames(expr_all_f),colnames(icbp)),c(rep(1,length(colnames(expr_all_f))),rep(2,length(colnames(icbp))))))
#Run ComBat
combat_expr1<-ComBat(dat=expr_all_icbp_f, batch=bat1[,2], mod=NULL, numCovs=NULL)
pcaplot(combat_expr1,sub)
dev.off()
# Subset comBat data
c_gfp<-subset(combat_expr1, select=GFP.1:GFP.12)
c_akt<-subset(combat_expr1, select=AKT.1:AKT.6)
c_bad<-subset(combat_expr1, select=BAD.1:BAD.6)
c_her2<-subset(combat_expr1, select=HER2.1:HER2.6)
c_igf1r<-subset(combat_expr1, select=IGF1R.1:IGF1R.6)
c_erk<-subset(combat_expr1, select=ERK.1:ERK.6)
c_test<-combat_expr1[,42:ncol(combat_expr1)] # ICBP columns (everything after the 41 signature samples), used as the ASSIGN test set below
```
Run ASSIGN Single pathway
```{r}
basedir="~/Documents/ThesisWork/GitRepos/bild_signatures/Her2_Results_Rerun"
dir.create( basedir)
#############trying one pathways at a time in multipathway#############
# HER2
trainingLabelh<-list(control=list(her2=1:12),her2=13:17)
sub_dir<-paste(basedir,"her2_rerun",sep='/')
dir.create( sub_dir)
assign_easy_multi(trainingData = cbind(c_gfp,c_her2),test=c_test,trainingLabel1 = trainingLabelh,g=15,out_dir_base = sub_dir,single = 1)
#########including all 5 pathways######
trainhall5<-cbind(c_gfp,c_akt,c_bad,c_her2,c_igf1r, c_erk)
trainingLabelall5<-list(control=list(akt=1:12,bad=1:12,her2=1:12,igf1r=1:12, erk=1:12),akt=13:18, bad=19:24,her2=25:29,igf1r=30:35, erk=36:41)
sub_dir=paste(basedir,"akt_bad_her2_igf1r_erk",sep='/')
dir.create(sub_dir)
assign_easy_multi(trainingData = trainhall5,test=c_test,trainingLabel1 = trainingLabelall5,g=c(150,150,15,100,100),out_dir_base = sub_dir)
```
Get the gene list
```{r}
setwd("~/Documents/Thesiswork/GitRepos/bild_signatures")
filenames_single_her2_noseed<-system("ls *rda*", intern=TRUE)
filenames_single_her2_noseed
getGeneList(filenames_single_her2_noseed)
```
<file_sep>/Rmarkdowns_scripts/ICBP/9Feb15_ICBP_predicitons_drugresponse_correlations_wej.Rmd
---
title: "Pathway Predictions/Drug Response Correlations"
author: "Shelley"
date: "February 9, 2015"
output: html_document
---
Read in the single and multi-pathway ASSIGN predictions
```{r}
source("~/Documents/ThesisWork/GitRepos/bild_signatures/code/Common.R")
setwd("~/Documents/ThesisWork/GitRepos/bild_signatures")
drug_response_data<-read.delim("ICBP_drugs.txt", header=1, sep='\t',row.names=1)
multipathway_data<-read.delim("Results/ICBP/predictions/multipathway_preds.txt", header=1, sep='\t',row.names=1)
singlepathway_data=read.csv("Results/ICBP/predictions/single_pathway_results.csv")
row.names(singlepathway_data)=singlepathway_data[,1]
singlepathway_data=singlepathway_data[,-1]
```
Merge the pathway predictions and the ICBP drugs
```{r}
merge_drop<-function(x,y,by=0)
{
new_m<-merge(x,y,by=by)
rownames(new_m)<-new_m$Row.names
return(new_m[,2:length(colnames(new_m))])
}
pred_drug_multi<-merge_drop(multipathway_data,drug_response_data,by=0)
pred_drug_single<-merge_drop(singlepathway_data,drug_response_data,by=0)
```
Perform correlations, rbind, and write to a file
```{r}
drug_prediction_correlations_multi_spear= cor(pred_drug_multi[1:25],pred_drug_multi[36:125],use="na.or.complete", method="spearman")
#### Bootstrap:
pathways = 25 # number of pathway signatures
samplesize = nrow(pred_drug_multi) # number of cell lines
n.boot = 10000 # number of bootstrap samples -- set at 10,000 or more for your final run
boot_cors = array(0,dim=c(25,90,n.boot)) # make a three dimensional array to store the bootstrap results
for (i in 1:n.boot){
boot.sample = sample(1:samplesize,replace=T)
boot_cors[,,i]=cor(pred_drug_multi[boot.sample,1:25],pred_drug_multi[boot.sample,36:125],use="na.or.complete", method="spearman")
}
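# quick sanity check on the bootstrap array before summarizing it
dim(boot_cors)       # should be 25 pathways x 90 drugs x n.boot
boot_cors[1,1,1:5]   # first few bootstrap correlations for the first pathway/drug pair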
```
Save the upper, mean, and lower to files
```{r}
# means
cor_mean = apply(boot_cors, c(1,2), mean, na.rm=T) ## average bootstrap cors. Should be similar to the non-boot values
dimnames(cor_mean)=dimnames(drug_prediction_correlations_multi_spear)
View(cor_mean)
write.table(cor_mean,"~/Documents/ThesisWork/GitRepos/bild_signatures/cor_mean.txt",sep='\t', col.names = NA,quote=F)
cor_means_ICBP=read.delim("~/Documents/ThesisWork/GitRepos/bild_signatures/cor_mean.txt", header=1, sep='\t',row.names=1)
View(cor_means_ICBP)
#lower
lower = apply(boot_cors, c(1,2), quantile,na.rm=T,probs=.025) ## lower 95% CI
dimnames(lower)=dimnames(drug_prediction_correlations_multi_spear)
write.table(lower,"~/Documents/ThesisWork/GitRepos/bild_signatures/lower.txt",sep='\t', col.names = NA,quote=F)
cor_lower_ICBP=read.delim("~/Documents/ThesisWork/GitRepos/bild_signatures/lower.txt", header=1, sep='\t',row.names=1)
View(cor_lower_ICBP)
#upper
upper = apply(boot_cors, c(1,2), quantile,na.rm=T,probs=.975) ## upper 95% CI
dimnames(upper)=dimnames(drug_prediction_correlations_multi_spear)
write.table(upper,"~/Documents/ThesisWork/GitRepos/bild_signatures/upper.txt",sep='\t', col.names = NA,quote=F)
cor_upper_ICBP=read.delim("~/Documents/ThesisWork/GitRepos/bild_signatures/upper.txt", header=1, sep='\t',row.names=1)
View(cor_upper_ICBP)
```
Make a matrix with the quantiles for each drug/pathway combination as columns
```{r}
#just a test
test =c(lower[1,1],cor_mean[1,1], upper[1,1]) ## print the upper, mean, lower for first pathway and first drug
test2 =c(lower[1,2],cor_mean[1,2], upper[1,2])
test2
mat <- cbind(test, test2)
mat
boxplot(mat, main = "testing", col = 3, names=TRUE)
# loop through
name=NULL
name=c(name,(paste(rownames(cor_mean)[i],colnames(cor_mean)[j],sep=')')))
name=gsub(".adap_multi.pathway_activity_testset.csv.", "(", name)
#try to subset with grep
#AKT=cor_mean[grepl(".akt", cor_mean[,0]),]
#AKT=subset(cor_mean)
AKT_means=cor_mean[c(1,3,6,9, 13, 22), ]
dim(AKT_means)
AKT_lower=lower[c(1,3,6,9, 13, 22), ]
AKT_upper=upper[c(1,3,6,9, 13, 22), ]
Her2_means=cor_mean[c(5,14,16,18, 21, 24), ]
dim(Her2_means)
Her2_lower=lower[c(5,14,16,18, 21, 24), ]
Her2_upper=upper[c(5,14,16,18, 21, 24), ]
#
IGFR_means=cor_mean[c(8,10,12,17, 19, 25), ]
dim(IGFR_means) #6
IGFR_lower=lower[c(8,10,12,17, 19, 25), ]
IGFR_upper=upper[c(8,10,12,17, 19, 25), ]
BAD_means=cor_mean[c(2,4,7,11, 15, 20, 23), ]
dim(BAD_means) # 7
BAD_lower=lower[c(2,4,7,11, 15, 20, 23), ]
BAD_upper=upper[c(2,4,7,11, 15, 20, 23), ]
total=6+6+6+7
total
makeMatrix=function(lowerMatrix, meanMatrix, upperMatrix){
final=tmp=name=NULL
Drug=NULL
for (i in 1:nrow(meanMatrix) ){
for( j in 1:ncol(meanMatrix)){
Drug =c(lowerMatrix[i,j],meanMatrix[i,j], upperMatrix[i,j])
#print(Drug)
name=c(name,(paste(rownames(meanMatrix)[i],colnames(meanMatrix)[j],sep=')')))
name=gsub(".adap_multi.pathway_activity_testset.csv.", "(", name)
tmp=cbind(tmp,Drug)
colnames(tmp)<-name
# print(final)
}
final[i]<-tmp
par(mar=c(1,10,2,2))
boxplot(BAD_BADandAKTandHER, main = "BAD Pathway Drug Correlation Bootstrap", col = 3, las=2, horizontal = TRUE, cex.axis=0.7, boxwex=1, xlab= "Spearman Correlation")
abline(v=0, col = "red", lwd = 2)
tmp=name=NULL
}
return (final)
}
pdf("~/BAD_boxplots.pdf")
ba=makeMatrix(BAD_lower, BAD_means, BAD_upper) # makeMatrix expects (lowerMatrix, meanMatrix, upperMatrix)
dev.off()
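# a more compact sketch of the same lower/mean/upper summary for a single signature row
# (illustrative only; bad_ci_example is a throwaway name and it uses the BAD_* matrices built above)
bad_ci_example <- sapply(seq_len(ncol(BAD_means)), function(j)
  c(lower=BAD_lower[1,j], mean=BAD_means[1,j], upper=BAD_upper[1,j]))
colnames(bad_ci_example) <- colnames(BAD_means)
dim(bad_ci_example)   # 3 x number of drugs, for the first BAD-containing signature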
# (incomplete exploratory fragments)
# akt-
# ba
# ba
# signaturesList=c()
# temp_pos=list()=c()
# for h in 1:nrow(BAD_means){
final=name=NULL
Drug=name=NULL
#AKT and BAD for BAD
for (i in 1:1 ){
for( j in 1:ncol(BAD_means)){
Drug =c(BAD_lower[i,j],BAD_means[i,j], BAD_upper[i,j])
#print(Drug)
name=c(name,(paste(rownames(BAD_means)[i],colnames(BAD_means)[j],sep=')')))
name=gsub(".adap_multi.pathway_activity_testset.csv.", "(", name)
final=cbind(final,Drug)
colnames(final)<-name
par(mar=c(19,5,2,1))
}
}
# (incomplete exploratory fragments)
# final=
# #}
# finallist=c(finals)
# for
# box =function(final)
dim(ba)
Bad_AKTnBADnIGF1R=final
par(mar=c(19,5,2,1))
boxplot(ba[1], main = "BAD Pathway Drug Correlation Bootstrap using AKT/BAD/IGF1R", col= ifelse(Bad_AKTnBADnIGF1R <= 0, "red", ifelse(Bad_AKTnBADnIGF1R >=0,"blue", "black")), las=2, cex.axis=0.7, boxwex=.7, ylab= "Spearman Correlation")
BAD_AKTandBAD=final
View(final)
dim(final)
par(mar=c(19,5,2,1))
boxplot(BAD_AKTandBAD,main = "BAD Pathway Drug Correlation Bootstrap using AKT and BAD", col = 3, las=2, cex.axis=0.7, boxwex=.5, ylab= "Spearman Correlation")
abline(h=0, col = "red", lwd = 2)
BAD_BADandAKTandHER=final
par(mar=c(1,5,2,2))
boxplot(BAD_BADandAKTandHER, main = "BAD Pathway Drug Correlation Bootstrap using AKT, BAD, and HER2", col = 3, las=2, horizontal = TRUE, cex.axis=0.7, boxwex=1, ylab= "Spearman Correlation")
abline(h=0, col = "red", lwd = 2)
#Whenever an effect is significant, all values in the confidence interval will be on the same side of zero (either all positive or all negative).
#a<-matrix(nrow=100,ncol=3,data=runif(300,max=2))
#b<-matrix(nrow=100,ncol=3,data=runif(300,max=1))
#boxplot(a, at = 0:2*3 + 1, xlim = c(0, 9), ylim = range(a, b), xaxt = "n")
#boxplot(b, at = 0:2*3 + 2, xaxt = "n", add = TRUE)
#axis(1, at = 0:2*3 + 1.5, labels = colnames(a), tick = TRUE)
```
###########
drug_prediction_correlations_single= cor(pred_drug_single[1:4],pred_drug_single[15:104],use="na.or.complete")
single_and_multi= rbind(drug_prediction_correlations_multi,drug_prediction_correlations_single)
write.table(single_and_multi,"Correlations_ICBP_Single_Multi2.txt",sep='\t', col.names = NA,quote=F)<file_sep>/Rmarkdowns_scripts/Key_ASSIGN_functions.Rmd
merge_drop<-function(x,y,by=0)
{
new_m<-merge(x,y,by=by)
rownames(new_m)<-new_m$Row.names
return(new_m[,2:length(colnames(new_m))])
}
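# Minimal illustration of merge_drop (toy data, not part of the analysis; kept commented so sourcing this file has no side effects):
# x <- data.frame(a=1:3, row.names=c("g1","g2","g3"))
# y <- data.frame(b=4:6, row.names=c("g2","g3","g4"))
# merge_drop(x,y)   # rows g2 and g3 only, columns a and b, rownames kept, Row.names column dropped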
pcaplot<-function(mat,sub,scale=T){
if(sum(sub)!=length(mat)){
print("verify the subscripts...exiting now")
}
else{
pca_mat <- prcomp(t(mat), center=T,scale=scale)
plot(pca_mat)
plot(pca_mat$x[,1],pca_mat$x[,2])
index= 1
for(i in 1:length(sub)){
#print(rownames(pca_mat$x)[index:sub[i]+index-1],"has color", )
print(i)
if(i==1){
points(pca_mat$x[1:sub[i]],pca_mat$x[1:sub[i],2],col=i+1)
}
else if(i==length(sub)){
points(pca_mat$x[index:length(rownames(pca_mat$x))],pca_mat$x[index:length(rownames(pca_mat$x)),2],col=i+1)
}
else{
points(pca_mat$x[index:index+sub[i]],pca_mat$x[index:index+sub[i],2],col=i+1)
}
index=index+sub[i]
}
}
}
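# pcaplot takes the combined (signature + test) expression matrix and a vector giving how many
# columns belong to each group, e.g. (commented so sourcing this file stays side-effect free):
# pcaplot(expr_all_icbp_f, c(12,6,6,5,6,55))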
assign_easy<-function(trainingData=train, testData=test, trainingLabel1=NULL,g=100,out_dir_base="~/Dropbox/bild_signatures/",cov=0){
if(cov==0){
adap_folder<-paste(out_dir_base,paste( "adap",g,sep=''),sep='/')
dir.create(file.path(out_dir_base,paste( "adap",g,sep='')))
nonadap_folder<-paste(out_dir_base,paste( "nonadap",g,sep=''),sep='/')
dir.create(file.path(out_dir_base,paste( "nonadap",g,sep='')))
}
else{
adap_folder<-paste(out_dir_base,paste( "adap_cov",g,sep=''),sep='/')
dir.create(file.path(out_dir_base,paste( "adap_cov",g,sep='')))
nonadap_folder<-paste(out_dir_base,paste( "nonadap_cov",g,sep=''),sep='/')
dir.create(file.path(out_dir_base,paste( "nonadap_cov",g,sep='')))
}
set.seed(1234)
assign.wrapper(trainingData=trainingData, testData=testData, trainingLabel=trainingLabel1, testLabel=NULL, geneList=NULL, n_sigGene=g, adaptive_B=T, adaptive_S=F, mixture_beta=F, outputDir=adap_folder, theta0=0.05, theta1=0.9, iter=2000, burn_in=1000)
set.seed(1234)
assign.wrapper(trainingData=trainingData, testData=testData, trainingLabel=trainingLabel1, testLabel=NULL, geneList=NULL, n_sigGene=g, adaptive_B=F, adaptive_S=F, mixture_beta=F, outputDir=nonadap_folder, theta0=0.05, theta1=0.9, iter=2000, burn_in=1000)
}
##
# 1 = true, 0 = false
assign_easy_multi<-function(trainingData=train, testData=test, trainingLabel1=NULL,g=100,out_dir_base="~/Desktop/tmp",cov=0, single=0){
if(cov==0 & single==0){
adapB_folder<-paste(out_dir_base,paste( "adapB_multi",sep=''),sep='/')
dir.create(file.path(out_dir_base,paste( "adapB_multi",sep='')))
adap_adap_folder<-paste(out_dir_base,paste( "adap_adap_multi",sep=''),sep='/')
dir.create(file.path(out_dir_base,paste( "adap_adap_multi",sep='')))
}
else if (cov==0 & single==1){
adapB_folder<-paste(out_dir_base,paste( "adapB_single",sep=''),sep='/')
dir.create(file.path(out_dir_base,paste( "adapB_single",sep='')))
adap_adap_folder<-paste(out_dir_base,paste( "adap_adap_single",sep=''),sep='/')
dir.create(file.path(out_dir_base,paste( "adap_adap_single",sep='')))
}
set.seed(1234)
assign.wrapper(trainingData=trainingData, testData=testData, trainingLabel=trainingLabel1, geneList=NULL, n_sigGene=g, adaptive_B=T, adaptive_S=F, mixture_beta=F, outputDir=adapB_folder, theta0=0.05, theta1=0.9, iter=100000, burn_in=5000)
set.seed(1234)
assign.wrapper(trainingData=trainingData, testData=testData, trainingLabel=trainingLabel1, geneList=NULL, n_sigGene=g, adaptive_B=T, adaptive_S=T, mixture_beta=F, outputDir=adap_adap_folder, theta0=0.05, theta1=0.9, iter=100000, burn_in=5000)
}
getGeneList = function(rDataPath) {
load(rDataPath)
output.data$processed.data$diffGeneList####for a gene list (evaluated but not returned)
output.data$processed.data$S_matrix##signature matrix with coefficients; being the last expression, this is what the function returns
}
writeFile = function(variable, filename) {
write.table(variable, filename ,sep='\t', col.names = NA,quote=F)
}
combineMultiplePredictionFiles= function(fileNames, outFileName){
for(i in 1:length(fileNames))
{
f<-read.csv(fileNames[i], header=1,row.names=1) ###reading in the files one at a time
colnames(f)<-paste(fileNames[i],colnames(f),sep='/')
if(i==1){
data<-f
}
else{
data<-cbind(data,f)
}
}
write.table(data,outFileName ,sep='\t', col.names = NA,quote=F) ###write the combined predictions once all files are read
}
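# Example call (the output file name is just a placeholder; the listing pattern mirrors how the scripts below collect prediction files):
# combineMultiplePredictionFiles(system("ls */adap*/pathway_activity_testset*", intern=TRUE),
#                                "combined_pathway_predictions.txt")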
#sum(!rownames(temp)%in%rownames(temp1))
<file_sep>/Rmarkdowns_scripts/ICBP/23Feb15_multi_icbp.Rmd
---
title: "multipathway_ASSIGN"
author: "<NAME>"
date: "December 23, 2014"
output: pdf_document
---
```{r,echo=FALSE,cache=TRUE,include=FALSE}
library(sva)
library(ASSIGN)
source("~/Documents/ThesisWork/GitRepos/bild_signatures/code/ASSIGN_Functions.R")
# training_label_generation<-function(items,sub,label){
# print(dim(items))
# print(sub)
# print(label)
# {
# traininglabel=list(control=list(for(i in 2:length(sub)){label[i]=1:sub[i]}),)
# trainingLabel1<-list(control=list(egfr=1:6,akt=13:24,bad=13:24,her2=13:24,igf1r=13:24), egfr=7:12,akt=25:30, bad=31:36,her2=37:41,igf1r=42:47)
# }
#
# }
```
Reading in the signature datasets...
```{r include=FALSE,echo=FALSE}
setwd("~/Documents/ThesisWork/GitRepos/bild_signature_validation_old_repo/Datasets")
expr<-as.matrix(read.table("GFP18_AKT_BAD_HER2_IGF1R_RAF_ERK.tpmlog",sep='\t',row.names=1,header=1))
control<-subset(expr, select=GFP.1:GFP.12)
her2<-subset(expr, select=HER2.1:HER2.6)
akt<-subset(expr,select=AKT.1:AKT.6)
bad<-subset(expr,select=BAD.1:BAD.6)
igf1r<-subset(expr,select=IGF1R.1:IGF1R.6)
expr_all<-cbind(control,akt,bad,her2,igf1r)
icbp<-as.matrix(read.table("~/Documents/ThesisWork/GitRepos/bild_signature_validation_old_repo/Datasets/ICBP/icbp_Rsubread_tpmlog.txt", sep='\t', stringsAsFactors=FALSE, header=1, row.names=1))
expr_all_f <-expr_all[apply(expr_all[,1:35]==0,1,mean) < 0.85,]
dim(expr_all_f)
expr_all_icbp_f<-merge_drop(expr_all_f,icbp,by=0)
sub<-c(12,6,6,5,6,55)
pcaplot(expr_all_icbp_f,sub)
bat1<-as.matrix(cbind(c(colnames(expr_all_f),colnames(icbp)),c(rep(1,length(colnames(expr_all_f))),rep(2,length(colnames(icbp))))))
#bat1he
combat_expr1<-ComBat(dat=expr_all_icbp_f, batch=bat1[,2], mod=NULL, numCovs=NULL)
pcaplot(combat_expr1,sub)
c_gfp<-subset(combat_expr1, select=GFP.1:GFP.12)
c_akt<-subset(combat_expr1, select=AKT.1:AKT.6)
c_bad<-subset(combat_expr1, select=BAD.1:BAD.6)
c_her2<-subset(combat_expr1, select=HER2.1:HER2.6)
head(c_her2)
c_igf1r<-subset(combat_expr1, select=IGF1R.1:IGF1R.6)
#############trying two pathways at a time in multipathway#############
```
```{r include=FALSE,echo=FALSE}
#1. HER2 & AKT
trainha<-cbind(c_gfp,c_akt,c_her2)
testha<-combat_expr1[,36:90]
trainingLabelha<-list(control=list(akt=1:12,her2=1:12),akt=13:18,her2=19:23)
dir.create( "~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_multi/Her2_AKT")
#
assign_easy_multi(trainingData = trainha,test=testha,trainingLabel1 = trainingLabelha,g=c(150,15),out_dir_base = "~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_multi/Her2_AKT")
#2. HER2 & BAD
trainhb<-cbind(c_gfp,c_bad,c_her2)
testhb<-combat_expr1[,36:90]
trainingLabelhb<-list(control=list(bad=1:12,her2=1:12),bad=13:18,her2=19:23)
dir.create( "~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_multi/Her2_BAD")
#
assign_easy_multi(trainingData = trainhb,test=testhb,trainingLabel1 = trainingLabelhb,g=c(150,15),out_dir_base = "~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_multi/Her2_BAD/")
#3. HER2 & IGF1R
trainhi<-cbind(c_gfp,c_igf1r,c_her2)
testhi<-combat_expr1[,36:90]
trainingLabelhi<-list(control=list(igf1r=1:12,her2=1:12),igf1r=13:18,her2=19:23)
dir.create( "~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_multi/Her2_IGF1R")
#
assign_easy_multi(trainingData = trainhi,test=testhi,trainingLabel1 = trainingLabelhi,g=c(100,15),out_dir_base = "~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_multi/Her2_IGF1R/")
#4. AKT & BAD
trainab<-cbind(c_gfp,c_akt,c_bad)
testab<-combat_expr1[,36:90]
trainingLabelab<-list(control=list(akt=1:12,bad=1:12),akt=13:18,bad=19:24)
dir.create( "~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_multi/AKT_BAD")
#
assign_easy_multi(trainingData = trainab,test=testab,trainingLabel1 = trainingLabelab,g=c(150,150),out_dir_base = "~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_multi/AKT_BAD/")
#5. AKT & IGF1R
trainai<-cbind(c_gfp,c_akt,c_igf1r)
testai<-combat_expr1[,36:90]
trainingLabelai<-list(control=list(akt=1:12,igf1r=1:12),akt=13:18,igf1r=19:24)
dir.create( "~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_multi/AKT_IGF1R")
#
assign_easy_multi(trainingData = trainai,test=testai,trainingLabel1 = trainingLabelai,g=c(150,100),out_dir_base = "~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_multi/AKT_IGF1R")
#6. BAD & IGF1R
trainbi<-cbind(c_gfp,c_bad,c_igf1r)
testbi<-combat_expr1[,36:90]
trainingLabelbi<-list(control=list(bad=1:12,igf1r=1:12),bad=13:18,igf1r=19:24)
dir.create( "~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_multi/BAD_IGF1R")
#
assign_easy_multi(trainingData = trainbi,test=testbi,trainingLabel1 = trainingLabelbi,g=c(150,100),out_dir_base = "~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_multi/BAD_IGF1R")
#############trying three pathways at a time in multipathway#############
#1. HER2, AKT & BAD
trainhab<-cbind(c_gfp,c_akt,c_bad,c_her2)
testhab<-combat_expr1[,36:90]
trainingLabelhab<-list(control=list(akt=1:12,bad=1:12,her2=1:12),akt=13:18,bad=19:24,her2=25:29)
dir.create( "~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_multi/AKT_BAD_HER2")
#
assign_easy_multi(trainingData = trainhab,test=testhab,trainingLabel1 = trainingLabelhab,g=c(150,150,15),out_dir_base = "~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_multi/AKT_BAD_HER2")
#2. HER2, BAD & IGF1R
trainhbi<-cbind(c_gfp,c_igf1r,c_bad,c_her2)
testhbi<-combat_expr1[,36:90]
trainingLabelhbi<-list(control=list(igf1r=1:12,bad=1:12,her2=1:12),igf1r=13:18,bad=19:24,her2=25:29)
dir.create( "~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_multi/IGF1R_BAD_HER2")
#
assign_easy_multi(trainingData = trainhbi,test=testhbi,trainingLabel1 = trainingLabelhbi,g=c(100,150,15),out_dir_base = "~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_multi/IGF1R_BAD_HER2")
#3. AKT, BAD & IGF1R
trainabi<-cbind(c_gfp,c_akt,c_bad,c_igf1r)
testabi<-combat_expr1[,36:90]
trainingLabelabi<-list(control=list(akt=1:12,bad=1:12,igf1r=1:12),akt=13:18,bad=19:24,igf1r=25:30)
dir.create( "~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_multi/AKT_BAD_IGF1R")
#
assign_easy_multi(trainingData = trainabi,test=testabi,trainingLabel1 = trainingLabelabi,g=c(150,150,100),out_dir_base = "~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_multi/AKT_BAD_IGF1R")
########################trying all four at once#####################
trainhall<-cbind(c_gfp,c_akt,c_bad,c_her2,c_igf1r)
testall<-combat_expr1[,36:90]
trainingLabel<-list(control=list(akt=1:12,bad=1:12,her2=1:12,igf1r=1:12),akt=13:18, bad=19:24,her2=25:29,igf1r=30:35)
dir.create( "~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_multi/all_4")
#
assign_easy_multi(trainingData = trainhall,test=testall,trainingLabel1 = trainingLabel,g=c(150,150,15,100),out_dir_base = "~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_multi/all_4")
```
ASSIGN single pathway. We didn't end up using these results because they were different from Moom's; I think she used different iterations.
```{r}
icbp_test=combat_expr1[,36:90]
#1. HER2
train_her<-cbind(c_gfp,c_her2)
trainingLabelha<-list(control=list(her2=1:12),her2=13:17)
dir.create("~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_single/Her2")
assign_easy_multi(trainingData = train_her,test=icbp_test,trainingLabel1 = trainingLabelha, g=15,out_dir_base = "~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_single/Her2")
#run assign with 100,000 and burn in 5000
train_her<-cbind(c_gfp,c_her2)
head(train_her)
trainingLabelha<-list(control=list(her2=1:12),her2=13:17)
trainingLabelha
dir.create("~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_single/Her2")
train_her
head(icbp_test)
dir.create("~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_single/Her2_try3_adap_adap_setseed")
dir.create("~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_single/Her2_try3_adap_b_setseed")
dir.create("~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_single/Her2_try3_adap_adap_noseed_50")
dir.create("~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_single/Her2_try3_adap_b_noseed_50")
assign.wrapper(trainingData=train_her, testData=icbp_test, trainingLabel=trainingLabelha, n_sigGene=50, adaptive_B=TRUE, adaptive_S=TRUE, mixture_beta=F, outputDir= "~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_single/Her2_try3_adap_adap_noseed_50", p_beta=0.01, theta0=0.05, theta1=0.9, iter=100000, burn_in=5000)
assign.wrapper(trainingData=train_her, testData=icbp_test, trainingLabel=trainingLabelha, n_sigGene=50, adaptive_B=TRUE, adaptive_S=F, mixture_beta=F, outputDir= "~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_single/Her2_try3_adap_b_noseed_50", p_beta=0.01, theta0=0.05, theta1=0.9, iter=100000, burn_in=5000)
assign_easy_multi(trainingData = train_her,test=icbp_test,trainingLabel1 = trainingLabelha, g=15,out_dir_base = "~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_single/Her2")
#2. AKT
train_akt<-cbind(c_gfp,c_akt)
trainingLabel_akt<-list(control=list(akt=1:12),akt=13:18)
dir.create("~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_single/AKT")
assign_easy_multi(trainingData = train_akt,test=icbp_test,trainingLabel1 = trainingLabel_akt,g=150,out_dir_base = "~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_single/AKT")
#3. BAD
train_bad<-cbind(c_gfp,c_bad)
trainingLabel_bad<-list(control=list(bad=1:12),bad=13:18)
dir.create( "~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_single/BAD")
assign_easy_multi(trainingData =train_bad,test=icbp_test,trainingLabel1 = trainingLabel_bad,g=150,out_dir_base = "~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_single/BAD")
#4. IGF1R
train_igfr<-cbind(c_gfp,c_igf1r)
trainingLabel_igfr<-list(control=list(igf1r=1:12),igf1r=13:18)
dir.create( "~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_single/IGF1R")
assign_easy_multi(trainingData =train_igfr,test=icbp_test,trainingLabel1 = trainingLabel_igfr,g=100,out_dir_base = "~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_single/IGF1R")
#5
```
Read in all the predictions and make files with all the results
```{r}
# multi nonadpative
setwd("~/Documents/Thesiswork/ICBP/multi_icbp_expr_pc/")
filenames_nonadapative_multi<-system("ls */nonadap*/pathway_activity_testset*", intern=TRUE)
filenames_nonadapative_multi
combineMultiplePredictionFiles(filenames_nonadapative_multi, "~/Documents/ThesisWork/GitRepos/bild_signatures/Results/ICBP/ICBP_Multipathway_Preds_NonAdaptiveB_NonAdaptS.txt" )
# multi adpative
filenames_adapative_multi<-system("ls */adap*/pathway_activity_testset*", intern=TRUE)
filenames_adapative_multi
combineMultiplePredictionFiles(filenames_nonadapative_multi, "~/Documents/ThesisWork/GitRepos/bild_signatures/Results/ICBP/ICBP_Multipathway_Preds_AdaptiveB_NonAdaptS.txt" )
#multi_adaptive_adaptive
setwd("~/Documents/ThesisWork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_multi/")
filenames_multi_adap_adap<-system("ls */adap_adap*/pathway_activity_testset*", intern=TRUE)
filenames_multi_adap_adap
combineMultiplePredictionFiles(filenames_multi_adap_adap, "~/Documents/ThesisWork/GitRepos/bild_signatures/Results/ICBP/predictions/New/IBCP_Multipathway_preds_adaptiveB_adapativeS.txt" )
############################
#Single pathway
setwd("~/Documents/Thesiswork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_single/Shelley_Try2/")
# Single Adap_Adap
filenames_single_adapat_adapt<-system("ls */adap_adap*/pathway_activity_testset*", intern=TRUE)
filenames_single_adapat_adapt
combineMultiplePredictionFiles(filenames_single_adapat_adapt, "~/Documents/ThesisWork/GitRepos/bild_signatures/Results/ICBP/predictions/New/ICBP_predictions_single_adapB_adaptS.txt")
# Single_adaptivebackground_nonapaptive
filenames_single_adapativeBonly<-system("ls */adap_multi*/pathway_activity_testset*", intern=TRUE)
filenames_single_adapativeBonly
combineMultiplePredictionFiles(filenames_single_adapativeBonly, "~/Documents/ThesisWork/GitRepos/bild_signatures/Results/ICBP/predictions/New/ICBP_predictions_single_adapB_nonadaptS.txt")
#Single nonadp_nonadap
filenames_single_nonadapative<-system("ls */nonadap*/pathway_activity_testset*", intern=TRUE)
filenames_single_nonadapative
combineMultiplePredictionFiles(filenames_single_nonadapative, "~/Documents/ThesisWork/GitRepos/bild_signatures/Results/ICBP/predictions/New/ICBP_predictions_single_nonadapB_nonadaptS.txt")
```
Gene list for no seed
```{r}
setwd("~/Documents/Thesiswork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_single/Her2_try3_adap_b/")
filenames_single_her2_noseed<-system("ls *rda*", intern=TRUE)
filenames_single_her2_noseed
getGeneList(filenames_single_her2_noseed)
```
# Prediction files are done and ready; they just need to be bootstrapped to get p-values.
Read in Moom's non-adaptive single pathway Results
```{r}
#setwd("~/Documents/Thesiswork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_single/single_pathway_results_ICBP/")
setwd("~/Documents/Thesiswork/GitRepos/bild_signatures/ASSIGN_Results_Ignore_too_big/ICBP_single/Her2_try3_adap_b/")
getwd()
filenames_single_nonadapative_moom<-system("ls */nonadap*/pathway_activity_testset*", intern=TRUE)
filenames_single_nonadapative_moom
for(i in 1:length(filenames_single_nonadapative_moom))
{
f<-read.csv(filenames_single_nonadapative_moom[i], header=1,row.names=1) ###reading in the files one at a time
#print(filenames_single_nonadapative_moom[i])
#print(f)
#print(colnames(f))
colnames(f)<-paste(filenames_single_nonadapative_moom[i],colnames(f),sep='/')
print(colnames(f))
if(i==1){
data_single_nonadapative_moom<-f
print(data_single_nonadapative_moom)
}
else{
data_single_nonadapative_moom<-cbind(data_single_nonadapative_moom,f)
}
}
View(data_single_nonadapative_moom)
write.table(data_single_nonadapative_moom,"~/Documents/ThesisWork/GitRepos/bild_signatures/singlepathway_preds_nonadaptive_moom.txt",sep='\t', col.names = NA,quote=F)
```
Non-adaptive Multi and single pathway
```{r}
data_nonapative_multi<-read.delim("~/Documents/ThesisWork/GitRepos/bild_signatures/Results/ICBP/predictions/multipathway_preds_nonadaptive.txt", header=1, sep='\t',row.names=1)
View(data_nonapative_multi)
data_trans_NA=t(data_nonapative_multi)
single_nonadaptive_moom=read.csv("~/Documents/ThesisWork/GitRepos/bild_signatures/singlepathway_preds_nonadaptive_moom.txt", header=1, sep='\t',row.names=1)
View(single_nonadaptive_moom)
# writeFile(cor(data_nonapative_multi, use="na.or.complete", method="spearman"), ...)  # incomplete call: the output file name was never specified
#akt and bad
View(cor(data_nonapative_multi))
cor(data_nonapative_multi$all_4.nonadap_multi.pathway_activity_testset.csv.akt, data_nonapative_multi$all_4.nonadap_multi.pathway_activity_testset.csv.bad, use="na.or.complete", method="spearman") #-0.15
# akt and her2
cor(data_nonapative_multi$all_4.nonadap_multi.pathway_activity_testset.csv.akt, data_nonapative_multi$all_4.nonadap_multi.pathway_activity_testset.csv.her2, use="na.or.complete", method="spearman") #-0.119
# akt and igf1r
cor(data_nonapative_multi$all_4.nonadap_multi.pathway_activity_testset.csv.akt, data_nonapative_multi$all_4.nonadap_multi.pathway_activity_testset.csv.igf1r, use="na.or.complete", method="spearman") #-0.232
# bad and her2
cor(data_nonapative_multi$all_4.nonadap_multi.pathway_activity_testset.csv.bad, data_nonapative_multi$all_4.nonadap_multi.pathway_activity_testset.csv.her2, use="na.or.complete", method="spearman") #-0.153
#bad and igf1r
cor(data_nonapative_multi$all_4.nonadap_multi.pathway_activity_testset.csv.bad, data_nonapative_multi$all_4.nonadap_multi.pathway_activity_testset.csv.igf1r, use="na.or.complete", method="spearman") #-0.412
# her2 and igfr1
cor(data_nonapative_multi$all_4.nonadap_multi.pathway_activity_testset.csv.her2, data_nonapative_multi$all_4.nonadap_multi.pathway_activity_testset.csv.igf1r, use="na.or.complete", method="spearman") #-0.47
# the comments on the side were for pearson, pretty much the same
#akt and igfr
cor(single_nonadaptive_moom$akt.nonadap150.pathway_activity_testset.csv.akt, single_nonadaptive_moom$igf1r.nonadap100.pathway_activity_testset.csv.igf1r, use="na.or.complete", method="spearman")
#akt and bad
cor(single_nonadaptive_moom$akt.nonadap150.pathway_activity_testset.csv.akt, single_nonadaptive_moom$bad.nonadap150.pathway_activity_testset.csv.bad, use="na.or.complete", method="spearman")
# akt and her
cor(single_nonadaptive_moom$akt.nonadap150.pathway_activity_testset.csv.akt, single_nonadaptive_moom$her2.nonadap15.pathway_activity_testset.csv.her2, use="na.or.complete", method="spearman")
# bad and her2
cor(single_nonadaptive_moom$bad.nonadap150.pathway_activity_testset.csv.bad, single_nonadaptive_moom$her2.nonadap15.pathway_activity_testset.csv.her2, use="na.or.complete", method="spearman")
# bad and igfr
cor(single_nonadaptive_moom$bad.nonadap150.pathway_activity_testset.csv.bad, single_nonadaptive_moom$igf1r.nonadap100.pathway_activity_testset.csv.igf1r, use="na.or.complete", method="spearman")
#igfr and her2
cor(single_nonadaptive_moom$her2.nonadap15.pathway_activity_testset.csv.her2, single_nonadaptive_moom$igf1r.nonadap100.pathway_activity_testset.csv.igf1r, use="na.or.complete", method="spearman")
```
Correlate adaptive and non-adaptive multi and single pathway predictions
```{r}
# multi pathway adaptive vs non-adaptive
cor(data$all_4.adap_multi.pathway_activity_testset.csv.akt, data_nonapative_multi$all_4.nonadap_multi.pathway_activity_testset.csv.akt,use="na.or.complete", method="spearman")
cor(data$all_4.adap_multi.pathway_activity_testset.csv.her2, data_nonapative_multi$all_4.nonadap_multi.pathway_activity_testset.csv.her2,use="na.or.complete", method="spearman")
cor(data$all_4.adap_multi.pathway_activity_testset.csv.igf1r, data_nonapative_multi$all_4.nonadap_multi.pathway_activity_testset.csv.igf1r,use="na.or.complete", method="spearman")
cor(data$all_4.adap_multi.pathway_activity_testset.csv.bad, data_nonapative_multi$all_4.nonadap_multi.pathway_activity_testset.csv.bad,use="na.or.complete", method="spearman")
#single pathway
cor(single_nonadaptive_moom$her2.nonadap15.pathway_activity_testset.csv.her2, data_single$her2_s_a, use="na.or.complete", method="spearman")
cor(single_nonadaptive_moom$igf1r.nonadap100.pathway_activity_testset.csv.igf1r, data_single$igf1r_s_a, use="na.or.complete", method="spearman")
cor(single_nonadaptive_moom$bad.nonadap150.pathway_activity_testset.csv.bad, data_single$bad_s_a, use="na.or.complete", method="spearman")
cor(single_nonadaptive_moom$akt.nonadap150.pathway_activity_testset.csv.akt, data_single$akt_s_a, use="na.or.complete", method="spearman")
#drugs<-read.delim("~/Dropbox/bild_signatures/Datasets/ICBP_drugs.txt", header=1, sep='\t',row.names=1)
drugs<-read.delim("~/Documents/ThesisWork/GitRepos/u01project/SignatureValidations/EGFR/ICBP/ICBP_drugs.txt", header=1, sep='\t',row.names=1)
data<-read.delim("~/Documents/ThesisWork/GitRepos/bild_signatures/Results/ICBP/predictions/multipathway_preds.txt", header=1, sep='\t',row.names=1)
data_trans=t(data)
write.table(data_trans,"~/Desktop/multipathway_preds_trans.txt",sep='\t', col.names = NA,quote=F)
data_single=read.csv("~/Documents/ThesisWork/GitRepos/bild_signatures/Results/ICBP/predictions/single_pathway_results.csv", header=1, row.names=1)
View(data_single)
data_single_shelley=read.csv("~/Documents/ThesisWork/GitRepos/bild_signatures/singlepathway_preds_adaptive_2.txt",header=1, sep='\t',row.names=1)
View(data_single_shelley)
# not the same
View(cor(data_single, data_single_shelley))
View(data_single)
data_single[1]
data$AKT_BAD.adap_multi.pathway_activity_testset.csv.akt
data_nonapative_multi$all_4.nonadap_multi.pathway_activity_testset.csv.her2
#AKT
# didn't use this! Correlations between correlations
cor(data$AKT_BAD.adap_multi.pathway_activity_testset.csv.akt, data$AKT_IGF1R.adap_multi.pathway_activity_testset.csv.akt, use="na.or.complete" )
cor(data$AKT_BAD.adap_multi.pathway_activity_testset.csv.akt, data$Her2_AKT.adap_multi.pathway_activity_testset.csv.akt, use="na.or.complete" )
cor(data$AKT_BAD.adap_multi.pathway_activity_testset.csv.akt, data$AKT_BAD_HER2.adap_multi.pathway_activity_testset.csv.akt, use="na.or.complete" )
cor(data$AKT_BAD.adap_multi.pathway_activity_testset.csv.akt, data$AKT_BAD_IGF1R.adap_multi.pathway_activity_testset.csv.akt, use="na.or.complete" )
cor(data$AKT_BAD.adap_multi.pathway_activity_testset.csv.akt, data$all_4.adap_multi.pathway_activity_testset.csv.akt, use="na.or.complete" )
cor(data$Her2_AKT.adap_multi.pathway_activity_testset.csv.akt, data$AKT_BAD_IGF1R.adap_multi.pathway_activity_testset.csv.akt,use="na.or.complete" )
cor(data$Her2_AKT.adap_multi.pathway_activity_testset.csv.akt,data$AKT_BAD_HER2.adap_multi.pathway_activity_testset.csv.akt,use="na.or.complete")
cor(data$Her2_AKT.adap_multi.pathway_activity_testset.csv.akt, data$AKT_BAD_IGF1R.adap_multi.pathway_activity_testset.csv.akt, use="na.or.complete" )
cor(data$Her2_AKT.adap_multi.pathway_activity_testset.csv.akt, data$all_4.adap_multi.pathway_activity_testset.csv.akt, use="na.or.complete" )
cor(data$AKT_IGF1R.adap_multi.pathway_activity_testset.csv.akt, data$AKT_BAD_HER2.adap_multi.pathway_activity_testset.csv.akt, use="na.or.complete" )
cor(data$AKT_IGF1R.adap_multi.pathway_activity_testset.csv.akt, data$AKT_BAD_IGF1R.adap_multi.pathway_activity_testset.csv.akt, use="na.or.complete" )
cor(data$AKT_IGF1R.adap_multi.pathway_activity_testset.csv.akt, data$all_4.adap_multi.pathway_activity_testset.csv.akt, use="na.or.complete" )
cor(data$AKT_BAD_HER2.adap_multi.pathway_activity_testset.csv.akt, data$AKT_BAD_IGF1R.adap_multi.pathway_activity_testset.csv.akt, use="na.or.complete" )
cor(data$AKT_BAD_HER2.adap_multi.pathway_activity_testset.csv.akt, data$all_4.adap_multi.pathway_activity_testset.csv.akt, use="na.or.complete" )
cor(data$AKT_BAD_IGF1R.adap_multi.pathway_activity_testset.csv.akt, data$all_4.adap_multi.pathway_activity_testset.csv.akt, use="na.or.complete" )
#BAD
#HER2
cor(data$AKT_BAD_HER2.adap_multi.pathway_activity_testset.csv.her2, data$Her2_AKT.adap_multi.pathway_activity_testset.csv.her2, use="na.or.complete" )
cor(data$AKT_BAD_HER2.adap_multi.pathway_activity_testset.csv.her2, data$Her2_BAD.adap_multi.pathway_activity_testset.csv.her2, use="na.or.complete" )
cor(data$Her2_AKT.adap_multi.pathway_activity_testset.csv.her2, data$Her2_BAD.adap_multi.pathway_activity_testset.csv.her2, use="na.or.complete" )
#IGFR
cor(data$AKT_BAD_IGF1R.adap_multi.pathway_activity_testset.csv.igf1r, data$AKT_IGF1R.adap_multi.pathway_activity_testset.csv.igf1r, use="na.or.complete" )
cor(data$AKT_BAD_IGF1R.adap_multi.pathway_activity_testset.csv.igf1r, data$BAD_IGF1R.adap_multi.pathway_activity_testset.csv.igf1r, use="na.or.complete" )
cor(data$BAD_IGF1R.adap_multi.pathway_activity_testset.csv.igf1r, data$AKT_IGF1R.adap_multi.pathway_activity_testset.csv.igf1r, use="na.or.complete" )
#correlations between adaptive and non-adap
# multi all four, for table
cor(data$all_4.adap_multi.pathway_activity_testset.csv.akt, data$all_4.adap_multi.pathway_activity_testset.csv.igf1r,use="na.or.complete", method="spearman") # 0.1874326
cor(data$all_4.adap_multi.pathway_activity_testset.csv.akt, data$all_4.adap_multi.pathway_activity_testset.csv.bad,use="na.or.complete", method="spearman") # -0.4327576
cor(data$all_4.adap_multi.pathway_activity_testset.csv.akt, data$all_4.adap_multi.pathway_activity_testset.csv.her2,use="na.or.complete",method="spearman") # 0.26
cor(data$all_4.adap_multi.pathway_activity_testset.csv.her2, data$all_4.adap_multi.pathway_activity_testset.csv.igf1r,use="na.or.complete", method="spearman") #-0.41
cor(data$all_4.adap_multi.pathway_activity_testset.csv.her2, data$all_4.adap_multi.pathway_activity_testset.csv.bad,use="na.or.complete", method="spearman") #-0.34
cor(data$all_4.adap_multi.pathway_activity_testset.csv.bad, data$all_4.adap_multi.pathway_activity_testset.csv.igf1r,use="na.or.complete", method="spearman") #-0.46
# the comments on the side were for pearson, pretty much the same
cor(data_single$akt_s_a, data_single$igf1r_s_a, use="na.or.complete", method="spearman") #0.01
cor(data_single$akt_s_a, data_single$bad_s_a, use="na.or.complete", method="spearman") #0.45 #AKT and BAD correlated
cor(data_single$igf1r_s_a, data_single$bad_s_a, use="na.or.complete", method="spearman") #-0.51 #IGFR and BAD neg correlation
cor(data_single$igf1r_s_a, data_single$her2_s_a, use="na.or.complete", method="spearman") #0.01
cor(data_single$akt_s_a, data_single$her2_s_a, use="na.or.complete", method="spearman") #0.33 AKT and Her2 predictions correlated
cor(data_single$bad, data_single$her2_s_a, use="na.or.complete", method="spearman") #0.03
#do it for all of them (Shelley will do this)
```
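The pairwise calls above can also be collapsed into a single correlation-matrix call; a small sketch over the same `data_single` columns (column names taken from the calls above):
```{r}
single_cols <- c("akt_s_a", "bad_s_a", "her2_s_a", "igf1r_s_a")
cor(data_single[, single_cols], use="na.or.complete", method="spearman")
```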
Now correlating multi-pathway adaptive predictions with ICBP drugs.
```{r, cache=TRUE,include=FALSE,echo=FALSE}
View(data_single)
row.names(data_single)=data_single[,1]
View(data_single)
data_single=data_single[,-1]
View(data_single)
# mk2206<-read.table("~/Desktop/Drug reponse analysis/bildlab_drug response/mk2206.txt",sep='\t',row.names=1,header=1)
# mk2206
View(data)
View(drugs)
pred_drug<-merge_drop(data,drugs,by=0)
View(pred_drug)
data_all<-cbind(data,single)
pred_drug<-merge_drop(data_all,drugs,by=0)
drug_srt<-subset(drugs,select = c("Lapatinib","Sigma.AKT1.2.inhibitor","Temsirolimus","Everolimus"))
pred_drug<-merge_drop(data_all,drug_srt,by=0)
# mk2206<-subset(mk2206,select="Sensitivity..logEC50")
# pred_drug<-merge_drop(pred_drug,mk2206,by=0)
#dim(pred_drug)
View(pred_drug)
dim(pred_drug)
pred_drug_multi<-merge_drop(data,drugs,by=0)
pred_drug_single<-merge_drop(data_single,drugs,by=0)
drug_prediction_correlations_multi= cor(pred_drug_multi[1:25],pred_drug_multi[36:125],use="na.or.complete")
View(pred_drug_multi)
View(pred_drug_single)
ncol(pred_drug_single)
pred_drug_single[15]
drug_prediction_correlations_single= cor(pred_drug_single[1:4],pred_drug_single[15:104],use="na.or.complete")
colnames(pred_drug)<-gsub(pattern = "pathway_activity_testset.csv",replacement = "",x = colnames(pred_drug))
head(pred_drug)
cors = matrix(0,54,4)
rownames(cors)=colnames(pred_drug)[1:54]
colnames(cors)=c("Lapatinib","Sigma.akt.1.2.inhibitor","Temsirolimus","Everolimus")
pathways<-pred_drug[,1:54]
drugs<-pred_drug[,55:58]
for (i in 1:length(colnames(data))){
for (j in 1:length(colnames(drug_srt))){
cors[i,j]=cor(pathways[,i],drugs[,j],use="pairwise.complete.obs")
}
}
View(drug_prediction_correlations_multi)
View(drug_prediction_correlations_single)
single_and_multi= rbind(drug_prediction_correlations_multi,drug_prediction_correlations_single)
plot(single_and_multi[2])
View(single_and_multi)
row.names(single_and_multi)
# for single
single_and_multi[26,]
cor(single_and_multi[26,], single_and_multi[27,] ) # 0.5269367
cor(single_and_multi[26,], single_and_multi[28,] ) #0.7882588
cor(single_and_multi[26,], single_and_multi[29,] ) # 0.6173746
cor(single_and_multi[27,], single_and_multi[28,] ) # 0.2896494
cor(single_and_multi[27,], single_and_multi[29,] ) # -0.02523773
cor(single_and_multi[28,], single_and_multi[29,] ) # 0.7182353
#multi
cor(single_and_multi[22,], single_and_multi[23,] ) #-0.6161527
cor(single_and_multi[22,], single_and_multi[24,] ) # -0.2015345
cor(single_and_multi[22,], single_and_multi[25,] ) # 0.4247083
cor(single_and_multi[23,], single_and_multi[24,] ) # -0.04692151
cor(single_and_multi[23,], single_and_multi[25,] ) # -0.4218923
cor(single_and_multi[24,], single_and_multi[25,] ) # -0.7734885
write.table(single_and_multi,"~/Documents/ThesisWork/GitRepos/bild_signatures/Correlations_ICBP_Single_Multi.txt",sep='\t', col.names = NA,quote=F)
# do with non_adaptive
# run assign single nonadaptive
#for(i in 1:length(colnames(data)))
{
#print(colnames(data)[i])
#write.table(colnames(data)[i], file = "output.csv", row.names = FALSE,
#append = TRUE, col.names = FALSE, sep = ", ")
# print("correlation with MK2206..an AKT inhibitor")
# print(cor(pred_drug[i],as.numeric(pred_drug$Sensitivity..logEC50),use="na.or.complete"))
#print("correlation with Sigma AKT1/2..an AKT inhibitor")
# cor(pred_drug[i],pred_drug$Sigma.AKT1.2.inhibitor,use="na.or.complete")
#print("correlation with GSK2126458..an AKT inhibitor")
#print(cor(pred_drug[i],pred_drug$GSK2126458,use="na.or.complete"))
#print("correlation with TRIciribine..an AKT inhibitor")
#print(cor(pred_drug[i],pred_drug$Triciribine,use="na.or.complete"))
# for(i in 1:length(colnames(data)))
# {
# print(colnames(data)[i])
# print("correlation with MK2206..an AKT inhibitor")
# print(cor(pred_drug[i],as.numeric(pred_drug$Sensitivity..logEC50),use="na.or.complete"))
# print("correlation with Sigma AKT1/2..an AKT inhibitor")
# print(cor(pred_drug[i],pred_drug$Sigma.AKT1.2.inhibitor,use="na.or.complete"))
# print("correlation with GSK2126458..an AKT inhibitor")
# print(cor(pred_drug[i],pred_drug$GSK2126458,use="na.or.complete"))
# print("correlation with TRIciribine..an AKT inhibitor")
# print(cor(pred_drug[i],pred_drug$Triciribine,use="na.or.complete"))
# print("correlation with Lapatinib...EGFR/HER2 inhibitor")
# print(cor(pred_drug[i],pred_drug$Lapatinib,use="na.or.complete"))
# print("correlation with Tykerb.IGF1R..1.1...EGFR/HER2/IGF1R inhibitor")
# print(cor(pred_drug[i],pred_drug$Tykerb.IGF1R..1.1.,use="na.or.complete"))
# print("correlation with GSK1838705...an IGF1R inhibitor")
# print(cor(pred_drug[i],pred_drug$GSK1838705, use = "na.or.complete"))
}
# what we have done so far in terms of cors is the AKT drugs to all combos on multi-path/single predictions.
# The IGF1R predictions were good with AKT inhibitors
# Shelley should correlate other drugs with all predictions.
# Don't have BAD drugs
# HER2 with lapatinib, BIBW2206
# we hav
# # #
# }
```
Correlate Non-adaptive with ICBP drugs
```{r, cache=TRUE,include=FALSE,echo=FALSE}
data<-read.delim("~/Documents/ThesisWork/GitRepos/bild_signatures/Results/ICBP/predictions/multipathway_preds.txt", header=1, sep='\t',row.names=1)
data_trans=t(data)
write.table(data_trans,"~/Desktop/multipathway_preds_trans.txt",sep='\t', col.names = NA,quote=F)
#data_single=read.csv("~/Documents/ThesisWork/GitRepos/bild_signatures/Results/ICBP/predictions/single_pathway_results.csv")
#AKT
# didn't use this! Correlations between correlations
cor(data$AKT_BAD.adap_multi.pathway_activity_testset.csv.akt, data$AKT_IGF1R.adap_multi.pathway_activity_testset.csv.akt, use="na.or.complete" )
cor(data$AKT_BAD.adap_multi.pathway_activity_testset.csv.akt, data$Her2_AKT.adap_multi.pathway_activity_testset.csv.akt, use="na.or.complete" )
cor(data$AKT_BAD.adap_multi.pathway_activity_testset.csv.akt, data$AKT_BAD_HER2.adap_multi.pathway_activity_testset.csv.akt, use="na.or.complete" )
cor(data$AKT_BAD.adap_multi.pathway_activity_testset.csv.akt, data$AKT_BAD_IGF1R.adap_multi.pathway_activity_testset.csv.akt, use="na.or.complete" )
cor(data$AKT_BAD.adap_multi.pathway_activity_testset.csv.akt, data$all_4.adap_multi.pathway_activity_testset.csv.akt, use="na.or.complete" )
cor(data$Her2_AKT.adap_multi.pathway_activity_testset.csv.akt, data$AKT_BAD_IGF1R.adap_multi.pathway_activity_testset.csv.akt,use="na.or.complete" )
cor(data$Her2_AKT.adap_multi.pathway_activity_testset.csv.akt,data$AKT_BAD_HER2.adap_multi.pathway_activity_testset.csv.akt,use="na.or.complete")
cor(data$Her2_AKT.adap_multi.pathway_activity_testset.csv.akt, data$AKT_BAD_IGF1R.adap_multi.pathway_activity_testset.csv.akt, use="na.or.complete" )
cor(data$Her2_AKT.adap_multi.pathway_activity_testset.csv.akt, data$all_4.adap_multi.pathway_activity_testset.csv.akt, use="na.or.complete" )
cor(data$AKT_IGF1R.adap_multi.pathway_activity_testset.csv.akt, data$AKT_BAD_HER2.adap_multi.pathway_activity_testset.csv.akt, use="na.or.complete" )
cor(data$AKT_IGF1R.adap_multi.pathway_activity_testset.csv.akt, data$AKT_BAD_IGF1R.adap_multi.pathway_activity_testset.csv.akt, use="na.or.complete" )
cor(data$AKT_IGF1R.adap_multi.pathway_activity_testset.csv.akt, data$all_4.adap_multi.pathway_activity_testset.csv.akt, use="na.or.complete" )
cor(data$AKT_BAD_HER2.adap_multi.pathway_activity_testset.csv.akt, data$AKT_BAD_IGF1R.adap_multi.pathway_activity_testset.csv.akt, use="na.or.complete" )
cor(data$AKT_BAD_HER2.adap_multi.pathway_activity_testset.csv.akt, data$all_4.adap_multi.pathway_activity_testset.csv.akt, use="na.or.complete" )
cor(data$AKT_BAD_IGF1R.adap_multi.pathway_activity_testset.csv.akt, data$all_4.adap_multi.pathway_activity_testset.csv.akt, use="na.or.complete" )
#BAD
#HER2
cor(data$AKT_BAD_HER2.adap_multi.pathway_activity_testset.csv.her2, data$Her2_AKT.adap_multi.pathway_activity_testset.csv.her2, use="na.or.complete" )
cor(data$AKT_BAD_HER2.adap_multi.pathway_activity_testset.csv.her2, data$Her2_BAD.adap_multi.pathway_activity_testset.csv.her2, use="na.or.complete" )
cor(data$Her2_AKT.adap_multi.pathway_activity_testset.csv.her2, data$Her2_BAD.adap_multi.pathway_activity_testset.csv.her2, use="na.or.complete" )
#IGFR
cor(data$AKT_BAD_IGF1R.adap_multi.pathway_activity_testset.csv.igf1r, data$AKT_IGF1R.adap_multi.pathway_activity_testset.csv.igf1r, use="na.or.complete" )
cor(data$AKT_BAD_IGF1R.adap_multi.pathway_activity_testset.csv.igf1r, data$BAD_IGF1R.adap_multi.pathway_activity_testset.csv.igf1r, use="na.or.complete" )
cor(data$BAD_IGF1R.adap_multi.pathway_activity_testset.csv.igf1r, data$AKT_IGF1R.adap_multi.pathway_activity_testset.csv.igf1r, use="na.or.complete" )
# multi all four, for table
cor(data$all_4.adap_multi.pathway_activity_testset.csv.akt, data$all_4.adap_multi.pathway_activity_testset.csv.igf1r,use="na.or.complete", method="spearman") # 0.1874326
cor(data$all_4.adap_multi.pathway_activity_testset.csv.akt, data$all_4.adap_multi.pathway_activity_testset.csv.bad,use="na.or.complete", method="spearman") # -0.4327576
cor(data$all_4.adap_multi.pathway_activity_testset.csv.akt, data$all_4.adap_multi.pathway_activity_testset.csv.her2,use="na.or.complete",method="spearman") # 0.26
cor(data$all_4.adap_multi.pathway_activity_testset.csv.her2, data$all_4.adap_multi.pathway_activity_testset.csv.igf1r,use="na.or.complete", method="spearman") #-0.41
cor(data$all_4.adap_multi.pathway_activity_testset.csv.her2, data$all_4.adap_multi.pathway_activity_testset.csv.bad,use="na.or.complete", method="spearman") #-0.34
cor(data$all_4.adap_multi.pathway_activity_testset.csv.bad, data$all_4.adap_multi.pathway_activity_testset.csv.igf1r,use="na.or.complete", method="spearman") #-0.46
# the comments on the side were for pearson, pretty much the same
cor(data_single$akt_s_a, data_single$igf1r_s_a, use="na.or.complete", method="spearman") #0.01
cor(data_single$akt_s_a, data_single$bad_s_a, use="na.or.complete", method="spearman") #0.45 #AKT and BAD correlated
cor(data_single$igf1r_s_a, data_single$bad_s_a, use="na.or.complete", method="spearman") #-0.51 #IGFR and BAD neg correlation
cor(data_single$igf1r_s_a, data_single$her2_s_a, use="na.or.complete", method="spearman") #0.01
cor(data_single$akt_s_a, data_single$her2_s_a, use="na.or.complete", method="spearman") #0.33 AKT and Her2 predictions correlated
cor(data_single$bad, data_single$her2_s_a, use="na.or.complete", method="spearman") #0.03
#do it for all of them (Shelley will do this)
View(data_single)
row.names(data_single)=data_single[,1]
View(data_single)
data_single=data_single[,-1]
View(data_single)
# mk2206<-read.table("~/Desktop/Drug reponse analysis/bildlab_drug response/mk2206.txt",sep='\t',row.names=1,header=1)
# mk2206
View(data)
View(drugs)
pred_drug<-merge_drop(data,drugs,by=0)
View(pred_drug)
data_all<-cbind(data,single)
pred_drug<-merge_drop(data_all,drugs,by=0)
drug_srt<-subset(drugs,select = c("Lapatinib","Sigma.AKT1.2.inhibitor","Temsirolimus","Everolimus"))
pred_drug<-merge_drop(data_all,drug_srt,by=0)
# mk2206<-subset(mk2206,select="Sensitivity..logEC50")
# pred_drug<-merge_drop(pred_drug,mk2206,by=0)
pred_drug_multi<-merge_drop(data,drugs,by=0)
pred_drug_single<-merge_drop(data_single,drugs,by=0)
drug_prediction_correlations_multi= cor(pred_drug_multi[,1:25],pred_drug_multi[,36:125],use="na.or.complete")
View(drug_prediction_correlations_multi)
dimnames(all_drug)<-c(colnames(pred_drug_multi)[1:25],colnames(pred_drug_multi)[36:125])
drug_prediction_correlations_single= cor(pred_drug_single[1:4],pred_drug_single[15:104],use="na.or.complete")
colnames(pred_drug)<-gsub(pattern = "pathway_activity_testset.csv",replacement = "",x = colnames(pred_drug))
head(pred_drug)
cors = matrix(0,54,4)
rownames(cors)=colnames(pred_drug)[1:54]
colnames(cors)=c("Lapatinib","Sigma.akt.1.2.inhibitor","Temsirolimus","Everolimus")
pathways<-pred_drug[,1:54]
drugs<-pred_drug[,55:58]
for (i in 1:length(colnames(data))){
for (j in 1:length(colnames(drug_srt))){
cors[i,j]=cor(pathways[,i],drugs[,j],use="pairwise.complete.obs")
}
}
View(drug_prediction_correlations_multi)
View(drug_prediction_correlations_single)
single_and_multi= rbind(drug_prediction_correlations_multi,drug_prediction_correlations_single)
plot(single_and_multi[2])
View(single_and_multi)
row.names(single_and_multi)
# for single
single_and_multi[26,]
cor(single_and_multi[26,], single_and_multi[27,] ) # 0.5269367
cor(single_and_multi[26,], single_and_multi[28,] ) #0.7882588
cor(single_and_multi[26,], single_and_multi[29,] ) # 0.6173746
cor(single_and_multi[27,], single_and_multi[28,] ) # 0.2896494
cor(single_and_multi[27,], single_and_multi[29,] ) # -0.02523773
cor(single_and_multi[28,], single_and_multi[29,] ) # 0.7182353
#multi
cor(single_and_multi[22,], single_and_multi[23,] ) #-0.6161527
cor(single_and_multi[22,], single_and_multi[24,] ) # -0.2015345
cor(single_and_multi[22,], single_and_multi[25,] ) # 0.4247083
cor(single_and_multi[23,], single_and_multi[24,] ) # -0.04692151
cor(single_and_multi[23,], single_and_multi[25,] ) # -0.4218923
cor(single_and_multi[24,], single_and_multi[25,] ) # -0.7734885
write.table(single_and_multi,"~/Documents/ThesisWork/GitRepos/bild_signatures/Correlations_ICBP_Single_Multi.txt",sep='\t', col.names = NA,quote=F)
```
Creating heatmaps
```{r }
if (!require("gplots")) {
install.packages("gplots", dependencies = TRUE)
library(gplots)
}
if (!require("RColorBrewer")) {
install.packages("RColorBrewer", dependencies = TRUE)
library(RColorBrewer)
}
multi<-data#read.table("~/Desktop/multipathway_preds.txt", sep='\t',row.names=1,header=1)
single<-read.csv("~/Dropbox/bild_signatures/multi_icbp_expr_pc/single_pathway_results.csv", row.names=1,header=1)
my_palette <- colorRampPalette(c("darkblue","aliceblue","brown4"))(n = 299)
col_breaks = c(seq(0,0.2,length=100), seq(0.2,0.4,length=100), seq(0.4,1,length=100))
# creates a 5 x 5 inch image
png("heatmaps_in_r.png", # create PNG for the heat map
width = 5*300, # 5 x 300 pixels
height = 5*300,
res = 300, # 300 pixels per inch
pointsize = 8)
comb<-cbind(multi,single)
dim(comb)
colnames(comb)<-gsub(pattern = "adap_multi.pathway_activity_testset.csv",replacement = "A",x = colnames(comb))
#colnames(comb)<-gsub(pattern = "non",replacement = "NA",x = colnames(comb))
pdf(file='~/Dropbox/bild_signatures//bild_signatures/activity_subtype.pdf')
heatmap.2(as.matrix(comb),col=my_palette,margins=c(12,9),Rowv=F,Colv=F,dendrogram="none", trace="none",main="All possibilities",breaks = col_breaks)
heatmap.2(as.matrix(comb[,47:50]),col=my_palette,trace="none",main="Multipathway activity",margins=c(12,9),Rowv =F,Colv=F,dendrogram="none",ylab="ICBP Cell lines",breaks = col_breaks)
heatmap.2(as.matrix(comb[,51:54]),margins=c(12,9),col=my_palette, Rowv=F,Colv=F,dendrogram="none",trace="none",main="Single pathway activity",ylab="ICBP Cell lines",scale = "row")
comb_drug<-merge_drop(comb,drugs,by=0)
#plot(hclust(dist(comb_drug[,1:48]), method = "complete", members = NULL))
basal<-subset(comb_drug,comb_drug$Transcriptional.subtype...ERBB2.status=="Basal")
heatmap.2(as.matrix(basal[,22:25]),col=my_palette,margins=c(12,9),dendrogram="none", trace="none",main="Basal Multi")
heatmap.2(as.matrix(basal[,26:29]),col=my_palette,margins=c(12,9),dendrogram="none",lmat=rbind( c(0, 3, 4), c(2,1,0 ) ), lwid=c(1.5, 4, 2 ), trace="none",main="Basal single")
cor(basal[,22],basal$Sigma.AKT1.2.inhibitor,use="pairwise")
her<-subset(comb_drug[,1:44],comb_drug$Transcriptional.subtype...ERBB2.status=="ERBB2-amp"|comb_drug$Transcriptional.subtype...ERBB2.status=="ERBB2Amp")
heatmap.2(as.matrix(her[,22:25]),col=my_palette,dendrogram="none",lmat=rbind( c(0, 3, 4), c(2,1,0 ) ), lwid=c(1.5, 4, 2 ), trace="none",margins=c(12,9),main="HER2 Multi")
heatmap.2(as.matrix(her[,26:29]),col=my_palette,dendrogram="none",lmat=rbind( c(0, 3, 4), c(2,1,0 ) ), lwid=c(1.5, 4, 2 ), trace="none",margins=c(12,9),main="HER2 Single",scale="column")
cor(her[,22],her$Sigma.AKT1.2.inhibitor,use="pairwise")
claudin<-subset(comb_drug,comb_drug$Transcriptional.subtype...ERBB2.status=="Claudin-low")
heatmap.2(as.matrix(claudin[,22:25]),col=my_palette,dendrogram="none",lmat=rbind( c(0, 3, 4), c(2,1,0 ) ), lwid=c(1.5, 4, 2 ), trace="none",margins=c(12,9),main="Claudin Multi")
heatmap.2(as.matrix(claudin[,26:29]),col=my_palette,dendrogram="none",lmat=rbind( c(0, 3, 4), c(2,1,0 ) ), lwid=c(1.5, 4, 2 ), trace="none",margins=c(12,9),main="Claudin Single",scale="column")
cor(claudin[,22],claudin$Sigma.AKT1.2.inhibitor,use="na.or.complete")
luminal<-subset(comb_drug,comb_drug$Transcriptional.subtype...ERBB2.status=="Luminal")
heatmap.2(as.matrix(luminal[,22:25]),col=my_palette,dendrogram="none",lmat=rbind( c(0, 3, 4), c(2,1,0 ) ), lwid=c(1.5, 4, 2 ), trace="none",margins=c(12,9),main="Luminal Multi")
heatmap.2(as.matrix(luminal[,26:29]),col=my_palette,dendrogram="none",lmat=rbind( c(0, 3, 4), c(2,1,0 ) ), lwid=c(1.5, 4, 2 ), trace="none",margins=c(12,9),main="Luminal Single",scale="row")
# write(file="~/Dropbox/bild_signatures/bild_signatures/pathway prediction drug response results.xls","Correlations for pathway predictions with Lapatinib and Sigma.AKT.inhibitor in BASAL subtype",append = T)
# write('',file="~/Dropbox/bild_signatures/bild_signatures/pathway prediction drug response results.xls", append=T)
basal<-subset(comb_drug,comb_drug$Transcriptional.subtype...ERBB2.status=="Basal")
# write("For Lapatinib",file="~/Dropbox/bild_signatures/bild_signatures/pathway prediction drug response results.xls", append =T)
# write(paste(colnames(basal)[1:54],cor(basal[,1:54],basal$Lapatinib,use="pairwise.complete.obs"),sep='\t'), file="~/Dropbox/bild_signatures/bild_signatures/pathway prediction drug response results.xls", append = T)
# write("For Sigma AKT inhibitor",file="~/Dropbox/bild_signatures/bild_signatures/pathway prediction drug response results.xls", append =T)
# write(paste(colnames(basal)[1:54],cor(basal[,1:54],basal$Sigma.AKT1.2.inhibitor,use="pairwise.complete.obs"),sep='\t'),file="~/Dropbox/bild_signatures/bild_signatures/pathway prediction drug response results.xls",append = T)
par(mfrow = c(2,1))
heatmap.2(as.matrix(basal[,47:50]),col=my_palette,margins=c(12,9),dendrogram="none", trace="none",main="Basal Multi",Rowv = NULL, Colv = NULL)
heatmap.2(as.matrix(basal[,51:54]),col=my_palette,margins=c(12,9),dendrogram="none",Rowv = NULL, Colv = NULL, trace="none",main="Basal single")
her<-subset(comb_drug,comb_drug$Transcriptional.subtype...ERBB2.status=="ERBB2-amp"|comb_drug$Transcriptional.subtype...ERBB2.status=="ERBB2Amp")
# write(paste(colnames(her)[1:54],cor(her[,1:54],her$Lapatinib,use="pairwise.complete.obs"),sep='\t'),file="~/Dropbox/bild_signatures/bild_signatures/pathway prediction drug response results.xls",append = T)
# write("For Sigma AKT inhibitor",file="~/Dropbox/bild_signatures/bild_signatures/pathway prediction drug response results.xls", append =T)
# write(paste(colnames(her),cor(her[,1:54],her$Sigma.AKT1.2.inhibitor,use="pairwise.complete.obs"),sep='\t'),file="~/Dropbox/bild_signatures/bild_signatures/pathway prediction drug response results.xls",append = T)
#
heatmap.2(as.matrix(her[,43:54]),col=my_palette,dendrogram="none",lmat=rbind( c(0, 3, 4), c(2,1,0 ) ), lwid=c(1.5, 4, 2 ), trace="none",margins=c(14,9),main="HER2 Multi",Rowv=F, Colv=F)
heatmap.2(as.matrix(her[,51:54]),col=my_palette,dendrogram="none",lmat=rbind( c(0, 3, 4), c(2,1,0 ) ), lwid=c(1.5, 4, 2 ), trace="none",margins=c(12,9),main="HER2 Single",Rowv=F, Colv=F,scale = "row")
claudin<-subset(comb_drug,comb_drug$Transcriptional.subtype...ERBB2.status=="Claudin-low")
cor(claudin[,1:54],claudin$Lapatinib,use="pairwise.complete.obs")# variance zero in this group
cor(claudin[,1:54],claudin$Sigma.AKT1.2.inhibitor,use="pairwise.complete.obs")
heatmap.2(as.matrix(claudin[,47:50]),col=my_palette,dendrogram="none",trace="none",margins=c(12,9),main="Claudin Multi",Rowv=F, Colv=F)
heatmap.2(as.matrix(claudin[,51:54]),col=my_palette,dendrogram="none", trace="none",margins=c(12,9),main="Claudin Single",scale="row",Rowv=F, Colv=F)
cor(claudin[,22],claudin$Sigma.AKT1.2.inhibitor,use="na.or.complete")
luminal<-subset(comb_drug,comb_drug$Transcriptional.subtype...ERBB2.status=="Luminal")
cor(luminal[,47:50],luminal$Lapatinib,use="pairwise.complete.obs")
cor(luminal[,51:54],luminal$Sigma.AKT1.2.inhibitor,use="pairwise.complete.obs")
heatmap.2(as.matrix(luminal[,47:50]),col=my_palette,dendrogram="none", trace="none",margins=c(12,9),main="Luminal Multi",Rowv=F, Colv=F)
heatmap.2(as.matrix(luminal[,51:54]),col=my_palette,dendrogram="none",trace="none",margins=c(12,9),main="Luminal Single",scale="row",Rowv=F, Colv=F)
cor(luminal[,22],luminal$Sigma.AKT1.2.inhibitor,use="na.or.complete")
multi_4<-rbind(basal[,43:46],her[,43:46],claudin[,43:46],luminal[,43:46])
png("heatmaps_multi_adap.png",width = 5*300,height = 5*300,res = 800, pointsize = 8)
heatmap.2(as.matrix(multi_4), RowSideColors = c(rep("gray", length(rownames(basal))),rep("blue", length(rownames(her))),rep("black", length(rownames(claudin))),rep("green",length(rownames(luminal)))),col=my_palette,dendrogram="none", trace="none",margins=c(15,10),main="Multi Preds within Subtypes",scale="row",Rowv=F)
par(lend = 10) # square line ends for the color legend
legend("topright",legend = c("Basal", "HER2", "Claudin","Luminal"), col = c("gray", "blue", "black","green"), lty= 1,lwd = 10)
dev.off()
single_4<-rbind(basal[,51:54],her[,51:54],claudin[,51:54],luminal[,51:54])
heatmap.2(as.matrix(single_4), RowSideColors = c(rep("gray", length(rownames(basal))),rep("blue", length(rownames(her))),rep("black", length(rownames(claudin))),rep("green",length(rownames(luminal)))),col=my_palette,dendrogram="none", trace="none",margins=c(12,9),main="Single Preds within Subtypes",scale="row",Rowv=F)
par(lend = 10) # square line ends for the color legend
legend("topright",legend = c("Basal", "HER2", "Claudin","Luminal"), col = c("gray", "blue", "black","green"), lty= 1,lwd = 10)
dev.off()
```
```{r echo=FALSE}
time<-format(Sys.time(),"%a %b %d %X %Y")
```
This analysis was run on `r time`
<file_sep>/Rmarkdowns_scripts/TCGA/Heatmap_tcga_3_15.Rmd
---
title: "Heatmaps of pathway predictions in TCGA BRCA samples based on subtypes"
output: html_document
---
```{r include=FALSE}
source('~/Dropbox/bild_signatures/bild_signatures/Rmarkdowns_scripts//Key_ASSIGN_functions.Rmd', echo=TRUE)
setwd("~/Dropbox/bild_signatures/tcga_15_mar_all/")
filenames_tcga_multi<-system("ls */*/pathway_activity_testset*", intern=TRUE)
filenames_tcga_multi
for(i in 1:length(filenames_tcga_multi))
{
f<-read.csv(filenames_tcga_multi[i], header=1,row.names=1) ###reading in the files one at a time
colnames(f)<-paste(filenames_tcga_multi[i],colnames(f),sep='/')
if(i==1){
data_tcga<-f
}
else{
data_tcga<-cbind(data_tcga,f)
}
}
head(data_tcga)
dim(data_tcga)
colnames(data_tcga)<-gsub(pattern = "/pathway_activity_testset.csv",replacement = "",x = colnames(data_tcga))
head(data_tcga)
```
```{r include=FALSE}
if (!require("gplots")) {
install.packages("gplots", dependencies = TRUE)
library(gplots)
}
if (!require("RColorBrewer")) {
install.packages("RColorBrewer", dependencies = TRUE)
library(RColorBrewer)
}
prediction_heatmap<-function(x,type=NULL)
{
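# Splits the prediction table into single-pathway vs multi-pathway columns (adaptive background only vs adaptive background + signature) and draws a row-scaled heatmap for each set.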
adapB_single=subset(x,select=grep("adapB_single",colnames(x)))
adap_adap_single=subset(x,select=grep("adap_adap_single",colnames(x)))
adapB_multi=subset(x,select=grep("akt_bad_her2_igf1r_erk/adapB",colnames(x)))
adap_adap_multi=subset(x,select=grep("akt_bad_her2_igf1r_erk/adap_adap",colnames(x)))
adapB_multi4=subset(x,select=grep("akt_bad_her2_igf1r/adapB",colnames(x)))
adap_adap_multi4=subset(x,select=grep("akt_bad_her2_igf1r/adap_adap",colnames(x)))
colnames(adapB_single)=colnames(adap_adap_single)=c("AKT","BAD","ERK","HER2","IGF1R")
adapB_single= adapB_single[,c("AKT","BAD","HER2","IGF1R","ERK")]
adap_adap_single=adap_adap_single[,c("AKT","BAD","HER2","IGF1R","ERK")]
colnames(adapB_multi)=colnames(adap_adap_multi)=c("AKT","BAD","HER2","IGF1R","ERK")
colnames(adapB_multi4)=colnames(adap_adap_multi4)=c("AKT","BAD","HER2","IGF1R")
heatmap.2(as.matrix(adapB_single),col=my_palette,margins=c(15,9),dendrogram="none", trace="none",main=paste(type,"Single Adap BG",sep = "\n"),Rowv = NULL, Colv = NULL,density.info = "none",scale = 'row')#cellnote = round(x[,43:46],digits = 2),notecol = 'black'
heatmap.2(as.matrix(adap_adap_single),col=my_palette,margins=c(15,9),dendrogram="none", trace="none",main=paste(type,"Single Adap BGSG",sep = "\n"),Rowv = NULL, Colv = NULL,density.info = "none",scale = 'row')#cellnote = round(x[,43:46],digits = 2),notecol = 'black
heatmap.2(as.matrix(adapB_multi),col=my_palette,margins=c(15,9),dendrogram="none",Rowv = NULL, Colv = NULL, trace="none",main=paste(type,"Multi Adap BG",sep = "\n"),density.info = 'none',scale = 'row')#,cellnote = round(x[,51:54],digits = 2),notecol = 'black',
heatmap.2(as.matrix(adap_adap_multi),col=my_palette,margins=c(15,9),dendrogram="none",Rowv = NULL, Colv = NULL, trace="none",main=paste(type,"Multi Adap BGSG",sep = "\n"),density.info = 'none',scale = 'row')#,cellnote = round(x[,51:54],digits = 2),notecol = 'black',
heatmap.2(as.matrix(adapB_multi4),col=my_palette,margins=c(15,9),dendrogram="none",Rowv = NULL, Colv = NULL, trace="none",main=paste(type,"Multi 4 Adap BG",sep = "\n"),density.info = 'none',scale = 'row')#,cellnote = round(x[,51:54],digits = 2),notecol = 'black',
heatmap.2(as.matrix(adap_adap_multi4),col=my_palette,margins=c(15,9),dendrogram="none",Rowv = NULL, Colv = NULL, trace="none",main=paste(type,"Multi 4 Adap BGSG",sep = "\n"),density.info = 'none',scale = 'row')#,cellnote = round(x[,51:54],digits = 2),notecol = 'black',
}
correlation_heatmap<-function(x,drugs_names=NULL,type=NULL)
{
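# Computes bootstrapped Spearman correlations (cor.ci from the psych package) between each prediction column and each drug-response column, then plots the correlation matrix as an annotated heatmap.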
adapB_single=subset(x,select=grep("adapB_single",colnames(x)))
adap_adap_single=subset(x,select=grep("adap_adap_single",colnames(x)))
adapB_multi=subset(x,select=grep("akt_bad_her2_igf1r_erk/adapB",colnames(x)))
adap_adap_multi=subset(x,select=grep("akt_bad_her2_igf1r_erk/adap_adap",colnames(x)))
cors = pval=matrix(-2,20,7)
#rownames(cors)=c(colnames(x)[43:46],colnames(x)[51:54])#selecting the adaptive multipathway and single pathway prediction columns
pathways<-cbind(adapB_single,adap_adap_single,adapB_multi,adap_adap_multi)
drugs<-subset(x,select=drugs_names)
colnames(cors)=drugs_names
rownames(cors)=colnames(pathways)
rownames(cors)=gsub(pattern = "akt_bad_her2_igf1r_erk/",replacement = "",x = rownames(cors))
#rownames(cors)=gsub(pattern = "*/*/adapB",replacement = "B",x = rownames(cors))
for (i in 1:20){
for (j in 1:length(colnames(drugs))){
ci=cor.ci(cbind(pathways[,i],drugs[,j]),method="spearman",plot=F)
cors[i,j]=ci$rho[2,1]
print(cors[i,j]);print(ci$ci[[5]])
pval[i,j]=ci$ci[[5]]
}
}
par(mar=c(1, 0.5, 3, 10),lwd=4)
heatmap.2(as.matrix(cors),col=redgreen,margins =c(13,18),dendrogram="none",Rowv = NULL, Colv = NULL, trace="none",main=paste("Correlations of pathway"," drug response in",type,sep='\n '),cellnote = round(cors,digits = 2),notecol = 'black',density.info = 'none')
}
pam50<-read.table("~/Dropbox/Datasets/tcga_breast_pam50.txt",sep='\t', stringsAsFactors = T,header=T, row.names=1)
partial_sample_names<-rownames(pam50)
sample_names<-rownames(data_tcga)
counter=0
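# Map the truncated PAM50 sample IDs onto the full TCGA sample names via partial matching (pmatch); counter tracks how many IDs were matched.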
for (j in 1:length(partial_sample_names)){
if(!is.na(pmatch(partial_sample_names[j],sample_names))){
partial_sample_names[j]<-sample_names[pmatch(partial_sample_names[j],sample_names, duplicates.ok=F)]
counter=counter+1
}
}
rownames(pam50)<-partial_sample_names
my_palette <- colorRampPalette(c("darkblue","aliceblue","brown4"))(n = 299)
col_breaks = c(seq(0,0.2,length=100), seq(0.2,0.4,length=100), seq(0.4,1,length=100))
pred_sub<-merge_drop(data_tcga,pam50,by=0)
colnames(pred_sub)<-gsub(pattern = "pathway_activity_testset.csv",replacement = "",x = colnames(pred_sub))
```
Creating heatmaps for predictions within subtypes
```{r}
basal<-subset(pred_sub,pred_sub$PAM50.mRNA=="Basal-like")
prediction_heatmap(x=basal,type = "Basal")
her<-subset(pred_sub,pred_sub$PAM50.mRNA=="HER2-enriched")
prediction_heatmap(x=her,type = "ERBB2 Amplified")
luminal<-subset(pred_sub,pred_sub$PAM50.mRNA=="Luminal A"|pred_sub$PAM50.mRNA=="Luminal B")
prediction_heatmap(x=luminal,type = "Luminal")
normal<-subset(pred_sub,pred_sub$PAM50.mRNA=="Normal-like")
prediction_heatmap(x=normal,type = "Normal-like")
```
```{r echo=FALSE}
time<-format(Sys.time(),"%a %b %d %X %Y")
```
This analysis was run on `r time`
<file_sep>/Rmarkdowns_scripts/ICBP/18Mar_ICBP_multi_drug_cors.Rmd
---
title: "Multi_pathway Correlations with ICBP"
author: "Shelley"
date: "March 18, 2015"
output: html_document
---
```{r}
library(psych)
setwd("~//Documents/Thesiswork/GitRepos/bild_signatures/")
system("ls")
ICBP_preds_drugs<-(read.table("ICBP_pathwaypredictions_drug_all_combos.txt", sep='\t', stringsAsFactors=FALSE, header=1, row.names=1))
View(ICBP_preds_drugs)
colnames(ICBP_preds_drugs)
dimnames(ICBP_preds_drugs)
```
```{r}
colnames(ICBP_preds_drugs)
rhos=p_val=NULL
temp=cors_pvals=NULL
for(i in 1:160){
for(j in 1:90) {
#i=1
#j=1
pathway_drug=subset(ICBP_preds_drugs, select=c(i,160+j))
temp_cors= cor.ci(pathway_drug, method="spearman", plot=FALSE, n.iter=1000)
temp<-c(temp_cors$rho[2,1],temp_cors$ci[5])
temp<- cbind(temp[1],temp[2])
cors_pvals<-rbind(cors_pvals,temp)
rownames(cors_pvals)[j+(i-1)*90]=paste(colnames(pathway_drug)[1],colnames(pathway_drug)[2],sep='__')
print(i)
print(j)
}
}
colnames(cors_pvals)<-c("Corr","p_val")
dim(cors_pvals)
writeFile(cors_pvals, "ICBP_allpathwaycombos_all_drugs_1000.txt")
```
|
39e0b816e3e9191db1d67298ecc2ae24b0f96bfb
|
[
"R",
"RMarkdown"
] | 14
|
RMarkdown
|
smacneil1/bild_signatures
|
aa016a74d34bc7e894640a40de6838926435be31
|
ce7ce861bc5af85825099885817145d7794f9fee
|
refs/heads/master
|
<file_sep>package test.spring;
import org.apache.tomcat.InstanceManager;
import org.apache.tomcat.SimpleInstanceManager;
import org.eclipse.jetty.apache.jsp.JettyJasperInitializer;
import org.eclipse.jetty.plus.annotation.ContainerInitializer;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.handler.HandlerCollection;
import org.eclipse.jetty.webapp.WebAppContext;
import java.nio.file.Paths;
public class Main {
public static void main(String[] args) throws Exception {
Server server = new Server(8080);
HandlerCollection handlers = new HandlerCollection();
WebAppContext context = new WebAppContext();
context.setResourceBase(Paths.get("src", "main", "webapp").toString());
context.setDescriptor(Paths.get("target", "web.xml").toString());
context.setContextPath("/");
context.setAttribute("org.eclipse.jetty.containerInitializers", new ContainerInitializer(new JettyJasperInitializer(), null));
context.setAttribute(InstanceManager.class.getName(), new SimpleInstanceManager());
handlers.addHandler(context);
server.setHandler(handlers);
server.start();
server.join();
}
}
<file_sep>package test.spring;
import freemarker.template.TemplateException;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.eclipse.jetty.servlet.ServletHolder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.ui.freemarker.FreeMarkerConfigurationFactory;
import org.springframework.web.context.ContextLoaderListener;
import org.springframework.web.context.support.AnnotationConfigWebApplicationContext;
import org.springframework.web.servlet.DispatcherServlet;
import org.springframework.web.servlet.config.annotation.EnableWebMvc;
import org.springframework.web.servlet.view.freemarker.FreeMarkerConfigurer;
import org.springframework.web.servlet.view.freemarker.FreeMarkerViewResolver;
import java.io.IOException;
@EnableWebMvc
@Configuration
@ComponentScan({"test.spring"})
public class Main {
public static void main(String[] args) throws Exception {
Server server = new Server(8080);
AnnotationConfigWebApplicationContext context = new AnnotationConfigWebApplicationContext();
context.setConfigLocation("test.spring");
ServletContextHandler contextHandler = new ServletContextHandler();
contextHandler.setErrorHandler(null);
contextHandler.setContextPath("/");
contextHandler.addServlet(new ServletHolder(new DispatcherServlet(context)), "/*");
contextHandler.addEventListener(new ContextLoaderListener(context));
server.setHandler(contextHandler);
server.start();
server.join();
}
@Bean
public FreeMarkerViewResolver freemarkerViewResolver() {
FreeMarkerViewResolver resolver = new FreeMarkerViewResolver();
resolver.setCache(true);
resolver.setPrefix("");
resolver.setSuffix(".ftl");
resolver.setContentType("text/html;charset=UTF-8");
return resolver;
}
@Bean
public FreeMarkerConfigurer freemarkerConfig() throws IOException, TemplateException {
FreeMarkerConfigurationFactory factory = new FreeMarkerConfigurationFactory();
factory.setTemplateLoaderPath("classpath:templates");
factory.setDefaultEncoding("UTF-8");
FreeMarkerConfigurer result = new FreeMarkerConfigurer();
result.setConfiguration(factory.createConfiguration());
return result;
}
}
<file_sep>import org.eclipse.jetty.server.Connector;
import org.eclipse.jetty.server.Handler;
import org.eclipse.jetty.server.Server;
import org.eclipse.jetty.server.ServerConnector;
import org.eclipse.jetty.server.handler.ContextHandler;
import org.eclipse.jetty.server.handler.ContextHandlerCollection;
import org.eclipse.jetty.server.handler.ResourceHandler;
import org.eclipse.jetty.util.thread.QueuedThreadPool;
public class Jetty2
{
public static void main( String[] args ) throws Exception
{
QueuedThreadPool threadPool = new QueuedThreadPool(5, 1);
Server server = new Server(threadPool);
ServerConnector http = new ServerConnector(server, 1, 2);
http.setPort(8080);
http.setIdleTimeout(30000);
server.setConnectors(new Connector[] { http });
ResourceHandler resourceHandler = new ResourceHandler();
resourceHandler.setDirectoriesListed(true);
resourceHandler.setResourceBase(".");
ContextHandler fileContext = new ContextHandler();
fileContext.setContextPath("/files");
fileContext.setHandler(resourceHandler);
ContextHandler httpContext = new ContextHandler();
httpContext.setContextPath("/data");
httpContext.setHandler(new Jetty());
ContextHandlerCollection contexts = new ContextHandlerCollection();
contexts.setHandlers(new Handler[] { httpContext, fileContext });
server.setHandler(contexts);
server.start();
server.join();
}
}
<file_sep>## Servlet examples
Presentations:
https://docs.google.com/presentation/d/1LJyzAWU312WOIHk6Dqy1QgR2EhDL9WaDe3nVT8BzAFo/edit?usp=sharing
https://docs.google.com/presentation/d/10fHBgwZN6Swib5iNB-ZvhJEjysSdIBrGpDLF5_NUiFY/edit?usp=sharing
### servers
An example of creating a simple server with jetty and netty.
Each class is a separate example.
To build the applications below, run ```mvn clean package``` in the corresponding folder.
### servlet
An example of a simple servlet.
Run with ```java -jar target/servlet.jar```
```
http://localhost:8080/hello
```
### jsp
An example of using jsp.
Run with ```java -jar target/jsp.jar```
```
http://localhost:8080/
http://localhost:8080/jjj
http://localhost:8080/example.jsp
```
### spring-mvc
An example of using spring-mvc.
Run with ```java -jar target/spring.jar```
```
http://localhost:8080/
http://localhost:8080/calc/{value}
```
### spring-freemarker
Run with ```java -jar target/spring-freemarker.jar```
```
http://localhost:8080/
http://localhost:8080/test/1
http://localhost:8080/test/2
```
### jersey
Jersey.
Run with ```java -jar target/jersey.jar```
```
http://localhost:8080/status
http://localhost:8080/data?service={service}
http://localhost:8080/data/param/{test}
http://localhost:8080/error
```
### jersey-mustache
Jersey with the mustache template engine.
Run with ```java -jar target/jersey-mustache.jar```
```
http://localhost:8080/
```
<file_sep>package test.jersey.resource;
import javax.inject.Named;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
@Path("/status")
@Named
public class StatusResource {
@GET
@Produces(MediaType.TEXT_PLAIN)
public String status() {
return "ok";
}
}
<file_sep>package test.jersey.resource;
import org.glassfish.jersey.server.mvc.Template;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import java.util.HashMap;
import java.util.Map;
@Path("/")
@Produces(MediaType.TEXT_HTML)
public class MainResource {
@GET
@Template(name = "/main")
public Map<String, Object> data() {
Map<String, Object> context = new HashMap<>();
context.put("name", "Test name");
context.put("comment", "Test comment");
return context;
}
}
<file_sep>package test.jsp.filter;
import java.io.IOException;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
import javax.servlet.FilterConfig;
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
public class MainFilter implements Filter {
FilterConfig filterConfig = null;
public void init(FilterConfig filterConfig) throws ServletException {
this.filterConfig = filterConfig;
}
public void destroy() { }
public void doFilter(ServletRequest servletRequest, ServletResponse servletResponse, FilterChain filterChain)
throws IOException, ServletException {
System.out.println("Filter start!");
filterChain.doFilter(servletRequest, servletResponse);
System.out.println("Filter end!");
}
}
|
dd71377611e8dad57f32d5c22fbecfc9017d7440
|
[
"Markdown",
"Java"
] | 7
|
Java
|
Aulust/example-servlets
|
de10d9263e3c5c9998389da101d81405b66f1c5d
|
f141135f598bb80e4f626fa86fa1467251a0f965
|
refs/heads/master
|
<repo_name>Jesse2131/Proxy-Cache-Server<file_sep>/echo/CacheHandler.java
package echo;
import java.net.Socket;
import java.util.*;
public class CacheHandler extends ProxyHandler {
protected Correspondent peer;
private static Cache cache = new Cache();
public CacheHandler(Socket s) { super(s); }
public CacheHandler() { super(); }
public void initPeer(String host, int port) {
peer = new Correspondent();
peer.requestConnection(host, port);
}
protected String response(String msg) throws Exception {
System.out.println("Searching cache..." + msg);
if(cache.search(msg) != null){ //Exists within the cache
return cache.search(msg) + " from cache";
}
else{ //Not in cache, update cache
peer.send(msg);
String response = peer.receive();
cache.update(msg,response);
return response + " from peer";
}
}
}
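// Process-wide in-memory cache shared by all handler threads; search/update are synchronized for thread safety.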
class Cache extends HashMap<String, String>{
public synchronized String search(String request){
return this.get(request);
}
public synchronized void update(String request, String response){
this.put(request, response);
}
}
<file_sep>/echo/SecurityHandler.java
package echo;
import java.net.Socket;
import java.util.*;
public class SecurityHandler extends CacheHandler {
protected Correspondent peer;
private static Security security = new Security();
private boolean loggedIn = false;
public SecurityHandler(Socket s) { super(s); }
public SecurityHandler() { super(); }
public void initPeer(String host, int port) {
peer = new Correspondent();
peer.requestConnection(host, port);
}
protected String response(String msg) throws Exception {
String[] request = msg.split("\\s+");
String response = " ";
if(loggedIn == false){
if(request[0].equalsIgnoreCase("new")){ //Add user
if(security.userExists(request[1])){
response = "user already exists!";
}
else{
security.addUser(request[1],request[2]);
response = "added user";
}
}
else if(request[0].equalsIgnoreCase("login")){ //Log in
if(security.validLogin(request[1],request[2])){ //Credentials match
loggedIn = true;
response = "logged in";
}
else{ //Credentials don't match
response = "invalid login";
}
}
else{
response = "invalid input";
}
return response;
}
else{ //Once logged in, simply send to peer, security no longer needed.
peer.send(msg);
return peer.receive();
/*Connect pipelines
java echo.Server echo.MathHandler
java echo.ProxyServer echo.CacheHandler 5555 6666
java echo.ProxyServer echo.SecurityHandler 6666 7777
java echo.SimpleClient 7777*/
}
}
}
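// In-memory credential store (username -> password); methods are synchronized so concurrent handlers can safely add and validate users.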
class Security extends HashMap<String, String>{
public synchronized void addUser(String user, String pass){
System.out.println("from method: added user");
this.put(user,pass);
}
public synchronized boolean validLogin(String user, String pass){
if(this.get(user) != null && this.get(user).equals(pass)){ //Check if passwords match and check if user exists
System.out.println("from method: logged in");
return true;
}
else{
System.out.println("from method: invalid input");
return false;
}
}
public synchronized boolean userExists(String user){
if(this.containsKey(user)){
return true;
}
else{
return false;
}
}
}
|
da3b0355cf93034a747e8758364a3e3bae00540b
|
[
"Java"
] | 2
|
Java
|
Jesse2131/Proxy-Cache-Server
|
83ceb95791ef3192c7da0cd8f8625220c99b77c7
|
f565d5c19732e83641be7c9f806a3cb05f313d03
|
refs/heads/master
|
<repo_name>Mitsos83/GrabThatDesktop<file_sep>/GrabThatDesktop/Form1.cs
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Drawing;
using System.Drawing.Imaging;
using System.Runtime.InteropServices;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using System.Windows.Forms;
using System.IO;
using SharpAvi;
using SharpAvi.Codecs;
using SharpAvi.Output;
namespace GrabThatDesktop
{
public partial class Form1 : Form
{
private bool m_isRecording;
private List<ToolStripMenuItem> screens;
private int m_selectedScreenIndex;
private DimmedForm m_dimmed;
private Captura.Recorder m_recorder;
private String m_tempFile;
public Form1()
{
InitializeComponent();
}
private void updateContextStrip()
{
if (m_isRecording)
{
contextMenuStrip1.Items[2].Enabled = true;
contextMenuStrip1.Items[3].Enabled = false;
}
else
{
contextMenuStrip1.Items[2].Enabled = false;
contextMenuStrip1.Items[3].Enabled = true;
}
}
private void Form1_Load(object sender, EventArgs e)
{
screens = new List<ToolStripMenuItem>();
Size = new Size(0, 0);
Visible = false;
m_isRecording = false;
m_selectedScreenIndex = -1;
desktopSelectorStrip.Items.Clear();
foreach (Screen screen in Screen.AllScreens)
{
string display_number = Regex.Match(screen.DeviceName, @"\d+").Value;
ToolStripMenuItem item = (ToolStripMenuItem) desktopSelectorStrip.Items.Add(display_number);
screens.Add(item);
if (screen.Primary)
{
item.Checked = true;
m_selectedScreenIndex = screens.Count - 1;
}
}
updateContextStrip();
m_dimmed = new DimmedForm();
}
private void ShowSelectedScreen()
{
if (m_selectedScreenIndex < 0 || m_selectedScreenIndex > screens.Count - 1)
{
return;
}
m_dimmed.Hide();
m_dimmed.Bounds = Screen.AllScreens[m_selectedScreenIndex].Bounds;
m_dimmed.Location = Screen.AllScreens[m_selectedScreenIndex].WorkingArea.Location;
m_dimmed.Show();
}
private void Form1_Shown(object sender, EventArgs e)
{
Hide();
ShowSelectedScreen();
}
private void desktopSelectorStrip_Opening(object sender, CancelEventArgs e)
{
}
private void StartRecording()
{
m_isRecording = true;
updateContextStrip();
m_dimmed.Hide();
if (m_recorder != null)
{
m_recorder.Dispose();
m_recorder = null;
}
m_tempFile = Path.GetTempFileName();
m_recorder = new Captura.Recorder(new Captura.RecorderParams(m_tempFile, 10, SharpAvi.KnownFourCCs.Codecs.X264, 100, m_selectedScreenIndex));
}
private void StopRecording()
{
m_isRecording = false;
updateContextStrip();
m_dimmed.Show();
m_recorder.Dispose();
m_recorder = null;
saveFileDialog1.ShowDialog();
}
private void contextMenuStrip1_ItemClicked(object sender, ToolStripItemClickedEventArgs e)
{
if (e.ClickedItem == exitMenuItem)
{
System.Windows.Forms.Application.Exit();
}
if (e.ClickedItem == startRecording)
{
StartRecording();
return;
}
if (e.ClickedItem == stopRecording)
{
StopRecording();
return;
}
}
private void desktopSelectorStrip_ItemClicked(object sender, ToolStripItemClickedEventArgs e)
{
ToolStripMenuItem selected = screens[0];
foreach(ToolStripMenuItem item in screens)
{
item.Checked = false;
if (e.ClickedItem == item)
{
selected = item;
}
}
selected.Checked = true;
Int32.TryParse(selected.Text.Trim(), out m_selectedScreenIndex);
m_selectedScreenIndex--;
ShowSelectedScreen();
}
private void saveFileDialog1_FileOk(object sender, CancelEventArgs e)
{
File.Copy(m_tempFile, saveFileDialog1.FileName, true);
File.Delete(m_tempFile);
}
}
namespace Captura
{
// Used to Configure the Recorder
public class RecorderParams
{
public RecorderParams(string filename, int FrameRate, FourCC Encoder, int Quality, int ScreenIndex)
{
FileName = filename;
FramesPerSecond = FrameRate;
Codec = Encoder;
this.Quality = Quality;
Location = Screen.AllScreens[ScreenIndex].Bounds.Location;
Height = Screen.AllScreens[ScreenIndex].Bounds.Height;
Width = Screen.AllScreens[ScreenIndex].Bounds.Width;
}
string FileName;
public int FramesPerSecond, Quality;
FourCC Codec;
public Point Location { get; private set; }
public int Height { get; private set; }
public int Width { get; private set; }
public AviWriter CreateAviWriter()
{
return new AviWriter(FileName)
{
FramesPerSecond = FramesPerSecond,
EmitIndex1 = true,
};
}
public IAviVideoStream CreateVideoStream(AviWriter writer)
{
// Select encoder type based on FOURCC of codec
if (Codec == KnownFourCCs.Codecs.Uncompressed)
return writer.AddUncompressedVideoStream(Width, Height);
else if (Codec == KnownFourCCs.Codecs.MotionJpeg)
return writer.AddMotionJpegVideoStream(Width, Height, Quality);
else
{
return writer.AddMpeg4VideoStream(Width, Height, (double)writer.FramesPerSecond,
// It seems that all tested MPEG-4 VfW codecs ignore the quality affecting parameters passed through VfW API
// They only respect the settings from their own configuration dialogs, and Mpeg4VideoEncoder currently has no support for this
quality: Quality,
codec: Codec,
// Most of VfW codecs expect single-threaded use, so we wrap this encoder to special wrapper
// Thus all calls to the encoder (including its instantiation) will be invoked on a single thread although encoding (and writing) is performed asynchronously
forceSingleThreadedAccess: true);
}
}
}
public class Recorder : IDisposable
{
#region Fields
AviWriter writer;
RecorderParams Params;
IAviVideoStream videoStream;
Thread screenThread;
ManualResetEvent stopThread = new ManualResetEvent(false);
#endregion
public Recorder(RecorderParams Params)
{
this.Params = Params;
// Create AVI writer and specify FPS
writer = Params.CreateAviWriter();
// Create video stream
videoStream = Params.CreateVideoStream(writer);
// Set only name. Other properties were when creating stream,
// either explicitly by arguments or implicitly by the encoder used
videoStream.Name = "Captura";
screenThread = new Thread(RecordScreen)
{
Name = typeof(Recorder).Name + ".RecordScreen",
IsBackground = true
};
screenThread.Start();
}
public void Dispose()
{
stopThread.Set();
screenThread.Join();
// Close writer: the remaining data is written to a file and file is closed
writer.Close();
stopThread.Dispose();
}
void RecordScreen()
{
var frameInterval = TimeSpan.FromSeconds(1 / (double)writer.FramesPerSecond);
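// One frame's worth of pixels: 4 bytes per pixel (32 bpp), matching the Format32bppRgb lock used in Screenshot().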
var buffer = new byte[Params.Width * Params.Height * 4];
Task videoWriteTask = null;
var timeTillNextFrame = TimeSpan.Zero;
while (!stopThread.WaitOne(timeTillNextFrame))
{
var timestamp = DateTime.Now;
Screenshot(buffer);
// Wait until the previous frame has been written
videoWriteTask?.Wait();
// Start asynchronous (encoding and) writing of the new frame
videoWriteTask = videoStream.WriteFrameAsync(true, buffer, 0, buffer.Length);
timeTillNextFrame = timestamp + frameInterval - DateTime.Now;
if (timeTillNextFrame < TimeSpan.Zero)
timeTillNextFrame = TimeSpan.Zero;
}
// Wait until the last frame has been written
videoWriteTask?.Wait();
}
public void Screenshot(byte[] Buffer)
{
using (var BMP = new Bitmap(Params.Width, Params.Height))
{
using (var g = Graphics.FromImage(BMP))
{
g.CopyFromScreen(Params.Location, new Point(0,0), new Size(Params.Width, Params.Height), CopyPixelOperation.SourceCopy);
g.Flush();
var bits = BMP.LockBits(new Rectangle(0, 0, Params.Width, Params.Height), ImageLockMode.ReadOnly, PixelFormat.Format32bppRgb);
Marshal.Copy(bits.Scan0, Buffer, 0, Buffer.Length);
BMP.UnlockBits(bits);
}
}
}
}
}
}
|
6ca6430e160c1177b2f8c94541eb5788b628ca12
|
[
"C#"
] | 1
|
C#
|
Mitsos83/GrabThatDesktop
|
3c04ca91358d6433a0569c212d1daf1313fd554f
|
37698b84ab40580a8ef4b0f610e7136b743bbf20
|
refs/heads/master
|
<file_sep>import React, {useState, useEffect} from "react";
export const MoviesTable = () => {
const [movies, setMovies] = useState([])
const fetchMovies = () => {
setMovies([
{id: 1, category: "Documentary", price: "$49.99", stocked: true, name: "Pulp Fiction"},
{id: 2, category: "Documentary", price: "$9.99", stocked: true, name: "<NAME>"},
{id: 3, category: "Documentary", price: "$29.99", stocked: false, name: "Kill Bill: Vol 2"},
{id: 4, category: "Comedy", price: "$99.99", stocked: true, name: "Avengers: War of Infinity"},
{id: 5, category: "Comedy", price: "$99.99", stocked: false, name: "Inception"},
{id: 6, category: "Comedy", price: "$99.99", stocked: true, name: "Reservoir dogs"}
]);
}
useEffect(() => {
fetchMovies();
}, [])
return (
<div>
<ul style={{textAlign: 'left'}}>
{
movies.map(movie => {
return (
<li key={movie.id}>
{movie.name}
</li>
)
})
}
</ul>
</div>
)
}
<file_sep>import React from "react";
export class CounterButton extends React.Component {
constructor(props) {
super(props);
this.state = {
color: 'green'
};
}
render() {
return (
<button
style={{background: this.state.color}}
onClick={() => this.props.update(this.props.type)}>
{this.props.type}
</button>
);
}
}
<file_sep>import React from "react";
export class Title extends React.Component {
render() {
return React.createElement("h1", null, `Say ${this.props.name} from React`);
}
}
<file_sep>import React from "react";
import {CounterButton} from "./CounterButton";
export class Counter extends React.PureComponent {
constructor(props) {
super(props);
this.state = {
count: 1
};
}
updateCounter = (e) => {
if (e === 'minus') {
this.setState({count: this.state.count - 1});
} else {
this.setState({count: this.state.count + 1});
}
}
render() {
return (
<div className="counter">
<CounterButton type="minus" update={this.updateCounter} />
<p className="counter-text">{this.state.count}</p>
<CounterButton type="plus" update={this.updateCounter} />
</div>
)
}
}
|
bbca33111c88d38bf1e50937f91521237cefa07b
|
[
"JavaScript"
] | 4
|
JavaScript
|
manhengest/react-start
|
c9ee89b17e09d18cd933d77bc56b564de0cb3bbb
|
dd58951056154358eddaa9005b856bf530895923
|
refs/heads/master
|
<file_sep>using System;
using System.Collections.Generic;
using System.Text;
using SDL2;
using static SDL2.SDL;
using static SDL2.SDL_image;
namespace ProcSharpCore
{
class ContentManager
{
Dictionary<string, IntPtr> images = new Dictionary<string, IntPtr>();
IntPtr renderer;
string contentFolderPath;
public ContentManager(IntPtr renderer, string contentFolderPath)
{
this.renderer = renderer;
this.contentFolderPath = contentFolderPath;
}
public PImage LoadImage(string url)
{
PImage pimage = new PImage(LoadTexture(url));
return pimage;
}
IntPtr LoadTexture(string url)
{
url = contentFolderPath + url;
if (images.ContainsKey(url))
{
return images[url];
}
IntPtr texture;
IntPtr loadedSurface = IMG_Load(url);
if (loadedSurface == null)
{
throw new Exception("Could not load image " + url);
}
texture = SDL_CreateTextureFromSurface(renderer, loadedSurface);
if (texture == null)
{
throw new Exception("Unable to create texture from image " + url);
}
images.Add(url, texture);
return texture;
}
internal void Destroy()
{
foreach (var kvp in images)
{
SDL_DestroyTexture(kvp.Value);
}
images.Clear();
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Text;
using ProcSharpCore;
using static ProcSharpCore.ProcSharp;
namespace ProcSharpGame
{
class Game
{
public void Setup()
{
}
public void Draw()
{
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Text;
using System.Reflection;
using static SDL2.SDL;
namespace ProcSharpCore
{
internal class Mouse
{
private int x = 0;
private int y = 0;
private int previousX = 0;
private int previousY = 0;
private object gameObject;
private MouseState mouseState = new MouseState();
private MethodInfo mouseMoved;
private MethodInfo mouseDragged;
private MethodInfo mousePressed;
private MethodInfo mouseReleased;
private MethodInfo mouseClicked;
private MethodInfo mouseWheel;
private uint lastPressedButton = 0;
public Mouse(Type gameType, object gameObject)
{
this.gameObject = gameObject;
mouseMoved = gameType.GetMethod("MouseMoved");
mousePressed = gameType.GetMethod("MousePressed");
mouseReleased = gameType.GetMethod("MouseReleased");
mouseClicked = gameType.GetMethod("MouseClicked");
mouseDragged = gameType.GetMethod("MouseDragged");
mouseWheel = gameType.GetMethod("MouseWheel", new Type[] { typeof(MouseEvent)});
}
internal void Update()
{
previousX = x;
previousY = y;
SDL_GetMouseState(out x, out y);
if (x != previousX || y != previousY)
{
if (AnyButtonDown())
{
mouseDragged?.Invoke(gameObject, null);
}
else
{
mouseMoved?.Invoke(gameObject, null);
}
}
}
internal int MouseX
{
get { return x; }
}
internal int MouseY
{
get { return y; }
}
internal int PMouseX
{
get { return previousX; }
}
internal int PMouseY
{
get { return previousY; }
}
internal void MouseButtonDown(SDL_Event e)
{
lastPressedButton = e.button.button;
mouseState.ButtonPressed(e.button.button);
mousePressed?.Invoke(gameObject, null);
}
internal void MouseButtonUp(SDL_Event e)
{
mouseState.ButtonReleased(e.button.button);
if (!AnyButtonDown())
{
lastPressedButton = 0;
}
mouseReleased?.Invoke(gameObject, null);
mouseClicked?.Invoke(gameObject, null);
}
internal bool AnyButtonDown()
{
return mouseState.AnyButtonDown();
}
internal uint MouseButton
{
get { return lastPressedButton; }
}
internal void MouseWheel(SDL_Event e)
{
MouseEvent mouseEvent = new MouseEvent();
mouseEvent.SetCount(e.wheel.y);
mouseWheel?.Invoke(gameObject, new object[] { mouseEvent });
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Text;
using static SDL2.SDL;
namespace ProcSharpCore
{
public class PImage
{
internal IntPtr texture;
internal int width;
internal int height;
internal PImage(IntPtr texture)
{
this.texture = texture;
SDL_QueryTexture(texture, out _, out _, out width, out height);
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Text;
using ProcSharpCore;
using static ProcSharpCore.ProcSharp;
namespace ProcSharpGame
{
class Game
{
public void Setup()
{
Size(800, 600);
}
public void Draw()
{
Background(255, 255, 255);
Line(100, 100, MouseX, MouseY);
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Text;
using static SDL2.SDL;
using static SDL2.SDL_mixer;
namespace ProcSharpCore
{
public class SoundFile
{
internal static string contentFolderPath = "content/";
internal static List<SoundFile> soundFiles = new List<SoundFile>();
private IntPtr mixChunk;
private object parent;
private string fullPath;
/// <summary>
/// Creates and loads a SoundFile into memory
/// </summary>
/// <param name="parent">The ProcSharp game using this SoundFile</param>
/// <param name="path">Path to the file to be loaded</param>
public SoundFile(object parent, string path)
{
this.parent = parent;
fullPath = contentFolderPath + path;
soundFiles.Add(this);
mixChunk = Mix_LoadWAV(fullPath);
if (mixChunk == IntPtr.Zero)
{
throw new Exception($"Could not load the file {path}");
}
}
/// <summary>
/// Plays the soundfile
/// </summary>
public void Play()
{
Mix_PlayChannel(-1, mixChunk, 0);
}
internal void Destroy()
{
Mix_FreeChunk(mixChunk);
}
}
}
<file_sep>using System;
using SDL2;
using static SDL2.SDL;
using static SDL2.SDL_image;
using static SDL2.SDL_mixer;
using static SDL2.SDL_ttf;
namespace ProcSharpCore
{
public static class ProcSharp
{
private static IntPtr window;
private static IntPtr renderer;
private static Type gameType;
private static object gameObject;
private static bool quit = false;
private static Initializer initializer;
private static Mouse mouse;
private static Keyboard keyboard;
private static ContentManager contentManager;
private static SDL_Color fillColor;
private static SDL_Color strokeColor;
private static PFont activeFont;
#region PUBLIC_CONSTANTS
/// <summary>
/// The left mouse button
/// </summary>
public const uint LEFT = 1;
/// <summary>
/// The middle mouse button
/// </summary>
public const uint MIDDLE = 2;
/// <summary>
/// The right mouse button
/// </summary>
public const uint RIGHT = 3;
#endregion
public static void Initialize(Type type, object game)
{
gameType = type;
gameObject = game;
// Default fill white
fillColor.r = 255;
fillColor.g = 255;
fillColor.b = 255;
fillColor.a = 255;
// Default stroke gray
strokeColor.r = 100;
strokeColor.g = 100;
strokeColor.b = 100;
strokeColor.a = 255;
initializer = new Initializer();
initializer.Initialize(ref window, ref renderer);
// Set up all services
mouse = new Mouse(gameType, gameObject);
contentManager = new ContentManager(renderer, "content/");
keyboard = new Keyboard(gameType, gameObject);
SDL_RenderClear(renderer);
// Call setup from the user
var userSetup = gameType.GetMethod("Setup");
userSetup?.Invoke(gameObject, null);
SDL_RenderPresent(renderer);
// Start the main loop
MainLoop();
}
private static void MainLoop()
{
var userDraw = gameType.GetMethod("Draw");
SDL_Event e;
while (!quit)
{
// Update services
mouse.Update();
while (SDL_PollEvent(out e) != 0)
{
switch (e.type)
{
case SDL_EventType.SDL_QUIT:
quit = true;
break;
case SDL_EventType.SDL_MOUSEBUTTONDOWN:
mouse.MouseButtonDown(e);
break;
case SDL_EventType.SDL_MOUSEBUTTONUP:
mouse.MouseButtonUp(e);
break;
case SDL_EventType.SDL_MOUSEWHEEL:
mouse.MouseWheel(e);
break;
case SDL_EventType.SDL_KEYDOWN:
keyboard.KeyDown(e);
break;
case SDL_EventType.SDL_KEYUP:
keyboard.KeyUp(e);
break;
}
}
// Call the users draw function
userDraw?.Invoke(gameObject, null);
// Render to the screen
SDL_RenderPresent(renderer);
}
InternalExit();
}
private static void InternalExit()
{
// CLEANUP
contentManager.Destroy();
foreach (var sf in SoundFile.soundFiles)
{
sf.Destroy();
}
foreach (var font in PFont.fonts)
{
font.Destroy();
}
SDL_DestroyRenderer(renderer);
SDL_DestroyWindow(window);
SDL_AudioQuit();
TTF_Quit();
Mix_Quit();
IMG_Quit();
SDL_Quit();
}
private static void SetColor(SDL_Color color)
{
SDL_SetRenderDrawColor(renderer, color.r, color.g, color.b, color.a);
}
#region Public methods
#region Colors and structure
/// <summary>
/// Sets the fill color
/// </summary>
/// <param name="rgb">The RGB part of the color (all get the same value)</param>
public static void Fill(float rgb)
{
fillColor.r = Convert.ToByte(rgb);
fillColor.g = Convert.ToByte(rgb);
fillColor.b = Convert.ToByte(rgb);
fillColor.a = 255;
}
/// <summary>
/// Sets the fill color
/// </summary>
/// <param name="r">Red part of the color, 0 - 255</param>
/// <param name="g">Green part of the color, 0 - 255</param>
/// <param name="b">Blue part of the color, 0 - 255</param>
/// <param name="a">Alpha part of the color, 0 - 255</param>
public static void Fill(float r, float g, float b, float a)
{
fillColor.r = Convert.ToByte(r);
fillColor.g = Convert.ToByte(g);
fillColor.b = Convert.ToByte(b);
fillColor.a = Convert.ToByte(a);
}
/// <summary>
/// Sets the stroke color
/// </summary>
/// <param name="rgb">The RGB part of the color (all get the same value)</param>
public static void Stroke(float rgb)
{
strokeColor.r = Convert.ToByte(rgb);
strokeColor.g = Convert.ToByte(rgb);
strokeColor.b = Convert.ToByte(rgb);
strokeColor.a = 255;
}
/// <summary>
/// Sets the stroke color
/// </summary>
/// <param name="r">Red part of the color, 0 - 255</param>
/// <param name="g">Green part of the color, 0 - 255</param>
/// <param name="b">Blue part of the color, 0 - 255</param>
/// <param name="a">Alpha part of the color, 0 - 255</param>
public static void Stroke(float r, float g, float b, float a)
{
strokeColor.r = Convert.ToByte(r);
strokeColor.g = Convert.ToByte(g);
strokeColor.b = Convert.ToByte(b);
strokeColor.a = Convert.ToByte(a);
}
/// <summary>
/// Sets the window size
/// </summary>
/// <param name="width">Width of the window in pixels</param>
/// <param name="height">Height of the window in pixels</param>
public static void Size(int width, int height)
{
SDL_SetWindowSize(window, width, height);
SDL_SetWindowPosition(window, SDL_WINDOWPOS_CENTERED, SDL_WINDOWPOS_CENTERED);
}
/// <summary>
/// Exits the program after the current Draw has finished
/// </summary>
public static void Exit()
{
quit = true;
}
#endregion
#region Mouse
/// <summary>
/// The X-coordinate of the mouse
/// </summary>
public static int MouseX
{
get
{ return mouse.MouseX; }
}
/// <summary>
/// The Y-coordinate of the mouse
/// </summary>
public static int MouseY
{
get
{ return mouse.MouseY; }
}
/// <summary>
/// The X-coordinate of the mouse during the previous Draw
/// </summary>
public static int PMouseX
{
get { return mouse.PMouseX; }
}
/// <summary>
/// The Y-coordinate of the mouse during the previous Draw
/// </summary>
public static int PMouseY
{
get { return mouse.PMouseY; }
}
/// <summary>
/// true if any mouse button is currently being pressed, otherwise false
/// </summary>
public static bool MouseIsPressed
{
get { return mouse.AnyButtonDown(); }
}
/// <summary>
/// The last pressed mouse button. Is either LEFT, MIDDLE or RIGHT
/// </summary>
public static uint MouseButton
{
get { return mouse.MouseButton; }
}
#endregion
#region Keyboard
/// <summary>
/// The keycode of the latest key that was pressed
/// </summary>
public static string Key
{
get { return keyboard.LatestKey(); }
}
/// <summary>
/// true if any key on the keyboard is currently pressed down
/// </summary>
public static bool KeyIsPressed
{
get { return keyboard.AnyKeyPressed(); }
}
#endregion
#region Images
/// <summary>
/// Loads the image with the specified filename
/// </summary>
/// <param name="file"></param>
/// <returns></returns>
public static PImage LoadImage(string file)
{
return contentManager.LoadImage(file);
}
/// <summary>
/// Draws an image onto the screen
/// </summary>
/// <param name="pimage">The PImage to draw</param>
/// <param name="x">X-coordinate of the top left corner</param>
/// <param name="y">Y-coordinate of the top left corner</param>
public static void Image(PImage pimage, float x, float y)
{
SDL_Rect destRect;
destRect.x = (int)x;
destRect.y = (int)y;
destRect.w = pimage.width;
destRect.h = pimage.height;
SDL_RenderCopy(renderer, pimage.texture, IntPtr.Zero, ref destRect);
}
/// <summary>
/// Draws an image onto the screen
/// </summary>
/// <param name="pimage"></param>
/// <param name="x">X-coordinate of the top left corner</param>
/// <param name="y">Y-coordinate of the top left corner</param>
/// <param name="width">The width that the image will be drawn in</param>
/// <param name="height">The height that the image will be drawn in</param>
public static void Image(PImage pimage, float x, float y, float width, float height)
{
SDL_Rect destRect;
destRect.x = (int)x;
destRect.y = (int)y;
destRect.w = (int)width;
destRect.h = (int)height;
SDL_RenderCopy(renderer, pimage.texture, IntPtr.Zero, ref destRect);
}
#endregion
#region Random
private static Random internalRandom;
/// <summary>
/// Returns a random float between 0 and a given upper bound.
/// </summary>
/// <param name="hi">The upper bound of the range.</param>
/// <returns>A random float between 0 and a given upper bound.</returns>
public static float Random(float hi)
{
if (hi == 0)
{
return 0;
}
if (internalRandom == null)
{
internalRandom = new Random();
}
return (float) internalRandom.NextDouble() * hi;
}
/// <summary>
/// Returns a random float between two given boundaries.
/// </summary>
/// <param name="lo">The lower bound of the range.</param>
/// <param name="hi">The upper bound of the range.</param>
/// <returns>A random float between two given boundaries.</returns>
public static float Random(float lo, float hi)
{
if (lo >= hi) return lo;
float diff = hi - lo;
return Random(diff) + lo;
}
/// <summary>
/// Returns a float from a random series of numbers having a mean of 0 and standard deviation of 1.
/// </summary>
/// <returns>A random float from a normal distribution with a mean of 0 and a standard deviation of 1.</returns>
public static float RandomGaussian()
{
if (internalRandom == null)
{
internalRandom = new Random();
}
// Use the Box-Muller transform to produce a random Gaussian sample.
// Note that we assume mean = 0 and stDev = 1.
double uniRand1 = 1.0 - internalRandom.NextDouble();
double uniRand2 = 1.0 - internalRandom.NextDouble();
double randGaussian = Math.Sqrt(-2.0 * Math.Log(uniRand1)) * Math.Sin(2.0 * Math.PI * uniRand2);
return (float) randGaussian;
}
/// <summary>
/// Sets the seed value for the random generator used in the Random methods.
/// </summary>
/// <param name="seed">The seed with which the RNG will be initialized.</param>
public static void RandomSeed(int seed)
{
internalRandom = new Random(seed);
}
// Declare shared private constants for noise-related functions, which are implemented using Perlin noise.
// Perlin constants
private const int PerlinYWrapB = 4;
private const int PerlinYWrap = 1 << PerlinYWrapB;
private const int PerlinZWrapB = 8;
private const int PerlinZWrap = 1 << PerlinZWrapB;
private const int PerlinSize = 4095;
// Perlin variables that affect the "smoothness" of the noise
private static int perlinOctaves = 4; // PO = 4 results in "medium smooth" noise
private static float perlinAmpFalloff = 0.5f;
// Shared Perlin variables
private static int perlinTwopi, perlinPi;
private static float[] perlinCosTable;
private static float[] perlin;
private static Random perlinRandom;
// Pre-calculate the cosine lookup table once in Noise(x, y, z) and reuse it afterwards to improve performance
private static float[] cosLookupTable;
private const float SinCosPrecision = 0.5f;
private const int SinCosLength = (int)(360f / SinCosPrecision);
/// <summary>
/// Returns the Perlin noise value at specified coordinates.
/// </summary>
/// <param name="x">The x-coordinate in noise space.</param>
/// <param name="y">The y-coordinate in noise space.</param>
/// <param name="z">The z-coordinate in noise space.</param>
/// <returns>A float equal to the Perlin noise value at the specified x, y, and z coordinates.</returns>
public static float Noise(float x, float y, float z)
{
if (cosLookupTable == null)
{
float degToRad = (float)Math.PI / 180.0f;
cosLookupTable = new float[SinCosLength];
for (int i = 0; i < SinCosLength; i++)
{
cosLookupTable[i] = (float) Math.Cos(i * degToRad * SinCosPrecision);
}
}
if (perlin == null)
{
if (perlinRandom == null) perlinRandom = new Random();
perlin = new float[PerlinSize + 1];
for (int i = 0; i < PerlinSize + 1; i++)
{
perlin[i] = (float) perlinRandom.NextDouble();
}
perlinCosTable = cosLookupTable;
perlinTwopi = perlinPi = SinCosLength;
perlinPi >>= 1;
}
// Declare variables used in the Perlin noise algorithm
if (x < 0) x = -x;
if (y < 0) y = -y;
if (z < 0) z = -z;
int xi = (int)x, yi = (int)y, zi = (int)z;
float xf = x - xi;
float yf = y - yi;
float zf = z - zi;
float rxf, ryf;
float r = 0;
float ampl = 0.5f;
float n1, n2, n3;
// Repeat the Perlin noise algorithm as many times as specified by the level of detail (perlinOctaves)
for (int i = 0; i < perlinOctaves; i++)
{
int of = xi + (yi << PerlinYWrapB) + (zi << PerlinZWrapB);
rxf = NoiseFsc(xf);
ryf = NoiseFsc(yf);
n1 = perlin[of & PerlinSize];
n1 += rxf * (perlin[(of + 1) & PerlinSize] - n1);
n2 = perlin[(of + PerlinYWrap) & PerlinSize];
n2 += rxf * (perlin[(of + PerlinYWrap + 1) & PerlinSize] - n2);
n1 += ryf * (n2 - n1);
of += PerlinZWrap;
n2 = perlin[of & PerlinSize];
n2 += rxf * (perlin[(of + 1) & PerlinSize] - n2);
n3 = perlin[(of + PerlinYWrap) & PerlinSize];
n3 += rxf * (perlin[(of + PerlinYWrap + 1) & PerlinSize] - n3);
n2 += ryf * (n3 - n2);
n1 += NoiseFsc(zf) * (n2 - n1);
r += n1 * ampl;
ampl *= perlinAmpFalloff;
xi <<= 1; xf *= 2;
yi <<= 1; yf *= 2;
zi <<= 1; zf *= 2;
if (xf >= 1.0f) { xi++; xf--; }
if (yf >= 1.0f) { yi++; yf--; }
if (zf >= 1.0f) { zi++; zf--; }
}
return r;
}
/// <summary>
/// Returns the Perlin noise value at specified x and y coordinates with z = 0.
/// </summary>
/// <param name="x">The x-coordinate in noise space.</param>
/// <param name="y">The y-coordinate in noise space.</param>
/// <returns>A float equal to the Perlin noise value at the specified x and y coordinates with z = 0.</returns>
public static float Noise(float x, float y)
{
return Noise(x, y, 0f);
}
/// <summary>
/// Returns the Perlin noise value at the specified x-coordinate with y = 0 and z = 0.
/// </summary>
/// <param name="x">The x-coordinate in noise space.</param>
/// <returns>A float equal to the Perlin noise value at the specified x-coordinate with y = 0 and z = 0.</returns>
public static float Noise(float x)
{
return Noise(x, 0f, 0f);
}
/// <summary>
/// Returns a transformed version of cos(i) for a given float i for usage in the Perlin noise algorithm.
/// </summary>
/// <param name="i">The scalar by which pi will be multiplied in the transformation.</param>
/// <returns>A transformed cosine value in the range [0, 1].</returns>
private static float NoiseFsc(float i)
{
return 0.5f * (1.0f - perlinCosTable[(int) (i * perlinPi) % perlinTwopi]);
}
/// <summary>
/// Adjusts the level of detail produced by the Perlin noise function.
/// </summary>
/// <param name="levelOfDetail">The desired level of detail for the Perlin noise function.</param>
public static void NoiseDetail(int levelOfDetail)
{
if (levelOfDetail > 0) perlinOctaves = levelOfDetail;
}
/// <summary>
/// Adjusts the level of detail and character produced by the Perlin noise function.
/// </summary>
/// <param name="levelOfDetail">The desired level of detail for the Perlin noise function.</param>
/// <param name="falloff">The desired falloff factor for each octave of the level of detail.</param>
public static void NoiseDetail(int levelOfDetail, float falloff)
{
if (levelOfDetail > 0) perlinOctaves = levelOfDetail;
if (falloff > 0) perlinAmpFalloff = falloff;
}
/// <summary>
/// Sets the seed value for the random generator used in the Noise methods.
/// </summary>
/// <param name="seed">The seed with which the random generator will be initialized.</param>
public static void NoiseSeed(int seed)
{
perlinRandom = new Random(seed);
perlin = null; // Reset the Perlin table when changing the seed
}
#endregion
#region Primitive shapes
/// <summary>
/// Draws a square in the currently selected color
/// </summary>
/// <param name="x">X-coordinate of the top left corner of the square</param>
/// <param name="y">Y-coordinate of the top left corner of the square</param>
/// <param name="extent">The length of the squares side</param>
public static void Square(float x, float y, float extent)
{
SDL_Rect drawRect;
drawRect.x = (int)x;
drawRect.y = (int)y;
drawRect.h = (int)extent;
drawRect.w = (int)extent;
// Draw the filling
SetColor(fillColor);
SDL_RenderFillRect(renderer, ref drawRect);
// Draw the stroke
SetColor(strokeColor);
SDL_RenderDrawRect(renderer, ref drawRect);
}
/// <summary>
/// Draws a point, a coordinate in space at the dimension of one pixel
/// </summary>
/// <param name="x">x-coordinate of the point</param>
/// <param name="y">y-coordinate of the point</param>
public static void Point(float x, float y)
{
SDL_Point drawPoint;
drawPoint.x = (int)x;
drawPoint.y = (int)y;
// Draw the stroke
SetColor(strokeColor);
SDL_RenderDrawPoint(renderer, drawPoint.x, drawPoint.y);
}
/// <summary>
/// Draws a rectangle to the screen
/// </summary>
/// <param name="a">x-coordinate of the rectangle by default</param>
/// <param name="b">y-coordinate of the rectangle by default</param>
/// <param name="c">width of the rectangle by default</param>
/// <param name="d">height of the rectangle by default</param>
public static void Rectangle(float a, float b, float c, float d)
{
SDL_Rect drawRect;
drawRect.x = (int)a;
drawRect.y = (int)b;
drawRect.w = (int)c;
drawRect.h = (int)d;
// Draw the filling
SetColor(fillColor);
SDL_RenderFillRect(renderer, ref drawRect);
// Draw the stroke
SetColor(strokeColor);
SDL_RenderDrawRect(renderer, ref drawRect);
}
/// <summary>
/// Draw a triangle. It is a plane created by connecting three points.
/// </summary>
/// <param name="x1">x-coordinate of the first point</param>
/// <param name="y1">y-coordinate of the first point</param>
/// <param name="x2">x-coordinate of the second point</param>
/// <param name="y2">y-coordinate of the second point</param>
/// <param name="x3">x-coordinate of the third point</param>
/// <param name="y3">y-coordinate of the third point</param>
public static void Triangle(float x1, float y1, float x2, float y2, float x3, float y3)
{
SDL_Point pointA;
SDL_Point pointB;
SDL_Point pointC;
pointA.x = (int)x1;
pointA.y = (int)y1;
pointB.x = (int)x2;
pointB.y = (int)y2;
pointC.x = (int)x3;
pointC.y = (int)y3;
// Draw the stroke
SetColor(strokeColor);
SDL_RenderDrawLine(renderer, pointA.x, pointA.y, pointB.x, pointB.y);
SDL_RenderDrawLine(renderer, pointB.x, pointB.y, pointC.x, pointC.y);
SDL_RenderDrawLine(renderer, pointC.x, pointC.y, pointA.x, pointA.y);
}
/// <summary>
/// Draw a Quad. A quad is a quadrilateral, a four sided polygon.
/// </summary>
/// <param name="x1">x-coordinate of the first point</param>
/// <param name="y1">y-coordinate of the first point</param>
/// <param name="x2">x-coordinate of the second point</param>
/// <param name="y2">y-coordinate of the second point</param>
/// <param name="x3">x-coordinate of the third point</param>
/// <param name="y3">y-coordinate of the third point</param>
/// <param name="x4">x-coordinate of the fourth point</param>
/// <param name="y4">y-coordinate of the fourth point</param>
public static void Quad(float x1, float y1, float x2, float y2, float x3, float y3, float x4, float y4)
{
SDL_Point pointA;
SDL_Point pointB;
SDL_Point pointC;
SDL_Point pointD;
pointA.x = (int)x1;
pointA.y = (int)y1;
pointB.x = (int)x2;
pointB.y = (int)y2;
pointC.x = (int)x3;
pointC.y = (int)y3;
pointD.x = (int)x4;
pointD.y = (int)y4;
// Draw the stroke
SetColor(strokeColor);
SDL_RenderDrawLine(renderer, pointA.x, pointA.y, pointB.x, pointB.y);
SDL_RenderDrawLine(renderer, pointB.x, pointB.y, pointC.x, pointC.y);
SDL_RenderDrawLine(renderer, pointC.x, pointC.y, pointD.x, pointD.y);
SDL_RenderDrawLine(renderer, pointD.x, pointD.y, pointA.x, pointA.y);
}
/// <summary>
/// Draws an ellipse (oval) to the screen. An ellipse with equal width and height is a circle.
/// </summary>
/// <param name="x0">x-coordinate of the first point</param>
/// <param name="y0">y-coordinate of the first point</param>
/// <param name="radiusX">width of the ellipse by default</param>
/// <param name="radiusY">height of the ellipse by default</param>
public static void Ellipse(float x0, float y0, float radiusX, float radiusY)
{
SDL_Point point0;
point0.x = (int)x0;
point0.y = (int)y0;
float pi = 3.1415926535897932384626F;
float pih = pi / 2.0F;
const int prec = 27;
float theta = 0;
int x = (int)(radiusX * Math.Cos(theta));
int y = (int)(radiusY * Math.Sin(theta));
int x1 = x;
int y1 = y;
// Draw the stroke
SetColor(strokeColor);
float step = pih / prec;
for (theta = step; theta <= pih; theta += step)
{
x1 = (int)(radiusX * Math.Cos(theta) + 0.5);
y1 = (int)(radiusY * Math.Sin(theta) + 0.5);
if ((x != x1) || (y != y1))
{
SDL_RenderDrawLine(renderer, point0.x + x, point0.y - y, point0.x + x1, point0.y - y1);
SDL_RenderDrawLine(renderer, point0.x - x, point0.y - y, point0.x - x1, point0.y - y1);
SDL_RenderDrawLine(renderer, point0.x - x, point0.y + y, point0.x - x1, point0.y + y1);
SDL_RenderDrawLine(renderer, point0.x + x, point0.y + y, point0.x + x1, point0.y + y1);
}
x = x1;
y = y1;
}
if (x != 0)
{
x = 0;
SDL_RenderDrawLine(renderer, point0.x + x, point0.y - y, point0.x + x1, point0.y - y1);
SDL_RenderDrawLine(renderer, point0.x - x, point0.y - y, point0.x - x1, point0.y - y1);
SDL_RenderDrawLine(renderer, point0.x - x, point0.y + y, point0.x - x1, point0.y + y1);
SDL_RenderDrawLine(renderer, point0.x + x, point0.y + y, point0.x + x1, point0.y + y1);
}
}
/// <summary>
/// Draws a line (a direct path between two points) to the screen.
/// </summary>
/// <param name="x1">x-coordinate of the first point</param>
/// <param name="y1">y-coordinate of the first point</param>
/// <param name="x2">x-coordinate of the second point</param>
/// <param name="y2">y-coordinate of the second point</param>
public static void Line(float x1, float y1, float x2, float y2)
{
SDL_Point pointA;
SDL_Point pointB;
pointA.x = (int)x1;
pointA.y = (int)y1;
pointB.x = (int)x2;
pointB.y = (int)y2;
// Draw the stroke
SetColor(strokeColor);
SDL_RenderDrawLine(renderer, pointA.x, pointA.y, pointB.x, pointB.y);
}
/// <summary>
/// Clears the background with the specified color
/// </summary>
/// <param name="v1">Red part of the color, 0 - 255</param>
/// <param name="v2">Green part of the color, 0 - 255</param>
/// <param name="v3">Blue part of the color, 0 - 255</param>
public static void Background(float v1, float v2, float v3)
{
byte currentRed;
byte currentGreen;
byte currentBlue;
byte currentAlpha;
SDL_GetRenderDrawColor(renderer, out currentRed, out currentGreen, out currentBlue, out currentAlpha);
SDL_SetRenderDrawColor(renderer, Convert.ToByte(v1), Convert.ToByte(v2), Convert.ToByte(v3), 255);
SDL_RenderClear(renderer);
SDL_SetRenderDrawColor(renderer, currentRed, currentGreen, currentBlue, currentAlpha);
}
#endregion
#region Fonts
/// <summary>
/// Creates a PFont with the specified font name and font size (in pt)
/// </summary>
/// <param name="fontname">Filename of the font, relative to the content folder</param>
/// <param name="size">Font size in points</param>
/// <returns>The loaded PFont</returns>
public static PFont CreateFont(string fontname, int size)
{
return new PFont(fontname, size, renderer);
}
/// <summary>
/// Sets the active font used when writing text
/// </summary>
/// <param name="font"></param>
public static void TextFont(PFont font)
{
activeFont = font;
}
/// <summary>
/// Writes text using the active font
/// </summary>
/// <param name="text">The text to write</param>
/// <param name="x">x-position of the text</param>
/// <param name="y">y-position of the text</param>
public static void Text(string text, int x, int y)
{
activeFont.Text(text, x, y);
}
#endregion
#endregion
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Text;
using System.Reflection;
using static SDL2.SDL;
namespace ProcSharpCore
{
class Keyboard
{
private MethodInfo keyPressed;
private MethodInfo keyReleased;
private MethodInfo keyTyped;
string latestKey;
private object gameObject;
private List<string> pressedKeys = new List<string>();
internal Keyboard(Type gameType, object gameObject)
{
this.gameObject = gameObject;
keyPressed = gameType.GetMethod("KeyPressed");
keyReleased = gameType.GetMethod("KeyReleased");
keyTyped = gameType.GetMethod("KeyTyped");
}
internal void KeyDown(SDL_Event e)
{
latestKey = SDL_GetKeyName(e.key.keysym.sym);
pressedKeys.Add(latestKey);
keyPressed?.Invoke(gameObject, null);
}
internal void KeyUp(SDL_Event e)
{
pressedKeys.Remove(SDL_GetKeyName(e.key.keysym.sym));
keyReleased?.Invoke(gameObject, null);
}
internal string LatestKey()
{
return latestKey;
}
internal bool AnyKeyPressed()
{
return pressedKeys.Count != 0;
}
}
}
<file_sep>using System;
using ProcSharpCore;
namespace ProcSharpGame
{
class Program
{
static void Main(string[] args)
{
ProcSharp.Initialize(typeof(Game), new Game());
}
}
}
<file_sep># ProcSharp
An interpretation of the Processing framework in C#
## Installation
### Installing ProcSharp as a Visual Studio extension (recommended)
The recommended way of installing ProcSharp is to install the ProcSharp Visual Studio extension. It includes project templates for creating a ProcSharp app and also includes all native libraries that ProcSharp depends on. Install it through Visual Studio or read more about it [here](https://marketplace.visualstudio.com/items?itemName=ProcSharp.windows-app-template).
### Installing ProcSharp using NuGet
Create a new Console App (.NET Core) and add a reference to ProcSharp using NuGet. The code below shows how to set up a simple ProcSharp program. In addition to adding ProcSharp as a NuGet reference, you also need to include all native libraries that SDL requires. They can be found [here](https://www.libsdl.org/download-2.0.php).
## Usage
Create a new ProcSharp App in Visual Studio, open up the Game.cs file and write your code there. See our [website](https://simoneddeland.github.io/proc-sharp/) on GitHub Pages for an API reference.
### Hello ProcSharp
#### Game.cs
```csharp
using System;
using System.Collections.Generic;
using System.Text;
using ProcSharpCore;
using static ProcSharpCore.ProcSharp;
namespace ProcSharpUser
{
class Game
{
public void Setup()
{
Size(480, 120);
}
public void Draw()
{
if (MouseIsPressed)
{
Fill(0);
}
else
{
Fill(255);
}
Square(MouseX, MouseY, 80);
}
}
}
```
If you didn't install the ProcSharp templates as a Visual Studio extension, you need to edit your Program.cs as follows:
#### Program.cs
```csharp
using System;
using ProcSharpCore;
namespace ProcSharpUser
{
class Program
{
static void Main(string[] args)
{
ProcSharp.Initialize(typeof(Game), new Game());
}
}
}
```
This is a screenshot from the example program.

### Drawing images
The following example shows how to load and draw an image. The image file "moose.png" should be placed in the folder called "content", which is included in the ProcSharp template.
```csharp
using System;
using System.Collections.Generic;
using System.Text;
using ProcSharpCore;
using static ProcSharpCore.ProcSharp;
namespace ProcSharpUser
{
class Game
{
PImage testImage;
public void Setup()
{
Size(1280, 720);
testImage = LoadImage("moose.png");
}
public void Draw()
{
Background(255, 255, 255);
Image(testImage, MouseX, MouseY, 100, 200);
}
}
}
```
## Contributing
See the [CONTRIBUTING.md](https://github.com/simoneddeland/proc-sharp/blob/master/CONTRIBUTING.md) for information on how to contribute to ProcSharp.
## Credits
...
## License
Licensed under the MIT license.<file_sep>using System;
using System.Collections.Generic;
using System.Text;
using SDL2;
using static SDL2.SDL;
using static SDL2.SDL_mixer;
using static SDL2.SDL_ttf;
namespace ProcSharpCore
{
class Initializer
{
internal void Initialize(ref IntPtr window, ref IntPtr renderer)
{
if (SDL_Init(SDL_INIT_EVERYTHING) < 0)
{
throw new Exception(SDL_GetError());
}
window = SDL_CreateWindow("ProcSharp",
SDL_WINDOWPOS_CENTERED,
SDL_WINDOWPOS_CENTERED,
100,
100,
SDL_WindowFlags.SDL_WINDOW_RESIZABLE
);
if (window == IntPtr.Zero)
{
throw new Exception($"Unable to create a window. SDL Error: {SDL_GetError()}");
}
renderer = SDL_CreateRenderer(window, -1, SDL_RendererFlags.SDL_RENDERER_ACCELERATED);
SDL_SetRenderDrawColor(renderer, 100, 100, 100, 255);
InitializeAudio();
if (TTF_Init() == -1)
{
throw new Exception("Could not initialize TTF: " + SDL_GetError());
}
}
private void InitializeAudio()
{
if (Mix_OpenAudio(MIX_DEFAULT_FREQUENCY, MIX_DEFAULT_FORMAT, MIX_DEFAULT_CHANNELS, 2048) < 0)
{
throw new Exception($"Unable to initialize audio: {SDL_GetError()}");
}
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Text;
using static SDL2.SDL;
using static SDL2.SDL_image;
using static SDL2.SDL_ttf;
namespace ProcSharpCore
{
public class PFont
{
internal static List<PFont> fonts = new List<PFont>();
IntPtr font;
internal static string contentFolder = "content/";
IntPtr renderer;
IntPtr textSurface;
IntPtr textTexture;
string lastText = "";
internal PFont(string fontname, int size, IntPtr renderer)
{
font = TTF_OpenFont(contentFolder + fontname, size);
this.renderer = renderer;
if (font == IntPtr.Zero)
{
throw new Exception("Could not load font " + fontname + ", " + SDL_GetError());
}
fonts.Add(this);
}
internal void Text(string text, int x, int y)
{
if (text == "")
{
return;
}
if (lastText != text)
{
SDL_Color color = new SDL_Color();
color.a = 255;
color.r = 0;
color.g = 0;
color.b = 0;
SDL_FreeSurface(textSurface);
SDL_DestroyTexture(textTexture);
textSurface = TTF_RenderText_Solid(font, text, color);
if (textSurface == IntPtr.Zero)
{
throw new Exception("Could not render text to surface: " + SDL_GetError());
}
textTexture = SDL_CreateTextureFromSurface(renderer, textSurface);
if (textTexture == IntPtr.Zero)
{
throw new Exception("Could not create a texture from the font surface: " + SDL_GetError());
}
lastText = text;
}
SDL_Rect targetRect = new SDL_Rect();
targetRect.x = x;
targetRect.y = y;
SDL_QueryTexture(textTexture, out _, out _, out targetRect.w, out targetRect.h);
SDL_RenderCopy(renderer, textTexture, IntPtr.Zero, ref targetRect);
}
internal void Destroy()
{
SDL_FreeSurface(textSurface);
SDL_DestroyTexture(textTexture);
TTF_CloseFont(font);
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Text;
namespace ProcSharpCore
{
public class MouseEvent
{
private int count;
public int GetCount()
{
return count;
}
internal void SetCount(int count)
{
this.count = count;
}
}
}
<file_sep>Contributing to ProcSharp
--------------------
1. Find a function from the Processing library not yet implemented in ProcSharp, see the [Processing reference](https://processing.org/reference/).
2. Fork ProcSharp.
3. Implement the selected function as a static function in ProcSharp.cs. If you need more than a few lines of code, write the implementation in another class and call a method of that class from inside the static method in ProcSharp.cs. As an example, this is how the MouseButton property is written in ProcSharp.cs:
```csharp
/// <summary>
/// The last pressed mouse button. Is either LEFT, MIDDLE or RIGHT
/// </summary>
public static uint MouseButton
{
get { return mouse.MouseButton; }
}
```
The code that manages which mouse button was last pressed is contained inside the Mouse class.
4. Create a pull request with your contribution.
## Building and testing ProcSharp using the source
The easiest way to get all dependencies set up without getting them from external sources is to use the project found in the *playground* folder.
1. Build the ProcSharp solution found in the src folder.
2. Open the ProcSharpPlayground solution found in the *playground* folder. It should automatically use the version of ProcSharp that you built in step 1.
3. Make any changes you want in the ProcSharp source code and test them in the ProcSharpPlayground solution. Don't forget to rebuild ProcSharp after you change the source code.
<file_sep>using System;
using System.Collections.Generic;
using System.Text;
using static SDL2.SDL;
namespace ProcSharpCore
{
class MouseState
{
Dictionary<uint, bool> buttonsDown = new Dictionary<uint, bool>();
internal MouseState()
{
buttonsDown.Add(SDL_BUTTON_LEFT, false);
buttonsDown.Add(SDL_BUTTON_MIDDLE, false);
buttonsDown.Add(SDL_BUTTON_RIGHT, false);
buttonsDown.Add(SDL_BUTTON_X1, false);
buttonsDown.Add(SDL_BUTTON_X2, false);
}
internal void ButtonPressed(uint button)
{
buttonsDown[button] = true;
}
internal void ButtonReleased(uint button)
{
buttonsDown[button] = false;
}
internal bool IsButtonDown(uint button)
{
return buttonsDown[button];
}
internal bool AnyButtonDown()
{
return buttonsDown.ContainsValue(true);
}
}
}
<file_sep># ProcSharp documentation
This is the official API documentation of ProcSharp.
## Structure
| Name in ProcSharp | Name in Processing |
| ------------- |-------------|
| Setup() | [setup()](https://processing.org/reference/setup_.html) |
| Draw() | [draw()](https://processing.org/reference/draw_.html) |
| Exit() | [exit()](https://processing.org/reference/exit_.html) |
## Environment
| Name in ProcSharp | Name in Processing |
| ------------- |-------------|
| Size() | [size()](https://processing.org/reference/size_.html) |
## Shape
### 2D Primitives
| Name in ProcSharp | Name in Processing |
| ------------- |-------------|
| Line() | [line()](https://processing.org/reference/line_.html) |
| Point() | [point()](https://processing.org/reference/point_.html) |
| Square() | [square()](https://processing.org/reference/square_.html) |
## Input
### Mouse
| Name in ProcSharp | Name in Processing |
| ------------- |-------------|
| MouseButton | [mouseButton](https://processing.org/reference/mouseButton.html) |
| MouseClicked() | [mouseClicked()](https://processing.org/reference/mouseClicked_.html) |
| MouseDragged() | [mouseDragged()](https://processing.org/reference/mouseDragged_.html) |
| MouseMoved() | [mouseMoved()](https://processing.org/reference/mouseMoved_.html) |
| MousePressed() | [mousePressed()](https://processing.org/reference/mousePressed_.html) |
| **MouseIsPressed** | [MousePressed](https://processing.org/reference/mousePressed.html) |
| MouseReleased() | [mouseReleased()](https://processing.org/reference/mouseReleased_.html) |
| MouseWheel() | [mouseWheel()](https://processing.org/reference/mouseWheel_.html) |
| MouseX | [mouseX](https://processing.org/reference/mouseX.html) |
| MouseY | [mouseY](https://processing.org/reference/mouseY.html) |
| PMouseX | [pmouseX](https://processing.org/reference/pmouseX.html) |
| PMouseY | [pmouseY](https://processing.org/reference/pmouseY.html) |
### Keyboard
| Name in ProcSharp | Name in Processing |
| ------------- |-------------|
| Key | [key](https://processing.org/reference/key.html) |
| KeyPressed() | [keyPressed()](https://processing.org/reference/keyPressed_.html) |
| **KeyIsPressed** | [keyPressed](https://processing.org/reference/keyPressed.html) |
| KeyReleased() | [keyReleased()](https://processing.org/reference/keyReleased_.html) |
## Color
### Setting
| Name in ProcSharp | Name in Processing |
| ------------- |-------------|
| Background() | [background()](https://processing.org/reference/background_.html) |
| Fill() | [fill()](https://processing.org/reference/fill_.html) |
| Stroke() | [stroke()](https://processing.org/reference/stroke_.html) |
## Image
| Name in ProcSharp | Name in Processing |
| ------------- |-------------|
| PImage | [PImage()](https://processing.org/reference/PImage.html) |
### Loading & Displaying
| Name in ProcSharp | Name in Processing |
| ------------- |-------------|
| Image() | [image()](https://processing.org/reference/image_.html) |
| LoadImage() | [loadImage()](https://processing.org/reference/loadImage_.html) |
## Math
### Random
| Name in ProcSharp | Name in Processing |
| ------------- |-------------|
| Noise() | [noise()](https://processing.org/reference/noise_.html) |
| NoiseDetail() | [noiseDetail()](https://processing.org/reference/noiseDetail_.html) |
| NoiseSeed() | [noiseSeed()](https://processing.org/reference/noiseSeed_.html) |
| Random() | [random()](https://processing.org/reference/random_.html) |
| RandomGaussian() | [randomGaussian()](https://processing.org/reference/randomGaussian_.html) |
| RandomSeed() | [randomSeed()](https://processing.org/reference/randomSeed_.html) |
|
f2f212eb95a6537e1c6e0893b2908d5df948d778
|
[
"Markdown",
"C#"
] | 16
|
C#
|
simoneddeland/proc-sharp
|
2c471d64a9d2ed49c3a4722b4117d424177d161b
|
f563c3d5a066d67b793f0c28424f9e19a8ddd7ac
|
refs/heads/master
|
<repo_name>gruns/aiohttp<file_sep>/docs/websocket_utilities.rst
.. currentmodule:: aiohttp
WebSocket utilities
===================
.. class:: WSCloseCode
An :class:`~enum.IntEnum` for keeping close message code.
.. attribute:: OK
A normal closure, meaning that the purpose for
which the connection was established has been fulfilled.
.. attribute:: GOING_AWAY
An endpoint is "going away", such as a server
going down or a browser having navigated away from a page.
.. attribute:: PROTOCOL_ERROR
An endpoint is terminating the connection due
to a protocol error.
.. attribute:: UNSUPPORTED_DATA
An endpoint is terminating the connection
because it has received a type of data it cannot accept (e.g., an
endpoint that understands only text data MAY send this if it
receives a binary message).
.. attribute:: INVALID_TEXT
An endpoint is terminating the connection
because it has received data within a message that was not
consistent with the type of the message (e.g., non-UTF-8 :rfc:`3629`
data within a text message).
.. attribute:: POLICY_VIOLATION
An endpoint is terminating the connection because it has
received a message that violates its policy. This is a generic
status code that can be returned when there is no other more
suitable status code (e.g.,
:attr:`~WSCloseCode.UNSUPPORTED_DATA` or
:attr:`~WSCloseCode.MESSAGE_TOO_BIG`) or if there is a need to
hide specific details about the policy.
.. attribute:: MESSAGE_TOO_BIG
An endpoint is terminating the connection
because it has received a message that is too big for it to
process.
.. attribute:: MANDATORY_EXTENSION
An endpoint (client) is terminating the
connection because it has expected the server to negotiate one or
more extension, but the server did not return them in the response
message of the WebSocket handshake. The list of extensions that
are needed should appear in the /reason/ part of the Close frame.
Note that this status code is not used by the server, because it
can fail the WebSocket handshake instead.
.. attribute:: INTERNAL_ERROR
A server is terminating the connection because
it encountered an unexpected condition that prevented it from
fulfilling the request.
.. attribute:: SERVICE_RESTART
The service is being restarted. A client may reconnect, and if it
chooses to do so, should reconnect using a randomized delay of 5-30 seconds.
.. attribute:: TRY_AGAIN_LATER
The service is experiencing overload. A client should only
connect to a different IP (when there are multiple for the
target) or reconnect to the same IP upon user action.
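A minimal sketch of how these codes are typically used when closing a
connection from a server handler (the handler below is illustrative, not
part of the API)::

    from aiohttp import WSCloseCode, web

    async def handler(request):
        ws = web.WebSocketResponse()
        await ws.prepare(request)
        async for msg in ws:
            ...  # process incoming messages
        # Tell the peer the server is going away, e.g. during shutdown.
        await ws.close(code=WSCloseCode.GOING_AWAY, message=b'Server shutdown')
        return ws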
.. class:: WSMsgType
An :class:`~enum.IntEnum` for describing :class:`WSMessage` type.
.. attribute:: CONTINUATION
A marker for a continuation frame; the user will never receive a message
of this type.
.. attribute:: TEXT
Text message, the value has :class:`str` type.
.. attribute:: BINARY
Binary message, the value has :class:`bytes` type.
.. attribute:: PING
Ping frame (sent by client peer).
.. attribute:: PONG
Pong frame, the answer to a ping. Sent by the server peer.
.. attribute:: CLOSE
Close frame.
.. attribute:: CLOSED
Actually not a frame but a flag indicating that the websocket was
closed.
.. attribute:: ERROR
Actually not a frame but a flag indicating that the websocket
received an error.
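In a receive loop the message type is normally used to dispatch handling.
A short sketch (``ws`` is assumed to be an already prepared
:class:`~aiohttp.web.WebSocketResponse` or client websocket)::

    from aiohttp import WSMsgType

    async def dispatch(ws):
        async for msg in ws:
            if msg.type == WSMsgType.TEXT:
                await ws.send_str('echo: ' + msg.data)
            elif msg.type == WSMsgType.BINARY:
                await ws.send_bytes(msg.data)
            elif msg.type == WSMsgType.ERROR:
                break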
.. class:: WSMessage
Websocket message, returned by ``.receive()`` calls.
.. attribute:: type
Message type, :class:`WSMsgType` instance.
.. attribute:: data
Message payload.
1. :class:`str` for :attr:`WSMsgType.TEXT` messages.
2. :class:`bytes` for :attr:`WSMsgType.BINARY` messages.
3. :class:`WSCloseCode` for :attr:`WSMsgType.CLOSE` messages.
4. :class:`bytes` for :attr:`WSMsgType.PING` messages.
5. :class:`bytes` for :attr:`WSMsgType.PONG` messages.
.. attribute:: extra
Additional info, :class:`str`.
Makes sense only for :attr:`WSMsgType.CLOSE` messages, contains
optional message description.
.. method:: json(*, loads=json.loads)
Returns parsed JSON data.
:param loads: optional JSON decoder function.
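For example, a text message whose payload is JSON can be decoded directly
(a sketch; ``ws`` is assumed to be an open websocket)::

    from aiohttp import WSMsgType

    async def read_json(ws):
        msg = await ws.receive()
        if msg.type == WSMsgType.TEXT:
            return msg.json()   # parsed with json.loads by default
        return None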
<file_sep>/examples/background_tasks.py
#!/usr/bin/env python3
"""Example of aiohttp.web.Application.on_startup signal handler"""
import asyncio
import aioredis
from aiohttp import WSCloseCode, web
async def websocket_handler(request):
ws = web.WebSocketResponse()
await ws.prepare(request)
request.app['websockets'].append(ws)
try:
async for msg in ws:
print(msg)
await asyncio.sleep(1)
finally:
request.app['websockets'].remove(ws)
return ws
async def on_shutdown(app):
for ws in app['websockets']:
await ws.close(code=WSCloseCode.GOING_AWAY, message='Server shutdown')
async def listen_to_redis(app):
try:
sub = await aioredis.create_redis(('localhost', 6379), loop=app.loop)
ch, *_ = await sub.subscribe('news')
async for msg in ch.iter(encoding='utf-8'):
# Forward message to all connected websockets:
for ws in app['websockets']:
await ws.send_str('{}: {}'.format(ch.name, msg))
print("message in {}: {}".format(ch.name, msg))
except asyncio.CancelledError:
pass
finally:
print('Cancel Redis listener: close connection...')
await sub.unsubscribe(ch.name)
await sub.quit()
print('Redis connection closed.')
async def start_background_tasks(app):
app['redis_listener'] = app.loop.create_task(listen_to_redis(app))
async def cleanup_background_tasks(app):
print('cleanup background tasks...')
app['redis_listener'].cancel()
await app['redis_listener']
def init():
app = web.Application()
app['websockets'] = []
app.router.add_get('/news', websocket_handler)
app.on_startup.append(start_background_tasks)
app.on_cleanup.append(cleanup_background_tasks)
app.on_shutdown.append(on_shutdown)
return app
web.run_app(init())
<file_sep>/tests/test_client_connection.py
import gc
from unittest import mock
import pytest
from aiohttp.connector import Connection
@pytest.fixture
def key():
return object()
@pytest.fixture
def loop():
return mock.Mock()
@pytest.fixture
def connector():
return mock.Mock()
@pytest.fixture
def protocol():
return mock.Mock(should_close=False)
def test_ctor(connector, key, protocol, loop) -> None:
conn = Connection(connector, key, protocol, loop)
assert conn.protocol is protocol
conn.close()
def test_callbacks_on_close(connector, key, protocol, loop) -> None:
conn = Connection(connector, key, protocol, loop)
notified = False
def cb():
nonlocal notified
notified = True
conn.add_callback(cb)
conn.close()
assert notified
def test_callbacks_on_release(connector, key, protocol, loop) -> None:
conn = Connection(connector, key, protocol, loop)
notified = False
def cb():
nonlocal notified
notified = True
conn.add_callback(cb)
conn.release()
assert notified
def test_callbacks_exception(connector, key, protocol, loop) -> None:
conn = Connection(connector, key, protocol, loop)
notified = False
def cb1():
raise Exception
def cb2():
nonlocal notified
notified = True
conn.add_callback(cb1)
conn.add_callback(cb2)
conn.close()
assert notified
def test_del(connector, key, protocol, loop) -> None:
loop.is_closed.return_value = False
conn = Connection(connector, key, protocol, loop)
exc_handler = mock.Mock()
loop.set_exception_handler(exc_handler)
with pytest.warns(ResourceWarning):
del conn
gc.collect()
connector._release.assert_called_with(key, protocol, should_close=True)
msg = {'client_connection': mock.ANY, # conn was deleted
'message': 'Unclosed connection'}
if loop.get_debug():
msg['source_traceback'] = mock.ANY
loop.call_exception_handler.assert_called_with(msg)
def test_close(connector, key, protocol, loop) -> None:
conn = Connection(connector, key, protocol, loop)
assert not conn.closed
conn.close()
assert conn._protocol is None
connector._release.assert_called_with(key, protocol, should_close=True)
assert conn.closed
def test_release(connector, key, protocol, loop) -> None:
conn = Connection(connector, key, protocol, loop)
assert not conn.closed
conn.release()
assert not protocol.transport.close.called
assert conn._protocol is None
connector._release.assert_called_with(key, protocol, should_close=False)
assert conn.closed
def test_release_proto_should_close(connector, key, protocol, loop) -> None:
protocol.should_close = True
conn = Connection(connector, key, protocol, loop)
assert not conn.closed
conn.release()
assert not protocol.transport.close.called
assert conn._protocol is None
connector._release.assert_called_with(key, protocol, should_close=True)
assert conn.closed
def test_release_released(connector, key, protocol, loop) -> None:
conn = Connection(connector, key, protocol, loop)
conn.release()
connector._release.reset_mock()
conn.release()
assert not protocol.transport.close.called
assert conn._protocol is None
assert not connector._release.called
<file_sep>/examples/client_json.py
import asyncio
import aiohttp
async def fetch(session):
print('Query http://httpbin.org/get')
async with session.get(
'http://httpbin.org/get') as resp:
print(resp.status)
data = await resp.json()
print(data)
async def go():
async with aiohttp.ClientSession() as session:
await fetch(session)
loop = asyncio.get_event_loop()
loop.run_until_complete(go())
loop.close()
<file_sep>/tests/test_web_runner.py
import asyncio
import platform
import signal
import pytest
from aiohttp import web
from aiohttp.abc import AbstractAccessLogger
from aiohttp.test_utils import get_unused_port_socket
@pytest.fixture
def app():
return web.Application()
@pytest.fixture
def make_runner(loop, app):
asyncio.set_event_loop(loop)
runners = []
def go(**kwargs):
runner = web.AppRunner(app, **kwargs)
runners.append(runner)
return runner
yield go
for runner in runners:
loop.run_until_complete(runner.cleanup())
async def test_site_for_nonfrozen_app(make_runner) -> None:
runner = make_runner()
with pytest.raises(RuntimeError):
web.TCPSite(runner)
assert len(runner.sites) == 0
@pytest.mark.skipif(platform.system() == "Windows",
reason="the test is not valid for Windows")
async def test_runner_setup_handle_signals(make_runner) -> None:
runner = make_runner(handle_signals=True)
await runner.setup()
assert signal.getsignal(signal.SIGTERM) is not signal.SIG_DFL
await runner.cleanup()
assert signal.getsignal(signal.SIGTERM) is signal.SIG_DFL
@pytest.mark.skipif(platform.system() == "Windows",
reason="the test is not valid for Windows")
async def test_runner_setup_without_signal_handling(make_runner) -> None:
runner = make_runner(handle_signals=False)
await runner.setup()
assert signal.getsignal(signal.SIGTERM) is signal.SIG_DFL
await runner.cleanup()
assert signal.getsignal(signal.SIGTERM) is signal.SIG_DFL
async def test_site_double_added(make_runner) -> None:
_sock = get_unused_port_socket('127.0.0.1')
runner = make_runner()
await runner.setup()
site = web.SockSite(runner, _sock)
await site.start()
with pytest.raises(RuntimeError):
await site.start()
assert len(runner.sites) == 1
async def test_site_stop_not_started(make_runner) -> None:
runner = make_runner()
await runner.setup()
site = web.TCPSite(runner)
with pytest.raises(RuntimeError):
await site.stop()
assert len(runner.sites) == 0
async def test_custom_log_format(make_runner) -> None:
runner = make_runner(access_log_format='abc')
await runner.setup()
assert runner.server._kwargs['access_log_format'] == 'abc'
async def test_unreg_site(make_runner) -> None:
runner = make_runner()
await runner.setup()
site = web.TCPSite(runner)
with pytest.raises(RuntimeError):
runner._unreg_site(site)
async def test_app_property(make_runner, app) -> None:
runner = make_runner()
assert runner.app is app
def test_non_app() -> None:
with pytest.raises(TypeError):
web.AppRunner(object())
def test_app_handler_args() -> None:
app = web.Application(handler_args={'test': True})
runner = web.AppRunner(app)
assert runner._kwargs == {'access_log_class': web.AccessLogger,
'test': True}
async def test_app_make_handler_access_log_class_bad_type1() -> None:
class Logger:
pass
app = web.Application()
with pytest.raises(TypeError):
web.AppRunner(app, access_log_class=Logger)
async def test_app_make_handler_access_log_class_bad_type2() -> None:
class Logger:
pass
app = web.Application(handler_args={'access_log_class': Logger})
with pytest.raises(TypeError):
web.AppRunner(app)
async def test_app_make_handler_access_log_class1() -> None:
class Logger(AbstractAccessLogger):
def log(self, request, response, time):
pass
app = web.Application()
runner = web.AppRunner(app, access_log_class=Logger)
assert runner._kwargs['access_log_class'] is Logger
async def test_app_make_handler_access_log_class2() -> None:
class Logger(AbstractAccessLogger):
def log(self, request, response, time):
pass
app = web.Application(handler_args={'access_log_class': Logger})
runner = web.AppRunner(app)
assert runner._kwargs['access_log_class'] is Logger
async def test_addresses(make_runner, unix_sockname) -> None:
_sock = get_unused_port_socket('127.0.0.1')
runner = make_runner()
await runner.setup()
tcp = web.SockSite(runner, _sock)
await tcp.start()
unix = web.UnixSite(runner, unix_sockname)
await unix.start()
actual_addrs = runner.addresses
expected_host, expected_port = _sock.getsockname()[:2]
assert actual_addrs == [(expected_host, expected_port), unix_sockname]
@pytest.mark.skipif(platform.system() != "Windows",
reason="Proactor Event loop present only in Windows")
async def test_named_pipe_runner_wrong_loop(
app,
selector_loop,
pipe_name
) -> None:
runner = web.AppRunner(app)
await runner.setup()
with pytest.raises(RuntimeError):
web.NamedPipeSite(runner, pipe_name)
@pytest.mark.skipif(platform.system() != "Windows",
reason="Proactor Event loop present only in Windows")
async def test_named_pipe_runner_proactor_loop(
proactor_loop,
app,
pipe_name
) -> None:
runner = web.AppRunner(app)
await runner.setup()
pipe = web.NamedPipeSite(runner, pipe_name)
await pipe.start()
await runner.cleanup()
<file_sep>/tests/test_signals.py
import re
from unittest import mock
import pytest
from multidict import CIMultiDict
from aiohttp.signals import Signal
from aiohttp.test_utils import make_mocked_coro, make_mocked_request
from aiohttp.web import Application, Response
@pytest.fixture
def app():
return Application()
def make_request(app, method, path, headers=CIMultiDict()):
return make_mocked_request(method, path, headers, app=app)
async def test_add_signal_handler_not_a_callable(app) -> None:
callback = True
app.on_response_prepare.append(callback)
app.on_response_prepare.freeze()
with pytest.raises(TypeError):
await app.on_response_prepare(None, None)
async def test_function_signal_dispatch(app) -> None:
signal = Signal(app)
kwargs = {'foo': 1, 'bar': 2}
callback_mock = mock.Mock()
async def callback(**kwargs):
callback_mock(**kwargs)
signal.append(callback)
signal.freeze()
await signal.send(**kwargs)
callback_mock.assert_called_once_with(**kwargs)
async def test_function_signal_dispatch2(app) -> None:
signal = Signal(app)
args = {'a', 'b'}
kwargs = {'foo': 1, 'bar': 2}
callback_mock = mock.Mock()
async def callback(*args, **kwargs):
callback_mock(*args, **kwargs)
signal.append(callback)
signal.freeze()
await signal.send(*args, **kwargs)
callback_mock.assert_called_once_with(*args, **kwargs)
async def test_response_prepare(app) -> None:
callback = mock.Mock()
async def cb(*args, **kwargs):
callback(*args, **kwargs)
app.on_response_prepare.append(cb)
app.on_response_prepare.freeze()
request = make_request(app, 'GET', '/')
response = Response(body=b'')
await response.prepare(request)
callback.assert_called_once_with(request, response)
async def test_non_coroutine(app) -> None:
signal = Signal(app)
kwargs = {'foo': 1, 'bar': 2}
callback = mock.Mock()
signal.append(callback)
signal.freeze()
with pytest.raises(TypeError):
await signal.send(**kwargs)
def test_setitem(app) -> None:
signal = Signal(app)
m1 = mock.Mock()
signal.append(m1)
assert signal[0] is m1
m2 = mock.Mock()
signal[0] = m2
assert signal[0] is m2
def test_delitem(app) -> None:
signal = Signal(app)
m1 = mock.Mock()
signal.append(m1)
assert len(signal) == 1
del signal[0]
assert len(signal) == 0
def test_cannot_append_to_frozen_signal(app) -> None:
signal = Signal(app)
m1 = mock.Mock()
m2 = mock.Mock()
signal.append(m1)
signal.freeze()
with pytest.raises(RuntimeError):
signal.append(m2)
assert list(signal) == [m1]
def test_cannot_setitem_in_frozen_signal(app) -> None:
signal = Signal(app)
m1 = mock.Mock()
m2 = mock.Mock()
signal.append(m1)
signal.freeze()
with pytest.raises(RuntimeError):
signal[0] = m2
assert list(signal) == [m1]
def test_cannot_delitem_in_frozen_signal(app) -> None:
signal = Signal(app)
m1 = mock.Mock()
signal.append(m1)
signal.freeze()
with pytest.raises(RuntimeError):
del signal[0]
assert list(signal) == [m1]
async def test_cannot_send_non_frozen_signal(app) -> None:
signal = Signal(app)
callback = make_mocked_coro()
signal.append(callback)
with pytest.raises(RuntimeError):
await signal.send()
assert not callback.called
async def test_repr(app) -> None:
signal = Signal(app)
callback = make_mocked_coro()
signal.append(callback)
assert re.match(r"<Signal owner=<Application .+>, frozen=False, "
r"\[<Mock id='\d+'>\]>",
repr(signal))
<file_sep>/docs/web_exceptions.rst
.. currentmodule:: aiohttp.web
.. _aiohttp-web-exceptions:
Web Server Exceptions
=====================
Overview
--------
:mod:`aiohttp.web` defines a set of exceptions for every *HTTP status code*.
Each exception is a subclass of :exc:`HTTPException` and relates to a single
HTTP status code::
async def handler(request):
raise aiohttp.web.HTTPFound('/redirect')
Each exception class has a status code according to :rfc:`2068`:
codes in the 1xx-3xx range are not really errors, 4xx codes are client errors,
and 5xx codes are server errors.
HTTP Exception hierarchy chart::
Exception
HTTPException
HTTPSuccessful
* 200 - HTTPOk
* 201 - HTTPCreated
* 202 - HTTPAccepted
* 203 - HTTPNonAuthoritativeInformation
* 204 - HTTPNoContent
* 205 - HTTPResetContent
* 206 - HTTPPartialContent
HTTPRedirection
* 300 - HTTPMultipleChoices
* 301 - HTTPMovedPermanently
* 302 - HTTPFound
* 303 - HTTPSeeOther
* 304 - HTTPNotModified
* 305 - HTTPUseProxy
* 307 - HTTPTemporaryRedirect
* 308 - HTTPPermanentRedirect
HTTPError
HTTPClientError
* 400 - HTTPBadRequest
* 401 - HTTPUnauthorized
* 402 - HTTPPaymentRequired
* 403 - HTTPForbidden
* 404 - HTTPNotFound
* 405 - HTTPMethodNotAllowed
* 406 - HTTPNotAcceptable
* 407 - HTTPProxyAuthenticationRequired
* 408 - HTTPRequestTimeout
* 409 - HTTPConflict
* 410 - HTTPGone
* 411 - HTTPLengthRequired
* 412 - HTTPPreconditionFailed
* 413 - HTTPRequestEntityTooLarge
* 414 - HTTPRequestURITooLong
* 415 - HTTPUnsupportedMediaType
* 416 - HTTPRequestRangeNotSatisfiable
* 417 - HTTPExpectationFailed
* 421 - HTTPMisdirectedRequest
* 422 - HTTPUnprocessableEntity
* 424 - HTTPFailedDependency
* 426 - HTTPUpgradeRequired
* 428 - HTTPPreconditionRequired
* 429 - HTTPTooManyRequests
* 431 - HTTPRequestHeaderFieldsTooLarge
* 451 - HTTPUnavailableForLegalReasons
HTTPServerError
* 500 - HTTPInternalServerError
* 501 - HTTPNotImplemented
* 502 - HTTPBadGateway
* 503 - HTTPServiceUnavailable
* 504 - HTTPGatewayTimeout
* 505 - HTTPVersionNotSupported
* 506 - HTTPVariantAlsoNegotiates
* 507 - HTTPInsufficientStorage
* 510 - HTTPNotExtended
* 511 - HTTPNetworkAuthenticationRequired
All HTTP exceptions have the same constructor signature::
HTTPNotFound(*, headers=None, reason=None,
body=None, text=None, content_type=None)
If not directly specified, *headers* will be added to the *default
response headers*.
Classes :exc:`HTTPMultipleChoices`, :exc:`HTTPMovedPermanently`,
:exc:`HTTPFound`, :exc:`HTTPSeeOther`, :exc:`HTTPUseProxy`,
:exc:`HTTPTemporaryRedirect` have the following constructor signature::
HTTPFound(location, *, headers=None, reason=None,
body=None, text=None, content_type=None)
where *location* is the value for the *Location* HTTP header.
:exc:`HTTPMethodNotAllowed` is constructed by providing the incoming
unsupported method and list of allowed methods::
HTTPMethodNotAllowed(method, allowed_methods, *,
headers=None, reason=None,
body=None, text=None, content_type=None)
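For example, a handler might reject unsupported methods like this (a minimal
sketch; the handler and the allowed methods are illustrative only)::

    async def handler(request):
        if request.method not in ('GET', 'HEAD'):
            raise aiohttp.web.HTTPMethodNotAllowed(
                request.method, ['GET', 'HEAD'],
                text='Only GET and HEAD are supported')
        return aiohttp.web.Response(text='OK')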
Base HTTP Exception
-------------------
.. exception:: HTTPException(*, headers=None, reason=None, text=None, \
content_type=None)
The base class for HTTP server exceptions. Inherited from :exc:`Exception`.
:param headers: HTTP headers (:class:`~collections.abc.Mapping`)
:param str reason: an optional custom HTTP reason. aiohttp uses *default reason
string* if not specified.
:param str text: an optional text used in response body. If not specified *default
text* is constructed from status code and reason, e.g. `"404: Not
Found"`.
:param str content_type: an optional Content-Type, `"text/plain"` by default.
.. attribute:: status
HTTP status code for the exception, :class:`int`
.. attribute:: reason
HTTP status reason for the exception, :class:`str`
.. attribute:: text
HTTP status reason for the exception, :class:`str` or ``None``
for HTTP exceptions without body, e.g. "204 No Content"
.. attribute:: headers
HTTP headers for the exception, :class:`multidict.CIMultiDict`
Successful Exceptions
---------------------
HTTP exceptions for status codes in the 200-299 range. They are not *errors* but
special classes reflected in the exception hierarchy. E.g. ``raise
web.HTTPNoContent`` may look a little strange, but the construction is
perfectly legal.
.. exception:: HTTPSuccessful
A base class for the category, a subclass of :exc:`HTTPException`.
.. exception:: HTTPOk
An exception for *200 OK*, a subclass of :exc:`HTTPSuccessful`.
.. exception:: HTTPCreated
An exception for *201 Created*, a subclass of :exc:`HTTPSuccessful`.
.. exception:: HTTPAccepted
An exception for *202 Accepted*, a subclass of :exc:`HTTPSuccessful`.
.. exception:: HTTPNonAuthoritativeInformation
An exception for *203 Non-Authoritative Information*, a subclass of
:exc:`HTTPSuccessful`.
.. exception:: HTTPNoContent
An exception for *204 No Content*, a subclass of :exc:`HTTPSuccessful`.
Has no HTTP body.
.. exception:: HTTPResetContent
An exception for *205 Reset Content*, a subclass of :exc:`HTTPSuccessful`.
Has no HTTP body.
.. exception:: HTTPPartialContent
An exception for *206 Partial Content*, a subclass of :exc:`HTTPSuccessful`.
Redirections
------------
HTTP exceptions for status codes in the 300-399 range, e.g. ``raise
web.HTTPMovedPermanently(location='/new/path')``.
.. exception:: HTTPRedirection
A base class for the category, a subclass of :exc:`HTTPException`.
.. exception:: HTTPMove(location, *, headers=None, reason=None, text=None, \
content_type=None)
A base class for redirections with implied *Location* header,
all redirections except :exc:`HTTPNotModified`.
:param location: a :class:`yarl.URL` or :class:`str` used for *Location* HTTP
header.
For other arguments see :exc:`HTTPException` constructor.
.. attribute:: location
A *Location* HTTP header value, :class:`yarl.URL`.
.. exception:: HTTPMultipleChoices
An exception for *300 Multiple Choices*, a subclass of :exc:`HTTPMove`.
.. exception:: HTTPMovedPermanently
An exception for *301 Moved Permanently*, a subclass of :exc:`HTTPMove`.
.. exception:: HTTPFound
An exception for *302 Found*, a subclass of :exc:`HTTPMove`.
.. exception:: HTTPSeeOther
An exception for *303 See Other*, a subclass of :exc:`HTTPMove`.
.. exception:: HTTPNotModified
An exception for *304 Not Modified*, a subclass of :exc:`HTTPRedirection`.
Has no HTTP body.
.. exception:: HTTPUseProxy
An exception for *305 Use Proxy*, a subclass of :exc:`HTTPMove`.
.. exception:: HTTPTemporaryRedirect
An exception for *307 Temporary Redirect*, a subclass of :exc:`HTTPMove`.
.. exception:: HTTPPermanentRedirect
An exception for *308 Permanent Redirect*, a subclass of :exc:`HTTPMove`.
Client Errors
-------------
HTTP exceptions for status codes in the 400-499 range, e.g. ``raise web.HTTPNotFound()``.
.. exception:: HTTPClientError
A base class for the category, a subclass of :exc:`HTTPException`.
.. exception:: HTTPBadRequest
An exception for *400 Bad Request*, a subclass of :exc:`HTTPClientError`.
.. exception:: HTTPUnauthorized
An exception for *401 Unauthorized*, a subclass of :exc:`HTTPClientError`.
.. exception:: HTTPPaymentRequired
An exception for *402 Payment Required*, a subclass of
:exc:`HTTPClientError`.
.. exception:: HTTPForbidden
An exception for *403 Forbidden*, a subclass of :exc:`HTTPClientError`.
.. exception:: HTTPNotFound
An exception for *404 Not Found*, a subclass of :exc:`HTTPClientError`.
.. exception:: HTTPMethodNotAllowed(method, allowed_methods, *, \
headers=None, reason=None, text=None, \
content_type=None)
An exception for *405 Method Not Allowed*, a subclass of
:exc:`HTTPClientError`.
:param str method: requested but not allowed HTTP method.
:param allowed_methods: an iterable of allowed HTTP methods (:class:`str`),
the *Allow* HTTP header is constructed from
the sequence, joined by commas.
For other arguments see :exc:`HTTPException` constructor.
.. attribute:: allowed_methods
A set of allowed HTTP methods.
.. attribute:: method
Requested but not allowed HTTP method.
.. exception:: HTTPNotAcceptable
An exception for *406 Not Acceptable*, a subclass of :exc:`HTTPClientError`.
.. exception:: HTTPProxyAuthenticationRequired
An exception for *407 Proxy Authentication Required*, a subclass of
:exc:`HTTPClientError`.
.. exception:: HTTPRequestTimeout
An exception for *408 Request Timeout*, a subclass of :exc:`HTTPClientError`.
.. exception:: HTTPConflict
An exception for *409 Conflict*, a subclass of :exc:`HTTPClientError`.
.. exception:: HTTPGone
An exception for *410 Gone*, a subclass of :exc:`HTTPClientError`.
.. exception:: HTTPLengthRequired
An exception for *411 Length Required*, a subclass of :exc:`HTTPClientError`.
.. exception:: HTTPPreconditionFailed
An exception for *412 Precondition Failed*, a subclass of
:exc:`HTTPClientError`.
.. exception:: HTTPRequestEntityTooLarge(max_size, actual_size, **kwargs)
An exception for *413 Entity Too Large*, a subclass of :exc:`HTTPClientError`.
:param int max_size: Maximum allowed request body size
:param int actual_size: Actual received size
For other acceptable parameters see :exc:`HTTPException` constructor.
.. exception:: HTTPRequestURITooLong
An exception for *414 URI is too long*, a subclass of :exc:`HTTPClientError`.
.. exception:: HTTPUnsupportedMediaType
An exception for *415 Entity body in unsupported format*, a subclass of
:exc:`HTTPClientError`.
.. exception:: HTTPRequestRangeNotSatisfiable
An exception for *416 Cannot satisfy request range*, a subclass of
:exc:`HTTPClientError`.
.. exception:: HTTPExpectationFailed
An exception for *417 Expect condition could not be satisfied*, a subclass of
:exc:`HTTPClientError`.
.. exception:: HTTPMisdirectedRequest
An exception for *421 Misdirected Request*, a subclass of :exc:`HTTPClientError`.
.. exception:: HTTPUnprocessableEntity
An exception for *422 Unprocessable Entity*, a subclass of :exc:`HTTPClientError`.
.. exception:: HTTPFailedDependency
An exception for *424 Failed Dependency*, a subclass of :exc:`HTTPClientError`.
.. exception:: HTTPUpgradeRequired
An exception for *426 Upgrade Required*, a subclass of :exc:`HTTPClientError`.
.. exception:: HTTPPreconditionRequired
An exception for *428 Precondition Required*, a subclass of :exc:`HTTPClientError`.
.. exception:: HTTPTooManyRequests
An exception for *429 Too Many Requests*, a subclass of :exc:`HTTPClientError`.
.. exception:: HTTPRequestHeaderFieldsTooLarge
An exception for *431 Requests Header Fields Too Large*, a subclass of
:exc:`HTTPClientError`.
.. exception:: HTTPUnavailableForLegalReasons(link, *, \
headers=None, \
reason=None, \
text=None, \
content_type=None)
An exception for *451 Unavailable For Legal Reasons*, a subclass of
:exc:`HTTPClientError`.
:param link: A link to a resource with information for blocking reason,
:class:`str` or :class:`URL`
For other parameters see :exc:`HTTPException` constructor.
.. attribute:: link
A :class:`URL` link to a resource with information for blocking reason,
read-only property.
Server Errors
-------------
HTTP exceptions for status codes in the 500-599 range, e.g. ``raise web.HTTPBadGateway()``.
.. exception:: HTTPServerError
A base class for the category, a subclass of :exc:`HTTPException`.
.. exception:: HTTPInternalServerError
An exception for *500 Server got itself in trouble*, a subclass of
:exc:`HTTPServerError`.
.. exception:: HTTPNotImplemented
An exception for *501 Server does not support this operation*, a subclass of
:exc:`HTTPServerError`.
.. exception:: HTTPBadGateway
An exception for *502 Invalid responses from another server/proxy*, a
subclass of :exc:`HTTPServerError`.
.. exception:: HTTPServiceUnavailable
An exception for *503 The server cannot process the request due to a high
load*, a subclass of :exc:`HTTPServerError`.
.. exception:: HTTPGatewayTimeout
An exception for *504 The gateway server did not receive a timely response*,
a subclass of :exc:`HTTPServerError`.
.. exception:: HTTPVersionNotSupported
An exception for *505 Cannot fulfill request*, a subclass of :exc:`HTTPServerError`.
.. exception:: HTTPVariantAlsoNegotiates
An exception for *506 Variant Also Negotiates*, a subclass of :exc:`HTTPServerError`.
.. exception:: HTTPInsufficientStorage
An exception for *507 Insufficient Storage*, a subclass of :exc:`HTTPServerError`.
.. exception:: HTTPNotExtended
An exception for *510 Not Extended*, a subclass of :exc:`HTTPServerError`.
.. exception:: HTTPNetworkAuthenticationRequired
An exception for *511 Network Authentication Required*, a subclass of
:exc:`HTTPServerError`.
<file_sep>/tests/test_tcp_helpers.py
import socket
from unittest import mock
import pytest
from aiohttp.tcp_helpers import CORK, tcp_cork, tcp_nodelay
has_ipv6 = socket.has_ipv6
if has_ipv6:
# The socket.has_ipv6 flag may be True if Python was built with IPv6
# support, but the target system still may not have it.
# So let's ensure that we really have IPv6 support.
try:
socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
except OSError:
has_ipv6 = False
# nodelay
def test_tcp_nodelay_exception() -> None:
transport = mock.Mock()
s = mock.Mock()
s.setsockopt = mock.Mock()
s.family = socket.AF_INET
s.setsockopt.side_effect = OSError
transport.get_extra_info.return_value = s
tcp_nodelay(transport, True)
s.setsockopt.assert_called_with(
socket.IPPROTO_TCP,
socket.TCP_NODELAY,
True
)
def test_tcp_nodelay_enable() -> None:
transport = mock.Mock()
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
transport.get_extra_info.return_value = s
tcp_nodelay(transport, True)
assert s.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY)
def test_tcp_nodelay_enable_and_disable() -> None:
transport = mock.Mock()
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
transport.get_extra_info.return_value = s
tcp_nodelay(transport, True)
assert s.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY)
tcp_nodelay(transport, False)
assert not s.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY)
@pytest.mark.skipif(not has_ipv6, reason="IPv6 is not available")
def test_tcp_nodelay_enable_ipv6() -> None:
transport = mock.Mock()
with socket.socket(socket.AF_INET6, socket.SOCK_STREAM) as s:
transport.get_extra_info.return_value = s
tcp_nodelay(transport, True)
assert s.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY)
@pytest.mark.skipif(not hasattr(socket, 'AF_UNIX'),
reason="requires unix sockets")
def test_tcp_nodelay_enable_unix() -> None:
# do not set nodelay for unix socket
transport = mock.Mock()
s = mock.Mock(family=socket.AF_UNIX, type=socket.SOCK_STREAM)
transport.get_extra_info.return_value = s
tcp_nodelay(transport, True)
assert not s.setsockopt.called
def test_tcp_nodelay_enable_no_socket() -> None:
transport = mock.Mock()
transport.get_extra_info.return_value = None
tcp_nodelay(transport, True)
# cork
@pytest.mark.skipif(CORK is None, reason="TCP_CORK or TCP_NOPUSH required")
def test_tcp_cork_enable() -> None:
transport = mock.Mock()
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
transport.get_extra_info.return_value = s
tcp_cork(transport, True)
assert s.getsockopt(socket.IPPROTO_TCP, CORK)
@pytest.mark.skipif(CORK is None, reason="TCP_CORK or TCP_NOPUSH required")
def test_set_cork_enable_and_disable() -> None:
transport = mock.Mock()
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
transport.get_extra_info.return_value = s
tcp_cork(transport, True)
assert s.getsockopt(socket.IPPROTO_TCP, CORK)
tcp_cork(transport, False)
assert not s.getsockopt(socket.IPPROTO_TCP, CORK)
@pytest.mark.skipif(not has_ipv6, reason="IPv6 is not available")
@pytest.mark.skipif(CORK is None, reason="TCP_CORK or TCP_NOPUSH required")
def test_set_cork_enable_ipv6() -> None:
transport = mock.Mock()
with socket.socket(socket.AF_INET6, socket.SOCK_STREAM) as s:
transport.get_extra_info.return_value = s
tcp_cork(transport, True)
assert s.getsockopt(socket.IPPROTO_TCP, CORK)
@pytest.mark.skipif(not hasattr(socket, 'AF_UNIX'),
reason="requires unix sockets")
@pytest.mark.skipif(CORK is None, reason="TCP_CORK or TCP_NOPUSH required")
def test_set_cork_enable_unix() -> None:
transport = mock.Mock()
s = mock.Mock(family=socket.AF_UNIX, type=socket.SOCK_STREAM)
transport.get_extra_info.return_value = s
tcp_cork(transport, True)
assert not s.setsockopt.called
@pytest.mark.skipif(CORK is None, reason="TCP_CORK or TCP_NOPUSH required")
def test_set_cork_enable_no_socket() -> None:
transport = mock.Mock()
transport.get_extra_info.return_value = None
tcp_cork(transport, True)
@pytest.mark.skipif(CORK is None, reason="TCP_CORK or TCP_NOPUSH required")
def test_set_cork_exception() -> None:
transport = mock.Mock()
s = mock.Mock()
s.setsockopt = mock.Mock()
s.family = socket.AF_INET
s.setsockopt.side_effect = OSError
transport.get_extra_info.return_value = s
tcp_cork(transport, True)
s.setsockopt.assert_called_with(
socket.IPPROTO_TCP,
CORK,
True
)
<file_sep>/examples/web_classview.py
#!/usr/bin/env python3
"""Example for aiohttp.web class based views
"""
import functools
import json
from aiohttp import web
class MyView(web.View):
async def get(self):
return web.json_response({
'method': self.request.method,
'args': dict(self.request.rel_url.query),
'headers': dict(self.request.headers),
}, dumps=functools.partial(json.dumps, indent=4))
async def post(self):
data = await self.request.post()
return web.json_response({
'method': self.request.method,
'data': dict(data),
'headers': dict(self.request.headers),
}, dumps=functools.partial(json.dumps, indent=4))
async def index(request):
txt = """
<html>
<head>
<title>Class based view example</title>
</head>
<body>
<h1>Class based view example</h1>
<ul>
<li><a href="/">/</a> This page
<li><a href="/get">/get</a> Returns GET data.
<li><a href="/post">/post</a> Returns POST data.
</ul>
</body>
</html>
"""
return web.Response(text=txt, content_type='text/html')
def init():
app = web.Application()
app.router.add_get('/', index)
app.router.add_get('/get', MyView)
app.router.add_post('/post', MyView)
return app
web.run_app(init())
<file_sep>/docs/logging.rst
.. currentmodule:: aiohttp
.. _aiohttp-logging:
Logging
=======
*aiohttp* uses standard :mod:`logging` for tracking the
library activity.
We have the following loggers enumerated by names:
- ``'aiohttp.access'``
- ``'aiohttp.client'``
- ``'aiohttp.internal'``
- ``'aiohttp.server'``
- ``'aiohttp.web'``
- ``'aiohttp.websocket'``
You may subscribe to these loggers to receive their log messages. This
page does not give detailed instructions for configuring logging
subscriptions; the most convenient approach is
:func:`logging.config.dictConfig`, which configures all loggers in your
application at once.
Logging does not work out of the box. It requires at least a minimal
``logging`` configuration.
Example of minimal working logger setup::
import logging
from aiohttp import web
app = web.Application()
logging.basicConfig(level=logging.DEBUG)
web.run_app(app, port=5000)
.. versionadded:: 4.0.0
Access logs
-----------
Access logs are enabled by default. If the `debug` flag is set, and the default
logger ``'aiohttp.access'`` is used, access logs will be output to
:obj:`~sys.stderr` if no handlers are attached.
Furthermore, if the default logger has no log level set, the log level will be
set to :obj:`logging.DEBUG`.
This logging may be controlled by :class:`aiohttp.web.AppRunner` and
:func:`aiohttp.web.run_app`.
Pass an instance of :class:`logging.Logger` to override the default
logger.
.. note::
Use ``web.run_app(app, access_log=None)`` to disable access logs.
In addition, *access_log_format* may be used to specify the log format.
.. _aiohttp-logging-access-log-format-spec:
Format specification
^^^^^^^^^^^^^^^^^^^^
The library provides a custom micro-language for specifying information
about the request and response:
+--------------+---------------------------------------------------------+
| Option | Meaning |
+==============+=========================================================+
| ``%%`` | The percent sign |
+--------------+---------------------------------------------------------+
| ``%a`` | Remote IP-address |
| | (IP-address of proxy if using reverse proxy) |
+--------------+---------------------------------------------------------+
| ``%t`` | Time when the request was started to process |
+--------------+---------------------------------------------------------+
| ``%P`` | The process ID of the child that serviced the request |
+--------------+---------------------------------------------------------+
| ``%r`` | First line of request |
+--------------+---------------------------------------------------------+
| ``%s`` | Response status code |
+--------------+---------------------------------------------------------+
| ``%b`` | Size of response in bytes, including HTTP headers |
+--------------+---------------------------------------------------------+
| ``%T`` | The time taken to serve the request, in seconds |
+--------------+---------------------------------------------------------+
| ``%Tf`` | The time taken to serve the request, in seconds |
| | with fraction in %.06f format |
+--------------+---------------------------------------------------------+
| ``%D`` | The time taken to serve the request, in microseconds |
+--------------+---------------------------------------------------------+
| ``%{FOO}i`` | ``request.headers['FOO']`` |
+--------------+---------------------------------------------------------+
| ``%{FOO}o`` | ``response.headers['FOO']`` |
+--------------+---------------------------------------------------------+
The default access log format is::
'%a %t "%r" %s %b "%{Referer}i" "%{User-Agent}i"'
.. versionadded:: 2.3.0
*access_log_class* introduced.
Example of a drop-in replacement for the default access logger::
from aiohttp.abc import AbstractAccessLogger
class AccessLogger(AbstractAccessLogger):
def log(self, request, response, time):
self.logger.info(f'{request.remote} '
f'"{request.method} {request.path} '
f'done in {time}s: {response.status}')
.. versionadded:: 4.0.0
``AccessLogger.log()`` can now access any exception raised while processing
the request with ``sys.exc_info()``.
.. versionadded:: 4.0.0
If your logging needs to perform IO you can instead inherit from
:class:`aiohttp.abc.AbstractAsyncAccessLogger`::
from aiohttp.abc import AbstractAsyncAccessLogger
class AccessLogger(AbstractAsyncAccessLogger):
async def log(self, request, response, time):
logging_service = request.app['logging_service']
await logging_service.log(f'{request.remote} '
f'"{request.method} {request.path} '
f'done in {time}s: {response.status}')
This also allows access to the results of coroutines on the ``request`` and
``response``, e.g. ``request.text()``.
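Either kind of logger can then be wired in when starting the application; a
sketch, assuming the ``AccessLogger`` class defined above::

    web.run_app(app, access_log_class=AccessLogger)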
.. _gunicorn-accesslog:
Gunicorn access logs
^^^^^^^^^^^^^^^^^^^^
When `Gunicorn <http://docs.gunicorn.org/en/latest/index.html>`_ is used for
:ref:`deployment <aiohttp-deployment-gunicorn>`, its default access log format
will be automatically replaced with aiohttp's default access log format.
If Gunicorn's option access_logformat_ is
specified explicitly, it should use aiohttp's format specification.
Gunicorn's access log works only if accesslog_ is specified explicitly in your
config or as a command line option.
This configuration can be either a path or ``'-'``. If the application uses
a custom logging setup intercepting the ``'gunicorn.access'`` logger,
accesslog_ should be set to ``'-'`` to prevent Gunicorn from creating an
empty access log file upon every startup.
Error logs
----------
:mod:`aiohttp.web` uses a logger named ``'aiohttp.server'`` to report errors
encountered while handling web requests.
This log is enabled by default.
To use a different logger, pass *logger* (a :class:`logging.Logger`
instance) to the :class:`aiohttp.web.AppRunner` constructor.
.. _access_logformat:
http://docs.gunicorn.org/en/stable/settings.html#access-log-format
.. _accesslog:
http://docs.gunicorn.org/en/stable/settings.html#accesslog
<file_sep>/docs/new_router.rst
.. _aiohttp-router-refactoring-021:
Router refactoring in 0.21
==========================
Rationale
---------
The first generation (v1) of the router mapped a ``(method, path)`` pair to a
:term:`web-handler`. This mapping is called a **route**. Routes used to have
unique names, if they were named at all.
The main mistake of that design is coupling the **route** to the
``(method, path)`` pair, while URL construction really operates on
**resources** (**location** is a synonym). The HTTP method is not part of the
URI; it is applied only when sending the HTTP request.
Having different **route names** for the same path is confusing. Moreover,
**named routes** constructed for the same path must have unique,
non-overlapping names, which is cumbersome in certain situations.
On the other hand, it is sometimes desirable to bind several HTTP methods
to the same web handler. With the *v1* router this can be solved by passing
``'*'`` as the HTTP method. Class based views usually require the ``'*'``
method as well.
Implementation
--------------
The change introduces the **resource** as a first-class citizen::
resource = router.add_resource('/path/{to}', name='name')
A *Resource* has a **path** (dynamic or constant) and an optional **name**.
The name is **unique** in the router context.
A *Resource* has **routes**.
A *Route* corresponds to an *HTTP method* and the :term:`web-handler` for that method::
route = resource.add_route('GET', handler)
Users may still use a wildcard for accepting all HTTP methods (maybe we
will add something like ``resource.add_wildcard(handler)`` later).
Since **names** now belong to **resources**, ``app.router['name']``
returns a **resource** instance instead of :class:`aiohttp.web.Route`.
A **resource** has a ``.url()`` method, so
``app.router['name'].url(parts={'a': 'b'}, query={'arg': 'param'})``
still works as usual.
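Putting the pieces together, resource registration and URL construction might
look like this (a sketch; the handlers and names are illustrative)::

    resource = app.router.add_resource('/users/{name}', name='user')
    resource.add_route('GET', get_user)
    resource.add_route('POST', update_user)

    url = app.router['user'].url(parts={'name': 'john'})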
The change allows rewriting static file handling and implementing nested
applications as well.
Decoupling the *HTTP location* from the *HTTP method* makes life easier.
Backward compatibility
----------------------
The refactoring is 99% compatible with the previous implementation.
99% means that all examples and most existing code work without
modifications, but there are subtle backward-incompatible API changes.
``app.router['name']`` returns a :class:`aiohttp.web.BaseResource`
instance instead of :class:`aiohttp.web.Route`, but the resource keeps the
most useful method, ``resource.url(...)``, so end users should feel no
difference.
``route.match(...)`` is **not** supported anymore, use
:meth:`aiohttp.web.AbstractResource.resolve` instead.
``app.router.add_route(method, path, handler, name='name')`` is now just a
shortcut for::
resource = app.router.add_resource(path, name=name)
route = resource.add_route(method, handler)
return route
``app.router.register_route(...)`` is still supported; it creates an
:class:`aiohttp.web.ResourceAdapter` for every call (but it is deprecated now).
<file_sep>/tests/test_web_exceptions.py
import collections
import pickle
from traceback import format_exception
import pytest
from yarl import URL
from aiohttp import web
def test_all_http_exceptions_exported() -> None:
assert 'HTTPException' in web.__all__
for name in dir(web):
if name.startswith('_'):
continue
obj = getattr(web, name)
if isinstance(obj, type) and issubclass(obj, web.HTTPException):
assert name in web.__all__
async def test_ctor() -> None:
resp = web.HTTPOk()
assert resp.text == "200: OK"
assert resp.headers == {'Content-Type': 'text/plain'}
assert resp.reason == "OK"
assert resp.status == 200
assert bool(resp)
async def test_ctor_with_headers() -> None:
resp = web.HTTPOk(headers={"X-Custom": "value"})
assert resp.text == "200: OK"
assert resp.headers == {'Content-Type': 'text/plain', "X-Custom": "value"}
assert resp.reason == "OK"
assert resp.status == 200
async def test_ctor_content_type() -> None:
resp = web.HTTPOk(text="text", content_type="custom")
assert resp.text == "text"
assert resp.headers == {'Content-Type': 'custom'}
assert resp.reason == "OK"
assert resp.status == 200
assert bool(resp)
async def test_ctor_content_type_without_text() -> None:
with pytest.warns(DeprecationWarning):
resp = web.HTTPResetContent(content_type="custom")
assert resp.text is None
assert resp.headers == {'Content-Type': 'custom'}
assert resp.reason == "Reset Content"
assert resp.status == 205
assert bool(resp)
async def test_ctor_text_for_empty_body() -> None:
with pytest.warns(DeprecationWarning):
resp = web.HTTPResetContent(text="text")
assert resp.text == "text"
assert resp.headers == {'Content-Type': 'text/plain'}
assert resp.reason == "Reset Content"
assert resp.status == 205
def test_terminal_classes_has_status_code() -> None:
terminals = set()
for name in dir(web):
obj = getattr(web, name)
if isinstance(obj, type) and issubclass(obj, web.HTTPException):
terminals.add(obj)
dup = frozenset(terminals)
for cls1 in dup:
for cls2 in dup:
if cls1 in cls2.__bases__:
terminals.discard(cls1)
for cls in terminals:
assert cls.status_code is not None
codes = collections.Counter(cls.status_code for cls in terminals)
assert None not in codes
assert 1 == codes.most_common(1)[0][1]
def test_with_text() -> None:
resp = web.HTTPNotFound(text="Page not found")
assert 404 == resp.status
assert "Page not found" == resp.text
assert "text/plain" == resp.headers['Content-Type']
def test_default_text() -> None:
resp = web.HTTPOk()
assert '200: OK' == resp.text
def test_empty_text_204() -> None:
resp = web.HTTPNoContent()
assert resp.text is None
def test_empty_text_205() -> None:
resp = web.HTTPResetContent()
assert resp.text is None
def test_empty_text_304() -> None:
resp = web.HTTPNotModified()
assert resp.text is None
def test_HTTPException_retains_cause() -> None:
with pytest.raises(web.HTTPException) as ei:
try:
raise Exception('CustomException')
except Exception as exc:
raise web.HTTPException() from exc
tb = ''.join(format_exception(ei.type, ei.value, ei.tb))
assert 'CustomException' in tb
assert 'direct cause' in tb
class TestHTTPOk:
def test_ctor_all(self) -> None:
resp = web.HTTPOk(headers={'X-Custom': 'value'},
reason='Done',
text='text', content_type='custom')
assert resp.text == 'text'
assert resp.headers == {'X-Custom': 'value',
'Content-Type': 'custom'}
assert resp.reason == 'Done'
assert resp.status == 200
def test_pickle(self) -> None:
resp = web.HTTPOk(headers={'X-Custom': 'value'},
reason='Done',
text='text', content_type='custom')
resp.foo = 'bar'
for proto in range(2, pickle.HIGHEST_PROTOCOL + 1):
pickled = pickle.dumps(resp, proto)
resp2 = pickle.loads(pickled)
assert resp2.text == 'text'
assert resp2.headers == resp.headers
assert resp2.reason == 'Done'
assert resp2.status == 200
assert resp2.foo == 'bar'
async def test_app(self, aiohttp_client) -> None:
async def handler(request):
raise web.HTTPOk()
app = web.Application()
app.router.add_get('/', handler)
cli = await aiohttp_client(app)
resp = await cli.get('/')
assert 200 == resp.status
txt = await resp.text()
assert "200: OK" == txt
class TestHTTPFound:
def test_location_str(self) -> None:
exc = web.HTTPFound(location='/redirect')
assert exc.location == URL('/redirect')
assert exc.headers['Location'] == '/redirect'
def test_location_url(self) -> None:
exc = web.HTTPFound(location=URL('/redirect'))
assert exc.location == URL('/redirect')
assert exc.headers['Location'] == '/redirect'
def test_empty_location(self) -> None:
with pytest.raises(ValueError):
web.HTTPFound(location='')
with pytest.raises(ValueError):
web.HTTPFound(location=None)
def test_location_CRLF(self) -> None:
exc = web.HTTPFound(location='/redirect\r\n')
assert '\r\n' not in exc.headers['Location']
def test_pickle(self) -> None:
resp = web.HTTPFound(location='http://example.com',
headers={'X-Custom': 'value'},
reason='Wow',
text='text', content_type='custom')
resp.foo = 'bar'
for proto in range(2, pickle.HIGHEST_PROTOCOL + 1):
pickled = pickle.dumps(resp, proto)
resp2 = pickle.loads(pickled)
assert resp2.location == URL('http://example.com')
assert resp2.text == 'text'
assert resp2.headers == resp.headers
assert resp2.reason == 'Wow'
assert resp2.status == 302
assert resp2.foo == 'bar'
async def test_app(self, aiohttp_client) -> None:
async def handler(request):
raise web.HTTPFound(location='/redirect')
app = web.Application()
app.router.add_get('/', handler)
cli = await aiohttp_client(app)
resp = await cli.get('/', allow_redirects=False)
assert 302 == resp.status
txt = await resp.text()
assert "302: Found" == txt
assert '/redirect' == resp.headers['location']
class TestHTTPMethodNotAllowed:
async def test_ctor(self) -> None:
resp = web.HTTPMethodNotAllowed('GET', ['POST', 'PUT'],
headers={'X-Custom': 'value'},
reason='Unsupported',
text='text', content_type='custom')
assert resp.method == 'GET'
assert resp.allowed_methods == {'POST', 'PUT'}
assert resp.text == 'text'
assert resp.headers == {'X-Custom': 'value',
'Content-Type': 'custom',
'Allow': 'POST,PUT'}
assert resp.reason == 'Unsupported'
assert resp.status == 405
def test_pickle(self) -> None:
resp = web.HTTPMethodNotAllowed(method='GET',
allowed_methods=('POST', 'PUT'),
headers={'X-Custom': 'value'},
reason='Unsupported',
text='text', content_type='custom')
resp.foo = 'bar'
for proto in range(2, pickle.HIGHEST_PROTOCOL + 1):
pickled = pickle.dumps(resp, proto)
resp2 = pickle.loads(pickled)
assert resp2.method == 'GET'
assert resp2.allowed_methods == {'POST', 'PUT'}
assert resp2.text == 'text'
assert resp2.headers == resp.headers
assert resp2.reason == 'Unsupported'
assert resp2.status == 405
assert resp2.foo == 'bar'
class TestHTTPRequestEntityTooLarge:
def test_ctor(self) -> None:
resp = web.HTTPRequestEntityTooLarge(max_size=100, actual_size=123,
headers={'X-Custom': 'value'},
reason='Too large')
assert resp.text == ('Maximum request body size 100 exceeded, '
'actual body size 123')
assert resp.headers == {'X-Custom': 'value',
'Content-Type': 'text/plain'}
assert resp.reason == 'Too large'
assert resp.status == 413
def test_pickle(self) -> None:
resp = web.HTTPRequestEntityTooLarge(100, actual_size=123,
headers={'X-Custom': 'value'},
reason='Too large')
resp.foo = 'bar'
for proto in range(2, pickle.HIGHEST_PROTOCOL + 1):
pickled = pickle.dumps(resp, proto)
resp2 = pickle.loads(pickled)
assert resp2.text == resp.text
assert resp2.headers == resp.headers
assert resp2.reason == 'Too large'
assert resp2.status == 413
assert resp2.foo == 'bar'
class TestHTTPUnavailableForLegalReasons:
def test_ctor(self) -> None:
resp = web.HTTPUnavailableForLegalReasons(
link='http://warning.or.kr/',
headers={'X-Custom': 'value'},
reason='Zaprescheno',
text='text', content_type='custom')
assert resp.link == URL('http://warning.or.kr/')
assert resp.text == 'text'
assert resp.headers == {
'X-Custom': 'value',
'Content-Type': 'custom',
'Link': '<http://warning.or.kr/>; rel="blocked-by"'}
assert resp.reason == 'Zaprescheno'
assert resp.status == 451
def test_pickle(self) -> None:
resp = web.HTTPUnavailableForLegalReasons(
link='http://warning.or.kr/',
headers={'X-Custom': 'value'},
reason='Zaprescheno',
text='text', content_type='custom')
resp.foo = 'bar'
for proto in range(2, pickle.HIGHEST_PROTOCOL + 1):
pickled = pickle.dumps(resp, proto)
resp2 = pickle.loads(pickled)
assert resp2.link == URL('http://warning.or.kr/')
assert resp2.text == 'text'
assert resp2.headers == resp.headers
assert resp2.reason == 'Zaprescheno'
assert resp2.status == 451
assert resp2.foo == 'bar'
<file_sep>/docs/migration_to_2xx.rst
.. _aiohttp-migration:
Migration to 2.x
================
Client
------
chunking
^^^^^^^^
aiohttp does not support custom chunking sizes. It is up to the developer
to decide how to chunk data streams. If chunking is enabled, aiohttp
encodes the provided chunks in the "Transfer-encoding: chunked" format.
aiohttp does not enable chunked encoding automatically even if a
*transfer-encoding* header is supplied: *chunked* has to be set
explicitly. If *chunked* is set, then the *Transfer-encoding* and
*content-length* headers are disallowed.
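For instance, chunked transfer for an upload might be requested explicitly (a
sketch; the URL and payload are placeholders)::

    async with aiohttp.ClientSession() as session:
        await session.post('http://httpbin.org/post',
                           data=b'some payload',
                           chunked=True)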
compression
^^^^^^^^^^^
Compression has to be enabled explicitly with the *compress* parameter.
If compression is enabled, adding a *content-encoding* header is not allowed.
Compression also enables the *chunked* transfer-encoding.
Compression can not be combined with a *Content-Length* header.
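Likewise, compression is requested per call (a sketch; the URL and payload are
placeholders)::

    async with aiohttp.ClientSession() as session:
        await session.post('http://httpbin.org/post',
                           data=b'some payload',
                           compress=True)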
Client Connector
^^^^^^^^^^^^^^^^
1. By default a connector object manages the total number of concurrent
connections. This limit was a per-host rule in version 1.x. In
2.x, the `limit` parameter defines how many concurrent connections the
connector can open, and a new `limit_per_host` parameter defines the
limit per host. By default there is no per-host limit (see the sketch
after this list).
2. BaseConnector.close is now a normal function as opposed to
coroutine in version 1.x
3. BaseConnector.conn_timeout was moved to ClientSession
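A connector configured with both limits might be created like this (a sketch;
the numbers are arbitrary)::

    connector = aiohttp.TCPConnector(limit=100, limit_per_host=10)
    async with aiohttp.ClientSession(connector=connector) as session:
        ...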
ClientResponse.release
^^^^^^^^^^^^^^^^^^^^^^
The internal implementation was significantly redesigned. It is not
required to call `release` on the response object. When the client
fully receives the payload, the underlying connection automatically
returns to the pool. If the payload is not fully read, the connection
is closed.
Client exceptions
^^^^^^^^^^^^^^^^^
The exception hierarchy has been significantly modified. aiohttp now defines
only exceptions that cover connection handling and server response
misbehavior. For developer-specific mistakes, aiohttp uses standard Python
exceptions like ValueError or TypeError.
Reading the response content may raise a ClientPayloadError
exception. This exception indicates errors specific to the payload
encoding, such as invalid compressed data, malformed chunked-encoded
chunks, or not enough data to satisfy the content-length header.
A usage sketch for catching the new exceptions follows the hierarchy below.
All exceptions are moved from `aiohttp.errors` module to top level
`aiohttp` module.
New hierarchy of exceptions:
* `ClientError` - Base class for all client specific exceptions
- `ClientResponseError` - exceptions that could happen after we get
response from server
* `WSServerHandshakeError` - web socket server response error
- `ClientHttpProxyError` - proxy response
- `ClientConnectionError` - exceptions related to low-level
connection problems
* `ClientOSError` - subset of connection errors that are initiated
by an OSError exception
- `ClientConnectorError` - connector related exceptions
* `ClientProxyConnectionError` - proxy connection initialization error
- `ServerConnectionError` - server connection related errors
* `ServerDisconnectedError` - server disconnected
* `ServerTimeoutError` - server operation timeout, (read timeout, etc)
* `ServerFingerprintMismatch` - server fingerprint mismatch
- `ClientPayloadError` - This exception can only be raised while
reading the response payload if one of these errors occurs:
invalid compression, malformed chunked encoding, or not enough data
to satisfy the content-length header.
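A sketch of catching the new exceptions at different levels of granularity
(assuming an existing ``session``; the URL is a placeholder)::

    try:
        async with session.get('http://python.org') as resp:
            resp.raise_for_status()
            body = await resp.read()
    except aiohttp.ClientResponseError as exc:
        print('bad response:', exc)
    except aiohttp.ClientConnectionError as exc:
        print('connection problem:', exc)
    except aiohttp.ClientError as exc:
        print('other client error:', exc)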
Client payload (form-data)
^^^^^^^^^^^^^^^^^^^^^^^^^^
To unify form-data/payload handling, a new `Payload` system was
introduced. It provides customized handling of existing types and
allows implementations for user-defined types.
1. FormData.__call__ does not take an encoding arg anymore
and its return value changes from an iterator or bytes to a Payload instance.
aiohttp provides payload adapters for some standard types like `str`, `byte`,
`io.IOBase`, `StreamReader` or `DataQueue`.
2. A generator is no longer supported as a data provider; `streamer`
can be used instead. For example, to upload data from a file::
@aiohttp.streamer
def file_sender(writer, file_name=None):
with open(file_name, 'rb') as f:
chunk = f.read(2**16)
while chunk:
yield from writer.write(chunk)
chunk = f.read(2**16)
# Then you can use `file_sender` like this:
async with session.post('http://httpbin.org/post',
data=file_sender(file_name='huge_file')) as resp:
print(await resp.text())
Various
^^^^^^^
1. the `encoding` parameter is deprecated in `ClientSession.request()`.
Payload encoding is controlled at the payload level.
It is possible to specify an encoding for each payload instance.
2. the `version` parameter is removed from `ClientSession.request()`;
the client version can be specified in the `ClientSession` constructor.
3. `aiohttp.MsgType` dropped, use `aiohttp.WSMsgType` instead.
4. `ClientResponse.url` is an instance of `yarl.URL` class (`url_obj`
is deprecated)
5. `ClientResponse.raise_for_status()` raises
:exc:`aiohttp.ClientResponseError` exception
6. `ClientResponse.json()` is strict about the response's content type. If
the content type does not match, it raises an
:exc:`aiohttp.ClientResponseError` exception. To disable the content
type check you can pass ``None`` as the `content_type` parameter
(see the sketch after this list).
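A sketch of skipping the content type check mentioned in item 6::

    data = await resp.json(content_type=None)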
Server
------
ServerHttpProtocol and low-level details
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
The internal implementation was significantly redesigned to provide
better performance and support HTTP pipelining.
ServerHttpProtocol is dropped; its implementation is merged into
RequestHandler, and a lot of low-level APIs are dropped.
Application
^^^^^^^^^^^
1. The constructor parameter `loop` is deprecated. The loop is configured by the
application runner: the `run_app` function or any of the gunicorn workers.
2. `Application.router.add_subapp` is dropped, use `Application.add_subapp` instead
3. `Application.finished` is dropped, use `Application.cleanup` instead
WebRequest and WebResponse
^^^^^^^^^^^^^^^^^^^^^^^^^^
1. the `GET` and `POST` attributes no longer exist. Use the `query` attribute instead of `GET`.
2. A custom chunking size is not supported by `WebResponse.chunked`; the developer is
responsible for the actual chunking.
3. Payloads are supported as the body, so it is possible to use a client response's content
object as the body parameter for `WebResponse`.
4. The `FileSender` API is dropped; it is replaced with the more general `FileResponse` class::
async def handle(request):
return web.FileResponse('path-to-file.txt')
5. `WebSocketResponse.protocol` is renamed to `WebSocketResponse.ws_protocol`.
`WebSocketResponse.protocol` is now an instance of the `RequestHandler` class.
RequestPayloadError
^^^^^^^^^^^^^^^^^^^
Reading the request's payload may raise a `RequestPayloadError` exception. The behavior is
similar to `ClientPayloadError`.
WSGI
^^^^
*WSGI* support has been dropped, as well as Gunicorn WSGI support. We still provide the default and uvloop Gunicorn workers for `web.Application`.
<file_sep>/tests/test_tracing.py
from types import SimpleNamespace
from unittest.mock import Mock
import pytest
from aiohttp.test_utils import make_mocked_coro
from aiohttp.tracing import (
Trace,
TraceConfig,
TraceConnectionCreateEndParams,
TraceConnectionCreateStartParams,
TraceConnectionQueuedEndParams,
TraceConnectionQueuedStartParams,
TraceConnectionReuseconnParams,
TraceDnsCacheHitParams,
TraceDnsCacheMissParams,
TraceDnsResolveHostEndParams,
TraceDnsResolveHostStartParams,
TraceRequestChunkSentParams,
TraceRequestEndParams,
TraceRequestExceptionParams,
TraceRequestRedirectParams,
TraceRequestStartParams,
TraceResponseChunkReceivedParams,
)
class TestTraceConfig:
def test_trace_config_ctx_default(self) -> None:
trace_config = TraceConfig()
assert isinstance(trace_config.trace_config_ctx(), SimpleNamespace)
def test_trace_config_ctx_factory(self) -> None:
trace_config = TraceConfig(trace_config_ctx_factory=dict)
assert isinstance(trace_config.trace_config_ctx(), dict)
def test_trace_config_ctx_request_ctx(self) -> None:
trace_request_ctx = Mock()
trace_config = TraceConfig()
trace_config_ctx = trace_config.trace_config_ctx(
trace_request_ctx=trace_request_ctx)
assert trace_config_ctx.trace_request_ctx is trace_request_ctx
def test_freeze(self) -> None:
trace_config = TraceConfig()
trace_config.freeze()
assert trace_config.on_request_start.frozen
assert trace_config.on_request_chunk_sent.frozen
assert trace_config.on_response_chunk_received.frozen
assert trace_config.on_request_end.frozen
assert trace_config.on_request_exception.frozen
assert trace_config.on_request_redirect.frozen
assert trace_config.on_connection_queued_start.frozen
assert trace_config.on_connection_queued_end.frozen
assert trace_config.on_connection_create_start.frozen
assert trace_config.on_connection_create_end.frozen
assert trace_config.on_connection_reuseconn.frozen
assert trace_config.on_dns_resolvehost_start.frozen
assert trace_config.on_dns_resolvehost_end.frozen
assert trace_config.on_dns_cache_hit.frozen
assert trace_config.on_dns_cache_miss.frozen
class TestTrace:
@pytest.mark.parametrize('signal,params,param_obj', [
(
'request_start',
(Mock(), Mock(), Mock()),
TraceRequestStartParams
),
(
'request_chunk_sent',
(Mock(), ),
TraceRequestChunkSentParams
),
(
'response_chunk_received',
(Mock(), ),
TraceResponseChunkReceivedParams
),
(
'request_end',
(Mock(), Mock(), Mock(), Mock()),
TraceRequestEndParams
),
(
'request_exception',
(Mock(), Mock(), Mock(), Mock()),
TraceRequestExceptionParams
),
(
'request_redirect',
(Mock(), Mock(), Mock(), Mock()),
TraceRequestRedirectParams
),
(
'connection_queued_start',
(),
TraceConnectionQueuedStartParams
),
(
'connection_queued_end',
(),
TraceConnectionQueuedEndParams
),
(
'connection_create_start',
(),
TraceConnectionCreateStartParams
),
(
'connection_create_end',
(),
TraceConnectionCreateEndParams
),
(
'connection_reuseconn',
(),
TraceConnectionReuseconnParams
),
(
'dns_resolvehost_start',
(Mock(),),
TraceDnsResolveHostStartParams
),
(
'dns_resolvehost_end',
(Mock(),),
TraceDnsResolveHostEndParams
),
(
'dns_cache_hit',
(Mock(),),
TraceDnsCacheHitParams
),
(
'dns_cache_miss',
(Mock(),),
TraceDnsCacheMissParams
)
])
async def test_send(self, signal, params, param_obj) -> None:
session = Mock()
trace_request_ctx = Mock()
callback = Mock(side_effect=make_mocked_coro(Mock()))
trace_config = TraceConfig()
getattr(trace_config, "on_%s" % signal).append(callback)
trace_config.freeze()
trace = Trace(
session,
trace_config,
trace_config.trace_config_ctx(trace_request_ctx=trace_request_ctx)
)
await getattr(trace, "send_%s" % signal)(*params)
callback.assert_called_once_with(
session,
SimpleNamespace(trace_request_ctx=trace_request_ctx),
param_obj(*params)
)
<file_sep>/examples/web_ws.py
#!/usr/bin/env python3
"""Example for aiohttp.web websocket server
"""
import os
from aiohttp import web
WS_FILE = os.path.join(os.path.dirname(__file__), 'websocket.html')
async def wshandler(request):
resp = web.WebSocketResponse()
available = resp.can_prepare(request)
if not available:
with open(WS_FILE, 'rb') as fp:
return web.Response(body=fp.read(), content_type='text/html')
await resp.prepare(request)
await resp.send_str('Welcome!!!')
try:
print('Someone joined.')
for ws in request.app['sockets']:
await ws.send_str('Someone joined')
request.app['sockets'].append(resp)
async for msg in resp:
if msg.type == web.WSMsgType.TEXT:
for ws in request.app['sockets']:
if ws is not resp:
await ws.send_str(msg.data)
else:
return resp
return resp
finally:
request.app['sockets'].remove(resp)
print('Someone disconnected.')
for ws in request.app['sockets']:
await ws.send_str('Someone disconnected.')
async def on_shutdown(app):
for ws in app['sockets']:
await ws.close()
def init():
app = web.Application()
app['sockets'] = []
app.router.add_get('/', wshandler)
app.on_shutdown.append(on_shutdown)
return app
web.run_app(init())
<file_sep>/examples/static_files.py
import pathlib
from aiohttp import web
app = web.Application()
app.router.add_static('/', pathlib.Path(__file__).parent, show_index=True)
web.run_app(app)
<file_sep>/tests/test_client_fingerprint.py
import hashlib
from unittest import mock
import pytest
import aiohttp
ssl = pytest.importorskip('ssl')
def test_fingerprint_sha256() -> None:
sha256 = hashlib.sha256(b'12345678'*64).digest()
fp = aiohttp.Fingerprint(sha256)
assert fp.fingerprint == sha256
def test_fingerprint_sha1() -> None:
sha1 = hashlib.sha1(b'12345678'*64).digest()
with pytest.raises(ValueError):
aiohttp.Fingerprint(sha1)
def test_fingerprint_md5() -> None:
md5 = hashlib.md5(b'12345678'*64).digest()
with pytest.raises(ValueError):
aiohttp.Fingerprint(md5)
def test_fingerprint_check_no_ssl() -> None:
sha256 = hashlib.sha256(b'12345678'*64).digest()
fp = aiohttp.Fingerprint(sha256)
transport = mock.Mock()
transport.get_extra_info.return_value = None
assert fp.check(transport) is None
<file_sep>/tests/test_base_protocol.py
import asyncio
from contextlib import suppress
from unittest import mock
import pytest
from aiohttp.base_protocol import BaseProtocol
async def test_loop() -> None:
loop = asyncio.get_event_loop()
asyncio.set_event_loop(None)
pr = BaseProtocol(loop)
assert pr._loop is loop
async def test_pause_writing() -> None:
loop = asyncio.get_event_loop()
pr = BaseProtocol(loop)
assert not pr._paused
pr.pause_writing()
assert pr._paused
async def test_resume_writing_no_waiters() -> None:
loop = asyncio.get_event_loop()
pr = BaseProtocol(loop=loop)
pr.pause_writing()
assert pr._paused
pr.resume_writing()
assert not pr._paused
async def test_connection_made() -> None:
loop = asyncio.get_event_loop()
pr = BaseProtocol(loop=loop)
tr = mock.Mock()
assert pr.transport is None
pr.connection_made(tr)
assert pr.transport is not None
async def test_connection_lost_not_paused() -> None:
loop = asyncio.get_event_loop()
pr = BaseProtocol(loop=loop)
tr = mock.Mock()
pr.connection_made(tr)
assert not pr._connection_lost
pr.connection_lost(None)
assert pr.transport is None
assert pr._connection_lost
async def test_connection_lost_paused_without_waiter() -> None:
loop = asyncio.get_event_loop()
pr = BaseProtocol(loop=loop)
tr = mock.Mock()
pr.connection_made(tr)
assert not pr._connection_lost
pr.pause_writing()
pr.connection_lost(None)
assert pr.transport is None
assert pr._connection_lost
async def test_drain_lost() -> None:
loop = asyncio.get_event_loop()
pr = BaseProtocol(loop=loop)
tr = mock.Mock()
pr.connection_made(tr)
pr.connection_lost(None)
with pytest.raises(ConnectionResetError):
await pr._drain_helper()
async def test_drain_not_paused() -> None:
loop = asyncio.get_event_loop()
pr = BaseProtocol(loop=loop)
tr = mock.Mock()
pr.connection_made(tr)
assert pr._drain_waiter is None
await pr._drain_helper()
assert pr._drain_waiter is None
async def test_resume_drain_waited() -> None:
loop = asyncio.get_event_loop()
pr = BaseProtocol(loop=loop)
tr = mock.Mock()
pr.connection_made(tr)
pr.pause_writing()
t = loop.create_task(pr._drain_helper())
await asyncio.sleep(0)
assert pr._drain_waiter is not None
pr.resume_writing()
assert (await t) is None
assert pr._drain_waiter is None
async def test_lost_drain_waited_ok() -> None:
loop = asyncio.get_event_loop()
pr = BaseProtocol(loop=loop)
tr = mock.Mock()
pr.connection_made(tr)
pr.pause_writing()
t = loop.create_task(pr._drain_helper())
await asyncio.sleep(0)
assert pr._drain_waiter is not None
pr.connection_lost(None)
assert (await t) is None
assert pr._drain_waiter is None
async def test_lost_drain_waited_exception() -> None:
loop = asyncio.get_event_loop()
pr = BaseProtocol(loop=loop)
tr = mock.Mock()
pr.connection_made(tr)
pr.pause_writing()
t = loop.create_task(pr._drain_helper())
await asyncio.sleep(0)
assert pr._drain_waiter is not None
exc = RuntimeError()
pr.connection_lost(exc)
with pytest.raises(RuntimeError) as cm:
await t
assert cm.value is exc
assert pr._drain_waiter is None
async def test_lost_drain_cancelled() -> None:
loop = asyncio.get_event_loop()
pr = BaseProtocol(loop=loop)
tr = mock.Mock()
pr.connection_made(tr)
pr.pause_writing()
fut = loop.create_future()
async def wait():
fut.set_result(None)
await pr._drain_helper()
t = loop.create_task(wait())
await fut
t.cancel()
assert pr._drain_waiter is not None
pr.connection_lost(None)
with suppress(asyncio.CancelledError):
await t
assert pr._drain_waiter is None
async def test_resume_drain_cancelled() -> None:
loop = asyncio.get_event_loop()
pr = BaseProtocol(loop=loop)
tr = mock.Mock()
pr.connection_made(tr)
pr.pause_writing()
fut = loop.create_future()
async def wait():
fut.set_result(None)
await pr._drain_helper()
t = loop.create_task(wait())
await fut
t.cancel()
assert pr._drain_waiter is not None
pr.resume_writing()
with suppress(asyncio.CancelledError):
await t
assert pr._drain_waiter is None
<file_sep>/.cherry_picker.toml
team = "aio-libs"
repo = "aiohttp"
check_sha = "f382b5ffc445e45a110734f5396728da7914aeb6"
fix_commit_msg = false
<file_sep>/docs/http_request_lifecycle.rst
.. _aiohttp-request-lifecycle:
The aiohttp Request Lifecycle
=============================
Why is the aiohttp client API designed this way?
--------------------------------------------------
The first time you use aiohttp, you'll notice that a simple HTTP request is performed not with one, but with up to three steps:
.. code-block:: python
async with aiohttp.ClientSession() as session:
async with session.get('http://python.org') as response:
print(await response.text())
It's especially unexpected when coming from other libraries such as the very popular :term:`requests`, where the "hello world" looks like this:
.. code-block:: python
response = requests.get('http://python.org')
print(response.text())
So why is the aiohttp snippet so verbose?
Because aiohttp is asynchronous, its API is designed to make the most out of non-blocking network operations. In code like this, requests will block three times, and does so transparently, while aiohttp gives the event loop three opportunities to switch context:
- When doing the ``.get()``, both libraries send a GET request to the remote server. For aiohttp, this means asynchronous I/O, which is here marked with an ``async with`` that gives you the guarantee that not only it doesn't block, but that it's cleanly finalized.
- When doing ``response.text`` in requests, you just read an attribute. The call to ``.get()`` already preloaded and decoded the entire response payload, in a blocking manner. aiohttp loads only the headers when ``.get()`` is executed, letting you decide to pay the cost of loading the body afterward, in a second asynchronous operation. Hence the ``await response.text()``.
- ``async with aiohttp.ClientSession()`` does not perform I/O when entering the block, but at the end of it, it will ensure all remaining resources are closed correctly. Again, this is done asynchronously and must be marked as such. The session is also a performance tool, as it manages a pool of connections for you, allowing you to reuse them instead of opening and closing a new one at each request. You can even `manage the pool size by passing a connector object <client_advanced.html#limiting-connection-pool-size>`_.
Using a session as a best practice
-----------------------------------
The requests library does in fact also provide a session system. Indeed, it lets you do:
.. code-block:: python
with requests.session() as session:
response = session.get('http://python.org')
print(response.text)
It is just not the default behavior, nor is it advertised early in the documentation. Because of this, most users take a performance hit, but can start hacking quickly. And for requests, it's an understandable trade-off, since its goal is to be "HTTP for humans" and simplicity has always been more important than performance in this context.
However, if one uses aiohttp, one chooses asynchronous programming, a paradigm that makes the opposite trade-off: more verbosity for better performance. And so the library's default behavior reflects this, encouraging you to use performant best practices from the start.
How to use the ClientSession?
-------------------------------
By default the :class:`aiohttp.ClientSession` object will hold a connector with a maximum of 100 connections, putting the rest in a queue. This is quite a big number: it means you would have to be connected to a hundred different servers (not pages!) concurrently before even having to consider whether your task needs resource adjustment.
In fact, you can picture the session object as a user starting and closing a browser: it wouldn't make sense to do that every time you want to load a new tab.
So you are expected to reuse a session object and make many requests from it. For most scripts and average-sized software, this means you can create a single session and reuse it for the entire execution of the program. You can even pass the session around as a parameter in functions. For example, the typical "hello world":
.. code-block:: python
import aiohttp
import asyncio
async def main():
async with aiohttp.ClientSession() as session:
async with session.get('http://python.org') as response:
html = await response.text()
print(html)
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
Can become this:
.. code-block:: python
import aiohttp
import asyncio
async def fetch(session, url):
async with session.get(url) as response:
return await response.text()
async def main():
async with aiohttp.ClientSession() as session:
html = await fetch(session, 'http://python.org')
print(html)
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
On more complex code bases, you can even create a central registry to hold the session object from anywhere in the code, or a higher level ``Client`` class that holds a reference to it.
So when should you create more than one session object? You would do so when you want more granularity in your resource management (a short sketch follows the list):
- you want to group connections by a common configuration, e.g. sessions can set cookies, headers, timeout values, etc. that are shared by all connections they hold.
- you need several threads and want to avoid sharing a mutable object between them.
- you want several connection pools to benefit from different queues and assign priorities, e.g. one session never uses the queue and is for high-priority requests, while the other has a small concurrency limit and a very long queue for unimportant requests.
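As an illustration of the last point, here is a minimal, hypothetical sketch; the session names and ``limit`` values are made up for the example and are not recommendations.
.. code-block:: python
    import aiohttp
    async def make_sessions():
        # High-priority traffic: a dedicated pool with no connection limit
        # (limit=0 disables the limit), so requests never wait in a queue.
        urgent = aiohttp.ClientSession(
            connector=aiohttp.TCPConnector(limit=0))
        # Background traffic: a small pool; extra requests wait their turn.
        background = aiohttp.ClientSession(
            connector=aiohttp.TCPConnector(limit=2))
        return urgent, background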
<file_sep>/docs/whats_new_3_0.rst
.. _aiohttp_whats_new_3_0:
=========================
What's new in aiohttp 3.0
=========================
async/await everywhere
======================
The main change is dropping ``yield from`` support and using
``async``/``await`` everywhere. Farewell, Python 3.4.
The minimal supported Python version is **3.5.3** now.
Why not *3.5.0*? Because *3.5.3* has a crucial change:
:func:`asyncio.get_event_loop()` returns the running loop instead of
the *default* one, which may be different, e.g.::
loop = asyncio.new_event_loop()
loop.run_until_complete(f())
Note that :func:`asyncio.set_event_loop` was not called, so the default
loop is not equal to the actually executed one.
Application Runners
===================
People constantly asked about the ability to run aiohttp servers together
with other asyncio code, but :func:`aiohttp.web.run_app` is a blocking
synchronous call.
aiohttp had support for starting the application without ``run_app`` but the API
was very low-level and cumbersome.
Now application runners solve the task in a few lines of code, see
:ref:`aiohttp-web-app-runners` for details.
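For illustration, a minimal sketch of the runner-based startup, to be
executed inside a coroutine (``web`` is ``aiohttp.web`` and ``app`` is an
existing, already configured application)::
    runner = web.AppRunner(app)
    await runner.setup()
    site = web.TCPSite(runner, 'localhost', 8080)
    await site.start()
    # ... run other asyncio code alongside the server ...
    await runner.cleanup()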
Client Tracing
==============
Another long-awaited feature is tracing the client request life cycle to
figure out when and why a client request spends time waiting for
connection establishment, getting server response headers, etc.
This is now possible by registering special signal handlers for every
request processing stage. :ref:`aiohttp-client-tracing` provides more
info about the feature.
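A minimal sketch of registering a single handler, to be run inside a
coroutine (the printed message is only illustrative)::
    async def on_request_start(session, context, params):
        print('Request started:', params.url)
    trace_config = aiohttp.TraceConfig()
    trace_config.on_request_start.append(on_request_start)
    async with aiohttp.ClientSession(trace_configs=[trace_config]) as session:
        await session.get('http://python.org')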
HTTPS support
=============
Unfortunately asyncio has a bug with checking SSL certificates for
non-ASCII site DNS names, e.g. `https://историк.рф <https://историк.рф>`_ or
`https://雜草工作室.香港 <https://雜草工作室.香港>`_.
The bug has been fixed in upcoming Python 3.7 only (the change
requires breaking backward compatibility in :mod:`ssl` API).
aiohttp installs a fix for older Python versions (3.5 and 3.6).
Dropped obsolete API
====================
A switch to a new major version is a great chance to drop already
deprecated features.
The release dropped a lot, see :ref:`aiohttp_changes` for details.
All removals were already marked as deprecated or related to very
low-level implementation details.
If user code did not raise :exc:`DeprecationWarning`, it is most likely
compatible with aiohttp 3.0.
Summary
=======
Enjoy aiohttp 3.0 release!
The full change log is here: :ref:`aiohttp_changes`.
<file_sep>/docs/powered_by.rst
.. _aiohttp-powered-by:
Powered by aiohttp
==================
Web sites powered by aiohttp.
Feel free to fork the documentation on GitHub, add a link to your site and
make a Pull Request!
* `Farmer Business Network <https://www.farmersbusinessnetwork.com>`_
* `Home Assistant <https://home-assistant.io>`_
* `KeepSafe <https://www.getkeepsafe.com/>`_
* `Skyscanner Hotels <https://www.skyscanner.net/hotels>`_
* `Ocean S.A. <https://ocean.io/>`_
* `GNS3 <http://gns3.com>`_
* `TutorCruncher socket
<https://tutorcruncher.com/features/tutorcruncher-socket/>`_
* `Morpheus messaging microservice <https://github.com/tutorcruncher/morpheus>`_
* `Eyepea - Custom telephony solutions <http://www.eyepea.eu>`_
* `ALLOcloud - Telephony in the cloud <https://www.allocloud.com>`_
* `helpmanual - comprehensive help and man page database
<https://helpmanual.io/>`_
* `bedevere <https://github.com/python/bedevere>`_ - CPython's GitHub
bot, helps maintain and identify issues with a CPython pull request.
* `miss-islington <https://github.com/python/miss-islington>`_ -
CPython's GitHub bot, backports and merge CPython's pull requests
* `noa technologies - Bike-sharing management platform
<https://noa.one/>`_ - SSE endpoint, pushes real time updates of
bikes location.
* `Wargaming: World of Tanks <https://worldoftanks.ru/>`_
* `Yandex <https://yandex.ru>`_
* `Rambler <https://rambler.ru>`_
* `Escargot <https://escargot.log1p.xyz>`_ - Chat server
* `Prom.ua <https://prom.ua/>`_ - Online trading platform
* `globo.com <https://www.globo.com/>`_ - (some parts) Brazilian largest media portal
* `Glose <https://www.glose.com/>`_ - Social reader for E-Books
* `Emoji Generator <https://emoji-gen.ninja>`_ - Text icon generator
<file_sep>/docs/signals.rst
.. currentmodule:: aiohttp
Signals
=======
A signal is a list of registered asynchronous callbacks.
The signal's life-cycle has two stages: after creation its content
may be filled using standard list operations: ``sig.append()``
etc.
After ``sig.freeze()`` call the signal is *frozen*: adding, removing
and dropping callbacks are forbidden.
The only available operation is calling previously registered
callbacks by ``await sig.send(data)``.
For concrete usage examples see :ref:`signals in aiohttp.web
<aiohttp-web-signals>` chapter.
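For example, ``app.on_response_prepare`` is such a signal; a minimal
sketch of registering an async callback (the header name is arbitrary)::
    from aiohttp import web
    async def on_prepare(request, response):
        # Called for every response before the headers are sent.
        response.headers['X-Served-By'] = 'aiohttp'
    app = web.Application()
    app.on_response_prepare.append(on_prepare)  # allowed until the signal is frozen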
.. versionchanged:: 3.0
``sig.send()`` call is forbidden for non-frozen signal.
Support for regular (non-async) callbacks is dropped. All callbacks
should be async functions.
.. class:: Signal
The signal, implements :class:`collections.abc.MutableSequence`
interface.
.. comethod:: send(*args, **kwargs)
Call all registered callbacks one by one, starting from the beginning
of the list.
.. attribute:: frozen
``True`` if :meth:`freeze` was called, read-only property.
.. method:: freeze()
Freeze the list. After the call any content modification is forbidden.
<file_sep>/tests/test_classbasedview.py
from unittest import mock
import pytest
from aiohttp import web
from aiohttp.web_urldispatcher import View
def test_ctor() -> None:
request = mock.Mock()
view = View(request)
assert view.request is request
async def test_render_ok() -> None:
resp = web.Response(text='OK')
class MyView(View):
async def get(self):
return resp
request = mock.Mock()
request.method = 'GET'
resp2 = await MyView(request)
assert resp is resp2
async def test_render_unknown_method() -> None:
class MyView(View):
async def get(self):
return web.Response(text='OK')
options = get
request = mock.Mock()
request.method = 'UNKNOWN'
with pytest.raises(web.HTTPMethodNotAllowed) as ctx:
await MyView(request)
assert ctx.value.headers['allow'] == 'GET,OPTIONS'
assert ctx.value.status == 405
async def test_render_unsupported_method() -> None:
class MyView(View):
async def get(self):
return web.Response(text='OK')
options = delete = get
request = mock.Mock()
request.method = 'POST'
with pytest.raises(web.HTTPMethodNotAllowed) as ctx:
await MyView(request)
assert ctx.value.headers['allow'] == 'DELETE,GET,OPTIONS'
assert ctx.value.status == 405
<file_sep>/tools/gen.py
#!/usr/bin/env python3
import aiohttp
import pathlib
import aiohttp.hdrs
import multidict
from collections import defaultdict
import io
headers = [getattr(aiohttp.hdrs, name)
for name in dir(aiohttp.hdrs)
if isinstance(getattr(aiohttp.hdrs, name), multidict.istr)]
def factory():
return defaultdict(factory)
TERMINAL = object()
def build(headers):
dct = defaultdict(factory)
for hdr in headers:
d = dct
for ch in hdr:
d = d[ch]
d[TERMINAL] = hdr
return dct
dct = build(headers)
HEADER = """\
/* The file is autogenerated from aiohttp/hdrs.py
Run ./tools/gen.py to update it after the origin changing. */
#include "_find_header.h"
#define NEXT_CHAR() \\
{ \\
count++; \\
if (count == size) { \\
/* end of search */ \\
return -1; \\
} \\
pchar++; \\
ch = *pchar; \\
last = (count == size -1); \\
} while(0);
int
find_header(const char *str, int size)
{
char *pchar = str;
int last;
char ch;
int count = -1;
pchar--;
"""
BLOCK = """
{label}:
NEXT_CHAR();
switch (ch) {{
{cases}
default:
return -1;
}}
"""
CASE = """\
case '{char}':
if (last) {{
return {index};
}}
goto {next};"""
FOOTER = """
{missing}
missing:
/* nothing found */
return -1;
}}
"""
def gen_prefix(prefix, k):
if k == '-':
return prefix + '_'
else:
return prefix + k.upper()
def gen_block(dct, prefix, used_blocks, missing, out):
cases = []
for k, v in dct.items():
if k is TERMINAL:
continue
next_prefix = gen_prefix(prefix, k)
term = v.get(TERMINAL)
if term is not None:
index = headers.index(term)
else:
index = -1
hi = k.upper()
lo = k.lower()
case = CASE.format(char=hi, index=index, next=next_prefix)
cases.append(case)
if lo != hi:
case = CASE.format(char=lo, index=index, next=next_prefix)
cases.append(case)
label = prefix if prefix else 'INITIAL'
if cases:
block = BLOCK.format(label=label, cases='\n'.join(cases))
out.write(block)
else:
missing.add(label)
for k, v in dct.items():
if not isinstance(v, defaultdict):
continue
block_name = gen_prefix(prefix, k)
if block_name in used_blocks:
continue
used_blocks.add(block_name)
gen_block(v, block_name, used_blocks, missing, out)
def gen(dct):
out = io.StringIO()
out.write(HEADER)
missing = set()
gen_block(dct, '', set(), missing, out)
missing_labels = '\n'.join(m + ':' for m in sorted(missing))
out.write(FOOTER.format(missing=missing_labels))
return out
def gen_headers(headers):
out = io.StringIO()
out.write("# The file is autogenerated from aiohttp/hdrs.py\n")
out.write("# Run ./tools/gen.py to update it after the origin changing.")
out.write("\n\n")
out.write("from . import hdrs\n")
out.write("cdef tuple headers = (\n")
for hdr in headers:
out.write(" hdrs.{},\n".format(hdr.upper().replace('-', '_')))
out.write(")\n")
return out
# print(gen(dct).getvalue())
# print(gen_headers(headers).getvalue())
folder = pathlib.Path(aiohttp.__file__).parent
with (folder / '_find_header.c').open('w') as f:
f.write(gen(dct).getvalue())
with (folder / '_headers.pxi').open('w') as f:
f.write(gen_headers(headers).getvalue())
<file_sep>/examples/legacy/tcp_protocol_parser.py
#!/usr/bin/env python3
"""Protocol parser example."""
import argparse
import asyncio
import collections
import aiohttp
try:
import signal
except ImportError:
signal = None
MSG_TEXT = b'text:'
MSG_PING = b'ping:'
MSG_PONG = b'pong:'
MSG_STOP = b'stop:'
Message = collections.namedtuple('Message', ('type', 'data'))
def my_protocol_parser(out, buf):
"""Parser is used with StreamParser for incremental protocol parsing.
Parser is a generator function, but it is not a coroutine. Usually
parsers are implemented as a state machine.
more details in asyncio/parsers.py
existing parsers:
* HTTP protocol parsers asyncio/http/protocol.py
* websocket parser asyncio/http/websocket.py
"""
while True:
tp = yield from buf.read(5)
if tp in (MSG_PING, MSG_PONG):
# skip line
yield from buf.skipuntil(b'\r\n')
out.feed_data(Message(tp, None))
elif tp == MSG_STOP:
out.feed_data(Message(tp, None))
elif tp == MSG_TEXT:
# read text
text = yield from buf.readuntil(b'\r\n')
out.feed_data(Message(tp, text.strip().decode('utf-8')))
else:
raise ValueError('Unknown protocol prefix.')
class MyProtocolWriter:
def __init__(self, transport):
self.transport = transport
def ping(self):
self.transport.write(b'ping:\r\n')
def pong(self):
self.transport.write(b'pong:\r\n')
def stop(self):
self.transport.write(b'stop:\r\n')
def send_text(self, text):
self.transport.write(
'text:{}\r\n'.format(text.strip()).encode('utf-8'))
class EchoServer(asyncio.Protocol):
def connection_made(self, transport):
print('Connection made')
self.transport = transport
self.stream = aiohttp.StreamParser()
asyncio.Task(self.dispatch())
def data_received(self, data):
self.stream.feed_data(data)
def eof_received(self):
self.stream.feed_eof()
def connection_lost(self, exc):
print('Connection lost')
async def dispatch(self):
reader = self.stream.set_parser(my_protocol_parser)
writer = MyProtocolWriter(self.transport)
while True:
try:
msg = await reader.read()
except aiohttp.ConnectionError:
# client has been disconnected
break
print('Message received: {}'.format(msg))
if msg.type == MSG_PING:
writer.pong()
elif msg.type == MSG_TEXT:
writer.send_text('Re: ' + msg.data)
elif msg.type == MSG_STOP:
self.transport.close()
break
async def start_client(loop, host, port):
transport, stream = await loop.create_connection(
aiohttp.StreamProtocol, host, port)
reader = stream.reader.set_parser(my_protocol_parser)
writer = MyProtocolWriter(transport)
writer.ping()
message = 'This is the message. It will be echoed.'
while True:
try:
msg = await reader.read()
except aiohttp.ConnectionError:
print('Server has been disconnected.')
break
print('Message received: {}'.format(msg))
if msg.type == MSG_PONG:
writer.send_text(message)
print('data sent:', message)
elif msg.type == MSG_TEXT:
writer.stop()
print('stop sent')
break
transport.close()
def start_server(loop, host, port):
f = loop.create_server(EchoServer, host, port)
srv = loop.run_until_complete(f)
x = srv.sockets[0]
print('serving on', x.getsockname())
loop.run_forever()
ARGS = argparse.ArgumentParser(description="Protocol parser example.")
ARGS.add_argument(
'--server', action="store_true", dest='server',
default=False, help='Run tcp server')
ARGS.add_argument(
'--client', action="store_true", dest='client',
default=False, help='Run tcp client')
ARGS.add_argument(
'--host', action="store", dest='host',
default='127.0.0.1', help='Host name')
ARGS.add_argument(
'--port', action="store", dest='port',
default=9999, type=int, help='Port number')
if __name__ == '__main__':
args = ARGS.parse_args()
if ':' in args.host:
args.host, port = args.host.split(':', 1)
args.port = int(port)
if (not (args.server or args.client)) or (args.server and args.client):
print('Please specify --server or --client\n')
ARGS.print_help()
else:
loop = asyncio.get_event_loop()
if signal is not None:
loop.add_signal_handler(signal.SIGINT, loop.stop)
if args.server:
start_server(loop, args.host, args.port)
else:
loop.run_until_complete(start_client(loop, args.host, args.port))
<file_sep>/aiohttp/web_exceptions.py
import warnings
from http import HTTPStatus
from typing import Any, Iterable, Optional, Set, Tuple
from multidict import CIMultiDict
from yarl import URL
from . import hdrs
from .typedefs import LooseHeaders, StrOrURL
__all__ = (
'HTTPException',
'HTTPError',
'HTTPRedirection',
'HTTPSuccessful',
'HTTPOk',
'HTTPCreated',
'HTTPAccepted',
'HTTPNonAuthoritativeInformation',
'HTTPNoContent',
'HTTPResetContent',
'HTTPPartialContent',
'HTTPMultipleChoices',
'HTTPMovedPermanently',
'HTTPFound',
'HTTPSeeOther',
'HTTPNotModified',
'HTTPUseProxy',
'HTTPTemporaryRedirect',
'HTTPPermanentRedirect',
'HTTPClientError',
'HTTPBadRequest',
'HTTPUnauthorized',
'HTTPPaymentRequired',
'HTTPForbidden',
'HTTPNotFound',
'HTTPMethodNotAllowed',
'HTTPNotAcceptable',
'HTTPProxyAuthenticationRequired',
'HTTPRequestTimeout',
'HTTPConflict',
'HTTPGone',
'HTTPLengthRequired',
'HTTPPreconditionFailed',
'HTTPRequestEntityTooLarge',
'HTTPRequestURITooLong',
'HTTPUnsupportedMediaType',
'HTTPRequestRangeNotSatisfiable',
'HTTPExpectationFailed',
'HTTPMisdirectedRequest',
'HTTPUnprocessableEntity',
'HTTPFailedDependency',
'HTTPUpgradeRequired',
'HTTPPreconditionRequired',
'HTTPTooManyRequests',
'HTTPRequestHeaderFieldsTooLarge',
'HTTPUnavailableForLegalReasons',
'HTTPServerError',
'HTTPInternalServerError',
'HTTPNotImplemented',
'HTTPBadGateway',
'HTTPServiceUnavailable',
'HTTPGatewayTimeout',
'HTTPVersionNotSupported',
'HTTPVariantAlsoNegotiates',
'HTTPInsufficientStorage',
'HTTPNotExtended',
'HTTPNetworkAuthenticationRequired',
)
############################################################
# HTTP Exceptions
############################################################
class HTTPException(Exception):
# You should set in subclasses:
# status_code = 200
status_code = -1
empty_body = False
default_reason = "" # Initialized at the end of the module
def __init__(self, *,
headers: Optional[LooseHeaders]=None,
reason: Optional[str]=None,
text: Optional[str]=None,
content_type: Optional[str]=None) -> None:
if reason is None:
reason = self.default_reason
if text is None:
if not self.empty_body:
text = "{}: {}".format(self.status_code, reason)
else:
if self.empty_body:
warnings.warn(
"text argument is deprecated for HTTP status {} "
"since 4.0 and scheduled for removal in 5.0 (#3462),"
"the response should be provided without a body".format(
self.status_code),
DeprecationWarning,
stacklevel=2)
if headers is not None:
real_headers = CIMultiDict(headers)
else:
real_headers = CIMultiDict()
if content_type is not None:
if not text:
warnings.warn("content_type without text is deprecated "
"since 4.0 and scheduled for removal in 5.0 "
"(#3462)",
DeprecationWarning,
stacklevel=2)
real_headers[hdrs.CONTENT_TYPE] = content_type
elif hdrs.CONTENT_TYPE not in real_headers and text:
real_headers[hdrs.CONTENT_TYPE] = 'text/plain'
self._reason = reason
self._text = text
self._headers = real_headers
self.args = ()
def __bool__(self) -> bool:
return True
@property
def status(self) -> int:
return self.status_code
@property
def reason(self) -> str:
return self._reason
@property
def text(self) -> Optional[str]:
return self._text
@property
def headers(self) -> 'CIMultiDict[str]':
return self._headers
def __str__(self) -> str:
return self.reason
def __repr__(self) -> str:
return "<%s: %s>" % (self.__class__.__name__, self.reason)
__reduce__ = object.__reduce__
def __getnewargs__(self) -> Tuple[Any, ...]:
return self.args
class HTTPError(HTTPException):
"""Base class for exceptions with status codes in the 400s and 500s."""
class HTTPRedirection(HTTPException):
"""Base class for exceptions with status codes in the 300s."""
class HTTPSuccessful(HTTPException):
"""Base class for exceptions with status codes in the 200s."""
class HTTPOk(HTTPSuccessful):
status_code = 200
class HTTPCreated(HTTPSuccessful):
status_code = 201
class HTTPAccepted(HTTPSuccessful):
status_code = 202
class HTTPNonAuthoritativeInformation(HTTPSuccessful):
status_code = 203
class HTTPNoContent(HTTPSuccessful):
status_code = 204
empty_body = True
class HTTPResetContent(HTTPSuccessful):
status_code = 205
empty_body = True
class HTTPPartialContent(HTTPSuccessful):
status_code = 206
############################################################
# 3xx redirection
############################################################
class HTTPMove(HTTPRedirection):
def __init__(self,
location: StrOrURL,
*,
headers: Optional[LooseHeaders]=None,
reason: Optional[str]=None,
text: Optional[str]=None,
content_type: Optional[str]=None) -> None:
if not location:
raise ValueError("HTTP redirects need a location to redirect to.")
super().__init__(headers=headers, reason=reason,
text=text, content_type=content_type)
self._location = URL(location)
self.headers['Location'] = str(self.location)
@property
def location(self) -> URL:
return self._location
class HTTPMultipleChoices(HTTPMove):
status_code = 300
class HTTPMovedPermanently(HTTPMove):
status_code = 301
class HTTPFound(HTTPMove):
status_code = 302
# This one is safe after a POST (the redirected location will be
# retrieved with GET):
class HTTPSeeOther(HTTPMove):
status_code = 303
class HTTPNotModified(HTTPRedirection):
# FIXME: this should include a date or etag header
status_code = 304
empty_body = True
class HTTPUseProxy(HTTPMove):
# Not a move, but looks a little like one
status_code = 305
class HTTPTemporaryRedirect(HTTPMove):
status_code = 307
class HTTPPermanentRedirect(HTTPMove):
status_code = 308
############################################################
# 4xx client error
############################################################
class HTTPClientError(HTTPError):
pass
class HTTPBadRequest(HTTPClientError):
status_code = 400
class HTTPUnauthorized(HTTPClientError):
status_code = 401
class HTTPPaymentRequired(HTTPClientError):
status_code = 402
class HTTPForbidden(HTTPClientError):
status_code = 403
class HTTPNotFound(HTTPClientError):
status_code = 404
class HTTPMethodNotAllowed(HTTPClientError):
status_code = 405
def __init__(self,
method: str,
allowed_methods: Iterable[str],
*,
headers: Optional[LooseHeaders]=None,
reason: Optional[str]=None,
text: Optional[str]=None,
content_type: Optional[str]=None) -> None:
allow = ','.join(sorted(allowed_methods))
super().__init__(headers=headers, reason=reason,
text=text, content_type=content_type)
self.headers['Allow'] = allow
self._allowed = set(allowed_methods) # type: Set[str]
self._method = method
@property
def allowed_methods(self) -> Set[str]:
return self._allowed
@property
def method(self) -> str:
return self._method
class HTTPNotAcceptable(HTTPClientError):
status_code = 406
class HTTPProxyAuthenticationRequired(HTTPClientError):
status_code = 407
class HTTPRequestTimeout(HTTPClientError):
status_code = 408
class HTTPConflict(HTTPClientError):
status_code = 409
class HTTPGone(HTTPClientError):
status_code = 410
class HTTPLengthRequired(HTTPClientError):
status_code = 411
class HTTPPreconditionFailed(HTTPClientError):
status_code = 412
class HTTPRequestEntityTooLarge(HTTPClientError):
status_code = 413
def __init__(self,
max_size: int,
actual_size: int,
**kwargs: Any) -> None:
kwargs.setdefault(
'text',
'Maximum request body size {} exceeded, '
'actual body size {}'.format(max_size, actual_size)
)
super().__init__(**kwargs)
class HTTPRequestURITooLong(HTTPClientError):
status_code = 414
class HTTPUnsupportedMediaType(HTTPClientError):
status_code = 415
class HTTPRequestRangeNotSatisfiable(HTTPClientError):
status_code = 416
class HTTPExpectationFailed(HTTPClientError):
status_code = 417
class HTTPMisdirectedRequest(HTTPClientError):
status_code = 421
class HTTPUnprocessableEntity(HTTPClientError):
status_code = 422
class HTTPFailedDependency(HTTPClientError):
status_code = 424
class HTTPUpgradeRequired(HTTPClientError):
status_code = 426
class HTTPPreconditionRequired(HTTPClientError):
status_code = 428
class HTTPTooManyRequests(HTTPClientError):
status_code = 429
class HTTPRequestHeaderFieldsTooLarge(HTTPClientError):
status_code = 431
class HTTPUnavailableForLegalReasons(HTTPClientError):
status_code = 451
def __init__(self,
link: StrOrURL,
*,
headers: Optional[LooseHeaders]=None,
reason: Optional[str]=None,
text: Optional[str]=None,
content_type: Optional[str]=None) -> None:
super().__init__(headers=headers, reason=reason,
text=text, content_type=content_type)
self.headers['Link'] = '<{}>; rel="blocked-by"'.format(str(link))
self._link = URL(link)
@property
def link(self) -> URL:
return self._link
############################################################
# 5xx Server Error
############################################################
# Response status codes beginning with the digit "5" indicate cases in
# which the server is aware that it has erred or is incapable of
# performing the request. Except when responding to a HEAD request, the
# server SHOULD include an entity containing an explanation of the error
# situation, and whether it is a temporary or permanent condition. User
# agents SHOULD display any included entity to the user. These response
# codes are applicable to any request method.
class HTTPServerError(HTTPError):
pass
class HTTPInternalServerError(HTTPServerError):
status_code = 500
class HTTPNotImplemented(HTTPServerError):
status_code = 501
class HTTPBadGateway(HTTPServerError):
status_code = 502
class HTTPServiceUnavailable(HTTPServerError):
status_code = 503
class HTTPGatewayTimeout(HTTPServerError):
status_code = 504
class HTTPVersionNotSupported(HTTPServerError):
status_code = 505
class HTTPVariantAlsoNegotiates(HTTPServerError):
status_code = 506
class HTTPInsufficientStorage(HTTPServerError):
status_code = 507
class HTTPNotExtended(HTTPServerError):
status_code = 510
class HTTPNetworkAuthenticationRequired(HTTPServerError):
status_code = 511
def _initialize_default_reason() -> None:
for obj in globals().values():
if isinstance(obj, type) and issubclass(obj, HTTPException):
if obj.status_code >= 0:
try:
status = HTTPStatus(obj.status_code)
obj.default_reason = status.phrase
except ValueError:
pass
_initialize_default_reason()
del _initialize_default_reason
<file_sep>/tests/test_websocket_writer.py
import random
from unittest import mock
import pytest
from aiohttp.http import WebSocketWriter
from aiohttp.test_utils import make_mocked_coro
@pytest.fixture
def protocol():
ret = mock.Mock()
ret._drain_helper = make_mocked_coro()
return ret
@pytest.fixture
def transport():
ret = mock.Mock()
ret.is_closing.return_value = False
return ret
@pytest.fixture
def writer(protocol, transport):
return WebSocketWriter(protocol, transport, use_mask=False)
async def test_pong(writer) -> None:
await writer.pong()
writer.transport.write.assert_called_with(b'\x8a\x00')
async def test_ping(writer) -> None:
await writer.ping()
writer.transport.write.assert_called_with(b'\x89\x00')
async def test_send_text(writer) -> None:
await writer.send(b'text')
writer.transport.write.assert_called_with(b'\x81\x04text')
async def test_send_binary(writer) -> None:
await writer.send('binary', True)
writer.transport.write.assert_called_with(b'\x82\x06binary')
async def test_send_binary_long(writer) -> None:
await writer.send(b'b' * 127, True)
assert writer.transport.write.call_args[0][0].startswith(b'\x82~\x00\x7fb')
async def test_send_binary_very_long(writer) -> None:
await writer.send(b'b' * 65537, True)
assert (writer.transport.write.call_args_list[0][0][0] ==
b'\x82\x7f\x00\x00\x00\x00\x00\x01\x00\x01')
assert writer.transport.write.call_args_list[1][0][0] == b'b' * 65537
async def test_close(writer) -> None:
await writer.close(1001, 'msg')
writer.transport.write.assert_called_with(b'\x88\x05\x03\xe9msg')
await writer.close(1001, b'msg')
writer.transport.write.assert_called_with(b'\x88\x05\x03\xe9msg')
# Test that Service Restart close code is also supported
await writer.close(1012, b'msg')
writer.transport.write.assert_called_with(b'\x88\x05\x03\xf4msg')
async def test_send_text_masked(protocol, transport) -> None:
writer = WebSocketWriter(protocol,
transport,
use_mask=True,
random=random.Random(123))
await writer.send(b'text')
writer.transport.write.assert_called_with(b'\x81\x84\rg\xb3fy\x02\xcb\x12')
async def test_send_compress_text(protocol, transport) -> None:
writer = WebSocketWriter(protocol, transport, compress=15)
await writer.send(b'text')
writer.transport.write.assert_called_with(b'\xc1\x06*I\xad(\x01\x00')
await writer.send(b'text')
writer.transport.write.assert_called_with(b'\xc1\x05*\x01b\x00\x00')
async def test_send_compress_text_notakeover(protocol, transport) -> None:
writer = WebSocketWriter(protocol,
transport,
compress=15,
notakeover=True)
await writer.send(b'text')
writer.transport.write.assert_called_with(b'\xc1\x06*I\xad(\x01\x00')
await writer.send(b'text')
writer.transport.write.assert_called_with(b'\xc1\x06*I\xad(\x01\x00')
async def test_send_compress_text_per_message(protocol, transport) -> None:
writer = WebSocketWriter(protocol, transport)
await writer.send(b'text', compress=15)
writer.transport.write.assert_called_with(b'\xc1\x06*I\xad(\x01\x00')
await writer.send(b'text')
writer.transport.write.assert_called_with(b'\x81\x04text')
await writer.send(b'text', compress=15)
writer.transport.write.assert_called_with(b'\xc1\x06*I\xad(\x01\x00')
<file_sep>/docs/faq.rst
FAQ
===
.. contents::
:local:
Are there plans for an @app.route decorator like in Flask?
----------------------------------------------------------
As of aiohttp 2.3, :class:`~aiohttp.web.RouteTableDef` provides an API
similar to Flask's ``@app.route``. See
:ref:`aiohttp-web-alternative-routes-definition`.
Unlike Flask's ``@app.route``, :class:`~aiohttp.web.RouteTableDef`
does not require an ``app`` in the module namespace (which often leads
to circular imports).
Instead, a :class:`~aiohttp.web.RouteTableDef` is decoupled from an application instance::
routes = web.RouteTableDef()
@routes.get('/get')
async def handle_get(request):
...
@routes.post('/post')
async def handle_post(request):
...
app.router.add_routes(routes)
Does aiohttp have a concept like Flask's "blueprint" or Django's "app"?
-----------------------------------------------------------------------
If you're writing a large application, you may want to consider
using :ref:`nested applications <aiohttp-web-nested-applications>`, which
are similar to Flask's "blueprints" or Django's "apps".
See: :ref:`aiohttp-web-nested-applications`.
How do I create a route that matches urls with a given prefix?
--------------------------------------------------------------
You can do something like the following: ::
app.router.add_route('*', '/path/to/{tail:.+}', sink_handler)
The first argument, ``*``, matches any HTTP method
(*GET, POST, OPTIONS*, etc.). The second argument matches URLs with the desired prefix.
The third argument is the handler function.
Where do I put my database connection so handlers can access it?
----------------------------------------------------------------
The :class:`aiohttp.web.Application` object supports the :class:`dict`
interface and provides a place to store your database connections or any
other resource you want to share between handlers.
::
async def go(request):
db = request.app['db']
cursor = await db.cursor()
await cursor.execute('SELECT 42')
# ...
return web.Response(status=200, text='ok')
async def init_app():
app = Application()
db = await create_connection(user='user', password='123')
app['db'] = db
app.router.add_get('/', go)
return app
How can middleware store data for web handlers to use?
------------------------------------------------------
Both :class:`aiohttp.web.Request` and :class:`aiohttp.web.Application`
support the :class:`dict` interface.
Therefore, data may be stored inside a request object. ::
async def handler(request):
request['unique_key'] = data
See https://github.com/aio-libs/aiohttp_session code for an example.
The ``aiohttp_session.get_session(request)`` method uses ``SESSION_KEY``
for saving request-specific session information.
As of aiohttp 3.0, all response objects are dict-like structures as
well.
.. _aiohttp_faq_parallel_event_sources:
Can a handler receive incoming events from different sources in parallel?
-------------------------------------------------------------------------
Yes.
As an example, we may have two event sources:
1. WebSocket for events from an end user
2. Redis PubSub for events from other parts of the application
The most natural way to handle this is to create a separate task for
PubSub handling.
Parallel :meth:`aiohttp.web.WebSocketResponse.receive` calls are forbidden;
a single task should perform WebSocket reading.
However, other tasks may use the same WebSocket object for sending data to
peers. ::
async def handler(request):
ws = web.WebSocketResponse()
await ws.prepare(request)
task = asyncio.create_task(
read_subscription(ws,
request.app['redis']))
try:
async for msg in ws:
# handle incoming messages
# use ws.send_str() to send data back
...
finally:
task.cancel()
async def read_subscription(ws, redis):
channel, = await redis.subscribe('channel:1')
try:
async for msg in channel.iter():
answer = process_the_message(msg) # your function here
await ws.send_str(answer)
finally:
await redis.unsubscribe('channel:1')
.. _aiohttp_faq_terminating_websockets:
How do I programmatically close a WebSocket server-side?
--------------------------------------------------------
Let's say we have an application with two endpoints:
1. ``/echo`` a WebSocket echo server that authenticates the user
2. ``/logout_user`` that, when invoked, closes all open
WebSockets for that user.
One simple solution is to keep a shared registry of WebSocket
responses for a user in the :class:`aiohttp.web.Application` instance
and call :meth:`aiohttp.web.WebSocketResponse.close` on all of them in
``/logout_user`` handler::
async def echo_handler(request):
ws = web.WebSocketResponse()
user_id = authenticate_user(request)
await ws.prepare(request)
request.app['websockets'][user_id].add(ws)
try:
async for msg in ws:
await ws.send_str(msg.data)
finally:
request.app['websockets'][user_id].remove(ws)
return ws
async def logout_handler(request):
user_id = authenticate_user(request)
ws_closers = [ws.close()
for ws in request.app['websockets'][user_id]
if not ws.closed]
# Watch out, this will keep us from returning the response
# until all are closed
ws_closers and await asyncio.gather(*ws_closers)
return web.Response(text='OK')
def main():
loop = asyncio.get_event_loop()
app = web.Application()
app.router.add_route('GET', '/echo', echo_handler)
app.router.add_route('POST', '/logout', logout_handler)
app['websockets'] = defaultdict(set)
web.run_app(app, host='localhost', port=8080)
How do I make a request from a specific IP address?
---------------------------------------------------
If your system has several IP interfaces, you may choose the one which will
be used to bind a socket locally::
conn = aiohttp.TCPConnector(local_addr=('127.0.0.1', 0))
async with aiohttp.ClientSession(connector=conn) as session:
...
.. seealso:: :class:`aiohttp.TCPConnector` and ``local_addr`` parameter.
What is the API stability and deprecation policy?
-------------------------------------------------
*aiohttp* follows strong `Semantic Versioning <https://semver.org>`_ (SemVer).
Obsolete attributes and methods are marked as *deprecated* in the
documentation and raise :class:`DeprecationWarning` upon usage.
Assume aiohttp ``X.Y.Z``, where ``X`` is the major version,
``Y`` is the minor version and ``Z`` is the bugfix number.
For example, if the latest released version is ``aiohttp==3.0.6``:
``3.0.7`` fixes some bugs but has no new features.
``3.1.0`` introduces new features and can deprecate some APIs but never
removes them; all bug fixes from the previous release are also merged.
``4.0.0`` removes all deprecations collected from ``3.Y`` versions
**except** deprecations from the **last** ``3.Y`` release. These
deprecations will be removed by ``5.0.0``.
Unfortunately we may have to break these rules when a **security
vulnerability** is found.
If a security problem cannot be fixed without breaking backward
compatibility, a bugfix release may break compatibility. This is unlikely, but
possible.
All backward incompatible changes are explicitly marked in
:ref:`the changelog <aiohttp_changes>`.
How do I enable gzip compression globally for my entire application?
--------------------------------------------------------------------
It's impossible. Choosing what to compress and what not to compress
is a tricky matter.
If you need global compression, write a custom middleware (a minimal
sketch is shown below), or enable compression in NGINX (you are
deploying aiohttp behind a reverse proxy, right?).
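The following sketch compresses every response unconditionally; whether
that is appropriate for your payloads is up to you::
    from aiohttp import web
    @web.middleware
    async def compression_middleware(request, handler):
        response = await handler(request)
        # Negotiates gzip/deflate based on the Accept-Encoding header.
        response.enable_compression()
        return response
    app = web.Application(middlewares=[compression_middleware])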
How do I manage a ClientSession within a web server?
----------------------------------------------------
:class:`aiohttp.ClientSession` should be created once for the lifetime
of the server in order to benefit from connection pooling.
Sessions save cookies internally. If you don't need cookie processing,
use :class:`aiohttp.DummyCookieJar`. If you need separate cookies
for different HTTP calls but want to process them in logical chains, use a
single :class:`aiohttp.TCPConnector` shared by several
client sessions created with ``connector_owner=False``, as sketched below.
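A minimal sketch of the shared-connector pattern, to be run inside a
coroutine (the session names are illustrative)::
    conn = aiohttp.TCPConnector()
    session_one = aiohttp.ClientSession(connector=conn, connector_owner=False)
    session_two = aiohttp.ClientSession(connector=conn, connector_owner=False)
    # ... use the sessions; each keeps its own cookie jar ...
    await session_one.close()
    await session_two.close()
    await conn.close()  # the shared connector must be closed explicitly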
How do I access database connections from a subapplication?
-----------------------------------------------------------
Restricting access from subapplication to main (or outer) app is a
deliberate choice.
A subapplication is an isolated unit by design. If you need to share a
database object, do it explicitly::
subapp['db'] = mainapp['db']
mainapp.add_subapp('/prefix', subapp)
How do I perform operations in a request handler after sending the response?
----------------------------------------------------------------------------
Middlewares can be written to handle post-response operations, but
they run after every request. You can explicitly send the response by
calling :meth:`aiohttp.web.Response.write_eof`, which starts sending
before the handler returns, giving you a chance to execute follow-up
operations::
async def ping_handler(request):
"""Send PONG and increase DB counter."""
# explicitly send the response
resp = web.json_response({'message': 'PONG'})
await resp.prepare(request)
await resp.write_eof()
# increase the pong count
APP['db'].inc_pong()
return resp
A :class:`aiohttp.web.Response` object must be returned. This is
required by aiohttp web contracts, even though the response
has already been sent.
How do I make sure my custom middleware response will behave correctly?
------------------------------------------------------------------------
Sometimes your middleware handlers might need to send a custom response.
This is just fine as long as you always create a new
:class:`aiohttp.web.Response` object when required.
The response object is a Finite State Machine. Once it has been dispatched
by the server, it will reach its final state and cannot be used again.
The following middleware will make the server hang, once it serves the second
response::
from aiohttp import web
def misbehaved_middleware():
# don't do this!
cached = web.Response(status=200, text='Hi, I am cached!')
async def middleware(request, handler):
# ignoring response for the sake of this example
_res = await handler(request)
return cached
return middleware
The rule of thumb is *one request, one response*.
Why is creating a ClientSession outside of an event loop dangerous?
-------------------------------------------------------------------
The short answer is: the life-cycle of all asyncio objects should be
shorter than the life-cycle of the event loop.
The full explanation is longer. All asyncio objects should be correctly
finished/disconnected/closed before event loop shutdown. Otherwise the
user can get unexpected behavior. In the best case it is a warning
about an unclosed resource; in the worst case the program just hangs and
an awaited coroutine is never resumed, etc.
Consider the following code from ``mod.py``::
import aiohttp
session = aiohttp.ClientSession()
async def fetch(url):
async with session.get(url) as resp:
return await resp.text()
The session grabs the current event loop instance and stores it in a
private variable.
The main module imports the module and installs ``uvloop`` (an
alternative fast event loop implementation).
``main.py``::
import asyncio
import uvloop
import mod
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
asyncio.run(main())
The code is broken: ``session`` is bound to the default ``asyncio`` loop
at import time, but the loop is changed **after the import** by the new
event loop policy. As a result, the ``fetch()`` call hangs.
To avoid import dependency hell, *aiohttp* encourages creating the
``ClientSession`` from an async function. The same policy works for
``web.Application`` too.
Another use case is writing unit tests. Many test libraries
(*aiohttp test tools* first) create a new loop instance for every
test function execution. This is done for the sake of test isolation.
Otherwise pending activity (timers, network packets, etc.) from a
previous test may interfere with the current one, producing very cryptic
and unstable test failures.
Note: *class variables* are actually hidden globals. The following
code has the same problem as the ``mod.py`` example; the ``session``
variable is the hidden global object::
class A:
session = aiohttp.ClientSession()
async def fetch(self, url):
async with self.session.get(url) as resp:
return await resp.text()
<file_sep>/tests/test_http_writer.py
# Tests for aiohttp/http_writer.py
from unittest import mock
import pytest
from aiohttp import http
from aiohttp.test_utils import make_mocked_coro
@pytest.fixture
def buf():
return bytearray()
@pytest.fixture
def transport(buf):
transport = mock.Mock()
def write(chunk):
buf.extend(chunk)
transport.write.side_effect = write
transport.is_closing.return_value = False
return transport
@pytest.fixture
def protocol(loop, transport):
protocol = mock.Mock(transport=transport)
protocol._drain_helper = make_mocked_coro()
return protocol
def test_payloadwriter_properties(transport,
protocol, loop) -> None:
writer = http.StreamWriter(protocol, loop)
assert writer.protocol == protocol
assert writer.transport == transport
async def test_write_payload_eof(transport, protocol, loop) -> None:
write = transport.write = mock.Mock()
msg = http.StreamWriter(protocol, loop)
await msg.write(b'data1')
await msg.write(b'data2')
await msg.write_eof()
content = b''.join([c[1][0] for c in list(write.mock_calls)])
assert b'data1data2' == content.split(b'\r\n\r\n', 1)[-1]
async def test_write_payload_chunked(buf, protocol, transport, loop) -> None:
msg = http.StreamWriter(protocol, loop)
msg.enable_chunking()
await msg.write(b'data')
await msg.write_eof()
assert b'4\r\ndata\r\n0\r\n\r\n' == buf
async def test_write_payload_chunked_multiple(buf,
protocol,
transport, loop) -> None:
msg = http.StreamWriter(protocol, loop)
msg.enable_chunking()
await msg.write(b'data1')
await msg.write(b'data2')
await msg.write_eof()
assert b'5\r\ndata1\r\n5\r\ndata2\r\n0\r\n\r\n' == buf
async def test_write_payload_length(protocol, transport, loop) -> None:
write = transport.write = mock.Mock()
msg = http.StreamWriter(protocol, loop)
msg.length = 2
await msg.write(b'd')
await msg.write(b'ata')
await msg.write_eof()
content = b''.join([c[1][0] for c in list(write.mock_calls)])
assert b'da' == content.split(b'\r\n\r\n', 1)[-1]
async def test_write_payload_chunked_filter(protocol, transport, loop) -> None:
write = transport.write = mock.Mock()
msg = http.StreamWriter(protocol, loop)
msg.enable_chunking()
await msg.write(b'da')
await msg.write(b'ta')
await msg.write_eof()
content = b''.join([c[1][0] for c in list(write.mock_calls)])
assert content.endswith(b'2\r\nda\r\n2\r\nta\r\n0\r\n\r\n')
async def test_write_payload_chunked_filter_mutiple_chunks(
protocol,
transport,
loop):
write = transport.write = mock.Mock()
msg = http.StreamWriter(protocol, loop)
msg.enable_chunking()
await msg.write(b'da')
await msg.write(b'ta')
await msg.write(b'1d')
await msg.write(b'at')
await msg.write(b'a2')
await msg.write_eof()
content = b''.join([c[1][0] for c in list(write.mock_calls)])
assert content.endswith(
b'2\r\nda\r\n2\r\nta\r\n2\r\n1d\r\n2\r\nat\r\n'
b'2\r\na2\r\n0\r\n\r\n')
async def test_write_payload_deflate_compression(protocol,
transport, loop) -> None:
COMPRESSED = b'x\x9cKI,I\x04\x00\x04\x00\x01\x9b'
write = transport.write = mock.Mock()
msg = http.StreamWriter(protocol, loop)
msg.enable_compression('deflate')
await msg.write(b'data')
await msg.write_eof()
chunks = [c[1][0] for c in list(write.mock_calls)]
assert all(chunks)
content = b''.join(chunks)
assert COMPRESSED == content.split(b'\r\n\r\n', 1)[-1]
async def test_write_payload_deflate_and_chunked(
buf,
protocol,
transport,
loop):
msg = http.StreamWriter(protocol, loop)
msg.enable_compression('deflate')
msg.enable_chunking()
await msg.write(b'da')
await msg.write(b'ta')
await msg.write_eof()
thing = (
b'2\r\nx\x9c\r\n'
b'a\r\nKI,I\x04\x00\x04\x00\x01\x9b\r\n'
b'0\r\n\r\n'
)
assert thing == buf
async def test_write_drain(protocol, transport, loop) -> None:
msg = http.StreamWriter(protocol, loop)
msg.drain = make_mocked_coro()
await msg.write(b'1' * (64 * 1024 * 2), drain=False)
assert not msg.drain.called
await msg.write(b'1', drain=True)
assert msg.drain.called
assert msg.buffer_size == 0
async def test_write_calls_callback(protocol, transport, loop) -> None:
on_chunk_sent = make_mocked_coro()
msg = http.StreamWriter(
protocol, loop,
on_chunk_sent=on_chunk_sent
)
chunk = b'1'
await msg.write(chunk)
assert on_chunk_sent.called
assert on_chunk_sent.call_args == mock.call(chunk)
async def test_write_eof_calls_callback(protocol, transport, loop) -> None:
on_chunk_sent = make_mocked_coro()
msg = http.StreamWriter(
protocol, loop,
on_chunk_sent=on_chunk_sent
)
chunk = b'1'
await msg.write_eof(chunk=chunk)
assert on_chunk_sent.called
assert on_chunk_sent.call_args == mock.call(chunk)
async def test_write_to_closing_transport(protocol, transport, loop) -> None:
msg = http.StreamWriter(protocol, loop)
await msg.write(b'Before closing')
transport.is_closing.return_value = True
with pytest.raises(ConnectionResetError):
await msg.write(b'After closing')
async def test_drain(protocol, transport, loop) -> None:
msg = http.StreamWriter(protocol, loop)
await msg.drain()
assert protocol._drain_helper.called
async def test_drain_no_transport(protocol, transport, loop) -> None:
msg = http.StreamWriter(protocol, loop)
msg._protocol.transport = None
await msg.drain()
assert not protocol._drain_helper.called
<file_sep>/examples/lowlevel_srv.py
import asyncio
from aiohttp import web
async def handler(request):
return web.Response(text="OK")
async def main(loop):
server = web.Server(handler)
await loop.create_server(server, "127.0.0.1", 8080)
print("======= Serving on http://127.0.0.1:8080/ ======")
# pause here for very long time by serving HTTP requests and
# waiting for keyboard interruption
await asyncio.sleep(100*3600)
loop = asyncio.get_event_loop()
try:
loop.run_until_complete(main(loop))
except KeyboardInterrupt:
pass
loop.close()
<file_sep>/examples/cli_app.py
"""
Example of serving an Application using the `aiohttp.web` CLI.
Serve this app using::
$ python -m aiohttp.web -H localhost -P 8080 --repeat 10 cli_app:init \
> "Hello World"
Here ``--repeat`` & ``"Hello World"`` are application specific command-line
arguments. `aiohttp.web` only parses & consumes the command-line arguments it
needs (i.e. ``-H``, ``-P`` & ``entry-func``) and passes on any additional
arguments to the `cli_app:init` function for processing.
"""
from argparse import ArgumentParser
from aiohttp import web
def display_message(req):
args = req.app["args"]
text = "\n".join([args.message] * args.repeat)
return web.Response(text=text)
def init(argv):
arg_parser = ArgumentParser(
prog="aiohttp.web ...", description="Application CLI", add_help=False
)
# Positional argument
arg_parser.add_argument(
"message",
help="message to print"
)
# Optional argument
arg_parser.add_argument(
"--repeat",
help="number of times to repeat message", type=int, default="1"
)
# Avoid conflict with -h from `aiohttp.web` CLI parser
arg_parser.add_argument(
"--app-help",
help="show this message and exit", action="help"
)
args = arg_parser.parse_args(argv)
app = web.Application()
app["args"] = args
app.router.add_get('/', display_message)
return app
<file_sep>/docs/essays.rst
Essays
======
.. toctree::
new_router
whats_new_1_1
migration_to_2xx
whats_new_3_0
<file_sep>/tests/test_route_def.py
import pathlib
import pytest
from yarl import URL
from aiohttp import web
from aiohttp.web_urldispatcher import UrlDispatcher
@pytest.fixture
def router():
return UrlDispatcher()
def test_get(router) -> None:
async def handler(request):
pass
router.add_routes([web.get('/', handler)])
assert len(router.routes()) == 2 # GET and HEAD
route = list(router.routes())[1]
assert route.handler is handler
assert route.method == 'GET'
assert str(route.url_for()) == '/'
route2 = list(router.routes())[0]
assert route2.handler is handler
assert route2.method == 'HEAD'
def test_head(router) -> None:
async def handler(request):
pass
router.add_routes([web.head('/', handler)])
assert len(router.routes()) == 1
route = list(router.routes())[0]
assert route.handler is handler
assert route.method == 'HEAD'
assert str(route.url_for()) == '/'
def test_options(router) -> None:
async def handler(request):
pass
router.add_routes([web.options('/', handler)])
assert len(router.routes()) == 1
route = list(router.routes())[0]
assert route.handler is handler
assert route.method == 'OPTIONS'
assert str(route.url_for()) == '/'
def test_post(router) -> None:
async def handler(request):
pass
router.add_routes([web.post('/', handler)])
route = list(router.routes())[0]
assert route.handler is handler
assert route.method == 'POST'
assert str(route.url_for()) == '/'
def test_put(router) -> None:
async def handler(request):
pass
router.add_routes([web.put('/', handler)])
assert len(router.routes()) == 1
route = list(router.routes())[0]
assert route.handler is handler
assert route.method == 'PUT'
assert str(route.url_for()) == '/'
def test_patch(router) -> None:
async def handler(request):
pass
router.add_routes([web.patch('/', handler)])
assert len(router.routes()) == 1
route = list(router.routes())[0]
assert route.handler is handler
assert route.method == 'PATCH'
assert str(route.url_for()) == '/'
def test_delete(router) -> None:
async def handler(request):
pass
router.add_routes([web.delete('/', handler)])
assert len(router.routes()) == 1
route = list(router.routes())[0]
assert route.handler is handler
assert route.method == 'DELETE'
assert str(route.url_for()) == '/'
def test_route(router) -> None:
async def handler(request):
pass
router.add_routes([web.route('OTHER', '/', handler)])
assert len(router.routes()) == 1
route = list(router.routes())[0]
assert route.handler is handler
assert route.method == 'OTHER'
assert str(route.url_for()) == '/'
def test_static(router) -> None:
folder = pathlib.Path(__file__).parent
router.add_routes([web.static('/prefix', folder)])
assert len(router.resources()) == 1 # 2 routes: for HEAD and GET
resource = list(router.resources())[0]
info = resource.get_info()
assert info['prefix'] == '/prefix'
assert info['directory'] == folder
url = resource.url_for(filename='aiohttp.png')
assert url == URL('/prefix/aiohttp.png')
def test_head_deco(router) -> None:
routes = web.RouteTableDef()
@routes.head('/path')
async def handler(request):
pass
router.add_routes(routes)
assert len(router.routes()) == 1
route = list(router.routes())[0]
assert route.method == 'HEAD'
assert str(route.url_for()) == '/path'
def test_get_deco(router) -> None:
routes = web.RouteTableDef()
@routes.get('/path')
async def handler(request):
pass
router.add_routes(routes)
assert len(router.routes()) == 2
route1 = list(router.routes())[0]
assert route1.method == 'HEAD'
assert str(route1.url_for()) == '/path'
route2 = list(router.routes())[1]
assert route2.method == 'GET'
assert str(route2.url_for()) == '/path'
def test_post_deco(router) -> None:
routes = web.RouteTableDef()
@routes.post('/path')
async def handler(request):
pass
router.add_routes(routes)
assert len(router.routes()) == 1
route = list(router.routes())[0]
assert route.method == 'POST'
assert str(route.url_for()) == '/path'
def test_put_deco(router) -> None:
routes = web.RouteTableDef()
@routes.put('/path')
async def handler(request):
pass
router.add_routes(routes)
assert len(router.routes()) == 1
route = list(router.routes())[0]
assert route.method == 'PUT'
assert str(route.url_for()) == '/path'
def test_patch_deco(router) -> None:
routes = web.RouteTableDef()
@routes.patch('/path')
async def handler(request):
pass
router.add_routes(routes)
assert len(router.routes()) == 1
route = list(router.routes())[0]
assert route.method == 'PATCH'
assert str(route.url_for()) == '/path'
def test_delete_deco(router) -> None:
routes = web.RouteTableDef()
@routes.delete('/path')
async def handler(request):
pass
router.add_routes(routes)
assert len(router.routes()) == 1
route = list(router.routes())[0]
assert route.method == 'DELETE'
assert str(route.url_for()) == '/path'
def test_options_deco(router) -> None:
routes = web.RouteTableDef()
@routes.options('/path')
async def handler(request):
pass
router.add_routes(routes)
assert len(router.routes()) == 1
route = list(router.routes())[0]
assert route.method == 'OPTIONS'
assert str(route.url_for()) == '/path'
def test_route_deco(router) -> None:
routes = web.RouteTableDef()
@routes.route('OTHER', '/path')
async def handler(request):
pass
router.add_routes(routes)
assert len(router.routes()) == 1
route = list(router.routes())[0]
assert route.method == 'OTHER'
assert str(route.url_for()) == '/path'
def test_routedef_sequence_protocol() -> None:
routes = web.RouteTableDef()
@routes.delete('/path')
async def handler(request):
pass
assert len(routes) == 1
info = routes[0]
assert isinstance(info, web.RouteDef)
assert info in routes
assert list(routes)[0] is info
def test_repr_route_def() -> None:
routes = web.RouteTableDef()
@routes.get('/path')
async def handler(request):
pass
rd = routes[0]
assert repr(rd) == "<RouteDef GET /path -> 'handler'>"
def test_repr_route_def_with_extra_info() -> None:
routes = web.RouteTableDef()
@routes.get('/path', extra='info')
async def handler(request):
pass
rd = routes[0]
assert repr(rd) == "<RouteDef GET /path -> 'handler', extra='info'>"
def test_repr_static_def() -> None:
routes = web.RouteTableDef()
routes.static('/prefix', '/path', name='name')
rd = routes[0]
assert repr(rd) == "<StaticDef /prefix -> /path, name='name'>"
def test_repr_route_table_def() -> None:
routes = web.RouteTableDef()
@routes.get('/path')
async def handler(request):
pass
assert repr(routes) == "<RouteTableDef count=1>"
<file_sep>/docs/external.rst
Who uses aiohttp?
=================
The list of *aiohttp* users: both libraries, big projects and web sites.
Please don't hesitate to add your awesome project to the list by
making a Pull Request on GitHub_.
If you like the project -- please go to GitHub_ and press the *Star* button!
.. toctree::
third_party
built_with
powered_by
.. _GitHub: https://github.com/aio-libs/aiohttp
<file_sep>/tests/test_flowcontrol_streams.py
from unittest import mock
import pytest
from aiohttp import streams
@pytest.fixture
def protocol():
return mock.Mock(_reading_paused=False)
@pytest.fixture
def stream(loop, protocol):
out = streams.StreamReader(protocol, limit=1, loop=loop)
out._allow_pause = True
return out
@pytest.fixture
def buffer(loop, protocol):
out = streams.FlowControlDataQueue(protocol, limit=1, loop=loop)
out._allow_pause = True
return out
class TestFlowControlStreamReader:
async def test_read(self, stream) -> None:
stream.feed_data(b'da', 2)
res = await stream.read(1)
assert res == b'd'
assert not stream._protocol.resume_reading.called
async def test_read_resume_paused(self, stream) -> None:
stream.feed_data(b'test', 4)
stream._protocol._reading_paused = True
res = await stream.read(1)
assert res == b't'
assert stream._protocol.pause_reading.called
async def test_readline(self, stream) -> None:
stream.feed_data(b'd\n', 5)
res = await stream.readline()
assert res == b'd\n'
assert not stream._protocol.resume_reading.called
async def test_readline_resume_paused(self, stream) -> None:
stream._protocol._reading_paused = True
stream.feed_data(b'd\n', 5)
res = await stream.readline()
assert res == b'd\n'
assert stream._protocol.resume_reading.called
async def test_readany(self, stream) -> None:
stream.feed_data(b'data', 4)
res = await stream.readany()
assert res == b'data'
assert not stream._protocol.resume_reading.called
async def test_readany_resume_paused(self, stream) -> None:
stream._protocol._reading_paused = True
stream.feed_data(b'data', 4)
res = await stream.readany()
assert res == b'data'
assert stream._protocol.resume_reading.called
async def test_readchunk(self, stream) -> None:
stream.feed_data(b'data', 4)
res, end_of_http_chunk = await stream.readchunk()
assert res == b'data'
assert not end_of_http_chunk
assert not stream._protocol.resume_reading.called
async def test_readchunk_resume_paused(self, stream) -> None:
stream._protocol._reading_paused = True
stream.feed_data(b'data', 4)
res, end_of_http_chunk = await stream.readchunk()
assert res == b'data'
assert not end_of_http_chunk
assert stream._protocol.resume_reading.called
async def test_readexactly(self, stream) -> None:
stream.feed_data(b'data', 4)
res = await stream.readexactly(3)
assert res == b'dat'
assert not stream._protocol.resume_reading.called
async def test_feed_data(self, stream) -> None:
stream._protocol._reading_paused = False
stream.feed_data(b'datadata', 8)
assert stream._protocol.pause_reading.called
async def test_read_nowait(self, stream) -> None:
stream._protocol._reading_paused = True
stream.feed_data(b'data1', 5)
stream.feed_data(b'data2', 5)
stream.feed_data(b'data3', 5)
res = await stream.read(5)
assert res == b'data1'
assert stream._protocol.resume_reading.call_count == 0
res = stream.read_nowait(5)
assert res == b'data2'
assert stream._protocol.resume_reading.call_count == 0
res = stream.read_nowait(5)
assert res == b'data3'
assert stream._protocol.resume_reading.call_count == 1
stream._protocol._reading_paused = False
res = stream.read_nowait(5)
assert res == b''
assert stream._protocol.resume_reading.call_count == 1
class TestFlowControlDataQueue:
def test_feed_pause(self, buffer) -> None:
buffer._protocol._reading_paused = False
buffer.feed_data(object(), 100)
assert buffer._protocol.pause_reading.called
async def test_resume_on_read(self, buffer) -> None:
buffer.feed_data(object(), 100)
buffer._protocol._reading_paused = True
await buffer.read()
assert buffer._protocol.resume_reading.called
<file_sep>/tests/test_payload.py
from io import StringIO
import pytest
from aiohttp import payload
@pytest.fixture
def registry():
old = payload.PAYLOAD_REGISTRY
reg = payload.PAYLOAD_REGISTRY = payload.PayloadRegistry()
yield reg
payload.PAYLOAD_REGISTRY = old
class Payload(payload.Payload):
async def write(self, writer):
pass
def test_register_type(registry) -> None:
class TestProvider:
pass
payload.register_payload(Payload, TestProvider)
p = payload.get_payload(TestProvider())
assert isinstance(p, Payload)
def test_register_unsupported_order(registry) -> None:
class TestProvider:
pass
with pytest.raises(ValueError):
payload.register_payload(Payload, TestProvider, order=object())
def test_payload_ctor() -> None:
p = Payload('test', encoding='utf-8', filename='test.txt')
assert p._value == 'test'
assert p._encoding == 'utf-8'
assert p.size is None
assert p.filename == 'test.txt'
assert p.content_type == 'text/plain'
def test_payload_content_type() -> None:
p = Payload('test', headers={'content-type': 'application/json'})
assert p.content_type == 'application/json'
def test_bytes_payload_default_content_type() -> None:
p = payload.BytesPayload(b'data')
assert p.content_type == 'application/octet-stream'
def test_bytes_payload_explicit_content_type() -> None:
p = payload.BytesPayload(b'data', content_type='application/custom')
assert p.content_type == 'application/custom'
def test_bytes_payload_bad_type() -> None:
with pytest.raises(TypeError):
payload.BytesPayload(object())
def test_string_payload() -> None:
p = payload.StringPayload('test')
assert p.encoding == 'utf-8'
assert p.content_type == 'text/plain; charset=utf-8'
p = payload.StringPayload('test', encoding='koi8-r')
assert p.encoding == 'koi8-r'
assert p.content_type == 'text/plain; charset=koi8-r'
p = payload.StringPayload(
'test', content_type='text/plain; charset=koi8-r')
assert p.encoding == 'koi8-r'
assert p.content_type == 'text/plain; charset=koi8-r'
def test_string_io_payload() -> None:
s = StringIO('ű' * 5000)
p = payload.StringIOPayload(s)
assert p.encoding == 'utf-8'
assert p.content_type == 'text/plain; charset=utf-8'
assert p.size == 10000
def test_async_iterable_payload_default_content_type() -> None:
async def gen():
return
yield
p = payload.AsyncIterablePayload(gen())
assert p.content_type == 'application/octet-stream'
def test_async_iterable_payload_explicit_content_type() -> None:
async def gen():
return
yield
p = payload.AsyncIterablePayload(gen(), content_type='application/custom')
assert p.content_type == 'application/custom'
def test_async_iterable_payload_not_async_iterable() -> None:
with pytest.raises(TypeError):
payload.AsyncIterablePayload(object())
<file_sep>/examples/fake_server.py
import asyncio
import pathlib
import socket
import ssl
import aiohttp
from aiohttp import web
from aiohttp.resolver import DefaultResolver
from aiohttp.test_utils import unused_port
class FakeResolver:
_LOCAL_HOST = {0: '127.0.0.1',
socket.AF_INET: '127.0.0.1',
socket.AF_INET6: '::1'}
def __init__(self, fakes):
"""fakes -- dns -> port dict"""
self._fakes = fakes
self._resolver = DefaultResolver()
async def resolve(self, host, port=0, family=socket.AF_INET):
fake_port = self._fakes.get(host)
if fake_port is not None:
return [{'hostname': host,
'host': self._LOCAL_HOST[family], 'port': fake_port,
'family': family, 'proto': 0,
'flags': socket.AI_NUMERICHOST}]
else:
return await self._resolver.resolve(host, port, family)
class FakeFacebook:
def __init__(self):
self.app = web.Application()
self.app.router.add_routes(
[web.get('/v2.7/me', self.on_me),
web.get('/v2.7/me/friends', self.on_my_friends)])
self.runner = None
here = pathlib.Path(__file__)
ssl_cert = here.parent / 'server.crt'
ssl_key = here.parent / 'server.key'
self.ssl_context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
self.ssl_context.load_cert_chain(str(ssl_cert), str(ssl_key))
async def start(self):
port = unused_port()
self.runner = web.AppRunner(self.app)
await self.runner.setup()
site = web.TCPSite(self.runner, '127.0.0.1', port,
ssl_context=self.ssl_context)
await site.start()
return {'graph.facebook.com': port}
async def stop(self):
await self.runner.cleanup()
async def on_me(self, request):
return web.json_response({
"name": "<NAME>",
"id": "12345678901234567"
})
async def on_my_friends(self, request):
return web.json_response({
"data": [
{
"name": "<NAME>",
"id": "233242342342"
},
{
"name": "<NAME>",
"id": "2342342343222"
},
{
"name": "<NAME>",
"id": "234234234344"
},
],
"paging": {
"cursors": {
"before": "QVFIUjRtc2c5NEl0ajN",
"after": "QVFIUlpFQWM0TmVuaDRad0dt",
},
"next": ("https://graph.facebook.com/v2.7/12345678901234567/"
"friends?access_token=<PASSWORD>")
},
"summary": {
"total_count": 3
}})
async def main():
token = "<KEY>"
fake_facebook = FakeFacebook()
info = await fake_facebook.start()
resolver = FakeResolver(info)
connector = aiohttp.TCPConnector(resolver=resolver, ssl=False)
async with aiohttp.ClientSession(connector=connector) as session:
async with session.get('https://graph.facebook.com/v2.7/me',
params={'access_token': token}) as resp:
print(await resp.json())
async with session.get('https://graph.facebook.com/v2.7/me/friends',
params={'access_token': token}) as resp:
print(await resp.json())
await fake_facebook.stop()
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
<file_sep>/docs/whats_new_1_1.rst
=========================
What's new in aiohttp 1.1
=========================
YARL and URL encoding
======================
Since aiohttp 1.1 the library uses :term:`yarl` for URL processing.
New API
-------
:class:`yarl.URL` provides handy methods for URL manipulation.
Client API still accepts :class:`str` everywhere *url* is used,
e.g. ``session.get('http://example.com')`` works as well as
``session.get(yarl.URL('http://example.com'))``.
Internal API has been switched to :class:`yarl.URL`.
:class:`aiohttp.CookieJar` accepts :class:`~yarl.URL` instances only.
On the server side, the :class:`web.Request.url` and
:class:`web.Request.rel_url` properties have been added for representing the
absolute and relative request URL respectively.
Using these URL properties is the recommended way; the pre-existing properties
for retrieving URL parts are deprecated and will eventually be removed.
Redirection web exceptions accept :class:`yarl.URL` as the *location*
parameter. :class:`str` is still supported and will be supported forever.
Reverse URL processing for *router* has been changed.
The main API is :class:`aiohttp.web.Request.url_for(name, **kwargs)`
which returns a :class:`yarl.URL` instance for the named resource. It
does not support *query args*, but adding them is trivial:
``request.url_for('named_resource', param='a').with_query(arg='val')``.
The method returns a *relative* URL; an absolute URL may be constructed by
a ``request.url.join(request.url_for(...))`` call.
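A minimal sketch of that pattern inside a handler, using the router-based
spelling and assuming a resource registered under the name
``named_resource`` with a ``{param}`` variable (the names are illustrative
only)::
    from aiohttp import web
    async def handler(request):
        # relative URL for the named resource, e.g. URL('/a')
        rel_url = request.app.router['named_resource'].url_for(param='a')
        # query args are attached separately on the yarl.URL
        rel_url = rel_url.with_query(arg='val')
        # absolute URL: join against the current request URL
        abs_url = request.url.join(rel_url)
        return web.Response(text=str(abs_url))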
URL encoding
------------
YARL encodes all non-ASCII symbols on :class:`yarl.URL` creation.
Thus ``URL('https://www.python.org/путь')`` becomes
``'https://www.python.org/%D0%BF%D1%83%D1%82%D1%8C'``.
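For illustration, a small sketch using :term:`yarl` directly::
    from yarl import URL
    url = URL('https://www.python.org/путь')
    # the path is percent-encoded on creation ...
    assert url.raw_path == '/%D0%BF%D1%83%D1%82%D1%8C'
    # ... while the decoded, human-readable form stays accessible
    assert url.path == '/путь'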
When filling the route table it is possible to use both non-ASCII and
percent-encoded paths::
app.router.add_get('/путь', handler)
and::
app.router.add_get('/%D0%BF%D1%83%D1%82%D1%8C', handler)
are the same. Internally ``'/путь'`` is converted into its
percent-encoded representation.
Route matching also accepts both URL forms, raw and encoded, by
converting the route pattern to its *canonical* (encoded) form on route
registration.
Sub-Applications
================
Sub-applications are designed for solving the problem of a big
monolithic code base.
Let's assume we have a project with its own business logic and tools like an
administration panel and a debug toolbar.
The administration panel is a separate application by its very nature, but all
its URLs are served under a prefix like ``/admin``.
Thus we'll create a totally separate application named ``admin`` and
connect it to the main app under that prefix::
admin = web.Application()
# setup admin routes, signals and middlewares
app.add_subapp('/admin/', admin)
Middlewares and signals from ``app`` and ``admin`` are chained.
It means that if the URL is ``'/admin/something'``, middlewares from
``app`` are applied first and ``admin.middlewares`` are next in
the call chain.
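A sketch of this ordering, written with the new-style ``@web.middleware``
decorator from later aiohttp versions rather than the 1.1-era middleware
factories (handler and route names are illustrative)::
    from aiohttp import web
    @web.middleware
    async def app_mw(request, handler):
        print('outer app middleware runs first')
        return await handler(request)
    @web.middleware
    async def admin_mw(request, handler):
        print('admin middleware runs second')
        return await handler(request)
    async def hello(request):
        return web.Response(text='admin page')
    admin = web.Application(middlewares=[admin_mw])
    admin.router.add_get('/something', hello)
    app = web.Application(middlewares=[app_mw])
    app.add_subapp('/admin/', admin)
    # GET /admin/something -> app_mw, then admin_mw, then hello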
The same goes for the
:attr:`~aiohttp.web.Application.on_response_prepare` signal -- the
signal is delivered to both the top-level ``app`` and ``admin`` if
the processed URL is routed to the ``admin`` sub-application.
Common signals like :attr:`~aiohttp.web.Application.on_startup`,
:attr:`~aiohttp.web.Application.on_shutdown` and
:attr:`~aiohttp.web.Application.on_cleanup` are delivered to all
registered sub-applications. The passed parameter is the sub-application
instance, not the top-level application.
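A hedged sketch of this (the signal handler and stored key are illustrative
only)::
    from aiohttp import web
    async def init_admin(subapp):
        # ``subapp`` is the ``admin`` sub-application, not the top-level app
        subapp['storage'] = {}
    app = web.Application()
    admin = web.Application()
    admin.on_startup.append(init_admin)
    app.add_subapp('/admin/', admin)
    web.run_app(app)   # on startup, ``init_admin`` receives ``admin``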
Third-level sub-applications can be nested into second-level ones --
there is no limit on the nesting depth.
URL reversing
-------------
URL reversing for sub-applications generates URLs with the proper prefix,
but the sub-application's own router must be used to obtain the URL::
admin = web.Application()
admin.router.add_get('/resource', handler, name='name')
app.add_subapp('/admin/', admin)
url = admin.router['name'].url_for()
The generated ``url`` from the example will have the value
``URL('/admin/resource')``.
Application freezing
====================
An application can be used either as the main app (``app.make_handler()``) or as
a sub-application -- not both at the same time.
After connecting an application by an ``.add_subapp()`` call, or after the
web-server starts serving it as a top-level application, the application is
**frozen**.
It means that registering new routes, signals and middlewares is
forbidden. Changing the state (``app['name'] = 'value'``) of a frozen
application is deprecated and will eventually be removed.
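A minimal sketch of the effect (the handler and paths are illustrative)::
    from aiohttp import web
    async def handler(request):
        return web.Response(text='ok')
    app = web.Application()
    admin = web.Application()
    admin.router.add_get('/resource', handler)
    app.add_subapp('/admin/', admin)   # ``admin`` is frozen from this point on
    # registering another route on the frozen sub-application fails
    try:
        admin.router.add_get('/late', handler)
    except RuntimeError:
        pass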
<file_sep>/docs/misc.rst
.. _aiohttp-misc:
Miscellaneous
=============
Helpful pages.
.. toctree::
:name: misc
essays
glossary
.. toctree::
:titlesonly:
changes
Indices and tables
------------------
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
<file_sep>/CONTRIBUTING.rst
Contributing
============
Instructions for contributors
-----------------------------
To make a clone of the GitHub_ repo, open the link and press the
"Fork" button in the upper-right menu of the web page.
I hope everybody knows how to work with git and github nowadays :)
Workflow is pretty straightforward:
1. Clone the GitHub_ repo using the ``--recurse-submodules`` argument
2. Setup your machine with the required dev environment
3. Make a change
4. Make sure all tests passed
5. Add a file into the ``CHANGES`` folder, named after the ticket or PR number
6. Commit changes to your own aiohttp clone
7. Make a pull request from the github page of your clone against the master branch
8. Optionally make backport Pull Request(s) for landing a bug fix into released aiohttp versions.
.. important::
Please open the "`contributing <https://docs.aiohttp.org/en/stable/contributing.html>`_"
documentation page to get detailed information about all the steps.
.. _GitHub: https://github.com/aio-libs/aiohttp
<file_sep>/CHANGES.rst
=========
Changelog
=========
..
You should *NOT* be adding new change log entries to this file; it
is managed by towncrier. You *may* edit previous change logs to
fix problems like typo corrections or such.
To add a new change log entry, please see
https://pip.pypa.io/en/latest/development/#adding-a-news-entry
we named the news folder "changes".
WARNING: Don't drop the next directive!
.. towncrier release notes start
3.6.2 (2019-10-09)
==================
Features
--------
- Made exceptions pickleable. Also changed the repr of some exceptions.
`#4077 <https://github.com/aio-libs/aiohttp/issues/4077>`_
- Use ``Iterable`` type hint instead of ``Sequence`` for ``Application`` *middleware*
parameter. `#4125 <https://github.com/aio-libs/aiohttp/issues/4125>`_
Bugfixes
--------
- Reset the ``sock_read`` timeout each time data is received for a
``aiohttp.ClientResponse``. `#3808
<https://github.com/aio-libs/aiohttp/issues/3808>`_
- Fix handling of expired cookies so they are not stored in CookieJar.
`#4063 <https://github.com/aio-libs/aiohttp/issues/4063>`_
- Fix misleading message in the string representation of ``ClientConnectorError``;
``self.ssl == None`` means default SSL context, not SSL disabled `#4097
<https://github.com/aio-libs/aiohttp/issues/4097>`_
- Don't clobber HTTP status when using FileResponse.
`#4106 <https://github.com/aio-libs/aiohttp/issues/4106>`_
Improved Documentation
----------------------
- Added minimal required logging configuration to logging documentation.
`#2469 <https://github.com/aio-libs/aiohttp/issues/2469>`_
- Update docs to reflect proxy support.
`#4100 <https://github.com/aio-libs/aiohttp/issues/4100>`_
- Fix typo in code example in testing docs.
`#4108 <https://github.com/aio-libs/aiohttp/issues/4108>`_
Misc
----
- `#4102 <https://github.com/aio-libs/aiohttp/issues/4102>`_
----
3.6.1 (2019-09-19)
==================
Features
--------
- Compatibility with Python 3.8.
`#4056 <https://github.com/aio-libs/aiohttp/issues/4056>`_
Bugfixes
--------
- Correct some exception string formats
`#4068 <https://github.com/aio-libs/aiohttp/issues/4068>`_
- Emit a warning when ``ssl.OP_NO_COMPRESSION`` is
unavailable because the runtime is built against
an outdated OpenSSL.
`#4052 <https://github.com/aio-libs/aiohttp/issues/4052>`_
- Update multidict requirement to >= 4.5
`#4057 <https://github.com/aio-libs/aiohttp/issues/4057>`_
Improved Documentation
----------------------
- Provide pytest-aiohttp namespace for pytest fixtures in docs.
`#3723 <https://github.com/aio-libs/aiohttp/issues/3723>`_
----
3.6.0 (2019-09-06)
==================
Features
--------
- Add support for Named Pipes (Site and Connector) under Windows. This feature requires
Proactor event loop to work. `#3629
<https://github.com/aio-libs/aiohttp/issues/3629>`_
- Removed ``Transfer-Encoding: chunked`` header from websocket responses to be
compatible with more http proxy servers. `#3798
<https://github.com/aio-libs/aiohttp/issues/3798>`_
- Accept non-GET request for starting websocket handshake on server side.
`#3980 <https://github.com/aio-libs/aiohttp/issues/3980>`_
Bugfixes
--------
- Raise a ClientResponseError instead of an AssertionError for a blank
HTTP Reason Phrase.
`#3532 <https://github.com/aio-libs/aiohttp/issues/3532>`_
- Fix an issue where cookies would sometimes not be set during a redirect.
`#3576 <https://github.com/aio-libs/aiohttp/issues/3576>`_
- Change normalize_path_middleware to use 308 redirect instead of 301.
This behavior should prevent clients from being unable to use PUT/POST
methods on endpoints that are redirected because of a trailing slash.
`#3579 <https://github.com/aio-libs/aiohttp/issues/3579>`_
- Drop the processed task from ``all_tasks()`` list early. It prevents logging about a
task with unhandled exception when the server is used in conjunction with
``asyncio.run()``. `#3587 <https://github.com/aio-libs/aiohttp/issues/3587>`_
- ``Signal`` type annotation changed from ``Signal[Callable[['TraceConfig'],
Awaitable[None]]]`` to ``Signal[Callable[ClientSession, SimpleNamespace, ...]``.
`#3595 <https://github.com/aio-libs/aiohttp/issues/3595>`_
- Use sanitized URL as Location header in redirects
`#3614 <https://github.com/aio-libs/aiohttp/issues/3614>`_
- Improve typing annotations for multipart.py along with changes required
by mypy in files that references multipart.py.
`#3621 <https://github.com/aio-libs/aiohttp/issues/3621>`_
- Close session created inside ``aiohttp.request`` when unhandled exception occurs
`#3628 <https://github.com/aio-libs/aiohttp/issues/3628>`_
- Cleanup per-chunk data in generic data read. Memory leak fixed.
`#3631 <https://github.com/aio-libs/aiohttp/issues/3631>`_
- Use correct type for add_view and family
`#3633 <https://github.com/aio-libs/aiohttp/issues/3633>`_
- Fix _keepalive field in __slots__ of ``RequestHandler``.
`#3644 <https://github.com/aio-libs/aiohttp/issues/3644>`_
- Properly handle ConnectionResetError, to silence the "Cannot write to closing
transport" exception when clients disconnect uncleanly.
`#3648 <https://github.com/aio-libs/aiohttp/issues/3648>`_
- Suppress pytest warnings due to ``test_utils`` classes
`#3660 <https://github.com/aio-libs/aiohttp/issues/3660>`_
- Fix overshadowing of overlapped sub-application prefixes.
`#3701 <https://github.com/aio-libs/aiohttp/issues/3701>`_
- Fixed return type annotation for WSMessage.json()
`#3720 <https://github.com/aio-libs/aiohttp/issues/3720>`_
- Properly expose TooManyRedirects publicly as documented.
`#3818 <https://github.com/aio-libs/aiohttp/issues/3818>`_
- Fix missing brackets for IPv6 in proxy CONNECT request
`#3841 <https://github.com/aio-libs/aiohttp/issues/3841>`_
- Make the signature of ``aiohttp.test_utils.TestClient.request`` match
``asyncio.ClientSession.request`` according to the docs `#3852
<https://github.com/aio-libs/aiohttp/issues/3852>`_
- Use correct style for re-exported imports, makes mypy ``--strict`` mode happy.
`#3868 <https://github.com/aio-libs/aiohttp/issues/3868>`_
- Fixed type annotation for add_view method of UrlDispatcher to accept any subclass of
View `#3880 <https://github.com/aio-libs/aiohttp/issues/3880>`_
- Made cython HTTP parser set Reason-Phrase of the response to an empty string if it is
missing. `#3906 <https://github.com/aio-libs/aiohttp/issues/3906>`_
- Add URL to the string representation of ClientResponseError.
`#3959 <https://github.com/aio-libs/aiohttp/issues/3959>`_
- Accept ``istr`` keys in ``LooseHeaders`` type hints.
`#3976 <https://github.com/aio-libs/aiohttp/issues/3976>`_
- Fixed race conditions in _resolve_host caching and throttling when tracing is enabled.
`#4013 <https://github.com/aio-libs/aiohttp/issues/4013>`_
- For URLs like "unix://localhost/..." set Host HTTP header to "localhost" instead of
"localhost:None". `#4039 <https://github.com/aio-libs/aiohttp/issues/4039>`_
Improved Documentation
----------------------
- Modify documentation for Background Tasks to remove deprecated usage of event loop.
`#3526 <https://github.com/aio-libs/aiohttp/issues/3526>`_
- use ``if __name__ == '__main__':`` in server examples.
`#3775 <https://github.com/aio-libs/aiohttp/issues/3775>`_
- Update documentation reference to the default access logger.
`#3783 <https://github.com/aio-libs/aiohttp/issues/3783>`_
- Improve documentation for ``web.BaseRequest.path`` and ``web.BaseRequest.raw_path``.
`#3791 <https://github.com/aio-libs/aiohttp/issues/3791>`_
- Removed deprecation warning in tracing example docs
`#3964 <https://github.com/aio-libs/aiohttp/issues/3964>`_
----
3.5.4 (2019-01-12)
==================
Bugfixes
--------
- Fix stream ``.read()`` / ``.readany()`` / ``.iter_any()`` which used to return a
partial content only in case of compressed content
`#3525 <https://github.com/aio-libs/aiohttp/issues/3525>`_
3.5.3 (2019-01-10)
==================
Bugfixes
--------
- Fix type stubs for ``aiohttp.web.run_app(access_log=True)`` and fix edge case of
``access_log=True`` and the event loop being in debug mode. `#3504
<https://github.com/aio-libs/aiohttp/issues/3504>`_
- Fix ``aiohttp.ClientTimeout`` type annotations to accept ``None`` for fields
`#3511 <https://github.com/aio-libs/aiohttp/issues/3511>`_
- Send custom per-request cookies even if session jar is empty
`#3515 <https://github.com/aio-libs/aiohttp/issues/3515>`_
- Restore Linux binary wheels publishing on PyPI
----
3.5.2 (2019-01-08)
==================
Features
--------
- ``FileResponse`` from ``web_fileresponse.py`` uses a ``ThreadPoolExecutor`` to work
with files asynchronously. I/O based payloads from ``payload.py`` uses a
``ThreadPoolExecutor`` to work with I/O objects asynchronously. `#3313
<https://github.com/aio-libs/aiohttp/issues/3313>`_
- Internal Server Errors in plain text if the browser does not support HTML.
`#3483 <https://github.com/aio-libs/aiohttp/issues/3483>`_
Bugfixes
--------
- Preserve MultipartWriter parts headers on write. Refactor the way how
``Payload.headers`` are handled. Payload instances now always have headers and
Content-Type defined. Fix Payload Content-Disposition header reset after initial
creation. `#3035 <https://github.com/aio-libs/aiohttp/issues/3035>`_
- Log suppressed exceptions in ``GunicornWebWorker``.
`#3464 <https://github.com/aio-libs/aiohttp/issues/3464>`_
- Remove wildcard imports.
`#3468 <https://github.com/aio-libs/aiohttp/issues/3468>`_
- Use the same task for app initialization and web server handling in gunicorn workers.
It allows using Python 3.7 context vars smoothly.
`#3471 <https://github.com/aio-libs/aiohttp/issues/3471>`_
- Fix handling of chunked+gzipped response when first chunk does not give uncompressed
data `#3477 <https://github.com/aio-libs/aiohttp/issues/3477>`_
- Replace ``collections.MutableMapping`` with ``collections.abc.MutableMapping`` to
avoid a deprecation warning. `#3480
<https://github.com/aio-libs/aiohttp/issues/3480>`_
- ``Payload.size`` type annotation changed from ``Optional[float]`` to
``Optional[int]``. `#3484 <https://github.com/aio-libs/aiohttp/issues/3484>`_
- Ignore done tasks when cancelling pending activities on ``web.run_app`` finalization.
`#3497 <https://github.com/aio-libs/aiohttp/issues/3497>`_
Improved Documentation
----------------------
- Add documentation for ``aiohttp.web.HTTPException``.
`#3490 <https://github.com/aio-libs/aiohttp/issues/3490>`_
Misc
----
- `#3487 <https://github.com/aio-libs/aiohttp/issues/3487>`_
----
3.5.1 (2018-12-24)
====================
- Fix a regression about ``ClientSession._requote_redirect_url`` modification in debug
mode.
3.5.0 (2018-12-22)
====================
Features
--------
- The library type annotations are checked in strict mode now.
- Add support for setting cookies for individual request (`#2387
<https://github.com/aio-libs/aiohttp/pull/2387>`_)
- Application.add_domain implementation (`#2809
<https://github.com/aio-libs/aiohttp/pull/2809>`_)
- The default ``app`` in the request returned by ``test_utils.make_mocked_request`` can
now have objects assigned to it and retrieved using the ``[]`` operator. (`#3174
<https://github.com/aio-libs/aiohttp/pull/3174>`_)
- Make ``request.url`` accessible when transport is closed. (`#3177
<https://github.com/aio-libs/aiohttp/pull/3177>`_)
- Add ``zlib_executor_size`` argument to ``Response`` constructor to allow compression
to run in a background executor to avoid blocking the main thread and potentially
triggering health check failures. (`#3205
<https://github.com/aio-libs/aiohttp/pull/3205>`_)
- Enable users to set ``ClientTimeout`` in ``aiohttp.request`` (`#3213
<https://github.com/aio-libs/aiohttp/pull/3213>`_)
- Don't raise a warning if ``NETRC`` environment variable is not set and ``~/.netrc``
file doesn't exist. (`#3267 <https://github.com/aio-libs/aiohttp/pull/3267>`_)
- Add default logging handler to web.run_app. If the ``Application.debug`` flag is set
and the default logger ``aiohttp.access`` is used, access logs will now be output
using a *stderr* ``StreamHandler`` if no handlers are attached. Furthermore, if the
default logger has no log level set, the log level will be set to ``DEBUG``. (`#3324
<https://github.com/aio-libs/aiohttp/pull/3324>`_)
- Add method argument to ``session.ws_connect()``. Sometimes server API requires a
different HTTP method for WebSocket connection establishment. For example, ``Docker
exec`` needs POST. (`#3378 <https://github.com/aio-libs/aiohttp/pull/3378>`_)
- Create a task per request handling. (`#3406
<https://github.com/aio-libs/aiohttp/pull/3406>`_)
Bugfixes
--------
- Enable passing ``access_log_class`` via ``handler_args`` (`#3158
<https://github.com/aio-libs/aiohttp/pull/3158>`_)
- Return empty bytes with end-of-chunk marker in empty stream reader. (`#3186
<https://github.com/aio-libs/aiohttp/pull/3186>`_)
- Accept ``CIMultiDictProxy`` instances for ``headers`` argument in ``web.Response``
constructor. (`#3207 <https://github.com/aio-libs/aiohttp/pull/3207>`_)
- Don't uppercase HTTP method in parser (`#3233
<https://github.com/aio-libs/aiohttp/pull/3233>`_)
- Make method match regexp RFC-7230 compliant (`#3235
<https://github.com/aio-libs/aiohttp/pull/3235>`_)
- Add ``app.pre_frozen`` state to properly handle startup signals in
sub-applications. (`#3237 <https://github.com/aio-libs/aiohttp/pull/3237>`_)
- Enhanced parsing and validation of helpers.BasicAuth.decode. (`#3239
<https://github.com/aio-libs/aiohttp/pull/3239>`_)
- Change imports from collections module in preparation for 3.8. (`#3258
<https://github.com/aio-libs/aiohttp/pull/3258>`_)
- Ensure Host header is added first to ClientRequest to better replicate browser (`#3265
<https://github.com/aio-libs/aiohttp/pull/3265>`_)
- Fix forward compatibility with Python 3.8: importing ABCs directly from the
collections module will not be supported anymore. (`#3273
<https://github.com/aio-libs/aiohttp/pull/3273>`_)
- Keep the query string by ``normalize_path_middleware``. (`#3278
<https://github.com/aio-libs/aiohttp/pull/3278>`_)
- Fix missing parameter ``raise_for_status`` for aiohttp.request() (`#3290
<https://github.com/aio-libs/aiohttp/pull/3290>`_)
- Bracket IPv6 addresses in the HOST header (`#3304
<https://github.com/aio-libs/aiohttp/pull/3304>`_)
- Fix default message for server ping and pong frames. (`#3308
<https://github.com/aio-libs/aiohttp/pull/3308>`_)
- Fix tests/test_connector.py typo and tests/autobahn/server.py duplicate loop
def. (`#3337 <https://github.com/aio-libs/aiohttp/pull/3337>`_)
- Fix false-negative indicator end_of_HTTP_chunk in StreamReader.readchunk function
(`#3361 <https://github.com/aio-libs/aiohttp/pull/3361>`_)
- Release HTTP response before raising status exception (`#3364
<https://github.com/aio-libs/aiohttp/pull/3364>`_)
- Fix task cancellation when ``sendfile()`` syscall is used by static file
handling. (`#3383 <https://github.com/aio-libs/aiohttp/pull/3383>`_)
- Fix stack trace for ``asyncio.TimeoutError`` which was not logged, when it is caught
in the handler. (`#3414 <https://github.com/aio-libs/aiohttp/pull/3414>`_)
Improved Documentation
----------------------
- Improve documentation of ``Application.make_handler`` parameters. (`#3152
<https://github.com/aio-libs/aiohttp/pull/3152>`_)
- Fix BaseRequest.raw_headers doc. (`#3215
<https://github.com/aio-libs/aiohttp/pull/3215>`_)
- Fix typo in TypeError exception reason in ``web.Application._handle`` (`#3229
<https://github.com/aio-libs/aiohttp/pull/3229>`_)
- Make server access log format placeholder %b documentation reflect
behavior and docstring. (`#3307 <https://github.com/aio-libs/aiohttp/pull/3307>`_)
Deprecations and Removals
-------------------------
- Deprecate modification of ``session.requote_redirect_url`` (`#2278
<https://github.com/aio-libs/aiohttp/pull/2278>`_)
- Deprecate ``stream.unread_data()`` (`#3260
<https://github.com/aio-libs/aiohttp/pull/3260>`_)
- Deprecated use of boolean in ``resp.enable_compression()`` (`#3318
<https://github.com/aio-libs/aiohttp/pull/3318>`_)
- Encourage creation of aiohttp public objects inside a coroutine (`#3331
<https://github.com/aio-libs/aiohttp/pull/3331>`_)
- Drop dead ``Connection.detach()`` and ``Connection.writer``. Both methods were broken
for more than 2 years. (`#3358 <https://github.com/aio-libs/aiohttp/pull/3358>`_)
- Deprecate ``app.loop``, ``request.loop``, ``client.loop`` and ``connector.loop``
properties. (`#3374 <https://github.com/aio-libs/aiohttp/pull/3374>`_)
- Deprecate explicit debug argument. Use asyncio debug mode instead. (`#3381
<https://github.com/aio-libs/aiohttp/pull/3381>`_)
- Deprecate body parameter in HTTPException (and derived classes) constructor. (`#3385
<https://github.com/aio-libs/aiohttp/pull/3385>`_)
- Deprecate bare connector close, use ``async with connector:`` and ``await
connector.close()`` instead. (`#3417
<https://github.com/aio-libs/aiohttp/pull/3417>`_)
- Deprecate obsolete ``read_timeout`` and ``conn_timeout`` in ``ClientSession``
constructor. (`#3438 <https://github.com/aio-libs/aiohttp/pull/3438>`_)
Misc
----
- #3341, #3351
<file_sep>/.github/ISSUE_TEMPLATE/bug_report.md
---
name: 🐛 Bug report
about: Create a report to help us improve
labels: bug
assignees: aio-libs/triagers
---
🐞 **Describe the bug**
<!-- A clear and concise description of what the bug is, on the next line. -->
💡 **To Reproduce**
<!-- How to reproduce the behavior?
For example:
1. Have certain environment
2. Run given code snippet in a certain way
3. See some behavior described
Add these steps below this comment: -->
💡 **Expected behavior**
<!-- A clear and concise description of what you expected to happen. -->
📋 **Logs/tracebacks**
<!-- If applicable, add logs/tracebacks to help explain your problem. -->
```python-traceback (paste your traceback in the next line)
```
📋 **Your version of Python**
<!-- Attach your version of Python. -->
```console
$ python --version
...
```
📋 **Your version of the aiohttp/yarl/multidict distributions**
<!-- Attach your version of the distributions in the code blocks below. -->
```console
$ python -m pip show aiohttp
...
```
```console
$ python -m pip show multidict
...
```
```console
$ python -m pip show yarl
...
```
📋 **Additional context**
<!-- Add any other context about the problem here, in the next line. -->
<!-- Describe the environment you have that led to your issue.
This includes aiohttp version, OS, proxy server and other bits that
are related to your case.
IMPORTANT: aiohttp is both a server framework and a client library.
To avoid confusion, please state whether your issue concerns the
'server', the 'client' or 'both'.
-->
<file_sep>/examples/web_rewrite_headers_middleware.py
#!/usr/bin/env python3
"""
Example for rewriting response headers by middleware.
"""
from aiohttp import web
async def handler(request):
return web.Response(text="Everything is fine")
@web.middleware
async def middleware(request, handler):
try:
response = await handler(request)
except web.HTTPException as exc:
raise exc
if not response.prepared:
response.headers['SERVER'] = "Secured Server Software"
return response
def init():
app = web.Application(middlewares=[middleware])
app.router.add_get('/', handler)
return app
web.run_app(init())
<file_sep>/tests/autobahn/client.py
#!/usr/bin/env python3
import asyncio
import aiohttp
async def client(loop, url, name):
ws = await aiohttp.ws_connect(url + '/getCaseCount')
num_tests = int((await ws.receive()).data)
print('running %d cases' % num_tests)
await ws.close()
for i in range(1, num_tests + 1):
print('running test case:', i)
text_url = url + '/runCase?case=%d&agent=%s' % (i, name)
ws = await aiohttp.ws_connect(text_url)
while True:
msg = await ws.receive()
if msg.type == aiohttp.WSMsgType.TEXT:
await ws.send_str(msg.data)
elif msg.type == aiohttp.WSMsgType.BINARY:
await ws.send_bytes(msg.data)
elif msg.type == aiohttp.WSMsgType.CLOSE:
await ws.close()
break
else:
break
url = url + '/updateReports?agent=%s' % name
ws = await aiohttp.ws_connect(url)
await ws.close()
async def run(loop, url, name):
try:
await client(loop, url, name)
except Exception:
import traceback
traceback.print_exc()
if __name__ == '__main__':
loop = asyncio.get_event_loop()
try:
loop.run_until_complete(run(loop, 'http://localhost:9001', 'aiohttp'))
except KeyboardInterrupt:
pass
finally:
loop.close()
<file_sep>/docs/structures.rst
.. currentmodule:: aiohttp
.. _aiohttp-structures:
Common data structures
======================
Common data structures used by *aiohttp* internally.
FrozenList
----------
A list-like structure which implements
:class:`collections.abc.MutableSequence`.
The list is *mutable* unless :meth:`FrozenList.freeze` is called;
after that, list modification raises :exc:`RuntimeError`.
.. class:: FrozenList(items)
Construct a new *non-frozen* list from *items* iterable.
The list implements all :class:`collections.abc.MutableSequence`
methods plus two additional APIs.
.. attribute:: frozen
A read-only property, ``True`` if the list is *frozen*
(modifications are forbidden).
.. method:: freeze()
Freeze the list. There is no way to *thaw* it back.
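A short usage sketch; the import below assumes the internal
``aiohttp.frozenlist`` module, while the class is documented here under the
top-level ``aiohttp`` namespace::
    from aiohttp.frozenlist import FrozenList
    fl = FrozenList([1, 2, 3])
    fl.append(4)          # fine while the list is not frozen
    fl.freeze()
    assert fl.frozen
    try:
        fl.append(5)      # any modification now raises RuntimeError
    except RuntimeError:
        pass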
ChainMapProxy
-------------
An *immutable* version of :class:`collections.ChainMap`. Internally
the proxy is a list of mappings (dictionaries); if the requested key
is not present in the first mapping, the second is looked up, and so on.
The class supports :class:`collections.abc.Mapping` interface.
.. class:: ChainMapProxy(maps)
Create a new chained mapping proxy from a list of mappings (*maps*).
.. versionadded:: 3.2
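A short usage sketch; plain dicts are used for illustration, and the class is
assumed to be importable from the top-level ``aiohttp`` namespace as
documented above (otherwise it lives in ``aiohttp.helpers``)::
    from aiohttp import ChainMapProxy
    defaults = {'debug': False, 'name': 'app'}
    overrides = {'debug': True}
    proxy = ChainMapProxy([overrides, defaults])
    assert proxy['debug'] is True      # found in the first mapping
    assert proxy['name'] == 'app'      # falls through to the second mapping
    assert len(proxy) == 2             # unique keys across all mappings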
<file_sep>/tests/test_multipart_helpers.py
import pytest
import aiohttp
from aiohttp import content_disposition_filename, parse_content_disposition
class TestParseContentDisposition:
# http://greenbytes.de/tech/tc2231/
def test_parse_empty(self) -> None:
disptype, params = parse_content_disposition(None)
assert disptype is None
assert {} == params
def test_inlonly(self) -> None:
disptype, params = parse_content_disposition('inline')
assert 'inline' == disptype
assert {} == params
def test_inlonlyquoted(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionHeader):
disptype, params = parse_content_disposition('"inline"')
assert disptype is None
assert {} == params
def test_semicolon(self) -> None:
disptype, params = parse_content_disposition(
'form-data; name="data"; filename="file ; name.mp4"')
assert disptype == 'form-data'
assert params == {'name': 'data', 'filename': 'file ; name.mp4'}
def test_inlwithasciifilename(self) -> None:
disptype, params = parse_content_disposition(
'inline; filename="foo.html"')
assert 'inline' == disptype
assert {'filename': 'foo.html'} == params
def test_inlwithfnattach(self) -> None:
disptype, params = parse_content_disposition(
'inline; filename="Not an attachment!"')
assert 'inline' == disptype
assert {'filename': 'Not an attachment!'} == params
def test_attonly(self) -> None:
disptype, params = parse_content_disposition('attachment')
assert 'attachment' == disptype
assert {} == params
def test_attonlyquoted(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionHeader):
disptype, params = parse_content_disposition('"attachment"')
assert disptype is None
assert {} == params
def test_attonlyucase(self) -> None:
disptype, params = parse_content_disposition('ATTACHMENT')
assert 'attachment' == disptype
assert {} == params
def test_attwithasciifilename(self) -> None:
disptype, params = parse_content_disposition(
'attachment; filename="foo.html"')
assert 'attachment' == disptype
assert {'filename': 'foo.html'} == params
def test_inlwithasciifilenamepdf(self) -> None:
disptype, params = parse_content_disposition(
'attachment; filename="foo.pdf"')
assert 'attachment' == disptype
assert {'filename': 'foo.pdf'} == params
def test_attwithasciifilename25(self) -> None:
disptype, params = parse_content_disposition(
'attachment; filename="0000000000111111111122222"')
assert 'attachment' == disptype
assert {'filename': '0000000000111111111122222'} == params
def test_attwithasciifilename35(self) -> None:
disptype, params = parse_content_disposition(
'attachment; filename="00000000001111111111222222222233333"')
assert 'attachment' == disptype
assert {'filename': '00000000001111111111222222222233333'} == params
def test_attwithasciifnescapedchar(self) -> None:
disptype, params = parse_content_disposition(
r'attachment; filename="f\oo.html"')
assert 'attachment' == disptype
assert {'filename': 'foo.html'} == params
def test_attwithasciifnescapedquote(self) -> None:
disptype, params = parse_content_disposition(
'attachment; filename="\"quoting\" tested.html"')
assert 'attachment' == disptype
assert {'filename': '"quoting" tested.html'} == params
@pytest.mark.skip('need a smarter parser which respects quoted text')
def test_attwithquotedsemicolon(self) -> None:
disptype, params = parse_content_disposition(
'attachment; filename="Here\'s a semicolon;.html"')
assert 'attachment' == disptype
assert {'filename': 'Here\'s a semicolon;.html'} == params
def test_attwithfilenameandextparam(self) -> None:
disptype, params = parse_content_disposition(
'attachment; foo="bar"; filename="foo.html"')
assert 'attachment' == disptype
assert {'filename': 'foo.html', 'foo': 'bar'} == params
def test_attwithfilenameandextparamescaped(self) -> None:
disptype, params = parse_content_disposition(
'attachment; foo="\"\\";filename="foo.html"')
assert 'attachment' == disptype
assert {'filename': 'foo.html', 'foo': '"\\'} == params
def test_attwithasciifilenameucase(self) -> None:
disptype, params = parse_content_disposition(
'attachment; FILENAME="foo.html"')
assert 'attachment' == disptype
assert {'filename': 'foo.html'} == params
def test_attwithasciifilenamenq(self) -> None:
disptype, params = parse_content_disposition(
'attachment; filename=foo.html')
assert 'attachment' == disptype
assert {'filename': 'foo.html'} == params
def test_attwithtokfncommanq(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment; filename=foo,bar.html')
assert disptype is None
assert {} == params
def test_attwithasciifilenamenqs(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment; filename=foo.html ;')
assert disptype is None
assert {} == params
def test_attemptyparam(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment; ;filename=foo')
assert disptype is None
assert {} == params
def test_attwithasciifilenamenqws(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment; filename=foo bar.html')
assert disptype is None
assert {} == params
def test_attwithfntokensq(self) -> None:
disptype, params = parse_content_disposition(
"attachment; filename='foo.html'")
assert 'attachment' == disptype
assert {'filename': "'foo.html'"} == params
def test_attwithisofnplain(self) -> None:
disptype, params = parse_content_disposition(
'attachment; filename="foo-ä.html"')
assert 'attachment' == disptype
assert {'filename': 'foo-ä.html'} == params
def test_attwithutf8fnplain(self) -> None:
disptype, params = parse_content_disposition(
'attachment; filename="foo-ä.html"')
assert 'attachment' == disptype
assert {'filename': 'foo-ä.html'} == params
def test_attwithfnrawpctenca(self) -> None:
disptype, params = parse_content_disposition(
'attachment; filename="foo-%41.html"')
assert 'attachment' == disptype
assert {'filename': 'foo-%41.html'} == params
def test_attwithfnusingpct(self) -> None:
disptype, params = parse_content_disposition(
'attachment; filename="50%.html"')
assert 'attachment' == disptype
assert {'filename': '50%.html'} == params
def test_attwithfnrawpctencaq(self) -> None:
disptype, params = parse_content_disposition(
r'attachment; filename="foo-%\41.html"')
assert 'attachment' == disptype
assert {'filename': r'foo-%41.html'} == params
def test_attwithnamepct(self) -> None:
disptype, params = parse_content_disposition(
'attachment; filename="foo-%41.html"')
assert 'attachment' == disptype
assert {'filename': 'foo-%41.html'} == params
def test_attwithfilenamepctandiso(self) -> None:
disptype, params = parse_content_disposition(
'attachment; filename="ä-%41.html"')
assert 'attachment' == disptype
assert {'filename': 'ä-%41.html'} == params
def test_attwithfnrawpctenclong(self) -> None:
disptype, params = parse_content_disposition(
'attachment; filename="foo-%c3%a4-%e2%82%ac.html"')
assert 'attachment' == disptype
assert {'filename': 'foo-%c3%a4-%e2%82%ac.html'} == params
def test_attwithasciifilenamews1(self) -> None:
disptype, params = parse_content_disposition(
'attachment; filename ="foo.html"')
assert 'attachment' == disptype
assert {'filename': 'foo.html'} == params
def test_attwith2filenames(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment; filename="foo.html"; filename="bar.html"')
assert disptype is None
assert {} == params
def test_attfnbrokentoken(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment; filename=foo[1](2).html')
assert disptype is None
assert {} == params
def test_attfnbrokentokeniso(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment; filename=foo-ä.html')
assert disptype is None
assert {} == params
def test_attfnbrokentokenutf(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment; filename=foo-ä.html')
assert disptype is None
assert {} == params
def test_attmissingdisposition(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'filename=foo.html')
assert disptype is None
assert {} == params
def test_attmissingdisposition2(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'x=y; filename=foo.html')
assert disptype is None
assert {} == params
def test_attmissingdisposition3(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'"foo; filename=bar;baz"; filename=qux')
assert disptype is None
assert {} == params
def test_attmissingdisposition4(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'filename=foo.html, filename=bar.html')
assert disptype is None
assert {} == params
def test_emptydisposition(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'; filename=foo.html')
assert disptype is None
assert {} == params
def test_doublecolon(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
': inline; attachment; filename=foo.html')
assert disptype is None
assert {} == params
def test_attandinline(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'inline; attachment; filename=foo.html')
assert disptype is None
assert {} == params
def test_attandinline2(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment; inline; filename=foo.html')
assert disptype is None
assert {} == params
def test_attbrokenquotedfn(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment; filename="foo.html".txt')
assert disptype is None
assert {} == params
def test_attbrokenquotedfn2(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment; filename="bar')
assert disptype is None
assert {} == params
def test_attbrokenquotedfn3(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment; filename=foo"bar;baz"qux')
assert disptype is None
assert {} == params
def test_attmultinstances(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment; filename=foo.html, attachment; filename=bar.html')
assert disptype is None
assert {} == params
def test_attmissingdelim(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment; foo=foo filename=bar')
assert disptype is None
assert {} == params
def test_attmissingdelim2(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment; filename=bar foo=foo')
assert disptype is None
assert {} == params
def test_attmissingdelim3(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'attachment filename=bar')
assert disptype is None
assert {} == params
def test_attreversed(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionHeader):
disptype, params = parse_content_disposition(
'filename=foo.html; attachment')
assert disptype is None
assert {} == params
def test_attconfusedparam(self) -> None:
disptype, params = parse_content_disposition(
'attachment; xfilename=foo.html')
assert 'attachment' == disptype
assert {'xfilename': 'foo.html'} == params
def test_attabspath(self) -> None:
disptype, params = parse_content_disposition(
'attachment; filename="/foo.html"')
assert 'attachment' == disptype
assert {'filename': 'foo.html'} == params
def test_attabspathwin(self) -> None:
disptype, params = parse_content_disposition(
'attachment; filename="\\foo.html"')
assert 'attachment' == disptype
assert {'filename': 'foo.html'} == params
def test_attcdate(self) -> None:
disptype, params = parse_content_disposition(
'attachment; creation-date="Wed, 12 Feb 1997 16:29:51 -0500"')
assert 'attachment' == disptype
assert {'creation-date': 'Wed, 12 Feb 1997 16:29:51 -0500'} == params
def test_attmdate(self) -> None:
disptype, params = parse_content_disposition(
'attachment; modification-date="Wed, 12 Feb 1997 16:29:51 -0500"')
assert 'attachment' == disptype
assert {'modification-date':
'Wed, 12 Feb 1997 16:29:51 -0500'} == params
def test_dispext(self) -> None:
disptype, params = parse_content_disposition('foobar')
assert 'foobar' == disptype
assert {} == params
def test_dispextbadfn(self) -> None:
disptype, params = parse_content_disposition(
'attachment; example="filename=example.txt"')
assert 'attachment' == disptype
assert {'example': 'filename=example.txt'} == params
def test_attwithisofn2231iso(self) -> None:
disptype, params = parse_content_disposition(
"attachment; filename*=iso-8859-1''foo-%E4.html")
assert 'attachment' == disptype
assert {'filename*': 'foo-ä.html'} == params
def test_attwithfn2231utf8(self) -> None:
disptype, params = parse_content_disposition(
"attachment; filename*=UTF-8''foo-%c3%a4-%e2%82%ac.html")
assert 'attachment' == disptype
assert {'filename*': 'foo-ä-€.html'} == params
def test_attwithfn2231noc(self) -> None:
disptype, params = parse_content_disposition(
"attachment; filename*=''foo-%c3%a4-%e2%82%ac.html")
assert 'attachment' == disptype
assert {'filename*': 'foo-ä-€.html'} == params
def test_attwithfn2231utf8comp(self) -> None:
disptype, params = parse_content_disposition(
"attachment; filename*=UTF-8''foo-a%cc%88.html")
assert 'attachment' == disptype
assert {'filename*': 'foo-ä.html'} == params
@pytest.mark.skip('should raise decoding error: %82 is invalid for latin1')
def test_attwithfn2231utf8_bad(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionParam):
disptype, params = parse_content_disposition(
"attachment; filename*=iso-8859-1''foo-%c3%a4-%e2%82%ac.html")
assert 'attachment' == disptype
assert {} == params
@pytest.mark.skip('should raise decoding error: %E4 is invalid for utf-8')
def test_attwithfn2231iso_bad(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionParam):
disptype, params = parse_content_disposition(
"attachment; filename*=utf-8''foo-%E4.html")
assert 'attachment' == disptype
assert {} == params
def test_attwithfn2231ws1(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionParam):
disptype, params = parse_content_disposition(
"attachment; filename *=UTF-8''foo-%c3%a4.html")
assert 'attachment' == disptype
assert {} == params
def test_attwithfn2231ws2(self) -> None:
disptype, params = parse_content_disposition(
"attachment; filename*= UTF-8''foo-%c3%a4.html")
assert 'attachment' == disptype
assert {'filename*': 'foo-ä.html'} == params
def test_attwithfn2231ws3(self) -> None:
disptype, params = parse_content_disposition(
"attachment; filename* =UTF-8''foo-%c3%a4.html")
assert 'attachment' == disptype
assert {'filename*': 'foo-ä.html'} == params
def test_attwithfn2231quot(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionParam):
disptype, params = parse_content_disposition(
"attachment; filename*=\"UTF-8''foo-%c3%a4.html\"")
assert 'attachment' == disptype
assert {} == params
def test_attwithfn2231quot2(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionParam):
disptype, params = parse_content_disposition(
"attachment; filename*=\"foo%20bar.html\"")
assert 'attachment' == disptype
assert {} == params
def test_attwithfn2231singleqmissing(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionParam):
disptype, params = parse_content_disposition(
"attachment; filename*=UTF-8'foo-%c3%a4.html")
assert 'attachment' == disptype
assert {} == params
@pytest.mark.skip('urllib.parse.unquote is tolerant of standalone % chars')
def test_attwithfn2231nbadpct1(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionParam):
disptype, params = parse_content_disposition(
"attachment; filename*=UTF-8''foo%")
assert 'attachment' == disptype
assert {} == params
@pytest.mark.skip('urllib.parse.unquote is tolerant of standalone % chars')
def test_attwithfn2231nbadpct2(self) -> None:
with pytest.warns(aiohttp.BadContentDispositionParam):
disptype, params = parse_content_disposition(
"attachment; filename*=UTF-8''f%oo.html")
assert 'attachment' == disptype
assert {} == params
def test_attwithfn2231dpct(self) -> None:
disptype, params = parse_content_disposition(
"attachment; filename*=UTF-8''A-%2541.html")
assert 'attachment' == disptype
assert {'filename*': 'A-%41.html'} == params
def test_attwithfn2231abspathdisguised(self) -> None:
disptype, params = parse_content_disposition(
"attachment; filename*=UTF-8''%5cfoo.html")
assert 'attachment' == disptype
assert {'filename*': '\\foo.html'} == params
def test_attfncont(self) -> None:
disptype, params = parse_content_disposition(
'attachment; filename*0="foo."; filename*1="html"')
assert 'attachment' == disptype
assert {'filename*0': 'foo.',
'filename*1': 'html'} == params
def test_attfncontqs(self) -> None:
disptype, params = parse_content_disposition(
r'attachment; filename*0="foo"; filename*1="\b\a\r.html"')
assert 'attachment' == disptype
assert {'filename*0': 'foo',
                'filename*1': 'bar.html'} == params

    def test_attfncontenc(self) -> None:
        disptype, params = parse_content_disposition(
            'attachment; filename*0*=UTF-8''foo-%c3%a4; filename*1=".html"')
        assert 'attachment' == disptype
        assert {'filename*0*': 'UTF-8''foo-%c3%a4',
                'filename*1': '.html'} == params

    def test_attfncontlz(self) -> None:
        disptype, params = parse_content_disposition(
            'attachment; filename*0="foo"; filename*01="bar"')
        assert 'attachment' == disptype
        assert {'filename*0': 'foo',
                'filename*01': 'bar'} == params

    def test_attfncontnc(self) -> None:
        disptype, params = parse_content_disposition(
            'attachment; filename*0="foo"; filename*2="bar"')
        assert 'attachment' == disptype
        assert {'filename*0': 'foo',
                'filename*2': 'bar'} == params

    def test_attfnconts1(self) -> None:
        disptype, params = parse_content_disposition(
            'attachment; filename*0="foo."; filename*2="html"')
        assert 'attachment' == disptype
        assert {'filename*0': 'foo.',
                'filename*2': 'html'} == params

    def test_attfncontord(self) -> None:
        disptype, params = parse_content_disposition(
            'attachment; filename*1="bar"; filename*0="foo"')
        assert 'attachment' == disptype
        assert {'filename*0': 'foo',
                'filename*1': 'bar'} == params

    def test_attfnboth(self) -> None:
        disptype, params = parse_content_disposition(
            'attachment; filename="foo-ae.html";'
            " filename*=UTF-8''foo-%c3%a4.html")
        assert 'attachment' == disptype
        assert {'filename': 'foo-ae.html',
                'filename*': 'foo-ä.html'} == params

    def test_attfnboth2(self) -> None:
        disptype, params = parse_content_disposition(
            "attachment; filename*=UTF-8''foo-%c3%a4.html;"
            ' filename="foo-ae.html"')
        assert 'attachment' == disptype
        assert {'filename': 'foo-ae.html',
                'filename*': 'foo-ä.html'} == params

    def test_attfnboth3(self) -> None:
        disptype, params = parse_content_disposition(
            "attachment; filename*0*=ISO-8859-15''euro-sign%3d%a4;"
            " filename*=ISO-8859-1''currency-sign%3d%a4")
        assert 'attachment' == disptype
        assert {'filename*': 'currency-sign=¤',
                'filename*0*': "ISO-8859-15''euro-sign%3d%a4"} == params

    def test_attnewandfn(self) -> None:
        disptype, params = parse_content_disposition(
            'attachment; foobar=x; filename="foo.html"')
        assert 'attachment' == disptype
        assert {'foobar': 'x',
                'filename': 'foo.html'} == params

    def test_attrfc2047token(self) -> None:
        with pytest.warns(aiohttp.BadContentDispositionHeader):
            disptype, params = parse_content_disposition(
                'attachment; filename==?ISO-8859-1?Q?foo-=E4.html?=')
        assert disptype is None
        assert {} == params

    def test_attrfc2047quoted(self) -> None:
        disptype, params = parse_content_disposition(
            'attachment; filename="=?ISO-8859-1?Q?foo-=E4.html?="')
        assert 'attachment' == disptype
        assert {'filename': '=?ISO-8859-1?Q?foo-=E4.html?='} == params

    def test_bad_continuous_param(self) -> None:
        with pytest.warns(aiohttp.BadContentDispositionParam):
            disptype, params = parse_content_disposition(
                'attachment; filename*0=foo bar')
        assert 'attachment' == disptype
        assert {} == params


class TestContentDispositionFilename:
    # http://greenbytes.de/tech/tc2231/

    def test_no_filename(self) -> None:
        assert content_disposition_filename({}) is None
        assert content_disposition_filename({'foo': 'bar'}) is None

    def test_filename(self) -> None:
        params = {'filename': 'foo.html'}
        assert 'foo.html' == content_disposition_filename(params)

    def test_filename_ext(self) -> None:
        params = {'filename*': 'файл.html'}
        assert 'файл.html' == content_disposition_filename(params)

    def test_attfncont(self) -> None:
        params = {'filename*0': 'foo.', 'filename*1': 'html'}
        assert 'foo.html' == content_disposition_filename(params)

    def test_attfncontqs(self) -> None:
        params = {'filename*0': 'foo', 'filename*1': 'bar.html'}
        assert 'foobar.html' == content_disposition_filename(params)

    def test_attfncontenc(self) -> None:
        params = {'filename*0*': "UTF-8''foo-%c3%a4",
                  'filename*1': '.html'}
        assert 'foo-ä.html' == content_disposition_filename(params)

    def test_attfncontlz(self) -> None:
        params = {'filename*0': 'foo',
                  'filename*01': 'bar'}
        assert 'foo' == content_disposition_filename(params)

    def test_attfncontnc(self) -> None:
        params = {'filename*0': 'foo',
                  'filename*2': 'bar'}
        assert 'foo' == content_disposition_filename(params)

    def test_attfnconts1(self) -> None:
        params = {'filename*1': 'foo',
                  'filename*2': 'bar'}
        assert content_disposition_filename(params) is None

    def test_attfnboth(self) -> None:
        params = {'filename': 'foo-ae.html',
                  'filename*': 'foo-ä.html'}
        assert 'foo-ä.html' == content_disposition_filename(params)

    def test_attfnboth3(self) -> None:
        params = {'filename*0*': "ISO-8859-15''euro-sign%3d%a4",
                  'filename*': 'currency-sign=¤'}
        assert 'currency-sign=¤' == content_disposition_filename(params)

    def test_attrfc2047quoted(self) -> None:
        params = {'filename': '=?ISO-8859-1?Q?foo-=E4.html?='}
        assert '=?ISO-8859-1?Q?foo-=E4.html?=' == content_disposition_filename(
            params)
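

# Illustrative sketch (not part of the original test suite): a minimal round
# trip through the two helpers exercised above. It relies on the same names
# this module already uses (parse_content_disposition and
# content_disposition_filename); the header value below is only an example.
if __name__ == '__main__':
    disptype, params = parse_content_disposition(
        'attachment; filename="report.csv"')
    # A quoted filename parameter parses into the params dict ...
    assert disptype == 'attachment'
    # ... and content_disposition_filename picks the best filename candidate.
    assert content_disposition_filename(params) == 'report.csv'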
<file_sep>/HISTORY.rst
3.4.4 (2018-09-05)
==================
- Fix installation from sources when compiling toolkit is not available (`#3241 <https://github.com/aio-libs/aiohttp/pull/3241>`_)
3.4.3 (2018-09-04)
==================
- Add ``app.pre_frozen`` state to properly handle startup signals in sub-applications. (`#3237 <https://github.com/aio-libs/aiohttp/pull/3237>`_)
3.4.2 (2018-09-01)
==================
- Fix ``iter_chunks`` type annotation (`#3230 <https://github.com/aio-libs/aiohttp/pull/3230>`_)
3.4.1 (2018-08-28)
==================
- Fix empty header parsing regression. (`#3218 <https://github.com/aio-libs/aiohttp/pull/3218>`_)
- Fix BaseRequest.raw_headers doc. (`#3215 <https://github.com/aio-libs/aiohttp/pull/3215>`_)
- Fix documentation building on ReadTheDocs (`#3221 <https://github.com/aio-libs/aiohttp/pull/3221>`_)
3.4.0 (2018-08-25)
==================
Features
--------
- Add type hints (`#3049 <https://github.com/aio-libs/aiohttp/pull/3049>`_)
- Add ``raise_for_status`` request parameter; a usage sketch follows this list (`#3073 <https://github.com/aio-libs/aiohttp/pull/3073>`_)
- Add type hints to HTTP client (`#3092 <https://github.com/aio-libs/aiohttp/pull/3092>`_)
- Minor server optimizations (`#3095 <https://github.com/aio-libs/aiohttp/pull/3095>`_)
- Preserve the cause when `HTTPException` is raised from another exception. (`#3096 <https://github.com/aio-libs/aiohttp/pull/3096>`_)
- Add `close_boundary` option in `MultipartWriter.write` method. Support streaming (`#3104 <https://github.com/aio-libs/aiohttp/pull/3104>`_)
- Added a ``remove_slash`` option to the ``normalize_path_middleware`` factory. (`#3173 <https://github.com/aio-libs/aiohttp/pull/3173>`_)
- The class `AbstractRouteDef` is importable from `aiohttp.web`. (`#3183 <https://github.com/aio-libs/aiohttp/pull/3183>`_)
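
A minimal sketch of how the new ``raise_for_status`` request parameter can be
used; the URL and coroutine name below are illustrative only::

    import asyncio

    import aiohttp

    async def fetch() -> None:
        async with aiohttp.ClientSession() as session:
            # With raise_for_status=True this single request raises
            # aiohttp.ClientResponseError on 4xx/5xx responses.
            async with session.get('https://example.com',
                                   raise_for_status=True) as resp:
                print(resp.status)

    asyncio.get_event_loop().run_until_complete(fetch())
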
Bugfixes
--------
- Prevent double closing when client connection is released before the
last ``data_received()`` callback. (`#3031 <https://github.com/aio-libs/aiohttp/pull/3031>`_)
- Make redirect with `normalize_path_middleware` work when using url encoded paths. (`#3051 <https://github.com/aio-libs/aiohttp/pull/3051>`_)
- Postpone web task creation to connection establishment. (`#3052 <https://github.com/aio-libs/aiohttp/pull/3052>`_)
- Fix ``sock_read`` timeout. (`#3053 <https://github.com/aio-libs/aiohttp/pull/3053>`_)
- When using a server-request body as the `data=` argument of a client request, iterate over the content with `readany` instead of `readline` to avoid `Line too long` errors. (`#3054 <https://github.com/aio-libs/aiohttp/pull/3054>`_)
- fix `UrlDispatcher` has no attribute `add_options`, add `web.options` (`#3062 <https://github.com/aio-libs/aiohttp/pull/3062>`_)
- correct filename in content-disposition with multipart body (`#3064 <https://github.com/aio-libs/aiohttp/pull/3064>`_)
- Many HTTP proxies have buggy keepalive support; do not reuse a connection but
  close it after processing every response. (`#3070 <https://github.com/aio-libs/aiohttp/pull/3070>`_)
- Raise 413 "Payload Too Large" rather than ``ValueError`` in ``request.post()``
  and add a helpful debug message to 413 responses (`#3087 <https://github.com/aio-libs/aiohttp/pull/3087>`_)
- Fix `StreamResponse` equality, now that they are `MutableMapping` objects. (`#3100 <https://github.com/aio-libs/aiohttp/pull/3100>`_)
- Fix server request objects comparison (`#3116 <https://github.com/aio-libs/aiohttp/pull/3116>`_)
- Do not hang on `206 Partial Content` response with `Content-Encoding: gzip` (`#3123 <https://github.com/aio-libs/aiohttp/pull/3123>`_)
- Fix timeout precondition checkers (`#3145 <https://github.com/aio-libs/aiohttp/pull/3145>`_)
Improved Documentation
----------------------
- Add a new FAQ entry that clarifies that you should not reuse response
objects in middleware functions. (`#3020 <https://github.com/aio-libs/aiohttp/pull/3020>`_)
- Add FAQ section "Why is creating a ClientSession outside of an event loop dangerous?" (`#3072 <https://github.com/aio-libs/aiohttp/pull/3072>`_)
- Fix link to Rambler (`#3115 <https://github.com/aio-libs/aiohttp/pull/3115>`_)
- Fix TCPSite documentation on the Server Reference page. (`#3146 <https://github.com/aio-libs/aiohttp/pull/3146>`_)
- Fix documentation build configuration file for Windows. (`#3147 <https://github.com/aio-libs/aiohttp/pull/3147>`_)
- Remove no longer existing lingering_timeout parameter of Application.make_handler from documentation. (`#3151 <https://github.com/aio-libs/aiohttp/pull/3151>`_)
- Mention that ``app.make_handler`` is deprecated, recommend to use runners
API instead. (`#3157 <https://github.com/aio-libs/aiohttp/pull/3157>`_)
Deprecations and Removals
-------------------------
- Drop ``loop.current_task()`` from ``helpers.current_task()`` (`#2826 <https://github.com/aio-libs/aiohttp/pull/2826>`_)
- Drop ``reader`` parameter from ``request.multipart()``. (`#3090 <https://github.com/aio-libs/aiohttp/pull/3090>`_)
3.3.2 (2018-06-12)
==================
- Many HTTP proxies have buggy keepalive support; do not reuse a connection but
  close it after processing every response. (`#3070 <https://github.com/aio-libs/aiohttp/pull/3070>`_)
- Provide vendor source files in tarball (`#3076 <https://github.com/aio-libs/aiohttp/pull/3076>`_)
3.3.1 (2018-06-05)
==================
- Fix ``sock_read`` timeout. (`#3053 <https://github.com/aio-libs/aiohttp/pull/3053>`_)
- When using a server-request body as the ``data=`` argument of a client request,
iterate over the content with ``readany`` instead of ``readline`` to avoid ``Line
too long`` errors. (`#3054 <https://github.com/aio-libs/aiohttp/pull/3054>`_)
3.3.0 (2018-06-01)
==================
Features
--------
- Raise ``ConnectionResetError`` instead of ``CancelledError`` on trying to
write to a closed stream. (`#2499 <https://github.com/aio-libs/aiohttp/pull/2499>`_)
- Implement ``ClientTimeout`` class and support socket read timeout; a usage sketch follows this list. (`#2768 <https://github.com/aio-libs/aiohttp/pull/2768>`_)
- Enable logging when ``aiohttp.web`` is used as a program (`#2956 <https://github.com/aio-libs/aiohttp/pull/2956>`_)
- Add canonical property to resources (`#2968 <https://github.com/aio-libs/aiohttp/pull/2968>`_)
- Forbid reading response BODY after release (`#2983 <https://github.com/aio-libs/aiohttp/pull/2983>`_)
- Implement base protocol class to avoid a dependency from internal
``asyncio.streams.FlowControlMixin`` (`#2986 <https://github.com/aio-libs/aiohttp/pull/2986>`_)
- Cythonize ``@helpers.reify``, 5% boost on macro benchmark (`#2995 <https://github.com/aio-libs/aiohttp/pull/2995>`_)
- Optimize HTTP parser (`#3015 <https://github.com/aio-libs/aiohttp/pull/3015>`_)
- Implement ``runner.addresses`` property. (`#3036 <https://github.com/aio-libs/aiohttp/pull/3036>`_)
- Use ``bytearray`` instead of a list of ``bytes`` in websocket reader. It
improves websocket message reading a little. (`#3039 <https://github.com/aio-libs/aiohttp/pull/3039>`_)
- Remove heartbeat on closing connection on keepalive timeout. The used hack
violates HTTP protocol. (`#3041 <https://github.com/aio-libs/aiohttp/pull/3041>`_)
- Limit websocket message size on reading to 4 MB by default. (`#3045 <https://github.com/aio-libs/aiohttp/pull/3045>`_)
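
A minimal sketch of the ``ClientTimeout`` class mentioned above; the timeout
values and URL below are illustrative only::

    import asyncio

    import aiohttp

    async def fetch() -> None:
        # total covers the whole request, connect the connection setup,
        # and sock_read each individual socket read.
        timeout = aiohttp.ClientTimeout(total=60, connect=10, sock_read=30)
        async with aiohttp.ClientSession(timeout=timeout) as session:
            async with session.get('https://example.com') as resp:
                await resp.text()

    asyncio.get_event_loop().run_until_complete(fetch())
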
Bugfixes
--------
- Don't reuse a connection with the same URL but different proxy/TLS settings
(`#2981 <https://github.com/aio-libs/aiohttp/pull/2981>`_)
- When parsing the Forwarded header, the optional port number is now preserved.
(`#3009 <https://github.com/aio-libs/aiohttp/pull/3009>`_)
Improved Documentation
----------------------
- Make Change Log more visible in docs (`#3029 <https://github.com/aio-libs/aiohttp/pull/3029>`_)
- Make style and grammar improvements on the FAQ page. (`#3030 <https://github.com/aio-libs/aiohttp/pull/3030>`_)
- Document that signal handlers should be async functions since aiohttp 3.0
(`#3032 <https://github.com/aio-libs/aiohttp/pull/3032>`_)
Deprecations and Removals
-------------------------
- Deprecate custom application's router. (`#3021 <https://github.com/aio-libs/aiohttp/pull/3021>`_)
Misc
----
- #3008, #3011
3.2.1 (2018-05-10)
==================
- Don't reuse a connection with the same URL but different proxy/TLS settings
(`#2981 <https://github.com/aio-libs/aiohttp/pull/2981>`_)
3.2.0 (2018-05-06)
==================
Features
--------
- Raise ``TooManyRedirects`` exception when client gets redirected too many
times instead of returning last response. (`#2631 <https://github.com/aio-libs/aiohttp/pull/2631>`_)
- Extract route definitions into separate ``web_routedef.py`` file (`#2876 <https://github.com/aio-libs/aiohttp/pull/2876>`_)
- Raise an exception on request body reading after sending response. (`#2895 <https://github.com/aio-libs/aiohttp/pull/2895>`_)
- ClientResponse and RequestInfo now have real_url property, which is request
url without fragment part being stripped (`#2925 <https://github.com/aio-libs/aiohttp/pull/2925>`_)
- Speed up connector limiting (`#2937 <https://github.com/aio-libs/aiohttp/pull/2937>`_)
- Added ``links`` property to the ClientResponse object (`#2948 <https://github.com/aio-libs/aiohttp/pull/2948>`_)
- Add ``request.config_dict`` for exposing nested applications data. (`#2949 <https://github.com/aio-libs/aiohttp/pull/2949>`_)
- Speed up HTTP headers serialization, server micro-benchmark runs 5% faster
now. (`#2957 <https://github.com/aio-libs/aiohttp/pull/2957>`_)
- Apply assertions in debug mode only (`#2966 <https://github.com/aio-libs/aiohttp/pull/2966>`_)
Bugfixes
--------
- expose property `app` for TestClient (`#2891 <https://github.com/aio-libs/aiohttp/pull/2891>`_)
- Call on_chunk_sent when write_eof takes as a param the last chunk (`#2909 <https://github.com/aio-libs/aiohttp/pull/2909>`_)
- A closing bracket was added to `__repr__` of resources (`#2935 <https://github.com/aio-libs/aiohttp/pull/2935>`_)
- Fix compression of FileResponse (`#2942 <https://github.com/aio-libs/aiohttp/pull/2942>`_)
- Fixes some bugs in the limit connection feature (`#2964 <https://github.com/aio-libs/aiohttp/pull/2964>`_)
Improved Documentation
----------------------
- Drop ``async_timeout`` usage from documentation for client API in favor of
``timeout`` parameter. (`#2865 <https://github.com/aio-libs/aiohttp/pull/2865>`_)
- Improve Gunicorn logging documentation (`#2921 <https://github.com/aio-libs/aiohttp/pull/2921>`_)
- Replace multipart writer `.serialize()` method with `.write()` in
documentation. (`#2965 <https://github.com/aio-libs/aiohttp/pull/2965>`_)
Deprecations and Removals
-------------------------
- Deprecate Application.make_handler() (`#2938 <https://github.com/aio-libs/aiohttp/pull/2938>`_)
Misc
----
- #2958
3.1.3 (2018-04-12)
==================
- Fix cancellation broadcast during DNS resolve (`#2910 <https://github.com/aio-libs/aiohttp/pull/2910>`_)
3.1.2 (2018-04-05)
==================
- Make ``LineTooLong`` exception more detailed about actual data size (`#2863 <https://github.com/aio-libs/aiohttp/pull/2863>`_)
- Call ``on_chunk_sent`` when write_eof takes as a param the last chunk (`#2909 <https://github.com/aio-libs/aiohttp/pull/2909>`_)
3.1.1 (2018-03-27)
==================
- Support *asynchronous iterators* (and *asynchronous generators* as
well) in both client and server API as request / response BODY
payloads. (`#2802 <https://github.com/aio-libs/aiohttp/pull/2802>`_)
3.1.0 (2018-03-21)
==================
Welcome to the aiohttp 3.1 release.

This is an *incremental* release, fully backward compatible with *aiohttp
3.0*, but it adds several new features.

The most visible one is ``app.add_routes()`` (an alias for the existing
``app.router.add_routes()``). The addition matters because all *aiohttp*
docs now use the ``app.add_routes()`` call in code snippets. All your
existing code still registers routes / resources without any warning, but
the preferred style has changed: the noisy ``app.router.add_get()`` is
replaced by ``app.add_routes()``.

The library does not express a preference between decorators::

    routes = web.RouteTableDef()

    @routes.get('/')
    async def hello(request):
        return web.Response(text="Hello, world")

    app.add_routes(routes)

and route tables as a list::

    async def hello(request):
        return web.Response(text="Hello, world")

    app.add_routes([web.get('/', hello)])

Both ways are equivalent; choose whichever suits your code style.

We also have a lot of minor features, bug fixes and documentation updates;
see below.
Features
--------
- Relax JSON content-type checking in the ``ClientResponse.json()`` to allow
"application/xxx+json" instead of strict "application/json". (`#2206 <https://github.com/aio-libs/aiohttp/pull/2206>`_)
- Bump C HTTP parser to version 2.8 (`#2730 <https://github.com/aio-libs/aiohttp/pull/2730>`_)
- Accept a coroutine as an application factory in ``web.run_app`` and gunicorn
worker. (`#2739 <https://github.com/aio-libs/aiohttp/pull/2739>`_)
- Implement application cleanup context (``app.cleanup_ctx`` property). (`#2747 <https://github.com/aio-libs/aiohttp/pull/2747>`_)
- Make ``writer.write_headers`` a coroutine. (`#2762 <https://github.com/aio-libs/aiohttp/pull/2762>`_)
- Add tracking signals for getting request/response bodies. (`#2767 <https://github.com/aio-libs/aiohttp/pull/2767>`_)
- Deprecate ClientResponseError.code in favor of .status to keep similarity
with response classes. (`#2781 <https://github.com/aio-libs/aiohttp/pull/2781>`_)
- Implement ``app.add_routes()`` method. (`#2787 <https://github.com/aio-libs/aiohttp/pull/2787>`_)
- Implement ``web.static()`` and ``RouteTableDef.static()`` API. (`#2795 <https://github.com/aio-libs/aiohttp/pull/2795>`_)
- Install a test event loop as default by ``asyncio.set_event_loop()``. The
change affects aiohttp test utils but backward compatibility is not broken
for 99.99% of use cases. (`#2804 <https://github.com/aio-libs/aiohttp/pull/2804>`_)
- Refactor ``ClientResponse`` constructor: make logically required constructor
arguments mandatory, drop ``_post_init()`` method. (`#2820 <https://github.com/aio-libs/aiohttp/pull/2820>`_)
- Use ``app.add_routes()`` in server docs everywhere (`#2830 <https://github.com/aio-libs/aiohttp/pull/2830>`_)
- Websockets refactoring, all websocket writer methods are converted into
coroutines. (`#2836 <https://github.com/aio-libs/aiohttp/pull/2836>`_)
- Provide ``Content-Range`` header for ``Range`` requests (`#2844 <https://github.com/aio-libs/aiohttp/pull/2844>`_)
Bugfixes
--------
- Fix websocket client return EofStream. (`#2784 <https://github.com/aio-libs/aiohttp/pull/2784>`_)
- Fix websocket demo. (`#2789 <https://github.com/aio-libs/aiohttp/pull/2789>`_)
- Property ``BaseRequest.http_range`` now returns a Python-like slice when
  requesting the tail of the range. It is now indicated by a negative value in
  ``range.start`` rather than in ``range.stop`` (`#2805 <https://github.com/aio-libs/aiohttp/pull/2805>`_)
- Close a connection if an unexpected exception occurs while sending a request
(`#2827 <https://github.com/aio-libs/aiohttp/pull/2827>`_)
- Fix firing DNS tracing events. (`#2841 <https://github.com/aio-libs/aiohttp/pull/2841>`_)
Improved Documentation
----------------------
- Document behavior when cchardet detects encodings that are unknown to Python.
(`#2732 <https://github.com/aio-libs/aiohttp/pull/2732>`_)
- Add diagrams for tracing request life style. (`#2748 <https://github.com/aio-libs/aiohttp/pull/2748>`_)
- Drop removed functionality for passing ``StreamReader`` as data at client
side. (`#2793 <https://github.com/aio-libs/aiohttp/pull/2793>`_)
3.0.9 (2018-03-14)
==================
- Close a connection if an unexpected exception occurs while sending a request
(`#2827 <https://github.com/aio-libs/aiohttp/pull/2827>`_)
3.0.8 (2018-03-12)
==================
- Use ``asyncio.current_task()`` on Python 3.7 (`#2825 <https://github.com/aio-libs/aiohttp/pull/2825>`_)
3.0.7 (2018-03-08)
==================
- Fix SSL proxy support by client. (`#2810 <https://github.com/aio-libs/aiohttp/pull/2810>`_)
- Restore an imperative check in ``setup.py`` for the Python version. The check
  works in parallel with the environment marker. As a result, an error about
  unsupported Python versions is raised even on outdated systems with very old
  ``setuptools`` versions installed. (`#2813 <https://github.com/aio-libs/aiohttp/pull/2813>`_)
3.0.6 (2018-03-05)
==================
- Add ``_reuse_address`` and ``_reuse_port`` to
``web_runner.TCPSite.__slots__``. (`#2792 <https://github.com/aio-libs/aiohttp/pull/2792>`_)
3.0.5 (2018-02-27)
==================
- Fix ``InvalidStateError`` on processing a sequence of two
``RequestHandler.data_received`` calls on web server. (`#2773 <https://github.com/aio-libs/aiohttp/pull/2773>`_)
3.0.4 (2018-02-26)
==================
- Fix ``IndexError`` in HTTP request handling by server. (`#2752 <https://github.com/aio-libs/aiohttp/pull/2752>`_)
- Fix MultipartWriter.append* no longer returning part/payload. (`#2759 <https://github.com/aio-libs/aiohttp/pull/2759>`_)
3.0.3 (2018-02-25)
==================
- Relax the ``attrs`` dependency to the minimal actually supported version,
  17.0.3. The change avoids version conflicts with currently existing test
  tools.
3.0.2 (2018-02-23)
==================
Security Fix
------------
- Prevent Windows absolute URLs in static files. Paths like
``/static/D:\path`` and ``/static/\\hostname\drive\path`` are
forbidden.
3.0.1
=====
- Technical release for fixing distribution problems.
3.0.0 (2018-02-12)
==================
Features
--------
- Speed up the `PayloadWriter.write` method for large request bodies. (`#2126 <https://github.com/aio-libs/aiohttp/pull/2126>`_)
- StreamResponse and Response are now MutableMappings. (`#2246 <https://github.com/aio-libs/aiohttp/pull/2246>`_)
- ClientSession publishes a set of signals to track the HTTP request execution.
(`#2313 <https://github.com/aio-libs/aiohttp/pull/2313>`_)
- Content-Disposition fast access in ClientResponse (`#2455 <https://github.com/aio-libs/aiohttp/pull/2455>`_)
- Added support to Flask-style decorators with class-based Views. (`#2472 <https://github.com/aio-libs/aiohttp/pull/2472>`_)
- Signal handlers (registered callbacks) should be coroutines. (`#2480 <https://github.com/aio-libs/aiohttp/pull/2480>`_)
- Support ``async with test_client.ws_connect(...)`` (`#2525 <https://github.com/aio-libs/aiohttp/pull/2525>`_)
- Introduce *site* and *application runner* as underlying API for `web.run_app`
  implementation; a usage sketch follows this list. (`#2530 <https://github.com/aio-libs/aiohttp/pull/2530>`_)
- Only quote multipart boundary when necessary and sanitize input (`#2544 <https://github.com/aio-libs/aiohttp/pull/2544>`_)
- Make the `aiohttp.ClientResponse.get_encoding` method public with the
processing of invalid charset while detecting content encoding. (`#2549 <https://github.com/aio-libs/aiohttp/pull/2549>`_)
- Add optional configurable per message compression for
`ClientWebSocketResponse` and `WebSocketResponse`. (`#2551 <https://github.com/aio-libs/aiohttp/pull/2551>`_)
- Add hysteresis to `StreamReader` to prevent flipping between paused and
resumed states too often. (`#2555 <https://github.com/aio-libs/aiohttp/pull/2555>`_)
- Support `.netrc` by `trust_env` (`#2581 <https://github.com/aio-libs/aiohttp/pull/2581>`_)
- Avoid creating a new resource when adding a route with the same name and
  path as the last added resource (`#2586 <https://github.com/aio-libs/aiohttp/pull/2586>`_)
- `MultipartWriter.boundary` is `str` now. (`#2589 <https://github.com/aio-libs/aiohttp/pull/2589>`_)
- Allow a custom port to be used by `TestServer` (and associated pytest
fixtures) (`#2613 <https://github.com/aio-libs/aiohttp/pull/2613>`_)
- Add param access_log_class to web.run_app function (`#2615 <https://github.com/aio-libs/aiohttp/pull/2615>`_)
- Add ``ssl`` parameter to client API (`#2626 <https://github.com/aio-libs/aiohttp/pull/2626>`_)
- Fix the performance issue introduced by #2577. When the user has installed no
  middlewares, no additional, useless code is executed. (`#2629 <https://github.com/aio-libs/aiohttp/pull/2629>`_)
- Rename PayloadWriter to StreamWriter (`#2654 <https://github.com/aio-libs/aiohttp/pull/2654>`_)
- New options *reuse_port*, *reuse_address* are added to `run_app` and
`TCPSite`. (`#2679 <https://github.com/aio-libs/aiohttp/pull/2679>`_)
- Use custom classes to pass client signals parameters (`#2686 <https://github.com/aio-libs/aiohttp/pull/2686>`_)
- Use ``attrs`` library for data classes, replace `namedtuple`. (`#2690 <https://github.com/aio-libs/aiohttp/pull/2690>`_)
- Pytest fixtures renaming, add ``aiohttp_`` prefix (`#2578 <https://github.com/aio-libs/aiohttp/pull/2578>`_)
- Add ``aiohttp-`` prefix for ``pytest-aiohttp`` command line
parameters (`#2578 <https://github.com/aio-libs/aiohttp/pull/2578>`_)
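
A minimal sketch of the *application runner* / *site* API mentioned above;
the handler, host and port below are illustrative only::

    import asyncio

    from aiohttp import web

    async def hello(request):
        return web.Response(text='Hello, world')

    async def serve() -> None:
        app = web.Application()
        app.router.add_get('/', hello)
        runner = web.AppRunner(app)
        await runner.setup()
        site = web.TCPSite(runner, 'localhost', 8080)
        await site.start()
        try:
            # Serve until cancelled; cleanup() releases the sockets.
            await asyncio.sleep(3600)
        finally:
            await runner.cleanup()

    asyncio.get_event_loop().run_until_complete(serve())
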
Bugfixes
--------
- Correctly process upgrade request from server to HTTP2. ``aiohttp`` does not
  support HTTP2 yet; the protocol is not upgraded, but the response is handled
  correctly. (`#2277 <https://github.com/aio-libs/aiohttp/pull/2277>`_)
- Fix ClientConnectorSSLError and ClientProxyConnectionError for proxy
connector (`#2408 <https://github.com/aio-libs/aiohttp/pull/2408>`_)
- Fix connector convert OSError to ClientConnectorError (`#2423 <https://github.com/aio-libs/aiohttp/pull/2423>`_)
- Fix connection attempts for multiple dns hosts (`#2424 <https://github.com/aio-libs/aiohttp/pull/2424>`_)
- Fix writing to closed transport by raising `asyncio.CancelledError` (`#2499 <https://github.com/aio-libs/aiohttp/pull/2499>`_)
- Fix warning in `ClientSession.__del__` by no longer trying to close it.
  (`#2523 <https://github.com/aio-libs/aiohttp/pull/2523>`_)
- Fixed race-condition for iterating addresses from the DNSCache. (`#2620 <https://github.com/aio-libs/aiohttp/pull/2620>`_)
- Fix default value of `access_log_format` argument in `web.run_app` (`#2649 <https://github.com/aio-libs/aiohttp/pull/2649>`_)
- Freeze sub-application on adding to parent app (`#2656 <https://github.com/aio-libs/aiohttp/pull/2656>`_)
- Do percent encoding for `.url_for()` parameters (`#2668 <https://github.com/aio-libs/aiohttp/pull/2668>`_)
- Correctly process request start time and multiple request/response
headers in access log extra (`#2641 <https://github.com/aio-libs/aiohttp/pull/2641>`_)
Improved Documentation
----------------------
- Improve tutorial docs, using `literalinclude` to link to the actual files.
(`#2396 <https://github.com/aio-libs/aiohttp/pull/2396>`_)
- Small improvement docs: better example for file uploads. (`#2401 <https://github.com/aio-libs/aiohttp/pull/2401>`_)
- Rename `from_env` to `trust_env` in client reference. (`#2451 <https://github.com/aio-libs/aiohttp/pull/2451>`_)
- Fixed mistype in `Proxy Support` section where `trust_env` parameter was
used in `session.get("http://python.org", trust_env=True)` method instead of
aiohttp.ClientSession constructor as follows:
`aiohttp.ClientSession(trust_env=True)`. (`#2688 <https://github.com/aio-libs/aiohttp/pull/2688>`_)
- Fix issue with unittest example not compiling in testing docs. (`#2717 <https://github.com/aio-libs/aiohttp/pull/2717>`_)
Deprecations and Removals
-------------------------
- Simplify HTTP pipelining implementation (`#2109 <https://github.com/aio-libs/aiohttp/pull/2109>`_)
- Drop `StreamReaderPayload` and `DataQueuePayload`. (`#2257 <https://github.com/aio-libs/aiohttp/pull/2257>`_)
- Drop `md5` and `sha1` finger-prints (`#2267 <https://github.com/aio-libs/aiohttp/pull/2267>`_)
- Drop WSMessage.tp (`#2321 <https://github.com/aio-libs/aiohttp/pull/2321>`_)
- Drop Python 3.4 and Python 3.5.0, 3.5.1, 3.5.2. Minimal supported Python
versions are 3.5.3 and 3.6.0. `yield from` is gone, use `async/await` syntax.
(`#2343 <https://github.com/aio-libs/aiohttp/pull/2343>`_)
- Drop `aiohttp.Timeout` and use `async_timeout.timeout` instead. (`#2348 <https://github.com/aio-libs/aiohttp/pull/2348>`_)
- Drop `resolve` param from TCPConnector. (`#2377 <https://github.com/aio-libs/aiohttp/pull/2377>`_)
- Add DeprecationWarning for returning HTTPException (`#2415 <https://github.com/aio-libs/aiohttp/pull/2415>`_)
- `send_str()`, `send_bytes()`, `send_json()`, `ping()` and `pong()` are
genuine async functions now. (`#2475 <https://github.com/aio-libs/aiohttp/pull/2475>`_)
- Drop undocumented `app.on_pre_signal` and `app.on_post_signal`. Signal
handlers should be coroutines, support for regular functions is dropped.
(`#2480 <https://github.com/aio-libs/aiohttp/pull/2480>`_)
- `StreamResponse.drain()` is not a part of public API anymore, just use `await
StreamResponse.write()`. `StreamResponse.write` is converted to async
function. (`#2483 <https://github.com/aio-libs/aiohttp/pull/2483>`_)
- Drop deprecated `slow_request_timeout` param and ``**kwargs`` from
  `RequestHandler`. (`#2500 <https://github.com/aio-libs/aiohttp/pull/2500>`_)
- Drop deprecated `resource.url()`. (`#2501 <https://github.com/aio-libs/aiohttp/pull/2501>`_)
- Remove `%u` and `%l` format specifiers from access log format. (`#2506 <https://github.com/aio-libs/aiohttp/pull/2506>`_)
- Drop deprecated `request.GET` property. (`#2547 <https://github.com/aio-libs/aiohttp/pull/2547>`_)
- Simplify stream classes: drop `ChunksQueue` and `FlowControlChunksQueue`,
merge `FlowControlStreamReader` functionality into `StreamReader`, drop
`FlowControlStreamReader` name. (`#2555 <https://github.com/aio-libs/aiohttp/pull/2555>`_)
- Do not create a new resource on `router.add_get(..., allow_head=True)`
(`#2585 <https://github.com/aio-libs/aiohttp/pull/2585>`_)
- Drop access to TCP tuning options from PayloadWriter and Response classes
(`#2604 <https://github.com/aio-libs/aiohttp/pull/2604>`_)
- Drop deprecated `encoding` parameter from client API (`#2606 <https://github.com/aio-libs/aiohttp/pull/2606>`_)
- Deprecate ``verify_ssl``, ``ssl_context`` and ``fingerprint`` parameters in
client API (`#2626 <https://github.com/aio-libs/aiohttp/pull/2626>`_)
- Get rid of the legacy class StreamWriter. (`#2651 <https://github.com/aio-libs/aiohttp/pull/2651>`_)
- Forbid non-strings in `resource.url_for()` parameters. (`#2668 <https://github.com/aio-libs/aiohttp/pull/2668>`_)
- Deprecate inheritance from ``ClientSession`` and ``web.Application`` and
custom user attributes for ``ClientSession``, ``web.Request`` and
``web.Application`` (`#2691 <https://github.com/aio-libs/aiohttp/pull/2691>`_)
- Drop `resp = await aiohttp.request(...)` syntax for sake of `async with
aiohttp.request(...) as resp:`. (`#2540 <https://github.com/aio-libs/aiohttp/pull/2540>`_)
- Forbid synchronous context managers for `ClientSession` and test
server/client. (`#2362 <https://github.com/aio-libs/aiohttp/pull/2362>`_)
Misc
----
- #2552
2.3.10 (2018-02-02)
===================
- Fix 100% CPU usage on HTTP GET and websocket connection just after it (`#1955 <https://github.com/aio-libs/aiohttp/pull/1955>`_)
- Patch broken `ssl.match_hostname()` on Python<3.7 (`#2674 <https://github.com/aio-libs/aiohttp/pull/2674>`_)
2.3.9 (2018-01-16)
==================
- Fix colon handing in path for dynamic resources (`#2670 <https://github.com/aio-libs/aiohttp/pull/2670>`_)
2.3.8 (2018-01-15)
==================
- Do not use `yarl.unquote` internal function in aiohttp. Fix
incorrectly unquoted path part in URL dispatcher (`#2662 <https://github.com/aio-libs/aiohttp/pull/2662>`_)
- Fix compatibility with `yarl==1.0.0` (`#2662 <https://github.com/aio-libs/aiohttp/pull/2662>`_)
2.3.7 (2017-12-27)
==================
- Fixed race-condition for iterating addresses from the DNSCache. (`#2620 <https://github.com/aio-libs/aiohttp/pull/2620>`_)
- Fix docstring for request.host (`#2591 <https://github.com/aio-libs/aiohttp/pull/2591>`_)
- Fix docstring for request.remote (`#2592 <https://github.com/aio-libs/aiohttp/pull/2592>`_)
2.3.6 (2017-12-04)
==================
- Correct `request.app` context (for handlers not just middlewares). (`#2577 <https://github.com/aio-libs/aiohttp/pull/2577>`_)
2.3.5 (2017-11-30)
==================
- Fix compatibility with `pytest` 3.3+ (`#2565 <https://github.com/aio-libs/aiohttp/pull/2565>`_)
2.3.4 (2017-11-29)
==================
- Make `request.app` point to proper application instance when using nested
applications (with middlewares). (`#2550 <https://github.com/aio-libs/aiohttp/pull/2550>`_)
- Change base class of ClientConnectorSSLError to ClientSSLError from
ClientConnectorError. (`#2563 <https://github.com/aio-libs/aiohttp/pull/2563>`_)
- Return client connection back to free pool on error in `connector.connect()`.
(`#2567 <https://github.com/aio-libs/aiohttp/pull/2567>`_)
2.3.3 (2017-11-17)
==================
- Having a `;` in Response content type does not assume it contains a charset
anymore. (`#2197 <https://github.com/aio-libs/aiohttp/pull/2197>`_)
- Use `getattr(asyncio, 'async')` for keeping compatibility with Python 3.7.
(`#2476 <https://github.com/aio-libs/aiohttp/pull/2476>`_)
- Ignore `NotImplementedError` raised by `set_child_watcher` from `uvloop`.
(`#2491 <https://github.com/aio-libs/aiohttp/pull/2491>`_)
- Fix warning in `ClientSession.__del__` by no longer trying to close it.
  (`#2523 <https://github.com/aio-libs/aiohttp/pull/2523>`_)
- Fixed typos in the Third-party libraries page and added async-v20 to the list
  (`#2510 <https://github.com/aio-libs/aiohttp/pull/2510>`_)
2.3.2 (2017-11-01)
==================
- Fix passing client max size on cloning request obj. (`#2385 <https://github.com/aio-libs/aiohttp/pull/2385>`_)
- Fix ClientConnectorSSLError and ClientProxyConnectionError for proxy
connector. (`#2408 <https://github.com/aio-libs/aiohttp/pull/2408>`_)
- Drop generated `_http_parser` shared object from tarball distribution. (`#2414 <https://github.com/aio-libs/aiohttp/pull/2414>`_)
- Fix connector convert OSError to ClientConnectorError. (`#2423 <https://github.com/aio-libs/aiohttp/pull/2423>`_)
- Fix connection attempts for multiple dns hosts. (`#2424 <https://github.com/aio-libs/aiohttp/pull/2424>`_)
- Fix ValueError for AF_INET6 sockets if a preexisting INET6 socket is passed to
  the `aiohttp.web.run_app` function. (`#2431 <https://github.com/aio-libs/aiohttp/pull/2431>`_)
- `_SessionRequestContextManager` closes the session properly now. (`#2441 <https://github.com/aio-libs/aiohttp/pull/2441>`_)
- Rename `from_env` to `trust_env` in client reference. (`#2451 <https://github.com/aio-libs/aiohttp/pull/2451>`_)
2.3.1 (2017-10-18)
==================
- Relax attribute lookup in warning about old-styled middleware (`#2340 <https://github.com/aio-libs/aiohttp/pull/2340>`_)
2.3.0 (2017-10-18)
==================
Features
--------
- Add SSL related params to `ClientSession.request` (`#1128 <https://github.com/aio-libs/aiohttp/pull/1128>`_)
- Make enable_compression work on HTTP/1.0 (`#1828 <https://github.com/aio-libs/aiohttp/pull/1828>`_)
- Deprecate registering synchronous web handlers (`#1993 <https://github.com/aio-libs/aiohttp/pull/1993>`_)
- Switch to `multidict 3.0`. All HTTP headers preserve casing now but are
  compared in a case-insensitive way. (`#1994 <https://github.com/aio-libs/aiohttp/pull/1994>`_)
- Improvement for `normalize_path_middleware`. Added possibility to handle URLs
with query string. (`#1995 <https://github.com/aio-libs/aiohttp/pull/1995>`_)
- Use towncrier for CHANGES.txt build (`#1997 <https://github.com/aio-libs/aiohttp/pull/1997>`_)
- Implement `trust_env=True` param in `ClientSession`. (`#1998 <https://github.com/aio-libs/aiohttp/pull/1998>`_)
- Added variable to customize proxy headers (`#2001 <https://github.com/aio-libs/aiohttp/pull/2001>`_)
- Implement `router.add_routes` and router decorators. (`#2004 <https://github.com/aio-libs/aiohttp/pull/2004>`_)
- Deprecated `BaseRequest.has_body` in favor of `BaseRequest.can_read_body`;
  added the `BaseRequest.body_exists` attribute that stays static for the
  lifetime of the request (`#2005 <https://github.com/aio-libs/aiohttp/pull/2005>`_)
- Provide `BaseRequest.loop` attribute (`#2024 <https://github.com/aio-libs/aiohttp/pull/2024>`_)
- Make `_CoroGuard` awaitable and fix `ClientSession.close` warning message
(`#2026 <https://github.com/aio-libs/aiohttp/pull/2026>`_)
- Responses to redirects without Location header are returned instead of
raising a RuntimeError (`#2030 <https://github.com/aio-libs/aiohttp/pull/2030>`_)
- Added `get_client`, `get_server`, `setUpAsync` and `tearDownAsync` methods to
AioHTTPTestCase (`#2032 <https://github.com/aio-libs/aiohttp/pull/2032>`_)
- Add automatically a SafeChildWatcher to the test loop (`#2058 <https://github.com/aio-libs/aiohttp/pull/2058>`_)
- add ability to disable automatic response decompression (`#2110 <https://github.com/aio-libs/aiohttp/pull/2110>`_)
- Add support for throttling DNS request, avoiding the requests saturation when
there is a miss in the DNS cache and many requests getting into the connector
at the same time. (`#2111 <https://github.com/aio-libs/aiohttp/pull/2111>`_)
- Use the request for getting access log information instead of the
  message/transport pair. Add `RequestBase.remote` property for accessing the
  IP of the client that initiated the HTTP request. (`#2123 <https://github.com/aio-libs/aiohttp/pull/2123>`_)
- json() raises a ContentTypeError exception if the content-type does not meet
the requirements instead of raising a generic ClientResponseError. (`#2136 <https://github.com/aio-libs/aiohttp/pull/2136>`_)
- Make the HTTP client able to return HTTP chunks when chunked transfer
encoding is used. (`#2150 <https://github.com/aio-libs/aiohttp/pull/2150>`_)
- Add `append_version` arg to the `StaticResource.url` and
  `StaticResource.url_for` methods for getting a URL with a hash (version) of
  the file. (`#2157 <https://github.com/aio-libs/aiohttp/pull/2157>`_)
- Fix parsing the Forwarded header: commas and semicolons are allowed inside
  quoted-strings; empty forwarded-pairs (as in for=_1;;by=_2) are allowed;
  non-standard parameters are allowed (although this alone could be easily done
  in the previous parser). (`#2173 <https://github.com/aio-libs/aiohttp/pull/2173>`_)
- Don't require ssl module to run. aiohttp does not require SSL to function.
The code paths involved with SSL will only be hit upon SSL usage. Raise
`RuntimeError` if HTTPS protocol is required but ssl module is not present.
(`#2221 <https://github.com/aio-libs/aiohttp/pull/2221>`_)
- Accept coroutine fixtures in pytest plugin (`#2223 <https://github.com/aio-libs/aiohttp/pull/2223>`_)
- Call `shutdown_asyncgens` before event loop closing on Python 3.6. (`#2227 <https://github.com/aio-libs/aiohttp/pull/2227>`_)
- Speed up Signals when there are no receivers (`#2229 <https://github.com/aio-libs/aiohttp/pull/2229>`_)
- Raise `InvalidURL` instead of `ValueError` on fetches with invalid URL.
(`#2241 <https://github.com/aio-libs/aiohttp/pull/2241>`_)
- Move `DummyCookieJar` into `cookiejar.py` (`#2242 <https://github.com/aio-libs/aiohttp/pull/2242>`_)
- `run_app`: Make `print=None` disable printing (`#2260 <https://github.com/aio-libs/aiohttp/pull/2260>`_)
- Support `brotli` encoding (generic-purpose lossless compression algorithm)
(`#2270 <https://github.com/aio-libs/aiohttp/pull/2270>`_)
- Add server support for WebSockets Per-Message Deflate. Add client option to
add deflate compress header in WebSockets request header. If calling
ClientSession.ws_connect() with `compress=15` the client will support deflate
compress negotiation. (`#2273 <https://github.com/aio-libs/aiohttp/pull/2273>`_)
- Support `verify_ssl`, `fingerprint`, `ssl_context` and `proxy_headers` by
`client.ws_connect`. (`#2292 <https://github.com/aio-libs/aiohttp/pull/2292>`_)
- Added `aiohttp.ClientConnectorSSLError` when connection fails due
`ssl.SSLError` (`#2294 <https://github.com/aio-libs/aiohttp/pull/2294>`_)
- `aiohttp.web.Application.make_handler` support `access_log_class` (`#2315 <https://github.com/aio-libs/aiohttp/pull/2315>`_)
- Build HTTP parser extension in non-strict mode by default. (`#2332 <https://github.com/aio-libs/aiohttp/pull/2332>`_)
Bugfixes
--------
- Clear auth information on redirecting to other domain (`#1699 <https://github.com/aio-libs/aiohttp/pull/1699>`_)
- Fix missing app.loop on startup hooks during tests (`#2060 <https://github.com/aio-libs/aiohttp/pull/2060>`_)
- Fix issue with synchronous session closing when using `ClientSession` as an
asynchronous context manager. (`#2063 <https://github.com/aio-libs/aiohttp/pull/2063>`_)
- Fix issue with `CookieJar` incorrectly expiring cookies in some edge cases.
(`#2084 <https://github.com/aio-libs/aiohttp/pull/2084>`_)
- Force use of IPv4 during tests; this makes tests run in a Docker container
  (`#2104 <https://github.com/aio-libs/aiohttp/pull/2104>`_)
- Warnings about unawaited coroutines now correctly point to the user's code.
(`#2106 <https://github.com/aio-libs/aiohttp/pull/2106>`_)
- Fix issue with `IndexError` being raised by the `StreamReader.iter_chunks()`
generator. (`#2112 <https://github.com/aio-libs/aiohttp/pull/2112>`_)
- Support HTTP 308 Permanent redirect in client class. (`#2114 <https://github.com/aio-libs/aiohttp/pull/2114>`_)
- Fix `FileResponse` sending empty chunked body on 304. (`#2143 <https://github.com/aio-libs/aiohttp/pull/2143>`_)
- Do not add `Content-Length: 0` to GET/HEAD/TRACE/OPTIONS requests by default.
(`#2167 <https://github.com/aio-libs/aiohttp/pull/2167>`_)
- Fix parsing the Forwarded header according to RFC 7239. (`#2170 <https://github.com/aio-libs/aiohttp/pull/2170>`_)
- Securely determining remote/scheme/host #2171 (`#2171 <https://github.com/aio-libs/aiohttp/pull/2171>`_)
- Fix header name parsing, if name is split into multiple lines (`#2183 <https://github.com/aio-libs/aiohttp/pull/2183>`_)
- Handle session close during connection, `KeyError:
<aiohttp.connector._TransportPlaceholder>` (`#2193 <https://github.com/aio-libs/aiohttp/pull/2193>`_)
- Fixes uncaught `TypeError` in `helpers.guess_filename` if `name` is not a
string (`#2201 <https://github.com/aio-libs/aiohttp/pull/2201>`_)
- Raise OSError on async DNS lookup if resolved domain is an alias for another
one, which does not have an A or CNAME record. (`#2231 <https://github.com/aio-libs/aiohttp/pull/2231>`_)
- Fix incorrect warning in `StreamReader`. (`#2251 <https://github.com/aio-libs/aiohttp/pull/2251>`_)
- Properly clone state of web request (`#2284 <https://github.com/aio-libs/aiohttp/pull/2284>`_)
- Fix C HTTP parser for cases when status line is split into different TCP
packets. (`#2311 <https://github.com/aio-libs/aiohttp/pull/2311>`_)
- Fix `web.FileResponse` overriding user supplied Content-Type (`#2317 <https://github.com/aio-libs/aiohttp/pull/2317>`_)
Improved Documentation
----------------------
- Add a note about possible performance degradation in `await resp.text()` if
charset was not provided by `Content-Type` HTTP header. Pass explicit
encoding to solve it. (`#1811 <https://github.com/aio-libs/aiohttp/pull/1811>`_)
- Drop `disqus` widget from documentation pages. (`#2018 <https://github.com/aio-libs/aiohttp/pull/2018>`_)
- Add a graceful shutdown section to the client usage documentation. (`#2039 <https://github.com/aio-libs/aiohttp/pull/2039>`_)
- Document `connector_owner` parameter. (`#2072 <https://github.com/aio-libs/aiohttp/pull/2072>`_)
- Update the doc of web.Application (`#2081 <https://github.com/aio-libs/aiohttp/pull/2081>`_)
- Fix mistake about access log disabling. (`#2085 <https://github.com/aio-libs/aiohttp/pull/2085>`_)
- Add example usage of on_startup and on_shutdown signals by creating and
disposing an aiopg connection engine. (`#2131 <https://github.com/aio-libs/aiohttp/pull/2131>`_)
- Document `encoded=True` for `yarl.URL`, it disables all yarl transformations.
(`#2198 <https://github.com/aio-libs/aiohttp/pull/2198>`_)
- Document that all app's middleware factories are run for every request.
(`#2225 <https://github.com/aio-libs/aiohttp/pull/2225>`_)
- Reflect the fact that default resolver is threaded one starting from aiohttp
1.1 (`#2228 <https://github.com/aio-libs/aiohttp/pull/2228>`_)
Deprecations and Removals
-------------------------
- Drop deprecated `Server.finish_connections` (`#2006 <https://github.com/aio-libs/aiohttp/pull/2006>`_)
- Drop %O format from logging, use %b instead. Drop %e format from logging,
environment variables are not supported anymore. (`#2123 <https://github.com/aio-libs/aiohttp/pull/2123>`_)
- Drop deprecated secure_proxy_ssl_header support (`#2171 <https://github.com/aio-libs/aiohttp/pull/2171>`_)
- Removed TimeService in favor of simple caching. TimeService also had a bug
where it lost about 0.5 seconds per second. (`#2176 <https://github.com/aio-libs/aiohttp/pull/2176>`_)
- Drop unused response_factory from static files API (`#2290 <https://github.com/aio-libs/aiohttp/pull/2290>`_)
Misc
----
- #2013, #2014, #2048, #2094, #2149, #2187, #2214, #2225, #2243, #2248
2.2.5 (2017-08-03)
==================
- Don't raise deprecation warning on
`loop.run_until_complete(client.close())` (`#2065 <https://github.com/aio-libs/aiohttp/pull/2065>`_)
2.2.4 (2017-08-02)
==================
- Fix issue with synchronous session closing when using ClientSession
as an asynchronous context manager. (`#2063 <https://github.com/aio-libs/aiohttp/pull/2063>`_)
2.2.3 (2017-07-04)
==================
- Fix `_CoroGuard` for python 3.4
2.2.2 (2017-07-03)
==================
- Allow `await session.close()` along with `yield from session.close()`
2.2.1 (2017-07-02)
==================
- Relax `yarl` requirement to 0.11+
- Backport #2026: `session.close` *is* a coroutine (`#2029 <https://github.com/aio-libs/aiohttp/pull/2029>`_)
2.2.0 (2017-06-20)
==================
- Add doc for add_head, update doc for add_get. (`#1944 <https://github.com/aio-libs/aiohttp/pull/1944>`_)
- Fixed consecutive calls for `Response.write_eof`.
- Retain method attributes (e.g. :code:`__doc__`) when registering synchronous
handlers for resources. (`#1953 <https://github.com/aio-libs/aiohttp/pull/1953>`_)
- Added signal TERM handling in `run_app` to gracefully exit (`#1932 <https://github.com/aio-libs/aiohttp/pull/1932>`_)
- Fix websocket issues caused by frame fragmentation. (`#1962 <https://github.com/aio-libs/aiohttp/pull/1962>`_)
- Raise RuntimeError if you try to set the Content-Length and enable
  chunked encoding at the same time (`#1941 <https://github.com/aio-libs/aiohttp/pull/1941>`_)
- Small update for `unittest_run_loop`
- Use CIMultiDict for ClientRequest.skip_auto_headers (`#1970 <https://github.com/aio-libs/aiohttp/pull/1970>`_)
- Fix wrong startup sequence: test server and `run_app()` no longer raise
  `DeprecationWarning` (`#1947 <https://github.com/aio-libs/aiohttp/pull/1947>`_)
- Make sure cleanup signal is sent if startup signal has been sent (`#1959 <https://github.com/aio-libs/aiohttp/pull/1959>`_)
- Fixed server keep-alive handler, could cause 100% cpu utilization (`#1955 <https://github.com/aio-libs/aiohttp/pull/1955>`_)
- Connection can be destroyed before the response gets processed if
  `await aiohttp.request(..)` is used (`#1981 <https://github.com/aio-libs/aiohttp/pull/1981>`_)
- MultipartReader does not work with -OO (`#1969 <https://github.com/aio-libs/aiohttp/pull/1969>`_)
- Fixed `ClientPayloadError` with blank `Content-Encoding` header (`#1931 <https://github.com/aio-libs/aiohttp/pull/1931>`_)
- Support `deflate` encoding implemented in `httpbin.org/deflate` (`#1918 <https://github.com/aio-libs/aiohttp/pull/1918>`_)
- Fix BadStatusLine caused by extra `CRLF` after `POST` data (`#1792 <https://github.com/aio-libs/aiohttp/pull/1792>`_)
- Keep a reference to `ClientSession` in response object (`#1985 <https://github.com/aio-libs/aiohttp/pull/1985>`_)
- Deprecate undocumented `app.on_loop_available` signal (`#1978 <https://github.com/aio-libs/aiohttp/pull/1978>`_)
2.1.0 (2017-05-26)
==================
- Added support for experimental `async-tokio` event loop written in Rust
https://github.com/PyO3/tokio
- Write ``\r\n`` to the transport before closing after a keepalive timeout;
  otherwise the client cannot detect socket disconnection. (`#1883 <https://github.com/aio-libs/aiohttp/pull/1883>`_)
- Only call `loop.close` in `run_app` if the user did *not* supply a loop.
Useful for allowing clients to specify their own cleanup before closing the
asyncio loop if they wish to tightly control loop behavior
- Content disposition with semicolon in filename (`#917 <https://github.com/aio-libs/aiohttp/pull/917>`_)
- Added `request_info` to response object and `ClientResponseError`. (`#1733 <https://github.com/aio-libs/aiohttp/pull/1733>`_)
- Added `history` to `ClientResponseError`. (`#1741 <https://github.com/aio-libs/aiohttp/pull/1741>`_)
- Allow to disable redirect url re-quoting (`#1474 <https://github.com/aio-libs/aiohttp/pull/1474>`_)
- Handle RuntimeError from transport (`#1790 <https://github.com/aio-libs/aiohttp/pull/1790>`_)
- Dropped "%O" in access logger (`#1673 <https://github.com/aio-libs/aiohttp/pull/1673>`_)
- Added `args` and `kwargs` to `unittest_run_loop`. Useful with other
decorators, for example `@patch`. (`#1803 <https://github.com/aio-libs/aiohttp/pull/1803>`_)
- Added `iter_chunks` to response.content object. (`#1805 <https://github.com/aio-libs/aiohttp/pull/1805>`_)
- Avoid creating TimerContext when there is no timeout to allow
compatibility with Tornado. (`#1817 <https://github.com/aio-libs/aiohttp/pull/1817>`_) (`#1180 <https://github.com/aio-libs/aiohttp/pull/1180>`_)
- Add `proxy_from_env` to `ClientRequest` to read from environment
variables. (`#1791 <https://github.com/aio-libs/aiohttp/pull/1791>`_)
- Add DummyCookieJar helper. (`#1830 <https://github.com/aio-libs/aiohttp/pull/1830>`_)
- Fix assertion errors in Python 3.4 from noop helper. (`#1847 <https://github.com/aio-libs/aiohttp/pull/1847>`_)
- Do not unquote `+` in match_info values (`#1816 <https://github.com/aio-libs/aiohttp/pull/1816>`_)
- Use Forwarded, X-Forwarded-Scheme and X-Forwarded-Host for better scheme and
host resolution. (`#1134 <https://github.com/aio-libs/aiohttp/pull/1134>`_)
- Fix sub-application middlewares resolution order (`#1853 <https://github.com/aio-libs/aiohttp/pull/1853>`_)
- Fix applications comparison (`#1866 <https://github.com/aio-libs/aiohttp/pull/1866>`_)
- Fix static location in index when prefix is used (`#1662 <https://github.com/aio-libs/aiohttp/pull/1662>`_)
- Make test server more reliable (`#1896 <https://github.com/aio-libs/aiohttp/pull/1896>`_)
- Extend list of web exceptions, add HTTPUnprocessableEntity,
HTTPFailedDependency, HTTPInsufficientStorage status codes (`#1920 <https://github.com/aio-libs/aiohttp/pull/1920>`_)
2.0.7 (2017-04-12)
==================
- Fix *pypi* distribution
- Fix exception description (`#1807 <https://github.com/aio-libs/aiohttp/pull/1807>`_)
- Handle socket error in FileResponse (`#1773 <https://github.com/aio-libs/aiohttp/pull/1773>`_)
- Cancel websocket heartbeat on close (`#1793 <https://github.com/aio-libs/aiohttp/pull/1793>`_)
2.0.6 (2017-04-04)
==================
- Keeping blank values for `request.post()` and `multipart.form()` (`#1765 <https://github.com/aio-libs/aiohttp/pull/1765>`_)
- TypeError in data_received of ResponseHandler (`#1770 <https://github.com/aio-libs/aiohttp/pull/1770>`_)
- Fix ``web.run_app`` not to bind to default host-port pair if only socket is
passed (`#1786 <https://github.com/aio-libs/aiohttp/pull/1786>`_)
2.0.5 (2017-03-29)
==================
- Memory leak with aiohttp.request (`#1756 <https://github.com/aio-libs/aiohttp/pull/1756>`_)
- Disable cleanup closed ssl transports by default.
- Exception in request handling if the server responds before the body
is sent (`#1761 <https://github.com/aio-libs/aiohttp/pull/1761>`_)
2.0.4 (2017-03-27)
==================
- Memory leak with aiohttp.request (`#1756 <https://github.com/aio-libs/aiohttp/pull/1756>`_)
- Encoding is always UTF-8 in POST data (`#1750 <https://github.com/aio-libs/aiohttp/pull/1750>`_)
- Do not add "Content-Disposition" header by default (`#1755 <https://github.com/aio-libs/aiohttp/pull/1755>`_)
2.0.3 (2017-03-24)
==================
- Fix error when calling an HTTPS website through a proxy (`#1745 <https://github.com/aio-libs/aiohttp/pull/1745>`_)
- Fix exception on multipart/form-data post if content-type is not set (`#1743 <https://github.com/aio-libs/aiohttp/pull/1743>`_)
2.0.2 (2017-03-21)
==================
- Fixed Application.on_loop_available signal (`#1739 <https://github.com/aio-libs/aiohttp/pull/1739>`_)
- Remove debug code
2.0.1 (2017-03-21)
==================
- Fix allow-head to include name on route (`#1737 <https://github.com/aio-libs/aiohttp/pull/1737>`_)
- Fixed AttributeError in WebSocketResponse.can_prepare (`#1736 <https://github.com/aio-libs/aiohttp/pull/1736>`_)
2.0.0 (2017-03-20)
==================
- Added `json` to `ClientSession.request()` method (`#1726 <https://github.com/aio-libs/aiohttp/pull/1726>`_)
- Added session's `raise_for_status` parameter, automatically calls
raise_for_status() on any request. (`#1724 <https://github.com/aio-libs/aiohttp/pull/1724>`_)
- `response.json()` raises a `ClientResponseError` exception if the response's
  content type does not match (`#1723 <https://github.com/aio-libs/aiohttp/pull/1723>`_)
- Cleanup timer and loop handle on any client exception.
- Deprecate `loop` parameter for Application's constructor
`2.0.0rc1` (2017-03-15)
=======================
- Properly handle payload errors (`#1710 <https://github.com/aio-libs/aiohttp/pull/1710>`_)
- Added `ClientWebSocketResponse.get_extra_info()` (`#1717 <https://github.com/aio-libs/aiohttp/pull/1717>`_)
- It is not possible to combine Transfer-Encoding and the chunked parameter;
  the same applies to compress and Content-Encoding (`#1655 <https://github.com/aio-libs/aiohttp/pull/1655>`_)
- Connector's `limit` parameter indicates total concurrent connections.
New `limit_per_host` added, indicates total connections per endpoint. (`#1601 <https://github.com/aio-libs/aiohttp/pull/1601>`_)
- Use url's `raw_host` for name resolution (`#1685 <https://github.com/aio-libs/aiohttp/pull/1685>`_)
- Change `ClientResponse.url` to `yarl.URL` instance (`#1654 <https://github.com/aio-libs/aiohttp/pull/1654>`_)
- Add max_size parameter to web.Request reading methods (`#1133 <https://github.com/aio-libs/aiohttp/pull/1133>`_)
- Web Request.post() stores data in temp files (`#1469 <https://github.com/aio-libs/aiohttp/pull/1469>`_)
- Add the `allow_head=True` keyword argument for `add_get` (`#1618 <https://github.com/aio-libs/aiohttp/pull/1618>`_)
- `run_app` and the Command Line Interface now support serving over
Unix domain sockets for faster inter-process communication.
- `run_app` now supports passing a preexisting socket object. This can be useful
e.g. for socket-based activated applications, when binding of a socket is
done by the parent process.
- Implementation for Trailer headers parser is broken (`#1619 <https://github.com/aio-libs/aiohttp/pull/1619>`_)
- Fix FileResponse to not fail on a bad request (range out of file size)
- Fix FileResponse to correctly stream video to Chrome
- Deprecate public low-level api (`#1657 <https://github.com/aio-libs/aiohttp/pull/1657>`_)
- Deprecate `encoding` parameter for ClientSession.request() method
- Dropped aiohttp.wsgi (`#1108 <https://github.com/aio-libs/aiohttp/pull/1108>`_)
- Dropped `version` from ClientSession.request() method
- Dropped websocket version 76 support (`#1160 <https://github.com/aio-libs/aiohttp/pull/1160>`_)
- Dropped: `aiohttp.protocol.HttpPrefixParser` (`#1590 <https://github.com/aio-libs/aiohttp/pull/1590>`_)
- Dropped: Servers response's `.started`, `.start()` and
`.can_start()` method (`#1591 <https://github.com/aio-libs/aiohttp/pull/1591>`_)
- Dropped: Adding `sub app` via `app.router.add_subapp()` is deprecated
use `app.add_subapp()` instead (`#1592 <https://github.com/aio-libs/aiohttp/pull/1592>`_)
- Dropped: `Application.finish()` and `Application.register_on_finish()` (`#1602 <https://github.com/aio-libs/aiohttp/pull/1602>`_)
- Dropped: `web.Request.GET` and `web.Request.POST`
- Dropped: aiohttp.get(), aiohttp.options(), aiohttp.head(),
aiohttp.post(), aiohttp.put(), aiohttp.patch(), aiohttp.delete(), and
aiohttp.ws_connect() (`#1593 <https://github.com/aio-libs/aiohttp/pull/1593>`_)
- Dropped: `aiohttp.web.WebSocketResponse.receive_msg()` (`#1605 <https://github.com/aio-libs/aiohttp/pull/1605>`_)
- Dropped: `ServerHttpProtocol.keep_alive_timeout` attribute and
`keep-alive`, `keep_alive_on`, `timeout`, `log` constructor parameters (`#1606 <https://github.com/aio-libs/aiohttp/pull/1606>`_)
- Dropped: `TCPConnector's`` `.resolve`, `.resolved_hosts`,
`.clear_resolved_hosts()` attributes and `resolve` constructor
parameter (`#1607 <https://github.com/aio-libs/aiohttp/pull/1607>`_)
- Dropped `ProxyConnector` (`#1609 <https://github.com/aio-libs/aiohttp/pull/1609>`_)
1.3.5 (2017-03-16)
==================
- Fixed None timeout support (`#1720 <https://github.com/aio-libs/aiohttp/pull/1720>`_)
1.3.4 (2017-03-14)
==================
- Revert timeout handling in client request
- Fix StreamResponse representation after eof
- Fix file_sender to not fail on a bad request (range out of file size)
- Fix file_sender to correctly stream video to Chrome
- Fix NotImplementedError server exception (`#1703 <https://github.com/aio-libs/aiohttp/pull/1703>`_)
- Clearer error message for URL without a host name. (`#1691 <https://github.com/aio-libs/aiohttp/pull/1691>`_)
- Silence deprecation warning in __repr__ (`#1690 <https://github.com/aio-libs/aiohttp/pull/1690>`_)
- IDN + HTTPS = `ssl.CertificateError` (`#1685 <https://github.com/aio-libs/aiohttp/pull/1685>`_)
1.3.3 (2017-02-19)
==================
- Fixed memory leak in time service (`#1656 <https://github.com/aio-libs/aiohttp/pull/1656>`_)
1.3.2 (2017-02-16)
==================
- Awaiting on WebSocketResponse.send_* does not work (`#1645 <https://github.com/aio-libs/aiohttp/pull/1645>`_)
- Fix multiple calls to client ws_connect when using a shared header
dict (`#1643 <https://github.com/aio-libs/aiohttp/pull/1643>`_)
- Make CookieJar.filter_cookies() accept plain string parameter. (`#1636 <https://github.com/aio-libs/aiohttp/pull/1636>`_)
1.3.1 (2017-02-09)
==================
- Handle CLOSING in WebSocketResponse.__anext__
- Fixed AttributeError 'drain' for server websocket handler (`#1613 <https://github.com/aio-libs/aiohttp/pull/1613>`_)
1.3.0 (2017-02-08)
==================
- Multipart writer validates the data on append instead of on a
request send (`#920 <https://github.com/aio-libs/aiohttp/pull/920>`_)
- Multipart reader accepts multipart messages with or without their epilogue
to consistently handle valid and legacy behaviors (`#1526 <https://github.com/aio-libs/aiohttp/pull/1526>`_) (`#1581 <https://github.com/aio-libs/aiohttp/pull/1581>`_)
- Separate read + connect + request timeouts # 1523
- Do not swallow Upgrade header (`#1587 <https://github.com/aio-libs/aiohttp/pull/1587>`_)
- Fix polls demo run application (`#1487 <https://github.com/aio-libs/aiohttp/pull/1487>`_)
- Ignore unknown 1XX status codes in client (`#1353 <https://github.com/aio-libs/aiohttp/pull/1353>`_)
- Fix sub-Multipart messages missing their headers on serialization (`#1525 <https://github.com/aio-libs/aiohttp/pull/1525>`_)
- Do not use readline when reading the content of a part
in the multipart reader (`#1535 <https://github.com/aio-libs/aiohttp/pull/1535>`_)
- Add optional flag for quoting `FormData` fields (`#916 <https://github.com/aio-libs/aiohttp/pull/916>`_)
- 416 Range Not Satisfiable if requested range end > file size (`#1588 <https://github.com/aio-libs/aiohttp/pull/1588>`_)
- Having a `:` or `@` in a route does not work (`#1552 <https://github.com/aio-libs/aiohttp/pull/1552>`_)
- Added `receive_timeout` timeout for websocket to receive complete
message. (`#1325 <https://github.com/aio-libs/aiohttp/pull/1325>`_)
- Added `heartbeat` parameter for websocket to automatically send
`ping` message. (`#1024 <https://github.com/aio-libs/aiohttp/pull/1024>`_) (`#777 <https://github.com/aio-libs/aiohttp/pull/777>`_)
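  A minimal client-side sketch combining the two websocket options above; the URL
  and the timeout values are placeholders, not taken from the changelog::

      import asyncio
      import aiohttp

      async def main():
          async with aiohttp.ClientSession() as session:
              # receive_timeout bounds how long we wait for a complete message;
              # heartbeat makes the client send ping frames automatically.
              async with session.ws_connect('http://localhost:8080/ws',
                                            receive_timeout=30,
                                            heartbeat=10) as ws:
                  async for msg in ws:
                      print(msg.type, msg.data)

      if __name__ == '__main__':
          asyncio.get_event_loop().run_until_complete(main())
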
- Remove `web.Application` dependency from `web.UrlDispatcher` (`#1510 <https://github.com/aio-libs/aiohttp/pull/1510>`_)
- Accepting back-pressure from slow websocket clients (`#1367 <https://github.com/aio-libs/aiohttp/pull/1367>`_)
- Do not pause transport during set_parser stage (`#1211 <https://github.com/aio-libs/aiohttp/pull/1211>`_)
- Lingering close does not terminate before timeout (`#1559 <https://github.com/aio-libs/aiohttp/pull/1559>`_)
- `setsockopt` may raise `OSError` exception if socket is closed already (`#1595 <https://github.com/aio-libs/aiohttp/pull/1595>`_)
- Lots of CancelledError when requests are interrupted (`#1565 <https://github.com/aio-libs/aiohttp/pull/1565>`_)
- Allow users to specify what should happen to decoding errors
  when calling a response's `text()` method (`#1542 <https://github.com/aio-libs/aiohttp/pull/1542>`_)
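  For illustration, a hedged sketch of what the `errors` argument enables
  (the URL is a placeholder; 'replace' is one of the standard codecs error handlers)::

      import asyncio
      import aiohttp

      async def fetch_lenient(url):
          async with aiohttp.ClientSession() as session:
              async with session.get(url) as resp:
                  # 'replace' substitutes undecodable bytes instead of raising
                  return await resp.text(errors='replace')

      if __name__ == '__main__':
          loop = asyncio.get_event_loop()
          print(loop.run_until_complete(fetch_lenient('http://example.com')))
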
- Backport std module `http.cookies` for Python 3.4.2 (`#1566 <https://github.com/aio-libs/aiohttp/pull/1566>`_)
- Maintain url's fragment in client response (`#1314 <https://github.com/aio-libs/aiohttp/pull/1314>`_)
- Allow closing WebSocket connections concurrently (`#754 <https://github.com/aio-libs/aiohttp/pull/754>`_)
- Gzipped responses with empty body raises ContentEncodingError (`#609 <https://github.com/aio-libs/aiohttp/pull/609>`_)
- Return 504 if request handle raises TimeoutError.
- Refactor how we use keep-alive and close lingering timeouts.
- Close response connection if we cannot consume the whole HTTP
  message during client response release
- Abort closed ssl client transports; broken servers can keep the socket
  open for an unlimited time (`#1568 <https://github.com/aio-libs/aiohttp/pull/1568>`_)
- Log a warning instead of raising `RuntimeError` if the websocket connection is closed.
- Deprecated: `aiohttp.protocol.HttpPrefixParser`
will be removed in 1.4 (`#1590 <https://github.com/aio-libs/aiohttp/pull/1590>`_)
- Deprecated: Server response's `.started`, `.start()` and
  `.can_start()` methods will be removed in 1.4 (`#1591 <https://github.com/aio-libs/aiohttp/pull/1591>`_)
- Deprecated: Adding a `sub app` via `app.router.add_subapp()` is deprecated,
  use `app.add_subapp()` instead; will be removed in 1.4 (`#1592 <https://github.com/aio-libs/aiohttp/pull/1592>`_)
- Deprecated: aiohttp.get(), aiohttp.options(), aiohttp.head(), aiohttp.post(),
aiohttp.put(), aiohttp.patch(), aiohttp.delete(), and aiohttp.ws_connect()
will be removed in 1.4 (`#1593 <https://github.com/aio-libs/aiohttp/pull/1593>`_)
- Deprecated: `Application.finish()` and `Application.register_on_finish()`
will be removed in 1.4 (`#1602 <https://github.com/aio-libs/aiohttp/pull/1602>`_)
1.2.0 (2016-12-17)
==================
- Extract `BaseRequest` from `web.Request`, introduce `web.Server`
(former `RequestHandlerFactory`), introduce new low-level web server
which is not coupled with `web.Application` and routing (`#1362 <https://github.com/aio-libs/aiohttp/pull/1362>`_)
- Make `TestServer.make_url` compatible with `yarl.URL` (`#1389 <https://github.com/aio-libs/aiohttp/pull/1389>`_)
- Implement range requests for static files (`#1382 <https://github.com/aio-libs/aiohttp/pull/1382>`_)
- Support task attribute for StreamResponse (`#1410 <https://github.com/aio-libs/aiohttp/pull/1410>`_)
- Drop `TestClient.app` property, use `TestClient.server.app` instead
(BACKWARD INCOMPATIBLE)
- Drop `TestClient.handler` property, use `TestClient.server.handler` instead
(BACKWARD INCOMPATIBLE)
- `TestClient.server` property returns a test server instance, was
`asyncio.AbstractServer` (BACKWARD INCOMPATIBLE)
- Follow gunicorn's signal semantics in `Gunicorn[UVLoop]WebWorker` (`#1201 <https://github.com/aio-libs/aiohttp/pull/1201>`_)
- Call worker_int and worker_abort callbacks in
`Gunicorn[UVLoop]WebWorker` (`#1202 <https://github.com/aio-libs/aiohttp/pull/1202>`_)
- Add functional tests for client proxy (`#1218 <https://github.com/aio-libs/aiohttp/pull/1218>`_)
- Fix bugs with client proxy target path and proxy host with port (`#1413 <https://github.com/aio-libs/aiohttp/pull/1413>`_)
- Fix bugs related to the use of unicode hostnames (`#1444 <https://github.com/aio-libs/aiohttp/pull/1444>`_)
- Preserve cookie quoting/escaping (`#1453 <https://github.com/aio-libs/aiohttp/pull/1453>`_)
- FileSender will send gzipped response if gzip version available (`#1426 <https://github.com/aio-libs/aiohttp/pull/1426>`_)
- Don't override `Content-Length` header in `web.Response` if no body
was set (`#1400 <https://github.com/aio-libs/aiohttp/pull/1400>`_)
- Introduce `router.post_init()` for solving (`#1373 <https://github.com/aio-libs/aiohttp/pull/1373>`_)
- Fix error raised on multiple calls of `TimeService.stop()`
- Allow to raise web exceptions on router resolving stage (`#1460 <https://github.com/aio-libs/aiohttp/pull/1460>`_)
- Add a warning for session creation outside of coroutine (`#1468 <https://github.com/aio-libs/aiohttp/pull/1468>`_)
- Avoid a race when application might start accepting incoming requests
but startup signals are not processed yet e98e8c6
- Raise a `RuntimeError` when trying to change the status of the HTTP response
after the headers have been sent (`#1480 <https://github.com/aio-libs/aiohttp/pull/1480>`_)
- Fix bug with https proxy acquired cleanup (`#1340 <https://github.com/aio-libs/aiohttp/pull/1340>`_)
- Use UTF-8 as the default encoding for multipart text parts (`#1484 <https://github.com/aio-libs/aiohttp/pull/1484>`_)
1.1.6 (2016-11-28)
==================
- Fix `BodyPartReader.read_chunk` bug where it returned zero bytes before
`EOF` (`#1428 <https://github.com/aio-libs/aiohttp/pull/1428>`_)
1.1.5 (2016-11-16)
==================
- Fix static file serving in fallback mode (`#1401 <https://github.com/aio-libs/aiohttp/pull/1401>`_)
1.1.4 (2016-11-14)
==================
- Make `TestServer.make_url` compatible with `yarl.URL` (`#1389 <https://github.com/aio-libs/aiohttp/pull/1389>`_)
- Generate informative exception on redirects from server which
does not provide redirection headers (`#1396 <https://github.com/aio-libs/aiohttp/pull/1396>`_)
1.1.3 (2016-11-10)
==================
- Support *root* resources for sub-applications (`#1379 <https://github.com/aio-libs/aiohttp/pull/1379>`_)
1.1.2 (2016-11-08)
==================
- Allow starting variables with an underscore (`#1379 <https://github.com/aio-libs/aiohttp/pull/1379>`_)
- Properly process UNIX sockets by gunicorn worker (`#1375 <https://github.com/aio-libs/aiohttp/pull/1375>`_)
- Fix ordering for `FrozenList`
- Don't propagate pre and post signals to sub-application (`#1377 <https://github.com/aio-libs/aiohttp/pull/1377>`_)
1.1.1 (2016-11-04)
==================
- Fix documentation generation (`#1120 <https://github.com/aio-libs/aiohttp/pull/1120>`_)
1.1.0 (2016-11-03)
==================
- Drop deprecated `WSClientDisconnectedError` (BACKWARD INCOMPATIBLE)
- Use `yarl.URL` in client API. The change is 99% backward compatible
but `ClientResponse.url` is an `yarl.URL` instance now. (`#1217 <https://github.com/aio-libs/aiohttp/pull/1217>`_)
- Close idle keep-alive connections on shutdown (`#1222 <https://github.com/aio-libs/aiohttp/pull/1222>`_)
- Modify regex in AccessLogger to accept underscore and numbers (`#1225 <https://github.com/aio-libs/aiohttp/pull/1225>`_)
- Use `yarl.URL` in web server API. `web.Request.rel_url` and
`web.Request.url` are added. URLs and templates are percent-encoded
now. (`#1224 <https://github.com/aio-libs/aiohttp/pull/1224>`_)
- Accept `yarl.URL` by server redirections (`#1278 <https://github.com/aio-libs/aiohttp/pull/1278>`_)
- Return `yarl.URL` by `.make_url()` testing utility (`#1279 <https://github.com/aio-libs/aiohttp/pull/1279>`_)
- Properly format IPv6 addresses by `aiohttp.web.run_app` (`#1139 <https://github.com/aio-libs/aiohttp/pull/1139>`_)
- Use `yarl.URL` by server API (`#1288 <https://github.com/aio-libs/aiohttp/pull/1288>`_)
* Introduce `resource.url_for()`, deprecate `resource.url()`.
* Implement `StaticResource`.
* Inherit `SystemRoute` from `AbstractRoute`
* Drop old-style routes: `Route`, `PlainRoute`, `DynamicRoute`,
`StaticRoute`, `ResourceAdapter`.
- Revert `resp.url` back to `str`, introduce `resp.url_obj` (`#1292 <https://github.com/aio-libs/aiohttp/pull/1292>`_)
- Raise ValueError if BasicAuth login has a ":" character (`#1307 <https://github.com/aio-libs/aiohttp/pull/1307>`_)
- Fix bug when ClientRequest sends a payload file opened as
  open('filename', 'r+b') (`#1306 <https://github.com/aio-libs/aiohttp/pull/1306>`_)
- Enhancement to AccessLogger (pass *extra* dict) (`#1303 <https://github.com/aio-libs/aiohttp/pull/1303>`_)
- Show more verbose message on import errors (`#1319 <https://github.com/aio-libs/aiohttp/pull/1319>`_)
- Added save and load functionality for `CookieJar` (`#1219 <https://github.com/aio-libs/aiohttp/pull/1219>`_)
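  A small persistence sketch; the file name is an example, and depending on the
  aiohttp version the jar constructor may require an explicit event loop::

      import aiohttp

      jar = aiohttp.CookieJar()
      # ... normally the jar is populated by a ClientSession that uses it ...
      jar.save('cookies.pickle')     # serialize cookies to disk

      restored = aiohttp.CookieJar()
      restored.load('cookies.pickle')
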
- Added option on `StaticRoute` to follow symlinks (`#1299 <https://github.com/aio-libs/aiohttp/pull/1299>`_)
- Force encoding of `application/json` content type to utf-8 (`#1339 <https://github.com/aio-libs/aiohttp/pull/1339>`_)
- Fix invalid invocations of `errors.LineTooLong` (`#1335 <https://github.com/aio-libs/aiohttp/pull/1335>`_)
- Websockets: Stop `async for` iteration when connection is closed (`#1144 <https://github.com/aio-libs/aiohttp/pull/1144>`_)
- Ensure TestClient HTTP methods return a context manager (`#1318 <https://github.com/aio-libs/aiohttp/pull/1318>`_)
- Raise `ClientDisconnectedError` to `FlowControlStreamReader` read function
if `ClientSession` object is closed by client when reading data. (`#1323 <https://github.com/aio-libs/aiohttp/pull/1323>`_)
- Document deployment without `Gunicorn` (`#1120 <https://github.com/aio-libs/aiohttp/pull/1120>`_)
- Add deprecation warning for MD5 and SHA1 digests when used for fingerprint
of site certs in TCPConnector. (`#1186 <https://github.com/aio-libs/aiohttp/pull/1186>`_)
- Implement sub-applications (`#1301 <https://github.com/aio-libs/aiohttp/pull/1301>`_)
- Don't inherit `web.Request` from `dict` but implement
`MutableMapping` protocol.
- Implement frozen signals
- Don't inherit `web.Application` from `dict` but implement
`MutableMapping` protocol.
- Support freezing for web applications
- Accept access_log parameter in `web.run_app`, use `None` to disable logging
- Don't flap `tcp_cork` and `tcp_nodelay` in regular request handling.
`tcp_nodelay` is still enabled by default.
- Improve performance of web server by removing premature computing of
Content-Type if the value was set by `web.Response` constructor.
  While the patch boosts the speed of a trivial `web.Response(text='OK',
  content_type='text/plain')` very well, please don't expect a significant
  boost for your application -- a couple of DB requests and the business logic
  are still the main bottleneck.
- Boost performance by adding a custom time service (`#1350 <https://github.com/aio-libs/aiohttp/pull/1350>`_)
- Extend `ClientResponse` with `content_type` and `charset`
properties like in `web.Request`. (`#1349 <https://github.com/aio-libs/aiohttp/pull/1349>`_)
- Disable aiodns by default (`#559 <https://github.com/aio-libs/aiohttp/pull/559>`_)
- Don't flap `tcp_cork` in client code, use TCP_NODELAY mode by default.
- Implement `web.Request.clone()` (`#1361 <https://github.com/aio-libs/aiohttp/pull/1361>`_)
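  A hedged middleware sketch showing one way `clone()` can be used; the header
  name and the old-style middleware factory are illustrative only::

      from aiohttp import web

      async def tracing_middleware(app, handler):
          async def middleware(request):
              # Requests are read-only; clone() yields a modified copy instead.
              patched = request.clone(
                  headers={**request.headers, 'X-Trace': 'demo'})
              return await handler(patched)
          return middleware
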
1.0.5 (2016-10-11)
==================
- Fix StreamReader._read_nowait to return all available
data up to the requested amount (`#1297 <https://github.com/aio-libs/aiohttp/pull/1297>`_)
1.0.4 (2016-09-22)
==================
- Fix FlowControlStreamReader.read_nowait so that it checks
whether the transport is paused (`#1206 <https://github.com/aio-libs/aiohttp/pull/1206>`_)
1.0.2 (2016-09-22)
==================
- Make CookieJar compatible with 32-bit systems (`#1188 <https://github.com/aio-libs/aiohttp/pull/1188>`_)
- Add missing `WSMsgType` to `web_ws.__all__`, see (`#1200 <https://github.com/aio-libs/aiohttp/pull/1200>`_)
- Fix `CookieJar` ctor when called with `loop=None` (`#1203 <https://github.com/aio-libs/aiohttp/pull/1203>`_)
- Fix broken upper-casing in wsgi support (`#1197 <https://github.com/aio-libs/aiohttp/pull/1197>`_)
1.0.1 (2016-09-16)
==================
- Restore `aiohttp.web.MsgType` alias for `aiohttp.WSMsgType` for sake
of backward compatibility (`#1178 <https://github.com/aio-libs/aiohttp/pull/1178>`_)
- Tune alabaster schema.
- Use `text/html` content type for displaying index pages by static
file handler.
- Fix `AssertionError` in static file handling (`#1177 <https://github.com/aio-libs/aiohttp/pull/1177>`_)
- Fix access log formats `%O` and `%b` for static file handling
- Remove `debug` setting of GunicornWorker, use `app.debug`
to control its debug-mode instead
1.0.0 (2016-09-16)
==================
- Change default size for client session's connection pool from
unlimited to 20 (`#977 <https://github.com/aio-libs/aiohttp/pull/977>`_)
- Add IE support for cookie deletion. (`#994 <https://github.com/aio-libs/aiohttp/pull/994>`_)
- Remove deprecated `WebSocketResponse.wait_closed` method (BACKWARD
INCOMPATIBLE)
- Remove deprecated `force` parameter for `ClientResponse.close`
method (BACKWARD INCOMPATIBLE)
- Avoid using mutable CIMultiDict kw param in make_mocked_request
(`#997 <https://github.com/aio-libs/aiohttp/pull/997>`_)
- Make WebSocketResponse.close a little bit faster by avoiding new
  task creation just for timeout measurement
- Add `proxy` and `proxy_auth` params to `client.get()` and family,
deprecate `ProxyConnector` (`#998 <https://github.com/aio-libs/aiohttp/pull/998>`_)
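  A minimal sketch of the new per-request proxy parameters; the proxy address
  and credentials are placeholders::

      import asyncio
      import aiohttp

      async def main():
          auth = aiohttp.BasicAuth('user', 'secret')
          async with aiohttp.ClientSession() as session:
              async with session.get('http://example.com',
                                     proxy='http://127.0.0.1:8118',
                                     proxy_auth=auth) as resp:
                  print(resp.status)

      if __name__ == '__main__':
          asyncio.get_event_loop().run_until_complete(main())
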
- Add support for websocket send_json and receive_json, synchronize
server and client API for websockets (`#984 <https://github.com/aio-libs/aiohttp/pull/984>`_)
- Implement router shortcuts for the most useful HTTP methods, use
`app.router.add_get()`, `app.router.add_post()` etc. instead of
`app.router.add_route()` (`#986 <https://github.com/aio-libs/aiohttp/pull/986>`_)
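  For example (the handler and paths are placeholders)::

      from aiohttp import web

      async def handle(request):
          return web.Response(text='OK')

      app = web.Application()
      app.router.add_get('/', handle)        # instead of add_route('GET', '/', handle)
      app.router.add_post('/items', handle)  # instead of add_route('POST', ...)
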
- Support SSL connections for gunicorn worker (`#1003 <https://github.com/aio-libs/aiohttp/pull/1003>`_)
- Move obsolete examples to legacy folder
- Switch to multidict 2.0 and title-cased strings (`#1015 <https://github.com/aio-libs/aiohttp/pull/1015>`_)
- `{FOO}e` logger format is case-sensitive now
- Fix logger report for unix socket 8e8469b
- Rename aiohttp.websocket to aiohttp._ws_impl
- Rename aiohttp.MsgType to aiohttp.WSMsgType
- Introduce aiohttp.WSMessage officially
- Rename Message -> WSMessage
- Remove deprecated decode param from resp.read(decode=True)
- Use 5min default client timeout (`#1028 <https://github.com/aio-libs/aiohttp/pull/1028>`_)
- Relax HTTP method validation in UrlDispatcher (`#1037 <https://github.com/aio-libs/aiohttp/pull/1037>`_)
- Pin minimal supported asyncio version to 3.4.2+ (`loop.is_closed()`
should be present)
- Remove aiohttp.websocket module (BACKWARD INCOMPATIBLE)
Please use high-level client and server approaches
- Link header for 451 status code is mandatory
- Fix test_client fixture to allow multiple clients per test (`#1072 <https://github.com/aio-libs/aiohttp/pull/1072>`_)
- make_mocked_request now accepts dict as headers (`#1073 <https://github.com/aio-libs/aiohttp/pull/1073>`_)
- Add Python 3.5.2/3.6+ compatibility patch for async generator
protocol change (`#1082 <https://github.com/aio-libs/aiohttp/pull/1082>`_)
- Improve test_client to accept an instance object (`#1083 <https://github.com/aio-libs/aiohttp/pull/1083>`_)
- Simplify ServerHttpProtocol implementation (`#1060 <https://github.com/aio-libs/aiohttp/pull/1060>`_)
- Add a flag for optional showing directory index for static file
handling (`#921 <https://github.com/aio-libs/aiohttp/pull/921>`_)
- Define `web.Application.on_startup()` signal handler (`#1103 <https://github.com/aio-libs/aiohttp/pull/1103>`_)
- Drop ChunkedParser and LinesParser (`#1111 <https://github.com/aio-libs/aiohttp/pull/1111>`_)
- Call `Application.startup` in GunicornWebWorker (`#1105 <https://github.com/aio-libs/aiohttp/pull/1105>`_)
- Fix client handling hostnames with 63 bytes when a port is given in
the url (`#1044 <https://github.com/aio-libs/aiohttp/pull/1044>`_)
- Implement proxy support for ClientSession.ws_connect (`#1025 <https://github.com/aio-libs/aiohttp/pull/1025>`_)
- Return named tuple from WebSocketResponse.can_prepare (`#1016 <https://github.com/aio-libs/aiohttp/pull/1016>`_)
- Fix access_log_format in `GunicornWebWorker` (`#1117 <https://github.com/aio-libs/aiohttp/pull/1117>`_)
- Set Content-Type to application/octet-stream by default (`#1124 <https://github.com/aio-libs/aiohttp/pull/1124>`_)
- Deprecate debug parameter from app.make_handler(), use
`Application(debug=True)` instead (`#1121 <https://github.com/aio-libs/aiohttp/pull/1121>`_)
- Remove fragment string in request path (`#846 <https://github.com/aio-libs/aiohttp/pull/846>`_)
- Use aiodns.DNSResolver.gethostbyname() if available (`#1136 <https://github.com/aio-libs/aiohttp/pull/1136>`_)
- Fix static file sending on uvloop when sendfile is available (`#1093 <https://github.com/aio-libs/aiohttp/pull/1093>`_)
- Make prettier urls if query is empty dict (`#1143 <https://github.com/aio-libs/aiohttp/pull/1143>`_)
- Fix redirects for HEAD requests (`#1147 <https://github.com/aio-libs/aiohttp/pull/1147>`_)
- Default value for `StreamReader.read_nowait` is -1 from now (`#1150 <https://github.com/aio-libs/aiohttp/pull/1150>`_)
- `aiohttp.StreamReader` is not inherited from `asyncio.StreamReader` from now
(BACKWARD INCOMPATIBLE) (`#1150 <https://github.com/aio-libs/aiohttp/pull/1150>`_)
- Streams documentation added (`#1150 <https://github.com/aio-libs/aiohttp/pull/1150>`_)
- Add `multipart` coroutine method for web Request object (`#1067 <https://github.com/aio-libs/aiohttp/pull/1067>`_)
- Publish ClientSession.loop property (`#1149 <https://github.com/aio-libs/aiohttp/pull/1149>`_)
- Fix static file with spaces (`#1140 <https://github.com/aio-libs/aiohttp/pull/1140>`_)
- Fix piling up asyncio loop by cookie expiration callbacks (`#1061 <https://github.com/aio-libs/aiohttp/pull/1061>`_)
- Drop `Timeout` class in favor of the `async_timeout` external library.
  `aiohttp.Timeout` is an alias for `async_timeout.timeout`
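  A sketch of the alias as used in this release line (newer versions of
  `async_timeout` expect `async with` rather than plain `with`; the URL is a placeholder)::

      import asyncio
      import aiohttp

      async def fetch(url):
          with aiohttp.Timeout(10):          # alias for async_timeout.timeout
              async with aiohttp.ClientSession() as session:
                  async with session.get(url) as resp:
                      return await resp.text()
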
- `use_dns_cache` parameter of `aiohttp.TCPConnector` is `True` by
default (BACKWARD INCOMPATIBLE) (`#1152 <https://github.com/aio-libs/aiohttp/pull/1152>`_)
- `aiohttp.TCPConnector` uses asynchronous DNS resolver if available by
default (BACKWARD INCOMPATIBLE) (`#1152 <https://github.com/aio-libs/aiohttp/pull/1152>`_)
- Conform to RFC3986 - do not include url fragments in client requests (`#1174 <https://github.com/aio-libs/aiohttp/pull/1174>`_)
- Drop `ClientSession.cookies` (BACKWARD INCOMPATIBLE) (`#1173 <https://github.com/aio-libs/aiohttp/pull/1173>`_)
- Refactor `AbstractCookieJar` public API (BACKWARD INCOMPATIBLE) (`#1173 <https://github.com/aio-libs/aiohttp/pull/1173>`_)
- Fix clashing cookies that have the same name but belong to different
domains (BACKWARD INCOMPATIBLE) (`#1125 <https://github.com/aio-libs/aiohttp/pull/1125>`_)
- Support binary Content-Transfer-Encoding (`#1169 <https://github.com/aio-libs/aiohttp/pull/1169>`_)
0.22.5 (08-02-2016)
===================
- Pin multidict version to >=1.2.2
0.22.3 (07-26-2016)
===================
- Do not filter cookies if unsafe flag provided (`#1005 <https://github.com/aio-libs/aiohttp/pull/1005>`_)
0.22.2 (07-23-2016)
===================
- Suppress CancelledError when Timeout raises TimeoutError (`#970 <https://github.com/aio-libs/aiohttp/pull/970>`_)
- Don't expose `aiohttp.__version__`
- Add unsafe parameter to CookieJar (`#968 <https://github.com/aio-libs/aiohttp/pull/968>`_)
- Use unsafe cookie jar in test client tools
- Expose aiohttp.CookieJar name
0.22.1 (07-16-2016)
===================
- Large cookie expiration/max-age no longer breaks the event loop
(fixes (`#967 <https://github.com/aio-libs/aiohttp/pull/967>`_))
0.22.0 (07-15-2016)
===================
- Fix bug in serving static directory (`#803 <https://github.com/aio-libs/aiohttp/pull/803>`_)
- Fix command line arg parsing (`#797 <https://github.com/aio-libs/aiohttp/pull/797>`_)
- Fix a documentation chapter about cookie usage (`#790 <https://github.com/aio-libs/aiohttp/pull/790>`_)
- Handle empty body with gzipped encoding (`#758 <https://github.com/aio-libs/aiohttp/pull/758>`_)
- Support 451 Unavailable For Legal Reasons http status (`#697 <https://github.com/aio-libs/aiohttp/pull/697>`_)
- Fix Cookie share example and few small typos in docs (`#817 <https://github.com/aio-libs/aiohttp/pull/817>`_)
- UrlDispatcher.add_route with partial coroutine handler (`#814 <https://github.com/aio-libs/aiohttp/pull/814>`_)
- Optional support for aiodns (`#728 <https://github.com/aio-libs/aiohttp/pull/728>`_)
- Add ServiceRestart and TryAgainLater websocket close codes (`#828 <https://github.com/aio-libs/aiohttp/pull/828>`_)
- Fix prompt message for `web.run_app` (`#832 <https://github.com/aio-libs/aiohttp/pull/832>`_)
- Allow to pass None as a timeout value to disable timeout logic (`#834 <https://github.com/aio-libs/aiohttp/pull/834>`_)
- Fix leak of connection slot during connection error (`#835 <https://github.com/aio-libs/aiohttp/pull/835>`_)
- Gunicorn worker with uvloop support
`aiohttp.worker.GunicornUVLoopWebWorker` (`#878 <https://github.com/aio-libs/aiohttp/pull/878>`_)
- Don't send body in response to HEAD request (`#838 <https://github.com/aio-libs/aiohttp/pull/838>`_)
- Skip the preamble in MultipartReader (`#881 <https://github.com/aio-libs/aiohttp/pull/881>`_)
- Implement BasicAuth decode classmethod. (`#744 <https://github.com/aio-libs/aiohttp/pull/744>`_)
- Don't crash logger when transport is None (`#889 <https://github.com/aio-libs/aiohttp/pull/889>`_)
- Use a create_future compatibility wrapper instead of creating
Futures directly (`#896 <https://github.com/aio-libs/aiohttp/pull/896>`_)
- Add test utilities to aiohttp (`#902 <https://github.com/aio-libs/aiohttp/pull/902>`_)
- Improve Request.__repr__ (`#875 <https://github.com/aio-libs/aiohttp/pull/875>`_)
- Skip DNS resolving if provided host is already an ip address (`#874 <https://github.com/aio-libs/aiohttp/pull/874>`_)
- Add headers to ClientSession.ws_connect (`#785 <https://github.com/aio-libs/aiohttp/pull/785>`_)
- Document that server can send pre-compressed data (`#906 <https://github.com/aio-libs/aiohttp/pull/906>`_)
- Don't add Content-Encoding and Transfer-Encoding if no body (`#891 <https://github.com/aio-libs/aiohttp/pull/891>`_)
- Add json() convenience methods to websocket message objects (`#897 <https://github.com/aio-libs/aiohttp/pull/897>`_)
- Add client_resp.raise_for_status() (`#908 <https://github.com/aio-libs/aiohttp/pull/908>`_)
- Implement cookie filter (`#799 <https://github.com/aio-libs/aiohttp/pull/799>`_)
- Include an example of middleware to handle error pages (`#909 <https://github.com/aio-libs/aiohttp/pull/909>`_)
- Fix error handling in StaticFileMixin (`#856 <https://github.com/aio-libs/aiohttp/pull/856>`_)
- Add mocked request helper (`#900 <https://github.com/aio-libs/aiohttp/pull/900>`_)
- Fix empty ALLOW Response header for cls based View (`#929 <https://github.com/aio-libs/aiohttp/pull/929>`_)
- Respect CONNECT method to implement a proxy server (`#847 <https://github.com/aio-libs/aiohttp/pull/847>`_)
- Add pytest_plugin (`#914 <https://github.com/aio-libs/aiohttp/pull/914>`_)
- Add tutorial
- Add backlog option to support more than 128 (default value in
"create_server" function) concurrent connections (`#892 <https://github.com/aio-libs/aiohttp/pull/892>`_)
- Allow configuration of header size limits (`#912 <https://github.com/aio-libs/aiohttp/pull/912>`_)
- Separate sending file logic from StaticRoute dispatcher (`#901 <https://github.com/aio-libs/aiohttp/pull/901>`_)
- Drop deprecated share_cookies connector option (BACKWARD INCOMPATIBLE)
- Drop deprecated support for tuple as auth parameter.
Use aiohttp.BasicAuth instead (BACKWARD INCOMPATIBLE)
- Remove deprecated `request.payload` property, use `content` instead.
(BACKWARD INCOMPATIBLE)
- Drop all mentions of API changes in documentation for versions
  older than 0.16
- Allow to override default cookie jar (`#963 <https://github.com/aio-libs/aiohttp/pull/963>`_)
- Add manylinux wheel builds
- Dup a socket for sendfile usage (`#964 <https://github.com/aio-libs/aiohttp/pull/964>`_)
0.21.6 (05-05-2016)
===================
- Drop initial query parameters on redirects (`#853 <https://github.com/aio-libs/aiohttp/pull/853>`_)
0.21.5 (03-22-2016)
===================
- Fix command line arg parsing (`#797 <https://github.com/aio-libs/aiohttp/pull/797>`_)
0.21.4 (03-12-2016)
===================
- Fix ResourceAdapter: don't add method to allowed if resource does not
  match (`#826 <https://github.com/aio-libs/aiohttp/pull/826>`_)
- Fix Resource: append found method to returned allowed methods
0.21.2 (02-16-2016)
===================
- Fix a regression: support for handling ~/path in static file routes was
broken (`#782 <https://github.com/aio-libs/aiohttp/pull/782>`_)
0.21.1 (02-10-2016)
===================
- Make new resources classes public (`#767 <https://github.com/aio-libs/aiohttp/pull/767>`_)
- Add `router.resources()` view
- Fix cmd-line parameter names in doc
0.21.0 (02-04-2016)
===================
- Introduce on_shutdown signal (`#722 <https://github.com/aio-libs/aiohttp/pull/722>`_)
- Implement raw input headers (`#726 <https://github.com/aio-libs/aiohttp/pull/726>`_)
- Implement web.run_app utility function (`#734 <https://github.com/aio-libs/aiohttp/pull/734>`_)
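  A minimal sketch of the utility; handler and route are placeholders::

      from aiohttp import web

      async def hello(request):
          return web.Response(text='Hello, world')

      app = web.Application()
      app.router.add_route('GET', '/', hello)
      web.run_app(app)   # blocks, serving on the default host/port until interrupted
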
- Introduce on_cleanup signal
- Deprecate Application.finish() / Application.register_on_finish() in favor of
on_cleanup.
- Get rid of bare aiohttp.request(), aiohttp.get() and family in docs (`#729 <https://github.com/aio-libs/aiohttp/pull/729>`_)
- Deprecate bare aiohttp.request(), aiohttp.get() and family (`#729 <https://github.com/aio-libs/aiohttp/pull/729>`_)
- Refactor keep-alive support (`#737 <https://github.com/aio-libs/aiohttp/pull/737>`_); the rules are summarized in the sketch after this list:
- Enable keepalive for HTTP 1.0 by default
- Disable it for HTTP 0.9 (who cares about 0.9, BTW?)
- For keepalived connections
- Send `Connection: keep-alive` for HTTP 1.0 only
- don't send `Connection` header for HTTP 1.1
- For non-keepalived connections
- Send `Connection: close` for HTTP 1.1 only
- don't send `Connection` header for HTTP 1.0
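  A hypothetical helper (not aiohttp code) that restates the Connection-header
  rules above for a given HTTP version tuple::

      def connection_header(http_version, keep_alive):
          """Return the Connection header value to send, or None to omit it."""
          if http_version <= (0, 9):
              return None                     # keep-alive is disabled for HTTP/0.9
          if keep_alive:
              # 1.1 keeps alive by default, so only 1.0 needs the header
              return 'keep-alive' if http_version == (1, 0) else None
          # closing: 1.1 needs an explicit close, 1.0 closes by default
          return 'close' if http_version == (1, 1) else None
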
- Add version parameter to ClientSession constructor,
deprecate it for session.request() and family (`#736 <https://github.com/aio-libs/aiohttp/pull/736>`_)
- Enable access log by default (`#735 <https://github.com/aio-libs/aiohttp/pull/735>`_)
- Deprecate app.router.register_route() (the method was not documented
intentionally BTW).
- Deprecate app.router.named_routes() in favor of app.router.named_resources()
- route.add_static accepts pathlib.Path now (`#743 <https://github.com/aio-libs/aiohttp/pull/743>`_)
- Add command line support: `$ python -m aiohttp.web package.main` (`#740 <https://github.com/aio-libs/aiohttp/pull/740>`_)
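  A hedged sketch of a module the runner could point at; the exact argument
  syntax differs between versions, and the names below are illustrative::

      # package/main.py
      from aiohttp import web

      async def index(request):
          return web.Response(text='Hello')

      def main(argv):
          # the command-line runner imports this callable, passes any extra
          # argv through, and serves the Application it returns
          app = web.Application()
          app.router.add_route('GET', '/', index)
          return app
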
- A FAQ section was added to the docs. Enjoy and feel free to contribute new topics
- Add async context manager support to ClientSession
- Document ClientResponse's host, method, url properties
- Use CORK/NODELAY in client API (`#748 <https://github.com/aio-libs/aiohttp/pull/748>`_)
- ClientSession.close and Connector.close are coroutines now
- Close client connection on exception in ClientResponse.release()
- Allow to read multipart parts without content-length specified (`#750 <https://github.com/aio-libs/aiohttp/pull/750>`_)
- Add support for unix domain sockets to gunicorn worker (`#470 <https://github.com/aio-libs/aiohttp/pull/470>`_)
- Add test for default Expect handler (`#601 <https://github.com/aio-libs/aiohttp/pull/601>`_)
- Add the first demo project
- Rename `loader` keyword argument in `web.Request.json` method. (`#646 <https://github.com/aio-libs/aiohttp/pull/646>`_)
- Add local socket binding for TCPConnector (`#678 <https://github.com/aio-libs/aiohttp/pull/678>`_)
0.20.2 (01-07-2016)
===================
- Enable use of `await` for a class based view (`#717 <https://github.com/aio-libs/aiohttp/pull/717>`_)
- Check address family to fill wsgi env properly (`#718 <https://github.com/aio-libs/aiohttp/pull/718>`_)
- Fix memory leak in headers processing (thanks to <NAME>) (`#723 <https://github.com/aio-libs/aiohttp/pull/723>`_)
0.20.1 (12-30-2015)
===================
- Raise RuntimeError if the Timeout context manager is used outside of a
  task context.
- Add number of bytes to stream.read_nowait (`#700 <https://github.com/aio-libs/aiohttp/pull/700>`_)
- Use X-FORWARDED-PROTO for wsgi.url_scheme when available
0.20.0 (12-28-2015)
===================
- Extend list of web exceptions, add HTTPMisdirectedRequest,
HTTPUpgradeRequired, HTTPPreconditionRequired, HTTPTooManyRequests,
HTTPRequestHeaderFieldsTooLarge, HTTPVariantAlsoNegotiates,
HTTPNotExtended, HTTPNetworkAuthenticationRequired status codes (`#644 <https://github.com/aio-libs/aiohttp/pull/644>`_)
- Do not remove AUTHORIZATION header by WSGI handler (`#649 <https://github.com/aio-libs/aiohttp/pull/649>`_)
- Fix broken support for https proxies with authentication (`#617 <https://github.com/aio-libs/aiohttp/pull/617>`_)
- Get REMOTE_* and SERVER_* http vars from headers when listening on
unix socket (`#654 <https://github.com/aio-libs/aiohttp/pull/654>`_)
- Add HTTP 308 support (`#663 <https://github.com/aio-libs/aiohttp/pull/663>`_)
- Add Tf format (time to serve request in seconds, %06f format) to
access log (`#669 <https://github.com/aio-libs/aiohttp/pull/669>`_)
- Remove the ClientResponse.read_and_close() method, deprecated for one
  and a half years
- Optimize chunked encoding: use a single syscall instead of 3 calls
on sending chunked encoded data
- Use TCP_CORK and TCP_NODELAY to optimize network latency and
throughput (`#680 <https://github.com/aio-libs/aiohttp/pull/680>`_)
- Websocket XOR performance improved (`#687 <https://github.com/aio-libs/aiohttp/pull/687>`_)
- Avoid sending cookie attributes in Cookie header (`#613 <https://github.com/aio-libs/aiohttp/pull/613>`_)
- Round server timeouts to seconds for grouping pending calls. That
  leads to fewer poller syscalls, e.g. epoll.poll(). (`#702 <https://github.com/aio-libs/aiohttp/pull/702>`_)
- Close connection on websocket handshake error (`#703 <https://github.com/aio-libs/aiohttp/pull/703>`_)
- Implement class based views (`#684 <https://github.com/aio-libs/aiohttp/pull/684>`_)
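  A minimal class-based view sketch; the route path is a placeholder::

      from aiohttp import web

      class ItemView(web.View):
          # each HTTP method becomes a coroutine method on the view
          async def get(self):
              return web.Response(text='listing')

          async def post(self):
              data = await self.request.post()
              return web.Response(text='created %s' % data.get('name', ''))

      app = web.Application()
      app.router.add_route('*', '/items', ItemView)
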
- Add *headers* parameter to ws_connect() (`#709 <https://github.com/aio-libs/aiohttp/pull/709>`_)
- Drop unused function `parse_remote_addr()` (`#708 <https://github.com/aio-libs/aiohttp/pull/708>`_)
- Close session on exception (`#707 <https://github.com/aio-libs/aiohttp/pull/707>`_)
- Store http code and headers in WSServerHandshakeError (`#706 <https://github.com/aio-libs/aiohttp/pull/706>`_)
- Make some low-level message properties readonly (`#710 <https://github.com/aio-libs/aiohttp/pull/710>`_)
0.19.0 (11-25-2015)
===================
- Memory leak in ParserBuffer (`#579 <https://github.com/aio-libs/aiohttp/pull/579>`_)
- Support gunicorn's `max_requests` settings in gunicorn worker
- Fix wsgi environment building (`#573 <https://github.com/aio-libs/aiohttp/pull/573>`_)
- Improve access logging (`#572 <https://github.com/aio-libs/aiohttp/pull/572>`_)
- Drop unused host and port from low-level server (`#586 <https://github.com/aio-libs/aiohttp/pull/586>`_)
- Add Python 3.5 `async for` implementation to server websocket (`#543 <https://github.com/aio-libs/aiohttp/pull/543>`_)
- Add Python 3.5 `async for` implementation to client websocket
- Add Python 3.5 `async with` implementation to client websocket
- Add charset parameter to web.Response constructor (`#593 <https://github.com/aio-libs/aiohttp/pull/593>`_)
- Forbid passing both Content-Type header and content_type or charset
params into web.Response constructor
- Forbid duplicating of web.Application and web.Request (`#602 <https://github.com/aio-libs/aiohttp/pull/602>`_)
- Add an option to pass Origin header in ws_connect (`#607 <https://github.com/aio-libs/aiohttp/pull/607>`_)
- Add json_response function (`#592 <https://github.com/aio-libs/aiohttp/pull/592>`_)
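  For example (the payload and path are placeholders)::

      from aiohttp import web

      async def status(request):
          # serializes the payload and sets Content-Type: application/json
          return web.json_response({'ok': True})

      app = web.Application()
      app.router.add_route('GET', '/status', status)
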
- Make concurrent connections respect limits (`#581 <https://github.com/aio-libs/aiohttp/pull/581>`_)
- Collect history of responses if redirects occur (`#614 <https://github.com/aio-libs/aiohttp/pull/614>`_)
- Enable passing pre-compressed data in requests (`#621 <https://github.com/aio-libs/aiohttp/pull/621>`_)
- Expose named routes via UrlDispatcher.named_routes() (`#622 <https://github.com/aio-libs/aiohttp/pull/622>`_)
- Allow disabling sendfile by environment variable AIOHTTP_NOSENDFILE (`#629 <https://github.com/aio-libs/aiohttp/pull/629>`_)
- Use ensure_future if available
- Always quote params for Content-Disposition (`#641 <https://github.com/aio-libs/aiohttp/pull/641>`_)
- Support async for in multipart reader (`#640 <https://github.com/aio-libs/aiohttp/pull/640>`_)
- Add Timeout context manager (`#611 <https://github.com/aio-libs/aiohttp/pull/611>`_)
0.18.4 (13-11-2015)
===================
- Relax rule for router names again by adding dash to allowed
  characters: they may contain identifiers, dashes, dots and colons
0.18.3 (25-10-2015)
===================
- Fix formatting for _RequestContextManager helper (`#590 <https://github.com/aio-libs/aiohttp/pull/590>`_)
0.18.2 (22-10-2015)
===================
- Fix regression for OpenSSL < 1.0.0 (`#583 <https://github.com/aio-libs/aiohttp/pull/583>`_)
0.18.1 (20-10-2015)
===================
- Relax rule for router names: they may contain dots and colons
starting from now
0.18.0 (19-10-2015)
===================
- Use errors.HttpProcessingError.message as HTTP error reason and
message (`#459 <https://github.com/aio-libs/aiohttp/pull/459>`_)
- Optimize cythonized multidict a bit
- Change repr's of multidicts and multidict views
- default headers in ClientSession are now case-insensitive
- Make '=' char and 'wss://' scheme safe in urls (`#477 <https://github.com/aio-libs/aiohttp/pull/477>`_)
- `ClientResponse.close()` forces connection closing by default from now (`#479 <https://github.com/aio-libs/aiohttp/pull/479>`_)
  N.B. Backward incompatible change: was `.close(force=False)`. Using the
  `force` parameter for the method is deprecated: use `.release()`
  instead.
- Properly requote URL's path (`#480 <https://github.com/aio-libs/aiohttp/pull/480>`_)
- add `skip_auto_headers` parameter for client API (`#486 <https://github.com/aio-libs/aiohttp/pull/486>`_)
- Properly parse URL path in aiohttp.web.Request (`#489 <https://github.com/aio-libs/aiohttp/pull/489>`_)
- Raise RuntimeError when chunked enabled and HTTP is 1.0 (`#488 <https://github.com/aio-libs/aiohttp/pull/488>`_)
- Fix a bug with processing io.BytesIO as data parameter for client API (`#500 <https://github.com/aio-libs/aiohttp/pull/500>`_)
- Skip auto-generation of Content-Type header (`#507 <https://github.com/aio-libs/aiohttp/pull/507>`_)
- Use sendfile facility for static file handling (`#503 <https://github.com/aio-libs/aiohttp/pull/503>`_)
- Default `response_factory` in `app.router.add_static` now is
`StreamResponse`, not `None`. The functionality is not changed if
default is not specified.
- Drop `ClientResponse.message` attribute, it was always implementation detail.
- Streams are optimized for speed and mostly for memory in case of big
  HTTP message sizes (`#496 <https://github.com/aio-libs/aiohttp/pull/496>`_)
- Fix a bug with server-side cookies when dropping a cookie and setting it
  again without the Max-Age parameter.
- Don't trim redirect URL in client API (`#499 <https://github.com/aio-libs/aiohttp/pull/499>`_)
- Extend precision of access log "D" to milliseconds (`#527 <https://github.com/aio-libs/aiohttp/pull/527>`_)
- Deprecate `StreamResponse.start()` method in favor of
`StreamResponse.prepare()` coroutine (`#525 <https://github.com/aio-libs/aiohttp/pull/525>`_)
  `.start()` is still supported but responses begun with `.start()`
  do not call the signal for response preparation before being sent.
- Add `StreamReader.__repr__`
- Drop Python 3.3 support, from now minimal required version is Python
3.4.1 (`#541 <https://github.com/aio-libs/aiohttp/pull/541>`_)
- Add `async with` support for `ClientSession.request()` and family (`#536 <https://github.com/aio-libs/aiohttp/pull/536>`_)
- Ignore message body on 204 and 304 responses (`#505 <https://github.com/aio-libs/aiohttp/pull/505>`_)
- `TCPConnector` processed both IPv4 and IPv6 by default (`#559 <https://github.com/aio-libs/aiohttp/pull/559>`_)
- Add `.routes()` view for urldispatcher (`#519 <https://github.com/aio-libs/aiohttp/pull/519>`_)
- Route name should be a valid identifier name from now (`#567 <https://github.com/aio-libs/aiohttp/pull/567>`_)
- Implement server signals (`#562 <https://github.com/aio-libs/aiohttp/pull/562>`_)
- Drop a year-old deprecated *files* parameter from client API.
- Added `async for` support for aiohttp stream (`#542 <https://github.com/aio-libs/aiohttp/pull/542>`_)
0.17.4 (09-29-2015)
===================
- Properly parse URL path in aiohttp.web.Request (`#489 <https://github.com/aio-libs/aiohttp/pull/489>`_)
- Add missing coroutine decorator, the client api is await-compatible now
0.17.3 (08-28-2015)
===================
- Remove Content-Length header on compressed responses (`#450 <https://github.com/aio-libs/aiohttp/pull/450>`_)
- Support Python 3.5
- Improve performance of transport in-use list (`#472 <https://github.com/aio-libs/aiohttp/pull/472>`_)
- Fix connection pooling (`#473 <https://github.com/aio-libs/aiohttp/pull/473>`_)
0.17.2 (08-11-2015)
===================
- Don't forget to pass `data` argument forward (`#462 <https://github.com/aio-libs/aiohttp/pull/462>`_)
- Fix multipart read bytes count (`#463 <https://github.com/aio-libs/aiohttp/pull/463>`_)
0.17.1 (08-10-2015)
===================
- Fix multidict comparison to arbitrary abc.Mapping
0.17.0 (08-04-2015)
===================
- Make StaticRoute support Last-Modified and If-Modified-Since headers (`#386 <https://github.com/aio-libs/aiohttp/pull/386>`_)
- Add Request.if_modified_since and Stream.Response.last_modified properties
- Fix deflate compression when writing a chunked response (`#395 <https://github.com/aio-libs/aiohttp/pull/395>`_)
- Request's content-length header is now cleared after a redirect following a
  POST request (`#391 <https://github.com/aio-libs/aiohttp/pull/391>`_)
- Return a 400 if the server received non-HTTP content (`#405 <https://github.com/aio-libs/aiohttp/pull/405>`_)
- Fix keep-alive support for aiohttp clients (`#406 <https://github.com/aio-libs/aiohttp/pull/406>`_)
- Allow gzip compression in high-level server response interface (`#403 <https://github.com/aio-libs/aiohttp/pull/403>`_)
- Rename TCPConnector.resolve and family to dns_cache (`#415 <https://github.com/aio-libs/aiohttp/pull/415>`_)
- Make UrlDispatcher ignore quoted characters during url matching (`#414 <https://github.com/aio-libs/aiohttp/pull/414>`_)
Backward-compatibility warning: this may change the url matched by
your queries if they send quoted character (like %2F for /) (`#414 <https://github.com/aio-libs/aiohttp/pull/414>`_)
- Use optional cchardet accelerator if present (`#418 <https://github.com/aio-libs/aiohttp/pull/418>`_)
- Borrow loop from Connector in ClientSession if loop is not set
- Add context manager support to ClientSession for session closing.
- Add toplevel get(), post(), put(), head(), delete(), options(),
patch() coroutines.
- Fix IPv6 support for client API (`#425 <https://github.com/aio-libs/aiohttp/pull/425>`_)
- Pass SSL context through proxy connector (`#421 <https://github.com/aio-libs/aiohttp/pull/421>`_)
- Make the rule: path for add_route should start with slash
- Don't process request finishing by low-level server on closed event loop
- Don't override data if multiple files are uploaded with same key (`#433 <https://github.com/aio-libs/aiohttp/pull/433>`_)
- Ensure multipart.BodyPartReader.read_chunk read all the necessary data
to avoid false assertions about malformed multipart payload
- Don't send body for 204, 205 and 304 http exceptions (`#442 <https://github.com/aio-libs/aiohttp/pull/442>`_)
- Correctly skip Cython compilation if MSVC is not found (`#453 <https://github.com/aio-libs/aiohttp/pull/453>`_)
- Add response factory to StaticRoute (`#456 <https://github.com/aio-libs/aiohttp/pull/456>`_)
- Don't append trailing CRLF for multipart.BodyPartReader (`#454 <https://github.com/aio-libs/aiohttp/pull/454>`_)
0.16.6 (07-15-2015)
===================
- Skip compilation on Windows if vcvarsall.bat cannot be found (`#438 <https://github.com/aio-libs/aiohttp/pull/438>`_)
0.16.5 (06-13-2015)
===================
- Get rid of all comprehensions and yielding in _multidict (`#410 <https://github.com/aio-libs/aiohttp/pull/410>`_)
0.16.4 (06-13-2015)
===================
- Don't clear current exception in multidict's `__repr__` (cythonized
versions) (`#410 <https://github.com/aio-libs/aiohttp/pull/410>`_)
0.16.3 (05-30-2015)
===================
- Fix StaticRoute vulnerability to directory traversal attacks (`#380 <https://github.com/aio-libs/aiohttp/pull/380>`_)
0.16.2 (05-27-2015)
===================
- Update python version required for `__del__` usage: it's actually
3.4.1 instead of 3.4.0
- Add check for presence of loop.is_closed() method before call the
former (`#378 <https://github.com/aio-libs/aiohttp/pull/378>`_)
0.16.1 (05-27-2015)
===================
- Fix regression in static file handling (`#377 <https://github.com/aio-libs/aiohttp/pull/377>`_)
0.16.0 (05-26-2015)
===================
- Unset waiter future after cancellation (`#363 <https://github.com/aio-libs/aiohttp/pull/363>`_)
- Update request url with query parameters (`#372 <https://github.com/aio-libs/aiohttp/pull/372>`_)
- Support new `fingerprint` param of TCPConnector to enable verifying
SSL certificates via MD5, SHA1, or SHA256 digest (`#366 <https://github.com/aio-libs/aiohttp/pull/366>`_)
- Set up uploaded filename if field value is binary and transfer
  encoding is not specified (`#349 <https://github.com/aio-libs/aiohttp/pull/349>`_)
- Implement `ClientSession.close()` method
- Implement `connector.closed` readonly property
- Implement `ClientSession.closed` readonly property
- Implement `ClientSession.connector` readonly property
- Implement `ClientSession.detach` method
- Add `__del__` to client-side objects: sessions, connectors,
connections, requests, responses.
- Refactor connections cleanup by connector (`#357 <https://github.com/aio-libs/aiohttp/pull/357>`_)
- Add `limit` parameter to connector constructor (`#358 <https://github.com/aio-libs/aiohttp/pull/358>`_)
- Add `request.has_body` property (`#364 <https://github.com/aio-libs/aiohttp/pull/364>`_)
- Add `response_class` parameter to `ws_connect()` (`#367 <https://github.com/aio-libs/aiohttp/pull/367>`_)
- `ProxyConnector` does not support keep-alive requests by default
starting from now (`#368 <https://github.com/aio-libs/aiohttp/pull/368>`_)
- Add `connector.force_close` property
- Add ws_connect to ClientSession (`#374 <https://github.com/aio-libs/aiohttp/pull/374>`_)
- Support optional `chunk_size` parameter in `router.add_static()`
0.15.3 (04-22-2015)
===================
- Fix graceful shutdown handling
- Fix `Expect` header handling for not found and not allowed routes (`#340 <https://github.com/aio-libs/aiohttp/pull/340>`_)
0.15.2 (04-19-2015)
===================
- Flow control subsystem refactoring
- HTTP server performance optimizations
- Allow to match any request method with `*`
- Explicitly call drain on transport (`#316 <https://github.com/aio-libs/aiohttp/pull/316>`_)
- Make chardet module dependency mandatory (`#318 <https://github.com/aio-libs/aiohttp/pull/318>`_)
- Support keep-alive for HTTP 1.0 (`#325 <https://github.com/aio-libs/aiohttp/pull/325>`_)
- Do not chunk single file during upload (`#327 <https://github.com/aio-libs/aiohttp/pull/327>`_)
- Add ClientSession object for cookie storage and default headers (`#328 <https://github.com/aio-libs/aiohttp/pull/328>`_)
- Add `keep_alive_on` argument for HTTP server handler.
0.15.1 (03-31-2015)
===================
- Pass Autobahn Testsuite tests
- Fixed websocket fragmentation
- Fixed websocket close procedure
- Fixed parser buffer limits
- Added `timeout` parameter to WebSocketResponse ctor
- Added `WebSocketResponse.close_code` attribute
0.15.0 (03-27-2015)
===================
- Client WebSockets support
- New Multipart system (`#273 <https://github.com/aio-libs/aiohttp/pull/273>`_)
- Support for "Except" header (`#287 <https://github.com/aio-libs/aiohttp/pull/287>`_) (`#267 <https://github.com/aio-libs/aiohttp/pull/267>`_)
- Set default Content-Type for post requests (`#184 <https://github.com/aio-libs/aiohttp/pull/184>`_)
- Fix issue with construction dynamic route with regexps and trailing slash (`#266 <https://github.com/aio-libs/aiohttp/pull/266>`_)
- Add repr to web.Request
- Add repr to web.Response
- Add repr for NotFound and NotAllowed match infos
- Add repr for web.Application
- Add repr to UrlMappingMatchInfo (`#217 <https://github.com/aio-libs/aiohttp/pull/217>`_)
- Gunicorn 19.2.x compatibility
0.14.4 (01-29-2015)
===================
- Fix issue with error during constructing of url with regex parts (`#264 <https://github.com/aio-libs/aiohttp/pull/264>`_)
0.14.3 (01-28-2015)
===================
- Use path='/' by default for cookies (`#261 <https://github.com/aio-libs/aiohttp/pull/261>`_)
0.14.2 (01-23-2015)
===================
- Connections leak in BaseConnector (`#253 <https://github.com/aio-libs/aiohttp/pull/253>`_)
- Do not swallow websocket reader exceptions (`#255 <https://github.com/aio-libs/aiohttp/pull/255>`_)
- web.Request's read, text, json are memoized (`#250 <https://github.com/aio-libs/aiohttp/pull/250>`_)
0.14.1 (01-15-2015)
===================
- HttpMessage._add_default_headers does not overwrite existing headers (`#216 <https://github.com/aio-libs/aiohttp/pull/216>`_)
- Expose multidict classes at package level
- add `aiohttp.web.WebSocketResponse`
- According to RFC 6455 websocket subprotocol preference order is
provided by client, not by server
- websocket's ping and pong accept optional message parameter
- multidict views do not accept `getall` parameter anymore, it
returns the full body anyway.
- multidicts have optional Cython optimization, cythonized version of
multidicts is about 5 times faster than pure Python.
- multidict.getall() returns `list`, not `tuple`.
- Backward incompatible change: now there are two mutable multidicts
(`MultiDict`, `CIMultiDict`) and two immutable multidict proxies
(`MultiDictProxy` and `CIMultiDictProxy`). Previous edition of
multidicts was not a part of public API BTW.
- Router refactoring to push Not Allowed and Not Found in middleware processing
- Convert `ConnectionError` to `aiohttp.DisconnectedError` and don't
eat `ConnectionError` exceptions from web handlers.
- Remove hop headers from Response class, wsgi response still uses hop headers.
- Allow to send raw chunked encoded response.
- Allow to encode output bytes stream into chunked encoding.
- Allow to compress output bytes stream with `deflate` encoding.
- Server now has a 75-second keepalive timeout; it was non-keepalive by default before.
- Application does not accept `**kwargs` anymore ((`#243 <https://github.com/aio-libs/aiohttp/pull/243>`_)).
- Request is inherited from dict now for making per-request storage to
middlewares ((`#242 <https://github.com/aio-libs/aiohttp/pull/242>`_)).
0.13.1 (12-31-2014)
===================
- Add `aiohttp.web.StreamResponse.started` property (`#213 <https://github.com/aio-libs/aiohttp/pull/213>`_)
- HTML escape traceback text in `ServerHttpProtocol.handle_error`
- Mention handler and middlewares in `aiohttp.web.RequestHandler.handle_request`
on error ((`#218 <https://github.com/aio-libs/aiohttp/pull/218>`_))
0.13.0 (12-29-2014)
===================
- `StreamResponse.charset` converts value to lower-case on assignment.
- Chain exceptions when raise `ClientRequestError`.
- Support custom regexps in route variables (`#204 <https://github.com/aio-libs/aiohttp/pull/204>`_)
- Fixed graceful shutdown, disable keep-alive on connection closing.
- Decode HTTP message with `utf-8` encoding, some servers send headers
in utf-8 encoding (`#207 <https://github.com/aio-libs/aiohttp/pull/207>`_)
- Support `aiohttp.web` middlewares (`#209 <https://github.com/aio-libs/aiohttp/pull/209>`_)
- Add ssl_context to TCPConnector (`#206 <https://github.com/aio-libs/aiohttp/pull/206>`_)
0.12.0 (12-12-2014)
===================
- Deep refactoring of `aiohttp.web` in backward-incompatible manner.
Sorry, we have to do this.
- Automatically force aiohttp.web handlers to coroutines in
`UrlDispatcher.add_route()` (`#186 <https://github.com/aio-libs/aiohttp/pull/186>`_)
- Rename `Request.POST()` function to `Request.post()`
- Added POST attribute
- Response processing refactoring: constructor does not accept Request
instance anymore.
- Pass application instance to finish callback
- Exceptions refactoring
- Do not unquote query string in `aiohttp.web.Request`
- Fix concurrent access to payload in `RequestHandler.handle_request()`
- Add access logging to `aiohttp.web`
- Gunicorn worker for `aiohttp.web`
- Removed deprecated `AsyncGunicornWorker`
- Removed deprecated HttpClient
0.11.0 (11-29-2014)
===================
- Support named routes in `aiohttp.web.UrlDispatcher` (`#179 <https://github.com/aio-libs/aiohttp/pull/179>`_)
- Make websocket subprotocols conform to spec (`#181 <https://github.com/aio-libs/aiohttp/pull/181>`_)
0.10.2 (11-19-2014)
===================
- Don't unquote `environ['PATH_INFO']` in wsgi.py (`#177 <https://github.com/aio-libs/aiohttp/pull/177>`_)
0.10.1 (11-17-2014)
===================
- aiohttp.web.HTTPException and descendants now fill the response body
  with a string like `404: NotFound`
- Fix multidict `__iter__`, the method should iterate over keys, not
(key, value) pairs.
0.10.0 (11-13-2014)
===================
- Add aiohttp.web subpackage for high-level HTTP server support.
- Add *reason* optional parameter to aiohttp.protocol.Response ctor.
- Fix aiohttp.client bug for sending file without content-type.
- Change error text for connection closed between server responses
from 'Can not read status line' to explicit 'Connection closed by
server'
- Drop closed connections from connector (`#173 <https://github.com/aio-libs/aiohttp/pull/173>`_)
- Set server.transport to None on .closing() (`#172 <https://github.com/aio-libs/aiohttp/pull/172>`_)
0.9.3 (10-30-2014)
==================
- Fix compatibility with asyncio 3.4.1+ (`#170 <https://github.com/aio-libs/aiohttp/pull/170>`_)
0.9.2 (10-16-2014)
==================
- Improve redirect handling (`#157 <https://github.com/aio-libs/aiohttp/pull/157>`_)
- Send raw files as is (`#153 <https://github.com/aio-libs/aiohttp/pull/153>`_)
- Better websocket support (`#150 <https://github.com/aio-libs/aiohttp/pull/150>`_)
0.9.1 (08-30-2014)
==================
- Added MultiDict support for client request params and data (`#114 <https://github.com/aio-libs/aiohttp/pull/114>`_).
- Fixed parameter type for IncompleteRead exception (`#118 <https://github.com/aio-libs/aiohttp/pull/118>`_).
- Strictly require ASCII headers names and values (`#137 <https://github.com/aio-libs/aiohttp/pull/137>`_)
- Keep port in ProxyConnector (`#128 <https://github.com/aio-libs/aiohttp/pull/128>`_).
- Python 3.4.1 compatibility (`#131 <https://github.com/aio-libs/aiohttp/pull/131>`_).
0.9.0 (07-08-2014)
==================
- Better client basic authentication support (`#112 <https://github.com/aio-libs/aiohttp/pull/112>`_).
- Fixed incorrect line splitting in HttpRequestParser (`#97 <https://github.com/aio-libs/aiohttp/pull/97>`_).
- Support StreamReader and DataQueue as request data.
- Client files handling refactoring (`#20 <https://github.com/aio-libs/aiohttp/pull/20>`_).
- Backward incompatible: Replace DataQueue with StreamReader for
request payload (`#87 <https://github.com/aio-libs/aiohttp/pull/87>`_).
0.8.4 (07-04-2014)
==================
- Change ProxyConnector authorization parameters.
0.8.3 (07-03-2014)
==================
- Publish TCPConnector properties: verify_ssl, family, resolve, resolved_hosts.
- Don't parse message body for HEAD responses.
- Refactor client response decoding.
0.8.2 (06-22-2014)
==================
- Make ProxyConnector.proxy immutable property.
- Make UnixConnector.path immutable property.
- Fix resource leak for aiohttp.request() with implicit connector.
- Rename Connector's reuse_timeout to keepalive_timeout.
0.8.1 (06-18-2014)
==================
- Use case insensitive multidict for server request/response headers.
- MultiDict.getall() accepts default value.
- Catch server ConnectionError.
- Accept MultiDict (and derived) instances in aiohttp.request header argument.
- Proxy 'CONNECT' support.
0.8.0 (06-06-2014)
==================
- Add support for utf-8 values in HTTP headers
- Allow to use custom response class instead of HttpResponse
- Use MultiDict for client request headers
- Use MultiDict for server request/response headers
- Store response headers in ClientResponse.headers attribute
- Get rid of timeout parameter in aiohttp.client API
- Exceptions refactoring
0.7.3 (05-20-2014)
==================
- Simple HTTP proxy support.
0.7.2 (05-14-2014)
==================
- Get rid of `__del__` methods
- Use ResourceWarning instead of logging warning record.
0.7.1 (04-28-2014)
==================
- Do not unquote client request urls.
- Allow multiple waiters on transport drain.
- Do not return client connection to pool in case of exceptions.
- Rename SocketConnector to TCPConnector and UnixSocketConnector to
UnixConnector.
0.7.0 (04-16-2014)
==================
- Connection flow control.
- HTTP client session/connection pool refactoring.
- Better handling for bad server requests.
0.6.5 (03-29-2014)
==================
- Added client session reuse timeout.
- Better client request cancellation support.
- Better handling responses without content length.
- Added HttpClient verify_ssl parameter support.
0.6.4 (02-27-2014)
==================
- Log content-length missing warning only for put and post requests.
0.6.3 (02-27-2014)
==================
- Better support for server exit.
- Read response body until EOF if content-length is not defined (`#14 <https://github.com/aio-libs/aiohttp/pull/14>`_)
0.6.2 (02-18-2014)
==================
- Fix trailing char in allowed_methods.
- Start slow request timer for first request.
0.6.1 (02-17-2014)
==================
- Added utility method HttpResponse.read_and_close()
- Added slow request timeout.
- Enable socket SO_KEEPALIVE if available.
0.6.0 (02-12-2014)
==================
- Better handling for process exit.
0.5.0 (01-29-2014)
==================
- Allow to use custom HttpRequest client class.
- Use gunicorn keepalive setting for asynchronous worker.
- Log leaking responses.
- python 3.4 compatibility
0.4.4 (11-15-2013)
==================
- Resolve only AF_INET family, because it is not clear how to pass
extra info to asyncio.
0.4.3 (11-15-2013)
==================
- Allow to wait completion of request with `HttpResponse.wait_for_close()`
0.4.2 (11-14-2013)
==================
- Handle exception in client request stream.
- Prevent host resolving for each client request.
0.4.1 (11-12-2013)
==================
- Added client support for `expect: 100-continue` header.
0.4 (11-06-2013)
================
- Added custom wsgi application close procedure
- Fixed concurrent host failure in HttpClient
0.3 (11-04-2013)
================
- Added PortMapperWorker
- Added HttpClient
- Added TCP connection timeout to HTTP client
- Better client connection errors handling
- Gracefully handle process exit
0.2
===
- Fix packaging
<file_sep>/tests/test_client_exceptions.py
# Tests for client_exceptions.py
import errno
import pickle
import sys
from aiohttp import client, client_reqrep
class TestClientResponseError:
request_info = client.RequestInfo(url='http://example.com',
method='GET',
headers={},
real_url='http://example.com')
def test_default_status(self) -> None:
err = client.ClientResponseError(history=(),
request_info=self.request_info)
assert err.status == 0
def test_status(self) -> None:
err = client.ClientResponseError(status=400,
history=(),
request_info=self.request_info)
assert err.status == 400
def test_pickle(self) -> None:
err = client.ClientResponseError(request_info=self.request_info,
history=())
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
pickled = pickle.dumps(err, proto)
err2 = pickle.loads(pickled)
assert err2.request_info == self.request_info
assert err2.history == ()
assert err2.status == 0
assert err2.message == ''
assert err2.headers is None
err = client.ClientResponseError(request_info=self.request_info,
history=(),
status=400,
message='Something wrong',
headers={})
err.foo = 'bar'
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
pickled = pickle.dumps(err, proto)
err2 = pickle.loads(pickled)
assert err2.request_info == self.request_info
assert err2.history == ()
assert err2.status == 400
assert err2.message == 'Something wrong'
assert err2.headers == {}
assert err2.foo == 'bar'
def test_repr(self) -> None:
err = client.ClientResponseError(request_info=self.request_info,
history=())
assert repr(err) == ("ClientResponseError(%r, ())" %
(self.request_info,))
err = client.ClientResponseError(request_info=self.request_info,
history=(),
status=400,
message='Something wrong',
headers={})
assert repr(err) == ("ClientResponseError(%r, (), status=400, "
"message='Something wrong', headers={})" %
(self.request_info,))
def test_str(self) -> None:
err = client.ClientResponseError(request_info=self.request_info,
history=(),
status=400,
message='Something wrong',
headers={})
assert str(err) == ("400, message='Something wrong', "
"url='http://example.com'")
class TestClientConnectorError:
connection_key = client_reqrep.ConnectionKey(
host='example.com', port=8080,
is_ssl=False, ssl=None,
proxy=None, proxy_auth=None, proxy_headers_hash=None)
def test_ctor(self) -> None:
err = client.ClientConnectorError(
connection_key=self.connection_key,
os_error=OSError(errno.ENOENT, 'No such file'))
assert err.errno == errno.ENOENT
assert err.strerror == 'No such file'
assert err.os_error.errno == errno.ENOENT
assert err.os_error.strerror == 'No such file'
assert err.host == 'example.com'
assert err.port == 8080
assert err.ssl is None
def test_pickle(self) -> None:
err = client.ClientConnectorError(
connection_key=self.connection_key,
os_error=OSError(errno.ENOENT, 'No such file'))
err.foo = 'bar'
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
pickled = pickle.dumps(err, proto)
err2 = pickle.loads(pickled)
assert err2.errno == errno.ENOENT
assert err2.strerror == 'No such file'
assert err2.os_error.errno == errno.ENOENT
assert err2.os_error.strerror == 'No such file'
assert err2.host == 'example.com'
assert err2.port == 8080
assert err2.ssl is None
assert err2.foo == 'bar'
def test_repr(self) -> None:
os_error = OSError(errno.ENOENT, 'No such file')
err = client.ClientConnectorError(connection_key=self.connection_key,
os_error=os_error)
assert repr(err) == ("ClientConnectorError(%r, %r)" %
(self.connection_key, os_error))
def test_str(self) -> None:
err = client.ClientConnectorError(
connection_key=self.connection_key,
os_error=OSError(errno.ENOENT, 'No such file'))
assert str(err) == ("Cannot connect to host example.com:8080 ssl:"
"default [No such file]")
class TestClientConnectorCertificateError:
connection_key = client_reqrep.ConnectionKey(
host='example.com', port=8080,
is_ssl=False, ssl=None,
proxy=None, proxy_auth=None, proxy_headers_hash=None)
def test_ctor(self) -> None:
certificate_error = Exception('Bad certificate')
err = client.ClientConnectorCertificateError(
connection_key=self.connection_key,
certificate_error=certificate_error)
assert err.certificate_error == certificate_error
assert err.host == 'example.com'
assert err.port == 8080
assert err.ssl is False
def test_pickle(self) -> None:
certificate_error = Exception('Bad certificate')
err = client.ClientConnectorCertificateError(
connection_key=self.connection_key,
certificate_error=certificate_error)
err.foo = 'bar'
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
pickled = pickle.dumps(err, proto)
err2 = pickle.loads(pickled)
assert err2.certificate_error.args == ('Bad certificate',)
assert err2.host == 'example.com'
assert err2.port == 8080
assert err2.ssl is False
assert err2.foo == 'bar'
def test_repr(self) -> None:
certificate_error = Exception('Bad certificate')
err = client.ClientConnectorCertificateError(
connection_key=self.connection_key,
certificate_error=certificate_error)
assert repr(err) == ("ClientConnectorCertificateError(%r, %r)" %
(self.connection_key, certificate_error))
def test_str(self) -> None:
certificate_error = Exception('Bad certificate')
err = client.ClientConnectorCertificateError(
connection_key=self.connection_key,
certificate_error=certificate_error)
assert str(err) == ("Cannot connect to host example.com:8080 ssl:False"
" [Exception: ('Bad certificate',)]")
class TestServerDisconnectedError:
def test_ctor(self) -> None:
err = client.ServerDisconnectedError()
assert err.message == 'Server disconnected'
err = client.ServerDisconnectedError(message='No connection')
assert err.message == 'No connection'
def test_pickle(self) -> None:
err = client.ServerDisconnectedError(message='No connection')
err.foo = 'bar'
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
pickled = pickle.dumps(err, proto)
err2 = pickle.loads(pickled)
assert err2.message == 'No connection'
assert err2.foo == 'bar'
def test_repr(self) -> None:
err = client.ServerDisconnectedError()
if sys.version_info < (3, 7):
assert repr(err) == ("ServerDisconnectedError"
"('Server disconnected',)")
else:
assert repr(err) == ("ServerDisconnectedError"
"('Server disconnected')")
err = client.ServerDisconnectedError(message='No connection')
if sys.version_info < (3, 7):
assert repr(err) == "ServerDisconnectedError('No connection',)"
else:
assert repr(err) == "ServerDisconnectedError('No connection')"
def test_str(self) -> None:
err = client.ServerDisconnectedError()
assert str(err) == 'Server disconnected'
err = client.ServerDisconnectedError(message='No connection')
assert str(err) == 'No connection'
class TestServerFingerprintMismatch:
def test_ctor(self) -> None:
err = client.ServerFingerprintMismatch(expected=b'exp', got=b'got',
host='example.com', port=8080)
assert err.expected == b'exp'
assert err.got == b'got'
assert err.host == 'example.com'
assert err.port == 8080
def test_pickle(self) -> None:
err = client.ServerFingerprintMismatch(expected=b'exp', got=b'got',
host='example.com', port=8080)
err.foo = 'bar'
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
pickled = pickle.dumps(err, proto)
err2 = pickle.loads(pickled)
assert err2.expected == b'exp'
assert err2.got == b'got'
assert err2.host == 'example.com'
assert err2.port == 8080
assert err2.foo == 'bar'
def test_repr(self) -> None:
err = client.ServerFingerprintMismatch(b'exp', b'got',
'example.com', 8080)
assert repr(err) == ("<ServerFingerprintMismatch expected=b'exp' "
"got=b'got' host='example.com' port=8080>")
class TestInvalidURL:
def test_ctor(self) -> None:
err = client.InvalidURL(url=':wrong:url:')
assert err.url == ':wrong:url:'
def test_pickle(self) -> None:
err = client.InvalidURL(url=':wrong:url:')
err.foo = 'bar'
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
pickled = pickle.dumps(err, proto)
err2 = pickle.loads(pickled)
assert err2.url == ':wrong:url:'
assert err2.foo == 'bar'
def test_repr(self) -> None:
err = client.InvalidURL(url=':wrong:url:')
assert repr(err) == "<InvalidURL :wrong:url:>"
def test_str(self) -> None:
err = client.InvalidURL(url=':wrong:url:')
assert str(err) == ':wrong:url:'
<file_sep>/examples/curl.py
#!/usr/bin/env python3
import argparse
import asyncio
import aiohttp
async def curl(url):
async with aiohttp.ClientSession() as session:
async with session.request('GET', url) as response:
print(repr(response))
chunk = await response.content.read()
print('Downloaded: %s' % len(chunk))
if __name__ == '__main__':
ARGS = argparse.ArgumentParser(description="GET url example")
ARGS.add_argument('url', nargs=1, metavar='URL',
help="URL to download")
ARGS.add_argument('--iocp', default=False, action="store_true",
help="Use ProactorEventLoop on Windows")
options = ARGS.parse_args()
if options.iocp:
from asyncio import events, windows_events
el = windows_events.ProactorEventLoop()
events.set_event_loop(el)
loop = asyncio.get_event_loop()
loop.run_until_complete(curl(options.url[0]))
|
4a253cd3e3997377054d8cadeae8b534697a58f2
|
[
"TOML",
"Python",
"Markdown",
"reStructuredText"
] | 50
|
reStructuredText
|
gruns/aiohttp
|
055c5342d5249ee508be693a76260553f9faa21d
|
1087f044f456d28aa2c3be7918a0bce4de3e518e
|
refs/heads/master
|
<file_sep>module github.com/taylorza/go-ratelimiter
<file_sep>package ratelimiter
import (
"sync"
"time"
)
const defaultRefreshPeriod = 10
// Limiter limits the rate at which work is done.
type Limiter struct {
tpp float64
tokens float64
m sync.Mutex
c *sync.Cond
t *time.Ticker
done chan bool
started bool
}
// New creates a rate limiter that can be used to throttle work to a target rate per second. The returned rate limiter is started and ready to throttle.
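//
// A minimal usage sketch (doWork is an illustrative placeholder, not part of this package):
//
//	l := ratelimiter.New(100) // allow roughly 100 operations per second
//	for i := 0; i < 1000; i++ {
//		l.Throttle() // blocks until the limiter has capacity
//		doWork()
//	}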
func New(rate uint) *Limiter {
if rate == 0 {
panic("rate must be greater than 0")
}
l := &Limiter{}
l.done = make(chan bool)
l.c = sync.NewCond(&l.m)
l.t = time.NewTicker(defaultRefreshPeriod * time.Millisecond)
l.SetRate(rate)
l.Start()
return l
}
// Start starts a stopped rate limiter; if the rate limiter is already started, the operation does nothing.
func (l *Limiter) Start() {
l.m.Lock()
defer l.m.Unlock()
if !l.started {
l.tokens = l.tpp
l.started = true
go l.tokenReplenisher()
}
}
// Stop stops the rate limiter and releases its internal resources.
func (l *Limiter) Stop() {
l.m.Lock()
defer l.m.Unlock()
if l.started {
l.done <- true
l.t.Stop()
l.started = false
}
}
// SetRate updates the rate of the ratelimiter on the fly.
func (l *Limiter) SetRate(rate uint) {
l.m.Lock()
defer l.m.Unlock()
l.tpp = float64(rate) / 1000 * defaultRefreshPeriod
}
// Throttle blocks if the current rate of work exceeds the limiter.
func (l *Limiter) Throttle() {
l.m.Lock()
if l.started {
for l.tokens <= 0 {
l.c.Wait()
}
l.tokens--
}
l.m.Unlock()
}
func (l *Limiter) tokenReplenisher() {
for {
select {
case <-l.done:
return
case <-l.t.C:
l.m.Lock()
notify := l.tokens <= 0
l.tokens += l.tpp
if l.tokens > l.tpp {
l.tokens = l.tpp
}
l.m.Unlock()
if notify {
l.c.Broadcast()
}
}
}
}
<file_sep>package main
import (
"fmt"
"time"
"github.com/taylorza/go-ratelimiter"
)
func main() {
l := ratelimiter.New(1)
// print a '.' at a rate of 1 per second
start := time.Now()
for i := 0; i < 10; i++ {
l.Throttle()
fmt.Print(".")
}
fmt.Println()
fmt.Println("Time taken:", time.Since(start))
// print a '.' at a rate of 10 per second
l.SetRate(10)
start = time.Now()
for i := 0; i < 100; i++ {
l.Throttle()
fmt.Print(".")
}
fmt.Println()
fmt.Println("Time taken:", time.Since(start))
// print a '.' at a rate of 2 per second
l.SetRate(2)
start = time.Now()
for i := 0; i < 20; i++ {
l.Throttle()
fmt.Print(".")
}
fmt.Println()
fmt.Println("Time taken:", time.Since(start))
}
<file_sep># RateLimiter [](https://travis-ci.org/taylorza/go-ratelimiter) [](https://goreportcard.com/report/github.com/taylorza/go-ratelimiter) [](http://godoc.org/github.com/taylorza/go-ratelimiter)
# go-ratelimiter
RateLimiter can be used to limit the rate at which work is done, for example to control the TPS (transactions per second) in a load-testing client.
## Installation
Use the 'go' command:
$ go get github.com/taylorza/go-ratelimiter
## Example
```go
package main
import (
"fmt"
"time"
"github.com/taylorza/go-ratelimiter"
)
func main() {
// create a rate limiter that will limit work to 1 per second
l := ratelimiter.New(1)
// print a '.' at a rate of 1 per second
start := time.Now()
for i := 0; i < 10; i++ {
l.Throttle()
fmt.Print(".")
}
fmt.Println()
fmt.Println("Time taken:", time.Since(start))
}
```
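The rate can also be changed on the fly with `SetRate`, and a running limiter can be stopped with `Stop` to release its internal resources. A minimal sketch of both (the dot-printing loop is just illustrative work):
```go
package main

import (
	"fmt"
	"time"

	"github.com/taylorza/go-ratelimiter"
)

func main() {
	// create a rate limiter that will limit work to 5 per second
	l := ratelimiter.New(5)

	start := time.Now()
	for i := 0; i < 10; i++ {
		l.Throttle()
		fmt.Print(".")
	}

	// change the rate on the fly without recreating the limiter
	l.SetRate(20)
	for i := 0; i < 40; i++ {
		l.Throttle()
		fmt.Print(".")
	}
	fmt.Println()
	fmt.Println("Time taken:", time.Since(start))

	// stop the limiter and release its internal resources when done
	l.Stop()
}
```
The example program in this repository demonstrates the same pattern with several rate changes.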
## Copyright
Copyright (C)2013-2018 by <NAME> (taylorza)
See [LICENSE](https://github.com/taylorza/go-ratelimiter/blob/master/LICENSE)
|
96a426bbe903c1054400bd8e040702965b22734a
|
[
"Go",
"Go Module",
"Markdown"
] | 4
|
Go Module
|
taylorza/go-ratelimiter
|
68135e78bcb9c966f977cdb38a0857b267f1a63d
|
3cb012a8ad65ea7f62438253bf3303b8283a586c
|
refs/heads/master
|
<repo_name>cokesnort/readme<file_sep>/spec/factories/authors.rb
FactoryGirl.define do
factory(:author) do
first_name { Faker::Name.first_name }
last_name { Faker::Name.last_name }
trait(:book_author) { author_type 'book_author' }
trait(:translator) { author_type 'translator' }
trait(:document_author) { author_type 'document_author' }
end
end
<file_sep>/spec/services/fb_header_parser.rb
require 'rails_helper'
RSpec.describe FbHeaderParser, type: :service do
before { allow_any_instance_of(Base64ToImage).to receive(:call).and_return('file') }
let(:path) { Rails.root.join('spec/fixtures/fiction.fb2') }
subject { described_class.new(path).call }
it 'parses the book' do
expect(subject).to eq(
authors: [{
first_name: 'Пол',
middle_name: '',
last_name: 'Боулз',
email: '' }],
translators: [{
first_name: 'Валерий',
middle_name: '',
last_name: 'Нугатов',
email: '' }],
title: 'Знаки во времени. Марокканские истории',
annotation: "\"Знаки во времени\" - необычная книга, ее жанр невозможно определить. Марокканские легенды, исторические анекдоты, записки путешественников и строки песен перемежаются с вымыслом.",
genre: ['story'],
lang: 'ru',
keywords: '',
cover: 'file')
end
end
<file_sep>/app/services/fb_header_parser.rb
class FbHeaderParser < BookParserBase
def call
xml = Nokogiri::XML(@file, nil, 'UTF-8')
title_info = xml.at_css('title-info')
book = {
authors: title_info.css('author').map { |a| map_author(a) },
translators: title_info.css('translator').map { |a| map_author(a) },
title: title_info.css('book-title').text,
annotation: title_info.css('annotation > p').text,
genre: title_info.css('genre').map(&:text),
lang: title_info.css('lang').text,
keywords: title_info.css('keywords').text,
cover: grab_image
}
book
end
private
def grab_image
@file.pos = 0
base64 = @file.read.match(/binary.*image.*>(.*)<\/binary/m)[1].gsub("\n",'')
Base64ToImage.new(base64).call
end
def map_author(author)
{
first_name: author.css('first-name').text,
middle_name: author.css('middle-name').text,
last_name: author.css('last-name').text,
email: author.css('email').text
}
end
end
<file_sep>/app/admin/author.rb
ActiveAdmin.register Author do
form do |f|
f.semantic_errors
inputs 'Author details' do
input :first_name
input :last_name
input :email
input :author_type, as: :select, collection: Author.types_for_select
end
f.submit
end
permit_params %i(first_name last_name email author_type)
end
<file_sep>/db/migrate/20151026203118_create_books.rb
class CreateBooks < ActiveRecord::Migration
def change
create_table :books do |t|
t.string :title, null: false
t.string :annotation
t.string :publisher
t.string :custom
t.string :language
t.datetime :date
t.string :year
t.string :book_id
t.string :cover
t.string :file
t.timestamps null: false
end
add_index :books, :title
end
end
<file_sep>/app/services/base64_to_image.rb
class Base64ToImage
def initialize(base64)
@base64 = base64
end
def call(path = nil)
image = path.present? ? File.new(path, 'wb') : Tempfile.new('image')
image.binmode
image.write(Base64.decode64(@base64))
image.flush
image
end
end
<file_sep>/db/migrate/20151026211137_create_books_genres.rb
class CreateBooksGenres < ActiveRecord::Migration
def change
create_table :books_genres do |t|
t.belongs_to :book
t.belongs_to :genre
end
add_index :books_genres, [:book_id, :genre_id]
end
end
<file_sep>/app/services/encoding_converter.rb
class EncodingConverter
def initialize(path)
@path = path
end
def call
File.open(@path, encoding: 'utf-8') do |file|
xml_header = file.readline
encoding = Nokogiri::XML(xml_header).encoding
File.read(@path, encoding: "#{encoding}:utf-8")
end
end
end
<file_sep>/app/models/book.rb
class Book < ActiveRecord::Base
validates :title, presence: :true
has_and_belongs_to_many :authors, -> { book_author }
has_and_belongs_to_many :translators, -> { translator }, class_name: 'Author'
has_and_belongs_to_many :document_authors, -> { document_author }, class_name: 'Author'
has_and_belongs_to_many :genres
accepts_nested_attributes_for :authors
accepts_nested_attributes_for :genres
mount_uploader :file, BookUploader
mount_uploader :cover, BookCoverUploader
end
<file_sep>/spec/services/base64_to_image_spec.rb
require 'rails_helper'
RSpec.describe Base64ToImage, type: :service do
let(:base64) { Rails.root.join('spec/fixtures/base64.txt').read }
subject { described_class.new(base64).call }
it 'extracts image' do
expect(subject.size).to eq 6157
end
end
<file_sep>/spec/factories/books.rb
FactoryGirl.define do
factory(:book) do
title { Faker::Book.title }
publisher { Faker::Book.publisher }
end
end
<file_sep>/spec/services/encoding_converter_spec.rb
require 'rails_helper'
RSpec.describe EncodingConverter, type: :service do
let(:book_path) { Rails.root.join('spec/fixtures/cp1251.fb2') }
let(:instance) { described_class.new(book_path) }
subject { instance.call }
it 'should return utf-8 encoded string' do
expect(subject.encoding).to eq Encoding::UTF_8
end
it { is_expected.to match /Джером Д. Сэлинджер/ }
end
<file_sep>/Gemfile
source 'https://rubygems.org'
gem 'rails', '4.2.4'
gem 'sqlite3'
gem 'sass-rails'
gem 'uglifier'
gem 'coffee-rails'
gem 'bootstrap-sass'
gem 'jquery-rails'
gem 'turbolinks'
gem 'jbuilder'
gem 'devise'
gem 'haml-rails'
gem 'puma'
gem 'quiet_assets'
gem 'simple_form'
gem 'carrierwave'
gem 'mini_magick'
gem 'activeadmin', '~> 1.0.0.pre2'
gem 'nokogiri'
group :development, :test do
gem 'faker'
gem 'factory_girl_rails'
gem 'database_cleaner'
gem 'pry'
gem 'byebug'
gem 'rspec-rails'
gem 'spring-commands-rspec'
gem 'spring'
end
group :development do
gem 'web-console'
end
<file_sep>/spec/models/book_spec.rb
require 'rails_helper'
RSpec.describe Book, type: :model do
context 'when books has many authors' do
let!(:authors) { create_list(:author, 2, :book_author) }
let!(:translators) { create_list(:author, 3, :translator) }
let!(:document_authors) { create_list(:author, 1, :document_author) }
let!(:book) do
create :book, authors: authors,
translators: translators,
document_authors: document_authors
end
subject { book.reload }
it 'has authors' do
expect(subject.authors).to match_array authors
end
it 'has translators' do
expect(subject.translators).to match_array translators
end
it 'has document authors' do
expect(subject.document_authors).to match_array document_authors
end
end
end
<file_sep>/app/models/author.rb
class Author < ActiveRecord::Base
enum author_type: { book_author: 'book_author', translator: 'translator',
document_author: 'document_author' }
has_and_belongs_to_many :books
validates :author_type, inclusion: { in: %w{ book_author translator document_author } }
def self.types_for_select
Hash[['Book author', 'Translator', 'Document author'].zip(author_types.values)]
end
end
<file_sep>/app/services/book_parser_base.rb
class BookParserBase
def initialize(path)
content = EncodingConverter.new(path).call
@file = Tempfile.new('book')
@file.write(content)
@file.pos = 0
end
def header
fail('Not implemented.')
end
end
<file_sep>/app/admin/book.rb
ActiveAdmin.register Book do
form do |f|
f.semantic_errors
inputs 'Book details' do
input :title
input :file, as: :file, label: 'Document', hint: 'Upload the book to fill fields automatically.'
input :publisher
input :language
input :year
input :book_id, label: 'Book ID'
end
inputs do
has_many :genres do |a|
a.input :name
end
end
inputs do
has_many :authors do |a|
a.input :first_name
a.input :last_name
a.input :author_type, as: :hidden, html: { value: 'book_author' }
end
end
inputs do
input :annotation, as: :text
end
submit
end
permit_params %i(title publisher language year book_id file annotation cover)
end
|
c98de3221c3d1e06b0a7c5ca64183e7b058d473f
|
[
"Ruby"
] | 17
|
Ruby
|
cokesnort/readme
|
91df7439bb5cd83feb39468fb4d83618690254a5
|
76636dc0ff071a05fcc27ae509a901a08addb457
|
refs/heads/master
|
<repo_name>farhadrgh/torchani<file_sep>/test_requirements.txt
tb-nightly
tqdm
ase
h5py
pillow
pkbar
<file_sep>/ci/install_dependencies.sh
#!/bin/bash
python -m pip install --upgrade pip
pip install --pre torch torchvision -f https://download.pytorch.org/whl/nightly/cpu/torch_nightly.html
pip install tqdm pyyaml future pkbar
pip install 'ase<=3.17'
<file_sep>/examples/jit.py
# -*- coding: utf-8 -*-
"""
Using TorchScript to serialize and deploy model
===============================================
Models in TorchANI's model zoo support TorchScript. TorchScript is a way to create
serializable and optimizable models from PyTorch code. It allows users to save their
models from a Python process and load them in a process where there is no Python dependency.
"""
###############################################################################
# To begin with, let's first import the modules we will use:
import torch
import torchani
###############################################################################
# Let's now load the built-in ANI-1ccx models. The built-in ANI-1ccx ensemble contains 8
# models trained with different initializations.
model = torchani.models.ANI1ccx()
###############################################################################
# It is very easy to compile and save the model using `torch.jit`.
compiled_model = torch.jit.script(model)
torch.jit.save(compiled_model, 'compiled_model.pt')
###############################################################################
# Besides compiling the ensemble, it is also possible to compile a single network
compiled_model0 = torch.jit.script(model[0])
torch.jit.save(compiled_model0, 'compiled_model0.pt')
###############################################################################
# For testing purposes, we will now load the models we just saved and check that they
# produce the same outputs as the original models:
loaded_compiled_model = torch.jit.load('compiled_model.pt')
loaded_compiled_model0 = torch.jit.load('compiled_model0.pt')
###############################################################################
# We use the molecule below to test:
coordinates = torch.tensor([[[0.03192167, 0.00638559, 0.01301679],
[-0.83140486, 0.39370209, -0.26395324],
[-0.66518241, -0.84461308, 0.20759389],
[0.45554739, 0.54289633, 0.81170881],
[0.66091919, -0.16799635, -0.91037834]]])
species = model.species_to_tensor('CHHHH').unsqueeze(0)
###############################################################################
# And here is the result:
energies_ensemble = model((species, coordinates)).energies
energies_single = model[0]((species, coordinates)).energies
energies_ensemble_jit = loaded_compiled_model((species, coordinates)).energies
energies_single_jit = loaded_compiled_model0((species, coordinates)).energies
print('Ensemble energy, eager mode vs loaded jit:', energies_ensemble.item(), energies_ensemble_jit.item())
print('Single network energy, eager mode vs loaded jit:', energies_single.item(), energies_single_jit.item())
<file_sep>/torchani/nn.py
import torch
from torch import Tensor
from typing import Tuple, NamedTuple
class SpeciesEnergies(NamedTuple):
species: Tensor
energies: Tensor
class ANIModel(torch.nn.Module):
"""ANI model that compute properties from species and AEVs.
Different atom types might have different modules, when computing
properties, for each atom, the module for its corresponding atom type will
be applied to its AEV, after that, outputs of modules will be reduced along
different atoms to obtain molecular properties.
Arguments:
modules (:class:`collections.abc.Sequence`): Modules for each atom
types. Atom types are distinguished by their order in
:attr:`modules`, which means, for example ``modules[i]`` must be
the module for atom type ``i``. Different atom types can share a
module by putting the same reference in :attr:`modules`.
"""
def __init__(self, modules):
super(ANIModel, self).__init__()
self.module_list = torch.nn.ModuleList(modules)
def __getitem__(self, i):
return self.module_list[i]
def forward(self, species_aev: Tuple[Tensor, Tensor]) -> SpeciesEnergies:
species, aev = species_aev
species_ = species.flatten()
aev = aev.flatten(0, 1)
output = aev.new_zeros(species_.shape)
for i, m in enumerate(self.module_list):
mask = (species_ == i)
midx = mask.nonzero().flatten()
if midx.shape[0] > 0:
input_ = aev.index_select(0, midx)
output.masked_scatter_(mask, m(input_).flatten())
output = output.view_as(species)
return SpeciesEnergies(species, torch.sum(output, dim=1))
class Ensemble(torch.nn.Module):
"""Compute the average output of an ensemble of modules."""
def __init__(self, modules):
super(Ensemble, self).__init__()
self.modules_list = torch.nn.ModuleList(modules)
self.size = len(self.modules_list)
def forward(self, species_input: Tuple[Tensor, Tensor]) -> SpeciesEnergies:
sum_ = 0
for x in self.modules_list:
sum_ += x(species_input)[1]
species, _ = species_input
return SpeciesEnergies(species, sum_ / self.size)
def __getitem__(self, i):
return self.modules_list[i]
class Sequential(torch.nn.Module):
"""Modified Sequential module that accept Tuple type as input"""
def __init__(self, *modules):
super(Sequential, self).__init__()
self.modules_list = torch.nn.ModuleList(modules)
def forward(self, input_: Tuple[Tensor, Tensor]):
for module in self.modules_list:
input_ = module(input_)
return input_
class Gaussian(torch.nn.Module):
"""Gaussian activation"""
def forward(self, x: Tensor) -> Tensor:
return torch.exp(- x * x)
|
fa6cd69bd663d901b4fe84221433f0361b5a6e66
|
[
"Python",
"Text",
"Shell"
] | 4
|
Text
|
farhadrgh/torchani
|
fb2072c926b2e2facbe8de108c345ce2c15da50a
|
7ec812d39af2cef00e412e5a29079524ffce5f28
|
refs/heads/main
|
<file_sep>import "./App.css";
import TVSeries from "./components/tvSeries";
import { FormattedMessage } from "react-intl";
function App() {
return (
<>
<header>
<h1>
<FormattedMessage id='TVSeries' />
</h1>
</header>
<hr></hr>
<main>
<TVSeries></TVSeries>
</main>
</>
);
}
export default App;
<file_sep># Programación Web - Parcial 2
PWA and internationalizable application built with React
|
b56e36542c58d6f936e64656bf3c34142c0a8795
|
[
"JavaScript",
"Markdown"
] | 2
|
JavaScript
|
jjpenad/WEB_Parcial2
|
dca692cb8d4c786662bcdfa9db3374fbafcaf984
|
5e21e8827cda444af37df2b207788cf1425e3a16
|
refs/heads/master
|
<file_sep>require 'spec_helper'
RSpec.describe "when running hosts" do
include_context "demo context"
it "collects uptime" do
on(hosts, 'ping -c 1 server')
end
it "has free memory" do
on(hosts, 'free -m')
end
end
<file_sep>require 'logger'
require 'fileutils'
require 'bolt'
require 'bolt/cli'
require "flask/version"
require 'rspec'
require 'ruby_terraform'
class Flask
attr_accessor :terraform_modules, :bolt_inventory, :workspace
def initialize
logger = Logger.new($stdout)
# logger.level = Logger::DEBUG
RubyTerraform.configure do |config|
config.logger = logger
end
ENV['TF_IN_AUTOMATION'] = 'true'
end
def init
FileUtils.mkdir_p(@workspace)
@statefile = File.join(@workspace, 'terraform.tfstate')
Dir.chdir(@workspace) do
if Dir.glob("*.tf").empty?
RubyTerraform.init(
from_module: @terraform_modules,
get: true,
backend: true
)
else
RubyTerraform.init(
get: true,
backend: true
)
end
end
end
def provision
Dir.chdir(@workspace) do
RubyTerraform.apply(
input: false,
no_backup: true,
auto_approve: true
)
end
end
def exec(targets, command = "whoami")
ENV['BOLT_DISABLE_ANALYTICS'] = 'true'
Dir.chdir(@workspace) do
argv = ["command", "run"]
argv << command
argv.concat(
[
"--targets", targets, "--inventoryfile", @bolt_inventory,
"--debug", "--modulepath", "/Users/josh/.puppetlabs/bolt/modules"
]
)
cli = Bolt::CLI.new(argv)
opts = cli.parse
cli.execute(opts)
end
end
def destroy
Dir.chdir(@workspace) do
RubyTerraform.destroy(
force: true,
)
end
FileUtils.rm_r(@workspace)
end
end
class RSpecListener
def initialize
@failure_count = 0
end
def example_failed(failure)
@failure_count += 1
end
def failures?
@failure_count > 0
end
end
module DSL
def on(hosts, command)
# need to return an array of results, including exit code
# and raise if there are failure
flask.exec(hosts, command)
end
end
flask = Flask.new
shared_context "global context" do
let(:flask) { flask }
end
RSpec.configure do |config|
config.include DSL
config.include_context "global context"
config.add_setting :terraform_modules
config.add_setting :bolt_inventory
config.add_setting(:workspace, default: ".flask")
listener = RSpecListener.new
config.reporter.register_listener(listener, :example_failed)
config.before(:suite) do
flask.bolt_inventory = config.bolt_inventory
flask.terraform_modules = config.terraform_modules
flask.workspace = config.workspace
flask.init
flask.provision
end
config.after(:suite) do
unless listener.failures?
flask.destroy
end
end
end
<file_sep>require "test_helper"
class FlaskTest < Minitest::Test
def test_that_it_has_a_version_number
refute_nil ::Flask::VERSION
end
end
<file_sep>$LOAD_PATH.unshift File.expand_path("../../lib", __FILE__)
require "flask"
require "minitest/autorun"
<file_sep>require 'json'
#ENV['PATH'] = "C:\\Program Files\\Puppet Labs\\Puppet\\bin;#{ENV['PATH']}"
#puts ENV['PATH']
params = JSON.parse(ENV['PT_foo'])
#output = %x(cmd.exe /c "C:\\Program Files\\Puppet Labs\\Puppet\\bin\\puppet.bat" --version)
output = %x(puppet --version)
puts output
result = {"result" => output.chomp}
puts result.to_json
<file_sep>require 'flask'
# These belong in the spec_helper for the project using flask,
# so it can define what the topology should be.
RSpec.shared_context "demo context" do
let(:hosts) { 'nodes' }
# let(:agents) { collect agents nodes from inventory }
end
BASEDIR = File.expand_path("~/work/terraform-bolt").freeze
RSpec.configure do |config|
config.terraform_modules = File.join(BASEDIR, 'terraform')
config.bolt_inventory = File.join(BASEDIR, 'Boltdir', 'inventory.yaml')
end
<file_sep>#!/usr/bin/env ruby
# frozen_string_literal: true
require 'flask'
flask = Flask.new
flask.init
flask.provision
flask.exec
flask.destroy
|
a25b0b00b65ef03374e99dd8ec2161a766515fef
|
[
"Ruby"
] | 7
|
Ruby
|
joshcooper/flask
|
8cfa340b8987d146c18a29fb425413c7ce498461
|
a64670199a6fe00599e7f8432e830654d4a98e52
|
refs/heads/master
|
<repo_name>chrisands/rust_learning<file_sep>/3.5.1.fahrenheit_and_celsius/src/main.rs
use std::io;
fn main() {
let mut f;
loop {
println!("Convert fahrenheit or celsius? (f/c)");
f = String::new();
io::stdin().read_line(&mut f)
.expect("Failed to read line");
if f.trim() == "f" {
convert_f_to_c();
break;
} else if f.trim() == "c" {
convert_c_to_f();
break;
}
}
}
fn convert_f_to_c() {
    let mut f = String::new();
    loop {
        // read_line appends to the buffer, so clear any previous (invalid) input first
        f.clear();
        println!("Input fahrenheit number");
        io::stdin().read_line(&mut f)
            .expect("Failed to read line");
        let fahrenheit: f64 = match f.trim().parse() {
            Ok(num) => num,
            Err(_) => continue,
        };
        println!("{} celsius", (fahrenheit - 32.0) / 1.8);
        break;
    }
}
fn convert_c_to_f() {
    let mut c = String::new();
    loop {
        // read_line appends to the buffer, so clear any previous (invalid) input first
        c.clear();
        println!("Input celsius number");
        io::stdin().read_line(&mut c)
            .expect("Failed to read line");
        let celsius: f64 = match c.trim().parse() {
            Ok(num) => num,
            Err(_) => continue,
        };
        println!("{} fahrenheit", (celsius * 1.8) + 32.0);
        break;
    }
}
<file_sep>/3.5.2.fibonacci_number/src/main.rs
use std::io;
fn main() {
const GOLDEN_RATIO: f64 = 1.61803398874989484820;
let mut number = String::new();
    loop {
        // read_line appends to the buffer, so clear any previous (invalid) input first
        number.clear();
        println!("Enter fibonacci sequence index");
io::stdin().read_line(&mut number)
.expect("Failed to read line");
let number: i32 = match number.trim().parse() {
Ok(num) => num,
Err(_) => continue,
};
        let square_root = 5.0_f64.sqrt();
        // Binet's formula; round because the floating-point result is only approximately an integer
        let fibonacci_number = ((GOLDEN_RATIO.powi(number) - (1.0 - GOLDEN_RATIO).powi(number)) / square_root).round();
println!("Index {} of Fibonacci number is {}", number, fibonacci_number);
break;
}
}
<file_sep>/README.md
# Rust learning project
> A repository of projects completed while learning the Rust language
Most tasks for these learning projects come from the official [Rust book](https://doc.rust-lang.org/stable/book/).
## Contribute
If you have advice or an alternative way of solving any of the projects, feel free to open an issue or PR.
|
00c0e740355cd84571f5123015681349c23cd1ef
|
[
"Markdown",
"Rust"
] | 3
|
Rust
|
chrisands/rust_learning
|
1d5461eb5871c2c6e170f70bf36626498a6b4ee5
|
1ea7807b361718e580d2d8a94af45a77ecf6422f
|
refs/heads/master
|
<file_sep>package com.example.wladek.wira.activity;
import android.content.Context;
import android.os.Bundle;
import android.support.v7.app.ActionBar;
import android.support.v7.app.AppCompatActivity;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.CheckBox;
import android.widget.CompoundButton;
import android.widget.ImageView;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import com.example.wladek.wira.R;
import com.example.wladek.wira.pojo.ExpenseClaim;
import com.example.wladek.wira.pojo.ExpenseItem;
import com.example.wladek.wira.utils.DatabaseHelper;
import com.squareup.picasso.Picasso;
import java.io.File;
import java.util.ArrayList;
public class AttachExpenseActivity extends AppCompatActivity {
ExpenseClaim expenseClaim;
DatabaseHelper databaseHelper;
ActionBar actionBar;
ArrayList<ExpenseItem> expenses = new ArrayList<>();
ArrayList<ExpenseItem> checkedItems = new ArrayList<>();
ListView lstExpenses;
CustomAdaptor customAdaptor;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_attach_expense);
databaseHelper = new DatabaseHelper(this);
actionBar = getSupportActionBar();
actionBar.setHomeButtonEnabled(true);
expenseClaim = (ExpenseClaim) getIntent().getSerializableExtra("claim");
loadExpenses();
if (expenseClaim != null) {
actionBar.setTitle(expenseClaim.getTitle());
}
lstExpenses = (ListView) findViewById(R.id.expensesListView);
if (customAdaptor == null) {
customAdaptor = new CustomAdaptor(this, expenses, checkedItems);
} else {
loadExpenses();
customAdaptor.notifyDataSetChanged();
}
lstExpenses.setAdapter(customAdaptor);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.shared_ok, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.okIcon:
onBackPressed();
return true;
default:
return false;
}
}
private class CustomAdaptor extends BaseAdapter {
private Context context;
private ArrayList<ExpenseItem> items = new ArrayList<>();
private ArrayList<ExpenseItem> checked = new ArrayList<>();
ViewHolder viewHolder;
LayoutInflater layoutInflater;
Picasso mPicasso;
public CustomAdaptor(Context context, ArrayList<ExpenseItem> expenseItems,
ArrayList<ExpenseItem> checked) {
layoutInflater = LayoutInflater.from(context);
this.context = context;
this.items = expenseItems;
this.checked = checked;
this.mPicasso = Picasso.with(context);
}
@Override
public int getCount() {
return items.size();
}
@Override
public Object getItem(int position) {
return items.get(position);
}
@Override
public long getItemId(int position) {
return position;
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
final ExpenseItem expenseItem = items.get(position);
// if (convertView == null) {
viewHolder = new ViewHolder();
convertView = layoutInflater.inflate(R.layout.claim_lst_expenses_to_attach_custom_item, null);
viewHolder.imgExpensePic = (ImageView) convertView.findViewById(R.id.imgExpensePic);
viewHolder.txtExpenseTitle = (TextView) convertView.findViewById(R.id.txtExpenseTitle);
viewHolder.txtExpenseAmount = (TextView) convertView.findViewById(R.id.txtExpenseAmount);
viewHolder.checkBoxAttach = (CheckBox) convertView.findViewById(R.id.checkBoxAttach);
convertView.setTag(viewHolder);
// } else {
// viewHolder = (ViewHolder) convertView.getTag();
// }
if (expenseItem.getImagePath() != null) {
mPicasso
.load(new File(expenseItem.getImagePath()))
.placeholder(R.mipmap.report_icon)
.error(R.mipmap.report_icon)
.resize(150, 100)
.into(viewHolder.imgExpensePic);
}
if (!checked.isEmpty() && checked.contains(expenseItem)) {
viewHolder.checkBoxAttach.setChecked(true);
}
viewHolder.txtExpenseTitle.setText(expenseItem.getExpenseName());
viewHolder.txtExpenseAmount.setText("Ksh. " + expenseItem.getExpenseAmount());
viewHolder.checkBoxAttach.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
if (isChecked) {
attachExpense(expenseItem, context);
} else {
removeExpense(expenseItem, context);
}
}
});
return convertView;
}
}
private void removeExpense(ExpenseItem expenseItem, Context context) {
String response = databaseHelper.removeExpenseFromClaim(expenseItem);
Toast.makeText(context, response, Toast.LENGTH_SHORT).show();
setResult(1);
}
private void attachExpense(ExpenseItem expenseItem, Context context) {
String response = databaseHelper.attachExpenseToClaim(expenseItem, expenseClaim.getId());
Toast.makeText(context, response, Toast.LENGTH_SHORT).show();
setResult(1);
}
static class ViewHolder {
ImageView imgExpensePic;
TextView txtExpenseTitle;
TextView txtExpenseAmount;
CheckBox checkBoxAttach;
}
private void loadExpenses() {
expenses.clear();
expenses.addAll(databaseHelper.getExpenseItems());
checkedItems.clear();
checkedItems.addAll(databaseHelper.getClaimExpenses(expenseClaim));
}
@Override
protected void onStart() {
super.onStart();
loadExpenses();
if (customAdaptor != null) {
customAdaptor.notifyDataSetChanged();
}
}
@Override
protected void onStop() {
super.onStop();
loadExpenses();
if (customAdaptor != null) {
customAdaptor.notifyDataSetChanged();
}
finish();
}
}
<file_sep>package com.example.wladek.wira.activity;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.BaseAdapter;
import android.widget.ListView;
import android.widget.TextView;
import com.example.wladek.wira.R;
import com.example.wladek.wira.utils.ExpenseCategory;
import java.util.ArrayList;
import java.util.EnumSet;
public class AttachCategoryActivity extends AppCompatActivity {
ArrayList<ExpenseCategory> expenseCategories =
new ArrayList<ExpenseCategory>(EnumSet.allOf(ExpenseCategory.class));
CustomListAdaptor customListAdaptor;
ListView listViewCategories;
static final int CATEGORY_RESULT_CODE = 146;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_attach_category);
listViewCategories = (ListView) findViewById(R.id.listViewCategories);
customListAdaptor = new CustomListAdaptor(AttachCategoryActivity.this, expenseCategories);
listViewCategories.setAdapter(customListAdaptor);
listViewCategories.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
Intent intent = new Intent();
intent.putExtra("claimCategory" , expenseCategories.get(position).name());
setResult(CATEGORY_RESULT_CODE , intent);
finish();
}
});
}
private class CustomListAdaptor extends BaseAdapter {
private LayoutInflater layoutInflater;
Context context;
ArrayList<ExpenseCategory> categories = new ArrayList<>();
ViewHolder viewHolder;
public CustomListAdaptor(AttachCategoryActivity context, ArrayList<ExpenseCategory> expenseCategories) {
layoutInflater = LayoutInflater.from(context);
this.context = context;
this.categories = expenseCategories;
}
@Override
public int getCount() {
return categories.size();
}
@Override
public Object getItem(int position) {
return categories.get(position);
}
@Override
public long getItemId(int position) {
return position;
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
final ExpenseCategory category = categories.get(position);
if (convertView == null) {
convertView = layoutInflater.inflate(R.layout.attach_claim_listview_row, null);
viewHolder = new ViewHolder();
viewHolder.claimTitle = (TextView) convertView.findViewById(R.id.textClaimTitle);
convertView.setTag(viewHolder);
} else {
viewHolder = (ViewHolder) convertView.getTag();
}
viewHolder.claimTitle.setText(category.name());
return convertView;
}
}
static class ViewHolder {
TextView claimTitle;
}
}
<file_sep>package com.example.wladek.wira.activity;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.app.ActionBar;
import android.support.v7.app.AppCompatActivity;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import com.example.wladek.wira.R;
import com.example.wladek.wira.pojo.ExpenseClaim;
import com.example.wladek.wira.pojo.ExpenseItem;
import com.example.wladek.wira.utils.DatabaseHelper;
import com.squareup.picasso.Picasso;
import java.io.File;
import java.util.ArrayList;
public class ViewClaimActivity extends AppCompatActivity {
ExpenseClaim expenseClaim;
TextView txtClaimTitle;
ListView lstClaimExpenses;
TextView txtNoExpenses;
TextView txtTotalClaim;
LinearLayout layoutExpenses;
LinearLayout layoutAttach;
ArrayList<ExpenseItem> claimExpenses = new ArrayList<>();
CustomAdaptor customListAdaptor;
DatabaseHelper dbHelper;
ActionBar actionBar;
    Double total = 0.0;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_view_claim);
dbHelper = new DatabaseHelper(getApplicationContext());
expenseClaim = (ExpenseClaim) getIntent().getSerializableExtra("claim");
actionBar = getSupportActionBar();
if (expenseClaim != null) {
loadClaimExpenses(expenseClaim);
actionBar.setTitle(expenseClaim.getTitle());
}
layoutExpenses = (LinearLayout) findViewById(R.id.layoutExpenses);
layoutAttach = (LinearLayout) findViewById(R.id.layoutAttach);
txtClaimTitle = (TextView) findViewById(R.id.txtClaimTitle);
txtTotalClaim = (TextView) findViewById(R.id.txtTotalClaim);
txtNoExpenses = (TextView) findViewById(R.id.txtNoExpenses);
lstClaimExpenses = (ListView) findViewById(R.id.lstClaimExpenses);
if (expenseClaim == null) {
expenseClaim = new ExpenseClaim();
} else {
txtClaimTitle.setText("Add expense");
}
if (customListAdaptor == null) {
customListAdaptor = new CustomAdaptor(getApplicationContext(), claimExpenses);
} else {
customListAdaptor.notifyDataSetChanged();
}
lstClaimExpenses.setAdapter(customListAdaptor);
checkData();
layoutAttach.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
attachExpense();
}
});
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.shared_ok, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.okIcon:
onBackPressed();
return true;
default:
return false;
}
}
private class CustomAdaptor extends BaseAdapter {
private Context context;
private ArrayList<ExpenseItem> items = new ArrayList<>();
ViewHolder viewHolder;
LayoutInflater layoutInflater;
Picasso mPicasso;
public CustomAdaptor(Context context, ArrayList<ExpenseItem> expenseItems) {
layoutInflater = LayoutInflater.from(context);
this.context = context;
this.items = expenseItems;
this.mPicasso = Picasso.with(context);
}
@Override
public int getCount() {
return items.size();
}
@Override
public Object getItem(int position) {
return items.get(position);
}
@Override
public long getItemId(int position) {
return position;
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
final ExpenseItem expenseItem = items.get(position);
if (convertView == null) {
viewHolder = new ViewHolder();
convertView = layoutInflater.inflate(R.layout.claim_lst_expenses_custom_item, null);
viewHolder.imgExpensePic = (ImageView) convertView.findViewById(R.id.imgExpensePic);
viewHolder.txtExpenseTitle = (TextView) convertView.findViewById(R.id.txtExpenseTitle);
viewHolder.txtExpenseAmount = (TextView) convertView.findViewById(R.id.txtExpenseAmount);
viewHolder.btnRemoveExpense = (Button) convertView.findViewById(R.id.btnAttachExpense);
convertView.setTag(viewHolder);
} else {
viewHolder = (ViewHolder) convertView.getTag();
}
if (expenseItem.getImagePath() != null) {
mPicasso
.load(new File(expenseItem.getImagePath()))
.placeholder(R.mipmap.report_icon)
.error(R.mipmap.report_icon)
.resize(450, 400)
.into(viewHolder.imgExpensePic);
}
viewHolder.txtExpenseTitle.setText(expenseItem.getExpenseName());
viewHolder.txtExpenseAmount.setText("Ksh. " + expenseItem.getExpenseAmount());
// viewHolder.btnRemoveExpense.setOnClickListener(new View.OnClickListener() {
// @Override
// public void onClick(View v) {
// removeExpenseFromClaim(expenseItem);
// }
// });
return convertView;
}
}
private void removeExpenseFromClaim(ExpenseItem expenseItem) {
String response = dbHelper.removeExpenseFromClaim(expenseItem);
loadClaimExpenses(expenseClaim);
checkData();
customListAdaptor.notifyDataSetChanged();
Toast.makeText(getApplicationContext(), response, Toast.LENGTH_SHORT).show();
}
static class ViewHolder {
ImageView imgExpensePic;
TextView txtExpenseTitle;
TextView txtExpenseAmount;
Button btnRemoveExpense;
}
public void loadClaimExpenses(ExpenseClaim expenseClaim) {
claimExpenses.clear();
claimExpenses.addAll(dbHelper.getClaimExpenses(expenseClaim));
}
public void attachExpense() {
Intent intent = new Intent(getApplicationContext(), AttachExpenseActivity.class);
intent.putExtra("claim", expenseClaim);
startActivityForResult(intent, 1);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
// super.onActivityResult(requestCode, resultCode, data);
if (resultCode == 1) {
loadClaimExpenses(expenseClaim);
checkData();
customListAdaptor.notifyDataSetChanged();
}
}
private void checkData() {
if (claimExpenses.isEmpty()) {
layoutExpenses.setVisibility(View.INVISIBLE);
txtNoExpenses.setVisibility(View.VISIBLE);
customListAdaptor.notifyDataSetInvalidated();
} else {
layoutExpenses.setVisibility(View.VISIBLE);
txtNoExpenses.setVisibility(View.INVISIBLE);
            total = 0.0;
for (ExpenseItem i : claimExpenses) {
total = total + i.getExpenseAmount();
}
txtTotalClaim.setText("Ksh. " + total);
}
}
@Override
protected void onResume() {
super.onResume();
if (customListAdaptor != null) {
customListAdaptor.notifyDataSetChanged();
}
}
}
<file_sep>package com.example.wladek.wira.activity;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.BaseAdapter;
import android.widget.ListView;
import android.widget.TextView;
import com.example.wladek.wira.R;
import com.example.wladek.wira.pojo.ExpenseClaim;
import com.example.wladek.wira.utils.DatabaseHelper;
import java.util.ArrayList;
public class AttachClaimActivity extends AppCompatActivity {
ListView listViewAttachClaims;
DatabaseHelper dbHelper;
ArrayList<ExpenseClaim> claims = new ArrayList<>();
CustomListAdaptor customListAdaptor;
static final int CLAIM_RESULT_CODE = 145;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_attach_claim);
dbHelper = new DatabaseHelper(this);
loadClaims();
listViewAttachClaims = (ListView) findViewById(R.id.listViewAttachClaims);
customListAdaptor = new CustomListAdaptor(AttachClaimActivity.this , claims);
listViewAttachClaims.setAdapter(customListAdaptor);
listViewAttachClaims.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
final ExpenseClaim expenseClaim = claims.get(position);
Intent intent = new Intent();
intent.putExtra("claimId" , expenseClaim.getId());
intent.putExtra("claimTitle" , expenseClaim.getTitle());
setResult(CLAIM_RESULT_CODE , intent);
finish();
}
});
}
private class CustomListAdaptor extends BaseAdapter {
private LayoutInflater layoutInflater;
Context context;
ArrayList<ExpenseClaim> expenseClaims = new ArrayList<>();
ViewHolder viewHolder;
public CustomListAdaptor(Context context, ArrayList<ExpenseClaim> claims) {
layoutInflater = LayoutInflater.from(context);
this.context = context;
this.expenseClaims = claims;
}
@Override
public int getCount() {
return expenseClaims.size();
}
@Override
public Object getItem(int position) {
return expenseClaims.get(position);
}
@Override
public long getItemId(int position) {
return position;
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
final ExpenseClaim expenseClaim = expenseClaims.get(position);
if (convertView == null){
convertView = layoutInflater.inflate(R.layout.attach_claim_listview_row , null);
viewHolder = new ViewHolder();
viewHolder.claimTitle = (TextView) convertView.findViewById(R.id.textClaimTitle);
convertView.setTag(viewHolder);
}else {
viewHolder = (ViewHolder) convertView.getTag();
}
viewHolder.claimTitle.setText(expenseClaim.getTitle());
return convertView;
}
}
static class ViewHolder{
TextView claimTitle;
}
public void loadClaims() {
claims.clear();
claims.addAll(dbHelper.getClaims());
}
}
<file_sep>package com.example.wladek.wira.fragments.tab_fragments;
import android.content.Intent;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast;
import com.example.wladek.wira.R;
import com.example.wladek.wira.activity.BarCodeActivity;
import com.example.wladek.wira.activity.ClaimsActivity;
import com.google.android.gms.common.api.CommonStatusCodes;
/**
* Created by wladek on 8/9/16.
*/
public class ProfileFragment extends Fragment {
private static final String ARG_EXAMPLE = "This is an expense argument";
private String example_data = "";
final int QRCODE_REQUEST = 567;
public ProfileFragment(){
}
public static ProfileFragment newInstance(String expenseArgument){
ProfileFragment expenseFragment = new ProfileFragment();
Bundle args = new Bundle();
args.putString(ARG_EXAMPLE , expenseArgument);
return expenseFragment;
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setHasOptionsMenu(true);
}
@Nullable
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
return inflater.inflate(R.layout.profile_layout , container , false);
}
@Override
public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
super.onCreateOptionsMenu(menu, inflater);
inflater.inflate(R.menu.profile_options_menu, menu);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
if (item.getItemId() == R.id.action_qrCode) {
Intent intent = new Intent(getActivity(), BarCodeActivity.class);
startActivityForResult(intent, QRCODE_REQUEST);
return true;
}
if (item.getItemId() == R.id.action_new){
Intent intent = new Intent(getActivity(), ClaimsActivity.class);
startActivity(intent);
}
return super.onOptionsItemSelected(item);
}
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
if (resultCode == CommonStatusCodes.SUCCESS) {
if (requestCode == QRCODE_REQUEST) {
if (data != null) {
String barcode = data.getStringExtra("qrCode");
Toast.makeText(getActivity(), "Data : " + barcode, Toast.LENGTH_LONG).show();
} else {
Toast.makeText(getActivity(), "No data received from scanner", Toast.LENGTH_LONG).show();
}
}
}
}
}
<file_sep>package com.example.wladek.wira;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.provider.MediaStore;
import android.support.design.widget.TabLayout;
import android.support.v4.view.ViewPager;
import android.support.v7.app.ActionBar;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.view.View;
import android.widget.ImageButton;
import android.widget.TextView;
import com.afollestad.materialdialogs.MaterialDialog;
import com.example.wladek.wira.fragments.tab_fragments.ClaimsFragment;
import com.example.wladek.wira.fragments.tab_fragments.ExpenseFragment;
import com.example.wladek.wira.fragments.tab_fragments.ProfileFragment;
import com.example.wladek.wira.pager_adapters.ViewPagerAdapter;
import com.example.wladek.wira.pojo.ExpenseItem;
import com.example.wladek.wira.utils.DatabaseHelper;
import com.getbase.floatingactionbutton.AddFloatingActionButton;
import com.kosalgeek.android.photoutil.GalleryPhoto;
import java.io.File;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
public class MainActivity extends AppCompatActivity {
private ViewPager viewPager;
AddFloatingActionButton fab;
TabLayout tabLayout;
final int CAMERA_REQUEST = 321;
final int GALLERY_REQUEST = 3233;
final int QRCODE_REQUEST = 567;
GalleryPhoto galleryPhoto;
ArrayList<ExpenseItem> expenseExpenseItems = new ArrayList<>();
ExpenseFragment expenseFragment;
MaterialDialog.Builder builder;
MaterialDialog dialog;
DatabaseHelper myDb;
ActionBar actionBar;
Toolbar toolbar;
private String photoPath;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
myDb = new DatabaseHelper(this);
setContentView(R.layout.activity_main);
toolbar = (Toolbar) findViewById(R.id.toolbar);
toolbar.setTitleTextColor(getResources().getColor(R.color.white));
toolbar.setTitle("Expenses");
setSupportActionBar(toolbar);
actionBar = getSupportActionBar();
viewPager = (ViewPager) findViewById(R.id.view_pager);
setUpViewPager(viewPager);
tabLayout = (TabLayout) findViewById(R.id.tabs);
tabLayout.setupWithViewPager(viewPager);
tabLayout.getTabAt(0).setIcon(R.mipmap.expense_active);
tabLayout.getTabAt(1).setIcon(R.mipmap.report_inactive_icon);
tabLayout.getTabAt(2).setIcon(R.mipmap.contact_inactive);
tabLayout.setSelectedTabIndicatorColor(getResources().getColor(R.color.colorAccent));
fab = (AddFloatingActionButton) findViewById(R.id.fab);
fab.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
showGalleryOptions();
}
});
fab.setPlusColor(R.color.blue_btn_bg_color);
tabLayout.setOnTabSelectedListener(new TabLayout.ViewPagerOnTabSelectedListener(viewPager) {
@Override
public void onTabSelected(TabLayout.Tab tab) {
super.onTabSelected(tab);
viewPager.setCurrentItem(tab.getPosition());
if (tab.getPosition() == 0) {
toolbar.setTitle("Expenses");
tab.setIcon(R.mipmap.expense_active);
fab.setVisibility(View.VISIBLE);
fab.setImageResource(android.R.drawable.ic_menu_camera);
fab.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
showGalleryOptions();
}
});
} else if (tab.getPosition() == 1) {
toolbar.setTitle("Claims");
tab.setIcon(R.mipmap.report_active);
} else {
toolbar.setTitle("Profile");
tab.setIcon(R.mipmap.contact_active);
fab.setVisibility(View.INVISIBLE);
}
}
@Override
public void onTabUnselected(TabLayout.Tab tab) {
super.onTabUnselected(tab);
viewPager.setCurrentItem(tab.getPosition());
if (tab.getPosition() == 0) {
toolbar.setTitle("Expenses");
tab.setIcon(R.mipmap.expense_inactive);
fab.setVisibility(View.VISIBLE);
fab.setImageResource(android.R.drawable.ic_menu_camera);
fab.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
showGalleryOptions();
}
});
} else if (tab.getPosition() == 1) {
toolbar.setTitle("Claims");
tab.setIcon(R.mipmap.report_inactive_icon);
} else {
toolbar.setTitle("Profile");
tab.setIcon(R.mipmap.contact_inactive);
fab.setVisibility(View.INVISIBLE);
}
}
@Override
public void onTabReselected(TabLayout.Tab tab) {
super.onTabReselected(tab);
viewPager.setCurrentItem(tab.getPosition());
if (tab.getPosition() == 0) {
toolbar.setTitle("Expenses");
tab.setIcon(R.mipmap.expense_active);
fab.setVisibility(View.VISIBLE);
fab.setImageResource(android.R.drawable.ic_menu_camera);
fab.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
showGalleryOptions();
}
});
} else if (tab.getPosition() == 1) {
toolbar.setTitle("Claims");
tab.setIcon(R.mipmap.report_active);
} else {
toolbar.setTitle("Profile");
tab.setIcon(R.mipmap.contact_active);
fab.setVisibility(View.INVISIBLE);
}
}
});
}
public void showGalleryOptions() {
boolean wrapInScrollView = true;
builder = new MaterialDialog.Builder(this);
builder.title("Upload");
builder.customView(R.layout.select_gallery_layout, wrapInScrollView);
builder.cancelable(true);
dialog = builder.build();
dialog.show();
View customView = dialog.getCustomView();
ImageButton imgBtnGallery = (ImageButton) customView.findViewById(R.id.imgBtnGallery);
ImageButton imgBtnCam = (ImageButton) customView.findViewById(R.id.imgBtnCam);
TextView txtCamera = (TextView) customView.findViewById(R.id.txtCamera);
TextView txtGallery = (TextView) customView.findViewById(R.id.txtGallery);
imgBtnCam.setOnClickListener(new CustomClickListener(this, "imgBtnCam"));
imgBtnGallery.setOnClickListener(new CustomClickListener(this, "imgBtnGallery"));
txtGallery.setOnClickListener(new CustomClickListener(this, "imgBtnGallery"));
txtCamera.setOnClickListener(new CustomClickListener(this, "imgBtnCam"));
}
public void setUpViewPager(ViewPager viewPager) {
// Attach the three tab fragments (Expense, Claims, Profile) to the ViewPager via ViewPagerAdapter.
ViewPagerAdapter viewPagerAdapter = new ViewPagerAdapter(getSupportFragmentManager());
expenseFragment = ExpenseFragment.newInstance("Data for Exp fragment 1");
viewPagerAdapter.addFragment(expenseFragment, "Expense");
viewPagerAdapter.addFragment(ClaimsFragment.newInstance("Data for Clms fragment 2"), "Claims");
viewPagerAdapter.addFragment(ProfileFragment.newInstance("Data for Pf fragment 3"), "Profile");
viewPager.setAdapter(viewPagerAdapter);
}
private class CustomClickListener implements View.OnClickListener {
String btnName;
Context context;
public CustomClickListener(Context context, String imgBtnCam) {
this.btnName = imgBtnCam;
this.context = context;
}
@Override
public void onClick(View v) {
if (btnName.equals("imgBtnCam")) {
dialog.dismiss();
launchCamera(context);
} else if (btnName.equals("imgBtnGallery")) {
dialog.dismiss();
launchGallery(context);
}
}
}
public void launchCamera(Context context) {
Intent in = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
if (in.resolveActivity(context.getPackageManager()) != null) {
File photoFile = createImageFile();
if (photoFile != null) {
// Uri photoURI = FileProvider.getUriForFile(this,
// "com.example.wladek.wira",
// photoFile);
in.putExtra(MediaStore.EXTRA_OUTPUT, Uri.fromFile(photoFile));
startActivityForResult(in, CAMERA_REQUEST);
addToGallery();
}
}
}
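// Editorial note (assumption about the project's target SDK, not part of the original code):
// on Android N (API 24) and above, handing a file:// Uri to the camera app raises
// android.os.FileUriExposedException; the commented-out FileProvider call above hints at the
// intended fix. A minimal sketch, assuming a <provider> with authority "com.example.wladek.wira"
// is declared in the manifest:
//
//     Uri photoURI = FileProvider.getUriForFile(this, "com.example.wladek.wira", photoFile);
//     in.putExtra(MediaStore.EXTRA_OUTPUT, photoURI);
//
// Uri.fromFile(photoFile) is kept above because the original code targets pre-N behaviour.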
// Create a uniquely named JPEG file in the public Pictures directory and remember its absolute path.
private File createImageFile() {
String timeStamp = (new SimpleDateFormat("yyyyMMdd_HHmmss")).format(new Date());
String imageFileName = "JPEG_" + timeStamp + "_";
File storageDir = Environment.getExternalStoragePublicDirectory(Environment.DIRECTORY_PICTURES);
File image = null;
try {
image = File.createTempFile(imageFileName, ".jpg", storageDir);
photoPath = image.getAbsolutePath();
} catch (IOException e) {
e.printStackTrace();
}
return image;
}
public void addToGallery() {
Intent mediaScanIntent = new Intent(Intent.ACTION_MEDIA_SCANNER_SCAN_FILE);
File f = new File(this.photoPath);
Uri contentUri = Uri.fromFile(f);
mediaScanIntent.setData(contentUri);
this.sendBroadcast(mediaScanIntent);
}
public void launchGallery(Context context) {
galleryPhoto = new GalleryPhoto(context);
// Intent intent = new Intent();
// intent.setType("image/*");
// intent.setAction(Intent.ACTION_GET_CONTENT);//
// startActivityForResult(Intent.createChooser(intent, "Select File"), GALLERY_REQUEST);
startActivityForResult(galleryPhoto.openGalleryIntent(), GALLERY_REQUEST);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
if (resultCode == Activity.RESULT_OK) {
if (requestCode == CAMERA_REQUEST) {
if (photoPath != null) {
updateExpense(photoPath);
}
} else if (requestCode == GALLERY_REQUEST) {
Uri uri = data.getData();
galleryPhoto.setPhotoUri(uri);
// photoPath = getPhotoPathFromGallery(uri);
photoPath = galleryPhoto.getPath();
updateExpense(photoPath);
}
}
}
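// Persist a new expense record pointing at the captured/selected receipt image (amount 0 for now)
// and, if the expense tab is attached, refresh its list so the new item shows up immediately.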
public void updateExpense(String imageSrc) {
ExpenseItem expenseItem = new ExpenseItem();
expenseItem.setImagePath(imageSrc);
expenseItem.setExpenseAmount(0.0);
expenseItem = myDb.save(expenseItem);
if (expenseFragment != null) {
expenseFragment.loadExpenses();
expenseFragment.updateFragment1ListView();
}
}
public String getPhotoPathFromGallery(Uri uri){
String[] filePathColumn = {MediaStore.Images.Media.DATA};
Cursor cursor = getContentResolver()
.query(uri, filePathColumn, null, null,
null);
cursor.moveToFirst();
int columnIndex = cursor.getColumnIndex(filePathColumn[0]);
String picturePath = cursor.getString(columnIndex);
cursor.close();
return picturePath;
}
}
|
aa2661392670ccd98dc10357ee740f750c0744f9
|
[
"Java"
] | 6
|
Java
|
wladekAiro/WiraMobi
|
bceaecf64c925210ca830078b1bf6d3b39c680a4
|
db4fc8e3b5bed9c3cf94308731cab30ced75b6cf
|
refs/heads/master
|
<file_sep># -*- coding: utf-8 -*-
##########################################################################
# NSAp - Copyright (C) CEA, 2019
# Distributed under the terms of the CeCILL-B license, as published by
# the CEA-CNRS-INRIA. Refer to the LICENSE file or to
# http://www.cecill.info/licences/Licence_CeCILL-B_V1-en.html
# for details.
##########################################################################
"""
Module that defines common transformations that can be applied when the dataset
is loaded.
"""
# Imports
import collections
import numpy as np
from torchvision import transforms
class RandomFlipDimensions(object):
""" Apply a random mirror flip for all axes with a defined probability.
"""
def __init__(self, ndims, proba, with_channels=True):
""" Initilaize the class.
Parameters
----------
ndims: int
the number of dimensions.
proba: float
apply flip on each axis with this probability [0 - 1].
with_channels: bool, default True
if set expect the array to contain the channels in first dimension.
"""
if proba < 0 or proba > 1:
raise ValueError("The probabilty must be in [0 - 1].")
self.ndims = ndims
self.proba = proba
self.with_channels = with_channels
def _random_flip(self):
""" Generate a random axes flip.
"""
axis = []
for dim in range(self.ndims):
if np.random.choice([True, False], p=[self.proba, 1 - self.proba]):
axis.append(dim)
return tuple(axis)
def __call__(self, arr):
""" Flip an array axes randomly.
Parameters
----------
arr: np.array
an input array.
Returns
-------
flip_arr: np.array
the flipped array.
"""
if self.with_channels:
data = []
flip = self._random_flip()
for _arr in arr:
data.append(np.flip(_arr, axis=flip))
return np.asarray(data)
else:
return np.flip(arr, axis=self._random_flip())
class Offset(object):
""" Apply an intensity offset (shift and scale) on input channels.
"""
def __init__(self, nb_channels, factor):
""" Initilaize the class.
Parameters
----------
nb_channels: int
the number of channels.
factor: float
the offset scale factor [0 - 1].
"""
if factor < 0 or factor > 1:
raise ValueError("The offset factor must be in [0 - 1].")
self.nb_channels = nb_channels
self.factor = factor
def _random_offset(self):
""" Generate a random offset factor.
"""
return (2 * self.factor * np.random.random(self.nb_channels) +
(1 - self.factor))
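        # Note (editorial): this draws one factor per channel uniformly from
        # [1 - factor, 1 + factor), e.g. factor=0.1 yields scale factors in [0.9, 1.1).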
def __call__(self, arr):
""" Normalize an array.
Parameters
----------
arr: np.array
an input array.
Returns
-------
offset_arr: np.array
the rescaled array.
"""
assert len(arr) == self.nb_channels
mean_scale_factors = self._random_offset()
std_scale_factors = self._random_offset()
data = []
for _arr, _mfactor, _sfactor in zip(
arr, mean_scale_factors, std_scale_factors):
logical_mask = (_arr != 0)
mean = _arr[logical_mask].mean()
std = _arr[logical_mask].std()
data.append((_arr - (mean * _mfactor)) / (std * _sfactor))
return np.asarray(data)
class Padding(object):
""" A class to pad an image.
"""
def __init__(self, shape, nb_channels=1, fill_value=0):
""" Initialize the instance.
Parameters
----------
shape: list of int
the desired shape.
nb_channels: int, default 1
the number of channels.
fill_value: int or list of int, default 0
the value used to fill the array, if a list is given, use the
specified value on each channel.
"""
self.shape = shape
self.nb_channels = nb_channels
self.fill_value = fill_value
if self.nb_channels > 1 and not isinstance(self.fill_value, list):
self.fill_value = [self.fill_value] * self.nb_channels
elif isinstance(self.fill_value, list):
assert len(self.fill_value) == self.nb_channels
def __call__(self, arr):
""" Fill an array to fit the desired shape.
Parameters
----------
arr: np.array
an input array.
Returns
-------
fill_arr: np.array
the padded array.
"""
if len(arr.shape) - len(self.shape) == 1:
data = []
for _arr, _fill_value in zip(arr, self.fill_value):
data.append(self._apply_padding(_arr, _fill_value))
return np.asarray(data)
elif len(arr.shape) - len(self.shape) == 0:
return self._apply_padding(arr, self.fill_value)
else:
raise ValueError("Wrong input shape specified!")
def _apply_padding(self, arr, fill_value):
""" See Padding.__call__().
"""
orig_shape = arr.shape
padding = []
for orig_i, final_i in zip(orig_shape, self.shape):
shape_i = final_i - orig_i
half_shape_i = shape_i // 2
if shape_i % 2 == 0:
padding.append((half_shape_i, half_shape_i))
else:
padding.append((half_shape_i, half_shape_i + 1))
for cnt in range(len(arr.shape) - len(padding)):
padding.append((0, 0))
fill_arr = np.pad(arr, padding, mode="constant",
constant_values=fill_value)
return fill_arr
class Downsample(object):
""" A class to downsample an array.
"""
def __init__(self, scale, with_channels=True):
""" Initialize the instance.
Parameters
----------
scale: int
the downsampling scale factor in all directions.
with_channels: bool, default True
if set expect the array to contain the channels in first dimension.
"""
self.scale = scale
self.with_channels = with_channels
def __call__(self, arr):
""" Downsample an array to fit the desired shape.
Parameters
----------
arr: np.array
an input array
Returns
-------
down_arr: np.array
the downsampled array.
"""
if self.with_channels:
data = []
for _arr in arr:
data.append(self._apply_downsample(_arr))
return np.asarray(data)
else:
return self._apply_downsample(arr)
def _apply_downsample(self, arr):
""" See Downsample.__call__().
"""
slices = []
for cnt, orig_i in enumerate(arr.shape):
if cnt == 3:
break
slices.append(slice(0, orig_i, self.scale))
down_arr = arr[tuple(slices)]
return down_arr
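# Usage sketch (editorial addition, not part of the original module): the transforms above follow
# the torchvision callable convention, so they can be chained with ``transforms.Compose``. The
# channel count, target shape and scale used below are illustrative assumptions, not values taken
# from the original code.
if __name__ == "__main__":
    pipeline = transforms.Compose([
        Padding(shape=(64, 64, 64), nb_channels=1, fill_value=[0]),  # pad each channel to a cube
        RandomFlipDimensions(ndims=3, proba=0.5),                    # random mirror flips per axis
        Offset(nb_channels=1, factor=0.1),                           # random intensity shift/scale
        Downsample(scale=2),                                         # keep every 2nd voxel
    ])
    volume = np.random.rand(1, 61, 60, 63).astype(np.float32)        # (channels, x, y, z)
    out = pipeline(volume)
    print(out.shape)  # expected: (1, 32, 32, 32)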
|
33bc09ad9a350cb421a8f7f6bc6368c3a38838ca
|
[
"Python"
] | 1
|
Python
|
HChegraoui/pynet
|
3e26f7992e5b6954f637e3a68e4766f3886e2ce9
|
095113e1b71ddabe871e6c75e19e33caf362abd9
|
refs/heads/master
|
<file_sep>package com.yunjishi.lixiang.yunjishi.service
import com.yunjishi.lixiang.yunjishi.presenter.data.bean.DemandDetailBean
import com.yunjishi.lixiang.yunjishi.presenter.data.bean.SubmitOrderBean
import io.reactivex.Observable
interface OrderDetailService {
fun getDemandDetail(string1: String,string2: String): Observable<DemandDetailBean>
}<file_sep>package com.yunjishi.lixiang.yunjishi.presenter.injection.component
import com.android.lixiang.base.injection.ComponentScope
import com.android.lixiang.base.injection.component.FragmentComponent
import com.yunjishi.lixiang.yunjishi.presenter.injection.module.MissionModule
import com.yunjishi.lixiang.yunjishi.view.fragment.MissionFragment
import dagger.Component
@ComponentScope
@Component(dependencies = arrayOf(FragmentComponent::class), modules = arrayOf(MissionModule::class))
interface MissionComponent {
fun inject(fragment: MissionFragment)
}<file_sep>package com.yunjishi.lixiang.yunjishi.service.impl
class MainServiceImpl {
}<file_sep>package com.yunjishi.lixiang.yunjishi.view.fragment
import android.Manifest
import android.animation.ObjectAnimator
import android.animation.ValueAnimator
import android.annotation.SuppressLint
import android.content.Context
import android.content.SharedPreferences
import android.content.pm.PackageManager
import android.graphics.Color
import android.graphics.Point
import android.hardware.Sensor
import android.hardware.SensorEvent
import android.hardware.SensorEventListener
import android.hardware.SensorManager
import android.os.Build
import android.os.Bundle
import android.os.Handler
import android.support.annotation.RequiresApi
import android.support.v4.app.ActivityCompat
import android.support.v4.app.Fragment
import android.support.v7.app.AppCompatActivity
import android.support.v7.widget.AppCompatButton
import android.support.v7.widget.AppCompatImageView
import android.support.v7.widget.AppCompatTextView
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import android.widget.*
import com.android.lixiang.base.utils.view.DimenUtil
import com.baidu.location.*
import com.baidu.mapapi.map.*
import com.baidu.mapapi.model.LatLng
import com.baidu.mapapi.utils.DistanceUtil
import com.orhanobut.logger.Logger
import com.yunjishi.lixiang.yunjishi.GetArea
import com.yunjishi.lixiang.yunjishi.R.id.*
import com.yunjishi.lixiang.yunjishi.view.activity.MainActivity
import kotlinx.android.synthetic.main.fragment_map.*
class MapFragment : Fragment(), View.OnClickListener, SensorEventListener {
private var lastX: Double? = 0.0
private var locData: MyLocationData? = null
override fun onAccuracyChanged(sensor: Sensor?, accuracy: Int) {
}
override fun onSensorChanged(event: SensorEvent?) {
val x = event!!.values[SensorManager.DATA_X].toDouble()
if (Math.abs(x - lastX!!) > 1.0) {
mCurrentDirection = x.toInt()
locData = MyLocationData.Builder()
.accuracy(0F)
.direction(mCurrentDirection.toFloat()).latitude(mCurrentLat)
.longitude(mCurrentLon).build()
mBaiduMap!!.setMyLocationData(locData)
}
lastX = x
}
override fun onClick(v: View?) {
when (v) {
ZIB -> {
val zoomIn: MapStatusUpdate? = MapStatusUpdateFactory.zoomIn()
mBaiduMap!!.animateMapStatus(zoomIn)
}
ZOB -> {
val zoomOut: MapStatusUpdate? = MapStatusUpdateFactory.zoomOut()
mBaiduMap!!.animateMapStatus(zoomOut)
}
CA -> {
if (scopeGeo == "") {
Toast.makeText(activity, "请选择区域", Toast.LENGTH_SHORT).show()
} else
(activity as MainActivity).changeFragment(4)
}
}
}
var mMapView: MapView? = null
private var mBaiduMap: BaiduMap? = null
var myListener = MyLocationListenner()
private var mLocClient: LocationClient? = null
private var isFirstLoc = true
private var mCurrentDirection = 0
private var mCurrentLat = 0.0
private var mCurrentLon = 0.0
private var mCurrentAccracy: Float = 0.toFloat()
private var geoString = String()
private var scopeGeo = String()
private var area: Double? = 0.0
private var demandGeo: String? = null
private var center = String()
private var T: RelativeLayout? = null
private var R: RelativeLayout? = null
private var B: RelativeLayout? = null
private var L: RelativeLayout? = null
private var C: RelativeLayout? = null
private var CA: RelativeLayout? = null
private var TV: AppCompatTextView? = null
private var ITL: AppCompatImageView? = null
private var ITR: AppCompatImageView? = null
private var IBR: AppCompatImageView? = null
private var IBL: AppCompatImageView? = null
private var S: AppCompatImageView? = null// scan line
var task: Runnable? = null
val handler = Handler()
private var Z: AppCompatImageView? = null
private var ZIB: AppCompatButton? = null
private var ZOB: AppCompatButton? = null
override fun onCreateView(inflater: LayoutInflater, container: ViewGroup?,
savedInstanceState: Bundle?): View? {
// Inflate the layout for this fragment
return inflater.inflate(com.yunjishi.lixiang.yunjishi.R.layout.fragment_map, container, false)
}
@RequiresApi(Build.VERSION_CODES.M)
override fun onActivityCreated(savedInstanceState: Bundle?) {
super.onActivityCreated(savedInstanceState)
handlePermisson()
initMap()
initViews()
}
@SuppressLint("ResourceType")
private fun initViews() {
var screenHeight = activity!!.application.resources.displayMetrics.heightPixels.toFloat()
var screenWidth = activity!!.application.resources.displayMetrics.widthPixels.toFloat()
var realWidth = screenWidth * 964 / 1024
val SQUARE = screenHeight * 463 / 768
val THeight = screenHeight * 94 / 768
val RWidth = (realWidth - SQUARE) * 0.5
val BHeight = screenHeight * 99 / 1024
val CAWidth = screenHeight * 46 / 768
val IWidth = screenHeight * 31 / 768 // size of the four purple corner markers
val SWidth = screenHeight * 424 / 768 // width of the scan line
val SHeight = screenHeight * 3 / 768 // height of the scan line
val ZWidth = screenHeight * 28 / 768
val ZHeight = screenHeight * 59 / 768
val BWidth = screenHeight * 40 / 768
Logger.d("$THeight $screenHeight $SQUARE $BHeight")
T = RelativeLayout(activity)
T!!.id = 1
val TL = RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, THeight.toInt())
T!!.setBackgroundColor(Color.parseColor("#99000000"))
T!!.layoutParams = TL
mMapRoot.addView(T)
R = RelativeLayout(activity)
R!!.id = 2
val RL = RelativeLayout.LayoutParams(RWidth.toInt(), SQUARE.toInt())
RL.addRule(RelativeLayout.ALIGN_PARENT_RIGHT)
RL.setMargins(0, THeight.toInt(), 0, 0)
R!!.setBackgroundColor(Color.parseColor("#99000000"))
R!!.layoutParams = RL
mMapRoot.addView(R)
B = RelativeLayout(activity)
B!!.id = 3
val BL = RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, BHeight.toInt())
BL.addRule(RelativeLayout.BELOW, R!!.id)
B!!.setBackgroundColor(Color.parseColor("#99000000"))
B!!.layoutParams = BL
mMapRoot.addView(B)
L = RelativeLayout(activity)
L!!.id = 4
val LL = RelativeLayout.LayoutParams(RWidth.toInt(), SQUARE.toInt())
LL.setMargins(0, THeight.toInt(), 0, 0)
L!!.setBackgroundColor(Color.parseColor("#99000000"))
L!!.layoutParams = LL
mMapRoot.addView(L)
C = RelativeLayout(activity)
C!!.id = 5
val CL = RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.MATCH_PARENT, RelativeLayout.LayoutParams.MATCH_PARENT)
CL.addRule(RelativeLayout.ALIGN_PARENT_BOTTOM)
CL.addRule(RelativeLayout.BELOW, B!!.id)
C!!.setBackgroundColor(Color.parseColor("#BF000000"))
C!!.layoutParams = CL
mMapRoot.addView(C)
CA = RelativeLayout(activity)
CA!!.id = 6
val CAL = RelativeLayout.LayoutParams(CAWidth.toInt(), CAWidth.toInt())
CAL.addRule(RelativeLayout.CENTER_IN_PARENT)
CA!!.setBackgroundResource(com.yunjishi.lixiang.yunjishi.R.drawable.ic_camera)
CA!!.layoutParams = CAL
C!!.addView(CA)
CA!!.setOnClickListener(this)
TV = AppCompatTextView(activity)
TV!!.id = 7
val TVL = RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.WRAP_CONTENT, RelativeLayout.LayoutParams.WRAP_CONTENT)
TVL.addRule(RelativeLayout.CENTER_IN_PARENT)
TV!!.setTextColor(Color.parseColor("#FFFFFF"))
TV!!.textSize = 14F
TV!!.layoutParams = TVL
B!!.addView(TV)
mMapFragmentbar.title = "请选择拍摄区域"
(activity as AppCompatActivity).setSupportActionBar(mMapFragmentbar)
(activity as AppCompatActivity).supportActionBar!!.setDisplayHomeAsUpEnabled(true)
mMapFragmentbar.setNavigationOnClickListener {
(activity as MainActivity).changeFragment(2)
}
ITL = AppCompatImageView(activity)
val ITLL = RelativeLayout.LayoutParams(IWidth.toInt(), IWidth.toInt())
ITLL.addRule(RelativeLayout.BELOW, T!!.id)
ITLL.addRule(RelativeLayout.RIGHT_OF, L!!.id)
ITL!!.setBackgroundResource(com.yunjishi.lixiang.yunjishi.R.drawable.ic_lt)
ITL!!.layoutParams = ITLL
mMapRoot!!.addView(ITL)
ITR = AppCompatImageView(activity)
val ITRL = RelativeLayout.LayoutParams(IWidth.toInt(), IWidth.toInt())
ITRL.addRule(RelativeLayout.BELOW, T!!.id)
ITRL.addRule(RelativeLayout.LEFT_OF, R!!.id)
ITR!!.setBackgroundResource(com.yunjishi.lixiang.yunjishi.R.drawable.ic_rt)
ITR!!.layoutParams = ITRL
mMapRoot!!.addView(ITR)
IBR = AppCompatImageView(activity)
val IBRL = RelativeLayout.LayoutParams(IWidth.toInt(), IWidth.toInt())
IBRL.addRule(RelativeLayout.ABOVE, B!!.id)
IBRL.addRule(RelativeLayout.LEFT_OF, R!!.id)
IBR!!.setBackgroundResource(com.yunjishi.lixiang.yunjishi.R.drawable.ic_rb)
IBR!!.layoutParams = IBRL
mMapRoot!!.addView(IBR)
IBL = AppCompatImageView(activity)
val IBLL = RelativeLayout.LayoutParams(IWidth.toInt(), IWidth.toInt())
IBLL.addRule(RelativeLayout.ABOVE, B!!.id)
IBLL.addRule(RelativeLayout.RIGHT_OF, L!!.id)
IBL!!.setBackgroundResource(com.yunjishi.lixiang.yunjishi.R.drawable.ic_lb)
IBL!!.layoutParams = IBLL
mMapRoot!!.addView(IBL)
S = AppCompatImageView(activity)
val SL = RelativeLayout.LayoutParams(SWidth.toInt(), SHeight.toInt())
SL.addRule(RelativeLayout.LEFT_OF, R!!.id)
SL.addRule(RelativeLayout.BELOW, T!!.id)
SL.addRule(RelativeLayout.RIGHT_OF, L!!.id)
SL.setMargins(10, 0, 10, 0)
S!!.setBackgroundResource(com.yunjishi.lixiang.yunjishi.R.drawable.img_scan)
S!!.layoutParams = SL
mMapRoot!!.addView(S)
task = Runnable {
// Sweep the scan line up and down over the selection square, looping indefinitely.
val animator: ObjectAnimator = ObjectAnimator.ofFloat(S!!, "translationY", 0F, SQUARE, 0F)
animator.duration = 5000
animator.repeatCount = ValueAnimator.INFINITE
animator.repeatMode = ValueAnimator.RESTART
animator.start()
}
handler.post(task)
Z = AppCompatImageView(activity)
Z!!.id = 8
val ZL = RelativeLayout.LayoutParams(ZWidth.toInt(), ZHeight.toInt())
ZL.addRule(RelativeLayout.CENTER_VERTICAL)
ZL.setMargins(47, 0, 0, 0)
Z!!.setBackgroundResource(com.yunjishi.lixiang.yunjishi.R.drawable.ic_zoom)
Z!!.layoutParams = ZL
R!!.addView(Z)
ZIB = AppCompatButton(activity)
ZIB!!.id = 9
val ZIBL = RelativeLayout.LayoutParams(ZWidth.toInt(), ZWidth.toInt())
ZIBL.addRule(RelativeLayout.ALIGN_TOP, Z!!.id)
ZIBL.setMargins(41, -10, 0, 0)
ZIB!!.setBackgroundColor(Color.parseColor("#00000000"))
ZIB!!.layoutParams = ZIBL
R!!.addView(ZIB)
ZIB!!.setOnClickListener(this)
ZOB = AppCompatButton(activity)
val ZOBL = RelativeLayout.LayoutParams(ZWidth.toInt(), ZWidth.toInt())
ZOBL.addRule(RelativeLayout.BELOW, ZIB!!.id)
ZOBL.setMargins(41, 0, 0, 0)
ZOB!!.setBackgroundColor(Color.parseColor("#00000000"))
ZOB!!.layoutParams = ZOBL
R!!.addView(ZOB)
ZOB!!.setOnClickListener(this)
}
private fun initMap() {
val mCurrentMode = MyLocationConfiguration.LocationMode.NORMAL
mMapView = mMapFragmentMapView
mLocClient = LocationClient(activity)
mLocClient!!.registerLocationListener(myListener)
val option = LocationClientOption()
option.isOpenGps = true
option.setCoorType("bd09ll")
option.setScanSpan(1000)
option.setAddrType("all")
option.setIsNeedLocationPoiList(true)
mLocClient!!.locOption = option
mLocClient!!.start()
mBaiduMap = mMapView!!.map
mBaiduMap!!.isMyLocationEnabled = true
mBaiduMap!!.setMyLocationConfigeration(MyLocationConfiguration(mCurrentMode, true, null))
val builder = MapStatus.Builder()
builder.overlook(0f)
val child = mMapView!!.getChildAt(1)
if (child != null && (child is ImageView || child is ZoomControls)) {
child.visibility = View.INVISIBLE
}
mMapView!!.showScaleControl(false)
mMapView!!.showZoomControls(false)
val mUiSettings = mBaiduMap!!.uiSettings
mUiSettings.isScrollGesturesEnabled = true
mUiSettings.isOverlookingGesturesEnabled = true
mUiSettings.isZoomGesturesEnabled = true
mUiSettings.isOverlookingGesturesEnabled = false
val listener: BaiduMap.OnMapStatusChangeListener = object : BaiduMap.OnMapStatusChangeListener {
override fun onMapStatusChangeStart(p0: MapStatus?) {}
override fun onMapStatusChangeStart(p0: MapStatus?, p1: Int) {}
override fun onMapStatusChange(p0: MapStatus?) {}
override fun onMapStatusChangeFinish(p0: MapStatus?) {
if (context != null) {
if (mBaiduMap!!.projection != null) {
val ltp = Point()
ltp.x = DimenUtil().dip2px(context!!, 356.5F)
ltp.y = DimenUtil().dip2px(context!!, 105F)
val lt = mBaiduMap!!.projection.fromScreenLocation(ltp)
val rbp = Point()
rbp.x = DimenUtil().dip2px(context!!, 782.5F)
rbp.y = DimenUtil().dip2px(context!!, 531F)
val rb = mBaiduMap!!.projection.fromScreenLocation(rbp)
geoString = String.format("%s,%s,%s,%s", lt.longitude, lt.latitude, rb.longitude, rb.latitude)
println("geoString$geoString")
scopeGeo = geoFormat(geoString)
demandGeo = scopeGeo
var mSharedPreferences: SharedPreferences? = null
mSharedPreferences = activity!!.getSharedPreferences("GEO", Context.MODE_PRIVATE)
val editor = mSharedPreferences!!.edit()
editor.putString("geo", demandGeo)
editor.commit()
println("scopeGeo$scopeGeo")
center = String.format("%s,%s", (lt.longitude + rb.longitude) / 2, (lt.latitude + rb.latitude) / 2)
val leftTop = LatLng(lt.latitude, lt.longitude)
val rightBottom = LatLng(rb.latitude, rb.longitude)
val leftBottom = LatLng(rb.latitude, lt.longitude)
val rightTop = LatLng(lt.latitude, rb.longitude)
var list: MutableList<LatLng> = mutableListOf()
list.add(leftTop)
list.add(rightTop)
list.add(rightBottom)
list.add(leftBottom)
Logger.d(GetArea.getArea(list))
println(GetArea.getArea(list))
area = DistanceUtil.getDistance(leftTop, rightBottom) * DistanceUtil.getDistance(leftTop, rightBottom) / 2000000
val areaString = area.toString()
val temp = areaString.substring(0, areaString.indexOf(".") + 3)
if (areaString.contains("E")) {
if (areaString.contains("-")) {
TV!!.text = String.format("当前面积:小于 0.01平方公里", temp)
} else
TV!!.text = String.format("当前面积:%s 亿平方公里", temp)
} else {
TV!!.text = String.format("当前面积:%s 平方公里", temp)
}
}
}
}
}
mMapView!!.map.setOnMapStatusChangeListener(listener)
}
inner class MyLocationListenner : BDLocationListener {
var lati: Double = 0.toDouble()
var longi: Double = 0.toDouble()
var address: String = ""
internal lateinit var poi: List<Poi>
override fun onReceiveLocation(location: BDLocation?) {
if (location == null || mMapView == null) {
return
}
val locData = MyLocationData.Builder()
.accuracy(0F)
.direction(mCurrentDirection.toFloat())
.latitude(location.latitude)
.longitude(location.longitude).build()
lati = location.latitude
longi = location.longitude
mCurrentLat = location.latitude
mCurrentLon = location.longitude
address = location.addrStr
mCurrentAccracy = location.radius
poi = location.poiList
mBaiduMap!!.setMyLocationData(locData)
if (isFirstLoc) {
isFirstLoc = false
val ll = LatLng(location.latitude,
location.longitude)
val builder = MapStatus.Builder()
builder.target(ll).zoom(8.0f)
mBaiduMap!!.animateMapStatus(MapStatusUpdateFactory.newMapStatus(builder.build()))
}
}
fun onConnectHotSpotMessage(s: String, i: Int) {
}
}
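/**
 * Editorial note: geoFormat() converts the comma-separated bounding box built above
 * ("lt.lng,lt.lat,rb.lng,rb.lat") into a closed GeoJSON Polygon ring. With illustrative
 * coordinates (not taken from the original code), the input "116.30,39.99,116.40,39.90" becomes
 * {"type":"Polygon","coordinates":[[[116.30,39.99],[116.40,39.99],[116.40,39.90],[116.30,39.90],[116.30,39.99]]]}.
 */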
fun geoFormat(geo: String): String {
val prefix = "{\"type\":\"Polygon\",\"coordinates\":[["
val suffix = "]]}"
val geoArray = geo.split(",".toRegex())
val data = "[" + geoArray[0] + "," + geoArray[1] +
"],[" + geoArray[2] + "," + geoArray[1] +
"],[" + geoArray[2] + "," + geoArray[3] +
"],[" + geoArray[0] + "," + geoArray[3] +
"],[" + geoArray[0] + "," + geoArray[1] +
"]"
return String.format("%s%s%s", prefix, data, suffix)
}
override fun onDestroy() {
super.onDestroy()
mMapView!!.onDestroy()
handler.removeCallbacks(task)
}
override fun onResume() {
super.onResume()
mMapView!!.onResume()
handler.post(task)
}
override fun onPause() {
super.onPause()
mMapView!!.onPause()
}
@RequiresApi(Build.VERSION_CODES.M)
fun handlePermisson() {
val permission = Manifest.permission.ACCESS_COARSE_LOCATION
val checkSelfPermission = ActivityCompat.checkSelfPermission(activity!!, permission)
if (checkSelfPermission == PackageManager.PERMISSION_GRANTED) {
} else {
if (ActivityCompat.shouldShowRequestPermissionRationale(activity!!, permission)) {
} else {
myRequestPermission()
}
}
}
@RequiresApi(Build.VERSION_CODES.M)
private fun myRequestPermission() {
val permissions = arrayOf(Manifest.permission.ACCESS_COARSE_LOCATION,
Manifest.permission.ACCESS_FINE_LOCATION,
Manifest.permission.READ_PHONE_STATE,
Manifest.permission.WRITE_EXTERNAL_STORAGE)
requestPermissions(permissions, 1)
}
override fun onRequestPermissionsResult(requestCode: Int, permissions: Array<out String>, grantResults: IntArray) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults)
// if (grantResults[0] == PackageManager.PERMISSION_GRANTED) {
// }
}
}
<file_sep>package com.yunjishi.lixiang.yunjishi.view.activity
import android.app.Activity
import android.graphics.Color
import android.support.v7.app.AppCompatActivity
import android.os.Bundle
import android.widget.AbsListView
import com.android.lixiang.base.utils.view.StatusBarUtil
import com.yunjishi.lixiang.yunjishi.R
import com.yunjishi.lixiang.yunjishi.view.adapter.SelectTypeParamsAdapter
import kotlinx.android.synthetic.main.activity_select_type_params.*
import android.content.Intent
class SelectTypeParamsActivity : AppCompatActivity() {
var titleList: MutableList<String>? = mutableListOf()
var adapter: SelectTypeParamsAdapter? = null
var index = -1
var INDEX = ""
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_select_type_params)
StatusBarUtil.setColor(this, Color.parseColor("#000000"), 0)
mSelectTypeParamsToolbar.title = "类型"
setSupportActionBar(mSelectTypeParamsToolbar)
supportActionBar!!.setDisplayHomeAsUpEnabled(true)
val intent = intent
val bundle = intent.extras
INDEX = bundle.getString("INDEX")
// println(bundle.getString("INDEX"))
initView()
if (bundle.getString("INDEX") != "-1"){
adapter!!.setSelectedItem(INDEX.toInt())
adapter!!.notifyDataSetInvalidated()
}
mTypeListView.setOnItemClickListener { parent, view, position, id ->
index = position
adapter!!.setSelectedItem(position)
adapter!!.notifyDataSetInvalidated()
}
mSelectTypeParamsToolbar.setNavigationOnClickListener {
println("index$index")
println("INDEX$INDEX")
val intent = Intent()
if(index == -1 && INDEX == "-1"){
intent.putExtra("TYPE", index.toString())
setResult(Activity.RESULT_OK, intent)
this.finish()
}else if(index == -1 && INDEX != "-1"){
intent.putExtra("TYPE", INDEX)
setResult(Activity.RESULT_OK, intent)
this.finish()
}else{
intent.putExtra("TYPE", index.toString())
setResult(Activity.RESULT_OK, intent)
this.finish()
}
}
}
override fun onBackPressed() {
// super.onBackPressed()
println("index$index")
println("INDEX$INDEX")
val intent = Intent()
if(index == -1 && INDEX == "-1"){
intent.putExtra("TYPE", index.toString())
setResult(Activity.RESULT_OK, intent)
this.finish()
}else if(index == -1 && INDEX != "-1"){
intent.putExtra("TYPE", INDEX)
setResult(Activity.RESULT_OK, intent)
this.finish()
}else{
println("ddddddd")
intent.putExtra("TYPE", index.toString())
setResult(Activity.RESULT_OK, intent)
this.finish()
}
}
private fun initView() {
titleList!!.add("标准卫星图")
titleList!!.add("夜光卫星图")
titleList!!.add("卫星视频")
adapter = SelectTypeParamsAdapter(titleList, this)
mTypeListView.adapter = adapter
mTypeListView.choiceMode = AbsListView.CHOICE_MODE_SINGLE
}
}
<file_sep>package com.android.lixiang.base.database
import android.content.Context
import org.greenrobot.greendao.database.Database
/**
* Created by lixiang on 08/02/2018.
*/
class DatabaseManager {
// var mDaoSession: DaoSession? = null
// var mDao: UserProfileDao? = null
//
//
// fun init(context: Context): DatabaseManager? {
// initDao(context)
// return this
// }
//
//
// object Holder {
// val INSTANCE = DatabaseManager()
// }
//
//
// fun getInstance(): DatabaseManager? {
// return Holder.INSTANCE
// }
//
//
// fun initDao(context: Context) {
// val helper: ReleaseOpenHelper? = ReleaseOpenHelper(context, "fast_ec_1.db")
// var db: Database? = helper!!.writableDb
// mDaoSession = DaoMaster(db).newSession()
// mDao = mDaoSession!!.userProfileDao
//
// }
//
// fun getDao(): UserProfileDao? {
// return mDao
// }
//
}<file_sep>package com.yunjishi.lixiang.yunjishi.presenter.injection.component
import com.android.lixiang.base.injection.ComponentScope
import com.android.lixiang.base.injection.component.FragmentComponent
import com.yunjishi.lixiang.yunjishi.presenter.injection.module.MissionModule
import com.yunjishi.lixiang.yunjishi.presenter.injection.module.ParamsModule
import com.yunjishi.lixiang.yunjishi.view.fragment.MissionFragment
import com.yunjishi.lixiang.yunjishi.view.fragment.ParamsFragment
import dagger.Component
@ComponentScope
@Component(dependencies = arrayOf(FragmentComponent::class), modules = arrayOf(ParamsModule::class))
interface ParamsComponent {
fun inject(fragment: ParamsFragment)
}<file_sep>package com.yunjishi.lixiang.yunjishi.presenter.injection.module
import com.yunjishi.lixiang.yunjishi.service.MissionService
import com.yunjishi.lixiang.yunjishi.service.ParamsService
import com.yunjishi.lixiang.yunjishi.service.impl.MissionServiceImpl
import com.yunjishi.lixiang.yunjishi.service.impl.ParamsServiceImpl
import dagger.Module
import dagger.Provides
@Module
class ParamsModule {
@Provides
fun provideParamsService(service: ParamsServiceImpl): ParamsService {
return service
}
}<file_sep>package com.android.lixiang.base.database;
import android.database.Cursor;
import android.database.sqlite.SQLiteStatement;
import org.greenrobot.greendao.AbstractDao;
import org.greenrobot.greendao.Property;
import org.greenrobot.greendao.internal.DaoConfig;
import org.greenrobot.greendao.database.Database;
import org.greenrobot.greendao.database.DatabaseStatement;
// THIS CODE IS GENERATED BY greenDAO, DO NOT EDIT.
/**
* DAO for table "user_profile".
*/
public class UserProfileDao extends AbstractDao<UserProfile, String> {
public static final String TABLENAME = "user_profile";
/**
* Properties of entity UserProfile.<br/>
* Can be used for QueryBuilder and for referencing column names.
*/
public static class Properties {
public final static Property UserId = new Property(0, String.class, "userId", true, "USER_ID");
public final static Property Username = new Property(1, String.class, "username", false, "USERNAME");
public final static Property Level = new Property(2, String.class, "level", false, "LEVEL");
public final static Property Label = new Property(3, String.class, "label", false, "LABEL");
public final static Property UserTelephone = new Property(4, String.class, "userTelephone", false, "USER_TELEPHONE");
public final static Property UserPassword = new Property(5, String.class, "userPassword", false, "USER_PASSWORD");
public final static Property ImagePath = new Property(6, String.class, "imagePath", false, "IMAGE_PATH");
public final static Property GmtCreated = new Property(7, String.class, "gmtCreated", false, "GMT_CREATED");
}
public UserProfileDao(DaoConfig config) {
super(config);
}
public UserProfileDao(DaoConfig config, DaoSession daoSession) {
super(config, daoSession);
}
/** Creates the underlying database table. */
public static void createTable(Database db, boolean ifNotExists) {
String constraint = ifNotExists? "IF NOT EXISTS ": "";
db.execSQL("CREATE TABLE " + constraint + "\"user_profile\" (" + //
"\"USER_ID\" TEXT PRIMARY KEY NOT NULL ," + // 0: userId
"\"USERNAME\" TEXT," + // 1: username
"\"LEVEL\" TEXT," + // 2: level
"\"LABEL\" TEXT," + // 3: label
"\"USER_TELEPHONE\" TEXT," + // 4: userTelephone
"\"USER_PASSWORD\" TEXT," + // 5: userPassword
"\"IMAGE_PATH\" TEXT," + // 6: imagePath
"\"GMT_CREATED\" TEXT);"); // 7: gmtCreated
}
/** Drops the underlying database table. */
public static void dropTable(Database db, boolean ifExists) {
String sql = "DROP TABLE " + (ifExists ? "IF EXISTS " : "") + "\"user_profile\"";
db.execSQL(sql);
}
@Override
protected final void bindValues(DatabaseStatement stmt, UserProfile entity) {
stmt.clearBindings();
String userId = entity.getUserId();
if (userId != null) {
stmt.bindString(1, userId);
}
String username = entity.getUsername();
if (username != null) {
stmt.bindString(2, username);
}
String level = entity.getLevel();
if (level != null) {
stmt.bindString(3, level);
}
String label = entity.getLabel();
if (label != null) {
stmt.bindString(4, label);
}
String userTelephone = entity.getUserTelephone();
if (userTelephone != null) {
stmt.bindString(5, userTelephone);
}
String userPassword = entity.getUserPassword();
if (userPassword != null) {
stmt.bindString(6, userPassword);
}
String imagePath = entity.getImagePath();
if (imagePath != null) {
stmt.bindString(7, imagePath);
}
String gmtCreated = entity.getGmtCreated();
if (gmtCreated != null) {
stmt.bindString(8, gmtCreated);
}
}
@Override
protected final void bindValues(SQLiteStatement stmt, UserProfile entity) {
stmt.clearBindings();
String userId = entity.getUserId();
if (userId != null) {
stmt.bindString(1, userId);
}
String username = entity.getUsername();
if (username != null) {
stmt.bindString(2, username);
}
String level = entity.getLevel();
if (level != null) {
stmt.bindString(3, level);
}
String label = entity.getLabel();
if (label != null) {
stmt.bindString(4, label);
}
String userTelephone = entity.getUserTelephone();
if (userTelephone != null) {
stmt.bindString(5, userTelephone);
}
String userPassword = entity.getUserPassword();
if (userPassword != null) {
stmt.bindString(6, userPassword);
}
String imagePath = entity.getImagePath();
if (imagePath != null) {
stmt.bindString(7, imagePath);
}
String gmtCreated = entity.getGmtCreated();
if (gmtCreated != null) {
stmt.bindString(8, gmtCreated);
}
}
@Override
public String readKey(Cursor cursor, int offset) {
return cursor.isNull(offset + 0) ? null : cursor.getString(offset + 0);
}
@Override
public UserProfile readEntity(Cursor cursor, int offset) {
UserProfile entity = new UserProfile( //
cursor.isNull(offset + 0) ? null : cursor.getString(offset + 0), // userId
cursor.isNull(offset + 1) ? null : cursor.getString(offset + 1), // username
cursor.isNull(offset + 2) ? null : cursor.getString(offset + 2), // level
cursor.isNull(offset + 3) ? null : cursor.getString(offset + 3), // label
cursor.isNull(offset + 4) ? null : cursor.getString(offset + 4), // userTelephone
cursor.isNull(offset + 5) ? null : cursor.getString(offset + 5), // userPassword
cursor.isNull(offset + 6) ? null : cursor.getString(offset + 6), // imagePath
cursor.isNull(offset + 7) ? null : cursor.getString(offset + 7) // gmtCreated
);
return entity;
}
@Override
public void readEntity(Cursor cursor, UserProfile entity, int offset) {
entity.setUserId(cursor.isNull(offset + 0) ? null : cursor.getString(offset + 0));
entity.setUsername(cursor.isNull(offset + 1) ? null : cursor.getString(offset + 1));
entity.setLevel(cursor.isNull(offset + 2) ? null : cursor.getString(offset + 2));
entity.setLabel(cursor.isNull(offset + 3) ? null : cursor.getString(offset + 3));
entity.setUserTelephone(cursor.isNull(offset + 4) ? null : cursor.getString(offset + 4));
entity.setUserPassword(cursor.isNull(offset + 5) ? null : cursor.getString(offset + 5));
entity.setImagePath(cursor.isNull(offset + 6) ? null : cursor.getString(offset + 6));
entity.setGmtCreated(cursor.isNull(offset + 7) ? null : cursor.getString(offset + 7));
}
@Override
protected final String updateKeyAfterInsert(UserProfile entity, long rowId) {
return entity.getUserId();
}
@Override
public String getKey(UserProfile entity) {
if(entity != null) {
return entity.getUserId();
} else {
return null;
}
}
@Override
public boolean hasKey(UserProfile entity) {
return entity.getUserId() != null;
}
@Override
protected final boolean isEntityUpdateable() {
return true;
}
}
<file_sep>package com.yunjishi.lixiang.yunjishi.presenter
import com.android.lixiang.base.ext.execute
import com.android.lixiang.base.presenter.BasePresenter
import com.android.lixiang.base.rx.BaseObserver
import com.yunjishi.lixiang.yunjishi.presenter.data.bean.DemandDetailBean
import com.yunjishi.lixiang.yunjishi.presenter.data.bean.SubmitOrderBean
import com.yunjishi.lixiang.yunjishi.presenter.view.MissionView
import com.yunjishi.lixiang.yunjishi.presenter.view.OrderDetailView
import com.yunjishi.lixiang.yunjishi.service.MissionService
import com.yunjishi.lixiang.yunjishi.service.OrderDetailService
import javax.inject.Inject
class OrderDetailPresenter @Inject constructor() : BasePresenter<OrderDetailView>() {
@Inject
lateinit var mOrderDetailService: OrderDetailService
fun getDemandDetail(string1: String, string2: String) {
mOrderDetailService.getDemandDetail(string1, string2).execute(object : BaseObserver<DemandDetailBean>() {
override fun onNext(t: DemandDetailBean) {
super.onNext(t)
mView.onGetDemandDetailResult(t)
}
}, lifecycleProvider)
}
}<file_sep>package com.yunjishi.lixiang.yunjishi.presenter.database;
import java.util.Map;
import org.greenrobot.greendao.AbstractDao;
import org.greenrobot.greendao.AbstractDaoSession;
import org.greenrobot.greendao.database.Database;
import org.greenrobot.greendao.identityscope.IdentityScopeType;
import org.greenrobot.greendao.internal.DaoConfig;
import com.yunjishi.lixiang.yunjishi.presenter.data.bean.UserBean;
import com.yunjishi.lixiang.yunjishi.presenter.data.bean.UserBean2;
import com.yunjishi.lixiang.yunjishi.presenter.database.UserBeanDao;
import com.yunjishi.lixiang.yunjishi.presenter.database.UserBean2Dao;
// THIS CODE IS GENERATED BY greenDAO, DO NOT EDIT.
/**
* {@inheritDoc}
*
* @see org.greenrobot.greendao.AbstractDaoSession
*/
public class DaoSession extends AbstractDaoSession {
private final DaoConfig userBeanDaoConfig;
private final DaoConfig userBean2DaoConfig;
private final UserBeanDao userBeanDao;
private final UserBean2Dao userBean2Dao;
public DaoSession(Database db, IdentityScopeType type, Map<Class<? extends AbstractDao<?, ?>>, DaoConfig>
daoConfigMap) {
super(db);
userBeanDaoConfig = daoConfigMap.get(UserBeanDao.class).clone();
userBeanDaoConfig.initIdentityScope(type);
userBean2DaoConfig = daoConfigMap.get(UserBean2Dao.class).clone();
userBean2DaoConfig.initIdentityScope(type);
userBeanDao = new UserBeanDao(userBeanDaoConfig, this);
userBean2Dao = new UserBean2Dao(userBean2DaoConfig, this);
registerDao(UserBean.class, userBeanDao);
registerDao(UserBean2.class, userBean2Dao);
}
public void clear() {
userBeanDaoConfig.clearIdentityScope();
userBean2DaoConfig.clearIdentityScope();
}
public UserBeanDao getUserBeanDao() {
return userBeanDao;
}
public UserBean2Dao getUserBean2Dao() {
return userBean2Dao;
}
}
<file_sep>package com.yunjishi.lixiang.yunjishi.presenter.data.bean;
import org.greenrobot.greendao.annotation.Entity;
import org.greenrobot.greendao.annotation.Id;
import org.greenrobot.greendao.annotation.Generated;
@Entity
public class UserBean {
@Id
private String userId;
private String userName;
private String userTel;
private String userPassword;
@Generated(hash = 1794157267)
public UserBean(String userId, String userName, String userTel,
String userPassword) {
this.userId = userId;
this.userName = userName;
this.userTel = userTel;
this.userPassword = userPassword;
}
@Generated(hash = 1203313951)
public UserBean() {
}
public String getUserId() {
return this.userId;
}
public void setUserId(String userId) {
this.userId = userId;
}
public String getUserName() {
return this.userName;
}
public void setUserName(String userName) {
this.userName = userName;
}
public String getUserTel() {
return this.userTel;
}
public void setUserTel(String userTel) {
this.userTel = userTel;
}
public String getUserPassword() {
return this.userPassword;
}
public void setUserPassword(String userPassword) {
this.userPassword = userPassword;
}
}
<file_sep>package com.yunjishi.lixiang.yunjishi.presenter.database
import android.content.Context
import com.yunjishi.lixiang.yunjishi.presenter.data.bean.UserBean2
import org.greenrobot.greendao.database.Database
class DataBaseManager {
var mDaoSession: DaoSession? = null
var userBean = UserBean2()
fun initDao(context: Context): DaoSession? {
var openHelper: DaoMaster.DevOpenHelper = DaoMaster.DevOpenHelper(context, "USER")
var db: Database = openHelper.writableDb
var daoMaster: DaoMaster = DaoMaster(db)
mDaoSession = daoMaster.newSession()
return mDaoSession
}
}<file_sep>package com.yunjishi.lixiang.yunjishi.view.activity
import android.content.Intent
import android.graphics.Color
import android.support.v7.app.AppCompatActivity
import android.os.Bundle
import com.yunjishi.lixiang.yunjishi.R
import cn.jzvd.JZVideoPlayerStandard
import kotlinx.android.synthetic.main.activity_video_player.*
import cn.jzvd.JZVideoPlayer
import com.android.lixiang.base.utils.view.StatusBarUtil
import io.vov.vitamio.Vitamio
import io.vov.vitamio.widget.MediaController
import kotlinx.android.synthetic.main.activity_select_time.*
class VideoPlayerActivity : AppCompatActivity() {
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
Vitamio.isInitialized(applicationContext)
setContentView(R.layout.activity_video_player)
StatusBarUtil.setColor(this, Color.parseColor("#000000"), 0)
// println(intent.extras.getString("URL"))
// mVideoPlayer.setUp("http://202.111.178.10/unzip/video/长滩1/长滩1.mp4",
// JZVideoPlayerStandard.SCREEN_WINDOW_NORMAL,
// "")
if (Vitamio.isInitialized(this)) {
videoview!!.setVideoPath(intent.extras.getString("URL"))
// videoview!!.setVideoPath("http://202.111.178.10/unzip/video/长滩1/长滩1.mp4")
videoview!!.setMediaController(MediaController(this))
videoview!!.start()
}
mVideoPlayerToolbar.title = "视频"
setSupportActionBar(mVideoPlayerToolbar)
supportActionBar!!.setDisplayHomeAsUpEnabled(true)
mVideoPlayerToolbar.setNavigationOnClickListener {
this.finish()
}
}
override fun onBackPressed() {
this.finish()
}
}
<file_sep>apply plugin: 'com.android.library'
apply plugin: 'kotlin-android'
apply plugin: 'kotlin-kapt'
apply plugin: 'kotlin-android-extensions'
kapt {
generateStubs = true
}
android {
compileSdkVersion 27
defaultConfig {
minSdkVersion 21
targetSdkVersion 27
versionCode 1
versionName "1.0"
testInstrumentationRunner "android.support.test.runner.AndroidJUnitRunner"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
}
}
sourceSets {
main {
jniLibs.srcDirs = ['libs']
}
}
}
dependencies {
implementation fileTree(include: ['*.jar'], dir: 'libs')
api 'com.android.support:appcompat-v7:27.1.1'
api 'com.android.support:design:27.1.1'
implementation 'com.android.support.constraint:constraint-layout:1.1.2'
testImplementation 'junit:junit:4.12'
androidTestImplementation 'com.android.support.test:runner:1.0.2'
androidTestImplementation 'com.android.support.test.espresso:espresso-core:3.0.2'
api 'org.jetbrains.kotlin:kotlin-stdlib-jre7:1.2.41'
api 'com.android.support:design:27.1.1'
api 'org.jetbrains.anko:anko:0.10.5'
api "org.jetbrains.anko:anko-sdk25:0.10.5"
api 'org.jetbrains.anko:anko-coroutines:0.10.5'
api 'org.jetbrains.anko:anko-commons:0.10.5'
api "org.jetbrains.anko:anko-appcompat-v7:0.10.5"
api "org.jetbrains.anko:anko-support-v4:0.10.5"
api "org.jetbrains.anko:anko-design-coroutines:0.10.5"
api "org.jetbrains.anko:anko-design:0.10.5"
api "org.jetbrains.anko:anko-constraint-layout:0.10.5"
api 'com.blankj:utilcode:1.13.5'
api 'io.reactivex.rxjava2:rxandroid:2.0.2'
api 'io.reactivex.rxjava2:rxkotlin:2.2.0'
api 'com.squareup.okhttp3:okhttp:3.10.0'
api 'com.squareup.okhttp3:logging-interceptor:3.10.0'
api 'com.squareup.retrofit2:retrofit:2.4.0'
api 'com.squareup.retrofit2:converter-scalars:2.3.0'
api 'com.squareup.retrofit2:converter-gson:2.3.0'
api 'com.squareup.retrofit2:adapter-rxjava2:2.3.0'
api 'com.orhanobut:logger:1.15'
api 'com.google.dagger:dagger:2.16'
kapt 'com.google.dagger:dagger-compiler:2.16'
api 'com.trello.rxlifecycle2:rxlifecycle-kotlin:2.2.1'
api 'com.trello.rxlifecycle2:rxlifecycle-components:2.2.1'
api 'com.ashokvarma.android:bottom-navigation-bar:2.0.4'
api 'com.github.CymChad:BaseRecyclerViewAdapterHelper:2.9.35'
api 'com.youth.banner:banner:1.4.10'
api 'com.github.bumptech.glide:glide:4.7.1'
annotationProcessor 'com.github.bumptech.glide:compiler:4.7.1'
api 'com.github.VyacheslavShmakin:gp-collapsing-toolbar:1.0.1'
api 'com.jaeger.statusbaruitl:library:1.3.5'
api files('libs/BaiduLBS_Android.jar')
api files('libs/cordova-6.2.0-dev.jar')
api 'com.github.ikidou:FragmentBackHandler:2.1'
api 'com.github.lzyzsd:jsbridge:1.0.4'
api 'com.tencent.bugly:crashreport:2.6.6.1'
api 'org.greenrobot:greendao:3.2.2'
api 'org.greenrobot:greendao-generator:3.2.2'
api 'com.alibaba:fastjson:1.1.57.android'
api 'cn.jzvd:jiaozivideoplayer:6.2.10'
api 'com.github.barteksc:android-pdf-viewer:3.0.0-beta.5'
api 'es.voghdev.pdfviewpager:library:1.0.4'
}
<file_sep>package com.yunjishi.lixiang.yunjishi.data.repository
class MainRepository {
}<file_sep>package com.yunjishi.lixiang.yunjishi.presenter.data.repository
import com.android.lixiang.base.common.BaseConstant
import com.android.lixiang.base.data.net.RetrofitFactory
import com.yunjishi.lixiang.yunjishi.presenter.data.bean.SubmitOrderBean
import com.yunjishi.lixiang.yunjishi.presenter.data.api.Api
import io.reactivex.Observable
import javax.inject.Inject
class MainRepository @Inject constructor() {
fun submitOrder(string1: String,string2: String,string3: String,string4: String,string5: String,string6: String,string7: String): Observable<SubmitOrderBean> {
return RetrofitFactory(BaseConstant.SERVER_ADDRESS_8081)
.create(Api::class.java)
.submitOrder(string1, string2, string3, string4, string5, string6, string7)
}
}<file_sep>package com.yunjishi.lixiang.yunjishi.presenter
import com.android.lixiang.base.ext.execute
import com.android.lixiang.base.presenter.BasePresenter
import com.android.lixiang.base.rx.BaseObserver
import com.yunjishi.lixiang.yunjishi.presenter.data.bean.SubmitOrderBean
import com.yunjishi.lixiang.yunjishi.presenter.view.MissionView
import com.yunjishi.lixiang.yunjishi.presenter.view.ParamsView
import com.yunjishi.lixiang.yunjishi.service.MissionService
import com.yunjishi.lixiang.yunjishi.service.ParamsService
import javax.inject.Inject
class ParamsPresenter @Inject constructor() : BasePresenter<ParamsView>() {
@Inject
lateinit var mParamsService: ParamsService
fun submitOrder(string1: String, string2: String, string3: String, string4: String, string5: String, string6: String, string7: String) {
mParamsService.submitOrder(string1, string2, string3, string4, string5, string6, string7).execute(object : BaseObserver<SubmitOrderBean>() {
override fun onNext(t: SubmitOrderBean) {
super.onNext(t)
mView.onSubmitOrderResult(t)
}
}, lifecycleProvider)
}
}<file_sep>package com.yunjishi.lixiang.yunjishi.view.fragment
import android.Manifest
import android.animation.ObjectAnimator
import android.animation.ValueAnimator
import android.annotation.SuppressLint
import android.app.Activity
import android.content.Context
import android.content.Intent
import android.content.SharedPreferences
import android.content.pm.PackageManager
import android.graphics.Color
import android.graphics.Point
import android.hardware.Sensor
import android.hardware.SensorEvent
import android.hardware.SensorEventListener
import android.hardware.SensorManager
import android.net.Uri
import android.os.*
import android.support.annotation.RequiresApi
import android.support.v4.app.ActivityCompat
import android.support.v7.app.AlertDialog
import android.support.v7.app.AppCompatActivity
import android.view.Gravity
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import android.webkit.ValueCallback
import android.webkit.WebChromeClient
import android.widget.*
import com.android.lixiang.base.database.UserProfile
import com.android.lixiang.base.ui.fragment.BaseMvpFragment
import com.android.lixiang.base.utils.view.DimenUtil
import com.android.lixiang.base.utils.view.StatusBarUtil
import com.baidu.location.*
import com.baidu.mapapi.map.*
import com.baidu.mapapi.model.LatLng
import com.baidu.mapapi.utils.DistanceUtil
import com.android.lixiang.base.database.DatabaseManager
import com.github.ikidou.fragmentBackHandler.BackHandlerHelper
import com.github.ikidou.fragmentBackHandler.FragmentBackHandler
import com.github.lzyzsd.jsbridge.BridgeHandler
import com.github.lzyzsd.jsbridge.DefaultHandler
import com.orhanobut.logger.Logger
import com.yunjishi.lixiang.yunjishi.GetArea
import com.yunjishi.lixiang.yunjishi.R
import com.yunjishi.lixiang.yunjishi.presenter.MissionPresenter
import com.yunjishi.lixiang.yunjishi.presenter.data.bean.SubmitOrderBean
import com.yunjishi.lixiang.yunjishi.presenter.injection.component.DaggerMissionComponent
import com.yunjishi.lixiang.yunjishi.presenter.injection.module.MissionModule
import com.yunjishi.lixiang.yunjishi.presenter.view.MissionView
import com.yunjishi.lixiang.yunjishi.view.activity.*
import com.yunjishi.lixiang.yunjishi.view.adapter.SelectParamsAdapter
import kotlinx.android.synthetic.main.fragment_mission.*
class MissionFragment : BaseMvpFragment<MissionPresenter>(), MissionView, SensorEventListener, FragmentBackHandler {
var mUploadMessage: ValueCallback<Uri>? = null
var mMapView: MapView? = null
private var mBaiduMap: BaiduMap? = null
var myListener = MyLocationListenner()
private var mLocClient: LocationClient? = null
private var isFirstLoc = true
private var locData: MyLocationData? = null
private var mCurrentDirection = 0
private var mCurrentLat = 0.0
private var mCurrentLon = 0.0
private var mCurrentAccracy: Float = 0.toFloat()
private var lastX: Double? = 0.0
val handler = Handler()
var task: Runnable? = null
private var center = String()
private var geoString = String()
private var scopeGeo = String()
private var area: Double? = 0.0
private var userId: String? = null
private var demandType: String? = null
private var demandGeo: String? = null
private var resolution: String? = null
private var startTime: String? = null
private var endTime: String? = null
private var timesParam: String? = null
private var handleBackPressed = true
var mSharedPreferences: SharedPreferences? = null
var titleList: MutableList<String>? = mutableListOf()
var subTitleList: MutableList<String>? = mutableListOf()
var detailList: MutableList<String>? = mutableListOf()
var typeList: MutableList<String>? = mutableListOf()
var ratioList: MutableList<String>? = mutableListOf()
var typeIndex = -1
var ratioIndex = -1
var time = ""
var times = ""
var TIMES = ""
var adapter: SelectParamsAdapter? = null
var flag1 = false
var flag2 = false
var flag3 = false
var flag4 = false
var pageIndex = 0
override fun onAttach(activity: Activity?) {
super.onAttach(activity)
userId = (activity as MainActivity).getUserID()
}
override fun injectComponent() {
DaggerMissionComponent.builder().fragmentComponent(fragmentComponent)
.missionModule(MissionModule())
.build().inject(this)
}
override fun onSubmitOrderResult(res: SubmitOrderBean) {
println("onSubmitOrderResult$res")
mOrderWebViewReletiveLayout.visibility = View.VISIBLE
pageIndex = 3
mOrderWebView.reload()
}
override fun onBackPressed(): Boolean {
when (pageIndex) {
0 -> {
// Handle the back key for the top-level page
println("--0")
// return BackHandlerHelper.handleBackPress(this)
activity!!.moveTaskToBack(false)
return true
}
1 -> {
println("--1")
pageIndex = 0
mMapViewRelativeLayout.visibility = View.GONE
return true
}
2 -> {
println("--2")
pageIndex = 1
mSelectParamsRelativeLayout.visibility = View.GONE
return true
}
3 -> {
println("--3")
pageIndex = 2
mOrderWebViewReletiveLayout.visibility = View.GONE
return true
}
else -> {
// If this fragment has no child fragments,
// or none of them needs to handle the back press,
// you can simply return false here.
// Note: in a Fragment/Activity you can pass the ViewPager instead of `this`.
//
println("》》》》")
return BackHandlerHelper.handleBackPress(this)
}
}
}
override fun onAccuracyChanged(sensor: Sensor?, accuracy: Int) {
}
override fun onSensorChanged(event: SensorEvent?) {
val x = event!!.values[SensorManager.DATA_X].toDouble()
if (Math.abs(x - lastX!!) > 1.0) {
mCurrentDirection = x.toInt()
locData = MyLocationData.Builder()
.accuracy(0F)
.direction(mCurrentDirection.toFloat()).latitude(mCurrentLat)
.longitude(mCurrentLon).build()
mBaiduMap!!.setMyLocationData(locData)
}
lastX = x
}
override fun onCreateView(inflater: LayoutInflater, container: ViewGroup?,
savedInstanceState: Bundle?): View? {
// Inflate the layout for this fragment
return inflater.inflate(R.layout.fragment_mission, container, false)
}
@RequiresApi(Build.VERSION_CODES.M)
override fun onActivityCreated(savedInstanceState: Bundle?) {
super.onActivityCreated(savedInstanceState)
initView()
handlePermisson()
initMap()
initSelectViews()
initOrder()
mMissionFeild1.setOnClickListener {
pageIndex = 1
if (userId != "-1") {
mMapViewRelativeLayout.visibility = View.VISIBLE
} else {
var mLoginLayout = activity!!.findViewById<LinearLayout>(R.id.mLoginLayout)
mLoginLayout.visibility = View.VISIBLE
}
}
mMissionFeild2.setOnClickListener {
pageIndex = 3
if (userId != "-1")
mOrderWebViewReletiveLayout.visibility = View.VISIBLE
else {
var mLoginLayout = activity!!.findViewById<LinearLayout>(R.id.mLoginLayout)
mLoginLayout.visibility = View.VISIBLE
}
}
mCameraImageView.setOnClickListener {
pageIndex = 2
if (scopeGeo == "") {
Toast.makeText(activity, "请选择区域", Toast.LENGTH_SHORT).show()
} else
mSelectParamsRelativeLayout.visibility = View.VISIBLE
}
mZoomInButton.setOnClickListener {
val zoomIn: MapStatusUpdate? = MapStatusUpdateFactory.zoomIn()
mBaiduMap!!.animateMapStatus(zoomIn)
}
mZoomOutButton.setOnClickListener {
val zoomOut: MapStatusUpdate? = MapStatusUpdateFactory.zoomOut()
mBaiduMap!!.animateMapStatus(zoomOut)
}
// mOrderWebViewToolbar.setNavigationOnClickListener {
// mOrderWebViewReletiveLayout.visibility = View.GONE
// }
}
private fun initOrder() {
// mOrderWebViewToolbar.title = "我的订单"
// (activity as AppCompatActivity).setSupportActionBar(mOrderWebViewToolbar)
(activity as AppCompatActivity).supportActionBar!!.setDisplayHomeAsUpEnabled(true)
mOrderWebView.setBackgroundColor(0)
// mOrderWebViewToolbar.setNavigationOnClickListener {
// mOrderWebViewReletiveLayout.visibility = View.INVISIBLE
// }
mOrderWebView.setDefaultHandler(DefaultHandler())
mOrderWebView.webChromeClient = object : WebChromeClient() {
fun openFileChooser(uploadMsg: ValueCallback<Uri>, AcceptType: String, capture: String) {
this.openFileChooser(uploadMsg)
}
fun openFileChooser(uploadMsg: ValueCallback<Uri>, AcceptType: String) {
this.openFileChooser(uploadMsg)
}
fun openFileChooser(uploadMsg: ValueCallback<Uri>) {
mUploadMessage = uploadMsg
}
}
com.orhanobut.logger.Logger.d("http://172.16.17.32:12380/demand.html?userId=$userId")
mOrderWebView.loadUrl("http://172.16.17.32:12380/demand.html?userId=$userId")
mOrderWebView.registerHandler("demandShow", BridgeHandler { data, function ->
println("demandId$data")
if (data != null) {
val intent = Intent(activity, OrderDetailActivity::class.java)
val bundle = Bundle()
bundle.putString("DEMAND_ID", data)
bundle.putString("USER_ID", userId)
intent.putExtras(bundle)
startActivity(intent)
}
})
mOrderWebView.registerHandler("productShow", BridgeHandler { data, function ->
println("productId$data")
if (data != null) {
mSharedPreferences = activity!!.getSharedPreferences("XXX", Context.MODE_PRIVATE)
val editor = mSharedPreferences!!.edit()
editor.putString("PRODUCT_ID", data)
editor.commit()
val intent = activity!!.intent
activity!!.overridePendingTransition(0, 0)
activity!!.finish()
activity!!.overridePendingTransition(0, 0)
startActivity(intent)
}
})
}
private fun initSelectViews() {
StatusBarUtil.setColor(activity, Color.parseColor("#000000"), 0)
mSelectParamsToolbar.title = "请选择拍摄时的参数"
(activity as AppCompatActivity).setSupportActionBar(mSelectParamsToolbar)
(activity as AppCompatActivity).supportActionBar!!.setDisplayHomeAsUpEnabled(true)
initView2()
mSelectParamsToolbar.setNavigationOnClickListener {
mSelectParamsRelativeLayout.visibility = View.GONE
}
mListView.setOnItemClickListener { parent, view, position, id ->
when (position) {
0 -> {
val intent = Intent(activity, SelectTypeParamsActivity::class.java)
val bundle = Bundle()
bundle.putString("INDEX", typeIndex.toString())
intent.putExtras(bundle)
startActivityForResult(intent, 0)
}
1 -> {
val intent = Intent(activity, SelectRatioActivity::class.java)
val bundle = Bundle()
bundle.putString("INDEX", ratioIndex.toString())
intent.putExtras(bundle)
startActivityForResult(intent, 1)
}
2 -> {
val intent = Intent(activity, SelectTimeActivity::class.java)
val bundle = Bundle()
bundle.putString("TIME", time)
intent.putExtras(bundle)
startActivityForResult(intent, 2)
}
3 -> {
initNumberPicker()
}
}
}
mDoneTextView.setOnClickListener {
mPresenter.mView = this
println(userId!! + " " + demandType!! + " " + demandGeo!!
+ " " + resolution!! + " " + startTime!! + " " + endTime!! + " " + timesParam!!)
mPresenter.submitOrder(userId!!, demandType!!, demandGeo!!
, resolution!!, startTime!!, endTime!!, timesParam!!)
}
}
private fun initNumberPicker() {
val builder = AlertDialog.Builder(activity!!)
builder.setTitle("拍摄频次")
val inflater = LayoutInflater.from(activity)
val v = inflater.inflate(R.layout.item_dialog, null)
builder.setView(v)
builder.setNegativeButton("取消") { arg0, arg1 ->
arg0.dismiss()
}
builder.setPositiveButton("确定") { arg0, arg1 ->
val et = v.findViewById(R.id.mEditText) as EditText
times = et.text.toString()
arg0.dismiss()
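// if the field was left empty, fall back to the last non-empty frequency remembered in TIMES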
if (times != "") {
detailList!![3] = times
timesParam = times
TIMES = times
flag4 = true
adapter!!.notifyDataSetChanged()
} else if (times == "" && TIMES != "") {
detailList!![3] = TIMES
timesParam = TIMES
TIMES = times
flag4 = true
adapter!!.notifyDataSetChanged()
}
checkDone()
}
val dialog = builder.create()
dialog.show()
dialog.window.setGravity(Gravity.CENTER) // center the dialog on screen
}
override fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent?) {
super.onActivityResult(requestCode, resultCode, data)
// a cancelled child activity may return no data
if (data == null) return
println(data.getStringExtra("TYPE"))
if (requestCode == 0 && resultCode == AppCompatActivity.RESULT_OK) {
val detail = data!!.getStringExtra("TYPE")
typeIndex = detail.toInt()
println("typeIndex$typeIndex")
if (detail != "-1") {
detailList!![0] = typeList!![detail.toInt()]
flag1 = true
adapter!!.notifyDataSetChanged()
checkDone()
demandType = typeIndex.toString()
}
} else if (requestCode == 1 && resultCode == AppCompatActivity.RESULT_OK) {
val detail = data!!.getStringExtra("RATIO")
ratioIndex = detail.toInt()
if (detail != "-1") {
detailList!![1] = ratioList!![detail.toInt()]
flag2 = true
adapter!!.notifyDataSetChanged()
resolution = ratioList!![detail.toInt()]
checkDone()
}
} else if (requestCode == 2 && resultCode == AppCompatActivity.RESULT_OK) {
val detail = data!!.getStringExtra("TIME")
time = detail
if (detail != "-") {
detailList!![2] = detail
flag3 = true
adapter!!.notifyDataSetChanged()
startTime = detail.split("-")[0]
endTime = detail.split("-")[1]
checkDone()
}
}
}
private fun initView2() {
titleList!!.add("类型")
titleList!!.add("分辨率")
titleList!!.add("拍摄时间")
titleList!!.add("拍摄频次")
typeList!!.add("标准卫星图")
typeList!!.add("夜光卫星图")
typeList!!.add("卫星视频")
ratioList!!.add("小于 1m")
ratioList!!.add("1m - 3m")
ratioList!!.add("3m - 8m")
ratioList!!.add("8m - 16m")
detailList!!.add("请选择")
detailList!!.add("请选择")
detailList!!.add("请选择")
detailList!!.add("请选择")
subTitleList!!.add("选择您想拍摄的影响类型")
subTitleList!!.add("选择的分辨率值越低,清晰度越高")
subTitleList!!.add("您想在什么时间段进行拍摄")
subTitleList!!.add("在你您选择的时间范围内,您想拍摄几次")
adapter = SelectParamsAdapter(titleList, subTitleList, detailList, activity)
mListView.adapter = adapter
}
private fun checkDone() {
if (flag1 && flag2 && flag3 && flag4) {
mDoneTextView.visibility = View.VISIBLE
}
}
@SuppressLint("ResourceType")
private fun initView() {
mMapToolbar.title = "请选择拍摄区域"
(activity as AppCompatActivity).setSupportActionBar(mMapToolbar)
(activity as AppCompatActivity).supportActionBar!!.setDisplayHomeAsUpEnabled(true)
var width = activity!!.application.resources.displayMetrics.widthPixels
var height = activity!!.application.resources.displayMetrics.heightPixels
//1200 1920 px 706 1129 dp 768
println(DimenUtil().px2dip(activity!!, height.toFloat()))
println(DimenUtil().px2dip(activity!!, width.toFloat()))
task = Runnable {
// animate the scan line down and back up, looping indefinitely
val animator: ObjectAnimator = ObjectAnimator.ofFloat(mScanImageView!!,
"translationY",
0F,
DimenUtil().dip2px(activity!!, 396F).toFloat(),
0F)
animator.duration = 5000
animator.repeatCount = ValueAnimator.INFINITE // loop forever
animator.repeatMode = ValueAnimator.RESTART // INFINITE is not a valid repeat mode; restart each cycle
animator.start()
}
handler.post(task)
mMapToolbar.setNavigationOnClickListener {
mMapViewRelativeLayout.visibility = View.GONE
}
}
private fun initMap() {
val mCurrentMode = MyLocationConfiguration.LocationMode.NORMAL
mMapView = mapView
mLocClient = LocationClient(activity)
mLocClient!!.registerLocationListener(myListener)
val option = LocationClientOption()
option.isOpenGps = true
option.setCoorType("bd09ll")
option.setScanSpan(1000)
option.setAddrType("all")
option.setIsNeedLocationPoiList(true)
mLocClient!!.locOption = option
mLocClient!!.start()
mBaiduMap = mMapView!!.map
mBaiduMap!!.isMyLocationEnabled = true
mBaiduMap!!.setMyLocationConfigeration(MyLocationConfiguration(mCurrentMode, true, null))
val builder = MapStatus.Builder()
builder.overlook(0f)
val child = mMapView!!.getChildAt(1)
if (child != null && (child is ImageView || child is ZoomControls)) {
child.visibility = View.INVISIBLE
}
mMapView!!.showScaleControl(false)
mMapView!!.showZoomControls(false)
val mUiSettings = mBaiduMap!!.uiSettings
mUiSettings.isScrollGesturesEnabled = true
mUiSettings.isZoomGesturesEnabled = true
mUiSettings.isOverlookingGesturesEnabled = false
val listener: BaiduMap.OnMapStatusChangeListener = object : BaiduMap.OnMapStatusChangeListener {
override fun onMapStatusChangeStart(p0: MapStatus?) {}
override fun onMapStatusChangeStart(p0: MapStatus?, p1: Int) {}
override fun onMapStatusChange(p0: MapStatus?) {}
override fun onMapStatusChangeFinish(p0: MapStatus?) {
if (context != null) {
if (mBaiduMap!!.projection != null) {
val ltp = Point()
ltp.x = DimenUtil().dip2px(context!!, 356.5F)
ltp.y = DimenUtil().dip2px(context!!, 105F)
val lt = mBaiduMap!!.projection.fromScreenLocation(ltp)
val rbp = Point()
rbp.x = DimenUtil().dip2px(context!!, 782.5F)
rbp.y = DimenUtil().dip2px(context!!, 531F)
val rb = mBaiduMap!!.projection.fromScreenLocation(rbp)
geoString = String.format("%s,%s,%s,%s", lt.longitude, lt.latitude, rb.longitude, rb.latitude)
println("geoString$geoString")
scopeGeo = geoFormat(geoString)
demandGeo = scopeGeo
println("scopeGeo$scopeGeo")
center = String.format("%s,%s", (lt.longitude + rb.longitude) / 2, (lt.latitude + rb.latitude) / 2)
val leftTop = LatLng(lt.latitude, lt.longitude)
val rightBottom = LatLng(rb.latitude, rb.longitude)
val leftBottom = LatLng(rb.latitude, lt.longitude)
val rightTop = LatLng(lt.latitude, rb.longitude)
var list:MutableList<LatLng> = mutableListOf()
list.add(leftTop)
list.add(rightTop)
list.add(rightBottom)
list.add(leftBottom)
Logger.d(GetArea.getArea(list))
println(GetArea.getArea(list))
// the capture window is square on screen (426dp x 426dp), so diagonal^2 / 2 approximates its ground area in m^2; /2000000 combines that /2 with the m^2 -> km^2 conversion
area = DistanceUtil.getDistance(leftTop, rightBottom) * DistanceUtil.getDistance(leftTop, rightBottom) / 2000000
val areaString = area.toString()
val temp = areaString.substring(0, areaString.indexOf(".") + 3)
if (areaString.contains("E")) {
if (areaString.contains("-")) {
mAreaTextView!!.text = "当前面积:小于 0.01平方公里"
} else
mAreaTextView!!.text = String.format("当前面积:%s 亿平方公里", temp)
} else {
mAreaTextView!!.text = String.format("当前面积:%s 平方公里", temp)
}
}
}
}
}
mMapView!!.map.setOnMapStatusChangeListener(listener)
}
fun geoFormat(geo: String): String {
val prefix = "{\"type\":\"Polygon\",\"coordinates\":[["
val suffix = "]]}"
val geoArray = geo.split(",".toRegex())
val data = "[" + geoArray[0] + "," + geoArray[1] +
"],[" + geoArray[2] + "," + geoArray[1] +
"],[" + geoArray[2] + "," + geoArray[3] +
"],[" + geoArray[0] + "," + geoArray[3] +
"],[" + geoArray[0] + "," + geoArray[1] +
"]"
return String.format("%s%s%s", prefix, data, suffix)
}
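// Example (illustrative coordinates): geoFormat("116.10,40.05,116.50,39.80"),
// i.e. lonLeftTop,latLeftTop,lonRightBottom,latRightBottom, produces the closed GeoJSON ring
// {"type":"Polygon","coordinates":[[[116.10,40.05],[116.50,40.05],[116.50,39.80],[116.10,39.80],[116.10,40.05]]]}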
@RequiresApi(Build.VERSION_CODES.M)
fun handlePermission() {
val permission = Manifest.permission.ACCESS_COARSE_LOCATION
val checkSelfPermission = ActivityCompat.checkSelfPermission(activity!!, permission)
// only prompt when the permission is missing and no rationale UI is required
if (checkSelfPermission != PackageManager.PERMISSION_GRANTED
&& !ActivityCompat.shouldShowRequestPermissionRationale(activity!!, permission)) {
myRequestPermission()
}
}
@RequiresApi(Build.VERSION_CODES.M)
private fun myRequestPermission() {
val permissions = arrayOf(Manifest.permission.ACCESS_COARSE_LOCATION,
Manifest.permission.ACCESS_FINE_LOCATION,
Manifest.permission.READ_PHONE_STATE,
Manifest.permission.WRITE_EXTERNAL_STORAGE)
requestPermissions(permissions, 1)
}
override fun onRequestPermissionsResult(requestCode: Int, permissions: Array<out String>, grantResults: IntArray) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults)
// if (grantResults[0] == PackageManager.PERMISSION_GRANTED) {
// }
}
override fun onDestroy() {
super.onDestroy()
mMapView!!.onDestroy()
handler.removeCallbacks(task)
}
override fun onResume() {
super.onResume()
mMapView!!.onResume()
handler.post(task)
}
override fun onPause() {
super.onPause()
mMapView!!.onPause()
}
inner class MyLocationListenner : BDLocationListener {
var lati: Double = 0.toDouble()
var longi: Double = 0.toDouble()
var address: String = ""
internal lateinit var poi: List<Poi>
override fun onReceiveLocation(location: BDLocation?) {
if (location == null || mMapView == null) {
return
}
val locData = MyLocationData.Builder()
.accuracy(0F)
.direction(mCurrentDirection.toFloat())
.latitude(location.latitude)
.longitude(location.longitude).build()
lati = location.latitude
longi = location.longitude
mCurrentLat = location.latitude
mCurrentLon = location.longitude
address = location.addrStr
mCurrentAccracy = location.radius
poi = location.poiList
mBaiduMap!!.setMyLocationData(locData)
if (isFirstLoc) {
isFirstLoc = false
val ll = LatLng(location.latitude,
location.longitude)
val builder = MapStatus.Builder()
builder.target(ll).zoom(8.0f)
mBaiduMap!!.animateMapStatus(MapStatusUpdateFactory.newMapStatus(builder.build()))
}
}
fun onConnectHotSpotMessage(s: String, i: Int) {
}
}
override fun onStart() {
super.onStart()
initMap()
}
}
<file_sep>package com.yunjishi.lixiang.yunjishi.service
import com.yunjishi.lixiang.yunjishi.presenter.data.bean.SubmitOrderBean
import io.reactivex.Observable
interface MissionService {
fun submitOrder(userId: String, demandType: String, demandGeo: String, resolution: String, startTime: String, endTime: String, times: String): Observable<SubmitOrderBean>
}<file_sep>package com.yunjishi.lixiang.yunjishi.view.activity
import android.app.Activity
import android.content.Intent
import android.graphics.Color
import android.support.v7.app.AppCompatActivity
import android.os.Bundle
import android.widget.AbsListView
import com.android.lixiang.base.utils.view.StatusBarUtil
import com.yunjishi.lixiang.yunjishi.R
import com.yunjishi.lixiang.yunjishi.view.adapter.SelectTypeParamsAdapter
import kotlinx.android.synthetic.main.activity_select_ratio.*
class SelectRatioActivity : AppCompatActivity() {
var titleList: MutableList<String>? = mutableListOf()
var adapter: SelectTypeParamsAdapter? = null
var index = -1
var INDEX = ""
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_select_ratio)
StatusBarUtil.setColor(this, Color.parseColor("#000000"), 0)
mSelectRatioToolbar.title = "分辨率"
setSupportActionBar(mSelectRatioToolbar)
supportActionBar!!.setDisplayHomeAsUpEnabled(true)
val intent = intent
val bundle = intent.extras
INDEX = bundle.getString("INDEX")
initView()
if (bundle.getString("INDEX") != "-1"){
adapter!!.setSelectedItem(INDEX.toInt())
adapter!!.notifyDataSetInvalidated()
}
mRatioListView.setOnItemClickListener { parent, view, position, id ->
index = position
println(position)
adapter!!.setSelectedItem(position)
adapter!!.notifyDataSetInvalidated()
}
mSelectRatioToolbar.setNavigationOnClickListener {
finishWithResult()
}
}
override fun onBackPressed() {
// hand the selection back instead of simply popping the activity
finishWithResult()
}
private fun finishWithResult() {
val intent = Intent()
// fall back to the previously selected index when nothing new was picked here
intent.putExtra("RATIO", if (index == -1 && INDEX != "-1") INDEX else index.toString())
setResult(Activity.RESULT_OK, intent)
finish()
}
private fun initView() {
titleList!!.add("小于 1m")
titleList!!.add("1m - 3m")
titleList!!.add("3m - 8m")
titleList!!.add("8m - 16m")
adapter = SelectTypeParamsAdapter(titleList, this)
mRatioListView.adapter = adapter
mRatioListView.choiceMode = AbsListView.CHOICE_MODE_SINGLE
}
}
<file_sep>package com.android.lixiang.base.database;
import org.greenrobot.greendao.annotation.Entity;
import org.greenrobot.greendao.annotation.Generated;
import org.greenrobot.greendao.annotation.Id;
/**
* Created by lixiang on 08/02/2018.
*/
@Entity(nameInDb = "user_profile")
public class UserProfile {
@Id
private String userId = null;
private String username = null;
private String level = null;
private String label = null;
private String userTelephone = null;
private String userPassword = null;
private String imagePath = null;
private String gmtCreated = null;
@Generated(hash = 1323035424)
public UserProfile(String userId, String username, String level, String label,
String userTelephone, String userPassword, String imagePath,
String gmtCreated) {
this.userId = userId;
this.username = username;
this.level = level;
this.label = label;
this.userTelephone = userTelephone;
this.userPassword = <PASSWORD>;
this.imagePath = imagePath;
this.gmtCreated = gmtCreated;
}
@Generated(hash = 968487393)
public UserProfile() {
}
public String getUserId() {
return this.userId;
}
public void setUserId(String userId) {
this.userId = userId;
}
public String getUsername() {
return this.username;
}
public void setUsername(String username) {
this.username = username;
}
public String getLevel() {
return this.level;
}
public void setLevel(String level) {
this.level = level;
}
public String getLabel() {
return this.label;
}
public void setLabel(String label) {
this.label = label;
}
public String getUserTelephone() {
return this.userTelephone;
}
public void setUserTelephone(String userTelephone) {
this.userTelephone = userTelephone;
}
public String getUserPassword() {
return this.userPassword;
}
public void setUserPassword(String userPassword) {
this.userPassword = <PASSWORD>;
}
public String getImagePath() {
return this.imagePath;
}
public void setImagePath(String imagePath) {
this.imagePath = imagePath;
}
public String getGmtCreated() {
return this.gmtCreated;
}
public void setGmtCreated(String gmtCreated) {
this.gmtCreated = gmtCreated;
}
}
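// Usage sketch (illustrative values; assumes the greenDAO-generated UserProfileDao
// exposed on the app's DaoSession):
//   UserProfile profile = new UserProfile("42", "alice", "1", "vip",
//           "13800000000", "secret", "/sdcard/avatar.png", "2018-02-08");
//   daoSession.getUserProfileDao().insertOrReplace(profile);
//   UserProfile loaded = daoSession.getUserProfileDao().load("42");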
<file_sep>package com.android.lixiang.base.database
//import android.content.Context
//import com.android.lixiang.base.database.DaoMaster
//import org.greenrobot.greendao.database.Database
//
///**
// * Created by lixiang on 08/02/2018.
// */
//class ReleaseOpenHelper(context: Context?, name: String?) : DaoMaster.OpenHelper(context, name) {
// override fun onCreate(db: Database?) {
// super.onCreate(db)
// }
//
//}
<file_sep>package com.android.lixiang.base.ui.activity
import com.trello.rxlifecycle2.components.support.RxAppCompatActivity
open class BaseActivity: RxAppCompatActivity(){
}<file_sep>package com.yunjishi.lixiang.yunjishi.view.fragment
import android.app.Activity
import android.content.Context
import android.content.Intent
import android.graphics.Color
import android.os.Bundle
import android.support.v4.app.Fragment
import android.support.v7.app.AlertDialog
import android.support.v7.app.AppCompatActivity
import android.view.Gravity
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import android.widget.EditText
import com.android.lixiang.base.ui.fragment.BaseMvpFragment
import com.android.lixiang.base.utils.view.StatusBarUtil
import com.orhanobut.logger.Logger
import com.yunjishi.lixiang.yunjishi.R
import com.yunjishi.lixiang.yunjishi.presenter.MissionPresenter
import com.yunjishi.lixiang.yunjishi.presenter.ParamsPresenter
import com.yunjishi.lixiang.yunjishi.presenter.data.bean.SubmitOrderBean
import com.yunjishi.lixiang.yunjishi.presenter.injection.component.DaggerParamsComponent
import com.yunjishi.lixiang.yunjishi.presenter.injection.module.MissionModule
import com.yunjishi.lixiang.yunjishi.presenter.injection.module.ParamsModule
import com.yunjishi.lixiang.yunjishi.presenter.view.MissionView
import com.yunjishi.lixiang.yunjishi.presenter.view.ParamsView
import com.yunjishi.lixiang.yunjishi.view.activity.MainActivity
import com.yunjishi.lixiang.yunjishi.view.activity.SelectRatioActivity
import com.yunjishi.lixiang.yunjishi.view.activity.SelectTimeActivity
import com.yunjishi.lixiang.yunjishi.view.activity.SelectTypeParamsActivity
import com.yunjishi.lixiang.yunjishi.view.adapter.SelectParamsAdapter
import kotlinx.android.synthetic.main.fragment_mission.*
import kotlinx.android.synthetic.main.fragment_params.*
class ParamsFragment : BaseMvpFragment<ParamsPresenter>(), ParamsView {
override fun injectComponent() {
DaggerParamsComponent.builder().fragmentComponent(fragmentComponent)
.paramsModule(ParamsModule())
.build().inject(this)
}
override fun onSubmitOrderResult(res: SubmitOrderBean) {
println("onSubmitOrderResult$res")
(activity as MainActivity).changeFragment(5)
}
private var titleList: MutableList<String>? = mutableListOf()
private var subTitleList: MutableList<String>? = mutableListOf()
private var detailList: MutableList<String>? = mutableListOf()
private var typeList: MutableList<String>? = mutableListOf()
private var ratioList: MutableList<String>? = mutableListOf()
private var adapter: SelectParamsAdapter? = null
private var typeIndex = -1
private var ratioIndex = -1
private var userId: String? = null
private var demandType: String? = null
private var demandGeo: String? = null
private var resolution: String? = null
private var startTime: String? = null
private var endTime: String? = null
private var timesParam: String? = null
private var time: String? = ""
private var times: String? = ""
private var TIMES: String? = ""
private var flag1 = false
private var flag2 = false
private var flag3 = false
private var flag4 = false
private var pageIndex = 0
override fun onAttach(activity: Activity?) {
super.onAttach(activity)
userId = (activity as MainActivity).getUserID()
Logger.d(userId)
}
override fun onCreateView(inflater: LayoutInflater, container: ViewGroup?,
savedInstanceState: Bundle?): View? {
// Inflate the layout for this fragment
return inflater.inflate(R.layout.fragment_params, container, false)
}
override fun onActivityCreated(savedInstanceState: Bundle?) {
super.onActivityCreated(savedInstanceState)
initViews()
}
private fun initViews() {
titleList!!.add("类型")
titleList!!.add("分辨率")
titleList!!.add("拍摄时间")
titleList!!.add("拍摄频次")
typeList!!.add("标准卫星图")
typeList!!.add("夜光卫星图")
typeList!!.add("卫星视频")
ratioList!!.add("小于 1m")
ratioList!!.add("1m - 3m")
ratioList!!.add("3m - 8m")
ratioList!!.add("8m - 16m")
detailList!!.add("请选择")
detailList!!.add("请选择")
detailList!!.add("请选择")
detailList!!.add("请选择")
subTitleList!!.add("选择您想拍摄的影响类型")
subTitleList!!.add("选择的分辨率值越低,清晰度越高")
subTitleList!!.add("您想在什么时间段进行拍摄")
subTitleList!!.add("在你您选择的时间范围内,您想拍摄几次")
StatusBarUtil.setColor(activity, Color.parseColor("#000000"), 0)
mParamsToolbar.title = "请选择拍摄时的参数"
(activity as AppCompatActivity).setSupportActionBar(mParamsToolbar)
(activity as AppCompatActivity).supportActionBar!!.setDisplayHomeAsUpEnabled(true)
mParamsToolbar.setNavigationOnClickListener {
(activity as MainActivity).changeFragment(2)
}
adapter = SelectParamsAdapter(titleList, subTitleList, detailList, activity)
mParamsListView.adapter = adapter
mParamsToolbar.setNavigationOnClickListener {
(activity as MainActivity).changeFragment(3)
}
mParamsListView.setOnItemClickListener { parent, view, position, id ->
when (position) {
0 -> {
val intent = Intent(activity, SelectTypeParamsActivity::class.java)
val bundle = Bundle()
bundle.putString("INDEX", typeIndex.toString())
intent.putExtras(bundle)
startActivityForResult(intent, 0)
}
1 -> {
val intent = Intent(activity, SelectRatioActivity::class.java)
val bundle = Bundle()
bundle.putString("INDEX", ratioIndex.toString())
intent.putExtras(bundle)
startActivityForResult(intent, 1)
}
2 -> {
val intent = Intent(activity, SelectTimeActivity::class.java)
val bundle = Bundle()
bundle.putString("TIME", time)
intent.putExtras(bundle)
startActivityForResult(intent, 2)
}
3 -> {
initNumberPicker()
}
}
}
mParamsDoneTextView.setOnClickListener {
val sp = activity!!.getSharedPreferences("GEO", Context.MODE_PRIVATE)
demandGeo = sp.getString("geo", "")
mPresenter.mView = this
println(userId!! + " " + demandType!! + " " + demandGeo!!
+ " " + resolution!! + " " + startTime!! + " " + endTime!! + " " + timesParam!!)
mPresenter.submitOrder(userId!!, demandType!!, demandGeo!!
, resolution!!, startTime!!, endTime!!, timesParam!!)
sp.edit().clear().commit()
}
}
private fun initNumberPicker() {
val builder = AlertDialog.Builder(activity!!)
builder.setTitle("拍摄频次")
val inflater = LayoutInflater.from(activity)
val v = inflater.inflate(R.layout.item_dialog, null)
builder.setView(v)
builder.setNegativeButton("取消") { arg0, arg1 ->
arg0.dismiss()
}
builder.setPositiveButton("确定") { arg0, arg1 ->
val et = v.findViewById(R.id.mEditText) as EditText
times = et.text.toString()
arg0.dismiss()
if (times != "") {
detailList!![3] = times.toString()
timesParam = times
TIMES = times
flag4 = true
adapter!!.notifyDataSetChanged()
} else if (times == "" && TIMES != "") {
detailList!![3] = TIMES.toString()
timesParam = TIMES
TIMES = times
flag4 = true
adapter!!.notifyDataSetChanged()
}
checkDone()
}
val dialog = builder.create()
dialog.show()
dialog.window.setGravity(Gravity.CENTER) // center the dialog on screen
}
private fun checkDone() {
if (flag1 && flag2 && flag3 && flag4) {
mParamsDoneTextView.visibility = View.VISIBLE
}
}
override fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent?) {
super.onActivityResult(requestCode, resultCode, data)
// a cancelled child activity may return no data
if (data == null) return
println(data.getStringExtra("TYPE"))
if (requestCode == 0 && resultCode == AppCompatActivity.RESULT_OK) {
val detail = data!!.getStringExtra("TYPE")
typeIndex = detail.toInt()
println("typeIndex$typeIndex")
if (detail != "-1") {
detailList!![0] = typeList!![detail.toInt()]
flag1 = true
adapter!!.notifyDataSetChanged()
checkDone()
demandType = typeIndex.toString()
}
} else if (requestCode == 1 && resultCode == AppCompatActivity.RESULT_OK) {
val detail = data!!.getStringExtra("RATIO")
ratioIndex = detail.toInt()
if (detail != "-1") {
detailList!![1] = ratioList!![detail.toInt()]
flag2 = true
adapter!!.notifyDataSetChanged()
resolution = ratioList!![detail.toInt()]
checkDone()
}
} else if (requestCode == 2 && resultCode == AppCompatActivity.RESULT_OK) {
val detail = data!!.getStringExtra("TIME")
time = detail
if (detail != "-") {
detailList!![2] = detail
flag3 = true
adapter!!.notifyDataSetChanged()
startTime = detail.split("-")[0]
endTime = detail.split("-")[1]
checkDone()
}
}
}
}
<file_sep>package com.yunjishi.lixiang.yunjishi.view.fragment
import android.app.Activity
import android.content.Context
import android.content.Intent
import android.content.SharedPreferences
import android.net.Uri
import android.os.Bundle
import android.support.v4.app.Fragment
import android.support.v7.app.AppCompatActivity
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import android.webkit.ValueCallback
import android.webkit.WebChromeClient
import com.github.lzyzsd.jsbridge.BridgeHandler
import com.github.lzyzsd.jsbridge.DefaultHandler
import com.yunjishi.lixiang.yunjishi.R
import com.yunjishi.lixiang.yunjishi.view.activity.MainActivity
import com.yunjishi.lixiang.yunjishi.view.activity.OrderDetailActivity
import kotlinx.android.synthetic.main.fragment_earth.*
import kotlinx.android.synthetic.main.fragment_login.*
import kotlinx.android.synthetic.main.fragment_mission.*
import kotlinx.android.synthetic.main.fragment_order.*
class OrderFragment : Fragment() {
var mUploadMessage: ValueCallback<Uri>? = null
private var userId: String? = null
var mSharedPreferences: SharedPreferences? = null
override fun onCreateView(inflater: LayoutInflater, container: ViewGroup?,
savedInstanceState: Bundle?): View? {
// Inflate the layout for this fragment
return inflater.inflate(R.layout.fragment_order, container, false)
}
override fun onAttach(activity: Activity?) {
super.onAttach(activity)
userId = (activity as MainActivity).getUserID()
}
override fun onActivityCreated(savedInstanceState: Bundle?) {
super.onActivityCreated(savedInstanceState)
initView()
}
private fun initView() {
mOrderFragmentWebView.setBackgroundColor(0);
mOrderFragmentWebView.setDefaultHandler(DefaultHandler())
mOrderFragmentWebView.webChromeClient = object : WebChromeClient() {
fun openFileChooser(uploadMsg: ValueCallback<Uri>, AcceptType: String, capture: String) {
this.openFileChooser(uploadMsg)
}
fun openFileChooser(uploadMsg: ValueCallback<Uri>, AcceptType: String) {
this.openFileChooser(uploadMsg)
}
fun openFileChooser(uploadMsg: ValueCallback<Uri>) {
mUploadMessage = uploadMsg
}
}
com.orhanobut.logger.Logger.d("http://172.16.17.32:12380/demand.html?userId=$userId")
mOrderFragmentWebView.loadUrl("http://172.16.17.32:12380/demand.html?userId=$userId")
mOrderFragmentWebView.registerHandler("demandShow", BridgeHandler { data, function ->
println("demandId$data")
if (data != null) {
val intent = Intent(activity, OrderDetailActivity::class.java)
val bundle = Bundle()
bundle.putString("DEMAND_ID", data)
bundle.putString("USER_ID", userId)
intent.putExtras(bundle)
startActivity(intent)
}
})
mOrderFragmentWebView.registerHandler("productShow", BridgeHandler { data, function ->
println("productId$data")
if (data != null) {
mSharedPreferences = activity!!.getSharedPreferences("XXX", Context.MODE_PRIVATE)
val editor = mSharedPreferences!!.edit()
editor.putString("PRODUCT_ID", data)
editor.commit()
val intent = activity!!.intent
activity!!.overridePendingTransition(0, 0)
activity!!.finish()
activity!!.overridePendingTransition(0, 0)
startActivity(intent)
}
})
}
}<file_sep>package com.yunjishi.lixiang.yunjishi.presenter.data.api
import com.yunjishi.lixiang.yunjishi.presenter.data.bean.DemandDetailBean
import com.yunjishi.lixiang.yunjishi.presenter.data.bean.SubmitOrderBean
import io.reactivex.Observable
import retrofit2.http.*
interface Api {
@POST("submitOrder")
@FormUrlEncoded
fun submitOrder(@Field("userId") userId: String,
@Field("demandType") demandType: String, @Field("demandGeo") demandGeo: String,
@Field("resolution") resolution: String, @Field("startTime") startTime: String,
@Field("endTime") endTime: String, @Field("times") times: String): Observable<SubmitOrderBean>
@Headers("Content-Type:text/html;charset=utf-8", "Accept:application/json;")
@GET("getDemandDetail")
fun getDemandDetail(@Query("userId") userId: String,
@Query("demandId") demandId: String): Observable<DemandDetailBean>
}<file_sep>package com.yunjishi.lixiang.yunjishi.view.activity
import android.content.Context
import android.net.Uri
import android.support.v7.app.AppCompatActivity
import android.os.Bundle
import com.github.barteksc.pdfviewer.listener.OnLoadCompleteListener
import com.github.barteksc.pdfviewer.listener.OnPageChangeListener
import com.github.barteksc.pdfviewer.listener.OnPageErrorListener
import com.github.barteksc.pdfviewer.scroll.DefaultScrollHandle
import com.yunjishi.lixiang.yunjishi.R
import es.voghdev.pdfviewpager.library.remote.DownloadFile
import kotlinx.android.synthetic.main.activity_pdf_viewer.*
import kotlinx.android.synthetic.main.fragment_mission.*
import java.lang.Exception
import es.voghdev.pdfviewpager.library.RemotePDFViewPager
import es.voghdev.pdfviewpager.library.adapter.PDFPagerAdapter
import es.voghdev.pdfviewpager.library.util.FileUtil
import android.widget.LinearLayout
import android.content.Intent
import android.graphics.Color
import android.view.View
import android.widget.Button
import android.widget.EditText
import com.android.lixiang.base.utils.view.StatusBarUtil
import kotlinx.android.synthetic.main.activity_select_time.*
//class PdfViewerActivity : AppCompatActivity(), DownloadFile.Listener {
// var adapter: PDFPagerAdapter? = null
// var remotePDFViewPager: RemotePDFViewPager? = null
//
// override fun onSuccess(url: String?, destinationPath: String?) {
// adapter = PDFPagerAdapter(this, FileUtil.extractFileNameFromURL(url))
// remotePDFViewPager!!.setAdapter(adapter)
// }
//
// override fun onFailure(e: Exception?) {
// }
//
// override fun onProgressUpdate(progress: Int, total: Int) {
// }
//
//
// override fun onCreate(savedInstanceState: Bundle?) {
// super.onCreate(savedInstanceState)
// setContentView(R.layout.activity_pdf_viewer)
// mPdfToolbar.title = "我的订单"
// setSupportActionBar(mPdfToolbar)
// supportActionBar!!.setDisplayHomeAsUpEnabled(true)
// mPdfToolbar.setNavigationOnClickListener {
// finish()
// }
// val url = "http://www.cals.uidaho.edu/edComm/curricula/CustRel_curriculum/content/sample.pdf"
//
// remotePDFViewPager = RemotePDFViewPager(this, url, this)
//
//
// }
//
// override fun onDestroy() {
// super.onDestroy()
//
// adapter!!.close()
// }
//}
class PdfViewerActivity : AppCompatActivity(), DownloadFile.Listener {
internal lateinit var root: LinearLayout
internal lateinit var remotePDFViewPager: RemotePDFViewPager
internal var adapter: PDFPagerAdapter? = null
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_pdf_viewer)
StatusBarUtil.setColor(this, Color.parseColor("#000000"), 0)
mPdfToolbar.title = "详情"
setSupportActionBar(mPdfToolbar)
supportActionBar!!.setDisplayHomeAsUpEnabled(true)
remotePDFViewPager = RemotePDFViewPager(this, intent.extras.getString("URL"), this)
remotePDFViewPager.id = R.id.pdfViewPager
remote_pdf_root.addView(remotePDFViewPager,
LinearLayout.LayoutParams.MATCH_PARENT, LinearLayout.LayoutParams.MATCH_PARENT)
mPdfToolbar.setNavigationOnClickListener {
this.finish()
}
}
override fun onDestroy() {
super.onDestroy()
if (adapter != null) {
adapter!!.close()
}
}
override fun onSuccess(url: String, destinationPath: String) {
adapter = PDFPagerAdapter(this, FileUtil.extractFileNameFromURL(url))
remotePDFViewPager.adapter = adapter
}
override fun onFailure(e: Exception) {
e.printStackTrace()
}
override fun onProgressUpdate(progress: Int, total: Int) {
}
companion object {
fun open(context: Context) {
val i = Intent(context, PdfViewerActivity::class.java)
context.startActivity(i)
}
}
}
<file_sep>package com.yunjishi.lixiang.yunjishi.service
interface MainService {
}<file_sep>package com.yunjishi.lixiang.yunjishi.presenter.view
import com.android.lixiang.base.presenter.view.BaseView
import com.yunjishi.lixiang.yunjishi.presenter.data.bean.SubmitOrderBean
interface MissionView : BaseView {
fun onSubmitOrderResult(res: SubmitOrderBean)
}<file_sep>package com.android.lixiang.base.ui.fragment
import com.trello.rxlifecycle2.components.support.RxAppCompatActivity
import com.trello.rxlifecycle2.components.support.RxFragment
open class BaseFragment: RxFragment(){
}<file_sep>package com.yunjishi.lixiang.yunjishi.presenter.view
import com.android.lixiang.base.presenter.view.BaseView
import com.yunjishi.lixiang.yunjishi.presenter.data.bean.DemandDetailBean
import com.yunjishi.lixiang.yunjishi.presenter.data.bean.SubmitOrderBean
interface OrderDetailView : BaseView {
fun onGetDemandDetailResult(res: DemandDetailBean)
}<file_sep>package com.yunjishi.lixiang.yunjishi.presenter.injection.component
import com.android.lixiang.base.injection.ComponentScope
import com.android.lixiang.base.injection.component.ActivityComponent
import com.android.lixiang.base.injection.component.FragmentComponent
import com.yunjishi.lixiang.yunjishi.presenter.injection.module.MissionModule
import com.yunjishi.lixiang.yunjishi.presenter.injection.module.OrderDetailModule
import com.yunjishi.lixiang.yunjishi.view.activity.OrderDetailActivity
import com.yunjishi.lixiang.yunjishi.view.fragment.MissionFragment
import dagger.Component
@ComponentScope
@Component(dependencies = arrayOf(ActivityComponent::class), modules = arrayOf(OrderDetailModule::class))
interface OrderDetailComponent {
fun inject(activity: OrderDetailActivity)
}<file_sep>package com.yunjishi.lixiang.yunjishi.presenter.data.repository
import com.android.lixiang.base.common.BaseConstant
import com.android.lixiang.base.data.net.RetrofitFactory
import com.yunjishi.lixiang.yunjishi.presenter.data.bean.SubmitOrderBean
import com.yunjishi.lixiang.yunjishi.presenter.data.api.Api
import com.yunjishi.lixiang.yunjishi.presenter.data.bean.DemandDetailBean
import io.reactivex.Observable
import javax.inject.Inject
class OrderDetailRepository @Inject constructor() {
fun getDemandDetail(userId: String, demandId: String): Observable<DemandDetailBean> {
return RetrofitFactory(BaseConstant.SERVER_ADDRESS_8081)
.create(Api::class.java)
.getDemandDetail(userId, demandId)
}
}<file_sep>package com.yunjishi.lixiang.yunjishi.presenter
import com.android.lixiang.base.ext.execute
import com.android.lixiang.base.presenter.BasePresenter
import com.android.lixiang.base.rx.BaseObserver
import com.yunjishi.lixiang.yunjishi.presenter.data.bean.SubmitOrderBean
import com.yunjishi.lixiang.yunjishi.presenter.view.MissionView
import com.yunjishi.lixiang.yunjishi.service.MissionService
import javax.inject.Inject
class MissionPresenter @Inject constructor() : BasePresenter<MissionView>() {
@Inject
lateinit var mMissionService: MissionService
fun submitOrder(userId: String, demandType: String, demandGeo: String, resolution: String, startTime: String, endTime: String, times: String) {
mMissionService.submitOrder(userId, demandType, demandGeo, resolution, startTime, endTime, times).execute(object : BaseObserver<SubmitOrderBean>() {
override fun onNext(t: SubmitOrderBean) {
super.onNext(t)
mView.onSubmitOrderResult(t)
}
}, lifecycleProvider)
}
}<file_sep>package com.yunjishi.lixiang.yunjishi.view.activity
import android.annotation.SuppressLint
import android.app.Activity
import android.app.DatePickerDialog
import android.content.Intent
import android.graphics.Color
import android.support.v7.app.AppCompatActivity
import android.os.Bundle
import android.widget.AbsListView
import android.widget.Toast
import com.android.lixiang.base.utils.view.StatusBarUtil
import com.yunjishi.lixiang.yunjishi.R
import com.yunjishi.lixiang.yunjishi.view.adapter.SelectTimeAdapter
import kotlinx.android.synthetic.main.activity_select_time.*
import java.text.SimpleDateFormat
import java.util.*
class SelectTimeActivity : AppCompatActivity() {
var ca = Calendar.getInstance()
var mYear = ca.get(Calendar.YEAR)
var mMonth = ca.get(Calendar.MONTH)
var mDay = ca.get(Calendar.DAY_OF_MONTH)
var titleList: MutableList<String>? = mutableListOf()
var detailList: MutableList<String>? = mutableListOf()
var startTime = String()
var endTime = String()
var adapter: SelectTimeAdapter? = null
var index = -1
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_select_time)
StatusBarUtil.setColor(this, Color.parseColor("#000000"), 0)
mSelectTimeToolbar.title = "选择拍摄时间"
setSupportActionBar(mSelectTimeToolbar)
supportActionBar!!.setDisplayHomeAsUpEnabled(true)
val intent = intent
val bundle = intent.extras
val TIME = bundle.getString("TIME")
initView()
println("TIME1$TIME")
if (TIME != ""){
println("TIME2$TIME")
detailList!![0] = TIME.split("-")[0]
detailList!![1] = TIME.split("-")[1]
adapter!!.notifyDataSetChanged()
}
mTimeListView.setOnItemClickListener { parent, view, position, id ->
when (position) {
0 -> {
DatePickerDialog(this, R.style.MyDatePickerDialogTheme, onDateSetListener, mYear, mMonth, mDay).show()
}
1 -> {
DatePickerDialog(this, R.style.MyDatePickerDialogTheme, onDateSetListener2, mYear, mMonth, mDay).show()
}
}
}
mSelectTimeToolbar.setNavigationOnClickListener {
finishWithResult()
}
}
override fun onBackPressed() {
// hand the chosen time range back instead of simply popping the activity
finishWithResult()
}
private fun finishWithResult() {
if (compare_date(detailList!![0], detailList!![1]) == 1 || detailList!![0] == "" || detailList!![1] == "") {
Toast.makeText(this, "请输入正确的时间", Toast.LENGTH_SHORT).show()
} else {
val intent = Intent()
intent.putExtra("TIME", detailList!![0] + "-" + detailList!![1])
setResult(Activity.RESULT_OK, intent)
this.finish()
}
}
private fun initView() {
titleList!!.add("拍摄起始时间")
titleList!!.add("拍摄终止时间")
detailList!!.add("")
detailList!!.add("")
adapter = SelectTimeAdapter(titleList, detailList, this)
mTimeListView.adapter = adapter
mTimeListView.choiceMode = AbsListView.CHOICE_MODE_SINGLE
}
private val onDateSetListener = DatePickerDialog.OnDateSetListener { _, year, monthOfYear, dayOfMonth ->
updateDate(0, year, monthOfYear, dayOfMonth)
}
private val onDateSetListener2 = DatePickerDialog.OnDateSetListener { _, year, monthOfYear, dayOfMonth ->
updateDate(1, year, monthOfYear, dayOfMonth)
}
// shared formatter for both pickers: stores the picked date as yyyy.MM.dd with zero-padded month and day
private fun updateDate(slot: Int, year: Int, monthOfYear: Int, dayOfMonth: Int) {
mYear = year
mMonth = monthOfYear
mDay = dayOfMonth
detailList!![slot] = String.format(Locale.US, "%d.%02d.%02d", mYear, mMonth + 1, mDay)
adapter!!.notifyDataSetChanged()
}
@SuppressLint("SimpleDateFormat")
private fun compare_date(DATE1: String, DATE2: String): Int {
val df = SimpleDateFormat("yyyy.MM.dd")
try {
val dt1 = df.parse(DATE1)
val dt2 = df.parse(DATE2)
return when {
dt1.time > dt2.time -> {
1
}
dt1.time < dt2.time -> {
-1
}
else -> 0
}
} catch (exception: Exception) {
exception.printStackTrace()
}
return 0
}
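// Example (illustrative dates): compare_date("2018.01.05", "2018.02.01") returns -1,
// the reverse order returns 1, equal dates return 0, and unparseable input falls through to 0.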
}
<file_sep>package com.yunjishi.lixiang.yunjishi.service.impl
import com.yunjishi.lixiang.yunjishi.presenter.data.bean.SubmitOrderBean
import com.yunjishi.lixiang.yunjishi.presenter.data.repository.MainRepository
import com.yunjishi.lixiang.yunjishi.service.MissionService
import com.yunjishi.lixiang.yunjishi.service.ParamsService
import io.reactivex.Observable
import io.reactivex.ObservableSource
import io.reactivex.functions.Function
import javax.inject.Inject
class ParamsServiceImpl @Inject constructor(): ParamsService {
@Inject
lateinit var mainRepository: MainRepository
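// submitOrder delegates straight to MainRepository; the flatMap below re-emits the
// SubmitOrderBean unchanged, so the service layer currently adds no extra mapping.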
override fun submitOrder(userId: String, demandType: String, demandGeo: String, resolution: String, startTime: String, endTime: String, times: String): Observable<SubmitOrderBean> {
return mainRepository.submitOrder(userId, demandType, demandGeo, resolution, startTime, endTime, times).flatMap(Function<SubmitOrderBean, ObservableSource<SubmitOrderBean>> { t ->
return@Function Observable.just(t)
})
}
}<file_sep>package com.yunjishi.lixiang.yunjishi.presenter.view
import com.android.lixiang.base.presenter.view.BaseView
import com.yunjishi.lixiang.yunjishi.presenter.data.bean.SubmitOrderBean
interface ParamsView : BaseView {
fun onSubmitOrderResult(res: SubmitOrderBean)
}<file_sep>package com.yunjishi.lixiang.yunjishi.view.fragment
import android.annotation.SuppressLint
import android.app.Activity
import android.graphics.Color
import android.os.Bundle
import android.support.v4.app.Fragment
import android.support.v7.widget.AppCompatImageView
import android.support.v7.widget.AppCompatTextView
import android.util.TypedValue
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import android.widget.ImageView
import android.widget.LinearLayout
import android.widget.RelativeLayout
import com.android.lixiang.base.utils.view.DimenUtil
import com.orhanobut.logger.Logger
import com.yunjishi.lixiang.yunjishi.R
import com.yunjishi.lixiang.yunjishi.view.activity.MainActivity
import kotlinx.android.synthetic.main.fragment_login.*
import org.jetbrains.anko.support.v4.act
import java.util.*
class DefaultFragment : Fragment(), View.OnClickListener {
private var mImageView1: AppCompatImageView? = null
private var mImageView2: AppCompatImageView? = null
private var userId: String? = null
override fun onAttach(activity: Activity?) {
super.onAttach(activity)
userId = (activity as MainActivity).getUserID()
Logger.d(userId)
}
override fun onClick(v: View?) {
when (v) {
mImageView1 -> {
if (userId != "-1")
(activity as MainActivity).changeFragment(3)
else {
var mLoginLayout = activity!!.findViewById<LinearLayout>(R.id.mLoginLayout)
mLoginLayout.visibility = View.VISIBLE
}
}
mImageView2 -> {
if (userId != "-1")
(activity as MainActivity).changeFragment(5)
else {
var mLoginLayout = activity!!.findViewById<LinearLayout>(R.id.mLoginLayout)
mLoginLayout.visibility = View.VISIBLE
}
}
}
}
override fun onCreateView(inflater: LayoutInflater, container: ViewGroup?,
savedInstanceState: Bundle?): View? {
return inflater.inflate(R.layout.fragment_login, container, false)
}
@SuppressLint("ResourceType")
override fun onActivityCreated(savedInstanceState: Bundle?) {
super.onActivityCreated(savedInstanceState)
val screenHeight = activity!!.application.resources.displayMetrics.heightPixels.toFloat()
val screenWidth = activity!!.application.resources.displayMetrics.widthPixels.toFloat()
// size the two tiles from the screen width (in dp), minus the outer and middle margins
val imageWidth = (DimenUtil().px2dip(context!!, screenWidth) - 60 - 60 - 35) * 0.5.toFloat()
val textMargin = imageWidth + 82
val imageHeight = imageWidth * 0.5.toFloat()
val textMarginHeight = (imageHeight * 149) / 216 + 34
mImageView1 = AppCompatImageView(activity)
mImageView1!!.id = 1
val mImageView1LayoutParams = RelativeLayout.LayoutParams(DimenUtil().dip2px(context!!, imageWidth), DimenUtil().dip2px(context!!, imageHeight))
mImageView1LayoutParams.setMargins(DimenUtil().dip2px(context!!, 30.toFloat()), DimenUtil().dip2px(context!!, 34.toFloat()), 0, 0)
mImageView1!!.setBackgroundResource(R.drawable.img_submit)
mImageView1!!.layoutParams = mImageView1LayoutParams
mMapFragmentRelativeLayout.addView(mImageView1)
mImageView2 = AppCompatImageView(activity)
mImageView2!!.id = 2
val mImageView2LayoutParams = RelativeLayout.LayoutParams(DimenUtil().dip2px(context!!, imageWidth), DimenUtil().dip2px(context!!, imageHeight))
mImageView2LayoutParams.setMargins(0, DimenUtil().dip2px(context!!, 34.toFloat()), DimenUtil().dip2px(context!!, 30.toFloat()), 0)
mImageView2LayoutParams.addRule(RelativeLayout.ALIGN_PARENT_RIGHT)
mImageView2!!.setBackgroundResource(R.drawable.img_order)
mImageView2!!.layoutParams = mImageView2LayoutParams
mMapFragmentRelativeLayout.addView(mImageView2)
mImageView1!!.setOnClickListener(this)
mImageView2!!.setOnClickListener(this)
val T1 = AppCompatTextView(activity)
val T1L = RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.WRAP_CONTENT, RelativeLayout.LayoutParams.WRAP_CONTENT)
T1L.setMargins(DimenUtil().dip2px(context!!, 48.toFloat()), DimenUtil().dip2px(context!!, textMarginHeight), 0, 0)
T1.setTextColor(Color.parseColor("#FFFFFF"))
T1.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14F)
T1.layoutParams = T1L
T1.text = "在线提交"
mMapFragmentRelativeLayout.addView(T1)
val T2 = AppCompatTextView(activity)
val T2L = RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.WRAP_CONTENT, RelativeLayout.LayoutParams.WRAP_CONTENT)
T2L.setMargins(DimenUtil().dip2px(context!!, 48.toFloat()), DimenUtil().dip2px(context!!, textMarginHeight + 30), 0, 0)
T2.setTextColor(Color.parseColor("#F5A623"))
T2.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14F)
T2.layoutParams = T2L
T2.text = "在线提交拍摄需求,调动卫星为您拍摄"
mMapFragmentRelativeLayout.addView(T2)
val T3 = AppCompatTextView(activity)
val T3L = RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.WRAP_CONTENT, RelativeLayout.LayoutParams.WRAP_CONTENT)
T3L.setMargins(DimenUtil().dip2px(context!!, textMargin), DimenUtil().dip2px(context!!, textMarginHeight), 0, 0)
T3.setTextColor(Color.parseColor("#FFFFFF"))
T3.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14F)
T3.text = "我的订单"
T3.layoutParams = T3L
mMapFragmentRelativeLayout.addView(T3)
val T4 = AppCompatTextView(activity)
val T4L = RelativeLayout.LayoutParams(RelativeLayout.LayoutParams.WRAP_CONTENT, RelativeLayout.LayoutParams.WRAP_CONTENT)
T4L.setMargins(DimenUtil().dip2px(context!!, textMargin), DimenUtil().dip2px(context!!, textMarginHeight + 30), 0, 0)
T4.setTextColor(Color.parseColor("#F5A623"))
T4.setTextSize(TypedValue.COMPLEX_UNIT_DIP, 14F)
T4.text = "实时查看我的订单"
T4.layoutParams = T4L
mMapFragmentRelativeLayout.addView(T4)
}
}
<file_sep>package com.yunjishi.lixiang.yunjishi.view.activity
import android.content.BroadcastReceiver
import android.content.Context
import android.content.Intent
import android.content.IntentFilter
import android.graphics.Color
import android.net.Uri
import android.support.v7.app.AppCompatActivity
import android.os.Bundle
import android.support.design.widget.Snackbar
import android.support.v4.app.Fragment
import android.view.KeyEvent
import android.view.View
import android.webkit.ValueCallback
import android.webkit.WebChromeClient
import android.widget.Toast
import com.android.lixiang.base.utils.view.StatusBarUtil
import com.yunjishi.lixiang.yunjishi.R
import kotlinx.android.synthetic.main.activity_main.*
import java.util.*
import com.github.ikidou.fragmentBackHandler.BackHandlerHelper
import com.github.lzyzsd.jsbridge.DefaultHandler
import com.google.gson.Gson
import com.orhanobut.logger.Logger
import com.yunjishi.lixiang.yunjishi.NetworkChangeReceiver
import com.yunjishi.lixiang.yunjishi.presenter.data.bean.UserBean2
import com.yunjishi.lixiang.yunjishi.presenter.database.DaoMaster
import com.yunjishi.lixiang.yunjishi.presenter.database.DaoSession
import com.yunjishi.lixiang.yunjishi.presenter.database.DataBaseManager
import org.greenrobot.greendao.database.Database
import com.yunjishi.lixiang.yunjishi.view.fragment.*
class MainActivity : AppCompatActivity() {
private var firstTime: Long = 0
var broadcastReceiver: BroadcastReceiver = object : BroadcastReceiver() {
override fun onReceive(context: Context?, intent: Intent?) {
mNoAccessRelativeLayout.visibility = View.VISIBLE
}
}
var broadcastReceiver2: BroadcastReceiver = object : BroadcastReceiver() {
override fun onReceive(context: Context?, intent: Intent?) {
mNoAccessRelativeLayout.visibility = View.INVISIBLE
}
}
var mUploadMessage: ValueCallback<Uri>? = null
var mDaoSession: DaoSession? = null
var userBean = UserBean2()
private var intentFilter: IntentFilter? = null
private var networkChangeReceiver: NetworkChangeReceiver? = null
var mactivity: MainActivity? = null
var mCurrentFragment: Fragment? = null
override fun onResume() {
super.onResume()
checkAccess()
}
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_main)
mactivity = this
StatusBarUtil.setColor(this, Color.parseColor("#262626"), 0)
registerReceiver(broadcastReceiver, IntentFilter("NO_ACCESS"))
registerReceiver(broadcastReceiver2, IntentFilter("ACCESS"))
mDaoSession = DataBaseManager().initDao(this)
initFragment()
initLogin()
initLogout()
checkLogin()
initNavigationView()
changeFragment(0)
mAvatarImageView.setOnClickListener {
println(checkLogin())
if (checkLogin()) {
mLogoutLayout.visibility = View.VISIBLE
} else
mLoginLayout.visibility = View.VISIBLE
}
}
private fun checkAccess() {
intentFilter = IntentFilter()
intentFilter!!.addAction("android.net.conn.CONNECTIVITY_CHANGE")
// var dianLiangBR = NetworkChangeReceiver()
networkChangeReceiver = NetworkChangeReceiver()
registerReceiver(networkChangeReceiver, intentFilter)
// dianLiangBR.setBRInteractionListener(this)
}
private fun initLogout() {
mLogoutWebView.setBackgroundColor(0)
mLogoutWebView.setDefaultHandler(DefaultHandler())
mLogoutWebView.webChromeClient = object : WebChromeClient() {
fun openFileChooser(uploadMsg: ValueCallback<Uri>, AcceptType: String, capture: String) {
this.openFileChooser(uploadMsg)
}
fun openFileChooser(uploadMsg: ValueCallback<Uri>, AcceptType: String) {
this.openFileChooser(uploadMsg)
}
fun openFileChooser(uploadMsg: ValueCallback<Uri>) {
mUploadMessage = uploadMsg
}
}
mLogoutWebView.loadUrl("http://172.16.58.3:12380/logout.html")
mLogoutWebView.registerHandler("closeLoginPage") { data, function ->
println("data$data")
if (data == "closeLoginPage") {
mLogoutLayout.visibility = View.INVISIBLE
}
}
mLogoutWebView.registerHandler("logoutCancel") { data, function ->
println("data$data")
if (data == "logoutCancel") {
mLogoutLayout.visibility = View.INVISIBLE
}
}
mLogoutWebView.registerHandler("logoutSuccess") { data, function ->
println("data$data")
if (data == "logoutSuccess") {
mLogoutLayout.visibility = View.INVISIBLE
mDaoSession!!.userBean2Dao.deleteAll()
mAvatarImageView.setImageResource(R.drawable.ic_avatar)
val intent = intent
overridePendingTransition(0, 0)
finish()
overridePendingTransition(0, 0)
startActivity(intent)
}
}
}
private fun checkLogin(): Boolean {
if (mDaoSession!!.userBean2Dao!!.loadAll().isEmpty()) {
println("---->>>>")
println("未登录")
return false
} else {
println("---->>>>")
println("已登陆")
mAvatarImageView.setImageResource(R.drawable.ic_avatar_login)
return true
}
}
private fun initDao() {
var openHelper: DaoMaster.DevOpenHelper = DaoMaster.DevOpenHelper(this, "USER")
var db: Database = openHelper.writableDb
var daoMaster: DaoMaster = DaoMaster(db)
mDaoSession = daoMaster.newSession()
}
fun getUserID(): String? {
if (mDaoSession!!.userBean2Dao.loadAll().isEmpty()) {
return "-1"
} else
return mDaoSession!!.userBean2Dao.loadAll()[0].userId
}
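// "-1" is the sentinel returned when no user row exists; the fragments compare against it
// to decide whether to show the login overlay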
private fun initLogin() {
mLoginWebView.setBackgroundColor(0)
mLoginWebView.setDefaultHandler(DefaultHandler())
mLoginWebView.webChromeClient = object : WebChromeClient() {
fun openFileChooser(uploadMsg: ValueCallback<Uri>, AcceptType: String, capture: String) {
this.openFileChooser(uploadMsg)
}
fun openFileChooser(uploadMsg: ValueCallback<Uri>, AcceptType: String) {
this.openFileChooser(uploadMsg)
}
fun openFileChooser(uploadMsg: ValueCallback<Uri>) {
mUploadMessage = uploadMsg
}
}
mLoginWebView.loadUrl("http://172.16.58.3:12380/login.html")
// mLoginWebView.loadUrl("http://10.10.90.14:8081/login.html")
mLoginWebView.registerHandler("closeLoginPage") { data, function ->
println("data$data")
if (data == "closeLoginPage") {
mLoginLayout.visibility = View.GONE
}
}
mLoginWebView.registerHandler("loginSuccess") { data, function ->
println("data$data")
userBean = Gson().fromJson(data, UserBean2::class.java)
println("userBean.userId" + userBean.userId)
mDaoSession!!.userBean2Dao.insert(userBean)
mLoginLayout.visibility = View.GONE
mAvatarImageView.setImageResource(R.drawable.ic_avatar_login)
// mLoginWebView.reload()
val intent = intent
overridePendingTransition(0, 0)
finish()
overridePendingTransition(0, 0)
startActivity(intent)
}
mLoginWebView.registerHandler("resetPwd") { data, function ->
println("data$data")
val intent = Intent()
intent.action = "android.intent.action.VIEW"
intent.data = Uri.parse(data)
startActivity(intent)
}
mLoginWebView.registerHandler("register") { data, function ->
println("data$data")
val intent = Intent()
intent.action = "android.intent.action.VIEW"
intent.data = Uri.parse(data)
startActivity(intent)
}
}
private val mStack = Stack<Fragment>()
private val mEarthFragment by lazy { EarthFragment() }
private val mMissionFragment by lazy { MissionFragment() }
private val mDefaultFragment by lazy { DefaultFragment() }
private val mMapFragment by lazy { MapFragment() }
private val mParamsFragment by lazy { ParamsFragment() }
private val mOrderFragment by lazy { OrderFragment() }
private fun initFragment() {
val manager = supportFragmentManager.beginTransaction()
manager.add(R.id.mFrameLayout, mEarthFragment)
manager.add(R.id.mFrameLayout, mMissionFragment)
manager.add(R.id.mFrameLayout, mDefaultFragment)
manager.add(R.id.mFrameLayout, mMapFragment)
manager.add(R.id.mFrameLayout, mParamsFragment)
manager.add(R.id.mFrameLayout, mOrderFragment)
manager.commit()
mStack.add(mEarthFragment)
mStack.add(mMissionFragment)
mStack.add(mDefaultFragment)
mStack.add(mMapFragment)
mStack.add(mParamsFragment)
mStack.add(mOrderFragment)
}
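// mStack positions, as addressed by changeFragment(position):
// 0 = EarthFragment, 1 = MissionFragment, 2 = DefaultFragment,
// 3 = MapFragment, 4 = ParamsFragment, 5 = OrderFragment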
private fun initNavigationView() {
mEarthImageView.setBackgroundResource(R.drawable.ic_earth)
mEarthTextView.setTextColor(Color.parseColor("#738FFE"))
mFeild1.setOnClickListener {
mEarthImageView.setBackgroundResource(R.drawable.ic_earth)
mMissionImageView.setBackgroundResource(R.drawable.ic_mission_white)
mSearchImageView.setBackgroundResource(R.drawable.ic_search_white)
mEarthTextView.setTextColor(Color.parseColor("#738FFE"))
mMissionTextView.setTextColor(Color.parseColor("#FFFFFF"))
mSearchTextView.setTextColor(Color.parseColor("#FFFFFF"))
mLoginLayout.visibility = View.GONE
changeFragment(0)
}
mFeild2.setOnClickListener {
mEarthImageView.setBackgroundResource(R.drawable.ic_earth_white)
mMissionImageView.setBackgroundResource(R.drawable.ic_mission)
mSearchImageView.setBackgroundResource(R.drawable.ic_search_white)
mEarthTextView.setTextColor(Color.parseColor("#FFFFFF"))
mMissionTextView.setTextColor(Color.parseColor("#738FFE"))
mSearchTextView.setTextColor(Color.parseColor("#FFFFFF"))
mLoginLayout.visibility = View.GONE
changeFragment(2)
}
mFeild3.setOnClickListener {
// mEarthImageView.setBackgroundResource(R.drawable.ic_earth_white)
// mMissionImageView.setBackgroundResource(R.drawable.ic_mission_white)
// mSearchImageView.setBackgroundResource(R.drawable.ic_search)
//
// mEarthTextView.setTextColor(Color.parseColor("#FFFFFF"))
// mMissionTextView.setTextColor(Color.parseColor("#FFFFFF"))
// mSearchTextView.setTextColor(Color.parseColor("#738FFE"))
// changeFragment(2)
val snackBar = Snackbar.make(window.decorView!!, "功能暂未开通", Snackbar.LENGTH_SHORT)
snackBar.setAction("确认") { snackBar.dismiss() }
snackBar.setActionTextColor(Color.parseColor("#738FFE"))
snackBar.show()
}
}
fun changeFragment(position: Int) {
val manager = supportFragmentManager.beginTransaction()
for (fragment in mStack) {
manager.hide(fragment)
}
manager.show(mStack[position])
mCurrentFragment = mStack[position]
manager.commit()
}
override fun onBackPressed() {
if (!BackHandlerHelper.handleBackPress(this)) {
super.onBackPressed()
}
if (mLoginLayout.visibility == View.VISIBLE) {
Logger.d(">>??")
}
}
override fun onKeyDown(keyCode: Int, event: KeyEvent): Boolean {
if (keyCode == KeyEvent.KEYCODE_BACK) {
if (mLoginLayout.visibility == View.VISIBLE) {
mLoginLayout.visibility = View.GONE
return false
}
if (mLogoutLayout.visibility == View.VISIBLE) {
mLogoutLayout.visibility = View.GONE
return false
}
if (mCurrentFragment == mOrderFragment) {
changeFragment(2)
return false
}
if (mCurrentFragment == mDefaultFragment) {
changeFragment(0)
mEarthImageView.setBackgroundResource(R.drawable.ic_earth)
mEarthTextView.setTextColor(Color.parseColor("#738FFE"))
mMissionImageView.setBackgroundResource(R.drawable.ic_mission_white)
mMissionTextView.setTextColor(Color.parseColor("#FFFFFF"))
return false
}
if (mCurrentFragment == mMapFragment) {
changeFragment(2)
return false
}
if (mCurrentFragment == mParamsFragment) {
changeFragment(3)
return false
}
if (mCurrentFragment == mEarthFragment) {
// return super.onKeyDown(keyCode, event)
val secondTime = System.currentTimeMillis()
if (secondTime - firstTime < 2000) {
System.exit(0)
} else {
Toast.makeText(this, "再按一次返回键退出", Toast.LENGTH_SHORT).show() // "Press back again to exit"
firstTime = System.currentTimeMillis()
}
return true
}
}
return super.onKeyDown(keyCode, event)
}
override fun onDestroy() {
super.onDestroy()
unregisterReceiver(networkChangeReceiver)
}
}
<file_sep>package com.yunjishi.lixiang.yunjishi.presenter.injection.module
import com.yunjishi.lixiang.yunjishi.service.MissionService
import com.yunjishi.lixiang.yunjishi.service.impl.MissionServiceImpl
import dagger.Module
import dagger.Provides
@Module
class MissionModule {
@Provides
fun provideMissionService(service: MissionServiceImpl): MissionService{
return service
}
}<file_sep>package com.yunjishi.lixiang.yunjishi.view.fragment
import android.content.Context
import android.net.Uri
import android.os.Bundle
import android.support.v4.app.Fragment
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import android.webkit.ValueCallback
import android.webkit.WebChromeClient
import com.github.lzyzsd.jsbridge.DefaultHandler
import com.yunjishi.lixiang.yunjishi.R
import kotlinx.android.synthetic.main.fragment_earth.*
import kotlinx.android.synthetic.main.fragment_login.*
class LoginFragment : Fragment() {
var mUploadMessage: ValueCallback<Uri>? = null
override fun onCreateView(inflater: LayoutInflater, container: ViewGroup?,
savedInstanceState: Bundle?): View? {
// Inflate the layout for this fragment
return inflater.inflate(R.layout.fragment_login, container, false)
}
// override fun onActivityCreated(savedInstanceState: Bundle?) {
// super.onActivityCreated(savedInstanceState)
// mLoginWebView.setDefaultHandler(DefaultHandler())
// mLoginWebView.webChromeClient = object : WebChromeClient() {
//
// fun openFileChooser(uploadMsg: ValueCallback<Uri>, AcceptType: String, capture: String) {
// this.openFileChooser(uploadMsg)
// }
//
// fun openFileChooser(uploadMsg: ValueCallback<Uri>, AcceptType: String) {
// this.openFileChooser(uploadMsg)
// }
//
// fun openFileChooser(uploadMsg: ValueCallback<Uri>) {
// mUploadMessage = uploadMsg
// }
// }
// mLoginWebView.loadUrl("http://10.10.90.14:8088/login.html")
//// mWebView.registerHandler("submitFromWeb", BridgeHandler { data, function ->
//// Toast.makeText(activity, data, Toast.LENGTH_SHORT).show()
//// })
// }
}
<file_sep>package com.yunjishi.lixiang.yunjishi.data.api
interface Api {
}<file_sep>package com.yunjishi.lixiang.yunjishi.view.activity
import android.annotation.SuppressLint
import android.graphics.Color
import android.os.Bundle
import android.view.View
import android.widget.ImageView
import android.widget.ZoomControls
import com.android.lixiang.base.ui.activity.BaseMvpActivity
import com.android.lixiang.base.utils.view.StatusBarUtil
import com.baidu.mapapi.map.*
import com.baidu.mapapi.model.LatLng
import com.blankj.utilcode.util.TimeUtils
import com.yunjishi.lixiang.yunjishi.R
import com.yunjishi.lixiang.yunjishi.presenter.OrderDetailPresenter
import com.yunjishi.lixiang.yunjishi.presenter.data.bean.DemandDetailBean
import com.yunjishi.lixiang.yunjishi.presenter.injection.component.DaggerOrderDetailComponent
import com.yunjishi.lixiang.yunjishi.presenter.injection.module.OrderDetailModule
import com.yunjishi.lixiang.yunjishi.presenter.view.OrderDetailView
import kotlinx.android.synthetic.main.activity_order_detail.*
import java.lang.Double
import java.util.ArrayList
class OrderDetailActivity : BaseMvpActivity<OrderDetailPresenter>(), OrderDetailView {
var typeList: MutableList<String> = mutableListOf()
var statusList: MutableList<String> = mutableListOf()
var mBaiduMap: BaiduMap? = null
var demandId: String? = ""
var demandDetailBean = DemandDetailBean()
override fun injectComponent() {
DaggerOrderDetailComponent.builder().activityComponent(activityComponent)
.orderDetailModule(OrderDetailModule())
.build().inject(this)
}
@SuppressLint("SetTextI18n")
override fun onGetDemandDetailResult(res: DemandDetailBean) {
demandDetailBean = res
println(res)
mTypeTextView.text = typeList[demandDetailBean.data.demandType]
mRatioTextView.text = demandDetailBean.data.resolution
mTimeTextView.text = TimeUtils.millis2String(demandDetailBean.data.startTime)
.split(" ")[0]
.replace("-", ".") + " - " + TimeUtils.millis2String(demandDetailBean.data.endTime)
.split(" ")[0]
.replace("-", ".")
mTimesTextView.text = demandDetailBean.data.times.toString()
mStateTextView.text = statusList[demandDetailBean.data.demandStatus]
var temp = (demandDetailBean.data.demandArea / 1000000).toString()
mAreaTextView.text = "面积 " + temp.substring(0, temp.indexOf(".") + 3) + " 平方公里" // displays "Area <n> square kilometres"
drawMap(getGeo(demandDetailBean.data.geo))
}
private fun drawMap(geo: MutableList<Array<String>>) {
println(geo)
val pts = ArrayList<LatLng>()
for (i in 0 until geo.size) {
val pt = LatLng(Double.parseDouble(geo[i][1]), Double.parseDouble(geo[i][0]))
pts.add(pt)
}
val polygonOption = PolygonOptions()
.points(pts)
.stroke(Stroke(10, Color.parseColor("#F56161")))
.fillColor(Color.parseColor("#00000000"))
mBaiduMap!!.addOverlay(polygonOption)
val latlng = LatLng((Double.parseDouble(geo[1][1]) + Double.parseDouble(geo[2][1])) / 2, (Double.parseDouble(geo[3][0]) + Double.parseDouble(geo[2][0])) / 2)
val mMapStatus: MapStatus = MapStatus.Builder().target(latlng).zoom(12F).build()
val mapStatusUpdate: MapStatusUpdate = MapStatusUpdateFactory.newMapStatus(mMapStatus)
mBaiduMap!!.setMapStatus(mapStatusUpdate)
}
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
setContentView(R.layout.activity_order_detail)
StatusBarUtil.setColor(this, Color.parseColor("#333333"), 0)
mFakeBackButton.setOnClickListener {
this.finish()
}
mPresenter.mView = this
val intent = intent
val bundle = intent.extras
demandId = bundle.getString("DEMAND_ID")
mPresenter.getDemandDetail(bundle.getString("USER_ID"), demandId.toString())
initView()
}
private fun initView() {
typeList.add("标准卫星图")   // standard satellite imagery
typeList.add("夜光卫星图")   // night-light satellite imagery
typeList.add("卫星视频")     // satellite video
statusList.add("审核中")     // under review
statusList.add("生产中")     // in production
statusList.add("审核未通过") // review rejected
statusList.add("订单取消")   // order cancelled
statusList.add("订单完成")   // order completed
statusList.add("部分完成")   // partially completed
mBaiduMap = mOrderDetailMapView!!.map
val child = mOrderDetailMapView!!.getChildAt(1)
if (child != null && (child is ImageView || child is ZoomControls)) {
child.visibility = View.INVISIBLE
}
mOrderDetailMapView!!.showScaleControl(false)
mOrderDetailMapView!!.showZoomControls(false)
}
private fun getGeo(geo: String): MutableList<Array<String>> {
val geos: MutableList<Array<String>> = mutableListOf()
val temp3 = geo.substring(geo.indexOf("[[") + 1, geo.indexOf("]}"))
val temp2 = temp3.replace("[", "")
val temp = temp2.replace("]", "")
val array = temp.split(',')
for (i in 0 until array.size step 2) {
geos.add(arrayOf(array[i], array[i + 1]))
}
return geos
}
}
<file_sep>package com.yunjishi.lixiang.yunjishi.view.fragment
import android.content.Intent
import android.net.Uri
import android.os.Bundle
import android.support.v4.app.Fragment
import android.view.LayoutInflater
import android.view.View
import android.view.ViewGroup
import android.webkit.ValueCallback
import android.webkit.WebChromeClient
import android.widget.Toast
import com.github.lzyzsd.jsbridge.BridgeHandler
import com.github.lzyzsd.jsbridge.DefaultHandler
import com.yunjishi.lixiang.yunjishi.R
import com.yunjishi.lixiang.yunjishi.view.activity.OrderDetailActivity
import com.yunjishi.lixiang.yunjishi.view.activity.VideoPlayerActivity
import kotlinx.android.synthetic.main.fragment_earth.*
import org.jetbrains.anko.support.v4.startActivity
import com.yunjishi.lixiang.yunjishi.view.activity.MainActivity
import android.app.Activity
import android.content.Context
import android.os.Handler
import android.webkit.WebView
import android.widget.LinearLayout
import android.widget.RelativeLayout
import com.yunjishi.lixiang.yunjishi.view.activity.PdfViewerActivity
import kotlinx.android.synthetic.main.activity_main.*
import java.util.logging.Logger
class EarthFragment : Fragment() {
var mUploadMessage: ValueCallback<Uri>? = null
var userId: String? = ""
var loginStatus: String? = ""
var productId: String? = "-1"
override fun onCreateView(inflater: LayoutInflater, container: ViewGroup?,
savedInstanceState: Bundle?): View? {
// Inflate the layout for this fragment
return inflater.inflate(R.layout.fragment_earth, container, false)
}
override fun onAttach(activity: Activity?) {
super.onAttach(activity)
userId = (activity as MainActivity).getUserID()
loginStatus = if (userId == "-1") "no" else "yes"
println("userId$userId")
}
override fun onActivityCreated(savedInstanceState: Bundle?) {
super.onActivityCreated(savedInstanceState)
mWebView.setDefaultHandler(DefaultHandler())
val sp = activity!!.getSharedPreferences("XXX", Context.MODE_PRIVATE)
println("sp.getString" + sp.getString("PRODUCT_ID", ""))
productId = sp.getString("PRODUCT_ID", "")
mWebView.webChromeClient = object : WebChromeClient() {
fun openFileChooser(uploadMsg: ValueCallback<Uri>, AcceptType: String, capture: String) {
this.openFileChooser(uploadMsg)
}
fun openFileChooser(uploadMsg: ValueCallback<Uri>, AcceptType: String) {
this.openFileChooser(uploadMsg)
}
fun openFileChooser(uploadMsg: ValueCallback<Uri>) {
mUploadMessage = uploadMsg
}
override fun onProgressChanged(view: WebView, progress: Int) {
// Handler().postDelayed({ rl.visibility = View.GONE }, 5000)
}
}
if (userId == "-1") {
mWebView.loadUrl("http://192.168.127.12:12380/globe.html?loginStatus=$loginStatus")
} else {
mWebView.loadUrl("http://192.168.127.12:12380/globe.html?userId=$userId&loginStatus=$loginStatus&productId=$productId")
}
// if (userId == "-1") {
// mWebView.loadUrl("http://10.10.90.14:8081/globe.html?loginStatus=$loginStatus")
// } else {
// mWebView.loadUrl("http://10.10.90.14:8081/globe.html?userId=$userId&loginStatus=$loginStatus&productId=$productId")
// }
mWebView.reload()
mWebView.registerHandler("videoPlay") { data, function ->
println("data$data")
val intent = Intent(activity, VideoPlayerActivity::class.java)
val bundle = Bundle()
bundle.putString("URL", data)
intent.putExtras(bundle)
startActivity(intent)
}
mWebView.registerHandler("showLoginPage") { data, function ->
println("data$data")
if (data == "showLoginPage") {
var mLoginLayout = activity!!.findViewById<LinearLayout>(R.id.mLoginLayout)
mLoginLayout.visibility = View.VISIBLE
}
}
mWebView.registerHandler("showThematicReport") { data, function ->
println("data$data")
if (data != "") {
val intent = Intent(activity, PdfViewerActivity::class.java)
val bundle = Bundle()
bundle.putString("URL", data)
intent.putExtras(bundle)
startActivity(intent)
}
}
mWebView.registerHandler("loadSuccess") { data, function ->
println("data$data")
rl.visibility = View.GONE
// com.orhanobut.logger.Logger.d(data)
// if (data == "loadSuccess") {
// Handler().postDelayed({ rl.visibility = View.GONE }, 5000)
// }
}
sp.edit().clear().commit()
}
}
<file_sep>package com.yunjishi.lixiang.yunjishi.service.impl
import com.yunjishi.lixiang.yunjishi.presenter.data.bean.DemandDetailBean
import com.yunjishi.lixiang.yunjishi.presenter.data.repository.OrderDetailRepository
import com.yunjishi.lixiang.yunjishi.service.OrderDetailService
import io.reactivex.Observable
import io.reactivex.ObservableSource
import io.reactivex.functions.Function
import javax.inject.Inject
class OrderDetailServiceImpl @Inject constructor(): OrderDetailService {
override fun getDemandDetail(string1: String, string2: String): Observable<DemandDetailBean> {
return orderDetailRepository.getDemandDetail(string1, string2).flatMap(Function<DemandDetailBean, ObservableSource<DemandDetailBean>> { t ->
return@Function Observable.just(t)
})
}
@Inject
lateinit var orderDetailRepository: OrderDetailRepository
}
|
4750dbb1cc1bc663d3c2e5a4803d6ce5fe74ddc5
|
[
"Java",
"Kotlin",
"Gradle"
] | 46
|
Kotlin
|
lixxxiang/yunjishi
|
3a8f78b777ef2ff4b53c336d16ae479def461e1f
|
a2550fa7a7f766819f35f4dce77d31fd8f8dc85e
|
refs/heads/master
|
<repo_name>Dazzingdusk/external-hub<file_sep>/Hello/hello.cpp
#include<iostream>
int main(void)
{
char Author[] = "Smile";
char saying[] = "Hello";
std::cout << saying << ", good night, " << Author << std::endl;
return 0;
}
<file_sep>/Hello/hello.c
#include<stdio.h>
//First replace
int main(void)
{
char Author[] = "Smile";
printf("Hello, %s\n", Author);
return 0;
}
<file_sep>/README.md
# external-hub
This is a remote repository, used for learning.

Problem:

hint: Updates were rejected because the tip of your current branch is behind its remote counterpart.
hint: Merge the remote changes (e.g. 'git pull ...') before pushing again. See the
hint: 'Note about fast-forwards' section in 'git push --help' for details.

Solution:

    git fetch origin          // fetch the remote updates
    git merge origin/master   // merge them into the local branch

Or simply add the -f option to force the push.
<file_sep>/Shell/shell~
#!/bin/bash
echo "<<< ===============>>>>"
echo
echo "Now is `date`"
echo "welcome back! Smile"
echo
echo "<<< ===============>>>>"
|
3e445dc54c32786c44ac1c65f75ea339f6288dcc
|
[
"Markdown",
"C",
"C++",
"Shell"
] | 4
|
C++
|
Dazzingdusk/external-hub
|
9ea25cb5933e250d160fbca647219781146a2385
|
2dff75ddc8092bdd3d3186c899469dfee2191550
|
refs/heads/master
|
<repo_name>jrbeverly/Server-Quickstart<file_sep>/README.md
# docker-quickstart
A collection of docker environment setup
<file_sep>/gogs/setup
#!/bin/bash
GOGS=/prj/gogs/
USERNAME=docker-gogs-user
USERGROUP=docker-users
echo "Creating directories"
sudo mkdir -p $GOGS
sudo chmod -R u+rwx,g+rwx,o+rwx $GOGS
echo "Creating users"
sudo useradd --system -M --shell /usr/sbin/nologin $USERNAME
sudo groupadd $USERGROUP
sudo gpasswd -a $USERNAME $USERGROUP
echo "Setting user ownership"
sudo chown -R $USERNAME $GOGS
sudo chgrp -R $USERGROUP $GOGS
echo "Setting docker-compose"
I_UID=$(id -u $USERNAME)
I_GID=$(getent group $USERGROUP | cut -d: -f3)
cp compose.yml docker-compose.yml
sed -i "s/INSERT_UID/${I_UID}/g" docker-compose.yml
sed -i "s/INSERT_GID/${I_GID}/g" docker-compose.yml<file_sep>/owncloud/setup
#!/bin/bash
DATA=/prj/owncloud/data
CONFIG=/prj/owncloud/config
DB=/prj/owncloud/db
USERNAME=docker-owncloud-user
USERGROUP=docker-users
echo "Creating directories"
sudo mkdir -p $DATA $CONFIG $DB
echo "Creating users"
sudo useradd --system -M --shell /usr/sbin/nologin $USERNAME
sudo groupadd $USERGROUP
sudo gpasswd -a $USERNAME $USERGROUP
echo "Setting user ownership"
sudo chmod -R 0770 $DATA $DB
sudo chmod -R u+rw,g+rw $CONFIG
sudo chown -R www-data:www-data $CONFIG
sudo chgrp -R www-data $DATA
sudo chgrp -R www-data $CONFIG
echo "Setting docker-compose"
I_UID=$(id -u $USERNAME)
I_GID=$(getent group $USERGROUP | cut -d: -f3)
cp compose.yml docker-compose.yml
sed -i "s/INSERT_UID/${I_UID}/g" docker-compose.yml
sed -i "s/INSERT_GID/${I_GID}/g" docker-compose.yml<file_sep>/deluge/configure
#!/bin/bash
DELUGE=/prj/deluge/
DELUGE_CONSOLE=deluge-console
DOCKER="/usr/bin/docker"
#$DOCKER exec -t -i ""
# TODO Commands
# Allow-remote
# Setup the paths (donwloads/config/)
# Setup credentials
# Setup blocklist
|
f38e0378c9162d5e1318f47e7c81a539d730b5d2
|
[
"Markdown",
"Shell"
] | 4
|
Markdown
|
jrbeverly/Server-Quickstart
|
c1428088a5cf22e08ef1ce7f771a35426609f18f
|
cd6a5e3f527b197eb01818a7a22b97b56186904f
|
refs/heads/master
|
<file_sep>
function menuClicker(j){
let res = (e)=>{
dash.destroy();
dash = new Control(app.node, 'div', 'basic_block dash_wrapper', '');
app.category = j;
category.textContent = cards[0][j-1];
catDescription.textContent = "Use the listen button to learn a word and the rotate button to see its translation. Click start game to test yourself.";
menu.childList.forEach(it=>{
it.node.className=('menu_button');
});
menu.childList[j].node.className=('menu_button menu_button_active');
for (let i=0; i<cards[1].length; i++){
//let el = new Control(dash.node, 'div', 'basic_block dash_item', 'item'+i);
let el = new DoubleCard(dash.node, cards[j][i].word, cards[j][i].translation, 'assets/'+cards[j][i].image);
//el.node.style='transform: scale(1);'
dash.childList.push(el);
}
}
return res;
}
function menuExtClicker(j){
let res = function (e){
dash.destroy();
dash = new Control(app.node, 'div', 'basic_block dash_wrapper', '');
app.category = j;
category.textContent = "Main";
catDescription.textContent = "Select one of the word categories to learn";
menu.childList.forEach(it=>{
it.node.className=('menu_button');
});
menu.childList[0].node.className=('menu_button menu_button_active');
for (let i=0; i<cards[0].length; i++){
//let el = new Control(dash.node, 'div', 'basic_block dash_item', 'item'+i);
let el = new CategoryCard(dash.node, cards[0][i], i+1, 'assets/'+cards[i+1][0].image);
dash.childList.push(el);
}
}
return res;
}
var category=document.querySelector('#category');
var catDescription=document.querySelector('#category-description');
var seqPos=0;
var difWords=[];
var cardSnd=[];
// entry point
var mainNode = document.querySelector('#app-main-node');
//var gameControl = document.querySelector('.game_control');
var gameControl_ = new Control(mainNode, 'div', 'game_control', '');
var gameControl = gameControl_.node;
var returnButton = new Button(gameControl, 'menu_button', 'to Main', menuExtClicker('main'));
//var app = new Control(mainNode, 'div', 'basic_block', 'rslang');
var app = new Control(mainNode, 'div', 'basic_block app_wrapper', '');
app.gameMode = 0;
app.category = 1;
var md = new Control(app.node, 'div', 'dash_modal', '');
var mdw = new Control(md.node, 'div', 'dash_modal_window', '');
var mdm = new Control(mdw.node, 'div', 'menu_button', '');
var mdb = new Button(mdw.node, 'menu_button', 'ok',()=>{
md.node.style='display:none';
playButton.click();
});
md.node.style = "display:none";
var btNode = document.querySelector('#play-button');
//var app = new Control(mainNode, 'div', 'basic_block', 'rslang');
var playButton = new Button(gameControl, 'menu_button', 'click to play', ()=>{
if (app.gameMode){
dash.destroy();
dash = new Control(app.node, 'div', 'basic_block dash_wrapper', '');
app.gameMode = 0;
playButton.node.textContent = 'click to play';
let j = app.category;
for (let i=0; i<cards[1].length; i++){
//let el = new Control(dash.node, 'div', 'basic_block dash_item', 'item'+i);
let el = new DoubleCard(dash.node, cards[j][i].word, cards[j][i].translation, 'assets/'+cards[j][i].image);
dash.childList.push(el);
}
} else {
dash.destroy();
dash = new Control(app.node, 'div', 'basic_block dash_wrapper', '');
app.gameMode = 1;
starBlock.node.textContent='';
playButton.node.textContent = 'stop game';
difWords=[];
let j = app.category;
let cardIds = [];
for (let i=0; i<cards[1].length; i++){
cardIds.push(i);
}
cardIds.sort(()=>Math.random()-0.5);
cardSnd = [];
for (let iq=0; iq<cards[1].length; iq++){
//cardIds.push(i);
let i = cardIds[iq];
let aud = new Control(mainNode, 'audio','','');
aud.node.src='assets/audio/'+cards[j][i].word+'.mp3';
aud.name = cards[j][i].word;
aud.statName =cards[j][i].word + " ("+ cards[j][i].translation+")";
cardSnd.push(aud);
}
cardSnd[0].node.play();
cardIds.sort(()=>Math.random()-0.5);
for (let iq=0; iq<cards[1].length; iq++){
//let el = new Control(dash.node, 'div', 'basic_block dash_item', 'item'+i);
let i = cardIds[iq];
let el = new PlayCard(dash.node, cards[j][i].word, i, 'assets/'+cards[j][i].image);
dash.childList.push(el);
}
seqPos=0;
}
});
var hdr = document.querySelector('#header-menu');
var menu = new Control(hdr, 'div', 'basic_block menu_burger', '');
let el = new Button(menu.node,'menu_button', 'Main Page', menuExtClicker('main'));
menu.childList.push(el);
menu.visible = 0;
menu.node.style = "height:0px; overflow-y:hidden";
var burg = new Button(hdr, 'burger', '',()=>{
console.log(menu.visible);
if (menu.visible == 1){
menu.node.style = "height:0px; overflow-y:hidden";
menu.visible = 0;
burg.node.style="";
} else {
menu.node.style = "";
menu.visible = 1;
burg.node.style="transform: rotateZ(90deg);";
}
});
el = new Control (burg.node, 'img', '', '');
el.node.src = "assets/ico/burger.png";
for (let i=0; i<cards[0].length; i++){
//let el = new Control(dash.node, 'div', 'basic_block dash_item', 'item'+i);
let el = new Button(menu.node,'menu_button', cards[0][i], menuClicker(i+1));
menu.childList.push(el);
}
var dash = new Control(app.node, 'div', 'basic_block dash_wrapper', '');
/*for (let i=0; i<12; i++){
//let el = new Control(dash.node, 'div', 'basic_block dash_item', 'item'+i);
let el = new DoubleCard(dash.node, 'card'+i, 'ru', 'img/i'+(i+1)+'.png');
dash.childList.push(el);
}*/
/*for (let i=0; i<cards[1].length; i++){
//let el = new Control(dash.node, 'div', 'basic_block dash_item', 'item'+i);
let el = new DoubleCard(dash.node, cards[1][i].word, cards[1][i].translation, 'assets/'+cards[1][i].image);
dash.childList.push(el);
}*/
var repeatButton = new Button(gameControl, 'menu_button', 'repeat word',()=>{
if (app.gameMode) {
cardSnd[0].node.play();
}
});
var starBlock = new Control(gameControl, 'div', 'star_block', '');
returnButton.click();
var statData=[];
class StatRecord {
constructor(name, win, fail){
this.name = name;
this.win = win;
this.fail = fail;
}
}
for (let i=0; i<cards[0].length; i++){
for (let j=0; j<cards[i+1].length; j++){
statData.push(new StatRecord(cards[i+1][j].word + " ("+ cards[i+1][j].translation+")", 0, 0));
}
}
var stat = document.querySelector('#app-stat');
function refreshStat(){
stat.textContent='';
new Control(stat, 'h1', '', 'Statistic:');
for (let i=0; i<statData.length; i++){
new Control(stat, 'p', '', statData[i].name + " " + statData[i].fail);
}
}
refreshStat();
/*for (let i=0; i<cards[0].length; i++){
for (let j=0; j<cards[i+1].length; j++){
new Control(stat, 'p', '', cards[i+1][j].word + " ("+ cards[i+1][j].translation+")");
}
}*/
/*
var snde = new Control(stat, 'audio', '', '');
var btt = new Button(stat, '', 'play',()=>{
snde.node.src="https://wooordhunt.ru/data/sound/word/us/mp3/"+'word'+".mp3";
snde.node.play();
});*/
<file_sep>class Control {
// Hard DOM functions
constructor (parentNode, tagName, className, textContent){
this.node = document.createElement(tagName);
this.childList = [];
this.render(className, textContent);
parentNode.appendChild(this.node);
}
destroy (){
this.childList.forEach((it)=>{
it.destroy();
});
//this.childList = null;
this.node.remove();
//this = null;
}
// style and content functions
render (className, textContent){
this.node.className = className;
this.node.textContent = textContent;
}
// own props functions
}
class Button extends Control {
constructor (parentNode, className, textContent, click){
super(parentNode, 'div', className, textContent);
this.click = click;
this.node.addEventListener('click', this.click);
}
}
class Card extends Control{
constructor (parentNode, cardName, ruName, imgURL){
super(parentNode, 'div', 'dash_item', '');
this.name = new Control(this.node,'div', 'card_name', cardName);
let imgWrapper = new Control(this.node,'div', 'card_img', '');
this.img = new Control(imgWrapper.node,'img', '', '');
this.img.node.src = imgURL;
let cardMenu = new Control(this.node,'div', 'card_menu', '');
this.rotateButton = new Button(cardMenu.node, 'card_button', 'rotate', (event)=>{
this.rotate(180);
});
this.node.addEventListener('mouseleave',()=>{
this.rotate(0);
})
this.listenButton = new Control(cardMenu.node,'div', 'card_button', 'listen');
}
rotate (deg){
if (deg>0){
this.rotateButton.node.style = 'transition-duration: 400ms; opacity:0';
this.listenButton.node.style = 'transition-duration: 400ms; opacity:0';
this.name.node.style = 'transition-duration: 400ms; opacity:0';
this.node.style = 'z-index: 1; transform: perspective(500px) rotateY('+deg+'deg)';
} else {
this.rotateButton.node.style = 'transition-duration: 400ms; opacity:100';
this.listenButton.node.style = 'transition-duration: 400ms; opacity:100';
this.name.node.style = 'transition-duration: 400ms; opacity:100';
this.node.style = 'z-index: 1; transform: perspective(500px) rotateY('+deg+'deg)';
}
}
}
class DoubleCard extends Control {
constructor (parentNode, cardName1, cardName2, imgURL){
super(parentNode, 'div', 'dash_item', '');
this.sideA = new Control (this.node,'div', 'card_side card_side_a', '');
this.sideB = new Control (this.node,'div', 'card_side', '');
this.sideB.name = new Control(this.sideB.node,'div', 'card_name', cardName2);
let imgWrapper = new Control(this.sideB.node,'div', 'card_img', '');
this.sideB.img = new Control(imgWrapper.node,'img', '', '');
imgWrapper.node.style='transform: rotateY(180deg);'
this.sideB.img.node.src = imgURL;
this.sideB.node.style = 'z-index: 1; transform: perspective(500px) rotateY('+(180)+'deg)';
this.aud = new Control(this.node, 'audio','','');
this.aud.node.src='assets/audio/'+cardName1+'.mp3';
this.sideA.name = new Control(this.sideA.node,'div', 'card_name', cardName1);
imgWrapper = new Control(this.sideA.node,'div', 'card_img', '');
this.sideA.img = new Control(imgWrapper.node,'img', '', '');
this.sideA.img.node.src = imgURL;
let cardMenu = new Control(this.sideA.node,'div', 'card_menu', '');
this.rotateButton = new Button(cardMenu.node, 'card_button', 'rotate', (event)=>{
this.rotate(180);
});
this.node.addEventListener('mouseleave',()=>{
this.rotate(0);
})
this.listenButton = new Button(cardMenu.node, 'card_button', 'listen', (event)=>{
console.log(3);
this.aud.node.play();
});
this.rotate(0);
//this.node.style = 'height:'+(this.sideA.node.clientHeight)+'px';
}
rotate (deg){
this.sideA.node.style = 'z-index: 1; transform: perspective(500px) rotateY('+deg+'deg)';
this.sideB.node.style = 'z-index: 1; transform: perspective(500px) rotateY('+(180+deg)+'deg)';
}
}
class PlayCard extends Control {
constructor (parentNode, cardName1, num, imgURL){
super(parentNode, 'div', 'dash_item', '');
this.cardName = cardName1;
this.num=num;
this.sideA = new Button (this.node, 'card_side card_side_a', '',()=>{
if (this.active) {
this.active = false;
this.play();
}
});
this.active = true;
let imgWrapper = new Control(this.sideA.node,'div', 'card_img', '');
this.sideA.img = new Control(imgWrapper.node,'img', '', '');
this.sideA.img.node.src = imgURL;
//this.rotate(0);
}
play (){
//starBlock.node.innerHtml='';
if (this.cardName == cardSnd[0].name){
let star = new Control(starBlock.node, 'div', 'star_item star_item_ok', '');
this.sideA.node.style = 'opacity: 50%; background-color:#00ff00';
} else {
let star = new Control(starBlock.node, 'div', 'star_item star_item_err', '');
seqPos++;
//difWords.push(this.cardName); ///maybe its right
if (difWords.indexOf(cardSnd[0].name)==-1) {
difWords.push(cardSnd[0].name);
statData.forEach((it, i, arr)=>{if(it.name==cardSnd[0].statName){arr[i].fail++;}});
}
this.sideA.node.style = 'opacity: 50%; background-color:#ff0000';
//cardSnd.push(cardSnd[seqPos]);
}
if (cardSnd.length>0){
cardSnd = cardSnd.filter((it)=>{
return this.cardName != it.name;
});
}
//seqPos++;
if (cardSnd[0]){
cardSnd[0].node.play();
}
if (cardSnd.length==0) {
// playButton.click()
if (seqPos==0){
mdm.node.textContent = "Amazing! You haven't made any mistakes";
} else {
mdm.node.textContent = "You've got "+seqPos+" mistakes with the words "+difWords.join(', ');
}
refreshStat();
md.node.style='';
}
// this.sideB.node.style = 'z-index: 1; transform: perspective(500px) rotateY('+(180+deg)+'deg)';
}
}
class CategoryCard extends Control {
constructor (parentNode, cardName1, num, imgURL){
super(parentNode, 'div', 'dash_item', '');
this.num=num;
this.sideA = new Button (this.node, 'card_side card_side_a', '',
menuClicker(num));
this.active = true;
this.cardName = new Control(this.sideA.node,'div', 'card_name', cardName1);
let imgWrapper = new Control(this.sideA.node,'div', 'card_img category_img', '');
this.sideA.img = new Control(imgWrapper.node,'img', '', '');
this.sideA.img.node.src = imgURL;
//this.rotate(0);
}
}
|
3cf441fdce202f563f319c7dccb9af1dc6c85eac
|
[
"JavaScript"
] | 2
|
JavaScript
|
InikonZS/govnocode
|
46f0e642d0612be6ed01c22760cdb13c1ecc8193
|
676f45e945f72a6cb169627c064dba29d63a8ef0
|
refs/heads/master
|
<file_sep>#!/usr/bin/env python
"""
Hodor.
"""
__version__ = 'Hodor.'
__author__ = 'Hodor.'
__copyright__ = 'Hodor.'
# Hodor.
HODOR = 'Hodor.'
class Hodor(object):
"""
Hodor.
:param hodor:
Hodor.
"""
def __init__(self, hodor=HODOR):
self._hodor = hodor
def hodor(self):
hodor = self._hodor
return hodor
def __call__(self):
hodor = self.hodor()
return hodor
hodor = Hodor(HODOR)
if __name__ == '__main__':
print(hodor.hodor())
|
4edbd9dd72050135e80e4ee844a2edc0b1a3afe0
|
[
"Python"
] | 1
|
Python
|
laco/hodorpy
|
5aaa17e0cb8bb6a6c4fd28ca0b9a3029e55dc833
|
c8b6b072d788206ec592b203440bbc7476eace77
|
refs/heads/master
|
<repo_name>alfiemitchell123/drum-sequencer<file_sep>/initialise.js
var counter;
var filters = [];
for (counter = 0; counter < 8; counter+=1) {
filters[counter] = new Tone.Filter(0, "highpass");
}
var distortion = [];
for (counter = 0; counter < 8; counter+=1) {
distortion[counter] = new Tone.Distortion(1);
distortion[counter].wet.value = 0;
}
var reverb = [];
for (counter = 0; counter < 8; counter+=1) {
reverb[counter] = new Tone.Freeverb();
reverb[counter].roomSize.value = 0.75;
reverb[counter].dampening.value = 8000;
reverb[counter].wet.value = 0;
}
// this array will store our players
// we later load samples in to them
var players = [];
for (counter = 0; counter < 8; counter+=1) {
players[counter] = new Tone.Player();
}
// we have a bunch of different kit samples - all in different folder
var kitNames = [
"Hip_Hop/",
"House/",
"Garage/",
"Drum_And_Bass/"
];
// every sample MUST have the same name, no matter which kit folder it was in
var filenames = [
"Kick.wav",
"Snare.wav",
"Clap.wav",
"Closed_Hat.wav",
"Open_Hat.wav",
"Tom.wav",
"Perc_1.wav",
"Perc_2.wav"
];
function changeKitTo(kitNumber) {
// select the kit and load it
for (var counter = 0; counter < 8; counter+=1) {
var filename = kitNames[kitNumber] + filenames[counter];
console.log("player number " + counter + " will load " + filename);
players[counter].load(filename);
// players[counter].connect(filters[counter]);
players[counter].chain(filters[counter], distortion[counter], reverb[counter], Tone.Master);
players[counter].retrigger = true;
}
}
// load kit number 0 by default
changeKitTo(0);
nx.onload = function() {
// Set up the size (rows and columns) of the sequencer
sequencer.col = 16;
sequencer.row = 8;
sequencer.init(); // Initialises the sequencer
// set up colours
nx.colorize("accent" , "#00c1c1" ); // Sets the overall fill colour of the Nexus UI objects
nx.labelSize(50); // Sets the label size of the Nexus UI objects
// Set up the sequence callback
sequencer.on("*", sequencerActions);
// Set up the on/off button
startStopBtn.on("*", toggleActions);
// Set up the tempo dial
tempoDial.on("*", tempoActions);
tempoDial.responsivity = 0.002;
// Set up the arrays for the sliders
var gainSliders = [kickGainSlider, snareGainSlider, clapGainSlider, cHatGainSlider, oHatGainSlider, tomGainSlider, perc1GainSlider, perc2GainSlider];
var filterSliders = [kickCutoffSlider, snareCutoffSlider, clapCutoffSlider, cHatCutoffSlider, oHatCutoffSlider, tomCutoffSlider, perc1CutoffSlider, perc2CutoffSlider];
var distortionSliders = [kickDryWetSlider, snareDryWetSlider, clapDryWetSlider, cHatDryWetSlider, oHatDryWetSlider, tomDryWetSlider, perc1DryWetSlider, perc2DryWetSlider];
var reverbSliders = [kickReverbSlider, snareReverbSlider, clapReverbSlider, cHatReverbSlider, oHatReverbSlider, tomReverbSlider, perc1ReverbSlider, perc2ReverbSlider];
// Set the initial parameters of the gain sliders within a loop
for (counter = 0; counter < 8; counter+=1) {
gainSliders[counter].on("*", gainSliderActions);
gainSliders[counter].min = -35;
gainSliders[counter].max = 0;
gainSliders[counter].val.value = 0;
gainSliders[counter].init();
}
// Set the initial parameters of the filter sliders within a loop
for (counter = 0; counter < 8; counter+=1) {
filterSliders[counter].on("*", filterSliderActions);
filterSliders[counter].min = 0;
filterSliders[counter].max = 5000;
filterSliders[counter].val.value = 0;
filterSliders[counter].init();
}
// Set the initial parameters of the distortion sliders within a loop
for (counter = 0; counter < 8; counter+=1) {
distortionSliders[counter].on("*", distortionSliderActions);
distortionSliders[counter].min = 0;
distortionSliders[counter].max = 1;
distortionSliders[counter].val.value = 0;
distortionSliders[counter].init();
}
// Set the initial parameters of the reverb sliders within a loop
for (counter = 0; counter < 8; counter+=1) {
reverbSliders[counter].on("*", reverbSliderActions);
reverbSliders[counter].min = 0;
reverbSliders[counter].max = 1;
reverbSliders[counter].val.value = 0;
reverbSliders[counter].init();
}
}<file_sep>/function.js
function tempoActions(event) { // This function sets up the dial to control the BPM of the sequencer.
sequencer.bpm = tempoDial.val.value * 2;
}
var jumpToStartButton = document.getElementById("jumpToStartButton");
jumpToStartButton.addEventListener("click", jumpToStartOfSequencer);
function jumpToStartOfSequencer() { // This function sets up the ability to jump to the start of the sequencer using a button.
sequencer.jumpToCol(0);
}
var clearButton = document.getElementById("clearButton");
clearButton.addEventListener("click", clearActions);
function clearActions(event) { // This function sets up the ability to clear the sequencer using a button.
for (var c = 0; c < sequencer.col; c+=1) {
for (var r = 0; r < sequencer.row; r+=1) {
sequencer.matrix[c][r] = 0;
}
}
sequencer.draw();
}
function sequencerActions(data) { // This function sets up the sequencer so the players can play according to which step is on/off.
if (data.list != undefined) {
for (counter = 0; counter < 8; counter+=1) {
if (data.list[counter] == 1) {
players[counter].start();
}
}
}
}
function toggleActions(event) { // This functions controls the on/off state of the sequencer, which the user controls using the toggle button.
if(event.value == 1) {
sequencer.sequence(tempoDial.val.value);
}
else {
sequencer.stop();
}
}
window.addEventListener("keypress", keyboardActions);
function keyboardActions(key) { // This function controls the use of the QWERTY keyboard, which (when a certain key is pressed), can control a certain parameter.
if (key.keyCode == "32") {
if (startStopBtn.val.value == 0) {
startStopBtn.val.value = 1;
startStopBtn.init();
sequencer.sequence(tempoDial.val.value);
}
else if (startStopBtn.val.value == 1) {
startStopBtn.val.value = 0;
startStopBtn.init();
sequencer.stop();
}
}
}
var drumKitSelect = document.getElementById("drumKitSelect");
drumKitSelect.addEventListener("input", drumKitActions);
function drumKitActions() { // This function controls the dropdown menu, so the user can select which type of kit they want to use.
switch(drumKitSelect.value){
case "hipHop":
changeKitTo(0);
console.log("value 1 selected");
break;
case "house":
changeKitTo(1);
console.log("value 2 selected");
break;
case "garage":
changeKitTo(2);
console.log("value 3 selected");
break;
case "drumAndBass":
changeKitTo(3);
console.log("value 4 selected");
}
}
function gainSliderActions() { // This function controls the gain sliders.
var gainSliders = [kickGainSlider, snareGainSlider, clapGainSlider, cHatGainSlider, oHatGainSlider, tomGainSlider, perc1GainSlider, perc2GainSlider];
for (counter = 0; counter < 8; counter+=1) {
players[counter].volume.value = gainSliders[counter].val.value;
}
}
function filterSliderActions() { // This function controls the filter sliders.
var filterSliders = [kickCutoffSlider, snareCutoffSlider, clapCutoffSlider, cHatCutoffSlider, oHatCutoffSlider, tomCutoffSlider, perc1CutoffSlider, perc2CutoffSlider];
for (counter = 0; counter < 8; counter+=1) {
filters[counter].frequency.value = filterSliders[counter].val.value;
}
}
function distortionSliderActions() { // This function controls the distortion sliders.
var distortionSliders = [kickDryWetSlider, snareDryWetSlider, clapDryWetSlider, cHatDryWetSlider, oHatDryWetSlider, tomDryWetSlider, perc1DryWetSlider, perc2DryWetSlider];
for (counter = 0; counter < 8; counter+=1) {
distortion[counter].wet.value = distortionSliders[counter].val.value;
}
}
function reverbSliderActions() { // This function controls the reverb sliders.
var reverbSliders = [kickReverbSlider, snareReverbSlider, clapReverbSlider, cHatReverbSlider, oHatReverbSlider, tomReverbSlider, perc1ReverbSlider, perc2ReverbSlider];
for (counter = 0; counter < 8; counter+=1) {
reverb[counter].wet.value = reverbSliders[counter].val.value;
}
}
// var textChangeBtn = document.getElementsByClassName("textChangeBtn");
// textChangeBtn.addEventListener("click", textChangeBtnActions);
// function textChangeBtnActions() {
// if (textChangeBtn.getAttribute("data-text-swap") == textChangeBtn.innerHTML) {
// textChangeBtn.innerHTML = textChangeBtn.getAttribute("data-text-original");
// }
// else {
// textChangeBtn.setAttribute("data-text-original", textChangeBtn.innerHTML);
// textChangeBtn.innerHTML = textChangeBtn.getAttribute("data-text-swap");
// }
// }
var showMixerBtn = document.getElementById("showMixer");
showMixerBtn.addEventListener("click", showMixerBtnActions);
function showMixerBtnActions() { // This function controls how the 'Show Mixer' button acts and the text of the button depending on which state it is in.
if (showMixerBtn.getAttribute("data-text-swap") == showMixerBtn.innerHTML) {
showMixerBtn.innerHTML = showMixerBtn.getAttribute("data-text-original");
mixerSliders.style.display = "none";
}
else {
showMixerBtn.setAttribute("data-text-original", showMixerBtn.innerHTML);
showMixerBtn.innerHTML = showMixerBtn.getAttribute("data-text-swap");
mixerSliders.style.display = "block";
}
}
var showFilterBtn = document.getElementById("showFilters");
showFilterBtn.addEventListener("click", showFilterBtnActions);
function showFilterBtnActions() { // This function controls how the 'Show Filters' button acts and the text of the button depending on which state it is in.
if (showFilterBtn.getAttribute("data-text-swap") == showFilterBtn.innerHTML) {
showFilterBtn.innerHTML = showFilterBtn.getAttribute("data-text-original");
filterSliders.style.display = "none";
}
else {
showFilterBtn.setAttribute("data-text-original", showFilterBtn.innerHTML);
showFilterBtn.innerHTML = showFilterBtn.getAttribute("data-text-swap");
filterSliders.style.display = "block";
}
}
var changeFilterSetting = document.getElementById("changeFilterSetting");
changeFilterSetting.addEventListener("click", changeFilterSettingActions);
function changeFilterSettingActions() { // This function controls how the 'Change Filter Setting' button acts and the text of the button depending on which state it is in.
if (changeFilterSetting.getAttribute("data-text-swap") == changeFilterSetting.innerHTML) {
changeFilterSetting.innerHTML = changeFilterSetting.getAttribute("data-text-original");
for (counter = 0; counter < 8; counter+=1) {
filters[counter].type = "highpass";
}
}
else {
changeFilterSetting.setAttribute("data-text-original", changeFilterSetting.innerHTML);
changeFilterSetting.innerHTML = changeFilterSetting.getAttribute("data-text-swap");
for (counter = 0; counter < 8; counter+=1) {
filters[counter].type = "lowpass";
}
}
}
var showDistortionBtn = document.getElementById("showDistortion");
showDistortionBtn.addEventListener("click", showDistortionBtnActions);
function showDistortionBtnActions() { // This function controls how the 'Show Distortion' button acts and the text of the button depending on which state it is in.
if (showDistortionBtn.getAttribute("data-text-swap") == showDistortionBtn.innerHTML) {
showDistortionBtn.innerHTML = showDistortionBtn.getAttribute("data-text-original");
distortionSliders.style.display = "none";
}
else {
showDistortionBtn.setAttribute("data-text-original", showDistortionBtn.innerHTML);
showDistortionBtn.innerHTML = showDistortionBtn.getAttribute("data-text-swap");
distortionSliders.style.display = "block";
}
}
var showReverbBtn = document.getElementById("showReverb");
showReverbBtn.addEventListener("click", showReverbBtnActions);
function showReverbBtnActions() { // This function controls how the 'Show Reverb' button acts and the text of the button depending on which state it is in.
if (showReverbBtn.getAttribute("data-text-swap") == showReverbBtn.innerHTML) {
showReverbBtn.innerHTML = showReverbBtn.getAttribute("data-text-original");
reverbSliders.style.display = "none";
}
else {
showReverbBtn.setAttribute("data-text-original", showReverbBtn.innerHTML);
showReverbBtn.innerHTML = showReverbBtn.getAttribute("data-text-swap");
reverbSliders.style.display = "block";
}
}<file_sep>/README.md
# drum-sequencer
Drum Sequencer for UWE
|
7deed401c613f53117052dc49b749518c7392766
|
[
"JavaScript",
"Markdown"
] | 3
|
JavaScript
|
alfiemitchell123/drum-sequencer
|
db27bad345738a5abfcdf3a6126d53f6824faa2c
|
dba99cf9565bcb319231b7dce672f96fec40a33a
|
refs/heads/master
|
<repo_name>MeghanaAnvekar/pset7<file_sep>/public/buy.php
<?php
// configuration
require("../includes/config.php");
if ($_SERVER["REQUEST_METHOD"] == "GET")
{
// else render form
render("buy_form.php", ["title" => "Buy"]);
}
// else if user reached page via POST (as by submitting a form via POST)
else if ($_SERVER["REQUEST_METHOD"] == "POST")
{
$stock = lookup($_POST["symbol"]);
$cost = (float)$stock["price"] * $_POST["shares"];
$d = CS50::query("SELECT cash FROM users WHERE id = ?", $_SESSION["id"]);
// the query returns a single row for this user; take its cash balance
$data = $d[0];
// $cost already includes the number of shares (price * shares)
if ($data["cash"] >= $cost)
{
$insert = CS50::query("SELECT * FROM bought WHERE user_id = ? AND symbol = ?",$_SESSION["id"],$_POST["symbol"]);
$cash = $data["cash"] - $cost;
$query = CS50::query("UPDATE users SET cash = ? WHERE id = ?", $cash, $_SESSION["id"]);
CS50::query("INSERT INTO history (user_id,symbol,shares,action,price,date_time) VALUES(?,?,?,?,?,?)", $_SESSION["id"],$_POST["symbol"],$_POST["shares"],"bought",$cost,date("Y-m-d H:i:s", $current_timestamp = time()));
if($insert == false)
{
CS50::query("INSERT INTO bought (user_id,symbol,shares) VALUES(?,?,?)", $_SESSION["id"],$_POST["symbol"],$_POST["shares"]);
}
else
{
CS50::query("UPDATE bought SET shares = ? WHERE symbol = ?", $insert[0]["shares"] + $_POST["shares"] , $_POST["symbol"]);
}
$rows = CS50::query("SELECT * FROM bought WHERE user_id = ?", $_SESSION["id"]);
$positions = [];
foreach ($rows as $row)
{
$stock = lookup($row["symbol"]);
if ($stock !== false)
{
$positions[] = [
"name" => $stock["name"],
"price" => $stock["price"],
"shares" => $row["shares"],
"symbol" => $row["symbol"]
];
}
}
// render portfolio
render("portfolio.php", ["positions" => $positions, "title" => "Portfolio"]);
}
else
{
apologize("You don't have enough cash!");
}
}
?><file_sep>/public/sell.php
<?php
// configuration
require("../includes/config.php");
if ($_SERVER["REQUEST_METHOD"] == "GET")
{
// else render form
$rows = CS50::query("SELECT * FROM bought WHERE user_id = ?", $_SESSION["id"]);
$positions = [];
foreach ($rows as $row)
{
$stock = lookup($row["symbol"]);
if ($stock !== false)
{
$positions[] = [
"name" => $stock["name"],
"price" => $stock["price"],
"shares" => $row["shares"],
"symbol" => $row["symbol"]
];
}
}
render("sell_form.php", ["positions" => $positions, "title" => "Sell"]);
}
// else if user reached page via POST (as by submitting a form via POST)
else if ($_SERVER["REQUEST_METHOD"] == "POST")
{
$data = lookup($_POST["symbol"]);
if($data === false)
{
apologize("Symbol not found!.");
}
else
{
$d = CS50::query("SELECT shares FROM bought WHERE (symbol = ?) AND (user_id =?)", $_POST["symbol"], $_SESSION["id"]);
if( $d ==false)
{
apologize("You don't own share(s) from that company. ");
}
else
{
$positions=[];
$positions = CS50::query("SELECT * FROM bought WHERE (symbol = ?) AND (user_id = ?)",$_POST["symbol"], $_SESSION["id"]);
$q = CS50::query("DELETE FROM bought WHERE (symbol = ?) AND (user_id = ?)",$_POST["symbol"],$_SESSION["id"]);
if($q === false)
{
apologize("Couldn't sell the share you selected!");
}
else
{
// the query returns a single row; take the share count from it
$a = $d[0];
CS50::query("UPDATE users SET cash = (cash + ?) WHERE id = ?",((float)$a["shares"] * (float)$data["price"]),$_SESSION["id"]);
CS50::query("INSERT INTO history (user_id,symbol,shares,action,price,date_time) VALUES(?,?,?,?,?,?)", $_SESSION["id"],$_POST["symbol"],$a["shares"],"sold",$data["price"],date("Y-m-d H:i:s", $current_timestamp = time()));
render("sold.php", ["positions" => $positions, "title" => "Sold"]);
}
}
}
}
?><file_sep>/README.md
# pset7
pset7
<file_sep>/public/funds.php
<?php
// configuration
require("../includes/config.php");
if ($_SERVER["REQUEST_METHOD"] == "GET")
{
// else render form
render("funds_form.php", ["title" => "Add Funds"]);
}
else
{
if((float)$_POST["cash"] < 0)
{
apologize("Amount can't be negative.");
}
else
{
CS50::query("UPDATE users SET cash = (cash + ?) WHERE id = ?",(float)$_POST["cash"],$_SESSION["id"]);
render("funds_view.php" ,["title" => "Cash"]);
}
}
?><file_sep>/views/sell_form.php
<table style="width:100% ;text-align: left" >
<thead>
<tr>
<th>Symbol</th>
<th>Name</th>
<th>Shares</th>
<th>Price</th>
<th>Total</th>
</tr>
</thead>
<tbody>
<?php foreach ($positions as $position): ?>
<tr>
<td><?= $position["symbol"] ?></td>
<td><?php $s = lookup($position["symbol"])?> <?= $s["name"] ?> </td>
<td><?= $position["shares"] ?></td>
<td><?= $position["price"] ?></td>
<td><?= ($position["price"] * $position["shares"]) ?></td>
</tr>
<?php endforeach ?>
</tbody>
</table>
<form action="sell.php" method="post">
<fieldset>
<div class="form-group">
<input autocomplete="off" autofocus class="form-control" name="symbol" placeholder="Symbol" type="text"/>
</div>
<div class="form-group">
<button class="btn btn-default" type="submit">Sell</button>
</div>
</fieldset>
</form><file_sep>/views/buy_form.php
<?php
$c = CS50::query("SELECT cash FROM users WHERE id = ?", $_SESSION["id"]);
// use the single row returned for the logged-in user
$d = $c[0];
$cash = number_format((double)$d["cash"], 4, '.', ',');
?>
<form action="buy.php" method="post">
<fieldset>
<div class="form-group">
<b>CASH = $</b> <?= $cash ?>
<br>
<br>
<input autocomplete="off" autofocus class="form-control" name="symbol" placeholder="Symbol" type="text"/>
</div>
<div class="form-group">
<input autocomplete="off" class="form-control" name="shares" placeholder="Shares" type="number" min="1"/>
</div>
<div class="form-group">
<button class="btn btn-default" type="submit">Buy</button>
</div>
</fieldset>
</form><file_sep>/views/quote.php
<?php
print("A share of " . $name . " costs $" . $price);
?><file_sep>/views/funds_form.php
<?php
$c = CS50::query("SELECT cash FROM users WHERE id = ?", $_SESSION["id"]);
// use the single row returned for the logged-in user
$d = $c[0];
$cash = number_format((double)$d["cash"], 4, '.', ',');
?>
<form action="funds.php" method="post">
<fieldset>
<div class="form-group">
<b>CASH = $</b> <?= $cash ?>
<br>
<br>
<input autocomplete="off" autofocus class="form-control" name="cash" placeholder="$0.00" type="text"/>
</div>
<div class="form-group">
<button class="btn btn-default" type="submit">Add</button>
</div>
</fieldset>
</form><file_sep>/views/funds_view.php
<?php
$c = CS50::query("SELECT cash FROM users WHERE id = ?", $_SESSION["id"]);
// use the single row returned for the logged-in user
$d = $c[0];
$cash = number_format((double)$d["cash"], 4, '.', ',');
?>
<div class="form-group">
<b>CASH = $</b> <?= $cash ?>
</div>
|
45d97d861520932a4f2146e097ef5401063a1331
|
[
"Markdown",
"PHP"
] | 9
|
PHP
|
MeghanaAnvekar/pset7
|
733319ba910bd47e9eb3521177c315efe1d5add0
|
6e60fe9b6f99c2d4bb48b1ff11a911b091ce8008
|
refs/heads/master
|
<file_sep># these magic numbers come from the IANA considerations of RFCXXXX (ietf-cose-msg-24)
module Cose
class Msg
# from table 2: Common Header Paramters, page 15.
# https://www.iana.org/assignments/cose/cose.xhtml#header-parameters
ALG = 1
CRIT = 2
CONTENT_TYPE = 3
KID = 4
IV = 5
PARTIAL_IV = 6
COUNTER_SIGNATURE = 7
X5BAG = 32
VOUCHER_PUBKEY = 60299 # private value, remove it.
# from table 5, ECDSA Algorithm Values
ES256 = -7
ES384 = -35
ES512 = -36
ES256K = -47
def self.alg_from_int(algno)
case algno
when ES256
:ES256
when ES384
:ES384
when ES512
:ES512
when ES256K
:ES256k
else
algno
end
end
end
end
<file_sep>#!/bin/sh
bundle exec rspec -I. spec/model/signature_spec.rb spec/model/voucher_spec.rb spec/model/voucher_request_spec.rb
<file_sep>module Chariwt
class VoucherSIDClass
class MissingSIDMapping < Exception
attr_reader :mapping
def initialize(msg, mapping)
@mapping = mapping
super(msg)
end
end
def self.calc_sidkeys(sidkeys)
rev = Hash.new
sidkeys.each {|k,v|
case v
when Array
v.each {|str|
rev[str] = k
}
else
rev[v]=k
end
}
rev
end
def self.sid4key(key)
case key
when String
sidkeys[key.downcase]
when Numeric
key
else
byebug
puts "bad key: #{key}"
end
end
def self.translate_assertion_fromsid(assertion)
case assertion
when 0
:verified
when 1
:logged
when 2
:proximity
else
assertion.to_sym
end
end
def self.translate_assertion_tosid(assertion)
case assertion.to_s
when "verified"
0
when "logged"
1
when "proximity"
2
else
assertion
end
end
# This method rewrites a hash based upon deltas against the parent
# SID, which is not modified.
# It is used when mapping into constrained SID based YANG.
# The input hash should look like:
#
# { NUM1 => { NUM2 => 'stuff' }}
# and results in:
# { NUM1 => { (NUM2-NUM1) => 'stuff' }}
#
def self.mapkeys(base, hash)
raise MissingSIDMapping.new("bad base id", base) unless base
nhash = Hash.new
hash.each { |k,v|
kn = sid4key(k)
#byebug unless kn
raise MissingSIDMapping.new("missing mapping", k) unless kn
sidkey = kn - base
case k.to_s
when "assertion"
v = translate_assertion_tosid(v)
when "nonce"
# this forces nonce to be a bstr rather than a tstr
v = v.force_encoding('ASCII-8BIT')
end
if v.is_a? DateTime
v = v.iso8601(0) # this turns it into a string.
end
case v
when Hash
nhash[sidkey] = mapkeys(sidkey, v)
else
nhash[sidkey] = v
end
}
nhash
end
def self.hash2yangsid(hash)
nhash = Hash.new
hash.each { |k,v|
sidkey = sid4key(k)
raise MissingSIDMapping.new("missing base object", k) unless sidkey
nhash[sidkey] = mapkeys(sidkey,v)
}
nhash
end
def self.yangsid2hash(hash)
nhash = Hash.new
return nil unless hash.kind_of? Hash
hash.each { |k,v|
basenum = k
v.each { |relk,v|
if relk.is_a? Integer
abskey = basenum+relk
yangkey = hashkeys[abskey]
if yangkey
if(abskey == 2502 || abskey == 2452)
v = translate_assertion_fromsid(v)
end
nhash[yangkey] = v
else
nhash['unknown'] ||= []
nhash['unknown'] << [abskey,v]
end
else
nhash[relk] = v
end
}
}
nhash
end
end
class VoucherSID < VoucherSIDClass
SIDKeys = {
2451 => ['ietf-cwt-voucher', 'ietf-voucher:voucher'],
2452 => 'assertion',
2453 => 'created-on',
2454 => 'domain-cert-revocation-checks',
2455 => 'expires-on',
2456 => 'idevid-issuer',
2457 => 'last-renewal-date',
2458 => 'nonce',
2459 => 'pinned-domain-cert',
2460 => 'pinned-domain-subject-public-key-info',
2461 => 'pinned-sha256-of-subject-public-key-info',
2462 => 'serial-number',
}
def self.hashkeys
SIDKeys
end
def self.sidkeys
@sidkeys_voucher ||= calc_sidkeys(SIDKeys)
end
end
class VoucherRequestSID < VoucherSIDClass
SIDKeys = {
2501 => ['ietf-cwt-voucher-request',
'ietf-cwt-voucher-request:voucher',
'ietf-voucher-request:voucher'],
2502 => 'assertion',
2503 => 'created-on',
2504 => 'domain-cert-revocation-checks',
2505 => 'expires-on',
2506 => 'idevid-issuer',
2507 => 'last-renewal-date',
2508 => 'nonce',
2509 => 'pinned-domain-cert',
2510 => 'prior-signed-voucher-request',
2511 => 'proximity-registrar-cert',
2512 => 'proximity-registrar-sha256-of-subject-public-key-info',
2513 => 'proximity-registrar-subject-public-key-info',
2514 => 'serial-number',
}
def self.hashkeys
SIDKeys
end
def self.sidkeys
@sidkeys_voucher_request ||= calc_sidkeys(SIDKeys)
end
end
end
<file_sep>module Chariwt
def self.tmpdir
# make sure that tmp directory is available for output.
@tmpdir ||=
begin
Rails.root.join("tmp")
rescue
"tmp"
end
FileUtils::mkdir_p(@tmpdir)
@tmpdir
end
def self.cmp_pkcs_file(smime, base, certfile=nil)
ofile = File.join(tmpdir, base + ".pkcs")
otfile = File.join(tmpdir, base+ ".txt")
File.open(ofile, "wb") do |f| f.write smime end
location = File.dirname(__FILE__) + "/../../bin"
#puts "Location is: #{location}, wrote to #{ofile}, #{otfile}, #{base}"
cmd0 = "#{location}/pkcs2json #{ofile} #{otfile} #{certfile}"
exitcode = system(cmd0)
unless exitcode
puts sprintf("CMD FAILED: %s\n", cmd0);
return false
end
cmd = "diff #{otfile} spec/files/#{base}.txt"
exitcode = system(cmd)
unless exitcode
puts sprintf("CMD FAILED: %s\n", cmd);
return false
end
return exitcode
end
def self.cmp_vch_voucher(basename)
diffcmd = sprintf("cbor2diag.rb tmp/%s.vch >tmp/%s.diag",
basename, basename)
system(diffcmd)
cmd = sprintf("diff tmp/%s.diag spec/files/%s.diag",
basename, basename)
#puts cmd
exitcode = system(cmd)
unless exitcode
puts cmd
end
return exitcode
end
def self.cmp_vch_pretty_voucher(basename)
cvtcmd = sprintf("cbor2pretty.rb tmp/%s.vch >tmp/%s.pretty",
basename, basename)
unless system(cvtcmd)
puts cvtcmd
return false
end
diffcmd = sprintf("diff tmp/%s.pretty spec/files/%s.pretty",
basename, basename)
exitcode = system(diffcmd)
unless exitcode
puts diffcmd
end
return exitcode
end
def self.cmp_vch_detailed_voucher(basename)
pretty = sprintf("tmp/%s.pretty", basename)
cvtcmd = sprintf("cbor2pretty.rb tmp/%s.vch >%s",
basename, pretty)
system(cvtcmd)
unless system(cvtcmd)
puts sprintf("\nCONVERT FAILED: %s\n", cvtcmd)
return false
end
diffcmd = sprintf("diff %s spec/files/%s.pretty",
pretty, basename)
exitcode = system(diffcmd)
unless exitcode
puts sprintf("\nFAILED: %s\n", diffcmd)
return exitcode
end
return true
end
def self.cmp_vch_file(token, basename)
ofile = File.join(tmpdir, basename + ".vch")
File.open(ofile, "wb") do |f| f.write token end
return cmp_vch_detailed_voucher(basename)
end
def self.cmp_signing_record(record, basename)
outname="#{basename}.example.json"
File.open("tmp/#{outname}", "w") {|f|
out=record.to_s.gsub(",",",\n")
f.puts out
}
diffcmd = sprintf("diff tmp/%s spec/files/%s",outname,outname)
exitcode = system(diffcmd)
unless exitcode
puts sprintf("\nFAILED: %s\n", diffcmd)
end
return exitcode
end
end
<file_sep>source 'https://rubygems.org'
gem "cbor", "~> 0.5.9.2"
gem 'cbor-diag', :git => 'https://github.com/AnimaGUS-minerva/cbor-diag', :branch => 'put-pretty-extract-into-library'
gem "json"
gem 'jwt'
#gem 'openssl', :git => 'https://github.com/mcr/ruby-openssl.git'
gem 'openssl', :path => '../minerva/ruby-openssl'
gem 'ecdsa', :git => 'https://github.com/AnimaGUS-minerva/ruby_ecdsa.git', :branch => 'ecdsa_interface_openssl'
#gem 'ecdsa', :path => '../minerva/ruby_ecdsa'
#gem 'ecdsa', "~> 1.3.0"
gem 'rbnacl-libsodium'
gem 'rbnacl', "<5.0.0"
gem 'rake'
# dependabot reports
gem 'tzinfo', "~> 2.0"
# for acts_like?
gem 'activesupport', "~> 6.1.7.1"
group :test do
gem 'byebug'
gem "rspec"
gem "rspec-core"
gem "rspec_junit_formatter"
end
|
fe89b797ca363952be8c3eb9b544b2e48b0ff861
|
[
"Ruby",
"Shell"
] | 5
|
Ruby
|
AnimaGUS-minerva/ChariWTs
|
02de3f8884fb55451ffed79dcb710c731f6caec1
|
02c6f5a81c4d32f572dc4bcf22f78bd12b037655
|
refs/heads/master
|
<file_sep># Timesuperin
Timesuperin 은 '**Time Superin**tendent (시간 관리자)'의 줄임말로써 시계열 데이터 분석 및 이상 탐지를 위한 라이브러리입니다.
시계열 지표를 이벤트 정보와 트렌드를 고려하여 회귀 모델을 생성하여 향후 지표의 변화를 추정하고, 이렇게 추정된 결과와 크게 차이가 발생하는 이상 현상을 탐지하는 기능을 제공합니다.
## 설치 방법
timesuperin을 사용하기 위해선 먼저 아래 라이브러리를 설치해야 합니다.
* Rcpp
* rstan
* BH
위 라이브러리가 설치된 상태에서 아래 코드를 실행하시면 됩니다.
devtools::install_github("ncsoft/timesuperin")
## 사용 방법
모델링 및 시계열 이상 탐지 방법은 아래와 같습니다.
먼저 timesuperin/resources 에 있는 데이터를 불러옵니다.
setwd('./timesuperin/resources')
train_data <- read.csv('./train_data.csv')
test_data <- read.csv('./test_data.csv')
train_data.csv and test_data.csv are time-series data influenced by two event variables.
The training data used for modeling looks like the figure below.

Now build a time-series model from this data:
model <- model.timesuperin(train_data, model.type = 'lm', period = 6)
To detect the anomalous data in the test set with this model, run the detect_anormal.timesuperin function:
anomaly.detect <- detect_anormal.timesuperin(model, test_data, value = test_data$value)
The anomaly detection result can be inspected as follows (two data points fall outside the upr and lwr bounds):
anomaly.detect$Interval_Plot

<file_sep>#' Fitting timeseries models
#'
#' timesuperin is used to fit timeseries models.
#' @param data Dataframe containing the history. Must have columns date type and y.
#' @param model.type String 'lm' or 'rlm' to specify a linear or robust linear model
#' @param step.wise Logical; if TRUE, perform stepwise feature selection
#' @param period Data period
#' @param changepoints Vector of dates at which to include potential changepoints. If not specified, potential changepoints are selected automatically.
#' @param changepoint.prior.scale Parameter modulating the flexibility of the automatic changepoint selection. Large values will allow many changepoints, small values will allow few changepoints.
#' @keywords timesuperin
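#' @examples
#' \dontrun{
#' # Minimal sketch (an assumption-based example, not from the original docs):
#' # `train_data` is expected to have a `time` column and a value column, as in
#' # the package README; wrapped in \dontrun because fitting calls a compiled
#' # Stan model.
#' fit <- model.timesuperin(train_data, model.type = 'lm', period = 6)
#' fit$model_summary
#' }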
#' @export
#' @importFrom MASS rlm
#' @import Rcpp
model.timesuperin <- function(data, model.type = 'lm',
formula = NULL,
use.timevalue = TRUE,
period = 24,
step.wise = FALSE,
changepoints = NULL,
changepoint.prior.scale = 0.05) {
if (model.type != 'rlm' && model.type != 'lm') {
warning(paste('model.type=', model.type, "is not supported.", "Try 'lm' or 'rlm'", "Used model.type default 'lm'"))
model.type = 'lm'
}
if (step.wise != TRUE && step.wise != FALSE) {
warning(sprintf("step.wise = %s is not supported (step.wise should be TRUE or FALSE). step.wise is set to FALSE", step.wise))
step.wise <- FALSE
}
if (step.wise == TRUE && model.type != 'lm') {
warning("step.wise is valid only if model.type is 'lm'")
}
if (!is.null(formula)) {
formula.timesuperin = as.formula(paste(formula[[2]], "~", formula[3], "+ time_value", "+ trend_value"))
} else {
formula.timesuperin = as.formula(paste(names(data)[2], "~."))
}
data.info <- data.preprocessing(data,
value = gsub("()", "", formula.timesuperin[2]),
period = period,
changepoints = changepoints,
changepoint.prior.scale = changepoint.prior.scale,
use.timevalue = use.timevalue)
train.data <- subset(data.info[[1]], select = -c(time))
if (model.type == "rlm") {
regression.model <- MASS::rlm(formula.timesuperin, data = train.data)
} else if (model.type=="lm") {
regression.model <- lm(formula.timesuperin, data = train.data)
if (step.wise == TRUE) {
regression.model <- step(regression.model, direction = 'both')
formula.timesuperin <- formula(regression.model)
}
}
result<-list(model = model.type,
formula = formula.timesuperin,
lm_model = regression.model,
model_summary = summary(regression.model),
time_interval = data.info$time_interval,
period = data.info$time_period,
trend_params = data.info$trend_param,
trend = TRUE,
use.timevalue = use.timevalue)
return(result)
}
### Create time variable ###
# Convert the input data to a proper time format based on its representation
#' @importFrom stringr str_detect
format_time <- function(data) {
if (class(data)[1] == "POSIXlt" | class(data)[1] == "POSIXct" ) {
return(data)
}
if (stringr::str_detect(data[1], "^\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2} \\+\\d{4}$")) {
data <- as.POSIXct(strptime(data, format="%Y-%m-%d %H:%M:%S", tz="UTC"))
}
else if (stringr::str_detect(data[1], "^\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}:\\d{2}$")) {
data <- as.POSIXct(strptime(data, format="%Y-%m-%d %H:%M:%S", tz="UTC"))
}
else if (stringr::str_detect(data[1], "^\\d{4}-\\d{2}-\\d{2} \\d{2}:\\d{2}$")) {
data <- as.POSIXct(strptime(data, format="%Y-%m-%d %H:%M", tz="UTC"))
}
else if (stringr::str_detect(data[1], "^\\d{4}-\\d{2}-\\d{2} \\d{1}$")) {
data <- as.POSIXct(strptime(data, format="%Y-%m-%d %H", tz="UTC"))
}
else if (stringr::str_detect(data[1], "^\\d{4}-\\d{2}-\\d{2} \\d{2}$")) {
data <- as.POSIXct(strptime(data, format="%Y-%m-%d %H", tz="UTC"))
}
else if (stringr::str_detect(data[1], "^\\d{4}\\d{2}\\d{2} \\d{2}$")) {
data <- as.POSIXct(strptime(data, format="%Y%m%d %H", tz="UTC"))
}
else if (stringr::str_detect(data[1], "^\\d{4}-\\d{2}-\\d{2}$")) {
data <- as.Date(data, "%Y-%m-%d")
}
else if (stringr::str_detect(data[1], "^\\d{2}/\\d{2}/\\d{2}$")) {
data <- as.POSIXct(strptime(data, format="%m/%d/%y", tz="UTC"))
}
else if (stringr::str_detect(data[1], "^\\d{2}/\\d{2}/\\d{4}$")) {
data <- as.POSIXct(strptime(data, format="%m/%d/%Y", tz="UTC"))
}
else if (stringr::str_detect(data[1], "^\\d{4}\\d{2}\\d{2}$")) {
data <- as.Date(data, "%Y%m%d")
}
else if (stringr::str_detect(data[1], "^\\d{4}/\\d{2}/\\d{2}/\\d{2}$")) {
data <- as.POSIXct(strptime(data, format="%Y/%m/%d/%H", tz="UTC"))
}
else if( stringr::str_detect(data[1],"^\\d{4}-\\d{2}$")){
data <- as.Date(paste0((data),"-01"),"%Y-%m-%d")
}
else if( stringr::str_detect(data[1],"^\\d{4}/\\d{2}$")){
data <- as.Date(paste0((data),"/01"),"%Y/%m/%d")
}
return(data)
}
### Distinguish year/month/day/hour/minute/second ###
# Determines the time granularity of the input data
get_gran <- function(data, index='time') {
n = length(data[[index]])
gran = round(difftime(max(data[[index]]), sort(data[[index]], partial=n-1)[n-1], units="secs"))
if (gran >= 2419200){
return("mon")
}
else if (gran >= 86400) {
return("day")
}
else if (gran >= 3600) {
return("hr")
}
else if (gran >= 60) {
return("min")
}
else if (gran >= 1) {
return("sec")
} else {
stop("can't extract time interval from data")
}
}
### Create time value ###
# Build the time value from the detected granularity
# monthly granularity : 01~12
# daily granularity : 01~31
# hourly granularity : 01~24
# minute or second granularity : 01~60
timevalue <- function(data){
gran <- get_gran(data)
if (gran == "mon"){
return(substr(data[['time']], 6, 7))
}
else if (gran == "day"){
return(substr(data[['time']], 9, 10))
}
else if (gran == "hr"){
return(substr(data[['time']], 12, 13))
}
else if (gran == "min"){
return(substr(data[['time']], 15, 16))
}
else if (gran == "sec"){
return(substr(data[['time']], 18, 19))
}
}
### data.preprocessing ###
### Data handling and feature creation ###
# Options differ depending on whether a trend variable is created
# data: training data
## the column holding the time values must be named `time`
# period: period of the data
# changepoints: points where the data characteristics change, in the same time unit as the data
# changepoint.prior.scale: controls the flexibility of changepoint selection; large values allow many changepoints, small values allow few
#' @importFrom dplyr arrange
data.preprocessing <- function(data,
value = NULL,
period = NULL,
changepoints = NULL,
n.changepoints = NULL,
changepoint.prior.scale = NULL,
use.timevalue = TRUE) {
# convert the time column to a proper time format based on its input representation
data$time <- format_time(data$time)
data <- dplyr::arrange(data, data[['time']])
# create the trend variable #
if (!is.null(changepoints)) {
n.changepoints <- length(changepoints)
} else {
n.changepoints <- c(25)
}
m <- list(
value = value,
period = period,
changepoints = changepoints,
n.changepoints = n.changepoints,
seasonality.prior.scale = 10,
changepoint.prior.scale = changepoint.prior.scale,
y.scale = NULL,
t.scale = NULL,
changepoints.t = NULL,
params = list(),
history = data
)
m <- mk.trend.parm(m)
if (use.timevalue == TRUE) {
data <- data.frame(data,
time_value = as.factor(timevalue(data)),
trend_value = predict_trend(m, data))
} else {
data <- data.frame(data,
trend_value = predict_trend(m, data))
}
return(list(data = data,
time_interval = get_gran(data),
time_period = period,
trend_param = list(params = m$params,
start = m$start,
t.scale = m$t.scale,
y.scale = m$y.scale,
changepoints = m$changepoints,
changepoints.t = m$changepoints.t)))
}
####################################### Prediction #########################################
# Build the data table used for prediction #
# Prepares the data needed for prediction
# data : prediction data
# the column holding the time values must be named `time`
# trend_param : trend parameters used at prediction time
make.target_data <- function(data,
use.time = TRUE,
trend = TRUE,
trend_param = NULL) {
# convert the time column to a proper time format based on its input representation
data$time <- format_time(data$time)
if (use.time) {
data$time_value <- as.factor(timevalue(data))
}
if (trend == TRUE) {
if (is.null(trend_param)) {
stop("trend_param should be not null if trend is TRUE")
}
data$trend_value <- predict_trend(trend_param, data)
}
return(data)
}
# Prediction #
# Takes the prepared prediction data and produces the fitted value and its upper/lower bounds for the target time range
# object : result of the model-fitting function (the fitted model and trend parameters)
# newdata : prepared prediction data
# level : confidence level of the prediction interval
#' Model predictions
#'
#' This function for predictions from the results of model fitting functions.
#' @param object result of model fitting functions.
#' @param newdata dataframe for predictions. Must have columns date type.
#' @param level Tolerance/confidence level.
#' @keywords predict
#' @export
pred.table.timesuperin <- function(object,
newdata,
level = 0.95) {
target <- make.target_data(newdata,
use.time = object$use.timevalue,
trend = object$trend,
trend_param = object$trend_params)
predic.table <- data.frame(time = target[['time']],
predict(object$lm_model,
newdata = target,
interval='prediction',
level = level))
return(predic.table)
}
### Output : model summary & fitted values & detected outliers & plots ###
# Detect anomalies using the fitted values and their upper/lower bounds for the target time range
# object : result of the model-fitting function (the fitted model and trend parameters)
# direction : detection direction; 'upr' flags only values above the upper bound, 'lwr' only values below the lower bound,
# 'both' flags values beyond either bound
# level : confidence level of the prediction interval used for detection
# cumul.thre : empirically chosen threshold for the cumulative residual
# the result contains the table of detected anomalies, a plot of the bounds and fitted values, and a plot of the cumulative residual with its bounds
#' Detect anomaly data
#'
#' Detection of anomalous data based on the prediction interval
#' @param object result of model fitting functions.
#' @param newdata dataframe for predictions. Must have columns date type.
#' @param level Tolerance/confidence level.
#' @param value Anormal confirmation data.
#' @param direction Anomaly detection direction : 'upper', 'lower', 'both'
#' @param cumul.thre threshold for Cumulative residual
#' @keywords Detection
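#' @examples
#' \dontrun{
#' # Minimal sketch following the README (an assumption-based example):
#' # `model` comes from model.timesuperin() and `test_data` has a `time`
#' # column plus the observed values to check.
#' res <- detect_anormal.timesuperin(model, test_data, value = test_data$value)
#' res$result_table
#' res$Interval_Plot
#' }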
#' @export
detect_anormal.timesuperin <- function(object,
newdata,
level = 0.95,
value,
direction='both',
cumul.thre = NULL) {
predic.table <- pred.table.timesuperin(object, newdata, level)
# interval type and level default to prediction & 0.95
interval.type <- 'prediction'
# outlier detection
predic.table$value <- value
#names(predic.table)[ncol(predic.table)]<-c('value')
#' @import ggplot2
#' @importFrom reshape melt
# plot of the lower bound, upper bound, fitted values and actual values
predic.table.melt<-reshape::melt(predic.table[,c('time', 'value', 'fit', 'upr', 'lwr')], id.vars = 'time')
plot.ex <- ggplot2::ggplot(predic.table.melt, ggplot2::aes(time,value,group=variable,col=variable)) +
ggplot2::geom_point() +
ggplot2::geom_line() +
ggplot2::theme_bw() +
ggplot2::theme(axis.title.y = ggplot2::element_text(face='bold', angle = 90),
plot.title = ggplot2::element_text(size = ggplot2::rel(1.5),face = 'bold',hjust = 0.5)) +
ggplot2::scale_y_continuous(labels=scales ::comma) +
ggplot2::ggtitle(paste(level, "level", "Prediction", "Interval"))
# cumulative residual plot
# here the cumulative residual accumulates only the residuals of values that stay within the upper/lower bounds
predic.table$residual <- predic.table$value - predic.table$fit
predic.table$residual_for_cumul <- predic.table$residual
predic.table$residual_for_cumul[predic.table$value < predic.table$lwr |
predic.table$value > predic.table$upr] <- c(0)
predic.table$cumulative_sum_of_residual <- cumsum(predic.table$residual_for_cumul)
if (!is.null(cumul.thre)) {
predic.table$cumul_upr <- 0 + cumul.thre
predic.table$cumul_lwr <- 0 - cumul.thre
} else {
predic.table$cumul_upr <- 0 + qnorm(1 - (1 - level) / 8) * sd(predic.table$cumulative_sum_of_residual)
predic.table$cumul_lwr <- 0 - qnorm(1 - (1 - level) / 8) * sd(predic.table$cumulative_sum_of_residual)
}
predic.table.melt <- reshape::melt(predic.table[, c('time','cumulative_sum_of_residual', 'cumul_upr','cumul_lwr')],
id.vars = 'time')
plot.resid.ex <- ggplot2::ggplot(predic.table.melt, ggplot2::aes(time, value, group = variable, col = variable)) +
ggplot2::geom_point() +
ggplot2::geom_line() +
ggplot2::theme_bw() +
ggplot2::theme(axis.title.y = ggplot2::element_text(face='bold', angle = 90),
plot.title = ggplot2::element_text(size = ggplot2::rel(1.5), face = 'bold', hjust = 0.5)) +
ggplot2::scale_y_continuous(labels=scales::comma) +
ggplot2::ggtitle("Cumulative Sum of Residual ") + ggplot2::ylab("Cumulative Sum of Residual")
# flag anomalies
if (direction == "upr") {
predic.table$anormal_flag <- c('normal')
predic.table$anormal_flag[predic.table$upr < predic.table$value] <- c('upr_anormal')
} else if(direction == 'lwr') {
predic.table$anormal_flag <- c('normal')
predic.table$anormal_flag[predic.table$lwr > predic.table$value]<-c('lwr_anormal')
} else if(direction == 'both') {
predic.table$anormal_flag <- c('normal')
predic.table$anormal_flag[predic.table$lwr > predic.table$value ] <- c('lwr_anormal')
predic.table$anormal_flag[predic.table$upr < predic.table$value ] <- c('upr_anormal')
} else {
warning(paste('direction=', direction, "is not supported.", "Try 'upr' or 'lwr' or 'both'.", "Used direction default 'both'"))
}
predic.table$anormal_flag[predic.table$cumul_lwr > predic.table$cumulative_sum_of_residual|
predic.table$cumul_upr < predic.table$cumulative_sum_of_residual] <- c('cum_resid_anormal')
predic.table$anormal_flag <- as.factor(predic.table$anormal_flag)
return(list("result_table" = predic.table[,-7],
"Interval_Plot" = plot.ex,
"Cumulative_Sum_of_Residual_Plot" = plot.resid.ex))
}
######################################################################################################
######## Trend generation #######
# build seasonality features
# data : training data
# period : data period
# series.order : number of Fourier components
# returns a matrix of seasonality features
fourier_series <- function(data,
period,
series.order) {
gran <- get_gran(data)
if (gran == 'day') {
t <- data[['time']] - zoo::as.Date('1970-01-01')
} else if (gran == 'hr') {
t <- as.numeric(difftime(data[['time']],
as.POSIXct(strptime('1970-01-01 00', format = "%Y-%m-%d %H", tz = "UTC"),
units = c('days'))))
}
features <- matrix(0, length(t), 2 * series.order)
for (i in 1:series.order) {
x <- as.numeric(2 * i * pi * t / period)
features[, i * 2 - 1] <- sin(x)
features[, i * 2] <- cos(x)
}
return(features)
}
# build the seasonality feature matrix
# prefix : prefix for the column names
make_seasonality_features <- function(data,
period,
series.order,
prefix) {
features <- fourier_series(data, period, series.order)
colnames(features) <- paste(prefix, 1:ncol(features), sep = '_delim_')
return(data.frame(features))
}
# build the seasonality feature data frame
# m : object used to build the prophet-style trend
# data : training data
make_all_seasonality_features <- function(m, data) {
seasonal.features <- data.frame(zeros = rep(0, nrow(data)))
gran <- get_gran(data)
period <- m$period
if (m$period > 7 & gran == 'day') {
order <- 10
prefix <- 'yearly'
} else if (m$period == 7 & gran == 'day') {
order <- 3
prefix <- 'weekly'
} else if (m$period > 24 & gran == 'hr') {
order <- 10
prefix <- 'hourly'
} else if (m$period <= 24 & gran == 'hr') {
order <- 3
prefix <- 'hourly'
} else if (m$period > 1 & gran == 'mon') {
order <- 10
prefix <- 'monthly'
} else if (m$period <= 1 & gran == 'mon') {
order <- 3
prefix <- 'monthly'
} else if (m$period > 60 & gran == 'min') {
order <- 10
prefix <- 'monthly'
} else if (m$period <= 60 & gran == 'min') {
order <- 3
prefix <- 'monthly'
} else if (m$period > 60 & gran == 'sec') {
order <- 10
prefix <- 'monthly'
} else if (m$period <= 60 & gran == 'sec') {
order <- 3
prefix <- 'monthly'
}
return(data.frame(zeros = rep(0, nrow(data)),
make_seasonality_features(data, period, order, prefix)))
}
# preparation table for building the trend variable
# m : object used to build the prophet-style trend
# data : data to prepare before trend generation (training data or prediction data)
# run.ex : TRUE when the data is training data
setup_dataframe <- function(m, data, run.ex = FALSE) {
value <- m$value
if (ncol(data) > 1) {
data[[value]] <- as.numeric(data[[value]])
}
if (anyNA(data[['time']])) {
stop('Unable to parse date format in column time. Convert to date format.')
}
if (run.ex == TRUE) {
m$y.scale <- max(data[[value]])
m$start <- min(data[['time']])
m$t.scale <- as.numeric(difftime(max(data[['time']]), min(data[['time']]), units = c('days')))
}
data$t <- as.numeric(difftime(data[['time']], m$start, units = c('days')) / m$t.scale)
if (ncol(data) > 1) {
data$y_scaled <- data[[value]] / m$y.scale
}
return(list("m" = m, "data" = data))
}
# set the changepoints
# m : object used to build the prophet-style trend
set_changepoints <- function(m) {
gran <- get_gran(m$history)
if (!is.null(m$changepoints)) {
if (length(m$changepoints) > 0) {
if (min(m$changepoints) < min(m$history[['time']])
|| max(m$changepoints) > max(m$history[['time']])) {
stop('Changepoints must fall within training data.')
}
}
temp <- format_time(m$changepoints)
if (gran == 'day'){
m$changepoints <- m$history$time[m$history$time >= zoo::as.Date(m$changepoints)]
m$changepoints.t <- sort(as.numeric(m$changepoints - m$start) / m$t.scale)
} else {
m$changepoints <- m$history$time[m$history$time >= temp]
m$changepoints.t <- sort(as.numeric(difftime(m$changepoints, m$start, units = c('days')) / m$t.scale))
}
} else {
if (m$n.changepoints > 0) {
# Place potential changepoints evenly through the first 80 pcnt of
# the history.
cp.indexes <- round(seq.int(1,
floor(nrow(m$history) * .8),
length.out = (m$n.changepoints + 1))
)[-1]
m$changepoints <- m$history[['time']][cp.indexes]
if (gran == 'day') {
if (length(m$changepoints) > 0) {
m$changepoints <- zoo::as.Date(m$changepoints)
m$changepoints.t <- sort(as.numeric(m$changepoints - m$start) / m$t.scale)
}
} else {
m$changepoints <- format_time(m$changepoints)
m$changepoints.t <- sort(as.numeric(difftime(m$changepoints, m$start, units = c('days')) / m$t.scale))
}
} else {
m$changepoints <- c()
}
}
return(m)
}
# build a matrix whose entries are 0 before each changepoint and 1 after it
# m : object used to build the prophet-style trend
get_changepoint_matrix <- function(m) {
A <- matrix(0, nrow(m$history), length(m$changepoints.t))
for (i in 1:length(m$changepoints.t)) {
A[m$history$t >= m$changepoints.t[i], i] <- 1
}
return(A)
}
# initial parameters for the trend, derived from the first and last points of the series
# df : training data
linear_growth_init <- function(df) {
i <- which.min(as.POSIXct(df[['time']]))
j <- which.max(as.POSIXct(df[['time']]))
time.gap <- df$t[j] - df$t[i]
# Initialize the rate
k <- (df$y_scaled[j] - df$y_scaled[i]) / time.gap
# And the offset
m <- df$y_scaled[i] - k * df$t[i]
return(c(k, m))
}
# load the compiled Stan model
# model : 'linear'; loads the model that estimates the linear trend parameters
get_stan_model <- function(model) {
fn <- paste('prophet', model, 'growth.RData', sep = '_')
## If the cached model doesn't work, just compile a new one.
tryCatch({
binary <- system.file('libs',
Sys.getenv('R_ARCH'),
fn,
package = 'timesuperin',
mustWork = TRUE)
load(binary)
obj.name <- paste(model, 'growth.stanm', sep = '.')
stanm <- eval(parse(text = obj.name))
})
}
# estimate the parameters that generate the final trend
# m : object used to build the prophet-style trend
mk.trend.parm<-function(m) {
# preparation table for building the trend variable
out <- setup_dataframe(m, m$history, run.ex = TRUE)
history <- out$data
m <- out$m
m$history <- history
# build the seasonality feature data frame
seasonal.features <- make_all_seasonality_features(m, history)
# set the changepoints and build the changepoint matrix
m <- set_changepoints(m)
A <- get_changepoint_matrix(m)
# inputs for the Stan model
dat <- list(
T = nrow(history),
K = ncol(seasonal.features),
S = length(m$changepoints.t),
y = history$y_scaled,
t = history$t,
A = A,
t_change = array(m$changepoints.t),
X = as.matrix(seasonal.features),
sigma = m$seasonality.prior.scale,
tau = m$changepoint.prior.scale
)
model <- get_stan_model('linear')
stan_init <- function() {
list(k = linear_growth_init(history)[1],
m = linear_growth_init(history)[2],
delta = array(rep(0, length(m$changepoints.t))),
beta = array(rep(0, ncol(seasonal.features))),
sigma_obs = 1
)
}
# estimate the trend parameters with the Stan model
stan.fit <- rstan::optimizing(
model,
data = dat,
init = stan_init,
iter = 1e4,
as_vector = FALSE
)
m$params <- stan.fit$par
return(m)
}
# Trend generation (prediction)
# t : (time - start time) / (end time - start time) for the target data
# deltas : rate changes at the generated changepoints
# k : base growth rate; m : trend offset
# changepoint.ts : (changepoint time - start time) / (end time - start time)
piecewise_linear <- function(t, deltas, k, m, changepoint.ts) {
# Intercept changes
gammas <- -changepoint.ts * deltas
# Get cumulative slope and intercept at each t
k_t <- rep(k, length(t))
m_t <- rep(m, length(t))
for (s in 1:length(changepoint.ts)) {
indx <- t >= changepoint.ts[s]
k_t[indx] <- k_t[indx] + deltas[s]
m_t[indx] <- m_t[indx] + gammas[s]
}
y <- k_t * t + m_t
return(y)
}
# Trend generation (prediction)
# trend_param : parameters needed to build the trend, produced by the model-fitting function
# k : base growth rate of the actual values, ((last value - first value) / largest value)
# m : offset, ratio of the first value to the largest value
# delta : growth-rate changes at the changepoints
# beta : seasonal vector
# gamma : adjusted offset, product of the changepoints and delta
# t.scale : time difference between the last and the first timestamp
# y.scale : largest actual value
# changepoints.t : (changepoint time - start time) / (end time - start time)
predict_trend <- function(trend_param, df) {
df$t <- as.numeric(difftime(df[['time']], trend_param$start, units = c('days')) / trend_param$t.scale)
k <- mean(trend_param$params$k, na.rm = TRUE)
param.m <- mean(trend_param$params$m, na.rm = TRUE)
deltas <- trend_param$params$delta
t <- df$t
trend <- piecewise_linear(t, deltas, k, param.m, trend_param$changepoints.t)
return(trend * trend_param$y.scale)
}
|
0f5383343028e98837c84c6455be2fcc15627b26
|
[
"Markdown",
"R"
] | 2
|
Markdown
|
danbi-ncsoft/timesuperin
|
2fef6fa48299e597981e4ce082283180e972b2ef
|
5d002e1040b2f282664bcd74ace3b1fcfbbf6008
|
refs/heads/master
|
<repo_name>Sulemandar/API<file_sep>/WebAPICURD_Demo/Controllers/EmployeeController.cs
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Web.Http;
using WebAPICURD_Demo.Models;
namespace WebAPICURD_Demo.Controllers
{
public class EmployeeController : ApiController
{
public IEnumerable<Employee> GetEmployees()
{
using (EmployeeEntities db = new EmployeeEntities())
{
return db.Employees.ToList();
}
}
public HttpResponseMessage GetEmployeeById(int id)
{
using (EmployeeEntities db = new EmployeeEntities())
{
var entity = db.Employees.FirstOrDefault(e => e.ID == id);
if (entity == null)
{
return Request.CreateErrorResponse(HttpStatusCode.NotFound, "Employee with id=" + id.ToString() + " not found");
}
else
{
return Request.CreateResponse(HttpStatusCode.OK, entity);
}
}
}
public HttpResponseMessage Post([FromBody] Employee employee)
{
try
{
using (EmployeeEntities db = new EmployeeEntities())
{
db.Employees.Add(employee);
db.SaveChanges();
var message = Request.CreateResponse(HttpStatusCode.Created, employee);
// var entity = db.Employees.FirstOrDefault(e => e.ID == id);
message.Headers.Location = new Uri(Request.RequestUri + employee.ID.ToString());
return message;
}
}
catch (Exception ex)
{
return Request.CreateErrorResponse(HttpStatusCode.BadRequest, ex);
}
}
public HttpResponseMessage Delete(int id)
{
using (EmployeeEntities db = new EmployeeEntities())
{
var entity = db.Employees.FirstOrDefault(e => e.ID == id);
if (entity == null)
{
return Request.CreateErrorResponse(HttpStatusCode.NotFound, "Employee with id=" + id.ToString() + "not found to delete");
}
else
{
db.Employees.Remove(entity);
db.SaveChanges();
return Request.CreateResponse(HttpStatusCode.OK);
}
}
}
public HttpResponseMessage Put(int id, [FromBody] Employee employee)
{
using (EmployeeEntities db = new EmployeeEntities())
{
try
{
var entity = db.Employees.FirstOrDefault(e => e.ID == id);
if (entity == null)
{
return Request.CreateErrorResponse(HttpStatusCode.NotFound, "Employee with id=" + id.ToString() + "not found to update");
}
else
{
entity.Name = employee.Name;
entity.Age = employee.Age;
db.SaveChanges();
return Request.CreateResponse(HttpStatusCode.OK, entity);
}
}
catch (Exception ex)
{
return Request.CreateErrorResponse(HttpStatusCode.BadRequest, ex);
}
}
}
}
}
|
c1e6de31bb90cf9253af41366a1e1e0ffbc283d3
|
[
"C#"
] | 1
|
C#
|
Sulemandar/API
|
716357cd74aff53ce7734e8854acac2d07636c68
|
474a4c4336c2c4847564ea8d98d34b141df5b42b
|
refs/heads/master
|
<repo_name>TheVarunGupta/MapReduce<file_sep>/README.md
<!DOCTYPE html>
<div>
<h2>Intro to Hadoop and MapReduce - Udacity</h2>
<p>Link to the course-- </p><p><a href="https://classroom.udacity.com/courses/ud617">https://classroom.udacity.com/courses/ud617</a></p>
<p>The code in this repo relates only to the project section of the Udacity course and is meant to help people who get stuck on the problems.</p>
<h3>Question I</h3>
<hr>
<p>The three questions about this data set are:</p>
<ul>
<li>Instead of breaking the sales down by store, instead give us a sales breakdown by product category across all of our stores.</li>
<li>Find the monetary value for the highest individual sale for each separate store.</li>
<li>Find the total sales value across all the stores, and the total number of sales. Assume there is only one reducer.</li>
</ul>
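<p>As a starting point for the first question, a minimal Hadoop Streaming mapper might look like the sketch below. It is not part of the course solutions and assumes tab-separated purchase records whose fourth field is the item category and fifth field is the sale amount — adjust the indices to your data, and pair it with a summing reducer such as the one in <code>Question 2/reducer2.py</code>.</p>
<pre><code>#!/usr/bin/python
# mapper sketch: emit (category, sale amount) for each purchase record
import sys

for line in sys.stdin:
    data = line.strip().split("\t")
    if len(data) != 6:
        continue
    date, time, store, category, cost, payment = data
    print "{0}\t{1}".format(category, cost)
</code></pre>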
</div>
<div>
<div>
<div>
<h3>Question II</h3>
<hr>
<p>The data set we're using is an anonymized Web server log file from a public relations company whose clients were DVD distributors. The log file is in the udacity_training/data directory, and it's currently compressed using GnuZip. So you'll need to decompress it and then put it in HDFS. If you take a look at the file, you'll see that each line represents a hit to the Web server. It includes the IP address which accessed the site, the date and time of the access, and the name of the page which was visited.</p>
<p>The logfile is in <a target="_blank" href="http://en.wikipedia.org/wiki/Common_Log_Format">Common Log Format</a>:</p>
<pre><code>10.223.157.186 - - [15/Jul/2009:15:50:35 -0700] "GET /assets/js/lowpro.js HTTP/1.1" 200 10469
%h %l %u %t \"%r\" %>s %b
</code></pre><p>Where:</p>
<ul>
<li>%h is the IP address of the client</li>
<li>%l is identity of the client, or "-" if it's unavailable</li>
<li>%u is username of the client, or "-" if it's unavailable</li>
<li>%t is the time that the server finished processing the request. The format is [day/month/year:hour:minute:second zone]</li>
<li>%r is the request line from the client is given (in double quotes). It contains the method, path, query-string, and protocol or the request.</li>
<li>%>s is the status code that the server sends back to the client. You will see mostly status codes 200 (OK - The request has succeeded), 304 (Not Modified) and 404 (Not Found). See more information on status codes <a target="_blank" href="http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html">in W3C.org</a></li>
<li>%b is the size of the object returned to the client, in bytes. It will be "-" in case of status code 304.</li>
</ul>
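<p>A minimal mapper sketch for this log data (not part of the course solutions) could parse each line and emit the requested path so that a summing reducer can count hits per page; it assumes well-formed Common Log Format lines as described above.</p>
<pre><code>#!/usr/bin/python
# mapper sketch: emit (path, 1) for every hit in the access log
import sys

for line in sys.stdin:
    fields = line.strip().split()
    # well-formed lines split into 10 tokens:
    # ip - - [date zone] "method path protocol" status bytes
    if len(fields) != 10:
        continue
    path = fields[6]
    print "{0}\t{1}".format(path, 1)
</code></pre>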
</div>
</div>
</div><file_sep>/Question 2/reducer2.py
#!/usr/bin/python
import sys
Total = 0
oldKey = None
for line in sys.stdin:
data_mapped = line.strip().split("\t")
if len(data_mapped) != 2:
continue
Key,Value = data_mapped
if oldKey and oldKey != Key:
print oldKey, "\t",Total
oldKey = Key;
Total = 0
oldKey = Key
Total += float(Value)
if oldKey != None:
print oldKey, "\t", Total
|
a45f638b77950fa200d47e440566bd52837ad2bb
|
[
"Markdown",
"Python"
] | 2
|
Markdown
|
TheVarunGupta/MapReduce
|
927b84a8ed1ad86dfcad3a60853d296970772749
|
84d1510d921d45f742f74056b2accfa5f50217af
|
refs/heads/master
|
<repo_name>angelhackhackers/food-mesh-app<file_sep>/src/config/styles.js
'use strict';
import {StyleSheet} from 'react-native';
import color from '../constants/Colors';
// this is some random color name
var food='#ffffff' //green#73c700
var lightFood='#fefefe'
var lighterFood='#ff9f99'
export default StyleSheet.create({
createEventText: {
color: 'black',
fontSize:20
},
createContainer: {
paddingTop:10,
paddingBottom:10,
},
createEventTextUnderline: {
borderBottomColor: 'lightgrey',
},
createEventView: {
...StyleSheet.absoluteFillObject,
justifyContent: 'center',
alignItems: 'center',
backgroundColor: 'white',
},
smallFont: {
fontSize: 25,
color: 'white'
},
bigFont: {
fontSize: 50,
color: 'white',
},
requestListView:{
flexDirection:'row',
flex:1,
paddingTop:5,
paddingBottom:5,
},
requestListConfirm:{
alignSelf:'flex-end',
paddingLeft:5
},
eventlistSubtitle:{
fontSize:15,
color:'grey'
},
eventlistCellHeight:{
height:50
},
eventListavatar:{
borderWidth:2,
borderColor:food,
},
createEventButtonContainer: {
marginTop:30
},
createEventButton: {
backgroundColor:lighterFood,
height: 48,
width: '100%',
borderRadius:8,
},
createEventButtonGroup:{
backgroundColor:lighterFood,
borderRadius:8,
},
createEventButtonTitle: {
color:food,
fontSize: 20
},
avatar:{
borderWidth:4,
borderColor:food,
},
loginView: {
...StyleSheet.absoluteFillObject,
justifyContent: 'center',
alignItems: 'center',
backgroundColor: food,
},
loginButtonTitle: {
color:food,
fontSize:20
},
login2ButtonTitle: {
color:'white',
fontSize:20
},
loginButton: {
paddingRight:50,
paddingLeft:50,
height:48,
borderRadius:8,
width:'100%',
backgroundColor:'white',
},
login2Button: {
paddingRight:50,
paddingLeft:50,
height:48,
borderRadius:8,
width:'100%',
backgroundColor:'#73c700',
marginTop:40
},
loginButtonContainer: {
marginTop:30,
},
logoCombined: {
alignItems: 'center',
flex:0.3
},
loginText: {
color: 'white',
fontSize: 20
},
loginContainer: {
paddingTop:10,
paddingBottom:10,
marginBottom:10
},
loginTextUnderline: {
borderBottomColor: 'rgba(255,255,255,0.5)',
},
container: {
flex: 1,
justifyContent: 'center',
alignItems: 'center',
backgroundColor: '#F5FCFF',
},
title: {
fontSize: 12,
color: '#000',
backgroundColor: '#fff'
},
button: {
fontSize: 12,
color: '#000',
backgroundColor: '#fff'
}
});
<file_sep>/src/components/CameraPreview.js
/**
* Camera.js
* Camera Component
*/
import React from 'react';
import { Text, View, TouchableOpacity } from 'react-native';
import { Toast } from 'native-base';
import * as Icon from '@expo/vector-icons';
import * as Permissions from 'expo-permissions';
import { Camera } from 'expo-camera';
import { Storage } from 'aws-amplify';
import { Input, Button,ButtonGroup} from 'react-native-elements';
import style from '../config/styles'
import Colors from '../constants/Colors';
export default class CameraComponent extends React.Component {
constructor(props) {
super(props);
this.state = {
hasCameraPermission: null, // camera permission
type: Camera.Constants.Type.back, // use the back-facing camera
count:10
};
this.takePicture = this.takePicture.bind(this);
}
// Request camera permission when the component first mounts.
async componentWillMount() {
const { status } = await Permissions.askAsync(Permissions.CAMERA);
this.setState({
hasCameraPermission: status === 'granted',
});
}
// Take a picture
async takePicture() {
const pictureData = await this.camera.takePictureAsync({width:30,height:30});
this.setState({count:this.state.count-1})
if(this.state.count<1){alert('10 images uploaded')}
const awsReturn = uploadImageToS3(pictureData.uri)
console.log('return: ',awsReturn)
}
render() {
const {
hasCameraPermission,
} = this.state;
if (hasCameraPermission === null || hasCameraPermission === false) {
return (
<View>
<Text>
Camera access has not been granted.
</Text>
</View>
);
}
return (
<View
style={{
flex: 1,
}}
>
<Camera
style={{
flex: 1,
}}
type={this.state.type}
ref={(ref) => {
this.camera = ref;
}}
>
<Input
placeholderTextColor='white'
containerStyle ={{...style.createEventContainer,height:40,margin:20,backgroundColor:'transparent'}}
inputContainerStyle={{...style.createEventTextUnderline,color:'white'}}
inputStyle = {{...style.createEventText,color:'white'}}
placeholder='Food Name'
/>
<Text style={{fontSize:30,color:'white',marginLeft:20}}>
{this.state.count}
</Text>
<View
style={{
flex: 1,
backgroundColor: 'transparent',
flexDirection: 'row',
}}
>
<TouchableOpacity
style={{
flex: 1,
alignSelf: 'flex-end',
alignItems: 'center',
}}
onPress={() => {
this.takePicture();
}}
>
<Icon.MaterialIcons
name="camera"
size={70}
style={{ marginBottom: 20 }}
color={Colors.tabIconDefault}
/>
</TouchableOpacity>
</View>
</Camera>
</View>
);
}
}
// const storeFileInS3 = async (
// fileUri,
// awsKey = null,
// access = "public"
// ) => {
// const blob = await new Promise((resolve, reject) => {
// const xhr = new XMLHttpRequest();
// xhr.onload = function() {
// resolve(xhr.response);
// };
// xhr.onerror = function() {
// reject(new TypeError("Network request failed"));
// };
// xhr.responseType = "blob";
// xhr.open("GET", fileUri, true);
// xhr.send(null);
// });
// const { name, type } = blob._data;
// const options = {
// level: access,
// contentType: type
// };
// const key = awsKey || name;
// try {
// const result = await Storage.put(key, blob, options);
// return {
// access,
// key: result.key
// };
// } catch (err) {
// throw err;
// }
// };
const uploadImageToS3 = async uri => {
const response = await fetch(uri)
const blob = await response.blob() // format the data for images
const folder = 'images'
// generate a unique random name for every single image 'fixed length'
const fileName = Math.random().toString(18).slice(3).substr(0, 10) + '.jpeg'
await Storage.put(folder + '/' + fileName, blob, {
contentType: 'image/jpeg',
level: 'public'
})
.then(() => {
// every time a new image is added, we call all the items again
this.fetchImages('images/', { level: "public" })
})
.catch(err => console.log(err))
}
<file_sep>/src/screens/ProfileScreen.js
import React, { Component } from 'react'
import { AppRegistry, StyleSheet, View, Text } from 'react-native'
import { ViewPager } from 'rn-viewpager'
import MapView , {Marker} from 'react-native-maps';
import StepIndicator from 'react-native-step-indicator'
import SetName from '../components/SetName'
import AudioRecord from '../components/AudioRecord'
import PhotoGallery from '../components/photoGallery'
const PAGES = ['Page 1', 'Page 2', 'Page 3', 'Page 4', 'Page 5']
const thirdIndicatorStyles = {
stepIndicatorSize: 25,
currentStepIndicatorSize: 30,
separatorStrokeWidth: 2,
currentStepStrokeWidth: 3,
stepStrokeCurrentColor: '#7eaec4',
stepStrokeWidth: 3,
stepStrokeFinishedColor: '#7eaec4',
stepStrokeUnFinishedColor: '#dedede',
separatorFinishedColor: '#7eaec4',
separatorUnFinishedColor: '#dedede',
stepIndicatorFinishedColor: '#7eaec4',
stepIndicatorUnFinishedColor: '#ffffff',
stepIndicatorCurrentColor: '#ffffff',
stepIndicatorLabelFontSize: 0,
currentStepIndicatorLabelFontSize: 0,
stepIndicatorLabelCurrentColor: 'transparent',
stepIndicatorLabelFinishedColor: 'transparent',
stepIndicatorLabelUnFinishedColor: 'transparent',
labelColor: '#999999',
labelSize: 13,
currentStepLabelColor: '#7eaec4'
}
const getStepIndicatorIconConfig = ({ position, stepStatus }) => {
const iconConfig = {
name: 'feed',
color: stepStatus === 'finished' ? '#ffffff' : '#fe7013',
size: 15
}
switch (position) {
case 0: {
iconConfig.name = 'shopping-cart'
break
}
case 1: {
iconConfig.name = 'location-on'
break
}
case 2: {
iconConfig.name = 'assessment'
break
}
case 3: {
iconConfig.name = 'payment'
break
}
case 4: {
iconConfig.name = 'track-changes'
break
}
default: {
break
}
}
return iconConfig
}
export default class App extends Component {
constructor () {
super()
this.state = {
currentPage: 0
}
}
componentWillReceiveProps (nextProps, nextState) {
if (nextState.currentPage != this.state.currentPage) {
if (this.viewPager) {
this.viewPager.setPage(nextState.currentPage)
}
}
}
render () {
return (
<View style={styles.container}>
<View style={styles.stepIndicator}>
<StepIndicator
stepCount={4}
customStyles={thirdIndicatorStyles}
currentPosition={this.state.currentPage}
onPress={this.onStepPress}
labels={['Location', 'Name', 'Audio', '360 Photo']}
/>
</View>
<ViewPager
style={{ flexGrow: 1 }}
ref={viewPager => {
this.viewPager = viewPager
}}
onPageSelected={page => {
this.setState({ currentPage: page.position })
}}
>
{PAGES.map(page => this.renderViewPagerPage(page))}
</ViewPager>
</View>
)
}
onStepPress = position => {
this.setState({ currentPage: position })
this.viewPager.setPage(position)
}
renderViewPagerPage = data => {
console.log(data)
return (
<View style={styles.page}>
{ data=='Page 1'?<MapView
style={{ alignSelf: 'stretch', height: 400 }}
initialRegion={{
latitude: 35.674372, longitude: 139.768568,
latitudeDelta: 0.0922, longitudeDelta: 0.0421,
}}
><Marker draggable coordinate={{latitude: 35.674372, longitude: 139.768568,}}/>
</MapView>:null
}
{ data=='Page 2'?
<SetName/>
:null
}
{ data=='Page 3'?
<AudioRecord/>
:null
}
{ data=='Page 4'?
<PhotoGallery style={{height:'100%'}}/>
:null
}
</View>
)
}
renderStepIndicator = params => (
<View/>
)
renderLabel = ({ position, stepStatus, label, currentPosition }) => {
return (
<Text
style={
position === currentPosition
? styles.stepLabelSelected
: styles.stepLabel
}
>
{label}
</Text>
)
}
}
const styles = StyleSheet.create({
container: {
flex: 1,
backgroundColor: '#ffffff'
},
stepIndicator: {
marginVertical: 50
},
page: {
flex: 1,
justifyContent: 'center',
alignItems: 'center'
},
stepLabel: {
fontSize: 12,
textAlign: 'center',
fontWeight: '500',
color: '#999999'
},
stepLabelSelected: {
fontSize: 12,
textAlign: 'center',
fontWeight: '500',
color: '#4aae4f'
}
})<file_sep>/src/models/dealer.js
export default class Dealer {
constructor(name, coordinate, price, drinks) {
this.name = name;
this.coordinate = coordinate;
this.price = price
this.drinks = drinks
}
get drinkStrings(){
const drinkTypeList = this.drinks.map(drink => drink.type + " * " + drink.amount)
return drinkTypeList.join('\n')
}
}
<file_sep>/src/models/events.js
export default class Events {
constructor(time,shop) {
this.time = time;
this.shop = shop;
}
}<file_sep>/src/screens/SettingsScreen.js
import React, { Component } from 'react';
import { Container, Header, Content, List, ListItem, Left, Body, Right, Thumbnail, Text } from 'native-base';
export default class ListAvatarExample extends Component {
render() {
return (
<Container>
<Content>
<List>
<ListItem avatar>
<Left>
<Thumbnail source={{ uri: 'https://www.uni-regensburg.de/Fakultaeten/phil_Fak_II/Psychologie/Psy_II/beautycheck/english/durchschnittsgesichter/m(01-32)_gr.jpg' }} />
</Left>
<Body>
<Text><NAME></Text>
<Text note> Obento x 1 </Text>
</Body>
<Right>
<Text note>3:43 pm</Text>
</Right>
</ListItem>
</List>
</Content>
</Container>
);
}
}<file_sep>/src/stores/store.js
import {Animated} from 'react-native'
import {observable, computed, decorate, observer, autorun}from 'mobx'
import Dealer from '../models/dealer'
import Coordinates from './../models/coordinates'
import Events from './../models/events'
class Store {
uchida = new Dealer('Lunch at Cafe',new Coordinates(35.657938, 139.898271), 1000, this.uchidaList)
domino = new Dealer('Lunch time!! Burgers??', new Coordinates(35.659905, 139.902691), 3000, this.dominoList)
brinton = new Dealer('Lets eat pizza!', new Coordinates(35.649920, 139.911442), 1000,this.brintonList)
sunroot = new Dealer('I like Italian',new Coordinates(35.64920, 139.918271),2000,this.brintonList)
firstSelect = new Dealer('Vegitarian',new Coordinates(35.6549905, 139.90271),1000,this.brintonList)
kaizo = new Dealer('I don\'t eat pork',new Coordinates(35.657938, 139.910299),3000,this.brintonList)
tokyoBay = new Dealer('<NAME>!',new Coordinates(35.653938, 139.899271),1000,this.brintonList)
scheduleTime = new Date();
scheduleEvent = new Events(this.scheduleTime,{name:'My Current Location'})
detailsVisible = false
AvatarURL=[]
dealerList = [
this.uchida,
this.domino,
this.brinton,
this.sunroot,
this.firstSelect,
this.kaizo,
this.tokyoBay
]
displayRegion=new Coordinates(0,0)
activeSlide=1
switchViewToEvent(eventID){
this.displayRegion.longitude = this.dealerList[eventID].coordinate.longitude
this.displayRegion.latitude = this.dealerList[eventID].coordinate.latitude
}
coordinateList = []
sliderRefArray = []
constructor() {
this.openDealer = new Animated.Value(0.2)
this.detailsVisible = false
this.dealerList.forEach((dealer)=>{
dealer.id = this.dealerList.indexOf(dealer)
})
this.coordinateList = this.dealerList.map(dealer =>
dealer.coordinate
)
}
}
decorate(Store, {
  openDealer: observable,
  displayRegion: observable,
  activeSlide: observable,
  detailsVisible: observable,
})
export default (new Store);
|
e311aab2b1971f80b56e6266ac2dfd9eebfbcb56
|
[
"JavaScript"
] | 7
|
JavaScript
|
angelhackhackers/food-mesh-app
|
c430b5cb57d4f89f9072eedd326dd6cfc190f4d0
|
4f83a5d35ffbfea534569793ac3b1bb508cac426
|
refs/heads/master
|
<repo_name>laozhoubuluo/discuz_ranklistlimit<file_sep>/laozhoubuluo_ranklistlimit.class.php
<?php
if(!defined('IN_DISCUZ')) {
exit('Access Denied');
}
class plugin_laozhoubuluo_ranklistlimit {
public function common() {
global $_G;
// If the plugin switch is off or this is not the spacecp module, return array() immediately to end the hook
if(CURMODULE != 'spacecp' || !$_G['cache']['plugin']['laozhoubuluo_ranklistlimit']['status']) {
return array();
}
// Enforce the unit-price lower/upper limits when the unit price is being modified
if(isset($_GET['ac']) && isset($_GET['op']) && isset($_POST['unitprice']) && $_GET['ac'] == 'common' && $_GET['op'] == 'modifyunitprice') {
$unitprice = intval($_POST['unitprice']);
$unitlowerlimit = is_numeric($_G['cache']['plugin']['laozhoubuluo_ranklistlimit']['unitlowerlimit']) ? intval($_G['cache']['plugin']['laozhoubuluo_ranklistlimit']['unitlowerlimit']) : 0;
$unithigherlimit = is_numeric($_G['cache']['plugin']['laozhoubuluo_ranklistlimit']['unithigherlimit']) ? intval($_G['cache']['plugin']['laozhoubuluo_ranklistlimit']['unithigherlimit']) : 2147483647;
if($unitprice < $unitlowerlimit) {
showmessage(lang('plugin/laozhoubuluo_ranklistlimit', 'lower_unit', array('num' => $unitprice, 'limit' => $unitlowerlimit)));
}
if($unitprice > $unithigherlimit) {
showmessage(lang('plugin/laozhoubuluo_ranklistlimit', 'higher_unit', array('num' => $unitprice, 'limit' => $unithigherlimit)));
}
}
// Enforce the total-credit lower/upper limits when putting someone else onto the ranking list
if(isset($_GET['ac']) && isset($_POST['friendsubmit']) && isset($_POST['fusername']) && isset($_POST['stakecredit']) && $_GET['ac'] == 'top') {
$uid = C::t('common_member')->fetch_uid_by_username($_POST['fusername']);
$showcredit = C::t('home_show')->fetch_by_uid_credit($uid)['credit'] + intval($_POST['stakecredit']);
$creditlowerlimit = is_numeric($_G['cache']['plugin']['laozhoubuluo_ranklistlimit']['creditlowerlimit']) ? intval($_G['cache']['plugin']['laozhoubuluo_ranklistlimit']['creditlowerlimit']) : 0;
$credithigherlimit = is_numeric($_G['cache']['plugin']['laozhoubuluo_ranklistlimit']['credithigherlimit']) ? intval($_G['cache']['plugin']['laozhoubuluo_ranklistlimit']['credithigherlimit']) : 2147483647;
if($showcredit < $creditlowerlimit) {
showmessage(lang('plugin/laozhoubuluo_ranklistlimit', 'lower_credit_friend', array('num' => $showcredit, 'limit' => $creditlowerlimit)));
}
if($showcredit > $credithigherlimit) {
showmessage(lang('plugin/laozhoubuluo_ranklistlimit', 'higher_credit_friend', array('num' => $showcredit, 'limit' => $credithigherlimit)));
}
}
// Enforce both the unit-price and the total-credit limits when putting yourself onto the ranking list
if(isset($_GET['ac']) && isset($_POST['showsubmit']) && isset($_POST['showcredit']) && isset($_POST['unitprice']) && $_GET['ac'] == 'top') {
$showcredit = C::t('home_show')->fetch_by_uid_credit($_G['uid'])['credit'] + intval($_POST['showcredit']);
$unitprice = intval($_POST['unitprice']);
$creditlowerlimit = is_numeric($_G['cache']['plugin']['laozhoubuluo_ranklistlimit']['creditlowerlimit']) ? intval($_G['cache']['plugin']['laozhoubuluo_ranklistlimit']['creditlowerlimit']) : 0;
$credithigherlimit = is_numeric($_G['cache']['plugin']['laozhoubuluo_ranklistlimit']['credithigherlimit']) ? intval($_G['cache']['plugin']['laozhoubuluo_ranklistlimit']['credithigherlimit']) : 2147483647;
$unitlowerlimit = is_numeric($_G['cache']['plugin']['laozhoubuluo_ranklistlimit']['unitlowerlimit']) ? intval($_G['cache']['plugin']['laozhoubuluo_ranklistlimit']['unitlowerlimit']) : 0;
$unithigherlimit = is_numeric($_G['cache']['plugin']['laozhoubuluo_ranklistlimit']['unithigherlimit']) ? intval($_G['cache']['plugin']['laozhoubuluo_ranklistlimit']['unithigherlimit']) : 2147483647;
if($unitprice < $unitlowerlimit) {
showmessage(lang('plugin/laozhoubuluo_ranklistlimit', 'lower_unit', array('num' => $unitprice, 'limit' => $unitlowerlimit)));
}
if($unitprice > $unithigherlimit) {
showmessage(lang('plugin/laozhoubuluo_ranklistlimit', 'higher_unit', array('num' => $unitprice, 'limit' => $unithigherlimit)));
}
if($showcredit < $creditlowerlimit) {
showmessage(lang('plugin/laozhoubuluo_ranklistlimit', 'lower_credit', array('num' => $showcredit, 'limit' => $creditlowerlimit)));
}
if($showcredit > $credithigherlimit) {
showmessage(lang('plugin/laozhoubuluo_ranklistlimit', 'higher_credit', array('num' => $showcredit, 'limit' => $credithigherlimit)));
}
}
// If nothing was blocked, return array() to end the hook
return array();
}
}<file_sep>/README.md
# Rank List Bidding Credit Limits
## About
**Still frustrated that large numbers of low-credit users occupy the prime spots of your rank-list bidding module? Still worried that forum credits are not being spent fast enough? Still concerned that users throwing huge sums at the list sends the wrong message?**
This plugin was designed for exactly that. It lets you set lower and upper limits on both the bid unit price and the total bid credits in the rank-list bidding feature, **guiding users to bid within your rules, improving how credits circulate, and rounding out your site's credit policy.**
## Open Source
This plugin is released under the Apache 2.0 License and is hosted on GitHub at https://github.com/laozhoubuluo/discuz_ranklistlimit .
If you run into any problems, you are welcome to report them via an Issue or contribute improvements through Pull Requests.
You are also welcome to Star the repository to encourage us to keep building similar products.
## Screenshots




|
3a62d16ff744392f601d454d5f2972b441c9c769
|
[
"Markdown",
"PHP"
] | 2
|
PHP
|
laozhoubuluo/discuz_ranklistlimit
|
d8c3192be9169d558afe688380dc3c717de09ce2
|
f16d9146eb63cfaa9ef2d5cc2f4c6e111fcbb983
|
refs/heads/master
|
<repo_name>nickest14/colossus<file_sep>/migrations/schema.sql
--
-- PostgreSQL database dump
--
-- Dumped from database version 11.5
-- Dumped by pg_dump version 12.3
SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET xmloption = content;
SET client_min_messages = warning;
SET row_security = off;
SET default_tablespace = '';
--
-- Name: groups; Type: TABLE; Schema: public; Owner: nick
--
CREATE TABLE public.groups (
id integer NOT NULL,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
name character varying(40)
);
ALTER TABLE public.groups OWNER TO nick;
--
-- Name: groups_id_seq; Type: SEQUENCE; Schema: public; Owner: nick
--
CREATE SEQUENCE public.groups_id_seq
AS integer
START WITH 1
INCREMENT BY 1
NO MINVALUE
NO MAXVALUE
CACHE 1;
ALTER TABLE public.groups_id_seq OWNER TO nick;
--
-- Name: groups_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: nick
--
ALTER SEQUENCE public.groups_id_seq OWNED BY public.groups.id;
--
-- Name: schema_migration; Type: TABLE; Schema: public; Owner: nick
--
CREATE TABLE public.schema_migration (
version character varying(14) NOT NULL
);
ALTER TABLE public.schema_migration OWNER TO nick;
--
-- Name: users; Type: TABLE; Schema: public; Owner: nick
--
CREATE TABLE public.users (
id uuid NOT NULL,
name character varying(40) NOT NULL,
email character varying(40) NOT NULL,
password character varying(255) NOT NULL,
login_at timestamp without time zone,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
provider character varying(255) NOT NULL,
provider_id character varying(255) NOT NULL,
group_id integer
);
ALTER TABLE public.users OWNER TO nick;
--
-- Name: groups id; Type: DEFAULT; Schema: public; Owner: nick
--
ALTER TABLE ONLY public.groups ALTER COLUMN id SET DEFAULT nextval('public.groups_id_seq'::regclass);
--
-- Name: groups groups_pkey; Type: CONSTRAINT; Schema: public; Owner: nick
--
ALTER TABLE ONLY public.groups
ADD CONSTRAINT groups_pkey PRIMARY KEY (id);
--
-- Name: users users_pkey; Type: CONSTRAINT; Schema: public; Owner: nick
--
ALTER TABLE ONLY public.users
ADD CONSTRAINT users_pkey PRIMARY KEY (id);
--
-- Name: groups_name_idx; Type: INDEX; Schema: public; Owner: nick
--
CREATE UNIQUE INDEX groups_name_idx ON public.groups USING btree (name);
--
-- Name: schema_migration_version_idx; Type: INDEX; Schema: public; Owner: nick
--
CREATE UNIQUE INDEX schema_migration_version_idx ON public.schema_migration USING btree (version);
--
-- Name: users_email_provider_provider_id_idx; Type: INDEX; Schema: public; Owner: nick
--
CREATE UNIQUE INDEX users_email_provider_provider_id_idx ON public.users USING btree (email, provider, provider_id);
--
-- Name: users users_group_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: nick
--
ALTER TABLE ONLY public.users
ADD CONSTRAINT users_group_id_fkey FOREIGN KEY (group_id) REFERENCES public.groups(id);
--
-- PostgreSQL database dump complete
--
<file_sep>/grifts/initialize.go
package grifts
import (
"colossus/models"
"github.com/markbates/grift/grift"
)
var _ = grift.Namespace("initialize", func() {
grift.Desc("initializeGroup", "Create a default group")
grift.Add("initializeGroup", func(c *grift.Context) error {
exists, err := models.DB.Where("name = ?", "defaultgroup").Exists("groups")
if err != nil {
return err
}
if !exists {
g := &models.Group{Name: "defaultgroup"}
err := models.DB.Create(g)
if err != nil {
return err
}
}
exists, err = models.DB.Where("name = ?", "admin").Exists("groups")
if err != nil {
return err
}
if !exists {
g := &models.Group{
Name: "admin",
}
err := models.DB.Create(g)
return err
}
return nil
})
})
<file_sep>/actions/auth.go
package actions
import (
"colossus/models"
"colossus/pkg"
"fmt"
"net/http"
"os"
"github.com/gobuffalo/buffalo"
"github.com/gobuffalo/pop/v5"
"github.com/gobuffalo/x/defaults"
"github.com/markbates/goth"
"github.com/markbates/goth/gothic"
"github.com/markbates/goth/providers/google"
"github.com/pkg/errors"
)
func init() {
gothic.Store = App().SessionStore
goth.UseProviders(
google.New(os.Getenv("GOOGLE_KEY"), os.Getenv("GOOGLE_SECRET"), fmt.Sprintf("%s%s", App().Host, "/auth/google/callback")),
)
}
func AuthCallback(c buffalo.Context) error {
gu, err := gothic.CompleteUserAuth(c.Response(), c.Request())
if err != nil {
return c.Error(401, err)
}
var userInfo = map[string]string{
"name": defaults.String(gu.Name, gu.NickName),
"providerID": gu.UserID,
"provider": gu.Provider,
"email": gu.Email,
}
tx := c.Value("tx").(*pop.Connection)
u := []models.User{}
err = tx.Eager("Group").Where("email = ? and provider = ? and provider_id = ?",
userInfo["email"], userInfo["provider"], userInfo["providerID"]).All(&u)
if err != nil {
return errors.WithStack(err)
}
var groupName string
if len(u) > 0 {
groupName = u[0].Group.Name
} else {
u := &models.User{}
u.Name = userInfo["name"]
u.Provider = userInfo["provider"]
u.ProviderID = userInfo["providerID"]
u.Email = userInfo["email"]
// TODO: use a cache instead of querying the group table every time
group := models.Group{}
err := tx.Where("Name = ?", "defaultgroup").First(&group)
if err != nil {
return c.Render(http.StatusBadRequest, r.String(err.Error()))
}
u.Group = &group
if err = tx.Save(u); err != nil {
return errors.WithStack(err)
}
groupName = group.Name
}
tokenString, err := pkg.CreateJWTToken(userInfo["email"], userInfo["provider"], userInfo["providerID"], groupName)
if err != nil {
return errors.WithStack(err)
}
var data = map[string]string{
"jwttoken": tokenString,
}
return c.Render(http.StatusOK, r.JSON(data))
}
<file_sep>/pkg/jwt.go
package pkg
import (
"errors"
"strings"
"time"
"github.com/dgrijalva/jwt-go"
"github.com/gobuffalo/envy"
)
var jwtSecret []byte
func init() {
jwtSecret = []byte(envy.Get("JWT_KEY", ""))
}
// Claims defines the custom jwt standard claims
type Claims struct {
Email string `json:"email"`
Provider string `json:"prodvider"`
Group string `json:"group"`
jwt.StandardClaims
}
// CreateJWTToken function is used to create user JWT token
func CreateJWTToken(email string, provider string, providerID string, group string) (string, error) {
claims := Claims{
Email: email,
Provider: provider,
Group: group,
StandardClaims: jwt.StandardClaims{
ExpiresAt: time.Now().Add(time.Hour * 24).Unix(),
Id: providerID,
},
}
token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)
tokenString, err := token.SignedString(jwtSecret)
if err != nil {
return "", err
}
return tokenString, nil
}
// CheckJWTToken function is used to check whether the token is correct or not
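// A typical call site looks like the sketch below (the header lookup is an
// assumption about the caller, not part of this package; the value is expected
// in the form "JWT <token>"):
//
//	claims, err := CheckJWTToken(c.Request().Header.Get("Authorization"))
//	if err != nil {
//		// reject the request with 401
//	}
//	_ = claims.Group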
func CheckJWTToken(auth string) (*Claims, error) {
if !strings.HasPrefix(auth, "JWT ") {
return &Claims{}, errors.New("tokenstring should contains 'JWT'")
}
token := strings.Split(auth, "JWT ")[1]
tokenClaims, err := jwt.ParseWithClaims(token, &Claims{}, func(token *jwt.Token) (i interface{}, err error) {
return jwtSecret, nil
})
if err != nil {
message := ""
if ve, ok := err.(*jwt.ValidationError); ok {
if ve.Errors&jwt.ValidationErrorMalformed != 0 {
message = "token is malformed"
} else if ve.Errors&jwt.ValidationErrorUnverifiable != 0 {
message = "token could not be verified because of signing problems"
} else if ve.Errors&jwt.ValidationErrorSignatureInvalid != 0 {
message = "signature validation failed"
} else if ve.Errors&jwt.ValidationErrorExpired != 0 {
message = "token is expired"
} else if ve.Errors&jwt.ValidationErrorNotValidYet != 0 {
message = "token is not yet valid before sometime"
} else {
message = "can not handle this token"
}
}
return &Claims{}, errors.New(message)
}
if claim, ok := tokenClaims.Claims.(*Claims); ok && tokenClaims.Valid {
return claim, nil
}
return &Claims{}, errors.New("token is not valid")
}
<file_sep>/go.mod
module colossus
go 1.14
require (
github.com/dgrijalva/jwt-go v3.2.0+incompatible
github.com/go-redis/redis/v8 v8.2.2
github.com/gobuffalo/buffalo v0.16.15
github.com/gobuffalo/buffalo-pop/v2 v2.2.0
github.com/gobuffalo/envy v1.9.0
github.com/gobuffalo/mw-csrf v1.0.0
github.com/gobuffalo/mw-forcessl v0.0.0-20200131175327-94b2bd771862
github.com/gobuffalo/mw-i18n v0.0.0-20190129204410-552713a3ebb4
github.com/gobuffalo/mw-paramlogger v0.0.0-20190129202837-395da1998525
github.com/gobuffalo/nulls v0.2.0
github.com/gobuffalo/packr/v2 v2.8.0
github.com/gobuffalo/pop/v5 v5.2.4
github.com/gobuffalo/suite v2.8.2+incompatible
github.com/gobuffalo/suite/v3 v3.0.0
github.com/gobuffalo/validate/v3 v3.1.0
github.com/gobuffalo/x v0.0.0-20190224155809-6bb134105960
github.com/gofrs/uuid v3.2.0+incompatible
github.com/karrick/godirwalk v1.16.1 // indirect
github.com/markbates/goth v1.64.2
github.com/markbates/grift v1.5.0
github.com/pkg/errors v0.9.1
github.com/rogpeppe/go-internal v1.6.2 // indirect
github.com/sirupsen/logrus v1.6.0 // indirect
github.com/spf13/cobra v1.0.0 // indirect
github.com/unrolled/secure v1.0.7
golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a
golang.org/x/net v0.0.0-20200822124328-c89045814202 // indirect
golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208 // indirect
golang.org/x/sys v0.0.0-20200909081042-eff7692f9009 // indirect
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect
gopkg.in/yaml.v2 v2.3.0
)
<file_sep>/models/group.go
package models
import (
"encoding/json"
"strings"
"time"
"github.com/gobuffalo/pop/v5"
)
// Group is used by pop to map your groups database table to your go code.
type Group struct {
ID int `json:"id" db:"id"`
CreatedAt time.Time `json:"created_at" db:"created_at"`
UpdatedAt time.Time `json:"updated_at" db:"updated_at"`
Name string `json:"name" db:"name"`
Users Users `json:"users,omitempty" has_many:"users"`
}
// String is not required by pop and may be deleted
func (g Group) String() string {
jg, _ := json.Marshal(g)
return string(jg)
}
// Groups is not required by pop and may be deleted
type Groups []Group
// String is not required by pop and may be deleted
func (g Groups) String() string {
jg, _ := json.Marshal(g)
return string(jg)
}
// Create is to create group
func (g *Group) Create(tx *pop.Connection) error {
g.Name = strings.ToLower(g.Name)
return tx.Create(g)
}
// Update is to update group
func (g *Group) Update(tx *pop.Connection) error {
g.Name = strings.ToLower(g.Name)
return tx.Update(g)
}
// Delete is to update group
func (g *Group) Delete(tx *pop.Connection) error {
return tx.Destroy(g)
}
<file_sep>/pkg/wrapError.go
package pkg
import "github.com/gobuffalo/validate/v3"
type MyError struct {
*validate.Errors
Code int `json:"code"`
Message string `json:"message"`
}
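// NewMyError wraps err (reusing it when it already is a *validate.Errors)
// together with an application error code and the message mapped to that code.
// A typical call site might look like the sketch below (an assumption, not an
// established call site in this repo):
//
//	return c.Render(http.StatusBadRequest, r.JSON(pkg.NewMyError(err, 1004)))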
func NewMyError(err error, code int) *MyError {
verr, ok := err.(*validate.Errors)
if !ok {
verr = validate.NewErrors()
}
msg := GetMessageFromErrorCodeMap(code)
if msg == "" {
msg = err.Error()
}
return &MyError{Errors: verr, Code: code, Message: msg}
}
func GetMessageFromErrorCodeMap(code int) string {
if msg, ok := errorCodeMessageMap[code]; ok {
return msg
}
return ""
}
// errorCodeMessageMap maps application error codes to their messages
var errorCodeMessageMap = map[int]string{
// 1000 - 2000 for user relevant error codes
1000: "invalid password",
1001: "incorrect login data",
1002: "incorrect registration data",
1003: "",
1004: "incorrect data",
// database error
9999: "database error",
}
<file_sep>/pkg/jwt_test.go
package pkg
import (
"colossus/models"
"fmt"
"strings"
"testing"
"time"
"github.com/dgrijalva/jwt-go"
"github.com/gobuffalo/packr/v2"
"github.com/gobuffalo/suite/v3"
)
// ModelSuite is
type ModelSuite struct {
*suite.Model
}
// var jwtSecret = "testunnotechjwttoken"
var email, group = "test.unnotech.com", "testgroup"
var provider, providerID = "google", "999999"
func Test_ModelSuite(t *testing.T) {
model, err := suite.NewModelWithFixtures(packr.New("app:jwt:fixtures", "../fixtures/jwt"))
if err != nil {
t.Fatal(err)
}
as := &ModelSuite{
Model: model,
}
suite.Run(t, as)
}
func generateToken(jwtSecret []byte) string {
claims := Claims{
Email: email,
Provider: provider,
Group: group,
StandardClaims: jwt.StandardClaims{
ExpiresAt: time.Now().Add(time.Hour * 24).Unix(),
Id: providerID,
},
}
token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)
tokenString, _ := token.SignedString(jwtSecret)
return tokenString
}
func (ms *ModelSuite) Test_CreateJWTToken() {
token, err := CreateJWTToken(email, provider, providerID, group)
ms.NoError(err)
tokenClaims, err := jwt.ParseWithClaims(token, &Claims{}, func(token *jwt.Token) (i interface{}, err error) {
return jwtSecret, nil
})
claim, ok := tokenClaims.Claims.(*Claims)
ms.Equal(ok, true)
ms.Equal(claim.Email, email)
ms.Equal(claim.Group, group)
ms.Equal(claim.Provider, provider)
// Test with wrong jwtSecret
_, err = jwt.ParseWithClaims(token, &Claims{}, func(token *jwt.Token) (i interface{}, err error) {
return "wrongtoken", nil
})
ms.Error(err)
}
func (ms *ModelSuite) Test_CheckJWTToken() {
token := generateToken(jwtSecret)
// Lack of JWT at begin
_, err := CheckJWTToken(token)
ms.EqualError(err, "tokenstring should contains 'JWT'")
// Good token
tokenString := fmt.Sprintf("JWT %s", token)
_, err = CheckJWTToken(tokenString)
ms.NoError(err)
// Wrong JWT token
wrongSecret := []byte{'w', 'r', 'o', 'n', 'g'}
wrongTokenString := fmt.Sprintf("JWT %s", generateToken(wrongSecret))
_, err = CheckJWTToken(wrongTokenString)
ms.EqualError(err, "signature validation failed")
}
func (ms *ModelSuite) Test_CheckPermission() {
ms.LoadFixture("groups data")
g := models.Group{}
	err := ms.DB.Where("name = ?", "defaultgroup").First(&g)
	ms.NoError(err) // fail loudly instead of silently skipping the remaining assertions
	if err != nil {
		return
	}
g.GetPermission = []string{
"/v1/get/", "/v1/get2/", "/v1/get3/",
}
g.UpdatePermission = []string{
"/v1/update/", "/v1/update2/",
}
ms.DB.Update(&g)
// Check for group permission
getCase1 := map[string]interface{}{
"group": "defaultgroup",
"urlPath": "/v1/get/",
"method": "GET",
}
ms.NoError(CheckPermission(ms.DB, getCase1))
getCase2 := map[string]interface{}{
"group": "defaultgroup",
"urlPath": "/v1/get2/",
"method": "GET",
}
ms.NoError(CheckPermission(ms.DB, getCase2))
getCase3 := map[string]interface{}{
"group": "defaultgroup",
"urlPath": "/v1/getnull/",
"method": "GET",
}
ms.Error(CheckPermission(ms.DB, getCase3))
getCase4 := map[string]interface{}{
"group": "defaultgroupnull",
"urlPath": "/v1/get/",
"method": "GET",
}
ms.Error(CheckPermission(ms.DB, getCase4))
updateCase1 := map[string]interface{}{
"group": "defaultgroup",
"urlPath": "/v1/update/",
"method": "PUT",
}
ms.NoError(CheckPermission(ms.DB, updateCase1))
updateCase2 := map[string]interface{}{
"group": "defaultgroup",
"urlPath": "/v1/update/",
"method": "PATCH",
}
ms.NoError(CheckPermission(ms.DB, updateCase2))
updateCase3 := map[string]interface{}{
"group": "defaultgroup",
"urlPath": "/v1/update/",
"method": "POST",
}
ms.Error(CheckPermission(ms.DB, updateCase3))
updateCase4 := map[string]interface{}{
"group": "defaultgroupnull",
"urlPath": "/v1/update/",
"method": "PUT",
}
ms.Error(CheckPermission(ms.DB, updateCase4))
// Check for OPA permission
opaCase1 := map[string]interface{}{
"group": "admin",
}
ms.NoError(CheckPermission(ms.DB, opaCase1))
opaCase2 := map[string]interface{}{
"group": "defaultgroup",
"path": strings.Split("testjwt", "/"),
"method": "GET",
}
ms.NoError(CheckPermission(ms.DB, opaCase2))
opaCase3 := map[string]interface{}{
"group": "defaultgroup",
"path": strings.Split("testjwt/1", "/"),
"method": "GET",
}
ms.NoError(CheckPermission(ms.DB, opaCase3))
opaCase4 := map[string]interface{}{
"group": "defaultgroup",
"path": strings.Split("testjwt", "/"),
"method": "POST",
}
ms.Error(CheckPermission(ms.DB, opaCase4))
opaCase5 := map[string]interface{}{
"group": "testgroup",
"path": strings.Split("testjwt", "/"),
"method": "GET",
}
ms.Error(CheckPermission(ms.DB, opaCase5))
}
<file_sep>/models/user.go
package models
import (
"encoding/json"
"strings"
"time"
"github.com/gobuffalo/nulls"
"github.com/gobuffalo/pop/v5"
"github.com/gofrs/uuid"
"golang.org/x/crypto/bcrypt"
)
// User is used by pop to map your users database table to your go code.
type User struct {
ID uuid.UUID `json:"id" db:"id"`
Name string `json:"name" db:"name"`
Email string `json:"email" db:"email"`
Password string `json:"-" db:"password"`
LoginAt time.Time `json:"login_at" db:"login_at"`
CreatedAt time.Time `json:"created_at" db:"created_at"`
UpdatedAt time.Time `json:"updated_at" db:"updated_at"`
Provider string `json:"provider" db:"provider"`
ProviderID string `json:"provider_id" db:"provider_id"`
GroupID nulls.Int `json:"-" db:"group_id"`
Group *Group `json:"group,omitempty" belongs_to:"group"`
}
// String is not required by pop and may be deleted
func (u User) String() string {
ju, _ := json.Marshal(u)
return string(ju)
}
// Users is not required by pop and may be deleted
type Users []User
// String is not required by pop and may be deleted
func (u Users) String() string {
ju, _ := json.Marshal(u)
return string(ju)
}
// Create is to create user
func (u *User) Create(tx *pop.Connection) error {
u.Email = strings.ToLower(u.Email)
pw, err := bcrypt.GenerateFromPassword([]byte(u.Password), bcrypt.DefaultCost)
if err != nil {
return err
}
u.Password = string(pw)
return tx.Create(u)
}
<file_sep>/pkg/redis/redis.go
package rediswrap
import (
"bytes"
"context"
"fmt"
"io"
"io/ioutil"
"log"
"os"
"path/filepath"
"text/template"
"github.com/go-redis/redis/v8"
"github.com/gobuffalo/envy"
"gopkg.in/yaml.v2"
)
// Client is a redis client to interact with redis server
var (
Client *redis.Client
_configFilePath string
)
func init() {
var err error
env := envy.Get("GO_ENV", "development")
Client, err = Connect(env)
if err != nil {
log.Fatal(err)
}
}
// ConnectionDetails is to define redis config struct
type ConnectionDetails struct {
	Host       string `yaml:"host"` // e.g. localhost (the port is configured separately)
Port int `yaml:"port"`
Username string `yaml:"username"`
Password string `yaml:"<PASSWORD>"`
DB int `yaml:"db"`
PoolSize int `yaml:"pool"`
MaxRetries int `yaml:"max_retries"`
}
var connections = map[string]*ConnectionDetails{}
// GetConnections is to get redis connections config
func GetConnections() map[string]*ConnectionDetails {
return connections
}
// Connect accepts an environment variable to connect with corresponding configuration
func Connect(env string) (*redis.Client, error) {
// LoadConfigFile first
if err := LoadConfigFile(); err != nil {
return nil, err
}
ctx := context.Background()
config := connections[env]
client := redis.NewClient(&redis.Options{
Addr: fmt.Sprintf("%s:%d", config.Host, config.Port),
Username: config.Username,
Password: <PASSWORD>,
DB: config.DB,
PoolSize: config.PoolSize,
})
_, err := client.Ping(ctx).Result()
if err != nil {
return nil, err
}
return client, nil
}
// findConfigPath determines the config file path where should be read from
func findConfigPath() string {
path := envy.Get("REDIS_CONFIG_PATH", "")
if path == "" {
path = filepath.Join(filepath.Dir(filepath.Dir(os.Args[0])), "redis.yml")
}
return path
}
// LoadConfigFile return the redis connections config
func LoadConfigFile() error {
envy.Load()
path := findConfigPath()
f, err := os.Open(path)
if err != nil {
return err
}
defer f.Close()
return unmarshalConfig(f)
}
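// tmplProcess renders the config file as a Go text/template, exposing the "env" and "envOr"
// helpers so values can be pulled from environment variables.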
func tmplProcess(content []byte) ([]byte, error) {
tmpl := template.New("_redis_config_transformer")
tmpl.Funcs(map[string]interface{}{
"envOr": func(s1, s2 string) string {
return envy.Get(s1, s2)
},
"env": func(s1 string) string {
return envy.Get(s1, "")
},
})
t, err := tmpl.Parse(string(content))
if err != nil {
return nil, err
}
var bb bytes.Buffer
err = t.Execute(&bb, nil)
if err != nil {
return nil, err
}
return bb.Bytes(), nil
}
// unmarshalConfig unmarshal the file into the connection structure
func unmarshalConfig(r io.Reader) error {
content, err := ioutil.ReadAll(r)
if err != nil {
return err
}
b, err := tmplProcess(content)
if err != nil {
return err
}
err = yaml.Unmarshal(b, &connections)
return err
}
<file_sep>/pkg/constants/constants.go
package constants
// Shared application error codes
var (
DataBaseError = 9999
)
<repo_name>Admsol/AngularAdminDash<file_sep>/js/test.js
describe("AngularAdminTests", function () {
    var scope, controllerInjector;
    beforeEach(function () {
        module('myApp');
        inject(function ($controller) {
            controllerInjector = $controller;
        });
        scope = {};
        // instantiate the controller against a plain scope object so its initial state can be asserted on
        controllerInjector('myController', {
            $scope: scope
        });
});
it("User Init Test", function () {
expect(scope.user.username).toEqual("");
});
it("Parse Signup Test", function () {
});
});<file_sep>/js/app.js
var app = angular.module('myApp', []);
app.controller('myController', ['$scope', 'MyLogin', function ($scope, MyLogin) {
var loginResult;
$scope.user = {
username: "",
pass: "",
token: "none",
status: "none"
};
$scope.onLoginClick = function () {
loginResult = MyLogin.login($scope.user.username, $scope.user.pass).then(function (data) {
console.log(data);
$scope.user.token = data.data.sessionToken;
$scope.user.status = data.statusText;
});
}
}]);
app.controller('myController2', ['$scope', function ($scope) {
$scope.info = "Important information";
}]);
app.factory('MyLogin', ['$http', function ($http) {
return {
login: function (user, pass) {
return $http({
url: "https://api.parse.com/1/users",
method: 'POST',
headers: {
'Content-Type': 'application/json',
"X-Parse-Application-Id": "AE7xZQU6JUDmrVuDKSHijRK00occMaAumpojNbSP",
"X-Parse-REST-API-Key": "<KEY>"
},
data: {
username: user,
password: <PASSWORD>
}
}).success(function (res) {
return res;
}).error(function (res) {
console.log("ERROR:" + res);
});
}
}
}]);
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Bottleships.Logic;
namespace Bottleships.Communication
{
public class LocalCommander : ICommander
{
public LocalCommander(ICaptain captain)
{
Captain = captain;
}
public ICaptain Captain { get; private set; }
public void EndGame(GameEndNotification gameEndNotification)
{
this.Captain.EndGameNotification(gameEndNotification);
}
public void EndRound(RoundEndNotification roundEndNotification)
{
this.Captain.EndRoundNotification(roundEndNotification);
}
public string GetName()
{
return Captain.GetName();
}
public IEnumerable<Placement> GetPlacements(IEnumerable<Clazz> classes)
{
return Captain.GetPlacements(classes);
}
public IEnumerable<Shot> GetShots(IEnumerable<EnemyFleetInfo> enemyFleetInfo, int numberOfShots)
{
return Captain.GetShots(enemyFleetInfo, numberOfShots);
}
public void NotifyOfBeingHit(IEnumerable<HitNotification> hits)
{
this.Captain.NotifyOfBeingHit(hits);
}
public void RespondToShots(IEnumerable<ShotResult> results)
{
Captain.RespondToShots(results);
}
public void StartGame(GameStartNotification gameStartNotification)
{
Captain.StartGameNotification(gameStartNotification);
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Bottleships.Logic
{
public enum Scores
{
Hit = 5,
Sink = 10,
WinningGame = 15,
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Bottleships.Logic
{
public class ScoreAwarded
{
public Scores Score { get; set; }
public Player Player { get; set; }
}
}
<file_sep>using Bottleships.Communication;
using System;
using System.Collections.Generic;
using System.Linq;
namespace Bottleships.Logic
{
public class Player
{
public string Name { get; private set; }
public override string ToString()
{
return this.Name;
}
protected ICommander Commander { get; private set; }
public Player(ICommander commander)
{
this.Commander = commander;
this.Name = this.Commander.GetName();
}
public IEnumerable<Shot> GetShots(Game game, Fleet myFleet)
{
var numberOfShots = myFleet.Ships.Count(s => s.IsAfloat);
var enemyFleets = game.Fleets.Where(f => !f.Equals(myFleet)).Select(f => new EnemyFleetInfo
{
Name = f.Player.Name,
NumberOfAfloatShipts = f.Ships.Count(s => s.IsAfloat)
});
return Commander.GetShots(enemyFleets, numberOfShots);
}
public Fleet GetFleet(IEnumerable<Clazz> classes)
{
var ships = new List<Ship>();
var fleet = new Fleet
{
Player = this
};
var positions = Commander.GetPlacements(classes);
foreach(var position in positions)
{
ships.Add(new Ship
{
Class = position.Class,
Direction = position.Direction,
Coordinates = position.Coordinates
});
}
fleet.Ships = ships;
return fleet;
}
public void RespondToShots(IEnumerable<ShotResult> results)
{
this.Commander.RespondToShots(results);
}
public void StartGame(Game currentGame)
{
this.Commander.StartGame(new GameStartNotification
{
NumberOfPlayers = currentGame.Players.Count(),
PlayerNames = currentGame.Players.Select(p => p.Name)
});
}
public void EndGame(Game currentGame)
{
this.Commander.EndGame(new GameEndNotification
{
Scores = currentGame.ScoresPerPlayer
});
}
public void EndRound(Round currentRound)
{
this.Commander.EndRound(new RoundEndNotification
{
Scores = currentRound.ScoresPerPlayer
});
}
public void NotifyOfBeingHit(IEnumerable<HitNotification> hits)
{
this.Commander.NotifyOfBeingHit(hits);
}
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Bottleships.Logic
{
public class Square : Coordinates
{
public bool IsCentre { get; set; }
public bool IsDamaged { get; set; }
public int PositionIndex { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
namespace Bottleships.Logic
{
public class Fleet
{
public Player Player { get; set; }
public IEnumerable<Ship> Ships { get; set; }
public bool StillHasShipsAfloat
{
get
{
return this.Ships.Any(s => s.IsAfloat);
}
}
public override string ToString()
{
return this.Player.ToString();
}
public ShotResult ResolveShot(Shot shot)
{
foreach(var ship in Ships.Where(s => s.IsAfloat))
{
var squares = ship.GetSquares();
foreach(var shipSpace in squares)
{
if(shipSpace.Equals(shot.Coordinates))
{
var isDamageCausing = !ship.DamageCoordinates.Contains(shot.Coordinates);
var damageResult = ship.RegisterDamage(shot.Coordinates);
return new ShotResult(shot, ship.Class, true, damageResult == DamageResult.Sank, isDamageCausing);
}
}
}
return new ShotResult(shot, null, false, false, false);
}
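        // Ships placed outside the 10x10 grid, or overlapping another ship in the same fleet, are sunk immediately.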
public void SinkShipsWhichCollideOrFallOutOfBounds()
{
foreach(var ship in Ships)
{
var squares = ship.GetSquares();
if(squares.Any(s => s.X < 0
|| s.X > 9
|| s.Y < 0
|| s.Y > 9))
{
ship.IsAfloat = false;
continue;
}
var otherSquares = this.Ships.Where(s => !s.Equals(ship)).SelectMany(s => s.GetSquares());
var collisions = otherSquares.Where(s => squares.Contains(s));
if(collisions.Any())
{
ship.IsAfloat = false;
continue;
}
}
}
}
}<file_sep>using Bottleships.Communication;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Bottleships.Logic
{
public class Shot
{
public string FleetName { get; set; }
public Coordinates Coordinates { get; set; }
public override bool Equals(object obj)
{
var other = obj as Shot;
return other != null
&& other.FleetName.Equals(this.FleetName)
&& other.Coordinates.Equals(this.Coordinates);
}
public override int GetHashCode()
{
return 27 * this.Coordinates.GetHashCode() * this.FleetName.GetHashCode();
}
}
}
<file_sep>namespace Bottleships.Communication
{
public class EnemyFleetInfo
{
public string Name { get; set; }
public int NumberOfAfloatShipts { get; set; }
}
}<file_sep>using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Text;
using System.Threading.Tasks;
namespace Bottleships.Communication
{
public class Server
{
private List<ConnectedPlayer> _connectedPlayers { get; set; }
private HttpListenerClass _listener;
public IEnumerable<ConnectedPlayer> ConnectedPlayers
{
get
{
lock (_connectedPlayers)
{
return _connectedPlayers;
}
}
}
public Server()
{
_connectedPlayers = new List<ConnectedPlayer>();
}
public void ListenForPlayers()
{
if (_listener == null)
{
_listener = new HttpListenerClass(3);
_listener.Start(5999);
_listener.ProcessRequest += Listener_ProcessRequest;
}
}
private void Listener_ProcessRequest(System.Net.HttpListenerContext context)
{
string body = null;
StreamReader sr = new StreamReader(context.Request.InputStream);
using (sr)
{
body = sr.ReadToEnd();
}
var method = context.Request.Url.AbsolutePath.Replace("/", "").ToLower();
if (method.Equals("registerplayer"))
{
var data = JsonConvert.DeserializeObject<ConnectedPlayer>(body);
context.Response.StatusCode = (int)HttpStatusCode.OK;
context.Response.ContentType = "text/plain";
using (StreamWriter sw = new StreamWriter(context.Response.OutputStream))
{
sw.WriteLine(context.Request.RawUrl);
}
lock (_connectedPlayers)
{
_connectedPlayers.Add(data);
}
}
}
public void StopListening()
{
if(_listener != null)
{
_listener.Stop();
_listener.Dispose();
_listener = null;
}
}
}
}<file_sep>namespace Bottleships.Logic
{
public class ShotResult
{
public Shot Shot { get; set; }
public Clazz Class { get; set; }
public bool WasAHit { get; set; }
public bool WasASink { get; set; }
public bool WasFreshDamage { get; set; }
public ShotResult(Shot shot, Clazz clazz, bool wasAHit, bool wasASink, bool wasFreshDamage)
{
this.Shot = shot;
this.Class = clazz;
this.WasAHit = wasAHit;
this.WasASink = wasASink;
this.WasFreshDamage = wasFreshDamage;
}
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Bottleships.Logic;
namespace Bottleships.Communication
{
public class RoundEndNotification
{
public IEnumerable<KeyValuePair<Player, int>> Scores { get; internal set; }
}
}
<file_sep>using Bottleships.Logic;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Bottleships.Communication
{
public class HitNotification
{
public string Shooter { get; set; }
public Coordinates Coordinates { get; set; }
public bool WasASink { get; set; }
public bool WasAHit { get; set; }
public Clazz ClassHit { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Bottleships.Logic;
using Newtonsoft.Json;
namespace Bottleships.Communication
{
public class RemoteCommander : ICommander
{
private ConnectedPlayer _connectedPlayer;
public RemoteCommander(ConnectedPlayer connectedPlayer)
{
_connectedPlayer = connectedPlayer;
}
public string GetName()
{
return _connectedPlayer.Name;
}
public IEnumerable<Placement> GetPlacements(IEnumerable<Clazz> classes)
{
var data = new HttpTransmitter().SendMessage(_connectedPlayer.Url, "getplacements", new PlacementRequest
{
Classes = classes.Select(c => c.Name)
});
return JsonConvert.DeserializeObject<IEnumerable<Placement>>(data);
}
public IEnumerable<Shot> GetShots(IEnumerable<EnemyFleetInfo> enemyFleetInfo, int numberOfShots)
{
var data = new HttpTransmitter().SendMessage(_connectedPlayer.Url, "getshots", new ShotRequest
{
NumberOfShots = numberOfShots,
EnemyFleets = enemyFleetInfo
});
return JsonConvert.DeserializeObject<IEnumerable<Shot>>(data);
}
public static void RegisterCaptain(string serverUrl)
{
var bot = new MyCaptain();
var thisUrl = $"http://{Environment.MachineName}:6999";
new HttpTransmitter().SendMessage(serverUrl, "registerplayer", new ConnectedPlayer
{
Name = bot.GetName(),
Url = thisUrl
});
}
public void RespondToShots(IEnumerable<ShotResult> results)
{
var data = new HttpTransmitter().SendMessage(_connectedPlayer.Url, "shotresult", results);
}
public void StartGame(GameStartNotification gameStartNotification)
{
var data = new HttpTransmitter().SendMessage(_connectedPlayer.Url, "startgame", gameStartNotification);
}
public void NotifyOfBeingHit(IEnumerable<HitNotification> hits)
{
var data = new HttpTransmitter().SendMessage(_connectedPlayer.Url, "hitnotification", hits);
}
public void EndGame(GameEndNotification gameEndNotification)
{
var data = new HttpTransmitter().SendMessage(_connectedPlayer.Url, "endgame", gameEndNotification);
}
public void EndRound(RoundEndNotification roundEndNotification)
{
var data = new HttpTransmitter().SendMessage(_connectedPlayer.Url, "endround", roundEndNotification);
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Bottleships.Logic
{
public class Placement
{
public Clazz Class { get; set; }
public Coordinates Coordinates { get; set; }
public Direction Direction { get; set; }
}
}
<file_sep>using System.Collections.Generic;
namespace Bottleships.Communication
{
public class PlacementRequest
{
public IEnumerable<string> Classes { get; set; }
}
}<file_sep>using Bottleships.AI;
using Bottleships.Communication;
using Bottleships.Logic;
using Bottleships.UI;
using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.IO;
using System.Linq;
using System.Net;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace Bottleships
{
public partial class MainForm : Form
{
public Event Event { get; set; }
public Client Client { get; set; }
public Server Server { get; set; }
public string OverrideMessage { get; set; }
public int? RemainingTicksToDisplayOverrideMessage { get; set; }
public Timer Timer { get; set; }
public const int TurnTickInterval = 500;
public int ScrollingXPos = 0;
public int SelectedMenuIndex = 0;
public List<ICaptain> LocalGameOpponents { get; set; }
Bitmap MenuBackground { get; set; }
public Game CurrentGame
{
get
{
return this.Event?.CurrentRound?.CurrentGame;
}
}
public MainForm()
{
InitializeComponent();
}
protected override void OnLoad(EventArgs e)
{
base.OnLoad(e);
this.Timer = new Timer();
this.Timer.Tick += Timer_Tick;
this.Timer.Interval = 25;
this.Timer.Start();
this.DrawMainMenu();
}
private void Timer_Tick(object sender, EventArgs e)
{
if(ScrollingXPos <= -200) ScrollingXPos = this.pictureBox1.Width;
ScrollingXPos -= 2;
if(this.OverrideMessage == "Playing Remote Game" && !(this.Client?.IsGameRunning ?? true))
{
this.OverrideMessage = null;
this.Client?.EndGame(); // last ditch in case we've not shut things down properly
this.Server?.StopListening();
}
if (this.CurrentGame != null)
{
this.DoPreTurn();
this.DoTurn();
this.DrawGameScreen();
this.DoPostTurn();
if (this.CurrentGame.Winner == null)
{
this.CurrentGame.MoveTurnOntoNextPlayer();
}
else
{
EndGame();
this.StartNextGame();
}
}
else // if we're ticking but not in a game then just show the regular refresh screen
{
RefreshScreen();
}
}
public void RefreshScreen()
{
if (!string.IsNullOrWhiteSpace(this.OverrideMessage))
{
if (!RemainingTicksToDisplayOverrideMessage.HasValue // if it's an infinite message
|| RemainingTicksToDisplayOverrideMessage.Value > 0) // or has time left to roll
{
this.DrawOverrideMessageScreen();
}
if (RemainingTicksToDisplayOverrideMessage.HasValue) // decrement the counter
{
RemainingTicksToDisplayOverrideMessage--;
if(RemainingTicksToDisplayOverrideMessage <=0)
{
RemainingTicksToDisplayOverrideMessage = null;
OverrideMessage = null;
}
}
return;
}
if(this.CurrentGame != null)
{
if (this.CurrentGame.GameOver)
{
this.EndGame();
this.StartNextGame();
}
return;
}
if(this.Server != null)
{
DrawServerScreen();
return;
}
if(this.LocalGameOpponents != null)
{
DrawSelectLocalOpponentMenu();
return;
}
this.DrawMainMenu();
}
private void StartNextGame()
{
this.Event.CurrentRound.MoveOntoNextGame();
if (!this.Event.CurrentRound.RoundOver)
{
var gameFleets = new List<Fleet>();
foreach (var player in this.CurrentGame.Players)
{
player.StartGame(CurrentGame);
var fleet = player.GetFleet(this.Event.CurrentRound.Classes);
gameFleets.Add(fleet);
}
this.CurrentGame.Fleets = gameFleets;
}
else
{
foreach (var player in this.Event.Players)
{
player.EndRound(this.Event.CurrentRound);
}
}
}
private void EndGame()
{
            OverrideMessage = this.CurrentGame.Winner == null
                ? "Draw!"
                : $"{this.CurrentGame.Winner.Player.Name} wins!"; // show the winner's message (or a draw)
            RemainingTicksToDisplayOverrideMessage = 3;
            if (this.CurrentGame.Winner != null) // only award winning points when the game wasn't a draw
            {
                this.CurrentGame.Scores.Add(new ScoreAwarded
                {
                    Player = this.CurrentGame.Winner.Player,
                    Score = Scores.WinningGame
                });
            }
foreach (var player in this.CurrentGame.Players)
{
player.EndGame(this.CurrentGame);
}
}
private void DrawMainMenu()
{
var bitmap = new Bitmap(this.pictureBox1.Width, this.pictureBox1.Height);
using (var gfx = Graphics.FromImage(bitmap))
{
DrawMenu(gfx,
bitmap,
true,
"Test Bot Locally",
"Connect Bot To Server",
"Host Server",
"Exit");
}
UpdateScreen(bitmap);
}
private void DrawMenu(Graphics gfx, Bitmap bitmap, bool alignTop, int[] selectedIndex, params string[] menuItems)
{
DrawMenu(gfx, bitmap, alignTop, true, 0, selectedIndex, menuItems);
}
private void DrawMenu(Graphics gfx, Bitmap bitmap, bool alignTop, params string[] menuItems)
{
DrawMenu(gfx, bitmap, alignTop, true, 0, new int[] { }, menuItems);
}
private void DrawMenu(Graphics gfx, Bitmap bitmap, bool alignTop, bool drawBackground, int indexOffset, int[] selectedIndicies, params string[] menuItems)
{
if (Timer.Interval != 25)
{
this.Timer.Interval = 25;
}
if (BackgroundImage == null)
{
BackgroundImage = ShipPainter.GetBitmapResource("Menu");
}
var distanceFromTheTop = 275;
var distanceFromTheBottom = 50;
var spacing = 55;
if (drawBackground == true)
{
gfx.DrawImage(BackgroundImage, new Rectangle(0, 0, this.pictureBox1.Width, this.pictureBox1.Height));
}
for (int i = 0; i < menuItems.Count(); i++)
{
int yPosition = alignTop
? distanceFromTheTop + (i * spacing)
: this.pictureBox1.Height - distanceFromTheBottom - (spacing * menuItems.Count()) + (i * spacing);
var selected = selectedIndicies.Contains(i + indexOffset);
var highlighted = SelectedMenuIndex == i + indexOffset && (ScrollingXPos / 10) % 2 == 0;
var brush = highlighted
? Brushes.White : selected
? Brushes.Green
: Brushes.Black;
gfx.DrawString(menuItems.ElementAt(i),
new Font(FontFamily.GenericMonospace, 36, FontStyle.Bold),
brush,
new PointF(10, yPosition));
}
}
private void DrawSelectLocalOpponentMenu()
{
var bitmap = new Bitmap(this.pictureBox1.Width, this.pictureBox1.Height);
using (var gfx = Graphics.FromImage(bitmap))
{
var selectedIndex = new List<int>();
if (this.LocalGameOpponents.OfType<RandomCaptain>().Any()) selectedIndex.Add(0);
if (this.LocalGameOpponents.OfType<SimpleCaptain>().Any()) selectedIndex.Add(1);
if (this.LocalGameOpponents.OfType<Nelson>().Any()) selectedIndex.Add(2);
DrawMenu(gfx,
bitmap,
true,
selectedIndex.ToArray(),
"Random",
"Simple Captain",
"Nelson");
DrawMenu(gfx,
bitmap,
false,
false,
3,
new int[] { },
"Start Game",
"Exit");
}
UpdateScreen(bitmap);
}
private void DrawOverrideMessageScreen()
{
var bitmap = new Bitmap(this.pictureBox1.Width, pictureBox1.Height);
using (Graphics gfx = Graphics.FromImage(bitmap))
{
DrawMenu(gfx, bitmap, true);
StringFormat format = new StringFormat();
format.LineAlignment = StringAlignment.Center;
format.Alignment = StringAlignment.Center;
gfx.DrawString(this.OverrideMessage, new Font(FontFamily.GenericMonospace, 48, FontStyle.Bold), Brushes.Black, new RectangleF(0, 0, this.pictureBox1.Width, this.pictureBox1.Height), format);
}
UpdateScreen(bitmap);
}
private void DrawServerScreen()
{
var bitmap = new Bitmap(this.pictureBox1.Width, pictureBox1.Height);
var spaceForHeader = 275;
using (Graphics gfx = Graphics.FromImage(bitmap))
{
DrawMenu(gfx, bitmap, false, "Start Game", "Close Server");
var brush = Brushes.Black;
var font = new Font(FontFamily.GenericMonospace, 36, FontStyle.Bold);
gfx.DrawString("Configuring Server", font, brush, new PointF(10, spaceForHeader + 10));
var listenText = $"Server listening on http://{Environment.MachineName}:5999{"".PadRight(3 - Math.Abs(ScrollingXPos / 10) % 3, '.')}";
gfx.DrawString(listenText, font, brush, new PointF(10, spaceForHeader + 75));
gfx.DrawString("Connected Players:", font, brush, new PointF(10, spaceForHeader + 110));
int i = 1;
foreach(var player in this.Server.ConnectedPlayers)
{
gfx.DrawString(player.Name, font, brush, new PointF(15, spaceForHeader + 110 + (55 * i)));
i++;
}
}
UpdateScreen(bitmap);
this.Server.ListenForPlayers();
}
public void DrawGameScreen()
{
var bitmap = new Bitmap(this.pictureBox1.Width, pictureBox1.Height);
var shipPainter = new ShipPainter();
bool twoRows = this.CurrentGame.Players.Count() > 3;
int playersWidth = twoRows ? 3 * 275 : this.CurrentGame.Players.Count() * 275;
int xBuffer = (this.pictureBox1.Width - playersWidth - 300) / 2;
int yBuffer = 100;
int i = 0;
int x = 0;
int y = 0;
StringFormat format;
using (var gfx = Graphics.FromImage(bitmap))
{
gfx.FillRectangle(Brushes.Aqua, 0, 0, this.pictureBox1.Width, this.pictureBox1.Height);
foreach (var fleet in this.CurrentGame.Fleets)
{
var fleetScreen = DrawFleetScreen(fleet, shipPainter, 550, 550);
GetCoords(i, this.CurrentGame.Fleets.Count(), out x, out y);
// fleet board
gfx.DrawImage(fleetScreen,
new Rectangle(xBuffer + (x * 275), yBuffer + (y * (275 + 75)), 275, 275),
new Rectangle(0, 0, 550, 550),
GraphicsUnit.Pixel);
// ship's names
format = new StringFormat();
format.LineAlignment = StringAlignment.Center;
format.Alignment = StringAlignment.Center;
gfx.DrawString(fleet.Player.Name,
new Font(FontFamily.GenericMonospace, 12),
Brushes.Black,
new Rectangle(xBuffer + (x * 275), yBuffer + (y * (275 + 75)) + 275, 275, 75),
format);
// red border
if (this.CurrentGame.PlayerWhosTurnItIs.Equals(fleet))
{
gfx.DrawRectangle(Pens.Red, new Rectangle(xBuffer + (x * 275), yBuffer + (y * (275 + 75)), 274, 274));
}
i++;
}
// scores
StringBuilder sb = new StringBuilder();
foreach(var score in this.CurrentGame.ScoresPerPlayer.OrderByDescending(s => s.Value))
{
sb.AppendLine($"{score.Value} - {score.Key.Name}");
}
format = new StringFormat();
//format.LineAlignment = StringAlignment.Near;
//format.Alignment = StringAlignment.Near;
gfx.DrawString(sb.ToString(),
new Font(FontFamily.GenericMonospace, 12),
Brushes.Black,
new Rectangle(this.pictureBox1.Width - 300, yBuffer, 300, this.pictureBox1.Height - yBuffer),
format);
i++;
}
this.UpdateScreen(bitmap);
}
private void GetCoords(int fleetIndex, int fleetCount, out int x, out int y)
{
if (fleetCount <= 3)
{
y = 0;
x = fleetIndex;
}
else
{
y = fleetIndex < 3 ? 0 : 1;
x = y == 1 ? fleetIndex - 3 : fleetIndex;
}
}
public Bitmap DrawFleetScreen(Fleet fleet, ShipPainter shipPainter, int width, int height)
{
var bitmap = new Bitmap(width, height);
using (Graphics gfx = Graphics.FromImage(bitmap))
{
gfx.FillRectangle(Brushes.Aqua, new RectangleF(0, 0, width, height));
foreach (var ship in fleet.Ships.Where(s => s.IsAfloat))
{
shipPainter.DrawShip(gfx, ship);
}
if (this.CurrentGame.CurrentPlayersShots != null)
{
var shotsAtThisPlayer = this.CurrentGame.CurrentPlayersShots.ContainsKey(fleet.Player)
? this.CurrentGame.CurrentPlayersShots[fleet.Player]
: new List<HitNotification>();
foreach (var lastTurnShot in shotsAtThisPlayer)
{
if (lastTurnShot.WasAHit)
{
DrawSomething(gfx, lastTurnShot.Coordinates, "Explosion", Color.Black);
}
else
{
DrawSomething(gfx, lastTurnShot.Coordinates, "Splash", Color.FromArgb(13, 27, 39));
}
}
}
for (int i = 1; i < 10; i++)
{
gfx.DrawLine(Pens.Black, new Point(0, (i * 51) ), new Point(this.pictureBox1.Width, (i * 51))); // horizontal
gfx.DrawLine(Pens.Black, new Point((i * 51), 0), new Point((i * 51), this.pictureBox1.Height)); // vertical
}
//StringFormat format = new StringFormat();
//format.LineAlignment = StringAlignment.Center;
//format.Alignment = StringAlignment.Center;
//gfx.DrawString(GetTitleText(),
// new Font(FontFamily.GenericMonospace, 22),
// Brushes.Black,
// new RectangleF(0, 0, width, height),
// format);
}
return bitmap;
}
        protected void DrawSomething(Graphics gfx, Coordinates lastTurnShotCoordinates, string what, Color transparent)
        {
            var image = ShipPainter.GetBitmapResource(what);
            image.MakeTransparent(transparent);
            gfx.DrawImage(image, new Point(lastTurnShotCoordinates.X * 51, lastTurnShotCoordinates.Y * 51));
}
public delegate void UpdateScreenDelegate(Bitmap bitmap);
public void UpdateScreen(Bitmap bitmap)
{
if (InvokeRequired)
{
                Invoke(new UpdateScreenDelegate(UpdateScreen), bitmap);
}
else
{
this.pictureBox1.Image = bitmap;
}
}
private void DoPreTurn()
{
this.CurrentGame.SinkShipsWhichCollideOrFallOutOfBounds();
this.CurrentGame.CheckForWinners();
}
private void DoPostTurn()
{
this.CurrentGame.CheckForWinners();
var fleetsBeingShotAt = new List<int>();
int i = 0;
foreach (var fleet in this.CurrentGame.Fleets)
{
if (this.CurrentGame.CurrentPlayersShots.ContainsKey(fleet.Player))
{
fleetsBeingShotAt.Add(i);
}
i++;
}
}
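        // DoTurn asks the current player for its shots, resolves them against each targeted fleet,
        // awards points for fresh damage and notifies the fleets that were hit.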
private void DoTurn()
{
var shots = this.CurrentGame.PlayerWhosTurnItIs.Player.GetShots(this.CurrentGame, this.CurrentGame.PlayerWhosTurnItIs);
var results = new List<ShotResult>();
var hitNotifications = new Dictionary<Player, List<HitNotification>>();
foreach (var shot in shots)
{
var fleetBeingShotAt = this.CurrentGame.Fleets.SingleOrDefault(f => f.Player.Name.Equals(shot.FleetName));
if (fleetBeingShotAt != null)
{
var result = fleetBeingShotAt.ResolveShot(shot);
results.Add(result);
if (!hitNotifications.ContainsKey(fleetBeingShotAt.Player))
{
hitNotifications.Add(fleetBeingShotAt.Player, new List<HitNotification>());
}
if (result.WasAHit)
{
if (result.WasFreshDamage) // only award points for fresh damage
{
this.CurrentGame.Scores.Add(new ScoreAwarded
{
Player = this.CurrentGame.PlayerWhosTurnItIs.Player,
Score = result.WasASink ? Scores.Sink : Scores.Hit
});
}
hitNotifications[fleetBeingShotAt.Player].Add(new HitNotification // but notify regardless
{
Shooter = this.CurrentGame.PlayerWhosTurnItIs.Player.Name,
WasASink = result.WasASink,
Coordinates = shot.Coordinates,
ClassHit = result.Class,
WasAHit = true
});
}
else // record the miss
{
hitNotifications[fleetBeingShotAt.Player].Add(new HitNotification
{
Shooter = this.CurrentGame.PlayerWhosTurnItIs.Player.Name,
WasASink = false,
Coordinates = shot.Coordinates,
ClassHit = null,
WasAHit = false
});
}
}
}
foreach(var playersHit in hitNotifications)
{
playersHit.Key.NotifyOfBeingHit(playersHit.Value.Where(h => h.WasAHit));
}
this.CurrentGame.CurrentPlayersShots = hitNotifications;
this.CurrentGame.PlayerWhosTurnItIs.Player.RespondToShots(results);
}
private void MainForm_KeyDown(object sender, KeyEventArgs e)
{
if (Server != null)
{
if (e.KeyData == Keys.Up && SelectedMenuIndex > 0)
{
SelectedMenuIndex--;
}
if (e.KeyData == Keys.Down && SelectedMenuIndex < 1)
{
SelectedMenuIndex++;
}
if (e.KeyData == Keys.Enter)
{
switch (SelectedMenuIndex)
{
case 0: // start a remote game
this.Event = Event.CreateEventSchedule(this.Server.ConnectedPlayers);
this.StartNextGame();
this.Timer.Interval = TurnTickInterval;
this.OverrideMessage = "Starting Hosted Game";
this.DrawOverrideMessageScreen();
this.OverrideMessage = null;
break;
case 1: // abort
this.Server.StopListening();
this.Server = null;
this.SelectedMenuIndex = 0;
this.DrawMainMenu();
break;
}
}
}
else // main menu
{
if (e.KeyData == Keys.Up && SelectedMenuIndex > 0)
{
SelectedMenuIndex--;
}
if (e.KeyData == Keys.Down && SelectedMenuIndex < 4)
{
SelectedMenuIndex++;
}
if (e.KeyData == Keys.Enter)
{
if (LocalGameOpponents != null) // if we're selecting opponents
{
switch (SelectedMenuIndex)
{
case 0:
AddRemoveClassFromLocalOpponents<RandomCaptain>();
break;
case 1:
AddRemoveClassFromLocalOpponents<SimpleCaptain>();
break;
case 2:
AddRemoveClassFromLocalOpponents<Nelson>();
break;
case 3:
this.Server = null;
this.OverrideMessage = "Starting Local Game";
this.DrawOverrideMessageScreen();
this.OverrideMessage = null;
this.Event = Event.CreateLocalGame(this.LocalGameOpponents);
this.StartNextGame();
this.Timer.Interval = TurnTickInterval;
this.Timer.Start();
break;
case 4:
this.LocalGameOpponents = null;
this.SelectedMenuIndex = 0;
this.RefreshScreen();
break;
}
}
else // otherwise it's the main menu
{
switch (SelectedMenuIndex)
{
case 0: // play locally
this.LocalGameOpponents = new List<ICaptain>();
this.RefreshScreen();
break;
case 1: // connect to server
try
{
var server = "http://localhost:5999"; // the server name should be editable
RemoteCommander.RegisterCaptain(server);
this.Client = new Client(server);
this.Client.PlayGame(); // TODO: we need to disconnect the listener when the game ends or we'll have a problem
this.OverrideMessage = "Playing Remote Game";
this.RefreshScreen();
}
catch(Exception)
{
MessageBox.Show("Unable to connect to remote server", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
}
break;
case 2: // host server
this.Server = new Server();
this.SelectedMenuIndex = 0;
this.Timer.Interval = 25;
this.DrawServerScreen();
break;
default:
this.Client?.EndGame(); // last ditch in case we've not shut things down properly
this.Server?.StopListening();
this.Close();
break;
}
}
}
}
}
public void AddRemoveClassFromLocalOpponents<T>() where T : ICaptain, new()
{
if (!this.LocalGameOpponents.OfType<T>().Any())
{
var item = new T();
this.LocalGameOpponents.Add(item);
}
else
{
var toRemove = this.LocalGameOpponents.OfType<T>().ToArray();
foreach (var item in toRemove)
{
this.LocalGameOpponents.Remove(item);
}
}
}
protected override void OnClosing(CancelEventArgs e)
{
this.Client?.EndGame();
this.Server?.StopListening(); // catch all
base.OnClosing(e);
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Bottleships.Communication
{
public class ShotRequest
{
public int NumberOfShots { get; set; }
public IEnumerable<EnemyFleetInfo> EnemyFleets { get; set; }
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Bottleships.Communication
{
public class GameStartNotification
{
public IEnumerable<string> PlayerNames { get; set; }
public int NumberOfPlayers { get; set; }
}
}
<file_sep>using Bottleships.Communication;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Bottleships.Logic;
namespace Bottleships
{
public class MyCaptain : ICaptain
{
public string Name { get; set; }
public string GetName()
{
return this.Name;
}
public MyCaptain()
{
this.Name = "MyCaptain";
}
public IEnumerable<Placement> GetPlacements(IEnumerable<Clazz> classes)
{
var placements = new List<Placement>();
int i = 0;
foreach (var clazz in classes)
{
placements.Add(new Placement
{
Class = clazz,
Coordinates = new Coordinates { X = i * 2, Y = 5 },
Direction = Direction.Up
});
i++;
}
return placements;
}
public IEnumerable<Shot> GetShots(IEnumerable<EnemyFleetInfo> enemyFleetInfo, int numberOfShots)
{
var rand = new Random();
var shots = new List<Shot>();
var target = enemyFleetInfo.FirstOrDefault();
for (int i = 0; i < numberOfShots; i++)
{
var coords = new Coordinates
{
X = rand.Next(0, 9),
Y = rand.Next(0, 9)
};
shots.Add(new Shot
{
Coordinates = coords,
FleetName = target.Name
});
}
return shots;
}
public void RespondToShots(IEnumerable<ShotResult> results)
{
}
public void StartGameNotification(GameStartNotification gameStartNotification)
{
}
public void NotifyOfBeingHit(IEnumerable<HitNotification> hits)
{
}
public void EndGameNotification(GameEndNotification gameEndNotification)
{
}
public void EndRoundNotification(RoundEndNotification roundEndNotification)
{
}
}
}
<file_sep>using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Text;
using System.Threading.Tasks;
namespace Bottleships.Communication
{
public class HttpTransmitter
{
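        // SendMessage POSTs the JSON-serialised data to {address}/{message} and returns the raw response body.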
public string SendMessage(string address, string message, object data)
{
var request = (HttpWebRequest)WebRequest.Create(string.Format("{0}/{1}", address, message));
var postData = JsonConvert.SerializeObject(data);
var byteData = Encoding.ASCII.GetBytes(postData);
request.Method = "POST";
request.Timeout = 30000;
request.ContentType = "application/x-www-form-urlencoded";
request.ContentLength = byteData.Length;
using (var stream = request.GetRequestStream())
{
stream.Write(byteData, 0, byteData.Length);
}
var response = (HttpWebResponse)request.GetResponse();
var responseString = new StreamReader(response.GetResponseStream()).ReadToEnd();
return responseString;
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Bottleships.Logic
{
public class Ship
{
public Coordinates Coordinates { get; set; }
public Direction Direction { get; set; }
public Clazz Class { get; set; }
public List<int> DamageIndicies { get; set; }
public List<Coordinates> DamageCoordinates { get; set; }
public bool IsAfloat { get; set; }
public Ship()
{
this.IsAfloat = true;
this.DamageIndicies = new List<int>();
this.DamageCoordinates = new List<Coordinates>();
}
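        // Works out the rearmost square of the ship and the per-square x/y step, treating Coordinates as the centre of the hull.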
private void GetBackOfBoat(out Coordinates backOfBoat, out int xOffset, out int yOffset)
{
var isOdd = this.Class.Size % 2 == 1;
var halfRoundedDown = (this.Class.Size - (isOdd ? 1 : 0)) / 2;
var backOfBoatOffset = halfRoundedDown;
var frontOfBoatOffset = halfRoundedDown + (isOdd ? 0 : -1);
yOffset = 0;
xOffset = 0;
switch (Direction)
{
case Direction.Up:
yOffset = -1;
backOfBoat = new Coordinates { X = this.Coordinates.X, Y = this.Coordinates.Y + backOfBoatOffset };
break;
case Direction.Down:
yOffset = 1;
backOfBoat = new Coordinates { X = this.Coordinates.X, Y = this.Coordinates.Y - backOfBoatOffset };
break;
case Direction.Left:
backOfBoat = new Coordinates { X = this.Coordinates.X + backOfBoatOffset, Y = this.Coordinates.Y };
xOffset = -1;
break;
case Direction.Right:
backOfBoat = new Coordinates { X = this.Coordinates.X - backOfBoatOffset, Y = this.Coordinates.Y };
xOffset = 1;
break;
default:
backOfBoat = new Coordinates { X = this.Coordinates.X, Y = this.Coordinates.Y };
break;
}
}
public IEnumerable<Square> GetSquares()
{
var coords = new List<Square>();
GetBackOfBoat(out Coordinates backOfBoat, out int xOffset, out int yOffset);
for(int i = 0; i < this.Class.Size; i++)
{
var square = new Square
{
X = backOfBoat.X + (xOffset * i),
Y = backOfBoat.Y + (yOffset * i),
PositionIndex = i
};
coords.Add(square);
if(square.Equals(this.Coordinates))
{
square.IsCentre = true;
}
if(DamageIndicies.Contains(i))
{
square.IsDamaged = true;
}
}
return coords;
}
public Coordinates BackOfBoat
{
get
{
GetBackOfBoat(out Coordinates backOfBoat, out int xOffset, out int yOffset);
return backOfBoat;
}
}
public Coordinates FrontOfBoat
{
get
{
GetBackOfBoat(out Coordinates backOfBoat, out int xOffset, out int yOffset);
var frontOfBoat = new Coordinates
{
X = backOfBoat.X + (Class.Size * xOffset),
Y = backOfBoat.Y + (Class.Size * yOffset),
};
return frontOfBoat;
}
}
public DamageResult RegisterDamage(Coordinates coordinates)
{
if (this.IsAfloat)
{
var position = this.GetSquares();
foreach (var shipPosition in position)
{
if (shipPosition.Equals(coordinates) && !shipPosition.IsDamaged)
{
this.DamageIndicies.Add(shipPosition.PositionIndex);
this.DamageCoordinates.Add(coordinates);
}
}
IsAfloat = this.Class.Size > this.DamageIndicies.Count();
if (!IsAfloat)
{
return DamageResult.Sank;
}
return DamageResult.Hit;
}
else
{
return DamageResult.None;
}
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Bottleships.Logic;
namespace Bottleships.Communication
{
public interface ICaptain
{
string GetName();
IEnumerable<Placement> GetPlacements(IEnumerable<Clazz> classes);
IEnumerable<Shot> GetShots(IEnumerable<EnemyFleetInfo> enemyFleetInfo, int numberOfShots);
void RespondToShots(IEnumerable<ShotResult> results);
void StartGameNotification(GameStartNotification gameStartNotification);
void EndGameNotification(GameEndNotification gameEndNotification);
void NotifyOfBeingHit(IEnumerable<HitNotification> hits);
void EndRoundNotification(RoundEndNotification roundEndNotification);
}
}
<file_sep>using Bottleships.AI;
using Bottleships.Communication;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Bottleships.Logic
{
public class Event
{
public IEnumerable<Round> Rounds { get; set; }
protected int RoundIndex { get; set; }
public Round CurrentRound
{
get
{
return this.Rounds.ElementAt(this.RoundIndex);
}
}
public IEnumerable<KeyValuePair<Player, int>> ScoresPerPlayer
{
get
{
return this.Rounds.SelectMany(s => s.ScoresPerPlayer)
.GroupBy(g => g.Key)
.Select(g => new KeyValuePair<Player, int>(g.Key, g.Select(x => x.Value).Sum()))
.OrderByDescending(s => s.Value);
}
}
public IEnumerable<Player> Players
{
get
{
return this.Rounds
.SelectMany(r => r.Games)
.SelectMany(r => r.Players)
.Distinct();
}
}
public void MoveOntoNextRound()
{
this.RoundIndex++;
}
public static Event CreateEventSchedule(IEnumerable<ConnectedPlayer> connectedPlayers)
{
var remotePlayers = connectedPlayers.Select(cp => new Player(new RemoteCommander(cp)));
var houseRobots = new Player[]
{
new Player(new LocalCommander(new Nelson())),
//new Player(new LocalCommander(new RandomCaptain()))
};
var allplayers = remotePlayers.Union(houseRobots);
var pairedGames = CreateGamesForPlayerPairs(allplayers);
return new Event
{
Rounds = new Round[]
{
new Round(Clazz.AllClasses.ToArray())
{
Games = pairedGames
},
new Round(Clazz.AllClasses.ToArray())
{
Games = new Game[]
{
new Game(allplayers.ToArray())
}
}
}
};
}
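        // Creates one head-to-head game for every distinct pair of players.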
private static IEnumerable<Game> CreateGamesForPlayerPairs(IEnumerable<Player> players)
{
var games = new List<Game>();
for (int i = 0; i < players.Count(); i++)
{
for (int j = 0; j < players.Count(); j++)
{
if (j >= i) continue;
games.Add(new Game(new Player[]
{
players.ElementAt(i),
players.ElementAt(j)
}));
}
}
return games;
}
public static Event CreateLocalGame(IEnumerable<ICaptain> opponents)
{
var allPlayers = new List<Player>();
var games = new List<Game>();
var humanPlayer = new Player(new LocalCommander(new MyCaptain()));
allPlayers.Add(humanPlayer);
foreach(var opponent in opponents)
{
var player = new Player(new LocalCommander(opponent));
allPlayers.Add(player);
games.Add(new Game(humanPlayer, player));
}
games.Add(new Game(allPlayers.ToArray()));
return new Event
{
Rounds = new Round[]
{
new Round(Clazz.AllClasses.ToArray())
{
Games = games.ToArray()
}
}
};
}
}
}
<file_sep>using Bottleships.Logic;
using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Text;
using System.Threading.Tasks;
namespace Bottleships.Communication
{
public class Client
{
private string _serverUrl;
private ICaptain _myCaptain;
private HttpTransmitter _transmitter;
private HttpListenerClass _listener;
private static object _lock = new object();
public bool IsGameRunning { get; private set; }
public Client(string serverUrl)
{
_serverUrl = serverUrl;
_myCaptain = new MyCaptain();
_transmitter = new HttpTransmitter();
_listener = new HttpListenerClass(3);
}
public void PlayGame()
{
_listener.Start(6999);
_listener.ProcessRequest += HttpListener_ProcessRequest;
lock (_lock)
{
this.IsGameRunning = true;
}
}
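        // Dispatches incoming requests from the game server to the local captain based on the URL path.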
private void HttpListener_ProcessRequest(System.Net.HttpListenerContext context)
{
string body = null;
StreamReader sr = new StreamReader(context.Request.InputStream);
using (sr)
{
body = sr.ReadToEnd();
}
var method = context.Request.Url.AbsolutePath.Replace("/", "").ToLower();
if (method.Equals("getplacements"))
{
var data = JsonConvert.DeserializeObject<PlacementRequest>(body);
context.Response.StatusCode = (int)HttpStatusCode.OK;
context.Response.ContentType = "text/plain";
var allClasses = Clazz.AllClasses;
var requestedClasses = new List<Clazz>();
foreach(var clazz in data.Classes)
{
var matchingClass = allClasses.SingleOrDefault(c => c.Name.Equals(clazz, StringComparison.CurrentCultureIgnoreCase));
if(matchingClass != null)
{
requestedClasses.Add(matchingClass);
}
}
var placements = _myCaptain.GetPlacements(requestedClasses);
using (StreamWriter sw = new StreamWriter(context.Response.OutputStream))
{
sw.WriteLine(JsonConvert.SerializeObject(placements));
}
}
if (method.Equals("getshots"))
{
var data = JsonConvert.DeserializeObject<ShotRequest>(body);
context.Response.StatusCode = (int)HttpStatusCode.OK;
context.Response.ContentType = "text/plain";
var placements = _myCaptain.GetShots(data.EnemyFleets, data.NumberOfShots);
using (StreamWriter sw = new StreamWriter(context.Response.OutputStream))
{
sw.WriteLine(JsonConvert.SerializeObject(placements));
}
}
if (method.Equals("shotresult"))
{
var data = JsonConvert.DeserializeObject<IEnumerable<ShotResult>>(body);
context.Response.StatusCode = (int)HttpStatusCode.OK;
context.Response.ContentType = "text/plain";
_myCaptain.RespondToShots(data);
using (StreamWriter sw = new StreamWriter(context.Response.OutputStream))
{
sw.WriteLine(JsonConvert.SerializeObject(new { DataReceived = true }));
}
}
if (method.Equals("startgame"))
{
var data = JsonConvert.DeserializeObject<GameStartNotification>(body);
context.Response.StatusCode = (int)HttpStatusCode.OK;
context.Response.ContentType = "text/plain";
_myCaptain.StartGameNotification(data);
if (!IsGameRunning)
{
lock (_lock)
{
this.IsGameRunning = true;
}
}
using (StreamWriter sw = new StreamWriter(context.Response.OutputStream))
{
sw.WriteLine(JsonConvert.SerializeObject(new { DataReceived = true }));
}
}
if (method.Equals("endgame"))
{
var data = JsonConvert.DeserializeObject<GameEndNotification>(body);
context.Response.StatusCode = (int)HttpStatusCode.OK;
context.Response.ContentType = "text/plain";
_myCaptain.EndGameNotification(data);
using (StreamWriter sw = new StreamWriter(context.Response.OutputStream))
{
sw.WriteLine(JsonConvert.SerializeObject(new { DataReceived = true }));
}
}
if (method.Equals("endround"))
{
var data = JsonConvert.DeserializeObject<RoundEndNotification>(body);
context.Response.StatusCode = (int)HttpStatusCode.OK;
context.Response.ContentType = "text/plain";
_myCaptain.EndRoundNotification(data);
using (StreamWriter sw = new StreamWriter(context.Response.OutputStream))
{
sw.WriteLine(JsonConvert.SerializeObject(new { DataReceived = true }));
}
lock (_lock)
{
this.IsGameRunning = false;
}
}
if (method.Equals("hitnotification"))
{
var data = JsonConvert.DeserializeObject<IEnumerable<HitNotification>>(body);
context.Response.StatusCode = (int)HttpStatusCode.OK;
context.Response.ContentType = "text/plain";
_myCaptain.NotifyOfBeingHit(data);
using (StreamWriter sw = new StreamWriter(context.Response.OutputStream))
{
sw.WriteLine(JsonConvert.SerializeObject(new { DataReceived = true }));
}
}
}
public void EndGame()
{
_listener?.Stop();
_listener?.Dispose();
}
}
}
<file_sep>using Bottleships.Logic;
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using System.Reflection;
using System.Resources;
using System.Text;
using System.Threading.Tasks;
namespace Bottleships.UI
{
public class ShipPainter
{
public ShipPainter()
{
}
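        // Paints the prow, stern and hull squares of a ship, adding chimneys for classes that have them and damage markers for damaged squares.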
public void DrawShip(Graphics gfx, Ship ship)
{
var shipSquares = ship.GetSquares();
foreach (var coords in shipSquares)
{
var brush = Brushes.Gray;
if (coords.Equals(shipSquares.First()))
{
DrawProw(gfx, ship.Class, ship.Direction, new Rectangle(1 + (coords.X * 51), 1 + (coords.Y * 51), 50, 50));
}
else if (coords.Equals(shipSquares.Last()))
{
DrawStern(gfx, ship.Class, ship.Direction, new Rectangle(1 + (coords.X * 51), 1 + (coords.Y * 51), 50, 50));
}
else
{
gfx.FillRectangle(brush, new Rectangle(1 + (coords.X * 51), 1 + (coords.Y * 51), 50, 50));
if(ship.Class.HasChimneys)
{
gfx.FillEllipse(Brushes.DarkGray, new Rectangle(1 + (coords.X * 51) + 10, 1 + (coords.Y * 51) + 10, 30, 30));
gfx.DrawEllipse(Pens.Black, new Rectangle(1 + (coords.X * 51) + 10, 1 + (coords.Y * 51) + 10, 30, 30));
}
}
if (coords.IsDamaged)
{
var image = ShipPainter.GetBitmapResource("Damage");
image.MakeTransparent(Color.White);
gfx.DrawImage(image, new Point((coords.X * 51), (coords.Y * 51)));
}
}
}
private void DrawProw(Graphics gfx, Clazz clazz, Direction direction, Rectangle rectangle)
{
var prowBitmap = new Bitmap(50, 50);
using (var prowGfx = Graphics.FromImage(prowBitmap))
{
prowGfx.FillPolygon(Brushes.Gray, new Point[]
{
new Point(0, 0),
new Point(25, 25),
new Point(50, 0),
});
RotateFlipType? flip;
switch(direction)
{
case Direction.Down:
flip = RotateFlipType.RotateNoneFlipY;
break;
case Direction.Left:
flip = RotateFlipType.Rotate270FlipNone;
break;
case Direction.Right:
flip = RotateFlipType.Rotate90FlipNone;
break;
default:
flip = null;
break;
}
if (flip.HasValue)
{
prowBitmap.RotateFlip(flip.Value);
}
}
gfx.DrawImage(prowBitmap, rectangle);
}
private void DrawStern(Graphics gfx, Clazz clazz, Direction direction, Rectangle rectangle)
{
var prowBitmap = new Bitmap(50, 50);
using (var prowGfx = Graphics.FromImage(prowBitmap))
{
prowGfx.FillPolygon(Brushes.Gray, new Point[]
{
new Point(0, 0),
new Point(5, 35),
new Point(45, 35),
new Point(50, 0),
});
RotateFlipType? flip;
switch (direction)
{
case Direction.Up:
flip = RotateFlipType.RotateNoneFlipY;
break;
case Direction.Right:
flip = RotateFlipType.Rotate270FlipNone;
break;
case Direction.Left:
flip = RotateFlipType.Rotate90FlipNone;
break;
default:
flip = null;
break;
}
if (flip.HasValue)
{
prowBitmap.RotateFlip(flip.Value);
}
}
gfx.DrawImage(prowBitmap, rectangle);
}
public static Bitmap GetBitmapResource(string name)
{
Assembly assembly = Assembly.GetExecutingAssembly();
string strBaseName = assembly.GetName().Name + ".Properties.Resources";
ResourceManager rm = new ResourceManager(strBaseName, assembly);
rm.IgnoreCase = true;
return (Bitmap)rm.GetObject(name);
}
}
}
<file_sep>using Bottleships.Communication;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Bottleships.Logic;
namespace Bottleships.AI
{
public class RandomCaptain : ICaptain
{
private string _name;
public RandomCaptain()
{
this._name = $"Random {Guid.NewGuid().ToString()}";
}
public string GetName()
{
return _name;
}
public IEnumerable<Placement> GetPlacements(IEnumerable<Clazz> classes)
{
var placements = new List<Placement>();
var rand = new Random(Guid.NewGuid().GetHashCode());
foreach (var clazz in classes.OrderByDescending(s => s.Size))
{
var ship = new Placement
{
Class = clazz,
Direction = (Direction)(1 + rand.Next(3)),
Coordinates = new Coordinates
{
X = rand.Next(9),
Y = rand.Next(9)
}
};
placements.Add(ship);
}
return placements;
}
public IEnumerable<Shot> GetShots(IEnumerable<EnemyFleetInfo> enemyFleetInfo, int numberOfShots)
{
var rand = new Random();
var shots = new List<Shot>();
var target = enemyFleetInfo // target
.Where(f => f.NumberOfAfloatShipts > 0) // any enemy with ships afloat
.OrderBy(f => Guid.NewGuid()) // and pick a random one for all shots
.FirstOrDefault();
for (int i = 0; i < numberOfShots; i++)
{
var coords = new Coordinates
{
X = rand.Next(0, 9),
Y = rand.Next(0, 9)
};
shots.Add(new Shot
{
Coordinates = coords,
FleetName = target.Name
});
}
return shots;
}
public void NotifyOfBeingHit(IEnumerable<HitNotification> hits)
{
}
public void RespondToShots(IEnumerable<ShotResult> results)
{
}
public void StartGameNotification(GameStartNotification gameStartNotification)
{
}
public void EndGameNotification(GameEndNotification gameEndNotification)
{
}
public void EndRoundNotification(RoundEndNotification roundEndNotification)
{
}
}
}
<file_sep>using Bottleships.Communication;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Bottleships.Logic
{
public class Game
{
public Fleet Winner { get; set; }
private Fleet _playerWhosTurnItIs;
public Fleet PlayerWhosTurnItIs
{
get
{
if (_playerWhosTurnItIs == null)
{
_playerWhosTurnItIs = this.Fleets.First();
}
return _playerWhosTurnItIs;
}
}
public IList<ScoreAwarded> Scores { get; private set; }
public IEnumerable<KeyValuePair<Player, int>> ScoresPerPlayer
{
get
{
return this.Scores
.GroupBy(g => g.Player)
.Select(g => new KeyValuePair<Player, int>(g.Key, g.Select(x => (int)x.Score).Sum()))
.OrderByDescending(s => s.Value);
}
}
public bool GameOver { get; set; }
public IEnumerable<Fleet> Fleets { get; set; }
public Game(params Player[] players)
{
this.Players = players;
this.Scores = new List<ScoreAwarded>();
}
public Dictionary<Player, List<HitNotification>> CurrentPlayersShots { get; set; }
public IEnumerable<Player> Players { get; private set; }
public void SinkShipsWhichCollideOrFallOutOfBounds()
{
foreach (var fleet in Fleets)
{
fleet.SinkShipsWhichCollideOrFallOutOfBounds();
}
}
/// <summary>
/// Checks if the game is over and sets the Winner/GameOver properties
/// </summary>
public void CheckForWinners()
{
this.Winner = null;
var fleetsWithShips = this.Fleets.Where(f => f.StillHasShipsAfloat);
if (fleetsWithShips.Count() == 0)
{
this.GameOver = true;
return;
}
else if (fleetsWithShips.Count() == 1)
{
this.Winner = fleetsWithShips.Single();
this.GameOver = true;
return;
}
this.GameOver = false;
}
public void MoveTurnOntoNextPlayer()
{
Fleet nextPlayer = null;
bool currentPlayerPassed = false;
foreach(var fleet in this.Fleets)
{
if(fleet.Equals(this.PlayerWhosTurnItIs))
{
currentPlayerPassed = true;
continue;
}
if(currentPlayerPassed && fleet.StillHasShipsAfloat)
{
nextPlayer = fleet;
break;
}
}
// if we've been all the way through with no joy (for example if the current player was at the end) then grab the first one
if (nextPlayer == null)
{
nextPlayer = this.Fleets.FirstOrDefault(f => f.StillHasShipsAfloat);
}
_playerWhosTurnItIs = nextPlayer;
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Bottleships.Communication
{
public class ClientUpdateEventArgs : EventArgs
{
public string MessageForScreen { get; set; }
}
}
<file_sep>namespace Bottleships.Logic
{
public class Coordinates
{
public int X { get; set; }
public int Y { get; set; }
public override bool Equals(object obj)
{
var other = obj as Coordinates;
return other != null
&& other.X == this.X
&& other.Y == this.Y;
}
public override int GetHashCode()
{
return 27 * this.X.GetHashCode() * this.Y.GetHashCode();
}
}
}<file_sep>namespace Bottleships.Logic
{
public enum DamageResult
{
None,
Hit,
Sank
}
}<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Bottleships.Logic
{
public class Clazz
{
public static Clazz AircraftCarrier = new Clazz("Aircraft Carrier", 5);
public static Clazz Battleship = new Clazz("Battleship", 4) { HasChimneys = true };
public static Clazz Frigate = new Clazz("Frigate", 3) { HasChimneys = true };
public static Clazz Submarine = new Clazz("Submarine", 3);
public static Clazz Gunboat = new Clazz("Gunboat", 2);
public static IEnumerable<Clazz> AllClasses
{
get
{
return new Clazz[]
{
AircraftCarrier,
Battleship,
Frigate,
Submarine,
Gunboat
};
}
}
public string Name { get; set; }
public int Size { get; set; }
public bool HasChimneys { get; set; }
public Clazz() // for serialising only
{
}
protected Clazz(string name, int size)
{
Name = name;
Size = size;
}
}
}
<file_sep># Bottleships
### What Are BottleShips and BotWars?
BotWars is an event I've run at my company: a huge team building event where around 50 or 60 engineers compete in randomly drawn teams (usually with a lot of pizza). These events are a huge amount of fun and help engineers meet and work alongside colleagues they may not have known otherwise.
Bottleships is the 4th iteration of BotWars. It's based on the children's game of Battleships: teams have to write an application which places their ships (harder than it sounds) and targets their shots at each other's fleets.
For earlier versions of BotWars take a look at [BotWars](https://github.com/davidseanlittlewood/BeatTheBotsServer) and [BotWars2](https://github.com/ardliath/BotWars2) (I didn't develop BotWars3, I got to play). You're more than welcome to download, contribute, or run these at your own company.
This year the game is based on the game of Battleships, with each team creating an AI which competes against the others to prove it is the best admiral of the virtual seas.
### What Have You Got So Far?
The game is very much in its early stages:

With very basic graphics

And a distinctly "retro" [menu feel](https://github.com/ardliath/Bottleships/issues/12)

Functionally it's there, but there are lots of UI and playability improvements which need to be made. I'm hoping to run this event soon, and so hope to have a working version ready by then.
### What's Different about Bottleships to traditional Battleships?
There are a few tweaks to make the game more interesting and harder for our teams:
- Each fleet will get one shot per surviving ship per turn, rather than the traditional single shot per turn (a sketch of how this looks through the captain interface follows this list)
- I'm hoping you'll be able to move your ships mid game (nice to have)
- In traditional Battleships you play against one other opponent; in Bottleships we can have 1v1 games and 1v1v1v1v1v1v1v1 games!
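To make the first tweak concrete, here is a minimal, purely illustrative sketch of a captain written against the `ICaptain` interface in this repository. The `DiagonalCaptain` name and its firing pattern are made up for this README, and it assumes the engine passes `numberOfShots` equal to the number of ships your fleet still has afloat:
```csharp
using System.Collections.Generic;
using System.Linq;
using Bottleships.Communication;
using Bottleships.Logic;
namespace Bottleships.AI
{
    // Purely illustrative: fires its full allowance of shots in a simple pattern
    // at the first enemy fleet which still has ships afloat.
    public class DiagonalCaptain : ICaptain
    {
        public string GetName() => "Diagonal";
        public IEnumerable<Placement> GetPlacements(IEnumerable<Clazz> classes)
        {
            // place every ship pointing up, spaced two columns apart
            return classes.Select((clazz, i) => new Placement
            {
                Class = clazz,
                Direction = Direction.Up,
                Coordinates = new Coordinates { X = i * 2, Y = 5 }
            }).ToList();
        }
        public IEnumerable<Shot> GetShots(IEnumerable<EnemyFleetInfo> enemyFleetInfo, int numberOfShots)
        {
            // numberOfShots is assumed to equal the number of our ships still afloat
            var target = enemyFleetInfo.First(f => f.NumberOfAfloatShipts > 0);
            return Enumerable.Range(0, numberOfShots).Select(i => new Shot
            {
                FleetName = target.Name,
                Coordinates = new Coordinates { X = i % 10, Y = (i * 3) % 10 }
            }).ToList();
        }
        // this captain ignores every notification
        public void NotifyOfBeingHit(IEnumerable<HitNotification> hits) { }
        public void RespondToShots(IEnumerable<ShotResult> results) { }
        public void StartGameNotification(GameStartNotification gameStartNotification) { }
        public void EndGameNotification(GameEndNotification gameEndNotification) { }
        public void EndRoundNotification(RoundEndNotification roundEndNotification) { }
    }
}
```
Real captains (see `RandomCaptain`, `SimpleCaptain` and `Nelson` in the `Bottleships.AI` project) use the same interface, just with smarter placement and targeting.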
### What will the afternoon look like?
We will have four rounds played over a single afternoon. Players will be put into teams at around 12pm and they'll have limited time to get their first version up and running. Scores will be cumulative so winning the final round will not guarantee overall victory. The winner will be the best scoring team across the day.
The day will look something like this:
- Draw Teams (and lunch) @ 12pm
- Round 1 @1:30pm
- Round 2 @2:30pm
- Round 3 & 4 @3:30pm
- Close & Prizes @ 4pm
### How Can I Help?
Take a look at the [Issues Page](https://github.com/ardliath/Bottleships/issues) where I'm adding details of what I'm working on next.
### Can I Play Bottleships at my Company/With My Friends?
Of course! The game is under the MIT Licence, though I'd recommend you wait until it's finished first!
<file_sep>using Bottleships.Communication;
using Bottleships.Logic;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Bottleships.AI
{
public class Nelson : ICaptain
{
public class Find
{
public ShotResult OriginalHittingShot { get; set; }
public Stack<Shot> UpcomingShots { get; set; }
public Find()
{
this.UpcomingShots = new Stack<Shot>();
}
}
public void StartGameNotification(GameStartNotification gameStartNotification)
{
this.ShotHistory = new List<Shot>();
this.SearchShots = null;
this.Finds = null;
}
public string GetName()
{
return "Nelson";
}
public IEnumerable<Placement> GetPlacements(IEnumerable<Clazz> classes)
{
var placements = new List<Placement>();
var rand = new Random(Guid.NewGuid().GetHashCode());
int xMin, xMax, yMin, yMax;
foreach (var clazz in classes.OrderByDescending(s => s.Size))
{
var direction = (Direction)(1 + rand.Next(3));
if (direction == Direction.Up || direction == Direction.Down)
{
xMin = 0;
xMax = 9;
yMin = (clazz.Size / 2) + 1;
yMax = 9 - (clazz.Size / 2);
}
else
{
yMin = 0;
yMax = 9;
xMin = (clazz.Size / 2) + 1;
xMax = 9 - (clazz.Size / 2);
}
var ship = new Placement
{
Class = clazz,
Direction = direction,
Coordinates = new Coordinates
{
X = rand.Next(xMin, xMax),
Y = rand.Next(yMin, yMax)
}
};
placements.Add(ship);
}
return placements;
}
public List<Shot> ShotHistory { get; set; }
public string LastHittMe { get; set; }
public IEnumerable<Shot> GetShots(IEnumerable<EnemyFleetInfo> enemyFleetInfo, int numberOfShots)
{
if (this.SearchShots == null)
{
CreateSearchShots(enemyFleetInfo);
}
if(LastHittMe != null)
{
// if the person who last shot at me has no ships then go back to the weakest
var shooter = enemyFleetInfo.Single(s => s.Name == LastHittMe);
if(shooter.NumberOfAfloatShipts == 0)
{
LastHittMe = null;
}
}
var shotsToFire = new List<Shot>();
string target = LastHittMe;
if (target == null)
{
target = enemyFleetInfo
.Where(f => f.NumberOfAfloatShipts > 0)
.OrderByDescending(f => f.NumberOfAfloatShipts)
.FirstOrDefault()
?.Name;
}
// see if we've got any finds first
foreach (var find in Finds)
{
while (shotsToFire.Count < numberOfShots && find.UpcomingShots.Any())
{
var nextShot = find.UpcomingShots.Pop();
if (!WouldShotBeAWaste(nextShot))
{
shotsToFire.Add(nextShot);
}
}
}
// then fill up with searches
while (shotsToFire.Count < numberOfShots)
{
if (SearchShots.ContainsKey(target)
&& SearchShots[target].Any())
{
var nextSearchingShot = SearchShots[target].Pop();
if (!WouldShotBeAWaste(nextSearchingShot))
{
shotsToFire.Add(nextSearchingShot);
}
}
}
ShotHistory.AddRange(shotsToFire);
return shotsToFire;
}
private bool WouldShotBeAWaste(Shot shot)
{
if (ShotHistory.Contains(shot)) return true;
if (shot.Coordinates.X < 0) return true;
if (shot.Coordinates.X > 9) return true;
if (shot.Coordinates.Y < 0) return true;
if (shot.Coordinates.Y > 9) return true;
return false;
}
private void CreateSearchShots(IEnumerable<EnemyFleetInfo> fleets)
{
SearchShots = new Dictionary<string, Stack<Shot>>();
this.Finds = new List<Find>();
Stack<Shot> searchShots;
foreach (var fleet in fleets)
{
searchShots = new Stack<Shot>();
for (int y = 0; y < 10; y++)
{
for (int x = 0; x < 5; x++)
{
searchShots.Push(new Shot
{
FleetName = fleet.Name,
Coordinates = new Coordinates
{
X = (x * 2) + (y % 2 == 0 ? 0 : 1),
Y = y
}
});
}
}
SearchShots[fleet.Name] = searchShots;
}
}
Dictionary<string, Stack<Shot>> SearchShots { get; set; }
public List<Find> Finds { get; set; }
public void RespondToShots(IEnumerable<ShotResult> results)
{
foreach (var result in results)
{
if (result.WasAHit) // if we've hit
{
if (result.WasASink) // and sunk
{
foreach (var find in Finds.Where(f => f.OriginalHittingShot.Equals(result.Shot)).ToList()) // materialise first so we can remove from Finds while looping; then cancel all other shots aiming from that find
{
this.Finds.Remove(find);
}
}
var originatingShot = this.Finds.SingleOrDefault(f => f.OriginalHittingShot.Equals(result.Shot));
if (originatingShot == null) // it was a searching shot which hit
{
var find = new Find // create a new find
{
OriginalHittingShot = result
};
this.Finds.Add(find);
// with four upcoming shots which spiral out from it
find.UpcomingShots.Push(new Shot
{
FleetName = result.Shot.FleetName,
Coordinates = new Coordinates { X = result.Shot.Coordinates.X + 1, Y = result.Shot.Coordinates.Y }
});
find.UpcomingShots.Push(new Shot
{
FleetName = result.Shot.FleetName,
Coordinates = new Coordinates { X = result.Shot.Coordinates.X - 1, Y = result.Shot.Coordinates.Y }
});
find.UpcomingShots.Push(new Shot
{
FleetName = result.Shot.FleetName,
Coordinates = new Coordinates { X = result.Shot.Coordinates.X, Y = result.Shot.Coordinates.Y + 1 }
});
find.UpcomingShots.Push(new Shot
{
FleetName = result.Shot.FleetName,
Coordinates = new Coordinates { X = result.Shot.Coordinates.X, Y = result.Shot.Coordinates.Y - 1 }
});
}
else // if we do know where it came from
{
// create another shot extending in that direction
if(result.Shot.Coordinates.X > originatingShot.OriginalHittingShot.Shot.Coordinates.X) // if it's a shot to the right
{
originatingShot.UpcomingShots.Push(new Shot
{
FleetName = result.Shot.FleetName,
Coordinates = new Coordinates { X = result.Shot.Coordinates.X + 1, Y = result.Shot.Coordinates.Y }
});
}
if (result.Shot.Coordinates.X < originatingShot.OriginalHittingShot.Shot.Coordinates.X) // if it's a shot to the left
{
originatingShot.UpcomingShots.Push(new Shot
{
FleetName = result.Shot.FleetName,
Coordinates = new Coordinates { X = result.Shot.Coordinates.X - 1, Y = result.Shot.Coordinates.Y }
});
}
if (result.Shot.Coordinates.Y > originatingShot.OriginalHittingShot.Shot.Coordinates.Y)
{
originatingShot.UpcomingShots.Push(new Shot
{
FleetName = result.Shot.FleetName,
Coordinates = new Coordinates { X = result.Shot.Coordinates.X, Y = result.Shot.Coordinates.Y + 1 }
});
}
if (result.Shot.Coordinates.Y < originatingShot.OriginalHittingShot.Shot.Coordinates.Y)
{
originatingShot.UpcomingShots.Push(new Shot
{
FleetName = result.Shot.FleetName,
Coordinates = new Coordinates { X = result.Shot.Coordinates.X, Y = result.Shot.Coordinates.Y - 1 }
});
}
}
}
}
}
public void NotifyOfBeingHit(IEnumerable<HitNotification> hits)
{
// swap to shoot at whoever is shooting at me
LastHittMe = hits.LastOrDefault()?.Shooter;
}
public void EndGameNotification(GameEndNotification gameEndNotification)
{
}
public void EndRoundNotification(RoundEndNotification roundEndNotification)
{
}
}
}
<file_sep>using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Bottleships.Logic
{
public class Round
{
public IEnumerable<Game> Games { get; set; }
public bool RoundOver { get; set; }
public IEnumerable<Clazz> Classes { get; set; }
public IEnumerable<KeyValuePair<Player, int>> ScoresPerPlayer
{
get
{
return this.Games.SelectMany(s => s.ScoresPerPlayer)
.GroupBy(g => g.Key)
.Select(g => new KeyValuePair<Player, int>(g.Key, g.Select(x => x.Value).Sum()))
.OrderByDescending( s => s.Value);
}
}
public Round(params Clazz[] classes)
{
this.Classes = classes;
}
protected int? GameIndex { get; set; }
public Game CurrentGame
{
get
{
return this.GameIndex.HasValue
? this.Games.ElementAt(this.GameIndex.Value)
: null;
}
}
public void MoveOntoNextGame()
{
if (this.GameIndex.HasValue)
{
this.GameIndex++;
if (this.Games.Count() == this.GameIndex)
{
this.GameIndex = null;
this.RoundOver = true;
}
}
else
{
this.GameIndex = 0;
}
}
}
}
<file_sep>using Bottleships.Communication;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Bottleships.Logic;
namespace Bottleships.AI
{
public class SimpleCaptain : ICaptain
{
public SimpleCaptain()
{
}
public void StartGameNotification(GameStartNotification gameStartNotification)
{
this.ShotHistory = new List<Shot>();
}
public string GetName()
{
return "Simple Captain";
}
public IEnumerable<Placement> GetPlacements(IEnumerable<Clazz> classes)
{
var placements = new List<Placement>();
var rand = new Random(Guid.NewGuid().GetHashCode());
int xMin, xMax, yMin, yMax;
foreach (var clazz in classes.OrderByDescending(s => s.Size))
{
var direction = (Direction)(1 + rand.Next(3));
if(direction == Direction.Up || direction == Direction.Down)
{
xMin = 0;
xMax = 9;
yMin = (clazz.Size / 2) + 1;
yMax = 9 - (clazz.Size / 2);
}
else
{
yMin = 0;
yMax = 9;
xMin = (clazz.Size / 2) + 1;
xMax = 9 - (clazz.Size / 2);
}
var ship = new Placement
{
Class = clazz,
Direction = direction,
Coordinates = new Coordinates
{
X = rand.Next(xMin, xMax),
Y = rand.Next(yMin, yMax)
}
};
placements.Add(ship);
}
return placements;
}
public List<Shot> ShotHistory { get; set; }
public IEnumerable<Shot> GetShots(IEnumerable<EnemyFleetInfo> enemyFleetInfo, int numberOfShots)
{
var shots = GetShotsNotTaken(enemyFleetInfo)
.OrderBy(s => Guid.NewGuid().GetHashCode())
.Take(numberOfShots);
this.ShotHistory.AddRange(shots);
return shots;
}
public IEnumerable<Shot> GetShotsNotTaken(IEnumerable<EnemyFleetInfo> enemyFleetInfo)
{
var allShots = new List<Shot>();
var fleetsToShootAt = enemyFleetInfo.Where(f => f.NumberOfAfloatShipts > 0);
foreach (var enemy in fleetsToShootAt)
{
for(int x = 0; x < 10; x++)
{
for (int y = 0; y < 10; y++)
{
allShots.Add(new Shot
{
Coordinates = new Coordinates
{
X = x,
Y = y
},
FleetName = enemy.Name
});
}
}
}
return allShots.Except(this.ShotHistory);
}
public void RespondToShots(IEnumerable<ShotResult> results)
{
// Doesn't care if we've hit something, not smart enough for that
}
public void EndGameNotification(GameEndNotification gameEndNotification)
{
}
public void EndRoundNotification(RoundEndNotification roundEndNotification)
{
}
public void NotifyOfBeingHit(IEnumerable<HitNotification> hits)
{
}
}
}
<file_sep>using Bottleships.Communication;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Bottleships.Logic;
namespace Bottleships.AI
{
/// <summary>
/// This captain throws an error at everything - used for testing
/// </summary>
public class ExceptionalCaptain : ICaptain
{
public void EndGameNotification(GameEndNotification gameEndNotification)
{
throw new NotImplementedException();
}
public void EndRoundNotification(RoundEndNotification roundEndNotification)
{
throw new NotImplementedException();
}
public string GetName()
{
throw new NotImplementedException();
}
public IEnumerable<Placement> GetPlacements(IEnumerable<Clazz> classes)
{
throw new NotImplementedException();
}
public IEnumerable<Shot> GetShots(IEnumerable<EnemyFleetInfo> enemyFleetInfo, int numberOfShots)
{
throw new NotImplementedException();
}
public void NotifyOfBeingHit(IEnumerable<HitNotification> hits)
{
throw new NotImplementedException();
}
public void RespondToShots(IEnumerable<ShotResult> results)
{
throw new NotImplementedException();
}
public void StartGameNotification(GameStartNotification gameStartNotification)
{
throw new NotImplementedException();
}
}
}
|
9e7a21c98e10ed28360587cad3acc8cc3feefa2e
|
[
"Markdown",
"C#"
] | 36
|
C#
|
ardliath/Bottleships
|
e7e8257474299f608c0b23524b2c6f1c17600cc8
|
86af18869031922284eb178e73a5ad19a463be58
|
refs/heads/master
|
<file_sep>import java.util.Scanner;
public class _1_SymmetricNumbersInRange {
public static String getFirstHalf(String str, int endLoop){
String firstHalfString = "";
for (int i = 0; i < endLoop; i++) {
firstHalfString = firstHalfString + str.charAt(i);
}
return firstHalfString;
}
public static String getSecondHalf(String str, int endLoop){
String secondHalfString = "";
if (str.length() % 2 == 0) {
endLoop--;
}
for (int i = str.length() - 1; i > endLoop; i--) {
secondHalfString = secondHalfString + str.charAt(i);
}
return secondHalfString;
}
public static void main(String[] args) {
Scanner scanner = new Scanner(System.in);
int start;
int end;
while (true) {
System.out.print("Enter start of the range: ");
String startStr = scanner.nextLine();
System.out.print("Enter end of the range: ");
String endStr = scanner.nextLine();
System.out.println(PrintString.newStringFromChars(55, '*'));
if (ParseNumbers.tryParseInt(startStr) &&
ParseNumbers.tryParseInt(endStr)) {
start = Integer.parseInt(startStr);
end = Integer.parseInt(endStr);
} else {
System.out.println("Please use correct number format.");
System.out.println(PrintString.newStringFromChars(55, '-'));
continue;
}
if (end < start || start < 0 || end < 0) {
System.out.println("Please enter correct range - positive " + System.lineSeparator() +
"numbers smaller and end bigger than start.");
System.out.println(PrintString.newStringFromChars(55, '-'));
continue;
}
break;
}
for(int i = start; i <= end; i++){
String numberString = i + "";
int halfSymbols = numberString.length() / 2;
String firstHalf = getFirstHalf(numberString, halfSymbols);
String secondHalf = getSecondHalf(numberString, halfSymbols);
if (firstHalf.equals(secondHalf)) {
System.out.print(numberString + " ");
}
}
System.out.println();
}
}
<file_sep>import java.util.Scanner;
public class _2_GenerateThreeLetterWords {
public static void main(String[] args) {
Scanner scanner = new Scanner(System.in);
String inputLetters = scanner.nextLine();
char[] inputLettArr = inputLetters.toCharArray();
for (int firstPos = 0; firstPos < inputLetters.length(); firstPos++) {
for (int secondPos = 0; secondPos < inputLetters.length(); secondPos++) {
for (int thirdPos = 0; thirdPos < inputLetters.length(); thirdPos++) {
System.out.print(inputLettArr[firstPos] + "" +
inputLettArr[secondPos] + "" + inputLettArr[thirdPos] + " ");
}
}
}
}
}
<file_sep>Задачата за принтиране на карти е изпробвана на ОС Ubuntu и най-вероятно няма да тръгне на Windows.
<file_sep>import java.text.DecimalFormat;
import java.util.Scanner;
public class _6_FormattingNumbers {
public static void main(String[] args) {
Scanner scanner = new Scanner(System.in);
System.out.print("Enter an integer: ");
String intNumString = scanner.nextLine();
System.out.print("Enter a floating-point number: ");
String firstFloatNumString = scanner.nextLine();
System.out.print("Enter a floateing-point number: ");
String secondFloatNumString = scanner.nextLine();
System.out.println(PrintString.newStringFromChars(50, '-'));
if (ParseNumbers.tryParseInt(intNumString) &&
ParseNumbers.tryParseDouble(firstFloatNumString) &&
ParseNumbers.tryParseDouble(secondFloatNumString)) {
int intNum = Integer.parseInt(intNumString);
double firstFPNUm = Double.parseDouble(firstFloatNumString);
double secondFPNUm = Double.parseDouble(secondFloatNumString);
String hexNum = Integer.toHexString(intNum);
String binNum = Integer.toBinaryString(intNum);
System.out.print("|");
System.out.print(String.format("%-10s", hexNum.toUpperCase()));
System.out.print("|");
System.out.print(String.format("%10s", binNum).replace(' ', '0'));
System.out.print("|");
System.out.printf("%10.2f", firstFPNUm);
System.out.print("|");
System.out.printf("%-10.3f", secondFPNUm);
System.out.print("|");
} else {
System.out.println("PLease use correct number format.");
}
}
}
<file_sep>import java.util.Scanner;
public class _06_CountSpecifiedWord {
public static void main(String[] args) {
Scanner scanner = new Scanner(System.in);
System.out.println("PLease enter text.");
String text = scanner.nextLine();
System.out.print("Please enter a word to find: ");
String matcher = scanner.nextLine();
String[] words = text.split("\\W+");
int matcherCount = 0;
for (int i = 0; i < words.length; i++) {
if (words[i].equalsIgnoreCase(matcher)) {
matcherCount++;
}
}
System.out.printf("The word %s appears %d times in the text.", matcher, matcherCount);
}
}
<file_sep>import java.util.InputMismatchException;
import java.util.Scanner;
public class SumTwoNumbers {
public static void main(String[] args) {
while (true){
try{
Scanner sc = new Scanner(System.in);
System.out.print("Enter a number: ");
int firstNum = sc.nextInt();
System.out.print("Enter a number: ");
int secondNum = sc.nextInt();
int sum = firstNum + secondNum;
System.out.printf("The sum is: %s", sum);
break;
}catch(InputMismatchException e){
System.out.println("Please enter correct input.");
continue;
}
}
}
}
<file_sep>import java.util.Scanner;
public class _1_RectangleArea {
public static void main(String[] args) {
Scanner sc = new Scanner(System.in);
while(true){
try {
System.out.print("Plese enter two inntegers as sides of the rectangle: ");
String input = sc.nextLine();
System.out.println(newStringFromChars(60, '-'));
String[] sides = input.split(" ");
int firstSide = Integer.parseInt(sides[0]);
int secondSide = Integer.parseInt(sides[1]);
int area = firstSide * secondSide;
System.out.printf("The area of the rectangle is: %d.", area);
break;
} catch (NumberFormatException e) {
System.out.println("You have entered some symbols as input. Please enter only integers.");
System.out.println();
continue;
}
}
}
public static String newStringFromChars(int length, char charToFill) {
char[] array = new char[length];
int pos = 0;
while (pos < length) {
array[pos] = charToFill;
pos++;
}
return new String(array);
}
}
<file_sep>Входните файлове, а също и изходните се намират в папка src\IOFiles.<file_sep>import java.util.Map;
import java.util.Scanner;
import java.util.TreeMap;
public class _02_SequencesOfEqualStrings {
public static void printsStrings(String str, int count) {
for (int i = 0; i < count; i++) {
if (i == count - 1) {
System.out.print(str);
} else {
System.out.print(str + " ");
}
}
System.out.println();
}
public static void main(String[] args) {
Scanner scanner = new Scanner(System.in);
System.out.println("Please enter words separated with a single space.");
String input = scanner.nextLine();
String[] wordsString = input.split(" ");
TreeMap<String, Integer> words = new TreeMap<String, Integer>();
for (String wordString : wordsString) {
if (words.containsKey(wordString)) {
words.put(wordString, words.get(wordString) + 1);
} else {
words.put(wordString, 1);
}
}
for (Map.Entry<String, Integer> word : words.entrySet()) {
String wordString = word.getKey();
int count = word.getValue();
printsStrings(wordString, count);
}
}
}
<file_sep>import java.util.List;
import java.util.Arrays;
import java.util.Scanner;
import java.util.Collections;
import java.text.DecimalFormat;
import org.apache.commons.lang3.ArrayUtils;
public class _4_TheSmallestOf3Numbers {
public static Boolean tryParse(String numAsString){
try {
Double.parseDouble(numAsString);
return true;
} catch (Exception e) {
return false;
}
}
public static void main(String[] args) {
Scanner scanner = new Scanner(System.in);
double[] numbers = new double[3];
for (int i = 0; i < 3; i++) {
while (true) {
System.out.print("Enter a number: ");
String numberAsString = scanner.nextLine();
if (tryParse(numberAsString)) {
numbers[i] = Double.parseDouble(numberAsString);
break;
} else {
System.out.println("You have entered some wrong data.");
continue;
}
}
}
List<Double> numbersList = Arrays.asList(ArrayUtils.toObject(numbers));
double min = Collections.min(numbersList);
String minAsString = String.valueOf(min);
DecimalFormat df = new DecimalFormat("#.##");
minAsString = df.format(min);
System.out.printf("The min is %s", minAsString);
}
}
<file_sep>import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
public class PrintTheCurrentDateAndTime {
public static void main(String[] args){
DateTimeFormatter format = DateTimeFormatter.ofPattern("dd MMMM yyyy, EEEE");
String localDateTime = LocalDateTime.now().format(format);
System.out.println(localDateTime);
}
}
<file_sep>import java.awt.List;
import java.util.InputMismatchException;
import java.util.Random;
import java.util.Scanner;
public class _6_RandomHandsOfFiveCards {
public static List generateDeck(){
String card = "";
List deck = new List();
for (int i = 2; i <= 14; i++) {
switch (i) {
case 10: card = "10"; break;
case 11: card = "J"; break;
case 12: card = "Q"; break;
case 13: card = "K"; break;
case 14: card = "A"; break;
default: card = "" + i; break;
}
for (int j = 1; j <= 4; j++) {
char color = 0;
switch (j) {
case 1: color = '♣'; deck.add(card + color); break;
case 2: color = '♦'; deck.add(card + color); break;
case 3: color = '♠'; deck.add(card + color); break;
case 4: color = '♥'; deck.add(card + color); break;
}
}
}
return deck;
}
public static void printArray(String[] array) {
for (String string : array) {
System.out.print(string + " ");
}
}
public static String[] generateHand(Random generator, List deck) {
String[] hand = new String[5];
for (int i = 0; i < 5; i++) {
int index = generator.nextInt(deck.countItems());
hand[i] = deck.getItem(index);
deck.remove(index);
}
return hand;
}
public static void main(String[] args) {
Scanner scanner = new Scanner(System.in);
while (true) {
try {
System.out.print("Enter hands count: ");
int handsCount = scanner.nextInt();
Random randomGenerator = new Random();
for (int i = 0; i < handsCount; i++) {
List deck = generateDeck();
String[] hand = generateHand(randomGenerator, deck);
printArray(hand);
System.out.println();
}
break;
} catch (InputMismatchException inE) {
System.out.println("Please use correct input format.");
scanner.nextLine();
continue;
}
}
}
}
<file_sep>import java.util.Arrays;
import java.util.InputMismatchException;
import java.util.Scanner;
public class _01_SortArrayOfNumbers {
public static void main(String[] args) {
Scanner scanner = new Scanner(System.in);
try {
System.out.print("Enter count of numbers: ");
int numbersCount = scanner.nextInt();
scanner.nextLine();
int[] numbers = new int[numbersCount];
for (int i = 0; i < numbersCount; i++) {
numbers[i] = scanner.nextInt();
scanner.nextLine();
}
Arrays.sort(numbers);
for (int number : numbers) {
System.out.print(number + " ");
}
} catch (InputMismatchException e) {
System.err.println("Plese use correct input format.");
}
}
}
<file_sep>import java.util.Arrays;
import java.util.Iterator;
import java.util.Scanner;
public class SortArrayOfStrings {
public static void main(String[] args){
Scanner sc = new Scanner(System.in);
String[] strings;
try {
System.out.print("Enter number of strings: ");
int number = sc.nextInt();
strings = new String[number];
for (int i = 0; i < number; i++) {
strings[i] = sc.next();
}
} finally {
sc.close();
}
Arrays.sort(strings);
for (int i = 0; i < strings.length; i++) {
System.out.println(strings[i]);
}
}
}
<file_sep>import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Locale;
public class _9_ListOfProducts {
public static void printCollection(ArrayList<Product> collection) {
for (Product product : collection) {
System.out.println(product);
}
}
public static void createFileFromArrList(ArrayList<Product> products, String filePath){
try (FileWriter writer = new FileWriter(filePath)){
for (Product product : products) {
String productString = product.toString();
writer.write(productString);
writer.write(System.lineSeparator());
}
} catch (IOException e) {
e.printStackTrace();
}
}
public static void main(String[] args) {
Locale.setDefault(Locale.ROOT);
String fileString = "src" + File.separator + "IOFiles" + File.separator + "InputFileProductsProblem9.txt";
try (BufferedReader reader = new BufferedReader(new FileReader(fileString))){
ArrayList<Product> productsList = new ArrayList<Product>();
while (true) {
String line = reader.readLine();
if (line == null) {
break;
}
String[] productString = line.split(" ");
double price = 0;
String name = "";
// ArrayList<Product> productsList = new ArrayList<Product>();
if (ParseNumbers.tryParseDouble(productString[1])) {
price = Double.parseDouble(productString[1]);
name = productString[0];
} else {
throw new NumberFormatException();
}
productsList.add(new Product(price, name));
}
printCollection(productsList);
System.out.println();
Collections.sort(productsList);
printCollection(productsList);
String fileOutput = "src" + File.separator + "IOFiles" + File.separator + "OutputFileProductsProblem9.txt";
createFileFromArrList(productsList, fileOutput);
} catch (FileNotFoundException e) {
System.err.println("File not found.");
} catch (IOException e) {
System.err.println("Input output exception occirred.");
}
}
}
<file_sep>public class ParseNumbers {
public static Boolean tryParseInt(String numAsString){
try {
Integer.parseInt(numAsString);
return true;
} catch (Exception e) {
return false;
}
}
public static Boolean tryParseFloatingPoint(String numAsString){
try {
Float.parseFloat(numAsString);
return true;
} catch (Exception e) {
return false;
}
}
public static Boolean tryParseDouble(String numAsString){
try {
Double.parseDouble(numAsString);
return true;
} catch (Exception e) {
return false;
}
}
}
<file_sep>import java.util.Arrays;
import java.util.Scanner;
public class _2_TriangleArea {
public static Boolean tryParse(String numAsString){
try {
Double.parseDouble(numAsString);
return true;
} catch (Exception e) {
return false;
}
}
public static double[] getCoordinates(String vertice){
double[] coordinates = new double[2];
String[] auxiliaryArr = new String[2];
auxiliaryArr = vertice.split(" ");
if (auxiliaryArr.length < 2) {
throw new IllegalArgumentException("You have entered only one parameter.");
}
if (tryParse(auxiliaryArr[0]) &&
tryParse(auxiliaryArr[1])) {
coordinates[0] = Double.parseDouble(auxiliaryArr[0]);
coordinates[1] = Double.parseDouble(auxiliaryArr[1]);
} else {
throw new NumberFormatException("You have entered wrong data.");
}
return coordinates;
}
public static void main(String[] args) {
Scanner scanner = new Scanner(System.in);
System.out.println(
"Please enter coordinates of the three vertices separeated by a single space.");
System.out.println();
String vertice;
double[] coordinates = new double[2];
System.out.print("Enter coordinates: ");
vertice = scanner.nextLine();
coordinates = getCoordinates(vertice);
Point firstPoint = new Point (coordinates);
System.out.print("Enter coordinates: ");
vertice = scanner.nextLine();
coordinates = getCoordinates(vertice);
Point secondPoint = new Point (coordinates);
System.out.print("Enter coordinates: ");
vertice = scanner.nextLine();
coordinates = getCoordinates(vertice);
Point thirdPoint = new Point (coordinates);
Point[] points = new Point[3];
points[0] = firstPoint;
points[1] = secondPoint;
points[2] = thirdPoint;
Arrays.sort(points);
Triangle triangle = new Triangle(points);
double area = (triangle.getA().getX() * (triangle.getB().getY() - triangle.getC().getY()) +
triangle.getB().getX() * (triangle.getC().getY() - triangle.getA().getY()) +
triangle.getC().getX() * (triangle.getA().getY() - triangle.getB().getY())) / 2;
int output;
if (area != 0) {
int areaAbs = (int) Math.abs(area);
output = areaAbs;
} else {
output = 0;
}
System.out.println(output);
}
}
<file_sep>SoftUni---JavaBasics
====================
Contains projects with Java programming language
|
ce6167785f7b5c09b37d2961eed2e08b1da3bb25
|
[
"Markdown",
"Java",
"Text"
] | 18
|
Java
|
GBurlakova/SoftUni---JavaBasics
|
f0b83ace60a96b64c7e2bd20a68e9300f824a9fa
|
d15f490a5405596ab2f368e0b7ab95cee85d9a0a
|
refs/heads/master
|
<file_sep>#!/bin/bash
#
# Copyright (C) 2020 RB INTERNATIONAL NETWORK
#
# An Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
echo "<<<<< © RB INTERNATIONAL NETWORK™ >>>>>"
GIT_USERNAME=""
GIT_MAIL=""
echo "Enter GitHub Username: "
read GIT_USERNAME
echo "Enter GitHub Email: "
read GIT_MAIL
# Username
echo -e "\n================== Adding Username ==================\n"
git config --global user.name "$GIT_USERNAME"
echo -e "\n Added your username!\n"
#Email
echo -e "\n================== Adding Email ==================\n"
git config --global user.email "$GIT_MAIL"
echo -e "\n Added your email!\n"
#List Settings
echo -e "\n================== List Settings ==================\n"
git config --list
echo "<<<<< © RB INTERNATIONAL NETWORK™ >>>>>"
<file_sep>#!/bin/bash
#
# Copyright (C) 2020 RB INTERNATIONAL NETWORK
#
# An Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
echo "<<<<< © RB INTERNATIONAL NETWORK™ >>>>>"
ROM_DIR=""
ROM_NAME=""
echo "enter full rom directory"
echo "eg, /home/cyberjalagam/sakura"
read ROM_DIR
cd "$ROM_DIR"/out/target/product/RMX1831
ls
echo "Enter FULL rom name including extension: "
read ROM_NAME
mkdir working-dir
cp -v "$ROM_NAME" working-dir
cd working-dir && unzip "$ROM_NAME"
rm -rf "$ROM_NAME"
rm -rf boot.img
wget https://github.com/CyberJalagam/android_rom_building_scripts/raw/master/prebuilt-device-specific/RMX1831/boot.img
zip -r "$ROM_NAME" ./*
echo "Operation sucessful!, Full path of the modified rom given below"
realpath "$ROM_NAME"
echo "<<<<< © RB INTERNATIONAL NETWORK™ >>>>>"
|
654cc4520e40f9cab3dd3ae26bf29a4f8feabc7b
|
[
"Shell"
] | 2
|
Shell
|
Rohit-zero/android_rom_building_scripts
|
546e7fdae61b53e07fa06da9a761f956c3527e75
|
5fdbe8b6e983ca7dac64c93a07852f6352feca11
|
refs/heads/master
|
<file_sep>package com.wzq.bean;
public class HelloNewIdea {
public static void main(String[] args) {
System.out.println("Hello IDEA 2019,this is WangZhiqiang !");
}
}
|
a96b3266835c7c285cfe79ad5adb618375982b7a
|
[
"Java"
] | 1
|
Java
|
wangzhiqiangcom/codnewIdeaGitTest
|
19c8fe3a88d96511d368ef9f273d387e32ebad73
|
04aace656189746cfeae59bd5e2e240ae8e5a9fb
|
refs/heads/master
|
<file_sep>import axios from "axios";
// Uni's code
const base_url = 'https://api.themoviedb.org/3';
const api_key = '<KEY>';
export const moviesApi = {
nowPlaying: () => axios.get(`${base_url}/movie/now_playing?api_key=${api_key}`),
upcoming: () => axios.get(`${base_url}/movie/upcoming?api_key=${api_key}`),
popular: () => axios.get(`${base_url}/movie/popular?api_key=${api_key}`),
movieDetail: id => axios.get(`${base_url}/movie/${id}?api_key=${api_key}`, {
params: {
append_to_response: "videos"
}
}),
search: term =>
axios.get(`${base_url}/search/movie?api_key=${api_key}`, {
params: {
query: encodeURIComponent(term)
}
})
};
export const tvApi = {
topRated: () => axios.get(`${base_url}/tv/top_rated?api_key=${api_key}`),
popular: () => axios.get(`${base_url}/tv/popular?api_key=${api_key}`),
airingToday: () => axios.get(`${base_url}/tv/airing_today?api_key=${api_key}`),
showDetail: id => axios.get(`${base_url}/tv/${id}?api_key=${api_key}`, {
params: {
append_to_response: "videos"
}
}),
search: term =>
axios.get(`${base_url}/search/tv?api_key=${api_key}`, {
params: {
query: encodeURIComponent(term)
}
})
};
// Nico's code
//
// const api = axios.create({
// baseURL: "https://api.themoviedb.org/3/",
// params: {
// api_key: "<KEY>",
// language: "en-US"
// }
// });
//
// export const moviesApi = {
// nowPlaying: () => api.get("movie/now_playing"),
// upcoming: () => api.get("movie/upcoming"),
// popular: () => api.get("movie/popular"),
// movieDetail: id =>
// api.get(`movie/${id}`, {
// params: {
// append_to_response: "videos"
// }
// }),
// search: term =>
// api.get("search/movie", {
// params: {
// query: encodeURIComponent(term)
// }
// })
// };
//
// export const tvApi = {
// topRated: () => api.get("tv/top_rated"),
// popular: () => api.get("tv/popular"),
// airingToday: () => api.get("tv/airing_today"),
// showDetail: id =>
// api.get(`tv/${id}`, {
// params: {
// append_to_response: "videos"
// }
// }),
// search: term =>
// api.get("search/tv", {
// params: {
// query: encodeURIComponent(term)
// }
// })
// };
//
<file_sep>self.__precacheManifest = (self.__precacheManifest || []).concat([
{
"revision": "c1478694d2034879d327f2276cfbb0f5",
"url": "/nomflix-react-projext/index.html"
},
{
"revision": "1f6f59fb45df442d4e96",
"url": "/nomflix-react-projext/static/js/2.67aa7896.chunk.js"
},
{
"revision": "f231859d6585c4cd5f80c344783ed269",
"url": "/nomflix-react-projext/static/js/2.67aa7896.chunk.js.LICENSE.txt"
},
{
"revision": "4c2747395408feb12bd0",
"url": "/nomflix-react-projext/static/js/main.9d794d7a.chunk.js"
},
{
"revision": "f148b0c54876373c2b41",
"url": "/nomflix-react-projext/static/js/runtime-main.138c0b7c.js"
},
{
"revision": "21a837ebd674f5e6a6e90d16925cd50b",
"url": "/nomflix-react-projext/static/media/noPosterSmall.21a837eb.png"
}
]);<file_sep>import HomeContainer from "./HomeContainer";
export default HomeContainer<file_sep>import TVContainer from "./TVContainer";
export default TVContainer
|
d11bbf256ef4b502f6d21f70d03935eaeb90a5dc
|
[
"JavaScript"
] | 4
|
JavaScript
|
Rachel4858/nomflix-react-projext
|
b1fc992e4d71ddb969c88bb58da049cee57f0fbf
|
ea538589179cc4dcdf7e216298b68a2a0b73e9d9
|
refs/heads/master
|
<file_sep>const util = require('util');
const Promise = require('bluebird');
const { Pipe } = require('../lib');
// Gets a random integer between 0 (zero) and max
function randomWait(max) {
return Math.floor(Math.random() * (max + 1));
}
// Waits a random period of time before resolving with a dumb message
// about the data it was given.
function doSomethingUninteresting(data) {
// simulate latency
var wait = randomWait(1000);
return Promise.delay(wait).then(
() => `It took me ${wait} milliseconds to notice you gave me ${data}.`
);
}
// Logs it then calls next.
function logIt(it) {
util.log(util.inspect(it));
return it;
}
// Observes it (by writing it to the console).
function observeIt(it) {
util.log(`observed: ${it}`);
return it;
}
// Create an fpipe over our uninteresting function...
var pipe = new Pipe(doSomethingUninteresting)
// Add some middleware...
.pipe(logIt)
.pipe(observeIt);
// Process the pipeline
pipe
.process('funcklewinker')
.then(msg => util.log('Got a message: '.concat(msg)))
.catch(err =>
util.log('Got an error: '.concat(util.inspect(err, false, 99)))
);
<file_sep># Function Pipe
A function pipe eases the construction of extensible processing pipelines.
Each step in the pipeline takes the result of the prior step as input, and produces its own result, which in turn, is used by the next step in the pipeline, until processing is complete and a final result is produced. Each step in the pipeline can be thought of as middleware, but unlike the [connect framework](https://github.com/senchalabs/connect), the inputs and outputs are not related to nodejs' original callback pattern.
## 2018 Breaking Changes
The 2018 version 1.0.0 has very little in common with the version published in 2012. The 2018 version of the `Pipe` is exclusively ES6, and is intended for use in nodejs.
## Getting Started
Create a pipeline by constructing an instance of the `Pipe` class, optionally specifying initial processing steps, then as needed, add additional processing steps using the `.pipe()` method, and eventually, execute the pipeline using the `.process()` method.
```javascript
const res = await new Pipe(fn1)
.pipe(fn2)
.pipe(fn3)
.process('an argument for fn1');
console.log(`Result of pipeline processing: ${res}`);
```
The result of each step in the process cascades into the next step, just like `Promise`'s `.then()` method.
```text
arg ---v
fn1(arg1) ---v
fn2(arg2) ---v
fn3(arg3) ---v
result
```
The constructor can take multiple steps...
```javascript
const res = await new Pipe(fn1, fn2, fn3).process('an argument for fn1');
console.log(`Result of pipeline processing: ${res}`);
```
`.pipe()` can take multiple steps...
```javascript
const res = await new Pipe(fn2)
.pipe(
fn2,
fn3
)
.process(42);
```
Arguments passed to `.process()` are projected into the first step...
```javascript
const res = await new Pipe((x, y) => x + y)
.pipe(x => x + 1)
.pipe(x => x * 2)
.process(10, 10);
```
Each step's result can be projected down stream in the pipe...
```javascript
const res = await new Pipe((x, y) => [x * 2, y * 5], true)
.pipe(
(x, y) => [x + y, 1],
true
)
.pipe((x, y) => (x + y) * 2)
.process(5, 2);
```
Errors propagate to the caller...
```javascript
const stdout = console.log.bind(console);
try {
const res = await new Pipe()
.pipe(() => stdout('click'))
.pipe(() => stdout('click'))
.pipe(() => {
throw new Error('Boom!');
})
.pipe(() => stdout('click'))
.pipe(() => stdout('click'))
.process();
} catch (err) {
assert.equal('Boom!', err.message);
}
```
<file_sep>const { Pipe } = require('..');
const { log } = require('util');
const Promise = require('bluebird');
function nullFn() {}
test('.ctor() with no args succeeds', () => {
const pipe = new Pipe();
expect(pipe).toBeDefined();
});
test('.ctor(fn) succeeds', () => {
const pipe = new Pipe(nullFn);
expect(pipe).toBeDefined();
});
test('.ctor([fn]) succeeds', () => {
const pipe = new Pipe([nullFn]);
expect(pipe).toBeDefined();
});
test('.ctor([fn,fn]) succeeds', () => {
const pipe = new Pipe([nullFn, nullFn]);
expect(pipe).toBeDefined();
});
test('.ctor(fn,fn) succeeds', () => {
const pipe = new Pipe(nullFn, nullFn);
expect(pipe).toBeDefined();
});
test('.pipe() succeeds with no arguments (but is dumb)', () => {
const pipe = new Pipe();
const pipe1 = pipe.pipe();
expect(pipe1).toBeDefined();
// Oh, by the way, pipes are immutable, so .pipe() creates a new pipe.
expect(pipe1).not.toBe(pipe);
});
test('.pipe(fn) adds a function to the pipe', async done => {
try {
const pipe = new Pipe().pipe(arg => `arguments[0]: ${arg}`);
const res = await pipe.process();
expect(res).toMatch('arguments[0]: undefined');
done();
} catch (err) {
done(err);
}
});
test('.pipe(fn, fn, fn) adds functions to the pipe in the order specified', async done => {
try {
const pipe = new Pipe().pipe(
x => x + 1,
x => x + 1,
x => {
log(`observed: ${x}`);
return x;
}
);
const res = await pipe.process(0);
expect(res).toBe(2);
done();
} catch (err) {
done(err);
}
});
test('.pipe([fn, fn, fn]) adds functions to the pipe in the order specified', async done => {
try {
const pipe = new Pipe().pipe(
x => x + 1,
x => x + 1,
x => {
log(`observed: ${x}`);
return x;
}
);
const res = await pipe.process(0);
expect(res).toBe(2);
done();
} catch (err) {
done(err);
}
});
test('.pipe(...) can mix and match async and non-async', async done => {
try {
const pipe = new Pipe().pipe(
x => x + 1,
async x => {
await Promise.delay(2);
return x + 1;
},
x => {
log(`observed: ${x}`);
return x;
}
);
const res = await pipe.process(0);
expect(res).toBe(2);
done();
} catch (err) {
done(err);
}
});
test('.pipe(...) will resolve promises', async done => {
try {
const pipe = new Pipe().pipe(
x => x + 1,
x => Promise.delay(2).then(() => x + 1),
x => {
log(`observed: ${x}`);
return x;
}
);
const res = await pipe.process(0);
expect(res).toBe(2);
done();
} catch (err) {
done(err);
}
});
test('.process(...) passes args to the pipeline', async done => {
try {
const one = Math.floor(Math.random() * 10) + 1;
const two = Math.floor(Math.random() * 10) + 1;
const pipe = new Pipe().pipe(
(x, y) => x + y,
x => Promise.delay(2).then(() => x + 1),
x => {
log(`observed: ${x}`);
return x;
}
);
const res = await pipe.process(one, two);
expect(res).toBe(one + two + 1);
done();
} catch (err) {
done(err);
}
});
test('.pipe(...) optionally project args downstream', async done => {
try {
const pipe = new Pipe((x, y) => [x * 2, y * 5], true)
.pipe(
(x, y) => [x + y, 1],
true
)
.pipe((x, y) => (x + y) * 2);
const res = await pipe.process(5, 2);
expect(res).toBe(42);
done();
} catch (err) {
done(err);
}
});
test('errors are propagated to the caller', async done => {
try {
const res = await new Pipe()
.pipe(() => log('click'))
.pipe(() => log('click'))
.pipe(() => {
throw new Error('Boom!');
})
.pipe(() => log('click'))
.pipe(() => log('click'))
.process();
expect(res).toBe(undefined);
} catch (err) {
expect(err.message).toBe('Boom!');
done();
}
});
test('.ctor() takes arrays', async done => {
try {
const pipe = new Pipe([
(x, y) => [x * 2, y * 5],
true,
(x, y) => [x + y, 1],
true,
(x, y) => (x + y) * 2
]);
const res = await pipe.process(5, 2);
expect(res).toBe(42);
done();
} catch (err) {
done(err);
}
});
test('.ctor() args must contain steps, optionally followed by a boolean', async done => {
try {
const pipe = new Pipe(
new Pipe((x, y) => [x * 2, y * 5]),
true,
(x, y) => [x + y, 1],
true,
(x, y) => (x + y) * 2
);
const res = await pipe.process(5, 2);
expect(res).toBe(42);
done();
} catch (err) {
done(err);
}
});
test('.ctor() errors on invalid type', async done => {
try {
const pipe = new Pipe(
new Pipe((x, y) => [x * 2, y * 5]),
'blue',
(x, y) => [x + y, 1],
true,
(x, y) => (x + y) * 2
);
const res = await pipe.process(5, 2);
expect(res).toBe(42);
} catch (err) {
expect(err.message).toBe(
'Invalid pipeline; array must contain processing steps, each optionally followed by a boolean. Received blue in position 1.'
);
done();
}
});
test('.ctor() errors when array elements are incorrectly ordered', async done => {
try {
const pipe = new Pipe(
new Pipe((x, y) => [x * 2, y * 5]),
true,
true,
(x, y) => [x + y, 1]
);
const res = await pipe.process(5, 2);
expect(res).toBe(42);
} catch (err) {
expect(err.message).toBe(
'Invalid pipeline; array must contain processing steps, each optionally followed by a boolean. Received true in position 2.'
);
done();
}
});
<file_sep>const isPromise = require('is-promise');
const $steps = Symbol('steps');
class Pipe {
static overPipeline(pipeline) {
const pipe = new Pipe();
pipe[$steps] = this.makeTuples(pipeline);
return pipe;
}
static makeTuples(steps) {
// steps can be defined as a pair (function, boolean) where the boolean
// indicates whether the result of the function is an array of input for the
// subsequent step (the array will be projected as arguments).
let tuples = [];
if (steps) {
let cursor = -1;
let item, next;
const len = steps.length;
while (++cursor < len) {
item = steps[cursor];
if (Array.isArray(item)) {
item = Pipe.overPipeline(item);
} else if (typeof item !== 'function' && !(item instanceof Pipe)) {
throw new Error(
`Invalid pipeline; array must contain processing steps, each optionally followed by a boolean. Received ${item} in position ${cursor}.`
);
}
next = cursor + 1 < len ? steps[cursor + 1] : undefined;
if (next && typeof next === 'boolean') {
++cursor;
}
tuples.push([item, next && typeof next === 'boolean' && next === true]);
}
}
return tuples;
}
constructor() {
this[$steps] = Pipe.makeTuples(arguments);
}
pipe() {
const steps = this[$steps];
const other = new Pipe();
other[$steps] = steps.concat(Pipe.makeTuples(arguments));
return other;
}
async process() {
const steps = this[$steps];
let i = 0;
const len = steps.length;
let result;
if (len) {
let [step, expand] = steps[0];
result =
step instanceof Pipe
? step.process.apply(step, [...arguments])
: step.apply(null, [...arguments]);
while (isPromise(result)) {
result = await result;
}
while (++i < len) {
let [next, nexp] = steps[i];
if (expand && Array.isArray(result)) {
result =
next instanceof Pipe
? next.process.apply(next, result)
: next.apply(null, result);
} else {
result = next instanceof Pipe ? next.process(result) : next(result);
}
expand = nexp;
while (isPromise(result)) {
result = await result;
}
}
}
return result;
}
}
module.exports = Pipe.Pipe = Pipe;
|
c676cc1230fea787d3e5961ea06f0c082ff5b7ed
|
[
"JavaScript",
"Markdown"
] | 4
|
JavaScript
|
flitbit/fpipe
|
39509c0f4e2c46e62c48a94e15b8ba8ed05c7d77
|
77ab63ed30cdfe4d4677885cf7a27b39b222771b
|
refs/heads/master
|
<file_sep># OgreMapper
A really ugly object mapper, but pretty fast!
**Why is it ugly?**
Because it maps properties one by one! It's really, really ugly!
**So, how can it be fast?**
The mapping code is generated at compile time, instead of being resolved at runtime through reflection like AutoMapper does.
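As a rough sketch of the idea only (nothing is generated yet), the goal is to emit plain assignment code like the following rather than reflecting over properties at runtime. The `Person`, `PersonDto` and `PersonMapper` types below are hypothetical examples, not part of OgreMapper:
```csharp
// Hypothetical example types, not part of OgreMapper
public class Person    { public string Name { get; set; } public int Age { get; set; } }
public class PersonDto { public string Name { get; set; } public int Age { get; set; } }
// Hypothetical generated mapper: plain property-by-property assignments,
// so mapping costs no reflection at runtime.
public static class PersonMapper
{
    public static PersonDto ToDto(Person source)
    {
        return new PersonDto
        {
            Name = source.Name,
            Age = source.Age
        };
    }
}
```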
Coming soon
-------------
<file_sep>using System;
namespace OgreMapper
{
class Program
{
static void Main(string[] args)
{
if (args == null || args.Length < 2) // need both the source and the destination class file
{
Console.WriteLine("You need to pass the class file to map from and the class file to map to.");
return;
}
if (string.IsNullOrEmpty(args[0]))
Console.WriteLine("Class from empty");
if (string.IsNullOrEmpty(args[1]))
Console.WriteLine("Class to empty");
string pathFileFrom = args[0];
string pathFileTo = args[1];
Console.WriteLine("File From: " + pathFileFrom + Environment.NewLine);
Console.WriteLine("File To: " + pathFileTo + Environment.NewLine);
Console.WriteLine("Hello World!");
Console.ReadLine();
}
}
}
|
d674231639eaf60150735b7bdcd513800cb58270
|
[
"Markdown",
"C#"
] | 2
|
Markdown
|
wgasparin/OgreMapper
|
da2ea560a0f1afcd98e0a86b68051a82fb4b0d3f
|
62b3431e9b7f623adb385371f4a57ed226dd5847
|
refs/heads/master
|
<file_sep>// 读取wx-config/public/static/images/footbar文件夹下的文件夹
// 遍历读取文件夹下的文件,重命名
const path = require('path');
const fs = require('fs')
const basePath = '../wx-config/public/static/images/footbar'
const dirs = fs.readdirSync(path.resolve(__dirname, basePath));
dirs.map(el => {
// fs.rename(oldPath, newPath)
if (el.indexOf('-1') >= 0) {
return
}
fs.rename(path.resolve(__dirname, basePath + '/' + el + '/填充'), path.resolve(__dirname, basePath + '/' + el + '/full'), (err, data) => {
if(err) {
console.log(err)
} else {
console.log(el + '/填充', 'renamed to', el + '/full')
}
})
fs.rename(path.resolve(__dirname, basePath + '/' + el + '/线框'), path.resolve(__dirname, basePath + '/' + el + '/line'), (err, data) => {
if(err) {
console.log(err)
} else {
console.log(el + '/线框', 'renamed to', el + '/line')
}
})
})<file_sep>const path = require('path');
const fs = require('fs');
function Logs() {
this.createLogsFolder();
}
Logs.prototype.createLogsFolder = function() {
// create the folder synchronously so writeLog can rely on it existing immediately
if (!fs.existsSync(path.resolve(__dirname, 'logs'))) {
fs.mkdirSync(path.resolve(__dirname, 'logs'));
}
}
Logs.prototype.getTime = function() {
const date = new Date();
let ymd = [];
let hms= [];
ymd.push(date.getFullYear());
ymd.push((date.getMonth() + 1).toString().padStart(2, '0'));
ymd.push(date.getDate().toString().padStart(2, '0'));
hms.push(date.getHours().toString().padStart(2, '0'));
hms.push(date.getMinutes().toString().padStart(2, '0'));
hms.push(date.getSeconds().toString().padStart(2, '0'));
return {ymd, hms};
}
Logs.prototype.writeLog = function(log) {
const { ymd, hms } = this.getTime();
const fileName = ymd.join('-') + '.txt';
const dirData = fs.readdirSync(path.resolve(__dirname, 'logs'));
let logData = '';
if (dirData.includes(fileName)) {
logData = fs.readFileSync(path.resolve(__dirname, `logs/${fileName}`)).toString();
}
log = `${ymd.join('-')} ${hms.join(':')} ${log}`;
fs.writeFileSync(path.resolve(__dirname, `logs/${fileName}`), `${logData}\r\n${log}`);
console.log(log);
}
const logs = new Logs();
module.exports = logs.writeLog.bind(logs);
<file_sep># update-file-content
A small Node.js utility for personal use that edits file contents and renames folders.
|
9cfd7cbbc77c9148556f6a97dbe8497f1ed8f7b6
|
[
"JavaScript",
"Markdown"
] | 3
|
JavaScript
|
yanzhi2016/update-file-content
|
7d558b414e9310f2dad6dfe4aa64932b6aab015a
|
53d9ac1dd9a246d84ae3111d25ae4f6c8d5663d0
|
refs/heads/master
|
<file_sep>require("tools-for-instagram");
(async () => {
console.log("\n -- Get last messages from Inbox --\n".bold.underline);
let ig = await login({ inputLogin: "ksihkays" });
let inbox = await getInbox(ig);
inbox.forEach((chat) => {
if (chat.lastMessage.messageContent != undefined) {
console.log(chat);
// console.log(chat.lastMessage.messageContent);
console.log("----");
}
});
console.log("\nProcess done!\n".green);
})();
<file_sep>const tfi = require("tools-for-instagram");
const ipa = require("instagram-private-api");
const path = require("path");
const { readFile } = require("fs");
const { promisify } = require("util");
const readFileAsync = promisify(readFile);
(async () => {
let ig = await login();
let media_id = "2378414090164781998_28196870663";
await ig.media.delete({ mediaId: media_id, mediaType: "CAROUSEL" });
console.log("done");
})();
<file_sep>const tfi = require("tools-for-instagram");
const ipa = require("instagram-private-api");
//prettier-ignore
const { StickerBuilder } = require("instagram-private-api/dist/sticker-builder");
const path = require("path");
const { readFile } = require("fs");
const { promisify } = require("util");
const generateThumbnail = require("../generateThumbnail");
const readFileAsync = promisify(readFile);
(async () => {
let ig = await login();
const path =
"E:\\codes\\instagram-bot\\client\\123_ksihkays\\stories\\videos\\Lorem Ipsum Video.mp4";
const file = await readFileAsync(path);
// const coverImagePath = await generateThumbnail(path);
const coverImagePath =
"E:\\codes\\instagram-bot\\client\\123_ksihkays\\stories\\videos\\cover_Lorem Ipsum Video.mp4";
const coverImageBuffer = await readFileAsync(coverImagePath);
let id = await ig.user.getIdByUsername("syakhisk");
let id2 = await ig.user.getIdByUsername("ksihkays");
await ig.publish.story({
video: file,
coverImage: coverImageBuffer,
stickerConfig: new StickerBuilder()
.add(
StickerBuilder.mention({
userId: id,
}).center()
)
.add(
StickerBuilder.mention({
userId: id2,
}).center()
)
.build(),
});
await console.log("Done!");
})();
<file_sep>const tfi = require("tools-for-instagram");
global.noLogo = true;
const ipa = require("instagram-private-api");
const inqDataIG = require("./module/inqDataIG");
const inquirer = require("inquirer");
async function login_func() {
const data = await inqDataIG();
const ig = await login({
inputLogin: data.username,
inputPassword: <PASSWORD>,
});
}
(async () => {
let condition = true;
await login_func();
while (condition) {
await inquirer
.prompt([
{
type: "confirm",
name: "response",
message: "Wanna do another login?",
},
])
.then((answers) => {
if (answers.response == true) {
login_func();
} else {
condition = false;
}
});
}
})();
<file_sep>const moment = require("moment");
const fs = require("fs");
const chalk = require("chalk");
const path = require("path");
module.exports = function (config) {
function mkdirOverwrite(dir) {
if (!fs.existsSync(dir)) fs.mkdirSync(dir);
}
let dirName = config.date + "_" + config.seller;
let dirPath = path.join(__dirname, "..", "client", dirName);
let configString = JSON.stringify(config, null, " ");
try {
mkdirOverwrite(dirPath);
mkdirOverwrite(dirPath + "\\feeds\\");
mkdirOverwrite(dirPath + "\\feeds\\photos\\");
mkdirOverwrite(dirPath + "\\feeds\\videos\\");
mkdirOverwrite(dirPath + "\\stories\\");
mkdirOverwrite(dirPath + "\\stories\\photos\\");
mkdirOverwrite(dirPath + "\\stories\\videos\\");
fs.writeFile(path.join(dirPath, "config.json"), configString, (err) => {
if (err) console.log(`Something went wrong`);
});
} catch (error) {
return error;
}
return dirPath;
};
<file_sep>const tfi = require("tools-for-instagram");
const ipa = require("instagram-private-api");
const path = require("path");
const fs = require("fs");
const { promisify } = require("util");
const generateThumbnail = require("./generateThumbnail");
const formatVideo = require("./formatVideo");
const formatPhoto = require("./formatPhoto");
const readFileAsync = promisify(fs.readFile);
const clamp = require("clamp");
const chalk = require("chalk");
const { extname } = require("path");
module.exports = async (ig = -1, data) => {
console.log(
`\nProcessing album for ${chalk.magentaBright(
data.seller
)} at ${chalk.greenBright(data.dir)}\n`
);
let items = [];
let mentionSeller = await generateUsertagFromName(data.seller, 0.5, 0.5);
let mentionUniday = await generateUsertagFromName(data.mention, 0.4, 0.5);
for (let photo of data.photos) {
const photoFullPath = path.join(data.dir, "feeds", "photos", photo);
const formattedPhotos = await formatPhoto(photoFullPath);
const photoBuffer = await readFileAsync(formattedPhotos);
items.push({
file: photoBuffer,
usertags: {
in: [mentionSeller, mentionUniday],
},
});
}
for (let video of data.videos) {
// path of the video
if (extname(video) == ".jpg") continue;
let videoPath = path.join(data.dir, "feeds", "videos", video);
let formattedVid = await formatVideo(videoPath);
let videoBuffer = await readFileAsync(formattedVid);
let coverPath = await generateThumbnail(formattedVid);
let coverBuffer = await readFileAsync(coverPath);
items.push({
video: videoBuffer,
coverImage: coverBuffer,
usertags: {
in: [mentionSeller, mentionUniday],
},
});
}
console.log(`${chalk.cyan("Publishing album, please wait...")}`);
await ig.publish.album({ items, caption: data.caption });
console.log(
`Album for ${chalk.magentaBright(data.seller)} is ${chalk.yellow(
"published\n"
)}`
);
async function generateUsertagFromName(name, x, y) {
x = clamp(x, 0.0001, 0.9999);
y = clamp(y, 0.0001, 0.9999);
const { pk } = await ig.user.searchExact(name);
return {
user_id: pk,
position: [x, y],
};
}
};
<file_sep>const fs = require("fs");
const path = require("path");
// const dir = __dirname;
// const dir = "../../client/";
// let currentFolder = "username_15-8-2020_15.00" + "/photo";
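// Splits a directory listing into files that match a given extension and those that don't.
// Example (hypothetical require name): const { validFiles, invalidFiles } = require("./filterByExtension")(photoDir, ".jpg");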
module.exports = (dir, ext) => {
function extension(element) {
var extName = path.extname(element);
return extName === ext;
}
function notExtension(element) {
var extName = path.extname(element);
return extName != ext;
}
let files = fs.readdirSync(dir);
let validFiles = files.filter(extension);
let invalidFiles = files.filter(notExtension);
return {
validFiles,
invalidFiles,
};
};
<file_sep>var path = require("path");
var appDir = path.dirname(require.main.filename);
console.log(appDir);
<file_sep>const fs = require("fs");
const inquirer = require("inquirer");
const chalk = require("chalk");
require("dotenv").config();
module.exports = async function () {
var questions = [
{
type: "input",
name: "username",
message: "Instagram Username: ",
},
{
type: "password",
name: "password",
message: "Instagram Password (not shared): ",
},
];
await inquirer.prompt(questions).then(async (answers) => {
let file = `IG_USERNAME=${answers.username}\nIG_PASSWORD=${answers.password}\n#ONLINE_MODE=true`;
// fs.promises.writeFile returns a promise and does not take a callback
try {
await fs.promises.writeFile(".env", file);
console.log(`Used as ${chalk.green(answers.username)}`);
} catch (err) {
console.log("Something went wrong..");
}
});
};
<file_sep>var inquirer = require("..");
var chalk = require("chalk");
var questions = [
{
type: "input",
name: "first_name",
message: "What's your first name",
},
];
inquirer.prompt(questions).then((answers) => {
console.log(JSON.stringify(answers, null, " "));
});
<file_sep>const inquirer = require("inquirer");
const fs = require("fs");
const { promisify } = require("util");
const readFileAsync = promisify(fs.readFile);
const readdirAsync = promisify(fs.readdir);
const path = require("path");
const moment = require("moment");
const { resolve } = require("path");
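// Interactive flow: either create a new PP config (seller, mention, caption) or load config.json
// from an existing folder under client/. Returns [config, isExistingFolder].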
module.exports = async () => {
const validateAtSign = (value) => {
if (/(@|\s)/.test(value)) return "Plz don't use the @ or space";
else if (/\S/.test(value)) return true;
return "Plz enter the data :(";
};
let validateRequired = (value) => {
if (/\S/.test(value)) {
return true;
}
return "Enter the data plz";
};
const configQuestion = [
{
type: "input",
name: "seller",
message: "Plz type out instagram seller (w/o @ sign)",
validate: validateAtSign,
},
{
type: "input",
name: "mention",
message: "Plz type out another account to mention (w/o @ sign)",
validate: validateAtSign,
},
{
type: "editor",
name: "caption",
message: "Plz type out the caption (emoji will show 🧓 but it's okay)",
},
];
const initialQuestion = {
type: "list",
name: "action",
choices: ["Create New PP (initialize)", "Publish existing PP"],
};
async function main() {
const dir = path.join(__dirname, "..", "client");
const choice = await inquirer.prompt(initialQuestion);
let date = moment().format("DD-M-yyyy");
if (choice.action == "Publish existing PP") {
let availableFolder = await readdirAsync(dir);
const selected = await inquirer.prompt({
type: "list",
name: "selectedFolder",
message: "Select the folder plz",
choices: availableFolder,
});
let configPath = path.join(dir, selected.selectedFolder, "config.json");
// console.log("configpath:", configPath);
try {
let initial = await readFileAsync(configPath);
return [JSON.parse(initial), true];
} catch (error) {
if (error.code == "ENOENT") {
console.log("\nconfig.json file was not found");
console.log("plz delete the folder and try again :(");
}
}
} else {
/* -- if choosing create new pp file -- */
let config = await inquirer.prompt(configQuestion);
/* add uniday and date */
// config.mention = "unidaysmanda";
config.date = date;
return [config, false];
}
}
return await main();
};
<file_sep>const ipa = require("instagram-private-api");
const tfi = require("tools-for-instagram");
const login = require("tools-for-instagram/src/login");
(async () => {
const ig = await login({ inputLogin: "jeje.cy" });
})();
<file_sep>const fs = require("fs");
const path = require("path");
// const dir = __dirname;
// const dir = "../../client/";
// let currentFolder = "username_15-8-2020_15.00" + "/photo";
module.exports = async (dir) => {
let files = fs.readdirSync(dir);
return files;
};
<file_sep>const tfi = require("tools-for-instagram");
const ipa = require("instagram-private-api");
//prettier-ignore
const { StickerBuilder } = require("instagram-private-api/dist/sticker-builder");
const path = require("path");
const fs = require("fs");
const { promisify } = require("util");
const generateThumbnail = require("./generateThumbnail");
const clamp = require("clamp");
const chalk = require("chalk");
const readFileAsync = promisify(fs.readFile);
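// Publishes every story video and photo in the client folder, attaching mention stickers for the
// seller and the mention account (videos get an auto-generated thumbnail as the cover image).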
module.exports = async (ig = -1, data) => {
console.log(
`\nProcessing storie(s) for ${chalk.magentaBright(
data.seller
)} at ${chalk.greenBright(data.dir)}\n`
);
let mentionSeller = await generateUserIdFromName(data.seller, 0.5, 0.5);
let mentionUniday = await generateUserIdFromName(data.mention, 0.4, 0.5);
if (data.stories.videos.length) {
for (let video of data.stories.videos) {
if (path.extname(video) == ".jpg") continue;
const videoPath = path.join(data.dir, "stories", "videos", video);
const videoBuffer = await readFileAsync(videoPath);
const coverPath = await generateThumbnail(videoPath);
const coverBuffer = await readFileAsync(coverPath);
let options = {
video: videoBuffer,
coverImage: coverBuffer,
stickerConfig: new StickerBuilder()
.add(
StickerBuilder.mention({
userId: mentionSeller,
}).center()
)
.add(
StickerBuilder.mention({
userId: mentionUniday,
}).center()
)
.build(),
};
console.log(`${chalk.cyan("Publishing story, please wait...")}`);
await ig.publish.story(options);
console.log(`${video} is published as a story\n`);
}
}
if (data.stories.photos.length) {
for (let photo of data.stories.photos) {
const photoPath = path.join(data.dir, "stories", "photos", photo);
const photoBuffer = await readFileAsync(photoPath);
let options = {
file: photoBuffer,
stickerConfig: new StickerBuilder()
.add(
StickerBuilder.mention({
userId: mentionSeller,
}).center()
)
.add(
StickerBuilder.mention({
userId: mentionUniday,
}).center()
)
.build(),
};
console.log(`${chalk.cyan("Publishing story, please wait...")}`);
await ig.publish.story(options);
console.log(`${photo} is published as a story\n`);
}
}
async function generateUserIdFromName(name, x, y) {
x = clamp(x, 0.0001, 0.9999);
y = clamp(y, 0.0001, 0.9999);
const { pk } = await ig.user.searchExact(name);
return pk;
}
console.log(`Stories for ${data.seller} is ${chalk.yellow("published")}`);
};
<file_sep>const tfi = require("tools-for-instagram");
const ipa = require("instagram-private-api");
const path = require("path");
const { readFile } = require("fs");
const { promisify } = require("util");
const readFileAsync = promisify(readFile);
(async () => {
let ig = await login();
let picPath =
"E:\\codes\\instagram-bot\\client\\123_ksihkays\\feeds\\photos\\lorem.jpg";
const file = await readFileAsync(picPath);
let published = await ig.publish.photo({ file: file, caption: "test brow" });
const url = `https://www.instagram.com/p/${published.media.code}/`;
console.log("done");
console.log("Media id: " + url);
})();
<file_sep>require("dotenv").config();
const fs = require("fs");
const chalk = require("chalk");
const inqDataIG = require("./inqDataIG");
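// Ensures IG_USERNAME/IG_PASSWORD are available: prompts for them via inqDataIG and writes .env
// when missing, then reports which account will be used.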
module.exports = async () => {
if (!process.env.IG_USERNAME || !process.env.IG_PASSWORD) {
let user = await inqDataIG();
let data = `IG_USERNAME=${user.username}\nIG_PASSWORD=${<PASSWORD>}\n#ONLINE_MODE=FALSE`;
fs.writeFile(".env", data, (err) => {
if (err) console.log(`Something went wrong`);
});
process.env.IG_USERNAME = user.username;
process.env.IG_PASSWORD = <PASSWORD>;
console.log(`${chalk.gray(".env")} is updated`);
}
console.log(`Using ${chalk.blueBright("@" + process.env.IG_USERNAME)}`);
console.log(
`${chalk.cyanBright(
"to modify your login account, edit with notepad"
)} ${chalk.green("Males-PP/code/.env\n")} ;)`
);
};
<file_sep>const ffmpegPath = require("@ffmpeg-installer/ffmpeg").path;
const ffmpeg = require("fluent-ffmpeg");
const path = require("path");
const util = require("util");
const chalk = require("chalk");
ffmpeg.setFfmpegPath(ffmpegPath);
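// Takes a screenshot at the 1-second mark, saves it as cover_<name>.jpg next to the video,
// and returns the path to the generated thumbnail.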
module.exports = async (videoPath) => {
let filenameNoExt = path.basename(videoPath, path.extname(videoPath));
let promise = new Promise((resolve, reject) => {
ffmpeg(videoPath)
.on("filenames", function (filenames) {
const filename = filenames[0];
console.log("Generating thumbnail for", chalk.greenBright(filename));
})
.on("end", () => {
console.log("Thumbnail generated\n");
resolve();
})
.on("error", (err) => {
reject(err);
})
.screenshots({
timestamps: [1],
filename: "cover_%b.jpg",
folder: path.join(videoPath, ".."),
});
});
let finalDir;
await promise.then(() => {
// console.log("inside ss promise");
finalDir = path.join(videoPath, "..", `cover_${filenameNoExt}.jpg`);
});
return finalDir;
};
<file_sep>const fs = require("fs");
const chalk = require("chalk");
const path = require("path");
const ffmpeg = require("fluent-ffmpeg");
const { resolve } = require("path");
const { reject } = require("delay");
const ffmpegPath = require("@ffmpeg-installer/ffmpeg").path;
ffmpeg.setFfmpegPath(ffmpegPath);
// let dir = "E:\\codes\\instagram-bot\\client\\Test\\2020-07-03 14-41-13.mkv";
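// Re-encodes a video to an 800px-wide 1:1 frame with white padding (Instagram feed format),
// saves it as converted_<name>.mp4 next to the source, and returns the new path.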
module.exports = async (dir) => {
console.log(`Formatting ${chalk.greenBright(path.basename(dir))}...`);
let filenameNoExt = path.basename(dir, path.extname(dir));
let convertedFile = "converted_" + filenameNoExt + ".mp4";
let promise = new Promise((resolve, reject) => {
ffmpeg(dir)
.size("800x?")
.aspect("1:1")
.autopad("white")
.on("end", () => {
console.log("Video formatted\n");
resolve();
})
.on("error", (err) => {
console.log("Something wrong in formatVideo.js");
reject(err);
})
.save(path.join(dir, "..", convertedFile));
});
let finalDir;
await promise.then(() => {
// console.log("inside promise");
finalDir = path.join(dir, "..", convertedFile);
});
return finalDir;
};
|
f025214ad746bd87ae20b29d60bc25a118855346
|
[
"JavaScript"
] | 18
|
JavaScript
|
Syakhisk/males-pp
|
8158e1bb76eb799f75a6155d313a0e5eaff0a86b
|
2640f1acc12fc521a83574f1b5efb016dc2768fc
|
refs/heads/main
|
<file_sep># used libraries
import requests
import time
import datetime
import csv
from bs4 import BeautifulSoup
# for sending emails
import smtplib
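# check_price(): requests the product page, parses the title and price with BeautifulSoup,
# and appends a (Title, Price, Date) row to AmazonWebScraperDataset.csv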
def check_price():
URL = 'https://www.amazon.com/Currently-Unsupervised-Novelty-Graphic-Sarcasm/dp/B01HFFYP8A/ref=sr_1_15?dchild=1&keywords=data%2Btshirt&qid=1633430723&sr=8-15&th=1&psc=1'
headers = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.82 Safari/537.36", "Accept-Encoding":"gzip, deflate", "Accept":"text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", "DNT":"1","Connection":"close", "Upgrade-Insecure-Requests":"1"}
page = requests.get(URL, headers = headers)
soup1 = BeautifulSoup(page.content, "html.parser")
soup2 = BeautifulSoup(soup1.prettify(),"html.parser")
title = soup2.find(id='productTitle').get_text()
price = soup2.find(id="priceblock_ourprice").get_text()
price = price.strip()[1:]
title = title.strip()
today = datetime.date.today()
header = ['Title', 'Price','Date']
data = [title, price,today]
with open('AmazonWebScraperDataset.csv', 'a+', newline='', encoding='UTF8') as f:
writer = csv.writer(f)
writer.writerow(data)
# Creating the csv file for the first time
URL = 'https://www.amazon.com/Currently-Unsupervised-Novelty-Graphic-Sarcasm/dp/B01HFFYP8A/ref=sr_1_15?dchild=1&keywords=data%2Btshirt&qid=1633430723&sr=8-15&th=1&psc=1'
headers = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/93.0.4577.82 Safari/537.36", "Accept-Encoding":"gzip, deflate", "Accept":"text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", "DNT":"1","Connection":"close", "Upgrade-Insecure-Requests":"1"}
page = requests.get(URL, headers = headers)
soup1 = BeautifulSoup(page.content, "html.parser")
soup2 = BeautifulSoup(soup1.prettify(),"html.parser")
title = soup2.find(id='productTitle').get_text()
price = soup2.find(id="priceblock_ourprice").get_text()
price = price.strip()[1:]
title = title.strip()
today = datetime.date.today()
header = ['Title', 'Price','Date']
data = [title, price,today]
with open('AmazonWebScraperDataset.csv', 'w', newline='', encoding='UTF8') as f:
writer = csv.writer(f)
writer.writerow(header)
writer.writerow(data)
# repeating the process each day
while(True):
check_price()
time.sleep(86400) #checking the price everyday
# sending email function
def send_mail():
server = smtplib.SMTP_SSL('smtp.gmail.com',465)
server.ehlo()
#server.starttls()
server.ehlo()
server.login('<EMAIL>','xxxxxxxxxxxxxx')
subject = "The Shirt you want is below $15! Now is your chance to buy!"
body = "Maria, This is the moment we have been waiting for. Now is your chance to pick up the shirt of your dreams. Don't mess it up! Link here: https://www.amazon.com/Currently-Unsupervised-Novelty-Graphic-Sarcasm/dp/B01HFFYP8A/ref=sr_1_15?dchild=1&keywords=data%2Btshirt&qid=1633430723&sr=8-15&th=1&psc=1"
msg = f"Subject: {subject}\n\n{body}"
# smtplib's sendmail signature is (from_addr, to_addrs, msg); the recipient (redacted here) must be passed explicitly
server.sendmail(
'<EMAIL>',
'<EMAIL>',
msg
)
|
b16cc2bc95fb8516b5cfa034a0a3f657333ee2b7
|
[
"Python"
] | 1
|
Python
|
mariaadissa/AmazonWebScraper
|
a24aecff4b8280db2b0980f9d843e650d23ebc20
|
b3fbcf4e2eafa91a9fcf77e5e2578c700021b7fe
|
refs/heads/master
|
<repo_name>DT021/Finhub-App<file_sep>/tracker/view_users.php
<?php
session_start();
if(isset($_SESSION['tracker']))
{
include "inc/header.php";
include "../includes/connect.php";
echo'
<div class="wrapper wrapper-content">
<div class="row wrapper border-bottom white-bg page-heading">
<div class="col-lg-10">
<h2>Staff Member</h2>
<ol class="breadcrumb">
<li>
<a href="view_users.php">Users</a>
</li>
<li>
<a>View All Users</a>
</li>
</ol>
</div>
<div class="col-lg-2">
</div>
</div>
<div class="wrapper wrapper-content animated fadeInRight">
<div class="row">
<div class="col-lg-12">
<div class="ibox float-e-margins">
<div class="ibox-title" height="200px !important">
<h5>Posted Records</h5>
</div>
<div class="ibox-content">
<div class="table-responsive">
<table class="table table-striped table-bordered table-hover dataTables-example" >
<thead>
<tr>
<th>S/n</th>
<th>First Name</th>
<th>Last Name</th>
<th>Username</th>
<th>Email</th>
<th>Role</th>
<th>Unit</th>
</tr>
</thead>
<tbody>';
$q = "SELECT * FROM users";
$r = mysqli_query($dbc, $q);
$num = mysqli_num_rows($r);
if($r){
$counter = 1;
while($row = mysqli_fetch_array($r, MYSQLI_ASSOC) )
{
echo '
<tr class="gradeX">
<td>'.$counter.'</td>
<td>'.$row['firstname'].'</td>
<td>'.$row['lastname'].'</td>
<td>'.$row['username'].'</td>
<td>'.$row['email'].'</td>
<td>'.$row['role'].'</td>
<td>'.$row['unit'].'</td>
</tr> ';
$counter++;
}
}
echo'
</tbody>
</table>
</div>
</div>
</div>
</div>
</div>
</div>
</div>';
include 'inc/footer.php';
}
else{
header("Location:../index.php");
}
<file_sep>/pages/update.php
<?php
use PHPMailer\PHPMailer\PHPMailer;
use PHPMailer\PHPMailer\Exception;
if (isset($_POST["updatee"]))
{
if(empty($_POST) === false){
require_once('../includes/connect.php');
$beneficiary = mysqli_escape_string($dbc, trim($_POST['beneficiary']));
$amount = mysqli_escape_string($dbc, trim($_POST['amount']));
$description = mysqli_escape_string($dbc, trim($_POST['description']));
$entrydate = mysqli_escape_string($dbc, trim($_POST['entrydate']));
$forecast = mysqli_escape_string($dbc, trim($_POST['forecast']));
// $status = mysqli_escape_string($dbc, trim($_POST['status']));
$id = mysqli_escape_string($dbc, trim($_POST['record_id']));
$query = "UPDATE records SET beneficiary='$beneficiary', amount='$amount', description='$description', entrydate='$entrydate', forecastdate='$forecast' WHERE id='$id'";
mysqli_query($dbc, $query) or die(mysqli_error($dbc));
//Load composer's autoloader
$q = "SELECT * FROM users WHERE role='supervisor' AND unit='Finance'";
$r = mysqli_query($dbc,$q);
$num = mysqli_num_rows($r);
if($num >= 0)
{
while ($raw = mysqli_fetch_array($r, MYSQLI_ASSOC))
{
require_once 'PHPMailer3/src/Exception.php';
require_once 'PHPMailer3/src/PHPMailer.php';
require_once 'PHPMailer3/src/SMTP.php';
$mail = new PHPMailer(true); // Passing `true` enables exceptions
try {
// $mail->SMTPDebug = 2; // Enable verbose debug output
$mail->isSMTP(); // Set mailer to use SMTP
$mail->Host = 'genesisgroupng.com'; // Specify main and backup SMTP servers
$mail->SMTPAuth = true; // Enable SMTP authentication
$mail->Username = '<EMAIL>'; // SMTP username
$mail->Password = '<PASSWORD>'; // SMTP password
//$mail->SMTPSecure = 'tls'; // Enable TLS encryption, `ssl` also accepted
$mail->Port = 25; // TCP port to connect to
//Recipients
$mail->setFrom('<EMAIL>', 'Finhub');
//Recipients
//$mail->addAddress('<EMAIL>', 'qsrbi'); // Add a recipient
$mail->addAddress($raw['email'], $raw['firstname']);
//Content
$mail->isHTML(true); // Set email format to HTML
$mail->Subject = 'Record Edited';
$mail->Body = "<h4>Hello,</h4>
<br />
An update has occurred on the Finance System. It was carried out by ".$_SESSION['admin_username'].". Check the system for more details.<br />Cheers";
$mail->AltBody = strip_tags($mail->Body);
$mail->send();
} catch (Exception $e) {
echo 'Message could not be sent.';
//commented in case of errors
echo 'Mailer Error: ' . $mail->ErrorInfo;
}
}
}
?>
<script>alert('Record Edited Successfully');</script>
<?php
}
}
?><file_sep>/tracker/mailer.php
<?php
use PHPMailer\PHPMailer\PHPMailer;
use PHPMailer\PHPMailer\Exception;
if($_SERVER['REQUEST_METHOD'] == "POST")
{
require "../includes/connect.php";
//uprofile
$username = mysqli_real_escape_string($dbc, $_POST['username']);
$password = mysqli_real_escape_string($dbc, trim($_POST['password']));
$email = mysqli_real_escape_string($dbc, trim($_POST['email']));
$firstname = mysqli_real_escape_string($dbc, trim($_POST['firstname']));
$lastname = mysqli_real_escape_string($dbc, trim($_POST['lastname']));
$phone = mysqli_real_escape_string($dbc, trim($_POST['phone']));
$role = mysqli_real_escape_string($dbc, $_POST['role']);
$sql = "INSERT INTO users (username, password, email, firstname, lastname, phone, role) VALUES('$username',sha1('$password'),'$email','$firstname','$lastname','$phone','$role')";
mysqli_query($dbc, $sql); //or die(mysqli_error());
//Load composer's autoloader
require_once 'PHPMailer3/src/Exception.php';
require_once 'PHPMailer3/src/PHPMailer.php';
require_once 'PHPMailer3/src/SMTP.php';
$mail = new PHPMailer(true); // Passing `true` enables exceptions
try {
//$mail->SMTPDebug = 2;
$mail->isSMTP(); // Set mailer to use SMTP
$mail->Host = 'tls://smtp.gmail.com:587';
$mail->SMTPOptions = array(
'ssl' => array(
'verify_peer' => false,
'verify_peer_name' => false,
'allow_self_signed' => true
)
);
$mail->SMTPAuth = true; // Enable SMTP authentication
$mail->Username = '<EMAIL>'; // SMTP username
$mail->Password = '<PASSWORD>'; // SMTP password
// $mail->SMTPSecure = 'tls'; // Enable TLS encryption, `ssl` also accepted
$mail->Port = 587; // TCP port to connect to
$mail->setFrom('<EMAIL>', 'Finance App');
//Recipients
//$mail->addAddress('<EMAIL>', 'qsrbi'); // Add a recipient
$mail->addAddress($email, $firstname);
//Content
$mail->isHTML(true); // Set email format to HTML
$mail->Subject = 'Access Granted';
$mail->Body = "<h4>Hello,</h4>
<br />
An account has been created for you on the Finance System. Here are the details <br />
Username: ".$username."<br />
Password: ".$<PASSWORD>." <br/ >
Cheers";
$mail->AltBody = strip_tags($mail->Body);
$mail->send();
} catch (Exception $e) {
echo 'Message could not be sent.';
//commented in case of errors
echo 'Mailer Error: ' . $mail->ErrorInfo;
}
echo "Member Added Successfully";
}
?><file_sep>/supervisor/pay.php
<?php
use PHPMailer\PHPMailer\PHPMailer;
use PHPMailer\PHPMailer\Exception;
include "../includes/connect.php";
if(isset($_POST['id']))
{
$dt=date('d/m/Y');
$id = $_POST['id'];
$query = "SELECT * FROM records WHERE id='$id'";
$r = mysqli_query($dbc, $query);
$row = mysqli_fetch_array($r, MYSQLI_ASSOC);
$unit = $row['unit'];
if($r)
{
$q = "UPDATE records SET status='paid', paydate='$dt' WHERE id='$id'";
$result = mysqli_query($dbc, $q) or die(mysqli_error($dbc));
echo "Marked As Paid!";
$que = "SELECT * FROM users WHERE unit='$unit'";
$res = mysqli_query($dbc, $que) or die(mysqli_error($dbc));
while ($raw = mysqli_fetch_array($res, MYSQLI_ASSOC))
{
if($raw['unit'] == $unit)
{
//Load Composer's autoloader
require_once 'PHPMailer3/src/Exception.php';
require_once 'PHPMailer3/src/PHPMailer.php';
require_once 'PHPMailer3/src/SMTP.php';
$mail = new PHPMailer(true); // Passing `true` enables exceptions
try {
// $mail->SMTPDebug = 2; // Enable verbose debug output
$mail->isSMTP(); // Set mailer to use SMTP
$mail->Host = 'genesisgroupng.com'; // Specify main and backup SMTP servers
$mail->SMTPAuth = true; // Enable SMTP authentication
$mail->Username = '<EMAIL>'; // SMTP username
$mail->Password = '<PASSWORD>'; // SMTP password
//$mail->SMTPSecure = 'tls'; // Enable TLS encryption, `ssl` also accepted
$mail->Port = 25; // TCP port to connect to
$mail->setFrom('<EMAIL>', 'Finhub');
// Add a recipient
$mail->addAddress($raw['email'], $raw['firstname']);
//Content
$mail->isHTML(true); // Set email format to HTML
$mail->Subject = 'Payment Status Updated!';
$mail->Body = 'Your payment with description: '.$row['description'] .' and beneficiary '.$row['beneficiary'] .' has been marked as paid Thanks';
$mail->send();
echo 'Message has been sent';
} catch (Exception $e) {
echo 'Message could not be sent. Mailer Error: ', $mail->ErrorInfo;
}
}
}
}
}
?><file_sep>/staff/delete.php
<?php
use PHPMailer\PHPMailer\PHPMailer;
use PHPMailer\PHPMailer\Exception;
if (isset($_POST["deletee"]))
{
if(empty($_POST) === false){
require_once('../includes/connect.php');
$id = mysqli_escape_string($dbc, trim($_POST['id']));
if($_POST['sure'] =='Yes')
{
$query = "DELETE FROM records WHERE id='$id'";
mysqli_query($dbc, $query) or die(mysqli_error($dbc));
//Load composer's autoloader
require 'PHPMailer3/src/Exception.php';
require 'PHPMailer3/src/PHPMailer.php';
require 'PHPMailer3/src/SMTP.php';
$mail = new PHPMailer(true); // Passing `true` enables exceptions
try {
// $mail->SMTPDebug = 2; // Enable verbose debug output
$mail->isSMTP(); // Set mailer to use SMTP
$mail->Host = 'genesisgroupng.com'; // Specify main and backup SMTP servers
$mail->SMTPAuth = true; // Enable SMTP authentication
$mail->Username = '<EMAIL>'; // SMTP username
$mail->Password = '<PASSWORD>'; // SMTP password
//$mail->SMTPSecure = 'tls'; // Enable TLS encryption, `ssl` also accepted
$mail->Port = 25; // TCP port to connect to
$mail->setFrom('<EMAIL>', 'Finhub');
//Recipient
// Add a recipient
$mail->addAddress('<EMAIL>', 'Olagoke');
//Content
$mail->isHTML(true); // Set email format to HTML
$mail->Subject = 'Record Deleted';
$mail->Body = "<h4>Hello,</h4>
<br />
A Record has been deleted from the Finance System by a Supervisor, Please check the system for more details<br />
Cheers";
$mail->AltBody = strip_tags($mail->Body);
$mail->send();
} catch (Exception $e) {
echo 'Message could not be sent.';
//commented in case of errors
echo 'Mailer Error: ' . $mail->ErrorInfo;
}
?>
<script>alert('Record Deleted Successfully');</script>
<?php
}else{
header("Location: view_records.php");
}
}else{
//
}
}
?><file_sep>/check.php
<?php
use PHPMailer\PHPMailer\PHPMailer;
use PHPMailer\PHPMailer\Exception;
require_once "includes/connect.php";
$dt=date('d/m/Y');
$td=date('d-m-Y');
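// Status check over all records: compares today's date with each paydate and emails reminders
// for due and overdue payments.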
$q = "SELECT * FROM records";
$r = mysqli_query($dbc, $q);
if($r)
{
while($row = mysqli_fetch_array($r, MYSQLI_ASSOC))
{
//$deal = $dt > $row['paydate'] && $row['status'] !== 'paid'
//condition to check if the payment date has passed
if($dt > $row['paydate'] && $row['status'] !== 'paid')
{
$sql = "UPDATE records SET status='overdue' WHERE id =".$row['id'];
$q = mysqli_query($dbc, $sql) or die(mysqli_error($dbc));
$num = mysqli_affected_rows($dbc);
for($count = 0;$count<$num;$count++) {
require_once 'PHPMailer3/src/Exception.php';
require_once 'PHPMailer3/src/PHPMailer.php';
require_once 'PHPMailer3/src/SMTP.php';
$mail = new PHPMailer(true); // Passing `true` enables exceptions
try {
//$mail->SMTPDebug = 2;
$mail->isSMTP(); // Set mailer to use SMTP
$mail->Host = 'tls://smtp.gmail.com:587';
$mail->SMTPOptions = array(
'ssl' => array(
'verify_peer' => false,
'verify_peer_name' => false,
'allow_self_signed' => true
)
);
$mail->SMTPAuth = true; // Enable SMTP authentication
$mail->Username = '<EMAIL>'; // SMTP username
$mail->Password = '<PASSWORD>'; // SMTP password
// $mail->SMTPSecure = 'tls'; // Enable TLS encryption, `ssl` also accepted
$mail->Port = 587; // TCP port to connect to
$mail->setFrom('<EMAIL>', 'Finance App');
//$mail->addAddress('<EMAIL>', 'qsrbi'); // Add a recipient
$mail->addAddress('<EMAIL>', 'Olagoke');
$mail->addAddress('<EMAIL>', 'Kevin');
$mail->addAddress('<EMAIL>', 'Ola');
//Content
$mail->isHTML(true); // Set email format to HTML
$mail->Subject = 'Payment Status Updated';
$mail->Body = "<h4>Hello,</h4> <br /> Your payment to ".$row['beneficiary']. " slated for ".$row['paydate']. " is overdue, please check the finance portal to confirm the payment status;<br /> Cheers";
$mail->AltBody = strip_tags($mail->Body);$mail->send();
} catch (Exception $e) {
echo 'Message could not be sent.';
//commented in case of errors
echo 'Mailer Error: ' . $mail->ErrorInfo;
}
}
$sqlee = "SELECT * from records WHERE
".$dt." > ".$row['paydate']." AND status = 'pending'";
$q = mysqli_query($dbc, $sqlee);
$nums = mysqli_num_rows($q);
printf("Result set has %d rows.\n", $nums);
$raw = mysqli_fetch_array($q, MYSQLI_ASSOC);
for($counts = 0;$counts<$nums;$counts++){
echo $counts;
require_once 'PHPMailer3/src/Exception.php';
require_once 'PHPMailer3/src/PHPMailer.php';
require_once 'PHPMailer3/src/SMTP.php';
$mail = new PHPMailer(true); // Passing `true` enables exceptions
try { //$mail->SMTPDebug = 2;
$mail->isSMTP(); // Set mailer to use SMTP
$mail->Host = 'tls://smtp.gmail.com:587';
$mail->SMTPOptions = array(
'ssl' => array(
'verify_peer' => false,
'verify_peer_name' => false,
'allow_self_signed' => true
)
);
$mail->SMTPAuth = true; // Enable SMTP authentication
$mail->Username = '<EMAIL>'; // SMTP username
$mail->Password = '<PASSWORD>'; // SMTP password
// $mail->SMTPSecure = 'tls'; // Enable TLS encryption, `ssl` also accepted
$mail->Port = 587; // TCP port to connect to
$mail->setFrom('<EMAIL>', 'Finance App');
//$mail->addAddress('<EMAIL>', 'qsrbi'); // Add a recipient
$mail->addAddress('<EMAIL>', 'Olagoke');
$mail->addAddress('<EMAIL>', 'Kevin');
$mail->addAddress('<EMAIL>', 'Ola');
//Content
$mail->isHTML(true); // Set email format to HTML
$mail->Subject = 'Payment Status Notification';
$mail->Body = "<h4>Hello,</h4> <br /> Your payment to ".$row['beneficiary']. " slated for ".$row['paydate']. " is overdue, please check to confirm the payment status;<br /> Cheers";
$mail->AltBody = strip_tags($mail->Body);$mail->send();
} catch (Exception $e) {
echo 'Message could not be sent.';
//commented in case of errors
echo 'Mailer Error: ' . $mail->ErrorInfo;
}
}
}
//condition to check if the current date and the payment date are the same
if( $dt == $row['paydate'] && ($row['status'] !== 'paid' ))
{
$sql = "UPDATE records SET status='due' WHERE id =".$row['id'];
$q = mysqli_query($dbc, $sql) or die(mysqli_error($dbc));
$num = mysqli_affected_rows($dbc);
for($count = 0;$count<$num;$count++)
{
require_once 'PHPMailer3/src/Exception.php';
require_once 'PHPMailer3/src/PHPMailer.php';
require_once 'PHPMailer3/src/SMTP.php';
$mail = new PHPMailer(true); // Passing `true` enables exceptions
try {
//$mail->SMTPDebug = 2;
$mail->isSMTP(); // Set mailer to use SMTP
$mail->Host = 'tls://smtp.gmail.com:587';
$mail->SMTPOptions = array(
'ssl' => array(
'verify_peer' => false,
'verify_peer_name' => false,
'allow_self_signed' => true
)
);
$mail->SMTPAuth = true; // Enable SMTP authentication
$mail->Username = '<EMAIL>'; // SMTP username
$mail->Password = '<PASSWORD>'; // SMTP password
// $mail->SMTPSecure = 'tls'; // Enable TLS encryption, `ssl` also accepted
$mail->Port = 587; // TCP port to connect to
$mail->setFrom('<EMAIL>', 'Finance App');
//Recipients
//$mail->addAddress('<EMAIL>', 'qsrbi'); // Add a recipient
$mail->addAddress('<EMAIL>', 'Olagoke');
$mail->addAddress('<EMAIL>', 'Kevin');
$mail->addAddress('<EMAIL>', 'Ola');
//Content
$mail->isHTML(true); // Set email format to HTML
$mail->Subject = 'Payment Status Updated';
$mail->Body = "<h4>Hello,</h4><br />
Your payment to ".$row['beneficiary']. " slated for ".$row['paydate']. " is Due, please check the portal to confirm the payment status;<br /> Cheers";
$mail->AltBody = strip_tags($mail->Body);
$mail->send();
} catch (Exception $e) {
echo 'Message could not be sent.';
//commented in case of errors
echo 'Mailer Error: ' . $mail->ErrorInfo;
}
}
//Repeating the check process and firing the reminder email
if($q)
{ $sqle = "SELECT * from records WHERE
".$dt." = ".$row['paydate']." AND status = 'due'";
$query = mysqli_query($dbc, $sqle);$nums = mysqli_num_rows($query);
$raw = mysqli_fetch_array($query, MYSQLI_ASSOC);
for($counts = 0;$counts<$nums;$counts++){
require_once 'PHPMailer3/src/Exception.php';
require_once 'PHPMailer3/src/PHPMailer.php';
require_once 'PHPMailer3/src/SMTP.php';
$mail = new PHPMailer(true); // Passing `true` enables exceptions
try { //$mail->SMTPDebug = 2;
$mail->isSMTP(); // Set mailer to use SMTP
$mail->Host = 'tls://smtp.gmail.com:587';
$mail->SMTPOptions = array(
'ssl' => array(
'verify_peer' => false,
'verify_peer_name' => false,
'allow_self_signed' => true
)
);
$mail->SMTPAuth = true; // Enable SMTP authentication
$mail->Username = '<EMAIL>'; // SMTP username
$mail->Password = '<PASSWORD>'; // SMTP password
// $mail->SMTPSecure = 'tls'; // Enable TLS encryption, `ssl` also accepted
$mail->Port = 587; // TCP port to connect to
$mail->setFrom('<EMAIL>', 'Finance App');
//$mail->addAddress('<EMAIL>', 'qsrbi'); // Add a recipient
$mail->addAddress('<EMAIL>', 'Olagoke');
$mail->addAddress('<EMAIL>', 'Kevin');
$mail->addAddress('<EMAIL>', 'Ola');
//Content
$mail->isHTML(true); // Set email format to HTML
$mail->Subject = 'Payment Status Notification';
$mail->Body = "<h4>Hello,</h4> <br /> Your payment to ".$row['beneficiary']. " slated for ".$row['paydate']. " is Due, please check to confirm the payment status;<br /> Cheers";
$mail->AltBody = strip_tags($mail->Body);$mail->send();
} catch (Exception $e) {
echo 'Message could not be sent.';
//commented in case of errors
echo 'Mailer Error: ' . $mail->ErrorInfo;
}
}
}
}
//condition for when the current date is less than three days before the payment date
if( $dt < $row['paydate'] && ($row['status'] !== 'paid' ))
{
$new = str_replace("/","-", $row['paydate']);
//echo $new."<br />";
$fdate = date('Y-m-d',strtotime($new));
$date1 = new DateTime($td);
$date2 = new DateTime($fdate);
$diff = $date2->diff($date1)->format("%a");
if($diff <= 3)
{
require_once 'PHPMailer3/src/Exception.php';
require_once 'PHPMailer3/src/PHPMailer.php';
require_once 'PHPMailer3/src/SMTP.php';
$mail = new PHPMailer(true); // Passing `true` enables exceptions
try { //$mail->SMTPDebug = 2;
$mail->isSMTP(); // Set mailer to use SMTP
$mail->Host = 'tls://smtp.gmail.com:587';
$mail->SMTPOptions = array(
'ssl' => array(
'verify_peer' => false,
'verify_peer_name' => false,
'allow_self_signed' => true
)
);
$mail->SMTPAuth = true; // Enable SMTP authentication
$mail->Username = '<EMAIL>'; // SMTP username
$mail->Password = '<PASSWORD>'; // SMTP password
// $mail->SMTPSecure = 'tls'; // Enable TLS encryption, `ssl` also accepted
$mail->Port = 587; // TCP port to connect to
$mail->setFrom('<EMAIL>', 'Finance App');
//$mail->addAddress('<EMAIL>', 'qsrbi'); // Add a recipient
$mail->addAddress('<EMAIL>', 'Olagoke');
$mail->addAddress('<EMAIL>', 'Kevin');
$mail->addAddress('<EMAIL>', 'Ola');
//Content
$mail->isHTML(true); // Set email format to HTML
$mail->Subject = 'Payment Status Notification';
$mail->Body = "<h4>Hello,</h4> <br /> Your payment to ".$row['beneficiary']. " slated for ".$row['paydate']. " is Due in ".$diff." days , please check to confirm the payment status;<br /> Cheers";
$mail->AltBody = strip_tags($mail->Body);$mail->send();
} catch (Exception $e) {
echo 'Message could not be sent.';
//commented in case of errors
echo 'Mailer Error: ' . $mail->ErrorInfo;
}
//}
}
}
}
}
?><file_sep>/pages/editor.php
<?php
use PHPMailer\PHPMailer\PHPMailer;
use PHPMailer\PHPMailer\Exception;
if($_SERVER['REQUEST_METHOD'] == "POST")
{
require "../includes/connect.php";
//uprofile
$username = mysqli_real_escape_string($dbc, $_POST['username']);
$password = mysqli_real_escape_string($dbc, trim($_POST['password']));
$email = mysqli_real_escape_string($dbc, trim($_POST['email']));
$firstname = mysqli_real_escape_string($dbc, trim($_POST['firstname']));
$lastname = mysqli_real_escape_string($dbc, trim($_POST['lastname']));
$phone = mysqli_real_escape_string($dbc, trim($_POST['phone']));
$role = mysqli_real_escape_string($dbc, $_POST['role']);
if($_POST['unit'] == 'nill')
{
$unit = 'Finance';
}
else{
$unit = $_POST['unit'];
}
$id = $_POST['id'];
$sql = "UPDATE users SET username ='$username', password =<PASSWORD>('<PASSWORD>'), email ='$email', firstname='$firstname', lastname='$lastname', phone='$phone', role='$role', unit='$unit' WHERE id='$id'";
mysqli_query($dbc, $sql) or die(mysqli_error($dbc));
//Load composer's autoloader
require_once 'PHPMailer3/src/Exception.php';
require_once 'PHPMailer3/src/PHPMailer.php';
require_once 'PHPMailer3/src/SMTP.php';
$mail = new PHPMailer(true); // Passing `true` enables exceptions
try {
// $mail->SMTPDebug = 2; // Enable verbose debug output
$mail->isSMTP(); // Set mailer to use SMTP
$mail->Host = 'genesisgroupng.com'; // Specify main and backup SMTP servers
$mail->SMTPAuth = true; // Enable SMTP authentication
$mail->Username = '<EMAIL>'; // SMTP username
$mail->Password = '<PASSWORD>'; // SMTP password
//$mail->SMTPSecure = 'tls'; // Enable TLS encryption, `ssl` also accepted
$mail->Port = 25; // TCP port to connect to
//Recipients
$mail->setFrom('<EMAIL>', 'Finhub');
//Recipients
//$mail->addAddress('<EMAIL>', 'qsrbi'); // Add a recipient
$mail->addAddress($email, $firstname);
//Content
$mail->isHTML(true); // Set email format to HTML
$mail->Subject = 'Finance Portal Credentials Updated';
$mail->Body = "<h4>Hello,</h4>
<br />
Your Account has been updated. Here is what you need to know ;<br />
Firstname: ".$firstname."<br />
Last Name: ".$lastname." <br/ >
Role: ".$role."<br />
Phone: ".$phone." <br/ >
Username: ".$username."<br />
Password: ".$<PASSWORD>." <br/ >
Email: ".$email." <br />
Url : http://genesisgroupng.com/finhub+/".$role." <br />
Cheers";
$mail->AltBody = strip_tags($mail->Body);
$mail->send();
} catch (Exception $e) {
echo 'Message could not be sent.';
//commented in case of errors
echo 'Mailer Error: ' . $mail->ErrorInfo;
}
echo "Account edited Successfully";
}
?><file_sep>/cc2.php
<?php
use PHPMailer\PHPMailer\PHPMailer;
use PHPMailer\PHPMailer\Exception;
require_once "includes/connect.php";
$dt=date('d/m/Y');
$td=date('d-m-Y');
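// Cron-style status check: walks every unpaid record, flags due/overdue items based on the
// forecast date and emails everyone in the Finance unit about status changes and upcoming payments.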
$q = "SELECT * FROM records WHERE status <> 'paid' ";
$r = mysqli_query($dbc, $q) or die(mysqli_error($dbc));
if($r)
{
while($row = mysqli_fetch_array($r, MYSQLI_ASSOC))
{
$new = str_replace("/","-", $row['forecastdate']);
//echo $new."<br />";
$fdate = date('Y-m-d',strtotime($new));
$date1 = new DateTime($td);
$date2 = new DateTime($fdate);
if($date1 > $date2 && $row['status'] == 'pending' || $date1 > $date2 && $row['status'] == 'due' )
{
$sql = "UPDATE records SET status='overdue' WHERE id =".$row['id'];
$q = mysqli_query($dbc, $sql) or die(mysqli_error($dbc));
$num = mysqli_affected_rows($dbc);
if($q)
{
$query = "SELECT * FROM users WHERE unit='Finance'";
$result = mysqli_query($dbc, $query);
$number = mysqli_num_rows($result);
if($number >= 0)
{
while ($raws = mysqli_fetch_array($result, MYSQLI_ASSOC))
{
require_once 'PHPMailer3/src/Exception.php';
require_once 'PHPMailer3/src/PHPMailer.php';
require_once 'PHPMailer3/src/SMTP.php';
$mail = new PHPMailer(true); // Passing `true` enables exceptions
try {
// $mail->SMTPDebug = 2; // Enable verbose debug output
$mail->isSMTP(); // Set mailer to use SMTP
$mail->Host = 'genesisgroupng.com'; // Specify main and backup SMTP servers
$mail->SMTPAuth = true; // Enable SMTP authentication
$mail->Username = '<EMAIL>'; // SMTP username
$mail->Password = '<PASSWORD>'; // SMTP password
//$mail->SMTPSecure = 'tls'; // Enable TLS encryption, `ssl` also accepted
$mail->Port = 25; // TCP port to connect to
//Recipients
$mail->setFrom('<EMAIL>', 'Finhub');
$mail->addAddress($raws['email'], $raws['firstname']);
//Content
$mail->isHTML(true); // Set email format to HTML
$mail->Subject = 'Payment Status Updated';
$mail->Body = "<h4>Hello,</h4> <br /> Your payment to ".$row['beneficiary']. " slated for ".$row['forecastdate']. " is overdue, please check the finance portal to confirm the payment status;<br /> Cheers";
$mail->AltBody = strip_tags($mail->Body);$mail->send();
} catch (Exception $e) {
echo 'Message could not be sent.';
//commented in case of errors
echo 'Mailer Error: ' . $mail->ErrorInfo;
}
}
}
}
}
// recompute the forecast date so records already marked overdue still trigger a reminder below
$new = str_replace("/","-", $row['forecastdate']);
//echo $new."<br />";
$fdate = date('Y-m-d',strtotime($new));
$date1 = new DateTime($td);
$date2 = new DateTime($fdate);
if($date1 > $date2 && $row['status'] == 'overdue')
{
$query = "SELECT * FROM users WHERE unit='Finance'";
$result = mysqli_query($dbc, $query);
$number = mysqli_num_rows($result);
if($number >= 0)
{
while ($raws = mysqli_fetch_array($result, MYSQLI_ASSOC))
{
require_once 'PHPMailer3/src/Exception.php';
require_once 'PHPMailer3/src/PHPMailer.php';
require_once 'PHPMailer3/src/SMTP.php';
$mail = new PHPMailer(true); // Passing `true` enables exceptions
try { //$mail->SMTPDebug = 2;
//$mail->SMTPDebug = 2; // Enable verbose debug output
$mail->isSMTP(); // Set mailer to use SMTP
$mail->Host = 'genesisgroupng.com'; // Specify main and backup SMTP servers
$mail->SMTPAuth = true; // Enable SMTP authentication
$mail->Username = '<EMAIL>'; // SMTP username
$mail->Password = '<PASSWORD>'; // SMTP password
//$mail->SMTPSecure = 'tls'; // Enable TLS encryption, `ssl` also accepted
$mail->Port = 25; // TCP port to connect to
//Recipients
$mail->setFrom('<EMAIL>', 'Finhub');
$mail->addAddress($raws['email'], $raws['firstname']);
//Content
$mail->isHTML(true); // Set email format to HTML
$mail->Subject = 'Payment Status Notification';
$mail->Body = "<h4>Hello,</h4> <br /> Your payment to ".$row['beneficiary']. " slated for ".$row['forecastdate']. " is overdue, please check to confirm the payment status;<br /> Cheers";
$mail->AltBody = strip_tags($mail->Body);$mail->send();
} catch (Exception $e) {
echo 'Message could not be sent.';
//commented in case of errors
echo 'Mailer Error: ' . $mail->ErrorInfo;
}
}
}
}
}
}
$query = "SELECT * FROM records WHERE status <> 'paid'";
$run = mysqli_query($dbc, $query) or die(mysqli_error($dbc));
$nums = mysqli_num_rows($run);
if($run){
while($raw = mysqli_fetch_array($run, MYSQLI_ASSOC))
{
if($dt == $raw['forecastdate'] && ($raw['status'] == 'pending' ))
{
$sql = "UPDATE records SET status='due' WHERE id =".$raw['id'];
$q = mysqli_query($dbc, $sql) or die(mysqli_error($dbc));
$num = mysqli_affected_rows($dbc);
if($q){
$query = "SELECT * FROM users WHERE unit='Finance'";
$result = mysqli_query($dbc, $query);
$number = mysqli_num_rows($result);
if($number >= 0)
{
while ($raws = mysqli_fetch_array($result, MYSQLI_ASSOC))
{
require_once 'PHPMailer3/src/Exception.php';
require_once 'PHPMailer3/src/PHPMailer.php';
require_once 'PHPMailer3/src/SMTP.php';
$mail = new PHPMailer(true); // Passing `true` enables exceptions
try {
//$mail->SMTPDebug = 2; // Enable verbose debug output
$mail->isSMTP(); // Set mailer to use SMTP
$mail->Host = 'genesisgroupng.com'; // Specify main and backup SMTP servers
$mail->SMTPAuth = true; // Enable SMTP authentication
$mail->Username = '<EMAIL>'; // SMTP username
$mail->Password = '<PASSWORD>'; // SMTP password
//$mail->SMTPSecure = 'tls'; // Enable TLS encryption, `ssl` also accepted
$mail->Port = 25; // TCP port to connect to
//Recipients
$mail->setFrom('<EMAIL>', 'Finhub');
$mail->addAddress($raws['email'], $raws['firstname']);
//Content
$mail->isHTML(true); // Set email format to HTML
$mail->Subject = 'Payment Status Updated';
$mail->Body = "<h4>Hello,</h4><br />
Your payment to ".$raw['beneficiary']. " slated for ".$raw['forecastdate']. " is Due, please check the portal to confirm the payment status;<br /> Cheers";
$mail->AltBody = strip_tags($mail->Body);
$mail->send();
} catch (Exception $e) {
echo 'Message could not be sent.';
//commented in case of errors
echo 'Mailer Error: ' . $mail->ErrorInfo;
}
}
}
}
}
else
if($dt == $raw['forecastdate'] && ($raw['status'] == 'due' ))
{
$sql = "UPDATE records SET status='due' WHERE id =".$raw['id'];
$q = mysqli_query($dbc, $sql) or die(mysqli_error($dbc));
$num = mysqli_affected_rows($dbc);
if($q)
{
$query = "SELECT * FROM users WHERE unit='Finance'";
$result = mysqli_query($dbc, $query);
$number = mysqli_num_rows($result);
if($number >= 0)
{
while ($raws = mysqli_fetch_array($result, MYSQLI_ASSOC))
{
require_once 'PHPMailer3/src/Exception.php';
require_once 'PHPMailer3/src/PHPMailer.php';
require_once 'PHPMailer3/src/SMTP.php';
$mail = new PHPMailer(true); // Passing `true` enables exceptions
try {
//$mail->SMTPDebug = 2; // Enable verbose debug output
$mail->isSMTP(); // Set mailer to use SMTP
$mail->Host = 'genesisgroupng.com'; // Specify main and backup SMTP servers
$mail->SMTPAuth = true; // Enable SMTP authentication
$mail->Username = '<EMAIL>'; // SMTP username
$mail->Password = '<PASSWORD>'; // SMTP password
//$mail->SMTPSecure = 'tls'; // Enable TLS encryption, `ssl` also accepted
$mail->Port = 25; // TCP port to connect to
//Recipients
$mail->setFrom('<EMAIL>', 'Finhub');
$mail->addAddress($raws['email'], $raws['firstname']);
//Content
$mail->isHTML(true); // Set email format to HTML
$mail->Subject = 'Payment Status Notification';
$mail->Body = "<h4>Hello,</h4><br />
Your payment to ".$raw['beneficiary']. " slated for ".$raw['forecastdate']. " is Due, please check the portal to confirm the payment status;<br /> Cheers";
$mail->AltBody = strip_tags($mail->Body);
$mail->send();
} catch (Exception $e) {
echo 'Message could not be sent.';
//commented in case of errors
echo 'Mailer Error: ' . $mail->ErrorInfo;
}
}
}
}
}
}
}
$query = "SELECT * FROM records WHERE status <> 'paid'";
$run = mysqli_query($dbc, $query) or die(mysqli_error($dbc));
$nums = mysqli_num_rows($run);
if($run){
while($raws = mysqli_fetch_array($run, MYSQLI_ASSOC))
{
if( $dt < $raws['forecastdate'] && ($raws['status'] !== 'paid' ))
{
$new = str_replace("/","-", $raws['forecastdate']);
//echo $new."<br />";
$fdate = date('Y-m-d',strtotime($new));
$date1 = new DateTime($td);
$date2 = new DateTime($fdate);
$diff = $date2->diff($date1)->format("%a");
if($diff <= 3)
{
$query = "SELECT * FROM users WHERE unit='Finance'";
$result = mysqli_query($dbc, $query);
$number = mysqli_num_rows($result);
if($number >= 0)
{
// use a separate variable for the Finance users so $raws keeps pointing at the record row
while ($user = mysqli_fetch_array($result, MYSQLI_ASSOC))
{
require_once 'PHPMailer3/src/Exception.php';
require_once 'PHPMailer3/src/PHPMailer.php';
require_once 'PHPMailer3/src/SMTP.php';
$mail = new PHPMailer(true); // Passing `true` enables exceptions
try {
// $mail->SMTPDebug = 2; // Enable verbose debug output
$mail->isSMTP(); // Set mailer to use SMTP
$mail->Host = 'genesisgroupng.com'; // Specify main and backup SMTP servers
$mail->SMTPAuth = true; // Enable SMTP authentication
$mail->Username = '<EMAIL>'; // SMTP username
$mail->Password = '<PASSWORD>'; // SMTP password
//$mail->SMTPSecure = 'tls'; // Enable TLS encryption, `ssl` also accepted
$mail->Port = 25; // TCP port to connect to
//Recipients
$mail->setFrom('<EMAIL>', 'Finhub');
$mail->addAddress($user['email'], $user['firstname']);
//Content
$mail->isHTML(true); // Set email format to HTML
$mail->Subject = 'Payment Status Notification';
$mail->Body = "<h4>Hello,</h4> <br /> Your payment to ".$raws['beneficiary']. " slated for ".$raws['forecastdate']. " is Due in ".$diff." day(s) , please check to confirm the payment status;<br /> Cheers";
$mail->AltBody = strip_tags($mail->Body);$mail->send();
} catch (Exception $e) {
echo 'Message could not be sent.';
//commented in case of errors
echo 'Mailer Error: ' . $mail->ErrorInfo;
}
//}
}
}
}
}
}
}
?>
<file_sep>/tracker/panel.php
<?php
session_start();
if(isset($_SESSION['tracker']))
{
include "inc/header.php";
include "../includes/connect.php";
$unit = $_SESSION['unit'];
echo'
<div class="row">
<div class="col-lg-12">
<div class="wrapper wrapper-content">
<div class="row">
<div class="col-lg-3">
<div class="ibox float-e-margins">
<div class="ibox-title">
<h5>Recently Added Users</h5>
<div class="ibox-tools">
<span class="label label-warning-light pull-right">Latest</span>
</div>
</div>
<div class="ibox-content">
<div>
<div class="feed-activity-list">';
$query = "SELECT * FROM users LIMIT 5";
$run = mysqli_query($dbc,$query);
if($run)
{
while($row = mysqli_fetch_array($run, MYSQLI_ASSOC) )
{
echo '
<div class="feed-element">
<div class="media-body ">
<small class="text-danger">new</small>
<strong>'.$row['firstname']. ' '. $row['lastname'].'</strong> was Added <br>
<small class="mb-4">Role: '.$row['role'].' </small>
</div>
</div>
';
}
}
echo '
</div>
</div>
</div>
</div>
</div>
<div class="col-lg-4">
<div class="ibox float-e-margins">
<div class="ibox-title">
<h5>Recent Records From<span class="text-danger"> '.$_SESSION['unit'].' Unit</span></h5>
<div class="ibox-tools">
<span class="label label-warning-light pull-right">Latest</span>
</div>
</div>
<div class="ibox-content">
<div>
<div class="feed-activity-list">';
$query = "SELECT * FROM records WHERE unit ='$unit' ORDER BY `id` DESC LIMIT 10";
$run = mysqli_query($dbc,$query);
if($run)
{
while($row = mysqli_fetch_array($run, MYSQLI_ASSOC) )
{
echo '
<div class="feed-element">
<div class="media-body ">
<strong>'.$row['beneficiary']. ' To be Paid #'. $row['amount'].'</strong><br>
<small class="">Forecast Date:'.$row['forecastdate'].'</small><br/>
<small class="text-danger">Payment Status</small>: ' .$row['status'].'
</div>
</div>
';
}
}
echo '
</div>
</div>
</div>
</div>
</div>
<div class="col-lg-5">
<div class="ibox float-e-margins">
<div class="ibox-title">
<h5><span class="text-danger">'.$_SESSION['unit'].'</span> Unit Statistics</h5>
</div>
<div class="ibox-content">
<div class="row">
<div class="col-xs-3">
<small class="stats-label">All Records</small>';
$query = "SELECT * FROM records WHERE unit = '$unit' ";
$run = mysqli_query($dbc,$query);
$num = mysqli_num_rows($run);
echo'
<h4 class="text-danger">'.$num.'</h4>
</div>
<div class="col-xs-3">
<small class="stats-label">Paid Items</small>';
$query = "SELECT * FROM records WHERE unit = '$unit' AND status='paid'";
$run = mysqli_query($dbc,$query);
$num = mysqli_num_rows($run);
echo'
<h4 class="text-danger">'.$num.'</h4>
</div>
<div class="col-xs-3">
<small class="stats-label">Pending Items</small>';
$query = "SELECT * FROM records WHERE unit = '$unit' AND status='pending'";
$run = mysqli_query($dbc,$query);
$num = mysqli_num_rows($run);
echo'
<h4 class="text-danger">'.$num.'</h4>
</div>
<div class="col-xs-3">
<small class="stats-label">Overdue Items</small>';
$query = "SELECT * FROM records WHERE unit = '$unit' AND status='overdue'";
$run = mysqli_query($dbc,$query);
$num = mysqli_num_rows($run);
echo'
<h4 class="text-danger">'.$num.'</h4>
</div>
</div>
</div>
</div>
<div class="ibox float-e-margins">
<div class="ibox-title">
<h5>Payment Statistics</h5>
<div class="ibox-tools">
<span class="label label-danger pull-right">Filters</span>
</div>
</div>
<div class="ibox-content">
<div class="row">
<div class="col-xs-3">
<small class="stats-label">Total Amount </small>';
$query = "SELECT SUM(amount) AS total FROM records WHERE unit = '$unit'";
$run = mysqli_query($dbc,$query);
$row = mysqli_fetch_array($run, MYSQLI_ASSOC);
echo'
<h4 class="text-danger">#'.$row['total'].'</h4>
</div>
<div class="col-xs-3">
<small class="stats-label">Total Paid</small>';
$query = "SELECT SUM(amount) AS total FROM records WHERE unit = '$unit' AND status='paid'";
$run = mysqli_query($dbc,$query);
$row = mysqli_fetch_array($run, MYSQLI_ASSOC);
echo'
<h4 class="text-danger">#'.$row['total'].'</h4>
</div>
<div class="col-xs-3">
<small class="stats-label">Total Pending</small>';
$query = "SELECT SUM(amount) AS total FROM records WHERE unit = '$unit' AND status='pending'";
$run = mysqli_query($dbc,$query);
$row = mysqli_fetch_array($run, MYSQLI_ASSOC);
echo'
<h4 class="text-danger">#'.$row['total'].'</h4>
</div>
<div class="col-xs-3">
<small class="stats-label">Total Overdue</small>';
$query = "SELECT SUM(amount) AS total FROM records WHERE unit = '$unit' AND status='overdue'";
$run = mysqli_query($dbc,$query);
$row = mysqli_fetch_array($run, MYSQLI_ASSOC);
echo'
<h4 class="text-danger">#'.$row['total'].'</h4>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</div>';
include 'inc/footer.php';
}
elseif (isset($_SESSION['admin']) AND isset($_SESSION['username'])) {
echo 'Admin Logged in <a href="logout.php">Log Out</a>';
# code...
}
else{
header("Location:index.php");
}
<file_sep>/pages/panel.php
<?php
session_start();
if(isset($_SESSION['admin']))
{
include "inc/header.php";
include "../includes/connect.php";
echo'
<div class="row">
<div class="col-lg-12">
<div class="wrapper wrapper-content">
<div class="row">
<div class="col-lg-3">
<div class="ibox float-e-margins">
<div class="ibox-title">
<h5>Recently Added Users</h5>
<div class="ibox-tools">
<span class="label label-warning-light pull-right">Latest</span>
</div>
</div>
<div class="ibox-content">
<div>
<div class="feed-activity-list">';
$query = "SELECT * FROM users ORDER BY `id` DESC LIMIT 10";
$run = mysqli_query($dbc,$query);
if($run)
{
while($row = mysqli_fetch_array($run, MYSQLI_ASSOC) )
{
echo '
<div class="feed-element">
<div class="media-body ">
<small class="text-danger">new</small>
<strong>'.$row['firstname']. ' '. $row['lastname'].'</strong> was Added <br>
<small class="mb-4">Role: '.$row['role'].' </small>
</div>
</div>
';
}
}
echo '
</div>
</div>
</div>
</div>
</div>
<div class="col-lg-3">
<div class="ibox float-e-margins">
<div class="ibox-title">
<h5>Recent Records</h5>
<div class="ibox-tools">
<span class="label label-warning-light pull-right">Latest</span>
</div>
</div>
<div class="ibox-content">
<div>
<div class="feed-activity-list">';
$query = "SELECT * FROM records ORDER BY `id` DESC LIMIT 10";
$run = mysqli_query($dbc,$query);
if($run)
{
while($row = mysqli_fetch_array($run, MYSQLI_ASSOC) )
{
echo '
<div class="feed-element">
<div class="media-body ">
<strong>'.$row['beneficiary']. ' To be Paid #'. $row['amount'].'</strong><br>
<small class="">Forecast Date:'.$row['forecastdate'].'</small><br/>
<small class="text-danger">Payment Status</small>: ' .$row['status'].'
</div>
</div>
';
}
}
echo '
</div>
</div>
</div>
</div>
</div>
<div class="col-lg-6">
<div class="ibox float-e-margins">
<div class="ibox-title">
<h5>Record Statistics</h5>
<div class="ibox-tools">
<span class="label label-danger pull-right">Filters</span>
</div>
</div>
<div class="ibox-content">
<div class="row">
<div class="col-xs-3">
<small class="stats-label">All Records</small>';
$query = "SELECT * FROM records ";
$run = mysqli_query($dbc,$query);
$num = mysqli_num_rows($run);
echo'
<h4 class="text-danger">'.$num.'</h4>
</div>
<div class="col-xs-3">
<small class="stats-label">Total Paid</small>';
$query = "SELECT * FROM records WHERE status='paid'";
$run = mysqli_query($dbc,$query);
$num = mysqli_num_rows($run);
echo'
<h4 class="text-danger">'.$num.'</h4>
</div>
<div class="col-xs-3">
<small class="stats-label">Total Pending</small>';
$query = "SELECT * FROM records WHERE status='pending'";
$run = mysqli_query($dbc,$query);
$num = mysqli_num_rows($run);
echo'
<h4 class="text-danger">'.$num.'</h4>
</div>
<div class="col-xs-3">
<small class="stats-label">Total Overdue</small>';
$query = "SELECT * FROM records WHERE status='overdue'";
$run = mysqli_query($dbc,$query);
$num = mysqli_num_rows($run);
echo'
<h4 class="text-danger">'.$num.'</h4>
</div>
</div>
</div>
</div>
<div class="ibox float-e-margins">
<div class="ibox-title">
<h5>Payment Statistics</h5>
<div class="ibox-tools">
<span class="label label-danger pull-right">Filters</span>
</div>
</div>
<div class="ibox-content fil animated fadeInDown">
<div class="row">
<div class="col-xs-3">';
if(!isset($_POST['filter']))
{
$query = "SELECT SUM(amount) AS total FROM records ";
$run = mysqli_query($dbc,$query);
$row = mysqli_fetch_array($run, MYSQLI_ASSOC);
echo'<small class="stats-label">Total Amount </small>
<h4 class="text-danger">#'.$row['total'].'</h4>
</div>
<div class="col-xs-3">
<small class="stats-label">Total Paid</small>';
$query = "SELECT SUM(amount) AS total FROM records WHERE status='paid'";
$run = mysqli_query($dbc,$query);
$row = mysqli_fetch_array($run, MYSQLI_ASSOC);
echo'
<h4 class="text-danger">#'.$row['total'].'</h4>
</div>
<div class="col-xs-3">
<small class="stats-label">Total Pending</small>';
$query = "SELECT SUM(amount) AS total FROM records WHERE status='pending'";
$run = mysqli_query($dbc,$query);
$row = mysqli_fetch_array($run, MYSQLI_ASSOC);
echo'
<h4 class="text-danger">#'.$row['total'].'</h4>
</div>
<div class="col-xs-3">
<small class="stats-label">Total Overdue</small>';
$query = "SELECT SUM(amount) AS total FROM records WHERE status='overdue'";
$run = mysqli_query($dbc,$query);
$row = mysqli_fetch_array($run, MYSQLI_ASSOC);
echo'
<h4 class="text-danger">#'.$row['total'].'</h4>
</div>';
}else
if(isset($_POST['filter'])){
echo "Filter seen!";
}
echo '
</div>
</div>
<div class="ibox-content result animated slideInLeft">
<div class="row">
<div class="col-xs-3">
<small class="stats-label">Total Amount </small>
<h4 class="text-danger">#<b class="total"></b></h4>
</div>
<div class="col-xs-3">
<small class="stats-label">Total Paid</small>
<h4 class="text-danger">#<b class="paid"></b></h4>
</div>
<div class="col-xs-3">
<small class="stats-label">Total Pending</small>
<h4 class="text-danger">#<b class="pending"></b></h4>
</div>
<div class="col-xs-3">
<small class="stats-label">Total Overdue</small>
<h4 class="text-danger">#<b class="overdue"></b></h4>
</div>
</div>
</div>
<h3>Filter</h3>
<form class="form-group">
<input type="date" name="from" id="from" class="form-control" />
<input type="date" name="to" id="to" class="form-control" />
<button type="submit" class="btn btn-sm" name="filter" id="filter">Filter</button>
</form>
<h3 id="filters">Click</h3>
</div>
</div>
</div>
</div>
</div>';
include 'inc/footer.php';
}
else{
header("Location:index.php");
}
<file_sep>/supervisor/inc/footer.php
<div class="footer">
<div class="pull-right">
<strong></strong>
</div>
<div>
<strong>Copyright</strong> Genesis Group Nigeria © 2018
</div>
</div>
</div>
</div>
<!-- Mainly scripts -->
<script src="../assets/js/jquery-3.1.1.min.js"></script>
<script src="../assets/js/bootstrap.min.js"></script>
<script src="../assets/js/plugins/metisMenu/jquery.metisMenu.js"></script>
<script src="../assets/js/plugins/slimscroll/jquery.slimscroll.min.js"></script>
<!-- Flot -->
<script src="../assets/js/plugins/flot/jquery.flot.js"></script>
<script src="../assets/js/plugins/flot/jquery.flot.tooltip.min.js"></script>
<script src="../assets/js/plugins/flot/jquery.flot.spline.js"></script>
<script src="../assets/js/plugins/flot/jquery.flot.resize.js"></script>
<script src="../assets/js/plugins/flot/jquery.flot.pie.js"></script>
<script src="../assets/js/plugins/flot/jquery.flot.symbol.js"></script>
<script src="../assets/js/plugins/flot/jquery.flot.time.js"></script>
<!-- Peity -->
<script src="../assets/js/plugins/peity/jquery.peity.min.js"></script>
<script src="../assets/js/demo/peity-demo.js"></script>
<!-- Custom and plugin javascript -->
<script src="../assets/js/inspinia.js"></script>
<script src="../assets/js/plugins/pace/pace.min.js"></script>
<!-- jQuery UI -->
<script src="../assets/js/plugins/jquery-ui/jquery-ui.min.js"></script>
<!-- Jvectormap -->
<script src="../assets/js/plugins/jvectormap/jquery-jvectormap-2.0.2.min.js"></script>
<script src="../assets/js/plugins/jvectormap/jquery-jvectormap-world-mill-en.js"></script>
<!-- EasyPie -->
<script src="../assets/js/plugins/easypiechart/jquery.easypiechart.js"></script>
<!-- Sparkline -->
<script src="../assets/js/plugins/sparkline/jquery.sparkline.min.js"></script>
<!-- Sparkline demo data -->
<script src="../assets/js/demo/sparkline-demo.js"></script>
<script src="../assets/js/plugins/dataTables/datatables.min.js"></script>
<!-- Bootstrap markdown -->
<script src="../assets/js/plugins/bootstrap-markdown/bootstrap-markdown.js"></script>
<script src="../assets/js/plugins/bootstrap-markdown/markdown.js"></script>
<!-- Page-Level Scripts -->
<script>
$(document).ready(function(){
$('.dataTables-example').DataTable({
pageLength: 25,
responsive: true,
dom: '<"html5buttons"B>lTfgitp',
buttons: [
{ extend: 'copy'},
{extend: 'csv'},
{extend: 'excel', title: 'ExampleFile'},
{extend: 'pdf', title: 'ExampleFile'},
{extend: 'print',
customize: function (win){
$(win.document.body).addClass('white-bg');
$(win.document.body).css('font-size', '10px');
$(win.document.body).find('table')
.addClass('compact')
.css('font-size', 'inherit');
}
}
]
});
});
</script>
<script type="text/javascript">
$(document).ready(function(){
$('#contact').click(function(event){
event.preventDefault();
$("#uploadIm").show();
$.ajax({
url:"mailer.php",
method: "post",
data:$('form').serialize(),
dataType:"text",
success:function(strMessage){
$("#uploadIm").hide();
$('#message').text(strMessage);
$('#form').trigger("reset");
}
})
})
})
</script>
<script type="text/javascript">
$(document).ready(function(){
$('#edit').click(function(event){
event.preventDefault();
$("#uploadIm").show();
$.ajax({
url:"editor.php",
method: "post",
data:$('form').serialize(),
dataType:"text",
success:function(strMessage){
$("#uploadIm").hide();
$('#message').text(strMessage);
$('#form').trigger("reset");
}
})
})
})
</script>
<script type="text/javascript">
$(document).ready(function(){
    $(document).on('click', '.edit', function(event){
event.preventDefault();
$("#uploadIm").show();
var id = $(this).attr("name");
$.ajax({
url:"fetch.php",
method:"POST",
data:{id : id},
dataType:"json",
success: function(data){
$("#uploadIm").hide();
$('#beneficiary').val(data.beneficiary);
$('#amount').val(data.amount);
$('#description').val(data.description);
$('#entrydate').val(data.entrydate);
$('#forecast').val(data.forecastdate);
// $('#status').val(data.status);
$('#record_id').val(data.id);
}
})
})
})
</script>
<script type="text/javascript">
$(document).ready(function(){
    $(document).on('click', '.pay', function(event){
event.preventDefault();
var id = $(this).attr("name");
$.ajax({
url:"pay.php",
method:"POST",
data:{id:id},
dataType:"text",
success: function(strMessage){
var $text= 'Marked as Paid Successfully';
    var $pops = $('<div class="alert alert-warning alert-dismissible show" id="marked" role="alert">'+ $text +'<button type="button" class="close" data-dismiss="alert" aria-label="Close"><span aria-hidden="true">&times;</span></button></div>'
);
$(".ibox-content").prepend($pops);
console.log(strMessage);
location.reload();
}
});
});
});
</script>
<script type="text/javascript">
$(document).ready(function(){
    $(document).on('click', '.delete', function(event){
event.preventDefault();
$("#uploadIm").show();
var id = $(this).attr("name");
$.ajax({
url:"fetch.php",
method:"POST",
data:{id : id},
dataType:"json",
success: function(data){
$("#uploadIm").hide();
$('#id').val(data.id);
}
})
})
})
</script>
<script>
$('select[name=role]').change(function () {
if ($(this).val() == 'Tracker') {
$('#supervisor').show('fadeIn', 3000);
}
else{
$('#supervisor').hide();
}
});
</script>
<script type="text/javascript">
$(document).ready(function(){
$('#filter').click(function(event){
event.preventDefault();
$("#uploadIm").show();
var $from = $('#from').val();
var $to = $('#to').val();
$('.appended').remove();
if($from >= $to)
{
alert('Invalid Selection');
}else
{
$.ajax({
url:"filter.php",
method: "POST",
data:$('form').serialize(),
dataType:"json",
success:function(data){
$('.fil').hide();
$('.result').show();
// console.log(data['all'][0].total);
$('.total').append('<span class="appended">'+data['all'][0].total+'</span>');
$('.paid').append('<span class="appended">'+data['paid'][0].total+'</span>');
$('.pending').append('<span class="appended">'+data['pending'][0].total+'</span>');
// console.log(data['paid'][0].total);
$('.overdue').append('<span class="appended">'+data['overdue'][0].total+'</span>');
// console.log(data['pending'][0].total);
// console.log(data['overdue'][0].total);
}
})
}
})
})
</script>
</body>
</html>
<file_sep>/records.sql
-- phpMyAdmin SQL Dump
-- version 4.7.0
-- https://www.phpmyadmin.net/
--
-- Host: 127.0.0.1
-- Generation Time: Nov 16, 2018 at 10:53 AM
-- Server version: 10.1.22-MariaDB
-- PHP Version: 7.1.4
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET AUTOCOMMIT = 0;
START TRANSACTION;
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8mb4 */;
--
-- Database: `finance`
--
-- --------------------------------------------------------
--
-- Table structure for table `records`
--
CREATE TABLE `records` (
`id` int(10) NOT NULL,
`beneficiary` varchar(100) NOT NULL,
`amount` varchar(1000) NOT NULL,
`description` varchar(255) NOT NULL,
`entrydate` date NOT NULL,
`paydate` date NOT NULL,
`status` text NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
--
-- Dumping data for table `records`
--
INSERT INTO `records` (`id`, `beneficiary`, `amount`, `description`, `entrydate`, `paydate`, `status`) VALUES
(1, '<NAME>', '50000', 'Free Gift given for his bravery', '2018-11-13', '2018-11-20', 'pending'),
(2, '<NAME>', '500000', 'Over time and loyalty bonus', '2018-11-13', '2018-11-14', 'overdue'),
(5, 'Chioma\'s Technologies', '50000', 'Payment for the new phones', '2018-11-13', '2018-11-22', 'pending'),
(6, '<NAME>', '30000', 'Trip to Enugu', '2018-11-13', '2018-11-20', 'due'),
(10, 'Favour Obasi', '50000', 'Bonus on extra items', '2018-11-13', '2018-11-20', 'paid'),
(11, '<NAME>', '50000', 'PDC Cabling and routing', '2018-11-13', '2018-11-20', 'paid'),
(12, '<NAME>', '50000', 'Free Goft given for his bravery', '2018-11-13', '2018-11-20', 'nill'),
(13, '<NAME>', '50000', 'Free Goft given for his bravery', '2018-11-13', '2018-11-20', 'nill'),
(14, '<NAME>', '50000', 'Free Goft given for his bravery', '2018-11-13', '2018-11-20', 'nill'),
(15, '<NAME>', '50000', 'Free Goft given for his bravery', '2018-11-13', '2018-11-20', 'nill'),
(16, '<NAME>', '50000', 'Free Goft given for his bravery', '2018-11-13', '2018-11-20', 'nill'),
(17, '<NAME>', '50000', 'Free Goft given for his bravery', '2018-11-13', '2018-11-20', 'nill'),
(18, '<NAME>', '50000', 'Free Goft given for his bravery', '2018-11-13', '2018-11-20', 'nill'),
(19, '<NAME>', '50000', 'Free Goft given for his bravery', '2018-11-13', '2018-11-20', 'nill'),
(20, '<NAME>', '50000', 'Free Goft given for his bravery', '2018-11-13', '2018-11-20', 'nill'),
(21, '<NAME>', '50000', 'Free Goft given for his bravery', '2018-11-13', '2018-11-20', 'nill'),
(22, '<NAME>', '50000', 'Free Goft given for his bravery', '2018-11-13', '2018-11-20', 'nill'),
(23, '<NAME>', '50000', 'Free Goft given for his bravery', '2018-11-13', '2018-11-20', 'nill'),
(24, '<NAME>', '50000', 'Free Goft given for his bravery', '2018-11-13', '2018-11-20', 'nill'),
(25, '<NAME>', '50000', 'Free Goft given for his bravery', '2018-11-13', '2018-11-20', 'nill'),
(26, '<NAME>', '50000', 'Free Goft given for his bravery', '2018-11-13', '2018-11-20', 'nill'),
(27, '<NAME>', '50000', 'Free Goft given for his bravery', '2018-11-13', '2018-11-20', 'nill'),
(28, '<NAME>', '50000', 'Free Goft given for his bravery', '2018-11-13', '2018-11-20', 'nill'),
(29, '<NAME>', '50000', 'Free Goft given for his bravery', '2018-11-13', '2018-11-20', 'nill'),
(30, '<NAME>', '50000', 'Free Goft given for his bravery', '2018-11-13', '2018-11-20', 'nill'),
(31, '<NAME>', '50000', 'Free Goft given for his bravery', '2018-11-13', '2018-11-20', 'nill'),
(32, '<NAME>', '50000', 'Free Goft given for his bravery', '2018-11-13', '2018-11-20', 'nill'),
(33, 'Beneficiary', '', 'Description', '0000-00-00', '0000-00-00', 'status'),
(34, '<NAME>', '', 'Payment for sweet services', '2010-03-18', '2011-12-18', 'paid'),
(35, 'Beneficiary', '', 'Description', '0000-00-00', '0000-00-00', 'status'),
(36, '<NAME>', '', 'Payment for sweet services', '2010-03-18', '2011-12-18', 'paid'),
(37, '<NAME> ', '', 'Payment for sweet services everdya', '2010-05-18', '2011-10-18', 'overdue'),
(38, '<NAME> ', '', 'Payment for sweet services', '2010-01-18', '0000-00-00', 'due'),
(39, 'Basket mouth', '', 'Payment for sweet services', '0000-00-00', '0000-00-00', 'pending'),
(40, '<NAME>', '', 'Payment for sweet services', '2010-11-18', '0000-00-00', 'pending');
--
-- Indexes for dumped tables
--
--
-- Indexes for table `records`
--
ALTER TABLE `records`
ADD PRIMARY KEY (`id`);
--
-- AUTO_INCREMENT for dumped tables
--
--
-- AUTO_INCREMENT for table `records`
--
ALTER TABLE `records`
  MODIFY `id` int(10) NOT NULL AUTO_INCREMENT, AUTO_INCREMENT=41;
COMMIT;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
<file_sep>/pages/add_records.php
<?php
session_start();
use PHPMailer\PHPMailer\PHPMailer;
use PHPMailer\PHPMailer\Exception;
if(isset($_SESSION['admin']))
{
if (isset($_POST["Import"]))
{
include "../includes/connect.php";
$filename=$_FILES["file"]["tmp_name"];
if($_FILES["file"]["size"] > 0)
{
$file = fopen($filename, "r");
$count= 0;
while (($emapData = fgetcsv($file, 10000, ",")) !== FALSE)
{
$count++;
if($count>1){
$emapData[0] = mysqli_real_escape_string($dbc, trim($emapData[0]));
$emapData[1] = mysqli_real_escape_string($dbc, trim($emapData[1]));
$emapData[2] = mysqli_real_escape_string($dbc, trim($emapData[2]));
$emapData[3] = mysqli_real_escape_string($dbc, trim($emapData[3]));
$emapData[4] = mysqli_real_escape_string($dbc, trim($emapData[4]));
$emapData[5] = mysqli_real_escape_string($dbc, trim($emapData[5]));
$emapData[6] = mysqli_real_escape_string($dbc, trim($emapData[6]));
$emapData[7] = mysqli_real_escape_string($dbc, trim($emapData[7]));
                //It will insert a row into our records table from our csv file
$sql = "INSERT into records (beneficiary,description,amount,entrydate,forecastdate,paydate, status,unit)
values('$emapData[0]','$emapData[1]','$emapData[2]','$emapData[3]','$emapData[4]','$emapData[5]', '$emapData[6]','$emapData[7]')";
                //we are using the mysqli_query function. it returns a result on success, or false on error
$result = mysqli_query($dbc, $sql) or die(mysqli_error($dbc));
if(!$result )
{
echo "<script type=\"text/javascript\">
alert(\"Invalid File:Please Upload CSV File.\");
window.location = \"add_records.php\"
</script>";
}
}
}
fclose($file);
//throws a message if data successfully imported to mysql database from excel file
$q = "SELECT * FROM users WHERE role='supervisor' OR role='staff' AND unit='Finance'";
$r = mysqli_query($dbc, $q);
$num = mysqli_num_rows($r);
    if($num > 0)
{
while ($raw = mysqli_fetch_array($r, MYSQLI_ASSOC))
{
//Load composer's autoloader
require_once 'PHPMailer3/src/Exception.php';
require_once 'PHPMailer3/src/PHPMailer.php';
require_once 'PHPMailer3/src/SMTP.php';
$mail = new PHPMailer(true); // Passing `true` enables exceptions
try {
//Server settings
//$mail->SMTPDebug = 2; // Enable verbose debug output
$mail->isSMTP(); // Set mailer to use SMTP
$mail->Host = 'genesisgroupng.com'; // Specify main and backup SMTP servers
$mail->SMTPAuth = true; // Enable SMTP authentication
$mail->Username = '<EMAIL>'; // SMTP username
$mail->Password = '<PASSWORD>'; // SMTP password
//$mail->SMTPSecure = 'tls'; // Enable TLS encryption, `ssl` also accepted
$mail->Port = 25; // TCP port to connect to
//Recipients
$mail->setFrom('<EMAIL>', 'Finhub');
//Recipients
//$mail->addAddress('<EMAIL>', 'qsrbi'); // Add a recipient
$mail->addAddress($raw['email'], $raw['firstname']);
//Content
$mail->isHTML(true); // Set email format to HTML
$mail->Subject = 'New Record Uploaded';
$mail->Body = "<h4>Hello,</h4>
<br />
A new Record has been uploaded to the finance Portal by ".$_SESSION['admin_username'].", please check the system for more details <br />
Cheers";
$mail->AltBody = strip_tags($mail->Body);
$mail->send();
} catch (Exception $e) {
echo 'Message could not be sent.';
//commented in case of errors
echo 'Mailer Error: ' . $mail->ErrorInfo;
}
echo "<script type=\"text/javascript\">
alert(\"CSV File has been successfully Imported.\");
window.location = \"add_records.php\"
</script>";
}
}
}
}
include "inc/header.php";
echo'
<div class="wrapper wrapper-content">
<div class="row wrapper border-bottom white-bg page-heading">
<div class="col-lg-10">
<h2>Admin</h2>
<ol class="breadcrumb">
<li>
<a href="view_records.php">Records</a>
</li>
<li>
<a>Add New Record</a>
</li>
</ol>
</div>
<div class="col-lg-2">
</div>
</div>
<div class="wrapper wrapper-content animated fadeInRight">
<div class="row">
<div class="col-lg-12">
<div class="ibox float-e-margins">
<div class="ibox-title">
<h5>Upload New DataSet</h5>
</div>
<div class="ibox-content">
<form action="" method="post"
name="frmExcelImport" id="frmExcelImport" enctype="multipart/form-data">
<div class="form-group">
<label for="exampleFormControlFile1">Upload New Record</label>
<div class="col-sm-5">
<input type="file" class="form-control" name="file" id="file" accept=".xls,.xlsx">
</div>
</div>
<div class="hr-line-dashed"></div>
<div class="form-group">
<div class="col-sm-4 col-sm-offset-2">
<button class="btn btn-primary" type="submit" name="Import">Submit</button>
</div>
</div><br />
</form>
<p class="text-danger">Sample CSV document found <a href="../download/try.csv">here</a></p>
</div>
</div>
</div>
</div>
</div>
</div>';
include 'inc/footer.php';
}
else{
header("Location:index.php");
}
<file_sep>/supervisor/mailer.php
<?php
use PHPMailer\PHPMailer\PHPMailer;
use PHPMailer\PHPMailer\Exception;
if($_SERVER['REQUEST_METHOD'] == "POST")
{
require "../includes/connect.php";
//uprofile
$username = mysqli_real_escape_string($dbc, $_POST['username']);
$password = mysqli_real_escape_string($dbc, trim($_POST['password']));
$email = mysqli_real_escape_string($dbc, trim($_POST['email']));
$firstname = mysqli_real_escape_string($dbc, trim($_POST['firstname']));
$lastname = mysqli_real_escape_string($dbc, trim($_POST['lastname']));
$phone = mysqli_real_escape_string($dbc, trim($_POST['phone']));
$role = mysqli_real_escape_string($dbc, $_POST['role']);
if($_POST['unit'] == 'nill')
{
$unit = 'Finance';
}
else{
$unit = $_POST['unit'];
}
$query= "SELECT username, email FROM users WHERE email ='$email' OR username='$username'";
$run = mysqli_query($dbc, $query) or die(mysqli_error($dbc));
if(mysqli_num_rows($run) == 0)
{
$sql = "INSERT INTO users (username, password, email, firstname, lastname, phone, role, unit) VALUES('$username',sha1('$password'),'$email','$firstname','$lastname','$phone','$role', '$unit')";
mysqli_query($dbc, $sql) or die(mysqli_error($dbc));
//Load composer's autoloader
require 'PHPMailer3/src/Exception.php';
require 'PHPMailer3/src/PHPMailer.php';
require 'PHPMailer3/src/SMTP.php';
$mail = new PHPMailer(true); // Passing `true` enables exceptions
try {
// $mail->SMTPDebug = 2; // Enable verbose debug output
$mail->isSMTP(); // Set mailer to use SMTP
$mail->Host = 'genesisgroupng.com'; // Specify main and backup SMTP servers
$mail->SMTPAuth = true; // Enable SMTP authentication
$mail->Username = '<EMAIL>'; // SMTP username
$mail->Password = '<PASSWORD>'; // SMTP password
//$mail->SMTPSecure = 'tls'; // Enable TLS encryption, `ssl` also accepted
$mail->Port = 25; // TCP port to connect to
//Recipients
$mail->setFrom('<EMAIL>', 'Finhub');
//Recipients
//$mail->addAddress('<EMAIL>', 'qsrbi'); // Add a recipient
$mail->addAddress($email, $firstname);
//Content
$mail->isHTML(true); // Set email format to HTML
$mail->Subject = 'Account Credentials For The Finance Portal';
$mail->Body = "<h4>Hello,</h4>
<br />
An account has been created for you on the Finance System. Here are the details <br />
Username: ".$username."<br />
Password: ".$<PASSWORD>." <br/ >
Portal Link : http://genesisgroupng.com/financehub+/".$role." <br />
Cheers";
$mail->AltBody = strip_tags($mail->Body);
$mail->send();
} catch (Exception $e) {
echo 'Message could not be sent.';
//commented in case of errors
echo 'Mailer Error: ' . $mail->ErrorInfo;
}
echo "Member Added Successfully";
}
else
{
echo "Username or Email Already Exists";
}
}
?><file_sep>/supervisor/delete_user.php
<?php
session_start();
if(isset($_SESSION['admin']))
{
include "inc/header.php";
include "../includes/connect.php";
if(isset($_GET['id']))
{
$id = $_GET['id'];
$sql="SELECT * FROM users where id = '$id'";
$result = mysqli_query($dbc, $sql) or die (mysqli_error($dbc));
if($_SERVER['REQUEST_METHOD'] == 'POST')
{
$md = $_POST['md'];
if($_POST['sure'] =='Yes')
{
$q = "DELETE FROM users WHERE id='$md'";
$r = mysqli_query($dbc, $q);
if($r)
{
echo"<p align=\"center\" style=\"color:blue\"><font face=\"comic sans ms\" size=\"4\">User Removed successfully</font><br />
<a href=\"view_users.php\" style=\"color:blue\" ><font face=\"comic sans ms\" size=\"3\">Click here </font></a></p>";
}
}
if($_POST['sure'] == 'No')
{
header("Location: view_users.php");
}
}
else
{
if(isset($_GET['id']))
{
$mid = $_GET['id'];
}
else
{
echo"<p align=\"center\" style=\"color:#C00\"><font face=\"comic sans ms\" size=\"4\">Page accessed in error</font><br /></p>";
exit();
}
echo'
<div class="wrapper wrapper-content">
<div class="row wrapper border-bottom white-bg page-heading">
<div class="col-lg-10">
<h2>Admin</h2>
<ol class="breadcrumb">
<li>
<a href="view_users.php">Users</a>
</li>
<li>
<a>View All Users</a>
</li>
</ol>
</div>
<div class="col-lg-2">
</div>
</div>
<br /><br />
<form action="" method="post">
<p align="center"><font size="3">Do you really want to delete this User?</font><br />
<input type="radio" name="sure" value="Yes" />Yes  
<input type="radio" name="sure" value="No" />No<br /><br />
<input type="hidden" name="md" value="'.$mid.'" /></p>
<p align="center">
<input class="form-control" type="submit" value="submit" style="width:150px" /><br />
</p>
</form>
';
}
}else{
echo "There was an error, please try again";
}
?>
<?php include 'inc/footer.php';
}
else{
header("Location:index.php");
}
?><file_sep>/supervisor/filter.php
<?php
if($_SERVER['REQUEST_METHOD'] == "POST")
{
require "../includes/connect.php";
$from = $_POST['from'];
$from = str_replace("-","/", $from);
$from = date('d/m/Y',strtotime($from));
$to = $_POST['to'];
$to = str_replace("-","/", $to);
$to = date('d/m/Y',strtotime($to));
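    // Note: $from/$to above are formatted as d/m/Y, so the BETWEEN comparisons below
    // assume forecastdate is stored in that same d/m/Y text format; if forecastdate is a
    // real MySQL DATE column, Y-m-d values would be needed instead.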
// echo $from .' '. $to;
// define the array that will contain all result sets
$array = [];
// create an array for the result set coming from table 1
$array['all']= [];
$q = "SELECT SUM(amount) AS total FROM records WHERE forecastdate BETWEEN '$from' AND '$to'";
$r = mysqli_query($dbc, $q) or die(mysqli_error($dbc));
$row = mysqli_fetch_array($r, MYSQLI_ASSOC);
$array['all'][] = $row;
$array['paid']= [];
$q = "SELECT SUM(amount) AS total FROM records WHERE forecastdate BETWEEN '$from' AND '$to' AND status='paid'";
$r = mysqli_query($dbc, $q) or die(mysqli_error($dbc));
$row = mysqli_fetch_array($r, MYSQLI_ASSOC);
$array['paid'][] = $row;
$array['pending']= [];
$q = "SELECT SUM(amount) AS total FROM records WHERE forecastdate BETWEEN '$from' AND '$to' AND status='pending'";
$r = mysqli_query($dbc, $q) or die(mysqli_error($dbc));
$row = mysqli_fetch_array($r, MYSQLI_ASSOC);
$array['pending'][] = $row;
$array['overdue']= [];
$q = "SELECT SUM(amount) AS total FROM records WHERE forecastdate BETWEEN '$from' AND '$to' AND status='overdue'";
$r = mysqli_query($dbc, $q) or die(mysqli_error($dbc));
$row = mysqli_fetch_array($r, MYSQLI_ASSOC);
$array['overdue'][] = $row;
// return the results formatted as json
echo json_encode($array);
}
?><file_sep>/staff/range.php
<?php
session_start();
if(isset($_SESSION['staff']))
{
include "inc/header.php";
include "../includes/connect.php";
echo'
<div class="wrapper wrapper-content">
<div class="row wrapper border-bottom white-bg page-heading">
<div class="col-lg-8">
<h2>Staff Member</h2>
<ol class="breadcrumb">
<li>
<a href="view_records.php">Records</a>
</li>
<li>
<a>View All Posted Records</a>
</li>
</ol>
</div>
<div class="col-lg-4">
<div class="row">
<form class="form-group" method="POST" action="range.php">
<label>From</label>
<input class="form-control" type="date" name="from" />
<label>To</label>
<input class="form-control" type="date" name="to"/><br />
<button type="submit" name="range" class="btn btn-danger block">Range</button>
</form>
</div>
</div>
</div>
<div class="wrapper wrapper-content animated fadeInRight">
<div class="row">
<div class="col-lg-12">
<div class="ibox float-e-margins">
<div class="ibox-title" height="200px !important">';
if(isset($_POST['range']))
{
$from = $_POST['from'];
$from = str_replace("-","/", $from);
$from = date('d/m/Y',strtotime($from));
$to = $_POST['to'];
$to = str_replace("-","/", $to);
$to = date('d/m/Y',strtotime($to));
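    // As in filter.php, the BETWEEN query below assumes forecastdate is stored as d/m/Y
    // text; the format would need to be Y-m-d if the column is a real DATE.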
echo '<h5>Showing Records From '. (isset($_POST['range'])? $from .' to '. $to : 'Select Your Range' ).'</h5>';
echo'
</div>
<div class="ibox-content">
<div class="table-responsive">
<table class="table table-striped table-bordered table-hover dataTables-example" >
<thead>
<tr>
<th>S/n</th>
<th>Beneficiary</th>
<th>Amount</th>
<th>Description</th>
<th>Entry Date</th>
<th>Forecast Date</th>
<th>Payment Date</th>
<th>Status</th>
<th>Unit</th>
</tr>
</thead>
<tbody>';
$query = "SELECT * FROM records WHERE forecastdate BETWEEN '$from' AND '$to'";
$sql = mysqli_query($dbc, $query) or die(mysqli_error($dbc));
$num = mysqli_num_rows($sql);
$i = 0;
if($num > $i)
{
$counter = 1;
while($row = mysqli_fetch_array($sql, MYSQLI_ASSOC) )
{
echo '
<tr class="gradeX">
<td>'.$counter.'</td>
<td>'.$row['beneficiary'].'</td>
<td>'.$row['amount'].'</td>
<td>'.$row['description'].'</td>
<td>'.$row['entrydate'].'</td>
<td>'.$row['forecastdate'].'</td>
<td>'.$row['paydate'].'</td>
<td>'.$row['status'].'</td>
<td>'.$row['unit'].'</td>
</tr> ';
$counter++;
}
}else{
echo "<td colspan=\"7\">Sorry, there is no Data to display here, Please try Again</td>";
}
}
echo'
</tbody>
</table>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
<!-- Modal for Deleting -->
<!-- Button trigger modal -->
<button type="button" class="btn btn-primary" data-toggle="modal" data-target="#exampleModalCenter">
Launch demo modal
</button>
<!-- Modal -->
<div class="modal fade" id="exampleModalCenter2" tabindex="-1" role="dialog" aria-labelledby="exampleModalCenterTitle" aria-hidden="true">
<div class="modal-dialog modal-dialog-centered" role="document">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title" id="exampleModalLongTitle">Confirm Deletion</h5>
<button type="button" class="close" data-dismiss="modal" aria-label="Close">
<span aria-hidden="true">×</span>
</button>
</div>
<div class="modal-body">
<form action="" method="post">
<p align="center"><font size="3">Do you really want to delete this Record?</font><br />
<input type="radio" name="sure" value="Yes" />Yes  
<input type="radio" name="sure" value="No" />No<br /><br />
<input type="hidden" name="id" id="id" /></p>
<p align="center">
<input class="form-control button-danger" type="submit" name="deletee" value="submit" style="width:150px" /><br />
</p>
</form>
</div>
<div class="modal-footer">
<button type="button" class="btn btn-secondary" data-dismiss="modal">Close</button>
</div>
</div>
</div>
</div>
<!-- End of Modal for editing -->
<!-- Modal for Editing -->
<!-- Modal -->
<div class="modal fade" id="exampleModalCenter" tabindex="-1" role="dialog" aria-labelledby="exampleModalCenterTitle" aria-hidden="true">
<div class="modal-dialog modal-dialog-centered" role="document">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title" id="exampleModalLongTitle">Edit Record </h5>
<button type="button" class="close" data-dismiss="modal" aria-label="Close">
<span aria-hidden="true">×</span>
</button>
</div>
<div class="modal-body">
<form method="POST" class="form-horizontal">
<div class="form-group">
<label for="recipient-name" class="col-form-label">Beneficiary:</label>
<input type="text" class="form-control" id="beneficiary" name="beneficiary">
</div>
<div class="form-group">
<label for="recipient-name" class="col-form-label">Amount:</label>
<input type="text" class="form-control" id="amount" name="amount">
</div>
<input name="record_id" id="record_id" class="form-control" type="hidden" />
<div class="form-group">
<label for="message-text" class="col-form-label">Description:</label>
<textarea class="form-control" id="description" name="description"></textarea>
</div>
<div class="form-group">
<label for="recipient-name" class="col-form-label">Entry Date:</label>
<input type="text" class="form-control" id="entrydate" name="entrydate" placeholder="DD/MM/YYYY">
</div>
<div class="form-group">
<label for="recipient-name" class="col-form-label">Payment Date:</label>
<input type="text" class="form-control" id="paydate" name="paydate" placeholder="DD/MM/YYYY">
</div>
<div class="form-group">
<label for="recipient-name" class="col-form-label">Payment Status:</label>
<select class="form-control m-b" name="status">
<option value="nill">SELECT AN OPTION</option>
<option value="pending">Pending</option>
<option value="due">Due</option>
<option value="paid">Paid</option>
<option value="overdue">OverDue</option>
</select>
</div>
<button class="btn btn-primary" type="submit" name="updatee">Save Changes</button>
</form>
</div>
<div class="modal-footer">
<button type="button" class="btn btn-secondary" data-dismiss="modal">Close</button>
</div>
</div>
</div>
</div>';
include 'inc/footer.php';
}
else{
header("Location:../index.php");
}
<file_sep>/supervisor/forgot-password.php
<?php
if($_SERVER['REQUEST_METHOD'] == "POST")
{
require('../includes/connect.php');
$email = $_POST['email'];
$pass = $_POST['password'];
$q = "SELECT * FROM users WHERE email='$email'";
$r = mysqli_query($dbc, $q) or die(mysqli_error($dbc));
if(mysqli_num_rows($r) == 1)
{
$query = "UPDATE users SET password = <PASSWORD>('$<PASSWORD>') WHERE email= '$email'";
$run = mysqli_query($dbc, $query) or die(mysqli_error($dbc));
if($run)
{
echo "<script type=\"text/javascript\">
alert(\"Password Changed Successfully.\");
window.location = \"index.php\"
</script>";
}
}
else
{
echo "<p align=\"center\" style=\"color:#C00\"><font size=\"4\">Wrong Entry<br />Access Denied..</font></p>";
}
}
?>
<!DOCTYPE html>
<html>
<meta http-equiv="content-type" content="text/html;charset=utf-8" />
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Genesis Finance App | Supervisor</title>
<link href="../assets/css/bootstrap.min.css" rel="stylesheet">
<link href="../assets/font-awesome/css/font-awesome.css" rel="stylesheet">
<link href="../assets/css/animate.css" rel="stylesheet">
<link href="../assets/css/style.css" rel="stylesheet">
</head>
<body class="gray-bg">
<div class="middle-box text-center loginscreen animated fadeInDown">
<div>
<div>
<h1 class="logo-name">FN+</h1>
</div>
<h3>Password Recovery</h3>
<p>To reset your password, Please Input your Email and desired password</p>
<form class="m-t" role="form" method="POST">
<div class="form-group">
<input type="email" name="email" class="form-control" placeholder="Email" required="">
</div>
<div class="form-group">
<input type="password" name="password" class="form-control" placeholder="<PASSWORD>" required="">
</div>
<button type="submit" class="btn btn-danger block full-width m-b">Reset</button>
<a href="index.php"><small>Login Instead?</small></a>
<p class="text-muted text-center"><small>Do not have an account?</small></p>
<a href="mailto:<EMAIL>">Request For An Account</a>
</form>
<p class="m-t"> <small>Genesis Finance App</small> </p>
</div>
</div>
<!-- Mainly scripts -->
<script src="../assets/js/jquery-3.1.1.min.js"></script>
<script src="../assets/js/bootstrap.min.js"></script>
</body>
</html>
|
dee5af8e00cb217cff696bdf44b17f510c25a8f7
|
[
"SQL",
"PHP"
] | 18
|
PHP
|
DT021/Finhub-App
|
d00ec5cc214e99cdf2942d23917964a8a0e5096b
|
1c99be6d33f43c1484f93011c9609aafb266117e
|
refs/heads/master
|
<file_sep>#!/bin/bash
skill fuwu
source ~/.profile
/home/wuxuewu/fuwuqi/base/./fuwu &
<file_sep>#!/bin/bash
skill fuwu
<file_sep>
#include"logicconfigmanager.hpp"
#include "lognamespace.hpp"
#include "enterfunction.hpp"
#include "socketku.hpp"
#include "pthreadstart.hpp"
#include "epoll_ku.hpp"
#include "HandlerList.hpp"
#include "SocketDBClient.hpp"
#include "SocketMsgClient.hpp"
void gateway()
{
int litenfd;
    //Start by reading the game configuration
if(!LOGIC_CONFIG->Init("/home/wuxuewu/fuwuqi/config_xml/"))
return;
    //Connect to the MySQL server
    const struct MysqlServer * mMysqlServer = LOGIC_CONFIG->GetServerMysqlConfig().GetMysqlServerConfig(); //Get the MySQL server IP and port
if(DBCLIENT->CreateSocket(mMysqlServer->ip.c_str(),mMysqlServer->port) < 0)
return;
if(DBCLIENT->ConnectDB() < 0)
return;
    //Connect to the chat server
    const struct MsgServer * mMsgServer = LOGIC_CONFIG->GetServerMysqlConfig().GetMsgServerConfig(); //Get the chat server IP and port
if(MSGCLIENT->CreateSocket(mMsgServer->ip.c_str(),mMsgServer->port) < 0)
return;
if(MSGCLIENT->ConnectDB() < 0)
return;
    //Initialize the game module logging
if(!LogInit("/home/wuxuewu/fuwuqi/"))
{
return;
}
    //Initialize the log that records error messages
if(!MYLOG.Init("/home/wuxuewu/fuwuqi/log/error.txt"))
{
std::cout<<"error log create fail !"<<std::endl;
return;
}
    //Initialize each module's handler
HANDLERLIST->Init();
    //Initialize all condition variables and locks
int Error;
Error = ConditionInit();
if (Error < 0)
{
std::cout<<"ConditionInit fail !"<<Error<<std::endl;
return;
}
    //Start all threads (the four threads: read, write, worker, and timer)
if(pthreadstart() < 0)
return;
    const struct my_server * test = LOGIC_CONFIG->GetServerMysqlConfig().GetServerConfig(); //Get the server IP and port
    // Start TCP/IP
Socket_Ku socket_lei(test->ip.c_str(),test->port);
if(socket_lei.socket_creat() < 0)
return;
if(socket_lei.socket_setsockopt() < 0)
return;
if(socket_lei.socket_bind() < 0)
return;
if(socket_lei.socket_listen() < 0)
return;
    litenfd=socket_lei.socket_listcnfd(); //Get the server's listening fd
    //Start the epoll model
if(EPOLLKU->Epoll_Creat() < 0)
return;
EPOLLKU->Epoll_Add(litenfd);
while(1)
{
        EPOLLKU->Epoll_Wait(&socket_lei); //epoll waits for IO events
}
return;
}
<file_sep>
#ifndef _MAPCONFIG_HPP_
#define _MAPCONFIG_HPP_
#include"xmlnodeconfig.hpp"
#include <map>
struct MapInfo
{
int MapId;
std::string MapName;
};
class MapConfig
{
public:
MapConfig();
~MapConfig();
bool Init(std::string configname);
int InitMapConfig(TiXmlElement *RootElement);
int GetMaxValue(){return MapList.size();}
private:
std::map<int,MapInfo> MapList;
};
#endif<file_sep>
BIN = fuwu
include ../common.mk
<file_sep>
#ifndef _FIGHTSERVERCONFIG_HPP_
#define _FIGHTSERVERCONFIG_HPP_
#include"xmlnodeconfig.hpp"
#include <map>
struct FightServerInfo
{
int FightServerId;
int FightServerPort;
std::string FightServerIp;
    uint32_t UserCount; //Number of connected users, i.e. how many players are currently connected to this fight server
};
class FightServerConfig
{
public:
FightServerConfig();
~FightServerConfig();
bool Init(std::string configname);
int InitFightServerConfig(TiXmlElement *RootElement);
    FightServerInfo* GetFightServerById(int iFightServerId); //Get a fight server's info by its ID
    int GetEmptyFightServerID(); //Get an idle fight server ID for the players of a fight room
    void AddFightServerUserCount(int iId,uint32_t iCount); //Increase a fight server's user count
    void DownFightServerUserCount(int iId,uint32_t iCount); //Decrease a fight server's user count
private:
std::map<int,FightServerInfo> FightServerMap;
};
#endif<file_sep>
#!/bin/bash
source ~/.profile
/home/wuxuewu/fuwuqi/base/./fuwu &
<file_sep>
#ifndef _TIMEEVENT_HPP_
#define _TIMEEVENT_HPP_
#include "RoleObj.hpp"
#include "quanju.hpp"
//This file wraps the functions that run when timer events fire
void Time_MateFetch(void * Data); //Triggered from the matchmaking screen: fires when the match-success prompt appears and a player does not click Accept within 10 seconds
void Time_MateShowHeroFetch(void * Data); //Hero-selection screen: a 30-second limit is enforced, and the event fires to enter the scene
int SendHeroEnterFight(CRoleObj* pRoleObj,int SockIo,uint64_t uUid,ShowHeroRoom* pRoom);
#endif<file_sep>
#ifndef _CHATHANDLER_HPP_
#define _CHATHANDLER_HPP_
#include "Handler.hpp"
#include "../proto/CSmsg.pb.h"
#include "../proto/SSmsg.pb.h"
class ChatHandler : public IHandler
{
public:
ChatHandler();
virtual ~ChatHandler();
virtual int OnClientMsg(const CSMsg& rCSMsg, int iFd);
virtual int OnServerMsg(const SSMsg& rSSMsg);
static void* OnCSMsg(CSMsg& rMsg, uint64_t Uid, CSMsgID eMsgId, int CmdType);
private:
};
#endif<file_sep>
#ifndef _MATEWORK_HPP_
#define _MATEWORK_HPP_
#include "../proto/CSmsg.pb.h"
#include "quanju.hpp"
class MateWork
{
public:
MateWork();
~MateWork();
static int MateFetch(const CSMateFetchReq& rReq);
    static int SendMateFetch(int SockIo,uint64_t Uid); //Broadcast to pop up the match-success screen
    static int SendMateNotSuccess(int SockIo,uint64_t Uid); //Broadcast that matchmaking succeeded but someone did not click Accept, so the game cannot start
    static int SendMateNotUser(int SockIo,uint64_t Uid); //Sent to the players who did not click Accept before the timeout, so their client UI leaves the matchmaking state
    static int QuitMateFetch(const CSQuitMateFetchReq& rReq); //The player is in matchmaking and then suddenly clicks Quit
    static int NotButtonMateFetch(const CSNotButtonMateFetchReq& rReq); //Enough players matched and the Accept/Decline buttons appear, but the player clicks Decline
    static int SuccessButtonMateFetch(const CSSuccessButtonMateFetchReq& rReq); //Enough players matched and the Accept/Decline buttons appear, and the player clicks Accept
    static int EnterHeroShow(Room* PRoom,int iDeleteRoomIndex); //Perform the transition into the hero-selection screen
    static int SendHeroInfo(CRoleObj* pRoleObj,int SockIo,uint64_t uUid,Room* pRoom); //Send the data of the players on the hero-selection screen to the client
    static int EnterHeroShowBag(const CSEnterHeroShowBagReq& rReq); //Choose the cosmetic-bag item ID to display based on the cosmetic bag, excluding the bomb bag
    static int ShowZhaDanBag(const CSShowZhaDanBagReq& rReq,CSShowZhaDanBagRsp* pRsp); //The player selects a bomb
    static int SendUserShowZhaDan(CRoleObj* pRoleObj,uint64_t UserUid,uint64_t Uid,uint32_t ZhaDanId); //Broadcast to all players that this player has selected this bomb
    static int QuitGame(uint64_t Uid,int RoomIndex); //The player quits the game while still in the matchmaking list
private:
};
#endif<file_sep>
#ifndef _UPDATEDATA_HPP_
#define _UPDATEDATA_HPP_
#include "RoleObj.hpp"
#include "../proto/SSmsg.pb.h"
#include "../proto/DBmsg.pb.h"
#include "../proto/CSmsg.pb.h"
enum MsgStatusType
{
GRADE = 1,
RANK = 2,
STATUS = 3,
VIP = 4,
HEAD = 5,
CHATFRAME =6
};
class UpdateData
{
public:
UpdateData();
~UpdateData();
static void* OnSSMsg(SSMsg& rMsg, uint64_t Uid, SSMsgID eMsgId, int CmdType);
    //int Status indicates which state change should be sent to the chat server
    static int UpdateDatabase(CRoleObj* pRoleObj,int Status); //Update all of this user's data in the database (save the data promptly)
    //Whenever a player's state (level, rank, battle status, vip, avatar ID, chat-frame ID) changes, send that player's update to the server
static int UpMsgStatus(uint64_t Uid,uint32_t Value,uint32_t Type);
private:
};
#endif
|
676a16be9fd9731442ce53559de1f70e96f55f4e
|
[
"Makefile",
"C++",
"Shell"
] | 11
|
Shell
|
wuxuewulinux/fuwuqi
|
a4575eae097d6278c47cd13eea24eeb491f64c29
|
594bc423f27feb3742037cf22ce9b537f0bdac25
|
refs/heads/master
|
<repo_name>tied/js-event-example<file_sep>/src/main/resources/js/toggle.js
(function($) {
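    // Note: this example relies on jQuery's .live(), which was deprecated in jQuery 1.7
    // and removed in jQuery 1.9, so it assumes an older jQuery build is loaded on the page.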
// In this extension we add a collapse control to each box
// So, when a box has loaded we add a collapse/expand toggle button to the title bar of the box
$(".example-box").live("example-box-loaded", function() {
console.log("Adding collapse control to: ", this);
$(".box-head > .box-controls", this).append('<span class="box-toggle"></span>');
$(this).addClass("box-expanded");
});
// We'll listen for clicks on any box toggle button
$(".box-toggle").live("click", function() {
console.log("Box toggle clicked: ", this);
var box = $(this).closest(".example-box");
// Rather than actually performing the collapse/expand directly from the toggle click
// we'll delegate the task to another handler by triggering an event.
if (box.hasClass("box-collapsed")) {
box.trigger("box-expand");
} else if (box.hasClass("box-expanded")) {
box.trigger("box-collapse");
}
});
// Here we'll handle the actual collapse/expand events
$(".example-box")
.live("box-collapse", function() {
console.log("Collapse box: ", this);
$(this).addClass("box-collapsed").removeClass("box-expanded");
})
.live("box-expand", function() {
console.log("Expand box: ", this);
$(this).addClass("box-expanded").removeClass("box-collapsed");
});
})(jQuery);
|
c8d317f3e744eba6052f184bf687388a3c1a5400
|
[
"JavaScript"
] | 1
|
JavaScript
|
tied/js-event-example
|
254af95f1908b51f8969895d1c1874a2b02ec946
|
d1641b7c5592d49d1799352f3fe548f4e28b5526
|
refs/heads/master
|
<repo_name>ericgchu/INST126<file_sep>/project2/README.md
# Project 2: Hangman
## Summary
This is a fun hangman game with a twist... You have to guess the encrypted word to discover the decrypted word!
## How It Works
You have 10 chances to guess the word by inputting a letter for every chance.<br/>
Before you guess, you can see how you're doing so far plus the number of chances left.<br/>
It will look something like....<br/>
---<br/>
You have X lives left and you have used these letters: A B C D E<br/>
Current Word: A - - - E - -<br/>
Guess a letter: <br/>
---<br/>
Repeat until you win!
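
Once you guess the full word, the game decodes it for you. The cipher is a simple alternating shift (mirroring the `decrypt()` helper in `hangman.py`): characters at even positions are shifted down one ASCII code, and characters at odd positions are shifted up one. A minimal sketch of the decoding step:

```python
# Sketch of the game's cipher (see decrypt() in hangman.py):
# even-index characters shift down by one code point, odd-index characters shift up by one.
def decrypt(ciphertext):
    plaintext = ""
    for i, ch in enumerate(ciphertext):
        shift = -1 if i % 2 == 0 else 1
        plaintext += chr(ord(ch) + shift)
    return plaintext

print(decrypt("XHO"))  # prints "WIN"
```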
<file_sep>/README.md
# INST126
Repository for INST126 projects
<file_sep>/project1/calculator.py
def main():
# Take the number which will decide the operation
operation = int(input("Please choose a number to complete an operation (on 2 numbers):
\n 1: Addition\n 2: Subtraction\n 3: Mutiplication\n 4: Division\n 5: Compute Power To\n 6:
Add 3 numbers\n 7: Mutiply 3 numbers\n 8: Subtract 3 numbers\n Enter here: "))
# Take int input
number1 = int(input("Please enter the first number:"))
number2 = int(input("Please enter the second number:"))
# Compute calculation
# Add
if operation == 1:
result = number1 + number2
# Subtract
elif operation == 2:
result = number1 - number2
    # Multiply
elif operation == 3:
result = number1 * number2
# Divide
elif operation == 4:
result = number1 / number2
# Take the power of
elif operation == 5:
result = number1 ** number2
# Otherwise we know we take 3 int inputs
else:
number3 = int(input("Please enter the third number:"))
# Add 3 numbers
if operation == 6:
result = number1 + number2 + number3
        # Multiply 3 numbers
elif operation == 7:
result = number1 * number2 * number3
# Subtract 3 numbers
elif operation == 8:
result = number1 - number2 - number3
# Print result
    print("The result is: " + str(result))
main()
<file_sep>/project1/README.md
# Project 1: Calculator
## Summary
This is a simple calculator written in Python3 which can compute operations with 2 or 3 numbers.
## How To Use It
Enter a number for the operation you want to complete.<br/>
Enter the numbers you would like to operate on.<br/>
Your result will be printed for you!<br/>
<file_sep>/project2/hangman.py
import random
# Prints letters guessed with a space between each character
def printLettersGuessed(arr):
new_str = ""
for i in arr:
new_str += " " + i
return new_str
# Creates an array of dashes matching the length of the random word chosen
def convertToLines(word):
new_arr = []
for i in word:
new_arr.append("-")
return new_arr
# Checks if the word is in the already guessed array of chars
def alreadyGuessed(word, arr):
for i in arr:
if word == i:
return True
return False
# Checks if the guessed char is in the word
def guessInWord(guess, chosen):
for i in chosen:
if i == guess:
return True
return False
# Returns an array of indices of everywhere the guessed char is in the word
def getIndexes(guess, chosen):
arr = []
for i in range (0, len(chosen)):
if guess == chosen[i]:
arr.append(i)
return arr
# Prints the array of chars
def printArr(arr):
new_str = ""
for i in arr:
new_str += " " + i
return new_str
# Converts mystery word array into a word by stripping it from the array into a string
def convertToWord(arr):
new_str = ""
for i in arr:
new_str += i
return new_str
# Decrypts the encrypted word by manipulating ASCII values
def decrypt(ciphertext):
plaintext = ""
i = 0
while i < len(ciphertext):
if i % 2 == 0:
plaintext += str(chr(ord(ciphertext[i]) - 1))
else:
plaintext += str(chr(ord(ciphertext[i]) + 1))
i += 1
return plaintext
def main():
    # Choose a random word
words = ["WHDSPQZ" ,"XHO", "TTDBFRT", "QQJYF", "ENQD", "DNPK", "CNTR", "EHWHOD", "TSVMOHOF", "XNOCFQGTM"]
wordToGuess = words[random.randint(0, len(words)-1)]
# Set vars
lives = 10
wordGuessCorrect = False
lettersGuessed = []
# wordToGuess_lines is an array that holds the guesses
wordToGuess_lines = convertToLines(wordToGuess)
# Checks that we have more guess to give & we haven't guessed the correct word yet
while (lives > 0 and wordGuessCorrect == False):
print("\n")
print("You have " + str(lives) + " left and you have used these letters: "
+ printLettersGuessed(lettersGuessed))
print("Current word: " + printArr(wordToGuess_lines))
guess = input("Guess a letter: ").upper()
        # If we already guessed the letter, we repeat the loop with no repercussions
if (alreadyGuessed(guess, lettersGuessed)):
print("You have already used that letter. Guess another letter.")
else:
lettersGuessed.append(guess)
# Checks if the guess is even in the word
if (guessInWord(guess,wordToGuess)):
                # If so, find all indices of its occurrence
indexes = getIndexes(guess, wordToGuess)
                # Place guesses into the indices of occurrence
for i in indexes:
wordToGuess_lines[i] = guess
# If not in word, subtract a life
else:
print("Your letter, " + guess + " is not in the word.")
lives -= 1
# Check if the wordToGuess is already guessed correctly
if convertToWord(wordToGuess_lines) == wordToGuess:
wordGuessCorrect = True
print("\n")
# If word was guessed correctly, then enter this conditional
if (wordGuessCorrect):
print("Good Job! You guessed the word: " + wordToGuess)
print("This was the encrypted message!")
print("The decoded message says: " + decrypt(wordToGuess))
# If we lost lives before we could guess correctly, enter this conditional
else:
print("You died. Sorry. The word was: " + wordToGuess)
print("You'll have to guess the word correctly next time to decode the message!")
main()
<file_sep>/project4/README.md
# Project 4: Visualizing NBA Player Dataset Statistics.
Contributors: <NAME>, <NAME>, <NAME>, <NAME>
## Description
This project focuses on data analysis of each NBA player's statistics in the league during the 2017-2018 season. Those who would want to use this project include NBA scouts, NBA fans, fantasy basketball players, basketball lovers, etc. In terms of the needs this project fulfills, the goal is to present users with a player or list of player suggestions that best fit their needs, as well as to provide player statistics and position statistics. Users are able to input multiple aspects such as age, position, true shooting percentage, player efficiency rating, etc. in order to find the player that best fits their statistical needs. The user may be presented with a list of multiple players that fit their needs or just one. Along with a suggestion based on data analysis, this project is also able to calculate the minimum, maximum, median, mean, and standard deviation of statistics such as true shooting, 3 point attempt rate, and minutes per game for any player the user may input. All in all, this project aims to be the ultimate guide for NBA fans to gain insight into each player's statistics and find what they need in terms of data.
## Update #1:
### Functions Descriptions:
### main()
When main is run it acts as an interface to run a requested method given the user input.
### find_a_player()
This function acts finds multiple players depending on factors a user would input. For example: If I want a player that shoots >30% from the 3 point line, 20% fg percentage, this function would return all the players that meet this requirement. Potentially useful for NBA scouts to create a versatile yet functional team.
### get_player_position_stat()
The function allows users to input a certain position and will be returned with the avg minutes per game, avg true shooting, and avg 3 point attempt rate in which the position is.
### retrieve_playerstat()
This function allows users to input a certain player's name as well as input a certain statistic they would like to retrieve. Statistics include minimum points (min), maximum points (max), median points for the season (median), average amount of points for the season (avg), standard deviation of True Shooting (True Shooting), average 3 Point Attempt rate (avg 3PA), and average minutes per game (avg MPG).
### Get_team_avg()
This function enables users to get the average minutes per game, true shooting, and 3 point attempts rate per game for a specific team as opposed to a specific position.
## Update #2:
### main()
The main method now takes user input to determine which function should be called. Also the data file is parsed and passed to respective function.
### find_a_player()
When this function is called it can either do a thorough player search or a customized user input player find. The thorough player search ask questions on all aspects regarding ability; player position, minutes played, player efficiency rating, true shooting percentage, 3 Point attempt percentage to find a set of player with matching abilities. The customized user input takes as many reqs or as little requirements the user wants to find his player. I still need to finish printing players that match data.
### get_player_position_stat()
The game dataset will be retrieved from a website that contains all the statistics, and to do that we need to import some packages, for example the pandas library (`import pandas as pd`). After that we will find the mean minutes per game, true shooting, and 3-point attempt rate by looking at the position of each player.
### retrieve_playerstat()
This function allows users to input a certain player's name, then input a certain statistic they would like to retrieve, and retrieve said statistic.
Statistics include: games played (G), minutes played (MP), player efficiency rating (PER), true shooting percentage (TS%), 3 point attempt rate (3PAr), free throw rate (FTr), offensive rebound percentage (ORB%), defensive rebound percentage (DRB%), total rebound percentage (TRB%), assist percentage (AST%), block percentage (BLK%), steal percentage (STL%), and turnover percentage (TOV%)
### Get_team_avg
This function enables users to get the average minutes per game, true shooting, and 3 point attempts rate per game for a specific team as opposed to a specific position.
This function gets the average or mean for the avg mpg, true shooting, and 3 point attempts columns featured in the data set for a specific team. When the function is called a user is asked if they want to get the avg mpg, true shooting, and 3 point shooting for each team. The function will then provide the avg of the specified data for a specific team.
## Final Submission:
### main()
No updates since last update. The main method continues to take user input to determine which function should be called. Also the data file is opened and passed to respective function.
### find_a_player()
When this function is called it can either do a thorough player search or a customized user-input player find. The thorough player search asks questions on all aspects regarding ability (player position, minutes played, player efficiency rating, true shooting percentage, and 3-point attempt percentage) to find a set of players with matching abilities. The customized user input takes as many or as few requirements as the user wants to find his player. I still need to finish printing players that match the data.
Example #1: Thorough Player Search
Enter player position (PG: Point Guard, PF: Power Forward, SG: Shooting Guard, SF: Small Forward, C: Center, PF: Point Forward): SG
Enter minimum minutes played per game(as an int): 1200
Enter minimum player efficiency rating (as a float): 13
Enter minimum true shooting percentage (as a float [0-1]): .2
Enter minimum 3 Point attempt percentage (as a float [0-1]): .3
These players match those specs:
{'<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>'}
Example #2: Customized User Input Player Find
Follow the example to input specifications.
'Player Position: XX : Minutes Played Per Game: XX : Player Efficiency Rating: XX : True Shooting %: XX : 3-Point Attempt Rate: XX'
Enter Here:
Player Position: PF : Minutes Played Per Game: 23 : Player Efficiency Rating: 3 : True Shooting %: .4 : 3-Point Attempt Rate: .4
These players match those specs:
{'<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>'}
### compute_players()
This is a new function which takes multiple params - df, position, minutes, rating, shooting, three_point - to find players tailored to the requirements the user wants. It filters the dataframe and removes duplicate players to finally display the players meeting the specifications.
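As an illustrative sketch (not the project's exact code), the filtering compute_players() performs can be expressed with pandas boolean masks. The `MP`, `PER`, `TS%`, and `3PAr` column names follow the abbreviations listed under retrieve_playerstat() below; the `Player` and `Pos` column names are assumptions about the dataset.

```python
import pandas as pd

def compute_players(df, position, minutes, rating, shooting, three_point):
    """Return the set of players meeting every minimum specification.

    Illustrative sketch only; assumes the season dataframe has columns
    'Player', 'Pos', 'MP', 'PER', 'TS%', and '3PAr'.
    """
    mask = (
        (df["Pos"] == position)
        & (df["MP"] >= minutes)
        & (df["PER"] >= rating)
        & (df["TS%"] >= shooting)
        & (df["3PAr"] >= three_point)
    )
    # Drop duplicate rows (e.g. players traded mid-season) before returning names.
    return set(df.loc[mask, "Player"].drop_duplicates())

# Example usage, assuming a hypothetical CSV export of the 2017-18 dataset:
# df = pd.read_csv("nba_2017_18.csv")
# compute_players(df, "SG", 1200, 13, 0.2, 0.3)
```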
### retrieve_playerstat()
This function allows users to input a certain player's first and last name and then input a certain statistic they would like to retrieve. The code will then retrieve said statistic and return it to the user.
Statistics include: games played (G), minutes played (MP), player efficiency rating (PER), true shooting percentage (TS%), 3 point attempt rate (3PAr), free throw rate (FTr), offensive rebound percentage (ORB%), defensive rebound percentage (DRB%), total rebound percentage (TRB%), assist percentage (AST%), block percentage (BLK%), steal percentage (STL%), and turnover percentage (TOV%)
### get_player_position_stat()
The main focus is to get the player game stats by looking at their positions in the game. We will then figure out the 3-point attempt average, true shooting, and average minutes per game. The code is designed to retrieve the mean values for specific columns from the dataset provided.
### get_team_avg()
The get_team_avg() function calculates the average minutes per game that was played by each player on the team. It also calculates the average 3 point attempt rate per team as well as the average true shooting percentage for each team.
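A rough pandas sketch of this kind of per-team (and per-position) aggregation; again this is illustrative rather than the project's actual code, and the `Tm` and `Pos` column names are assumptions:

```python
import pandas as pd

def get_team_avg(df, team):
    """Average minutes played, true shooting %, and 3-point attempt rate for one team.

    Illustrative sketch; assumes columns 'Tm', 'MP', 'TS%', and '3PAr'.
    """
    team_rows = df[df["Tm"] == team]
    return team_rows[["MP", "TS%", "3PAr"]].mean()

# get_player_position_stat() follows the same pattern, grouped by position instead:
# df.groupby("Pos")[["MP", "TS%", "3PAr"]].mean()
```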
|
213ec325bde7a9f2208e06f47e04999c0a686d88
|
[
"Markdown",
"Python"
] | 6
|
Markdown
|
ericgchu/INST126
|
e743ea19482704a376e29a6bba718c32eedfd9df
|
7f2d67b7d08716a3bdd0b0eb627c95e3b5394e39
|
refs/heads/master
|
<repo_name>Ashwin-Raj18/doc-room<file_sep>/client/src/reducers/profile.js
import {
GET_PROFILE,
PROFILE_ERROR,
CLEAR_PROFILE,
UPDATE_PROFILE,
GET_PROFILES,
GET_ARTICLES,
GET_DPS
} from '../actions/types';
const initialState = {
profile : null,
profiles : [],
researchPublications : [],
loading : true,
error : {},
articles : [],
dpPics : []
};
export default function (state = initialState, action) {
const { type, payload } = action;
switch (type) {
case GET_PROFILE:
case UPDATE_PROFILE:
return {
...state,
profile : payload,
loading : false
};
case GET_PROFILES:
return {
...state,
profiles : payload,
loading : false
};
case CLEAR_PROFILE:
return {
...state,
profile : null,
researchPublications : []
};
case PROFILE_ERROR:
return {
...state,
error : payload,
loading : false,
profile : null
};
case GET_ARTICLES:
return {
...state,
articles : payload
};
case GET_DPS:
return {
...state,
dpPics : payload
};
default:
return state;
}
}
<file_sep>/README.md
# doc-room
Social platform for doctors to connect with each other.
This is a full-stack MERN application.
Mongoose is used for object modelling.
Cloud database MongoDB Atlas for storage.
React-redux, thunk for state management.
connect and mapStateToProps for state in components.
In client/components/posts/Post.js, useSelector() and useDispatch() are used to demonstrate a more modern way of mapping state into components.
Live App: http://www.doc-room.tk/
<file_sep>/routes/api/FileUpload.js
const express = require('express');
const router = express.Router();
const auth = require('../../middleware/auth');
const Profile = require('../../models/Profile');
var fs = require('fs');
const path = require('path');
module.exports = (upload) => {
/*
POST: Upload a single image/file to Image collection
*/
router
.route('/updateDisplayPic')
.post(upload.single('profileImg'), auth, (req, res, next) => {
const url = req.protocol + '://' + req.get('host');
Profile.findOne({ user: req.user.id })
.then((profile) => {
//delete previous pic if exists
if (profile.displayPic.filePath != '') {
fs.unlink(path.join(__basedir, profile.displayPic.filePath), function (
error
) {
if (error) {
console.log(error);
return;
}
console.log('Deleted file :', profile.displayPic.filePath);
});
}
//change picture
profile.displayPic = {
fileUrl : url + '/assets/userDp/' + req.file.filename,
filePath : 'public/assets/userDp/' + req.file.filename
};
profile
.save()
.then((result) => {
res.status(200).json(result);
})
.catch((err) => res.status(500).json(err));
})
.catch((err) => res.status(500).json(err));
});
return router;
};
<file_sep>/client/src/components/profile/Article.js
import React, { Fragment } from 'react';
const Article = ({ article }) => {
return (
<Fragment>
{article.image.length > 0 && (
<div className="article__img__container">
<img className="article__image" src={article.image[0]['url']} alt="" />
</div>
)}
<div
onClick={() => {
window.location.replace(article.url);
}}
className="article__content"
>
<div className="article__description">{article.description}</div>
</div>
</Fragment>
);
};
export default Article;
<file_sep>/client/src/components/posts/Posts.js
import React, { Fragment, useEffect } from 'react';
import { useSelector } from 'react-redux';
import { useDispatch } from 'react-redux';
import PostItem from './PostItem';
import PostForm from './PostForm';
import { getPosts } from '../../actions/post';
import { getDpsByIds } from '../../actions/profile';
//using react-redux hooks to connect in this component
const Posts = () => {
//instead of mapStateToProps and connect
const { posts } = useSelector((state) => state.post);
const dps = useSelector((state) => state.profile.dpPics);
//instead of passing dispatching function in connect
const dispatch = useDispatch();
useEffect(
() => {
dispatch(getPosts());
},
[ dispatch ]
);
useEffect(
() => {
let users = posts.map((post) => post.user);
if (users.length > 0) {
dispatch(getDpsByIds({ userIds: users }));
}
},
[ posts, dispatch ]
);
const getDpUrl = (userId) => {
let ret = '';
dps.forEach((dp) => {
if (dp.user === userId && dp.displayPic.fileUrl !== '') {
ret = dp.displayPic.fileUrl;
}
});
return ret;
};
const getPostElement = (post) => {
let dpUrl = getDpUrl(post.user);
return <PostItem key={post._id} post={post} dpPic={dpUrl} />;
};
return (
<Fragment>
<h1 className="large text-primary">Posts</h1>
<p className="lead">
<i className="fas fa-user" /> Welcome to the community
</p>
<PostForm />
<div className="posts">{posts.map((post) => getPostElement(post))}</div>
</Fragment>
);
};
export default Posts;
<file_sep>/client/src/components/dashboard/DashboardActions.js
import React, { useState, useEffect, useRef } from 'react';
import { Link } from 'react-router-dom';
import { useSelector } from 'react-redux';
import { useDispatch } from 'react-redux';
import { getCurrentProfile } from '../../actions/profile';
import { updateDp } from '../../actions/profile';
const DashboardActions = () => {
const { displayPic, user: { avatar } } = useSelector((state) => state.profile.profile);
const dispatch = useDispatch();
const [ dpProgress, setdpProgress ] = useState(0);
const handleFileUpload = ({ target: { files } }) => {
console.log(files[0]);
let data = new FormData();
data.append('profileImg', files[0]);
const options = {
onUploadProgress : (progressEvent) => {
const { loaded, total } = progressEvent;
let percent = Math.floor(loaded * 100 / total);
console.log(`${loaded}kb of ${total}kb`);
setdpProgress(percent);
if (percent === 100) {
setTimeout(() => {
setdpProgress(0);
}, 4000);
}
}
};
dispatch(updateDp(data, options));
};
const inputFile = useRef(null);
const onUploadClick = () => {
inputFile.current.click();
};
useEffect(
() => {
dispatch(getCurrentProfile());
},
[ dispatch ]
);
return (
<div className="dash-buttons">
<div className="dp_wrapper">
<img
alt="dp_img"
src={
displayPic.fileUrl && displayPic.fileUrl !== '' ? displayPic.fileUrl : avatar
}
/>
</div>
{dpProgress !== 0 ? (
<div className="dp__progress">Uploading :{dpProgress}%</div>
) : (
''
)}
<div className="upload__progress" />
<div onClick={onUploadClick} className="btn btn-light">
<input
style={{ display: 'none' }}
accept=".jpg,.png,.gif"
ref={inputFile}
onChange={handleFileUpload}
type="file"
/>
<i className="fas fa-user-circle text-primary" /> Change Profile Picture
</div>
<Link to="/edit-profile" className="btn btn-light">
<i className="fas fa-user-circle text-primary" /> Edit Profile
</Link>
<Link to="/add-experience" className="btn btn-light">
<i className="fab fa-black-tie text-primary" /> Add Experience
</Link>
<Link to="/add-education" className="btn btn-light">
<i className="fas fa-graduation-cap text-primary" /> Add Education
</Link>
</div>
);
};
export default DashboardActions;
<file_sep>/client/src/components/layout/Footer.js
import React from 'react';
function Footer () {
return (
<div className="footer">
<div className="footer__content__left">
<h5>Designed and developed by <NAME></h5>
<h5>Web-app specs:</h5>
<ul className="stack__list">
<li> MERN stack </li>
<li> React-redux, Thunk for state management</li>
<li> Mongoose for object modelling</li>
<li> Hosted on heroku</li>
</ul>
</div>
</div>
);
}
export default Footer;
|
db2277ea843a92c41aaf8170de1af6a22fa8fcda
|
[
"JavaScript",
"Markdown"
] | 7
|
JavaScript
|
Ashwin-Raj18/doc-room
|
415683525da93194b1f48c8c261e2ae6666a71cf
|
21d17eb7613e3b17484a8279f1cd463ae84e678e
|
refs/heads/master
|
<repo_name>Gerrri/test<file_sep>/test/src/test/test23.java
package test;
public class test23 {
}
|
32b16e7385ef8afb96db74a11066b761075e31c9
|
[
"Java"
] | 1
|
Java
|
Gerrri/test
|
03c599b3e8f32441edf2f16f90678b243bd62e96
|
bd1929e522e182637efcf9ccb4f98cd398f3138a
|
refs/heads/master
|
<file_sep>import { Component, OnInit, Input, ViewEncapsulation} from '@angular/core';
@Component({
selector: 'app-form',
templateUrl: './form.component.html',
styleUrls: ['./form.component.scss'],
encapsulation: ViewEncapsulation.None
})
export class FormComponent implements OnInit {
@Input() title = '';
name: Boolean = false;
email: Boolean = false;
message: Boolean = false;
submitDisabled = true;
ngOnInit() {
const contactForm = document.getElementById('contactForm');
contactForm.setAttribute('action', 'https://formspree.io/' + 'info.xenonlabs' + '@' + 'gmail' + '.' + 'com');
}
onNameChange(e: any) {
if (e) {
this.name = true;
} else {
this.name = false;
}
this.checkSubmit();
}
onMailChange(e: any) {
if (e) {
this.email = true;
} else {
this.email = false;
}
this.checkSubmit();
}
onMessageChange(e: any) {
if (e) {
this.message = true;
} else {
this.message = false;
}
this.checkSubmit();
}
  checkSubmit() {
    // Re-evaluate on every change so the button is disabled again if a field is cleared
    this.submitDisabled = !(this.name && this.email && this.message);
  }
}
<file_sep>[](https://travis-ci.com/Xenon-Labs/website)
# Website
Xenon Labs Website
<file_sep>import { Component, OnInit, Input } from '@angular/core';
declare var $: any;
@Component({
selector: 'app-skills',
templateUrl: './skills.component.html',
styleUrls: ['./skills.component.scss']
})
export class SkillsComponent implements OnInit {
@Input() title: String = '';
@Input() skills: any = [];
constructor() { }
ngOnInit() {
this.animationEffect();
}
/*
    Determine when an animated element is in view.
    Elements in view trigger our animation.
*/
animationEffect() {
$(document).ready(function () {
// Window and animation items
const animation_elements = $.find('.animation-element');
const web_window = $(window);
// Check to see if any animation containers are currently in view
function check_if_in_view() {
// Get current window information
const window_height = web_window.height();
const window_top_position = web_window.scrollTop();
const window_bottom_position = (window_top_position + window_height);
// Iterate through elements to see if its in view
$.each(animation_elements, function () {
        // Get the element's information
const element = $(this);
const element_height = $(element).outerHeight();
const element_top_position = $(element).offset().top;
const element_bottom_position = (element_top_position + element_height);
// Check to see if this current container is visible
// Its viewable if it exists between the viewable space of the viewport
if ((element_bottom_position >= window_top_position) && (element_top_position <= window_bottom_position)) {
element.addClass('in-view');
} else {
element.removeClass('in-view');
}
});
}
      // On scroll or resize, detect elements in view
$(window).on('scroll resize', function () {
check_if_in_view();
});
// Trigger our scroll event on initial load
$(window).trigger('scroll');
});
}
}
<file_sep>import { Component, OnInit, Input } from '@angular/core';
import { DomSanitizer } from '@angular/platform-browser';
import { DeviceDetectorService } from 'ngx-device-detector';
@Component({
selector: 'app-footer',
templateUrl: './footer.component.html',
styleUrls: ['./footer.component.scss']
})
export class FooterComponent implements OnInit {
@Input() home_links = [];
@Input() social_links = [];
deviceInfo: any = null;
isMobile: Boolean = false;
isTablet: Boolean = false;
isDesktop: Boolean = false;
constructor(private sanitizer: DomSanitizer, private deviceService: DeviceDetectorService) { }
ngOnInit() {
this.getDeviceInfo();
}
sanitize(url: string) {
return this.sanitizer.bypassSecurityTrustUrl(url);
}
getDeviceInfo() {
this.deviceInfo = this.deviceService.getDeviceInfo();
this.isMobile = this.deviceService.isMobile();
this.isTablet = this.deviceService.isTablet();
this.isDesktop = this.deviceService.isDesktop();
}
}
<file_sep>import { Component, OnInit } from '@angular/core';
@Component({
selector: 'app-home',
templateUrl: './home.component.html',
styleUrls: ['./home.component.scss']
})
export class HomeComponent implements OnInit {
skills_title: String = 'What We Build';
skills = [
{
title: 'Android Applications',
icon: 'fab fa-android fa-5x'
},
{
title: 'iOS Applications',
icon: 'fab fa-apple fa-5x'
},
{
title: 'Web Applications',
icon: 'fas fa-window-maximize fa-5x'
}
];
process_title: String = 'Our Process';
processes = [
{
title: 'Design Collaboratively',
icon: 'fas fa-pencil-alt fa-4x '
},
{
title: 'Build Product',
icon: 'fas fa-laptop-code fa-4x '
},
{
title: 'Review and Test',
icon: 'fas fa-screwdriver fa-4x'
},
{
title: 'Launch',
icon: 'fas fa-rocket fa-4x'
}
];
contact_title: String = 'CONTACT US';
constructor() { }
ngOnInit() {
}
}
<file_sep>import { Component, OnInit, AfterViewInit } from '@angular/core';
declare var $: any;
@Component({
selector: 'app-root',
templateUrl: './app.component.html',
styleUrls: ['./app.component.scss']
})
export class AppComponent implements OnInit, AfterViewInit {
title = 'NoNameBrand';
home_links = [
{
title: 'About Us',
link: '#about'
},
{
title: 'What We Build',
link: '#skills'
},
{
title: 'Our Process',
link: '#process'
},
{
title: 'Contact',
link: '#contact'
}
];
social_links = [
// {
// title: 'Twitter',
// icon: 'fa fa-twitter-square',
// link: 'https://twitter.com'
// uri: ''
// },
// {
// title: 'LinkedIn',
// icon: 'fa fa-linkedin-square',
// link: 'https://linkedin.com',
// uri: ''
// },
{
title: 'Facebook',
icon: 'fab fa-facebook-square',
link: 'https://www.facebook.com/Xenon-Labs-302160900396600/',
uri: 'fb://302160900396600'
},
{
title: 'Instagram',
icon: 'fab fa-instagram',
link: 'https://www.instagram.com/xenon_labs/',
uri: 'instagram://user?username=xenon_labs'
}
];
ngOnInit() { }
ngAfterViewInit() {
this.grayscaleJS();
this.landingPage();
}
landingPage() {
$(document).ready(function () {
// Landing page background size
$('#home ').css({
'padding-top': $(window).height() / 2.2,
'padding-bottom': $(window).height() / 1.8
});
});
$(window).resize(function() {
$('#home ').css({
'padding-top': $(window).height() / 2.2,
'padding-bottom': $(window).height() / 1.8
});
});
}
grayscaleJS() {
// JQuery stuff from Grayscale.js
$(document).ready(function () {
// Smooth scrolling using jQuery easing
$('a.js-scroll-trigger[href*="#"]:not([href="#"])').click(function (e) {
// if (location.pathname.replace(/^\//, '') === this.pathname.replace(/^\//, '') && location.hostname === this.hostname) {
let target = $(this.hash);
target = target.length ? target : $('[name=' + this.hash.slice(1) + ']');
if (target.length) {
$('html, body').animate({
scrollTop: (target.offset().top - 70)
}, 1000, 'easeInOutExpo');
return false;
}
// }
});
// Closes responsive menu when a scroll trigger link is clicked
$('.js-scroll-trigger').click(function (e) {
$('.navbar-collapse').collapse('hide');
});
// Activate scrollspy to add active class to navbar items on scroll
$('body').scrollspy({
target: '#mainNav',
offset: 100
});
// Collapse Navbar
const navbarCollapse = function () {
if ($('#mainNav').offset().top > 100) {
$('#mainNav').addClass('navbar-shrink');
} else {
$('#mainNav').removeClass('navbar-shrink');
}
};
// Collapse now if page is not at top
navbarCollapse();
// Collapse the navbar when page is scrolled
$(window).scroll(navbarCollapse);
});
}
}
|
2e8af3cd636ddeed158934c6de78722dd9127794
|
[
"Markdown",
"TypeScript"
] | 6
|
TypeScript
|
Xenon-Labs/website
|
7d751e89dc5052be8067b026547e10ab8418ede1
|
05949b2b5994a59793c0585b2721ec8ecbaaa3de
|
refs/heads/master
|
<file_sep>import argparse
import mechanicalsoup
from discover import discover, print_discover_output
from test import test, print_test_output
from util import find_cookies
def parser_init():
main_parser = argparse.ArgumentParser(add_help=False)
main_parser.add_argument('--custom-auth', nargs='?', type=str)
main_parser.add_argument('url')
parser = argparse.ArgumentParser()
subparsers = parser.add_subparsers(dest='command')
discover_parser = subparsers.add_parser('discover', parents=[main_parser])
discover_parser.add_argument('--common-words', nargs='?', type=str, required=True)
discover_parser.add_argument('--extensions', nargs='?', type=str)
test_parser = subparsers.add_parser('test', parents=[main_parser])
test_parser.add_argument('--common-words', nargs='?', type=str, required=True)
test_parser.add_argument('--extensions', nargs='?', type=str)
test_parser.add_argument('--vectors', nargs='?', type=str, required=True)
test_parser.add_argument('--sanitized-chars', nargs='?', type=str)
test_parser.add_argument('--sensitive', nargs='?', type=str, required=True)
test_parser.add_argument('--slow', nargs='?', type=int, default=500)
return parser
def discover_command(browser, args):
print("Now discovering: " + args.url)
formatted_pages, guesses, form_inputs, pages, query_param_pages = discover(browser, args)
cookies = find_cookies(browser)
print_discover_output(formatted_pages, guesses, form_inputs, cookies)
def test_command(browser, args):
print("Now testing: " + args.url)
formatted_pages, guesses, form_inputs, pages, query_param_pages = discover(browser, args)
unsanitized_count, leak_count, response_count, slow_count = test(browser, args, formatted_pages, pages, query_param_pages, form_inputs)
cookies = find_cookies(browser)
print_discover_output(formatted_pages, guesses, form_inputs, cookies)
print_test_output(unsanitized_count, leak_count, response_count, slow_count)
if __name__ == '__main__':
parser = parser_init()
args = parser.parse_args()
print("Starting fuzzing operations...")
browser = mechanicalsoup.StatefulBrowser()
# Make sure url ends with /
if args.url[-1] != '/':
args.url += '/'
if args.command == 'discover':
discover_command(browser, args)
elif args.command == 'test':
test_command(browser, args)
<file_sep>from collections import defaultdict
import urllib.parse
from util import *
def read_args(args):
# Populate extensions from file or with default values
exts = []
if args.extensions:
read_file(args.extensions, exts)
else:
print('Using default file extensions...')
exts.append('.php')
exts.append('')
# Populate paths from common_words file
paths = []
if args.common_words:
read_file(args.common_words, paths)
return exts, paths
def dvwa_auth(browser):
url = 'http://localhost/'
# Go to setup page and reset the database
browser.open(urllib.parse.urljoin(url, '/setup.php'))
browser.select_form('form[action="#"]')
browser.submit_selected()
# Go to login page and login as admin
browser.open(urllib.parse.urljoin(url, '/login.php'))
browser.select_form('form[action="login.php"]')
browser['username'] = 'admin'
browser['password'] = '<PASSWORD>'
browser.submit_selected()
# Change security level to low
browser.open(urllib.parse.urljoin(url, '/security.php'))
browser.select_form('form[action="#"]')
browser['security'] = 'low'
browser.submit_selected()
def page_guessing(browser, url, paths, exts, pages):
if browser.open(url).status_code == 200:
pages.add(url)
for path in paths:
for ext in exts:
page = urllib.parse.urljoin(url, path + ext)
# Do not go to logout.php
if 'logout.php' in page:
continue
resp = browser.open(page)
if resp.status_code == 200:
pages.add(page)
return pages
def page_crawling(browser, url, pages):
crawl_pages = set()
crawl_pages.update(pages)
visited_pages = set()
# Begin crawl search
while len(crawl_pages) > 0:
page = crawl_pages.pop()
visited_pages.add(page)
if 'logout' in page:
continue
resp = browser.open(page)
if resp.soup is None or resp.status_code != 200:
pages.remove(page)
continue
links = browser.links()
for link in links:
href = ''
link_href = link.get('href')
if link_href.startswith('?'):
href = urllib.parse.urljoin(page, link_href)
else:
href = urllib.parse.urljoin(url, link_href)
if url in href and href not in visited_pages:
pages.add(href)
crawl_pages.add(href)
def input_crawling(browser, pages):
form_inputs = defaultdict(set)
query_param_pages = set()
for page in pages:
if 'logout' in page:
continue
browser.open(page)
soup = browser.get_current_page()
form_elements = soup.find_all('form')
# Keep pages with different query parameters the same
page_title = page.split('?')[0]
if not form_elements:
form_inputs[page_title] = set()
if '?' in page:
query_param_pages.add(page)
continue
for form in form_elements:
inputs = form.find_all('input')
for input in inputs:
# Prefer to use 'name' over 'value' if possible
if 'name' in input.attrs:
form_inputs[page_title].add(input.attrs['name'])
elif 'value' in input.attrs:
form_inputs[page_title].add(input.attrs['value'])
return form_inputs, query_param_pages
def print_discover_output(formatted_pages, guesses, form_inputs, cookies):
# Print out the links guessed and discovered
print(line_double_sep.format('LINKS FOUND ON PAGE:'))
for page in formatted_pages.keys():
query_params = formatted_pages[page]
# If there exists a query parameter
if len(query_params) > 0:
print("{}, 'query_parameters(?=)': {}".format(page, query_params))
else:
print(page)
print(line_sep.format(''))
print(line_sep.format('LINKS SUCCESSFULLY GUESSED:'))
for guess in guesses:
print(guess)
print(line_sep.format(''))
# Print inputs discovered on each page
print(line_double_sep.format('INPUT FORMS ON PAGES:'))
for page in form_inputs.keys():
print(page)
for input in form_inputs[page]:
print(space_sep.format(input))
if cookies:
print(line_sep.format('COOKIES'))
for cookie in cookies.keys():
print(space_sep.format(cookie + ': ' + cookies[cookie]))
print(line_sep.format(''))
def discover(browser, args):
url = args.url
if args.custom_auth == 'dvwa':
dvwa_auth(browser)
exts, paths = read_args(args)
# First guess the pages
pages = set()
page_guessing(browser, url, paths, exts, pages)
guesses = set()
guesses.update(pages)
# Now discover other pages from pages guessed by crawling
page_crawling(browser, url, pages)
# Reformat links found and query parameters to a list
formatted_pages = defaultdict(list)
for page in pages:
if '?' in page:
parts = page.split('?', 2)
formatted_pages[parts[0]].append(parts[1])
elif not formatted_pages[page]:
formatted_pages[page] = []
# Now discover inputs on each page
form_inputs, query_param_pages = input_crawling(browser, pages)
return formatted_pages, guesses, form_inputs, pages, query_param_pages
<file_sep>line_sep = '====================\n{}'
line_double_sep = '====================\n{}\n===================='
space_sep = ' {}'
def read_file(filename, arr):
with open(filename) as f:
for line in f:
line = line.strip()
arr.append(line)
def find_cookies(browser):
cookies = {}
for cookie in browser.session.cookies:
cookies[cookie.name] = cookie.value
return cookies
<file_sep># fuzzer
SWEN-331 fuzzer project.
@TA I have received an extension on Part 2 to be due on 2020-10-27 @ 11:59PM.
## Assumptions
* Submitting the file upload form results in an OSError. This was counted as an HTTP response error.
* A page and its state after submitting a possible form are two separate cases of data leakage.
* "refresh" the page after submitting a form in case of redirects or HTML changes
## Installation
* Run `pip install -r requirements`. This is run in the [GitLab CI](.gitlab-ci.yml) file.
## Running
* GitLab CI - `.gitlab-ci.yml` file is provided for running with a few different options.
* Local - Run the fuzzer on DVWA or fuzzer-tests with provided .txt files. `[]` are optional flags.
* DVWA:
```sh
python3 fuzzer/fuzz.py discover http://localhost/ [--custom-auth=dvwa] --common-words=common-words.txt [--extensions=common-extensions.txt]
python3 fuzzer/fuzz.py test http://localhost/ [--custom-auth=dvwa] --common-words=common-words.txt [--extensions=common-extensions.txt] --vectors=vectors.txt --sensitive=sensitive.txt [--slow=TIME_MS] [--sanitized-chars=sanitized-chars.txt]
```
* /fuzzer-tests:
```sh
python3 fuzzer/fuzz.py discover http://127.0.0.1/fuzzer-tests --common-words=common-words.txt [--extensions=common-extensions.txt]
python3 fuzzer/fuzz.py test http://127.0.0.1/fuzzer-tests --common-words=common-words.txt [--extensions=common-extensions.txt] --vectors=vectors.txt --sensitive=sensitive.txt [--slow=TIME_MS] [--sanitized-chars=sanitized-chars.txt]
```
<file_sep>pip==20.2.3
beautifulsoup4==4.9.1
certifi==2020.6.20
chardet==3.0.4
idna==2.10
requests==2.24.0
six==1.15.0
soupsieve==2.0.1
urllib3==1.25.10
mechanicalsoup==0.12.0
<file_sep>docker run --rm -p 80:80 andymeneely/swen331fuzzer
<file_sep>from collections import defaultdict
from util import *
def read_args(args):
vectors = []
if args.vectors:
read_file(args.vectors, vectors)
sensitive_data = []
if args.sensitive:
read_file(args.sensitive, sensitive_data)
sanitized_chars = []
if args.sanitized_chars:
read_file(args.sanitized_chars, sanitized_chars)
else:
sanitized_chars = ["<", ">"]
slow = 500
if args.slow:
slow = args.slow
return vectors, sensitive_data, sanitized_chars, slow
def check_pages(browser, pages, sensitive_data, slow):
leak_count = 0
response_count = 0
slow_count = 0
for page in pages:
if 'logout' in page:
continue
resp = browser.open(page)
for leak in sensitive_data:
if leak in resp.text:
leak_count += 1
if resp.elapsed.total_seconds() >= slow / 1000:
slow_count += 1
if resp.status_code != 200:
response_count += 1
return leak_count, response_count, slow_count
def check_sanitization(browser, form_inputs, sensitive_data, chars, slow):
phrase = "foo{}bar"
unsanitized_count = 0
leak_count = 0
response_count = 0
slow_count = 0
for page in form_inputs.keys():
if 'logout' in page:
continue
if not form_inputs[page]:
continue
resp = browser.open(page)
soup = browser.get_current_page()
form_elements = soup.find_all('form')
for char in chars:
for form in form_elements:
# Refresh the page in case a redirect from the form
browser.open(page)
current_form = browser.select_form(form)
inputs = form.find_all('input')
test_phrase = phrase.format(char)
try:
for input in inputs:
if 'name' in input.attrs and input.attrs['type'] != 'submit':
current_form.set(input.attrs['name'], test_phrase)
submit = form.find('input', {'type': 'submit'})
current_form.choose_submit(submit)
resp = browser.submit_selected()
if resp.status_code != 200:
response_count += 1
if resp.elapsed.total_seconds() >= slow / 1000:
slow_count += 1
# Possibly double counts leaks, but let's keep it simple and decide that a page and
            # its state after a form submission are two separate instances
for leak in sensitive_data:
if leak in resp.text:
leak_count += 1
if test_phrase in resp.text:
unsanitized_count += 1
except OSError as e:
# This should count as the form request could not be submitted
response_count += 1
continue
return unsanitized_count, leak_count, response_count, slow_count
def print_test_output(unsanitized_count, leak_count, response_count, slow_count):
print(line_double_sep.format(space_sep.format('TEST RESULTS')))
print('Number of unsanitized inputs: {}'.format(unsanitized_count))
print('Number of possible data leaks: {}'.format(leak_count))
print('Number of HTTP/Response Code Errors: {}'.format(response_count))
print('Number of slow responses: {}'.format(slow_count))
def test(browser, args, formatted_pages, pages, query_param_pages, form_inputs):
vectors, sensitive_data, sanitized_chars, slow = read_args(args)
total_leak_count = 0
total_response_count = 0
total_slow_count = 0
leak_count, response_count, slow_count = check_pages(browser, pages, sensitive_data, slow)
total_leak_count += leak_count
total_response_count += response_count
total_slow_count += slow_count
unsanitized_count, leak_count, response_count, slow_count = check_sanitization(browser, form_inputs, sensitive_data, sanitized_chars, slow)
total_leak_count += leak_count
total_response_count += response_count
total_slow_count += slow_count
return unsanitized_count, total_leak_count, total_response_count, total_slow_count
|
14e96ff7853a469fe96960aff140f31c2305855b
|
[
"Markdown",
"Python",
"Text",
"Shell"
] | 7
|
Python
|
mchaelkha/fuzzer
|
ae0111879776cdc311a57824ddf508704092937d
|
9a5fa5e81876fc443177a23aed5434d8a5ec6b6f
|
refs/heads/main
|
<repo_name>calvogeorge/Pewlett-Hackard-Analysis<file_sep>/Queries/queries.sql
SELECT first_name, last_name
FROM employees
WHERE birth_date BETWEEN '1952-01-01' AND '1955-12-31';
SELECT first_name, last_name
FROM employees
WHERE birth_date BETWEEN '1952-01-01' AND '1952-12-31';
SELECT first_name, last_name
FROM employees
WHERE birth_date BETWEEN '1953-01-01' AND '1953-12-31';
SELECT first_name, last_name
FROM employees
WHERE birth_date BETWEEN '1954-01-01' AND '1954-12-31';
SELECT first_name, last_name
FROM employees
WHERE birth_date BETWEEN '1955-01-01' AND '1955-12-31';
-- Retirement eligibility
SELECT first_name, last_name
FROM employees
WHERE (birth_date BETWEEN '1952-01-01' AND '1955-12-31')
AND (hire_date BETWEEN '1985-01-01' AND '1988-12-31');
SELECT COUNT(first_name)
FROM employees
WHERE (birth_date BETWEEN '1952-01-01' AND '1955-12-31')
AND (hire_date BETWEEN '1985-01-01' AND '1988-12-31');
-- Retirement eligibility
SELECT first_name, last_name
INTO retirement_info
FROM employees
WHERE (birth_date BETWEEN '1952-01-01' AND '1955-12-31')
AND (hire_date BETWEEN '1985-01-01' AND '1988-12-31');
SELECT * FROM retirement_info;
-- Create new table for retiring employees
SELECT emp_no, first_name, last_name
INTO retirement_info
FROM employees
WHERE (birth_date BETWEEN '1952-01-01' AND '1955-12-31')
AND (hire_date BETWEEN '1985-01-01' AND '1988-12-31');
-- Joining departments and dept_manager tables
SELECT d.dept_name,
dm.emp_no,
dm.from_date,
dm.to_date
FROM departments AS d
INNER JOIN dept_manager AS dm
ON d.dept_no = dm.dept_no;
-- Joining retirement_info and dept_emp tables
SELECT ri.emp_no,
ri.first_name,
ri.last_name,
de.to_date
INTO current_emp
FROM retirement_info AS ri
LEFT JOIN dept_emp AS de
ON ri.emp_no = de.emp_no
WHERE de.to_date = ('9999-01-01');
select *
--INTO sales_devp_retirement_info
FROM dept_info
WHERE (dept_name = 'Sales')
OR (dept_name = 'Development');
select *
--INTO sales_devp_retirement_info
FROM dept_info
WHERE dept_name IN ('Sales', 'Development')<file_sep>/Queries/Employee_Database_challenge.sql
-- Deliverable 1
SELECT e.emp_no,
e.first_name,
e.last_name,
t.title,
t.from_date,
t.to_date
INTO retirement_titles
FROM employees AS e
LEFT JOIN titles AS t
ON e.emp_no = t.emp_no
WHERE (birth_date BETWEEN '1952-01-01' AND '1955-12-31')
ORDER BY e.emp_no;
SELECT DISTINCT ON (emp_no) emp_no,
first_name,
last_name,
title
INTO unique_titles
FROM retirement_titles
ORDER BY emp_no, to_date DESC;
SELECT COUNT(emp_no) AS "employee_count",
title
INTO retiring_titles
FROM unique_titles
GROUP BY (title)
ORDER BY employee_count DESC;
-- Mentorship Program (Deliverable 2)
SELECT DISTINCT ON (emp_no) e.emp_no,
e.first_name,
e.last_name,
e.birth_date,
de.from_date,
de.to_date,
t.title
INTO mentorship_eligibilty
FROM employees AS e
INNER JOIN dept_emp AS de
ON e.emp_no = de.emp_no
INNER JOIN titles as t
ON e.emp_no = t.emp_no
WHERE (birth_date BETWEEN '1965-01-01' AND '1965-12-31')
AND (de.to_date= ('9999-01-01'))
ORDER BY e.emp_no;
--Extra Queries
-- updated Deliverable 1 query for summary table
SELECT DISTINCT ON (emp_no) e.emp_no,
e.first_name,
e.last_name,
t.title,
t.from_date,
t.to_date,
de.dept_no
INTO current_retirement_titles
FROM employees AS e
INNER JOIN titles AS t
ON e.emp_no = t.emp_no
INNER JOIN dept_emp AS de
ON e.emp_no = de.emp_no
WHERE (birth_date BETWEEN '1952-01-01' AND '1955-12-31')
AND (de.to_date= ('9999-01-01'))
ORDER BY e.emp_no, t.to_date DESC;
-- current retiring employees by title count
SELECT COUNT(emp_no) AS "employee_count",
title
INTO current_retiring_titles
FROM current_retirement_titles
GROUP BY (title)
ORDER BY employee_count DESC;
-- current retiring employees by department count
SELECT COUNT(crt.emp_no) AS "employee_count",
d.dept_name as dept_name
INTO current_retiring_dept
FROM current_retirement_titles as crt
INNER JOIN departments as d
on crt.dept_no = d.dept_no
GROUP BY (dept_name)
ORDER BY employee_count DESC;
<file_sep>/README.md
# Pewlett-Hackard-Analysis
## 1. Overview of the analysis
Pewlett-Hackard is looking to future-proof itself as it faces an increasing number of retirements in the coming years. To help the company manage this retirement boom, or "silver tsunami" as it is being dubbed inside the company, the scope of this work is to provide data that will help Pewlett-Hackard surf the wave.
## 2. Results
• Reviewing the tables generated as part of deliverable 1, we cannot use the data to accurately assess the current retirement situation at Pewlett-Hackard. The "retiring_titles" table (Table 2.1) shows an employee count of 90,398, which would be over 37% of Pewlett-Hackard's total workforce — a significant number; but since we do not know how many of these are actual current employees, we do not know the true impact. To fix the results of deliverable 1, we need to update our queries with a filter that only includes current employees.
**Table 2.1: Retiring Title**

• With retirements representing a high percentage of the company's total employees, it is imperative that every department manager plan to promote current employees into senior roles and leadership positions within the department, and hire new employees to fill the resulting vacancies.
• The mentorship_eligibility table shows there are 1,550 current employees eligible for Pewlett-Hackard's mentorship program; this is an invaluable resource that the company must tap to help prepare and promote current employees to fill the high number of vacancies that will be created in senior positions.
• The mentorship_eligibility table also shows many Senior Engineers, Senior Staff, and Technique Leaders; the company should focus its recruiting efforts for the mentorship program on these higher-ranked members, since they have the most relevant experience to help nurture the next generation of leaders and senior members.
## 3. Summary
In summary, to provide more accurate information about the actual number of retiring employees and the potential hiring needs of each department, we will update the retiring_titles table to count only current employees (Table 3.1, current_retiring_titles) and also provide a table with the count of retiring employees by department so that each manager can evaluate their department's needs (Table 3.2, current_retiring_dept); the current-employee filter used for both is sketched after the tables below.
**Table 3.1: Current Retiring By Titles**

**Table 3.2: Current Retiring By Departments**

|
f748c22eb8d1b073f9377517418ed2ede2ffbf4e
|
[
"Markdown",
"SQL"
] | 3
|
SQL
|
calvogeorge/Pewlett-Hackard-Analysis
|
13e185fc19bd7cf4c5d764ce71000ce0ebe55864
|
ca18cca30fe067f5b0f30ceeca4e1a56dda9cdcf
|
refs/heads/main
|
<file_sep>#by <NAME> & <NAME>
#using python version 3.6.2
import speech_recognition as sr
#pip install SpeechRecognition
#pip install PyAudio
import webbrowser
import smtplib
import datetime
import getpass
def sendEmail(to, content, userEmail, password):
server = smtplib.SMTP('smtp.gmail.com', 587)
server.ehlo()
server.starttls()
server.login(userEmail, password)
server.sendmail(userEmail, to, content)
server.close()
def takeCommand():
startStop = True
while startStop:
listener = sr.Recognizer()
with sr.Microphone() as source:
print('Listening...')
listener.pause_threshold = 1
voice = listener.listen(source)
command = listener.recognize_google(voice)
command = command.lower()
if 'holmes' in command and 'open google' in command or 'open google' in command:
#say "open google"s
print('Okay sir, opening google...')
webbrowser.open('google.com')
elif 'holmes' in command and 'open youtube' in command or 'open youtube' in command:
#say "open youtube"
print('Okay sir, opening youtube...')
webbrowser.open('youtube.com')
elif 'holmes' in command and 'open future hacks' in command or 'open future hacks' in command:
#say "future hacks"
print('Okay sir, opening Devpost')
webbrowser.open('https://futurehacks-2021.devpost.com/?ref_feature=challenge&ref_medium=discover')
elif 'holmes' in command and 'the time' in command or 'the time' in command:
#say "what is the time"
Time = datetime.datetime.now().strftime("%I:%M:%S")
print("Sir, the current time is " + Time)
elif 'holmes' in command and 'the date' in command or 'the date' in command:
#say "what is the date"
year = str(datetime.datetime.now().year)
month = str(datetime.datetime.now().month)
day = str(datetime.datetime.now().day)
print("Sir, the date is, " + month + "/" + day + "/" + year)
elif 'holmes' in command and 'send email' in command or 'send email' in command:
#say "send email"
try:
userEmail = input('Enter your email: ')
password = getpass.getpass('What is your password: ')
emailTo = input('Enter the email you want to send to: ')
print('What should I say?')
voice2 = listener.listen(source)
query = listener.recognize_google(voice2)
content = query
to = emailTo
print('Okay sir, sending email...')
sendEmail(to, content, userEmail, password)
print('Email sent!')
except Exception as e:
print(e)
print('Sorry sir, I was unable to send that email')
elif 'bye holmes' in command or 'goodbye' in command:
print('Bye, sir')
startStop = False
elif 'hey holmes' in command or 'hello' in command:
print('Hello, sir. How may I help you?')
else:
print('Recognizing...')
print('Sorry, I could not understand that. Can you repeat it?')
if __name__ == "__main__":
password = '<PASSWORD>'
test = input('Finish the sentence. Pink kittens ')
if test == password:
numLock = '5344'
test2 = input('Enter the password for the 4 digit number lock: ')
if test2 == numLock:
print('Holmes has been accessed.')
print('Hello sir. How can I help you?')
takeCommand()
else: print('Permission Denied')
else:
print('Permission Denied')
|
7dfd2619565d60bebf885ec414b9c365c2039336
|
[
"Python"
] | 1
|
Python
|
rishabhp0125/HolmesAI
|
897440fe728911674f34f31fe68d8926579cd374
|
313651e9694b5bcda8882d2e16d870070651121c
|
refs/heads/Custem1.0
|
<repo_name>seahorseBra/CustemView<file_sep>/app/src/main/java/Utils/ImageUtil.java
package utils;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.Parcel;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.util.ArrayList;
import java.util.List;
import javaBean.ImagePiece;
/**
 * Image processing utility class
* Created by zchao on 2016/3/30.
*/
public class ImageUtil {
private Bitmap bitmap = null;
public ImageUtil() {
}
/**
     * Split an image into multiple pieces.
     * @param bitmap the image to split
     * @param xSplitSize number of pieces along the x axis
     * @param ySplitSize number of pieces along the y axis
     * @return the list of resulting pieces {@link ImagePiece}
*/
public static List<ImagePiece> SplitImage(Bitmap bitmap, int xSplitSize, int ySplitSize){
if (bitmap == null)return null;
List<ImagePiece> pieces = new ArrayList<>();
int width = bitmap.getWidth();
int height = bitmap.getHeight();
int weightForPiece = width / xSplitSize;
int heightForPiece = height / ySplitSize;
for (int i = 0; i < ySplitSize; i++) {
for (int j = 0; j < xSplitSize; j++) {
ImagePiece imagePiece = new ImagePiece(Parcel.obtain());
imagePiece.indexX = j;
imagePiece.indexY = i;
imagePiece.bitmap = Bitmap.createBitmap(bitmap, j * weightForPiece, i * heightForPiece, weightForPiece, heightForPiece);
pieces.add(imagePiece);
}
}
return pieces;
}
/**
     * Compress an image until it is no larger than the given size (in KB).
     * @param bitmap the bitmap to compress
     * @param size the target size in KB
     * @return the compressed bitmap
     */
    public static Bitmap compressImage(Bitmap bitmap, float size){
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        bitmap.compress(Bitmap.CompressFormat.JPEG, 100, bos);
        int quality = 100;
        // Reset the stream before each pass; otherwise the new JPEG data is appended to the
        // previous data and the size check can never succeed. Also stop before quality goes negative.
        while (bos.size() / 1024 > size && quality > 0) {
            quality -= 10;
            bos.reset();
            bitmap.compress(Bitmap.CompressFormat.JPEG, quality, bos);
        }
        ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray());
        return BitmapFactory.decodeStream(bis);
    }
/**
     * Compress an image by JPEG quality percentage.
     * @param bitmap the bitmap to compress
     * @param percent the JPEG quality to use (0-100)
* @return
*/
public static Bitmap compressImage(Bitmap bitmap, int percent){
ByteArrayOutputStream bos = new ByteArrayOutputStream();
bitmap.compress(Bitmap.CompressFormat.JPEG, percent, bos);
ByteArrayInputStream bis = new ByteArrayInputStream(bos.toByteArray());
return BitmapFactory.decodeStream(bis);
}
/**
     * Compress an image loaded from a file path.
     * @param pathName path of the image file to compress
     * @param size the target size in KB
* @return
*/
public static Bitmap compressImage(String pathName, float size){
Bitmap bitmap = BitmapFactory.decodeFile(pathName);
return compressImage(bitmap, size);
}
}
<file_sep>/app/src/main/java/javaBean/HourWeather.java
package javaBean;
/**
* Created by zchao on 2016/6/6.
*/
public class HourWeather {
private int time;
private int weather;
private int temp;
public HourWeather(int time, int weather, int temp) {
this.time = time;
this.weather = weather;
this.temp = temp;
}
public void setTime(int time) {
this.time = time;
}
public void setWeather(int weather) {
this.weather = weather;
}
public void setTemp(int temp) {
this.temp = temp;
}
public int getTime() {
return time;
}
public int getWeather() {
return weather;
}
public int getTemp() {
return temp;
}
}
<file_sep>/app/src/main/java/com/example/administrator/custemview/FileSystem.java
package com.example.administrator.custemview;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.webkit.WebChromeClient;
import android.webkit.WebView;
import android.webkit.WebViewClient;
public class FileSystem extends BaseActivity {
private WebView webView;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_file_system);
webView = (WebView) findViewById(R.id.web);
webView.loadUrl("http://www.tooopen.com/img/87_312.aspx");
WebViewClient client = new WebViewClient();
WebChromeClient chromeClient = new WebChromeClient();
}
@Override
protected void onSetting() {
}
}
<file_sep>/app/src/main/java/Utils/CApp.java
package utils;
import android.app.Application;
import android.content.Context;
import com.facebook.drawee.backends.pipeline.Fresco;
import model.ApiDal;
/**
 * Application-wide globals and initialization
* Created by zchao on 2016/5/4.
*/
public class CApp extends Application{
public static Context context;
@Override
public void onCreate() {
super.onCreate();
context = getApplicationContext();
appinite();
}
private void appinite() {
utils.AppContext.inite(context);
ApiDal.newInstance().initeApiDal(getApplicationContext());
Fresco.initialize(context);
}
}
<file_sep>/myapplication/src/main/java/com/youloft/mysmall/myapplication/Province.java
package com.youloft.mysmall.myapplication;
/**
* Created by zchao on 2017/6/29.
* desc:
* version:
*/
public class Province {
public Province() {
}
public Province(String province, String city, String district) {
this.province = province;
this.city = city;
this.district = district;
}
public String province;
public String city;
public String district;
}
<file_sep>/app/src/main/java/com/example/administrator/custemview/BluetoothActivity.java
package com.example.administrator.custemview;
import android.annotation.TargetApi;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothGatt;
import android.bluetooth.BluetoothGattCallback;
import android.bluetooth.BluetoothGattCharacteristic;
import android.bluetooth.BluetoothGattDescriptor;
import android.bluetooth.BluetoothSocket;
import android.bluetooth.le.ScanCallback;
import android.bluetooth.le.ScanResult;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.graphics.Color;
import android.os.Build;
import android.os.Handler;
import android.os.Message;
import android.os.Parcelable;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.text.Editable;
import android.text.TextUtils;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.BaseAdapter;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Toast;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.StringTokenizer;
import java.util.UUID;
import java.util.concurrent.RunnableFuture;
import allinterface.Constants;
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
public class BluetoothActivity extends AppCompatActivity {
private Button On,Off,Visible,list;
private BluetoothAdapter BA;
private HashMap<String, BluetoothDevice> boundList = new HashMap<>();
private ListView lv, lv2;
private DeviceAdapter adapter = null;
private ConnectedThread connectedThread;
private EditText mEditText;
private Button mSendButton;
private TextView mTextView;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_bluetooth);
On = (Button)findViewById(R.id.button1);
Off = (Button)findViewById(R.id.button2);
Visible = (Button)findViewById(R.id.button3);
list = (Button)findViewById(R.id.button4);
mEditText = (EditText) findViewById(R.id.edit_text);
mSendButton = (Button) findViewById(R.id.send);
mTextView = (TextView) findViewById(R.id.textView1);
lv = (ListView)findViewById(R.id.listView1);
lv2 = (ListView)findViewById(R.id.listView2);
BA = BluetoothAdapter.getDefaultAdapter();
mSendButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
String s = mEditText.getText().toString();
if (!TextUtils.isEmpty(s)) {
sendMessage(s);
}
}
});
}
private void sendMessage(String message) {
// Check that there's actually something to send
if (message.length() > 0) {
// Get the message bytes and tell the BluetoothChatService to write
byte[] send = message.getBytes();
connectedThread.write(send);
mEditText.setText("");
}
}
public void on(View view){
if (!BA.isEnabled()) {
Intent turnOn = new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE);
startActivityForResult(turnOn, 0);
Toast.makeText(getApplicationContext(),"Turned on"
,Toast.LENGTH_LONG).show();
}
else{
Toast.makeText(getApplicationContext(),"Already on",
Toast.LENGTH_LONG).show();
}
}
public void list(View view){
Set<BluetoothDevice> bondedDevices = BA.getBondedDevices();
if (bondedDevices == null || bondedDevices.isEmpty()) {
return;
}
ArrayList list = new ArrayList();
for (BluetoothDevice bd:bondedDevices) {
boundList.put(bd.getName(), bd);
list.add(bd.getName());
}
Toast.makeText(getApplicationContext(),"Showing Paired Devices",
Toast.LENGTH_SHORT).show();
final ArrayAdapter adapter = new ArrayAdapter
(this,android.R.layout.simple_list_item_1, list);
lv.setAdapter(adapter);
lv.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
BluetoothDevice bluetoothDevice = boundList.get(adapter.getItem(position));
conn(bluetoothDevice);
}
});
}
private BroadcastReceiver receiver = new BroadcastReceiver() {
@Override
public void onReceive(Context context, Intent intent) {
String action = intent.getAction();
if (action.equals(BluetoothDevice.ACTION_FOUND)) {
BluetoothDevice device = intent.getParcelableExtra(BluetoothDevice.EXTRA_DEVICE);
if (adapter != null) {
adapter.addDevice(device);
}
} else if (action.equals(BluetoothDevice.ACTION_BOND_STATE_CHANGED)) {
BluetoothDevice device = intent.getParcelableExtra(BluetoothDevice.EXTRA_DEVICE);
int bondState = device.getBondState();
switch (bondState) {
case BluetoothDevice.BOND_NONE:
break;
case BluetoothDevice.BOND_BONDING:
break;
case BluetoothDevice.BOND_BONDED:
                        // connect to the bonded device
conn(device);
break;
}
}
}
};
public void search(View view) {
IntentFilter filter = new IntentFilter();
filter.addAction(BluetoothDevice.ACTION_FOUND);
filter.addAction(BluetoothDevice.ACTION_BOND_STATE_CHANGED);
filter.addAction(BluetoothAdapter.ACTION_SCAN_MODE_CHANGED);
filter.addAction(BluetoothAdapter.ACTION_STATE_CHANGED);
registerReceiver(receiver, filter);
BA.startDiscovery();
adapter = new DeviceAdapter(this);
lv2.setAdapter(adapter);
lv2.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
BluetoothDevice device = adapter.foundDevice.get(position);
connetDevice(device);
}
});
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
if (BA.isDiscovering()) {
BA.cancelDiscovery();
}
}
}, 20000);
}
public void off(View view){
unregisterReceiver(receiver);
BA.disable();
Toast.makeText(getApplicationContext(),"Turned off" ,
Toast.LENGTH_LONG).show();
}
public void visible(View view){
Intent getVisible = new Intent(BluetoothAdapter.
ACTION_REQUEST_DISCOVERABLE);
startActivityForResult(getVisible, 0);
}
/**
     * Pair with or connect to the given device.
* @param device
*/
private void connetDevice(BluetoothDevice device){
int bondState = device.getBondState();
switch (bondState) {
case BluetoothDevice.BOND_NONE:
Method createBond = null;
try {
createBond =BluetoothDevice.class.getMethod("createBond");
createBond.invoke(device);
} catch (NoSuchMethodException e) {
e.printStackTrace();
} catch (InvocationTargetException e) {
e.printStackTrace();
} catch (IllegalAccessException e) {
e.printStackTrace();
}
break;
case BluetoothDevice.BOND_BONDED:
conn(device);
break;
}
}
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
private void conn(BluetoothDevice device){
UUID uuid = UUID.fromString("00001101-0000-1000-8000-00805F9B34FB");
BluetoothSocket socket = null;
try {
socket = device.createRfcommSocketToServiceRecord(uuid);
socket.connect();
} catch (IOException e) {
e.printStackTrace();
}
connectedThread = new ConnectedThread(socket);
connectedThread.start();
}
private final Handler mHandler = new Handler(){
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case Constants.MESSAGE_WRITE:
byte[] writeBuf = (byte[]) msg.obj;
// construct a string from the buffer
String writeMessage = new String(writeBuf);
break;
case Constants.MESSAGE_READ:
byte[] readBuf = (byte[]) msg.obj;
// construct a string from the valid bytes in the buffer
String readMessage = new String(readBuf, 0, msg.arg1);
if (!TextUtils.isEmpty(readMessage)) {
mTextView.setText(readMessage);
}
break;
}
}
};
/**
     * Thread that handles an established Bluetooth connection.
*/
class ConnectedThread extends Thread {
private BluetoothSocket socket;
private final InputStream mInputStream;
private OutputStream mOutputStream;
public ConnectedThread(BluetoothSocket socket) {
this.socket = socket;
InputStream inputStream = null;
OutputStream outputStream = null;
try {
inputStream = socket.getInputStream();
outputStream = socket.getOutputStream();
} catch (IOException e) {
e.printStackTrace();
}
mInputStream = inputStream;
mOutputStream = outputStream;
}
@Override
public void run() {
byte[] buffer = new byte[1024];
while (true) {
try {
int count = mInputStream.read(buffer);
mHandler.obtainMessage(Constants.MESSAGE_READ, count, -1, buffer).sendToTarget();
} catch (IOException e) {
e.printStackTrace();
connectionLost();
}
}
}
public void write(byte[] buffer) {
try {
mOutputStream.write(buffer);
mHandler.obtainMessage(Constants.MESSAGE_WRITE, -1, -1, buffer).sendToTarget();
} catch (IOException e) {
e.printStackTrace();
}
}
public void cancel() {
try {
socket.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
private void connectionLost() {
Toast.makeText(this, "链接中断", Toast.LENGTH_SHORT).show();
}
class DeviceAdapter extends BaseAdapter{
private Context context;
private LayoutInflater inflater;
private ArrayList<BluetoothDevice> foundDevice = new ArrayList<>();
public DeviceAdapter(Context context) {
this.context = context;
inflater = LayoutInflater.from(context);
}
public void addDevice(BluetoothDevice device) {
if (!foundDevice.contains(device)) {
foundDevice.add(device);
notifyDataSetChanged();
}
}
@Override
public int getCount() {
return foundDevice.size();
}
@Override
public String getItem(int position) {
if (!TextUtils.isEmpty(foundDevice.get(position).getName())) {
return foundDevice.get(position).getName();
} else if (!TextUtils.isEmpty(foundDevice.get(position).getAddress())) {
return foundDevice.get(position).getAddress();
}
return "未知";
}
@Override
public long getItemId(int position) {
return position;
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
Holder holder = null;
if (convertView == null) {
convertView = inflater.inflate(R.layout.bluetooth_device_item, parent, false);
holder = new Holder();
holder.mTextView = (TextView) convertView.findViewById(R.id.text);
convertView.setTag(holder);
}else {
holder = (Holder) convertView.getTag();
}
holder.mTextView.setText(getItem(position));
return convertView;
}
class Holder{
public TextView mTextView;
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
return true;
}
}
<file_sep>/app/src/main/java/view/BezierView.java
package view;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.Point;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
import com.facebook.imagepipeline.memory.PooledByteArrayBufferedInputStream;
import java.util.ArrayList;
import utils.Utils;
/**
* Created by zchao on 2016/9/6.
*/
public class BezierView extends View {
private Paint paint;
private Path path;
private boolean isSelect;
private Point assistPoint;
private int height;
private int width;
private float firstX;
private float firstY;
private ArrayList<Point> p1 = new ArrayList<>();
private ArrayList<Point> p2 = new ArrayList<>();
private ArrayList<Point> p3 = new ArrayList<>();
private ArrayList<Point> p4 = new ArrayList<>();
public BezierView(Context context) {
this(context, null);
}
public BezierView(Context context, AttributeSet attrs) {
super(context, attrs);
paint = new Paint(Paint.ANTI_ALIAS_FLAG);
paint.setColor(Color.RED);
paint.setStrokeWidth(3);
paint.setDither(true);
paint.setStyle(Paint.Style.STROKE);
assistPoint = new Point();
path = new Path();
}
@Override
public boolean onTouchEvent(MotionEvent event) {
int action = event.getAction();
float xp = event.getX();
float yp = event.getY();
switch (action) {
case MotionEvent.ACTION_DOWN:
if (yp > height / 2 - Utils.dp2Px(30) && yp < height / 2 + Utils.dp2Px(30)) {
isSelect = true;
firstX = xp;
firstY = yp;
getParent().requestDisallowInterceptTouchEvent(true);
} else {
// getParent().requestDisallowInterceptTouchEvent(false);
}
break;
case MotionEvent.ACTION_MOVE:
assistPoint.x = (int) xp;
assistPoint.y = (int) yp;
invalidate();
break;
case MotionEvent.ACTION_UP:
isSelect = false;
break;
}
if (isSelect) {
return true;
} else {
return false;
}
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
}
@Override
protected void onSizeChanged(int w, int h, int oldw, int oldh) {
super.onSizeChanged(w, h, oldw, oldh);
width = w;
height = h;
assistPoint.x = w;
assistPoint.y = h/2;
for (int i = 0; i < 6; i++) {
Point point = new Point(w/5 * i, h/2);
p1.add(point);
}
        //compute the midpoint of each pair of adjacent points
for (int i = 0; i < p1.size() - 1; i++) {
p2.add(getCenter(p1.get(i), p1.get(i+1)));
}
        //compute the midpoints of those midpoints
for (int i = 0; i < p2.size() - 1; i++) {
p3.add(getCenter(p2.get(i), p2.get(i+1)));
}
        //compute the Bezier control (assist) points for all points
for (int i = 0; i < p2.size(); i++) {
p4.add(translationPoint(p2.get(i), p3.get(i/2), p1.get(i+1)));
}
}
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
int width = getWidth();
int height = getHeight();
path.reset();
path.moveTo(0, height/2);
path.quadTo(assistPoint.x, assistPoint.y, width, height/2);
canvas.drawPath(path, paint);
}
private Point getCenter(int x1, int y1, int x2, int y2) {
Point point = new Point();
point.x = x1 + (x2 - x1)/2;
point.y = y1 + (y2 - y1)/2;
return point;
}
/**
     * Compute the midpoint of two points.
* @param p1
* @param p2
* @return
*/
private Point getCenter(Point p1, Point p2) {
Point point = new Point();
point.x = p1.x + (p2.x - p1.x)/2;
point.y = p1.y + (p2.y - p1.y)/2;
return point;
}
/**
     * Translate a point by the offset between two other points.
* @param srcPosition
* @param p1
* @param p2
* @return
*/
private Point translationPoint(Point srcPosition, Point p1, Point p2) {
Point dstPoint = new Point();
dstPoint.x = srcPosition.x + (p2.x - p1.x);
dstPoint.y = srcPosition.y + (p2.y - p1.y);
return dstPoint;
}
}
<file_sep>/app/src/main/java/utils/FlowDBManager.java
package utils;
import android.content.Context;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import java.util.ArrayList;
import java.util.List;
import javaBean.FlowInfo;
/**
 * Database manager for the home-page information flow.
* Created by zchao on 2016/7/7.
*/
public class FlowDBManager {
private Context context;
private SQLiteDatabase db = null;
private SQLHelper sqlHelper = null;
private static FlowDBManager instance = null;
public FlowDBManager(Context context) {
this.context = context;
this.sqlHelper = new SQLHelper(context);
}
/**
* @return
*/
public void openDb() {
if (db == null || !db.isOpen()) {
db = sqlHelper.getWritableDatabase();
}
}
/**
     * Insert operation. key is used as the primary key and REPLACE avoids duplicate rows.
     * @param key unique identifier of a feed item, returned by the server
     * @param seq display order of the feed item
     * @param save_time time the row was stored, used when refreshing the data
     * @param flag indicates the user manually dismissed this item, so it is no longer shown
     * @param content the actual data of a single feed item
*/
public void insert(String key, int seq, int page, long save_time,
int flag, String content){
openDb();
String sql = "REPLACE INTO " + SQLHelper.TABLE_NAME + " (key,seq,page,save_time,flag,content)"
+ " VALUES(?,?,?,?,?,?)"
+ " ";
Object args[]=new Object[]{key, seq, page, save_time, flag, content};
this.db.execSQL(sql, args);
this.db.close();
}
public void insert(FlowInfo info) {
if (info == null) {
return;
}
insert(info.key, info.seq, info.page, info.save_time, info.flag, info.content);
}
/**
     * Batch insert.
*/
public void insert(List<FlowInfo> list) {
if (list.isEmpty()) {
return;
}
for (int i = 0; i < list.size(); i++) {
insert(list.get(i));
}
}
/**
     * Update operation, used when the user dismisses an item.
* @param key
* @param flag
*/
public void update(String key,int flag) {
openDb();
String sql = "UPDATE " + SQLHelper.TABLE_NAME + " SET flag=? WHERE key=?";
Object args[]=new Object[]{flag, key};
this.db.execSQL(sql, args);
this.db.close();
}
/**
     * Delete a single row.
* @param key
*/
public void delete(int key){
openDb();
String sql = "DELETE FROM " + SQLHelper.TABLE_NAME +" WHERE key=?";
Object args[]=new Object[]{key};
this.db.execSQL(sql, args);
this.db.close();
}
/**
     * Delete expired rows.
     * @param save_time rows whose save time is earlier than this value are deleted
*/
public void delete(long save_time) {
openDb();
String sql = "DELETE FROM " + SQLHelper.TABLE_NAME + " WHERE save_time<?";
Object args[] = new Object[]{save_time};
this.db.execSQL(sql, args);
this.db.close();
}
/**
*
*/
public void clearDate() {
openDb();
db.delete(SQLHelper.TABLE_NAME, null, null);
this.db.close();
}
/**
     * Query all items, excluding those the user has dismissed.
* @return
*/
public List<FlowInfo> query(){
openDb();
List<FlowInfo> all = new ArrayList();
String sql = "SELECT * FROM " + SQLHelper.TABLE_NAME + " WHERE flag=100";
Cursor result = this.db.rawQuery(sql, null);
if(result.getCount() == 0)return null;
for (result.moveToFirst(); !result.isAfterLast(); result.moveToNext()) {
FlowInfo flowInfo = new FlowInfo();
flowInfo.key = result.getString(0);
flowInfo.seq = result.getInt(1);
flowInfo.page = result.getInt(2);
flowInfo.save_time = result.getLong(3);
flowInfo.flag = result.getInt(4);
flowInfo.content = result.getString(5);
all.add(flowInfo);
}
this.db.close();
return all;
}
/**
     * Query by page number.
* @param page
* @return
*/
public List<FlowInfo> query(int page) {
openDb();
List<FlowInfo> all = new ArrayList<>();
String sql = "SELECT * FROM " + SQLHelper.TABLE_NAME + " WHERE flag=100 AND page=?";
String args[] = new String[]{String.valueOf(page)};
Cursor result = this.db.rawQuery(sql, args);
if(result.getCount() == 0)return null;
for (result.moveToFirst(); !result.isAfterLast(); result.moveToNext()) {
FlowInfo flowInfo = new FlowInfo();
flowInfo.key = result.getString(0);
flowInfo.seq = result.getInt(1);
flowInfo.page = result.getInt(2);
flowInfo.save_time = result.getLong(3);
flowInfo.flag = result.getInt(4);
flowInfo.content = result.getString(5);
all.add(flowInfo);
}
this.db.close();
return all;
}
/**
     * Query by page number and optionally sort the results.
     * @param page
     * @param isSort whether to sort the query results by seq
* @return
*/
public List<FlowInfo> query(int page, boolean isSort) {
openDb();
List<FlowInfo> all = new ArrayList<>();
String sql = "SELECT * FROM " + SQLHelper.TABLE_NAME + " WHERE flag=? AND page=?";
String sql1 = "SELECT * FROM " + SQLHelper.TABLE_NAME + " WHERE flag=? AND page=? AND save_time>=? ORDER BY seq";
String args[] = new String[]{String.valueOf(100), String.valueOf(page), String.valueOf(System.currentTimeMillis()/1000 - 60)};
Cursor result = this.db.rawQuery(isSort ? sql1 : sql, args);
if(result.getCount() == 0)return null;
for (result.moveToFirst(); !result.isAfterLast(); result.moveToNext()) {
FlowInfo flowInfo = new FlowInfo();
flowInfo.key = result.getString(0);
flowInfo.seq = result.getInt(1);
flowInfo.page = result.getInt(2);
flowInfo.save_time = result.getLong(3);
flowInfo.flag = result.getInt(4);
flowInfo.content = result.getString(5);
all.add(flowInfo);
}
this.db.close();
return all;
}
}
<file_sep>/app/build.gradle
apply plugin: 'com.android.application'
android {
compileSdkVersion 24
buildToolsVersion '25.0.0'
defaultConfig {
applicationId "com.example.administrator.custemview"
minSdkVersion 16
targetSdkVersion 24
versionCode 1
versionName "1.1"
}
archivesBaseName = "custem${defaultConfig.versionName}-${defaultConfig.versionCode}"
signingConfigs {
myConfig {
storeFile file('custemkeyalias.jks')
storePassword '<PASSWORD>'
keyAlias 'ee'
keyPassword '123456'
}
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
signingConfig signingConfigs.myConfig
applicationIdSuffix ".len"
}
debug {
applicationIdSuffix ".debug"
minifyEnabled false
}
}
productFlavors {
huawei {
}
xiaomistory {
}
publish {
}
// productFlavors.all { flavor ->
// flavor.manifestPlaceholders = [CHANNEL_VALUE: name]
// }
}
// applicationVariants.all{ variant->
// variant.outputs.each { output->
// def oldFile = output.outputFile
// def newName = '';
// if(variant.buildType.name.equals('release')){
// println(variant.productFlavors[0].name)
// def releaseApkName = 'app-v' + defaultConfig.versionName + '-' + variant.productFlavors[0].name + '-release-signed.apk'
// output.outputFile = new File(oldFile.parent, releaseApkName)
// }
//
// if(variant.buildType.name.equals('debug')){
// newName = oldFile.name.replace(".apk", "-v" + defaultConfig.versionName + "-build" + ".apk")
// output.outputFile = new File(oldFile.parent, newName)
// }
//
// }
// }
}
dependencies {
compile fileTree(include: ['*.jar'], dir: 'libs')
compile 'com.google.code.gson:gson:2.6.2'
compile 'com.jakewharton:butterknife:7.0.1'
compile 'com.squareup.retrofit:retrofit:1.9.0'
compile 'com.squareup.okio:okio:1.8.0'
compile 'com.facebook.fresco:fresco:0.10.0'
compile 'com.viewpagerindicator:parent:2.4.1'
compile 'io.reactivex:rxjava:1.2.1'
compile 'io.reactivex:rxandroid:1.2.1'
compile 'com.squareup.okhttp3:okhttp:3.4.1'
testCompile 'junit:junit:4.12'
compile 'com.android.support:appcompat-v7:24.2.1'
compile 'com.android.support:design:24.2.1'
compile 'com.android.support:recyclerview-v7:24.2.1'
compile 'com.android.support.constraint:constraint-layout:1.0.2'
compile 'org.dom4j:dom4j:2.0.1'
compile 'com.github.bumptech.glide:glide:4.0.0-RC1'
annotationProcessor 'com.github.bumptech.glide:compiler:4.0.0-RC1'
}
<file_sep>/app/src/main/java/com/example/administrator/custemview/SingletonActivity.java
package com.example.administrator.custemview;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.util.Log;
import singleton.SingletonTest;
import singleton.SingletonTest1;
/**
* Created by mavin on 2016/8/15.
*/
public class SingletonActivity extends BaseActivity {
private static final String TAG = "SingletonActivity";
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
for (int i = 0; i < 5; i++) {
new Thread(new Runnable() {
@Override
public void run() {
try {
                        Thread.sleep(300); // simulate a delayed first access to the singleton
Log.d(TAG, "hashCode:" + SingletonTest.getInstance().hashCode());
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}).start();
}
}
}
<file_sep>/app/src/main/java/view/DampView.java
package view;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.support.v4.app.NavUtils;
import android.util.AttributeSet;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;
import android.view.animation.DecelerateInterpolator;
import android.widget.OverScroller;
/**
* Created by mavin on 2016/6/6.
*/
public class DampView extends View {
private static final String TAG = "DampView";
private int size;
private Paint paint;
private int itemWidth;
private OverScroller overScroller;
private int lastX;
private int realWidth;
public DampView(Context context) {
this(context, null);
}
public DampView(Context context, AttributeSet attrs) {
this(context, attrs, 0);
}
public DampView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
DisplayMetrics metrics = getResources().getDisplayMetrics();
size = metrics.widthPixels;
itemWidth = (size - getPaddingLeft() - getPaddingRight())/6;
realWidth = itemWidth * 20;
overScroller = new OverScroller(context, new DecelerateInterpolator());
paint = new Paint();
paint.setTextSize(dp2px(14));
}
@Override
public boolean onTouchEvent(MotionEvent event) {
int action = event.getAction();
int x = (int) event.getX();
switch (action) {
case MotionEvent.ACTION_DOWN:
lastX = x;
break;
case MotionEvent.ACTION_MOVE:
if (!overScroller.isFinished()) {
overScroller.abortAnimation();
}
int dx = (lastX - x);
if (getScrollX() < 0) {
dx = 0;
}
if (getScrollX() > realWidth - (size - getPaddingLeft() - getPaddingRight())) {
dx = 0;
}
scrollBy(dx, 0);
lastX = x;
break;
case MotionEvent.ACTION_UP:
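                // Note (editorial): a fling would normally be started here, e.g. via
                // overScroller.fling(...), so computeScroll() has something to animate;
                // the original implementation leaves ACTION_UP empty.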
break;
}
// postInvalidate();
return true;
}
@Override
public void computeScroll() {
super.computeScroll();
if (overScroller.computeScrollOffset()) {
scrollTo(overScroller.getCurrX(), 0);
}
postInvalidate();
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
Log.d(TAG, "onMeasure() called with: " + "widthMeasureSpec = [" + widthMeasureSpec + "], heightMeasureSpec = [" + heightMeasureSpec + "]");
setMeasuredDimension(realWidth, 200);
}
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
for (int i = 0; i < 20; i++) {
canvas.drawText(String.valueOf(i), i * itemWidth, 100, paint);
}
}
private float dp2px(int dp) {
float density = getResources().getDisplayMetrics().density;
return density * dp;
}
}
<file_sep>/app/src/main/java/com/example/administrator/custemview/BluetoothChatService.java
package com.example.administrator.custemview;
import android.bluetooth.BluetoothAdapter;
import android.bluetooth.BluetoothDevice;
import android.bluetooth.BluetoothSocket;
import android.content.Context;
import android.os.Handler;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.UUID;
import java.util.concurrent.ThreadPoolExecutor;
import allinterface.Constants;
/**
* Created by zchao on 2016/9/28.
*/
public class BluetoothChatService {
private static final UUID MY_UUID_SECURE =
UUID.fromString("fa87c0d0-afac-11de-8a39-0800200c9a66");
private Handler mHandler;
private Context mContext;
private BluetoothAdapter adapter;
private ConnectThread mConnectThread;
public BluetoothChatService(Handler mHandler, Context mContext) {
this.mHandler = mHandler;
this.mContext = mContext;
}
    /**
     * Before wiring up a new connection, cancel any previously running connect thread.
     */
private void conn(BluetoothSocket socket, BluetoothDevice device) {
if (mConnectThread != null) {
mConnectThread.cancel();
mConnectThread = null;
}
}
private void connectionFailed() {
}
    /**
     * Thread that opens an RFCOMM connection to a remote device.
     */
class ConnectThread extends Thread{
private final BluetoothDevice mDevice;
private BluetoothSocket mSocket;
public ConnectThread(BluetoothDevice device) {
this.mDevice =device;
try {
mSocket = mDevice.createRfcommSocketToServiceRecord(MY_UUID_SECURE);
} catch (IOException e) {
e.printStackTrace();
}
}
@Override
public void run() {
setName("ConnectThread");
try {
mSocket.connect();
} catch (IOException e) {
try {
mSocket.close();
} catch (IOException e1) {
e1.printStackTrace();
}
connectionFailed();
return;
}
synchronized (BluetoothChatService.this) {
mConnectThread = null;
}
conn(mSocket, mDevice);
}
public void cancel() {
try {
mSocket.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
    /**
     * Thread that services an established connection: reads incoming bytes and writes outgoing ones.
     */
class ConnectedThread extends Thread {
private BluetoothSocket socket;
private final InputStream mInputStream;
private OutputStream mOutputStream;
public ConnectedThread(BluetoothSocket socket) {
this.socket = socket;
InputStream inputStream = null;
OutputStream outputStream = null;
try {
inputStream = socket.getInputStream();
outputStream = socket.getOutputStream();
} catch (IOException e) {
e.printStackTrace();
}
mInputStream = inputStream;
mOutputStream = outputStream;
}
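        // run() below blocks on the socket's InputStream and forwards each chunk to the UI
        // handler as Constants.MESSAGE_READ; an IOException is treated as a lost connection
        // and the service is restarted.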
@Override
public void run() {
byte[] buffer = new byte[1024];
while (true) {
try {
int count = mInputStream.read(buffer);
mHandler.obtainMessage(Constants.MESSAGE_READ, count, -1, buffer).sendToTarget();
} catch (IOException e) {
e.printStackTrace();
connectionLost();
BluetoothChatService.this.start();
}
}
}
public void write(byte[] buffer) {
try {
mOutputStream.write(buffer);
mHandler.obtainMessage(Constants.MESSAGE_WRITE, -1, -1, buffer).sendToTarget();
} catch (IOException e) {
e.printStackTrace();
}
}
public void cancel() {
try {
socket.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
class AcceptThread extends Thread{
private BluetoothSocket socket;
public AcceptThread(BluetoothSocket socket) {
this.socket = socket;
}
@Override
public void run() {
}
}
    /**
     * Handle a lost connection.
     */
private void connectionLost() {
}
public void start() {
}
}
<file_sep>/testLib/src/main/java/com/example/zchao/testlib/RtextActivity.java
package com.example.zchao.testlib;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.widget.TextView;
public class RtextActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_rtext);
TextView viewById = (TextView) findViewById(R.id.text223);
viewById.setText(viewById.getId()+"");
}
}
<file_sep>/app/src/main/java/com/example/administrator/custemview/JNIActivity.java
package com.example.administrator.custemview;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.widget.Button;
import android.widget.TextView;
import butterknife.Bind;
import butterknife.ButterKnife;
import butterknife.OnClick;
public class JNIActivity extends AppCompatActivity {
@Bind(R.id.result)
TextView mTextResult;
@Bind(R.id.java)
Button java;
@Bind(R.id.move)
Button move;
@Bind(R.id.jni)
Button jni;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_jni);
ButterKnife.bind(this);
}
@OnClick(R.id.java)
public void jisuanWithJava(){
long startTime = System.currentTimeMillis();
int result = 1;
for (int i = 0; i < 20; i++) {
result *= 2;
}
long endTime = System.currentTimeMillis();
mTextResult.setText(result + "用时:" + (endTime - startTime)/1000 + "秒" + (endTime - startTime)%1000 + "毫秒");
}
@OnClick(R.id.move)
public void jisuanWithMove(){
long startTime = System.currentTimeMillis();
int result = 1;
for (int i = 0; i < 20; i++) {
result <<= 1;
}
long endTime = System.currentTimeMillis();
mTextResult.setText(result + "用时:" + (endTime - startTime)/1000 + "秒" + (endTime - startTime)%1000 + "毫秒");
}
@OnClick(R.id.jni)
public void jisuanWithJNI(){
}
}
<file_sep>/app/src/main/java/view/TestView.java
package view;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.RadialGradient;
import android.graphics.Shader;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.util.AttributeSet;
import android.view.View;
import android.widget.ImageView;
import com.example.administrator.custemview.R;
/**
* Created by mavin on 2016/6/15.
*/
public class TestView extends View {
private Bitmap bitmap;
private Paint paint;
private PorterDuffXfermode porterDuffXfermode;
private int totleWidth;
private int totleHeight;
private int width;
private int height;
private Paint textPaint;
private Paint bitmapPaint;
private Bitmap bitmap1;
public TestView(Context context) {
this(context, null);
}
public TestView(Context context, AttributeSet attrs) {
this(context, attrs, 0);
}
public TestView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
textPaint = new Paint();
bitmapPaint = new Paint();
textPaint.setColor(Color.WHITE);
textPaint.setTextSize(40);
final Drawable drawable = getResources().getDrawable(R.drawable.shadow);
bitmap = Bitmap.createBitmap(400, 100, Bitmap.Config.ARGB_8888);
final Canvas canvas = new Canvas(bitmap);
drawable.setBounds(0,0, 400, 100);
drawable.draw(canvas);
bitmap1 = BitmapFactory.decodeResource(getResources(), R.drawable.index);
paint = new Paint(Paint.ANTI_ALIAS_FLAG);
porterDuffXfermode = new PorterDuffXfermode(PorterDuff.Mode.DST_OUT);
paint.setShader(new RadialGradient(bitmap1.getWidth()/2, bitmap1.getHeight()/2, bitmap1.getHeight()/2, 0xff000000, 0x00000000, Shader.TileMode.CLAMP));
// Bitmap.createBitmap()
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
width = MeasureSpec.getSize(widthMeasureSpec);
height = 400;
        setMeasuredDimension(width, height); // use the fixed height computed above, not the raw measure spec
}
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
// canvas.saveLayerAlpha()
canvas.drawBitmap(bitmap1, 0, 0, paint);
// paint.setXfermode(porterDuffXfermode);
// canvas.drawBitmap(bitmap, 0, 0, paint);
// paint.setXfermode(null);
}
}
<file_sep>/app/src/main/java/singleton/SingletonTest.java
package singleton;
import android.util.Log;
import java.io.ObjectStreamException;
/**
* Created by mavin on 2016/8/15.
*/
public class SingletonTest {
private static final String TAG = "SingletonActivity";
private SingletonTest() {
Log.d(TAG, "SingletonTest() called with: " + "");
}
public static SingletonTest getInstance() {
return SingletonHolder.instance;
}
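    /*
     * Lazy initialization-on-demand holder idiom: the JVM initializes SingletonHolder
     * (and therefore the instance) only on the first getInstance() call, and class
     * initialization guarantees thread safety without explicit synchronization.
     */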
    static class SingletonHolder {
        private static final SingletonTest instance = new SingletonTest();
    }
    // Guards against deserialization creating a second instance. This only takes effect
    // if SingletonTest implements Serializable, and it must live on the serialized class
    // itself rather than on the holder, which is why it was moved here.
    private Object readResolve() throws ObjectStreamException {
        return getInstance();
    }
}
<file_sep>/lib/src/main/java/com/example/MyClass.java
package com.example;
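/**
 * Strategy-pattern demo (explanatory note added for clarity): Content delegates
 * getWeather() to whichever strategy it was constructed with, so swapping
 * YahooStrategy for GoogleStrategy changes behaviour without modifying Content.
 * Content and the strategy classes are defined elsewhere in this module.
 */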
public class MyClass {
public static void main(String[] arg){
Content content = new Content(new YahooStrategy());
content.getWeather();
Content content1 = new Content(new GoogleStrategy());
content1.getWeather();
}
}
<file_sep>/app/src/main/java/com/example/administrator/custemview/SQLTestActivity.java
package com.example.administrator.custemview;
import android.content.ContentValues;
import android.database.sqlite.SQLiteDatabase;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.widget.Button;
import android.widget.EditText;
import android.widget.TextView;
import java.util.ArrayList;
import butterknife.Bind;
import butterknife.ButterKnife;
import butterknife.OnClick;
import javaBean.FlowInfo;
import utils.FlowDBManager;
import utils.SQLHelper;
public class SQLTestActivity extends AppCompatActivity {
@Bind(R.id.insert)
Button insert;
@Bind(R.id.query)
Button query;
@Bind(R.id.update)
Button update;
@Bind(R.id.clear)
Button clear;
@Bind(R.id.content)
TextView content;
@Bind(R.id.key)
EditText key;
@Bind(R.id.seq)
EditText seq;
@Bind(R.id.page)
EditText page;
@Bind(R.id.contentT)
EditText contentT;
private FlowDBManager dbManager;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_sqltest);
ButterKnife.bind(this);
dbManager = new FlowDBManager(this);
}
@OnClick(R.id.insert)
public void insert() {
dbManager.insert(key.getText().toString(),
Integer.parseInt(seq.getText().toString().isEmpty()?"1":seq.getText().toString()),
Integer.parseInt(page.getText().toString().isEmpty()?"1":page.getText().toString()),
System.currentTimeMillis()/1000,
100,
contentT.getText().toString());
}
@OnClick(R.id.query)
public void query() {
ArrayList<FlowInfo> query = null;
if (page.getText().toString().isEmpty()) {
query = (ArrayList<FlowInfo>) dbManager.query();
} else {
query = (ArrayList<FlowInfo>) dbManager.query(Integer.parseInt(page.getText().toString()), true);
}
if (query == null || query.isEmpty()) {
content.setText("");
return;
}
StringBuilder sb = new StringBuilder();
for (int i = 0; i < query.size(); i++) {
sb.append(query.get(i).content);
sb.append("\n");
}
content.setText(sb.toString());
}
@OnClick(R.id.update)
public void update() {
dbManager.update(key.getText().toString(), 50);
}
@OnClick(R.id.clear)
public void clear() {
// dbManager.clearDate();
dbManager.delete(System.currentTimeMillis());
}
}
<file_sep>/app/src/main/java/com/example/administrator/custemview/SQLcitysActivity.java
package com.example.administrator.custemview;
import android.database.sqlite.SQLiteDatabase;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import org.dom4j.Document;
import org.dom4j.Element;
import org.dom4j.io.SAXReader;
import java.io.InputStreamReader;
import java.util.Iterator;
import java.util.List;
public class SQLcitysActivity extends AppCompatActivity {
private SPLHelper splHelper;
private SQLiteDatabase db;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
splHelper = new SPLHelper(this);
db = splHelper.getWritableDatabase();
InsertData();
}
public void insert(SQLiteDatabase db, String province, String city, String district) {
String sql = "REPLACE INTO " + SPLHelper.TABLE_NAME + " (province,city,district)"
+ " VALUES(?,?,?)"
+ " ";
Object args[] = new Object[]{province, city, district};
db.execSQL(sql, args);
db.close();
}
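    // InsertData() below parses assets/province_data.xml with dom4j, walking
    // root/province -> city -> district, and REPLACEs each (province, city, district)
    // triple into the table via insert() above. Note that insert() closes the database
    // after every row, which is why the loop re-opens it each time.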
private void InsertData() {
try {
Document doc = new SAXReader().read(new InputStreamReader(
getResources().getAssets().open("province_data.xml"), "utf-8"));
            // Select the province nodes from the XML document
            // Element node = doc.getRootElement();
            List itemList = doc.selectNodes("root/province");
            // Walk the nodes read from the XML
            System.out.println("XML data to be imported into the database:\n");
            System.out.println("province-----city-----district\n");
for (Iterator iter = itemList.iterator(); iter.hasNext(); ) {
Element el = (Element) iter.next();
                // Read the province name attribute
String name = el.attributeValue("name");
List citylist = el.selectNodes("city");
for (Iterator city = citylist.iterator(); city.hasNext(); ) {
Element el1 = (Element) city.next();
                    // Read the city name attribute
String cityname = el1.attributeValue("name");
List districtlist = el1.selectNodes("district");
for (Iterator distric = districtlist.iterator(); distric.hasNext(); ) {
Element el2 = (Element) distric.next();
                        // Read the district name attribute
String districname = el2.attributeValue("name");
                        // Bind the values for the SQL insert
if (!db.isOpen()) {
db = splHelper.getWritableDatabase();
}
insert(db, name, cityname, districname);
System.out.print("将XML" + name + "|" + cityname + "|" + districname);
}
}
}
System.out.print("将XML文档数据导入数据库成功\n");
} catch (Exception e) {
e.printStackTrace();
} finally {
if (db != null) {
db.close();
}
}
}
}
<file_sep>/app/src/main/java/com/example/administrator/custemview/FileManagerActivity.java
package com.example.administrator.custemview;
import android.app.Notification;
import android.app.NotificationManager;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Color;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.ColorDrawable;
import android.os.Build;
import android.os.Bundle;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.View;
import android.view.animation.TranslateAnimation;
import android.widget.Button;
import android.widget.LinearLayout;
import android.widget.PopupWindow;
import android.widget.RemoteViews;
import android.widget.TextView;
import adapter.MyTestAdapter;
public class FileManagerActivity extends BaseActivity {
// private RecyclerView mFileList;
private MyTestAdapter adapter;
private TypedArray typedArray;
private LinearLayout mDelayGroup;
private Button mShowDialog;
private PopupWindow popupWindow;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_file_manager);
NotificationManager systemService = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
mDelayGroup = (LinearLayout) findViewById(R.id.delay_root);
// if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
// textView.setTextAppearance(context, android.R.style.TextAppearance_Material_Notification_Title);
// } else {
// textView.setTextAppearance(context, android.R.style.TextAppearance_StatusBar_EventContent_Title);
// }
mShowDialog = (Button) findViewById(R.id.show_dialog);
fastSetClickBehave(R.id.show_dialog);
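        // The block below reads the platform's notification-title text colour and derives a
        // 60%-alpha (0x99) secondary colour from it, so the custom RemoteViews text matches
        // the system notification styling.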
TextView textView = new TextView(this);
textView.setTextAppearance(this, android.R.style.TextAppearance_Material_Notification_Title);
int textColor = textView.getCurrentTextColor();
int temp = textColor&0x00ffffff;
int color = temp|0x99000000;
String[] s = new String[]{"#"+Integer.toHexString(textColor), "#"+Integer.toHexString(color)};
int i = Color.parseColor(s[0]);
int j = Color.parseColor(s[1]);
RemoteViews remoteViews = new RemoteViews(getPackageName(), R.layout.custem_notify);
remoteViews.setTextColor(R.id.text, i);
remoteViews.setTextColor(R.id.text2, j);
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.JELLY_BEAN) {
Notification builder = new Notification.Builder(this)
.setContentTitle("测试")
.setSmallIcon(R.mipmap.checkbox)
.setAutoCancel(false)
.setContent(remoteViews)
.setContentText("agaweg")
.build();
systemService.notify(5555, builder);
}
// mFileList = (RecyclerView) findViewById(R.id.file_system_list);
// inite();
if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.JELLY_BEAN) {
Notification builder = new Notification.Builder(this)
.setContentTitle("测试")
.setSmallIcon(R.mipmap.checkbox)
.setAutoCancel(false)
.setContentText("agaweg")
.build();
systemService.notify(222, builder);
}
}
private void showPopUp(Context context, View v) {
View view = LayoutInflater.from(context).inflate(R.layout.dialog_layout, null);
popupWindow = new PopupWindow(view,800,400);
popupWindow.setAnimationStyle(R.style.popup_window_anim);
popupWindow.setFocusable(true);
popupWindow.setOutsideTouchable(true);
popupWindow.setBackgroundDrawable(new ColorDrawable(getResources().getColor(android.R.color.transparent)));
int[] location = new int[2];
v.getLocationOnScreen(location);
popupWindow.showAtLocation(v, Gravity.NO_GRAVITY, location[0] - popupWindow.getWidth()/2 + v.getWidth()/2, location[1]-popupWindow.getHeight());
}
/* private void inite() {
LinearLayoutManager manager = new LinearLayoutManager(this, LinearLayoutManager.VERTICAL, false);
ArrayList<String> list = new ArrayList<String>();
list.add("egaweg");
list.add("egaweg");
list.add("egaweg");
list.add("egaweg");
list.add("egaweg");
list.add("egaweg");
list.add("egaweg");
list.add("egaweg");
list.add("egaweg");
list.add("egaweg");
list.add("egaweg");
list.add("egaweg");
mFileList.setLayoutManager(manager);
adapter = new MyTestAdapter(this, list);
mFileList.setAdapter(adapter);
}*/
public void showOrHide(View view) {
TranslateAnimation animationShow = new TranslateAnimation(TranslateAnimation.RELATIVE_TO_SELF, 0,
TranslateAnimation.RELATIVE_TO_SELF, 0,
TranslateAnimation.RELATIVE_TO_SELF, 1f,
TranslateAnimation.RELATIVE_TO_SELF, 0);
TranslateAnimation animationHide = new TranslateAnimation(TranslateAnimation.RELATIVE_TO_SELF, 0,
TranslateAnimation.RELATIVE_TO_SELF, 0,
TranslateAnimation.RELATIVE_TO_SELF, 0,
TranslateAnimation.RELATIVE_TO_SELF, 1f);
animationShow.setDuration(300);
animationHide.setDuration(300);
if (mDelayGroup.getVisibility() != View.VISIBLE) {
mDelayGroup.startAnimation(animationShow);
mDelayGroup.setVisibility(View.VISIBLE);
} else {
mDelayGroup.startAnimation(animationHide);
mDelayGroup.setVisibility(View.INVISIBLE);
}
}
@Override
public void onClick(View v) {
super.onClick(v);
switch (v.getId()) {
case R.id.show_dialog:
showDialog(v);
break;
}
}
private void showDialog(View v) {
if (popupWindow != null &&popupWindow.isShowing()) {
popupWindow.dismiss();
} else {
showPopUp(getApplicationContext(), v);
}
}
}
<file_sep>/app/src/main/java/view/RoundBgView.java
package view;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.RectF;
import android.util.AttributeSet;
import android.widget.ImageView;
import com.example.administrator.custemview.R;
import utils.Utils;
/**
* Created by Administrator on 2015/3/26.
*/
public class RoundBgView extends ImageView {
private Context context;
    private float round = 0; // corner radius
    private int lineColor = 0x1e666666; // border line colour
private Paint mPaint, mLinePaint;
private Bitmap bitmap;
public RoundBgView(Context context, AttributeSet attrs) {
super(context, attrs);
this.context = context;
initeStyled(attrs);
mPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
mLinePaint = new Paint(Paint.ANTI_ALIAS_FLAG);
mPaint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.DST_IN));
mLinePaint.setColor(lineColor);
mLinePaint.setStrokeWidth(Utils.dp2Px(1));
mLinePaint.setStyle(Paint.Style.STROKE);
}
    private void initeStyled(AttributeSet attrs){
        TypedArray typedArray = context.obtainStyledAttributes(attrs, R.styleable.RoundBgView);
        round = typedArray.getDimension(R.styleable.RoundBgView_BorderRadius, Utils.dp2Px(5));
        lineColor = typedArray.getColor(R.styleable.RoundBgView_LineColor, 0x1e666666);
        typedArray.recycle(); // release the TypedArray once the attributes have been read
    }
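    // onDraw draws the ImageView content into an offscreen layer, then paints the rounded
    // black mask bitmap with PorterDuff.Mode.DST_IN so only the pixels covered by the rounded
    // rect survive, which is what produces the rounded corners.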
@Override
protected void onDraw(Canvas canvas) {
canvas.saveLayer(0, 0, getWidth(), getHeight(), null, Canvas.ALL_SAVE_FLAG);
super.onDraw(canvas);
createBitmap(getWidth());
if (bitmap != null && !bitmap.isRecycled()) {
canvas.drawBitmap(bitmap, 0,0,mPaint);
}
canvas.restore();
        // Draw the border line
RectF rectF = new RectF(0,0,getWidth(),getHeight());
canvas.drawRoundRect(rectF,round, round, mLinePaint);
}
    // Create a black rounded-corner bitmap the same size as the view; it is used as the mask layer
private void createBitmap(int w) {
if (bitmap == null || bitmap.isRecycled()) {
bitmap = Bitmap.createBitmap(w,w, Bitmap.Config.ARGB_8888);
Paint paint = new Paint(Paint.ANTI_ALIAS_FLAG);
paint.setColor(Color.BLACK);
Canvas canvas = new Canvas(bitmap);
RectF rectF = new RectF(0,0,w,w);
canvas.drawRoundRect(rectF,round, round,paint);
}
}
}
<file_sep>/app/src/main/java/allinterface/GitProject.java
package allinterface;
/**
* Created by zchao on 2016/5/19.
*/
public interface GitProject {
void getUserPro(String userName, Callback callback);
}
<file_sep>/app/src/main/java/view/PopViewForKeyboard.java
package view;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.inputmethodservice.Keyboard;
import android.text.TextPaint;
import android.view.KeyEvent;
import android.view.View;
import utils.Utils;
/**
* Created by mavin on 2017/3/5.
*/
public class PopViewForKeyboard extends View {
private Keyboard.Key key = null;
private int[] value = new int[4];
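    // value[0..3] hold the key codes rendered at the left / top / right / centre of the popup;
    // onEventHandle() picks the matching one for the pressed d-pad direction.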
private TextPaint mTextPaint;
private int m14dp;
private OnKeyBack keyBackLisenter = null;
public PopViewForKeyboard(Context context, OnKeyBack keyBackLisenter) {
super(context, null);
this.keyBackLisenter = keyBackLisenter;
mTextPaint = new TextPaint(Paint.ANTI_ALIAS_FLAG);
mTextPaint.setColor(Color.BLACK);
m14dp = Utils.dp2Px(getResources(), 14);
mTextPaint.setTextSize(m14dp);
}
// @Override
// public boolean onKeyDown(int keyCode, KeyEvent event) {
// onEventHandle(keyCode);
// return false;
// }
public void onEventHandle(int event) {
int primaryCode = 0;
switch (event) {
case KeyEvent.KEYCODE_DPAD_DOWN:
break;
case KeyEvent.KEYCODE_DPAD_UP:
primaryCode = value[1];
break;
case KeyEvent.KEYCODE_DPAD_LEFT:
primaryCode = value[0];
break;
case KeyEvent.KEYCODE_DPAD_RIGHT:
primaryCode = value[2];
break;
case KeyEvent.KEYCODE_ENTER:
primaryCode = value[3];
break;
}
if (keyBackLisenter != null) {
keyBackLisenter.backKey(primaryCode);
}
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
setMeasuredDimension(Utils.dp2Px(getResources(), 50), Utils.dp2Px(getResources(), 60));
}
public void setKey(Keyboard.Key key) {
this.key = key;
if (key.codes.length == 4) {
value = key.codes;
} else if (key.codes.length == 3) {
value[0] = key.codes[0];
value[1] = 0;
value[2] = key.codes[1];
value[3] = key.codes[2];
}
postInvalidate();
}
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
canvas.drawColor(0xff99ffff);
canvas.drawText(String.valueOf((char) value[0]), 0, getHeight() / 2, mTextPaint);
canvas.drawText(String.valueOf((char) value[1]), getWidth() / 2, m14dp, mTextPaint);
canvas.drawText(String.valueOf((char) value[2]), getWidth() - m14dp, getHeight() / 2, mTextPaint);
canvas.drawText(String.valueOf((char) value[3]), getWidth() / 2, getHeight() / 2, mTextPaint);
}
interface OnKeyBack {
void backKey(int keyCode);
}
}
<file_sep>/app/src/main/java/model/ApiDal.java
package model;
import android.content.Context;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;
import com.google.gson.FieldNamingPolicy;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.reflect.TypeToken;
import java.lang.reflect.Type;
import java.util.List;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import allinterface.ApiDateCallback;
import javaBean.GetIpInfoResponse;
import retrofit.Callback;
import retrofit.RestAdapter;
import retrofit.RetrofitError;
import retrofit.client.Response;
import retrofit.converter.GsonConverter;
import retrofit.http.GET;
/**
* Created by zchao on 2016/5/23.
*/
public class ApiDal {
private static final String TAG = "ApiDal";
private DiskCache mDiskCache;
private Context mContext;
    private static volatile ApiDal instance = null; // volatile is required for safe double-checked locking
private Executor mPreloadExecutor;
private Handler mMainHandler;
public static ApiDal newInstance(){
if (instance == null) {
synchronized (ApiDal.class) {
if (instance == null) {
instance = new ApiDal();
}
}
}
return instance;
}
private ApiService mApiService;
public ApiDal() {
initeService();
}
public ApiDal initeApiDal(Context appContext) {
this.mContext = appContext;
mDiskCache = DiskCache.getInstance(appContext);
mPreloadExecutor = Executors.newFixedThreadPool(5);
mMainHandler = new Handler(Looper.getMainLooper());
return this;
}
    private void initeService() {
        Gson gson = new GsonBuilder()
                .setFieldNamingPolicy(FieldNamingPolicy.LOWER_CASE_WITH_UNDERSCORES)
                .create();
        RestAdapter restAdapter = new RestAdapter.Builder()
                .setEndpoint("https://api.github.com")
                .setConverter(new GsonConverter(gson)) // use the configured Gson instead of a fresh default instance
                .build();
        mApiService = restAdapter.create(ApiService.class);
    }
private <T>void postResultToListener(final ApiDateCallback callback, final T data, final Exception e, final boolean success) {
mMainHandler.post(new Runnable() {
@Override
public void run() {
callback.onDateRecieved(data, e, success);
}
});
}
    /**
     * Fetch a GitHub user's repositories. A disk-cached result younger than
     * CacheTime.CACHE_TIME is returned immediately; otherwise the network is queried
     * and the response is cached. The callback always runs on the main thread.
     * @param userName the GitHub user name
     * @param callback receiver for the result
     */
public void getGitHub(final String userName, final ApiDateCallback callback){
final String cacheKey = "GITHUB" + userName;
mPreloadExecutor.execute(new Runnable() {
@Override
public void run() {
List<GetIpInfoResponse> model;
Type type = new TypeToken<CacheObj<List<GetIpInfoResponse>>>(){}.getType();
CacheObj<List<GetIpInfoResponse>> cacheObj = mDiskCache.getCacheObj(cacheKey, type);
if (cacheObj != null && !cacheObj.isOutOfDate(CacheTime.CACHE_TIME)) {
model = cacheObj.getObj();
if (model != null) {
postResultToListener(callback, model, null, true);
return;
}
}
mApiService.listRepos(userName, new Callback<List<GetIpInfoResponse>>() {
@Override
public void success(List<GetIpInfoResponse> getIpInfoResponses, Response response) {
if (getIpInfoResponses != null) {
mDiskCache.storyCacheObj(cacheKey, getIpInfoResponses);
postResultToListener(callback, getIpInfoResponses, null, true);
return;
}
}
@Override
public void failure(RetrofitError error) {
postResultToListener(callback, null, new RuntimeException(error), false);
return;
}
});
}
});
}
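    /*
     * Usage sketch (illustrative, not original project code):
     *
     *   ApiDal.newInstance()
     *         .initeApiDal(getApplicationContext())
     *         .getGitHub("octocat", callback);
     *
     * where callback is an ApiDateCallback whose onDateRecieved(data, error, success)
     * is delivered on the main thread via postResultToListener().
     */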
}
<file_sep>/app/src/main/java/com/example/administrator/custemview/DropDownActivity.java
package com.example.administrator.custemview;
import android.os.Bundle;
import android.view.View;
import android.widget.ImageView;
import butterknife.Bind;
import butterknife.ButterKnife;
import view.AutoCarouselView;
public class DropDownActivity extends BaseActivity {
private static final String TAG = "DropDownActivity";
@Bind(R.id.auto_carousel_view)
AutoCarouselView mSwitch;
private int screenNo = -1;
private int screenNum;
private int[] img_ids = new int[]{R.drawable.flower, R.drawable.index,
R.drawable.item_bg};
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_drop_dowm);
ButterKnife.bind(this);
screenNum = img_ids.length;
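        // The adapter below advances screenNo through img_ids on every bind, wrapping back to
        // the first drawable after the last, so the carousel cycles through the three images.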
mSwitch.setAdapter(new AutoCarouselView.CarouselViewAdapter() {
@Override
public void bindView(View view) {
if (screenNo == screenNum - 1) {
screenNo = -1;
}
if (screenNo < screenNum - 1) {
screenNo++;
ImageView imgView = (ImageView) view.findViewById(R.id.img);
imgView.setImageResource(img_ids[screenNo]);
}
}
@Override
public int getViewResId() {
return R.layout.switcher_layout;
}
});
mSwitch.startCarousel();
}
}
<file_sep>/app/src/main/java/allinterface/UserModel.java
package allinterface;
/**
* Created by zchao on 2016/5/19.
*/
public interface UserModel {
void login(String userName, String password, Callback callback);
}
<file_sep>/app/src/main/java/view/EventInterceptTestView.java
package view;
import android.content.Context;
import android.util.AttributeSet;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;
import android.widget.TextView;
/**
* Created by zchao on 2016/5/13.
*/
public class EventInterceptTestView extends TextView {
private static final String TAG = "INTER";
public EventInterceptTestView(Context context) {
this(context, null);
}
public EventInterceptTestView(Context context, AttributeSet attrs) {
this(context, attrs, 0);
}
public EventInterceptTestView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
}
@Override
public boolean onTouchEvent(MotionEvent event) {
Log.d(TAG, "View onTouchEvent() called with: " + "event = [" + event + "]");
return true;
}
}
<file_sep>/app/src/main/java/com/example/administrator/custemview/PhoneDetailInfoActivity.java
package com.example.administrator.custemview;
import android.os.Build;
import android.os.Bundle;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.telephony.TelephonyManager;
import android.view.View;
import android.view.Window;
import android.widget.RelativeLayout;
import java.util.ArrayList;
import java.util.List;
import adapter.PhoneInfoAdapter;
public class PhoneDetailInfoActivity extends BaseActivity {
private RecyclerView mRv;
private PhoneInfoAdapter mAdapter;
private RelativeLayout mHeader;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_phone_detail_info);
setTitle("关于手机");
mSetting.setVisibility(View.GONE);
mRv = (RecyclerView) findViewById(R.id.rv);
Window window = getWindow();
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
window.setStatusBarColor(0x002c48ff);
}
initeRv();
fillInfo();
}
private void fillInfo() {
        List<PhoneInfo> info = new ArrayList<>();
TelephonyManager systemService = (TelephonyManager) getSystemService(TELEPHONY_SERVICE);
info.add(new PhoneInfo("设备ID", systemService.getDeviceId()));
info.add(new PhoneInfo("主板(BOARD)", Build.BOARD));
// info.add(new PhoneInfo("指令集(SUPPORTED_ABIS)", Build.SUPPORTED_ABIS.toString()));
info.add(new PhoneInfo("设备参数(DEVICE)", Build.DEVICE));
info.add(new PhoneInfo("屏幕参数(DISPLAY)", Build.DISPLAY));
info.add(new PhoneInfo("唯一编号(FINGERPRINT)", Build.FINGERPRINT));
info.add(new PhoneInfo("硬件序列号(SERIAL)", Build.SERIAL));
info.add(new PhoneInfo("修订版本列表(ID)", Build.ID));
info.add(new PhoneInfo("硬件制造商(MANUFACTURER)", Build.MANUFACTURER));
info.add(new PhoneInfo("版本(MODEL)", Build.MODEL));
info.add(new PhoneInfo("硬件名(HARDWARE)", Build.HARDWARE));
info.add(new PhoneInfo("手机产品名(PRODUCT)", Build.PRODUCT));
info.add(new PhoneInfo("Build描述标签(TAGS)", Build.TAGS));
info.add(new PhoneInfo("Builder类型(TYPE)", Build.TYPE));
info.add(new PhoneInfo("当前开发代号(VERSION.CODENAME)", Build.VERSION.CODENAME));
info.add(new PhoneInfo("源码控制版本号(VERSION.INCREMENTAL)", Build.VERSION.INCREMENTAL));
info.add(new PhoneInfo("版本字符串(VERSION.RELEASE)", Build.VERSION.RELEASE));
info.add(new PhoneInfo("版本号(VERSION.SDK_INT)", String.valueOf(Build.VERSION.SDK_INT)));
info.add(new PhoneInfo("Host值(HOST)", Build.HOST));
info.add(new PhoneInfo("User名(USER)", Build.USER));
info.add(new PhoneInfo("编译时间(TIME)", String.valueOf(Build.TIME)));
mAdapter.addInfo(info);
}
private void initeRv() {
LinearLayoutManager manager = new LinearLayoutManager(this);
mRv.setLayoutManager(manager);
mAdapter = new PhoneInfoAdapter(this);
mRv.setAdapter(mAdapter);
}
public class PhoneInfo {
public PhoneInfo() {
}
public PhoneInfo(String name, String info) {
this.name = name;
this.info = info;
}
public String name;
public String info;
}
}
<file_sep>/app/src/main/java/view/CustomViewTest.java
package view;
import android.content.Context;
import android.view.LayoutInflater;
import android.widget.FrameLayout;
import com.example.administrator.custemview.R;
/**
* Created by zchao on 2016/3/21.
*/
public class CustomViewTest extends FrameLayout {
public CustomViewTest(Context context) {
super(context);
LayoutInflater.from(context).inflate(R.layout.test_layout, this);
}
}
<file_sep>/app/src/main/java/com/example/administrator/custemview/ExecutorTestActivity.java
package com.example.administrator.custemview;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.text.format.Time;
import android.util.Log;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
public class ExecutorTestActivity extends AppCompatActivity {
private static final String TAG = "ExecutorTestActivity";
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_executor_test);
/* ScheduledExecutorService executorService = Executors.newScheduledThreadPool(5);
Log.d(TAG, "onCreate() calle= [" + System.currentTimeMillis() + "]");
ExecutorService cachedThreadPool = Executors.newCachedThreadPool();
cachedThreadPool.submit(new Runnable() {
@Override
public void run() {
}
});
        // Runs periodically; the initial delay and period are configurable
executorService.scheduleAtFixedRate(new Runnable() {
@Override
public void run() {
Log.d(TAG, "run() called="+System.currentTimeMillis());
}
}, 1, 3, TimeUnit.SECONDS);*/
}
}
<file_sep>/app/src/main/java/javaBean/Student.java
package javaBean;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
import java.util.Date;
/**
* Created by zchao on 2016/3/9.
*/
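/*
 * Gson mapping notes (added for clarity): fields annotated with @Expose are the only ones
 * serialized when the Gson instance is built with excludeFieldsWithoutExposeAnnotation();
 * @SerializedName maps userName <-> "name", userNickName <-> "nickname" and birthDay <-> "bir".
 */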
public class Student {
@Expose
private int userID;
@Expose
@SerializedName("name")
private String userName;
@SerializedName("nickname")
private String userNickName;
@SerializedName("bir")
@Expose
private Date birthDay;
private String userAddress;
public Student() {
}
public void setBirthDay(Date birthDay) {
this.birthDay = birthDay;
}
public Date getBirthDay() {
return birthDay;
}
public int getUserID() {
return userID;
}
public String getUserName() {
return userName;
}
public String getUserNickName() {
return userNickName;
}
public String getUserAddress() {
return userAddress;
}
public void setUserAddress(String userAddress) {
this.userAddress = userAddress;
}
public void setUserID(int userID) {
this.userID = userID;
}
public void setUserName(String userName) {
this.userName = userName;
}
public void setUserNickName(String userNickName) {
this.userNickName = userNickName;
}
@Override
public String toString() {
return "Student{" +
"userID=" + userID +
", userName='" + userName + '\'' +
", userNickName='" + userNickName + '\'' +
", birthDay=" + birthDay +
", userAddress='" + userAddress + '\'' +
'}';
}
}
<file_sep>/app/src/main/java/view/TestView2.java
package view;
import android.content.Context;
import android.content.res.Resources;
import android.support.annotation.Nullable;
import android.util.AttributeSet;
import android.util.TypedValue;
import android.view.LayoutInflater;
import android.view.View;
/**
* Created by zchao on 2017/5/9.
* desc:
* version:
*/
public class TestView2 extends View {
public TestView2(Context context) {
super(context);
}
public TestView2(Context context, @Nullable AttributeSet attrs) {
super(context, attrs);
}
}
<file_sep>/app/src/main/java/view/ShadowImageView.java
package view;
import android.annotation.TargetApi;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.BlurMaskFilter;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.PorterDuff;
import android.graphics.PorterDuffXfermode;
import android.graphics.drawable.BitmapDrawable;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.renderscript.Allocation;
import android.renderscript.RenderScript;
import android.renderscript.ScriptIntrinsicBlur;
import android.util.AttributeSet;
import android.view.View;
import android.widget.ImageView;
import com.example.administrator.custemview.R;
/**
* Created by zchao on 2016/8/9.
*/
public class ShadowImageView extends ImageView {
private Drawable shadowDrawable = null;
private Paint mShadowPaint = null;
private Context context;
    private float shadowDx, shadowDy; // shadow offset (x / y)
    private int shadowColor; // shadow colour
private Paint paint;
private Bitmap bitmap;
public ShadowImageView(Context context) {
this(context, null);
}
public ShadowImageView(Context context, AttributeSet attrs) {
this(context, attrs, 0);
}
public ShadowImageView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
this.context = context;
initeAttr(attrs);
initShadow();
}
    private void initeAttr(AttributeSet attrs) {
        TypedArray typedArray = context.obtainStyledAttributes(attrs, R.styleable.ShadowImageView);
        shadowDx = typedArray.getFloat(R.styleable.ShadowImageView_shadowDx, 0);
        // Assumes the attribute is declared as ShadowImageView_shadowDy, matching the other two;
        // the original referenced R.styleable.ShadowImage_shadowDY, which looks like a typo.
        shadowDy = typedArray.getFloat(R.styleable.ShadowImageView_shadowDy, 0);
        shadowColor = typedArray.getColor(R.styleable.ShadowImageView_shadowColor, Color.BLACK);
        typedArray.recycle();
    }
private void initShadow() {
mShadowPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
mShadowPaint.setShadowLayer(50, 0,20,0xff000000);
BlurMaskFilter maskFilter = new BlurMaskFilter(5, BlurMaskFilter.Blur.SOLID);
mShadowPaint.setMaskFilter(maskFilter);
// mShadowPaint.setColor();
paint = new Paint();
}
@Override
protected void onDraw(Canvas canvas) {
disableHardwareRendering(this);
int sl = canvas.saveLayer(0,0,getWidth(), getHeight(),null, Canvas.ALL_SAVE_FLAG);
shadowDrawable = getDrawable().mutate();
if (shadowDrawable != null) {
canvas.drawColor(shadowColor);
mShadowPaint.setXfermode(new PorterDuffXfermode(PorterDuff.Mode.DST_IN));
createShadow(shadowDrawable);
if (bitmap != null) {
canvas.drawBitmap(bitmap, 0, 0, mShadowPaint);
}
}
canvas.restoreToCount(sl);
canvas.drawCircle(10, 10, 5,mShadowPaint);
super.onDraw(canvas);
}
private void createShadow(Drawable shadowDrawable) {
Bitmap b = ((BitmapDrawable)shadowDrawable).getBitmap();
if (bitmap == null || bitmap.isRecycled()) {
Bitmap bitmap1 = Bitmap.createBitmap(getWidth(), getHeight(), Bitmap.Config.ARGB_8888);
Canvas canvas1 = new Canvas(bitmap1);
canvas1.drawBitmap(b, (getWidth()-b.getWidth())/2+shadowDx, (getHeight()-b.getHeight())/2+shadowDy,new Paint());
bitmap = fastblur(context, b, 10);
}
}
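    /**
     * Fast blur. This appears to be the widely circulated "StackBlur" implementation
     * (Mario Klingemann's algorithm): it runs a horizontal pass and then a vertical pass,
     * maintaining a sliding "stack" of pixel sums so each output pixel is produced in
     * constant time regardless of the radius.
     */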
public Bitmap fastblur(Context context, Bitmap sentBitmap, int radius) {
Bitmap bitmap = sentBitmap.copy(sentBitmap.getConfig(), true);
if (radius < 1) {
return (null);
}
int w = bitmap.getWidth();
int h = bitmap.getHeight();
int[] pix = new int[w * h];
bitmap.getPixels(pix, 0, w, 0, 0, w, h);
int wm = w - 1;
int hm = h - 1;
int wh = w * h;
int div = radius + radius + 1;
int r[] = new int[wh];
int g[] = new int[wh];
int b[] = new int[wh];
int rsum, gsum, bsum, x, y, i, p, yp, yi, yw;
int vmin[] = new int[Math.max(w, h)];
int divsum = (div + 1) >> 1;
divsum *= divsum;
int temp = 256 * divsum;
int dv[] = new int[temp];
for (i = 0; i < temp; i++) {
dv[i] = (i / divsum);
}
yw = yi = 0;
int[][] stack = new int[div][3];
int stackpointer;
int stackstart;
int[] sir;
int rbs;
int r1 = radius + 1;
int routsum, goutsum, boutsum;
int rinsum, ginsum, binsum;
for (y = 0; y < h; y++) {
rinsum = ginsum = binsum = routsum = goutsum = boutsum = rsum = gsum = bsum = 0;
for (i = -radius; i <= radius; i++) {
p = pix[yi + Math.min(wm, Math.max(i, 0))];
sir = stack[i + radius];
sir[0] = (p & 0xff0000) >> 16;
sir[1] = (p & 0x00ff00) >> 8;
sir[2] = (p & 0x0000ff);
rbs = r1 - Math.abs(i);
rsum += sir[0] * rbs;
gsum += sir[1] * rbs;
bsum += sir[2] * rbs;
if (i > 0) {
rinsum += sir[0];
ginsum += sir[1];
binsum += sir[2];
} else {
routsum += sir[0];
goutsum += sir[1];
boutsum += sir[2];
}
}
stackpointer = radius;
for (x = 0; x < w; x++) {
r[yi] = dv[rsum];
g[yi] = dv[gsum];
b[yi] = dv[bsum];
rsum -= routsum;
gsum -= goutsum;
bsum -= boutsum;
stackstart = stackpointer - radius + div;
sir = stack[stackstart % div];
routsum -= sir[0];
goutsum -= sir[1];
boutsum -= sir[2];
if (y == 0) {
vmin[x] = Math.min(x + radius + 1, wm);
}
p = pix[yw + vmin[x]];
sir[0] = (p & 0xff0000) >> 16;
sir[1] = (p & 0x00ff00) >> 8;
sir[2] = (p & 0x0000ff);
rinsum += sir[0];
ginsum += sir[1];
binsum += sir[2];
rsum += rinsum;
gsum += ginsum;
bsum += binsum;
stackpointer = (stackpointer + 1) % div;
sir = stack[(stackpointer) % div];
routsum += sir[0];
goutsum += sir[1];
boutsum += sir[2];
rinsum -= sir[0];
ginsum -= sir[1];
binsum -= sir[2];
yi++;
}
yw += w;
}
for (x = 0; x < w; x++) {
rinsum = ginsum = binsum = routsum = goutsum = boutsum = rsum = gsum = bsum = 0;
yp = -radius * w;
for (i = -radius; i <= radius; i++) {
yi = Math.max(0, yp) + x;
sir = stack[i + radius];
sir[0] = r[yi];
sir[1] = g[yi];
sir[2] = b[yi];
rbs = r1 - Math.abs(i);
rsum += r[yi] * rbs;
gsum += g[yi] * rbs;
bsum += b[yi] * rbs;
if (i > 0) {
rinsum += sir[0];
ginsum += sir[1];
binsum += sir[2];
} else {
routsum += sir[0];
goutsum += sir[1];
boutsum += sir[2];
}
if (i < hm) {
yp += w;
}
}
yi = x;
stackpointer = radius;
for (y = 0; y < h; y++) {
pix[yi] = (0xff000000 & pix[yi]) | (dv[rsum] << 16)
| (dv[gsum] << 8) | dv[bsum];
rsum -= routsum;
gsum -= goutsum;
bsum -= boutsum;
stackstart = stackpointer - radius + div;
sir = stack[stackstart % div];
routsum -= sir[0];
goutsum -= sir[1];
boutsum -= sir[2];
if (x == 0) {
vmin[y] = Math.min(y + r1, hm) * w;
}
p = x + vmin[y];
sir[0] = r[p];
sir[1] = g[p];
sir[2] = b[p];
rinsum += sir[0];
ginsum += sir[1];
binsum += sir[2];
rsum += rinsum;
gsum += ginsum;
bsum += binsum;
stackpointer = (stackpointer + 1) % div;
sir = stack[stackpointer];
routsum += sir[0];
goutsum += sir[1];
boutsum += sir[2];
rinsum -= sir[0];
ginsum -= sir[1];
binsum -= sir[2];
yi += w;
}
}
bitmap.setPixels(pix, 0, w, 0, 0, w, h);
return (bitmap);
}
@TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR1)
private Bitmap blur(Bitmap bkg, float radius) {
Bitmap overlay = Bitmap.createBitmap(bkg.getWidth(), bkg.getHeight(), Bitmap.Config.ARGB_8888);
Canvas canvas = new Canvas(overlay);
canvas.drawBitmap(bkg, 0, 0, null);
RenderScript rs = RenderScript.create(context);
Allocation overlayAlloc = Allocation.createFromBitmap(rs, overlay);
ScriptIntrinsicBlur blur = ScriptIntrinsicBlur.create(rs, overlayAlloc.getElement());
blur.setInput(overlayAlloc);
blur.setRadius(radius);
blur.forEach(overlayAlloc);
overlayAlloc.copyTo(overlay);
return null;
// return new BitmapDrawable(getResources(), overlay);
}
public static void disableHardwareRendering(View v) {
if(android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.HONEYCOMB) {
v.setLayerType(View.LAYER_TYPE_SOFTWARE, null);
}
}
}
<file_sep>/app/src/main/java/adapter/PhoneInfoAdapter.java
package adapter;
import android.content.Context;
import android.os.Build;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.example.administrator.custemview.PhoneDetailInfoActivity;
import com.example.administrator.custemview.R;
import java.util.ArrayList;
import java.util.List;
/**
* Created by zchao on 2016/5/17.
*/
public class PhoneInfoAdapter extends RecyclerView.Adapter<RecyclerView.ViewHolder> {
private Context context;
    private List<PhoneDetailInfoActivity.PhoneInfo> info = new ArrayList<>();
private String product = Build.BRAND;
private final LayoutInflater inflater;
public PhoneInfoAdapter(Context context) {
this.context = context;
inflater = LayoutInflater.from(context);
}
public void addInfo(List<PhoneDetailInfoActivity.PhoneInfo> info){
this.info.addAll(info);
notifyDataSetChanged();
}
@Override
public RecyclerView.ViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
if (viewType == 0) {
return new HeadViewHolder(inflater.inflate(R.layout.phone_detail_head, parent, false));
} else if (viewType == 1) {
return new DetailHolder(inflater.inflate(R.layout.item_phone_info_detail, parent, false));
}
return null;
}
@Override
public void onBindViewHolder(RecyclerView.ViewHolder holder, int position) {
if (getItemViewType(position) == 0) {
((HeadViewHolder)holder).mProduct.setText(product);
}else {
((DetailHolder) holder).mName.setText(info.get(position - 1).name);
((DetailHolder) holder).mInfo.setText(info.get(position - 1).info);
}
}
@Override
public int getItemCount() {
return info.size() + 1;
}
@Override
public int getItemViewType(int position) {
if (position == 0) {
return 0;
} else {
return 1;
}
}
class DetailHolder extends RecyclerView.ViewHolder {
TextView mName, mInfo;
public DetailHolder(View itemView) {
super(itemView);
mName = (TextView) itemView.findViewById(R.id.item_phone_info_detail_name);
mInfo = (TextView) itemView.findViewById(R.id.item_phone_info_detail_info);
}
}
class HeadViewHolder extends RecyclerView.ViewHolder {
TextView mProduct;
public HeadViewHolder(View itemView) {
super(itemView);
mProduct = (TextView) itemView.findViewById(R.id.phone_product);
}
}
}
<file_sep>/app/src/main/java/ImageAnimation.java
import android.animation.Animator;
import android.content.Context;
import android.util.AttributeSet;
import android.view.View;
import android.view.animation.Animation;
/**
* Created by zchao on 2016/6/3.
*/
public class ImageAnimation extends Animation {
private View view;
private long duration;
public ImageAnimation() {
}
public ImageAnimation(Context context, AttributeSet attrs) {
super(context, attrs);
}
}
<file_sep>/app/src/main/java/com/example/administrator/custemview/OkHttpTestActivity.java
package com.example.administrator.custemview;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import java.io.File;
import java.io.IOException;
import okhttp3.Call;
import okhttp3.Callback;
import okhttp3.Headers;
import okhttp3.MediaType;
import okhttp3.MultipartBody;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;
import okio.BufferedSink;
public class OkHttpTestActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_ok_http_test);
OkHttpClient client = new OkHttpClient();
MultipartBody request1 = new MultipartBody.Builder()
.addPart(Headers.of("", ""), RequestBody.create(MediaType.parse(""), ""))
.build();
Request requestPost = new Request.Builder()
.post(request1)
.build();
Request request = new Request.Builder()
.url("")
.get()
.build();
client.newCall(request).enqueue(new Callback() {
@Override
public void onFailure(Call call, IOException e) {
}
@Override
public void onResponse(Call call, Response response) throws IOException {
runOnUiThread(new Runnable() {
@Override
public void run() {
}
});
}
});
}
}
<file_sep>/app/src/main/java/com/example/administrator/custemview/ScreenReciver.java
package com.example.administrator.custemview;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.text.TextUtils;
/**
* Created by zchao on 2016/11/23.
*/
public class ScreenReciver extends BroadcastReceiver {
@Override
public void onReceive(Context context, Intent intent) {
String path = intent.getStringExtra("PATH");
if (!TextUtils.isEmpty(path)) {
Intent intent1 = new Intent(context, ScreenShotActivity.class);
intent1.putExtra("PATH", path);
intent1.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
context.startActivity(intent1);
}
}
}
<file_sep>/app/src/main/java/view/CustomCheckBox.java
package view;
import android.animation.AnimatorInflater;
import android.animation.ObjectAnimator;
import android.animation.TypeEvaluator;
import android.animation.ValueAnimator;
import android.annotation.TargetApi;
import android.app.usage.UsageEvents;
import android.content.Context;
import android.graphics.PointF;
import android.os.Build;
import android.util.AttributeSet;
import android.util.EventLog;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.animation.AnimationUtils;
import android.view.animation.LinearInterpolator;
import android.widget.ImageView;
import android.widget.RelativeLayout;
import com.example.administrator.custemview.R;
/**
* Created by zchao on 2016/5/5.
*/
public class CustomCheckBox extends RelativeLayout {
private ImageView mImage;
public CustomCheckBox(final Context context, AttributeSet attrs) {
super(context, attrs);
LayoutInflater.from(context).inflate(R.layout.custem_checkbox, this);
mImage = (ImageView) findViewById(R.id.check);
findViewById(R.id.root).setOnClickListener(new OnClickListener() {
@TargetApi(Build.VERSION_CODES.HONEYCOMB)
@Override
public void onClick(View v) {
int visibil = mImage.getVisibility();
if (visibil == VISIBLE) {
mImage.setAnimation(AnimationUtils.loadAnimation(context, R.anim.check_miss_anim));
mImage.setVisibility(INVISIBLE);
} else if (visibil == INVISIBLE) {
mImage.setAnimation(AnimationUtils.loadAnimation(context, R.anim.check_show_anim));
mImage.setVisibility(VISIBLE);
}
}
});
}
public boolean isChecked() {
        return mImage.getVisibility() == VISIBLE;
}
public void setChecked(boolean checked){
if (checked) {
mImage.setVisibility(VISIBLE);
}else {
mImage.setVisibility(INVISIBLE);
}
}
}
<file_sep>/app/src/main/java/view/CusBoardView.java
package view;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.RectF;
import android.graphics.drawable.ColorDrawable;
import android.inputmethodservice.Keyboard;
import android.inputmethodservice.KeyboardView;
import android.text.TextPaint;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.view.KeyEvent;
import android.view.View;
import android.widget.LinearLayout;
import android.widget.PopupWindow;
import android.widget.Toast;
import com.example.administrator.custemview.R;
import java.util.List;
import utils.Utils;
/**
* Created by zchao on 2017/3/2.
* desc:
* version:
*/
public class CusBoardView extends View {
public static final int KEYCODE_CLEAR = -7;
private Keyboard mKeyboard = null;
private List<Keyboard.Key> mKeys;
private TextPaint mTextPaint;
private Paint mLinePaint;
private Context mContext;
private int mCurrentKey = 0;
private OnKeyboardActionListener listener = null;
private PopupWindow mPopWindow;
private PopViewForKeyboard mPopView;
public CusBoardView(Context context) {
this(context, null);
}
public CusBoardView(Context context, AttributeSet attrs) {
this(context, attrs, 0);
}
    public CusBoardView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        mContext = context;
        mKeyboard = new Keyboard(context, R.xml.keyboard);
        initData(context); // populate mKeys before inspecting it below; the original checked mKeys while it was still null
        if (mKeys != null && mKeys.size() > 3) {
            mCurrentKey = 3;
        }
        mTextPaint = new TextPaint(Paint.ANTI_ALIAS_FLAG);
        mTextPaint.setColor(0xff000000);
        mTextPaint.setTextSize(Utils.dp2Px(getResources(), 15));
        mLinePaint = new Paint(Paint.ANTI_ALIAS_FLAG);
        mLinePaint.setColor(Color.RED);
        mLinePaint.setStrokeWidth(3);
        mLinePaint.setStyle(Paint.Style.STROKE);
    }
private void initData(Context context) {
mKeys = mKeyboard.getKeys();
mPopView = new PopViewForKeyboard(context, new PopViewForKeyboard.OnKeyBack() {
@Override
public void backKey(int keyCode) {
if (mPopWindow.isShowing()) {
mPopWindow.dismiss();
if (listener != null) {
listener.onKey(keyCode);
}
Toast.makeText(mContext, Character.toString((char)keyCode), Toast.LENGTH_SHORT).show();
}
}
});
mPopWindow = new PopupWindow(mPopView);
mPopWindow.setAnimationStyle(R.style.popup_window_anim_key);
        mPopWindow.setWidth(LinearLayout.LayoutParams.WRAP_CONTENT); // width and height must be set explicitly for the popup to show
mPopWindow.setHeight(LinearLayout.LayoutParams.WRAP_CONTENT);
// mPopWindow.setFocusable(true);
        ColorDrawable dw = new ColorDrawable(0x00000000); // fully transparent background
mPopWindow.setBackgroundDrawable(dw);
}
public void setKeyBoard(Keyboard keyBoard){
mKeyboard = keyBoard;
}
@Override
public void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
int sizeW = MeasureSpec.getSize(widthMeasureSpec);
int sizeY = MeasureSpec.getSize(heightMeasureSpec);
int modeW = MeasureSpec.getMode(widthMeasureSpec);
int modeY = MeasureSpec.getMode(heightMeasureSpec);
int height = 0;
if (mKeys != null && mKeys.size() > 0) {
height = mKeys.get(0).height;
height += Utils.dp2Px(getResources(),10);
}
setMeasuredDimension(sizeW, Math.max(sizeY, height * 4));
}
// @Override
// public boolean onKeyDown(int keyCode, KeyEvent event) {
// onEventHandle(keyCode);
// return false;
// }
public void onEventHandle(int event){
if (!mPopWindow.isShowing()) {
handleByView(event);
} else {
handleByPopWindow(event);
}
}
private void handleByPopWindow(int event) {
if (mPopView != null) {
mPopView.onEventHandle(event);
}
}
/**
* Handles DPAD navigation of the highlighted key inside the view itself.
* @param event the key code to handle
*/
private void handleByView(int event) {
switch (event) {
case KeyEvent.KEYCODE_DPAD_DOWN:
if (mCurrentKey < 8) {
mCurrentKey += 3;
} else if (mCurrentKey == 8) {
mCurrentKey += 2;
}
break;
case KeyEvent.KEYCODE_DPAD_UP:
if (mCurrentKey > 8) {
mCurrentKey -= 2;
} else if (mCurrentKey >2) {
mCurrentKey -= 3;
}
break;
case KeyEvent.KEYCODE_DPAD_LEFT:
if ((mCurrentKey%3) != 0) {
mCurrentKey -= 1;
}
break;
case KeyEvent.KEYCODE_DPAD_RIGHT:
if (mCurrentKey%3 != 2 && mCurrentKey != 10) {
mCurrentKey += 1;
}
break;
case KeyEvent.KEYCODE_ENTER:
openPopWindow(mKeys.get(mCurrentKey));
return;
}
postInvalidate();
}
/**
* Opens the popup window for a key, or commits the key directly when it has no popup characters.
* @param key the key that was activated
*/
private void openPopWindow(Keyboard.Key key) {
if (TextUtils.isEmpty(key.popupCharacters)) {
if (listener != null) {
listener.onKey(key.codes[0]);
}
} else {
mPopView.setKey(key);
mPopWindow.showAsDropDown(this, key.x, key.y - getHeight());
}
}
@Override
public void onDraw(Canvas canvas) {
super.onDraw(canvas);
for (int i = 0; i < mKeys.size(); i++) {
if (mCurrentKey == i) {
mLinePaint.setColor(Color.GREEN);
} else {
mLinePaint.setColor(Color.RED);
}
Keyboard.Key key = mKeys.get(i);
if (!TextUtils.isEmpty(key.label)) {
canvas.drawText(key.label.toString(), (key.x + (key.width - mTextPaint.measureText(key.label.toString())) / 2), (key.y + key.height/2), mTextPaint);
}
canvas.drawRect(new RectF(key.x, key.y, key.x + key.width, key.y + key.height), mLinePaint);
}
}
public interface OnKeyboardActionListener {
void onKey(int primaryCode);
}
public void setOnKeyboardActionListener(OnKeyboardActionListener listener) {
this.listener = listener;
}
}
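/*
 * Usage sketch added for illustration only (not part of the original sources):
 * wires the keyboard view to an EditText through OnKeyboardActionListener.
 * Converting primaryCode to a char mirrors what the view's own Toast does, and
 * KEYCODE_CLEAR is treated as "clear the field". Key events still need to be
 * forwarded from the host Activity's onKeyDown() into onEventHandle().
 */
class CusBoardViewSample {
static void attach(CusBoardView board, final android.widget.EditText target) {
board.setOnKeyboardActionListener(new CusBoardView.OnKeyboardActionListener() {
@Override
public void onKey(int primaryCode) {
if (primaryCode == CusBoardView.KEYCODE_CLEAR) {
target.setText("");
} else {
target.append(Character.toString((char) primaryCode));
}
}
});
}
}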
<file_sep>/app/src/main/java/view/MatrixTestView.java
package view;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.support.annotation.Nullable;
import android.util.AttributeSet;
import android.view.View;
import com.example.administrator.custemview.R;
/**
* Created by zchao on 2017/5/18.
* desc:
* version:
*/
public class MatrixTestView extends View {
private Bitmap bitmap;
private Matrix matrix, matrix1;
private Paint paint;
private int a = 0;
private int x = 0, y = 0;
private float scan = 0f;
private float[] value = new float[9];
public MatrixTestView(Context context) {
this(context, null);
}
public MatrixTestView(Context context, @Nullable AttributeSet attrs) {
super(context, attrs);
bitmap = BitmapFactory.decodeResource(getResources(), R.drawable.snow);
matrix = new Matrix();
matrix1 = new Matrix();
paint = new Paint(Paint.ANTI_ALIAS_FLAG);
// matrix1.preScale(0.5f, 0.5f);
// matrix.setScale(1f,1f);
}
@Override
protected void onDraw(Canvas canvas) {
changeMatrix();
canvas.drawBitmap(bitmap, matrix, paint);
// canvas.drawBitmap(bitmap, matrix1, paint);
invalidate(); // schedule the next frame from onDraw so the bitmap keeps animating
}
private void changeMatrix() {
matrix.preRotate(1, bitmap.getWidth()/2, bitmap.getHeight()/2);
matrix.postTranslate(1, 1);
matrix.getValues(value);
if (value[2] >= getWidth() || value[5] >= getHeight()) {
matrix.reset();
value[2] = 0;
value[5] = 0;
}
// matrix1.preRotate(2, bitmap.getWidth()/2, bitmap.getHeight()/2);
matrix1.postTranslate(1,2);
}
}
<file_sep>/app/src/main/java/view/DragHelperViewGroup.java
package view;
import android.content.Context;
import android.support.v4.view.ViewCompat;
import android.support.v4.view.ViewConfigurationCompat;
import android.support.v4.view.ViewPager;
import android.support.v4.widget.ViewDragHelper;
import android.util.AttributeSet;
import android.util.Log;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewConfiguration;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import java.util.NoSuchElementException;
/**
* Created by zchao on 2016/5/13.
*/
public class DragHelperViewGroup extends FrameLayout {
private static final String TAG = "DragHelperViewGroup";
private ViewDragHelper viewDragHelper;
private View mMainView;
private View mMenuView;
private boolean needDragLeft = false;
private ViewDragHelper.Callback callback = new ViewDragHelper.Callback() {
@Override
public boolean tryCaptureView(View child, int pointerId) {
return mMainView == child;
}
@Override
public int clampViewPositionHorizontal(View child, int left, int dx) {
return left;
}
@Override
public int clampViewPositionVertical(View child, int top, int dy) {
return 0;
}
@Override
public void onViewReleased(View releasedChild, float xvel, float yvel) {
super.onViewReleased(releasedChild, xvel, yvel);
if (mMainView.getLeft() < mWidth / 2) {
viewDragHelper.smoothSlideViewTo(mMainView, 0, 0);
ViewCompat.postInvalidateOnAnimation(DragHelperViewGroup.this);
} else {
viewDragHelper.smoothSlideViewTo(mMainView, mWidth, 0);
ViewCompat.postInvalidateOnAnimation(DragHelperViewGroup.this);
}
}
};
private int mWidth;
private int mLastX;
private int mLastY;
private int mTouchSlop;
public DragHelperViewGroup(Context context) {
this(context, null);
}
public DragHelperViewGroup(Context context, AttributeSet attrs) {
this(context, attrs, 0);
}
public DragHelperViewGroup(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
viewDragHelper = ViewDragHelper.create(this, callback);
ViewConfiguration configuration = ViewConfiguration.get(context);
mTouchSlop = ViewConfigurationCompat.getScaledPagingTouchSlop(configuration)/3;
}
@Override
protected void onFinishInflate() {
super.onFinishInflate();
mMenuView = getChildAt(0);
mMainView = getChildAt(1);
}
@Override
protected void onSizeChanged(int w, int h, int oldw, int oldh) {
super.onSizeChanged(w, h, oldw, oldh);
mWidth = mMenuView.getMeasuredWidth();
}
@Override
public boolean onInterceptTouchEvent(MotionEvent ev) {
return viewDragHelper.shouldInterceptTouchEvent(ev);
}
@Override
public boolean onTouchEvent(MotionEvent event) {
viewDragHelper.processTouchEvent(event);
return true;
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
}
@Override
public void computeScroll() {
if (viewDragHelper.continueSettling(true)) {
ViewCompat.postInvalidateOnAnimation(this);
}
}
}
<file_sep>/app/src/main/java/view/CaleanderGroup.java
package view;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.support.v4.view.ViewCompat;
import android.support.v4.widget.ViewDragHelper;
import android.util.AttributeSet;
import android.view.GestureDetector;
import android.view.MotionEvent;
import android.view.VelocityTracker;
import android.view.View;
import android.view.ViewConfiguration;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import android.widget.LinearLayout;
import android.widget.Scroller;
/**
* Created by zchao on 2016/11/9.
*/
public class CaleanderGroup extends FrameLayout {
private ViewDragHelper helper;
private Scroller scroller;
private int mTouchSlop;
private VelocityTracker tracker;
private GestureDetector detector;
private int minVelocity;
private int maxVelocity;
private View view0;
private View view1;
private View view2;
public CaleanderGroup(Context context) {
this(context, null);
}
public CaleanderGroup(Context context, AttributeSet attrs) {
super(context, attrs);
scroller = new Scroller(context);
mTouchSlop = ViewConfiguration.get(context).getScaledTouchSlop();
minVelocity = ViewConfiguration.get(context).getScaledMinimumFlingVelocity();
maxVelocity = ViewConfiguration.get(context).getScaledMaximumFlingVelocity();
detector = new GestureDetector(context, listener);
// the drag helper was declared but never created, which made computeScroll() and the release handling crash; create it here
helper = ViewDragHelper.create(this, callback);
}
GestureDetector.SimpleOnGestureListener listener = new GestureDetector.SimpleOnGestureListener(){
@Override
public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) {
ViewCompat.offsetLeftAndRight(view0, (int) distanceX);
ViewCompat.offsetLeftAndRight(view1, (int) distanceX);
ViewCompat.offsetLeftAndRight(view2, (int) distanceX);
return super.onScroll(e1, e2, distanceX, distanceY);
}
@Override
public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) {
return super.onFling(e1, e2, velocityX, velocityY);
}
@Override
public boolean onSingleTapUp(MotionEvent e) {
return true;
}
};
ViewDragHelper.Callback callback = new ViewDragHelper.Callback() {
@Override
public boolean tryCaptureView(View child, int pointerId) {
String tag = (String) child.getTag();
return tag.equals("1") ;
}
@Override
public void onViewPositionChanged(View changedView, int left, int top, int dx, int dy) {
View view0 = findViewWithTag("0");
View view1 = findViewWithTag("1");
View view2 = findViewWithTag("2");
ViewCompat.offsetLeftAndRight(view0, dx);
ViewCompat.offsetLeftAndRight(view2, dx);
ViewCompat.offsetTopAndBottom(view0, 0);
ViewCompat.offsetTopAndBottom(view2, 0);
}
@Override
public int clampViewPositionVertical(View child, int top, int dy) {
return 0;
}
@Override
public int clampViewPositionHorizontal(View child, int left, int dx) {
return left;
}
@Override
public void onViewReleased(View releasedChild, float xvel, float yvel) {
View view0 = findViewWithTag("0");
View view1 = findViewWithTag("1");
View view2 = findViewWithTag("2");
if (releasedChild.getLeft() > getWidth() / 2) {
helper.smoothSlideViewTo(view1, getWidth(), 0);
helper.smoothSlideViewTo(view0, 0, 0);
ViewCompat.postInvalidateOnAnimation(CaleanderGroup.this);
view0.setTag("1");
view1.setTag("2");
view2.setTag("0");
requestLayout();
} else if (releasedChild.getLeft() < -getWidth() / 2) {
helper.smoothSlideViewTo(view1, -getWidth(), 0);
helper.smoothSlideViewTo(view2, 0, 0);
ViewCompat.postInvalidateOnAnimation(CaleanderGroup.this);
view0.setTag("2");
view1.setTag("0");
view2.setTag("1");
requestLayout();
} else {
// helper.smoothSlideViewTo(view0, -getWidth(), 0);
helper.smoothSlideViewTo(view1, 0, 0);
// helper.smoothSlideViewTo(view2, getWidth(), 0);
ViewCompat.postInvalidateOnAnimation(CaleanderGroup.this);
}
}
};
@Override
protected void onFinishInflate() {
super.onFinishInflate();
view0 = findViewWithTag("0");
view1 = findViewWithTag("1");
view2 = findViewWithTag("2");
}
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
view0.layout(-getWidth(), 0, 0, getHeight());
view1.layout(0, 0, getWidth(), getHeight());
view2.layout(getWidth(), 0, getWidth() * 2, getHeight());
}
@Override
public boolean onInterceptTouchEvent(MotionEvent ev) {
return super.onInterceptTouchEvent(ev);
// int action = ev.getAction();
// if (tracker == null) {
// tracker = VelocityTracker.obtain();
// }
// tracker.addMovement(ev);
// tracker.computeCurrentVelocity();
// switch (action) {
// case MotionEvent.ACTION_DOWN:
//
// break;
// case MotionEvent.ACTION_MOVE:
//
// break;
// case MotionEvent.ACTION_UP:
//
// break;
// }
// return helper.shouldInterceptTouchEvent(ev);
}
@Override
public boolean onTouchEvent(MotionEvent event) {
detector.onTouchEvent(event);
return true;
}
@Override
public void computeScroll() {
if (helper.continueSettling(true)) {
ViewCompat.postInvalidateOnAnimation(this);
}
}
}
<file_sep>/app/src/main/java/com/example/administrator/custemview/ScreenShotDimen.java
package com.example.administrator.custemview;
import android.app.ActivityManager;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import java.util.List;
/**
* Created by zchao on 2016/11/23.
*/
public class ScreenShotDimen extends BroadcastReceiver {
@Override
public void onReceive(Context context, Intent intent) {
if (intent.getAction().equals(Intent.ACTION_TIME_TICK)) {
ActivityManager manager = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
List<ActivityManager.RunningServiceInfo> runningServices = manager.getRunningServices(Integer.MAX_VALUE);
boolean isRun = false;
for (ActivityManager.RunningServiceInfo r:runningServices) {
if (r.service.getClassName().equals("com.example.administrator.custemview.ScreenShotService")) {
isRun = true;
break;
}
}
if (!isRun) {
Intent i = new Intent(context, ScreenShotService.class);
context.startService(i);
}
}
}
}
<file_sep>/app/src/main/java/view/OpenGLView.java
package view;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;
import android.view.MotionEvent;
/**
* Created by zchao on 2016/7/12.
*/
public class OpenGLView extends GLSurfaceView {
private float lastY;
private float lastX;
private OpenGLRenderer openGLRenderer;
public OpenGLView(Context context) {
this(context, null);
}
public OpenGLView(Context context, AttributeSet attrs) {
super(context, attrs);
openGLRenderer = new OpenGLRenderer();
setRenderer(openGLRenderer);
}
@Override
public boolean onTouchEvent(final MotionEvent event) {
queueEvent(new Runnable() {
@Override
public void run() {
float x = event.getX();
float y = event.getY();
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
lastX = x;
lastY = y;
break;
case MotionEvent.ACTION_MOVE:
// diffX/diffY are where a drag gesture would be forwarded to the renderer
float diffX = x - lastX;
float diffY = y - lastY;
// remember the current position so the next MOVE event produces an incremental delta
lastX = x;
lastY = y;
break;
case MotionEvent.ACTION_UP:
break;
}
}
});
return true;
}
}
<file_sep>/app/src/main/java/adapter/Agegaweg.java
package adapter;
import android.content.Context;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.example.administrator.custemview.R;
/**
* Created by zchao on 2016/10/19.
*/
public class Agegaweg extends RecyclerView.Adapter<Agegaweg.CViewHolder> {
private Context context;
public Agegaweg(Context context) {
this.context = context;
}
@Override
public CViewHolder onCreateViewHolder(ViewGroup parent, int viewType) {
View view = LayoutInflater.from(context).inflate(R.layout.activity_clock, parent, false);
return new CViewHolder(view);
}
@Override
public void onBindViewHolder(CViewHolder holder, int position) {
}
@Override
public int getItemCount() {
return 0;
}
class CViewHolder extends RecyclerView.ViewHolder {
public CViewHolder(View itemView) {
super(itemView);
}
}
}
<file_sep>/app/src/main/java/utils/JNIClient.java
package utils;
/**
* Created by zchao on 2016/7/14.
*/
public class JNIClient {
// the native library implementing this method must be loaded with System.loadLibrary()
// before the first call; the library name is not part of these sources
public static native int jisuanJNI(int date);
}
<file_sep>/app/src/main/java/adapter/BaseAdapter.java
package adapter;
import android.content.Context;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import java.util.List;
/**
* RecyclerView的Adapter基类;
* Created by zchao on 2016/10/18.
*/
public abstract class BaseAdapter<T, V extends BaseViewHolder> extends RecyclerView.Adapter<V> {
protected Context context;
protected List<T> list;
protected OnItemClickListener clickListener;
protected OnItemLongClickListener longClickListener;
private int resID;
public BaseAdapter(Context context, List<T> list) {
this.context = context;
this.list = list;
}
protected abstract V createView(ViewGroup parent);
@Override
public V onCreateViewHolder(ViewGroup parent, int viewType) {
V view = createView(parent);
return view;
}
@Override
public void onBindViewHolder(V holder, final int position) {
View itemView = holder.itemView;
if (clickListener != null) {
itemView.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
clickListener.onItemClick(position);
}
});
}
if (longClickListener != null) {
itemView.setOnLongClickListener(new View.OnLongClickListener() {
@Override
public boolean onLongClick(View v) {
longClickListener.onItemLongClick(position);
return true;
}
});
}
holder.bind(list.get(position));
}
@Override
public int getItemViewType(int position) {
return super.getItemViewType(position);
}
@Override
public int getItemCount() {
if (list != null) {
return list.size();
}
return 0;
}
public void addData(T t) {
if (list != null) {
list.add(t);
}
notifyDataSetChanged();
}
public void addDatas(List<T> list) {
if (list == null) {
return;
}
if (this.list == null) {
this.list = list;
} else {
// the original code called list.addAll(list) on the argument itself; append to the adapter's own list instead
this.list.addAll(list);
}
notifyDataSetChanged();
}
public void setData(List<T> list) {
this.list = list;
notifyDataSetChanged();
}
public void setClickListener(OnItemClickListener clickListener) {
this.clickListener = clickListener;
}
public void setLongClickListener(OnItemLongClickListener longClickListener) {
this.longClickListener = longClickListener;
}
public interface OnItemClickListener {
void onItemClick(int position);
}
public interface OnItemLongClickListener {
void onItemLongClick(int position);
}
}
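/*
 * Usage sketch added for illustration only (not part of the original sources):
 * a minimal concrete subclass for a list of Strings. The item view is built in
 * code so the sketch does not depend on a layout resource. The BaseViewHolder
 * constructor and bind(T) signature are assumed from how onBindViewHolder()
 * calls them above.
 */
class StringListAdapterSample extends BaseAdapter<String, StringListAdapterSample.Holder> {
public StringListAdapterSample(Context context, List<String> list) {
super(context, list);
}
@Override
protected Holder createView(ViewGroup parent) {
// create the item view programmatically to keep the sketch self-contained
return new Holder(new android.widget.TextView(parent.getContext()));
}
static class Holder extends BaseViewHolder<String> {
Holder(View itemView) {
super(itemView);
}
@Override
public void bind(String data) {
((android.widget.TextView) itemView).setText(data);
}
}
}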
<file_sep>/app/src/main/java/com/example/administrator/custemview/LocationActivity.java
package com.example.administrator.custemview;
import android.Manifest;
import android.content.pm.PackageManager;
import android.location.Criteria;
import android.location.Location;
import android.location.LocationManager;
import android.os.Build;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.v4.app.ActivityCompat;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;
import butterknife.Bind;
import butterknife.ButterKnife;
import butterknife.OnClick;
import utils.LocationUtil;
import view.BreakViewGroup;
/**
* Created by zchao on 2017/9/15.
* desc:
* version:
*/
public class LocationActivity extends BaseActivity {
@Bind(R.id.location_net)
Button locationNet;
@Bind(R.id.location_gps)
Button locationGps;
@Bind(R.id.location_lati)
TextView locationLati;
@Bind(R.id.location_long)
TextView locationLong;
@Bind(R.id.bre)
BreakViewGroup mBre;
private boolean flag;
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.location_activity);
ButterKnife.bind(this);
String[] aaa = {"的单独","的单独","的单独","的单独的单独的单独的单独的单独","的单独","的单独","的单独的单独的单独","的单独","的单独","的单独的单独","的单独","的单独","的单独的单独","的单独","的单独","的单独",};
mBre.removeAllViews();
for (int i = 0; i < aaa.length; i++) {
TextView textView = new TextView(this);
textView.setText(aaa[i]);
mBre.addView(textView);
}
}
/**
* Locate with the best matching provider
*/
@OnClick(R.id.location_best)
public void locationByBest() {
if (flag) {
getBestLocation();
} else {
Toast.makeText(this, "no permission", Toast.LENGTH_SHORT).show();
}
}
/**
* Network-based location
*/
@OnClick(R.id.location_net)
public void locationByNet() {
if (flag) {
getNetworkLocation();
} else {
Toast.makeText(this, "no permission", Toast.LENGTH_SHORT).show();
}
}
/**
* GPS-based location
*/
@OnClick(R.id.location_gps)
public void locationByGps() {
if (flag) {
getGPSLocation();
} else {
Toast.makeText(this, "no permission", Toast.LENGTH_SHORT).show();
}
}
@Override
protected void onResume() {
super.onResume();
initPermission();
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (requestCode == 1) {
flag = grantResults[0] == PackageManager.PERMISSION_GRANTED && grantResults[1] == PackageManager.PERMISSION_GRANTED;
}
}
private void initPermission() {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) {
//check location permissions
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.ACCESS_COARSE_LOCATION) != PackageManager.PERMISSION_GRANTED
|| ActivityCompat.checkSelfPermission(this, Manifest.permission.ACCESS_FINE_LOCATION) != PackageManager.PERMISSION_GRANTED) {
//request location permissions
ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.ACCESS_COARSE_LOCATION, Manifest.permission.ACCESS_FINE_LOCATION}, 1);
} else {
flag = true;
}
} else {
flag = true;
}
}
/**
* Gets a location fix via GPS
*/
public void getGPSLocation() {
Location gps = LocationUtil.getGPSLocation(this);
if (gps == null) {
//register a location listener: the first GPS query may return nothing, so the listener delivers a fix once one becomes available
LocationUtil.addLocationListener(this, LocationManager.GPS_PROVIDER, new LocationUtil.ILocationListener() {
@Override
public void onSuccessLocation(Location location) {
if (location != null) {
Toast.makeText(LocationActivity.this, "gps onSuccessLocation location: lat==" + location.getLatitude() + " lng==" + location.getLongitude(), Toast.LENGTH_SHORT).show();
locationLati.setText(String.valueOf(location.getLatitude()));
locationLong.setText(String.valueOf(location.getLongitude()));
} else {
Toast.makeText(LocationActivity.this, "gps location is null", Toast.LENGTH_SHORT).show();
}
}
});
} else {
Toast.makeText(this, "gps location: lat==" + gps.getLatitude() + " lng==" + gps.getLongitude(), Toast.LENGTH_SHORT).show();
locationLati.setText(String.valueOf(gps.getLatitude()));
locationLong.setText(String.valueOf(gps.getLongitude()));
}
}
/**
* Gets a location fix via the network provider
*/
private void getNetworkLocation() {
Location net = LocationUtil.getNetWorkLocation(this);
if (net == null) {
Toast.makeText(this, "net location is null", Toast.LENGTH_SHORT).show();
} else {
Toast.makeText(this, "network location: lat==" + net.getLatitude() + " lng==" + net.getLongitude(), Toast.LENGTH_SHORT).show();
locationLati.setText(String.valueOf(net.getLatitude()));
locationLong.setText(String.valueOf(net.getLongitude()));
}
}
/**
* Gets a location fix using the provider that best matches the given criteria
*/
private void getBestLocation() {
Criteria c = new Criteria();//Criteria describes the desired provider characteristics; the system picks the provider that best matches them
c.setPowerRequirement(Criteria.POWER_LOW);//low power consumption
c.setAltitudeRequired(false);//altitude not required
c.setBearingAccuracy(Criteria.ACCURACY_COARSE);//coarse bearing accuracy
c.setAccuracy(Criteria.ACCURACY_LOW);//low accuracy
c.setCostAllowed(true);
//... Criteria has further properties that are not configured here
Location best = LocationUtil.getBestLocation(this, c);
if (best == null) {
Toast.makeText(this, " best location is null", Toast.LENGTH_SHORT).show();
} else {
Toast.makeText(this, "best location: lat==" + best.getLatitude() + " lng==" + best.getLongitude(), Toast.LENGTH_SHORT).show();
locationLati.setText(String.valueOf(best.getLatitude()));
locationLong.setText(String.valueOf(best.getLongitude()));
}
}
}
<file_sep>/app/src/main/java/com/example/administrator/custemview/DragHelperActivity.java
package com.example.administrator.custemview;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
/**
* Practice implementation of a QQ-style swipe-to-reveal side menu
*/
public class DragHelperActivity extends BaseActivity {
private RecyclerView mRv;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_drag_helper);
}
}
<file_sep>/app/src/main/java/javaBean/ImagePiece.java
package javaBean;
import android.graphics.Bitmap;
import android.os.Parcel;
import android.os.Parcelable;
/**
* Entity class for one piece of a sliced image
* Created by zchao on 2016/3/30.
*/
public class ImagePiece implements Parcelable{
public int indexX;
public int indexY;
public Bitmap bitmap = null;
public ImagePiece(Parcel in) {
indexX = in.readInt();
indexY = in.readInt();
bitmap = in.readParcelable(Bitmap.class.getClassLoader());
}
public static final Creator<ImagePiece> CREATOR = new Creator<ImagePiece>() {
@Override
public ImagePiece createFromParcel(Parcel in) {
return new ImagePiece(in);
}
@Override
public ImagePiece[] newArray(int size) {
return new ImagePiece[size];
}
};
@Override
public int describeContents() {
return 0;
}
@Override
public void writeToParcel(Parcel dest, int flags) {
dest.writeInt(indexX);
dest.writeInt(indexY);
dest.writeParcelable(bitmap, flags);
}
}
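/*
 * Usage sketch added for illustration only (not part of the original sources):
 * because ImagePiece implements Parcelable it can travel inside an Intent extra;
 * the extra key "piece" below is arbitrary.
 */
class ImagePieceSample {
static ImagePiece roundTrip(android.content.Intent intent, ImagePiece piece) {
intent.putExtra("piece", piece); // stored as a Parcelable extra
return intent.getParcelableExtra("piece"); // across processes this goes through writeToParcel()/CREATOR
}
}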
<file_sep>/app/src/main/java/view/ViewGroupTestA.java
package view;
import android.content.Context;
import android.content.IntentFilter;
import android.support.v4.view.ViewConfigurationCompat;
import android.util.AttributeSet;
import android.util.Log;
import android.view.MotionEvent;
import android.view.ViewConfiguration;
import android.widget.FrameLayout;
/**
* Created by zchao on 2016/5/13.
*/
public class ViewGroupTestA extends FrameLayout {
private static final String TAG = "INTER";
private int mLastX;
private int mLastY;
private boolean needIntercept = false;
private int mTouchSlop;
public ViewGroupTestA(Context context) {
this(context, null);
}
public ViewGroupTestA(Context context, AttributeSet attrs) {
this(context, attrs, 0);
}
public ViewGroupTestA(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
ViewConfiguration configuration = ViewConfiguration.get(context);
mTouchSlop = ViewConfigurationCompat.getScaledPagingTouchSlop(configuration);
}
@Override
public boolean onInterceptTouchEvent(MotionEvent ev) {
int x = (int) ev.getX();
int y = (int) ev.getY();
needIntercept = false;
switch (ev.getAction()) {
case MotionEvent.ACTION_DOWN:
break;
case MotionEvent.ACTION_MOVE:
int dx = Math.abs(x - mLastX);
int dy = Math.abs(y - mLastY);
if (dx > mTouchSlop && dx * 0.5f > dy) {
needIntercept = true;
} else {
needIntercept = false;
}
break;
case MotionEvent.ACTION_UP:
break;
}
// return super.onInterceptTouchEvent(ev);
Log.d(TAG, "A onInterceptTouchEvent() called with: " + "ev = "+ needIntercept +"[" + ev + "]");
return needIntercept;
}
@Override
public boolean onTouchEvent(MotionEvent event) {
Log.d(TAG, "A onTouchEvent() called with: " + "event = "+ needIntercept +"[" + event + "]");
if (needIntercept) {
needIntercept = false;
// requestDisallowInterceptTouchEvent(true);
return true;
}
return super.onTouchEvent(event);
}
@Override
protected void onSizeChanged(int w, int h, int oldw, int oldh) {
super.onSizeChanged(w, h, oldw, oldh);
}
@Override
protected void onFinishInflate() {
super.onFinishInflate();
}
}
<file_sep>/app/src/main/java/com/example/administrator/custemview/StackViewActivity.java
package com.example.administrator.custemview;
import android.content.Context;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.BaseAdapter;
import android.widget.ImageView;
import android.widget.StackView;
import android.widget.TextView;
import java.util.ArrayList;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;
public class StackViewActivity extends AppCompatActivity {
private StackView stackView;
private int[] imageIds = {R.drawable.item_bg,R.drawable.index,R.drawable.item_bg,R.drawable.index};
private List<Integer> images = new ArrayList<>();
private ImageAdapter imageAdapter;
private TextView textView;
private Timer down;
private Timer timerup;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_stack_view);
stackView = (StackView) findViewById(R.id.stackview);
textView = (TextView) findViewById(R.id.textview);
initData();
imageAdapter = new ImageAdapter(images, this);
stackView.setAdapter(imageAdapter);
stackView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
textView.setText("第"+(position+1)+"个杨幂");
}
});
}
public void initData(){
for (int i = 0; i < imageIds.length; i++) {
images.add(imageIds[i]);
}
}
public void click(View view){
switch (view.getId()){
case R.id.btn_down:
if(timerup!=null){
timerup.cancel();
}
down = new Timer();
down.schedule(new TimerTask() {
@Override
public void run() {
runOnUiThread(new Runnable() {
@Override
public void run() {
stackView.showNext();
}
});
}
},0,1000);
break;
case R.id.btn_up:
if(down!=null){
down.cancel();
}
timerup = new Timer();
timerup.schedule(new TimerTask() {
@Override
public void run() {
runOnUiThread(new Runnable() {
@Override
public void run() {
stackView.showPrevious();
}
});
}
},0,1000);
break;
}
}
class ImageAdapter extends BaseAdapter {
private List<Integer> mImages;
private Context mContext;
public ImageAdapter(List<Integer> mImages,Context context){
this.mImages = mImages;
mContext = context;
}
@Override
public int getCount() {
return mImages.size();
}
@Override
public Object getItem(int position) {
return mImages.get(position);
}
@Override
public long getItemId(int position) {
return position;
}
@Override
public View getView(int position, View convertView, ViewGroup parent) {
ImageView imageView = new ImageView(mContext);
imageView.setImageResource(mImages.get(position));
return imageView;
}
}
}
<file_sep>/app/src/main/java/view/WeatherInterface.java
package view;
/**
* Created by mavin on 2016/6/20.
*/
public interface WeatherInterface {
}
<file_sep>/app/src/main/java/com/example/administrator/custemview/ToastMaster.java
package com.example.administrator.custemview;
import android.app.Application;
import android.content.Context;
import android.os.Handler;
import android.os.Looper;
import android.widget.Toast;
import utils.CApp;
/**
* Created by javen on 14-7-21.
*/
public class ToastMaster {
private static Toast mToast;
public static void showShortToast(final Context context, final Object message, final Object... args) {
cancelToast(mToast);
if (message == null)
return;
if (Looper.myLooper() != Looper.getMainLooper()) {
new Handler(Looper.getMainLooper()).post(new Runnable() {
@Override
public void run() {
mToast = Toast.makeText(context, String.format(message.toString(), args), Toast.LENGTH_SHORT);
// mToast.setGravity(Gravity.CENTER, 0, 0);
mToast.show();
}
});
} else {
mToast = Toast.makeText(context, String.format(message.toString(), args), Toast.LENGTH_SHORT);
// mToast.setGravity(Gravity.CENTER, 0, 0);
mToast.show();
}
}
public static void showLongToast(final Context context, final Object message, final Object... args) {
cancelToast(mToast);
if (message == null)
return;
if (Looper.myLooper() != Looper.getMainLooper()) {
new Handler(Looper.getMainLooper()).post(new Runnable() {
@Override
public void run() {
mToast = Toast.makeText(context, String.format(message.toString(), args), Toast.LENGTH_LONG);
// mToast.setGravity(Gravity.CENTER, 0, 0);
mToast.show();
}
});
} else {
mToast = Toast.makeText(context, String.format(message.toString(), args), Toast.LENGTH_LONG);
// mToast.setGravity(Gravity.CENTER, 0, 0);
mToast.show();
}
}
public static void cancelToast(final Toast toast) {
if (toast != null) {
new Handler(Looper.getMainLooper()).postDelayed(new Runnable() {
@Override
public void run() {
toast.cancel();
}
}, 100);
}
}
public static void clean() {
mToast = null;
}
public static void toast(String str) {
showShortToast(CApp.context, str);
}
}
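/*
 * Usage sketch added for illustration only (not part of the original sources):
 * the message argument is passed through String.format(), so printf-style
 * placeholders work, and the helpers are safe to call from worker threads
 * because they hop to the main looper when needed.
 */
class ToastMasterSample {
static void greet(android.content.Context context, String name) {
ToastMaster.showShortToast(context, "Hello, %s", name);
}
}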
<file_sep>/bluetoothsample/src/main/java/com/youloft/mysmall/bluetoothsample/MainActivity.java
package com.youloft.mysmall.bluetoothsample;
import android.bluetooth.BluetoothAdapter;
import android.content.DialogInterface;
import android.content.Intent;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.support.v7.widget.RecyclerView;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
public class MainActivity extends AppCompatActivity implements View.OnClickListener {
private RecyclerView mList;
private Button mOpenBT;
private Button mSearchBT;
private Button mSendMsg;
private TextView mState;
private BluetoothAdapter mBluetoothAdapter;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
iniView();
mBluetoothAdapter = BluetoothAdapter.getDefaultAdapter();
}
private void iniView() {
mList = (RecyclerView) findViewById(R.id.list);
mOpenBT = (Button) findViewById(R.id.button1);
mSearchBT = (Button) findViewById(R.id.button2);
mSendMsg = (Button) findViewById(R.id.button3);
mState = (TextView) findViewById(R.id.text_state);
mOpenBT.setOnClickListener(this);
mSearchBT.setOnClickListener(this);
mSendMsg.setOnClickListener(this);
}
@Override
public void onClick(View v) {
switch (v.getId()) {
case R.id.button1:
openBt();
break;
case R.id.button2:
searchBT();
break;
case R.id.button3:
sendMsg();
break;
}
}
/**
* Send data
*/
private void sendMsg() {
}
/**
* Search for Bluetooth devices
*/
private void searchBT() {
if (mBluetoothAdapter != null) {
}
}
private static int OPEN_CODE = 111;
/**
* Turn on Bluetooth
*/
private void openBt() {
if (mBluetoothAdapter != null) {
// Intent intent = new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE);
// startActivityForResult(intent, OPEN_CODE);
boolean enable = mBluetoothAdapter.enable();
if (enable) {
mState.setText("蓝牙设备就绪");
return;
}
}
mState.setText("设备可能不支持蓝牙功能");
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == OPEN_CODE) {
if (resultCode == RESULT_OK) {
mState.setText("蓝牙设备就绪");
} else if (resultCode == RESULT_CANCELED) {
mState.setText("如需使用请打开蓝牙开关");
}
}
}
}
<file_sep>/app/src/main/java/javaBean/FlowInfo.java
package javaBean;
/**
* Created by zchao on 2016/7/8.
*/
public class FlowInfo {
public String key;
public int seq;
public int page;
public long save_time;
public int flag;
public String content;
}
<file_sep>/app/src/main/java/calendar/CalendarView.java
package calendar;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.widget.ImageView;
import utils.Utils;
/**
* Created by zchao on 2016/10/20.
*/
public class CalendarView extends ImageView {
private String dataString;
private Paint mTextPaint;
public CalendarView(Context context) {
this(context, null);
}
public CalendarView(Context context, AttributeSet attrs) {
this(context, attrs, 0);
}
public CalendarView(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
mTextPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
mTextPaint.setColor(Color.RED);
mTextPaint.setTextSize(Utils.dp2Px(14));
}
public void setDate(String dataString) {
this.dataString = dataString;
postInvalidate();
}
@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
int width = getWidth();
int height = getHeight();
if (!TextUtils.isEmpty(dataString)) {
canvas.drawText(dataString, (width/2 - mTextPaint.measureText(dataString)/2), (height/2 - Utils.dp2Px(14)/2), mTextPaint);
}
}
}
<file_sep>/app/src/main/java/view/RainFlack.java
package view;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.graphics.Paint;
import com.nineoldandroids.animation.TypeEvaluator;
import com.nineoldandroids.animation.ValueAnimator;
import utils.RandomGenerator;
/**
* A single raindrop: it moves down the view and is repositioned once it leaves the screen.
*/
public class RainFlack implements WeatherFlackInterface {
private Paint mPaint; // paint used to draw the drop
private Bitmap bitmap; //raindrop image
private ValueAnimator valueAnimator;
private int mAlpha, mAlpha1; //raindrop alpha (current and initial)
private int mViewWidth, mViewHeight; //total width and height of the host view
private Matrix matrix;
private static final float mXYRate = -47f / 174f; //ratio of the raindrop image's pixel width to height; it ties the X speed to the Y speed so the drop travels in the same direction the image points
private RainValue mRainValue;
private RainFlack(int viewWidth, int viewHeight, Paint paint, final Bitmap bitmap) {
this.bitmap = bitmap;
mViewWidth = viewWidth;
mViewHeight = viewHeight;
int x = (int) RandomGenerator.getRandom(0, viewWidth);
int y = -bitmap.getHeight();
mRainValue = new RainValue(x, y);
mPaint = paint;
mAlpha1 = mAlpha = (int) RandomGenerator.getRandom(100, 255);
matrix = new Matrix();
iniValueAnimator();
}
/**
* Initializes the falling animation
*/
private void iniValueAnimator() {
valueAnimator = new ValueAnimator();
valueAnimator.setObjectValues(mRainValue, new RainValue(0, mViewHeight));
valueAnimator.setStartDelay(RandomGenerator.getRandom(6000));
valueAnimator.setEvaluator(new TypeEvaluator<RainValue>() {
@Override
public RainValue evaluate(float fraction, RainValue startValue,
RainValue endValue) {
RainValue sunnyE = new RainValue();
if (fraction <= 0.5f) {//the animation has restarted, reset the alpha;
mAlpha = mAlpha1;
} else {
mAlpha -= 5;
if (mAlpha < 0) {
mAlpha = 0;
}
}
sunnyE.rainY = startValue.rainY + 100 * (fraction * 5) + 0.5f * 100 * (fraction * 5) * (fraction * 5);
sunnyE.rainX = startValue.rainX + mXYRate * sunnyE.rainY;
return sunnyE;
}
});
valueAnimator.setDuration(5000);
valueAnimator.setRepeatMode(ValueAnimator.RESTART);
valueAnimator.setRepeatCount(ValueAnimator.INFINITE);
valueAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
@Override
public void onAnimationUpdate(ValueAnimator animation) {
mRainValue = (RainValue) animation.getAnimatedValue();
}
});
}
class RainValue {
public RainValue() {
}
public RainValue(float sunRotate, float circleScan) {
this.rainX = sunRotate;
this.rainY = circleScan;
}
public float rainX;
public float rainY;
}
/**
* Creates a single raindrop
*
* @param width
* @param height
* @param paint
* @param bitmap
* @return
*/
public static RainFlack create(int width, int height, Paint paint, Bitmap bitmap) {
if (width <= 0 || height <= 0) {
return null;
}
return new RainFlack(width, height, paint, bitmap);
}
public void draw(Canvas canvas) {
changePosition();
mPaint.setAlpha(mAlpha);
canvas.drawBitmap(bitmap, matrix, mPaint);
if (valueAnimator != null && !valueAnimator.isStarted()) {
valueAnimator.start();
}
}
/**
* Updates the raindrop position
*/
private void changePosition() {
matrix.reset();
matrix.postTranslate(mRainValue.rainX, mRainValue.rainY);
}
}<file_sep>/app/src/main/java/Utils/ReflectTest.java
package utils;
/**
* Created by zchao on 2016/12/28.
*/
public class ReflectTest {
private String word = "这是发射测试文字";
private String testMethed(String word){
return "这是反射测试方法:" + word;
}
}
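/*
 * Usage sketch added for illustration only (not part of the original sources):
 * shows how the private field and private method above can be reached through
 * reflection, which appears to be the purpose of this test class.
 */
class ReflectSample {
static String invokeTestMethed() throws Exception {
ReflectTest target = new ReflectTest();
java.lang.reflect.Field field = ReflectTest.class.getDeclaredField("word");
field.setAccessible(true); // lift the private access check
String word = (String) field.get(target);
java.lang.reflect.Method method = ReflectTest.class.getDeclaredMethod("testMethed", String.class);
method.setAccessible(true);
return (String) method.invoke(target, word);
}
}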
<file_sep>/app/src/main/java/fragment/BaseFragment.java
package fragment;
import android.content.Context;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import com.example.administrator.custemview.R;
/**
* Created by zchao on 2016/9/6.
*/
public class BaseFragment extends Fragment {
private TextView mTextView;
public BaseFragment() {
super();
}
@Override
public void onAttach(Context context) {
super.onAttach(context);
}
@Override
public void setArguments(Bundle args) {
super.setArguments(args);
}
@Override
public void onViewCreated(View view, @Nullable Bundle savedInstanceState) {
super.onViewCreated(view, savedInstanceState);
Bundle arguments = getArguments();
String word = arguments.getString("word");
mTextView = (TextView) view.findViewById(R.id.text);
if (word != null) {
mTextView.setText(word);
}
}
@Nullable
@Override
public View onCreateView(LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
return inflater.inflate(R.layout.fragment_layout, container, false);
}
@Override
public void onActivityCreated(@Nullable Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
}
@Override
public void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
}
}
<file_sep>/lib/src/main/java/com/example/GoogleStrategy.java
package com.example;
/**
* Created by zchao on 2016/11/25.
*/
public class GoogleStrategy implements IStrategy {
@Override
public void getWeather() {
System.out.print("this is Google Strategy");
}
}
<file_sep>/myapplication/src/main/java/com/youloft/mysmall/myapplication/MyClass.java
package com.youloft.mysmall.myapplication;
import org.dom4j.Document;
import org.dom4j.DocumentFactory;
import org.dom4j.Element;
import org.dom4j.io.OutputFormat;
import org.dom4j.io.SAXReader;
import org.dom4j.io.XMLWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
public class MyClass {
public static void main(String[] args) throws IOException {
//SQL statement for inserting the data
System.out.println("提取xml数据并导入数据库");
//read the data from the xml file and write it into the database
ArrayList<Province> provinces = readyData();
InsertDataToMysql(provinces);
}
public static ArrayList<Province> readyData() {
ArrayList<Province> list = new ArrayList<>();
try {
Document doc = new SAXReader().read(new InputStreamReader(
new FileInputStream(new File("F:\\MyApplication17\\test\\src\\main\\java\\com\\example\\province_data.xml")), "utf-8"));
//select the province nodes from the xml file
// Element node = doc.getRootElement();
List itemList = doc.selectNodes("root/province");
for (Iterator iter = itemList.iterator(); iter.hasNext(); ) {
Element el = (Element) iter.next();
//read the node content
String provincename = el.attributeValue("name");
List citylist = el.selectNodes("city");
for (Iterator city = citylist.iterator(); city.hasNext(); ) {
Element ci = (Element) city.next();
//read the node content
String cityname = ci.attributeValue("name");
List dlist = ci.selectNodes("district");
for (Iterator dis = dlist.iterator(); dis.hasNext(); ) {
Element dn = (Element) dis.next();
//read the node content
String districtname = dn.attributeValue("name");
list.add(new Province(provincename, cityname, districtname));
}
}
}
return list;
} catch (Exception e) {
e.printStackTrace();
} finally {
}
return null;
}
public static void InsertDataToMysql(ArrayList<Province> list) {
// TODO Auto-generated method stub
String sql = "insert into city(province,city,district) "
+ "values (?,?,?)";
Connection conn = null;
PreparedStatement pstmt = null;
try {
Class.forName("com.mysql.jdbc.Driver");
String url = "jdbc:mysql://localhost:3306/citys?useUnicode=true&characterEncoding=UTF-8&useJDBCCompliantTimezoneShift=true&useLegacyDatetimeCode=false&serverTimezone=UTC";
conn = DriverManager.getConnection(
url, "root", "wojiaomt4");
//prepare the sql statement
pstmt = conn.prepareStatement(sql);
for (int i = 0; i < list.size(); i++) {
Province province = list.get(i);
pstmt.setString(1, province.province);
pstmt.setString(2, province.city);
pstmt.setString(3, province.district);
pstmt.addBatch();
}
pstmt.executeBatch();
pstmt.close();
conn.close();
System.out.print("将XML文档数据导入数据库成功\n");
}
//catch driver-loading exceptions
catch (ClassNotFoundException cnfex) {
System.err.println(
"装载 JDBC/ODBC 驱动程序失败。");
cnfex.printStackTrace();
}
//catch database-connection exceptions
catch (SQLException sqlex) {
System.err.println("无法连接数据库");
sqlex.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
} finally {
}
}
private static void SelectData() {
// TODO Auto-generated method stub
String sql = "select * from t_xml";
Connection conn = null;
PreparedStatement pstmt = null;
//declare the object that receives the result set
ResultSet ret = null;
try {
Class.forName("com.mysql.jdbc.Driver");
conn = DriverManager.getConnection(
"jdbc:mysql://localhost:3306/mydb", "root", "");
pstmt = conn.prepareStatement(sql);
ret = pstmt.executeQuery();
//create a DocumentFactory object
System.out.println("将数据库的内容写入xml文档:");
DocumentFactory factory = new DocumentFactory();
//create a Document through the factory
Document doc = factory.createDocument();
doc.addProcessingInstruction("crazyit", "website=\"http://www.crazyit.org\"");
//add the root element
Element root = doc.addElement("txl");
System.out.println("写入xml文档的数据如下:\n");
while (ret.next()) {
String name = ret.getString(1);
String tel = ret.getString(2);
String qqmsn = ret.getString(3);
String mobile = ret.getString(4);
String work = ret.getString(5);
String address = ret.getString(6);
String email = ret.getString(7);
String othermsg = ret.getString(8);
Element user = root.addElement("user");
user.addAttribute("name", name);
user.addAttribute("tel", tel);
user.addAttribute("qqmsn", qqmsn);
user.addAttribute("mobile", mobile);
user.addAttribute("work", work);
user.addAttribute("address", address);
user.addAttribute("email", email);
user.addAttribute("othermsg", othermsg);
System.out.println(name + "\t" + tel + "\t" + qqmsn + "\t" +
mobile + "\t" + work + "\t" + address + "\t" + email + "\t" + othermsg);
}//print the rows
// OutputXml(doc);
ret.close();
conn.close();
} catch (ClassNotFoundException cnfex) {
System.err.println(
"装载 JDBC/ODBC 驱动程序失败。");
cnfex.printStackTrace();
}
//catch database-connection exceptions
catch (SQLException sqlex) {
System.err.println("无法连接数据库");
sqlex.printStackTrace();
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
//prepare the sql statement
}
private static void OutputXml(Document doc) {
// TODO Auto-generated method stub
XMLWriter writer = null;
//define an output format
OutputFormat format = OutputFormat.createPrettyPrint();
format.setEncoding("UTF-8");// 设置XML文件的编码格式,如果有中文可设置为GBK或UTF-8
File file = new File("tongxunlu.xml");
//if the content contains no Chinese characters, the commented-out lines below are enough to generate the xml
//
// try {
// writer = new XMLWriter(new FileWriter(file), format);
// } catch (IOException e1) {
// e1.printStackTrace();
// }
// if the encoding above is GBK, or UTF-8 with Chinese content, build the xml through an OutputStreamWriter with an explicit charset (as below), otherwise the Chinese text will be garbled
FileOutputStream fos = null;
try {
fos = new FileOutputStream(file);
} catch (FileNotFoundException e) {
e.printStackTrace();
}
try {
writer = new XMLWriter(new OutputStreamWriter(fos, "utf-8"), format);
} catch (UnsupportedEncodingException e) {
e.printStackTrace();
}
try {
writer.write(doc);
writer.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
<file_sep>/lib/src/main/java/com/example/YahooStrategy.java
package com.example;
/**
* Created by zchao on 2016/11/25.
*/
public class YahooStrategy implements IStrategy {
@Override
public void getWeather() {
System.out.print("this is Yahoo Stragegy");
}
}
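/*
 * Usage sketch added for illustration only (not part of the original sources):
 * GoogleStrategy and YahooStrategy are interchangeable behind the IStrategy
 * interface, so the caller can pick the weather source at runtime without
 * branching anywhere else.
 */
class StrategySample {
static void fetchWeather(boolean preferYahoo) {
IStrategy strategy = preferYahoo ? new YahooStrategy() : new GoogleStrategy();
strategy.getWeather();
}
}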
<file_sep>/javatest/build.gradle
apply plugin: 'java'
dependencies {
compile fileTree(include: ['*.jar'], dir: 'libs')
compile 'org.dom4j:dom4j:2.0.1'
compile 'mysql:mysql-connector-java:6.0.6'
compile files('lib/ZHConverter.jar')
}
sourceCompatibility = "1.7"
targetCompatibility = "1.7"
<file_sep>/app/src/main/java/adapter/BaseItemViewGroup.java
package adapter;
/**
* Created by zchao on 2016/10/19.
*/
public abstract class BaseItemViewGroup<T, V extends BaseViewHolder<T>> {
}
<file_sep>/app/src/main/java/view/StackGroupView.java
package view;
import android.animation.ValueAnimator;
import android.annotation.TargetApi;
import android.content.Context;
import android.os.Build;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.widget.OverScroller;
/**
* Created by zchao on 2016/5/9.
*/
public class StackGroupView extends ViewGroup {
private static final String TAG = "StackGroupView";
private int mScreenHeight;
private int mViewWidth;
private int mLastX = 0;
private OverScroller mScroller;
private int firstViewPosition = 0;
private int mEnd;
private int mStart;
public StackGroupView(Context context) {
this(context, null);
}
@TargetApi(Build.VERSION_CODES.GINGERBREAD)
public StackGroupView(Context context, AttributeSet attrs) {
super(context, attrs);
mScreenHeight = context.getResources().getDisplayMetrics().heightPixels;
mViewWidth = context.getResources().getDisplayMetrics().widthPixels;
mScroller = new OverScroller(context);
}
@TargetApi(Build.VERSION_CODES.HONEYCOMB)
@Override
public boolean onTouchEvent(MotionEvent event) {
View childAt = getChildAt(firstViewPosition);
int x = (int)event.getX();
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
mLastX = x;
mStart = x;
break;
case MotionEvent.ACTION_MOVE:
int dx = mLastX - x;
// if (dx > ) {
// }
break;
case MotionEvent.ACTION_UP:
break;
}
postInvalidate();
return true;
}
/**
* Calling startScroll() on mScroller alone produces no scrolling; the scroll only takes effect when its progress is read back here.
* The parent invokes this method while drawing its children (drawChild).
*/
@TargetApi(Build.VERSION_CODES.GINGERBREAD)
@Override
public void computeScroll() {
super.computeScroll();
}
@Override
public boolean onInterceptTouchEvent(MotionEvent ev) {
return super.onInterceptTouchEvent(ev);
}
@Override
protected void onSizeChanged(int w, int h, int oldw, int oldh) {
super.onSizeChanged(w, h, oldw, oldh);
}
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
measureWidth(widthMeasureSpec);
measureHeight(heightMeasureSpec);
int childCount = getChildCount();
for (int i = 0; i < childCount; ++i) {
measureChild(getChildAt(i), widthMeasureSpec, heightMeasureSpec);
}
}
private int measureWidth(int widthMeasureSpec) {
int mode = MeasureSpec.getMode(widthMeasureSpec);
int size = MeasureSpec.getSize(widthMeasureSpec);
switch (mode){
case MeasureSpec.AT_MOST:
break;
case MeasureSpec.EXACTLY:
mViewWidth = size;
break;
}
return size;
}
private int measureHeight(int heightMeasureSpec) {
int mode = MeasureSpec.getMode(heightMeasureSpec);
int size = MeasureSpec.getSize(heightMeasureSpec);
switch (mode){
case MeasureSpec.AT_MOST:
mScreenHeight = mScreenHeight/3;
break;
case MeasureSpec.EXACTLY:
mScreenHeight = size;
break;
}
return size;
}
@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
int childCount = getChildCount();
int maxViewWidth = (mViewWidth - 60) / 3;
for (int i = 0; i < childCount; i++) {
View child = getChildAt(i);
if (child.getVisibility() == GONE) {
break;
}
if (i < firstViewPosition || firstViewPosition + 4 < i) {
child.layout(0, mScreenHeight/2, 0, mScreenHeight/2);
}
if (firstViewPosition == i) {
child.layout(0, mScreenHeight / 3, maxViewWidth / 3, mScreenHeight * 2 / 3);
}
if (firstViewPosition + 1 == i) {
if (child.getVisibility() != GONE) {
child.layout(maxViewWidth / 3 + 10, mScreenHeight / 6, maxViewWidth + 10, mScreenHeight * 5 / 6);
}
}
if (firstViewPosition + 2 == i) {
if (child.getVisibility() != GONE) {
child.layout(mViewWidth / 2 - maxViewWidth / 2, 0, mViewWidth / 2 + maxViewWidth / 2, mScreenHeight);
}
}
if (firstViewPosition + 3 == i) {
if (child.getVisibility() != GONE) {
child.layout(mViewWidth / 2 + maxViewWidth / 2 + 20, mScreenHeight / 6, mViewWidth / 2 + maxViewWidth * 7/ 6 + 20, mScreenHeight * 5 / 6);
}
}
if (firstViewPosition + 4 == i) {
if (child.getVisibility() != GONE) {
child.layout(mViewWidth - maxViewWidth / 3, mScreenHeight / 3, mViewWidth, mScreenHeight * 2 / 3);
}
}
}
}
}
<file_sep>/app/src/main/java/utils/HandleMatrix.java
package utils;
import android.graphics.ColorMatrix;
/**
* Some commonly used image-processing color matrices
* Created by mavin on 2016/12/7.
*/
public class HandleMatrix {
public static float[] GRAY = { 0.33f,0.59f,0.11f,0,0,
0.33f,0.59f,0.11f,0,0,
0.33f,0.59f,0.11f,0,0,
0, 0, 0, 1,0};
public static float[] REVERSAL = { -1,0,0,1,1,
0,-1,0,1,1,
0,0,-1,1,1,
0,0,0,1,0,};
public static ColorMatrix getGrayMatrix(){
ColorMatrix matrix = new ColorMatrix();
matrix.set(GRAY);
return matrix;
}
public static ColorMatrix getMatrix(float[] src){
ColorMatrix matrix = new ColorMatrix();
matrix.set(src);
return matrix;
}
}
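/*
 * Usage sketch added for illustration only (not part of the original sources):
 * a ColorMatrix only takes effect once it is wrapped in a ColorMatrixColorFilter
 * and attached to a Paint; any Bitmap drawn with that Paint is then rendered in
 * grayscale.
 */
class HandleMatrixSample {
static android.graphics.Paint grayPaint() {
android.graphics.Paint paint = new android.graphics.Paint(android.graphics.Paint.ANTI_ALIAS_FLAG);
paint.setColorFilter(new android.graphics.ColorMatrixColorFilter(HandleMatrix.getGrayMatrix()));
return paint;
}
}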
<file_sep>/app/src/main/java/com/example/administrator/custemview/ImageDrawActivity.java
package com.example.administrator.custemview;
import android.graphics.Bitmap;
import android.graphics.drawable.ShapeDrawable;
import android.graphics.drawable.shapes.RectShape;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;
import android.widget.ImageView;
import com.bumptech.glide.load.engine.DiskCacheStrategy;
import com.bumptech.glide.request.target.SimpleTarget;
import com.bumptech.glide.request.transition.Transition;
import java.util.ArrayList;
import butterknife.Bind;
import butterknife.ButterKnife;
import utils.GlideApp;
import view.ImageStackDrawable;
public class ImageDrawActivity extends AppCompatActivity {
@Bind(R.id.img)
ImageView img;
@Bind(R.id.img1)
ImageView img1;
@Bind(R.id.img2)
ImageView img2;
@Bind(R.id.img3)
ImageView img3;
@Bind(R.id.img4)
ImageView img4;
@Bind(R.id.img5)
ImageView img5;
@Bind(R.id.img6)
ImageView img6;
private ImageView mImage;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_image_draw);
ButterKnife.bind(this);
ImageView[] is = {img1, img2, img3, img4, img5, img6};
mImage = (ImageView) findViewById(R.id.img);
ImageStackDrawable imageStackDrawable = new ImageStackDrawable(this);
ArrayList<String> list = new ArrayList<>();
list.add("https://ss1.bdstatic.com/70cFvXSh_Q1YnxGkpoWK1HF6hhy/it/u=3502343973,1168279496&fm=11&gp=0.jpg");
list.add("https://timgsa.baidu.com/timg?image&quality=80&size=b9999_10000&sec=1506079577514&di=d11b51459c2bb8528000b0427fbd28a8&imgtype=0&src=http%3A%2F%2Fimg0.ph.126.net%2FXjXl3KcowmXdE1pcsFVe8g%3D%3D%2F1067353111787095545.jpg");
list.add("https://timgsa.baidu.com/timg?image&quality=80&size=b9999_10000&sec=1506079577514&di=61c7a9453105db85a9a6562340f83289&imgtype=0&src=http%3A%2F%2Fwww.laozhq.cn%2FUploadFile%2F2013-2%2F20132274451175515.jpg");
list.add("https://timgsa.baidu.com/timg?image&quality=80&size=b9999_10000&sec=1506079577513&di=e9365b3711c87d4d09efa2bd1c003e99&imgtype=0&src=http%3A%2F%2Fimg.daimg.com%2Fuploads%2Fallimg%2F170214%2F3-1F214233558.jpg");
list.add("http://img.zcool.cn/community/05e5e1554af04100000115a8236351.jpg");
list.add("https://timgsa.baidu.com/timg?image&quality=80&size=b9999_10000&sec=1506079577513&di=f943a6354adb4d12435067eba565d113&imgtype=0&src=http%3A%2F%2Fpic32.photophoto.cn%2F20140812%2F0035035784895932_b.jpg");
imageStackDrawable.setData(list);
ShapeDrawable shapeDrawable = new ShapeDrawable(new RectShape());
shapeDrawable.setIntrinsicHeight(100);
shapeDrawable.setIntrinsicWidth(100);
mImage.setImageDrawable(imageStackDrawable);
for (int i = 0; i < list.size(); i++) {
GlideApp.with(this)
.asBitmap()
.load(list.get(i))
.placeholder(R.drawable.default_user_head_img)
.error(R.drawable.default_user_head_img)
.diskCacheStrategy(DiskCacheStrategy.ALL)
.skipMemoryCache(false)
.into(is[i]);
}
}
}
<file_sep>/app/src/main/java/javaBean/DayWeather.java
package javaBean;
/**
* Created by zchao on 2016/6/6.
*/
public class DayWeather {
private long time;
private int weather;
private int tempMax;
private int tempMin;
public void setTime(long time) {
this.time = time;
}
public void setWeather(int weather) {
this.weather = weather;
}
public void setTempMax(int tempMax) {
this.tempMax = tempMax;
}
public void setTempMin(int tempMin) {
this.tempMin = tempMin;
}
public long getTime() {
return time;
}
public int getWeather() {
return weather;
}
public int getTempMax() {
return tempMax;
}
public int getTempMin() {
return tempMin;
}
}
<file_sep>/app/src/main/java/model/Netdata.java
package model;
/**
* Created by zchao on 2016/5/19.
*/
public class Netdata {
/**
* time : 2015-08-14 07:47
* title : 女环卫工进酒店如厕 被保洁员以堵厕所为由打伤
* description : 女环卫工进酒店如厕 被保洁员以堵厕所为由打伤...
* picUrl : http://photocdn.sohu.com/20150814/Img418837718_ss.jpg
* url : http://news.sohu.com/20150814/n418837716.shtml
*/
private String time;
private String title;
private String description;
private String picUrl;
private String url;
public String getTime() {
return time;
}
public void setTime(String time) {
this.time = time;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public String getPicUrl() {
return picUrl;
}
public void setPicUrl(String picUrl) {
this.picUrl = picUrl;
}
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
}
<file_sep>/app/src/main/java/com/example/administrator/custemview/ScreenrecordActivity.java
package com.example.administrator.custemview;
import android.annotation.TargetApi;
import android.app.Activity;
import android.content.Intent;
import android.media.projection.MediaProjection;
import android.media.projection.MediaProjectionManager;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.support.annotation.Nullable;
import android.widget.Button;
import android.widget.EditText;
import android.widget.Toast;
import java.io.File;
import butterknife.Bind;
import butterknife.ButterKnife;
import butterknife.OnClick;
import screenrecord.ScreenREC;
/**
* Created by zchao on 2016/8/16.
*/
@TargetApi(Build.VERSION_CODES.LOLLIPOP)
public class ScreenrecordActivity extends BaseActivity {
private static final String RESULT_CODE_KEY = "result_code";
private static final String RESULT_DATA_KEY = "result_data";
private static final int RESULT_REQUEST_CODE = 1;
private int mResultCode;
private Intent mResultData;
private MediaProjectionManager mProjectManager;
private MediaProjection mMediaPro;
@Bind(R.id.text_view)
EditText mEditText;
@Bind(R.id.start_btn)
Button mStartBtn;
private ScreenREC mScreenREC;
private File file;
// @Bind(R.id.surface_view)
// SurfaceView mSurfaceView;
@Override
protected void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.screenrecord_activity);
ButterKnife.bind(this);
mProjectManager = (MediaProjectionManager) getSystemService(MEDIA_PROJECTION_SERVICE);
if (savedInstanceState != null) {
mResultCode = savedInstanceState.getInt(RESULT_CODE_KEY);
mResultData = savedInstanceState.getParcelable(RESULT_DATA_KEY);
}
file = new File(Environment.getExternalStorageDirectory(), "video-"+ System.currentTimeMillis()+ ".mp4");
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
if (requestCode == RESULT_REQUEST_CODE) {
if (resultCode != Activity.RESULT_OK) {
Toast.makeText(this, "Screen capture permission was denied by the user", Toast.LENGTH_LONG).show();
} else {
mMediaPro = mProjectManager.getMediaProjection(resultCode, data);
mScreenREC = new ScreenREC(this, mMediaPro, file);
mScreenREC.start();
mStartBtn.setText("STOP");
}
}
}
@OnClick(R.id.start_btn)
public void startScreenCapture() {
if (mScreenREC != null) {
mScreenREC.quit();
mScreenREC = null;
mStartBtn.setText("START");
} else {
startActivityForResult(mProjectManager.createScreenCaptureIntent(), RESULT_REQUEST_CODE);
}
}
@Override
protected void onDestroy() {
super.onDestroy();
if (mScreenREC != null) {
mScreenREC.quit();
mScreenREC = null;
}
}
}
<file_sep>/app/src/main/java/view/WeatherImgeTextView.java
package view;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Typeface;
import android.support.annotation.Nullable;
import android.util.AttributeSet;
import android.widget.TextView;
/**
* Created by zchao on 2017/5/23.
* desc:
* version:
*/
public class WeatherImgeTextView extends android.support.v7.widget.AppCompatTextView {
public WeatherImgeTextView(Context context) {
this(context, null);
}
public WeatherImgeTextView(Context context, @Nullable AttributeSet attrs) {
super(context, attrs);
Typeface typeface_number = Typeface.createFromAsset(context.getAssets(), "and_num_Regular.ttf");
setTypeface(typeface_number);
}
float fontHeight = -1;
@Override
protected void onDraw(Canvas canvas) {
canvas.save();
Paint paint = getPaint();
paint.setTextAlign(Paint.Align.CENTER);
paint.setColor(getCurrentTextColor());
if (fontHeight == -1) {
Paint.FontMetrics ft = getPaint().getFontMetrics();
fontHeight = ft.bottom - ft.top - ft.descent - ft.leading;
}
canvas.drawText(getText().toString().trim(), getWidth()/2, (getHeight() - fontHeight) / 2, paint);
canvas.restore();
}
}
<file_sep>/app/src/main/java/littlegridview/TestGridAdapter.java
package littlegridview;
import android.content.Context;
import android.text.TextUtils;
import android.view.View;
import android.widget.TextView;
import com.example.administrator.custemview.R;
import java.util.ArrayList;
/**
* Created by zchao on 2017/3/24.
* desc:
* version:
*/
public class TestGridAdapter extends MeasureViewAdapter<TestGridAdapter.MyHolder>{
ArrayList<String> mList = new ArrayList<>();
public TestGridAdapter(Context mContext) {
super(mContext);
}
@Override
public MyHolder onCreateView(Context context) {
return new MyHolder(context);
}
@Override
public int getTotleCount() {
return mList.size();
}
@Override
public int getSpanCount() {
return 2;
}
@Override
public int getDefaultCount() {
if (isOpen()) {
return getTotleCount();
} else {
return 4;
}
}
public void setmList(ArrayList<String> list) {
if (list == null ) {
return;
}
mList.clear();
mList.addAll(list);
notifyDataSetChanged();
}
@Override
public void onBindViewHolder(MyHolder holder, int position) {
holder.bindView(mList.get(position));
}
public class MyHolder extends LittleGridView.LittleGridViewBaseHolder<String>{
private TextView mText;
public MyHolder(Context context) {
super(context);
mText = (TextView) getItemView().findViewById(R.id.text);
}
@Override
public int getLayoutRes() {
return R.layout.little_grid_item;
}
@Override
public void bindView(String tool) {
if (!TextUtils.isEmpty(tool)) {
mText.setText(tool);
}
}
}
}
|
c2f29d2cf15b820ca5414ed4b2e038b0e4d36110
|
[
"Java",
"Gradle"
] | 73
|
Java
|
seahorseBra/CustemView
|
9799d3fa332f045e6c1a4ea2fdad2928d3cb7d72
|
d4500aa0723972d70f2269d233614b1e375b0c9f
|
refs/heads/master
|
<file_sep># SBReporter
Create Excel reports from an Oracle database
This project generates reports from an Oracle database as files in Excel format.
The background: to send out the analysts' various periodic reports I currently
generate text files in sqlplus in HTML format, like this:
SET MARKUP HTML ON
SPOOL xxxx.xls;
@@report_business_activity_xxxx.sql;
spool off;
Excel accepts this format, but sometimes does not render it entirely correctly.
Goal of the project: produce reports as files in Excel's native format.
Architecture: a Python script that takes as input:
1) a parameter file
2) a file with the SQL query
3) an Excel template file (all headers must already be filled in)
As output the script produces an Excel file.
Versions:
Version 0.1 Initial version, works overall, testing started
Version 0.2 Ported to linux
Version 0.3 Operation on linux verified, bugs fixed
Installation instructions:
1) Install Python
2) Copy the SBReporter.py file to the server
3) Create a parameter file
4) Create an Excel template (report header)
5) Create a launch script
6) Grant execute permissions
chmod u+x SBReporter.py
chmod u+x report_mpgu_stat_week.sh
7) Test run
export ORACLE_HOME=
export LD_LIBRARY_PATH=
./SBReporter.py -i report_MPGU_1_week.ini -o report_mpgu_stat_week_052018.xlsx
8) Add the sh launch script to the cron scheduler
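For example, a crontab entry that runs the report every Monday at 07:00 could look like this (the path to the launch script is illustrative):
0 7 * * 1 /home/oracle/reports/report_mpgu_stat_week.sh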
<file_sep>
-- Report
select *
from emias_infomat.DATABASECHANGELOG
<file_sep>[REPORT]
sql_file = report_MPGU_1_week.sql
first_row = 4
first_col = 1
excel_page = report_mpgu_stat_week
[DB]
hostName = 10.0.8.117
portNumber = 1521
sid = emiasdb
username = EMIAS_CLUSTER
password = ...
[LOG]
log_file = report_MPGU_1_week.log
<file_sep>#! /opt/Python/Python-3.6.5/python
# coding: utf8
"""
<NAME> 05/2018
<EMAIL>
Create Excel-file from single SQL
"""
import openpyxl
import logging
import logging.config
import sys
import cx_Oracle
import traceback
import configparser
import getopt
from pathlib import Path
# parameters
def read_params(inputfile):
# global vars
global log_file
global username
global password
global hostName
global portNumber
global sid
global sql_file
global sqlCommand
global first_row
global first_col
global excel_page
# default
excel_page = "Report"
# start
logger.debug('Input parameter file is "'+ inputfile+'"')
my_file = Path(inputfile)
if not my_file.is_file():
        print('Parameter file: '+inputfile+" does not exist!")
        logger.error('Parameter file: '+inputfile+" does not exist!")
sys.exit(2)
config = configparser.ConfigParser()
config.read(inputfile)
sql_file = config['REPORT']['sql_file']
excel_page = config['REPORT']['excel_page']
log_file = config['LOG']['log_file']
username = config['DB']['username']
password = config['DB']['<PASSWORD>']
hostName = config['DB']['hostName']
portNumber = config['DB']['portNumber']
sid = config['DB']['sid']
first_row = int(config['REPORT']['first_row'])
first_col = int(config['REPORT']['first_col'])
# read sql file
my_file = Path(sql_file)
sqlCommand = ''
if not my_file.is_file():
        print('Sql file: '+sql_file+" does not exist!")
        logger.error('Sql file: '+sql_file+" does not exist!")
sys.exit(2)
with open(sql_file, 'r') as myfile:
sqlCommand=myfile.read()
logger.info("Sql="+sqlCommand)
if sqlCommand == '':
logger.error('Sql file: '+sql_file+" empty!")
sys.exit(2)
# -----------------------------------------------------------------------------------
def add_to_excel(cur_sor):
my_file = Path(excel_file)
if not my_file.is_file():
        logger.error('Excel file: '+excel_file+" does not exist!")
sys.exit(2)
wb = openpyxl.load_workbook(filename=excel_file)
try:
ws = wb[excel_page] # 'Report'
except Exception as e:
logger.error('Function - add_to_excel In Exception')
        logger.error('error opening page: %s', excel_page)
        logger.error(traceback.format_exc())  # format_exc() returns the traceback text; print_exc() returns None
        sys.exit(2)  # ws is undefined if the sheet could not be opened, so stop here
row = first_row
for tupple_row in cur_sor:
col = first_col
for list_item in tupple_row:
ws.cell(row=row, column=col, value=list_item)
col = col + 1
row = row + 1
wb.save(excel_file)
# -----------------------------------------------------------------------------------
# Function to Execute Sql commands over TNS
def runSqlTNS(sqlCommand, username, password, hostName, portNumber, sID):
dsn_tns = cx_Oracle.makedsn(hostName, portNumber, sID)
# print dsn_tns
db = cx_Oracle.connect(username, password, dsn_tns)
logger.info("db.version="+db.version)
cursor = db.cursor()
cursor.execute(sqlCommand)
return cursor
# -----------------------------------------------------------------------------------
# MAIN proc
def main(argv):
global excel_file
global logger
# -------------------
# create logger
logging.config.fileConfig('logging.conf')
logger = logging.getLogger()
logger.info('Started')
# -------------------
# read input params
inputfile = ''
outputfile = ''
try:
opts, args = getopt.getopt(argv, "hi:o:", ["ifile=", "ofile="])
except getopt.GetoptError:
print('test.py -i <inputfile> -o <outputfile>')
logger.debug('No input params - Exit 2')
sys.exit(2)
for opt, arg in opts:
if opt == '-h':
print('test.py -i <inputfile> -o <outputfile>')
logger.debug('Help - Exit 2')
sys.exit()
elif opt in ("-i", "--ifile"):
inputfile = arg
elif opt in ("-o", "--ofile"):
outputfile = arg
inputfile = inputfile.strip()
outputfile = outputfile.strip()
logger.debug('inputfile='+inputfile)
logger.debug('outputfile='+outputfile)
if inputfile == '' or outputfile == '':
print('test.py -i <inputfile> -o <outputfile>')
logger.debug('No input params 2 - Exit 2')
sys.exit(2)
excel_file = outputfile
# -------------------
# Read report params
read_params(inputfile)
# -------------------
# Run Select
try:
c = runSqlTNS(sqlCommand, username, password, hostName, portNumber, sid)
except Exception as e:
logger.error('Function - runSql In Exception')
        logger.error(traceback.format_exc())  # format_exc() returns the traceback text; print_exc() returns None
try:
add_to_excel(c) # Send the Cursor to writetoExcel Function
c.close()
except Exception as e:
logger.error('Function - writeToExcel In Exception')
        logger.error(traceback.format_exc())  # format_exc() returns the traceback text; print_exc() returns None
logger.info('Finished')
if __name__ == "__main__":
print(sys.argv)
main(sys.argv[1:])
<file_sep>
--1. Statistics of applications submitted via MPGU during the previous week
select /*+ PARALLEL 8*/
l2.id as id_АЦ,
l2.name as наим_АЦ,
l.id as id_филиала,
l.name as наим_филиала,
count(case when are.attachment_request_status=10 then are.attachment_request_id end) as сохр_заявления,
(SELECT
/*+ PARALLEL 8*/
count (are1.attachment_request_id)
FROM attachment_request_event are1
join attachment_request ar1 on ar1.attachment_request_id = are1.attachment_request_id
where
ar1.source_attachment_request_id is not null
and are1.attachment_request_status=10
and trunc(are1.change_date) >= trunc(SYSDATE -14) and trunc(are1.change_date) <= trunc(SYSDATE -8)
and are1.attachment_request_id not in
(select attachment_request_id from attachment_request_event
where attachment_request_status in (20,40,50)
and trunc(change_date) >= trunc(SYSDATE -14) and trunc(change_date) <= trunc(SYSDATE -1) )
and ar1.lpu_id = l.id
) as ожид_заявл,
count(case when are.attachment_request_status=40 then are.attachment_request_id end) as прикр_заявления,
count(case when are.attachment_request_status=20 then are.attachment_request_id end) as отк_заявления,
count(case when are.attachment_request_status=50 then are.attachment_request_id end) as закр_заявления,
nvl(v.atp, 0) as действ_прикреп
from attachment_request ar
join attachment_request_event are on are.attachment_request_id = ar.attachment_request_id
join lpu l on l.id = ar.lpu_id
left join lpu_group lg on lg.id = l.lpu_group_id
left join lpu l2 on l2.id = lg.main_lpu_id
left join
(
select
l2.id as acid1,
l2.name as наим_АЦ,
l.id as filid1,
l.name as наим_филиал,
nvl(count(distinct ar.attachment_request_id ) ,0) as atp
from attachment_request ar
join lpu l on l.id=ar.lpu_id
left join lpu_group lg on lg.id = l.lpu_group_id
left join lpu l2 on l2.id = lg.main_lpu_id
join attachment_request_event are on are.attachment_request_id = ar.attachment_request_id
join attachment_request ar2 on ar2.person_insurance_code = ar.person_insurance_code and ar2.attachment_request_id!=ar.attachment_request_id
join attachment_request_event are2 on are2.attachment_request_id = ar2.attachment_request_id
join service_district sd on sd.id=ar2.service_district_id
join lpu_district_type ldt on ldt.id=sd.lpu_district_type and ldt.district_type=10
where
ar.source_attachment_request_id is not null
and
(
trunc(are.change_date)>= trunc(SYSDATE -7) and trunc(are.change_date) <= trunc(SYSDATE -1)
and are.attachment_request_status = 10
)
and
(
are2.attachment_request_status = 40
and are2.change_date<are.change_date
and are2.change_date>= add_months(are.change_date,-12)
and
(
are2.attachment_request_status <> 50
or
(are2.attachment_request_status = 50
and are2.change_date>are.change_date)
)
)
group by
l2.id,
l2.name,
l.id,
l.name
) v on l.id=v.filid1
where
ar.source_attachment_request_id is not null
and trunc(are.change_date)>= trunc(SYSDATE -7) and trunc(are.change_date) <= trunc(SYSDATE -1)
group by
l2.id,
l2.name,
l.id,
l.name,
nvl(v.atp, 0)
order by l2.id
<file_sep>[REPORT]
sql_file = report1.sql
first_row = 4
first_col = 1
excel_page = Report
[DB]
hostName = 10.0.8.116
portNumber = 1521
sid = emiasdb
username = EMIAS_INFOMAT
password = ...
[LOG]
log_file = c:\d\myapp.log
<file_sep># copy excel file
varDate=`date +%d_%m_%Y`
varFileName="report_mpgu_stat_week_1_${varDate}.xlsx"
cp report_mpgu_stat_week.xlsx $varFileName
# add data to excel
alias python=/opt/Python/Python-3.6.5/python
./SBReporter.py -i report_MPGU_1_week.ini -o ${varFileName}
|
7e36954fae6baccf0de1c8c2e1f1303682231cef
|
[
"SQL",
"Markdown",
"INI",
"Python",
"Shell"
] | 7
|
Markdown
|
sbrazgin/SBReporter
|
437730423a92bb240837bc55142ea5606ac7e7c3
|
7192f4ab837aa42a942ef968d7e17ad3fa20af02
|
refs/heads/master
|
<file_sep><html>
<head>
<!--Credit to caligari87 on reddit for the base code-->
<meta charset="UTF-8">
<title>egblip</title>
<link href="https://fonts.googleapis.com/css?family=Merriweather" rel="stylesheet">
<link rel="shortcut icon" href="favicon.ico" type="image/x-icon">
<link rel="icon" href="favicon.ico" type="image/x-icon">
<style>
:focus { outline:0; /*removes the dotted link border*/ }
a {
text-decoration: none;
color: #000000;
}
body {
text-align:center;
font-family:arial, sans-serif;
font-size:13px;
background-color: silver;
background: url("resources/background.png") no-repeat center center fixed;
background-size: cover;
color: #222222;
}
div.centerbox {
position:fixed;
top: 50%;
left: 50%;
transform: translate(-50%, -50%);
display:table-cell;
text-align:center;
vertical-align:middle;
box-shadow:0px 4px 6px dimgrey;
background: #f9f6ff;
border-radius: 20px;
}
div.iconbox {
width:88px;
height:96px;
margin:15px;
display:inline-block;
text-align:center;
vertical-align:middle;
background: #ffffff;
padding:0px;
transition:box-shadow 0.25s;
box-shadow:0px 1px 1px rgba(78, 62, 85, 0.88);
border-radius: 5px;
}
div.iconbox:hover {
box-shadow:0px 4px 6px rgba(78, 62, 85, 0.88);
transition:box-shadow 0.25s;
}
footer {
text-align:left;
position:absolute;
bottom:0;
padding:10px;
color: #FFE7F7;
}
img {
width:64px;
height:64px;
padding:5px;
padding-bottom:8px;
background: #f2f2f2; /* 'verylightgrey' is not a valid CSS color */
}
</style>
</head>
<body>
<script>
(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
})(window,document,'script','https://www.google-analytics.com/analytics.js','ga');
ga('create', 'UA-99868303-1', 'auto');
ga('send', 'pageview');
</script>
<?php
if (isset($_GET['run'])) {
# This code will run if ?run=true is set.
shell_exec("/var/www/updatestartpage");
sleep(5);
header("Location: https://egblip.com");
}
?>
<!-- This link will add ?run=true to your URL, myfilename.php?run=true -->
<p style="text-align: left; margin: 0px">
<a href="?run=true" ;="" style="color:#FFE7F7; text-align:left; padding:10px">Refresh</a>
</p>
<div class="centerbox">
<img style="width:100px; height:100px; padding:10px; padding-bottom:0px" src="resources/logo.png">
<h1 style="font-family: 'Merriweather', serif; padding-top:0px; color:#FF89AC">egblip.com</h1>
<b>
<a href="https://start.egblip.com"><div class="iconbox"><img src="resources/icons/start.png"></br>Startpage</div></a>
<a href="https://github.com/GregPikitis"><div class="iconbox"><img src="resources/icons/github.png"></br>Github</div></a>
<a href="
<?php
$resume = glob("resources/*.pdf");
echo($resume[0]);
?>
"><div class="iconbox"><img src="resources/icons/resume.png"></br>Resume</div></a>
</br>
<a href="https://school.egblip.com"><div class="iconbox"><img src="resources/icons/school.png"></br>School</div></a>
<a href="https://www.linkedin.com/in/emelia-blankenship-2176a2126/"><div class="iconbox"><img src="resources/icons/linkedin.png"></br>LinkedIn</div></a>
</b>
</div>
<footer>Inspired by <a style="color:inherit; text-decoration:inherit" href="https://github.com/caligari87/startpage">caligari87</a></footer>
</body>
</html>
|
4f44e46d1b7d1566df86f1e7cb3a79f562c2dc74
|
[
"PHP"
] | 1
|
PHP
|
GregPikitis/egblip.com
|
77b1f46f4be3ca1749937260e522481a2d1dbd7c
|
164ecee8d101f4ecbcd4e8033258c44050274b4f
|
refs/heads/main
|
<repo_name>alexduboisky/vue-components<file_sep>/src/helpers/mapComputed.js
export default (props) => ({
beforeCreate() {
const ctx = this;
const propsToMerge = props.reduce(
(acc, [names, getter, setter]) => ({
...acc,
...mapComputed(
names,
(field) => getter(field, ctx),
(value, field) => setter(value, field, ctx)
),
}),
{}
);
mergeComputed(this, propsToMerge);
},
});
const mapComputed = (fields = [], get = () => {}, set = () => {}) =>
fields.reduce(
(acc, field) => ({
...acc,
[field]: { get: () => get(field), set: (value) => set(value, field) },
}),
{}
);
const mergeComputed = (ctx, computed = {}) =>
mergeToOptions(ctx, "computed", computed);
/*
 * Merge To Options (one level deep)
 * Unified tool for dynamically adding props to a Vue component's $options in the beforeCreate hook
 *
 * @param {Object} ctx - component's context (this).
 * @param {String} prop - property name, e.g. 'computed' or 'methods' (other options are untested for now).
 * @param {Object} target - object to merge in.
 * */
const mergeToOptions = (ctx, prop = "computed", target = {}) => {
ctx.$options[prop] = { ...ctx.$options[prop], ...target };
};
<file_sep>/src/router/index.js
import Vue from "vue";
import VueRouter from "vue-router";
import Home from "../views/Home.vue";
Vue.use(VueRouter);
export const routes = [
{
path: "/",
name: "Home",
component: Home,
},
{
path: "/accordion",
name: "Accordion",
component: () => import("../views/Accordion.vue"),
},
{
path: "/floating-label",
name: "FloatingLabel",
component: () => import("../views/FloatingLabel.vue"),
},
{
path: "/forbidden",
name: "Forbidden",
component: () => import("../views/Forbidden.vue"),
beforeEnter: (to, from, next) => {
next({ name: "Home" });
},
},
{
path: "**",
name: "NotFound",
component: () => import("../views/NotFound.vue"),
},
];
const router = new VueRouter({
routes,
});
export default router;
|
bed96ea43dcf96b0058bf34b799b2c231cd1a9e1
|
[
"JavaScript"
] | 2
|
JavaScript
|
alexduboisky/vue-components
|
3c05bd10ba7b4b69f3eea50a4694f4969f246aa0
|
e32e4cb7d8a7a90476c1d280b94649e0b244c2ca
|
refs/heads/main
|
<file_sep># descripicaduras
Describe your bug bite & we tell you what bit you
<file_sep>import web
import requests
import json
urls = (
'/picaduras?', 'Picaduras'
)
app = web.application(urls, globals())
class Picaduras():
def GET(self):
try:
picaduras = web.input()
texto=(picaduras["texto"])
key="<KEY>"
url="https://machinelearningforkids.co.uk/api/scratch/" + key + "/classify"
response = requests.get(url, params={ "data" : texto})
if response.ok:
responseData = response.json()
topMatch = responseData[0]
label = topMatch["class_name"]
confidence = topMatch["confidence"]
data = {}
data["Tipo: "] = label
data["%"] = confidence
result = json.dumps(data)
return result
else:
response.raise_for_status()
except:
data = {}
data["mensaje"] = "**SINTOMA NO RECONOCIDO**"
return json.dumps(data)
if __name__ == "__main__":
app.run()
|
3e044e629a7045d1a882025856c7d0582c37bb62
|
[
"Markdown",
"Python"
] | 2
|
Markdown
|
katiaOlem/descripicaduras
|
33408620afa6dfe2f8866d1c3b20124335a632f7
|
3832503a48ff13080b7e32b1cc888ea36c8cb643
|
refs/heads/master
|
<file_sep># Other Image Tasks
This repository includes the starting-point and complete code for the **NSTDemo** application. The project is a quick and easy way to get started experimenting with TuriCreate and CoreML, with visual output that is fun to test.

## Using NSTDemo
The structure of the app is very simple. There is an **AppDelegate.swift** file that is nothing more than what is required, and there is a **ViewController.swift** file with a corresponding **Main.storyboard** that controls a single view and its elements:
* a `UIBarButton` that summons an `UIImagePickerView` to select from Photos
* a `UIImageView` to display the image chosen
* a `UIPickerView` to select the model to apply
* a `UIButton` to apply the model and show the output in the `UIImageView`
* a `UIBarButton` that summons a `UIActivityView` to share the created image
* an optional `UIAlertView` that is displayed if the image selected is too small for the model to transform, or something else goes wrong in the transform process
There is a **StyleTransferModel.mlmodel** file that was trained for a quick demonstration. It features eight styles trained on style images with distinct colours and textures (see below), but was only trained with 6000 iterations and given very limited content images. This should be replaced with something trained for longer and with more diverse training content if output quality is to be improved.

There are also three additional Swift files. **Image.swift** contains `UIImage` extensions for cropping, resizing, and creating `CVPixelBuffer` from a given image. This file also includes a function `styled(with modelSelection: StyleModel) -> UIImage` that will return a copy of the image it is called on, styled with the model and options provided.
**StyleModel.swift** is the file that provides the enumeration passed to the `styled(with:)` function. Its cases correspond to the styles the .mlmodel file was trained with. **Utils.swift** just contains a few general-purpose extensions to make code nicer to read and write, including functions for `MLMultiArray`, `CVPixelBuffer` and `CGContext` types.
### Creating a new MLModel
Once you've got [TuriCreate](https://github.com/apple/turicreate) up and running, ideally inside a virtualenv (refer to [Turi's installation guide](https://github.com/apple/turicreate#supported-platforms) for info), activate the virtualenv and make three folders inside it:
* **content** — place a collection of images of the type you'd like the style transfer to work on in here (ideally lots of images representing a vast range of content: people, animals, landscapes, objects, everything)
* **style** – place as many different style images as you'd like to be able to transfer in here (the images you want your created images to imitate the style of)
* **test** – place a collection of images of the same type as the content images in here, so you can potentially evaluate your style transfer model for effectiveness (like the first category but it needs fewer images; the content doesn't really matter but will benefit from being varied)

At the same level as the folders, add the **train_nst.py** script, and execute it using the command `python train_nst.py`.
Now, sit back and wait for your computer to train a model. Depending on how many styles you ask it to train, and how many content images you provide, this could take upwards of 2 days!
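If you need to write **train_nst.py** yourself, a minimal sketch using TuriCreate's style transfer toolkit might look like the following (the folder names, iteration count, and output filename follow the ones mentioned in this README; the exact contents of the bundled script may differ):

```python
import turicreate as tc

# Load the style images and the content (training) images
styles = tc.load_images('style/')
content = tc.load_images('content/')

# Train the style transfer model -- this is the part that can take days
model = tc.style_transfer.create(styles, content, max_iterations=6000)

# Optionally stylize the held-out test images to eyeball the results
test_images = tc.load_images('test/')
stylized = model.stylize(test_images)
stylized.explore()

# Export for use in the Xcode project
model.export_coreml('StyleTransferModel.mlmodel')
```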
### Modifying code
In the **StyleModel.swift** file, there is the declaration of an enum called **StyleModel**. It looks as follows:

If a new .mlmodel file has been trained to replace the old one, some changes may need to be made.
1. the **enum cases** should list all model style options in the order they were input into training (this will often be alphabetical by image filename): the case name can be anything, the String rawValue should be how you want it listed in the app
2. the **model** variable type and return value must match the name of your .mlmodel file
3. the **constraints** for input size must match those listed in the .mlmodel details (see below)
4. the **isActive** variable return value can be replaced with conditional statements to omit styles from being visible and accessible in the app (in case you decide you don't like all styles you trained on, you can still use the same .mlmodel)

And that's it! Go forth and transform!
## License
**NSTDemo** is available under the [MIT](https://opensource.org/licenses/MIT) license. See the LICENSE file for more info.
All images used in the creation of models included are [Public Domain](https://creativecommons.org/share-your-work/public-domain/) or from Pixabay (either [CC0](https://creativecommons.org/share-your-work/public-domain/cc0/) or under the [Pixabay License](https://pixabay.com/service/license/)).
Images featured/style-transferred in screenshots are of--and belong to--the repository authors.<file_sep>//
// StyleModel.swift
// NSTDemo
//
// Created by <NAME> on 4/3/19.
// Copyright © 2019 Mars and Paris. All rights reserved.
//
import UIKit
import CoreML
enum StyleModel: String, CaseIterable {
case flip = "Upside Down"
// =====================================================================
// TODO 1: add new properties
// =====================================================================
init(index: Int) { self = StyleModel.styles[index] }
static var styles: [StyleModel] { return self.allCases }
var name: String { return self.rawValue }
var styleIndex: Int { return StyleModel.styles.firstIndex(of: self)! }
}
|
30fc5b342daf3530d7d6b4c8d5c6956fb7de30a2
|
[
"Markdown",
"Swift"
] | 2
|
Markdown
|
AIwithSwift/OtherImageTasks
|
9fe30643194d99f8d618b205f9cf9dca084e9fa3
|
07cc889206225675b7bd52481b999ddce40f6951
|
refs/heads/master
|
<file_sep>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Mar 26 19:07:01 2018
@author: noviayou
"""
import numpy as np
#01
C = 0.06 #coupon rate
N = 14 #number of years
I = 0.1
Par = -1000
pv=np.pv(rate=I,nper=N,pmt=C*Par,fv=Par) #present value of given pmt
print ("1.The current bond price is","{:,.2f}".format(pv))
#02
C = 0.09 #coupon rate
N = 14
Pv = 850.46 #current bond price
Par = -1000
rate=np.rate(nper=N,pmt=C*Par,pv=Pv,fv=Par) #interest rate function
print ("2.The rate is","{:,.2f}".format(rate*100),"%")
#03
N = 6
Pv = 970 # current bond price
I = 0.099 #YTM
Par = -1000
pmt=np.pmt(rate=I,nper=N,pv=Pv,fv=Par) #pmt function
C = pmt/Par
print ("3.The coupon rate should be","{:,.2f}".format(C*100),"%")
#04
N = 19*2 #number of years with semiannual payment
C = 0.094/2 #coupon rate
Pv = 1020 #current bond price
Par = -1000
rate = np.rate(nper=N,pmt=C*Par,pv=Pv,fv=Par) #interest rate function
YTM = rate*2
print ("4.The YTM is","{:,.2f}".format(YTM*100),"%")
#05
Realr = 0.055
Infr = 0.02
TR = ((1+Infr)*(1+Realr))-1 #Fisher Equation to get the treasury bill rate
print ("5.The Treasury bill rate should be","{:,.2f}%".format(TR*100))
#06
Totrate = 0.095
Infr = 0.05
Realr = ((1+Totrate)/(1+Infr)-1) #Fisher Equation
print ("6.The real interest rate should be","{:,.2f}%".format(Realr*100))
#07
CJ = 0.05/2
CK = 0.11/2
N = 7*2
I = 0.07/2
Par = -1000
pvJ=np.pv(rate=I,nper=N,pmt=CJ*Par,fv=Par) #current bond price for Bond J
pvK=np.pv(rate=I,nper=N,pmt=CK*Par,fv=Par) #current bond price for Bond K
#a&b
I1 = (0.07+0.02)/2
pvJ1=np.pv(rate=I1,nper=N,pmt=CJ*Par,fv=Par) #current bond price for Bond J with increasing rate
pvK1=np.pv(rate=I1,nper=N,pmt=CK*Par,fv=Par) #current bond price for Bond K with increasing rate
print ("7a.The precentage price change of Bond J is","{:,.2f}".format((pvJ1-pvJ)/pvJ
*100),"%")
print ("7b.The precentage price change of Bond K is","{:,.2f}".format((pvK1-pvK)/pvK
*100),"%")
#c&d
I2 = (0.07-0.02)/2
pvJ2=np.pv(rate=I2,nper=N,pmt=CJ*Par,fv=Par) #current bond price for bond J with decreasing rate
pvK2=np.pv(rate=I2,nper=N,pmt=CK*Par,fv=Par) #current bond price for bond K with decreasing rate
print ("7c.The precentage price change of Bond J is","{:,.2f}".format((pvJ2-pvJ)/pvJ
*100),"%")
print ("7d.The precentage price change of Bond K is","{:,.2f}".format((pvK2-pvK)/pvK
*100),"%")
#08
C = 0.104/2 # Coupon rate
Pv = -1013.04 # Selling Price
Par = 1000 # Par Value
N = 11*2 # Number
rate=np.rate(nper=N,pmt=C*Par,pv=Pv,fv=Par) #YTM function
pmt=np.pmt(rate=rate,nper=N,pv=Par,fv=Par) #use the YTM to get new PMT
Coupon = -pmt/Par #get the coupon payment
print ("8.The coupon rate is","{:,.2f}%".format(Coupon*100))
#09
C = 0.076/2 #Coupon Rate
CP = 1180 # Clean Price
Par = 1000 # Par Value
AI = (C*Par)*(4/6) # Accrued Interest
IP = CP+AI
print ("9.Invoice Price should be ${:,.2f}".format(IP))
#10
C = 5
EAR = 0.114
Month = 52 #1 year for 52 weeks
N = 32 # Number of Year
Growth=0.036 # Growth rate & Inflation rate
realr = ((1+EAR)/(1+Growth)) - 1 # Real Return Weekly Rate
APR = ((1+realr)**(1/Month)-1)*Month # Yearly Interest Rate
WeeklyR = APR/Month # Weekly Interest Rate
pv = np.pv(rate=WeeklyR,nper=N*Month,pmt=-C,fv=0)
print ("10.Present Value of Comitment should be $","{:,.2f}".format(pv))
<file_sep>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Feb 14 11:20:00 2018
@author: noviayou
"""
#1
import numpy as np
import math
pva="-31000" #present value
n=14 #the year
r=0.0625 #the discount rate
pmt=np.pmt(rate=r,nper=n,pv=int(pva),fv=0)
print ("Annual cash flow is" , "{:,.2f}".format(pmt))
#2
pmt=1200 #the PMT
pv=61000 #the present value
r=int(pmt)/int(pv) #the formula for interest rate
print ("interest rate per month :","{:,.2f}".format(r*100),"%")
#3
import numpy as np
pva=-100 #present value
fva=300 #future value
n=24 #72 months equals 24 quarters
rate=np.rate(nper=n,pmt=0,pv=pva,fv=fva) #interest rate function
print ("The rate of return per quarter is","{:,.3f}".format(rate*100),"%")
#4
n=16
g=0.05
r=0.1
payment=540000
A=(1+g)/(1+r)
B=A**n
pv=(payment/(r-g))*(1-B) #The growing annuity payment formula
print ("Present value of winings is","{:,.2f}".format(pv))
#5
pmt=-380 #the PMT
i=0.08 #the interest rate
fv=25694 #the future value
n=np.nper(rate=i,pmt=pmt,pv=0,fv=fv) #number of payments function
print ("There are","{:,.0f}".format(n),"payments")
#6
n=360
I=0.076/12
pmt=-800
pv=np.pv(rate=I,nper=n,pmt=pmt) #present value of given pmt
ball=230000-pv
ballfv=np.fv(nper=n,rate=I,pmt=0,pv=-ball) #future value of ballpayment
print ("The ballpayment should be","{:,.2f}".format(ballfv))
#7
pmt=17000
pv=-0.8*2800000
n=30*12
fv=0
rate=np.rate(nper=n,pmt=pmt,pv=pv,fv=fv)
print ("The APR is","{:,.2f}".format(rate*12*100))
EAR=((1+((rate*12)/12))** 12)-1
print ("The EAR is","{:,.2f}".format(EAR*100))
#8
pmt=-1700
r1=0.13/12
r2=0.10/12
n1=6*12
n2=10*12
fv=0
pv2=np.pv(nper=n2,rate=r2,pmt=pmt,fv = fv) #present value for the last 10 years
pv=(pv2)/((1+(r1))**n1) #discount back pv2 to the present
pv1=np.pv(nper=n1,rate=r1,pmt=pmt,fv=fv)
print ("Value of cash flows is","{:,.2f}".format(pv1+pv))
#9
pv=-43000
N=60
I=0.0825/12
pmt=np.pmt(rate=I,nper=N,pv=pv,fv=0)
print ("Montly payment should be","{:,.2f}".format(pmt))
#10
pv=5000
point=3
r=0.1
Upfront=pv*((point/100))
fv=pv*(1+r)
pv1=pv-Upfront
r1=(fv/pv1)-1
print ("Actual Rate is","{:.2f}%".format(r1* 100))
<file_sep>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Apr 3 09:39:56 2018
@author: noviayou
"""
#2
#a
import pandas as pd
data = {"Year" : [0,1,2,3,4,5,6,7,8,9,10,11], "CF" : [-2960,740,740,740,740,740
,740,740,740,740,740,740]}
cft = pd.DataFrame(data, columns=["Year", "CF"])
N = cft.iloc[0]["CF"]/740
#i = 0
#unpaid = cft.iloc[0]["CF"]
#while unpaid < 0:
# unpaid = cft.iloc[i]["CF"] + cft.iloc[i+1]["CF"]
# if unpaid < cft.iloc[i+2]["CF"]:
# leftunpaid = -unpaid/cft.iloc[i+2]["CF"]
# break
# i = i + 1
#
#noyears = i + 1
#payback = noyears + leftunpaid
print ("2a. Payback year is", "{:,.2f}".format(-N))
#b
data2 = {"Year2" : [0,1,2,3,4,5,6,7,8,9,10,11], "CF2" : [-4366,740,740,740,740,
740,740,740,740,740,740,740]}
cft2 = pd.DataFrame(data2, columns=["Year2", "CF2"])
N = cft2.iloc[0]["CF2"]/740
#i = 0
#unpaid2 = cft2.iloc[0]["CF2"]
#while unpaid2 < 0:
# unpaid2 = cft2.iloc[i]["CF2"] + cft2.iloc[i+1]["CF2"]
# if unpaid2 < cft2.iloc[i+2]["CF2"]:
# leftunpaid2 = -unpaid2/cft2.iloc[i+2]["CF2"]
# break
# i = i + 1
#
#noyears2 = i + 1
#payback2 = noyears2 + leftunpaid2
print ("2b. Payback year is", "{:,.2f}".format(-N))
#c
data3 = {"Year3" : [0,1,2,3,4,5,6,7,8,9,10,11], "CF3" : [-8880,740,740,740,740,
740,740,740,740,740,740,740]}
cft3 = pd.DataFrame(data3, columns=["Year3", "CF3"])
N = cft3.iloc[0]["CF3"]/740
#i = 0
#unpaid3 = cft3.iloc[0]["CF3"]
#while unpaid3 < 0:
# unpaid3 = cft3.iloc[i]["CF3"] + cft3.iloc[i+1]["CF3"]
# if unpaid3 < cft3.iloc[i+2]["CF3"]:
# leftunpaid3 = -unpaid3/cft3.iloc[i+2]["CF3"]
# break
# i = i + 1
#
#noyears3 = i + 1
#payback3 = noyears3 + leftunpaid3
print ("2c. Payback year is", "{:,.2f}".format(-N))
#3
data4 = {"Year4" : [0,1,2,3,4], "CF4" : [-7000,4200,5300,6100,7400]}
cft4 = pd.DataFrame(data4, columns = ["Year4","CF4"])
R = 0.14
i = 0
unpaid4 = cft4.iloc[0]["CF4"]
while unpaid4 < 0:
unpaid4 = cft4.iloc[i]["CF4"]/((1+R)**i) + cft4.iloc[i+1]["CF4"]/((1+R)**(i+1))
if unpaid4 < cft4.iloc[i+2]["CF4"]/((1+R)**(i+2)):
        leftunpaid = -unpaid4/(cft4.iloc[i+2]["CF4"]/((1+R)**(i+2)))  # fraction of the next discounted cash flow still needed
        break
    i = i + 1

noyears4 = i + 1
payback4 = noyears4 + leftunpaid
print ("3. Discounted payback year is", "{:,.2f}".format(payback4))
<file_sep>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Feb 19 11:12:17 2018
@author: noviayou
"""
#1a
D=1.32 #dividend paid at T0
G=0.06 #constant growth rate
R=0.1 #require rate of reture
P0=D*((1+G)/(R-G))
print ("1a: The current price is ","${:,.2f}".format(P0))
#1b
P10=D*((1+G)**11/(R-G))
print ("1b: The price in 10 years is ","${:,.2f}".format(P10))
#2
D1=4.55 #dividend ar time 1
G=0.03 #constant groth rate
PV=40 #current stock price
R=D1/PV+G
print ("2: The requrie rate of return is","{:,.2f}".format(R*100),"%")
#3
D1=4.90
G=0.043
R=0.079
PV=D1/(R-G)
print ("3: The company's current price is","${:,.2f}".format(PV))
#4
PV=90
R=0.1
Dyield=0.5*R #the total return on the stock is evenly divided between a capital gains yield and a dividend yield
D1=PV*Dyield #dividend for next year is the stock price times dividend yield
D0=D1/(1+Dyield)#dividend this year
print ("4: The current dividend is","${:,.2f}".format(D0))
#5
import numpy as np
D=-25 #dividend as pmt
N=7
I=0.1
PV=np.pv(rate=I,nper=N,pmt=D)
print ("5: The current stock price is","${:,.2f}".format(PV))
#6
D1=1.4
G=0.06
Ired=0.089 #the require rate of return for Red company
Iyellow=0.119 #the require rate of return for Yellow company
Iblue=0.153 #the require rate of return for Blue company
#a
PRed=D1/(Ired-G)
print ("6a: The stock price for Red Inc., is","${:,.2f}".format(PRed))
#b
PYellow=D1/(Iyellow-G)
print ("6b: The stock price for Yellow Inc., is","${:,.2f}".format(PYellow))
#c
PBlue=D1/(Iblue-G)
print ("6c: The stock price for Blue Inc., is","${:,.2f}".format(PBlue))
#7
D1=11
n=15
G=0.04
R=0.12
P15=D1/(R-G) #calculate value at t=15
PV=np.pv(rate=R,nper=n,pmt=0,fv=-P15) #discount back value at n=15
print ("7: The current price is","${:,.2f}".format(PV))
#8
D1=13
D2=11
D3=9
D4=5
G=0.08
R=0.17
P5=D4*((1+G)/(R-G)) #calculate value at t=5
PV5=np.pv(rate=R,nper=5,pmt=0,fv=-P5) #discount back value at t=5 to t0
PV4=np.pv(rate=R,nper=4,pmt=0,fv=-D4) #discount D4 back to t0
PV3=np.pv(rate=R,nper=3,pmt=0,fv=-D3) #discount D3 back to t0
PV2=np.pv(rate=R,nper=2,pmt=0,fv=-D2)
PV1=np.pv(rate=R,nper=1,pmt=0,fv=-D1)
PV=PV5+PV4+PV3+PV2+PV1 #add discounted future cash flow
print ("8: The current price is", "${:,.2f}".format(PV))
#9
G1=0.18
n=3
G2=0.04
R=0.07
D=1.60
D1=D*(1+G1)
D2=D*((1+G1)**2) #calculate Dividend at t2
D3=D*((1+G1)**3) #calculate dividend at t3
D4=D3*(1+G2) #calculate dividend at t4
P4=(D4*(1+G2))/(R-G2) #value at t4
PV4=np.pv(rate=R,nper=4,pmt=0,fv=-P4)
PV3=np.pv(rate=R,nper=3,pmt=0,fv=-D3)
PV2=np.pv(rate=R,nper=2,pmt=0,fv=-D2)
PV1=np.pv(rate=R,nper=1,pmt=0,fv=-D1) #discounct back future cash flow
PV=PV4+PV3+PV2+PV1+D
print ("9: The current price is", "${:,.2f}".format(PV))
#10
D=18
R=0.19
G=-0.12
PV=D*(1+G)/(R-G)
print ("10: Today,you will pay","${:,.2f}".format(PV))
#11
PV=75
R=0.11
G=0.05
div=(PV*(R-G))/(1+G)
print ("11: The current dividend is","${:,.2f}".format(div))
#12
D=14
R=0.07
P5=D/R
PV=np.pv(rate=R,nper=5,pmt=0,fv=-P5) #discount back future cash flow
print ("12: The current stock price is","${:,.2f}".format(PV))
#13
D=1.30
G1=0.35
G2=0.05
R=0.14
N=9
P1=(D*(1+G1)/(R-G1))*(1-((1+G1)/(1+R))**N) #the first part of cash flow
P2=(((1+G1)/(1+R))**N)*(D*(1+G2)/(R-G2)) #discount back growth rate future cash flow
print ("13: Price of Stock Today is","${:,.2f}".format(P1+P2))
<file_sep>
#1 Annual increasing in selling price
avgP07 = 27958
avgP01 = 21308
year = 6
incP = ((avgP07/avgP01)**(1/year)) - 1
print ("The annual increase in selling price: {0:.2f}%".format(incP * 100))
#2 Number of year
from math import exp, log
FerPri = 180000
PreMon = 35000 #Saving Money at t=0
Int = 0.05
NumYear = (log(FerPri)-log(PreMon))/ (log(1+Int))
print ('It will be {:.2f} years before you have enough money to buy the car' .format(NumYear))
#3 Present value of liability
from math import exp, log
mill=1000000
FV = 650*mill
year= 17
INT= 0.095
PV = FV/((1+INT)**(year))
print ('Present Value:', '${:,.2f}'.format(PV))
#4 Present Value of Windfall
mill = 1000000
PrizeVal = 4.5*mill
year = 73
Int = 0.08
PresentVal = PrizeVal/((1+Int)**(year))
print ('The present value of windfall: ${:,.2f}' .format(PresentVal))
#5 Future Value of Coin Collection
PresVal = 55
Int = 0.07
future = 2034
present = 1947
year = future - present
FutureVal = PresVal * ((1+Int)**year)
print ('The future value of Coin Collection: ${:,.2f}' .format(FutureVal))
#6 Percentage increase in Winner Check & Future Value of Winner Prize in 2040
WinPriz01 = 120
WinPriz02 = 1179000
n1 = 1895
n2 = 2007
n3 = 2040
Int = ((WinPriz02/WinPriz01)**(1/(n2-n1))) - 1
WinPriz03 = WinPriz02 * ((1+Int)**(n3-n2))
print ("a) The percentage increase per year in the winner's prize: {:.2f}%" .format(Int*100))
print ("b) The winner prize in 2040: ${:,.2f}" .format(WinPriz03*100))
#7 Annual return rate
Price2003 = 10305500
Price1999 = 12385500
n1=1999
n2 = 2003
year = n2-n1
Return = ((Price2003/Price1999)**(1/year))-1
print ('The annual return rate on the sculpture: {:.2f}%' .format(Return*100))
#8 Future Value
RetireAmt = 2000
Int = 0.1
n = 35
wait = 7
Future01 = RetireAmt*((1+Int)**n)
Future02 = RetireAmt*((1+Int)**(n+wait))
print ('a) There will be ${0:,.2f} in the account when you retire in 35 years' .format(Future01))
print ('b) There will be ${0:,.2f} in the account if waiting 7 years before contributing' .format(Future02))
#9 Future Value
PresVal = 29000
Int = 0.05
now = 2
future = 10
year = future - now
FutureVal = PresVal*((1+Int)**(year))
print ('There will be ${0:,.2f} in the account in 10 years' .format(FutureVal))
#10 Number of Investing Period
from math import exp, log
PresVal = 8000
now = 2
Int = 0.12
FutureVal = 95000
year = (log(FutureVal)-log(PresVal))/log(1+Int)
print ('Waiting {:,.2f} years to get $95000' .format(year +2))<file_sep>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Feb 22 11:56:07 2018
@author: noviayou
"""
from nltk.classify import NaiveBayesClassifier
def word_feats(words):
return dict([(word, True) for word in words])
positive_vocab=["awesome","innovation","outstanding","fantastic","terrific","good","great"]
negative_vocab=["bad","terrible","useless","hate"]
netural_vocab=["Blockchain","is","was","know","technology"]
positive_features=[(word_feats(pos),"pos") for pos in positive_vocab]
negative_features=[(word_feats(neg),"neg") for neg in negative_vocab]
netural_features=[(word_feats(neu),"neu") for neu in netural_vocab]
train=positive_features+negative_features+netural_features
cl=NaiveBayesClassifier.train(train)  # use the train() classmethod; the constructor expects probability distributions
twi=r"/Users/noviayou/Downloads/twitter"
import json
day1=r"/Users/noviayou/Downloads/twitter/00.json"
with open(day1) as f:  # json.loads expects a JSON string, so read the file before parsing
    text=json.load(f)
print (text)
|
b1a98fed63f337bc2a22a86cf2f1adc0d3c0c29d
|
[
"Python"
] | 6
|
Python
|
noviayou/Financial-Management
|
fc013ac1f58191750a10133946f4d05be7fb2e07
|
a4f638753670e810a51319aa507e82f7649cdcfe
|
refs/heads/master
|
<file_sep>package com;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
@Entity
@Table(name="product")
public class Product {
@Id
private String productId;
@Column(name = "productName")
private String productName;
@Column(name = "productPrice")
private String productPrice;
public Product() {
super();
}
public Product(String productId, String productName, String productPrice) {
super();
this.productId = productId;
this.productName = productName;
this.productPrice = productPrice;
}
public String getProductId() {
return productId;
}
public void setProductId(String productId) {
this.productId = productId;
}
public String getProductName() {
return productName;
}
public void setProductName(String productName) {
this.productName = productName;
}
public String getProductPrice() {
return productPrice;
}
public void setProductPrice(String productPrice) {
this.productPrice = productPrice;
}
}
<file_sep>package com;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.servlet.ModelAndView;
@Controller
public class LoginController {
@RequestMapping("/")
public String WelcomeUser()
{
System.out.println("Successfully Launched");
return "login";
}
@RequestMapping(value="/validate",method=RequestMethod.POST)
public ModelAndView validate(HttpServletResponse response,
HttpServletRequest request){
String name=request.getParameter("uname");
String password=request.getParameter("password");
System.out.println(name);
System.out.println(password);
if(name.equals("loki") && (password.equals("123"))){
return new ModelAndView("welcome","msg","Hello User");
}
else{
return new ModelAndView("error","msg","Sorry user");
}
}
}
|
7d57fd608dd2ff731ca0728454c98ac38caf0f8b
|
[
"Java"
] | 2
|
Java
|
lokeshnehete/expertadvice
|
56edac09010be68558d468131ae677522af72604
|
ef610610f8fd2edc51d6024976159ef633e44b76
|
refs/heads/master
|
<file_sep>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'description': 'Crop Yield Loss Data Processing Toolbox',
'url': 'https://github.com/nicococo/CropYieldLoss',
'author': '<NAME>',
'author_email': '<EMAIL>',
'version': '0.1',
'install_requires': ['nose', 'cvxopt','scikit-learn','pandas'],
'packages': ['CropYieldLoss'],
'scripts': [],
'name': 'CropYieldLoss',
'classifiers':['Intended Audience :: Science/Research',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
'Operating System :: POSIX',
'Operating System :: Unix',
'Operating System :: MacOS',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7']
}
setup(**config)<file_sep>import sklearn.svm as svm
import sklearn.metrics as metrics
from feature_construction_utils import *
# 1. load the processed challenged data (download see website)
wdata, dates, stations, dists, label = load_data('cylad1.npz')
# 2. generate 7 basic features per weather station per year from wdata
F = get_basic_weather_station_feats(wdata, dates)
# 3. generate the final dataset based on the basic features. Stack the '3' nearest neighbors atop, add a bias
# for each year and a bias for each district
X, inds, y = get_exms_basic_feats(3, F, stations, dates, dists, label,
add_dist_bias=True, add_year_bias=True, avg_neighbors=False)
# 4. get a split for training/test
X_train, inds_train, y_train, X_test, inds_test, y_test = split_train_test(X, inds, y, perc=0.2)
# 5. train a linear support vector machine (without intercept, balance the classes)
mySvm = svm.LinearSVC(C=1., fit_intercept=False, class_weight='balanced')
mySvm.fit(X_train, y_train)
# 6. evaluate the test set and report the area under the ROC curve
fpr, tpr, thresholds = metrics.roc_curve(y_test, mySvm.decision_function(X_test), pos_label=+1)
print 'AUC: ', metrics.auc(fpr, tpr)
<file_sep>import numpy as np
import sklearn.svm as svm
import sklearn.metrics as metrics
import os
from feature_construction_utils import *
from data_conversion_utils import *
if __name__ == "__main__":
FEATURE_FNAME = 'feats.npz'
INPUT_DATA = 'cylad1.npz'
if not os.path.isfile(FEATURE_FNAME):
# Loading the dataset takes a long time. It is better to process
# the features and save the intermediate result to file which
# can be loaded much faster.
wdata, dates, stations, dists, label = load_data(INPUT_DATA)
F = get_basic_weather_station_feats(wdata, dates)
np.savez(FEATURE_FNAME, F=F, dates=dates, stations=stations, dists=dists, label=label)
# load the processed file (will be very fast)
print '----- Load Data -----'
foo = np.load(FEATURE_FNAME)
F = foo['F']
dates = foo['dates']
stations = foo['stations']
dists = foo['dists']
label = foo['label']
reps = 25
knns = [1, 3, 5, 9, 13, 21]
percs = [0.05, 0.1, 0.2, 0.4, 0.6, 0.8, 0.9, 0.95]
# final results will be stored here
all_aucs = np.zeros((len(knns), len(percs)))
all_stds = np.zeros((len(knns), len(percs)))
for k in range(len(knns)):
X, inds, y = get_exms_basic_feats(knns[k], F, stations, dates, dists, label,
add_dist_bias=True, add_year_bias=True, avg_neighbors=False)
aucs = np.zeros((reps, len(percs)))
for r in range(reps):
for i in range(len(percs)):
X_train, inds_train, y_train, X_test, inds_test, y_test = split_train_test(X, inds, y, perc=percs[i])
mySvm = svm.LinearSVC(C=1., fit_intercept=False, class_weight='balanced')
mySvm.fit(X_train, y_train)
preds = mySvm.decision_function(X_test)
fpr, tpr, thresholds = metrics.roc_curve(y_test, preds, pos_label=+1)
aucs[r, i] = metrics.auc(fpr, tpr)
                print '\n> ', knns[k], r, i, ' AUC = ', aucs[r, i], '\n'
all_aucs[k, :] = np.mean(aucs, axis=0)
all_stds[k, :] = np.std(aucs, axis=0)
print '--------- Done. ------------------'
print all_aucs
print all_stds
np.savez('ex2_results.npz', aucs=all_aucs, stds=all_stds, reps=reps, knns=knns, percs=percs)
print('\nThe End.')
<file_sep>import numpy as np
import sklearn.metrics as metrics
# Code and examples for Kernel Target Alignments (Christianini et al, NIPS 2001 and JMLR 2002).
# Author: <NAME>, <NAME>, 2016
def normalize_kernel(K):
# A kernel K is normalized, iff K_ii = 1 \forall i
N = K.shape[0]
a = np.sqrt(np.diag(K)).reshape((N, 1))
if any(np.isnan(a)) or any(np.isinf(a)) or any(np.abs(a) <= 1e-16):
print 'Numerical instabilities.'
C = np.eye(N)
else:
b = 1. / a
C = b.dot(b.T)
return K * C
def center_kernel(K):
# Mean free in feature space
N = K.shape[0]
a = np.ones((N, N)) / np.float(N)
return K - a.dot(K) - K.dot(a) + a.dot(K.dot(a))
def kta_align_general(K1, K2):
# Computes the (empirical) alignment of two kernels K1 and K2
# Definition 1: (Empirical) Alignment
# a = <K1, K2>_Frob
# b = sqrt( <K1, K1> <K2, K2>)
# kta = a / b
# with <A, B>_Frob = sum_ij A_ij B_ij = tr(AB')
return K1.dot(K2.T).trace() / np.sqrt(K1.dot(K1.T).trace() * K2.dot(K2.T).trace())
def kta_align_binary(K, y):
# Computes the (empirical) alignment of kernel K1 and
# a corresponding binary label vector y \in \{+1, -1\}^m
    m = y.size  # keep as an integer: reshape requires integer dimensions
    YY = y.reshape((m, 1)).dot(y.reshape((1, m)))
    return K.dot(YY).trace() / (np.float(m) * np.sqrt(K.dot(K.T).trace()))
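def kta_demo_example():
    # Illustrative usage sketch (added for clarity, not part of the original pipeline):
    # compute the alignment between a centered, normalized linear kernel on random
    # features and a random binary label vector.
    X = np.random.randn(50, 5)
    y = np.sign(np.random.randn(50))
    y[y == 0] = 1.  # guard against exact zeros (extremely unlikely with randn)
    K = normalize_kernel(center_kernel(X.dot(X.T)))
    print 'Kernel-target alignment (random data): ', kta_align_binary(K, y)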
def load_data(fname):
import time
t = time.time()
data = np.load(fname)
print '-- Version:', data['version']
wdata = data['wdata']
wdata_cols = data['wdata_cols']
print '-- WDATA (stations x dates x measures):'
print wdata.shape
print wdata_cols
dates = data['dates']
dates_cols = data['dates_cols']
print '-- DATES (day x month x year corresponding to wdata):'
print dates.shape
print dates_cols
stations = data['stations']
stations_cols = data['stations_cols']
print '-- STATIONS (id, location for all stations corresponding to wdata):'
print stations.shape
print stations_cols
dists = data['dists']
dists_cols = data['dists_cols']
print '-- DISTS (name and list of associated stations):'
print dists.shape
print dists_cols
label = data['label']
label_cols = data['label_cols']
print '-- LABEL (for all districts and years):'
print label.shape
print label_cols
print('{0} seconds for loading the dataset.'.format(time.time() - t))
return wdata, dates, stations, dists, label
def get_task1_split_inds(dates, dists):
import itertools
# Returns indices of districts and years for train and test
train_years = np.arange(1979, 2006+1)
test_years = np.arange(2007, 2009+1)
districts = np.arange(dists.shape[0])
test = np.array(list(itertools.product(test_years, districts)))
train = np.array(list(itertools.product(train_years, districts)))
    lens = int(np.round(0.2 * train.shape[0]))  # slice bounds must be integers
inds = np.random.permutation(train.shape[0])[:lens]
return train[inds, :], test
def get_task2_split_inds(dates, dists, perc=0.2):
# Returns indices of districts and years for train and test
import itertools
# Returns indices of districts and years for train and test
years = np.arange(1979, 2009+1)
districts = np.arange(dists.shape[0])
total = np.array(list(itertools.product(years, districts)))
    lens = int(np.round(perc * total.shape[0]))  # slice bounds must be integers
inds = np.random.permutation(total.shape[0])
return total[inds[:lens], :], total[inds[lens:], :]
def get_challenge_split_inds(year_dists_inds, perc=0.2):
# Returns indices of districts and years for train and test
import itertools
# Returns indices of districts and years for train and test
years = np.arange(1979, 2009+1)
districts = np.unique(year_dists_inds[:, 1])
total = np.array(list(itertools.product(years, districts)))
    lens = int(np.round(perc * total.shape[0]))  # slice bounds must be integers
inds = np.random.permutation(total.shape[0])
return total[inds[:lens], :], total[inds[lens:], :]
def split_train_test(X, year_dists_inds, y, perc=0.2, train_inds=None, test_inds=None):
if train_inds is None or test_inds is None:
# if not provided with training and test index lists, then assume
# the standard task
train_inds, test_inds = get_challenge_split_inds(year_dists_inds, perc)
n_train = train_inds.shape[0]
n_test = test_inds.shape[0]
n_feats = X.shape[1]
print 'Number of training year-district-pairs: ', n_train
print 'Number of test year-district-pairs: ', n_test
print 'Number of features: ', n_feats
print 'Total number of data points: ', X.shape[0]
inds1 = []
for (i, j) in train_inds:
inds1.extend(np.where((year_dists_inds[:, 0] == i) & (year_dists_inds[:, 1] == j))[0].tolist())
inds2 = []
for (i, j) in test_inds:
inds2.extend(np.where((year_dists_inds[:, 0] == i) & (year_dists_inds[:, 1] == j))[0].tolist())
print 'Number of train inds found: ', len(inds1)
print 'Number of test inds found: ', len(inds2)
print 'Total number of inds: ', len(inds1) + len(inds2)
print 'Intersecting indices: ', np.intersect1d(inds1, inds2)
assert len(inds1) + len(inds2) <= X.shape[0]
X_train = X[inds1, :]
year_dists_inds_train = year_dists_inds[inds1, :]
y_train = y[inds1]
X_test = X[inds2, :]
year_dists_inds_test = year_dists_inds[inds2, :]
y_test = y[inds2]
return X_train, year_dists_inds_train, y_train, X_test, year_dists_inds_test, y_test
def get_basic_weather_station_feats(wdata, dates):
# Return simple feats for the weather as in the challenge paper
years = np.unique(dates[:, 2])
X = np.zeros((wdata.shape[0], years.size, 7*5))
for j in range(years.size):
offset = 0
for m in [6, 7, 8, 9, 10]:
inds = np.where((dates[:, 2] == years[j]) & (dates[:, 1] == m))[0]
X[:, j, offset:offset+6] = np.mean(wdata[:, inds, :], axis=1)
X[:, j, offset+6] = np.sum(wdata[:, inds, 2] > 0.1, axis=1)
offset += 7
return X
def get_k_nearest_neighbors(k, X):
from sklearn.neighbors import NearestNeighbors
nbrs = NearestNeighbors(n_neighbors=k, algorithm='ball_tree').fit(X)
_, indices = nbrs.kneighbors(X)
for i in range(indices.shape[0]):
assert i in indices[i, :]
return indices
def get_k_nearest_weather_stations(k, stations):
# Calls get_k_nearest_neighbors
# Returns an array N \in I^{548 x k} of k nearest weather stations
X = stations[:, 1:3] # Longitude, Latitute, and Elevation
return get_k_nearest_neighbors(k, X)
def evaluate(y_true, y_pred):
fpr, tpr, thresholds = metrics.roc_curve(y_true, y_pred, pos_label=+1)
return metrics.auc(fpr, tpr)
def get_exms_basic_feats(k, basic_weather_feats, stations, dates, dists, label,
add_year_bias=True, add_dist_bias=True, avg_neighbors=False):
print('Generate examples based on basic weather features and k-nearest-neighbors (k={0}).'.format(k))
X0 = basic_weather_feats # stations x years x feats
print 'Basic weather features size: ', X0.shape
k_inds = get_k_nearest_weather_stations(k, stations)
print 'Knn matrix size: ', k_inds.shape
n_stations = X0.shape[0]
n_feats = X0.shape[2]
n_dists = dists.shape[0]
years = np.unique(dates[:, 2])
n_years = years.size
print 'Number of weather stations: ', n_stations
print 'Number of basic features: ', n_feats
print 'Number of districts: ', n_dists
print 'Number of years: ', n_years
print 'Years: ', years
# Outputs
k_real = k
if avg_neighbors:
k_real = 1
X = np.zeros((n_stations * n_years, n_feats * k_real + add_dist_bias*n_dists + add_year_bias*n_years))
print 'Output feature matrix size: ', X.shape
year_dist_inds = np.zeros((n_stations * n_years, 2), dtype=np.int)
print 'Corresponding Year-District indices size: ', year_dist_inds.shape
y = np.zeros((n_stations * n_years))
print 'Corresponding label size: ', y.shape
cnt = 0
for i in range(n_dists):
for sid in dists[i, 4]:
sind = np.where(stations[:, 0] == sid)[0]
assert sind.size == 1
for j in range(n_years):
y[cnt] = label[i, j]
year_dist_inds[cnt, :] = years[j], i
offset = 0
if not avg_neighbors: # stack neighbors atop each other
for nn in range(k):
X[cnt, offset:offset+n_feats] = X0[k_inds[sind, nn].flatten(), j, :]
offset += n_feats
else: # average neighbors
X[cnt, :n_feats] = np.mean(X0[k_inds[sind, :].flatten(), j, :], axis=0)
offset += n_feats
if add_dist_bias: # add bias for district
X[cnt, offset+i] = 1.
offset += n_dists
if add_year_bias: # add bias for year
X[cnt, offset+j] = 1.
cnt += 1
# convert y to -1,+1 := normal,anomaly:
y[y==0] = -1
print 'Convert label vector to -1/+1: ', np.unique(y)
return X, year_dist_inds, y<file_sep>import pandas as pd
import numpy as np
import datetime
import matplotlib.pyplot as plt
STATES = ['Bihar','MP','UP']
def get_districts_from_weather(path):
    PATH_WEATHER = path + 'weather'  # concatenate (a tuple here breaks the string joins below); assumes path ends with a separator
dists = list()
for s in STATES:
csv = pd.read_csv(PATH_WEATHER+'/'+s+'-stations-districts.csv', delimiter=';')
print csv.columns
ds = csv['DIST'].unique().tolist()
for didx in ds:
ind = np.where(csv['DIST'] == didx)[0][0]
dname = csv['DISTNAME'][ind]
sidx = csv['STCODE'][ind]
sname = csv['STNAME'][ind]
print [didx, dname, sidx, sname]
dists.append([didx, dname, sidx, sname])
print str(len(dists)) + ' districts found.'
return dists
def get_weather(district_id, path):
    PATH_WEATHER = path + 'weather'  # concatenate (a tuple here breaks the string joins below); assumes path ends with a separator
weather = {}
station_ids = list()
col_names = None
for s in STATES:
csv = pd.read_csv(PATH_WEATHER + '/' + s + '-stations-districts.csv', delimiter=';')
if district_id in csv['DIST'].unique():
inds = np.where(csv['DIST'] == district_id)[0]
station_ids.extend(csv['STATION'].iget(inds).unique())
for i in station_ids:
csv = pd.read_csv(PATH_WEATHER + '/' + s + '/weatherdata-' + str(i) + '.csv',
delimiter=',', warn_bad_lines=True, parse_dates=False)
col_names = csv.columns.values.tolist()[:-1]
weather[i] = csv.as_matrix()[:, :-1]
break
    print 'Weather data column header: ', col_names  # col_names is a list, so print it rather than concatenating
return weather, station_ids, col_names
def get_yield(district_id, path):
    PATH_YIELD = path + 'weather'  # same convention as get_weather; a tuple here breaks the string joins below
yields = None
years = None
col_names = None
for s in STATES:
xls = pd.ExcelFile(PATH_YIELD+'/' + s + '/dt-area-prod-a.xls')
table = xls.parse(0, index_col=None, na_values=['NA'])
col_names = table.columns.values.tolist()[5:]
inds = np.where(district_id == table['DIST'])[0]
if inds.size > 0:
# print table['YEAR'][inds]
# print table.loc[inds]
years = table['YEAR'][inds].as_matrix()
yields = table.loc[inds].as_matrix()[:, 5:]
break
    print 'Production data column header: ', col_names  # col_names is a list, so print it rather than concatenating
return yields, years, col_names
def generate_full_dataset(path):
dists = get_districts_from_weather(path)
cnt = len(dists)
wdata = {}
for d in dists:
(wd, sid, wdata_col_name) = get_weather(d[0], path)
wdata.update(wd)
(ydata, years, yield_col_names) = get_yield(d[0], path)
d.append(sid)
d.append(years)
d.append(ydata)
print cnt
cnt -= 1
print 'Saving intermediate full data set...'
np.savez_compressed('{0}data.npz'.format(path), version='1.0',
wdata=wdata, dists=dists, yield_col_names=yield_col_names, wdata_col_name=wdata_col_name)
def ols(vecX, vecy):
# solve the ols regression with a single feature
vecX = np.hstack((vecX[:,np.newaxis], np.ones((vecX.size, 1)))) # datapoints x 2
XXt = vecX.T.dot(vecX)
XtY = vecX.T.dot(vecy)
w = np.linalg.inv(XXt).dot(XtY)
y_pred = w.dot(vecX.T)
return (vecy-y_pred)*(vecy-y_pred), y_pred > vecy
def calc_anom_threshold(se):
# assume se is sorted (se_1 <= se_2 <= ... <= se_n)
dse = se[1:] - se[:-1]
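    # place the cutoff at the largest jump between consecutive sorted errors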
ind = np.argmax(dse)
cutoff = se[ind]
return cutoff, dse
def generate_anomalies(path, plot=False):
print('Generating Anomalies...')
print('Loading intermediate data file.')
data = np.load('{0}data.npz'.format(path))
dists = data['dists']
yield_col_names = data['yield_col_names']
print 'Number of districts: ', len(dists)
print 'Yield column names: ', yield_col_names
print 'Selected columns:', yield_col_names[[0, 1, 12, 13]]
# weather data goes from 1979 - 2014
# yield data starts from 1966 - 2009 (confirmed for all districts)
# count of missing and total values
cnt_missing = 0
cnt_total = 0
if plot:
plt.figure(1)
plt.title('Detrending: Sorted Squared Errors for each District (Time vs. Yield)')
idx = 1
total_data = 0
total_anoms = 0
time_anoms = np.zeros(len(dists[0][5]))
dist_anoms = np.zeros(len(dists))
lbl_dists = []
for d in dists:
years = d[5]
# cnt_missing += len(np.where(d[6][:, 1] <= 0.0)[0]) + len(np.where(d[6][:, 0] <= 0.0)[0]) + len(np.where(d[6][:, 13] <= 0.0)[0]) + len(np.where(d[6][:, 12] <= 0.0)[0])
cnt_missing += len(np.where(d[6][:, 1] < 0.0)[0]) + len(np.where(d[6][:, 0] < 0.0)[0]) + len(np.where(d[6][:, 13] < 0.0)[0]) + len(np.where(d[6][:, 12] < 0.0)[0])
cnt_total += 4.0*len(d[6][:, 1])
        yields = (d[6][:, 1]+d[6][:, 13]) / (d[6][:, 0]+d[6][:, 12]) # rice + maize
se, flag_anom = ols(years, yields.T)
se_bak = np.array(se)
se = se[flag_anom]
inds = np.argsort(se)
cutoff, dse = calc_anom_threshold(se[inds])
ainds = np.where((se_bak >= cutoff) & flag_anom)[0]
lbl = np.zeros(se_bak.size)
lbl[ainds] = 1.0
lbl_dists.append(lbl)
time_anoms[np.array(years[ainds]-1966., dtype='i')] += 1
dist_anoms[idx-1] = len(ainds)
total_anoms += len(ainds)
total_data += se_bak.size
idx += 1
if plot:
plt.subplot(10, 10, idx)
plt.plot(range(se.size), se[inds], '.-r')
plt.plot([0, se.size], [cutoff, cutoff], '-b')
plt.xticks([0, se.size+1], [])
plt.yticks([0, max(1.1*se)], [])
print '\nTotal num of datapoints, Number of anomalies, Fraction'
print (total_data, total_anoms, float(total_anoms)/float(total_data))
print '\nNum of neg. values encountered, Total number of values, Fraction of missing values'
print (cnt_missing, cnt_total, 100.0*float(cnt_missing)/float(cnt_total))
if plot:
plt.figure(2)
plt.subplot(2, 1, 1)
plt.bar(range(len(dists[0][5])), time_anoms)
plt.xticks(range(time_anoms.size), years)
plt.ylabel('Total Number of Anomalies')
plt.xlabel('Year')
plt.subplot(2, 1, 2)
plt.bar(range(len(dists)), dist_anoms)
plt.ylabel('Total Number of Anomalies')
plt.xlabel('District')
plt.show()
return lbl_dists, years
def cut_weather_data(path):
print('Thinning weather data...')
print('Loading intermediate data file.')
data = np.load('{0}data.npz'.format(path))
dists = data['dists']
wdata = data['wdata']
used_stations = []
for d in dists:
used_stations.extend(d[4])
used_stations = np.unique(used_stations)
print '\nThere are {0} weather stations in total.'.format(len(used_stations))
cnt = 0
cnt_skip = 0
stations_cnames = ['Id', 'Longitude', 'Latitude', 'Elevation']
stations = list()
data_cnames = ['Max Temperature', 'Min Temperature', 'Precipitation', 'Wind', 'Relative Humidity', 'Solar']
data = list()
dates_cnames = ['Day', 'Month', 'Year']
dates = list()
lens = list()
flag = 0
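    # dates are collected for the first station only; all stations share the same date range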
wdict = wdata[()]
for wid in wdict.keys():
entry = wdict[wid]
stations.append([wid, entry[0, 1], entry[0, 2], entry[0, 3]])
inds = []
for i in range(entry.shape[0]):
date = datetime.datetime.strptime(entry[i, 0], '%m/%d/%Y')
# if date.year >= 1979 and date.year <= 2009:
if 1979 <= date.year <= 2009:
inds.append(i) # save index
if not flag:
dates.append([date.day, date.month, date.year])
cnt += 1
else:
cnt_skip += 1
# skip non-used lines
entry = entry[inds, 4:]
print entry.shape
if not entry.shape[0] in lens:
lens.append(entry.shape[0])
data.append(entry)
flag = 1
dates = np.array(dates, dtype='i')
print 'Dimensionality of dates: ', dates.shape
print len(lens)
print 'Total number of weather measurement: {0}'.format(cnt)
print 'Total number of skipped weather measurement: {0}'.format(cnt_skip)
return data, data_cnames, stations, stations_cnames, dates, dates_cnames
def cut_dists_data(path):
data = np.load('{0}data.npz'.format(path))
dists = data['dists']
d = []
for i in range(len(dists)):
d.append(dists[i][:5])
print d[:3]
dists_cnames = ['DistCode', 'DistName', 'StateCode', 'StateName', 'StationIds']
return d, dists_cnames
def generate_processed_dataset(path):
print('Process intermediate data...')
# generate anomalies from yield data
(lbl, years) = generate_anomalies(path, plot=False)
# cut labels between years 1979 - 2009
cnt_all = 0
cnt_anom = 0
for i in range(len(lbl)):
lbl[i] = lbl[i][13:]
cnt_all += len(lbl[i])
cnt_anom += len(np.where(lbl[i] == 1.0)[0])
# cut years
years = years[13:]
print 'Years: ', years
print 'Number of years: ', years.size
print 'Check against label: ', len(lbl[0])
assert years.size == len(lbl[0])
# convert to array
lbl = np.array(lbl)
print('Total number of data points: {0}'.format(cnt_all))
print('Total number of anomalies: {0}'.format(cnt_anom))
print('Fraction: {0:1.2f}'.format(float(cnt_anom)/float(cnt_all)))
(data, data_cnames, stations, stations_cnames, dates, dates_cnames) = cut_weather_data(path)
(dists, dists_cnames) = cut_dists_data(path)
print('Saving...')
# np.savez_compressed just takes too long to load
np.savez('{0}cylad1.npz'.format(path), version='1.0',
wdata=data, wdata_cols=data_cnames,
dates=dates, dates_cols=dates_cnames,
stations=stations, stations_cols=stations_cnames,
dists=dists, dists_cols=dists_cnames,
label=lbl, label_cols=years)
print('Finished and Done :)')<file_sep>### About
An anomaly detection challenge for data with complex dependency
structure.
Accompanying software package.
For further information, please visit our challenge website at
http://nicococo.github.io/CropYieldLoss
### Data
There are three states from India: Bihar, MP (Madhya Pradesh), UP (Uttar Pradesh)
Two sets of data from different sources:
- Yield data per year per district
Source: ICRISAT http://vdsa.icrisat.ac.in
- Daily weather data (max temperature, min temperature, precipitation, wind, relative humidity, solar)
from different weather grid points. Weather data is available for several spatial locations within
each district.
Source: http://globalweather.tamu.edu
Processed data can be acquired at http://...
and should contain the following information:
- WDATA (548, 11284, 6)
['Max Temperature' 'Min Temperature' 'Precipitation' 'Wind' 'Relative Humidity' 'Solar']
- DATES (11284, 3)
['Day' 'Month' 'Year']
- STATIONS (548, 4)
['Id' 'Longitude' 'Latitude' 'Elevation']
- DISTS (93, 5)
['DistCode' 'DistName' 'StateCode' 'StateName' 'StationIds']
- LABEL (93, 31)
[1979 - 2009]
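For a quick sanity check, the processed file can also be inspected directly with numpy
(a minimal sketch; it assumes `cylad1.npz` sits in the working directory and uses the
array keys written by the preprocessing script):
```python
import numpy as np
data = np.load('cylad1.npz')
wdata = data['wdata']        # (548, 11284, 6) daily measurements per weather station
dates = data['dates']        # (11284, 3) day/month/year
stations = data['stations']  # (548, 4) id, longitude, latitude, elevation
dists = data['dists']        # (93, 5) district meta data incl. station ids
label = data['label']        # (93, 31) per-district anomaly labels for 1979 - 2009
print 'Weather data shape: ', wdata.shape
print 'Label matrix shape: ', label.shape
```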
### Installation
You can conveniently install the software package using:
pip install git+https://github.com/nicococo/CropYieldLoss.git
### Basic Example
The most basic example for loading, preparing, training, and testing
is given in `ex1_simple.py`:
```python
wdata, dates, stations, dists, label = load_data('cylad1.npz')
F = get_basic_weather_station_feats(wdata, dates)
X, inds, y = get_exms_basic_feats(3, F, stations, dates, dists, label)
X_train, inds_train, y_train, X_test, inds_test, y_test = split_train_test(X, inds, y, perc=0.2)
mySvm = svm.LinearSVC(C=1., fit_intercept=False, class_weight='balanced')
mySvm.fit(X_train, y_train)
fpr, tpr, thresholds = metrics.roc_curve(y_test, mySvm.decision_function(X_test), pos_label=+1)
print 'AUC: ', metrics.auc(fpr, tpr)
```
Note: This example assumes that the processed data 'cylad1.npz' (see the challenge website
for the download link) is stored in your current path.
### References
If you use results, software, or data from this challenge in your
own research, please cite our paper: __to appear...__
|
1fa387efda5a4bb89e3c42611a5ef1b8d29ca563
|
[
"Markdown",
"Python"
] | 6
|
Python
|
nicococo/CropYieldLoss
|
22e70c95e780df3ac7ba3ce9244d36e97e3f49ca
|
d773cd7e4653282dc0ab0509a63cfafa02fcee5a
|
refs/heads/master
|
<repo_name>partkyle/gambit<file_sep>/readme.markdown
Party Planning Poker
====================
A simple app written in the Express web framework, using socket.io for real-time frontend interaction.
<file_sep>/app.js
/**
* Module dependencies.
*/
require('./lib/object');
var express = require('express');
var routes = require('./routes');
var Room = require('./lib/room');
var uuid = require('node-uuid');
var http = require('http');
var path = require('path');
var app = express();
// Configuration
app.configure(function(){
app.set('port', process.env.PORT || 3000);
app.set('views', __dirname + '/views');
app.set('view engine', 'jade');
app.use(express.favicon());
app.use(express.logger('dev'));
app.use(express.bodyParser());
app.use(express.methodOverride());
app.use(app.router);
app.use(express.static(path.join(__dirname, 'public')));
});
app.configure('development', function(){
app.use(express.errorHandler({ dumpExceptions: true, showStack: true }));
});
app.configure('production', function(){
app.use(express.errorHandler());
});
var server = http.createServer(app).listen(app.get('port'), function(){
console.log("Express server listening on port " + app.get('port'));
});
// Routes
app.get('/', routes.index);
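// joining a room also pushes a fresh room list to everyone still in the lobby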
app.get('/room/:id', function(req, res, next) {
io.sockets.in('lobby').emit('update-rooms', { rooms: Room.all() });
next();
}, routes.room);
app.post('/room', routes.newRoom);
// var clearIfEmpty = function(room) {
// // the next user will trigger the timeout,
// // so we can ignore this.
// if (room.players.size() === 0) {
// Room.delete(room.id);
// }
// };
// Sockets
var io = require('socket.io').listen(server);
var ROOM_TIME_LIMIT = 1000 * 60 * 5; // 5 minutes
io.sockets.on('connection', function(socket) {
var room;
var player;
var player_id = uuid.v4();
var updatePlayers = function() {
io.sockets.in(room.id).emit('update-players', { players: room.players, showResult: room.done() });
};
socket.on('lobby', function(data) {
console.log('user connected to lobby');
socket.set('room', 'lobby');
socket.join('lobby');
socket.emit('update-rooms', { rooms: Room.all() });
});
socket.on('init', function(data) {
room = Room.find(data.room_id);
room.players[player_id] = { name: player_id };
socket.set('room', room.id);
socket.join(room.id);
updatePlayers();
socket.emit('update-name', { room: room });
});
socket.on('click-card', function(data) {
console.log('player [%s] clicked %s', player_id, data.score);
room.players[player_id].score = data.score;
updatePlayers();
});
socket.on('change-name', function(data) {
room.players[player_id].name = data.name;
updatePlayers();
});
socket.on('change-room-name', function(data) {
console.log('changing room name for %s', data.room.id);
room.name = data.room.name;
io.sockets.in(room.id).emit('update-name', { room: room });
io.sockets.in('lobby').emit('update-rooms', { rooms: Room.all() });
});
socket.on('reset-game', function(data) {
room.reset();
io.sockets.in(room.id).emit('reset-game', { players: room.players });
});
socket.on('disconnect', function(data) {
if (room) {
delete room.players[player_id];
updatePlayers();
// setTimeout(function() {
// clearIfEmpty(room);
// }, ROOM_TIME_LIMIT);
}
});
});
|
c45a6c72d9cf020771c55a702eda197ae4f1b1d8
|
[
"Markdown",
"JavaScript"
] | 2
|
Markdown
|
partkyle/gambit
|
c6471507e11bd682165b150c9e7cb2cb458a4c08
|
19c9f992504cf8d88e6f488f73f31dc759bf32a3
|
refs/heads/master
|
<repo_name>LanFeusT23/dynamic-import-vue-router-issue<file_sep>/src/main.js
import _ from "lodash";
import VueRouter from "vue-router";
import Vue from 'vue'
import App from './App.vue'
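// route components are imported lazily so each page ends up in its own webpack chunk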
const Landing = () => import(/* webpackChunkName: "Landing" */ "./pages/Landing.vue");
// import ViewPage from "./pages/ViewPage.vue";
const ViewPage = () => import(/* webpackChunkName: "ViewPage" */ "./pages/ViewPage.vue");
Vue.use(VueRouter);
const router = new VueRouter({
routes: [
{ path: "/", component: Landing, name: "landingPage" },
{ path: "/viewPage/:someKey", component: ViewPage, name: "viewPage", props: true }
]
});
function getQueryStringValue(key) {
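    // pull a single query-string parameter out of the legacy URL format (e.g. ?view=...&key=...)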
return decodeURIComponent(window.location.search.replace(new RegExp("^(?:.*[&\\?]" + encodeURIComponent(key).replace(/[\.\+\*]/g, "\\$&") + "(?:\\=([^&]*))?)?.*$", "i"), "$1"));
}
var oldPage = getQueryStringValue("view");
var oldKey = getQueryStringValue("key");
if (!_.isEmpty(oldPage) && !_.isEmpty(oldKey)) {
if (oldPage === "oldUrl") {
router.replace({ name: 'viewPage', params: { someKey: oldKey } });
}
}
new Vue({
el: '#app',
render: h => h(App),
router
})
|
b2ad99525e5345952e1f1495ac27928647d048fe
|
[
"JavaScript"
] | 1
|
JavaScript
|
LanFeusT23/dynamic-import-vue-router-issue
|
8d2f52fd79a33c453148c76fad7aac4c7eb0757d
|
fc0c5b089f5a93d49da2ac43bcac8501aadb9d68
|
refs/heads/master
|
<file_sep>@(pageName: String)(pageContent: Html)
<section>
<div class="row">
<h3 id="board-n" class="text-center">@pageName</h3>
<p class="text-center">Our creative team is making everything possible</p>
</div>
<div class="row">
@pageContent
</div>
</section>
<file_sep>## PlayPin
----------------------------------------
PlayPin is a distributed image aggregator web application. It is developed in Scala, using the Play Framework. Cassandra is used for persistent storage. The application runs in Docker containers and deployment is handled by Vagrant. Vagrant files for both AWS and local deployment are provided. Amazon S3 is used for image storage. Nginx is used as a load balancer between three virtual machines running both PlayPin and Cassandra. The system withstands a virtual machine crash without affecting uptime or data consistency.
Documentation present in [*Documentation PDF*](Documentation_CiobicaEA_IsacFA.pdf)
<file_sep># http://www.thisprogrammingthing.com/2015/multiple-vagrant-vms-in-one-vagrantfile/
Vagrant.configure("2") do |config|
# https://docs.vagrantup.com.
# boxes at https://atlas.hashicorp.com/search.
# config.vm.box = "ubuntu/trusty64"
# config.vm.network "forwarded_port", guest: 80, host: 8080
# config.vm.network "private_network", ip: "192.168.10.14"
config.vm.define "nginx" do |nginx|
nginx.vm.box = "ubuntu/trusty64"
nginx.vm.hostname = 'nginx'
nginx.vm.network :private_network, ip: "192.168.56.104"
nginx.vm.provider :virtualbox do |v|
# v.customize ["modifyvm", :id, "--natdnshostresolver1", "on"]
v.customize ["modifyvm", :id, "--memory", 512]
v.customize ["modifyvm", :id, "--name", "nginx"]
end
config.vm.provision "shell", inline: <<-SHELL
sudo su
apt-get update
apt -y install docker.io
service docker start
export DOCKER_IP='192.168.56.104'
docker run -d --net=host --name consul consul agent -server -bind=$DOCKER_IP -bootstrap -client=$DOCKER_IP -ui
wget -O /usr/local/bin/weave https://github.com/weaveworks/weave/releases/download/latest_release/weave
chmod a+x /usr/local/bin/weave
weave launch 192.168.56.101 192.168.56.102 192.168.56.103
docker run -d --name nginx -p 80:80 isac/nginx-consul-template:1.2
weave run 10.2.1.23/24 --name redis -h redis.weave.local -P -d redis
SHELL
end
end
<file_sep>#!/bin/bash
# addSecurityRule.sh
[ -r lastIP ] && [ -r removeSecurityRule.sh ] && ./removeSecurityRule.sh
currentIP=`curl -s http://ipinfo.io/ip`
aws ec2 authorize-security-group-ingress --group-id sg-862f65ee --ip-permissions "[{\"IpProtocol\": \"tcp\", \"FromPort\": 22, \"ToPort\": 22, \"IpRanges\": [{\"CidrIp\": \"$currentIP/32\"}]}]" && echo $currentIP > lastIP
<file_sep>sudo su
apt-get update
apt -y install docker.io
service docker start
export DOCKER_IP='172.31.25.5'
export CLIENT_IP=$(curl -s http://ipinfo.io/ip)
docker run -d --net=host --name consul consul agent -server -bind=$DOCKER_IP -bootstrap -client=$DOCKER_IP -ui consul
docker run -d --name nginx -p 80:80 isac/nginx-consul-template:1.2
<file_sep># http://www.thisprogrammingthing.com/2015/multiple-vagrant-vms-in-one-vagrantfile/
Vagrant.configure("2") do |config|
# https://docs.vagrantup.com.
# boxes at https://atlas.hashicorp.com/search.
# config.vm.box = "ubuntu/trusty64"
# config.vm.network "forwarded_port", guest: 80, host: 8080
# config.vm.network "private_network", ip: "192.168.10.14"
config.vm.define "web3" do |web3|
web3.vm.box = "ubuntu/trusty64"
web3.vm.hostname = 'web3'
web3.vm.network :private_network, ip: "192.168.56.103"
web3.vm.provider :virtualbox do |v|
# v.customize ["modifyvm", :id, "--natdnshostresolver1", "on"]
v.customize ["modifyvm", :id, "--memory", 1024]
v.customize ["modifyvm", :id, "--name", "web3"]
end
config.vm.provision "shell", inline: <<-SHELL
sudo su
apt-get update
apt -y install docker.io
service docker start
wget -O /usr/local/bin/weave https://github.com/weaveworks/weave/releases/download/latest_release/weave
chmod a+x /usr/local/bin/weave
weave launch 192.168.56.101 192.168.56.102 192.168.56.104
eval "$(weave env)"
curl -L git.io/scope -o /usr/local/bin/scope
chmod a+x /usr/local/bin/scope
scope launch
export DOCKER_IP='192.168.56.103'
docker run -d --net=host --name consul consul agent -bind=$DOCKER_IP -retry-join=192.168.56.104
docker run -d --name=registrator --net=host --volume=/var/run/docker.sock:/tmp/docker.sock gliderlabs/registrator:latest consul://127.0.0.1:8500
weave run 10.2.1.4/24 -d --name play-pin -p 8080:9000 isac/play-pin:1.9
weave run 10.2.1.1/24 --name cassandra -h cassandra1.weave.local -d -v /data:/var/lib/cassandra -v /../vagrant/cassandra-env.sh:/etc/cassandra/cassandra-env.sh -e CASSANDRA_LISTEN_ADDRESS=10.2.1.1 -p 7000:7000 cassandra:3.9
SHELL
end
config.vm.define "web1" do |web1|
web1.vm.box = "ubuntu/trusty64"
web1.vm.hostname = 'web1'
web1.vm.network :private_network, ip: "192.168.56.101"
web1.vm.provider :virtualbox do |v|
# v.customize ["modifyvm", :id, "--natdnshostresolver1", "on"]
v.customize ["modifyvm", :id, "--memory", 1024]
v.customize ["modifyvm", :id, "--name", "web1"]
end
config.vm.provision "shell", inline: <<-SHELL
sudo su
apt-get update
apt -y install docker.io
service docker start
wget -O /usr/local/bin/weave https://github.com/weaveworks/weave/releases/download/latest_release/weave
chmod a+x /usr/local/bin/weave
weave launch 192.168.56.102 192.168.56.103 192.168.56.104
eval "$(weave env)"
curl -L git.io/scope -o /usr/local/bin/scope
chmod a+x /usr/local/bin/scope
scope launch
export DOCKER_IP='192.168.56.101'
docker run -d --net=host --name consul consul agent -bind=$DOCKER_IP -retry-join=192.168.56.104
docker run -d --name=registrator --net=host --volume=/var/run/docker.sock:/tmp/docker.sock gliderlabs/registrator:latest consul://127.0.0.1:8500
weave run 10.2.1.5/24 -d --name play-pin -p 8080:9000 isac/play-pin:1.9
weave run 10.2.1.3/24 --name cassandra -h cassandra2.weave.local -d -v /data:/var/lib/cassandra -v /../vagrant/cassandra-env.sh:/etc/cassandra/cassandra-env.sh -e CASSANDRA_LISTEN_ADDRESS=10.2.1.3 -e CASSANDRA_SEEDS=10.2.1.1 -p 7000:7000 cassandra:3.9
SHELL
end
config.vm.define "web4" do |web4|
web4.vm.box = "ubuntu/trusty64"
web4.vm.hostname = 'web4'
web4.vm.network :private_network, ip: "192.168.56.102"
web4.vm.provider :virtualbox do |v|
# v.customize ["modifyvm", :id, "--natdnshostresolver1", "on"]
v.customize ["modifyvm", :id, "--memory", 1024]
v.customize ["modifyvm", :id, "--name", "web4"]
end
config.vm.provision "shell", inline: <<-SHELL
sudo su
apt-get update
apt -y install docker.io
service docker start
wget -O /usr/local/bin/weave https://github.com/weaveworks/weave/releases/download/latest_release/weave
chmod a+x /usr/local/bin/weave
weave launch 192.168.56.101 192.168.56.103 192.168.56.104
eval "$(weave env)"
curl -L git.io/scope -o /usr/local/bin/scope
chmod a+x /usr/local/bin/scope
scope launch
export DOCKER_IP='192.168.56.102'
docker run -d --net=host --name consul consul agent -bind=$DOCKER_IP -retry-join=192.168.56.104
docker run -d --name=registrator --net=host --volume=/var/run/docker.sock:/tmp/docker.sock gliderlabs/registrator:latest consul://127.0.0.1:8500
weave run 10.2.1.6/24 -d --name play-pin -p 8080:9000 isac/play-pin:1.9
weave run 10.2.1.2/24 --name cassandra -h cassandra3.weave.local -d -v /data:/var/lib/cassandra -v /../vagrant/cassandra-env.sh:/etc/cassandra/cassandra-env.sh -e CASSANDRA_LISTEN_ADDRESS=10.2.1.2 -e CASSANDRA_SEEDS=10.2.1.1 -p 7000:7000 cassandra:3.9
SHELL
end
end
<file_sep>MAX_HEAP_SIZE = "256M"
HEAP_NEWSIZE = "51M"<file_sep>FROM nginx
#Install Unzip and Wget
RUN apt-get update -qq && apt-get install -y unzip && apt-get install -y wget
#Install Consul Template
RUN wget "https://releases.hashicorp.com/consul-template/0.16.0/consul-template_0.16.0_linux_amd64.zip"
RUN unzip consul-template_0.16.0_linux_amd64.zip -d /usr/local/bin
#Setup Consul Template Files
RUN mkdir /etc/consul-templates
COPY ./app.conf.tmpl /etc/consul-templates/app.conf
# Remove all other conf files from nginx
RUN rm /etc/nginx/conf.d/*
#Default Variables
ENV CONSUL 172.31.25.5:8500
CMD service nginx start && consul-template -consul=$CONSUL -template "/etc/consul-templates/app.conf:/etc/nginx/conf.d/app.conf:service nginx restart || true"<file_sep>/**
* Created by lex on 01/10/16.
*/
// var webSocketReceive = $.simpleWebSocket({url: " ws://" + window.location.host + "/socket"});
$(document).ready(function(){
applyAjax("body");
});
function applyAjax(container){
$(container + ' a').on('click', function(e){
e.preventDefault();
var pageRef = $(this).attr('href');
if (pageRef.split("/")[1] == "board"){
callPage(pageRef, true)
} else{
callPage(pageRef, false)
}
});
}
function callPage(pageRefInput, ws){
// Using the core $.ajax() method
if(typeof socket != 'undefined') socket.close()
$.ajax({
url: pageRefInput,
type: "GET",
dataType : 'text',
success: function( response ) {
// console.log('the page was loaded', response);
$('.content').html(response);
applyAjax(".content");
},
error: function( error ) {
console.log('the page was NOT loaded', error);
},
complete: function( xhr, status ) {
console.log("The request is complete!");
}
});
}
function setPinLayout(){
$('#pinto-container').pinto({
itemWidth:250,
gapX:10,
gapY:20,
});
}
//******** design
<file_sep># -*- mode: ruby -*-
# vi: set ft=ruby :
# All Vagrant configuration is done below. The "2" in Vagrant.configure
# configures the configuration version (we support older styles for
# backwards compatibility). Please don't change it unless you know what
# you're doing.
Vagrant.configure("2") do |config|
config.vm.define "nginx" do |nginx|
nginx.vm.box = "aws-dummy"
nginx.vm.hostname = 'nginx'
nginx.vm.provider :aws do |aws, override|
aws.access_key_id = ENV['AWS_KEY']
aws.secret_access_key = ENV['AWS_SECRET']
aws.security_groups = ENV['AWS_GROUP']
aws.keypair_name = ENV['AWS_KEYNAME']
aws.region = "eu-central-1"
aws.instance_type = "t2.micro"
aws.ami = "ami-9c09f0f3"
aws.private_ip_address = "172.31.25.5"
override.ssh.username = "ubuntu"
override.ssh.private_key_path = ENV['AWS_KEYPATH']
end
nginx.vm.provision "shell", inline: <<-SHELL
sudo su
apt-get update
apt -y install docker.io
wget -O /usr/local/bin/weave https://github.com/weaveworks/weave/releases/download/latest_release/weave
chmod a+x /usr/local/bin/weave
weave launch
export DOCKER_IP='172.31.25.5'
docker run -d --net=host --name consul consul agent -server -bind=$DOCKER_IP -bootstrap -client=$DOCKER_IP -ui consul
docker run -d --name nginx -p 80:80 isac/nginx-consul-template:1.4
weave run 10.2.1.1/24 -d --name nginx -v /vagrant/app.conf:/etc/nginx/conf.d/app.conf -p 80:80 nginx
weave run 10.2.1.23/24 --name redis -h redis.weave.local -d redis
SHELL
end
config.vm.define "web1" do |web1|
web1.vm.box = "aws-dummy"
web1.vm.hostname = 'web1'
web1.vm.provider :aws do |aws, override|
aws.access_key_id = ENV['AWS_KEY']
aws.secret_access_key = ENV['AWS_SECRET']
aws.security_groups = ENV['AWS_GROUP']
aws.keypair_name = ENV['AWS_KEYNAME']
aws.region = "eu-central-1"
aws.instance_type = "t2.micro"
aws.ami = "ami-9c09f0f3"
aws.private_ip_address = "172.31.25.4"
override.ssh.username = "ubuntu"
override.ssh.private_key_path = ENV['AWS_KEYPATH']
end
web1.vm.provision "shell", inline: <<-SHELL
sudo su
apt-get update
apt -y install docker.io
wget -O /usr/local/bin/weave https://github.com/weaveworks/weave/releases/download/latest_release/weave
chmod a+x /usr/local/bin/weave
weave launch 172.31.25.5
export DOCKER_IP='172.31.25.4'
docker run -d --net=host --name consul consul agent -bind=$DOCKER_IP -retry-join=172.31.25.5
docker run -d --name=registrator --net=host --volume=/var/run/docker.sock:/tmp/docker.sock gliderlabs/registrator:latest consul://127.0.0.1:8500
weave run 10.2.1.2/24 -d --name play-pin -h app1.weave.local -p 8080:9000 isac/play-pin:1.9
weave run 10.2.1.3/24 -d --name cassandra -h cassandra1.weave.local -v /data:/var/lib/cassandra -v /vagrant/cassandra-env.sh:/etc/cassandra/cassandra-env.sh -e CASSANDRA_LISTEN_ADDRESS=10.2.1.3 cassandra:3.9
SHELL
end
config.vm.define "web2" do |web2|
web2.vm.box = "aws-dummy"
web2.vm.hostname = 'web2'
web2.vm.provider :aws do |aws, override|
aws.access_key_id = ENV['AWS_KEY']
aws.secret_access_key = ENV['AWS_SECRET']
aws.security_groups = ENV['AWS_GROUP']
aws.keypair_name = ENV['AWS_KEYNAME']
aws.region = "eu-central-1"
aws.instance_type = "t2.micro"
aws.ami = "ami-9c09f0f3"
aws.private_ip_address = "172.31.25.1"
override.ssh.username = "ubuntu"
override.ssh.private_key_path = ENV['AWS_KEYPATH']
end
web2.vm.provision "shell", inline: <<-SHELL
sudo su
apt-get update
apt -y install docker.io
wget -O /usr/local/bin/weave https://github.com/weaveworks/weave/releases/download/latest_release/weave
chmod a+x /usr/local/bin/weave
weave launch 172.31.25.5 172.31.25.4
export DOCKER_IP='172.31.25.1'
docker run -d --net=host --name consul consul agent -bind=$DOCKER_IP -retry-join=172.31.25.5
docker run -d --name=registrator --net=host --volume=/var/run/docker.sock:/tmp/docker.sock gliderlabs/registrator:latest consul://127.0.0.1:8500
weave run 10.2.1.4/24 -d --name play-pin -h app2.weave.local -p 8080:9000 isac/play-pin:1.9
weave run 10.2.1.5/24 -d --name cassandra -h cassandra2.weave.local -v /data:/var/lib/cassandra -v /vagrant/cassandra-env.sh:/etc/cassandra/cassandra-env.sh -e CASSANDRA_LISTEN_ADDRESS=10.2.1.5 -e CASSANDRA_SEEDS=10.2.1.3 cassandra:3.9
SHELL
end
config.vm.define "web3" do |web3|
web3.vm.box = "aws-dummy"
web3.vm.hostname = 'web3'
web3.vm.provider :aws do |aws, override|
aws.access_key_id = ENV['AWS_KEY']
aws.secret_access_key = ENV['AWS_SECRET']
aws.security_groups = ENV['AWS_GROUP']
aws.keypair_name = ENV['AWS_KEYNAME']
aws.region = "eu-central-1"
aws.instance_type = "t2.micro"
aws.ami = "ami-9c09f0f3"
aws.private_ip_address = "172.31.25.2"
override.ssh.username = "ubuntu"
override.ssh.private_key_path = ENV['AWS_KEYPATH']
end
web3.vm.provision "shell", inline: <<-SHELL
sudo su
apt-get update
apt -y install docker.io
wget -O /usr/local/bin/weave https://github.com/weaveworks/weave/releases/download/latest_release/weave
chmod a+x /usr/local/bin/weave
weave launch 172.31.25.5 172.31.25.4 172.31.25.1
export DOCKER_IP='172.31.25.2'
docker run -d --net=host --name consul consul agent -bind=$DOCKER_IP -retry-join=172.31.25.5
docker run -d --name=registrator --net=host --volume=/var/run/docker.sock:/tmp/docker.sock gliderlabs/registrator:latest consul://127.0.0.1:8500
weave run 10.2.1.6/24 -d --name play-pin -h app3.weave.local -p 8080:9000 isac/play-pin:1.9
weave run 10.2.1.8/24 -d --name cassandra -h cassandra3.weave.local -v /data:/var/lib/cassandra -v /vagrant/cassandra-env.sh:/etc/cassandra/cassandra-env.sh -e CASSANDRA_LISTEN_ADDRESS=10.2.1.8 -e CASSANDRA_SEEDS=10.2.1.3 cassandra:3.9
SHELL
end
end
#get public IP
#export DOCKER_IP=`curl -s http://ipinfo.io/ip`
<file_sep>@*
* This template takes a single argument, a String containing a
* message to display.
*@
@import com.websudos.phantom.dsl.ListResult
@(pins: ListResult[Pin], pagingState: String)
@*
* Call the `main` template with two arguments. The first
* argument is a `String` with the title of the page, the second
* argument is an `Html` object containing the body of the page.
*@
@pageLayout("Alta versiune de la all.scala.html") {
<h1>Home</h1>
<div id="wrapper">
<div class="card-columns">
@for(pin <- pins.records){
}
<input type="hidden" value="@pagingState">
</div>
</div>
}
<file_sep>#!/bin/bash
# removeSecurityRule.sh
if [ -r lastIP ]; then
currentIP=`cat lastIP`
aws ec2 revoke-security-group-ingress --group-id sg-0433846d --ip-permissions "[{\"IpProtocol\": \"tcp\", \"FromPort\": 22, \"ToPort\": 22, \"IpRanges\": [{\"CidrIp\": \"$currentIP/32\"}]}]" && echo $currentIP > lastIP
else
echo "$0: no file named lastIP found!"
exit 1
fi
|
c539343d06b984185fe945a3c41432d60088801b
|
[
"Ruby",
"HTML",
"Markdown",
"JavaScript",
"Dockerfile",
"Shell"
] | 12
|
HTML
|
isacandrei/PlayPin_Distributed_Image_Aggregator
|
de447bb2b8bf4845d399ab4a0669f2e336c5a4c6
|
c58cb551a44f1a2f3f48a1238696d09602522b0a
|
refs/heads/master
|
<file_sep>rootProject.name = 'auto_orientation'
<file_sep>import Flutter
import UIKit
public class AutoOrientationPlugin: NSObject, FlutterPlugin {
public static func register(with registrar: FlutterPluginRegistrar) {
let channel = FlutterMethodChannel(name: "auto_orientation", binaryMessenger: registrar.messenger())
let instance = AutoOrientationPlugin()
registrar.addMethodCallDelegate(instance, channel: channel)
}
public func handle(_ call: FlutterMethodCall, result: @escaping FlutterResult) {
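    // iOS 16+ rotates via UIWindowScene.requestGeometryUpdate; older versions fall back to the KVC "orientation" workaround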
if #available(iOS 16.0, *) {
setOrientation(call)
} else {
setLegacyOrientation(call)
}
UIViewController.attemptRotationToDeviceOrientation()
result(FlutterMethodNotImplemented)
}
@available(iOS 16.0, *)
func setOrientation(_ call: FlutterMethodCall) {
guard let windowScene = UIApplication.shared.connectedScenes.first as? UIWindowScene
else { return }
let resolvedMask: UIInterfaceOrientationMask
switch call.method {
case "setLandscapeRight", "setLandscapeAuto":
resolvedMask = UIInterfaceOrientationMask.landscapeRight
case "setLandscapeLeft":
resolvedMask = UIInterfaceOrientationMask.landscapeLeft
case "setPortraitUp", "setPortraitAuto":
resolvedMask = UIInterfaceOrientationMask.portrait
case "setPortraitDown":
resolvedMask = UIInterfaceOrientationMask.portraitUpsideDown
default:
resolvedMask = UIInterfaceOrientationMask.all
break
}
windowScene.requestGeometryUpdate(.iOS(interfaceOrientations: resolvedMask)) { error in
// TODO: Perhaps call back to Flutter with an error
}
}
func setLegacyOrientation(_ call: FlutterMethodCall) {
let resolvedOrientation: UIInterfaceOrientation
switch call.method {
case "setLandscapeRight", "setLandscapeAuto":
resolvedOrientation = UIInterfaceOrientation.landscapeRight
case "setLandscapeLeft":
resolvedOrientation = UIInterfaceOrientation.landscapeLeft
case "setPortraitUp", "setPortraitAuto":
resolvedOrientation = UIInterfaceOrientation.portrait
case "setPortraitDown":
resolvedOrientation = UIInterfaceOrientation.portraitUpsideDown
default:
resolvedOrientation = UIInterfaceOrientation.unknown
break
}
UIDevice.current.setValue(resolvedOrientation.rawValue, forKey: "orientation")
}
}
<file_sep># auto_orientation
This plugin was created to programmatically rotate the screen orientation on iOS and Android
## Getting Started
After importing the package you can use:
`AutoOrientation.landscapeLeftMode();` or
`AutoOrientation.landscapeRightMode();` or
`AutoOrientation.portraitDownMode();` (might not work) or
`AutoOrientation.portraitUpMode();` or
`AutoOrientation.portraitAutoMode();` (Android only) or
`AutoOrientation.portraitAutoMode(forceSensor: true);` (Use sensor data to change direction, ignoring user's rotation preference. Much like Youtube fullscreen. Android only) or
`AutoOrientation.landscapeAutoMode();` (Android only) or
`AutoOrientation.landscapeAutoMode(forceSensor: true);` (Use sensor data to change direction, ignoring user's rotation preference. Much like Youtube fullscreen. Android only) or
`AutoOrientation.fullAutoMode();`
No need to call `SystemChrome.setPreferredOrientations` because
it has been added to the library itself.
This has been done because before android auto-rotation wasn't
working after setting rotation using the plugin.
We've used it for a VideoScaffold, to programmatically change to landscape
and back to portrait if the scaffold is disposed.
## Example
Please have a look in the `example/` folder
<file_sep># AutoOrientation Example
An example of how to use the auto orientation plugin for Flutter
## Getting Started
For help getting started with Flutter, view our online
[documentation](http://flutter.io/).
<file_sep>## 2.3.1
* Fix for iOS < 16 after rewrite in Swift.
## 2.3.0
* Swift Conversion of iOS plugin incl. iOS 16 Support
## 2.2.2
* add UIRequiresFullScreen to example plist for iPad support
* Fix All Modes on iOS
* replace jcenter to mavenCentral
* Fix iOS 16 orientation
## 2.2.1
* Fix double rotation by removing the preferredOrientation
* Cleaned up example app
## 2.2.0
* Remove Android V1 embedding
* Upgrade example app, update Gradle dependency
* cleanup
## 2.1.0
* Migrate to Android V2 embedding
## 2.0.2
* Fixes hanging on `await` calls
## 2.0.1
* Added SCREEN_ORIENTATION_USER and updated Library Versions.
## 2.0.0
* Migrate to null safety
## 1.0.7
* Use sensor rotation
## 1.0.6
* Revert flutter dependency version
## 1.0.5
* Revert flutter dependency version
## 1.0.4
* Update flutter dependency version
## 1.0.3
* Format code and update dependency version pinning
## 1.0.2
* Fix compatibility with android
## 1.0.1
* Update Changelog
## 1.0.0
* Add more specific implementation
* Add Android Support
* Add Example
## 0.0.2
* Create github repository and link it
## 0.0.1
* Create the first auto rotation for iOS
|
ca6c859f1edb8cbd9371f65876063c6e148471ef
|
[
"Swift",
"Markdown",
"Gradle"
] | 5
|
Gradle
|
bytepark/auto_orientation
|
e5d0553867a1aa5611f81625e324c24c4252e72f
|
916ac6daf0a55619dbdb0baf1c5a6df12fbc2d3e
|
refs/heads/master
|
<file_sep>
--
-- Table structure for table `ano_crm`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `ano_crm` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`ano_crm` varchar(245) DEFAULT NULL,
`r_auth` int(11) DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `ano_crm`
--
--
-- Table structure for table `cidade`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `cidade` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`cidade` varchar(245) DEFAULT NULL,
`estado` int(11) DEFAULT NULL,
`r_auth` int(11) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `r_cidade_estado1` (`estado`),
CONSTRAINT `r_cidade_estado1` FOREIGN KEY (`estado`) REFERENCES `estado` (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `cidade`
--
--
-- Table structure for table `empresa`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `empresa` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`nome_fantasia` varchar(245) DEFAULT NULL,
`razao_social` varchar(245) DEFAULT NULL,
`cnpj` varchar(245) DEFAULT NULL,
`telefone` int(11) DEFAULT NULL,
`cep` varchar(245) DEFAULT NULL,
`estado` int(11) DEFAULT NULL,
`cidade` int(11) DEFAULT NULL,
`bairro` varchar(245) DEFAULT NULL,
`numero` varchar(245) DEFAULT NULL,
`complemento` varchar(245) DEFAULT NULL,
`r_auth` int(11) DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `r_empresa_estado1` (`estado`),
KEY `r_empresa_cidade2` (`cidade`),
CONSTRAINT `r_empresa_cidade2` FOREIGN KEY (`cidade`) REFERENCES `cidade` (`id`),
CONSTRAINT `r_empresa_estado1` FOREIGN KEY (`estado`) REFERENCES `estado` (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `empresa`
--
--
-- Table structure for table `estado`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `estado` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`estado` varchar(245) DEFAULT NULL,
`uf` varchar(245) DEFAULT NULL,
`pais` varchar(245) DEFAULT NULL,
`r_auth` int(11) DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `estado`
--
--
-- Table structure for table `estado_civil`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `estado_civil` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`estado_civil` varchar(245) DEFAULT NULL,
`r_auth` int(11) DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `estado_civil`
--
--
-- Table structure for table `expedicao`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `expedicao` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`expedicao` varchar(245) DEFAULT NULL,
`r_auth` int(11) DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `expedicao`
--
--
-- Table structure for table `forma_de_pagamento`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `forma_de_pagamento` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`forma_de_pagamento` varchar(245) DEFAULT NULL,
`r_auth` int(11) DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `forma_de_pagamento`
--
--
-- Table structure for table `medico`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `medico` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`crm` varchar(245) NOT NULL,
`ano_crm` int(11) DEFAULT NULL,
`situacao` int(11) DEFAULT NULL,
`nome` varchar(245) DEFAULT NULL,
`sexo` int(11) DEFAULT NULL,
`estado_civil` int(11) DEFAULT NULL,
`data_de_nascimento` varchar(245) DEFAULT NULL,
`nacionalidade` int(11) DEFAULT NULL,
`cpf` varchar(245) DEFAULT NULL,
`rg` varchar(245) DEFAULT NULL,
`expedicao` int(11) DEFAULT NULL,
`status` int(11) DEFAULT NULL,
`forma_de_pagamento` int(11) DEFAULT NULL,
`empresa` int(11) DEFAULT NULL,
`cep` varchar(245) DEFAULT NULL,
`estado` int(11) DEFAULT NULL,
`cidade` int(11) DEFAULT NULL,
`bairro` varchar(245) DEFAULT NULL,
`numero` varchar(245) DEFAULT NULL,
`complemento` varchar(245) DEFAULT NULL,
`r_auth` int(11) DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `unique_crm` (`crm`),
UNIQUE KEY `unique_cpf` (`cpf`),
UNIQUE KEY `unique_rg` (`rg`),
KEY `r_medico_ano_crm1` (`ano_crm`),
KEY `r_medico_situacao2` (`situacao`),
KEY `r_medico_sexo3` (`sexo`),
KEY `r_medico_estado_civil4` (`estado_civil`),
KEY `r_medico_nacionalidade5` (`nacionalidade`),
KEY `r_medico_expedicao6` (`expedicao`),
KEY `r_medico_status7` (`status`),
KEY `r_medico_forma_de_pagamento8` (`forma_de_pagamento`),
KEY `r_medico_empresa9` (`empresa`),
KEY `r_medico_estado10` (`estado`),
KEY `r_medico_cidade11` (`cidade`),
CONSTRAINT `r_medico_ano_crm1` FOREIGN KEY (`ano_crm`) REFERENCES `ano_crm` (`id`),
CONSTRAINT `r_medico_cidade11` FOREIGN KEY (`cidade`) REFERENCES `cidade` (`id`),
CONSTRAINT `r_medico_empresa9` FOREIGN KEY (`empresa`) REFERENCES `empresa` (`id`),
CONSTRAINT `r_medico_estado10` FOREIGN KEY (`estado`) REFERENCES `estado` (`id`),
CONSTRAINT `r_medico_estado_civil4` FOREIGN KEY (`estado_civil`) REFERENCES `estado_civil` (`id`),
CONSTRAINT `r_medico_expedicao6` FOREIGN KEY (`expedicao`) REFERENCES `expedicao` (`id`),
CONSTRAINT `r_medico_forma_de_pagamento8` FOREIGN KEY (`forma_de_pagamento`) REFERENCES `forma_de_pagamento` (`id`),
CONSTRAINT `r_medico_nacionalidade5` FOREIGN KEY (`nacionalidade`) REFERENCES `pais` (`id`),
CONSTRAINT `r_medico_sexo3` FOREIGN KEY (`sexo`) REFERENCES `sexo` (`id`),
CONSTRAINT `r_medico_situacao2` FOREIGN KEY (`situacao`) REFERENCES `situacao` (`id`),
CONSTRAINT `r_medico_status7` FOREIGN KEY (`status`) REFERENCES `status` (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `medico`
--
--
-- Table structure for table `pais`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `pais` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`pais` varchar(245) DEFAULT NULL,
`sigla` varchar(245) DEFAULT NULL,
`r_auth` int(11) DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `pais`
--
--
-- Table structure for table `password_resets`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `password_resets` (
`email` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NOT NULL,
`token` varchar(255) CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci NOT NULL,
`created_at` timestamp NULL DEFAULT NULL,
KEY `password_resets_email_index` (`email`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `password_resets`
--
--
-- Table structure for table `r_indicators`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `r_indicators` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`name` varchar(245) DEFAULT NULL,
`query` text,
`color` varchar(50) DEFAULT NULL,
`description` varchar(245) DEFAULT NULL,
`link` varchar(245) DEFAULT NULL,
`size` int(11) DEFAULT NULL,
`glyphicon` varchar(45) DEFAULT 'glyphicon glyphicon-signal',
`r_auth` int(11) DEFAULT NULL,
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `r_indicators`
--
--
-- Table structure for table `r_logs`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `r_logs` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`description` varchar(245) DEFAULT NULL,
`r_auth` int(11) DEFAULT NULL,
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=9 DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `r_logs`
--
INSERT INTO `r_logs` VALUES (1,'admin medico: visualizou a lista',1,'2018-10-30 19:10:26','2018-10-30 19:10:26'),(2,'admin medico: visualizou a tela de cadastro',1,'2018-10-30 19:10:34','2018-10-30 19:10:34'),(3,'admin medico: visualizou a lista',1,'2018-10-30 19:12:32','2018-10-30 19:12:32'),(4,'admin medico: visualizou a tela de cadastro',1,'2018-10-30 19:12:35','2018-10-30 19:12:35'),(5,'admin medico: visualizou a lista',1,'2018-10-30 19:18:06','2018-10-30 19:18:06'),(6,'admin medico: visualizou a tela de cadastro',1,'2018-10-30 19:18:09','2018-10-30 19:18:09'),(7,'admin medico: visualizou a lista',1,'2018-10-30 19:20:39','2018-10-30 19:20:39'),(8,'admin medico: visualizou a tela de cadastro',1,'2018-10-30 19:20:46','2018-10-30 19:20:46');
--
-- Table structure for table `r_permissions`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `r_permissions` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`user_id` int(11) DEFAULT NULL,
`profile_id` int(11) DEFAULT NULL,
`matriz_id` int(11) DEFAULT NULL,
`role` varchar(255) DEFAULT NULL,
`r_auth` int(11) DEFAULT NULL,
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `r_permissions`
--
--
-- Table structure for table `r_profiles`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `r_profiles` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`name` varchar(245) DEFAULT NULL,
`moderator` tinyint(1) DEFAULT '0',
`administrator` tinyint(1) DEFAULT '0',
`default` tinyint(1) DEFAULT '0',
`r_auth` int(11) DEFAULT NULL,
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=4 DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `r_profiles`
--
INSERT INTO `r_profiles` VALUES (1,'Usuario',0,0,1,NULL,'2018-10-30 22:20:14','2018-10-30 22:20:14'),(2,'Moderador',1,0,0,NULL,'2018-10-30 22:20:14','2018-10-30 22:20:14'),(3,'Administrador',1,1,0,NULL,'2018-10-30 22:20:14','2018-10-30 22:20:14');
--
-- Table structure for table `r_reports`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `r_reports` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`name` varchar(50) NOT NULL,
`query` text NOT NULL,
`image` varchar(245) DEFAULT NULL,
`description` text,
`size` varchar(10) DEFAULT NULL,
`r_auth` int(11) DEFAULT NULL,
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `r_reports`
--
INSERT INTO `r_reports` VALUES (1,'Usuarios','SELECT id, name, username, email FROM users;',NULL,NULL,NULL,NULL,'2018-10-30 22:20:14','2018-10-30 22:20:14');
--
-- Table structure for table `sexo`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `sexo` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`sexo` varchar(245) DEFAULT NULL,
`r_auth` int(11) DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `sexo`
--
--
-- Table structure for table `situacao`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `situacao` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`situacao` varchar(245) DEFAULT NULL,
`r_auth` int(11) DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `situacao`
--
--
-- Table structure for table `status`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `status` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`status` varchar(245) DEFAULT NULL,
`r_auth` int(11) DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `status`
--
--
-- Table structure for table `users`
--
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!40101 SET character_set_client = utf8 */;
CREATE TABLE `users` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`name` varchar(255) NOT NULL,
`username` varchar(50) DEFAULT NULL,
`password` varchar(255) NOT NULL,
`profile_id` varchar(20) DEFAULT NULL,
`status` tinyint(1) DEFAULT '1',
`profession` varchar(255) DEFAULT NULL,
`email` varchar(255) DEFAULT NULL,
`image` varchar(245) DEFAULT NULL,
`remember_token` varchar(245) DEFAULT NULL,
`r_auth` int(11) DEFAULT NULL,
`created_at` timestamp NULL DEFAULT NULL,
`updated_at` timestamp NULL DEFAULT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Dumping data for table `users`
--
INSERT INTO `users` VALUES (1,'admin','admin','$2y$10$ssBVFa0q6z9XRgQrxos8HeZttP2LlOPaUVwEUJQtCtwqxLPT1DH/O','3',1,NULL,'<EMAIL>',NULL,NULL,NULL,'2018-10-30 22:20:14','2018-10-30 22:20:14');
|
3b1cb373a507962c5a3f8b64e0fc763fdddb53b0
|
[
"SQL"
] | 1
|
SQL
|
LibrianoLima/SismedFeio
|
03fb2744ffc2f6e903b0640a5c09e2df137d8c18
|
2e0bb03927eb6d139362d9e747a679823533d5a0
|
refs/heads/master
|
<repo_name>PicoisCharlotte/ArgosRaspberry<file_sep>/main.py
from src.models import robot
from src.controllers import video
import configparser
config = configparser.ConfigParser()
config.read('config/config.ini')
def main() :
#robot.getRobot(config)
#video.getUrlVideo(config)
print("it work !!")
main()
<file_sep>/config/config.ini
[APIARGOS]
url = https://argosapi.herokuapp.com/
[ADAFRUITIO]
KEY=<KEY>
USERNAME=Titi78<file_sep>/README.md
# Argos Raspberry
A Python project that performs HTTP requests and Google Drive actions for the cloud from the Raspberry Pi.
## execute script
launch the sensor script:
<pre>
<code>
make sensor
</code>
</pre>
launch the action script:
<pre>
<code>
make action
</code>
</pre>
## Create Service file
- create a service file :
<pre>
<code>
sudo vim/nano /lib/systemd/system/your_service.service
</code>
</pre>
- add the following content to it, changing only the Description and ExecStart:
<pre>
<code>
[Unit]
Description=Your_Name_Service
After=multi-user.target
Conflicts=getty@tty1.service
[Service]
Type=simple
ExecStart=/usr/bin/python3 /path/to/project/your_python_file.py #main.py
StandardInput=tty-force
[Install]
WantedBy=multi-user.target
</code>
</pre>
- Enable newly added service
<pre>
<code>
sudo systemctl daemon-reload
sudo systemctl enable your_service.service
sudo systemctl start your_service.service
</code>
</pre>
## Create log file
add the following lines to the service file, below StandardInput:
<pre>
<code>
StandardOutput=syslog
StandardError=syslog
SyslogIdentifier=your_identifier # example : argosraspberry
</code>
</pre>
- create a file in /etc/rsyslog.d/name_file.conf with the following content:
<pre>
<code>
if $programname == 'your_identifier' then /path/to/log/your_file.log
& stop
</code>
</pre>
- Then change the permissions of that path to something readable by syslog:
<pre>
<code>
# ls -alth /var/log/syslog
-rw-r----- 1 root adm 439K Mar 5 19:35 /var/log/syslog
# chown root:adm /path/to/log/your_file.log
</code>
</pre>
- restart all :
<pre>
<code>
sudo systemctl restart rsyslog
sudo systemctl daemon-reload
sudo systemctl restart your_service.service
</code>
</pre>
<file_sep>/src/controllers/mqtt/sensor.py
from Adafruit_IO import MQTTClient
from src.services import adafruitService
import sys
import time
import serial
global ser
global value1
global value2
countMotion = 0
countSound = 0
debug = False
ser = serial.Serial('/dev/ttyACM0', 9600 )
def sendSensorValue() :
client = adafruitService.getInstanceMqttClient()
client.on_connect = connected
client.on_disconnect = disconnected
client.connect()
client.loop_background()
time.sleep(5)
while True :
value1 = ser.readline().decode()
if debug: print("value 1 : ", value1)
value2 = ser.readline().decode()
if debug: print("value 2 : ", value2)
try :
setMotionSensor(client, value2)
setSoundSensor(client, value1)
time.sleep(2)
except KeyboardInterrupt :
break
def connected(client) :
print('Connected to Adafruit IO!')
def disconnected(client) :
print('Disconnected from Adafruit IO!')
sys.exit(1)
def setMotionSensor(client, value2) :
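    # publish only on the first consecutive positive readings, then stay silent until the sensor drops back to 0 (simple debounce)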
if int(value2) == 1 :
if debug: print("countMotion : ", countMotion)
if countMotion <= 1 :
client.publish('argos-feed.capteur-mouvement', value2)
if debug: print("post")
incrementMotion()
elif(int(value2) == 0) :
decrementMotion()
def setSoundSensor(client, value1) :
if int(value1) == 1 :
if debug: print("countSound : ", countSound)
if countSound <= 1 :
client.publish('argos-feed.capteur-son', value1)
if debug: print("post")
incrementSound()
elif(int(value1) == 0) :
decrementSound()
def incrementMotion() :
global countMotion
countMotion = countMotion + 1
def decrementMotion() :
global countMotion
countMotion = 0
def incrementSound() :
global countSound
countSound = countSound + 1
def decrementSound() :
global countSound
countSound = 0
<file_sep>/config/globalVariable.py
def init(config):
global cfg
cfg = config<file_sep>/src/controllers/mqtt/action.py
from Adafruit_IO import MQTTClient
from src.services import adafruitService
import sys
import time
import serial
global ser
ser = serial.Serial('/dev/ttyACM0') # if the USB port changes, update it here
def initAction() :
client = adafruitService.getInstanceMqttClient()
client.on_connect = connected
client.on_disconnect = disconnected
client.on_message = message
client.connect()
client.loop_blocking()
def connected(client) :
print('Connected to Adafruit IO! Listening for feed changes...')
client.subscribe('argos-feed.robotaction')
global ser
def disconnected(client) :
print('Disconnected from Adafruit IO!')
sys.exit(1)
def message(client, feed_id, payload) :
print('Feed {0} received new value: {1}'.format(feed_id, payload))
if feed_id == 'argos-feed.robotaction':
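        # map the numeric payload from the feed to the matching movement command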
switcher = {
'10': right,
'8': left,
'5': straighOn,
'13': backOff,
'6': stop
}
try:
print(payload)
func = switcher.get(payload)
func()
except TypeError:
print("Invalid action")
def right() :
print("right")
print(ser)
ser.write(str(8).encode())
def left() :
print("left")
ser.write(str(9).encode())
def straighOn() :
print("straigh On")
ser.write(str(5).encode())
def backOff() :
print("back off")
ser.write(str(7).encode())
def stop() :
print("stop")
ser.write(str(6).encode())
<file_sep>/codeTestEnvoyeData.py
import serial
# replace XXXX with the Arduino board number
ser = serial.Serial('/dev/ttyACM0')
print("CRTL + C pour arreter")
#rentre dans la boucle
while True :
    # test with the startup value
    demarage = int(input('should I turn on? (1=yes / 2=no)'))
print(ser)
    # send the data
ser.write(str(demarage).encode())
<file_sep>/src/services/httpService.py
import requests
import threading
import time
def getRequest(url, params) :
r = requests.get(url = url, params = params)
return r.json()
def sendUrl(url) :
#t = threading.Timer(1.0, getUrl)
#t.start()
try:
while True:
print(url)
time.sleep(2)
except KeyboardInterrupt:
print("\nkill program")<file_sep>/makefile
sensor:
python3 sensorProcess.py
action:
python3 actionProcess.py<file_sep>/src/services/shellService.py
import subprocess
def execCommand(command) :
subprocess.call(command)<file_sep>/actionProcess.py
#from src.controllers.http import action
from src.controllers.mqtt import action
from config import globalVariable
import configparser
config = configparser.ConfigParser()
config.read('/home/pi/Documents/argosraspberry/config/config.ini')
globalVariable.init(config)
def initProcessAction() :
action.initAction()
initProcessAction()
<file_sep>/src/services/adafruitService.py
from Adafruit_IO import Client, Feed, MQTTClient
from config import globalVariable
def getInstanceAdafruitClient() :
return Client(globalVariable.cfg['ADAFRUITIO']['USERNAME'], globalVariable.cfg['ADAFRUITIO']['KEY'])
def getCreateFeed(intstanceClient, feedName) :
feed = Feed(name=feedName)
return intstanceClient.create_feed(feed)
def getInstanceMqttClient() :
return MQTTClient(globalVariable.cfg['ADAFRUITIO']['USERNAME'], globalVariable.cfg['ADAFRUITIO']['KEY'])
<file_sep>/src/controllers/http/action.py
from src.services import adafruitService
from Adafruit_IO import RequestError
import time
def initAction() :
robotMouvement()
def robotMouvement() :
value = ''
aio = adafruitService.getInstanceAdafruitClient()
try:
robotAction = aio.feeds('argos-feed.robotaction')
except RequestError:
robotAction = adafruitService.getCreateFeed(aio, 'argos-feed.robotaction')
while True :
try :
data = aio.receive(robotAction.key)
if value != data.value :
value = data.value
switcher = {
'10': right,
'8': left,
'5': straighOn,
'13': backOff,
'6': stop
}
try:
func = switcher.get(data.value)
func()
except TypeError:
print("Invalid action")
else :
print('same')
time.sleep(1)
except KeyboardInterrupt :
break
def right() :
print("right")
time.sleep(1)
def left() :
print("left")
time.sleep(1)
def straighOn() :
print("straigh On")
time.sleep(1)
def backOff() :
print("back off")
time.sleep(1)
def stop() :
print("stop")
time.sleep(1)
<file_sep>/src/controllers/http/sensor.py
from src.services import adafruitService
from Adafruit_IO import RequestError
import time
def initSensor() :
while True :
try :
getValueSoundSensor()
#setValueSoundSensor(10)
getValueMotionSensor()
except KeyboardInterrupt :
break
#Sound sensor
def getValueSoundSensor() :
aio = adafruitService.getInstanceAdafruitClient()
try:
soundSensor = aio.feeds('argos-feed.capteur-son')
except RequestError:
soundSensor = adafruitService.getCreateFeed(aio, 'argos-feed.capteur-son')
data = aio.receive(soundSensor.key)
time.sleep(2)
print("Sound sensor value retrieve from Adafruit : " + data.value)
def setValueSoundSensor(value) :
aio = adafruitService.getInstanceAdafruitClient()
try:
soundSensorFeed = aio.feeds('argos-feed.robotaction')
except RequestError:
soundSensorFeed = adafruitService.getCreateFeed(aio, 'argos-feed.robotaction')
aio.send_data(soundSensorFeed.key, value)
print("value post")
time.sleep(5)
# Motion sensor
def getValueMotionSensor() :
aio = adafruitService.getInstanceAdafruitClient()
try:
motionSensor = aio.feeds('argos-feed.capteur-mouvement')
except RequestError:
motionSensor = adafruitService.getCreateFeed(aio, 'argos-feed.capteur-mouvement')
data = aio.receive(motionSensor.key)
print("Motion sensor value retrieve from Adafruit : " + data.value)
time.sleep(5)<file_sep>/src/models/robot.py
from src.services import httpService
def getRobot(config) :
PARAMS = {'action': 'selectAllRobot'}
test = httpService.getRequest(config['DEFAULT']['url'] + 'robot/select', PARAMS)
print(test)<file_sep>/sensorProcess.py
#from src.controllers.http import sensor
from src.controllers.mqtt import sensor
from config import globalVariable
import configparser
config = configparser.ConfigParser()
config.read('/home/pi/Documents/argosraspberry/config/config.ini')
globalVariable.init(config)
def initProcessSensor() :
sensor.sendSensorValue()
initProcessSensor()
|
b1a37d05d64fc629867d682c40aa4e32817ab627
|
[
"Markdown",
"Python",
"Makefile",
"INI"
] | 16
|
Python
|
PicoisCharlotte/ArgosRaspberry
|
5859d4fbb9ed98d8e8362692549cf57c31e75fd2
|
cc2dcbc9255f8495594769e6423d2ebba33015cd
|
refs/heads/master
|
<file_sep>package controllers;
import play.*;
import play.mvc.*;
import models.User;
import views.html.*;
import play.data.Form;
import views.html.result.*;
import views.html.signed.*;
import java.util.*;
import models.*;
public class Show extends Controller {
public Result showhome() {
//Get Anime List
List<AniListEbean> aniList = AniListEbean.find.all();
return ok(home.render(aniList,".jpg"));
}
public Result showRegist() {
Form<User> userForm = Form.form(User.class);
return ok(regist.render(userForm));
}
public Result showToSigned() {
return ok(signed.render());
}
public Result showLogout() {
//Get Anime List
List<AniListEbean> aniList = AniListEbean.find.all();
//remove user session
session().remove("mail");
return ok(home.render(aniList,".jpg"));
}
}
<file_sep>package controllers;
import java.net.URL;
import play.*;
import play.mvc.*;
import views.html.*;
import views.html.result.*;
import views.html.signed.*;
import models.Anidata;
import models.AniListEbean;
import java.io.IOException;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import java.net.URLConnection;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.net.URLEncoder;
public class Test extends Controller {
//get 2016 anime
public Result Test20() throws JsonParseException, JsonMappingException, IOException {
//file download
InputStream in = null;
OutputStream out = null;
try {
URL imgUrl2 = new URL("https://i.ytimg.com/vi/YMqu-iHKaT0/maxresdefault.jpg");
in = imgUrl2.openStream();
out = new FileOutputStream("/Users/keima/Downloads/act/MyApp001/public/images/ani_image/9982.jpg");
byte[] buf = new byte[1024];
int len = 0;
while ((len = in.read(buf)) > 0) {
out.write(buf, 0, len);
}
out.flush();
} catch (Exception e) {
} finally {
if (out != null) {
try {
out.close();
} catch (IOException e) {
e.printStackTrace();
}
}
if (in != null) {
try {
in.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
return ok(index.render("Your new application is ready."));
}
}
<file_sep>@(aniList: List[models.AniListEbean])(jpg: String)
<link rel='stylesheet' href='@routes.WebJarAssets.at(WebJarAssets.locate("css/bootstrap.min.css"))'>
<script type='text/javascript' src='@routes.WebJarAssets.at(WebJarAssets.locate("jquery.min.js"))'></script>
<div class="container">
<div class="jumbotron">
<h1>これが俺のアニメレビュー</h1>
<p>あなたの感想を思う存分吐き出しましょう!</p>
<p><a href="/regist">登録</a></p>
</div>
<div class="row">
<div class="col-sm-5">
<h3>最新のユーザレビュー</h3>
<p>ポケットモンスター 897話 / ここ10年で最高の話 / ゼニガメさん</p>
<p>ポケットモンスター 897話 / ここ10年で最高の話 / ゼニガメさん</p>
<p>ポケットモンスター 897話 / ここ10年で最高の話 / ゼニガメさん</p>
<p>ポケットモンスター 897話 / ここ10年で最高の話 / ゼニガメさん</p>
<p>ポケットモンスター 897話 / ここ10年で最高の話 / ゼニガメさん</p>
<p>ポケットモンスター 897話 / ここ10年で最高の話 / ゼニガメさん</p>
<p>ポケットモンスター 897話 / ここ10年で最高の話 / ゼニガメさん</p>
<p>ポケットモンスター 897話 / ここ10年で最高の話 / ゼニガメさん</p>
<p>ポケットモンスター 897話 / ここ10年で最高の話 / ゼニガメさん</p>
<p>ポケットモンスター 897話 / ここ10年で最高の話 / ゼニガメさん</p>
<p>ポケットモンスター 897話 / ここ10年で最高の話 / ゼニガメさん</p>
<p>ポケットモンスター 897話 / ここ10年で最高の話 / ゼニガメさん</p>
<p>ポケットモンスター 897話 / ここ10年で最高の話 / ゼニガメさん</p>
<p>ポケットモンスター 897話 / ここ10年で最高の話 / ゼニガメさん</p>
<p>ポケットモンスター 897話 / ここ10年で最高の話 / ゼニガメさん</p>
<p>ポケットモンスター 897話 / ここ10年で最高の話 / ゼニガメさん</p>
<p>ポケットモンスター 897話 / ここ10年で最高の話 / ゼニガメさん</p>
<p>ポケットモンスター 897話 / ここ10年で最高の話 / ゼニガメさん</p>
<p>ポケットモンスター 897話 / ここ10年で最高の話 / ゼニガメさん</p>
<p>ポケットモンスター 897話 / ここ10年で最高の話 / ゼニガメさん</p>
</div>
<div class="col-sm-7" style="margin-bottom:50px;">
<h3>最新話</h3>
<img src="assets/images/logo.jpg" width="180px height="180px" class='img-responsive' style="float:left;margin-left:5px">
<img src="assets/images/logo.jpg" width="180px height="180px" class='img-responsive' style="float:left;margin-left:5px">
<img src="assets/images/logo.jpg" width="180px height="180px" class='img-responsive' style="float:left;margin-left:5px">
<img src="assets/images/logo.jpg" width="180px height="180px" class='img-responsive' style="float:left;margin-left:5px">
<img src="assets/images/logo.jpg" width="180px height="180px" class='img-responsive' style="float:left;margin-left:5px">
<img src="assets/images/logo.jpg" width="180px height="180px" class='img-responsive' style="float:left;margin-left:5px">
<img src="assets/images/logo.jpg" width="180px height="180px" class='img-responsive' style="float:left;margin-left:5px">
<img src="assets/images/logo.jpg" width="180px height="180px" class='img-responsive' style="float:left;margin-left:5px">
<img src="assets/images/logo.jpg" width="180px height="180px" class='img-responsive' style="float:left;margin-left:5px">
</div>
<div class="col-sm-12">
<h3>アニメ一覧</h3>
@for(aniLists <-aniList){
<img src="assets/images/ani_image/@aniLists.ani_id@jpg" width="180px" height="180px" style="float:left;margin-left:5px">
}
</div>
</div>
</div><file_sep>package models;
public class Anidata {
public String id ;
public String title;
}<file_sep># これが俺のアニメレビュー(This is my anime review)
This project is written in Play for Java; it is a story-based anime review site.
For example, on this site you can review episode 277 of Pokémon on its own, whereas existing anime review sites only let you review Pokémon as a whole. ...Hard to explain :(
### Functions
- Add broadcasting Anime to DB __automatically__
- Add newest episode each Anime __automatically__
- Add Anime Thumbnail __automatically__
- and so on...
### Tech
* Using MVC model
* Using ebean.
* DB control (MySQL)
* Using Twitter API ( For automatic search stuff )
* Using Google Custom Search API ( For getting thumbnail automatically)
* JSON Control (Using JACKSON)
* Play form helper
* and so on...
### IMAGES

<file_sep>package controllers;
import play.*;
import play.mvc.*;
import models.User;
import play.data.Form;
import views.html.*;
import views.html.result.*;
import java.util.List;
import models.*;
public class FormSubmit extends Controller {
public Result userSubmit() {
//get data from Userform
Form<User> userForm = Form.form(User.class);
User filledForm = userForm.bindFromRequest().get();
//Add data to User table
UserEbean user = new UserEbean();
user.mail = filledForm.mail;
user.password = <PASSWORD>;
user.nick = filledForm.nick;
user.age = filledForm.age;
user.sex = filledForm.sex;
user.save();
//save user session
session("mail", user.mail);
return ok(userres.render());
}
}<file_sep># --- Created by Ebean DDL
# To stop Ebean DDL generation, remove this comment and start using Evolutions
# --- !Ups
create table ani_list (
ani_id integer auto_increment not null,
title varchar(255),
story varchar(255),
time datetime(6) not null,
constraint pk_ani_list primary key (ani_id))
;
create table ani_user (
user_id varchar(255) not null,
mail varchar(255),
password varchar(255),
nick varchar(255),
age integer,
sex integer,
constraint pk_ani_user primary key (user_id))
;
# --- !Downs
SET FOREIGN_KEY_CHECKS=0;
drop table ani_list;
drop table ani_user;
SET FOREIGN_KEY_CHECKS=1;
<file_sep>package models;
public class User {
public String mail;
public String password;
public String nick;
public int age;
public int sex;
}<file_sep>package models;
import java.sql.Timestamp;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import com.avaje.ebean.Model;
import com.avaje.ebean.annotation.CreatedTimestamp;
import com.avaje.ebean.annotation.UpdatedTimestamp;
@Entity
@Table(name = "ani_user")
public class UserEbean extends Model {
/**
* user id
*/
@Id
@Column(name = "user_id")
public String userId;
/**
* mail
*/
@Column(name = "mail")
public String mail;
/**
* password
*/
@Column(name = "password")
public String password;
/**
* nick
*/
@Column(name = "nick")
public String nick;
/**
* age
*/
@Column(name = "age")
public int age;
/**
* sex
*/
@Column(name = "sex")
public int sex;
/**
* find
*/
public static Find<Long, UserEbean> find = new Find<Long, UserEbean>() {
};
/**
* get userid
*
* @return userid
*/
public String getUserId() {
return userId;
}
/**
* user id conf
*
* @param userId
* userid
*/
public void setUserId(String userId) {
this.userId = userId;
}
/**
* mail
*
* @return mail
*/
public String getMail() {
return mail;
}
/**
* mail conf
*
* @param mail
* mail
*/
public void setMail(String mail) {
this.mail = mail;
}
/**
* get password
*
* @return password
*/
public String getPassword() {
return password;
}
/**
* password conf
*
* @param password
* password
*/
public void setPassword(String password) {
this.password = <PASSWORD>;
}
/**
* get nick
*
* @return nick
*/
public String getNick() {
return nick;
}
/**
* nick
*
* @param nick
*
*/
public void setNick(String nick) {
this.nick = nick;
}
/**
* get age
*
* @return age
*/
public int getAge() {
return age;
}
/**
* age
*
* @param age
*
*/
public void setAge(int age) {
this.age = age;
}
/**
* get sex
*
* @return sex
*/
public int getSex() {
return sex;
}
/**
* sex
*
* @param sex
*
*/
public void setSex(int sex) {
this.sex = sex;
}
/**
* get find
*
* @return find
*/
public static Find<Long, UserEbean> getFind() {
return find;
}
/**
* find conf
*
* @param find
* find
*/
public static void setFind(Find<Long, UserEbean> find) {
UserEbean.find = find;
}
}
|
808e0a23bc0f81e67148f4ea84ac5a483261b715
|
[
"Markdown",
"Java",
"HTML",
"SQL"
] | 9
|
Java
|
keima-matsui/AnimeReview
|
43959b96c640fb57bbabaec78cd7fa0899d3a904
|
b2c19568c66e959747a709b6c1bc28ae8e0507ee
|
refs/heads/master
|
<file_sep>BASE_URL=http://localhost:8080
MONGODB_URI=mongodb://farwhyn:<EMAIL>:13179/mindfirst
SESSION_SECRET=secret
MAILGUN_USER=<EMAIL>
MAILGUN_PASSWORD=<PASSWORD>
SENDGRID_USER=hslogin
SENDGRID_PASSWORD=<PASSWORD>
NYT_KEY=9548be6f3a64163d23e1539f067fcabd:5:68537648
LASTFM_KEY=<KEY>
LASTFM_SECRET=is cb7857b8fba83f819ea46ca13681fe71
FACEBOOK_ID=2159855864045996
FACEBOOK_SECRET=fc9fe9c8a0daeec18ce2c1d09eb87001
INSTAGRAM_ID=9f5c39ab236a48e0aec354acb77eee9b
INSTAGRAM_SECRET=5920619aafe842128673e793a1c40028
GITHUB_ID=cb448b1d4f0c743a1e36
GITHUB_SECRET=<KEY>
TWITTER_KEY=6NNBDyJ2TavL407A3lWxPFKBI
TWITTER_SECRET=<KEY>
GOOGLE_ID=828110519058.apps.googleusercontent.com
GOOGLE_SECRET=JdZsIaWhUFIchmC1a_IZzOHb
LINKEDIN_ID=77chexmowru601
LINKEDIN_SECRET=szdC8lN2s2SuMSy8
LINKEDIN_CALLBACK_URL=http://localhost:8080/auth/linkedin/callback
STEAM_KEY=D1240DEF4D41D416FD291D0075B6ED3F
TWILIO_SID=AC6f0edc4c47becc6d0a952536fc9a6025
TWILIO_TOKEN=<PASSWORD>
CLOCKWORK_KEY=9ffb267f88df55762f74ba2f517a66dc8bedac5a
STRIPE_SKEY=<KEY>
STRIPE_PKEY=pk_test_6pRNASCoBOKtIshFeQd4XMUh
TUMBLR_KEY=<KEY>
TUMBLR_SECRET=<KEY>
FOURSQUARE_ID=2STROLSFBMZLAHG3IBA141EM2HGRF0IRIBB4KXMOGA2EH3JG
FOURSQUARE_SECRET=<KEY>
FOURSQUARE_REDIRECT_URL=http://localhost:8080/auth/foursquare/callback
PAYPAL_ID=AdGE8hDyixVoHmbhASqAThfbBcrbc<KEY>
PAYPAL_SECRET=<KEY>
PAYPAL_RETURN_URL=http://localhost:8080/api/paypal/success
PAYPAL_CANCEL_URL=http://localhost:8080/api/paypal/cancel
LOB_KEY=test_814e892b199d65ef6dbb3e4ad24689559ca
PINTEREST_ID=4989328633242858916
PINTEREST_SECRET=e61be5b3558fed60f6e6bd8c3b7d5a3857fcb863e35dca069d9d59b1acd9650e
PINTEREST_REDIRECT_URL=https://localhost:8080/auth/pinterest/callback
FITBIT_ID=22D6QJ
FITBIT_SECRET=<KEY>
FITBIT_REDIRECT_URL=http://localhost:8080/auth/fitbit/callback
GOOGLE_MAP_API_KEY=google-map-api-key
<file_sep>const Form = require('../models/FormData.js');
exports.getForms = (req, res) => {
Form.find((err, docs) => {
res.render('dashboard', { forms: docs });
}).select(['-_id', '-date']);
};
|
ec4239dcd5b23fa1dc2bf1325518d5ea5e02d4d0
|
[
"JavaScript",
"Shell"
] | 2
|
Shell
|
CyrusSA/mindfirst
|
e852838a48219f6a2a1970214a05124cc1ce8f90
|
a4c6559360a47e24cba35fd576ce3095c5f37c1b
|
refs/heads/master
|
<repo_name>kholidfu/flask_auto_builder<file_sep>/README.md
flask_auto_builder
==================
<file_sep>/farmers_copy.py
#!/usr/bin/env python
# author: @sopier
from dop.client import Client
import urllib2
import sys
class Farmers(object):
CLIENT_ID = 'client_id'
API_KEY = 'api_key'
def __init__(self):
pass
def create_droplet(self, name):
""" step 1: creating droplet """
client = Client(self.CLIENT_ID, self.API_KEY)
droplet = client.create_droplet(name, 66, 1505699, 1)
print droplet.to_json()['id']
def droplet_ip(self):
client = Client(self.CLIENT_ID, self.API_KEY)
droplets = client.show_active_droplets()
return [droplet.to_json() for droplet in droplets][-1]['ip_address']
def add_domain(self, domain):
""" step 2: add domain to droplet """
ip_address = self.droplet_ip()
urllib2.urlopen(
"https://api.digitalocean.com/domains/new?client_id=" \
+ self.CLIENT_ID + "&api_key=" + self.API_KEY + "&name=" \
+ domain + "&ip_address=" + ip_address)
if __name__ == "__main__":
f = Farmers()
sys.stdout.write("preparing to create droplet...\n")
f.create_droplet(sys.argv[1])
sys.stdout.write("droplet successfully created!\n")
f.add_domain(sys.argv[2])
sys.stdout.write("all domains added...\n")
# after this we can continue to the Namecheap API
<file_sep>/fabfile.py
#!/usr/bin/env python
# author: @sopier
"""
Automate flask app deployment
What you need to run this:
1. zipped app/ run.py and uwsgi.ini => format domain.xxx.tar.gz
2. "default" file which contain all sites conf
3. id_rsa.pub to connect to server without password prompt
4. supervisord.conf latest version
"""
# fabric thing
from fabric.api import *
from fabric.tasks import execute
from farmers import Farmers
f = Farmers()
env.hosts = [f.droplet_ip()]
droplet_ip = env.hosts[0]
def add_domain(site):
""" adding domain to DO"""
f.add_domain(site)
def create_user():
env.user = "root"
run("adduser sopier")
run("adduser sopier sudo")
def create_key():
""" delete old keys and generate new one"""
local("> ~/.ssh/known_hosts")
local("ssh-copy-id -i /home/banteng/.ssh/id_rsa.pub sopier@" \
+ droplet_ip)
def install_packages():
env.user = "sopier"
env.key_filename = "/home/banteng/.ssh/id_rsa"
run("sudo apt-get install build-essential python-dev" \
" python-pip nginx emacs24-nox libxml2-dev libxslt-dev zlib1g-dev libjpeg-dev python-lxml")
run("sudo pip install virtualenv supervisor")
def create_venv(domain):
""" tiap domain dibuatkan virtualenv sendiri2, misal example.com"""
env.user = "sopier"
env.key_filename = "/home/banteng/.ssh/id_rsa"
run("virtualenv " + domain)
def install_packages_venv(domain):
""" install flask uwsgi unidecode"""
env.user = "sopier"
env.key_filename = "/home/banteng/.ssh/id_rsa"
run("ln -s /usr/lib/python2.7/dist-packages/lxml* /home/sopier/" + domain + "/lib/python2.7/site-packages")
with lcd("/home/sopier/" + domain):
with path("/home/sopier/" + domain + "/bin/", behavior="prepend"):
run("pip install flask uwsgi unidecode beautifulsoup4 pillow bottlenose")
run("pip install --no-deps python-amazon-simple-product-api")
def upload_package(package, domain):
"""upload folder app/ run.py and uwsgi.ini from localhost"""
env.user = "sopier"
env.key_filename = "/home/banteng/.ssh/id_rsa"
local("scp " + package + " sopier@" + droplet_ip + ":")
run("mv " + package + " " + domain + "/")
run("cd " + domain + " && tar zxvf " + package)
run("cd " + domain + " && rm " + package)
def setup_nginx():
"""
rm default
cp default from localhost
"""
env.user = "root"
env.key_filename = "/home/banteng/.ssh/id_rsa"
local("scp default root@" + droplet_ip \
+ ":/etc/nginx/sites-available/default")
sudo("sed -i 's/.*64.*/server_names_hash_bucket_size 64;/' /etc/nginx/nginx.conf")
sudo("/etc/init.d/nginx restart")
def set_supervisor(domain):
""" setup for supervisor """
env.user = "sopier"
env.key_filename = "/home/banteng/.ssh/id_rsa"
local("scp run.py sopier@" + droplet_ip + ":/home/sopier/" + domain)
local("scp supervisord.conf sopier@" + droplet_ip + ":")
def run_site():
""" run the site """
env.user = "sopier"
env.key_filename = "/home/banteng/.ssh/id_rsa"
try:
sudo("pkill supervisord")
except:
pass
sudo("supervisord -c supervisord.conf")
def setup_server():
create_user()
create_key()
install_packages()
def deploy_site(site):
add_domain("www." + site)
create_venv(site)
install_packages_venv(site)
upload_package(site + ".tar.gz", site)
setup_nginx()
set_supervisor(site)
run_site()
|
fd52f61132bec2dd614ef57f50f2ff942396ea7b
|
[
"Markdown",
"Python"
] | 3
|
Markdown
|
kholidfu/flask_auto_builder
|
6cb570595b7573174b1ca358a4ef19abffb4560e
|
0af08d0aa63953451995b9beafdf8ddbb3aa909b
|
refs/heads/master
|
<file_sep>document.querySelector('.navbar').addEventListener('click', () => {
document.getElementById("burger-check").checked = true
});
document.querySelector('.times').addEventListener('click', () =>
document.querySelector('.popup').style.display = "none"
);
document.querySelector('.order__btn').addEventListener('click', () =>
document.querySelector('.popup').style.display = "block"
);
|
94fad43d0629a001b02eaacba5bd35ddc4428571
|
[
"JavaScript"
] | 1
|
JavaScript
|
CheffChelos/WebLabs
|
f9800fd60f0ff60fdb4b199cd9c3acee573756c2
|
b2ab9dff2664a6a59c57d780c85d3013f0f46f05
|
refs/heads/master
|
<repo_name>antonSuprun/wiki2SC<file_sep>/src/wikitools/user.py
# -*- coding: utf-8 -*-
# Copyright 2008, 2009 Mr.Z-man, bjweeks
# This file is part of wikitools.
# wikitools is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# wikitools is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with wikitools. If not, see <http://www.gnu.org/licenses/>.
import wiki
import page
import api
import socket
class User:
"""A user on the wiki"""
def __init__(self, site, name, check=True):
"""
wiki - A wiki object
name - The username, as a string
check - Checks for existence, normalizes name
"""
self.site = site
self.name = name
if not isinstance(self.name, unicode):
self.name = unicode(self.name, 'utf8')
self.exists = True # If we're not going to check, assume it does
self.blocked = None # So we can tell the difference between blocked/not blocked/haven't checked
self.editcount = -1
self.groups = []
self.id = 0
if check:
self.setUserInfo()
self.isIP = False
try:
s = socket.inet_aton(self.name.replace(' ', '_'))
if socket.inet_ntoa(s) == self.name:
self.isIP = True
self.exists = False
except:
pass
self.page = page.Page(self.site, ':'.join([self.site.namespaces[2]['*'], self.name]), check=check, followRedir=False)
def setUserInfo(self):
"""Sets basic user info"""
params = {
'action': 'query',
'list': 'users',
'ususers':self.name,
'usprop':'blockinfo|groups|editcount'
}
req = api.APIRequest(self.site, params)
response = req.query()
user = response['query']['users'][0]
self.name = user['name']
if 'missing' in user or 'invalid' in user:
self.exists = False
return
self.id = int(user['userid'])
self.editcount = int(user['editcount'])
if 'groups' in user:
self.groups = user['groups']
if 'blockedby' in user:
self.blocked = True
else:
self.blocked = False
return self
def getTalkPage(self, check=True, followRedir=False):
"""Convenience function to get an object for the user's talk page"""
return page.Page(self.site, ':'.join([self.site.namespaces[3]['*'], self.name]), check=check, followRedir=False)
def isBlocked(self, force=False):
"""Determine if a user is blocked"""
if self.blocked is not None and not force:
return self.blocked
params = {'action':'query',
'list':'blocks',
'bkusers':self.name,
'bkprop':'id'
}
req = api.APIRequest(self.site, params)
res = req.query(False)
if len(res['query']['blocks']) > 0:
self.blocked = True
else:
self.blocked = False
return self.blocked
def block(self, reason=False, expiry=False, anononly=False, nocreate=False, autoblock=False, noemail=False, hidename=False, allowusertalk=False, reblock=False):
"""Block the user
Params are the same as the API
reason - block reason
expiry - block expiration
anononly - block anonymous users only
nocreate - disable account creation
autoblock - block IP addresses used by the user
noemail - block user from sending email through the site
hidename - hide the username from the log (requires hideuser right)
allowusertalk - allow the user to edit their talk page
reblock - overwrite existing block
"""
params = {'action':'block',
'user':self.name,
'gettoken':''
}
req = api.APIRequest(self.site, params)
res = req.query()
token = res['block']['blocktoken']
params = {'action':'block',
'user':self.name,
'token':token
}
if reason:
params['reason'] = reason
if expiry:
params['expiry'] = expiry
if anononly:
params['anononly'] = ''
if nocreate:
params['nocreate'] = ''
if autoblock:
params['autoblock'] = ''
if noemail:
params['noemail'] = ''
if hidename:
params['hidename'] = ''
if allowusertalk:
params['allowusertalk'] = ''
if reblock:
params['reblock'] = ''
req = api.APIRequest(self.site, params, write=False)
res = req.query()
if 'block' in res:
self.blocked = True
return res
def unblock(self, reason=False):
"""Unblock the user
reason - reason for the log
"""
params = {
'action': 'unblock',
'user': self.name,
'gettoken': ''
}
req = api.APIRequest(self.site, params)
res = req.query()
token = res['unblock']['unblocktoken']
params = {
'action': 'unblock',
'user': self.name,
'token': token
}
if reason:
params['reason'] = reason
req = api.APIRequest(self.site, params, write=False)
res = req.query()
if 'unblock' in res:
self.blocked = False
return res
def __hash__(self):
return hash(self.name) ^ hash(self.site.apibase)
def __eq__(self, other):
if not isinstance(other, User):
return False
if self.name == other.name and self.site == other.site:
return True
return False
def __ne__(self, other):
if not isinstance(other, User):
return True
if self.name == other.name and self.site == other.site:
return False
return True
def __str__(self):
return self.__class__.__name__ + ' ' + repr(self.name) + " on " + repr(self.site.domain)
def __repr__(self):
return "<"+self.__module__+'.'+self.__class__.__name__+" "+repr(self.name)+" on "+repr(self.site.apibase)+">"<file_sep>/src/wikitools/pagelist.py
# -*- coding: utf-8 -*-
# Copyright 2008, 2009 Mr.Z-man
# This file is part of wikitools.
# wikitools is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# wikitools is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with wikitools. If not, see <http://www.gnu.org/licenses/>.
import api
import page
import category
import wikifile
import math
def listFromQuery(site, queryresult):
"""Generate a list of pages from an API query result
queryresult is the list of pages from a list or generator query
e.g. - for a list=categorymembers query, use result['query']['categorymembers']
for a generator query, use result['query']['pages']
"""
ret = []
if isinstance(queryresult, list):
for item in queryresult:
pageid = False
if 'pageid' in item:
pageid = item['pageid']
if item['ns'] == 14:
item = category.Category(site, title=item['title'], check=False, followRedir=False, pageid=pageid)
elif item['ns'] == 6:
item = wikifile.File(site, title=item['title'], check=False, followRedir=False, pageid=pageid)
else:
item = page.Page(site, title=item['title'], check=False, followRedir=False, pageid=pageid)
ret.append(item)
else:
for key in queryresult.keys():
item = queryresult[key]
pageid = False
if 'pageid' in item:
pageid = item['pageid']
if item['ns'] == 14:
item = category.Category(site, title=item['title'], check=False, followRedir=False, pageid=pageid)
elif item['ns'] == 6:
item = wikifile.File(site, title=item['title'], check=False, followRedir=False, pageid=pageid)
else:
item = page.Page(site, title=item['title'], check=False, followRedir=False, pageid=pageid)
ret.append(item)
return ret
def listFromTitles(site, titles, check=True, followRedir=False):
"""Create a list of page objects from a list of titles
check and followRedir have the same meaning as in page.Page
"""
ret = []
if not check:
for title in titles:
title = page.Page(site, title=title, check=False)
ret.append(title)
else:
querylist = []
limit = int(site.limit)
if len(titles) > limit/10:
iters = int(math.ceil(float(len(titles)) / (limit/10)))
for x in range(0,iters):
lower = x*limit/10
upper = (x+1)*limit/10
querylist.append(titles[lower:upper])
else:
querylist.append(titles)
response = False
for item in querylist:
tlist = '|'.join(item)
if not isinstance(tlist, unicode):
tlist = unicode(tlist, 'utf8')
params = {'action':'query',
'titles':tlist,
}
if followRedir:
params['redirects'] = ''
req = api.APIRequest(site, params)
res = req.query(False)
if not response:
response = res
else:
# This breaks on non-existent titles, the api gives them negative numbers
# resultCombine doesn't account for this and ignores or overwrites the
# duplicate pageids
response = api.resultCombine('', response, res)
for key in response['query']['pages'].keys():
res = response['query']['pages'][key]
item = makePage(key, res, site)
ret.append(item)
return ret
def listFromPageids(site, pageids, check=True, followRedir=False):
"""Create a list of page objects from a list of pageids
check and followRedir have the same meaning as in page.Page
"""
ret = []
if not check:
for id in pageids:
title = page.Page(site, pageid=id, check=False)
ret.append(title)
else:
querylist = []
limit = int(site.limit)
if len(pageids) > limit/10:
iters = int(math.ceil(float(len(pageids)) / (limit/10)))
for x in range(0,iters):
lower = x*limit/10
upper = (x+1)*limit/10
querylist.append(pageids[lower:upper])
else:
querylist.append(pageids)
response = False
for item in querylist:
ids = [str(id) for id in item]
idlist = '|'.join(ids)
params = {'action':'query',
'pageids':idlist,
}
if followRedir:
params['redirects'] = ''
req = api.APIRequest(site, params)
res = req.query()
if not response:
response = res
else:
response = api.resultCombine('', response, res)
for key in response['query']['pages'].keys():
res = response['query']['pages'][key]
item = makePage(key, res, site)
ret.append(item)
return ret
def makePage(key, result, site):
title=False
if 'title' in result:
title = result['title']
if 'ns' in result and result['ns'] == 14:
item = category.Category(site, title=title, check=False, followRedir=False, pageid=key)
elif 'ns' in result and result['ns'] == 6:
item = wikifile.File(site, title=title, check=False, followRedir=False, pageid=key)
else:
item = page.Page(site, title=title, check=False, followRedir=False, pageid=key)
if 'missing' in result:
item.exists = False
if 'invalid' in result:
item = False
if 'ns' in result:
item.setNamespace(int(result['ns']))
return item<file_sep>/src/ParserAndCreatorFactory.py
# -*- coding: utf-8 -*-
'''
Created on 27.02.2012
@author: kulex4
'''
from creators.templateCreator import*
from parsers.templateParser import *
class ParserAndCreatorFactory():
_parsers = {}
_creators = {}
def __init__(self):
a = planetParser()
self._parsers[u'карточка планеты'] = a
b = planetCreator()
self._creators[u'карточка планеты'] = b
def getParser(self, information):
if information in self._parsers:
parser = self._parsers[information]
else: parser = baseParser()
return parser
def getCreator(self, information):
if information in self._creators:
creator = self._creators[information]
else: creator = baseCreator()
return creator<file_sep>/src/wikitools/__init__.py
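# Illustrative usage (mirrors wiki2SC.parseTemplete; the template name is the Russian key registered in __init__):
#   factory = ParserAndCreatorFactory()
#   parser = factory.getParser(u'карточка планеты')
#   creator = factory.getCreator(u'карточка планеты')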
# -*- coding: utf-8 -*-
__all__ = ["wiki", "api", "page", "category", "user", "pagelist", "wikifile"]
from wiki import *
from api import *
from page import *
from category import *
from user import *
from wikifile import *<file_sep>/src/parsers/templateParser.py
# -*- coding: utf-8 -*-
'''
Created on 27.02.2012
@author: Burger
'''
import re
def fromText(text):
templates = []
kol = 0
template = ""
for sim in text:
if kol == 0 and template != "":
template + '}'
templates.append(template + '}')
template = ""
if sim == '{': kol = kol + 1
elif sim == '}': kol = kol - 1
if kol != 0: template = template + sim
if template!='' and kol==0: templates.append(template)
prev=''
our=False
templete=''
for sim in text:
if sim=='!' and prev=='<':
our=True
templete=prev+sim
elif sim!='>' and our:
templete=templete+sim
elif sim=='>':
templates.append(templete)
templete=''
our=False
prev=sim
return templates
class baseParser():
def dellSpaces(self,text):
k=0
for sym in text:
if sym==' ':
k=k+1
else:break
text=text[k:len(text)]
k=0
text1=text[::-1]
l=len(text)-1
while(l>-1):
if text[l]==' ' or text[l]=='\n':l=l-1
else:break
text=text[0:l+1]
return text
def templateUnification(self,template):
dell=fromText(template[2:len(template)])
for templ in dell:
template=re.sub(templ,'', template)
template=re.sub('<br />','\n', template)
template=re.sub(r'\[.*|.*\]','', template)
template=re.sub(' ','', template)
template=re.sub('<ref.*>','', template)
template=re.sub('</ref>','', template)
template=re.sub('<sup>','^',template)
template=re.sub('</sup>','',template)
template=re.sub('<','<',template)
template=re.sub('&','&',template)
template=re.sub('>','>',template)
template=re.sub('"','"',template)
return template
def _keyValue(self, template):
template=self.templateUnification(template)
information={}
key=''
word=''
first=False
for sym in template:
if sym == '|':
key=self.dellSpaces(key)
word=self.dellSpaces(word)
if word!='' and key!='':
information[self.dellSpaces(key)]=self.dellSpaces(word)
key=''
word=''
elif sym=='=':
key=word
word=''
else: word=word+sym
return information
def parse(self, template): return self._keyValue(template)
class planetParser(baseParser):
def parse(self, template): return self._keyValue(template)<file_sep>/src/wiki2SC.py
# -*- coding: utf-8 -*-
'''
Created on 15.02.2012
@author: Burger
'''
from wikiWorker import wikiWorker
from ParserAndCreatorFactory import ParserAndCreatorFactory
#from compiler.ast import Printnl, TryFinally
class wiki2SC():
_path=None
_worker=None
def __init__(self,path="",siteName = 'http://en.wikipedia.org/w/api.php'):
self._path=path
self._worker=wikiWorker('http://ru.wikipedia.org/w/api.php')
def parseTemplete(self,path,template,templateName,myFactory):
parser = myFactory.getParser(templateName)
information = parser.parse(template)
creator = myFactory.getCreator(templateName)
result=creator.create(information)
f = file(unicode(path+result['name'])+'.gwf',"w")
f.write(result['information'])
f.close()
def workWithPage(self,page):
self._worker.add(page)
templates = self._worker.templatesFromPage(page)
result=False
try:
if(len(templates)>0):
result=True
myFactory = ParserAndCreatorFactory()
for template in templates:
self.parseTemplete(self._path,template,self._worker.getTamplateName(template),myFactory)
except:
print 'ERROR page--',page,'\n'
result=False
return result
def work(self,page):
links=[page]
while(1):
newWave=[]
print '--------------------------',len(links),'---------------------------------'
for link in links:
if self._worker.was(link):continue
print link,
if self.workWithPage(page=link):
newWave=newWave+self._worker.getLinksFromPage(link)
print ' yes'
else: print ' no'
links=[]
if len(newWave)>0:
links=newWave
else:break
#operator=wiki2SC(path='C:\\Users\\Burger\\Desktop\\',siteName='http://ru.wikipedia.org/w/api.php')
#operator.work( page=u'Уран_(планета)')
print 'Hello linux'<file_sep>/src/creators/templateCreator.py
# -*- coding: utf-8 -*-
'''
Created on 27.02.2012
@author: Burger
'''
class baseCreator():
forId=1
names=[]
def create(self,information):
return "base creator"
def getID(self,name):
if name=='' or name in self.names:
self.forId=self.forId+1
id=abs(hash(self.forId))
else: id=str(abs(hash(name)))
if not name in self.names: self.names.append(name)
return id
def genNode(self,name,x,y,space='',type='general_node'):
id=self.getID(name)
node=space+'<node type="node/const/'+type+'" idtf="'+unicode(name)+'" shapeColor="0" id="'+unicode(id)+'" parent="0" left="0" top="0" right="36" bottom="25" textColor="164" text_angle="0" text_font="Times New Roman [Arial]" font_size="10" x="'+str(x)+'" y="'+str(y)+'" haveBus="false">\n'
node=node+space+' <content type="0" mime_type="" file_name=""/>\n'+space+'</node>\n'
return {'node':node,'id':id}
def genArc(self,name='',b_x=0,b_y=0,e_x=0,e_y=0,id_b=0,id_e=0,space='',type='arc/const/pos',dotBBalance=0, dotEBalance=0,nodeType='arc'):
id_b=str(id_b)
id_e=str(id_e)
id=self.getID(name)
arc=space+'<'+nodeType+' type="'+type+'" idtf="'+name+'" shapeColor="0" id="'+str(id)+'" parent="0" id_b="'+str(id_b)+'" id_e="'+str(id_e)+'" b_x="'+str(b_x)+'" b_y="'+str(b_y)+'" e_x="'+str(e_x)+'" e_y="'+str(e_y)+'" dotBBalance="'+str(dotBBalance)+'" dotEBalance="'+str(dotEBalance)+'">\n'
arc=arc+space+' <points/>\n'+space+'</'+nodeType+'>\n'
return {'arc':arc,'id':id}
class planetCreator(baseCreator):
def groupName(self):
return'планета'
def create(self,information):
result='<?xml version="1.0" encoding="UTF-8"?>\n'
result=result+'<GWF version="1.6">\n'
space=' '
result=result+space+'<staticSector>\n'
space=space+space
group=self.genNode(name=self.groupName(),x=293, y=58, space=space, type='group')
planet=self.genNode(name=information['название'], x=140, y=64, space=space)
arc=self.genArc(b_x=293, b_y=58, e_x=140, e_y=64, id_b=group['id'], id_e=planet['id'], space=space, type='arc/const/pos')
result=result+group['node']+'\n'+planet['node']+'\n'+arc['arc']
x=217
y=130
space=' '
for rel,inf in information.iteritems():
name=unicode(rel)
relation=self.genNode(name,x,y,space,'relation')
result=result+relation['node']
node=self.genNode(name=inf, x=x+50, y=y+50, space=space)
result=result+node['node']
node2=self.genNode(name=information['название'], x=x-50, y=y+50, space=space)
result=result+node2['node']
pair=self.genArc(b_x=293,b_y=58,e_x=x, e_y=y, id_b=node2['id'], id_e=node['id'], space=space, type='pair/const/orient',nodeType='pair')
arc=self.genArc(b_x=x, b_y=y,id_b=relation['id'], id_e=pair['id'], space=space, dotBBalance=0.5)
result=result+pair['arc']+arc['arc']
y=y+120
space=' '
result=result+space+'</staticSector>\n'
result=result+'</GWF>\n'
answer={'name':information['название'],'information':result}
return answer
#parser=planetParser()
<file_sep>/src/wikitools/category.py
# -*- coding: utf-8 -*-
# Copyright 2008, 2009 Mr.Z-man
# This file is part of wikitools.
# wikitools is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# wikitools is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with wikitools. If not, see <http://www.gnu.org/licenses/>.
import wiki
import page
import api
class Category(page.Page):
"""A category on the wiki"""
def __init__(self, site, title=False, check=True, followRedir=False, section=False, sectionnumber=False, pageid=False):
"""
wiki - A wiki object
title - The page title, as a string or unicode object
check - Checks for existence, normalizes title, required for most things
followRedir - follow redirects (check must be true)
section - the section name
sectionnumber - the section number
pageid - pageid, can be in place of title
"""
page.Page.__init__(self, site=site, title=title, check=check, followRedir=followRedir, section=section, sectionnumber=sectionnumber, pageid=pageid)
self.members = []
if self.namespace != 14:
self.setNamespace(14, check)
def getAllMembers(self, titleonly=False, reload=False, namespaces=False):
"""Gets a list of pages in the category
titleonly - set to True to only create a list of strings,
else it will be a list of Page objects
reload - reload the list even if it was generated before
namespaces - List of namespaces to restrict to (queries with this option will not be cached)
"""
if self.members and not reload:
if titleonly:
if namespaces is not False:
return [p.title for p in self.members if p.namespace in namespaces]
else:
return [p.title for p in self.members]
if namespaces is False:
return self.members
else:
return [p for p in self.members if p.namespace in namespaces]
else:
ret = []
members = []
for member in self.__getMembersInternal(namespaces):
members.append(member)
if titleonly:
ret.append(member.title)
if titleonly:
return ret
if namespaces is False:
self.members = members
return members
def getAllMembersGen(self, titleonly=False, reload=False, namespaces=False):
"""Generator function for pages in the category
titleonly - set to True to return strings,
else it will return Page objects
reload - reload the list even if it was generated before
namespaces - List of namespaces to restrict to (queries with this option will not be cached)
"""
if self.members and not reload:
for member in self.members:
if namespaces is False or member.namespace in namespaces:
if titleonly:
yield member.title
else:
yield member
else:
if namespaces is False:
self.members = []
for member in self.__getMembersInternal():
if namespaces is False:
self.members.append(member)
if titleonly:
yield member.title
else:
yield member
def __getMembersInternal(self, namespaces=False):
params = {'action':'query',
'list':'categorymembers',
'cmtitle':self.title,
'cmlimit':self.site.limit,
'cmprop':'title'
}
if namespaces is not False:
params['cmnamespace'] = '|'.join([str(ns) for ns in namespaces])
while True:
req = api.APIRequest(self.site, params)
data = req.query(False)
for item in data['query']['categorymembers']:
yield page.Page(self.site, item['title'], check=False, followRedir=False)
try:
params['cmcontinue'] = data['query-continue']['categorymembers']['cmcontinue']
except:
break <file_sep>/src/wikitools/wikifile.py
# -*- coding: utf-8 -*-
# Copyright 2009 Mr.Z-man
# This file is part of wikitools.
# wikitools is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# wikitools is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with wikitools. If not, see <http://www.gnu.org/licenses/>.
import wiki
import page
import api
import urllib2
class FileDimensionError(wiki.WikiError):
"""Invalid dimensions"""
class UploadError(wiki.WikiError):
"""Error during uploading"""
class File(page.Page):
"""A file on the wiki"""
def __init__(self, wiki, title, check=True, followRedir=False, section=False, sectionnumber=False, pageid=False):
"""
wiki - A wiki object
title - The page title, as a string or unicode object
check - Checks for existence, normalizes title, required for most things
followRedir - follow redirects (check must be true)
section - the section name
sectionnumber - the section number
pageid - pageid, can be in place of title
"""
page.Page.__init__(self, wiki, title, check, followRedir, section, sectionnumber, pageid)
if self.namespace != 6:
self.setNamespace(6, check)
self.usage = []
self.history = []
def getHistory(self, force=False):
if self.history and not force:
return self.history
if self.pageid == 0 and not self.title:
self.setPageInfo()
if not self.exists:
raise page.NoPage
params = {
'action': 'query',
'prop': 'imageinfo',
'iilimit': self.site.limit,
}
if self.pageid > 0:
params['pageids'] = self.pageid
else:
params['titles'] = self.title
req = api.APIRequest(self.site, params)
response = req.query()
self.history = response['query']['pages'][str(self.pageid)]['imageinfo']
return self.history
def getUsage(self, titleonly=False, force=False, namespaces=False):
"""Gets a list of pages that use the file
titleonly - set to True to only create a list of strings,
else it will be a list of Page objects
force - reload the list even if it was generated before
namespaces - List of namespaces to restrict to (queries with this option will not be cached)
"""
if self.usage and not force:
if titleonly:
if namespaces is not False:
return [p.title for p in self.usage if p.namespace in namespaces]
else:
return [p.title for p in self.usage]
if namespaces is False:
return self.usage
else:
return [p for p in self.usage if p.namespace in namespaces]
else:
ret = []
usage = []
for title in self.__getUsageInternal(namespaces):
usage.append(title)
if titleonly:
ret.append(title.title)
if titleonly:
return ret
if namespaces is False:
self.usage = usage
return usage
def getUsageGen(self, titleonly=False, force=False, namespaces=False):
"""Generator function for pages that use the file
titleonly - set to True to return strings,
else it will return Page objects
force - reload the list even if it was generated before
namespaces - List of namespaces to restrict to (queries with this option will not be cached)
"""
if self.usage and not force:
for title in self.usage:
if namespaces is False or title.namespace in namespaces:
if titleonly:
yield title.title
else:
yield title
else:
if namespaces is False:
self.usage = []
for title in self.__getUsageInternal():
if namespaces is False:
self.usage.append(title)
if titleonly:
yield title.title
else:
yield title
def __getUsageInternal(self, namespaces=False):
params = {'action':'query',
'list':'imageusage',
'iutitle':self.title,
'iulimit':self.site.limit,
}
if namespaces is not False:
params['iunamespace'] = '|'.join([str(ns) for ns in namespaces])
while True:
req = api.APIRequest(self.site, params)
data = req.query(False)
for item in data['query']['imageusage']:
yield page.Page(self.site, item['title'], check=False, followRedir=False)
try:
params['iucontinue'] = data['query-continue']['imageusage']['iucontinue']
except:
break
def __extractToList(self, json, stuff):
list = []
if stuff in json['query']:
for item in json['query'][stuff]:
list.append(item['title'])
return list
def download(self, width=False, height=False, location=False):
"""Download the image to a local file
width/height - set width OR height of the downloaded image
location - set the filename to save to. If not set, the page title
minus the namespace prefix will be used and saved to the current directory
"""
if self.pageid == 0:
self.setPageInfo()
params = {'action':'query',
'prop':'imageinfo',
'iiprop':'url'
}
if width and height:
raise FileDimensionError("Can't specify both width and height")
if width:
params['iiurlwidth'] = width
if height:
params['iiurlheight'] = height
if self.pageid != 0:
params['pageids'] = self.pageid
elif self.title:
params['titles'] = self.title
else:
self.setPageInfo()
if not self.exists: # Non-existant files may be on a shared repo (e.g. commons)
params['titles'] = self.title
else:
params['pageids'] = self.pageid
req = api.APIRequest(self.site, params)
res = req.query(False)
key = res['query']['pages'].keys()[0]
url = res['query']['pages'][key]['imageinfo'][0]['url']
if not location:
location = self.title.split(':', 1)[1]
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.site.cookies))
headers = { "User-agent": self.site.useragent }
request = urllib2.Request(url, None, headers)
data = opener.open(request)
f = open(location, 'wb', 0)
f.write(data.read())
f.close()
return location
def upload(self, fileobj=None, comment='', url=None, ignorewarnings=False, watch=False):
"""Upload a file, requires the "poster" module
fileobj - A file object opened for reading
comment - The log comment, used as the inital page content if the file
doesn't already exist on the wiki
url - A URL to upload the file from, if allowed on the wiki
ignorewarnings - Ignore warnings about duplicate files, etc.
watch - Add the page to your watchlist
"""
if not api.canupload and fileobj:
raise UploadError("The poster module is required for file uploading")
if not fileobj and not url:
raise UploadError("Must give either a file object or a URL")
if fileobj and url:
raise UploadError("Cannot give a file and a URL")
if fileobj:
if not isinstance(fileobj, file):
raise UploadError('If uploading from a file, a file object must be passed')
if fileobj.mode not in ['r', 'rb', 'r+']:
raise UploadError('File must be readable')
fileobj.seek(0)
params = {'action':'upload',
'comment':comment,
'filename':self.unprefixedtitle,
'token':self.getToken('edit') # There's no specific "upload" token
}
if url:
params['url'] = url
else:
params['file'] = fileobj
if ignorewarnings:
params['ignorewarnings'] = ''
if watch:
params['watch'] = ''
req = api.APIRequest(self.site, params, write=True, multipart=bool(fileobj))
res = req.query()
if 'upload' in res and res['upload']['result'] == 'Success':
self.wikitext = ''
self.links = []
self.templates = []
self.exists = True
return res<file_sep>/src/wikiWorker.py
# -*- coding: utf-8 -*-
'''
Created on 26.02.2012
@author: Burger
'''
from wikitools import wiki
from wikitools import category
from wikitools import page
class wikiWorker():
_site = None
_nameSite = None
_links = []
_templateNames = ['карточка планеты']
def __init__(self, siteName = 'http://en.wikipedia.org/w/api.php'):
self._nameSite = siteName
def __pageAndSite__(self,article):
wikiversion = ""
page = ""
res = ""
work = False
for sim in unicode(unicode(article)):
if(sim == "'"):
work = not work
if(not work):
if(page == ""): page = res
else: wikiversion = res
res = ""
elif(work):
res = res + sim
return {page:wikiversion}
def articlesFromCategory(self, siteName = 'http://en.wikipedia.org/w/api.php', categortName = u'Языки_программирования_по_алфавиту'):
self._site = self._openSite(siteName)
programingCategory = category.Category(self._site, unicode(categortName))
#return map(self.__pageAndSite__, programingCategory.getAllMembers())
articles = {}
for article in programingCategory.getAllMembers():
articles.update(self.__pageAndSite__(article))
return articles
def _openSite(self, siteName = None):
if(self._nameSite != siteName or self._nameSite == None):
if siteName is not None:
self._nameSite = siteName
self._site = wiki.Wiki(self._nameSite)
return self._site
def add(self,link):
self._links.append(link)
def was(self,link):
return link in self._links
def getLinksFromPage(self,pageName):
myPage = page.Page(self._site, pageName)
return myPage.getLinks()
def startPoint(self, siteName, pageName):
self._site = self._openSite(siteName)
myPage = page.Page(self._site, pageName)
self.addOrMarkLink(pageName)
for link in myPage.getLinks():
self.addOrMarkLink(link)
def getTamplateName(self, template = None):
name = ""
for sim in template:
if sim != '{' and sim != '|' and sim != '\n':
name = name + sim
elif sim == '|':
break
return unicode(name).lower()
def templatesFromPage(self, pageName):
self._site = self._openSite()
myPage = page.Page(self._site, pageName)
if not myPage.exists: return []
pageText = myPage.getWikiText()
pageTemplates = []
kol = 0
template = ""
for sim in pageText:
if kol == 0 and template != "":
template + '}'
if self.getTamplateName(template) in self._templateNames:
pageTemplates.append(template + '}')
template = ""
if sim == '{': kol = kol + 1
elif sim == '}': kol = kol - 1
if kol != 0: template = template + sim
return pageTemplates
|
1dc20d985e788dbe09fd0cdc05822c42dadea2f5
|
[
"Python"
] | 10
|
Python
|
antonSuprun/wiki2SC
|
c636c55926202465628fb1c7a0de61006c43b93a
|
bfeaac945704f5ad549baed15016b3ae3c8a5412
|
refs/heads/main
|
<file_sep># This script will generate a binary for the RFUZZ harness that produces the same VCD at a TLUL input
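# Expected input (inferred from the parsing below): a text file of cycles separated by '---',
# where each non-empty line is a Python tuple literal of the form (name, size, value).
# Illustrative invocation (the input file name is a placeholder):
#   python3 <this script> cycles.txt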
import sys
from ast import literal_eval as make_tuple
# TODO: 21 is generated for the pop function defined in the RFUZZ harness. Please generalize 21 as such.
bytesPerCycle = 21
def main(input_file):
with open(input_file, "r") as file:
input_data = file.read()
cycles = input_data.split('---')
allInsts = [cycle.split('\n') for cycle in cycles]
parsed_instructions = [[make_tuple(inst) for inst in cycle if inst] for cycle in allInsts]
allCycleBinary = []
for cycle in parsed_instructions:
binary = 0
cumulative_size = 0
for inst in cycle:
_, size, value = inst
mask = (1 << size) - 1
binary = ((value & mask) << cumulative_size) | binary
cumulative_size += size
cumulative_size = bytesPerCycle*8
zero_padding = cumulative_size - len(bin(binary)[2:])
full_value = "0"*zero_padding + bin(binary)[2:]
binary = [full_value[i:i+8] for i in range(0, len(full_value), 8)]
binary.reverse()
binary = ''.join(binary)
allCycleBinary.append(binary)
finalBinary = int(''.join(allCycleBinary), 2)
output_file = "binary/RFUZZ_longSeed.hwf"
with open(output_file, "wb") as file:
print(finalBinary.to_bytes(21*len(cycles), byteorder='big'))
file.write(finalBinary.to_bytes(21*len(cycles), byteorder='big'))
def ceil_8(num):
remainder = num % 8
if remainder:
num += 8 - remainder
return num // 8
if __name__ == "__main__":
main(sys.argv[1])
<file_sep>import sys
import json
import glob
import os
"""This script will take in an out folder from RFUZZ and convert each input (stored as a JSON) into its binary form"""
"""Iterates through provided output folder and generates the binary form for each input"""
def convertFilesToBinary(input_folder, output_folder):
if not os.path.isdir(output_folder):
os.mkdir(output_folder)
for filepath in glob.glob(input_folder + '/*'):
filename = filepath.split("/")[-1]
if filename != "latest.json" and filename != "config.json":
print("Processing: " + filename)
with open(filepath) as input:
data = json.load(input)
binary = generateBinary(data)
with open(output_folder + "/" + filename.split(".")[0] + ".hwf", 'wb') as new_file:
new_file.write(binary)
"""Converts data from a single file into binary input"""
def generateBinary(data):
byteArray = bytearray(data['entry']['inputs'])
print(byteArray)
return byteArray
if __name__ == "__main__":
input_folder = sys.argv[1]
output_folder = sys.argv[2]
convertFilesToBinary(input_folder, output_folder)<file_sep># RTLFuzzLab: a modular hardware fuzzing framework
RTLFuzzLab is designed to allow for easy experimentation with Coverage Directed Mutational Fuzz Testing on RTL designs.

For details about RTLFuzzLab, please see our abstract released in WOSET 2021.
[Abstract](https://woset-workshop.github.io/WOSET2021.html#article-10)
Fajardo, Brandon and Laeufer, Kevin and Bachrach, Jonathan and Sen, Koushik. **RTLFuzzLab: Building A Modular Open-Source Hardware Fuzzing Framework.** In *Workshop on Open-Source EDA Technology (WOSET)*, 2021.
BibTeX citation:
```
@inproceedings{fajardo2021rtlfuzzlab,
title={{RTLFuzzLab: Building A Modular Open-Source Hardware Fuzzing Framework}},
author={<NAME> and <NAME> and Bachrach, Jonathan and <NAME>},
booktitle={Workshop on Open-Source EDA Technology (WOSET)},
year={2021}
}
```
## Installation
### Dependencies
The following dependencies are required to run this software:
* make
* gcc
* g++
* java
* sbt
* verilator
* matplotlib
* scipy
### Get AFL Fork
```.sh
git clone https://github.com/ekiwi/AFL AFL_rtl_fuzz_lab
cd AFL_rtl_fuzz_lab
make
```
This AFL fork is functionally identical to upstream AFL.
Our version produces some additional meta-data that is used to produce better plots.
### Clone repo
```.sh
git clone https://github.com/ekiwi/rtl-fuzz-lab
```
### Run setup script (setup.sh)
```.sh
./setup.sh
```
This will create two fifos (`a2j` and `j2a`), a `seeds` directory, and compile the proxy to interface with AFL.
## Usage
### Run fuzzing script (fuzz.sh)
The script takes two sets of arguments, separated by '---'.
1. First set is arguments to the Python script, fuzz.py.
> Execute "fuzz.py -h ---" for argument options to the Python script
> Existing seeds for --seed argument are available in: `rtl-fuzz-lab/src/fuzzing/template_seeds/binary`
2. Second set is arguments passed to the Scala script, AFLDriver.
The following are options to pass in:
> --FIRRTL \<path\>: FIRRTL design which is to be fuzzed. Existing designs under: test/resources/fuzzing
> --Harness \<rfuzz/tlul\>: Handles converting input bytes to hardware inputs. Current options: rfuzz, tlul (bus-centric)
> --Directed: Flag for ignoring coverage in bus-monitors
> --VCD: Flag for generating a VCD (value change dump)
> --Feedback \<number\>: Maximum number of times a coverage point can trigger per input
> --MuxToggleCoverage \<boolean\>: Options: false (Mux Toggle Coverage), true (Full Mux Toggle Coverage)
Example:
```.sh
python3 fuzz.py --time 3 --folder ./example --iterations 1 --afl-path ~/AFL_rtl_fuzz_lab --seed TLI2C_longSeed.hwf --- --FIRRTL test/resources/fuzzing/TLI2C.fir --Harness tlul --Directed --MuxToggleCoverage false --Feedback 255
```
### Analyze coverage (coverageAnalysis.py)
The script takes a set of arguments equivalent to the second set of arguments to fuzz.py described above.
In addition, the script takes a --Folder \<folder\> argument specifying the folder to analyze.
Example:
```.sh
python3 coverageAnalysis.py --FIRRTL test/resources/fuzzing/TLI2C.fir --Harness tlul --Directed --MuxToggleCoverage false --Feedback 255 --Folder example/0.out
```
### Plot results (plotCoverage.py)
Takes in arguments: `do_average PATH [PATH ...]`
> See plotCoverage.py -h for argument options
> Outputs png of generated plot as rtl-fuzz-lab/coveragePlot.png
Example:
```.sh
python3 plotCoverage.py true example
```
## Acknowledgments
Integrating AFL with our Scala based fuzz bench would not have been possible without the awesome AFL proxy infrastructure from the [JQF](https://github.com/rohanpadhye/JQF) project.
## License
This code is open-source under a BSD license. See the `LICENSE` file for more information.
<file_sep>#!/usr/bin/python
import argparse
import os
import sys
import shutil
if '---' not in " ".join(sys.argv):
print("Please provide Scala arguments")
sys.exit(-1)
python_args, scala_args = " ".join(sys.argv).split('---')
parser = argparse.ArgumentParser(description="Run RTLFuzzLab")
# Python Arguments
parser.add_argument('-t', '--time', type=int, required=True,
help="The time, in minutes, to run the fuzzer")
parser.add_argument('-f', '--folder', type=str,
help="The output folder location")
parser.add_argument('-i', '--iterations', type=int, required=True,
help="The number of iterations to run")
parser.add_argument('-a', '--afl-path', type=str, default='~/AFL',
help="The path to the AFL folder")
parser.add_argument('--seed', type=str, default="",
help="Name of the seed in src/fuzzing/template_seeds/ to fuzz on")
args = parser.parse_args(python_args.split()[1:])
print("\nCreating jar file...\n")
os.system("sbt assembly")
os.environ['AFL_I_DONT_CARE_ABOUT_MISSING_CRASHES'] = '1'
os.environ['AFL_SKIP_CPUFREQ'] = '1'
if not os.path.isdir(args.folder):
os.mkdir(args.folder)
print("Generated output folder to store results")
# Moves seed to correct folder
if args.seed:
print("\nClearing seeds folder...")
for filename in os.listdir('seeds'):
file_path = os.path.join('seeds', filename)
try:
if os.path.isfile(file_path) or os.path.islink(file_path):
os.unlink(file_path)
elif os.path.isdir(file_path):
shutil.rmtree(file_path)
except Exception as e:
print('Failed to delete %s. Reason: %s' % (file_path, e))
exit()
print("Copying file to seeds folder:", args.seed)
f = os.path.join('src/fuzzing/template_seeds/binary', args.seed)
shutil.copy(f, 'seeds')
# Performs ITERATIONS fuzzing runs on provided parameters
for i in range(args.iterations):
out_folder_run = os.path.join(args.folder, str(i) + ".out")
print("\nStarting fuzzing run:", i)
print("Fuzzing on: \n MINUTES: {MINUTES} \n OUT_FOLDER: {OUT_FOLDER_RUN}".format(
MINUTES=args.time, OUT_FOLDER_RUN=out_folder_run))
# Prevent overwriting OUT_FOLDER_RUN
if os.path.exists(out_folder_run):
print("WARNING! DESIRED OUTPUT WOULD BE OVERWRITTEN: {OUT_FOLDER_RUN}".format(
OUT_FOLDER_RUN=out_folder_run))
sys.exit(1)
# Calls AFLDriver to setup fuzzing
print("Calling AFLDriver with arguments: {SCALA_ARGS}\n".format(
SCALA_ARGS=scala_args))
os.system("java -cp target/scala-2.12/rtl-fuzz-lab-assembly-0.1.jar fuzzing.afl.AFLDriver {SCALA_ARGS} --Folder {FOLDER} &".format(
SCALA_ARGS=scala_args, FOLDER=out_folder_run))
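    # Give the Scala driver time to start up and open the named pipes before launching afl-fuzz.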
os.system("sleep 13s")
os.system('timeout {TIME_STRING}s {AFL_PATH}/afl-fuzz -d -i seeds -o {OUT_FOLDER_RUN} -f input -- ./fuzzing/afl-proxy a2j j2a log'.format(
TIME_STRING=str(args.time * 60 + 5), AFL_PATH=args.afl_path, OUT_FOLDER_RUN=out_folder_run))
sys.exit(0)
<file_sep>import argparse
import json
import matplotlib.pyplot as plt
import numpy as np
import os
from scipy.interpolate import interp1d
from matplotlib.lines import Line2D
# Code for manually adding labels modeled from following:
# https://stackoverflow.com/questions/39500265/manually-add-legend-items-python-matplotlib
"""Plot data found at each path in JSON_PATHS"""
def plot_json(do_average, json_paths):
data_per_path = load_json(json_paths)
for i, (data, json_files) in enumerate(data_per_path):
plot_lines(do_average, data, json_files, json_paths[i])
# Configure and show plot
plt.title("Coverage Over Time")
plt.ylabel("Cumulative coverage %")
plt.yticks([x for x in range(0, 110, 10)])
plt.xlabel("Seconds")
colors = ['darkorange', 'royalblue', 'green']
lines = [Line2D([0], [0], color=c, linewidth=2, linestyle='-') for c in colors]
labels = ['Zeros Seed', 'Relevant Seed', 'Zeros Seed -- Only Valid']
manual_legend = False
if manual_legend:
plt.legend(lines, labels)
else:
plt.legend()
plt.savefig("coveragePlot.png")
plt.show()
"""Gets plotting data from JSON files found recursively at each path in JSON_PATHS.
Return: List of tuples (INPUT_DATA, JSON_FILENAMES) for each path"""
def load_json(json_paths):
json_files_per_path = [recursive_locate_json([json_path]) for json_path in json_paths]
for i, names in enumerate(json_files_per_path):
assert names, "Path contains no JSON files: {}".format(json_paths[i])
data_per_path = []
for json_files in json_files_per_path:
files = [open(file, 'r') for file in json_files]
data = [json.load(file) for file in files]
[file.close() for file in files]
data_per_path.append((data, json_files))
return data_per_path
"""Locates all paths to JSON files. Searches recursively within folders.
Input (JSON_PATHS): List of files and folders that contain JSON files.
Return: List of all JSON files at JSON_PATHS."""
def recursive_locate_json(json_paths):
json_files = []
for path in json_paths:
if os.path.isfile(path) and path.split(".")[-1].lower() == "json":
json_files.append(path)
elif os.path.isdir(path):
subpaths = [os.path.join(path, subpath) for subpath in os.listdir(path)]
json_files.extend(recursive_locate_json(subpaths))
return json_files
"""Converts inputted JSON data to plots"""
def plot_lines(do_average, json_data, json_files, json_path):
plotting_data = [extract_plotting_data(input) for input in json_data]
# Plot data (Averaging code modeled from RFUZZ analysis.py script: https://github.com/ekiwi/rfuzz)
if do_average:
# Collects all times seen across passed in JSON files
all_times = []
[all_times.extend(creation_times) for (creation_times, _) in plotting_data]
all_times = sorted(set(all_times))
all_coverage = np.zeros((len(plotting_data), len(all_times)))
for i, (creation_times, cumulative_coverage) in enumerate(plotting_data):
# Returns function which interpolates y-value(s) when passed x-value(s). Obeys step function, using previous value when interpolating.
interp_function = interp1d(creation_times, cumulative_coverage, kind='previous', bounds_error=False, assume_sorted=True)
# Interpolates coverage value for each time in all_times. Saved to all_coverage matrix
all_coverage[i] = interp_function(all_times)
means = np.mean(all_coverage, axis=0)
plt.step(all_times, means, where='post', label="Averaged: " + json_path)
else:
for i in range(len(plotting_data)):
(creation_time, cumulative_coverage) = plotting_data[i]
plt.step(creation_time, cumulative_coverage, where='post', label=json_files[i])
"""Extract plotting data from a single JSON file's data"""
def extract_plotting_data(input_data):
creation_times = []
cumulative_coverage = []
for input in input_data['coverage_data']:
creation_times.append((input['creation_time']))
cumulative_coverage.append(input["cumulative_coverage"] * 100)
# Extract end time from JSON file and add it to plotting data
creation_times.append(input_data['end_time'])
cumulative_coverage.append(cumulative_coverage[-1])
assert len(creation_times) == len(cumulative_coverage), "NUMBER OF TIMES SHOULD EQUAL NUMBER OF COVERAGE READINGS"
return (creation_times, cumulative_coverage)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Script to plot fuzzing results', formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument('do_average', help='Average plotting data per path')
parser.add_argument('json_paths', metavar='PATH', nargs='+', help='Path to recursively search for JSON files to plot\nAdd multiple paths to plot against each other')
args = parser.parse_args()
lower_do_average = args.do_average.lower()
if lower_do_average == "true":
do_average = True
elif lower_do_average == "false":
do_average = False
else:
raise argparse.ArgumentTypeError("DO_AVERAGE ARGUMENT MUST BE TRUE/FALSE, NOT: {}".format(args.do_average))
for path in args.json_paths:
if not (os.path.isfile(path) or os.path.isdir(path)):
raise argparse.ArgumentTypeError("PATH DOES NOT EXIST: {}".format(path))
plot_json(do_average, args.json_paths)
<file_sep>#!/bin/bash
#
# Prepare codebase for fuzzing
set -e
if ! [[ -p a2j ]]; then
mkfifo a2j
fi
if ! [[ -p j2a ]]; then
mkfifo j2a
fi
if ! [[ -d seeds ]]; then
mkdir seeds
fi
cd fuzzing
make
echo "SETUP COMPLETE"
exit 0
<file_sep># Copyright 2020 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""String coloring helper methods."""
def color_str_red(s):
"""Color string RED for writing to STDIN."""
return "\033[1m\033[91m{}\033[00m".format(s)
def color_str_green(s):
"""Color string GREEN for writing to STDIN."""
return "\033[1m\033[92m{}\033[00m".format(s)
def color_str_yellow(s):
"""Color string YELLOW for writing to STDIN."""
return "\033[93m{}\033[00m".format(s)
<file_sep># Copyright 2020 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from enum import IntEnum
import prettytable
from string_color import color_str_red as red
class TLULOpcode(IntEnum):
"""Hardware fuzzing opcode for fuzzing TL-UL driven cores."""
invalid = 0
wait = 1
read = 2
write = 3
class _YAMLTags:
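  """Field names recognized in the YAML seed description files."""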
opcode = "opcode"
address = "addr"
data = "data"
direct_in = "direct-in"
repeat = "repeat"
class TLULFuzzInstr:
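  """A single TL-UL hardware-fuzzing instruction parsed from one YAML entry.
  Class attributes hold frame-format defaults that the seed generator overrides from its CLI options."""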
opcode_type = "constant"
instr_type = "variable"
opcode_size = 1
address_size = 4
data_size = 4
direct_in_size = 0
endianness = "little"
def __init__(self, instr):
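    """Parses one YAML instruction dict into typed opcode/address/data fields."""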
# init attributes
self.opcode_str = "invalid"
self.opcode = TLULOpcode.invalid
self.address = 0
self.data = 0
self.direct_in = None
self.repeat = 1
# Validate/Decode opcode string
if _YAMLTags.opcode not in instr:
print(red("ERROR: all YAML lines require an opcode field. ABORTING!"))
self.opcode_str = str(instr[_YAMLTags.opcode])
self.opcode = self._decode_opcode_str()
# Check ADDRESS and DATA fields exist in YAML and convert to int
if self.opcode == TLULOpcode.read:
if _YAMLTags.address not in instr:
print(red("ERROR: read opcodes require an ADDRESS field. ABORTING!"))
sys.exit(1)
self.address = int(instr[_YAMLTags.address])
elif self.opcode == TLULOpcode.write:
if _YAMLTags.address not in instr:
print(red("ERROR: write opcodes require an ADDRESS field. ABORTING!"))
sys.exit(1)
if _YAMLTags.data not in instr:
print(red("ERROR: write opcodes require an DATA field. ABORTING!"))
sys.exit(1)
self.address = int(instr[_YAMLTags.address])
self.data = int(instr[_YAMLTags.data])
      # Validate address and data fields
self._validate_instr_field_size(_YAMLTags.address, self.address,
TLULFuzzInstr.address_size)
self._validate_instr_field_size(_YAMLTags.data, self.data,
TLULFuzzInstr.data_size)
# check if DIRECT_IN should exist in YAML and convert to int
if TLULFuzzInstr.direct_in_size > 0:
if _YAMLTags.direct_in not in instr:
print(red("ERROR: direct_in field required if size > 0. ABORTING!"))
else:
self.direct_in = int(instr[_YAMLTags.direct_in])
self._validate_instr_field_size(_YAMLTags.direct_in, self.direct_in,
TLULFuzzInstr.direct_in_size)
# check if REPEAT field exists in YAML and convert to int
if _YAMLTags.repeat in instr:
self.repeat = int(instr[_YAMLTags.repeat])
def __str__(self):
instr_table = prettytable.PrettyTable(header=False)
instr_table.title = "HW Fuzzing Instruction"
instr_table.field_names = ["Field", "Value"]
instr_table.add_row = (["Opcode", self.opcode_str])
instr_table.add_row = (["Address", "0x{:0>8X}".format(self.address)])
instr_table.add_row = (["Data", "0x{:0>8X}".format(self.data)])
if self.direct_in: #Added this becasue direct_in was left as None
instr_table.add_row = (["Direct In", "0x{:0>8X}".format(self.direct_in)])
instr_table.add_row = (["Repeat", self.repeat])
instr_table.align = "l"
return instr_table.get_string()
def _decode_opcode_str(self):
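    """Map the opcode string from YAML to a TLULOpcode value."""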
if self.opcode_str == "wait":
return TLULOpcode.wait
elif self.opcode_str == "read":
return TLULOpcode.read
elif self.opcode_str == "write":
return TLULOpcode.write
else:
print("ERROR: invalid opcode (%s) encountered. ABORTING!" %
self.opcode_str)
sys.exit(1)
def _validate_instr_field_size(self, field, value, size):
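    """Abort if VALUE does not fit into SIZE bytes."""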
if value >= 2**(size * 8):
print(
red("ERROR: instruction field (%s) larger than size. ABORTING!" %
field))
sys.exit(1)
def _opcode2int(self):
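    """Map the opcode enum to its integer encoding in the output byte stream."""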
if TLULFuzzInstr.opcode_type == "constant":
# Opcode is mapped to a fixed value
opcode_int = int(self.opcode)
else:
# Opcode is mapped to a range
max_opcode_value = 2**(TLULFuzzInstr.opcode_size * 8)
num_opcodes = len(TLULOpcode) - 1 # subtract 1 since 0 is "invalid"
opcode_int = (self.opcode - 1) * int(max_opcode_value / num_opcodes) + 1
return opcode_int
def to_bytes(self):
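    """Serialize this instruction into the byte frame consumed by the fuzz harness."""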
# create OPCODE bytes from integer
opcode_int = self._opcode2int()
opcode_bytes = opcode_int.to_bytes(TLULFuzzInstr.opcode_size,
byteorder=TLULFuzzInstr.endianness,
signed=False)
# create DATA bytes from integer value
address_bytes = self.address.to_bytes(TLULFuzzInstr.address_size,
byteorder=TLULFuzzInstr.endianness,
signed=False)
# create DATA bytes from integer value
data_bytes = self.data.to_bytes(TLULFuzzInstr.data_size,
byteorder=TLULFuzzInstr.endianness,
signed=False)
# create DIRECT_INTPUTS bytes from integer value (if any exist)
if self.direct_in is not None:
direct_in_bytes = self.direct_in.to_bytes(
TLULFuzzInstr.direct_in_size,
byteorder=TLULFuzzInstr.endianness,
signed=False)
# Build the instruction frame
if TLULFuzzInstr.instr_type == "fixed":
if self.direct_in is not None:
return opcode_bytes + address_bytes + data_bytes + direct_in_bytes
else:
return opcode_bytes + address_bytes + data_bytes
else:
if self.direct_in is not None:
# Include DIRECT_IN bits in instruction
if self.opcode == TLULOpcode.wait:
return opcode_bytes + direct_in_bytes
elif self.opcode == TLULOpcode.read:
return opcode_bytes + address_bytes + direct_in_bytes
else:
return opcode_bytes + address_bytes + data_bytes + direct_in_bytes
else:
# DO NOT include DIRECT_IN bits in instruction
if self.opcode == TLULOpcode.wait:
return opcode_bytes
elif self.opcode == TLULOpcode.read:
return opcode_bytes + address_bytes
else:
return opcode_bytes + address_bytes + data_bytes
<file_sep>#!/usr/bin/python3
# Copyright 2020 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import subprocess
import sys
import prettytable
import yaml
from string_color import color_str_green as green
from string_color import color_str_red as red
from string_color import color_str_yellow as yellow
from tlul_fuzz_instr import TLULFuzzInstr
def dump_seed_file_to_stdin(output_file_name):
"""Dumps generated seed file in hex format to STDIN."""
print(output_file_name + ":")
cmd = ["xxd", output_file_name]
try:
subprocess.check_call(cmd)
except subprocess.CalledProcessError:
print(red("ERROR: cannot dump generated seed file."))
sys.exit(1)
def gen_seed(input_yaml_file_name, output_file_name, verbose):
"""Parse YAML HW fuzzing opcodes and translates them in binary to file."""
print(f"Creating fuzzer seed from YAML: {input_yaml_file_name} ...")
with open(input_yaml_file_name, "r") as fp:
fuzz_opcodes = yaml.load(fp, Loader=yaml.Loader)
with open(output_file_name, "wb") as fp:
for instr in fuzz_opcodes:
hwf_instr = TLULFuzzInstr(instr)
#if verbose:
#print(hwf_instr)
for _ in range(hwf_instr.repeat):
        instr_bytes = hwf_instr.to_bytes()
        fp.write(instr_bytes)
print(green("Seed file generated!"))
if verbose:
dump_seed_file_to_stdin(output_file_name)
def _print_configs(args):
# Create table to print configurations to STDIN
config_table = prettytable.PrettyTable(header=False)
config_table.title = "Seed Generation Parameters"
config_table.field_names = ["Parameter", "Value"]
# Add parameter values to table
config_table.add_row(["Input (YAML) Filename", args.input_file_name])
config_table.add_row(["Output Filename", args.output_file_name])
  # config_table.add_row(["Frame Type", args.frame_type])  # omitted: args has no frame_type attribute (see --instr-type)
config_table.add_row(["Opcode Size (# bytes)", args.opcode_size])
config_table.add_row(["Address Size (# bytes)", args.address_size])
config_table.add_row(["Data Size (# bytes)", args.data_size])
# Print table
config_table.align = "l"
print(yellow(config_table.get_string()))
def parse_args(argv):
module_description = "OpenTitan Fuzzing Seed Composer"
parser = argparse.ArgumentParser(description=module_description)
parser.add_argument("--opcode-type",
default=TLULFuzzInstr.opcode_type,
choices=[
"constant",
"mapped",
],
type=str,
help="Fuzzing instruction opcode type.")
parser.add_argument("--instr-type",
default=TLULFuzzInstr.instr_type,
choices=[
"fixed",
"variable",
],
type=str,
help="Fuzzing instruction frame type.")
parser.add_argument("--endianness",
default=TLULFuzzInstr.endianness,
choices=[
"little",
"big",
],
type=str,
help="Endianness of HW Fuzzing Instruction frames.")
parser.add_argument("--opcode-size",
default=TLULFuzzInstr.opcode_size,
type=int,
help="Size of opcode field in bytes.")
parser.add_argument("--address-size",
default=TLULFuzzInstr.address_size,
type=int,
help="Size of address field in bytes")
parser.add_argument("--data-size",
default=TLULFuzzInstr.data_size,
type=int,
help="Size of data field in bytes.")
parser.add_argument("--direct-in-size",
default=TLULFuzzInstr.direct_in_size,
type=int,
help="Size of direct inputs field in bytes.")
parser.add_argument("-v",
"--verbose",
action="store_true",
help="Enable verbose status messages.")
parser.add_argument("input_file_name",
metavar="input.yaml",
help="Input configuration YAML file.")
parser.add_argument("output_file_name",
metavar="afl_seed.hwf",
help="Name of output seed file (hex).")
args = parser.parse_args(argv)
if args.verbose:
_print_configs(args)
return args
def config_tlul_fuzz_instr(args):
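  """Apply the command-line frame-format options to the TLULFuzzInstr class attributes."""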
TLULFuzzInstr.opcode_type = args.opcode_type
TLULFuzzInstr.instr_type = args.instr_type
TLULFuzzInstr.opcode_size = args.opcode_size
TLULFuzzInstr.address_size = args.address_size
TLULFuzzInstr.data_size = args.data_size
TLULFuzzInstr.direct_in_size = args.direct_in_size
TLULFuzzInstr.endianness = args.endianness
def main(argv):
args = parse_args(argv)
config_tlul_fuzz_instr(args)
gen_seed(args.input_file_name, args.output_file_name, args.verbose)
if __name__ == "__main__":
main(sys.argv[1:])
<file_sep>#!/usr/bin/python
import os
import sys
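# Thin wrapper: builds the assembly jar, then forwards all CLI arguments to the
# Scala CoverageAnalysis entry point.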
print("\nCreating jar file...\n")
os.system("sbt assembly")
print("\nCalling CoverageAnalysis...\n")
arguments = ' '.join(sys.argv[1:])
os.system("java -cp target/scala-2.12/rtl-fuzz-lab-assembly-0.1.jar fuzzing.coverage.CoverageAnalysis {SCALA_ARGS}".format(SCALA_ARGS=arguments))
sys.exit(0)
<file_sep>all: afl-proxy
afl-proxy: afl-proxy.c
$(CC) $(CFLAGS) $< -o $@
clean:
rm -f afl-proxy
|
9ee4f480a2967a21149b01231093fec72b17ccc3
|
[
"Markdown",
"Python",
"Makefile",
"Shell"
] | 11
|
Python
|
ekiwi/rtl-fuzz-lab
|
bc5851993134017004f30118fbd35542a6571fac
|
04ac5d54fee2a6ca8799965fef5375233b5f6a57
|
refs/heads/main
|
<repo_name>Ohskie3/Hot-Restaurant-<file_sep>/db/tableData.js
const tableArr = [
{
customerName: 'Kyle',
customerEmail: '<EMAIL>',
customerID: '1234',
phoneNumber: '123-456-7890'
}
];
module.exports = tableArr
|
b3b403eb1ed821850f33c563760ec495f778f50d
|
[
"JavaScript"
] | 1
|
JavaScript
|
Ohskie3/Hot-Restaurant-
|
83050997ee3ef8792269aff5b4d6d969adeb654a
|
7637f2a6d536ee21ac2f152aa5b14b3a764d4997
|
refs/heads/master
|
<file_sep>export const style_roads = [
{
'minResolution': 0,
'maxResolution': 1.2,
'style': {
'rail': {
'colors': ['#919191'],
'dasharray': '2.0 2.0',
'widths': [2],
'caps': ['butt'],
'opacity': 1,
'useLabels': false
},
'motorway': {
'colors': ['#BA6E27', '#FD923A'],
'widths': [17, 15],
'caps': ['butt', 'round'],
'opacity': 1
},
'motorway_link': {
'colors': ['#BA6E27', '#FD923A'],
'widths': [17, 15],
'caps': ['butt', 'round'],
'opacity': 1
},
'trunk': {
'colors': ['#DD9F11', '#FFC345'],
'widths': [17, 15],
'caps': ['butt', 'round'],
'opacity': 1
},
'trunk_link': {
'colors': ['#C1B59D', '#FFC345'],
'widths': [17, 15],
'caps': ['butt', 'round'],
'opacity': 1
},
'primary': {
'colors': ['#C1B59D', '#FFFD8B'],
'widths': [16, 13],
'caps': ['butt', 'round'],
'opacity': 1
},
'primary_link': {
'colors': ['#888888', '#FFFFFF'],
'widths': [16,14],
'caps': ['butt', 'round'],
'opacity': 1,
},
'secondary': {
'colors': ['#C1B59D', '#FFFFFF'],
'widths': [15, 12],
'caps': ['butt', 'round'],
'opacity': 1
},
'secondary_link': {
'colors': ['#888888', '#FFFFFF'],
'widths': [15,13],
'caps': ['butt', 'round'],
'opacity': 1
},
'tertiary': {
'colors': ['#C1B59D', '#FFFFFF'],
'widths': [14, 11],
'caps': ['butt', 'round'],
'opacity': 1
},
'tertiary_link': {
'colors': ['#888888', '#FFFFFF'],
'widths': [14, 12],
'caps': ['butt', 'round'],
'opacity': 1
},
'classified': {
'colors': ['#888888', '#FFFFFF'],
'widths': [13, 11],
'caps': ['butt', 'round'],
'opacity': 1
},
'unclassified': {
'colors': ['#C1B59D', '#FFFFFF'],
'widths': [13, 11],
'caps': ['butt', 'round'],
'opacity': 1
},
'residential': {
'colors': ['#C1B59D', '#FFFFFF'],
'widths': [13, 11],
'caps': ['butt', 'round'],
'opacity': 1
},
'service': {
'colors': ['#C1B59D', '#FFFFFF'],
'widths': [6, 4],
'caps': ['butt', 'round'],
'opacity': 1
},
'road': {
'colors': ['#C1B59D', '#FFFFFF'],
'widths': [13, 11],
'caps': ['butt', 'round'],
'opacity': 1
},
'living_street': {
'colors': ['#C1B59D', '#FFFFFF'],
'widths': [13, 11],
'caps': ['butt', 'round'],
'opacity': 1
},
'pedestrian': {
'colors': ['#C1B59D', '#FAFAF5'],
'widths': [9, 7],
'caps': ['butt', 'round'],
'opacity': 1
},
'track': {
'colors': ['#C1B59D'],
'widths': [2],
'caps': ['round'],
'dasharray': '2.0 3.0',
'opacity': 1
},
'footway': {
'colors': ['#C1B59D'],
'widths': [2],
'caps': ['round'],
'dasharray': '2.0 3.0',
'opacity': 1
}
}
},
{
'minResolution': 1.2,
'maxResolution': 4.77,
'style': {
'rail': {
'colors': ['#919191'],
'dasharray': '2.0 2.0',
'widths': [1],
'caps': ['butt'],
'opacity': 0.8,
'useLabels': false
},
'motorway': {
'colors': ['#BA6E27', '#FD923A'],
'widths': [9, 7],
'caps': ['butt', 'round'],
'opacity': 1
},
'motorway_link': {
'colors': ['#BA6E27', '#FD923A'],
'widths': [9, 7],
'caps': ['butt', 'round'],
'opacity': 1
},
'trunk': {
'colors': ['#DD9F11', '#FFC345'],
'widths': [8, 6],
'caps': ['butt', 'round'],
'opacity': 1
},
'trunk_link': {
'colors': ['#C1B59D', '#FFC345'],
'widths': [8, 6],
'caps': ['butt', 'round'],
'opacity': 1
},
'primary': {
'colors': ['#C1B59D', '#FFFD8B'],
'widths': [9, 7],
'caps': ['butt', 'round'],
'opacity': 1
},
'primary_link': {
'colors': ['#888888', '#FFFFFF'],
'widths': [9,7],
'caps': ['butt', 'round'],
'opacity': 1,
},
'secondary': {
'colors': ['#C1B59D', '#FFFFFF'],
'widths': [8, 6],
'caps': ['butt', 'round'],
'opacity': 1
},
'secondary_link': {
'colors': ['#888888', '#FFFFFF'],
'widths': [8,6],
'caps': ['butt', 'round'],
'opacity': 1
},
'tertiary': {
'colors': ['#C1B59D', '#FFFFFF'],
'widths': [7, 5],
'caps': ['butt', 'round'],
'opacity': 1
},
'tertiary_link': {
'colors': ['#888888', '#FFFFFF'],
'widths': [7, 5],
'caps': ['butt', 'round'],
'opacity': 1
},
'classified': {
'colors': ['#888888', '#FFFFFF'],
'widths': [5, 3],
'caps': ['butt', 'round'],
'opacity': 1
},
'unclassified': {
'colors': ['#C1B59D', '#FFFFFF'],
'widths': [5, 3],
'caps': ['butt', 'round'],
'opacity': 1
},
'residential': {
'colors': ['#C1B59D', '#FFFFFF'],
'widths': [4, 2],
'caps': ['butt', 'round'],
'opacity': 1
},
'service': {
'colors': ['#C1B59D', '#FFFFFF'],
'widths': [4, 2],
'caps': ['butt', 'round'],
'opacity': 1
},
'road': {
'colors': ['#C1B59D', '#FFFFFF'],
'widths': [4, 2],
'caps': ['butt', 'round'],
'opacity': 1
},
'living_street': {
'colors': ['#C1B59D', '#FFFFFF'],
'widths': [4, 2],
'caps': ['butt', 'round'],
'opacity': 1
},
'pedestrian': {
'colors': ['#C1B59D', '#FAFAF5'],
'widths': [4, 2],
'caps': ['butt', 'round'],
'opacity': 1
},
'track': {
'colors': ['#C1B59D'],
'widths': [1],
'caps': ['round'],
'dasharray': '2.0 3.0',
'opacity': 1
},
'footway': {
'colors': ['#C1B59D'],
'widths': [1],
'caps': ['round'],
'dasharray': '2.0 3.0',
'opacity': 1
}
}
}, {
'minResolution': 4.77,
'maxResolution': 19.1,
'style': {
'rail': {
'colors': ['#919191'],
'dasharray': '2.0 2.0',
'widths': [0.7],
'caps': ['butt'],
'opacity': 0.7,
'useLabels': false
},
'motorway': {
'colors': ['#BA6E27', '#FD923A'],
'widths': [7, 5],
'caps': ['butt', 'round'],
'opacity': 1
},
'motorway_link': {
'colors': ['#BA6E27', '#FD923A'],
'widths': [7, 5],
'caps': ['butt', 'round'],
'opacity': 1
},
'trunk': {
'colors': ['#DD9F11', '#FFC345'],
'widths': [6, 4],
'caps': ['butt', 'round'],
'opacity': 1
},
'trunk_link': {
'colors': ['#C1B59D', '#FFC345'],
'widths': [6, 4],
'caps': ['butt', 'round'],
'opacity': 1
},
'primary': {
'colors': ['#C1B59D', '#FFFD8B'],
'widths': [5, 3],
'caps': ['butt', 'round'],
'opacity': 1
},
'primary_link': {
'colors': ['#888888', '#FFFFFF'],
'widths': [5, 3],
'caps': ['butt', 'round'],
'opacity': 1
},
'secondary': {
'colors': ['#C1B59D', '#FFFFFF'],
'widths': [5, 3],
'caps': ['butt', 'round'],
'opacity': 1
},
'secondary_link': {
'colors': ['#888888', '#FFFFFF'],
'widths': [5, 3],
'caps': ['butt', 'round'],
'opacity': 1
},
'tertiary': {
'colors': ['#C1B59D', '#FFFFFF'],
'widths': [5, 3],
'caps': ['butt', 'round'],
'opacity': 1
},
'tertiary_link': {
'colors': ['#888888', '#FFFFFF'],
'widths': [5, 3],
'caps': ['butt', 'round'],
'opacity': 1
},
'classified': {
'colors': ['#888888', '#FFFFFF'],
'widths': [4, 2],
'caps': ['butt', 'round'],
'opacity': 1,
'useLabels': false
},
'unclassified': {
'colors': ['#C1B59D', '#FFFFFF'],
'widths': [4, 2],
'caps': ['butt', 'round'],
'opacity': 1,
'useLabels': false
},
'residential': {
'colors': ['#C1B59D'],
'widths': [1],
'caps': ['round'],
'opacity': 1,
'useLabels': false
},
'living_street': {
'colors': ['#C1B59D', '#FFFFFF'],
'widths': [4, 2],
'caps': ['butt', 'round'],
'opacity': 1,
'useLabels': false
},
'pedestrian': {
'colors': ['#C1B59D', '#FAFAF5'],
'widths': [2.5, 0.75],
'caps': ['butt', 'round'],
'opacity': 1,
'useLabels': false
},
'track': {
'colors': ['#C1B59D'],
'widths': [1],
'caps': ['round'],
'dasharray': '2.0 3.0',
'opacity': 1,
'useLabels': false
},
'footway': {
'colors': ['#C1B59D'],
'widths': [1],
'caps': ['round'],
'dasharray': '2.0 3.0',
'opacity': 1,
'useLabels': false
}
}
}, {
'minResolution': 19.1,
'maxResolution': 38.21,
'style': {
'rail': {
'colors': ['#919191'],
'dasharray': '2.0 2.0',
'widths': [0.7],
'caps': ['butt'],
'opacity': 0.7,
'useLabels': false
},
'motorway': {
'colors': ['#BA6E27', '#FD923A'],
'widths': [6, 4],
'caps': ['butt', 'round'],
'opacity': 1
},
'motorway_link': {
'colors': ['#BA6E27', '#FD923A'],
'widths': [4, 2],
'caps': ['butt', 'round'],
'opacity': 1
},
'trunk': {
'colors': ['#DD9F11', '#FFC345'],
'widths': [6, 4],
'caps': ['butt', 'round'],
'opacity': 1
},
'primary': {
'colors': ['#C1B59D', '#FFFD8B'],
'widths': [5, 3],
'caps': ['butt', 'round'],
'opacity': 1
},
'secondary': {
'colors': ['#C1B59D', '#FFFFFF'],
'widths': [5, 3],
'caps': ['butt', 'round'],
'opacity': 1
},
'tertiary': {
'colors': ['#C1B59D', '#FFFFFF'],
'widths': [5, 3],
'caps': ['butt', 'round'],
'opacity': 1
}
}
}, {
'minResolution': 38.21,
'maxResolution': 80,
'style': {
'rail': {
'colors': ['#919191'],
'dasharray': '2.0 2.0',
'widths': [0.7],
'caps': ['butt'],
'opacity': 0.7,
'useLabels': false
},
'motorway': {
'colors': ['#BA6E27', '#FD923A'],
'widths': [5, 3],
'caps': ['butt', 'round'],
'opacity': 1
},
'motorway_link': {
'colors': ['#BA6E27', '#FD923A'],
'widths': [4, 2],
'caps': ['butt', 'round'],
'opacity': 1
},
'trunk': {
'colors': ['#DD9F11', '#FFC345'],
'widths': [5, 3],
'caps': ['butt', 'round'],
'opacity': 1
},
'primary': {
'colors': ['#C1B59D', '#FFFD8B'],
'widths': [4, 2],
'caps': ['butt', 'round'],
'opacity': 1
},
'secondary': {
'colors': ['#C1B59D', '#FFFFFF'],
'widths': [4, 2],
'caps': ['butt', 'round'],
'opacity': 1
},
'tertiary': {
'colors': ['#C1B59D', '#FFFFFF'],
'widths': [4, 2],
'caps': ['butt', 'round'],
'opacity': 1
}
}
}, {
'minResolution': 80,
'maxResolution': 611.49,
'style': {
'rail': {
'colors': ['#919191'],
'dasharray': '2.0 2.0',
'widths': [0.7],
'caps': ['butt'],
'opacity': 0.7,
'useLabels': false
},
'motorway': {
'colors': ['#BA6E27', '#FD923A'],
'widths': [4, 2],
'caps': ['butt', 'round'],
'opacity': 1
},
'motorway_link': {
'colors': ['#BA6E27', '#FD923A'],
'widths': [4, 2],
'caps': ['butt', 'round'],
'opacity': 1
},
'trunk': {
'colors': ['#DD9F11', '#FFC345'],
'widths': [4, 2],
'caps': ['butt', 'round'],
'opacity': 1
},
'primary': {
'colors': ['#FFFD8B'],
'widths': [2],
'caps': ['butt'],
'opacity': 1
},
'secondary': {
'colors': ['#FFFFFF'],
'widths': [2],
'caps': ['butt'],
'opacity': 1
},
'tertiary': {
'colors': ['#FFFFFF'],
'widths': [2],
'caps': ['butt'],
'opacity': 1
}
}
}, {
'minResolution': 611.49,
'maxResolution': 1222,
'style': {
'motorway': {
'colors': ['#FD923A'],
'widths': [2],
'caps': ['butt', 'round'],
'opacity': 0.8
},
'motorway_link': {
'colors': ['#FD923A'],
'widths': [2],
'caps': ['butt'],
'opacity': 0.8
},
'trunk': {
'colors': ['#FFC345'],
'widths': [2],
'caps': ['butt'],
'opacity': 0.8
},
'primary': {
'colors': ['#FFFD8B'],
'widths': [1],
'caps': ['butt'],
'opacity': 0.8
},
'secondary': {
'colors': ['#FFFFFF'],
'widths': [1],
'caps': ['butt'],
'opacity': 0.8
},
'tertiary': {
'colors': ['#FFFFFF'],
'widths': [1],
'caps': ['butt'],
'opacity': 0.8
}
}
}, {
'minResolution': 1222,
'maxResolution': 3000,
'style': {
'motorway': {
'colors': ['#FD923A'],
'widths': [1],
'caps': ['round'],
'opacity': 0.5
},
'motorway_link': {
'colors': ['#FD923A'],
'widths': [1],
'caps': ['butt'],
'opacity': 0.5
},
'trunk': {
'colors': ['#FFC345'],
'widths': [1],
'caps': ['butt'],
'opacity': 0.5
}
}
}];
<file_sep>export const style_waterareas = [{
'minResolution': 0,
'maxResolution': 99999,
'style': {
'riverbank': {
'fillColor': '#99B3CC',
'strokeColor': '#99B3CC',
'fillOpacity': 1,
'textFillColor': '#003fff',
'textFillWidth': 3,
'textStrokeColor': '#fff',
'textStrokeWidth': 5,
'textPlacement': 'point'
},
'water': {
'fillColor': '#99B3CC',
'strokeColor': '#99B3CC',
'fillOpacity': 1,
'textFillColor': '#003fff',
'textFillWidth': 3,
'textStrokeColor': '#fff',
'textStrokeWidth': 5,
'textPlacement': 'point'
}
}
}];
<file_sep>import TileQueue from 'ol/TileQueue';
import stringify from 'json-stringify-safe';
import { get } from 'ol/proj';
import { inView } from 'ol/layer/Layer';
import { getTilePriority as tilePriorityFunction } from 'ol/TileQueue';
import terrestrisVectorTiles from './index';
const worker = self;
let frameState;
let pixelRatio;
let rendererTransform;
const canvas = new OffscreenCanvas(1, 1);
// OffscreenCanvas does not have a style, so we mock it
canvas.style = {};
const context = canvas.getContext('2d');
const layers = [];
const vectortileslayer = new terrestrisVectorTiles({
useOffscreenCanvas: true,
calledFromWorker: true
});
layers.push(vectortileslayer);
vectortileslayer.getRenderer().useContainer = function (target, transform) {
this.containerReused = this.getLayer() !== layers[0];
this.canvas = canvas;
this.context = context;
this.container = {
firstElementChild: canvas,
style: {
opacity: vectortileslayer.getOpacity(),
}
};
rendererTransform = transform;
};
worker.postMessage({action: 'requestRender'});
// Minimal map-like functionality for rendering
const tileQueue = new TileQueue(
(tile, tileSourceKey, tileCenter, tileResolution) =>
tilePriorityFunction(
frameState,
tile,
tileSourceKey,
tileCenter,
tileResolution
),
() => worker.postMessage({action: 'requestRender'})
);
const maxTotalLoading = 8;
const maxNewLoads = 2;
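// Render the vector tile layer into the OffscreenCanvas whenever the main
// thread posts a new frameState, then transfer the result back as an ImageBitmap.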
worker.addEventListener('message', (event) => {
if (event.data.action !== 'render') {
return;
}
frameState = event.data.frameState;
if (!pixelRatio) {
pixelRatio = frameState.pixelRatio;
}
frameState.tileQueue = tileQueue;
frameState.viewState.projection = get('EPSG:3857');
layers.forEach((layer) => {
if (inView(layer.getLayerState(), frameState.viewState)) {
const renderer = layer.getRenderer();
renderer.renderFrame(frameState, canvas);
}
});
layers.forEach(
(layer) => layer.getRenderer().context && layer.renderDeclutter(frameState)
);
if (tileQueue.getTilesLoading() < maxTotalLoading) {
tileQueue.reprioritize();
tileQueue.loadMoreTiles(maxTotalLoading, maxNewLoads);
}
const imageData = canvas.transferToImageBitmap();
worker.postMessage(
{
action: 'rendered',
imageData: imageData,
transform: rendererTransform,
frameState: JSON.parse(stringify(frameState)),
},
[imageData]
);
});
<file_sep>export const style_bluebackground = [{
'minResolution': 0,
'maxResolution': 99999,
'style': {
'': {
'fillColor': '#074ca9',
'fillOpacity': 0.5,
'zIndex': -3
}
}
}];
<file_sep># terrestris vectortiles
A simple library that makes use of the freely available, world-wide terrestris vector tiles in Mapbox MVT format, built from OpenStreetMap data.
The main method `getOSMLayer` creates an OpenLayers VectorTile layer, which you
can use directly in your OpenLayers application.

# demo
A simple demonstration application can be found here:
https://demo.terrestris.de/democlient/index.html
# how to use
Install the module e.g. by
`npm i @terrestris/vectortiles`
Then, in your code, import as follows:
```
import terrestrisVectorTiles from '@terrestris/vectortiles';
```
Then you can create the OSM-VectorTile layer by calling
```
const layer = new terrestrisVectorTiles({map});
```
or with custom configuration and style:
```
const layer = new terrestrisVectorTiles({
map: map,
useOffscreenCanvas: true,
declutter: false,
usePlacesLabels: false,
style_roads: [{
...
}]
});
```
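For a complete picture, here is a minimal sketch of wiring the layer into a plain OpenLayers map. It assumes the constructed object behaves like a regular OpenLayers layer (so `map.addLayer` works on it); the target element id, center and zoom are illustrative values only.
```
import Map from 'ol/Map';
import View from 'ol/View';
import { fromLonLat } from 'ol/proj';
import terrestrisVectorTiles from '@terrestris/vectortiles';
// Basic map setup; target, center and zoom are example values only.
const map = new Map({
  target: 'map',
  view: new View({
    center: fromLonLat([7.1, 50.73]),
    zoom: 12
  })
});
// Create the OSM vector tile layer and attach it to the map.
const layer = new terrestrisVectorTiles({map: map});
// If passing `map` does not already attach the layer, add it explicitly:
map.addLayer(layer);
```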
<file_sep>export const style_countries = [{
'minResolution': 0,
'maxResolution': 99999,
'style': {
'': {
'strokeColor': '#7d887d',
'strokeWidth': 2,
'strokeOpacity': 0.5,
'fillColor': '#f2efe9',
'useLabels': false,
'zIndex': -2
}
}
}];
<file_sep>export const style_landusage = [{
'minResolution': 0,
'maxResolution': 600,
'style': {
'forest': {
'useLabels': false,
'fillColor': '#CCDEAC',
'zIndex': -1
},
'wood': {
'useLabels': false,
'fillColor': '#CCDEAC',
'zIndex': -1
},
'farm': {
'useLabels': false,
'fillColor': '#DCE8A9',
'zIndex': -1
},
'meadow': {
'useLabels': false,
'fillColor': '#DCE8A9',
'zIndex': -1
},
'grass': {
'useLabels': false,
'fillColor': '#DCE8A9',
'zIndex': -1
},
'pedestrian': {
'useLabels': false,
'fillColor': '#FAFAF5',
'zIndex': -1
},
'industrial': {
'useLabels': false,
'fillColor': '#D1D0CD',
'zIndex': -1
},
'commercial': {
'useLabels': false,
'fillColor': '#D1D0CD',
'zIndex': -1
},
'parking': {
'useLabels': false,
'fillColor': '#D1D0CD',
'zIndex': -1
},
'cemetery': {
'useLabels': false,
'fillColor': '#D1D1D1',
'zIndex': -1,
'andFilters': {},
},
'park': {
'useLabels': false,
'fillColor': '#B5D29C',
'zIndex': 1
},
'golf_course': {
'useLabels': false,
'fillColor': '#B5D29C',
'zIndex': -1
},
'hospital': {
'useLabels': false,
'fillColor': '#E5C6C3',
'zIndex': -1
},
'school': {
'useLabels': false,
'fillColor': '#DED2AC',
'zIndex': -1
},
'college': {
'useLabels': false,
'fillColor': '#DED2AC',
'zIndex': -1
},
'university': {
'useLabels': false,
'fillColor': '#DED2AC',
'zIndex': -1
},
'sports_center': {
'useLabels': false,
'fillColor': '#DED1AB',
'zIndex': -1
},
'stadium': {
'useLabels': false,
'fillColor': '#DED1AB',
'zIndex': -1
},
'pitch': {
'useLabels': false,
'fillColor': '#DED1AB',
'zIndex': -1
},
'residential': {
'useLabels': false,
'fillColor': '#F2EFE9',
'zIndex': -1
}
}
}];
<file_sep>export const style_buildings = [{
'minResolution': 0,
'maxResolution': 1.2,
'style': {
'yes': {
'fillColor': '#D1D1D1',
'strokeColor': '#B3B3B3',
'textPlacement': 'point'
},
'house': {
'fillColor': '#D1D1D1',
'strokeColor': '#B3B3B3',
'textPlacement': 'point'
},
'apartments': {
'fillColor': '#D1D1D1',
'strokeColor': '#B3B3B3',
'textPlacement': 'point'
},
'terrace': {
'fillColor': '#D1D1D1',
'strokeColor': '#B3B3B3',
'textPlacement': 'point'
},
'garage': {
'fillColor': '#D1D1D1',
'strokeColor': '#B3B3B3',
'textPlacement': 'point'
},
'detached': {
'fillColor': '#D1D1D1',
'strokeColor': '#B3B3B3',
'textPlacement': 'point'
},
'dormitory': {
'fillColor': '#D1D1D1',
'strokeColor': '#B3B3B3',
'textPlacement': 'point'
},
'residential': {
'fillColor': '#D1D1D1',
'strokeColor': '#B3B3B3',
'textPlacement': 'point'
},
'university': {
'fillColor': '#D1D1D1',
'strokeColor': '#B3B3B3',
'textPlacement': 'point'
},
'school': {
'fillColor': '#D1D1D1',
'strokeColor': '#B3B3B3',
'textPlacement': 'point'
},
'hospital': {
'fillColor': '#ce8f8f',
'strokeColor': '#B3B3B3',
'textPlacement': 'point'
},
'construction': {
'fillColor': '#D1D1D1',
'strokeColor': '#B3B3B3',
'textPlacement': 'point'
},
'chapel': {
'fillColor': '#D1D1D1',
'strokeColor': '#B3B3B3',
'textPlacement': 'point'
},
'cathedral': {
'fillColor': '#D1D1D1',
'strokeColor': '#B3B3B3',
'textPlacement': 'point'
},
'hotel': {
'fillColor': '#D1D1D1',
'strokeColor': '#B3B3B3',
'textPlacement': 'point'
},
'commercial': {
'fillColor': '#D1D1D1',
'strokeColor': '#B3B3B3',
'textPlacement': 'point'
},
'train_station': {
'fillColor': '#D1D1D1',
'strokeColor': '#B3B3B3',
'textPlacement': 'point'
},
}
}, {
'minResolution': 1.2,
'maxResolution': 2.4,
'style': {
'yes': {
'fillColor': '#D1D1D1',
'fillOpacity': 0.7,
'strokeColor': '#B3B3B3',
'useLabels': false
},
'house': {
'fillColor': '#D1D1D1',
'fillOpacity': 0.7,
'strokeColor': '#B3B3B3',
'useLabels': false
},
'apartments': {
'fillColor': '#D1D1D1',
'fillOpacity': 0.7,
'strokeColor': '#B3B3B3',
'useLabels': false
},
'terrace': {
'fillColor': '#D1D1D1',
'fillOpacity': 0.7,
'strokeColor': '#B3B3B3',
'useLabels': false
},
'garage': {
'fillColor': '#D1D1D1',
'fillOpacity': 0.7,
'strokeColor': '#B3B3B3',
'useLabels': false
},
'detached': {
'fillColor': '#D1D1D1',
'fillOpacity': 0.7,
'strokeColor': '#B3B3B3',
'useLabels': false
},
'dormitory': {
'fillColor': '#D1D1D1',
'fillOpacity': 0.7,
'strokeColor': '#B3B3B3',
'useLabels': false
},
'residential': {
'fillColor': '#D1D1D1',
'fillOpacity': 0.7,
'strokeColor': '#B3B3B3',
'useLabels': false
},
'university': {
'fillColor': '#D1D1D1',
'fillOpacity': 0.7,
'strokeColor': '#B3B3B3',
'useLabels': false
},
'school': {
'fillColor': '#D1D1D1',
'fillOpacity': 0.7,
'strokeColor': '#B3B3B3',
'useLabels': false
},
'hospital': {
'fillColor': '#ce8f8f',
'strokeColor': '#B3B3B3',
'textPlacement': 'point'
},
'construction': {
'fillColor': '#D1D1D1',
'fillOpacity': 0.7,
'strokeColor': '#B3B3B3',
'useLabels': false
},
'chapel': {
'fillColor': '#D1D1D1',
'fillOpacity': 0.7,
'strokeColor': '#B3B3B3',
'useLabels': false
},
'cathedral': {
'fillColor': '#D1D1D1',
'fillOpacity': 0.7,
'strokeColor': '#B3B3B3',
'useLabels': false
},
'hotel': {
'fillColor': '#D1D1D1',
'fillOpacity': 0.7,
'strokeColor': '#B3B3B3',
'useLabels': false
},
'commercial': {
'fillColor': '#D1D1D1',
'fillOpacity': 0.7,
'strokeColor': '#B3B3B3',
'useLabels': false
},
'train_station': {
'fillColor': '#D1D1D1',
'fillOpacity': 0.7,
'strokeColor': '#B3B3B3',
'useLabels': false
},
}
}, {
'minResolution': 2.4,
'maxResolution': 5,
'style': {
'yes': {
'fillColor': '#D1D1D1',
'fillOpacity': 0.3,
'strokeColor': '#B3B3B3',
'useLabels': false
},
'hospital': {
'fillColor': '#ce8f8f',
'strokeColor': '#B3B3B3',
'useLabels': false
}
}
}];
|
a6e6924fb696f83eafc84a798037960c1a6b6cf5
|
[
"JavaScript",
"Markdown"
] | 8
|
JavaScript
|
terrestris/vectortiles
|
66eaa1827f88f2a932323393504397dde20d3f33
|
7e6cf6e9a7d49f1a0d43b44eccbc00efb701e8ea
|
refs/heads/master
|
<file_sep>// generated from rosidl_generator_cpp/resource/idl__traits.hpp.em
// with input from vive_interfaces:msg\Hmd.idl
// generated code does not contain a copyright notice
#ifndef VIVE_INTERFACES__MSG__DETAIL__HMD__TRAITS_HPP_
#define VIVE_INTERFACES__MSG__DETAIL__HMD__TRAITS_HPP_
#include "vive_interfaces/msg/detail/hmd__struct.hpp"
#include <rosidl_runtime_cpp/traits.hpp>
#include <stdint.h>
#include <type_traits>
namespace rosidl_generator_traits
{
template<>
inline const char * data_type<vive_interfaces::msg::Hmd>()
{
return "vive_interfaces::msg::Hmd";
}
template<>
inline const char * name<vive_interfaces::msg::Hmd>()
{
return "vive_interfaces/msg/Hmd";
}
template<>
struct has_fixed_size<vive_interfaces::msg::Hmd>
: std::integral_constant<bool, true> {};
template<>
struct has_bounded_size<vive_interfaces::msg::Hmd>
: std::integral_constant<bool, true> {};
template<>
struct is_message<vive_interfaces::msg::Hmd>
: std::true_type {};
} // namespace rosidl_generator_traits
#endif // VIVE_INTERFACES__MSG__DETAIL__HMD__TRAITS_HPP_
<file_sep>// generated from rosidl_typesupport_c/resource/idl__type_support.cpp.em
// with input from vive_interfaces:msg\Controller.idl
// generated code does not contain a copyright notice
#include "cstddef"
#include "rosidl_runtime_c/message_type_support_struct.h"
#include "vive_interfaces/msg/rosidl_typesupport_c__visibility_control.h"
#include "vive_interfaces/msg/detail/controller__struct.h"
#include "rosidl_typesupport_c/identifier.h"
#include "rosidl_typesupport_c/message_type_support_dispatch.h"
#include "rosidl_typesupport_c/type_support_map.h"
#include "rosidl_typesupport_c/visibility_control.h"
#include "rosidl_typesupport_interface/macros.h"
namespace vive_interfaces
{
namespace msg
{
namespace rosidl_typesupport_c
{
typedef struct _Controller_type_support_ids_t
{
const char * typesupport_identifier[3];
} _Controller_type_support_ids_t;
static const _Controller_type_support_ids_t _Controller_message_typesupport_ids = {
{
"rosidl_typesupport_connext_c", // ::rosidl_typesupport_connext_c::typesupport_identifier,
"rosidl_typesupport_fastrtps_c", // ::rosidl_typesupport_fastrtps_c::typesupport_identifier,
"rosidl_typesupport_introspection_c", // ::rosidl_typesupport_introspection_c::typesupport_identifier,
}
};
typedef struct _Controller_type_support_symbol_names_t
{
const char * symbol_name[3];
} _Controller_type_support_symbol_names_t;
#define STRINGIFY_(s) #s
#define STRINGIFY(s) STRINGIFY_(s)
static const _Controller_type_support_symbol_names_t _Controller_message_typesupport_symbol_names = {
{
STRINGIFY(ROSIDL_TYPESUPPORT_INTERFACE__MESSAGE_SYMBOL_NAME(rosidl_typesupport_connext_c, vive_interfaces, msg, Controller)),
STRINGIFY(ROSIDL_TYPESUPPORT_INTERFACE__MESSAGE_SYMBOL_NAME(rosidl_typesupport_fastrtps_c, vive_interfaces, msg, Controller)),
STRINGIFY(ROSIDL_TYPESUPPORT_INTERFACE__MESSAGE_SYMBOL_NAME(rosidl_typesupport_introspection_c, vive_interfaces, msg, Controller)),
}
};
typedef struct _Controller_type_support_data_t
{
void * data[3];
} _Controller_type_support_data_t;
static _Controller_type_support_data_t _Controller_message_typesupport_data = {
{
0, // will store the shared library later
0, // will store the shared library later
0, // will store the shared library later
}
};
static const type_support_map_t _Controller_message_typesupport_map = {
3,
"vive_interfaces",
&_Controller_message_typesupport_ids.typesupport_identifier[0],
&_Controller_message_typesupport_symbol_names.symbol_name[0],
&_Controller_message_typesupport_data.data[0],
};
static const rosidl_message_type_support_t Controller_message_type_support_handle = {
rosidl_typesupport_c__typesupport_identifier,
reinterpret_cast<const type_support_map_t *>(&_Controller_message_typesupport_map),
rosidl_typesupport_c__get_message_typesupport_handle_function,
};
} // namespace rosidl_typesupport_c
} // namespace msg
} // namespace vive_interfaces
#ifdef __cplusplus
extern "C"
{
#endif
ROSIDL_TYPESUPPORT_C_EXPORT_vive_interfaces
const rosidl_message_type_support_t *
ROSIDL_TYPESUPPORT_INTERFACE__MESSAGE_SYMBOL_NAME(rosidl_typesupport_c, vive_interfaces, msg, Controller)() {
return &::vive_interfaces::msg::rosidl_typesupport_c::Controller_message_type_support_handle;
}
#ifdef __cplusplus
}
#endif
<file_sep># Install script for directory: D:/Omron_Robotics/dev_ws/src/vive_interfaces
# Set the install prefix
if(NOT DEFINED CMAKE_INSTALL_PREFIX)
set(CMAKE_INSTALL_PREFIX "D:/Omron_Robotics/dev_ws/install")
endif()
string(REGEX REPLACE "/$" "" CMAKE_INSTALL_PREFIX "${CMAKE_INSTALL_PREFIX}")
# Set the install configuration name.
if(NOT DEFINED CMAKE_INSTALL_CONFIG_NAME)
if(BUILD_TYPE)
string(REGEX REPLACE "^[^A-Za-z0-9_]+" ""
CMAKE_INSTALL_CONFIG_NAME "${BUILD_TYPE}")
else()
set(CMAKE_INSTALL_CONFIG_NAME "Release")
endif()
message(STATUS "Install configuration: \"${CMAKE_INSTALL_CONFIG_NAME}\"")
endif()
# Set the component getting installed.
if(NOT CMAKE_INSTALL_COMPONENT)
if(COMPONENT)
message(STATUS "Install component: \"${COMPONENT}\"")
set(CMAKE_INSTALL_COMPONENT "${COMPONENT}")
else()
set(CMAKE_INSTALL_COMPONENT)
endif()
endif()
# Is this installation the result of a crosscompile?
if(NOT DEFINED CMAKE_CROSSCOMPILING)
set(CMAKE_CROSSCOMPILING "FALSE")
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/ament_index/resource_index/rosidl_interfaces" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/ament_cmake_index/share/ament_index/resource_index/rosidl_interfaces/vive_interfaces")
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/include/vive_interfaces" TYPE DIRECTORY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/rosidl_generator_c/vive_interfaces/" REGEX "/[^/]*\\.h$")
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Dd][Ee][Bb][Uu][Gg])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Debug/vive_interfaces__rosidl_generator_c.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ee][Aa][Ss][Ee])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Release/vive_interfaces__rosidl_generator_c.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Mm][Ii][Nn][Ss][Ii][Zz][Ee][Rr][Ee][Ll])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/MinSizeRel/vive_interfaces__rosidl_generator_c.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ww][Ii][Tt][Hh][Dd][Ee][Bb][Ii][Nn][Ff][Oo])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/RelWithDebInfo/vive_interfaces__rosidl_generator_c.lib")
endif()
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Dd][Ee][Bb][Uu][Gg])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/bin" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Debug/vive_interfaces__rosidl_generator_c.dll")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ee][Aa][Ss][Ee])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/bin" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Release/vive_interfaces__rosidl_generator_c.dll")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Mm][Ii][Nn][Ss][Ii][Zz][Ee][Rr][Ee][Ll])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/bin" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/MinSizeRel/vive_interfaces__rosidl_generator_c.dll")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ww][Ii][Tt][Hh][Dd][Ee][Bb][Ii][Nn][Ff][Oo])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/bin" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/RelWithDebInfo/vive_interfaces__rosidl_generator_c.dll")
endif()
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/include/vive_interfaces" TYPE DIRECTORY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/rosidl_typesupport_fastrtps_c/vive_interfaces/" REGEX "/[^/]*\\.cpp$" EXCLUDE)
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Dd][Ee][Bb][Uu][Gg])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Debug/vive_interfaces__rosidl_typesupport_fastrtps_c.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ee][Aa][Ss][Ee])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Release/vive_interfaces__rosidl_typesupport_fastrtps_c.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Mm][Ii][Nn][Ss][Ii][Zz][Ee][Rr][Ee][Ll])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/MinSizeRel/vive_interfaces__rosidl_typesupport_fastrtps_c.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ww][Ii][Tt][Hh][Dd][Ee][Bb][Ii][Nn][Ff][Oo])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/RelWithDebInfo/vive_interfaces__rosidl_typesupport_fastrtps_c.lib")
endif()
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Dd][Ee][Bb][Uu][Gg])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/bin" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Debug/vive_interfaces__rosidl_typesupport_fastrtps_c.dll")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ee][Aa][Ss][Ee])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/bin" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Release/vive_interfaces__rosidl_typesupport_fastrtps_c.dll")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Mm][Ii][Nn][Ss][Ii][Zz][Ee][Rr][Ee][Ll])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/bin" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/MinSizeRel/vive_interfaces__rosidl_typesupport_fastrtps_c.dll")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ww][Ii][Tt][Hh][Dd][Ee][Bb][Ii][Nn][Ff][Oo])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/bin" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/RelWithDebInfo/vive_interfaces__rosidl_typesupport_fastrtps_c.dll")
endif()
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/include/vive_interfaces" TYPE DIRECTORY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/rosidl_typesupport_fastrtps_cpp/vive_interfaces/" REGEX "/[^/]*\\.cpp$" EXCLUDE)
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Dd][Ee][Bb][Uu][Gg])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Debug/vive_interfaces__rosidl_typesupport_fastrtps_cpp.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ee][Aa][Ss][Ee])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Release/vive_interfaces__rosidl_typesupport_fastrtps_cpp.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Mm][Ii][Nn][Ss][Ii][Zz][Ee][Rr][Ee][Ll])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/MinSizeRel/vive_interfaces__rosidl_typesupport_fastrtps_cpp.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ww][Ii][Tt][Hh][Dd][Ee][Bb][Ii][Nn][Ff][Oo])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/RelWithDebInfo/vive_interfaces__rosidl_typesupport_fastrtps_cpp.lib")
endif()
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Dd][Ee][Bb][Uu][Gg])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/bin" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Debug/vive_interfaces__rosidl_typesupport_fastrtps_cpp.dll")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ee][Aa][Ss][Ee])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/bin" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Release/vive_interfaces__rosidl_typesupport_fastrtps_cpp.dll")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Mm][Ii][Nn][Ss][Ii][Zz][Ee][Rr][Ee][Ll])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/bin" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/MinSizeRel/vive_interfaces__rosidl_typesupport_fastrtps_cpp.dll")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ww][Ii][Tt][Hh][Dd][Ee][Bb][Ii][Nn][Ff][Oo])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/bin" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/RelWithDebInfo/vive_interfaces__rosidl_typesupport_fastrtps_cpp.dll")
endif()
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/include/vive_interfaces" TYPE DIRECTORY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/rosidl_typesupport_introspection_c/vive_interfaces/" REGEX "/[^/]*\\.h$")
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Dd][Ee][Bb][Uu][Gg])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Debug/vive_interfaces__rosidl_typesupport_introspection_c.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ee][Aa][Ss][Ee])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Release/vive_interfaces__rosidl_typesupport_introspection_c.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Mm][Ii][Nn][Ss][Ii][Zz][Ee][Rr][Ee][Ll])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/MinSizeRel/vive_interfaces__rosidl_typesupport_introspection_c.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ww][Ii][Tt][Hh][Dd][Ee][Bb][Ii][Nn][Ff][Oo])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/RelWithDebInfo/vive_interfaces__rosidl_typesupport_introspection_c.lib")
endif()
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Dd][Ee][Bb][Uu][Gg])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/bin" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Debug/vive_interfaces__rosidl_typesupport_introspection_c.dll")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ee][Aa][Ss][Ee])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/bin" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Release/vive_interfaces__rosidl_typesupport_introspection_c.dll")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Mm][Ii][Nn][Ss][Ii][Zz][Ee][Rr][Ee][Ll])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/bin" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/MinSizeRel/vive_interfaces__rosidl_typesupport_introspection_c.dll")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ww][Ii][Tt][Hh][Dd][Ee][Bb][Ii][Nn][Ff][Oo])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/bin" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/RelWithDebInfo/vive_interfaces__rosidl_typesupport_introspection_c.dll")
endif()
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Dd][Ee][Bb][Uu][Gg])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Debug/vive_interfaces__rosidl_typesupport_c.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ee][Aa][Ss][Ee])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Release/vive_interfaces__rosidl_typesupport_c.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Mm][Ii][Nn][Ss][Ii][Zz][Ee][Rr][Ee][Ll])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/MinSizeRel/vive_interfaces__rosidl_typesupport_c.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ww][Ii][Tt][Hh][Dd][Ee][Bb][Ii][Nn][Ff][Oo])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/RelWithDebInfo/vive_interfaces__rosidl_typesupport_c.lib")
endif()
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Dd][Ee][Bb][Uu][Gg])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/bin" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Debug/vive_interfaces__rosidl_typesupport_c.dll")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ee][Aa][Ss][Ee])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/bin" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Release/vive_interfaces__rosidl_typesupport_c.dll")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Mm][Ii][Nn][Ss][Ii][Zz][Ee][Rr][Ee][Ll])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/bin" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/MinSizeRel/vive_interfaces__rosidl_typesupport_c.dll")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ww][Ii][Tt][Hh][Dd][Ee][Bb][Ii][Nn][Ff][Oo])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/bin" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/RelWithDebInfo/vive_interfaces__rosidl_typesupport_c.dll")
endif()
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/include/vive_interfaces" TYPE DIRECTORY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/rosidl_generator_cpp/vive_interfaces/" REGEX "/[^/]*\\.hpp$")
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/include/vive_interfaces" TYPE DIRECTORY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/rosidl_typesupport_introspection_cpp/vive_interfaces/" REGEX "/[^/]*\\.hpp$")
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Dd][Ee][Bb][Uu][Gg])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Debug/vive_interfaces__rosidl_typesupport_introspection_cpp.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ee][Aa][Ss][Ee])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Release/vive_interfaces__rosidl_typesupport_introspection_cpp.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Mm][Ii][Nn][Ss][Ii][Zz][Ee][Rr][Ee][Ll])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/MinSizeRel/vive_interfaces__rosidl_typesupport_introspection_cpp.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ww][Ii][Tt][Hh][Dd][Ee][Bb][Ii][Nn][Ff][Oo])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/RelWithDebInfo/vive_interfaces__rosidl_typesupport_introspection_cpp.lib")
endif()
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Dd][Ee][Bb][Uu][Gg])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/bin" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Debug/vive_interfaces__rosidl_typesupport_introspection_cpp.dll")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ee][Aa][Ss][Ee])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/bin" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Release/vive_interfaces__rosidl_typesupport_introspection_cpp.dll")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Mm][Ii][Nn][Ss][Ii][Zz][Ee][Rr][Ee][Ll])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/bin" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/MinSizeRel/vive_interfaces__rosidl_typesupport_introspection_cpp.dll")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ww][Ii][Tt][Hh][Dd][Ee][Bb][Ii][Nn][Ff][Oo])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/bin" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/RelWithDebInfo/vive_interfaces__rosidl_typesupport_introspection_cpp.dll")
endif()
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Dd][Ee][Bb][Uu][Gg])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Debug/vive_interfaces__rosidl_typesupport_cpp.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ee][Aa][Ss][Ee])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Release/vive_interfaces__rosidl_typesupport_cpp.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Mm][Ii][Nn][Ss][Ii][Zz][Ee][Rr][Ee][Ll])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/MinSizeRel/vive_interfaces__rosidl_typesupport_cpp.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ww][Ii][Tt][Hh][Dd][Ee][Bb][Ii][Nn][Ff][Oo])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/RelWithDebInfo/vive_interfaces__rosidl_typesupport_cpp.lib")
endif()
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Dd][Ee][Bb][Uu][Gg])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/bin" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Debug/vive_interfaces__rosidl_typesupport_cpp.dll")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ee][Aa][Ss][Ee])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/bin" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Release/vive_interfaces__rosidl_typesupport_cpp.dll")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Mm][Ii][Nn][Ss][Ii][Zz][Ee][Rr][Ee][Ll])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/bin" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/MinSizeRel/vive_interfaces__rosidl_typesupport_cpp.dll")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ww][Ii][Tt][Hh][Dd][Ee][Bb][Ii][Nn][Ff][Oo])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/bin" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/RelWithDebInfo/vive_interfaces__rosidl_typesupport_cpp.dll")
endif()
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/environment" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/ament_cmake_environment_hooks/pythonpath.bat")
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/environment" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/ament_cmake_environment_hooks/pythonpath.dsv")
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/Lib/site-packages/vive_interfaces" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/rosidl_generator_py/vive_interfaces/__init__.py")
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
execute_process(
COMMAND
"D:/Omron_Robotics/dev_ws/venv/Scripts/python.exe" "-m" "compileall"
"D:/Omron_Robotics/dev_ws/install/Lib/site-packages/vive_interfaces/__init__.py"
)
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/Lib/site-packages/vive_interfaces/msg" TYPE DIRECTORY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/rosidl_generator_py/vive_interfaces/msg/" REGEX "/[^/]*\\.py$")
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Dd][Ee][Bb][Uu][Gg])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/Lib/site-packages/vive_interfaces" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Debug/vive_interfaces_s__rosidl_typesupport_fastrtps_c.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ee][Aa][Ss][Ee])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/Lib/site-packages/vive_interfaces" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Release/vive_interfaces_s__rosidl_typesupport_fastrtps_c.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Mm][Ii][Nn][Ss][Ii][Zz][Ee][Rr][Ee][Ll])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/Lib/site-packages/vive_interfaces" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/MinSizeRel/vive_interfaces_s__rosidl_typesupport_fastrtps_c.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ww][Ii][Tt][Hh][Dd][Ee][Bb][Ii][Nn][Ff][Oo])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/Lib/site-packages/vive_interfaces" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/RelWithDebInfo/vive_interfaces_s__rosidl_typesupport_fastrtps_c.lib")
endif()
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Dd][Ee][Bb][Uu][Gg])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/Lib/site-packages/vive_interfaces" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/rosidl_generator_py/vive_interfaces/vive_interfaces_s__rosidl_typesupport_fastrtps_c.pyd")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ee][Aa][Ss][Ee])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/Lib/site-packages/vive_interfaces" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/rosidl_generator_py/vive_interfaces/vive_interfaces_s__rosidl_typesupport_fastrtps_c.pyd")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Mm][Ii][Nn][Ss][Ii][Zz][Ee][Rr][Ee][Ll])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/Lib/site-packages/vive_interfaces" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/rosidl_generator_py/vive_interfaces/vive_interfaces_s__rosidl_typesupport_fastrtps_c.pyd")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ww][Ii][Tt][Hh][Dd][Ee][Bb][Ii][Nn][Ff][Oo])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/Lib/site-packages/vive_interfaces" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/rosidl_generator_py/vive_interfaces/vive_interfaces_s__rosidl_typesupport_fastrtps_c.pyd")
endif()
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Dd][Ee][Bb][Uu][Gg])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/Lib/site-packages/vive_interfaces" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Debug/vive_interfaces_s__rosidl_typesupport_introspection_c.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ee][Aa][Ss][Ee])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/Lib/site-packages/vive_interfaces" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Release/vive_interfaces_s__rosidl_typesupport_introspection_c.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Mm][Ii][Nn][Ss][Ii][Zz][Ee][Rr][Ee][Ll])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/Lib/site-packages/vive_interfaces" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/MinSizeRel/vive_interfaces_s__rosidl_typesupport_introspection_c.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ww][Ii][Tt][Hh][Dd][Ee][Bb][Ii][Nn][Ff][Oo])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/Lib/site-packages/vive_interfaces" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/RelWithDebInfo/vive_interfaces_s__rosidl_typesupport_introspection_c.lib")
endif()
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Dd][Ee][Bb][Uu][Gg])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/Lib/site-packages/vive_interfaces" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/rosidl_generator_py/vive_interfaces/vive_interfaces_s__rosidl_typesupport_introspection_c.pyd")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ee][Aa][Ss][Ee])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/Lib/site-packages/vive_interfaces" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/rosidl_generator_py/vive_interfaces/vive_interfaces_s__rosidl_typesupport_introspection_c.pyd")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Mm][Ii][Nn][Ss][Ii][Zz][Ee][Rr][Ee][Ll])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/Lib/site-packages/vive_interfaces" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/rosidl_generator_py/vive_interfaces/vive_interfaces_s__rosidl_typesupport_introspection_c.pyd")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ww][Ii][Tt][Hh][Dd][Ee][Bb][Ii][Nn][Ff][Oo])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/Lib/site-packages/vive_interfaces" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/rosidl_generator_py/vive_interfaces/vive_interfaces_s__rosidl_typesupport_introspection_c.pyd")
endif()
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Dd][Ee][Bb][Uu][Gg])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/Lib/site-packages/vive_interfaces" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Debug/vive_interfaces_s__rosidl_typesupport_c.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ee][Aa][Ss][Ee])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/Lib/site-packages/vive_interfaces" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Release/vive_interfaces_s__rosidl_typesupport_c.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Mm][Ii][Nn][Ss][Ii][Zz][Ee][Rr][Ee][Ll])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/Lib/site-packages/vive_interfaces" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/MinSizeRel/vive_interfaces_s__rosidl_typesupport_c.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ww][Ii][Tt][Hh][Dd][Ee][Bb][Ii][Nn][Ff][Oo])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/Lib/site-packages/vive_interfaces" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/RelWithDebInfo/vive_interfaces_s__rosidl_typesupport_c.lib")
endif()
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Dd][Ee][Bb][Uu][Gg])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/Lib/site-packages/vive_interfaces" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/rosidl_generator_py/vive_interfaces/vive_interfaces_s__rosidl_typesupport_c.pyd")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ee][Aa][Ss][Ee])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/Lib/site-packages/vive_interfaces" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/rosidl_generator_py/vive_interfaces/vive_interfaces_s__rosidl_typesupport_c.pyd")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Mm][Ii][Nn][Ss][Ii][Zz][Ee][Rr][Ee][Ll])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/Lib/site-packages/vive_interfaces" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/rosidl_generator_py/vive_interfaces/vive_interfaces_s__rosidl_typesupport_c.pyd")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ww][Ii][Tt][Hh][Dd][Ee][Bb][Ii][Nn][Ff][Oo])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/Lib/site-packages/vive_interfaces" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/rosidl_generator_py/vive_interfaces/vive_interfaces_s__rosidl_typesupport_c.pyd")
endif()
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Dd][Ee][Bb][Uu][Gg])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Debug/vive_interfaces__python.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ee][Aa][Ss][Ee])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/Release/vive_interfaces__python.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Mm][Ii][Nn][Ss][Ii][Zz][Ee][Rr][Ee][Ll])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/MinSizeRel/vive_interfaces__python.lib")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ww][Ii][Tt][Hh][Dd][Ee][Bb][Ii][Nn][Ff][Oo])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/lib" TYPE STATIC_LIBRARY OPTIONAL FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/RelWithDebInfo/vive_interfaces__python.lib")
endif()
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Dd][Ee][Bb][Uu][Gg])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/bin" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/rosidl_generator_py/vive_interfaces/vive_interfaces__python.dll")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ee][Aa][Ss][Ee])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/bin" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/rosidl_generator_py/vive_interfaces/vive_interfaces__python.dll")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Mm][Ii][Nn][Ss][Ii][Zz][Ee][Rr][Ee][Ll])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/bin" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/rosidl_generator_py/vive_interfaces/vive_interfaces__python.dll")
elseif("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ww][Ii][Tt][Hh][Dd][Ee][Bb][Ii][Nn][Ff][Oo])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/bin" TYPE SHARED_LIBRARY FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/rosidl_generator_py/vive_interfaces/vive_interfaces__python.dll")
endif()
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/msg" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/rosidl_adapter/vive_interfaces/msg/Hmd.idl")
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/msg" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/rosidl_adapter/vive_interfaces/msg/Controller.idl")
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/msg" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/src/vive_interfaces/msg/Hmd.msg")
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/msg" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/src/vive_interfaces/msg/Controller.msg")
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/ament_index/resource_index/package_run_dependencies" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/ament_cmake_index/share/ament_index/resource_index/package_run_dependencies/vive_interfaces")
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/ament_index/resource_index/parent_prefix_path" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/ament_cmake_index/share/ament_index/resource_index/parent_prefix_path/vive_interfaces")
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/environment" TYPE FILE FILES "C:/Program Files/ros2foxy/ros2-windows/share/ament_cmake_core/cmake/environment_hooks/environment/ament_prefix_path.bat")
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/environment" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/ament_cmake_environment_hooks/ament_prefix_path.dsv")
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/environment" TYPE FILE FILES "C:/Program Files/ros2foxy/ros2-windows/share/ament_cmake_core/cmake/environment_hooks/environment/path.bat")
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/environment" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/ament_cmake_environment_hooks/path.dsv")
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/ament_cmake_environment_hooks/local_setup.bat")
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/ament_cmake_environment_hooks/local_setup.dsv")
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/ament_cmake_environment_hooks/package.dsv")
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/ament_index/resource_index/packages" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/ament_cmake_index/share/ament_index/resource_index/packages/vive_interfaces")
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
if(EXISTS "$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake/vive_interfaces__rosidl_generator_cExport.cmake")
file(DIFFERENT EXPORT_FILE_CHANGED FILES
"$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake/vive_interfaces__rosidl_generator_cExport.cmake"
"D:/Omron_Robotics/dev_ws/build/vive_interfaces/CMakeFiles/Export/share/vive_interfaces/cmake/vive_interfaces__rosidl_generator_cExport.cmake")
if(EXPORT_FILE_CHANGED)
file(GLOB OLD_CONFIG_FILES "$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake/vive_interfaces__rosidl_generator_cExport-*.cmake")
if(OLD_CONFIG_FILES)
message(STATUS "Old export file \"$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake/vive_interfaces__rosidl_generator_cExport.cmake\" will be replaced. Removing files [${OLD_CONFIG_FILES}].")
file(REMOVE ${OLD_CONFIG_FILES})
endif()
endif()
endif()
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/CMakeFiles/Export/share/vive_interfaces/cmake/vive_interfaces__rosidl_generator_cExport.cmake")
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Dd][Ee][Bb][Uu][Gg])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/CMakeFiles/Export/share/vive_interfaces/cmake/vive_interfaces__rosidl_generator_cExport-debug.cmake")
endif()
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Mm][Ii][Nn][Ss][Ii][Zz][Ee][Rr][Ee][Ll])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/CMakeFiles/Export/share/vive_interfaces/cmake/vive_interfaces__rosidl_generator_cExport-minsizerel.cmake")
endif()
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ww][Ii][Tt][Hh][Dd][Ee][Bb][Ii][Nn][Ff][Oo])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/CMakeFiles/Export/share/vive_interfaces/cmake/vive_interfaces__rosidl_generator_cExport-relwithdebinfo.cmake")
endif()
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ee][Aa][Ss][Ee])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/CMakeFiles/Export/share/vive_interfaces/cmake/vive_interfaces__rosidl_generator_cExport-release.cmake")
endif()
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
if(EXISTS "$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_introspection_cExport.cmake")
file(DIFFERENT EXPORT_FILE_CHANGED FILES
"$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_introspection_cExport.cmake"
"D:/Omron_Robotics/dev_ws/build/vive_interfaces/CMakeFiles/Export/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_introspection_cExport.cmake")
if(EXPORT_FILE_CHANGED)
file(GLOB OLD_CONFIG_FILES "$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_introspection_cExport-*.cmake")
if(OLD_CONFIG_FILES)
message(STATUS "Old export file \"$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_introspection_cExport.cmake\" will be replaced. Removing files [${OLD_CONFIG_FILES}].")
file(REMOVE ${OLD_CONFIG_FILES})
endif()
endif()
endif()
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/CMakeFiles/Export/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_introspection_cExport.cmake")
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Dd][Ee][Bb][Uu][Gg])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/CMakeFiles/Export/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_introspection_cExport-debug.cmake")
endif()
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Mm][Ii][Nn][Ss][Ii][Zz][Ee][Rr][Ee][Ll])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/CMakeFiles/Export/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_introspection_cExport-minsizerel.cmake")
endif()
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ww][Ii][Tt][Hh][Dd][Ee][Bb][Ii][Nn][Ff][Oo])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/CMakeFiles/Export/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_introspection_cExport-relwithdebinfo.cmake")
endif()
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ee][Aa][Ss][Ee])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/CMakeFiles/Export/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_introspection_cExport-release.cmake")
endif()
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
if(EXISTS "$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_cExport.cmake")
file(DIFFERENT EXPORT_FILE_CHANGED FILES
"$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_cExport.cmake"
"D:/Omron_Robotics/dev_ws/build/vive_interfaces/CMakeFiles/Export/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_cExport.cmake")
if(EXPORT_FILE_CHANGED)
file(GLOB OLD_CONFIG_FILES "$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_cExport-*.cmake")
if(OLD_CONFIG_FILES)
message(STATUS "Old export file \"$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_cExport.cmake\" will be replaced. Removing files [${OLD_CONFIG_FILES}].")
file(REMOVE ${OLD_CONFIG_FILES})
endif()
endif()
endif()
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/CMakeFiles/Export/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_cExport.cmake")
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Dd][Ee][Bb][Uu][Gg])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/CMakeFiles/Export/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_cExport-debug.cmake")
endif()
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Mm][Ii][Nn][Ss][Ii][Zz][Ee][Rr][Ee][Ll])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/CMakeFiles/Export/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_cExport-minsizerel.cmake")
endif()
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ww][Ii][Tt][Hh][Dd][Ee][Bb][Ii][Nn][Ff][Oo])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/CMakeFiles/Export/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_cExport-relwithdebinfo.cmake")
endif()
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ee][Aa][Ss][Ee])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/CMakeFiles/Export/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_cExport-release.cmake")
endif()
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
if(EXISTS "$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake/vive_interfaces__rosidl_generator_cppExport.cmake")
file(DIFFERENT EXPORT_FILE_CHANGED FILES
"$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake/vive_interfaces__rosidl_generator_cppExport.cmake"
"D:/Omron_Robotics/dev_ws/build/vive_interfaces/CMakeFiles/Export/share/vive_interfaces/cmake/vive_interfaces__rosidl_generator_cppExport.cmake")
if(EXPORT_FILE_CHANGED)
file(GLOB OLD_CONFIG_FILES "$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake/vive_interfaces__rosidl_generator_cppExport-*.cmake")
if(OLD_CONFIG_FILES)
message(STATUS "Old export file \"$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake/vive_interfaces__rosidl_generator_cppExport.cmake\" will be replaced. Removing files [${OLD_CONFIG_FILES}].")
file(REMOVE ${OLD_CONFIG_FILES})
endif()
endif()
endif()
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/CMakeFiles/Export/share/vive_interfaces/cmake/vive_interfaces__rosidl_generator_cppExport.cmake")
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
if(EXISTS "$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_introspection_cppExport.cmake")
file(DIFFERENT EXPORT_FILE_CHANGED FILES
"$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_introspection_cppExport.cmake"
"D:/Omron_Robotics/dev_ws/build/vive_interfaces/CMakeFiles/Export/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_introspection_cppExport.cmake")
if(EXPORT_FILE_CHANGED)
file(GLOB OLD_CONFIG_FILES "$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_introspection_cppExport-*.cmake")
if(OLD_CONFIG_FILES)
message(STATUS "Old export file \"$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_introspection_cppExport.cmake\" will be replaced. Removing files [${OLD_CONFIG_FILES}].")
file(REMOVE ${OLD_CONFIG_FILES})
endif()
endif()
endif()
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/CMakeFiles/Export/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_introspection_cppExport.cmake")
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Dd][Ee][Bb][Uu][Gg])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/CMakeFiles/Export/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_introspection_cppExport-debug.cmake")
endif()
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Mm][Ii][Nn][Ss][Ii][Zz][Ee][Rr][Ee][Ll])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/CMakeFiles/Export/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_introspection_cppExport-minsizerel.cmake")
endif()
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ww][Ii][Tt][Hh][Dd][Ee][Bb][Ii][Nn][Ff][Oo])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/CMakeFiles/Export/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_introspection_cppExport-relwithdebinfo.cmake")
endif()
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ee][Aa][Ss][Ee])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/CMakeFiles/Export/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_introspection_cppExport-release.cmake")
endif()
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
if(EXISTS "$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_cppExport.cmake")
file(DIFFERENT EXPORT_FILE_CHANGED FILES
"$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_cppExport.cmake"
"D:/Omron_Robotics/dev_ws/build/vive_interfaces/CMakeFiles/Export/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_cppExport.cmake")
if(EXPORT_FILE_CHANGED)
file(GLOB OLD_CONFIG_FILES "$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_cppExport-*.cmake")
if(OLD_CONFIG_FILES)
message(STATUS "Old export file \"$ENV{DESTDIR}${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_cppExport.cmake\" will be replaced. Removing files [${OLD_CONFIG_FILES}].")
file(REMOVE ${OLD_CONFIG_FILES})
endif()
endif()
endif()
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/CMakeFiles/Export/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_cppExport.cmake")
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Dd][Ee][Bb][Uu][Gg])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/CMakeFiles/Export/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_cppExport-debug.cmake")
endif()
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Mm][Ii][Nn][Ss][Ii][Zz][Ee][Rr][Ee][Ll])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/CMakeFiles/Export/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_cppExport-minsizerel.cmake")
endif()
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ww][Ii][Tt][Hh][Dd][Ee][Bb][Ii][Nn][Ff][Oo])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/CMakeFiles/Export/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_cppExport-relwithdebinfo.cmake")
endif()
if("${CMAKE_INSTALL_CONFIG_NAME}" MATCHES "^([Rr][Ee][Ll][Ee][Aa][Ss][Ee])$")
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/CMakeFiles/Export/share/vive_interfaces/cmake/vive_interfaces__rosidl_typesupport_cppExport-release.cmake")
endif()
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/rosidl_cmake/rosidl_cmake-extras.cmake")
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/ament_cmake_export_dependencies/ament_cmake_export_dependencies-extras.cmake")
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/ament_cmake_export_libraries/ament_cmake_export_libraries-extras.cmake")
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/ament_cmake_export_targets/ament_cmake_export_targets-extras.cmake")
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/ament_cmake_export_include_directories/ament_cmake_export_include_directories-extras.cmake")
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/rosidl_cmake/rosidl_cmake_export_typesupport_libraries-extras.cmake")
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/build/vive_interfaces/rosidl_cmake/rosidl_cmake_export_typesupport_targets-extras.cmake")
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces/cmake" TYPE FILE FILES
"D:/Omron_Robotics/dev_ws/build/vive_interfaces/ament_cmake_core/vive_interfacesConfig.cmake"
"D:/Omron_Robotics/dev_ws/build/vive_interfaces/ament_cmake_core/vive_interfacesConfig-version.cmake"
)
endif()
if("x${CMAKE_INSTALL_COMPONENT}x" STREQUAL "xUnspecifiedx" OR NOT CMAKE_INSTALL_COMPONENT)
file(INSTALL DESTINATION "${CMAKE_INSTALL_PREFIX}/share/vive_interfaces" TYPE FILE FILES "D:/Omron_Robotics/dev_ws/src/vive_interfaces/package.xml")
endif()
if(NOT CMAKE_INSTALL_LOCAL_ONLY)
# Include the install script for each subdirectory.
include("D:/Omron_Robotics/dev_ws/build/vive_interfaces/vive_interfaces__py/cmake_install.cmake")
endif()
if(CMAKE_INSTALL_COMPONENT)
set(CMAKE_INSTALL_MANIFEST "install_manifest_${CMAKE_INSTALL_COMPONENT}.txt")
else()
set(CMAKE_INSTALL_MANIFEST "install_manifest.txt")
endif()
string(REPLACE ";" "\n" CMAKE_INSTALL_MANIFEST_CONTENT
"${CMAKE_INSTALL_MANIFEST_FILES}")
file(WRITE "D:/Omron_Robotics/dev_ws/build/vive_interfaces/${CMAKE_INSTALL_MANIFEST}"
"${CMAKE_INSTALL_MANIFEST_CONTENT}")
<file_sep>// generated from rosidl_generator_cpp/resource/idl.hpp.em
// generated code does not contain a copyright notice
#ifndef VIVE_INTERFACES__MSG__CONTROLLER_HPP_
#define VIVE_INTERFACES__MSG__CONTROLLER_HPP_
#include "vive_interfaces/msg/detail/controller__struct.hpp"
#include "vive_interfaces/msg/detail/controller__builder.hpp"
#include "vive_interfaces/msg/detail/controller__traits.hpp"
#endif // VIVE_INTERFACES__MSG__CONTROLLER_HPP_
<file_sep># generated from rosidl_generator_py/resource/_idl.py.em
# with input from vive_interfaces:msg\Controller.idl
# generated code does not contain a copyright notice
# Import statements for member types
import rosidl_parser.definition # noqa: E402, I100
class Metaclass_Controller(type):
"""Metaclass of message 'Controller'."""
_CREATE_ROS_MESSAGE = None
_CONVERT_FROM_PY = None
_CONVERT_TO_PY = None
_DESTROY_ROS_MESSAGE = None
_TYPE_SUPPORT = None
__constants = {
}
@classmethod
def __import_type_support__(cls):
try:
from rosidl_generator_py import import_type_support
module = import_type_support('vive_interfaces')
except ImportError:
import logging
import traceback
logger = logging.getLogger(
'vive_interfaces.msg.Controller')
logger.debug(
'Failed to import needed modules for type support:\n' +
traceback.format_exc())
else:
cls._CREATE_ROS_MESSAGE = module.create_ros_message_msg__msg__controller
cls._CONVERT_FROM_PY = module.convert_from_py_msg__msg__controller
cls._CONVERT_TO_PY = module.convert_to_py_msg__msg__controller
cls._TYPE_SUPPORT = module.type_support_msg__msg__controller
cls._DESTROY_ROS_MESSAGE = module.destroy_ros_message_msg__msg__controller
@classmethod
def __prepare__(cls, name, bases, **kwargs):
# list constant names here so that they appear in the help text of
# the message class under "Data and other attributes defined here:"
# as well as populate each message instance
return {
}
class Controller(metaclass=Metaclass_Controller):
"""Message class 'Controller'."""
__slots__ = [
'_x',
'_y',
'_z',
'_yaw',
'_pitch',
'_roll',
'_grip',
'_menu',
'_trigger',
'_trackpad_pressed',
'_trackpad_touched',
'_trackpad_x',
'_trackpad_y',
]
_fields_and_field_types = {
'x': 'float',
'y': 'float',
'z': 'float',
'yaw': 'float',
'pitch': 'float',
'roll': 'float',
'grip': 'int8',
'menu': 'int8',
'trigger': 'float',
'trackpad_pressed': 'int8',
'trackpad_touched': 'int8',
'trackpad_x': 'float',
'trackpad_y': 'float',
}
SLOT_TYPES = (
rosidl_parser.definition.BasicType('float'), # noqa: E501
rosidl_parser.definition.BasicType('float'), # noqa: E501
rosidl_parser.definition.BasicType('float'), # noqa: E501
rosidl_parser.definition.BasicType('float'), # noqa: E501
rosidl_parser.definition.BasicType('float'), # noqa: E501
rosidl_parser.definition.BasicType('float'), # noqa: E501
rosidl_parser.definition.BasicType('int8'), # noqa: E501
rosidl_parser.definition.BasicType('int8'), # noqa: E501
rosidl_parser.definition.BasicType('float'), # noqa: E501
rosidl_parser.definition.BasicType('int8'), # noqa: E501
rosidl_parser.definition.BasicType('int8'), # noqa: E501
rosidl_parser.definition.BasicType('float'), # noqa: E501
rosidl_parser.definition.BasicType('float'), # noqa: E501
)
def __init__(self, **kwargs):
assert all('_' + key in self.__slots__ for key in kwargs.keys()), \
'Invalid arguments passed to constructor: %s' % \
', '.join(sorted(k for k in kwargs.keys() if '_' + k not in self.__slots__))
self.x = kwargs.get('x', float())
self.y = kwargs.get('y', float())
self.z = kwargs.get('z', float())
self.yaw = kwargs.get('yaw', float())
self.pitch = kwargs.get('pitch', float())
self.roll = kwargs.get('roll', float())
self.grip = kwargs.get('grip', int())
self.menu = kwargs.get('menu', int())
self.trigger = kwargs.get('trigger', float())
self.trackpad_pressed = kwargs.get('trackpad_pressed', int())
self.trackpad_touched = kwargs.get('trackpad_touched', int())
self.trackpad_x = kwargs.get('trackpad_x', float())
self.trackpad_y = kwargs.get('trackpad_y', float())
def __repr__(self):
typename = self.__class__.__module__.split('.')
typename.pop()
typename.append(self.__class__.__name__)
args = []
for s, t in zip(self.__slots__, self.SLOT_TYPES):
field = getattr(self, s)
fieldstr = repr(field)
# We use Python array type for fields that can be directly stored
# in them, and "normal" sequences for everything else. If it is
# a type that we store in an array, strip off the 'array' portion.
if (
isinstance(t, rosidl_parser.definition.AbstractSequence) and
isinstance(t.value_type, rosidl_parser.definition.BasicType) and
t.value_type.typename in ['float', 'double', 'int8', 'uint8', 'int16', 'uint16', 'int32', 'uint32', 'int64', 'uint64']
):
if len(field) == 0:
fieldstr = '[]'
else:
assert fieldstr.startswith('array(')
prefix = "array('X', "
suffix = ')'
fieldstr = fieldstr[len(prefix):-len(suffix)]
args.append(s[1:] + '=' + fieldstr)
return '%s(%s)' % ('.'.join(typename), ', '.join(args))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
if self.x != other.x:
return False
if self.y != other.y:
return False
if self.z != other.z:
return False
if self.yaw != other.yaw:
return False
if self.pitch != other.pitch:
return False
if self.roll != other.roll:
return False
if self.grip != other.grip:
return False
if self.menu != other.menu:
return False
if self.trigger != other.trigger:
return False
if self.trackpad_pressed != other.trackpad_pressed:
return False
if self.trackpad_touched != other.trackpad_touched:
return False
if self.trackpad_x != other.trackpad_x:
return False
if self.trackpad_y != other.trackpad_y:
return False
return True
@classmethod
def get_fields_and_field_types(cls):
from copy import copy
return copy(cls._fields_and_field_types)
@property
def x(self):
"""Message field 'x'."""
return self._x
@x.setter
def x(self, value):
if __debug__:
assert \
isinstance(value, float), \
"The 'x' field must be of type 'float'"
self._x = value
@property
def y(self):
"""Message field 'y'."""
return self._y
@y.setter
def y(self, value):
if __debug__:
assert \
isinstance(value, float), \
"The 'y' field must be of type 'float'"
self._y = value
@property
def z(self):
"""Message field 'z'."""
return self._z
@z.setter
def z(self, value):
if __debug__:
assert \
isinstance(value, float), \
"The 'z' field must be of type 'float'"
self._z = value
@property
def yaw(self):
"""Message field 'yaw'."""
return self._yaw
@yaw.setter
def yaw(self, value):
if __debug__:
assert \
isinstance(value, float), \
"The 'yaw' field must be of type 'float'"
self._yaw = value
@property
def pitch(self):
"""Message field 'pitch'."""
return self._pitch
@pitch.setter
def pitch(self, value):
if __debug__:
assert \
isinstance(value, float), \
"The 'pitch' field must be of type 'float'"
self._pitch = value
@property
def roll(self):
"""Message field 'roll'."""
return self._roll
@roll.setter
def roll(self, value):
if __debug__:
assert \
isinstance(value, float), \
"The 'roll' field must be of type 'float'"
self._roll = value
@property
def grip(self):
"""Message field 'grip'."""
return self._grip
@grip.setter
def grip(self, value):
if __debug__:
assert \
isinstance(value, int), \
"The 'grip' field must be of type 'int'"
assert value >= -128 and value < 128, \
"The 'grip' field must be an integer in [-128, 127]"
self._grip = value
@property
def menu(self):
"""Message field 'menu'."""
return self._menu
@menu.setter
def menu(self, value):
if __debug__:
assert \
isinstance(value, int), \
"The 'menu' field must be of type 'int'"
assert value >= -128 and value < 128, \
"The 'menu' field must be an integer in [-128, 127]"
self._menu = value
@property
def trigger(self):
"""Message field 'trigger'."""
return self._trigger
@trigger.setter
def trigger(self, value):
if __debug__:
assert \
isinstance(value, float), \
"The 'trigger' field must be of type 'float'"
self._trigger = value
@property
def trackpad_pressed(self):
"""Message field 'trackpad_pressed'."""
return self._trackpad_pressed
@trackpad_pressed.setter
def trackpad_pressed(self, value):
if __debug__:
assert \
isinstance(value, int), \
"The 'trackpad_pressed' field must be of type 'int'"
assert value >= -128 and value < 128, \
"The 'trackpad_pressed' field must be an integer in [-128, 127]"
self._trackpad_pressed = value
@property
def trackpad_touched(self):
"""Message field 'trackpad_touched'."""
return self._trackpad_touched
@trackpad_touched.setter
def trackpad_touched(self, value):
if __debug__:
assert \
isinstance(value, int), \
"The 'trackpad_touched' field must be of type 'int'"
assert value >= -128 and value < 128, \
"The 'trackpad_touched' field must be an integer in [-128, 127]"
self._trackpad_touched = value
@property
def trackpad_x(self):
"""Message field 'trackpad_x'."""
return self._trackpad_x
@trackpad_x.setter
def trackpad_x(self, value):
if __debug__:
assert \
isinstance(value, float), \
"The 'trackpad_x' field must be of type 'float'"
self._trackpad_x = value
@property
def trackpad_y(self):
"""Message field 'trackpad_y'."""
return self._trackpad_y
@trackpad_y.setter
def trackpad_y(self, value):
if __debug__:
assert \
isinstance(value, float), \
"The 'trackpad_y' field must be of type 'float'"
self._trackpad_y = value
<file_sep>// generated from rosidl_typesupport_fastrtps_c/resource/idl__type_support_c.cpp.em
// with input from vive_interfaces:msg\Hmd.idl
// generated code does not contain a copyright notice
#include "vive_interfaces/msg/detail/hmd__rosidl_typesupport_fastrtps_c.h"
#include <cassert>
#include <limits>
#include <string>
#include "rosidl_typesupport_fastrtps_c/identifier.h"
#include "rosidl_typesupport_fastrtps_c/wstring_conversion.hpp"
#include "rosidl_typesupport_fastrtps_cpp/message_type_support.h"
#include "vive_interfaces/msg/rosidl_typesupport_fastrtps_c__visibility_control.h"
#include "vive_interfaces/msg/detail/hmd__struct.h"
#include "vive_interfaces/msg/detail/hmd__functions.h"
#include "fastcdr/Cdr.h"
#ifndef _WIN32
# pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Wunused-parameter"
# ifdef __clang__
# pragma clang diagnostic ignored "-Wdeprecated-register"
# pragma clang diagnostic ignored "-Wreturn-type-c-linkage"
# endif
#endif
#ifndef _WIN32
# pragma GCC diagnostic pop
#endif
// includes and forward declarations of message dependencies and their conversion functions
#if defined(__cplusplus)
extern "C"
{
#endif
// forward declare type support functions
using _Hmd__ros_msg_type = vive_interfaces__msg__Hmd;
static bool _Hmd__cdr_serialize(
const void * untyped_ros_message,
eprosima::fastcdr::Cdr & cdr)
{
if (!untyped_ros_message) {
fprintf(stderr, "ros message handle is null\n");
return false;
}
const _Hmd__ros_msg_type * ros_message = static_cast<const _Hmd__ros_msg_type *>(untyped_ros_message);
// Field name: x
{
cdr << ros_message->x;
}
// Field name: y
{
cdr << ros_message->y;
}
// Field name: z
{
cdr << ros_message->z;
}
// Field name: yaw
{
cdr << ros_message->yaw;
}
// Field name: pitch
{
cdr << ros_message->pitch;
}
// Field name: roll
{
cdr << ros_message->roll;
}
return true;
}
static bool _Hmd__cdr_deserialize(
eprosima::fastcdr::Cdr & cdr,
void * untyped_ros_message)
{
if (!untyped_ros_message) {
fprintf(stderr, "ros message handle is null\n");
return false;
}
_Hmd__ros_msg_type * ros_message = static_cast<_Hmd__ros_msg_type *>(untyped_ros_message);
// Field name: x
{
cdr >> ros_message->x;
}
// Field name: y
{
cdr >> ros_message->y;
}
// Field name: z
{
cdr >> ros_message->z;
}
// Field name: yaw
{
cdr >> ros_message->yaw;
}
// Field name: pitch
{
cdr >> ros_message->pitch;
}
// Field name: roll
{
cdr >> ros_message->roll;
}
return true;
}
ROSIDL_TYPESUPPORT_FASTRTPS_C_PUBLIC_vive_interfaces
size_t get_serialized_size_vive_interfaces__msg__Hmd(
const void * untyped_ros_message,
size_t current_alignment)
{
const _Hmd__ros_msg_type * ros_message = static_cast<const _Hmd__ros_msg_type *>(untyped_ros_message);
(void)ros_message;
size_t initial_alignment = current_alignment;
const size_t padding = 4;
const size_t wchar_size = 4;
(void)padding;
(void)wchar_size;
// field.name x
{
size_t item_size = sizeof(ros_message->x);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// field.name y
{
size_t item_size = sizeof(ros_message->y);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// field.name z
{
size_t item_size = sizeof(ros_message->z);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// field.name yaw
{
size_t item_size = sizeof(ros_message->yaw);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// field.name pitch
{
size_t item_size = sizeof(ros_message->pitch);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// field.name roll
{
size_t item_size = sizeof(ros_message->roll);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
return current_alignment - initial_alignment;
}
static uint32_t _Hmd__get_serialized_size(const void * untyped_ros_message)
{
return static_cast<uint32_t>(
get_serialized_size_vive_interfaces__msg__Hmd(
untyped_ros_message, 0));
}
ROSIDL_TYPESUPPORT_FASTRTPS_C_PUBLIC_vive_interfaces
size_t max_serialized_size_vive_interfaces__msg__Hmd(
bool & full_bounded,
size_t current_alignment)
{
size_t initial_alignment = current_alignment;
const size_t padding = 4;
const size_t wchar_size = 4;
(void)padding;
(void)wchar_size;
(void)full_bounded;
// member: x
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint32_t) +
eprosima::fastcdr::Cdr::alignment(current_alignment, sizeof(uint32_t));
}
// member: y
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint32_t) +
eprosima::fastcdr::Cdr::alignment(current_alignment, sizeof(uint32_t));
}
// member: z
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint32_t) +
eprosima::fastcdr::Cdr::alignment(current_alignment, sizeof(uint32_t));
}
// member: yaw
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint32_t) +
eprosima::fastcdr::Cdr::alignment(current_alignment, sizeof(uint32_t));
}
// member: pitch
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint32_t) +
eprosima::fastcdr::Cdr::alignment(current_alignment, sizeof(uint32_t));
}
// member: roll
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint32_t) +
eprosima::fastcdr::Cdr::alignment(current_alignment, sizeof(uint32_t));
}
return current_alignment - initial_alignment;
}
static size_t _Hmd__max_serialized_size(bool & full_bounded)
{
return max_serialized_size_vive_interfaces__msg__Hmd(
full_bounded, 0);
}
static message_type_support_callbacks_t __callbacks_Hmd = {
"vive_interfaces::msg",
"Hmd",
_Hmd__cdr_serialize,
_Hmd__cdr_deserialize,
_Hmd__get_serialized_size,
_Hmd__max_serialized_size
};
static rosidl_message_type_support_t _Hmd__type_support = {
rosidl_typesupport_fastrtps_c__identifier,
&__callbacks_Hmd,
get_message_typesupport_handle_function,
};
const rosidl_message_type_support_t *
ROSIDL_TYPESUPPORT_INTERFACE__MESSAGE_SYMBOL_NAME(rosidl_typesupport_fastrtps_c, vive_interfaces, msg, Hmd)() {
return &_Hmd__type_support;
}
#if defined(__cplusplus)
}
#endif
<file_sep># OpenVR OSC
> Easily track pose data from OpenVR devices via OSC
OpenVR OSC is a compact Python utility that tracks OpenVR devices (HMDs, controllers, trackers) and streams their position and rotation values over OSC. It can be used for VR, AR, or any other position-based project, and the output can be fed to any OSC-capable client.
This project is inspired by and uses Triad's [Python OpenVR wrapper](https://github.com/TriadSemi/triad_openvr).
## Setup
### Requirements
This project requires Python `3`; its dependencies are incompatible with Python `2.7`.
It also requires an existing OpenVR installation (e.g. SteamVR) and compatible hardware (e.g. an HTC Vive).
### Installation
To install the project dependencies run `pip install -r requirements.txt`.
## How to use
To run OpenVR-OSC enter the following in your terminal:
`python3 openvr-osc.py`
### Tracked devices
The following devices are tracked and sent via OSC:
- headsets (HMDs)
- controllers
- tracker units
The OSC messages sent are formatted as follows:
`/{DEVICE_TYPE}/{DEVICE_ID} - [f,f,f,f,f,f]`
### OSC message format
Each message contains multiple float values in an order set by the `pose mode`:
In `Euler` mode:
- x position
- y position
- z position
- yaw
- pitch
- roll
In `Quaternion` mode:
- `TBC`
Each tracking cycle is sent as a bundle of individual messages, per device, that are time synced.
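If you want to consume the feed in Python, the sketch below shows one possible receiver built with the `python-osc` package. It is only an illustration (not part of this project) and assumes the six-float Euler layout described above; adjust the port to whatever you pass via `--port`.
```python
# Minimal OSC receiver sketch for the pose bundles (assumes python-osc).
from pythonosc import dispatcher, osc_server

def on_pose(address, *values):
    # address looks like "/hmd/1" or "/tracker/3"; in Euler mode the values
    # are expected to be (x, y, z, yaw, pitch, roll)
    device_type, device_id = address.strip("/").split("/")
    print(device_type, device_id, values)

disp = dispatcher.Dispatcher()
# catch every /<device_type>/<device_id> address with a single handler
disp.set_default_handler(on_pose)

server = osc_server.BlockingOSCUDPServer(("127.0.0.1", 7000), disp)
server.serve_forever()
```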
### Configuration options
There are several configuration options to customize the OSC feed and the tracking:
<details>
<summary>OSC server ip</summary>
Set the IP address of the OSC server - `--ip 172.16.31.10`
Defaults to `127.0.0.1` (localhost).
</details>
<details>
<summary>OSC server port</summary>
Set the port of the OSC server - `--port 5000`
Defaults to `7000`.
</details>
<details>
<summary>tracked device type</summary>
By default all device types are tracked, but that can lead to unnecessary OSC traffic.
If you know which device types you'd like to track (HMD, controllers, trackers), you can limit the tracking, e.g. `--track hmd` or `--track controller tracker`.
</details>
<details>
<summary>tracking frequency (coming soon)</summary>
> coming soon
</details>
<details>
<summary>pose mode (coming soon)</summary>
> coming soon
</details>
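For example, `python3 openvr-osc.py --ip 192.168.1.50 --port 9000 --track tracker` would send only tracker poses to an OSC server listening on `192.168.1.50:9000` (the address, port, and device choice here are placeholders for your own setup).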
## Using SteamVR without a headset
To use the trackers and controllers without the need for a headset, follow the instructions in [this tutorial](http://help.triadsemi.com/steamvr-tracking/steamvr-tracking-without-an-hmd).
## Roadmap
- [ ] periodic check for device state and newly tracked devices
- [ ] adjustable tracking frequency
- [ ] adjustable pose tracking mode (euler/quaternion)
- [ ] unique tracked device ids
> Made at ITP NYU
<file_sep>// generated from rosidl_generator_c/resource/idl.h.em
// with input from vive_interfaces:msg\Controller.idl
// generated code does not contain a copyright notice
#ifndef VIVE_INTERFACES__MSG__CONTROLLER_H_
#define VIVE_INTERFACES__MSG__CONTROLLER_H_
#include "vive_interfaces/msg/detail/controller__struct.h"
#include "vive_interfaces/msg/detail/controller__functions.h"
#include "vive_interfaces/msg/detail/controller__type_support.h"
#endif // VIVE_INTERFACES__MSG__CONTROLLER_H_
<file_sep>#----------------------------------------------------------------
# Generated CMake target import file for configuration "Release".
#----------------------------------------------------------------
# Commands may need to know the format version.
set(CMAKE_IMPORT_FILE_VERSION 1)
# Import target "vive_interfaces::vive_interfaces__rosidl_typesupport_c" for configuration "Release"
set_property(TARGET vive_interfaces::vive_interfaces__rosidl_typesupport_c APPEND PROPERTY IMPORTED_CONFIGURATIONS RELEASE)
set_target_properties(vive_interfaces::vive_interfaces__rosidl_typesupport_c PROPERTIES
IMPORTED_IMPLIB_RELEASE "${_IMPORT_PREFIX}/lib/vive_interfaces__rosidl_typesupport_c.lib"
IMPORTED_LOCATION_RELEASE "${_IMPORT_PREFIX}/bin/vive_interfaces__rosidl_typesupport_c.dll"
)
list(APPEND _IMPORT_CHECK_TARGETS vive_interfaces::vive_interfaces__rosidl_typesupport_c )
list(APPEND _IMPORT_CHECK_FILES_FOR_vive_interfaces::vive_interfaces__rosidl_typesupport_c "${_IMPORT_PREFIX}/lib/vive_interfaces__rosidl_typesupport_c.lib" "${_IMPORT_PREFIX}/bin/vive_interfaces__rosidl_typesupport_c.dll" )
# Commands beyond this point should not need to know the version.
set(CMAKE_IMPORT_FILE_VERSION)
<file_sep>// generated from rosidl_typesupport_introspection_c/resource/idl__type_support.c.em
// with input from vive_interfaces:msg\Controller.idl
// generated code does not contain a copyright notice
#include <stddef.h>
#include "vive_interfaces/msg/detail/controller__rosidl_typesupport_introspection_c.h"
#include "vive_interfaces/msg/rosidl_typesupport_introspection_c__visibility_control.h"
#include "rosidl_typesupport_introspection_c/field_types.h"
#include "rosidl_typesupport_introspection_c/identifier.h"
#include "rosidl_typesupport_introspection_c/message_introspection.h"
#include "vive_interfaces/msg/detail/controller__functions.h"
#include "vive_interfaces/msg/detail/controller__struct.h"
#ifdef __cplusplus
extern "C"
{
#endif
void Controller__rosidl_typesupport_introspection_c__Controller_init_function(
void * message_memory, enum rosidl_runtime_c__message_initialization _init)
{
// TODO(karsten1987): initializers are not yet implemented for typesupport c
// see https://github.com/ros2/ros2/issues/397
(void) _init;
vive_interfaces__msg__Controller__init(message_memory);
}
void Controller__rosidl_typesupport_introspection_c__Controller_fini_function(void * message_memory)
{
vive_interfaces__msg__Controller__fini(message_memory);
}
static rosidl_typesupport_introspection_c__MessageMember Controller__rosidl_typesupport_introspection_c__Controller_message_member_array[13] = {
{
"x", // name
rosidl_typesupport_introspection_c__ROS_TYPE_FLOAT, // type
0, // upper bound of string
NULL, // members of sub message
false, // is array
0, // array size
false, // is upper bound
offsetof(vive_interfaces__msg__Controller, x), // bytes offset in struct
NULL, // default value
NULL, // size() function pointer
NULL, // get_const(index) function pointer
NULL, // get(index) function pointer
NULL // resize(index) function pointer
},
{
"y", // name
rosidl_typesupport_introspection_c__ROS_TYPE_FLOAT, // type
0, // upper bound of string
NULL, // members of sub message
false, // is array
0, // array size
false, // is upper bound
offsetof(vive_interfaces__msg__Controller, y), // bytes offset in struct
NULL, // default value
NULL, // size() function pointer
NULL, // get_const(index) function pointer
NULL, // get(index) function pointer
NULL // resize(index) function pointer
},
{
"z", // name
rosidl_typesupport_introspection_c__ROS_TYPE_FLOAT, // type
0, // upper bound of string
NULL, // members of sub message
false, // is array
0, // array size
false, // is upper bound
offsetof(vive_interfaces__msg__Controller, z), // bytes offset in struct
NULL, // default value
NULL, // size() function pointer
NULL, // get_const(index) function pointer
NULL, // get(index) function pointer
NULL // resize(index) function pointer
},
{
"yaw", // name
rosidl_typesupport_introspection_c__ROS_TYPE_FLOAT, // type
0, // upper bound of string
NULL, // members of sub message
false, // is array
0, // array size
false, // is upper bound
offsetof(vive_interfaces__msg__Controller, yaw), // bytes offset in struct
NULL, // default value
NULL, // size() function pointer
NULL, // get_const(index) function pointer
NULL, // get(index) function pointer
NULL // resize(index) function pointer
},
{
"pitch", // name
rosidl_typesupport_introspection_c__ROS_TYPE_FLOAT, // type
0, // upper bound of string
NULL, // members of sub message
false, // is array
0, // array size
false, // is upper bound
offsetof(vive_interfaces__msg__Controller, pitch), // bytes offset in struct
NULL, // default value
NULL, // size() function pointer
NULL, // get_const(index) function pointer
NULL, // get(index) function pointer
NULL // resize(index) function pointer
},
{
"roll", // name
rosidl_typesupport_introspection_c__ROS_TYPE_FLOAT, // type
0, // upper bound of string
NULL, // members of sub message
false, // is array
0, // array size
false, // is upper bound
offsetof(vive_interfaces__msg__Controller, roll), // bytes offset in struct
NULL, // default value
NULL, // size() function pointer
NULL, // get_const(index) function pointer
NULL, // get(index) function pointer
NULL // resize(index) function pointer
},
{
"grip", // name
rosidl_typesupport_introspection_c__ROS_TYPE_INT8, // type
0, // upper bound of string
NULL, // members of sub message
false, // is array
0, // array size
false, // is upper bound
offsetof(vive_interfaces__msg__Controller, grip), // bytes offset in struct
NULL, // default value
NULL, // size() function pointer
NULL, // get_const(index) function pointer
NULL, // get(index) function pointer
NULL // resize(index) function pointer
},
{
"menu", // name
rosidl_typesupport_introspection_c__ROS_TYPE_INT8, // type
0, // upper bound of string
NULL, // members of sub message
false, // is array
0, // array size
false, // is upper bound
offsetof(vive_interfaces__msg__Controller, menu), // bytes offset in struct
NULL, // default value
NULL, // size() function pointer
NULL, // get_const(index) function pointer
NULL, // get(index) function pointer
NULL // resize(index) function pointer
},
{
"trigger", // name
rosidl_typesupport_introspection_c__ROS_TYPE_FLOAT, // type
0, // upper bound of string
NULL, // members of sub message
false, // is array
0, // array size
false, // is upper bound
offsetof(vive_interfaces__msg__Controller, trigger), // bytes offset in struct
NULL, // default value
NULL, // size() function pointer
NULL, // get_const(index) function pointer
NULL, // get(index) function pointer
NULL // resize(index) function pointer
},
{
"trackpad_pressed", // name
rosidl_typesupport_introspection_c__ROS_TYPE_INT8, // type
0, // upper bound of string
NULL, // members of sub message
false, // is array
0, // array size
false, // is upper bound
offsetof(vive_interfaces__msg__Controller, trackpad_pressed), // bytes offset in struct
NULL, // default value
NULL, // size() function pointer
NULL, // get_const(index) function pointer
NULL, // get(index) function pointer
NULL // resize(index) function pointer
},
{
"trackpad_touched", // name
rosidl_typesupport_introspection_c__ROS_TYPE_INT8, // type
0, // upper bound of string
NULL, // members of sub message
false, // is array
0, // array size
false, // is upper bound
offsetof(vive_interfaces__msg__Controller, trackpad_touched), // bytes offset in struct
NULL, // default value
NULL, // size() function pointer
NULL, // get_const(index) function pointer
NULL, // get(index) function pointer
NULL // resize(index) function pointer
},
{
"trackpad_x", // name
rosidl_typesupport_introspection_c__ROS_TYPE_FLOAT, // type
0, // upper bound of string
NULL, // members of sub message
false, // is array
0, // array size
false, // is upper bound
offsetof(vive_interfaces__msg__Controller, trackpad_x), // bytes offset in struct
NULL, // default value
NULL, // size() function pointer
NULL, // get_const(index) function pointer
NULL, // get(index) function pointer
NULL // resize(index) function pointer
},
{
"trackpad_y", // name
rosidl_typesupport_introspection_c__ROS_TYPE_FLOAT, // type
0, // upper bound of string
NULL, // members of sub message
false, // is array
0, // array size
false, // is upper bound
offsetof(vive_interfaces__msg__Controller, trackpad_y), // bytes offset in struct
NULL, // default value
NULL, // size() function pointer
NULL, // get_const(index) function pointer
NULL, // get(index) function pointer
NULL // resize(index) function pointer
}
};
static const rosidl_typesupport_introspection_c__MessageMembers Controller__rosidl_typesupport_introspection_c__Controller_message_members = {
"vive_interfaces__msg", // message namespace
"Controller", // message name
13, // number of fields
sizeof(vive_interfaces__msg__Controller),
Controller__rosidl_typesupport_introspection_c__Controller_message_member_array, // message members
Controller__rosidl_typesupport_introspection_c__Controller_init_function, // function to initialize message memory (memory has to be allocated)
Controller__rosidl_typesupport_introspection_c__Controller_fini_function // function to terminate message instance (will not free memory)
};
// this is not const since it must be initialized on first access
// since C does not allow non-integral compile-time constants
static rosidl_message_type_support_t Controller__rosidl_typesupport_introspection_c__Controller_message_type_support_handle = {
0,
&Controller__rosidl_typesupport_introspection_c__Controller_message_members,
get_message_typesupport_handle_function,
};
ROSIDL_TYPESUPPORT_INTROSPECTION_C_EXPORT_vive_interfaces
const rosidl_message_type_support_t *
ROSIDL_TYPESUPPORT_INTERFACE__MESSAGE_SYMBOL_NAME(rosidl_typesupport_introspection_c, vive_interfaces, msg, Controller)() {
if (!Controller__rosidl_typesupport_introspection_c__Controller_message_type_support_handle.typesupport_identifier) {
Controller__rosidl_typesupport_introspection_c__Controller_message_type_support_handle.typesupport_identifier =
rosidl_typesupport_introspection_c__identifier;
}
return &Controller__rosidl_typesupport_introspection_c__Controller_message_type_support_handle;
}
#ifdef __cplusplus
}
#endif
<file_sep>// generated from
// rosidl_typesupport_c/resource/rosidl_typesupport_c__visibility_control.h.in
// generated code does not contain a copyright notice
#ifndef VIVE_INTERFACES__MSG__ROSIDL_TYPESUPPORT_C__VISIBILITY_CONTROL_H_
#define VIVE_INTERFACES__MSG__ROSIDL_TYPESUPPORT_C__VISIBILITY_CONTROL_H_
#ifdef __cplusplus
extern "C"
{
#endif
// This logic was borrowed (then namespaced) from the examples on the gcc wiki:
// https://gcc.gnu.org/wiki/Visibility
#if defined _WIN32 || defined __CYGWIN__
#ifdef __GNUC__
#define ROSIDL_TYPESUPPORT_C_EXPORT_vive_interfaces __attribute__ ((dllexport))
#define ROSIDL_TYPESUPPORT_C_IMPORT_vive_interfaces __attribute__ ((dllimport))
#else
#define ROSIDL_TYPESUPPORT_C_EXPORT_vive_interfaces __declspec(dllexport)
#define ROSIDL_TYPESUPPORT_C_IMPORT_vive_interfaces __declspec(dllimport)
#endif
#ifdef ROSIDL_TYPESUPPORT_C_BUILDING_DLL_vive_interfaces
#define ROSIDL_TYPESUPPORT_C_PUBLIC_vive_interfaces ROSIDL_TYPESUPPORT_C_EXPORT_vive_interfaces
#else
#define ROSIDL_TYPESUPPORT_C_PUBLIC_vive_interfaces ROSIDL_TYPESUPPORT_C_IMPORT_vive_interfaces
#endif
#else
#define ROSIDL_TYPESUPPORT_C_EXPORT_vive_interfaces __attribute__ ((visibility("default")))
#define ROSIDL_TYPESUPPORT_C_IMPORT_vive_interfaces
#if __GNUC__ >= 4
#define ROSIDL_TYPESUPPORT_C_PUBLIC_vive_interfaces __attribute__ ((visibility("default")))
#else
#define ROSIDL_TYPESUPPORT_C_PUBLIC_vive_interfaces
#endif
#endif
#ifdef __cplusplus
}
#endif
#endif // VIVE_INTERFACES__MSG__ROSIDL_TYPESUPPORT_C__VISIBILITY_CONTROL_H_
<file_sep>// generated from rosidl_generator_c/resource/idl__functions.c.em
// with input from vive_interfaces:msg\Hmd.idl
// generated code does not contain a copyright notice
#include "vive_interfaces/msg/detail/hmd__functions.h"
#include <assert.h>
#include <stdbool.h>
#include <stdlib.h>
#include <string.h>
bool
vive_interfaces__msg__Hmd__init(vive_interfaces__msg__Hmd * msg)
{
if (!msg) {
return false;
}
// x
// y
// z
// yaw
// pitch
// roll
return true;
}
void
vive_interfaces__msg__Hmd__fini(vive_interfaces__msg__Hmd * msg)
{
if (!msg) {
return;
}
// x
// y
// z
// yaw
// pitch
// roll
}
vive_interfaces__msg__Hmd *
vive_interfaces__msg__Hmd__create()
{
vive_interfaces__msg__Hmd * msg = (vive_interfaces__msg__Hmd *)malloc(sizeof(vive_interfaces__msg__Hmd));
if (!msg) {
return NULL;
}
memset(msg, 0, sizeof(vive_interfaces__msg__Hmd));
bool success = vive_interfaces__msg__Hmd__init(msg);
if (!success) {
free(msg);
return NULL;
}
return msg;
}
void
vive_interfaces__msg__Hmd__destroy(vive_interfaces__msg__Hmd * msg)
{
if (msg) {
vive_interfaces__msg__Hmd__fini(msg);
}
free(msg);
}
bool
vive_interfaces__msg__Hmd__Sequence__init(vive_interfaces__msg__Hmd__Sequence * array, size_t size)
{
if (!array) {
return false;
}
vive_interfaces__msg__Hmd * data = NULL;
if (size) {
data = (vive_interfaces__msg__Hmd *)calloc(size, sizeof(vive_interfaces__msg__Hmd));
if (!data) {
return false;
}
// initialize all array elements
size_t i;
for (i = 0; i < size; ++i) {
bool success = vive_interfaces__msg__Hmd__init(&data[i]);
if (!success) {
break;
}
}
if (i < size) {
// if initialization failed finalize the already initialized array elements
for (; i > 0; --i) {
vive_interfaces__msg__Hmd__fini(&data[i - 1]);
}
free(data);
return false;
}
}
array->data = data;
array->size = size;
array->capacity = size;
return true;
}
void
vive_interfaces__msg__Hmd__Sequence__fini(vive_interfaces__msg__Hmd__Sequence * array)
{
if (!array) {
return;
}
if (array->data) {
// ensure that data and capacity values are consistent
assert(array->capacity > 0);
// finalize all array elements
for (size_t i = 0; i < array->capacity; ++i) {
vive_interfaces__msg__Hmd__fini(&array->data[i]);
}
free(array->data);
array->data = NULL;
array->size = 0;
array->capacity = 0;
} else {
// ensure that data, size, and capacity values are consistent
assert(0 == array->size);
assert(0 == array->capacity);
}
}
vive_interfaces__msg__Hmd__Sequence *
vive_interfaces__msg__Hmd__Sequence__create(size_t size)
{
vive_interfaces__msg__Hmd__Sequence * array = (vive_interfaces__msg__Hmd__Sequence *)malloc(sizeof(vive_interfaces__msg__Hmd__Sequence));
if (!array) {
return NULL;
}
bool success = vive_interfaces__msg__Hmd__Sequence__init(array, size);
if (!success) {
free(array);
return NULL;
}
return array;
}
void
vive_interfaces__msg__Hmd__Sequence__destroy(vive_interfaces__msg__Hmd__Sequence * array)
{
if (array) {
vive_interfaces__msg__Hmd__Sequence__fini(array);
}
free(array);
}
<file_sep>#----------------------------------------------------------------
# Generated CMake target import file for configuration "Release".
#----------------------------------------------------------------
# Commands may need to know the format version.
set(CMAKE_IMPORT_FILE_VERSION 1)
# Import target "vive_interfaces::vive_interfaces__rosidl_generator_c" for configuration "Release"
set_property(TARGET vive_interfaces::vive_interfaces__rosidl_generator_c APPEND PROPERTY IMPORTED_CONFIGURATIONS RELEASE)
set_target_properties(vive_interfaces::vive_interfaces__rosidl_generator_c PROPERTIES
IMPORTED_IMPLIB_RELEASE "${_IMPORT_PREFIX}/lib/vive_interfaces__rosidl_generator_c.lib"
IMPORTED_LOCATION_RELEASE "${_IMPORT_PREFIX}/bin/vive_interfaces__rosidl_generator_c.dll"
)
list(APPEND _IMPORT_CHECK_TARGETS vive_interfaces::vive_interfaces__rosidl_generator_c )
list(APPEND _IMPORT_CHECK_FILES_FOR_vive_interfaces::vive_interfaces__rosidl_generator_c "${_IMPORT_PREFIX}/lib/vive_interfaces__rosidl_generator_c.lib" "${_IMPORT_PREFIX}/bin/vive_interfaces__rosidl_generator_c.dll" )
# Commands beyond this point should not need to know the version.
set(CMAKE_IMPORT_FILE_VERSION)
<file_sep>// generated from rosidl_generator_py/resource/_idl_pkg_typesupport_entry_point.c.em
// generated code does not contain a copyright notice
#include <Python.h>
static PyMethodDef vive_interfaces__methods[] = {
{NULL, NULL, 0, NULL} /* sentinel */
};
static struct PyModuleDef vive_interfaces__module = {
PyModuleDef_HEAD_INIT,
"_vive_interfaces_support",
"_vive_interfaces_doc",
-1, /* -1 means that the module keeps state in global variables */
vive_interfaces__methods,
NULL,
NULL,
NULL,
NULL,
};
#include <stdbool.h>
#include <stdint.h>
#include "rosidl_runtime_c/visibility_control.h"
#include "rosidl_runtime_c/message_type_support_struct.h"
#include "rosidl_runtime_c/service_type_support_struct.h"
#include "rosidl_runtime_c/action_type_support_struct.h"
#include "vive_interfaces/msg/detail/hmd__type_support.h"
#include "vive_interfaces/msg/detail/hmd__struct.h"
#include "vive_interfaces/msg/detail/hmd__functions.h"
static void * vive_interfaces__msg__hmd__create_ros_message(void)
{
return vive_interfaces__msg__Hmd__create();
}
static void vive_interfaces__msg__hmd__destroy_ros_message(void * raw_ros_message)
{
vive_interfaces__msg__Hmd * ros_message = (vive_interfaces__msg__Hmd *)raw_ros_message;
vive_interfaces__msg__Hmd__destroy(ros_message);
}
ROSIDL_GENERATOR_C_IMPORT
bool vive_interfaces__msg__hmd__convert_from_py(PyObject * _pymsg, void * ros_message);
ROSIDL_GENERATOR_C_IMPORT
PyObject * vive_interfaces__msg__hmd__convert_to_py(void * raw_ros_message);
ROSIDL_GENERATOR_C_IMPORT
const rosidl_message_type_support_t *
ROSIDL_GET_MSG_TYPE_SUPPORT(vive_interfaces, msg, Hmd);
int8_t
_register_msg_type__msg__hmd(PyObject * pymodule)
{
int8_t err;
PyObject * pyobject_create_ros_message = NULL;
pyobject_create_ros_message = PyCapsule_New(
(void *)&vive_interfaces__msg__hmd__create_ros_message,
NULL, NULL);
if (!pyobject_create_ros_message) {
// previously added objects will be removed when the module is destroyed
return -1;
}
err = PyModule_AddObject(
pymodule,
"create_ros_message_msg__msg__hmd",
pyobject_create_ros_message);
if (err) {
// the created capsule needs to be decremented
Py_XDECREF(pyobject_create_ros_message);
// previously added objects will be removed when the module is destroyed
return err;
}
PyObject * pyobject_destroy_ros_message = NULL;
pyobject_destroy_ros_message = PyCapsule_New(
(void *)&vive_interfaces__msg__hmd__destroy_ros_message,
NULL, NULL);
if (!pyobject_destroy_ros_message) {
// previously added objects will be removed when the module is destroyed
return -1;
}
err = PyModule_AddObject(
pymodule,
"destroy_ros_message_msg__msg__hmd",
pyobject_destroy_ros_message);
if (err) {
// the created capsule needs to be decremented
Py_XDECREF(pyobject_destroy_ros_message);
// previously added objects will be removed when the module is destroyed
return err;
}
PyObject * pyobject_convert_from_py = NULL;
pyobject_convert_from_py = PyCapsule_New(
(void *)&vive_interfaces__msg__hmd__convert_from_py,
NULL, NULL);
if (!pyobject_convert_from_py) {
// previously added objects will be removed when the module is destroyed
return -1;
}
err = PyModule_AddObject(
pymodule,
"convert_from_py_msg__msg__hmd",
pyobject_convert_from_py);
if (err) {
// the created capsule needs to be decremented
Py_XDECREF(pyobject_convert_from_py);
// previously added objects will be removed when the module is destroyed
return err;
}
PyObject * pyobject_convert_to_py = NULL;
pyobject_convert_to_py = PyCapsule_New(
(void *)&vive_interfaces__msg__hmd__convert_to_py,
NULL, NULL);
if (!pyobject_convert_to_py) {
// previously added objects will be removed when the module is destroyed
return -1;
}
err = PyModule_AddObject(
pymodule,
"convert_to_py_msg__msg__hmd",
pyobject_convert_to_py);
if (err) {
// the created capsule needs to be decremented
Py_XDECREF(pyobject_convert_to_py);
// previously added objects will be removed when the module is destroyed
return err;
}
PyObject * pyobject_type_support = NULL;
pyobject_type_support = PyCapsule_New(
(void *)ROSIDL_GET_MSG_TYPE_SUPPORT(vive_interfaces, msg, Hmd),
NULL, NULL);
if (!pyobject_type_support) {
// previously added objects will be removed when the module is destroyed
return -1;
}
err = PyModule_AddObject(
pymodule,
"type_support_msg__msg__hmd",
pyobject_type_support);
if (err) {
// the created capsule needs to be decremented
Py_XDECREF(pyobject_type_support);
// previously added objects will be removed when the module is destroyed
return err;
}
return 0;
}
// already included above
// #include <stdbool.h>
// already included above
// #include <stdint.h>
// already included above
// #include "rosidl_runtime_c/visibility_control.h"
// already included above
// #include "rosidl_runtime_c/message_type_support_struct.h"
// already included above
// #include "rosidl_runtime_c/service_type_support_struct.h"
// already included above
// #include "rosidl_runtime_c/action_type_support_struct.h"
#include "vive_interfaces/msg/detail/controller__type_support.h"
#include "vive_interfaces/msg/detail/controller__struct.h"
#include "vive_interfaces/msg/detail/controller__functions.h"
static void * vive_interfaces__msg__controller__create_ros_message(void)
{
return vive_interfaces__msg__Controller__create();
}
static void vive_interfaces__msg__controller__destroy_ros_message(void * raw_ros_message)
{
vive_interfaces__msg__Controller * ros_message = (vive_interfaces__msg__Controller *)raw_ros_message;
vive_interfaces__msg__Controller__destroy(ros_message);
}
ROSIDL_GENERATOR_C_IMPORT
bool vive_interfaces__msg__controller__convert_from_py(PyObject * _pymsg, void * ros_message);
ROSIDL_GENERATOR_C_IMPORT
PyObject * vive_interfaces__msg__controller__convert_to_py(void * raw_ros_message);
ROSIDL_GENERATOR_C_IMPORT
const rosidl_message_type_support_t *
ROSIDL_GET_MSG_TYPE_SUPPORT(vive_interfaces, msg, Controller);
int8_t
_register_msg_type__msg__controller(PyObject * pymodule)
{
int8_t err;
PyObject * pyobject_create_ros_message = NULL;
pyobject_create_ros_message = PyCapsule_New(
(void *)&vive_interfaces__msg__controller__create_ros_message,
NULL, NULL);
if (!pyobject_create_ros_message) {
// previously added objects will be removed when the module is destroyed
return -1;
}
err = PyModule_AddObject(
pymodule,
"create_ros_message_msg__msg__controller",
pyobject_create_ros_message);
if (err) {
// the created capsule needs to be decremented
Py_XDECREF(pyobject_create_ros_message);
// previously added objects will be removed when the module is destroyed
return err;
}
PyObject * pyobject_destroy_ros_message = NULL;
pyobject_destroy_ros_message = PyCapsule_New(
(void *)&vive_interfaces__msg__controller__destroy_ros_message,
NULL, NULL);
if (!pyobject_destroy_ros_message) {
// previously added objects will be removed when the module is destroyed
return -1;
}
err = PyModule_AddObject(
pymodule,
"destroy_ros_message_msg__msg__controller",
pyobject_destroy_ros_message);
if (err) {
// the created capsule needs to be decremented
Py_XDECREF(pyobject_destroy_ros_message);
// previously added objects will be removed when the module is destroyed
return err;
}
PyObject * pyobject_convert_from_py = NULL;
pyobject_convert_from_py = PyCapsule_New(
(void *)&vive_interfaces__msg__controller__convert_from_py,
NULL, NULL);
if (!pyobject_convert_from_py) {
// previously added objects will be removed when the module is destroyed
return -1;
}
err = PyModule_AddObject(
pymodule,
"convert_from_py_msg__msg__controller",
pyobject_convert_from_py);
if (err) {
// the created capsule needs to be decremented
Py_XDECREF(pyobject_convert_from_py);
// previously added objects will be removed when the module is destroyed
return err;
}
PyObject * pyobject_convert_to_py = NULL;
pyobject_convert_to_py = PyCapsule_New(
(void *)&vive_interfaces__msg__controller__convert_to_py,
NULL, NULL);
if (!pyobject_convert_to_py) {
// previously added objects will be removed when the module is destroyed
return -1;
}
err = PyModule_AddObject(
pymodule,
"convert_to_py_msg__msg__controller",
pyobject_convert_to_py);
if (err) {
// the created capsule needs to be decremented
Py_XDECREF(pyobject_convert_to_py);
// previously added objects will be removed when the module is destroyed
return err;
}
PyObject * pyobject_type_support = NULL;
pyobject_type_support = PyCapsule_New(
(void *)ROSIDL_GET_MSG_TYPE_SUPPORT(vive_interfaces, msg, Controller),
NULL, NULL);
if (!pyobject_type_support) {
// previously added objects will be removed when the module is destroyed
return -1;
}
err = PyModule_AddObject(
pymodule,
"type_support_msg__msg__controller",
pyobject_type_support);
if (err) {
// the created capsule needs to be decremented
Py_XDECREF(pyobject_type_support);
// previously added objects will be removed when the module is destroyed
return err;
}
return 0;
}
PyMODINIT_FUNC
PyInit_vive_interfaces_s__rosidl_typesupport_fastrtps_c(void)
{
PyObject * pymodule = NULL;
pymodule = PyModule_Create(&vive_interfaces__module);
if (!pymodule) {
return NULL;
}
int8_t err;
err = _register_msg_type__msg__hmd(pymodule);
if (err) {
Py_XDECREF(pymodule);
return NULL;
}
err = _register_msg_type__msg__controller(pymodule);
if (err) {
Py_XDECREF(pymodule);
return NULL;
}
return pymodule;
}
<file_sep>#----------------------------------------------------------------
# Generated CMake target import file for configuration "RelWithDebInfo".
#----------------------------------------------------------------
# Commands may need to know the format version.
set(CMAKE_IMPORT_FILE_VERSION 1)
# Import target "vive_interfaces::vive_interfaces__rosidl_typesupport_introspection_cpp" for configuration "RelWithDebInfo"
set_property(TARGET vive_interfaces::vive_interfaces__rosidl_typesupport_introspection_cpp APPEND PROPERTY IMPORTED_CONFIGURATIONS RELWITHDEBINFO)
set_target_properties(vive_interfaces::vive_interfaces__rosidl_typesupport_introspection_cpp PROPERTIES
IMPORTED_IMPLIB_RELWITHDEBINFO "${_IMPORT_PREFIX}/lib/vive_interfaces__rosidl_typesupport_introspection_cpp.lib"
IMPORTED_LOCATION_RELWITHDEBINFO "${_IMPORT_PREFIX}/bin/vive_interfaces__rosidl_typesupport_introspection_cpp.dll"
)
list(APPEND _IMPORT_CHECK_TARGETS vive_interfaces::vive_interfaces__rosidl_typesupport_introspection_cpp )
list(APPEND _IMPORT_CHECK_FILES_FOR_vive_interfaces::vive_interfaces__rosidl_typesupport_introspection_cpp "${_IMPORT_PREFIX}/lib/vive_interfaces__rosidl_typesupport_introspection_cpp.lib" "${_IMPORT_PREFIX}/bin/vive_interfaces__rosidl_typesupport_introspection_cpp.dll" )
# Commands beyond this point should not need to know the version.
set(CMAKE_IMPORT_FILE_VERSION)
<file_sep>colorama==0.3.9
openvr==1.0.1301
python-osc==1.6.4
reprint==0.5.1
six==1.11.0
<file_sep>from vive_interfaces.msg._controller import Controller # noqa: F401
from vive_interfaces.msg._hmd import Hmd # noqa: F401
<file_sep>// generated from rosidl_generator_cpp/resource/idl__struct.hpp.em
// with input from vive_interfaces:msg\Controller.idl
// generated code does not contain a copyright notice
#ifndef VIVE_INTERFACES__MSG__DETAIL__CONTROLLER__STRUCT_HPP_
#define VIVE_INTERFACES__MSG__DETAIL__CONTROLLER__STRUCT_HPP_
#include <rosidl_runtime_cpp/bounded_vector.hpp>
#include <rosidl_runtime_cpp/message_initialization.hpp>
#include <algorithm>
#include <array>
#include <memory>
#include <string>
#include <vector>
#ifndef _WIN32
# define DEPRECATED__vive_interfaces__msg__Controller __attribute__((deprecated))
#else
# define DEPRECATED__vive_interfaces__msg__Controller __declspec(deprecated)
#endif
namespace vive_interfaces
{
namespace msg
{
// message struct
template<class ContainerAllocator>
struct Controller_
{
using Type = Controller_<ContainerAllocator>;
explicit Controller_(rosidl_runtime_cpp::MessageInitialization _init = rosidl_runtime_cpp::MessageInitialization::ALL)
{
if (rosidl_runtime_cpp::MessageInitialization::ALL == _init ||
rosidl_runtime_cpp::MessageInitialization::ZERO == _init)
{
this->x = 0.0f;
this->y = 0.0f;
this->z = 0.0f;
this->yaw = 0.0f;
this->pitch = 0.0f;
this->roll = 0.0f;
this->grip = 0;
this->menu = 0;
this->trigger = 0.0f;
this->trackpad_pressed = 0;
this->trackpad_touched = 0;
this->trackpad_x = 0.0f;
this->trackpad_y = 0.0f;
}
}
explicit Controller_(const ContainerAllocator & _alloc, rosidl_runtime_cpp::MessageInitialization _init = rosidl_runtime_cpp::MessageInitialization::ALL)
{
(void)_alloc;
if (rosidl_runtime_cpp::MessageInitialization::ALL == _init ||
rosidl_runtime_cpp::MessageInitialization::ZERO == _init)
{
this->x = 0.0f;
this->y = 0.0f;
this->z = 0.0f;
this->yaw = 0.0f;
this->pitch = 0.0f;
this->roll = 0.0f;
this->grip = 0;
this->menu = 0;
this->trigger = 0.0f;
this->trackpad_pressed = 0;
this->trackpad_touched = 0;
this->trackpad_x = 0.0f;
this->trackpad_y = 0.0f;
}
}
// field types and members
using _x_type =
float;
_x_type x;
using _y_type =
float;
_y_type y;
using _z_type =
float;
_z_type z;
using _yaw_type =
float;
_yaw_type yaw;
using _pitch_type =
float;
_pitch_type pitch;
using _roll_type =
float;
_roll_type roll;
using _grip_type =
int8_t;
_grip_type grip;
using _menu_type =
int8_t;
_menu_type menu;
using _trigger_type =
float;
_trigger_type trigger;
using _trackpad_pressed_type =
int8_t;
_trackpad_pressed_type trackpad_pressed;
using _trackpad_touched_type =
int8_t;
_trackpad_touched_type trackpad_touched;
using _trackpad_x_type =
float;
_trackpad_x_type trackpad_x;
using _trackpad_y_type =
float;
_trackpad_y_type trackpad_y;
// setters for named parameter idiom
Type & set__x(
const float & _arg)
{
this->x = _arg;
return *this;
}
Type & set__y(
const float & _arg)
{
this->y = _arg;
return *this;
}
Type & set__z(
const float & _arg)
{
this->z = _arg;
return *this;
}
Type & set__yaw(
const float & _arg)
{
this->yaw = _arg;
return *this;
}
Type & set__pitch(
const float & _arg)
{
this->pitch = _arg;
return *this;
}
Type & set__roll(
const float & _arg)
{
this->roll = _arg;
return *this;
}
Type & set__grip(
const int8_t & _arg)
{
this->grip = _arg;
return *this;
}
Type & set__menu(
const int8_t & _arg)
{
this->menu = _arg;
return *this;
}
Type & set__trigger(
const float & _arg)
{
this->trigger = _arg;
return *this;
}
Type & set__trackpad_pressed(
const int8_t & _arg)
{
this->trackpad_pressed = _arg;
return *this;
}
Type & set__trackpad_touched(
const int8_t & _arg)
{
this->trackpad_touched = _arg;
return *this;
}
Type & set__trackpad_x(
const float & _arg)
{
this->trackpad_x = _arg;
return *this;
}
Type & set__trackpad_y(
const float & _arg)
{
this->trackpad_y = _arg;
return *this;
}
// constant declarations
// pointer types
using RawPtr =
vive_interfaces::msg::Controller_<ContainerAllocator> *;
using ConstRawPtr =
const vive_interfaces::msg::Controller_<ContainerAllocator> *;
using SharedPtr =
std::shared_ptr<vive_interfaces::msg::Controller_<ContainerAllocator>>;
using ConstSharedPtr =
std::shared_ptr<vive_interfaces::msg::Controller_<ContainerAllocator> const>;
template<typename Deleter = std::default_delete<
vive_interfaces::msg::Controller_<ContainerAllocator>>>
using UniquePtrWithDeleter =
std::unique_ptr<vive_interfaces::msg::Controller_<ContainerAllocator>, Deleter>;
using UniquePtr = UniquePtrWithDeleter<>;
template<typename Deleter = std::default_delete<
vive_interfaces::msg::Controller_<ContainerAllocator>>>
using ConstUniquePtrWithDeleter =
std::unique_ptr<vive_interfaces::msg::Controller_<ContainerAllocator> const, Deleter>;
using ConstUniquePtr = ConstUniquePtrWithDeleter<>;
using WeakPtr =
std::weak_ptr<vive_interfaces::msg::Controller_<ContainerAllocator>>;
using ConstWeakPtr =
std::weak_ptr<vive_interfaces::msg::Controller_<ContainerAllocator> const>;
// pointer types similar to ROS 1, use SharedPtr / ConstSharedPtr instead
// NOTE: Can't use 'using' here because GNU C++ can't parse attributes properly
typedef DEPRECATED__vive_interfaces__msg__Controller
std::shared_ptr<vive_interfaces::msg::Controller_<ContainerAllocator>>
Ptr;
typedef DEPRECATED__vive_interfaces__msg__Controller
std::shared_ptr<vive_interfaces::msg::Controller_<ContainerAllocator> const>
ConstPtr;
// comparison operators
bool operator==(const Controller_ & other) const
{
if (this->x != other.x) {
return false;
}
if (this->y != other.y) {
return false;
}
if (this->z != other.z) {
return false;
}
if (this->yaw != other.yaw) {
return false;
}
if (this->pitch != other.pitch) {
return false;
}
if (this->roll != other.roll) {
return false;
}
if (this->grip != other.grip) {
return false;
}
if (this->menu != other.menu) {
return false;
}
if (this->trigger != other.trigger) {
return false;
}
if (this->trackpad_pressed != other.trackpad_pressed) {
return false;
}
if (this->trackpad_touched != other.trackpad_touched) {
return false;
}
if (this->trackpad_x != other.trackpad_x) {
return false;
}
if (this->trackpad_y != other.trackpad_y) {
return false;
}
return true;
}
bool operator!=(const Controller_ & other) const
{
return !this->operator==(other);
}
}; // struct Controller_
// alias to use template instance with default allocator
using Controller =
vive_interfaces::msg::Controller_<std::allocator<void>>;
// constant definitions
} // namespace msg
} // namespace vive_interfaces
#endif // VIVE_INTERFACES__MSG__DETAIL__CONTROLLER__STRUCT_HPP_
<file_sep># generated from rosidl_cmake/cmake/rosidl_cmake-extras.cmake.in
set(vive_interfaces_IDL_FILES "msg/Hmd.idl;msg/Controller.idl")
set(vive_interfaces_INTERFACE_FILES "msg/Hmd.msg;msg/Controller.msg")
<file_sep>// generated from rosidl_typesupport_fastrtps_cpp/resource/idl__type_support.cpp.em
// with input from vive_interfaces:msg\Controller.idl
// generated code does not contain a copyright notice
#include "vive_interfaces/msg/detail/controller__rosidl_typesupport_fastrtps_cpp.hpp"
#include "vive_interfaces/msg/detail/controller__struct.hpp"
#include <limits>
#include <stdexcept>
#include <string>
#include "rosidl_typesupport_cpp/message_type_support.hpp"
#include "rosidl_typesupport_fastrtps_cpp/identifier.hpp"
#include "rosidl_typesupport_fastrtps_cpp/message_type_support.h"
#include "rosidl_typesupport_fastrtps_cpp/message_type_support_decl.hpp"
#include "rosidl_typesupport_fastrtps_cpp/wstring_conversion.hpp"
#include "fastcdr/Cdr.h"
// forward declaration of message dependencies and their conversion functions
namespace vive_interfaces
{
namespace msg
{
namespace typesupport_fastrtps_cpp
{
bool
ROSIDL_TYPESUPPORT_FASTRTPS_CPP_PUBLIC_vive_interfaces
cdr_serialize(
const vive_interfaces::msg::Controller & ros_message,
eprosima::fastcdr::Cdr & cdr)
{
// Member: x
cdr << ros_message.x;
// Member: y
cdr << ros_message.y;
// Member: z
cdr << ros_message.z;
// Member: yaw
cdr << ros_message.yaw;
// Member: pitch
cdr << ros_message.pitch;
// Member: roll
cdr << ros_message.roll;
// Member: grip
cdr << ros_message.grip;
// Member: menu
cdr << ros_message.menu;
// Member: trigger
cdr << ros_message.trigger;
// Member: trackpad_pressed
cdr << ros_message.trackpad_pressed;
// Member: trackpad_touched
cdr << ros_message.trackpad_touched;
// Member: trackpad_x
cdr << ros_message.trackpad_x;
// Member: trackpad_y
cdr << ros_message.trackpad_y;
return true;
}
bool
ROSIDL_TYPESUPPORT_FASTRTPS_CPP_PUBLIC_vive_interfaces
cdr_deserialize(
eprosima::fastcdr::Cdr & cdr,
vive_interfaces::msg::Controller & ros_message)
{
// Member: x
cdr >> ros_message.x;
// Member: y
cdr >> ros_message.y;
// Member: z
cdr >> ros_message.z;
// Member: yaw
cdr >> ros_message.yaw;
// Member: pitch
cdr >> ros_message.pitch;
// Member: roll
cdr >> ros_message.roll;
// Member: grip
cdr >> ros_message.grip;
// Member: menu
cdr >> ros_message.menu;
// Member: trigger
cdr >> ros_message.trigger;
// Member: trackpad_pressed
cdr >> ros_message.trackpad_pressed;
// Member: trackpad_touched
cdr >> ros_message.trackpad_touched;
// Member: trackpad_x
cdr >> ros_message.trackpad_x;
// Member: trackpad_y
cdr >> ros_message.trackpad_y;
return true;
}
size_t
ROSIDL_TYPESUPPORT_FASTRTPS_CPP_PUBLIC_vive_interfaces
get_serialized_size(
const vive_interfaces::msg::Controller & ros_message,
size_t current_alignment)
{
size_t initial_alignment = current_alignment;
const size_t padding = 4;
const size_t wchar_size = 4;
(void)padding;
(void)wchar_size;
// Member: x
{
size_t item_size = sizeof(ros_message.x);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// Member: y
{
size_t item_size = sizeof(ros_message.y);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// Member: z
{
size_t item_size = sizeof(ros_message.z);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// Member: yaw
{
size_t item_size = sizeof(ros_message.yaw);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// Member: pitch
{
size_t item_size = sizeof(ros_message.pitch);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// Member: roll
{
size_t item_size = sizeof(ros_message.roll);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// Member: grip
{
size_t item_size = sizeof(ros_message.grip);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// Member: menu
{
size_t item_size = sizeof(ros_message.menu);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// Member: trigger
{
size_t item_size = sizeof(ros_message.trigger);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// Member: trackpad_pressed
{
size_t item_size = sizeof(ros_message.trackpad_pressed);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// Member: trackpad_touched
{
size_t item_size = sizeof(ros_message.trackpad_touched);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// Member: trackpad_x
{
size_t item_size = sizeof(ros_message.trackpad_x);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// Member: trackpad_y
{
size_t item_size = sizeof(ros_message.trackpad_y);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
return current_alignment - initial_alignment;
}
size_t
ROSIDL_TYPESUPPORT_FASTRTPS_CPP_PUBLIC_vive_interfaces
max_serialized_size_Controller(
bool & full_bounded,
size_t current_alignment)
{
size_t initial_alignment = current_alignment;
const size_t padding = 4;
const size_t wchar_size = 4;
(void)padding;
(void)wchar_size;
(void)full_bounded;
// Member: x
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint32_t) +
eprosima::fastcdr::Cdr::alignment(current_alignment, sizeof(uint32_t));
}
// Member: y
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint32_t) +
eprosima::fastcdr::Cdr::alignment(current_alignment, sizeof(uint32_t));
}
// Member: z
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint32_t) +
eprosima::fastcdr::Cdr::alignment(current_alignment, sizeof(uint32_t));
}
// Member: yaw
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint32_t) +
eprosima::fastcdr::Cdr::alignment(current_alignment, sizeof(uint32_t));
}
// Member: pitch
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint32_t) +
eprosima::fastcdr::Cdr::alignment(current_alignment, sizeof(uint32_t));
}
// Member: roll
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint32_t) +
eprosima::fastcdr::Cdr::alignment(current_alignment, sizeof(uint32_t));
}
// Member: grip
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint8_t);
}
// Member: menu
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint8_t);
}
// Member: trigger
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint32_t) +
eprosima::fastcdr::Cdr::alignment(current_alignment, sizeof(uint32_t));
}
// Member: trackpad_pressed
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint8_t);
}
// Member: trackpad_touched
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint8_t);
}
// Member: trackpad_x
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint32_t) +
eprosima::fastcdr::Cdr::alignment(current_alignment, sizeof(uint32_t));
}
// Member: trackpad_y
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint32_t) +
eprosima::fastcdr::Cdr::alignment(current_alignment, sizeof(uint32_t));
}
return current_alignment - initial_alignment;
}
static bool _Controller__cdr_serialize(
const void * untyped_ros_message,
eprosima::fastcdr::Cdr & cdr)
{
auto typed_message =
static_cast<const vive_interfaces::msg::Controller *>(
untyped_ros_message);
return cdr_serialize(*typed_message, cdr);
}
static bool _Controller__cdr_deserialize(
eprosima::fastcdr::Cdr & cdr,
void * untyped_ros_message)
{
auto typed_message =
static_cast<vive_interfaces::msg::Controller *>(
untyped_ros_message);
return cdr_deserialize(cdr, *typed_message);
}
static uint32_t _Controller__get_serialized_size(
const void * untyped_ros_message)
{
auto typed_message =
static_cast<const vive_interfaces::msg::Controller *>(
untyped_ros_message);
return static_cast<uint32_t>(get_serialized_size(*typed_message, 0));
}
static size_t _Controller__max_serialized_size(bool & full_bounded)
{
return max_serialized_size_Controller(full_bounded, 0);
}
static message_type_support_callbacks_t _Controller__callbacks = {
"vive_interfaces::msg",
"Controller",
_Controller__cdr_serialize,
_Controller__cdr_deserialize,
_Controller__get_serialized_size,
_Controller__max_serialized_size
};
static rosidl_message_type_support_t _Controller__handle = {
rosidl_typesupport_fastrtps_cpp::typesupport_identifier,
&_Controller__callbacks,
get_message_typesupport_handle_function,
};
} // namespace typesupport_fastrtps_cpp
} // namespace msg
} // namespace vive_interfaces
namespace rosidl_typesupport_fastrtps_cpp
{
template<>
ROSIDL_TYPESUPPORT_FASTRTPS_CPP_EXPORT_vive_interfaces
const rosidl_message_type_support_t *
get_message_type_support_handle<vive_interfaces::msg::Controller>()
{
return &vive_interfaces::msg::typesupport_fastrtps_cpp::_Controller__handle;
}
} // namespace rosidl_typesupport_fastrtps_cpp
#ifdef __cplusplus
extern "C"
{
#endif
const rosidl_message_type_support_t *
ROSIDL_TYPESUPPORT_INTERFACE__MESSAGE_SYMBOL_NAME(rosidl_typesupport_fastrtps_cpp, vive_interfaces, msg, Controller)() {
return &vive_interfaces::msg::typesupport_fastrtps_cpp::_Controller__handle;
}
#ifdef __cplusplus
}
#endif
<file_sep>#----------------------------------------------------------------
# Generated CMake target import file for configuration "Debug".
#----------------------------------------------------------------
# Commands may need to know the format version.
set(CMAKE_IMPORT_FILE_VERSION 1)
# Import target "vive_interfaces::vive_interfaces__rosidl_generator_c" for configuration "Debug"
set_property(TARGET vive_interfaces::vive_interfaces__rosidl_generator_c APPEND PROPERTY IMPORTED_CONFIGURATIONS DEBUG)
set_target_properties(vive_interfaces::vive_interfaces__rosidl_generator_c PROPERTIES
IMPORTED_IMPLIB_DEBUG "${_IMPORT_PREFIX}/lib/vive_interfaces__rosidl_generator_c.lib"
IMPORTED_LOCATION_DEBUG "${_IMPORT_PREFIX}/bin/vive_interfaces__rosidl_generator_c.dll"
)
list(APPEND _IMPORT_CHECK_TARGETS vive_interfaces::vive_interfaces__rosidl_generator_c )
list(APPEND _IMPORT_CHECK_FILES_FOR_vive_interfaces::vive_interfaces__rosidl_generator_c "${_IMPORT_PREFIX}/lib/vive_interfaces__rosidl_generator_c.lib" "${_IMPORT_PREFIX}/bin/vive_interfaces__rosidl_generator_c.dll" )
# Commands beyond this point should not need to know the version.
set(CMAKE_IMPORT_FILE_VERSION)
<file_sep>// generated from rosidl_typesupport_c/resource/idl__type_support.cpp.em
// with input from vive_interfaces:msg\Hmd.idl
// generated code does not contain a copyright notice
#include "cstddef"
#include "rosidl_runtime_c/message_type_support_struct.h"
#include "vive_interfaces/msg/rosidl_typesupport_c__visibility_control.h"
#include "vive_interfaces/msg/detail/hmd__struct.h"
#include "rosidl_typesupport_c/identifier.h"
#include "rosidl_typesupport_c/message_type_support_dispatch.h"
#include "rosidl_typesupport_c/type_support_map.h"
#include "rosidl_typesupport_c/visibility_control.h"
#include "rosidl_typesupport_interface/macros.h"
namespace vive_interfaces
{
namespace msg
{
namespace rosidl_typesupport_c
{
typedef struct _Hmd_type_support_ids_t
{
const char * typesupport_identifier[3];
} _Hmd_type_support_ids_t;
static const _Hmd_type_support_ids_t _Hmd_message_typesupport_ids = {
{
"rosidl_typesupport_connext_c", // ::rosidl_typesupport_connext_c::typesupport_identifier,
"rosidl_typesupport_fastrtps_c", // ::rosidl_typesupport_fastrtps_c::typesupport_identifier,
"rosidl_typesupport_introspection_c", // ::rosidl_typesupport_introspection_c::typesupport_identifier,
}
};
typedef struct _Hmd_type_support_symbol_names_t
{
const char * symbol_name[3];
} _Hmd_type_support_symbol_names_t;
#define STRINGIFY_(s) #s
#define STRINGIFY(s) STRINGIFY_(s)
static const _Hmd_type_support_symbol_names_t _Hmd_message_typesupport_symbol_names = {
{
STRINGIFY(ROSIDL_TYPESUPPORT_INTERFACE__MESSAGE_SYMBOL_NAME(rosidl_typesupport_connext_c, vive_interfaces, msg, Hmd)),
STRINGIFY(ROSIDL_TYPESUPPORT_INTERFACE__MESSAGE_SYMBOL_NAME(rosidl_typesupport_fastrtps_c, vive_interfaces, msg, Hmd)),
STRINGIFY(ROSIDL_TYPESUPPORT_INTERFACE__MESSAGE_SYMBOL_NAME(rosidl_typesupport_introspection_c, vive_interfaces, msg, Hmd)),
}
};
typedef struct _Hmd_type_support_data_t
{
void * data[3];
} _Hmd_type_support_data_t;
static _Hmd_type_support_data_t _Hmd_message_typesupport_data = {
{
0, // will store the shared library later
0, // will store the shared library later
0, // will store the shared library later
}
};
static const type_support_map_t _Hmd_message_typesupport_map = {
3,
"vive_interfaces",
&_Hmd_message_typesupport_ids.typesupport_identifier[0],
&_Hmd_message_typesupport_symbol_names.symbol_name[0],
&_Hmd_message_typesupport_data.data[0],
};
static const rosidl_message_type_support_t Hmd_message_type_support_handle = {
rosidl_typesupport_c__typesupport_identifier,
reinterpret_cast<const type_support_map_t *>(&_Hmd_message_typesupport_map),
rosidl_typesupport_c__get_message_typesupport_handle_function,
};
} // namespace rosidl_typesupport_c
} // namespace msg
} // namespace vive_interfaces
#ifdef __cplusplus
extern "C"
{
#endif
ROSIDL_TYPESUPPORT_C_EXPORT_vive_interfaces
const rosidl_message_type_support_t *
ROSIDL_TYPESUPPORT_INTERFACE__MESSAGE_SYMBOL_NAME(rosidl_typesupport_c, vive_interfaces, msg, Hmd)() {
return &::vive_interfaces::msg::rosidl_typesupport_c::Hmd_message_type_support_handle;
}
#ifdef __cplusplus
}
#endif
<file_sep>// generated from rosidl_generator_c/resource/idl.h.em
// with input from vive_interfaces:msg\Hmd.idl
// generated code does not contain a copyright notice
#ifndef VIVE_INTERFACES__MSG__HMD_H_
#define VIVE_INTERFACES__MSG__HMD_H_
#include "vive_interfaces/msg/detail/hmd__struct.h"
#include "vive_interfaces/msg/detail/hmd__functions.h"
#include "vive_interfaces/msg/detail/hmd__type_support.h"
#endif // VIVE_INTERFACES__MSG__HMD_H_
<file_sep>// generated from rosidl_typesupport_introspection_c/resource/idl__rosidl_typesupport_introspection_c.h.em
// with input from vive_interfaces:msg\Controller.idl
// generated code does not contain a copyright notice
#ifndef VIVE_INTERFACES__MSG__DETAIL__CONTROLLER__ROSIDL_TYPESUPPORT_INTROSPECTION_C_H_
#define VIVE_INTERFACES__MSG__DETAIL__CONTROLLER__ROSIDL_TYPESUPPORT_INTROSPECTION_C_H_
#ifdef __cplusplus
extern "C"
{
#endif
#include "rosidl_runtime_c/message_type_support_struct.h"
#include "rosidl_typesupport_interface/macros.h"
#include "vive_interfaces/msg/rosidl_typesupport_introspection_c__visibility_control.h"
ROSIDL_TYPESUPPORT_INTROSPECTION_C_PUBLIC_vive_interfaces
const rosidl_message_type_support_t *
ROSIDL_TYPESUPPORT_INTERFACE__MESSAGE_SYMBOL_NAME(rosidl_typesupport_introspection_c, vive_interfaces, msg, Controller)();
#ifdef __cplusplus
}
#endif
#endif // VIVE_INTERFACES__MSG__DETAIL__CONTROLLER__ROSIDL_TYPESUPPORT_INTROSPECTION_C_H_
<file_sep>from __future__ import print_function
import argparse
import math
import time
import sys
import random
import pandas as pd
from signal import signal, SIGINT
from triad_openvr import triad_openvr
from pythonosc import osc_message_builder
from pythonosc import osc_bundle_builder
from pythonosc import udp_client
from reprint import output
from colorama import Fore, Back, Style
print(Back.CYAN + Fore.WHITE + Style.BRIGHT +
""" \n OpenVR OSC 1.0 \n \n"""
+ Style.RESET_ALL)
# Initialize Triad's OpenVR wrapper and print discovered objects
v = triad_openvr.triad_openvr()
print(Style.DIM)
v.print_discovered_objects()
print(Style.RESET_ALL)
# Sort through all discovered devices and keep track by type
deviceCount = 0
devices = {
'tracker': [],
'hmd': [],
'controller': [],
'tracking reference': []
}
for deviceName, device in v.devices.items():
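    # use the trailing segment of the device name (e.g. the "1" in a name like "tracker_1") as its id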
device._id = deviceName.split("_").pop()
devices[device.device_class.lower()].append(device)
deviceCount += 1
def handler(signal_received, frame):
# Handle any cleanup here: dump the collected pose/input history to CSV before exiting
df["Device Type"] = device_types
df["Device Id"] = device_ids
df["X"] = x
df["Y"] = y
df["Z"] = z
df["Yaw"] = yaw
df["Pitch"] = pitch
df["Roll"] = roll
df.to_csv("htc_vive_output.csv")
#print('SIGINT or CTRL-C detected. Exiting gracefully')
exit(0)
if __name__ == "__main__":
# Parse CLI arguments
parser = argparse.ArgumentParser()
parser.add_argument("--ip", default="127.0.0.1", help="ip of the OSC server")
parser.add_argument("--port", type=int, default=7000, help="port the OSC server is listening on")
parser.add_argument("--track", nargs="*", default=["hmd", "tracker", "controller"], help="devices to track (hmd, tracker, controller)")
parser.add_argument("--freq", type=int, default=250, help="tracking frequency (in ms)")
parser.add_argument("--mode", choices=['euler', 'quaternion'], default="euler", help="get pose data in euler angles or quaternions")
args = parser.parse_args()
# Tell Python to run the handler() function when SIGINT is received
signal(SIGINT, handler)
    # pose tracking interval (fixed at 250 Hz for now; the --freq argument is parsed but not yet applied)
interval = 1/250
# initialize OSC client
client = udp_client.SimpleUDPClient(args.ip, args.port)
# print some stuff
print(Fore.GREEN + "\rSending OSC tracking data on " + args.ip + ":" + str(args.port), end="\n\n")
print(
Fore.YELLOW +
'{0: <13}'.format("OSC address") +
'{0: <11}'.format("X") +
'{0: <11}'.format("Y") +
'{0: <11}'.format("Z") +
'{0: <11}'.format("Yaw") +
'{0: <11}'.format("Pitch") +
'{0: <11}'.format("Roll") +
# '{0: <14}'.format("unPacketNum") +
'{0: <10}'.format("Trigger") +
'{0: <12}'.format("Trackpad X") +
'{0: <12}'.format("Trackpad Y") +
# '{0: <18}'.format("ulButton Pressed") +
# '{0: <18}'.format("ulButton Touched") +
'{0: <12}'.format("Menu Button") +
'{0: <18}'.format("Trackpad Pressed") +
'{0: <18}'.format("Trackpad Touched") +
'{0: <12}'.format("Grip Button"))
pose_tracking = pd.DataFrame()
df = pd.DataFrame()
device_types = []
device_ids = []
x = []
y = []
z = []
yaw = []
pitch = []
roll = []
unpacketnum = []
trigger = []
trackpad_x = []
trackpad_y = []
ulbuttonpressed = []
ulbuttontouched = []
menu_button = []
trackpad_pressed = []
trackpad_touched = []
    grip_button = []
    # fallback pose used until the first successful read from a device
    previous_pose = [0.0] * 6
with output(output_type="list", initial_len=5, interval=0) as output_list:
while(True):
start = time.time()
# Initialize OSC bundle for all tracked controllers
bundle = osc_bundle_builder.OscBundleBuilder(osc_bundle_builder.IMMEDIATELY)
# iterate over tracked device types and build OSC messages
di = 0
for deviceType in args.track:
for device in devices[deviceType]:
                # get device pose
pose = device.get_pose_euler()
velocity = device.get_velocity()
angular_velocity = device.get_angular_velocity()
controller_inputs = device.get_controller_inputs()
haptic_pulse = device.trigger_haptic_pulse()
# Build message and add to bundle
msg = osc_message_builder.OscMessageBuilder(address="/" + deviceType + "/" + device._id)
#msg.add_arg(device.get_pose_euler())
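                    # NOTE: the pose values are not currently attached as message arguments;
                    # uncomment the add_arg call above to include them in the OSC message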
bundle.add_content(msg.build())
### report device pose in the console
txt = Fore.CYAN + '{0: <13}'.format(deviceType + device._id) + Fore.WHITE + Style.BRIGHT
if not pose:
pose = previous_pose
for each in pose:
txt += '{0: <10}'.format("%.4f" % each)
txt += " "
# txt += '{0: <14}'.format(controller_inputs["unPacketNum"])
txt += '{0: <10.4f}'.format(controller_inputs["trigger"])
txt += '{0: <12.4f}'.format(controller_inputs["trackpad_x"])
txt += '{0: <12.4f}'.format(controller_inputs["trackpad_y"])
# txt += '{0: <18}'.format(controller_inputs["ulButtonPressed"])
# txt += '{0: <18}'.format(controller_inputs["ulButtonTouched"])
txt += '{0: <12}'.format(controller_inputs["menu_button"])
txt += '{0: <18}'.format(controller_inputs["trackpad_pressed"])
txt += '{0: <18}'.format(controller_inputs["trackpad_touched"])
txt += '{0: <12}'.format(controller_inputs["grip_button"])
device_types.append(deviceType)
device_ids.append(device._id)
x.append(pose[0])
y.append(pose[1])
z.append(pose[2])
yaw.append(pose[3])
pitch.append(pose[4])
roll.append(pose[5])
unpacketnum.append(controller_inputs["unPacketNum"])
trigger.append(controller_inputs["trigger"])
trackpad_x.append(controller_inputs["trackpad_x"])
trackpad_y.append(controller_inputs["trackpad_y"])
ulbuttonpressed.append(controller_inputs["ulButtonPressed"])
ulbuttontouched.append(controller_inputs["ulButtonTouched"])
menu_button.append(controller_inputs["menu_button"])
trackpad_pressed.append(controller_inputs["trackpad_pressed"])
trackpad_touched.append(controller_inputs["trackpad_touched"])
grip_button.append(controller_inputs["grip_button"])
output_list[di] = txt
di += 1
previous_pose = pose
# Send the bundle
client.send(bundle.build())
# wait for next tick
sleep_time = interval-(time.time()-start)
if sleep_time>0:
time.sleep(sleep_time)
<file_sep>// generated from rosidl_typesupport_cpp/resource/idl__type_support.cpp.em
// with input from vive_interfaces:msg\Controller.idl
// generated code does not contain a copyright notice
#include "cstddef"
#include "rosidl_runtime_c/message_type_support_struct.h"
#include "vive_interfaces/msg/detail/controller__struct.hpp"
#include "rosidl_typesupport_cpp/identifier.hpp"
#include "rosidl_typesupport_cpp/message_type_support.hpp"
#include "rosidl_typesupport_c/type_support_map.h"
#include "rosidl_typesupport_cpp/message_type_support_dispatch.hpp"
#include "rosidl_typesupport_cpp/visibility_control.h"
#include "rosidl_typesupport_interface/macros.h"
namespace vive_interfaces
{
namespace msg
{
namespace rosidl_typesupport_cpp
{
typedef struct _Controller_type_support_ids_t
{
const char * typesupport_identifier[3];
} _Controller_type_support_ids_t;
static const _Controller_type_support_ids_t _Controller_message_typesupport_ids = {
{
"rosidl_typesupport_connext_cpp", // ::rosidl_typesupport_connext_cpp::typesupport_identifier,
"rosidl_typesupport_fastrtps_cpp", // ::rosidl_typesupport_fastrtps_cpp::typesupport_identifier,
"rosidl_typesupport_introspection_cpp", // ::rosidl_typesupport_introspection_cpp::typesupport_identifier,
}
};
typedef struct _Controller_type_support_symbol_names_t
{
const char * symbol_name[3];
} _Controller_type_support_symbol_names_t;
#define STRINGIFY_(s) #s
#define STRINGIFY(s) STRINGIFY_(s)
static const _Controller_type_support_symbol_names_t _Controller_message_typesupport_symbol_names = {
{
STRINGIFY(ROSIDL_TYPESUPPORT_INTERFACE__MESSAGE_SYMBOL_NAME(rosidl_typesupport_connext_cpp, vive_interfaces, msg, Controller)),
STRINGIFY(ROSIDL_TYPESUPPORT_INTERFACE__MESSAGE_SYMBOL_NAME(rosidl_typesupport_fastrtps_cpp, vive_interfaces, msg, Controller)),
STRINGIFY(ROSIDL_TYPESUPPORT_INTERFACE__MESSAGE_SYMBOL_NAME(rosidl_typesupport_introspection_cpp, vive_interfaces, msg, Controller)),
}
};
typedef struct _Controller_type_support_data_t
{
void * data[3];
} _Controller_type_support_data_t;
static _Controller_type_support_data_t _Controller_message_typesupport_data = {
{
0, // will store the shared library later
0, // will store the shared library later
0, // will store the shared library later
}
};
static const type_support_map_t _Controller_message_typesupport_map = {
3,
"vive_interfaces",
&_Controller_message_typesupport_ids.typesupport_identifier[0],
&_Controller_message_typesupport_symbol_names.symbol_name[0],
&_Controller_message_typesupport_data.data[0],
};
static const rosidl_message_type_support_t Controller_message_type_support_handle = {
::rosidl_typesupport_cpp::typesupport_identifier,
reinterpret_cast<const type_support_map_t *>(&_Controller_message_typesupport_map),
::rosidl_typesupport_cpp::get_message_typesupport_handle_function,
};
} // namespace rosidl_typesupport_cpp
} // namespace msg
} // namespace vive_interfaces
namespace rosidl_typesupport_cpp
{
template<>
ROSIDL_TYPESUPPORT_CPP_PUBLIC
const rosidl_message_type_support_t *
get_message_type_support_handle<vive_interfaces::msg::Controller>()
{
return &::vive_interfaces::msg::rosidl_typesupport_cpp::Controller_message_type_support_handle;
}
#ifdef __cplusplus
extern "C"
{
#endif
ROSIDL_TYPESUPPORT_CPP_PUBLIC
const rosidl_message_type_support_t *
ROSIDL_TYPESUPPORT_INTERFACE__MESSAGE_SYMBOL_NAME(rosidl_typesupport_cpp, vive_interfaces, msg, Controller)() {
return get_message_type_support_handle<vive_interfaces::msg::Controller>();
}
#ifdef __cplusplus
}
#endif
} // namespace rosidl_typesupport_cpp
<file_sep>// generated from rosidl_generator_cpp/resource/idl__builder.hpp.em
// with input from vive_interfaces:msg\Controller.idl
// generated code does not contain a copyright notice
#ifndef VIVE_INTERFACES__MSG__DETAIL__CONTROLLER__BUILDER_HPP_
#define VIVE_INTERFACES__MSG__DETAIL__CONTROLLER__BUILDER_HPP_
#include "vive_interfaces/msg/detail/controller__struct.hpp"
#include <rosidl_runtime_cpp/message_initialization.hpp>
#include <algorithm>
#include <utility>
namespace vive_interfaces
{
namespace msg
{
namespace builder
{
class Init_Controller_trackpad_y
{
public:
explicit Init_Controller_trackpad_y(::vive_interfaces::msg::Controller & msg)
: msg_(msg)
{}
::vive_interfaces::msg::Controller trackpad_y(::vive_interfaces::msg::Controller::_trackpad_y_type arg)
{
msg_.trackpad_y = std::move(arg);
return std::move(msg_);
}
private:
::vive_interfaces::msg::Controller msg_;
};
class Init_Controller_trackpad_x
{
public:
explicit Init_Controller_trackpad_x(::vive_interfaces::msg::Controller & msg)
: msg_(msg)
{}
Init_Controller_trackpad_y trackpad_x(::vive_interfaces::msg::Controller::_trackpad_x_type arg)
{
msg_.trackpad_x = std::move(arg);
return Init_Controller_trackpad_y(msg_);
}
private:
::vive_interfaces::msg::Controller msg_;
};
class Init_Controller_trackpad_touched
{
public:
explicit Init_Controller_trackpad_touched(::vive_interfaces::msg::Controller & msg)
: msg_(msg)
{}
Init_Controller_trackpad_x trackpad_touched(::vive_interfaces::msg::Controller::_trackpad_touched_type arg)
{
msg_.trackpad_touched = std::move(arg);
return Init_Controller_trackpad_x(msg_);
}
private:
::vive_interfaces::msg::Controller msg_;
};
class Init_Controller_trackpad_pressed
{
public:
explicit Init_Controller_trackpad_pressed(::vive_interfaces::msg::Controller & msg)
: msg_(msg)
{}
Init_Controller_trackpad_touched trackpad_pressed(::vive_interfaces::msg::Controller::_trackpad_pressed_type arg)
{
msg_.trackpad_pressed = std::move(arg);
return Init_Controller_trackpad_touched(msg_);
}
private:
::vive_interfaces::msg::Controller msg_;
};
class Init_Controller_trigger
{
public:
explicit Init_Controller_trigger(::vive_interfaces::msg::Controller & msg)
: msg_(msg)
{}
Init_Controller_trackpad_pressed trigger(::vive_interfaces::msg::Controller::_trigger_type arg)
{
msg_.trigger = std::move(arg);
return Init_Controller_trackpad_pressed(msg_);
}
private:
::vive_interfaces::msg::Controller msg_;
};
class Init_Controller_menu
{
public:
explicit Init_Controller_menu(::vive_interfaces::msg::Controller & msg)
: msg_(msg)
{}
Init_Controller_trigger menu(::vive_interfaces::msg::Controller::_menu_type arg)
{
msg_.menu = std::move(arg);
return Init_Controller_trigger(msg_);
}
private:
::vive_interfaces::msg::Controller msg_;
};
class Init_Controller_grip
{
public:
explicit Init_Controller_grip(::vive_interfaces::msg::Controller & msg)
: msg_(msg)
{}
Init_Controller_menu grip(::vive_interfaces::msg::Controller::_grip_type arg)
{
msg_.grip = std::move(arg);
return Init_Controller_menu(msg_);
}
private:
::vive_interfaces::msg::Controller msg_;
};
class Init_Controller_roll
{
public:
explicit Init_Controller_roll(::vive_interfaces::msg::Controller & msg)
: msg_(msg)
{}
Init_Controller_grip roll(::vive_interfaces::msg::Controller::_roll_type arg)
{
msg_.roll = std::move(arg);
return Init_Controller_grip(msg_);
}
private:
::vive_interfaces::msg::Controller msg_;
};
class Init_Controller_pitch
{
public:
explicit Init_Controller_pitch(::vive_interfaces::msg::Controller & msg)
: msg_(msg)
{}
Init_Controller_roll pitch(::vive_interfaces::msg::Controller::_pitch_type arg)
{
msg_.pitch = std::move(arg);
return Init_Controller_roll(msg_);
}
private:
::vive_interfaces::msg::Controller msg_;
};
class Init_Controller_yaw
{
public:
explicit Init_Controller_yaw(::vive_interfaces::msg::Controller & msg)
: msg_(msg)
{}
Init_Controller_pitch yaw(::vive_interfaces::msg::Controller::_yaw_type arg)
{
msg_.yaw = std::move(arg);
return Init_Controller_pitch(msg_);
}
private:
::vive_interfaces::msg::Controller msg_;
};
class Init_Controller_z
{
public:
explicit Init_Controller_z(::vive_interfaces::msg::Controller & msg)
: msg_(msg)
{}
Init_Controller_yaw z(::vive_interfaces::msg::Controller::_z_type arg)
{
msg_.z = std::move(arg);
return Init_Controller_yaw(msg_);
}
private:
::vive_interfaces::msg::Controller msg_;
};
class Init_Controller_y
{
public:
explicit Init_Controller_y(::vive_interfaces::msg::Controller & msg)
: msg_(msg)
{}
Init_Controller_z y(::vive_interfaces::msg::Controller::_y_type arg)
{
msg_.y = std::move(arg);
return Init_Controller_z(msg_);
}
private:
::vive_interfaces::msg::Controller msg_;
};
class Init_Controller_x
{
public:
Init_Controller_x()
: msg_(::rosidl_runtime_cpp::MessageInitialization::SKIP)
{}
Init_Controller_y x(::vive_interfaces::msg::Controller::_x_type arg)
{
msg_.x = std::move(arg);
return Init_Controller_y(msg_);
}
private:
::vive_interfaces::msg::Controller msg_;
};
} // namespace builder
} // namespace msg
template<typename MessageType>
auto build();
template<>
inline
auto build<::vive_interfaces::msg::Controller>()
{
return vive_interfaces::msg::builder::Init_Controller_x();
}
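// Illustrative use of the generated builder above (a sketch added for clarity,
// not part of the generated output): the setters are chained in field
// declaration order, each one returning the initializer for the next field, e.g.
//   auto msg = vive_interfaces::build<vive_interfaces::msg::Controller>()
//     .x(0.0f).y(0.0f).z(0.0f).yaw(0.0f).pitch(0.0f).roll(0.0f)
//     .grip(0).menu(0).trigger(0.0f)
//     .trackpad_pressed(0).trackpad_touched(0).trackpad_x(0.0f).trackpad_y(0.0f);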
} // namespace vive_interfaces
#endif // VIVE_INTERFACES__MSG__DETAIL__CONTROLLER__BUILDER_HPP_
<file_sep>// generated from rosidl_typesupport_introspection_cpp/resource/idl__type_support.cpp.em
// with input from vive_interfaces:msg\Controller.idl
// generated code does not contain a copyright notice
#include "array"
#include "cstddef"
#include "string"
#include "vector"
#include "rosidl_runtime_c/message_type_support_struct.h"
#include "rosidl_typesupport_cpp/message_type_support.hpp"
#include "rosidl_typesupport_interface/macros.h"
#include "vive_interfaces/msg/detail/controller__struct.hpp"
#include "rosidl_typesupport_introspection_cpp/field_types.hpp"
#include "rosidl_typesupport_introspection_cpp/identifier.hpp"
#include "rosidl_typesupport_introspection_cpp/message_introspection.hpp"
#include "rosidl_typesupport_introspection_cpp/message_type_support_decl.hpp"
#include "rosidl_typesupport_introspection_cpp/visibility_control.h"
namespace vive_interfaces
{
namespace msg
{
namespace rosidl_typesupport_introspection_cpp
{
void Controller_init_function(
void * message_memory, rosidl_runtime_cpp::MessageInitialization _init)
{
new (message_memory) vive_interfaces::msg::Controller(_init);
}
void Controller_fini_function(void * message_memory)
{
auto typed_message = static_cast<vive_interfaces::msg::Controller *>(message_memory);
typed_message->~Controller();
}
static const ::rosidl_typesupport_introspection_cpp::MessageMember Controller_message_member_array[13] = {
{
"x", // name
::rosidl_typesupport_introspection_cpp::ROS_TYPE_FLOAT, // type
0, // upper bound of string
nullptr, // members of sub message
false, // is array
0, // array size
false, // is upper bound
offsetof(vive_interfaces::msg::Controller, x), // bytes offset in struct
nullptr, // default value
nullptr, // size() function pointer
nullptr, // get_const(index) function pointer
nullptr, // get(index) function pointer
nullptr // resize(index) function pointer
},
{
"y", // name
::rosidl_typesupport_introspection_cpp::ROS_TYPE_FLOAT, // type
0, // upper bound of string
nullptr, // members of sub message
false, // is array
0, // array size
false, // is upper bound
offsetof(vive_interfaces::msg::Controller, y), // bytes offset in struct
nullptr, // default value
nullptr, // size() function pointer
nullptr, // get_const(index) function pointer
nullptr, // get(index) function pointer
nullptr // resize(index) function pointer
},
{
"z", // name
::rosidl_typesupport_introspection_cpp::ROS_TYPE_FLOAT, // type
0, // upper bound of string
nullptr, // members of sub message
false, // is array
0, // array size
false, // is upper bound
offsetof(vive_interfaces::msg::Controller, z), // bytes offset in struct
nullptr, // default value
nullptr, // size() function pointer
nullptr, // get_const(index) function pointer
nullptr, // get(index) function pointer
nullptr // resize(index) function pointer
},
{
"yaw", // name
::rosidl_typesupport_introspection_cpp::ROS_TYPE_FLOAT, // type
0, // upper bound of string
nullptr, // members of sub message
false, // is array
0, // array size
false, // is upper bound
offsetof(vive_interfaces::msg::Controller, yaw), // bytes offset in struct
nullptr, // default value
nullptr, // size() function pointer
nullptr, // get_const(index) function pointer
nullptr, // get(index) function pointer
nullptr // resize(index) function pointer
},
{
"pitch", // name
::rosidl_typesupport_introspection_cpp::ROS_TYPE_FLOAT, // type
0, // upper bound of string
nullptr, // members of sub message
false, // is array
0, // array size
false, // is upper bound
offsetof(vive_interfaces::msg::Controller, pitch), // bytes offset in struct
nullptr, // default value
nullptr, // size() function pointer
nullptr, // get_const(index) function pointer
nullptr, // get(index) function pointer
nullptr // resize(index) function pointer
},
{
"roll", // name
::rosidl_typesupport_introspection_cpp::ROS_TYPE_FLOAT, // type
0, // upper bound of string
nullptr, // members of sub message
false, // is array
0, // array size
false, // is upper bound
offsetof(vive_interfaces::msg::Controller, roll), // bytes offset in struct
nullptr, // default value
nullptr, // size() function pointer
nullptr, // get_const(index) function pointer
nullptr, // get(index) function pointer
nullptr // resize(index) function pointer
},
{
"grip", // name
::rosidl_typesupport_introspection_cpp::ROS_TYPE_INT8, // type
0, // upper bound of string
nullptr, // members of sub message
false, // is array
0, // array size
false, // is upper bound
offsetof(vive_interfaces::msg::Controller, grip), // bytes offset in struct
nullptr, // default value
nullptr, // size() function pointer
nullptr, // get_const(index) function pointer
nullptr, // get(index) function pointer
nullptr // resize(index) function pointer
},
{
"menu", // name
::rosidl_typesupport_introspection_cpp::ROS_TYPE_INT8, // type
0, // upper bound of string
nullptr, // members of sub message
false, // is array
0, // array size
false, // is upper bound
offsetof(vive_interfaces::msg::Controller, menu), // bytes offset in struct
nullptr, // default value
nullptr, // size() function pointer
nullptr, // get_const(index) function pointer
nullptr, // get(index) function pointer
nullptr // resize(index) function pointer
},
{
"trigger", // name
::rosidl_typesupport_introspection_cpp::ROS_TYPE_FLOAT, // type
0, // upper bound of string
nullptr, // members of sub message
false, // is array
0, // array size
false, // is upper bound
offsetof(vive_interfaces::msg::Controller, trigger), // bytes offset in struct
nullptr, // default value
nullptr, // size() function pointer
nullptr, // get_const(index) function pointer
nullptr, // get(index) function pointer
nullptr // resize(index) function pointer
},
{
"trackpad_pressed", // name
::rosidl_typesupport_introspection_cpp::ROS_TYPE_INT8, // type
0, // upper bound of string
nullptr, // members of sub message
false, // is array
0, // array size
false, // is upper bound
offsetof(vive_interfaces::msg::Controller, trackpad_pressed), // bytes offset in struct
nullptr, // default value
nullptr, // size() function pointer
nullptr, // get_const(index) function pointer
nullptr, // get(index) function pointer
nullptr // resize(index) function pointer
},
{
"trackpad_touched", // name
::rosidl_typesupport_introspection_cpp::ROS_TYPE_INT8, // type
0, // upper bound of string
nullptr, // members of sub message
false, // is array
0, // array size
false, // is upper bound
offsetof(vive_interfaces::msg::Controller, trackpad_touched), // bytes offset in struct
nullptr, // default value
nullptr, // size() function pointer
nullptr, // get_const(index) function pointer
nullptr, // get(index) function pointer
nullptr // resize(index) function pointer
},
{
"trackpad_x", // name
::rosidl_typesupport_introspection_cpp::ROS_TYPE_FLOAT, // type
0, // upper bound of string
nullptr, // members of sub message
false, // is array
0, // array size
false, // is upper bound
offsetof(vive_interfaces::msg::Controller, trackpad_x), // bytes offset in struct
nullptr, // default value
nullptr, // size() function pointer
nullptr, // get_const(index) function pointer
nullptr, // get(index) function pointer
nullptr // resize(index) function pointer
},
{
"trackpad_y", // name
::rosidl_typesupport_introspection_cpp::ROS_TYPE_FLOAT, // type
0, // upper bound of string
nullptr, // members of sub message
false, // is array
0, // array size
false, // is upper bound
offsetof(vive_interfaces::msg::Controller, trackpad_y), // bytes offset in struct
nullptr, // default value
nullptr, // size() function pointer
nullptr, // get_const(index) function pointer
nullptr, // get(index) function pointer
nullptr // resize(index) function pointer
}
};
static const ::rosidl_typesupport_introspection_cpp::MessageMembers Controller_message_members = {
"vive_interfaces::msg", // message namespace
"Controller", // message name
13, // number of fields
sizeof(vive_interfaces::msg::Controller),
Controller_message_member_array, // message members
Controller_init_function, // function to initialize message memory (memory has to be allocated)
Controller_fini_function // function to terminate message instance (will not free memory)
};
static const rosidl_message_type_support_t Controller_message_type_support_handle = {
::rosidl_typesupport_introspection_cpp::typesupport_identifier,
&Controller_message_members,
get_message_typesupport_handle_function,
};
} // namespace rosidl_typesupport_introspection_cpp
} // namespace msg
} // namespace vive_interfaces
namespace rosidl_typesupport_introspection_cpp
{
template<>
ROSIDL_TYPESUPPORT_INTROSPECTION_CPP_PUBLIC
const rosidl_message_type_support_t *
get_message_type_support_handle<vive_interfaces::msg::Controller>()
{
return &::vive_interfaces::msg::rosidl_typesupport_introspection_cpp::Controller_message_type_support_handle;
}
} // namespace rosidl_typesupport_introspection_cpp
#ifdef __cplusplus
extern "C"
{
#endif
ROSIDL_TYPESUPPORT_INTROSPECTION_CPP_PUBLIC
const rosidl_message_type_support_t *
ROSIDL_TYPESUPPORT_INTERFACE__MESSAGE_SYMBOL_NAME(rosidl_typesupport_introspection_cpp, vive_interfaces, msg, Controller)() {
return &::vive_interfaces::msg::rosidl_typesupport_introspection_cpp::Controller_message_type_support_handle;
}
#ifdef __cplusplus
}
#endif
<file_sep>#----------------------------------------------------------------
# Generated CMake target import file for configuration "RelWithDebInfo".
#----------------------------------------------------------------
# Commands may need to know the format version.
set(CMAKE_IMPORT_FILE_VERSION 1)
# Import target "vive_interfaces::vive_interfaces__rosidl_typesupport_introspection_c" for configuration "RelWithDebInfo"
set_property(TARGET vive_interfaces::vive_interfaces__rosidl_typesupport_introspection_c APPEND PROPERTY IMPORTED_CONFIGURATIONS RELWITHDEBINFO)
set_target_properties(vive_interfaces::vive_interfaces__rosidl_typesupport_introspection_c PROPERTIES
IMPORTED_IMPLIB_RELWITHDEBINFO "${_IMPORT_PREFIX}/lib/vive_interfaces__rosidl_typesupport_introspection_c.lib"
IMPORTED_LOCATION_RELWITHDEBINFO "${_IMPORT_PREFIX}/bin/vive_interfaces__rosidl_typesupport_introspection_c.dll"
)
list(APPEND _IMPORT_CHECK_TARGETS vive_interfaces::vive_interfaces__rosidl_typesupport_introspection_c )
list(APPEND _IMPORT_CHECK_FILES_FOR_vive_interfaces::vive_interfaces__rosidl_typesupport_introspection_c "${_IMPORT_PREFIX}/lib/vive_interfaces__rosidl_typesupport_introspection_c.lib" "${_IMPORT_PREFIX}/bin/vive_interfaces__rosidl_typesupport_introspection_c.dll" )
# Commands beyond this point should not need to know the version.
set(CMAKE_IMPORT_FILE_VERSION)
<file_sep>#----------------------------------------------------------------
# Generated CMake target import file for configuration "RelWithDebInfo".
#----------------------------------------------------------------
# Commands may need to know the format version.
set(CMAKE_IMPORT_FILE_VERSION 1)
# Import target "vive_interfaces::vive_interfaces__rosidl_typesupport_cpp" for configuration "RelWithDebInfo"
set_property(TARGET vive_interfaces::vive_interfaces__rosidl_typesupport_cpp APPEND PROPERTY IMPORTED_CONFIGURATIONS RELWITHDEBINFO)
set_target_properties(vive_interfaces::vive_interfaces__rosidl_typesupport_cpp PROPERTIES
IMPORTED_IMPLIB_RELWITHDEBINFO "${_IMPORT_PREFIX}/lib/vive_interfaces__rosidl_typesupport_cpp.lib"
IMPORTED_LOCATION_RELWITHDEBINFO "${_IMPORT_PREFIX}/bin/vive_interfaces__rosidl_typesupport_cpp.dll"
)
list(APPEND _IMPORT_CHECK_TARGETS vive_interfaces::vive_interfaces__rosidl_typesupport_cpp )
list(APPEND _IMPORT_CHECK_FILES_FOR_vive_interfaces::vive_interfaces__rosidl_typesupport_cpp "${_IMPORT_PREFIX}/lib/vive_interfaces__rosidl_typesupport_cpp.lib" "${_IMPORT_PREFIX}/bin/vive_interfaces__rosidl_typesupport_cpp.dll" )
# Commands beyond this point should not need to know the version.
set(CMAKE_IMPORT_FILE_VERSION)
<file_sep>#----------------------------------------------------------------
# Generated CMake target import file for configuration "MinSizeRel".
#----------------------------------------------------------------
# Commands may need to know the format version.
set(CMAKE_IMPORT_FILE_VERSION 1)
# Import target "vive_interfaces::vive_interfaces__rosidl_generator_c" for configuration "MinSizeRel"
set_property(TARGET vive_interfaces::vive_interfaces__rosidl_generator_c APPEND PROPERTY IMPORTED_CONFIGURATIONS MINSIZEREL)
set_target_properties(vive_interfaces::vive_interfaces__rosidl_generator_c PROPERTIES
IMPORTED_IMPLIB_MINSIZEREL "${_IMPORT_PREFIX}/lib/vive_interfaces__rosidl_generator_c.lib"
IMPORTED_LOCATION_MINSIZEREL "${_IMPORT_PREFIX}/bin/vive_interfaces__rosidl_generator_c.dll"
)
list(APPEND _IMPORT_CHECK_TARGETS vive_interfaces::vive_interfaces__rosidl_generator_c )
list(APPEND _IMPORT_CHECK_FILES_FOR_vive_interfaces::vive_interfaces__rosidl_generator_c "${_IMPORT_PREFIX}/lib/vive_interfaces__rosidl_generator_c.lib" "${_IMPORT_PREFIX}/bin/vive_interfaces__rosidl_generator_c.dll" )
# Commands beyond this point should not need to know the version.
set(CMAKE_IMPORT_FILE_VERSION)
<file_sep>set(_AMENT_PACKAGE_NAME "vive_interfaces")
set(vive_interfaces_VERSION "0.0.0")
set(vive_interfaces_MAINTAINER "hiver <<EMAIL>>")
set(vive_interfaces_BUILD_DEPENDS "rosidl_default_generators")
set(vive_interfaces_BUILDTOOL_DEPENDS "ament_cmake")
set(vive_interfaces_BUILD_EXPORT_DEPENDS )
set(vive_interfaces_BUILDTOOL_EXPORT_DEPENDS )
set(vive_interfaces_EXEC_DEPENDS "rosidl_default_runtime")
set(vive_interfaces_TEST_DEPENDS "ament_lint_auto" "ament_lint_common")
set(vive_interfaces_GROUP_DEPENDS )
set(vive_interfaces_MEMBER_OF_GROUPS "rosidl_interface_packages")
set(vive_interfaces_DEPRECATED "")
set(vive_interfaces_EXPORT_TAGS)
list(APPEND vive_interfaces_EXPORT_TAGS "<build_type>ament_cmake</build_type>")
<file_sep>// generated from rosidl_generator_cpp/resource/idl.hpp.em
// generated code does not contain a copyright notice
#ifndef VIVE_INTERFACES__MSG__HMD_HPP_
#define VIVE_INTERFACES__MSG__HMD_HPP_
#include "vive_interfaces/msg/detail/hmd__struct.hpp"
#include "vive_interfaces/msg/detail/hmd__builder.hpp"
#include "vive_interfaces/msg/detail/hmd__traits.hpp"
#endif // VIVE_INTERFACES__MSG__HMD_HPP_
<file_sep>// generated from rosidl_typesupport_fastrtps_c/resource/idl__type_support_c.cpp.em
// with input from vive_interfaces:msg\Controller.idl
// generated code does not contain a copyright notice
#include "vive_interfaces/msg/detail/controller__rosidl_typesupport_fastrtps_c.h"
#include <cassert>
#include <limits>
#include <string>
#include "rosidl_typesupport_fastrtps_c/identifier.h"
#include "rosidl_typesupport_fastrtps_c/wstring_conversion.hpp"
#include "rosidl_typesupport_fastrtps_cpp/message_type_support.h"
#include "vive_interfaces/msg/rosidl_typesupport_fastrtps_c__visibility_control.h"
#include "vive_interfaces/msg/detail/controller__struct.h"
#include "vive_interfaces/msg/detail/controller__functions.h"
#include "fastcdr/Cdr.h"
#ifndef _WIN32
# pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Wunused-parameter"
# ifdef __clang__
# pragma clang diagnostic ignored "-Wdeprecated-register"
# pragma clang diagnostic ignored "-Wreturn-type-c-linkage"
# endif
#endif
#ifndef _WIN32
# pragma GCC diagnostic pop
#endif
// includes and forward declarations of message dependencies and their conversion functions
#if defined(__cplusplus)
extern "C"
{
#endif
// forward declare type support functions
using _Controller__ros_msg_type = vive_interfaces__msg__Controller;
static bool _Controller__cdr_serialize(
const void * untyped_ros_message,
eprosima::fastcdr::Cdr & cdr)
{
if (!untyped_ros_message) {
fprintf(stderr, "ros message handle is null\n");
return false;
}
const _Controller__ros_msg_type * ros_message = static_cast<const _Controller__ros_msg_type *>(untyped_ros_message);
// Field name: x
{
cdr << ros_message->x;
}
// Field name: y
{
cdr << ros_message->y;
}
// Field name: z
{
cdr << ros_message->z;
}
// Field name: yaw
{
cdr << ros_message->yaw;
}
// Field name: pitch
{
cdr << ros_message->pitch;
}
// Field name: roll
{
cdr << ros_message->roll;
}
// Field name: grip
{
cdr << ros_message->grip;
}
// Field name: menu
{
cdr << ros_message->menu;
}
// Field name: trigger
{
cdr << ros_message->trigger;
}
// Field name: trackpad_pressed
{
cdr << ros_message->trackpad_pressed;
}
// Field name: trackpad_touched
{
cdr << ros_message->trackpad_touched;
}
// Field name: trackpad_x
{
cdr << ros_message->trackpad_x;
}
// Field name: trackpad_y
{
cdr << ros_message->trackpad_y;
}
return true;
}
static bool _Controller__cdr_deserialize(
eprosima::fastcdr::Cdr & cdr,
void * untyped_ros_message)
{
if (!untyped_ros_message) {
fprintf(stderr, "ros message handle is null\n");
return false;
}
_Controller__ros_msg_type * ros_message = static_cast<_Controller__ros_msg_type *>(untyped_ros_message);
// Field name: x
{
cdr >> ros_message->x;
}
// Field name: y
{
cdr >> ros_message->y;
}
// Field name: z
{
cdr >> ros_message->z;
}
// Field name: yaw
{
cdr >> ros_message->yaw;
}
// Field name: pitch
{
cdr >> ros_message->pitch;
}
// Field name: roll
{
cdr >> ros_message->roll;
}
// Field name: grip
{
cdr >> ros_message->grip;
}
// Field name: menu
{
cdr >> ros_message->menu;
}
// Field name: trigger
{
cdr >> ros_message->trigger;
}
// Field name: trackpad_pressed
{
cdr >> ros_message->trackpad_pressed;
}
// Field name: trackpad_touched
{
cdr >> ros_message->trackpad_touched;
}
// Field name: trackpad_x
{
cdr >> ros_message->trackpad_x;
}
// Field name: trackpad_y
{
cdr >> ros_message->trackpad_y;
}
return true;
}
ROSIDL_TYPESUPPORT_FASTRTPS_C_PUBLIC_vive_interfaces
size_t get_serialized_size_vive_interfaces__msg__Controller(
const void * untyped_ros_message,
size_t current_alignment)
{
const _Controller__ros_msg_type * ros_message = static_cast<const _Controller__ros_msg_type *>(untyped_ros_message);
(void)ros_message;
size_t initial_alignment = current_alignment;
const size_t padding = 4;
const size_t wchar_size = 4;
(void)padding;
(void)wchar_size;
// field.name x
{
size_t item_size = sizeof(ros_message->x);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// field.name y
{
size_t item_size = sizeof(ros_message->y);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// field.name z
{
size_t item_size = sizeof(ros_message->z);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// field.name yaw
{
size_t item_size = sizeof(ros_message->yaw);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// field.name pitch
{
size_t item_size = sizeof(ros_message->pitch);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// field.name roll
{
size_t item_size = sizeof(ros_message->roll);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// field.name grip
{
size_t item_size = sizeof(ros_message->grip);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// field.name menu
{
size_t item_size = sizeof(ros_message->menu);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// field.name trigger
{
size_t item_size = sizeof(ros_message->trigger);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// field.name trackpad_pressed
{
size_t item_size = sizeof(ros_message->trackpad_pressed);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// field.name trackpad_touched
{
size_t item_size = sizeof(ros_message->trackpad_touched);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// field.name trackpad_x
{
size_t item_size = sizeof(ros_message->trackpad_x);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// field.name trackpad_y
{
size_t item_size = sizeof(ros_message->trackpad_y);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
return current_alignment - initial_alignment;
}
static uint32_t _Controller__get_serialized_size(const void * untyped_ros_message)
{
return static_cast<uint32_t>(
get_serialized_size_vive_interfaces__msg__Controller(
untyped_ros_message, 0));
}
ROSIDL_TYPESUPPORT_FASTRTPS_C_PUBLIC_vive_interfaces
size_t max_serialized_size_vive_interfaces__msg__Controller(
bool & full_bounded,
size_t current_alignment)
{
size_t initial_alignment = current_alignment;
const size_t padding = 4;
const size_t wchar_size = 4;
(void)padding;
(void)wchar_size;
(void)full_bounded;
// member: x
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint32_t) +
eprosima::fastcdr::Cdr::alignment(current_alignment, sizeof(uint32_t));
}
// member: y
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint32_t) +
eprosima::fastcdr::Cdr::alignment(current_alignment, sizeof(uint32_t));
}
// member: z
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint32_t) +
eprosima::fastcdr::Cdr::alignment(current_alignment, sizeof(uint32_t));
}
// member: yaw
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint32_t) +
eprosima::fastcdr::Cdr::alignment(current_alignment, sizeof(uint32_t));
}
// member: pitch
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint32_t) +
eprosima::fastcdr::Cdr::alignment(current_alignment, sizeof(uint32_t));
}
// member: roll
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint32_t) +
eprosima::fastcdr::Cdr::alignment(current_alignment, sizeof(uint32_t));
}
// member: grip
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint8_t);
}
// member: menu
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint8_t);
}
// member: trigger
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint32_t) +
eprosima::fastcdr::Cdr::alignment(current_alignment, sizeof(uint32_t));
}
// member: trackpad_pressed
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint8_t);
}
// member: trackpad_touched
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint8_t);
}
// member: trackpad_x
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint32_t) +
eprosima::fastcdr::Cdr::alignment(current_alignment, sizeof(uint32_t));
}
// member: trackpad_y
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint32_t) +
eprosima::fastcdr::Cdr::alignment(current_alignment, sizeof(uint32_t));
}
return current_alignment - initial_alignment;
}
static size_t _Controller__max_serialized_size(bool & full_bounded)
{
return max_serialized_size_vive_interfaces__msg__Controller(
full_bounded, 0);
}
static message_type_support_callbacks_t __callbacks_Controller = {
"vive_interfaces::msg",
"Controller",
_Controller__cdr_serialize,
_Controller__cdr_deserialize,
_Controller__get_serialized_size,
_Controller__max_serialized_size
};
static rosidl_message_type_support_t _Controller__type_support = {
rosidl_typesupport_fastrtps_c__identifier,
&__callbacks_Controller,
get_message_typesupport_handle_function,
};
const rosidl_message_type_support_t *
ROSIDL_TYPESUPPORT_INTERFACE__MESSAGE_SYMBOL_NAME(rosidl_typesupport_fastrtps_c, vive_interfaces, msg, Controller)() {
return &_Controller__type_support;
}
#if defined(__cplusplus)
}
#endif
<file_sep>#----------------------------------------------------------------
# Generated CMake target import file for configuration "RelWithDebInfo".
#----------------------------------------------------------------
# Commands may need to know the format version.
set(CMAKE_IMPORT_FILE_VERSION 1)
# Import target "vive_interfaces::vive_interfaces__rosidl_generator_c" for configuration "RelWithDebInfo"
set_property(TARGET vive_interfaces::vive_interfaces__rosidl_generator_c APPEND PROPERTY IMPORTED_CONFIGURATIONS RELWITHDEBINFO)
set_target_properties(vive_interfaces::vive_interfaces__rosidl_generator_c PROPERTIES
IMPORTED_IMPLIB_RELWITHDEBINFO "${_IMPORT_PREFIX}/lib/vive_interfaces__rosidl_generator_c.lib"
IMPORTED_LOCATION_RELWITHDEBINFO "${_IMPORT_PREFIX}/bin/vive_interfaces__rosidl_generator_c.dll"
)
list(APPEND _IMPORT_CHECK_TARGETS vive_interfaces::vive_interfaces__rosidl_generator_c )
list(APPEND _IMPORT_CHECK_FILES_FOR_vive_interfaces::vive_interfaces__rosidl_generator_c "${_IMPORT_PREFIX}/lib/vive_interfaces__rosidl_generator_c.lib" "${_IMPORT_PREFIX}/bin/vive_interfaces__rosidl_generator_c.dll" )
# Commands beyond this point should not need to know the version.
set(CMAKE_IMPORT_FILE_VERSION)
<file_sep>// generated from rosidl_typesupport_cpp/resource/idl__type_support.cpp.em
// with input from vive_interfaces:msg\Hmd.idl
// generated code does not contain a copyright notice
#include "cstddef"
#include "rosidl_runtime_c/message_type_support_struct.h"
#include "vive_interfaces/msg/detail/hmd__struct.hpp"
#include "rosidl_typesupport_cpp/identifier.hpp"
#include "rosidl_typesupport_cpp/message_type_support.hpp"
#include "rosidl_typesupport_c/type_support_map.h"
#include "rosidl_typesupport_cpp/message_type_support_dispatch.hpp"
#include "rosidl_typesupport_cpp/visibility_control.h"
#include "rosidl_typesupport_interface/macros.h"
namespace vive_interfaces
{
namespace msg
{
namespace rosidl_typesupport_cpp
{
typedef struct _Hmd_type_support_ids_t
{
const char * typesupport_identifier[3];
} _Hmd_type_support_ids_t;
static const _Hmd_type_support_ids_t _Hmd_message_typesupport_ids = {
{
"rosidl_typesupport_connext_cpp", // ::rosidl_typesupport_connext_cpp::typesupport_identifier,
"rosidl_typesupport_fastrtps_cpp", // ::rosidl_typesupport_fastrtps_cpp::typesupport_identifier,
"rosidl_typesupport_introspection_cpp", // ::rosidl_typesupport_introspection_cpp::typesupport_identifier,
}
};
typedef struct _Hmd_type_support_symbol_names_t
{
const char * symbol_name[3];
} _Hmd_type_support_symbol_names_t;
#define STRINGIFY_(s) #s
#define STRINGIFY(s) STRINGIFY_(s)
static const _Hmd_type_support_symbol_names_t _Hmd_message_typesupport_symbol_names = {
{
STRINGIFY(ROSIDL_TYPESUPPORT_INTERFACE__MESSAGE_SYMBOL_NAME(rosidl_typesupport_connext_cpp, vive_interfaces, msg, Hmd)),
STRINGIFY(ROSIDL_TYPESUPPORT_INTERFACE__MESSAGE_SYMBOL_NAME(rosidl_typesupport_fastrtps_cpp, vive_interfaces, msg, Hmd)),
STRINGIFY(ROSIDL_TYPESUPPORT_INTERFACE__MESSAGE_SYMBOL_NAME(rosidl_typesupport_introspection_cpp, vive_interfaces, msg, Hmd)),
}
};
typedef struct _Hmd_type_support_data_t
{
void * data[3];
} _Hmd_type_support_data_t;
static _Hmd_type_support_data_t _Hmd_message_typesupport_data = {
{
0, // will store the shared library later
0, // will store the shared library later
0, // will store the shared library later
}
};
static const type_support_map_t _Hmd_message_typesupport_map = {
3,
"vive_interfaces",
&_Hmd_message_typesupport_ids.typesupport_identifier[0],
&_Hmd_message_typesupport_symbol_names.symbol_name[0],
&_Hmd_message_typesupport_data.data[0],
};
static const rosidl_message_type_support_t Hmd_message_type_support_handle = {
::rosidl_typesupport_cpp::typesupport_identifier,
reinterpret_cast<const type_support_map_t *>(&_Hmd_message_typesupport_map),
::rosidl_typesupport_cpp::get_message_typesupport_handle_function,
};
} // namespace rosidl_typesupport_cpp
} // namespace msg
} // namespace vive_interfaces
namespace rosidl_typesupport_cpp
{
template<>
ROSIDL_TYPESUPPORT_CPP_PUBLIC
const rosidl_message_type_support_t *
get_message_type_support_handle<vive_interfaces::msg::Hmd>()
{
return &::vive_interfaces::msg::rosidl_typesupport_cpp::Hmd_message_type_support_handle;
}
#ifdef __cplusplus
extern "C"
{
#endif
ROSIDL_TYPESUPPORT_CPP_PUBLIC
const rosidl_message_type_support_t *
ROSIDL_TYPESUPPORT_INTERFACE__MESSAGE_SYMBOL_NAME(rosidl_typesupport_cpp, vive_interfaces, msg, Hmd)() {
return get_message_type_support_handle<vive_interfaces::msg::Hmd>();
}
#ifdef __cplusplus
}
#endif
} // namespace rosidl_typesupport_cpp
<file_sep>// generated from
// rosidl_typesupport_introspection_c/resource/rosidl_typesupport_introspection_c__visibility_control.h.in
// generated code does not contain a copyright notice
#ifndef VIVE_INTERFACES__MSG__ROSIDL_TYPESUPPORT_INTROSPECTION_C__VISIBILITY_CONTROL_H_
#define VIVE_INTERFACES__MSG__ROSIDL_TYPESUPPORT_INTROSPECTION_C__VISIBILITY_CONTROL_H_
#ifdef __cplusplus
extern "C"
{
#endif
// This logic was borrowed (then namespaced) from the examples on the gcc wiki:
// https://gcc.gnu.org/wiki/Visibility
#if defined _WIN32 || defined __CYGWIN__
#ifdef __GNUC__
#define ROSIDL_TYPESUPPORT_INTROSPECTION_C_EXPORT_vive_interfaces __attribute__ ((dllexport))
#define ROSIDL_TYPESUPPORT_INTROSPECTION_C_IMPORT_vive_interfaces __attribute__ ((dllimport))
#else
#define ROSIDL_TYPESUPPORT_INTROSPECTION_C_EXPORT_vive_interfaces __declspec(dllexport)
#define ROSIDL_TYPESUPPORT_INTROSPECTION_C_IMPORT_vive_interfaces __declspec(dllimport)
#endif
#ifdef ROSIDL_TYPESUPPORT_INTROSPECTION_C_BUILDING_DLL_vive_interfaces
#define ROSIDL_TYPESUPPORT_INTROSPECTION_C_PUBLIC_vive_interfaces ROSIDL_TYPESUPPORT_INTROSPECTION_C_EXPORT_vive_interfaces
#else
#define ROSIDL_TYPESUPPORT_INTROSPECTION_C_PUBLIC_vive_interfaces ROSIDL_TYPESUPPORT_INTROSPECTION_C_IMPORT_vive_interfaces
#endif
#else
#define ROSIDL_TYPESUPPORT_INTROSPECTION_C_EXPORT_vive_interfaces __attribute__ ((visibility("default")))
#define ROSIDL_TYPESUPPORT_INTROSPECTION_C_IMPORT_vive_interfaces
#if __GNUC__ >= 4
#define ROSIDL_TYPESUPPORT_INTROSPECTION_C_PUBLIC_vive_interfaces __attribute__ ((visibility("default")))
#else
#define ROSIDL_TYPESUPPORT_INTROSPECTION_C_PUBLIC_vive_interfaces
#endif
#endif
#ifdef __cplusplus
}
#endif
#endif // VIVE_INTERFACES__MSG__ROSIDL_TYPESUPPORT_INTROSPECTION_C__VISIBILITY_CONTROL_H_
<file_sep>// generated from rosidl_generator_c/resource/idl__functions.h.em
// with input from vive_interfaces:msg\Hmd.idl
// generated code does not contain a copyright notice
#ifndef VIVE_INTERFACES__MSG__DETAIL__HMD__FUNCTIONS_H_
#define VIVE_INTERFACES__MSG__DETAIL__HMD__FUNCTIONS_H_
#ifdef __cplusplus
extern "C"
{
#endif
#include <stdbool.h>
#include <stdlib.h>
#include "rosidl_runtime_c/visibility_control.h"
#include "vive_interfaces/msg/rosidl_generator_c__visibility_control.h"
#include "vive_interfaces/msg/detail/hmd__struct.h"
/// Initialize msg/Hmd message.
/**
* If the init function is called twice for the same message without
* calling fini inbetween previously allocated memory will be leaked.
* \param[in,out] msg The previously allocated message pointer.
* Fields without a default value will not be initialized by this function.
* You might want to call memset(msg, 0, sizeof(
* vive_interfaces__msg__Hmd
* )) before or use
* vive_interfaces__msg__Hmd__create()
* to allocate and initialize the message.
* \return true if initialization was successful, otherwise false
*/
ROSIDL_GENERATOR_C_PUBLIC_vive_interfaces
bool
vive_interfaces__msg__Hmd__init(vive_interfaces__msg__Hmd * msg);
/// Finalize msg/Hmd message.
/**
* \param[in,out] msg The allocated message pointer.
*/
ROSIDL_GENERATOR_C_PUBLIC_vive_interfaces
void
vive_interfaces__msg__Hmd__fini(vive_interfaces__msg__Hmd * msg);
/// Create msg/Hmd message.
/**
* It allocates the memory for the message, sets the memory to zero, and
* calls
* vive_interfaces__msg__Hmd__init().
* \return The pointer to the initialized message if successful,
* otherwise NULL
*/
ROSIDL_GENERATOR_C_PUBLIC_vive_interfaces
vive_interfaces__msg__Hmd *
vive_interfaces__msg__Hmd__create();
/// Destroy msg/Hmd message.
/**
* It calls
* vive_interfaces__msg__Hmd__fini()
* and frees the memory of the message.
* \param[in,out] msg The allocated message pointer.
*/
ROSIDL_GENERATOR_C_PUBLIC_vive_interfaces
void
vive_interfaces__msg__Hmd__destroy(vive_interfaces__msg__Hmd * msg);
/// Initialize array of msg/Hmd messages.
/**
* It allocates the memory for the number of elements and calls
* vive_interfaces__msg__Hmd__init()
* for each element of the array.
* \param[in,out] array The allocated array pointer.
* \param[in] size The size / capacity of the array.
* \return true if initialization was successful, otherwise false
* If the array pointer is valid and the size is zero it is guaranteed
* to return true.
*/
ROSIDL_GENERATOR_C_PUBLIC_vive_interfaces
bool
vive_interfaces__msg__Hmd__Sequence__init(vive_interfaces__msg__Hmd__Sequence * array, size_t size);
/// Finalize array of msg/Hmd messages.
/**
* It calls
* vive_interfaces__msg__Hmd__fini()
* for each element of the array and frees the memory for the number of
* elements.
* \param[in,out] array The initialized array pointer.
*/
ROSIDL_GENERATOR_C_PUBLIC_vive_interfaces
void
vive_interfaces__msg__Hmd__Sequence__fini(vive_interfaces__msg__Hmd__Sequence * array);
/// Create array of msg/Hmd messages.
/**
* It allocates the memory for the array and calls
* vive_interfaces__msg__Hmd__Sequence__init().
* \param[in] size The size / capacity of the array.
* \return The pointer to the initialized array if successful, otherwise NULL
*/
ROSIDL_GENERATOR_C_PUBLIC_vive_interfaces
vive_interfaces__msg__Hmd__Sequence *
vive_interfaces__msg__Hmd__Sequence__create(size_t size);
/// Destroy array of msg/Hmd messages.
/**
* It calls
* vive_interfaces__msg__Hmd__Sequence__fini()
* on the array,
* and frees the memory of the array.
* \param[in,out] array The initialized array pointer.
*/
ROSIDL_GENERATOR_C_PUBLIC_vive_interfaces
void
vive_interfaces__msg__Hmd__Sequence__destroy(vive_interfaces__msg__Hmd__Sequence * array);
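// Illustrative lifecycle sketch (added comment, not generated code; it assumes
// the plain float fields x/y/z/yaw/pitch/roll declared in hmd__struct.h):
//   vive_interfaces__msg__Hmd * msg = vive_interfaces__msg__Hmd__create();
//   if (msg != NULL) {
//     msg->x = 0.0f;  // fill the remaining fields the same way
//     vive_interfaces__msg__Hmd__destroy(msg);
//   }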
#ifdef __cplusplus
}
#endif
#endif // VIVE_INTERFACES__MSG__DETAIL__HMD__FUNCTIONS_H_
<file_sep># Omron_Robotics
---
Hiverlab Omron Robotics R & D
# Dependencies
---
## ROS2 dependencies
`pip install catkin_pkg cryptography empy ifcfg lark-parser lxml netifaces numpy opencv-python pyparsing pyyaml setuptools`
## RQt dependencies
`pip install pydot PyQt5`
## Colcon dependencies
`pip install colcon-common-extensions`
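With the Colcon extensions installed, a typical build is sketched below (this assumes the repository is checked out into the `src/` folder of a ROS 2 workspace; adjust paths to your setup):
`colcon build --packages-select vive_interfaces`
`call install\setup.bat`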
# References
---
1. https://github.com/ValveSoftware/openvr.git
2. https://github.com/BarakChamo/OpenVR-OSC.git
3. https://github.com/TriadSemi/triad_openvr.git
<file_sep>#----------------------------------------------------------------
# Generated CMake target import file for configuration "Debug".
#----------------------------------------------------------------
# Commands may need to know the format version.
set(CMAKE_IMPORT_FILE_VERSION 1)
# Import target "vive_interfaces::vive_interfaces__rosidl_typesupport_c" for configuration "Debug"
set_property(TARGET vive_interfaces::vive_interfaces__rosidl_typesupport_c APPEND PROPERTY IMPORTED_CONFIGURATIONS DEBUG)
set_target_properties(vive_interfaces::vive_interfaces__rosidl_typesupport_c PROPERTIES
IMPORTED_IMPLIB_DEBUG "${_IMPORT_PREFIX}/lib/vive_interfaces__rosidl_typesupport_c.lib"
IMPORTED_LOCATION_DEBUG "${_IMPORT_PREFIX}/bin/vive_interfaces__rosidl_typesupport_c.dll"
)
list(APPEND _IMPORT_CHECK_TARGETS vive_interfaces::vive_interfaces__rosidl_typesupport_c )
list(APPEND _IMPORT_CHECK_FILES_FOR_vive_interfaces::vive_interfaces__rosidl_typesupport_c "${_IMPORT_PREFIX}/lib/vive_interfaces__rosidl_typesupport_c.lib" "${_IMPORT_PREFIX}/bin/vive_interfaces__rosidl_typesupport_c.dll" )
# Commands beyond this point should not need to know the version.
set(CMAKE_IMPORT_FILE_VERSION)
<file_sep>import os
print(os.path.dirname(os.path.realpath(__file__)))
import argparse
import math
import time
import sys
import random
import pandas as pd
from signal import signal, SIGINT
from pythonosc import osc_message_builder
from pythonosc import osc_bundle_builder
from pythonosc import udp_client
from reprint import output
from colorama import Fore, Back, Style
import openvr
import json
from functools import lru_cache
# Print text by overwriting the current console line instead of starting a new one
def update_text(txt):
sys.stdout.write('\r'+txt)
sys.stdout.flush()
# Convert the standard 3x4 position/rotation matrix to an x,y,z location and the appropriate Euler angles (in degrees)
def convert_to_euler(pose_mat):
yaw = 180 / math.pi * math.atan2(pose_mat[1][0], pose_mat[0][0])
pitch = 180 / math.pi * math.atan2(pose_mat[2][0], pose_mat[0][0])
roll = 180 / math.pi * math.atan2(pose_mat[2][1], pose_mat[2][2])
x = pose_mat[0][3]
y = pose_mat[1][3]
z = pose_mat[2][3]
return [x,y,z,yaw,pitch,roll]
# Convert the standard 3x4 position/rotation matrix to an x,y,z location and the appropriate Quaternion
def convert_to_quaternion(pose_mat):
# Per issue #2, adding a abs() so that sqrt only results in real numbers
r_w = math.sqrt(abs(1+pose_mat[0][0]+pose_mat[1][1]+pose_mat[2][2]))/2
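# Note: this trace-based extraction divides by 4*r_w below, so it becomes
# numerically unstable for rotations whose matrix trace approaches -1 (r_w ~ 0).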
r_x = (pose_mat[2][1]-pose_mat[1][2])/(4*r_w)
r_y = (pose_mat[0][2]-pose_mat[2][0])/(4*r_w)
r_z = (pose_mat[1][0]-pose_mat[0][1])/(4*r_w)
x = pose_mat[0][3]
y = pose_mat[1][3]
z = pose_mat[2][3]
return [x,y,z,r_w,r_x,r_y,r_z]
#Define a class to make it easy to append pose matricies and convert to both Euler and Quaternion for plotting
class pose_sample_buffer():
def __init__(self):
self.i = 0
self.index = []
self.time = []
self.x = []
self.y = []
self.z = []
self.yaw = []
self.pitch = []
self.roll = []
self.r_w = []
self.r_x = []
self.r_y = []
self.r_z = []
def append(self,pose_mat,t):
self.time.append(t)
self.x.append(pose_mat[0][3])
self.y.append(pose_mat[1][3])
self.z.append(pose_mat[2][3])
self.yaw.append(180 / math.pi * math.atan(pose_mat[1][0] /pose_mat[0][0]))
self.pitch.append(180 / math.pi * math.atan(-1 * pose_mat[2][0] / math.sqrt(pow(pose_mat[2][1], 2) + math.pow(pose_mat[2][2], 2))))
self.roll.append(180 / math.pi * math.atan(pose_mat[2][1] /pose_mat[2][2]))
r_w = math.sqrt(abs(1+pose_mat[0][0]+pose_mat[1][1]+pose_mat[2][2]))/2
self.r_w.append(r_w)
self.r_x.append((pose_mat[2][1]-pose_mat[1][2])/(4*r_w))
self.r_y.append((pose_mat[0][2]-pose_mat[2][0])/(4*r_w))
self.r_z.append((pose_mat[1][0]-pose_mat[0][1])/(4*r_w))
def get_pose(vr_obj):
return vr_obj.getDeviceToAbsoluteTrackingPose(openvr.TrackingUniverseStanding, 0, openvr.k_unMaxTrackedDeviceCount)
class vr_tracked_device():
def __init__(self,vr_obj,index,device_class):
self.device_class = device_class
self.index = index
self.vr = vr_obj
@lru_cache(maxsize=None)
def get_serial(self):
return self.vr.getStringTrackedDeviceProperty(self.index, openvr.Prop_SerialNumber_String)
def get_model(self):
return self.vr.getStringTrackedDeviceProperty(self.index, openvr.Prop_ModelNumber_String)
def get_battery_percent(self):
return self.vr.getFloatTrackedDeviceProperty(self.index, openvr.Prop_DeviceBatteryPercentage_Float)
def is_charging(self):
return self.vr.getBoolTrackedDeviceProperty(self.index, openvr.Prop_DeviceIsCharging_Bool)
def sample(self,num_samples,sample_rate):
interval = 1/sample_rate
rtn = pose_sample_buffer()
sample_start = time.time()
for i in range(num_samples):
start = time.time()
pose = get_pose(self.vr)
rtn.append(pose[self.index].mDeviceToAbsoluteTracking,time.time()-sample_start)
sleep_time = interval- (time.time()-start)
if sleep_time>0:
time.sleep(sleep_time)
return rtn
def get_pose_euler(self, pose=None):
if pose == None:
pose = get_pose(self.vr)
if pose[self.index].bPoseIsValid:
return convert_to_euler(pose[self.index].mDeviceToAbsoluteTracking)
else:
return None
def get_pose_matrix(self, pose=None):
if pose == None:
pose = get_pose(self.vr)
if pose[self.index].bPoseIsValid:
return pose[self.index].mDeviceToAbsoluteTracking
else:
return None
def get_velocity(self, pose=None):
if pose == None:
pose = get_pose(self.vr)
if pose[self.index].bPoseIsValid:
return pose[self.index].vVelocity
else:
return None
def get_angular_velocity(self, pose=None):
if pose == None:
pose = get_pose(self.vr)
if pose[self.index].bPoseIsValid:
return pose[self.index].vAngularVelocity
else:
return None
def get_pose_quaternion(self, pose=None):
if pose == None:
pose = get_pose(self.vr)
if pose[self.index].bPoseIsValid:
return convert_to_quaternion(pose[self.index].mDeviceToAbsoluteTracking)
else:
return None
def controller_state_to_dict(self, pControllerState):
# This function is graciously borrowed from https://gist.github.com/awesomebytes/75daab3adb62b331f21ecf3a03b3ab46
# docs: https://github.com/ValveSoftware/openvr/wiki/IVRSystem::GetControllerState
d = {}
d['unPacketNum'] = pControllerState.unPacketNum
# on trigger .y is always 0.0 says the docs
d['trigger'] = pControllerState.rAxis[1].x
# 0.0 on trigger is fully released
# -1.0 to 1.0 on joystick and trackpads
d['trackpad_x'] = pControllerState.rAxis[0].x
d['trackpad_y'] = pControllerState.rAxis[0].y
# These are published and always 0.0
# for i in range(2, 5):
# d['unknowns_' + str(i) + '_x'] = pControllerState.rAxis[i].x
# d['unknowns_' + str(i) + '_y'] = pControllerState.rAxis[i].y
d['ulButtonPressed'] = pControllerState.ulButtonPressed
d['ulButtonTouched'] = pControllerState.ulButtonTouched
# To make easier to understand what is going on
# Second bit marks menu button
d['menu_button'] = bool(pControllerState.ulButtonPressed >> 1 & 1)
# 32 bit marks trackpad
d['trackpad_pressed'] = bool(pControllerState.ulButtonPressed >> 32 & 1)
d['trackpad_touched'] = bool(pControllerState.ulButtonTouched >> 32 & 1)
# third bit marks grip button
d['grip_button'] = bool(pControllerState.ulButtonPressed >> 2 & 1)
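# The bit indices used above (1, 32, 2) correspond to openvr's EVRButtonId
# values (k_EButton_ApplicationMenu, k_EButton_SteamVR_Touchpad, k_EButton_Grip),
# so e.g. `>> 1 & 1` tests the application-menu bit of the pressed-button mask.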
# System button can't be read, if you press it
# the controllers stop reporting
return d
def get_controller_inputs(self):
result, state = self.vr.getControllerState(self.index)
return self.controller_state_to_dict(state)
def trigger_haptic_pulse(self, duration_micros=1000, axis_id=0):
"""
Causes devices with haptic feedback to vibrate for a short time.
"""
self.vr.triggerHapticPulse(self.index ,axis_id, duration_micros)
class vr_tracking_reference(vr_tracked_device):
def get_mode(self):
return self.vr.getStringTrackedDeviceProperty(self.index,openvr.Prop_ModeLabel_String).decode('utf-8').upper()
def sample(self,num_samples,sample_rate):
print("Warning: Tracking References do not move, sample isn't much use...")
class triad_openvr():
def __init__(self, configfile_path=None):
# Initialize OpenVR as a non-rendering ("Other") application
self.vr = openvr.init(openvr.VRApplication_Other)
self.vrsystem = openvr.VRSystem()
# Initializing object to hold indexes for various tracked objects
self.object_names = {"Tracking Reference":[],"HMD":[],"Controller":[],"Tracker":[]}
self.devices = {}
self.device_index_map = {}
poses = self.vr.getDeviceToAbsoluteTrackingPose(openvr.TrackingUniverseStanding, 0,
openvr.k_unMaxTrackedDeviceCount)
# Loading config file
if configfile_path:
try:
with open(configfile_path, 'r') as json_data:
config = json.load(json_data)
except EnvironmentError: # parent of IOError, OSError *and* WindowsError where available
print('config.json not found.')
exit(1)
# Iterate through the pose list to find the active devices and determine their type
for i in range(openvr.k_unMaxTrackedDeviceCount):
if poses[i].bDeviceIsConnected:
device_serial = self.vr.getStringTrackedDeviceProperty(i,openvr.Prop_SerialNumber_String).decode('utf-8')
for device in config['devices']:
if device_serial == device['serial']:
device_name = device['name']
self.object_names[device['type']].append(device_name)
self.devices[device_name] = vr_tracked_device(self.vr,i,device['type'])
else:
# Iterate through the pose list to find the active devices and determine their type
for i in range(openvr.k_unMaxTrackedDeviceCount):
if poses[i].bDeviceIsConnected:
self.add_tracked_device(i)
def __del__(self):
openvr.shutdown()
def get_pose(self):
return get_pose(self.vr)
def poll_vr_events(self):
"""
Used to poll VR events and find any new tracked devices or ones that are no longer tracked.
"""
event = openvr.VREvent_t()
while self.vrsystem.pollNextEvent(event):
if event.eventType == openvr.VREvent_TrackedDeviceActivated:
self.add_tracked_device(event.trackedDeviceIndex)
elif event.eventType == openvr.VREvent_TrackedDeviceDeactivated:
#If we were already tracking this device, quit tracking it.
if event.trackedDeviceIndex in self.device_index_map:
self.remove_tracked_device(event.trackedDeviceIndex)
def add_tracked_device(self, tracked_device_index):
i = tracked_device_index
device_class = self.vr.getTrackedDeviceClass(i)
if (device_class == openvr.TrackedDeviceClass_Controller):
device_name = "controller_"+str(len(self.object_names["Controller"])+1)
self.object_names["Controller"].append(device_name)
self.devices[device_name] = vr_tracked_device(self.vr,i,"Controller")
self.device_index_map[i] = device_name
elif (device_class == openvr.TrackedDeviceClass_HMD):
device_name = "hmd_"+str(len(self.object_names["HMD"])+1)
self.object_names["HMD"].append(device_name)
self.devices[device_name] = vr_tracked_device(self.vr,i,"HMD")
self.device_index_map[i] = device_name
elif (device_class == openvr.TrackedDeviceClass_GenericTracker):
device_name = "tracker_"+str(len(self.object_names["Tracker"])+1)
self.object_names["Tracker"].append(device_name)
self.devices[device_name] = vr_tracked_device(self.vr,i,"Tracker")
self.device_index_map[i] = device_name
elif (device_class == openvr.TrackedDeviceClass_TrackingReference):
device_name = "tracking_reference_"+str(len(self.object_names["Tracking Reference"])+1)
self.object_names["Tracking Reference"].append(device_name)
self.devices[device_name] = vr_tracking_reference(self.vr,i,"Tracking Reference")
self.device_index_map[i] = device_name
def remove_tracked_device(self, tracked_device_index):
if tracked_device_index in self.device_index_map:
device_name = self.device_index_map[tracked_device_index]
self.object_names[self.devices[device_name].device_class].remove(device_name)
del self.device_index_map[tracked_device_index]
del self.devices[device_name]
else:
raise Exception("Tracked device index {} not valid. Not removing.".format(tracked_device_index))
def rename_device(self,old_device_name,new_device_name):
self.devices[new_device_name] = self.devices.pop(old_device_name)
for i in range(len(self.object_names[self.devices[new_device_name].device_class])):
if self.object_names[self.devices[new_device_name].device_class][i] == old_device_name:
self.object_names[self.devices[new_device_name].device_class][i] = new_device_name
def print_discovered_objects(self):
for device_type in self.object_names:
plural = device_type
if len(self.object_names[device_type])!=1:
plural+="s"
print("Found "+str(len(self.object_names[device_type]))+" "+plural)
for device in self.object_names[device_type]:
if device_type == "Tracking Reference":
print(" "+device+" ("+self.devices[device].get_serial()+
", Mode "+self.devices[device].get_mode()+
", "+self.devices[device].get_model()+
")")
else:
print(" "+device+" ("+self.devices[device].get_serial()+
", "+self.devices[device].get_model()+")")
# Initialize Triad's OpenVR wrapper and print discovered objects
v = triad_openvr()
v.print_discovered_objects()
# Sort through all discovered devices and keep track by type
deviceCount = 0
devices = {
'tracker': [],
'hmd': [],
'controller': [],
'tracking reference': []
}
for deviceName, device in v.devices.items():
device._id = deviceName.split("_").pop()
devices[device.device_class.lower()].append(device)
deviceCount += 1
# pose tracking interval
interval = 1/250
ip = "127.0.0.1"
port = 7000
track = ["hmd"]
freq = 250
mode = ['euler', 'quaternion']
# initialize OSC client
client = udp_client.SimpleUDPClient(ip, port)
import rclpy
from rclpy.node import Node
from std_msgs.msg import String
from vive_interfaces.msg import Hmd
class MinimalPublisher(Node):
def __init__(self):
super().__init__('minimal_publisher')
self.publisher_ = self.create_publisher(Hmd, 'hmd', 10)
# timer_period = 0.5 # seconds
# self.timer = self.create_timer(timer_period, self.timer_callback)
# self.i = 0
def publish(self, x, y, z, yaw, pitch, roll):
msg = Hmd()
msg.x = x
msg.y = y
msg.z = z
msg.yaw = yaw
msg.pitch = pitch
msg.roll = roll
self.publisher_.publish(msg)
def log(self, txt):
self.get_logger().info(txt)
# def timer_callback(self):
# msg = Hmd()
# msg.x = float(self.i)
# msg.y = float(self.i)
# msg.z = float(self.i)
# msg.yaw = float(self.i)
# msg.pitch = float(self.i)
# msg.roll = float(self.i)
# msg.data = 'Hello World: %d' % self.i
# self.publisher_.publish(msg)
# text = "Publishing: "
# text += '{0: <10}'.format("%.4f" % msg.x)
# text += " "
# text += '{0: <10}'.format("%.4f" % msg.y)
# text += " "
# text += '{0: <10}'.format("%.4f" % msg.z)
# text += " "
# text += '{0: <10}'.format("%.4f" % msg.yaw)
# text += " "
# text += '{0: <10}'.format("%.4f" % msg.pitch)
# text += " "
# text += '{0: <10}'.format("%.4f" % msg.roll)
# text += " "
# self.get_logger().info(text)
# self.i += 1
def main(args=None):
rclpy.init(args=args)
minimal_publisher = MinimalPublisher()
# rclpy.spin(minimal_publisher)
# start from a zero pose so the fallback below is defined before the first successful read
previous_pose = [0.0] * 6
while True:
# Initialize OSC bundle for all tracked controllers
bundle = osc_bundle_builder.OscBundleBuilder(osc_bundle_builder.IMMEDIATELY)
# iterate over tracked device types and build OSC messages
for deviceType in track:
for device in devices[deviceType]:
# get device pose
pose = device.get_pose_euler()
# velocity = device.get_velocity()
# angular_velocity = device.get_angular_velocity()
# controller_inputs = device.get_controller_inputs()
# haptic_pulse = device.trigger_haptic_pulse()
# Build message and add to bundle
msg = osc_message_builder.OscMessageBuilder(address="/" + deviceType + "/" + device._id)
#msg.add_arg(device.get_pose_euler())
bundle.add_content(msg.build())
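# NOTE: the bundle is only assembled here; if OSC output is actually wanted it could be
# dispatched with client.send(bundle.build()) (not done in this script).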
### report device pose in the console
txt = Fore.CYAN + '{0: <13}'.format(deviceType + device._id) + Fore.WHITE + Style.BRIGHT
if not pose:
pose = previous_pose
# Publish
minimal_publisher.publish(float(pose[0]), float(pose[1]), float(pose[2]), float(pose[3]), float(pose[4]), float(pose[5]))
# Log
for each in pose:
txt += '{0: <10}'.format("%.4f" % each)
txt += " "
minimal_publisher.log(txt)
previous_pose = pose
# Destroy the node explicitly
# (optional - otherwise it will be done automatically
# when the garbage collector destroys the node object)
minimal_publisher.destroy_node()
# rclpy.shutdown()
if __name__ == '__main__':
main()
<file_sep>// generated from rosidl_generator_cpp/resource/idl__struct.hpp.em
// with input from vive_interfaces:msg\Hmd.idl
// generated code does not contain a copyright notice
#ifndef VIVE_INTERFACES__MSG__DETAIL__HMD__STRUCT_HPP_
#define VIVE_INTERFACES__MSG__DETAIL__HMD__STRUCT_HPP_
#include <rosidl_runtime_cpp/bounded_vector.hpp>
#include <rosidl_runtime_cpp/message_initialization.hpp>
#include <algorithm>
#include <array>
#include <memory>
#include <string>
#include <vector>
#ifndef _WIN32
# define DEPRECATED__vive_interfaces__msg__Hmd __attribute__((deprecated))
#else
# define DEPRECATED__vive_interfaces__msg__Hmd __declspec(deprecated)
#endif
namespace vive_interfaces
{
namespace msg
{
// message struct
template<class ContainerAllocator>
struct Hmd_
{
using Type = Hmd_<ContainerAllocator>;
explicit Hmd_(rosidl_runtime_cpp::MessageInitialization _init = rosidl_runtime_cpp::MessageInitialization::ALL)
{
if (rosidl_runtime_cpp::MessageInitialization::ALL == _init ||
rosidl_runtime_cpp::MessageInitialization::ZERO == _init)
{
this->x = 0.0f;
this->y = 0.0f;
this->z = 0.0f;
this->yaw = 0.0f;
this->pitch = 0.0f;
this->roll = 0.0f;
}
}
explicit Hmd_(const ContainerAllocator & _alloc, rosidl_runtime_cpp::MessageInitialization _init = rosidl_runtime_cpp::MessageInitialization::ALL)
{
(void)_alloc;
if (rosidl_runtime_cpp::MessageInitialization::ALL == _init ||
rosidl_runtime_cpp::MessageInitialization::ZERO == _init)
{
this->x = 0.0f;
this->y = 0.0f;
this->z = 0.0f;
this->yaw = 0.0f;
this->pitch = 0.0f;
this->roll = 0.0f;
}
}
// field types and members
using _x_type =
float;
_x_type x;
using _y_type =
float;
_y_type y;
using _z_type =
float;
_z_type z;
using _yaw_type =
float;
_yaw_type yaw;
using _pitch_type =
float;
_pitch_type pitch;
using _roll_type =
float;
_roll_type roll;
// setters for named parameter idiom
Type & set__x(
const float & _arg)
{
this->x = _arg;
return *this;
}
Type & set__y(
const float & _arg)
{
this->y = _arg;
return *this;
}
Type & set__z(
const float & _arg)
{
this->z = _arg;
return *this;
}
Type & set__yaw(
const float & _arg)
{
this->yaw = _arg;
return *this;
}
Type & set__pitch(
const float & _arg)
{
this->pitch = _arg;
return *this;
}
Type & set__roll(
const float & _arg)
{
this->roll = _arg;
return *this;
}
// constant declarations
// pointer types
using RawPtr =
vive_interfaces::msg::Hmd_<ContainerAllocator> *;
using ConstRawPtr =
const vive_interfaces::msg::Hmd_<ContainerAllocator> *;
using SharedPtr =
std::shared_ptr<vive_interfaces::msg::Hmd_<ContainerAllocator>>;
using ConstSharedPtr =
std::shared_ptr<vive_interfaces::msg::Hmd_<ContainerAllocator> const>;
template<typename Deleter = std::default_delete<
vive_interfaces::msg::Hmd_<ContainerAllocator>>>
using UniquePtrWithDeleter =
std::unique_ptr<vive_interfaces::msg::Hmd_<ContainerAllocator>, Deleter>;
using UniquePtr = UniquePtrWithDeleter<>;
template<typename Deleter = std::default_delete<
vive_interfaces::msg::Hmd_<ContainerAllocator>>>
using ConstUniquePtrWithDeleter =
std::unique_ptr<vive_interfaces::msg::Hmd_<ContainerAllocator> const, Deleter>;
using ConstUniquePtr = ConstUniquePtrWithDeleter<>;
using WeakPtr =
std::weak_ptr<vive_interfaces::msg::Hmd_<ContainerAllocator>>;
using ConstWeakPtr =
std::weak_ptr<vive_interfaces::msg::Hmd_<ContainerAllocator> const>;
// pointer types similar to ROS 1, use SharedPtr / ConstSharedPtr instead
// NOTE: Can't use 'using' here because GNU C++ can't parse attributes properly
typedef DEPRECATED__vive_interfaces__msg__Hmd
std::shared_ptr<vive_interfaces::msg::Hmd_<ContainerAllocator>>
Ptr;
typedef DEPRECATED__vive_interfaces__msg__Hmd
std::shared_ptr<vive_interfaces::msg::Hmd_<ContainerAllocator> const>
ConstPtr;
// comparison operators
bool operator==(const Hmd_ & other) const
{
if (this->x != other.x) {
return false;
}
if (this->y != other.y) {
return false;
}
if (this->z != other.z) {
return false;
}
if (this->yaw != other.yaw) {
return false;
}
if (this->pitch != other.pitch) {
return false;
}
if (this->roll != other.roll) {
return false;
}
return true;
}
bool operator!=(const Hmd_ & other) const
{
return !this->operator==(other);
}
}; // struct Hmd_
// alias to use template instance with default allocator
using Hmd =
vive_interfaces::msg::Hmd_<std::allocator<void>>;
// constant definitions
} // namespace msg
} // namespace vive_interfaces
#endif // VIVE_INTERFACES__MSG__DETAIL__HMD__STRUCT_HPP_
<file_sep>// generated from rosidl_generator_cpp/resource/idl__builder.hpp.em
// with input from vive_interfaces:msg\Hmd.idl
// generated code does not contain a copyright notice
#ifndef VIVE_INTERFACES__MSG__DETAIL__HMD__BUILDER_HPP_
#define VIVE_INTERFACES__MSG__DETAIL__HMD__BUILDER_HPP_
#include "vive_interfaces/msg/detail/hmd__struct.hpp"
#include <rosidl_runtime_cpp/message_initialization.hpp>
#include <algorithm>
#include <utility>
namespace vive_interfaces
{
namespace msg
{
namespace builder
{
class Init_Hmd_roll
{
public:
explicit Init_Hmd_roll(::vive_interfaces::msg::Hmd & msg)
: msg_(msg)
{}
::vive_interfaces::msg::Hmd roll(::vive_interfaces::msg::Hmd::_roll_type arg)
{
msg_.roll = std::move(arg);
return std::move(msg_);
}
private:
::vive_interfaces::msg::Hmd msg_;
};
class Init_Hmd_pitch
{
public:
explicit Init_Hmd_pitch(::vive_interfaces::msg::Hmd & msg)
: msg_(msg)
{}
Init_Hmd_roll pitch(::vive_interfaces::msg::Hmd::_pitch_type arg)
{
msg_.pitch = std::move(arg);
return Init_Hmd_roll(msg_);
}
private:
::vive_interfaces::msg::Hmd msg_;
};
class Init_Hmd_yaw
{
public:
explicit Init_Hmd_yaw(::vive_interfaces::msg::Hmd & msg)
: msg_(msg)
{}
Init_Hmd_pitch yaw(::vive_interfaces::msg::Hmd::_yaw_type arg)
{
msg_.yaw = std::move(arg);
return Init_Hmd_pitch(msg_);
}
private:
::vive_interfaces::msg::Hmd msg_;
};
class Init_Hmd_z
{
public:
explicit Init_Hmd_z(::vive_interfaces::msg::Hmd & msg)
: msg_(msg)
{}
Init_Hmd_yaw z(::vive_interfaces::msg::Hmd::_z_type arg)
{
msg_.z = std::move(arg);
return Init_Hmd_yaw(msg_);
}
private:
::vive_interfaces::msg::Hmd msg_;
};
class Init_Hmd_y
{
public:
explicit Init_Hmd_y(::vive_interfaces::msg::Hmd & msg)
: msg_(msg)
{}
Init_Hmd_z y(::vive_interfaces::msg::Hmd::_y_type arg)
{
msg_.y = std::move(arg);
return Init_Hmd_z(msg_);
}
private:
::vive_interfaces::msg::Hmd msg_;
};
class Init_Hmd_x
{
public:
Init_Hmd_x()
: msg_(::rosidl_runtime_cpp::MessageInitialization::SKIP)
{}
Init_Hmd_y x(::vive_interfaces::msg::Hmd::_x_type arg)
{
msg_.x = std::move(arg);
return Init_Hmd_y(msg_);
}
private:
::vive_interfaces::msg::Hmd msg_;
};
} // namespace builder
} // namespace msg
template<typename MessageType>
auto build();
template<>
inline
auto build<::vive_interfaces::msg::Hmd>()
{
return vive_interfaces::msg::builder::Init_Hmd_x();
}
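// Illustrative usage (not part of the generated header): the builder chains one setter per field, e.g.
//   auto msg = vive_interfaces::build<vive_interfaces::msg::Hmd>()
//                .x(0.0f).y(0.0f).z(0.0f).yaw(0.0f).pitch(0.0f).roll(0.0f);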
} // namespace vive_interfaces
#endif // VIVE_INTERFACES__MSG__DETAIL__HMD__BUILDER_HPP_
<file_sep>// generated from rosidl_typesupport_fastrtps_cpp/resource/idl__type_support.cpp.em
// with input from vive_interfaces:msg\Hmd.idl
// generated code does not contain a copyright notice
#include "vive_interfaces/msg/detail/hmd__rosidl_typesupport_fastrtps_cpp.hpp"
#include "vive_interfaces/msg/detail/hmd__struct.hpp"
#include <limits>
#include <stdexcept>
#include <string>
#include "rosidl_typesupport_cpp/message_type_support.hpp"
#include "rosidl_typesupport_fastrtps_cpp/identifier.hpp"
#include "rosidl_typesupport_fastrtps_cpp/message_type_support.h"
#include "rosidl_typesupport_fastrtps_cpp/message_type_support_decl.hpp"
#include "rosidl_typesupport_fastrtps_cpp/wstring_conversion.hpp"
#include "fastcdr/Cdr.h"
// forward declaration of message dependencies and their conversion functions
namespace vive_interfaces
{
namespace msg
{
namespace typesupport_fastrtps_cpp
{
bool
ROSIDL_TYPESUPPORT_FASTRTPS_CPP_PUBLIC_vive_interfaces
cdr_serialize(
const vive_interfaces::msg::Hmd & ros_message,
eprosima::fastcdr::Cdr & cdr)
{
// Member: x
cdr << ros_message.x;
// Member: y
cdr << ros_message.y;
// Member: z
cdr << ros_message.z;
// Member: yaw
cdr << ros_message.yaw;
// Member: pitch
cdr << ros_message.pitch;
// Member: roll
cdr << ros_message.roll;
return true;
}
bool
ROSIDL_TYPESUPPORT_FASTRTPS_CPP_PUBLIC_vive_interfaces
cdr_deserialize(
eprosima::fastcdr::Cdr & cdr,
vive_interfaces::msg::Hmd & ros_message)
{
// Member: x
cdr >> ros_message.x;
// Member: y
cdr >> ros_message.y;
// Member: z
cdr >> ros_message.z;
// Member: yaw
cdr >> ros_message.yaw;
// Member: pitch
cdr >> ros_message.pitch;
// Member: roll
cdr >> ros_message.roll;
return true;
}
size_t
ROSIDL_TYPESUPPORT_FASTRTPS_CPP_PUBLIC_vive_interfaces
get_serialized_size(
const vive_interfaces::msg::Hmd & ros_message,
size_t current_alignment)
{
size_t initial_alignment = current_alignment;
const size_t padding = 4;
const size_t wchar_size = 4;
(void)padding;
(void)wchar_size;
// Member: x
{
size_t item_size = sizeof(ros_message.x);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// Member: y
{
size_t item_size = sizeof(ros_message.y);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// Member: z
{
size_t item_size = sizeof(ros_message.z);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// Member: yaw
{
size_t item_size = sizeof(ros_message.yaw);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// Member: pitch
{
size_t item_size = sizeof(ros_message.pitch);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
// Member: roll
{
size_t item_size = sizeof(ros_message.roll);
current_alignment += item_size +
eprosima::fastcdr::Cdr::alignment(current_alignment, item_size);
}
return current_alignment - initial_alignment;
}
size_t
ROSIDL_TYPESUPPORT_FASTRTPS_CPP_PUBLIC_vive_interfaces
max_serialized_size_Hmd(
bool & full_bounded,
size_t current_alignment)
{
size_t initial_alignment = current_alignment;
const size_t padding = 4;
const size_t wchar_size = 4;
(void)padding;
(void)wchar_size;
(void)full_bounded;
// Member: x
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint32_t) +
eprosima::fastcdr::Cdr::alignment(current_alignment, sizeof(uint32_t));
}
// Member: y
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint32_t) +
eprosima::fastcdr::Cdr::alignment(current_alignment, sizeof(uint32_t));
}
// Member: z
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint32_t) +
eprosima::fastcdr::Cdr::alignment(current_alignment, sizeof(uint32_t));
}
// Member: yaw
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint32_t) +
eprosima::fastcdr::Cdr::alignment(current_alignment, sizeof(uint32_t));
}
// Member: pitch
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint32_t) +
eprosima::fastcdr::Cdr::alignment(current_alignment, sizeof(uint32_t));
}
// Member: roll
{
size_t array_size = 1;
current_alignment += array_size * sizeof(uint32_t) +
eprosima::fastcdr::Cdr::alignment(current_alignment, sizeof(uint32_t));
}
return current_alignment - initial_alignment;
}
static bool _Hmd__cdr_serialize(
const void * untyped_ros_message,
eprosima::fastcdr::Cdr & cdr)
{
auto typed_message =
static_cast<const vive_interfaces::msg::Hmd *>(
untyped_ros_message);
return cdr_serialize(*typed_message, cdr);
}
static bool _Hmd__cdr_deserialize(
eprosima::fastcdr::Cdr & cdr,
void * untyped_ros_message)
{
auto typed_message =
static_cast<vive_interfaces::msg::Hmd *>(
untyped_ros_message);
return cdr_deserialize(cdr, *typed_message);
}
static uint32_t _Hmd__get_serialized_size(
const void * untyped_ros_message)
{
auto typed_message =
static_cast<const vive_interfaces::msg::Hmd *>(
untyped_ros_message);
return static_cast<uint32_t>(get_serialized_size(*typed_message, 0));
}
static size_t _Hmd__max_serialized_size(bool & full_bounded)
{
return max_serialized_size_Hmd(full_bounded, 0);
}
static message_type_support_callbacks_t _Hmd__callbacks = {
"vive_interfaces::msg",
"Hmd",
_Hmd__cdr_serialize,
_Hmd__cdr_deserialize,
_Hmd__get_serialized_size,
_Hmd__max_serialized_size
};
static rosidl_message_type_support_t _Hmd__handle = {
rosidl_typesupport_fastrtps_cpp::typesupport_identifier,
&_Hmd__callbacks,
get_message_typesupport_handle_function,
};
} // namespace typesupport_fastrtps_cpp
} // namespace msg
} // namespace vive_interfaces
namespace rosidl_typesupport_fastrtps_cpp
{
template<>
ROSIDL_TYPESUPPORT_FASTRTPS_CPP_EXPORT_vive_interfaces
const rosidl_message_type_support_t *
get_message_type_support_handle<vive_interfaces::msg::Hmd>()
{
return &vive_interfaces::msg::typesupport_fastrtps_cpp::_Hmd__handle;
}
} // namespace rosidl_typesupport_fastrtps_cpp
#ifdef __cplusplus
extern "C"
{
#endif
const rosidl_message_type_support_t *
ROSIDL_TYPESUPPORT_INTERFACE__MESSAGE_SYMBOL_NAME(rosidl_typesupport_fastrtps_cpp, vive_interfaces, msg, Hmd)() {
return &vive_interfaces::msg::typesupport_fastrtps_cpp::_Hmd__handle;
}
#ifdef __cplusplus
}
#endif
<file_sep>import triad_openvr as vr
import pylab as plt
v = vr.triad_openvr()
data = v.devices["controller_1"].sample(1000,250)
plt.plot(data.time,data.x)
plt.title('Controller X Coordinate')
plt.xlabel('Time (seconds)')
plt.ylabel('X Coordinate (meters)')
plt.show()
|
c832d866c7e91b578419d34d3b6b71e5e6310ffd
|
[
"CMake",
"Markdown",
"Python",
"Text",
"C",
"C++"
] | 45
|
C++
|
bjuspi/Omron_Robotics
|
c8534bbd557f97cd6aee58d95798f9580b1a382a
|
253f7289446b2aafafd766087ed7828e1b8cf566
|
refs/heads/main
|
<repo_name>doppelbeaver/DopplerFace<file_sep>/README.md
# DopplerFace
Final Project for CS 386W. Inspired by the paper *SoundWave: Using the Doppler Effect to Sense Gestures* (Gupta et al.) and the code by @DanielRapp, we implemented an algorithm to detect face-touching movement.
## Abstract
Amid the ongoing COVID-19 pandemic, health organizations and governmental authorities around the world have advised people not to touch their faces. We propose DopplerFace, a novel face-touching detection model, which utilizes the Doppler effect on acoustic signals. DopplerFace can be installed on any commodity laptop computer with a built-in microphone and speaker. It issues a warning when the person sitting in front of the laptop attempts to touch his or her face. Unlike existing face-touching detection models, which are based on camera input or wearable device sensors, DopplerFace is computationally cheaper and unlikely to raise privacy concerns.
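At its core, DopplerFace emits an inaudible tone from the laptop speaker and measures, in the microphone's FFT, how far the energy around that tone spreads to each side; a hand moving near the laptop Doppler-shifts the echo and makes that spread asymmetric. The snippet below is a minimal, illustrative sketch of that measurement only (the function name and the `cutoffRatio` parameter are hypothetical; the actual detector, including calibration and thresholding, lives in `DopplerFace/app.js`):

```js
// Illustrative sketch: measure how far the spectrum spreads around the emitted tone.
// Assumes `analyser` is a Web Audio AnalyserNode fed by the microphone while an
// oscillator is already playing a tone at `toneFreq` (e.g. ~20 kHz).
function bandwidthAround(analyser, sampleRate, toneFreq, cutoffRatio) {
  const bins = new Uint8Array(analyser.frequencyBinCount);
  analyser.getByteFrequencyData(bins);
  // FFT bin holding the emitted tone.
  const toneBin = Math.round(toneFreq / (sampleRate / 2) * analyser.frequencyBinCount);
  const toneVolume = bins[toneBin] || 1;
  let left = 0;
  let right = 0;
  // Walk outward from the tone until the normalized volume drops below the cutoff.
  while (toneBin - left - 1 >= 0 && bins[toneBin - left - 1] / toneVolume > cutoffRatio) left++;
  while (toneBin + right + 1 < bins.length && bins[toneBin + right + 1] / toneVolume > cutoffRatio) right++;
  // Motion toward or away from the laptop widens one side of the peak more than the other,
  // so the left/right asymmetry (tracked over time) is what the detector thresholds.
  return { left, right, diff: left - right };
}
```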
## Acknowledgement
The authors would like to thank Professor <NAME> and Teaching Assistant <NAME> for valuable discussions.
<file_sep>/DopplerFace/app.js
var lineChart = new Chart(document.getElementById('canvas').getContext('2d'), {
type: 'line',
data: {
datasets: [{
label: 'left',
fill: false,
lineTension: 0,
backgroundColor: 'rgb(255, 0, 0)',
borderColor: 'rgb(255, 0, 0)',
data: []
}, {
label: 'right',
fill: false,
lineTension: 0,
backgroundColor: 'rgb(0, 0, 255)',
borderColor: 'rgb(0, 0, 255)',
data: []
}]
},
options: {
scales: {
yAxes: [{
ticks: {
suggestedMin: 0,
suggestedMax: 35
}
}],
xAxes: [{
type: "linear", // MANDATORY TO SHOW YOUR POINTS! (THIS IS THE IMPORTANT BIT)
display: true, // mandatory
}],
}
}
});
var timeStep = 0;
var calibrationData = {
calibrationOn: true,
previousDiff: 0,
previousDirection: 0,
directionChanges: 0,
iteration: 0,
maxVolumeRatio: 0.01,
iterationCycles: 20,
upThreshold: 5,
downThreshold: 0,
upAmount: 1.1,
downAmount: 0.95
};
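// The calibration loop tunes maxVolumeRatio, the normalized-volume cutoff used when measuring the
// bandwidth around the primary tone: every iterationCycles reads it is raised by upAmount when the
// detected direction flipped at least upThreshold times (too jittery) and lowered by downAmount
// when it flipped at most downThreshold times.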
var freqBoundary = 33;
var lineBoundary = 100;
var windowBound = 50;
var freq = 20000;
var threshold = 200;
var touch = -1;
var warningDelta = 200;
var initTime = 1000;
var sign = function(x) {
return typeof x === 'number' ? x ? x < 0 ? -1 : 1 : x === x ? 0 : 0 : 0;
}
document.getElementById("ready").addEventListener('click', function() {
window.doppler = (function() {
//First we call Audio Context to help manipulate audio
var context = new (window.AudioContext ||
window.webkitAudioContext ||
window.mozAudioContext ||
window.oAudioContext ||
window.msAudioContext);
var oscillator = context.createOscillator();
var interval = 0;
//set up the function that reads mic data, whether calibration is on or off
var readMic = function(analyser, userCallback) {
var audioData = new Uint8Array(analyser.frequencyBinCount);
analyser.getByteFrequencyData(audioData);
var primaryTone = Math.round( freq/(context.sampleRate / 2) * analyser.fftSize/2 );
var primaryVolume = audioData[primaryTone];
var maxVolumeRatio = 0.01;
var rightBandwidth = 0;
var leftBandwidth = 0;
do {
leftBandwidth++;
var volume = audioData[primaryTone-leftBandwidth];
var normalizedVolume = volume / primaryVolume;
} while (normalizedVolume > calibrationData.maxVolumeRatio && leftBandwidth < freqBoundary);
do {
rightBandwidth++;
var volume = audioData[primaryTone+rightBandwidth];
var normalizedVolume = volume / primaryVolume;
} while (normalizedVolume > calibrationData.maxVolumeRatio && rightBandwidth < freqBoundary);
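// A hand moving near the laptop Doppler-shifts the reflected tone, widening the spectrum on one
// side of the primary frequency, so the left/right bandwidth difference encodes the motion.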
var band = { left: leftBandwidth, right: rightBandwidth, diff: leftBandwidth-rightBandwidth };
if (calibrationData.calibrationOn == true){
var direction = sign(band.diff);
if (calibrationData.previousDirection != direction) {
calibrationData.directionChanges++;
calibrationData.previousDirection = direction;
}
// only adjust the calibration once every iterationCycles reads
calibrationData.iteration = ((calibrationData.iteration + 1) % calibrationData.iterationCycles);
if (calibrationData.iteration == 0) {
if (calibrationData.directionChanges >= calibrationData.upThreshold) calibrationData.maxVolumeRatio *= calibrationData.upAmount;
if (calibrationData.directionChanges <= calibrationData.downThreshold) calibrationData.maxVolumeRatio *= calibrationData.downAmount;
calibrationData.maxVolumeRatio = Math.min(0.95, calibrationData.maxVolumeRatio);
calibrationData.maxVolumeRatio = Math.max(0.0001, calibrationData.maxVolumeRatio);
calibrationData.directionChanges = 0;
}
}
userCallback(band);
interval = setTimeout(readMic, 1, analyser, userCallback);
};
//In this function we handle the stream of audio in terms of its Doppler tone and calibration
var handleMic = function(stream, userCallback) {
// Loading microphone
var microphone = context.createMediaStreamSource(stream);
var analyser = context.createAnalyser();
analyser.smoothingTimeConstant = 0.5;
analyser.fftSize = 4096;
microphone.connect(analyser);
oscillator.frequency.value = freq;
oscillator.type = "sine";
oscillator.start(0);
oscillator.connect(context.destination);
//This is to optimize the doppler tone: it sweeps 19-22 kHz for the strongest bin. At first, you might notice that the sound is significantly high-pitched.
setTimeout(function() {
var oldFreq = oscillator.frequency.value;
var audioData = new Uint8Array(analyser.frequencyBinCount);
var maxAmplitude = 0;
var maxAmplitudeIndex = 0;
var from = Math.round( 19000/(context.sampleRate / 2) * analyser.fftSize/2 );
var to = Math.round( 22000/(context.sampleRate / 2) * analyser.fftSize/2 );
for (var i = from; i < to; i++) {
oscillator.frequency.value = (context.sampleRate / 2)/(analyser.fftSize/2) * i;
analyser.getByteFrequencyData(audioData);
if (audioData[i] > maxAmplitude) {
maxAmplitude = audioData[i];
maxAmplitudeIndex= i;
}
}
if (maxAmplitudeIndex == 0) {
freq = oldFreq;
}
else {
freq = (context.sampleRate / 2)/(analyser.fftSize/2) * maxAmplitudeIndex;
}
oscillator.frequency.value = freq;
clearInterval(interval);
readMic(analyser, userCallback);
});
};
//Check (or set) whether the calibration is on
var calibrate = function(newVal) {
if (typeof newVal == "boolean") {
calibrationData.calibrationOn = newVal;
}
return calibrationData.calibrationOn;
};
return {
init: function(callback) {
navigator.getUserMedia_ = (navigator.getUserMedia || navigator.webkitGetUserMedia || navigator.mozGetUserMedia || navigator.msGetUserMedia);
navigator.getUserMedia_({ audio: { optional: [{ echoCancellation: false }] } }, function(stream) {
handleMic(stream, callback);
}, function() { console.log('Error!') });
},
stop: function () {
clearInterval(interval);
},
calibrate: calibrate
}
})(window, document);
});
document.getElementById("start").addEventListener('click', function() {
window.doppler.init(function(bandwidth) {
if (document.getElementById("pause").checked) {
return;
}
if (lineChart.data.datasets[0].data.length >= lineBoundary) {
lineChart.data.datasets[0].data.shift();
}
lineChart.data.datasets[0].data.push({ y: bandwidth.left, x: timeStep });
if (lineChart.data.datasets[1].data.length >= lineBoundary) {
lineChart.data.datasets[1].data.shift();
}
lineChart.data.datasets[1].data.push({ y: bandwidth.right, x: timeStep });
lineChart.update(0);
timeStep++;
if (timeStep >= initTime && (touch == -1 || timeStep - touch >= warningDelta)) {
var left_sub_right = 0;
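// Sum the left-minus-right bandwidth difference over the last windowBound chart samples;
// a sum at or above threshold is treated as a face-touching gesture.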
for (var i = lineBoundary - windowBound; i < lineBoundary; i++) {
left_sub_right += lineChart.data.datasets[0].data[i].y;
left_sub_right -= lineChart.data.datasets[1].data[i].y;
}
if (left_sub_right >= threshold) {
document.getElementById("warning").innerText = "YOU TOUCHED YOUR FACE";
document.getElementById("warning").style.color = "white";
document.getElementById("warning").style.backgroundColor = "red";
touch = timeStep;
} else {
document.getElementById("warning").innerText = "YOU ARE GOOD";
document.getElementById("warning").style.color = "black";
document.getElementById("warning").style.backgroundColor = "white";
}
}
});
});
|
1452743e5ecc05c188312c1f6f51332a7d79cc39
|
[
"Markdown",
"JavaScript"
] | 2
|
Markdown
|
doppelbeaver/DopplerFace
|
963fac1c1ffcd4f8c8bde85ef0ac6acb71915ce1
|
ec4502c1707963c55b48a9596c8eadb1004881be
|
refs/heads/master
|
<file_sep>(function () {
//DEBUG FUNCTION
//API.sendChat = function(message){
// console.debug(message);
// return -1;
//};
API.getWaitListPosition = function(id){
if(typeof id === 'undefined' || id === null){
id = API.getUser().id;
}
var wl = API.getWaitList();
for(var i = 0; i < wl.length; i++){
if(wl[i].id === id){
return i;
}
}
return -1;
};
var kill = function () {
clearInterval(plugBot.room.afkInterval);
plugBot.status = false;
};
var storeToStorage = function () {
localStorage.setItem("plugBotsettings", JSON.stringify(plugBot.settings));
localStorage.setItem("plugBotRoom", JSON.stringify(plugBot.room));
var plugBotStorageInfo = {
time: Date.now(),
stored: true,
version: plugBot.version
};
localStorage.setItem("plugBotStorageInfo", JSON.stringify(plugBotStorageInfo));
};
var subChat = function (chat, obj) {
if (typeof chat === "undefined") {
API.chatLog("There is a chat text missing.");
console.log("There is a chat text missing.");
return "[Error] No text message found.";
}
var lit = '%%';
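// e.g. subChat("Hi %%NAME%%", {name: "Bob"}) -> "Hi Bob"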
for (var prop in obj) {
chat = chat.replace(lit + prop.toUpperCase() + lit, obj[prop]);
}
return chat;
};
var retrieveSettings = function () {
var settings = JSON.parse(localStorage.getItem("plugBotsettings"));
if (settings !== null) {
for (var prop in settings) {
plugBot.settings[prop] = settings[prop];
}
}
};
var retrieveFromStorage = function () {
var info = localStorage.getItem("plugBotStorageInfo");
if (info === null) API.chatLog("No previous data found.");
else {
var settings = JSON.parse(localStorage.getItem("plugBotsettings"));
var room = JSON.parse(localStorage.getItem("plugBotRoom"));
var elapsed = Date.now() - JSON.parse(info).time;
if ((elapsed < 1 * 60 * 60 * 1000)) {
API.chatLog("Retrieving previously stored data.");
for (var prop in settings) {
plugBot.settings[prop] = settings[prop];
}
plugBot.room.users = room.users;
plugBot.room.afkList = room.afkList;
plugBot.room.historyList = room.historyList;
plugBot.room.mutedUsers = room.mutedUsers;
plugBot.room.autoskip = room.autoskip;
plugBot.room.roomstats = room.roomstats;
plugBot.room.messages = room.messages;
plugBot.room.queue = room.queue;
plugBot.room.newBlacklisted = room.newBlacklisted;
API.chatLog("Previously stored data successfully retrieved.");
}
}
};
var botCreator = "<NAME> (m44rt3n)";
var botCreatorIDs = ["4090016"];
var plugBot = {
version: "1.0",
status: false,
name: "plugBot",
loggedInID: null,
scriptLink: "https://rawgit.com/maartenpeels/PlugBotV3/master/bot.js",
cmdLink: "https://rawgit.com/maartenpeels/PlugBotV3/master/README.md",
retrieveSettings: retrieveSettings,
retrieveFromStorage: retrieveFromStorage,
settings: {
botName: "plugBot",
startupCap: 200, // 1-200
startupVolume: 0, // 0-100
maximumAfk: 120,
afkRemoval: false,
maximumDc: 60,
voteSkip: true,
voteSkipLimit: 10,
timeGuard: true,
maximumSongLength: 10,
commandCooldown: 10,
usercommandsEnabled: true,
lockskipPosition: 3,
lockskipReasons: [
["theme", "This song does not fit the room theme. "],
["op", "This song is on the OP list. "],
["history", "This song is in the history. "],
["mix", "You played a mix, which is against the rules. "],
["sound", "The song you played had bad sound quality or no sound. "],
["nsfw", "The song you contained was NSFW (image or sound). "],
["unavailable", "The song you played was not available for some users. "]
],
afkpositionCheck: 15,
afkRankCheck: "ambassador",
filterChat: true,
etaRestriction: false,
welcome: false,
songstats: false,
commandLiteral: "!",
},
room: {
users: [],
afkList: [],
mutedUsers: [],
bannedUsers: [],
skippable: true,
usercommand: true,
allcommand: true,
afkInterval: null,
autoskip: true,
autoskipTimer: null,
queueing: 0,
queueable: true,
currentDJID: null,
historyList: [],
cycleTimer: setTimeout(function () {
}, 1),
roomstats: {
accountName: null,
totalWoots: 0,
totalCurates: 0,
totalMehs: 0,
launchTime: null,
songCount: 0,
chatmessages: 0
},
messages: [],
queue: {
id: [],
position: []
},
blacklists: {
},
newBlacklisted: [],
newBlacklistedSongFunction: null,
roulette: {
rouletteStatus: false,
participants: [],
countdown: null,
startRoulette: function () {
plugBot.room.roulette.rouletteStatus = true;
plugBot.room.roulette.countdown = setTimeout(function () {
plugBot.room.roulette.endRoulette();
}, 20 * 1000);
API.sendChat("/me The roulette is now open! Type !join to participate, you have 20 seconds!");
},
endRoulette: function () {
plugBot.room.roulette.rouletteStatus = false;
var ind = Math.floor(Math.random() * plugBot.room.roulette.participants.length);
var winner = plugBot.room.roulette.participants[ind];
plugBot.room.roulette.participants = [];
var pos = Math.floor((Math.random() * API.getWaitList().length) + 1);
var user = plugBot.userUtilities.lookupUser(winner);
var name = user.username;
API.sendChat(subChat("/me A winner has been picked! the lucky one is @%%NAME%%, he/she will be set to a random position(%%POSITION%%)!", {name: name, position: pos}));
setTimeout(function (winner, pos) {
plugBot.userUtilities.moveUser(winner, pos, false);
}, 1 * 1000, winner, pos);
}
}
},
User: function (id, name) {
this.id = id;
this.username = name;
this.jointime = Date.now();
this.lastActivity = Date.now();
this.votes = {
woot: 0,
meh: 0,
curate: 0
};
this.lastEta = null;
this.afkWarningCount = 0;
this.afkCountdown = null;
this.inRoom = true;
this.isMuted = false;
this.lastDC = {
time: null,
position: null,
songCount: 0
};
this.lastKnownPosition = null;
this.lastSlotsTime = null;
this.lostSlots = 0;
},
userUtilities: {
getJointime: function (user) {
return user.jointime;
},
getUser: function (user) {
return API.getUser(user.id);
},
updatePosition: function (user, newPos) {
user.lastKnownPosition = newPos;
},
updateDC: function (user) {
user.lastDC.time = Date.now();
user.lastDC.position = user.lastKnownPosition;
user.lastDC.songCount = plugBot.room.roomstats.songCount;
},
setLastActivity: function (user) {
user.lastActivity = Date.now();
user.afkWarningCount = 0;
clearTimeout(user.afkCountdown);
},
getLastActivity: function (user) {
return user.lastActivity;
},
getWarningCount: function (user) {
return user.afkWarningCount;
},
setWarningCount: function (user, value) {
user.afkWarningCount = value;
},
lookupUser: function (id) {
for (var i = 0; i < plugBot.room.users.length; i++) {
if (plugBot.room.users[i].id === id) {
return plugBot.room.users[i];
}
}
return false;
},
lookupUserName: function (name) {
for (var i = 0; i < plugBot.room.users.length; i++) {
var match = plugBot.room.users[i].username.trim() == name.trim();
if (match) {
return plugBot.room.users[i];
}
}
return false;
},
voteRatio: function (id) {
var user = plugBot.userUtilities.lookupUser(id);
var votes = user.votes;
if (votes.meh === 0) votes.ratio = 1;
else votes.ratio = (votes.woot / votes.meh).toFixed(2);
return votes;
},
getPermission: function (obj) { //1 requests
var u;
if (typeof obj === "object") u = obj;
else u = API.getUser(obj);
for (var i = 0; i < botCreatorIDs.length; i++) {
if (botCreatorIDs[i].indexOf(u.id) > -1) return 10;
}
if (u.gRole < 2) return u.role;
else {
switch (u.gRole) {
case 2:
return 7;
case 3:
return 8;
case 4:
return 9;
case 5:
return 10;
}
}
return 0;
},
moveUser: function (id, pos, priority) {
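// Adds the user to the waitlist (or moves them if already in it); when the waitlist is full (50),
// the user is queued and the booth is locked, and eventWaitlistupdate drains the queue as slots open.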
var user = plugBot.userUtilities.lookupUser(id);
var wlist = API.getWaitList();
if (API.getWaitListPosition(id) === -1) {
if (wlist.length < 50) {
API.moderateAddDJ(id);
if (pos !== 0) setTimeout(function (id, pos) {
API.moderateMoveDJ(id, pos);
}, 1250, id, pos);
}
else {
var alreadyQueued = -1;
for (var i = 0; i < plugBot.room.queue.id.length; i++) {
if (plugBot.room.queue.id[i] === id) alreadyQueued = i;
}
if (alreadyQueued !== -1) {
plugBot.room.queue.position[alreadyQueued] = pos;
return API.sendChat(subChat("/me User is already being added! Changed the desired position to %%POSITION%%.", {position: plugBot.room.queue.position[alreadyQueued]}));
}
plugBot.roomUtilities.booth.lockBooth();
if (priority) {
plugBot.room.queue.id.unshift(id);
plugBot.room.queue.position.unshift(pos);
}
else {
plugBot.room.queue.id.push(id);
plugBot.room.queue.position.push(pos);
}
var name = user.username;
return API.sendChat(subChat("/me Added @%%NAME%% to the queue. Current queue: %%POSITION%%.", {name: name, position: plugBot.room.queue.position.length}));
}
}
else API.moderateMoveDJ(id, pos);
},
dclookup: function (id) {
var user = plugBot.userUtilities.lookupUser(id);
if (typeof user === 'boolean') return "/me User not found.";
var name = user.username;
if (user.lastDC.time === null) return subChat("/me @%%NAME%% did not disconnect during my time here.", {name: name});
var dc = user.lastDC.time;
var pos = user.lastDC.position;
if (pos === null) return "/me No last position known. The waitlist needs to update at least once to register a user's last position.";
var timeDc = Date.now() - dc;
var validDC = false;
if (plugBot.settings.maximumDc * 60 * 1000 > timeDc) {
validDC = true;
}
var time = plugBot.roomUtilities.msToStr(timeDc);
if (!validDC) return (subChat("/me @%%NAME%%'s last disconnect (DC or leave) was too long ago: %%TIME%%.", {name: plugBot.userUtilities.getUser(user).username, time: time}));
var songsPassed = plugBot.room.roomstats.songCount - user.lastDC.songCount;
var afksRemoved = 0;
var afkList = plugBot.room.afkList;
for (var i = 0; i < afkList.length; i++) {
var timeAfk = afkList[i][1];
var posAfk = afkList[i][2];
if (dc < timeAfk && posAfk < pos) {
afksRemoved++;
}
}
var newPosition = user.lastDC.position - songsPassed - afksRemoved;
if (newPosition <= 0) newPosition = 1;
var msg = subChat("/me @%%NAME%% disconnected %%TIME%% ago and should be at position %%POSITION%%.", {name: plugBot.userUtilities.getUser(user).username, time: time, position: newPosition});
plugBot.userUtilities.moveUser(user.id, newPosition, true);
return msg;
}
},
roomUtilities: {
rankToNumber: function (rankString) {
var rankInt = null;
switch (rankString) {
case "admin":
rankInt = 10;
break;
case "ambassador":
rankInt = 7;
break;
case "host":
rankInt = 5;
break;
case "cohost":
rankInt = 4;
break;
case "manager":
rankInt = 3;
break;
case "bouncer":
rankInt = 2;
break;
case "residentdj":
rankInt = 1;
break;
case "user":
rankInt = 0;
break;
}
return rankInt;
},
msToStr: function (msTime) {
var ms, msg, timeAway;
msg = '';
timeAway = {
'days': 0,
'hours': 0,
'minutes': 0,
'seconds': 0
};
ms = {
'day': 24 * 60 * 60 * 1000,
'hour': 60 * 60 * 1000,
'minute': 60 * 1000,
'second': 1000
};
if (msTime > ms.day) {
timeAway.days = Math.floor(msTime / ms.day);
msTime = msTime % ms.day;
}
if (msTime > ms.hour) {
timeAway.hours = Math.floor(msTime / ms.hour);
msTime = msTime % ms.hour;
}
if (msTime > ms.minute) {
timeAway.minutes = Math.floor(msTime / ms.minute);
msTime = msTime % ms.minute;
}
if (msTime > ms.second) {
timeAway.seconds = Math.floor(msTime / ms.second);
}
if (timeAway.days !== 0) {
msg += timeAway.days.toString() + 'd';
}
if (timeAway.hours !== 0) {
msg += timeAway.hours.toString() + 'h';
}
if (timeAway.minutes !== 0) {
msg += timeAway.minutes.toString() + 'm';
}
if (timeAway.minutes < 1 && timeAway.hours < 1 && timeAway.days < 1) {
msg += timeAway.seconds.toString() + 's';
}
if (msg !== '') {
return msg;
} else {
return false;
}
},
booth: {
lockTimer: setTimeout(function () {
}, 1000),
locked: false,
lockBooth: function () {
API.moderateLockWaitList(!plugBot.roomUtilities.booth.locked);
plugBot.roomUtilities.booth.locked = false;
if (plugBot.settings.lockGuard) {
plugBot.roomUtilities.booth.lockTimer = setTimeout(function () {
API.moderateLockWaitList(plugBot.roomUtilities.booth.locked);
}, plugBot.settings.maximumLocktime * 60 * 1000);
}
},
unlockBooth: function () {
API.moderateLockWaitList(plugBot.roomUtilities.booth.locked);
clearTimeout(plugBot.roomUtilities.booth.lockTimer);
}
},
afkCheck: function () {
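// Scans the top afkpositionCheck waitlist spots (users at or below afkRankCheck) and escalates:
// a first AFK warning, a second warning roughly 90 seconds later, then removal about 30 seconds
// after that if the user still has not chatted.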
if (!plugBot.status || !plugBot.settings.afkRemoval) return void (0);
var rank = plugBot.roomUtilities.rankToNumber(plugBot.settings.afkRankCheck);
var djlist = API.getWaitList();
var lastPos = Math.min(djlist.length, plugBot.settings.afkpositionCheck);
if (lastPos - 1 > djlist.length) return void (0);
for (var i = 0; i < lastPos; i++) {
if (typeof djlist[i] !== 'undefined') {
var id = djlist[i].id;
var user = plugBot.userUtilities.lookupUser(id);
if (typeof user !== 'boolean') {
var plugUser = plugBot.userUtilities.getUser(user);
if (rank !== null && plugBot.userUtilities.getPermission(plugUser) <= rank) {
var name = plugUser.username;
var lastActive = plugBot.userUtilities.getLastActivity(user);
var inactivity = Date.now() - lastActive;
var time = plugBot.roomUtilities.msToStr(inactivity);
var warncount = user.afkWarningCount;
if (inactivity > plugBot.settings.maximumAfk * 60 * 1000) {
if (warncount === 0) {
API.sendChat(subChat("/me @%%NAME%%, you have been afk for %%TIME%%, please respond within 2 minutes or you will be removed.", {name: name, time: time}));
user.afkWarningCount = 3;
user.afkCountdown = setTimeout(function (userToChange) {
userToChange.afkWarningCount = 1;
}, 90 * 1000, user);
}
else if (warncount === 1) {
API.sendChat(subChat("/me @%%NAME%%, you will be removed due to AFK soon if you don't respond.", {name: name}));
user.afkWarningCount = 3;
user.afkCountdown = setTimeout(function (userToChange) {
userToChange.afkWarningCount = 2;
}, 30 * 1000, user);
}
else if (warncount === 2) {
var pos = API.getWaitListPosition(id);
if (pos !== -1) {
pos++;
plugBot.room.afkList.push([id, Date.now(), pos]);
user.lastDC = {
time: null,
position: null,
songCount: 0
};
API.moderateRemoveDJ(id);
API.sendChat(subChat("/me @%%NAME%%, you have been removed for being afk for %%TIME%%. You were at position %%POSITION%%. Chat at least once every %%MAXIMUMAFK%% minutes if you want to play a song.", {name: name, time: time, position: pos, maximumafk: plugBot.settings.maximumAfk}));
}
user.afkWarningCount = 0;
}
}
}
}
}
}
},
updateBlacklists: function () {
for (var bl in plugBot.settings.blacklists) {
plugBot.room.blacklists[bl] = [];
if (typeof plugBot.settings.blacklists[bl] === 'function') {
plugBot.room.blacklists[bl] = plugBot.settings.blacklists[bl]();
}
else if (typeof plugBot.settings.blacklists[bl] === 'string') {
if (plugBot.settings.blacklists[bl] === '') {
continue;
}
try {
(function (l) {
$.get(plugBot.settings.blacklists[l], function (data) {
if (typeof data === 'string') {
data = JSON.parse(data);
}
var list = [];
for (var prop in data) {
if (typeof data[prop].mid !== 'undefined') {
list.push(data[prop].mid);
}
}
plugBot.room.blacklists[l] = list;
})
})(bl);
}
catch (e) {
API.chatLog('Error setting' + bl + 'blacklist.');
console.log('Error setting' + bl + 'blacklist.');
console.log(e);
}
}
}
},
logNewBlacklistedSongs: function () {
if (typeof console.table !== 'undefined') {
console.table(plugBot.room.newBlacklisted);
}
else {
console.log(plugBot.room.newBlacklisted);
}
},
exportNewBlacklistedSongs: function () {
var list = {};
for (var i = 0; i < plugBot.room.newBlacklisted.length; i++) {
var track = plugBot.room.newBlacklisted[i];
list[track.list] = [];
list[track.list].push({
title: track.title,
author: track.author,
mid: track.mid
});
}
return list;
}
},
eventChat: function (chat) {
chat.message = chat.message.trim();
for (var i = 0; i < plugBot.room.users.length; i++) {
if (plugBot.room.users[i].id === chat.uid) {
plugBot.userUtilities.setLastActivity(plugBot.room.users[i]);
if (plugBot.room.users[i].username !== chat.un) {
plugBot.room.users[i].username = chat.un;
}
plugBot.room.messages[chat.uid] = chat;
}
}
if (plugBot.chatUtilities.chatFilter(chat)) return void (0);
if (!plugBot.chatUtilities.commandCheck(chat))
plugBot.chatUtilities.action(chat);
},
eventUserjoin: function (user) {
var known = false;
var index = null;
for (var i = 0; i < plugBot.room.users.length; i++) {
if (plugBot.room.users[i].id === user.id) {
known = true;
index = i;
}
}
var greet = true;
var welcomeback = null;
if (known) {
plugBot.room.users[index].inRoom = true;
var u = plugBot.userUtilities.lookupUser(user.id);
var jt = u.jointime;
var t = Date.now() - jt;
if (t < 10 * 1000) greet = false;
else welcomeback = true;
}
else {
plugBot.room.users.push(new plugBot.User(user.id, user.username));
welcomeback = false;
}
for (var j = 0; j < plugBot.room.users.length; j++) {
if (plugBot.userUtilities.getUser(plugBot.room.users[j]).id === user.id) {
plugBot.userUtilities.setLastActivity(plugBot.room.users[j]);
plugBot.room.users[j].jointime = Date.now();
}
}
if (plugBot.settings.welcome && greet) {
welcomeback ?
setTimeout(function (user) {
API.sendChat(subChat("/me Welcome back, %%NAME%%", {name: user.username}));
}, 1 * 1000, user)
:
setTimeout(function (user) {
API.sendChat(subChat("/me Welcome %%NAME%%", {name: user.username}));
}, 1 * 1000, user);
}
},
eventUserleave: function (user) {
for (var i = 0; i < plugBot.room.users.length; i++) {
if (plugBot.room.users[i].id === user.id) {
plugBot.userUtilities.updateDC(plugBot.room.users[i]);
plugBot.room.users[i].inRoom = false;
}
}
},
eventVoteupdate: function (obj) {
for (var i = 0; i < plugBot.room.users.length; i++) {
if (plugBot.room.users[i].id === obj.user.id) {
if (obj.vote === 1) {
plugBot.room.users[i].votes.woot++;
}
else {
plugBot.room.users[i].votes.meh++;
}
}
}
var mehs = API.getScore().negative;
var woots = API.getScore().positive;
var dj = API.getDJ();
if (plugBot.settings.voteSkip) {
if ((mehs - woots) >= (plugBot.settings.voteSkipLimit)) {
API.sendChat(subChat("/me @%%NAME%%, your song has exceeded the voteskip limit (%%LIMIT%% mehs).", {name: dj.username, limit: plugBot.settings.voteSkipLimit}));
API.moderateForceSkip();
}
}
},
eventCurateupdate: function (obj) {
for (var i = 0; i < plugBot.room.users.length; i++) {
if (plugBot.room.users[i].id === obj.user.id) {
plugBot.room.users[i].votes.curate++;
}
}
},
eventDjadvance: function (obj) {
$("#woot").click();
var user = plugBot.userUtilities.lookupUser(obj.dj.id);
for(var i = 0; i < plugBot.room.users.length; i++){
if(plugBot.room.users[i].id === user.id){
plugBot.room.users[i].lastDC = {
time: null,
position: null,
songCount: 0
};
}
}
var lastplay = obj.lastPlay;
if (typeof lastplay === 'undefined') return;
if (plugBot.settings.songstats) {
API.sendChat(subChat("/me %%ARTIST%% - %%TITLE%%: %%WOOTS%%W/%%GRABS%%G/%%MEHS%%M.", {artist: lastplay.media.author, title: lastplay.media.title, woots: lastplay.score.positive, grabs: lastplay.score.grabs, mehs: lastplay.score.negative}))
}
plugBot.room.roomstats.totalWoots += lastplay.score.positive;
plugBot.room.roomstats.totalMehs += lastplay.score.negative;
plugBot.room.roomstats.totalCurates += lastplay.score.grabs;
plugBot.room.roomstats.songCount++;
plugBot.room.currentDJID = obj.dj.id;
var alreadyPlayed = false;
for (var i = 0; i < plugBot.room.historyList.length; i++) {
if (plugBot.room.historyList[i][0] === obj.media.cid) {
var firstPlayed = plugBot.room.historyList[i][1];
var plays = plugBot.room.historyList[i].length - 1;
var lastPlayed = plugBot.room.historyList[i][plays];
API.sendChat(subChat("/me :repeat: This song has been played %%PLAYS%% time(s) in the last %%TIMETOTAL%%, last play was %%LASTTIME%% ago. :repeat:", {plays: plays, timetotal: plugBot.roomUtilities.msToStr(Date.now() - firstPlayed), lasttime: plugBot.roomUtilities.msToStr(Date.now() - lastPlayed)}));
plugBot.room.historyList[i].push(+new Date());
alreadyPlayed = true;
}
}
if (!alreadyPlayed) {
plugBot.room.historyList.push([obj.media.cid, +new Date()]);
}
var newMedia = obj.media;
if (plugBot.settings.timeGuard && newMedia.duration > plugBot.settings.maximumSongLength * 60 && !plugBot.room.roomevent) {
var name = obj.dj.username;
API.sendChat(subChat("/me @%%NAME%%, your song is longer than %%MAXLENGTH%% minutes, you need permission to play longer songs.", {name: name, maxlength: plugBot.settings.maximumSongLength}));
API.moderateForceSkip();
}
if (user.ownSong) {
API.sendChat(subChat("/me :up: @%%NAME%% has permission to play their own production!", {name: user.username}));
user.ownSong = false;
}
clearTimeout(plugBot.room.autoskipTimer);
if (plugBot.room.autoskip) {
var remaining = obj.media.duration * 1000;
plugBot.room.autoskipTimer = setTimeout(function () {
console.log("Skipping track.");
API.moderateForceSkip();
}, remaining + 3000);
}
storeToStorage();
},
eventWaitlistupdate: function (users) {
if (users.length < 50) {
if (plugBot.room.queue.id.length > 0 && plugBot.room.queueable) {
plugBot.room.queueable = false;
setTimeout(function () {
plugBot.room.queueable = true;
}, 500);
plugBot.room.queueing++;
var id, pos;
setTimeout(
function () {
id = plugBot.room.queue.id.splice(0, 1)[0];
pos = plugBot.room.queue.position.splice(0, 1)[0];
API.moderateAddDJ(id, pos);
setTimeout(
function (id, pos) {
API.moderateMoveDJ(id, pos);
plugBot.room.queueing--;
if (plugBot.room.queue.id.length === 0) setTimeout(function () {
plugBot.roomUtilities.booth.unlockBooth();
}, 1000);
}, 1000, id, pos);
}, 1000 + plugBot.room.queueing * 2500);
}
}
for (var i = 0; i < users.length; i++) {
var user = plugBot.userUtilities.lookupUser(users[i].id);
plugBot.userUtilities.updatePosition(user, API.getWaitListPosition(users[i].id) + 1);
}
},
chatcleaner: function (chat) {
if (!plugBot.settings.filterChat) return false;
if (plugBot.userUtilities.getPermission(chat.uid) > 1) return false;
var msg = chat.message;
var containsLetters = false;
for (var i = 0; i < msg.length; i++) {
ch = msg.charAt(i);
if ((ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z') || (ch >= '0' && ch <= '9') || ch === ':' || ch === '^') containsLetters = true;
}
if (msg === '') {
return true;
}
if (!containsLetters && (msg.length === 1 || msg.length > 3)) return true;
msg = msg.replace(/[ ,;.:\/=~+%^*\-\\"'&@#]/g, '');
var capitals = 0;
var ch;
for (var i = 0; i < msg.length; i++) {
ch = msg.charAt(i);
if (ch >= 'A' && ch <= 'Z') capitals++;
}
if (capitals >= 40) {
API.sendChat(subChat("/me @%%NAME%%, unglue your capslock button please.", {name: chat.un}));
return true;
}
msg = msg.toLowerCase();
if (msg === 'skip') {
API.sendChat(subChat("/me @%%NAME%%, don't ask for skips.", {name: chat.un}));
return true;
}
for (var j = 0; j < plugBot.chatUtilities.spam.length; j++) {
if (msg === plugBot.chatUtilities.spam[j]) {
API.sendChat(subChat("/me @%%NAME%%, please don't spam.", {name: chat.un}));
return true;
}
}
return false;
},
chatUtilities: {
chatFilter: function (chat) {
var msg = chat.message;
var perm = plugBot.userUtilities.getPermission(chat.uid);
var user = plugBot.userUtilities.lookupUser(chat.uid);
var isMuted = false;
for (var i = 0; i < plugBot.room.mutedUsers.length; i++) {
if (plugBot.room.mutedUsers[i] === chat.uid) isMuted = true;
}
if (isMuted) {
API.moderateDeleteChat(chat.cid);
return true;
}
if (plugBot.settings.lockdownEnabled) {
if (perm === 0) {
API.moderateDeleteChat(chat.cid);
return true;
}
}
if (plugBot.chatcleaner(chat)) {
API.moderateDeleteChat(chat.cid);
return true;
}
if (msg.indexOf('http://adf.ly/') > -1) {
API.moderateDeleteChat(chat.cid);
API.sendChat(subChat("/me @%%NAME%%, please change your autowoot program. We suggest PlugCubed: http://plugcubed.net/", {name: chat.un}));
return true;
}
if (msg.indexOf('autojoin was not enabled') > 0 || msg.indexOf('AFK message was not enabled') > 0 || msg.indexOf('!afkdisable') > 0 || msg.indexOf('!joindisable') > 0 || msg.indexOf('autojoin disabled') > 0 || msg.indexOf('AFK message disabled') > 0) {
API.moderateDeleteChat(chat.cid);
return true;
}
var rlJoinChat = "/me @%%NAME%% joined the roulette! (!leave if you regret it.)";
var rlLeaveChat = "/me @%%NAME%% left the roulette!";
var joinedroulette = rlJoinChat.split('%%NAME%%');
if (joinedroulette[1].length > joinedroulette[0].length) joinedroulette = joinedroulette[1];
else joinedroulette = joinedroulette[0];
var leftroulette = rlLeaveChat.split('%%NAME%%');
if (leftroulette[1].length > leftroulette[0].length) leftroulette = leftroulette[1];
else leftroulette = leftroulette[0];
if ((msg.indexOf(joinedroulette) > -1 || msg.indexOf(leftroulette) > -1) && chat.uid === plugBot.loggedInID) {
setTimeout(function (id) {
API.moderateDeleteChat(id);
}, 2 * 1000, chat.cid);
return true;
}
return false;
},
commandCheck: function (chat) {
var cmd;
if (chat.message.charAt(0) === '!') {
var space = chat.message.indexOf(' ');
if (space === -1) {
cmd = chat.message;
}
else cmd = chat.message.substring(0, space);
}
else return false;
var userPerm = plugBot.userUtilities.getPermission(chat.uid);
//console.log("name: " + chat.un + ", perm: " + userPerm);
if (chat.message !== "!join" && chat.message !== "!leave") {
if (userPerm === 0 && !plugBot.room.usercommand) return void (0);
if (!plugBot.room.allcommand) return void (0);
}
if (chat.message === '!eta' && plugBot.settings.etaRestriction) {
if (userPerm < 2) {
var u = plugBot.userUtilities.lookupUser(chat.uid);
if (u.lastEta !== null && (Date.now() - u.lastEta) < 1 * 60 * 60 * 1000) {
API.moderateDeleteChat(chat.cid);
return void (0);
}
else u.lastEta = Date.now();
}
}
var executed = false;
for (var comm in plugBot.commands) {
var cmdCall = plugBot.commands[comm].command;
if (!Array.isArray(cmdCall)) {
cmdCall = [cmdCall]
}
for (var i = 0; i < cmdCall.length; i++) {
if (plugBot.settings.commandLiteral + cmdCall[i] === cmd) {
plugBot.commands[comm].functionality(chat, plugBot.settings.commandLiteral + cmdCall[i]);
executed = true;
break;
}
}
}
if (executed && userPerm === 0) {
plugBot.room.usercommand = false;
setTimeout(function () {
plugBot.room.usercommand = true;
}, plugBot.settings.commandCooldown * 1000);
}
if (executed) {
API.moderateDeleteChat(chat.cid);
plugBot.room.allcommand = false;
setTimeout(function () {
plugBot.room.allcommand = true;
}, 5 * 1000);
}
return executed;
},
action: function (chat) {
var user = plugBot.userUtilities.lookupUser(chat.uid);
if (chat.type === 'message') {
for (var j = 0; j < plugBot.room.users.length; j++) {
if (plugBot.userUtilities.getUser(plugBot.room.users[j]).id === chat.uid) {
plugBot.userUtilities.setLastActivity(plugBot.room.users[j]);
}
}
}
plugBot.room.roomstats.chatmessages++;
},
spam: [
'hueh', 'hu3', 'brbr', 'heu', 'brbr', 'kkkk', 'spoder', 'mafia', 'zuera', 'zueira',
'zueria', 'aehoo', 'aheu', 'alguem', 'algum', 'brazil', 'zoeira', 'fuckadmins', 'affff', 'vaisefoder', 'huenaarea',
'hitler', 'ashua', 'ahsu', 'ashau', 'lulz', 'huehue', 'hue', 'huehuehue', 'merda', 'pqp', 'puta', 'mulher', 'pula', 'retarda', 'caralho', 'filha', 'ppk',
'gringo', 'fuder', 'foder', 'hua', 'ahue', 'modafuka', 'modafoka', 'mudafuka', 'mudafoka', 'ooooooooooooooo', 'foda'
],
curses: [
'nigger', 'faggot', 'nigga', 'niqqa', 'motherfucker', 'modafocka'
]
},
connectAPI: function () {
this.proxy = {
eventChat: $.proxy(this.eventChat, this),
eventUserskip: $.proxy(this.eventUserskip, this),
eventUserjoin: $.proxy(this.eventUserjoin, this),
eventUserleave: $.proxy(this.eventUserleave, this),
eventUserfan: $.proxy(this.eventUserfan, this),
eventFriendjoin: $.proxy(this.eventFriendjoin, this),
eventFanjoin: $.proxy(this.eventFanjoin, this),
eventVoteupdate: $.proxy(this.eventVoteupdate, this),
eventCurateupdate: $.proxy(this.eventCurateupdate, this),
eventRoomscoreupdate: $.proxy(this.eventRoomscoreupdate, this),
eventDjadvance: $.proxy(this.eventDjadvance, this),
eventDjupdate: $.proxy(this.eventDjupdate, this),
eventWaitlistupdate: $.proxy(this.eventWaitlistupdate, this),
eventVoteskip: $.proxy(this.eventVoteskip, this),
eventModskip: $.proxy(this.eventModskip, this),
eventChatcommand: $.proxy(this.eventChatcommand, this),
eventHistoryupdate: $.proxy(this.eventHistoryupdate, this)
};
API.on(API.CHAT, this.proxy.eventChat);
API.on(API.USER_SKIP, this.proxy.eventUserskip);
API.on(API.USER_JOIN, this.proxy.eventUserjoin);
API.on(API.USER_LEAVE, this.proxy.eventUserleave);
API.on(API.USER_FAN, this.proxy.eventUserfan);
API.on(API.VOTE_UPDATE, this.proxy.eventVoteupdate);
API.on(API.GRAB_UPDATE, this.proxy.eventCurateupdate);
API.on(API.ROOM_SCORE_UPDATE, this.proxy.eventRoomscoreupdate);
API.on(API.ADVANCE, this.proxy.eventDjadvance);
API.on(API.WAIT_LIST_UPDATE, this.proxy.eventWaitlistupdate);
API.on(API.MOD_SKIP, this.proxy.eventModskip);
API.on(API.CHAT_COMMAND, this.proxy.eventChatcommand);
API.on(API.HISTORY_UPDATE, this.proxy.eventHistoryupdate);
},
disconnectAPI: function () {
API.off(API.CHAT, this.proxy.eventChat);
API.off(API.USER_SKIP, this.proxy.eventUserskip);
API.off(API.USER_JOIN, this.proxy.eventUserjoin);
API.off(API.USER_LEAVE, this.proxy.eventUserleave);
API.off(API.USER_FAN, this.proxy.eventUserfan);
API.off(API.VOTE_UPDATE, this.proxy.eventVoteupdate);
API.off(API.CURATE_UPDATE, this.proxy.eventCurateupdate);
API.off(API.ROOM_SCORE_UPDATE, this.proxy.eventRoomscoreupdate);
API.off(API.ADVANCE, this.proxy.eventDjadvance);
API.off(API.WAIT_LIST_UPDATE, this.proxy.eventWaitlistupdate);
API.off(API.MOD_SKIP, this.proxy.eventModskip);
API.off(API.CHAT_COMMAND, this.proxy.eventChatcommand);
API.off(API.HISTORY_UPDATE, this.proxy.eventHistoryupdate);
},
startup: function () {
Function.prototype.toString = function () {
return 'Function.'
};
var u = API.getUser();
if (plugBot.userUtilities.getPermission(u) < 2) return API.chatLog("Only bouncers and up can run a bot.");
if (plugBot.userUtilities.getPermission(u) === 2) API.chatLog("The bot can't move people when it's run as a bouncer.");
plugBot.connectAPI();
API.moderateDeleteChat = function (cid) {
$.ajax({
url: "https://plug.dj/_/chat/" + cid,
type: "DELETE"
})
};
retrieveSettings();
retrieveFromStorage();
window.bot = plugBot;
plugBot.roomUtilities.updateBlacklists();
setInterval(plugBot.roomUtilities.updateBlacklists, 60 * 60 * 1000);
if (plugBot.room.roomstats.launchTime === null) {
plugBot.room.roomstats.launchTime = Date.now();
}
for (var j = 0; j < plugBot.room.users.length; j++) {
plugBot.room.users[j].inRoom = false;
}
var userlist = API.getUsers();
for (var i = 0; i < userlist.length; i++) {
var known = false;
var ind = null;
for (var j = 0; j < plugBot.room.users.length; j++) {
if (plugBot.room.users[j].id === userlist[i].id) {
known = true;
ind = j;
}
}
if (known) {
plugBot.room.users[ind].inRoom = true;
}
else {
plugBot.room.users.push(new plugBot.User(userlist[i].id, userlist[i].username));
ind = plugBot.room.users.length - 1;
}
var wlIndex = API.getWaitListPosition(plugBot.room.users[ind].id) + 1;
plugBot.userUtilities.updatePosition(plugBot.room.users[ind], wlIndex);
}
plugBot.room.afkInterval = setInterval(function () {
plugBot.roomUtilities.afkCheck()
}, 10 * 1000);
plugBot.loggedInID = API.getUser().id;
plugBot.status = true;
API.sendChat('/cap ' + plugBot.settings.startupCap);
API.setVolume(plugBot.settings.startupVolume);
$("#woot").click();
API.chatLog('Avatars capped at ' + plugBot.settings.startupCap);
API.chatLog('Volume set to ' + plugBot.settings.startupVolume);
API.sendChat(subChat("/me %%BOTNAME%% v%%VERSION%% online!", {botname: plugBot.settings.botName, version: plugBot.version}));
},
commands: {
executable: function (minRank, chat) {
var id = chat.uid;
var perm = plugBot.userUtilities.getPermission(id);
var minPerm;
switch (minRank) {
case 'admin':
minPerm = 10;
break;
case 'ambassador':
minPerm = 7;
break;
case 'host':
minPerm = 5;
break;
case 'cohost':
minPerm = 4;
break;
case 'manager':
minPerm = 3;
break;
case 'mod':
if (plugBot.settings.bouncerPlus) {
minPerm = 2;
}
else {
minPerm = 3;
}
break;
case 'bouncer':
minPerm = 2;
break;
case 'residentdj':
minPerm = 1;
break;
case 'user':
minPerm = 0;
break;
default:
API.chatLog('error assigning minimum permission');
}
return perm >= minPerm;
},
/**
command: {
command: 'cmd',
rank: 'user/bouncer/mod/manager',
type: 'startsWith/exact',
functionality: function(chat, cmd){
if(this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if( !plugBot.commands.executable(this.rank, chat) ) return void (0);
else{
}
}
},
**/
//===COMMANDS===
activeCommand: {
command: 'active',
rank: 'bouncer',
type: 'startsWith',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
var msg = chat.message;
var now = Date.now();
var chatters = 0;
var time;
if (msg.length === cmd.length) time = 60;
else {
time = msg.substring(cmd.length + 1);
if (isNaN(time)) return API.sendChat(subChat("/me [@%%NAME%%] Invalid time specified.", {name: chat.un}));
}
for (var i = 0; i < plugBot.room.users.length; i++) {
var userTime = plugBot.userUtilities.getLastActivity(plugBot.room.users[i]);
if ((now - userTime) <= (time * 60 * 1000)) {
chatters++;
}
}
API.sendChat(subChat("/me [@%%NAME%% There have been %%AMOUNT%% users chatting in the past %%TIME%% minutes.", {name: chat.un, amount: chatters, time: time}));
}
}
},
addCommand: {
command: 'add',
rank: 'mod',
type: 'startsWith',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
var msg = chat.message;
if (msg.length === cmd.length) return API.sendChat(subChat("/me [@%%NAME%%] No user specified.", {name: chat.un}));
var name = msg.substr(cmd.length + 2);
var user = plugBot.userUtilities.lookupUserName(name);
if (msg.length > cmd.length + 2) {
if (typeof user !== 'undefined') {
if (plugBot.room.roomevent) {
plugBot.room.eventArtists.push(user.id);
}
API.moderateAddDJ(user.id);
} else API.sendChat(subChat("/me [@%%NAME%%] Invalid user specified.", {name: chat.un}));
}
}
}
},
afklimitCommand: {
command: 'afklimit',
rank: 'manager',
type: 'startsWith',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
var msg = chat.message;
if (msg.length === cmd.length) return API.sendChat(subChat("/me [@%%NAME%%] No limit specified.", {name: chat.un}));
var limit = msg.substring(cmd.length + 1);
if (!isNaN(limit)) {
plugBot.settings.maximumAfk = parseInt(limit, 10);
API.sendChat(subChat("/me [@%%NAME%%] Maximum afk duration set to %%TIME%% minutes.", {name: chat.un, time: plugBot.settings.maximumAfk}));
}
else API.sendChat(subChat("/me [@%%NAME%%] Invalid limit.", {name: chat.un}));
}
}
},
afkremovalCommand: {
command: 'afkremoval',
rank: 'mod',
type: 'exact',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
if (plugBot.settings.afkRemoval) {
plugBot.settings.afkRemoval = !plugBot.settings.afkRemoval;
clearInterval(plugBot.room.afkInterval);
API.sendChat(subChat("/me [@%%NAME%%] %%FUNCTION%% disabled.", {name: chat.un, 'function': "AFK removal"}));
}
else {
plugBot.settings.afkRemoval = !plugBot.settings.afkRemoval;
plugBot.room.afkInterval = setInterval(function () {
plugBot.roomUtilities.afkCheck()
}, 2 * 1000);
API.sendChat(subChat("/me [@%%NAME%%] %%FUNCTION%% enabled.", {name: chat.un, 'function': "AFK removal"}));
}
}
}
},
afkresetCommand: {
command: 'afkreset',
rank: 'bouncer',
type: 'startsWith',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
var msg = chat.message;
if (msg.length === cmd.length) return API.sendChat(subChat("/me [@%%NAME%%] No user specified.", {name: chat.un}));
var name = msg.substring(cmd.length + 2);
var user = plugBot.userUtilities.lookupUserName(name);
if (typeof user === 'boolean') return API.sendChat(subChat("/me [@%%NAME%%] Invalid user specified.", {name: chat.un}));
plugBot.userUtilities.setLastActivity(user);
API.sendChat(subChat("/me [@%%NAME%%] Reset the afk status of @%%USERNAME%%.", {name: chat.un, username: name}));
}
}
},
afktimeCommand: {
command: 'afktime',
rank: 'bouncer',
type: 'startsWith',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
var msg = chat.message;
if (msg.length === cmd.length) return API.sendChat(subChat("/me [@%%NAME%%] No user specified.", {name: chat.un}));
var name = msg.substring(cmd.length + 2);
var user = plugBot.userUtilities.lookupUserName(name);
if (typeof user === 'boolean') return API.sendChat(subChat("/me [@%%NAME%%] Invalid user specified.", {name: chat.un}));
var lastActive = plugBot.userUtilities.getLastActivity(user);
var inactivity = Date.now() - lastActive;
var time = plugBot.roomUtilities.msToStr(inactivity);
API.sendChat(subChat("/me [@%%NAME%%] @%%USERNAME%% has been inactive for %%TIME%%.", {name: chat.un, username: name, time: time}));
}
}
},
autoskipCommand: {
command: 'autoskip',
rank: 'mod',
type: 'exact',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
if (plugBot.room.autoskip) {
plugBot.room.autoskip = !plugBot.room.autoskip;
clearTimeout(plugBot.room.autoskipTimer);
return API.sendChat(subChat("/me [@%%NAME%%] %%FUNCTION%% disabled.", {name: chat.un, 'function': "autoskip"}));
}
else {
plugBot.room.autoskip = !plugBot.room.autoskip;
return API.sendChat(subChat("/me [@%%NAME%%] %%FUNCTION%% enabled.", {name: chat.un, 'function': "autoskip"}));
}
}
}
},
autowootCommand: {
command: 'autowoot',
rank: 'user',
type: 'exact',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
API.sendChat("/me We recommend PlugCubed for autowooting: http://plugcubed.net/");
}
}
},
banCommand: {
command: 'ban',
rank: 'bouncer',
type: 'startsWith',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
var msg = chat.message;
if (msg.length === cmd.length) return API.sendChat(subChat("/me [@%%NAME%%] No user specified.", {name: chat.un}));
var name = msg.substr(cmd.length + 2);
var user = plugBot.userUtilities.lookupUserName(name);
if (typeof user === 'boolean') return API.sendChat(subChat("/me [@%%NAME%%] Invalid user specified.", {name: chat.un}));
API.moderateBanUser(user.id, 1, API.BAN.DAY);
}
}
},
blacklistCommand: {
command: ['blacklist', 'bl'],
rank: 'bouncer',
type: 'startsWith',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
var msg = chat.message;
if (msg.length === cmd.length) return API.sendChat(subChat("/me [@%%NAME%%] No list specified.", {name: chat.un}));
var list = msg.substr(cmd.length + 1);
if (typeof plugBot.room.blacklists[list] === 'undefined') return API.sendChat(subChat("/me [@%%NAME%%] Invalid list specified.", {name: chat.un}));
else {
var media = API.getMedia();
var track = {
list: list,
author: media.author,
title: media.title,
mid: media.format + ':' + media.cid
};
plugBot.room.newBlacklisted.push(track);
plugBot.room.blacklists[list].push(media.format + ':' + media.cid);
API.sendChat(subChat("/me [@%%NAME%%] This track belongs on the %%BLACKLIST%% blacklist! [ %%AUTHOR%% - %%TITLE%% - %%MID%% ]", {name: chat.un, blacklist: list, author: media.author, title: media.title, mid: media.format + ':' + media.cid}));
API.moderateForceSkip();
if (typeof plugBot.room.newBlacklistedSongFunction === 'function') {
plugBot.room.newBlacklistedSongFunction(track);
}
}
}
}
},
clearchatCommand: {
command: 'clearchat',
rank: 'manager',
type: 'exact',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
var currentchat = $('#chat-messages').children();
for (var i = 0; i < currentchat.length; i++) {
API.moderateDeleteChat(currentchat[i].getAttribute("data-cid"));
}
return API.sendChat(subChat("/me [@%%NAME%%] Cleared the chat.", {name: chat.un}));
}
}
},
commandsCommand: {
command: 'commands',
rank: 'user',
type: 'exact',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
API.sendChat(subChat("/me %%BOTNAME%% commands: %%LINK%%", {botname: plugBot.settings.botName, link: plugBot.cmdLink}));
}
}
},
cookieCommand: {
command: 'cookie',
rank: 'user',
type: 'startsWith',
cookies: ['has given you a chocolate chip cookie!',
'has given you a soft homemade oatmeal cookie!',
'has given you a plain, dry, old cookie. It was the last one in the bag. Gross.',
'gives you a sugar cookie. What, no frosting and sprinkles? 0/10 would not touch.',
'gives you a chocolate chip cookie. Oh wait, those are raisins. Bleck!',
'gives you an enormous cookie. Poking it gives you more cookies. Weird.',
'gives you a fortune cookie. It reads "Why aren\'t you working on any projects?"',
'gives you a fortune cookie. It reads "Give that special someone a compliment"',
'gives you a fortune cookie. It reads "Take a risk!"',
'gives you a fortune cookie. It reads "Go outside."',
'gives you a fortune cookie. It reads "Don\'t forget to eat your veggies!"',
'gives you a fortune cookie. It reads "Do you even lift?"',
'gives you a fortune cookie. It reads "m808 pls"',
'gives you a fortune cookie. It reads "If you move your hips, you\'ll get all the ladies."',
'gives you a fortune cookie. It reads "I love you."',
'gives you a Golden Cookie. You can\'t eat it because it is made of gold. Dammit.',
'gives you an Oreo cookie with a glass of milk!',
'gives you a rainbow cookie made with love :heart:',
'gives you an old cookie that was left out in the rain, it\'s moldy.',
'bakes you fresh cookies, it smells amazing.'
],
getCookie: function () {
var c = Math.floor(Math.random() * this.cookies.length);
return this.cookies[c];
},
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
var msg = chat.message;
var space = msg.indexOf(' ');
if (space === -1) {
API.sendChat("/me eats a cookie.");
return false;
}
else {
var name = msg.substring(space + 2);
var user = plugBot.userUtilities.lookupUserName(name);
if (user === false || !user.inRoom) {
return API.sendChat(subChat("/em doesn't see %%NAME%% in room and eats a cookie himself.", {name: name}));
}
else if (user.username === chat.un) {
return API.sendChat(subChat("/me @%%NAME%%, you're a bit greedy, aren't you? Giving cookies to yourself, bah. Share some with other people!", {name: name}));
}
else {
return API.sendChat(subChat("/me @%%NAMETO%%, @%%NAMEFROM%% %%COOKIE%%", {nameto: user.username, namefrom: chat.un, cookie: this.getCookie()}));
}
}
}
}
},
cycleCommand: {
command: 'cycle',
rank: 'manager',
type: 'exact',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
plugBot.roomUtilities.changeDJCycle();
}
}
},
voteskipCommand: {
command: 'voteskip',
rank: 'manager',
type: 'startsWith',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
var msg = chat.message;
if (msg.length <= cmd.length + 1) return API.sendChat(subChat("/me [@%%NAME%%] Voteskip limit is currently set to %%LIMIT%% mehs.", {name: chat.un, limit: plugBot.settings.voteSkipLimit}));
var argument = msg.substring(cmd.length + 1);
if (!plugBot.settings.voteSkip) plugBot.settings.voteSkip = !plugBot.settings.voteSkip;
if (isNaN(argument)) {
API.sendChat(subChat("/me [@%%NAME%%] Invalid voteskip limit, please try again using a number to signify the number of mehs.", {name: chat.un}));
}
else {
plugBot.settings.voteSkipLimit = argument;
API.sendChat(subChat("/me [@%%NAME%%] Voteskip limit set to %%LIMIT%%.", {name: chat.un, limit: plugBot.settings.voteSkipLimit}));
}
}
}
},
togglevoteskipCommand: {
command: 'togglevoteskip',
rank: 'bouncer',
type: 'exact',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
if (plugBot.settings.voteSkip) {
plugBot.settings.voteSkip = !plugBot.settings.voteSkip;
API.sendChat(subChat("/me [@%%NAME%%] %%FUNCTION%% disabled.", {name: chat.un, 'function': "voteskip"}));
}
else {
plugBot.settings.voteSkip = !plugBot.settings.voteSkip;
API.sendChat(subChat("/me [@%%NAME%%] %%FUNCTION%% enabled.", {name: chat.un, 'function': "voteskip"}));
}
}
}
},
dclookupCommand: {
command: ['dclookup', 'dc'],
rank: 'user',
type: 'startsWith',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
var msg = chat.message;
var name;
if (msg.length === cmd.length) name = chat.un;
else {
name = msg.substring(cmd.length + 2);
var perm = plugBot.userUtilities.getPermission(chat.uid);
if (perm < 2) return API.sendChat(subChat("/me [@%%NAME%%] Only bouncers and above can do a lookup for others.", {name: chat.un}));
}
var user = plugBot.userUtilities.lookupUserName(name);
if (typeof user === 'boolean') return API.sendChat(subChat("/me [@%%NAME%%] Invalid user specified.", {name: chat.un}));
var toChat = plugBot.userUtilities.dclookup(user.id);
API.sendChat(toChat);
}
}
},
emojiCommand: {
command: 'emoji',
rank: 'user',
type: 'exact',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
var link = 'http://www.emoji-cheat-sheet.com/';
API.sendChat(subChat("/me Emoji list: %%LINK%%", {link: link}));
}
}
},
etaCommand: {
command: 'eta',
rank: 'user',
type: 'startsWith',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
var perm = plugBot.userUtilities.getPermission(chat.uid);
var msg = chat.message;
var name;
if (msg.length > cmd.length) {
if (perm < 2) return void (0);
name = msg.substring(cmd.length + 2);
} else name = chat.un;
var user = plugBot.userUtilities.lookupUserName(name);
if (typeof user === 'boolean') return API.sendChat(subChat("/me [@%%NAME%%] Invalid user specified.", {name: chat.un}));
var pos = API.getWaitListPosition(user.id);
if (pos < 0) return API.sendChat(subChat("/me @%%NAME%%, you are not on the waitlist.", {name: name}));
var timeRemaining = API.getTimeRemaining();
var estimateMS = ((pos + 1) * 4 * 60 + timeRemaining) * 1000;
var estimateString = plugBot.roomUtilities.msToStr(estimateMS);
API.sendChat(subChat("/me @%%NAME%% you will reach the booth in approximately %%TIME%%.", {name: name, time: estimateString}));
}
}
},
filterCommand: {
command: 'filter',
rank: 'bouncer',
type: 'exact',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
if (plugBot.settings.filterChat) {
plugBot.settings.filterChat = !plugBot.settings.filterChat;
return API.sendChat(subChat("/me [@%%NAME%%] %%FUNCTION%% disabled.", {name: chat.un, 'function': "chatfilter"}));
}
else {
plugBot.settings.filterChat = !plugBot.settings.filterChat;
return API.sendChat(subChat("/me [@%%NAME%%] %%FUNCTION%% enabled.", {name: chat.un, 'function': "chatfilter"}));
}
}
}
},
helpCommand: {
command: 'help',
rank: 'user',
type: 'exact',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
var link = "http://i.imgur.com/SBAso1N.jpg";
API.sendChat(subChat("/me This image will get you started on plug: %%LINK%%", {link: link}));
}
}
},
joinCommand: {
command: 'join',
rank: 'user',
type: 'exact',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
if (plugBot.room.roulette.rouletteStatus && plugBot.room.roulette.participants.indexOf(chat.uid) < 0) {
plugBot.room.roulette.participants.push(chat.uid);
API.sendChat(subChat("/me @%%NAME%% joined the roulette! (!leave if you regret it.)", {name: chat.un}));
}
}
}
},
slotsCommand: {
command: 'slots',
rank: 'user',
type: 'exact',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
var user = plugBot.userUtilities.lookupUser(chat.uid);
if(API.getWaitListPosition(chat.uid) > -1)
{
var resultInMinutes = 0;
if(user.lastSlotsTime != null){
var endTime = new Date();
var difference = endTime.getTime() - user.lastSlotsTime.getTime(); // This will give difference in milliseconds
resultInMinutes = Math.round(difference / 60000);
}
if(user.lastSlotsTime == null || resultInMinutes >= 5)
{
user.lastSlotsTime = new Date();
setTimeout(function () {
//get 3 random strings from array
var fruits = [":watermelon:", ":strawberry:", ":grapes:", ":lemon:", ":peach:", ":cherries:"];
var slots = [fruits[Math.floor(Math.random()*fruits.length)], fruits[Math.floor(Math.random()*fruits.length)], fruits[Math.floor(Math.random()*fruits.length)]];
//check if user has won something
var msg = "/me [@"+chat.un+"] "+slots[0]+"-"+slots[1]+"-"+slots[2]+", ";
if(slots[0] == slots[1] && slots[1] == slots[2])
{//3 in a row, gain 5 waitlist spots
msg += "you got 3 in a row! You gain 5 waitlist spots!";
var newPos = 0;
var oldPos = API.getWaitListPosition(user.id);
if(oldPos < 5) newPos = 0;
if(oldPos >= 5) newPos = oldPos-5;
moveUser(user.id, newPos, true);
}else if(slots[0] == slots[1] || slots[1] == slots[2])
{//2 in a row, play again now
msg += "you got 2 in a row! Play again immediately!";
user.lastSlotsTime = null;
}else if((slots[0] != slots[1]) && (slots[1] != slots[2]) && (slots[0] != slots[2]))
{//all different, user.lostSlots + 1
user.lostSlots = user.lostSlots+1;
if(user.lostSlots >= 5)
{
msg += "you have had 5 losses in a row, you will be put back 2 spots in the waitlist!";
var newPos = 0;
var oldPos = API.getWaitListPosition(user.id);
newPos = oldPos + 2;
if(newPos > 50) newPos = 50;
moveUser(user.id, newPos, true);
}else{
msg += "you got nothing! Amount of losses in a row is now " + user.lostSlots;
}
}
API.sendChat(msg);
}, 500);
}else{
setTimeout(function () {
API.sendChat(subChat("/me [@%%NAME%%] You can't use slots more than once every 5 minutes.", {name: chat.un}));
}, 500);
}
}else{
setTimeout(function () {
API.sendChat(subChat("/me [@%%NAME%%] You can't use slots when you are not in the waitlist.", {name: chat.un}));
}, 500);
}
}
}
},
jointimeCommand: {
command: 'jointime',
rank: 'bouncer',
type: 'startsWith',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
var msg = chat.message;
if (msg.length === cmd.length) return API.sendChat(subChat("/me [@%%NAME%%] No user specified.", {name: chat.un}));
var name = msg.substring(cmd.length + 2);
var user = plugBot.userUtilities.lookupUserName(name);
if (typeof user === 'boolean') return API.sendChat(subChat("/me [@%%NAME%%] Invalid user specified.", {name: chat.un}));
var join = plugBot.userUtilities.getJointime(user);
var time = Date.now() - join;
var timeString = plugBot.roomUtilities.msToStr(time);
API.sendChat(subChat("/me [@%%NAMEFROM%%] @%%USERNAME%% has been in the room for %%TIME%%.", {namefrom: chat.un, username: name, time: timeString}));
}
}
},
kickCommand: {
command: 'kick',
rank: 'bouncer',
type: 'startsWith',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
var msg = chat.message;
var lastSpace = msg.lastIndexOf(' ');
var time;
var name;
if (lastSpace === msg.indexOf(' ')) {
time = 0.25;
name = msg.substring(cmd.length + 2);
}
else {
time = msg.substring(lastSpace + 1);
name = msg.substring(cmd.length + 2, lastSpace);
}
var user = plugBot.userUtilities.lookupUserName(name);
var from = chat.un;
if (typeof user === 'boolean') return API.sendChat(subChat("/me [@%%NAME%%] No user specified.", {name: chat.un}));
var permFrom = plugBot.userUtilities.getPermission(chat.uid);
var permTokick = plugBot.userUtilities.getPermission(user.id);
if (permFrom <= permTokick)
return API.sendChat(subChat("/me [@%%NAME%%] you can't kick users with an equal or higher rank than you!", {name: chat.un}));
if (!isNaN(time)) {
API.sendChat(subChat("/me [@%%NAME%%], @%%USERNAME%% you are being kicked from the community for %%TIME%% minutes.", {name: chat.un, username: name, time: time}));
if (time > 24 * 60) API.moderateBanUser(user.id, 1, API.BAN.PERMA);
else API.moderateBanUser(user.id, 1, API.BAN.DAY);
setTimeout(function (id, name) {
API.moderateUnbanUser(id);
console.log('Unbanned @' + name + '. (' + id + ')');
}, time * 60 * 1000, user.id, name);
}
else API.sendChat(subChat("/me [@%%NAME%%] Invalid time specified.", {name: chat.un}));
}
}
},
killCommand: {
command: 'kill',
rank: 'manager',
type: 'exact',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
storeToStorage();
API.sendChat("/me Shutting down.");
plugBot.disconnectAPI();
setTimeout(function () {
kill();
}, 1000);
}
}
},
leaveCommand: {
command: 'leave',
rank: 'user',
type: 'exact',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
var ind = plugBot.room.roulette.participants.indexOf(chat.uid);
if (ind > -1) {
plugBot.room.roulette.participants.splice(ind, 1);
API.sendChat(subChat("/me @%%NAME%% left the roulette!", {name: chat.un}));
}
}
}
},
linkCommand: {
command: 'link',
rank: 'user',
type: 'exact',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
var media = API.getMedia();
var from = chat.un;
var user = plugBot.userUtilities.lookupUser(chat.uid);
var perm = plugBot.userUtilities.getPermission(chat.uid);
var dj = API.getDJ().id;
var isDj = false;
if (dj === chat.uid) isDj = true;
if (perm >= 1 || isDj) {
if (media.format === 1) {
var linkToSong = "https://www.youtube.com/watch?v=" + media.cid;
API.sendChat(subChat("/me [@%%NAME%%] Link to current song: %%LINK%%", {name: from, link: linkToSong}));
}
if (media.format === 2) {
SC.get('/tracks/' + media.cid, function (sound) {
API.sendChat(subChat("/me [@%%NAME%%] Link to current song: %%LINK%%", {name: from, link: sound.permalink_url}));
});
}
}
}
}
},
lockCommand: {
command: 'lock',
rank: 'mod',
type: 'exact',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
plugBot.roomUtilities.booth.lockBooth();
}
}
},
lockskipCommand: {
command: 'lockskip',
rank: 'bouncer',
type: 'startsWith',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
if (plugBot.room.skippable) {
var dj = API.getDJ();
var id = dj.id;
var name = dj.username;
var msgSend = '@' + name + ': ';
plugBot.room.queueable = false;
if (chat.message.length === cmd.length) {
API.sendChat(subChat("/me [%%NAME%% used lockskip.]", {name: chat.un}));
plugBot.roomUtilities.booth.lockBooth();
setTimeout(function (id) {
API.moderateForceSkip();
plugBot.room.skippable = false;
setTimeout(function () {
plugBot.room.skippable = true
}, 5 * 1000);
setTimeout(function (id) {
plugBot.userUtilities.moveUser(id, plugBot.settings.lockskipPosition, false);
plugBot.room.queueable = true;
setTimeout(function () {
plugBot.roomUtilities.booth.unlockBooth();
}, 1000);
}, 1500, id);
}, 1000, id);
return void (0);
}
var validReason = false;
var msg = chat.message;
var reason = msg.substring(cmd.length + 1);
for (var i = 0; i < plugBot.settings.lockskipReasons.length; i++) {
var r = plugBot.settings.lockskipReasons[i][0];
if (reason.indexOf(r) !== -1) {
validReason = true;
msgSend += plugBot.settings.lockskipReasons[i][1];
}
}
if (validReason) {
API.sendChat(subChat("/me [%%NAME%% used lockskip.]", {name: chat.un}));
plugBot.roomUtilities.booth.lockBooth();
setTimeout(function (id) {
API.moderateForceSkip();
plugBot.room.skippable = false;
API.sendChat(msgSend);
setTimeout(function () {
plugBot.room.skippable = true
}, 5 * 1000);
setTimeout(function (id) {
plugBot.userUtilities.moveUser(id, plugBot.settings.lockskipPosition, false);
plugBot.room.queueable = true;
setTimeout(function () {
plugBot.roomUtilities.booth.unlockBooth();
}, 1000);
}, 1500, id);
}, 1000, id);
return void (0);
}
}
}
}
},
lockskipposCommand: {
command: 'lockskippos',
rank: 'manager',
type: 'startsWith',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
var msg = chat.message;
var pos = msg.substring(cmd.length + 1);
if (!isNaN(pos)) {
plugBot.settings.lockskipPosition = pos;
return API.sendChat(subChat("/me [@%%NAME%%] Lockskip will now move the dj to position %%POSITION%%.", {name: chat.un, position: plugBot.settings.lockskipPosition}));
}
else return API.sendChat(subChat("/me [@%%NAME%%] Invalid position specified.", {name: chat.un}));
}
}
},
maxlengthCommand: {
command: 'maxlength',
rank: 'manager',
type: 'startsWith',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
var msg = chat.message;
var maxTime = msg.substring(cmd.length + 1);
if (!isNaN(maxTime)) {
plugBot.settings.maximumSongLength = maxTime;
return API.sendChat(subChat("/me [@%%NAME%%] The maximum song duration is set to %%TIME%% minutes.", {name: chat.un, time: plugBot.settings.maximumSongLength}));
}
else return API.sendChat(subChat("/me [@%%NAME%%] Invalid time specified.", {name: chat.un}));
}
}
},
moveCommand: {
command: 'move',
rank: 'mod',
type: 'startsWith',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
var msg = chat.message;
if (msg.length === cmd.length) return API.sendChat(subChat("/me [@%%NAME%%] No user specified.", {name: chat.un}));
var firstSpace = msg.indexOf(' ');
var lastSpace = msg.lastIndexOf(' ');
var pos;
var name;
if (isNaN(parseInt(msg.substring(lastSpace + 1)))) {
pos = 1;
name = msg.substring(cmd.length + 2);
}
else {
pos = parseInt(msg.substring(lastSpace + 1));
name = msg.substring(cmd.length + 2, lastSpace);
}
var user = plugBot.userUtilities.lookupUserName(name);
if (typeof user === 'boolean') return API.sendChat(subChat("/me [@%%NAME%%] Invalid user specified.", {name: chat.un}));
if (user.id === plugBot.loggedInID) return API.sendChat(subChat("/me @%%NAME%%, don't try to add me to the waitlist, please.", {name: chat.un}));
if (!isNaN(pos)) {
API.sendChat(subChat("/me [%%NAME%% used move.]", {name: chat.un}));
plugBot.userUtilities.moveUser(user.id, pos, false);
} else return API.sendChat(subChat("/me [@%%NAME%%] Invalid position specified.", {name: chat.un}));
}
}
},
muteCommand: {
command: 'mute',
rank: 'bouncer',
type: 'startsWith',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
var msg = chat.message;
if (msg.length === cmd.length) return API.sendChat(subChat("/me [@%%NAME%%] No user specified.", {name: chat.un}));
var lastSpace = msg.lastIndexOf(' ');
var time = null;
var name;
if (lastSpace === msg.indexOf(' ')) {
name = msg.substring(cmd.length + 2);
time = 45;
}
else {
time = msg.substring(lastSpace + 1);
if (isNaN(time) || time == "" || time == null || typeof time == "undefined") {
return API.sendChat(subChat("/me [@%%NAME%%] Invalid time specified.", {name: chat.un}));
}
name = msg.substring(cmd.length + 2, lastSpace);
}
var from = chat.un;
var user = plugBot.userUtilities.lookupUserName(name);
if (typeof user === 'boolean') return API.sendChat(subChat("/me [@%%NAME%%] Invalid user specified.", {name: chat.un}));
var permFrom = plugBot.userUtilities.getPermission(chat.uid);
var permUser = plugBot.userUtilities.getPermission(user.id);
if (permFrom > permUser) {
if (time > 45) {
API.sendChat(subChat("/me [@%%NAME%%] You can only mute for maximum %%TIME%% minutes.", {name: chat.un, time: "45"}));
API.moderateMuteUser(user.id, 1, API.MUTE.LONG);
}
else if (time === 45) {
API.moderateMuteUser(user.id, 1, API.MUTE.LONG);
API.sendChat(subChat( "/me [@%%NAME%%] Muted @%%USERNAME%% for %%TIME%% minutes.", {name: chat.un, username: name, time: time}));
}
else if (time > 30) {
API.moderateMuteUser(user.id, 1, API.MUTE.LONG);
API.sendChat(subChat( "/me [@%%NAME%%] Muted @%%USERNAME%% for %%TIME%% minutes.", {name: chat.un, username: name, time: time}));
setTimeout(function (id) {
API.moderateUnmuteUser(id);
}, time * 60 * 1000, user.id);
}
else if (time > 15) {
API.moderateMuteUser(user.id, 1, API.MUTE.MEDIUM);
API.sendChat(subChat( "/me [@%%NAME%%] Muted @%%USERNAME%% for %%TIME%% minutes.", {name: chat.un, username: name, time: time}));
setTimeout(function (id) {
API.moderateUnmuteUser(id);
}, time * 60 * 1000, user.id);
}
else {
API.moderateMuteUser(user.id, 1, API.MUTE.SHORT);
API.sendChat(subChat( "/me [@%%NAME%%] Muted @%%USERNAME%% for %%TIME%% minutes.", {name: chat.un, username: name, time: time}));
setTimeout(function (id) {
API.moderateUnmuteUser(id);
}, time * 60 * 1000, user.id);
}
}
else API.sendChat(subChat("/me [@%%NAME%%] You can't mute persons with an equal or higher rank than you.", {name: chat.un}));
}
}
},
pingCommand: {
command: 'ping',
rank: 'bouncer',
type: 'exact',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
API.sendChat("/me Pong!")
}
}
},
reloadCommand: {
command: 'reload',
rank: 'manager',
type: 'exact',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
API.sendChat("/me Be right back.");
storeToStorage();
plugBot.disconnectAPI();
kill();
setTimeout(function () {
$.getScript(plugBot.scriptLink);
}, 2000);
}
}
},
removeCommand: {
command: 'remove',
rank: 'mod',
type: 'startsWith',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
var msg = chat.message;
if (msg.length > cmd.length + 2) {
var name = msg.substr(cmd.length + 2);
var user = plugBot.userUtilities.lookupUserName(name);
if (typeof user !== 'boolean') {
user.lastDC = {
time: null,
position: null,
songCount: 0
};
if (API.getDJ().id === user.id) {
API.moderateForceSkip();
setTimeout(function () {
API.moderateRemoveDJ(user.id);
}, 1 * 1000, user);
}
else API.moderateRemoveDJ(user.id);
} else API.sendChat(subChat("/me [@%%NAME%%] Specified user @%%USERNAME%% is not in the waitlist.", {name: chat.un, username: name}));
} else API.sendChat(subChat("/me [@%%NAME%%] No user specified.", {name: chat.un}));
}
}
},
restrictetaCommand: {
command: 'restricteta',
rank: 'bouncer',
type: 'exact',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
if (plugBot.settings.etaRestriction) {
plugBot.settings.etaRestriction = !plugBot.settings.etaRestriction;
return API.sendChat(subChat("/me [@%%NAME%%] %%FUNCTION%% disabled.", {name: chat.un, 'function': "etarestriction"}));
}
else {
plugBot.settings.etaRestriction = !plugBot.settings.etaRestriction;
return API.sendChat(subChat("/me [@%%NAME%%] %%FUNCTION%% enabled.", {name: chat.un, 'function': "etarestriction"}));
}
}
}
},
rouletteCommand: {
command: 'roulette',
rank: 'mod',
type: 'exact',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
if (!plugBot.room.roulette.rouletteStatus) {
plugBot.room.roulette.startRoulette();
}
}
}
},
sessionstatsCommand: {
command: 'sessionstats',
rank: 'bouncer',
type: 'exact',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
var from = chat.un;
var woots = plugBot.room.roomstats.totalWoots;
var mehs = plugBot.room.roomstats.totalMehs;
var grabs = plugBot.room.roomstats.totalCurates;
API.sendChat(subChat("/me [@%%NAME%%] Total woots: %%WOOTS%%, total mehs: %%MEHS%%, total grabs: %%GRABS%%.", {name: from, woots: woots, mehs: mehs, grabs: grabs}));
}
}
},
skipCommand: {
command: 'skip',
rank: 'bouncer',
type: 'exact',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
API.sendChat(subChat("/me [%%NAME%% used skip.]", {name: chat.un}));
API.moderateForceSkip();
plugBot.room.skippable = false;
setTimeout(function () {
plugBot.room.skippable = true
}, 5 * 1000);
}
}
},
songstatsCommand: {
command: 'songstats',
rank: 'mod',
type: 'exact',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
if (plugBot.settings.songstats) {
plugBot.settings.songstats = !plugBot.settings.songstats;
return API.sendChat(subChat("/me [@%%NAME%%] %%FUNCTION%% disabled.", {name: chat.un, 'function': "songstats"}));
}
else {
plugBot.settings.songstats = !plugBot.settings.songstats;
return API.sendChat(subChat("/me [@%%NAME%%] %%FUNCTION%% enabled.", {name: chat.un, 'function': "songstats"}));
}
}
}
},
sourceCommand: {
command: 'source',
rank: 'user',
type: 'exact',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
API.sendChat('/me This bot was created by ' + botCreator + ".");
}
}
},
statusCommand: {
command: 'status',
rank: 'bouncer',
type: 'exact',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
var from = chat.un;
var msg = '/me [@' + from + '] ';
msg += 'afkremoval: ';
if (plugBot.settings.afkRemoval) msg += 'ON';
else msg += 'OFF';
msg += '. ';
msg += "afksremoved: " + plugBot.room.afkList.length + '. ';
msg += 'afklimit: ' + plugBot.settings.maximumAfk + '. ';
msg += 'chatfilter: ';
if (plugBot.settings.filterChat) msg += 'ON';
else msg += 'OFF';
msg += '. ';
msg += 'voteskip: ';
if (plugBot.settings.voteSkip) msg += 'ON';
else msg += 'OFF';
msg += '. ';
var launchT = plugBot.room.roomstats.launchTime;
var durationOnline = Date.now() - launchT;
var since = plugBot.roomUtilities.msToStr(durationOnline);
msg += subChat("I have been active for %%TIME%%.", {time: since});
return API.sendChat(msg);
}
}
},
swapCommand: {
command: 'swap',
rank: 'mod',
type: 'startsWith',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
var msg = chat.message;
if (msg.length === cmd.length) return API.sendChat(subChat("/me [@%%NAME%%] No user specified.", {name: chat.un}));
var firstSpace = msg.indexOf(' ');
var lastSpace = msg.lastIndexOf(' ');
var name1 = msg.substring(cmd.length + 2, lastSpace);
var name2 = msg.substring(lastSpace + 2);
var user1 = plugBot.userUtilities.lookupUserName(name1);
var user2 = plugBot.userUtilities.lookupUserName(name2);
if (typeof user1 === 'boolean' || typeof user2 === 'boolean') return API.sendChat(subChat("/me [@%%NAME%%] Invalid user specified. (No names with spaces!)", {name: chat.un}));
if (user1.id === plugBot.loggedInID || user2.id === plugBot.loggedInID) return API.sendChat(subChat("/me @%%NAME%%, don't try to add me to the waitlist, please.", {name: chat.un}));
var p1 = API.getWaitListPosition(user1.id) + 1;
var p2 = API.getWaitListPosition(user2.id) + 1;
if (p1 < 1 || p2 < 1) return API.sendChat(subChat("/me [@%%NAME%%] Please only swap users that are in the waitlist!", {name: chat.un}));
API.sendChat(subChat("/me Swapping %%NAME1%% with %%NAME2%%.", {'name1': name1, 'name2': name2}));
if (p1 < p2) {
plugBot.userUtilities.moveUser(user2.id, p1, false);
setTimeout(function (user1, p2) {
plugBot.userUtilities.moveUser(user1.id, p2, false);
}, 2000, user1, p2);
}
else {
plugBot.userUtilities.moveUser(user1.id, p2, false);
setTimeout(function (user2, p1) {
plugBot.userUtilities.moveUser(user2.id, p1, false);
}, 2000, user2, p1);
}
}
}
},
unbanCommand: {
command: 'unban',
rank: 'bouncer',
type: 'startsWith',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
$(".icon-population").click();
$(".icon-ban").click();
setTimeout(function (chat) {
var msg = chat.message;
if (msg.length === cmd.length) return API.sendChat(subChat("/me [@%%NAME%%] No user specified.", {name: chat.un}));
var name = msg.substring(cmd.length + 2);
var bannedUsers = API.getBannedUsers();
var found = false;
var bannedUser = null;
for (var i = 0; i < bannedUsers.length; i++) {
var user = bannedUsers[i];
if (user.username === name) {
bannedUser = user;
found = true;
}
}
if (!found) {
$(".icon-chat").click();
return API.sendChat(subChat("/me [@%%NAME%%] The user was not banned.", {name: chat.un}));
}
API.moderateUnbanUser(bannedUser.id);
console.log("Unbanned " + name);
setTimeout(function () {
$(".icon-chat").click();
}, 1000);
}, 1000, chat);
}
}
},
unlockCommand: {
command: 'unlock',
rank: 'mod',
type: 'exact',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
plugBot.roomUtilities.booth.unlockBooth();
}
}
},
unmuteCommand: {
command: 'unmute',
rank: 'bouncer',
type: 'startsWith',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
var msg = chat.message;
var permFrom = plugBot.userUtilities.getPermission(chat.uid);
var from = chat.un;
var name = msg.substr(cmd.length + 2);
var user = plugBot.userUtilities.lookupUserName(name);
if (typeof user === 'boolean') return API.sendChat(subChat("/me [@%%NAME%%] Invalid user specified.", {name: chat.un}));
var permUser = plugBot.userUtilities.getPermission(user.id);
if (permFrom > permUser) {
try {
API.moderateUnmuteUser(user.id);
API.sendChat(subChat("/me [@%%NAME%%] Unmuted @%%USERNAME%%.", {name: chat.un, username: name}));
}
catch (e) {
API.sendChat(subChat("/me [@%%NAME%%] that user wasn't muted.", {name: chat.un}));
}
}
else API.sendChat(subChat("/me [@%%NAME%%] You can't unmute persons with an equal or higher rank than you.", {name: chat.un}));
}
}
},
usercmdcdCommand: {
command: 'usercmdcd',
rank: 'manager',
type: 'startsWith',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
var msg = chat.message;
var cd = msg.substring(cmd.length + 1);
if (!isNaN(cd)) {
plugBot.settings.commandCooldown = cd;
return API.sendChat(subChat("/me [@%%NAME%%] The cooldown for commands by users is now set to %%TIME%% seconds.", {name: chat.un, time: plugBot.settings.commandCooldown}));
}
else return API.sendChat(subChat("/me [@%%NAME%%] Invalid time specified.", {name: chat.un}));
}
}
},
usercommandsCommand: {
command: 'usercommands',
rank: 'manager',
type: 'exact',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
if (plugBot.settings.usercommandsEnabled) {
API.sendChat(subChat("/me [@%%NAME%%] %%FUNCTION%% disabled.", {name: chat.un, 'function': "usercommands"}));
plugBot.settings.usercommandsEnabled = !plugBot.settings.usercommandsEnabled;
}
else {
API.sendChat(subChat("/me [@%%NAME%%] %%FUNCTION%% enabled.", {name: chat.un, 'function': "usercommands"}));
plugBot.settings.usercommandsEnabled = !plugBot.settings.usercommandsEnabled;
}
}
}
},
voteratioCommand: {
command: 'voteratio',
rank: 'bouncer',
type: 'startsWith',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
var msg = chat.message;
if (msg.length === cmd.length) return API.sendChat(subChat("/me [@%%NAME%%] No user specified.", {name: chat.un}));
var name = msg.substring(cmd.length + 2);
var user = plugBot.userUtilities.lookupUserName(name);
if (user === false) return API.sendChat(subChat("/me [@%%NAME%%] Invalid user specified.", {name: chat.un}));
var vratio = user.votes;
var ratio = null;
if (vratio.meh == 0){
ratio = vratio.woot;
}else{
ratio = vratio.woot / vratio.meh;
}
API.sendChat(subChat("/me [@%%NAME%%] @%%USERNAME%% ~ woots: %%WOOT%%, mehs: %%MEHS%%, ratio (w/m): %%RATIO%%.", {name: chat.un, username: name, woot: vratio.woot, mehs: vratio.meh, ratio: ratio.toFixed(2)}));
}
}
},
welcomeCommand: {
command: 'welcome',
rank: 'manager',
type: 'exact',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
if (plugBot.settings.welcome) {
plugBot.settings.welcome = !plugBot.settings.welcome;
return API.sendChat(subChat("/me [@%%NAME%%] %%FUNCTION%% disabled.", {name: chat.un, 'function': "welcomemsg"}));
}
else {
plugBot.settings.welcome = !plugBot.settings.welcome;
return API.sendChat(subChat("/me [@%%NAME%%] %%FUNCTION%% enabled.", {name: chat.un, 'function': "welcomemsg"}));
}
}
}
},
clearlistCommand: {
command: 'clearlist',
rank: 'manager',
type: 'startsWith',
functionality: function (chat, cmd) {
if (this.type === 'exact' && chat.message.length !== cmd.length) return void (0);
if (!plugBot.commands.executable(this.rank, chat)) return void (0);
else {
API.sendChat(subChat("/me [%%NAME%% cleared waitlist.]", {name: chat.un}));
var msg = chat.message;
var lock = !(msg.length === cmd.length);
plugBot.roomUtilities.booth.lockBooth();
var wlist = API.getWaitList();
for (var i = 0; i < wlist.length; i++) {
API.moderateRemoveDJ(wlist[i].id);
}
if(!lock)
{
setTimeout(function () {
plugBot.roomUtilities.booth.unlockBooth();
}, 1000);
}
}
}
}
}
};
plugBot.startup();
}).call(this);
<file_sep>Plug.DJ Moderation Bot
==========================
Written by <NAME>
Running on: [Link](https://plug.dj/s31)
**If you have any issues with the bot you want to bring up with me, then open an issue on the issues tab above.**
#Overview
This bot is written to help:
1. Automate certain aspects of room moderation
2. Provide moderators additional tools to make their job easier
3. Track certain room statistics to optimize DJing experience (AFK status, disconnect logs, play history)
#Bot Features
--------------
###Bot Automation
####Local Storage
* The bot saves all of its data. When you turn the bot off and back on again, all the settings will be loaded, as well as the disconnect-log, user info, history, etc.
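A minimal sketch of what that persistence could look like in the browser, assuming the global `plugBot` object and `localStorage` (the real bot has its own `storeToStorage()`/load routines):
```Javascript
// Hypothetical persistence sketch -- not the bot's actual storeToStorage() implementation.
function saveBotState() {
    localStorage.setItem('plugBotState', JSON.stringify({
        settings: plugBot.settings,   // toggles, limits, chat filter, ...
        users: plugBot.room.users     // per-user info, AFK timestamps, disconnect-log
    }));
}

function loadBotState() {
    var raw = localStorage.getItem('plugBotState');
    if (raw) {
        var state = JSON.parse(raw);
        plugBot.settings = state.settings;
        plugBot.room.users = state.users;
    }
}
```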
####AFK Monitor
* When a user is on the waitlist and is not active (sending chat messages), he/she will be notified twice and will then be removed from the waitlist.
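A rough, simplified sketch of such a check, assuming the plug.dj `API` global and the bot's activity helpers (the real bot runs `plugBot.roomUtilities.afkCheck()` on an interval and warns twice before removing anyone):
```Javascript
// Simplified AFK sweep -- warning counters and exact thresholds are omitted here.
function afkSweep(maxAfkMinutes) {
    var now = Date.now();
    API.getWaitList().forEach(function (dj) {
        var user = plugBot.userUtilities.lookupUser(dj.id);
        var idleMs = now - plugBot.userUtilities.getLastActivity(user);
        if (idleMs > maxAfkMinutes * 60 * 1000) {
            API.sendChat('@' + dj.username + ' you seem to be AFK, please chat to keep your waitlist spot.');
            // after the second warning the bot would call API.moderateRemoveDJ(dj.id)
        }
    });
}
```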
####History
* When a new DJ starts playing, all the info from the previous play is added to a list. It keeps track of the user, the song, and the woots/mehs/curates.
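The shape of one history entry could look roughly like this (field names and values are illustrative, not the bot's exact ones):
```Javascript
// Illustrative history entry, recorded whenever the DJ booth advances.
var historyList = [];
historyList.push({
    dj: 'SomeDJ',
    song: 'Artist - Title',
    woots: 12,
    mehs: 1,
    grabs: 3,
    playedAt: Date.now()
});
```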
####Disconnect-log
* When a user disconnects, he/she is added to a list along with the time of disconnect and waitlist position. This is used by the 'dclookup' command.
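In the bot this is stored on each tracked user as a `lastDC` object; a simplified sketch (assuming the plug.dj `API` global) looks like:
```Javascript
// Simplified disconnect record, kept per user and read back by the dclookup command.
function recordDisconnect(user) {
    user.lastDC = {
        time: Date.now(),                            // when the user disconnected
        position: API.getWaitListPosition(user.id),  // waitlist spot at that moment
        songCount: 0                                 // songs played since then
    };
}
```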
####User info
* The bot keeps track of some user info: whether he/she is in the room or not, AFK warnings, etc.
###Bot Commands
####Temporary(will update readme)
addCommand: {
command: 'add',
rank: 'mod',
type: 'startsWith' }
afklimitCommand: {
command: 'afklimit',
rank: 'manager',
type: 'startsWith' }
afkremovalCommand: {
command: 'afkremoval',
rank: 'mod',
type: 'exact' }
afkresetCommand: {
command: 'afkreset',
rank: 'bouncer',
type: 'startsWith' }
afktimeCommand: {
command: 'afktime',
rank: 'bouncer',
type: 'startsWith' }
autoskipCommand: {
command: 'autoskip',
rank: 'mod',
type: 'exact' }
autowootCommand: {
command: 'autowoot',
rank: 'user',
type: 'exact' }
banCommand: {
command: 'ban',
rank: 'bouncer',
type: 'startsWith' }
blacklistCommand: {
command: ['blacklist', 'bl'],
rank: 'bouncer',
type: 'startsWith' }
clearchatCommand: {
command: 'clearchat',
rank: 'manager',
type: 'exact' }
commandsCommand: {
command: 'commands',
rank: 'user',
type: 'exact' }
cookieCommand: {
command: 'cookie',
rank: 'user',
type: 'startsWith' }
cycleCommand: {
command: 'cycle',
rank: 'manager',
type: 'exact' }
voteskipCommand: {
command: 'voteskip',
rank: 'manager',
type: 'startsWith' }
togglevoteskipCommand: {
command: 'togglevoteskip',
rank: 'bouncer',
type: 'exact' }
dclookupCommand: {
command: ['dclookup', 'dc'],
rank: 'user',
type: 'startsWith' }
emojiCommand: {
command: 'emoji',
rank: 'user',
type: 'exact' }
etaCommand: {
command: 'eta',
rank: 'user',
type: 'startsWith' }
filterCommand: {
command: 'filter',
rank: 'bouncer',
type: 'exact' }
helpCommand: {
command: 'help',
rank: 'user',
type: 'exact' }
joinCommand: {
command: 'join',
rank: 'user',
type: 'exact' }
jointimeCommand: {
command: 'jointime',
rank: 'bouncer',
type: 'startsWith' }
kickCommand: {
command: 'kick',
rank: 'bouncer',
type: 'startsWith' }
killCommand: {
command: 'kill',
rank: 'manager',
type: 'exact' }
leaveCommand: {
command: 'leave',
rank: 'user',
type: 'exact' }
linkCommand: {
command: 'link',
rank: 'user',
type: 'exact' }
lockCommand: {
command: 'lock',
rank: 'mod',
type: 'exact' }
lockskipCommand: {
command: 'lockskip',
rank: 'bouncer',
type: 'startsWith' }
lockskipposCommand: {
command: 'lockskippos',
rank: 'manager',
type: 'startsWith' }
maxlengthCommand: {
command: 'maxlength',
rank: 'manager',
type: 'startsWith' }
moveCommand: {
command: 'move',
rank: 'mod',
type: 'startsWith' }
muteCommand: {
command: 'mute',
rank: 'bouncer',
type: 'startsWith' }
pingCommand: {
command: 'ping',
rank: 'bouncer',
type: 'exact' }
reloadCommand: {
command: 'reload',
rank: 'manager',
type: 'exact' }
removeCommand: {
command: 'remove',
rank: 'mod',
type: 'startsWith' }
restrictetaCommand: {
command: 'restricteta',
rank: 'bouncer',
type: 'exact' }
rouletteCommand: {
command: 'roulette',
rank: 'mod',
type: 'exact' }
sessionstatsCommand: {
command: 'sessionstats',
rank: 'bouncer',
type: 'exact' }
skipCommand: {
command: 'skip',
rank: 'bouncer',
type: 'exact' }
songstatsCommand: {
command: 'songstats',
rank: 'mod',
type: 'exact' }
sourceCommand: {
command: 'source',
rank: 'user',
type: 'exact' }
statusCommand: {
command: 'status',
rank: 'bouncer',
type: 'exact' }
swapCommand: {
command: 'swap',
rank: 'mod',
type: 'startsWith' }
toggleblCommand: {
command: 'togglebl',
rank: 'bouncer',
type: 'exact' }
unbanCommand: {
command: 'unban',
rank: 'bouncer',
type: 'startsWith' }
unlockCommand: {
command: 'unlock',
rank: 'mod',
type: 'exact' }
unmuteCommand: {
command: 'unmute',
rank: 'bouncer',
type: 'startsWith' }
usercmdcdCommand: {
command: 'usercmdcd',
rank: 'manager',
type: 'startsWith' }
usercommandsCommand: {
command: 'usercommands',
rank: 'manager',
type: 'exact' }
voteratioCommand: {
command: 'voteratio',
rank: 'bouncer',
type: 'startsWith' }
welcomeCommand: {
command: 'welcome',
rank: 'manager',
type: 'exact' }
clearlistCommand: {
command: 'clearlist',
rank: 'manager',
type: 'startsWith' }
#How to run
------------------------------
To run the script in your web browser, make a new bookmark pointing at your `bot.js` file. You can simply type:
```Javascript
javascript:$.getScript('[YOUR INCLUDE LOCATION]');
```
into the bookmark URL. My file is usually on RawGit (https://rawgit.com), so my include would be:
```Javascript
javascript:$.getScript('https://rawgit.com/maartenpeels/PlugBotV3/master/bot.js');
```
That's all!
|
0f10fa5f968a807fc0d260b043e48992d6dd6543
|
[
"JavaScript",
"Markdown"
] | 2
|
JavaScript
|
maartenpeels/PlugBotV3
|
0e79d811ecacdd9b3c52ae4e415227792a686e32
|
30350517d66a780631a801b12b7be838304a4e4a
|
refs/heads/master
|
<file_sep>Alumni-Database-Application-Form
================================
The accuracy and efficiency of computers define today's technology. That is why an Alumni
Management System is needed: a manual system causes problems such as inconsistent records and
insecure student files, which this system prevents. With an Alumni Management System deployed over the Internet,
the process of accessing student information becomes much easier. This application is developed
using HTML5, CSS3, MySQL and PHP.
<file_sep>-- phpMyAdmin SQL Dump
-- version 4.0.9
-- http://www.phpmyadmin.net
--
-- Host: 127.0.0.1
-- Generation Time: Aug 11, 2014 at 12:01 AM
-- Server version: 5.6.14
-- PHP Version: 5.5.6
SET SQL_MODE = "NO_AUTO_VALUE_ON_ZERO";
SET time_zone = "+00:00";
/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;
/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;
/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;
/*!40101 SET NAMES utf8 */;
--
-- Database: `bestalumni`
--
-- --------------------------------------------------------
--
-- Table structure for table `registration`
--
CREATE TABLE IF NOT EXISTS `registration` (
`name` varchar(50) NOT NULL,
`sex` varchar(7) NOT NULL,
`dob` int(11) NOT NULL,
`contact` int(10) NOT NULL,
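-- NOTE: storing `dob` and `contact` as INT is fragile (leading zeros are lost and long
-- phone numbers overflow); DATE and VARCHAR column types would be safer here.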
`email` varchar(60) NOT NULL,
`courseinbest` varchar(15) NOT NULL,
`yearofpassing` year(4) NOT NULL,
`profession` varchar(30) DEFAULT NULL,
`organisation` varchar(70) DEFAULT NULL,
`permanentstreet1` varchar(40) DEFAULT NULL,
`permanentstreet2` varchar(40) DEFAULT NULL,
`permanentcity` varchar(40) DEFAULT NULL,
`permanentstate` varchar(20) DEFAULT NULL,
`permanentpincode` int(6) DEFAULT NULL,
`permanentcountry` varchar(25) DEFAULT NULL,
`blood` varchar(10) DEFAULT NULL
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;
/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
<file_sep><?php
// define variables and set to empty values
$name = $sex = $dob = $contact = $email = $courseinbest = $yearofpassing = $profession = $organisation = $permanentstreet1 = $permanentstreet2 = $permanentstate = $permanentcity = $permanentpincode = $permanentcountry = $blood = "";
// Create connection
$link=mysqli_connect("localhost","username","password","<PASSWORD>") or die("Error " . mysqli_error($link));
$name = $_POST["name"];
$sex = $_POST["sex"];
$dob = $_POST["dob"];
$contact = $_POST["contact"];
$email = $_POST["email"];
$courseinbest = $_POST["courseinbest"];
$yearofpassing = $_POST["yearofpassing"];
$profession = $_POST["profession"];
$organisation = $_POST["organisation"];
$permanentstreet1 = $_POST["permanentstreet1"];
$permanentstreet2 = $_POST["permanentstreet2"];
$permanentcity = $_POST["permanentcity"];
$permanentstate = $_POST["permanentstate"];
$permanentpincode = $_POST["permanentpincode"];
$permanentcountry = $_POST["permanentcountry"];
$blood = $_POST["blood"];
$query="INSERT INTO registration (`name`, `sex`, `dob`, `contact`, `email`, `courseinbest`,
`yearofpassing`, `profession`, `organisation`, `permanentstreet1`, `permanentstreet2`, `permanentcity`,
`permanentstate`, `permanentpincode`, `permanentcountry`, `blood`)
VALUES ('$name', '$sex', '$dob', '$contact', '$email', '$courseinbest', '$yearofpassing', '$profession',
'$organisation', '$permanentstreet1', '$permanentstreet2', '$permanentcity', '$permanentstate', '$permanentpincode', '$permanentcountry', '$blood')";
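// NOTE: interpolating $_POST values directly into the SQL string is vulnerable to SQL injection.
// A safer (hypothetical) sketch using mysqli prepared statements would look like:
// $stmt = mysqli_prepare($link, "INSERT INTO registration (`name`, `sex`) VALUES (?, ?)");
// mysqli_stmt_bind_param($stmt, "ss", $name, $sex);
// mysqli_stmt_execute($stmt);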
if (!mysqli_query($link,$query)) {
die('Error: ' . mysqli_error($link));
}
echo "1 record added";
?>
|
444081b5a5e54f2f6972e6e936611b3bce531847
|
[
"Markdown",
"SQL",
"PHP"
] | 3
|
Markdown
|
fidars/Alumni-Database-Application
|
cff06f46c1ea4aeb61d68ecdc44c0f8b8bdc1412
|
af38b658aaec740270a2954e80e608b26e968811
|
refs/heads/master
|
<repo_name>bowen2/acg<file_sep>/README.md
# acg
utility for playing arbitrary card games
## using
### dependencies
`npm install express socket.io`
### execution
`node server`
An instance is now hosted locally (port 8080 by default, as set in `server.js`).
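Then open the page in a browser (http://localhost:8080 by default). The bundled client in `static/game.js` talks to the server over socket.io; a stripped-down sketch of that handshake (event names taken from the source) looks like:
```js
// Minimal client-side sketch of the events used by static/game.js.
var socket = io({transports: ['websocket'], upgrade: false});

socket.on('playerinfo', function (info) {
  console.log('You are player ' + info.num + '; players: ' + info.players);
});

socket.on('cards', function (cards) {
  // redraw the table with the latest card positions and visibility
});

// Moving or flipping a card sends an update back to the server:
socket.emit('update', {cardID: 0, x: 100, y: 200});
```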
<file_sep>/static/game.js
var socket = io({transports: ['websocket'], upgrade: false});
var canvas = document.getElementById('canvas');
var mouse;
var playerNum;
canvas.width = 800;
canvas.height = 600;
canvasState = new CanvasState(canvas);
socket.on('playerinfo', function(info) {
playerNum = info.num;
document.getElementById("intro").innerHTML = "You are player " + info.num + ". The players are " + info.players + ".";
});
socket.on('cards', function(cards) {
canvasState.reset();
for (var i in cards) {
var card = cards[i];
canvasState.addGraphic(new CardGraphic(card.x, card.y, card.code, card.cardID, card.visible));
}
canvasState.draw();
});
function CardGraphic(x, y, code, cardID, visible) {
this.x = x;
this.y = y;
this.cardID = cardID;
this.visible = visible;
if (visible != 'all' && !(this.visible.includes(playerNum))) {
code = 54;
}
this.graphicSrc = "static/images/" + code + ".png";
this.img = new Image();
}
CardGraphic.prototype.draw = function(context) {
var x = this.x;
var y = this.y;
var img = this.img;
this.img.src = this.graphicSrc;
context.drawImage(this.img, x, y, 100, 140);
}
CardGraphic.prototype.contains = function(mx, my) {
return (this.x <= mx) && (this.x + 100 >= mx) &&
(this.y <= my) && (this.y + 140 >= my);
}
function CanvasState(canvas) {
this.canvas = canvas;
this.width = canvas.width;
this.height = canvas.height;
this.context = canvas.getContext('2d');
var stylePaddingLeft, stylePaddingTop, styleBorderLeft, styleBorderTop;
if (document.defaultView && document.defaultView.getComputedStyle) {
this.stylePaddingLeft = parseInt(document.defaultView.getComputedStyle(canvas, null)['paddingLeft'], 10) || 0;
this.stylePaddingTop = parseInt(document.defaultView.getComputedStyle(canvas, null)['paddingTop'], 10) || 0;
this.styleBorderLeft = parseInt(document.defaultView.getComputedStyle(canvas, null)['borderLeftWidth'], 10) || 0;
this.styleBorderTop = parseInt(document.defaultView.getComputedStyle(canvas, null)['borderTopWidth'], 10) || 0;
}
var html = document.body.parentNode;
this.htmlTop = html.offsetTop;
this.htmlLeft = html.offsetLeft;
this.valid = false;
this.graphics = [];
this.dragging = false;
this.selection = null;
this.dragoffx = 0;
this.dragoffy = 0;
var baseState = this;
canvas.addEventListener('selectstart', function(e) {
e.preventDefault();
return false;
}, false);
canvas.addEventListener('mousedown', function(e) {
mouse = baseState.getMouse(e);
var mx = mouse.x;
var my = mouse.y;
var graphics = baseState.graphics;
var l = graphics.length;
for (var i = l - 1; i >= 0; --i) {
if (graphics[i].contains(mx, my) && e.button != 2) {
var sel = graphics.splice(i, 1)[0];
graphics.push(sel);
baseState.dragoffx = mx - sel.x;
baseState.dragoffy = my - sel.y;
baseState.dragging = true;
baseState.selection = sel;
baseState.valid = false;
return;
} else if (graphics[i].contains(mx, my) && e.button == 2) {
socket.emit('update', {cardID: graphics[i].cardID, visible: (graphics[i].visible == 'all') ? [] : 'all'});
return;
}
}
if (baseState.selection) {
baseState.selection = null;
baseState.valid = false;
}
}, true);
canvas.addEventListener('mousemove', function(e) {
mouse = baseState.getMouse(e);
if (baseState.dragging) {
baseState.selection.x = mouse.x - baseState.dragoffx;
baseState.selection.y = mouse.y - baseState.dragoffy;
baseState.valid = false;
}
}, true);
canvas.addEventListener('mouseup', function(e) {
if (baseState.dragging) {
socket.emit('update', {cardID: baseState.selection.cardID, x: baseState.selection.x, y: baseState.selection.y})
}
baseState.dragging = false;
}, true);
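// Keyboard handler: pressing a player's number while hovering over a card toggles that
// player's visibility of the card; only the first viewer (the card's "owner") can
// share or unshare it with other players.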
document.addEventListener('keypress', function(e) {
var mx = mouse.x;
var my = mouse.y;
var graphics = baseState.graphics;
var l = graphics.length;
for (var i = l - 1; i >= 0; --i) {
if (graphics[i].contains(mx, my)) {
if (graphics[i].visible.length == 0 && e.key == playerNum) {
socket.emit('update', {cardID: graphics[i].cardID, visible: [playerNum]});
} else if (graphics[i].visible != 'all' && graphics[i].visible[0] == playerNum) {
if (graphics[i].visible.includes(parseInt(e.key))) {
socket.emit('update', {cardID: graphics[i].cardID, visible: graphics[i].visible.filter(x => x != e.key)});
} else {
graphics[i].visible.push(parseInt(e.key));
socket.emit('update', {cardID: graphics[i].cardID, visible: graphics[i].visible});
}
}
}
}
});
this.selectionColor = '#CC0000';
this.selectionWidth = 4;
this.fps = 60;
setInterval(function() {
baseState.draw();
}, 1000/this.fps);
}
CanvasState.prototype.clear = function() {
this.context.clearRect(0, 0, this.width, this.height);
}
CanvasState.prototype.draw = function() {
if (!this.valid) {
var context = this.context;
var graphics = this.graphics;
this.clear();
context.fillStyle = "#5BC236";
context.fillRect(0, 0, 800, 600);
for (var i in graphics) {
var graphic = graphics[i];
graphic.draw(context);
}
if (this.selection != null) {
context.strokeStyle = this.selectionColor;
context.lineWidth = this.selectionWidth;
var sel = this.selection;
context.strokeRect(sel.x, sel.y, 100, 140);
}
this.valid = true;
}
}
CanvasState.prototype.addGraphic = function(graphic) {
this.graphics.push(graphic);
this.valid = false;
}
CanvasState.prototype.reset = function() {
this.graphics = [];
this.valid = false;
}
CanvasState.prototype.getMouse = function(e) {
var element = this.canvas, offsetX = 0, offsetY = 0, mx, my;
if (element.offsetParent !== undefined) {
do {
offsetX += element.offsetLeft;
offsetY += element.offsetTop;
} while ((element = element.offsetParent));
}
offsetX += this.stylePaddingLeft + this.styleBorderLeft + this.htmlLeft;
offsetY += this.stylePaddingTop + this.styleBorderTop + this.htmlTop;
mx = e.pageX - offsetX;
my = e.pageY - offsetY;
return {x: mx, y: my};
}
var button = document.getElementById("add-deck");
button.addEventListener('click', function() {
socket.emit('deck');
}, false);
var rbutton = document.getElementById("shuffle");
rbutton.addEventListener('click', function() {
socket.emit('shuffle');
}, false);
var rrbutton = document.getElementById("hide-all");
rrbutton.addEventListener('click', function() {
socket.emit('hide-all');
}, false);
var rrrbutton = document.getElementById("clear-all");
rrrbutton.addEventListener('click', function() {
socket.emit('clear-all');
}, false);
var rrrrbutton = document.getElementById("collect");
rrrrbutton.addEventListener('click', function() {
socket.emit('collect');
}, false);
// [1] https://simonsarris.com/making-html5-canvas-useful/<file_sep>/static/images/rename.py
import os

# Rename card images such as "2 of Clubs.png" to numeric codes "0.png" .. "51.png":
# rank index (Ace=0 ... King=12) plus a suit offset (Clubs=0, Diamonds=13, Hearts=26, Spades=39).
for filename in os.listdir("."):
    if filename == "rename.py":
        continue
    tokens = filename.split('.')[0].split()
    try:
        n = int(tokens[0]) - 1
    except ValueError:
        table = {"Jack": 10, "Queen": 11, "King": 12, "Ace": 0}
        n = table[tokens[0]]
    table = {"Clubs": 0, "Diamonds": 1, "Hearts": 2, "Spades": 3}
    k = table[tokens[2]]
    os.rename(filename, str(n + k*13) + ".png")<file_sep>/server.js
// Boilerplate
var express = require('express');
var http = require('http');
var path = require('path');
var socketIO = require('socket.io');
var app = express();
var server = http.Server(app);
var io = socketIO(server);
app.set('port', 8080);
app.use('/static', express.static(__dirname + '/static'));
app.get('/', function(request, response) {
response.sendFile(path.join(__dirname, 'index.html'));
});
server.listen(8080, function() {
console.log('Starting server on port 8080');
});
var players = [];
var playerNums = [];
io.on('connection', function(socket) {
for (var i = 1;; ++i) {
if (!playerNums.includes(i)) {
playerNums.push(i);
players.push({id: socket.id, num:playerNums.slice(-1)[0]})
break;
}
}
socket.emit('playerinfo', {players: playerNums, num: playerNums.slice(-1)[0]});
socket.emit('cards', cards);
socket.on('update', function(change) {
for (var i in cards) {
if (cards[i].cardID == change.cardID) {
// use explicit undefined checks so a coordinate of 0 (or other falsy value) still updates:
cards[i].x = (change.x !== undefined) ? change.x : cards[i].x;
cards[i].y = (change.y !== undefined) ? change.y : cards[i].y;
cards[i].visible = (change.visible !== undefined) ? change.visible : cards[i].visible;
cards.push(cards.splice(i, 1)[0]);
}
}
io.sockets.emit('cards', cards);
});
socket.on('deck', function() {
addDeck();
socket.emit('cards', cards);
});
socket.on('shuffle', function() {
cards = shuffle(cards);
socket.emit('cards', cards);
});
socket.on('hide-all', function() {
for (var i in cards) {
cards[i].visible = [];
}
socket.emit('cards', cards);
});
socket.on('clear-all', function() {
cards = [];
socket.emit('cards', cards);
});
socket.on('collect', function() {
for (var i in cards) {
cards[i].x = 0;
cards[i].y = 0;
}
socket.emit('cards', cards);
});
socket.on('disconnect', function() {
for (var i in players) {
if (players[i].id == socket.id) {
playerNums.splice(playerNums.indexOf(players.splice(i, 1)[0].num), 1);
break;
}
}
});
});
var cards = [];
function Card(code, x, y, theta, cardID) {
this.code = code;
this.x = x;
this.y = y;
this.theta = theta;
this.cardID = cardID;
this.visible = "all";
}
function addDeck() {
var l = cards.length;
for (i = 0; i < 54; ++i) {
cards.push(new Card(i, 50 + 2*i, 50 + 2*i, 0, i + l))
}
}
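// Note added for clarity (inferred from rename.py and CardGraphic, not stated in the original):
// card codes 0-51 appear to map to rank + 13*suit (Ace of Clubs = 0 ... King of Spades = 51),
// codes 52-53 are presumably the jokers that fill out addDeck's 54-card loop, and 54.png is
// the card-back image the client substitutes when a card is hidden from the current player.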
function shuffle(array) {
var currentIndex = array.length, temporaryValue, randomIndex;
// While there remain elements to shuffle...
while (0 !== currentIndex) {
// Pick a remaining element...
randomIndex = Math.floor(Math.random() * currentIndex);
currentIndex -= 1;
// And swap it with the current element.
temporaryValue = array[currentIndex];
array[currentIndex] = array[randomIndex];
array[randomIndex] = temporaryValue;
}
return array;
}
// https://stackoverflow.com/questions/2450954/how-to-randomize-shuffle-a-javascript-array
function addShuffledDeck() {
var l = cards.length;
var cs = [];
for (i = 0; i < 54; ++i) {
cs.push(new Card(i, 50 + 2*i, 50 +2*i, 0, i + l));
}
cs = shuffle(cs);
for (var i in cs) {
cards.push(cs[i]);
}
}
refs/heads/master
<repo_name>zhongyw/itranswarp.js<file_sep>/www/controllers/wikiApi.js
'use strict';
// wiki api
var
_ = require('lodash'),
api = require('../api'),
db = require('../db'),
helper = require('../helper'),
images = require('./_images'),
search = require('../search/search'),
constants = require('../constants'),
json_schema = require('../json_schema');
var
attachmentApi = require('./attachmentApi');
var
User = db.user,
Wiki = db.wiki,
WikiPage = db.wikipage,
Text = db.text,
warp = db.warp,
next_id = db.next_id;
function indexWiki(r) {
process.nextTick(function () {
search.engine.index({
type: 'wiki',
id: r.id,
name: r.name,
description: r.description || '',
content: helper.html2text(helper.md2html(r.content, true)),
created_at: r.created_at,
updated_at: r.updated_at,
url: '/wiki/' + (r.wiki_id ? r.wiki_id + '/' : '') + r.id,
upvotes: 0
});
});
}
function unindexWiki(r) {
process.nextTick(function () {
search.engine.unindex({
id: r.id
});
});
}
function* $getWikis() {
return yield Wiki.$findAll({
order: 'name asc'
});
}
function* $getWiki(id, includeContent) {
var
text,
wiki = yield Wiki.$find(id);
if (wiki === null) {
throw api.notFound('Wiki');
}
if (includeContent) {
text = yield Text.$find(wiki.content_id);
if (text === null) {
throw api.notFound('Text');
}
wiki.content = text.value;
}
return wiki;
}
function* $getWikiPage(id, includeContent) {
var
text,
wp = yield WikiPage.$find(id);
if (wp === null) {
throw api.notFound('WikiPage');
}
if (includeContent) {
text = yield Text.$find(wp.content_id);
if (text === null) {
throw api.notFound('Text');
}
wp.content = text.value;
}
return wp;
}
function treeIterate(nodes, root) {
var rid, removes;
rid = root.id;
root.children = [];
removes = [];
_.each(nodes, function (node, nid) {
if (node.parent_id === rid) {
root.children.push(node);
removes.push(nid);
}
});
_.each(removes, function (nid) {
delete nodes[nid];
});
if (root.children.length > 0) {
root.children.sort(function (n1, n2) {
return n1.display_order < n2.display_order ? (-1) : 1;
});
_.each(root.children, function (child) {
treeIterate(nodes, child);
});
}
}
function flatten(arr, depth, children) {
_.each(children, function (wp) {
wp.depth = depth;
arr.push(wp);
flatten(arr, depth + 1, wp.children);
});
}
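// Illustrative sketch (added for clarity, not part of the original source) of the shapes
// these helpers produce; the ids below are hypothetical:
//
//   treeIterate(pagesById, {id: ''}) turns a flat {id: page} map into nested children:
//     [ {id: 'p1', children: [ {id: 'p2', children: []} ]} ]
//   flatten([], 0, children) then yields a depth-annotated list suitable for rendering:
//     [ {id: 'p1', depth: 0}, {id: 'p2', depth: 1} ]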
function* $getWikiPages(wiki_id, returnAsDict) {
var
proot,
pdict = {},
pages = yield WikiPage.$findAll({
where: 'wiki_id=?',
params: [wiki_id]
});
_.each(pages, function (p) {
pdict[p.id] = p;
});
if (returnAsDict) {
return pdict;
}
proot = {
id: ''
};
treeIterate(pdict, proot);
return proot.children;
}
function* $getWikiTree(id, isFlatten) {
var
arr,
wiki = yield $getWiki(id),
children = yield $getWikiPages(id);
if (isFlatten) {
arr = [];
flatten(arr, 0, children);
wiki.children = arr;
}
else {
wiki.children = children;
}
return wiki;
}
function* $getNavigationMenus() {
var ws = yield $getWikis();
return _.map(ws, function (w) {
return {
name: w.name,
url: '/wiki/' + w.id
};
});
}
module.exports = {
$getNavigationMenus: $getNavigationMenus,
$getWikiTree: $getWikiTree,
$getWiki: $getWiki,
$getWikis: $getWikis,
$getWikiPage: $getWikiPage,
'GET /api/wikis/:id': function* (id) {
/**
* Get wiki by id.
*
* @name Get Wiki
* @param {string} id: Id of the wiki.
* @param {string} [format='']: Return html if format is 'html', default to raw.
* @return {object} Wiki object.
* @error {entity:notfound} Wiki was not found by id.
*/
var wiki = yield $getWiki(id, true);
if (this.request.query.format === 'html') {
wiki.content = helper.md2html(wiki.content, true);
}
this.body = wiki;
},
'GET /api/wikis': function* () {
/**
* Get all wikis.
*
* @name Get Wikis
* @return {object} Wikis object.
*/
this.body = {
wikis: yield $getWikis()
};
},
'POST /api/wikis': function* () {
/**
* Create a new wiki.
*
* @name Create Wiki
* @param {string} name: Name of the wiki.
* @param {string} description: Description of the wiki.
* @param {string} content: Content of the wiki.
* @param {string} [tag]: Tag of the wiki, separated by ','.
* @param {string} [image]: Base64 encoded string as cover image.
* @return {object} The created wiki object.
* @error {parameter:invalid} If some parameter is invalid.
* @error {permission:denied} If current user has no permission.
*/
helper.checkPermission(this.request, constants.role.EDITOR);
var
wiki,
text,
wiki_id,
content_id,
attachment,
data = this.request.body;
json_schema.validate('createWiki', data);
wiki_id = next_id();
content_id = next_id();
// create image:
attachment = yield attachmentApi.$createAttachment(
this.request.user.id,
data.name.trim(),
data.description.trim(),
new Buffer(data.image, 'base64'),
null,
true);
// create text:
text = yield Text.$create({
id: content_id,
ref_id: wiki_id,
value: data.content
});
// create wiki:
wiki = yield Wiki.$create({
id: wiki_id,
content_id: content_id,
cover_id: attachment.id,
name: data.name.trim(),
description: data.description.trim(),
tag: data.tag.trim()
});
wiki.content = data.content;
this.body = wiki;
},
'POST /api/wikis/:id': function* (id) {
/**
* Update a wiki.
*
* @name Update Wiki
* @param {string} id: The id of the wiki.
* @param {string} [name]: The name of the wiki.
* @param {string} [description]: The description of the wiki.
* @param {string} [tag]: The tag of the wiki.
* @param {string} [content]: The content of the wiki.
* @param {string} [image]: Base64 encoded string as cover image.
* @return {object} The updated wiki object.
*/
helper.checkPermission(this.request, constants.role.EDITOR);
var
wiki,
text,
wiki_id,
content_id,
attachment,
props = [],
data = this.request.body;
json_schema.validate('updateWiki', data);
wiki = yield $getWiki(id);
if (data.name) {
wiki.name = data.name.trim();
props.push('name');
}
if (data.description) {
wiki.description = data.description.trim();
props.push('description');
}
if (data.tag) {
wiki.tag = data.tag.trim();
props.push('tag');
}
if (data.image) {
// create image:
attachment = yield attachmentApi.$createAttachment(
this.request.user.id,
wiki.name,
wiki.description,
new Buffer(data.image, 'base64'),
null,
true);
wiki.cover_id = attachment.id;
props.push('cover_id');
}
if (data.content) {
text = yield Text.$create({
ref_id: wiki.id,
value: data.content
});
wiki.content_id = text.id;
wiki.content = data.content;
props.push('content_id');
}
if (props.length > 0) {
props.push('updated_at');
props.push('version');
yield wiki.$update(props);
}
if (!wiki.content) {
text = yield Text.$find(wiki.content_id);
wiki.content = text.value;
}
this.body = wiki;
},
'POST /api/wikis/:id/wikipages': function* (wiki_id) {
/**
* Create a new wiki page.
*
* @name Create WikiPage
* @param {string} id: Id of the wiki.
* @param {string} name: Name of the wiki page.
* @param {string} parent_id: Parent id of the wiki page, specify '' for top level wiki page.
* @param {string} content: Content of the wiki.
* @return {object} The created wiki page object.
* @error {parameter:invalid} If some parameter is invalid.
* @error {permission:denied} If current user has no permission.
*/
helper.checkPermission(this.request, constants.role.EDITOR);
var
wiki,
wikipage,
text,
num, wp_id, content_id,
data = this.request.body;
json_schema.validate('createWikiPage', data);
// check wiki id:
yield $getWiki(wiki_id);
// check parent id:
if (data.parent_id) {
yield $getWikiPage(data.parent_id);
}
wp_id = next_id(),
content_id = next_id();
// count:
num = yield warp.$queryNumber(
'select max(display_order) from wikipages where wiki_id=? and parent_id=?',
[wiki_id, data.parent_id]
);
text = yield Text.$create({
id: content_id,
ref_id: wp_id,
value: data.content
});
// create wiki page:
wikipage = yield WikiPage.$create({
id: wp_id,
wiki_id: wiki_id,
content_id: content_id,
parent_id: data.parent_id,
name: data.name.trim(),
display_order: ((num === null) ? 0 : num + 1)
});
wikipage.content = data.content;
indexWiki(wikipage);
this.body = wikipage;
},
'POST /api/wikis/wikipages/:id': function* (id) {
/**
* Update a wiki page.
*
* @name Update WikiPage
* @param {string} id: The id of the wiki page.
* @param {string} [name]: The name of the wiki page.
* @param {string} [content]: The content of the wiki page.
* @return {object} The updated wiki object.
*/
helper.checkPermission(this.request, constants.role.EDITOR);
var
wikipage,
text,
props = [],
data = this.request.body;
json_schema.validate('updateWikiPage', data);
wikipage = yield $getWikiPage(id);
if (data.name) {
wikipage.name = data.name.trim();
props.push('name');
}
if (data.content) {
text = yield Text.$create({
ref_id: wikipage.id,
value: data.content
});
wikipage.content_id = text.id;
wikipage.content = data.content;
props.push('content_id');
}
if (props.length > 0) {
props.push('updated_at');
props.push('version');
yield wikipage.$update(props);
}
if (!wikipage.content) {
text = yield Text.$find(wikipage.content_id);
wikipage.content = text.value;
}
indexWiki(wikipage);
this.body = wikipage;
},
'GET /api/wikis/wikipages/:id': function* (id) {
/**
* Get wiki page by id.
*
* @name Get Wiki Page
* @param {string} id: Id of the wiki page.
* @param {string} [format='']: Return html if format is 'html', default to raw.
* @return {object} WikiPage object.
* @error {resource:notfound} WikiPage was not found by id.
*/
var wp = yield $getWikiPage(id, true);
if (this.request.query.format === 'html') {
wp.content = helper.md2html(wp.content, true);
}
this.body = wp;
},
'GET /api/wikis/:id/wikipages': function* (id) {
/**
* Get wiki pages as a tree list.
*
* @name Get WikiPages
* @param {string} id - The id of the wiki.
* @return {object} The full tree object.
*/
this.body = yield $getWikiTree(id);
},
'POST /api/wikis/wikipages/:id/move': function* (id) {
/**
* Move a wikipage to another node.
*
* @name Move WikiPage
* @param {string} id: The id of the WikiPage being moved.
* @param {string} parent_id: The id of the target parent WikiPage. Specify '' to move to the top of the tree.
* @param {int} index: The index of the moved page.
* @return {object} The moved wiki object.
*/
helper.checkPermission(this.request, constants.role.EDITOR);
var
index, p, parent_id, i, L,
wiki,
movingPage,
parentPage,
allPages,
data = this.request.body;
json_schema.validate('moveWikiPage', data);
index = data.index;
parent_id = data.parent_id;
movingPage = yield $getWikiPage(id);
if (movingPage.parent_id === parent_id && movingPage.display_order === index) {
console.log('>> No need to update.');
this.body = movingPage;
return;
}
wiki = yield $getWiki(movingPage.wiki_id);
parentPage = parent_id === '' ? null : yield $getWikiPage(parent_id);
if (parentPage !== null && parentPage.wiki_id !== wiki.id) {
throw api.invalidParam('parent_id');
}
// check to prevent recursive:
allPages = yield $getWikiPages(wiki.id, true);
if (parentPage !== null) {
p = parentPage;
while (p.parent_id !== '') {
if (p.parent_id === movingPage.id) {
throw api.conflictError('WikiPage', 'Will cause recursive.');
}
p = allPages[p.parent_id];
}
}
// get current children:
L = [];
_.each(allPages, function (p, pid) {
if (p.parent_id === parent_id && p.id !== movingPage.id) {
L.push(p);
}
});
if (index > L.length) {
throw api.invalidParam('index');
}
L.sort(function (p1, p2) {
return p1.display_order < p2.display_order ? (-1) : 1;
});
L.splice(index, 0, movingPage);
// update display order and movingPage:
for (i=0; i<L.length; i++) {
yield warp.$update('update wikipages set display_order=? where id=?', [i, L[i].id]);
}
movingPage.display_order = index; // <-- already updated, but need to pass to result
movingPage.parent_id = parent_id;
yield movingPage.$update(['parent_id', 'updated_at', 'version']);
this.body = movingPage;
},
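// Hedged example (illustrative only) of a move request accepted by the handler above,
// per the 'moveWikiPage' schema it validates; the page id is hypothetical:
//
//   POST /api/wikis/wikipages/<wikipage-id>/move
//   { "parent_id": "", "index": 0 }   // move the page to the top level, first position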
'POST /api/wikis/wikipages/:id/delete': function* (id) {
/**
* Delete a wikipage if it has no child wikipage.
*
* @name Delete WikiPage
* @param {string} id - The id of the wikipage.
* @return {object} Object containing the id of the deleted wiki page, e.g. { "id": "1234" }
*/
helper.checkPermission(this.request, constants.role.EDITOR);
var
wikipage = yield $getWikiPage(id),
num = yield WikiPage.$findNumber({
select: 'count(id)',
where: 'parent_id=?',
params: [id]
});
if (num > 0) {
throw api.conflictError('WikiPage', 'Cannot delete a non-empty wiki page.');
}
yield wikipage.$destroy();
// delete all texts:
yield warp.$update('delete from texts where ref_id=?', [id]);
unindexWiki(wikipage);
this.body = {
id: id
};
},
'POST /api/wikis/:id/delete': function* (id) {
/**
* Delete a wiki by its id.
*
* @name Delete Wiki
* @param {string} id: The id of the wiki.
* @return {object} Object containing the deleted id, e.g. {"id": "12345"}
* @error {resource:notfound} If resource not found by id.
*/
helper.checkPermission(this.request, constants.role.EDITOR);
var
wiki = yield $getWiki(id),
num = yield WikiPage.$findNumber({
select: 'count(id)',
where: 'wiki_id=?',
params: [id]
});
if (num > 0) {
throw api.conflictError('Wiki', 'Wiki is not empty.');
}
yield wiki.$destroy();
// delete all texts:
yield warp.$update('delete from texts where ref_id=?', [id]);
unindexWiki(wiki);
this.body = {
id: id
};
}
};
<file_sep>/www/controllers/navigationApi.js
'use strict';
// navigation api
var
_ = require('lodash'),
db = require('../db'),
api = require('../api'),
cache = require('../cache'),
helper = require('../helper'),
config = require('../config'),
constants = require('../constants'),
json_schema = require('../json_schema');
var
Navigation = db.navigation,
warp = db.warp,
next_id = db.next_id;
function* $getNavigation(id) {
var navigation = yield Navigation.$find(id);
if (navigation === null) {
throw api.notFound('Navigation');
}
return navigation;
}
function* $getNavigations() {
var navs = yield Navigation.$findAll({
order: 'display_order'
}), navRoots = [];
_.each(navs, function(nav){
if(!nav.parent_id) navRoots.push(nav);
});
_.each(navRoots, function(navRoot){
navRoot.childs = [];
_.each(navs, function(nav){
if(nav.parent_id === navRoot.id) navRoot.childs.push(nav);
});
});
return navRoots;
}
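// Result shape sketch (added for clarity; field values are hypothetical): a two-level menu
// where each top-level navigation carries its direct children in `childs`:
//   [ {id: 'n1', name: 'Docs', url: '/wiki/...', childs: [ {id: 'n2', parent_id: 'n1', ...} ]} ]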
function* $getNavigationMenus() {
var
apiNames = ['categoryApi', 'articleApi', 'wikiApi', 'webpageApi', 'discussApi', 'attachmentApi', 'userApi', 'settingApi'],
apis = _.filter(
_.map(apiNames, function (name) {
return require('./' + name);
}), function (api) {
return api.hasOwnProperty('$getNavigationMenus');
}),
menus = [],
i;
for (i = 0; i < apis.length; i ++) {
menus = menus.concat(yield apis[i].$getNavigationMenus());
}
return menus;
}
module.exports = {
$getNavigation: $getNavigation,
$getNavigations: $getNavigations,
'GET /api/navigations/all/menus': function* () {
/**
* Get all navigation menus.
*
* @name Get NavigationMenus
* @return {object} Result like {"navigationMenus": [navigation array]}
*/
helper.checkPermission(this.request, constants.role.ADMIN);
this.body = {
navigationMenus: yield $getNavigationMenus()
};
},
'GET /api/navigations': function* () {
/**
* Get all navigations.
*
* @name Get Navigations
* @return {object} Result like {"navigations": [navigation array]}
*/
helper.checkPermission(this.request, constants.role.ADMIN);
this.body = {
navigations: yield $getNavigations()
};
},
'POST /api/navigations': function* () {
/**
* Create a navigation.
*
* @name Create Navigation
* @param {string} name: The name of the navigation.
* @param {string} url: The URL of the navigation.
* @return {object} The navigation object.
*/
helper.checkPermission(this.request, constants.role.ADMIN);
var
name,
url,
parent_id,
num,
data = this.request.body;
json_schema.validate('createNavigation', data);
name = data.name.trim();
url = data.url.trim();
parent_id = data.parent_id.trim();
num = yield Navigation.$findNumber('max(display_order)');
this.body = yield Navigation.$create({
name: name,
url: url,
parent_id : parent_id,
display_order: (num === null) ? 0 : num + 1
});
yield cache.$remove(constants.cache.NAVIGATIONS);
},
'POST /api/navigations/all/sort': function* () {
/**
* Sort navigations.
*
* @name Sort Navigations
* @param {array} id: The ids of the navigation.
* @return {object} The sort result like {"sort":true}.
*/
helper.checkPermission(this.request, constants.role.ADMIN);
var data = this.request.body;
json_schema.validate('sortNavigations', data);
this.body = {
navigations: yield helper.$sort(data.ids, yield $getNavigations())
};
yield cache.$remove(constants.cache.NAVIGATIONS);
},
'POST /api/navigations/:id/delete': function* (id) {
/**
* Delete a navigation.
*
* @name Delete Navigation
* @param {string} id: The id of the navigation.
* @return {object} The deleted navigation id like {"id":"123"}.
*/
helper.checkPermission(this.request, constants.role.ADMIN);
var navigation = yield $getNavigation(id);
yield navigation.$destroy();
this.body = {
id: id
};
yield cache.$remove(constants.cache.NAVIGATIONS);
}
};
<file_sep>/www/models/topic.js
'use strict';
// topic.js
var base = require('./_base.js');
module.exports = function (warp) {
return base.defineModel(warp, 'Topic', [
base.column_id('board_id', { index: true }),
base.column_varchar_50('ref_type'),
base.column_id('ref_id', { index: true }),
base.column_id('user_id', { index: true }),
base.column_bigint('replies'),
base.column_varchar_100('name'),
base.column_varchar_1000('tags'),
base.column_bigint('upvotes'),
base.column_bigint('downvotes'),
base.column_bigint('score'),
base.column_boolean('locked'),
base.column_text('content', { type: 'text' })
], {
table: 'topics'
});
};
<file_sep>/www/controllers/discussApi.js
'use strict';
// discuss api
var
_ = require('lodash'),
db = require('../db'),
api = require('../api'),
cache = require('../cache'),
helper = require('../helper'),
search = require('../search/search'),
constants = require('../constants'),
json_schema = require('../json_schema'),
userApi = require('./userApi');
var
Board = db.board,
Topic = db.topic,
Reply = db.reply,
warp = db.warp,
next_id = db.next_id;
function indexDiscuss(r) {
var doc = {
type: 'discuss',
id: r.id,
tags: r.tags || '',
name: r.name,
description: '',
content: helper.html2text(r.content),
created_at: r.created_at,
updated_at: r.updated_at,
url: '/discuss/' + (r.topic_id ? 'topics/' + r.topic_id + '/find/' + r.id : r.board_id + '/' + r.id),
upvotes: 0
};
process.nextTick(function () {
search.engine.index(doc);
});
}
function unindexDiscuss(r) {
process.nextTick(function () {
search.engine.unindex({
id: r.id
});
});
}
function unindexDiscussByIds(ids) {
process.nextTick(function () {
var
arr = ids,
fn = function () {
if (arr.length > 0) {
if (arr.length > 10) {
search.engine.unindex(arr.splice(arr.length - 10, 10));
} else {
search.engine.unindex(arr.splice(0, arr.length));
}
setTimeout(fn, 500);
}
};
fn();
});
}
function* $getNavigationMenus() {
return [{
name: 'Discuss',
url: '/discuss'
}];
}
function* $getBoard(id) {
var board = yield Board.$find(id);
if (board === null) {
throw api.notFound('Board');
}
return board;
}
function* $getBoardByTag(tag) {
var
boards = yield $getBoards(),
filtered = _.filter(boards, function (b) {
return b.tag === tag;
});
if (filtered.length === 0) {
throw api.notFound('Board');
}
return filtered[0];
}
function* $getBoards() {
return yield Board.$findAll({
order: 'display_order'
});
}
function* $lockBoard(id, locked) {
var board = yield $getBoard(id);
if (board.locked !== locked) {
board.locked = locked;
yield board.$update(['locked', 'updated_at', 'version']);
}
return board;
}
function* $getTopic(id) {
var topic = yield Topic.$find(id);
if (topic === null) {
throw api.notFound('Topic');
}
return topic;
}
var TOPIC_FIELDS_EXCLUDE_CONTENT = _.filter(Topic.__selectAttributesArray, function (field) {
return field !== 'content';
});
function* $getAllTopics(page) {
page.total = yield Topic.$findNumber({
select: 'count(id)'
});
if (page.isEmpty) {
return [];
}
return yield Topic.$findAll({
select: TOPIC_FIELDS_EXCLUDE_CONTENT,
order: 'id desc',
offset: page.offset,
limit: page.limit
});
}
function* $getTopics(board_id, page) {
page.total = yield Topic.$findNumber({
select: 'count(id)',
where: 'board_id=?',
params: [board_id]
});
if (page.isEmpty) {
return [];
}
return yield Topic.$findAll({
select: TOPIC_FIELDS_EXCLUDE_CONTENT,
where: 'board_id=?',
params: [board_id],
order: 'updated_at desc',
offset: page.offset,
limit: page.limit
});
}
function* $getTopicsByRef(ref_id, page) {
page.total = yield Topic.$findNumber({
select: 'count(id)',
where: 'ref_id=?',
params: [ref_id]
});
if (page.isEmpty) {
return [];
}
return yield Topic.$findAll({
where: 'ref_id=?',
params: [ref_id],
order: 'updated_at desc',
offset: page.offset,
limit: page.limit
});
}
function* $getAllReplies(page) {
page.total = yield Reply.$findNumber({
select: 'count(id)'
});
if (page.isEmpty) {
return [];
}
return yield Reply.$findAll({
order: 'id desc',
offset: page.offset,
limit: page.limit
});
}
function* $getReplies(topic_id, page) {
var num = yield Reply.$findNumber({
select: 'count(id)',
where: 'topic_id=?',
params: [topic_id]
});
// items = 1 topic + N replies:
page.total = num + 1;
if (num === 0) {
return [];
}
return yield Reply.$findAll({
where: 'topic_id=?',
params: [topic_id],
order: 'id',
offset: (page.index === 1) ? 0 : (page.offset - 1),
limit: (page.index === 1) ? (page.limit - 1) : page.limit
});
}
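// Worked example of the paging math above (added for clarity): the topic itself occupies
// the first slot of page 1, so assuming a page limit of 20 and page.offset = (index-1)*limit,
// page 1 fetches replies with offset 0 / limit 19, while page 2 fetches offset 19 / limit 20
// (page.offset of 20, minus the slot taken by the topic on the first page).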
function* $getFirstReplies(topic_id, num) {
return yield Reply.$findAll({
where: 'topic_id=?',
params: [topic_id],
order: 'id',
limit: num
});
}
function* $getReplyPageIndex(topic, reply_id) {
var
num = yield Reply.$findNumber({
select: 'count(id)',
where: 'topic_id=? and id < ?',
params: [topic.id, reply_id]
});
return Math.floor((num + 1) / 20) + 1;
}
function* $createReply(user, topic_id, data) {
var
reply,
topic = yield $getTopic(topic_id);
if (topic.locked) {
throw api.conflictError('Topic', 'Topic is locked.');
}
reply = yield Reply.$create({
topic_id: topic_id,
user_id: user.id,
content: helper.md2html(data.content)
});
yield warp.$update('update topics set replies=replies+1, version=version+1, updated_at=? where id=?', [Date.now(), topic_id]);
reply.name = 'Re:' + topic.name;
indexDiscuss(reply);
delete reply.name;
if (topic.ref_id) {
yield cache.$remove('REF-TOPICS-' + topic.ref_id);
}
return reply;
}
function* $createTopic(user, board_id, ref_type, ref_id, data) {
var
board = yield $getBoard(board_id),
topic = yield Topic.$create({
board_id: board_id,
user_id: user.id,
ref_type: ref_type,
ref_id: ref_id,
name: data.name.trim(),
tags: (data.tags || '').trim(),
content: helper.md2html(data.content)
});
yield warp.$update('update boards set topics = topics + 1 where id=?', [board_id]);
indexDiscuss(topic);
if (ref_id) {
yield cache.$remove('REF-TOPICS-' + ref_id);
}
return topic;
}
function* $loadTopicsByRefWithCache(ref_id, page) {
if (page.index === 1) {
var key = 'REF-TOPICS-' + ref_id;
return yield cache.$get(key, function* () {
return yield $loadTopicsByRef(ref_id, page);
});
}
return yield $loadTopicsByRef(ref_id, page);
}
function* $loadTopicsByRef(ref_id, page) {
var
i,
topics = yield $getTopicsByRef(ref_id, page);
yield userApi.$bindUsers(topics);
for (i=0; i<topics.length; i++) {
yield $bindReplies(topics[i]);
}
return topics;
}
function* $bindReplies(topic) {
var key = 'REPLIES-' + topic.id + '_' + topic.version;
topic.replies = yield cache.$get(key, function* () {
var replies = yield $getFirstReplies(topic.id, 10);
yield userApi.$bindUsers(replies);
return replies;
});
}
module.exports = {
$getNavigationMenus: $getNavigationMenus,
$createTopic: $createTopic,
$getBoard: $getBoard,
$getBoardByTag: $getBoardByTag,
$getBoards: $getBoards,
$getTopic: $getTopic,
$getTopics: $getTopics,
$getTopicsByRef: $getTopicsByRef,
$getReplies: $getReplies,
$getFirstReplies: $getFirstReplies,
$getReplyPageIndex: $getReplyPageIndex,
'GET /api/ref/:id/topics': function* (id) {
/**
* Get topics by ref id
*/
var
page = helper.getPage(this.request, 10),
topics = yield $loadTopicsByRefWithCache(id, page);
this.body = {
page: page,
topics: topics
};
},
'GET /api/boards': function* () {
/**
* Get all boards.
*/
helper.checkPermission(this.request, constants.role.EDITOR);
this.body = {
boards: yield $getBoards()
};
},
'POST /api/boards': function* () {
/**
* Create new board.
*
* @name Create Board
* @param {string} name - The name of the board.
* @param {string} description - The description of the board.
* @return {object} Board object.
*/
helper.checkPermission(this.request, constants.role.ADMIN);
var
num,
data = this.request.body;
json_schema.validate('createBoard', data);
num = yield Board.$findNumber('max(display_order)');
this.body = yield Board.$create({
name: data.name.trim(),
tag: data.tag.trim(),
description: data.description.trim(),
display_order: ((num === null) ? 0 : num + 1)
});
},
'GET /api/boards/:id': function* (id) {
helper.checkPermission(this.request, constants.role.EDITOR);
this.body = yield $getBoard(id);
},
'POST /api/boards/:id': function* (id) {
/**
* Update a board.
*
* @name Update Board
* @param {string} id - The id of the board.
* @param {string} [name] - The new name of the board.
* @param {string} [description] - The new description of the board.
* @return {object} Board object that was updated.
*/
helper.checkPermission(this.request, constants.role.ADMIN);
var
board,
props = [],
data = this.request.body;
json_schema.validate('updateBoard', data);
board = yield $getBoard(id);
if (data.name) {
board.name = data.name.trim();
props.push('name');
}
if (data.description) {
board.description = data.description.trim();
props.push('description');
}
if (data.tag) {
board.tag = data.tag.trim();
props.push('tag');
}
if (props.length > 0) {
props.push('updated_at');
props.push('version');
yield board.$update(props);
}
this.body = board;
},
'POST /api/boards/:id/lock': function* (id) {
/**
* Lock the board by its id.
*
* @name Lock Board
* @param {string} id - The id of the board.
* @return {object} Board object.
*/
helper.checkPermission(this.request, constants.role.ADMIN);
this.body = yield $lockBoard(id, true);
},
'POST /api/boards/:id/unlock': function* (id) {
/**
* Unlock the board by its id.
*
* @name Unlock Board
* @param {string} id - The id of the board.
* @return {object} Board object.
*/
helper.checkPermission(this.request, constants.role.ADMIN);
this.body = yield $lockBoard(id, false);
},
'POST /api/boards/all/sort': function* () {
/**
* Sort boards.
*
* @name Sort Boards
* @param {array} ids: The ids of the boards in the desired order.
* @return {object} The sorted boards like {"boards": [...]}.
*/
helper.checkPermission(this.request, constants.role.ADMIN);
var
board, boards,
i, pos, ids,
data = this.request.body;
json_schema.validate('sortBoards', data);
ids = data.ids;
boards = yield Board.$findAll();
if (ids.length !== boards.length) {
throw api.invalidParam('ids', 'Invalid id list.');
}
for (i=0; i<boards.length; i++) {
board = boards[i];
pos = ids.indexOf(board.id);
if (pos === (-1)) {
throw api.invalidParam('ids', 'Invalid id list.');
}
board.display_order = pos;
}
for (i=0; i<boards.length; i++) {
yield boards[i].$update(['display_order', 'updated_at', 'version']);
}
this.body = {
boards: yield $getBoards()
};
},
'GET /api/boards/:id/topics': function* (board_id) {
/**
* Get topics by page.
*/
helper.checkPermission(this.request, constants.role.EDITOR);
var
page = helper.getPage(this.request),
topics = yield $getTopics(board_id, page);
this.body = {
page: page,
topics: topics
};
},
'POST /api/boards/:id/topics': function* (board_id) {
/**
* Post a new topic.
*
* @param {string} id: The id of board.
* @param {string} name: The name of topic.
* @param {string} tags: The tags of topic.
* @param {string} content: The content of topic.
* @return {object} The topic object.
*/
helper.checkPermission(this.request, constants.role.SUBSCRIBER);
var
topic,
data = this.request.body;
json_schema.validate('createTopic', data);
topic = yield $createTopic(this.request.user, board_id, '', '', data);
this.body = topic;
},
'GET /api/topics': function* () {
/**
* Get all topics.
*/
helper.checkPermission(this.request, constants.role.EDITOR);
var
page = helper.getPage(this.request),
topics = yield $getAllTopics(page);
yield userApi.$bindUsers(topics);
this.body = {
page: page,
topics: topics
};
},
'GET /api/replies': function* () {
/**
* Get all replies by page.
*/
helper.checkPermission(this.request, constants.role.EDITOR);
var
page = helper.getPage(this.request),
replies = yield $getAllReplies(page);
yield userApi.$bindUsers(replies);
this.body = {
page: page,
replies: replies
};
},
'POST /api/replies/:id/delete': function* (id) {
/**
* Delete a reply by its id. NOTE: deleting a reply only marks it as deleted.
*
* @name Delete Reply.
* @param {string} id - The id of the reply.
* @return {object} Object containing the deleted id, e.g. {"id": "12345"}
*/
helper.checkPermission(this.request, constants.role.EDITOR);
var reply = yield Reply.$find(id);
if (reply === null) {
throw api.notFound('Reply');
}
reply.deleted = true;
yield reply.$update(['deleted', 'updated_at', 'version']);
unindexDiscuss(reply);
this.body = {
id: id
};
},
'POST /api/topics/:id/delete': function* (id) {
/**
* Delete a topic by its id.
*
* @name Delete Topic
* @param {string} id - The id of the topic.
* @return {object} Object containing the deleted id, e.g. {"id": "12345"}
*/
helper.checkPermission(this.request, constants.role.EDITOR);
var
topic = yield $getTopic(id),
reply_ids = yield warp.$query('select id from replies where topic_id=?', [id]);
yield topic.$destroy();
yield warp.$update('delete from replies where topic_id=?', [id]);
yield warp.$update('update boards set topics = topics - 1 where id=?', [topic.board_id]);
unindexDiscuss(topic);
unindexDiscussByIds(reply_ids);
this.body = {
id: id
};
},
'POST /api/topics/:id/replies': function* (id) {
/**
* Create a reply to a topic.
*
* @param {string} id: The id of topic.
* @param {string} content: The content of reply.
* @return {object} The reply object.
*/
helper.checkPermission(this.request, constants.role.SUBSCRIBER);
var data = this.request.body;
json_schema.validate('createReply', data);
this.body = yield $createReply(this.request.user, id, data);
}
};
<file_sep>/www/controllers/cacheApi.js
/**
* Created by zhongyw on 7/29/16.
*/
'use strict';
// cache api
var
cache = require('../cache'),
helper = require('../helper'),
constants = require('../constants');
var defaultCacheDefinitions = {
clearCache: [
{
key: 'name',
label: 'Name',
description: 'Name of the website',
value: 'Clean Index',
type: 'button'
}
]
}
module.exports = {
'GET /api/cache/definitions': function* () {
helper.checkPermission(this.request, constants.role.ADMIN);
this.body = defaultCacheDefinitions;
},
'GET /api/cache/clear': function* () {
helper.checkPermission(this.request, constants.role.ADMIN);
this.body = {};
},
'POST /api/cache/clear': function* () {
helper.checkPermission(this.request, constants.role.ADMIN);
yield cache.$remove(constants.cache.INDEXMODEL);
this.body = {code: "1", message: "success"};
}
}<file_sep>/www/controllers/roleApi.js
/**
* Created by zhongyongwei on 16/5/27.
**/
'use strict';
var
_ = require('lodash'),
api = require('../api'),
db = require('../db'),
helper = require('../helper'),
constants = require('../constants'),
json_schema = require('../json_schema');
module.exports = {
'GET /api/roles': function* (){
this.body = {
roles:constants.role
};
}
}<file_sep>/www/auth.js
'use strict';
/**
* Authenticate users.
*
* How to generate password:
*
* user's email: <EMAIL>
* user's password: <PASSWORD>
* send hashed password for authentication:
* {
* email: '<EMAIL>',
* passwd: '<PASSWORD>' // => sha1('<EMAIL>' + ':' + 'HelloWorld')
* }
* verify in db:
* db_password = loadFromDatabase(); // => '<PASSWORD>'
* authenticated = db_password === sha1(user_id + ':' + '<PASSWORD>1<PASSWORD>')
*
* that means, there are 2 sha1-hash for user's original password, and the salt is user's email and id.
*/
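// A minimal sketch (added for clarity, not part of the original) of the two sha1 steps
// described above; the email, password and id are hypothetical:
//
//   var sha1 = function (s) { return crypto.createHash('sha1').update(s).digest('hex'); };
//   var clientHash = sha1('test@example.com' + ':' + 'HelloWorld'); // computed client-side
//   var stored     = sha1(localUserId + ':' + clientHash);          // kept in localusers.passwd
//   // sign-in succeeds when stored === sha1(localUserId + ':' + submittedClientHash)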
var
crypto = require('crypto'),
config = require('./config'),
api = require('./api'),
db = require('./db'),
constants = require('./constants');
var
User = db.user,
LocalUser = db.localuser,
AuthUser = db.authuser;
var
COOKIE_NAME = config.session.cookie,
COOKIE_SALT = config.session.salt,
COOKIE_EXPIRES_IN_MS = config.session.expires * 1000;
// for safe base64 replacements:
var
re_add = new RegExp(/\+/g),
re_sla = new RegExp(/\//g),
re_equ = new RegExp(/\=/g),
re_r_add = new RegExp(/\-/g),
re_r_sla = new RegExp(/\_/g),
re_r_equ = new RegExp(/\./g);
function _generatePassword(salt, inputPassword) {
return crypto.createHash('sha1').update(salt + ':' + inputPassword).digest('hex');
}
function _verifyPassword(salt, inputPassword, expectedPassword) {
return expectedPassword === crypto.createHash('sha1').update(salt + ':' + inputPassword).digest('hex');
}
// string -> base64:
function _safe_b64encode(s) {
var b64 = new Buffer(s).toString('base64');
return b64.replace(re_add, '-').replace(re_sla, '_').replace(re_equ, '.');
}
// base64 -> string
function _safe_b64decode(s) {
var b64 = s.replace(re_r_add, '+').replace(re_r_sla, '/').replace(re_r_equ, '=');
return new Buffer(b64, 'base64').toString();
}
// Generate a secure client session cookie by constructing string:
// base64(provider:id:expires:sha1(provider:id:expires:passwd:salt)).
function makeSessionCookie(provider, theId, passwd, expires) {
var
now = Date.now(),
min = now + 86400000, // 1 day
max = now + 2592000000, // 30 days
secure, sha1, str;
if (!expires) {
expires = now + COOKIE_EXPIRES_IN_MS;
} else if (expires < min) {
expires = min;
} else if (expires > max) {
expires = max;
}
secure = [provider, theId, String(expires), passwd, COOKIE_SALT].join(':');
sha1 = crypto.createHash('sha1').update(secure).digest('hex');
str = [provider, theId, expires, sha1].join(':');
console.log('make session cookie: ' + str);
console.log('session cookie expires at ' + new Date(expires).toLocaleString());
console.log('>>> secure: ' + secure);
console.log('>>> sha1: ' + sha1);
return _safe_b64encode(str);
}
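// Hedged usage sketch (illustrative; the arguments are hypothetical) matching the cookie
// layout described above:
//
//   var value = makeSessionCookie(constants.signin.LOCAL, localUser.id, localUser.passwd, 0);
//   // => base64(provider:id:expires:sha1(provider:id:expires:passwd:salt))
//   this.cookies.set(COOKIE_NAME, value, { path: '/', httpOnly: true });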
function* $findUserAuthByProvider(provider, id) {
var au, lu, user, passwd;
if (provider === constants.signin.LOCAL) {
lu = yield LocalUser.$find(id);
if (lu === null) {
return null;
}
passwd = lu.passwd;
user = yield User.$find(lu.user_id);
}
else {
au = yield AuthUser.$find(id);
if (au === null) {
return null;
}
passwd = au.auth_token;
user = yield User.$find(au.user_id);
}
return {
user: user,
passwd: <PASSWORD>
};
}
// parse header 'Authorization: Basic xxxx'
function* $parseAuthorization(auth) {
console.log('try parse header: Authorization: ' + auth);
if ((auth.length < 6) || (auth.substring(0, 6) !== 'Basic ')) {
return null;
}
var
u, p, user, luser,
up = new Buffer(auth.substring(6), 'base64').toString().split(':');
if (up.length !== 2) {
return null;
}
u = up[0];
p = up[1];
if (!u || !p) {
return null;
}
// TODO: check sha1 regex?
user = yield User.$find({
where: 'email=?',
params: [u]
});
if (user) {
luser = yield LocalUser.$find({
where: 'user_id=?',
params: [user.id]
});
if (luser && _verifyPassword(luser.id, p, luser.passwd)) {
console.log('binded user: ' + user.name);
return user;
}
}
console.log('invalid authorization header.');
return null;
}
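// Example of the header format accepted above (added for clarity; credentials are hypothetical):
//
//   Authorization: Basic base64('test@example.com' + ':' + sha1('test@example.com:HelloWorld'))
//
// i.e. the password part is the client-side sha1 hash, never the plain-text password.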
// parseSessionCookie:
// provider:uid:expires:sha1(provider:uid:expires:passwd:salt)
function* $parseSessionCookie(s) {
var
ss = _safe_b64decode(s).split(':'),
user,
auth,
theId, provider, expiresStr, expires, sha1, secure, expected;
if (ss.length !== 4) {
return null;
}
provider = ss[0];
theId = ss[1];
expiresStr = ss[2];
expires = parseInt(expiresStr, 10);
sha1 = ss[3];
if (isNaN(expires) || (expires < Date.now()) || !theId || !sha1) {
return null;
}
auth = yield $findUserAuthByProvider(provider, theId);
if (auth === null) {
return null;
}
// check:
secure = [provider, theId, expiresStr, auth.passwd, COOKIE_SALT].join(':');
expected = crypto.createHash('sha1').update(secure).digest('hex');
console.log('>>> secure: ' + secure);
console.log('>>> sha1: ' + sha1);
console.log('>>> expected: ' + expected);
if (sha1 !== expected) {
return null;
}
if (auth.user.locked_until > Date.now()) {
console.log('User is locked: ' + auth.user.email);
return null;
}
return auth.user;
}
// middle ware for bind user from session cookie:
function* $userIdentityParser(next) {
this.request.user = null;
var
auth,
user,
cookie = this.cookies.get(COOKIE_NAME);
if (cookie) {
console.log('try to parse session cookie...');
user = yield $parseSessionCookie(cookie);
if (user) {
user.passwd = '******';
this.request.user = user;
console.log('bind user from session cookie: ' + user.email);
}
else {
console.log('invalid session cookie. cleared.');
this.cookies.set(COOKIE_NAME, 'deleted', {
path: '/',
httpOnly: true,
expires: new Date(0)
});
}
yield next;
return;
}
auth = this.request.get('authorization');
if (auth) {
console.log('try to parse authorization header...');
user = yield $parseAuthorization(auth);
if (user) {
user.passwd = '******';
this.request.user = user;
console.log('bind user from authorization: ' + user.email);
}
}
yield next;
}
module.exports = {
generatePassword: _generatePassword,
verifyPassword: _verifyPassword,
makeSessionCookie: makeSessionCookie,
$userIdentityParser: $userIdentityParser
};
<file_sep>/www/controllers/manage.js
'use strict';
// manage.js
var
_ = require('lodash'),
fs = require('fs'),
db = require('../db'),
api = require('../api'),
cache = require('../cache'),
helper = require('../helper'),
constants = require('../constants'),
json_schema = require('../json_schema');
var
User = db.user,
Article = db.article,
Category = db.category,
warp = db.warp;
var
userApi = require('./userApi'),
wikiApi = require('./wikiApi'),
discussApi = require('./discussApi'),
articleApi = require('./articleApi'),
webpageApi = require('./webpageApi'),
settingApi = require('./settingApi'),
categoryApi = require('./categoryApi'),
attachmentApi = require('./attachmentApi'),
navigationApi = require('./navigationApi');
var apisList = [categoryApi, articleApi, webpageApi, wikiApi, discussApi, attachmentApi, navigationApi, userApi, settingApi];
// do management console
var KEY_WEBSITE = constants.cache.WEBSITE;
function getId(request) {
var id = request.query.id;
if (id && id.length === 50) {
return id;
}
throw api.notFound('id');
}
function* $getModel(model) {
if (model === undefined) {
model = {};
}
model.__website__ = yield settingApi.$getWebsiteSettings();
return model;
}
module.exports = {
'GET /manage/signin': function* () {
/**
* Display authentication.
*/
this.render('manage/signin.html', yield $getModel());
},
'GET /manage/': function* () {
this.response.redirect('/manage/article/');
},
// overview ///////////////////////////////////////////////////////////////
'GET /manage/overview/(index)?': function* () {
var page = helper.getPage(this.request);
this.body = '';
},
// article ////////////////////////////////////////////////////////////////
'GET /manage/article/(article_list)?': function* () {
this.render('manage/article/article_list.html', yield $getModel({
pageIndex: helper.getPageNumber(this.request)
}));
},
'GET /manage/article/category_list': function* () {
this.render('manage/article/category_list.html', yield $getModel({
pageIndex: helper.getPageNumber(this.request)
}));
},
'GET /manage/article/create_article': function* () {
this.render('manage/article/article_form.html', yield $getModel({
form: {
name: 'Create Article',
action: '/api/articles',
redirect: 'article_list'
}
}));
},
'GET /manage/article/edit_article': function* () {
var id = getId(this.request);
this.render('manage/article/article_form.html', yield $getModel({
id: id,
form: {
name: 'Edit Article',
action: '/api/articles/' + id,
redirect: 'article_list'
}
}));
},
'GET /manage/article/create_category': function* () {
this.render('manage/article/category_form.html', yield $getModel({
form: {
name: 'Create Category',
action: '/api/categories',
redirect: 'category_list'
}
}));
},
'GET /manage/article/edit_category': function* () {
var id = getId(this.request);
this.render('manage/article/category_form.html', yield $getModel({
id: id,
form: {
name: 'Edit Category',
action: '/api/categories/' + id,
redirect: 'category_list'
}
}));
},
// ask ////////////////////////////////////////////////////////////////
'GET /manage/ask/(ask_list)?': function* () {
this.render('manage/ask/ask_list.html', yield $getModel({
pageIndex: helper.getPageNumber(this.request)
}));
},
'GET /manage/ask/category_list': function* () {
this.render('manage/ask/category_list.html', yield $getModel({
pageIndex: helper.getPageNumber(this.request)
}));
},
'GET /manage/ask/create_ask': function* () {
this.render('manage/ask/ask_form.html', yield $getModel({
form: {
name: 'Create ask',
action: '/api/asks',
redirect: 'ask_list'
}
}));
},
'GET /manage/ask/edit_ask': function* () {
var id = getId(this.request);
this.render('manage/ask/ask_form.html', yield $getModel({
id: id,
form: {
name: 'Edit ask',
action: '/api/asks/' + id,
redirect: 'ask_list'
}
}));
},
'GET /manage/ask/create_category': function* () {
this.render('manage/ask/category_form.html', yield $getModel({
form: {
name: 'Create Category',
action: '/api/categories',
redirect: 'category_list'
}
}));
},
'GET /manage/ask/edit_category': function* () {
var id = getId(this.request);
this.render('manage/ask/category_form.html', yield $getModel({
id: id,
form: {
name: 'Edit Category',
action: '/api/categories/' + id,
redirect: 'category_list'
}
}));
},
// webpage ////////////////////////////////////////////////////////////////
'GET /manage/webpage/(webpage_list)?': function* () {
this.render('manage/webpage/webpage_list.html', yield $getModel({}));
},
'GET /manage/webpage/create_webpage': function* () {
this.render('manage/webpage/webpage_form.html', yield $getModel({
form: {
name: 'Create Web Page',
action: '/api/webpages',
redirect: 'webpage_list'
},
}));
},
'GET /manage/webpage/edit_webpage': function* () {
var id = getId(this.request);
this.render('manage/webpage/webpage_form.html', yield $getModel({
id: id,
form: {
name: 'Edit Web Page',
action: '/api/webpages/' + id,
redirect: 'webpage_list'
},
}));
},
// wiki ///////////////////////////////////////////////////////////////////
'GET /manage/wiki/(wiki_list)?': function* () {
this.render('manage/wiki/wiki_list.html', yield $getModel({}));
},
'GET /manage/wiki/create_wiki': function* () {
this.render('manage/wiki/wiki_form.html', yield $getModel({
form: {
name: 'Create Wiki',
action: '/api/wikis',
redirect: 'wiki_list'
}
}));
},
'GET /manage/wiki/edit_wiki': function* () {
var id = getId(this.request);
this.render('manage/wiki/wiki_form.html', yield $getModel({
id: id,
form: {
name: 'Edit Wiki',
action: '/api/wikis/' + id,
redirect: 'wiki_tree?id=' + id
}
}));
},
'GET /manage/wiki/wiki_tree': function* () {
var id = getId(this.request);
this.render('manage/wiki/wiki_tree.html', yield $getModel({
id: id
}));
},
'GET /manage/wiki/edit_wikipage': function* () {
var
id = getId(this.request),
wp = yield wikiApi.$getWikiPage(id);
this.render('manage/wiki/wikipage_form.html', yield $getModel({
id: id,
form: {
name: 'Edit Wiki Page',
action: '/api/wikis/wikipages/' + id,
redirect: 'wiki_tree?id=' + wp.wiki_id
}
}));
},
// board //////////////////////////////////////////////////////////////////
'GET /manage/discuss/(board_list)?': function* () {
this.render('manage/discuss/board_list.html', yield $getModel({}));
},
'GET /manage/discuss/create_board': function* () {
this.render('manage/discuss/board_form.html', yield $getModel({
form: {
name: 'Create Board',
action: '/api/boards',
redirect: 'board_list'
}
}));
},
'GET /manage/discuss/edit_board': function* () {
var id = getId(this.request);
this.render('manage/discuss/board_form.html', yield $getModel({
id: id,
form: {
name: 'Edit Board',
action: '/api/boards/' + id,
redirect: 'board_list'
}
}));
},
'GET /manage/discuss/reply_list': function* () {
this.render('manage/discuss/reply_list.html', yield $getModel({
pageIndex: helper.getPageNumber(this.request)
}));
},
'GET /manage/discuss/topic_list': function* () {
this.render('manage/discuss/topic_list.html', yield $getModel({
pageIndex: helper.getPageNumber(this.request)
}));
},
// attachment /////////////////////////////////////////////////////////////
'GET /manage/attachment/(attachment_list)?': function* () {
this.render('manage/attachment/attachment_list.html', yield $getModel({
pageIndex: helper.getPageNumber(this.request)
}));
},
// user ///////////////////////////////////////////////////////////////////
'GET /manage/user/(user_list)?': function* () {
this.render('manage/user/user_list.html', yield $getModel({
currentTime: Date.now(),
pageIndex: helper.getPageNumber(this.request)
}));
},
'GET /manage/user/user_create': function* () {
this.render('manage/user/user_create.html', yield $getModel({
form: {
name: 'Create User',
action: '/api/users',
redirect: 'user_list'
}
}));
},
// navigation /////////////////////////////////////////////////////////////
'GET /manage/navigation/(navigation_list)?': function* () {
this.render('manage/navigation/navigation_list.html', yield $getModel({}));
},
'GET /manage/navigation/create_navigation': function* () {
this.render('manage/navigation/navigation_form.html', yield $getModel({
form: {
name: 'Create Navigation',
action: '/api/navigations',
redirect: 'navigation_list'
}
}));
},
// setting ////////////////////////////////////////////////////////////////
'GET /manage/setting/': function* () {
this.response.redirect('/manage/setting/website');
},
'GET /manage/setting/:g': function* (g) {
this.render('manage/setting/setting_form.html', yield $getModel({
tabs: [
{
key: 'website',
name: 'Website'
},
{
key: 'snippets',
name: 'Snippets'
}
],
group: g,
form: {
name: 'Edit Settings',
action: '/api/settings/' + g,
redirect: g
}
}));
},
// cache /////////////////////////////////////////////////////////////////////
'GET /manage/cache/': function* () {
this.response.redirect('/manage/cache/clear');
},
'GET /manage/cache/:g': function* (g){
this.render('manage/cache/cache_form.html', yield $getModel({
tabs: [
{
key: 'clear',
name: 'Clear Cache'
},
{
key: 'display',
name: 'View Cache'
},
{
key: 'cache',
name: 'Cache Settings'
}
],
group: g,
form: {
name: 'Cache Settings',
action: '/api/cache/' + g,
redirect: g
}
}));
}
};
<file_sep>/www/test/test_article_api.js
'use strict';
// test article api:
var
_ = require('lodash'),
fs = require('fs'),
co = require('co'),
should = require('should'),
remote = require('./_remote'),
constants = require('../constants'),
roles = constants.role;
describe('#articles', function () {
var category = null;
var category2 = null;
before(remote.setup);
before(function (done) {
co(function* () {
category = yield remote.$post(roles.ADMIN, '/api/categories', {
name: 'Article Category',
tag: 'cat1'
});
remote.shouldNoError(category);
category2 = yield remote.$post(roles.ADMIN, '/api/categories', {
name: 'Article Category 2',
tag: 'cat2'
});
remote.shouldNoError(category2);
return 'ok';
}).then(function (result) {
done();
}, function (err) {
done(err);
});
});
describe('#api', function () {
it('should get empty articles', function* () {
var r = yield remote.$get(roles.CONTRIBUTOR, '/api/articles');
remote.shouldNoError(r);
r.page.total.should.equal(0);
r.articles.should.be.an.Array.and.have.length(0);
});
it('get articles failed for no permission', function* () {
var r = yield remote.$get(roles.SUBSCRIBER, '/api/articles');
remote.shouldHasError(r, 'permission:denied');
});
it('create article by contributor failed', function* () {
// create article:
var r1 = yield remote.$post(roles.CONTRIBUTOR, '/api/articles', {
category_id: category.id,
name: ' Try create Article ',
description: ' blablabla\nhaha... \n ',
tags: ' aaa,\n BBB, \t ccc,CcC',
content: 'Long content...',
image: remote.readFileSync('res-image.jpg').toString('base64')
});
remote.shouldHasError(r1, 'permission:denied');
});
it('create by admin and update, delete by editor failed', function* () {
// create article:
var r1 = yield remote.$post(roles.ADMIN, '/api/articles', {
category_id: category.id,
name: ' Article 1 ',
description: ' blablabla\nhaha... \n ',
tags: ' aaa,\n BBB, \t ccc,CcC',
content: 'Long content...',
image: remote.readFileSync('res-image.jpg').toString('base64')
});
//console.log("r1:",r1);
remote.shouldNoError(r1);
r1.category_id.should.equal(category.id);
r1.name.should.equal('Article 1');
r1.description.should.equal('blablabla\nhaha...');
r1.tags.should.equal('aaa,BBB,ccc');
r1.content.should.equal('Long content...');
r1.content_id.should.be.ok;
r1.cover_id.should.be.ok;
// update by editor:
var r2 = yield remote.$post(roles.EDITOR, '/api/articles/' + r1.id, {
name: 'Name Changed ',
content: 'Changed?'
});
remote.shouldHasError(r2, 'permission:denied');
// delete by editor:
var r3 = yield remote.$post(roles.EDITOR, '/api/articles/' + r1.id + '/delete');
remote.shouldHasError(r3, 'permission:denied');
});
it('create and update article by editor', function* () {
// create article:
var r1 = yield remote.$post(roles.EDITOR, '/api/articles', {
category_id: category.id,
name: 'Test Article ',
description: ' blablabla\nhaha... \n ',
tags: ' aaa,\n BBB, \t ccc,CcC',
content: 'Long content...',
image: remote.readFileSync('res-image.jpg').toString('base64')
});
remote.shouldNoError(r1);
r1.category_id.should.equal(category.id);
r1.name.should.equal('Test Article');
r1.description.should.equal('blablabla\nhaha...');
r1.tags.should.equal('aaa,BBB,ccc');
r1.content.should.equal('Long content...');
r1.content_id.should.be.ok;
r1.cover_id.should.be.ok;
r1.version.should.equal(0);
// check image:
var dl = yield remote.$download('/files/attachments/' + r1.cover_id + '/l');
remote.shouldNoError(dl);
dl.statusCode.should.equal(200);
dl.headers['content-type'].should.equal('image/jpeg');
parseInt(dl.headers['content-length'], 10).should.approximately(122826, 10000);
// update article:
var r2 = yield remote.$post(roles.EDITOR, '/api/articles/' + r1.id, {
name: 'Name Changed ',
content: 'Changed!'
});
remote.shouldNoError(r2);
r2.name.should.equal('Name Changed');
r2.content.should.equal('Changed!');
r2.content_id.should.not.equal(r1.content_id);
r2.cover_id.should.equal(r1.cover_id);
r2.user_id.should.equal(r1.user_id);
r2.version.should.equal(1);
// query:
var r3 = yield remote.$get(roles.EDITOR, '/api/articles/' + r1.id);
r3.name.should.equal(r2.name);
r3.content.should.equal(r2.content);
r3.version.should.equal(1);
// not updated:
r3.tags.should.equal(r1.tags);
r3.description.should.equal(r1.description);
});
it('create article then change cover', function* () {
// create article:
var r1 = yield remote.$post(roles.EDITOR, '/api/articles', {
category_id: category.id,
name: 'Before Cover Change ',
description: ' blablabla\nhaha... \n ',
tags: ' aaa,\n BBB, \t ccc,CcC',
content: 'Content not change...',
image: remote.readFileSync('res-image.jpg').toString('base64')
});
remote.shouldNoError(r1);
// update article:
var r2 = yield remote.$post(roles.EDITOR, '/api/articles/' + r1.id, {
image: remote.readFileSync('res-image-2.jpg').toString('base64')
});
remote.shouldNoError(r2);
r2.name.should.equal('Before Cover Change');
r2.content.should.equal('Content not change...');
r2.content_id.should.equal(r1.content_id);
r2.cover_id.should.not.equal(r1.cover_id);
// check image:
var dl = yield remote.$download('/files/attachments/' + r2.cover_id + '/l');
remote.shouldNoError(dl);
dl.statusCode.should.equal(200);
dl.headers['content-type'].should.equal('image/jpeg');
parseInt(dl.headers['content-length'], 10).should.approximately(39368, 10000);
});
it('create article with wrong parameter by editor', function* () {
var
i, r, params,
required = ['name', 'description', 'category_id', 'content', 'image'],
prepared = {
name: 'Test Params',
description: 'blablabla...',
category_id: category.id,
tags: 'tag1,tag2,tag3',
content: 'a long content...',
image: remote.readFileSync('res-image.jpg').toString('base64')
};
for (i=0; i<required.length; i++) {
params = _.clone(prepared);
delete params[required[i]];
r = yield remote.$post(roles.EDITOR, '/api/articles', params);
remote.shouldHasError(r, 'parameter:invalid', required[i]);
}
});
it('create article with invalid category_id', function* () {
var
r,
params = {
name: 'Test Params',
description: 'blablabla...',
category_id: remote.next_id(),
tags: 'tag1,tag2,tag3',
content: 'a long content...',
image: remote.readFileSync('res-image.jpg').toString('base64')
};
r = yield remote.$post(roles.EDITOR, '/api/articles', params);
remote.shouldHasError(r, 'entity:notfound', 'Category');
});
it('create article with invalid image', function* () {
var
r,
params = {
name: 'Test Params',
description: 'blablabla...',
category_id: category.id,
tags: 'tag1,tag2,tag3',
content: 'a long content...',
image: remote.readFileSync('res-plain.txt').toString('base64')
};
r = yield remote.$post(roles.EDITOR, '/api/articles', params);
remote.shouldHasError(r, 'parameter:invalid', 'image');
});
it('create and delete article by editor', function* () {
// create article:
var r1 = yield remote.$post(roles.EDITOR, '/api/articles', {
category_id: category.id,
name: 'To Be Delete... ',
description: ' blablabla\nhaha... \n ',
tags: ' aaa,\n BBB, \t ccc,CcC',
content: 'Content not change...',
image: remote.readFileSync('res-image.jpg').toString('base64')
});
remote.shouldNoError(r1);
// delete article:
var r2 = yield remote.$post(roles.EDITOR, '/api/articles/' + r1.id + '/delete');
remote.shouldNoError(r2);
r2.id.should.equal(r1.id);
// query:
var r3 = yield remote.$get(roles.EDITOR, '/api/articles/' + r1.id);
remote.shouldHasError(r3, 'entity:notfound', 'Article');
});
});
});
<file_sep>/www/controllers/weixin.js
'use strict';
var weixinConfig = {
token: 'galj1K<PASSWORD>26DMs8Nt',
appid: 'wx57319a3dde25f225',
appsecret: '<KEY>',
encodingAESKey: '<KEY>'
},
    _ = require('lodash'),
    OAuth = require('wechat-oauth'),
WechatAPI = require('wechat-api'),
    client = new OAuth(weixinConfig.appid, weixinConfig.appsecret),
api = new WechatAPI(weixinConfig.appid, weixinConfig.appsecret),
wechat = require('wechat'),
coWechat = require('co-wechat');
module.exports = {
'GET /api/weixin/roles': function* (){
this.body = {
say: "Hell1o",
client: client,
url: client.getAuthorizeURLForWebsite('http://172.16.17.32/weichat_dev/osu/example_2.php'),
api: api
};
},
'POST /api/weixin/robot': coWechat(weixinConfig).middleware(function *() {
        // The incoming WeChat message is available on this.weixin
var message = this.weixin;
console.log(message);
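        // A typical text message delivered by co-wechat looks roughly like the
        // sketch below (field names follow the WeChat push XML; the values here
        // are made up for illustration only):
        // { ToUserName: 'gh_xxxxxxxx', FromUserName: 'openid_xxxxxxxx',
        //   CreateTime: 1463721234, MsgType: 'text', Content: 'hello',
        //   MsgId: '1234567890123456' }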
if (message.FromUserName === 'diaosi') {
            // Reply to 'diaosi' (plain text reply)
this.body = 'hehe';
} else if (message.FromUserName === 'text') {
            // You can also reply to a text-type message like this
this.body = {
content: 'text object',
type: 'text'
};
} else if (message.FromUserName === 'hehe') {
            // Reply with a music message
this.body = {
type: "music",
content: {
title: "来段音乐吧",
description: "一无所有",
musicUrl: "http://mp3.com/xx.mp3",
hqMusicUrl: "http://mp3.com/xx.mp3"
}
};
} else if (message.FromUserName === 'kf') {
            // Forward the message to the customer service interface
this.body = {
type: "customerService",
kfAccount: "<PASSWORD>"
};
} else {
var msgStr = "";
_.each(message, function(value,key){
msgStr += key + ":" + value + " | ";
})
            // Reply to everyone else (news / image-and-text reply)
this.body = [
{
title: 'Hi',
description: msgStr,
picurl: 'http://nodeapi.cloudfoundry.com/qrcode.jpg',
url: 'http://nodeapi.cloudfoundry.com/'
}
];
}
})
};<file_sep>/www/models/reply.js
'use strict';
// reply.js
var base = require('./_base.js');
module.exports = function (warp) {
return base.defineModel(warp, 'Reply', [
base.column_id('topic_id', { index: true }),
base.column_id('user_id', { index: true }),
base.column_boolean('deleted', { defaultValue: false }),
base.column_bigint('upvotes', { defaultValue: 0}),
base.column_bigint('downvotes', { defaultValue: 0}),
base.column_bigint('score', { defaultValue: 0}),
base.column_text('content', { type: 'text' })
], {
table: 'replies'
});
};
<file_sep>/www/config_default.js
'use strict';
/*
* This is the default configuration for iTranswarp.js.
*
* DO NOT change it. Instead, make a copy and rename to:
* "config_development.js" which is enabled in development environment.
* "config_production.js" which is enabled in production environment.
 * Then edit the settings you need.
*/
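/*
 * For illustration only (not part of the defaults): a minimal override file
 * might look like the sketch below, assuming config.js deep-merges it over
 * this file (the loader output captured in init_db.sql suggests it does).
 * All values are placeholders.
 *
 *     // config_development.js
 *     module.exports = {
 *         domain: 'localhost:2015',
 *         db: {
 *             host: 'localhost',
 *             user: 'devuser',
 *             password: 'dev-password',
 *             database: 'itranswarp'
 *         }
 *     };
 */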
module.exports = {
// server domain name:
domain: 'www.nbn8.com',
// the theme used, default to 'default':
theme: 'default',
session: {
cookie: 'isession',
// used to generate secure session cookie, can be set to any random string:
salt: 'iTranswarp.js',
// signin expires in N seconds:
expires: 7 * 24 * 3600,
// use https for management:
httpsForManagement: false
},
db: {
// host or ip address of mysql, e.g. '192.168.1.123':
host: '172.16.31.10',
// port of mysql, default to 3306:
port: 3306,
// user to login to mysql, change to your mysql user:
user: 'zhongyw',
// password to login to mysql, change to your mysql password:
password: '<PASSWORD>',
// database used in mysql, default to 'itranswarp':
database: 'zhongywdb',
// timeout before initial a connection to mysql, default to 3 seconds:
connectTimeout: 3000,
// maximum concurrent db connections:
connectionLimit: 20,
// acquire timeout:
acquireTimeout: 3000,
// waiting queue size:
queueLimit: 10
},
// NOT USED NOW:
cdn: {
static_prefix: ''
},
cache: {
prefix: 'it/',
// host or ip address of memcached:
host: '127.0.0.1',
// port of memcached, default to 11211:
port: 11211,
// connection timeout, default to 1 second:
timeout: 1000,
// retries when failed:
retries: 3
},
// NOT USED NOW:
queue: {
// host or ip address of redis:
host: '127.0.0.1',
// port of redis, default to 6379:
port: 6379
},
search: {
provider: 'site_search',
configs: {
// default set to google search:
search_url: 'https://www.google.com/search?ie=utf-8&q=%s',
// other search engines:
// baidu: 'http://www.baidu.com/s?ie=utf-8&wd=%s'
// bing: 'http://www.bing.com/search?ie=utf-8&q=%s'
domain: 'www.example.com'
}
},
// oauth2 providers that allow sign in from other oauth2 providers:
oauth2: {
// e.g. facebook oauth2 configuration:
/*'facebook': {
'icon': 'facebook',
'name': 'Facebook登录',
'app_key': '1229544160397328',
'app_secret': '<KEY>',
'redirect_uri': 'http://your-redirect-uri/config/in/facebook'
},*/
'weibo':{
'icon': 'weibo',
'name': '新浪微博登录',
'app_key': '3273065943',
'app_secret': 'f756a27fbb94ce8748c651a6d00b688e',
'redirect_uri': 'http://www.nbn8.com/auth/callback/weibo'
}
},
// END:
END: 'END'
};
<file_sep>/www/test/test_category_api.js
'use strict';
// test category api:
var
should = require('should'),
remote = require('./_remote'),
constants = require('../constants'),
roles = constants.role;
describe('#categories', function () {
before(remote.setup);
describe('#api', function () {
it('should get empty categories', function* () {
var r = yield remote.$get(roles.GUEST, '/api/categories');
remote.shouldNoError(r);
should(r.categories).be.ok;
r.categories.should.be.an.Array.and.have.length(0);
});
it('create a new category by admin ok', function* () {
var r = yield remote.$post(roles.ADMIN, '/api/categories', {
name: ' Test Category ',
tag: 'java',
description: ' this is a test category... '
});
remote.shouldNoError(r);
r.display_order.should.equal(0);
r.name.should.equal('Test Category');
r.tag.should.equal('java');
r.description.should.equal('this is a test category...');
r.version.should.equal(0);
r.id.should.be.ok.and.have.lengthOf(50);
// get by id:
var r2 = yield remote.$get(roles.GUEST, '/api/categories/' + r.id);
remote.shouldNoError(r2);
r2.id.should.equal(r.id);
r2.name.should.equal(r.name);
r2.description.should.equal(r.description);
r2.created_at.should.equal(r.created_at);
r2.updated_at.should.equal(r.updated_at);
r2.version.should.equal(r.version);
// create another:
var r3 = yield remote.$post(roles.ADMIN, '/api/categories', {
name: 'Another Category ',
tag: 'java'
});
remote.shouldNoError(r3);
r3.name.should.equal('Another Category');
r3.display_order.should.equal(1);
// get all category:
var rs = yield remote.$get(roles.GUEST, '/api/categories');
remote.shouldNoError(rs);
rs.categories.should.be.an.Array.and.have.lengthOf(2);
});
it('create new category with wrong parameter by admin', function* () {
var r = yield remote.$post(roles.ADMIN, '/api/categories', {
tag: 'java',
description: ' no name parameter... '
});
remote.shouldHasError(r, 'parameter:invalid', 'name');
var r = yield remote.$post(roles.ADMIN, '/api/categories', {
name: ' no tag parameter... '
});
remote.shouldHasError(r, 'parameter:invalid', 'tag');
});
it('create new category by editor', function* () {
var r = yield remote.$post(roles.EDITOR, '/api/categories', {
name: 'by editor',
tag: 'java',
description: ' parameter... '
});
remote.shouldHasError(r, 'permission:denied', 'permission');
});
it('update a category by admin', function* () {
var r = yield remote.$post(roles.ADMIN, '/api/categories', {
name: ' Before Update ',
tag: 'java',
description: ' '
});
remote.shouldNoError(r);
r.name.should.equal('Before Update');
r.tag.should.equal('java');
r.description.should.equal('');
r.version.should.equal(0);
var r2 = yield remote.$post(roles.ADMIN, '/api/categories/' + r.id, {
name: ' After Update ',
tag: 'python',
description: ' added description... \t '
});
remote.shouldNoError(r2);
r2.id.should.equal(r.id);
r2.name.should.equal('After Update');
r2.tag.should.equal('python');
r2.description.should.equal('added description...');
r2.created_at.should.equal(r.created_at);
r2.updated_at.should.greaterThan(r.updated_at);
r2.version.should.equal(1);
// query to verify:
var r3 = yield remote.$get(roles.ADMIN, '/api/categories/' + r.id);
remote.shouldNoError(r3);
r3.id.should.equal(r.id);
r3.name.should.equal('After Update');
r3.tag.should.equal('python');
r3.description.should.equal('added description...');
r3.created_at.should.equal(r.created_at);
r3.updated_at.should.greaterThan(r.updated_at);
r3.version.should.equal(1);
});
it('update a category by editor', function* () {
var r = yield remote.$post(roles.ADMIN, '/api/categories', {
name: ' Before Update ',
tag: 'java',
description: ' '
});
remote.shouldNoError(r);
// try update its name and description:
var r2 = yield remote.$post(roles.EDITOR, '/api/categories/' + r.id, {
name: ' Try Update\n ',
description: ' added description... \t '
});
remote.shouldHasError(r2, 'permission:denied', 'permission');
});
it('delete a category by admin', function* () {
// create first:
var r = yield remote.$post(roles.ADMIN, '/api/categories', {
name: ' Before Delete ',
tag: 'java',
description: ' '
});
remote.shouldNoError(r);
r.name.should.equal('Before Delete');
// try delete:
var r2 = yield remote.$post(roles.ADMIN, '/api/categories/' + r.id + '/delete');
remote.shouldNoError(r2);
r2.id.should.equal(r.id);
// try get again:
var r3 = yield remote.$get(roles.GUEST, '/api/categories/' + r.id);
remote.shouldHasError(r3, 'entity:notfound', 'Category');
});
it('delete a non-exist category by editor', function* () {
var r = yield remote.$post(roles.EDITOR, '/api/categories/' + remote.next_id() + '/delete');
remote.shouldHasError(r, 'permission:denied', 'permission');
});
it('delete a non-exist category by admin', function* () {
var r = yield remote.$post(roles.ADMIN, '/api/categories/' + remote.next_id() + '/delete');
remote.shouldHasError(r, 'entity:notfound', 'Category');
});
it('get non-exist category', function* () {
var r = yield remote.$get(roles.GUEST, '/api/categories/' + remote.next_id());
remote.shouldHasError(r, 'entity:notfound', 'Category');
});
});
});
<file_sep>/www/models/text.js
'use strict';
// text.js
var base = require('./_base.js');
module.exports = function (warp) {
return base.defineModel(warp, 'Text', [
base.column_id('ref_id'),
base.column_text('value')
], {
table: 'texts'
});
};
<file_sep>/www/models/mdm_device.js
/**
* Created by zhongyw on 7/31/16.
*/
'use strict';
// mdm_device.js
var base = require('./_base_pure.js');
module.exports = function (warp) {
return base.defineModel(warp, 'MdmDevice', [
base.column_varchar_100('device_identifier'),
base.column_varchar_500('udid'),
base.column_varchar_500('challenge'),
base.column_varchar_500('device_name'),
base.column_varchar_500('ownership'),
base.column_varchar_100('model'),
base.column_int('platform_id'),
base.column_varchar_100('user_agent'),
base.column_varchar_100('operation_system'),
], {
table: 'mdm_device'
});
};
<file_sep>/www/creator/modelCreater.js
/**
* usage:
* node modelCreater.js mdm_device_hardware
*/
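/*
 * For a hypothetical table "foo" with columns `id` (bigint) and `name`
 * (varchar(100)), this script would append roughly the following to
 * ../models/foo.js (a sketch only; real output depends on the table schema):
 *
 *     'use strict'
 *     var base = require('./_base_pure.js'),
 *         _ = require('lodash');
 *     module.exports = function (warp) {
 *         return base.defineModel(warp, 'foo', [{
 *             name: 'id',
 *             type: 'int'
 *         },{
 *             name: 'name',
 *             type: 'varchar(100)'
 *         }], {
 *             table: 'foo'
 *         });
 *     }
 */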
'use strict';
var
_ = require('lodash'),
fs = require('fs'),
cfg = require('../config.js'),
Warp = require('mysql-warp');
var tableName = process.argv[2],
outFile = "../models/" + tableName + ".js",
str;
var warp = Warp.create(cfg.db);
function convert(row){
var type = row.Type;
if(type.indexOf("int") !== -1) type = 'int';
return "{\n" +
" name: '"+row.Field+"',\n" +
" type: '"+type+"'\n" +
" }"
}
function convertColumns(result){
var str = "";
_.each(result, function(row, index){
if(index !== 0){
str += ",";
}
str += convert(row);
})
return str;
}
warp.query('show columns from ' + tableName, function(err, result){
    // remove any previous output synchronously, so the sync append below
    // cannot race with an in-flight async delete:
    if (fs.existsSync(outFile)) {
        fs.unlinkSync(outFile);
    }
str = "'use strict'\n" +
"var base = require('./_base_pure.js'),\n" +
" _ = require('lodash');\n" +
"module.exports = function (warp) {\n" +
" return base.defineModel(warp, '"+tableName+"', [" + convertColumns(result);
str += "], {\n" +
" table: '"+ tableName +"'\n" +
" });\n"
str += "}";
fs.appendFileSync(outFile, str);
});
<file_sep>/www/models/board.js
'use strict';
// board.js
var base = require('./_base.js');
module.exports = function (warp) {
return base.defineModel(warp, 'Board', [
base.column_bigint('topics'),
base.column_boolean('locked'),
base.column_varchar_100('tag'),
base.column_varchar_100('name'),
base.column_varchar_1000('description'),
base.column_bigint('display_order')
], {
table: 'boards'
});
};
<file_sep>/www/views/themes/default/webpage/webpage.html
{% extends '../_base.html' %}
{% block nav %}
/webpage/{{ webpage.alias }}"
{% endblock %}
{% block title %}{{ webpage.name }}{% endblock %}
{% block head %}
{% set __sidebar_right__ = true %}
<style>
.x-webpage-visible {
display: block;
}
</style>
{% endblock %}
{% block content %}
<h3>{{ webpage.name }}</h3>
<div class="x-page-content x-content">
{{ webpage.content|safe }}
</div>
{% endblock %}
{% block sidebar_right_content %}
{% endblock %}
<file_sep>/www/helper.js
'use strict';
// helper:
var
_ = require('lodash'),
marked = require('marked'),
htmlparser = require('htmlparser2'),
api = require('./api'),
Page = require('./page');
var re_int = /^[0-9]+$/;
function string2Integer(s) {
if (re_int.test(s)) {
return parseInt(s, 10);
}
return null;
}
var safeRenderer = new marked.Renderer();
safeRenderer.link = function (href, title, text) {
if (href.indexOf('http://') !== 0 && href.indexOf('https://') !== 0) {
href = 'http://' + href;
}
return '<a target="_blank" rel="nofollow" href="' + href + '">' + text + '</a>';
};
var HTML2TEXT_TAGS = {
'applet': ' ',
'area': ' ',
'audio': '\n',
'base': ' ',
'basefont': '',
'br': '\n',
'button': ' ',
'canvas': ' ',
'cite': ' ',
'col': ' ',
'colgroup': ' ',
'datalist': ' ',
'dialog': ' ',
'embed': ' ',
'frame': '',
'frameset': '',
'head': '',
'hr': '\n',
'iframe': '',
'img': ' ',
'input': ' ',
'kbd': ' ',
'keygen': ' ',
'link': ' ',
'map': ' ',
'meta': ' ',
'meter': ' ',
'noframes': ' ',
'noscript': ' ',
'object': ' ',
'optgroup': ' ',
'option': ' ',
'output': ' ',
'param': ' ',
'progress': ' ',
'script': '\n',
'select': ' ',
'source': ' ',
'style': ' ',
'textarea': ' ',
'track': ' ',
'var': ' ',
'video': '\n',
'wbr': '\n'
};
function html2text(html) {
var
buffer = [],
saveTexts = [true],
saveCurrent = true,
parser = new htmlparser.Parser({
onopentag: function (tagname, attribs) {
if (saveCurrent) {
saveCurrent = !HTML2TEXT_TAGS[tagname];
}
saveTexts.push(saveCurrent);
},
ontext: function (text) {
if (saveCurrent) {
buffer.push(text);
}
},
onclosetag: function (tagname) {
saveTexts.pop();
saveCurrent = saveTexts[saveTexts.length - 1];
}
}, {
decodeEntities: true
});
parser.write(html);
parser.end();
return buffer.join('').replace(/\n/ig, ' ');
}
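// Example (sketch): html2text('<p>Hi <b>there</b><script>alert(1)</script></p>')
// keeps 'Hi there' and drops the script body, because 'script' is listed in
// HTML2TEXT_TAGS above; newlines in the result are collapsed to spaces.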
function* $md2html(md, cacheKey, isSafeInput) {
if (cacheKey) {
//
}
var html = isSafeInput ? marked(md) : marked(md, {
sanitize: true,
renderer: safeRenderer
});
return html;
}
function md2html(md, isSafeInput) {
var html = isSafeInput ? marked(md) : marked(md, {
sanitize: true,
renderer: safeRenderer
});
return html;
}
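// Example (sketch): md2html('hi <script>x()</script>') with isSafeInput falsy
// runs marked with sanitize enabled, so the script tag comes back HTML-escaped
// rather than being rendered as markup.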
// ' A, B ; Ccc, ccc ' -> 'A,B,Ccc'
function formatTags(tags) {
if (!tags) {
return '';
}
var
lv,
dict = {},
        arr = _.map(tags.split(/[\,\;\，\；]/), function (value) {
return value.trim();
});
return _.filter(arr, function (value) {
if (value) {
lv = value.toLowerCase();
if (dict.hasOwnProperty(lv)) {
return false;
}
dict[lv] = true;
return true;
}
return false;
}).join(',');
}
module.exports = {
formatTags: formatTags,
html2text: html2text,
md2html: md2html,
$md2html: $md2html,
$sort: function* (ids, entities) {
var i, pos, entity;
if (entities.length !== ids.length) {
throw api.invalidParam('ids', 'Invalid id list: expected ' + entities.length + ' ids.');
}
for (i=0; i<entities.length; i++) {
entity = entities[i];
pos = ids.indexOf(entity.id);
if (pos === (-1)) {
throw api.invalidParam('ids', 'Invalid id list: id \"' + entity.id + '\" not found.');
}
entity.display_order = pos;
}
// update:
for (i=0; i<entities.length; i++) {
entity = entities[i];
yield entity.$update(['display_order', 'updated_at', 'version']);
}
return _.sortBy(entities, function (entity) {
return entity.display_order;
});
},
checkPermission: function (request, expectedRole) {
if (!request.user || (request.user.role > expectedRole)) {
console.log('check permission failed: expected = ' + expectedRole + ', actual = ' + (request.user ? request.user.role : 'null'));
throw api.notAllowed('Do not have permission.');
}
},
getPageNumber: function (request) {
var index = string2Integer(request.query.page || '1');
if (index === null || index < 1) {
index = 1;
}
return index;
},
getPage: function (request, pageSize) {
var
index = string2Integer(request.query.page || '1'),
size = pageSize || string2Integer(request.query.size || '10');
if (index === null || index < 1) {
index = 1;
}
if (size === null || size < 10 || size > 100) {
size = 10;
}
return new Page(index, size);
},
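    // Example (sketch): for a request with query '?page=2&size=20', getPage
    // returns new Page(2, 20); a missing or out-of-range size (outside 10..100)
    // falls back to 10, and a missing or invalid page falls back to 1.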
isString: function (val) {
return typeof(val) === 'string';
},
isInteger: function (val) {
return (typeof(val) === 'number') && (val === Math.floor(val));
},
string2Integer: string2Integer
};
<file_sep>/www/init_db.sql
loading config_development...
configuration loaded:
{
"domain": "www.nbn8.com",
"theme": "default",
"session": {
"cookie": "isession",
"salt": "iTranswarp.js",
"expires": 604800,
"httpsForManagement": false
},
"db": {
"host": "localhost",
"port": 3306,
"user": "polysaas",
"password": "<PASSWORD>",
"database": "itranswarp",
"connectTimeout": 3000,
"connectionLimit": 20,
"acquireTimeout": 3000,
"queueLimit": 10
},
"cdn": {
"static_prefix": ""
},
"cache": {
"prefix": "it/",
"host": "127.0.0.1",
"port": 11211,
"timeout": 1000,
"retries": 3
},
"queue": {
"host": "127.0.0.1",
"port": 6379
},
"search": {
"provider": "site_search",
"configs": {
"search_url": "https://www.google.com/search?ie=utf-8&q=%s",
"domain": "www.example.com"
}
},
"oauth2": {
"weibo": {
"icon": "weibo",
"name": "新浪微博登录",
"app_key": "3273065943",
"app_secret": "f756a27fbb94ce8748c651a6d00b688e",
"redirect_uri": "http://www.nbn8.com/auth/callback/weibo"
},
"facebook": {
"icon": "facebook",
"name": "Facebook登录",
"app_key": "1229544160397328",
"app_secret": "<KEY>4cb52e",
"redirect_uri": "http://your-redirect-uri/config/in/facebook"
}
},
"END": "END",
"version": "1.0",
"build": "$BUILD$"
}
init mysql with mysql-warp...
[Pool@3eb1af39] Connection pool created.
found model: article
found model: asks
found model: attachment
found model: authuser
found model: board
found model: category
found model: comment
found model: localuser
found model: navigation
found model: random
found model: reply
found model: resource
found model: setting
found model: text
found model: topic
found model: user
found model: webpage
found model: wiki
found model: wikipage
-- Create administrator account:
Email: <file_sep>/www/controllers/attachmentApi.js
'use strict';
// attachment api
var
fs = require('fs'),
mime = require('mime'),
api = require('../api'),
db = require('../db'),
helper = require('../helper'),
constants = require('../constants'),
images = require('./_images'),
json_schema = require('../json_schema');
var
User = db.user,
Attachment = db.attachment,
Resource = db.resource,
warp = db.warp,
next_id = db.next_id;
function* $getAttachment(id) {
var atta = yield Attachment.$find(id);
if (atta === null) {
throw api.notFound('Attachment');
}
return atta;
}
function* $getAttachments(page) {
page.total = yield Attachment.$findNumber('count(id)');
if (page.isEmpty) {
return [];
}
return yield Attachment.$findAll({
offset: page.offset,
limit: page.limit,
order: 'created_at desc'
});
}
// create an attachment (and its backing resource), returning the new Attachment:
function* $createAttachment(user_id, name, description, buffer, mime, expectedImage) {
var
att_id = next_id(),
res_id = next_id(),
imageInfo = null;
try {
imageInfo = yield images.$getImageInfo(buffer);
}
catch (e) {
// not an image
console.log('attachment data is not an image.');
}
if (imageInfo !== null) {
if (['png', 'jpeg', 'gif'].indexOf(imageInfo.format) !== (-1)) {
mime = 'image/' + imageInfo.format;
}
else {
imageInfo = null;
}
}
if (imageInfo === null && expectedImage) {
throw api.invalidParam('image');
}
yield Resource.$create({
id: res_id,
ref_id: att_id,
value: buffer
});
return yield Attachment.$create({
id: att_id,
resource_id: res_id,
user_id: user_id,
size: buffer.length,
mime: mime,
meta: '',
width: imageInfo === null ? 0 : imageInfo.width,
height: imageInfo === null ? 0 : imageInfo.height,
name: name,
description: description
});
}
function* $downloadDefaultAttachment(id) {
yield $downloadAttachment.apply(this, [id, '0']);
}
function* $downloadAttachment(id, size) {
if ('0sml'.indexOf(size) === (-1)) {
this.status = 404;
return;
}
var
atta = yield $getAttachment(id),
mime = atta.mime,
resource = yield Resource.$find(atta.resource_id),
data, origin_width, origin_height, target_width, resize;
if (resource === null) {
throw api.notFound('Resource');
}
data = resource.value;
if (size !== '0') {
origin_width = atta.width;
origin_height = atta.height;
target_width = origin_width;
resize = false;
if (size === 's') {
// generate small image: 160 x N
if (origin_width > 160) {
target_width = 160;
resize = true;
}
} else if (size === 'm') {
// generate medium image: 320 x N
if (origin_width > 320) {
target_width = 320;
resize = true;
}
} else if (size === 'l') {
// generate large image: 640 x N
if (origin_width > 640) {
target_width = 640;
resize = true;
}
}
if (resize) {
data = yield images.$resizeKeepAspect(data, origin_width, origin_height, target_width, 0);
}
}
this.response.type = resize ? 'image/jpeg' : mime;
this.body = data;
}
module.exports = {
$getAttachment: $getAttachment,
$getAttachments: $getAttachments,
$createAttachment: $createAttachment,
'GET /files/attachments/:id': $downloadDefaultAttachment,
'GET /files/attachments/:id/:size': $downloadAttachment,
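    // e.g. (sketch) GET /files/attachments/<id>/m serves the image scaled down
    // to a maximum width of 320px; size '0' (or the route without :size) returns
    // the original data unchanged.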
'GET /api/attachments/:id': function* (id) {
/**
* Get attachment.
*
* @name Get Attachment
* @param {string} id: Id of the attachment.
* @return {object} Attachment object.
* @error {resource:notfound} Attachment was not found by id.
*/
this.body = yield $getAttachment(id);
},
'GET /api/attachments': function* () {
/**
* Get attachments by page.
*
* @name Get Attachments
* @param {number} [page=1]: The page number, starts from 1.
* @return {object} Attachment objects and page information.
*/
var
page = helper.getPage(this.request),
attachments = yield $getAttachments(page);
this.body = {
page: page,
attachments: attachments
};
},
'POST /api/attachments/:id/delete': function* (id) {
/**
* Delete an attachment by its id.
*
* @name Delete Attachment
* @param {string} id: The id of the attachment.
* @return {object} Object contains deleted id.
* @error {resource:notfound} Attachment was not found by id.
* @error {permission:denied} If current user has no permission.
*/
helper.checkPermission(this.request, constants.role.CONTRIBUTOR);
var
atta = yield $getAttachment(id);
if (this.request.user.role !== constants.role.ADMIN && this.request.user.id !== atta.user_id) {
throw api.notAllowed('Permission denied.');
}
// delete:
yield atta.$destroy();
// delete all resources:
yield warp.$update('delete from resources where ref_id=?', [id]);
this.body = {
id: id
};
},
'POST /api/attachments': function* () {
/**
* Create a new attachment.
*
* @name Create Attachment
* @param {string} [name]: Name of the attachment, default to filename.
* @param {string} [mime=null]: Mime of the attachment, e.g. "application/pdf", all lowercase.
* @param {string} [description]: Description of the attachment, default to ''.
* @param {data} data: File data as base64.
* @return {object} The created attachment object.
* @error {permission:denied} If current user has no permission.
*/
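        // Example request body (illustrative only; all values are made up):
        // {
        //     "name": "res-image.jpg",
        //     "description": "a cover image",
        //     "mime": "image/jpeg",
        //     "data": "<base64-encoded file content>"
        // }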
helper.checkPermission(this.request, constants.role.CONTRIBUTOR);
var
buffer,
data = this.request.body;
json_schema.validate('createAttachment', data);
buffer = new Buffer(data.data, 'base64');
this.body = yield $createAttachment(
this.request.user.id,
data.name.trim(),
data.description.trim(),
buffer,
data.mime || 'application/octet-stream',
false
);
}
};
<file_sep>/www/views/themes/default/index.html
{% extends '_base.html' %}
{% block title %}{{ _('Home') }}{% endblock %}
{% block head %}
{% set __sidebar_right__ = true %}
<style>
.x-index-visible {
display: block;
}
</style>
{% endblock %}
{% block content %}
<h3>{{ _('Hot Articles') }}</h3>
<div style="max-width:640px;" class="uk-slidenav-position" data-uk-slideshow="{autoplay:true,pauseOnHover:false,kenburns:true}">
<ul class="uk-slideshow uk-overlay-active">
{% for a in hotArticles %}
<li>
<img src="/files/attachments/{{ a.cover_id }}/l" width="640" height="360">
<div onclick="window.open('/article/{{ a.id }}')" style="cursor:pointer" class="uk-overlay-panel uk-overlay-background uk-overlay-bottom uk-overlay-slide-bottom">
<h3>{{ a.name }}</h3>
<p>{{ a.description }}</p>
</div>
</li>
{% endfor %}
</ul>
<a href="#0" class="uk-slidenav uk-slidenav-contrast uk-slidenav-previous" data-uk-slideshow-item="previous" style="color: rgba(255,255,255,0.4)"></a>
<a href="#0" class="uk-slidenav uk-slidenav-contrast uk-slidenav-next" data-uk-slideshow-item="next" style="color: rgba(255,255,255,0.4)"></a>
</div>
{% endblock %}
{% block sidebar_right_content %}
<div class="uk-margin">
<h3>{{ _('Recent Articles') }}</h3>
<ul class="uk-nav">
{% for a in recentArticles %}
<li><a href="/article/{{ a.id }}" target="_blank">{{ a.name }}</a></li>
{% endfor %}
</ul>
</div>
{% endblock %}
<file_sep>/www/controllers/articleApi.js
'use strict';
// article api
var
_ = require('lodash'),
api = require('../api'),
db = require('../db'),
cache = require('../cache'),
images = require('./_images'),
helper = require('../helper'),
constants = require('../constants'),
search = require('../search/search'),
json_schema = require('../json_schema');
var
settingApi = require('./settingApi'),
categoryApi = require('./categoryApi'),
attachmentApi = require('./attachmentApi');
var
User = db.user,
Article = db.article,
Category = db.category,
Text = db.text,
warp = db.warp,
next_id = db.next_id;
function indexArticle(r) {
process.nextTick(function () {
search.engine.index({
type: 'article',
id: r.id,
tags: r.tags,
name: r.name,
description: r.description,
content: helper.html2text(helper.md2html(r.content)),
created_at: r.publish_at,
updated_at: r.updated_at,
url: '/article/' + r.id,
upvotes: 0
});
});
}
function unindexArticle(r) {
process.nextTick(function () {
search.engine.unindex({
id: r.id
});
});
}
function* $getRecentArticles(max) {
var now = Date.now();
return yield Article.$findAll({
where: 'publish_at<?',
order: 'publish_at desc',
params: [now],
offset: 0,
limit: max
});
}
function* $getAllArticles(page) {
page.total = yield Article.$findNumber('count(id)');
if (page.isEmpty) {
return [];
}
return yield Article.$findAll({
offset: page.offset,
limit: page.limit,
order: 'publish_at desc'
});
}
function* $getArticles(page) {
var now = Date.now();
page.total = yield Article.$findNumber({
select: 'count(id)',
where: 'publish_at<?',
params: [now]
});
if (page.isEmpty) {
return [];
}
return yield Article.$findAll({
offset: page.offset,
limit: page.limit,
order: 'publish_at desc'
});
}
function* $getArticlesByCategory(categoryId, page) {
var now = Date.now();
page.total = yield Article.$findNumber({
select: 'count(id)',
where: 'publish_at<? and category_id=?',
params: [now, categoryId]
});
if (page.isEmpty) {
return [];
}
return yield Article.$findAll({
order: 'publish_at desc',
where: 'publish_at<? and category_id=?',
params: [now, categoryId],
offset: page.offset,
limit: page.limit
});
}
function* $getArticle(id, includeContent) {
var
text,
article = yield Article.$find(id);
if (article === null) {
throw api.notFound('Article');
}
if (includeContent) {
text = yield Text.$find(article.content_id);
if (text === null) {
throw api.notFound('Text');
}
article.content = text.value;
}
return article;
}
function toRssDate(dt) {
return new Date(dt).toGMTString();
}
function* $getFeed(domain) {
var
i, text, article, url,
articles = yield $getRecentArticles(20),
last_publish_at = articles.length === 0 ? 0 : articles[0].publish_at,
website = yield settingApi.$getWebsiteSettings(),
rss = [],
rss_footer = '</channel></rss>';
rss.push('<?xml version="1.0"?>\n');
rss.push('<rss version="2.0"><channel><title><![CDATA[');
rss.push(website.name);
rss.push(']]></title><link>http://');
rss.push(domain);
rss.push('/</link><description><![CDATA[');
rss.push(website.description);
rss.push(']]></description><lastBuildDate>');
rss.push(toRssDate(last_publish_at));
rss.push('</lastBuildDate><generator>iTranswarp.js</generator><ttl>3600</ttl>');
if (articles.length === 0) {
rss.push(rss_footer);
}
else {
for (i=0; i<articles.length; i++) {
article = articles[i];
text = yield Text.$find(article.content_id);
url = 'http://' + domain + '/article/' + article.id;
rss.push('<item><title><![CDATA[');
rss.push(article.name);
rss.push(']]></title><link>');
rss.push(url);
rss.push('</link><guid>');
rss.push(url);
rss.push('</guid><author><![CDATA[');
rss.push(article.user_name);
rss.push(']]></author><pubDate>');
rss.push(toRssDate(article.publish_at));
rss.push('</pubDate><description><![CDATA[');
rss.push(helper.md2html(text.value, true));
rss.push(']]></description></item>');
}
rss.push(rss_footer);
}
return rss.join('');
}
var RE_TIMESTAMP = /^\-?[0-9]{1,13}$/;
module.exports = {
$getRecentArticles: $getRecentArticles,
$getArticlesByCategory: $getArticlesByCategory,
$getAllArticles: $getAllArticles,
$getArticles: $getArticles,
$getArticle: $getArticle,
'GET /feed': function* () {
var
rss,
host = this.request.host,
gf = function* () {
return yield $getFeed(host);
};
rss = yield cache.$get('cached_rss', gf);
this.set('Cache-Control', 'max-age: 3600');
this.type = 'text/xml';
this.body = rss;
},
'GET /api/articles/:id': function* (id) {
/**
* Get article.
*
* @name Get Article
* @param {string} id: Id of the article.
* @param {string} [format]: Return html if format is 'html', default to '' (raw).
* @return {object} Article object.
* @error {resource:notfound} Article was not found by id.
*/
var article = yield $getArticle(id, true);
if (article.publish_at > Date.now() && (this.request.user===null || this.request.user.role > constants.role.CONTRIBUTOR)) {
throw api.notFound('Article');
}
if (this.request.query.format === 'html') {
article.content = helper.md2html(article.content, true);
}
this.body = article;
},
'GET /api/articles': function* () {
/**
* Get articles by page.
*
* @name Get Articles
* @param {number} [page=1]: The page number, starts from 1.
* @return {object} Article objects and page information.
*/
helper.checkPermission(this.request, constants.role.CONTRIBUTOR);
var
page = helper.getPage(this.request),
articles = yield $getAllArticles(page);
this.body = {
page: page,
articles: articles
};
},
'POST /api/articles': function* () {
/**
* Create a new article.
*
* @name Create Article
* @param {string} category_id: Id of the category that article belongs to.
* @param {string} name: Name of the article.
* @param {string} description: Description of the article.
* @param {string} content: Content of the article.
         * @param {string} [tags]: Tags of the article, separated by ','.
* @param {string} [publish_at]: Publish time of the article with format 'yyyy-MM-dd HH:mm:ss', default to current time.
* @param {image} [image]: Base64 encoded image to upload as cover image.
* @return {object} The created article object.
* @error {parameter:invalid} If some parameter is invalid.
* @error {permission:denied} If current user has no permission.
*/
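        // Example request body (illustrative only; ids, tags and image data are
        // made up):
        // {
        //     "category_id": "<an existing category id>",
        //     "name": "Hello World",
        //     "description": "a short summary",
        //     "tags": "aaa,bbb",
        //     "content": "Long content...",
        //     "image": "<base64-encoded cover image>"
        // }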
helper.checkPermission(this.request, constants.role.EDITOR);
var
text,
article,
attachment,
article_id,
content_id,
data = this.request.body;
json_schema.validate('createArticle', data);
// check category id:
yield categoryApi.$getCategory(data.category_id);
attachment = yield attachmentApi.$createAttachment(
this.request.user.id,
data.name.trim(),
data.description.trim(),
new Buffer(data.image, 'base64'),
null,
true);
content_id = next_id();
article_id = next_id();
text = yield Text.$create({
id: content_id,
ref_id: article_id,
value: data.content
});
article = yield Article.$create({
id: article_id,
user_id: this.request.user.id,
user_name: this.request.user.name,
category_id: data.category_id,
cover_id: attachment.id,
content_id: content_id,
name: data.name.trim(),
description: data.description.trim(),
tags: helper.formatTags(data.tags),
publish_at: (data.publish_at === undefined ? Date.now() : data.publish_at)
});
article.content = data.content;
indexArticle(article);
this.body = article;
},
'POST /api/articles/:id': function* (id) {
/**
* Update an exist article.
*
* @name Update Article
* @param {string} id: Id of the article.
* @param {string} [category_id]: Id of the category that article belongs to.
* @param {string} [name]: Name of the article.
* @param {string} [description]: Description of the article.
* @param {string} [content]: Content of the article.
         * @param {string} [tags]: Tags of the article, separated by ','.
* @param {string} [publish_at]: Publish time of the article with format 'yyyy-MM-dd HH:mm:ss'.
* @return {object} The updated article object.
* @error {resource:notfound} Article was not found by id.
* @error {parameter:invalid} If some parameter is invalid.
* @error {permission:denied} If current user has no permission.
*/
helper.checkPermission(this.request, constants.role.EDITOR);
var
user = this.request.user,
article,
props = [],
text,
attachment,
data = this.request.body;
json_schema.validate('updateArticle', data);
article = yield $getArticle(id);
if (user.role !== constants.role.ADMIN && user.id !== article.user_id) {
throw api.notAllowed('Permission denied.');
}
if (data.category_id) {
yield categoryApi.$getCategory(data.category_id);
article.category_id = data.category_id;
props.push('category_id');
}
if (data.name) {
article.name = data.name.trim();
props.push('name');
}
if (data.description) {
article.description = data.description.trim();
props.push('description');
}
if (data.tags) {
article.tags = helper.formatTags(data.tags);
props.push('tags');
}
if (data.publish_at !== undefined) {
article.publish_at = data.publish_at;
props.push('publish_at');
}
if (data.image) {
// check image:
attachment = yield attachmentApi.$createAttachment(
user.id,
article.name,
article.description,
new Buffer(data.image, 'base64'),
null,
true);
article.cover_id = attachment.id;
props.push('cover_id');
}
if (data.content) {
text = yield Text.$create({
ref_id: article.id,
value: data.content
});
article.content_id = text.id;
article.content = data.content;
props.push('content_id');
}
if (props.length > 0) {
props.push('updated_at');
props.push('version');
yield article.$update(props);
}
if (!article.content) {
text = yield Text.$find(article.content_id);
article.content = text.value;
}
this.body = article;
},
'POST /api/articles/:id/delete': function* (id) {
/**
* Delete an article.
*
* @name Delete Article
* @param {string} id: Id of the article.
* @return {object} Object contains deleted id.
* @error {resource:notfound} Article not found by id.
* @error {permission:denied} If current user has no permission.
*/
helper.checkPermission(this.request, constants.role.EDITOR);
var
user = this.request.user,
article = yield $getArticle(id);
if (user.role !== constants.role.ADMIN && user.id !== article.user_id) {
throw api.notAllowed('Permission denied.');
}
yield article.$destroy();
yield warp.$update('delete from texts where ref_id=?', [id]);
this.body = {
id: id
};
}
};
<file_sep>/www/models/comment.js
'use strict';
// comment.js
var base = require('./_base.js');
module.exports = function (warp) {
return base.defineModel(warp, 'Comment', [
base.column_varchar_50('ref_type'),
base.column_id('ref_id'),
base.column_id('user_id'),
base.column_varchar_100('user_name'),
base.column_varchar_1000('user_image_url'),
base.column_varchar_1000('content')
], {
table: 'comments'
});
};
<file_sep>/www/controllers/webpageApi.js
'use strict';
// static webpage api
var
_ = require('lodash'),
api = require('../api'),
db = require('../db'),
helper = require('../helper'),
constants = require('../constants'),
json_schema = require('../json_schema');
var
User = db.user,
Webpage = db.webpage,
Text = db.text,
warp = db.warp,
next_id = db.next_id;
function* $checkAliasAvailable(alias) {
var p = yield Webpage.$find({
where: 'alias=?',
params: [alias]
});
if (p !== null) {
throw api.invalidParam('alias', 'duplicate alias');
}
}
function* $getWebpages() {
return yield Webpage.$findAll({
order: 'alias'
});
}
function* $getWebpage(id, includeContent) {
var
text,
p = yield Webpage.$find(id);
if (p === null) {
throw api.notFound('Webpage');
}
if (includeContent) {
text = yield Text.$find(p.content_id);
p.content = text.value;
}
return p;
}
function* $getWebpageByAlias(alias, includeContent) {
var
text,
p = yield Webpage.$find({
where: 'alias=?',
params: [alias]
});
if (p === null) {
throw api.notFound('Webpage');
}
if (includeContent) {
text = yield Text.$find(p.content_id);
p.content = text.value;
}
return p;
}
function* $getNavigationMenus() {
var ps = yield $getWebpages();
return _.map(ps, function (p) {
return {
name: p.name,
url: '/webpage/' + p.alias
};
});
}
module.exports = {
$getNavigationMenus: $getNavigationMenus,
$getWebpage: $getWebpage,
$getWebpages: $getWebpages,
$getWebpageByAlias: $getWebpageByAlias,
'GET /api/webpages/:id': function* (id) {
/**
* Get webpage by id.
*
* @name Get Page
* @param {string} id - The id of the Webpage.
* @return {object} Webpage object.
*/
this.body = yield $getWebpage(id, true);
},
'GET /api/webpages': function* () {
/**
* Get all Webpages object (but no content value).
*
* @name Get Webpages
* @return {object} Result as {"webpages": [{webpage}, {webpage}...]}
*/
this.body = {
webpages: yield $getWebpages()
};
},
'POST /api/webpages': function* () {
/**
* Create a new webpage.
*
         * @name Create Webpage
* @param {string} name: The name of the webpage.
* @param {string} alias: The alias of the webpage.
* @param {string} content: The content of the webpage.
* @param {boolean} [draft=false]: The draft status of the webpage, default to false.
         * @param {string} [tags]: The tags of the webpage, separated by ','.
* @return {object} The created webpage object.
*/
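        // Example request body (illustrative only; all values are made up):
        // {
        //     "name": "About",
        //     "alias": "about",
        //     "content": "<h1>About this site</h1>",
        //     "draft": false,
        //     "tags": "site,info"
        // }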
helper.checkPermission(this.request, constants.role.ADMIN);
var
content_id,
webpage_id,
text,
webpage,
data = this.request.body;
json_schema.validate('createWebpage', data);
data.name = data.name.trim();
data.tags = helper.formatTags(data.tags);
yield $checkAliasAvailable(data.alias);
content_id = next_id();
webpage_id = next_id();
text = yield Text.$create({
id: content_id,
ref_id: webpage_id,
value: data.content
});
webpage = yield Webpage.$create({
id: webpage_id,
alias: data.alias,
content_id: content_id,
name: data.name,
tags: data.tags,
draft: data.draft
});
// attach content:
webpage.content = data.content;
this.body = webpage;
},
'POST /api/webpages/:id': function* (id) {
/**
* Update webpage by id.
*
* @name Update Page
* @param {string} id: The id of the webpage.
* @param {string} [name]: The name of the webpage.
* @param {string} [alias]: The alias of the webpage.
* @param {string} [content]: The content of the webpage.
* @param {boolean} [draft]: The draft status of the webpage.
         * @param {string} [tags]: The tags of the webpage, separated by ','.
* @return {object} Updated webpage object.
*/
helper.checkPermission(this.request, constants.role.ADMIN);
var
content_id = null,
webpage,
text,
props = [],
data = this.request.body;
json_schema.validate('updateWebpage', data);
webpage = yield $getWebpage(id);
if (data.alias && data.alias!==webpage.alias) {
yield $checkAliasAvailable(data.alias);
webpage.alias = data.alias;
props.push('alias');
}
if (data.name) {
webpage.name = data.name.trim();
props.push('name');
}
if (data.tags) {
webpage.tags = helper.formatTags(data.tags);
props.push('tags');
}
if (data.draft!==undefined) {
webpage.draft = data.draft;
props.push('draft');
}
if (data.content) {
content_id = next_id();
webpage.content_id = content_id;
props.push('content_id');
// update content
yield Text.$create({
id: content_id,
ref_id: id,
value: data.content
});
}
if (props.length > 0) {
props.push('updated_at');
props.push('version');
yield webpage.$update(props);
}
// attach content:
if (content_id) {
webpage.content = data.content;
}
else {
text = yield Text.$find(webpage.content_id);
webpage.content = text.value;
}
this.body = webpage;
},
'POST /api/webpages/:id/delete': function* (id) {
/**
* Delete a webpage by its id.
*
* @name Delete Page
* @param {string} id - The id of the webpage.
* @return {object} Results contains id of the webpage, e.g. {"id": "12345"}
*/
helper.checkPermission(this.request, constants.role.ADMIN);
var webpage = yield $getWebpage(id);
yield webpage.$destroy();
// delete all texts:
        yield warp.$update('delete from texts where ref_id=?', [id]);
this.body = {
id: id
};
}
};
<file_sep>/www/test/test_setting_api.js
'use strict';
// test setting api:
var
_ = require('lodash'),
should = require('should'),
remote = require('./_remote'),
settingApi = require('../controllers/settingApi');
describe('#settings', function () {
before(remote.setup);
describe('#settingApi', function () {
it('should get empty setting', function* () {
var r = yield settingApi.$getSetting('group:key');
should(r===null).be.ok;
});
it('should get default setting value', function* () {
var r = yield settingApi.$getSetting('group:key', 'the-DEFAULT');
should(r==='the-DEFAULT').be.ok;
var r2 = yield settingApi.$getSetting('group:key', '');
should(r2==='').be.ok;
});
it('set setting with invalid key', function* () {
try {
yield settingApi.$setSetting('g1---k1', 'VALUE-001');
throw 'failed';
}
catch (e) {
should(e.error).be.ok;
e.error.should.equal('parameter:invalid');
e.data.should.equal('key');
}
});
it('set and get a setting', function* () {
yield settingApi.$setSetting('g1:k1', 'VALUE-001');
var r = yield settingApi.$getSetting('g1:k1', 'default');
should(r).be.ok;
r.should.equal('VALUE-001');
});
it('set and get settings', function* () {
var i, r;
for (i=0; i<9; i++) {
yield settingApi.$setSetting('web:key_' + i, 'VALUE--' + i);
}
r = yield settingApi.$getSettings('web');
should(r).be.ok;
r.key_0.should.equal('VALUE--0');
r.key_1.should.equal('VALUE--1');
r.key_2.should.equal('VALUE--2');
r.key_3.should.equal('VALUE--3');
r.key_4.should.equal('VALUE--4');
r.key_5.should.equal('VALUE--5');
r.key_6.should.equal('VALUE--6');
r.key_7.should.equal('VALUE--7');
r.key_8.should.equal('VALUE--8');
should(r.key_9===undefined).be.ok;
});
it('set settings', function* () {
var s = {
s1: '1+1',
s2: '2+2',
s3: '3+3'
};
yield settingApi.$setSettings('sss', s);
// get settings:
var r = yield settingApi.$getSettingsByDefaults('sss', {
s1: 'x1',
s2: 'x2',
s3: 'x3',
s4: 'x4'
});
r.s1.should.equal('1+1');
r.s2.should.equal('2+2');
r.s3.should.equal('3+3');
r.s4.should.equal('x4');
});
});
});
<file_sep>/www/views/themes/default/signin.html
{% extends '_base.html' %}
{% block title %}{{ _('Sign In') }}{% endblock %}
{% block head %}
<script>
$(function () {
showSignin(true);
});
</script>
{% endblock %}
{% block content %}
<h3>Please sign in:</h3>
{% endblock %}
<file_sep>/www/static/js/tools/bug_jira.js
/**
* Created by <EMAIL> on 2015/10/19.
*/
var myAppModule = angular.module('myAppModule',[]);
myAppModule.controller('myAppController', function($scope,calculateService,$http){
$scope.quantity = 1;
$scope.quantityResult = 0;
$scope.commitType = "Bug";
$scope.calculateQuantity = function(){
$scope.quantityResult = calculateService.calculate($scope.quantity,10);
};
$scope.clearForm = function(){
$scope.bug = "";
$scope.title = "";
$scope.rootCause = "";
$scope.solution = "";
$scope.svn = "";
}
$scope.crchange = function(opt){
$(".code-review-show ").text(opt);
}
$scope.fetchJiraData = function(opt){
$http({
method: 'post',
url: '/api/jira/fetchData'
}).success(function(data, status, headers, config) {
console.log("fetchData success");
}).error(function(data, status, headers, config) {
console.log("fetchData fail");
});
}
});
myAppModule.factory('calculateService', function(){
return {
calculate:function(xval, yval){
return xval * yval;
}
}
})<file_sep>/www/models/authuser.js
'use strict';
// authuser.js
var base = require('./_base.js');
module.exports = function (warp) {
return base.defineModel(warp, 'AuthUser', [
base.column_id('user_id'),
base.column_varchar_50('auth_provider'),
base.column_varchar_100('auth_id', { unique: true }),
base.column_varchar_500('auth_token'),
base.column_bigint('expires_at')
], {
table: 'authusers'
});
};
<file_sep>/www/controllers/updateApi.js
'use strict';
var
_ = require('lodash');
function * $update(){
}

// NOTE: this controller is still a stub; nothing is exported yet, so no routes
// are registered for it.
<file_sep>/www/test/test_discuss_api.js
'use strict';
// test discuss api:
var
_ = require('lodash'),
fs = require('fs'),
co = require('co'),
should = require('should'),
discussApi = require('../controllers/discussApi'),
Page = require('../page'),
remote = require('./_remote'),
constants = require('../constants'),
roles = constants.role;
describe('#discuss', function () {
before(remote.setup);
describe('#discuss-api', function () {
it('should get boards failed for no permission', function* () {
            var r = yield remote.$get(roles.CONTRIBUTOR, '/api/boards');
remote.shouldHasError(r, 'permission:denied');
});
it('should get empty boards', function* () {
var r = yield remote.$get(roles.EDITOR, '/api/boards');
remote.shouldNoError(r);
should(r.boards).be.ok;
r.boards.should.be.an.Array.and.have.length(0);
});
it('create board failed for no permission', function* () {
var r = yield remote.$post(roles.EDITOR, '/api/boards', {
name: 'Try create board...',
description: 'blablabla...',
tag: 'js'
});
remote.shouldHasError(r, 'permission:denied');
});
it('create board failed for invalid parameter', function* () {
var r1 = yield remote.$post(roles.ADMIN, '/api/boards', {
// tag: missing
name: 'Try create board...',
description: 'blablabla...',
});
remote.shouldHasError(r1, 'parameter:invalid', 'tag');
var r2 = yield remote.$post(roles.ADMIN, '/api/boards', {
tag: 'js',
// name: missing
description: 'blablabla...',
});
remote.shouldHasError(r2, 'parameter:invalid', 'name');
});
it('create board ok then update it', function* () {
var r = yield remote.$post(roles.ADMIN, '/api/boards', {
tag: 'js',
name: 'JavaScript HOWTO',
description: 'a javascript discuss board'
});
remote.shouldNoError(r);
r.tag.should.equal('js');
r.name.should.equal('JavaScript HOWTO');
r.description.should.equal('a javascript discuss board');
r.topics.should.equal(0);
r.display_order.should.equal(0);
// update by editor failed:
var r2 = yield remote.$post(roles.EDITOR, '/api/boards/' + r.id, {
tag: 'nodejs',
name: 'try change'
});
remote.shouldHasError(r2, 'permission:denied');
// update by admin ok:
var r3 = yield remote.$post(roles.ADMIN, '/api/boards/' + r.id, {
tag: 'nodejs',
name: 'try change'
});
remote.shouldNoError(r3);
r3.tag.should.equal('nodejs');
r3.name.should.equal('try change');
r3.version.should.equal(1);
// get board:
var r4 = yield remote.$get(roles.EDITOR, '/api/boards/' + r.id);
remote.shouldNoError(r4);
r4.tag.should.equal('nodejs');
r4.name.should.equal('try change');
            r4.description.should.equal('a javascript discuss board');
r4.topics.should.equal(0);
r4.version.should.equal(1);
});
it('create topic failed for no permission', function* () {
// prepare board:
var b = yield remote.$post(roles.ADMIN, '/api/boards', {
tag: 'test',
name: 'test topic',
description: 'test for topic'
});
remote.shouldNoError(b);
// try create topic:
var r = yield remote.$post(roles.GUEST, '/api/boards/' + b.id + '/topics', {
name: 'try post a topic but should failed',
content: 'not signin yet...'
});
remote.shouldHasError(r, 'permission:denied');
});
it('create topic failed for invalid parameters', function* () {
// prepare board:
var b = yield remote.$post(roles.ADMIN, '/api/boards', {
tag: 'test',
name: 'test topic parameters',
description: 'test for topic parameters'
});
remote.shouldNoError(b);
// try create topic:
var r1 = yield remote.$post(roles.SUBSCRIBER, '/api/boards/' + b.id + '/topics', {
// name: missing
content: 'not signin yet...'
});
remote.shouldHasError(r1, 'parameter:invalid', 'name');
var r2 = yield remote.$post(roles.SUBSCRIBER, '/api/boards/' + b.id + '/topics', {
name: 'try post a topic but should failed',
//content: missing
});
remote.shouldHasError(r2, 'parameter:invalid', 'content');
});
it('create topic ok and delete topic', function* () {
// prepare board:
var b = yield remote.$post(roles.ADMIN, '/api/boards', {
tag: 'test',
name: 'test topic ok',
description: 'test for topic ok'
});
remote.shouldNoError(b);
b.topics.should.equal(0);
// create 11 topics:
var i, t;
for (i=1; i<=11; i++) {
yield remote.$sleep(2);
t = yield remote.$post(roles.SUBSCRIBER, '/api/boards/' + b.id + '/topics', {
name: 'topic-' + i,
content: 'topic-' + i + ':<script>alert(x)</script>',
tags: 's' + i
});
remote.shouldNoError(t);
t.name.should.equal('topic-' + i);
t.tags.should.equal('s' + i);
t.content.should.equal('<p>topic-' + i + ':<script>alert(x)</script></p>\n');
t.replies.should.equal(0);
}
// check topics number:
var b2 = yield remote.$get(roles.EDITOR, '/api/boards/' + b.id);
remote.shouldNoError(b2);
b2.topics.should.equal(11);
// query by page:
var p1 = yield remote.$get(roles.EDITOR, '/api/boards/' + b.id + '/topics', {
page: 1,
size: 10
});
remote.shouldNoError(p1);
p1.page.total.should.equal(11);
p1.page.index.should.equal(1);
p1.topics.should.be.an.Array.and.have.length(10);
p1.topics[0].name.should.equal('topic-11');
p1.topics[1].name.should.equal('topic-10');
p1.topics[9].name.should.equal('topic-2');
// page 2:
var p2 = yield remote.$get(roles.EDITOR, '/api/boards/' + b.id + '/topics', {
page: 2,
size: 10
});
remote.shouldNoError(p2);
p2.page.total.should.equal(11);
p2.page.index.should.equal(2);
p2.topics.should.be.an.Array.and.have.length(1);
p2.topics[0].name.should.equal('topic-1');
});
it('create reply failed for no permission and invalid parameters', function* () {
// prepare board:
var b = yield remote.$post(roles.ADMIN, '/api/boards', {
tag: 'test',
name: 'test reply',
description: 'test for reply'
});
remote.shouldNoError(b);
// prepare topic:
var t = yield remote.$post(roles.SUBSCRIBER, '/api/boards/' + b.id + '/topics', {
name: 'topic-for-reply',
content: 'this is test topic...',
tags: 'ttt'
});
remote.shouldNoError(t);
// create reply failed for no permission:
var r = yield remote.$post(roles.GUEST, '/api/topics/' + t.id + '/replies', {
content: 'try reply...'
});
remote.shouldHasError(r, 'permission:denied');
// create reply failed for invalid parameter:
var r = yield remote.$post(roles.SUBSCRIBER, '/api/topics/' + t.id + '/replies', {
});
remote.shouldHasError(r, 'parameter:invalid', 'content');
});
it('create reply ok and delete reply', function* () {
// prepare board:
var b = yield remote.$post(roles.ADMIN, '/api/boards', {
tag: 'test',
name: '<NAME>',
description: 'test for reply'
});
remote.shouldNoError(b);
// prepare topic:
var t = yield remote.$post(roles.SUBSCRIBER, '/api/boards/' + b.id + '/topics', {
name: 'topic-for-reply',
content: 'this is test topic...',
tags: 'ttt'
});
remote.shouldNoError(t);
// create 11 replies ok:
var i, r;
for (i=1; i<=11; i++) {
yield remote.$sleep(2);
r = yield remote.$post(roles.SUBSCRIBER, '/api/topics/' + t.id + '/replies', {
content: 'reply-' + i + ':<script>cannot run</script>'
});
remote.shouldNoError(r);
r.topic_id.should.equal(t.id);
r.content.should.equal('<p>reply-' + i + ':<script>cannot run</script></p>\n');
}
// query replies:
// page 1:
var p1 = new Page(1, 10);
var rs1 = yield discussApi.$getReplies(t.id, p1);
p1.total.should.equal(12); // 1 topic + 11 replies
rs1.should.be.an.Array.and.have.length(9); // 1 topic + 9 replies
rs1[0].content.should.equal('<p>reply-1:<script>cannot run</script></p>\n');
rs1[1].content.should.equal('<p>reply-2:<script>cannot run</script></p>\n');
rs1[8].content.should.equal('<p>reply-9:<script>cannot run</script></p>\n');
// page 2:
var p2 = new Page(2, 10);
var rs2 = yield discussApi.$getReplies(t.id, p2);
p2.total.should.equal(12);
rs2.should.be.an.Array.and.have.length(2); // 2 replies
rs2[0].content.should.equal('<p>reply-10:<script>cannot run</script></p>\n');
rs2[1].content.should.equal('<p>reply-11:<script>cannot run</script></p>\n');
});
});
});
<file_sep>/www/controllers/askApi.js
'use strict';
// ask api
var
_ = require('lodash'),
api = require('../api'),
db = require('../db'),
cache = require('../cache'),
images = require('./_images'),
helper = require('../helper'),
constants = require('../constants'),
search = require('../search/search'),
json_schema = require('../json_schema');
var
settingApi = require('./settingApi'),
categoryApi = require('./categoryApi'),
attachmentApi = require('./attachmentApi');
var
User = db.user,
Ask = db.ask,
Category = db.category,
Text = db.text,
warp = db.warp,
next_id = db.next_id;
function indexAsk(r) {
process.nextTick(function () {
search.engine.index({
type: 'ask',
id: r.id,
tags: r.tags,
name: r.name,
description: r.description,
content: helper.html2text(helper.md2html(r.content)),
created_at: r.publish_at,
updated_at: r.updated_at,
url: '/ask/' + r.id,
upvotes: 0
});
});
}
function unindexAsk(r) {
process.nextTick(function () {
search.engine.unindex({
id: r.id
});
});
}
function* $getRecentAsks(max) {
var now = Date.now();
return yield Ask.$findAll({
where: 'publish_at<?',
order: 'publish_at desc',
params: [now],
offset: 0,
limit: max
});
}
function* $getAllAsks(page) {
page.total = yield Ask.$findNumber('count(id)');
if (page.isEmpty) {
return [];
}
return yield Ask.$findAll({
offset: page.offset,
limit: page.limit,
order: 'publish_at desc'
});
}
function* $getAsks(page) {
var now = Date.now();
page.total = yield Ask.$findNumber({
select: 'count(id)',
where: 'publish_at<?',
params: [now]
});
if (page.isEmpty) {
return [];
}
return yield Ask.$findAll({
offset: page.offset,
limit: page.limit,
order: 'publish_at desc'
});
}
function* $getAsksByCategory(categoryId, page) {
var now = Date.now();
page.total = yield Ask.$findNumber({
select: 'count(id)',
where: 'publish_at<? and category_id=?',
params: [now, categoryId]
});
if (page.isEmpty) {
return [];
}
return yield Ask.$findAll({
order: 'publish_at desc',
where: 'publish_at<? and category_id=?',
params: [now, categoryId],
offset: page.offset,
limit: page.limit
});
}
function* $getAsk(id, includeContent) {
var
text,
ask = yield Ask.$find(id);
if (ask === null) {
throw api.notFound('Ask');
}
if (includeContent) {
text = yield Text.$find(ask.content_id);
if (text === null) {
throw api.notFound('Text');
}
ask.content = text.value;
}
return ask;
}
function toRssDate(dt) {
return new Date(dt).toGMTString();
}
function* $getFeed(domain) {
var
i, text, ask, url,
asks = yield $getRecentAsks(20),
last_publish_at = asks.length === 0 ? 0 : asks[0].publish_at,
website = yield settingApi.$getWebsiteSettings(),
rss = [],
rss_footer = '</channel></rss>';
rss.push('<?xml version="1.0"?>\n');
rss.push('<rss version="2.0"><channel><title><![CDATA[');
rss.push(website.name);
rss.push(']]></title><link>http://');
rss.push(domain);
rss.push('/</link><description><![CDATA[');
rss.push(website.description);
rss.push(']]></description><lastBuildDate>');
rss.push(toRssDate(last_publish_at));
rss.push('</lastBuildDate><generator>iTranswarp.js</generator><ttl>3600</ttl>');
if (asks.length === 0) {
rss.push(rss_footer);
}
else {
for (i=0; i<asks.length; i++) {
ask = asks[i];
text = yield Text.$find(ask.content_id);
url = 'http://' + domain + '/ask/' + ask.id;
rss.push('<item><title><![CDATA[');
rss.push(ask.name);
rss.push(']]></title><link>');
rss.push(url);
rss.push('</link><guid>');
rss.push(url);
rss.push('</guid><author><![CDATA[');
rss.push(ask.user_name);
rss.push(']]></author><pubDate>');
rss.push(toRssDate(ask.publish_at));
rss.push('</pubDate><description><![CDATA[');
rss.push(helper.md2html(text.value, true));
rss.push(']]></description></item>');
}
rss.push(rss_footer);
}
return rss.join('');
}
var RE_TIMESTAMP = /^\-?[0-9]{1,13}$/;
module.exports = {
$getRecentAsks: $getRecentAsks,
$getAsksByCategory: $getAsksByCategory,
$getAllAsks: $getAllAsks,
$getAsks: $getAsks,
$getAsk: $getAsk,
'GET /feed': function* () {
var
rss,
host = this.request.host,
gf = function* () {
return yield $getFeed(host);
};
rss = yield cache.$get('cached_rss', gf);
this.set('Cache-Control', 'max-age=3600');
this.type = 'text/xml';
this.body = rss;
},
'GET /api/asks/:id': function* (id) {
/**
* Get ask.
*
* @name Get Ask
* @param {string} id: Id of the ask.
* @param {string} [format]: Return html if format is 'html', default to '' (raw).
* @return {object} Ask object.
* @error {resource:notfound} Ask was not found by id.
*/
var ask = yield $getAsk(id, true);
if (ask.publish_at > Date.now() && (this.request.user===null || this.request.user.role > constants.role.CONTRIBUTOR)) {
throw api.notFound('Ask');
}
if (this.request.query.format === 'html') {
ask.content = helper.md2html(ask.content, true);
}
this.body = ask;
},
'GET /api/asks': function* () {
/**
* Get asks by page.
*
* @name Get Asks
* @param {number} [page=1]: The page number, starts from 1.
* @return {object} Ask objects and page information.
*/
helper.checkPermission(this.request, constants.role.CONTRIBUTOR);
var
page = helper.getPage(this.request),
asks = yield $getAllAsks(page);
this.body = {
page: page,
asks: asks
};
},
'POST /api/asks': function* () {
/**
* Create a new ask.
*
* @name Create Ask
* @param {string} category_id: Id of the category that ask belongs to.
* @param {string} name: Name of the ask.
* @param {string} description: Description of the ask.
* @param {string} content: Content of the ask.
* @param {string} [tags]: Tags of the ask, separated by ','.
* @param {string} [publish_at]: Publish time of the ask with format 'yyyy-MM-dd HH:mm:ss', default to current time.
* @param {image} [image]: Base64 encoded image to upload as cover image.
* @return {object} The created ask object.
* @error {parameter:invalid} If some parameter is invalid.
* @error {permission:denied} If current user has no permission.
*/
helper.checkPermission(this.request, constants.role.EDITOR);
var
text,
ask,
attachment,
ask_id,
content_id,
data = this.request.body;
json_schema.validate('createAsk', data);
// check category id:
yield categoryApi.$getCategory(data.category_id);
attachment = yield attachmentApi.$createAttachment(
this.request.user.id,
data.name.trim(),
data.description.trim(),
new Buffer(data.image, 'base64'),
null,
true);
content_id = next_id();
ask_id = next_id();
text = yield Text.$create({
id: content_id,
ref_id: ask_id,
value: data.content
});
ask = yield Ask.$create({
id: ask_id,
user_id: this.request.user.id,
user_name: this.request.user.name,
category_id: data.category_id,
cover_id: attachment.id,
content_id: content_id,
name: data.name.trim(),
description: data.description.trim(),
tags: helper.formatTags(data.tags),
publish_at: (data.publish_at === undefined ? Date.now() : data.publish_at)
});
ask.content = data.content;
indexAsk(ask);
this.body = ask;
},
'POST /api/asks/:id': function* (id) {
/**
* Update an exist ask.
*
* @name Update Ask
* @param {string} id: Id of the ask.
* @param {string} [category_id]: Id of the category that ask belongs to.
* @param {string} [name]: Name of the ask.
* @param {string} [description]: Description of the ask.
* @param {string} [content]: Content of the ask.
* @param {string} [tags]: Tags of the ask, separated by ','.
* @param {string} [publish_at]: Publish time of the ask with format 'yyyy-MM-dd HH:mm:ss'.
* @return {object} The updated ask object.
* @error {resource:notfound} Ask was not found by id.
* @error {parameter:invalid} If some parameter is invalid.
* @error {permission:denied} If current user has no permission.
*/
helper.checkPermission(this.request, constants.role.EDITOR);
var
user = this.request.user,
ask,
props = [],
text,
attachment,
data = this.request.body;
json_schema.validate('updateAsk', data);
ask = yield $getAsk(id);
if (user.role !== constants.role.ADMIN && user.id !== ask.user_id) {
throw api.notAllowed('Permission denied.');
}
if (data.category_id) {
yield categoryApi.$getCategory(data.category_id);
ask.category_id = data.category_id;
props.push('category_id');
}
if (data.name) {
ask.name = data.name.trim();
props.push('name');
}
if (data.description) {
ask.description = data.description.trim();
props.push('description');
}
if (data.tags) {
ask.tags = helper.formatTags(data.tags);
props.push('tags');
}
if (data.publish_at !== undefined) {
ask.publish_at = data.publish_at;
props.push('publish_at');
}
if (data.image) {
// check image:
attachment = yield attachmentApi.$createAttachment(
user.id,
ask.name,
ask.description,
new Buffer(data.image, 'base64'),
null,
true);
ask.cover_id = attachment.id;
props.push('cover_id');
}
if (data.content) {
text = yield Text.$create({
ref_id: ask.id,
value: data.content
});
ask.content_id = text.id;
ask.content = data.content;
props.push('content_id');
}
if (props.length > 0) {
props.push('updated_at');
props.push('version');
yield ask.$update(props);
}
if (!ask.content) {
text = yield Text.$find(ask.content_id);
ask.content = text.value;
}
this.body = ask;
},
'POST /api/asks/:id/delete': function* (id) {
/**
* Delete an ask.
*
* @name Delete Ask
* @param {string} id: Id of the ask.
* @return {object} Object contains deleted id.
* @error {resource:notfound} Ask not found by id.
* @error {permission:denied} If current user has no permission.
*/
helper.checkPermission(this.request, constants.role.EDITOR);
var
user = this.request.user,
ask = yield $getAsk(id);
if (user.role !== constants.role.ADMIN && user.id !== ask.user_id) {
throw api.notAllowed('Permission denied.');
}
yield ask.$destroy();
yield warp.$update('delete from texts where ref_id=?', [id]);
this.body = {
id: id
};
}
};
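// Quick reference for the handlers above (host and port follow the README default
// of http://localhost:2015; the ids and the 'page' query parameter are taken from
// the docstrings -- this is only a summary of the existing API, not new endpoints):
//
//   GET  /api/asks?page=2            -> { page: {...}, asks: [...] }  (CONTRIBUTOR and above)
//   GET  /api/asks/:id?format=html   -> ask object with content rendered from Markdown to HTML
//   POST /api/asks                   -> create a new ask              (EDITOR and above)
//   POST /api/asks/:id               -> update an ask                 (EDITOR; owner or ADMIN)
//   POST /api/asks/:id/delete        -> delete an ask                 (EDITOR; owner or ADMIN)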
<file_sep>/www/models/localuser.js
'use strict';
// localuser.js
var base = require('./_base.js');
module.exports = function (warp) {
return base.defineModel(warp, 'LocalUser', [
base.column_id('user_id', { unique: true }),
base.column_varchar_100('passwd')
], {
table: 'localusers'
});
};
<file_sep>/www/models/sys_version.js
/**
* Created by zhongyw on 7/31/16.
*/
'use strict';
var base = require('./_base_pure.js');
module.exports = function (warp) {
return base.defineModel(warp, 'SysVersion', [
base.column_varchar_100('version_sn'),
base.column_varchar_100('version_date'),
base.column_timestamp('create_time'),
], {
table: 'sys_version'
});
};
<file_sep>/www/models/user.js
'use strict';
// user.js
var constants = require('../constants');
var base = require('./_base');
module.exports = function (warp) {
return base.defineModel(warp, 'User', [
base.column_bigint('role', { defaultValue: constants.role.SUBSCRIBER }),
base.column_varchar_100('name'),
base.column_varchar_100('email', { unique: true, validate: { isEmail: true, isLowercase: true }}),
base.column_boolean('verified'),
base.column_varchar_1000('image_url'),
base.column_bigint('locked_until')
], {
table: 'users'
});
};
<file_sep>/www/models/setting.js
'use strict';
// setting.js
var base = require('./_base');
module.exports = function (warp) {
return base.defineModel(warp, 'Setting', [
base.column_varchar_100('group'),
base.column_varchar_100('key', { unique: true }),
base.column_text('value', { type: 'text', defaultValue: '' })
], {
table: 'settings'
});
};
<file_sep>/www/test/test_wiki_api.js
'use strict';
// test wiki api:
var
_ = require('lodash'),
fs = require('fs'),
co = require('co'),
should = require('should'),
remote = require('./_remote'),
constants = require('../constants'),
roles = constants.role;
describe('#wikis', function () {
before(remote.setup);
describe('#wiki-api', function () {
it('should get empty wikis', function* () {
var r = yield remote.$get(roles.GUEST, '/api/wikis');
remote.shouldNoError(r);
should(r.wikis).be.ok;
r.wikis.should.be.an.Array.and.have.length(0);
});
it('create and update wiki by editor', function* () {
// create wiki:
var r1 = yield remote.$post(roles.EDITOR, '/api/wikis', {
name: 'Test Wiki ',
description: ' blablabla\nhaha... \n ',
tag: 'abc',
content: 'Long content...',
image: remote.readFileSync('res-image.jpg').toString('base64')
});
remote.shouldNoError(r1);
r1.name.should.equal('Test Wiki');
r1.description.should.equal('blablabla\nhaha...');
r1.tag.should.equal('abc');
r1.content.should.equal('Long content...');
r1.cover_id.should.be.ok;
r1.version.should.equal(0);
// check image:
var dl = yield remote.$download('/files/attachments/' + r1.cover_id + '/l');
remote.shouldNoError(dl);
dl.statusCode.should.equal(200);
dl.headers['content-type'].should.equal('image/jpeg');
parseInt(dl.headers['content-length'], 10).should.approximately(122826, 10000);
// update wiki:
var r2 = yield remote.$post(roles.EDITOR, '/api/wikis/' + r1.id, {
name: 'Name Changed ',
content: 'Changed!'
});
remote.shouldNoError(r2);
r2.name.should.equal('Name Changed');
r2.content.should.equal('Changed!');
r2.version.should.equal(1);
// query:
var r3 = yield remote.$get(roles.GUEST, '/api/wikis/' + r1.id);
remote.shouldNoError(r3);
r3.name.should.equal(r2.name);
r3.content.should.equal(r2.content);
// not updated:
r3.tag.should.equal(r1.tag);
r3.description.should.equal(r1.description);
// query all wikis:
var r4 = yield remote.$get(roles.GUEST, '/api/wikis/');
remote.shouldNoError(r4);
should(r4.wikis).be.ok;
r4.wikis.should.be.an.Array.and.have.length(1);
var w = r4.wikis[0];
w.name.should.equal(r3.name);
});
it('create wiki then change cover by editor', function* () {
// create wiki:
var r1 = yield remote.$post(roles.EDITOR, '/api/wikis', {
name: 'Test Wiki ',
description: ' blablabla\nhaha... \n ',
tag: 'xyz',
content: ' Long content... ',
image: remote.readFileSync('res-image.jpg').toString('base64')
});
remote.shouldNoError(r1);
r1.name.should.equal('Test Wiki');
r1.cover_id.should.be.ok;
// update wiki:
var r2 = yield remote.$post(roles.EDITOR, '/api/wikis/' + r1.id, {
name: 'Name Changed ',
image: remote.readFileSync('res-image-2.jpg').toString('base64')
});
remote.shouldNoError(r2);
r2.name.should.equal('Name Changed');
r2.cover_id.should.not.equal(r1.cover_id);
r2.content.should.equal(r1.content);
r2.version.should.equal(1);
// check image:
var dl = yield remote.$download('/files/attachments/' + r2.cover_id + '/l');
remote.shouldNoError(dl);
dl.statusCode.should.equal(200);
dl.headers['content-type'].should.equal('image/jpeg');
parseInt(dl.headers['content-length'], 10).should.approximately(39368, 10000);
// upload non-image as cover:
var r3 = yield remote.$post(roles.EDITOR, '/api/wikis/' + r1.id, {
name: 'Name Changed Again! ',
image: remote.readFileSync('res-plain.txt').toString('base64')
});
remote.shouldHasError(r3, 'parameter:invalid', 'image');
});
it('create wiki and wikipage with wrong parameter by editor', function* () {
var
i, r, params,
required = ['name', 'tag', 'description', 'content', 'image'],
prepared = {
name: 'Test Param',
description: 'blablabla...',
tag: 'tag1',
content: 'a long content...',
image: remote.readFileSync('res-image.jpg').toString('base64')
};
for (i=0; i<required.length; i++) {
params = _.clone(prepared);
delete params[required[i]];
r = yield remote.$post(roles.EDITOR, '/api/wikis', params);
remote.shouldHasError(r, 'parameter:invalid', required[i]);
}
var w1 = yield remote.$post(roles.EDITOR, '/api/wikis', prepared);
remote.shouldNoError(w1);
// try create wikipage:
var r1 = yield remote.$post(roles.EDITOR, '/api/wikis/' + w1.id + '/wikipages', {
name: 'WP',
content: 'wiki page...'
});
remote.shouldHasError(r1, 'parameter:invalid', 'parent_id');
var r2 = yield remote.$post(roles.EDITOR, '/api/wikis/' + w1.id + '/wikipages', {
parent_id: remote.next_id(),
content: 'wiki page...'
});
remote.shouldHasError(r2, 'parameter:invalid', 'name');
var r3 = yield remote.$post(roles.EDITOR, '/api/wikis/' + w1.id + '/wikipages', {
parent_id: remote.next_id(),
name: 'WP'
});
remote.shouldHasError(r3, 'parameter:invalid', 'content');
});
it('create by contributor failed', function* () {
// create wiki:
var r = yield remote.$post(roles.CONTRIBUTOR, '/api/wikis', {
name: ' To be delete... ',
tag: 'java',
description: ' blablabla\nhaha... \n ',
content: ' Long long long content... ',
image: remote.readFileSync('res-image.jpg').toString('base64')
});
remote.shouldHasError(r, 'permission:denied');
});
it('create and delete wiki by editor', function* () {
// create wiki:
var r1 = yield remote.$post(roles.EDITOR, '/api/wikis', {
name: ' To be delete... ',
tag: 'java',
description: ' blablabla\nhaha... \n ',
content: ' Long long long content... ',
image: remote.readFileSync('res-image.jpg').toString('base64')
});
remote.shouldNoError(r1);
r1.name.should.equal('To be delete...');
// delete wiki:
var r2 = yield remote.$post(roles.EDITOR, '/api/wikis/' + r1.id + '/delete');
remote.shouldNoError(r2);
r2.id.should.equal(r1.id);
// query:
var r3 = yield remote.$get(roles.GUEST, '/api/wikis/' + r1.id);
remote.shouldHasError(r3, 'entity:notfound', 'Wiki');
});
it('create wiki page, update and delete it', function* () {
// create wiki:
var w1 = yield remote.$post(roles.EDITOR, '/api/wikis', {
name: ' Test For WikiPage ',
tag: 'java',
description: ' blablabla\nhaha... \n ',
content: 'Long long long content... ',
image: remote.readFileSync('res-image.jpg').toString('base64')
});
remote.shouldNoError(w1);
// create wiki page:
// w1
// +- p1
var p1 = yield remote.$post(roles.EDITOR, '/api/wikis/' + w1.id + '/wikipages', {
parent_id: '',
name: 'First Wiki Page ',
content: 'This is a first wiki page...'
});
remote.shouldNoError(p1);
p1.wiki_id.should.equal(w1.id);
p1.parent_id.should.equal('');
p1.display_order.should.equal(0);
p1.name.should.equal('First Wiki Page');
p1.content.should.equal('This is a first wiki page...');
p1.version.should.equal(0);
// query p1:
var p2 = yield remote.$get(roles.EDITOR, '/api/wikis/wikipages/' + p1.id);
remote.shouldNoError(p2);
p2.wiki_id.should.equal(p1.wiki_id);
p2.parent_id.should.equal(p1.parent_id);
p2.display_order.should.equal(p1.display_order);
p2.name.should.equal(p1.name);
p2.content.should.equal(p1.content);
p2.version.should.equal(0);
// update p1:
var p3 = yield remote.$post(roles.EDITOR, '/api/wikis/wikipages/' + p1.id, {
name: 'Changed',
content: 'content changed.'
});
remote.shouldNoError(p3);
p3.name.should.equal('Changed');
p3.content.should.equal('content changed.');
// query again:
var p4 = yield remote.$post(roles.EDITOR, '/api/wikis/wikipages/' + p1.id);
remote.shouldNoError(p4);
p4.wiki_id.should.equal(p3.wiki_id);
p4.parent_id.should.equal(p3.parent_id);
p4.display_order.should.equal(p3.display_order);
p4.name.should.equal(p3.name);
p4.content.should.equal(p3.content);
p4.version.should.equal(1);
});
it('create wiki tree, move and try delete wiki', function* () {
// create wiki:
var w1 = yield remote.$post(roles.EDITOR, '/api/wikis', {
name: ' Tree ',
tag: 'wikipedia',
description: ' blablabla\nhaha... \n ',
content: 'Long long long content... ',
image: remote.readFileSync('res-image.jpg').toString('base64')
});
remote.shouldNoError(w1);
// create wiki page:
// w1
// +- p1
var p1 = yield remote.$post(roles.EDITOR, '/api/wikis/' + w1.id + '/wikipages', {
parent_id: '',
name: ' P1 - First Wiki Page ',
content: 'This is a first wiki page...'
});
remote.shouldNoError(p1);
p1.wiki_id.should.equal(w1.id);
p1.parent_id.should.equal('');
p1.display_order.should.equal(0);
p1.name.should.equal('P1 - First Wiki Page');
p1.content.should.equal('This is a first wiki page...');
// try delete wiki:
var r2 = yield remote.$post(roles.EDITOR, '/api/wikis/' + w1.id + '/delete');
remote.shouldHasError(r2, 'entity:conflict');
// try create wiki page again:
// w1
// +- p1
// +- p2
var p2 = yield remote.$post(roles.EDITOR, '/api/wikis/' + w1.id + '/wikipages', {
parent_id: p1.id,
name: 'P2',
content: 'child wiki page...'
});
remote.shouldNoError(p2);
p2.wiki_id.should.equal(w1.id);
p2.parent_id.should.equal(p1.id);
p2.display_order.should.equal(0);
p2.name.should.equal('P2');
p2.content.should.equal('child wiki page...');
// try create wiki page under w1:
// w1
// +- p1
// | +- p2
// +- p3
var p3 = yield remote.$post(roles.EDITOR, '/api/wikis/' + w1.id + '/wikipages', {
parent_id: '',
name: 'P3',
content: 'p3'
});
remote.shouldNoError(p3);
p3.wiki_id.should.equal(w1.id);
p3.parent_id.should.equal('');
p3.display_order.should.equal(1);
p3.name.should.equal('P3');
p3.content.should.equal('p3');
// try create wiki page under p2:
// w1
// +- p1
// | +- p2
// | +- p4
// +- p3
var p4 = yield remote.$post(roles.EDITOR, '/api/wikis/' + w1.id + '/wikipages', {
parent_id: p2.id,
name: 'P4',
content: 'p4'
});
remote.shouldNoError(p4);
p4.wiki_id.should.equal(w1.id);
p4.parent_id.should.equal(p2.id);
p4.display_order.should.equal(0);
p4.name.should.equal('P4');
p4.content.should.equal('p4');
// move p3 to p2 at index 0:
// w1
// +- p1
// . +- p2
// . +- p3 <----- move to here
// . +- p4
// +. p3 <----- from here
var np3 = yield remote.$post(roles.EDITOR, '/api/wikis/wikipages/' + p3.id + '/move', {
parent_id: p2.id,
index: 0
});
remote.shouldNoError(np3);
np3.wiki_id.should.equal(w1.id);
np3.parent_id.should.equal(p2.id);
np3.display_order.should.equal(0);
// move p4 to ROOT at index 0:
// w1
// +- p4 <-------- move to here
// +- p1
// +- p2
// +- p3
// +. p4 <-- from here
var np4 = yield remote.$post(roles.EDITOR, '/api/wikis/wikipages/' + p4.id + '/move', {
parent_id: '',
index: 0
});
remote.shouldNoError(np4);
np4.wiki_id.should.equal(w1.id);
np4.parent_id.should.equal('');
np4.display_order.should.equal(0);
// check p1 index:
var np1 = yield remote.$get(roles.EDITOR, '/api/wikis/wikipages/' + p1.id);
remote.shouldNoError(np1);
np1.display_order.should.equal(1);
// move p1 to p3 to make a recursive:
// w1
// +- p4
// +- p1 <----- i'm to here
// +- p2
// +- p3
// +- <----- to here, but not allowed!
var r4 = yield remote.$post(roles.EDITOR, '/api/wikis/wikipages/' + p1.id + '/move', {
parent_id: p3.id,
index: 0
});
remote.shouldHasError(r4, 'entity:conflict');
// try delete p2 failed because it has p3 as child:
var r5 = yield remote.$post(roles.EDITOR, '/api/wikis/wikipages/' + p2.id + '/delete');
remote.shouldHasError(r5, 'entity:conflict');
// try delete p3 ok because it has no child:
var r6 = yield remote.$post(roles.EDITOR, '/api/wikis/wikipages/' + p3.id + '/delete');
remote.shouldNoError(r6);
r6.id.should.equal(p3.id);
});
});
});
<file_sep>/www/test/test_attachment_api.js
'use strict';
// test attachment api:
var
_ = require('lodash'),
should = require('should'),
remote = require('./_remote'),
constants = require('../constants'),
roles = constants.role;
describe('#attachment', function () {
before(remote.setup);
describe('#api', function () {
it('should get empty attachment', function* () {
var atts = yield remote.$get(roles.GUEST, '/api/attachments');
should(atts).be.ok;
atts.attachments.should.be.an.Array.and.have.length(0);
atts.page.total.should.equal(0);
});
it('create attachment failed by subscriber', function* () {
// create attachment:
var r = yield remote.$post(roles.SUBSCRIBER, '/api/attachments', {
name: '<NAME> ',
description: ' bla bla bla... \n ',
data: remote.readFileSync('res-image.jpg').toString('base64')
});
remote.shouldHasError(r, 'permission:denied');
});
it('upload image by contributor', function* () {
var r = yield remote.$post(roles.CONTRIBUTOR, '/api/attachments', {
name: 'Test Image ',
description: ' bla bla bla... \n ',
data: remote.readFileSync('res-image.jpg').toString('base64')
});
remote.shouldNoError(r);
r.name.should.equal('Test Image');
r.description.should.equal('bla bla bla...');
r.mime.should.equal('image/jpeg');
r.width.should.equal(1280);
r.height.should.equal(720);
r.size.should.equal(346158);
// get it:
var r2 = yield remote.$get(roles.GUEST, '/api/attachments/' + r.id);
remote.shouldNoError(r2);
r2.name.should.equal('Test Image');
r2.description.should.equal('bla bla bla...');
r2.mime.should.equal('image/jpeg');
// get all:
var rs = yield remote.$get(roles.GUEST, '/api/attachments');
remote.shouldNoError(rs);
rs.page.total.should.equal(1);
rs.attachments.should.be.an.Array.and.have.length(1);
rs.attachments[0].id.should.equal(r.id);
rs.attachments[0].name.should.equal('Test Image');
rs.attachments[0].description.should.equal('bla bla bla...');
rs.attachments[0].mime.should.equal('image/jpeg');
// download it:
var d = yield remote.$download('/files/attachments/' + r.id);
remote.shouldNoError(d);
d.statusCode.should.equal(200);
d.headers['content-type'].should.equal('image/jpeg');
d.headers['content-length'].should.equal('346158');
// download 0, m, l, s:
var d0 = yield remote.$download('/files/attachments/' + r.id + '/0');
remote.shouldNoError(d0);
d0.statusCode.should.equal(200);
d0.headers['content-type'].should.equal('image/jpeg');
d0.headers['content-length'].should.equal('346158');
var dl = yield remote.$download('/files/attachments/' + r.id + '/l');
remote.shouldNoError(dl);
dl.statusCode.should.equal(200);
dl.headers['content-type'].should.equal('image/jpeg');
parseInt(dl.headers['content-length'], 10).should.approximately(122826, 10000);
var dm = yield remote.$download('/files/attachments/' + r.id + '/m');
remote.shouldNoError(dm);
dm.statusCode.should.equal(200);
dm.headers['content-type'].should.equal('image/jpeg');
parseInt(dm.headers['content-length'], 10).should.approximately(45043, 1000);
var ds = yield remote.$download('/files/attachments/' + r.id + '/s');
remote.shouldNoError(ds);
ds.statusCode.should.equal(200);
ds.headers['content-type'].should.equal('image/jpeg');
parseInt(ds.headers['content-length'], 10).should.approximately(25269, 1000);
});
it('upload text as text/plain', function* () {
// create attachment:
var r = yield remote.$post(roles.CONTRIBUTOR, '/api/attachments', {
name: ' Text ',
description: ' bla bla bla... \n ',
mime: 'text/plain',
data: remote.readFileSync('res-plain.txt').toString('base64')
});
remote.shouldNoError(r);
r.name.should.equal('Text');
r.description.should.equal('bla bla bla...');
r.mime.should.equal('text/plain');
});
it('upload image but said text/plain', function* () {
// create attachment:
var r = yield remote.$post(roles.CONTRIBUTOR, '/api/attachments', {
name: ' Fake Text ',
description: ' bla bla bla... \n ',
mime: 'text/plain',
data: remote.readFileSync('res-image.jpg').toString('base64')
});
remote.shouldNoError(r);
r.name.should.equal('Fake Text');
r.description.should.equal('bla bla bla...');
r.mime.should.equal('image/jpeg');
});
it('upload text file by contributor then delete it', function* () {
// create attachment:
var r = yield remote.$post(roles.CONTRIBUTOR, '/api/attachments', {
name: ' Text To Delete ',
description: ' bla bla bla... \n ',
data: remote.readFileSync('res-image.jpg').toString('base64')
});
remote.shouldNoError(r);
var r2 = yield remote.$post(roles.CONTRIBUTOR, '/api/attachments', {
name: ' Text2 To Delete ',
description: ' bla bla bla... \n ',
data: remote.readFileSync('res-image.jpg').toString('base64')
});
remote.shouldNoError(r2);
// try delete by another users:
var d1 = yield remote.$post(roles.SUBSCRIBER, '/api/attachments/' + r.id + '/delete');
remote.shouldHasError(d1, 'permission:denied');
// try delete by owner:
var d2 = yield remote.$post(roles.CONTRIBUTOR, '/api/attachments/' + r.id + '/delete');
remote.shouldNoError(d2);
d2.id.should.equal(r.id);
// try delete by admin:
var d3 = yield remote.$post(roles.ADMIN, '/api/attachments/' + r2.id + '/delete');
remote.shouldNoError(d3);
d3.id.should.equal(r2.id);
});
});
});
<file_sep>/www/author.js
'use strict';
var fs = require('fs');
var s = fs.readFileSync('./author', 'utf-8');
var j = s.replace(/\'/g, '\\\'').replace(/\\/g, '\\\\').replace(/\n/g, '\\n');
console.log(j);
<file_sep>/www/search/site_search.js
'use strict';
// site_search.js
// using search engine to search 'http://www.google.com/search?q=keywords site:www.example.com'
function createSearchEngine(cfg) {
var
search_url = cfg.search_url,
domain = cfg.domain,
buildSearchUrl = function (q) {
return search_url.replace('%s', encodeURIComponent(q + ' site:' + domain));
};
return {
external: true,
index: function (docs, callback) {
process.nextTick(function() {
callback && callback(null, { result: 'index ok but depends on search engine.'});
});
},
unindex: function (docs, callback) {
process.nextTick(function() {
callback && callback(null, { result: 'unindex ok but depends on search engine.'});
});
},
search: function (q, options, callback) {
if (arguments.length === 2) {
callback = options;
options = undefined;
}
if (callback) {
process.nextTick(function () {
callback(null, buildSearchUrl(q))
});
return;
}
return buildSearchUrl(q);
}
};
}
module.exports = {
createSearchEngine: createSearchEngine
};
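// Usage sketch -- the config values below are hypothetical examples; only the
// createSearchEngine() signature and the '%s' / ' site:' substitution are taken
// from the code above:
//
//   var engine = createSearchEngine({
//       search_url: 'http://www.google.com/search?q=%s',
//       domain: 'www.example.com'
//   });
//   engine.search('itranswarp');
//   // -> 'http://www.google.com/search?q=itranswarp%20site%3Awww.example.com'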
<file_sep>/www/models/wiki.js
'use strict';
// wiki.js
var base = require('./_base.js');
module.exports = function (warp) {
return base.defineModel(warp, 'Wiki', [
base.column_id('cover_id'),
base.column_id('content_id'),
base.column_bigint('views'),
base.column_varchar_100('name'),
base.column_varchar_100('tag'),
base.column_varchar_1000('description')
], {
table: 'wikis'
});
};
<file_sep>/www/test/test_webpage_api.js
'use strict';
// test webpage api:
var
_ = require('lodash'),
should = require('should'),
remote = require('./_remote'),
constants = require('../constants'),
roles = constants.role;
describe('#webpages', function () {
before(remote.setup);
describe('#api', function () {
it('should get empty webpages', function* () {
var r = yield remote.$get(roles.GUEST, '/api/webpages');
remote.shouldNoError(r);
r.webpages.should.be.an.Array.and.have.length(0);
});
it('create webpage failed by editor', function* () {
var r = yield remote.$post(roles.EDITOR, '/api/webpages', {
alias: 'by-editor',
name: 'by editor',
content: '...'
});
remote.shouldHasError(r, 'permission:denied');
});
it('create duplicate webpages by admin', function* () {
// create webpage:
var r = yield remote.$post(roles.ADMIN, '/api/webpages', {
alias: 'duplicate',
name: '<NAME>',
draft: true,
content: 'first...'
});
remote.shouldNoError(r);
r.alias.should.equal('duplicate');
r.name.should.equal('<NAME>');
r.draft.should.be.true;
// create with same alias:
var r2 = yield remote.$post(roles.ADMIN, '/api/webpages', {
alias: 'duplicate',
name: 'second one',
content: 'second...'
});
remote.shouldHasError(r2, 'parameter:invalid', 'alias');
});
it('create and update webpage by admin', function* () {
// create webpage:
var r = yield remote.$post(roles.ADMIN, '/api/webpages', {
alias: 'test',
name: 'Test Webpage',
draft: true,
tags: 'aaa, BBB, \t ccc,CcC',
content: 'Long content...'
});
remote.shouldNoError(r);
r.draft.should.be.true;
r.tags.should.equal('aaa,BBB,ccc');
// update name:
var r2 = yield remote.$post(roles.ADMIN, '/api/webpages/' + r.id, {
name: ' Name Changed '
});
remote.shouldNoError(r2);
r2.name.should.equal('Name Changed');
// update text:
var r3 = yield remote.$post(roles.ADMIN, '/api/webpages/' + r.id, {
content: 'Content changed.'
});
remote.shouldNoError(r3);
r3.content.should.equal('Content changed.');
// update alias:
var r4 = yield remote.$post(roles.ADMIN, '/api/webpages/' + r.id, {
alias: 'test-2',
tags: ' A, B, C, c, D, '
});
remote.shouldNoError(r4);
r4.alias.should.equal('test-2');
r4.tags.should.equal('A,B,C,D');
r4.content.should.equal(r3.content);
});
it('create and update alias but duplicate by admin', function* () {
// create webpage:
var r1 = yield remote.$post(roles.ADMIN, '/api/webpages', {
alias: 'abc',
name: 'abc',
content: 'abc...'
});
remote.shouldNoError(r1);
var r2 = yield remote.$post(roles.ADMIN, '/api/webpages', {
alias: 'xyz',
name: 'xyz',
content: 'xyz...'
});
remote.shouldNoError(r2);
// try update alias 'abc' to 'xyz':
var r = yield remote.$post(roles.ADMIN, '/api/webpages/' + r1.id, {
alias: 'xyz'
});
remote.shouldHasError(r, 'parameter:invalid', 'alias');
});
it('create webpage with wrong parameter by admin', function* () {
var r1 = yield remote.$post(roles.ADMIN, '/api/webpages', {
name: 'Test',
alias: 'alias-x',
// content: 'missing',
tags: 'xxx'
});
remote.shouldHasError(r1, 'parameter:invalid', 'content');
var r2 = yield remote.$post(roles.ADMIN, '/api/webpages', {
// name: 'missing',
alias: 'alias-x',
content: 'the content...',
tags: 'xxx'
});
remote.shouldHasError(r2, 'parameter:invalid', 'name');
var r3 = yield remote.$post(roles.ADMIN, '/api/webpages', {
name: 'Test',
// alias: 'missing',
content: 'the content...',
tags: 'xxx'
});
remote.shouldHasError(r3, 'parameter:invalid', 'alias');
});
});
});
<file_sep>/www/models/resource.js
'use strict';
// resource.js
var base = require('./_base.js');
module.exports = function (warp) {
return base.defineModel(warp, 'Resource', [
base.column_id('ref_id'),
base.column_blob('value')
], {
table: 'resources'
});
};
<file_sep>/www/models/attachment.js
'use strict';
// attachment.js
var base = require('./_base.js');
module.exports = function (warp) {
return base.defineModel(warp, 'Attachment', [
base.column_id('user_id'),
base.column_id('resource_id'),
base.column_bigint('size'),
base.column_bigint('width'),
base.column_bigint('height'),
base.column_varchar_100('mime'),
base.column_varchar_100('name'),
base.column_varchar_100('meta', { defaultValue: '' }),
base.column_varchar_1000('description')
], {
table: 'attachments'
});
};
<file_sep>/www/cache.js
'use strict';
// init memcache:
var
_ = require('lodash'),
thunkify = require('thunkify'),
Memcached = require('memcached'),
config = require('./config');
// init memcached:
console.log('init memcache...');
var
DEFAULT_LIFETIME = 86400, // 24h
CACHE_PREFIX = config.cache.prefix,
memcached = new Memcached(config.cache.host + ':' + config.cache.port, {
'timeout': config.cache.timeout,
'retries': config.cache.retries
}),
$m_incr = thunkify(function (key, inc, callback) {
memcached.incr(key, inc, callback);
}),
$m_get = thunkify(function (key, callback) {
memcached.get(key, callback);
}),
$m_set = thunkify(function (key, value, lifetime, callback) {
memcached.set(key, value, lifetime, callback);
}),
$m_del = thunkify(function (key, callback) {
memcached.del(key, callback);
}),
$m_getMulti = thunkify(function (keys, callback) {
memcached.getMulti(keys, callback);
});
module.exports = {
$incr: function* (key, initial) {
var
k = CACHE_PREFIX + key,
data = yield $m_incr(k, 1);
if (data === false) {
if (initial === undefined) {
initial = 0;
}
yield $m_set(k, initial + 1, DEFAULT_LIFETIME);
data = initial + 1;
}
return data;
},
$count: function* (key) {
var
k = CACHE_PREFIX + key,
num = yield $m_get(k);
return (num === false) ? 0 : num;
},
$counts: function* (keys) {
if (keys.length === 0) {
return [];
}
var
multiKeys = _.map(keys, function (key) {
return CACHE_PREFIX + key;
}),
data = yield $m_getMulti(multiKeys);
return _.map(multiKeys, function (key) {
return data[key] || 0;
});
},
$get: function* (key, defaultValueOrFn, lifetime) {
/**
* get value from cache by key. If key not exist:
* return default value if defaultValueOrFn is not a function,
* otherwise call defaultValueOfFn, put the result into cache
* and return as value.
*/
var
k = CACHE_PREFIX + key,
data = yield $m_get(k);
if (data) {
// console.log('[cache] hit: ' + key);
return data;
}
console.log('[Cache] NOT hit: ' + key);
if (defaultValueOrFn) {
lifetime = lifetime || DEFAULT_LIFETIME;
if (typeof (defaultValueOrFn) === 'function') {
if (defaultValueOrFn.constructor.name === 'GeneratorFunction') {
console.log('yield generator to fill cache...');
data = yield defaultValueOrFn();
console.log('yield generator ok.')
}
else {
console.log('call function to fill cache...');
data = defaultValueOrFn();
console.log('call function ok.');
}
}
else {
data = defaultValueOrFn;
}
yield $m_set(k, data, lifetime);
console.log('[cache] cache set for key: ' + key);
}
else {
data = null;
}
return data;
},
$gets: function* (keys) {
if (keys.length === 0) {
return [];
}
var
multiKeys = _.map(keys, function (key) {
return CACHE_PREFIX + key;
}),
data = yield $m_getMulti(multiKeys);
return _.map(multiKeys, function (key) {
return data[key] || null;
});
},
$set: function* (key, value, lifetime) {
var k = CACHE_PREFIX + key;
yield $m_set(k, value, lifetime || DEFAULT_LIFETIME);
},
$remove: function* (key) {
var k = CACHE_PREFIX + key;
yield $m_del(k);
}
};
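// Usage sketch for $get -- the 'hot_keywords' key and the $loadHotKeywords
// generator are hypothetical; only the $get(key, defaultValueOrFn, lifetime)
// signature above is real. Inside any generator-based handler:
//
//   var cache = require('./cache');
//   // returns the cached value, or runs the generator, caches the result
//   // for one hour and then returns it:
//   var keywords = yield cache.$get('hot_keywords', function* () {
//       return yield $loadHotKeywords();
//   }, 3600);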
<file_sep>/www/models/category.js
'use strict';
// category.js
var base = require('./_base.js');
module.exports = function (warp) {
return base.defineModel(warp, 'Category', [
base.column_varchar_100('name'),
base.column_varchar_100('tag'),
base.column_varchar_1000('description'),
base.column_bigint('display_order')
], {
table: 'categories'
});
};
<file_sep>/www/controllers/userApi.js
'use strict';
// user api
var
_ = require('lodash'),
thunkify = require('thunkify'),
oauth2 = require('oauth2-warp'),
api = require('../api'),
db = require('../db'),
auth = require('../auth'),
helper = require('../helper'),
config = require('../config'),
json_schema = require('../json_schema'),
constants = require('../constants');
var
User = db.user,
AuthUser = db.authuser,
LocalUser = db.localuser,
warp = db.warp,
next_id = db.next_id;
var LOCAL_SIGNIN_EXPIRES_IN_MS = 1000 * config.session.expires;
var LOCK_TIMES = {
d: 86400000,
w: 604800000,
m: 2592000000,
y: 31536000000
};
// init oauth2 providers:
var oauth2_providers = {};
_.each(config.oauth2, function (cfg, name) {
var provider = oauth2.createProvider(
name,
cfg.app_key,
cfg.app_secret,
cfg.redirect_uri
);
provider.$getAuthentication = thunkify(provider.getAuthentication);
oauth2_providers[name] = provider;
console.log('Init OAuth2: ' + name + ', redirect_uri = ' + provider.redirect_uri);
});
function* $getUsers(page) {
page.total = yield User.$findNumber('count(id)');
if (page.isEmpty) {
return [];
}
var users = yield User.$findAll({
offset: page.offset,
limit: page.limit,
order: 'created_at desc'
});
return users;
}
function* $getUserByEmail(email) {
return yield User.$find({
where: 'email=?',
params: [email],
limit: 1
});
}
function* $getUser(id) {
var user = yield User.$find(id);
if (user === null) {
throw api.notFound('User');
}
return user;
}
function* $createUser(userParam){
var user = yield User.$create({
role: userParam.role,
name: userParam.name,
email: userParam.email
});
return user;
}
function* $createLocalUser(user, localUserParam){
var localUser = yield LocalUser.$create({
user_id: user.id,
passwd: localUserParam.passwd
});
localUser.passwd = auth.generatePassword(localUser.id, localUser.passwd);
yield localUser.$update(['passwd']);
}
function* $bindUsers(entities, propName) {
var i, entity, u, prop = propName || 'user_id';
for (i=0; i<entities.length; i++) {
entity = entities[i];
entity.user = yield User.$find({
select: ['id', 'name', 'image_url'],
where: 'id=?',
params: [entity[prop]]
});
}
}
function* $lockUser(id, lockTime) {
var user = yield $getUser(id);
if (user.role <= constants.role.ADMIN) {
throw api.notAllowed('Cannot lock admin user.');
}
user.locked_until = lockTime;
yield user.$update(['locked_until']);
return lockTime;
}
function* $processOAuthAuthentication(provider_name, authentication) {
var
auth_id = provider_name + ':' + authentication.auth_id,
auth_user,
user,
user_id;
auth_user = yield AuthUser.$find({
where: 'auth_id=?',
params: [auth_id]
});
if (auth_user === null) {
// first time to signin:
user_id = next_id();
user = {
id: user_id,
email: user_id + '@' + provider_name,
name: authentication.name,
image_url: authentication.image_url || '/static/img/user.png'
};
auth_user = {
user_id: user_id,
auth_provider: provider_name,
auth_id: auth_id,
auth_token: authentication.access_token,
expires_at: Date.now() + 1000 * Math.min(604800, authentication.expires_in)
};
yield AuthUser.$create(auth_user);
yield User.$create(user);
return {
user: user,
auth_user: auth_user
};
}
// not first time to signin:
auth_user.auth_token = authentication.access_token;
auth_user.expires_at = Date.now() + 1000 * Math.min(604800, authentication.expires_in);
yield auth_user.$update(['auth_token', 'expires_at', 'updated_at', 'version']);
// find user:
user = yield User.$find(auth_user.user_id);
if (user === null) {
console.log('Logic error: user not found!');
user_id = auth_user.user_id;
user = {
id: user_id,
email: user_id + '@' + provider_name,
name: authentication.name,
image_url: authentication.image_url || '/static/img/user.png'
};
yield User.$create(user);
}
return {
user: user,
auth_user: auth_user
};
}
function getReferer(request) {
var url = request.get('referer') || '/';
if (url.indexOf('/auth/') >= 0 || url.indexOf('/manage/') >= 0) {
url = '/';
}
return url;
}
module.exports = {
$getUser: $getUser,
$getUsers: $getUsers,
$bindUsers: $bindUsers,
'GET /api/users/:id': function* (id) {
helper.checkPermission(this.request, constants.role.EDITOR);
this.body = yield $getUser(id);
},
'GET /api/users': function* () {
helper.checkPermission(this.request, constants.role.EDITOR);
var
page = helper.getPage(this.request),
users = yield $getUsers(page);
this.body = {
page: page,
users: users
};
},
'POST /api/users': function* (){
helper.checkPermission(this.request, constants.role.EDITOR);
var email,
user,
role,
data = this.request.body;
email = data.email;
user = yield $getUserByEmail(email);
},
'POST /api/signUp': function* (){
// helper.checkPermission(this.request, constants.role.EDITOR);
var email,
user,
localUser,
role,
data = this.request.body,
userParam = {
email: data.email,
role: constants.role.SUBSCRIBER,
name: data.email
},
localUserParam = {
passwd: data.passwd
};
user = yield $createUser(userParam);
localUser = yield $createLocalUser(user, localUserParam);
this.body = {
page: 1,
users: 2
};
},
'POST /api/authenticate': function* () {
/**
* Authenticate user by email and password, for local user only.
*
* @param email: Email address, in lower case.
* @param passwd: The password, 40-chars SHA1 string, in lower case.
*/
var
email,
passwd,
user,
localuser,
data = this.request.body;
json_schema.validate('authenticate', data);
email = data.email,
passwd = data.passwd;
user = yield $getUserByEmail(email);
if (user === null) {
throw api.authFailed('email', 'Email not found.');
}
if (user.locked_until > Date.now()) {
throw api.authFailed('locked', 'User is locked.');
}
localuser = yield LocalUser.$find({
where: 'user_id=?',
params: [user.id]
});
if (localuser === null) {
throw api.authFailed('passwd', 'Cannot signin local.')
}
// check password:
if (!auth.verifyPassword(localuser.id, passwd, localuser.passwd)) {
throw api.authFailed('passwd', 'Bad password.');
}
// make session cookie:
var
expires = Date.now() + LOCAL_SIGNIN_EXPIRES_IN_MS,
cookieStr = auth.makeSessionCookie(constants.signin.LOCAL, localuser.id, localuser.passwd, expires);
this.cookies.set(config.session.cookie, cookieStr, {
path: '/',
httpOnly: true,
expires: new Date(expires)
});
console.log('set session cookie for user: ' + user.email);
this.body = user;
},
'GET /auth/signout': function* () {
this.cookies.set(config.session.cookie, 'deleted', {
path: '/',
httpOnly: true,
expires: new Date(0)
});
var redirect = getReferer(this.request);
console.log('Signout, goodbye!');
this.response.redirect(redirect);
},
'GET /auth/from/:name': function* (name) {
var provider, redirect, redirect_uri, jscallback, r;
provider = oauth2_providers[name];
if (!provider) {
this.response.status = 404;
this.body = 'Invalid URL.';
return;
}
redirect_uri = provider.redirect_uri;
if (redirect_uri.indexOf('http://') != 0) {
redirect_uri = 'http://' + this.request.host + '/auth/callback/' + name;
}
jscallback = this.request.query.jscallback;
if (jscallback) {
redirect_uri = redirect_uri + '?jscallback=' + jscallback;
}
else {
redirect = getReferer(this.request);
redirect_uri = redirect_uri + '?redirect=' + encodeURIComponent(redirect);
}
r = provider.getAuthenticateURL({
redirect_uri: redirect_uri
});
console.log('Redirect to: ' + r);
this.response.redirect(r);
},
'GET /auth/callback/:name': function* (name) {
var provider, redirect, redirect_uri, code, jscallback, authentication, r, auth_user, user, cookieStr;
provider = oauth2_providers[name];
if (!provider) {
this.response.status = 404;
this.body = 'Invalid URL.';
return;
}
jscallback = this.request.query.jscallback;
redirect = this.request.query.redirect || '/';
code = this.request.query.code;
if (!code) {
console.log('OAuth2 callback error: code is not found.');
this.body = '<html><body>Invalid code.</body></html>';
return;
}
try {
authentication = yield provider.$getAuthentication({
code: code
});
}
catch (e) {
console.log('OAuth2 callback error: get authentication failed.');
this.body = '<html><body>Authenticate failed.</body></html>';
return;
}
console.log('OAuth2 callback ok: ' + JSON.stringify(authentication));
r = yield $processOAuthAuthentication(name, authentication);
auth_user = r.auth_user;
user = r.user;
if (user.locked_until > Date.now()) {
console.log('User is locked: ' + user.email);
this.body = '<html><body>User is locked.</body></html>';
return;
}
// make session cookie:
cookieStr = auth.makeSessionCookie(name, auth_user.id, auth_user.auth_token, auth_user.expires_at);
this.cookies.set(config.session.cookie, cookieStr, {
path: '/',
httpOnly: true,
expires: new Date(auth_user.expires_at)
});
console.log('set session cookie for user: ' + user.email);
if (jscallback) {
this.body = '<html><body><script> window.opener.'
+ jscallback
+ '(null,' + JSON.stringify({
id: user.id,
name: user.name,
image_url: user.image_url
}) + ');self.close(); </script></body></html>';
}
else {
this.response.redirect(redirect);
}
},
'POST /api/users/:id/lock': function* (id) {
var locked_until = this.request.body.locked_until;
if (!helper.isInteger(locked_until) || (locked_until < 0)) {
throw api.invalidParam('locked_until', 'locked_until must be an integer as a timestamp.');
}
helper.checkPermission(this.request, constants.role.EDITOR);
yield $lockUser(id, locked_until);
this.body = {
locked_until: locked_until
};
}
};
<file_sep>/README.md
itranswarp.js
=============
A Node.js-powered website featuring a blog, wiki, discussion board and search engine.
### Environment
Nodejs: >= 0.12
MySQL: 5.1 ~ 5.6
Memcache
Nginx
### Configurations
You should make a copy of `config_default.js` as `config_production.js` and override the settings you need to change:
$ cp www/config_default.js www/config_production.js
You can safely remove any settings you have not changed.
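For example, a minimal `config_production.js` might only point the site at your own memcache instance and rename the session cookie. The `cache.*` and `session.*` keys below are the ones read by `www/cache.js` and `www/controllers/userApi.js`; treat anything else as an assumption and copy the exact structure from `config_default.js`:

    'use strict';
    // config_production.js -- only the values that differ from config_default.js
    module.exports = {
        cache: {
            host: '127.0.0.1',   // memcache host, read by cache.js
            port: 11211          // memcache port, read by cache.js
        },
        session: {
            cookie: 'isession'   // session cookie name, read by userApi.js
        }
    };

How partial overrides are merged with the defaults depends on `config.js`, so when in doubt keep a whole settings group together rather than overriding a single key.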
### Install packages
Run `npm install` to install all required packages:
$ npm install
### Initialize database
Run `node schema > init_db.sql` to generate the initial database schema; the script will also prompt you for the administrator's email and password.
You will get an `init_db.sql` file in the current directory. Run this SQL script with:
$ mysql -u root -p < init_db.sql
NOTE: re-running this SQL script will remove all existing data.
### Run
Make sure memcached is running, then start the app:
$ node --harmony app.js
You should be able to see the home page in your browser at `http://localhost:2015/`.
To sign in to the management console, go to `http://localhost:2015/manage/signin` and use the email and password you entered when running `node schema`.
<file_sep>/www/controllers/_images.js
'use strict';
// image operation.
var
api = require('../api'),
fs = require('fs'),
thunkify = require('thunkify'),
gm = require('gm').subClass({ imageMagick : true });
function calcScaleSize(origin_width, origin_height, resize_width, resize_height, keepAspect) {
function isEnlarge(tw, th) {
return origin_width < tw && origin_height < th;
}
if (resize_width <= 0 && resize_height <= 0) {
throw {"name": "Parameter error!"};
}
if (keepAspect === undefined) {
keepAspect = true;
}
if (origin_width === resize_width && origin_height === resize_height) {
return { width: origin_width, height: origin_height, resized: false, enlarge: false };
}
var
expected_height,
target_width = resize_width,
target_height = resize_height;
if (target_height <= 0) {
target_height = target_width * origin_height / origin_width;
return { width: target_width, height: target_height, resized: true, enlarge: isEnlarge(target_width, target_height) };
}
if (target_width <= 0) {
target_width = target_height * origin_width / origin_height;
return { width: target_width, height: target_height, resized: true, enlarge: isEnlarge(target_width, target_height) };
}
if (keepAspect) {
expected_height = target_width * origin_height / origin_width;
if (expected_height > target_height) {
target_width = target_height * origin_width / origin_height;
} else if (expected_height < target_height) {
target_height = expected_height;
}
}
return { width: target_width, height: target_height, resized: true, enlarge: isEnlarge(target_width, target_height) };
}
function getImageInfo(buffer, callback) {
var i = gm(buffer);
i.format(function (err, format) {
console.log("error:" + err);
if (err) {
return callback(api.invalidParam('image', 'Invalid image data'));
}
i.size(function (err, size) {
if (err) {
return callback(api.invalidParam('image', 'Invalid image data'));
}
callback(null, {
data: buffer,
format: format.toLowerCase(), // 'png', 'jpeg', 'gif'...
width: size.width,
height: size.height
});
});
});
}
/**
* resize to specific width and height, but keep aspect.
* callback should have signature (err, buffer).
*/
function resizeKeepAspect(buffer, origin_width, origin_height, resize_width, resize_height, callback) {
if (origin_width * resize_height === origin_height * resize_width && origin_width <= resize_width) {
console.log('no need to resize!');
return callback(null, buffer);
}
var
img = gm(buffer),
r = calcScaleSize(origin_width, origin_height, resize_width, resize_height, true);
console.log('resized to ' + r.width + 'x' + r.height);
img = img.resize(r.width, r.height);
return img.toBuffer(callback);
}
/**
* resize to specific width and height, crop if neccessary.
* callback should have signature (err, buffer).
*/
function resizeAsCover(buffer, origin_width, origin_height, resize_width, resize_height, callback) {
if (origin_width * resize_height === origin_height * resize_width && origin_width <= resize_width) {
console.log('no need to resize!');
return callback(null, buffer);
}
var
img = gm(buffer),
scale_width,
scale_height;
if (resize_width * origin_height === origin_width * resize_height) {
// fit!
console.log('resizeAsCover: fit!');
img = img.resize(resize_width, resize_height);
return img.toBuffer(callback);
}
if (resize_width * origin_height > origin_width * resize_height) {
// cut off top and bottom:
scale_width = resize_width;
console.log('resizeAsCover: resize to: ' + scale_width + ' x ?');
img = img.resize(scale_width, null);
// crop:
scale_height = scale_width * origin_height / origin_width;
img = img.crop(resize_width, resize_height, 0, Math.floor((scale_height - resize_height) / 2));
return img.toBuffer(callback);
}
// cut off left and right:
scale_height = resize_height;
console.log('resizeAsCover: resize to: ? x ' + scale_height);
img = img.resize(null, scale_height);
// crop:
scale_width = scale_height * origin_width / origin_height;
img = img.crop(resize_width, resize_height, Math.floor((scale_width - resize_width) / 2), 0);
return img.toBuffer(callback);
}
module.exports = {
$getImageInfo: thunkify(getImageInfo),
$resizeKeepAspect: thunkify(resizeKeepAspect),
$resizeAsCover: thunkify(resizeAsCover),
calcScaleSize: calcScaleSize
};
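// calcScaleSize sketch: fitting a 1280x720 image into a 640x640 box while
// keeping the aspect ratio (the numbers are illustrative only):
//
//   calcScaleSize(1280, 720, 640, 640, true);
//   // -> { width: 640, height: 360, resized: true, enlarge: false }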
<file_sep>#!/env/bin/python
import random
def BubbleSortWords(Lista):
k=0
m=0
n=len(Lista)
q=0
for i in range(0,n):
for j in range(i, n-i-1):
if Lista[j+1]<Lista[j]:
Lista[j+1],Lista[j]=Lista[j],Lista[j+1]
m=1
k+=1
for r in range(n-2-i, i, -1):  # backward pass needs a negative step
if Lista[r]<Lista[r-1]:
Lista[r],Lista[r-1]=Lista[r-1],Lista[r]
m=1
k+=1
if m==0: break
m=0;
print(k)
print(Lista)
if __name__=='__main__':
Lista=[]
archivo=open("Palabras.txt","r")
Lista=archivo.readlines()
BubbleSortWords(Lista)
<file_sep>import sys, pygame, time
pygame.init()
size = width, height = 1000, 650 # screen size
speed = [2, 2]
speed2 = [3, 3]
# Define the colors we will use in RGB format
black = 0, 0, 0
WHITE = (255, 255, 255)
BLUE = (0, 0, 89)
GREEN = (0, 100, 0)
RED = (169, 0, 0)
Pink = (255, 102, 102)
Purple = (148, 0, 89)
Yellow = (237, 237, 2)
screen = pygame.display.set_mode(size)
pygame.display.set_caption("3D")
########## vanishing point ########################
x_0, y_0 = width/2, 0
############ function #############################
def Three_point(x, y, z):
# offset for y
y_1 = (height - 150) - y
# compute the new y
y = y_1 - 1
# offset for x
x_1 = width/2 + x
########### line equation #######################
if y_1==0:
m=0
else:
m=1.0*((y - y_1))/(-y_1) # slope
x= x_1 + (x_0 - x_1)*m
if abs(x)-abs(int(x))>0.5:
x=int(x)+1
else:
x=int(x)
#####################################################33
print x, y
return [x, y]
if __name__=='__main__':
# points to converge toward the vanishing point
P=[0,0]
P_1=[-500,0]
P_2=[500,0]
P_3=[200,-100]
P_4=[-300,100]
# triangle points
P1=[-50,0]
P2=[50,0]
P3=[0,75]
done = False
clock = pygame.time.Clock()
z=0
while not done:
pygame.draw.circle(screen,RED, [width/2,0], 5, 0) ## vanishing point
# This limits the while loop to a max of 10 times per second.
# Leave this out and we will use all CPU we can.
clock.tick(10)
for event in pygame.event.get():
if event.type == pygame.QUIT:
done=True
P=Three_point(P[0], P[1],z)
P_1=Three_point(P_1[0], P_1[1],z)
P_2=Three_point(P_2[0], P_2[1],z)
P_3=Three_point(P_3[0], P_3[1],z)
P_4=Three_point(P_4[0], P_4[1],z)
pygame.draw.circle(screen,GREEN, P, 5, 0)
pygame.draw.circle(screen,BLUE, P_1, 5, 0)
pygame.draw.circle(screen,Purple, P_2, 5, 0)
pygame.draw.circle(screen,Pink, P_3, 5, 0)
pygame.draw.circle(screen,Yellow, P_4, 5, 0)
P=[P[0]- width/2, (height - 150) - P[1]]
P_1=[P_1[0]- width/2, (height - 150) - P_1[1]]
P_2=[P_2[0]- width/2, (height - 150) - P_2[1]]
P_3=[P_3[0]- width/2, (height - 150) - P_3[1]]
P_4=[P_4[0]- width/2, (height - 150) - P_4[1]]
############## Triangle ###########################
P1=Three_point(P1[0], P1[1],z)
P2=Three_point(P2[0], P2[1],z)
P3=Three_point(P3[0], P3[1],z)
pygame.draw.polygon(screen,WHITE,[P1,P2,P3],5)
P1=[P1[0]- width/2, (height - 150) - P1[1]]
P2=[P2[0]- width/2, (height - 150) - P2[1]]
P3=[P3[0]- width/2, (height - 150) - P3[1]]
####################################################
z+=1
if z==max(P[1], P_1[1], P_2[1], P_3[1], P_4[1]):#500:
done=True
pygame.display.flip()
time.sleep(.01)
screen.fill(black)
pygame.quit()
<file_sep>#!/env/bin/python
import random
import math
T=100
def MergeSort(N):
m=len(N[0])
j=0
M=[[0 for i in range(m*2)] for i in range(int(len(N)/2.0))]
for i in range(0,len(N)-1,2):
M[j]=Sort(N[i],N[i+1])
j+=1
if len(N)%2!=0:
M+=[N[len(N)-1]]
if len(M[0])<T:
MergeSort(M)
else:
print(M,"\n")
def Sort(N, M):
n=len(N)
m=len(M)
R=[0 for i in range(n+m)]
#K=[0 for i in range(n+m)]
Max=max(max(N), max(M))
for i in range(n+m):
R[i]=min(min(N), min(M))
if min(min(N), min(M))==min(N):
Replace(Max, N)
else:
Replace(Max, M)
return R
def Replace(num, N):
Min=min(N)
for i in range(len(N)):
if N[i]==Min:
N[i]=num+1
break
return N
def Equal(N):
R=[0 for i in range(len(N))]
for i in range(len(N)-1):
R[i]=N[i]
return R
if __name__=='__main__':
N=[[0] for i in range(T)]
for i in range(T):
N[i][0]=random.randint(0,T)
print("Los numeros a ordenar son: \n", N)
print("\nLos numeros ordenados son: \n")
MergeSort(N)
<file_sep>#include<stdio.h>
#include<time.h>
#include<stdlib.h>
#define N 10000
void Buble(int A[N]);
int main(){
int A[N];
//printf("Lista de numeros a ordenar\n\n");
for(int i=0; i<N;i++){
A[i]= rand()%N;
//printf("%i ", A[i]);
}
Buble(A);
printf("\n\n");
//system("pause");
return 0;
}
void Buble(int A[N]){
int aux;
for(int i=0;i<N-1;i++){
for(int j=1;j<N-i;j++){
if(A[j-1]>A[j]){
aux=A[j];
A[j]=A[j-1];
A[j-1]=aux;
}
}
}
printf("\n\nLos numeros ordenados son: \n\n");
for(int i=0; i<N; i++){
printf("%i ", A[i]);
}
}
<file_sep>#!/env/bin/python
import random
n=10000
def BubbleSortWords(Lista):
k=0
m=0
n=len(Lista)
for i in range(0,n-1):
for j in range(1,n-i):
if Lista[j]<Lista[j-1]:
Lista[j],Lista[j-1]=Lista[j-1],Lista[j]
m=1
k+=1
if m==0: break
m=0;
print(k)
print(Lista)
if __name__=='__main__':
Lista=[]
archivo=open("Palabras.txt","r")
Lista=archivo.readlines()
BubbleSortWords(Lista)
<file_sep>#!/env/bin/python
import random
n=100
def BubbleSort(A):
m=0
for i in range(0,n-1):
for j in range(1,n-i):
if A[j]<A[j-1]:
A[j],A[j-1]=A[j-1],A[j]
m=1
if m==0: break
m=0
return A
if __name__=='__main__':
A=[0 for i in range(n)]
for i in range(n):
A[i]=random.randint(0,n-1)
print("Los numeros a ordenar son\n")
print(A)
A=BubbleSort(A)
print("\n\nLos numeros ordenados son\n")
print(A)
<file_sep>/* fgets example */
#include <stdio.h>
#include <string.h>
#define N 1000//0
void BubbleSort(char A[N][256]);
int main(){
FILE * pFile;
char A[N][256], B[256];
int i=0;
pFile = fopen ("Words2.txt" , "r");
if (pFile == NULL){
perror ("Error opening file");
return 0;
}
while(fgets (B, 256 , pFile) != NULL ){
strcpy(A[i], B);
//puts(A[i]);
i++;
}
fclose (pFile);
//BubbleSort
for(int i=0;i<N-1;i++){
for(int j=1;j<N;j++){
if(strcmp(A[j-1],A[j])>0){ // A[j-1] > A[j]; strcmp returns any positive value, not necessarily 1
strcpy(B,A[j]); // copy A[j] into B
strcpy(A[j],A[j-1]);
strcpy(A[j-1],B);
}
}
}
for(int i=0; i<N; i++){
puts(A[i]);
}
//BubbleSort(A);
return 0;
}
void BubbleSort(char A[N][256]){ /* must match the prototype above */
char B[256];
for(int i=0;i<N-1;i++){
for(int j=1;j<N;j++){
if(strcmp(A[j-1],A[j])>0){ // A[j-1] > A[j]; strcmp returns any positive value, not necessarily 1
strcpy(B,A[j]); // copy A[j] into B
strcpy(A[j],A[j-1]);
strcpy(A[j-1],B);
}
}
}
for(int i=0; i<N; i++){
puts(A[i]);
}
}
<file_sep>#!/env/bin/python
import random
import math
n=10
def Heap(N, Large):
M=[[-10 for i in range(3)] for i in range(int(Large/2)-1)]
j=int(Large/2)-2
# compute parents and children
for i in range(int(Large/2)-1):
M[j][0]=i
M[j][1]=2*i+1
M[j][2]=2*i+2
j-=1
print M
if Large%2==0:
#print Large/2 -1, Large-1
R=Swaping(N[Large/2-1], N[Large-1])
N[Large/2 -1]=R[0]
N[Large -1]=R[1]
for i in range(int(Large/2)-1):
R=Swaping(N[M[i][0]], N[M[i][1]], N[M[i][2]])
N[M[i][0]]=R[0]
N[M[i][1]]=R[1]
N[M[i][2]]=R[2]
#print N[M[i][0]], N[M[i][1]], N[M[i][2]]
N[0], N[Large-1] = N[Large-1], N[0]
Large-=1
if Large>2:
Heap(N, Large)
else:
R=Swaping(N[0], N[1], N[2])
N[2]=R[0]
N[1]=R[1]
N[0]=R[2]
N=N[::-1]
print N#, Large
def Swaping(N1, N2, N3=None):
if N1>N2:
N1, N2 = N2, N1
if N3!=None:
if N1>N3:
N1, N3 = N3, N1
N=[N1, N2, N3]
return N
if __name__=='__main__':
N=[0 for i in range(n)]
for i in range(n):
N[i]=random.randint(0,n)
N=[5, 9, 8, 6, 4, 8, 10, 7, 3, 7]
print "Los numeros a ordenar son\n", N
print "Los numeros ordenados son\n"
Heap(N, n)
<file_sep>#tablero ajedrez
import sys, pygame, time
pygame.init()
size = width, height = 1000, 650 # screen size
speed = [2, 2]
speed2 = [3, 3]
# Define the colors we will use in RGB format
Black = 0, 0, 0
White = (255, 255, 255)
Brown2= (89, 54, 24)
Brown = (119, 71, 26)
screen = pygame.display.set_mode(size)
pygame.display.set_caption("Tablero de ajedrez 3D")
######### White pieces
Rey1= pygame.image.load("Rey1.png").convert_alpha() # load the image
Rey1Rect = Rey1.get_rect().move(288,35)
Reina1= pygame.image.load("Reina1.png").convert_alpha() # load the image
Reina1Rect = Reina1.get_rect().move(225,75)
Alfil1= pygame.image.load("Alfil1.png").convert_alpha() # load the image
Alfil1Rect = Alfil1.get_rect().move(158,100)
Alfil1_1Rect = Alfil1.get_rect().move(350,25)
Caballo1= pygame.image.load("Caballo1.png").convert_alpha() # load the image
Caballo1Rect = Caballo1.get_rect().move(95,125)
Caballo1_1Rect = Caballo1.get_rect().move(413,10)
Torre1= pygame.image.load("Torre1.png").convert_alpha() # load the image
Torre1Rect = Torre1.get_rect().move(30,163)
Torre1_1Rect = Torre1.get_rect().move(470, -5)
Peon1= pygame.image.load("Peon1.png").convert_alpha() # load the image
Peon1Rect = Peon1.get_rect().move(75,230)
Peon2Rect = Peon1.get_rect().move(138,205)
Peon3Rect = Peon1.get_rect().move(200,180)
Peon4Rect = Peon1.get_rect().move(263,155)
Peon5Rect = Peon1.get_rect().move(325,130)
Peon6Rect = Peon1.get_rect().move(388,105)
Peon7Rect = Peon1.get_rect().move(450,80)
Peon8Rect = Peon1.get_rect().move(513,55)
########### Black pieces
Peon2= pygame.image.load("Peon2.png").convert_alpha() #load the image
Peon1_1Rect = Peon2.get_rect().move(263,480)
Peon2_1Rect = Peon2.get_rect().move(325,455)
Peon3_1Rect = Peon2.get_rect().move(388,430)
Peon4_1Rect = Peon2.get_rect().move(450,405)
Peon5_1Rect = Peon2.get_rect().move(513,380)
Peon6_1Rect = Peon2.get_rect().move(575,355)
Peon7_1Rect = Peon2.get_rect().move(638,330)
Peon8_1Rect = Peon2.get_rect().move(700,305)
Rey2= pygame.image.load("Rey2.png").convert_alpha() #load the image
Rey2Rect = Rey2.get_rect().move(488,415)
Reina2= pygame.image.load("Reina2.png").convert_alpha() #load the image
Reina2Rect = Reina2.get_rect().move(550,400)
Alfil2= pygame.image.load("Alfil2.png").convert_alpha() #load the image
Alfil2Rect = Alfil2.get_rect().move(425,460)
Alfil2_1Rect = Alfil2.get_rect().move(613,385)
Caballo2= pygame.image.load("Caballo2.png").convert_alpha() #load the image
Caballo2Rect = Caballo2.get_rect().move(363,485)
Caballo2_1Rect = Caballo2.get_rect().move(675,360)
Torre2= pygame.image.load("Torre2.png").convert_alpha() #load the image
Torre2Rect = Torre2.get_rect().move(300,520)
Torre2_1Rect = Torre2.get_rect().move(738,345)
if __name__=='__main__':
done = False
clock = pygame.time.Clock()
while not done:
# This limits the while loop to a max of 10 times per second.
# Leave this out and we will use all CPU we can.
clock.tick(10)
for event in pygame.event.get():
if event.type == pygame.QUIT:
done=True
		######################## Borders
pygame.draw.polygon(screen, Brown, [(300,600), (295,618), (824,407), (800,400)],0)
pygame.draw.polygon(screen, Brown, [(519,0), (500,0), (800,400), (824,407)],0)
pygame.draw.polygon(screen, Brown, [(0,200), (0,225), (295,618), (300,600)],0)
pygame.draw.polygon(screen, Brown, [(0,184), (0,200), (500,0), (460,0)],0)
######################## Base
pygame.draw.polygon(screen, Brown2, [(0,225), (0,260), (295,653), (295,618)],0)
pygame.draw.polygon(screen, Brown2, [(295,618), (295,653), (824,442), (824,407)],0)
		######################## Squares
pygame.draw.polygon(screen, Black, [(63,175), (125,150), (163,200), (100,225)],0)
pygame.draw.polygon(screen, Black, [(38,250), (75,300), (138,275), (100,225)],0)
pygame.draw.polygon(screen, Black, [(138,275), (175,325), (238,300), (200,250)],0)
pygame.draw.polygon(screen, Black, [(113,350), (175,325), (213,375), (150,400)],0)
pygame.draw.polygon(screen, Black, [(275,350), (213,375), (250,425), (313,400)],0)
pygame.draw.polygon(screen, Black, [(188,450), (225,500), (288,475), (250,425)],0)
pygame.draw.polygon(screen, Black, [(288,475), (350,450), (388,500), (325,525)],0)
pygame.draw.polygon(screen, Black, [(263,550), (325,525), (363,575), (300,600)],0)
pygame.draw.polygon(screen, Black, [(188,125), (225,175), (288,150), (250,100)],0)
pygame.draw.polygon(screen, Black, [(200,250), (263,225), (225,175), (163,200)],0)
pygame.draw.polygon(screen, Black, [(300,275), (363,250), (325,200), (263,225)],0)
pygame.draw.polygon(screen, Black, [(300,275), (238,300), (275,350), (338,325)],0)
pygame.draw.polygon(screen, Black, [(313,400), (375,375), (413,425), (350,450)],0)
pygame.draw.polygon(screen, Black, [(388,500), (450,475), (488,525), (425,550)],0)
pygame.draw.polygon(screen, Black, [(450,475), (513,450), (475,400), (413,425)],0)
pygame.draw.polygon(screen, Black, [(400,300), (338,325), (375,375), (438,350)],0)
pygame.draw.polygon(screen, Black, [(313,75), (375,50), (413,100), (350,125)],0)
pygame.draw.polygon(screen, Black, [(350,125), (288,150), (325,200), (388,175)],0)
pygame.draw.polygon(screen, Black, [(388,175), (450,150), (488,200), (425,225)],0)
pygame.draw.polygon(screen, Black, [(363,250), (400,300), (463,275), (425,225)],0)
pygame.draw.polygon(screen, Black, [(463,275), (525,250), (563,300), (500,325)],0)
pygame.draw.polygon(screen, Black, [(438,350), (475,400), (538,375), (500,325)],0)
pygame.draw.polygon(screen, Black, [(575,425), (638,400), (600,350), (538,375)],0)
pygame.draw.polygon(screen, Black, [(575,425), (513,450), (550,500), (613,475)],0)
pygame.draw.polygon(screen, Black, [(638,400), (675,450), (738,425), (700,375)],0)
pygame.draw.polygon(screen, Black, [(663,325), (725,300), (763,350), (700,375)],0)
pygame.draw.polygon(screen, Black, [(663,325), (625,275), (563,300), (600,350)],0)
pygame.draw.polygon(screen, Black, [(588,225), (650,200), (688,250), (625,275)],0)
pygame.draw.polygon(screen, Black, [(488,200), (550,175), (588,225), (525,250)],0)
pygame.draw.polygon(screen, Black, [(550,175), (613,150), (575,100), (513,125)],0)
pygame.draw.polygon(screen, Black, [(413,100), (450,150), (513,125), (475,75)],0)
pygame.draw.polygon(screen, Black, [(438,25), (500,0), (538,50), (475,75)],0)
screen.blit(Rey1,Rey1Rect)
screen.blit(Reina1,Reina1Rect)
screen.blit(Alfil1,Alfil1Rect)
screen.blit(Alfil1,Alfil1_1Rect)
screen.blit(Caballo1,Caballo1Rect)
screen.blit(Caballo1,Caballo1_1Rect)
screen.blit(Torre1,Torre1Rect)
screen.blit(Torre1,Torre1_1Rect)
screen.blit(Peon1,Peon1Rect)
screen.blit(Peon1,Peon2Rect)
screen.blit(Peon1,Peon3Rect)
screen.blit(Peon1,Peon4Rect)
screen.blit(Peon1,Peon5Rect)
screen.blit(Peon1,Peon6Rect)
screen.blit(Peon1,Peon7Rect)
screen.blit(Peon1,Peon8Rect)
screen.blit(Peon2,Peon1_1Rect)
screen.blit(Peon2,Peon2_1Rect)
screen.blit(Peon2,Peon3_1Rect)
screen.blit(Peon2,Peon4_1Rect)
screen.blit(Peon2,Peon5_1Rect)
screen.blit(Peon2,Peon6_1Rect)
screen.blit(Peon2,Peon7_1Rect)
screen.blit(Peon2,Peon8_1Rect)
screen.blit(Rey2,Rey2Rect)
screen.blit(Reina2,Reina2Rect)
screen.blit(Alfil2,Alfil2Rect)
screen.blit(Alfil2,Alfil2_1Rect)
screen.blit(Caballo2,Caballo2Rect)
screen.blit(Caballo2,Caballo2_1Rect)
screen.blit(Torre2,Torre2Rect)
screen.blit(Torre2,Torre2_1Rect)
pygame.display.flip()
time.sleep(.01)
screen.fill(White)
pygame.quit()
<file_sep>#!/usr/bin/env python
import random
M=11 #Number of rows
N=16 #Number of columns
def Matrix():
#L= [[0]* 10 for i in range(10)]
	##Generates the matrix with live cells (1) and dead cells (0) at random
#for i in range(M):
# for j in range(N):
# if 18<random.randint(0,99)<60:
# L[i][j]=1
	#Initial matrix: pentadecathlon!
# 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
L=[[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], #1
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], #2
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], #3
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], #4
[0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0], #5
[0, 0, 0, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0], #6
[0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0], #7
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], #8
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], #9
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], #10
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]] #11
for i in range(M):
print(L[i])
return L
def GameOfLife(A):
B= [[0]* N for i in range(M)]
for i in range(M):
for j in range(N):
B[i][j]=A[i][j]
	#corner [0,0]
a=A[0][1]+A[1][1]+A[1][0]
if A[0][0]==1 and a==1: B[0][0]=0
if A[0][0]==0 and a==3: B[0][0]=1
	#corner [0,N]
a=A[0][N-2]+A[1][N-2]+A[1][N-1]
if A[0][N-1]==1 and a==1: B[0][N-1]=0
if A[0][N-1]==0 and a==3: B[0][N-1]=1
	#corner [M,0]
a=A[M-2][0]+A[M-2][1]+A[M-1][1]
if A[M-1][0]==1 and a==1: B[M-1][0]=0
if A[M-1][0]==0 and a==3: B[M-1][0]=1
	#corner [M,N]
a=A[M-2][N-1]+A[M-2][N-2]+A[M-1][N-2]
if A[M-1][N-1]==1 and a==1: B[M-1][N-1]=0
if A[M-1][N-1]==0 and a==3: B[M-1][N-1]=1
	#first and last rows
Num=[0, 1, 4, 5]
for i in range(1, N-1):
		#row 0
#print("Celula[0,",i,"]: ", A[0][i])
#print("Vecinos: ", A[0][i-1]," ", A[0][i+1], " ", A[1][i-1], " ", A[1][i], " ", A[1][i+1])
a=A[0][i-1]+A[0][i+1]+A[1][i-1]+A[1][i]+A[1][i+1]
#print("Suma de los veccinos: ", a)
if A[0][i]==1 and a in Num: B[0][i]=0
if A[0][i]==0 and a==3: B[0][i]=1
		#last row (M-1)
b=A[M-1][i-1]+A[M-1][i+1]+A[M-2][i-1]+A[M-2][i]+A[M-2][i+1]
if A[M-1][i]==1 and b in Num: B[M-1][i]=0
if A[M-1][i]==0 and b==3: B[M-1][i]=1
	#first and last columns (0 and N-1)
	for i in range(1, M-1):
		#column 0
a=A[i-1][0]+A[i+1][0]+A[i-1][1]+A[i][1]+A[i+1][1]
if A[i][0]==1 and a in Num: B[i][0]=0
if A[i][0]==0 and a==3: B[i][0]=1
		#last column (N-1)
b=A[i-1][N-1]+A[i+1][N-1]+A[i-1][N-2]+A[i][N-2]+A[i+1][N-2]
if A[i][N-1]==1 and b in Num: B[i][N-1]=0
if A[i][N-1]==0 and b==3: B[i][N-1]=1
	#Center
Num2=[0, 1, 4, 5, 6, 7, 8]
for i in range(1, M-1):
for j in range(1,N-1):
a=A[i-1][j-1]+A[i-1][j]+A[i-1][j+1]+A[i][j-1]+A[i][j+1]+A[i+1][j-1]+A[i+1][j]+A[i+1][j+1]
#print("Numero de vecinos vimos para [", i,",",j,"]: ",a)
if A[i][j]==1 and a in Num2: B[i][j]=0
if A[i][j]==0 and a==3: B[i][j]=1
for i in range(M):
print(B[i])
return B
if __name__=='__main__':
print("iteracion 0")
A=Matrix()
for i in range(0,20):
print("Iteracion ",i+1)
B=GameOfLife(A)
A=B
#If the cell is alive: it survives if it has 2 or 3 live neighbours, otherwise it dies
#If the cell is dead: it comes back to life only if it has exactly 3 live neighbours
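#Illustrative sketch (not part of the program above): the two rules can also be
#written with an explicit neighbour count over a snapshot copy of the grid.
#The function name next_generation is an assumption used only for illustration;
#the code above already implements the same rules case by case.
def next_generation(A):
	B = [row[:] for row in A]	#work on a copy so counts use the old generation
	for i in range(M):
		for j in range(N):
			alive = 0
			for di in (-1, 0, 1):
				for dj in (-1, 0, 1):
					if (di or dj) and 0 <= i+di < M and 0 <= j+dj < N:
						alive += A[i+di][j+dj]
			if A[i][j] == 1:
				B[i][j] = 1 if alive in (2, 3) else 0
			else:
				B[i][j] = 1 if alive == 3 else 0
	return B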
<file_sep>s="""0000000000000000
0000000000000000
0000111111110000
0000101111010000
0000111111110000
0000000000000000
0000000000000000"""
arr=s.split('\n')
print("%s\n%s\n%s\n%s\n%s\n%s\n%s"%(arr[0],arr[1],arr[2],arr[3],arr[4],arr[5],arr[6]))
print('\n\n\n\n\n')
n=6
m=15
for k in range(0,3):
	#note: the grid is updated in place, so later cells see already-updated neighbours
	for i in range(0,n+1):
		for j in range(0,m+1):
bandera=0
l=list(arr[i])
if i==0 and j==0:
bandera=int(arr[i][j+1])+int(arr[i+1][j])+int(arr[i+1][j+1])
if arr[i][j]=='0' and bandera==3:
l[j]='1'
arr[i]="".join(l)
if arr[i][j]=='1' and (bandera<2 or bandera>3):
l=list(arr[i])
l[j]='0'
arr[i]="".join(l)
if i==n and j==0:
bandera=int(arr[i][j+1])+int(arr[i-1][j])+int(arr[i-1][j+1])
if arr[i][j]=='0' and bandera==3:
l[j]='1'
arr[i]="".join(l)
if arr[i][j]=='1' and (bandera<2 or bandera>3):
l[j]='0'
arr[i]="".join(l)
if i==0 and j==m:
bandera=int(arr[i][j-1])+int(arr[i+1][j])+int(arr[i+1][j-1])
if arr[i][j]=='0' and bandera==3:
l[j]='1'
arr[i]="".join(l)
if arr[i][j]=='1' and (bandera<2 or bandera>3):
l[j]='0'
arr[i]="".join(l)
if i==n and j==m:
bandera=int(arr[i][j-1])+int(arr[i-1][j])+int(arr[i-1][j-1])
if arr[i][j]=='0' and bandera==3:
l[j]='1'
arr[i]="".join(l)
if arr[i][j]=='1' and (bandera<2 or bandera>3):
l[j]='0'
arr[i]="".join(l)
if i==0 and (j!=0 and j!=m):
bandera=int(arr[i][j+1])+int(arr[i][j-1])+int(arr[i+1][j-1])+int(arr[i+1][j])+int(arr[i+1][j+1])
if arr[i][j]=='0' and bandera==3:
l[j]='1'
arr[i]="".join(l)
if arr[i][j]=='1' and (bandera<2 or bandera>3):
l[j]='0'
arr[i]="".join(l)
if i==n and (j!=0 and j!=m):
bandera=int(arr[i][j+1])+int(arr[i][j-1])+int(arr[i-1][j-1])+int(arr[i-1][j])+int(arr[i-1][j+1])
if arr[i][j]=='0' and bandera==3:
l[j]='1'
arr[i]="".join(l)
if arr[i][j]=='1' and (bandera<2 or bandera>3):
l[j]='0'
arr[i]="".join(l)
			if (i>0 and i<n) and (j>0 and j<m):
bandera=int(arr[i][j+1])+int(arr[i][j-1])+int(arr[i-1][j-1])+int(arr[i-1][j])+int(arr[i-1][j+1])+int(arr[i+1][j-1])+int(arr[i+1][j])+int(arr[i+1][j+1])
if arr[i][j]=='0' and bandera==3:
l[j]='1'
arr[i]="".join(l)
if arr[i][j]=='1' and (bandera<2 or bandera>3):
l[j]='0'
arr[i]="".join(l)
print("%s\n%s\n%s\n%s\n%s\n%s\n%s"%(arr[0],arr[1],arr[2],arr[3],arr[4],arr[5],arr[6]))
print('\n\n\n\n\n')
<file_sep># 3D chess board
import sys, pygame, time, math
pygame.init()
size = width, height = 800, 680 # screen size
speed = [2, 2]
speed2 = [3, 3]
# Define the colors we will use in RGB format
Black = 0, 0, 0
White = (255, 255, 255)
Brown2= (89, 54, 24)
Brown = (119, 71, 26)
Gray = (216, 216, 216)
Red = (169, 0, 0)
screen = pygame.display.set_mode(size)
pygame.display.set_caption("Tablero de ajedrez 3D")
X,Y=30,250 #lower-left corner of the first little square
p=50 # how long the squares will be (x coordinate)
q=25 # how slanted the squares will be (y coordinate)
n=q/p
######### White pieces #####################
Rey1= pygame.image.load("Rey1.png").convert_alpha() #load the image
Rey1Rect = Rey1.get_rect().move(X+4*p,Y-210)
Reina1= pygame.image.load("Reina1.png").convert_alpha() #load the image
Reina1Rect = Reina1.get_rect().move(X+140, Y-170)
Alfil1= pygame.image.load("Alfil1.png").convert_alpha() #load the image
Alfil1Rect = Alfil1.get_rect().move(X+95,Y-145)
Alfil1_1Rect = Alfil1.get_rect().move(X+5*p,Y-220)
Caballo1= pygame.image.load("Caballo1.png").convert_alpha() #load the image
Caballo1Rect = Caballo1.get_rect().move(X+45,Y-110)
Caballo1_1Rect = Caballo1.get_rect().move(X+6*p-10,Y-240)
Torre1= pygame.image.load("Torre1.png").convert_alpha() #load the image
Torre1Rect = Torre1.get_rect().move(X-5,Y-80)
Torre1_1Rect = Torre1.get_rect().move(X+7*p,Y-250)
Peon1= pygame.image.load("Peon1.png").convert_alpha() #load the image
Peon1Rect = Peon1.get_rect().move(X+20,Y-20)
Peon2Rect = Peon1.get_rect().move(X+p+20,Y-70+q)
Peon3Rect = Peon1.get_rect().move(X+2*p+20,Y-2*p-20+ 2*q)
Peon4Rect = Peon1.get_rect().move(X+3*p+20,Y-3*p-20+ 3*q)
Peon5Rect = Peon1.get_rect().move(X+4*p+20,Y-4*p-20+ 4*q)
Peon6Rect = Peon1.get_rect().move(X+5*p+20,Y-5*p-20+ 5*q)
Peon7Rect = Peon1.get_rect().move(X+6*p+20,Y-6*p-20+ 6*q)
Peon8Rect = Peon1.get_rect().move(X+7*p+20,Y-7*p-20+ 7*q)
########### Black pieces ##########################################
Peon2= pygame.image.load("Peon2.png").convert_alpha() #load the image
Peon1_1Rect = Peon2.get_rect().move(X+3*p-10,Y+5*p-q+10)
Peon2_1Rect = Peon2.get_rect().move(X+4*p-10,Y+5*p-2*q+10)
Peon3_1Rect = Peon2.get_rect().move(X+5*p-10,Y+5*p-3*q+10)
Peon4_1Rect = Peon2.get_rect().move(X+6*p-10,Y+5*p-4*q+10)
Peon5_1Rect = Peon2.get_rect().move(X+7*p-10,Y+5*p-5*q+10)
Peon6_1Rect = Peon2.get_rect().move(X+8*p-10,Y+5*p-6*q+10)
Peon7_1Rect = Peon2.get_rect().move(X+9*p-10,Y+5*p-7*q+10)
Peon8_1Rect = Peon2.get_rect().move(X+10*p-10,Y+5*p-8*q+10)
Rey2= pygame.image.load("Rey2.png").convert_alpha() #load the image
Rey2Rect = Rey2.get_rect().move(488-135,415)
Reina2= pygame.image.load("Reina2.png").convert_alpha() #load the image
Reina2Rect = Reina2.get_rect().move(550-149,405)
Alfil2= pygame.image.load("Alfil2.png").convert_alpha() #load the image
Alfil2Rect = Alfil2.get_rect().move(425-125,460)
Alfil2_1Rect = Alfil2.get_rect().move(613-157,385)
Caballo2= pygame.image.load("Caballo2.png").convert_alpha() #load the image
Caballo2Rect = Caballo2.get_rect().move(363-115,485)
Caballo2_1Rect = Caballo2.get_rect().move(675-170,365)
Torre2= pygame.image.load("Torre2.png").convert_alpha() #load the image
Torre2Rect = Torre2.get_rect().move(200,520)
Torre2_1Rect = Torre2.get_rect().move(553,348)
#######################################################################
def Square(x,y,t):
if t==0:
pygame.draw.polygon(screen, Black, [(x,y), (x+p,y-q), (x+p-q, y-p-q), (x-q,y-p)],0)
else:
pygame.draw.polygon(screen, White, [(x,y), (x+p,y-q), (x+p-q, y-p-q), (x-q,y-p)],0)
def Board(x,y):
a,b = x-q, y-p
r=20
t=25
A=[a, b]
B=[a+(8*p), b-(8*q)]
C=[a + (8*q), b + 8*p]
D=[a + 8*p +8*q, b +8*p -8*q]
A1=[A[0]-t-8, A[1]-r]
B1=[B[0]+r-5, B[1]-t-8]
C1=[C[0]-r+5, C[1]+t+5]
D1=[D[0]+t+8, D[1]+r-10]
m=0
for j in range(y,y+ 8*p,p):
k=j
for i in range(x,x+8*p,p):
if m%2==0:
Square(i,k,0)
else:
Square(i,k,1)
k-=q
m+=1
x+=q
m+=1
	########################## Borders
pygame.draw.polygon(screen, Brown, [A,A1,C1,D1,D,C],0)
pygame.draw.polygon(screen, Brown, [A,A1,B1,D1,D,B],0)
if __name__=='__main__':
done = False
clock = pygame.time.Clock()
while not done:
# This limits the while loop to a max of 10 times per second.
# Leave this out and we will use all CPU we can.
clock.tick(10)
for event in pygame.event.get():
if event.type == pygame.QUIT:
done=True
Board(X,Y)
screen.blit(Rey1,Rey1Rect)
screen.blit(Reina1,Reina1Rect)
screen.blit(Alfil1,Alfil1Rect)
screen.blit(Alfil1,Alfil1_1Rect)
screen.blit(Caballo1,Caballo1Rect)
screen.blit(Caballo1,Caballo1_1Rect)
screen.blit(Torre1,Torre1Rect)
screen.blit(Torre1,Torre1_1Rect)
screen.blit(Peon1,Peon1Rect)
screen.blit(Peon1,Peon2Rect)
screen.blit(Peon1,Peon3Rect)
screen.blit(Peon1,Peon4Rect)
screen.blit(Peon1,Peon5Rect)
screen.blit(Peon1,Peon6Rect)
screen.blit(Peon1,Peon7Rect)
screen.blit(Peon1,Peon8Rect)
screen.blit(Peon2,Peon1_1Rect)
screen.blit(Peon2,Peon2_1Rect)
screen.blit(Peon2,Peon3_1Rect)
screen.blit(Peon2,Peon4_1Rect)
screen.blit(Peon2,Peon5_1Rect)
screen.blit(Peon2,Peon6_1Rect)
screen.blit(Peon2,Peon7_1Rect)
screen.blit(Peon2,Peon8_1Rect)
screen.blit(Rey2,Rey2Rect)
screen.blit(Reina2,Reina2Rect)
screen.blit(Alfil2,Alfil2Rect)
screen.blit(Alfil2,Alfil2_1Rect)
screen.blit(Caballo2,Caballo2Rect)
screen.blit(Caballo2,Caballo2_1Rect)
screen.blit(Torre2,Torre2Rect)
screen.blit(Torre2,Torre2_1Rect)
pygame.display.flip()
time.sleep(.01)
screen.fill(Gray)
pygame.quit()
<file_sep>#!/usr/bin/env python
import random
n=10000
def BubbleSort(A):
for i in range(1,n):
for j in range(0,n-1):
if A[i]<A[j]:
aux=A[j]
A[j]=A[i]
A[i]=aux
if __name__=='__main__':
	A=[]
for i in range(n):
V=random.randint(0,99)
A.append(V)
#print("Los numeros a ordenar son\n")
#print(A)
BubbleSort(A)
print("\n\nLos numeros ordenados son\n")
print(A)
<file_sep>import sys, pygame, time
pygame.init()
size = width, height = 1300, 700 # screen size
speed = [2, 2]
speed2 = [3, 3]
black = 0, 0, 0
screen = pygame.display.set_mode(size)
pygame.display.set_caption("Ping pong")
ball = pygame.image.load("ball.png").convert_alpha() #load the image
ballrect = ball.get_rect().move(50,50)
ball2 = pygame.image.load("ball.png") #load the image
ballrect2 = ball2.get_rect().move(500,600)
time.sleep(5)
while 1:
for event in pygame.event.get():
if event.type == pygame.QUIT: sys.exit()
ballrect = ballrect.move(speed) #pelota 1
if ballrect.left < 0 or ballrect.right > width:
speed[0] = -speed[0]
if ballrect.top < 0 or ballrect.bottom > height:
speed[1] = -speed[1]
ballrect2 = ballrect2.move(speed2) #pelota 2
if ballrect2.left < 0 or ballrect2.right > width:
speed2[0] = -speed2[0]
if ballrect2.top < 0 or ballrect2.bottom > height:
speed2[1] = -speed2[1]
screen.fill(black)
screen.blit(ball, ballrect)
screen.blit(ball2, ballrect2)
pygame.display.flip()
<file_sep># Sorts
This repository contains code that sorts numbers or words, using algorithms such as bubble sort, cocktail sort, etc.
Created by:
-------------------------------
<NAME>
Started:
-------------------------------
2017/09/22 <3
TODO:
-------------------------------
* Bubble sort for numbers in Python
* Bubble sort for numbers in C++
* Bubble sort for words in Python
* Bubble sort for words in C++
* Optimized bubble sort for numbers in Python
* Optimized bubble sort for numbers in C++
* Optimized bubble sort for words in Python
* Optimized bubble sort for words in C++
* Cocktail sort for numbers in Python
* Cocktail sort for numbers in C++
* Cocktail sort for words in Python
* Cocktail sort for words in C++
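Example (sketch):
-------------------------------
Cocktail sort is still on the TODO list above; the sketch below only illustrates
what the Python version could look like (the function name `cocktail_sort` is an
assumption, it is not code from this repository yet):

```python
def cocktail_sort(a):
    """Sort the list a in place, sweeping left-to-right and then right-to-left."""
    start, end = 0, len(a) - 1
    swapped = True
    while swapped:
        swapped = False
        for i in range(start, end):              # forward pass pushes the max right
            if a[i] > a[i + 1]:
                a[i], a[i + 1] = a[i + 1], a[i]
                swapped = True
        if not swapped:
            break
        end -= 1
        swapped = False
        for i in range(end - 1, start - 1, -1):  # backward pass pushes the min left
            if a[i] > a[i + 1]:
                a[i], a[i + 1] = a[i + 1], a[i]
                swapped = True
        start += 1
```

For example, `cocktail_sort([5, 1, 4, 2, 8])` leaves the list as `[1, 2, 4, 5, 8]`.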
<file_sep>#include<stdio.h>
#include<time.h>
#include<stdlib.h>
#define N 10
int main(){
int A[N];
int i;
printf("Lista de numeros a ordenar\n\n");
for(i=0;i<N;i++){
A[i]= rand()%N;
printf("%i ", A[i]);
}
printf("\n\n");
//system("pause");
return 0;
}
<file_sep># 3D chess board
import sys, pygame, time
pygame.init()
size = width, height = 1000, 650 # screen size
speed = [2, 2]
speed2 = [3, 3]
# Define the colors we will use in RGB format
Black = 0, 0, 0
White = (255, 255, 255)
Brown2= (89, 54, 24)
Brown = (119, 71, 26)
screen = pygame.display.set_mode(size)
pygame.display.set_caption("Tablero de ajedrez 3D")
if __name__=='__main__':
done = False
clock = pygame.time.Clock()
while not done:
# This limits the while loop to a max of 10 times per second.
# Leave this out and we will use all CPU we can.
clock.tick(10)
for event in pygame.event.get():
if event.type == pygame.QUIT:
done=True
		######################## Borders
pygame.draw.polygon(screen, Brown, [(300,600), (295,618), (824,407), (800,400)],0)
pygame.draw.polygon(screen, Brown, [(519,0), (500,0), (800,400), (824,407)],0)
pygame.draw.polygon(screen, Brown, [(0,200), (0,225), (295,618), (300,600)],0)
pygame.draw.polygon(screen, Brown, [(0,184), (0,200), (500,0), (460,0)],0)
######################## Base
pygame.draw.polygon(screen, Brown2, [(0,225), (0,260), (295,653), (295,618)],0)
pygame.draw.polygon(screen, Brown2, [(295,618), (295,653), (824,442), (824,407)],0)
		######################## Squares
pygame.draw.polygon(screen, Black, [(63,175), (125,150), (163,200), (100,225)],0)
pygame.draw.polygon(screen, Black, [(38,250), (75,300), (138,275), (100,225)],0)
pygame.draw.polygon(screen, Black, [(138,275), (175,325), (238,300), (200,250)],0)
pygame.draw.polygon(screen, Black, [(113,350), (175,325), (213,375), (150,400)],0)
pygame.draw.polygon(screen, Black, [(275,350), (213,375), (250,425), (313,400)],0)
pygame.draw.polygon(screen, Black, [(188,450), (225,500), (288,475), (250,425)],0)
pygame.draw.polygon(screen, Black, [(288,475), (350,450), (388,500), (325,525)],0)
pygame.draw.polygon(screen, Black, [(263,550), (325,525), (363,575), (300,600)],0)
pygame.draw.polygon(screen, Black, [(188,125), (225,175), (288,150), (250,100)],0)
pygame.draw.polygon(screen, Black, [(200,250), (263,225), (225,175), (163,200)],0)
pygame.draw.polygon(screen, Black, [(300,275), (363,250), (325,200), (263,225)],0)
pygame.draw.polygon(screen, Black, [(300,275), (238,300), (275,350), (338,325)],0)
pygame.draw.polygon(screen, Black, [(313,400), (375,375), (413,425), (350,450)],0)
pygame.draw.polygon(screen, Black, [(388,500), (450,475), (488,525), (425,550)],0)
pygame.draw.polygon(screen, Black, [(450,475), (513,450), (475,400), (413,425)],0)
pygame.draw.polygon(screen, Black, [(400,300), (338,325), (375,375), (438,350)],0)
## pygame.draw.polygon(screen, Black, [(), (), (), ()],0)
## pygame.draw.polygon(screen, Black, [(), (), (), ()],0)
## pygame.draw.polygon(screen, Black, [(), (), (), ()],0)
## pygame.draw.polygon(screen, Black, [(), (), (), ()],0)
pygame.display.flip()
time.sleep(.01)
screen.fill(White)
pygame.quit()
<file_sep>/* fgets example */
#include <stdio.h>
#include <string.h>
#define N 10
void BubbleSort(char A[][256]);
int main(){
FILE * pFile;
char mystring[N][256];
int i;
pFile = fopen ("Palabras.txt" , "r");
if (pFile == NULL) perror ("Error opening file");
else {
i=0;
while(fgets (mystring[i], 100 , pFile) != NULL ){
//puts (mystring[i]);
i++;
}
}
  BubbleSort(mystring);
fclose (pFile);
return 0;
}
void BubbleSort(char A[][256]){
char c;
int i,j,k,s;
for(i=0;i<N;i++){
for(j=0;j<strlen(A[i])-1;j++){
for(k=0;k<strlen(A[i])-1;k++){
if(A[i][k]>A[i][k+1]){
c=A[i][k];
A[i][k]=A[i][k+1];
A[i][k+1]=c;
}
}
}
printf("%s",A[i]);
}
}
<file_sep>#include<stdio.h>
#include<time.h>
#include<stdlib.h>
#define N 10
int main(){
int A[N];
int i;
printf("Lista de numeros a ordenar\n\n");
for(i=0;i<N;i++){
A[i]= rand()%N;
//printf("%i ", A[i]);
}
printf("\n\n");
//system("pause");
return 0;
}
<file_sep>#!/usr/bin/env python
import random
n=100
def InsertSort(A):
M=[A[0]]
k=2
for i in range(1,n):
if A[i]<M[0]:
M=[A[i]]+M
elif A[i]>=M[len(M)-1]:
M=M+[A[i]]
else:
for j in range(1,len(M)):
if A[i]>=M[j-1] and A[i]<M[j]:
M.insert(j,A[i])
break
k+=1
#print(M)
if __name__=='__main__':
A=[0 for i in range(n)]
for i in range(n):
A[i]=random.randint(0,n)
#print("Los numeros a ordenar son\n", A)
#print("\nLos numeros ordenados son\n")
InsertSort(A)
|
15d0624c7b1f07f171494ab86ce383de5f1831de
|
[
"Markdown",
"C",
"Python",
"C++"
] | 20
|
Python
|
ArelyL/Sorts
|
88ae758d10e6f5e1288c6b897d23bc2b314381e9
|
6864278a5608572a6d9657a7d2775fa6e1d27ba7
|
refs/heads/master
|
<repo_name>kindlyops/en-notify<file_sep>/app/components/en-notify.js
export { default } from 'en-notify/components/en-notify'
<file_sep>/app/services/notify.js
export { default } from 'en-notify/services/notify';<file_sep>/addon/utils/message.js
import Ember from 'ember'
const { A } = Ember
export default Ember.Object.extend({
uid: null,
header: null,
text: null,
errors: A([]),
type: 'info',
closeAfter: undefined,
visible: undefined,
})
<file_sep>/README.md
# en-notify
A simple flash-message addon for Ember.js. This is a WIP.
<file_sep>/addon/components/en-notify/message.js
import Ember from 'ember'
const { get, set, computed, Component, getWithDefault, run, testing } = Ember
export default Component.extend({
classNames: ['en-notify-message'],
classNameBindings: [
'message.type',
'message.visible:en-notify-show:en-notify-hide',
],
message: null,
closeAfter: 2500,
isSuccess: computed.equal('message.type', 'success'),
isError: computed.equal('message.type', 'error'),
init() {
this._super(...arguments)
set(this, 'message.visible', true)
},
didInsertElement() {
let closeAfter = getWithDefault(
this,
'message.closeAfter',
get(this, 'closeAfter'),
)
if (testing) {
closeAfter = false
}
if (closeAfter) {
run.later(
this,
() => {
if (get(this, 'isDestroyed')) return
this.send('close')
},
closeAfter,
)
}
},
actions: {
close() {
if (get(this, 'message.closed')) return
get(this, 'message').setProperties({
closed: true,
visible: false,
})
run.later(
this,
() => {
let parent = get(this, 'parentView')
if (get(this, 'isDestroyed') || !parent || !get(parent, 'messages'))
return
get(parent, 'messages').removeObject(get(this, 'message'))
set(this, 'message.visible', null)
},
250,
)
},
},
})
<file_sep>/addon/components/en-notify.js
import Ember from 'ember'
const { A, get, set, computed, Component, inject } = Ember
export default Component.extend({
classNames: ['en-notify'],
notify: inject.service(),
messages: null,
init() {
this._super(...arguments)
set(this, 'messages', A())
get(this, 'notify').setTarget(this)
},
willDestroyElement() {
get(this, 'notify').setTarget(null)
},
show(message) {
if (get(this, 'isDestroyed')) return
get(this, 'messages').pushObject(message)
return message
},
})
<file_sep>/app/components/en-notify/message.js
export { default } from 'en-notify/components/en-notify/message';<file_sep>/addon/services/notify.js
import Ember from 'ember'
import Message from 'en-notify/utils/message'
const { get, set, Service } = Ember
function aliasToShow(type) {
  return function(options) {
    return this.show(type, options)
}
}
export default Service.extend({
init() {
this.pending = []
},
success: aliasToShow('success'),
error: aliasToShow('error'),
show(type, options) {
let message = Message.create({
type,
uid: options.uid,
header: options.header,
text: options.text,
errors: options.errors,
closeAfter: options.closeAfter
})
let target = get(this, 'target')
if (target) {
target.show(message)
} else {
this.pending.push(message)
}
return message
},
remove(uid) {
let target = get(this, 'target')
let message, messages, filtered
if (target) {
messages = target.get('messages')
message = messages.filter(message => get(message, 'uid') === uid)
messages.removeObjects(message)
} else {
messages = this.pending
filtered = messages.filter(message => get(message, 'uid') !== uid)
this.pending = filtered
}
},
setTarget(target) {
set(this, 'target', target)
if (target) {
this.pending.forEach(message => target.show(message))
this.pending = []
}
},
})
<file_sep>/tests/dummy/app/controllers/application.js
import Ember from 'ember'
const { get, set, run, Controller, inject: { service }} = Ember
export default Controller.extend({
notify: service(),
init () {
let notify = get(this, 'notify').error({
uid: '123',
header: "Something went wrong",
text: 'Hello'
})
run.later(() => {
set(notify, 'text', 'ok')
}, 1500)
}
})
<file_sep>/app/utils/message.js
export { default } from 'en-notify/utils/message';
|
be6167aa0b9325c4ee5d827d3c89896a7a6efdc6
|
[
"JavaScript",
"Markdown"
] | 10
|
JavaScript
|
kindlyops/en-notify
|
b0b7da74ddc65e0f6b771f559bdfbd1c3cf481f3
|
9072620b09e8770f581d06d01c351a81bcae0c6c
|
refs/heads/master
|
<file_sep># Parser_url
The script parses a site and displays the host of every link it finds. Duplicates are removed.
Run the script parser_url.php via the console.
Usage: "./php.exe parser_url.php http://URL"
<file_sep><?php
require_once( "simple_html_dom.php" );
class PageDownload {
private $url;
private $handle;
protected static $_instance;
private function __construct() {
}
private function __clone() {
}
private function __sleep() {
}
private function __wakeup() {
}
public static function getInstance() {
if ( null === self::$_instance ) {
self::$_instance = new self();
}
return self::$_instance;
}
public function action( $url = null ) {
$ValidationUrl = new ValidationUrl();
if ( null != $ValidationUrl->action( $url ) ) {
return self::download( $url );
} else {
echo null;
}
}
private function download( $url ) {
$handle = file_get_contents( $url );
return $handle;
}
}
class ValidationUrl {
private $url;
public function __construct() {
}
public function action( $url = null ) {
try {
if ( filter_var( $url, FILTER_VALIDATE_URL ) ) {
return $url;
} else {
throw new Exception( 'Error Url' );
}
} catch
( Exception $e ) {
echo $e->getMessage();
return null;
}
}
}
class ParseUrl {
private $content;
private $links = array();
private $html;
public function __construct() {
}
public function action( $content = null ) {
		$html = str_get_html( $content );
		$links = array();
foreach ( $html->find( 'a' ) as $e ) {
$links[] = parse_url( $e->href )["host"];
}
$links = array_unique( $links );
return $links;
}
}
Class PrintUrl {
private $links;
public function __construct() {
}
public function action( $links = null ) {
foreach ( $links as $link ) {
echo $link;
echo "\n";
}
}
}
if ( $argc != 1 ) {
$url = $argv[1];
$PageDownload = PageDownload::getInstance();
$Page = $PageDownload->action( $url );
$Links = new ParseUrl();
$LinksResult = $Links->action( $Page );
$PrintUrl = new PrintUrl();
$PrintUrl->action( $LinksResult );
} else {
echo 'Error data';
}
|
3e98ae6a0e85c56b86747bdd1c9649a9756fa374
|
[
"Markdown",
"PHP"
] | 2
|
Markdown
|
bigturtle88/Parser_url
|
34f1f641aad54d1b4c310d62b9680d4903d89ddb
|
51191b5132da6b3b7f80ff10cd4148dbce99d15d
|
refs/heads/master
|
<file_sep>package com.aca.disqo.util.sortlist;
import java.util.List;
public final class SortingUtilCollection {
private SortingUtilCollection(){
}
public static <T extends Comparable<T>> void insertionSort(List<T> genericList) {
int n = genericList.size();
for (int i = 1; i < n; ++i) {
T key = (T) genericList.get(i);
int j = i - 1;
while (j >= 0 && genericList.get(j).compareTo(key) > 0) {
genericList.set(j + 1, genericList.get(j));
j--;
}
genericList.set(j + 1, key);
}
}
public static <T extends Comparable<T>> void quickSort(List<T> genericList) {
recQuickSort(0, genericList.size() - 1, genericList);
}
private static <T extends Comparable<T>> void recQuickSort(int left, int right, List<T> genericList) {
if (right - left <= 0) {
return;
} else {
T pivot = genericList.get(right);
int partition = partition(left, right, pivot, genericList);
recQuickSort(left, partition - 1, genericList);
recQuickSort(partition + 1, right, genericList);
}
}
private static <T extends Comparable<T>> int partition(int left, int right, T pivot, List<T> genericList) {
int leftPtr = left - 1;
int rightPtr = right;
while (true) {
while (genericList.get(++leftPtr).compareTo(pivot) < 0) {
}
while (rightPtr > 0 && genericList.get(--rightPtr).compareTo(pivot) > 0) {
}
if (leftPtr >= rightPtr) {
break;
} else {
swap(leftPtr, rightPtr, genericList);
}
}
swap(leftPtr, right, genericList);
return leftPtr;
}
private static <T extends Comparable<T>> void swap(int dex1, int dex2, List<T> genericList) {
T temp;
temp = genericList.get(dex1);
genericList.set(dex1, genericList.get(dex2));
genericList.set(dex2, temp);
}
}
<file_sep>package com.aca.disqo.collections.arraylist;
import java.util.Iterator;
public class MyArrayListIterator<T> implements Iterator<T> {
private MyArrayList<T> source = null;
private int index = 0;
public MyArrayListIterator(MyArrayList<T> source){
this.source = source;
}
@Override
public boolean hasNext() {
return this.index < this.source.size();
}
@Override
public T next() {
return this.source.get(this.index++);
}
}
<file_sep>package com.aca.disqo.collections.arraylist;
import java.util.Iterator;
import java.util.ListIterator;
public class MyArrayList<T> implements Iterable<T> {
private Object[] data;
private static final int FACTOR = 5;
private int capacity;
private int cursor;
public MyArrayList() {
capacity = FACTOR;
data = new Object[capacity];
}
@SuppressWarnings("unchecked")
public T get(int index) {
checkIndex(index);
return (T) data[index];
}
public void add(T obj) {
if (cursor == capacity - 1) {
growArray();
}
data[cursor++] = obj;
}
private void growArray() {
capacity += FACTOR;
Object[] newData = new Object[capacity];
System.arraycopy(data, 0, newData, 0, cursor);
data = newData;
}
public void remove(int index) {
checkIndex(index);
for (int i = index; i < cursor - 1; i++) {
data[i] = data[i + 1];
}
cursor--;
}
public boolean remove(T obj) {
int index = indexOf(obj);
if (index != -1) {
remove(index);
return true;
}
return false;
}
public int size() {
return cursor;
}
public boolean contains(T obj) {
return indexOf(obj) != -1;
}
    private int indexOf(T obj) {
        for (int i = 0; i < cursor; i++) {
            if (data[i] != null) {
                if (data[i].equals(obj)) {
                    return i;
                }
            } else if (obj == null) {
                // a null slot only matches a null argument
                return i;
            }
        }
        return -1;
    }
private void checkIndex(int index) {
if (index < 0) {
throw new IndexOutOfBoundsException("Index must be > 0");
}
if (index >= cursor) {
throw new IndexOutOfBoundsException("Index must be < " + cursor);
}
}
public String toString() {
StringBuilder result = new StringBuilder("[");
for (int i = 0; i < cursor; i++) {
result.append(data[i]);
if (i < cursor - 1) {
result.append(",");
}
}
result.append("]");
return result.toString();
}
@Override
public Iterator<T> iterator() {
return new MyArrayListIterator<>(this);
}
}
<file_sep>package com.aca.disqo.util.sortlist;
public class Person implements Comparable<Person>{
private String name;
private Integer age;
public Person() {
}
public Person(String name, Integer age) {
this.name = name;
this.age = age;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public int getAge() {
return age;
}
public void setAge(int age) {
this.age = age;
}
@Override
public int compareTo(Person o) {
int result = this.age.compareTo(o.getAge());
if(result != 0){
return result;
}
return this.name.compareTo(o.getName());
}
@Override
public String toString() {
return "Person{" +
"name='" + name + '\'' +
", age=" + age +
'}';
}
}
<file_sep>package com.aca.disqo.calculateexpression;
public class Operator {
private String operator;
public Operator(String operator) {
this.operator = operator;
}
}
<file_sep>package com.aca.disqo.calculateexpression;
import java.util.Stack;
import java.util.stream.Collectors;
public class OperatorExpression extends Expression {
private Expression left;
private Expression right;
private static Stack<Expression> expressionStack = new Stack<>();
public OperatorExpression() {
}
@Override
    public int calculate(String expression) {
        // NOTE: assumes single-character operands and effectively treats every operator as '+'
String forLeft = String.valueOf(expression.toCharArray()[0]);
String forRight = expression.substring(2);
left = new ValueExpression(forLeft);
expressionStack.push(left);
if (forRight.length() == 1){
right = new ValueExpression(forRight);
expressionStack.push(right);
return expressionStack.stream().mapToInt(value1 -> value1.value).sum();
}
right = new OperatorExpression();
return calculate(forRight);
}
}
<file_sep>package com.aca.disqo.calculateexpression;
public abstract class Expression {
String expression;
int value;
public abstract int calculate(String expression);
}
|
91eeb8e2efdcdc826ca295dee53572b98ac95c0c
|
[
"Java"
] | 7
|
Java
|
AraChilingaryan/aca-disqo
|
0c17b4fa2bfef6b241f0347561601b5f7ae60844
|
a04de13e79510100aeb56d321095e6c8ec127447
|
refs/heads/master
|
<repo_name>lilianalmeida/FEUP-LCOM<file_sep>/lab5/keyboard.h
#ifndef _KEYBOARD_H_
#define _KEYBOARD_H_
#ifdef LAB3
int sys_inb_cnt(port_t port, uint32_t *byte);
#else
#define sys_inb_cnt(p,q) sys_inb(p,q)
#endif
//global variable
extern uint32_t counter; //sys_inb counter
extern bool kbc_ih_error;
extern uint32_t scanByte;
int (keyboard_subscribe)(uint8_t * bit_no);
int (keyboard_unsubscribe)();
int (kbc_pol)();
int (scancode_parse)(uint32_t byte, uint8_t nbyte);
int (interrupt_handler)();
void (isTwoByte)(bool *wait, uint8_t *nbyte);
#endif
<file_sep>/lab5/video_gr.c
#include <lcom/lcf.h>
#include <stdint.h>
#include <math.h>
#include "macro.h"
#include "video_gr.h"
static uint16_t h_res; //screen horizontal resolution
static uint16_t v_res; //screen vertical resolution
static uint8_t bits_per_pixel; // number of bits on each pixel
static char *video_mem;
static uint8_t redMaskSize;
static uint8_t redFieldPosition;
static uint8_t greenMaskSize;
static uint8_t greenFieldPosition;
static uint8_t blueMaskSize;
static uint8_t blueFieldPosition;
static uint16_t graphic_mode;
int vbe_get_mode_inf(uint16_t mode, vbe_mode_info_t* vmi_p){
mmap_t map;
phys_bytes buf;
struct reg86u r;
memset(&r, 0, sizeof(r)); /* zero the structure */
if (lm_alloc(sizeof(vbe_mode_info_t), &map) == NULL) {
printf("vbe_get_mode_inf: failed to allocate memory \n");
return 1;
}
buf = map.phys;
r.u.w.ax = VBE_MODE_INFO; /* VBE get mode info */
/* translate the buffer linear address to a far pointer */
r.u.w.es = PB2BASE(buf); /* set a segment base */
r.u.w.di = PB2OFF(buf); /* set the offset accordingly */
r.u.w.cx = mode;
r.u.b.intno = VBE_INT;
if (sys_int86(&r) != OK){
printf("vbe_get_mode_inf: sys_int86() failed to obtain mode info \n");
lm_free(&map);
return 1;
}
if(r.u.b.ah != 0){
lm_free(&map);
return 1;
}
*vmi_p = *(vbe_mode_info_t *)map.virt;
lm_free(&map);
return 0;
}
void *(vg_init)(uint16_t mode){
graphic_mode = mode;
if (lm_init(1) == NULL) {
printf("vbe_get_mode_inf: failed to initialize low memory \n");
return NULL;
}
vbe_mode_info_t vbe;
if (vbe_get_mode_inf(mode, &vbe) != OK){
return NULL;
}
bits_per_pixel = vbe.BitsPerPixel;
h_res = vbe.XResolution;
v_res = vbe.YResolution;
redMaskSize = vbe.RedMaskSize;
redFieldPosition = vbe.RedFieldPosition;
greenMaskSize = vbe.GreenMaskSize;
greenFieldPosition = vbe.GreenFieldPosition;
blueMaskSize = vbe.BlueMaskSize;
blueFieldPosition = vbe.BlueFieldPosition;
int r;
struct minix_mem_range mr; //struct to manage physical and virtual memory adresses
unsigned int vram_base = (unsigned int) vbe.PhysBasePtr; //VRAM's physical address
  unsigned int vram_size = h_res*v_res*((bits_per_pixel + 7)/8); //VRAM's size
mr.mr_base = (phys_bytes) vram_base; //assigning the memory info to the struct
mr.mr_limit = mr.mr_base + vram_size;
if( OK != (r = sys_privctl(SELF, SYS_PRIV_ADD_MEM, &mr)))
panic("sys_privctl (ADD_MEM) failed: %d\n", r); /* Map memory */
video_mem = vm_map_phys(SELF, (void *)mr.mr_base, vram_size);
if(video_mem == MAP_FAILED)
panic("couldn't map video memory");
struct reg86u reg86;
memset(®86, 0, sizeof(reg86));
reg86.u.w.ax = SET_VBE; // VBE call, function 02 -- set VBE mode
reg86.u.w.bx = BIT(14)|mode; // set bit 14: linear framebuffer
reg86.u.b.intno = VBE_INT;
if( sys_int86(®86) != OK ) {
printf("set_vbe_mode: sys_int86() failed \n");
return NULL;
}
return video_mem;
}
int col(uint16_t x, uint16_t y, uint32_t color){
if(x > h_res || y > v_res){
printf("Error: that pixel does not exist!", 0);
return 1;
}
int bytes_pixel = bits_per_pixel / 8;
if (bits_per_pixel == 15){
bytes_pixel = 2;
}
uint32_t col_tmp = color;
int video_tmp = 0;
for(int i = 0; i < bytes_pixel; i++, video_tmp++){
video_mem [(y*h_res*bytes_pixel) + (x*bytes_pixel) + video_tmp] = col_tmp;
col_tmp = col_tmp >> 8;
}
return 0;
}
int (vg_draw_hline) (uint16_t x, uint16_t y, uint16_t len, uint32_t color){
for(int i = 0; i < len; x++,i++){
if (x >= h_res ){
continue;
}
if(col(x, y ,color) != OK){
return 1;
}
}
return 0;
}
int (vg_draw_rectangle)(uint16_t x,uint16_t y,uint16_t width, uint16_t height, uint32_t color){
for(int i = 0; i< height; y++,i++){
if (y >= v_res ){
break;
}
if(vg_draw_hline(x,y, width, color)!= OK){
return 1;
}
}
return 0;
}
int drawPattern (uint8_t no_rectangles, uint32_t first, uint8_t step){
int rec_width = h_res / no_rectangles;
int rec_height = v_res / no_rectangles;
uint32_t red_mask, blue_mask, green_mask, R, G, B;
static int color;
for (unsigned int y = 0, i = 0; i < no_rectangles; i++, y += rec_height){
for (unsigned int x = 0, j = 0; j < no_rectangles; j++, x += rec_width){
if (graphic_mode == MODE105)
color = (first + (i * no_rectangles + j) * step) % (1 << bits_per_pixel);
else{
if (graphic_mode == MODE110){
red_mask = MASK_110_11A;
green_mask = MASK_110_11A;
blue_mask = MASK_110_11A;
}else if (graphic_mode == MODE115 || graphic_mode == MODE14C){
red_mask = MASK_115_14C;
green_mask = MASK_115_14C;
blue_mask = MASK_115_14C;
}else if (graphic_mode == MODE11A){
red_mask = MASK_110_11A;
green_mask = GREEN_MASK_11A;
blue_mask = MASK_110_11A;
}else {
return 1;
}
R = (((first >> redFieldPosition) & red_mask) + j * step) % (1 << redMaskSize);
G = (((first >> greenFieldPosition) & green_mask) + i * step) % (1 << greenMaskSize);
B = (((first >> blueFieldPosition) & blue_mask) + (i + j) * step) % (1 << blueMaskSize);
color = ((R << redFieldPosition) | (G << greenFieldPosition) | (B << blueFieldPosition));
}
if (vg_draw_rectangle(x, y, rec_width, rec_height, color) != OK){
return 1;
}
}
}
return 0;
}
int drawSprite (const char *xpm[], uint16_t x, uint16_t y){
uint16_t prev_x = x;
int width;
int height;
char* sprite_addr;
sprite_addr = read_xpm (xpm, &width, &height);
for (int i = 0; i < height; i++,y++){
if (y >= v_res ){
break;
}
for(int j = 0; j < width; j++, prev_x++){
if (prev_x >= h_res ){
continue;
}
if (col(prev_x, y, *(sprite_addr + i *width + j)) != OK){
return 1;
}
}
prev_x = x;
}
return 0;
}
void draw_sprite(Sprite *sp) {
uint32_t color;
int height = sp->height, width = sp->width, a = 0, y = sp->y, x = sp->x;
for (int i = 0; i < height; i++, y++) {
for (int j = 0; j < width; j++, x++) {
color = sp->map[a];
col(x, y, color);
a++;
}
x = sp->x;
}
}
void erase_sprite(Sprite *sp) {
int height = sp->height, width = sp->width, y = sp->y, x = sp->x;
for (int i = 0; i < height; i++, y++) {
for (int j = 0; j < width; j++, x++) {
col(x, y, BACKGROUNDCOLOR);
}
x = sp->x;
}
}
void move_sprite(Sprite *sprite, uint16_t xi, uint16_t yi, uint16_t xf, uint16_t yf, int16_t speed){
// if the speed is negative
if (speed < 0) {
if (sprite->x < xf && yi == yf) {
      erase_sprite(sprite);
      sprite->x++;
draw_sprite(sprite);
} else if (xi == xf && sprite->y < yf) {
erase_sprite(sprite);
sprite->y++;
draw_sprite(sprite);
} else if (sprite->x > xf && yi == yf) {
erase_sprite(sprite);
sprite->x--;
draw_sprite(sprite);
} else if (xi == xf && sprite->y > yf) {
erase_sprite(sprite);
sprite->y--;
draw_sprite(sprite);
}
// if speed is positive
} else {
if (sprite->x < xf && yi == yf) {
erase_sprite(sprite);
if ((sprite->x + speed) > xf){
sprite->x = xf;
}
sprite->x += sprite->xspeed;
draw_sprite(sprite);
}
else if (xi == xf && sprite->y < yf) {
erase_sprite(sprite);
if ((sprite->y + speed) > yf){
sprite->y = yf;
}
sprite->y += sprite->yspeed;
draw_sprite(sprite);
}
else if (sprite->x > xf && yi == yf) {
erase_sprite(sprite);
if ((sprite->x + speed) > xf){
sprite->x = xf;
}
sprite->x += -1 * sprite->xspeed;
draw_sprite(sprite);
}
else if (xi == xf && sprite->y > yf) {
erase_sprite(sprite);
if ((sprite->y + speed) > yf){
sprite->y = yf;
}
sprite->y += -1 * sprite->yspeed;
draw_sprite(sprite);
}
}
}
int vbe_get_controller_info(){
struct reg86u r;
mmap_t map;
phys_bytes buf;
vg_vbe_contr_info_t contr_info;
if ( lm_init(1) == NULL) {
printf("vbe_get_controller_info: failed to initialize low memory \n");
return 1;
}
strcpy(contr_info.VBESignature, "VBE2");
memset(&r, 0, sizeof(r)); /* zero the structure */
if (lm_alloc(sizeof( contr_info), &map) == NULL) {
printf("vbe_get_controller_info: failed to allocate memory \n");
return 1;
}
buf = map.phys;
r.u.w.ax = VBE_INFO;
/* translate the buffer linear address to a far pointer */
r.u.w.es = PB2BASE(buf); /* set a segment base */
r.u.w.di = PB2OFF(buf); /* set the offset accordingly */
r.u.b.intno = VBE_INT;
if (sys_int86(&r) != OK){
printf("vbe_get_controller_info: sys_int86() failed to obtain mode info \n");
lm_free(&map);
return 1;
}
contr_info = *((vg_vbe_contr_info_t*)map.virt);
if (vg_display_vbe_contr_info (&contr_info) != OK){
printf("vbe_get_controller_info: failed displaying VBE controller information \n");
return 1;
}
lm_free(&map);
return 0;
}
<file_sep>/lab3/macro.h
#ifndef _LCOM_MACRO_H_
#define _LCOM_MACRO_H_
#define BIT(n) (0x01 << (n))
#define STAT_REG 0x64
#define KBC_CMD_REG 0x64
#define OUT_BUF 0x60
#define IN_BUF 0x60
#define INTR_ENABLE BIT(0)
#define INTR_DISABLE 0xef
#define TWO_BYTE_SCANCODE 0xE0
#define KEYBOARD_IRQ 1
#define READ_COMMAND 0x20
#define WRITE_COMMAND 0x60
#define PAR_ERR BIT(7)
#define TO_ERR BIT(6)
#define IBF BIT(1)
#define OBF BIT(0)
#define AUX BIT(5)
#define ESC_CODE 0x81
#define DELAY_US 20000
#endif /* _LCOM_MACRO_H */
<file_sep>/lab2/lab2.c
#include <lcom/lcf.h>
#include <lcom/lab2.h>
#include <lcom/timer.h>
#include <stdbool.h>
#include <stdint.h>
#include "i8254.h"
//global variable
uint32_t counter = 0;
int main(int argc, char *argv[]) {
// sets the language of LCF messages (can be either EN-US or PT-PT)
lcf_set_language("EN-US");
// enables to log function invocations that are being "wrapped" by LCF
// [comment this out if you don't want/need/ it]
lcf_trace_calls("/home/lcom/labs/lab2/trace.txt");
// enables to save the output of printf function calls on a file
// [comment this out if you don't want/need it]
lcf_log_output("/home/lcom/labs/lab2/output.txt");
// handles control over to LCF
// [LCF handles command line arguments and invokes the right function]
if (lcf_start(argc, argv))
return 1;
// LCF clean up tasks
// [must be the last statement before return]
lcf_cleanup();
return 0;
}
//// 7.1 ////
int (timer_test_read_config)(uint8_t timer, enum timer_status_field field) {
if (timer > 2 || timer < 0){
printf("Timer not found. Choose a timer between 0 and 2",0);
return 1;
}
uint8_t st;
int erro = timer_get_conf(timer, &st);
if (erro != OK) {
printf("Error in timer_get_conf", 0);
return erro;
}
erro = timer_display_conf(timer, st, field);
if (erro != OK) {
printf("Error in timer_display_conf", 0);
return erro;
}
return 0;
}
//// 7.2 ////
int (timer_test_time_base)(uint8_t timer, uint32_t freq) {
  if (timer > 2){
printf("Timer not found",0);
return 1;
}
int erro = timer_set_frequency(timer, freq);
if (erro != OK) {
printf("Error in timer_set_frequency", 0);
return erro;
}
return 0;
}
//// 7.3 ////
int (timer_test_int)(uint8_t time) {
uint8_t bit_no = 1;
int erro=timer_subscribe_int(&bit_no);
if (erro != OK) {
printf("Error in timer_subscribe_int", 0);
return erro;
}
int ipc_status;
message msg;
uint32_t irq_set = bit_no;
while (counter / 60 < time) {
/* Get a request message. */
if ((erro = driver_receive(ANY, &msg, &ipc_status)) != 0) {
printf("Driver_receive failed with: %d", erro);
continue;
}
if (is_ipc_notify(ipc_status)) { /* received notification */
switch (_ENDPOINT_P(msg.m_source)) {
case HARDWARE: /* hardware interrupt notification */
if (msg.m_notify.interrupts & irq_set) { /* subscribed interrupt */
/* process it */
timer_int_handler();
if (counter % 60 == 0) {
timer_print_elapsed_time();
}
}
break;
default:
break; /* no other notifications expected: do nothing */
}
} else { /* received a standard message, not a notification */
/* no standard messages expected: do nothing */
}
}
erro=timer_unsubscribe_int();
if (erro != OK) {
printf("Error in timer_unsbscribe_int", 0);
return erro;
}
return 0;
}
//// 7.2 ////
int (util_get_LSB)(uint16_t val, uint8_t *lsb) {
*lsb = (uint8_t) val;
return 0;
}
int (util_get_MSB)(uint16_t val, uint8_t *msb) {
val = val >> 8;
*msb = (uint8_t) val;
return 0;
}
<file_sep>/lab4/lab4.c
// IMPORTANT: you must include the following line in all your C files
#include <lcom/lcf.h>
#include <lcom/timer.h>
#include <stdint.h>
#include <stdio.h>
// Any header files included below this line should have been created by you
#include "mouse_test.h"
#include "macros.h"
#include "i8254.h"
int main(int argc, char *argv[]) {
// sets the language of LCF messages (can be either EN-US or PT-PT)
lcf_set_language("EN-US");
// enables to log function invocations that are being "wrapped" by LCF
// [comment this out if you don't want/need/ it]
lcf_trace_calls("/home/lcom/labs/lab4/trace.txt");
// enables to save the output of printf function calls on a file
// [comment this out if you don't want/need it]
lcf_log_output("/home/lcom/labs/lab4/output.txt");
// handles control over to LCF
// [LCF handles command line arguments and invokes the right function]
if (lcf_start(argc, argv))
return 1;
// LCF clean up tasks
// [must be the last statement before return]
lcf_cleanup();
return 0;
}
int (mouse_test_packet)(uint32_t cnt) {
if(cnt <=0){
printf("Invalid parameter! Make sure cnt is greater than zero", 0);
return 1;
}
int ipc_status;
uint32_t counter = 0;
message msg;
uint8_t mouse_id;
uint32_t r;
if(set_stream_mode() != OK){
return 1;
}
if(mouse_enable_data() != 0){
printf("The program failed to enable the mouse data reporting\n");
return 1;
}
if(mouse_subscribe(&mouse_id) != 0){
printf("Error subscribing mouse notifications\n");
return -1;
}
uint32_t irq_set = BIT(mouse_id);
while(counter < cnt) {
if ( (r = driver_receive(ANY, &msg, &ipc_status)) != 0 ) {
printf("driver_receive failed with: %d\n", r);
continue;
}
if (is_ipc_notify(ipc_status)) {
switch (_ENDPOINT_P(msg.m_source)) {
case HARDWARE:
if (msg.m_notify.interrupts & irq_set) {
mouse_ih();
if (kbc_ih_error == true){
byteNumber = 0;
continue;
}
if(byteNumber == 3){
counter++;
print_packet();
byteNumber = 0;
}
}
default:
break;
}
} else {
}
}
if(mouse_unsubscribe() != 0){
printf("The program was unable to unsubscribe a mouse notification\n");
return 1;
}
if(mouse_disable_data() != 0){
printf("Error disabling mouse data reporting\n");
return 1;
}
return 0;
}
///////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////
int (mouse_test_remote)(uint16_t period, uint8_t cnt) {
if(period <=0 || cnt <=0 || cnt > 255){
printf("Invalid parameters! Make sure period and cnt are greater than zero and cnt is lower than 256.",0);
return 1;
}
uint32_t counter = 0;
while (counter < cnt){
if (byteNumber == 0){
write_kbc(READ_DATA);
}
mouse_ih();
if (kbc_ih_error == true){
byteNumber = 0;
continue;
}
if(byteNumber == 3){
print_packet();
byteNumber = 0;
counter++;
tickdelay (micros_to_ticks(period*1000));
}
}
if(set_stream_mode() != OK){
return 1;
}
if(mouse_disable_data() != OK){
return 1;
}
if(enable_mouse_interrupts()!= OK){
return 1;
}
return 0;
}
///////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////
int (mouse_test_async)(uint8_t idle_time) {
if(idle_time <= 0){
printf("Invalid parameter! Make sure idle_time is greater than zero.", 0);
return 1;
}
uint8_t bit_no_mouse, bit_no_timer;
uint32_t erro;
if (timer_subscribe_int(&bit_no_timer)) {
printf("Error in timer_subscribe_int", 0);
return 1;
}
if(set_stream_mode() != OK){
return 1;
}
if(mouse_enable_data() != 0){
printf("The program failed to enable the mouse data reporting\n");
return 1;
}
if(mouse_subscribe(&bit_no_mouse) != 0){
printf("Error subscribing mouse notifications\n");
return 1;
}
uint32_t irq_set_mouse = BIT(bit_no_mouse);
uint32_t irq_set_timer = BIT(bit_no_timer);
int ipc_status;
message msg;
while (counter_t / 60 < idle_time) {
if ((erro = driver_receive(ANY, &msg, &ipc_status)) != 0) {
printf("Driver_receive failed with: %d", erro);
continue;
}
if (is_ipc_notify(ipc_status)) {
switch (_ENDPOINT_P(msg.m_source)) {
case HARDWARE:
if (msg.m_notify.interrupts & irq_set_timer) {
timer_int_handler();
}
if (msg.m_notify.interrupts & irq_set_mouse) {
mouse_ih();
if (kbc_ih_error == true){
byteNumber = 0;
continue;
}
if(byteNumber == 3){
print_packet();
byteNumber = 0;
counter_t = 0;
}
}
break;
default:
break;
}
}
else {
/*do nothing */
}
}
if ( timer_unsubscribe_int()) { //unsubscribes the timer
printf("Error in timer_unsbscribe_int", 0);
return 1;
}
if(mouse_unsubscribe() != 0){
printf("The program was unable to unsubscribe a mouse notification\n");
return 1;
}
if(mouse_disable_data() != 0){
printf("Error disabling mouse data reporting\n");
return 1;
}
return 0;
}
///////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////
int (mouse_test_gesture)(uint8_t x_len, uint8_t tolerance){
if(x_len <= 0 || x_len > 255 || tolerance < 0 || tolerance > 255){
printf("Invalid arguments! Make sure x_len is between 0 and 256 and tolerance is not negative.",0);
return 1;
}
int ipc_status;
message msg;
uint8_t mouse_id;
uint32_t r;
struct mouse_ev event;
event.delta_x = 0;
event.delta_y = 0;
if(set_stream_mode() != OK){
return 1;
}
if(mouse_enable_data() != 0){
printf("The program failed to enable the mouse data reporting\n");
return 1;
}
if(mouse_subscribe(&mouse_id) != 0){
printf("Error subscribing mouse notifications\n");
return -1;
}
uint32_t irq_set = BIT(mouse_id);
while(state != COMP) {
if ( (r = driver_receive(ANY, &msg, &ipc_status)) != 0 ) {
printf("driver_receive failed with: %d\n", r);
continue;
}
if (is_ipc_notify(ipc_status)) {
switch (_ENDPOINT_P(msg.m_source)) {
case HARDWARE:
if (msg.m_notify.interrupts & irq_set) {
mouse_ih();
if (kbc_ih_error == true){
byteNumber = 0;
continue;
}
if(byteNumber == 3){
print_packet();
byteNumber = 0;
mouse_events_handler(&event,x_len, tolerance);
}
}
break;
default:
break;
}
} else {
/* do nothing */
}
}
if(mouse_unsubscribe() != 0){
printf("The program was unable to unsubscribe a mouse notification\n");
return 1;
}
if(mouse_disable_data() != 0){
printf("Error disabling mouse data reporting\n");
return 1;
}
return 0;
}
<file_sep>/lab5/sprite.c
#include <lcom/lcf.h>
#include <stdint.h>
#include "sprite.h"
#include "macro.h"
Sprite *create_sprite(const char *pic[], int x, int y, int xspeed, int yspeed) {
//allocate space for the "object"
Sprite *sp = (Sprite *)malloc(sizeof(Sprite));
if (sp == NULL)
return NULL;
// read the sprite pixmap
sp->map = read_xpm(pic, &(sp->width), &(sp->height));
if (sp->map == NULL) {
free(sp);
return NULL;
}
sp->x = x;
sp->y = y;
sp->xspeed = xspeed;
sp->yspeed = yspeed;
return sp;
}
void destroy_sprite(Sprite *sp) {
if (sp == NULL)
return;
if (sp->map)
free(sp->map);
free(sp);
sp = NULL; // XXX: pointer is passed by value
// should do this @ the caller
}
<file_sep>/lab5/macro.h
#ifndef _LCOM_MACRO_H_
#define _LCOM_MACRO_H_
#define BIT(n) (0x01 << (n))
#define PB2BASE(x) (((x) >> 4) & 0x0F000)
//#define PB2OFF(x) ((x) & 0x0FFFF)
#define STAT_REG 0x64
#define KBC_CMD_REG 0x64
#define OUT_BUF 0x60
#define INTR_ENABLE BIT(0)
#define INTR_DISABLE 0xef
#define TWO_BYTE_SCANCODE 0xE0
#define KEYBOARD_IRQ 1
#define READ_COMMAND 0x20
#define WRITE_COMMAND 0x60
#define PAR_ERR BIT(7)
#define TO_ERR BIT(6)
#define IBF BIT(1)
#define OBF BIT(0)
#define AUX BIT(5)
#define ESC_CODE 0x81
#define SET_VBE 0x4F02
#define LINEAR_FB 0x105
#define VBE_INT 0x10 /* BIOS video services */
#define VBE_INFO 0x4F00
#define VBE_MODE_INFO 0x4F01
#define MODE105 0x105
#define MODE110 0x110
#define MODE115 0x115
#define MODE11A 0x11A
#define MODE14C 0x14C
#define MASK_115_14C 0x00FF
#define MASK_110_11A 0x001F
#define GREEN_MASK_11A 0x003F
#define BACKGROUNDCOLOR 0
#define DELAY_US 20000
#define DELAY_TOS 1000000
#endif /* _LCOM_MACRO_H */
<file_sep>/lab5/lab5.c
// IMPORTANT: you must include the following line in all your C files
#include <lcom/lcf.h>
#include <lcom/timer.h>
#include <lcom/lab5.h>
#include <stdint.h>
#include <stdio.h>
#include "macro.h"
#include "video_gr.h"
#include "keyboard.h"
// Any header files included below this line should have been created by you
int main(int argc, char *argv[]) {
// sets the language of LCF messages (can be either EN-US or PT-PT)
lcf_set_language("EN-US");
// enables to log function invocations that are being "wrapped" by LCF
// [comment this out if you don't want/need it]
lcf_trace_calls("/home/lcom/labs/lab5/trace.txt");
// enables to save the output of printf function calls on a file
// [comment this out if you don't want/need it]
lcf_log_output("/home/lcom/labs/lab5/output.txt");
// handles control over to LCF
// [LCF handles command line arguments and invokes the right function]
if (lcf_start(argc, argv))
return 1;
// LCF clean up tasks
// [must be the last statement before return]
lcf_cleanup();
return 0;
}
int (video_test_init)(uint16_t mode, uint8_t delay) {
if (vg_init(mode) == NULL){
printf("Error:failed to set graphics mode\n");
return 1;
}
tickdelay (micros_to_ticks (delay*DELAY_TOS));
if (vg_exit() != OK){
printf("Error:failed to set default Minix 3 text mode\n");
return 1;
}
return 0;
}
int (video_test_rectangle)(uint16_t mode, uint16_t x, uint16_t y, uint16_t width, uint16_t height, uint32_t color) {
uint8_t bit_no;
int ipc_status;
message msg;
uint32_t r;
if(keyboard_subscribe(&bit_no) != OK){
printf("Error enabling keyboard interrupts\n");
return 1;
}
uint32_t irq_set = BIT(bit_no);
if(vg_init(mode) == NULL){
printf("Error setting graphics mode\n");
return 1;
}
if (vg_draw_rectangle (x,y,width,height,color) != OK){
printf("Error drawing rectangle\n");
return 1;
}
while (scanByte != ESC_CODE) {
/* Get a request message. */
if ((r = driver_receive(ANY, &msg, &ipc_status)) != 0) {
printf("driver_receive failed with: %d\n", r);
continue;
}
if (is_ipc_notify(ipc_status)) { /* received notification */
switch (_ENDPOINT_P(msg.m_source)) {
case HARDWARE: /* hardware interrupt notification */
if (msg.m_notify.interrupts & irq_set) { /* subscribed interrupt */
kbc_ih();
if (kbc_ih_error) {
kbc_ih_error = false;
continue;
}
tickdelay(micros_to_ticks(DELAY_US));
}
break;
default:
break; /* no other notifications expected: do nothing */
}
} else { /* received a standard message, not a notification */
/* no standard messages expected: do nothing */
}
}
if (keyboard_unsubscribe() != 0) {
printf("Error disabling keyboard interrupts\n");
return 1;
}
if (vg_exit() != OK){
printf("Error:failed to set default Minix 3 text mode\n");
return 1;
}
printf("Set to text mode\n");
return 0;
}
int (video_test_pattern)(uint16_t mode, uint8_t no_rectangles, uint32_t first, uint8_t step) {
uint8_t bit_no;
int ipc_status;
message msg;
uint32_t r;
if(keyboard_subscribe(&bit_no) != OK){
printf("Error enabling keyboard interrupts",0);
return 1;
}
uint32_t irq_set = BIT(bit_no);
if(vg_init(mode) == NULL){
vg_exit();
printf("Error setting graphics mode\n");
return 1;
}
if (drawPattern(no_rectangles, first, step) != OK){
printf("Error drawing pattern\n");
return 1;
}
while (scanByte != ESC_CODE) {
/* Get a request message. */
if ((r = driver_receive(ANY, &msg, &ipc_status)) != 0) {
printf("driver_receive failed with: %d\n", r);
continue;
}
if (is_ipc_notify(ipc_status)) { /* received notification */
switch (_ENDPOINT_P(msg.m_source)) {
case HARDWARE: /* hardware interrupt notification */
if (msg.m_notify.interrupts & irq_set) { /* subscribed interrupt */
kbc_ih();
if (kbc_ih_error) {
kbc_ih_error = false;
continue;
}
tickdelay(micros_to_ticks(DELAY_US));
}
break;
default:
break; /* no other notifications expected: do nothing */
}
} else { /* received a standard message, not a notification */
/* no standard messages expected: do nothing */
}
}
if (keyboard_unsubscribe() != 0) {
printf("Error disabling keyboard interrupts\n");
return 1;
}
if (vg_exit() != OK){
printf("Error:failed to set default Minix 3 text mode\n");
return 1;
}
printf("Set to text mode\n");
return 0;
}
int (video_test_xpm)(const char *xpm[], uint16_t x, uint16_t y){
uint8_t bit_no;
int ipc_status;
message msg;
uint32_t r;
if(keyboard_subscribe(&bit_no) != OK){
printf("Error enabling keyboard interrupts",0);
return 1;
}
uint32_t irq_set = BIT(bit_no);
if(vg_init(MODE105) == NULL){
printf("Error setting graphics mode\n");
return 1;
}
if (drawSprite (xpm, x, y) != OK){
printf("Error drawing sprite\n");
return 1;
}
while (scanByte != ESC_CODE) {
/* Get a request message. */
if ((r = driver_receive(ANY, &msg, &ipc_status)) != 0) {
printf("driver_receive failed with: %d\n", r);
continue;
}
if (is_ipc_notify(ipc_status)) { /* received notification */
switch (_ENDPOINT_P(msg.m_source)) {
case HARDWARE: /* hardware interrupt notification */
if (msg.m_notify.interrupts & irq_set) { /* subscribed interrupt */
kbc_ih();
if (kbc_ih_error) {
kbc_ih_error = false;
continue;
}
tickdelay(micros_to_ticks(DELAY_US));
}
break;
default:
break; /* no other notifications expected: do nothing */
}
} else { /* received a standard message, not a notification */
/* no standard messages expected: do nothing */
}
}
if (keyboard_unsubscribe() != 0) {
printf("Error disabling keyboard interrupts\n");
return 1;
}
vg_exit();
printf("Set to text mode\n");
return 0;
}
int (video_test_move)(const char *xpm[], uint16_t xi, uint16_t yi, uint16_t xf, uint16_t yf, int16_t speed, uint8_t fr_rate){
uint8_t bit_no_kbd, bit_no_timer;
int ipc_status, j = sys_hz()/fr_rate;
message msg;
Sprite *sprite;
if (j <= 0)
return 1;
if (xf == xi) {
sprite = create_sprite(xpm, xi, yi, 0, speed);
}
else if (yf == yi) {
sprite = create_sprite(xpm, xi, yi, speed, 0);
} else {
printf("Invalid movement", 0);
return 1;
}
int r;
if ((r = keyboard_subscribe(&bit_no_kbd)) != OK) {
printf("Error in keyboard_subscribe", 0);
return r;
}
if ((r = timer_subscribe_int(&bit_no_timer)) != OK) {
printf("Error in timer_subscribe_int", 0);
return r;
}
uint32_t irq_set_kbd = BIT(bit_no_kbd);
uint32_t irq_set_timer = BIT(bit_no_timer);
if(vg_init(MODE105) == NULL){
printf("Error setting graphics mode\n");
return 1;
}
uint32_t counter_t =0;
draw_sprite(sprite);
while (scanByte != ESC_CODE) {
/* Get a request message. */
if ((r = driver_receive(ANY, &msg, &ipc_status)) != 0) {
printf("driver_receive failed with: %d\n", r);
continue;
}
if (is_ipc_notify(ipc_status)) { /* received notification */
switch (_ENDPOINT_P(msg.m_source)) {
case HARDWARE: /* hardware interrupt notification */
if (msg.m_notify.interrupts & irq_set_timer) {/* subscribed interrupt */
timer_int_handler();
if((counter_t % j) == 0 && speed >= 0){
move_sprite(sprite, xi, yi, xf, yf, speed);
}
else if((counter_t *j) % abs(speed) == 0 && speed < 0){
move_sprite(sprite, xi, yi, xf, yf, speed);
}
}
if (msg.m_notify.interrupts & irq_set_kbd) { /* subscribed interrupt */
kbc_ih();
if (kbc_ih_error) {
kbc_ih_error = false;
continue;
}
tickdelay(micros_to_ticks(DELAY_US));
}
break;
default:
break; /* no other notifications expected: do nothing */
}
} else { /* received a standard message, not a notification */
/* no standard messages expected: do nothing */
}
}
destroy_sprite(sprite);
if (keyboard_unsubscribe() != 0) {
printf("Error disabling keyboard interrupts\n");
return 1;
}
if (timer_unsubscribe_int()!= OK) {
printf("Error in timer_unsbscribe_int", 0);
return 1;
}
vg_exit();
printf("Set to text mode\n");
return 0;
}
int (video_test_controller)(){
if (vbe_get_controller_info() != OK){
printf("Error gettinf the VBE controller information \n");
return 1;
}
return 0;
}
<file_sep>/lab3/keyboard.c
#include <lcom/lcf.h>
#include <lcom/timer.h>
#include <stdint.h>
#include "keyboard.h"
#include "macro.h"
#include "i8254.h"
//global variable
static int hook_id = 0x01;
uint32_t counter = 0;
uint32_t scanByte = 0;
int (sys_inb_count)(port_t port, uint32_t *byte) {
counter++;
return sys_inb(port, byte);
}
int (keyboard_subscribe)(uint8_t * bit_no) {
*bit_no = hook_id;
int erro = sys_irqsetpolicy(KEYBOARD_IRQ, (IRQ_REENABLE | IRQ_EXCLUSIVE),
&hook_id);
if (erro != OK) {
printf("Error in sys_irqsetpolicy", 0);
return erro;
}
return 0;
}
int (keyboard_unsubscribe)() {
int erro = sys_irqrmpolicy(&hook_id);
if (erro != OK) {
printf("Error in sys_irqrmpolicy", 0);
return erro;
}
return 0;
}
void (kbc_ih)(void) {
uint32_t stat = 0;
int numCiclos = 0;
while (numCiclos < 5) {
if (sys_inb_count(STAT_REG, &stat) != OK) {
kbc_ih_error = true;
return;
}
if (stat & OBF) {
if (sys_inb_count(OUT_BUF, &scanByte) != OK) {
kbc_ih_error = true;
return;
}
if ((stat & (PAR_ERR | TO_ERR)) == 0) {
kbc_ih_error = false;
return;
} else {
kbc_ih_error = true;
return;
}
}
numCiclos++;
}
kbc_ih_error = true;
return;
}
void (isTwoByte)(bool *wait, uint8_t *nbyte) {
if (*wait == false) {
if (scanByte == TWO_BYTE_SCANCODE) {
*wait = true;
return;
} else {
*nbyte = 1;
*wait = false;
}
}
else {
*nbyte = 2;
*wait = false;
}
}
int (scancode_parse)(uint32_t byte, uint8_t nbyte) {
uint8_t scancode[nbyte];
if (nbyte == 2) {
scancode[0] = TWO_BYTE_SCANCODE;
scancode[1] = (uint8_t) byte;
} else {
scancode[0] = (uint8_t) byte;
}
int erro = kbd_print_scancode(!((BIT(7) & byte) >> 7), nbyte, scancode);
if (erro != OK) {
printf("Error in kbd_print_scancode", 0);
return erro;
}
return 0;
}
int (kbc_pol)() {
uint32_t stat = 0;
int numCiclos = 0;
while (numCiclos <= 5) {
if (sys_inb_count(STAT_REG, &stat) != OK) {
return -1;
}
if (stat & OBF) {
if (sys_inb_count(OUT_BUF, &scanByte) != OK) {
return -1;
}
if ((stat & (PAR_ERR | TO_ERR | AUX)) != 0) {
return -1;
} else
return 0;
}
numCiclos++;
}
return -1;
}
int (interrupt_handler)() {
uint32_t cmd;
uint32_t stat = 0;
int numCiclos = 0;
if (sys_outb(KBC_CMD_REG, READ_COMMAND) != OK) {
return -1;
}
if (sys_inb(OUT_BUF, &cmd) != OK) {
return -1;
}
cmd = (cmd & INTR_DISABLE) | INTR_ENABLE;
if (sys_outb(KBC_CMD_REG, WRITE_COMMAND) != OK) {
return -1;
}
while (numCiclos < 5) {
if (sys_inb(STAT_REG, &stat) != 0)
return -1; //assuming it returns OK
if ((stat & IBF) == 0) {
if (sys_outb(OUT_BUF, cmd) != OK) {
return -1;
} else {
return 0;
}
}
numCiclos++;
}
return -1;
}
<file_sep>/lab4/macros.h
#ifndef _LCOM_MACROS_H_
#define _LCOM_MACROS_H_
#define BIT(n) (0x01 << (n))
#define KEYBOARD_IRQ 1
#define MOUSE_IRQ 12
#define STAT_REG 0x64 // Status Register
#define CTRL_REG 0x64 // Control Register
#define KBC_CMD_REG 0x64 // Keyboard command register
#define KBC_CMD_INIT 0xD4 // Write byte to mouse
#define OUT_BUFF 0x60 // Output Buffer
#define IN_BUFF 0x60 // Input Buffer
#define ESC_BREAK 0x81 // Breakcode of Escape key
#define MSB 0x80 // Most significant bit
#define TWO_BYTE_CODE 0xE0 // Double byte scancode
#define MOUSE_DIS_CMD 0xFD // Mouse disable interrupts command
#define MOUSE_EN_CMD 0xFD // Mouse enable interrupts command
#define READ_DATA 0xEB // Send data packet request
#define READ_COMMAND 0x20
#define WRITE_COMMAND 0x60
#define IBF BIT(1) // Input buffer full
#define OBF BIT(0) // Output buffer full
#define ACK 0xFA // Everything is OK
#define NACK 0xFE // Invalid byte
#define ERROR 0xFC // Second consecutive invalid byte
#define MOUSE_ENABLE 0XF4 // Enable data reporting in stream only
#define MOUSE_DISABLE 0XF5 // Disable data reporting in stream only
#define SET_REMOTE 0xF0 // Set remote mode
#define SET_STREAM 0xEA // Set stream mode
#define MOUSE_BIT3 BIT(3) // The 3rd bit of the first byte is always set
#define PAR_ERR BIT(7)
#define TO_ERR BIT(6)
#define DELAY_US 20000
#endif
<file_sep>/lab5/sprite.h
#ifndef _SPRITE_H_
#define _SPRITE_H_
typedef struct {
int x, y; // current position
int width, height; // dimensions
int xspeed, yspeed; // current speed
char *map; // the pixmap
} Sprite;
//int drawSprite (const char *pic[], uint16_t x, uint16_t y);
Sprite *create_sprite(const char *pic[], int x, int y, int xspeed, int yspeed);
//void draw_sprite(Sprite *sp);
//void erase_sprite(Sprite *sp);
void destroy_sprite(Sprite *sp);
#endif
<file_sep>/lab5/macros.h
#ifndef _LCOM_MACROS_H_
#define _LCOM_MACROS_H_
#define BIT(n) (0x01 << (n))
#define KEYBOARD_IRQ 1
#define MOUSE_IRQ 12
#define STAT_REG 0x64 // Status Register
#define CTRL_REG 0x64 // Control Register
#define KBC_CMD_REG 0x64 // Keyboard command register
#define KBC_CMD_INIT 0xD4 // Write byte to mouse
#define OUT_BUFF 0x60 // Output Buffer
#define IN_BUFF 0x60 // Input Buffer
#define ESC_BREAK 0x81 // Breakcode of Escape key
#define MSB 0x80 // Most significant bit
#define TWO_BYTE_CODE 0xE0 // Double byte scancode
#define MOUSE_DIS_CMD 0xFD // Mouse disable interrupts command
#define MOUSE_EN_CMD 0xFD // Mouse enable interrupts command
#define READ_DATA 0xEB // Send data packet request
#define READ_COMMAND 0x20
#define WRITE_COMMAND 0x60
#define IBF BIT(1) // Input buffer full
#define OBF BIT(0) // Output buffer full
#define ACK 0xFA // Everything is OK
#define NACK 0xFE // Invalid byte
#define ERROR 0xFC // Second consecutive invalid byte
#define MOUSE_ENABLE 0XF4 // Enable data reporting in stream only
#define MOUSE_DISABLE 0XF5 // Disable data reporting in stream only
#define SET_REMOTE 0xF0 // Set remote mode
#define SET_STREAM 0xEA // Set stream mode
#define MOUSE_BIT3 BIT(3) // The 3rd bit of the first byte is always set
#define PAR_ERR BIT(7)
#define TO_ERR BIT(6)
#define DELAY_US 20000
#endif //_LCOM_MACROS_H_
<file_sep>/lab5/video_gr.h
#ifndef _VIDEO_GR_H_
#define _VIDEO_GR_H_
#include "sprite.h"
int vbe_get_mode_inf(uint16_t mode, vbe_mode_info_t* vmi_p);
int col(uint16_t x, uint16_t y, uint32_t color);
int drawPattern (uint8_t no_rectangles, uint32_t first, uint8_t step);
int drawSprite (const char *xpm[], uint16_t x, uint16_t y);
void draw_sprite(Sprite *sp);
void erase_sprite(Sprite *sp);
int vbe_get_controller_info();
void move_sprite(Sprite *sprite, uint16_t xi, uint16_t yi, uint16_t xf, uint16_t yf, int16_t speed);
#endif
<file_sep>/lab4/mouse_test.h
#ifndef _MOUSE_TEST_H_
#define _MOUSE_TEST_H_
typedef enum {INIT, DRAW1, LINE1, VERTEX, DRAW2, LINE2, COMP} state_t;
int (mouse_subscribe)(uint8_t * bit_no);
int (mouse_unsubscribe)();
int (mouse_enable_data)();
int (mouse_disable_data)();
int (write_kbc)(uint32_t cmd_byte);
void (print_packet)();
int (set_stream_mode)();
int (enable_mouse_interrupts)();
void (gesture_handler)(struct mouse_ev *evt, uint8_t x_len);
void (mouse_events_handler)(struct mouse_ev *event, uint8_t x_len, uint8_t tolerance);
extern unsigned int byteNumber;
extern bool kbc_ih_error;
extern state_t state;
#endif //_MOUSE_TEST_H_
<file_sep>/lab3/lab3.c
#include <lcom/lcf.h>
#include <lcom/timer.h>
#include <stdbool.h>
#include <stdint.h>
#include "keyboard.h"
#include "macro.h"
#include "i8254.h"
extern uint32_t scanByte;
bool kbc_ih_error = false;
uint32_t scanByte_asm;
bool kbc_ih_error_asm;
int main(int argc, char *argv[]) {
// sets the language of LCF messages (can be either EN-US or PT-PT)
lcf_set_language("EN-US");
// enables to log function invocations that are being "wrapped" by LCF
  // [comment this out if you don't want/need it]
lcf_trace_calls("/home/lcom/labs/lab3/trace.txt");
// enables to save the output of printf function calls on a file
// [comment this out if you don't want/need it]
lcf_log_output("/home/lcom/labs/lab3/output.txt");
// handles control over to LCF
// [LCF handles command line arguments and invokes the right function]
if (lcf_start(argc, argv))
return 1;
// LCF clean up tasks
// [must be the last statement before return]
lcf_cleanup();
return 0;
}
int (kbd_test_scan)(bool assembly) {
if (assembly != 0 && assembly != 1) {
printf("Assembly not valid in kbd_test_scan", 0);
return 1;
}
uint8_t bit_no;
int erro = keyboard_subscribe(&bit_no);
if (erro != OK) {
printf("Error in keyboard_subscribe", 0);
return erro;
}
uint32_t irq_set = BIT(bit_no);
int ipc_status;
message msg;
  uint8_t nbyte = 0; // number of bytes in the scancode
bool wait = false;
while (scanByte != ESC_CODE) {
/* Get a request message. */
if ((erro = driver_receive(ANY, &msg, &ipc_status)) != 0) {
printf("Driver_receive failed with: %d", erro);
continue;
}
if (is_ipc_notify(ipc_status)) { /* received notification */
switch (_ENDPOINT_P(msg.m_source)) {
case HARDWARE: /* hardware interrupt notification */
if (msg.m_notify.interrupts & irq_set) { /* subscribed interrupt */
if (!assembly)
kbc_ih();
else if (assembly) {
kbc_asm_ih();
scanByte = scanByte_asm;
kbc_ih_error = kbc_ih_error_asm;
}
if (kbc_ih_error) {
kbc_ih_error = false;
} else {
isTwoByte(&wait, &nbyte);
if (wait == false) {
erro = scancode_parse(scanByte, nbyte);
if (erro != OK)
return erro;
}
}
tickdelay (micros_to_ticks(DELAY_US));
          break;
        }
default:
break; /* no other notifications expected: do nothing */
}
}
else { /* received a standard message, not a notification */
/* no standard messages expected: do nothing */
}
}
  if (!assembly) {
    erro = kbd_print_no_sysinb(counter);
    if (erro != OK)
      return erro;
  }
erro = keyboard_unsubscribe();
if (erro != OK) {
printf("Error in keyboard_unsbscribe", 0);
return erro;
}
counter = 0;
return 0;
}
int (kbd_test_poll)() {
  uint8_t nbyte = 0; // number of bytes in the scancode
bool wait = false;
while (scanByte != ESC_CODE) {
    if (kbc_pol() == -1) {
continue;
}
isTwoByte(&wait, &nbyte);
if (wait == false) {
int erro = scancode_parse(scanByte, nbyte);
if (erro != OK)
return erro;
}
    tickdelay(micros_to_ticks(DELAY_US));
  }
if (kbd_print_no_sysinb (counter)) {
return !OK;
}
if (interrupt_handler() != OK) {
printf("Error in interrupt_handler", 0);
return 1;
}
counter = 0;
return 0;
}
int (kbd_test_timed_scan)(uint8_t n) {
if (n < 1) {
printf("Error: Invalid time.\n");
return 1;
}
  uint8_t nbyte = 0; // number of bytes in the scancode
bool wait = false;
uint8_t bit_no_kbd, bit_no_timer;
int erro = keyboard_subscribe(&bit_no_kbd); //subscribes the keyboard
if (erro != OK) {
printf("Error in keyboard_subscribe", 0);
return erro;
}
erro = timer_subscribe_int(&bit_no_timer); //subscribes the timer
if (erro != OK) {
printf("Error in timer_subscribe_int", 0);
return erro;
}
uint32_t irq_set_kbd = BIT(bit_no_kbd);
uint32_t irq_set_timer = BIT(bit_no_timer);
int ipc_status;
message msg;
while (scanByte != ESC_CODE && counter_t / 60 < n) { //Stops if ESC is pressed or has no input for n seconds
/* Get a request message. */
if ((erro = driver_receive(ANY, &msg, &ipc_status)) != 0) {
printf("Driver_receive failed with: %d", erro);
continue;
}
if (is_ipc_notify(ipc_status)) { /* received notification */
switch (_ENDPOINT_P(msg.m_source)) {
case HARDWARE: /* hardware interrupt notification */
if (msg.m_notify.interrupts & irq_set_timer) {/* subscribed interrupt */
timer_int_handler();
}
if (msg.m_notify.interrupts & irq_set_kbd) { /* subscribed interrupt */
kbc_ih();
if (kbc_ih_error) {
kbc_ih_error = false;
} else {
isTwoByte(&wait, &nbyte); //checks if the code is two bytes or not
if (wait == false) {
erro = scancode_parse(scanByte, nbyte); //calls kbd_print_scancode with the correct arguments
if (erro != OK)
return erro;
}
counter_t = 0;
}
            tickdelay(micros_to_ticks(DELAY_US));
          }
break;
default:
break; /* no other notifications expected: do nothing */
}
}
else { /* received a standard message, not a notification */
/* no standard messages expected: do nothing */
}
}
erro = keyboard_unsubscribe(); //unsubscribes the keyboard
if (erro != OK) {
printf("Error in keyboard_unsbscribe", 0);
return erro;
}
  erro = timer_unsubscribe_int(); // unsubscribes the timer
  if (erro != OK) {
    printf("Error in timer_unsubscribe_int", 0);
return erro;
}
return 0;
}
<file_sep>/lab4/mouse_test.c
#include <lcom/lcf.h>
#include <lcom/timer.h>
#include "mouse_test.h"
#include "macros.h"
#include "i8254.h"
static int hook_id = 0x01;
unsigned int byteNumber = 0;
static uint32_t byte_array[3];
bool kbc_ih_error = false;
uint32_t counter_t = 0;
state_t state = INIT;
static struct packet pp;
int (mouse_subscribe)(uint8_t * bit_no) {
*bit_no = hook_id;
if(sys_irqsetpolicy(MOUSE_IRQ,(IRQ_REENABLE | IRQ_EXCLUSIVE),&hook_id) != OK){
printf("Error subscribing mouse\n");
return 1;
}
return 0;
}
///////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////
int (mouse_unsubscribe)() {
int erro = sys_irqrmpolicy(&hook_id);
if (erro != OK) {
printf("Error in sys_irqrmpolicy", 0);
return erro;
}
return 0;
}
///////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////
int (mouse_enable_data)(){
int counter =0;
  while(counter <5){ // returns 0 if data reporting was enabled successfully within 5 tries
if(write_kbc(MOUSE_ENABLE) == 0)
return 0;
counter++;
}
return 1;
}
///////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////
int (mouse_disable_data)(){
int counter =0;
  while(counter <5){ // returns 0 if data reporting was disabled successfully within 5 tries
if(write_kbc(MOUSE_DISABLE)== 0)
return 0;
counter++;
}
return 1;
}
///////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////
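/*
 * write_kbc: forwards a command byte to the mouse through the KBC, as done below:
 * wait until the input buffer is free, write KBC_CMD_INIT (0xD4) to the command
 * register, wait again, write the command byte to the input buffer, then read the
 * mouse's reply from the output buffer -- retry on NACK, return 1 on ERROR, and
 * return 0 once the byte is acknowledged.
 */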
int (write_kbc)(uint32_t cmd_byte){
uint32_t status = 0;
int done = 0;
uint32_t verification;
while(!done){
if (sys_inb(STAT_REG, &status) != OK) return -1;
if ((status & IBF) == 0) {
sys_outb(KBC_CMD_REG, KBC_CMD_INIT); //prepares mouse for writing
}
if (sys_inb(STAT_REG, &status) != OK) return -1;
if ((status & IBF) == 0) {
sys_outb(IN_BUFF,cmd_byte); //writes the command byte
}
sys_inb(OUT_BUFF,&verification);
if(verification == NACK){
continue;
}
else if(verification == ERROR){
return 1;
}
else{
done= 1;
}
}
return 0;
}
///////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////
void (mouse_ih)(void){
uint32_t byte;
if(sys_inb(OUT_BUFF,&byte) != 0){//reads output buffer
printf("Error reading output buffer\n",0 );
kbc_ih_error = true;
return;
}
byte_array[byteNumber] = byte; //sends the byte read to the array
byteNumber++;
if(!((MOUSE_BIT3 & byte_array[0])>> 3)){ //tests bit(3) of the first byte
printf("Error reading mouse packet\n",0 );
kbc_ih_error = true;
return;
}
kbc_ih_error = false;
return;
}
///////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////
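/*
 * The mouse displacements are 9-bit two's complement values: bits 4 and 5 of the
 * first packet byte carry the signs of delta_x and delta_y. Worked example: if
 * BIT(4) is set and byte_array[1] == 0xF6, then delta_x = 0xFF00 | 0xF6 = 0xFFF6,
 * which is -10 when read as a signed 16-bit value.
 */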
void (print_packet)(){
pp.bytes[0] = byte_array[0];
pp.bytes[1] = byte_array[1];
pp.bytes[2] = byte_array[2];
pp.rb = (byte_array[0] & BIT(1)) >> 1;
pp.lb = (byte_array[0] & BIT(0));
pp.mb = (byte_array[0] & BIT(2)) >> 2;
pp.x_ov = (byte_array[0] & BIT(6)) >> 6;
pp.y_ov = (byte_array[0] & BIT(7)) >> 7;
if ((byte_array[0] & BIT(4)) == 0){
pp.delta_x = 0x00FF & byte_array[1];
}
else{
pp.delta_x = 0xFF00 | byte_array[1];
}
if ((byte_array[0] & BIT(5)) == 0){
pp.delta_y = 0x00FF & byte_array[2];
}
else{
pp.delta_y = 0xFF00 | byte_array[2];
}
mouse_print_packet(&pp);
}
///////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////
int (set_stream_mode)(){
if(write_kbc(SET_STREAM)== 0)
return 0;
return 1;
}
///////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////
int (enable_mouse_interrupts)() {
uint32_t cmd;
uint32_t stat = 0;
int numCiclos = 0;
cmd = minix_get_dflt_kbc_cmd_byte();
while (numCiclos < 5) {
if (sys_inb(STAT_REG, &stat) != 0)
return 1;
if ((stat & IBF) == 0) {
if (sys_outb(KBC_CMD_REG, WRITE_COMMAND) != OK) {
return 1;
}
else{
numCiclos = 0;
break;
}
}
numCiclos++;
}
while (numCiclos < 5) {
if (sys_inb(STAT_REG, &stat) != 0)
return 1;
if ((stat & IBF) == 0) {
if (sys_outb(OUT_BUFF, cmd) != OK) {
return 1;
} else {
return 0;
}
}
numCiclos++;
}
return 1;
}
void (mouse_events_handler)(struct mouse_ev *event,uint8_t x_len, uint8_t tolerance){
if (!pp.rb && pp.lb && !pp.mb){ //if left button is pressed
if (pp.delta_x == 0 && pp.delta_y == 0){
event->type = LB_PRESSED;
gesture_handler(event, x_len);
return;
}
event->delta_x += pp.delta_x;
event->delta_y += pp.delta_y;
if ((pp.delta_x >= -tolerance) && (pp.delta_y >= -tolerance)){ //displacements in x and y
if (event->delta_y / event->delta_x > 0){ //positive slope
event->type = MOUSE_MOV;
gesture_handler(event, x_len);
}
else{
event->type = BUTTON_EV;
gesture_handler(event, x_len);
}
}else{
event->type = BUTTON_EV;
gesture_handler(event, x_len);
}
  }else if (pp.rb && !pp.lb && !pp.mb){ //if right button is pressed
if(pp.delta_x == 0 && pp.delta_y == 0){
event->type = RB_PRESSED;
gesture_handler(event, x_len);
return;
}
event->delta_x += pp.delta_x;
event->delta_y += pp.delta_y;
if ((pp.delta_x >= -tolerance) && (pp.delta_y <= tolerance)){ //displacements in x and y
      if (event->delta_y / event->delta_x < 0){ //negative slope
event->type = MOUSE_MOV;
gesture_handler(event, x_len);
}
else{
event->type = BUTTON_EV;
gesture_handler(event, x_len);
}
}else{
event->type = BUTTON_EV;
gesture_handler(event, x_len);
}
}else if (!pp.rb && !pp.lb && !pp.mb){ //if no button is pressed
if (state == LINE1 || state == DRAW1){
event->type = LB_RELEASED;
}else if (state == LINE2 || state == DRAW2){
event->type = RB_RELEASED;
}
else if(state == VERTEX) {
if ((abs(pp.delta_x) <= tolerance) && (abs(pp.delta_y) <= tolerance)){
event->type = MOUSE_MOV;
}
else{
event->type = BUTTON_EV;
}
}
else
event->type = BUTTON_EV;
gesture_handler(event, x_len);
}
else{ //if the middle button is pressed or more than one button is pressed
event->type = BUTTON_EV;
gesture_handler(event, x_len);
}
}
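/*
 * Gesture recognised by the state machine below (roughly an inverted "V"):
 *   INIT   -> DRAW1 : left button pressed
 *   DRAW1  -> LINE1 : drag with slope > 1 and horizontal span >= x_len
 *   LINE1  -> VERTEX: left button released
 *   VERTEX -> DRAW2 : right button pressed
 *   DRAW2  -> LINE2 : drag with |slope| > 1 and horizontal span >= x_len
 *   LINE2  -> COMP  : right button released (gesture complete)
 * Most unexpected events reset the machine to INIT.
 */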
void (gesture_handler)(struct mouse_ev *evt, uint8_t x_len) {
switch (state) {
case INIT:
if( evt->type == LB_PRESSED )
state = DRAW1;
break;
case DRAW1:
if( evt->type == MOUSE_MOV ) {
if ((evt->delta_x >= x_len) && (evt->delta_y/evt->delta_x > 1)){
state = LINE1;
}
} else if( evt->type == LB_RELEASED || evt->type == RB_PRESSED || evt->type == BUTTON_EV){
state = INIT;
evt->delta_x = 0;
evt->delta_y = 0;
}
break;
case LINE1:
if( evt->type == LB_RELEASED ){
state = VERTEX;
}else if (evt->type == MOUSE_MOV){
state = DRAW1;
}else if(evt->type == RB_PRESSED || evt->type == BUTTON_EV){
state = INIT;
evt->delta_x = 0;
evt->delta_y = 0;
}
break;
case VERTEX:
if( evt->type == RB_PRESSED ){
state = DRAW2;
evt->delta_x = 0;
evt->delta_y = 0;
}else if(evt->type == LB_PRESSED){
state = DRAW1;
evt->delta_x = 0;
evt->delta_y = 0;
}else if(evt->type == BUTTON_EV){
state = INIT;
evt->delta_x = 0;
evt->delta_y = 0;
}
break;
case DRAW2:
if( evt->type == MOUSE_MOV ) {
if ((evt->delta_x >= x_len) && (abs(evt->delta_y/evt->delta_x) > 1)){
state = LINE2;
}
} else if( evt->type == RB_RELEASED || evt->type == LB_PRESSED || evt->type == BUTTON_EV){
state = INIT;
evt->delta_x = 0;
evt->delta_y = 0;
}
break;
case LINE2:
if( evt->type == RB_RELEASED ){
state = COMP;
}
if (evt->type == MOUSE_MOV){
state = DRAW2;
}else if(evt->type == LB_PRESSED || evt->type == BUTTON_EV){
state = INIT;
evt->delta_x = 0;
evt->delta_y = 0;
}
break;
default:
break;
}
}
|
33851d913256f6dcd46a8ae0d45ba659c4ae8b00
|
[
"C"
] | 16
|
C
|
lilianalmeida/FEUP-LCOM
|
888cfced4d667e8ed68226860bcf98c3df7878a8
|
6669b72f8e3741ecec31c473ef83330c2ad96b00
|
refs/heads/main
|
<file_sep># Leibniz Series. More info can be found at wikiHow.com
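# The series: pi = 4/1 - 4/3 + 4/5 - 4/7 + ...
# It converges slowly (roughly 10x more terms per extra correct digit),
# which is what the timing printout at the end illustrates.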
from math import trunc
import time
start_time = time.time()
def value_pi(digit):
current = 4
previous = 0
number = 0
add_up = 1
while abs(round(current, digit + 1) - round(previous, digit + 1)):
add_up += 2
previous = current
if number < 0:
number = 4 / add_up
else:
number = -4/ add_up
current += number
#print(f'calculating: {abs(round(current, digit + 1) - round(previous, digit + 1))}')
#current_time = time.time()
#print(current_time - start_time)
#print(f'{current}\t{previous}\t{add_up}\t{number}')
#print(f'{round(current, digit+1)}\t{round(previous, digit+1)}')
return trunc(current * 10 ** digit) / (10 ** digit)
while True:
try:
digit = int(input('decimal number? :'))
print(f'pi is {value_pi(digit)}')
print(f'calculation took {time.time() - start_time} seconds.')
break
except:
continue<file_sep># python
This repo is made for personal python programming practices.
<file_sep># I'm gonna make a war game from scratch
from random import shuffle
# global variables
suits = ('Hearts', 'Diamonds', 'Spades', 'Clubs')
ranks = ('Two', 'Three', 'Four', 'Five', 'Six', 'Seven', 'Eight', 'Nine','Ten', 'Jack', 'Queen', 'King', 'Ace')
values = {'Two': 2, 'Three': 3, 'Four': 4, 'Five': 5, 'Six': 6, 'Seven': 7,'Eight': 8, 'Nine': 9, 'Ten': 10, 'Jack': 11, 'Queen': 12, 'King': 13, 'Ace': 14}
# alternative way to generate the dictionary using enumerate()
'''
value = {}
for v, k in enumerate(ranks):
value[k] = v
'''
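# (Note: enumerate() starts at 0, so to line up with the card values above an
#  offset is needed, e.g. {rank: i for i, rank in enumerate(ranks, start=2)}.)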
# Card class for each card
class Card:
def __init__(self, suit, rank):
self.suit = suit
self.rank = rank
def __str__(self, suit, rank):
return f'{self.rank} of {self.suit}'
# Deck class for each game
class Deck:
def __init__(self):
self.cards = []
for suit in suits:
for rank in ranks:
self.cards.append(Card(suit, rank))
def shuffle_cards(self):
shuffle(self.cards)
def deal(self):
return self.cards.pop()
# Hand class for each player
class Hand:
def __init__(self):
self.cards = []
def __len__(self):
return len(self.cards)
def add_cards(self, card):
        if isinstance(card, list):  # accept either a single Card or a list of Cards
self.cards.extend(card)
else:
self.cards.append(card)
def draw_a_card(self):
return self.cards.pop(0) # in FIFO ways
# game logic
war_rule = 5
game_on = True
at_war = False
print('Welcome to the War Game!')
# initialize
game_deck = Deck()
game_deck.shuffle_cards()
game_round = 0
player1_hand = Hand()
player2_hand = Hand()
for i in range(26):
player1_hand.add_cards(game_deck.deal())
player2_hand.add_cards(game_deck.deal())
while game_on:
# print(f'Round {game_round}: ', end='')
game_round += 1
player1 = []
player2 = []
if player1_hand.__len__() == 0:
        print('\nPlayer 2 has won! Player 1 is out of cards...')
break
else:
player1.append(player1_hand.draw_a_card())
if player2_hand.__len__() == 0:
        print('\nPlayer 1 has won! Player 2 is out of cards...')
break
else:
player2.append(player2_hand.draw_a_card())
at_war = True
print(f'Round {game_round:{3}}: ', end='')
while at_war:
if values[player1[-1].rank] > values[player2[-1].rank]:
player1_hand.add_cards(player1 + player2)
print(f'Player 1 has won.', end='')
print(f'\t Cards Left: {len(player1_hand)} vs {len(player2_hand)}')
at_war = False
elif values[player1[-1].rank] < values[player2[-1].rank]:
player2_hand.add_cards(player1 + player2)
print(f'Player 2 has won.', end='')
print(f'\t Cards Left: {len(player1_hand)} vs {len(player2_hand)}')
at_war = False
else:
print('{0:17}\t'.format('War!')) # still not accustomed to string formatting...
if len(player1_hand) < war_rule:
                print('\nPlayer 2 has won the game! Player 1 does not have enough cards for war...')
game_on = False
break
elif len(player2_hand) < war_rule:
                print('\nPlayer 1 has won the game! Player 2 does not have enough cards for war...')
game_on = False
break
else:
for i in range(war_rule):
player1.append(player1_hand.draw_a_card())
player2.append(player2_hand.draw_a_card())
# This took a long time to finish. The code looks inefficient.<file_sep>import random
suits = ('Hearts', 'Diamonds', 'Clubs', 'Spades')
ranks = (
'Two', 'Three', 'Four', 'Five', 'Six', 'Seven',
'Eight', 'Nine', 'Ten', 'Jack', 'Queen', 'King',
'Ace'
)
values = {
'Two': 2, 'Three': 3, 'Four': 4, 'Five': 5,
'Six': 6, 'Seven': 7, 'Eight': 8, 'Nine': 9,
'Ten': 10, 'Jack': 10, 'Queen': 10, 'King': 10,
'Ace': 11
}
'''
QUESTION (not solved):
Can I use a list or a tuple of the dictionary's keys instead of the ranks variable?
'''
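# (Answer sketch: yes -- e.g. ranks could be built as tuple(values.keys());
#  in Python 3.7+ dicts preserve insertion order, so iteration order is unchanged.)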
playing = True
class Card:
def __init__(self, suit, rank):
self.suit = suit
self.rank = rank
# String value
def __str__(self):
return f'{self.rank} of {self.suit}'
class Deck:
def __init__(self):
self.cards = []
for suit in suits:
for rank in ranks:
self.cards.append(Card(suit, rank))
    # TODO(done): missing __str__ method implementation
def __str__(self):
card_list = ''
for card in self.cards:
card_list = card_list + card.__str__()
# card_list += '\n ' + card.__str__()
        # Note: strings (like tuples) are immutable, but += just builds a new string, so concatenation works fine here.
return card_list
def shuffle(self):
random.shuffle(self.cards)
def deal(self):
return self.cards.pop()
class Hand:
def __init__(self):
self.cards = []
self.total = 0
self.aces = 0
def add_card(self, card):
self.cards.append(card)
self.total += values[card.rank]
if card.rank == 'Ace':
self.aces += 1
def adjust_for_ace(self):
while self.total > 21 and self.aces:
#if self.total > 21 and self.aces:
# MISTAKE(corrected): use while not if, because there might be more than 2 aces
self.total -= 10
self.aces -= 1
# TODO(done): implement default value instead of hard coding
class Chips:
def __init__(self, amount=100):
self.total = amount
self.bet = 0
def win_bet(self):
self.total += self.bet
def lose_bet(self):
self.total -= self.bet
def hit(deck, hand):
hand.add_card(deck.deal())
hand.adjust_for_ace()
def hit_or_stand(deck):
global playing
while True:
choice = input('Hit or Stand? ').lower()
if choice[0] == 'h':
hit(deck, player_hand)
elif choice[0] == 's':
playing = False
else:
print('Choose either \'Hit\' or \'Stand\'.')
continue
break
# TODO: make another version that uses the * operator to display the cards
def show_some(player_hand, dealer_hand):
print('\nDealer\'s Hand:')
print('<card hidden>')
#print('\n'.join([str(card) for card in dealer_hand.cards[1:]]))
print(dealer_hand.cards[1])
print('\nPlayer\'s Hand:', *player_hand.cards, sep = '\n')
#print('\n'.join([str(card) for card in player_hand.cards]))
def show_all(player_hand, dealer_hand):
print('\nDealer\'s Hand:', *dealer_hand.cards, sep = '\n')
#print('\n'.join([str(card) for card in dealer_hand.cards]))
print(f'Dealer\'s Total = {dealer_hand.total}')
print('\nPlayer\'s Hand:', *player_hand.cards, sep = '\n')
#print('\n'.join([str(card) for card in player_hand.cards]))
print(f'Player\'s Total = {player_hand.total}')
def take_bet(chips):
while True:
try:
chips.bet = int(input('How much would you bet? '))
except ValueError:
print('You must input a number, try again!')
#continue
            # (a continue here would be redundant: after the except block the loop repeats anyway)
else:
if chips.bet > chips.total:
print(f'You cannot bet more than {chips.total}, try again!')
#continue
                # (likewise redundant: without a break the loop repeats on its own)
else:
break
def player_busts(chips):
print('\nPlayer Busts!')
chips.lose_bet()
def player_wins(chips):
print('\nPlayer Wins!')
chips.win_bet()
def dealer_busts(chips):
print('\nDealer Busts!')
chips.win_bet()
def player_lose(chips):
print('\nDealer Wins!')
chips.lose_bet()
def push():
print('\nPlayer and Dealer Tie!')
while True:
deck = Deck()
deck.shuffle()
player_hand = Hand()
dealer_hand = Hand()
chips = Chips()
take_bet(chips)
for i in range(2):
player_hand.add_card(deck.deal())
dealer_hand.add_card(deck.deal())
show_some(player_hand, dealer_hand)
while playing:
hit_or_stand(deck)
show_some(player_hand, dealer_hand)
if player_hand.total > 21:
player_busts(chips)
#playing = False
break
if player_hand.total <= 21:
while dealer_hand.total < 17:
dealer_hand.add_card(deck.deal())
show_all(player_hand, dealer_hand)
if player_hand.total > dealer_hand.total:
player_wins(chips)
elif dealer_hand.total > 21:
dealer_busts(chips)
elif player_hand.total < dealer_hand.total:
player_lose(chips)
elif player_hand.total == dealer_hand.total:
push()
print(f'\nPlayer have {chips.total} chips.')
if input('Another Game? Yes or No: ').lower()[0] == 'y':
playing = True
continue
else:
break<file_sep>from random import randint
class Board:
def __init__(self):
        self.board = [str(i) for i in range(10)]  # index 0 is unused, so positions 1-9 map directly to cells
def display_board(self):
print(' | |')
print(f' {self.board[7]} | {self.board[8]} | {self.board[9]}')
print(' | |')
print('------------')
print(' | |')
print(f' {self.board[4]} | {self.board[5]} | {self.board[6]}')
print(' | |')
print('------------')
print(' | |')
print(f' {self.board[1]} | {self.board[2]} | {self.board[3]}')
print(' | |')
def mark(self, position, marker):
if self.board[position].isdigit():
self.board[position] = marker
return True
else:
return False
def win_check(self, marker):
if ((self.board[1] == self.board[2] == self.board[3] == marker) or
(self.board[4] == self.board[5] == self.board[6] == marker) or
(self.board[7] == self.board[8] == self.board[9] == marker) or
(self.board[1] == self.board[4] == self.board[7] == marker) or
(self.board[2] == self.board[5] == self.board[8] == marker) or
(self.board[3] == self.board[6] == self.board[9] == marker) or
(self.board[1] == self.board[5] == self.board[9] == marker) or
(self.board[3] == self.board[5] == self.board[7] == marker)):
return marker
def full_board_check(self):
for cell in self.board[1:]:
if cell.isdigit():
return False
return True
# select marker for each player
def marker_choice():
while True:
p1_mark = input('Choose your marker, O or X: ').lower()
if p1_mark != 'o' and p1_mark != 'x':
print('Wrong input, choose either O or X...')
continue
else:
if p1_mark == 'o':
return ('O', 'X') # this will exit the func, so no break will be needed
else:
return ('X', 'O')
# show each player's marker
def confirm_marker(player1, player2):
print(f'Player 1\'s marker: {player1}\nPlayer 2\'s marker: {player2}')
# randomize the first player
def random_first():
first_player = randint(1, 2)
if first_player == 1:
return 'player1'
else:
return 'player2'
# place marker on the board
def make_move(board, player_marker, turn):
while True:
user_input = input(f'{turn}({player_marker}), Choose your position : ')
        if not user_input.isdigit():
print('Please provide a number from 1 to 9...')
continue
else:
position = int(user_input)
            if position < 1 or position > 9:
print('Out of range, try again...')
continue
else:
if board.mark(position, player_marker):
break
else:
print(f'{position} is already occupied, try again...')
continue
# game function
def game(board, player, turn):
make_move(board, player, turn)
board.display_board()
# print(board.full_board_check())
if board.win_check(player) == player:
print(f'{turn} has won the game!')
return (False, turn)
elif board.full_board_check():
print('No more space! Draw!')
return (False, turn)
else:
if turn == 'player1':
return(True, 'player2')
else:
return(True, 'player1')
# replay
def wanna_replay():
while True:
answer = input('Play Again?[yes/no] ').lower()
if answer[0] != 'y' and answer[0] != 'n':
print('Answer again, Yes or No?')
continue
elif answer[0] == 'y':
return True
else:
return False
# game itself
while True:
print('Welcome to tic-tac-toe game!\n')
new_board = Board()
game_on = True
new_board.display_board()
player1, player2 = marker_choice()
confirm_marker(player1, player2)
turn = random_first()
while game_on:
if turn == 'player1':
game_on, turn = game(new_board, player1, turn)
elif turn == 'player2':
game_on, turn = game(new_board, player2, turn)
# replay
if wanna_replay():
continue
else:
break
|
0b65210b72ae786c8fd047c40abedad649e9a783
|
[
"Markdown",
"Python"
] | 5
|
Python
|
rkim79/python
|
4ab57ff5f32c1a2ac37e4dfc02e7fcdbee1631c4
|
191779e56e0e9a6639bdc41f57cc59aedd531954
|
refs/heads/master
|
<repo_name>kosk0525kosk/object-oriented-game<file_sep>/scripts/stages/stage1.js
class Stage1 extends Stage {
constructor() {
super();
this.objs.push(new Ground(0, -gameManager.groundH));
this.objs.push(new Block(200, 50));
}
}<file_sep>/scripts/objects/game-object.js
class GameObject {
constructor(x = 0, y = 0) {
this.x = x;
this.y = y;
this.w = 0;
this.h = 0;
}
  // x-coordinates of the left/right edges, y-coordinates of the top/bottom edges
get leftX() { return this.x; }
get rightX() { return this.x + this.w; }
get topY() { return this.y + this.h; }
get bottomY() { return this.y; }
  // landing on obj
land(obj) {
this.dy = 0;
this.y = obj.topY;
}
static collision(obj, stage) {
for (let i = 0; i < stage.objs.length; i++) {
if (GameObject.isLanding(obj, stage.objs[i])) {
obj.land(stage.objs[i]);
}
}
}
  // Landing check:
  // returns true when a is landing on b
static isLanding(a, b) {
return (
(
(a.leftX > b.leftX && a.leftX < b.rightX)
|| (a.rightX > b.leftX && a.rightX < b.rightX)
)
&& (a.bottomY <= b.topY)
);
}
}<file_sep>/scripts/objects/stage-object/block.js
class Block extends GameObject {
constructor(x, y) {
super(x, y);
this.w = 50;
this.h = 50;
}
display() {
fill(200);
rect(this.x, this.y, this.w, this.h);
}
}<file_sep>/scripts/stages/stage.js
class Stage {
constructor() {
this.objs = new Array();
}
display() {
    // background and outer frame
background(255);
noFill();
rect(0, -gameManager.groundH, width, height);
for(let i = 0; i < this.objs.length; i++) {
this.objs[i].display();
}
}
}<file_sep>/scripts/game-manager.js
class GameManager {
  // Getters
  // e.g. defining get a() { return b } makes this.a return b
get groundW() { return width; }
get groundH() { return height / 8; }
get stageH() { return height - this.groundH; }
get stageW() { return width; }
  // initialization
initGame() {
this.player = new Player(0, 0);
this.stage = new Stage1();
}
// 座標系の変換
transformCanvas() {
// キャンバスの原点
translate(0, height - this.groundH);
// y軸の正の方向が上方向となるようにする
scale(1, -1);
}
  // drawing
display() {
    // stage
this.stage.display();
    // player
this.player.display();
}
  // update state
updateStatus() {
    // player
this.player.move();
GameObject.collision(this.player, this.stage);
}
  // collision detection
collision() {
//
for (let i = 0; i < this.stage.objs.length; i++) {
if (GameObject.isLanding(this.player, this.stage.objs[i])) {
this.player.land(this.stage.objs[i]);
}
}
}
  // keyPressed method picked up by p5.js
keyPressed() {
this.player.keyPressed();
}
}
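// Minimal p5.js wiring sketch (illustrative only; assumes the project's sketch.js
// creates a global `gameManager` and uses the standard p5.js hooks):
//
//   function setup() {
//     createCanvas(640, 480);
//     gameManager = new GameManager();
//     gameManager.initGame();
//   }
//   function draw() {
//     gameManager.transformCanvas();
//     gameManager.updateStatus();
//     gameManager.display();
//   }
//   function keyPressed() {
//     gameManager.keyPressed();
//   }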
|
399da6cbd9775bbd53644329a6dbbf2ee88def93
|
[
"JavaScript"
] | 5
|
JavaScript
|
kosk0525kosk/object-oriented-game
|
41b232078e501168bbf3e20b6dca4d365880c79e
|
ad0cef36d4f6d39dc88c13f39272f830e6e168bd
|
refs/heads/master
|
<repo_name>morinted/gerrit-fetch-all<file_sep>/gerrit-fetch-all
#! /bin/bash
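# Usage: ./gerrit-fetch-all [remote]
# Fetches every Gerrit change ref from the given remote (default "origin") and
# creates a local change/* branch for each ref that does not already have one.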
REMOTE="${1-origin}"
BRANCHES=$(git branch -l)
git ls-remote "${REMOTE}" | grep /changes/ | awk '{print $2;}' | while read REF
do
local_name=$(echo "${REF}" | sed 's#refs/changes/../#change/#')
if [[ $BRANCHES != *"$local_name"* ]]; then
git fetch "${REMOTE}" "${REF}"
    git branch "${local_name}" FETCH_HEAD
fi
done
|
13c08a5003114e902c9c01165b00b34b3a40a9ae
|
[
"Shell"
] | 1
|
Shell
|
morinted/gerrit-fetch-all
|
35487bfaef13d5aa4f0c8e389a3b521775f1010e
|
b4df604dd49b0915233b6585ab252aa9df7d06dd
|
refs/heads/master
|
<file_sep>import re
import sys
from game import Game
from config import *
from model import *
import utils
"""
Run this file with argument specifying the models from terminal if you
want to play ai-vs-ai game
e.g. python3 ai-vs-ai.py saved-models/version0000.h5 saved-models/version0033.h5
"""
def load_agent(model_path, model_num, verbose=False):
model = ResidualCNN()
model.version = utils.find_version_given_filename(model_path)
if verbose:
print('\nLoading model {} from path {}'.format(model_num, model_path))
model.load_weights(model_path)
if verbose:
        print('Model {} is loaded successfully\n'.format(model_num))
return model
def agent_match(model1_path, model2_path, num_games, verbose=False, tree_tau=DET_TREE_TAU, enforce_move_limit=False):
win_count = { PLAYER_ONE: 0, PLAYER_TWO: 0 }
model1 = load_agent(model1_path, 1, verbose)
model2 = load_agent(model2_path, 2, verbose)
for i in range(num_games):
if verbose:
utils.stress_message('Game {}'.format(i + 1))
game = Game(p1_type='ai', p2_type='ai', verbose=verbose, model1=model1, model2=model2, tree_tau=tree_tau)
winner = game.start(enforce_move_limit=enforce_move_limit)
if winner is not None:
win_count[winner] += 1
if verbose:
print('Agent "{}" wins {} matches'.format(model1_path, win_count[PLAYER_ONE]))
print('Agent "{}" wins {} matches'.format(model2_path, win_count[PLAYER_TWO]))
# Return the winner by at least 55% win rate
if win_count[PLAYER_ONE] > int(0.55 * num_games):
return model1_path
elif win_count[PLAYER_TWO] > int(0.55 * num_games):
return model2_path
else:
return None
if __name__ == '__main__':
if len(sys.argv) < 3:
        print('Usage: python3 ai-vs-ai.py <model1_path> <model2_path> [<tree tau>]')
exit()
if len(sys.argv) == 3:
agent_match(sys.argv[1], sys.argv[2], 1, True)
else:
tree_tau = float(sys.argv[3])
utils.stress_message('Using tree_tau {} initially'.format(tree_tau))
agent_match(sys.argv[1], sys.argv[2], 1, True, tree_tau)
<file_sep>import sys
import h5py
import numpy as np
def count_items(v_y):
count = dict()
for val in v_y:
if val in count:
count[val] += 1
else:
count[val] = 1
return count
def get_train_label_count(path):
with h5py.File(path, 'r') as H:
board_x = np.copy(H['board_x'])
pi_y = np.copy(H['pi_y'])
v_y = np.copy(H['v_y'])
return count_items(v_y)
if __name__ == '__main__':
if len(sys.argv) != 2:
print('Missing training data file')
exit()
path = sys.argv[1]
print(get_train_label_count(path))
<file_sep>from __future__ import print_function
import numpy as np
from config import *
import board_utils
import operator
from collections import deque
class Board:
def __init__(self, randomised=False):
"""
Get the numpy array representing this board.
Array is shaped 7x7x3, where the first 7x7 plane
is the current board, while the latter are the
two previous steps.
PLAYER_ONE and PLAYER_TWO's checkers are initialised
at bottom left and top right corners respectively.
"""
self.board = np.zeros((BOARD_WIDTH, BOARD_HEIGHT, BOARD_HIST_MOVES), dtype='uint8') # Initialize empty board
self.board[:, :, 0] = np.array([[0, 0, 0, 0, 2, 2, 2],
[0, 0, 0, 0, 0, 2, 2],
[0, 0, 0, 0, 0, 0, 2],
[0, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 0, 0],
[1, 1, 0, 0, 0, 0, 0],
[1, 1, 1, 0, 0, 0, 0]])
# == Directions Map ==
#
# NW north
# west east
# south SE
self.directions = [
(-1, 0), # north
(0, 1), # east
(1, 1), # southeast
(1, 0), # south
(0, -1), # west
(-1, -1) # northwest
]
self.checkers_pos = [None,
{0: (BOARD_HEIGHT-1, 0), 1: (BOARD_HEIGHT-2, 0), 2: (BOARD_HEIGHT-1, 1),
3: (BOARD_HEIGHT-3, 0), 4: (BOARD_HEIGHT-2, 1), 5: (BOARD_HEIGHT-1, 2)},
{0: (0, BOARD_WIDTH-1), 1: (1, BOARD_WIDTH-1), 2: (0, BOARD_WIDTH-2),
3: (2, BOARD_WIDTH-1), 4: (1, BOARD_WIDTH-2), 5: (0, BOARD_WIDTH-3)}]
self.checkers_id = [None,
{(BOARD_HEIGHT-1, 0): 0, (BOARD_HEIGHT-2, 0): 1, (BOARD_HEIGHT-1, 1): 2,
(BOARD_HEIGHT-3, 0): 3, (BOARD_HEIGHT-2, 1): 4, (BOARD_HEIGHT-1, 2): 5},
{(0, BOARD_WIDTH-1): 0, (1, BOARD_WIDTH-1): 1, (0, BOARD_WIDTH-2): 2,
(2, BOARD_WIDTH-1): 3, (1, BOARD_WIDTH-2): 4, (0, BOARD_WIDTH-3): 5}]
self.hist_moves = deque()
if randomised:
self.randomise_initial_state()
def randomise_initial_state(self):
'''
Randomise the starting state of board
'''
self.board[:, :, 0] = 0
position_list = [(row, col) for row in range(BOARD_HEIGHT) for col in range(BOARD_WIDTH)]
# Randomly choose 12 positions and put checkers there
chosen_indexes = np.random.choice(len(position_list), size=NUM_CHECKERS*2, replace=False)
chosen_position = [position_list[i] for i in chosen_indexes]
self.checkers_pos = [None, {}, {}]
self.checkers_id = [None, {}, {}]
# Take care to initialise the checkers_pos/checkers_id lookup table
index = 0
for player_num in [PLAYER_ONE, PLAYER_TWO]:
for checker_id in range(NUM_CHECKERS):
checker_pos = chosen_position[index]
self.board[checker_pos][0] = player_num
self.checkers_pos[player_num][checker_id] = checker_pos
self.checkers_id[player_num][checker_pos] = checker_id
index += 1
assert index == NUM_CHECKERS * 2
def check_win(self):
"""
Returns the winner given the current board state; 0 if game still going
To win:
player 1: all checkers to upper right
player 2: all checkers to lower left
"""
cur_board = self.board[:, :, 0]
one_win = two_win = True
for k in range(BOARD_WIDTH - ROWS_OF_CHECKERS, BOARD_WIDTH):
if one_win:
up_diag = cur_board.diagonal(k)
if not np.array_equal(up_diag, [PLAYER_ONE]*len(up_diag)):
one_win = False
if two_win:
down_diag = cur_board.diagonal(-k)
if not np.array_equal(down_diag, [PLAYER_TWO]*len(down_diag)):
two_win = False
if not one_win and not two_win:
return 0
return PLAYER_ONE if one_win else PLAYER_TWO
def visualise(self, cur_player=None, gap_btw_checkers=3):
"""
Prints the current board for human visualisation
"""
print('=' * 75)
print('Current Status:' + ' ' * 40 + 'Current Player: {}\n'.format(cur_player))
cur_board = self.board[:, :, 0] # Get current board from the topmost layer
visual_width = BOARD_WIDTH * (gap_btw_checkers + 1) - gap_btw_checkers
visual_height = BOARD_HEIGHT * 2 - 1 # Dimensions for visualisation
leading_spaces = visual_width // 2
for i in range(1, visual_height + 1):
# Number of slots in the board row
num_slots = i if i <= BOARD_WIDTH else visual_height - i + 1
print('\tRow {:2}{}'.format(i, ' '*8), end='')
# Print leading spaces
print(' ' * ((leading_spaces - (num_slots - 1) * ((gap_btw_checkers + 1) // 2))), end='')
print((' ' * gap_btw_checkers).join(map(str, cur_board.diagonal(BOARD_WIDTH - i))), end='\n\n') # Board contents
print('=' * 75)
def valid_checker_moves(self, cur_player, checker_pos):
"""
Returns all valid moves for one checker piece
"""
result = []
# map to check already explored moves
check_map = np.zeros((BOARD_WIDTH, BOARD_HEIGHT), dtype='uint8')
# expand to each directions without jump
result.append(checker_pos)
check_map[checker_pos] = 1
for walk_dir in self.directions:
row, col = tuple(map(operator.add, checker_pos, walk_dir))
if not board_utils.is_valid_pos(row, col):
continue
if self.board[row, col, 0] == 0:
result.append((row, col))
check_map[row, col] = 1
# check continous jump moves
self.board[checker_pos[0], checker_pos[1], 0] = 0; # Remove current checker before checking
self.valid_checker_jump_moves(result, check_map, checker_pos)
self.board[checker_pos[0], checker_pos[1], 0] = cur_player; # Put back current checker
result.remove(checker_pos) # Don't allow staying
return result
def valid_checker_jump_moves(self, valid_moves, check_map, checker_pos):
"""
Add all recursive jumping moves into the list of valid moves
"""
curr_row, curr_col = checker_pos
# expand with jump
for walk_dir in self.directions:
step = 1
row_inc, col_inc = walk_dir
row, col = curr_row + row_inc, curr_col + col_inc
valid_pos = True
# Go along the direction to find the first checker and record steps
while True:
if not board_utils.is_valid_pos(row, col):
valid_pos = False
break
if self.board[row, col, 0] != 0:
break
step += 1
row += row_inc
col += col_inc
if not valid_pos:
continue
# Continue in the direction to find the mirror move
for i in range(step):
row += row_inc
col += col_inc
if not board_utils.is_valid_pos(row, col) or self.board[row, col, 0] != 0:
valid_pos = False
break
if not valid_pos:
continue
# get the row and col ready to jump
# check whether the destination is visited
if check_map[row, col] == 1:
continue
# store moves
valid_moves.append((row, col))
check_map[row][col] = 1
self.valid_checker_jump_moves(valid_moves, check_map, (row, col))
def get_valid_moves(self, cur_player):
"""
Returns the collection of valid moves given the current player, in np indices
"""
valid_moves_set = {}
for checker_pos in self.checkers_pos[cur_player].values():
valid_moves_set[checker_pos] = self.valid_checker_moves(cur_player, checker_pos)
return valid_moves_set
def place(self, cur_player, origin_pos, dest_pos):
"""
Makes a move with array indices
"""
# Make copy and make move
cur_board = np.copy(self.board[:, :, 0])
cur_board[origin_pos], cur_board[dest_pos] = cur_board[dest_pos], cur_board[origin_pos]
# Move the checker in both id->positon and position->id lookup
for checker_id, checker_pos in self.checkers_pos[cur_player].items():
if checker_pos == origin_pos:
self.checkers_pos[cur_player][checker_id] = dest_pos
break
self.checkers_id[cur_player][dest_pos] = self.checkers_id[cur_player].pop(origin_pos)
# Update history
self.board = np.concatenate((np.expand_dims(cur_board, axis=2), self.board[:, :, :BOARD_HIST_MOVES - 1]), axis=2)
# Record history moves
if len(self.hist_moves) == TOTAL_HIST_MOVES:
self.hist_moves.popleft()
self.hist_moves.append((origin_pos,dest_pos))
return self.check_win()
def player_progress(self, player_id):
"""
Given player_id, return number of its checkers having reached the opponent's field.
"""
cur_board = self.board[:, :, 0]
diag_sign = player_id if player_id == PLAYER_ONE else -1
reached_checkers_num = 0
for k in range(BOARD_WIDTH - ROWS_OF_CHECKERS, BOARD_WIDTH):
diag = cur_board.diagonal(diag_sign * k)
for i in diag:
if i == player_id:
reached_checkers_num += 1;
return reached_checkers_num
def player_forward_distance(self, player_id):
"""
Given player_id, return the total forward distance its checkers went through.
"""
cur_board = self.board[:, :, 0]
checkers_pos = self.checkers_pos[player_id]
distance = 0
if player_id == PLAYER_ONE:
distance = PLAYER_ONE_DISTANCE_OFFSET
for _, pos in checkers_pos.items():
row, _ = board_utils.np_index_to_human_coord(pos)
distance -= row
else:
distance = PLAYER_TWO_DISTANCE_OFFSET
for _, pos in checkers_pos.items():
row, _ = board_utils.np_index_to_human_coord(pos)
distance += row
return distance
if __name__ == '__main__':
"""
Put board.py testcases here
"""
board = Board()
board.visualise()
# print(board.board[board.checker_pos[PLAYER_ONE][0][0],
# board.checker_pos[PLAYER_ONE][0][1], 0])
#
# print(board.board[6, 0, 0])
# print(board.board)
# board.print_board()
# print(board.get_valid_moves(1))
# print(board.check_win())
# print(board.check_win())
# for i in range(50000):
# board.place()
<file_sep>import gc
import math
import copy
import random
import numpy as np
import utils
import board
from model import *
from config import *
class Node:
def __init__(self, state, currPlayer):
self.state = state
self.currPlayer = currPlayer
self.edges = []
self.pi = np.zeros(NUM_CHECKERS * BOARD_WIDTH * BOARD_HEIGHT, dtype='float64')
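        # pi has one entry per (checker, destination cell) pair; indices come from
        # utils.encode_checker_index(checker_id, destination_pos) (see search() below).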
def isLeaf(self):
return len(self.edges) == 0
class Edge:
def __init__(self, inNode, outNode, prior, fromPos, toPos):
self.inNode = inNode
self.outNode = outNode
self.currPlayer = inNode.currPlayer
self.fromPos = fromPos
self.toPos = toPos
self.stats = {
'N': 0,
'W': 0,
'Q': 0,
'P': prior
}
class MCTS:
def __init__(self, root, model, cpuct=C_PUCT, num_itr=MCTS_SIMULATIONS, tree_tau=TREE_TAU):
self.root = root
self.cpuct = cpuct
self.num_itr = num_itr
self.model = model
self.tree_tau = tree_tau
def moveToLeaf(self):
breadcrumbs = []
currentNode = self.root
while not currentNode.isLeaf():
maxQU = float('-inf')
chosen_edges = []
N_sum = 0
for edge in currentNode.edges:
N_sum += edge.stats['N']
for edge in currentNode.edges:
U = self.cpuct * edge.stats['P'] * np.sqrt(N_sum) / (1. + edge.stats['N'])
QU = edge.stats['Q'] + U
if QU > maxQU:
maxQU = QU
chosen_edges = [edge]
elif math.fabs(QU - maxQU) < EPSILON:
chosen_edges.append(edge)
# Choose a random node to continue simulation
sampled_edge = random.choice(chosen_edges)
breadcrumbs.append(sampled_edge)
currentNode = sampled_edge.outNode
return currentNode, breadcrumbs
def expandAndBackUp(self, leafNode, breadcrumbs):
assert leafNode.isLeaf()
winner = leafNode.state.check_win()
if winner:
for edge in breadcrumbs:
                # If a win state occurred, the player to move at leafNode must be the one who just lost
# Therefore when backing up, the leafNode player gets negative reward
direction = -1 if edge.currPlayer == leafNode.currPlayer else 1
edge.stats['N'] += 1
edge.stats['W'] += REWARD['win'] * direction
edge.stats['Q'] = edge.stats['W'] / float(edge.stats['N']) # Use float() for python2 compatibility
return
# Use model to make prediction at a leaf node
p_evaluated, v_evaluated = self.model.predict(utils.to_model_input(leafNode.state, leafNode.currPlayer))
valid_actions = leafNode.state.get_valid_moves(leafNode.currPlayer)
for checker_pos, action_set in valid_actions.items():
checker_id = leafNode.state.checkers_id[leafNode.currPlayer][checker_pos]
for destination_pos in action_set:
# Get index in neural net output vector
prior_index = utils.encode_checker_index(checker_id, destination_pos)
next_player = PLAYER_ONE + PLAYER_TWO - leafNode.currPlayer
# Set up new state of game
next_state = copy.deepcopy(leafNode.state)
next_state.place(leafNode.currPlayer, checker_pos, destination_pos)
# Build new edge and node for the new state
newNode = Node(next_state, next_player)
newEdge = Edge(leafNode, newNode, p_evaluated[prior_index], checker_pos, destination_pos)
leafNode.edges.append(newEdge)
# Back up the value
for edge in breadcrumbs:
# The value is from the perspective of leafNode player
# so the direction is positive for the leafNode player
direction = 1 if edge.currPlayer == leafNode.currPlayer else -1
edge.stats['N'] += 1
edge.stats['W'] += v_evaluated * direction
edge.stats['Q'] = edge.stats['W'] / float(edge.stats['N']) # Use float() for python2 compatibility
def search(self):
# Build Monte Carlo tree from root using lots of simulations
for i in range(self.num_itr):
leafNode, breadcrumbs = self.moveToLeaf()
self.expandAndBackUp(leafNode, breadcrumbs)
        # Calculate pi and sample an edge
chosen_edges = []
maxN = float('-inf')
for edge in self.root.edges:
probability = pow(edge.stats['N'], (1. / self.tree_tau))
checker_id = self.root.state.checkers_id[self.root.currPlayer][edge.fromPos]
neural_net_index = utils.encode_checker_index(checker_id, edge.toPos)
self.root.pi[neural_net_index] = probability
self.root.pi /= np.sum(self.root.pi)
# Sample an action with given probablities
sampled_index = np.random.choice(np.arange(len(self.root.pi)), p=self.root.pi)
sampled_checker_id, sampled_to = utils.decode_checker_index(sampled_index)
sampled_from = self.root.state.checkers_pos[self.root.currPlayer][sampled_checker_id]
# Get the edge corresponding to the sampled action
sampled_edge = None
for edge in self.root.edges:
if edge.fromPos == sampled_from and edge.toPos == sampled_to:
sampled_edge = edge
break
assert sampled_edge != None
return self.root.pi, sampled_edge
if __name__ == '__main__':
count = 0
board = board.Board()
node = Node(board, 1)
model = ResidualCNN()
tree = MCTS(node, model)
<file_sep>import sys
import random
import numpy as np
import multiprocessing as mp
import utils
from config import *
def evaluate(worker_id, model1, model2, num_games):
# Load the current model in the worker only for prediction and set GPU limit
import tensorflow as tf
tf_config = tf.ConfigProto()
tf_config.gpu_options.allow_growth = True
session = tf.Session(config=tf_config)
from keras.backend.tensorflow_backend import set_session
set_session(session=session)
# Re-seed the generators: since the RNG was copied from parent process
np.random.seed() # None seed to source from /dev/urandom
random.seed()
print('Worker {}: matching between {} and {} with {} games'.format(worker_id, model1, model2 or 'Greedy', num_games))
if model2 is not None:
from ai_vs_ai import agent_match as match
else:
from ai_vs_greedy import agent_greedy_match as match
model1_wincount = model2_wincount = draw_count = 0
for i in range(num_games):
winner = None
if model2 is None:
winner = match(model1, num_games=1)
else:
# Alternate players
if i % 2 == 0:
winner = match(model1, model2, num_games=1)
else:
winner = match(model2, model1, num_games=1)
if winner is None:
draw_count += 1
elif winner == model1:
model1_wincount += 1
else:
model2_wincount += 1
print('Worker {}: model1 "{}" wins {}/{} games'.format(worker_id, model1, model1_wincount, num_games))
print('Worker {}: model2/greedy wins {}/{} games'.format(worker_id, model2_wincount, num_games))
print('Worker {}: {}/{} games were draw'.format(worker_id, draw_count, num_games))
return model1_wincount, model2_wincount, draw_count
def evaluate_in_parallel(model1, model2, num_games, num_workers):
if model2 is not None:
utils.stress_message('Evaluating model "{}" against model "{}" on {} games'
.format(model1, model2, num_games), True)
# Process pool for parallelism
process_pool = mp.Pool(processes=num_workers)
work_share = num_games // num_workers
worker_results = []
# Send processes to generate self plays
for i in range(num_workers):
if i == num_workers - 1:
work_share += (num_games % num_workers)
# Send workers
result_async = process_pool.apply_async(
evaluate,
args=(i + 1, model1, model2, work_share))
worker_results.append(result_async)
try:
# Join processes and count games
model1_wincount = model2_wincount = draw_count = 0
for result in worker_results:
game_stats = result.get()
model1_wincount += game_stats[0]
model2_wincount += game_stats[1]
draw_count += game_stats[2]
process_pool.close()
# Exit early if needed
except KeyboardInterrupt:
utils.stress_message('SIGINT caught, exiting')
process_pool.terminate()
process_pool.join()
exit()
process_pool.join()
utils.stress_message('Overall, model1 "{}" wins {}/{} against model2 "{}"'
.format(model1, model1_wincount, num_games, model2), True)
return model1_wincount, model2_wincount, draw_count
if __name__ == '__main__':
if len(sys.argv) < 2:
print('\nUsage: python3 evaluate_models.py <Model1> [<Model2> <Number of games>]')
exit()
path1 = sys.argv[1]
path2 = None
num_games = 100
if len(sys.argv) > 2:
path2 = sys.argv[2]
if len(sys.argv) > 3:
num_games = int(sys.argv[3])
if path2 is None:
print('\nModel 2 is not given, evaluating model1 against greedy player')
p1_wincount, p2_wincount, draw_count = evaluate_in_parallel(path1, path2, num_games, NUM_WORKERS)
num_nondraws = num_games - draw_count
message = '''
With {5} games in total:
Model 1 wins {0}/{1} games ({6}%)
{2} wins {3}/{1} games ({7}%)
{4} Games were draw
'''.format(p1_wincount, num_nondraws, (path2 or 'Greedy')
, p2_wincount, draw_count, num_games
, 100*p1_wincount/num_nondraws, 100*p2_wincount/num_nondraws)
print(message)
<file_sep>import os
import re
import h5py
import datetime
import numpy as np
from sys import getsizeof
from collections.abc import Mapping, Container
from config import *
def find_version_given_filename(filename):
pattern = '({}|{})([0-9]{{4}})(-weights|)\.h5'.format(MODEL_PREFIX, G_MODEL_PREFIX)
matches = re.search(pattern, filename)
if matches is None:
print('No 4-digit version number found in filename "{}"!'.format(filename))
return -1
return int(matches.group(2))
def get_model_path_from_version(version):
return '{}/{}{:0>4}.h5'.format(SAVE_MODELS_DIR, MODEL_PREFIX, version)
def cur_time():
return datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
def stress_message(message, extra_newline=False):
print('{2}{0}\n{1}\n{0}{2}'.format('='*len(message), message, '\n' if extra_newline else ''))
def get_p1_winloss_reward(board, winner=None):
"""
Return the reward for player one in the game, given the final board state
"""
winner = winner or board.check_win()
if winner == PLAYER_ONE:
return REWARD['win']
elif winner == PLAYER_TWO:
return REWARD['lose']
else:
return REWARD['draw']
def save_train_data(board_x, pi_y, v_y, version):
''' Write current iteration training data to disk '''
if not os.path.exists(SAVE_TRAIN_DATA_DIR):
os.makedirs(SAVE_TRAIN_DATA_DIR)
with h5py.File('{}/{}{}.h5'.format(SAVE_TRAIN_DATA_DIR, SAVE_TRAIN_DATA_PREF, version), 'w') as H:
H.create_dataset('board_x', data=board_x)
H.create_dataset('pi_y', data=pi_y)
H.create_dataset('v_y', data=v_y)
def convert_to_train_data(self_play_games):
''' Return python lists containing training data '''
board_x, pi_y, v_y = [], [], []
for game in self_play_games:
history, reward = game
curr_player = PLAYER_ONE
for board, pi in history:
board_x.append(to_model_input(board, curr_player))
pi_y.append(pi)
v_y.append(reward)
reward = -reward
curr_player = PLAYER_ONE + PLAYER_TWO - curr_player
return board_x, pi_y, v_y
def augment_train_data(board_x, pi_y, v_y):
''' Augment training data by horizontal flipping of the board '''
new_board_x, new_pi_y, new_v_y = [], [], []
for i in range(len(board_x)):
new_board = np.copy(board_x[i])
new_pi = np.copy(pi_y[i])
new_v = v_y[i]
# Flip the board along the other diagonal, in the last dimension
for j in range(new_board.shape[-1]):
new_board[:, :, j] = np.fliplr(np.rot90(new_board[:, :, j]))
new_board_x.append(new_board)
new_pi_y.append(new_pi)
new_v_y.append(new_v)
board_x += new_board_x
pi_y += new_pi_y
v_y += new_v_y
return board_x, pi_y, v_y # Return the same references
def to_model_input(board, cur_player):
"""
Input:
board: 7 x 7 x 3 board._board. Each channel contains positions of both players' checkers.
cur_player: player number of the current player
Output:
7 x 7 x 7. First 3 channel is player 1, next 3 channel is player 2, last channel is all 0 if player 1 is to play.
"""
# initialise the model input
model_input = np.zeros((BOARD_WIDTH, BOARD_HEIGHT, BOARD_HIST_MOVES * 2 + 1)) # may change dtype afterwards
# get np array board
new_board = board.board
# get history moves
hist_moves = board.hist_moves
# get opponent player
op_player = PLAYER_ONE + PLAYER_TWO - cur_player
# firstly, construct the current state layers
op_layer = np.copy(new_board[:, :, 0])
cur_layer = np.copy(new_board[:, :, 0])
# construct layer for current player
np.putmask(cur_layer, cur_layer != cur_player, 0)
for checker_id, checker_pos in board.checkers_pos[cur_player].items():
cur_layer[checker_pos[0], checker_pos[1]] = checker_id + 1
# construct layer for opponent player
np.putmask(op_layer, op_layer != op_player, 0)
for checker_id, checker_pos in board.checkers_pos[op_player].items():
op_layer[checker_pos[0], checker_pos[1]] = checker_id + 1
model_input[:, :, 0] = np.copy(cur_layer)
model_input[:, :, 1] = np.copy(op_layer)
# construct the latter layers
moved_player = op_player
hist_index = len(hist_moves) - 1
for channel in range(1, BOARD_HIST_MOVES):
if not np.any(new_board[:, :, channel]): # timestep < 0
break
move = hist_moves[hist_index]
orig_pos = move[0]
dest_pos = move[1]
if moved_player == cur_player:
value = cur_layer[dest_pos]
cur_layer[dest_pos] = cur_layer[orig_pos]
cur_layer[orig_pos] = value
else:
value = op_layer[dest_pos]
op_layer[dest_pos] = op_layer[orig_pos]
op_layer[orig_pos] = value
hist_index -= 1
moved_player = PLAYER_ONE + PLAYER_TWO - moved_player
model_input[:, :, channel * 2] = np.copy(cur_layer)
model_input[:, :, channel * 2 + 1] = np.copy(op_layer)
if cur_player == PLAYER_TWO: # player 2 to play
model_input[:, :, BOARD_HIST_MOVES * 2] = np.ones((BOARD_WIDTH, BOARD_HEIGHT))
return model_input
def encode_checker_index(checker_id, coord):
"""
Convert a checker and its destination
to the model's output encoding.
"""
region = checker_id * BOARD_WIDTH * BOARD_HEIGHT # get the element-block in the model's output
offset = coord[0] * BOARD_WIDTH + coord[1] # offset in this region
return region + offset
def decode_checker_index(model_output_index):
"""
Convert the index in the model's output vector
to the checker number and its destination on board
"""
checker_id = model_output_index // (BOARD_WIDTH * BOARD_HEIGHT)
offset = model_output_index % (BOARD_WIDTH * BOARD_HEIGHT)
dest = offset // BOARD_WIDTH, offset % BOARD_WIDTH
return checker_id, dest
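# Worked example (for illustration): with BOARD_WIDTH == BOARD_HEIGHT == 7,
# checker 2 moving to square (3, 4) encodes to 2*49 + 3*7 + 4 == 123,
# and decode_checker_index(123) recovers (2, (3, 4)).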
def softmax(input):
''' Compute the softmax (prediction) given input '''
input = np.copy(input).astype('float64')
input -= np.max(input, axis=-1, keepdims=True) # For numerical stability
exps = np.exp(input)
return exps / np.sum(exps, axis=-1, keepdims=True)
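# Worked example (for illustration): softmax(np.array([0., 0., 0.])) gives
# [1/3, 1/3, 1/3] (up to floating point), since all logits are equal.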
def deepsizeof(obj, visited):
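    ''' Recursively estimate the memory footprint (in bytes) of `obj`, descending into
        mappings and containers; `visited` is a set of object ids used to avoid double counting '''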
d = deepsizeof
if id(obj) in visited:
return 0
r = getsizeof(obj)
visited.add(id(obj))
if isinstance(obj, Mapping):
r += sum(d(k, visited) + d(v, visited) for k, v in obj.items())
if isinstance(obj, Container):
r += sum(d(x, visited) for x in obj)
return r
if __name__ == '__main__':
from board import Board
b = Board()
p1 = to_model_input(b, PLAYER_ONE)
p2 = to_model_input(b, PLAYER_TWO)
print(p1[:, :, 0])
print(p2[:, :, 0])
print()
# print(p1[:, :, 1])
# print(p2[:, :, 1])
# print()
# print(p1[:, :, 6])
# print(p2[:, :, 6])
<file_sep>import sys
import utils
from game import Game
from config import *
from model import *
"""
Run this file from the terminal with an argument specifying the model if you
want to play a human-vs-ai game
e.g. python3 human_vs_ai.py saved-models/version0033.h5
Player one is ai, player two is human
"""
def load_agent(model_path, verbose=False):
model = ResidualCNN()
model.version = utils.find_version_given_filename(model_path)
if verbose:
print('\nLoading model from path {}'.format(model_path))
model.load_weights(model_path)
if verbose:
print('Model is loaded successfully\n')
return model
def human_agent_match(model_path, verbose=False, tree_tau=DET_TREE_TAU):
model = load_agent(model_path)
game = Game(p1_type='ai', p2_type='human', verbose=verbose, model1=model, tree_tau=tree_tau)
winner = game.start()
return winner
if __name__ == '__main__':
if len(sys.argv) < 2:
print('\nUsage: python3 human_vs_ai.py <Model Path> [<tree tau>]\n')
exit()
model_path = sys.argv[1]
tt = DET_TREE_TAU
if len(sys.argv) == 3:
tt = float(sys.argv[2])
utils.stress_message('Using tree_tau {} initially'.format(tt))
human_agent_match(model_path, verbose=True, tree_tau=tt)
<file_sep># Chinese Checkers AI (Being Updated)
(Please star⭑ if you find this repo useful)
- This is the repository for a strong Chinese Checkers agent trained with Reinforcement Learning and guided by heuristics and Monte Carlo Tree Search.
- The source code is almost complete; we are pushing the final changes.
- Clone the repository by running `git clone https://github.com/kenziyuliu/PythonChineseCheckers.git` in your terminal
## Try for yourself:
1. Run `python3 human_vs_ai.py <model path>` to play against a pre-trained model. We have included two models for you to play against: `good_model.h5` and `good_model2.h5`. Depending on your style of playing, you may find one model stronger than the other.
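   For example: `python3 human_vs_ai.py good_model.h5` (assuming the model file sits in the repository root; adjust the path to wherever the `.h5` files live).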
## To play normal human vs human matches:
1. Run `python3 human_vs_human.py`
## To play against a simple greedy player:
1. Run `python3 human_vs_greedy.py`
## Some dependencies:
- Keras >= 2.1.6
- TensorFlow >= 1.6.0
- Python >= 3.6.5
- NumPy >= 1.14.3
- h5py >= 2.7.1
<file_sep>import tensorflow as tf
def softmax_cross_entropy_with_logits(y_true, y_pred):
return tf.nn.softmax_cross_entropy_with_logits_v2(labels=y_true, logits=y_pred)
<file_sep>import gc
import re
import os
import sys
import h5py
import datetime
import threading
import multiprocessing as mp
import utils
from config import *
from model import *
from data_generators import GreedyDataGenerator
from keras.callbacks import EarlyStopping, ModelCheckpoint
def generate_self_play(worker_id, num_self_play):
# Re-seed the generators: since the RNG was copied from parent process
np.random.seed() # None seed to source from /dev/urandom
# Worker starts generating self-plays according to its workload
worker_result = []
num_normal_games = int(G_NORMAL_GAME_RATIO * num_self_play)
num_rand_start_games = int(G_RAND_START_GAME_RATIO * num_self_play)
num_randomised_games = num_self_play - num_normal_games - num_rand_start_games
normal_gen = GreedyDataGenerator()
random_start_gen = GreedyDataGenerator(random_start=True)
randomised_gen = GreedyDataGenerator(randomised=True)
def generate(num_games, gen):
for i in range(num_games):
worker_result.append(gen.generate_play())
if len(worker_result) % 100 == 0:
print('Worker {}: generated {} self-plays'.format(worker_id, len(worker_result)))
generate(num_normal_games, normal_gen)
generate(num_rand_start_games, random_start_gen)
generate(num_randomised_games, randomised_gen)
print('Worker {}: generated {} self-plays'.format(worker_id, len(worker_result)))
return worker_result
def generate_self_play_in_parallel(num_self_play, num_workers):
# Process pool for parallelism
process_pool = mp.Pool(processes=num_workers)
work_share = num_self_play // num_workers
worker_results = []
# Send processes to generate self plays
for i in range(num_workers):
if i == num_workers - 1:
work_share += (num_self_play % num_workers)
# Send workers
result_async = process_pool.apply_async(generate_self_play, args=(i + 1, work_share))
worker_results.append(result_async)
# Join processes and summarise the generated final list of games
game_list = []
for result in worker_results:
game_list += result.get()
process_pool.close()
process_pool.join()
return game_list
def train(num_games, model, version):
# print some useful message
message = 'At {}, Starting to generate {} greedy self-play games for version {}'.format(utils.cur_time(), num_games, version)
utils.stress_message(message, True)
# Generate games
games = generate_self_play_in_parallel(num_games, NUM_WORKERS)
utils.stress_message('Preparing training examples from {} games'.format(len(games)))
# Convert self-play games to training data
board_x, pi_y, v_y = utils.convert_to_train_data(games)
board_x, pi_y, v_y = utils.augment_train_data(board_x, pi_y, v_y)
assert len(board_x) == len(pi_y) == len(v_y)
print('\nNumber of training examples (Total): {}'.format(len(board_x)))
# Sample a portion of training data
num_train_data = int(G_DATA_RETENTION_RATE * len(board_x))
sampled_idx = np.random.choice(len(board_x), num_train_data, replace=False)
board_x_train = np.array([board_x[sampled_idx[i]] for i in range(num_train_data)])
pi_y_train = np.array([pi_y[sampled_idx[i]] for i in range(num_train_data)])
v_y_train = np.array([v_y[sampled_idx[i]] for i in range(num_train_data)])
# board_x_val = np.array([board_x[sampled_idx[i]] for i in range(num_train_data, num_train_data + G_NUM_VAL_DATA)])
# pi_y_val = np.array([pi_y[sampled_idx[i]] for i in range(num_train_data, num_train_data + G_NUM_VAL_DATA)])
# v_y_val = np.array([v_y[sampled_idx[i]] for i in range(num_train_data, num_train_data + G_NUM_VAL_DATA)])
assert len(board_x_train) == len(pi_y_train) == len(v_y_train)
print('Number of training examples (Sampled): {}\n'.format(len(board_x_train)))
# Make sure that the directory is available
if not os.path.exists(SAVE_WEIGHTS_DIR):
os.makedirs(SAVE_WEIGHTS_DIR)
model.model.fit(board_x_train, [pi_y_train, v_y_train],
# validation_data=((board_x_val, [pi_y_val, v_y_val]) if G_NUM_VAL_DATA > 0 else None),
validation_split=G_VAL_SPLIT,
batch_size=G_BATCH_SIZE,
epochs=G_ITER_PER_EPOCH,
shuffle=True)
# callbacks=[EarlyStopping(monitor='val_loss', min_delta=0.001, patience=5),
# ModelCheckpoint(filepath=SAVE_WEIGHTS_DIR+'GreedyWeights-ep{epoch:02d}-val{val_loss:.2f}.h5',
# save_best_only=True, save_weights_only=True)])
model.save_weights(SAVE_WEIGHTS_DIR, G_MODEL_PREFIX, version=version)
utils.stress_message('GreedyModel Weights version {} saved to {}'.format(version, SAVE_WEIGHTS_DIR), True)
if __name__ == '__main__':
print('Initialising Model...')
model = ResidualCNN()
print('Model Initialised')
version = 0
if len(sys.argv) == 2:
checkpoint = sys.argv[1]
print('Continue training from version "{}"'.format(checkpoint))
model.load_weights(checkpoint)
version = utils.find_version_given_filename(checkpoint) + 1
print('\nCurrent training version {}'.format(version))
for i in range(G_EPOCHS):
train(G_GAMES_PER_EPOCH, model, i + version)
<file_sep>import copy
import numpy as np
import random
import utils
from config import *
from board import Board
from MCTS import MCTS, Node
def selfplay(model1, model2=None, randomised=False):
'''
Generate an agent self-play given two models
TODO: if `randomised`, randomise starting board state
'''
if model2 is None:
model2 = model1
player_progresses = [0, 0]
player_turn = 0
num_useless_moves = 0
play_history = []
tree_tau = TREE_TAU
board = Board(randomised=randomised)
root = Node(board, PLAYER_ONE) # initial game state
use_model1 = True
while True:
model = model1 if use_model1 else model2
if len(root.state.hist_moves) < INITIAL_RANDOM_MOVES:
root = make_random_move(root)
else:
# Use Current model to make a move
root = make_move(root, model, tree_tau, play_history)
assert root.isLeaf()
hist_moves = root.state.hist_moves
cur_player_hist_moves = [hist_moves[i] for i in range(len(hist_moves) - 1, -1, -2)]
history_dests = set([move[1] for move in cur_player_hist_moves])
# If limited destinations exist in the past moves, then there is some kind of repetition
if len(cur_player_hist_moves) * 2 >= TOTAL_HIST_MOVES and len(history_dests) <= UNIQUE_DEST_LIMIT:
print('Repetition detected: stopping and discarding game')
return None, None
# Evaluate player progress for stopping
progress_evaluated = root.state.player_progress(player_turn + 1)
if progress_evaluated > player_progresses[player_turn]:
num_useless_moves = int(num_useless_moves * (NUM_CHECKERS - 1) / NUM_CHECKERS)
player_progresses[player_turn] = progress_evaluated
else:
num_useless_moves += 1
# Change player
player_turn = 1 - player_turn
use_model1 = not use_model1
# Change tree_tau to a very small value once the game has progressed enough, so actions become deterministic
if len(play_history) + INITIAL_RANDOM_MOVES > TOTAL_MOVES_TILL_TAU0:
if tree_tau == TREE_TAU:
print('selfplay: Changing tree_tau to {} as total number of moves is now {}'.format(DET_TREE_TAU, len(play_history)))
tree_tau = DET_TREE_TAU
if root.state.check_win():
print('END GAME REACHED')
break
# Stop (and discard) the game if it's nonsense
if num_useless_moves >= PROGRESS_MOVE_LIMIT:
print('Game stopped by reaching progress move limit; Game Discarded')
return None, None
if randomised:
# Discard the first `BOARD_HIST_MOVES` states as they do not yet have enough move history
return play_history[BOARD_HIST_MOVES:], utils.get_p1_winloss_reward(root.state)
else:
return play_history, utils.get_p1_winloss_reward(root.state)
def make_random_move(root):
'''
Independent on MCTS.
Instead sample a random move from current board's valid moves.
'''
random.seed()
cur_state = root.state
player = root.currPlayer
valid_actions = cur_state.get_valid_moves(player) # dict, key: checker pos, value: possible dest from pos
random_start = random.choice(list(valid_actions.keys()))
while len(valid_actions[random_start]) == 0:
random_start = random.choice(list(valid_actions.keys()))
random_end = random.choice(valid_actions[random_start])
next_state = copy.deepcopy(cur_state)
next_state.place(player, random_start, random_end)
new_player = PLAYER_ONE + PLAYER_TWO - player
return Node(next_state, new_player)
def make_move(root, model, tree_tau, play_history):
'''
Given a current board state, perform tree search
and make a move
(Code inside original while loop of selfplay())
'''
assert root.isLeaf()
tree = MCTS(root, model, tree_tau=tree_tau)
# Make the first expansion to possible next states
tree.expandAndBackUp(tree.root, breadcrumbs=[]) # breadcrumbs=[] as the root has an empty path back to itself
assert len(tree.root.edges) > 0 # as root has been expanded
# Add Dirichlet noise to prior probs at the root to ensure all moves may be tried
dirichlet_noise = np.random.dirichlet(np.ones(len(tree.root.edges)) * DIRICHLET_ALPHA)
for i in range(len(tree.root.edges)):
tree.root.edges[i].stats['P'] *= (1. - DIR_NOISE_FACTOR)
tree.root.edges[i].stats['P'] += DIR_NOISE_FACTOR * dirichlet_noise[i]
# Decide next move from the root with 1 level of prior probability
pi, sampled_edge = tree.search()
play_history.append((tree.root.state, pi))
outNode = sampled_edge.outNode
outNode.edges.clear()
return copy.deepcopy(outNode) # root for next iteration
# def get_reward(board):
# """
# return the reward for player one
# """
# winner = board.check_win()
# if winner == PLAYER_ONE:
# return REWARD["win"]
# if winner == PLAYER_TWO:
# return REWARD["lose"]
#
# player_one_distance = board.player_forward_distance(PLAYER_ONE)
# player_two_distance = board.player_forward_distance(PLAYER_TWO)
#
# if abs(player_one_distance - player_two_distance) <= DIST_THRES_FOR_REWARD:
# return REWARD["draw"]
#
# return 1 if (player_one_distance - player_two_distance >= DIST_THRES_FOR_REWARD) else -1
if __name__ == '__main__':
'''
Some tests here
'''
import sys
import time
from model import ResidualCNN
if len(sys.argv) != 2:
print('Model needed for testing: python3 selfplay.py <model path>')
exit()
model_path = sys.argv[1]
model = ResidualCNN()
model.load_weights(model_path)
history, reward = selfplay(model)
for i in range(8):
board, pi = history[i]
board.visualise()
time.sleep(3)
<file_sep>import sys
import utils
from game import Game
from config import *
from model import *
"""
Run this file from the terminal with an argument specifying the model if you
want to play an ai-vs-greedy game
e.g. python3 ai_vs_greedy.py saved-models/version0033.h5
Player one is ai, player two is greedy
"""
def load_agent(model_path, verbose=False):
model = ResidualCNN()
model.version = utils.find_version_given_filename(model_path)
if verbose:
print('\nLoading model from path {}'.format(model_path))
model.load_weights(model_path)
if verbose:
print('Model is loaded successfully\n')
return model
def agent_greedy_match(model_path, num_games, verbose=False, tree_tau=DET_TREE_TAU):
player1 = 'ai'
player2 = 'greedy'
win_count = { player1 : 0, player2 : 0 }
model = load_agent(model_path)
for i in range(num_games):
if verbose:
utils.stress_message('Game {}'.format(i + 1))
if player1 == 'ai':
game = Game(p1_type=player1, p2_type=player2, verbose=verbose, model1=model, tree_tau=tree_tau)
else:
game = Game(p1_type=player1, p2_type=player2, verbose=verbose, model2=model, tree_tau=tree_tau)
winner = game.start()
if winner is not None:
if winner == PLAYER_ONE:
win_count[player1] += 1
else:
win_count[player2] += 1
# Swap
player1, player2 = player2, player1
if verbose:
utils.stress_message('Agent wins {} games and Greedy wins {} games with total games {}'
.format(win_count['ai'], win_count['greedy'], num_games))
if win_count['ai'] > win_count['greedy']:
return model_path
elif win_count['greedy'] > win_count['ai']:
return 'greedy'
else:
return None
if __name__ == '__main__':
if len(sys.argv) < 2:
print('\nUsage: python3 ai_vs_greedy.py <Model Path> [<tree tau>]\n')
exit()
model_path = sys.argv[1]
tt = DET_TREE_TAU
if len(sys.argv) == 3:
tt = float(sys.argv[2])
utils.stress_message('Using tree_tau {} initially'.format(tt))
agent_greedy_match(model_path, num_games=1, verbose=True, tree_tau=tt)
<file_sep>from game import Game
"""
Run this file directly from terminal if you
want to play human-vs-human game
"""
if __name__ == '__main__':
human_game = Game(p1_type='human', p2_type='human')
human_game.start()
<file_sep>import os
from keras import regularizers
from keras.optimizers import SGD, Adam
from keras.models import load_model
from keras.models import Model as KerasModel
from keras.layers import Input, Conv2D, Flatten, Dense, BatchNormalization, LeakyReLU, Activation, add
import utils
from board import *
from config import *
from loss import softmax_cross_entropy_with_logits
class Model:
def __init__(self, input_dim, filters, version=0):
self.input_dim = input_dim
self.filters = filters
self.version = version
def predict(self, input_board):
logits, v = self.model.predict(np.expand_dims(input_board, axis=0).astype('float64'))
p = utils.softmax(logits) # Apply softmax on the logits after prediction
return p.squeeze(), v.squeeze() # Remove the extra batch dimension
def save(self, save_dir, model_prefix, version):
if not os.path.exists(save_dir):
os.makedirs(save_dir)
self.version = version
self.model.save('{}/{}{:0>4}.h5'.format(save_dir, model_prefix, version))
print('\nSaved model "{}{:0>4}.h5" to "{}"\n'.format(model_prefix, version, save_dir))
def save_weights(self, save_dir, prefix, version):
if not os.path.exists(save_dir):
os.makedirs(save_dir)
self.model.save_weights('{}/{}{:0>4}-weights.h5'.format(save_dir, prefix, version))
utils.stress_message('Saved model weights "{}{:0>4}-weights" to "{}"'.format(prefix, version, save_dir), True)
def load(self, filepath):
self.model = load_model(
filepath,
custom_objects={'softmax_cross_entropy_with_logits': softmax_cross_entropy_with_logits}
)
return self.model
def load_weights(self, filepath):
self.model.load_weights(filepath)
return self.model # Return reference to model just in case
class ResidualCNN(Model):
def __init__(self, input_dim=INPUT_DIM, filters=NUM_FILTERS):
Model.__init__(self, input_dim, filters)
self.model = self.build_model()
def build_model(self):
main_input = Input(shape=self.input_dim)
regularizer = regularizers.l2(REG_CONST)
x = Conv2D(filters=64, kernel_size=3, kernel_regularizer=regularizer, padding='valid')(main_input)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = self.residual_block(x, [32, 32, 64], kernel_size=3, regularizer=regularizer)
x = self.residual_block(x, [32, 32, 64], kernel_size=3, regularizer=regularizer)
x = self.residual_block(x, [32, 32, 64], kernel_size=3, regularizer=regularizer)
x = self.residual_block(x, [32, 32, 64], kernel_size=3, regularizer=regularizer)
x = self.residual_block(x, [32, 32, 64], kernel_size=3, regularizer=regularizer)
x = self.residual_block(x, [32, 32, 64], kernel_size=3, regularizer=regularizer)
x = self.residual_block(x, [32, 32, 64], kernel_size=3, regularizer=regularizer)
x = self.residual_block(x, [32, 32, 64], kernel_size=3, regularizer=regularizer)
x = self.residual_block(x, [32, 32, 64], kernel_size=3, regularizer=regularizer)
policy = self.policy_head(x, regularizer)
value = self.value_head(x, regularizer)
model = KerasModel(inputs=[main_input], outputs=[policy, value])
model.compile(loss={'policy_head':softmax_cross_entropy_with_logits, 'value_head':'mean_squared_error'}
, optimizer=SGD(lr=LEARNING_RATE, momentum=0.9, nesterov=True) # NOTE: keep here for reuse
# , optimizer=Adam(lr=LEARNING_RATE)
, loss_weights=LOSS_WEIGHTS)
return model
def value_head(self, head_input, regularizer):
x = Conv2D(filters=1, kernel_size=1, kernel_regularizer=regularizer)(head_input)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Flatten()(x)
x = Dense(32,
use_bias=True,
activation='relu',
kernel_regularizer=regularizer)(x)
x = Dense(1,
use_bias=True,
activation='tanh',
kernel_regularizer=regularizer,
name='value_head')(x)
return x
def policy_head(self, head_input, regularizer):
x = Conv2D(filters=16, kernel_size=1, kernel_regularizer=regularizer)(head_input)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Flatten()(x)
x = Dense(NUM_CHECKERS * BOARD_WIDTH * BOARD_WIDTH,
use_bias=True,
activation='linear',
kernel_regularizer=regularizer,
name='policy_head')(x)
return x
def residual_block(self, block_input, filters, kernel_size, regularizer):
'''
Residual block setup code referenced from Keras
https://github.com/keras-team/keras
'''
x = Conv2D(filters=filters[0]
, kernel_size=1
, kernel_regularizer=regularizer)(block_input)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Conv2D(filters=filters[1]
, kernel_size=kernel_size
, padding='same'
, kernel_regularizer=regularizer)(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Conv2D(filters=filters[2]
, kernel_size=1
, kernel_regularizer=regularizer)(x)
x = BatchNormalization()(x)
x = add([x, block_input])
x = Activation('relu')(x)
return x
def conv_block(self, block_input, filters, kernel_size, regularizer):
'''
Conv block setup code referenced from Keras
https://github.com/keras-team/keras
'''
x = Conv2D(filters=filters[0]
, kernel_size=1
, kernel_regularizer=regularizer)(block_input)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Conv2D(filters=filters[1]
, kernel_size=kernel_size
, padding='same'
, kernel_regularizer=regularizer)(x)
x = BatchNormalization()(x)
x = Activation('relu')(x)
x = Conv2D(filters=filters[2]
, kernel_size=1
, kernel_regularizer=regularizer)(x)
x = BatchNormalization()(x)
shortcut = Conv2D(filters=filters[2]
, kernel_size=1
, kernel_regularizer=regularizer)(block_input)
shortcut = BatchNormalization()(shortcut)
x = add([x, shortcut])
x = Activation('relu')(x)
return x
if __name__ == '__main__':
# checker_pos = []
# for i in range(6 * 49 + 1):
# checker_pos.append(utils.decode_checker_index(i))
# # print(utils.decode_checker_index(i))
#
# count = 0
# for checker_id, pos in checker_pos:
# assert count == utils.encode_checker_index(checker_id, pos)
# # print(utils.encode_checker_index(checker_id, pos))
# count += 1
#
# # Test `to_model_input`
# gameboard = Board()
# gameboard.place(1, (5, 0), (3, 0))
# board = gameboard.board
# for i in range(board.shape[2]):
# print(board[:, :, i])
# model_input = utils.to_model_input(board, 1)
# print('\n\n')
# for i in range(model_input.shape[2]):
# print(model_input[:, :, i])
from keras.utils.vis_utils import plot_model
model = ResidualCNN()
# test for saving
model.model.summary()
# plot_model(model.model, to_file='model_plot_noshowname.png', show_shapes=True, show_layer_names=False)
<file_sep>import os
import copy
import random
import numpy as np
import board_utils
from config import *
from model import *
from board import Board
from MCTS import Node, MCTS
"""
Every player class must implement a decide_move(self, board, verbose=..., total_moves=...) method
"""
class HumanPlayer:
def __init__(self, player_num):
self.player_num = player_num
def decide_move(self, board, verbose=True, total_moves=None):
"""
Given current board, return a move to play.
:type board: Class Board
:rtype A list of 2 tuples, specifying the move's FROM and TO.
"""
# First print game info
os.system('clear')
board.visualise(cur_player=self.player_num)
valid_moves = board.get_valid_moves(self.player_num)
human_valid_moves = board_utils.convert_np_to_human_moves(valid_moves)
if verbose:
for checker in human_valid_moves:
print("Checker {} can move to: {}".format(checker, sorted(human_valid_moves[checker])))
print()
(from_i, from_j), (to_i, to_j) = (-1, -1), (-1, -1)
while 1:
if verbose:
# x = the row number on visualised board, y = the position of the checker in that row from left
print('Specify a position by its row number and its position counted from the left in that row.')
print('Please input your move with format: start_row start_col end_row end_col')
try:
human_from_row, human_from_col, human_to_row, human_to_col = map(int, input().split())
except ValueError:
print("\nInvalid Move Format! Try again!")
continue
from_i, from_j = board_utils.human_coord_to_np_index((human_from_row, human_from_col))
to_i, to_j = board_utils.human_coord_to_np_index((human_to_row, human_to_col))
if (from_i, from_j) in valid_moves and (to_i, to_j) in valid_moves[(from_i, from_j)]:
break
print("\nInvalid Move! Try again!")
return (from_i, from_j), (to_i, to_j)
class GreedyPlayer:
def __init__(self, player_num, stochastic=False):
self.player_num = player_num
self.stochastic = stochastic
def decide_move(self, board, verbose=False, training=False, total_moves=None):
valid_moves = board.get_valid_moves(self.player_num)
human_valid_moves = board_utils.convert_np_to_human_moves(valid_moves)
max_dist = float('-inf')
if self.stochastic:
prior = []
backward_moves = []
forward_moves = []
for start in human_valid_moves:
for end in human_valid_moves[start]:
dist = end[0] - start[0]
if self.player_num == PLAYER_ONE:
dist = -dist
if dist > 0:
forward_moves.append((start, end))
prior.append(dist)
else:
backward_moves.append((start, end))
if len(forward_moves) == 0:
pick_start, pick_end = random.choice(backward_moves)
else:
prior = np.array(prior) / sum(prior)
index = np.random.choice(len(forward_moves), p=prior)
pick_start, pick_end = forward_moves[index]
else:
best_moves = []
for start in human_valid_moves:
for end in human_valid_moves[start]:
dist = end[0] - start[0] # Evaluate distance by how many steps forward
if self.player_num == PLAYER_ONE: # Revert distance as player1 moves up
dist = -dist
if dist > max_dist:
max_dist = dist
best_moves = [(start, end)]
elif dist == max_dist:
best_moves.append((start, end))
# When there are several equally good moves, prefer the one made by the rearmost checker
last_checker, _ = max(best_moves, key=lambda x: (x[0][0] if self.player_num == PLAYER_ONE else -x[0][0]))
# Keep only the best moves that belong to that rearmost checker
filtered_best_moves = [move for move in best_moves if move[0][0] == last_checker[0]]
if training:
return filtered_best_moves
# Then randomly sample a move
pick_start, pick_end = random.choice(filtered_best_moves)
if verbose:
board.visualise(cur_player = self.player_num)
print('GreedyPlayer moved from {} to {}\n'.format(pick_start, pick_end))
return board_utils.human_coord_to_np_index(pick_start), \
board_utils.human_coord_to_np_index(pick_end)
class AiPlayer:
def __init__(self, player_num, model, tree_tau):
self.player_num = player_num
self.model = model
self.tree_tau = tree_tau
def decide_move(self, board, verbose=False, total_moves=None):
"""
Given current board, return a move to play.
:type board: Class Board
:rtype A list of 2 tuples, specifying the move's FROM and TO.
"""
if verbose:
board.visualise(cur_player = self.player_num)
print('Facing the board above, Ai Version {} is thinking.'.format(self.model.version))
node = Node(board, self.player_num)
# Play deterministically when moves reach a certain number
if total_moves is not None and total_moves > TOTAL_MOVES_TILL_TAU0:
if self.tree_tau != DET_TREE_TAU:
print('Player {}: changing tree tau from {} to {}'.format(self.player_num, self.tree_tau, DET_TREE_TAU))
self.tree_tau = DET_TREE_TAU
tree = MCTS(node, self.model, tree_tau=self.tree_tau)
pi, sampled_edge = tree.search()
if verbose:
human_fromPos = board_utils.np_index_to_human_coord(sampled_edge.fromPos)
human_toPos = board_utils.np_index_to_human_coord(sampled_edge.toPos)
print('Ai Version {} moved from {} to {}\n'.format(
self.model.version, human_fromPos, human_toPos))
return sampled_edge.fromPos, sampled_edge.toPos
if __name__ == "__main__":
pass
<file_sep>from game import Game
"""
Run this file directly from terminal if you
want to play human-vs-greedy game
"""
if __name__ == '__main__':
game = Game(p1_type='human', p2_type='greedy')
game.start()
<file_sep>import sys
from keras.models import load_model
from loss import softmax_cross_entropy_with_logits
def get_weights(filename):
print('Loading model...')
model = load_model(filename, custom_objects={'softmax_cross_entropy_with_logits': softmax_cross_entropy_with_logits})
savename = '{}_weights.h5'.format(filename)
print('Saving model weights to "{}"'.format(savename))
model.save_weights(savename)
if __name__ == '__main__':
if len(sys.argv) != 2:
print('\nUsage: python3 get_model_weights.py <model path>\n')
exit()
filename = sys.argv[1]
get_weights(filename)<file_sep>import os
import h5py
import numpy as np
import utils
def combine_train_data(board_x, pi_y, v_y, first_version, last_version, save_dir, pref):
all_board_x, all_pi_y, all_v_y = [], [], []
if len(board_x) > 0 and len(pi_y) > 0 and len(v_y) > 0:
all_board_x.append(board_x)
all_pi_y.append(pi_y)
all_v_y.append(v_y)
# Read data from previous iterations
for i in range(first_version, last_version + 1):
if i >= 0:
filename = '{}/{}{}.h5'.format(save_dir, pref, i)
if not os.path.exists(filename):
utils.stress_message('{} does not exist!'.format(filename))
continue
with h5py.File(filename, 'r') as H:
all_board_x.append(np.copy(H['board_x']))
all_pi_y.append(np.copy(H['pi_y']))
all_v_y.append(np.copy(H['v_y']))
if len(all_board_x) > 0 and len(all_pi_y) > 0 and len(all_v_y) > 0:
# Make a pool of training data from previous iterations
board_x = np.vstack(all_board_x)
pi_y = np.vstack(all_pi_y)
v_y = np.hstack(all_v_y) # hstack as v_y is 1D array
return board_x, pi_y, v_y, len(all_board_x) # Last retval is total iterations used
# If no data at all: return empty training data
return [], [], [], 0
def save_train_data(board_x, pi_y, v_y):
''' Write current iteration training data to disk '''
with h5py.File('combined.h5', 'w') as H:
H.create_dataset('board_x', data=board_x)
H.create_dataset('pi_y', data=pi_y)
H.create_dataset('v_y', data=v_y)
if __name__ == '__main__':
import sys
if len(sys.argv) != 5:
print('\nUsage: python3 combine_data.py <dir> <pref> <1st version num> <last version num>\n')
exit()
board_x, pi_y, v_y, num = combine_train_data([], [], [], int(sys.argv[3]), int(sys.argv[4]), sys.argv[1], sys.argv[2])
save_train_data(board_x, pi_y, v_y)
<file_sep>import copy
import random
import numpy as np
import utils
from player import GreedyPlayer
from board import *
from datetime import datetime, timedelta
from config import *
import board_utils
from model import Model
class GreedyDataGenerator:
def __init__(self, randomised=False, random_start=False):
self.cur_player = GreedyPlayer(player_num=1)
self.next_player = GreedyPlayer(player_num=2)
self.randomised = randomised
self.random_start = random_start
self.board = Board(randomised=randomised)
def swap_players(self):
self.cur_player, self.next_player = self.next_player, self.cur_player
def generate_play(self):
play_history = []
final_winner = None
start_time = datetime.now()
# Make some random moves if random start is specified
if self.random_start:
for i in range(INITIAL_RANDOM_MOVES):
valid_actions = self.board.get_valid_moves(self.cur_player.player_num)
random_start = random.choice(list(valid_actions.keys()))
while len(valid_actions[random_start]) == 0:
random_start = random.choice(list(valid_actions.keys()))
random_end = random.choice(valid_actions[random_start])
# No need to check winner: game is just starting
self.board.place(self.cur_player.player_num, random_start, random_end)
self.swap_players()
while True:
best_moves = self.cur_player.decide_move(self.board, verbose=False, training=True)
pi = np.zeros(NUM_CHECKERS * BOARD_WIDTH * BOARD_HEIGHT)
for move in best_moves:
start = board_utils.human_coord_to_np_index(move[0])
end = board_utils.human_coord_to_np_index(move[1])
checker_id = self.board.checkers_id[self.cur_player.player_num][start]
neural_net_index = utils.encode_checker_index(checker_id, end)
pi[neural_net_index] = 1.0 / len(best_moves)
play_history.append((copy.deepcopy(self.board), pi))
pick_start, pick_end = random.choice(best_moves)
move_from = board_utils.human_coord_to_np_index(pick_start)
move_to = board_utils.human_coord_to_np_index(pick_end)
# Make the move on board and check winner
winner = self.board.place(self.cur_player.player_num, move_from, move_to)
if winner:
final_winner = winner
break
# Check if game is stuck
if datetime.now() - start_time > timedelta(seconds=STUCK_TIME_LIMIT):
return play_history[:AVERAGE_TOTAL_MOVE], REWARD['draw']
self.swap_players()
reward = utils.get_p1_winloss_reward(self.board, final_winner)
# Reset generator for next game
self.board = Board(randomised=self.randomised)
# Keep meaningful move history
if self.randomised:
return play_history[BOARD_HIST_MOVES:], reward
else:
return play_history, reward
if __name__ == "__main__":
lens = []
num_games = 5000
randomGen = GreedyDataGenerator(randomised=True)
normalGen = GreedyDataGenerator(randomised=False)
for i in range(num_games):
history, reward = randomGen.generate_play()
lens.append(len(history))
history, reward = normalGen.generate_play()
lens.append(len(history))
# history[0][0].visualise()
# print(history[0][1])
# history[1][0].visualise()
# print(history[1][1])
# history[2][0].visualise()
# print(history[2][1])
#
# history[-3][0].visualise()
# print(history[-3][1])
# history[-2][0].visualise()
# print(history[-2][1])
# history[-1][0].visualise()
# print(history[-1][1])
#
# import time
# time.sleep(500)
# print(len(generator.generate_play()[0]))
print('Average game length over {} games:'.format(len(lens)), sum(lens) / len(lens))
# print('Trying random start')
# gen = GreedyDataGenerator(random_start=True)
# history, reward = gen.generate_play()
# history[0][0].visualise()
# history[1][0].visualise()
# history[2][0].visualise()
<file_sep>import utils
import numpy as np
from game import Game
from config import *
"""
Run this file directly from the terminal if you
want to run greedy-vs-greedy benchmark games
"""
if __name__ == '__main__':
count = { PLAYER_ONE : 0, PLAYER_TWO : 0 }
num_games = 50
end_states = []
for i in range(num_games):
utils.stress_message('Game {}'.format(i + 1))
game = Game(p1_type='greedy', p2_type='greedy', verbose=False)
winner = game.start()
if winner is not None:
count[winner] += 1
end_states.append(game.board.board[..., 0])
unique_states = np.unique(np.array(end_states), axis=0)
print('\n{} end game states, {} of them are unique\n'.format(num_games, len(unique_states)))
print('Player {} wins {} matches'.format(PLAYER_ONE, count[PLAYER_ONE]))
print('Player {} wins {} matches'.format(PLAYER_TWO, count[PLAYER_TWO]))
<file_sep>from config import *
def np_index_to_human_coord(coord):
np_i, np_j = coord
human_row = np_i - np_j + BOARD_WIDTH
human_col = min(np_i, np_j) + 1
return human_row, human_col
def human_coord_to_np_index(coord):
human_row, human_col = coord
np_i = human_col - 1 + max(0, human_row - BOARD_WIDTH)
np_j = human_col - 1 - min(0, human_row - BOARD_WIDTH)
return np_i, np_j
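# Worked example (for illustration): with BOARD_WIDTH == 7, human coordinate (7, 1)
# maps to numpy index (0, 0), and np_index_to_human_coord((0, 0)) recovers (7, 1);
# likewise (13, 1) maps to (6, 0), as exercised in the tests below.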
def is_valid_pos(i, j):
return i >= 0 and i < BOARD_HEIGHT and j >= 0 and j < BOARD_WIDTH
def convert_np_to_human_moves(np_moves):
return { np_index_to_human_coord(key) : \
[np_index_to_human_coord(to) for to in np_moves[key]] \
for key in np_moves }
if __name__ == '__main__':
"""
Put board_utils.py test cases here.
"""
print(human_coord_to_np_index((13, 1)))
print(human_coord_to_np_index((12, 1)))
print(human_coord_to_np_index((10, 1)))
print(human_coord_to_np_index((5, 3)))
print(np_index_to_human_coord((6, 0)))
print(np_index_to_human_coord((7, 1)))
<file_sep>import re
import os
import sys
import h5py
import datetime
import threading
import argparse
import multiprocessing as mp
import utils
from config import *
from model import *
from MCTS import *
from selfplay import selfplay
"""
This file coordinates training procedure, including:
1. invoke self play
2. store result from self play & start training NN immediately based on that single example
3. evaluate how well the training went, e.g. via the loss and the number of games drawn due to lack of progress
4. save model checkpoints for each 'x' self play
5. allow loading saved model checkpoints given argument
"""
def generate_self_play(worker_id, model_path, num_self_play, model2_path=None):
# Load the current model in the worker only for prediction and set GPU limit
import tensorflow as tf
tf_config = tf.ConfigProto()
tf_config.gpu_options.allow_growth = True
session = tf.Session(config=tf_config)
from keras.backend.tensorflow_backend import set_session
set_session(session=session)
# Re-seed the generators: since the RNG was copied from parent process
np.random.seed() # None seed to source from /dev/urandom
random.seed()
# Decide what model to use
model = ResidualCNN()
model2 = None
if model_path is not None:
print('Worker {}: loading model "{}"'.format(worker_id, model_path))
model.load_weights(model_path)
print('Worker {}: model load successful'.format(worker_id))
if model2_path is not None:
print('Worker {}: loading 2nd model "{}"'.format(worker_id, model2_path))
model2 = ResidualCNN()
model2.load_weights(model2_path)
print('Worker {}: 2nd model load successful'.format(worker_id))
else:
print ('Worker {}: Model2 is None; using Model1 to generate selfplays'.format(worker_id))
else:
print('Worker {}: using un-trained model'.format(worker_id))
# Worker starts generating self-plays according to its workload
worker_result = []
for i in range(num_self_play):
play_history, p1_reward = selfplay(model, model2, randomised=False)
if play_history is not None and p1_reward is not None:
worker_result.append((play_history, p1_reward))
print('Worker {}: generated {} self-plays'.format(worker_id, len(worker_result)))
return worker_result
def generate_self_play_in_parallel(model_path, num_self_play, num_workers, model2_path=None):
# Process pool for parallelism
process_pool = mp.Pool(processes=num_workers)
work_share = num_self_play // num_workers
worker_results = []
# Send processes to generate self plays
for i in range(num_workers):
if i == num_workers - 1:
work_share += (num_self_play % num_workers)
# Send workers
result_async = process_pool.apply_async(
generate_self_play,
args=(i + 1, model_path, work_share, model2_path))
worker_results.append(result_async)
try:
# Join processes and summarise the generated final list of games
game_list = []
for result in worker_results:
game_list += result.get()
process_pool.close()
# Exit early if needed
except KeyboardInterrupt:
utils.stress_message('SIGINT caught, exiting')
process_pool.terminate()
process_pool.join()
exit()
process_pool.join()
return game_list
def train(model_path, board_x, pi_y, v_y, data_retention, version):
# Set TF gpu limit
import tensorflow as tf
tf_config = tf.ConfigProto()
tf_config.gpu_options.allow_growth = True
session = tf.Session(config=tf_config)
from keras.backend.tensorflow_backend import set_session
set_session(session=session)
np.random.seed()
random.seed()
message = 'At {}, Training Version {}, Number of examples: {} (retaining {:.1f}%)' \
.format(utils.cur_time(), version, len(board_x), data_retention * 100)
utils.stress_message(message, True)
# Make sure path is not null if we are not training from scratch
cur_model = ResidualCNN()
if version > 0:
assert model_path is not None
cur_model.load_weights(model_path)
# Sample a portion of training data before training
sampled_idx = np.random.choice(len(board_x), int(data_retention * len(board_x)), replace=False)
sampled_board_x = board_x[sampled_idx]
sampled_pi_y = pi_y[sampled_idx]
sampled_v_y = v_y[sampled_idx]
cur_model.model.fit(sampled_board_x, [sampled_pi_y, sampled_v_y],
batch_size=BATCH_SIZE,
validation_split=0.05,
epochs=EPOCHS,
shuffle=True)
cur_model.save_weights(SAVE_WEIGHTS_DIR, MODEL_PREFIX, version)
# cur_model.save(SAVE_MODELS_DIR, MODEL_PREFIX, version)
def evaluate(worker_id, best_model, cur_model, num_games):
# Load the current model in the worker only for prediction and set GPU limit
import tensorflow as tf
tf_config = tf.ConfigProto()
tf_config.gpu_options.allow_growth = True
session = tf.Session(config=tf_config)
from keras.backend.tensorflow_backend import set_session
set_session(session=session)
# Re-seed the generators: since the RNG was copied from parent process
np.random.seed() # None seed to source from /dev/urandom
random.seed()
from ai_vs_ai import agent_match
cur_model_wincount = 0
best_model_wincount = 0
draw_count = 0
for i in range(num_games):
# Alternate players
if i % 2 == 0:
winner = agent_match(best_model, cur_model, num_games=1, enforce_move_limit=True)
else:
winner = agent_match(cur_model, best_model, num_games=1, enforce_move_limit=True)
if winner == cur_model:
cur_model_wincount += 1
elif winner == best_model:
best_model_wincount += 1
else:
draw_count += 1
print('Worker {}: cur_model "{}" wins {}/{} games'.format(worker_id, cur_model, cur_model_wincount, num_games))
print('Worker {}: best_model "{}" wins {}/{} games'.format(worker_id, best_model, best_model_wincount, num_games))
print('Worker {}: {}/{} games were draw'.format(worker_id, draw_count, num_games))
return cur_model_wincount
def evaluate_in_parallel(best_model, cur_model, num_games, num_workers):
utils.stress_message('Evaluating model "{}" against current best model "{}" on {} games'
.format(cur_model, best_model, num_games), True)
# Process pool for parallelism
process_pool = mp.Pool(processes=num_workers)
work_share = num_games // num_workers
worker_results = []
# Send processes to generate self plays
for i in range(num_workers):
if i == num_workers - 1:
work_share += (num_games % num_workers)
# Send workers
result_async = process_pool.apply_async(
evaluate,
args=(i + 1, best_model, cur_model, work_share))
worker_results.append(result_async)
try:
# Join processes and count games
cur_model_wincount = 0
for result in worker_results:
cur_model_wincount += result.get()
process_pool.close()
# Exit early if needed
except KeyboardInterrupt:
utils.stress_message('SIGINT caught, exiting')
process_pool.terminate()
process_pool.join()
exit()
process_pool.join()
utils.stress_message('Overall, cur_model "{}" wins {}/{} against best_model "{}"'
.format(cur_model, cur_model_wincount, num_games, best_model), True)
return cur_model_wincount
def evolve(cur_model_path, other_opponent_for_selfplay, iteration_count, best_model):
while True:
# print some useful message
message = 'At {}, Starting to generate self-plays for Version {}'.format(utils.cur_time(), iteration_count)
utils.stress_message(message, True)
##########################
##### GENERATE PLAYS #####
##########################
if other_opponent_for_selfplay is not None:
print('(Generating games using current model {} and other model {})'.format(cur_model_path, other_opponent_for_selfplay))
games = generate_self_play_in_parallel(cur_model_path, NUM_SELF_PLAY, NUM_WORKERS, model2_path=other_opponent_for_selfplay)
elif best_model is not None:
print('(Generating games using given best model: {})'.format(best_model))
games = generate_self_play_in_parallel(best_model, NUM_SELF_PLAY, NUM_WORKERS)
else:
# # Use previous version to generate selfplay if necessary
# model2_path = None
# if SELF_PLAY_DIFF_MODEL and ITERATION_COUNT > 1:
# model2_version = get_rand_prev_version(ITERATION_COUNT)
# model2_path = get_weights_path_from_version(model2_version)
# utils.stress_message('.. and vs. Version {}'.format(model2_version))
#
# games = generate_self_play_in_parallel(cur_model_path, NUM_SELF_PLAY, NUM_WORKERS, model2_path)
games = generate_self_play_in_parallel(cur_model_path, NUM_SELF_PLAY, NUM_WORKERS)
##########################
##### PREPARING DATA #####
##########################
# Convert self-play games to training data
board_x, pi_y, v_y = utils.convert_to_train_data(games)
board_x, pi_y, v_y = utils.augment_train_data(board_x, pi_y, v_y)
# Numpyify and save for later iterations
board_x, pi_y, v_y = np.array(board_x), np.array(pi_y), np.array(v_y)
if len(board_x) > 0 and len(pi_y) > 0 and len(v_y) > 0:
utils.save_train_data(board_x, pi_y, v_y, version=iteration_count)
# Get prev iters training data
board_x, pi_y, v_y, data_iters_used = combine_prev_iters_train_data(board_x, pi_y, v_y, iteration_count)
assert len(board_x) == len(pi_y) == len(v_y)
# Train only if there were data
if data_iters_used == 0:
utils.stress_message('No training data for iteration {}! Re-iterating...'.format(iteration_count))
continue
# Calculate training set retention rate including current iteration; use default if too high
data_retention_rate = min(1. / data_iters_used, DEF_DATA_RETENTION_RATE)
#################
##### TRAIN #####
#################
# Use a *new process* to train since we DONT want to load TF in the parent process
training_process = mp.Process(
target=train,
args=(cur_model_path, board_x, pi_y, v_y, data_retention_rate, iteration_count))
training_process.start()
training_process.join()
# Update path variable since we made a new version
# cur_model_path = get_model_path_from_version(ITERATION_COUNT)
cur_model_path = get_weights_path_from_version(iteration_count)
####################
##### EVALUATE #####
####################
if best_model is not None:
cur_model_wincount = evaluate_in_parallel(best_model, cur_model_path, EVAL_GAMES, NUM_WORKERS)
if cur_model_wincount > int(0.55 * EVAL_GAMES):
best_model = cur_model_path
utils.stress_message('Now using {} as the best model'.format(best_model))
else:
utils.stress_message('Output model of this iteration is not better; retaining {} as the best model'.format(best_model), True)
# Update version number
iteration_count += 1
def combine_prev_iters_train_data(board_x, pi_y, v_y, iteration_count):
all_board_x, all_pi_y, all_v_y = [], [], []
if len(board_x) > 0 and len(pi_y) > 0 and len(v_y) > 0:
all_board_x.append(board_x)
all_pi_y.append(pi_y)
all_v_y.append(v_y)
# Read data from previous iterations
for i in range(iteration_count - PAST_ITER_COUNT, iteration_count):
if i >= 0:
filename = '{}/{}{}.h5'.format(SAVE_TRAIN_DATA_DIR, SAVE_TRAIN_DATA_PREF, i)
if not os.path.exists(filename):
utils.stress_message('{} does not exist!'.format(filename))
continue
with h5py.File(filename, 'r') as H:
all_board_x.append(np.copy(H['board_x']))
all_pi_y.append(np.copy(H['pi_y']))
all_v_y.append(np.copy(H['v_y']))
if len(all_board_x) > 0 and len(all_pi_y) > 0 and len(all_v_y) > 0:
# Make a pool of training data from previous iterations
board_x = np.vstack(all_board_x)
pi_y = np.vstack(all_pi_y)
v_y = np.hstack(all_v_y) # hstack as v_y is 1D array
return board_x, pi_y, v_y, len(all_board_x) # Last retval is total iterations used
# If no data at all: return empty training data
return [], [], [], 0
def get_model_path_from_version(version):
return '{}/{}{:0>4}.h5'.format(SAVE_MODELS_DIR, MODEL_PREFIX, version)
def get_weights_path_from_version(version):
return '{}/{}{:0>4}-weights.h5'.format(SAVE_WEIGHTS_DIR, MODEL_PREFIX, version)
def get_rand_prev_version(upper):
return np.random.randint(upper // 2, upper)
def build_parser():
parser = argparse.ArgumentParser()
parser.add_argument('--cur_model_path', '-c',
dest='model_path',
help='current model_path of model to optimize on (newest model)')
parser.add_argument('--best_model_path', '-b',
dest='best_model_path',
help='best model')
parser.add_argument('--other_opponent_for_selfplay', '-p',
dest='other_opponent_for_selfplay',
help='path to another model used as the selfplay opponent; if given, self-plays are \
generated between the current model and this opponent, otherwise only one model is used')
return parser
if __name__ == '__main__':
parser = build_parser()
args = parser.parse_args()
model_path = args.model_path
best_model = args.best_model_path
other_opponent_for_selfplay = args.other_opponent_for_selfplay
try:
# Read the count from file name
iteration_count = utils.find_version_given_filename(model_path) + 1
except:
iteration_count = 0
if best_model is not None:
print('\nBest model {} specified!\n'.format(best_model))
if other_opponent_for_selfplay is not None:
print('\nOpponent {} for selfplay specified\n'.format(other_opponent_for_selfplay))
utils.stress_message('Starting training from version: {}'.format(iteration_count), True)
evolve(model_path, other_opponent_for_selfplay, iteration_count, best_model)
<file_sep>import numpy as np
from collections import deque
from player import HumanPlayer, GreedyPlayer, AiPlayer
from board import Board
from config import *
class Game:
def __init__(self, p1_type=None, p2_type=None, verbose=True, model1=None, model2=None, tree_tau=DET_TREE_TAU):
if p1_type is None or p2_type is None:
p1_type, p2_type = self.get_player_types()
p1_type = p1_type[0].lower()
p2_type = p2_type[0].lower()
if p1_type == 'h':
self.player_one = HumanPlayer(player_num=1)
elif p1_type == 'g':
self.player_one = GreedyPlayer(player_num=1)
else:
self.player_one = AiPlayer(player_num=1, model=model1, tree_tau=tree_tau)
if p2_type == 'h':
self.player_two = HumanPlayer(player_num=2)
elif p2_type == 'g':
self.player_two = GreedyPlayer(player_num=2)
else:
self.player_two = AiPlayer(player_num=2, model=(model1 if model2 is None else model2), tree_tau=tree_tau)
self.cur_player = self.player_one
self.next_player = self.player_two
self.verbose = verbose
self.board = Board()
def get_player_types(self):
p1_type = p2_type = ''
while 1:
p1_type = input('Enter player type of player 1 ([H]uman/[G]reedyRobot/[A]I): ')
if p1_type[0].lower() in TYPES_OF_PLAYERS:
break
print('Invalid input. Try again.')
while 1:
p2_type = input('Enter player type of player 2 ([H]uman/[G]reedyRobot/[A]I): ')
if p2_type[0].lower() in TYPES_OF_PLAYERS:
break
print('Invalid input. Try again.')
return p1_type, p2_type
def swap_players(self):
self.cur_player, self.next_player = self.next_player, self.cur_player
def start(self, enforce_move_limit=False):
np.random.seed()
total_moves = 0
history_dests = deque()
num_moves = 0
while True:
move_from, move_to = self.cur_player.decide_move(self.board, verbose=self.verbose, total_moves=total_moves) # Get move from player
winner = self.board.place(self.cur_player.player_num, move_from, move_to) # Make the move on board and check winner
total_moves += 1
if self.verbose:
print('Total Moves: {}'.format(total_moves))
if winner:
break
if len(history_dests) == TOTAL_HIST_MOVES:
history_dests.popleft()
history_dests.append(move_to)
# Impose repetition limit
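            # history_dests alternates between the two players, so stepping backwards in twos
            # collects only the destination squares of the player who just moved.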
cur_player_hist_dest = set([history_dests[i] for i in range(len(history_dests) - 1, -1, -2)])
if len(history_dests) == TOTAL_HIST_MOVES and len(cur_player_hist_dest) <= UNIQUE_DEST_LIMIT:
print('Repetition detected: stopping game')
winner = None
break
num_moves += 1
if enforce_move_limit and num_moves >= PROGRESS_MOVE_LIMIT:
                print('Game stopped after reaching progress move limit; game discarded')
winner = None
break
self.swap_players()
if self.verbose:
self.board.visualise()
if winner is not None:
print('Player {} wins!'.format(winner))
return winner
if __name__ == '__main__':
'''
Ad hoc games
'''
from collections import Counter
wincount = Counter()
for i in range(10000):
game = Game(p1_type='greedy', p2_type='greedy', verbose=False)
game.player_two = GreedyPlayer(player_num=2, stochastic=True)
wincount[game.start()] += 1
print(wincount)
'''
Counter({1: 5172, 2: 4675, None: 153})
Counter({1: 5233, 2: 4594, None: 173})
    deterministic wins 9908, stochastic wins 9766, draws 326
'''
<file_sep>''' Player '''
# Fixed 2 player
PLAYER_ONE = 1
PLAYER_TWO = 2
''' Board/Game '''
ROWS_OF_CHECKERS = 3
NUM_CHECKERS = (1 + ROWS_OF_CHECKERS) * ROWS_OF_CHECKERS // 2
NUM_DIRECTIONS = 6
BOARD_WIDTH = BOARD_HEIGHT = ROWS_OF_CHECKERS * 2 + 1
BOARD_HIST_MOVES = 3 # Number of history moves to keep
TYPES_OF_PLAYERS = ['h', 'g', 'a']
PLAYER_ONE_DISTANCE_OFFSET = 70
PLAYER_TWO_DISTANCE_OFFSET = -14
TOTAL_HIST_MOVES = 16 # Total number of history moves to keep for checking repetitions
UNIQUE_DEST_LIMIT = 3
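# Together these implement a simple repetition rule: if, over the last TOTAL_HIST_MOVES recorded
# moves, the player who just moved has visited no more than UNIQUE_DEST_LIMIT distinct destination
# squares, the game is stopped and recorded as a draw (winner None) -- see Game.start.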
''' Dirichlet Noise '''
DIRICHLET_ALPHA = 0.03      # Alpha for ~ Dir(alpha), assuming a symmetric Dirichlet distribution
DIR_NOISE_FACTOR = 0.25     # Weight of Dirichlet noise on root prior probabilities
''' Model '''
# Model input dimensions
INPUT_DIM = (BOARD_WIDTH, BOARD_HEIGHT, BOARD_HIST_MOVES * 2 + 1)
NUM_FILTERS = 64 # Default number of filters for conv layers
NUM_RESIDUAL_BLOCKS = 12 # Number of residual blocks in the model
''' MCTS and RL '''
PROGRESS_MOVE_LIMIT = 100
REWARD = {'lose' : -1, 'draw' : 0, 'win' : 1}
REWARD_FACTOR = 10 # Scale the reward if necessary
TREE_TAU = 1
DET_TREE_TAU = 0.01
C_PUCT = 3.5
MCTS_SIMULATIONS = 175
EPSILON = 1e-5
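# Opening behaviour (assumed from the names): the first INITIAL_RANDOM_MOVES are randomised, and
# the exploratory temperature TREE_TAU only applies for the first TOTAL_MOVES_TILL_TAU0 moves,
# after which move selection becomes effectively deterministic (see DET_TREE_TAU).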
TOTAL_MOVES_TILL_TAU0 = 16
INITIAL_RANDOM_MOVES = 6
DIST_THRES_FOR_REWARD = 2   # Threshold on the players' forward-distance difference when assigning a reward
EVAL_GAMES = 24
''' Loss Weights depending on training '''
LOSS_WEIGHTS = { 'policy_head': 1., 'value_head': 1. }
''' Train '''
SAVE_MODELS_DIR = 'saved-models/'
SAVE_WEIGHTS_DIR = 'saved-weights/'
MODEL_PREFIX = 'version'
SAVE_TRAIN_DATA_DIR = 'generated-training-data/'
SAVE_TRAIN_DATA_PREF = 'data-for-iter-'
PAST_ITER_COUNT = 1 # Number of past iterations to use
DEF_DATA_RETENTION_RATE = 0.5 # Default percentage of training data to keep when sampling
BATCH_SIZE = 32
REG_CONST = 6e-3 # Weight decay constant (l1/l2 regularizer)
LEARNING_RATE = 0.0001      # Training learning rate
EPOCHS = 5 # Training Epochs
NUM_SELF_PLAY = 180 # Total number of self plays to generate
NUM_WORKERS = 12 # For generating self plays in parallel
SELF_PLAY_DIFF_MODEL = False
''' Greedy-Supervised Training '''
# G_NUM_GAMES = 60000
G_AVG_GAME_LEN = 21
G_DATA_RETENTION_RATE = 1. / G_AVG_GAME_LEN
G_EPOCHS = 100
G_ITER_PER_EPOCH = 100
G_GAMES_PER_EPOCH = 15000
G_VAL_SPLIT = 0.1
G_NORMAL_GAME_RATIO = 0.2
G_RAND_START_GAME_RATIO = 0.3
G_MODEL_PREFIX = 'greedy-model'
G_BATCH_SIZE = 32
# G_NUM_VAL_DATA = 3000
''' Greedy Data Generator '''
THRESHOLD_FOR_RANDOMIZATION = 2
AVERAGE_TOTAL_MOVE = 43
STUCK_TIME_LIMIT = 0.1
"""
NOTE:
When training in greedy:
- use lower regulurisation (1e-4)
- 1 weight on value head
When training in RL:
- higher REG (5e-3)
- 1. weight on both value and policy head
- ~5 Epoch
- 12 Workers
- ~144 MCTS Simulations
- TREE_TAU = 1
When testing:
- Use DET_TREE_TAU, which is already set in Game.py (Or TREE_TAU = 0.01)
- MCST Similations = depending on need
"""
|
3825d4e3c8de2dcc3e0b228cf15a6f00c015575f
|
[
"Markdown",
"Python"
] | 24
|
Python
|
kenziyuliu/PythonChineseCheckers
|
ffa8155dca3c78dae2530ce7ce769b3047a7d74d
|
d069bcdb6e45fbc5794c442779df4ecc5a2c5963
|
refs/heads/master
|
<repo_name>AlcaDesign/tmi.js-channelset<file_sep>/index.js
const tmijsClient = require('tmi.js').client,
normalizeChannel = require('tmi.js/lib/utils').channel;
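// A Set of the channels the wrapped client is currently in, kept in sync via the
// client's 'join', 'part' and 'disconnected' events.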
class ChannelSet extends Set {
constructor(client) {
if(!(client instanceof tmijsClient)) {
throw new TypeError('parameter 1 is not of type \'tmijs.client\'.');
}
super(client.channels || []);
this.client = client;
this.client
.on('join', (channel, username, self) => {
if(self) {
this.add(channel);
}
})
.on('part', (channel, username, self) => {
if(self) {
this.delete(channel);
}
})
.on('disconnected', reason => {
this.clear();
});
}
add(value) {
return super.add(normalizeChannel(value));
}
delete(value) {
return super.delete(normalizeChannel(value));
}
has(value) {
return super.has(normalizeChannel(value));
}
}
module.exports = ChannelSet;
<file_sep>/README.md
# What
This module exposes a [tmi.js](https://npmjs.com/tmi.js) client's joined channels as a
[Set](https://mdn.io/Set) instance, kept up to date on channel joins, channel parts, and client disconnects.
# Install
```
$ npm install --save tmi.js-channelset
```
# Usage
```javascript
const tmijs = require('tmi.js'),
	ChannelSet = require('tmi.js-channelset');

let client = new tmijs.client({
		/* ... */
	}),
	channels = new ChannelSet(client);
```
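
Because `add`, `delete`, and `has` normalize channel names through tmi.js' utils, membership
checks work with or without the leading `#`. A minimal usage sketch (assuming the client above
has connected and joined `#alca`):

```javascript
client.on('join', (channel, username, self) => {
	if(self && channels.has('alca')) {
		console.log(`Joined ${channel}; currently in ${channels.size} channel(s).`);
	}
});
```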
# Example
```
$ git clone https://github.com/AlcaDesign/tmi.js-channelset.git
$ cd tmi.js-channelset
$ npm install
$ node examples\basic.js
```
<file_sep>/examples/basic.js
/*
Expected Timeline:
[0.000]: connected
[0.100]: joined first channel
[2.100]: joined second channel
[2.200]: parted first channel
[4.100]: disconnected
*/
const tmijs = require('tmi.js'),
ChannelSet = require('..'),
utils = require('tmi.js/lib/utils');
let joinTheseChannels = [ 'alca', 'twitchplayspokemon' ],
client = new tmijs.client({
connection: { reconnect: true },
channels: [ 'alca', 'twitchplayspokemon' ]
}),
channels = new ChannelSet(client),
start = false;
function log(...text) {
if(start === false) {
start = Date.now();
}
let now = new Date(new Date() - start),
seconds = now.getSeconds(),
milli = ('00' + now.getMilliseconds()).slice(-3);
text.unshift(`[${seconds}.${milli}]`);
console.log.apply(console, text);
}
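// Build a helper that wraps text in an ANSI colour escape sequence
// (91 = bright red, 92 = bright green, 96 = bright cyan, 90 = grey, 97 = white; 39 resets).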
function colorize(code) {
return (...text) => `\u001b[${code}m${text.join(' ')}\u001b[39m`;
}
let red = colorize(91),
green = colorize(92),
cyan = colorize(96),
grey = colorize(90),
white = colorize(97);
function check(reason) {
log(white('Checking channels:'), reason ? grey(`(${reason})`) : '');
joinTheseChannels.forEach(name => {
log(' -', (channels.has(name) ? green : red)(name));
});
}
client.connect()
.then(() => check('connected'))
.then(() => utils.promiseDelay(4000))
.then(() => client.disconnect());
client.on('join', (channel, username, self) => {
if(self) {
check(`joined ${channel}`);
if(joinTheseChannels.indexOf(channel.replace('#', '')) === 0) {
setTimeout(client.part.bind(client), 2000, channel);
}
}
});
client.on('part', (channel, username, self) => {
if(self) check(`parted ${channel}`);
});
client.on('disconnected', reason => {
check(`disconnected: ${reason}`);
});
|
8e2708c2ad1e6c4a9b0934c418f9c634f3200a16
|
[
"JavaScript",
"Markdown"
] | 3
|
JavaScript
|
AlcaDesign/tmi.js-channelset
|
3fee9e96cd3e287af7390b909d535167c556ac87
|
0510ee9015742e7f3a7f500207bfd85c2008caf2
|
refs/heads/master
|
<file_sep># Assignment by <NAME>.
<file_sep>jQuery(document).ready(function($){
    console.log($.fn.flexslider); // debug: confirm the FlexSlider plugin is loaded (a bare `flexslider` global is not defined)
$('.slider').flexslider({
selector: ".tabs > .tab",
animation: "slide"
});
});
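// Google Maps callback; presumably referenced via the `callback=initMap` parameter on the
// Maps API <script> tag in the page markup (not included in this repository).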
function initMap() {
var map = new google.maps.Map(document.getElementById('map'), {
zoom: 18,
center: {
lat: 49.8448856,
lng: 24.0234585
},
disableDefaultUI: true
});
}
|
d0f89c43a0dbbc3c6ffa2163ddcd02a6ea57dba7
|
[
"Markdown",
"JavaScript"
] | 2
|
Markdown
|
Oleg-Ovcharenko/Luka
|
d05804237b4076ca2e927e730ec76269ce63c22b
|
8b74d0910a60934335daf39b7044df79fc4fa45d
|